From 3b7aa8242ab44e48a62199d19426a9776d9c565d Mon Sep 17 00:00:00 2001 From: nym21 Date: Mon, 23 Feb 2026 17:22:12 +0100 Subject: [PATCH] global: MASSIVE snapshot --- Cargo.lock | 232 +- Cargo.toml | 12 +- crates/brk/Cargo.toml | 2 +- crates/brk/README.md | 7 +- crates/brk_bindgen/src/analysis/tree.rs | 22 +- crates/brk_bindgen/src/generate/constants.rs | 2 +- .../src/generators/javascript/client.rs | 167 +- .../brk_bindgen/src/generators/python/api.rs | 13 +- .../src/generators/python/client.rs | 187 +- .../brk_bindgen/src/generators/python/mod.rs | 2 +- crates/brk_bindgen/src/generators/rust/api.rs | 2 +- .../brk_bindgen/src/generators/rust/client.rs | 2 +- crates/brk_bindgen/src/types/case.rs | 4 +- crates/brk_cli/Cargo.toml | 5 +- crates/brk_cli/README.md | 15 +- crates/brk_cli/src/config.rs | 31 +- crates/brk_cli/src/main.rs | 11 +- crates/brk_client/Cargo.toml | 1 + crates/brk_client/README.md | 2 +- crates/brk_client/examples/basic.rs | 23 +- crates/brk_client/examples/fetch_prices.rs | 51 - crates/brk_client/examples/tree.rs | 2 +- crates/brk_client/src/lib.rs | 4865 +++++++++------- crates/brk_cohort/src/address.rs | 14 +- crates/brk_computer/Cargo.toml | 5 +- crates/brk_computer/README.md | 17 +- crates/brk_computer/examples/computer.rs | 19 +- .../brk_computer/examples/computer_bench.rs | 16 +- crates/brk_computer/examples/computer_read.rs | 21 +- crates/brk_computer/examples/full_bench.rs | 21 +- crates/brk_computer/src/blocks/compute.rs | 41 +- .../brk_computer/src/blocks/count/compute.rs | 228 +- .../brk_computer/src/blocks/count/import.rs | 77 +- crates/brk_computer/src/blocks/count/vecs.rs | 92 +- .../src/blocks/difficulty/compute.rs | 101 +- .../src/blocks/difficulty/import.rs | 16 +- .../src/blocks/difficulty/vecs.rs | 21 +- .../src/blocks/halving/compute.rs | 59 +- .../brk_computer/src/blocks/halving/import.rs | 13 +- .../brk_computer/src/blocks/halving/vecs.rs | 13 +- crates/brk_computer/src/blocks/import.rs | 13 +- 
.../src/blocks/interval/compute.rs | 30 +- .../src/blocks/interval/import.rs | 24 +- .../brk_computer/src/blocks/interval/vecs.rs | 9 +- .../brk_computer/src/blocks/mining/compute.rs | 234 - crates/brk_computer/src/blocks/mining/vecs.rs | 26 - crates/brk_computer/src/blocks/mod.rs | 35 +- .../src/blocks/rewards/compute.rs | 175 - .../brk_computer/src/blocks/rewards/import.rs | 46 - .../brk_computer/src/blocks/rewards/vecs.rs | 17 - .../brk_computer/src/blocks/size/compute.rs | 15 +- crates/brk_computer/src/blocks/size/import.rs | 19 +- crates/brk_computer/src/blocks/size/vecs.rs | 9 +- .../brk_computer/src/blocks/time/compute.rs | 35 +- crates/brk_computer/src/blocks/time/import.rs | 13 +- crates/brk_computer/src/blocks/time/vecs.rs | 8 +- .../brk_computer/src/blocks/weight/compute.rs | 7 +- .../brk_computer/src/blocks/weight/import.rs | 8 +- crates/brk_computer/src/blocks/weight/vecs.rs | 7 +- .../src/cointime/activity/compute.rs | 68 +- .../src/cointime/activity/import.rs | 2 +- .../src/cointime/activity/vecs.rs | 15 +- .../src/cointime/adjusted/compute.rs | 59 +- .../src/cointime/adjusted/import.rs | 10 +- .../src/cointime/adjusted/vecs.rs | 13 +- .../brk_computer/src/cointime/cap/compute.rs | 111 +- .../brk_computer/src/cointime/cap/import.rs | 2 +- crates/brk_computer/src/cointime/cap/vecs.rs | 15 +- crates/brk_computer/src/cointime/compute.rs | 85 +- crates/brk_computer/src/cointime/import.rs | 16 +- crates/brk_computer/src/cointime/mod.rs | 20 +- .../src/cointime/pricing/compute.rs | 95 +- .../src/cointime/pricing/import.rs | 12 +- .../brk_computer/src/cointime/pricing/vecs.rs | 24 +- .../src/cointime/reserve_risk/compute.rs | 44 +- .../src/cointime/reserve_risk/import.rs | 9 +- .../src/cointime/reserve_risk/vecs.rs | 16 +- .../src/cointime/supply/compute.rs | 37 +- .../src/cointime/supply/import.rs | 10 +- .../brk_computer/src/cointime/supply/vecs.rs | 9 +- .../src/cointime/value/compute.rs | 36 +- .../brk_computer/src/cointime/value/import.rs | 2 +- 
.../brk_computer/src/cointime/value/vecs.rs | 13 +- crates/brk_computer/src/constants.rs | 2 +- .../src/distribution/address/activity.rs | 114 +- .../src/distribution/address/address_count.rs | 182 +- .../src/distribution/address/data.rs | 45 +- .../src/distribution/address/growth_rate.rs | 37 +- .../src/distribution/address/indexes/any.rs | 88 +- .../distribution/address/new_addr_count.rs | 63 +- .../distribution/address/total_addr_count.rs | 35 +- .../address/type_map/height_vec.rs | 4 +- .../address/type_map/index_map.rs | 38 +- .../src/distribution/address/type_map/vec.rs | 4 +- .../src/distribution/block/cache/address.rs | 35 +- .../src/distribution/block/cache/lookup.rs | 6 +- .../block/cohort/address_updates.rs | 4 +- .../src/distribution/block/cohort/mod.rs | 10 +- .../src/distribution/block/cohort/received.rs | 8 +- .../src/distribution/block/cohort/sent.rs | 25 +- .../distribution/block/cohort/tx_counts.rs | 2 +- .../src/distribution/block/mod.rs | 6 +- .../src/distribution/block/utxo/inputs.rs | 21 +- .../src/distribution/block/utxo/outputs.rs | 11 +- .../distribution/cohorts/address/groups.rs | 86 +- .../src/distribution/cohorts/address/vecs.rs | 102 +- .../src/distribution/cohorts/traits.rs | 28 +- .../src/distribution/cohorts/utxo/groups.rs | 253 +- .../src/distribution/cohorts/utxo/receive.rs | 6 +- .../src/distribution/cohorts/utxo/send.rs | 52 +- .../distribution/cohorts/utxo/tick_tock.rs | 2 +- .../src/distribution/cohorts/utxo/vecs.rs | 79 +- .../src/distribution/compute/aggregates.rs | 41 +- .../src/distribution/compute/block_loop.rs | 252 +- .../src/distribution/compute/context.rs | 53 +- .../src/distribution/compute/mod.rs | 10 +- .../src/distribution/compute/readers.rs | 146 +- .../src/distribution/compute/recover.rs | 6 +- .../src/distribution/compute/write.rs | 6 +- .../src/distribution/metrics/activity.rs | 95 +- .../src/distribution/metrics/config.rs | 36 +- .../src/distribution/metrics/cost_basis.rs | 127 +- 
.../src/distribution/metrics/mod.rs | 334 +- .../src/distribution/metrics/outputs.rs | 55 +- .../src/distribution/metrics/realized.rs | 1052 ++-- .../src/distribution/metrics/relative.rs | 296 +- .../src/distribution/metrics/supply.rs | 63 +- .../src/distribution/metrics/unrealized.rs | 388 +- .../src/distribution/range_map.rs | 6 +- .../src/distribution/state/block.rs | 4 +- .../src/distribution/state/cohort/address.rs | 51 +- .../src/distribution/state/cohort/base.rs | 262 +- .../src/distribution/state/cohort/utxo.rs | 14 +- .../src/distribution/state/cost_basis/data.rs | 74 +- .../state/cost_basis/percentiles.rs | 12 +- .../distribution/state/cost_basis/realized.rs | 94 +- .../state/cost_basis/unrealized.rs | 89 +- .../src/distribution/state/transacted.rs | 2 +- crates/brk_computer/src/distribution/vecs.rs | 187 +- crates/brk_computer/src/indexes/address.rs | 52 +- crates/brk_computer/src/indexes/dateindex.rs | 28 - crates/brk_computer/src/indexes/day1.rs | 24 + crates/brk_computer/src/indexes/day3.rs | 20 + .../brk_computer/src/indexes/decadeindex.rs | 24 - .../src/indexes/difficultyepoch.rs | 14 +- .../brk_computer/src/indexes/halvingepoch.rs | 12 +- crates/brk_computer/src/indexes/height.rs | 54 +- crates/brk_computer/src/indexes/hour1.rs | 20 + crates/brk_computer/src/indexes/hour12.rs | 20 + crates/brk_computer/src/indexes/hour4.rs | 20 + crates/brk_computer/src/indexes/minute1.rs | 20 + crates/brk_computer/src/indexes/minute10.rs | 20 + crates/brk_computer/src/indexes/minute30.rs | 20 + crates/brk_computer/src/indexes/minute5.rs | 20 + crates/brk_computer/src/indexes/mod.rs | 761 ++- crates/brk_computer/src/indexes/month1.rs | 22 + crates/brk_computer/src/indexes/month3.rs | 22 + crates/brk_computer/src/indexes/month6.rs | 22 + crates/brk_computer/src/indexes/monthindex.rs | 32 - .../brk_computer/src/indexes/quarterindex.rs | 24 - .../brk_computer/src/indexes/semesterindex.rs | 24 - crates/brk_computer/src/indexes/txindex.rs | 16 +- 
crates/brk_computer/src/indexes/txinindex.rs | 8 +- crates/brk_computer/src/indexes/txoutindex.rs | 8 +- crates/brk_computer/src/indexes/week1.rs | 22 + crates/brk_computer/src/indexes/weekindex.rs | 24 - crates/brk_computer/src/indexes/year1.rs | 22 + crates/brk_computer/src/indexes/year10.rs | 22 + crates/brk_computer/src/indexes/yearindex.rs | 26 - crates/brk_computer/src/inputs/compute.rs | 2 +- .../brk_computer/src/inputs/count/compute.rs | 2 +- .../brk_computer/src/inputs/count/import.rs | 2 +- crates/brk_computer/src/inputs/count/vecs.rs | 5 +- crates/brk_computer/src/inputs/import.rs | 2 +- crates/brk_computer/src/inputs/mod.rs | 10 +- .../brk_computer/src/inputs/spent/compute.rs | 23 +- .../brk_computer/src/inputs/spent/import.rs | 2 +- crates/brk_computer/src/inputs/spent/vecs.rs | 10 +- crates/brk_computer/src/internal/compute.rs | 288 +- .../internal/multi/date_derived/average.rs | 58 - .../src/internal/multi/date_derived/first.rs | 58 - .../src/internal/multi/date_derived/full.rs | 68 - .../src/internal/multi/date_derived/last.rs | 58 - .../src/internal/multi/date_derived/max.rs | 82 - .../src/internal/multi/date_derived/min.rs | 82 - .../src/internal/multi/date_derived/mod.rs | 19 - .../src/internal/multi/date_derived/spread.rs | 67 - .../src/internal/multi/date_derived/sum.rs | 58 - .../internal/multi/date_derived/sum_cum.rs | 62 - .../src/internal/multi/from_date/average.rs | 64 - .../internal/multi/from_date/binary_last.rs | 954 --- .../internal/multi/from_date/binary_sum.rs | 134 - .../multi/from_date/binary_sum_cum.rs | 324 -- .../src/internal/multi/from_date/first.rs | 64 - .../src/internal/multi/from_date/last.rs | 73 - .../src/internal/multi/from_date/lazy.rs | 88 - .../multi/from_date/lazy_binary_price.rs | 152 - .../multi/from_date/lazy_distribution.rs | 69 - .../src/internal/multi/from_date/lazy_full.rs | 62 - .../src/internal/multi/from_date/lazy_last.rs | 117 - .../internal/multi/from_date/lazy_price.rs | 62 - 
.../src/internal/multi/from_date/lazy_sum.rs | 57 - .../internal/multi/from_date/lazy_sum_cum.rs | 59 - .../src/internal/multi/from_date/max.rs | 95 - .../src/internal/multi/from_date/min.rs | 95 - .../src/internal/multi/from_date/mod.rs | 53 - .../src/internal/multi/from_date/price.rs | 48 - .../src/internal/multi/from_date/ratio.rs | 418 -- .../src/internal/multi/from_date/stddev.rs | 503 -- .../internal/multi/from_date/unary_last.rs | 58 - .../internal/multi/from_date/value_change.rs | 77 - .../multi/from_date/value_change_derived.rs | 84 - .../multi/from_date/value_derived_last.rs | 86 - .../internal/multi/from_date/value_last.rs | 146 - .../multi/from_date/value_lazy_last.rs | 94 - .../internal/multi/from_height/binary_full.rs | 26 +- .../internal/multi/from_height/binary_last.rs | 370 +- .../internal/multi/from_height/binary_sum.rs | 50 +- .../multi/from_height/binary_sum_cum.rs | 173 +- .../internal/multi/from_height/constant.rs | 98 + .../multi/from_height/distribution.rs | 46 +- .../src/internal/multi/from_height/full.rs | 33 +- .../src/internal/multi/from_height/last.rs | 47 +- .../lazy_binary_computed_distribution.rs | 33 +- .../from_height/lazy_binary_computed_full.rs | 25 +- .../from_height/lazy_binary_computed_last.rs | 32 +- .../from_height/lazy_binary_computed_sum.rs | 32 +- .../lazy_binary_computed_sum_cum.rs | 28 +- .../multi/from_height/lazy_binary_price.rs | 72 - .../multi/from_height/lazy_computed_full.rs | 44 +- .../from_height/lazy_computed_sum_cum.rs | 24 +- .../multi/from_height/lazy_distribution.rs | 95 - .../internal/multi/from_height/lazy_full.rs | 32 +- .../internal/multi/from_height/lazy_last.rs | 72 +- .../from_height/lazy_price_from_cents.rs | 47 - .../internal/multi/from_height/lazy_sum.rs | 32 +- .../multi/from_height/lazy_sum_cum.rs | 27 +- .../lazy_transform_distribution.rs | 10 +- .../internal/multi/from_height/lazy_value.rs | 30 +- .../src/internal/multi/from_height/mod.rs | 22 +- .../{from_date => from_height}/percentiles.rs | 
94 +- .../src/internal/multi/from_height/price.rs | 182 +- .../src/internal/multi/from_height/ratio.rs | 418 ++ .../src/internal/multi/from_height/stddev.rs | 501 ++ .../multi/from_height/stored_value_last.rs | 65 + .../src/internal/multi/from_height/sum.rs | 45 +- .../src/internal/multi/from_height/sum_cum.rs | 75 +- .../internal/multi/from_height/unary_last.rs | 65 - .../multi/from_height/value_binary.rs | 159 +- .../multi/from_height/value_change.rs | 68 + .../internal/multi/from_height/value_ema.rs | 68 + .../internal/multi/from_height/value_full.rs | 78 +- .../internal/multi/from_height/value_last.rs | 96 +- .../from_height/value_lazy_binary_last.rs | 56 +- .../value_lazy_computed_sum_cum.rs | 77 +- .../multi/from_height/value_lazy_last.rs | 19 +- .../multi/from_height/value_lazy_sum_cum.rs | 79 +- .../internal/multi/from_height/value_sum.rs | 81 +- .../multi/from_height/value_sum_cum.rs | 117 +- .../multi/from_height_and_date/binary_last.rs | 76 - .../from_height_and_date/computed_ohlc.rs | 68 - .../multi/from_height_and_date/constant.rs | 94 - .../multi/from_height_and_date/first.rs | 72 - .../multi/from_height_and_date/last.rs | 72 - .../multi/from_height_and_date/lazy_ohlc.rs | 30 - .../multi/from_height_and_date/max.rs | 105 - .../multi/from_height_and_date/min.rs | 105 - .../multi/from_height_and_date/mod.rs | 23 - .../multi/from_height_and_date/price.rs | 49 - .../multi/from_height_and_date/unary_last.rs | 71 - .../multi/from_height_and_date/value_last.rs | 81 - .../multi/from_tx/lazy_distribution.rs | 14 +- .../internal/multi/from_tx/value_dollars.rs | 185 +- .../src/internal/multi/from_tx/value_full.rs | 30 +- .../internal/multi/height_and_date/bytes.rs | 37 - .../src/internal/multi/height_and_date/mod.rs | 5 - .../internal/multi/height_and_date/ohlc.rs | 21 - .../multi/height_derived/binary_last.rs | 274 +- .../multi/height_derived/binary_sum.rs | 154 +- .../multi/height_derived/binary_sum_cum.rs | 300 +- .../multi/height_derived/distribution.rs | 
126 +- .../internal/multi/height_derived/first.rs | 119 +- .../src/internal/multi/height_derived/full.rs | 160 +- .../src/internal/multi/height_derived/last.rs | 113 +- .../multi/height_derived/lazy_distribution.rs | 104 +- .../multi/height_derived/lazy_full.rs | 110 +- .../multi/height_derived/lazy_last.rs | 199 +- .../internal/multi/height_derived/lazy_sum.rs | 87 +- .../multi/height_derived/lazy_sum_cum.rs | 89 +- .../src/internal/multi/height_derived/max.rs | 97 + .../src/internal/multi/height_derived/min.rs | 97 + .../src/internal/multi/height_derived/mod.rs | 12 +- .../src/internal/multi/height_derived/ohlc.rs | 91 + .../multi/height_derived/split_ohlc.rs | 49 + .../src/internal/multi/height_derived/sum.rs | 152 +- .../internal/multi/height_derived/sum_cum.rs | 197 +- .../multi/height_derived/value_lazy_last.rs | 51 + .../multi/height_derived/value_sum_cum.rs | 96 - crates/brk_computer/src/internal/multi/mod.rs | 8 - .../internal/multi/tx_derived/distribution.rs | 140 +- .../src/internal/multi/tx_derived/full.rs | 143 +- .../internal/multi/tx_derived/lazy_full.rs | 83 +- .../internal/multi/tx_derived/value_full.rs | 71 +- .../single/difficultyepoch/lazy_value.rs | 141 - .../internal/single/difficultyepoch/mod.rs | 3 - .../src/internal/single/group/distribution.rs | 81 +- .../src/internal/single/group/full.rs | 130 +- .../src/internal/single/group/min_max.rs | 19 +- .../internal/single/group/min_max_average.rs | 125 +- .../src/internal/single/group/mod.rs | 2 - .../src/internal/single/group/percentiles.rs | 50 +- .../src/internal/single/group/stats.rs | 110 - .../src/internal/single/group/sum_cum.rs | 112 +- .../internal/single/height/derived_values.rs | 38 +- .../src/internal/single/height/lazy_value.rs | 28 +- .../src/internal/single/height/mod.rs | 2 - .../src/internal/single/height/value.rs | 49 - .../src/internal/single/lazy/average.rs | 165 +- .../src/internal/single/lazy/cumulative.rs | 131 +- .../src/internal/single/lazy/distribution.rs | 75 + 
.../src/internal/single/lazy/first.rs | 122 +- .../src/internal/single/lazy/full.rs | 51 +- .../src/internal/single/lazy/last.rs | 130 +- .../src/internal/single/lazy/max.rs | 165 +- .../src/internal/single/lazy/min.rs | 165 +- .../src/internal/single/lazy/mod.rs | 190 +- .../src/internal/single/lazy/ohlc.rs | 370 ++ .../src/internal/single/lazy/percentile.rs | 156 + .../src/internal/single/lazy/percentiles.rs | 75 + .../src/internal/single/lazy/sparse_last.rs | 279 + .../src/internal/single/lazy/spread.rs | 52 - .../src/internal/single/lazy/sum.rs | 169 +- .../src/internal/single/lazy/sum_cum.rs | 58 +- .../single/lazy_transform/binary_full.rs | 22 +- .../single/lazy_transform/binary_last.rs | 14 +- .../lazy_transform/binary_percentiles.rs | 6 +- .../single/lazy_transform/binary_sum.rs | 26 +- .../single/lazy_transform/binary_sum_cum.rs | 201 +- .../single/lazy_transform/distribution.rs | 69 +- .../internal/single/lazy_transform/full.rs | 74 +- .../internal/single/lazy_transform/last.rs | 34 +- .../src/internal/single/lazy_transform/mod.rs | 4 - .../single/lazy_transform/percentiles.rs | 43 +- .../internal/single/lazy_transform/spread.rs | 49 - .../internal/single/lazy_transform/stats.rs | 56 - .../src/internal/single/lazy_transform/sum.rs | 20 +- .../internal/single/lazy_transform/sum_cum.rs | 62 +- .../brk_computer/src/internal/single/mod.rs | 2 - .../single/transform/block_count_target.rs | 142 + ...gned_to_dollars.rs => cents_to_dollars.rs} | 6 +- .../single/transform/cents_to_sats.rs | 17 + .../transform/cents_unsigned_to_sats_fract.rs | 19 - .../transform/close_price_times_sats.rs | 13 +- .../transform/half_close_price_times_sats.rs | 13 +- .../src/internal/single/transform/mod.rs | 16 +- .../src/internal/single/transform/ohlc.rs | 21 - .../percentage_diff_close_dollars.rs | 14 +- .../src/internal/single/transform/ratio64.rs | 13 + .../transform/sats_times_close_price.rs | 14 +- .../single/transform/volatility_sqrt30.rs | 3 +- 
.../single/transform/volatility_sqrt365.rs | 3 +- .../single/transform/volatility_sqrt7.rs | 3 +- .../single/transform/weight_to_vbytes.rs | 12 + .../src/internal/single/tx/distribution.rs | 24 +- .../src/internal/single/vec/average.rs | 21 +- .../src/internal/single/vec/cumulative.rs | 29 +- .../src/internal/single/vec/first.rs | 78 - .../src/internal/single/vec/last.rs | 78 - .../src/internal/single/vec/max.rs | 21 +- .../src/internal/single/vec/min.rs | 21 +- .../src/internal/single/vec/mod.rs | 4 - .../src/internal/single/vec/percentiles.rs | 21 +- .../src/internal/single/vec/sum.rs | 29 +- crates/brk_computer/src/lib.rs | 366 +- crates/brk_computer/src/market/ath/compute.rs | 112 +- crates/brk_computer/src/market/ath/import.rs | 46 +- crates/brk_computer/src/market/ath/vecs.rs | 21 +- crates/brk_computer/src/market/compute.rs | 32 +- .../brk_computer/src/market/dca/by_class.rs | 128 +- .../brk_computer/src/market/dca/by_period.rs | 159 +- crates/brk_computer/src/market/dca/compute.rs | 505 +- crates/brk_computer/src/market/dca/import.rs | 139 +- crates/brk_computer/src/market/dca/vecs.rs | 57 +- crates/brk_computer/src/market/import.rs | 15 +- .../src/market/indicators/compute.rs | 256 +- .../src/market/indicators/gini.rs | 110 + .../src/market/indicators/import.rs | 189 +- .../src/market/indicators/macd.rs | 83 + .../brk_computer/src/market/indicators/mod.rs | 7 +- .../brk_computer/src/market/indicators/rsi.rs | 123 + .../src/market/indicators/smoothing.rs | 93 + .../src/market/indicators/timeframe.rs | 50 + .../src/market/indicators/vecs.rs | 99 +- .../src/market/lookback/by_period.rs | 73 +- .../src/market/lookback/compute.rs | 26 +- .../src/market/lookback/import.rs | 6 +- .../brk_computer/src/market/lookback/vecs.rs | 10 +- crates/brk_computer/src/market/mod.rs | 20 +- .../src/market/moving_average/compute.rs | 50 +- .../src/market/moving_average/import.rs | 78 +- .../src/market/moving_average/vecs.rs | 77 +- .../brk_computer/src/market/range/compute.rs | 
172 +- .../brk_computer/src/market/range/import.rs | 37 +- crates/brk_computer/src/market/range/vecs.rs | 32 +- .../src/market/returns/compute.rs | 60 +- .../brk_computer/src/market/returns/import.rs | 42 +- .../brk_computer/src/market/returns/vecs.rs | 28 +- .../src/market/volatility/import.rs | 39 +- .../src/market/volatility/vecs.rs | 22 +- crates/brk_computer/src/mining/compute.rs | 41 + .../src/mining/hashrate/compute.rs | 173 + .../mining => mining/hashrate}/import.rs | 12 +- .../{blocks/mining => mining/hashrate}/mod.rs | 0 .../brk_computer/src/mining/hashrate/vecs.rs | 27 + crates/brk_computer/src/mining/import.rs | 42 + crates/brk_computer/src/mining/mod.rs | 22 + .../src/mining/rewards/compute.rs | 223 + .../brk_computer/src/mining/rewards/import.rs | 105 + .../src/{blocks => mining}/rewards/mod.rs | 0 .../brk_computer/src/mining/rewards/vecs.rs | 32 + crates/brk_computer/src/outputs/compute.rs | 2 +- .../brk_computer/src/outputs/count/compute.rs | 71 +- .../brk_computer/src/outputs/count/import.rs | 2 +- crates/brk_computer/src/outputs/count/vecs.rs | 9 +- crates/brk_computer/src/outputs/import.rs | 2 +- crates/brk_computer/src/outputs/mod.rs | 10 +- .../brk_computer/src/outputs/spent/compute.rs | 63 +- .../brk_computer/src/outputs/spent/import.rs | 2 +- crates/brk_computer/src/outputs/spent/vecs.rs | 8 +- crates/brk_computer/src/pools/mod.rs | 123 +- crates/brk_computer/src/pools/vecs.rs | 274 +- crates/brk_computer/src/positions.rs | 28 +- crates/brk_computer/src/price/cents/import.rs | 73 - crates/brk_computer/src/price/cents/vecs.rs | 10 - crates/brk_computer/src/price/fetch.rs | 99 - .../brk_computer/src/price/oracle/compute.rs | 491 -- .../brk_computer/src/price/oracle/import.rs | 54 - crates/brk_computer/src/price/oracle/vecs.rs | 14 - crates/brk_computer/src/price/sats/compute.rs | 275 - crates/brk_computer/src/price/sats/import.rs | 28 - crates/brk_computer/src/price/sats/vecs.rs | 10 - crates/brk_computer/src/price/usd/compute.rs | 268 - 
crates/brk_computer/src/price/usd/import.rs | 28 - crates/brk_computer/src/price/usd/mod.rs | 5 - crates/brk_computer/src/price/usd/vecs.rs | 10 - .../brk_computer/src/prices/cents/compute.rs | 191 + .../brk_computer/src/prices/cents/import.rs | 35 + .../src/{price/oracle => prices/cents}/mod.rs | 0 crates/brk_computer/src/prices/cents/vecs.rs | 12 + .../src/{price => prices}/compute.rs | 13 +- .../brk_computer/src/{price => prices}/mod.rs | 37 +- crates/brk_computer/src/prices/sats/import.rs | 39 + .../src/{price/cents => prices/sats}/mod.rs | 0 crates/brk_computer/src/prices/sats/vecs.rs | 12 + crates/brk_computer/src/prices/usd/import.rs | 39 + .../src/{price/sats => prices/usd}/mod.rs | 1 - crates/brk_computer/src/prices/usd/vecs.rs | 12 + crates/brk_computer/src/scripts/compute.rs | 9 +- .../brk_computer/src/scripts/count/compute.rs | 45 +- .../brk_computer/src/scripts/count/import.rs | 14 +- crates/brk_computer/src/scripts/count/vecs.rs | 31 +- crates/brk_computer/src/scripts/import.rs | 8 +- crates/brk_computer/src/scripts/mod.rs | 10 +- .../brk_computer/src/scripts/value/compute.rs | 48 +- .../brk_computer/src/scripts/value/import.rs | 8 +- crates/brk_computer/src/scripts/value/vecs.rs | 7 +- .../brk_computer/src/supply/burned/compute.rs | 68 +- .../brk_computer/src/supply/burned/import.rs | 10 +- crates/brk_computer/src/supply/burned/vecs.rs | 9 +- crates/brk_computer/src/supply/compute.rs | 83 +- crates/brk_computer/src/supply/import.rs | 53 +- crates/brk_computer/src/supply/vecs.rs | 26 +- .../src/supply/velocity/compute.rs | 48 +- .../src/supply/velocity/import.rs | 12 +- .../brk_computer/src/supply/velocity/vecs.rs | 11 +- crates/brk_computer/src/traits/mod.rs | 541 +- crates/brk_computer/src/traits/pricing.rs | 10 +- .../brk_computer/src/transactions/compute.rs | 12 +- .../src/transactions/count/compute.rs | 7 +- .../src/transactions/count/import.rs | 15 +- .../src/transactions/count/vecs.rs | 8 +- .../src/transactions/fees/compute.rs | 2 +- 
.../src/transactions/fees/import.rs | 12 +- .../src/transactions/fees/vecs.rs | 14 +- .../brk_computer/src/transactions/import.rs | 10 +- crates/brk_computer/src/transactions/mod.rs | 16 +- .../src/transactions/size/compute.rs | 2 +- .../src/transactions/size/import.rs | 28 +- .../src/transactions/size/vecs.rs | 9 +- .../src/transactions/versions/compute.rs | 19 +- .../src/transactions/versions/import.rs | 2 +- .../src/transactions/versions/vecs.rs | 11 +- .../src/transactions/volume/compute.rs | 183 +- .../src/transactions/volume/import.rs | 37 +- .../src/transactions/volume/vecs.rs | 19 +- crates/brk_computer/src/utils.rs | 2 +- crates/brk_error/Cargo.toml | 2 + crates/brk_error/src/lib.rs | 5 + crates/brk_fetcher/Cargo.toml | 1 + crates/brk_fetcher/src/binance.rs | 28 +- crates/brk_fetcher/src/brk.rs | 50 +- crates/brk_fetcher/src/kraken.rs | 24 +- crates/brk_fetcher/src/lib.rs | 14 +- crates/brk_fetcher/src/ohlc.rs | 20 +- crates/brk_fetcher/src/source.rs | 14 +- crates/brk_indexer/src/lib.rs | 18 +- crates/brk_indexer/src/vecs/addresses.rs | 38 +- crates/brk_indexer/src/vecs/blocks.rs | 16 +- crates/brk_indexer/src/vecs/inputs.rs | 16 +- crates/brk_indexer/src/vecs/mod.rs | 19 +- crates/brk_indexer/src/vecs/outputs.rs | 16 +- crates/brk_indexer/src/vecs/scripts.rs | 22 +- crates/brk_indexer/src/vecs/transactions.rs | 26 +- crates/brk_iterator/Cargo.toml | 3 +- crates/brk_logger/Cargo.toml | 3 +- crates/brk_logger/src/lib.rs | 14 +- crates/brk_mempool/Cargo.toml | 3 +- crates/brk_oracle/README.md | 117 +- crates/brk_oracle/examples/compare_digits.rs | 38 +- crates/brk_oracle/examples/determinism.rs | 38 +- crates/brk_oracle/examples/noise.rs | 10 +- crates/brk_oracle/examples/report.rs | 84 +- crates/brk_oracle/examples/sweep_digits.rs | 38 +- crates/brk_oracle/examples/sweep_tolerance.rs | 38 +- crates/brk_oracle/examples/validate.rs | 39 +- crates/brk_query/Cargo.toml | 3 +- crates/brk_query/README.md | 2 +- crates/brk_query/examples/query.rs | 15 +- 
crates/brk_query/src/impl/address.rs | 36 +- crates/brk_query/src/impl/block/info.rs | 85 +- crates/brk_query/src/impl/block/raw.rs | 6 +- crates/brk_query/src/impl/block/status.rs | 2 +- crates/brk_query/src/impl/block/timestamp.rs | 28 +- crates/brk_query/src/impl/block/txs.rs | 30 +- crates/brk_query/src/impl/cost_basis.rs | 20 +- crates/brk_query/src/impl/metrics.rs | 103 +- crates/brk_query/src/impl/metrics_legacy.rs | 66 - .../src/impl/mining/block_fee_rates.rs | 17 +- .../brk_query/src/impl/mining/block_fees.rs | 15 +- .../src/impl/mining/block_rewards.rs | 18 +- .../brk_query/src/impl/mining/block_sizes.rs | 30 +- .../{dateindex_iter.rs => day1_iter.rs} | 36 +- .../brk_query/src/impl/mining/difficulty.rs | 23 +- crates/brk_query/src/impl/mining/epochs.rs | 26 +- crates/brk_query/src/impl/mining/hashrate.rs | 47 +- crates/brk_query/src/impl/mining/mod.rs | 2 +- crates/brk_query/src/impl/mining/pools.rs | 24 +- .../brk_query/src/impl/mining/reward_stats.rs | 41 +- crates/brk_query/src/impl/mod.rs | 1 - crates/brk_query/src/impl/price.rs | 11 +- crates/brk_query/src/impl/transaction.rs | 74 +- crates/brk_query/src/lib.rs | 16 +- crates/brk_query/src/resolved.rs | 25 - crates/brk_query/src/vecs.rs | 4 +- crates/brk_rpc/Cargo.toml | 15 +- .../brk_rpc/examples/bench_hash_prefixes.rs | 62 + crates/brk_rpc/examples/compare_backends.rs | 267 + crates/brk_rpc/examples/rpc.rs | 3 +- crates/brk_rpc/src/backend/bitcoincore.rs | 243 + crates/brk_rpc/src/backend/corepc.rs | 318 + crates/brk_rpc/src/backend/mod.rs | 64 + crates/brk_rpc/src/inner.rs | 141 - crates/brk_rpc/src/lib.rs | 91 +- crates/brk_server/Cargo.toml | 4 +- crates/brk_server/examples/server.rs | 17 +- crates/brk_server/src/api/metrics/mod.rs | 4 +- crates/brk_server/src/api/mod.rs | 9 +- crates/brk_server/src/api/scalar.html | 3 + crates/brk_server/src/api/scalar.js | 6 +- crates/brk_server/src/api/scalar.js.br | Bin 0 -> 764040 bytes crates/brk_server/src/api/server/mod.rs | 4 +- 
crates/brk_store/Cargo.toml | 1 + crates/brk_store/src/lib.rs | 17 +- crates/brk_store/src/meta.rs | 4 +- crates/brk_traversable/src/lib.rs | 37 +- crates/brk_traversable_derive/src/lib.rs | 488 +- crates/brk_types/Cargo.toml | 4 +- crates/brk_types/pools-v2.json | 1808 ++++++ crates/brk_types/src/addressbytes.rs | 64 +- crates/brk_types/src/anyaddressindex.rs | 5 +- crates/brk_types/src/bitcoin.rs | 5 +- crates/brk_types/src/blkposition.rs | 5 +- crates/brk_types/src/block.rs | 84 + crates/brk_types/src/blockhash.rs | 5 +- crates/brk_types/src/cents.rs | 272 +- ...s_unsigned_compact.rs => cents_compact.rs} | 51 +- crates/brk_types/src/cents_sats.rs | 21 +- crates/brk_types/src/cents_signed.rs | 5 +- crates/brk_types/src/cents_squared_sats.rs | 5 +- crates/brk_types/src/cents_unsigned.rs | 274 - crates/brk_types/src/cost_basis_bucket.rs | 10 +- .../brk_types/src/cost_basis_distribution.rs | 25 +- crates/brk_types/src/date.rs | 145 +- .../brk_types/src/{dateindex.rs => day1.rs} | 91 +- crates/brk_types/src/day3.rs | 84 + crates/brk_types/src/difficultyepoch.rs | 19 +- crates/brk_types/src/dollars.rs | 13 +- crates/brk_types/src/emptyaddressdata.rs | 12 +- crates/brk_types/src/emptyaddressindex.rs | 5 +- crates/brk_types/src/emptyoutputindex.rs | 5 +- crates/brk_types/src/etag.rs | 7 +- crates/brk_types/src/feerate.rs | 5 +- crates/brk_types/src/fundedaddressdata.rs | 42 +- crates/brk_types/src/fundedaddressindex.rs | 5 +- crates/brk_types/src/halvingepoch.rs | 19 +- crates/brk_types/src/height.rs | 17 +- crates/brk_types/src/hour1.rs | 84 + crates/brk_types/src/hour12.rs | 84 + crates/brk_types/src/hour4.rs | 84 + crates/brk_types/src/index.rs | 338 +- crates/brk_types/src/indexes.rs | 79 +- crates/brk_types/src/indexinfo.rs | 2 +- crates/brk_types/src/lib.rs | 52 +- crates/brk_types/src/metricdata.rs | 20 +- crates/brk_types/src/metricoutput.rs | 6 +- crates/brk_types/src/minute1.rs | 84 + crates/brk_types/src/minute10.rs | 84 + crates/brk_types/src/minute30.rs | 
84 + crates/brk_types/src/minute5.rs | 84 + .../src/{monthindex.rs => month1.rs} | 53 +- .../src/{quarterindex.rs => month3.rs} | 45 +- .../src/{semesterindex.rs => month6.rs} | 45 +- crates/brk_types/src/ohlc.rs | 120 +- crates/brk_types/src/opreturnindex.rs | 5 +- crates/brk_types/src/oracle_bins.rs | 12 +- crates/brk_types/src/outpoint.rs | 12 +- crates/brk_types/src/outputtype.rs | 42 +- crates/brk_types/src/p2aaddressindex.rs | 5 +- crates/brk_types/src/p2abytes.rs | 12 +- crates/brk_types/src/p2msoutputindex.rs | 5 +- crates/brk_types/src/p2pk33addressindex.rs | 5 +- crates/brk_types/src/p2pk33bytes.rs | 12 +- crates/brk_types/src/p2pk65addressindex.rs | 5 +- crates/brk_types/src/p2pk65bytes.rs | 12 +- crates/brk_types/src/p2pkhaddressindex.rs | 5 +- crates/brk_types/src/p2pkhbytes.rs | 12 +- crates/brk_types/src/p2shaddressindex.rs | 5 +- crates/brk_types/src/p2shbytes.rs | 12 +- crates/brk_types/src/p2traddressindex.rs | 5 +- crates/brk_types/src/p2trbytes.rs | 12 +- crates/brk_types/src/p2wpkhaddressindex.rs | 5 +- crates/brk_types/src/p2wpkhbytes.rs | 12 +- crates/brk_types/src/p2wshaddressindex.rs | 5 +- crates/brk_types/src/p2wshbytes.rs | 12 +- crates/brk_types/src/pairoutputindex.rs | 5 +- crates/brk_types/src/pool.rs | 26 - crates/brk_types/src/pools.rs | 1226 +--- crates/brk_types/src/poolslug.rs | 43 +- crates/brk_types/src/rawlocktime.rs | 12 +- crates/brk_types/src/sats.rs | 9 +- crates/brk_types/src/sats_signed.rs | 5 +- crates/brk_types/src/satsfract.rs | 5 +- crates/brk_types/src/stored_bool.rs | 5 +- crates/brk_types/src/stored_f32.rs | 22 +- crates/brk_types/src/stored_f64.rs | 5 +- crates/brk_types/src/stored_i16.rs | 5 +- crates/brk_types/src/stored_i8.rs | 5 +- crates/brk_types/src/stored_u16.rs | 5 +- crates/brk_types/src/stored_u32.rs | 5 +- crates/brk_types/src/stored_u64.rs | 21 +- crates/brk_types/src/stored_u8.rs | 5 +- crates/brk_types/src/supply_state.rs | 12 +- crates/brk_types/src/timestamp.rs | 8 +- 
crates/brk_types/src/treenode.rs | 314 +- crates/brk_types/src/txid.rs | 5 +- crates/brk_types/src/txindex.rs | 5 +- crates/brk_types/src/txinindex.rs | 5 +- crates/brk_types/src/txoutindex.rs | 5 +- crates/brk_types/src/txversion.rs | 5 +- crates/brk_types/src/typeindex.rs | 5 +- crates/brk_types/src/unknownoutputindex.rs | 5 +- crates/brk_types/src/vout.rs | 5 +- crates/brk_types/src/vsize.rs | 5 +- .../brk_types/src/{weekindex.rs => week1.rs} | 47 +- crates/brk_types/src/weight.rs | 5 +- crates/brk_types/src/year.rs | 5 +- .../brk_types/src/{yearindex.rs => year1.rs} | 55 +- .../src/{decadeindex.rs => year10.rs} | 51 +- crates/brk_website/Cargo.toml | 4 +- docker/README.md | 4 +- docs/README.md | 18 +- modules/brk-client/index.js | 5157 +++++++++-------- modules/brk-client/tests/basic.js | 19 +- modules/brk-client/tests/metric_data.js | 251 +- modules/quickmatch-js/0.3.1/src/index.js | 2 +- packages/brk_client/DOCS.md | 14 +- packages/brk_client/brk_client/__init__.py | 2826 +++++---- packages/brk_client/tests/test_basic.py | 16 +- packages/brk_client/tests/test_metric_data.py | 287 +- packages/brk_client/uv.lock | 2 +- website/scripts/chart/index.js | 12 +- .../scripts/options/distribution/activity.js | 16 +- website/scripts/options/full.js | 10 +- website/scripts/options/market.js | 37 +- website/scripts/options/mining.js | 76 +- website/scripts/options/series.js | 42 +- website/scripts/options/shared.js | 20 +- website/scripts/options/types.js | 10 +- website/scripts/options/unused.js | 8 +- website/scripts/panes/_simulation.js | 1103 ---- website/scripts/panes/chart.js | 10 +- website/scripts/types.js | 24 +- website/scripts/utils/serde.js | 42 +- 703 files changed, 29130 insertions(+), 30779 deletions(-) delete mode 100644 crates/brk_client/examples/fetch_prices.rs delete mode 100644 crates/brk_computer/src/blocks/mining/compute.rs delete mode 100644 crates/brk_computer/src/blocks/mining/vecs.rs delete mode 100644 
crates/brk_computer/src/blocks/rewards/compute.rs delete mode 100644 crates/brk_computer/src/blocks/rewards/import.rs delete mode 100644 crates/brk_computer/src/blocks/rewards/vecs.rs delete mode 100644 crates/brk_computer/src/indexes/dateindex.rs create mode 100644 crates/brk_computer/src/indexes/day1.rs create mode 100644 crates/brk_computer/src/indexes/day3.rs delete mode 100644 crates/brk_computer/src/indexes/decadeindex.rs create mode 100644 crates/brk_computer/src/indexes/hour1.rs create mode 100644 crates/brk_computer/src/indexes/hour12.rs create mode 100644 crates/brk_computer/src/indexes/hour4.rs create mode 100644 crates/brk_computer/src/indexes/minute1.rs create mode 100644 crates/brk_computer/src/indexes/minute10.rs create mode 100644 crates/brk_computer/src/indexes/minute30.rs create mode 100644 crates/brk_computer/src/indexes/minute5.rs create mode 100644 crates/brk_computer/src/indexes/month1.rs create mode 100644 crates/brk_computer/src/indexes/month3.rs create mode 100644 crates/brk_computer/src/indexes/month6.rs delete mode 100644 crates/brk_computer/src/indexes/monthindex.rs delete mode 100644 crates/brk_computer/src/indexes/quarterindex.rs delete mode 100644 crates/brk_computer/src/indexes/semesterindex.rs create mode 100644 crates/brk_computer/src/indexes/week1.rs delete mode 100644 crates/brk_computer/src/indexes/weekindex.rs create mode 100644 crates/brk_computer/src/indexes/year1.rs create mode 100644 crates/brk_computer/src/indexes/year10.rs delete mode 100644 crates/brk_computer/src/indexes/yearindex.rs delete mode 100644 crates/brk_computer/src/internal/multi/date_derived/average.rs delete mode 100644 crates/brk_computer/src/internal/multi/date_derived/first.rs delete mode 100644 crates/brk_computer/src/internal/multi/date_derived/full.rs delete mode 100644 crates/brk_computer/src/internal/multi/date_derived/last.rs delete mode 100644 crates/brk_computer/src/internal/multi/date_derived/max.rs delete mode 100644 
crates/brk_computer/src/internal/multi/date_derived/min.rs delete mode 100644 crates/brk_computer/src/internal/multi/date_derived/mod.rs delete mode 100644 crates/brk_computer/src/internal/multi/date_derived/spread.rs delete mode 100644 crates/brk_computer/src/internal/multi/date_derived/sum.rs delete mode 100644 crates/brk_computer/src/internal/multi/date_derived/sum_cum.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/average.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/binary_last.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/binary_sum.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/binary_sum_cum.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/first.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/last.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/lazy.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/lazy_binary_price.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/lazy_distribution.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/lazy_full.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/lazy_last.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/lazy_price.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/lazy_sum.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/lazy_sum_cum.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/max.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/min.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/mod.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/price.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/ratio.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/stddev.rs delete mode 100644 
crates/brk_computer/src/internal/multi/from_date/unary_last.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/value_change.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/value_change_derived.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/value_derived_last.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/value_last.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_date/value_lazy_last.rs create mode 100644 crates/brk_computer/src/internal/multi/from_height/constant.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_height/lazy_binary_price.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_height/lazy_distribution.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_height/lazy_price_from_cents.rs rename crates/brk_computer/src/internal/multi/{from_date => from_height}/percentiles.rs (59%) create mode 100644 crates/brk_computer/src/internal/multi/from_height/ratio.rs create mode 100644 crates/brk_computer/src/internal/multi/from_height/stddev.rs create mode 100644 crates/brk_computer/src/internal/multi/from_height/stored_value_last.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_height/unary_last.rs create mode 100644 crates/brk_computer/src/internal/multi/from_height/value_change.rs create mode 100644 crates/brk_computer/src/internal/multi/from_height/value_ema.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_height_and_date/binary_last.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_height_and_date/computed_ohlc.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_height_and_date/constant.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_height_and_date/first.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_height_and_date/last.rs delete mode 100644 
crates/brk_computer/src/internal/multi/from_height_and_date/lazy_ohlc.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_height_and_date/max.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_height_and_date/min.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_height_and_date/mod.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_height_and_date/price.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_height_and_date/unary_last.rs delete mode 100644 crates/brk_computer/src/internal/multi/from_height_and_date/value_last.rs delete mode 100644 crates/brk_computer/src/internal/multi/height_and_date/bytes.rs delete mode 100644 crates/brk_computer/src/internal/multi/height_and_date/mod.rs delete mode 100644 crates/brk_computer/src/internal/multi/height_and_date/ohlc.rs create mode 100644 crates/brk_computer/src/internal/multi/height_derived/max.rs create mode 100644 crates/brk_computer/src/internal/multi/height_derived/min.rs create mode 100644 crates/brk_computer/src/internal/multi/height_derived/ohlc.rs create mode 100644 crates/brk_computer/src/internal/multi/height_derived/split_ohlc.rs create mode 100644 crates/brk_computer/src/internal/multi/height_derived/value_lazy_last.rs delete mode 100644 crates/brk_computer/src/internal/multi/height_derived/value_sum_cum.rs delete mode 100644 crates/brk_computer/src/internal/single/difficultyepoch/lazy_value.rs delete mode 100644 crates/brk_computer/src/internal/single/difficultyepoch/mod.rs delete mode 100644 crates/brk_computer/src/internal/single/group/stats.rs delete mode 100644 crates/brk_computer/src/internal/single/height/value.rs create mode 100644 crates/brk_computer/src/internal/single/lazy/distribution.rs create mode 100644 crates/brk_computer/src/internal/single/lazy/ohlc.rs create mode 100644 crates/brk_computer/src/internal/single/lazy/percentile.rs create mode 100644 crates/brk_computer/src/internal/single/lazy/percentiles.rs create 
mode 100644 crates/brk_computer/src/internal/single/lazy/sparse_last.rs delete mode 100644 crates/brk_computer/src/internal/single/lazy/spread.rs delete mode 100644 crates/brk_computer/src/internal/single/lazy_transform/spread.rs delete mode 100644 crates/brk_computer/src/internal/single/lazy_transform/stats.rs create mode 100644 crates/brk_computer/src/internal/single/transform/block_count_target.rs rename crates/brk_computer/src/internal/single/transform/{cents_unsigned_to_dollars.rs => cents_to_dollars.rs} (53%) create mode 100644 crates/brk_computer/src/internal/single/transform/cents_to_sats.rs delete mode 100644 crates/brk_computer/src/internal/single/transform/cents_unsigned_to_sats_fract.rs delete mode 100644 crates/brk_computer/src/internal/single/transform/ohlc.rs create mode 100644 crates/brk_computer/src/internal/single/transform/ratio64.rs create mode 100644 crates/brk_computer/src/internal/single/transform/weight_to_vbytes.rs delete mode 100644 crates/brk_computer/src/internal/single/vec/first.rs delete mode 100644 crates/brk_computer/src/internal/single/vec/last.rs create mode 100644 crates/brk_computer/src/market/indicators/gini.rs create mode 100644 crates/brk_computer/src/market/indicators/macd.rs create mode 100644 crates/brk_computer/src/market/indicators/rsi.rs create mode 100644 crates/brk_computer/src/market/indicators/smoothing.rs create mode 100644 crates/brk_computer/src/market/indicators/timeframe.rs create mode 100644 crates/brk_computer/src/mining/compute.rs create mode 100644 crates/brk_computer/src/mining/hashrate/compute.rs rename crates/brk_computer/src/{blocks/mining => mining/hashrate}/import.rs (90%) rename crates/brk_computer/src/{blocks/mining => mining/hashrate}/mod.rs (100%) create mode 100644 crates/brk_computer/src/mining/hashrate/vecs.rs create mode 100644 crates/brk_computer/src/mining/import.rs create mode 100644 crates/brk_computer/src/mining/mod.rs create mode 100644 crates/brk_computer/src/mining/rewards/compute.rs 
create mode 100644 crates/brk_computer/src/mining/rewards/import.rs rename crates/brk_computer/src/{blocks => mining}/rewards/mod.rs (100%) create mode 100644 crates/brk_computer/src/mining/rewards/vecs.rs delete mode 100644 crates/brk_computer/src/price/cents/import.rs delete mode 100644 crates/brk_computer/src/price/cents/vecs.rs delete mode 100644 crates/brk_computer/src/price/fetch.rs delete mode 100644 crates/brk_computer/src/price/oracle/compute.rs delete mode 100644 crates/brk_computer/src/price/oracle/import.rs delete mode 100644 crates/brk_computer/src/price/oracle/vecs.rs delete mode 100644 crates/brk_computer/src/price/sats/compute.rs delete mode 100644 crates/brk_computer/src/price/sats/import.rs delete mode 100644 crates/brk_computer/src/price/sats/vecs.rs delete mode 100644 crates/brk_computer/src/price/usd/compute.rs delete mode 100644 crates/brk_computer/src/price/usd/import.rs delete mode 100644 crates/brk_computer/src/price/usd/mod.rs delete mode 100644 crates/brk_computer/src/price/usd/vecs.rs create mode 100644 crates/brk_computer/src/prices/cents/compute.rs create mode 100644 crates/brk_computer/src/prices/cents/import.rs rename crates/brk_computer/src/{price/oracle => prices/cents}/mod.rs (100%) create mode 100644 crates/brk_computer/src/prices/cents/vecs.rs rename crates/brk_computer/src/{price => prices}/compute.rs (50%) rename crates/brk_computer/src/{price => prices}/mod.rs (59%) create mode 100644 crates/brk_computer/src/prices/sats/import.rs rename crates/brk_computer/src/{price/cents => prices/sats}/mod.rs (100%) create mode 100644 crates/brk_computer/src/prices/sats/vecs.rs create mode 100644 crates/brk_computer/src/prices/usd/import.rs rename crates/brk_computer/src/{price/sats => prices/usd}/mod.rs (76%) create mode 100644 crates/brk_computer/src/prices/usd/vecs.rs delete mode 100644 crates/brk_query/src/impl/metrics_legacy.rs rename crates/brk_query/src/impl/mining/{dateindex_iter.rs => day1_iter.rs} (56%) delete mode 100644 
crates/brk_query/src/resolved.rs create mode 100644 crates/brk_rpc/examples/bench_hash_prefixes.rs create mode 100644 crates/brk_rpc/examples/compare_backends.rs create mode 100644 crates/brk_rpc/src/backend/bitcoincore.rs create mode 100644 crates/brk_rpc/src/backend/corepc.rs create mode 100644 crates/brk_rpc/src/backend/mod.rs delete mode 100644 crates/brk_rpc/src/inner.rs create mode 100644 crates/brk_server/src/api/scalar.js.br create mode 100644 crates/brk_types/pools-v2.json rename crates/brk_types/src/{cents_unsigned_compact.rs => cents_compact.rs} (79%) delete mode 100644 crates/brk_types/src/cents_unsigned.rs rename crates/brk_types/src/{dateindex.rs => day1.rs} (68%) create mode 100644 crates/brk_types/src/day3.rs create mode 100644 crates/brk_types/src/hour1.rs create mode 100644 crates/brk_types/src/hour12.rs create mode 100644 crates/brk_types/src/hour4.rs create mode 100644 crates/brk_types/src/minute1.rs create mode 100644 crates/brk_types/src/minute10.rs create mode 100644 crates/brk_types/src/minute30.rs create mode 100644 crates/brk_types/src/minute5.rs rename crates/brk_types/src/{monthindex.rs => month1.rs} (63%) rename crates/brk_types/src/{quarterindex.rs => month3.rs} (66%) rename crates/brk_types/src/{semesterindex.rs => month6.rs} (65%) rename crates/brk_types/src/{weekindex.rs => week1.rs} (72%) rename crates/brk_types/src/{yearindex.rs => year1.rs} (65%) rename crates/brk_types/src/{decadeindex.rs => year10.rs} (68%) delete mode 100644 website/scripts/panes/_simulation.js diff --git a/Cargo.lock b/Cargo.lock index 09c97d0d3..85a7e9ecc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -86,9 +86,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.101" +version = "1.0.102" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f0e0fee31ef5ed1ba1316088939cea399010ed7731dba877ed44aeb407a75ea" +checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" [[package]] name = "arrayvec" @@ -98,9 
+98,9 @@ checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" [[package]] name = "async-compression" -version = "0.4.39" +version = "0.4.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68650b7df54f0293fd061972a0fb05aaf4fc0879d3b3d21a638a182c5c543b9f" +checksum = "7d67d43201f4d20c78bcda740c142ca52482d81da80681533d33bf3f0596c8e2" dependencies = [ "compression-codecs", "compression-core", @@ -203,6 +203,18 @@ version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + [[package]] name = "bech32" version = "0.11.1" @@ -221,7 +233,7 @@ version = "0.72.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "993776b509cfb49c750f11b8f07a46fa23e0a1386ffc01fb1e7d343efc387895" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "cexpr", "clang-sys", "itertools", @@ -240,6 +252,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e499f9fc0407f50fe98af744ab44fa67d409f76b6772e1689ec8485eb0c0f66" dependencies = [ "base58ck", + "base64 0.21.7", "bech32", "bitcoin-internals", "bitcoin-io", @@ -294,7 +307,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aedd23ae0fd321affb4bbbc36126c6f49a32818dc6b979395d24da8c9d4e80ee" dependencies = [ "bitcoincore-rpc-json", - "jsonrpc", + "jsonrpc 0.18.0", "log", "serde", "serde_json", @@ -319,9 +332,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" 
-version = "2.10.0" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" [[package]] name = "block2" @@ -403,7 +416,6 @@ dependencies = [ "brk_alloc", "brk_computer", "brk_error", - "brk_fetcher", "brk_indexer", "brk_iterator", "brk_logger", @@ -454,7 +466,6 @@ dependencies = [ "brk_bencher", "brk_cohort", "brk_error", - "brk_fetcher", "brk_indexer", "brk_iterator", "brk_logger", @@ -471,6 +482,7 @@ dependencies = [ "rustc-hash", "schemars", "serde", + "serde_json", "smallvec", "tracing", "vecdb", @@ -482,6 +494,7 @@ version = "0.1.9" dependencies = [ "bitcoin", "bitcoincore-rpc", + "corepc-client", "fjall", "jiff", "minreq", @@ -619,7 +632,11 @@ dependencies = [ "brk_error", "brk_logger", "brk_types", + "corepc-client", + "jsonrpc 0.19.0", "parking_lot", + "serde", + "serde_json", "tracing", ] @@ -632,7 +649,6 @@ dependencies = [ "brk_bindgen", "brk_computer", "brk_error", - "brk_fetcher", "brk_indexer", "brk_logger", "brk_mempool", @@ -749,9 +765,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.19.1" +version = "3.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" +checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb" [[package]] name = "bytemuck" @@ -881,9 +897,9 @@ checksum = "ea0095f6103c2a8b44acd6fd15960c801dafebf02e21940360833e0673f48ba7" [[package]] name = "compression-codecs" -version = "0.4.36" +version = "0.4.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00828ba6fd27b45a448e57dbfe84f1029d4c9f26b368157e9a448a5f49a2ec2a" +checksum = "eb7b51a7d9c967fc26773061ba86150f19c50c0d65c887cb1fbe295fd16619b7" dependencies = [ "brotli", "compression-core", @@ -960,6 +976,27 @@ dependencies = [ "libc", ] 
+[[package]] +name = "corepc-client" +version = "0.11.0" +dependencies = [ + "bitcoin", + "corepc-types", + "jsonrpc 0.19.0", + "log", + "serde", + "serde_json", +] + +[[package]] +name = "corepc-types" +version = "0.11.0" +dependencies = [ + "bitcoin", + "serde", + "serde_json", +] + [[package]] name = "crc32fast" version = "1.5.0" @@ -1122,7 +1159,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "block2", "libc", "objc2", @@ -1239,9 +1276,9 @@ checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" [[package]] name = "fjall" -version = "3.0.1" +version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f69637c02d38ad1b0f003101d0195a60368130aa17d9ef78b1557d265a22093" +checksum = "5a2799b4198427a08c774838e44d0b77f677208f19a1927671cd2cd36bb30d69" dependencies = [ "byteorder-lite", "byteview", @@ -1291,7 +1328,7 @@ version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c7e611d49285d4c4b2e1727b72cf05353558885cc5252f93707b845dfcaf3d3" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "byteorder", "core-foundation", "core-graphics", @@ -1359,9 +1396,9 @@ dependencies = [ [[package]] name = "futures" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" dependencies = [ "futures-channel", "futures-core", @@ -1374,9 +1411,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +checksum = 
"07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" dependencies = [ "futures-core", "futures-sink", @@ -1384,15 +1421,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" [[package]] name = "futures-executor" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" dependencies = [ "futures-core", "futures-task", @@ -1401,15 +1438,15 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" [[package]] name = "futures-macro" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" dependencies = [ "proc-macro2", "quote", @@ -1418,21 +1455,21 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" [[package]] name = "futures-task" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" [[package]] name = "futures-util" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" dependencies = [ "futures-channel", "futures-core", @@ -1442,7 +1479,6 @@ dependencies = [ "futures-task", "memchr", "pin-project-lite", - "pin-utils", "slab", ] @@ -1786,9 +1822,9 @@ dependencies = [ [[package]] name = "importmap" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "689654b51e7a463f6d1009c18e784a43d287d82296b67f27411f01c38ab5adb7" +checksum = "136c4b5b2e42ffb57d7b99c2104350ae3ee5a2c555fe48ca04534ded7e599060" dependencies = [ "include_dir", "rapidhash", @@ -1858,9 +1894,9 @@ checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" [[package]] name = "jiff" -version = "0.2.20" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c867c356cc096b33f4981825ab281ecba3db0acefe60329f044c1789d94c6543" +checksum = "b3e3d65f018c6ae946ab16e80944b97096ed73c35b221d1c478a6c81d8f57940" dependencies = [ "jiff-static", "log", @@ -1872,9 +1908,9 @@ dependencies = [ [[package]] name = "jiff-static" -version = "0.2.20" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7946b4325269738f270bb55b3c19ab5c5040525f83fd625259422a9d25d9be5" +checksum = "a17c2b211d863c7fde02cbea8a3c1a439b98e109286554f2860bdded7ff83818" dependencies = [ "proc-macro2", "quote", @@ -1899,9 +1935,9 @@ checksum = "00810f1d8b74be64b13dbf3db89ac67740615d6c891f0e7b6179326533011a07" [[package]] name = "js-sys" -version = "0.3.85" +version = "0.3.88" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c942ebf8e95485ca0d52d97da7c5a2c387d0e7f0ba4c35e93bfcaee045955b3" +checksum = "c7e709f3e3d22866f9c25b3aff01af289b18422cc8b4262fb19103ee80fe513d" dependencies = [ "once_cell", "wasm-bindgen", @@ -1913,12 +1949,21 @@ version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3662a38d341d77efecb73caf01420cfa5aa63c0253fd7bc05289ef9f6616e1bf" dependencies = [ - "base64", + "base64 0.13.1", "minreq", "serde", "serde_json", ] +[[package]] +name = "jsonrpc" +version = "0.19.0" +dependencies = [ + "base64 0.22.1", + "serde", + "serde_json", +] + [[package]] name = "lazy_static" version = "1.5.0" @@ -1981,7 +2026,7 @@ version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d0b95e02c851351f877147b7deea7b1afb1df71b63aa5f8270716e0c5720616" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "libc", ] @@ -2014,9 +2059,9 @@ checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" [[package]] name = "lsm-tree" -version = "3.0.1" +version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b875f1dfe14f557f805b167fb9b0fc54c5560c7a4bd6ae02535b2846f276a8cb" +checksum = "86e8d0b8e0cf2531a437788ce94d95570dbaabfe9888db20022c2d5ccec9b221" dependencies = [ "byteorder-lite", "byteview", @@ -2049,15 +2094,6 @@ version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab6473172471198271ff72e9379150e9dfd70d8e533e0752a27e515b48dd375e" -[[package]] -name = "matchers" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9" -dependencies = [ - "regex-automata", -] - [[package]] name = "matchit" version = "0.8.4" @@ -2072,9 +2108,9 @@ checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" [[package]] name = "memmap2" -version 
= "0.9.9" +version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "744133e4a0e0a658e1374cf3bf8e415c4052a15a111acd372764c55b4177d490" +checksum = "714098028fe011992e1c3962653c96b2d578c4b4bce9036e15ff220319b1e0e3" dependencies = [ "libc", ] @@ -2140,7 +2176,7 @@ version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "225e7cfe711e0ba79a68baeddb2982723e4235247aefce1482f2f16c27865b66" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "cfg-if", "cfg_aliases", "libc", @@ -2220,9 +2256,9 @@ checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" [[package]] name = "owo-colors" -version = "4.2.3" +version = "4.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c6901729fa79e91a0913333229e9ca5dc725089d1c363b2f4b4760709dc4a52" +checksum = "d211803b9b6b570f68772237e415a029d5a50c65d382910b879fb19d3271f94d" [[package]] name = "parking_lot" @@ -2490,9 +2526,9 @@ dependencies = [ [[package]] name = "rapidhash" -version = "4.3.0" +version = "4.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84816e4c99c467e92cf984ee6328caa976dfecd33a673544489d79ca2caaefe5" +checksum = "b5e48930979c155e2f33aa36ab3119b5ee81332beb6482199a8ecd6029b80b59" dependencies = [ "rustversion", ] @@ -2500,8 +2536,6 @@ dependencies = [ [[package]] name = "rawdb" version = "0.6.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a66c17743b9a7e6a3bb8edb10fef25c62516e281b723ea38d7c1feea2035c75d" dependencies = [ "libc", "log", @@ -2538,7 +2572,7 @@ version = "0.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", ] [[package]] @@ -2651,7 +2685,7 @@ version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "errno", "libc", "linux-raw-sys", @@ -2957,18 +2991,18 @@ checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" [[package]] name = "strum" -version = "0.27.2" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" +checksum = "9628de9b8791db39ceda2b119bbe13134770b56c138ec1d3af810d045c04f9bd" dependencies = [ "strum_macros", ] [[package]] name = "strum_macros" -version = "0.27.2" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7" +checksum = "ab85eea0270ee17587ed4156089e10b9e6880ee688791d45a905f5b1ca36f664" dependencies = [ "heck", "proc-macro2", @@ -2978,9 +3012,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.115" +version = "2.0.117" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e614ed320ac28113fa64972c4262d5dbc89deacdfd00c34a3e4cea073243c12" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" dependencies = [ "proc-macro2", "quote", @@ -3096,9 +3130,9 @@ dependencies = [ [[package]] name = "toml" -version = "1.0.1+spec-1.1.0" +version = "1.0.3+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbe30f93627849fa362d4a602212d41bb237dc2bd0f8ba0b2ce785012e124220" +checksum = "c7614eaf19ad818347db24addfa201729cf2a9b6fdfd9eb0ab870fcacc606c0c" dependencies = [ "indexmap", "serde_core", @@ -3120,9 +3154,9 @@ dependencies = [ [[package]] name = "toml_parser" -version = "1.0.8+spec-1.1.0" +version = "1.0.9+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0742ff5ff03ea7e67c8ae6c93cac239e0d9784833362da3f9a9c1da8dfefcbdc" +checksum = 
"702d4415e08923e7e1ef96cd5727c0dfed80b4d2fa25db9647fe5eb6f7c5a4c4" dependencies = [ "winnow", ] @@ -3155,7 +3189,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" dependencies = [ "async-compression", - "bitflags 2.10.0", + "bitflags 2.11.0", "bytes", "futures-core", "futures-util", @@ -3241,12 +3275,8 @@ version = "0.3.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e" dependencies = [ - "matchers", - "once_cell", - "regex-automata", "sharded-slab", "thread_local", - "tracing", "tracing-core", ] @@ -3264,9 +3294,9 @@ checksum = "9ea3136b675547379c4bd395ca6b938e5ad3c3d20fad76e7fe85f9e0d011419c" [[package]] name = "unicode-ident" -version = "1.0.23" +version = "1.0.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "537dd038a89878be9b64dd4bd1b260315c1bb94f4d784956b81e27a088d9a09e" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" [[package]] name = "unicode-segmentation" @@ -3320,8 +3350,6 @@ checksum = "8f54a172d0620933a27a4360d3db3e2ae0dd6cceae9730751a036bbf182c4b23" [[package]] name = "vecdb" version = "0.6.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16459a73939ec1c7ddb5c2f4264916f7bb96c88287b15dcce29cd95c16d2f6c0" dependencies = [ "ctrlc", "log", @@ -3341,8 +3369,6 @@ dependencies = [ [[package]] name = "vecdb_derive" version = "0.6.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1845265e89f36a22175ebef07dc1340050ef3ec54aa9f9c84859d4dda0a3a03" dependencies = [ "quote", "syn", @@ -3390,9 +3416,9 @@ dependencies = [ [[package]] name = "wasm-bindgen" -version = "0.2.108" +version = "0.2.111" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64024a30ec1e37399cf85a7ffefebdb72205ca1c972291c51512360d90bd8566" +checksum = 
"ec1adf1535672f5b7824f817792b1afd731d7e843d2d04ec8f27e8cb51edd8ac" dependencies = [ "cfg-if", "once_cell", @@ -3403,9 +3429,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.108" +version = "0.2.111" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "008b239d9c740232e71bd39e8ef6429d27097518b6b30bdf9086833bd5b6d608" +checksum = "19e638317c08b21663aed4d2b9a2091450548954695ff4efa75bff5fa546b3b1" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -3413,9 +3439,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.108" +version = "0.2.111" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5256bae2d58f54820e6490f9839c49780dff84c65aeab9e772f15d5f0e913a55" +checksum = "2c64760850114d03d5f65457e96fc988f11f01d38fbaa51b254e4ab5809102af" dependencies = [ "bumpalo", "proc-macro2", @@ -3426,9 +3452,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.108" +version = "0.2.111" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f01b580c9ac74c8d8f0c0e4afb04eeef2acf145458e52c03845ee9cd23e3d12" +checksum = "60eecd4fe26177cfa3339eb00b4a36445889ba3ad37080c2429879718e20ca41" dependencies = [ "unicode-ident", ] @@ -3461,7 +3487,7 @@ version = "0.244.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "hashbrown 0.15.5", "indexmap", "semver", @@ -3469,9 +3495,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.85" +version = "0.3.88" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "312e32e551d92129218ea9a2452120f4aabc03529ef03e4d0d82fb2780608598" +checksum = "9d6bb20ed2d9572df8584f6dc81d68a41a625cadc6f15999d649a70ce7e3597a" dependencies = [ "js-sys", "wasm-bindgen", @@ -3808,7 +3834,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" dependencies = [ "anyhow", - "bitflags 2.10.0", + "bitflags 2.11.0", "indexmap", "log", "serde", diff --git a/Cargo.toml b/Cargo.toml index a4b9f9d13..542f25725 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -40,6 +40,8 @@ aide = { version = "0.16.0-alpha.2", features = ["axum-json", "axum-query"] } axum = { version = "0.8.8", default-features = false, features = ["http1", "json", "query", "tokio", "tracing"] } bitcoin = { version = "0.32.8", features = ["serde"] } bitcoincore-rpc = "0.19.0" +corepc-client = { path = "/Users/k/Developer/corepc/client", features = ["client-sync"] } +corepc-jsonrpc = { package = "jsonrpc", path = "/Users/k/Developer/corepc/jsonrpc", features = ["simple_http"], default-features = false } brk_alloc = { version = "0.1.9", path = "crates/brk_alloc" } brk_bencher = { version = "0.1.9", path = "crates/brk_bencher" } brk_bindgen = { version = "0.1.9", path = "crates/brk_bindgen" } @@ -66,11 +68,11 @@ brk_website = { version = "0.1.9", path = "crates/brk_website" } byteview = "0.10.1" color-eyre = "0.6.5" derive_more = { version = "2.1.1", features = ["deref", "deref_mut"] } -fjall = "3.0.1" +fjall = "3.0.2" indexmap = { version = "2.13.0", features = ["serde"] } -jiff = { version = "0.2.20", features = ["perf-inline", "tz-system"], default-features = false } +jiff = { version = "0.2.21", features = ["perf-inline", "tz-system"], default-features = false } minreq = { version = "2.14.1", features = ["https", "json-using-serde"] } -owo-colors = "4.2.3" +owo-colors = "4.3.0" parking_lot = "0.12.5" pco = "1.0.1" rayon = "1.11.0" @@ -85,8 +87,8 @@ tokio = { version = "1.49.0", features = ["rt-multi-thread"] } tracing = { version = "0.1", default-features = false, features = ["std"] } tower-http = { version = "0.6.8", features = ["catch-panic", "compression-br", "compression-gzip", "compression-zstd", "cors", 
"normalize-path", "timeout", "trace"] } tower-layer = "0.3" -vecdb = { version = "0.6.8", features = ["derive", "serde_json", "pco", "schemars"] } -# vecdb = { path = "../anydb/crates/vecdb", features = ["derive", "serde_json", "pco", "schemars"] } +# vecdb = { version = "0.6.8", features = ["derive", "serde_json", "pco", "schemars"] } +vecdb = { path = "../anydb/crates/vecdb", features = ["derive", "serde_json", "pco", "schemars"] } [workspace.metadata.release] shared-version = true diff --git a/crates/brk/Cargo.toml b/crates/brk/Cargo.toml index 5c7075a79..4642072bc 100644 --- a/crates/brk/Cargo.toml +++ b/crates/brk/Cargo.toml @@ -64,7 +64,7 @@ brk_mempool = { workspace = true, optional = true } brk_oracle = { workspace = true, optional = true } brk_query = { workspace = true, optional = true } brk_reader = { workspace = true, optional = true } -brk_rpc = { workspace = true, optional = true } +brk_rpc = { workspace = true, optional = true, features = ["corepc"] } brk_server = { workspace = true, optional = true } brk_store = { workspace = true, optional = true } brk_traversable = { workspace = true, optional = true } diff --git a/crates/brk/README.md b/crates/brk/README.md index 4a1a548e9..cf0340153 100644 --- a/crates/brk/README.md +++ b/crates/brk/README.md @@ -18,7 +18,12 @@ use brk::query::Query; use brk::types::Height; ``` -Feature flags match crate names without the `brk_` prefix. Use `full` to enable all. +Feature flags match crate names without the `brk_` prefix. 
Use `full` to enable all: + +```toml +[dependencies] +brk = { version = "0.1", features = ["full"] } +``` ## Crates diff --git a/crates/brk_bindgen/src/analysis/tree.rs b/crates/brk_bindgen/src/analysis/tree.rs index 4ddd67eae..be1b34604 100644 --- a/crates/brk_bindgen/src/analysis/tree.rs +++ b/crates/brk_bindgen/src/analysis/tree.rs @@ -371,15 +371,15 @@ mod tests { let tree = make_branch(vec![ ( "base", - make_branch(vec![("dateindex", make_leaf("block_vbytes"))]), + make_branch(vec![("day1", make_leaf("block_vbytes"))]), ), ( "average", - make_branch(vec![("dateindex", make_leaf("block_vbytes_average"))]), + make_branch(vec![("day1", make_leaf("block_vbytes_average"))]), ), ( "sum", - make_branch(vec![("dateindex", make_leaf("block_vbytes_sum"))]), + make_branch(vec![("day1", make_leaf("block_vbytes_sum"))]), ), ]); @@ -394,23 +394,23 @@ mod tests { let tree = make_branch(vec![ ( "average", - make_branch(vec![("dateindex", make_leaf("block_weight_average"))]), + make_branch(vec![("day1", make_leaf("block_weight_average"))]), ), ( "sum", - make_branch(vec![("dateindex", make_leaf("block_weight_sum"))]), + make_branch(vec![("day1", make_leaf("block_weight_sum"))]), ), ( "cumulative", - make_branch(vec![("dateindex", make_leaf("block_weight_cumulative"))]), + make_branch(vec![("day1", make_leaf("block_weight_cumulative"))]), ), ( "max", - make_branch(vec![("dateindex", make_leaf("block_weight_max"))]), + make_branch(vec![("day1", make_leaf("block_weight_max"))]), ), ( "min", - make_branch(vec![("dateindex", make_leaf("block_weight_min"))]), + make_branch(vec![("day1", make_leaf("block_weight_min"))]), ), ]); @@ -426,15 +426,15 @@ mod tests { let tree = make_branch(vec![ ( "base", - make_branch(vec![("dateindex", make_leaf("block_weight_average"))]), + make_branch(vec![("day1", make_leaf("block_weight_average"))]), ), ( "average", - make_branch(vec![("dateindex", make_leaf("block_weight_average"))]), + make_branch(vec![("day1", make_leaf("block_weight_average"))]), 
), ( "sum", - make_branch(vec![("dateindex", make_leaf("block_weight_sum"))]), + make_branch(vec![("day1", make_leaf("block_weight_sum"))]), ), ]); diff --git a/crates/brk_bindgen/src/generate/constants.rs b/crates/brk_bindgen/src/generate/constants.rs index 2baf83713..04fe098b6 100644 --- a/crates/brk_bindgen/src/generate/constants.rs +++ b/crates/brk_bindgen/src/generate/constants.rs @@ -26,7 +26,7 @@ impl ClientConstants { /// Collect all constant data. pub fn collect() -> Self { let indexes = Index::all(); - let indexes: Vec<&'static str> = indexes.iter().map(|i| i.serialize_long()).collect(); + let indexes: Vec<&'static str> = indexes.iter().map(|i| i.name()).collect(); let pools = pools(); let mut sorted_pools: Vec<_> = pools.iter().collect(); diff --git a/crates/brk_bindgen/src/generators/javascript/client.rs b/crates/brk_bindgen/src/generators/javascript/client.rs index dbdca434d..2c6d271c9 100644 --- a/crates/brk_bindgen/src/generators/javascript/client.rs +++ b/crates/brk_bindgen/src/generators/javascript/client.rs @@ -49,11 +49,18 @@ class BrkError extends Error {{ }} // Date conversion constants and helpers -const _GENESIS = new Date(2009, 0, 3); // dateindex 0, weekindex 0 -const _DAY_ONE = new Date(2009, 0, 9); // dateindex 1 (6 day gap after genesis) -const _MS_PER_DAY = 24 * 60 * 60 * 1000; +const _GENESIS = new Date(2009, 0, 3); // day1 0, week1 0 +const _DAY_ONE = new Date(2009, 0, 9); // day1 1 (6 day gap after genesis) +const _MS_PER_DAY = 86400000; const _MS_PER_WEEK = 7 * _MS_PER_DAY; -const _DATE_INDEXES = new Set(['dateindex', 'weekindex', 'monthindex', 'yearindex', 'quarterindex', 'semesterindex', 'decadeindex']); +const _EPOCH_MS = 1230768000000; +const _DATE_INDEXES = new Set([ + 'minute1', 'minute5', 'minute10', 'minute30', + 'hour1', 'hour4', 'hour12', + 'day1', 'day3', 'week1', + 'month1', 'month3', 'month6', + 'year1', 'year10', +]); /** @param {{number}} months @returns {{globalThis.Date}} */ const _addMonths = (months) => new 
Date(2009, months, 1); @@ -66,24 +73,55 @@ const _addMonths = (months) => new Date(2009, months, 1); */ function indexToDate(index, i) {{ switch (index) {{ - case 'dateindex': return i === 0 ? _GENESIS : new Date(_DAY_ONE.getTime() + (i - 1) * _MS_PER_DAY); - case 'weekindex': return new Date(_GENESIS.getTime() + i * _MS_PER_WEEK); - case 'monthindex': return _addMonths(i); - case 'yearindex': return new Date(2009 + i, 0, 1); - case 'quarterindex': return _addMonths(i * 3); - case 'semesterindex': return _addMonths(i * 6); - case 'decadeindex': return new Date(2009 + i * 10, 0, 1); + case 'minute1': return new Date(_EPOCH_MS + i * 60000); + case 'minute5': return new Date(_EPOCH_MS + i * 300000); + case 'minute10': return new Date(_EPOCH_MS + i * 600000); + case 'minute30': return new Date(_EPOCH_MS + i * 1800000); + case 'hour1': return new Date(_EPOCH_MS + i * 3600000); + case 'hour4': return new Date(_EPOCH_MS + i * 14400000); + case 'hour12': return new Date(_EPOCH_MS + i * 43200000); + case 'day1': return i === 0 ? _GENESIS : new Date(_DAY_ONE.getTime() + (i - 1) * _MS_PER_DAY); + case 'day3': return new Date(_EPOCH_MS + i * 259200000); + case 'week1': return new Date(_GENESIS.getTime() + i * _MS_PER_WEEK); + case 'month1': return _addMonths(i); + case 'month3': return _addMonths(i * 3); + case 'month6': return _addMonths(i * 6); + case 'year1': return new Date(2009 + i, 0, 1); + case 'year10': return new Date(2009 + i * 10, 0, 1); default: throw new Error(`${{index}} is not a date-based index`); }} }} /** - * Check if an index type is date-based. - * @param {{Index}} index - * @returns {{boolean}} + * Convert a Date to an index value for date-based indexes. + * Returns the floor index (latest index whose date is <= the given date). 
+ * @param {{Index}} index - The index type + * @param {{globalThis.Date}} d - The date to convert + * @returns {{number}} */ -function isDateIndex(index) {{ - return _DATE_INDEXES.has(index); +function dateToIndex(index, d) {{ + const ms = d.getTime(); + switch (index) {{ + case 'minute1': return Math.floor((ms - _EPOCH_MS) / 60000); + case 'minute5': return Math.floor((ms - _EPOCH_MS) / 300000); + case 'minute10': return Math.floor((ms - _EPOCH_MS) / 600000); + case 'minute30': return Math.floor((ms - _EPOCH_MS) / 1800000); + case 'hour1': return Math.floor((ms - _EPOCH_MS) / 3600000); + case 'hour4': return Math.floor((ms - _EPOCH_MS) / 14400000); + case 'hour12': return Math.floor((ms - _EPOCH_MS) / 43200000); + case 'day1': {{ + if (ms < _DAY_ONE.getTime()) return 0; + return 1 + Math.floor((ms - _DAY_ONE.getTime()) / _MS_PER_DAY); + }} + case 'day3': return Math.floor((ms - _EPOCH_MS) / 259200000); + case 'week1': return Math.floor((ms - _GENESIS.getTime()) / _MS_PER_WEEK); + case 'month1': return (d.getFullYear() - 2009) * 12 + d.getMonth(); + case 'month3': return (d.getFullYear() - 2009) * 4 + Math.floor(d.getMonth() / 3); + case 'month6': return (d.getFullYear() - 2009) * 2 + Math.floor(d.getMonth() / 6); + case 'year1': return d.getFullYear() - 2009; + case 'year10': return Math.floor((d.getFullYear() - 2009) / 10); + default: throw new Error(`${{index}} is not a date-based index`); + }} }} /** @@ -94,8 +132,10 @@ function isDateIndex(index) {{ */ function _wrapMetricData(raw) {{ const {{ index, start, end, data }} = raw; + const _dateBased = _DATE_INDEXES.has(index); return /** @type {{MetricData}} */ ({{ ...raw, + isDateBased: _dateBased, dates() {{ /** @type {{globalThis.Date[]}} */ const result = []; @@ -108,38 +148,35 @@ function _wrapMetricData(raw) {{ for (let i = start; i < end; i++) result.push(i); return result; }}, - toDateMap() {{ - /** @type {{Map}} */ - const map = new Map(); - for (let i = 0; i < data.length; i++) 
map.set(indexToDate(index, start + i), data[i]); - return map; + keys() {{ + return _dateBased ? this.dates() : this.indexes(); }}, - toIndexMap() {{ - /** @type {{Map}} */ - const map = new Map(); - for (let i = 0; i < data.length; i++) map.set(start + i, data[i]); - return map; - }}, - dateEntries() {{ - /** @type {{Array<[globalThis.Date, T]>}} */ + entries() {{ + /** @type {{Array<[globalThis.Date | number, T]>}} */ const result = []; - for (let i = 0; i < data.length; i++) result.push([indexToDate(index, start + i), data[i]]); + if (_dateBased) {{ + for (let i = 0; i < data.length; i++) result.push([indexToDate(index, start + i), data[i]]); + }} else {{ + for (let i = 0; i < data.length; i++) result.push([start + i, data[i]]); + }} return result; }}, - indexEntries() {{ - /** @type {{Array<[number, T]>}} */ - const result = []; - for (let i = 0; i < data.length; i++) result.push([start + i, data[i]]); - return result; + toMap() {{ + /** @type {{Map}} */ + const map = new Map(); + if (_dateBased) {{ + for (let i = 0; i < data.length; i++) map.set(indexToDate(index, start + i), data[i]); + }} else {{ + for (let i = 0; i < data.length; i++) map.set(start + i, data[i]); + }} + return map; }}, - *iter() {{ - for (let i = 0; i < data.length; i++) yield [start + i, data[i]]; - }}, - *iterDates() {{ - for (let i = 0; i < data.length; i++) yield [indexToDate(index, start + i), data[i]]; - }}, - [Symbol.iterator]() {{ - return this.iter(); + *[Symbol.iterator]() {{ + if (_dateBased) {{ + for (let i = 0; i < data.length; i++) yield [indexToDate(index, start + i), data[i]]; + }} else {{ + for (let i = 0; i < data.length; i++) yield [start + i, data[i]]; + }} }}, }}); }} @@ -154,14 +191,12 @@ function _wrapMetricData(raw) {{ * @property {{number}} end - End index (exclusive) * @property {{string}} stamp - ISO 8601 timestamp of when the response was generated * @property {{T[]}} data - The metric data - * @property {{() => globalThis.Date[]}} dates - Convert index range to 
dates (date-based indexes only) - * @property {{() => number[]}} indexes - Get index range as array - * @property {{() => Map}} toDateMap - Return data as Map keyed by date (date-based only) - * @property {{() => Map}} toIndexMap - Return data as Map keyed by index - * @property {{() => Array<[globalThis.Date, T]>}} dateEntries - Return data as [date, value] pairs (date-based only) - * @property {{() => Array<[number, T]>}} indexEntries - Return data as [index, value] pairs - * @property {{() => IterableIterator<[number, T]>}} iter - Iterate over [index, value] pairs - * @property {{() => IterableIterator<[globalThis.Date, T]>}} iterDates - Iterate over [date, value] pairs (date-based only) + * @property {{boolean}} isDateBased - Whether this metric uses a date-based index + * @property {{() => (globalThis.Date[] | number[])}} keys - Get keys (dates for date-based, index numbers otherwise) + * @property {{() => Array<[globalThis.Date | number, T]>}} entries - Get [key, value] pairs (dates for date-based, index numbers otherwise) + * @property {{() => Map}} toMap - Return data as Map (dates for date-based, index numbers otherwise) + * @property {{() => globalThis.Date[]}} dates - Get dates (date-based indexes only, throws otherwise) + * @property {{() => number[]}} indexes - Get index numbers */ /** @typedef {{MetricData}} AnyMetricData */ @@ -172,11 +207,11 @@ function _wrapMetricData(raw) {{ */ /** - * Metric endpoint builder. Callable (returns itself) so both .by.dateindex and .by.dateindex() work. + * Metric endpoint builder. Callable (returns itself) so both .by.day1 and .by.day1() work. 
* @template T * @typedef {{Object}} MetricEndpointBuilder * @property {{(index: number) => SingleItemBuilder}} get - Get single item at index - * @property {{(start?: number, end?: number) => RangeBuilder}} slice - Slice like Array.slice + * @property {{(start?: number | globalThis.Date, end?: number | globalThis.Date) => RangeBuilder}} slice - Slice by index or Date * @property {{(n: number) => RangeBuilder}} first - Get first n items * @property {{(n: number) => RangeBuilder}} last - Get last n items * @property {{(n: number) => SkippedBuilder}} skip - Skip first n items, chain with take() @@ -216,7 +251,7 @@ function _wrapMetricData(raw) {{ * @template T * @typedef {{Object}} MetricPattern * @property {{string}} name - The metric name - * @property {{Readonly>>>}} by - Index endpoints as lazy getters. Access via .by.dateindex or .by['dateindex'] + * @property {{Readonly>>>}} by - Index endpoints as lazy getters. Access via .by.day1 or .by['day1'] * @property {{() => readonly Index[]}} indexes - Get the list of available indexes * @property {{(index: Index) => MetricEndpointBuilder|undefined}} get - Get an endpoint for a specific index */ @@ -284,7 +319,11 @@ function _endpoint(client, name, index) {{ /** @type {{MetricEndpointBuilder}} */ const endpoint = {{ get(idx) {{ return singleItemBuilder(idx); }}, - slice(start, end) {{ return rangeBuilder(start, end); }}, + slice(start, end) {{ + if (start instanceof Date) start = dateToIndex(index, start); + if (end instanceof Date) end = dateToIndex(index, end); + return rangeBuilder(start, end); + }}, first(n) {{ return rangeBuilder(undefined, n); }}, last(n) {{ return n === 0 ? rangeBuilder(undefined, 0) : rangeBuilder(-n, undefined); }}, skip(n) {{ return skippedBuilder(n); }}, @@ -457,13 +496,15 @@ pub fn generate_static_constants(output: &mut String) { }} /** - * Check if an index type is date-based. - * @param {{Index}} index - * @returns {{boolean}} + * Convert a Date to an index value for date-based indexes. 
+ * @param {{Index}} index - The index type + * @param {{globalThis.Date}} d - The date to convert + * @returns {{number}} */ - isDateIndex(index) {{ - return isDateIndex(index); + dateToIndex(index, d) {{ + return dateToIndex(index, d); }} + "# ) .unwrap(); @@ -501,14 +542,14 @@ pub fn generate_index_accessors(output: &mut String, patterns: &[IndexSetPattern writeln!(output, "// Index group constants and factory\n").unwrap(); - // Generate index array constants (e.g., _i1 = ["dateindex", "height"]) + // Generate index array constants (e.g., _i1 = ["day1", "height"]) for (i, pattern) in patterns.iter().enumerate() { write!(output, "const _i{} = /** @type {{const}} */ ([", i + 1).unwrap(); for (j, index) in pattern.indexes.iter().enumerate() { if j > 0 { write!(output, ", ").unwrap(); } - write!(output, "\"{}\"", index.serialize_long()).unwrap(); + write!(output, "\"{}\"", index.name()).unwrap(); } writeln!(output, "]);").unwrap(); } @@ -554,7 +595,7 @@ function _mp(client, name, indexes) {{ .map(|idx| { format!( "readonly {}: MetricEndpointBuilder", - idx.serialize_long() + idx.name() ) }) .collect(); diff --git a/crates/brk_bindgen/src/generators/python/api.rs b/crates/brk_bindgen/src/generators/python/api.rs index 94f9d2ec0..b2657030a 100644 --- a/crates/brk_bindgen/src/generators/python/api.rs +++ b/crates/brk_bindgen/src/generators/python/api.rs @@ -40,15 +40,14 @@ pub fn generate_main_client(output: &mut String, endpoints: &[Endpoint]) { writeln!(output).unwrap(); // Generate helper methods - writeln!(output, " def index_to_date(self, index: Index, i: int) -> date:").unwrap(); - writeln!(output, " \"\"\"Convert an index value to a date for date-based indexes.\"\"\"").unwrap(); - writeln!(output, " return index_to_date(index, i)").unwrap(); + writeln!(output, " def index_to_date(self, index: Index, i: int) -> Union[date, datetime]:").unwrap(); + writeln!(output, " \"\"\"Convert an index value to a date/datetime for date-based indexes.\"\"\"").unwrap(); + 
writeln!(output, " return _index_to_date(index, i)").unwrap(); writeln!(output).unwrap(); - writeln!(output, " def is_date_index(self, index: Index) -> bool:").unwrap(); - writeln!(output, " \"\"\"Check if an index type is date-based.\"\"\"").unwrap(); - writeln!(output, " return is_date_index(index)").unwrap(); + writeln!(output, " def date_to_index(self, index: Index, d: Union[date, datetime]) -> int:").unwrap(); + writeln!(output, " \"\"\"Convert a date/datetime to an index value for date-based indexes.\"\"\"").unwrap(); + writeln!(output, " return _date_to_index(index, d)").unwrap(); writeln!(output).unwrap(); - // Generate API methods generate_api_methods(output, endpoints); } diff --git a/crates/brk_bindgen/src/generators/python/client.rs b/crates/brk_bindgen/src/generators/python/client.rs index fc05e8d48..62383cf28 100644 --- a/crates/brk_bindgen/src/generators/python/client.rs +++ b/crates/brk_bindgen/src/generators/python/client.rs @@ -132,36 +132,93 @@ pub fn generate_endpoint_class(output: &mut String) { writeln!( output, r#"# Date conversion constants -_GENESIS = date(2009, 1, 3) # dateindex 0, weekindex 0 -_DAY_ONE = date(2009, 1, 9) # dateindex 1 (6 day gap after genesis) -_DATE_INDEXES = frozenset(['dateindex', 'weekindex', 'monthindex', 'yearindex', 'quarterindex', 'semesterindex', 'decadeindex']) +_GENESIS = date(2009, 1, 3) # day1 0, week1 0 +_DAY_ONE = date(2009, 1, 9) # day1 1 (6 day gap after genesis) +_EPOCH = datetime(2009, 1, 1, tzinfo=timezone.utc) +_DATE_INDEXES = frozenset([ + 'minute1', 'minute5', 'minute10', 'minute30', + 'hour1', 'hour4', 'hour12', + 'day1', 'day3', 'week1', + 'month1', 'month3', 'month6', + 'year1', 'year10', +]) -def is_date_index(index: str) -> bool: - """Check if an index type is date-based.""" - return index in _DATE_INDEXES - -def index_to_date(index: str, i: int) -> date: - """Convert an index value to a date for date-based indexes.""" - if index == 'dateindex': +def _index_to_date(index: str, i: int) -> 
Union[date, datetime]: + """Convert an index value to a date/datetime for date-based indexes.""" + if index == 'minute1': + return _EPOCH + timedelta(minutes=i) + elif index == 'minute5': + return _EPOCH + timedelta(minutes=i * 5) + elif index == 'minute10': + return _EPOCH + timedelta(minutes=i * 10) + elif index == 'minute30': + return _EPOCH + timedelta(minutes=i * 30) + elif index == 'hour1': + return _EPOCH + timedelta(hours=i) + elif index == 'hour4': + return _EPOCH + timedelta(hours=i * 4) + elif index == 'hour12': + return _EPOCH + timedelta(hours=i * 12) + elif index == 'day1': return _GENESIS if i == 0 else _DAY_ONE + timedelta(days=i - 1) - elif index == 'weekindex': + elif index == 'day3': + return _EPOCH.date() + timedelta(days=i * 3) + elif index == 'week1': return _GENESIS + timedelta(weeks=i) - elif index == 'monthindex': + elif index == 'month1': return date(2009 + i // 12, i % 12 + 1, 1) - elif index == 'yearindex': - return date(2009 + i, 1, 1) - elif index == 'quarterindex': + elif index == 'month3': m = i * 3 return date(2009 + m // 12, m % 12 + 1, 1) - elif index == 'semesterindex': + elif index == 'month6': m = i * 6 return date(2009 + m // 12, m % 12 + 1, 1) - elif index == 'decadeindex': + elif index == 'year1': + return date(2009 + i, 1, 1) + elif index == 'year10': return date(2009 + i * 10, 1, 1) else: raise ValueError(f"{{index}} is not a date-based index") +def _date_to_index(index: str, d: Union[date, datetime]) -> int: + """Convert a date/datetime to an index value for date-based indexes. + + Returns the floor index (latest index whose date is <= the given date). + For sub-day indexes (minute*, hour*), a plain date is treated as midnight UTC. 
+ """ + if index in ('minute1', 'minute5', 'minute10', 'minute30', 'hour1', 'hour4', 'hour12'): + if isinstance(d, datetime): + dt = d if d.tzinfo else d.replace(tzinfo=timezone.utc) + else: + dt = datetime(d.year, d.month, d.day, tzinfo=timezone.utc) + secs = int((dt - _EPOCH).total_seconds()) + div = {{'minute1': 60, 'minute5': 300, 'minute10': 600, 'minute30': 1800, + 'hour1': 3600, 'hour4': 14400, 'hour12': 43200}} + return secs // div[index] + dd = d.date() if isinstance(d, datetime) else d + if index == 'day1': + if dd < _DAY_ONE: + return 0 + return 1 + (dd - _DAY_ONE).days + elif index == 'day3': + return (dd - date(2009, 1, 1)).days // 3 + elif index == 'week1': + return (dd - _GENESIS).days // 7 + elif index == 'month1': + return (dd.year - 2009) * 12 + (dd.month - 1) + elif index == 'month3': + return (dd.year - 2009) * 4 + (dd.month - 1) // 3 + elif index == 'month6': + return (dd.year - 2009) * 2 + (dd.month - 1) // 6 + elif index == 'year1': + return dd.year - 2009 + elif index == 'year10': + return (dd.year - 2009) // 10 + else: + raise ValueError(f"{{index}} is not a date-based index") + + @dataclass class MetricData(Generic[T]): """Metric data with range information.""" @@ -173,71 +230,64 @@ class MetricData(Generic[T]): stamp: str data: List[T] - def dates(self) -> List[date]: - """Convert index range to dates. Only works for date-based indexes.""" - return [index_to_date(self.index, i) for i in range(self.start, self.end)] + @property + def is_date_based(self) -> bool: + """Whether this metric uses a date-based index.""" + return self.index in _DATE_INDEXES + + def dates(self) -> list: + """Get dates for the index range. 
Date-based indexes only, throws otherwise.""" + return [_index_to_date(self.index, i) for i in range(self.start, self.end)] def indexes(self) -> List[int]: - """Get index range as list.""" + """Get raw index numbers.""" return list(range(self.start, self.end)) - def to_date_dict(self) -> dict[date, T]: - """Return data as {{date: value}} dict. Only works for date-based indexes.""" - return dict(zip(self.dates(), self.data)) + def keys(self) -> list: + """Get keys: dates for date-based indexes, index numbers otherwise.""" + return self.dates() if self.is_date_based else self.indexes() - def to_index_dict(self) -> dict[int, T]: - """Return data as {{index: value}} dict.""" - return dict(zip(range(self.start, self.end), self.data)) + def items(self) -> list: + """Get (key, value) pairs: keys are dates for date-based, numbers otherwise.""" + return list(zip(self.keys(), self.data)) - def date_items(self) -> List[Tuple[date, T]]: - """Return data as [(date, value), ...] pairs. Only works for date-based indexes.""" - return list(zip(self.dates(), self.data)) + def to_dict(self) -> dict: + """Return {{key: value}} dict: keys are dates for date-based, numbers otherwise.""" + return dict(zip(self.keys(), self.data)) - def index_items(self) -> List[Tuple[int, T]]: - """Return data as [(index, value), ...] pairs.""" - return list(zip(range(self.start, self.end), self.data)) - - def iter(self) -> Iterator[Tuple[int, T]]: - """Iterate over (index, value) pairs.""" - return iter(zip(range(self.start, self.end), self.data)) - - def iter_dates(self) -> Iterator[Tuple[date, T]]: - """Iterate over (date, value) pairs. Date-based indexes only.""" - return iter(zip(self.dates(), self.data)) - - def __iter__(self) -> Iterator[Tuple[int, T]]: - """Default iteration over (index, value) pairs.""" - return self.iter() + def __iter__(self): + """Iterate over (key, value) pairs. 
Keys are dates for date-based, numbers otherwise.""" + return iter(zip(self.keys(), self.data)) def to_polars(self, with_dates: bool = True) -> pl.DataFrame: """Convert to Polars DataFrame. Requires polars to be installed. Returns a DataFrame with columns: - - 'date' (date) and 'value' (T) if with_dates=True and index is date-based - - 'index' (int) and 'value' (T) otherwise + - 'date' and 'value' if with_dates=True and index is date-based + - 'index' and 'value' otherwise """ try: import polars as pl # type: ignore[import-not-found] except ImportError: raise ImportError("polars is required: pip install polars") - if with_dates and self.index in _DATE_INDEXES: + if with_dates and self.is_date_based: return pl.DataFrame({{"date": self.dates(), "value": self.data}}) - return pl.DataFrame({{"index": list(range(self.start, self.end)), "value": self.data}}) + return pl.DataFrame({{"index": self.indexes(), "value": self.data}}) def to_pandas(self, with_dates: bool = True) -> pd.DataFrame: """Convert to Pandas DataFrame. Requires pandas to be installed. Returns a DataFrame with columns: - - 'date' (date) and 'value' (T) if with_dates=True and index is date-based - - 'index' (int) and 'value' (T) otherwise + - 'date' and 'value' if with_dates=True and index is date-based + - 'index' and 'value' otherwise """ try: import pandas as pd # type: ignore[import-not-found] except ImportError: raise ImportError("pandas is required: pip install pandas") - if with_dates and self.index in _DATE_INDEXES: + if with_dates and self.is_date_based: return pd.DataFrame({{"date": self.dates(), "value": self.data}}) - return pd.DataFrame({{"index": list(range(self.start, self.end)), "value": self.data}}) + return pd.DataFrame({{"index": self.indexes(), "value": self.data}}) # Type alias for non-generic usage @@ -369,23 +419,36 @@ class MetricEndpointBuilder(Generic[T]): @overload def __getitem__(self, key: slice) -> RangeBuilder[T]: ... 
- def __getitem__(self, key: Union[int, slice]) -> Union[SingleItemBuilder[T], RangeBuilder[T]]: - """Access single item or slice. + def __getitem__(self, key: Union[int, slice, date, datetime]) -> Union[SingleItemBuilder[T], RangeBuilder[T]]: + """Access single item or slice. Accepts dates for date-based indexes. Examples: - endpoint[5] # Single item at index 5 - endpoint[:10] # First 10 - endpoint[-5:] # Last 5 - endpoint[100:110] # Range 100-109 + endpoint[5] # Single item at index 5 + endpoint[:10] # First 10 + endpoint[-5:] # Last 5 + endpoint[100:110] # Range 100-109 + endpoint[date(2020, 1, 1):date(2023, 1, 1)] # Date range + endpoint[date(2020, 1, 1):] # Since date """ + if isinstance(key, (date, datetime)): + idx = _date_to_index(self._config.index, key) + return SingleItemBuilder(_EndpointConfig( + self._config.client, self._config.name, self._config.index, + idx, idx + 1 + )) if isinstance(key, int): return SingleItemBuilder(_EndpointConfig( self._config.client, self._config.name, self._config.index, key, key + 1 )) + start, stop = key.start, key.stop + if isinstance(start, (date, datetime)): + start = _date_to_index(self._config.index, start) + if isinstance(stop, (date, datetime)): + stop = _date_to_index(self._config.index, stop) return RangeBuilder(_EndpointConfig( self._config.client, self._config.name, self._config.index, - key.start, key.stop + start, stop )) def head(self, n: int = 10) -> RangeBuilder[T]: @@ -462,7 +525,7 @@ pub fn generate_index_accessors(output: &mut String, patterns: &[IndexSetPattern if j > 0 { write!(output, ", ").unwrap(); } - write!(output, "'{}'", index.serialize_long()).unwrap(); + write!(output, "'{}'", index.name()).unwrap(); } // Single-element tuple needs trailing comma if pattern.indexes.len() == 1 { @@ -496,7 +559,7 @@ pub fn generate_index_accessors(output: &mut String, patterns: &[IndexSetPattern .unwrap(); for index in &pattern.indexes { let method_name = index_to_field_name(index); - let index_name = 
index.serialize_long(); + let index_name = index.name(); writeln!( output, " def {}(self) -> MetricEndpointBuilder[T]: return _ep(self._c, self._n, '{}')", diff --git a/crates/brk_bindgen/src/generators/python/mod.rs b/crates/brk_bindgen/src/generators/python/mod.rs index e1a6129c7..ed37e27db 100644 --- a/crates/brk_bindgen/src/generators/python/mod.rs +++ b/crates/brk_bindgen/src/generators/python/mod.rs @@ -38,7 +38,7 @@ pub fn generate_python_client( ) .unwrap(); writeln!(output, "from urllib.parse import urlparse").unwrap(); - writeln!(output, "from datetime import date, timedelta").unwrap(); + writeln!(output, "from datetime import date, datetime, timedelta, timezone").unwrap(); writeln!(output, "import json\n").unwrap(); writeln!(output, "if TYPE_CHECKING:").unwrap(); writeln!(output, " import pandas as pd # type: ignore[import-not-found]").unwrap(); diff --git a/crates/brk_bindgen/src/generators/rust/api.rs b/crates/brk_bindgen/src/generators/rust/api.rs index f6d719b06..e8f7e896c 100644 --- a/crates/brk_bindgen/src/generators/rust/api.rs +++ b/crates/brk_bindgen/src/generators/rust/api.rs @@ -187,7 +187,7 @@ fn param_type_to_rust(param_type: &str) -> String { fn build_path_template(endpoint: &Endpoint) -> (String, &'static str) { let has_index_param = endpoint.path_params.iter().any(|p| p.name == "index" && p.param_type == "Index"); if has_index_param { - (endpoint.path.replace("{index}", "{}"), ", index.serialize_long()") + (endpoint.path.replace("{index}", "{}"), ", index.name()") } else { (endpoint.path.clone(), "") } diff --git a/crates/brk_bindgen/src/generators/rust/client.rs b/crates/brk_bindgen/src/generators/rust/client.rs index b91a3805d..21310dcff 100644 --- a/crates/brk_bindgen/src/generators/rust/client.rs +++ b/crates/brk_bindgen/src/generators/rust/client.rs @@ -179,7 +179,7 @@ impl EndpointConfig {{ }} fn path(&self) -> String {{ - format!("/api/metric/{{}}/{{}}", self.name, self.index.serialize_long()) + format!("/api/metric/{{}}/{{}}", 
self.name, self.index.name()) }} fn build_path(&self, format: Option<&str>) -> String {{ diff --git a/crates/brk_bindgen/src/types/case.rs b/crates/brk_bindgen/src/types/case.rs index 6202faf27..767b505a8 100644 --- a/crates/brk_bindgen/src/types/case.rs +++ b/crates/brk_bindgen/src/types/case.rs @@ -54,9 +54,9 @@ pub fn to_camel_case(s: &str) -> String { } } -/// Convert an Index to a snake_case field name (e.g., DateIndex -> dateindex). +/// Convert an Index to a snake_case field name (e.g., Day1 -> day1). pub fn index_to_field_name(index: &Index) -> String { - to_snake_case(index.serialize_long()) + to_snake_case(index.name()) } /// Generate a child type/struct/class name (e.g., ParentName + child_name -> ParentName_ChildName). diff --git a/crates/brk_cli/Cargo.toml b/crates/brk_cli/Cargo.toml index 877ae0697..a0b07e1bb 100644 --- a/crates/brk_cli/Cargo.toml +++ b/crates/brk_cli/Cargo.toml @@ -12,14 +12,13 @@ anyhow = "1.0" brk_alloc = { workspace = true } brk_computer = { workspace = true } brk_error = { workspace = true, features = ["tokio", "vecdb"] } -brk_fetcher = { workspace = true } brk_indexer = { workspace = true } brk_iterator = { workspace = true } brk_logger = { workspace = true } brk_mempool = { workspace = true } brk_query = { workspace = true } brk_reader = { workspace = true } -brk_rpc = { workspace = true } +brk_rpc = { workspace = true, features = ["corepc"] } brk_server = { workspace = true } brk_types = { workspace = true } lexopt = "0.3" @@ -27,7 +26,7 @@ owo-colors = { workspace = true } tracing = { workspace = true } serde = { workspace = true } tokio = { workspace = true } -toml = "1.0.1" +toml = "1.0.3" vecdb = { workspace = true } [[bin]] diff --git a/crates/brk_cli/README.md b/crates/brk_cli/README.md index b04b2c2ee..102531cb8 100644 --- a/crates/brk_cli/README.md +++ b/crates/brk_cli/README.md @@ -1,15 +1,16 @@ -# brk_cli +# BRK CLI Command-line interface for running a Bitcoin Research Kit instance. 
-## Preview +## Demo -- https://bitview.space - web interface -- https://bitview.space/api - API docs +- [bitview.space](https://bitview.space) - web interface +- [bitview.space/api](https://bitview.space/api) - API docs ## Requirements -- Bitcoin Core running with RPC enabled +- Linux or macOS +- Bitcoin Core with `server=1` in `bitcoin.conf` - Access to `blk*.dat` files - [~400 GB disk space](https://bitview.space/api/server/disk) - [12+ GB RAM](https://github.com/bitcoinresearchkit/benches#benchmarks) @@ -35,7 +36,9 @@ brk Indexes the blockchain, computes datasets, starts the server on `localhost:3110`, and waits for new blocks. -**Note:** When more than 10,000 blocks behind, indexing completes before the server starts to free up memory from fragmentation that occurs during large syncs. The web interface at `localhost:3110` won't be available until sync finishes. +## First sync + +The initial sync processes the entire blockchain and can take several hours. During this time (more than 10,000 blocks behind), indexing completes before the server starts to free up memory. The web interface at `localhost:3110` won't be available until sync finishes. 
## Options diff --git a/crates/brk_cli/src/config.rs b/crates/brk_cli/src/config.rs index 329f2c326..056545a39 100644 --- a/crates/brk_cli/src/config.rs +++ b/crates/brk_cli/src/config.rs @@ -4,7 +4,6 @@ use std::{ }; use brk_error::{Error, Result}; -use brk_fetcher::Fetcher; use brk_rpc::{Auth, Client}; use brk_server::Website; use brk_types::Port; @@ -24,9 +23,6 @@ pub struct Config { #[serde(default, deserialize_with = "default_on_error")] website: Option, - #[serde(default, deserialize_with = "default_on_error")] - fetch: Option, - #[serde(default, deserialize_with = "default_on_error")] bitcoindir: Option, @@ -70,9 +66,6 @@ impl Config { if let Some(v) = config_args.website { config.website = Some(v); } - if let Some(v) = config_args.fetch { - config.fetch = Some(v); - } if let Some(v) = config_args.bitcoindir { config.bitcoindir = Some(v); } @@ -119,7 +112,6 @@ impl Config { Long("brkdir") => config.brkdir = Some(parser.value().unwrap().parse().unwrap()), Long("brkport") => config.brkport = Some(parser.value().unwrap().parse().unwrap()), Long("website") => config.website = Some(parser.value().unwrap().parse().unwrap()), - Long("fetch") => config.fetch = Some(parser.value().unwrap().parse().unwrap()), Long("bitcoindir") => { config.bitcoindir = Some(parser.value().unwrap().parse().unwrap()) } @@ -179,11 +171,6 @@ impl Config { "".bright_black(), "[true]".bright_black() ); - println!( - " --fetch {} Fetch prices {}", - "".bright_black(), - "[true]".bright_black() - ); println!(); println!( " --bitcoindir {} Bitcoin directory {}", @@ -236,10 +223,7 @@ impl Config { " Edit {} to persist settings:", "~/.brk/config.toml".bright_black() ); - println!( - " {}", - "brkdir = \"/path/to/data\"".bright_black() - ); + println!(" {}", "brkdir = \"/path/to/data\"".bright_black()); println!( " {}", "bitcoindir = \"/path/to/.bitcoin\"".bright_black() @@ -338,10 +322,6 @@ Finally, you can run the program with '-h' for help." 
.map_or_else(default_brk_path, |s| fix_user_path(s.as_ref())) } - pub fn harsdir(&self) -> PathBuf { - self.brkdir().join("hars") - } - fn path_cookiefile(&self) -> PathBuf { self.rpccookiefile.as_ref().map_or_else( || self.bitcoindir().join(".cookie"), @@ -356,15 +336,6 @@ Finally, you can run the program with '-h' for help." pub fn brkport(&self) -> Option { self.brkport } - - pub fn fetch(&self) -> bool { - self.fetch.is_none_or(|b| b) - } - - pub fn fetcher(&self) -> Option { - self.fetch() - .then(|| Fetcher::import(Some(self.harsdir().as_path())).unwrap()) - } } fn default_on_error<'de, D, T>(deserializer: D) -> Result diff --git a/crates/brk_cli/src/main.rs b/crates/brk_cli/src/main.rs index 487d3889e..de5c3524a 100644 --- a/crates/brk_cli/src/main.rs +++ b/crates/brk_cli/src/main.rs @@ -24,15 +24,6 @@ mod paths; use crate::{config::Config, paths::*}; pub fn main() -> anyhow::Result<()> { - // Can't increase main thread's stack size, thus we need to use another thread - thread::Builder::new() - .stack_size(512 * 1024 * 1024) - .spawn(run)? 
- .join() - .unwrap() -} - -pub fn run() -> anyhow::Result<()> { fs::create_dir_all(dot_brk_path())?; brk_logger::init(Some(&dot_brk_log_path()))?; @@ -68,7 +59,7 @@ pub fn run() -> anyhow::Result<()> { } } - let mut computer = Computer::forced_import(&config.brkdir(), &indexer, config.fetcher())?; + let mut computer = Computer::forced_import(&config.brkdir(), &indexer)?; let mempool = Mempool::new(&client); diff --git a/crates/brk_client/Cargo.toml b/crates/brk_client/Cargo.toml index 0f7e36aa7..2e487f063 100644 --- a/crates/brk_client/Cargo.toml +++ b/crates/brk_client/Cargo.toml @@ -8,6 +8,7 @@ homepage.workspace = true repository.workspace = true keywords = ["bitcoin", "blockchain", "analytics", "on-chain"] categories = ["api-bindings", "cryptography::cryptocurrencies"] +exclude = ["examples/"] [dependencies] brk_cohort = { workspace = true } diff --git a/crates/brk_client/README.md b/crates/brk_client/README.md index 71b8ed3d3..7f01420b3 100644 --- a/crates/brk_client/README.md +++ b/crates/brk_client/README.md @@ -21,7 +21,7 @@ fn main() -> brk_client::Result<()> { // Blockchain data (mempool.space compatible) let block = client.get_block_by_height(800000)?; - let tx = client.get_tx("abc123...")?; + let tx = client.get_tx("a1075db55d416d3ca199f55b6084e2115b9345e16c5cf302fc80e9d5fbf5d48d")?; let address = client.get_address("bc1q...")?; // Metrics API - typed, chainable diff --git a/crates/brk_client/examples/basic.rs b/crates/brk_client/examples/basic.rs index 1f2091a04..9afa86a95 100644 --- a/crates/brk_client/examples/basic.rs +++ b/crates/brk_client/examples/basic.rs @@ -17,12 +17,12 @@ fn main() -> brk_client::Result<()> { // Using new idiomatic API: last(3).fetch() let price_close = client .metrics() - .price + .prices .usd .split .close .by - .dateindex() + .day1() .last(3) .fetch()?; println!("Last 3 price close values: {:?}", price_close); @@ -35,29 +35,20 @@ fn main() -> brk_client::Result<()> { .block_count .sum .by - .dateindex() + .day1() .last(3) 
.fetch()?; println!("Last 3 block count values: {:?}", block_count); // Fetch supply data - dbg!( - client - .metrics() - .supply - .circulating - .bitcoin - .by - .dateindex() - .path() - ); + dbg!(client.metrics().supply.circulating.btc.by.day1().path()); let circulating = client .metrics() .supply .circulating - .bitcoin + .btc .by - .dateindex() + .day1() .last(3) .fetch_csv()?; println!("Last 3 circulating supply values: {:?}", circulating); @@ -65,7 +56,7 @@ fn main() -> brk_client::Result<()> { // Using generic metric fetching let metricdata = client.get_metric( Metric::from("price_close"), - Index::DateIndex, + Index::Day1, Some(-3), None, None, diff --git a/crates/brk_client/examples/fetch_prices.rs b/crates/brk_client/examples/fetch_prices.rs deleted file mode 100644 index f9c4959c6..000000000 --- a/crates/brk_client/examples/fetch_prices.rs +++ /dev/null @@ -1,51 +0,0 @@ -use std::fs::File; -use std::io::{BufWriter, Write}; - -use brk_client::{BrkClient, BrkClientOptions, Result}; -use brk_types::Dollars; - -const CHUNK_SIZE: usize = 10_000; -const END_HEIGHT: usize = 630_000; -const OUTPUT_FILE: &str = "prices_avg.txt"; - -fn main() -> Result<()> { - let client = BrkClient::with_options(BrkClientOptions { - base_url: "https://next.bitview.space".to_string(), - timeout_secs: 60, - }); - - let file = File::create(OUTPUT_FILE).map_err(|e| brk_client::BrkError { - message: e.to_string(), - })?; - let mut writer = BufWriter::new(file); - - for start in (0..END_HEIGHT).step_by(CHUNK_SIZE) { - let end = (start + CHUNK_SIZE).min(END_HEIGHT); - eprintln!("Fetching {start} to {end}..."); - - let ohlcs = client - .metrics() - .price - .cents - .ohlc - .by - .height() - .range(start..end) - .fetch()?; - - for ohlc in ohlcs.data { - let avg = (u64::from(*ohlc.open) + u64::from(*ohlc.close)) / 2; - let avg = Dollars::from(avg); - writeln!(writer, "{avg}").map_err(|e| brk_client::BrkError { - message: e.to_string(), - })?; - } - } - - writer.flush().map_err(|e| 
brk_client::BrkError { - message: e.to_string(), - })?; - eprintln!("Done. Output in {OUTPUT_FILE}"); - - Ok(()) -} diff --git a/crates/brk_client/examples/tree.rs b/crates/brk_client/examples/tree.rs index 35c900a57..e4a4d36b0 100644 --- a/crates/brk_client/examples/tree.rs +++ b/crates/brk_client/examples/tree.rs @@ -55,7 +55,7 @@ fn main() -> brk_client::Result<()> { for metric in &metrics { for index in &metric.indexes { - let index_str = index.serialize_long(); + let index_str = index.name(); let full_path = format!("{}.by.{}", metric.path, index_str); match client.get_metric( diff --git a/crates/brk_client/src/lib.rs b/crates/brk_client/src/lib.rs index 3e0dc428d..f417fd143 100644 --- a/crates/brk_client/src/lib.rs +++ b/crates/brk_client/src/lib.rs @@ -151,7 +151,7 @@ impl EndpointConfig { } fn path(&self) -> String { - format!("/api/metric/{}/{}", self.name, self.index.serialize_long()) + format!("/api/metric/{}/{}", self.name, self.index.name()) } fn build_path(&self, format: Option<&str>) -> String { @@ -338,38 +338,43 @@ impl RangeBuilder { // Static index arrays -const _I1: &[Index] = &[Index::DateIndex, Index::DecadeIndex, Index::DifficultyEpoch, Index::Height, Index::MonthIndex, Index::QuarterIndex, Index::SemesterIndex, Index::WeekIndex, Index::YearIndex]; -const _I2: &[Index] = &[Index::DateIndex, Index::DecadeIndex, Index::DifficultyEpoch, Index::MonthIndex, Index::QuarterIndex, Index::SemesterIndex, Index::WeekIndex, Index::YearIndex]; -const _I3: &[Index] = &[Index::DateIndex, Index::DecadeIndex, Index::Height, Index::MonthIndex, Index::QuarterIndex, Index::SemesterIndex, Index::WeekIndex, Index::YearIndex]; -const _I4: &[Index] = &[Index::DateIndex, Index::DecadeIndex, Index::MonthIndex, Index::QuarterIndex, Index::SemesterIndex, Index::WeekIndex, Index::YearIndex]; -const _I5: &[Index] = &[Index::DateIndex, Index::Height]; -const _I6: &[Index] = &[Index::DateIndex]; -const _I7: &[Index] = &[Index::DecadeIndex]; -const _I8: &[Index] = 
&[Index::DifficultyEpoch]; -const _I9: &[Index] = &[Index::EmptyOutputIndex]; -const _I10: &[Index] = &[Index::HalvingEpoch]; -const _I11: &[Index] = &[Index::Height]; -const _I12: &[Index] = &[Index::TxInIndex]; -const _I13: &[Index] = &[Index::MonthIndex]; -const _I14: &[Index] = &[Index::OpReturnIndex]; -const _I15: &[Index] = &[Index::TxOutIndex]; -const _I16: &[Index] = &[Index::P2AAddressIndex]; -const _I17: &[Index] = &[Index::P2MSOutputIndex]; -const _I18: &[Index] = &[Index::P2PK33AddressIndex]; -const _I19: &[Index] = &[Index::P2PK65AddressIndex]; -const _I20: &[Index] = &[Index::P2PKHAddressIndex]; -const _I21: &[Index] = &[Index::P2SHAddressIndex]; -const _I22: &[Index] = &[Index::P2TRAddressIndex]; -const _I23: &[Index] = &[Index::P2WPKHAddressIndex]; -const _I24: &[Index] = &[Index::P2WSHAddressIndex]; -const _I25: &[Index] = &[Index::QuarterIndex]; -const _I26: &[Index] = &[Index::SemesterIndex]; -const _I27: &[Index] = &[Index::TxIndex]; -const _I28: &[Index] = &[Index::UnknownOutputIndex]; -const _I29: &[Index] = &[Index::WeekIndex]; -const _I30: &[Index] = &[Index::YearIndex]; -const _I31: &[Index] = &[Index::FundedAddressIndex]; -const _I32: &[Index] = &[Index::EmptyAddressIndex]; +const _I1: &[Index] = &[Index::Minute1, Index::Minute5, Index::Minute10, Index::Minute30, Index::Hour1, Index::Hour4, Index::Hour12, Index::Day1, Index::Day3, Index::Week1, Index::Month1, Index::Month3, Index::Month6, Index::Year1, Index::Year10, Index::HalvingEpoch, Index::DifficultyEpoch, Index::Height]; +const _I2: &[Index] = &[Index::Minute1, Index::Minute5, Index::Minute10, Index::Minute30, Index::Hour1, Index::Hour4, Index::Hour12, Index::Day1, Index::Day3, Index::Week1, Index::Month1, Index::Month3, Index::Month6, Index::Year1, Index::Year10, Index::HalvingEpoch, Index::DifficultyEpoch]; +const _I3: &[Index] = &[Index::Minute1]; +const _I4: &[Index] = &[Index::Minute5]; +const _I5: &[Index] = &[Index::Minute10]; +const _I6: &[Index] = &[Index::Minute30]; +const 
_I7: &[Index] = &[Index::Hour1]; +const _I8: &[Index] = &[Index::Hour4]; +const _I9: &[Index] = &[Index::Hour12]; +const _I10: &[Index] = &[Index::Day1]; +const _I11: &[Index] = &[Index::Day3]; +const _I12: &[Index] = &[Index::Week1]; +const _I13: &[Index] = &[Index::Month1]; +const _I14: &[Index] = &[Index::Month3]; +const _I15: &[Index] = &[Index::Month6]; +const _I16: &[Index] = &[Index::Year1]; +const _I17: &[Index] = &[Index::Year10]; +const _I18: &[Index] = &[Index::HalvingEpoch]; +const _I19: &[Index] = &[Index::DifficultyEpoch]; +const _I20: &[Index] = &[Index::Height]; +const _I21: &[Index] = &[Index::TxIndex]; +const _I22: &[Index] = &[Index::TxInIndex]; +const _I23: &[Index] = &[Index::TxOutIndex]; +const _I24: &[Index] = &[Index::EmptyOutputIndex]; +const _I25: &[Index] = &[Index::OpReturnIndex]; +const _I26: &[Index] = &[Index::P2AAddressIndex]; +const _I27: &[Index] = &[Index::P2MSOutputIndex]; +const _I28: &[Index] = &[Index::P2PK33AddressIndex]; +const _I29: &[Index] = &[Index::P2PK65AddressIndex]; +const _I30: &[Index] = &[Index::P2PKHAddressIndex]; +const _I31: &[Index] = &[Index::P2SHAddressIndex]; +const _I32: &[Index] = &[Index::P2TRAddressIndex]; +const _I33: &[Index] = &[Index::P2WPKHAddressIndex]; +const _I34: &[Index] = &[Index::P2WSHAddressIndex]; +const _I35: &[Index] = &[Index::UnknownOutputIndex]; +const _I36: &[Index] = &[Index::FundedAddressIndex]; +const _I37: &[Index] = &[Index::EmptyAddressIndex]; #[inline] fn _ep(c: &Arc, n: &Arc, i: Index) -> MetricEndpointBuilder { @@ -380,15 +385,24 @@ fn _ep(c: &Arc, n: &Arc, i: Index) -> M pub struct MetricPattern1By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern1By { - pub fn dateindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DateIndex) } - pub fn decadeindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DecadeIndex) } + pub fn minute1(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, 
Index::Minute1) } + pub fn minute5(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Minute5) } + pub fn minute10(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Minute10) } + pub fn minute30(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Minute30) } + pub fn hour1(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Hour1) } + pub fn hour4(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Hour4) } + pub fn hour12(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Hour12) } + pub fn day1(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Day1) } + pub fn day3(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Day3) } + pub fn week1(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Week1) } + pub fn month1(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Month1) } + pub fn month3(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Month3) } + pub fn month6(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Month6) } + pub fn year1(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Year1) } + pub fn year10(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Year10) } + pub fn halvingepoch(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::HalvingEpoch) } pub fn difficultyepoch(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DifficultyEpoch) } pub fn height(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Height) } - pub fn monthindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::MonthIndex) } - pub fn quarterindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::QuarterIndex) } - pub fn semesterindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, 
Index::SemesterIndex) } - pub fn weekindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::WeekIndex) } - pub fn yearindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::YearIndex) } } pub struct MetricPattern1 { name: Arc, pub by: MetricPattern1By } @@ -402,14 +416,23 @@ impl MetricPattern for MetricPattern1 { fn get(&self, pub struct MetricPattern2By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern2By { - pub fn dateindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DateIndex) } - pub fn decadeindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DecadeIndex) } + pub fn minute1(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Minute1) } + pub fn minute5(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Minute5) } + pub fn minute10(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Minute10) } + pub fn minute30(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Minute30) } + pub fn hour1(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Hour1) } + pub fn hour4(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Hour4) } + pub fn hour12(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Hour12) } + pub fn day1(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Day1) } + pub fn day3(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Day3) } + pub fn week1(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Week1) } + pub fn month1(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Month1) } + pub fn month3(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Month3) } + pub fn month6(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Month6) } + pub fn year1(&self) -> 
MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Year1) } + pub fn year10(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Year10) } + pub fn halvingepoch(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::HalvingEpoch) } pub fn difficultyepoch(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DifficultyEpoch) } - pub fn monthindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::MonthIndex) } - pub fn quarterindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::QuarterIndex) } - pub fn semesterindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::SemesterIndex) } - pub fn weekindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::WeekIndex) } - pub fn yearindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::YearIndex) } } pub struct MetricPattern2 { name: Arc, pub by: MetricPattern2By } @@ -423,14 +446,7 @@ impl MetricPattern for MetricPattern2 { fn get(&self, pub struct MetricPattern3By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern3By { - pub fn dateindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DateIndex) } - pub fn decadeindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DecadeIndex) } - pub fn height(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Height) } - pub fn monthindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::MonthIndex) } - pub fn quarterindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::QuarterIndex) } - pub fn semesterindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::SemesterIndex) } - pub fn weekindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::WeekIndex) } - pub fn yearindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, 
Index::YearIndex) } + pub fn minute1(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Minute1) } } pub struct MetricPattern3 { name: Arc, pub by: MetricPattern3By } @@ -444,13 +460,7 @@ impl MetricPattern for MetricPattern3 { fn get(&self, pub struct MetricPattern4By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern4By { - pub fn dateindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DateIndex) } - pub fn decadeindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DecadeIndex) } - pub fn monthindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::MonthIndex) } - pub fn quarterindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::QuarterIndex) } - pub fn semesterindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::SemesterIndex) } - pub fn weekindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::WeekIndex) } - pub fn yearindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::YearIndex) } + pub fn minute5(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Minute5) } } pub struct MetricPattern4 { name: Arc, pub by: MetricPattern4By } @@ -464,8 +474,7 @@ impl MetricPattern for MetricPattern4 { fn get(&self, pub struct MetricPattern5By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern5By { - pub fn dateindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DateIndex) } - pub fn height(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Height) } + pub fn minute10(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Minute10) } } pub struct MetricPattern5 { name: Arc, pub by: MetricPattern5By } @@ -479,7 +488,7 @@ impl MetricPattern for MetricPattern5 { fn get(&self, pub struct MetricPattern6By { client: Arc, name: Arc, _marker: 
std::marker::PhantomData } impl MetricPattern6By { - pub fn dateindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DateIndex) } + pub fn minute30(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Minute30) } } pub struct MetricPattern6 { name: Arc, pub by: MetricPattern6By } @@ -493,7 +502,7 @@ impl MetricPattern for MetricPattern6 { fn get(&self, pub struct MetricPattern7By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern7By { - pub fn decadeindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DecadeIndex) } + pub fn hour1(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Hour1) } } pub struct MetricPattern7 { name: Arc, pub by: MetricPattern7By } @@ -507,7 +516,7 @@ impl MetricPattern for MetricPattern7 { fn get(&self, pub struct MetricPattern8By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern8By { - pub fn difficultyepoch(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DifficultyEpoch) } + pub fn hour4(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Hour4) } } pub struct MetricPattern8 { name: Arc, pub by: MetricPattern8By } @@ -521,7 +530,7 @@ impl MetricPattern for MetricPattern8 { fn get(&self, pub struct MetricPattern9By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern9By { - pub fn emptyoutputindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::EmptyOutputIndex) } + pub fn hour12(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Hour12) } } pub struct MetricPattern9 { name: Arc, pub by: MetricPattern9By } @@ -535,7 +544,7 @@ impl MetricPattern for MetricPattern9 { fn get(&self, pub struct MetricPattern10By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern10By { - pub fn halvingepoch(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, 
Index::HalvingEpoch) } + pub fn day1(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Day1) } } pub struct MetricPattern10 { name: Arc, pub by: MetricPattern10By } @@ -549,7 +558,7 @@ impl MetricPattern for MetricPattern10 { fn get(&self pub struct MetricPattern11By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern11By { - pub fn height(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Height) } + pub fn day3(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Day3) } } pub struct MetricPattern11 { name: Arc, pub by: MetricPattern11By } @@ -563,7 +572,7 @@ impl MetricPattern for MetricPattern11 { fn get(&self pub struct MetricPattern12By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern12By { - pub fn txinindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::TxInIndex) } + pub fn week1(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Week1) } } pub struct MetricPattern12 { name: Arc, pub by: MetricPattern12By } @@ -577,7 +586,7 @@ impl MetricPattern for MetricPattern12 { fn get(&self pub struct MetricPattern13By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern13By { - pub fn monthindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::MonthIndex) } + pub fn month1(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Month1) } } pub struct MetricPattern13 { name: Arc, pub by: MetricPattern13By } @@ -591,7 +600,7 @@ impl MetricPattern for MetricPattern13 { fn get(&self pub struct MetricPattern14By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern14By { - pub fn opreturnindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::OpReturnIndex) } + pub fn month3(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Month3) } } pub struct MetricPattern14 { name: Arc, pub 
by: MetricPattern14By } @@ -605,7 +614,7 @@ impl MetricPattern for MetricPattern14 { fn get(&self pub struct MetricPattern15By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern15By { - pub fn txoutindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::TxOutIndex) } + pub fn month6(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Month6) } } pub struct MetricPattern15 { name: Arc, pub by: MetricPattern15By } @@ -619,7 +628,7 @@ impl MetricPattern for MetricPattern15 { fn get(&self pub struct MetricPattern16By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern16By { - pub fn p2aaddressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2AAddressIndex) } + pub fn year1(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Year1) } } pub struct MetricPattern16 { name: Arc, pub by: MetricPattern16By } @@ -633,7 +642,7 @@ impl MetricPattern for MetricPattern16 { fn get(&self pub struct MetricPattern17By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern17By { - pub fn p2msoutputindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2MSOutputIndex) } + pub fn year10(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Year10) } } pub struct MetricPattern17 { name: Arc, pub by: MetricPattern17By } @@ -647,7 +656,7 @@ impl MetricPattern for MetricPattern17 { fn get(&self pub struct MetricPattern18By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern18By { - pub fn p2pk33addressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2PK33AddressIndex) } + pub fn halvingepoch(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::HalvingEpoch) } } pub struct MetricPattern18 { name: Arc, pub by: MetricPattern18By } @@ -661,7 +670,7 @@ impl MetricPattern for MetricPattern18 { fn get(&self pub 
struct MetricPattern19By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern19By { - pub fn p2pk65addressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2PK65AddressIndex) } + pub fn difficultyepoch(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DifficultyEpoch) } } pub struct MetricPattern19 { name: Arc, pub by: MetricPattern19By } @@ -675,7 +684,7 @@ impl MetricPattern for MetricPattern19 { fn get(&self pub struct MetricPattern20By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern20By { - pub fn p2pkhaddressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2PKHAddressIndex) } + pub fn height(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Height) } } pub struct MetricPattern20 { name: Arc, pub by: MetricPattern20By } @@ -689,7 +698,7 @@ impl MetricPattern for MetricPattern20 { fn get(&self pub struct MetricPattern21By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern21By { - pub fn p2shaddressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2SHAddressIndex) } + pub fn txindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::TxIndex) } } pub struct MetricPattern21 { name: Arc, pub by: MetricPattern21By } @@ -703,7 +712,7 @@ impl MetricPattern for MetricPattern21 { fn get(&self pub struct MetricPattern22By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern22By { - pub fn p2traddressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2TRAddressIndex) } + pub fn txinindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::TxInIndex) } } pub struct MetricPattern22 { name: Arc, pub by: MetricPattern22By } @@ -717,7 +726,7 @@ impl MetricPattern for MetricPattern22 { fn get(&self pub struct MetricPattern23By { client: Arc, name: Arc, _marker: 
std::marker::PhantomData } impl MetricPattern23By { - pub fn p2wpkhaddressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2WPKHAddressIndex) } + pub fn txoutindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::TxOutIndex) } } pub struct MetricPattern23 { name: Arc, pub by: MetricPattern23By } @@ -731,7 +740,7 @@ impl MetricPattern for MetricPattern23 { fn get(&self pub struct MetricPattern24By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern24By { - pub fn p2wshaddressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2WSHAddressIndex) } + pub fn emptyoutputindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::EmptyOutputIndex) } } pub struct MetricPattern24 { name: Arc, pub by: MetricPattern24By } @@ -745,7 +754,7 @@ impl MetricPattern for MetricPattern24 { fn get(&self pub struct MetricPattern25By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern25By { - pub fn quarterindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::QuarterIndex) } + pub fn opreturnindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::OpReturnIndex) } } pub struct MetricPattern25 { name: Arc, pub by: MetricPattern25By } @@ -759,7 +768,7 @@ impl MetricPattern for MetricPattern25 { fn get(&self pub struct MetricPattern26By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern26By { - pub fn semesterindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::SemesterIndex) } + pub fn p2aaddressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2AAddressIndex) } } pub struct MetricPattern26 { name: Arc, pub by: MetricPattern26By } @@ -773,7 +782,7 @@ impl MetricPattern for MetricPattern26 { fn get(&self pub struct MetricPattern27By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern27By { 
- pub fn txindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::TxIndex) } + pub fn p2msoutputindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2MSOutputIndex) } } pub struct MetricPattern27 { name: Arc, pub by: MetricPattern27By } @@ -787,7 +796,7 @@ impl MetricPattern for MetricPattern27 { fn get(&self pub struct MetricPattern28By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern28By { - pub fn unknownoutputindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::UnknownOutputIndex) } + pub fn p2pk33addressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2PK33AddressIndex) } } pub struct MetricPattern28 { name: Arc, pub by: MetricPattern28By } @@ -801,7 +810,7 @@ impl MetricPattern for MetricPattern28 { fn get(&self pub struct MetricPattern29By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern29By { - pub fn weekindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::WeekIndex) } + pub fn p2pk65addressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2PK65AddressIndex) } } pub struct MetricPattern29 { name: Arc, pub by: MetricPattern29By } @@ -815,7 +824,7 @@ impl MetricPattern for MetricPattern29 { fn get(&self pub struct MetricPattern30By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern30By { - pub fn yearindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::YearIndex) } + pub fn p2pkhaddressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2PKHAddressIndex) } } pub struct MetricPattern30 { name: Arc, pub by: MetricPattern30By } @@ -829,7 +838,7 @@ impl MetricPattern for MetricPattern30 { fn get(&self pub struct MetricPattern31By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern31By { - pub fn fundedaddressindex(&self) -> 
MetricEndpointBuilder { _ep(&self.client, &self.name, Index::FundedAddressIndex) } + pub fn p2shaddressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2SHAddressIndex) } } pub struct MetricPattern31 { name: Arc, pub by: MetricPattern31By } @@ -843,7 +852,7 @@ impl MetricPattern for MetricPattern31 { fn get(&self pub struct MetricPattern32By { client: Arc, name: Arc, _marker: std::marker::PhantomData } impl MetricPattern32By { - pub fn emptyaddressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::EmptyAddressIndex) } + pub fn p2traddressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2TRAddressIndex) } } pub struct MetricPattern32 { name: Arc, pub by: MetricPattern32By } @@ -855,31 +864,115 @@ impl MetricPattern32 { impl AnyMetricPattern for MetricPattern32 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I32 } } impl MetricPattern for MetricPattern32 { fn get(&self, index: Index) -> Option> { _I32.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +pub struct MetricPattern33By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +impl MetricPattern33By { + pub fn p2wpkhaddressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2WPKHAddressIndex) } +} + +pub struct MetricPattern33 { name: Arc, pub by: MetricPattern33By } +impl MetricPattern33 { + pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern33By { client, name, _marker: std::marker::PhantomData } } } + pub fn name(&self) -> &str { &self.name } +} + +impl AnyMetricPattern for MetricPattern33 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I33 } } +impl MetricPattern for MetricPattern33 { fn get(&self, index: Index) -> Option> { _I33.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } + +pub struct MetricPattern34By { 
client: Arc, name: Arc, _marker: std::marker::PhantomData } +impl MetricPattern34By { + pub fn p2wshaddressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2WSHAddressIndex) } +} + +pub struct MetricPattern34 { name: Arc, pub by: MetricPattern34By } +impl MetricPattern34 { + pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern34By { client, name, _marker: std::marker::PhantomData } } } + pub fn name(&self) -> &str { &self.name } +} + +impl AnyMetricPattern for MetricPattern34 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I34 } } +impl MetricPattern for MetricPattern34 { fn get(&self, index: Index) -> Option> { _I34.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } + +pub struct MetricPattern35By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +impl MetricPattern35By { + pub fn unknownoutputindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::UnknownOutputIndex) } +} + +pub struct MetricPattern35 { name: Arc, pub by: MetricPattern35By } +impl MetricPattern35 { + pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern35By { client, name, _marker: std::marker::PhantomData } } } + pub fn name(&self) -> &str { &self.name } +} + +impl AnyMetricPattern for MetricPattern35 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I35 } } +impl MetricPattern for MetricPattern35 { fn get(&self, index: Index) -> Option> { _I35.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } + +pub struct MetricPattern36By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +impl MetricPattern36By { + pub fn fundedaddressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::FundedAddressIndex) } +} + +pub struct MetricPattern36 { name: Arc, pub by: 
MetricPattern36By } +impl MetricPattern36 { + pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern36By { client, name, _marker: std::marker::PhantomData } } } + pub fn name(&self) -> &str { &self.name } +} + +impl AnyMetricPattern for MetricPattern36 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I36 } } +impl MetricPattern for MetricPattern36 { fn get(&self, index: Index) -> Option> { _I36.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } + +pub struct MetricPattern37By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +impl MetricPattern37By { + pub fn emptyaddressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::EmptyAddressIndex) } +} + +pub struct MetricPattern37 { name: Arc, pub by: MetricPattern37By } +impl MetricPattern37 { + pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern37By { client, name, _marker: std::marker::PhantomData } } } + pub fn name(&self) -> &str { &self.name } +} + +impl AnyMetricPattern for MetricPattern37 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I37 } } +impl MetricPattern for MetricPattern37 { fn get(&self, index: Index) -> Option> { _I37.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } + // Reusable pattern structs /// Pattern struct for repeated tree structure. 
pub struct AdjustedCapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSentSoprTotalUpperValuePattern { - pub adjusted_sopr: MetricPattern6, - pub adjusted_sopr_30d_ema: MetricPattern6, - pub adjusted_sopr_7d_ema: MetricPattern6, + pub adjusted_sopr: MetricPattern1, + pub adjusted_sopr_1y: MetricPattern1, + pub adjusted_sopr_24h: MetricPattern1, + pub adjusted_sopr_24h_30d_ema: MetricPattern1, + pub adjusted_sopr_24h_7d_ema: MetricPattern1, + pub adjusted_sopr_30d: MetricPattern1, + pub adjusted_sopr_30d_ema: MetricPattern1, + pub adjusted_sopr_7d: MetricPattern1, + pub adjusted_sopr_7d_ema: MetricPattern1, pub adjusted_value_created: MetricPattern1, + pub adjusted_value_created_1y: MetricPattern1, + pub adjusted_value_created_24h: MetricPattern1, + pub adjusted_value_created_30d: MetricPattern1, + pub adjusted_value_created_7d: MetricPattern1, pub adjusted_value_destroyed: MetricPattern1, - pub cap_raw: MetricPattern11, + pub adjusted_value_destroyed_1y: MetricPattern1, + pub adjusted_value_destroyed_24h: MetricPattern1, + pub adjusted_value_destroyed_30d: MetricPattern1, + pub adjusted_value_destroyed_7d: MetricPattern1, + pub cap_raw: MetricPattern20, pub capitulation_flow: MetricPattern1, - pub investor_cap_raw: MetricPattern11, - pub investor_price: DollarsSatsPattern, - pub investor_price_cents: MetricPattern1, + pub investor_cap_raw: MetricPattern20, + pub investor_price: SatsUsdPattern, + pub investor_price_cents: MetricPattern1, pub investor_price_extra: RatioPattern, pub loss_value_created: MetricPattern1, pub loss_value_destroyed: MetricPattern1, - pub lower_price_band: DollarsSatsPattern, - pub mvrv: MetricPattern4, + pub lower_price_band: SatsUsdPattern, + pub mvrv: MetricPattern1, pub neg_realized_loss: CumulativeSumPattern2, pub net_realized_pnl: CumulativeSumPattern, - pub net_realized_pnl_7d_ema: MetricPattern4, - pub net_realized_pnl_cumulative_30d_delta: MetricPattern4, - pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: 
MetricPattern4, - pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4, + pub net_realized_pnl_7d_ema: MetricPattern1, + pub net_realized_pnl_cumulative_30d_delta: MetricPattern1, + pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern1, + pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern1, pub net_realized_pnl_rel_to_realized_cap: CumulativeSumPattern, pub peak_regret: CumulativeSumPattern, pub peak_regret_rel_to_realized_cap: MetricPattern1, @@ -887,60 +980,109 @@ pub struct AdjustedCapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedS pub profit_value_created: MetricPattern1, pub profit_value_destroyed: MetricPattern1, pub realized_cap: MetricPattern1, - pub realized_cap_30d_delta: MetricPattern4, - pub realized_cap_cents: MetricPattern1, + pub realized_cap_30d_delta: MetricPattern1, + pub realized_cap_cents: MetricPattern1, pub realized_cap_rel_to_own_market_cap: MetricPattern1, pub realized_loss: CumulativeSumPattern, - pub realized_loss_7d_ema: MetricPattern4, + pub realized_loss_1y: MetricPattern1, + pub realized_loss_24h: MetricPattern1, + pub realized_loss_30d: MetricPattern1, + pub realized_loss_7d: MetricPattern1, + pub realized_loss_7d_ema: MetricPattern1, pub realized_loss_rel_to_realized_cap: CumulativeSumPattern, - pub realized_price: DollarsSatsPattern, + pub realized_price: SatsUsdPattern, pub realized_price_extra: RatioPattern, pub realized_profit: CumulativeSumPattern, - pub realized_profit_7d_ema: MetricPattern4, + pub realized_profit_1y: MetricPattern1, + pub realized_profit_24h: MetricPattern1, + pub realized_profit_30d: MetricPattern1, + pub realized_profit_7d: MetricPattern1, + pub realized_profit_7d_ema: MetricPattern1, pub realized_profit_rel_to_realized_cap: CumulativeSumPattern, - pub realized_profit_to_loss_ratio: MetricPattern6, + pub realized_profit_to_loss_ratio_1y: MetricPattern1, + pub realized_profit_to_loss_ratio_24h: MetricPattern1, + pub 
realized_profit_to_loss_ratio_30d: MetricPattern1, + pub realized_profit_to_loss_ratio_7d: MetricPattern1, pub realized_value: MetricPattern1, - pub sell_side_risk_ratio: MetricPattern6, - pub sell_side_risk_ratio_30d_ema: MetricPattern6, - pub sell_side_risk_ratio_7d_ema: MetricPattern6, - pub sent_in_loss: BitcoinDollarsSatsPattern3, - pub sent_in_loss_14d_ema: BitcoinDollarsSatsPattern5, - pub sent_in_profit: BitcoinDollarsSatsPattern3, - pub sent_in_profit_14d_ema: BitcoinDollarsSatsPattern5, - pub sopr: MetricPattern6, - pub sopr_30d_ema: MetricPattern6, - pub sopr_7d_ema: MetricPattern6, + pub realized_value_1y: MetricPattern1, + pub realized_value_24h: MetricPattern1, + pub realized_value_30d: MetricPattern1, + pub realized_value_7d: MetricPattern1, + pub sell_side_risk_ratio: MetricPattern1, + pub sell_side_risk_ratio_1y: MetricPattern1, + pub sell_side_risk_ratio_24h: MetricPattern1, + pub sell_side_risk_ratio_24h_30d_ema: MetricPattern1, + pub sell_side_risk_ratio_24h_7d_ema: MetricPattern1, + pub sell_side_risk_ratio_30d: MetricPattern1, + pub sell_side_risk_ratio_30d_ema: MetricPattern1, + pub sell_side_risk_ratio_7d: MetricPattern1, + pub sell_side_risk_ratio_7d_ema: MetricPattern1, + pub sent_in_loss: BtcSatsUsdPattern2, + pub sent_in_loss_14d_ema: BtcSatsUsdPattern, + pub sent_in_profit: BtcSatsUsdPattern2, + pub sent_in_profit_14d_ema: BtcSatsUsdPattern, + pub sopr: MetricPattern1, + pub sopr_1y: MetricPattern1, + pub sopr_24h: MetricPattern1, + pub sopr_24h_30d_ema: MetricPattern1, + pub sopr_24h_7d_ema: MetricPattern1, + pub sopr_30d: MetricPattern1, + pub sopr_30d_ema: MetricPattern1, + pub sopr_7d: MetricPattern1, + pub sopr_7d_ema: MetricPattern1, pub total_realized_pnl: MetricPattern1, - pub upper_price_band: DollarsSatsPattern, + pub upper_price_band: SatsUsdPattern, pub value_created: MetricPattern1, + pub value_created_1y: MetricPattern1, + pub value_created_24h: MetricPattern1, + pub value_created_30d: MetricPattern1, + pub 
value_created_7d: MetricPattern1, pub value_destroyed: MetricPattern1, + pub value_destroyed_1y: MetricPattern1, + pub value_destroyed_24h: MetricPattern1, + pub value_destroyed_30d: MetricPattern1, + pub value_destroyed_7d: MetricPattern1, } impl AdjustedCapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSentSoprTotalUpperValuePattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - adjusted_sopr: MetricPattern6::new(client.clone(), _m(&acc, "adjusted_sopr")), - adjusted_sopr_30d_ema: MetricPattern6::new(client.clone(), _m(&acc, "adjusted_sopr_30d_ema")), - adjusted_sopr_7d_ema: MetricPattern6::new(client.clone(), _m(&acc, "adjusted_sopr_7d_ema")), + adjusted_sopr: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_sopr")), + adjusted_sopr_1y: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_sopr_1y")), + adjusted_sopr_24h: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_sopr_24h")), + adjusted_sopr_24h_30d_ema: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_sopr_24h_30d_ema")), + adjusted_sopr_24h_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_sopr_24h_7d_ema")), + adjusted_sopr_30d: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_sopr_30d")), + adjusted_sopr_30d_ema: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_sopr_30d_ema")), + adjusted_sopr_7d: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_sopr_7d")), + adjusted_sopr_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_sopr_7d_ema")), adjusted_value_created: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_created")), + adjusted_value_created_1y: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_created_1y")), + adjusted_value_created_24h: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_created_24h")), + adjusted_value_created_30d: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_created_30d")), + 
adjusted_value_created_7d: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_created_7d")), adjusted_value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_destroyed")), - cap_raw: MetricPattern11::new(client.clone(), _m(&acc, "cap_raw")), + adjusted_value_destroyed_1y: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_destroyed_1y")), + adjusted_value_destroyed_24h: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_destroyed_24h")), + adjusted_value_destroyed_30d: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_destroyed_30d")), + adjusted_value_destroyed_7d: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_destroyed_7d")), + cap_raw: MetricPattern20::new(client.clone(), _m(&acc, "cap_raw")), capitulation_flow: MetricPattern1::new(client.clone(), _m(&acc, "capitulation_flow")), - investor_cap_raw: MetricPattern11::new(client.clone(), _m(&acc, "investor_cap_raw")), - investor_price: DollarsSatsPattern::new(client.clone(), _m(&acc, "investor_price")), + investor_cap_raw: MetricPattern20::new(client.clone(), _m(&acc, "investor_cap_raw")), + investor_price: SatsUsdPattern::new(client.clone(), _m(&acc, "investor_price")), investor_price_cents: MetricPattern1::new(client.clone(), _m(&acc, "investor_price_cents")), investor_price_extra: RatioPattern::new(client.clone(), _m(&acc, "investor_price_ratio")), loss_value_created: MetricPattern1::new(client.clone(), _m(&acc, "loss_value_created")), loss_value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "loss_value_destroyed")), - lower_price_band: DollarsSatsPattern::new(client.clone(), _m(&acc, "lower_price_band")), - mvrv: MetricPattern4::new(client.clone(), _m(&acc, "mvrv")), + lower_price_band: SatsUsdPattern::new(client.clone(), _m(&acc, "lower_price_band")), + mvrv: MetricPattern1::new(client.clone(), _m(&acc, "mvrv")), neg_realized_loss: CumulativeSumPattern2::new(client.clone(), _m(&acc, "neg_realized_loss")), net_realized_pnl: 
CumulativeSumPattern::new(client.clone(), _m(&acc, "net_realized_pnl")), - net_realized_pnl_7d_ema: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_7d_ema")), - net_realized_pnl_cumulative_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta")), - net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), - net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), + net_realized_pnl_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "net_realized_pnl_7d_ema")), + net_realized_pnl_cumulative_30d_delta: MetricPattern1::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta")), + net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), + net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern1::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), net_realized_pnl_rel_to_realized_cap: CumulativeSumPattern::new(client.clone(), _m(&acc, "net_realized_pnl_rel_to_realized_cap")), peak_regret: CumulativeSumPattern::new(client.clone(), _m(&acc, "realized_peak_regret")), peak_regret_rel_to_realized_cap: MetricPattern1::new(client.clone(), _m(&acc, "peak_regret_rel_to_realized_cap")), @@ -948,60 +1090,109 @@ impl AdjustedCapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSen profit_value_created: MetricPattern1::new(client.clone(), _m(&acc, "profit_value_created")), profit_value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "profit_value_destroyed")), realized_cap: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap")), - realized_cap_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, 
"realized_cap_30d_delta")), + realized_cap_30d_delta: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap_30d_delta")), realized_cap_cents: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap_cents")), realized_cap_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap_rel_to_own_market_cap")), realized_loss: CumulativeSumPattern::new(client.clone(), _m(&acc, "realized_loss")), - realized_loss_7d_ema: MetricPattern4::new(client.clone(), _m(&acc, "realized_loss_7d_ema")), + realized_loss_1y: MetricPattern1::new(client.clone(), _m(&acc, "realized_loss_1y")), + realized_loss_24h: MetricPattern1::new(client.clone(), _m(&acc, "realized_loss_24h")), + realized_loss_30d: MetricPattern1::new(client.clone(), _m(&acc, "realized_loss_30d")), + realized_loss_7d: MetricPattern1::new(client.clone(), _m(&acc, "realized_loss_7d")), + realized_loss_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "realized_loss_7d_ema")), realized_loss_rel_to_realized_cap: CumulativeSumPattern::new(client.clone(), _m(&acc, "realized_loss_rel_to_realized_cap")), - realized_price: DollarsSatsPattern::new(client.clone(), _m(&acc, "realized_price")), + realized_price: SatsUsdPattern::new(client.clone(), _m(&acc, "realized_price")), realized_price_extra: RatioPattern::new(client.clone(), _m(&acc, "realized_price_ratio")), realized_profit: CumulativeSumPattern::new(client.clone(), _m(&acc, "realized_profit")), - realized_profit_7d_ema: MetricPattern4::new(client.clone(), _m(&acc, "realized_profit_7d_ema")), + realized_profit_1y: MetricPattern1::new(client.clone(), _m(&acc, "realized_profit_1y")), + realized_profit_24h: MetricPattern1::new(client.clone(), _m(&acc, "realized_profit_24h")), + realized_profit_30d: MetricPattern1::new(client.clone(), _m(&acc, "realized_profit_30d")), + realized_profit_7d: MetricPattern1::new(client.clone(), _m(&acc, "realized_profit_7d")), + realized_profit_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, 
"realized_profit_7d_ema")), realized_profit_rel_to_realized_cap: CumulativeSumPattern::new(client.clone(), _m(&acc, "realized_profit_rel_to_realized_cap")), - realized_profit_to_loss_ratio: MetricPattern6::new(client.clone(), _m(&acc, "realized_profit_to_loss_ratio")), + realized_profit_to_loss_ratio_1y: MetricPattern1::new(client.clone(), _m(&acc, "realized_profit_to_loss_ratio_1y")), + realized_profit_to_loss_ratio_24h: MetricPattern1::new(client.clone(), _m(&acc, "realized_profit_to_loss_ratio_24h")), + realized_profit_to_loss_ratio_30d: MetricPattern1::new(client.clone(), _m(&acc, "realized_profit_to_loss_ratio_30d")), + realized_profit_to_loss_ratio_7d: MetricPattern1::new(client.clone(), _m(&acc, "realized_profit_to_loss_ratio_7d")), realized_value: MetricPattern1::new(client.clone(), _m(&acc, "realized_value")), - sell_side_risk_ratio: MetricPattern6::new(client.clone(), _m(&acc, "sell_side_risk_ratio")), - sell_side_risk_ratio_30d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d_ema")), - sell_side_risk_ratio_7d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d_ema")), - sent_in_loss: BitcoinDollarsSatsPattern3::new(client.clone(), _m(&acc, "sent_in_loss")), - sent_in_loss_14d_ema: BitcoinDollarsSatsPattern5::new(client.clone(), _m(&acc, "sent_in_loss_14d_ema")), - sent_in_profit: BitcoinDollarsSatsPattern3::new(client.clone(), _m(&acc, "sent_in_profit")), - sent_in_profit_14d_ema: BitcoinDollarsSatsPattern5::new(client.clone(), _m(&acc, "sent_in_profit_14d_ema")), - sopr: MetricPattern6::new(client.clone(), _m(&acc, "sopr")), - sopr_30d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sopr_30d_ema")), - sopr_7d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sopr_7d_ema")), + realized_value_1y: MetricPattern1::new(client.clone(), _m(&acc, "realized_value_1y")), + realized_value_24h: MetricPattern1::new(client.clone(), _m(&acc, "realized_value_24h")), + realized_value_30d: 
MetricPattern1::new(client.clone(), _m(&acc, "realized_value_30d")), + realized_value_7d: MetricPattern1::new(client.clone(), _m(&acc, "realized_value_7d")), + sell_side_risk_ratio: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio")), + sell_side_risk_ratio_1y: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_1y")), + sell_side_risk_ratio_24h: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_24h")), + sell_side_risk_ratio_24h_30d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_24h_30d_ema")), + sell_side_risk_ratio_24h_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_24h_7d_ema")), + sell_side_risk_ratio_30d: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d")), + sell_side_risk_ratio_30d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d_ema")), + sell_side_risk_ratio_7d: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d")), + sell_side_risk_ratio_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d_ema")), + sent_in_loss: BtcSatsUsdPattern2::new(client.clone(), _m(&acc, "sent_in_loss")), + sent_in_loss_14d_ema: BtcSatsUsdPattern::new(client.clone(), _m(&acc, "sent_in_loss_14d_ema")), + sent_in_profit: BtcSatsUsdPattern2::new(client.clone(), _m(&acc, "sent_in_profit")), + sent_in_profit_14d_ema: BtcSatsUsdPattern::new(client.clone(), _m(&acc, "sent_in_profit_14d_ema")), + sopr: MetricPattern1::new(client.clone(), _m(&acc, "sopr")), + sopr_1y: MetricPattern1::new(client.clone(), _m(&acc, "sopr_1y")), + sopr_24h: MetricPattern1::new(client.clone(), _m(&acc, "sopr_24h")), + sopr_24h_30d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sopr_24h_30d_ema")), + sopr_24h_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sopr_24h_7d_ema")), + sopr_30d: MetricPattern1::new(client.clone(), _m(&acc, "sopr_30d")), + sopr_30d_ema: MetricPattern1::new(client.clone(), _m(&acc, 
"sopr_30d_ema")), + sopr_7d: MetricPattern1::new(client.clone(), _m(&acc, "sopr_7d")), + sopr_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sopr_7d_ema")), total_realized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "total_realized_pnl")), - upper_price_band: DollarsSatsPattern::new(client.clone(), _m(&acc, "upper_price_band")), + upper_price_band: SatsUsdPattern::new(client.clone(), _m(&acc, "upper_price_band")), value_created: MetricPattern1::new(client.clone(), _m(&acc, "value_created")), + value_created_1y: MetricPattern1::new(client.clone(), _m(&acc, "value_created_1y")), + value_created_24h: MetricPattern1::new(client.clone(), _m(&acc, "value_created_24h")), + value_created_30d: MetricPattern1::new(client.clone(), _m(&acc, "value_created_30d")), + value_created_7d: MetricPattern1::new(client.clone(), _m(&acc, "value_created_7d")), value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed")), + value_destroyed_1y: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed_1y")), + value_destroyed_24h: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed_24h")), + value_destroyed_30d: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed_30d")), + value_destroyed_7d: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed_7d")), } } } /// Pattern struct for repeated tree structure. 
pub struct AdjustedCapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSentSoprTotalUpperValuePattern2 { - pub adjusted_sopr: MetricPattern6, - pub adjusted_sopr_30d_ema: MetricPattern6, - pub adjusted_sopr_7d_ema: MetricPattern6, + pub adjusted_sopr: MetricPattern1, + pub adjusted_sopr_1y: MetricPattern1, + pub adjusted_sopr_24h: MetricPattern1, + pub adjusted_sopr_24h_30d_ema: MetricPattern1, + pub adjusted_sopr_24h_7d_ema: MetricPattern1, + pub adjusted_sopr_30d: MetricPattern1, + pub adjusted_sopr_30d_ema: MetricPattern1, + pub adjusted_sopr_7d: MetricPattern1, + pub adjusted_sopr_7d_ema: MetricPattern1, pub adjusted_value_created: MetricPattern1, + pub adjusted_value_created_1y: MetricPattern1, + pub adjusted_value_created_24h: MetricPattern1, + pub adjusted_value_created_30d: MetricPattern1, + pub adjusted_value_created_7d: MetricPattern1, pub adjusted_value_destroyed: MetricPattern1, - pub cap_raw: MetricPattern11, + pub adjusted_value_destroyed_1y: MetricPattern1, + pub adjusted_value_destroyed_24h: MetricPattern1, + pub adjusted_value_destroyed_30d: MetricPattern1, + pub adjusted_value_destroyed_7d: MetricPattern1, + pub cap_raw: MetricPattern20, pub capitulation_flow: MetricPattern1, - pub investor_cap_raw: MetricPattern11, - pub investor_price: DollarsSatsPattern, - pub investor_price_cents: MetricPattern1, + pub investor_cap_raw: MetricPattern20, + pub investor_price: SatsUsdPattern, + pub investor_price_cents: MetricPattern1, pub investor_price_extra: RatioPattern2, pub loss_value_created: MetricPattern1, pub loss_value_destroyed: MetricPattern1, - pub lower_price_band: DollarsSatsPattern, - pub mvrv: MetricPattern4, + pub lower_price_band: SatsUsdPattern, + pub mvrv: MetricPattern1, pub neg_realized_loss: CumulativeSumPattern2, pub net_realized_pnl: CumulativeSumPattern, - pub net_realized_pnl_7d_ema: MetricPattern4, - pub net_realized_pnl_cumulative_30d_delta: MetricPattern4, - pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: 
MetricPattern4, - pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4, + pub net_realized_pnl_7d_ema: MetricPattern1, + pub net_realized_pnl_cumulative_30d_delta: MetricPattern1, + pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern1, + pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern1, pub net_realized_pnl_rel_to_realized_cap: CumulativeSumPattern, pub peak_regret: CumulativeSumPattern, pub peak_regret_rel_to_realized_cap: MetricPattern1, @@ -1009,58 +1200,96 @@ pub struct AdjustedCapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedS pub profit_value_created: MetricPattern1, pub profit_value_destroyed: MetricPattern1, pub realized_cap: MetricPattern1, - pub realized_cap_30d_delta: MetricPattern4, - pub realized_cap_cents: MetricPattern1, + pub realized_cap_30d_delta: MetricPattern1, + pub realized_cap_cents: MetricPattern1, pub realized_loss: CumulativeSumPattern, - pub realized_loss_7d_ema: MetricPattern4, + pub realized_loss_7d_ema: MetricPattern1, pub realized_loss_rel_to_realized_cap: CumulativeSumPattern, - pub realized_price: DollarsSatsPattern, + pub realized_price: SatsUsdPattern, pub realized_price_extra: RatioPattern2, pub realized_profit: CumulativeSumPattern, - pub realized_profit_7d_ema: MetricPattern4, + pub realized_profit_7d_ema: MetricPattern1, pub realized_profit_rel_to_realized_cap: CumulativeSumPattern, pub realized_value: MetricPattern1, - pub sell_side_risk_ratio: MetricPattern6, - pub sell_side_risk_ratio_30d_ema: MetricPattern6, - pub sell_side_risk_ratio_7d_ema: MetricPattern6, - pub sent_in_loss: BitcoinDollarsSatsPattern3, - pub sent_in_loss_14d_ema: BitcoinDollarsSatsPattern5, - pub sent_in_profit: BitcoinDollarsSatsPattern3, - pub sent_in_profit_14d_ema: BitcoinDollarsSatsPattern5, - pub sopr: MetricPattern6, - pub sopr_30d_ema: MetricPattern6, - pub sopr_7d_ema: MetricPattern6, + pub realized_value_1y: MetricPattern1, + pub realized_value_24h: 
MetricPattern1, + pub realized_value_30d: MetricPattern1, + pub realized_value_7d: MetricPattern1, + pub sell_side_risk_ratio: MetricPattern1, + pub sell_side_risk_ratio_1y: MetricPattern1, + pub sell_side_risk_ratio_24h: MetricPattern1, + pub sell_side_risk_ratio_24h_30d_ema: MetricPattern1, + pub sell_side_risk_ratio_24h_7d_ema: MetricPattern1, + pub sell_side_risk_ratio_30d: MetricPattern1, + pub sell_side_risk_ratio_30d_ema: MetricPattern1, + pub sell_side_risk_ratio_7d: MetricPattern1, + pub sell_side_risk_ratio_7d_ema: MetricPattern1, + pub sent_in_loss: BtcSatsUsdPattern2, + pub sent_in_loss_14d_ema: BtcSatsUsdPattern, + pub sent_in_profit: BtcSatsUsdPattern2, + pub sent_in_profit_14d_ema: BtcSatsUsdPattern, + pub sopr: MetricPattern1, + pub sopr_1y: MetricPattern1, + pub sopr_24h: MetricPattern1, + pub sopr_24h_30d_ema: MetricPattern1, + pub sopr_24h_7d_ema: MetricPattern1, + pub sopr_30d: MetricPattern1, + pub sopr_30d_ema: MetricPattern1, + pub sopr_7d: MetricPattern1, + pub sopr_7d_ema: MetricPattern1, pub total_realized_pnl: MetricPattern1, - pub upper_price_band: DollarsSatsPattern, + pub upper_price_band: SatsUsdPattern, pub value_created: MetricPattern1, + pub value_created_1y: MetricPattern1, + pub value_created_24h: MetricPattern1, + pub value_created_30d: MetricPattern1, + pub value_created_7d: MetricPattern1, pub value_destroyed: MetricPattern1, + pub value_destroyed_1y: MetricPattern1, + pub value_destroyed_24h: MetricPattern1, + pub value_destroyed_30d: MetricPattern1, + pub value_destroyed_7d: MetricPattern1, } impl AdjustedCapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSentSoprTotalUpperValuePattern2 { /// Create a new pattern node with accumulated metric name. 
pub fn new(client: Arc, acc: String) -> Self { Self { - adjusted_sopr: MetricPattern6::new(client.clone(), _m(&acc, "adjusted_sopr")), - adjusted_sopr_30d_ema: MetricPattern6::new(client.clone(), _m(&acc, "adjusted_sopr_30d_ema")), - adjusted_sopr_7d_ema: MetricPattern6::new(client.clone(), _m(&acc, "adjusted_sopr_7d_ema")), + adjusted_sopr: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_sopr")), + adjusted_sopr_1y: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_sopr_1y")), + adjusted_sopr_24h: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_sopr_24h")), + adjusted_sopr_24h_30d_ema: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_sopr_24h_30d_ema")), + adjusted_sopr_24h_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_sopr_24h_7d_ema")), + adjusted_sopr_30d: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_sopr_30d")), + adjusted_sopr_30d_ema: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_sopr_30d_ema")), + adjusted_sopr_7d: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_sopr_7d")), + adjusted_sopr_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_sopr_7d_ema")), adjusted_value_created: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_created")), + adjusted_value_created_1y: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_created_1y")), + adjusted_value_created_24h: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_created_24h")), + adjusted_value_created_30d: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_created_30d")), + adjusted_value_created_7d: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_created_7d")), adjusted_value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_destroyed")), - cap_raw: MetricPattern11::new(client.clone(), _m(&acc, "cap_raw")), + adjusted_value_destroyed_1y: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_destroyed_1y")), + adjusted_value_destroyed_24h: 
MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_destroyed_24h")), + adjusted_value_destroyed_30d: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_destroyed_30d")), + adjusted_value_destroyed_7d: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_destroyed_7d")), + cap_raw: MetricPattern20::new(client.clone(), _m(&acc, "cap_raw")), capitulation_flow: MetricPattern1::new(client.clone(), _m(&acc, "capitulation_flow")), - investor_cap_raw: MetricPattern11::new(client.clone(), _m(&acc, "investor_cap_raw")), - investor_price: DollarsSatsPattern::new(client.clone(), _m(&acc, "investor_price")), + investor_cap_raw: MetricPattern20::new(client.clone(), _m(&acc, "investor_cap_raw")), + investor_price: SatsUsdPattern::new(client.clone(), _m(&acc, "investor_price")), investor_price_cents: MetricPattern1::new(client.clone(), _m(&acc, "investor_price_cents")), investor_price_extra: RatioPattern2::new(client.clone(), _m(&acc, "investor_price_ratio")), loss_value_created: MetricPattern1::new(client.clone(), _m(&acc, "loss_value_created")), loss_value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "loss_value_destroyed")), - lower_price_band: DollarsSatsPattern::new(client.clone(), _m(&acc, "lower_price_band")), - mvrv: MetricPattern4::new(client.clone(), _m(&acc, "mvrv")), + lower_price_band: SatsUsdPattern::new(client.clone(), _m(&acc, "lower_price_band")), + mvrv: MetricPattern1::new(client.clone(), _m(&acc, "mvrv")), neg_realized_loss: CumulativeSumPattern2::new(client.clone(), _m(&acc, "neg_realized_loss")), net_realized_pnl: CumulativeSumPattern::new(client.clone(), _m(&acc, "net_realized_pnl")), - net_realized_pnl_7d_ema: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_7d_ema")), - net_realized_pnl_cumulative_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta")), - net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4::new(client.clone(), _m(&acc, 
"net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), - net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), + net_realized_pnl_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "net_realized_pnl_7d_ema")), + net_realized_pnl_cumulative_30d_delta: MetricPattern1::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta")), + net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), + net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern1::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), net_realized_pnl_rel_to_realized_cap: CumulativeSumPattern::new(client.clone(), _m(&acc, "net_realized_pnl_rel_to_realized_cap")), peak_regret: CumulativeSumPattern::new(client.clone(), _m(&acc, "realized_peak_regret")), peak_regret_rel_to_realized_cap: MetricPattern1::new(client.clone(), _m(&acc, "peak_regret_rel_to_realized_cap")), @@ -1068,53 +1297,77 @@ impl AdjustedCapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSen profit_value_created: MetricPattern1::new(client.clone(), _m(&acc, "profit_value_created")), profit_value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "profit_value_destroyed")), realized_cap: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap")), - realized_cap_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "realized_cap_30d_delta")), + realized_cap_30d_delta: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap_30d_delta")), realized_cap_cents: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap_cents")), realized_loss: CumulativeSumPattern::new(client.clone(), _m(&acc, "realized_loss")), - realized_loss_7d_ema: MetricPattern4::new(client.clone(), _m(&acc, "realized_loss_7d_ema")), + realized_loss_7d_ema: 
MetricPattern1::new(client.clone(), _m(&acc, "realized_loss_7d_ema")), realized_loss_rel_to_realized_cap: CumulativeSumPattern::new(client.clone(), _m(&acc, "realized_loss_rel_to_realized_cap")), - realized_price: DollarsSatsPattern::new(client.clone(), _m(&acc, "realized_price")), + realized_price: SatsUsdPattern::new(client.clone(), _m(&acc, "realized_price")), realized_price_extra: RatioPattern2::new(client.clone(), _m(&acc, "realized_price_ratio")), realized_profit: CumulativeSumPattern::new(client.clone(), _m(&acc, "realized_profit")), - realized_profit_7d_ema: MetricPattern4::new(client.clone(), _m(&acc, "realized_profit_7d_ema")), + realized_profit_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "realized_profit_7d_ema")), realized_profit_rel_to_realized_cap: CumulativeSumPattern::new(client.clone(), _m(&acc, "realized_profit_rel_to_realized_cap")), realized_value: MetricPattern1::new(client.clone(), _m(&acc, "realized_value")), - sell_side_risk_ratio: MetricPattern6::new(client.clone(), _m(&acc, "sell_side_risk_ratio")), - sell_side_risk_ratio_30d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d_ema")), - sell_side_risk_ratio_7d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d_ema")), - sent_in_loss: BitcoinDollarsSatsPattern3::new(client.clone(), _m(&acc, "sent_in_loss")), - sent_in_loss_14d_ema: BitcoinDollarsSatsPattern5::new(client.clone(), _m(&acc, "sent_in_loss_14d_ema")), - sent_in_profit: BitcoinDollarsSatsPattern3::new(client.clone(), _m(&acc, "sent_in_profit")), - sent_in_profit_14d_ema: BitcoinDollarsSatsPattern5::new(client.clone(), _m(&acc, "sent_in_profit_14d_ema")), - sopr: MetricPattern6::new(client.clone(), _m(&acc, "sopr")), - sopr_30d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sopr_30d_ema")), - sopr_7d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sopr_7d_ema")), + realized_value_1y: MetricPattern1::new(client.clone(), _m(&acc, "realized_value_1y")), + 
realized_value_24h: MetricPattern1::new(client.clone(), _m(&acc, "realized_value_24h")), + realized_value_30d: MetricPattern1::new(client.clone(), _m(&acc, "realized_value_30d")), + realized_value_7d: MetricPattern1::new(client.clone(), _m(&acc, "realized_value_7d")), + sell_side_risk_ratio: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio")), + sell_side_risk_ratio_1y: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_1y")), + sell_side_risk_ratio_24h: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_24h")), + sell_side_risk_ratio_24h_30d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_24h_30d_ema")), + sell_side_risk_ratio_24h_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_24h_7d_ema")), + sell_side_risk_ratio_30d: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d")), + sell_side_risk_ratio_30d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d_ema")), + sell_side_risk_ratio_7d: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d")), + sell_side_risk_ratio_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d_ema")), + sent_in_loss: BtcSatsUsdPattern2::new(client.clone(), _m(&acc, "sent_in_loss")), + sent_in_loss_14d_ema: BtcSatsUsdPattern::new(client.clone(), _m(&acc, "sent_in_loss_14d_ema")), + sent_in_profit: BtcSatsUsdPattern2::new(client.clone(), _m(&acc, "sent_in_profit")), + sent_in_profit_14d_ema: BtcSatsUsdPattern::new(client.clone(), _m(&acc, "sent_in_profit_14d_ema")), + sopr: MetricPattern1::new(client.clone(), _m(&acc, "sopr")), + sopr_1y: MetricPattern1::new(client.clone(), _m(&acc, "sopr_1y")), + sopr_24h: MetricPattern1::new(client.clone(), _m(&acc, "sopr_24h")), + sopr_24h_30d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sopr_24h_30d_ema")), + sopr_24h_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sopr_24h_7d_ema")), + sopr_30d: 
MetricPattern1::new(client.clone(), _m(&acc, "sopr_30d")), + sopr_30d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sopr_30d_ema")), + sopr_7d: MetricPattern1::new(client.clone(), _m(&acc, "sopr_7d")), + sopr_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sopr_7d_ema")), total_realized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "total_realized_pnl")), - upper_price_band: DollarsSatsPattern::new(client.clone(), _m(&acc, "upper_price_band")), + upper_price_band: SatsUsdPattern::new(client.clone(), _m(&acc, "upper_price_band")), value_created: MetricPattern1::new(client.clone(), _m(&acc, "value_created")), + value_created_1y: MetricPattern1::new(client.clone(), _m(&acc, "value_created_1y")), + value_created_24h: MetricPattern1::new(client.clone(), _m(&acc, "value_created_24h")), + value_created_30d: MetricPattern1::new(client.clone(), _m(&acc, "value_created_30d")), + value_created_7d: MetricPattern1::new(client.clone(), _m(&acc, "value_created_7d")), value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed")), + value_destroyed_1y: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed_1y")), + value_destroyed_24h: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed_24h")), + value_destroyed_30d: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed_30d")), + value_destroyed_7d: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed_7d")), } } } /// Pattern struct for repeated tree structure. 
pub struct CapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSentSoprTotalUpperValuePattern2 { - pub cap_raw: MetricPattern11, + pub cap_raw: MetricPattern20, pub capitulation_flow: MetricPattern1, - pub investor_cap_raw: MetricPattern11, - pub investor_price: DollarsSatsPattern, - pub investor_price_cents: MetricPattern1, + pub investor_cap_raw: MetricPattern20, + pub investor_price: SatsUsdPattern, + pub investor_price_cents: MetricPattern1, pub investor_price_extra: RatioPattern, pub loss_value_created: MetricPattern1, pub loss_value_destroyed: MetricPattern1, - pub lower_price_band: DollarsSatsPattern, - pub mvrv: MetricPattern4, + pub lower_price_band: SatsUsdPattern, + pub mvrv: MetricPattern1, pub neg_realized_loss: CumulativeSumPattern2, pub net_realized_pnl: CumulativeSumPattern, - pub net_realized_pnl_7d_ema: MetricPattern4, - pub net_realized_pnl_cumulative_30d_delta: MetricPattern4, - pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4, - pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4, + pub net_realized_pnl_7d_ema: MetricPattern1, + pub net_realized_pnl_cumulative_30d_delta: MetricPattern1, + pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern1, + pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern1, pub net_realized_pnl_rel_to_realized_cap: CumulativeSumPattern, pub peak_regret: CumulativeSumPattern, pub peak_regret_rel_to_realized_cap: MetricPattern1, @@ -1122,55 +1375,90 @@ pub struct CapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSentS pub profit_value_created: MetricPattern1, pub profit_value_destroyed: MetricPattern1, pub realized_cap: MetricPattern1, - pub realized_cap_30d_delta: MetricPattern4, - pub realized_cap_cents: MetricPattern1, + pub realized_cap_30d_delta: MetricPattern1, + pub realized_cap_cents: MetricPattern1, pub realized_cap_rel_to_own_market_cap: MetricPattern1, pub realized_loss: 
CumulativeSumPattern, - pub realized_loss_7d_ema: MetricPattern4, + pub realized_loss_1y: MetricPattern1, + pub realized_loss_24h: MetricPattern1, + pub realized_loss_30d: MetricPattern1, + pub realized_loss_7d: MetricPattern1, + pub realized_loss_7d_ema: MetricPattern1, pub realized_loss_rel_to_realized_cap: CumulativeSumPattern, - pub realized_price: DollarsSatsPattern, + pub realized_price: SatsUsdPattern, pub realized_price_extra: RatioPattern, pub realized_profit: CumulativeSumPattern, - pub realized_profit_7d_ema: MetricPattern4, + pub realized_profit_1y: MetricPattern1, + pub realized_profit_24h: MetricPattern1, + pub realized_profit_30d: MetricPattern1, + pub realized_profit_7d: MetricPattern1, + pub realized_profit_7d_ema: MetricPattern1, pub realized_profit_rel_to_realized_cap: CumulativeSumPattern, - pub realized_profit_to_loss_ratio: MetricPattern6, + pub realized_profit_to_loss_ratio_1y: MetricPattern1, + pub realized_profit_to_loss_ratio_24h: MetricPattern1, + pub realized_profit_to_loss_ratio_30d: MetricPattern1, + pub realized_profit_to_loss_ratio_7d: MetricPattern1, pub realized_value: MetricPattern1, - pub sell_side_risk_ratio: MetricPattern6, - pub sell_side_risk_ratio_30d_ema: MetricPattern6, - pub sell_side_risk_ratio_7d_ema: MetricPattern6, - pub sent_in_loss: BitcoinDollarsSatsPattern3, - pub sent_in_loss_14d_ema: BitcoinDollarsSatsPattern5, - pub sent_in_profit: BitcoinDollarsSatsPattern3, - pub sent_in_profit_14d_ema: BitcoinDollarsSatsPattern5, - pub sopr: MetricPattern6, - pub sopr_30d_ema: MetricPattern6, - pub sopr_7d_ema: MetricPattern6, + pub realized_value_1y: MetricPattern1, + pub realized_value_24h: MetricPattern1, + pub realized_value_30d: MetricPattern1, + pub realized_value_7d: MetricPattern1, + pub sell_side_risk_ratio: MetricPattern1, + pub sell_side_risk_ratio_1y: MetricPattern1, + pub sell_side_risk_ratio_24h: MetricPattern1, + pub sell_side_risk_ratio_24h_30d_ema: MetricPattern1, + pub sell_side_risk_ratio_24h_7d_ema: 
MetricPattern1, + pub sell_side_risk_ratio_30d: MetricPattern1, + pub sell_side_risk_ratio_30d_ema: MetricPattern1, + pub sell_side_risk_ratio_7d: MetricPattern1, + pub sell_side_risk_ratio_7d_ema: MetricPattern1, + pub sent_in_loss: BtcSatsUsdPattern2, + pub sent_in_loss_14d_ema: BtcSatsUsdPattern, + pub sent_in_profit: BtcSatsUsdPattern2, + pub sent_in_profit_14d_ema: BtcSatsUsdPattern, + pub sopr: MetricPattern1, + pub sopr_1y: MetricPattern1, + pub sopr_24h: MetricPattern1, + pub sopr_24h_30d_ema: MetricPattern1, + pub sopr_24h_7d_ema: MetricPattern1, + pub sopr_30d: MetricPattern1, + pub sopr_30d_ema: MetricPattern1, + pub sopr_7d: MetricPattern1, + pub sopr_7d_ema: MetricPattern1, pub total_realized_pnl: MetricPattern1, - pub upper_price_band: DollarsSatsPattern, + pub upper_price_band: SatsUsdPattern, pub value_created: MetricPattern1, + pub value_created_1y: MetricPattern1, + pub value_created_24h: MetricPattern1, + pub value_created_30d: MetricPattern1, + pub value_created_7d: MetricPattern1, pub value_destroyed: MetricPattern1, + pub value_destroyed_1y: MetricPattern1, + pub value_destroyed_24h: MetricPattern1, + pub value_destroyed_30d: MetricPattern1, + pub value_destroyed_7d: MetricPattern1, } impl CapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSentSoprTotalUpperValuePattern2 { /// Create a new pattern node with accumulated metric name. 
pub fn new(client: Arc, acc: String) -> Self { Self { - cap_raw: MetricPattern11::new(client.clone(), _m(&acc, "cap_raw")), + cap_raw: MetricPattern20::new(client.clone(), _m(&acc, "cap_raw")), capitulation_flow: MetricPattern1::new(client.clone(), _m(&acc, "capitulation_flow")), - investor_cap_raw: MetricPattern11::new(client.clone(), _m(&acc, "investor_cap_raw")), - investor_price: DollarsSatsPattern::new(client.clone(), _m(&acc, "investor_price")), + investor_cap_raw: MetricPattern20::new(client.clone(), _m(&acc, "investor_cap_raw")), + investor_price: SatsUsdPattern::new(client.clone(), _m(&acc, "investor_price")), investor_price_cents: MetricPattern1::new(client.clone(), _m(&acc, "investor_price_cents")), investor_price_extra: RatioPattern::new(client.clone(), _m(&acc, "investor_price_ratio")), loss_value_created: MetricPattern1::new(client.clone(), _m(&acc, "loss_value_created")), loss_value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "loss_value_destroyed")), - lower_price_band: DollarsSatsPattern::new(client.clone(), _m(&acc, "lower_price_band")), - mvrv: MetricPattern4::new(client.clone(), _m(&acc, "mvrv")), + lower_price_band: SatsUsdPattern::new(client.clone(), _m(&acc, "lower_price_band")), + mvrv: MetricPattern1::new(client.clone(), _m(&acc, "mvrv")), neg_realized_loss: CumulativeSumPattern2::new(client.clone(), _m(&acc, "neg_realized_loss")), net_realized_pnl: CumulativeSumPattern::new(client.clone(), _m(&acc, "net_realized_pnl")), - net_realized_pnl_7d_ema: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_7d_ema")), - net_realized_pnl_cumulative_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta")), - net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), - net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4::new(client.clone(), _m(&acc, 
"net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), + net_realized_pnl_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "net_realized_pnl_7d_ema")), + net_realized_pnl_cumulative_30d_delta: MetricPattern1::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta")), + net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), + net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern1::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), net_realized_pnl_rel_to_realized_cap: CumulativeSumPattern::new(client.clone(), _m(&acc, "net_realized_pnl_rel_to_realized_cap")), peak_regret: CumulativeSumPattern::new(client.clone(), _m(&acc, "realized_peak_regret")), peak_regret_rel_to_realized_cap: MetricPattern1::new(client.clone(), _m(&acc, "peak_regret_rel_to_realized_cap")), @@ -1178,55 +1466,90 @@ impl CapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSentSoprTot profit_value_created: MetricPattern1::new(client.clone(), _m(&acc, "profit_value_created")), profit_value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "profit_value_destroyed")), realized_cap: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap")), - realized_cap_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "realized_cap_30d_delta")), + realized_cap_30d_delta: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap_30d_delta")), realized_cap_cents: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap_cents")), realized_cap_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap_rel_to_own_market_cap")), realized_loss: CumulativeSumPattern::new(client.clone(), _m(&acc, "realized_loss")), - realized_loss_7d_ema: MetricPattern4::new(client.clone(), _m(&acc, "realized_loss_7d_ema")), + realized_loss_1y: MetricPattern1::new(client.clone(), _m(&acc, 
"realized_loss_1y")), + realized_loss_24h: MetricPattern1::new(client.clone(), _m(&acc, "realized_loss_24h")), + realized_loss_30d: MetricPattern1::new(client.clone(), _m(&acc, "realized_loss_30d")), + realized_loss_7d: MetricPattern1::new(client.clone(), _m(&acc, "realized_loss_7d")), + realized_loss_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "realized_loss_7d_ema")), realized_loss_rel_to_realized_cap: CumulativeSumPattern::new(client.clone(), _m(&acc, "realized_loss_rel_to_realized_cap")), - realized_price: DollarsSatsPattern::new(client.clone(), _m(&acc, "realized_price")), + realized_price: SatsUsdPattern::new(client.clone(), _m(&acc, "realized_price")), realized_price_extra: RatioPattern::new(client.clone(), _m(&acc, "realized_price_ratio")), realized_profit: CumulativeSumPattern::new(client.clone(), _m(&acc, "realized_profit")), - realized_profit_7d_ema: MetricPattern4::new(client.clone(), _m(&acc, "realized_profit_7d_ema")), + realized_profit_1y: MetricPattern1::new(client.clone(), _m(&acc, "realized_profit_1y")), + realized_profit_24h: MetricPattern1::new(client.clone(), _m(&acc, "realized_profit_24h")), + realized_profit_30d: MetricPattern1::new(client.clone(), _m(&acc, "realized_profit_30d")), + realized_profit_7d: MetricPattern1::new(client.clone(), _m(&acc, "realized_profit_7d")), + realized_profit_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "realized_profit_7d_ema")), realized_profit_rel_to_realized_cap: CumulativeSumPattern::new(client.clone(), _m(&acc, "realized_profit_rel_to_realized_cap")), - realized_profit_to_loss_ratio: MetricPattern6::new(client.clone(), _m(&acc, "realized_profit_to_loss_ratio")), + realized_profit_to_loss_ratio_1y: MetricPattern1::new(client.clone(), _m(&acc, "realized_profit_to_loss_ratio_1y")), + realized_profit_to_loss_ratio_24h: MetricPattern1::new(client.clone(), _m(&acc, "realized_profit_to_loss_ratio_24h")), + realized_profit_to_loss_ratio_30d: MetricPattern1::new(client.clone(), _m(&acc, 
"realized_profit_to_loss_ratio_30d")), + realized_profit_to_loss_ratio_7d: MetricPattern1::new(client.clone(), _m(&acc, "realized_profit_to_loss_ratio_7d")), realized_value: MetricPattern1::new(client.clone(), _m(&acc, "realized_value")), - sell_side_risk_ratio: MetricPattern6::new(client.clone(), _m(&acc, "sell_side_risk_ratio")), - sell_side_risk_ratio_30d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d_ema")), - sell_side_risk_ratio_7d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d_ema")), - sent_in_loss: BitcoinDollarsSatsPattern3::new(client.clone(), _m(&acc, "sent_in_loss")), - sent_in_loss_14d_ema: BitcoinDollarsSatsPattern5::new(client.clone(), _m(&acc, "sent_in_loss_14d_ema")), - sent_in_profit: BitcoinDollarsSatsPattern3::new(client.clone(), _m(&acc, "sent_in_profit")), - sent_in_profit_14d_ema: BitcoinDollarsSatsPattern5::new(client.clone(), _m(&acc, "sent_in_profit_14d_ema")), - sopr: MetricPattern6::new(client.clone(), _m(&acc, "sopr")), - sopr_30d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sopr_30d_ema")), - sopr_7d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sopr_7d_ema")), + realized_value_1y: MetricPattern1::new(client.clone(), _m(&acc, "realized_value_1y")), + realized_value_24h: MetricPattern1::new(client.clone(), _m(&acc, "realized_value_24h")), + realized_value_30d: MetricPattern1::new(client.clone(), _m(&acc, "realized_value_30d")), + realized_value_7d: MetricPattern1::new(client.clone(), _m(&acc, "realized_value_7d")), + sell_side_risk_ratio: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio")), + sell_side_risk_ratio_1y: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_1y")), + sell_side_risk_ratio_24h: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_24h")), + sell_side_risk_ratio_24h_30d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_24h_30d_ema")), + sell_side_risk_ratio_24h_7d_ema: 
MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_24h_7d_ema")), + sell_side_risk_ratio_30d: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d")), + sell_side_risk_ratio_30d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d_ema")), + sell_side_risk_ratio_7d: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d")), + sell_side_risk_ratio_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d_ema")), + sent_in_loss: BtcSatsUsdPattern2::new(client.clone(), _m(&acc, "sent_in_loss")), + sent_in_loss_14d_ema: BtcSatsUsdPattern::new(client.clone(), _m(&acc, "sent_in_loss_14d_ema")), + sent_in_profit: BtcSatsUsdPattern2::new(client.clone(), _m(&acc, "sent_in_profit")), + sent_in_profit_14d_ema: BtcSatsUsdPattern::new(client.clone(), _m(&acc, "sent_in_profit_14d_ema")), + sopr: MetricPattern1::new(client.clone(), _m(&acc, "sopr")), + sopr_1y: MetricPattern1::new(client.clone(), _m(&acc, "sopr_1y")), + sopr_24h: MetricPattern1::new(client.clone(), _m(&acc, "sopr_24h")), + sopr_24h_30d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sopr_24h_30d_ema")), + sopr_24h_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sopr_24h_7d_ema")), + sopr_30d: MetricPattern1::new(client.clone(), _m(&acc, "sopr_30d")), + sopr_30d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sopr_30d_ema")), + sopr_7d: MetricPattern1::new(client.clone(), _m(&acc, "sopr_7d")), + sopr_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sopr_7d_ema")), total_realized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "total_realized_pnl")), - upper_price_band: DollarsSatsPattern::new(client.clone(), _m(&acc, "upper_price_band")), + upper_price_band: SatsUsdPattern::new(client.clone(), _m(&acc, "upper_price_band")), value_created: MetricPattern1::new(client.clone(), _m(&acc, "value_created")), + value_created_1y: MetricPattern1::new(client.clone(), _m(&acc, "value_created_1y")), + 
value_created_24h: MetricPattern1::new(client.clone(), _m(&acc, "value_created_24h")), + value_created_30d: MetricPattern1::new(client.clone(), _m(&acc, "value_created_30d")), + value_created_7d: MetricPattern1::new(client.clone(), _m(&acc, "value_created_7d")), value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed")), + value_destroyed_1y: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed_1y")), + value_destroyed_24h: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed_24h")), + value_destroyed_30d: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed_30d")), + value_destroyed_7d: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed_7d")), } } } /// Pattern struct for repeated tree structure. pub struct CapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSentSoprTotalUpperValuePattern { - pub cap_raw: MetricPattern11, + pub cap_raw: MetricPattern20, pub capitulation_flow: MetricPattern1, - pub investor_cap_raw: MetricPattern11, - pub investor_price: DollarsSatsPattern, - pub investor_price_cents: MetricPattern1, + pub investor_cap_raw: MetricPattern20, + pub investor_price: SatsUsdPattern, + pub investor_price_cents: MetricPattern1, pub investor_price_extra: RatioPattern2, pub loss_value_created: MetricPattern1, pub loss_value_destroyed: MetricPattern1, - pub lower_price_band: DollarsSatsPattern, - pub mvrv: MetricPattern4, + pub lower_price_band: SatsUsdPattern, + pub mvrv: MetricPattern1, pub neg_realized_loss: CumulativeSumPattern2, pub net_realized_pnl: CumulativeSumPattern, - pub net_realized_pnl_7d_ema: MetricPattern4, - pub net_realized_pnl_cumulative_30d_delta: MetricPattern4, - pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4, - pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4, + pub net_realized_pnl_7d_ema: MetricPattern1, + pub net_realized_pnl_cumulative_30d_delta: MetricPattern1, + pub 
net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern1, + pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern1, pub net_realized_pnl_rel_to_realized_cap: CumulativeSumPattern, pub peak_regret: CumulativeSumPattern, pub peak_regret_rel_to_realized_cap: MetricPattern1, @@ -1234,53 +1557,77 @@ pub struct CapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSentS pub profit_value_created: MetricPattern1, pub profit_value_destroyed: MetricPattern1, pub realized_cap: MetricPattern1, - pub realized_cap_30d_delta: MetricPattern4, - pub realized_cap_cents: MetricPattern1, + pub realized_cap_30d_delta: MetricPattern1, + pub realized_cap_cents: MetricPattern1, pub realized_loss: CumulativeSumPattern, - pub realized_loss_7d_ema: MetricPattern4, + pub realized_loss_7d_ema: MetricPattern1, pub realized_loss_rel_to_realized_cap: CumulativeSumPattern, - pub realized_price: DollarsSatsPattern, + pub realized_price: SatsUsdPattern, pub realized_price_extra: RatioPattern2, pub realized_profit: CumulativeSumPattern, - pub realized_profit_7d_ema: MetricPattern4, + pub realized_profit_7d_ema: MetricPattern1, pub realized_profit_rel_to_realized_cap: CumulativeSumPattern, pub realized_value: MetricPattern1, - pub sell_side_risk_ratio: MetricPattern6, - pub sell_side_risk_ratio_30d_ema: MetricPattern6, - pub sell_side_risk_ratio_7d_ema: MetricPattern6, - pub sent_in_loss: BitcoinDollarsSatsPattern3, - pub sent_in_loss_14d_ema: BitcoinDollarsSatsPattern5, - pub sent_in_profit: BitcoinDollarsSatsPattern3, - pub sent_in_profit_14d_ema: BitcoinDollarsSatsPattern5, - pub sopr: MetricPattern6, - pub sopr_30d_ema: MetricPattern6, - pub sopr_7d_ema: MetricPattern6, + pub realized_value_1y: MetricPattern1, + pub realized_value_24h: MetricPattern1, + pub realized_value_30d: MetricPattern1, + pub realized_value_7d: MetricPattern1, + pub sell_side_risk_ratio: MetricPattern1, + pub sell_side_risk_ratio_1y: MetricPattern1, + pub 
sell_side_risk_ratio_24h: MetricPattern1, + pub sell_side_risk_ratio_24h_30d_ema: MetricPattern1, + pub sell_side_risk_ratio_24h_7d_ema: MetricPattern1, + pub sell_side_risk_ratio_30d: MetricPattern1, + pub sell_side_risk_ratio_30d_ema: MetricPattern1, + pub sell_side_risk_ratio_7d: MetricPattern1, + pub sell_side_risk_ratio_7d_ema: MetricPattern1, + pub sent_in_loss: BtcSatsUsdPattern2, + pub sent_in_loss_14d_ema: BtcSatsUsdPattern, + pub sent_in_profit: BtcSatsUsdPattern2, + pub sent_in_profit_14d_ema: BtcSatsUsdPattern, + pub sopr: MetricPattern1, + pub sopr_1y: MetricPattern1, + pub sopr_24h: MetricPattern1, + pub sopr_24h_30d_ema: MetricPattern1, + pub sopr_24h_7d_ema: MetricPattern1, + pub sopr_30d: MetricPattern1, + pub sopr_30d_ema: MetricPattern1, + pub sopr_7d: MetricPattern1, + pub sopr_7d_ema: MetricPattern1, pub total_realized_pnl: MetricPattern1, - pub upper_price_band: DollarsSatsPattern, + pub upper_price_band: SatsUsdPattern, pub value_created: MetricPattern1, + pub value_created_1y: MetricPattern1, + pub value_created_24h: MetricPattern1, + pub value_created_30d: MetricPattern1, + pub value_created_7d: MetricPattern1, pub value_destroyed: MetricPattern1, + pub value_destroyed_1y: MetricPattern1, + pub value_destroyed_24h: MetricPattern1, + pub value_destroyed_30d: MetricPattern1, + pub value_destroyed_7d: MetricPattern1, } impl CapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSentSoprTotalUpperValuePattern { /// Create a new pattern node with accumulated metric name. 
pub fn new(client: Arc, acc: String) -> Self { Self { - cap_raw: MetricPattern11::new(client.clone(), _m(&acc, "cap_raw")), + cap_raw: MetricPattern20::new(client.clone(), _m(&acc, "cap_raw")), capitulation_flow: MetricPattern1::new(client.clone(), _m(&acc, "capitulation_flow")), - investor_cap_raw: MetricPattern11::new(client.clone(), _m(&acc, "investor_cap_raw")), - investor_price: DollarsSatsPattern::new(client.clone(), _m(&acc, "investor_price")), + investor_cap_raw: MetricPattern20::new(client.clone(), _m(&acc, "investor_cap_raw")), + investor_price: SatsUsdPattern::new(client.clone(), _m(&acc, "investor_price")), investor_price_cents: MetricPattern1::new(client.clone(), _m(&acc, "investor_price_cents")), investor_price_extra: RatioPattern2::new(client.clone(), _m(&acc, "investor_price_ratio")), loss_value_created: MetricPattern1::new(client.clone(), _m(&acc, "loss_value_created")), loss_value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "loss_value_destroyed")), - lower_price_band: DollarsSatsPattern::new(client.clone(), _m(&acc, "lower_price_band")), - mvrv: MetricPattern4::new(client.clone(), _m(&acc, "mvrv")), + lower_price_band: SatsUsdPattern::new(client.clone(), _m(&acc, "lower_price_band")), + mvrv: MetricPattern1::new(client.clone(), _m(&acc, "mvrv")), neg_realized_loss: CumulativeSumPattern2::new(client.clone(), _m(&acc, "neg_realized_loss")), net_realized_pnl: CumulativeSumPattern::new(client.clone(), _m(&acc, "net_realized_pnl")), - net_realized_pnl_7d_ema: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_7d_ema")), - net_realized_pnl_cumulative_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta")), - net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), - net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4::new(client.clone(), _m(&acc, 
"net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), + net_realized_pnl_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "net_realized_pnl_7d_ema")), + net_realized_pnl_cumulative_30d_delta: MetricPattern1::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta")), + net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), + net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern1::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), net_realized_pnl_rel_to_realized_cap: CumulativeSumPattern::new(client.clone(), _m(&acc, "net_realized_pnl_rel_to_realized_cap")), peak_regret: CumulativeSumPattern::new(client.clone(), _m(&acc, "realized_peak_regret")), peak_regret_rel_to_realized_cap: MetricPattern1::new(client.clone(), _m(&acc, "peak_regret_rel_to_realized_cap")), @@ -1288,99 +1635,123 @@ impl CapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSentSoprTot profit_value_created: MetricPattern1::new(client.clone(), _m(&acc, "profit_value_created")), profit_value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "profit_value_destroyed")), realized_cap: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap")), - realized_cap_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "realized_cap_30d_delta")), + realized_cap_30d_delta: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap_30d_delta")), realized_cap_cents: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap_cents")), realized_loss: CumulativeSumPattern::new(client.clone(), _m(&acc, "realized_loss")), - realized_loss_7d_ema: MetricPattern4::new(client.clone(), _m(&acc, "realized_loss_7d_ema")), + realized_loss_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "realized_loss_7d_ema")), realized_loss_rel_to_realized_cap: CumulativeSumPattern::new(client.clone(), _m(&acc, 
"realized_loss_rel_to_realized_cap")), - realized_price: DollarsSatsPattern::new(client.clone(), _m(&acc, "realized_price")), + realized_price: SatsUsdPattern::new(client.clone(), _m(&acc, "realized_price")), realized_price_extra: RatioPattern2::new(client.clone(), _m(&acc, "realized_price_ratio")), realized_profit: CumulativeSumPattern::new(client.clone(), _m(&acc, "realized_profit")), - realized_profit_7d_ema: MetricPattern4::new(client.clone(), _m(&acc, "realized_profit_7d_ema")), + realized_profit_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "realized_profit_7d_ema")), realized_profit_rel_to_realized_cap: CumulativeSumPattern::new(client.clone(), _m(&acc, "realized_profit_rel_to_realized_cap")), realized_value: MetricPattern1::new(client.clone(), _m(&acc, "realized_value")), - sell_side_risk_ratio: MetricPattern6::new(client.clone(), _m(&acc, "sell_side_risk_ratio")), - sell_side_risk_ratio_30d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d_ema")), - sell_side_risk_ratio_7d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d_ema")), - sent_in_loss: BitcoinDollarsSatsPattern3::new(client.clone(), _m(&acc, "sent_in_loss")), - sent_in_loss_14d_ema: BitcoinDollarsSatsPattern5::new(client.clone(), _m(&acc, "sent_in_loss_14d_ema")), - sent_in_profit: BitcoinDollarsSatsPattern3::new(client.clone(), _m(&acc, "sent_in_profit")), - sent_in_profit_14d_ema: BitcoinDollarsSatsPattern5::new(client.clone(), _m(&acc, "sent_in_profit_14d_ema")), - sopr: MetricPattern6::new(client.clone(), _m(&acc, "sopr")), - sopr_30d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sopr_30d_ema")), - sopr_7d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sopr_7d_ema")), + realized_value_1y: MetricPattern1::new(client.clone(), _m(&acc, "realized_value_1y")), + realized_value_24h: MetricPattern1::new(client.clone(), _m(&acc, "realized_value_24h")), + realized_value_30d: MetricPattern1::new(client.clone(), _m(&acc, 
"realized_value_30d")), + realized_value_7d: MetricPattern1::new(client.clone(), _m(&acc, "realized_value_7d")), + sell_side_risk_ratio: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio")), + sell_side_risk_ratio_1y: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_1y")), + sell_side_risk_ratio_24h: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_24h")), + sell_side_risk_ratio_24h_30d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_24h_30d_ema")), + sell_side_risk_ratio_24h_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_24h_7d_ema")), + sell_side_risk_ratio_30d: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d")), + sell_side_risk_ratio_30d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d_ema")), + sell_side_risk_ratio_7d: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d")), + sell_side_risk_ratio_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d_ema")), + sent_in_loss: BtcSatsUsdPattern2::new(client.clone(), _m(&acc, "sent_in_loss")), + sent_in_loss_14d_ema: BtcSatsUsdPattern::new(client.clone(), _m(&acc, "sent_in_loss_14d_ema")), + sent_in_profit: BtcSatsUsdPattern2::new(client.clone(), _m(&acc, "sent_in_profit")), + sent_in_profit_14d_ema: BtcSatsUsdPattern::new(client.clone(), _m(&acc, "sent_in_profit_14d_ema")), + sopr: MetricPattern1::new(client.clone(), _m(&acc, "sopr")), + sopr_1y: MetricPattern1::new(client.clone(), _m(&acc, "sopr_1y")), + sopr_24h: MetricPattern1::new(client.clone(), _m(&acc, "sopr_24h")), + sopr_24h_30d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sopr_24h_30d_ema")), + sopr_24h_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sopr_24h_7d_ema")), + sopr_30d: MetricPattern1::new(client.clone(), _m(&acc, "sopr_30d")), + sopr_30d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sopr_30d_ema")), + sopr_7d: 
MetricPattern1::new(client.clone(), _m(&acc, "sopr_7d")), + sopr_7d_ema: MetricPattern1::new(client.clone(), _m(&acc, "sopr_7d_ema")), total_realized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "total_realized_pnl")), - upper_price_band: DollarsSatsPattern::new(client.clone(), _m(&acc, "upper_price_band")), + upper_price_band: SatsUsdPattern::new(client.clone(), _m(&acc, "upper_price_band")), value_created: MetricPattern1::new(client.clone(), _m(&acc, "value_created")), + value_created_1y: MetricPattern1::new(client.clone(), _m(&acc, "value_created_1y")), + value_created_24h: MetricPattern1::new(client.clone(), _m(&acc, "value_created_24h")), + value_created_30d: MetricPattern1::new(client.clone(), _m(&acc, "value_created_30d")), + value_created_7d: MetricPattern1::new(client.clone(), _m(&acc, "value_created_7d")), value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed")), + value_destroyed_1y: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed_1y")), + value_destroyed_24h: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed_24h")), + value_destroyed_30d: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed_30d")), + value_destroyed_7d: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed_7d")), } } } /// Pattern struct for repeated tree structure. 
pub struct _0sdM0M1M1sdM2M2sdM3sdP0P1P1sdP2P2sdP3sdSdSmaZscorePattern { - pub _0sd_usd: DollarsSatsPattern2, - pub m0_5sd: MetricPattern4, - pub m0_5sd_usd: DollarsSatsPattern2, - pub m1_5sd: MetricPattern4, - pub m1_5sd_usd: DollarsSatsPattern2, - pub m1sd: MetricPattern4, - pub m1sd_usd: DollarsSatsPattern2, - pub m2_5sd: MetricPattern4, - pub m2_5sd_usd: DollarsSatsPattern2, - pub m2sd: MetricPattern4, - pub m2sd_usd: DollarsSatsPattern2, - pub m3sd: MetricPattern4, - pub m3sd_usd: DollarsSatsPattern2, - pub p0_5sd: MetricPattern4, - pub p0_5sd_usd: DollarsSatsPattern2, - pub p1_5sd: MetricPattern4, - pub p1_5sd_usd: DollarsSatsPattern2, - pub p1sd: MetricPattern4, - pub p1sd_usd: DollarsSatsPattern2, - pub p2_5sd: MetricPattern4, - pub p2_5sd_usd: DollarsSatsPattern2, - pub p2sd: MetricPattern4, - pub p2sd_usd: DollarsSatsPattern2, - pub p3sd: MetricPattern4, - pub p3sd_usd: DollarsSatsPattern2, - pub sd: MetricPattern4, - pub sma: MetricPattern4, - pub zscore: MetricPattern4, + pub _0sd_usd: SatsUsdPattern, + pub m0_5sd: MetricPattern1, + pub m0_5sd_usd: SatsUsdPattern, + pub m1_5sd: MetricPattern1, + pub m1_5sd_usd: SatsUsdPattern, + pub m1sd: MetricPattern1, + pub m1sd_usd: SatsUsdPattern, + pub m2_5sd: MetricPattern1, + pub m2_5sd_usd: SatsUsdPattern, + pub m2sd: MetricPattern1, + pub m2sd_usd: SatsUsdPattern, + pub m3sd: MetricPattern1, + pub m3sd_usd: SatsUsdPattern, + pub p0_5sd: MetricPattern1, + pub p0_5sd_usd: SatsUsdPattern, + pub p1_5sd: MetricPattern1, + pub p1_5sd_usd: SatsUsdPattern, + pub p1sd: MetricPattern1, + pub p1sd_usd: SatsUsdPattern, + pub p2_5sd: MetricPattern1, + pub p2_5sd_usd: SatsUsdPattern, + pub p2sd: MetricPattern1, + pub p2sd_usd: SatsUsdPattern, + pub p3sd: MetricPattern1, + pub p3sd_usd: SatsUsdPattern, + pub sd: MetricPattern1, + pub sma: MetricPattern1, + pub zscore: MetricPattern1, } impl _0sdM0M1M1sdM2M2sdM3sdP0P1P1sdP2P2sdP3sdSdSmaZscorePattern { /// Create a new pattern node with accumulated metric name. 
pub fn new(client: Arc, acc: String) -> Self { Self { - _0sd_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "0sd_usd")), - m0_5sd: MetricPattern4::new(client.clone(), _m(&acc, "m0_5sd")), - m0_5sd_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "m0_5sd_usd")), - m1_5sd: MetricPattern4::new(client.clone(), _m(&acc, "m1_5sd")), - m1_5sd_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "m1_5sd_usd")), - m1sd: MetricPattern4::new(client.clone(), _m(&acc, "m1sd")), - m1sd_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "m1sd_usd")), - m2_5sd: MetricPattern4::new(client.clone(), _m(&acc, "m2_5sd")), - m2_5sd_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "m2_5sd_usd")), - m2sd: MetricPattern4::new(client.clone(), _m(&acc, "m2sd")), - m2sd_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "m2sd_usd")), - m3sd: MetricPattern4::new(client.clone(), _m(&acc, "m3sd")), - m3sd_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "m3sd_usd")), - p0_5sd: MetricPattern4::new(client.clone(), _m(&acc, "p0_5sd")), - p0_5sd_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "p0_5sd_usd")), - p1_5sd: MetricPattern4::new(client.clone(), _m(&acc, "p1_5sd")), - p1_5sd_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "p1_5sd_usd")), - p1sd: MetricPattern4::new(client.clone(), _m(&acc, "p1sd")), - p1sd_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "p1sd_usd")), - p2_5sd: MetricPattern4::new(client.clone(), _m(&acc, "p2_5sd")), - p2_5sd_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "p2_5sd_usd")), - p2sd: MetricPattern4::new(client.clone(), _m(&acc, "p2sd")), - p2sd_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "p2sd_usd")), - p3sd: MetricPattern4::new(client.clone(), _m(&acc, "p3sd")), - p3sd_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "p3sd_usd")), - sd: MetricPattern4::new(client.clone(), _m(&acc, "sd")), - sma: MetricPattern4::new(client.clone(), _m(&acc, "sma")), - zscore: 
MetricPattern4::new(client.clone(), _m(&acc, "zscore")), + _0sd_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "0sd_usd")), + m0_5sd: MetricPattern1::new(client.clone(), _m(&acc, "m0_5sd")), + m0_5sd_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "m0_5sd_usd")), + m1_5sd: MetricPattern1::new(client.clone(), _m(&acc, "m1_5sd")), + m1_5sd_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "m1_5sd_usd")), + m1sd: MetricPattern1::new(client.clone(), _m(&acc, "m1sd")), + m1sd_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "m1sd_usd")), + m2_5sd: MetricPattern1::new(client.clone(), _m(&acc, "m2_5sd")), + m2_5sd_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "m2_5sd_usd")), + m2sd: MetricPattern1::new(client.clone(), _m(&acc, "m2sd")), + m2sd_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "m2sd_usd")), + m3sd: MetricPattern1::new(client.clone(), _m(&acc, "m3sd")), + m3sd_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "m3sd_usd")), + p0_5sd: MetricPattern1::new(client.clone(), _m(&acc, "p0_5sd")), + p0_5sd_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "p0_5sd_usd")), + p1_5sd: MetricPattern1::new(client.clone(), _m(&acc, "p1_5sd")), + p1_5sd_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "p1_5sd_usd")), + p1sd: MetricPattern1::new(client.clone(), _m(&acc, "p1sd")), + p1sd_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "p1sd_usd")), + p2_5sd: MetricPattern1::new(client.clone(), _m(&acc, "p2_5sd")), + p2_5sd_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "p2_5sd_usd")), + p2sd: MetricPattern1::new(client.clone(), _m(&acc, "p2sd")), + p2sd_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "p2sd_usd")), + p3sd: MetricPattern1::new(client.clone(), _m(&acc, "p3sd")), + p3sd_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "p3sd_usd")), + sd: MetricPattern1::new(client.clone(), _m(&acc, "sd")), + sma: MetricPattern1::new(client.clone(), _m(&acc, "sma")), + zscore: MetricPattern1::new(client.clone(), _m(&acc, "zscore")), } } } @@ -1400,11 +1771,11 @@ 
pub struct InvestedNegNetNuplSupplyUnrealizedPattern4 { pub supply_in_loss_rel_to_own_supply: MetricPattern1, pub supply_in_profit_rel_to_circulating_supply: MetricPattern1, pub supply_in_profit_rel_to_own_supply: MetricPattern1, - pub supply_rel_to_circulating_supply: MetricPattern4, + pub supply_rel_to_circulating_supply: MetricPattern1, pub unrealized_loss_rel_to_market_cap: MetricPattern1, pub unrealized_loss_rel_to_own_market_cap: MetricPattern1, pub unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1, - pub unrealized_peak_regret_rel_to_market_cap: MetricPattern4, + pub unrealized_peak_regret_rel_to_market_cap: MetricPattern1, pub unrealized_profit_rel_to_market_cap: MetricPattern1, pub unrealized_profit_rel_to_own_market_cap: MetricPattern1, pub unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern1, @@ -1427,11 +1798,11 @@ impl InvestedNegNetNuplSupplyUnrealizedPattern4 { supply_in_loss_rel_to_own_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_in_loss_rel_to_own_supply")), supply_in_profit_rel_to_circulating_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_in_profit_rel_to_circulating_supply")), supply_in_profit_rel_to_own_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_in_profit_rel_to_own_supply")), - supply_rel_to_circulating_supply: MetricPattern4::new(client.clone(), _m(&acc, "supply_rel_to_circulating_supply")), + supply_rel_to_circulating_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_rel_to_circulating_supply")), unrealized_loss_rel_to_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_loss_rel_to_market_cap")), unrealized_loss_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_loss_rel_to_own_market_cap")), unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_loss_rel_to_own_total_unrealized_pnl")), - unrealized_peak_regret_rel_to_market_cap: 
MetricPattern4::new(client.clone(), _m(&acc, "unrealized_peak_regret_rel_to_market_cap")), + unrealized_peak_regret_rel_to_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_peak_regret_rel_to_market_cap")), unrealized_profit_rel_to_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_profit_rel_to_market_cap")), unrealized_profit_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_profit_rel_to_own_market_cap")), unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_profit_rel_to_own_total_unrealized_pnl")), @@ -1441,25 +1812,25 @@ impl InvestedNegNetNuplSupplyUnrealizedPattern4 { /// Pattern struct for repeated tree structure. pub struct PriceRatioPattern { - pub price: DollarsSatsPattern2, - pub ratio: MetricPattern4, - pub ratio_1m_sma: MetricPattern4, - pub ratio_1w_sma: MetricPattern4, + pub price: SatsUsdPattern, + pub ratio: MetricPattern1, + pub ratio_1m_sma: MetricPattern1, + pub ratio_1w_sma: MetricPattern1, pub ratio_1y_sd: _0sdM0M1M1sdM2M2sdM3sdP0P1P1sdP2P2sdP3sdSdSmaZscorePattern, pub ratio_2y_sd: _0sdM0M1M1sdM2M2sdM3sdP0P1P1sdP2P2sdP3sdSdSmaZscorePattern, pub ratio_4y_sd: _0sdM0M1M1sdM2M2sdM3sdP0P1P1sdP2P2sdP3sdSdSmaZscorePattern, - pub ratio_pct1: MetricPattern4, - pub ratio_pct1_usd: DollarsSatsPattern2, - pub ratio_pct2: MetricPattern4, - pub ratio_pct2_usd: DollarsSatsPattern2, - pub ratio_pct5: MetricPattern4, - pub ratio_pct5_usd: DollarsSatsPattern2, - pub ratio_pct95: MetricPattern4, - pub ratio_pct95_usd: DollarsSatsPattern2, - pub ratio_pct98: MetricPattern4, - pub ratio_pct98_usd: DollarsSatsPattern2, - pub ratio_pct99: MetricPattern4, - pub ratio_pct99_usd: DollarsSatsPattern2, + pub ratio_pct1: MetricPattern1, + pub ratio_pct1_usd: SatsUsdPattern, + pub ratio_pct2: MetricPattern1, + pub ratio_pct2_usd: SatsUsdPattern, + pub ratio_pct5: MetricPattern1, + pub ratio_pct5_usd: SatsUsdPattern, + pub ratio_pct95: MetricPattern1, 
+ pub ratio_pct95_usd: SatsUsdPattern, + pub ratio_pct98: MetricPattern1, + pub ratio_pct98_usd: SatsUsdPattern, + pub ratio_pct99: MetricPattern1, + pub ratio_pct99_usd: SatsUsdPattern, pub ratio_sd: _0sdM0M1M1sdM2M2sdM3sdP0P1P1sdP2P2sdP3sdSdSmaZscorePattern, } @@ -1467,25 +1838,25 @@ impl PriceRatioPattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - price: DollarsSatsPattern2::new(client.clone(), acc.clone()), - ratio: MetricPattern4::new(client.clone(), _m(&acc, "ratio")), - ratio_1m_sma: MetricPattern4::new(client.clone(), _m(&acc, "ratio_1m_sma")), - ratio_1w_sma: MetricPattern4::new(client.clone(), _m(&acc, "ratio_1w_sma")), + price: SatsUsdPattern::new(client.clone(), acc.clone()), + ratio: MetricPattern1::new(client.clone(), _m(&acc, "ratio")), + ratio_1m_sma: MetricPattern1::new(client.clone(), _m(&acc, "ratio_1m_sma")), + ratio_1w_sma: MetricPattern1::new(client.clone(), _m(&acc, "ratio_1w_sma")), ratio_1y_sd: _0sdM0M1M1sdM2M2sdM3sdP0P1P1sdP2P2sdP3sdSdSmaZscorePattern::new(client.clone(), _m(&acc, "ratio_1y")), ratio_2y_sd: _0sdM0M1M1sdM2M2sdM3sdP0P1P1sdP2P2sdP3sdSdSmaZscorePattern::new(client.clone(), _m(&acc, "ratio_2y")), ratio_4y_sd: _0sdM0M1M1sdM2M2sdM3sdP0P1P1sdP2P2sdP3sdSdSmaZscorePattern::new(client.clone(), _m(&acc, "ratio_4y")), - ratio_pct1: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct1")), - ratio_pct1_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "ratio_pct1_usd")), - ratio_pct2: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct2")), - ratio_pct2_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "ratio_pct2_usd")), - ratio_pct5: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct5")), - ratio_pct5_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "ratio_pct5_usd")), - ratio_pct95: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct95")), - ratio_pct95_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "ratio_pct95_usd")), 
- ratio_pct98: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct98")), - ratio_pct98_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "ratio_pct98_usd")), - ratio_pct99: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct99")), - ratio_pct99_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "ratio_pct99_usd")), + ratio_pct1: MetricPattern1::new(client.clone(), _m(&acc, "ratio_pct1")), + ratio_pct1_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "ratio_pct1_usd")), + ratio_pct2: MetricPattern1::new(client.clone(), _m(&acc, "ratio_pct2")), + ratio_pct2_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "ratio_pct2_usd")), + ratio_pct5: MetricPattern1::new(client.clone(), _m(&acc, "ratio_pct5")), + ratio_pct5_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "ratio_pct5_usd")), + ratio_pct95: MetricPattern1::new(client.clone(), _m(&acc, "ratio_pct95")), + ratio_pct95_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "ratio_pct95_usd")), + ratio_pct98: MetricPattern1::new(client.clone(), _m(&acc, "ratio_pct98")), + ratio_pct98_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "ratio_pct98_usd")), + ratio_pct99: MetricPattern1::new(client.clone(), _m(&acc, "ratio_pct99")), + ratio_pct99_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "ratio_pct99_usd")), ratio_sd: _0sdM0M1M1sdM2M2sdM3sdP0P1P1sdP2P2sdP3sdSdSmaZscorePattern::new(client.clone(), _m(&acc, "ratio")), } } @@ -1493,74 +1864,74 @@ impl PriceRatioPattern { /// Pattern struct for repeated tree structure. 
pub struct Pct05Pct10Pct15Pct20Pct25Pct30Pct35Pct40Pct45Pct50Pct55Pct60Pct65Pct70Pct75Pct80Pct85Pct90Pct95Pattern { - pub pct05: DollarsSatsPattern2, - pub pct10: DollarsSatsPattern2, - pub pct15: DollarsSatsPattern2, - pub pct20: DollarsSatsPattern2, - pub pct25: DollarsSatsPattern2, - pub pct30: DollarsSatsPattern2, - pub pct35: DollarsSatsPattern2, - pub pct40: DollarsSatsPattern2, - pub pct45: DollarsSatsPattern2, - pub pct50: DollarsSatsPattern2, - pub pct55: DollarsSatsPattern2, - pub pct60: DollarsSatsPattern2, - pub pct65: DollarsSatsPattern2, - pub pct70: DollarsSatsPattern2, - pub pct75: DollarsSatsPattern2, - pub pct80: DollarsSatsPattern2, - pub pct85: DollarsSatsPattern2, - pub pct90: DollarsSatsPattern2, - pub pct95: DollarsSatsPattern2, + pub pct05: SatsUsdPattern, + pub pct10: SatsUsdPattern, + pub pct15: SatsUsdPattern, + pub pct20: SatsUsdPattern, + pub pct25: SatsUsdPattern, + pub pct30: SatsUsdPattern, + pub pct35: SatsUsdPattern, + pub pct40: SatsUsdPattern, + pub pct45: SatsUsdPattern, + pub pct50: SatsUsdPattern, + pub pct55: SatsUsdPattern, + pub pct60: SatsUsdPattern, + pub pct65: SatsUsdPattern, + pub pct70: SatsUsdPattern, + pub pct75: SatsUsdPattern, + pub pct80: SatsUsdPattern, + pub pct85: SatsUsdPattern, + pub pct90: SatsUsdPattern, + pub pct95: SatsUsdPattern, } impl Pct05Pct10Pct15Pct20Pct25Pct30Pct35Pct40Pct45Pct50Pct55Pct60Pct65Pct70Pct75Pct80Pct85Pct90Pct95Pattern { /// Create a new pattern node with accumulated metric name. 
pub fn new(client: Arc, acc: String) -> Self { Self { - pct05: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct05")), - pct10: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct10")), - pct15: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct15")), - pct20: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct20")), - pct25: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct25")), - pct30: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct30")), - pct35: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct35")), - pct40: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct40")), - pct45: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct45")), - pct50: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct50")), - pct55: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct55")), - pct60: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct60")), - pct65: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct65")), - pct70: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct70")), - pct75: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct75")), - pct80: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct80")), - pct85: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct85")), - pct90: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct90")), - pct95: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct95")), + pct05: SatsUsdPattern::new(client.clone(), _m(&acc, "pct05")), + pct10: SatsUsdPattern::new(client.clone(), _m(&acc, "pct10")), + pct15: SatsUsdPattern::new(client.clone(), _m(&acc, "pct15")), + pct20: SatsUsdPattern::new(client.clone(), _m(&acc, "pct20")), + pct25: SatsUsdPattern::new(client.clone(), _m(&acc, "pct25")), + pct30: SatsUsdPattern::new(client.clone(), _m(&acc, "pct30")), + pct35: SatsUsdPattern::new(client.clone(), _m(&acc, "pct35")), + pct40: SatsUsdPattern::new(client.clone(), _m(&acc, "pct40")), + pct45: SatsUsdPattern::new(client.clone(), _m(&acc, "pct45")), + pct50: 
SatsUsdPattern::new(client.clone(), _m(&acc, "pct50")), + pct55: SatsUsdPattern::new(client.clone(), _m(&acc, "pct55")), + pct60: SatsUsdPattern::new(client.clone(), _m(&acc, "pct60")), + pct65: SatsUsdPattern::new(client.clone(), _m(&acc, "pct65")), + pct70: SatsUsdPattern::new(client.clone(), _m(&acc, "pct70")), + pct75: SatsUsdPattern::new(client.clone(), _m(&acc, "pct75")), + pct80: SatsUsdPattern::new(client.clone(), _m(&acc, "pct80")), + pct85: SatsUsdPattern::new(client.clone(), _m(&acc, "pct85")), + pct90: SatsUsdPattern::new(client.clone(), _m(&acc, "pct90")), + pct95: SatsUsdPattern::new(client.clone(), _m(&acc, "pct95")), } } } /// Pattern struct for repeated tree structure. pub struct RatioPattern { - pub ratio: MetricPattern4, - pub ratio_1m_sma: MetricPattern4, - pub ratio_1w_sma: MetricPattern4, + pub ratio: MetricPattern1, + pub ratio_1m_sma: MetricPattern1, + pub ratio_1w_sma: MetricPattern1, pub ratio_1y_sd: _0sdM0M1M1sdM2M2sdM3sdP0P1P1sdP2P2sdP3sdSdSmaZscorePattern, pub ratio_2y_sd: _0sdM0M1M1sdM2M2sdM3sdP0P1P1sdP2P2sdP3sdSdSmaZscorePattern, pub ratio_4y_sd: _0sdM0M1M1sdM2M2sdM3sdP0P1P1sdP2P2sdP3sdSdSmaZscorePattern, - pub ratio_pct1: MetricPattern4, - pub ratio_pct1_usd: DollarsSatsPattern2, - pub ratio_pct2: MetricPattern4, - pub ratio_pct2_usd: DollarsSatsPattern2, - pub ratio_pct5: MetricPattern4, - pub ratio_pct5_usd: DollarsSatsPattern2, - pub ratio_pct95: MetricPattern4, - pub ratio_pct95_usd: DollarsSatsPattern2, - pub ratio_pct98: MetricPattern4, - pub ratio_pct98_usd: DollarsSatsPattern2, - pub ratio_pct99: MetricPattern4, - pub ratio_pct99_usd: DollarsSatsPattern2, + pub ratio_pct1: MetricPattern1, + pub ratio_pct1_usd: SatsUsdPattern, + pub ratio_pct2: MetricPattern1, + pub ratio_pct2_usd: SatsUsdPattern, + pub ratio_pct5: MetricPattern1, + pub ratio_pct5_usd: SatsUsdPattern, + pub ratio_pct95: MetricPattern1, + pub ratio_pct95_usd: SatsUsdPattern, + pub ratio_pct98: MetricPattern1, + pub ratio_pct98_usd: SatsUsdPattern, + pub 
ratio_pct99: MetricPattern1, + pub ratio_pct99_usd: SatsUsdPattern, pub ratio_sd: _0sdM0M1M1sdM2M2sdM3sdP0P1P1sdP2P2sdP3sdSdSmaZscorePattern, } @@ -1568,24 +1939,24 @@ impl RatioPattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - ratio: MetricPattern4::new(client.clone(), acc.clone()), - ratio_1m_sma: MetricPattern4::new(client.clone(), _m(&acc, "1m_sma")), - ratio_1w_sma: MetricPattern4::new(client.clone(), _m(&acc, "1w_sma")), + ratio: MetricPattern1::new(client.clone(), acc.clone()), + ratio_1m_sma: MetricPattern1::new(client.clone(), _m(&acc, "1m_sma")), + ratio_1w_sma: MetricPattern1::new(client.clone(), _m(&acc, "1w_sma")), ratio_1y_sd: _0sdM0M1M1sdM2M2sdM3sdP0P1P1sdP2P2sdP3sdSdSmaZscorePattern::new(client.clone(), _m(&acc, "1y")), ratio_2y_sd: _0sdM0M1M1sdM2M2sdM3sdP0P1P1sdP2P2sdP3sdSdSmaZscorePattern::new(client.clone(), _m(&acc, "2y")), ratio_4y_sd: _0sdM0M1M1sdM2M2sdM3sdP0P1P1sdP2P2sdP3sdSdSmaZscorePattern::new(client.clone(), _m(&acc, "4y")), - ratio_pct1: MetricPattern4::new(client.clone(), _m(&acc, "pct1")), - ratio_pct1_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct1_usd")), - ratio_pct2: MetricPattern4::new(client.clone(), _m(&acc, "pct2")), - ratio_pct2_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct2_usd")), - ratio_pct5: MetricPattern4::new(client.clone(), _m(&acc, "pct5")), - ratio_pct5_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct5_usd")), - ratio_pct95: MetricPattern4::new(client.clone(), _m(&acc, "pct95")), - ratio_pct95_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct95_usd")), - ratio_pct98: MetricPattern4::new(client.clone(), _m(&acc, "pct98")), - ratio_pct98_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct98_usd")), - ratio_pct99: MetricPattern4::new(client.clone(), _m(&acc, "pct99")), - ratio_pct99_usd: DollarsSatsPattern2::new(client.clone(), _m(&acc, "pct99_usd")), + ratio_pct1: 
MetricPattern1::new(client.clone(), _m(&acc, "pct1")), + ratio_pct1_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "pct1_usd")), + ratio_pct2: MetricPattern1::new(client.clone(), _m(&acc, "pct2")), + ratio_pct2_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "pct2_usd")), + ratio_pct5: MetricPattern1::new(client.clone(), _m(&acc, "pct5")), + ratio_pct5_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "pct5_usd")), + ratio_pct95: MetricPattern1::new(client.clone(), _m(&acc, "pct95")), + ratio_pct95_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "pct95_usd")), + ratio_pct98: MetricPattern1::new(client.clone(), _m(&acc, "pct98")), + ratio_pct98_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "pct98_usd")), + ratio_pct99: MetricPattern1::new(client.clone(), _m(&acc, "pct99")), + ratio_pct99_usd: SatsUsdPattern::new(client.clone(), _m(&acc, "pct99_usd")), ratio_sd: _0sdM0M1M1sdM2M2sdM3sdP0P1P1sdP2P2sdP3sdSdSmaZscorePattern::new(client.clone(), acc.clone()), } } @@ -1595,18 +1966,18 @@ impl RatioPattern { pub struct GreedInvestedInvestorNegNetPainPeakSupplyTotalUnrealizedPattern { pub greed_index: MetricPattern1, pub invested_capital_in_loss: MetricPattern1, - pub invested_capital_in_loss_raw: MetricPattern11, + pub invested_capital_in_loss_raw: MetricPattern20, pub invested_capital_in_profit: MetricPattern1, - pub invested_capital_in_profit_raw: MetricPattern11, - pub investor_cap_in_loss_raw: MetricPattern11, - pub investor_cap_in_profit_raw: MetricPattern11, + pub invested_capital_in_profit_raw: MetricPattern20, + pub investor_cap_in_loss_raw: MetricPattern20, + pub investor_cap_in_profit_raw: MetricPattern20, pub neg_unrealized_loss: MetricPattern1, pub net_sentiment: MetricPattern1, pub net_unrealized_pnl: MetricPattern1, pub pain_index: MetricPattern1, - pub peak_regret: MetricPattern4, - pub supply_in_loss: BitcoinDollarsSatsPattern4, - pub supply_in_profit: BitcoinDollarsSatsPattern4, + pub peak_regret: MetricPattern1, + pub supply_in_loss: 
BtcSatsUsdPattern, + pub supply_in_profit: BtcSatsUsdPattern, pub total_unrealized_pnl: MetricPattern1, pub unrealized_loss: MetricPattern1, pub unrealized_profit: MetricPattern1, @@ -1618,18 +1989,18 @@ impl GreedInvestedInvestorNegNetPainPeakSupplyTotalUnrealizedPattern { Self { greed_index: MetricPattern1::new(client.clone(), _m(&acc, "greed_index")), invested_capital_in_loss: MetricPattern1::new(client.clone(), _m(&acc, "invested_capital_in_loss")), - invested_capital_in_loss_raw: MetricPattern11::new(client.clone(), _m(&acc, "invested_capital_in_loss_raw")), + invested_capital_in_loss_raw: MetricPattern20::new(client.clone(), _m(&acc, "invested_capital_in_loss_raw")), invested_capital_in_profit: MetricPattern1::new(client.clone(), _m(&acc, "invested_capital_in_profit")), - invested_capital_in_profit_raw: MetricPattern11::new(client.clone(), _m(&acc, "invested_capital_in_profit_raw")), - investor_cap_in_loss_raw: MetricPattern11::new(client.clone(), _m(&acc, "investor_cap_in_loss_raw")), - investor_cap_in_profit_raw: MetricPattern11::new(client.clone(), _m(&acc, "investor_cap_in_profit_raw")), + invested_capital_in_profit_raw: MetricPattern20::new(client.clone(), _m(&acc, "invested_capital_in_profit_raw")), + investor_cap_in_loss_raw: MetricPattern20::new(client.clone(), _m(&acc, "investor_cap_in_loss_raw")), + investor_cap_in_profit_raw: MetricPattern20::new(client.clone(), _m(&acc, "investor_cap_in_profit_raw")), neg_unrealized_loss: MetricPattern1::new(client.clone(), _m(&acc, "neg_unrealized_loss")), net_sentiment: MetricPattern1::new(client.clone(), _m(&acc, "net_sentiment")), net_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "net_unrealized_pnl")), pain_index: MetricPattern1::new(client.clone(), _m(&acc, "pain_index")), - peak_regret: MetricPattern4::new(client.clone(), _m(&acc, "unrealized_peak_regret")), - supply_in_loss: BitcoinDollarsSatsPattern4::new(client.clone(), _m(&acc, "supply_in_loss")), - supply_in_profit: 
BitcoinDollarsSatsPattern4::new(client.clone(), _m(&acc, "supply_in_profit")), + peak_regret: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_peak_regret")), + supply_in_loss: BtcSatsUsdPattern::new(client.clone(), _m(&acc, "supply_in_loss")), + supply_in_profit: BtcSatsUsdPattern::new(client.clone(), _m(&acc, "supply_in_profit")), total_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "total_unrealized_pnl")), unrealized_loss: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_loss")), unrealized_profit: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_profit")), @@ -1641,17 +2012,17 @@ impl GreedInvestedInvestorNegNetPainPeakSupplyTotalUnrealizedPattern { pub struct GreedInvestedInvestorNegNetPainSupplyTotalUnrealizedPattern { pub greed_index: MetricPattern1, pub invested_capital_in_loss: MetricPattern1, - pub invested_capital_in_loss_raw: MetricPattern11, + pub invested_capital_in_loss_raw: MetricPattern20, pub invested_capital_in_profit: MetricPattern1, - pub invested_capital_in_profit_raw: MetricPattern11, - pub investor_cap_in_loss_raw: MetricPattern11, - pub investor_cap_in_profit_raw: MetricPattern11, + pub invested_capital_in_profit_raw: MetricPattern20, + pub investor_cap_in_loss_raw: MetricPattern20, + pub investor_cap_in_profit_raw: MetricPattern20, pub neg_unrealized_loss: MetricPattern1, pub net_sentiment: MetricPattern1, pub net_unrealized_pnl: MetricPattern1, pub pain_index: MetricPattern1, - pub supply_in_loss: BitcoinDollarsSatsPattern4, - pub supply_in_profit: BitcoinDollarsSatsPattern4, + pub supply_in_loss: BtcSatsUsdPattern, + pub supply_in_profit: BtcSatsUsdPattern, pub total_unrealized_pnl: MetricPattern1, pub unrealized_loss: MetricPattern1, pub unrealized_profit: MetricPattern1, @@ -1663,17 +2034,17 @@ impl GreedInvestedInvestorNegNetPainSupplyTotalUnrealizedPattern { Self { greed_index: MetricPattern1::new(client.clone(), _m(&acc, "greed_index")), invested_capital_in_loss: 
MetricPattern1::new(client.clone(), _m(&acc, "invested_capital_in_loss")), - invested_capital_in_loss_raw: MetricPattern11::new(client.clone(), _m(&acc, "invested_capital_in_loss_raw")), + invested_capital_in_loss_raw: MetricPattern20::new(client.clone(), _m(&acc, "invested_capital_in_loss_raw")), invested_capital_in_profit: MetricPattern1::new(client.clone(), _m(&acc, "invested_capital_in_profit")), - invested_capital_in_profit_raw: MetricPattern11::new(client.clone(), _m(&acc, "invested_capital_in_profit_raw")), - investor_cap_in_loss_raw: MetricPattern11::new(client.clone(), _m(&acc, "investor_cap_in_loss_raw")), - investor_cap_in_profit_raw: MetricPattern11::new(client.clone(), _m(&acc, "investor_cap_in_profit_raw")), + invested_capital_in_profit_raw: MetricPattern20::new(client.clone(), _m(&acc, "invested_capital_in_profit_raw")), + investor_cap_in_loss_raw: MetricPattern20::new(client.clone(), _m(&acc, "investor_cap_in_loss_raw")), + investor_cap_in_profit_raw: MetricPattern20::new(client.clone(), _m(&acc, "investor_cap_in_profit_raw")), neg_unrealized_loss: MetricPattern1::new(client.clone(), _m(&acc, "neg_unrealized_loss")), net_sentiment: MetricPattern1::new(client.clone(), _m(&acc, "net_sentiment")), net_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "net_unrealized_pnl")), pain_index: MetricPattern1::new(client.clone(), _m(&acc, "pain_index")), - supply_in_loss: BitcoinDollarsSatsPattern4::new(client.clone(), _m(&acc, "supply_in_loss")), - supply_in_profit: BitcoinDollarsSatsPattern4::new(client.clone(), _m(&acc, "supply_in_profit")), + supply_in_loss: BtcSatsUsdPattern::new(client.clone(), _m(&acc, "supply_in_loss")), + supply_in_profit: BtcSatsUsdPattern::new(client.clone(), _m(&acc, "supply_in_profit")), total_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "total_unrealized_pnl")), unrealized_loss: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_loss")), unrealized_profit: MetricPattern1::new(client.clone(), 
_m(&acc, "unrealized_profit")), @@ -1682,43 +2053,43 @@ impl GreedInvestedInvestorNegNetPainSupplyTotalUnrealizedPattern { } /// Pattern struct for repeated tree structure. -pub struct _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern { - pub _1m_blocks_mined: MetricPattern1, - pub _1m_dominance: MetricPattern1, - pub _1w_blocks_mined: MetricPattern1, - pub _1w_dominance: MetricPattern1, - pub _1y_blocks_mined: MetricPattern1, - pub _1y_dominance: MetricPattern1, - pub _24h_blocks_mined: MetricPattern1, - pub _24h_dominance: MetricPattern1, +pub struct BlocksCoinbaseDaysDominanceFeeSubsidyPattern { pub blocks_mined: CumulativeSumPattern, + pub blocks_mined_1m_sum: MetricPattern1, + pub blocks_mined_1w_sum: MetricPattern1, + pub blocks_mined_1y_sum: MetricPattern1, + pub blocks_mined_24h_sum: MetricPattern1, pub blocks_since_block: MetricPattern1, - pub coinbase: BitcoinDollarsSatsPattern6, - pub days_since_block: MetricPattern4, + pub coinbase: BtcSatsUsdPattern4, + pub days_since_block: MetricPattern1, pub dominance: MetricPattern1, - pub fee: BitcoinDollarsSatsPattern3, - pub subsidy: BitcoinDollarsSatsPattern3, + pub dominance_1m: MetricPattern1, + pub dominance_1w: MetricPattern1, + pub dominance_1y: MetricPattern1, + pub dominance_24h: MetricPattern1, + pub fee: BtcSatsUsdPattern2, + pub subsidy: BtcSatsUsdPattern2, } -impl _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern { +impl BlocksCoinbaseDaysDominanceFeeSubsidyPattern { /// Create a new pattern node with accumulated metric name. 
pub fn new(client: Arc, acc: String) -> Self { Self { - _1m_blocks_mined: MetricPattern1::new(client.clone(), _m(&acc, "1m_blocks_mined")), - _1m_dominance: MetricPattern1::new(client.clone(), _m(&acc, "1m_dominance")), - _1w_blocks_mined: MetricPattern1::new(client.clone(), _m(&acc, "1w_blocks_mined")), - _1w_dominance: MetricPattern1::new(client.clone(), _m(&acc, "1w_dominance")), - _1y_blocks_mined: MetricPattern1::new(client.clone(), _m(&acc, "1y_blocks_mined")), - _1y_dominance: MetricPattern1::new(client.clone(), _m(&acc, "1y_dominance")), - _24h_blocks_mined: MetricPattern1::new(client.clone(), _m(&acc, "24h_blocks_mined")), - _24h_dominance: MetricPattern1::new(client.clone(), _m(&acc, "24h_dominance")), blocks_mined: CumulativeSumPattern::new(client.clone(), _m(&acc, "blocks_mined")), + blocks_mined_1m_sum: MetricPattern1::new(client.clone(), _m(&acc, "blocks_mined_1m_sum")), + blocks_mined_1w_sum: MetricPattern1::new(client.clone(), _m(&acc, "blocks_mined_1w_sum")), + blocks_mined_1y_sum: MetricPattern1::new(client.clone(), _m(&acc, "blocks_mined_1y_sum")), + blocks_mined_24h_sum: MetricPattern1::new(client.clone(), _m(&acc, "blocks_mined_24h_sum")), blocks_since_block: MetricPattern1::new(client.clone(), _m(&acc, "blocks_since_block")), - coinbase: BitcoinDollarsSatsPattern6::new(client.clone(), _m(&acc, "coinbase")), - days_since_block: MetricPattern4::new(client.clone(), _m(&acc, "days_since_block")), + coinbase: BtcSatsUsdPattern4::new(client.clone(), _m(&acc, "coinbase")), + days_since_block: MetricPattern1::new(client.clone(), _m(&acc, "days_since_block")), dominance: MetricPattern1::new(client.clone(), _m(&acc, "dominance")), - fee: BitcoinDollarsSatsPattern3::new(client.clone(), _m(&acc, "fee")), - subsidy: BitcoinDollarsSatsPattern3::new(client.clone(), _m(&acc, "subsidy")), + dominance_1m: MetricPattern1::new(client.clone(), _m(&acc, "dominance_1m")), + dominance_1w: MetricPattern1::new(client.clone(), _m(&acc, "dominance_1w")), + dominance_1y: 
MetricPattern1::new(client.clone(), _m(&acc, "dominance_1y")), + dominance_24h: MetricPattern1::new(client.clone(), _m(&acc, "dominance_24h")), + fee: BtcSatsUsdPattern2::new(client.clone(), _m(&acc, "fee")), + subsidy: BtcSatsUsdPattern2::new(client.clone(), _m(&acc, "subsidy")), } } } @@ -1734,9 +2105,9 @@ pub struct InvestedNegNetNuplSupplyUnrealizedPattern3 { pub supply_in_loss_rel_to_own_supply: MetricPattern1, pub supply_in_profit_rel_to_circulating_supply: MetricPattern1, pub supply_in_profit_rel_to_own_supply: MetricPattern1, - pub supply_rel_to_circulating_supply: MetricPattern4, + pub supply_rel_to_circulating_supply: MetricPattern1, pub unrealized_loss_rel_to_market_cap: MetricPattern1, - pub unrealized_peak_regret_rel_to_market_cap: MetricPattern4, + pub unrealized_peak_regret_rel_to_market_cap: MetricPattern1, pub unrealized_profit_rel_to_market_cap: MetricPattern1, } @@ -1753,9 +2124,9 @@ impl InvestedNegNetNuplSupplyUnrealizedPattern3 { supply_in_loss_rel_to_own_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_in_loss_rel_to_own_supply")), supply_in_profit_rel_to_circulating_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_in_profit_rel_to_circulating_supply")), supply_in_profit_rel_to_own_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_in_profit_rel_to_own_supply")), - supply_rel_to_circulating_supply: MetricPattern4::new(client.clone(), _m(&acc, "supply_rel_to_circulating_supply")), + supply_rel_to_circulating_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_rel_to_circulating_supply")), unrealized_loss_rel_to_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_loss_rel_to_market_cap")), - unrealized_peak_regret_rel_to_market_cap: MetricPattern4::new(client.clone(), _m(&acc, "unrealized_peak_regret_rel_to_market_cap")), + unrealized_peak_regret_rel_to_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_peak_regret_rel_to_market_cap")), 
unrealized_profit_rel_to_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_profit_rel_to_market_cap")), } } @@ -1763,36 +2134,36 @@ impl InvestedNegNetNuplSupplyUnrealizedPattern3 { /// Pattern struct for repeated tree structure. pub struct _10y1m1w1y2y3m3y4y5y6m6y8yPattern3 { - pub _10y: BitcoinDollarsSatsPattern5, - pub _1m: BitcoinDollarsSatsPattern5, - pub _1w: BitcoinDollarsSatsPattern5, - pub _1y: BitcoinDollarsSatsPattern5, - pub _2y: BitcoinDollarsSatsPattern5, - pub _3m: BitcoinDollarsSatsPattern5, - pub _3y: BitcoinDollarsSatsPattern5, - pub _4y: BitcoinDollarsSatsPattern5, - pub _5y: BitcoinDollarsSatsPattern5, - pub _6m: BitcoinDollarsSatsPattern5, - pub _6y: BitcoinDollarsSatsPattern5, - pub _8y: BitcoinDollarsSatsPattern5, + pub _10y: BtcSatsUsdPattern, + pub _1m: BtcSatsUsdPattern, + pub _1w: BtcSatsUsdPattern, + pub _1y: BtcSatsUsdPattern, + pub _2y: BtcSatsUsdPattern, + pub _3m: BtcSatsUsdPattern, + pub _3y: BtcSatsUsdPattern, + pub _4y: BtcSatsUsdPattern, + pub _5y: BtcSatsUsdPattern, + pub _6m: BtcSatsUsdPattern, + pub _6y: BtcSatsUsdPattern, + pub _8y: BtcSatsUsdPattern, } impl _10y1m1w1y2y3m3y4y5y6m6y8yPattern3 { /// Create a new pattern node with accumulated metric name. 
pub fn new(client: Arc, acc: String) -> Self { Self { - _10y: BitcoinDollarsSatsPattern5::new(client.clone(), _p("10y", &acc)), - _1m: BitcoinDollarsSatsPattern5::new(client.clone(), _p("1m", &acc)), - _1w: BitcoinDollarsSatsPattern5::new(client.clone(), _p("1w", &acc)), - _1y: BitcoinDollarsSatsPattern5::new(client.clone(), _p("1y", &acc)), - _2y: BitcoinDollarsSatsPattern5::new(client.clone(), _p("2y", &acc)), - _3m: BitcoinDollarsSatsPattern5::new(client.clone(), _p("3m", &acc)), - _3y: BitcoinDollarsSatsPattern5::new(client.clone(), _p("3y", &acc)), - _4y: BitcoinDollarsSatsPattern5::new(client.clone(), _p("4y", &acc)), - _5y: BitcoinDollarsSatsPattern5::new(client.clone(), _p("5y", &acc)), - _6m: BitcoinDollarsSatsPattern5::new(client.clone(), _p("6m", &acc)), - _6y: BitcoinDollarsSatsPattern5::new(client.clone(), _p("6y", &acc)), - _8y: BitcoinDollarsSatsPattern5::new(client.clone(), _p("8y", &acc)), + _10y: BtcSatsUsdPattern::new(client.clone(), _p("10y", &acc)), + _1m: BtcSatsUsdPattern::new(client.clone(), _p("1m", &acc)), + _1w: BtcSatsUsdPattern::new(client.clone(), _p("1w", &acc)), + _1y: BtcSatsUsdPattern::new(client.clone(), _p("1y", &acc)), + _2y: BtcSatsUsdPattern::new(client.clone(), _p("2y", &acc)), + _3m: BtcSatsUsdPattern::new(client.clone(), _p("3m", &acc)), + _3y: BtcSatsUsdPattern::new(client.clone(), _p("3y", &acc)), + _4y: BtcSatsUsdPattern::new(client.clone(), _p("4y", &acc)), + _5y: BtcSatsUsdPattern::new(client.clone(), _p("5y", &acc)), + _6m: BtcSatsUsdPattern::new(client.clone(), _p("6m", &acc)), + _6y: BtcSatsUsdPattern::new(client.clone(), _p("6y", &acc)), + _8y: BtcSatsUsdPattern::new(client.clone(), _p("8y", &acc)), } } } @@ -1808,7 +2179,7 @@ pub struct InvestedNegNetNuplSupplyUnrealizedPattern { pub supply_in_loss_rel_to_own_supply: MetricPattern1, pub supply_in_profit_rel_to_circulating_supply: MetricPattern1, pub supply_in_profit_rel_to_own_supply: MetricPattern1, - pub supply_rel_to_circulating_supply: MetricPattern4, + pub 
supply_rel_to_circulating_supply: MetricPattern1, pub unrealized_loss_rel_to_market_cap: MetricPattern1, pub unrealized_profit_rel_to_market_cap: MetricPattern1, } @@ -1826,7 +2197,7 @@ impl InvestedNegNetNuplSupplyUnrealizedPattern { supply_in_loss_rel_to_own_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_in_loss_rel_to_own_supply")), supply_in_profit_rel_to_circulating_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_in_profit_rel_to_circulating_supply")), supply_in_profit_rel_to_own_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_in_profit_rel_to_own_supply")), - supply_rel_to_circulating_supply: MetricPattern4::new(client.clone(), _m(&acc, "supply_rel_to_circulating_supply")), + supply_rel_to_circulating_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_rel_to_circulating_supply")), unrealized_loss_rel_to_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_loss_rel_to_market_cap")), unrealized_profit_rel_to_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_profit_rel_to_market_cap")), } @@ -1871,155 +2242,187 @@ impl InvestedNegNetSupplyUnrealizedPattern { /// Pattern struct for repeated tree structure. 
pub struct _10y1m1w1y2y3m3y4y5y6m6y8yPattern2 { - pub _10y: MetricPattern4, - pub _1m: MetricPattern4, - pub _1w: MetricPattern4, - pub _1y: MetricPattern4, - pub _2y: MetricPattern4, - pub _3m: MetricPattern4, - pub _3y: MetricPattern4, - pub _4y: MetricPattern4, - pub _5y: MetricPattern4, - pub _6m: MetricPattern4, - pub _6y: MetricPattern4, - pub _8y: MetricPattern4, + pub _10y: MetricPattern1, + pub _1m: MetricPattern1, + pub _1w: MetricPattern1, + pub _1y: MetricPattern1, + pub _2y: MetricPattern1, + pub _3m: MetricPattern1, + pub _3y: MetricPattern1, + pub _4y: MetricPattern1, + pub _5y: MetricPattern1, + pub _6m: MetricPattern1, + pub _6y: MetricPattern1, + pub _8y: MetricPattern1, } impl _10y1m1w1y2y3m3y4y5y6m6y8yPattern2 { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - _10y: MetricPattern4::new(client.clone(), _p("10y", &acc)), - _1m: MetricPattern4::new(client.clone(), _p("1m", &acc)), - _1w: MetricPattern4::new(client.clone(), _p("1w", &acc)), - _1y: MetricPattern4::new(client.clone(), _p("1y", &acc)), - _2y: MetricPattern4::new(client.clone(), _p("2y", &acc)), - _3m: MetricPattern4::new(client.clone(), _p("3m", &acc)), - _3y: MetricPattern4::new(client.clone(), _p("3y", &acc)), - _4y: MetricPattern4::new(client.clone(), _p("4y", &acc)), - _5y: MetricPattern4::new(client.clone(), _p("5y", &acc)), - _6m: MetricPattern4::new(client.clone(), _p("6m", &acc)), - _6y: MetricPattern4::new(client.clone(), _p("6y", &acc)), - _8y: MetricPattern4::new(client.clone(), _p("8y", &acc)), + _10y: MetricPattern1::new(client.clone(), _p("10y", &acc)), + _1m: MetricPattern1::new(client.clone(), _p("1m", &acc)), + _1w: MetricPattern1::new(client.clone(), _p("1w", &acc)), + _1y: MetricPattern1::new(client.clone(), _p("1y", &acc)), + _2y: MetricPattern1::new(client.clone(), _p("2y", &acc)), + _3m: MetricPattern1::new(client.clone(), _p("3m", &acc)), + _3y: MetricPattern1::new(client.clone(), _p("3y", &acc)), 
+ _4y: MetricPattern1::new(client.clone(), _p("4y", &acc)), + _5y: MetricPattern1::new(client.clone(), _p("5y", &acc)), + _6m: MetricPattern1::new(client.clone(), _p("6m", &acc)), + _6y: MetricPattern1::new(client.clone(), _p("6y", &acc)), + _8y: MetricPattern1::new(client.clone(), _p("8y", &acc)), } } } /// Pattern struct for repeated tree structure. pub struct _201520162017201820192020202120222023202420252026Pattern2 { - pub _2015: MetricPattern4, - pub _2016: MetricPattern4, - pub _2017: MetricPattern4, - pub _2018: MetricPattern4, - pub _2019: MetricPattern4, - pub _2020: MetricPattern4, - pub _2021: MetricPattern4, - pub _2022: MetricPattern4, - pub _2023: MetricPattern4, - pub _2024: MetricPattern4, - pub _2025: MetricPattern4, - pub _2026: MetricPattern4, + pub _2015: MetricPattern1, + pub _2016: MetricPattern1, + pub _2017: MetricPattern1, + pub _2018: MetricPattern1, + pub _2019: MetricPattern1, + pub _2020: MetricPattern1, + pub _2021: MetricPattern1, + pub _2022: MetricPattern1, + pub _2023: MetricPattern1, + pub _2024: MetricPattern1, + pub _2025: MetricPattern1, + pub _2026: MetricPattern1, } impl _201520162017201820192020202120222023202420252026Pattern2 { /// Create a new pattern node with accumulated metric name. 
pub fn new(client: Arc, acc: String) -> Self { Self { - _2015: MetricPattern4::new(client.clone(), _m(&acc, "2015_returns")), - _2016: MetricPattern4::new(client.clone(), _m(&acc, "2016_returns")), - _2017: MetricPattern4::new(client.clone(), _m(&acc, "2017_returns")), - _2018: MetricPattern4::new(client.clone(), _m(&acc, "2018_returns")), - _2019: MetricPattern4::new(client.clone(), _m(&acc, "2019_returns")), - _2020: MetricPattern4::new(client.clone(), _m(&acc, "2020_returns")), - _2021: MetricPattern4::new(client.clone(), _m(&acc, "2021_returns")), - _2022: MetricPattern4::new(client.clone(), _m(&acc, "2022_returns")), - _2023: MetricPattern4::new(client.clone(), _m(&acc, "2023_returns")), - _2024: MetricPattern4::new(client.clone(), _m(&acc, "2024_returns")), - _2025: MetricPattern4::new(client.clone(), _m(&acc, "2025_returns")), - _2026: MetricPattern4::new(client.clone(), _m(&acc, "2026_returns")), + _2015: MetricPattern1::new(client.clone(), _m(&acc, "2015_returns")), + _2016: MetricPattern1::new(client.clone(), _m(&acc, "2016_returns")), + _2017: MetricPattern1::new(client.clone(), _m(&acc, "2017_returns")), + _2018: MetricPattern1::new(client.clone(), _m(&acc, "2018_returns")), + _2019: MetricPattern1::new(client.clone(), _m(&acc, "2019_returns")), + _2020: MetricPattern1::new(client.clone(), _m(&acc, "2020_returns")), + _2021: MetricPattern1::new(client.clone(), _m(&acc, "2021_returns")), + _2022: MetricPattern1::new(client.clone(), _m(&acc, "2022_returns")), + _2023: MetricPattern1::new(client.clone(), _m(&acc, "2023_returns")), + _2024: MetricPattern1::new(client.clone(), _m(&acc, "2024_returns")), + _2025: MetricPattern1::new(client.clone(), _m(&acc, "2025_returns")), + _2026: MetricPattern1::new(client.clone(), _m(&acc, "2026_returns")), } } } /// Pattern struct for repeated tree structure. 
-pub struct AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern { +pub struct AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2 { pub average: MetricPattern2, - pub base: MetricPattern11, + pub base: MetricPattern20, pub cumulative: MetricPattern2, pub max: MetricPattern2, - pub median: MetricPattern6, + pub median: MetricPattern2, pub min: MetricPattern2, - pub pct10: MetricPattern6, - pub pct25: MetricPattern6, - pub pct75: MetricPattern6, - pub pct90: MetricPattern6, + pub pct10: MetricPattern2, + pub pct25: MetricPattern2, + pub pct75: MetricPattern2, + pub pct90: MetricPattern2, pub sum: MetricPattern2, } -impl AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern { +impl AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2 { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { average: MetricPattern2::new(client.clone(), _m(&acc, "average")), - base: MetricPattern11::new(client.clone(), acc.clone()), + base: MetricPattern20::new(client.clone(), acc.clone()), cumulative: MetricPattern2::new(client.clone(), _m(&acc, "cumulative")), max: MetricPattern2::new(client.clone(), _m(&acc, "max")), - median: MetricPattern6::new(client.clone(), _m(&acc, "median")), + median: MetricPattern2::new(client.clone(), _m(&acc, "median")), min: MetricPattern2::new(client.clone(), _m(&acc, "min")), - pct10: MetricPattern6::new(client.clone(), _m(&acc, "pct10")), - pct25: MetricPattern6::new(client.clone(), _m(&acc, "pct25")), - pct75: MetricPattern6::new(client.clone(), _m(&acc, "pct75")), - pct90: MetricPattern6::new(client.clone(), _m(&acc, "pct90")), + pct10: MetricPattern2::new(client.clone(), _m(&acc, "pct10")), + pct25: MetricPattern2::new(client.clone(), _m(&acc, "pct25")), + pct75: MetricPattern2::new(client.clone(), _m(&acc, "pct75")), + pct90: MetricPattern2::new(client.clone(), _m(&acc, "pct90")), sum: MetricPattern2::new(client.clone(), _m(&acc, "sum")), } } } 
/// Pattern struct for repeated tree structure. -pub struct AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2 { +pub struct AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern { pub average: MetricPattern2, - pub base: MetricPattern11, + pub base: MetricPattern20, pub cumulative: MetricPattern1, pub max: MetricPattern2, - pub median: MetricPattern6, + pub median: MetricPattern2, pub min: MetricPattern2, - pub pct10: MetricPattern6, - pub pct25: MetricPattern6, - pub pct75: MetricPattern6, - pub pct90: MetricPattern6, + pub pct10: MetricPattern2, + pub pct25: MetricPattern2, + pub pct75: MetricPattern2, + pub pct90: MetricPattern2, pub sum: MetricPattern2, } -impl AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2 { +impl AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { average: MetricPattern2::new(client.clone(), _m(&acc, "average")), - base: MetricPattern11::new(client.clone(), acc.clone()), + base: MetricPattern20::new(client.clone(), acc.clone()), cumulative: MetricPattern1::new(client.clone(), _m(&acc, "cumulative")), max: MetricPattern2::new(client.clone(), _m(&acc, "max")), - median: MetricPattern6::new(client.clone(), _m(&acc, "median")), + median: MetricPattern2::new(client.clone(), _m(&acc, "median")), min: MetricPattern2::new(client.clone(), _m(&acc, "min")), - pct10: MetricPattern6::new(client.clone(), _m(&acc, "pct10")), - pct25: MetricPattern6::new(client.clone(), _m(&acc, "pct25")), - pct75: MetricPattern6::new(client.clone(), _m(&acc, "pct75")), - pct90: MetricPattern6::new(client.clone(), _m(&acc, "pct90")), + pct10: MetricPattern2::new(client.clone(), _m(&acc, "pct10")), + pct25: MetricPattern2::new(client.clone(), _m(&acc, "pct25")), + pct75: MetricPattern2::new(client.clone(), _m(&acc, "pct75")), + pct90: MetricPattern2::new(client.clone(), _m(&acc, "pct90")), sum: 
MetricPattern2::new(client.clone(), _m(&acc, "sum")), } } } +/// Pattern struct for repeated tree structure. +pub struct AverageGainsLossesRsiStochPattern { + pub average_gain: MetricPattern1, + pub average_loss: MetricPattern1, + pub gains: MetricPattern1, + pub losses: MetricPattern1, + pub rsi: MetricPattern20, + pub rsi_max: MetricPattern1, + pub rsi_min: MetricPattern1, + pub stoch_rsi: MetricPattern1, + pub stoch_rsi_d: MetricPattern1, + pub stoch_rsi_k: MetricPattern1, +} + +impl AverageGainsLossesRsiStochPattern { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + average_gain: MetricPattern1::new(client.clone(), _m(&acc, "avg_gain_1y")), + average_loss: MetricPattern1::new(client.clone(), _m(&acc, "avg_loss_1y")), + gains: MetricPattern1::new(client.clone(), _m(&acc, "gains_1y")), + losses: MetricPattern1::new(client.clone(), _m(&acc, "losses_1y")), + rsi: MetricPattern20::new(client.clone(), _m(&acc, "1y")), + rsi_max: MetricPattern1::new(client.clone(), _m(&acc, "rsi_max_1y")), + rsi_min: MetricPattern1::new(client.clone(), _m(&acc, "rsi_min_1y")), + stoch_rsi: MetricPattern1::new(client.clone(), _m(&acc, "stoch_rsi_1y")), + stoch_rsi_d: MetricPattern1::new(client.clone(), _m(&acc, "stoch_rsi_d_1y")), + stoch_rsi_k: MetricPattern1::new(client.clone(), _m(&acc, "stoch_rsi_k_1y")), + } + } +} + /// Pattern struct for repeated tree structure. 
pub struct AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2 { pub average: MetricPattern1, pub cumulative: MetricPattern1, pub max: MetricPattern1, - pub median: MetricPattern11, + pub median: MetricPattern1, pub min: MetricPattern1, - pub pct10: MetricPattern11, - pub pct25: MetricPattern11, - pub pct75: MetricPattern11, - pub pct90: MetricPattern11, + pub pct10: MetricPattern1, + pub pct25: MetricPattern1, + pub pct75: MetricPattern1, + pub pct90: MetricPattern1, pub sum: MetricPattern1, } @@ -2030,12 +2433,12 @@ impl AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPa average: MetricPattern1::new(client.clone(), _m(&acc, "average")), cumulative: MetricPattern1::new(client.clone(), _m(&acc, "cumulative")), max: MetricPattern1::new(client.clone(), _m(&acc, "max")), - median: MetricPattern11::new(client.clone(), _m(&acc, "median")), + median: MetricPattern1::new(client.clone(), _m(&acc, "median")), min: MetricPattern1::new(client.clone(), _m(&acc, "min")), - pct10: MetricPattern11::new(client.clone(), _m(&acc, "pct10")), - pct25: MetricPattern11::new(client.clone(), _m(&acc, "pct25")), - pct75: MetricPattern11::new(client.clone(), _m(&acc, "pct75")), - pct90: MetricPattern11::new(client.clone(), _m(&acc, "pct90")), + pct10: MetricPattern1::new(client.clone(), _m(&acc, "pct10")), + pct25: MetricPattern1::new(client.clone(), _m(&acc, "pct25")), + pct75: MetricPattern1::new(client.clone(), _m(&acc, "pct75")), + pct90: MetricPattern1::new(client.clone(), _m(&acc, "pct90")), sum: MetricPattern1::new(client.clone(), _m(&acc, "sum")), } } @@ -2045,7 +2448,7 @@ impl AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPa pub struct ActivityAddrCostOutputsRealizedRelativeSupplyUnrealizedPattern { pub activity: CoinblocksCoindaysSatblocksSatdaysSentPattern, pub addr_count: MetricPattern1, - pub addr_count_30d_change: MetricPattern4, + pub addr_count_30d_change: MetricPattern1, pub cost_basis: MaxMinPattern, pub outputs: UtxoPattern, pub realized: 
CapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSentSoprTotalUpperValuePattern, @@ -2060,7 +2463,7 @@ impl ActivityAddrCostOutputsRealizedRelativeSupplyUnrealizedPattern { Self { activity: CoinblocksCoindaysSatblocksSatdaysSentPattern::new(client.clone(), acc.clone()), addr_count: MetricPattern1::new(client.clone(), _m(&acc, "addr_count")), - addr_count_30d_change: MetricPattern4::new(client.clone(), _m(&acc, "addr_count_30d_change")), + addr_count_30d_change: MetricPattern1::new(client.clone(), _m(&acc, "addr_count_30d_change")), cost_basis: MaxMinPattern::new(client.clone(), acc.clone()), outputs: UtxoPattern::new(client.clone(), _m(&acc, "utxo_count")), realized: CapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSentSoprTotalUpperValuePattern::new(client.clone(), acc.clone()), @@ -2101,17 +2504,47 @@ impl AllP2aP2pk33P2pk65P2pkhP2shP2trP2wpkhP2wshPattern { } } +/// Pattern struct for repeated tree structure. +pub struct AverageBaseMaxMedianMinPct10Pct25Pct75Pct90Pattern { + pub average: MetricPattern2, + pub base: MetricPattern20, + pub max: MetricPattern2, + pub median: MetricPattern2, + pub min: MetricPattern2, + pub pct10: MetricPattern2, + pub pct25: MetricPattern2, + pub pct75: MetricPattern2, + pub pct90: MetricPattern2, +} + +impl AverageBaseMaxMedianMinPct10Pct25Pct75Pct90Pattern { + /// Create a new pattern node with accumulated metric name. 
+ pub fn new(client: Arc, acc: String) -> Self { + Self { + average: MetricPattern2::new(client.clone(), _m(&acc, "average")), + base: MetricPattern20::new(client.clone(), acc.clone()), + max: MetricPattern2::new(client.clone(), _m(&acc, "max")), + median: MetricPattern2::new(client.clone(), _m(&acc, "median")), + min: MetricPattern2::new(client.clone(), _m(&acc, "min")), + pct10: MetricPattern2::new(client.clone(), _m(&acc, "pct10")), + pct25: MetricPattern2::new(client.clone(), _m(&acc, "pct25")), + pct75: MetricPattern2::new(client.clone(), _m(&acc, "pct75")), + pct90: MetricPattern2::new(client.clone(), _m(&acc, "pct90")), + } + } +} + /// Pattern struct for repeated tree structure. pub struct AverageMaxMedianMinPct10Pct25Pct75Pct90TxindexPattern { pub average: MetricPattern1, pub max: MetricPattern1, - pub median: MetricPattern11, + pub median: MetricPattern1, pub min: MetricPattern1, - pub pct10: MetricPattern11, - pub pct25: MetricPattern11, - pub pct75: MetricPattern11, - pub pct90: MetricPattern11, - pub txindex: MetricPattern27, + pub pct10: MetricPattern1, + pub pct25: MetricPattern1, + pub pct75: MetricPattern1, + pub pct90: MetricPattern1, + pub txindex: MetricPattern21, } impl AverageMaxMedianMinPct10Pct25Pct75Pct90TxindexPattern { @@ -2120,69 +2553,39 @@ impl AverageMaxMedianMinPct10Pct25Pct75Pct90TxindexPattern< Self { average: MetricPattern1::new(client.clone(), _m(&acc, "average")), max: MetricPattern1::new(client.clone(), _m(&acc, "max")), - median: MetricPattern11::new(client.clone(), _m(&acc, "median")), + median: MetricPattern1::new(client.clone(), _m(&acc, "median")), min: MetricPattern1::new(client.clone(), _m(&acc, "min")), - pct10: MetricPattern11::new(client.clone(), _m(&acc, "pct10")), - pct25: MetricPattern11::new(client.clone(), _m(&acc, "pct25")), - pct75: MetricPattern11::new(client.clone(), _m(&acc, "pct75")), - pct90: MetricPattern11::new(client.clone(), _m(&acc, "pct90")), - txindex: MetricPattern27::new(client.clone(), 
acc.clone()), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct AverageBaseMaxMedianMinPct10Pct25Pct75Pct90Pattern { - pub average: MetricPattern2, - pub base: MetricPattern11, - pub max: MetricPattern2, - pub median: MetricPattern6, - pub min: MetricPattern2, - pub pct10: MetricPattern6, - pub pct25: MetricPattern6, - pub pct75: MetricPattern6, - pub pct90: MetricPattern6, -} - -impl AverageBaseMaxMedianMinPct10Pct25Pct75Pct90Pattern { - /// Create a new pattern node with accumulated metric name. - pub fn new(client: Arc, acc: String) -> Self { - Self { - average: MetricPattern2::new(client.clone(), _m(&acc, "average")), - base: MetricPattern11::new(client.clone(), acc.clone()), - max: MetricPattern2::new(client.clone(), _m(&acc, "max")), - median: MetricPattern6::new(client.clone(), _m(&acc, "median")), - min: MetricPattern2::new(client.clone(), _m(&acc, "min")), - pct10: MetricPattern6::new(client.clone(), _m(&acc, "pct10")), - pct25: MetricPattern6::new(client.clone(), _m(&acc, "pct25")), - pct75: MetricPattern6::new(client.clone(), _m(&acc, "pct75")), - pct90: MetricPattern6::new(client.clone(), _m(&acc, "pct90")), + pct10: MetricPattern1::new(client.clone(), _m(&acc, "pct10")), + pct25: MetricPattern1::new(client.clone(), _m(&acc, "pct25")), + pct75: MetricPattern1::new(client.clone(), _m(&acc, "pct75")), + pct90: MetricPattern1::new(client.clone(), _m(&acc, "pct90")), + txindex: MetricPattern21::new(client.clone(), acc.clone()), } } } /// Pattern struct for repeated tree structure. 
pub struct _10y2y3y4y5y6y8yPattern { - pub _10y: MetricPattern4, - pub _2y: MetricPattern4, - pub _3y: MetricPattern4, - pub _4y: MetricPattern4, - pub _5y: MetricPattern4, - pub _6y: MetricPattern4, - pub _8y: MetricPattern4, + pub _10y: MetricPattern1, + pub _2y: MetricPattern1, + pub _3y: MetricPattern1, + pub _4y: MetricPattern1, + pub _5y: MetricPattern1, + pub _6y: MetricPattern1, + pub _8y: MetricPattern1, } impl _10y2y3y4y5y6y8yPattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - _10y: MetricPattern4::new(client.clone(), _p("10y", &acc)), - _2y: MetricPattern4::new(client.clone(), _p("2y", &acc)), - _3y: MetricPattern4::new(client.clone(), _p("3y", &acc)), - _4y: MetricPattern4::new(client.clone(), _p("4y", &acc)), - _5y: MetricPattern4::new(client.clone(), _p("5y", &acc)), - _6y: MetricPattern4::new(client.clone(), _p("6y", &acc)), - _8y: MetricPattern4::new(client.clone(), _p("8y", &acc)), + _10y: MetricPattern1::new(client.clone(), _p("10y", &acc)), + _2y: MetricPattern1::new(client.clone(), _p("2y", &acc)), + _3y: MetricPattern1::new(client.clone(), _p("3y", &acc)), + _4y: MetricPattern1::new(client.clone(), _p("4y", &acc)), + _5y: MetricPattern1::new(client.clone(), _p("5y", &acc)), + _6y: MetricPattern1::new(client.clone(), _p("6y", &acc)), + _8y: MetricPattern1::new(client.clone(), _p("8y", &acc)), } } } @@ -2369,10 +2772,10 @@ impl BalanceBothReactivatedReceivingSendingPattern { pub struct CoinblocksCoindaysSatblocksSatdaysSentPattern { pub coinblocks_destroyed: CumulativeSumPattern, pub coindays_destroyed: CumulativeSumPattern, - pub satblocks_destroyed: MetricPattern11, - pub satdays_destroyed: MetricPattern11, - pub sent: BitcoinDollarsSatsPattern3, - pub sent_14d_ema: BitcoinDollarsSatsPattern5, + pub satblocks_destroyed: MetricPattern20, + pub satdays_destroyed: MetricPattern20, + pub sent: BtcSatsUsdPattern2, + pub sent_14d_ema: BtcSatsUsdPattern, } impl 
CoinblocksCoindaysSatblocksSatdaysSentPattern { @@ -2381,10 +2784,10 @@ impl CoinblocksCoindaysSatblocksSatdaysSentPattern { Self { coinblocks_destroyed: CumulativeSumPattern::new(client.clone(), _m(&acc, "coinblocks_destroyed")), coindays_destroyed: CumulativeSumPattern::new(client.clone(), _m(&acc, "coindays_destroyed")), - satblocks_destroyed: MetricPattern11::new(client.clone(), _m(&acc, "satblocks_destroyed")), - satdays_destroyed: MetricPattern11::new(client.clone(), _m(&acc, "satdays_destroyed")), - sent: BitcoinDollarsSatsPattern3::new(client.clone(), _m(&acc, "sent")), - sent_14d_ema: BitcoinDollarsSatsPattern5::new(client.clone(), _m(&acc, "sent_14d_ema")), + satblocks_destroyed: MetricPattern20::new(client.clone(), _m(&acc, "satblocks_destroyed")), + satdays_destroyed: MetricPattern20::new(client.clone(), _m(&acc, "satdays_destroyed")), + sent: BtcSatsUsdPattern2::new(client.clone(), _m(&acc, "sent")), + sent_14d_ema: BtcSatsUsdPattern::new(client.clone(), _m(&acc, "sent_14d_ema")), } } } @@ -2392,11 +2795,11 @@ impl CoinblocksCoindaysSatblocksSatdaysSentPattern { /// Pattern struct for repeated tree structure. 
pub struct InvestedMaxMinPercentilesSpotPattern { pub invested_capital: Pct05Pct10Pct15Pct20Pct25Pct30Pct35Pct40Pct45Pct50Pct55Pct60Pct65Pct70Pct75Pct80Pct85Pct90Pct95Pattern, - pub max: DollarsSatsPattern, - pub min: DollarsSatsPattern, + pub max: SatsUsdPattern, + pub min: SatsUsdPattern, pub percentiles: Pct05Pct10Pct15Pct20Pct25Pct30Pct35Pct40Pct45Pct50Pct55Pct60Pct65Pct70Pct75Pct80Pct85Pct90Pct95Pattern, - pub spot_cost_basis_percentile: MetricPattern4, - pub spot_invested_capital_percentile: MetricPattern4, + pub spot_cost_basis_percentile: MetricPattern1, + pub spot_invested_capital_percentile: MetricPattern1, } impl InvestedMaxMinPercentilesSpotPattern { @@ -2404,11 +2807,11 @@ impl InvestedMaxMinPercentilesSpotPattern { pub fn new(client: Arc, acc: String) -> Self { Self { invested_capital: Pct05Pct10Pct15Pct20Pct25Pct30Pct35Pct40Pct45Pct50Pct55Pct60Pct65Pct70Pct75Pct80Pct85Pct90Pct95Pattern::new(client.clone(), _m(&acc, "invested_capital")), - max: DollarsSatsPattern::new(client.clone(), _m(&acc, "max_cost_basis")), - min: DollarsSatsPattern::new(client.clone(), _m(&acc, "min_cost_basis")), + max: SatsUsdPattern::new(client.clone(), _m(&acc, "max_cost_basis")), + min: SatsUsdPattern::new(client.clone(), _m(&acc, "min_cost_basis")), percentiles: Pct05Pct10Pct15Pct20Pct25Pct30Pct35Pct40Pct45Pct50Pct55Pct60Pct65Pct70Pct75Pct80Pct85Pct90Pct95Pattern::new(client.clone(), _m(&acc, "cost_basis")), - spot_cost_basis_percentile: MetricPattern4::new(client.clone(), _m(&acc, "spot_cost_basis_percentile")), - spot_invested_capital_percentile: MetricPattern4::new(client.clone(), _m(&acc, "spot_invested_capital_percentile")), + spot_cost_basis_percentile: MetricPattern1::new(client.clone(), _m(&acc, "spot_cost_basis_percentile")), + spot_invested_capital_percentile: MetricPattern1::new(client.clone(), _m(&acc, "spot_invested_capital_percentile")), } } } @@ -2434,46 +2837,46 @@ impl InvestedSupplyPattern { } /// Pattern struct for repeated tree structure. 
-pub struct CloseHighLowOpenPattern2 { - pub close: MetricPattern1, - pub high: MetricPattern1, - pub low: MetricPattern1, - pub open: MetricPattern1, +pub struct CloseHighLowOpenPattern { + pub close: MetricPattern2, + pub high: MetricPattern2, + pub low: MetricPattern2, + pub open: MetricPattern2, } -impl CloseHighLowOpenPattern2 { +impl CloseHighLowOpenPattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - close: MetricPattern1::new(client.clone(), _m(&acc, "close")), - high: MetricPattern1::new(client.clone(), _m(&acc, "high")), - low: MetricPattern1::new(client.clone(), _m(&acc, "low")), - open: MetricPattern1::new(client.clone(), _m(&acc, "open")), + close: MetricPattern2::new(client.clone(), _m(&acc, "close")), + high: MetricPattern2::new(client.clone(), _m(&acc, "high")), + low: MetricPattern2::new(client.clone(), _m(&acc, "low")), + open: MetricPattern2::new(client.clone(), _m(&acc, "open")), } } } /// Pattern struct for repeated tree structure. pub struct _30dHalvedTotalPattern { - pub _30d_change: BitcoinDollarsSatsPattern5, - pub halved: BitcoinDollarsSatsPattern4, - pub total: BitcoinDollarsSatsPattern4, + pub _30d_change: BtcSatsUsdPattern, + pub halved: BtcSatsUsdPattern, + pub total: BtcSatsUsdPattern, } impl _30dHalvedTotalPattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - _30d_change: BitcoinDollarsSatsPattern5::new(client.clone(), _m(&acc, "_30d_change")), - halved: BitcoinDollarsSatsPattern4::new(client.clone(), _m(&acc, "supply_halved")), - total: BitcoinDollarsSatsPattern4::new(client.clone(), _m(&acc, "supply")), + _30d_change: BtcSatsUsdPattern::new(client.clone(), _m(&acc, "_30d_change")), + halved: BtcSatsUsdPattern::new(client.clone(), _m(&acc, "supply_halved")), + total: BtcSatsUsdPattern::new(client.clone(), _m(&acc, "supply")), } } } /// Pattern struct for repeated tree structure. 
pub struct BaseCumulativeSumPattern { - pub base: MetricPattern11, + pub base: MetricPattern20, pub cumulative: MetricPattern2, pub sum: MetricPattern2, } @@ -2482,7 +2885,7 @@ impl BaseCumulativeSumPattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - base: MetricPattern11::new(client.clone(), acc.clone()), + base: MetricPattern20::new(client.clone(), acc.clone()), cumulative: MetricPattern2::new(client.clone(), _m(&acc, "cumulative")), sum: MetricPattern2::new(client.clone(), _m(&acc, "sum")), } @@ -2490,98 +2893,98 @@ impl BaseCumulativeSumPattern { } /// Pattern struct for repeated tree structure. -pub struct BitcoinDollarsSatsPattern2 { - pub bitcoin: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, - pub dollars: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, - pub sats: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, +pub struct BtcSatsUsdPattern3 { + pub btc: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, + pub sats: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, + pub usd: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, } -impl BitcoinDollarsSatsPattern2 { +impl BtcSatsUsdPattern3 { /// Create a new pattern node with accumulated metric name. 
pub fn new(client: Arc, acc: String) -> Self { Self { - bitcoin: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), _m(&acc, "btc")), - dollars: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), _m(&acc, "usd")), - sats: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), acc.clone()), + btc: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), _m(&acc, "btc")), + sats: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), acc.clone()), + usd: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), _m(&acc, "usd")), } } } /// Pattern struct for repeated tree structure. -pub struct BitcoinDollarsSatsPattern4 { - pub bitcoin: MetricPattern1, - pub dollars: MetricPattern1, +pub struct BtcSatsUsdPattern { + pub btc: MetricPattern1, pub sats: MetricPattern1, + pub usd: MetricPattern1, } -impl BitcoinDollarsSatsPattern4 { +impl BtcSatsUsdPattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - bitcoin: MetricPattern1::new(client.clone(), _m(&acc, "btc")), - dollars: MetricPattern1::new(client.clone(), _m(&acc, "usd")), + btc: MetricPattern1::new(client.clone(), _m(&acc, "btc")), sats: MetricPattern1::new(client.clone(), acc.clone()), + usd: MetricPattern1::new(client.clone(), _m(&acc, "usd")), } } } /// Pattern struct for repeated tree structure. -pub struct BitcoinDollarsSatsPattern5 { - pub bitcoin: MetricPattern4, - pub dollars: MetricPattern4, - pub sats: MetricPattern4, -} - -impl BitcoinDollarsSatsPattern5 { - /// Create a new pattern node with accumulated metric name. 
- pub fn new(client: Arc, acc: String) -> Self { - Self { - bitcoin: MetricPattern4::new(client.clone(), _m(&acc, "btc")), - dollars: MetricPattern4::new(client.clone(), _m(&acc, "usd")), - sats: MetricPattern4::new(client.clone(), acc.clone()), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct BitcoinDollarsSatsPattern6 { - pub bitcoin: CumulativeSumPattern, - pub dollars: CumulativeSumPattern, +pub struct BtcSatsUsdPattern4 { + pub btc: CumulativeSumPattern, pub sats: CumulativeSumPattern, + pub usd: CumulativeSumPattern, } -impl BitcoinDollarsSatsPattern6 { +impl BtcSatsUsdPattern4 { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - bitcoin: CumulativeSumPattern::new(client.clone(), _m(&acc, "btc")), - dollars: CumulativeSumPattern::new(client.clone(), _m(&acc, "usd")), + btc: CumulativeSumPattern::new(client.clone(), _m(&acc, "btc")), sats: CumulativeSumPattern::new(client.clone(), acc.clone()), + usd: CumulativeSumPattern::new(client.clone(), _m(&acc, "usd")), } } } /// Pattern struct for repeated tree structure. -pub struct BitcoinDollarsSatsPattern3 { - pub bitcoin: CumulativeSumPattern2, - pub dollars: CumulativeSumPattern, +pub struct BtcSatsUsdPattern2 { + pub btc: CumulativeSumPattern2, pub sats: CumulativeSumPattern, + pub usd: CumulativeSumPattern, } -impl BitcoinDollarsSatsPattern3 { +impl BtcSatsUsdPattern2 { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - bitcoin: CumulativeSumPattern2::new(client.clone(), _m(&acc, "btc")), - dollars: CumulativeSumPattern::new(client.clone(), _m(&acc, "usd")), + btc: CumulativeSumPattern2::new(client.clone(), _m(&acc, "btc")), sats: CumulativeSumPattern::new(client.clone(), acc.clone()), + usd: CumulativeSumPattern::new(client.clone(), _m(&acc, "usd")), + } + } +} + +/// Pattern struct for repeated tree structure. 
+pub struct HistogramLineSignalPattern { + pub histogram: MetricPattern20, + pub line: MetricPattern1, + pub signal: MetricPattern1, +} + +impl HistogramLineSignalPattern { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + histogram: MetricPattern20::new(client.clone(), _m(&acc, "histogram_1y")), + line: MetricPattern1::new(client.clone(), _m(&acc, "line_1y")), + signal: MetricPattern1::new(client.clone(), _m(&acc, "signal_1y")), } } } /// Pattern struct for repeated tree structure. pub struct _30dCountPattern { - pub _30d_change: MetricPattern4, + pub _30d_change: MetricPattern1, pub count: MetricPattern1, } @@ -2589,72 +2992,56 @@ impl _30dCountPattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - _30d_change: MetricPattern4::new(client.clone(), _m(&acc, "30d_change")), + _30d_change: MetricPattern1::new(client.clone(), _m(&acc, "30d_change")), count: MetricPattern1::new(client.clone(), acc.clone()), } } } -/// Pattern struct for repeated tree structure. -pub struct DollarsSatsPattern { - pub dollars: MetricPattern1, - pub sats: MetricPattern1, -} - -impl DollarsSatsPattern { - /// Create a new pattern node with accumulated metric name. - pub fn new(client: Arc, acc: String) -> Self { - Self { - dollars: MetricPattern1::new(client.clone(), acc.clone()), - sats: MetricPattern1::new(client.clone(), _m(&acc, "sats")), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct DollarsSatsPattern2 { - pub dollars: MetricPattern4, - pub sats: MetricPattern4, -} - -impl DollarsSatsPattern2 { - /// Create a new pattern node with accumulated metric name. - pub fn new(client: Arc, acc: String) -> Self { - Self { - dollars: MetricPattern4::new(client.clone(), acc.clone()), - sats: MetricPattern4::new(client.clone(), _m(&acc, "sats")), - } - } -} - /// Pattern struct for repeated tree structure. 
pub struct MaxMinPattern { - pub max: DollarsSatsPattern, - pub min: DollarsSatsPattern, + pub max: SatsUsdPattern, + pub min: SatsUsdPattern, } impl MaxMinPattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - max: DollarsSatsPattern::new(client.clone(), _m(&acc, "max_cost_basis")), - min: DollarsSatsPattern::new(client.clone(), _m(&acc, "min_cost_basis")), + max: SatsUsdPattern::new(client.clone(), _m(&acc, "max_cost_basis")), + min: SatsUsdPattern::new(client.clone(), _m(&acc, "min_cost_basis")), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct SatsUsdPattern { + pub sats: MetricPattern1, + pub usd: MetricPattern1, +} + +impl SatsUsdPattern { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + sats: MetricPattern1::new(client.clone(), _m(&acc, "sats")), + usd: MetricPattern1::new(client.clone(), acc.clone()), } } } /// Pattern struct for repeated tree structure. pub struct SdSmaPattern { - pub sd: MetricPattern4, - pub sma: MetricPattern4, + pub sd: MetricPattern1, + pub sma: MetricPattern1, } impl SdSmaPattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - sd: MetricPattern4::new(client.clone(), _m(&acc, "sd")), - sma: MetricPattern4::new(client.clone(), _m(&acc, "sma")), + sd: MetricPattern1::new(client.clone(), _m(&acc, "sd")), + sma: MetricPattern1::new(client.clone(), _m(&acc, "sma")), } } } @@ -2662,7 +3049,7 @@ impl SdSmaPattern { /// Pattern struct for repeated tree structure. 
pub struct UtxoPattern { pub utxo_count: MetricPattern1, - pub utxo_count_30d_change: MetricPattern4, + pub utxo_count_30d_change: MetricPattern1, } impl UtxoPattern { @@ -2670,23 +3057,7 @@ impl UtxoPattern { pub fn new(client: Arc, acc: String) -> Self { Self { utxo_count: MetricPattern1::new(client.clone(), acc.clone()), - utxo_count_30d_change: MetricPattern4::new(client.clone(), _m(&acc, "30d_change")), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct CumulativeSumPattern { - pub cumulative: MetricPattern1, - pub sum: MetricPattern1, -} - -impl CumulativeSumPattern { - /// Create a new pattern node with accumulated metric name. - pub fn new(client: Arc, acc: String) -> Self { - Self { - cumulative: MetricPattern1::new(client.clone(), _m(&acc, "cumulative")), - sum: MetricPattern1::new(client.clone(), acc.clone()), + utxo_count_30d_change: MetricPattern1::new(client.clone(), _m(&acc, "30d_change")), } } } @@ -2708,31 +3079,31 @@ impl CumulativeSumPattern2 { } /// Pattern struct for repeated tree structure. -pub struct OhlcSplitPattern2 { - pub ohlc: MetricPattern1, - pub split: CloseHighLowOpenPattern2, +pub struct CumulativeSumPattern { + pub cumulative: MetricPattern1, + pub sum: MetricPattern1, } -impl OhlcSplitPattern2 { +impl CumulativeSumPattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - ohlc: MetricPattern1::new(client.clone(), _m(&acc, "ohlc_sats")), - split: CloseHighLowOpenPattern2::new(client.clone(), _m(&acc, "sats")), + cumulative: MetricPattern1::new(client.clone(), _m(&acc, "cumulative")), + sum: MetricPattern1::new(client.clone(), acc.clone()), } } } /// Pattern struct for repeated tree structure. pub struct RatioPattern2 { - pub ratio: MetricPattern4, + pub ratio: MetricPattern1, } impl RatioPattern2 { /// Create a new pattern node with accumulated metric name. 
pub fn new(client: Arc, acc: String) -> Self { Self { - ratio: MetricPattern4::new(client.clone(), acc.clone()), + ratio: MetricPattern1::new(client.clone(), acc.clone()), } } } @@ -2747,13 +3118,14 @@ pub struct MetricsTree { pub outputs: MetricsTree_Outputs, pub addresses: MetricsTree_Addresses, pub scripts: MetricsTree_Scripts, + pub mining: MetricsTree_Mining, pub positions: MetricsTree_Positions, pub cointime: MetricsTree_Cointime, pub constants: MetricsTree_Constants, pub indexes: MetricsTree_Indexes, pub market: MetricsTree_Market, pub pools: MetricsTree_Pools, - pub price: MetricsTree_Price, + pub prices: MetricsTree_Prices, pub distribution: MetricsTree_Distribution, pub supply: MetricsTree_Supply, } @@ -2767,13 +3139,14 @@ impl MetricsTree { outputs: MetricsTree_Outputs::new(client.clone(), format!("{base_path}_outputs")), addresses: MetricsTree_Addresses::new(client.clone(), format!("{base_path}_addresses")), scripts: MetricsTree_Scripts::new(client.clone(), format!("{base_path}_scripts")), + mining: MetricsTree_Mining::new(client.clone(), format!("{base_path}_mining")), positions: MetricsTree_Positions::new(client.clone(), format!("{base_path}_positions")), cointime: MetricsTree_Cointime::new(client.clone(), format!("{base_path}_cointime")), constants: MetricsTree_Constants::new(client.clone(), format!("{base_path}_constants")), indexes: MetricsTree_Indexes::new(client.clone(), format!("{base_path}_indexes")), market: MetricsTree_Market::new(client.clone(), format!("{base_path}_market")), pools: MetricsTree_Pools::new(client.clone(), format!("{base_path}_pools")), - price: MetricsTree_Price::new(client.clone(), format!("{base_path}_price")), + prices: MetricsTree_Prices::new(client.clone(), format!("{base_path}_prices")), distribution: MetricsTree_Distribution::new(client.clone(), format!("{base_path}_distribution")), supply: MetricsTree_Supply::new(client.clone(), format!("{base_path}_supply")), } @@ -2782,17 +3155,15 @@ impl MetricsTree { /// Metrics 
tree node. pub struct MetricsTree_Blocks { - pub blockhash: MetricPattern11, + pub blockhash: MetricPattern20, pub difficulty: MetricsTree_Blocks_Difficulty, pub time: MetricsTree_Blocks_Time, - pub total_size: MetricPattern11, - pub weight: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, + pub total_size: MetricPattern20, + pub weight: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, pub count: MetricsTree_Blocks_Count, pub interval: AverageBaseMaxMedianMinPct10Pct25Pct75Pct90Pattern, - pub mining: MetricsTree_Blocks_Mining, - pub rewards: MetricsTree_Blocks_Rewards, pub halving: MetricsTree_Blocks_Halving, - pub vbytes: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, + pub vbytes: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, pub size: MetricsTree_Blocks_Size, pub fullness: AverageBaseMaxMedianMinPct10Pct25Pct75Pct90Pattern, } @@ -2800,17 +3171,15 @@ pub struct MetricsTree_Blocks { impl MetricsTree_Blocks { pub fn new(client: Arc, base_path: String) -> Self { Self { - blockhash: MetricPattern11::new(client.clone(), "blockhash".to_string()), + blockhash: MetricPattern20::new(client.clone(), "blockhash".to_string()), difficulty: MetricsTree_Blocks_Difficulty::new(client.clone(), format!("{base_path}_difficulty")), time: MetricsTree_Blocks_Time::new(client.clone(), format!("{base_path}_time")), - total_size: MetricPattern11::new(client.clone(), "total_size".to_string()), - weight: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "block_weight".to_string()), + total_size: MetricPattern20::new(client.clone(), "total_size".to_string()), + weight: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "block_weight".to_string()), count: MetricsTree_Blocks_Count::new(client.clone(), format!("{base_path}_count")), interval: AverageBaseMaxMedianMinPct10Pct25Pct75Pct90Pattern::new(client.clone(), "block_interval".to_string()), - mining: 
MetricsTree_Blocks_Mining::new(client.clone(), format!("{base_path}_mining")), - rewards: MetricsTree_Blocks_Rewards::new(client.clone(), format!("{base_path}_rewards")), halving: MetricsTree_Blocks_Halving::new(client.clone(), format!("{base_path}_halving")), - vbytes: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "block_vbytes".to_string()), + vbytes: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "block_vbytes".to_string()), size: MetricsTree_Blocks_Size::new(client.clone(), format!("{base_path}_size")), fullness: AverageBaseMaxMedianMinPct10Pct25Pct75Pct90Pattern::new(client.clone(), "block_fullness".to_string()), } @@ -2822,7 +3191,7 @@ pub struct MetricsTree_Blocks_Difficulty { pub raw: MetricPattern1, pub as_hash: MetricPattern1, pub adjustment: MetricPattern1, - pub epoch: MetricPattern4, + pub epoch: MetricPattern1, pub blocks_before_next_adjustment: MetricPattern1, pub days_before_next_adjustment: MetricPattern1, } @@ -2833,7 +3202,7 @@ impl MetricsTree_Blocks_Difficulty { raw: MetricPattern1::new(client.clone(), "difficulty".to_string()), as_hash: MetricPattern1::new(client.clone(), "difficulty_as_hash".to_string()), adjustment: MetricPattern1::new(client.clone(), "difficulty_adjustment".to_string()), - epoch: MetricPattern4::new(client.clone(), "difficultyepoch".to_string()), + epoch: MetricPattern1::new(client.clone(), "difficulty_epoch".to_string()), blocks_before_next_adjustment: MetricPattern1::new(client.clone(), "blocks_before_next_difficulty_adjustment".to_string()), days_before_next_adjustment: MetricPattern1::new(client.clone(), "days_before_next_difficulty_adjustment".to_string()), } @@ -2843,141 +3212,106 @@ impl MetricsTree_Blocks_Difficulty { /// Metrics tree node. 
pub struct MetricsTree_Blocks_Time { pub timestamp: MetricPattern1, - pub date: MetricPattern11, - pub timestamp_monotonic: MetricPattern11, + pub date: MetricPattern20, + pub timestamp_monotonic: MetricPattern20, } impl MetricsTree_Blocks_Time { pub fn new(client: Arc, base_path: String) -> Self { Self { timestamp: MetricPattern1::new(client.clone(), "timestamp".to_string()), - date: MetricPattern11::new(client.clone(), "date".to_string()), - timestamp_monotonic: MetricPattern11::new(client.clone(), "timestamp_monotonic".to_string()), + date: MetricPattern20::new(client.clone(), "date".to_string()), + timestamp_monotonic: MetricPattern20::new(client.clone(), "timestamp_monotonic".to_string()), } } } /// Metrics tree node. pub struct MetricsTree_Blocks_Count { - pub block_count_target: MetricPattern4, + pub block_count_target: MetricPattern1, pub block_count: CumulativeSumPattern, - pub _24h_start: MetricPattern11, - pub _1w_start: MetricPattern11, - pub _1m_start: MetricPattern11, - pub _1y_start: MetricPattern11, - pub _24h_block_count: MetricPattern1, - pub _1w_block_count: MetricPattern1, - pub _1m_block_count: MetricPattern1, - pub _1y_block_count: MetricPattern1, + pub height_24h_ago: MetricPattern20, + pub height_3d_ago: MetricPattern20, + pub height_1w_ago: MetricPattern20, + pub height_8d_ago: MetricPattern20, + pub height_9d_ago: MetricPattern20, + pub height_12d_ago: MetricPattern20, + pub height_13d_ago: MetricPattern20, + pub height_2w_ago: MetricPattern20, + pub height_21d_ago: MetricPattern20, + pub height_26d_ago: MetricPattern20, + pub height_1m_ago: MetricPattern20, + pub height_34d_ago: MetricPattern20, + pub height_55d_ago: MetricPattern20, + pub height_2m_ago: MetricPattern20, + pub height_89d_ago: MetricPattern20, + pub height_111d_ago: MetricPattern20, + pub height_144d_ago: MetricPattern20, + pub height_3m_ago: MetricPattern20, + pub height_6m_ago: MetricPattern20, + pub height_200d_ago: MetricPattern20, + pub height_350d_ago: 
MetricPattern20, + pub height_1y_ago: MetricPattern20, + pub height_2y_ago: MetricPattern20, + pub height_200w_ago: MetricPattern20, + pub height_3y_ago: MetricPattern20, + pub height_4y_ago: MetricPattern20, + pub height_5y_ago: MetricPattern20, + pub height_6y_ago: MetricPattern20, + pub height_8y_ago: MetricPattern20, + pub height_10y_ago: MetricPattern20, + pub block_count_24h_sum: MetricPattern1, + pub block_count_1w_sum: MetricPattern1, + pub block_count_1m_sum: MetricPattern1, + pub block_count_1y_sum: MetricPattern1, } impl MetricsTree_Blocks_Count { pub fn new(client: Arc, base_path: String) -> Self { Self { - block_count_target: MetricPattern4::new(client.clone(), "block_count_target".to_string()), + block_count_target: MetricPattern1::new(client.clone(), "block_count_target".to_string()), block_count: CumulativeSumPattern::new(client.clone(), "block_count".to_string()), - _24h_start: MetricPattern11::new(client.clone(), "24h_start".to_string()), - _1w_start: MetricPattern11::new(client.clone(), "1w_start".to_string()), - _1m_start: MetricPattern11::new(client.clone(), "1m_start".to_string()), - _1y_start: MetricPattern11::new(client.clone(), "1y_start".to_string()), - _24h_block_count: MetricPattern1::new(client.clone(), "24h_block_count".to_string()), - _1w_block_count: MetricPattern1::new(client.clone(), "1w_block_count".to_string()), - _1m_block_count: MetricPattern1::new(client.clone(), "1m_block_count".to_string()), - _1y_block_count: MetricPattern1::new(client.clone(), "1y_block_count".to_string()), - } - } -} - -/// Metrics tree node. 
-pub struct MetricsTree_Blocks_Mining { - pub hash_rate: MetricPattern1, - pub hash_rate_1w_sma: MetricPattern4, - pub hash_rate_1m_sma: MetricPattern4, - pub hash_rate_2m_sma: MetricPattern4, - pub hash_rate_1y_sma: MetricPattern4, - pub hash_rate_ath: MetricPattern1, - pub hash_rate_drawdown: MetricPattern1, - pub hash_price_ths: MetricPattern1, - pub hash_price_ths_min: MetricPattern1, - pub hash_price_phs: MetricPattern1, - pub hash_price_phs_min: MetricPattern1, - pub hash_price_rebound: MetricPattern1, - pub hash_value_ths: MetricPattern1, - pub hash_value_ths_min: MetricPattern1, - pub hash_value_phs: MetricPattern1, - pub hash_value_phs_min: MetricPattern1, - pub hash_value_rebound: MetricPattern1, -} - -impl MetricsTree_Blocks_Mining { - pub fn new(client: Arc, base_path: String) -> Self { - Self { - hash_rate: MetricPattern1::new(client.clone(), "hash_rate".to_string()), - hash_rate_1w_sma: MetricPattern4::new(client.clone(), "hash_rate_1w_sma".to_string()), - hash_rate_1m_sma: MetricPattern4::new(client.clone(), "hash_rate_1m_sma".to_string()), - hash_rate_2m_sma: MetricPattern4::new(client.clone(), "hash_rate_2m_sma".to_string()), - hash_rate_1y_sma: MetricPattern4::new(client.clone(), "hash_rate_1y_sma".to_string()), - hash_rate_ath: MetricPattern1::new(client.clone(), "hash_rate_ath".to_string()), - hash_rate_drawdown: MetricPattern1::new(client.clone(), "hash_rate_drawdown".to_string()), - hash_price_ths: MetricPattern1::new(client.clone(), "hash_price_ths".to_string()), - hash_price_ths_min: MetricPattern1::new(client.clone(), "hash_price_ths_min".to_string()), - hash_price_phs: MetricPattern1::new(client.clone(), "hash_price_phs".to_string()), - hash_price_phs_min: MetricPattern1::new(client.clone(), "hash_price_phs_min".to_string()), - hash_price_rebound: MetricPattern1::new(client.clone(), "hash_price_rebound".to_string()), - hash_value_ths: MetricPattern1::new(client.clone(), "hash_value_ths".to_string()), - hash_value_ths_min: 
MetricPattern1::new(client.clone(), "hash_value_ths_min".to_string()), - hash_value_phs: MetricPattern1::new(client.clone(), "hash_value_phs".to_string()), - hash_value_phs_min: MetricPattern1::new(client.clone(), "hash_value_phs_min".to_string()), - hash_value_rebound: MetricPattern1::new(client.clone(), "hash_value_rebound".to_string()), - } - } -} - -/// Metrics tree node. -pub struct MetricsTree_Blocks_Rewards { - pub _24h_coinbase_sum: MetricsTree_Blocks_Rewards_24hCoinbaseSum, - pub coinbase: BitcoinDollarsSatsPattern2, - pub subsidy: BitcoinDollarsSatsPattern2, - pub unclaimed_rewards: BitcoinDollarsSatsPattern3, - pub fee_dominance: MetricPattern6, - pub subsidy_dominance: MetricPattern6, - pub subsidy_usd_1y_sma: MetricPattern4, -} - -impl MetricsTree_Blocks_Rewards { - pub fn new(client: Arc, base_path: String) -> Self { - Self { - _24h_coinbase_sum: MetricsTree_Blocks_Rewards_24hCoinbaseSum::new(client.clone(), format!("{base_path}_24h_coinbase_sum")), - coinbase: BitcoinDollarsSatsPattern2::new(client.clone(), "coinbase".to_string()), - subsidy: BitcoinDollarsSatsPattern2::new(client.clone(), "subsidy".to_string()), - unclaimed_rewards: BitcoinDollarsSatsPattern3::new(client.clone(), "unclaimed_rewards".to_string()), - fee_dominance: MetricPattern6::new(client.clone(), "fee_dominance".to_string()), - subsidy_dominance: MetricPattern6::new(client.clone(), "subsidy_dominance".to_string()), - subsidy_usd_1y_sma: MetricPattern4::new(client.clone(), "subsidy_usd_1y_sma".to_string()), - } - } -} - -/// Metrics tree node. 
-pub struct MetricsTree_Blocks_Rewards_24hCoinbaseSum { - pub sats: MetricPattern11, - pub bitcoin: MetricPattern11, - pub dollars: MetricPattern11, -} - -impl MetricsTree_Blocks_Rewards_24hCoinbaseSum { - pub fn new(client: Arc, base_path: String) -> Self { - Self { - sats: MetricPattern11::new(client.clone(), "24h_coinbase_sum".to_string()), - bitcoin: MetricPattern11::new(client.clone(), "24h_coinbase_sum_btc".to_string()), - dollars: MetricPattern11::new(client.clone(), "24h_coinbase_sum_usd".to_string()), + height_24h_ago: MetricPattern20::new(client.clone(), "height_24h_ago".to_string()), + height_3d_ago: MetricPattern20::new(client.clone(), "height_3d_ago".to_string()), + height_1w_ago: MetricPattern20::new(client.clone(), "height_1w_ago".to_string()), + height_8d_ago: MetricPattern20::new(client.clone(), "height_8d_ago".to_string()), + height_9d_ago: MetricPattern20::new(client.clone(), "height_9d_ago".to_string()), + height_12d_ago: MetricPattern20::new(client.clone(), "height_12d_ago".to_string()), + height_13d_ago: MetricPattern20::new(client.clone(), "height_13d_ago".to_string()), + height_2w_ago: MetricPattern20::new(client.clone(), "height_2w_ago".to_string()), + height_21d_ago: MetricPattern20::new(client.clone(), "height_21d_ago".to_string()), + height_26d_ago: MetricPattern20::new(client.clone(), "height_26d_ago".to_string()), + height_1m_ago: MetricPattern20::new(client.clone(), "height_1m_ago".to_string()), + height_34d_ago: MetricPattern20::new(client.clone(), "height_34d_ago".to_string()), + height_55d_ago: MetricPattern20::new(client.clone(), "height_55d_ago".to_string()), + height_2m_ago: MetricPattern20::new(client.clone(), "height_2m_ago".to_string()), + height_89d_ago: MetricPattern20::new(client.clone(), "height_89d_ago".to_string()), + height_111d_ago: MetricPattern20::new(client.clone(), "height_111d_ago".to_string()), + height_144d_ago: MetricPattern20::new(client.clone(), "height_144d_ago".to_string()), + height_3m_ago: 
MetricPattern20::new(client.clone(), "height_3m_ago".to_string()), + height_6m_ago: MetricPattern20::new(client.clone(), "height_6m_ago".to_string()), + height_200d_ago: MetricPattern20::new(client.clone(), "height_200d_ago".to_string()), + height_350d_ago: MetricPattern20::new(client.clone(), "height_350d_ago".to_string()), + height_1y_ago: MetricPattern20::new(client.clone(), "height_1y_ago".to_string()), + height_2y_ago: MetricPattern20::new(client.clone(), "height_2y_ago".to_string()), + height_200w_ago: MetricPattern20::new(client.clone(), "height_200w_ago".to_string()), + height_3y_ago: MetricPattern20::new(client.clone(), "height_3y_ago".to_string()), + height_4y_ago: MetricPattern20::new(client.clone(), "height_4y_ago".to_string()), + height_5y_ago: MetricPattern20::new(client.clone(), "height_5y_ago".to_string()), + height_6y_ago: MetricPattern20::new(client.clone(), "height_6y_ago".to_string()), + height_8y_ago: MetricPattern20::new(client.clone(), "height_8y_ago".to_string()), + height_10y_ago: MetricPattern20::new(client.clone(), "height_10y_ago".to_string()), + block_count_24h_sum: MetricPattern1::new(client.clone(), "block_count_24h_sum".to_string()), + block_count_1w_sum: MetricPattern1::new(client.clone(), "block_count_1w_sum".to_string()), + block_count_1m_sum: MetricPattern1::new(client.clone(), "block_count_1m_sum".to_string()), + block_count_1y_sum: MetricPattern1::new(client.clone(), "block_count_1y_sum".to_string()), } } } /// Metrics tree node. 
pub struct MetricsTree_Blocks_Halving { - pub epoch: MetricPattern4, + pub epoch: MetricPattern1, pub blocks_before_next_halving: MetricPattern1, pub days_before_next_halving: MetricPattern1, } @@ -2985,7 +3319,7 @@ pub struct MetricsTree_Blocks_Halving { impl MetricsTree_Blocks_Halving { pub fn new(client: Arc, base_path: String) -> Self { Self { - epoch: MetricPattern4::new(client.clone(), "halvingepoch".to_string()), + epoch: MetricPattern1::new(client.clone(), "halving_epoch".to_string()), blocks_before_next_halving: MetricPattern1::new(client.clone(), "blocks_before_next_halving".to_string()), days_before_next_halving: MetricPattern1::new(client.clone(), "days_before_next_halving".to_string()), } @@ -2998,11 +3332,11 @@ pub struct MetricsTree_Blocks_Size { pub average: MetricPattern2, pub min: MetricPattern2, pub max: MetricPattern2, - pub pct10: MetricPattern6, - pub pct25: MetricPattern6, - pub median: MetricPattern6, - pub pct75: MetricPattern6, - pub pct90: MetricPattern6, + pub pct10: MetricPattern2, + pub pct25: MetricPattern2, + pub median: MetricPattern2, + pub pct75: MetricPattern2, + pub pct90: MetricPattern2, pub sum: MetricPattern2, } @@ -3013,11 +3347,11 @@ impl MetricsTree_Blocks_Size { average: MetricPattern2::new(client.clone(), "block_size_average".to_string()), min: MetricPattern2::new(client.clone(), "block_size_min".to_string()), max: MetricPattern2::new(client.clone(), "block_size_max".to_string()), - pct10: MetricPattern6::new(client.clone(), "block_size_pct10".to_string()), - pct25: MetricPattern6::new(client.clone(), "block_size_pct25".to_string()), - median: MetricPattern6::new(client.clone(), "block_size_median".to_string()), - pct75: MetricPattern6::new(client.clone(), "block_size_pct75".to_string()), - pct90: MetricPattern6::new(client.clone(), "block_size_pct90".to_string()), + pct10: MetricPattern2::new(client.clone(), "block_size_pct10".to_string()), + pct25: MetricPattern2::new(client.clone(), "block_size_pct25".to_string()), + 
median: MetricPattern2::new(client.clone(), "block_size_median".to_string()), + pct75: MetricPattern2::new(client.clone(), "block_size_pct75".to_string()), + pct90: MetricPattern2::new(client.clone(), "block_size_pct90".to_string()), sum: MetricPattern2::new(client.clone(), "block_size_sum".to_string()), } } @@ -3025,16 +3359,16 @@ impl MetricsTree_Blocks_Size { /// Metrics tree node. pub struct MetricsTree_Transactions { - pub first_txindex: MetricPattern11, - pub height: MetricPattern27, - pub txid: MetricPattern27, - pub txversion: MetricPattern27, - pub rawlocktime: MetricPattern27, - pub base_size: MetricPattern27, - pub total_size: MetricPattern27, - pub is_explicitly_rbf: MetricPattern27, - pub first_txinindex: MetricPattern27, - pub first_txoutindex: MetricPattern27, + pub first_txindex: MetricPattern20, + pub height: MetricPattern21, + pub txid: MetricPattern21, + pub txversion: MetricPattern21, + pub rawlocktime: MetricPattern21, + pub base_size: MetricPattern21, + pub total_size: MetricPattern21, + pub is_explicitly_rbf: MetricPattern21, + pub first_txinindex: MetricPattern21, + pub first_txoutindex: MetricPattern21, pub count: MetricsTree_Transactions_Count, pub size: MetricsTree_Transactions_Size, pub fees: MetricsTree_Transactions_Fees, @@ -3045,16 +3379,16 @@ pub struct MetricsTree_Transactions { impl MetricsTree_Transactions { pub fn new(client: Arc, base_path: String) -> Self { Self { - first_txindex: MetricPattern11::new(client.clone(), "first_txindex".to_string()), - height: MetricPattern27::new(client.clone(), "height".to_string()), - txid: MetricPattern27::new(client.clone(), "txid".to_string()), - txversion: MetricPattern27::new(client.clone(), "txversion".to_string()), - rawlocktime: MetricPattern27::new(client.clone(), "rawlocktime".to_string()), - base_size: MetricPattern27::new(client.clone(), "base_size".to_string()), - total_size: MetricPattern27::new(client.clone(), "total_size".to_string()), - is_explicitly_rbf: 
MetricPattern27::new(client.clone(), "is_explicitly_rbf".to_string()), - first_txinindex: MetricPattern27::new(client.clone(), "first_txinindex".to_string()), - first_txoutindex: MetricPattern27::new(client.clone(), "first_txoutindex".to_string()), + first_txindex: MetricPattern20::new(client.clone(), "first_txindex".to_string()), + height: MetricPattern21::new(client.clone(), "height".to_string()), + txid: MetricPattern21::new(client.clone(), "txid".to_string()), + txversion: MetricPattern21::new(client.clone(), "txversion".to_string()), + rawlocktime: MetricPattern21::new(client.clone(), "rawlocktime".to_string()), + base_size: MetricPattern21::new(client.clone(), "base_size".to_string()), + total_size: MetricPattern21::new(client.clone(), "total_size".to_string()), + is_explicitly_rbf: MetricPattern21::new(client.clone(), "is_explicitly_rbf".to_string()), + first_txinindex: MetricPattern21::new(client.clone(), "first_txinindex".to_string()), + first_txoutindex: MetricPattern21::new(client.clone(), "first_txoutindex".to_string()), count: MetricsTree_Transactions_Count::new(client.clone(), format!("{base_path}_count")), size: MetricsTree_Transactions_Size::new(client.clone(), format!("{base_path}_size")), fees: MetricsTree_Transactions_Fees::new(client.clone(), format!("{base_path}_fees")), @@ -3066,15 +3400,15 @@ impl MetricsTree_Transactions { /// Metrics tree node. 
pub struct MetricsTree_Transactions_Count { - pub tx_count: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, - pub is_coinbase: MetricPattern27, + pub tx_count: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, + pub is_coinbase: MetricPattern21, } impl MetricsTree_Transactions_Count { pub fn new(client: Arc, base_path: String) -> Self { Self { - tx_count: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "tx_count".to_string()), - is_coinbase: MetricPattern27::new(client.clone(), "is_coinbase".to_string()), + tx_count: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "tx_count".to_string()), + is_coinbase: MetricPattern21::new(client.clone(), "is_coinbase".to_string()), } } } @@ -3096,8 +3430,8 @@ impl MetricsTree_Transactions_Size { /// Metrics tree node. pub struct MetricsTree_Transactions_Fees { - pub input_value: MetricPattern27, - pub output_value: MetricPattern27, + pub input_value: MetricPattern21, + pub output_value: MetricPattern21, pub fee: MetricsTree_Transactions_Fees_Fee, pub fee_rate: AverageMaxMedianMinPct10Pct25Pct75Pct90TxindexPattern, } @@ -3105,8 +3439,8 @@ pub struct MetricsTree_Transactions_Fees { impl MetricsTree_Transactions_Fees { pub fn new(client: Arc, base_path: String) -> Self { Self { - input_value: MetricPattern27::new(client.clone(), "input_value".to_string()), - output_value: MetricPattern27::new(client.clone(), "output_value".to_string()), + input_value: MetricPattern21::new(client.clone(), "input_value".to_string()), + output_value: MetricPattern21::new(client.clone(), "output_value".to_string()), fee: MetricsTree_Transactions_Fees_Fee::new(client.clone(), format!("{base_path}_fee")), fee_rate: AverageMaxMedianMinPct10Pct25Pct75Pct90TxindexPattern::new(client.clone(), "fee_rate".to_string()), } @@ -3115,19 +3449,19 @@ impl MetricsTree_Transactions_Fees { /// Metrics tree node. 
pub struct MetricsTree_Transactions_Fees_Fee { - pub txindex: MetricPattern27, + pub txindex: MetricPattern21, pub sats: AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, - pub bitcoin: AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, - pub dollars: AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, + pub btc: AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, + pub usd: AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, } impl MetricsTree_Transactions_Fees_Fee { pub fn new(client: Arc, base_path: String) -> Self { Self { - txindex: MetricPattern27::new(client.clone(), "fee".to_string()), + txindex: MetricPattern21::new(client.clone(), "fee".to_string()), sats: AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "fee".to_string()), - bitcoin: AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "fee_btc".to_string()), - dollars: AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "fee_usd".to_string()), + btc: AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "fee_btc".to_string()), + usd: AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "fee_usd".to_string()), } } } @@ -3151,34 +3485,34 @@ impl MetricsTree_Transactions_Versions { /// Metrics tree node. 
pub struct MetricsTree_Transactions_Volume { - pub sent_sum: BitcoinDollarsSatsPattern4, - pub received_sum: BitcoinDollarsSatsPattern4, - pub annualized_volume: BitcoinDollarsSatsPattern5, - pub tx_per_sec: MetricPattern4, - pub outputs_per_sec: MetricPattern4, - pub inputs_per_sec: MetricPattern4, + pub sent_sum: BtcSatsUsdPattern, + pub received_sum: BtcSatsUsdPattern, + pub annualized_volume: BtcSatsUsdPattern, + pub tx_per_sec: MetricPattern1, + pub outputs_per_sec: MetricPattern1, + pub inputs_per_sec: MetricPattern1, } impl MetricsTree_Transactions_Volume { pub fn new(client: Arc, base_path: String) -> Self { Self { - sent_sum: BitcoinDollarsSatsPattern4::new(client.clone(), "sent_sum".to_string()), - received_sum: BitcoinDollarsSatsPattern4::new(client.clone(), "received_sum".to_string()), - annualized_volume: BitcoinDollarsSatsPattern5::new(client.clone(), "annualized_volume".to_string()), - tx_per_sec: MetricPattern4::new(client.clone(), "tx_per_sec".to_string()), - outputs_per_sec: MetricPattern4::new(client.clone(), "outputs_per_sec".to_string()), - inputs_per_sec: MetricPattern4::new(client.clone(), "inputs_per_sec".to_string()), + sent_sum: BtcSatsUsdPattern::new(client.clone(), "sent_sum".to_string()), + received_sum: BtcSatsUsdPattern::new(client.clone(), "received_sum".to_string()), + annualized_volume: BtcSatsUsdPattern::new(client.clone(), "annualized_volume".to_string()), + tx_per_sec: MetricPattern1::new(client.clone(), "tx_per_sec".to_string()), + outputs_per_sec: MetricPattern1::new(client.clone(), "outputs_per_sec".to_string()), + inputs_per_sec: MetricPattern1::new(client.clone(), "inputs_per_sec".to_string()), } } } /// Metrics tree node. 
pub struct MetricsTree_Inputs { - pub first_txinindex: MetricPattern11, - pub outpoint: MetricPattern12, - pub txindex: MetricPattern12, - pub outputtype: MetricPattern12, - pub typeindex: MetricPattern12, + pub first_txinindex: MetricPattern20, + pub outpoint: MetricPattern22, + pub txindex: MetricPattern22, + pub outputtype: MetricPattern22, + pub typeindex: MetricPattern22, pub spent: MetricsTree_Inputs_Spent, pub count: AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, } @@ -3186,11 +3520,11 @@ pub struct MetricsTree_Inputs { impl MetricsTree_Inputs { pub fn new(client: Arc, base_path: String) -> Self { Self { - first_txinindex: MetricPattern11::new(client.clone(), "first_txinindex".to_string()), - outpoint: MetricPattern12::new(client.clone(), "outpoint".to_string()), - txindex: MetricPattern12::new(client.clone(), "txindex".to_string()), - outputtype: MetricPattern12::new(client.clone(), "outputtype".to_string()), - typeindex: MetricPattern12::new(client.clone(), "typeindex".to_string()), + first_txinindex: MetricPattern20::new(client.clone(), "first_txinindex".to_string()), + outpoint: MetricPattern22::new(client.clone(), "outpoint".to_string()), + txindex: MetricPattern22::new(client.clone(), "txindex".to_string()), + outputtype: MetricPattern22::new(client.clone(), "outputtype".to_string()), + typeindex: MetricPattern22::new(client.clone(), "typeindex".to_string()), spent: MetricsTree_Inputs_Spent::new(client.clone(), format!("{base_path}_spent")), count: AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "input_count".to_string()), } @@ -3199,26 +3533,26 @@ impl MetricsTree_Inputs { /// Metrics tree node. 
pub struct MetricsTree_Inputs_Spent { - pub txoutindex: MetricPattern12, - pub value: MetricPattern12, + pub txoutindex: MetricPattern22, + pub value: MetricPattern22, } impl MetricsTree_Inputs_Spent { pub fn new(client: Arc, base_path: String) -> Self { Self { - txoutindex: MetricPattern12::new(client.clone(), "txoutindex".to_string()), - value: MetricPattern12::new(client.clone(), "value".to_string()), + txoutindex: MetricPattern22::new(client.clone(), "txoutindex".to_string()), + value: MetricPattern22::new(client.clone(), "value".to_string()), } } } /// Metrics tree node. pub struct MetricsTree_Outputs { - pub first_txoutindex: MetricPattern11, - pub value: MetricPattern15, - pub outputtype: MetricPattern15, - pub typeindex: MetricPattern15, - pub txindex: MetricPattern15, + pub first_txoutindex: MetricPattern20, + pub value: MetricPattern23, + pub outputtype: MetricPattern23, + pub typeindex: MetricPattern23, + pub txindex: MetricPattern23, pub spent: MetricsTree_Outputs_Spent, pub count: MetricsTree_Outputs_Count, } @@ -3226,11 +3560,11 @@ pub struct MetricsTree_Outputs { impl MetricsTree_Outputs { pub fn new(client: Arc, base_path: String) -> Self { Self { - first_txoutindex: MetricPattern11::new(client.clone(), "first_txoutindex".to_string()), - value: MetricPattern15::new(client.clone(), "value".to_string()), - outputtype: MetricPattern15::new(client.clone(), "outputtype".to_string()), - typeindex: MetricPattern15::new(client.clone(), "typeindex".to_string()), - txindex: MetricPattern15::new(client.clone(), "txindex".to_string()), + first_txoutindex: MetricPattern20::new(client.clone(), "first_txoutindex".to_string()), + value: MetricPattern23::new(client.clone(), "value".to_string()), + outputtype: MetricPattern23::new(client.clone(), "outputtype".to_string()), + typeindex: MetricPattern23::new(client.clone(), "typeindex".to_string()), + txindex: MetricPattern23::new(client.clone(), "txindex".to_string()), spent: 
MetricsTree_Outputs_Spent::new(client.clone(), format!("{base_path}_spent")), count: MetricsTree_Outputs_Count::new(client.clone(), format!("{base_path}_count")), } @@ -3239,13 +3573,13 @@ impl MetricsTree_Outputs { /// Metrics tree node. pub struct MetricsTree_Outputs_Spent { - pub txinindex: MetricPattern15, + pub txinindex: MetricPattern23, } impl MetricsTree_Outputs_Spent { pub fn new(client: Arc, base_path: String) -> Self { Self { - txinindex: MetricPattern15::new(client.clone(), "txinindex".to_string()), + txinindex: MetricPattern23::new(client.clone(), "txinindex".to_string()), } } } @@ -3267,57 +3601,57 @@ impl MetricsTree_Outputs_Count { /// Metrics tree node. pub struct MetricsTree_Addresses { - pub first_p2pk65addressindex: MetricPattern11, - pub first_p2pk33addressindex: MetricPattern11, - pub first_p2pkhaddressindex: MetricPattern11, - pub first_p2shaddressindex: MetricPattern11, - pub first_p2wpkhaddressindex: MetricPattern11, - pub first_p2wshaddressindex: MetricPattern11, - pub first_p2traddressindex: MetricPattern11, - pub first_p2aaddressindex: MetricPattern11, - pub p2pk65bytes: MetricPattern19, - pub p2pk33bytes: MetricPattern18, - pub p2pkhbytes: MetricPattern20, - pub p2shbytes: MetricPattern21, - pub p2wpkhbytes: MetricPattern23, - pub p2wshbytes: MetricPattern24, - pub p2trbytes: MetricPattern22, - pub p2abytes: MetricPattern16, + pub first_p2pk65addressindex: MetricPattern20, + pub first_p2pk33addressindex: MetricPattern20, + pub first_p2pkhaddressindex: MetricPattern20, + pub first_p2shaddressindex: MetricPattern20, + pub first_p2wpkhaddressindex: MetricPattern20, + pub first_p2wshaddressindex: MetricPattern20, + pub first_p2traddressindex: MetricPattern20, + pub first_p2aaddressindex: MetricPattern20, + pub p2pk65bytes: MetricPattern29, + pub p2pk33bytes: MetricPattern28, + pub p2pkhbytes: MetricPattern30, + pub p2shbytes: MetricPattern31, + pub p2wpkhbytes: MetricPattern33, + pub p2wshbytes: MetricPattern34, + pub p2trbytes: 
MetricPattern32, + pub p2abytes: MetricPattern26, } impl MetricsTree_Addresses { pub fn new(client: Arc, base_path: String) -> Self { Self { - first_p2pk65addressindex: MetricPattern11::new(client.clone(), "first_p2pk65addressindex".to_string()), - first_p2pk33addressindex: MetricPattern11::new(client.clone(), "first_p2pk33addressindex".to_string()), - first_p2pkhaddressindex: MetricPattern11::new(client.clone(), "first_p2pkhaddressindex".to_string()), - first_p2shaddressindex: MetricPattern11::new(client.clone(), "first_p2shaddressindex".to_string()), - first_p2wpkhaddressindex: MetricPattern11::new(client.clone(), "first_p2wpkhaddressindex".to_string()), - first_p2wshaddressindex: MetricPattern11::new(client.clone(), "first_p2wshaddressindex".to_string()), - first_p2traddressindex: MetricPattern11::new(client.clone(), "first_p2traddressindex".to_string()), - first_p2aaddressindex: MetricPattern11::new(client.clone(), "first_p2aaddressindex".to_string()), - p2pk65bytes: MetricPattern19::new(client.clone(), "p2pk65bytes".to_string()), - p2pk33bytes: MetricPattern18::new(client.clone(), "p2pk33bytes".to_string()), - p2pkhbytes: MetricPattern20::new(client.clone(), "p2pkhbytes".to_string()), - p2shbytes: MetricPattern21::new(client.clone(), "p2shbytes".to_string()), - p2wpkhbytes: MetricPattern23::new(client.clone(), "p2wpkhbytes".to_string()), - p2wshbytes: MetricPattern24::new(client.clone(), "p2wshbytes".to_string()), - p2trbytes: MetricPattern22::new(client.clone(), "p2trbytes".to_string()), - p2abytes: MetricPattern16::new(client.clone(), "p2abytes".to_string()), + first_p2pk65addressindex: MetricPattern20::new(client.clone(), "first_p2pk65addressindex".to_string()), + first_p2pk33addressindex: MetricPattern20::new(client.clone(), "first_p2pk33addressindex".to_string()), + first_p2pkhaddressindex: MetricPattern20::new(client.clone(), "first_p2pkhaddressindex".to_string()), + first_p2shaddressindex: MetricPattern20::new(client.clone(), 
"first_p2shaddressindex".to_string()), + first_p2wpkhaddressindex: MetricPattern20::new(client.clone(), "first_p2wpkhaddressindex".to_string()), + first_p2wshaddressindex: MetricPattern20::new(client.clone(), "first_p2wshaddressindex".to_string()), + first_p2traddressindex: MetricPattern20::new(client.clone(), "first_p2traddressindex".to_string()), + first_p2aaddressindex: MetricPattern20::new(client.clone(), "first_p2aaddressindex".to_string()), + p2pk65bytes: MetricPattern29::new(client.clone(), "p2pk65bytes".to_string()), + p2pk33bytes: MetricPattern28::new(client.clone(), "p2pk33bytes".to_string()), + p2pkhbytes: MetricPattern30::new(client.clone(), "p2pkhbytes".to_string()), + p2shbytes: MetricPattern31::new(client.clone(), "p2shbytes".to_string()), + p2wpkhbytes: MetricPattern33::new(client.clone(), "p2wpkhbytes".to_string()), + p2wshbytes: MetricPattern34::new(client.clone(), "p2wshbytes".to_string()), + p2trbytes: MetricPattern32::new(client.clone(), "p2trbytes".to_string()), + p2abytes: MetricPattern26::new(client.clone(), "p2abytes".to_string()), } } } /// Metrics tree node. 
pub struct MetricsTree_Scripts { - pub first_emptyoutputindex: MetricPattern11, - pub first_opreturnindex: MetricPattern11, - pub first_p2msoutputindex: MetricPattern11, - pub first_unknownoutputindex: MetricPattern11, - pub empty_to_txindex: MetricPattern9, - pub opreturn_to_txindex: MetricPattern14, - pub p2ms_to_txindex: MetricPattern17, - pub unknown_to_txindex: MetricPattern28, + pub first_emptyoutputindex: MetricPattern20, + pub first_opreturnindex: MetricPattern20, + pub first_p2msoutputindex: MetricPattern20, + pub first_unknownoutputindex: MetricPattern20, + pub empty_to_txindex: MetricPattern24, + pub opreturn_to_txindex: MetricPattern25, + pub p2ms_to_txindex: MetricPattern27, + pub unknown_to_txindex: MetricPattern35, pub count: MetricsTree_Scripts_Count, pub value: MetricsTree_Scripts_Value, } @@ -3325,14 +3659,14 @@ pub struct MetricsTree_Scripts { impl MetricsTree_Scripts { pub fn new(client: Arc, base_path: String) -> Self { Self { - first_emptyoutputindex: MetricPattern11::new(client.clone(), "first_emptyoutputindex".to_string()), - first_opreturnindex: MetricPattern11::new(client.clone(), "first_opreturnindex".to_string()), - first_p2msoutputindex: MetricPattern11::new(client.clone(), "first_p2msoutputindex".to_string()), - first_unknownoutputindex: MetricPattern11::new(client.clone(), "first_unknownoutputindex".to_string()), - empty_to_txindex: MetricPattern9::new(client.clone(), "txindex".to_string()), - opreturn_to_txindex: MetricPattern14::new(client.clone(), "txindex".to_string()), - p2ms_to_txindex: MetricPattern17::new(client.clone(), "txindex".to_string()), - unknown_to_txindex: MetricPattern28::new(client.clone(), "txindex".to_string()), + first_emptyoutputindex: MetricPattern20::new(client.clone(), "first_emptyoutputindex".to_string()), + first_opreturnindex: MetricPattern20::new(client.clone(), "first_opreturnindex".to_string()), + first_p2msoutputindex: MetricPattern20::new(client.clone(), "first_p2msoutputindex".to_string()), + 
first_unknownoutputindex: MetricPattern20::new(client.clone(), "first_unknownoutputindex".to_string()), + empty_to_txindex: MetricPattern24::new(client.clone(), "txindex".to_string()), + opreturn_to_txindex: MetricPattern25::new(client.clone(), "txindex".to_string()), + p2ms_to_txindex: MetricPattern27::new(client.clone(), "txindex".to_string()), + unknown_to_txindex: MetricPattern35::new(client.clone(), "txindex".to_string()), count: MetricsTree_Scripts_Count::new(client.clone(), format!("{base_path}_count")), value: MetricsTree_Scripts_Value::new(client.clone(), format!("{base_path}_value")), } @@ -3341,19 +3675,19 @@ impl MetricsTree_Scripts { /// Metrics tree node. pub struct MetricsTree_Scripts_Count { - pub p2a: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, - pub p2ms: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, - pub p2pk33: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, - pub p2pk65: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, - pub p2pkh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, - pub p2sh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, - pub p2tr: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, - pub p2wpkh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, - pub p2wsh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, - pub opreturn: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, - pub emptyoutput: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, - pub unknownoutput: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, - pub segwit: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, + pub p2a: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, + pub p2ms: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, + pub p2pk33: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, + pub 
p2pk65: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, + pub p2pkh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, + pub p2sh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, + pub p2tr: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, + pub p2wpkh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, + pub p2wsh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, + pub opreturn: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, + pub emptyoutput: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, + pub unknownoutput: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, + pub segwit: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, pub taproot_adoption: BaseCumulativeSumPattern, pub segwit_adoption: BaseCumulativeSumPattern, } @@ -3361,19 +3695,19 @@ pub struct MetricsTree_Scripts_Count { impl MetricsTree_Scripts_Count { pub fn new(client: Arc, base_path: String) -> Self { Self { - p2a: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "p2a_count".to_string()), - p2ms: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "p2ms_count".to_string()), - p2pk33: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "p2pk33_count".to_string()), - p2pk65: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "p2pk65_count".to_string()), - p2pkh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "p2pkh_count".to_string()), - p2sh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "p2sh_count".to_string()), - p2tr: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "p2tr_count".to_string()), - p2wpkh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), 
"p2wpkh_count".to_string()), - p2wsh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "p2wsh_count".to_string()), - opreturn: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "opreturn_count".to_string()), - emptyoutput: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "emptyoutput_count".to_string()), - unknownoutput: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "unknownoutput_count".to_string()), - segwit: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "segwit_count".to_string()), + p2a: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "p2a_count".to_string()), + p2ms: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "p2ms_count".to_string()), + p2pk33: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "p2pk33_count".to_string()), + p2pk65: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "p2pk65_count".to_string()), + p2pkh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "p2pkh_count".to_string()), + p2sh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "p2sh_count".to_string()), + p2tr: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "p2tr_count".to_string()), + p2wpkh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "p2wpkh_count".to_string()), + p2wsh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "p2wsh_count".to_string()), + opreturn: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "opreturn_count".to_string()), + emptyoutput: 
AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "emptyoutput_count".to_string()), + unknownoutput: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "unknownoutput_count".to_string()), + segwit: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "segwit_count".to_string()), taproot_adoption: BaseCumulativeSumPattern::new(client.clone(), "taproot_adoption".to_string()), segwit_adoption: BaseCumulativeSumPattern::new(client.clone(), "segwit_adoption".to_string()), } @@ -3382,28 +3716,143 @@ impl MetricsTree_Scripts_Count { /// Metrics tree node. pub struct MetricsTree_Scripts_Value { - pub opreturn: BitcoinDollarsSatsPattern2, + pub opreturn: BtcSatsUsdPattern3, } impl MetricsTree_Scripts_Value { pub fn new(client: Arc, base_path: String) -> Self { Self { - opreturn: BitcoinDollarsSatsPattern2::new(client.clone(), "opreturn_value".to_string()), + opreturn: BtcSatsUsdPattern3::new(client.clone(), "opreturn_value".to_string()), + } + } +} + +/// Metrics tree node. +pub struct MetricsTree_Mining { + pub rewards: MetricsTree_Mining_Rewards, + pub hashrate: MetricsTree_Mining_Hashrate, +} + +impl MetricsTree_Mining { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + rewards: MetricsTree_Mining_Rewards::new(client.clone(), format!("{base_path}_rewards")), + hashrate: MetricsTree_Mining_Hashrate::new(client.clone(), format!("{base_path}_hashrate")), + } + } +} + +/// Metrics tree node. 
+pub struct MetricsTree_Mining_Rewards { + pub coinbase_24h_sum: BtcSatsUsdPattern, + pub coinbase_7d_sum: BtcSatsUsdPattern, + pub coinbase_30d_sum: BtcSatsUsdPattern, + pub coinbase_1y_sum: BtcSatsUsdPattern, + pub fee_24h_sum: BtcSatsUsdPattern, + pub fee_7d_sum: BtcSatsUsdPattern, + pub fee_30d_sum: BtcSatsUsdPattern, + pub fee_1y_sum: BtcSatsUsdPattern, + pub coinbase: BtcSatsUsdPattern3, + pub subsidy: BtcSatsUsdPattern3, + pub unclaimed_rewards: BtcSatsUsdPattern2, + pub fee_dominance: MetricPattern1, + pub fee_dominance_24h: MetricPattern1, + pub fee_dominance_7d: MetricPattern1, + pub fee_dominance_30d: MetricPattern1, + pub fee_dominance_1y: MetricPattern1, + pub subsidy_dominance: MetricPattern1, + pub subsidy_dominance_24h: MetricPattern1, + pub subsidy_dominance_7d: MetricPattern1, + pub subsidy_dominance_30d: MetricPattern1, + pub subsidy_dominance_1y: MetricPattern1, + pub subsidy_usd_1y_sma: MetricPattern1, +} + +impl MetricsTree_Mining_Rewards { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + coinbase_24h_sum: BtcSatsUsdPattern::new(client.clone(), "coinbase_24h_sum".to_string()), + coinbase_7d_sum: BtcSatsUsdPattern::new(client.clone(), "coinbase_7d_sum".to_string()), + coinbase_30d_sum: BtcSatsUsdPattern::new(client.clone(), "coinbase_30d_sum".to_string()), + coinbase_1y_sum: BtcSatsUsdPattern::new(client.clone(), "coinbase_1y_sum".to_string()), + fee_24h_sum: BtcSatsUsdPattern::new(client.clone(), "fee_24h_sum".to_string()), + fee_7d_sum: BtcSatsUsdPattern::new(client.clone(), "fee_7d_sum".to_string()), + fee_30d_sum: BtcSatsUsdPattern::new(client.clone(), "fee_30d_sum".to_string()), + fee_1y_sum: BtcSatsUsdPattern::new(client.clone(), "fee_1y_sum".to_string()), + coinbase: BtcSatsUsdPattern3::new(client.clone(), "coinbase".to_string()), + subsidy: BtcSatsUsdPattern3::new(client.clone(), "subsidy".to_string()), + unclaimed_rewards: BtcSatsUsdPattern2::new(client.clone(), "unclaimed_rewards".to_string()), + fee_dominance: 
MetricPattern1::new(client.clone(), "fee_dominance".to_string()), + fee_dominance_24h: MetricPattern1::new(client.clone(), "fee_dominance_24h".to_string()), + fee_dominance_7d: MetricPattern1::new(client.clone(), "fee_dominance_7d".to_string()), + fee_dominance_30d: MetricPattern1::new(client.clone(), "fee_dominance_30d".to_string()), + fee_dominance_1y: MetricPattern1::new(client.clone(), "fee_dominance_1y".to_string()), + subsidy_dominance: MetricPattern1::new(client.clone(), "subsidy_dominance".to_string()), + subsidy_dominance_24h: MetricPattern1::new(client.clone(), "subsidy_dominance_24h".to_string()), + subsidy_dominance_7d: MetricPattern1::new(client.clone(), "subsidy_dominance_7d".to_string()), + subsidy_dominance_30d: MetricPattern1::new(client.clone(), "subsidy_dominance_30d".to_string()), + subsidy_dominance_1y: MetricPattern1::new(client.clone(), "subsidy_dominance_1y".to_string()), + subsidy_usd_1y_sma: MetricPattern1::new(client.clone(), "subsidy_usd_1y_sma".to_string()), + } + } +} + +/// Metrics tree node. 
+pub struct MetricsTree_Mining_Hashrate { + pub hash_rate: MetricPattern1, + pub hash_rate_1w_sma: MetricPattern1, + pub hash_rate_1m_sma: MetricPattern1, + pub hash_rate_2m_sma: MetricPattern1, + pub hash_rate_1y_sma: MetricPattern1, + pub hash_rate_ath: MetricPattern1, + pub hash_rate_drawdown: MetricPattern1, + pub hash_price_ths: MetricPattern1, + pub hash_price_ths_min: MetricPattern1, + pub hash_price_phs: MetricPattern1, + pub hash_price_phs_min: MetricPattern1, + pub hash_price_rebound: MetricPattern1, + pub hash_value_ths: MetricPattern1, + pub hash_value_ths_min: MetricPattern1, + pub hash_value_phs: MetricPattern1, + pub hash_value_phs_min: MetricPattern1, + pub hash_value_rebound: MetricPattern1, +} + +impl MetricsTree_Mining_Hashrate { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + hash_rate: MetricPattern1::new(client.clone(), "hash_rate".to_string()), + hash_rate_1w_sma: MetricPattern1::new(client.clone(), "hash_rate_1w_sma".to_string()), + hash_rate_1m_sma: MetricPattern1::new(client.clone(), "hash_rate_1m_sma".to_string()), + hash_rate_2m_sma: MetricPattern1::new(client.clone(), "hash_rate_2m_sma".to_string()), + hash_rate_1y_sma: MetricPattern1::new(client.clone(), "hash_rate_1y_sma".to_string()), + hash_rate_ath: MetricPattern1::new(client.clone(), "hash_rate_ath".to_string()), + hash_rate_drawdown: MetricPattern1::new(client.clone(), "hash_rate_drawdown".to_string()), + hash_price_ths: MetricPattern1::new(client.clone(), "hash_price_ths".to_string()), + hash_price_ths_min: MetricPattern1::new(client.clone(), "hash_price_ths_min".to_string()), + hash_price_phs: MetricPattern1::new(client.clone(), "hash_price_phs".to_string()), + hash_price_phs_min: MetricPattern1::new(client.clone(), "hash_price_phs_min".to_string()), + hash_price_rebound: MetricPattern1::new(client.clone(), "hash_price_rebound".to_string()), + hash_value_ths: MetricPattern1::new(client.clone(), "hash_value_ths".to_string()), + hash_value_ths_min: 
MetricPattern1::new(client.clone(), "hash_value_ths_min".to_string()), + hash_value_phs: MetricPattern1::new(client.clone(), "hash_value_phs".to_string()), + hash_value_phs_min: MetricPattern1::new(client.clone(), "hash_value_phs_min".to_string()), + hash_value_rebound: MetricPattern1::new(client.clone(), "hash_value_rebound".to_string()), } } } /// Metrics tree node. pub struct MetricsTree_Positions { - pub block_position: MetricPattern11, - pub tx_position: MetricPattern27, + pub block_position: MetricPattern20, + pub tx_position: MetricPattern21, } impl MetricsTree_Positions { pub fn new(client: Arc, base_path: String) -> Self { Self { - block_position: MetricPattern11::new(client.clone(), "position".to_string()), - tx_position: MetricPattern27::new(client.clone(), "position".to_string()), + block_position: MetricPattern20::new(client.clone(), "position".to_string()), + tx_position: MetricPattern21::new(client.clone(), "position".to_string()), } } } @@ -3456,15 +3905,15 @@ impl MetricsTree_Cointime_Activity { /// Metrics tree node. pub struct MetricsTree_Cointime_Supply { - pub vaulted_supply: BitcoinDollarsSatsPattern4, - pub active_supply: BitcoinDollarsSatsPattern4, + pub vaulted_supply: BtcSatsUsdPattern, + pub active_supply: BtcSatsUsdPattern, } impl MetricsTree_Cointime_Supply { pub fn new(client: Arc, base_path: String) -> Self { Self { - vaulted_supply: BitcoinDollarsSatsPattern4::new(client.clone(), "vaulted_supply".to_string()), - active_supply: BitcoinDollarsSatsPattern4::new(client.clone(), "active_supply".to_string()), + vaulted_supply: BtcSatsUsdPattern::new(client.clone(), "vaulted_supply".to_string()), + active_supply: BtcSatsUsdPattern::new(client.clone(), "active_supply".to_string()), } } } @@ -3511,26 +3960,26 @@ impl MetricsTree_Cointime_Cap { /// Metrics tree node. 
pub struct MetricsTree_Cointime_Pricing { - pub vaulted_price: DollarsSatsPattern, + pub vaulted_price: SatsUsdPattern, pub vaulted_price_ratio: RatioPattern, - pub active_price: DollarsSatsPattern, + pub active_price: SatsUsdPattern, pub active_price_ratio: RatioPattern, - pub true_market_mean: DollarsSatsPattern, + pub true_market_mean: SatsUsdPattern, pub true_market_mean_ratio: RatioPattern, - pub cointime_price: DollarsSatsPattern, + pub cointime_price: SatsUsdPattern, pub cointime_price_ratio: RatioPattern, } impl MetricsTree_Cointime_Pricing { pub fn new(client: Arc, base_path: String) -> Self { Self { - vaulted_price: DollarsSatsPattern::new(client.clone(), "vaulted_price".to_string()), + vaulted_price: SatsUsdPattern::new(client.clone(), "vaulted_price".to_string()), vaulted_price_ratio: RatioPattern::new(client.clone(), "vaulted_price_ratio".to_string()), - active_price: DollarsSatsPattern::new(client.clone(), "active_price".to_string()), + active_price: SatsUsdPattern::new(client.clone(), "active_price".to_string()), active_price_ratio: RatioPattern::new(client.clone(), "active_price_ratio".to_string()), - true_market_mean: DollarsSatsPattern::new(client.clone(), "true_market_mean".to_string()), + true_market_mean: SatsUsdPattern::new(client.clone(), "true_market_mean".to_string()), true_market_mean_ratio: RatioPattern::new(client.clone(), "true_market_mean_ratio".to_string()), - cointime_price: DollarsSatsPattern::new(client.clone(), "cointime_price".to_string()), + cointime_price: SatsUsdPattern::new(client.clone(), "cointime_price".to_string()), cointime_price_ratio: RatioPattern::new(client.clone(), "cointime_price_ratio".to_string()), } } @@ -3538,34 +3987,34 @@ impl MetricsTree_Cointime_Pricing { /// Metrics tree node. 
pub struct MetricsTree_Cointime_Adjusted { - pub cointime_adj_inflation_rate: MetricPattern4, - pub cointime_adj_tx_btc_velocity: MetricPattern4, - pub cointime_adj_tx_usd_velocity: MetricPattern4, + pub cointime_adj_inflation_rate: MetricPattern1, + pub cointime_adj_tx_btc_velocity: MetricPattern1, + pub cointime_adj_tx_usd_velocity: MetricPattern1, } impl MetricsTree_Cointime_Adjusted { pub fn new(client: Arc, base_path: String) -> Self { Self { - cointime_adj_inflation_rate: MetricPattern4::new(client.clone(), "cointime_adj_inflation_rate".to_string()), - cointime_adj_tx_btc_velocity: MetricPattern4::new(client.clone(), "cointime_adj_tx_btc_velocity".to_string()), - cointime_adj_tx_usd_velocity: MetricPattern4::new(client.clone(), "cointime_adj_tx_usd_velocity".to_string()), + cointime_adj_inflation_rate: MetricPattern1::new(client.clone(), "cointime_adj_inflation_rate".to_string()), + cointime_adj_tx_btc_velocity: MetricPattern1::new(client.clone(), "cointime_adj_tx_btc_velocity".to_string()), + cointime_adj_tx_usd_velocity: MetricPattern1::new(client.clone(), "cointime_adj_tx_usd_velocity".to_string()), } } } /// Metrics tree node. 
pub struct MetricsTree_Cointime_ReserveRisk { - pub vocdd_365d_median: MetricPattern6, - pub hodl_bank: MetricPattern6, - pub reserve_risk: MetricPattern4, + pub vocdd_365d_median: MetricPattern20, + pub hodl_bank: MetricPattern20, + pub reserve_risk: MetricPattern1, } impl MetricsTree_Cointime_ReserveRisk { pub fn new(client: Arc, base_path: String) -> Self { Self { - vocdd_365d_median: MetricPattern6::new(client.clone(), "vocdd_365d_median".to_string()), - hodl_bank: MetricPattern6::new(client.clone(), "hodl_bank".to_string()), - reserve_risk: MetricPattern4::new(client.clone(), "reserve_risk".to_string()), + vocdd_365d_median: MetricPattern20::new(client.clone(), "vocdd_365d_median".to_string()), + hodl_bank: MetricPattern20::new(client.clone(), "hodl_bank".to_string()), + reserve_risk: MetricPattern1::new(client.clone(), "reserve_risk".to_string()), } } } @@ -3623,13 +4072,21 @@ pub struct MetricsTree_Indexes { pub height: MetricsTree_Indexes_Height, pub difficultyepoch: MetricsTree_Indexes_Difficultyepoch, pub halvingepoch: MetricsTree_Indexes_Halvingepoch, - pub dateindex: MetricsTree_Indexes_Dateindex, - pub weekindex: MetricsTree_Indexes_Weekindex, - pub monthindex: MetricsTree_Indexes_Monthindex, - pub quarterindex: MetricsTree_Indexes_Quarterindex, - pub semesterindex: MetricsTree_Indexes_Semesterindex, - pub yearindex: MetricsTree_Indexes_Yearindex, - pub decadeindex: MetricsTree_Indexes_Decadeindex, + pub minute1: MetricsTree_Indexes_Minute1, + pub minute5: MetricsTree_Indexes_Minute5, + pub minute10: MetricsTree_Indexes_Minute10, + pub minute30: MetricsTree_Indexes_Minute30, + pub hour1: MetricsTree_Indexes_Hour1, + pub hour4: MetricsTree_Indexes_Hour4, + pub hour12: MetricsTree_Indexes_Hour12, + pub day1: MetricsTree_Indexes_Day1, + pub day3: MetricsTree_Indexes_Day3, + pub week1: MetricsTree_Indexes_Week1, + pub month1: MetricsTree_Indexes_Month1, + pub month3: MetricsTree_Indexes_Month3, + pub month6: MetricsTree_Indexes_Month6, + pub year1: 
MetricsTree_Indexes_Year1, + pub year10: MetricsTree_Indexes_Year10, pub txindex: MetricsTree_Indexes_Txindex, pub txinindex: MetricsTree_Indexes_Txinindex, pub txoutindex: MetricsTree_Indexes_Txoutindex, @@ -3642,13 +4099,21 @@ impl MetricsTree_Indexes { height: MetricsTree_Indexes_Height::new(client.clone(), format!("{base_path}_height")), difficultyepoch: MetricsTree_Indexes_Difficultyepoch::new(client.clone(), format!("{base_path}_difficultyepoch")), halvingepoch: MetricsTree_Indexes_Halvingepoch::new(client.clone(), format!("{base_path}_halvingepoch")), - dateindex: MetricsTree_Indexes_Dateindex::new(client.clone(), format!("{base_path}_dateindex")), - weekindex: MetricsTree_Indexes_Weekindex::new(client.clone(), format!("{base_path}_weekindex")), - monthindex: MetricsTree_Indexes_Monthindex::new(client.clone(), format!("{base_path}_monthindex")), - quarterindex: MetricsTree_Indexes_Quarterindex::new(client.clone(), format!("{base_path}_quarterindex")), - semesterindex: MetricsTree_Indexes_Semesterindex::new(client.clone(), format!("{base_path}_semesterindex")), - yearindex: MetricsTree_Indexes_Yearindex::new(client.clone(), format!("{base_path}_yearindex")), - decadeindex: MetricsTree_Indexes_Decadeindex::new(client.clone(), format!("{base_path}_decadeindex")), + minute1: MetricsTree_Indexes_Minute1::new(client.clone(), format!("{base_path}_minute1")), + minute5: MetricsTree_Indexes_Minute5::new(client.clone(), format!("{base_path}_minute5")), + minute10: MetricsTree_Indexes_Minute10::new(client.clone(), format!("{base_path}_minute10")), + minute30: MetricsTree_Indexes_Minute30::new(client.clone(), format!("{base_path}_minute30")), + hour1: MetricsTree_Indexes_Hour1::new(client.clone(), format!("{base_path}_hour1")), + hour4: MetricsTree_Indexes_Hour4::new(client.clone(), format!("{base_path}_hour4")), + hour12: MetricsTree_Indexes_Hour12::new(client.clone(), format!("{base_path}_hour12")), + day1: MetricsTree_Indexes_Day1::new(client.clone(), 
format!("{base_path}_day1")), + day3: MetricsTree_Indexes_Day3::new(client.clone(), format!("{base_path}_day3")), + week1: MetricsTree_Indexes_Week1::new(client.clone(), format!("{base_path}_week1")), + month1: MetricsTree_Indexes_Month1::new(client.clone(), format!("{base_path}_month1")), + month3: MetricsTree_Indexes_Month3::new(client.clone(), format!("{base_path}_month3")), + month6: MetricsTree_Indexes_Month6::new(client.clone(), format!("{base_path}_month6")), + year1: MetricsTree_Indexes_Year1::new(client.clone(), format!("{base_path}_year1")), + year10: MetricsTree_Indexes_Year10::new(client.clone(), format!("{base_path}_year10")), txindex: MetricsTree_Indexes_Txindex::new(client.clone(), format!("{base_path}_txindex")), txinindex: MetricsTree_Indexes_Txinindex::new(client.clone(), format!("{base_path}_txinindex")), txoutindex: MetricsTree_Indexes_Txoutindex::new(client.clone(), format!("{base_path}_txoutindex")), @@ -3693,397 +4158,521 @@ impl MetricsTree_Indexes_Address { /// Metrics tree node. pub struct MetricsTree_Indexes_Address_P2pk33 { - pub identity: MetricPattern18, + pub identity: MetricPattern28, } impl MetricsTree_Indexes_Address_P2pk33 { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern18::new(client.clone(), "p2pk33addressindex".to_string()), + identity: MetricPattern28::new(client.clone(), "p2pk33addressindex".to_string()), } } } /// Metrics tree node. pub struct MetricsTree_Indexes_Address_P2pk65 { - pub identity: MetricPattern19, + pub identity: MetricPattern29, } impl MetricsTree_Indexes_Address_P2pk65 { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern19::new(client.clone(), "p2pk65addressindex".to_string()), + identity: MetricPattern29::new(client.clone(), "p2pk65addressindex".to_string()), } } } /// Metrics tree node. 
pub struct MetricsTree_Indexes_Address_P2pkh { - pub identity: MetricPattern20, + pub identity: MetricPattern30, } impl MetricsTree_Indexes_Address_P2pkh { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern20::new(client.clone(), "p2pkhaddressindex".to_string()), + identity: MetricPattern30::new(client.clone(), "p2pkhaddressindex".to_string()), } } } /// Metrics tree node. pub struct MetricsTree_Indexes_Address_P2sh { - pub identity: MetricPattern21, + pub identity: MetricPattern31, } impl MetricsTree_Indexes_Address_P2sh { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern21::new(client.clone(), "p2shaddressindex".to_string()), + identity: MetricPattern31::new(client.clone(), "p2shaddressindex".to_string()), } } } /// Metrics tree node. pub struct MetricsTree_Indexes_Address_P2tr { - pub identity: MetricPattern22, + pub identity: MetricPattern32, } impl MetricsTree_Indexes_Address_P2tr { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern22::new(client.clone(), "p2traddressindex".to_string()), + identity: MetricPattern32::new(client.clone(), "p2traddressindex".to_string()), } } } /// Metrics tree node. pub struct MetricsTree_Indexes_Address_P2wpkh { - pub identity: MetricPattern23, + pub identity: MetricPattern33, } impl MetricsTree_Indexes_Address_P2wpkh { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern23::new(client.clone(), "p2wpkhaddressindex".to_string()), + identity: MetricPattern33::new(client.clone(), "p2wpkhaddressindex".to_string()), } } } /// Metrics tree node. 
pub struct MetricsTree_Indexes_Address_P2wsh { - pub identity: MetricPattern24, + pub identity: MetricPattern34, } impl MetricsTree_Indexes_Address_P2wsh { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern24::new(client.clone(), "p2wshaddressindex".to_string()), + identity: MetricPattern34::new(client.clone(), "p2wshaddressindex".to_string()), } } } /// Metrics tree node. pub struct MetricsTree_Indexes_Address_P2a { - pub identity: MetricPattern16, + pub identity: MetricPattern26, } impl MetricsTree_Indexes_Address_P2a { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern16::new(client.clone(), "p2aaddressindex".to_string()), + identity: MetricPattern26::new(client.clone(), "p2aaddressindex".to_string()), } } } /// Metrics tree node. pub struct MetricsTree_Indexes_Address_P2ms { - pub identity: MetricPattern17, + pub identity: MetricPattern27, } impl MetricsTree_Indexes_Address_P2ms { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern17::new(client.clone(), "p2msoutputindex".to_string()), + identity: MetricPattern27::new(client.clone(), "p2msoutputindex".to_string()), } } } /// Metrics tree node. pub struct MetricsTree_Indexes_Address_Empty { - pub identity: MetricPattern9, + pub identity: MetricPattern24, } impl MetricsTree_Indexes_Address_Empty { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern9::new(client.clone(), "emptyoutputindex".to_string()), + identity: MetricPattern24::new(client.clone(), "emptyoutputindex".to_string()), } } } /// Metrics tree node. 
pub struct MetricsTree_Indexes_Address_Unknown { - pub identity: MetricPattern28, + pub identity: MetricPattern35, } impl MetricsTree_Indexes_Address_Unknown { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern28::new(client.clone(), "unknownoutputindex".to_string()), + identity: MetricPattern35::new(client.clone(), "unknownoutputindex".to_string()), } } } /// Metrics tree node. pub struct MetricsTree_Indexes_Address_Opreturn { - pub identity: MetricPattern14, + pub identity: MetricPattern25, } impl MetricsTree_Indexes_Address_Opreturn { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern14::new(client.clone(), "opreturnindex".to_string()), + identity: MetricPattern25::new(client.clone(), "opreturnindex".to_string()), } } } /// Metrics tree node. pub struct MetricsTree_Indexes_Height { - pub identity: MetricPattern11, - pub dateindex: MetricPattern11, - pub difficultyepoch: MetricPattern11, - pub halvingepoch: MetricPattern11, - pub txindex_count: MetricPattern11, + pub identity: MetricPattern20, + pub minute1: MetricPattern20, + pub minute5: MetricPattern20, + pub minute10: MetricPattern20, + pub minute30: MetricPattern20, + pub hour1: MetricPattern20, + pub hour4: MetricPattern20, + pub hour12: MetricPattern20, + pub day1: MetricPattern20, + pub day3: MetricPattern20, + pub difficultyepoch: MetricPattern20, + pub halvingepoch: MetricPattern20, + pub week1: MetricPattern20, + pub month1: MetricPattern20, + pub month3: MetricPattern20, + pub month6: MetricPattern20, + pub year1: MetricPattern20, + pub year10: MetricPattern20, + pub txindex_count: MetricPattern20, } impl MetricsTree_Indexes_Height { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern11::new(client.clone(), "height".to_string()), - dateindex: MetricPattern11::new(client.clone(), "dateindex".to_string()), - difficultyepoch: MetricPattern11::new(client.clone(), "difficultyepoch".to_string()), - 
halvingepoch: MetricPattern11::new(client.clone(), "halvingepoch".to_string()), - txindex_count: MetricPattern11::new(client.clone(), "txindex_count".to_string()), + identity: MetricPattern20::new(client.clone(), "height".to_string()), + minute1: MetricPattern20::new(client.clone(), "minute1".to_string()), + minute5: MetricPattern20::new(client.clone(), "minute5".to_string()), + minute10: MetricPattern20::new(client.clone(), "minute10".to_string()), + minute30: MetricPattern20::new(client.clone(), "minute30".to_string()), + hour1: MetricPattern20::new(client.clone(), "hour1".to_string()), + hour4: MetricPattern20::new(client.clone(), "hour4".to_string()), + hour12: MetricPattern20::new(client.clone(), "hour12".to_string()), + day1: MetricPattern20::new(client.clone(), "day1".to_string()), + day3: MetricPattern20::new(client.clone(), "day3".to_string()), + difficultyepoch: MetricPattern20::new(client.clone(), "difficultyepoch".to_string()), + halvingepoch: MetricPattern20::new(client.clone(), "halvingepoch".to_string()), + week1: MetricPattern20::new(client.clone(), "week1".to_string()), + month1: MetricPattern20::new(client.clone(), "month1".to_string()), + month3: MetricPattern20::new(client.clone(), "month3".to_string()), + month6: MetricPattern20::new(client.clone(), "month6".to_string()), + year1: MetricPattern20::new(client.clone(), "year1".to_string()), + year10: MetricPattern20::new(client.clone(), "year10".to_string()), + txindex_count: MetricPattern20::new(client.clone(), "txindex_count".to_string()), } } } /// Metrics tree node. 
pub struct MetricsTree_Indexes_Difficultyepoch { - pub identity: MetricPattern8, - pub first_height: MetricPattern8, - pub height_count: MetricPattern8, + pub identity: MetricPattern19, + pub first_height: MetricPattern19, + pub height_count: MetricPattern19, } impl MetricsTree_Indexes_Difficultyepoch { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern8::new(client.clone(), "difficultyepoch".to_string()), - first_height: MetricPattern8::new(client.clone(), "first_height".to_string()), - height_count: MetricPattern8::new(client.clone(), "height_count".to_string()), + identity: MetricPattern19::new(client.clone(), "difficultyepoch".to_string()), + first_height: MetricPattern19::new(client.clone(), "first_height".to_string()), + height_count: MetricPattern19::new(client.clone(), "height_count".to_string()), } } } /// Metrics tree node. pub struct MetricsTree_Indexes_Halvingepoch { - pub identity: MetricPattern10, - pub first_height: MetricPattern10, + pub identity: MetricPattern18, + pub first_height: MetricPattern18, } impl MetricsTree_Indexes_Halvingepoch { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern10::new(client.clone(), "halvingepoch".to_string()), - first_height: MetricPattern10::new(client.clone(), "first_height".to_string()), + identity: MetricPattern18::new(client.clone(), "halvingepoch".to_string()), + first_height: MetricPattern18::new(client.clone(), "first_height".to_string()), } } } /// Metrics tree node. 
-pub struct MetricsTree_Indexes_Dateindex { - pub identity: MetricPattern6, - pub date: MetricPattern6, +pub struct MetricsTree_Indexes_Minute1 { + pub identity: MetricPattern3, + pub first_height: MetricPattern3, +} + +impl MetricsTree_Indexes_Minute1 { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + identity: MetricPattern3::new(client.clone(), "minute1".to_string()), + first_height: MetricPattern3::new(client.clone(), "minute1_first_height".to_string()), + } + } +} + +/// Metrics tree node. +pub struct MetricsTree_Indexes_Minute5 { + pub identity: MetricPattern4, + pub first_height: MetricPattern4, +} + +impl MetricsTree_Indexes_Minute5 { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + identity: MetricPattern4::new(client.clone(), "minute5".to_string()), + first_height: MetricPattern4::new(client.clone(), "minute5_first_height".to_string()), + } + } +} + +/// Metrics tree node. +pub struct MetricsTree_Indexes_Minute10 { + pub identity: MetricPattern5, + pub first_height: MetricPattern5, +} + +impl MetricsTree_Indexes_Minute10 { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + identity: MetricPattern5::new(client.clone(), "minute10".to_string()), + first_height: MetricPattern5::new(client.clone(), "minute10_first_height".to_string()), + } + } +} + +/// Metrics tree node. 
+pub struct MetricsTree_Indexes_Minute30 { + pub identity: MetricPattern6, pub first_height: MetricPattern6, - pub height_count: MetricPattern6, - pub weekindex: MetricPattern6, - pub monthindex: MetricPattern6, } -impl MetricsTree_Indexes_Dateindex { +impl MetricsTree_Indexes_Minute30 { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern6::new(client.clone(), "dateindex".to_string()), - date: MetricPattern6::new(client.clone(), "date".to_string()), - first_height: MetricPattern6::new(client.clone(), "first_height".to_string()), - height_count: MetricPattern6::new(client.clone(), "height_count".to_string()), - weekindex: MetricPattern6::new(client.clone(), "weekindex".to_string()), - monthindex: MetricPattern6::new(client.clone(), "monthindex".to_string()), + identity: MetricPattern6::new(client.clone(), "minute30".to_string()), + first_height: MetricPattern6::new(client.clone(), "minute30_first_height".to_string()), } } } /// Metrics tree node. -pub struct MetricsTree_Indexes_Weekindex { - pub identity: MetricPattern29, - pub date: MetricPattern29, - pub first_dateindex: MetricPattern29, - pub dateindex_count: MetricPattern29, +pub struct MetricsTree_Indexes_Hour1 { + pub identity: MetricPattern7, + pub first_height: MetricPattern7, } -impl MetricsTree_Indexes_Weekindex { +impl MetricsTree_Indexes_Hour1 { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern29::new(client.clone(), "weekindex".to_string()), - date: MetricPattern29::new(client.clone(), "date".to_string()), - first_dateindex: MetricPattern29::new(client.clone(), "first_dateindex".to_string()), - dateindex_count: MetricPattern29::new(client.clone(), "dateindex_count".to_string()), + identity: MetricPattern7::new(client.clone(), "hour1".to_string()), + first_height: MetricPattern7::new(client.clone(), "hour1_first_height".to_string()), } } } /// Metrics tree node. 
-pub struct MetricsTree_Indexes_Monthindex { - pub identity: MetricPattern13, +pub struct MetricsTree_Indexes_Hour4 { + pub identity: MetricPattern8, + pub first_height: MetricPattern8, +} + +impl MetricsTree_Indexes_Hour4 { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + identity: MetricPattern8::new(client.clone(), "hour4".to_string()), + first_height: MetricPattern8::new(client.clone(), "hour4_first_height".to_string()), + } + } +} + +/// Metrics tree node. +pub struct MetricsTree_Indexes_Hour12 { + pub identity: MetricPattern9, + pub first_height: MetricPattern9, +} + +impl MetricsTree_Indexes_Hour12 { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + identity: MetricPattern9::new(client.clone(), "hour12".to_string()), + first_height: MetricPattern9::new(client.clone(), "hour12_first_height".to_string()), + } + } +} + +/// Metrics tree node. +pub struct MetricsTree_Indexes_Day1 { + pub identity: MetricPattern10, + pub date: MetricPattern10, + pub first_height: MetricPattern10, + pub height_count: MetricPattern10, +} + +impl MetricsTree_Indexes_Day1 { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + identity: MetricPattern10::new(client.clone(), "day1".to_string()), + date: MetricPattern10::new(client.clone(), "date".to_string()), + first_height: MetricPattern10::new(client.clone(), "first_height".to_string()), + height_count: MetricPattern10::new(client.clone(), "height_count".to_string()), + } + } +} + +/// Metrics tree node. +pub struct MetricsTree_Indexes_Day3 { + pub identity: MetricPattern11, + pub first_height: MetricPattern11, +} + +impl MetricsTree_Indexes_Day3 { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + identity: MetricPattern11::new(client.clone(), "day3".to_string()), + first_height: MetricPattern11::new(client.clone(), "day3_first_height".to_string()), + } + } +} + +/// Metrics tree node. 
+pub struct MetricsTree_Indexes_Week1 { + pub identity: MetricPattern12, + pub date: MetricPattern12, + pub first_height: MetricPattern12, +} + +impl MetricsTree_Indexes_Week1 { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + identity: MetricPattern12::new(client.clone(), "week1".to_string()), + date: MetricPattern12::new(client.clone(), "date".to_string()), + first_height: MetricPattern12::new(client.clone(), "week1_first_height".to_string()), + } + } +} + +/// Metrics tree node. +pub struct MetricsTree_Indexes_Month1 { + pub identity: MetricPattern13, pub date: MetricPattern13, - pub first_dateindex: MetricPattern13, - pub dateindex_count: MetricPattern13, - pub quarterindex: MetricPattern13, - pub semesterindex: MetricPattern13, - pub yearindex: MetricPattern13, + pub first_height: MetricPattern13, } -impl MetricsTree_Indexes_Monthindex { +impl MetricsTree_Indexes_Month1 { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern13::new(client.clone(), "monthindex".to_string()), + identity: MetricPattern13::new(client.clone(), "month1".to_string()), date: MetricPattern13::new(client.clone(), "date".to_string()), - first_dateindex: MetricPattern13::new(client.clone(), "first_dateindex".to_string()), - dateindex_count: MetricPattern13::new(client.clone(), "dateindex_count".to_string()), - quarterindex: MetricPattern13::new(client.clone(), "quarterindex".to_string()), - semesterindex: MetricPattern13::new(client.clone(), "semesterindex".to_string()), - yearindex: MetricPattern13::new(client.clone(), "yearindex".to_string()), + first_height: MetricPattern13::new(client.clone(), "month1_first_height".to_string()), } } } /// Metrics tree node. 
-pub struct MetricsTree_Indexes_Quarterindex { - pub identity: MetricPattern25, - pub date: MetricPattern25, - pub first_monthindex: MetricPattern25, - pub monthindex_count: MetricPattern25, +pub struct MetricsTree_Indexes_Month3 { + pub identity: MetricPattern14, + pub date: MetricPattern14, + pub first_height: MetricPattern14, } -impl MetricsTree_Indexes_Quarterindex { +impl MetricsTree_Indexes_Month3 { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern25::new(client.clone(), "quarterindex".to_string()), - date: MetricPattern25::new(client.clone(), "date".to_string()), - first_monthindex: MetricPattern25::new(client.clone(), "first_monthindex".to_string()), - monthindex_count: MetricPattern25::new(client.clone(), "monthindex_count".to_string()), + identity: MetricPattern14::new(client.clone(), "month3".to_string()), + date: MetricPattern14::new(client.clone(), "date".to_string()), + first_height: MetricPattern14::new(client.clone(), "month3_first_height".to_string()), } } } /// Metrics tree node. 
-pub struct MetricsTree_Indexes_Semesterindex { - pub identity: MetricPattern26, - pub date: MetricPattern26, - pub first_monthindex: MetricPattern26, - pub monthindex_count: MetricPattern26, +pub struct MetricsTree_Indexes_Month6 { + pub identity: MetricPattern15, + pub date: MetricPattern15, + pub first_height: MetricPattern15, } -impl MetricsTree_Indexes_Semesterindex { +impl MetricsTree_Indexes_Month6 { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern26::new(client.clone(), "semesterindex".to_string()), - date: MetricPattern26::new(client.clone(), "date".to_string()), - first_monthindex: MetricPattern26::new(client.clone(), "first_monthindex".to_string()), - monthindex_count: MetricPattern26::new(client.clone(), "monthindex_count".to_string()), + identity: MetricPattern15::new(client.clone(), "month6".to_string()), + date: MetricPattern15::new(client.clone(), "date".to_string()), + first_height: MetricPattern15::new(client.clone(), "month6_first_height".to_string()), } } } /// Metrics tree node. 
-pub struct MetricsTree_Indexes_Yearindex { - pub identity: MetricPattern30, - pub date: MetricPattern30, - pub first_monthindex: MetricPattern30, - pub monthindex_count: MetricPattern30, - pub decadeindex: MetricPattern30, +pub struct MetricsTree_Indexes_Year1 { + pub identity: MetricPattern16, + pub date: MetricPattern16, + pub first_height: MetricPattern16, } -impl MetricsTree_Indexes_Yearindex { +impl MetricsTree_Indexes_Year1 { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern30::new(client.clone(), "yearindex".to_string()), - date: MetricPattern30::new(client.clone(), "date".to_string()), - first_monthindex: MetricPattern30::new(client.clone(), "first_monthindex".to_string()), - monthindex_count: MetricPattern30::new(client.clone(), "monthindex_count".to_string()), - decadeindex: MetricPattern30::new(client.clone(), "decadeindex".to_string()), + identity: MetricPattern16::new(client.clone(), "year1".to_string()), + date: MetricPattern16::new(client.clone(), "date".to_string()), + first_height: MetricPattern16::new(client.clone(), "year1_first_height".to_string()), } } } /// Metrics tree node. 
-pub struct MetricsTree_Indexes_Decadeindex { - pub identity: MetricPattern7, - pub date: MetricPattern7, - pub first_yearindex: MetricPattern7, - pub yearindex_count: MetricPattern7, +pub struct MetricsTree_Indexes_Year10 { + pub identity: MetricPattern17, + pub date: MetricPattern17, + pub first_height: MetricPattern17, } -impl MetricsTree_Indexes_Decadeindex { +impl MetricsTree_Indexes_Year10 { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern7::new(client.clone(), "decadeindex".to_string()), - date: MetricPattern7::new(client.clone(), "date".to_string()), - first_yearindex: MetricPattern7::new(client.clone(), "first_yearindex".to_string()), - yearindex_count: MetricPattern7::new(client.clone(), "yearindex_count".to_string()), + identity: MetricPattern17::new(client.clone(), "year10".to_string()), + date: MetricPattern17::new(client.clone(), "date".to_string()), + first_height: MetricPattern17::new(client.clone(), "year10_first_height".to_string()), } } } /// Metrics tree node. pub struct MetricsTree_Indexes_Txindex { - pub identity: MetricPattern27, - pub input_count: MetricPattern27, - pub output_count: MetricPattern27, + pub identity: MetricPattern21, + pub input_count: MetricPattern21, + pub output_count: MetricPattern21, } impl MetricsTree_Indexes_Txindex { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern27::new(client.clone(), "txindex".to_string()), - input_count: MetricPattern27::new(client.clone(), "input_count".to_string()), - output_count: MetricPattern27::new(client.clone(), "output_count".to_string()), + identity: MetricPattern21::new(client.clone(), "txindex".to_string()), + input_count: MetricPattern21::new(client.clone(), "input_count".to_string()), + output_count: MetricPattern21::new(client.clone(), "output_count".to_string()), } } } /// Metrics tree node. 
pub struct MetricsTree_Indexes_Txinindex { - pub identity: MetricPattern12, + pub identity: MetricPattern22, } impl MetricsTree_Indexes_Txinindex { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern12::new(client.clone(), "txinindex".to_string()), + identity: MetricPattern22::new(client.clone(), "txinindex".to_string()), } } } /// Metrics tree node. pub struct MetricsTree_Indexes_Txoutindex { - pub identity: MetricPattern15, + pub identity: MetricPattern23, } impl MetricsTree_Indexes_Txoutindex { pub fn new(client: Arc, base_path: String) -> Self { Self { - identity: MetricPattern15::new(client.clone(), "txoutindex".to_string()), + identity: MetricPattern23::new(client.clone(), "txoutindex".to_string()), } } } @@ -4117,60 +4706,60 @@ impl MetricsTree_Market { /// Metrics tree node. pub struct MetricsTree_Market_Ath { - pub price_ath: DollarsSatsPattern, - pub price_drawdown: MetricPattern3, - pub days_since_price_ath: MetricPattern4, - pub years_since_price_ath: MetricPattern4, - pub max_days_between_price_aths: MetricPattern4, - pub max_years_between_price_aths: MetricPattern4, + pub price_ath: SatsUsdPattern, + pub price_drawdown: MetricPattern1, + pub days_since_price_ath: MetricPattern1, + pub years_since_price_ath: MetricPattern2, + pub max_days_between_price_aths: MetricPattern1, + pub max_years_between_price_aths: MetricPattern2, } impl MetricsTree_Market_Ath { pub fn new(client: Arc, base_path: String) -> Self { Self { - price_ath: DollarsSatsPattern::new(client.clone(), "price_ath".to_string()), - price_drawdown: MetricPattern3::new(client.clone(), "price_drawdown".to_string()), - days_since_price_ath: MetricPattern4::new(client.clone(), "days_since_price_ath".to_string()), - years_since_price_ath: MetricPattern4::new(client.clone(), "years_since_price_ath".to_string()), - max_days_between_price_aths: MetricPattern4::new(client.clone(), "max_days_between_price_aths".to_string()), - max_years_between_price_aths: 
MetricPattern4::new(client.clone(), "max_years_between_price_aths".to_string()), + price_ath: SatsUsdPattern::new(client.clone(), "price_ath".to_string()), + price_drawdown: MetricPattern1::new(client.clone(), "price_drawdown".to_string()), + days_since_price_ath: MetricPattern1::new(client.clone(), "days_since_price_ath".to_string()), + years_since_price_ath: MetricPattern2::new(client.clone(), "years_since_price_ath".to_string()), + max_days_between_price_aths: MetricPattern1::new(client.clone(), "max_days_between_price_aths".to_string()), + max_years_between_price_aths: MetricPattern2::new(client.clone(), "max_years_between_price_aths".to_string()), } } } /// Metrics tree node. pub struct MetricsTree_Market_Lookback { - pub _1d: DollarsSatsPattern2, - pub _1w: DollarsSatsPattern2, - pub _1m: DollarsSatsPattern2, - pub _3m: DollarsSatsPattern2, - pub _6m: DollarsSatsPattern2, - pub _1y: DollarsSatsPattern2, - pub _2y: DollarsSatsPattern2, - pub _3y: DollarsSatsPattern2, - pub _4y: DollarsSatsPattern2, - pub _5y: DollarsSatsPattern2, - pub _6y: DollarsSatsPattern2, - pub _8y: DollarsSatsPattern2, - pub _10y: DollarsSatsPattern2, + pub _1d: SatsUsdPattern, + pub _1w: SatsUsdPattern, + pub _1m: SatsUsdPattern, + pub _3m: SatsUsdPattern, + pub _6m: SatsUsdPattern, + pub _1y: SatsUsdPattern, + pub _2y: SatsUsdPattern, + pub _3y: SatsUsdPattern, + pub _4y: SatsUsdPattern, + pub _5y: SatsUsdPattern, + pub _6y: SatsUsdPattern, + pub _8y: SatsUsdPattern, + pub _10y: SatsUsdPattern, } impl MetricsTree_Market_Lookback { pub fn new(client: Arc, base_path: String) -> Self { Self { - _1d: DollarsSatsPattern2::new(client.clone(), "price_1d_ago".to_string()), - _1w: DollarsSatsPattern2::new(client.clone(), "price_1w_ago".to_string()), - _1m: DollarsSatsPattern2::new(client.clone(), "price_1m_ago".to_string()), - _3m: DollarsSatsPattern2::new(client.clone(), "price_3m_ago".to_string()), - _6m: DollarsSatsPattern2::new(client.clone(), "price_6m_ago".to_string()), - _1y: 
DollarsSatsPattern2::new(client.clone(), "price_1y_ago".to_string()), - _2y: DollarsSatsPattern2::new(client.clone(), "price_2y_ago".to_string()), - _3y: DollarsSatsPattern2::new(client.clone(), "price_3y_ago".to_string()), - _4y: DollarsSatsPattern2::new(client.clone(), "price_4y_ago".to_string()), - _5y: DollarsSatsPattern2::new(client.clone(), "price_5y_ago".to_string()), - _6y: DollarsSatsPattern2::new(client.clone(), "price_6y_ago".to_string()), - _8y: DollarsSatsPattern2::new(client.clone(), "price_8y_ago".to_string()), - _10y: DollarsSatsPattern2::new(client.clone(), "price_10y_ago".to_string()), + _1d: SatsUsdPattern::new(client.clone(), "price_1d_ago".to_string()), + _1w: SatsUsdPattern::new(client.clone(), "price_1w_ago".to_string()), + _1m: SatsUsdPattern::new(client.clone(), "price_1m_ago".to_string()), + _3m: SatsUsdPattern::new(client.clone(), "price_3m_ago".to_string()), + _6m: SatsUsdPattern::new(client.clone(), "price_6m_ago".to_string()), + _1y: SatsUsdPattern::new(client.clone(), "price_1y_ago".to_string()), + _2y: SatsUsdPattern::new(client.clone(), "price_2y_ago".to_string()), + _3y: SatsUsdPattern::new(client.clone(), "price_3y_ago".to_string()), + _4y: SatsUsdPattern::new(client.clone(), "price_4y_ago".to_string()), + _5y: SatsUsdPattern::new(client.clone(), "price_5y_ago".to_string()), + _6y: SatsUsdPattern::new(client.clone(), "price_6y_ago".to_string()), + _8y: SatsUsdPattern::new(client.clone(), "price_8y_ago".to_string()), + _10y: SatsUsdPattern::new(client.clone(), "price_10y_ago".to_string()), } } } @@ -4182,7 +4771,7 @@ pub struct MetricsTree_Market_Returns { pub _1d_returns_1w_sd: SdSmaPattern, pub _1d_returns_1m_sd: SdSmaPattern, pub _1d_returns_1y_sd: SdSmaPattern, - pub downside_returns: MetricPattern6, + pub downside_returns: MetricPattern20, pub downside_1w_sd: SdSmaPattern, pub downside_1m_sd: SdSmaPattern, pub downside_1y_sd: SdSmaPattern, @@ -4196,7 +4785,7 @@ impl MetricsTree_Market_Returns { _1d_returns_1w_sd: 
SdSmaPattern::new(client.clone(), "1d_returns_1w_sd".to_string()), _1d_returns_1m_sd: SdSmaPattern::new(client.clone(), "1d_returns_1m_sd".to_string()), _1d_returns_1y_sd: SdSmaPattern::new(client.clone(), "1d_returns_1y_sd".to_string()), - downside_returns: MetricPattern6::new(client.clone(), "downside_returns".to_string()), + downside_returns: MetricPattern20::new(client.clone(), "downside_returns".to_string()), downside_1w_sd: SdSmaPattern::new(client.clone(), "downside_1w_sd".to_string()), downside_1m_sd: SdSmaPattern::new(client.clone(), "downside_1m_sd".to_string()), downside_1y_sd: SdSmaPattern::new(client.clone(), "downside_1y_sd".to_string()), @@ -4206,99 +4795,99 @@ impl MetricsTree_Market_Returns { /// Metrics tree node. pub struct MetricsTree_Market_Returns_PriceReturns { - pub _1d: MetricPattern4, - pub _1w: MetricPattern4, - pub _1m: MetricPattern4, - pub _3m: MetricPattern4, - pub _6m: MetricPattern4, - pub _1y: MetricPattern4, - pub _2y: MetricPattern4, - pub _3y: MetricPattern4, - pub _4y: MetricPattern4, - pub _5y: MetricPattern4, - pub _6y: MetricPattern4, - pub _8y: MetricPattern4, - pub _10y: MetricPattern4, + pub _1d: MetricPattern1, + pub _1w: MetricPattern1, + pub _1m: MetricPattern1, + pub _3m: MetricPattern1, + pub _6m: MetricPattern1, + pub _1y: MetricPattern1, + pub _2y: MetricPattern1, + pub _3y: MetricPattern1, + pub _4y: MetricPattern1, + pub _5y: MetricPattern1, + pub _6y: MetricPattern1, + pub _8y: MetricPattern1, + pub _10y: MetricPattern1, } impl MetricsTree_Market_Returns_PriceReturns { pub fn new(client: Arc, base_path: String) -> Self { Self { - _1d: MetricPattern4::new(client.clone(), "1d_price_returns".to_string()), - _1w: MetricPattern4::new(client.clone(), "1w_price_returns".to_string()), - _1m: MetricPattern4::new(client.clone(), "1m_price_returns".to_string()), - _3m: MetricPattern4::new(client.clone(), "3m_price_returns".to_string()), - _6m: MetricPattern4::new(client.clone(), "6m_price_returns".to_string()), - _1y: 
MetricPattern4::new(client.clone(), "1y_price_returns".to_string()), - _2y: MetricPattern4::new(client.clone(), "2y_price_returns".to_string()), - _3y: MetricPattern4::new(client.clone(), "3y_price_returns".to_string()), - _4y: MetricPattern4::new(client.clone(), "4y_price_returns".to_string()), - _5y: MetricPattern4::new(client.clone(), "5y_price_returns".to_string()), - _6y: MetricPattern4::new(client.clone(), "6y_price_returns".to_string()), - _8y: MetricPattern4::new(client.clone(), "8y_price_returns".to_string()), - _10y: MetricPattern4::new(client.clone(), "10y_price_returns".to_string()), + _1d: MetricPattern1::new(client.clone(), "1d_price_returns".to_string()), + _1w: MetricPattern1::new(client.clone(), "1w_price_returns".to_string()), + _1m: MetricPattern1::new(client.clone(), "1m_price_returns".to_string()), + _3m: MetricPattern1::new(client.clone(), "3m_price_returns".to_string()), + _6m: MetricPattern1::new(client.clone(), "6m_price_returns".to_string()), + _1y: MetricPattern1::new(client.clone(), "1y_price_returns".to_string()), + _2y: MetricPattern1::new(client.clone(), "2y_price_returns".to_string()), + _3y: MetricPattern1::new(client.clone(), "3y_price_returns".to_string()), + _4y: MetricPattern1::new(client.clone(), "4y_price_returns".to_string()), + _5y: MetricPattern1::new(client.clone(), "5y_price_returns".to_string()), + _6y: MetricPattern1::new(client.clone(), "6y_price_returns".to_string()), + _8y: MetricPattern1::new(client.clone(), "8y_price_returns".to_string()), + _10y: MetricPattern1::new(client.clone(), "10y_price_returns".to_string()), } } } /// Metrics tree node. 
pub struct MetricsTree_Market_Volatility { - pub price_1w_volatility: MetricPattern4, - pub price_1m_volatility: MetricPattern4, - pub price_1y_volatility: MetricPattern4, - pub sharpe_1w: MetricPattern6, - pub sharpe_1m: MetricPattern6, - pub sharpe_1y: MetricPattern6, - pub sortino_1w: MetricPattern6, - pub sortino_1m: MetricPattern6, - pub sortino_1y: MetricPattern6, + pub price_1w_volatility: MetricPattern1, + pub price_1m_volatility: MetricPattern1, + pub price_1y_volatility: MetricPattern1, + pub sharpe_1w: MetricPattern20, + pub sharpe_1m: MetricPattern20, + pub sharpe_1y: MetricPattern20, + pub sortino_1w: MetricPattern20, + pub sortino_1m: MetricPattern20, + pub sortino_1y: MetricPattern20, } impl MetricsTree_Market_Volatility { pub fn new(client: Arc, base_path: String) -> Self { Self { - price_1w_volatility: MetricPattern4::new(client.clone(), "price_1w_volatility".to_string()), - price_1m_volatility: MetricPattern4::new(client.clone(), "price_1m_volatility".to_string()), - price_1y_volatility: MetricPattern4::new(client.clone(), "price_1y_volatility".to_string()), - sharpe_1w: MetricPattern6::new(client.clone(), "sharpe_1w".to_string()), - sharpe_1m: MetricPattern6::new(client.clone(), "sharpe_1m".to_string()), - sharpe_1y: MetricPattern6::new(client.clone(), "sharpe_1y".to_string()), - sortino_1w: MetricPattern6::new(client.clone(), "sortino_1w".to_string()), - sortino_1m: MetricPattern6::new(client.clone(), "sortino_1m".to_string()), - sortino_1y: MetricPattern6::new(client.clone(), "sortino_1y".to_string()), + price_1w_volatility: MetricPattern1::new(client.clone(), "price_1w_volatility".to_string()), + price_1m_volatility: MetricPattern1::new(client.clone(), "price_1m_volatility".to_string()), + price_1y_volatility: MetricPattern1::new(client.clone(), "price_1y_volatility".to_string()), + sharpe_1w: MetricPattern20::new(client.clone(), "sharpe_1w".to_string()), + sharpe_1m: MetricPattern20::new(client.clone(), "sharpe_1m".to_string()), + sharpe_1y: 
MetricPattern20::new(client.clone(), "sharpe_1y".to_string()), + sortino_1w: MetricPattern20::new(client.clone(), "sortino_1w".to_string()), + sortino_1m: MetricPattern20::new(client.clone(), "sortino_1m".to_string()), + sortino_1y: MetricPattern20::new(client.clone(), "sortino_1y".to_string()), } } } /// Metrics tree node. pub struct MetricsTree_Market_Range { - pub price_1w_min: DollarsSatsPattern2, - pub price_1w_max: DollarsSatsPattern2, - pub price_2w_min: DollarsSatsPattern2, - pub price_2w_max: DollarsSatsPattern2, - pub price_1m_min: DollarsSatsPattern2, - pub price_1m_max: DollarsSatsPattern2, - pub price_1y_min: DollarsSatsPattern2, - pub price_1y_max: DollarsSatsPattern2, - pub price_true_range: MetricPattern6, - pub price_true_range_2w_sum: MetricPattern6, - pub price_2w_choppiness_index: MetricPattern4, + pub price_1w_min: SatsUsdPattern, + pub price_1w_max: SatsUsdPattern, + pub price_2w_min: SatsUsdPattern, + pub price_2w_max: SatsUsdPattern, + pub price_1m_min: SatsUsdPattern, + pub price_1m_max: SatsUsdPattern, + pub price_1y_min: SatsUsdPattern, + pub price_1y_max: SatsUsdPattern, + pub price_true_range: MetricPattern1, + pub price_true_range_2w_sum: MetricPattern1, + pub price_2w_choppiness_index: MetricPattern1, } impl MetricsTree_Market_Range { pub fn new(client: Arc, base_path: String) -> Self { Self { - price_1w_min: DollarsSatsPattern2::new(client.clone(), "price_1w_min".to_string()), - price_1w_max: DollarsSatsPattern2::new(client.clone(), "price_1w_max".to_string()), - price_2w_min: DollarsSatsPattern2::new(client.clone(), "price_2w_min".to_string()), - price_2w_max: DollarsSatsPattern2::new(client.clone(), "price_2w_max".to_string()), - price_1m_min: DollarsSatsPattern2::new(client.clone(), "price_1m_min".to_string()), - price_1m_max: DollarsSatsPattern2::new(client.clone(), "price_1m_max".to_string()), - price_1y_min: DollarsSatsPattern2::new(client.clone(), "price_1y_min".to_string()), - price_1y_max: 
DollarsSatsPattern2::new(client.clone(), "price_1y_max".to_string()), - price_true_range: MetricPattern6::new(client.clone(), "price_true_range".to_string()), - price_true_range_2w_sum: MetricPattern6::new(client.clone(), "price_true_range_2w_sum".to_string()), - price_2w_choppiness_index: MetricPattern4::new(client.clone(), "price_2w_choppiness_index".to_string()), + price_1w_min: SatsUsdPattern::new(client.clone(), "price_1w_min".to_string()), + price_1w_max: SatsUsdPattern::new(client.clone(), "price_1w_max".to_string()), + price_2w_min: SatsUsdPattern::new(client.clone(), "price_2w_min".to_string()), + price_2w_max: SatsUsdPattern::new(client.clone(), "price_2w_max".to_string()), + price_1m_min: SatsUsdPattern::new(client.clone(), "price_1m_min".to_string()), + price_1m_max: SatsUsdPattern::new(client.clone(), "price_1m_max".to_string()), + price_1y_min: SatsUsdPattern::new(client.clone(), "price_1y_min".to_string()), + price_1y_max: SatsUsdPattern::new(client.clone(), "price_1y_max".to_string()), + price_true_range: MetricPattern1::new(client.clone(), "price_true_range".to_string()), + price_true_range_2w_sum: MetricPattern1::new(client.clone(), "price_true_range_2w_sum".to_string()), + price_2w_choppiness_index: MetricPattern1::new(client.clone(), "price_2w_choppiness_index".to_string()), } } } @@ -4337,9 +4926,9 @@ pub struct MetricsTree_Market_MovingAverage { pub price_2y_ema: PriceRatioPattern, pub price_200w_ema: PriceRatioPattern, pub price_4y_ema: PriceRatioPattern, - pub price_200d_sma_x2_4: DollarsSatsPattern2, - pub price_200d_sma_x0_8: DollarsSatsPattern2, - pub price_350d_sma_x2: DollarsSatsPattern2, + pub price_200d_sma_x2_4: SatsUsdPattern, + pub price_200d_sma_x0_8: SatsUsdPattern, + pub price_350d_sma_x2: SatsUsdPattern, } impl MetricsTree_Market_MovingAverage { @@ -4377,15 +4966,16 @@ impl MetricsTree_Market_MovingAverage { price_2y_ema: PriceRatioPattern::new(client.clone(), "price_2y_ema".to_string()), price_200w_ema: 
PriceRatioPattern::new(client.clone(), "price_200w_ema".to_string()), price_4y_ema: PriceRatioPattern::new(client.clone(), "price_4y_ema".to_string()), - price_200d_sma_x2_4: DollarsSatsPattern2::new(client.clone(), "price_200d_sma_x2_4".to_string()), - price_200d_sma_x0_8: DollarsSatsPattern2::new(client.clone(), "price_200d_sma_x0_8".to_string()), - price_350d_sma_x2: DollarsSatsPattern2::new(client.clone(), "price_350d_sma_x2".to_string()), + price_200d_sma_x2_4: SatsUsdPattern::new(client.clone(), "price_200d_sma_x2_4".to_string()), + price_200d_sma_x0_8: SatsUsdPattern::new(client.clone(), "price_200d_sma_x0_8".to_string()), + price_350d_sma_x2: SatsUsdPattern::new(client.clone(), "price_350d_sma_x2".to_string()), } } } /// Metrics tree node. pub struct MetricsTree_Market_Dca { + pub dca_sats_per_day: MetricPattern20, pub period_stack: _10y1m1w1y2y3m3y4y5y6m6y8yPattern3, pub period_average_price: MetricsTree_Market_Dca_PeriodAveragePrice, pub period_returns: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2, @@ -4412,6 +5002,7 @@ pub struct MetricsTree_Market_Dca { impl MetricsTree_Market_Dca { pub fn new(client: Arc, base_path: String) -> Self { Self { + dca_sats_per_day: MetricPattern20::new(client.clone(), "dca_sats_per_day".to_string()), period_stack: _10y1m1w1y2y3m3y4y5y6m6y8yPattern3::new(client.clone(), "dca_stack".to_string()), period_average_price: MetricsTree_Market_Dca_PeriodAveragePrice::new(client.clone(), format!("{base_path}_period_average_price")), period_returns: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2::new(client.clone(), "dca_returns".to_string()), @@ -4439,308 +5030,468 @@ impl MetricsTree_Market_Dca { /// Metrics tree node. 
pub struct MetricsTree_Market_Dca_PeriodAveragePrice { - pub _1w: DollarsSatsPattern2, - pub _1m: DollarsSatsPattern2, - pub _3m: DollarsSatsPattern2, - pub _6m: DollarsSatsPattern2, - pub _1y: DollarsSatsPattern2, - pub _2y: DollarsSatsPattern2, - pub _3y: DollarsSatsPattern2, - pub _4y: DollarsSatsPattern2, - pub _5y: DollarsSatsPattern2, - pub _6y: DollarsSatsPattern2, - pub _8y: DollarsSatsPattern2, - pub _10y: DollarsSatsPattern2, + pub _1w: SatsUsdPattern, + pub _1m: SatsUsdPattern, + pub _3m: SatsUsdPattern, + pub _6m: SatsUsdPattern, + pub _1y: SatsUsdPattern, + pub _2y: SatsUsdPattern, + pub _3y: SatsUsdPattern, + pub _4y: SatsUsdPattern, + pub _5y: SatsUsdPattern, + pub _6y: SatsUsdPattern, + pub _8y: SatsUsdPattern, + pub _10y: SatsUsdPattern, } impl MetricsTree_Market_Dca_PeriodAveragePrice { pub fn new(client: Arc, base_path: String) -> Self { Self { - _1w: DollarsSatsPattern2::new(client.clone(), "1w_dca_average_price".to_string()), - _1m: DollarsSatsPattern2::new(client.clone(), "1m_dca_average_price".to_string()), - _3m: DollarsSatsPattern2::new(client.clone(), "3m_dca_average_price".to_string()), - _6m: DollarsSatsPattern2::new(client.clone(), "6m_dca_average_price".to_string()), - _1y: DollarsSatsPattern2::new(client.clone(), "1y_dca_average_price".to_string()), - _2y: DollarsSatsPattern2::new(client.clone(), "2y_dca_average_price".to_string()), - _3y: DollarsSatsPattern2::new(client.clone(), "3y_dca_average_price".to_string()), - _4y: DollarsSatsPattern2::new(client.clone(), "4y_dca_average_price".to_string()), - _5y: DollarsSatsPattern2::new(client.clone(), "5y_dca_average_price".to_string()), - _6y: DollarsSatsPattern2::new(client.clone(), "6y_dca_average_price".to_string()), - _8y: DollarsSatsPattern2::new(client.clone(), "8y_dca_average_price".to_string()), - _10y: DollarsSatsPattern2::new(client.clone(), "10y_dca_average_price".to_string()), + _1w: SatsUsdPattern::new(client.clone(), "1w_dca_average_price".to_string()), + _1m: 
SatsUsdPattern::new(client.clone(), "1m_dca_average_price".to_string()), + _3m: SatsUsdPattern::new(client.clone(), "3m_dca_average_price".to_string()), + _6m: SatsUsdPattern::new(client.clone(), "6m_dca_average_price".to_string()), + _1y: SatsUsdPattern::new(client.clone(), "1y_dca_average_price".to_string()), + _2y: SatsUsdPattern::new(client.clone(), "2y_dca_average_price".to_string()), + _3y: SatsUsdPattern::new(client.clone(), "3y_dca_average_price".to_string()), + _4y: SatsUsdPattern::new(client.clone(), "4y_dca_average_price".to_string()), + _5y: SatsUsdPattern::new(client.clone(), "5y_dca_average_price".to_string()), + _6y: SatsUsdPattern::new(client.clone(), "6y_dca_average_price".to_string()), + _8y: SatsUsdPattern::new(client.clone(), "8y_dca_average_price".to_string()), + _10y: SatsUsdPattern::new(client.clone(), "10y_dca_average_price".to_string()), } } } /// Metrics tree node. pub struct MetricsTree_Market_Dca_ClassStack { - pub _2015: BitcoinDollarsSatsPattern5, - pub _2016: BitcoinDollarsSatsPattern5, - pub _2017: BitcoinDollarsSatsPattern5, - pub _2018: BitcoinDollarsSatsPattern5, - pub _2019: BitcoinDollarsSatsPattern5, - pub _2020: BitcoinDollarsSatsPattern5, - pub _2021: BitcoinDollarsSatsPattern5, - pub _2022: BitcoinDollarsSatsPattern5, - pub _2023: BitcoinDollarsSatsPattern5, - pub _2024: BitcoinDollarsSatsPattern5, - pub _2025: BitcoinDollarsSatsPattern5, - pub _2026: BitcoinDollarsSatsPattern5, + pub _2015: BtcSatsUsdPattern, + pub _2016: BtcSatsUsdPattern, + pub _2017: BtcSatsUsdPattern, + pub _2018: BtcSatsUsdPattern, + pub _2019: BtcSatsUsdPattern, + pub _2020: BtcSatsUsdPattern, + pub _2021: BtcSatsUsdPattern, + pub _2022: BtcSatsUsdPattern, + pub _2023: BtcSatsUsdPattern, + pub _2024: BtcSatsUsdPattern, + pub _2025: BtcSatsUsdPattern, + pub _2026: BtcSatsUsdPattern, } impl MetricsTree_Market_Dca_ClassStack { pub fn new(client: Arc, base_path: String) -> Self { Self { - _2015: BitcoinDollarsSatsPattern5::new(client.clone(), 
"dca_class_2015_stack".to_string()), - _2016: BitcoinDollarsSatsPattern5::new(client.clone(), "dca_class_2016_stack".to_string()), - _2017: BitcoinDollarsSatsPattern5::new(client.clone(), "dca_class_2017_stack".to_string()), - _2018: BitcoinDollarsSatsPattern5::new(client.clone(), "dca_class_2018_stack".to_string()), - _2019: BitcoinDollarsSatsPattern5::new(client.clone(), "dca_class_2019_stack".to_string()), - _2020: BitcoinDollarsSatsPattern5::new(client.clone(), "dca_class_2020_stack".to_string()), - _2021: BitcoinDollarsSatsPattern5::new(client.clone(), "dca_class_2021_stack".to_string()), - _2022: BitcoinDollarsSatsPattern5::new(client.clone(), "dca_class_2022_stack".to_string()), - _2023: BitcoinDollarsSatsPattern5::new(client.clone(), "dca_class_2023_stack".to_string()), - _2024: BitcoinDollarsSatsPattern5::new(client.clone(), "dca_class_2024_stack".to_string()), - _2025: BitcoinDollarsSatsPattern5::new(client.clone(), "dca_class_2025_stack".to_string()), - _2026: BitcoinDollarsSatsPattern5::new(client.clone(), "dca_class_2026_stack".to_string()), + _2015: BtcSatsUsdPattern::new(client.clone(), "dca_class_2015_stack".to_string()), + _2016: BtcSatsUsdPattern::new(client.clone(), "dca_class_2016_stack".to_string()), + _2017: BtcSatsUsdPattern::new(client.clone(), "dca_class_2017_stack".to_string()), + _2018: BtcSatsUsdPattern::new(client.clone(), "dca_class_2018_stack".to_string()), + _2019: BtcSatsUsdPattern::new(client.clone(), "dca_class_2019_stack".to_string()), + _2020: BtcSatsUsdPattern::new(client.clone(), "dca_class_2020_stack".to_string()), + _2021: BtcSatsUsdPattern::new(client.clone(), "dca_class_2021_stack".to_string()), + _2022: BtcSatsUsdPattern::new(client.clone(), "dca_class_2022_stack".to_string()), + _2023: BtcSatsUsdPattern::new(client.clone(), "dca_class_2023_stack".to_string()), + _2024: BtcSatsUsdPattern::new(client.clone(), "dca_class_2024_stack".to_string()), + _2025: BtcSatsUsdPattern::new(client.clone(), 
"dca_class_2025_stack".to_string()), + _2026: BtcSatsUsdPattern::new(client.clone(), "dca_class_2026_stack".to_string()), } } } /// Metrics tree node. pub struct MetricsTree_Market_Dca_ClassAveragePrice { - pub _2015: DollarsSatsPattern2, - pub _2016: DollarsSatsPattern2, - pub _2017: DollarsSatsPattern2, - pub _2018: DollarsSatsPattern2, - pub _2019: DollarsSatsPattern2, - pub _2020: DollarsSatsPattern2, - pub _2021: DollarsSatsPattern2, - pub _2022: DollarsSatsPattern2, - pub _2023: DollarsSatsPattern2, - pub _2024: DollarsSatsPattern2, - pub _2025: DollarsSatsPattern2, - pub _2026: DollarsSatsPattern2, + pub _2015: SatsUsdPattern, + pub _2016: SatsUsdPattern, + pub _2017: SatsUsdPattern, + pub _2018: SatsUsdPattern, + pub _2019: SatsUsdPattern, + pub _2020: SatsUsdPattern, + pub _2021: SatsUsdPattern, + pub _2022: SatsUsdPattern, + pub _2023: SatsUsdPattern, + pub _2024: SatsUsdPattern, + pub _2025: SatsUsdPattern, + pub _2026: SatsUsdPattern, } impl MetricsTree_Market_Dca_ClassAveragePrice { pub fn new(client: Arc, base_path: String) -> Self { Self { - _2015: DollarsSatsPattern2::new(client.clone(), "dca_class_2015_average_price".to_string()), - _2016: DollarsSatsPattern2::new(client.clone(), "dca_class_2016_average_price".to_string()), - _2017: DollarsSatsPattern2::new(client.clone(), "dca_class_2017_average_price".to_string()), - _2018: DollarsSatsPattern2::new(client.clone(), "dca_class_2018_average_price".to_string()), - _2019: DollarsSatsPattern2::new(client.clone(), "dca_class_2019_average_price".to_string()), - _2020: DollarsSatsPattern2::new(client.clone(), "dca_class_2020_average_price".to_string()), - _2021: DollarsSatsPattern2::new(client.clone(), "dca_class_2021_average_price".to_string()), - _2022: DollarsSatsPattern2::new(client.clone(), "dca_class_2022_average_price".to_string()), - _2023: DollarsSatsPattern2::new(client.clone(), "dca_class_2023_average_price".to_string()), - _2024: DollarsSatsPattern2::new(client.clone(), 
"dca_class_2024_average_price".to_string()), - _2025: DollarsSatsPattern2::new(client.clone(), "dca_class_2025_average_price".to_string()), - _2026: DollarsSatsPattern2::new(client.clone(), "dca_class_2026_average_price".to_string()), + _2015: SatsUsdPattern::new(client.clone(), "dca_class_2015_average_price".to_string()), + _2016: SatsUsdPattern::new(client.clone(), "dca_class_2016_average_price".to_string()), + _2017: SatsUsdPattern::new(client.clone(), "dca_class_2017_average_price".to_string()), + _2018: SatsUsdPattern::new(client.clone(), "dca_class_2018_average_price".to_string()), + _2019: SatsUsdPattern::new(client.clone(), "dca_class_2019_average_price".to_string()), + _2020: SatsUsdPattern::new(client.clone(), "dca_class_2020_average_price".to_string()), + _2021: SatsUsdPattern::new(client.clone(), "dca_class_2021_average_price".to_string()), + _2022: SatsUsdPattern::new(client.clone(), "dca_class_2022_average_price".to_string()), + _2023: SatsUsdPattern::new(client.clone(), "dca_class_2023_average_price".to_string()), + _2024: SatsUsdPattern::new(client.clone(), "dca_class_2024_average_price".to_string()), + _2025: SatsUsdPattern::new(client.clone(), "dca_class_2025_average_price".to_string()), + _2026: SatsUsdPattern::new(client.clone(), "dca_class_2026_average_price".to_string()), } } } /// Metrics tree node. 
pub struct MetricsTree_Market_Dca_ClassDaysInProfit { - pub _2015: MetricPattern4, - pub _2016: MetricPattern4, - pub _2017: MetricPattern4, - pub _2018: MetricPattern4, - pub _2019: MetricPattern4, - pub _2020: MetricPattern4, - pub _2021: MetricPattern4, - pub _2022: MetricPattern4, - pub _2023: MetricPattern4, - pub _2024: MetricPattern4, - pub _2025: MetricPattern4, - pub _2026: MetricPattern4, + pub _2015: MetricPattern1, + pub _2016: MetricPattern1, + pub _2017: MetricPattern1, + pub _2018: MetricPattern1, + pub _2019: MetricPattern1, + pub _2020: MetricPattern1, + pub _2021: MetricPattern1, + pub _2022: MetricPattern1, + pub _2023: MetricPattern1, + pub _2024: MetricPattern1, + pub _2025: MetricPattern1, + pub _2026: MetricPattern1, } impl MetricsTree_Market_Dca_ClassDaysInProfit { pub fn new(client: Arc, base_path: String) -> Self { Self { - _2015: MetricPattern4::new(client.clone(), "dca_class_2015_days_in_profit".to_string()), - _2016: MetricPattern4::new(client.clone(), "dca_class_2016_days_in_profit".to_string()), - _2017: MetricPattern4::new(client.clone(), "dca_class_2017_days_in_profit".to_string()), - _2018: MetricPattern4::new(client.clone(), "dca_class_2018_days_in_profit".to_string()), - _2019: MetricPattern4::new(client.clone(), "dca_class_2019_days_in_profit".to_string()), - _2020: MetricPattern4::new(client.clone(), "dca_class_2020_days_in_profit".to_string()), - _2021: MetricPattern4::new(client.clone(), "dca_class_2021_days_in_profit".to_string()), - _2022: MetricPattern4::new(client.clone(), "dca_class_2022_days_in_profit".to_string()), - _2023: MetricPattern4::new(client.clone(), "dca_class_2023_days_in_profit".to_string()), - _2024: MetricPattern4::new(client.clone(), "dca_class_2024_days_in_profit".to_string()), - _2025: MetricPattern4::new(client.clone(), "dca_class_2025_days_in_profit".to_string()), - _2026: MetricPattern4::new(client.clone(), "dca_class_2026_days_in_profit".to_string()), + _2015: MetricPattern1::new(client.clone(), 
"dca_class_2015_days_in_profit".to_string()), + _2016: MetricPattern1::new(client.clone(), "dca_class_2016_days_in_profit".to_string()), + _2017: MetricPattern1::new(client.clone(), "dca_class_2017_days_in_profit".to_string()), + _2018: MetricPattern1::new(client.clone(), "dca_class_2018_days_in_profit".to_string()), + _2019: MetricPattern1::new(client.clone(), "dca_class_2019_days_in_profit".to_string()), + _2020: MetricPattern1::new(client.clone(), "dca_class_2020_days_in_profit".to_string()), + _2021: MetricPattern1::new(client.clone(), "dca_class_2021_days_in_profit".to_string()), + _2022: MetricPattern1::new(client.clone(), "dca_class_2022_days_in_profit".to_string()), + _2023: MetricPattern1::new(client.clone(), "dca_class_2023_days_in_profit".to_string()), + _2024: MetricPattern1::new(client.clone(), "dca_class_2024_days_in_profit".to_string()), + _2025: MetricPattern1::new(client.clone(), "dca_class_2025_days_in_profit".to_string()), + _2026: MetricPattern1::new(client.clone(), "dca_class_2026_days_in_profit".to_string()), } } } /// Metrics tree node. 
pub struct MetricsTree_Market_Dca_ClassDaysInLoss { - pub _2015: MetricPattern4, - pub _2016: MetricPattern4, - pub _2017: MetricPattern4, - pub _2018: MetricPattern4, - pub _2019: MetricPattern4, - pub _2020: MetricPattern4, - pub _2021: MetricPattern4, - pub _2022: MetricPattern4, - pub _2023: MetricPattern4, - pub _2024: MetricPattern4, - pub _2025: MetricPattern4, - pub _2026: MetricPattern4, + pub _2015: MetricPattern1, + pub _2016: MetricPattern1, + pub _2017: MetricPattern1, + pub _2018: MetricPattern1, + pub _2019: MetricPattern1, + pub _2020: MetricPattern1, + pub _2021: MetricPattern1, + pub _2022: MetricPattern1, + pub _2023: MetricPattern1, + pub _2024: MetricPattern1, + pub _2025: MetricPattern1, + pub _2026: MetricPattern1, } impl MetricsTree_Market_Dca_ClassDaysInLoss { pub fn new(client: Arc, base_path: String) -> Self { Self { - _2015: MetricPattern4::new(client.clone(), "dca_class_2015_days_in_loss".to_string()), - _2016: MetricPattern4::new(client.clone(), "dca_class_2016_days_in_loss".to_string()), - _2017: MetricPattern4::new(client.clone(), "dca_class_2017_days_in_loss".to_string()), - _2018: MetricPattern4::new(client.clone(), "dca_class_2018_days_in_loss".to_string()), - _2019: MetricPattern4::new(client.clone(), "dca_class_2019_days_in_loss".to_string()), - _2020: MetricPattern4::new(client.clone(), "dca_class_2020_days_in_loss".to_string()), - _2021: MetricPattern4::new(client.clone(), "dca_class_2021_days_in_loss".to_string()), - _2022: MetricPattern4::new(client.clone(), "dca_class_2022_days_in_loss".to_string()), - _2023: MetricPattern4::new(client.clone(), "dca_class_2023_days_in_loss".to_string()), - _2024: MetricPattern4::new(client.clone(), "dca_class_2024_days_in_loss".to_string()), - _2025: MetricPattern4::new(client.clone(), "dca_class_2025_days_in_loss".to_string()), - _2026: MetricPattern4::new(client.clone(), "dca_class_2026_days_in_loss".to_string()), + _2015: MetricPattern1::new(client.clone(), 
"dca_class_2015_days_in_loss".to_string()), + _2016: MetricPattern1::new(client.clone(), "dca_class_2016_days_in_loss".to_string()), + _2017: MetricPattern1::new(client.clone(), "dca_class_2017_days_in_loss".to_string()), + _2018: MetricPattern1::new(client.clone(), "dca_class_2018_days_in_loss".to_string()), + _2019: MetricPattern1::new(client.clone(), "dca_class_2019_days_in_loss".to_string()), + _2020: MetricPattern1::new(client.clone(), "dca_class_2020_days_in_loss".to_string()), + _2021: MetricPattern1::new(client.clone(), "dca_class_2021_days_in_loss".to_string()), + _2022: MetricPattern1::new(client.clone(), "dca_class_2022_days_in_loss".to_string()), + _2023: MetricPattern1::new(client.clone(), "dca_class_2023_days_in_loss".to_string()), + _2024: MetricPattern1::new(client.clone(), "dca_class_2024_days_in_loss".to_string()), + _2025: MetricPattern1::new(client.clone(), "dca_class_2025_days_in_loss".to_string()), + _2026: MetricPattern1::new(client.clone(), "dca_class_2026_days_in_loss".to_string()), } } } /// Metrics tree node. 
pub struct MetricsTree_Market_Dca_ClassMinReturn { - pub _2015: MetricPattern4, - pub _2016: MetricPattern4, - pub _2017: MetricPattern4, - pub _2018: MetricPattern4, - pub _2019: MetricPattern4, - pub _2020: MetricPattern4, - pub _2021: MetricPattern4, - pub _2022: MetricPattern4, - pub _2023: MetricPattern4, - pub _2024: MetricPattern4, - pub _2025: MetricPattern4, - pub _2026: MetricPattern4, + pub _2015: MetricPattern1, + pub _2016: MetricPattern1, + pub _2017: MetricPattern1, + pub _2018: MetricPattern1, + pub _2019: MetricPattern1, + pub _2020: MetricPattern1, + pub _2021: MetricPattern1, + pub _2022: MetricPattern1, + pub _2023: MetricPattern1, + pub _2024: MetricPattern1, + pub _2025: MetricPattern1, + pub _2026: MetricPattern1, } impl MetricsTree_Market_Dca_ClassMinReturn { pub fn new(client: Arc, base_path: String) -> Self { Self { - _2015: MetricPattern4::new(client.clone(), "dca_class_2015_min_return".to_string()), - _2016: MetricPattern4::new(client.clone(), "dca_class_2016_min_return".to_string()), - _2017: MetricPattern4::new(client.clone(), "dca_class_2017_min_return".to_string()), - _2018: MetricPattern4::new(client.clone(), "dca_class_2018_min_return".to_string()), - _2019: MetricPattern4::new(client.clone(), "dca_class_2019_min_return".to_string()), - _2020: MetricPattern4::new(client.clone(), "dca_class_2020_min_return".to_string()), - _2021: MetricPattern4::new(client.clone(), "dca_class_2021_min_return".to_string()), - _2022: MetricPattern4::new(client.clone(), "dca_class_2022_min_return".to_string()), - _2023: MetricPattern4::new(client.clone(), "dca_class_2023_min_return".to_string()), - _2024: MetricPattern4::new(client.clone(), "dca_class_2024_min_return".to_string()), - _2025: MetricPattern4::new(client.clone(), "dca_class_2025_min_return".to_string()), - _2026: MetricPattern4::new(client.clone(), "dca_class_2026_min_return".to_string()), + _2015: MetricPattern1::new(client.clone(), "dca_class_2015_min_return".to_string()), + _2016: 
MetricPattern1::new(client.clone(), "dca_class_2016_min_return".to_string()), + _2017: MetricPattern1::new(client.clone(), "dca_class_2017_min_return".to_string()), + _2018: MetricPattern1::new(client.clone(), "dca_class_2018_min_return".to_string()), + _2019: MetricPattern1::new(client.clone(), "dca_class_2019_min_return".to_string()), + _2020: MetricPattern1::new(client.clone(), "dca_class_2020_min_return".to_string()), + _2021: MetricPattern1::new(client.clone(), "dca_class_2021_min_return".to_string()), + _2022: MetricPattern1::new(client.clone(), "dca_class_2022_min_return".to_string()), + _2023: MetricPattern1::new(client.clone(), "dca_class_2023_min_return".to_string()), + _2024: MetricPattern1::new(client.clone(), "dca_class_2024_min_return".to_string()), + _2025: MetricPattern1::new(client.clone(), "dca_class_2025_min_return".to_string()), + _2026: MetricPattern1::new(client.clone(), "dca_class_2026_min_return".to_string()), } } } /// Metrics tree node. pub struct MetricsTree_Market_Dca_ClassMaxReturn { - pub _2015: MetricPattern4, - pub _2016: MetricPattern4, - pub _2017: MetricPattern4, - pub _2018: MetricPattern4, - pub _2019: MetricPattern4, - pub _2020: MetricPattern4, - pub _2021: MetricPattern4, - pub _2022: MetricPattern4, - pub _2023: MetricPattern4, - pub _2024: MetricPattern4, - pub _2025: MetricPattern4, - pub _2026: MetricPattern4, + pub _2015: MetricPattern1, + pub _2016: MetricPattern1, + pub _2017: MetricPattern1, + pub _2018: MetricPattern1, + pub _2019: MetricPattern1, + pub _2020: MetricPattern1, + pub _2021: MetricPattern1, + pub _2022: MetricPattern1, + pub _2023: MetricPattern1, + pub _2024: MetricPattern1, + pub _2025: MetricPattern1, + pub _2026: MetricPattern1, } impl MetricsTree_Market_Dca_ClassMaxReturn { pub fn new(client: Arc, base_path: String) -> Self { Self { - _2015: MetricPattern4::new(client.clone(), "dca_class_2015_max_return".to_string()), - _2016: MetricPattern4::new(client.clone(), 
"dca_class_2016_max_return".to_string()), - _2017: MetricPattern4::new(client.clone(), "dca_class_2017_max_return".to_string()), - _2018: MetricPattern4::new(client.clone(), "dca_class_2018_max_return".to_string()), - _2019: MetricPattern4::new(client.clone(), "dca_class_2019_max_return".to_string()), - _2020: MetricPattern4::new(client.clone(), "dca_class_2020_max_return".to_string()), - _2021: MetricPattern4::new(client.clone(), "dca_class_2021_max_return".to_string()), - _2022: MetricPattern4::new(client.clone(), "dca_class_2022_max_return".to_string()), - _2023: MetricPattern4::new(client.clone(), "dca_class_2023_max_return".to_string()), - _2024: MetricPattern4::new(client.clone(), "dca_class_2024_max_return".to_string()), - _2025: MetricPattern4::new(client.clone(), "dca_class_2025_max_return".to_string()), - _2026: MetricPattern4::new(client.clone(), "dca_class_2026_max_return".to_string()), + _2015: MetricPattern1::new(client.clone(), "dca_class_2015_max_return".to_string()), + _2016: MetricPattern1::new(client.clone(), "dca_class_2016_max_return".to_string()), + _2017: MetricPattern1::new(client.clone(), "dca_class_2017_max_return".to_string()), + _2018: MetricPattern1::new(client.clone(), "dca_class_2018_max_return".to_string()), + _2019: MetricPattern1::new(client.clone(), "dca_class_2019_max_return".to_string()), + _2020: MetricPattern1::new(client.clone(), "dca_class_2020_max_return".to_string()), + _2021: MetricPattern1::new(client.clone(), "dca_class_2021_max_return".to_string()), + _2022: MetricPattern1::new(client.clone(), "dca_class_2022_max_return".to_string()), + _2023: MetricPattern1::new(client.clone(), "dca_class_2023_max_return".to_string()), + _2024: MetricPattern1::new(client.clone(), "dca_class_2024_max_return".to_string()), + _2025: MetricPattern1::new(client.clone(), "dca_class_2025_max_return".to_string()), + _2026: MetricPattern1::new(client.clone(), "dca_class_2026_max_return".to_string()), } } } /// Metrics tree node. 
pub struct MetricsTree_Market_Indicators { - pub puell_multiple: MetricPattern4, - pub nvt: MetricPattern4, - pub rsi_gains: MetricPattern6, - pub rsi_losses: MetricPattern6, - pub rsi_average_gain_14d: MetricPattern6, - pub rsi_average_loss_14d: MetricPattern6, - pub rsi_14d: MetricPattern6, - pub rsi_14d_min: MetricPattern6, - pub rsi_14d_max: MetricPattern6, - pub stoch_rsi: MetricPattern6, - pub stoch_rsi_k: MetricPattern6, - pub stoch_rsi_d: MetricPattern6, - pub stoch_k: MetricPattern6, - pub stoch_d: MetricPattern6, - pub pi_cycle: MetricPattern6, - pub macd_line: MetricPattern6, - pub macd_signal: MetricPattern6, - pub macd_histogram: MetricPattern6, - pub gini: MetricPattern6, + pub puell_multiple: MetricPattern1, + pub nvt: MetricPattern1, + pub rsi: MetricsTree_Market_Indicators_Rsi, + pub stoch_k: MetricPattern1, + pub stoch_d: MetricPattern1, + pub pi_cycle: MetricPattern1, + pub macd: MetricsTree_Market_Indicators_Macd, + pub gini: MetricPattern1, } impl MetricsTree_Market_Indicators { pub fn new(client: Arc, base_path: String) -> Self { Self { - puell_multiple: MetricPattern4::new(client.clone(), "puell_multiple".to_string()), - nvt: MetricPattern4::new(client.clone(), "nvt".to_string()), - rsi_gains: MetricPattern6::new(client.clone(), "rsi_gains".to_string()), - rsi_losses: MetricPattern6::new(client.clone(), "rsi_losses".to_string()), - rsi_average_gain_14d: MetricPattern6::new(client.clone(), "rsi_average_gain_14d".to_string()), - rsi_average_loss_14d: MetricPattern6::new(client.clone(), "rsi_average_loss_14d".to_string()), - rsi_14d: MetricPattern6::new(client.clone(), "rsi_14d".to_string()), - rsi_14d_min: MetricPattern6::new(client.clone(), "rsi_14d_min".to_string()), - rsi_14d_max: MetricPattern6::new(client.clone(), "rsi_14d_max".to_string()), - stoch_rsi: MetricPattern6::new(client.clone(), "stoch_rsi".to_string()), - stoch_rsi_k: MetricPattern6::new(client.clone(), "stoch_rsi_k".to_string()), - stoch_rsi_d: 
MetricPattern6::new(client.clone(), "stoch_rsi_d".to_string()), - stoch_k: MetricPattern6::new(client.clone(), "stoch_k".to_string()), - stoch_d: MetricPattern6::new(client.clone(), "stoch_d".to_string()), - pi_cycle: MetricPattern6::new(client.clone(), "pi_cycle".to_string()), - macd_line: MetricPattern6::new(client.clone(), "macd_line".to_string()), - macd_signal: MetricPattern6::new(client.clone(), "macd_signal".to_string()), - macd_histogram: MetricPattern6::new(client.clone(), "macd_histogram".to_string()), - gini: MetricPattern6::new(client.clone(), "gini".to_string()), + puell_multiple: MetricPattern1::new(client.clone(), "puell_multiple".to_string()), + nvt: MetricPattern1::new(client.clone(), "nvt".to_string()), + rsi: MetricsTree_Market_Indicators_Rsi::new(client.clone(), format!("{base_path}_rsi")), + stoch_k: MetricPattern1::new(client.clone(), "stoch_k".to_string()), + stoch_d: MetricPattern1::new(client.clone(), "stoch_d".to_string()), + pi_cycle: MetricPattern1::new(client.clone(), "pi_cycle".to_string()), + macd: MetricsTree_Market_Indicators_Macd::new(client.clone(), format!("{base_path}_macd")), + gini: MetricPattern1::new(client.clone(), "gini".to_string()), + } + } +} + +/// Metrics tree node. +pub struct MetricsTree_Market_Indicators_Rsi { + pub _1d: MetricsTree_Market_Indicators_Rsi_1d, + pub _1w: MetricsTree_Market_Indicators_Rsi_1w, + pub _1m: MetricsTree_Market_Indicators_Rsi_1m, + pub _1y: AverageGainsLossesRsiStochPattern, +} + +impl MetricsTree_Market_Indicators_Rsi { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + _1d: MetricsTree_Market_Indicators_Rsi_1d::new(client.clone(), format!("{base_path}_1d")), + _1w: MetricsTree_Market_Indicators_Rsi_1w::new(client.clone(), format!("{base_path}_1w")), + _1m: MetricsTree_Market_Indicators_Rsi_1m::new(client.clone(), format!("{base_path}_1m")), + _1y: AverageGainsLossesRsiStochPattern::new(client.clone(), "rsi".to_string()), + } + } +} + +/// Metrics tree node. 
+pub struct MetricsTree_Market_Indicators_Rsi_1d { + pub gains: MetricPattern1, + pub losses: MetricPattern1, + pub average_gain: MetricPattern1, + pub average_loss: MetricPattern1, + pub rsi: MetricPattern20, + pub rsi_min: MetricPattern1, + pub rsi_max: MetricPattern1, + pub stoch_rsi: MetricPattern1, + pub stoch_rsi_k: MetricPattern1, + pub stoch_rsi_d: MetricPattern1, +} + +impl MetricsTree_Market_Indicators_Rsi_1d { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + gains: MetricPattern1::new(client.clone(), "rsi_gains_1d".to_string()), + losses: MetricPattern1::new(client.clone(), "rsi_losses_1d".to_string()), + average_gain: MetricPattern1::new(client.clone(), "rsi_avg_gain_1d".to_string()), + average_loss: MetricPattern1::new(client.clone(), "rsi_avg_loss_1d".to_string()), + rsi: MetricPattern20::new(client.clone(), "rsi_1d".to_string()), + rsi_min: MetricPattern1::new(client.clone(), "rsi_rsi_min_1d".to_string()), + rsi_max: MetricPattern1::new(client.clone(), "rsi_rsi_max_1d".to_string()), + stoch_rsi: MetricPattern1::new(client.clone(), "rsi_stoch_rsi_1d".to_string()), + stoch_rsi_k: MetricPattern1::new(client.clone(), "rsi_stoch_rsi_k_1d".to_string()), + stoch_rsi_d: MetricPattern1::new(client.clone(), "rsi_stoch_rsi_d_1d".to_string()), + } + } +} + +/// Metrics tree node. 
+pub struct MetricsTree_Market_Indicators_Rsi_1w { + pub gains: MetricPattern1, + pub losses: MetricPattern1, + pub average_gain: MetricPattern1, + pub average_loss: MetricPattern1, + pub rsi: MetricPattern20, + pub rsi_min: MetricPattern1, + pub rsi_max: MetricPattern1, + pub stoch_rsi: MetricPattern1, + pub stoch_rsi_k: MetricPattern1, + pub stoch_rsi_d: MetricPattern1, +} + +impl MetricsTree_Market_Indicators_Rsi_1w { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + gains: MetricPattern1::new(client.clone(), "rsi_gains_1w".to_string()), + losses: MetricPattern1::new(client.clone(), "rsi_losses_1w".to_string()), + average_gain: MetricPattern1::new(client.clone(), "rsi_avg_gain_1w".to_string()), + average_loss: MetricPattern1::new(client.clone(), "rsi_avg_loss_1w".to_string()), + rsi: MetricPattern20::new(client.clone(), "rsi_1w".to_string()), + rsi_min: MetricPattern1::new(client.clone(), "rsi_rsi_min_1w".to_string()), + rsi_max: MetricPattern1::new(client.clone(), "rsi_rsi_max_1w".to_string()), + stoch_rsi: MetricPattern1::new(client.clone(), "rsi_stoch_rsi_1w".to_string()), + stoch_rsi_k: MetricPattern1::new(client.clone(), "rsi_stoch_rsi_k_1w".to_string()), + stoch_rsi_d: MetricPattern1::new(client.clone(), "rsi_stoch_rsi_d_1w".to_string()), + } + } +} + +/// Metrics tree node. 
+pub struct MetricsTree_Market_Indicators_Rsi_1m { + pub gains: MetricPattern1, + pub losses: MetricPattern1, + pub average_gain: MetricPattern1, + pub average_loss: MetricPattern1, + pub rsi: MetricPattern20, + pub rsi_min: MetricPattern1, + pub rsi_max: MetricPattern1, + pub stoch_rsi: MetricPattern1, + pub stoch_rsi_k: MetricPattern1, + pub stoch_rsi_d: MetricPattern1, +} + +impl MetricsTree_Market_Indicators_Rsi_1m { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + gains: MetricPattern1::new(client.clone(), "rsi_gains_1m".to_string()), + losses: MetricPattern1::new(client.clone(), "rsi_losses_1m".to_string()), + average_gain: MetricPattern1::new(client.clone(), "rsi_avg_gain_1m".to_string()), + average_loss: MetricPattern1::new(client.clone(), "rsi_avg_loss_1m".to_string()), + rsi: MetricPattern20::new(client.clone(), "rsi_1m".to_string()), + rsi_min: MetricPattern1::new(client.clone(), "rsi_rsi_min_1m".to_string()), + rsi_max: MetricPattern1::new(client.clone(), "rsi_rsi_max_1m".to_string()), + stoch_rsi: MetricPattern1::new(client.clone(), "rsi_stoch_rsi_1m".to_string()), + stoch_rsi_k: MetricPattern1::new(client.clone(), "rsi_stoch_rsi_k_1m".to_string()), + stoch_rsi_d: MetricPattern1::new(client.clone(), "rsi_stoch_rsi_d_1m".to_string()), + } + } +} + +/// Metrics tree node. 
+pub struct MetricsTree_Market_Indicators_Macd { + pub _1d: MetricsTree_Market_Indicators_Macd_1d, + pub _1w: MetricsTree_Market_Indicators_Macd_1w, + pub _1m: MetricsTree_Market_Indicators_Macd_1m, + pub _1y: HistogramLineSignalPattern, +} + +impl MetricsTree_Market_Indicators_Macd { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + _1d: MetricsTree_Market_Indicators_Macd_1d::new(client.clone(), format!("{base_path}_1d")), + _1w: MetricsTree_Market_Indicators_Macd_1w::new(client.clone(), format!("{base_path}_1w")), + _1m: MetricsTree_Market_Indicators_Macd_1m::new(client.clone(), format!("{base_path}_1m")), + _1y: HistogramLineSignalPattern::new(client.clone(), "macd".to_string()), + } + } +} + +/// Metrics tree node. +pub struct MetricsTree_Market_Indicators_Macd_1d { + pub line: MetricPattern1, + pub signal: MetricPattern1, + pub histogram: MetricPattern20, +} + +impl MetricsTree_Market_Indicators_Macd_1d { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + line: MetricPattern1::new(client.clone(), "macd_line_1d".to_string()), + signal: MetricPattern1::new(client.clone(), "macd_signal_1d".to_string()), + histogram: MetricPattern20::new(client.clone(), "macd_histogram_1d".to_string()), + } + } +} + +/// Metrics tree node. +pub struct MetricsTree_Market_Indicators_Macd_1w { + pub line: MetricPattern1, + pub signal: MetricPattern1, + pub histogram: MetricPattern20, +} + +impl MetricsTree_Market_Indicators_Macd_1w { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + line: MetricPattern1::new(client.clone(), "macd_line_1w".to_string()), + signal: MetricPattern1::new(client.clone(), "macd_signal_1w".to_string()), + histogram: MetricPattern20::new(client.clone(), "macd_histogram_1w".to_string()), + } + } +} + +/// Metrics tree node. 
+pub struct MetricsTree_Market_Indicators_Macd_1m { + pub line: MetricPattern1, + pub signal: MetricPattern1, + pub histogram: MetricPattern20, +} + +impl MetricsTree_Market_Indicators_Macd_1m { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + line: MetricPattern1::new(client.clone(), "macd_line_1m".to_string()), + signal: MetricPattern1::new(client.clone(), "macd_signal_1m".to_string()), + histogram: MetricPattern20::new(client.clone(), "macd_histogram_1m".to_string()), } } } /// Metrics tree node. pub struct MetricsTree_Pools { - pub height_to_pool: MetricPattern11, + pub height_to_pool: MetricPattern20, pub vecs: MetricsTree_Pools_Vecs, } impl MetricsTree_Pools { pub fn new(client: Arc, base_path: String) -> Self { Self { - height_to_pool: MetricPattern11::new(client.clone(), "pool".to_string()), + height_to_pool: MetricPattern20::new(client.clone(), "pool".to_string()), vecs: MetricsTree_Pools_Vecs::new(client.clone(), format!("{base_path}_vecs")), } } @@ -4748,423 +5499,410 @@ impl MetricsTree_Pools { /// Metrics tree node. 
pub struct MetricsTree_Pools_Vecs { - pub unknown: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub blockfills: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub ultimuspool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub terrapool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub luxor: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub onethash: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub btccom: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub bitfarms: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub huobipool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub wayicn: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub canoepool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub btctop: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub bitcoincom: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub pool175btc: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub gbminers: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub axbt: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub asicminer: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub bitminter: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub bitcoinrussia: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub btcserv: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub simplecoinus: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub btcguild: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub eligius: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub ozcoin: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub eclipsemc: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub maxbtc: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub triplemining: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub 
coinlab: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub pool50btc: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub ghashio: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub stminingcorp: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub bitparking: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub mmpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub polmine: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub kncminer: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub bitalo: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub f2pool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub hhtt: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub megabigpower: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub mtred: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub nmcbit: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub yourbtcnet: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub givemecoins: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub braiinspool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub antpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub multicoinco: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub bcpoolio: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub cointerra: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub kanopool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub solock: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub ckpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub nicehash: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub bitclub: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub bitcoinaffiliatenetwork: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub btcc: 
_1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub bwpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub exxbw: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub bitsolo: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub bitfury: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub twentyoneinc: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub digitalbtc: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub eightbaochi: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub mybtccoinpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub tbdice: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub hashpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub nexious: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub bravomining: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub hotpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub okexpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub bcmonster: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub onehash: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub bixin: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub tatmaspool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub viabtc: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub connectbtc: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub batpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub waterhole: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub dcexploration: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub dcex: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub btpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub fiftyeightcoin: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub bitcoinindia: 
_1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub shawnp0wers: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub phashio: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub rigpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub haozhuzhu: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub sevenpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub miningkings: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub hashbx: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub dpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub rawpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub haominer: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub helix: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub bitcoinukraine: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub poolin: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub secretsuperstar: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub tigerpoolnet: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub sigmapoolcom: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub okpooltop: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub hummerpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub tangpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub bytepool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub spiderpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub novablock: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub miningcity: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub binancepool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub minerium: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub lubiancom: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub okkong: 
_1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub aaopool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub emcdpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub foundryusa: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub sbicrypto: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub arkpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub purebtccom: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub marapool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub kucoinpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub entrustcharitypool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub okminer: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub titan: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub pegapool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub btcnuggets: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub cloudhashing: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub digitalxmintsy: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub telco214: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub btcpoolparty: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub multipool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub transactioncoinmining: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub btcdig: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub trickysbtcpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub btcmp: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub eobot: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub unomp: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub patels: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub gogreenlight: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub ekanembtc: 
_1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub canoe: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub tiger: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub onem1x: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub zulupool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub secpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub ocean: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub whitepool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub wk057: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub futurebitapollosolo: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub carbonnegative: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub portlandhodl: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub phoenix: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub neopool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub maxipool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub bitfufupool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub luckypool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub miningdutch: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub publicpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub miningsquared: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub innopolistech: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub btclab: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, - pub parasite: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub unknown: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub blockfills: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub ultimuspool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub terrapool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub luxor: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub onethash: 
BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub btccom: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub bitfarms: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub huobipool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub wayicn: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub canoepool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub btctop: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub bitcoincom: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub pool175btc: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub gbminers: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub axbt: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub asicminer: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub bitminter: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub bitcoinrussia: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub btcserv: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub simplecoinus: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub btcguild: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub eligius: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub ozcoin: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub eclipsemc: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub maxbtc: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub triplemining: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub coinlab: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub pool50btc: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub ghashio: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub stminingcorp: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub bitparking: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub mmpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub polmine: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub kncminer: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub bitalo: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub f2pool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub hhtt: 
BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub megabigpower: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub mtred: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub nmcbit: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub yourbtcnet: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub givemecoins: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub braiinspool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub antpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub multicoinco: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub bcpoolio: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub cointerra: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub kanopool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub solock: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub ckpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub nicehash: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub bitclub: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub bitcoinaffiliatenetwork: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub btcc: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub bwpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub exxbw: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub bitsolo: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub bitfury: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub twentyoneinc: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub digitalbtc: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub eightbaochi: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub mybtccoinpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub tbdice: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub hashpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub nexious: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub bravomining: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub hotpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub okexpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub 
bcmonster: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub onehash: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub bixin: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub tatmaspool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub viabtc: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub connectbtc: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub batpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub waterhole: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub dcexploration: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub dcex: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub btpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub fiftyeightcoin: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub bitcoinindia: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub shawnp0wers: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub phashio: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub rigpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub haozhuzhu: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub sevenpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub miningkings: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub hashbx: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub dpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub rawpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub haominer: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub helix: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub bitcoinukraine: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub poolin: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub secretsuperstar: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub tigerpoolnet: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub sigmapoolcom: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub okpooltop: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub hummerpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub tangpool: 
BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub bytepool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub spiderpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub novablock: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub miningcity: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub binancepool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub minerium: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub lubiancom: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub okkong: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub aaopool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub emcdpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub foundryusa: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub sbicrypto: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub arkpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub purebtccom: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub marapool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub kucoinpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub entrustcharitypool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub okminer: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub titan: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub pegapool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub btcnuggets: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub cloudhashing: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub digitalxmintsy: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub telco214: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub btcpoolparty: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub multipool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub transactioncoinmining: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub btcdig: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub trickysbtcpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub btcmp: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub eobot: 
BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub unomp: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub patels: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub gogreenlight: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub bitcoinindiapool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub ekanembtc: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub canoe: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub tiger: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub onem1x: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub zulupool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub secpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub ocean: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub whitepool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub wiz: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub wk057: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub futurebitapollosolo: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub carbonnegative: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub portlandhodl: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub phoenix: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub neopool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub maxipool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub bitfufupool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub gdpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub miningdutch: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub publicpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub miningsquared: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub innopolistech: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub btclab: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub parasite: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub redrockpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, + pub est3lar: BlocksCoinbaseDaysDominanceFeeSubsidyPattern, } impl MetricsTree_Pools_Vecs { pub fn new(client: Arc, base_path: 
String) -> Self { Self { - unknown: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "unknown".to_string()), - blockfills: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "blockfills".to_string()), - ultimuspool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "ultimuspool".to_string()), - terrapool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "terrapool".to_string()), - luxor: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "luxor".to_string()), - onethash: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "onethash".to_string()), - btccom: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "btccom".to_string()), - bitfarms: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitfarms".to_string()), - huobipool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "huobipool".to_string()), - wayicn: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "wayicn".to_string()), - canoepool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "canoepool".to_string()), - btctop: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "btctop".to_string()), - bitcoincom: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitcoincom".to_string()), - pool175btc: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "pool175btc".to_string()), - gbminers: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "gbminers".to_string()), - axbt: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "axbt".to_string()), - asicminer: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "asicminer".to_string()), - bitminter: 
_1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitminter".to_string()), - bitcoinrussia: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitcoinrussia".to_string()), - btcserv: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "btcserv".to_string()), - simplecoinus: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "simplecoinus".to_string()), - btcguild: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "btcguild".to_string()), - eligius: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "eligius".to_string()), - ozcoin: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "ozcoin".to_string()), - eclipsemc: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "eclipsemc".to_string()), - maxbtc: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "maxbtc".to_string()), - triplemining: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "triplemining".to_string()), - coinlab: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "coinlab".to_string()), - pool50btc: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "pool50btc".to_string()), - ghashio: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "ghashio".to_string()), - stminingcorp: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "stminingcorp".to_string()), - bitparking: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitparking".to_string()), - mmpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "mmpool".to_string()), - polmine: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "polmine".to_string()), - kncminer: 
_1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "kncminer".to_string()), - bitalo: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitalo".to_string()), - f2pool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "f2pool".to_string()), - hhtt: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "hhtt".to_string()), - megabigpower: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "megabigpower".to_string()), - mtred: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "mtred".to_string()), - nmcbit: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "nmcbit".to_string()), - yourbtcnet: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "yourbtcnet".to_string()), - givemecoins: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "givemecoins".to_string()), - braiinspool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "braiinspool".to_string()), - antpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "antpool".to_string()), - multicoinco: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "multicoinco".to_string()), - bcpoolio: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bcpoolio".to_string()), - cointerra: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "cointerra".to_string()), - kanopool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "kanopool".to_string()), - solock: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "solock".to_string()), - ckpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "ckpool".to_string()), - nicehash: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), 
"nicehash".to_string()), - bitclub: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitclub".to_string()), - bitcoinaffiliatenetwork: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitcoinaffiliatenetwork".to_string()), - btcc: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "btcc".to_string()), - bwpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bwpool".to_string()), - exxbw: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "exxbw".to_string()), - bitsolo: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitsolo".to_string()), - bitfury: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitfury".to_string()), - twentyoneinc: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "twentyoneinc".to_string()), - digitalbtc: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "digitalbtc".to_string()), - eightbaochi: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "eightbaochi".to_string()), - mybtccoinpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "mybtccoinpool".to_string()), - tbdice: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "tbdice".to_string()), - hashpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "hashpool".to_string()), - nexious: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "nexious".to_string()), - bravomining: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bravomining".to_string()), - hotpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "hotpool".to_string()), - okexpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "okexpool".to_string()), - bcmonster: 
_1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bcmonster".to_string()), - onehash: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "onehash".to_string()), - bixin: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bixin".to_string()), - tatmaspool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "tatmaspool".to_string()), - viabtc: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "viabtc".to_string()), - connectbtc: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "connectbtc".to_string()), - batpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "batpool".to_string()), - waterhole: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "waterhole".to_string()), - dcexploration: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "dcexploration".to_string()), - dcex: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "dcex".to_string()), - btpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "btpool".to_string()), - fiftyeightcoin: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "fiftyeightcoin".to_string()), - bitcoinindia: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitcoinindia".to_string()), - shawnp0wers: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "shawnp0wers".to_string()), - phashio: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "phashio".to_string()), - rigpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "rigpool".to_string()), - haozhuzhu: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "haozhuzhu".to_string()), - sevenpool: 
_1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "sevenpool".to_string()), - miningkings: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "miningkings".to_string()), - hashbx: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "hashbx".to_string()), - dpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "dpool".to_string()), - rawpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "rawpool".to_string()), - haominer: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "haominer".to_string()), - helix: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "helix".to_string()), - bitcoinukraine: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitcoinukraine".to_string()), - poolin: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "poolin".to_string()), - secretsuperstar: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "secretsuperstar".to_string()), - tigerpoolnet: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "tigerpoolnet".to_string()), - sigmapoolcom: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "sigmapoolcom".to_string()), - okpooltop: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "okpooltop".to_string()), - hummerpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "hummerpool".to_string()), - tangpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "tangpool".to_string()), - bytepool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bytepool".to_string()), - spiderpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "spiderpool".to_string()), - novablock: 
_1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "novablock".to_string()), - miningcity: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "miningcity".to_string()), - binancepool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "binancepool".to_string()), - minerium: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "minerium".to_string()), - lubiancom: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "lubiancom".to_string()), - okkong: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "okkong".to_string()), - aaopool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "aaopool".to_string()), - emcdpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "emcdpool".to_string()), - foundryusa: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "foundryusa".to_string()), - sbicrypto: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "sbicrypto".to_string()), - arkpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "arkpool".to_string()), - purebtccom: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "purebtccom".to_string()), - marapool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "marapool".to_string()), - kucoinpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "kucoinpool".to_string()), - entrustcharitypool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "entrustcharitypool".to_string()), - okminer: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "okminer".to_string()), - titan: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "titan".to_string()), - pegapool: 
_1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "pegapool".to_string()), - btcnuggets: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "btcnuggets".to_string()), - cloudhashing: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "cloudhashing".to_string()), - digitalxmintsy: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "digitalxmintsy".to_string()), - telco214: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "telco214".to_string()), - btcpoolparty: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "btcpoolparty".to_string()), - multipool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "multipool".to_string()), - transactioncoinmining: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "transactioncoinmining".to_string()), - btcdig: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "btcdig".to_string()), - trickysbtcpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "trickysbtcpool".to_string()), - btcmp: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "btcmp".to_string()), - eobot: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "eobot".to_string()), - unomp: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "unomp".to_string()), - patels: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "patels".to_string()), - gogreenlight: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "gogreenlight".to_string()), - ekanembtc: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "ekanembtc".to_string()), - canoe: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "canoe".to_string()), - tiger: 
_1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "tiger".to_string()), - onem1x: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "onem1x".to_string()), - zulupool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "zulupool".to_string()), - secpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "secpool".to_string()), - ocean: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "ocean".to_string()), - whitepool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "whitepool".to_string()), - wk057: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "wk057".to_string()), - futurebitapollosolo: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "futurebitapollosolo".to_string()), - carbonnegative: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "carbonnegative".to_string()), - portlandhodl: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "portlandhodl".to_string()), - phoenix: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "phoenix".to_string()), - neopool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "neopool".to_string()), - maxipool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "maxipool".to_string()), - bitfufupool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitfufupool".to_string()), - luckypool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "luckypool".to_string()), - miningdutch: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "miningdutch".to_string()), - publicpool: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "publicpool".to_string()), - miningsquared: 
_1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "miningsquared".to_string()), - innopolistech: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "innopolistech".to_string()), - btclab: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "btclab".to_string()), - parasite: _1m1w1y24hBlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "parasite".to_string()), + unknown: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "unknown".to_string()), + blockfills: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "blockfills".to_string()), + ultimuspool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "ultimuspool".to_string()), + terrapool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "terrapool".to_string()), + luxor: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "luxor".to_string()), + onethash: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "onethash".to_string()), + btccom: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "btccom".to_string()), + bitfarms: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitfarms".to_string()), + huobipool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "huobipool".to_string()), + wayicn: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "wayicn".to_string()), + canoepool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "canoepool".to_string()), + btctop: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "btctop".to_string()), + bitcoincom: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitcoincom".to_string()), + pool175btc: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "pool175btc".to_string()), + gbminers: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "gbminers".to_string()), 
+ axbt: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "axbt".to_string()), + asicminer: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "asicminer".to_string()), + bitminter: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitminter".to_string()), + bitcoinrussia: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitcoinrussia".to_string()), + btcserv: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "btcserv".to_string()), + simplecoinus: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "simplecoinus".to_string()), + btcguild: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "btcguild".to_string()), + eligius: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "eligius".to_string()), + ozcoin: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "ozcoin".to_string()), + eclipsemc: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "eclipsemc".to_string()), + maxbtc: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "maxbtc".to_string()), + triplemining: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "triplemining".to_string()), + coinlab: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "coinlab".to_string()), + pool50btc: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "pool50btc".to_string()), + ghashio: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "ghashio".to_string()), + stminingcorp: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "stminingcorp".to_string()), + bitparking: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitparking".to_string()), + mmpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "mmpool".to_string()), + polmine: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "polmine".to_string()), + kncminer: 
BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "kncminer".to_string()), + bitalo: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitalo".to_string()), + f2pool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "f2pool".to_string()), + hhtt: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "hhtt".to_string()), + megabigpower: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "megabigpower".to_string()), + mtred: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "mtred".to_string()), + nmcbit: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "nmcbit".to_string()), + yourbtcnet: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "yourbtcnet".to_string()), + givemecoins: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "givemecoins".to_string()), + braiinspool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "braiinspool".to_string()), + antpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "antpool".to_string()), + multicoinco: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "multicoinco".to_string()), + bcpoolio: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bcpoolio".to_string()), + cointerra: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "cointerra".to_string()), + kanopool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "kanopool".to_string()), + solock: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "solock".to_string()), + ckpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "ckpool".to_string()), + nicehash: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "nicehash".to_string()), + bitclub: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitclub".to_string()), + bitcoinaffiliatenetwork: 
BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitcoinaffiliatenetwork".to_string()), + btcc: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "btcc".to_string()), + bwpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bwpool".to_string()), + exxbw: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "exxbw".to_string()), + bitsolo: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitsolo".to_string()), + bitfury: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitfury".to_string()), + twentyoneinc: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "twentyoneinc".to_string()), + digitalbtc: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "digitalbtc".to_string()), + eightbaochi: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "eightbaochi".to_string()), + mybtccoinpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "mybtccoinpool".to_string()), + tbdice: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "tbdice".to_string()), + hashpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "hashpool".to_string()), + nexious: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "nexious".to_string()), + bravomining: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bravomining".to_string()), + hotpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "hotpool".to_string()), + okexpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "okexpool".to_string()), + bcmonster: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bcmonster".to_string()), + onehash: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "onehash".to_string()), + bixin: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bixin".to_string()), + tatmaspool: 
BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "tatmaspool".to_string()), + viabtc: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "viabtc".to_string()), + connectbtc: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "connectbtc".to_string()), + batpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "batpool".to_string()), + waterhole: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "waterhole".to_string()), + dcexploration: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "dcexploration".to_string()), + dcex: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "dcex".to_string()), + btpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "btpool".to_string()), + fiftyeightcoin: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "fiftyeightcoin".to_string()), + bitcoinindia: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitcoinindia".to_string()), + shawnp0wers: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "shawnp0wers".to_string()), + phashio: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "phashio".to_string()), + rigpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "rigpool".to_string()), + haozhuzhu: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "haozhuzhu".to_string()), + sevenpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "sevenpool".to_string()), + miningkings: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "miningkings".to_string()), + hashbx: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "hashbx".to_string()), + dpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "dpool".to_string()), + rawpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "rawpool".to_string()), + haominer: 
BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "haominer".to_string()), + helix: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "helix".to_string()), + bitcoinukraine: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitcoinukraine".to_string()), + poolin: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "poolin".to_string()), + secretsuperstar: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "secretsuperstar".to_string()), + tigerpoolnet: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "tigerpoolnet".to_string()), + sigmapoolcom: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "sigmapoolcom".to_string()), + okpooltop: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "okpooltop".to_string()), + hummerpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "hummerpool".to_string()), + tangpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "tangpool".to_string()), + bytepool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bytepool".to_string()), + spiderpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "spiderpool".to_string()), + novablock: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "novablock".to_string()), + miningcity: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "miningcity".to_string()), + binancepool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "binancepool".to_string()), + minerium: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "minerium".to_string()), + lubiancom: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "lubiancom".to_string()), + okkong: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "okkong".to_string()), + aaopool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "aaopool".to_string()), + 
emcdpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "emcdpool".to_string()), + foundryusa: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "foundryusa".to_string()), + sbicrypto: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "sbicrypto".to_string()), + arkpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "arkpool".to_string()), + purebtccom: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "purebtccom".to_string()), + marapool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "marapool".to_string()), + kucoinpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "kucoinpool".to_string()), + entrustcharitypool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "entrustcharitypool".to_string()), + okminer: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "okminer".to_string()), + titan: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "titan".to_string()), + pegapool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "pegapool".to_string()), + btcnuggets: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "btcnuggets".to_string()), + cloudhashing: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "cloudhashing".to_string()), + digitalxmintsy: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "digitalxmintsy".to_string()), + telco214: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "telco214".to_string()), + btcpoolparty: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "btcpoolparty".to_string()), + multipool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "multipool".to_string()), + transactioncoinmining: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "transactioncoinmining".to_string()), + btcdig: 
BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "btcdig".to_string()), + trickysbtcpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "trickysbtcpool".to_string()), + btcmp: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "btcmp".to_string()), + eobot: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "eobot".to_string()), + unomp: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "unomp".to_string()), + patels: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "patels".to_string()), + gogreenlight: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "gogreenlight".to_string()), + bitcoinindiapool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitcoinindiapool".to_string()), + ekanembtc: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "ekanembtc".to_string()), + canoe: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "canoe".to_string()), + tiger: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "tiger".to_string()), + onem1x: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "onem1x".to_string()), + zulupool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "zulupool".to_string()), + secpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "secpool".to_string()), + ocean: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "ocean".to_string()), + whitepool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "whitepool".to_string()), + wiz: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "wiz".to_string()), + wk057: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "wk057".to_string()), + futurebitapollosolo: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "futurebitapollosolo".to_string()), + carbonnegative: 
BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "carbonnegative".to_string()), + portlandhodl: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "portlandhodl".to_string()), + phoenix: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "phoenix".to_string()), + neopool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "neopool".to_string()), + maxipool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "maxipool".to_string()), + bitfufupool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "bitfufupool".to_string()), + gdpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "gdpool".to_string()), + miningdutch: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "miningdutch".to_string()), + publicpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "publicpool".to_string()), + miningsquared: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "miningsquared".to_string()), + innopolistech: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "innopolistech".to_string()), + btclab: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "btclab".to_string()), + parasite: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "parasite".to_string()), + redrockpool: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "redrockpool".to_string()), + est3lar: BlocksCoinbaseDaysDominanceFeeSubsidyPattern::new(client.clone(), "est3lar".to_string()), } } } /// Metrics tree node. 
-pub struct MetricsTree_Price { - pub cents: MetricsTree_Price_Cents, - pub usd: MetricsTree_Price_Usd, - pub sats: OhlcSplitPattern2, - pub oracle: MetricsTree_Price_Oracle, +pub struct MetricsTree_Prices { + pub cents: MetricsTree_Prices_Cents, + pub usd: MetricsTree_Prices_Usd, + pub sats: MetricsTree_Prices_Sats, } -impl MetricsTree_Price { +impl MetricsTree_Prices { pub fn new(client: Arc, base_path: String) -> Self { Self { - cents: MetricsTree_Price_Cents::new(client.clone(), format!("{base_path}_cents")), - usd: MetricsTree_Price_Usd::new(client.clone(), format!("{base_path}_usd")), - sats: OhlcSplitPattern2::new(client.clone(), "price".to_string()), - oracle: MetricsTree_Price_Oracle::new(client.clone(), format!("{base_path}_oracle")), + cents: MetricsTree_Prices_Cents::new(client.clone(), format!("{base_path}_cents")), + usd: MetricsTree_Prices_Usd::new(client.clone(), format!("{base_path}_usd")), + sats: MetricsTree_Prices_Sats::new(client.clone(), format!("{base_path}_sats")), } } } /// Metrics tree node. -pub struct MetricsTree_Price_Cents { - pub split: MetricsTree_Price_Cents_Split, - pub ohlc: MetricPattern5, +pub struct MetricsTree_Prices_Cents { + pub price: MetricPattern20, + pub split: CloseHighLowOpenPattern, + pub ohlc: MetricPattern2, } -impl MetricsTree_Price_Cents { +impl MetricsTree_Prices_Cents { pub fn new(client: Arc, base_path: String) -> Self { Self { - split: MetricsTree_Price_Cents_Split::new(client.clone(), format!("{base_path}_split")), - ohlc: MetricPattern5::new(client.clone(), "ohlc_cents".to_string()), + price: MetricPattern20::new(client.clone(), "price_cents".to_string()), + split: CloseHighLowOpenPattern::new(client.clone(), "price_cents".to_string()), + ohlc: MetricPattern2::new(client.clone(), "price_cents_ohlc".to_string()), } } } /// Metrics tree node. 
-pub struct MetricsTree_Price_Cents_Split { - pub open: MetricPattern5, - pub high: MetricPattern5, - pub low: MetricPattern5, - pub close: MetricPattern5, +pub struct MetricsTree_Prices_Usd { + pub price: MetricPattern20, + pub split: CloseHighLowOpenPattern, + pub ohlc: MetricPattern2, } -impl MetricsTree_Price_Cents_Split { +impl MetricsTree_Prices_Usd { pub fn new(client: Arc, base_path: String) -> Self { Self { - open: MetricPattern5::new(client.clone(), "price_open_cents".to_string()), - high: MetricPattern5::new(client.clone(), "price_high_cents".to_string()), - low: MetricPattern5::new(client.clone(), "price_low_cents".to_string()), - close: MetricPattern5::new(client.clone(), "price_close_cents".to_string()), + price: MetricPattern20::new(client.clone(), "price_usd".to_string()), + split: CloseHighLowOpenPattern::new(client.clone(), "price".to_string()), + ohlc: MetricPattern2::new(client.clone(), "price_usd_ohlc".to_string()), } } } /// Metrics tree node. -pub struct MetricsTree_Price_Usd { - pub split: CloseHighLowOpenPattern2, - pub ohlc: MetricPattern1, +pub struct MetricsTree_Prices_Sats { + pub price: MetricPattern20, + pub split: CloseHighLowOpenPattern, + pub ohlc: MetricPattern2, } -impl MetricsTree_Price_Usd { +impl MetricsTree_Prices_Sats { pub fn new(client: Arc, base_path: String) -> Self { Self { - split: CloseHighLowOpenPattern2::new(client.clone(), "price".to_string()), - ohlc: MetricPattern1::new(client.clone(), "price_ohlc".to_string()), - } - } -} - -/// Metrics tree node. 
-pub struct MetricsTree_Price_Oracle { - pub price_cents: MetricPattern11, - pub ohlc_cents: MetricPattern6, - pub split: CloseHighLowOpenPattern2, - pub ohlc: MetricPattern1, - pub ohlc_dollars: MetricPattern1, -} - -impl MetricsTree_Price_Oracle { - pub fn new(client: Arc, base_path: String) -> Self { - Self { - price_cents: MetricPattern11::new(client.clone(), "oracle_price_cents".to_string()), - ohlc_cents: MetricPattern6::new(client.clone(), "oracle_ohlc_cents".to_string()), - split: CloseHighLowOpenPattern2::new(client.clone(), "oracle_price".to_string()), - ohlc: MetricPattern1::new(client.clone(), "oracle_price_ohlc".to_string()), - ohlc_dollars: MetricPattern1::new(client.clone(), "oracle_ohlc_dollars".to_string()), + price: MetricPattern20::new(client.clone(), "price_sats".to_string()), + split: CloseHighLowOpenPattern::new(client.clone(), "price_sats".to_string()), + ohlc: MetricPattern2::new(client.clone(), "price_sats_ohlc".to_string()), } } } /// Metrics tree node. pub struct MetricsTree_Distribution { - pub supply_state: MetricPattern11, + pub supply_state: MetricPattern20, pub any_address_indexes: MetricsTree_Distribution_AnyAddressIndexes, pub addresses_data: MetricsTree_Distribution_AddressesData, pub utxo_cohorts: MetricsTree_Distribution_UtxoCohorts, @@ -5175,14 +5913,14 @@ pub struct MetricsTree_Distribution { pub total_addr_count: MetricsTree_Distribution_TotalAddrCount, pub new_addr_count: MetricsTree_Distribution_NewAddrCount, pub growth_rate: MetricsTree_Distribution_GrowthRate, - pub fundedaddressindex: MetricPattern31, - pub emptyaddressindex: MetricPattern32, + pub fundedaddressindex: MetricPattern36, + pub emptyaddressindex: MetricPattern37, } impl MetricsTree_Distribution { pub fn new(client: Arc, base_path: String) -> Self { Self { - supply_state: MetricPattern11::new(client.clone(), "supply_state".to_string()), + supply_state: MetricPattern20::new(client.clone(), "supply_state".to_string()), any_address_indexes: 
MetricsTree_Distribution_AnyAddressIndexes::new(client.clone(), format!("{base_path}_any_address_indexes")), addresses_data: MetricsTree_Distribution_AddressesData::new(client.clone(), format!("{base_path}_addresses_data")), utxo_cohorts: MetricsTree_Distribution_UtxoCohorts::new(client.clone(), format!("{base_path}_utxo_cohorts")), @@ -5193,50 +5931,50 @@ impl MetricsTree_Distribution { total_addr_count: MetricsTree_Distribution_TotalAddrCount::new(client.clone(), format!("{base_path}_total_addr_count")), new_addr_count: MetricsTree_Distribution_NewAddrCount::new(client.clone(), format!("{base_path}_new_addr_count")), growth_rate: MetricsTree_Distribution_GrowthRate::new(client.clone(), format!("{base_path}_growth_rate")), - fundedaddressindex: MetricPattern31::new(client.clone(), "fundedaddressindex".to_string()), - emptyaddressindex: MetricPattern32::new(client.clone(), "emptyaddressindex".to_string()), + fundedaddressindex: MetricPattern36::new(client.clone(), "fundedaddressindex".to_string()), + emptyaddressindex: MetricPattern37::new(client.clone(), "emptyaddressindex".to_string()), } } } /// Metrics tree node. 
pub struct MetricsTree_Distribution_AnyAddressIndexes { - pub p2a: MetricPattern16, - pub p2pk33: MetricPattern18, - pub p2pk65: MetricPattern19, - pub p2pkh: MetricPattern20, - pub p2sh: MetricPattern21, - pub p2tr: MetricPattern22, - pub p2wpkh: MetricPattern23, - pub p2wsh: MetricPattern24, + pub p2a: MetricPattern26, + pub p2pk33: MetricPattern28, + pub p2pk65: MetricPattern29, + pub p2pkh: MetricPattern30, + pub p2sh: MetricPattern31, + pub p2tr: MetricPattern32, + pub p2wpkh: MetricPattern33, + pub p2wsh: MetricPattern34, } impl MetricsTree_Distribution_AnyAddressIndexes { pub fn new(client: Arc, base_path: String) -> Self { Self { - p2a: MetricPattern16::new(client.clone(), "anyaddressindex".to_string()), - p2pk33: MetricPattern18::new(client.clone(), "anyaddressindex".to_string()), - p2pk65: MetricPattern19::new(client.clone(), "anyaddressindex".to_string()), - p2pkh: MetricPattern20::new(client.clone(), "anyaddressindex".to_string()), - p2sh: MetricPattern21::new(client.clone(), "anyaddressindex".to_string()), - p2tr: MetricPattern22::new(client.clone(), "anyaddressindex".to_string()), - p2wpkh: MetricPattern23::new(client.clone(), "anyaddressindex".to_string()), - p2wsh: MetricPattern24::new(client.clone(), "anyaddressindex".to_string()), + p2a: MetricPattern26::new(client.clone(), "anyaddressindex".to_string()), + p2pk33: MetricPattern28::new(client.clone(), "anyaddressindex".to_string()), + p2pk65: MetricPattern29::new(client.clone(), "anyaddressindex".to_string()), + p2pkh: MetricPattern30::new(client.clone(), "anyaddressindex".to_string()), + p2sh: MetricPattern31::new(client.clone(), "anyaddressindex".to_string()), + p2tr: MetricPattern32::new(client.clone(), "anyaddressindex".to_string()), + p2wpkh: MetricPattern33::new(client.clone(), "anyaddressindex".to_string()), + p2wsh: MetricPattern34::new(client.clone(), "anyaddressindex".to_string()), } } } /// Metrics tree node. 
pub struct MetricsTree_Distribution_AddressesData { - pub funded: MetricPattern31, - pub empty: MetricPattern32, + pub funded: MetricPattern36, + pub empty: MetricPattern37, } impl MetricsTree_Distribution_AddressesData { pub fn new(client: Arc, base_path: String) -> Self { Self { - funded: MetricPattern31::new(client.clone(), "fundedaddressdata".to_string()), - empty: MetricPattern32::new(client.clone(), "emptyaddressdata".to_string()), + funded: MetricPattern36::new(client.clone(), "fundedaddressdata".to_string()), + empty: MetricPattern37::new(client.clone(), "emptyaddressdata".to_string()), } } } @@ -5314,7 +6052,7 @@ pub struct MetricsTree_Distribution_UtxoCohorts_All_Relative { pub net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern1, pub invested_capital_in_profit_pct: MetricPattern1, pub invested_capital_in_loss_pct: MetricPattern1, - pub unrealized_peak_regret_rel_to_market_cap: MetricPattern4, + pub unrealized_peak_regret_rel_to_market_cap: MetricPattern1, } impl MetricsTree_Distribution_UtxoCohorts_All_Relative { @@ -5333,7 +6071,7 @@ impl MetricsTree_Distribution_UtxoCohorts_All_Relative { net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern1::new(client.clone(), "net_unrealized_pnl_rel_to_own_total_unrealized_pnl".to_string()), invested_capital_in_profit_pct: MetricPattern1::new(client.clone(), "invested_capital_in_profit_pct".to_string()), invested_capital_in_loss_pct: MetricPattern1::new(client.clone(), "invested_capital_in_loss_pct".to_string()), - unrealized_peak_regret_rel_to_market_cap: MetricPattern4::new(client.clone(), "unrealized_peak_regret_rel_to_market_cap".to_string()), + unrealized_peak_regret_rel_to_market_cap: MetricPattern1::new(client.clone(), "unrealized_peak_regret_rel_to_market_cap".to_string()), } } } @@ -5958,29 +6696,29 @@ impl MetricsTree_Distribution_TotalAddrCount { /// Metrics tree node. 
pub struct MetricsTree_Distribution_NewAddrCount { - pub all: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, - pub p2pk65: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, - pub p2pk33: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, - pub p2pkh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, - pub p2sh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, - pub p2wpkh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, - pub p2wsh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, - pub p2tr: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, - pub p2a: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2, + pub all: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, + pub p2pk65: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, + pub p2pk33: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, + pub p2pkh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, + pub p2sh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, + pub p2wpkh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, + pub p2wsh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, + pub p2tr: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, + pub p2a: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern, } impl MetricsTree_Distribution_NewAddrCount { pub fn new(client: Arc, base_path: String) -> Self { Self { - all: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "new_addr_count".to_string()), - p2pk65: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "p2pk65_new_addr_count".to_string()), - p2pk33: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "p2pk33_new_addr_count".to_string()), - p2pkh: 
AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "p2pkh_new_addr_count".to_string()), - p2sh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "p2sh_new_addr_count".to_string()), - p2wpkh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "p2wpkh_new_addr_count".to_string()), - p2wsh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "p2wsh_new_addr_count".to_string()), - p2tr: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "p2tr_new_addr_count".to_string()), - p2a: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2::new(client.clone(), "p2a_new_addr_count".to_string()), + all: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "new_addr_count".to_string()), + p2pk65: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "p2pk65_new_addr_count".to_string()), + p2pk33: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "p2pk33_new_addr_count".to_string()), + p2pkh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "p2pkh_new_addr_count".to_string()), + p2sh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "p2sh_new_addr_count".to_string()), + p2wpkh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "p2wpkh_new_addr_count".to_string()), + p2wsh: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "p2wsh_new_addr_count".to_string()), + p2tr: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "p2tr_new_addr_count".to_string()), + p2a: AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern::new(client.clone(), "p2a_new_addr_count".to_string()), } } } @@ -6016,74 +6754,57 @@ impl 
MetricsTree_Distribution_GrowthRate { /// Metrics tree node. pub struct MetricsTree_Supply { - pub circulating: MetricsTree_Supply_Circulating, + pub circulating: BtcSatsUsdPattern, pub burned: MetricsTree_Supply_Burned, - pub inflation: MetricPattern4, + pub inflation: MetricPattern1, pub velocity: MetricsTree_Supply_Velocity, pub market_cap: MetricPattern1, - pub market_cap_growth_rate: MetricPattern4, - pub realized_cap_growth_rate: MetricPattern4, - pub cap_growth_rate_diff: MetricPattern6, + pub market_cap_growth_rate: MetricPattern1, + pub realized_cap_growth_rate: MetricPattern1, + pub cap_growth_rate_diff: MetricPattern1, } impl MetricsTree_Supply { pub fn new(client: Arc, base_path: String) -> Self { Self { - circulating: MetricsTree_Supply_Circulating::new(client.clone(), format!("{base_path}_circulating")), + circulating: BtcSatsUsdPattern::new(client.clone(), "circulating_supply".to_string()), burned: MetricsTree_Supply_Burned::new(client.clone(), format!("{base_path}_burned")), - inflation: MetricPattern4::new(client.clone(), "inflation_rate".to_string()), + inflation: MetricPattern1::new(client.clone(), "inflation_rate".to_string()), velocity: MetricsTree_Supply_Velocity::new(client.clone(), format!("{base_path}_velocity")), market_cap: MetricPattern1::new(client.clone(), "market_cap".to_string()), - market_cap_growth_rate: MetricPattern4::new(client.clone(), "market_cap_growth_rate".to_string()), - realized_cap_growth_rate: MetricPattern4::new(client.clone(), "realized_cap_growth_rate".to_string()), - cap_growth_rate_diff: MetricPattern6::new(client.clone(), "cap_growth_rate_diff".to_string()), - } - } -} - -/// Metrics tree node. 
-pub struct MetricsTree_Supply_Circulating { - pub sats: MetricPattern3, - pub bitcoin: MetricPattern3, - pub dollars: MetricPattern3, -} - -impl MetricsTree_Supply_Circulating { - pub fn new(client: Arc, base_path: String) -> Self { - Self { - sats: MetricPattern3::new(client.clone(), "circulating_supply".to_string()), - bitcoin: MetricPattern3::new(client.clone(), "circulating_supply_btc".to_string()), - dollars: MetricPattern3::new(client.clone(), "circulating_supply_usd".to_string()), + market_cap_growth_rate: MetricPattern1::new(client.clone(), "market_cap_growth_rate".to_string()), + realized_cap_growth_rate: MetricPattern1::new(client.clone(), "realized_cap_growth_rate".to_string()), + cap_growth_rate_diff: MetricPattern1::new(client.clone(), "cap_growth_rate_diff".to_string()), } } } /// Metrics tree node. pub struct MetricsTree_Supply_Burned { - pub opreturn: BitcoinDollarsSatsPattern3, - pub unspendable: BitcoinDollarsSatsPattern3, + pub opreturn: BtcSatsUsdPattern2, + pub unspendable: BtcSatsUsdPattern2, } impl MetricsTree_Supply_Burned { pub fn new(client: Arc, base_path: String) -> Self { Self { - opreturn: BitcoinDollarsSatsPattern3::new(client.clone(), "opreturn_supply".to_string()), - unspendable: BitcoinDollarsSatsPattern3::new(client.clone(), "unspendable_supply".to_string()), + opreturn: BtcSatsUsdPattern2::new(client.clone(), "opreturn_supply".to_string()), + unspendable: BtcSatsUsdPattern2::new(client.clone(), "unspendable_supply".to_string()), } } } /// Metrics tree node. 
pub struct MetricsTree_Supply_Velocity { - pub btc: MetricPattern4, - pub usd: MetricPattern4, + pub btc: MetricPattern1, + pub usd: MetricPattern1, } impl MetricsTree_Supply_Velocity { pub fn new(client: Arc, base_path: String) -> Self { Self { - btc: MetricPattern4::new(client.clone(), "btc_velocity".to_string()), - usd: MetricPattern4::new(client.clone(), "usd_velocity".to_string()), + btc: MetricPattern1::new(client.clone(), "btc_velocity".to_string()), + usd: MetricPattern1::new(client.clone(), "usd_velocity".to_string()), } } } @@ -6096,7 +6817,7 @@ pub struct BrkClient { impl BrkClient { /// Client version. - pub const VERSION: &'static str = "v0.1.7"; + pub const VERSION: &'static str = "v0.1.9"; /// Create a new client with the given base URL. pub fn new(base_url: impl Into) -> Self { @@ -6342,7 +7063,7 @@ impl BrkClient { /// Get supported indexes for a metric /// - /// Returns the list of indexes supported by the specified metric. For example, `realized_price` might be available on dateindex, weekindex, and monthindex. + /// Returns the list of indexes supported by the specified metric. For example, `realized_price` might be available on day1, week1, and month1. 
/// /// Endpoint: `GET /api/metric/{metric}` pub fn get_metric_info(&self, metric: Metric) -> Result> { @@ -6361,7 +7082,7 @@ impl BrkClient { if let Some(v) = limit { query.push(format!("limit={}", v)); } if let Some(v) = format { query.push(format!("format={}", v)); } let query_str = if query.is_empty() { String::new() } else { format!("?{}", query.join("&")) }; - let path = format!("/api/metric/{metric}/{}{}", index.serialize_long(), query_str); + let path = format!("/api/metric/{metric}/{}{}", index.name(), query_str); if format == Some(Format::CSV) { self.base.get_text(&path).map(FormatResponse::Csv) } else { diff --git a/crates/brk_cohort/src/address.rs b/crates/brk_cohort/src/address.rs index f93cd4a01..b8d7e3f97 100644 --- a/crates/brk_cohort/src/address.rs +++ b/crates/brk_cohort/src/address.rs @@ -1,6 +1,6 @@ use brk_traversable::Traversable; use rayon::prelude::*; -use vecdb::AnyExportableVec; +use vecdb::{AnyExportableVec, ReadOnlyClone}; use crate::Filter; @@ -80,6 +80,18 @@ impl AddressGroups { } } +impl ReadOnlyClone for AddressGroups { + type ReadOnly = AddressGroups; + + fn read_only_clone(&self) -> Self::ReadOnly { + AddressGroups { + ge_amount: self.ge_amount.read_only_clone(), + amount_range: self.amount_range.read_only_clone(), + lt_amount: self.lt_amount.read_only_clone(), + } + } +} + impl Traversable for AddressGroups where ByGreatEqualAmount: brk_traversable::Traversable, diff --git a/crates/brk_computer/Cargo.toml b/crates/brk_computer/Cargo.toml index a2a9fcdf4..d54949372 100644 --- a/crates/brk_computer/Cargo.toml +++ b/crates/brk_computer/Cargo.toml @@ -6,18 +6,18 @@ edition.workspace = true license.workspace = true homepage.workspace = true repository.workspace = true +exclude = ["examples/"] [dependencies] bitcoin = { workspace = true } brk_error = { workspace = true, features = ["vecdb"] } -brk_fetcher = { workspace = true } brk_cohort = { workspace = true } brk_indexer = { workspace = true } brk_oracle = { workspace = true } 
brk_iterator = { workspace = true } brk_logger = { workspace = true } brk_reader = { workspace = true } -brk_rpc = { workspace = true } +brk_rpc = { workspace = true, features = ["corepc"] } brk_store = { workspace = true } brk_traversable = { workspace = true } brk_types = { workspace = true } @@ -28,6 +28,7 @@ rayon = { workspace = true } rustc-hash = { workspace = true } schemars = { workspace = true } serde = { workspace = true } +serde_json = { workspace = true } smallvec = { workspace = true } vecdb = { workspace = true } diff --git a/crates/brk_computer/README.md b/crates/brk_computer/README.md index 157a85ff2..a75dc1013 100644 --- a/crates/brk_computer/README.md +++ b/crates/brk_computer/README.md @@ -19,7 +19,7 @@ Compute 1000+ on-chain metrics from indexed blockchain data: supply breakdowns, ## Core API ```rust,ignore -let mut computer = Computer::forced_import(&outputs_path, &indexer, Some(fetcher))?; +let mut computer = Computer::forced_import(&outputs_path, &indexer)?; // Compute all metrics for new blocks computer.compute(&indexer, starting_indexes, &reader, &exit)?; @@ -40,7 +40,7 @@ let realized_cap = computer.distribution.utxo_cohorts.all.metrics.realized.unwra | `cointime` | Liveliness, vaultedness, true market mean | | `pools` | Per-pool block counts, rewards, fees | | `market` | Market cap, NVT, Puell multiple | -| `price` | Height-to-price mapping from fetched data | +| `price` | Height-to-price mapping from on-chain oracle | ## Cohort System @@ -50,6 +50,17 @@ UTXO and address cohorts support filtering by: - **Type**: P2PK, P2PKH, P2MS, P2SH, P2WPKH, P2WSH, P2TR, P2A - **Epoch**: By halving epoch +```rust,ignore +// Access metrics for a specific UTXO cohort (e.g. long-term holders) +let lth_supply = computer.distribution.utxo_cohorts.lth.metrics.supply.total.sats.height; + +// Access metrics for an address cohort (e.g. 
1-10 BTC holders) +let whale_count = computer.distribution.address_cohorts.from_1_to_10.metrics.address_count.height; + +// Access metrics for all UTXOs combined +let sopr = computer.distribution.utxo_cohorts.all.metrics.realized.unwrap().sopr.height; +``` + ## Performance ### End-to-End @@ -71,6 +82,6 @@ Use [mimalloc v3](https://crates.io/crates/mimalloc) as the global allocator to - `brk_indexer` for indexed blockchain data - `brk_cohort` for cohort filtering -- `brk_fetcher` for price data +- `brk_oracle` for on-chain price data - `brk_reader` for raw block access - `brk_traversable` for data export diff --git a/crates/brk_computer/examples/computer.rs b/crates/brk_computer/examples/computer.rs index c18c41a35..ef582d0d5 100644 --- a/crates/brk_computer/examples/computer.rs +++ b/crates/brk_computer/examples/computer.rs @@ -1,14 +1,12 @@ use std::{ env, path::Path, - thread::{self, sleep}, + thread::sleep, time::{Duration, Instant}, }; use brk_alloc::Mimalloc; use brk_computer::Computer; -use brk_error::Result; -use brk_fetcher::Fetcher; use brk_indexer::Indexer; use brk_iterator::Blocks; use brk_reader::Reader; @@ -18,17 +16,6 @@ use vecdb::Exit; pub fn main() -> color_eyre::Result<()> { color_eyre::install()?; - // Can't increase main thread's stack size, thus we need to use another thread - thread::Builder::new() - .stack_size(512 * 1024 * 1024) - .spawn(run)? 
- .join() - .unwrap()?; - - Ok(()) -} - -fn run() -> Result<()> { brk_logger::init(Some(Path::new(".log")))?; let bitcoin_dir = Client::default_bitcoin_path(); @@ -48,8 +35,6 @@ fn run() -> Result<()> { let mut indexer = Indexer::forced_import(&outputs_dir)?; - let fetcher = Fetcher::import(None)?; - let exit = Exit::new(); exit.set_ctrlc_handler(); @@ -63,7 +48,7 @@ fn run() -> Result<()> { indexer = Indexer::forced_import(&outputs_dir)?; } - let mut computer = Computer::forced_import(&outputs_dir, &indexer, Some(fetcher))?; + let mut computer = Computer::forced_import(&outputs_dir, &indexer)?; loop { let i = Instant::now(); diff --git a/crates/brk_computer/examples/computer_bench.rs b/crates/brk_computer/examples/computer_bench.rs index 4220084fc..49b1a3140 100644 --- a/crates/brk_computer/examples/computer_bench.rs +++ b/crates/brk_computer/examples/computer_bench.rs @@ -1,10 +1,9 @@ -use std::{env, path::Path, thread, time::Instant}; +use std::{env, path::Path, time::Instant}; use brk_alloc::Mimalloc; use brk_bencher::Bencher; use brk_computer::Computer; use brk_error::Result; -use brk_fetcher::Fetcher; use brk_indexer::Indexer; use brk_iterator::Blocks; use brk_reader::Reader; @@ -13,15 +12,6 @@ use tracing::{debug, info}; use vecdb::Exit; pub fn main() -> Result<()> { - // Can't increase main thread's stack size, thus we need to use another thread - thread::Builder::new() - .stack_size(512 * 1024 * 1024) - .spawn(run)? 
- .join() - .unwrap() -} - -fn run() -> Result<()> { brk_logger::init(None)?; let bitcoin_dir = Client::default_bitcoin_path(); @@ -42,9 +32,7 @@ fn run() -> Result<()> { let mut indexer = Indexer::forced_import(&outputs_dir)?; - let fetcher = Fetcher::import(None)?; - - let mut computer = Computer::forced_import(&outputs_benches_dir, &indexer, Some(fetcher))?; + let mut computer = Computer::forced_import(&outputs_benches_dir, &indexer)?; let mut bencher = Bencher::from_cargo_env(env!("CARGO_PKG_NAME"), &outputs_dir.join("computed"))?; diff --git a/crates/brk_computer/examples/computer_read.rs b/crates/brk_computer/examples/computer_read.rs index 78ec9993a..380fd4173 100644 --- a/crates/brk_computer/examples/computer_read.rs +++ b/crates/brk_computer/examples/computer_read.rs @@ -1,33 +1,18 @@ -use std::{env, path::Path, thread, time::Instant}; +use std::{env, path::Path, time::Instant}; use brk_computer::Computer; use brk_error::Result; -use brk_fetcher::Fetcher; use brk_indexer::Indexer; -use vecdb::{AnySerializableVec, AnyVec, Exit}; +use vecdb::{AnySerializableVec, AnyVec}; pub fn main() -> Result<()> { - // Can't increase main thread's stack size, thus we need to use another thread - thread::Builder::new() - .stack_size(512 * 1024 * 1024) - .spawn(run)? 
- .join() - .unwrap() -} - -fn run() -> Result<()> { brk_logger::init(None)?; let outputs_dir = Path::new(&env::var("HOME").unwrap()).join(".brk"); let indexer = Indexer::forced_import(&outputs_dir)?; - let fetcher = Fetcher::import(None)?; - - let exit = Exit::new(); - exit.set_ctrlc_handler(); - - let computer = Computer::forced_import(&outputs_dir, &indexer, Some(fetcher))?; + let computer = Computer::forced_import(&outputs_dir, &indexer)?; // Test emptyaddressdata (underlying BytesVec) - direct access let empty_data = &computer.distribution.addresses_data.empty; diff --git a/crates/brk_computer/examples/full_bench.rs b/crates/brk_computer/examples/full_bench.rs index 03c3a715a..bc4d682ed 100644 --- a/crates/brk_computer/examples/full_bench.rs +++ b/crates/brk_computer/examples/full_bench.rs @@ -1,15 +1,13 @@ use std::{ env, fs, path::Path, - thread::{self, sleep}, + thread::sleep, time::{Duration, Instant}, }; use brk_alloc::Mimalloc; use brk_bencher::Bencher; use brk_computer::Computer; -use brk_error::Result; -use brk_fetcher::{Fetcher, PriceSource}; use brk_indexer::Indexer; use brk_iterator::Blocks; use brk_reader::Reader; @@ -20,17 +18,6 @@ use vecdb::Exit; pub fn main() -> color_eyre::Result<()> { color_eyre::install()?; - // Can't increase main thread's stack size, thus we need to use another thread - thread::Builder::new() - .stack_size(512 * 1024 * 1024) - .spawn(run)? 
- .join() - .unwrap()?; - - Ok(()) -} - -fn run() -> Result<()> { let bitcoin_dir = Client::default_bitcoin_path(); // let bitcoin_dir = Path::new("/Volumes/WD_BLACK1/bitcoin"); @@ -60,10 +47,6 @@ fn run() -> Result<()> { let blocks = Blocks::new(&client, &reader); - let fetcher = Fetcher::import(None)?; - - info!("Ping: {:?}", fetcher.brk.ping()?); - let mut indexer = Indexer::forced_import(&outputs_dir)?; // Pre-run indexer if too far behind, then drop and reimport to reduce memory @@ -76,7 +59,7 @@ fn run() -> Result<()> { indexer = Indexer::forced_import(&outputs_dir)?; } - let mut computer = Computer::forced_import(&outputs_dir, &indexer, Some(fetcher))?; + let mut computer = Computer::forced_import(&outputs_dir, &indexer)?; loop { let i = Instant::now(); diff --git a/crates/brk_computer/src/blocks/compute.rs b/crates/brk_computer/src/blocks/compute.rs index 87301909f..6a07e2bb0 100644 --- a/crates/brk_computer/src/blocks/compute.rs +++ b/crates/brk_computer/src/blocks/compute.rs @@ -2,56 +2,27 @@ use brk_error::Result; use brk_indexer::Indexer; use vecdb::Exit; -use crate::{ComputeIndexes, indexes, transactions}; +use crate::{ComputeIndexes, indexes}; use super::Vecs; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, indexer: &Indexer, indexes: &indexes::Vecs, - transactions: &transactions::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - // Core block metrics + self.interval.compute(indexer, starting_indexes, exit)?; self.count - .compute(indexer, indexes, &self.time, starting_indexes, exit)?; - self.interval.compute(indexes, starting_indexes, exit)?; - self.size - .compute(indexer, indexes, starting_indexes, exit)?; - self.weight - .compute(indexer, indexes, starting_indexes, exit)?; - - // Time metrics (timestamps) - self.time.compute(indexes, starting_indexes, exit)?; - - // Epoch metrics + .compute(indexer, &self.time, starting_indexes, exit)?; + self.size.compute(indexer, starting_indexes, exit)?; + 
self.weight.compute(indexer, starting_indexes, exit)?; self.difficulty .compute(indexer, indexes, starting_indexes, exit)?; self.halving.compute(indexes, starting_indexes, exit)?; - // Rewards depends on count and transactions fees - self.rewards.compute( - indexer, - indexes, - &self.count, - &transactions.fees, - starting_indexes, - exit, - )?; - - // Mining depends on count, difficulty, and rewards - self.mining.compute( - indexes, - &self.count, - &self.difficulty, - &self.rewards, - starting_indexes, - exit, - )?; - let _lock = exit.lock(); self.db.compact()?; Ok(()) diff --git a/crates/brk_computer/src/blocks/count/compute.rs b/crates/brk_computer/src/blocks/count/compute.rs index 639a27f9d..505310305 100644 --- a/crates/brk_computer/src/blocks/count/compute.rs +++ b/crates/brk_computer/src/blocks/count/compute.rs @@ -1,66 +1,186 @@ use brk_error::Result; use brk_indexer::Indexer; -use brk_types::{Height, StoredU32}; -use vecdb::{EagerVec, Exit, PcoVec, TypedVecIterator}; +use brk_types::{Height, StoredU32, Timestamp}; +use vecdb::{EagerVec, Exit, PcoVec, ReadableVec, VecIndex}; -use super::super::time; -use super::Vecs; -use crate::{ComputeIndexes, indexes, internal::ComputedFromHeightLast}; +use crate::ComputeIndexes; + +use super::{super::time, Vecs}; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, indexer: &Indexer, - indexes: &indexes::Vecs, time: &time::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { + self.block_count.height.compute_range( + starting_indexes.height, + &indexer.vecs.blocks.weight, + |h| (h, StoredU32::from(1_u32)), + exit, + )?; self.block_count - .compute_all(indexes, starting_indexes, exit, |v| { - v.compute_range( - starting_indexes.height, - &indexer.vecs.blocks.weight, - |h| (h, StoredU32::from(1_u32)), - exit, - )?; - Ok(()) - })?; + .compute_cumulative(starting_indexes, exit)?; - // Compute rolling window starts - self.compute_rolling_start(time, starting_indexes, exit, 1, |s| &mut 
s._24h_start)?; - self.compute_rolling_start(time, starting_indexes, exit, 7, |s| &mut s._1w_start)?; - self.compute_rolling_start(time, starting_indexes, exit, 30, |s| &mut s._1m_start)?; - self.compute_rolling_start(time, starting_indexes, exit, 365, |s| &mut s._1y_start)?; + // Compute rolling window starts (collect monotonic data once for all windows) + let monotonic_data: Vec = time.timestamp_monotonic.collect(); + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 1, |s| { + &mut s.height_24h_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 3, |s| { + &mut s.height_3d_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 7, |s| { + &mut s.height_1w_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 8, |s| { + &mut s.height_8d_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 9, |s| { + &mut s.height_9d_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 12, |s| { + &mut s.height_12d_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 13, |s| { + &mut s.height_13d_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 14, |s| { + &mut s.height_2w_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 21, |s| { + &mut s.height_21d_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 26, |s| { + &mut s.height_26d_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 30, |s| { + &mut s.height_1m_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 34, |s| { + &mut s.height_34d_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 55, |s| { + &mut s.height_55d_ago + })?; + self.compute_rolling_start(&monotonic_data, time, 
starting_indexes, exit, 2 * 30, |s| { + &mut s.height_2m_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 89, |s| { + &mut s.height_89d_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 3 * 30, |s| { + &mut s.height_3m_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 111, |s| { + &mut s.height_111d_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 144, |s| { + &mut s.height_144d_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 6 * 30, |s| { + &mut s.height_6m_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 200, |s| { + &mut s.height_200d_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 350, |s| { + &mut s.height_350d_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 365, |s| { + &mut s.height_1y_ago + })?; + self.compute_rolling_start( + &monotonic_data, + time, + starting_indexes, + exit, + 2 * 365, + |s| &mut s.height_2y_ago, + )?; + self.compute_rolling_start( + &monotonic_data, + time, + starting_indexes, + exit, + 200 * 7, + |s| &mut s.height_200w_ago, + )?; + self.compute_rolling_start( + &monotonic_data, + time, + starting_indexes, + exit, + 3 * 365, + |s| &mut s.height_3y_ago, + )?; + self.compute_rolling_start( + &monotonic_data, + time, + starting_indexes, + exit, + 4 * 365, + |s| &mut s.height_4y_ago, + )?; + self.compute_rolling_start( + &monotonic_data, + time, + starting_indexes, + exit, + 5 * 365, + |s| &mut s.height_5y_ago, + )?; + self.compute_rolling_start( + &monotonic_data, + time, + starting_indexes, + exit, + 6 * 365, + |s| &mut s.height_6y_ago, + )?; + self.compute_rolling_start( + &monotonic_data, + time, + starting_indexes, + exit, + 8 * 365, + |s| &mut s.height_8y_ago, + )?; + self.compute_rolling_start( + &monotonic_data, + time, + 
starting_indexes, + exit, + 10 * 365, + |s| &mut s.height_10y_ago, + )?; // Compute rolling window block counts - self.compute_rolling_block_count( - indexes, - starting_indexes, + self.block_count_24h_sum.height.compute_transform( + starting_indexes.height, + &self.height_24h_ago, + |(h, start, ..)| (h, StoredU32::from(*h + 1 - *start)), exit, - &self._24h_start.clone(), - |s| &mut s._24h_block_count, )?; - self.compute_rolling_block_count( - indexes, - starting_indexes, + self.block_count_1w_sum.height.compute_transform( + starting_indexes.height, + &self.height_1w_ago, + |(h, start, ..)| (h, StoredU32::from(*h + 1 - *start)), exit, - &self._1w_start.clone(), - |s| &mut s._1w_block_count, )?; - self.compute_rolling_block_count( - indexes, - starting_indexes, + self.block_count_1m_sum.height.compute_transform( + starting_indexes.height, + &self.height_1m_ago, + |(h, start, ..)| (h, StoredU32::from(*h + 1 - *start)), exit, - &self._1m_start.clone(), - |s| &mut s._1m_block_count, )?; - self.compute_rolling_block_count( - indexes, - starting_indexes, + self.block_count_1y_sum.height.compute_transform( + starting_indexes.height, + &self.height_1y_ago, + |(h, start, ..)| (h, StoredU32::from(*h + 1 - *start)), exit, - &self._1y_start.clone(), - |s| &mut s._1y_block_count, )?; Ok(()) @@ -68,6 +188,7 @@ impl Vecs { fn compute_rolling_start( &mut self, + monotonic_data: &[Timestamp], time: &time::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, @@ -77,13 +198,12 @@ impl Vecs { where F: FnOnce(&mut Self) -> &mut EagerVec>, { - let mut iter = time.timestamp_monotonic.into_iter(); let mut prev = Height::ZERO; Ok(get_field(self).compute_transform( starting_indexes.height, &time.timestamp_monotonic, |(h, t, ..)| { - while t.difference_in_days_between(iter.get_unwrap(prev)) >= days { + while t.difference_in_days_between(monotonic_data[prev.to_usize()]) >= days { prev.increment(); if prev > h { unreachable!() @@ -94,26 +214,4 @@ impl Vecs { exit, )?) 
} - - fn compute_rolling_block_count( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - start_height: &EagerVec>, - get_field: F, - ) -> Result<()> - where - F: FnOnce(&mut Self) -> &mut ComputedFromHeightLast, - { - get_field(self).compute_all(indexes, starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.height, - start_height, - |(h, start, ..)| (h, StoredU32::from(*h + 1 - *start)), - exit, - )?; - Ok(()) - }) - } } diff --git a/crates/brk_computer/src/blocks/count/import.rs b/crates/brk_computer/src/blocks/count/import.rs index 2b0720b45..d40ede602 100644 --- a/crates/brk_computer/src/blocks/count/import.rs +++ b/crates/brk_computer/src/blocks/count/import.rs @@ -1,59 +1,78 @@ use brk_error::Result; -use brk_types::{StoredU64, Version}; +use brk_types::Version; use vecdb::{Database, ImportableVec}; use super::Vecs; use crate::{ - blocks::{ - TARGET_BLOCKS_PER_DAY, TARGET_BLOCKS_PER_DECADE, TARGET_BLOCKS_PER_MONTH, - TARGET_BLOCKS_PER_QUARTER, TARGET_BLOCKS_PER_SEMESTER, TARGET_BLOCKS_PER_WEEK, - TARGET_BLOCKS_PER_YEAR, - }, indexes, - internal::{ComputedFromHeightLast, ComputedFromHeightSumCum, LazyFromDate}, + internal::{BlockCountTarget, ComputedFromHeightLast, ComputedFromHeightSumCum, ConstantVecs}, }; impl Vecs { - pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { + pub(crate) fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { Ok(Self { - block_count_target: LazyFromDate::new( + block_count_target: ConstantVecs::new::( "block_count_target", version, indexes, - |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_DAY)), - |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_WEEK)), - |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_MONTH)), - |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_QUARTER)), - |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_SEMESTER)), - |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_YEAR)), - |_, _| 
Some(StoredU64::from(TARGET_BLOCKS_PER_DECADE)), ), - block_count: ComputedFromHeightSumCum::forced_import(db, "block_count", version, indexes)?, - _24h_start: ImportableVec::forced_import(db, "24h_start", version)?, - _1w_start: ImportableVec::forced_import(db, "1w_start", version)?, - _1m_start: ImportableVec::forced_import(db, "1m_start", version)?, - _1y_start: ImportableVec::forced_import(db, "1y_start", version)?, - _24h_block_count: ComputedFromHeightLast::forced_import( + block_count: ComputedFromHeightSumCum::forced_import( db, - "24h_block_count", + "block_count", version, indexes, )?, - _1w_block_count: ComputedFromHeightLast::forced_import( + height_24h_ago: ImportableVec::forced_import(db, "height_24h_ago", version)?, + height_3d_ago: ImportableVec::forced_import(db, "height_3d_ago", version)?, + height_1w_ago: ImportableVec::forced_import(db, "height_1w_ago", version)?, + height_8d_ago: ImportableVec::forced_import(db, "height_8d_ago", version)?, + height_9d_ago: ImportableVec::forced_import(db, "height_9d_ago", version)?, + height_12d_ago: ImportableVec::forced_import(db, "height_12d_ago", version)?, + height_13d_ago: ImportableVec::forced_import(db, "height_13d_ago", version)?, + height_2w_ago: ImportableVec::forced_import(db, "height_2w_ago", version)?, + height_21d_ago: ImportableVec::forced_import(db, "height_21d_ago", version)?, + height_26d_ago: ImportableVec::forced_import(db, "height_26d_ago", version)?, + height_1m_ago: ImportableVec::forced_import(db, "height_1m_ago", version)?, + height_34d_ago: ImportableVec::forced_import(db, "height_34d_ago", version)?, + height_55d_ago: ImportableVec::forced_import(db, "height_55d_ago", version)?, + height_2m_ago: ImportableVec::forced_import(db, "height_2m_ago", version)?, + height_89d_ago: ImportableVec::forced_import(db, "height_89d_ago", version)?, + height_111d_ago: ImportableVec::forced_import(db, "height_111d_ago", version)?, + height_144d_ago: ImportableVec::forced_import(db, "height_144d_ago", 
version)?, + height_3m_ago: ImportableVec::forced_import(db, "height_3m_ago", version)?, + height_6m_ago: ImportableVec::forced_import(db, "height_6m_ago", version)?, + height_200d_ago: ImportableVec::forced_import(db, "height_200d_ago", version)?, + height_350d_ago: ImportableVec::forced_import(db, "height_350d_ago", version)?, + height_1y_ago: ImportableVec::forced_import(db, "height_1y_ago", version)?, + height_2y_ago: ImportableVec::forced_import(db, "height_2y_ago", version)?, + height_200w_ago: ImportableVec::forced_import(db, "height_200w_ago", version)?, + height_3y_ago: ImportableVec::forced_import(db, "height_3y_ago", version)?, + height_4y_ago: ImportableVec::forced_import(db, "height_4y_ago", version)?, + height_5y_ago: ImportableVec::forced_import(db, "height_5y_ago", version)?, + height_6y_ago: ImportableVec::forced_import(db, "height_6y_ago", version)?, + height_8y_ago: ImportableVec::forced_import(db, "height_8y_ago", version)?, + height_10y_ago: ImportableVec::forced_import(db, "height_10y_ago", version)?, + block_count_24h_sum: ComputedFromHeightLast::forced_import( db, - "1w_block_count", + "block_count_24h_sum", version, indexes, )?, - _1m_block_count: ComputedFromHeightLast::forced_import( + block_count_1w_sum: ComputedFromHeightLast::forced_import( db, - "1m_block_count", + "block_count_1w_sum", version, indexes, )?, - _1y_block_count: ComputedFromHeightLast::forced_import( + block_count_1m_sum: ComputedFromHeightLast::forced_import( db, - "1y_block_count", + "block_count_1m_sum", + version, + indexes, + )?, + block_count_1y_sum: ComputedFromHeightLast::forced_import( + db, + "block_count_1y_sum", version, indexes, )?, diff --git a/crates/brk_computer/src/blocks/count/vecs.rs b/crates/brk_computer/src/blocks/count/vecs.rs index 93b24920c..8ffdb0dfb 100644 --- a/crates/brk_computer/src/blocks/count/vecs.rs +++ b/crates/brk_computer/src/blocks/count/vecs.rs @@ -1,21 +1,85 @@ use brk_traversable::Traversable; use brk_types::{Height, StoredU32, 
StoredU64}; -use vecdb::{EagerVec, PcoVec}; +use vecdb::{EagerVec, PcoVec, Rw, StorageMode}; -use crate::internal::{ComputedFromHeightLast, ComputedFromHeightSumCum, LazyFromDate}; +use crate::internal::{ComputedFromHeightLast, ComputedFromHeightSumCum, ConstantVecs}; -#[derive(Clone, Traversable)] -pub struct Vecs { - pub block_count_target: LazyFromDate, - pub block_count: ComputedFromHeightSumCum, +#[derive(Traversable)] +pub struct Vecs { + pub block_count_target: ConstantVecs, + pub block_count: ComputedFromHeightSumCum, // Rolling window starts (height-indexed only, no date aggregation needed) - pub _24h_start: EagerVec>, - pub _1w_start: EagerVec>, - pub _1m_start: EagerVec>, - pub _1y_start: EagerVec>, + pub height_24h_ago: M::Stored>>, + pub height_3d_ago: M::Stored>>, + pub height_1w_ago: M::Stored>>, + pub height_8d_ago: M::Stored>>, + pub height_9d_ago: M::Stored>>, + pub height_12d_ago: M::Stored>>, + pub height_13d_ago: M::Stored>>, + pub height_2w_ago: M::Stored>>, + pub height_21d_ago: M::Stored>>, + pub height_26d_ago: M::Stored>>, + pub height_1m_ago: M::Stored>>, + pub height_34d_ago: M::Stored>>, + pub height_55d_ago: M::Stored>>, + pub height_2m_ago: M::Stored>>, + pub height_89d_ago: M::Stored>>, + pub height_111d_ago: M::Stored>>, + pub height_144d_ago: M::Stored>>, + pub height_3m_ago: M::Stored>>, + pub height_6m_ago: M::Stored>>, + pub height_200d_ago: M::Stored>>, + pub height_350d_ago: M::Stored>>, + pub height_1y_ago: M::Stored>>, + pub height_2y_ago: M::Stored>>, + pub height_200w_ago: M::Stored>>, + pub height_3y_ago: M::Stored>>, + pub height_4y_ago: M::Stored>>, + pub height_5y_ago: M::Stored>>, + pub height_6y_ago: M::Stored>>, + pub height_8y_ago: M::Stored>>, + pub height_10y_ago: M::Stored>>, // Rolling window block counts - pub _24h_block_count: ComputedFromHeightLast, - pub _1w_block_count: ComputedFromHeightLast, - pub _1m_block_count: ComputedFromHeightLast, - pub _1y_block_count: ComputedFromHeightLast, + pub 
block_count_24h_sum: ComputedFromHeightLast, + pub block_count_1w_sum: ComputedFromHeightLast, + pub block_count_1m_sum: ComputedFromHeightLast, + pub block_count_1y_sum: ComputedFromHeightLast, +} + +impl Vecs { + pub fn start_vec(&self, days: usize) -> &EagerVec> { + match days { + 1 => &self.height_24h_ago, + 3 => &self.height_3d_ago, + 7 => &self.height_1w_ago, + 8 => &self.height_8d_ago, + 9 => &self.height_9d_ago, + 12 => &self.height_12d_ago, + 13 => &self.height_13d_ago, + 14 => &self.height_2w_ago, + 21 => &self.height_21d_ago, + 26 => &self.height_26d_ago, + 30 => &self.height_1m_ago, + 34 => &self.height_34d_ago, + 55 => &self.height_55d_ago, + 60 => &self.height_2m_ago, + 89 => &self.height_89d_ago, + 90 => &self.height_3m_ago, + 111 => &self.height_111d_ago, + 144 => &self.height_144d_ago, + 180 => &self.height_6m_ago, + 200 => &self.height_200d_ago, + 350 => &self.height_350d_ago, + 365 => &self.height_1y_ago, + 730 => &self.height_2y_ago, + 1095 => &self.height_3y_ago, + 1400 => &self.height_200w_ago, + 1460 => &self.height_4y_ago, + 1825 => &self.height_5y_ago, + 2190 => &self.height_6y_ago, + 2920 => &self.height_8y_ago, + 3650 => &self.height_10y_ago, + _ => panic!("No start vec for {days} days"), + } + } } diff --git a/crates/brk_computer/src/blocks/difficulty/compute.rs b/crates/brk_computer/src/blocks/difficulty/compute.rs index 490b3b29a..264610003 100644 --- a/crates/brk_computer/src/blocks/difficulty/compute.rs +++ b/crates/brk_computer/src/blocks/difficulty/compute.rs @@ -1,102 +1,61 @@ use brk_error::Result; use brk_indexer::Indexer; use brk_types::{StoredF32, StoredU32}; -use vecdb::{Exit, TypedVecIterator}; +use vecdb::Exit; use super::super::TARGET_BLOCKS_PER_DAY_F32; use super::Vecs; use crate::{ComputeIndexes, indexes}; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, indexer: &Indexer, indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - // Derive dateindex/period stats from 
raw difficulty - self.raw.derive_from( - indexes, - starting_indexes, + // raw is fully lazy from indexer height source — no compute needed + + // Compute difficulty as hash rate equivalent + let multiplier = 2.0_f64.powi(32) / 600.0; + self.as_hash.height.compute_transform( + starting_indexes.height, &indexer.vecs.blocks.difficulty, + |(i, v, ..)| (i, StoredF32::from(*v * multiplier)), exit, )?; - // Compute difficulty as hash rate equivalent - self.as_hash - .compute_all(indexes, starting_indexes, exit, |v| { - let multiplier = 2.0_f64.powi(32) / 600.0; - v.compute_transform( - starting_indexes.height, - &indexer.vecs.blocks.difficulty, - |(i, v, ..)| (i, StoredF32::from(*v * multiplier)), - exit, - )?; - Ok(()) - })?; - // Compute difficulty adjustment percentage - self.adjustment - .compute_all(indexes, starting_indexes, exit, |v| { - v.compute_percentage_change( - starting_indexes.height, - &indexer.vecs.blocks.difficulty, - 1, - exit, - )?; - Ok(()) - })?; + self.adjustment.height.compute_percentage_change( + starting_indexes.height, + &indexer.vecs.blocks.difficulty, + 1, + exit, + )?; - // Compute epoch by dateindex - let mut height_to_difficultyepoch_iter = indexes.height.difficultyepoch.into_iter(); - self.epoch.compute_all(starting_indexes, exit, |vec| { - let mut height_count_iter = indexes.dateindex.height_count.into_iter(); - vec.compute_transform( - starting_indexes.dateindex, - &indexes.dateindex.first_height, - |(di, height, ..)| { - ( - di, - height_to_difficultyepoch_iter - .get_unwrap(height + (*height_count_iter.get_unwrap(di) - 1)), - ) - }, - exit, - )?; - Ok(()) - })?; + // Compute epoch by height + self.epoch.height.compute_transform( + starting_indexes.height, + &indexes.height.difficultyepoch, + |(h, epoch, ..)| (h, epoch), + exit, + )?; // Compute blocks before next adjustment - self.blocks_before_next_adjustment.compute_all( - indexes, - starting_indexes, + self.blocks_before_next_adjustment.height.compute_transform( + 
starting_indexes.height, + &indexes.height.identity, + |(h, ..)| (h, StoredU32::from(h.left_before_next_diff_adj())), exit, - |v| { - v.compute_transform( - starting_indexes.height, - &indexes.height.identity, - |(h, ..)| (h, StoredU32::from(h.left_before_next_diff_adj())), - exit, - )?; - Ok(()) - }, )?; // Compute days before next adjustment - self.days_before_next_adjustment.compute_all( - indexes, - starting_indexes, + self.days_before_next_adjustment.height.compute_transform( + starting_indexes.height, + &self.blocks_before_next_adjustment.height, + |(h, blocks, ..)| (h, (*blocks as f32 / TARGET_BLOCKS_PER_DAY_F32).into()), exit, - |v| { - v.compute_transform( - starting_indexes.height, - &self.blocks_before_next_adjustment.height, - |(h, blocks, ..)| (h, (*blocks as f32 / TARGET_BLOCKS_PER_DAY_F32).into()), - exit, - )?; - Ok(()) - }, )?; Ok(()) diff --git a/crates/brk_computer/src/blocks/difficulty/import.rs b/crates/brk_computer/src/blocks/difficulty/import.rs index 48d4b32d9..e33ede1dd 100644 --- a/crates/brk_computer/src/blocks/difficulty/import.rs +++ b/crates/brk_computer/src/blocks/difficulty/import.rs @@ -1,19 +1,16 @@ use brk_error::Result; use brk_indexer::Indexer; use brk_types::Version; -use vecdb::{Database, IterableCloneableVec}; +use vecdb::{Database, ReadableCloneableVec}; use super::Vecs; use crate::{ indexes, - internal::{ - ComputedFromHeightLast, ComputedFromHeightSum, ComputedFromDateLast, - ComputedHeightDerivedLast, - }, + internal::{ComputedFromHeightLast, ComputedFromHeightSum, ComputedHeightDerivedLast}, }; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, indexer: &Indexer, @@ -23,15 +20,14 @@ impl Vecs { Ok(Self { raw: ComputedHeightDerivedLast::forced_import( - db, "difficulty", - indexer.vecs.blocks.difficulty.boxed_clone(), + indexer.vecs.blocks.difficulty.read_only_boxed_clone(), version, indexes, - )?, + ), as_hash: ComputedFromHeightLast::forced_import(db, 
"difficulty_as_hash", version, indexes)?, adjustment: ComputedFromHeightSum::forced_import(db, "difficulty_adjustment", version, indexes)?, - epoch: ComputedFromDateLast::forced_import(db, "difficultyepoch", version, indexes)?, + epoch: ComputedFromHeightLast::forced_import(db, "difficulty_epoch", version, indexes)?, blocks_before_next_adjustment: ComputedFromHeightLast::forced_import( db, "blocks_before_next_difficulty_adjustment", diff --git a/crates/brk_computer/src/blocks/difficulty/vecs.rs b/crates/brk_computer/src/blocks/difficulty/vecs.rs index 583bf0378..820dd3f2e 100644 --- a/crates/brk_computer/src/blocks/difficulty/vecs.rs +++ b/crates/brk_computer/src/blocks/difficulty/vecs.rs @@ -1,18 +1,17 @@ use brk_traversable::Traversable; use brk_types::{DifficultyEpoch, StoredF32, StoredF64, StoredU32}; +use vecdb::{Rw, StorageMode}; -use crate::internal::{ - ComputedFromHeightLast, ComputedFromHeightSum, ComputedFromDateLast, ComputedHeightDerivedLast, -}; +use crate::internal::{ComputedFromHeightLast, ComputedFromHeightSum, ComputedHeightDerivedLast}; /// Difficulty metrics: raw difficulty, derived stats, adjustment, and countdown -#[derive(Clone, Traversable)] -pub struct Vecs { - /// Raw difficulty with dateindex/period stats - merges with indexer's raw +#[derive(Traversable)] +pub struct Vecs { + /// Raw difficulty with day1/period stats - merges with indexer's raw pub raw: ComputedHeightDerivedLast, - pub as_hash: ComputedFromHeightLast, - pub adjustment: ComputedFromHeightSum, - pub epoch: ComputedFromDateLast, - pub blocks_before_next_adjustment: ComputedFromHeightLast, - pub days_before_next_adjustment: ComputedFromHeightLast, + pub as_hash: ComputedFromHeightLast, + pub adjustment: ComputedFromHeightSum, + pub epoch: ComputedFromHeightLast, + pub blocks_before_next_adjustment: ComputedFromHeightLast, + pub days_before_next_adjustment: ComputedFromHeightLast, } diff --git a/crates/brk_computer/src/blocks/halving/compute.rs 
b/crates/brk_computer/src/blocks/halving/compute.rs index fa2c8f3f9..92e150f16 100644 --- a/crates/brk_computer/src/blocks/halving/compute.rs +++ b/crates/brk_computer/src/blocks/halving/compute.rs @@ -1,57 +1,38 @@ use brk_error::Result; use brk_types::StoredU32; -use vecdb::{Exit, TypedVecIterator}; +use vecdb::Exit; use super::super::TARGET_BLOCKS_PER_DAY_F32; use super::Vecs; use crate::{ComputeIndexes, indexes}; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - let mut height_to_halvingepoch_iter = indexes.height.halvingepoch.into_iter(); - self.epoch.compute_all(starting_indexes, exit, |vec| { - let mut height_count_iter = indexes.dateindex.height_count.into_iter(); - vec.compute_transform( - starting_indexes.dateindex, - &indexes.dateindex.first_height, - |(di, height, ..)| { - ( - di, - height_to_halvingepoch_iter - .get_unwrap(height + (*height_count_iter.get_unwrap(di) - 1)), - ) - }, - exit, - )?; - Ok(()) - })?; + self.epoch.height.compute_transform( + starting_indexes.height, + &indexes.height.halvingepoch, + |(h, epoch, ..)| (h, epoch), + exit, + )?; - self.blocks_before_next_halving - .compute_all(indexes, starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.height, - &indexes.height.identity, - |(h, ..)| (h, StoredU32::from(h.left_before_next_halving())), - exit, - )?; - Ok(()) - })?; + self.blocks_before_next_halving.height.compute_transform( + starting_indexes.height, + &indexes.height.identity, + |(h, ..)| (h, StoredU32::from(h.left_before_next_halving())), + exit, + )?; - self.days_before_next_halving - .compute_all(indexes, starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.height, - &self.blocks_before_next_halving.height, - |(h, blocks, ..)| (h, (*blocks as f32 / TARGET_BLOCKS_PER_DAY_F32).into()), - exit, - )?; - Ok(()) - })?; + self.days_before_next_halving.height.compute_transform( + 
starting_indexes.height, + &self.blocks_before_next_halving.height, + |(h, blocks, ..)| (h, (*blocks as f32 / TARGET_BLOCKS_PER_DAY_F32).into()), + exit, + )?; Ok(()) } diff --git a/crates/brk_computer/src/blocks/halving/import.rs b/crates/brk_computer/src/blocks/halving/import.rs index d21adf77a..33ad67f2e 100644 --- a/crates/brk_computer/src/blocks/halving/import.rs +++ b/crates/brk_computer/src/blocks/halving/import.rs @@ -3,17 +3,18 @@ use brk_types::Version; use vecdb::Database; use super::Vecs; -use crate::{ - indexes, - internal::{ComputedFromHeightLast, ComputedFromDateLast}, -}; +use crate::{indexes, internal::ComputedFromHeightLast}; impl Vecs { - pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { + pub(crate) fn forced_import( + db: &Database, + version: Version, + indexes: &indexes::Vecs, + ) -> Result { let v2 = Version::TWO; Ok(Self { - epoch: ComputedFromDateLast::forced_import(db, "halvingepoch", version, indexes)?, + epoch: ComputedFromHeightLast::forced_import(db, "halving_epoch", version, indexes)?, blocks_before_next_halving: ComputedFromHeightLast::forced_import( db, "blocks_before_next_halving", diff --git a/crates/brk_computer/src/blocks/halving/vecs.rs b/crates/brk_computer/src/blocks/halving/vecs.rs index 60ec1f434..e6e288be6 100644 --- a/crates/brk_computer/src/blocks/halving/vecs.rs +++ b/crates/brk_computer/src/blocks/halving/vecs.rs @@ -1,12 +1,13 @@ use brk_traversable::Traversable; use brk_types::{HalvingEpoch, StoredF32, StoredU32}; +use vecdb::{Rw, StorageMode}; -use crate::internal::{ComputedFromHeightLast, ComputedFromDateLast}; +use crate::internal::ComputedFromHeightLast; /// Halving epoch metrics and countdown -#[derive(Clone, Traversable)] -pub struct Vecs { - pub epoch: ComputedFromDateLast, - pub blocks_before_next_halving: ComputedFromHeightLast, - pub days_before_next_halving: ComputedFromHeightLast, +#[derive(Traversable)] +pub struct Vecs { + pub epoch: ComputedFromHeightLast, + 
pub blocks_before_next_halving: ComputedFromHeightLast, + pub days_before_next_halving: ComputedFromHeightLast, } diff --git a/crates/brk_computer/src/blocks/import.rs b/crates/brk_computer/src/blocks/import.rs index 54311e306..0bcf9cee0 100644 --- a/crates/brk_computer/src/blocks/import.rs +++ b/crates/brk_computer/src/blocks/import.rs @@ -6,20 +6,19 @@ use brk_traversable::Traversable; use brk_types::Version; use vecdb::{Database, PAGE_SIZE}; -use crate::{indexes, price}; +use crate::indexes; use super::{ - CountVecs, DifficultyVecs, HalvingVecs, IntervalVecs, MiningVecs, RewardsVecs, SizeVecs, + CountVecs, DifficultyVecs, HalvingVecs, IntervalVecs, SizeVecs, TimeVecs, Vecs, WeightVecs, }; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( parent_path: &Path, parent_version: Version, indexer: &Indexer, indexes: &indexes::Vecs, - price: Option<&price::Vecs>, ) -> Result { let db = Database::open(&parent_path.join(super::DB_NAME))?; db.set_min_len(PAGE_SIZE * 50_000_000)?; @@ -27,12 +26,10 @@ impl Vecs { let version = parent_version; let count = CountVecs::forced_import(&db, version, indexes)?; - let interval = IntervalVecs::forced_import(&db, version, indexer, indexes)?; + let interval = IntervalVecs::forced_import(&db, version, indexes)?; let size = SizeVecs::forced_import(&db, version, indexer, indexes)?; let weight = WeightVecs::forced_import(&db, version, indexer, indexes)?; let time = TimeVecs::forced_import(&db, version, indexer, indexes)?; - let mining = MiningVecs::forced_import(&db, version, indexes)?; - let rewards = RewardsVecs::forced_import(&db, version, indexes, price)?; let difficulty = DifficultyVecs::forced_import(&db, version, indexer, indexes)?; let halving = HalvingVecs::forced_import(&db, version, indexes)?; @@ -43,8 +40,6 @@ impl Vecs { size, weight, time, - mining, - rewards, difficulty, halving, }; diff --git a/crates/brk_computer/src/blocks/interval/compute.rs b/crates/brk_computer/src/blocks/interval/compute.rs index 
a923a8147..e2a8bbc9a 100644 --- a/crates/brk_computer/src/blocks/interval/compute.rs +++ b/crates/brk_computer/src/blocks/interval/compute.rs @@ -1,18 +1,36 @@ use brk_error::Result; -use vecdb::Exit; +use brk_indexer::Indexer; +use brk_types::{CheckedSub, Timestamp}; +use vecdb::{Exit, ReadableVec}; use super::Vecs; -use crate::{ComputeIndexes, indexes}; +use crate::ComputeIndexes; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, - indexes: &indexes::Vecs, + indexer: &Indexer, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.interval.derive_from(indexes, starting_indexes, exit)?; - + let mut prev_timestamp = None; + self.interval.height.compute_transform( + starting_indexes.height, + &indexer.vecs.blocks.timestamp, + |(h, timestamp, ..)| { + let interval = if let Some(prev_h) = h.decremented() { + let prev = prev_timestamp.unwrap_or_else(|| { + indexer.vecs.blocks.timestamp.collect_one(prev_h).unwrap() + }); + timestamp.checked_sub(prev).unwrap_or(Timestamp::ZERO) + } else { + Timestamp::ZERO + }; + prev_timestamp = Some(timestamp); + (h, interval) + }, + exit, + )?; Ok(()) } } diff --git a/crates/brk_computer/src/blocks/interval/import.rs b/crates/brk_computer/src/blocks/interval/import.rs index 5c4fed49e..08f9c6eea 100644 --- a/crates/brk_computer/src/blocks/interval/import.rs +++ b/crates/brk_computer/src/blocks/interval/import.rs @@ -1,35 +1,21 @@ use brk_error::Result; -use brk_indexer::Indexer; -use brk_types::{CheckedSub, Height, Timestamp, Version}; -use vecdb::{Database, VecIndex}; +use brk_types::Version; +use vecdb::Database; use super::Vecs; -use crate::{indexes, internal::LazyFromHeightDistribution}; +use crate::{indexes, internal::ComputedFromHeightDistribution}; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, - indexer: &Indexer, indexes: &indexes::Vecs, ) -> Result { - let interval = LazyFromHeightDistribution::forced_import_with_init( + let interval = 
ComputedFromHeightDistribution::forced_import( db, "block_interval", version, - indexer.vecs.blocks.timestamp.clone(), indexes, - |height: Height, timestamp_iter| { - let timestamp = timestamp_iter.get_at(height.to_usize())?; - let interval = height.decremented().map_or(Timestamp::ZERO, |prev_h| { - timestamp_iter - .get_at(prev_h.to_usize()) - .map_or(Timestamp::ZERO, |prev_t| { - timestamp.checked_sub(prev_t).unwrap_or(Timestamp::ZERO) - }) - }); - Some(interval) - }, )?; Ok(Self { interval }) diff --git a/crates/brk_computer/src/blocks/interval/vecs.rs b/crates/brk_computer/src/blocks/interval/vecs.rs index e96c42880..81a501962 100644 --- a/crates/brk_computer/src/blocks/interval/vecs.rs +++ b/crates/brk_computer/src/blocks/interval/vecs.rs @@ -1,10 +1,11 @@ use brk_traversable::Traversable; use brk_types::Timestamp; +use vecdb::{Rw, StorageMode}; -use crate::internal::LazyFromHeightDistribution; +use crate::internal::ComputedFromHeightDistribution; -#[derive(Clone, Traversable)] -pub struct Vecs { +#[derive(Traversable)] +pub struct Vecs { #[traversable(flatten)] - pub interval: LazyFromHeightDistribution, + pub interval: ComputedFromHeightDistribution, } diff --git a/crates/brk_computer/src/blocks/mining/compute.rs b/crates/brk_computer/src/blocks/mining/compute.rs deleted file mode 100644 index 9be770024..000000000 --- a/crates/brk_computer/src/blocks/mining/compute.rs +++ /dev/null @@ -1,234 +0,0 @@ -use brk_error::Result; -use brk_types::{StoredF32, StoredF64}; -use vecdb::Exit; - -use super::super::{ONE_TERA_HASH, TARGET_BLOCKS_PER_DAY_F64, count, difficulty, rewards}; -use super::Vecs; -use crate::{ComputeIndexes, indexes, traits::ComputeDrawdown}; - -impl Vecs { - pub fn compute( - &mut self, - indexes: &indexes::Vecs, - count_vecs: &count::Vecs, - difficulty_vecs: &difficulty::Vecs, - rewards_vecs: &rewards::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.hash_rate - .compute_all(indexes, starting_indexes, exit, |v| 
{ - v.compute_transform2( - starting_indexes.height, - &count_vecs._24h_block_count.height, - &difficulty_vecs.as_hash.height, - |(i, block_count_sum, difficulty_as_hash, ..)| { - ( - i, - StoredF64::from( - (f64::from(block_count_sum) / TARGET_BLOCKS_PER_DAY_F64) - * f64::from(difficulty_as_hash), - ), - ) - }, - exit, - )?; - Ok(()) - })?; - - self.hash_rate_1w_sma - .compute_all(starting_indexes, exit, |v| { - v.compute_sma( - starting_indexes.dateindex, - self.hash_rate.dateindex.inner(), - 7, - exit, - )?; - Ok(()) - })?; - - self.hash_rate_1m_sma - .compute_all(starting_indexes, exit, |v| { - v.compute_sma( - starting_indexes.dateindex, - self.hash_rate.dateindex.inner(), - 30, - exit, - )?; - Ok(()) - })?; - - self.hash_rate_2m_sma - .compute_all(starting_indexes, exit, |v| { - v.compute_sma( - starting_indexes.dateindex, - self.hash_rate.dateindex.inner(), - 2 * 30, - exit, - )?; - Ok(()) - })?; - - self.hash_rate_1y_sma - .compute_all(starting_indexes, exit, |v| { - v.compute_sma( - starting_indexes.dateindex, - self.hash_rate.dateindex.inner(), - 365, - exit, - )?; - Ok(()) - })?; - - self.hash_rate_ath - .compute_all(indexes, starting_indexes, exit, |v| { - v.compute_all_time_high( - starting_indexes.height, - &self.hash_rate.height, - exit, - )?; - Ok(()) - })?; - - self.hash_rate_drawdown - .compute_all(indexes, starting_indexes, exit, |v| { - v.compute_drawdown( - starting_indexes.height, - &self.hash_rate.height, - &self.hash_rate_ath.height, - exit, - )?; - Ok(()) - })?; - - self.hash_price_ths - .compute_all(indexes, starting_indexes, exit, |v| { - v.compute_transform2( - starting_indexes.height, - rewards_vecs._24h_coinbase_sum.dollars.as_ref().unwrap(), - &self.hash_rate.height, - |(i, coinbase_sum, hashrate, ..)| { - let hashrate_ths = *hashrate / ONE_TERA_HASH; - let price = if hashrate_ths == 0.0 { - StoredF32::NAN - } else { - (*coinbase_sum / hashrate_ths).into() - }; - (i, price) - }, - exit, - )?; - Ok(()) - })?; - - self.hash_price_phs - 
.compute_all(indexes, starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.height, - &self.hash_price_ths.height, - |(i, price, ..)| (i, (*price * 1000.0).into()), - exit, - )?; - Ok(()) - })?; - - self.hash_value_ths - .compute_all(indexes, starting_indexes, exit, |v| { - v.compute_transform2( - starting_indexes.height, - &rewards_vecs._24h_coinbase_sum.sats, - &self.hash_rate.height, - |(i, coinbase_sum, hashrate, ..)| { - let hashrate_ths = *hashrate / ONE_TERA_HASH; - let value = if hashrate_ths == 0.0 { - StoredF32::NAN - } else { - StoredF32::from(*coinbase_sum as f64 / hashrate_ths) - }; - (i, value) - }, - exit, - )?; - Ok(()) - })?; - - self.hash_value_phs - .compute_all(indexes, starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.height, - &self.hash_value_ths.height, - |(i, value, ..)| (i, (*value * 1000.0).into()), - exit, - )?; - Ok(()) - })?; - - self.hash_price_ths_min - .compute_all(indexes, starting_indexes, exit, |v| { - v.compute_all_time_low_( - starting_indexes.height, - &self.hash_price_ths.height, - exit, - true, - )?; - Ok(()) - })?; - - self.hash_price_phs_min - .compute_all(indexes, starting_indexes, exit, |v| { - v.compute_all_time_low_( - starting_indexes.height, - &self.hash_price_phs.height, - exit, - true, - )?; - Ok(()) - })?; - - self.hash_value_ths_min - .compute_all(indexes, starting_indexes, exit, |v| { - v.compute_all_time_low_( - starting_indexes.height, - &self.hash_value_ths.height, - exit, - true, - )?; - Ok(()) - })?; - - self.hash_value_phs_min - .compute_all(indexes, starting_indexes, exit, |v| { - v.compute_all_time_low_( - starting_indexes.height, - &self.hash_value_phs.height, - exit, - true, - )?; - Ok(()) - })?; - - self.hash_price_rebound - .compute_all(indexes, starting_indexes, exit, |v| { - v.compute_percentage_difference( - starting_indexes.height, - &self.hash_price_phs.height, - &self.hash_price_phs_min.height, - exit, - )?; - Ok(()) - })?; - - self.hash_value_rebound - 
.compute_all(indexes, starting_indexes, exit, |v| { - v.compute_percentage_difference( - starting_indexes.height, - &self.hash_value_phs.height, - &self.hash_value_phs_min.height, - exit, - )?; - Ok(()) - })?; - - Ok(()) - } -} diff --git a/crates/brk_computer/src/blocks/mining/vecs.rs b/crates/brk_computer/src/blocks/mining/vecs.rs deleted file mode 100644 index 77c9075a1..000000000 --- a/crates/brk_computer/src/blocks/mining/vecs.rs +++ /dev/null @@ -1,26 +0,0 @@ -use brk_traversable::Traversable; -use brk_types::{StoredF32, StoredF64}; - -use crate::internal::{ComputedFromHeightLast, ComputedFromDateLast}; - -/// Mining-related metrics: hash rate, hash price, hash value -#[derive(Clone, Traversable)] -pub struct Vecs { - pub hash_rate: ComputedFromHeightLast, - pub hash_rate_1w_sma: ComputedFromDateLast, - pub hash_rate_1m_sma: ComputedFromDateLast, - pub hash_rate_2m_sma: ComputedFromDateLast, - pub hash_rate_1y_sma: ComputedFromDateLast, - pub hash_rate_ath: ComputedFromHeightLast, - pub hash_rate_drawdown: ComputedFromHeightLast, - pub hash_price_ths: ComputedFromHeightLast, - pub hash_price_ths_min: ComputedFromHeightLast, - pub hash_price_phs: ComputedFromHeightLast, - pub hash_price_phs_min: ComputedFromHeightLast, - pub hash_price_rebound: ComputedFromHeightLast, - pub hash_value_ths: ComputedFromHeightLast, - pub hash_value_ths_min: ComputedFromHeightLast, - pub hash_value_phs: ComputedFromHeightLast, - pub hash_value_phs_min: ComputedFromHeightLast, - pub hash_value_rebound: ComputedFromHeightLast, -} diff --git a/crates/brk_computer/src/blocks/mod.rs b/crates/brk_computer/src/blocks/mod.rs index a5a852126..d3e8fe0b5 100644 --- a/crates/brk_computer/src/blocks/mod.rs +++ b/crates/brk_computer/src/blocks/mod.rs @@ -2,8 +2,6 @@ pub mod count; pub mod difficulty; pub mod halving; pub mod interval; -pub mod mining; -pub mod rewards; pub mod size; pub mod time; pub mod weight; @@ -12,14 +10,12 @@ mod compute; mod import; use brk_traversable::Traversable; 
-use vecdb::Database; +use vecdb::{Database, Rw, StorageMode}; pub use count::Vecs as CountVecs; pub use difficulty::Vecs as DifficultyVecs; pub use halving::Vecs as HalvingVecs; pub use interval::Vecs as IntervalVecs; -pub use mining::Vecs as MiningVecs; -pub use rewards::Vecs as RewardsVecs; pub use size::Vecs as SizeVecs; pub use time::Vecs as TimeVecs; pub use weight::Vecs as WeightVecs; @@ -28,29 +24,36 @@ pub const DB_NAME: &str = "blocks"; pub(crate) const TARGET_BLOCKS_PER_DAY_F64: f64 = 144.0; pub(crate) const TARGET_BLOCKS_PER_DAY_F32: f32 = 144.0; +pub(crate) const TARGET_BLOCKS_PER_MINUTE1: u64 = 0; +pub(crate) const TARGET_BLOCKS_PER_MINUTE5: u64 = 0; +pub(crate) const TARGET_BLOCKS_PER_MINUTE10: u64 = 1; +pub(crate) const TARGET_BLOCKS_PER_MINUTE30: u64 = 3; +pub(crate) const TARGET_BLOCKS_PER_HOUR1: u64 = 6; +pub(crate) const TARGET_BLOCKS_PER_HOUR4: u64 = 24; +pub(crate) const TARGET_BLOCKS_PER_HOUR12: u64 = 72; pub(crate) const TARGET_BLOCKS_PER_DAY: u64 = 144; +pub(crate) const TARGET_BLOCKS_PER_DAY3: u64 = 3 * TARGET_BLOCKS_PER_DAY; pub(crate) const TARGET_BLOCKS_PER_WEEK: u64 = 7 * TARGET_BLOCKS_PER_DAY; pub(crate) const TARGET_BLOCKS_PER_MONTH: u64 = 30 * TARGET_BLOCKS_PER_DAY; pub(crate) const TARGET_BLOCKS_PER_QUARTER: u64 = 3 * TARGET_BLOCKS_PER_MONTH; pub(crate) const TARGET_BLOCKS_PER_SEMESTER: u64 = 2 * TARGET_BLOCKS_PER_QUARTER; pub(crate) const TARGET_BLOCKS_PER_YEAR: u64 = 2 * TARGET_BLOCKS_PER_SEMESTER; pub(crate) const TARGET_BLOCKS_PER_DECADE: u64 = 10 * TARGET_BLOCKS_PER_YEAR; +pub(crate) const TARGET_BLOCKS_PER_HALVING: u64 = 210_000; pub(crate) const ONE_TERA_HASH: f64 = 1_000_000_000_000.0; -#[derive(Clone, Traversable)] -pub struct Vecs { +#[derive(Traversable)] +pub struct Vecs { #[traversable(skip)] pub(crate) db: Database, - pub count: CountVecs, - pub interval: IntervalVecs, + pub count: CountVecs, + pub interval: IntervalVecs, #[traversable(flatten)] - pub size: SizeVecs, + pub size: SizeVecs, #[traversable(flatten)] - pub 
weight: WeightVecs, - pub time: TimeVecs, - pub mining: MiningVecs, - pub rewards: RewardsVecs, - pub difficulty: DifficultyVecs, - pub halving: HalvingVecs, + pub weight: WeightVecs, + pub time: TimeVecs, + pub difficulty: DifficultyVecs, + pub halving: HalvingVecs, } diff --git a/crates/brk_computer/src/blocks/rewards/compute.rs b/crates/brk_computer/src/blocks/rewards/compute.rs deleted file mode 100644 index ebf871d40..000000000 --- a/crates/brk_computer/src/blocks/rewards/compute.rs +++ /dev/null @@ -1,175 +0,0 @@ -use brk_error::Result; -use brk_indexer::Indexer; -use brk_types::{CheckedSub, Dollars, HalvingEpoch, Height, Sats, StoredF32, TxOutIndex}; -use vecdb::{Exit, IterableVec, TypedVecIterator, VecIndex}; - -use super::super::count; -use super::Vecs; -use crate::{ComputeIndexes, indexes, transactions}; - -impl Vecs { - pub fn compute( - &mut self, - indexer: &Indexer, - indexes: &indexes::Vecs, - count_vecs: &count::Vecs, - transactions_fees: &transactions::FeesVecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.coinbase - .compute_all(indexes, starting_indexes, exit, |vec| { - let mut txindex_to_first_txoutindex_iter = - indexer.vecs.transactions.first_txoutindex.iter()?; - let mut txindex_to_output_count_iter = - indexes.txindex.output_count.iter(); - let mut txoutindex_to_value_iter = indexer.vecs.outputs.value.iter()?; - vec.compute_transform( - starting_indexes.height, - &indexer.vecs.transactions.first_txindex, - |(height, txindex, ..)| { - let first_txoutindex = txindex_to_first_txoutindex_iter - .get_unwrap(txindex) - .to_usize(); - let output_count = txindex_to_output_count_iter.get_unwrap(txindex); - let mut sats = Sats::ZERO; - (first_txoutindex..first_txoutindex + usize::from(output_count)).for_each( - |txoutindex| { - sats += txoutindex_to_value_iter - .get_unwrap(TxOutIndex::from(txoutindex)); - }, - ); - (height, sats) - }, - exit, - )?; - Ok(()) - })?; - - let mut height_to_coinbase_iter = 
self.coinbase.sats.height.into_iter(); - self._24h_coinbase_sum.sats.compute_transform( - starting_indexes.height, - &count_vecs._24h_block_count.height, - |(h, count, ..)| { - let range = *h - (*count - 1)..=*h; - let sum = range - .map(Height::from) - .map(|h| height_to_coinbase_iter.get_unwrap(h)) - .sum::(); - (h, sum) - }, - exit, - )?; - drop(height_to_coinbase_iter); - - if let (Some(dollars_out), Some(dollars_in)) = - (&mut self._24h_coinbase_sum.dollars, &self.coinbase.dollars) - { - let mut height_to_coinbase_iter = dollars_in.height.into_iter(); - dollars_out.compute_transform( - starting_indexes.height, - &count_vecs._24h_block_count.height, - |(h, count, ..)| { - let range = *h - (*count - 1)..=*h; - let sum = range - .map(Height::from) - .map(|h| height_to_coinbase_iter.get_unwrap(h)) - .sum::(); - (h, sum) - }, - exit, - )?; - } - - self.subsidy - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_transform2( - starting_indexes.height, - &self.coinbase.sats.height, - &transactions_fees.fee.sats.height.sum_cum.sum.0, - |(height, coinbase, fees, ..)| { - ( - height, - coinbase.checked_sub(fees).unwrap_or_else(|| { - dbg!(height, coinbase, fees); - panic!() - }), - ) - }, - exit, - )?; - Ok(()) - })?; - - self.unclaimed_rewards - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_transform( - starting_indexes.height, - &self.subsidy.sats.height, - |(height, subsidy, ..)| { - let halving = HalvingEpoch::from(height); - let expected = Sats::FIFTY_BTC / 2_usize.pow(halving.to_usize() as u32); - (height, expected.checked_sub(subsidy).unwrap()) - }, - exit, - )?; - Ok(()) - })?; - - self.fee_dominance.compute_transform2( - starting_indexes.dateindex, - &transactions_fees.fee.sats.dateindex.sum_cum.sum.0, - &self.coinbase.sats.dateindex.sum_cum.sum.0, - |(i, fee, coinbase, ..)| { - let coinbase_f64 = u64::from(coinbase) as f64; - let dominance = if coinbase_f64 == 0.0 { - StoredF32::NAN - } else { - 
StoredF32::from(u64::from(fee) as f64 / coinbase_f64 * 100.0) - }; - (i, dominance) - }, - exit, - )?; - - self.subsidy_dominance.compute_transform2( - starting_indexes.dateindex, - &self.subsidy.sats.dateindex.sum_cum.sum.0, - &self.coinbase.sats.dateindex.sum_cum.sum.0, - |(i, subsidy, coinbase, ..)| { - let coinbase_f64 = u64::from(coinbase) as f64; - let dominance = if coinbase_f64 == 0.0 { - StoredF32::NAN - } else { - StoredF32::from(u64::from(subsidy) as f64 / coinbase_f64 * 100.0) - }; - (i, dominance) - }, - exit, - )?; - - if let Some(sma) = self.subsidy_usd_1y_sma.as_mut() { - let date_to_coinbase_usd_sum = &self - .coinbase - .dollars - .as_ref() - .unwrap() - .dateindex - .sum_cum - .sum - .0; - - sma.compute_all(starting_indexes, exit, |v| { - v.compute_sma( - starting_indexes.dateindex, - date_to_coinbase_usd_sum, - 365, - exit, - )?; - Ok(()) - })?; - } - - Ok(()) - } -} diff --git a/crates/brk_computer/src/blocks/rewards/import.rs b/crates/brk_computer/src/blocks/rewards/import.rs deleted file mode 100644 index 48ac51563..000000000 --- a/crates/brk_computer/src/blocks/rewards/import.rs +++ /dev/null @@ -1,46 +0,0 @@ -use brk_error::Result; -use brk_types::Version; -use vecdb::{Database, EagerVec, ImportableVec}; - -use super::Vecs; -use crate::{ - indexes, - internal::{ComputedFromDateLast, ValueFromHeightFull, ValueHeight, ValueFromHeightSumCum}, - price, -}; - -impl Vecs { - pub fn forced_import( - db: &Database, - version: Version, - indexes: &indexes::Vecs, - price: Option<&price::Vecs>, - ) -> Result { - let compute_dollars = price.is_some(); - - Ok(Self { - _24h_coinbase_sum: ValueHeight::forced_import( - db, - "24h_coinbase_sum", - version, - compute_dollars, - )?, - coinbase: ValueFromHeightFull::forced_import(db, "coinbase", version, indexes, price)?, - subsidy: ValueFromHeightFull::forced_import(db, "subsidy", version, indexes, price)?, - unclaimed_rewards: ValueFromHeightSumCum::forced_import( - db, - "unclaimed_rewards", - version, - 
indexes, - price, - )?, - fee_dominance: EagerVec::forced_import(db, "fee_dominance", version)?, - subsidy_dominance: EagerVec::forced_import(db, "subsidy_dominance", version)?, - subsidy_usd_1y_sma: compute_dollars - .then(|| { - ComputedFromDateLast::forced_import(db, "subsidy_usd_1y_sma", version, indexes) - }) - .transpose()?, - }) - } -} diff --git a/crates/brk_computer/src/blocks/rewards/vecs.rs b/crates/brk_computer/src/blocks/rewards/vecs.rs deleted file mode 100644 index dc8075cfc..000000000 --- a/crates/brk_computer/src/blocks/rewards/vecs.rs +++ /dev/null @@ -1,17 +0,0 @@ -use brk_traversable::Traversable; -use brk_types::{DateIndex, Dollars, StoredF32}; -use vecdb::{EagerVec, PcoVec}; - -use crate::internal::{ComputedFromDateLast, ValueFromHeightFull, ValueHeight, ValueFromHeightSumCum}; - -/// Coinbase/subsidy/rewards metrics -#[derive(Clone, Traversable)] -pub struct Vecs { - pub _24h_coinbase_sum: ValueHeight, - pub coinbase: ValueFromHeightFull, - pub subsidy: ValueFromHeightFull, - pub unclaimed_rewards: ValueFromHeightSumCum, - pub fee_dominance: EagerVec>, - pub subsidy_dominance: EagerVec>, - pub subsidy_usd_1y_sma: Option>, -} diff --git a/crates/brk_computer/src/blocks/size/compute.rs b/crates/brk_computer/src/blocks/size/compute.rs index 8771780a7..09784a50e 100644 --- a/crates/brk_computer/src/blocks/size/compute.rs +++ b/crates/brk_computer/src/blocks/size/compute.rs @@ -3,24 +3,19 @@ use brk_indexer::Indexer; use vecdb::Exit; use super::Vecs; -use crate::{ComputeIndexes, indexes}; +use crate::ComputeIndexes; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, indexer: &Indexer, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.size.derive_from( - indexes, - starting_indexes, - &indexer.vecs.blocks.total_size, - exit, - )?; + self.size + .compute_cumulative(starting_indexes, &indexer.vecs.blocks.total_size, exit)?; - self.vbytes.derive_from(indexes, starting_indexes, exit)?; + 
self.vbytes.compute_cumulative(starting_indexes, exit)?; Ok(()) } diff --git a/crates/brk_computer/src/blocks/size/import.rs b/crates/brk_computer/src/blocks/size/import.rs index 2815a91d0..b98c5c157 100644 --- a/crates/brk_computer/src/blocks/size/import.rs +++ b/crates/brk_computer/src/blocks/size/import.rs @@ -1,35 +1,30 @@ use brk_error::Result; use brk_indexer::Indexer; -use brk_types::{Height, StoredU64, Version}; -use vecdb::{Database, IterableCloneableVec, VecIndex}; +use brk_types::Version; +use vecdb::{Database, ReadableCloneableVec}; use super::Vecs; -use crate::{indexes, internal::{ComputedHeightDerivedFull, LazyComputedFromHeightFull}}; +use crate::{indexes, internal::{ComputedHeightDerivedFull, LazyComputedFromHeightFull, WeightToVbytes}}; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, indexer: &Indexer, indexes: &indexes::Vecs, ) -> Result { Ok(Self { - vbytes: LazyComputedFromHeightFull::forced_import_with_init( + vbytes: LazyComputedFromHeightFull::forced_import::( db, "block_vbytes", version, - indexer.vecs.blocks.weight.clone(), + &indexer.vecs.blocks.weight, indexes, - |height: Height, weight_iter| { - weight_iter - .get_at(height.to_usize()) - .map(|w| StoredU64::from(w.to_vbytes_floor())) - }, )?, size: ComputedHeightDerivedFull::forced_import( db, "block_size", - indexer.vecs.blocks.total_size.boxed_clone(), + indexer.vecs.blocks.total_size.read_only_boxed_clone(), version, indexes, )?, diff --git a/crates/brk_computer/src/blocks/size/vecs.rs b/crates/brk_computer/src/blocks/size/vecs.rs index 32396bafb..d2f01bc06 100644 --- a/crates/brk_computer/src/blocks/size/vecs.rs +++ b/crates/brk_computer/src/blocks/size/vecs.rs @@ -1,10 +1,11 @@ use brk_traversable::Traversable; use brk_types::{StoredU64, Weight}; +use vecdb::{Rw, StorageMode}; use crate::internal::{ComputedHeightDerivedFull, LazyComputedFromHeightFull}; -#[derive(Clone, Traversable)] -pub struct Vecs { - pub vbytes: 
LazyComputedFromHeightFull, - pub size: ComputedHeightDerivedFull, +#[derive(Traversable)] +pub struct Vecs { + pub vbytes: LazyComputedFromHeightFull, + pub size: ComputedHeightDerivedFull, } diff --git a/crates/brk_computer/src/blocks/time/compute.rs b/crates/brk_computer/src/blocks/time/compute.rs index 588023b37..95b622d45 100644 --- a/crates/brk_computer/src/blocks/time/compute.rs +++ b/crates/brk_computer/src/blocks/time/compute.rs @@ -1,15 +1,11 @@ use brk_error::Result; use brk_indexer::Indexer; -use brk_types::Timestamp; -use vecdb::{Exit, TypedVecIterator}; +use vecdb::{Exit, ReadableVec}; use super::Vecs; -use crate::{ComputeIndexes, indexes}; impl Vecs { - /// Compute height-to-time fields early, before indexes are computed. - /// These are needed by indexes::block to compute height_to_dateindex. - pub fn compute_early( + pub(crate) fn compute( &mut self, indexer: &Indexer, starting_height: brk_types::Height, @@ -19,15 +15,11 @@ impl Vecs { self.timestamp_monotonic.compute_transform( starting_height, &indexer.vecs.blocks.timestamp, - |(h, timestamp, height_to_timestamp_monotonic_iter)| { + |(h, timestamp, this)| { if prev_timestamp_monotonic.is_none() && let Some(prev_h) = h.decremented() { - prev_timestamp_monotonic.replace( - height_to_timestamp_monotonic_iter - .into_iter() - .get_unwrap(prev_h), - ); + prev_timestamp_monotonic.replace(this.collect_one(prev_h).unwrap()); } let timestamp_monotonic = prev_timestamp_monotonic.map_or(timestamp, |prev_d| prev_d.max(timestamp)); @@ -39,23 +31,4 @@ impl Vecs { Ok(()) } - - pub fn compute( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.timestamp.compute_all(|vec| { - vec.compute_transform( - starting_indexes.dateindex, - &indexes.dateindex.date, - |(di, d, ..)| (di, Timestamp::from(d)), - exit, - )?; - Ok(()) - })?; - - Ok(()) - } } diff --git a/crates/brk_computer/src/blocks/time/import.rs b/crates/brk_computer/src/blocks/time/import.rs 
index 4bb55b131..101dafc58 100644 --- a/crates/brk_computer/src/blocks/time/import.rs +++ b/crates/brk_computer/src/blocks/time/import.rs @@ -1,13 +1,13 @@ use brk_error::Result; use brk_indexer::Indexer; use brk_types::{Date, Height, Version}; -use vecdb::{Database, EagerVec, ImportableVec, IterableCloneableVec, LazyVecFrom1}; +use vecdb::{Database, EagerVec, ImportableVec, ReadableCloneableVec, LazyVecFrom1}; use super::Vecs; use crate::{indexes, internal::ComputedHeightDerivedFirst}; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, indexer: &Indexer, @@ -20,17 +20,16 @@ impl Vecs { date: LazyVecFrom1::init( "date", version, - timestamp_monotonic.boxed_clone(), - |height: Height, timestamp_iter| timestamp_iter.get(height).map(Date::from), + timestamp_monotonic.read_only_boxed_clone(), + |_height: Height, timestamp| Date::from(timestamp), ), timestamp_monotonic, timestamp: ComputedHeightDerivedFirst::forced_import( - db, "timestamp", - indexer.vecs.blocks.timestamp.boxed_clone(), + indexer.vecs.blocks.timestamp.read_only_boxed_clone(), version, indexes, - )?, + ), }) } } diff --git a/crates/brk_computer/src/blocks/time/vecs.rs b/crates/brk_computer/src/blocks/time/vecs.rs index 0d84340bd..d3a3b5155 100644 --- a/crates/brk_computer/src/blocks/time/vecs.rs +++ b/crates/brk_computer/src/blocks/time/vecs.rs @@ -1,13 +1,13 @@ use brk_traversable::Traversable; use brk_types::{Date, Height, Timestamp}; -use vecdb::{EagerVec, LazyVecFrom1, PcoVec}; +use vecdb::{EagerVec, LazyVecFrom1, PcoVec, Rw, StorageMode}; use crate::internal::ComputedHeightDerivedFirst; /// Timestamp and date metrics for blocks -#[derive(Clone, Traversable)] -pub struct Vecs { +#[derive(Traversable)] +pub struct Vecs { pub date: LazyVecFrom1, - pub timestamp_monotonic: EagerVec>, + pub timestamp_monotonic: M::Stored>>, pub timestamp: ComputedHeightDerivedFirst, } diff --git a/crates/brk_computer/src/blocks/weight/compute.rs 
b/crates/brk_computer/src/blocks/weight/compute.rs index 7cb5ad097..b425b69ca 100644 --- a/crates/brk_computer/src/blocks/weight/compute.rs +++ b/crates/brk_computer/src/blocks/weight/compute.rs @@ -3,18 +3,17 @@ use brk_indexer::Indexer; use vecdb::Exit; use super::Vecs; -use crate::{ComputeIndexes, indexes}; +use crate::ComputeIndexes; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, indexer: &Indexer, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { self.weight - .derive_from(indexes, starting_indexes, &indexer.vecs.blocks.weight, exit)?; + .compute_cumulative(starting_indexes, &indexer.vecs.blocks.weight, exit)?; Ok(()) } diff --git a/crates/brk_computer/src/blocks/weight/import.rs b/crates/brk_computer/src/blocks/weight/import.rs index c71822b9e..227189ca6 100644 --- a/crates/brk_computer/src/blocks/weight/import.rs +++ b/crates/brk_computer/src/blocks/weight/import.rs @@ -1,7 +1,7 @@ use brk_error::Result; use brk_indexer::Indexer; use brk_types::Version; -use vecdb::{Database, IterableCloneableVec}; +use vecdb::{Database, ReadableCloneableVec}; use super::Vecs; use crate::{ @@ -10,7 +10,7 @@ use crate::{ }; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, indexer: &Indexer, @@ -19,7 +19,7 @@ impl Vecs { let weight = ComputedHeightDerivedFull::forced_import( db, "block_weight", - indexer.vecs.blocks.weight.boxed_clone(), + indexer.vecs.blocks.weight.read_only_boxed_clone(), version, indexes, )?; @@ -27,7 +27,7 @@ impl Vecs { let fullness = LazyFromHeightTransformDistribution::from_derived::( "block_fullness", version, - indexer.vecs.blocks.weight.boxed_clone(), + indexer.vecs.blocks.weight.read_only_boxed_clone(), &weight, ); diff --git a/crates/brk_computer/src/blocks/weight/vecs.rs b/crates/brk_computer/src/blocks/weight/vecs.rs index 4a62fd67e..e5600004a 100644 --- a/crates/brk_computer/src/blocks/weight/vecs.rs +++ 
b/crates/brk_computer/src/blocks/weight/vecs.rs @@ -1,10 +1,11 @@ use brk_traversable::Traversable; use brk_types::{StoredF32, Weight}; +use vecdb::{Rw, StorageMode}; use crate::internal::{ComputedHeightDerivedFull, LazyFromHeightTransformDistribution}; -#[derive(Clone, Traversable)] -pub struct Vecs { - pub weight: ComputedHeightDerivedFull, +#[derive(Traversable)] +pub struct Vecs { + pub weight: ComputedHeightDerivedFull, pub fullness: LazyFromHeightTransformDistribution, } diff --git a/crates/brk_computer/src/cointime/activity/compute.rs b/crates/brk_computer/src/cointime/activity/compute.rs index 26e15fad9..74ebd9ead 100644 --- a/crates/brk_computer/src/cointime/activity/compute.rs +++ b/crates/brk_computer/src/cointime/activity/compute.rs @@ -1,14 +1,13 @@ use brk_error::Result; use brk_types::{Bitcoin, CheckedSub, StoredF64}; -use vecdb::{Exit, TypedVecIterator}; +use vecdb::Exit; use super::Vecs; -use crate::{ComputeIndexes, distribution, indexes}; +use crate::{ComputeIndexes, distribution}; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, distribution: &distribution::Vecs, exit: &Exit, @@ -23,7 +22,7 @@ impl Vecs { .height; self.coinblocks_created - .compute_all(indexes, starting_indexes, exit, |vec| { + .compute(starting_indexes, exit, |vec| { vec.compute_transform( starting_indexes.height, circulating_supply, @@ -41,52 +40,37 @@ impl Vecs { .coinblocks_destroyed; self.coinblocks_stored - .compute_all(indexes, starting_indexes, exit, |vec| { - let mut coinblocks_destroyed_iter = coinblocks_destroyed.height.into_iter(); - vec.compute_transform( + .compute(starting_indexes, exit, |vec| { + vec.compute_transform2( starting_indexes.height, &self.coinblocks_created.height, - |(i, created, ..)| { - let destroyed = coinblocks_destroyed_iter.get_unwrap(i); - (i, created.checked_sub(destroyed).unwrap()) - }, + &coinblocks_destroyed.height, + |(i, created, destroyed, ..)| (i, 
created.checked_sub(destroyed).unwrap()), exit, )?; Ok(()) })?; - self.liveliness - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_divide( - starting_indexes.height, - coinblocks_destroyed.height_cumulative.inner(), - self.coinblocks_created.height_cumulative.inner(), - exit, - )?; - Ok(()) - })?; + self.liveliness.height.compute_divide( + starting_indexes.height, + &*coinblocks_destroyed.height_cumulative, + &*self.coinblocks_created.height_cumulative, + exit, + )?; - self.vaultedness - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_transform( - starting_indexes.height, - &self.liveliness.height, - |(i, v, ..)| (i, StoredF64::from(1.0).checked_sub(v).unwrap()), - exit, - )?; - Ok(()) - })?; + self.vaultedness.height.compute_transform( + starting_indexes.height, + &self.liveliness.height, + |(i, v, ..)| (i, StoredF64::from(1.0).checked_sub(v).unwrap()), + exit, + )?; - self.activity_to_vaultedness_ratio - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_divide( - starting_indexes.height, - &self.liveliness.height, - &self.vaultedness.height, - exit, - )?; - Ok(()) - })?; + self.activity_to_vaultedness_ratio.height.compute_divide( + starting_indexes.height, + &self.liveliness.height, + &self.vaultedness.height, + exit, + )?; Ok(()) } diff --git a/crates/brk_computer/src/cointime/activity/import.rs b/crates/brk_computer/src/cointime/activity/import.rs index ef21138bd..c4f42f865 100644 --- a/crates/brk_computer/src/cointime/activity/import.rs +++ b/crates/brk_computer/src/cointime/activity/import.rs @@ -9,7 +9,7 @@ use crate::{ }; impl Vecs { - pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { + pub(crate) fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { Ok(Self { coinblocks_created: ComputedFromHeightSumCum::forced_import( db, diff --git a/crates/brk_computer/src/cointime/activity/vecs.rs 
b/crates/brk_computer/src/cointime/activity/vecs.rs index 99295c230..9f9d480f1 100644 --- a/crates/brk_computer/src/cointime/activity/vecs.rs +++ b/crates/brk_computer/src/cointime/activity/vecs.rs @@ -1,13 +1,14 @@ use brk_traversable::Traversable; use brk_types::StoredF64; +use vecdb::{Rw, StorageMode}; use crate::internal::{ComputedFromHeightLast, ComputedFromHeightSumCum}; -#[derive(Clone, Traversable)] -pub struct Vecs { - pub coinblocks_created: ComputedFromHeightSumCum, - pub coinblocks_stored: ComputedFromHeightSumCum, - pub liveliness: ComputedFromHeightLast, - pub vaultedness: ComputedFromHeightLast, - pub activity_to_vaultedness_ratio: ComputedFromHeightLast, +#[derive(Traversable)] +pub struct Vecs { + pub coinblocks_created: ComputedFromHeightSumCum, + pub coinblocks_stored: ComputedFromHeightSumCum, + pub liveliness: ComputedFromHeightLast, + pub vaultedness: ComputedFromHeightLast, + pub activity_to_vaultedness_ratio: ComputedFromHeightLast, } diff --git a/crates/brk_computer/src/cointime/adjusted/compute.rs b/crates/brk_computer/src/cointime/adjusted/compute.rs index 674afdce0..2b059bb93 100644 --- a/crates/brk_computer/src/cointime/adjusted/compute.rs +++ b/crates/brk_computer/src/cointime/adjusted/compute.rs @@ -1,4 +1,5 @@ use brk_error::Result; +use brk_types::{StoredF32, StoredF64}; use vecdb::Exit; use super::super::activity; @@ -6,48 +7,40 @@ use super::Vecs; use crate::{ComputeIndexes, supply}; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, starting_indexes: &ComputeIndexes, supply: &supply::Vecs, activity: &activity::Vecs, - has_price: bool, exit: &Exit, ) -> Result<()> { - self.cointime_adj_inflation_rate - .compute_all(starting_indexes, exit, |v| { - v.compute_multiply( - starting_indexes.dateindex, - activity.activity_to_vaultedness_ratio.dateindex.inner(), - &supply.inflation.dateindex, - exit, - )?; - Ok(()) - })?; + self.cointime_adj_inflation_rate.height.compute_transform2( + starting_indexes.height, + 
&activity.activity_to_vaultedness_ratio.height, + &supply.inflation.height, + |(h, ratio, inflation, ..)| (h, StoredF32::from((*ratio) * f64::from(*inflation))), + exit, + )?; self.cointime_adj_tx_btc_velocity - .compute_all(starting_indexes, exit, |v| { - v.compute_multiply( - starting_indexes.dateindex, - activity.activity_to_vaultedness_ratio.dateindex.inner(), - &supply.velocity.btc.dateindex, - exit, - )?; - Ok(()) - })?; + .height + .compute_transform2( + starting_indexes.height, + &activity.activity_to_vaultedness_ratio.height, + &supply.velocity.btc.height, + |(h, ratio, vel, ..)| (h, StoredF64::from(*ratio * *vel)), + exit, + )?; - if has_price { - self.cointime_adj_tx_usd_velocity - .compute_all(starting_indexes, exit, |v| { - v.compute_multiply( - starting_indexes.dateindex, - activity.activity_to_vaultedness_ratio.dateindex.inner(), - &supply.velocity.usd.as_ref().unwrap().dateindex, - exit, - )?; - Ok(()) - })?; - } + self.cointime_adj_tx_usd_velocity + .height + .compute_transform2( + starting_indexes.height, + &activity.activity_to_vaultedness_ratio.height, + &supply.velocity.usd.height, + |(h, ratio, vel, ..)| (h, StoredF64::from(*ratio * *vel)), + exit, + )?; Ok(()) } diff --git a/crates/brk_computer/src/cointime/adjusted/import.rs b/crates/brk_computer/src/cointime/adjusted/import.rs index b8629b928..3e1859069 100644 --- a/crates/brk_computer/src/cointime/adjusted/import.rs +++ b/crates/brk_computer/src/cointime/adjusted/import.rs @@ -3,24 +3,24 @@ use brk_types::Version; use vecdb::Database; use super::Vecs; -use crate::{indexes, internal::ComputedFromDateLast}; +use crate::{indexes, internal::ComputedFromHeightLast}; impl Vecs { - pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { + pub(crate) fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { Ok(Self { - cointime_adj_inflation_rate: ComputedFromDateLast::forced_import( + cointime_adj_inflation_rate: 
ComputedFromHeightLast::forced_import( db, "cointime_adj_inflation_rate", version, indexes, )?, - cointime_adj_tx_btc_velocity: ComputedFromDateLast::forced_import( + cointime_adj_tx_btc_velocity: ComputedFromHeightLast::forced_import( db, "cointime_adj_tx_btc_velocity", version, indexes, )?, - cointime_adj_tx_usd_velocity: ComputedFromDateLast::forced_import( + cointime_adj_tx_usd_velocity: ComputedFromHeightLast::forced_import( db, "cointime_adj_tx_usd_velocity", version, diff --git a/crates/brk_computer/src/cointime/adjusted/vecs.rs b/crates/brk_computer/src/cointime/adjusted/vecs.rs index c36a02a19..3be5ab394 100644 --- a/crates/brk_computer/src/cointime/adjusted/vecs.rs +++ b/crates/brk_computer/src/cointime/adjusted/vecs.rs @@ -1,11 +1,12 @@ use brk_traversable::Traversable; use brk_types::{StoredF32, StoredF64}; +use vecdb::{Rw, StorageMode}; -use crate::internal::ComputedFromDateLast; +use crate::internal::ComputedFromHeightLast; -#[derive(Clone, Traversable)] -pub struct Vecs { - pub cointime_adj_inflation_rate: ComputedFromDateLast, - pub cointime_adj_tx_btc_velocity: ComputedFromDateLast, - pub cointime_adj_tx_usd_velocity: ComputedFromDateLast, +#[derive(Traversable)] +pub struct Vecs { + pub cointime_adj_inflation_rate: ComputedFromHeightLast, + pub cointime_adj_tx_btc_velocity: ComputedFromHeightLast, + pub cointime_adj_tx_usd_velocity: ComputedFromHeightLast, } diff --git a/crates/brk_computer/src/cointime/cap/compute.rs b/crates/brk_computer/src/cointime/cap/compute.rs index bc584a20a..47892850d 100644 --- a/crates/brk_computer/src/cointime/cap/compute.rs +++ b/crates/brk_computer/src/cointime/cap/compute.rs @@ -4,15 +4,14 @@ use vecdb::Exit; use super::super::{activity, value}; use super::Vecs; -use crate::{ComputeIndexes, blocks, distribution, indexes, utils::OptionExt}; +use crate::{ComputeIndexes, distribution, mining}; impl Vecs { #[allow(clippy::too_many_arguments)] - pub fn compute( + pub(crate) fn compute( &mut self, - indexes: 
&indexes::Vecs, starting_indexes: &ComputeIndexes, - blocks: &blocks::Vecs, + mining: &mining::Vecs, distribution: &distribution::Vecs, activity: &activity::Vecs, value: &value::Vecs, @@ -23,7 +22,6 @@ impl Vecs { .all .metrics .realized - .u() .realized_cap .height; @@ -33,78 +31,51 @@ impl Vecs { .metrics .supply .total - .bitcoin + .btc .height; - self.thermo_cap - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_transform( - starting_indexes.height, - &blocks - .rewards - .subsidy - .dollars - .as_ref() - .unwrap() - .height_cumulative - .0, - |(i, v, ..)| (i, v), - exit, - )?; - Ok(()) - })?; + self.thermo_cap.height.compute_transform( + starting_indexes.height, + &*mining.rewards.subsidy.usd.height_cumulative, + |(i, v, ..)| (i, v), + exit, + )?; - self.investor_cap - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_subtract( - starting_indexes.height, - realized_cap, - &self.thermo_cap.height, - exit, - )?; - Ok(()) - })?; + self.investor_cap.height.compute_subtract( + starting_indexes.height, + realized_cap, + &self.thermo_cap.height, + exit, + )?; - self.vaulted_cap - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_divide( - starting_indexes.height, - realized_cap, - &activity.vaultedness.height, - exit, - )?; - Ok(()) - })?; + self.vaulted_cap.height.compute_divide( + starting_indexes.height, + realized_cap, + &activity.vaultedness.height, + exit, + )?; - self.active_cap - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_multiply( - starting_indexes.height, - realized_cap, - &activity.liveliness.height, - exit, - )?; - Ok(()) - })?; + self.active_cap.height.compute_multiply( + starting_indexes.height, + realized_cap, + &activity.liveliness.height, + exit, + )?; // cointime_cap = (cointime_value_destroyed_cumulative * circulating_supply) / coinblocks_stored_cumulative - self.cointime_cap - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_transform3( - 
starting_indexes.height, - value.cointime_value_destroyed.height_cumulative.inner(), - circulating_supply, - activity.coinblocks_stored.height_cumulative.inner(), - |(i, destroyed, supply, stored, ..)| { - let destroyed: f64 = *destroyed; - let supply: f64 = supply.into(); - let stored: f64 = *stored; - (i, Dollars::from(destroyed * supply / stored)) - }, - exit, - )?; - Ok(()) - })?; + self.cointime_cap.height.compute_transform3( + starting_indexes.height, + &value.cointime_value_destroyed.height_cumulative.0, + circulating_supply, + &activity.coinblocks_stored.height_cumulative.0, + |(i, destroyed, supply, stored, ..)| { + let destroyed: f64 = *destroyed; + let supply: f64 = supply.into(); + let stored: f64 = *stored; + (i, Dollars::from(destroyed * supply / stored)) + }, + exit, + )?; Ok(()) } diff --git a/crates/brk_computer/src/cointime/cap/import.rs b/crates/brk_computer/src/cointime/cap/import.rs index 310cab508..eecce86e7 100644 --- a/crates/brk_computer/src/cointime/cap/import.rs +++ b/crates/brk_computer/src/cointime/cap/import.rs @@ -6,7 +6,7 @@ use super::Vecs; use crate::{indexes, internal::ComputedFromHeightLast}; impl Vecs { - pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { + pub(crate) fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { Ok(Self { thermo_cap: ComputedFromHeightLast::forced_import(db, "thermo_cap", version, indexes)?, investor_cap: ComputedFromHeightLast::forced_import(db, "investor_cap", version, indexes)?, diff --git a/crates/brk_computer/src/cointime/cap/vecs.rs b/crates/brk_computer/src/cointime/cap/vecs.rs index 994503b33..cc37f3329 100644 --- a/crates/brk_computer/src/cointime/cap/vecs.rs +++ b/crates/brk_computer/src/cointime/cap/vecs.rs @@ -1,13 +1,14 @@ use brk_traversable::Traversable; use brk_types::Dollars; +use vecdb::{Rw, StorageMode}; use crate::internal::ComputedFromHeightLast; -#[derive(Clone, Traversable)] -pub struct Vecs { - pub 
thermo_cap: ComputedFromHeightLast, - pub investor_cap: ComputedFromHeightLast, - pub vaulted_cap: ComputedFromHeightLast, - pub active_cap: ComputedFromHeightLast, - pub cointime_cap: ComputedFromHeightLast, +#[derive(Traversable)] +pub struct Vecs { + pub thermo_cap: ComputedFromHeightLast, + pub investor_cap: ComputedFromHeightLast, + pub vaulted_cap: ComputedFromHeightLast, + pub active_cap: ComputedFromHeightLast, + pub cointime_cap: ComputedFromHeightLast, } diff --git a/crates/brk_computer/src/cointime/compute.rs b/crates/brk_computer/src/cointime/compute.rs index 9176795ab..b11b57ade 100644 --- a/crates/brk_computer/src/cointime/compute.rs +++ b/crates/brk_computer/src/cointime/compute.rs @@ -2,27 +2,26 @@ use brk_error::Result; use vecdb::Exit; use super::Vecs; -use crate::{blocks, distribution, indexes, price, supply, ComputeIndexes}; +use crate::{ComputeIndexes, blocks, distribution, mining, prices, supply}; impl Vecs { #[allow(clippy::too_many_arguments)] - pub fn compute( + pub(crate) fn compute( &mut self, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, - price: Option<&price::Vecs>, + prices: &prices::Vecs, blocks: &blocks::Vecs, + mining: &mining::Vecs, supply_vecs: &supply::Vecs, distribution: &distribution::Vecs, exit: &Exit, ) -> Result<()> { // Activity computes first (liveliness, vaultedness, etc.) 
self.activity - .compute(indexes, starting_indexes, distribution, exit)?; + .compute(starting_indexes, distribution, exit)?; // Supply computes next (depends on activity) self.supply.compute( - indexes, starting_indexes, distribution, &self.activity, @@ -30,57 +29,43 @@ impl Vecs { )?; // Adjusted velocity metrics (BTC) - can compute without price - self.adjusted.compute( + self.adjusted + .compute(starting_indexes, supply_vecs, &self.activity, exit)?; + + // Value computes (cointime value destroyed/created/stored, VOCDD) + self.value.compute( starting_indexes, - supply_vecs, + prices, + distribution, &self.activity, - price.is_some(), exit, )?; - // Price-dependent metrics - if let Some(price) = price { - // Value computes (cointime value destroyed/created/stored, VOCDD) - self.value.compute( - indexes, - starting_indexes, - price, - distribution, - &self.activity, - exit, - )?; + // Cap computes (thermo, investor, vaulted, active, cointime caps) + self.cap.compute( + starting_indexes, + mining, + distribution, + &self.activity, + &self.value, + exit, + )?; - // Cap computes (thermo, investor, vaulted, active, cointime caps) - self.cap.compute( - indexes, - starting_indexes, - blocks, - distribution, - &self.activity, - &self.value, - exit, - )?; + // Pricing computes (all prices derived from caps) + self.pricing.compute( + starting_indexes, + prices, + blocks, + distribution, + &self.activity, + &self.supply, + &self.cap, + exit, + )?; - // Pricing computes (all prices derived from caps) - self.pricing.compute( - indexes, - starting_indexes, - price, - distribution, - &self.activity, - &self.supply, - &self.cap, - exit, - )?; - - // Reserve Risk computes (depends on value.vocdd and price) - self.reserve_risk.compute( - starting_indexes, - price, - &self.value, - exit, - )?; - } + // Reserve Risk computes (depends on value.vocdd and price) + self.reserve_risk + .compute(starting_indexes, blocks, prices, &self.value, exit)?; let _lock = exit.lock(); 
self.db.compact()?; diff --git a/crates/brk_computer/src/cointime/import.rs b/crates/brk_computer/src/cointime/import.rs index 35a4f5baa..2be42cf25 100644 --- a/crates/brk_computer/src/cointime/import.rs +++ b/crates/brk_computer/src/cointime/import.rs @@ -6,32 +6,30 @@ use brk_types::Version; use vecdb::{Database, PAGE_SIZE}; use super::{ - ActivityVecs, AdjustedVecs, CapVecs, PricingVecs, ReserveRiskVecs, SupplyVecs, ValueVecs, Vecs, - DB_NAME, VERSION, + ActivityVecs, AdjustedVecs, CapVecs, DB_NAME, PricingVecs, ReserveRiskVecs, SupplyVecs, + VERSION, ValueVecs, Vecs, }; -use crate::{indexes, price}; +use crate::{indexes, prices}; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( parent_path: &Path, parent_version: Version, indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, ) -> Result { let db = Database::open(&parent_path.join(DB_NAME))?; db.set_min_len(PAGE_SIZE * 1_000_000)?; let version = parent_version + VERSION; let v1 = version + Version::ONE; - let compute_dollars = price.is_some(); - let activity = ActivityVecs::forced_import(&db, version, indexes)?; - let supply = SupplyVecs::forced_import(&db, v1, indexes, price)?; + let supply = SupplyVecs::forced_import(&db, v1, indexes, prices)?; let value = ValueVecs::forced_import(&db, v1, indexes)?; let cap = CapVecs::forced_import(&db, v1, indexes)?; let pricing = PricingVecs::forced_import(&db, version, indexes)?; let adjusted = AdjustedVecs::forced_import(&db, version, indexes)?; - let reserve_risk = ReserveRiskVecs::forced_import(&db, v1, indexes, compute_dollars)?; + let reserve_risk = ReserveRiskVecs::forced_import(&db, v1, indexes)?; let this = Self { db, diff --git a/crates/brk_computer/src/cointime/mod.rs b/crates/brk_computer/src/cointime/mod.rs index 6d3e97a3b..999739cab 100644 --- a/crates/brk_computer/src/cointime/mod.rs +++ b/crates/brk_computer/src/cointime/mod.rs @@ -11,7 +11,7 @@ mod import; use brk_traversable::Traversable; use brk_types::Version; 
-use vecdb::Database; +use vecdb::{Database, Rw, StorageMode}; pub use activity::Vecs as ActivityVecs; pub use adjusted::Vecs as AdjustedVecs; @@ -24,16 +24,16 @@ pub use value::Vecs as ValueVecs; pub const DB_NAME: &str = "cointime"; const VERSION: Version = Version::ZERO; -#[derive(Clone, Traversable)] -pub struct Vecs { +#[derive(Traversable)] +pub struct Vecs { #[traversable(skip)] pub(crate) db: Database, - pub activity: ActivityVecs, - pub supply: SupplyVecs, - pub value: ValueVecs, - pub cap: CapVecs, - pub pricing: PricingVecs, - pub adjusted: AdjustedVecs, - pub reserve_risk: ReserveRiskVecs, + pub activity: ActivityVecs, + pub supply: SupplyVecs, + pub value: ValueVecs, + pub cap: CapVecs, + pub pricing: PricingVecs, + pub adjusted: AdjustedVecs, + pub reserve_risk: ReserveRiskVecs, } diff --git a/crates/brk_computer/src/cointime/pricing/compute.rs b/crates/brk_computer/src/cointime/pricing/compute.rs index 765507982..832ccfd68 100644 --- a/crates/brk_computer/src/cointime/pricing/compute.rs +++ b/crates/brk_computer/src/cointime/pricing/compute.rs @@ -3,15 +3,15 @@ use vecdb::Exit; use super::super::{activity, cap, supply}; use super::Vecs; -use crate::{ComputeIndexes, distribution, indexes, price, utils::OptionExt}; +use crate::{ComputeIndexes, blocks, distribution, prices}; impl Vecs { #[allow(clippy::too_many_arguments)] - pub fn compute( + pub(crate) fn compute( &mut self, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, - price: &price::Vecs, + prices: &prices::Vecs, + blocks: &blocks::Vecs, distribution: &distribution::Vecs, activity: &activity::Vecs, supply: &supply::Vecs, @@ -24,88 +24,75 @@ impl Vecs { .metrics .supply .total - .bitcoin + .btc .height; let realized_price = &distribution .utxo_cohorts .all .metrics .realized - .u() .realized_price .height; - self.vaulted_price - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_divide( - starting_indexes.height, - realized_price, - &activity.vaultedness.height, - 
exit, - )?; - Ok(()) - })?; + self.vaulted_price.height.compute_divide( + starting_indexes.height, + realized_price, + &activity.vaultedness.height, + exit, + )?; self.vaulted_price_ratio.compute_rest( - price, + blocks, + prices, starting_indexes, exit, - Some(&self.vaulted_price.dateindex.0), + Some(&self.vaulted_price.height), )?; - self.active_price - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_multiply( - starting_indexes.height, - realized_price, - &activity.liveliness.height, - exit, - )?; - Ok(()) - })?; + self.active_price.height.compute_multiply( + starting_indexes.height, + realized_price, + &activity.liveliness.height, + exit, + )?; self.active_price_ratio.compute_rest( - price, + blocks, + prices, starting_indexes, exit, - Some(&self.active_price.dateindex.0), + Some(&self.active_price.height), )?; - self.true_market_mean - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_divide( - starting_indexes.height, - &cap.investor_cap.height, - &supply.active_supply.bitcoin.height, - exit, - )?; - Ok(()) - })?; + self.true_market_mean.height.compute_divide( + starting_indexes.height, + &cap.investor_cap.height, + &supply.active_supply.btc.height, + exit, + )?; self.true_market_mean_ratio.compute_rest( - price, + blocks, + prices, starting_indexes, exit, - Some(&self.true_market_mean.dateindex.0), + Some(&self.true_market_mean.height), )?; // cointime_price = cointime_cap / circulating_supply - self.cointime_price - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_divide( - starting_indexes.height, - &cap.cointime_cap.height, - circulating_supply, - exit, - )?; - Ok(()) - })?; + self.cointime_price.height.compute_divide( + starting_indexes.height, + &cap.cointime_cap.height, + circulating_supply, + exit, + )?; self.cointime_price_ratio.compute_rest( - price, + blocks, + prices, starting_indexes, exit, - Some(&self.cointime_price.dateindex.0), + Some(&self.cointime_price.height), )?; Ok(()) diff --git 
a/crates/brk_computer/src/cointime/pricing/import.rs b/crates/brk_computer/src/cointime/pricing/import.rs index 2e997f63e..c617152da 100644 --- a/crates/brk_computer/src/cointime/pricing/import.rs +++ b/crates/brk_computer/src/cointime/pricing/import.rs @@ -5,17 +5,17 @@ use vecdb::Database; use super::Vecs; use crate::{ indexes, - internal::{ComputedFromDateRatio, PriceFromHeight}, + internal::{ComputedFromHeightRatio, PriceFromHeight}, }; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, indexes: &indexes::Vecs, ) -> Result { let vaulted_price = PriceFromHeight::forced_import(db, "vaulted_price", version, indexes)?; - let vaulted_price_ratio = ComputedFromDateRatio::forced_import( + let vaulted_price_ratio = ComputedFromHeightRatio::forced_import( db, "vaulted_price", Some(&vaulted_price), @@ -25,7 +25,7 @@ impl Vecs { )?; let active_price = PriceFromHeight::forced_import(db, "active_price", version, indexes)?; - let active_price_ratio = ComputedFromDateRatio::forced_import( + let active_price_ratio = ComputedFromHeightRatio::forced_import( db, "active_price", Some(&active_price), @@ -36,7 +36,7 @@ impl Vecs { let true_market_mean = PriceFromHeight::forced_import(db, "true_market_mean", version, indexes)?; - let true_market_mean_ratio = ComputedFromDateRatio::forced_import( + let true_market_mean_ratio = ComputedFromHeightRatio::forced_import( db, "true_market_mean", Some(&true_market_mean), @@ -47,7 +47,7 @@ impl Vecs { let cointime_price = PriceFromHeight::forced_import(db, "cointime_price", version, indexes)?; - let cointime_price_ratio = ComputedFromDateRatio::forced_import( + let cointime_price_ratio = ComputedFromHeightRatio::forced_import( db, "cointime_price", Some(&cointime_price), diff --git a/crates/brk_computer/src/cointime/pricing/vecs.rs b/crates/brk_computer/src/cointime/pricing/vecs.rs index 4756849bc..ef7b5cda0 100644 --- a/crates/brk_computer/src/cointime/pricing/vecs.rs +++ 
b/crates/brk_computer/src/cointime/pricing/vecs.rs @@ -1,15 +1,17 @@ use brk_traversable::Traversable; +use brk_types::Dollars; +use vecdb::{Rw, StorageMode}; -use crate::internal::{ComputedFromDateRatio, PriceFromHeight}; +use crate::internal::{ComputedFromHeightLast, ComputedFromHeightRatio, Price}; -#[derive(Clone, Traversable)] -pub struct Vecs { - pub vaulted_price: PriceFromHeight, - pub vaulted_price_ratio: ComputedFromDateRatio, - pub active_price: PriceFromHeight, - pub active_price_ratio: ComputedFromDateRatio, - pub true_market_mean: PriceFromHeight, - pub true_market_mean_ratio: ComputedFromDateRatio, - pub cointime_price: PriceFromHeight, - pub cointime_price_ratio: ComputedFromDateRatio, +#[derive(Traversable)] +pub struct Vecs { + pub vaulted_price: Price>, + pub vaulted_price_ratio: ComputedFromHeightRatio, + pub active_price: Price>, + pub active_price_ratio: ComputedFromHeightRatio, + pub true_market_mean: Price>, + pub true_market_mean_ratio: ComputedFromHeightRatio, + pub cointime_price: Price>, + pub cointime_price_ratio: ComputedFromHeightRatio, } diff --git a/crates/brk_computer/src/cointime/reserve_risk/compute.rs b/crates/brk_computer/src/cointime/reserve_risk/compute.rs index 17e4d4c01..c1520b1f8 100644 --- a/crates/brk_computer/src/cointime/reserve_risk/compute.rs +++ b/crates/brk_computer/src/cointime/reserve_risk/compute.rs @@ -1,48 +1,40 @@ use brk_error::Result; -use brk_types::{Close, Dollars, StoredF64}; +use brk_types::StoredF64; use vecdb::Exit; use super::{super::value, Vecs}; -use crate::{price, ComputeIndexes}; +use crate::{blocks, ComputeIndexes, prices, traits::ComputeRollingMedianFromStarts}; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, starting_indexes: &ComputeIndexes, - price: &price::Vecs, + blocks: &blocks::Vecs, + prices: &prices::Vecs, value: &value::Vecs, exit: &Exit, ) -> Result<()> { - let vocdd_dateindex_sum = &value.vocdd.dateindex.sum.0; - - self.vocdd_365d_median.compute_rolling_median( - 
starting_indexes.dateindex, - vocdd_dateindex_sum, - 365, + self.vocdd_365d_median.compute_rolling_median_from_starts( + starting_indexes.height, + &blocks.count.height_1y_ago, + &value.vocdd.height, exit, )?; - let price_close = &price.usd.split.close.dateindex; - self.hodl_bank.compute_cumulative_transformed_binary( - starting_indexes.dateindex, - price_close, + starting_indexes.height, + &prices.usd.price, &self.vocdd_365d_median, - |price: Close, median: StoredF64| StoredF64::from(f64::from(price) - f64::from(median)), + |price, median| StoredF64::from(f64::from(price) - f64::from(median)), exit, )?; - if let Some(reserve_risk) = self.reserve_risk.as_mut() { - reserve_risk.compute_all(starting_indexes, exit, |v| { - v.compute_divide( - starting_indexes.dateindex, - price_close, - &self.hodl_bank, - exit, - )?; - Ok(()) - })?; - } + self.reserve_risk.height.compute_divide( + starting_indexes.height, + &prices.usd.price, + &self.hodl_bank, + exit, + )?; Ok(()) } diff --git a/crates/brk_computer/src/cointime/reserve_risk/import.rs b/crates/brk_computer/src/cointime/reserve_risk/import.rs index d6e9c1d8e..5545f8df7 100644 --- a/crates/brk_computer/src/cointime/reserve_risk/import.rs +++ b/crates/brk_computer/src/cointime/reserve_risk/import.rs @@ -3,22 +3,19 @@ use brk_types::Version; use vecdb::{Database, EagerVec, ImportableVec}; use super::Vecs; -use crate::{indexes, internal::ComputedFromDateLast}; +use crate::{indexes, internal::ComputedFromHeightLast}; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, indexes: &indexes::Vecs, - compute_dollars: bool, ) -> Result { let v1 = version + Version::ONE; Ok(Self { vocdd_365d_median: EagerVec::forced_import(db, "vocdd_365d_median", v1)?, hodl_bank: EagerVec::forced_import(db, "hodl_bank", v1)?, - reserve_risk: compute_dollars - .then(|| ComputedFromDateLast::forced_import(db, "reserve_risk", v1, indexes)) - .transpose()?, + reserve_risk: 
ComputedFromHeightLast::forced_import(db, "reserve_risk", v1, indexes)?, }) } } diff --git a/crates/brk_computer/src/cointime/reserve_risk/vecs.rs b/crates/brk_computer/src/cointime/reserve_risk/vecs.rs index 0969f0e02..c47b1d372 100644 --- a/crates/brk_computer/src/cointime/reserve_risk/vecs.rs +++ b/crates/brk_computer/src/cointime/reserve_risk/vecs.rs @@ -1,12 +1,12 @@ use brk_traversable::Traversable; -use brk_types::{DateIndex, StoredF64}; -use vecdb::{EagerVec, PcoVec}; +use brk_types::{Height, StoredF64}; +use vecdb::{EagerVec, PcoVec, Rw, StorageMode}; -use crate::internal::ComputedFromDateLast; +use crate::internal::ComputedFromHeightLast; -#[derive(Clone, Traversable)] -pub struct Vecs { - pub vocdd_365d_median: EagerVec>, - pub hodl_bank: EagerVec>, - pub reserve_risk: Option>, +#[derive(Traversable)] +pub struct Vecs { + pub vocdd_365d_median: M::Stored>>, + pub hodl_bank: M::Stored>>, + pub reserve_risk: ComputedFromHeightLast, } diff --git a/crates/brk_computer/src/cointime/supply/compute.rs b/crates/brk_computer/src/cointime/supply/compute.rs index 858a5454a..5563d9b43 100644 --- a/crates/brk_computer/src/cointime/supply/compute.rs +++ b/crates/brk_computer/src/cointime/supply/compute.rs @@ -3,12 +3,11 @@ use vecdb::Exit; use super::super::activity; use super::Vecs; -use crate::{ComputeIndexes, distribution, indexes}; +use crate::{ComputeIndexes, distribution}; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, distribution: &distribution::Vecs, activity: &activity::Vecs, @@ -23,27 +22,19 @@ impl Vecs { .sats .height; - self.vaulted_supply - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_multiply( - starting_indexes.height, - circulating_supply, - &activity.vaultedness.height, - exit, - )?; - Ok(()) - })?; + self.vaulted_supply.sats.height.compute_multiply( + starting_indexes.height, + circulating_supply, + &activity.vaultedness.height, + exit, + )?; - 
self.active_supply - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_multiply( - starting_indexes.height, - circulating_supply, - &activity.liveliness.height, - exit, - )?; - Ok(()) - })?; + self.active_supply.sats.height.compute_multiply( + starting_indexes.height, + circulating_supply, + &activity.liveliness.height, + exit, + )?; Ok(()) } diff --git a/crates/brk_computer/src/cointime/supply/import.rs b/crates/brk_computer/src/cointime/supply/import.rs index 32d34a88f..df1bb5a1f 100644 --- a/crates/brk_computer/src/cointime/supply/import.rs +++ b/crates/brk_computer/src/cointime/supply/import.rs @@ -3,14 +3,14 @@ use brk_types::Version; use vecdb::Database; use super::Vecs; -use crate::{indexes, internal::ValueFromHeightLast, price}; +use crate::{indexes, internal::ValueFromHeightLast, prices}; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, ) -> Result { Ok(Self { vaulted_supply: ValueFromHeightLast::forced_import( @@ -18,14 +18,14 @@ impl Vecs { "vaulted_supply", version, indexes, - price, + prices, )?, active_supply: ValueFromHeightLast::forced_import( db, "active_supply", version, indexes, - price, + prices, )?, }) } diff --git a/crates/brk_computer/src/cointime/supply/vecs.rs b/crates/brk_computer/src/cointime/supply/vecs.rs index 027794af1..55ed82df7 100644 --- a/crates/brk_computer/src/cointime/supply/vecs.rs +++ b/crates/brk_computer/src/cointime/supply/vecs.rs @@ -1,9 +1,10 @@ use brk_traversable::Traversable; +use vecdb::{Rw, StorageMode}; use crate::internal::ValueFromHeightLast; -#[derive(Clone, Traversable)] -pub struct Vecs { - pub vaulted_supply: ValueFromHeightLast, - pub active_supply: ValueFromHeightLast, +#[derive(Traversable)] +pub struct Vecs { + pub vaulted_supply: ValueFromHeightLast, + pub active_supply: ValueFromHeightLast, } diff --git a/crates/brk_computer/src/cointime/value/compute.rs 
b/crates/brk_computer/src/cointime/value/compute.rs index 05fcc3fc1..f1c28f1ba 100644 --- a/crates/brk_computer/src/cointime/value/compute.rs +++ b/crates/brk_computer/src/cointime/value/compute.rs @@ -1,17 +1,16 @@ use brk_error::Result; -use brk_types::{Bitcoin, Close, Dollars, StoredF64}; -use vecdb::{Exit, TypedVecIterator}; +use brk_types::{Bitcoin, Dollars, StoredF64}; +use vecdb::Exit; use super::super::activity; use super::Vecs; -use crate::{ComputeIndexes, distribution, indexes, price}; +use crate::{ComputeIndexes, distribution, prices}; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, - price: &price::Vecs, + prices: &prices::Vecs, distribution: &distribution::Vecs, activity: &activity::Vecs, exit: &Exit, @@ -36,14 +35,14 @@ impl Vecs { .metrics .supply .total - .bitcoin + .btc .height; self.cointime_value_destroyed - .compute_all(indexes, starting_indexes, exit, |vec| { + .compute(starting_indexes, exit,|vec| { vec.compute_multiply( starting_indexes.height, - &price.usd.split.close.height, + &prices.usd.price, &coinblocks_destroyed.height, exit, )?; @@ -51,10 +50,10 @@ impl Vecs { })?; self.cointime_value_created - .compute_all(indexes, starting_indexes, exit, |vec| { + .compute(starting_indexes, exit,|vec| { vec.compute_multiply( starting_indexes.height, - &price.usd.split.close.height, + &prices.usd.price, &activity.coinblocks_created.height, exit, )?; @@ -62,10 +61,10 @@ impl Vecs { })?; self.cointime_value_stored - .compute_all(indexes, starting_indexes, exit, |vec| { + .compute(starting_indexes, exit,|vec| { vec.compute_multiply( starting_indexes.height, - &price.usd.split.close.height, + &prices.usd.price, &activity.coinblocks_stored.height, exit, )?; @@ -76,14 +75,13 @@ impl Vecs { // Supply-adjusted to account for growing supply over time // This is a key input for Reserve Risk / HODL Bank calculation self.vocdd - .compute_all(indexes, starting_indexes, exit, |vec| { - 
let mut supply_iter = circulating_supply.into_iter(); - vec.compute_transform2( + .compute(starting_indexes, exit,|vec| { + vec.compute_transform3( starting_indexes.height, - &price.usd.split.close.height, + &prices.usd.price, &coindays_destroyed.height, - |(i, price, cdd, _): (_, Close, StoredF64, _)| { - let supply: Bitcoin = supply_iter.get_unwrap(i); + circulating_supply, + |(i, price, cdd, supply, _): (_, Dollars, StoredF64, Bitcoin, _)| { let supply_f64 = f64::from(supply); if supply_f64 == 0.0 { (i, StoredF64::from(0.0)) diff --git a/crates/brk_computer/src/cointime/value/import.rs b/crates/brk_computer/src/cointime/value/import.rs index 7e45a2fed..e89f6a681 100644 --- a/crates/brk_computer/src/cointime/value/import.rs +++ b/crates/brk_computer/src/cointime/value/import.rs @@ -6,7 +6,7 @@ use super::Vecs; use crate::{indexes, internal::ComputedFromHeightSumCum}; impl Vecs { - pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { + pub(crate) fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { Ok(Self { cointime_value_destroyed: ComputedFromHeightSumCum::forced_import( db, diff --git a/crates/brk_computer/src/cointime/value/vecs.rs b/crates/brk_computer/src/cointime/value/vecs.rs index d49782198..511fa38b4 100644 --- a/crates/brk_computer/src/cointime/value/vecs.rs +++ b/crates/brk_computer/src/cointime/value/vecs.rs @@ -1,12 +1,13 @@ use brk_traversable::Traversable; use brk_types::StoredF64; +use vecdb::{Rw, StorageMode}; use crate::internal::ComputedFromHeightSumCum; -#[derive(Clone, Traversable)] -pub struct Vecs { - pub cointime_value_destroyed: ComputedFromHeightSumCum, - pub cointime_value_created: ComputedFromHeightSumCum, - pub cointime_value_stored: ComputedFromHeightSumCum, - pub vocdd: ComputedFromHeightSumCum, +#[derive(Traversable)] +pub struct Vecs { + pub cointime_value_destroyed: ComputedFromHeightSumCum, + pub cointime_value_created: ComputedFromHeightSumCum, + pub 
cointime_value_stored: ComputedFromHeightSumCum, + pub vocdd: ComputedFromHeightSumCum, } diff --git a/crates/brk_computer/src/constants.rs b/crates/brk_computer/src/constants.rs index fe9af32e2..50c60ed15 100644 --- a/crates/brk_computer/src/constants.rs +++ b/crates/brk_computer/src/constants.rs @@ -31,7 +31,7 @@ pub struct Vecs { } impl Vecs { - pub fn new(version: Version, indexes: &indexes::Vecs) -> Self { + pub(crate) fn new(version: Version, indexes: &indexes::Vecs) -> Self { let v = version; Self { diff --git a/crates/brk_computer/src/distribution/address/activity.rs b/crates/brk_computer/src/distribution/address/activity.rs index b4ecd2584..d87349e92 100644 --- a/crates/brk_computer/src/distribution/address/activity.rs +++ b/crates/brk_computer/src/distribution/address/activity.rs @@ -20,9 +20,9 @@ use brk_traversable::Traversable; use brk_types::{Height, StoredU32, Version}; use derive_more::{Deref, DerefMut}; use rayon::prelude::*; -use vecdb::{AnyStoredVec, AnyVec, Database, Exit, GenericStoredVec}; +use vecdb::{AnyStoredVec, AnyVec, Database, Rw, StorageMode, WritableVec}; -use crate::{ComputeIndexes, indexes, internal::ComputedFromHeightDistribution}; +use crate::{indexes, internal::ComputedFromHeightDistribution}; /// Per-block activity counts - reset each block. /// @@ -40,7 +40,7 @@ pub struct BlockActivityCounts { impl BlockActivityCounts { /// Reset all counts to zero. #[inline] - pub fn reset(&mut self) { + pub(crate) fn reset(&mut self) { *self = Self::default(); } } @@ -51,12 +51,12 @@ pub struct AddressTypeToActivityCounts(pub ByAddressType); impl AddressTypeToActivityCounts { /// Reset all per-type counts. - pub fn reset(&mut self) { + pub(crate) fn reset(&mut self) { self.0.values_mut().for_each(|v| v.reset()); } /// Sum all types to get totals. 
- pub fn totals(&self) -> BlockActivityCounts { + pub(crate) fn totals(&self) -> BlockActivityCounts { let mut total = BlockActivityCounts::default(); for counts in self.0.values() { total.reactivated += counts.reactivated; @@ -69,18 +69,18 @@ impl AddressTypeToActivityCounts { } /// Activity count vectors for a single category (e.g., one address type or "all"). -#[derive(Clone, Traversable)] -pub struct ActivityCountVecs { - pub reactivated: ComputedFromHeightDistribution, - pub sending: ComputedFromHeightDistribution, - pub receiving: ComputedFromHeightDistribution, - pub balance_increased: ComputedFromHeightDistribution, - pub balance_decreased: ComputedFromHeightDistribution, - pub both: ComputedFromHeightDistribution, +#[derive(Traversable)] +pub struct ActivityCountVecs { + pub reactivated: ComputedFromHeightDistribution, + pub sending: ComputedFromHeightDistribution, + pub receiving: ComputedFromHeightDistribution, + pub balance_increased: ComputedFromHeightDistribution, + pub balance_decreased: ComputedFromHeightDistribution, + pub both: ComputedFromHeightDistribution, } impl ActivityCountVecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, version: Version, @@ -126,7 +126,7 @@ impl ActivityCountVecs { }) } - pub fn min_stateful_height(&self) -> usize { + pub(crate) fn min_stateful_height(&self) -> usize { self.reactivated .height .len() @@ -137,7 +137,7 @@ impl ActivityCountVecs { .min(self.both.height.len()) } - pub fn par_iter_height_mut(&mut self) -> impl ParallelIterator { + pub(crate) fn par_iter_height_mut(&mut self) -> impl ParallelIterator { [ &mut self.reactivated.height as &mut dyn AnyStoredVec, &mut self.sending.height as &mut dyn AnyStoredVec, @@ -149,7 +149,7 @@ impl ActivityCountVecs { .into_par_iter() } - pub fn reset_height(&mut self) -> Result<()> { + pub(crate) fn reset_height(&mut self) -> Result<()> { self.reactivated.height.reset()?; self.sending.height.reset()?; self.receiving.height.reset()?; @@ 
-159,7 +159,7 @@ impl ActivityCountVecs { Ok(()) } - pub fn truncate_push_height( + pub(crate) fn truncate_push_height( &mut self, height: Height, counts: &BlockActivityCounts, @@ -187,30 +187,11 @@ impl ActivityCountVecs { Ok(()) } - pub fn compute_rest( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.reactivated - .compute_rest(indexes, starting_indexes, exit)?; - self.sending - .compute_rest(indexes, starting_indexes, exit)?; - self.receiving - .compute_rest(indexes, starting_indexes, exit)?; - self.balance_increased - .compute_rest(indexes, starting_indexes, exit)?; - self.balance_decreased - .compute_rest(indexes, starting_indexes, exit)?; - self.both.compute_rest(indexes, starting_indexes, exit)?; - Ok(()) - } } /// Per-address-type activity count vecs. -#[derive(Clone, Deref, DerefMut, Traversable)] -pub struct AddressTypeToActivityCountVecs(ByAddressType); +#[derive(Deref, DerefMut, Traversable)] +pub struct AddressTypeToActivityCountVecs(ByAddressType>); impl From> for AddressTypeToActivityCountVecs { #[inline] @@ -220,7 +201,7 @@ impl From> for AddressTypeToActivityCountVecs { } impl AddressTypeToActivityCountVecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, version: Version, @@ -233,11 +214,11 @@ impl AddressTypeToActivityCountVecs { )) } - pub fn min_stateful_height(&self) -> usize { + pub(crate) fn min_stateful_height(&self) -> usize { self.0.values().map(|v| v.min_stateful_height()).min().unwrap_or(0) } - pub fn par_iter_height_mut(&mut self) -> impl ParallelIterator { + pub(crate) fn par_iter_height_mut(&mut self) -> impl ParallelIterator { let inner = &mut self.0; let mut vecs: Vec<&mut dyn AnyStoredVec> = Vec::new(); for type_vecs in [ @@ -260,7 +241,7 @@ impl AddressTypeToActivityCountVecs { vecs.into_par_iter() } - pub fn reset_height(&mut self) -> Result<()> { + pub(crate) fn reset_height(&mut self) -> Result<()> { 
self.p2pk65.reset_height()?; self.p2pk33.reset_height()?; self.p2pkh.reset_height()?; @@ -272,7 +253,7 @@ impl AddressTypeToActivityCountVecs { Ok(()) } - pub fn truncate_push_height( + pub(crate) fn truncate_push_height( &mut self, height: Height, counts: &AddressTypeToActivityCounts, @@ -293,34 +274,18 @@ impl AddressTypeToActivityCountVecs { Ok(()) } - pub fn compute_rest( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.p2pk65.compute_rest(indexes, starting_indexes, exit)?; - self.p2pk33.compute_rest(indexes, starting_indexes, exit)?; - self.p2pkh.compute_rest(indexes, starting_indexes, exit)?; - self.p2sh.compute_rest(indexes, starting_indexes, exit)?; - self.p2wpkh.compute_rest(indexes, starting_indexes, exit)?; - self.p2wsh.compute_rest(indexes, starting_indexes, exit)?; - self.p2tr.compute_rest(indexes, starting_indexes, exit)?; - self.p2a.compute_rest(indexes, starting_indexes, exit)?; - Ok(()) - } } /// Storage for activity metrics (global + per type). 
-#[derive(Clone, Traversable)] -pub struct AddressActivityVecs { - pub all: ActivityCountVecs, +#[derive(Traversable)] +pub struct AddressActivityVecs { + pub all: ActivityCountVecs, #[traversable(flatten)] - pub by_addresstype: AddressTypeToActivityCountVecs, + pub by_addresstype: AddressTypeToActivityCountVecs, } impl AddressActivityVecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, version: Version, @@ -334,23 +299,23 @@ impl AddressActivityVecs { }) } - pub fn min_stateful_height(&self) -> usize { + pub(crate) fn min_stateful_height(&self) -> usize { self.all.min_stateful_height().min(self.by_addresstype.min_stateful_height()) } - pub fn par_iter_height_mut(&mut self) -> impl ParallelIterator { + pub(crate) fn par_iter_height_mut(&mut self) -> impl ParallelIterator { self.all .par_iter_height_mut() .chain(self.by_addresstype.par_iter_height_mut()) } - pub fn reset_height(&mut self) -> Result<()> { + pub(crate) fn reset_height(&mut self) -> Result<()> { self.all.reset_height()?; self.by_addresstype.reset_height()?; Ok(()) } - pub fn truncate_push_height( + pub(crate) fn truncate_push_height( &mut self, height: Height, counts: &AddressTypeToActivityCounts, @@ -361,15 +326,4 @@ impl AddressActivityVecs { Ok(()) } - pub fn compute_rest( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.all.compute_rest(indexes, starting_indexes, exit)?; - self.by_addresstype - .compute_rest(indexes, starting_indexes, exit)?; - Ok(()) - } } diff --git a/crates/brk_computer/src/distribution/address/address_count.rs b/crates/brk_computer/src/distribution/address/address_count.rs index 57fd428c3..cf7c9be8c 100644 --- a/crates/brk_computer/src/distribution/address/address_count.rs +++ b/crates/brk_computer/src/distribution/address/address_count.rs @@ -5,21 +5,22 @@ use brk_types::{Height, StoredF64, StoredU64, Version}; use derive_more::{Deref, DerefMut}; use rayon::prelude::*; use 
vecdb::{ - AnyStoredVec, AnyVec, Database, EagerVec, Exit, GenericStoredVec, PcoVec, TypedVecIterator, + AnyStoredVec, AnyVec, Database, EagerVec, Exit, PcoVec, ReadableVec, Rw, StorageMode, + WritableVec, }; -use crate::{ComputeIndexes, indexes, internal::{ComputedFromDateLast, ComputedFromHeightLast}}; +use crate::{ComputeIndexes, blocks, indexes, internal::ComputedFromHeightLast}; /// Address count with 30d change metric for a single type. -#[derive(Clone, Traversable)] -pub struct AddrCountVecs { +#[derive(Traversable)] +pub struct AddrCountVecs { #[traversable(flatten)] - pub count: ComputedFromHeightLast, - pub _30d_change: ComputedFromDateLast, + pub count: ComputedFromHeightLast, + pub _30d_change: ComputedFromHeightLast, } impl AddrCountVecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, version: Version, @@ -27,7 +28,7 @@ impl AddrCountVecs { ) -> Result { Ok(Self { count: ComputedFromHeightLast::forced_import(db, name, version, indexes)?, - _30d_change: ComputedFromDateLast::forced_import( + _30d_change: ComputedFromHeightLast::forced_import( db, &format!("{name}_30d_change"), version, @@ -36,24 +37,18 @@ impl AddrCountVecs { }) } - pub fn compute_rest( + pub(crate) fn compute_rest( &mut self, - indexes: &indexes::Vecs, + blocks: &blocks::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.count.compute_rest(indexes, starting_indexes, exit)?; - - self._30d_change - .compute_all(starting_indexes, exit, |v| { - v.compute_change( - starting_indexes.dateindex, - &*self.count.dateindex, - 30, - exit, - )?; - Ok(()) - })?; + self._30d_change.height.compute_rolling_change( + starting_indexes.height, + &blocks.count.height_1m_ago, + &self.count.height, + exit, + )?; Ok(()) } @@ -65,7 +60,7 @@ pub struct AddressTypeToAddressCount(ByAddressType); impl AddressTypeToAddressCount { #[inline] - pub fn sum(&self) -> u64 { + pub(crate) fn sum(&self) -> u64 { self.0.values().sum() } } @@ -75,56 +70,14 @@ 
impl From<(&AddressTypeToAddrCountVecs, Height)> for AddressTypeToAddressCount { fn from((groups, starting_height): (&AddressTypeToAddrCountVecs, Height)) -> Self { if let Some(prev_height) = starting_height.decremented() { Self(ByAddressType { - p2pk65: groups - .p2pk65 - .count - .height - .into_iter() - .get_unwrap(prev_height) - .into(), - p2pk33: groups - .p2pk33 - .count - .height - .into_iter() - .get_unwrap(prev_height) - .into(), - p2pkh: groups - .p2pkh - .count - .height - .into_iter() - .get_unwrap(prev_height) - .into(), - p2sh: groups - .p2sh - .count - .height - .into_iter() - .get_unwrap(prev_height) - .into(), - p2wpkh: groups - .p2wpkh - .count - .height - .into_iter() - .get_unwrap(prev_height) - .into(), - p2wsh: groups - .p2wsh - .count - .height - .into_iter() - .get_unwrap(prev_height) - .into(), - p2tr: groups - .p2tr - .count - .height - .into_iter() - .get_unwrap(prev_height) - .into(), - p2a: groups.p2a.count.height.into_iter().get_unwrap(prev_height).into(), + p2pk65: groups.p2pk65.count.height.collect_one(prev_height).unwrap().into(), + p2pk33: groups.p2pk33.count.height.collect_one(prev_height).unwrap().into(), + p2pkh: groups.p2pkh.count.height.collect_one(prev_height).unwrap().into(), + p2sh: groups.p2sh.count.height.collect_one(prev_height).unwrap().into(), + p2wpkh: groups.p2wpkh.count.height.collect_one(prev_height).unwrap().into(), + p2wsh: groups.p2wsh.count.height.collect_one(prev_height).unwrap().into(), + p2tr: groups.p2tr.count.height.collect_one(prev_height).unwrap().into(), + p2a: groups.p2a.count.height.collect_one(prev_height).unwrap().into(), }) } else { Default::default() @@ -133,8 +86,8 @@ impl From<(&AddressTypeToAddrCountVecs, Height)> for AddressTypeToAddressCount { } /// Address count per address type, with height + derived indexes + 30d change. 
-#[derive(Clone, Deref, DerefMut, Traversable)] -pub struct AddressTypeToAddrCountVecs(ByAddressType); +#[derive(Deref, DerefMut, Traversable)] +pub struct AddressTypeToAddrCountVecs(ByAddressType>); impl From> for AddressTypeToAddrCountVecs { #[inline] @@ -144,7 +97,7 @@ impl From> for AddressTypeToAddrCountVecs { } impl AddressTypeToAddrCountVecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, version: Version, @@ -162,7 +115,7 @@ impl AddressTypeToAddrCountVecs { )) } - pub fn min_stateful_height(&self) -> usize { + pub(crate) fn min_stateful_height(&self) -> usize { self.p2pk65 .count .height @@ -176,7 +129,7 @@ impl AddressTypeToAddrCountVecs { .min(self.p2a.count.height.len()) } - pub fn par_iter_height_mut(&mut self) -> impl ParallelIterator { + pub(crate) fn par_iter_height_mut(&mut self) -> impl ParallelIterator { let inner = &mut self.0; [ &mut inner.p2pk65.count.height as &mut dyn AnyStoredVec, @@ -191,7 +144,7 @@ impl AddressTypeToAddrCountVecs { .into_par_iter() } - pub fn truncate_push_height( + pub(crate) fn truncate_push_height( &mut self, height: Height, addr_counts: &AddressTypeToAddressCount, @@ -231,8 +184,8 @@ impl AddressTypeToAddrCountVecs { Ok(()) } - pub fn reset_height(&mut self) -> Result<()> { - use vecdb::GenericStoredVec; + pub(crate) fn reset_height(&mut self) -> Result<()> { + use vecdb::WritableVec; self.p2pk65.count.height.reset()?; self.p2pk33.count.height.reset()?; self.p2pkh.count.height.reset()?; @@ -244,24 +197,24 @@ impl AddressTypeToAddrCountVecs { Ok(()) } - pub fn compute_rest( + pub(crate) fn compute_rest( &mut self, - indexes: &indexes::Vecs, + blocks: &blocks::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.p2pk65.compute_rest(indexes, starting_indexes, exit)?; - self.p2pk33.compute_rest(indexes, starting_indexes, exit)?; - self.p2pkh.compute_rest(indexes, starting_indexes, exit)?; - self.p2sh.compute_rest(indexes, starting_indexes, exit)?; - 
self.p2wpkh.compute_rest(indexes, starting_indexes, exit)?; - self.p2wsh.compute_rest(indexes, starting_indexes, exit)?; - self.p2tr.compute_rest(indexes, starting_indexes, exit)?; - self.p2a.compute_rest(indexes, starting_indexes, exit)?; + self.p2pk65.compute_rest(blocks, starting_indexes, exit)?; + self.p2pk33.compute_rest(blocks, starting_indexes, exit)?; + self.p2pkh.compute_rest(blocks, starting_indexes, exit)?; + self.p2sh.compute_rest(blocks, starting_indexes, exit)?; + self.p2wpkh.compute_rest(blocks, starting_indexes, exit)?; + self.p2wsh.compute_rest(blocks, starting_indexes, exit)?; + self.p2tr.compute_rest(blocks, starting_indexes, exit)?; + self.p2a.compute_rest(blocks, starting_indexes, exit)?; Ok(()) } - pub fn by_height(&self) -> Vec<&EagerVec>> { + pub(crate) fn by_height(&self) -> Vec<&EagerVec>> { vec![ &self.p2pk65.count.height, &self.p2pk33.count.height, @@ -275,15 +228,15 @@ impl AddressTypeToAddrCountVecs { } } -#[derive(Clone, Traversable)] -pub struct AddrCountsVecs { - pub all: AddrCountVecs, +#[derive(Traversable)] +pub struct AddrCountsVecs { + pub all: AddrCountVecs, #[traversable(flatten)] - pub by_addresstype: AddressTypeToAddrCountVecs, + pub by_addresstype: AddressTypeToAddrCountVecs, } impl AddrCountsVecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, version: Version, @@ -295,22 +248,22 @@ impl AddrCountsVecs { }) } - pub fn min_stateful_height(&self) -> usize { + pub(crate) fn min_stateful_height(&self) -> usize { self.all.count.height.len().min(self.by_addresstype.min_stateful_height()) } - pub fn par_iter_height_mut(&mut self) -> impl ParallelIterator { + pub(crate) fn par_iter_height_mut(&mut self) -> impl ParallelIterator { rayon::iter::once(&mut self.all.count.height as &mut dyn AnyStoredVec) .chain(self.by_addresstype.par_iter_height_mut()) } - pub fn reset_height(&mut self) -> Result<()> { + pub(crate) fn reset_height(&mut self) -> Result<()> { self.all.count.height.reset()?; 
self.by_addresstype.reset_height()?; Ok(()) } - pub fn truncate_push_height( + pub(crate) fn truncate_push_height( &mut self, height: Height, total: u64, @@ -322,32 +275,27 @@ impl AddrCountsVecs { Ok(()) } - pub fn compute_rest( + pub(crate) fn compute_rest( &mut self, - indexes: &indexes::Vecs, + blocks: &blocks::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { self.by_addresstype - .compute_rest(indexes, starting_indexes, exit)?; + .compute_rest(blocks, starting_indexes, exit)?; let sources = self.by_addresstype.by_height(); self.all .count - .compute_all(indexes, starting_indexes, exit, |height_vec| { - Ok(height_vec.compute_sum_of_others(starting_indexes.height, &sources, exit)?) - })?; + .height + .compute_sum_of_others(starting_indexes.height, &sources, exit)?; - self.all._30d_change - .compute_all(starting_indexes, exit, |v| { - v.compute_change( - starting_indexes.dateindex, - &*self.all.count.dateindex, - 30, - exit, - )?; - Ok(()) - })?; + self.all._30d_change.height.compute_rolling_change( + starting_indexes.height, + &blocks.count.height_1m_ago, + &self.all.count.height, + exit, + )?; Ok(()) } diff --git a/crates/brk_computer/src/distribution/address/data.rs b/crates/brk_computer/src/distribution/address/data.rs index b227b3f9d..b56455638 100644 --- a/crates/brk_computer/src/distribution/address/data.rs +++ b/crates/brk_computer/src/distribution/address/data.rs @@ -1,46 +1,30 @@ use brk_error::Result; use brk_traversable::Traversable; use brk_types::{ - EmptyAddressData, EmptyAddressIndex, FundedAddressData, FundedAddressIndex, Height, Version, + EmptyAddressData, EmptyAddressIndex, FundedAddressData, FundedAddressIndex, Height, }; use rayon::prelude::*; use vecdb::{ - AnyStoredVec, BytesVec, Database, GenericStoredVec, ImportOptions, ImportableVec, Stamp, + AnyStoredVec, BytesVec, Rw, Stamp, StorageMode, WritableVec, }; -const SAVED_STAMPED_CHANGES: u16 = 10; - /// Storage for both funded and empty address data. 
-#[derive(Clone, Traversable)] -pub struct AddressesDataVecs { - pub funded: BytesVec, - pub empty: BytesVec, +#[derive(Traversable)] +pub struct AddressesDataVecs { + pub funded: M::Stored>, + pub empty: M::Stored>, } impl AddressesDataVecs { - /// Import from database. - pub fn forced_import(db: &Database, version: Version) -> Result { - Ok(Self { - funded: BytesVec::forced_import_with( - ImportOptions::new(db, "fundedaddressdata", version) - .with_saved_stamped_changes(SAVED_STAMPED_CHANGES), - )?, - empty: BytesVec::forced_import_with( - ImportOptions::new(db, "emptyaddressdata", version) - .with_saved_stamped_changes(SAVED_STAMPED_CHANGES), - )?, - }) - } - /// Get minimum stamped height across funded and empty data. - pub fn min_stamped_height(&self) -> Height { + pub(crate) fn min_stamped_height(&self) -> Height { Height::from(self.funded.stamp()) .incremented() .min(Height::from(self.empty.stamp()).incremented()) } /// Rollback both funded and empty data to before the given stamp. - pub fn rollback_before(&mut self, stamp: Stamp) -> Result<[Stamp; 2]> { + pub(crate) fn rollback_before(&mut self, stamp: Stamp) -> Result<[Stamp; 2]> { Ok([ self.funded.rollback_before(stamp)?, self.empty.rollback_before(stamp)?, @@ -48,23 +32,14 @@ impl AddressesDataVecs { } /// Reset both funded and empty data. - pub fn reset(&mut self) -> Result<()> { + pub(crate) fn reset(&mut self) -> Result<()> { self.funded.reset()?; self.empty.reset()?; Ok(()) } - /// Flush both funded and empty data with stamp. - pub fn write(&mut self, stamp: Stamp, with_changes: bool) -> Result<()> { - self.funded - .stamped_write_maybe_with_changes(stamp, with_changes)?; - self.empty - .stamped_write_maybe_with_changes(stamp, with_changes)?; - Ok(()) - } - /// Returns a parallel iterator over all vecs for parallel writing. 
- pub fn par_iter_mut(&mut self) -> impl ParallelIterator { + pub(crate) fn par_iter_mut(&mut self) -> impl ParallelIterator { vec![ &mut self.funded as &mut dyn AnyStoredVec, &mut self.empty as &mut dyn AnyStoredVec, diff --git a/crates/brk_computer/src/distribution/address/growth_rate.rs b/crates/brk_computer/src/distribution/address/growth_rate.rs index d3804c30f..4a07529f2 100644 --- a/crates/brk_computer/src/distribution/address/growth_rate.rs +++ b/crates/brk_computer/src/distribution/address/growth_rate.rs @@ -4,10 +4,10 @@ use brk_cohort::{ByAddressType, zip2_by_addresstype}; use brk_error::Result; use brk_traversable::Traversable; use brk_types::{Height, StoredF32, StoredU64, Version}; -use vecdb::{Database, Exit, IterableCloneableVec}; +use vecdb::ReadableCloneableVec; use crate::{ - ComputeIndexes, indexes, + indexes, internal::{LazyBinaryComputedFromHeightDistribution, RatioU64F32}, }; @@ -26,67 +26,50 @@ pub struct GrowthRateVecs { } impl GrowthRateVecs { - pub fn forced_import( - db: &Database, + pub(crate) fn forced_import( version: Version, indexes: &indexes::Vecs, new_addr_count: &NewAddrCountVecs, addr_count: &AddrCountsVecs, ) -> Result { let all = make_growth_rate( - db, "growth_rate", version, indexes, &new_addr_count.all.height, &addr_count.all.count.height, - )?; + ); let by_addresstype: GrowthRateByType = zip2_by_addresstype( &new_addr_count.by_addresstype, &addr_count.by_addresstype, |name, new, addr| { - make_growth_rate( - db, + Ok(make_growth_rate( &format!("{name}_growth_rate"), version, indexes, &new.height, &addr.count.height, - ) + )) }, )?; Ok(Self { all, by_addresstype }) } - - pub fn derive_from( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.all.derive_from(indexes, starting_indexes, exit)?; - for vecs in self.by_addresstype.values_mut() { - vecs.derive_from(indexes, starting_indexes, exit)?; - } - Ok(()) - } } fn make_growth_rate( - db: &Database, name: 
&str, version: Version, indexes: &indexes::Vecs, new: &V1, addr: &V2, -) -> Result> +) -> LazyBinaryComputedFromHeightDistribution where - V1: IterableCloneableVec, - V2: IterableCloneableVec, + V1: ReadableCloneableVec, + V2: ReadableCloneableVec, { LazyBinaryComputedFromHeightDistribution::::forced_import::< RatioU64F32, - >(db, name, version, new.boxed_clone(), addr.boxed_clone(), indexes) + >(name, version, new.read_only_boxed_clone(), addr.read_only_boxed_clone(), indexes) } diff --git a/crates/brk_computer/src/distribution/address/indexes/any.rs b/crates/brk_computer/src/distribution/address/indexes/any.rs index 8f6434c1f..34b005a82 100644 --- a/crates/brk_computer/src/distribution/address/indexes/any.rs +++ b/crates/brk_computer/src/distribution/address/indexes/any.rs @@ -11,8 +11,8 @@ use brk_types::{ use rayon::prelude::*; use rustc_hash::FxHashMap; use vecdb::{ - AnyStoredVec, AnyVec, BytesVec, Database, GenericStoredVec, ImportOptions, ImportableVec, - Reader, Stamp, + AnyStoredVec, AnyVec, BytesVec, Database, ReadableVec, WritableVec, ImportOptions, ImportableVec, + Reader, Rw, Stamp, StorageMode, }; use super::super::AddressTypeToTypeIndexMap; @@ -22,14 +22,14 @@ const SAVED_STAMPED_CHANGES: u16 = 10; /// Macro to define AnyAddressIndexesVecs and its methods. macro_rules! define_any_address_indexes_vecs { ($(($field:ident, $variant:ident, $index:ty)),* $(,)?) => { - #[derive(Clone, Traversable)] - pub struct AnyAddressIndexesVecs { - $(pub $field: BytesVec<$index, AnyAddressIndex>,)* + #[derive(Traversable)] + pub struct AnyAddressIndexesVecs { + $(pub $field: M::Stored>,)* } impl AnyAddressIndexesVecs { /// Import from database. - pub fn forced_import(db: &Database, version: Version) -> Result { + pub(crate) fn forced_import(db: &Database, version: Version) -> Result { Ok(Self { $($field: BytesVec::forced_import_with( ImportOptions::new(db, "anyaddressindex", version) @@ -39,7 +39,7 @@ macro_rules! 
define_any_address_indexes_vecs { } /// Get minimum stamped height across all address types. - pub fn min_stamped_height(&self) -> Height { + pub(crate) fn min_stamped_height(&self) -> Height { [$(Height::from(self.$field.stamp()).incremented()),*] .into_iter() .min() @@ -47,78 +47,42 @@ macro_rules! define_any_address_indexes_vecs { } /// Rollback all address types to before the given stamp. - pub fn rollback_before(&mut self, stamp: Stamp) -> Result> { + pub(crate) fn rollback_before(&mut self, stamp: Stamp) -> Result> { Ok(vec![$(self.$field.rollback_before(stamp)?),*]) } /// Reset all address types. - pub fn reset(&mut self) -> Result<()> { + pub(crate) fn reset(&mut self) -> Result<()> { $(self.$field.reset()?;)* Ok(()) } /// Get address index for a given type and typeindex. - /// Uses get_any_or_read_at_unwrap to check updated layer (needed after rollback). - pub fn get(&self, address_type: OutputType, typeindex: TypeIndex, reader: &Reader) -> AnyAddressIndex { + /// Uses get_any_or_read_at to check updated layer (needed after rollback). + pub(crate) fn get(&self, address_type: OutputType, typeindex: TypeIndex, reader: &Reader) -> Result { match address_type { - $(OutputType::$variant => self.$field.get_any_or_read_at_unwrap(typeindex.into(), reader),)* + $(OutputType::$variant => Ok(self.$field.get_any_or_read_at(typeindex.into(), reader)?.unwrap()),)* _ => unreachable!("Invalid address type: {:?}", address_type), } } - /// Get address index with single read (no caching). - pub fn get_once(&self, address_type: OutputType, typeindex: TypeIndex) -> Result { - match address_type { - $(OutputType::$variant => self.$field.read_at_once(typeindex.into()).map_err(Into::into),)* - _ => Err(Error::UnsupportedType(address_type.to_string())), - } - } - - /// Update or push address index for a given type. 
- pub fn update_or_push(&mut self, address_type: OutputType, typeindex: TypeIndex, index: AnyAddressIndex) -> Result<()> { - match address_type { - $(OutputType::$variant => self.$field.update_or_push(typeindex.into(), index)?,)* - _ => unreachable!("Invalid address type: {:?}", address_type), - } - Ok(()) - } - - /// Get length for a given address type. - pub fn len_of(&self, address_type: OutputType) -> usize { - match address_type { - $(OutputType::$variant => self.$field.len(),)* - _ => unreachable!("Invalid address type: {:?}", address_type), - } - } - - /// Update existing entry (must be within bounds). - pub fn update(&mut self, address_type: OutputType, typeindex: TypeIndex, index: AnyAddressIndex) -> Result<()> { - match address_type { - $(OutputType::$variant => self.$field.update(typeindex.into(), index)?,)* - _ => unreachable!("Invalid address type: {:?}", address_type), - } - Ok(()) - } - - /// Push new entry (must be at exactly len position). - pub fn push(&mut self, address_type: OutputType, index: AnyAddressIndex) { - match address_type { - $(OutputType::$variant => self.$field.push(index),)* - _ => unreachable!("Invalid address type: {:?}", address_type), - } - } - - /// Write all address types with stamp. - pub fn write(&mut self, stamp: Stamp, with_changes: bool) -> Result<()> { - $(self.$field.stamped_write_maybe_with_changes(stamp, with_changes)?;)* - Ok(()) - } - /// Returns a parallel iterator over all vecs for parallel writing. - pub fn par_iter_mut(&mut self) -> impl ParallelIterator { + pub(crate) fn par_iter_mut(&mut self) -> impl ParallelIterator { vec![$(&mut self.$field as &mut dyn AnyStoredVec),*].into_par_iter() } } + + impl AnyAddressIndexesVecs { + /// Get address index with single read (no caching). 
+ pub fn get_once(&self, address_type: OutputType, typeindex: TypeIndex) -> Result { + match address_type { + $(OutputType::$variant => self.$field + .collect_one(<$index>::from(usize::from(typeindex))) + .ok_or_else(|| Error::UnsupportedType(address_type.to_string())),)* + _ => Err(Error::UnsupportedType(address_type.to_string())), + } + } + } }; } @@ -139,7 +103,7 @@ impl AnyAddressIndexesVecs { /// Accepts two maps (e.g. from empty and funded processing) and merges per-thread. /// Updates existing entries and pushes new ones (sorted). /// Returns (update_count, push_count). - pub fn par_batch_update( + pub(crate) fn par_batch_update( &mut self, updates1: AddressTypeToTypeIndexMap, updates2: AddressTypeToTypeIndexMap, diff --git a/crates/brk_computer/src/distribution/address/new_addr_count.rs b/crates/brk_computer/src/distribution/address/new_addr_count.rs index 8e8a2edfb..8310faf94 100644 --- a/crates/brk_computer/src/distribution/address/new_addr_count.rs +++ b/crates/brk_computer/src/distribution/address/new_addr_count.rs @@ -3,81 +3,66 @@ use brk_cohort::{ByAddressType, zip_by_addresstype}; use brk_error::Result; use brk_traversable::Traversable; -use brk_types::{Height, StoredU64, Version}; -use vecdb::{Database, Exit, TypedVecIterator}; +use brk_types::{StoredU64, Version}; +use vecdb::{Database, Exit, Ident, Rw, StorageMode}; use crate::{ComputeIndexes, indexes, internal::LazyComputedFromHeightFull}; use super::TotalAddrCountVecs; -/// New addresses by type - lazy delta with stored dateindex stats -pub type NewAddrCountByType = ByAddressType>; +/// New addresses by type - identity transform with stored day1 stats +/// The delta is computed at the compute step, not lazily +pub type NewAddrCountByType = ByAddressType>; /// New address count per block (global + per-type) -#[derive(Clone, Traversable)] -pub struct NewAddrCountVecs { - pub all: LazyComputedFromHeightFull, +#[derive(Traversable)] +pub struct NewAddrCountVecs { + pub all: 
LazyComputedFromHeightFull, #[traversable(flatten)] - pub by_addresstype: NewAddrCountByType, + pub by_addresstype: NewAddrCountByType, } impl NewAddrCountVecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, indexes: &indexes::Vecs, total_addr_count: &TotalAddrCountVecs, ) -> Result { - let all = LazyComputedFromHeightFull::forced_import_with_init( + let all = LazyComputedFromHeightFull::forced_import::( db, "new_addr_count", version, - total_addr_count.all.height.clone(), + &total_addr_count.all.height, indexes, - delta_init_fn, )?; - let by_addresstype: NewAddrCountByType = zip_by_addresstype( - &total_addr_count.by_addresstype, - |name, total| { - LazyComputedFromHeightFull::forced_import_with_init( + let by_addresstype: NewAddrCountByType = + zip_by_addresstype(&total_addr_count.by_addresstype, |name, total| { + LazyComputedFromHeightFull::forced_import::( db, &format!("{name}_new_addr_count"), version, - total.height.clone(), + &total.height, indexes, - delta_init_fn, ) - }, - )?; + })?; - Ok(Self { all, by_addresstype }) + Ok(Self { + all, + by_addresstype, + }) } - pub fn derive_from( + pub(crate) fn compute_cumulative( &mut self, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.all.derive_from(indexes, starting_indexes, exit)?; + self.all.compute_cumulative(starting_indexes, exit)?; for vecs in self.by_addresstype.values_mut() { - vecs.derive_from(indexes, starting_indexes, exit)?; + vecs.compute_cumulative(starting_indexes, exit)?; } Ok(()) } } - -/// Delta init function: value[h] = source[h] - source[h-1] -fn delta_init_fn( - h: Height, - total_iter: &mut dyn TypedVecIterator, -) -> Option { - let current: u64 = total_iter.get(h)?.into(); - let prev: u64 = h - .decremented() - .and_then(|prev_h| total_iter.get(prev_h)) - .map(|v: StoredU64| v.into()) - .unwrap_or(0); - Some(StoredU64::from(current.saturating_sub(prev))) -} diff --git 
a/crates/brk_computer/src/distribution/address/total_addr_count.rs b/crates/brk_computer/src/distribution/address/total_addr_count.rs index d3482c26f..fd66c2aea 100644 --- a/crates/brk_computer/src/distribution/address/total_addr_count.rs +++ b/crates/brk_computer/src/distribution/address/total_addr_count.rs @@ -4,9 +4,9 @@ use brk_cohort::{ByAddressType, zip2_by_addresstype}; use brk_error::Result; use brk_traversable::Traversable; use brk_types::{StoredU64, Version}; -use vecdb::{Database, Exit, IterableCloneableVec}; +use vecdb::ReadableCloneableVec; -use crate::{ComputeIndexes, indexes, internal::{LazyBinaryComputedFromHeightLast, U64Plus}}; +use crate::{indexes, internal::{LazyBinaryComputedFromHeightLast, U64Plus}}; use super::AddrCountsVecs; @@ -23,50 +23,35 @@ pub struct TotalAddrCountVecs { } impl TotalAddrCountVecs { - pub fn forced_import( - db: &Database, + pub(crate) fn forced_import( version: Version, indexes: &indexes::Vecs, addr_count: &AddrCountsVecs, empty_addr_count: &AddrCountsVecs, ) -> Result { let all = LazyBinaryComputedFromHeightLast::forced_import::( - db, "total_addr_count", version, - addr_count.all.count.height.boxed_clone(), - empty_addr_count.all.count.height.boxed_clone(), + addr_count.all.count.height.read_only_boxed_clone(), + empty_addr_count.all.count.height.read_only_boxed_clone(), indexes, - )?; + ); let by_addresstype: TotalAddrCountByType = zip2_by_addresstype( &addr_count.by_addresstype, &empty_addr_count.by_addresstype, |name, addr, empty| { - LazyBinaryComputedFromHeightLast::forced_import::( - db, + Ok(LazyBinaryComputedFromHeightLast::forced_import::( &format!("{name}_total_addr_count"), version, - addr.count.height.boxed_clone(), - empty.count.height.boxed_clone(), + addr.count.height.read_only_boxed_clone(), + empty.count.height.read_only_boxed_clone(), indexes, - ) + )) }, )?; Ok(Self { all, by_addresstype }) } - pub fn derive_from( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: 
&Exit, - ) -> Result<()> { - self.all.derive_from(indexes, starting_indexes, exit)?; - for vecs in self.by_addresstype.values_mut() { - vecs.derive_from(indexes, starting_indexes, exit)?; - } - Ok(()) - } } diff --git a/crates/brk_computer/src/distribution/address/type_map/height_vec.rs b/crates/brk_computer/src/distribution/address/type_map/height_vec.rs index 245387ccf..3ff51503e 100644 --- a/crates/brk_computer/src/distribution/address/type_map/height_vec.rs +++ b/crates/brk_computer/src/distribution/address/type_map/height_vec.rs @@ -10,7 +10,7 @@ pub struct HeightToAddressTypeToVec(FxHashMap>); impl HeightToAddressTypeToVec { /// Create with pre-allocated capacity for unique heights. - pub fn with_capacity(capacity: usize) -> Self { + pub(crate) fn with_capacity(capacity: usize) -> Self { Self(FxHashMap::with_capacity_and_hasher( capacity, Default::default(), @@ -20,7 +20,7 @@ impl HeightToAddressTypeToVec { impl HeightToAddressTypeToVec { /// Consume and iterate over (Height, AddressTypeToVec) pairs. - pub fn into_iter(self) -> impl Iterator)> { + pub(crate) fn into_iter(self) -> impl Iterator)> { self.0.into_iter() } } diff --git a/crates/brk_computer/src/distribution/address/type_map/index_map.rs b/crates/brk_computer/src/distribution/address/type_map/index_map.rs index eeeb8195d..baa2a682f 100644 --- a/crates/brk_computer/src/distribution/address/type_map/index_map.rs +++ b/crates/brk_computer/src/distribution/address/type_map/index_map.rs @@ -7,7 +7,7 @@ use rustc_hash::FxHashMap; use smallvec::{Array, SmallVec}; /// A hashmap for each address type, keyed by TypeIndex. -#[derive(Debug, Clone, Deref, DerefMut)] +#[derive(Debug, Deref, DerefMut)] pub struct AddressTypeToTypeIndexMap(ByAddressType>); impl Default for AddressTypeToTypeIndexMap { @@ -27,7 +27,7 @@ impl Default for AddressTypeToTypeIndexMap { impl AddressTypeToTypeIndexMap { /// Create with pre-allocated capacity per address type. 
- pub fn with_capacity(capacity: usize) -> Self { + pub(crate) fn with_capacity(capacity: usize) -> Self { Self(ByAddressType { p2a: FxHashMap::with_capacity_and_hasher(capacity, Default::default()), p2pk33: FxHashMap::with_capacity_and_hasher(capacity, Default::default()), @@ -40,19 +40,6 @@ impl AddressTypeToTypeIndexMap { }) } - /// Merge two maps, consuming other and extending self. - pub fn merge(mut self, mut other: Self) -> Self { - Self::merge_single(&mut self.p2a, &mut other.p2a); - Self::merge_single(&mut self.p2pk33, &mut other.p2pk33); - Self::merge_single(&mut self.p2pk65, &mut other.p2pk65); - Self::merge_single(&mut self.p2pkh, &mut other.p2pkh); - Self::merge_single(&mut self.p2sh, &mut other.p2sh); - Self::merge_single(&mut self.p2tr, &mut other.p2tr); - Self::merge_single(&mut self.p2wpkh, &mut other.p2wpkh); - Self::merge_single(&mut self.p2wsh, &mut other.p2wsh); - self - } - fn merge_single(own: &mut FxHashMap, other: &mut FxHashMap) { if own.len() < other.len() { mem::swap(own, other); @@ -61,7 +48,7 @@ impl AddressTypeToTypeIndexMap { } /// Merge another map into self, consuming other. - pub fn merge_mut(&mut self, mut other: Self) { + pub(crate) fn merge_mut(&mut self, mut other: Self) { Self::merge_single(&mut self.p2a, &mut other.p2a); Self::merge_single(&mut self.p2pk33, &mut other.p2pk33); Self::merge_single(&mut self.p2pk65, &mut other.p2pk65); @@ -73,32 +60,23 @@ impl AddressTypeToTypeIndexMap { } /// Insert a value for a specific address type and typeindex. - pub fn insert_for_type(&mut self, address_type: OutputType, typeindex: TypeIndex, value: T) { + pub(crate) fn insert_for_type(&mut self, address_type: OutputType, typeindex: TypeIndex, value: T) { self.get_mut(address_type).unwrap().insert(typeindex, value); } - /// Iterate over sorted entries by address type. 
- pub fn into_sorted_iter(self) -> impl Iterator)> { - self.0.into_iter().map(|(output_type, map)| { - let mut sorted: Vec<_> = map.into_iter().collect(); - sorted.sort_unstable_by_key(|(typeindex, _)| *typeindex); - (output_type, sorted) - }) - } - /// Consume and iterate over entries by address type. #[allow(clippy::should_implement_trait)] - pub fn into_iter(self) -> impl Iterator)> { + pub(crate) fn into_iter(self) -> impl Iterator)> { self.0.into_iter() } /// Consume and return the inner ByAddressType. - pub fn into_inner(self) -> ByAddressType> { + pub(crate) fn into_inner(self) -> ByAddressType> { self.0 } /// Iterate mutably over entries by address type. - pub fn iter_mut(&mut self) -> impl Iterator)> { + pub(crate) fn iter_mut(&mut self) -> impl Iterator)> { self.0.iter_mut() } } @@ -108,7 +86,7 @@ where T: Array, { /// Merge two maps of SmallVec values, concatenating vectors. - pub fn merge_vec(mut self, other: Self) -> Self { + pub(crate) fn merge_vec(mut self, other: Self) -> Self { for (address_type, other_map) in other.0.into_iter() { let self_map = self.0.get_mut_unwrap(address_type); for (typeindex, mut other_vec) in other_map { diff --git a/crates/brk_computer/src/distribution/address/type_map/vec.rs b/crates/brk_computer/src/distribution/address/type_map/vec.rs index 28e0c10f8..45d5ef9d0 100644 --- a/crates/brk_computer/src/distribution/address/type_map/vec.rs +++ b/crates/brk_computer/src/distribution/address/type_map/vec.rs @@ -22,7 +22,7 @@ impl Default for AddressTypeToVec { impl AddressTypeToVec { /// Create with pre-allocated capacity per address type. - pub fn with_capacity(capacity: usize) -> Self { + pub(crate) fn with_capacity(capacity: usize) -> Self { Self(ByAddressType { p2a: Vec::with_capacity(capacity), p2pk33: Vec::with_capacity(capacity), @@ -38,7 +38,7 @@ impl AddressTypeToVec { impl AddressTypeToVec { /// Unwrap the inner ByAddressType. 
- pub fn unwrap(self) -> ByAddressType> { + pub(crate) fn unwrap(self) -> ByAddressType> { self.0 } } diff --git a/crates/brk_computer/src/distribution/block/cache/address.rs b/crates/brk_computer/src/distribution/block/cache/address.rs index 0ca9f6363..090bc9843 100644 --- a/crates/brk_computer/src/distribution/block/cache/address.rs +++ b/crates/brk_computer/src/distribution/block/cache/address.rs @@ -1,4 +1,5 @@ use brk_cohort::ByAddressType; +use brk_error::Result; use brk_types::{AnyAddressDataIndexEnum, FundedAddressData, OutputType, TypeIndex}; use crate::distribution::{ @@ -27,7 +28,7 @@ impl Default for AddressCache { } impl AddressCache { - pub fn new() -> Self { + pub(crate) fn new() -> Self { Self { funded: AddressTypeToTypeIndexMap::default(), empty: AddressTypeToTypeIndexMap::default(), @@ -36,7 +37,7 @@ impl AddressCache { /// Check if address is in cache (either funded or empty). #[inline] - pub fn contains(&self, address_type: OutputType, typeindex: TypeIndex) -> bool { + pub(crate) fn contains(&self, address_type: OutputType, typeindex: TypeIndex) -> bool { self.funded .get(address_type) .is_some_and(|m| m.contains_key(&typeindex)) @@ -48,13 +49,13 @@ impl AddressCache { /// Merge address data into funded cache. #[inline] - pub fn merge_funded(&mut self, data: AddressTypeToTypeIndexMap) { + pub(crate) fn merge_funded(&mut self, data: AddressTypeToTypeIndexMap) { self.funded.merge_mut(data); } /// Create an AddressLookup view into this cache. #[inline] - pub fn as_lookup(&mut self) -> AddressLookup<'_> { + pub(crate) fn as_lookup(&mut self) -> AddressLookup<'_> { AddressLookup { funded: &mut self.funded, empty: &mut self.empty, @@ -62,12 +63,12 @@ impl AddressCache { } /// Update transaction counts for addresses. 
- pub fn update_tx_counts(&mut self, txindex_vecs: AddressTypeToTypeIndexMap) { + pub(crate) fn update_tx_counts(&mut self, txindex_vecs: AddressTypeToTypeIndexMap) { update_tx_counts(&mut self.funded, &mut self.empty, txindex_vecs); } /// Take the cache contents for flushing, leaving empty caches. - pub fn take( + pub(crate) fn take( &mut self, ) -> ( AddressTypeToTypeIndexMap, @@ -84,7 +85,7 @@ impl AddressCache { /// /// Returns None if address is already in cache (funded or empty). #[allow(clippy::too_many_arguments)] -pub fn load_uncached_address_data( +pub(crate) fn load_uncached_address_data( address_type: OutputType, typeindex: TypeIndex, first_addressindexes: &ByAddressType, @@ -92,38 +93,38 @@ pub fn load_uncached_address_data( vr: &VecsReaders, any_address_indexes: &AnyAddressIndexesVecs, addresses_data: &AddressesDataVecs, -) -> Option { +) -> Result> { // Check if this is a new address (typeindex >= first for this height) let first = *first_addressindexes.get(address_type).unwrap(); if first <= typeindex { - return Some(WithAddressDataSource::New(FundedAddressData::default())); + return Ok(Some(WithAddressDataSource::New(FundedAddressData::default()))); } // Skip if already in cache if cache.contains(address_type, typeindex) { - return None; + return Ok(None); } // Read from storage let reader = vr.address_reader(address_type); - let anyaddressindex = any_address_indexes.get(address_type, typeindex, reader); + let anyaddressindex = any_address_indexes.get(address_type, typeindex, reader)?; - Some(match anyaddressindex.to_enum() { + Ok(Some(match anyaddressindex.to_enum() { AnyAddressDataIndexEnum::Funded(funded_index) => { let reader = &vr.anyaddressindex_to_anyaddressdata.funded; - // Use get_any_or_read_unwrap to check updated layer (needed after rollback) let funded_data = addresses_data .funded - .get_any_or_read_unwrap(funded_index, reader); + .get_any_or_read_at(funded_index.into(), reader)? 
+ .unwrap(); WithAddressDataSource::FromFunded(funded_index, funded_data) } AnyAddressDataIndexEnum::Empty(empty_index) => { let reader = &vr.anyaddressindex_to_anyaddressdata.empty; - // Use get_any_or_read_unwrap to check updated layer (needed after rollback) let empty_data = addresses_data .empty - .get_any_or_read_unwrap(empty_index, reader); + .get_any_or_read_at(empty_index.into(), reader)? + .unwrap(); WithAddressDataSource::FromEmpty(empty_index, empty_data.into()) } - }) + })) } diff --git a/crates/brk_computer/src/distribution/block/cache/lookup.rs b/crates/brk_computer/src/distribution/block/cache/lookup.rs index 1fc17230c..458dd8437 100644 --- a/crates/brk_computer/src/distribution/block/cache/lookup.rs +++ b/crates/brk_computer/src/distribution/block/cache/lookup.rs @@ -24,7 +24,7 @@ pub struct AddressLookup<'a> { } impl<'a> AddressLookup<'a> { - pub fn get_or_create_for_receive( + pub(crate) fn get_or_create_for_receive( &mut self, output_type: OutputType, type_index: TypeIndex, @@ -79,7 +79,7 @@ impl<'a> AddressLookup<'a> { } /// Get address data for a send operation (must exist in cache). - pub fn get_for_send( + pub(crate) fn get_for_send( &mut self, output_type: OutputType, type_index: TypeIndex, @@ -92,7 +92,7 @@ impl<'a> AddressLookup<'a> { } /// Move address from funded to empty set. 
- pub fn move_to_empty(&mut self, output_type: OutputType, type_index: TypeIndex) { + pub(crate) fn move_to_empty(&mut self, output_type: OutputType, type_index: TypeIndex) { let data = self .funded .get_mut(output_type) diff --git a/crates/brk_computer/src/distribution/block/cohort/address_updates.rs b/crates/brk_computer/src/distribution/block/cohort/address_updates.rs index 24825297e..c1646e5cf 100644 --- a/crates/brk_computer/src/distribution/block/cohort/address_updates.rs +++ b/crates/brk_computer/src/distribution/block/cohort/address_updates.rs @@ -15,7 +15,7 @@ use super::with_source::{EmptyAddressDataWithSource, FundedAddressDataWithSource /// - New funded address: push to funded storage /// - Updated funded address (was funded): update in place /// - Transition empty -> funded: delete from empty, push to funded -pub fn process_funded_addresses( +pub(crate) fn process_funded_addresses( addresses_data: &mut AddressesDataVecs, funded_updates: AddressTypeToTypeIndexMap, ) -> Result> { @@ -86,7 +86,7 @@ pub fn process_funded_addresses( /// - New empty address: push to empty storage /// - Updated empty address (was empty): update in place /// - Transition funded -> empty: delete from funded, push to empty -pub fn process_empty_addresses( +pub(crate) fn process_empty_addresses( addresses_data: &mut AddressesDataVecs, empty_updates: AddressTypeToTypeIndexMap, ) -> Result> { diff --git a/crates/brk_computer/src/distribution/block/cohort/mod.rs b/crates/brk_computer/src/distribution/block/cohort/mod.rs index 41eb838ce..96af2745e 100644 --- a/crates/brk_computer/src/distribution/block/cohort/mod.rs +++ b/crates/brk_computer/src/distribution/block/cohort/mod.rs @@ -4,8 +4,8 @@ mod sent; mod tx_counts; mod with_source; -pub use address_updates::*; -pub use received::*; -pub use sent::*; -pub use tx_counts::*; -pub use with_source::*; +pub(crate) use address_updates::*; +pub(crate) use received::*; +pub(crate) use sent::*; +pub(crate) use tx_counts::*; +pub(crate) use 
with_source::*; diff --git a/crates/brk_computer/src/distribution/block/cohort/received.rs b/crates/brk_computer/src/distribution/block/cohort/received.rs index b3c10a6b1..c4295808e 100644 --- a/crates/brk_computer/src/distribution/block/cohort/received.rs +++ b/crates/brk_computer/src/distribution/block/cohort/received.rs @@ -1,5 +1,5 @@ use brk_cohort::{AmountBucket, ByAddressType}; -use brk_types::{CentsUnsigned, Sats, TypeIndex}; +use brk_types::{Cents, Sats, TypeIndex}; use rustc_hash::FxHashMap; use crate::distribution::{ @@ -10,11 +10,11 @@ use crate::distribution::{ use super::super::cache::{AddressLookup, TrackingStatus}; #[allow(clippy::too_many_arguments)] -pub fn process_received( +pub(crate) fn process_received( received_data: AddressTypeToVec<(TypeIndex, Sats)>, cohorts: &mut AddressCohorts, lookup: &mut AddressLookup<'_>, - price: Option, + price: Cents, addr_count: &mut ByAddressType, empty_addr_count: &mut ByAddressType, activity_counts: &mut AddressTypeToActivityCounts, @@ -118,7 +118,7 @@ pub fn process_received( .state .as_mut() .unwrap() - .receive_outputs(addr_data, total_value, price.unwrap(), output_count); + .receive_outputs(addr_data, total_value, price, output_count); } } } diff --git a/crates/brk_computer/src/distribution/block/cohort/sent.rs b/crates/brk_computer/src/distribution/block/cohort/sent.rs index a185f3382..444ce9d50 100644 --- a/crates/brk_computer/src/distribution/block/cohort/sent.rs +++ b/crates/brk_computer/src/distribution/block/cohort/sent.rs @@ -1,6 +1,6 @@ use brk_cohort::{AmountBucket, ByAddressType}; use brk_error::Result; -use brk_types::{Age, CentsUnsigned, CheckedSub, Height, Sats, Timestamp, TypeIndex}; +use brk_types::{Age, Cents, CheckedSub, Height, Sats, Timestamp, TypeIndex}; use rustc_hash::FxHashSet; use vecdb::{VecIndex, unlikely}; @@ -26,17 +26,17 @@ use super::super::cache::AddressLookup; /// `price_range_max` is used to compute the peak price during each UTXO's holding period /// for accurate peak 
regret calculation. #[allow(clippy::too_many_arguments)] -pub fn process_sent( +pub(crate) fn process_sent( sent_data: HeightToAddressTypeToVec<(TypeIndex, Sats)>, cohorts: &mut AddressCohorts, lookup: &mut AddressLookup<'_>, - current_price: Option, - price_range_max: Option<&PriceRangeMax>, + current_price: Cents, + price_range_max: &PriceRangeMax, addr_count: &mut ByAddressType, empty_addr_count: &mut ByAddressType, activity_counts: &mut AddressTypeToActivityCounts, received_addresses: &ByAddressType>, - height_to_price: Option<&[CentsUnsigned]>, + height_to_price: &[Cents], height_to_timestamp: &[Timestamp], current_height: Height, current_timestamp: Timestamp, @@ -45,22 +45,21 @@ pub fn process_sent( let mut seen_senders: ByAddressType> = ByAddressType::default(); for (receive_height, by_type) in sent_data.into_iter() { - let prev_price = height_to_price.map(|v| v[receive_height.to_usize()]); + let prev_price = height_to_price[receive_height.to_usize()]; let prev_timestamp = height_to_timestamp[receive_height.to_usize()]; let blocks_old = current_height.to_usize() - receive_height.to_usize(); let age = Age::new(current_timestamp, prev_timestamp, blocks_old); // Compute peak price during holding period for peak regret // This is the max HIGH price between receive and send heights - let peak_price: Option = - price_range_max.map(|t| t.max_between(receive_height, current_height)); + let peak_price = price_range_max.max_between(receive_height, current_height); for (output_type, vec) in by_type.unwrap().into_iter() { // Cache mutable refs for this address type let type_addr_count = addr_count.get_mut(output_type).unwrap(); let type_empty_count = empty_addr_count.get_mut(output_type).unwrap(); let type_activity = activity_counts.get_mut_unwrap(output_type); - let type_received = received_addresses.get_unwrap(output_type); + let type_received = received_addresses.get(output_type); let type_seen = seen_senders.get_mut_unwrap(output_type); for (type_index, value) in 
vec { @@ -74,7 +73,7 @@ pub fn process_sent( type_activity.sending += 1; // Track "both" - addresses that sent AND received this block - if type_received.contains(&type_index) { + if type_received.is_some_and(|s| s.contains(&type_index)) { type_activity.both += 1; } } @@ -154,9 +153,9 @@ pub fn process_sent( .send( addr_data, value, - current_price.unwrap(), - prev_price.unwrap(), - peak_price.unwrap(), + current_price, + prev_price, + peak_price, age, )?; } diff --git a/crates/brk_computer/src/distribution/block/cohort/tx_counts.rs b/crates/brk_computer/src/distribution/block/cohort/tx_counts.rs index d4cd2b04e..d75437460 100644 --- a/crates/brk_computer/src/distribution/block/cohort/tx_counts.rs +++ b/crates/brk_computer/src/distribution/block/cohort/tx_counts.rs @@ -10,7 +10,7 @@ use super::with_source::{EmptyAddressDataWithSource, FundedAddressDataWithSource /// /// Addresses are looked up in funded_cache first, then empty_cache. /// NOTE: This should be called AFTER merging parallel-fetched address data into funded_cache. 
-pub fn update_tx_counts( +pub(crate) fn update_tx_counts( funded_cache: &mut AddressTypeToTypeIndexMap, empty_cache: &mut AddressTypeToTypeIndexMap, mut txindex_vecs: AddressTypeToTypeIndexMap, diff --git a/crates/brk_computer/src/distribution/block/mod.rs b/crates/brk_computer/src/distribution/block/mod.rs index d5c7e483a..a948498e8 100644 --- a/crates/brk_computer/src/distribution/block/mod.rs +++ b/crates/brk_computer/src/distribution/block/mod.rs @@ -2,6 +2,6 @@ mod cache; mod cohort; mod utxo; -pub use cache::*; -pub use cohort::*; -pub use utxo::*; +pub(crate) use cache::*; +pub(crate) use cohort::*; +pub(crate) use utxo::*; diff --git a/crates/brk_computer/src/distribution/block/utxo/inputs.rs b/crates/brk_computer/src/distribution/block/utxo/inputs.rs index 81274df0c..79bee78b6 100644 --- a/crates/brk_computer/src/distribution/block/utxo/inputs.rs +++ b/crates/brk_computer/src/distribution/block/utxo/inputs.rs @@ -1,4 +1,5 @@ use brk_cohort::ByAddressType; +use brk_error::Result; use brk_types::{Height, OutputType, Sats, TxIndex, TypeIndex}; use rayon::prelude::*; use rustc_hash::FxHashMap; @@ -42,7 +43,7 @@ pub struct InputsResult { /// Uses parallel reads followed by sequential accumulation to avoid /// expensive merge overhead from rayon's fold/reduce pattern. 
#[allow(clippy::too_many_arguments)] -pub fn process_inputs( +pub(crate) fn process_inputs( input_count: usize, txinindex_to_txindex: &[TxIndex], txinindex_to_value: &[Sats], @@ -54,10 +55,10 @@ pub fn process_inputs( vr: &VecsReaders, any_address_indexes: &AnyAddressIndexesVecs, addresses_data: &AddressesDataVecs, -) -> InputsResult { +) -> Result { let items: Vec<_> = (0..input_count) .into_par_iter() - .map(|local_idx| { + .map(|local_idx| -> Result<_> { let txindex = txinindex_to_txindex[local_idx]; let prev_height = *txinindex_to_prev_height.get(local_idx).unwrap(); @@ -65,7 +66,7 @@ pub fn process_inputs( let input_type = *txinindex_to_outputtype.get(local_idx).unwrap(); if input_type.is_not_address() { - return (prev_height, value, input_type, None); + return Ok((prev_height, value, input_type, None)); } let typeindex = *txinindex_to_typeindex.get(local_idx).unwrap(); @@ -79,16 +80,16 @@ pub fn process_inputs( vr, any_address_indexes, addresses_data, - ); + )?; - ( + Ok(( prev_height, value, input_type, Some((typeindex, txindex, value, addr_data_opt)), - ) + )) }) - .collect(); + .collect::>>()?; // Phase 2: Sequential accumulation - no merge overhead // Estimate: unique heights bounded by block depth, addresses spread across ~8 types @@ -131,10 +132,10 @@ pub fn process_inputs( } } - InputsResult { + Ok(InputsResult { height_to_sent, sent_data, address_data, txindex_vecs, - } + }) } diff --git a/crates/brk_computer/src/distribution/block/utxo/outputs.rs b/crates/brk_computer/src/distribution/block/utxo/outputs.rs index 8879b0e3f..9d50f05e0 100644 --- a/crates/brk_computer/src/distribution/block/utxo/outputs.rs +++ b/crates/brk_computer/src/distribution/block/utxo/outputs.rs @@ -1,4 +1,5 @@ use brk_cohort::ByAddressType; +use brk_error::Result; use brk_types::{Sats, TxIndex, TypeIndex}; use crate::distribution::{ @@ -34,7 +35,7 @@ pub struct OutputsResult { /// 3. Look up address data if output is an address type /// 4. 
Track address-specific data for address cohort processing #[allow(clippy::too_many_arguments)] -pub fn process_outputs( +pub(crate) fn process_outputs( txoutindex_to_txindex: &[TxIndex], txoutdata_vec: &[TxOutData], first_addressindexes: &ByAddressType, @@ -42,7 +43,7 @@ pub fn process_outputs( vr: &VecsReaders, any_address_indexes: &AnyAddressIndexesVecs, addresses_data: &AddressesDataVecs, -) -> OutputsResult { +) -> Result { let output_count = txoutdata_vec.len(); // Pre-allocate result structures @@ -81,7 +82,7 @@ pub fn process_outputs( vr, any_address_indexes, addresses_data, - ); + )?; if let Some(addr_data) = addr_data_opt { address_data.insert_for_type(output_type, typeindex, addr_data); @@ -95,10 +96,10 @@ pub fn process_outputs( .push(txindex); } - OutputsResult { + Ok(OutputsResult { transacted, received_data, address_data, txindex_vecs, - } + }) } diff --git a/crates/brk_computer/src/distribution/cohorts/address/groups.rs b/crates/brk_computer/src/distribution/cohorts/address/groups.rs index 6104acfde..ad20c125f 100644 --- a/crates/brk_computer/src/distribution/cohorts/address/groups.rs +++ b/crates/brk_computer/src/distribution/cohorts/address/groups.rs @@ -5,12 +5,12 @@ use brk_cohort::{ }; use brk_error::Result; use brk_traversable::Traversable; -use brk_types::{DateIndex, Dollars, Height, Version}; +use brk_types::{Dollars, Height, Version}; use derive_more::{Deref, DerefMut}; use rayon::prelude::*; -use vecdb::{AnyStoredVec, Database, Exit, IterableVec}; +use vecdb::{AnyStoredVec, Database, Exit, ReadableVec, Rw, StorageMode}; -use crate::{ComputeIndexes, distribution::DynCohortVecs, indexes, price}; +use crate::{ComputeIndexes, blocks, distribution::DynCohortVecs, indexes, prices}; use crate::distribution::metrics::SupplyMetrics; @@ -19,19 +19,19 @@ use super::{super::traits::CohortVecs, vecs::AddressCohortVecs}; const VERSION: Version = Version::new(0); /// All Address cohorts organized by filter type. 
-#[derive(Clone, Deref, DerefMut, Traversable)] -pub struct AddressCohorts(AddressGroups); +#[derive(Deref, DerefMut, Traversable)] +pub struct AddressCohorts(AddressGroups>); impl AddressCohorts { /// Import all Address cohorts from database. /// /// `all_supply` is the supply metrics from the UTXO "all" cohort, used as global /// sources for `*_rel_to_market_cap` ratios. - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, states_path: &Path, all_supply: Option<&SupplyMetrics>, ) -> Result { @@ -43,7 +43,7 @@ impl AddressCohorts { has_state: bool| -> Result { let sp = if has_state { Some(states_path) } else { None }; - AddressCohortVecs::forced_import(db, filter, name, v, indexes, price, sp, all_supply) + AddressCohortVecs::forced_import(db, filter, name, v, indexes, prices, sp, all_supply) }; let full = |f: Filter, name: &'static str| create(f, name, true); @@ -86,7 +86,7 @@ impl AddressCohorts { } /// Compute overlapping cohorts from component amount_range cohorts. - pub fn compute_overlapping_vecs( + pub(crate) fn compute_overlapping_vecs( &mut self, starting_indexes: &ComputeIndexes, exit: &Exit, @@ -97,20 +97,32 @@ impl AddressCohorts { } /// First phase of post-processing: compute index transforms. - pub fn compute_rest_part1( + pub(crate) fn compute_rest_part1( &mut self, - indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + blocks: &blocks::Vecs, + prices: &prices::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - // 1. Compute all metrics except net_sentiment + // 1. Compute addr_count_30d_change using rolling window + self.par_iter_mut().try_for_each(|v| { + v.addr_count_30d_change.height.compute_rolling_change( + starting_indexes.height, + &blocks.count.height_1m_ago, + &v.addr_count.height, + exit, + ) + })?; + + // 2. 
Compute all metrics except net_sentiment self.par_iter_mut() - .try_for_each(|v| v.compute_rest_part1(indexes, price, starting_indexes, exit))?; + .try_for_each(|v| v.compute_rest_part1(blocks, prices, starting_indexes, exit))?; // 2. Compute net_sentiment.height for separate cohorts (greed - pain) - self.par_iter_separate_mut() - .try_for_each(|v| v.metrics.compute_net_sentiment_height(starting_indexes, exit))?; + self.par_iter_separate_mut().try_for_each(|v| { + v.metrics + .compute_net_sentiment_height(starting_indexes, exit) + })?; // 3. Compute net_sentiment.height for aggregate cohorts (weighted average) self.for_each_aggregate(|vecs, sources| { @@ -119,40 +131,34 @@ impl AddressCohorts { .compute_net_sentiment_from_others(starting_indexes, &metrics, exit) })?; - // 4. Compute net_sentiment dateindex for ALL cohorts - self.par_iter_mut() - .try_for_each(|v| v.metrics.compute_net_sentiment_rest(indexes, starting_indexes, exit)) + Ok(()) } /// Second phase of post-processing: compute relative metrics. - #[allow(clippy::too_many_arguments)] - pub fn compute_rest_part2( + pub(crate) fn compute_rest_part2( &mut self, - indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + blocks: &blocks::Vecs, + prices: &prices::Vecs, starting_indexes: &ComputeIndexes, height_to_market_cap: Option<&HM>, - dateindex_to_market_cap: Option<&DM>, exit: &Exit, ) -> Result<()> where - HM: IterableVec + Sync, - DM: IterableVec + Sync, + HM: ReadableVec + Sync, { self.0.par_iter_mut().try_for_each(|v| { v.compute_rest_part2( - indexes, - price, + blocks, + prices, starting_indexes, height_to_market_cap, - dateindex_to_market_cap, exit, ) }) } /// Returns a parallel iterator over all vecs for parallel writing. 
- pub fn par_iter_vecs_mut(&mut self) -> impl ParallelIterator { + pub(crate) fn par_iter_vecs_mut(&mut self) -> impl ParallelIterator { // Collect all vecs from all cohorts self.0 .iter_mut() @@ -162,44 +168,36 @@ impl AddressCohorts { } /// Commit all states to disk (separate from vec writes for parallelization). - pub fn commit_all_states(&mut self, height: Height, cleanup: bool) -> Result<()> { + pub(crate) fn commit_all_states(&mut self, height: Height, cleanup: bool) -> Result<()> { self.par_iter_separate_mut() .try_for_each(|v| v.write_state(height, cleanup)) } /// Get minimum height from all separate cohorts' height-indexed vectors. - pub fn min_separate_stateful_height_len(&self) -> Height { + pub(crate) fn min_separate_stateful_height_len(&self) -> Height { self.iter_separate() .map(|v| Height::from(v.min_stateful_height_len())) .min() .unwrap_or_default() } - /// Get minimum dateindex from all separate cohorts' dateindex-indexed vectors. - pub fn min_separate_stateful_dateindex_len(&self) -> usize { - self.iter_separate() - .map(|v| v.min_stateful_dateindex_len()) - .min() - .unwrap_or(usize::MAX) - } - /// Import state for all separate cohorts at or before given height. /// Returns true if all imports succeeded and returned the expected height. - pub fn import_separate_states(&mut self, height: Height) -> bool { + pub(crate) fn import_separate_states(&mut self, height: Height) -> bool { self.par_iter_separate_mut() .map(|v| v.import_state(height).unwrap_or_default()) .all(|h| h == height) } /// Reset state heights for all separate cohorts. - pub fn reset_separate_state_heights(&mut self) { + pub(crate) fn reset_separate_state_heights(&mut self) { self.par_iter_separate_mut().for_each(|v| { v.reset_state_starting_height(); }); } /// Reset cost_basis_data for all separate cohorts (called during fresh start). 
- pub fn reset_separate_cost_basis_data(&mut self) -> Result<()> { + pub(crate) fn reset_separate_cost_basis_data(&mut self) -> Result<()> { self.par_iter_separate_mut().try_for_each(|v| { if let Some(state) = v.state.as_mut() { state.reset_cost_basis_data_if_needed()?; @@ -209,7 +207,7 @@ impl AddressCohorts { } /// Validate computed versions for all separate cohorts. - pub fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> { + pub(crate) fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> { self.par_iter_separate_mut() .try_for_each(|v| v.validate_computed_versions(base_version)) } diff --git a/crates/brk_computer/src/distribution/cohorts/address/vecs.rs b/crates/brk_computer/src/distribution/cohorts/address/vecs.rs index 9b46a6203..67573e2b9 100644 --- a/crates/brk_computer/src/distribution/cohorts/address/vecs.rs +++ b/crates/brk_computer/src/distribution/cohorts/address/vecs.rs @@ -3,15 +3,16 @@ use std::path::Path; use brk_cohort::{CohortContext, Filter, Filtered}; use brk_error::Result; use brk_traversable::Traversable; -use brk_types::{CentsUnsigned, DateIndex, Dollars, Height, StoredF64, StoredU64, Version}; +use brk_types::{Cents, Dollars, Height, StoredF64, StoredU64, Version}; use rayon::prelude::*; -use vecdb::{AnyStoredVec, AnyVec, Database, Exit, GenericStoredVec, IterableVec}; +use vecdb::{AnyStoredVec, AnyVec, Database, Exit, WritableVec, ReadableVec, Rw, StorageMode}; use crate::{ + ComputeIndexes, blocks, distribution::state::AddressCohortState, indexes, - internal::{ComputedFromDateLast, ComputedFromHeightLast}, - price, ComputeIndexes, + internal::ComputedFromHeightLast, + prices, }; use crate::distribution::metrics::{CohortMetrics, ImportConfig, SupplyMetrics}; @@ -21,21 +22,21 @@ use super::super::traits::{CohortVecs, DynCohortVecs}; const VERSION: Version = Version::ZERO; /// Address cohort with metrics and optional runtime state. 
-#[derive(Clone, Traversable)] -pub struct AddressCohortVecs { +#[derive(Traversable)] +pub struct AddressCohortVecs { /// Starting height when state was imported starting_height: Option, /// Runtime state for block-by-block processing #[traversable(skip)] - pub state: Option, + pub state: Option>, /// Metric vectors #[traversable(flatten)] - pub metrics: CohortMetrics, + pub metrics: CohortMetrics, - pub addr_count: ComputedFromHeightLast, - pub addr_count_30d_change: ComputedFromDateLast, + pub addr_count: ComputedFromHeightLast, + pub addr_count_30d_change: ComputedFromHeightLast, } impl AddressCohortVecs { @@ -44,17 +45,16 @@ impl AddressCohortVecs { /// `all_supply` is the supply metrics from the "all" cohort, used as global /// sources for `*_rel_to_market_cap` ratios. Pass `None` if not available. #[allow(clippy::too_many_arguments)] - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, filter: Filter, name: &str, version: Version, indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, states_path: Option<&Path>, all_supply: Option<&SupplyMetrics>, ) -> Result { - let compute_dollars = price.is_some(); let full_name = CohortContext::Address.full_name(&filter, name); let cfg = ImportConfig { @@ -64,7 +64,7 @@ impl AddressCohortVecs { context: CohortContext::Address, version, indexes, - price, + prices, up_to_1h_realized: None, }; @@ -72,7 +72,7 @@ impl AddressCohortVecs { starting_height: None, state: states_path - .map(|path| AddressCohortState::new(path, &full_name, compute_dollars)), + .map(|path| Box::new(AddressCohortState::new(path, &full_name))), metrics: CohortMetrics::forced_import(&cfg, all_supply)?, @@ -82,7 +82,7 @@ impl AddressCohortVecs { version + VERSION, indexes, )?, - addr_count_30d_change: ComputedFromDateLast::forced_import( + addr_count_30d_change: ComputedFromHeightLast::forced_import( db, &cfg.name("addr_count_30d_change"), version + VERSION, @@ -91,29 +91,19 @@ impl AddressCohortVecs { }) } - 
/// Get the starting height when state was imported. - pub fn starting_height(&self) -> Option { - self.starting_height - } - - /// Set the starting height. - pub fn set_starting_height(&mut self, height: Height) { - self.starting_height = Some(height); - } - /// Reset starting height to zero. - pub fn reset_starting_height(&mut self) { + pub(crate) fn reset_starting_height(&mut self) { self.starting_height = Some(Height::ZERO); } /// Returns a parallel iterator over all vecs for parallel writing. - pub fn par_iter_vecs_mut(&mut self) -> impl ParallelIterator { + pub(crate) fn par_iter_vecs_mut(&mut self) -> impl ParallelIterator { rayon::iter::once(&mut self.addr_count.height as &mut dyn AnyStoredVec) .chain(self.metrics.par_iter_mut()) } /// Commit state to disk (separate from vec writes for parallelization). - pub fn write_state(&mut self, height: Height, cleanup: bool) -> Result<()> { + pub(crate) fn write_state(&mut self, height: Height, cleanup: bool) -> Result<()> { if let Some(state) = self.state.as_mut() { state.inner.write(height, cleanup)?; } @@ -135,10 +125,6 @@ impl DynCohortVecs for AddressCohortVecs { .min(self.metrics.min_stateful_height_len()) } - fn min_stateful_dateindex_len(&self) -> usize { - self.metrics.min_stateful_dateindex_len() - } - fn reset_state_starting_height(&mut self) { self.reset_starting_height(); if let Some(state) = self.state.as_mut() { @@ -147,8 +133,6 @@ impl DynCohortVecs for AddressCohortVecs { } fn import_state(&mut self, starting_height: Height) -> Result { - use vecdb::GenericStoredVec; - // Import state from runtime state if present if let Some(state) = self.state.as_mut() { // State files are saved AT height H, so to resume at H+1 we need to import at H @@ -164,14 +148,16 @@ impl DynCohortVecs for AddressCohortVecs { .total .sats .height - .read_once(prev_height)?; + .collect_one(prev_height) + .unwrap(); state.inner.supply.utxo_count = *self .metrics .outputs .utxo_count .height - .read_once(prev_height)?; - 
state.addr_count = *self.addr_count.height.read_once(prev_height)?; + .collect_one(prev_height) + .unwrap(); + state.addr_count = *self.addr_count.height.collect_one(prev_height).unwrap(); // Restore realized cap from persisted exact values state.inner.restore_realized_cap(); @@ -191,7 +177,7 @@ impl DynCohortVecs for AddressCohortVecs { } fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> { - use vecdb::GenericStoredVec; + use vecdb::WritableVec; self.addr_count .height .validate_computed_version_or_reset(base_version)?; @@ -218,16 +204,12 @@ impl DynCohortVecs for AddressCohortVecs { fn compute_then_truncate_push_unrealized_states( &mut self, height: Height, - height_price: Option, - dateindex: Option, - date_price: Option>, + height_price: Cents, ) -> Result<()> { if let Some(state) = self.state.as_mut() { self.metrics.compute_then_truncate_push_unrealized_states( height, height_price, - dateindex, - date_price, &mut state.inner, )?; } @@ -236,27 +218,13 @@ impl DynCohortVecs for AddressCohortVecs { fn compute_rest_part1( &mut self, - indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + blocks: &blocks::Vecs, + prices: &prices::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.addr_count - .compute_rest(indexes, starting_indexes, exit)?; - - self.addr_count_30d_change - .compute_all(starting_indexes, exit, |v| { - v.compute_change( - starting_indexes.dateindex, - &*self.addr_count.dateindex, - 30, - exit, - )?; - Ok(()) - })?; - self.metrics - .compute_rest_part1(indexes, price, starting_indexes, exit)?; + .compute_rest_part1(blocks, prices, starting_indexes, exit)?; Ok(()) } } @@ -287,19 +255,17 @@ impl CohortVecs for AddressCohortVecs { fn compute_rest_part2( &mut self, - indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + blocks: &blocks::Vecs, + prices: &prices::Vecs, starting_indexes: &ComputeIndexes, - height_to_market_cap: Option<&impl IterableVec>, - dateindex_to_market_cap: Option<&impl 
IterableVec>, + height_to_market_cap: Option<&impl ReadableVec>, exit: &Exit, ) -> Result<()> { self.metrics.compute_rest_part2( - indexes, - price, + blocks, + prices, starting_indexes, height_to_market_cap, - dateindex_to_market_cap, exit, )?; Ok(()) diff --git a/crates/brk_computer/src/distribution/cohorts/traits.rs b/crates/brk_computer/src/distribution/cohorts/traits.rs index 3259e02c3..09f058a6b 100644 --- a/crates/brk_computer/src/distribution/cohorts/traits.rs +++ b/crates/brk_computer/src/distribution/cohorts/traits.rs @@ -1,8 +1,8 @@ use brk_error::Result; -use brk_types::{CentsUnsigned, DateIndex, Dollars, Height, Version}; -use vecdb::{Exit, IterableVec}; +use brk_types::{Cents, Dollars, Height, Version}; +use vecdb::{Exit, ReadableVec}; -use crate::{ComputeIndexes, indexes, price}; +use crate::{ComputeIndexes, blocks, prices}; /// Dynamic dispatch trait for cohort vectors. /// @@ -11,9 +11,6 @@ pub trait DynCohortVecs: Send + Sync { /// Get minimum length across height-indexed vectors written in block loop. fn min_stateful_height_len(&self) -> usize; - /// Get minimum length across dateindex-indexed vectors written in block loop. - fn min_stateful_dateindex_len(&self) -> usize; - /// Reset the starting height for state tracking. fn reset_state_starting_height(&mut self); @@ -26,21 +23,18 @@ pub trait DynCohortVecs: Send + Sync { /// Push state to height-indexed vectors (truncating if needed). fn truncate_push(&mut self, height: Height) -> Result<()>; - /// Compute and push unrealized profit/loss states. + /// Compute and push unrealized profit/loss states and percentiles. fn compute_then_truncate_push_unrealized_states( &mut self, height: Height, - height_price: Option, - dateindex: Option, - date_price: Option>, + height_price: Cents, ) -> Result<()>; /// First phase of post-processing computations. 
- #[allow(clippy::too_many_arguments)] fn compute_rest_part1( &mut self, - indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + blocks: &blocks::Vecs, + prices: &prices::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()>; @@ -57,14 +51,12 @@ pub trait CohortVecs: DynCohortVecs { ) -> Result<()>; /// Second phase of post-processing computations. - #[allow(clippy::too_many_arguments)] fn compute_rest_part2( &mut self, - indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + blocks: &blocks::Vecs, + prices: &prices::Vecs, starting_indexes: &ComputeIndexes, - height_to_market_cap: Option<&impl IterableVec>, - dateindex_to_market_cap: Option<&impl IterableVec>, + height_to_market_cap: Option<&impl ReadableVec>, exit: &Exit, ) -> Result<()>; } diff --git a/crates/brk_computer/src/distribution/cohorts/utxo/groups.rs b/crates/brk_computer/src/distribution/cohorts/utxo/groups.rs index e920c2484..acbc8570e 100644 --- a/crates/brk_computer/src/distribution/cohorts/utxo/groups.rs +++ b/crates/brk_computer/src/distribution/cohorts/utxo/groups.rs @@ -8,19 +8,19 @@ use brk_cohort::{ use brk_error::Result; use brk_traversable::Traversable; use brk_types::{ - CentsUnsigned, CentsUnsignedCompact, CostBasisDistribution, Date, DateIndex, Dollars, Height, - ONE_HOUR_IN_SEC, Sats, StoredF32, Timestamp, Version, + Cents, CentsCompact, CostBasisDistribution, Date, Day1, Dollars, Height, ONE_HOUR_IN_SEC, Sats, + StoredF32, Timestamp, Version, }; use derive_more::{Deref, DerefMut}; use rayon::prelude::*; -use vecdb::{AnyStoredVec, Database, Exit, GenericStoredVec, IterableVec, VecIndex}; +use vecdb::{AnyStoredVec, Database, Exit, ReadableVec, Rw, StorageMode, VecIndex, WritableVec}; use crate::{ - ComputeIndexes, + ComputeIndexes, blocks, distribution::{DynCohortVecs, compute::PriceRangeMax, state::BlockState}, indexes, internal::{PERCENTILES, PERCENTILES_LEN, compute_spot_percentile_rank}, - price, + prices, }; use super::{super::traits::CohortVecs, 
vecs::UTXOCohortVecs}; @@ -31,16 +31,16 @@ const VERSION: Version = Version::new(0); const COST_BASIS_PRICE_DIGITS: i32 = 5; /// All UTXO cohorts organized by filter type. -#[derive(Clone, Deref, DerefMut, Traversable)] -pub struct UTXOCohorts(pub(crate) UTXOGroups); +#[derive(Deref, DerefMut, Traversable)] +pub struct UTXOCohorts(pub(crate) UTXOGroups>); impl UTXOCohorts { /// Import all UTXO cohorts from database. - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, states_path: &Path, ) -> Result { let v = version + VERSION; @@ -54,7 +54,7 @@ impl UTXOCohorts { name, v, indexes, - price, + prices, states_path, StateLevel::Full, None, @@ -69,7 +69,7 @@ impl UTXOCohorts { let type_ = BySpendableType::try_new(&base)?; // Get up_to_1h realized for adjusted computation (cohort - up_to_1h) - let up_to_1h_realized = age_range.up_to_1h.metrics.realized.as_ref(); + let up_to_1h_realized = &age_range.up_to_1h.metrics.realized; // Phase 2: Import "all" cohort (needs up_to_1h for adjusted, is global supply source) let all = UTXOCohortVecs::forced_import( @@ -78,14 +78,14 @@ impl UTXOCohorts { "", version + VERSION + Version::ONE, indexes, - price, + prices, states_path, StateLevel::PriceOnly, None, - up_to_1h_realized, + Some(up_to_1h_realized), )?; - let all_supply = Some(&all.metrics.supply); + let all_supply = Some(all.metrics.supply.as_ref()); // Phase 3: Import cohorts that need adjusted and/or all_supply let price_only_adjusted = |f: Filter, name: &'static str| { @@ -95,11 +95,11 @@ impl UTXOCohorts { name, v, indexes, - price, + prices, states_path, StateLevel::PriceOnly, all_supply, - up_to_1h_realized, + Some(up_to_1h_realized), ) }; @@ -112,11 +112,11 @@ impl UTXOCohorts { name, v, indexes, - price, + prices, states_path, StateLevel::None, all_supply, - up_to_1h_realized, + Some(up_to_1h_realized), ) }; @@ -130,7 +130,7 @@ impl UTXOCohorts { name, v, 
indexes, - price, + prices, states_path, StateLevel::None, all_supply, @@ -226,7 +226,7 @@ impl UTXOCohorts { } /// Compute overlapping cohorts from component age/amount range cohorts. - pub fn compute_overlapping_vecs( + pub(crate) fn compute_overlapping_vecs( &mut self, starting_indexes: &ComputeIndexes, exit: &Exit, @@ -237,16 +237,16 @@ impl UTXOCohorts { } /// First phase of post-processing: compute index transforms. - pub fn compute_rest_part1( + pub(crate) fn compute_rest_part1( &mut self, - indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + blocks: &blocks::Vecs, + prices: &prices::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { // 1. Compute all metrics except net_sentiment self.par_iter_mut() - .try_for_each(|v| v.compute_rest_part1(indexes, price, starting_indexes, exit))?; + .try_for_each(|v| v.compute_rest_part1(blocks, prices, starting_indexes, exit))?; // 2. Compute net_sentiment.height for separate cohorts (greed - pain) self.par_iter_separate_mut().try_for_each(|v| { @@ -261,42 +261,30 @@ impl UTXOCohorts { .compute_net_sentiment_from_others(starting_indexes, &metrics, exit) })?; - // 4. Compute net_sentiment dateindex for ALL cohorts - self.par_iter_mut().try_for_each(|v| { - v.metrics - .compute_net_sentiment_rest(indexes, starting_indexes, exit) - }) + Ok(()) } /// Second phase of post-processing: compute relative metrics. 
- #[allow(clippy::too_many_arguments)] - pub fn compute_rest_part2( + pub(crate) fn compute_rest_part2( &mut self, - indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + blocks: &blocks::Vecs, + prices: &prices::Vecs, starting_indexes: &ComputeIndexes, height_to_market_cap: Option<&HM>, - dateindex_to_market_cap: Option<&DM>, exit: &Exit, ) -> Result<()> where - HM: IterableVec + Sync, - DM: IterableVec + Sync, + HM: ReadableVec + Sync, { self.par_iter_mut().try_for_each(|v| { - v.compute_rest_part2( - indexes, - price, - starting_indexes, - height_to_market_cap, - dateindex_to_market_cap, - exit, - ) + v.compute_rest_part2(blocks, prices, starting_indexes, height_to_market_cap, exit) }) } /// Returns a parallel iterator over all vecs for parallel writing. - pub fn par_iter_vecs_mut(&mut self) -> impl ParallelIterator { + pub(crate) fn par_iter_vecs_mut( + &mut self, + ) -> impl ParallelIterator { // Collect all vecs from all cohorts (separate + aggregate) self.0 .iter_mut() @@ -306,55 +294,36 @@ impl UTXOCohorts { } /// Commit all states to disk (separate from vec writes for parallelization). - pub fn commit_all_states(&mut self, height: Height, cleanup: bool) -> Result<()> { + pub(crate) fn commit_all_states(&mut self, height: Height, cleanup: bool) -> Result<()> { self.par_iter_separate_mut() .try_for_each(|v| v.write_state(height, cleanup)) } /// Get minimum height from all separate cohorts' height-indexed vectors. - pub fn min_separate_stateful_height_len(&self) -> Height { + pub(crate) fn min_separate_stateful_height_len(&self) -> Height { self.iter_separate() .map(|v| Height::from(v.min_stateful_height_len())) .min() .unwrap_or_default() } - /// Get minimum dateindex from all separate cohorts' dateindex-indexed vectors. 
- pub fn min_separate_stateful_dateindex_len(&self) -> usize { - self.iter_separate() - .map(|v| v.min_stateful_dateindex_len()) - .min() - .unwrap_or(usize::MAX) - } - - /// Get minimum dateindex from all aggregate cohorts' dateindex-indexed vectors. - /// This checks cost_basis metrics which are only on aggregate cohorts. - pub fn min_aggregate_stateful_dateindex_len(&self) -> usize { - self.0 - .iter_aggregate() - .filter_map(|v| v.metrics.cost_basis.as_ref()) - .map(|cb| cb.min_stateful_dateindex_len()) - .min() - .unwrap_or(usize::MAX) - } - /// Import state for all separate cohorts at or before given height. /// Returns true if all imports succeeded and returned the expected height. - pub fn import_separate_states(&mut self, height: Height) -> bool { + pub(crate) fn import_separate_states(&mut self, height: Height) -> bool { self.par_iter_separate_mut() .map(|v| v.import_state(height).unwrap_or_default()) .all(|h| h == height) } /// Reset state heights for all separate cohorts. - pub fn reset_separate_state_heights(&mut self) { + pub(crate) fn reset_separate_state_heights(&mut self) { self.par_iter_separate_mut().for_each(|v| { v.reset_state_starting_height(); }); } /// Reset cost_basis_data for all separate cohorts (called during fresh start). - pub fn reset_separate_cost_basis_data(&mut self) -> Result<()> { + pub(crate) fn reset_separate_cost_basis_data(&mut self) -> Result<()> { self.par_iter_separate_mut().try_for_each(|v| { if let Some(state) = v.state.as_mut() { state.reset_cost_basis_data_if_needed()?; @@ -367,11 +336,12 @@ impl UTXOCohorts { /// Computes on-demand by merging age_range cohorts' cost_basis_data data. /// This avoids maintaining redundant aggregate cost_basis_data maps. /// Computes both sat-weighted (percentiles) and USD-weighted (invested_capital) percentiles. - /// Also writes daily cost basis snapshots to states_path. 
- pub fn truncate_push_aggregate_percentiles( + /// Also writes daily cost basis snapshots to states_path when day1 is provided. + pub(crate) fn truncate_push_aggregate_percentiles( &mut self, - dateindex: DateIndex, + height: Height, spot: Dollars, + day1_opt: Option, states_path: &Path, ) -> Result<()> { // Collect (filter, entries, total_sats, total_usd) from age_range cohorts. @@ -385,8 +355,8 @@ impl UTXOCohorts { let state = sub.state.as_ref()?; let mut total_sats: u64 = 0; let mut total_usd: u128 = 0; - let entries: Vec<(CentsUnsigned, Sats)> = state - .cost_basis_data_iter()? + let entries: Vec<(Cents, Sats)> = state + .cost_basis_data_iter() .map(|(price, &sats)| { let sats_u64 = u64::from(sats); let price_u128 = price.as_u128(); @@ -403,10 +373,7 @@ impl UTXOCohorts { self.0.par_iter_aggregate_mut().try_for_each(|aggregate| { let filter = aggregate.filter().clone(); - // Get cost_basis, skip if not configured - let Some(cost_basis) = aggregate.metrics.cost_basis.as_mut() else { - return Ok(()); - }; + let cost_basis = &mut aggregate.metrics.cost_basis; // Collect relevant cohort data for this aggregate and sum totals let mut total_sats: u64 = 0; @@ -424,27 +391,23 @@ impl UTXOCohorts { if total_sats == 0 { let nan_prices = [Dollars::NAN; PERCENTILES_LEN]; if let Some(percentiles) = cost_basis.percentiles.as_mut() { - percentiles.truncate_push(dateindex, &nan_prices)?; + percentiles.truncate_push(height, &nan_prices)?; } if let Some(invested_capital) = cost_basis.invested_capital.as_mut() { - invested_capital.truncate_push(dateindex, &nan_prices)?; + invested_capital.truncate_push(height, &nan_prices)?; } if let Some(spot_pct) = cost_basis.spot_cost_basis_percentile.as_mut() { - spot_pct - .dateindex - .truncate_push(dateindex, StoredF32::NAN)?; + spot_pct.height.truncate_push(height, StoredF32::NAN)?; } if let Some(spot_pct) = cost_basis.spot_invested_capital_percentile.as_mut() { - spot_pct - .dateindex - .truncate_push(dateindex, StoredF32::NAN)?; + 
spot_pct.height.truncate_push(height, StoredF32::NAN)?; } return Ok(()); } // K-way merge using min-heap: O(n log k) where k = number of cohorts // Collects merged price->sats map while computing percentiles - let mut heap: BinaryHeap> = BinaryHeap::new(); + let mut heap: BinaryHeap> = BinaryHeap::new(); // Initialize heap with first entry from each cohort for (cohort_idx, entries) in relevant.iter().enumerate() { @@ -465,19 +428,21 @@ impl UTXOCohorts { let mut sat_idx = 0; let mut usd_idx = 0; - let mut current_price: Option = None; + let mut current_price: Option = None; let mut sats_at_price: u64 = 0; let mut usd_at_price: u128 = 0; - // Collect merged entries during the merge (already in sorted order) - // Pre-allocate with max possible unique prices (actual count likely lower due to dedup) - let max_unique_prices = relevant.iter().map(|e| e.len()).max().unwrap_or(0); - let mut merged: Vec<(CentsUnsignedCompact, Sats)> = - Vec::with_capacity(max_unique_prices); + // Only collect merged entries when writing snapshots (date boundary) + let collect_merged = day1_opt.is_some(); + let max_unique_prices = if collect_merged { + relevant.iter().map(|e| e.len()).max().unwrap_or(0) + } else { + 0 + }; + let mut merged: Vec<(CentsCompact, Sats)> = Vec::with_capacity(max_unique_prices); - // Finalize a price point: compute percentiles and accumulate for merged vec - let mut finalize_price = |price: CentsUnsigned, sats: u64, usd: u128| { - // Percentile computation uses exact price for accuracy + // Finalize a price point: compute percentiles and optionally accumulate for merged vec + let mut finalize_price = |price: Cents, sats: u64, usd: u128| { cumsum_sats += sats; cumsum_usd += usd; @@ -493,17 +458,16 @@ impl UTXOCohorts { } } - // Round to nearest dollar with N significant digits for storage - let rounded: CentsUnsignedCompact = - price.round_to_dollar(COST_BASIS_PRICE_DIGITS).into(); - - // Merge entries with same rounded price using last_mut - if let 
Some((last_price, last_sats)) = merged.last_mut() - && *last_price == rounded - { - *last_sats += Sats::from(sats); - } else { - merged.push((rounded, Sats::from(sats))); + if collect_merged { + let rounded: CentsCompact = + price.round_to_dollar(COST_BASIS_PRICE_DIGITS).into(); + if let Some((last_price, last_sats)) = merged.last_mut() + && *last_price == rounded + { + *last_sats += Sats::from(sats); + } else { + merged.push((rounded, Sats::from(sats))); + } } }; @@ -540,45 +504,47 @@ impl UTXOCohorts { // Push both sat-weighted and USD-weighted results if let Some(percentiles) = cost_basis.percentiles.as_mut() { - percentiles.truncate_push(dateindex, &sat_result)?; + percentiles.truncate_push(height, &sat_result)?; } if let Some(invested_capital) = cost_basis.invested_capital.as_mut() { - invested_capital.truncate_push(dateindex, &usd_result)?; + invested_capital.truncate_push(height, &usd_result)?; } // Compute and push spot percentile ranks if let Some(spot_pct) = cost_basis.spot_cost_basis_percentile.as_mut() { let rank = compute_spot_percentile_rank(&sat_result, spot); - spot_pct.dateindex.truncate_push(dateindex, rank)?; + spot_pct.height.truncate_push(height, rank)?; } if let Some(spot_pct) = cost_basis.spot_invested_capital_percentile.as_mut() { let rank = compute_spot_percentile_rank(&usd_result, spot); - spot_pct.dateindex.truncate_push(dateindex, rank)?; + spot_pct.height.truncate_push(height, rank)?; } - // Write daily cost basis snapshot - let cohort_name = match &filter { - Filter::All => "all", - Filter::Term(Term::Sth) => TERM_NAMES.short.id, - Filter::Term(Term::Lth) => TERM_NAMES.long.id, - _ => return Ok(()), - }; + // Write daily cost basis snapshot (only at date boundaries) + if let Some(day1) = day1_opt { + let cohort_name = match &filter { + Filter::All => "all", + Filter::Term(Term::Sth) => TERM_NAMES.short.id, + Filter::Term(Term::Lth) => TERM_NAMES.long.id, + _ => return Ok(()), + }; - let date = Date::from(dateindex); - let dir = 
states_path.join(format!("utxo_{cohort_name}_cost_basis/by_date")); - fs::create_dir_all(&dir)?; - let path = dir.join(date.to_string()); - fs::write( - path, - CostBasisDistribution::serialize_iter(merged.into_iter())?, - )?; + let date = Date::from(day1); + let dir = states_path.join(format!("utxo_{cohort_name}_cost_basis/by_date")); + fs::create_dir_all(&dir)?; + let path = dir.join(date.to_string()); + fs::write( + path, + CostBasisDistribution::serialize_iter(merged.into_iter())?, + )?; + } Ok(()) }) } /// Validate computed versions for all cohorts (separate and aggregate). - pub fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> { + pub(crate) fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> { // Validate separate cohorts self.par_iter_separate_mut() .try_for_each(|v| v.validate_computed_versions(base_version))?; @@ -595,15 +561,14 @@ impl UTXOCohorts { /// /// Uses split points to efficiently compute regret per cohort. /// All 21 cohorts are computed in parallel, then pushed sequentially. - /// Called once per day when dateindex changes. - pub fn compute_and_push_peak_regret( + /// Called once per day when day1 changes. 
+ pub(crate) fn compute_and_push_peak_regret( &mut self, chain_state: &[BlockState], current_height: Height, current_timestamp: Timestamp, - spot: CentsUnsigned, + spot: Cents, price_range_max: &PriceRangeMax, - dateindex: DateIndex, ) -> Result<()> { const FIRST_PRICE_HEIGHT: usize = 68_195; @@ -613,12 +578,10 @@ impl UTXOCohorts { // Early return: push zeros if no price data yet if end_height <= start_height { for cohort in self.0.age_range.iter_mut() { - if let Some(unrealized) = cohort.metrics.unrealized.as_mut() - && let Some(peak_regret) = unrealized.peak_regret.as_mut() - { + if let Some(peak_regret) = cohort.metrics.unrealized.peak_regret.as_mut() { peak_regret - .dateindex - .truncate_push(dateindex, Dollars::ZERO)?; + .height + .truncate_push(current_height, Dollars::ZERO)?; } } return Ok(()); @@ -655,20 +618,16 @@ impl UTXOCohorts { } let mut regret: u128 = 0; - for h in effective_start..range_end { - let block = &chain_state[h]; + for (i, block) in chain_state[effective_start..range_end].iter().enumerate() { let supply = block.supply.value; if supply.is_zero() { continue; } - let cost_basis = match block.price { - Some(p) => p, - None => continue, - }; + let cost_basis = block.price; - let receive_height = Height::from(h); + let receive_height = Height::from(effective_start + i); let peak = price_range_max.max_between(receive_height, current_height); let peak_u128 = peak.as_u128(); let cost_u128 = cost_basis.as_u128(); @@ -681,7 +640,7 @@ impl UTXOCohorts { }; } - CentsUnsigned::new((regret / Sats::ONE_BTC_U128) as u64).to_dollars() + Cents::new((regret / Sats::ONE_BTC_U128) as u64).to_dollars() }) .collect::>() .try_into() @@ -689,10 +648,8 @@ impl UTXOCohorts { // Push results to cohorts for (cohort, regret) in self.0.age_range.iter_mut().zip(regrets) { - if let Some(unrealized) = cohort.metrics.unrealized.as_mut() - && let Some(peak_regret) = unrealized.peak_regret.as_mut() - { - peak_regret.dateindex.truncate_push(dateindex, regret)?; + if let 
Some(peak_regret) = cohort.metrics.unrealized.peak_regret.as_mut() { + peak_regret.height.truncate_push(current_height, regret)?; } } diff --git a/crates/brk_computer/src/distribution/cohorts/utxo/receive.rs b/crates/brk_computer/src/distribution/cohorts/utxo/receive.rs index c7cb6c0a1..83cfea0a7 100644 --- a/crates/brk_computer/src/distribution/cohorts/utxo/receive.rs +++ b/crates/brk_computer/src/distribution/cohorts/utxo/receive.rs @@ -1,4 +1,4 @@ -use brk_types::{CentsUnsigned, Height, Timestamp}; +use brk_types::{Cents, Height, Timestamp}; use crate::distribution::state::Transacted; @@ -13,12 +13,12 @@ impl UTXOCohorts { /// - The appropriate year cohort based on block timestamp /// - The appropriate output type cohort (P2PKH, P2SH, etc.) /// - The appropriate amount range cohort based on value - pub fn receive( + pub(crate) fn receive( &mut self, received: Transacted, height: Height, timestamp: Timestamp, - price: Option, + price: Cents, ) { let supply_state = received.spendable_supply; diff --git a/crates/brk_computer/src/distribution/cohorts/utxo/send.rs b/crates/brk_computer/src/distribution/cohorts/utxo/send.rs index cfa5989f1..7d2a82cce 100644 --- a/crates/brk_computer/src/distribution/cohorts/utxo/send.rs +++ b/crates/brk_computer/src/distribution/cohorts/utxo/send.rs @@ -1,13 +1,10 @@ -use brk_types::{Age, CentsUnsigned, Height}; +use brk_types::{Age, Height}; use rustc_hash::FxHashMap; use vecdb::VecIndex; -use crate::{ - distribution::{ - compute::PriceRangeMax, - state::{BlockState, Transacted}, - }, - utils::OptionExt, +use crate::distribution::{ + compute::PriceRangeMax, + state::{BlockState, Transacted}, }; use super::groups::UTXOCohorts; @@ -20,11 +17,11 @@ impl UTXOCohorts { /// /// `price_range_max` is used to compute the peak price during each UTXO's holding period /// for accurate peak regret calculation. 
- pub fn send( + pub(crate) fn send( &mut self, height_to_sent: FxHashMap, chain_state: &mut [BlockState], - price_range_max: Option<&PriceRangeMax>, + price_range_max: &PriceRangeMax, ) { if chain_state.is_empty() { return; @@ -46,12 +43,11 @@ impl UTXOCohorts { let age = Age::new(last_timestamp, block_state.timestamp, blocks_old); // Compute peak price during holding period for peak regret - // This is the max HIGH price between receive and send heights - let peak_price: Option = - price_range_max.map(|t| t.max_between(receive_height, send_height)); + // This is the max price between receive and send heights + let peak_price = price_range_max.max_between(receive_height, send_height); // Update age range cohort (direct index lookup) - self.0.age_range.get_mut(age).state.um().send_utxo( + self.0.age_range.get_mut(age).state.as_mut().unwrap().send_utxo( &sent.spendable_supply, current_price, prev_price, @@ -64,7 +60,7 @@ impl UTXOCohorts { .epoch .mut_vec_from_height(receive_height) .state - .um() + .as_mut().unwrap() .send_utxo( &sent.spendable_supply, current_price, @@ -78,7 +74,7 @@ impl UTXOCohorts { .year .mut_vec_from_timestamp(block_state.timestamp) .state - .um() + .as_mut().unwrap() .send_utxo( &sent.spendable_supply, current_price, @@ -92,24 +88,26 @@ impl UTXOCohorts { .spendable .iter_typed() .for_each(|(output_type, supply_state)| { - self.0 - .type_ - .get_mut(output_type) - .state - .um() - .send_utxo(supply_state, current_price, prev_price, peak_price, age) + self.0.type_.get_mut(output_type).state.as_mut().unwrap().send_utxo( + supply_state, + current_price, + prev_price, + peak_price, + age, + ) }); // Update amount range cohorts sent.by_size_group .iter_typed() .for_each(|(group, supply_state)| { - self.0 - .amount_range - .get_mut(group) - .state - .um() - .send_utxo(supply_state, current_price, prev_price, peak_price, age); + self.0.amount_range.get_mut(group).state.as_mut().unwrap().send_utxo( + supply_state, + current_price, + prev_price, + 
peak_price, + age, + ); }); } } diff --git a/crates/brk_computer/src/distribution/cohorts/utxo/tick_tock.rs b/crates/brk_computer/src/distribution/cohorts/utxo/tick_tock.rs index df78c471e..0e00ccbde 100644 --- a/crates/brk_computer/src/distribution/cohorts/utxo/tick_tock.rs +++ b/crates/brk_computer/src/distribution/cohorts/utxo/tick_tock.rs @@ -15,7 +15,7 @@ impl UTXOCohorts { /// - k = 20 boundaries to check /// - n = total blocks in chain_state /// - Linear scan for end_idx is faster than binary search since typically 0-2 blocks cross each boundary - pub fn tick_tock_next_block(&mut self, chain_state: &[BlockState], timestamp: Timestamp) { + pub(crate) fn tick_tock_next_block(&mut self, chain_state: &[BlockState], timestamp: Timestamp) { if chain_state.is_empty() { return; } diff --git a/crates/brk_computer/src/distribution/cohorts/utxo/vecs.rs b/crates/brk_computer/src/distribution/cohorts/utxo/vecs.rs index 3fd2515a8..7fac37cd4 100644 --- a/crates/brk_computer/src/distribution/cohorts/utxo/vecs.rs +++ b/crates/brk_computer/src/distribution/cohorts/utxo/vecs.rs @@ -3,29 +3,29 @@ use std::path::Path; use brk_cohort::{CohortContext, Filter, Filtered, StateLevel}; use brk_error::Result; use brk_traversable::Traversable; -use brk_types::{CentsUnsigned, DateIndex, Dollars, Height, Version}; +use brk_types::{Cents, Dollars, Height, Version}; use rayon::prelude::*; -use vecdb::{AnyStoredVec, Database, Exit, IterableVec}; +use vecdb::{AnyStoredVec, Database, Exit, ReadableVec, Rw, StorageMode}; -use crate::{ComputeIndexes, distribution::state::UTXOCohortState, indexes, price}; +use crate::{ComputeIndexes, blocks, distribution::state::UTXOCohortState, indexes, prices}; use crate::distribution::metrics::{CohortMetrics, ImportConfig, RealizedMetrics, SupplyMetrics}; use super::super::traits::{CohortVecs, DynCohortVecs}; /// UTXO cohort with metrics and optional runtime state. 
-#[derive(Clone, Traversable)] -pub struct UTXOCohortVecs { +#[derive(Traversable)] +pub struct UTXOCohortVecs { /// Starting height when state was imported state_starting_height: Option, /// Runtime state for block-by-block processing (separate cohorts only) #[traversable(skip)] - pub state: Option, + pub state: Option>, /// Metric vectors #[traversable(flatten)] - pub metrics: CohortMetrics, + pub metrics: CohortMetrics, } impl UTXOCohortVecs { @@ -37,19 +37,18 @@ impl UTXOCohortVecs { /// `up_to_1h_realized` is used for cohorts where `compute_adjusted()` is true, /// to create lazy adjusted vecs: adjusted = cohort - up_to_1h. #[allow(clippy::too_many_arguments)] - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, filter: Filter, name: &str, version: Version, indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, states_path: &Path, state_level: StateLevel, all_supply: Option<&SupplyMetrics>, up_to_1h_realized: Option<&RealizedMetrics>, ) -> Result { - let compute_dollars = price.is_some(); let full_name = CohortContext::Utxo.full_name(&filter, name); let cfg = ImportConfig { @@ -59,7 +58,7 @@ impl UTXOCohortVecs { context: CohortContext::Utxo, version, indexes, - price, + prices, up_to_1h_realized, }; @@ -67,11 +66,7 @@ impl UTXOCohortVecs { state_starting_height: None, state: if state_level.is_full() { - Some(UTXOCohortState::new( - states_path, - &full_name, - compute_dollars, - )) + Some(Box::new(UTXOCohortState::new(states_path, &full_name))) } else { None }, @@ -80,18 +75,8 @@ impl UTXOCohortVecs { }) } - /// Get the starting height when state was imported. - pub fn state_starting_height(&self) -> Option { - self.state_starting_height - } - - /// Set the state starting height. - pub fn set_state_starting_height(&mut self, height: Height) { - self.state_starting_height = Some(height); - } - /// Reset state starting height to zero and reset state values. 
- pub fn reset_state_starting_height(&mut self) { + pub(crate) fn reset_state_starting_height(&mut self) { self.state_starting_height = Some(Height::ZERO); if let Some(state) = self.state.as_mut() { state.reset(); @@ -99,12 +84,12 @@ impl UTXOCohortVecs { } /// Returns a parallel iterator over all vecs for parallel writing. - pub fn par_iter_vecs_mut(&mut self) -> impl ParallelIterator { + pub(crate) fn par_iter_vecs_mut(&mut self) -> impl ParallelIterator { self.metrics.par_iter_mut() } /// Commit state to disk (separate from vec writes for parallelization). - pub fn write_state(&mut self, height: Height, cleanup: bool) -> Result<()> { + pub(crate) fn write_state(&mut self, height: Height, cleanup: bool) -> Result<()> { if let Some(state) = self.state.as_mut() { state.write(height, cleanup)?; } @@ -123,10 +108,6 @@ impl DynCohortVecs for UTXOCohortVecs { self.metrics.min_stateful_height_len() } - fn min_stateful_dateindex_len(&self) -> usize { - self.metrics.min_stateful_dateindex_len() - } - fn reset_state_starting_height(&mut self) { self.state_starting_height = Some(Height::ZERO); if let Some(state) = self.state.as_mut() { @@ -135,8 +116,6 @@ impl DynCohortVecs for UTXOCohortVecs { } fn import_state(&mut self, starting_height: Height) -> Result { - use vecdb::GenericStoredVec; - // Import state from runtime state if present if let Some(state) = self.state.as_mut() { // State files are saved AT height H, so to resume at H+1 we need to import at H @@ -152,13 +131,15 @@ impl DynCohortVecs for UTXOCohortVecs { .total .sats .height - .read_once(prev_height)?; + .collect_one(prev_height) + .unwrap(); state.supply.utxo_count = *self .metrics .outputs .utxo_count .height - .read_once(prev_height)?; + .collect_one(prev_height) + .unwrap(); // Restore realized cap from persisted exact values state.restore_realized_cap(); @@ -197,16 +178,12 @@ impl DynCohortVecs for UTXOCohortVecs { fn compute_then_truncate_push_unrealized_states( &mut self, height: Height, - 
height_price: Option, - dateindex: Option, - date_price: Option>, + height_price: Cents, ) -> Result<()> { if let Some(state) = self.state.as_mut() { self.metrics.compute_then_truncate_push_unrealized_states( height, height_price, - dateindex, - date_price, state, )?; } @@ -215,13 +192,13 @@ impl DynCohortVecs for UTXOCohortVecs { fn compute_rest_part1( &mut self, - indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + blocks: &blocks::Vecs, + prices: &prices::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { self.metrics - .compute_rest_part1(indexes, price, starting_indexes, exit) + .compute_rest_part1(blocks, prices, starting_indexes, exit) } } @@ -241,19 +218,17 @@ impl CohortVecs for UTXOCohortVecs { fn compute_rest_part2( &mut self, - indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + blocks: &blocks::Vecs, + prices: &prices::Vecs, starting_indexes: &ComputeIndexes, - height_to_market_cap: Option<&impl IterableVec>, - dateindex_to_market_cap: Option<&impl IterableVec>, + height_to_market_cap: Option<&impl ReadableVec>, exit: &Exit, ) -> Result<()> { self.metrics.compute_rest_part2( - indexes, - price, + blocks, + prices, starting_indexes, height_to_market_cap, - dateindex_to_market_cap, exit, ) } diff --git a/crates/brk_computer/src/distribution/compute/aggregates.rs b/crates/brk_computer/src/distribution/compute/aggregates.rs index b23f1d5cf..6262d657b 100644 --- a/crates/brk_computer/src/distribution/compute/aggregates.rs +++ b/crates/brk_computer/src/distribution/compute/aggregates.rs @@ -1,9 +1,9 @@ use brk_error::Result; -use brk_types::{DateIndex, Dollars, Height}; +use brk_types::{Dollars, Height}; use tracing::info; -use vecdb::{Exit, IterableVec}; +use vecdb::{Exit, ReadableVec}; -use crate::{ComputeIndexes, indexes, price}; +use crate::{ComputeIndexes, blocks, prices}; use super::super::cohorts::{AddressCohorts, UTXOCohorts}; @@ -12,7 +12,7 @@ use super::super::cohorts::{AddressCohorts, UTXOCohorts}; /// For example: 
/// - ">=1d" UTXO cohort is computed from sum of age_range cohorts that match /// - ">=1 BTC" address cohort is computed from sum of amount_range cohorts that match -pub fn compute_overlapping( +pub(crate) fn compute_overlapping( utxo_cohorts: &mut UTXOCohorts, address_cohorts: &mut AddressCohorts, starting_indexes: &ComputeIndexes, @@ -28,19 +28,19 @@ pub fn compute_overlapping( /// First phase of post-processing: compute index transforms. /// -/// Converts height-indexed data to dateindex-indexed data and other transforms. -pub fn compute_rest_part1( +/// Converts height-indexed data to day1-indexed data and other transforms. +pub(crate) fn compute_rest_part1( utxo_cohorts: &mut UTXOCohorts, address_cohorts: &mut AddressCohorts, - indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + blocks: &blocks::Vecs, + prices: &prices::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { info!("Computing rest part 1..."); - utxo_cohorts.compute_rest_part1(indexes, price, starting_indexes, exit)?; - address_cohorts.compute_rest_part1(indexes, price, starting_indexes, exit)?; + utxo_cohorts.compute_rest_part1(blocks, prices, starting_indexes, exit)?; + address_cohorts.compute_rest_part1(blocks, prices, starting_indexes, exit)?; Ok(()) } @@ -48,38 +48,33 @@ pub fn compute_rest_part1( /// Second phase of post-processing: compute relative metrics. /// /// Computes supply ratios, market cap ratios, etc. using total references. 
-#[allow(clippy::too_many_arguments)] -pub fn compute_rest_part2( +pub(crate) fn compute_rest_part2( utxo_cohorts: &mut UTXOCohorts, address_cohorts: &mut AddressCohorts, - indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + blocks: &blocks::Vecs, + prices: &prices::Vecs, starting_indexes: &ComputeIndexes, height_to_market_cap: Option<&HM>, - dateindex_to_market_cap: Option<&DM>, exit: &Exit, ) -> Result<()> where - HM: IterableVec + Sync, - DM: IterableVec + Sync, + HM: ReadableVec + Sync, { info!("Computing rest part 2..."); utxo_cohorts.compute_rest_part2( - indexes, - price, + blocks, + prices, starting_indexes, height_to_market_cap, - dateindex_to_market_cap, exit, )?; address_cohorts.compute_rest_part2( - indexes, - price, + blocks, + prices, starting_indexes, height_to_market_cap, - dateindex_to_market_cap, exit, )?; diff --git a/crates/brk_computer/src/distribution/compute/block_loop.rs b/crates/brk_computer/src/distribution/compute/block_loop.rs index 2981b3db8..6bc08c7bd 100644 --- a/crates/brk_computer/src/distribution/compute/block_loop.rs +++ b/crates/brk_computer/src/distribution/compute/block_loop.rs @@ -3,11 +3,11 @@ use std::thread; use brk_cohort::ByAddressType; use brk_error::Result; use brk_indexer::Indexer; -use brk_types::{CentsUnsigned, DateIndex, Dollars, Height, OutputType, Sats, TxIndex, TypeIndex}; +use brk_types::{Cents, Date, Day1, Height, OutputType, Sats, StoredU64, TxIndex, TypeIndex}; use rayon::prelude::*; use rustc_hash::FxHashSet; use tracing::{debug, info}; -use vecdb::{Exit, IterableVec, TypedVecIterator, VecIndex}; +use vecdb::{AnyVec, Exit, ReadableVec, VecIndex}; use crate::{ blocks, @@ -20,7 +20,7 @@ use crate::{ compute::write::{process_address_updates, write}, state::{BlockState, Transacted}, }, - indexes, inputs, outputs, price, transactions, + indexes, inputs, outputs, prices, transactions, }; use super::{ @@ -30,13 +30,13 @@ use super::{ vecs::Vecs, }, BIP30_DUPLICATE_HEIGHT_1, BIP30_DUPLICATE_HEIGHT_2, 
BIP30_ORIGINAL_HEIGHT_1, - BIP30_ORIGINAL_HEIGHT_2, ComputeContext, FLUSH_INTERVAL, TxInIterators, TxOutIterators, + BIP30_ORIGINAL_HEIGHT_2, ComputeContext, FLUSH_INTERVAL, TxInReaders, TxOutReaders, VecsReaders, build_txinindex_to_txindex, build_txoutindex_to_txindex, }; /// Process all blocks from starting_height to last_height. #[allow(clippy::too_many_arguments)] -pub fn process_blocks( +pub(crate) fn process_blocks( vecs: &mut Vecs, indexer: &Indexer, indexes: &indexes::Vecs, @@ -44,7 +44,7 @@ pub fn process_blocks( outputs: &outputs::Vecs, transactions: &transactions::Vecs, blocks: &blocks::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, starting_height: Height, last_height: Height, chain_state: &mut Vec, @@ -52,7 +52,7 @@ pub fn process_blocks( ) -> Result<()> { // Create computation context with pre-computed vectors for thread-safe access debug!("creating ComputeContext"); - let ctx = ComputeContext::new(starting_height, last_height, blocks, price); + let ctx = ComputeContext::new(starting_height, last_height, blocks, prices); debug!("ComputeContext created"); if ctx.starting_height > ctx.last_height { @@ -72,34 +72,39 @@ pub fn process_blocks( // From blocks: let height_to_timestamp = &blocks.time.timestamp_monotonic; let height_to_date = &blocks.time.date; - let dateindex_to_first_height = &indexes.dateindex.first_height; - let dateindex_to_height_count = &indexes.dateindex.height_count; + let day1_to_first_height = &indexes.day1.first_height; + let day1_to_height_count = &indexes.day1.height_count; let txindex_to_output_count = &indexes.txindex.output_count; let txindex_to_input_count = &indexes.txindex.input_count; - // From price (optional) - use cents for computation: - let height_to_price = price.map(|p| &p.cents.split.height.close); - let dateindex_to_price = price.map(|p| &p.cents.split.dateindex.close); + // From price - use cents for computation: + let height_to_price = &prices.cents.price; // Access pre-computed vectors from 
context for thread-safe access let height_to_price_vec = &ctx.height_to_price; let height_to_timestamp_vec = &ctx.height_to_timestamp; - // Create iterators for sequential access - let mut height_to_first_txindex_iter = height_to_first_txindex.into_iter(); - let mut height_to_first_txoutindex_iter = height_to_first_txoutindex.into_iter(); - let mut height_to_first_txinindex_iter = height_to_first_txinindex.into_iter(); - let mut height_to_tx_count_iter = height_to_tx_count.into_iter(); - let mut height_to_output_count_iter = height_to_output_count.into_iter(); - let mut height_to_input_count_iter = height_to_input_count.into_iter(); - let mut height_to_timestamp_iter = height_to_timestamp.into_iter(); - let mut height_to_date_iter = height_to_date.into_iter(); - let mut dateindex_to_first_height_iter = dateindex_to_first_height.into_iter(); - let mut dateindex_to_height_count_iter = dateindex_to_height_count.into_iter(); - let mut txindex_to_output_count_iter = txindex_to_output_count.iter(); - let mut txindex_to_input_count_iter = txindex_to_input_count.iter(); - let mut height_to_price_iter = height_to_price.map(|v| v.into_iter()); - let mut dateindex_to_price_iter = dateindex_to_price.map(|v| v.into_iter()); + // Range for pre-collecting height-indexed vecs + let start_usize = starting_height.to_usize(); + let end_usize = last_height.to_usize() + 1; + + // Pre-collect height-indexed vecs for the block range (bulk read before hot loop) + let height_to_first_txindex_vec: Vec = + height_to_first_txindex.collect_range_at(start_usize, end_usize); + let height_to_first_txoutindex_vec: Vec<_> = + height_to_first_txoutindex.collect_range_at(start_usize, end_usize); + let height_to_first_txinindex_vec: Vec<_> = + height_to_first_txinindex.collect_range_at(start_usize, end_usize); + let height_to_tx_count_vec: Vec<_> = + height_to_tx_count.collect_range_at(start_usize, end_usize); + let height_to_output_count_vec: Vec<_> = + 
height_to_output_count.collect_range_at(start_usize, end_usize); + let height_to_input_count_vec: Vec<_> = + height_to_input_count.collect_range_at(start_usize, end_usize); + let height_to_timestamp_collected: Vec<_> = + height_to_timestamp.collect_range_at(start_usize, end_usize); + let height_to_price_collected: Vec<_> = + height_to_price.collect_range_at(start_usize, end_usize); debug!("creating VecsReaders"); let mut vr = VecsReaders::new(&vecs.any_address_indexes, &vecs.addresses_data); @@ -108,8 +113,11 @@ pub fn process_blocks( // Build txindex -> height lookup map for efficient prev_height computation debug!("building txindex_to_height RangeMap"); let mut txindex_to_height: RangeMap = { - let mut map = RangeMap::with_capacity(last_height.to_usize() + 1); - for first_txindex in indexer.vecs.transactions.first_txindex.into_iter() { + let first_txindex_len = indexer.vecs.transactions.first_txindex.len(); + let all_first_txindexes: Vec = + indexer.vecs.transactions.first_txindex.collect_range_at(0, first_txindex_len); + let mut map = RangeMap::with_capacity(first_txindex_len); + for first_txindex in all_first_txindexes { map.push(first_txindex); } map @@ -117,18 +125,18 @@ pub fn process_blocks( debug!("txindex_to_height RangeMap built"); // Create reusable iterators for sequential txout/txin reads (16KB buffered) - let mut txout_iters = TxOutIterators::new(indexer); - let mut txin_iters = TxInIterators::new(indexer, inputs, &mut txindex_to_height); + let txout_iters = TxOutReaders::new(indexer); + let mut txin_iters = TxInReaders::new(indexer, inputs, &mut txindex_to_height); - // Create iterators for first address indexes per type - let mut first_p2a_iter = indexer.vecs.addresses.first_p2aaddressindex.into_iter(); - let mut first_p2pk33_iter = indexer.vecs.addresses.first_p2pk33addressindex.into_iter(); - let mut first_p2pk65_iter = indexer.vecs.addresses.first_p2pk65addressindex.into_iter(); - let mut first_p2pkh_iter = 
indexer.vecs.addresses.first_p2pkhaddressindex.into_iter(); - let mut first_p2sh_iter = indexer.vecs.addresses.first_p2shaddressindex.into_iter(); - let mut first_p2tr_iter = indexer.vecs.addresses.first_p2traddressindex.into_iter(); - let mut first_p2wpkh_iter = indexer.vecs.addresses.first_p2wpkhaddressindex.into_iter(); - let mut first_p2wsh_iter = indexer.vecs.addresses.first_p2wshaddressindex.into_iter(); + // Pre-collect first address indexes per type for the block range + let first_p2a_vec = indexer.vecs.addresses.first_p2aaddressindex.collect_range_at(start_usize, end_usize); + let first_p2pk33_vec = indexer.vecs.addresses.first_p2pk33addressindex.collect_range_at(start_usize, end_usize); + let first_p2pk65_vec = indexer.vecs.addresses.first_p2pk65addressindex.collect_range_at(start_usize, end_usize); + let first_p2pkh_vec = indexer.vecs.addresses.first_p2pkhaddressindex.collect_range_at(start_usize, end_usize); + let first_p2sh_vec = indexer.vecs.addresses.first_p2shaddressindex.collect_range_at(start_usize, end_usize); + let first_p2tr_vec = indexer.vecs.addresses.first_p2traddressindex.collect_range_at(start_usize, end_usize); + let first_p2wpkh_vec = indexer.vecs.addresses.first_p2wpkhaddressindex.collect_range_at(start_usize, end_usize); + let first_p2wsh_vec = indexer.vecs.addresses.first_p2wshaddressindex.collect_range_at(start_usize, end_usize); // Track running totals - recover from previous height if resuming debug!("recovering addr_counts from height {}", starting_height); @@ -151,48 +159,58 @@ pub fn process_blocks( // Track activity counts - reset each block let mut activity_counts = AddressTypeToActivityCounts::default(); + // Pre-collect lazy vecs that don't support iterators + let height_to_date_vec: Vec = height_to_date.collect_range_at(start_usize, end_usize); + debug!("creating AddressCache"); let mut cache = AddressCache::new(); debug!("AddressCache created, entering main loop"); + // Cache for day1 lookups - same day1 repeats ~140 times 
per day + let mut cached_day1 = Day1::default(); + let mut cached_date_first_height = Height::ZERO; + let mut cached_date_height_count = StoredU64::default(); + + // Reusable hashsets for received addresses (avoid per-block allocation) + let mut received_addresses = ByAddressType::>::default(); + // Main block iteration for height in starting_height.to_usize()..=last_height.to_usize() { let height = Height::from(height); info!("Processing chain at {}...", height); - // Get block metadata - let first_txindex = height_to_first_txindex_iter.get_unwrap(height); - let tx_count = u64::from(height_to_tx_count_iter.get_unwrap(height)); - let first_txoutindex = height_to_first_txoutindex_iter - .get_unwrap(height) - .to_usize(); - let output_count = u64::from(height_to_output_count_iter.get_unwrap(height)) as usize; - let first_txinindex = height_to_first_txinindex_iter.get_unwrap(height).to_usize(); - let input_count = u64::from(height_to_input_count_iter.get_unwrap(height)) as usize; - let timestamp = height_to_timestamp_iter.get_unwrap(height); - let block_price = height_to_price_iter.as_mut().map(|v| *v.get_unwrap(height)); + // Get block metadata from pre-collected vecs + let offset = height.to_usize() - start_usize; + let first_txindex = height_to_first_txindex_vec[offset]; + let tx_count = u64::from(height_to_tx_count_vec[offset]); + let first_txoutindex = height_to_first_txoutindex_vec[offset].to_usize(); + let output_count = u64::from(height_to_output_count_vec[offset]) as usize; + let first_txinindex = height_to_first_txinindex_vec[offset].to_usize(); + let input_count = u64::from(height_to_input_count_vec[offset]) as usize; + let timestamp = height_to_timestamp_collected[offset]; + let block_price = height_to_price_collected[offset]; - // Debug validation: verify context methods match iterator values + // Debug validation: verify context methods match pre-collected values debug_assert_eq!(ctx.timestamp_at(height), timestamp); 
debug_assert_eq!(ctx.price_at(height), block_price); - // Build txindex mappings for this block + // Build txindex mappings for this block (pass ReadableVec refs directly) let txoutindex_to_txindex = - build_txoutindex_to_txindex(first_txindex, tx_count, &mut txindex_to_output_count_iter); + build_txoutindex_to_txindex(first_txindex, tx_count, txindex_to_output_count); let txinindex_to_txindex = - build_txinindex_to_txindex(first_txindex, tx_count, &mut txindex_to_input_count_iter); + build_txinindex_to_txindex(first_txindex, tx_count, txindex_to_input_count); - // Get first address indexes for this height + // Get first address indexes for this height from pre-collected vecs let first_addressindexes = ByAddressType { - p2a: TypeIndex::from(first_p2a_iter.get_unwrap(height).to_usize()), - p2pk33: TypeIndex::from(first_p2pk33_iter.get_unwrap(height).to_usize()), - p2pk65: TypeIndex::from(first_p2pk65_iter.get_unwrap(height).to_usize()), - p2pkh: TypeIndex::from(first_p2pkh_iter.get_unwrap(height).to_usize()), - p2sh: TypeIndex::from(first_p2sh_iter.get_unwrap(height).to_usize()), - p2tr: TypeIndex::from(first_p2tr_iter.get_unwrap(height).to_usize()), - p2wpkh: TypeIndex::from(first_p2wpkh_iter.get_unwrap(height).to_usize()), - p2wsh: TypeIndex::from(first_p2wsh_iter.get_unwrap(height).to_usize()), + p2a: TypeIndex::from(first_p2a_vec[offset].to_usize()), + p2pk33: TypeIndex::from(first_p2pk33_vec[offset].to_usize()), + p2pk65: TypeIndex::from(first_p2pk65_vec[offset].to_usize()), + p2pkh: TypeIndex::from(first_p2pkh_vec[offset].to_usize()), + p2sh: TypeIndex::from(first_p2sh_vec[offset].to_usize()), + p2tr: TypeIndex::from(first_p2tr_vec[offset].to_usize()), + p2wpkh: TypeIndex::from(first_p2wpkh_vec[offset].to_usize()), + p2wsh: TypeIndex::from(first_p2wsh_vec[offset].to_usize()), }; // Reset per-block values for all separate cohorts @@ -213,7 +231,7 @@ pub fn process_blocks( }; // Process outputs and inputs in parallel with tick-tock - let (outputs_result, 
inputs_result) = thread::scope(|scope| { + let (outputs_result, inputs_result) = thread::scope(|scope| -> Result<_> { // Tick-tock age transitions in background scope.spawn(|| { vecs.utxo_cohorts @@ -247,7 +265,7 @@ pub fn process_blocks( &vr, &vecs.any_address_indexes, &vecs.addresses_data, - ) + )? } else { InputsResult { height_to_sent: Default::default(), @@ -257,10 +275,10 @@ pub fn process_blocks( } }; - let outputs_result = outputs_handle.join().unwrap(); + let outputs_result = outputs_handle.join().unwrap()?; - (outputs_result, inputs_result) - }); + Ok((outputs_result, inputs_result)) + })?; // Merge new address data into current cache cache.merge_funded(outputs_result.address_data); @@ -302,16 +320,14 @@ pub fn process_blocks( }); // Build set of addresses that received this block (for detecting "both" in sent) - let received_addresses: ByAddressType> = { - let mut sets = ByAddressType::>::default(); - for (output_type, vec) in outputs_result.received_data.iter() { - let set = sets.get_mut_unwrap(output_type); - for (type_index, _) in vec { - set.insert(*type_index); - } + // Reuse pre-allocated hashsets: clear preserves capacity, avoiding reallocation + received_addresses.values_mut().for_each(|set| set.clear()); + for (output_type, vec) in outputs_result.received_data.iter() { + let set = received_addresses.get_mut_unwrap(output_type); + for (type_index, _) in vec { + set.insert(*type_index); } - sets - }; + } // Process UTXO cohorts and Address cohorts in parallel // - Main thread: UTXO cohorts receive/send @@ -339,12 +355,12 @@ pub fn process_blocks( &mut vecs.address_cohorts, &mut lookup, block_price, - ctx.price_range_max.as_ref(), + &ctx.price_range_max, &mut addr_counts, &mut empty_addr_counts, &mut activity_counts, &received_addresses, - height_to_price_vec.as_deref(), + height_to_price_vec, height_to_timestamp_vec, height, timestamp, @@ -356,7 +372,7 @@ pub fn process_blocks( vecs.utxo_cohorts .receive(transacted, height, timestamp, 
block_price); vecs.utxo_cohorts - .send(height_to_sent, chain_state, ctx.price_range_max.as_ref()); + .send(height_to_sent, chain_state, &ctx.price_range_max); }); // Push to height-indexed vectors @@ -370,17 +386,24 @@ pub fn process_blocks( vecs.address_activity .truncate_push_height(height, &activity_counts)?; - // Get date info for unrealized state computation - let date = height_to_date_iter.get_unwrap(height); - let dateindex = DateIndex::try_from(date).unwrap(); - let date_first_height = dateindex_to_first_height_iter.get_unwrap(dateindex); - let date_height_count = dateindex_to_height_count_iter.get_unwrap(dateindex); + // Get date info for unrealized state computation (cold path - once per day) + // Cache day1 lookups: same day1 repeats ~140 times per day, + // avoiding redundant PcoVec page decompressions. + let date = height_to_date_vec[offset]; + let day1 = Day1::try_from(date).unwrap(); + let (date_first_height, date_height_count) = if day1 == cached_day1 { + (cached_date_first_height, cached_date_height_count) + } else { + let fh: Height = day1_to_first_height.collect_one(day1).unwrap(); + let hc = day1_to_height_count.collect_one(day1).unwrap(); + cached_day1 = day1; + cached_date_first_height = fh; + cached_date_height_count = hc; + (fh, hc) + }; let is_date_last_height = date_first_height + Height::from(date_height_count).decremented().unwrap() == height; - let date_price = dateindex_to_price_iter - .as_mut() - .map(|v| is_date_last_height.then(|| *v.get_unwrap(dateindex))); - let dateindex_opt = is_date_last_height.then_some(dateindex); + let day1_opt = is_date_last_height.then_some(day1); // Push cohort states and compute unrealized push_cohort_states( @@ -388,33 +411,26 @@ pub fn process_blocks( &mut vecs.address_cohorts, height, block_price, - dateindex_opt, - date_price, )?; // Compute and push percentiles for aggregate cohorts (all, sth, lth) - if let Some(dateindex) = dateindex_opt { - let spot = date_price - .flatten() - .map(|c| 
c.to_dollars()) - .unwrap_or(Dollars::NAN); - vecs.utxo_cohorts - .truncate_push_aggregate_percentiles(dateindex, spot, &vecs.states_path)?; + let spot = block_price.to_dollars(); + vecs.utxo_cohorts.truncate_push_aggregate_percentiles( + height, + spot, + day1_opt, + &vecs.states_path, + )?; - // Compute unrealized peak regret by age range (once per day) - // Aggregate cohorts (all, term, etc.) get values via compute_from_stateful - if let Some(spot_cents) = block_price - && let Some(price_range_max) = ctx.price_range_max.as_ref() - { - vecs.utxo_cohorts.compute_and_push_peak_regret( - chain_state, - height, - timestamp, - spot_cents, - price_range_max, - dateindex, - )?; - } + // Compute unrealized peak regret by age range (once per day) + if let Some(day1) = day1_opt { + vecs.utxo_cohorts.compute_and_push_peak_regret( + chain_state, + height, + timestamp, + block_price, + &ctx.price_range_max, + )?; } // Periodic checkpoint flush @@ -487,20 +503,16 @@ fn push_cohort_states( utxo_cohorts: &mut UTXOCohorts, address_cohorts: &mut AddressCohorts, height: Height, - height_price: Option, - dateindex: Option, - date_price: Option>, + height_price: Cents, ) -> Result<()> { - // utxo_cohorts.iter_separate_mut().try_for_each(|v| { utxo_cohorts.par_iter_separate_mut().try_for_each(|v| { v.truncate_push(height)?; - v.compute_then_truncate_push_unrealized_states(height, height_price, dateindex, date_price) + v.compute_then_truncate_push_unrealized_states(height, height_price) })?; - // address_cohorts.iter_separate_mut().try_for_each(|v| { address_cohorts.par_iter_separate_mut().try_for_each(|v| { v.truncate_push(height)?; - v.compute_then_truncate_push_unrealized_states(height, height_price, dateindex, date_price) + v.compute_then_truncate_push_unrealized_states(height, height_price) })?; Ok(()) diff --git a/crates/brk_computer/src/distribution/compute/context.rs b/crates/brk_computer/src/distribution/compute/context.rs index 92fb50eba..6b7410db2 100644 --- 
a/crates/brk_computer/src/distribution/compute/context.rs +++ b/crates/brk_computer/src/distribution/compute/context.rs @@ -1,24 +1,24 @@ use std::time::Instant; -use brk_types::{CentsUnsigned, Height, Timestamp}; +use brk_types::{Cents, Height, Timestamp}; use tracing::debug; -use vecdb::VecIndex; +use vecdb::{ReadableVec, VecIndex}; -use crate::{blocks, price}; +use crate::{blocks, prices}; /// Sparse table for O(1) range maximum queries on prices. /// Uses O(n log n) space (~140MB for 880k blocks). pub struct PriceRangeMax { /// Flattened table: table[k * n + i] = max of 2^k elements starting at index i /// Using flat layout for better cache locality. - table: Vec, + table: Vec, /// Number of elements n: usize, } impl PriceRangeMax { /// Build sparse table from high prices. O(n log n) time and space. - pub fn build(prices: &[CentsUnsigned]) -> Self { + pub(crate) fn build(prices: &[Cents]) -> Self { let start = Instant::now(); let n = prices.len(); @@ -33,7 +33,7 @@ impl PriceRangeMax { let levels = (usize::BITS - n.leading_zeros()) as usize; // Allocate flat table: levels * n elements - let mut table = vec![CentsUnsigned::ZERO; levels * n]; + let mut table = vec![Cents::ZERO; levels * n]; // Base case: level 0 = original prices table[..n].copy_from_slice(prices); @@ -61,7 +61,7 @@ impl PriceRangeMax { "PriceRangeMax built: {} heights, {} levels, {:.2}MB, {:.2}ms", n, levels, - (levels * n * std::mem::size_of::()) as f64 / 1_000_000.0, + (levels * n * std::mem::size_of::()) as f64 / 1_000_000.0, elapsed.as_secs_f64() * 1000.0 ); @@ -70,7 +70,7 @@ impl PriceRangeMax { /// Query maximum value in range [l, r] (inclusive). O(1) time. #[inline] - pub fn range_max(&self, l: usize, r: usize) -> CentsUnsigned { + pub(crate) fn range_max(&self, l: usize, r: usize) -> Cents { debug_assert!(l <= r && r < self.n); let len = r - l + 1; @@ -89,7 +89,7 @@ impl PriceRangeMax { /// Query maximum value in height range. O(1) time. 
#[inline] - pub fn max_between(&self, from: Height, to: Height) -> CentsUnsigned { + pub(crate) fn max_between(&self, from: Height, to: Height) -> Cents { self.range_max(from.to_usize(), to.to_usize()) } } @@ -105,35 +105,31 @@ pub struct ComputeContext { /// Pre-computed height -> timestamp mapping pub height_to_timestamp: Vec, - /// Pre-computed height -> price mapping (if available) - pub height_to_price: Option>, + /// Pre-computed height -> price mapping + pub height_to_price: Vec, /// Sparse table for O(1) range max queries on high prices. /// Used for computing max price during UTXO holding periods (peak regret). - pub price_range_max: Option, + pub price_range_max: PriceRangeMax, } impl ComputeContext { /// Create a new computation context. - pub fn new( + pub(crate) fn new( starting_height: Height, last_height: Height, blocks: &blocks::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, ) -> Self { let height_to_timestamp: Vec = - blocks.time.timestamp_monotonic.into_iter().collect(); + blocks.time.timestamp_monotonic.collect(); - let height_to_price: Option> = price - .map(|p| &p.cents.split.height.close) - .map(|v| v.into_iter().map(|c| *c).collect()); + let height_to_price: Vec = + prices.cents.price.collect(); - // Build sparse table for O(1) range max queries on HIGH prices + // Build sparse table for O(1) range max queries on prices // Used for computing peak price during UTXO holding periods (peak regret) - let price_range_max = price - .map(|p| &p.cents.split.height.high) - .map(|v| v.into_iter().map(|c| *c).collect::>()) - .map(|prices| PriceRangeMax::build(&prices)); + let price_range_max = PriceRangeMax::build(&height_to_price); Self { starting_height, @@ -144,16 +140,13 @@ impl ComputeContext { } } - /// Get price at height (None if no price data or height out of range). - pub fn price_at(&self, height: Height) -> Option { - self.height_to_price - .as_ref()? - .get(height.to_usize()) - .copied() + /// Get price at height. 
+ pub(crate) fn price_at(&self, height: Height) -> Cents { + self.height_to_price[height.to_usize()] } /// Get timestamp at height. - pub fn timestamp_at(&self, height: Height) -> Timestamp { + pub(crate) fn timestamp_at(&self, height: Height) -> Timestamp { self.height_to_timestamp[height.to_usize()] } } diff --git a/crates/brk_computer/src/distribution/compute/mod.rs b/crates/brk_computer/src/distribution/compute/mod.rs index 4d8f11ad7..2a7c9c377 100644 --- a/crates/brk_computer/src/distribution/compute/mod.rs +++ b/crates/brk_computer/src/distribution/compute/mod.rs @@ -5,13 +5,13 @@ mod readers; mod recover; mod write; -pub use block_loop::process_blocks; -pub use context::{ComputeContext, PriceRangeMax}; -pub use readers::{ - TxInIterators, TxOutData, TxOutIterators, VecsReaders, build_txinindex_to_txindex, +pub(crate) use block_loop::process_blocks; +pub(crate) use context::{ComputeContext, PriceRangeMax}; +pub(crate) use readers::{ + TxInReaders, TxOutData, TxOutReaders, VecsReaders, build_txinindex_to_txindex, build_txoutindex_to_txindex, }; -pub use recover::{StartMode, determine_start_mode, recover_state, reset_state}; +pub(crate) use recover::{StartMode, determine_start_mode, recover_state, reset_state}; /// Flush checkpoint interval (every N blocks). 
pub const FLUSH_INTERVAL: usize = 10_000; diff --git a/crates/brk_computer/src/distribution/compute/readers.rs b/crates/brk_computer/src/distribution/compute/readers.rs index 868ef6168..b3b1cf5ed 100644 --- a/crates/brk_computer/src/distribution/compute/readers.rs +++ b/crates/brk_computer/src/distribution/compute/readers.rs @@ -1,12 +1,9 @@ use brk_cohort::{ByAddressType, ByAnyAddress}; use brk_indexer::Indexer; use brk_types::{ - Height, OutPoint, OutputType, Sats, StoredU64, TxInIndex, TxIndex, TxOutIndex, TypeIndex, -}; -use vecdb::{ - BoxedVecIterator, BytesVecIterator, GenericStoredVec, PcodecVecIterator, Reader, VecIndex, - VecIterator, + Height, OutPoint, OutputType, Sats, StoredU64, TxIndex, TypeIndex, }; +use vecdb::{Reader, ReadableVec, VecIndex}; use crate::{ distribution::{ @@ -24,94 +21,86 @@ pub struct TxOutData { pub typeindex: TypeIndex, } -/// Reusable iterators for txout vectors (16KB buffered reads). -/// -/// Iterators are created once and re-positioned each block to avoid -/// creating new file handles repeatedly. -pub struct TxOutIterators<'a> { - value_iter: BytesVecIterator<'a, TxOutIndex, Sats>, - outputtype_iter: BytesVecIterator<'a, TxOutIndex, OutputType>, - typeindex_iter: BytesVecIterator<'a, TxOutIndex, TypeIndex>, +/// Readers for txout vectors. Uses collect_range for bulk reads. +pub struct TxOutReaders<'a> { + indexer: &'a Indexer, } -impl<'a> TxOutIterators<'a> { - pub fn new(indexer: &'a Indexer) -> Self { - Self { - value_iter: indexer.vecs.outputs.value.into_iter(), - outputtype_iter: indexer.vecs.outputs.outputtype.into_iter(), - typeindex_iter: indexer.vecs.outputs.typeindex.into_iter(), - } +impl<'a> TxOutReaders<'a> { + pub(crate) fn new(indexer: &'a Indexer) -> Self { + Self { indexer } } - /// Collect output data for a block range using buffered iteration. - pub fn collect_block_outputs( - &mut self, + /// Collect output data for a block range using bulk reads. 
+ pub(crate) fn collect_block_outputs( + &self, first_txoutindex: usize, output_count: usize, ) -> Vec { - (first_txoutindex..first_txoutindex + output_count) - .map(|i| TxOutData { - value: self.value_iter.get_at_unwrap(i), - outputtype: self.outputtype_iter.get_at_unwrap(i), - typeindex: self.typeindex_iter.get_at_unwrap(i), + let end = first_txoutindex + output_count; + let values: Vec = self.indexer.vecs.outputs.value.collect_range_at(first_txoutindex, end); + let outputtypes: Vec = self.indexer.vecs.outputs.outputtype.collect_range_at(first_txoutindex, end); + let typeindexes: Vec = self.indexer.vecs.outputs.typeindex.collect_range_at(first_txoutindex, end); + + values + .into_iter() + .zip(outputtypes) + .zip(typeindexes) + .map(|((value, outputtype), typeindex)| TxOutData { + value, + outputtype, + typeindex, }) .collect() } } -/// Reusable iterators for txin vectors (PcoVec - avoids repeated page decompression). -pub struct TxInIterators<'a> { - value_iter: PcodecVecIterator<'a, TxInIndex, Sats>, - outpoint_iter: PcodecVecIterator<'a, TxInIndex, OutPoint>, - outputtype_iter: PcodecVecIterator<'a, TxInIndex, OutputType>, - typeindex_iter: PcodecVecIterator<'a, TxInIndex, TypeIndex>, +/// Readers for txin vectors. Uses collect_range for bulk reads. +pub struct TxInReaders<'a> { + indexer: &'a Indexer, + txins: &'a inputs::Vecs, txindex_to_height: &'a mut RangeMap, } -impl<'a> TxInIterators<'a> { - pub fn new( +impl<'a> TxInReaders<'a> { + pub(crate) fn new( indexer: &'a Indexer, txins: &'a inputs::Vecs, txindex_to_height: &'a mut RangeMap, ) -> Self { Self { - value_iter: txins.spent.value.into_iter(), - outpoint_iter: indexer.vecs.inputs.outpoint.into_iter(), - outputtype_iter: indexer.vecs.inputs.outputtype.into_iter(), - typeindex_iter: indexer.vecs.inputs.typeindex.into_iter(), + indexer, + txins, txindex_to_height, } } - /// Collect input data for a block range using buffered iteration. + /// Collect input data for a block range using bulk reads. 
/// Computes prev_height on-the-fly from outpoint using RangeMap lookup. - pub fn collect_block_inputs( + pub(crate) fn collect_block_inputs( &mut self, first_txinindex: usize, input_count: usize, current_height: Height, ) -> (Vec, Vec, Vec, Vec) { - let mut values = Vec::with_capacity(input_count); - let mut prev_heights = Vec::with_capacity(input_count); - let mut outputtypes = Vec::with_capacity(input_count); - let mut typeindexes = Vec::with_capacity(input_count); + let end = first_txinindex + input_count; + let values: Vec = self.txins.spent.value.collect_range_at(first_txinindex, end); + let outpoints: Vec = self.indexer.vecs.inputs.outpoint.collect_range_at(first_txinindex, end); + let outputtypes: Vec = self.indexer.vecs.inputs.outputtype.collect_range_at(first_txinindex, end); + let typeindexes: Vec = self.indexer.vecs.inputs.typeindex.collect_range_at(first_txinindex, end); - for i in first_txinindex..first_txinindex + input_count { - values.push(self.value_iter.get_at_unwrap(i)); - - let outpoint = self.outpoint_iter.get_at_unwrap(i); - let prev_height = if outpoint.is_coinbase() { - current_height - } else { - self.txindex_to_height - .get(outpoint.txindex()) - .unwrap_or(current_height) - }; - prev_heights.push(prev_height); - - outputtypes.push(self.outputtype_iter.get_at_unwrap(i)); - typeindexes.push(self.typeindex_iter.get_at_unwrap(i)); - } + let prev_heights: Vec = outpoints + .iter() + .map(|outpoint| { + if outpoint.is_coinbase() { + current_height + } else { + self.txindex_to_height + .get(outpoint.txindex()) + .unwrap_or(current_height) + } + }) + .collect(); (values, prev_heights, outputtypes, typeindexes) } @@ -124,7 +113,7 @@ pub struct VecsReaders { } impl VecsReaders { - pub fn new( + pub(crate) fn new( any_address_indexes: &AnyAddressIndexesVecs, addresses_data: &AddressesDataVecs, ) -> Self { @@ -147,51 +136,48 @@ impl VecsReaders { } /// Get reader for specific address type. 
- pub fn address_reader(&self, address_type: OutputType) -> &Reader { + pub(crate) fn address_reader(&self, address_type: OutputType) -> &Reader { self.addresstypeindex_to_anyaddressindex - .get_unwrap(address_type) + .get(address_type) + .unwrap() } } /// Build txoutindex -> txindex mapping for a block. -pub fn build_txoutindex_to_txindex<'a>( +pub(crate) fn build_txoutindex_to_txindex( block_first_txindex: TxIndex, block_tx_count: u64, - txindex_to_count: &mut BoxedVecIterator<'a, TxIndex, StoredU64>, + txindex_to_count: &impl ReadableVec, ) -> Vec { build_index_to_txindex(block_first_txindex, block_tx_count, txindex_to_count) } /// Build txinindex -> txindex mapping for a block. -pub fn build_txinindex_to_txindex<'a>( +pub(crate) fn build_txinindex_to_txindex( block_first_txindex: TxIndex, block_tx_count: u64, - txindex_to_count: &mut BoxedVecIterator<'a, TxIndex, StoredU64>, + txindex_to_count: &impl ReadableVec, ) -> Vec { build_index_to_txindex(block_first_txindex, block_tx_count, txindex_to_count) } /// Build index -> txindex mapping for a block (shared implementation). 
-fn build_index_to_txindex<'a>( +fn build_index_to_txindex( block_first_txindex: TxIndex, block_tx_count: u64, - txindex_to_count: &mut BoxedVecIterator<'a, TxIndex, StoredU64>, + txindex_to_count: &impl ReadableVec, ) -> Vec { let first = block_first_txindex.to_usize(); - let counts: Vec = (0..block_tx_count as usize) - .map(|offset| { - let txindex = TxIndex::from(first + offset); - u64::from(txindex_to_count.get_unwrap(txindex)) - }) - .collect(); + let counts: Vec = + txindex_to_count.collect_range_at(first, first + block_tx_count as usize); - let total: u64 = counts.iter().sum(); + let total: u64 = counts.iter().map(|c| u64::from(*c)).sum(); let mut result = Vec::with_capacity(total as usize); - for (offset, &count) in counts.iter().enumerate() { + for (offset, count) in counts.iter().enumerate() { let txindex = TxIndex::from(first + offset); - result.extend(std::iter::repeat_n(txindex, count as usize)); + result.extend(std::iter::repeat_n(txindex, u64::from(*count) as usize)); } result diff --git a/crates/brk_computer/src/distribution/compute/recover.rs b/crates/brk_computer/src/distribution/compute/recover.rs index d28fb8efa..87eb4a6bb 100644 --- a/crates/brk_computer/src/distribution/compute/recover.rs +++ b/crates/brk_computer/src/distribution/compute/recover.rs @@ -22,7 +22,7 @@ pub struct RecoveredState { /// Rolls back state vectors and imports cohort states. /// Validates that all rollbacks and imports are consistent. /// Returns Height::ZERO if any validation fails (triggers fresh start). -pub fn recover_state( +pub(crate) fn recover_state( height: Height, chain_state_rollback: vecdb::Result, any_address_indexes: &mut AnyAddressIndexesVecs, @@ -98,7 +98,7 @@ pub fn recover_state( /// Reset all state for fresh start. /// /// Resets all state vectors and cohort states. 
-pub fn reset_state( +pub(crate) fn reset_state( any_address_indexes: &mut AnyAddressIndexesVecs, addresses_data: &mut AddressesDataVecs, utxo_cohorts: &mut UTXOCohorts, @@ -125,7 +125,7 @@ pub fn reset_state( /// /// - `min_available`: minimum height we have data for across all stateful vecs /// - `resume_target`: the height we want to resume processing from -pub fn determine_start_mode(min_available: Height, resume_target: Height) -> StartMode { +pub(crate) fn determine_start_mode(min_available: Height, resume_target: Height) -> StartMode { // No data to resume from if resume_target.is_zero() { return StartMode::Fresh; diff --git a/crates/brk_computer/src/distribution/compute/write.rs b/crates/brk_computer/src/distribution/compute/write.rs index fa1bbe5e7..a6c9162cf 100644 --- a/crates/brk_computer/src/distribution/compute/write.rs +++ b/crates/brk_computer/src/distribution/compute/write.rs @@ -4,7 +4,7 @@ use brk_error::Result; use brk_types::Height; use rayon::prelude::*; use tracing::info; -use vecdb::{AnyStoredVec, GenericStoredVec, Stamp}; +use vecdb::{AnyStoredVec, WritableVec, Stamp}; use crate::distribution::{ Vecs, @@ -25,7 +25,7 @@ use super::super::address::{AddressTypeToTypeIndexMap, AddressesDataVecs, AnyAdd /// - Updates address indexes /// /// Call this before `flush()` to prepare data for writing. -pub fn process_address_updates( +pub(crate) fn process_address_updates( addresses_data: &mut AddressesDataVecs, address_indexes: &mut AnyAddressIndexesVecs, empty_updates: AddressTypeToTypeIndexMap, @@ -53,7 +53,7 @@ pub fn process_address_updates( /// - Chain state /// /// Set `with_changes=true` near chain tip to enable rollback support. 
-pub fn write( +pub(crate) fn write( vecs: &mut Vecs, height: Height, chain_state: &[BlockState], diff --git a/crates/brk_computer/src/distribution/metrics/activity.rs b/crates/brk_computer/src/distribution/metrics/activity.rs index a28794607..d48bfa794 100644 --- a/crates/brk_computer/src/distribution/metrics/activity.rs +++ b/crates/brk_computer/src/distribution/metrics/activity.rs @@ -2,54 +2,53 @@ use brk_error::Result; use brk_traversable::Traversable; use brk_types::{Bitcoin, Height, Sats, StoredF64, Version}; use rayon::prelude::*; -use vecdb::{AnyStoredVec, AnyVec, EagerVec, Exit, GenericStoredVec, ImportableVec, PcoVec}; +use vecdb::{AnyStoredVec, AnyVec, EagerVec, Exit, ImportableVec, PcoVec, Rw, StorageMode, WritableVec}; use crate::{ - ComputeIndexes, indexes, - internal::{ComputedFromHeightSumCum, LazyComputedValueFromHeightSumCum, ValueFromDateLast}, + ComputeIndexes, blocks, + internal::{ComputedFromHeightSumCum, LazyComputedValueFromHeightSumCum, ValueEmaFromHeight}, }; use super::ImportConfig; /// Activity metrics for a cohort. 
-#[derive(Clone, Traversable)] -pub struct ActivityMetrics { +#[derive(Traversable)] +pub struct ActivityMetrics { /// Total satoshis sent at each height + derived indexes - pub sent: LazyComputedValueFromHeightSumCum, + pub sent: LazyComputedValueFromHeightSumCum, /// 14-day EMA of sent supply (sats, btc, usd) - pub sent_14d_ema: ValueFromDateLast, + pub sent_14d_ema: ValueEmaFromHeight, /// Satoshi-blocks destroyed (supply * blocks_old when spent) - pub satblocks_destroyed: EagerVec>, + pub satblocks_destroyed: M::Stored>>, /// Satoshi-days destroyed (supply * days_old when spent) - pub satdays_destroyed: EagerVec>, + pub satdays_destroyed: M::Stored>>, /// Coin-blocks destroyed (in BTC rather than sats) - pub coinblocks_destroyed: ComputedFromHeightSumCum, + pub coinblocks_destroyed: ComputedFromHeightSumCum, /// Coin-days destroyed (in BTC rather than sats) - pub coindays_destroyed: ComputedFromHeightSumCum, + pub coindays_destroyed: ComputedFromHeightSumCum, } impl ActivityMetrics { /// Import activity metrics from database. - pub fn forced_import(cfg: &ImportConfig) -> Result { + pub(crate) fn forced_import(cfg: &ImportConfig) -> Result { Ok(Self { sent: LazyComputedValueFromHeightSumCum::forced_import( cfg.db, &cfg.name("sent"), cfg.version, cfg.indexes, - cfg.price, + cfg.prices, )?, - sent_14d_ema: ValueFromDateLast::forced_import( + sent_14d_ema: ValueEmaFromHeight::forced_import( cfg.db, &cfg.name("sent_14d_ema"), cfg.version, - cfg.compute_dollars(), cfg.indexes, )?, @@ -82,7 +81,7 @@ impl ActivityMetrics { } /// Get minimum length across height-indexed vectors. - pub fn min_len(&self) -> usize { + pub(crate) fn min_len(&self) -> usize { self.sent .sats .height @@ -92,7 +91,7 @@ impl ActivityMetrics { } /// Push activity state values to height-indexed vectors. 
- pub fn truncate_push( + pub(crate) fn truncate_push( &mut self, height: Height, sent: Sats, @@ -108,7 +107,7 @@ impl ActivityMetrics { } /// Returns a parallel iterator over all vecs for parallel writing. - pub fn par_iter_mut(&mut self) -> impl ParallelIterator { + pub(crate) fn par_iter_mut(&mut self) -> impl ParallelIterator { vec![ &mut self.sent.sats.height as &mut dyn AnyStoredVec, &mut self.satblocks_destroyed as &mut dyn AnyStoredVec, @@ -118,13 +117,13 @@ impl ActivityMetrics { } /// Validate computed versions against base version. - pub fn validate_computed_versions(&mut self, _base_version: Version) -> Result<()> { + pub(crate) fn validate_computed_versions(&mut self, _base_version: Version) -> Result<()> { // Validation logic for computed vecs Ok(()) } /// Compute aggregate values from separate cohorts. - pub fn compute_from_stateful( + pub(crate) fn compute_from_stateful( &mut self, starting_indexes: &ComputeIndexes, others: &[&Self], @@ -158,44 +157,42 @@ impl ActivityMetrics { } /// First phase of computed metrics (indexes from height). 
- pub fn compute_rest_part1( + pub(crate) fn compute_rest_part1( &mut self, - indexes: &indexes::Vecs, + blocks: &blocks::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.sent.compute_rest(indexes, starting_indexes, exit)?; + self.sent.compute_cumulative(starting_indexes, exit)?; - // 14-day EMA of sent (sats and dollars) - self.sent_14d_ema.compute_ema( - starting_indexes.dateindex, - &self.sent.sats.dateindex.sum.0, - self.sent.dollars.as_ref().map(|d| &d.dateindex.sum.0), - 14, + // 14-day rolling average of sent (sats and dollars) + self.sent_14d_ema.compute_rolling_average( + starting_indexes.height, + &blocks.count.height_2w_ago, + &self.sent.sats.height, + &self.sent.usd.height, exit, )?; - self.coinblocks_destroyed - .compute_all(indexes, starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.height, - &self.satblocks_destroyed, - |(i, v, ..)| (i, StoredF64::from(Bitcoin::from(v))), - exit, - )?; - Ok(()) - })?; + self.coinblocks_destroyed.compute(starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.height, + &self.satblocks_destroyed, + |(i, v, ..)| (i, StoredF64::from(Bitcoin::from(v))), + exit, + )?; + Ok(()) + })?; - self.coindays_destroyed - .compute_all(indexes, starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.height, - &self.satdays_destroyed, - |(i, v, ..)| (i, StoredF64::from(Bitcoin::from(v))), - exit, - )?; - Ok(()) - })?; + self.coindays_destroyed.compute(starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.height, + &self.satdays_destroyed, + |(i, v, ..)| (i, StoredF64::from(Bitcoin::from(v))), + exit, + )?; + Ok(()) + })?; Ok(()) } diff --git a/crates/brk_computer/src/distribution/metrics/config.rs b/crates/brk_computer/src/distribution/metrics/config.rs index 9e9236da7..f2cb4aeee 100644 --- a/crates/brk_computer/src/distribution/metrics/config.rs +++ b/crates/brk_computer/src/distribution/metrics/config.rs @@ -1,8 +1,8 @@ -use 
brk_cohort::{CohortContext, Filter, TimeFilter}; +use brk_cohort::{CohortContext, Filter}; use brk_types::Version; use vecdb::Database; -use crate::{indexes, price}; +use crate::{indexes, prices}; use super::RealizedMetrics; @@ -14,40 +14,35 @@ pub struct ImportConfig<'a> { pub context: CohortContext, pub version: Version, pub indexes: &'a indexes::Vecs, - pub price: Option<&'a price::Vecs>, + pub prices: &'a prices::Vecs, /// Source for lazy adjusted computation: adjusted = cohort - up_to_1h. /// Required for cohorts where `compute_adjusted()` is true. pub up_to_1h_realized: Option<&'a RealizedMetrics>, } impl<'a> ImportConfig<'a> { - /// Whether price data is available (enables realized/unrealized metrics). - pub fn compute_dollars(&self) -> bool { - self.price.is_some() - } - /// Whether this is an extended cohort (more relative metrics). - pub fn extended(&self) -> bool { + pub(crate) fn extended(&self) -> bool { self.filter.is_extended(self.context) } /// Whether to compute relative-to-all metrics. - pub fn compute_rel_to_all(&self) -> bool { + pub(crate) fn compute_rel_to_all(&self) -> bool { self.filter.compute_rel_to_all() } /// Whether to compute adjusted metrics (SOPR, etc.). - pub fn compute_adjusted(&self) -> bool { + pub(crate) fn compute_adjusted(&self) -> bool { self.filter.compute_adjusted(self.context) } /// Whether to compute relative metrics (invested capital %, NUPL ratios, etc.). - pub fn compute_relative(&self) -> bool { + pub(crate) fn compute_relative(&self) -> bool { self.filter.compute_relative() } /// Get full metric name with filter prefix. - pub fn name(&self, suffix: &str) -> String { + pub(crate) fn name(&self, suffix: &str) -> String { if self.full_name.is_empty() { suffix.to_string() } else if suffix.is_empty() { @@ -60,20 +55,9 @@ impl<'a> ImportConfig<'a> { /// Whether this cohort needs peak_regret metric. /// True for UTXO cohorts with age-based filters (all, term, time). 
/// age_range cohorts compute directly, others aggregate from age_range. - pub fn compute_peak_regret(&self) -> bool { + pub(crate) fn compute_peak_regret(&self) -> bool { matches!(self.context, CohortContext::Utxo) - && matches!( - self.filter, - Filter::All | Filter::Term(_) | Filter::Time(_) - ) + && matches!(self.filter, Filter::All | Filter::Term(_) | Filter::Time(_)) } - /// Whether this is an age_range cohort (UTXO context with Time::Range filter). - /// These cohorts have peak_regret computed directly from chain_state. - pub fn is_age_range(&self) -> bool { - matches!( - (&self.context, &self.filter), - (CohortContext::Utxo, Filter::Time(TimeFilter::Range(_))) - ) - } } diff --git a/crates/brk_computer/src/distribution/metrics/cost_basis.rs b/crates/brk_computer/src/distribution/metrics/cost_basis.rs index db1d9973b..c9249be1e 100644 --- a/crates/brk_computer/src/distribution/metrics/cost_basis.rs +++ b/crates/brk_computer/src/distribution/metrics/cost_basis.rs @@ -1,15 +1,14 @@ use brk_error::Result; use brk_traversable::Traversable; -use brk_types::{DateIndex, Dollars, Height, StoredF32, Version}; +use brk_types::{Dollars, Height, StoredF32, Version}; use rayon::prelude::*; -use vecdb::{AnyStoredVec, AnyVec, Exit, GenericStoredVec}; +use vecdb::{AnyStoredVec, AnyVec, Exit, Rw, StorageMode, WritableVec}; use crate::{ ComputeIndexes, distribution::state::CohortState, - indexes, internal::{ - ComputedFromDateLast, PERCENTILES_LEN, PercentilesVecs, PriceFromHeight, + ComputedFromHeightLast, PERCENTILES_LEN, Price, PriceFromHeight, PercentilesVecs, compute_spot_percentile_rank, }, }; @@ -17,30 +16,30 @@ use crate::{ use super::ImportConfig; /// Cost basis metrics. 
-#[derive(Clone, Traversable)] -pub struct CostBasisMetrics { +#[derive(Traversable)] +pub struct CostBasisMetrics { /// Minimum cost basis for any UTXO at this height - pub min: PriceFromHeight, + pub min: Price>, /// Maximum cost basis for any UTXO at this height - pub max: PriceFromHeight, + pub max: Price>, /// Cost basis percentiles (sat-weighted) - pub percentiles: Option, + pub percentiles: Option>, /// Invested capital percentiles (USD-weighted) - pub invested_capital: Option, + pub invested_capital: Option>, /// What percentile of cost basis is below spot (sat-weighted) - pub spot_cost_basis_percentile: Option>, + pub spot_cost_basis_percentile: Option>, /// What percentile of invested capital is below spot (USD-weighted) - pub spot_invested_capital_percentile: Option>, + pub spot_invested_capital_percentile: Option>, } impl CostBasisMetrics { /// Import cost basis metrics from database. - pub fn forced_import(cfg: &ImportConfig) -> Result { + pub(crate) fn forced_import(cfg: &ImportConfig) -> Result { let extended = cfg.extended(); Ok(Self { @@ -80,7 +79,7 @@ impl CostBasisMetrics { .transpose()?, spot_cost_basis_percentile: extended .then(|| { - ComputedFromDateLast::forced_import( + ComputedFromHeightLast::forced_import( cfg.db, &cfg.name("spot_cost_basis_percentile"), cfg.version, @@ -90,7 +89,7 @@ impl CostBasisMetrics { .transpose()?, spot_invested_capital_percentile: extended .then(|| { - ComputedFromDateLast::forced_import( + ComputedFromHeightLast::forced_import( cfg.db, &cfg.name("spot_invested_capital_percentile"), cfg.version, @@ -102,38 +101,25 @@ impl CostBasisMetrics { } /// Get minimum length across height-indexed vectors written in block loop. - pub fn min_stateful_height_len(&self) -> usize { - self.min.height.len().min(self.max.height.len()) - } - - /// Get minimum length across dateindex-indexed vectors written in block loop. 
- pub fn min_stateful_dateindex_len(&self) -> usize { - self.percentiles - .as_ref() - .map(|p| p.min_stateful_dateindex_len()) - .unwrap_or(usize::MAX) - .min( - self.invested_capital - .as_ref() - .map(|p| p.min_stateful_dateindex_len()) - .unwrap_or(usize::MAX), - ) - .min( - self.spot_cost_basis_percentile - .as_ref() - .map(|v| v.dateindex.len()) - .unwrap_or(usize::MAX), - ) - .min( - self.spot_invested_capital_percentile - .as_ref() - .map(|v| v.dateindex.len()) - .unwrap_or(usize::MAX), - ) + pub(crate) fn min_stateful_height_len(&self) -> usize { + let mut min = self.min.height.len().min(self.max.height.len()); + if let Some(v) = &self.spot_cost_basis_percentile { + min = min.min(v.height.len()); + } + if let Some(v) = &self.spot_invested_capital_percentile { + min = min.min(v.height.len()); + } + if let Some(p) = &self.percentiles { + min = min.min(p.min_stateful_height_len()); + } + if let Some(p) = &self.invested_capital { + min = min.min(p.min_stateful_height_len()); + } + min } /// Push min/max cost basis from state. - pub fn truncate_push_minmax(&mut self, height: Height, state: &CohortState) -> Result<()> { + pub(crate) fn truncate_push_minmax(&mut self, height: Height, state: &CohortState) -> Result<()> { self.min.height.truncate_push( height, state @@ -151,49 +137,48 @@ impl CostBasisMetrics { Ok(()) } - /// Push cost basis percentiles from state at date boundary. - /// Only called when at the last height of a day. - pub fn truncate_push_percentiles( + /// Push cost basis percentiles and spot ranks at every height. 
+ pub(crate) fn truncate_push_percentiles( &mut self, - dateindex: DateIndex, - state: &CohortState, + height: Height, + state: &mut CohortState, spot: Dollars, ) -> Result<()> { let computed = state.compute_percentiles(); - // Push sat-weighted percentiles and spot rank + // Sat-weighted percentiles and spot rank let sat_prices = computed .as_ref() .map(|p| p.sat_weighted.map(|c| c.to_dollars())) .unwrap_or([Dollars::NAN; PERCENTILES_LEN]); if let Some(percentiles) = self.percentiles.as_mut() { - percentiles.truncate_push(dateindex, &sat_prices)?; + percentiles.truncate_push(height, &sat_prices)?; } if let Some(spot_pct) = self.spot_cost_basis_percentile.as_mut() { let rank = compute_spot_percentile_rank(&sat_prices, spot); - spot_pct.dateindex.truncate_push(dateindex, rank)?; + spot_pct.height.truncate_push(height, rank)?; } - // Push USD-weighted percentiles and spot rank + // USD-weighted percentiles and spot rank let usd_prices = computed .as_ref() .map(|p| p.usd_weighted.map(|c| c.to_dollars())) .unwrap_or([Dollars::NAN; PERCENTILES_LEN]); if let Some(invested_capital) = self.invested_capital.as_mut() { - invested_capital.truncate_push(dateindex, &usd_prices)?; + invested_capital.truncate_push(height, &usd_prices)?; } if let Some(spot_pct) = self.spot_invested_capital_percentile.as_mut() { let rank = compute_spot_percentile_rank(&usd_prices, spot); - spot_pct.dateindex.truncate_push(dateindex, rank)?; + spot_pct.height.truncate_push(height, rank)?; } Ok(()) } /// Returns a parallel iterator over all vecs for parallel writing. 
- pub fn par_iter_mut(&mut self) -> impl ParallelIterator { + pub(crate) fn par_iter_mut(&mut self) -> impl ParallelIterator { let mut vecs: Vec<&mut dyn AnyStoredVec> = vec![&mut self.min.height, &mut self.max.height]; if let Some(percentiles) = self.percentiles.as_mut() { vecs.extend( @@ -201,7 +186,7 @@ impl CostBasisMetrics { .vecs .iter_mut() .flatten() - .map(|v| &mut v.dateindex as &mut dyn AnyStoredVec), + .map(|v| &mut v.height as &mut dyn AnyStoredVec), ); } if let Some(invested_capital) = self.invested_capital.as_mut() { @@ -210,20 +195,20 @@ impl CostBasisMetrics { .vecs .iter_mut() .flatten() - .map(|v| &mut v.dateindex as &mut dyn AnyStoredVec), + .map(|v| &mut v.height as &mut dyn AnyStoredVec), ); } if let Some(v) = self.spot_cost_basis_percentile.as_mut() { - vecs.push(&mut v.dateindex); + vecs.push(&mut v.height); } if let Some(v) = self.spot_invested_capital_percentile.as_mut() { - vecs.push(&mut v.dateindex); + vecs.push(&mut v.height); } vecs.into_par_iter() } /// Validate computed versions or reset if mismatched. - pub fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> { + pub(crate) fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> { if let Some(percentiles) = self.percentiles.as_mut() { percentiles.validate_computed_version_or_reset(base_version)?; } @@ -231,16 +216,18 @@ impl CostBasisMetrics { invested_capital.validate_computed_version_or_reset(base_version)?; } if let Some(v) = self.spot_cost_basis_percentile.as_mut() { - v.dateindex.validate_computed_version_or_reset(base_version)?; + v.height + .validate_computed_version_or_reset(base_version)?; } if let Some(v) = self.spot_invested_capital_percentile.as_mut() { - v.dateindex.validate_computed_version_or_reset(base_version)?; + v.height + .validate_computed_version_or_reset(base_version)?; } Ok(()) } /// Compute aggregate values from separate cohorts. 
- pub fn compute_from_stateful( + pub(crate) fn compute_from_stateful( &mut self, starting_indexes: &ComputeIndexes, others: &[&Self], @@ -258,16 +245,4 @@ impl CostBasisMetrics { )?; Ok(()) } - - /// First phase of computed metrics (indexes from height). - pub fn compute_rest_part1( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.min.compute_rest(indexes, starting_indexes, exit)?; - self.max.compute_rest(indexes, starting_indexes, exit)?; - Ok(()) - } } diff --git a/crates/brk_computer/src/distribution/metrics/mod.rs b/crates/brk_computer/src/distribution/metrics/mod.rs index 26913e7c5..36cd8c5a3 100644 --- a/crates/brk_computer/src/distribution/metrics/mod.rs +++ b/crates/brk_computer/src/distribution/metrics/mod.rs @@ -19,38 +19,38 @@ pub use unrealized::*; use brk_cohort::Filter; use brk_error::Result; use brk_traversable::Traversable; -use brk_types::{CentsUnsigned, DateIndex, Dollars, Height, Version}; +use brk_types::{Cents, Dollars, Height, Version}; use rayon::prelude::*; -use vecdb::{AnyStoredVec, Exit, IterableVec}; +use vecdb::{AnyStoredVec, Exit, ReadableVec, Rw, StorageMode}; -use crate::{ComputeIndexes, distribution::state::CohortState, indexes, price as price_vecs}; +use crate::{ComputeIndexes, blocks, distribution::state::CohortState, prices}; /// All metrics for a cohort, organized by category. 
-#[derive(Clone, Traversable)] -pub struct CohortMetrics { +#[derive(Traversable)] +pub struct CohortMetrics { #[traversable(skip)] pub filter: Filter, /// Supply metrics (always computed) - pub supply: SupplyMetrics, + pub supply: Box>, /// Output metrics - UTXO count (always computed) - pub outputs: OutputsMetrics, + pub outputs: Box>, /// Transaction activity (always computed) - pub activity: ActivityMetrics, + pub activity: Box>, - /// Realized cap and profit/loss (requires price data) - pub realized: Option, + /// Realized cap and profit/loss + pub realized: Box>, - /// Unrealized profit/loss (requires price data) - pub unrealized: Option, + /// Unrealized profit/loss + pub unrealized: Box>, - /// Cost basis metrics (requires price data) - pub cost_basis: Option, + /// Cost basis metrics + pub cost_basis: Box>, - /// Relative metrics (requires price data) - pub relative: Option, + /// Relative metrics (not all cohorts compute this) + pub relative: Option>, } impl CohortMetrics { @@ -59,83 +59,51 @@ impl CohortMetrics { /// `all_supply` is the supply metrics from the "all" cohort, used as global /// sources for `*_rel_to_market_cap` and `*_rel_to_circulating_supply` ratios. /// Pass `None` for the "all" cohort itself. 
- pub fn forced_import(cfg: &ImportConfig, all_supply: Option<&SupplyMetrics>) -> Result { - let compute_dollars = cfg.compute_dollars(); - + pub(crate) fn forced_import(cfg: &ImportConfig, all_supply: Option<&SupplyMetrics>) -> Result { let supply = SupplyMetrics::forced_import(cfg)?; let outputs = OutputsMetrics::forced_import(cfg)?; - let unrealized = compute_dollars - .then(|| UnrealizedMetrics::forced_import(cfg)) - .transpose()?; + let unrealized = UnrealizedMetrics::forced_import(cfg)?; + let realized = RealizedMetrics::forced_import(cfg)?; - let realized = compute_dollars - .then(|| RealizedMetrics::forced_import(cfg)) - .transpose()?; - - let relative = (cfg.compute_relative() && unrealized.is_some()) + let relative = cfg + .compute_relative() .then(|| { RelativeMetrics::forced_import( cfg, - unrealized.as_ref().unwrap(), + &unrealized, &supply, all_supply, - realized.as_ref(), + Some(&realized), ) }) .transpose()?; Ok(Self { filter: cfg.filter.clone(), - supply, - outputs, - activity: ActivityMetrics::forced_import(cfg)?, - realized, - cost_basis: compute_dollars - .then(|| CostBasisMetrics::forced_import(cfg)) - .transpose()?, - relative, - unrealized, + supply: Box::new(supply), + outputs: Box::new(outputs), + activity: Box::new(ActivityMetrics::forced_import(cfg)?), + realized: Box::new(realized), + cost_basis: Box::new(CostBasisMetrics::forced_import(cfg)?), + relative: relative.map(Box::new), + unrealized: Box::new(unrealized), }) } /// Get minimum length across height-indexed vectors written in block loop. 
- pub fn min_stateful_height_len(&self) -> usize { - let mut min = self - .supply + pub(crate) fn min_stateful_height_len(&self) -> usize { + self.supply .min_len() .min(self.outputs.min_len()) - .min(self.activity.min_len()); - - if let Some(realized) = &self.realized { - min = min.min(realized.min_stateful_height_len()); - } - if let Some(unrealized) = &self.unrealized { - min = min.min(unrealized.min_stateful_height_len()); - } - if let Some(cost_basis) = &self.cost_basis { - min = min.min(cost_basis.min_stateful_height_len()); - } - - min - } - - /// Get minimum length across dateindex-indexed vectors written in block loop. - pub fn min_stateful_dateindex_len(&self) -> usize { - let mut min = usize::MAX; - - if let Some(unrealized) = &self.unrealized { - min = min.min(unrealized.min_stateful_dateindex_len()); - } - if let Some(cost_basis) = &self.cost_basis { - min = min.min(cost_basis.min_stateful_dateindex_len()); - } - - min + .min(self.activity.min_len()) + .min(self.realized.min_stateful_height_len()) + .min(self.unrealized.min_stateful_height_len()) + .min(self.cost_basis.min_stateful_height_len()) } /// Push state values to height-indexed vectors. - pub fn truncate_push(&mut self, height: Height, state: &CohortState) -> Result<()> { + pub(crate) fn truncate_push(&mut self, height: Height, state: &CohortState) -> Result<()> { self.supply.truncate_push(height, state.supply.value)?; self.outputs .truncate_push(height, state.supply.utxo_count)?; @@ -146,99 +114,61 @@ impl CohortMetrics { state.satdays_destroyed, )?; - if let (Some(realized), Some(realized_state)) = - (self.realized.as_mut(), state.realized.as_ref()) - { - realized.truncate_push(height, realized_state)?; - } + self.realized.truncate_push(height, &state.realized)?; Ok(()) } /// Returns a parallel iterator over all vecs for parallel writing. 
- pub fn par_iter_mut(&mut self) -> impl ParallelIterator { + pub(crate) fn par_iter_mut(&mut self) -> impl ParallelIterator { let mut vecs: Vec<&mut dyn AnyStoredVec> = Vec::new(); vecs.extend(self.supply.par_iter_mut().collect::>()); vecs.extend(self.outputs.par_iter_mut().collect::>()); vecs.extend(self.activity.par_iter_mut().collect::>()); - - if let Some(realized) = self.realized.as_mut() { - vecs.extend(realized.par_iter_mut().collect::>()); - } - - if let Some(unrealized) = self.unrealized.as_mut() { - vecs.extend(unrealized.par_iter_mut().collect::>()); - } - - if let Some(cost_basis) = self.cost_basis.as_mut() { - vecs.extend(cost_basis.par_iter_mut().collect::>()); - } + vecs.extend(self.realized.par_iter_mut().collect::>()); + vecs.extend(self.unrealized.par_iter_mut().collect::>()); + vecs.extend(self.cost_basis.par_iter_mut().collect::>()); vecs.into_par_iter() } /// Validate computed versions against base version. - pub fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> { + pub(crate) fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> { self.supply.validate_computed_versions(base_version)?; self.activity.validate_computed_versions(base_version)?; - - if let Some(realized) = self.realized.as_mut() { - realized.validate_computed_versions(base_version)?; - } - - if let Some(cost_basis) = self.cost_basis.as_mut() { - cost_basis.validate_computed_versions(base_version)?; - } + self.realized.validate_computed_versions(base_version)?; + self.cost_basis.validate_computed_versions(base_version)?; Ok(()) } - /// Compute and push unrealized states. - /// Percentiles are only computed at date boundaries (when dateindex is Some). - pub fn compute_then_truncate_push_unrealized_states( + /// Compute and push unrealized states and percentiles. 
+ pub(crate) fn compute_then_truncate_push_unrealized_states( &mut self, height: Height, - height_price: Option, - dateindex: Option, - date_price: Option>, + height_price: Cents, state: &mut CohortState, ) -> Result<()> { // Apply pending updates before reading state.apply_pending(); - if let (Some(unrealized), Some(cost_basis), Some(height_price)) = ( - self.unrealized.as_mut(), - self.cost_basis.as_mut(), - height_price, - ) { - cost_basis.truncate_push_minmax(height, state)?; + self.cost_basis.truncate_push_minmax(height, state)?; - let (height_unrealized_state, date_unrealized_state) = - state.compute_unrealized_states(height_price, date_price.unwrap()); + let (height_unrealized_state, _) = state.compute_unrealized_states(height_price, None); - unrealized.truncate_push( - height, - dateindex, - &height_unrealized_state, - date_unrealized_state.as_ref(), - )?; + self.unrealized + .truncate_push(height, &height_unrealized_state)?; - // Only compute expensive percentiles at date boundaries (~144x reduction) - if let Some(dateindex) = dateindex { - let spot = date_price - .unwrap() - .map(|c| c.to_dollars()) - .unwrap_or(Dollars::NAN); - cost_basis.truncate_push_percentiles(dateindex, state, spot)?; - } - } + let spot = height_price.to_dollars(); + self.cost_basis + .truncate_push_percentiles(height, state, spot)?; Ok(()) } /// Compute aggregate cohort values from separate cohorts. 
- pub fn compute_from_stateful( + pub(crate) fn compute_from_stateful( &mut self, starting_indexes: &ComputeIndexes, others: &[&Self], @@ -246,52 +176,35 @@ impl CohortMetrics { ) -> Result<()> { self.supply.compute_from_stateful( starting_indexes, - &others.iter().map(|v| &v.supply).collect::>(), + &others.iter().map(|v| &*v.supply).collect::>(), exit, )?; self.outputs.compute_from_stateful( starting_indexes, - &others.iter().map(|v| &v.outputs).collect::>(), + &others.iter().map(|v| &*v.outputs).collect::>(), exit, )?; self.activity.compute_from_stateful( starting_indexes, - &others.iter().map(|v| &v.activity).collect::>(), + &others.iter().map(|v| &*v.activity).collect::>(), exit, )?; - if let Some(realized) = self.realized.as_mut() { - realized.compute_from_stateful( - starting_indexes, - &others - .iter() - .filter_map(|v| v.realized.as_ref()) - .collect::>(), - exit, - )?; - } - - if let Some(unrealized) = self.unrealized.as_mut() { - unrealized.compute_from_stateful( - starting_indexes, - &others - .iter() - .filter_map(|v| v.unrealized.as_ref()) - .collect::>(), - exit, - )?; - } - - if let Some(cost_basis) = self.cost_basis.as_mut() { - cost_basis.compute_from_stateful( - starting_indexes, - &others - .iter() - .filter_map(|v| v.cost_basis.as_ref()) - .collect::>(), - exit, - )?; - } + self.realized.compute_from_stateful( + starting_indexes, + &others.iter().map(|v| &*v.realized).collect::>(), + exit, + )?; + self.unrealized.compute_from_stateful( + starting_indexes, + &others.iter().map(|v| &*v.unrealized).collect::>(), + exit, + )?; + self.cost_basis.compute_from_stateful( + starting_indexes, + &others.iter().map(|v| &*v.cost_basis).collect::>(), + exit, + )?; Ok(()) } @@ -302,115 +215,80 @@ impl CohortMetrics { /// the range of components due to asymmetric weighting. This computes net_sentiment /// as a proper weighted average using realized_cap as weight. 
/// - /// Only computes height; dateindex derivation is done separately via compute_net_sentiment_rest. - pub fn compute_net_sentiment_from_others( + /// Only computes height; day1 derivation is done separately via compute_net_sentiment_rest. + pub(crate) fn compute_net_sentiment_from_others( &mut self, starting_indexes: &ComputeIndexes, others: &[&Self], exit: &Exit, ) -> Result<()> { - let Some(unrealized) = self.unrealized.as_mut() else { - return Ok(()); - }; - let weights: Vec<_> = others .iter() - .filter_map(|o| Some(&o.realized.as_ref()?.realized_cap.height)) + .map(|o| &o.realized.realized_cap.height) .collect(); let values: Vec<_> = others .iter() - .filter_map(|o| Some(&o.unrealized.as_ref()?.net_sentiment.height)) + .map(|o| &o.unrealized.net_sentiment.height) .collect(); - if weights.len() != others.len() || values.len() != others.len() { - return Ok(()); - } - - Ok(unrealized + self.unrealized .net_sentiment .height - .compute_weighted_average_of_others(starting_indexes.height, &weights, &values, exit)?) + .compute_weighted_average_of_others(starting_indexes.height, &weights, &values, exit)?; + + Ok(()) } /// First phase of computed metrics (indexes from height). 
- pub fn compute_rest_part1( + pub(crate) fn compute_rest_part1( &mut self, - indexes: &indexes::Vecs, - price: Option<&price_vecs::Vecs>, + blocks: &blocks::Vecs, + prices: &prices::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.supply - .compute_rest_part1(indexes, starting_indexes, exit)?; - self.outputs.compute_rest(indexes, starting_indexes, exit)?; - self.activity - .compute_rest_part1(indexes, starting_indexes, exit)?; + self.supply.compute_rest_part1(blocks, starting_indexes, exit)?; + self.outputs.compute_rest(blocks, starting_indexes, exit)?; + self.activity.compute_rest_part1(blocks, starting_indexes, exit)?; - if let Some(realized) = self.realized.as_mut() { - realized.compute_rest_part1(indexes, starting_indexes, exit)?; - } + self.realized.compute_rest_part1(starting_indexes, exit)?; - if let Some(unrealized) = self.unrealized.as_mut() { - unrealized.compute_rest(indexes, price, starting_indexes, exit)?; - } - - if let Some(cost_basis) = self.cost_basis.as_mut() { - cost_basis.compute_rest_part1(indexes, starting_indexes, exit)?; - } + self.unrealized + .compute_rest(prices, starting_indexes, exit)?; Ok(()) } /// Second phase of computed metrics (ratios, relative values). 
- #[allow(clippy::too_many_arguments)] - pub fn compute_rest_part2( + pub(crate) fn compute_rest_part2( &mut self, - indexes: &indexes::Vecs, - price: Option<&price_vecs::Vecs>, + blocks: &blocks::Vecs, + prices: &prices::Vecs, starting_indexes: &ComputeIndexes, - height_to_market_cap: Option<&impl IterableVec>, - dateindex_to_market_cap: Option<&impl IterableVec>, + height_to_market_cap: Option<&impl ReadableVec>, exit: &Exit, ) -> Result<()> { - if let Some(realized) = self.realized.as_mut() { - realized.compute_rest_part2( - indexes, - price, - starting_indexes, - &self.supply.total.bitcoin.height, - height_to_market_cap, - dateindex_to_market_cap, - exit, - )?; - } + self.realized.compute_rest_part2( + blocks, + prices, + starting_indexes, + &self.supply.total.btc.height, + height_to_market_cap, + exit, + )?; Ok(()) } /// Compute net_sentiment.height for separate cohorts (greed - pain). /// Called only for separate cohorts; aggregates compute via weighted average in compute_from_stateful. - pub fn compute_net_sentiment_height( + pub(crate) fn compute_net_sentiment_height( &mut self, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - if let Some(unrealized) = self.unrealized.as_mut() { - unrealized.compute_net_sentiment_height(starting_indexes, exit)?; - } - Ok(()) - } - - /// Compute net_sentiment dateindex derivation from height. - /// Called for ALL cohorts after height is computed. 
- pub fn compute_net_sentiment_rest( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - if let Some(unrealized) = self.unrealized.as_mut() { - unrealized.compute_net_sentiment_rest(indexes, starting_indexes, exit)?; - } + self.unrealized + .compute_net_sentiment_height(starting_indexes, exit)?; Ok(()) } } diff --git a/crates/brk_computer/src/distribution/metrics/outputs.rs b/crates/brk_computer/src/distribution/metrics/outputs.rs index 8d4ffffdd..57fda31d0 100644 --- a/crates/brk_computer/src/distribution/metrics/outputs.rs +++ b/crates/brk_computer/src/distribution/metrics/outputs.rs @@ -2,22 +2,22 @@ use brk_error::Result; use brk_traversable::Traversable; use brk_types::{Height, StoredF64, StoredU64}; use rayon::prelude::*; -use vecdb::{AnyStoredVec, AnyVec, Exit, GenericStoredVec}; +use vecdb::{AnyStoredVec, AnyVec, Exit, Rw, StorageMode, WritableVec}; -use crate::{ComputeIndexes, indexes, internal::{ComputedFromDateLast, ComputedFromHeightLast}}; +use crate::{ComputeIndexes, blocks, internal::ComputedFromHeightLast}; use super::ImportConfig; /// Output metrics for a cohort. -#[derive(Clone, Traversable)] -pub struct OutputsMetrics { - pub utxo_count: ComputedFromHeightLast, - pub utxo_count_30d_change: ComputedFromDateLast, +#[derive(Traversable)] +pub struct OutputsMetrics { + pub utxo_count: ComputedFromHeightLast, + pub utxo_count_30d_change: ComputedFromHeightLast, } impl OutputsMetrics { /// Import output metrics from database. 
- pub fn forced_import(cfg: &ImportConfig) -> Result { + pub(crate) fn forced_import(cfg: &ImportConfig) -> Result { Ok(Self { utxo_count: ComputedFromHeightLast::forced_import( cfg.db, @@ -25,7 +25,7 @@ impl OutputsMetrics { cfg.version, cfg.indexes, )?, - utxo_count_30d_change: ComputedFromDateLast::forced_import( + utxo_count_30d_change: ComputedFromHeightLast::forced_import( cfg.db, &cfg.name("utxo_count_30d_change"), cfg.version, @@ -35,12 +35,12 @@ impl OutputsMetrics { } /// Get minimum length across height-indexed vectors. - pub fn min_len(&self) -> usize { + pub(crate) fn min_len(&self) -> usize { self.utxo_count.height.len() } /// Push utxo count to height-indexed vector. - pub fn truncate_push(&mut self, height: Height, utxo_count: u64) -> Result<()> { + pub(crate) fn truncate_push(&mut self, height: Height, utxo_count: u64) -> Result<()> { self.utxo_count .height .truncate_push(height, StoredU64::from(utxo_count))?; @@ -48,16 +48,12 @@ impl OutputsMetrics { } /// Returns a parallel iterator over all vecs for parallel writing. - pub fn par_iter_mut(&mut self) -> impl ParallelIterator { - vec![ - &mut self.utxo_count.height as &mut dyn AnyStoredVec, - &mut self.utxo_count_30d_change.dateindex as &mut dyn AnyStoredVec, - ] - .into_par_iter() + pub(crate) fn par_iter_mut(&mut self) -> impl ParallelIterator { + vec![&mut self.utxo_count.height as &mut dyn AnyStoredVec].into_par_iter() } /// Compute aggregate values from separate cohorts. - pub fn compute_from_stateful( + pub(crate) fn compute_from_stateful( &mut self, starting_indexes: &ComputeIndexes, others: &[&Self], @@ -74,26 +70,19 @@ impl OutputsMetrics { Ok(()) } - /// Compute derived metrics (dateindex from height). - pub fn compute_rest( + /// Compute derived metrics. 
+ pub(crate) fn compute_rest( &mut self, - indexes: &indexes::Vecs, + blocks: &blocks::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.utxo_count - .compute_rest(indexes, starting_indexes, exit)?; - - self.utxo_count_30d_change - .compute_all(starting_indexes, exit, |v| { - v.compute_change( - starting_indexes.dateindex, - &*self.utxo_count.dateindex, - 30, - exit, - )?; - Ok(()) - })?; + self.utxo_count_30d_change.height.compute_rolling_change( + starting_indexes.height, + &blocks.count.height_1m_ago, + &self.utxo_count.height, + exit, + )?; Ok(()) } diff --git a/crates/brk_computer/src/distribution/metrics/realized.rs b/crates/brk_computer/src/distribution/metrics/realized.rs index cfa13132a..bae7bc0b6 100644 --- a/crates/brk_computer/src/distribution/metrics/realized.rs +++ b/crates/brk_computer/src/distribution/metrics/realized.rs @@ -1,48 +1,47 @@ use brk_error::Result; use brk_traversable::Traversable; use brk_types::{ - Bitcoin, CentsSats, CentsSquaredSats, CentsUnsigned, DateIndex, Dollars, Height, StoredF32, - StoredF64, Version, + Bitcoin, Cents, CentsSats, CentsSquaredSats, Dollars, Height, StoredF32, StoredF64, + Version, }; use rayon::prelude::*; use vecdb::{ - AnyStoredVec, AnyVec, BytesVec, EagerVec, Exit, GenericStoredVec, Ident, ImportableVec, - IterableCloneableVec, IterableVec, Negate, PcoVec, TypedVecIterator, + AnyStoredVec, AnyVec, BytesVec, Exit, WritableVec, Ident, ImportableVec, + ReadableCloneableVec, ReadableVec, Negate, Rw, StorageMode, }; use crate::{ - ComputeIndexes, + ComputeIndexes, blocks, distribution::state::RealizedState, - indexes, internal::{ - CentsUnsignedToDollars, ComputedFromDateLast, ComputedFromDateRatio, - ComputedFromHeightLast, ComputedFromHeightSum, ComputedFromHeightSumCum, DollarsMinus, - DollarsPlus, DollarsSquaredDivide, LazyBinaryFromHeightSum, LazyBinaryFromHeightSumCum, - LazyBinaryPriceFromHeight, - LazyComputedValueFromHeightSumCum, LazyFromDateLast, LazyFromHeightLast, 
LazyFromHeightSum, - LazyFromHeightSumCum, LazyPriceFromCents, PercentageDollarsF32, PriceFromHeight, - StoredF32Identity, ValueFromDateLast, + CentsUnsignedToDollars, ComputedFromHeightLast, ComputedFromHeightRatio, + ComputedFromHeightSum, ComputedFromHeightSumCum, DollarsMinus, DollarsPlus, + DollarsSquaredDivide, LazyBinaryFromHeightLast, LazyBinaryFromHeightSum, + LazyBinaryFromHeightSumCum, LazyBinaryPriceFromHeight, + LazyComputedValueFromHeightSumCum, LazyFromHeightLast, LazyFromHeightSum, + LazyFromHeightSumCum, LazyPriceFromCents, PercentageDollarsF32, Price, PriceFromHeight, + Ratio64, StoredF32Identity, ValueEmaFromHeight, }, - price, + prices, }; use super::ImportConfig; /// Realized cap and related metrics. -#[derive(Clone, Traversable)] -pub struct RealizedMetrics { +#[derive(Traversable)] +pub struct RealizedMetrics { // === Realized Cap === - pub realized_cap_cents: ComputedFromHeightLast, - pub realized_cap: LazyFromHeightLast, - pub realized_price: PriceFromHeight, - pub realized_price_extra: ComputedFromDateRatio, - pub realized_cap_rel_to_own_market_cap: Option>, - pub realized_cap_30d_delta: ComputedFromDateLast, + pub realized_cap_cents: ComputedFromHeightLast, + pub realized_cap: LazyFromHeightLast, + pub realized_price: Price>, + pub realized_price_extra: ComputedFromHeightRatio, + pub realized_cap_rel_to_own_market_cap: Option>, + pub realized_cap_30d_delta: ComputedFromHeightLast, // === Investor Price (dollar-weighted average acquisition price) === - pub investor_price_cents: ComputedFromHeightLast, + pub investor_price_cents: ComputedFromHeightLast, pub investor_price: LazyPriceFromCents, - pub investor_price_extra: ComputedFromDateRatio, + pub investor_price_extra: ComputedFromHeightRatio, // === Floor/Ceiling Price Bands (lazy: realized²/investor, investor²/realized) === pub lower_price_band: LazyBinaryPriceFromHeight, @@ -50,23 +49,23 @@ pub struct RealizedMetrics { // === Raw values for aggregation (needed to compute investor_price 
for aggregated cohorts) === /// Raw Σ(price × sats) for realized cap aggregation - pub cap_raw: BytesVec, + pub cap_raw: M::Stored>, /// Raw Σ(price² × sats) for investor_price aggregation - pub investor_cap_raw: BytesVec, + pub investor_cap_raw: M::Stored>, // === MVRV (Market Value to Realized Value) === // Proxy for realized_price_extra.ratio (close / realized_price = market_cap / realized_cap) - pub mvrv: LazyFromDateLast, + pub mvrv: LazyFromHeightLast, // === Realized Profit/Loss === - pub realized_profit: ComputedFromHeightSumCum, - pub realized_profit_7d_ema: ComputedFromDateLast, - pub realized_loss: ComputedFromHeightSumCum, - pub realized_loss_7d_ema: ComputedFromDateLast, + pub realized_profit: ComputedFromHeightSumCum, + pub realized_profit_7d_ema: ComputedFromHeightLast, + pub realized_loss: ComputedFromHeightSumCum, + pub realized_loss_7d_ema: ComputedFromHeightLast, pub neg_realized_loss: LazyFromHeightSumCum, - pub net_realized_pnl: ComputedFromHeightSumCum, - pub net_realized_pnl_7d_ema: ComputedFromDateLast, - pub realized_value: ComputedFromHeightSum, + pub net_realized_pnl: ComputedFromHeightSumCum, + pub net_realized_pnl_7d_ema: ComputedFromHeightLast, + pub realized_value: ComputedFromHeightSum, // === Realized vs Realized Cap Ratios (lazy) === pub realized_profit_rel_to_realized_cap: @@ -77,13 +76,28 @@ pub struct RealizedMetrics { // === Total Realized PnL === pub total_realized_pnl: LazyFromHeightSum, - pub realized_profit_to_loss_ratio: Option>>, + + // === Realized Profit/Loss Rolling Sums === + pub realized_profit_24h: Option>, + pub realized_profit_7d: Option>, + pub realized_profit_30d: Option>, + pub realized_profit_1y: Option>, + pub realized_loss_24h: Option>, + pub realized_loss_7d: Option>, + pub realized_loss_30d: Option>, + pub realized_loss_1y: Option>, + + // === Realized Profit to Loss Ratio (lazy from rolling sums) === + pub realized_profit_to_loss_ratio_24h: Option>, + pub realized_profit_to_loss_ratio_7d: Option>, + pub 
realized_profit_to_loss_ratio_30d: Option>, + pub realized_profit_to_loss_ratio_1y: Option>, // === Value Created/Destroyed Splits (stored) === - pub profit_value_created: ComputedFromHeightSum, - pub profit_value_destroyed: ComputedFromHeightSum, - pub loss_value_created: ComputedFromHeightSum, - pub loss_value_destroyed: ComputedFromHeightSum, + pub profit_value_created: ComputedFromHeightSum, + pub profit_value_destroyed: ComputedFromHeightSum, + pub loss_value_created: ComputedFromHeightSum, + pub loss_value_destroyed: ComputedFromHeightSum, // === Value Created/Destroyed Totals (lazy: profit + loss) === pub value_created: LazyBinaryFromHeightSum, @@ -97,46 +111,92 @@ pub struct RealizedMetrics { pub adjusted_value_created: Option>, pub adjusted_value_destroyed: Option>, - // === SOPR (Spent Output Profit Ratio) === - pub sopr: EagerVec>, - pub sopr_7d_ema: EagerVec>, - pub sopr_30d_ema: EagerVec>, - pub adjusted_sopr: Option>>, - pub adjusted_sopr_7d_ema: Option>>, - pub adjusted_sopr_30d_ema: Option>>, + // === Value Created/Destroyed Rolling Sums === + pub value_created_24h: ComputedFromHeightLast, + pub value_created_7d: ComputedFromHeightLast, + pub value_created_30d: ComputedFromHeightLast, + pub value_created_1y: ComputedFromHeightLast, + pub value_destroyed_24h: ComputedFromHeightLast, + pub value_destroyed_7d: ComputedFromHeightLast, + pub value_destroyed_30d: ComputedFromHeightLast, + pub value_destroyed_1y: ComputedFromHeightLast, - // === Sell Side Risk === - pub sell_side_risk_ratio: EagerVec>, - pub sell_side_risk_ratio_7d_ema: EagerVec>, - pub sell_side_risk_ratio_30d_ema: EagerVec>, + // === SOPR (rolling window ratios) === + pub sopr: LazyFromHeightLast, + pub sopr_24h: LazyBinaryFromHeightLast, + pub sopr_7d: LazyBinaryFromHeightLast, + pub sopr_30d: LazyBinaryFromHeightLast, + pub sopr_1y: LazyBinaryFromHeightLast, + pub sopr_24h_7d_ema: ComputedFromHeightLast, + pub sopr_7d_ema: LazyFromHeightLast, + pub sopr_24h_30d_ema: 
ComputedFromHeightLast, + pub sopr_30d_ema: LazyFromHeightLast, + + // === Adjusted Value Created/Destroyed Rolling Sums === + pub adjusted_value_created_24h: Option>, + pub adjusted_value_created_7d: Option>, + pub adjusted_value_created_30d: Option>, + pub adjusted_value_created_1y: Option>, + pub adjusted_value_destroyed_24h: Option>, + pub adjusted_value_destroyed_7d: Option>, + pub adjusted_value_destroyed_30d: Option>, + pub adjusted_value_destroyed_1y: Option>, + + // === Adjusted SOPR (rolling window ratios) === + pub adjusted_sopr: Option>, + pub adjusted_sopr_24h: Option>, + pub adjusted_sopr_7d: Option>, + pub adjusted_sopr_30d: Option>, + pub adjusted_sopr_1y: Option>, + pub adjusted_sopr_24h_7d_ema: Option>, + pub adjusted_sopr_7d_ema: Option>, + pub adjusted_sopr_24h_30d_ema: Option>, + pub adjusted_sopr_30d_ema: Option>, + + // === Sell Side Risk Rolling Sum Intermediates === + pub realized_value_24h: ComputedFromHeightLast, + pub realized_value_7d: ComputedFromHeightLast, + pub realized_value_30d: ComputedFromHeightLast, + pub realized_value_1y: ComputedFromHeightLast, + + // === Sell Side Risk (rolling window ratios) === + pub sell_side_risk_ratio: LazyFromHeightLast, + pub sell_side_risk_ratio_24h: LazyBinaryFromHeightLast, + pub sell_side_risk_ratio_7d: LazyBinaryFromHeightLast, + pub sell_side_risk_ratio_30d: LazyBinaryFromHeightLast, + pub sell_side_risk_ratio_1y: LazyBinaryFromHeightLast, + pub sell_side_risk_ratio_24h_7d_ema: ComputedFromHeightLast, + pub sell_side_risk_ratio_7d_ema: LazyFromHeightLast, + pub sell_side_risk_ratio_24h_30d_ema: ComputedFromHeightLast, + pub sell_side_risk_ratio_30d_ema: LazyFromHeightLast, // === Net Realized PnL Deltas === - pub net_realized_pnl_cumulative_30d_delta: ComputedFromDateLast, - pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: ComputedFromDateLast, - pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: ComputedFromDateLast, + pub net_realized_pnl_cumulative_30d_delta: 
ComputedFromHeightLast, + pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: ComputedFromHeightLast, + pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: ComputedFromHeightLast, // === Peak Regret === /// Realized peak regret: Σ((peak - sell_price) × sats) /// where peak = max price during holding period. /// "How much more could have been made by selling at peak instead" - pub peak_regret: ComputedFromHeightSumCum, + pub peak_regret: ComputedFromHeightSumCum, /// Peak regret as % of realized cap pub peak_regret_rel_to_realized_cap: LazyBinaryFromHeightSum, // === Sent in Profit/Loss === /// Sats sent in profit (sats/btc/usd) - pub sent_in_profit: LazyComputedValueFromHeightSumCum, + pub sent_in_profit: LazyComputedValueFromHeightSumCum, /// 14-day EMA of sent in profit (sats, btc, usd) - pub sent_in_profit_14d_ema: ValueFromDateLast, + pub sent_in_profit_14d_ema: ValueEmaFromHeight, /// Sats sent in loss (sats/btc/usd) - pub sent_in_loss: LazyComputedValueFromHeightSumCum, + pub sent_in_loss: LazyComputedValueFromHeightSumCum, /// 14-day EMA of sent in loss (sats, btc, usd) - pub sent_in_loss_14d_ema: ValueFromDateLast, + pub sent_in_loss_14d_ema: ValueEmaFromHeight, } impl RealizedMetrics { /// Import realized metrics from database. 
- pub fn forced_import(cfg: &ImportConfig) -> Result { + pub(crate) fn forced_import(cfg: &ImportConfig) -> Result { let v1 = Version::ONE; let v2 = Version::new(2); let v3 = Version::new(3); @@ -154,7 +214,7 @@ impl RealizedMetrics { let realized_cap = LazyFromHeightLast::from_computed::( &cfg.name("realized_cap"), cfg.version, - realized_cap_cents.height.boxed_clone(), + realized_cap_cents.height.read_only_boxed_clone(), &realized_cap_cents, ); @@ -165,7 +225,7 @@ impl RealizedMetrics { cfg.indexes, )?; - let realized_profit_7d_ema = ComputedFromDateLast::forced_import( + let realized_profit_7d_ema = ComputedFromHeightLast::forced_import( cfg.db, &cfg.name("realized_profit_7d_ema"), cfg.version, @@ -179,7 +239,7 @@ impl RealizedMetrics { cfg.indexes, )?; - let realized_loss_7d_ema = ComputedFromDateLast::forced_import( + let realized_loss_7d_ema = ComputedFromHeightLast::forced_import( cfg.db, &cfg.name("realized_loss_7d_ema"), cfg.version, @@ -189,7 +249,7 @@ impl RealizedMetrics { let neg_realized_loss = LazyFromHeightSumCum::from_computed::( &cfg.name("neg_realized_loss"), cfg.version + v1, - realized_loss.height.boxed_clone(), + realized_loss.height.read_only_boxed_clone(), &realized_loss, ); @@ -200,7 +260,7 @@ impl RealizedMetrics { cfg.indexes, )?; - let net_realized_pnl_7d_ema = ComputedFromDateLast::forced_import( + let net_realized_pnl_7d_ema = ComputedFromHeightLast::forced_import( cfg.db, &cfg.name("net_realized_pnl_7d_ema"), cfg.version, @@ -226,7 +286,7 @@ impl RealizedMetrics { let total_realized_pnl = LazyFromHeightSum::from_computed::( &cfg.name("total_realized_pnl"), cfg.version + v1, - realized_value.height.boxed_clone(), + realized_value.height.read_only_boxed_clone(), &realized_value, ); @@ -235,8 +295,8 @@ impl RealizedMetrics { LazyBinaryFromHeightSumCum::from_computed_lazy_last::( &cfg.name("realized_profit_rel_to_realized_cap"), cfg.version + v1, - realized_profit.height.boxed_clone(), - realized_cap.height.boxed_clone(), + 
realized_profit.height.read_only_boxed_clone(), + realized_cap.height.read_only_boxed_clone(), &realized_profit, &realized_cap, ); @@ -245,8 +305,8 @@ impl RealizedMetrics { LazyBinaryFromHeightSumCum::from_computed_lazy_last::( &cfg.name("realized_loss_rel_to_realized_cap"), cfg.version + v1, - realized_loss.height.boxed_clone(), - realized_cap.height.boxed_clone(), + realized_loss.height.read_only_boxed_clone(), + realized_cap.height.read_only_boxed_clone(), &realized_loss, &realized_cap, ); @@ -255,8 +315,8 @@ impl RealizedMetrics { LazyBinaryFromHeightSumCum::from_computed_lazy_last::( &cfg.name("net_realized_pnl_rel_to_realized_cap"), cfg.version + v1, - net_realized_pnl.height.boxed_clone(), - realized_cap.height.boxed_clone(), + net_realized_pnl.height.read_only_boxed_clone(), + realized_cap.height.read_only_boxed_clone(), &net_realized_pnl, &realized_cap, ); @@ -276,16 +336,16 @@ impl RealizedMetrics { cfg.indexes, )?; - let investor_price = LazyPriceFromCents::from_computed( + let investor_price = LazyPriceFromCents::from_computed::( &cfg.name("investor_price"), cfg.version, &investor_price_cents, ); - let investor_price_extra = ComputedFromDateRatio::forced_import_from_lazy( + let investor_price_extra = ComputedFromHeightRatio::forced_import_from_lazy( cfg.db, &cfg.name("investor_price"), - &investor_price.dollars, + &investor_price.usd, cfg.version, cfg.indexes, extended, @@ -362,14 +422,14 @@ impl RealizedMetrics { let capitulation_flow = LazyFromHeightSum::from_computed::( &cfg.name("capitulation_flow"), cfg.version, - loss_value_destroyed.height.boxed_clone(), + loss_value_destroyed.height.read_only_boxed_clone(), &loss_value_destroyed, ); let profit_flow = LazyFromHeightSum::from_computed::( &cfg.name("profit_flow"), cfg.version, - profit_value_destroyed.height.boxed_clone(), + profit_value_destroyed.height.read_only_boxed_clone(), &profit_value_destroyed, ); @@ -408,7 +468,7 @@ impl RealizedMetrics { }); // Create realized_price_extra first so we 
can reference its ratio for MVRV proxy - let realized_price_extra = ComputedFromDateRatio::forced_import( + let realized_price_extra = ComputedFromHeightRatio::forced_import( cfg.db, &cfg.name("realized_price"), Some(&realized_price), @@ -419,16 +479,169 @@ impl RealizedMetrics { // MVRV is a lazy proxy for realized_price_extra.ratio // ratio = close / realized_price = market_cap / realized_cap = MVRV - let mvrv = LazyFromDateLast::from_source::( + let mvrv = LazyFromHeightLast::from_computed::( &cfg.name("mvrv"), cfg.version, + realized_price_extra.ratio.height.read_only_boxed_clone(), &realized_price_extra.ratio, ); + // === Rolling sum intermediates (must be imported before lazy ratios reference them) === + macro_rules! import_rolling { + ($name:expr) => { + ComputedFromHeightLast::forced_import(cfg.db, &cfg.name($name), cfg.version + v1, cfg.indexes)? + }; + } + macro_rules! import_rolling_opt { + ($cond:expr, $name:expr) => { + $cond.then(|| ComputedFromHeightLast::forced_import(cfg.db, &cfg.name($name), cfg.version + v1, cfg.indexes)).transpose()? 
+ }; + } + + let value_created_24h = import_rolling!("value_created_24h"); + let value_created_7d = import_rolling!("value_created_7d"); + let value_created_30d = import_rolling!("value_created_30d"); + let value_created_1y = import_rolling!("value_created_1y"); + let value_destroyed_24h = import_rolling!("value_destroyed_24h"); + let value_destroyed_7d = import_rolling!("value_destroyed_7d"); + let value_destroyed_30d = import_rolling!("value_destroyed_30d"); + let value_destroyed_1y = import_rolling!("value_destroyed_1y"); + + let adjusted_value_created_24h = import_rolling_opt!(compute_adjusted, "adjusted_value_created_24h"); + let adjusted_value_created_7d = import_rolling_opt!(compute_adjusted, "adjusted_value_created_7d"); + let adjusted_value_created_30d = import_rolling_opt!(compute_adjusted, "adjusted_value_created_30d"); + let adjusted_value_created_1y = import_rolling_opt!(compute_adjusted, "adjusted_value_created_1y"); + let adjusted_value_destroyed_24h = import_rolling_opt!(compute_adjusted, "adjusted_value_destroyed_24h"); + let adjusted_value_destroyed_7d = import_rolling_opt!(compute_adjusted, "adjusted_value_destroyed_7d"); + let adjusted_value_destroyed_30d = import_rolling_opt!(compute_adjusted, "adjusted_value_destroyed_30d"); + let adjusted_value_destroyed_1y = import_rolling_opt!(compute_adjusted, "adjusted_value_destroyed_1y"); + + let realized_value_24h = import_rolling!("realized_value_24h"); + let realized_value_7d = import_rolling!("realized_value_7d"); + let realized_value_30d = import_rolling!("realized_value_30d"); + let realized_value_1y = import_rolling!("realized_value_1y"); + + let realized_profit_24h = import_rolling_opt!(extended, "realized_profit_24h"); + let realized_profit_7d = import_rolling_opt!(extended, "realized_profit_7d"); + let realized_profit_30d = import_rolling_opt!(extended, "realized_profit_30d"); + let realized_profit_1y = import_rolling_opt!(extended, "realized_profit_1y"); + let realized_loss_24h = 
import_rolling_opt!(extended, "realized_loss_24h"); + let realized_loss_7d = import_rolling_opt!(extended, "realized_loss_7d"); + let realized_loss_30d = import_rolling_opt!(extended, "realized_loss_30d"); + let realized_loss_1y = import_rolling_opt!(extended, "realized_loss_1y"); + + // === Rolling window lazy ratios (from rolling sum intermediates) === + let sopr_24h = LazyBinaryFromHeightLast::from_computed_last::( + &cfg.name("sopr_24h"), cfg.version + v1, &value_created_24h, &value_destroyed_24h, + ); + let sopr_7d = LazyBinaryFromHeightLast::from_computed_last::( + &cfg.name("sopr_7d"), cfg.version + v1, &value_created_7d, &value_destroyed_7d, + ); + let sopr_30d = LazyBinaryFromHeightLast::from_computed_last::( + &cfg.name("sopr_30d"), cfg.version + v1, &value_created_30d, &value_destroyed_30d, + ); + let sopr_1y = LazyBinaryFromHeightLast::from_computed_last::( + &cfg.name("sopr_1y"), cfg.version + v1, &value_created_1y, &value_destroyed_1y, + ); + let sopr = LazyFromHeightLast::from_binary::( + &cfg.name("sopr"), cfg.version + v1, &sopr_24h, + ); + + macro_rules! 
lazy_binary_from_opt_last { + ($transform:ty, $name:expr, $s1:expr, $s2:expr) => { + ($s1.is_some() && $s2.is_some()).then(|| { + LazyBinaryFromHeightLast::from_computed_last::<$transform>( + &cfg.name($name), cfg.version + v1, + $s1.as_ref().unwrap(), $s2.as_ref().unwrap(), + ) + }) + }; + } + let adjusted_sopr_24h = lazy_binary_from_opt_last!(Ratio64, "adjusted_sopr_24h", adjusted_value_created_24h, adjusted_value_destroyed_24h); + let adjusted_sopr_7d = lazy_binary_from_opt_last!(Ratio64, "adjusted_sopr_7d", adjusted_value_created_7d, adjusted_value_destroyed_7d); + let adjusted_sopr_30d = lazy_binary_from_opt_last!(Ratio64, "adjusted_sopr_30d", adjusted_value_created_30d, adjusted_value_destroyed_30d); + let adjusted_sopr_1y = lazy_binary_from_opt_last!(Ratio64, "adjusted_sopr_1y", adjusted_value_created_1y, adjusted_value_destroyed_1y); + let adjusted_sopr = adjusted_sopr_24h.as_ref().map(|sopr_24h| { + LazyFromHeightLast::from_binary::( + &cfg.name("adjusted_sopr"), cfg.version + v1, sopr_24h, + ) + }); + + let sell_side_risk_ratio_24h = LazyBinaryFromHeightLast::from_block_last_and_lazy_block_last::( + &cfg.name("sell_side_risk_ratio_24h"), cfg.version + v1, &realized_value_24h, &realized_cap, + ); + let sell_side_risk_ratio_7d = LazyBinaryFromHeightLast::from_block_last_and_lazy_block_last::( + &cfg.name("sell_side_risk_ratio_7d"), cfg.version + v1, &realized_value_7d, &realized_cap, + ); + let sell_side_risk_ratio_30d = LazyBinaryFromHeightLast::from_block_last_and_lazy_block_last::( + &cfg.name("sell_side_risk_ratio_30d"), cfg.version + v1, &realized_value_30d, &realized_cap, + ); + let sell_side_risk_ratio_1y = LazyBinaryFromHeightLast::from_block_last_and_lazy_block_last::( + &cfg.name("sell_side_risk_ratio_1y"), cfg.version + v1, &realized_value_1y, &realized_cap, + ); + let sell_side_risk_ratio = LazyFromHeightLast::from_binary::( + &cfg.name("sell_side_risk_ratio"), cfg.version + v1, &sell_side_risk_ratio_24h, + ); + + let 
realized_profit_to_loss_ratio_24h = lazy_binary_from_opt_last!(Ratio64, "realized_profit_to_loss_ratio_24h", realized_profit_24h, realized_loss_24h); + let realized_profit_to_loss_ratio_7d = lazy_binary_from_opt_last!(Ratio64, "realized_profit_to_loss_ratio_7d", realized_profit_7d, realized_loss_7d); + let realized_profit_to_loss_ratio_30d = lazy_binary_from_opt_last!(Ratio64, "realized_profit_to_loss_ratio_30d", realized_profit_30d, realized_loss_30d); + let realized_profit_to_loss_ratio_1y = lazy_binary_from_opt_last!(Ratio64, "realized_profit_to_loss_ratio_1y", realized_profit_1y, realized_loss_1y); + + // === EMA imports + identity aliases === + let sopr_24h_7d_ema = import_rolling!("sopr_24h_7d_ema"); + let sopr_7d_ema = LazyFromHeightLast::from_computed::( + &cfg.name("sopr_7d_ema"), cfg.version + v1, + sopr_24h_7d_ema.height.read_only_boxed_clone(), &sopr_24h_7d_ema, + ); + let sopr_24h_30d_ema = import_rolling!("sopr_24h_30d_ema"); + let sopr_30d_ema = LazyFromHeightLast::from_computed::( + &cfg.name("sopr_30d_ema"), cfg.version + v1, + sopr_24h_30d_ema.height.read_only_boxed_clone(), &sopr_24h_30d_ema, + ); + + let adjusted_sopr_24h_7d_ema = import_rolling_opt!(compute_adjusted, "adjusted_sopr_24h_7d_ema"); + let adjusted_sopr_7d_ema = adjusted_sopr_24h_7d_ema.as_ref().map(|ema| { + LazyFromHeightLast::from_computed::( + &cfg.name("adjusted_sopr_7d_ema"), cfg.version + v1, + ema.height.read_only_boxed_clone(), ema, + ) + }); + let adjusted_sopr_24h_30d_ema = import_rolling_opt!(compute_adjusted, "adjusted_sopr_24h_30d_ema"); + let adjusted_sopr_30d_ema = adjusted_sopr_24h_30d_ema.as_ref().map(|ema| { + LazyFromHeightLast::from_computed::( + &cfg.name("adjusted_sopr_30d_ema"), cfg.version + v1, + ema.height.read_only_boxed_clone(), ema, + ) + }); + + let sell_side_risk_ratio_24h_7d_ema = import_rolling!("sell_side_risk_ratio_24h_7d_ema"); + let sell_side_risk_ratio_7d_ema = LazyFromHeightLast::from_computed::( + &cfg.name("sell_side_risk_ratio_7d_ema"), 
cfg.version + v1, + sell_side_risk_ratio_24h_7d_ema.height.read_only_boxed_clone(), &sell_side_risk_ratio_24h_7d_ema, + ); + let sell_side_risk_ratio_24h_30d_ema = import_rolling!("sell_side_risk_ratio_24h_30d_ema"); + let sell_side_risk_ratio_30d_ema = LazyFromHeightLast::from_computed::( + &cfg.name("sell_side_risk_ratio_30d_ema"), cfg.version + v1, + sell_side_risk_ratio_24h_30d_ema.height.read_only_boxed_clone(), &sell_side_risk_ratio_24h_30d_ema, + ); + + let peak_regret_rel_to_realized_cap = LazyBinaryFromHeightSum::from_sumcum_lazy_last::< + PercentageDollarsF32, + _, + >( + &cfg.name("peak_regret_rel_to_realized_cap"), + cfg.version + v1, + peak_regret.height.read_only_boxed_clone(), + realized_cap.height.read_only_boxed_clone(), + &peak_regret, + &realized_cap, + ); + Ok(Self { // === Realized Cap === realized_cap_cents, - realized_cap: realized_cap.clone(), + realized_cap, realized_price, realized_price_extra, realized_cap_rel_to_own_market_cap: extended @@ -441,7 +654,7 @@ impl RealizedMetrics { ) }) .transpose()?, - realized_cap_30d_delta: ComputedFromDateLast::forced_import( + realized_cap_30d_delta: ComputedFromHeightLast::forced_import( cfg.db, &cfg.name("realized_cap_30d_delta"), cfg.version, @@ -480,15 +693,22 @@ impl RealizedMetrics { // === Total Realized PnL === total_realized_pnl, - realized_profit_to_loss_ratio: extended - .then(|| { - EagerVec::forced_import( - cfg.db, - &cfg.name("realized_profit_to_loss_ratio"), - cfg.version + v1, - ) - }) - .transpose()?, + + // === Realized Profit/Loss Rolling Sums === + realized_profit_24h, + realized_profit_7d, + realized_profit_30d, + realized_profit_1y, + realized_loss_24h, + realized_loss_7d, + realized_loss_30d, + realized_loss_1y, + + // === Realized Profit to Loss Ratio (lazy from rolling sums) === + realized_profit_to_loss_ratio_24h, + realized_profit_to_loss_ratio_7d, + realized_profit_to_loss_ratio_30d, + realized_profit_to_loss_ratio_1y, // === Value Created/Destroyed Splits (stored) === 
profit_value_created, @@ -508,75 +728,81 @@ impl RealizedMetrics { adjusted_value_created, adjusted_value_destroyed, - // === SOPR === - sopr: EagerVec::forced_import(cfg.db, &cfg.name("sopr"), cfg.version + v1)?, - sopr_7d_ema: EagerVec::forced_import( - cfg.db, - &cfg.name("sopr_7d_ema"), - cfg.version + v1, - )?, - sopr_30d_ema: EagerVec::forced_import( - cfg.db, - &cfg.name("sopr_30d_ema"), - cfg.version + v1, - )?, - adjusted_sopr: compute_adjusted - .then(|| { - EagerVec::forced_import(cfg.db, &cfg.name("adjusted_sopr"), cfg.version + v1) - }) - .transpose()?, - adjusted_sopr_7d_ema: compute_adjusted - .then(|| { - EagerVec::forced_import( - cfg.db, - &cfg.name("adjusted_sopr_7d_ema"), - cfg.version + v1, - ) - }) - .transpose()?, - adjusted_sopr_30d_ema: compute_adjusted - .then(|| { - EagerVec::forced_import( - cfg.db, - &cfg.name("adjusted_sopr_30d_ema"), - cfg.version + v1, - ) - }) - .transpose()?, + // === Value Created/Destroyed Rolling Sums === + value_created_24h, + value_created_7d, + value_created_30d, + value_created_1y, + value_destroyed_24h, + value_destroyed_7d, + value_destroyed_30d, + value_destroyed_1y, - // === Sell Side Risk === - sell_side_risk_ratio: EagerVec::forced_import( - cfg.db, - &cfg.name("sell_side_risk_ratio"), - cfg.version + v1, - )?, - sell_side_risk_ratio_7d_ema: EagerVec::forced_import( - cfg.db, - &cfg.name("sell_side_risk_ratio_7d_ema"), - cfg.version + v1, - )?, - sell_side_risk_ratio_30d_ema: EagerVec::forced_import( - cfg.db, - &cfg.name("sell_side_risk_ratio_30d_ema"), - cfg.version + v1, - )?, + // === SOPR (rolling window ratios) === + sopr, + sopr_24h, + sopr_7d, + sopr_30d, + sopr_1y, + sopr_24h_7d_ema, + sopr_7d_ema, + sopr_24h_30d_ema, + sopr_30d_ema, + + // === Adjusted Value Created/Destroyed Rolling Sums === + adjusted_value_created_24h, + adjusted_value_created_7d, + adjusted_value_created_30d, + adjusted_value_created_1y, + adjusted_value_destroyed_24h, + adjusted_value_destroyed_7d, + 
adjusted_value_destroyed_30d, + adjusted_value_destroyed_1y, + + // === Adjusted SOPR (rolling window ratios) === + adjusted_sopr, + adjusted_sopr_24h, + adjusted_sopr_7d, + adjusted_sopr_30d, + adjusted_sopr_1y, + adjusted_sopr_24h_7d_ema, + adjusted_sopr_7d_ema, + adjusted_sopr_24h_30d_ema, + adjusted_sopr_30d_ema, + + // === Sell Side Risk Rolling Sum Intermediates === + realized_value_24h, + realized_value_7d, + realized_value_30d, + realized_value_1y, + + // === Sell Side Risk (rolling window ratios) === + sell_side_risk_ratio, + sell_side_risk_ratio_24h, + sell_side_risk_ratio_7d, + sell_side_risk_ratio_30d, + sell_side_risk_ratio_1y, + sell_side_risk_ratio_24h_7d_ema, + sell_side_risk_ratio_7d_ema, + sell_side_risk_ratio_24h_30d_ema, + sell_side_risk_ratio_30d_ema, // === Net Realized PnL Deltas === - net_realized_pnl_cumulative_30d_delta: ComputedFromDateLast::forced_import( + net_realized_pnl_cumulative_30d_delta: ComputedFromHeightLast::forced_import( cfg.db, &cfg.name("net_realized_pnl_cumulative_30d_delta"), cfg.version + v3, cfg.indexes, )?, net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: - ComputedFromDateLast::forced_import( + ComputedFromHeightLast::forced_import( cfg.db, &cfg.name("net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap"), cfg.version + v3, cfg.indexes, )?, net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: - ComputedFromDateLast::forced_import( + ComputedFromHeightLast::forced_import( cfg.db, &cfg.name("net_realized_pnl_cumulative_30d_delta_rel_to_market_cap"), cfg.version + v3, @@ -584,18 +810,8 @@ impl RealizedMetrics { )?, // === ATH Regret === - peak_regret: peak_regret.clone(), - peak_regret_rel_to_realized_cap: LazyBinaryFromHeightSum::from_sumcum_lazy_last::< - PercentageDollarsF32, - _, - >( - &cfg.name("peak_regret_rel_to_realized_cap"), - cfg.version + v1, - peak_regret.height.boxed_clone(), - realized_cap.height.boxed_clone(), - &peak_regret, - &realized_cap, - ), + peak_regret, + 
peak_regret_rel_to_realized_cap, // === Sent in Profit/Loss === sent_in_profit: LazyComputedValueFromHeightSumCum::forced_import( @@ -603,13 +819,12 @@ impl RealizedMetrics { &cfg.name("sent_in_profit"), cfg.version, cfg.indexes, - cfg.price, + cfg.prices, )?, - sent_in_profit_14d_ema: ValueFromDateLast::forced_import( + sent_in_profit_14d_ema: ValueEmaFromHeight::forced_import( cfg.db, &cfg.name("sent_in_profit_14d_ema"), cfg.version, - cfg.compute_dollars(), cfg.indexes, )?, sent_in_loss: LazyComputedValueFromHeightSumCum::forced_import( @@ -617,20 +832,19 @@ impl RealizedMetrics { &cfg.name("sent_in_loss"), cfg.version, cfg.indexes, - cfg.price, + cfg.prices, )?, - sent_in_loss_14d_ema: ValueFromDateLast::forced_import( + sent_in_loss_14d_ema: ValueEmaFromHeight::forced_import( cfg.db, &cfg.name("sent_in_loss_14d_ema"), cfg.version, - cfg.compute_dollars(), cfg.indexes, )?, }) } /// Get minimum length across height-indexed vectors written in block loop. - pub fn min_stateful_height_len(&self) -> usize { + pub(crate) fn min_stateful_height_len(&self) -> usize { self.realized_cap .height .len() @@ -650,7 +864,7 @@ impl RealizedMetrics { /// Push realized state values to height-indexed vectors. /// State values are CentsUnsigned (deterministic), converted to Dollars for storage. - pub fn truncate_push(&mut self, height: Height, state: &RealizedState) -> Result<()> { + pub(crate) fn truncate_push(&mut self, height: Height, state: &RealizedState) -> Result<()> { self.realized_cap_cents .height .truncate_push(height, state.cap())?; @@ -699,7 +913,7 @@ impl RealizedMetrics { } /// Returns a parallel iterator over all vecs for parallel writing. - pub fn par_iter_mut(&mut self) -> impl ParallelIterator { + pub(crate) fn par_iter_mut(&mut self) -> impl ParallelIterator { vec![ &mut self.realized_cap_cents.height as &mut dyn AnyStoredVec, &mut self.realized_profit.height, @@ -723,13 +937,13 @@ impl RealizedMetrics { } /// Validate computed versions against base version. 
- pub fn validate_computed_versions(&mut self, _base_version: Version) -> Result<()> { + pub(crate) fn validate_computed_versions(&mut self, _base_version: Version) -> Result<()> { // Validation logic for computed vecs Ok(()) } /// Compute aggregate values from separate cohorts. - pub fn compute_from_stateful( + pub(crate) fn compute_from_stateful( &mut self, starting_indexes: &ComputeIndexes, others: &[&Self], @@ -771,11 +985,6 @@ impl RealizedMetrics { .height .validate_computed_version_or_reset(investor_price_dep_version)?; - let mut iters: Vec<_> = others - .iter() - .filter_map(|o| Some((o.cap_raw.iter().ok()?, o.investor_cap_raw.iter().ok()?))) - .collect(); - // Start from where the target vecs left off (handles fresh/reset vecs) let start = self .cap_raw @@ -791,9 +1000,9 @@ impl RealizedMetrics { let mut sum_cap = CentsSats::ZERO; let mut sum_investor_cap = CentsSquaredSats::ZERO; - for (cap_iter, investor_cap_iter) in &mut iters { - sum_cap += cap_iter.get_unwrap(height); - sum_investor_cap += investor_cap_iter.get_unwrap(height); + for o in others.iter() { + sum_cap += o.cap_raw.collect_one_at(i).unwrap(); + sum_investor_cap += o.investor_cap_raw.collect_one_at(i).unwrap(); } self.cap_raw.truncate_push(height, sum_cap)?; @@ -802,9 +1011,9 @@ impl RealizedMetrics { // Compute investor_price from aggregated raw values let investor_price = if sum_cap.inner() == 0 { - CentsUnsigned::ZERO + Cents::ZERO } else { - CentsUnsigned::new((sum_investor_cap / sum_cap.inner()) as u64) + Cents::new((sum_investor_cap / sum_cap.inner()) as u64) }; self.investor_price_cents .height @@ -882,24 +1091,23 @@ impl RealizedMetrics { } /// First phase of computed metrics (indexes from height). 
- pub fn compute_rest_part1( + pub(crate) fn compute_rest_part1( &mut self, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.realized_cap_cents - .compute_rest(indexes, starting_indexes, exit)?; + // realized_cap_cents: ComputedFromHeightLast - day1 is lazy, nothing to compute + // investor_price_cents: ComputedFromHeightLast - day1 is lazy, nothing to compute + + // realized_profit/loss: ComputedFromHeightSumCum - compute cumulative from height self.realized_profit - .compute_rest(indexes, starting_indexes, exit)?; + .compute_cumulative(starting_indexes, exit)?; self.realized_loss - .compute_rest(indexes, starting_indexes, exit)?; - self.investor_price_cents - .compute_rest(indexes, starting_indexes, exit)?; + .compute_cumulative(starting_indexes, exit)?; // net_realized_pnl = profit - loss self.net_realized_pnl - .compute_all(indexes, starting_indexes, exit, |vec| { + .compute(starting_indexes, exit, |vec| { vec.compute_subtract( starting_indexes.height, &self.realized_profit.height, @@ -912,268 +1120,298 @@ impl RealizedMetrics { // realized_value = profit + loss // Note: total_realized_pnl is a lazy alias to realized_value since both // compute profit + loss with sum aggregation, making them identical. 
- self.realized_value - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_add( - starting_indexes.height, - &self.realized_profit.height, - &self.realized_loss.height, - exit, - )?; - Ok(()) - })?; + // ComputedFromHeightSum: day1 is lazy, just compute the height vec directly + self.realized_value.height.compute_add( + starting_indexes.height, + &self.realized_profit.height, + &self.realized_loss.height, + exit, + )?; // Compute derived aggregations for the 4 splits // (value_created, value_destroyed, capitulation_flow, profit_flow are derived lazily) - self.profit_value_created - .compute_rest(indexes, starting_indexes, exit)?; - self.profit_value_destroyed - .compute_rest(indexes, starting_indexes, exit)?; - self.loss_value_created - .compute_rest(indexes, starting_indexes, exit)?; - self.loss_value_destroyed - .compute_rest(indexes, starting_indexes, exit)?; - // ATH regret - self.peak_regret - .compute_rest(indexes, starting_indexes, exit)?; + // ComputedFromHeightSum: day1 is lazy, nothing to compute - // Volume at profit/loss + // ATH regret: ComputedFromHeightSumCum - compute cumulative from height + self.peak_regret + .compute_cumulative(starting_indexes, exit)?; + + // Volume at profit/loss: LazyComputedValueFromHeightSumCum - compute cumulative self.sent_in_profit - .compute_rest(indexes, starting_indexes, exit)?; + .compute_cumulative(starting_indexes, exit)?; self.sent_in_loss - .compute_rest(indexes, starting_indexes, exit)?; + .compute_cumulative(starting_indexes, exit)?; Ok(()) } /// Second phase of computed metrics (realized price from realized cap / supply). 
#[allow(clippy::too_many_arguments)] - pub fn compute_rest_part2( + pub(crate) fn compute_rest_part2( &mut self, - indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + blocks: &blocks::Vecs, + prices: &prices::Vecs, starting_indexes: &ComputeIndexes, - height_to_supply: &impl IterableVec, - height_to_market_cap: Option<&impl IterableVec>, - dateindex_to_market_cap: Option<&impl IterableVec>, + height_to_supply: &impl ReadableVec, + height_to_market_cap: Option<&impl ReadableVec>, exit: &Exit, ) -> Result<()> { // realized_price = realized_cap / supply - self.realized_price - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_divide( + self.realized_price.height.compute_divide( + starting_indexes.height, + &self.realized_cap.height, + height_to_supply, + exit, + )?; + + self.realized_price_extra.compute_rest( + blocks, + prices, + starting_indexes, + exit, + Some(&self.realized_price.height), + )?; + + self.investor_price_extra.compute_rest( + blocks, + prices, + starting_indexes, + exit, + Some(&self.investor_price.height), + )?; + + // realized_cap_30d_delta: height-level rolling change + self.realized_cap_30d_delta.height.compute_rolling_change( + starting_indexes.height, + &blocks.count.height_1m_ago, + &self.realized_cap.height, + exit, + )?; + + // === Rolling sum intermediates (must be computed before lazy ratios/EMAs that read them) === + macro_rules! rolling_sum { + ($target:expr, $window:expr, $source:expr) => { + $target.height.compute_rolling_sum( + starting_indexes.height, $window, $source, exit, + )? 
+ }; + } + + // Value created/destroyed rolling sums (from lazy binary totals) + rolling_sum!(self.value_created_24h, &blocks.count.height_24h_ago, &self.value_created.height); + rolling_sum!(self.value_created_7d, &blocks.count.height_1w_ago, &self.value_created.height); + rolling_sum!(self.value_created_30d, &blocks.count.height_1m_ago, &self.value_created.height); + rolling_sum!(self.value_created_1y, &blocks.count.height_1y_ago, &self.value_created.height); + rolling_sum!(self.value_destroyed_24h, &blocks.count.height_24h_ago, &self.value_destroyed.height); + rolling_sum!(self.value_destroyed_7d, &blocks.count.height_1w_ago, &self.value_destroyed.height); + rolling_sum!(self.value_destroyed_30d, &blocks.count.height_1m_ago, &self.value_destroyed.height); + rolling_sum!(self.value_destroyed_1y, &blocks.count.height_1y_ago, &self.value_destroyed.height); + + // Adjusted value created/destroyed rolling sums (from lazy adjusted totals) + if let Some(source) = self.adjusted_value_created.as_ref() { + macro_rules! rolling_sum_opt { + ($target:expr, $window:expr) => { + if let Some(f) = $target.as_mut() { + f.height.compute_rolling_sum( + starting_indexes.height, $window, &source.height, exit, + )?; + } + }; + } + rolling_sum_opt!(self.adjusted_value_created_24h, &blocks.count.height_24h_ago); + rolling_sum_opt!(self.adjusted_value_created_7d, &blocks.count.height_1w_ago); + rolling_sum_opt!(self.adjusted_value_created_30d, &blocks.count.height_1m_ago); + rolling_sum_opt!(self.adjusted_value_created_1y, &blocks.count.height_1y_ago); + } + if let Some(source) = self.adjusted_value_destroyed.as_ref() { + macro_rules! 
rolling_sum_opt { + ($target:expr, $window:expr) => { + if let Some(f) = $target.as_mut() { + f.height.compute_rolling_sum( + starting_indexes.height, $window, &source.height, exit, + )?; + } + }; + } + rolling_sum_opt!(self.adjusted_value_destroyed_24h, &blocks.count.height_24h_ago); + rolling_sum_opt!(self.adjusted_value_destroyed_7d, &blocks.count.height_1w_ago); + rolling_sum_opt!(self.adjusted_value_destroyed_30d, &blocks.count.height_1m_ago); + rolling_sum_opt!(self.adjusted_value_destroyed_1y, &blocks.count.height_1y_ago); + } + + // Realized value rolling sums (for sell_side_risk_ratio) + rolling_sum!(self.realized_value_24h, &blocks.count.height_24h_ago, &self.realized_value.height); + rolling_sum!(self.realized_value_7d, &blocks.count.height_1w_ago, &self.realized_value.height); + rolling_sum!(self.realized_value_30d, &blocks.count.height_1m_ago, &self.realized_value.height); + rolling_sum!(self.realized_value_1y, &blocks.count.height_1y_ago, &self.realized_value.height); + + // Realized profit/loss rolling sums (for realized_profit_to_loss_ratio) + if let Some(f) = self.realized_profit_24h.as_mut() { + f.height.compute_rolling_sum(starting_indexes.height, &blocks.count.height_24h_ago, &self.realized_profit.height, exit)?; + } + if let Some(f) = self.realized_profit_7d.as_mut() { + f.height.compute_rolling_sum(starting_indexes.height, &blocks.count.height_1w_ago, &self.realized_profit.height, exit)?; + } + if let Some(f) = self.realized_profit_30d.as_mut() { + f.height.compute_rolling_sum(starting_indexes.height, &blocks.count.height_1m_ago, &self.realized_profit.height, exit)?; + } + if let Some(f) = self.realized_profit_1y.as_mut() { + f.height.compute_rolling_sum(starting_indexes.height, &blocks.count.height_1y_ago, &self.realized_profit.height, exit)?; + } + if let Some(f) = self.realized_loss_24h.as_mut() { + f.height.compute_rolling_sum(starting_indexes.height, &blocks.count.height_24h_ago, &self.realized_loss.height, exit)?; + } + if let Some(f) = 
self.realized_loss_7d.as_mut() { + f.height.compute_rolling_sum(starting_indexes.height, &blocks.count.height_1w_ago, &self.realized_loss.height, exit)?; + } + if let Some(f) = self.realized_loss_30d.as_mut() { + f.height.compute_rolling_sum(starting_indexes.height, &blocks.count.height_1m_ago, &self.realized_loss.height, exit)?; + } + if let Some(f) = self.realized_loss_1y.as_mut() { + f.height.compute_rolling_sum(starting_indexes.height, &blocks.count.height_1y_ago, &self.realized_loss.height, exit)?; + } + + // 7d rolling average of realized profit (height-level) + self.realized_profit_7d_ema + .height + .compute_rolling_average( + starting_indexes.height, + &blocks.count.height_1w_ago, + &self.realized_profit.height, + exit, + )?; + + // 7d rolling average of realized loss (height-level) + self.realized_loss_7d_ema.height.compute_rolling_average( + starting_indexes.height, + &blocks.count.height_1w_ago, + &self.realized_loss.height, + exit, + )?; + + // 7d rolling average of net realized PnL (height-level) + self.net_realized_pnl_7d_ema + .height + .compute_rolling_average( + starting_indexes.height, + &blocks.count.height_1w_ago, + &self.net_realized_pnl.height, + exit, + )?; + + // 14-day rolling average of sent in profit (sats and dollars) + self.sent_in_profit_14d_ema.compute_rolling_average( + starting_indexes.height, + &blocks.count.height_2w_ago, + &self.sent_in_profit.sats.height, + &self.sent_in_profit.usd.height, + exit, + )?; + + // 14-day rolling average of sent in loss (sats and dollars) + self.sent_in_loss_14d_ema.compute_rolling_average( + starting_indexes.height, + &blocks.count.height_2w_ago, + &self.sent_in_loss.sats.height, + &self.sent_in_loss.usd.height, + exit, + )?; + + // 7d/30d rolling average of SOPR (from 24h rolling ratio) + self.sopr_24h_7d_ema.height.compute_rolling_average( + starting_indexes.height, + &blocks.count.height_1w_ago, + &self.sopr.height, + exit, + )?; + + self.sopr_24h_30d_ema.height.compute_rolling_average( + 
starting_indexes.height, + &blocks.count.height_1m_ago, + &self.sopr.height, + exit, + )?; + + // Optional: adjusted SOPR rolling averages (from 24h rolling ratio) + if let Some(adjusted_sopr) = self.adjusted_sopr.as_ref() { + if let Some(ema_7d) = self.adjusted_sopr_24h_7d_ema.as_mut() { + ema_7d.height.compute_rolling_average( starting_indexes.height, - &self.realized_cap.height, - height_to_supply, - exit, - )?; - Ok(()) - })?; - - if let Some(price) = price { - self.realized_price_extra.compute_rest( - price, - starting_indexes, - exit, - Some(&self.realized_price.dateindex.0), - )?; - - self.investor_price_extra.compute_rest( - price, - starting_indexes, - exit, - Some(&self.investor_price.dateindex.0), - )?; - } - - // realized_cap_30d_delta - self.realized_cap_30d_delta - .compute_all(starting_indexes, exit, |vec| { - vec.compute_change( - starting_indexes.dateindex, - &self.realized_cap.dateindex.0, - 30, - exit, - )?; - Ok(()) - })?; - - // SOPR = value_created / value_destroyed - self.sopr.compute_divide( - starting_indexes.dateindex, - &self.value_created.dateindex.0, - &self.value_destroyed.dateindex.0, - exit, - )?; - - // 7d EMA of realized profit/loss - self.realized_profit_7d_ema.compute_all(starting_indexes, exit, |v| { - Ok(v.compute_ema( - starting_indexes.dateindex, - &self.realized_profit.dateindex.sum.0, - 7, - exit, - )?) - })?; - - self.realized_loss_7d_ema.compute_all(starting_indexes, exit, |v| { - Ok(v.compute_ema( - starting_indexes.dateindex, - &self.realized_loss.dateindex.sum.0, - 7, - exit, - )?) - })?; - - self.net_realized_pnl_7d_ema.compute_all(starting_indexes, exit, |v| { - Ok(v.compute_ema( - starting_indexes.dateindex, - &self.net_realized_pnl.dateindex.sum.0, - 7, - exit, - )?) 
- })?; - - // 14-day EMA of sent in profit (sats and dollars) - self.sent_in_profit_14d_ema.compute_ema( - starting_indexes.dateindex, - &self.sent_in_profit.sats.dateindex.sum.0, - self.sent_in_profit.dollars.as_ref().map(|d| &d.dateindex.sum.0), - 14, - exit, - )?; - - // 14-day EMA of sent in loss (sats and dollars) - self.sent_in_loss_14d_ema.compute_ema( - starting_indexes.dateindex, - &self.sent_in_loss.sats.dateindex.sum.0, - self.sent_in_loss.dollars.as_ref().map(|d| &d.dateindex.sum.0), - 14, - exit, - )?; - - self.sopr_7d_ema - .compute_ema(starting_indexes.dateindex, &self.sopr, 7, exit)?; - - self.sopr_30d_ema - .compute_ema(starting_indexes.dateindex, &self.sopr, 30, exit)?; - - // Optional: adjusted SOPR (lazy: cohort - up_to_1h) - if let (Some(adjusted_sopr), Some(adj_created), Some(adj_destroyed)) = ( - self.adjusted_sopr.as_mut(), - self.adjusted_value_created.as_ref(), - self.adjusted_value_destroyed.as_ref(), - ) { - adjusted_sopr.compute_divide( - starting_indexes.dateindex, - &*adj_created.dateindex, - &*adj_destroyed.dateindex, - exit, - )?; - - if let Some(ema_7d) = self.adjusted_sopr_7d_ema.as_mut() { - ema_7d.compute_ema( - starting_indexes.dateindex, - self.adjusted_sopr.as_ref().unwrap(), - 7, + &blocks.count.height_1w_ago, + &adjusted_sopr.height, exit, )?; } - if let Some(ema_30d) = self.adjusted_sopr_30d_ema.as_mut() { - ema_30d.compute_ema( - starting_indexes.dateindex, - self.adjusted_sopr.as_ref().unwrap(), - 30, + if let Some(ema_30d) = self.adjusted_sopr_24h_30d_ema.as_mut() { + ema_30d.height.compute_rolling_average( + starting_indexes.height, + &blocks.count.height_1m_ago, + &adjusted_sopr.height, exit, )?; } } - // sell_side_risk_ratio = realized_value / realized_cap - self.sell_side_risk_ratio.compute_percentage( - starting_indexes.dateindex, - &self.realized_value.dateindex.0, - &self.realized_cap.dateindex.0, - exit, - )?; + // 7d/30d rolling average of sell_side_risk_ratio (from 24h rolling ratio) + 
self.sell_side_risk_ratio_24h_7d_ema + .height + .compute_rolling_average( + starting_indexes.height, + &blocks.count.height_1w_ago, + &self.sell_side_risk_ratio.height, + exit, + )?; - self.sell_side_risk_ratio_7d_ema.compute_ema( - starting_indexes.dateindex, - &self.sell_side_risk_ratio, - 7, - exit, - )?; + self.sell_side_risk_ratio_24h_30d_ema + .height + .compute_rolling_average( + starting_indexes.height, + &blocks.count.height_1m_ago, + &self.sell_side_risk_ratio.height, + exit, + )?; - self.sell_side_risk_ratio_30d_ema.compute_ema( - starting_indexes.dateindex, - &self.sell_side_risk_ratio, - 30, - exit, - )?; - - // Net realized PnL cumulative 30d delta + // Net realized PnL cumulative 30d delta (height-level rolling change) self.net_realized_pnl_cumulative_30d_delta - .compute_all(starting_indexes, exit, |vec| { - vec.compute_change( - starting_indexes.dateindex, - &self.net_realized_pnl.dateindex.cumulative.0, - 30, - exit, - )?; - Ok(()) - })?; + .height + .compute_rolling_change( + starting_indexes.height, + &blocks.count.height_1m_ago, + &*self.net_realized_pnl.rest.height_cumulative, + exit, + )?; - // Relative to realized cap + // Relative to realized cap (height-level) self.net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap - .compute_all(starting_indexes, exit, |vec| { - vec.compute_percentage( - starting_indexes.dateindex, - &self.net_realized_pnl_cumulative_30d_delta.dateindex, - &self.realized_cap.dateindex.0, + .height + .compute_percentage( + starting_indexes.height, + &self.net_realized_pnl_cumulative_30d_delta.height, + &self.realized_cap.height, + exit, + )?; + + // Relative to market cap (height-level) + if let Some(height_to_market_cap) = height_to_market_cap { + self.net_realized_pnl_cumulative_30d_delta_rel_to_market_cap + .height + .compute_percentage( + starting_indexes.height, + &self.net_realized_pnl_cumulative_30d_delta.height, + height_to_market_cap, exit, )?; - Ok(()) - })?; - // Relative to market cap - if let 
Some(dateindex_to_market_cap) = dateindex_to_market_cap { - self.net_realized_pnl_cumulative_30d_delta_rel_to_market_cap - .compute_all(starting_indexes, exit, |vec| { - vec.compute_percentage( - starting_indexes.dateindex, - &self.net_realized_pnl_cumulative_30d_delta.dateindex, - dateindex_to_market_cap, - exit, - )?; - Ok(()) - })?; - } - - // Optional: realized_cap_rel_to_own_market_cap - if let (Some(rel_vec), Some(height_to_market_cap)) = ( - self.realized_cap_rel_to_own_market_cap.as_mut(), - height_to_market_cap, - ) { - rel_vec.compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_percentage( + // Optional: realized_cap_rel_to_own_market_cap + if let Some(rel_vec) = self.realized_cap_rel_to_own_market_cap.as_mut() { + rel_vec.height.compute_percentage( starting_indexes.height, &self.realized_cap.height, height_to_market_cap, exit, )?; - Ok(()) - })?; - } - - // Optional: realized_profit_to_loss_ratio - if let Some(ratio) = self.realized_profit_to_loss_ratio.as_mut() { - ratio.compute_divide( - starting_indexes.dateindex, - &self.realized_profit.dateindex.sum.0, - &self.realized_loss.dateindex.sum.0, - exit, - )?; + } } Ok(()) diff --git a/crates/brk_computer/src/distribution/metrics/relative.rs b/crates/brk_computer/src/distribution/metrics/relative.rs index 8a1cba8f3..d996f1d01 100644 --- a/crates/brk_computer/src/distribution/metrics/relative.rs +++ b/crates/brk_computer/src/distribution/metrics/relative.rs @@ -2,11 +2,9 @@ use brk_cohort::Filter; use brk_error::Result; use brk_traversable::Traversable; use brk_types::{Dollars, Sats, StoredF32, StoredF64, Version}; -use vecdb::IterableCloneableVec; use crate::internal::{ - LazyBinaryFromDateLast, LazyBinaryFromHeightLast, NegPercentageDollarsF32, - PercentageDollarsF32, PercentageSatsF64, + LazyBinaryFromHeightLast, NegPercentageDollarsF32, PercentageDollarsF32, PercentageSatsF64, }; use super::{ImportConfig, RealizedMetrics, SupplyMetrics, UnrealizedMetrics}; @@ -16,7 +14,8 @@ use 
super::{ImportConfig, RealizedMetrics, SupplyMetrics, UnrealizedMetrics}; #[derive(Clone, Traversable)] pub struct RelativeMetrics { // === Supply Relative to Circulating Supply (lazy from global supply) === - pub supply_rel_to_circulating_supply: Option>, + pub supply_rel_to_circulating_supply: + Option>, // === Supply in Profit/Loss Relative to Own Supply (lazy) === pub supply_in_profit_rel_to_own_supply: LazyBinaryFromHeightLast, @@ -31,7 +30,8 @@ pub struct RelativeMetrics { // === Unrealized vs Market Cap (lazy from global market cap) === pub unrealized_profit_rel_to_market_cap: Option>, - pub unrealized_loss_rel_to_market_cap: Option>, + pub unrealized_loss_rel_to_market_cap: + Option>, pub neg_unrealized_loss_rel_to_market_cap: Option>, pub net_unrealized_pnl_rel_to_market_cap: @@ -66,9 +66,9 @@ pub struct RelativeMetrics { pub invested_capital_in_loss_pct: Option>, - // === Unrealized Peak Regret Relative to Market Cap (date-only, lazy) === + // === Unrealized Peak Regret Relative to Market Cap (lazy) === pub unrealized_peak_regret_rel_to_market_cap: - Option>, + Option>, } impl RelativeMetrics { @@ -77,7 +77,7 @@ impl RelativeMetrics { /// All `rel_to_` metrics are lazy - computed on-demand from their sources. /// `all_supply` provides global sources for `*_rel_to_market_cap` and `*_rel_to_circulating_supply`. /// `realized` provides realized_cap for invested capital percentage metrics. 
- pub fn forced_import( + pub(crate) fn forced_import( cfg: &ImportConfig, unrealized: &UnrealizedMetrics, supply: &SupplyMetrics, @@ -90,145 +90,92 @@ impl RelativeMetrics { let compute_rel_to_all = cfg.compute_rel_to_all(); // Global sources from "all" cohort - let global_supply_sats_height = all_supply.map(|s| &s.total.sats.height); - let global_supply_sats_difficultyepoch = all_supply.map(|s| &s.total.sats.difficultyepoch); - let global_supply_sats_dates = all_supply.map(|s| &s.total.sats.rest.dates); - let global_supply_sats_dateindex = all_supply.map(|s| &s.total.sats.rest.dateindex); - let global_market_cap = all_supply.and_then(|s| s.total.dollars.as_ref()); + let global_supply_sats = all_supply.map(|s| &s.total.sats); + let global_market_cap = all_supply.map(|s| &s.total.usd); // Own market cap source - let own_market_cap = supply.total.dollars.as_ref(); + let own_market_cap = &supply.total.usd; // For "all" cohort, own_market_cap IS the global market cap let market_cap = global_market_cap.or_else(|| { - matches!(cfg.filter, Filter::All).then_some(own_market_cap).flatten() + matches!(cfg.filter, Filter::All).then_some(own_market_cap) }); Ok(Self { - // === Supply Relative to Circulating Supply (lazy from global supply) === + // === Supply Relative to Circulating Supply === supply_rel_to_circulating_supply: (compute_rel_to_all - && global_supply_sats_dates.is_some()) + && global_supply_sats.is_some()) .then(|| { - LazyBinaryFromDateLast::from_both_derived_last::( + LazyBinaryFromHeightLast::from_computed_last::( &cfg.name("supply_rel_to_circulating_supply"), cfg.version + v1, - supply.total.sats.rest.dateindex.boxed_clone(), - &supply.total.sats.rest.dates, - global_supply_sats_dateindex.unwrap().boxed_clone(), - global_supply_sats_dates.unwrap(), + &supply.total.sats, + global_supply_sats.unwrap(), ) }), - // === Supply in Profit/Loss Relative to Own Supply (lazy) === + // === Supply in Profit/Loss Relative to Own Supply === 
supply_in_profit_rel_to_own_supply: - LazyBinaryFromHeightLast::from_height_difficultyepoch_dates::( + LazyBinaryFromHeightLast::from_computed_last::( &cfg.name("supply_in_profit_rel_to_own_supply"), cfg.version + v1, - unrealized.supply_in_profit.height.boxed_clone(), - supply.total.sats.height.boxed_clone(), - unrealized.supply_in_profit.difficultyepoch.sats.boxed_clone(), - supply.total.sats.difficultyepoch.boxed_clone(), - unrealized - .supply_in_profit - .indexes - .sats_dateindex - .boxed_clone(), - &unrealized.supply_in_profit.indexes.sats, - supply.total.sats.rest.dateindex.boxed_clone(), - &supply.total.sats.rest.dates, + &unrealized.supply_in_profit.sats, + &supply.total.sats, ), supply_in_loss_rel_to_own_supply: - LazyBinaryFromHeightLast::from_height_difficultyepoch_dates::( + LazyBinaryFromHeightLast::from_computed_last::( &cfg.name("supply_in_loss_rel_to_own_supply"), cfg.version + v1, - unrealized.supply_in_loss.height.boxed_clone(), - supply.total.sats.height.boxed_clone(), - unrealized.supply_in_loss.difficultyepoch.sats.boxed_clone(), - supply.total.sats.difficultyepoch.boxed_clone(), - unrealized - .supply_in_loss - .indexes - .sats_dateindex - .boxed_clone(), - &unrealized.supply_in_loss.indexes.sats, - supply.total.sats.rest.dateindex.boxed_clone(), - &supply.total.sats.rest.dates, + &unrealized.supply_in_loss.sats, + &supply.total.sats, ), - // === Supply in Profit/Loss Relative to Circulating Supply (lazy from global supply) === + // === Supply in Profit/Loss Relative to Circulating Supply === supply_in_profit_rel_to_circulating_supply: (compute_rel_to_all - && global_supply_sats_height.is_some()) + && global_supply_sats.is_some()) .then(|| { - LazyBinaryFromHeightLast::from_height_difficultyepoch_dates::( + LazyBinaryFromHeightLast::from_computed_last::( &cfg.name("supply_in_profit_rel_to_circulating_supply"), cfg.version + v1, - unrealized.supply_in_profit.height.boxed_clone(), - global_supply_sats_height.unwrap().boxed_clone(), - 
unrealized.supply_in_profit.difficultyepoch.sats.boxed_clone(), - global_supply_sats_difficultyepoch.unwrap().boxed_clone(), - unrealized - .supply_in_profit - .indexes - .sats_dateindex - .boxed_clone(), - &unrealized.supply_in_profit.indexes.sats, - global_supply_sats_dateindex.unwrap().boxed_clone(), - global_supply_sats_dates.unwrap(), + &unrealized.supply_in_profit.sats, + global_supply_sats.unwrap(), ) }), supply_in_loss_rel_to_circulating_supply: (compute_rel_to_all - && global_supply_sats_height.is_some()) + && global_supply_sats.is_some()) .then(|| { - LazyBinaryFromHeightLast::from_height_difficultyepoch_dates::( + LazyBinaryFromHeightLast::from_computed_last::( &cfg.name("supply_in_loss_rel_to_circulating_supply"), cfg.version + v1, - unrealized.supply_in_loss.height.boxed_clone(), - global_supply_sats_height.unwrap().boxed_clone(), - unrealized.supply_in_loss.difficultyepoch.sats.boxed_clone(), - global_supply_sats_difficultyepoch.unwrap().boxed_clone(), - unrealized - .supply_in_loss - .indexes - .sats_dateindex - .boxed_clone(), - &unrealized.supply_in_loss.indexes.sats, - global_supply_sats_dateindex.unwrap().boxed_clone(), - global_supply_sats_dates.unwrap(), + &unrealized.supply_in_loss.sats, + global_supply_sats.unwrap(), ) }), - // === Unrealized vs Market Cap (lazy from global market cap) === - unrealized_profit_rel_to_market_cap: - market_cap.map(|mc| { - LazyBinaryFromHeightLast::from_computed_height_date_and_lazy_binary_block_last::< - PercentageDollarsF32, - _, - _, - >( - &cfg.name("unrealized_profit_rel_to_market_cap"), - cfg.version + v2, - &unrealized.unrealized_profit, - mc, - ) - }), - unrealized_loss_rel_to_market_cap: - market_cap.map(|mc| { - LazyBinaryFromHeightLast::from_computed_height_date_and_lazy_binary_block_last::< - PercentageDollarsF32, - _, - _, - >( - &cfg.name("unrealized_loss_rel_to_market_cap"), - cfg.version + v2, - &unrealized.unrealized_loss, - mc, - ) - }), + // === Unrealized vs Market Cap === + 
unrealized_profit_rel_to_market_cap: market_cap.map(|mc| { + LazyBinaryFromHeightLast::from_block_last_and_lazy_binary_computed_block_last::< + PercentageDollarsF32, _, _, + >( + &cfg.name("unrealized_profit_rel_to_market_cap"), + cfg.version + v2, + &unrealized.unrealized_profit, + mc, + ) + }), + unrealized_loss_rel_to_market_cap: market_cap.map(|mc| { + LazyBinaryFromHeightLast::from_block_last_and_lazy_binary_computed_block_last::< + PercentageDollarsF32, _, _, + >( + &cfg.name("unrealized_loss_rel_to_market_cap"), + cfg.version + v2, + &unrealized.unrealized_loss, + mc, + ) + }), neg_unrealized_loss_rel_to_market_cap: market_cap.map(|mc| { - LazyBinaryFromHeightLast::from_computed_height_date_and_lazy_binary_block_last::< - NegPercentageDollarsF32, - _, - _, + LazyBinaryFromHeightLast::from_block_last_and_lazy_binary_computed_block_last::< + NegPercentageDollarsF32, _, _, >( &cfg.name("neg_unrealized_loss_rel_to_market_cap"), cfg.version + v2, @@ -238,11 +185,7 @@ impl RelativeMetrics { }), net_unrealized_pnl_rel_to_market_cap: market_cap.map(|mc| { LazyBinaryFromHeightLast::from_binary_block_and_lazy_binary_block_last::< - PercentageDollarsF32, - _, - _, - _, - _, + PercentageDollarsF32, _, _, _, _, >( &cfg.name("net_unrealized_pnl_rel_to_market_cap"), cfg.version + v2, @@ -254,11 +197,7 @@ impl RelativeMetrics { // NUPL is a proxy for net_unrealized_pnl_rel_to_market_cap nupl: market_cap.map(|mc| { LazyBinaryFromHeightLast::from_binary_block_and_lazy_binary_block_last::< - PercentageDollarsF32, - _, - _, - _, - _, + PercentageDollarsF32, _, _, _, _, >( &cfg.name("nupl"), cfg.version + v2, @@ -270,74 +209,52 @@ impl RelativeMetrics { // === Unrealized vs Own Market Cap (lazy, optional) === unrealized_profit_rel_to_own_market_cap: (extended && compute_rel_to_all) .then(|| { - own_market_cap.map(|mc| { - LazyBinaryFromHeightLast::from_computed_height_date_and_lazy_binary_block_last::< - PercentageDollarsF32, - _, - _, - >( - 
&cfg.name("unrealized_profit_rel_to_own_market_cap"), - cfg.version + v2, - &unrealized.unrealized_profit, - mc, - ) - }) - }) - .flatten(), + LazyBinaryFromHeightLast::from_block_last_and_lazy_binary_computed_block_last::< + PercentageDollarsF32, _, _, + >( + &cfg.name("unrealized_profit_rel_to_own_market_cap"), + cfg.version + v2, + &unrealized.unrealized_profit, + own_market_cap, + ) + }), unrealized_loss_rel_to_own_market_cap: (extended && compute_rel_to_all) .then(|| { - own_market_cap.map(|mc| { - LazyBinaryFromHeightLast::from_computed_height_date_and_lazy_binary_block_last::< - PercentageDollarsF32, - _, - _, - >( - &cfg.name("unrealized_loss_rel_to_own_market_cap"), - cfg.version + v2, - &unrealized.unrealized_loss, - mc, - ) - }) - }) - .flatten(), + LazyBinaryFromHeightLast::from_block_last_and_lazy_binary_computed_block_last::< + PercentageDollarsF32, _, _, + >( + &cfg.name("unrealized_loss_rel_to_own_market_cap"), + cfg.version + v2, + &unrealized.unrealized_loss, + own_market_cap, + ) + }), neg_unrealized_loss_rel_to_own_market_cap: (extended && compute_rel_to_all) .then(|| { - own_market_cap.map(|mc| { - LazyBinaryFromHeightLast::from_computed_height_date_and_lazy_binary_block_last::< - NegPercentageDollarsF32, - _, - _, - >( - &cfg.name("neg_unrealized_loss_rel_to_own_market_cap"), - cfg.version + v2, - &unrealized.unrealized_loss, - mc, - ) - }) - }) - .flatten(), + LazyBinaryFromHeightLast::from_block_last_and_lazy_binary_computed_block_last::< + NegPercentageDollarsF32, _, _, + >( + &cfg.name("neg_unrealized_loss_rel_to_own_market_cap"), + cfg.version + v2, + &unrealized.unrealized_loss, + own_market_cap, + ) + }), net_unrealized_pnl_rel_to_own_market_cap: (extended && compute_rel_to_all) .then(|| { - own_market_cap.map(|mc| { - LazyBinaryFromHeightLast::from_binary_block_and_lazy_binary_block_last::< - PercentageDollarsF32, - _, - _, - _, - _, - >( - &cfg.name("net_unrealized_pnl_rel_to_own_market_cap"), - cfg.version + v2, - 
&unrealized.net_unrealized_pnl, - mc, - ) - }) - }) - .flatten(), + LazyBinaryFromHeightLast::from_binary_block_and_lazy_binary_block_last::< + PercentageDollarsF32, _, _, _, _, + >( + &cfg.name("net_unrealized_pnl_rel_to_own_market_cap"), + cfg.version + v2, + &unrealized.net_unrealized_pnl, + own_market_cap, + ) + }), // === Unrealized vs Own Total Unrealized PnL (lazy, optional) === unrealized_profit_rel_to_own_total_unrealized_pnl: extended.then(|| { - LazyBinaryFromHeightLast::from_computed_height_date_and_binary_block::( + LazyBinaryFromHeightLast::from_block_last_and_binary_block::( &cfg.name("unrealized_profit_rel_to_own_total_unrealized_pnl"), cfg.version + v1, &unrealized.unrealized_profit, @@ -345,7 +262,7 @@ impl RelativeMetrics { ) }), unrealized_loss_rel_to_own_total_unrealized_pnl: extended.then(|| { - LazyBinaryFromHeightLast::from_computed_height_date_and_binary_block::( + LazyBinaryFromHeightLast::from_block_last_and_binary_block::( &cfg.name("unrealized_loss_rel_to_own_total_unrealized_pnl"), cfg.version + v1, &unrealized.unrealized_loss, @@ -353,7 +270,7 @@ impl RelativeMetrics { ) }), neg_unrealized_loss_rel_to_own_total_unrealized_pnl: extended.then(|| { - LazyBinaryFromHeightLast::from_computed_height_date_and_binary_block::( + LazyBinaryFromHeightLast::from_block_last_and_binary_block::( &cfg.name("neg_unrealized_loss_rel_to_own_total_unrealized_pnl"), cfg.version + v1, &unrealized.unrealized_loss, @@ -371,9 +288,8 @@ impl RelativeMetrics { // === Invested Capital in Profit/Loss as % of Realized Cap === invested_capital_in_profit_pct: realized.map(|r| { - LazyBinaryFromHeightLast::from_computed_height_date_and_lazy_block_last::< - PercentageDollarsF32, - _, + LazyBinaryFromHeightLast::from_block_last_and_lazy_block_last::< + PercentageDollarsF32, _, >( &cfg.name("invested_capital_in_profit_pct"), cfg.version, @@ -382,9 +298,8 @@ impl RelativeMetrics { ) }), invested_capital_in_loss_pct: realized.map(|r| { - 
LazyBinaryFromHeightLast::from_computed_height_date_and_lazy_block_last::< - PercentageDollarsF32, - _, + LazyBinaryFromHeightLast::from_block_last_and_lazy_block_last::< + PercentageDollarsF32, _, >( &cfg.name("invested_capital_in_loss_pct"), cfg.version, @@ -393,18 +308,19 @@ impl RelativeMetrics { ) }), - // === Peak Regret Relative to Market Cap (date-only, lazy) === + // === Peak Regret Relative to Market Cap === unrealized_peak_regret_rel_to_market_cap: unrealized .peak_regret .as_ref() .zip(market_cap) .map(|(pr, mc)| { - LazyBinaryFromDateLast::from_computed_and_derived_last::( + LazyBinaryFromHeightLast::from_block_last_and_lazy_binary_computed_block_last::< + PercentageDollarsF32, _, _, + >( &cfg.name("unrealized_peak_regret_rel_to_market_cap"), cfg.version, pr, - mc.rest.dateindex.boxed_clone(), - &mc.rest.dates, + mc, ) }), }) diff --git a/crates/brk_computer/src/distribution/metrics/supply.rs b/crates/brk_computer/src/distribution/metrics/supply.rs index 1b96ae8b1..9661e3012 100644 --- a/crates/brk_computer/src/distribution/metrics/supply.rs +++ b/crates/brk_computer/src/distribution/metrics/supply.rs @@ -2,52 +2,48 @@ use brk_error::Result; use brk_traversable::Traversable; use brk_types::{Height, Sats, Version}; -use crate::ComputeIndexes; +use crate::{ComputeIndexes, blocks}; use rayon::prelude::*; -use vecdb::{AnyStoredVec, AnyVec, Exit, GenericStoredVec}; +use vecdb::{AnyStoredVec, AnyVec, Exit, Rw, StorageMode, WritableVec}; -use crate::{ - indexes, - internal::{ - HalfClosePriceTimesSats, HalveDollars, HalveSats, HalveSatsToBitcoin, - LazyBinaryValueFromHeightLast, ValueChangeFromDate, ValueFromHeightLast, - }, +use crate::internal::{ + HalfPriceTimesSats, HalveDollars, HalveSats, HalveSatsToBitcoin, + LazyBinaryValueFromHeightLast, ValueChangeFromHeight, ValueFromHeightLast, }; use super::ImportConfig; /// Supply metrics for a cohort. 
-#[derive(Clone, Traversable)] -pub struct SupplyMetrics { - pub total: ValueFromHeightLast, +#[derive(Traversable)] +pub struct SupplyMetrics { + pub total: ValueFromHeightLast, pub halved: LazyBinaryValueFromHeightLast, /// 30-day change in supply (net position change) - sats, btc, usd - pub _30d_change: ValueChangeFromDate, + pub _30d_change: ValueChangeFromHeight, } impl SupplyMetrics { /// Import supply metrics from database. - pub fn forced_import(cfg: &ImportConfig) -> Result { + pub(crate) fn forced_import(cfg: &ImportConfig) -> Result { let supply = ValueFromHeightLast::forced_import( cfg.db, &cfg.name("supply"), cfg.version, cfg.indexes, - cfg.price, + cfg.prices, )?; let supply_halved = LazyBinaryValueFromHeightLast::from_block_source::< HalveSats, HalveSatsToBitcoin, - HalfClosePriceTimesSats, + HalfPriceTimesSats, HalveDollars, - >(&cfg.name("supply_halved"), &supply, cfg.price, cfg.version); + >(&cfg.name("supply_halved"), &supply, cfg.prices, cfg.version); - let _30d_change = ValueChangeFromDate::forced_import( + let _30d_change = ValueChangeFromHeight::forced_import( cfg.db, &cfg.name("_30d_change"), cfg.version, - cfg.compute_dollars(), cfg.indexes, )?; @@ -59,29 +55,29 @@ impl SupplyMetrics { } /// Get minimum length across height-indexed vectors. - pub fn min_len(&self) -> usize { + pub(crate) fn min_len(&self) -> usize { self.total.sats.height.len() } /// Push supply state values to height-indexed vectors. - pub fn truncate_push(&mut self, height: Height, supply: Sats) -> Result<()> { + pub(crate) fn truncate_push(&mut self, height: Height, supply: Sats) -> Result<()> { self.total.sats.height.truncate_push(height, supply)?; Ok(()) } /// Returns a parallel iterator over all vecs for parallel writing. 
- pub fn par_iter_mut(&mut self) -> impl ParallelIterator { + pub(crate) fn par_iter_mut(&mut self) -> impl ParallelIterator { vec![&mut self.total.sats.height as &mut dyn AnyStoredVec].into_par_iter() } /// Validate computed versions against base version. - pub fn validate_computed_versions(&mut self, _base_version: Version) -> Result<()> { + pub(crate) fn validate_computed_versions(&mut self, _base_version: Version) -> Result<()> { // Validation logic for computed vecs Ok(()) } /// Compute aggregate values from separate cohorts. - pub fn compute_from_stateful( + pub(crate) fn compute_from_stateful( &mut self, starting_indexes: &ComputeIndexes, others: &[&Self], @@ -99,23 +95,18 @@ impl SupplyMetrics { } /// Compute derived vecs from existing height data. - pub fn compute_rest_part1( + pub(crate) fn compute_rest_part1( &mut self, - indexes: &indexes::Vecs, + blocks: &blocks::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.total.compute_rest(indexes, starting_indexes, exit)?; - - // 30-day change in supply - self._30d_change.compute_change( - starting_indexes.dateindex, - &self.total.sats.dateindex.0, - self.total.dollars.as_ref().map(|d| &d.dateindex.0), - 30, + self._30d_change.compute_rolling( + starting_indexes.height, + &blocks.count.height_1m_ago, + &self.total.sats.height, + &self.total.usd.height, exit, - )?; - - Ok(()) + ) } } diff --git a/crates/brk_computer/src/distribution/metrics/unrealized.rs b/crates/brk_computer/src/distribution/metrics/unrealized.rs index 622563d8f..ad95728b8 100644 --- a/crates/brk_computer/src/distribution/metrics/unrealized.rs +++ b/crates/brk_computer/src/distribution/metrics/unrealized.rs @@ -1,57 +1,56 @@ use brk_error::Result; use brk_traversable::Traversable; -use brk_types::{CentsSats, CentsSquaredSats, CentsUnsigned, DateIndex, Dollars, Height, Version}; +use brk_types::{Cents, CentsSats, CentsSquaredSats, Dollars, Height, Version}; use rayon::prelude::*; use vecdb::{ - AnyStoredVec, AnyVec, 
BytesVec, Exit, GenericStoredVec, ImportableVec, Negate, - TypedVecIterator, + AnyStoredVec, AnyVec, BytesVec, Exit, WritableVec, ImportableVec, ReadableCloneableVec, + ReadableVec, Negate, Rw, StorageMode, }; use crate::{ ComputeIndexes, distribution::state::UnrealizedState, - indexes, internal::{ - ComputedFromDateLast, ComputedFromHeightAndDateLast, ComputedFromHeightLast, DollarsMinus, - DollarsPlus, LazyBinaryFromHeightLast, LazyFromHeightLast, ValueFromHeightAndDateLast, + ComputedFromHeightLast, DollarsMinus, DollarsPlus, + LazyBinaryFromHeightLast, LazyFromHeightLast, ValueFromHeightLast, }, - price, + prices, }; use super::ImportConfig; /// Unrealized profit/loss metrics. -#[derive(Clone, Traversable)] -pub struct UnrealizedMetrics { +#[derive(Traversable)] +pub struct UnrealizedMetrics { // === Supply in Profit/Loss === - pub supply_in_profit: ValueFromHeightAndDateLast, - pub supply_in_loss: ValueFromHeightAndDateLast, + pub supply_in_profit: ValueFromHeightLast, + pub supply_in_loss: ValueFromHeightLast, // === Unrealized Profit/Loss === - pub unrealized_profit: ComputedFromHeightAndDateLast, - pub unrealized_loss: ComputedFromHeightAndDateLast, + pub unrealized_profit: ComputedFromHeightLast, + pub unrealized_loss: ComputedFromHeightLast, // === Invested Capital in Profit/Loss === - pub invested_capital_in_profit: ComputedFromHeightAndDateLast, - pub invested_capital_in_loss: ComputedFromHeightAndDateLast, + pub invested_capital_in_profit: ComputedFromHeightLast, + pub invested_capital_in_loss: ComputedFromHeightLast, // === Raw values for precise aggregation (used to compute pain/greed indices) === /// Σ(price × sats) for UTXOs in profit (raw u128, no indexes) - pub invested_capital_in_profit_raw: BytesVec, + pub invested_capital_in_profit_raw: M::Stored>, /// Σ(price × sats) for UTXOs in loss (raw u128, no indexes) - pub invested_capital_in_loss_raw: BytesVec, + pub invested_capital_in_loss_raw: M::Stored>, /// Σ(price² × sats) for UTXOs in profit 
(raw u128, no indexes) - pub investor_cap_in_profit_raw: BytesVec, + pub investor_cap_in_profit_raw: M::Stored>, /// Σ(price² × sats) for UTXOs in loss (raw u128, no indexes) - pub investor_cap_in_loss_raw: BytesVec, + pub investor_cap_in_loss_raw: M::Stored>, // === Pain/Greed Indices (computed in compute_rest from raw values + spot price) === /// investor_price_of_losers - spot (average distance underwater, weighted by $) - pub pain_index: ComputedFromHeightLast, + pub pain_index: ComputedFromHeightLast, /// spot - investor_price_of_winners (average distance in profit, weighted by $) - pub greed_index: ComputedFromHeightLast, + pub greed_index: ComputedFromHeightLast, /// greed_index - pain_index (positive = greedy market, negative = painful market) - pub net_sentiment: ComputedFromHeightLast, + pub net_sentiment: ComputedFromHeightLast, // === Negated === pub neg_unrealized_loss: LazyFromHeightLast, @@ -64,40 +63,36 @@ pub struct UnrealizedMetrics { /// Unrealized peak regret: sum of (peak_price - reference_price) × supply /// where reference_price = max(spot, cost_basis) and peak = max price during holding period. /// Only computed for age_range cohorts, then aggregated for overlapping cohorts. - pub peak_regret: Option>, + pub peak_regret: Option>, } impl UnrealizedMetrics { /// Import unrealized metrics from database. 
- pub fn forced_import(cfg: &ImportConfig) -> Result { - let compute_dollars = cfg.compute_dollars(); - + pub(crate) fn forced_import(cfg: &ImportConfig) -> Result { // === Supply in Profit/Loss === - let supply_in_profit = ValueFromHeightAndDateLast::forced_import( + let supply_in_profit = ValueFromHeightLast::forced_import( cfg.db, &cfg.name("supply_in_profit"), cfg.version, - compute_dollars, cfg.indexes, - cfg.price, + cfg.prices, )?; - let supply_in_loss = ValueFromHeightAndDateLast::forced_import( + let supply_in_loss = ValueFromHeightLast::forced_import( cfg.db, &cfg.name("supply_in_loss"), cfg.version, - compute_dollars, cfg.indexes, - cfg.price, + cfg.prices, )?; // === Unrealized Profit/Loss === - let unrealized_profit = ComputedFromHeightAndDateLast::forced_import( + let unrealized_profit = ComputedFromHeightLast::forced_import( cfg.db, &cfg.name("unrealized_profit"), cfg.version, cfg.indexes, )?; - let unrealized_loss = ComputedFromHeightAndDateLast::forced_import( + let unrealized_loss = ComputedFromHeightLast::forced_import( cfg.db, &cfg.name("unrealized_loss"), cfg.version, @@ -105,13 +100,13 @@ impl UnrealizedMetrics { )?; // === Invested Capital in Profit/Loss === - let invested_capital_in_profit = ComputedFromHeightAndDateLast::forced_import( + let invested_capital_in_profit = ComputedFromHeightLast::forced_import( cfg.db, &cfg.name("invested_capital_in_profit"), cfg.version, cfg.indexes, )?; - let invested_capital_in_loss = ComputedFromHeightAndDateLast::forced_import( + let invested_capital_in_loss = ComputedFromHeightLast::forced_import( cfg.db, &cfg.name("invested_capital_in_loss"), cfg.version, @@ -155,33 +150,32 @@ impl UnrealizedMetrics { )?; // === Negated === - let neg_unrealized_loss = LazyFromHeightLast::from_computed_height_date::( + let neg_unrealized_loss = LazyFromHeightLast::from_computed::( &cfg.name("neg_unrealized_loss"), cfg.version, + unrealized_loss.height.read_only_boxed_clone(), &unrealized_loss, ); // === Net and Total === 
- let net_unrealized_pnl = - LazyBinaryFromHeightLast::from_computed_height_date_last::( - &cfg.name("net_unrealized_pnl"), - cfg.version, - &unrealized_profit, - &unrealized_loss, - ); - let total_unrealized_pnl = - LazyBinaryFromHeightLast::from_computed_height_date_last::( - &cfg.name("total_unrealized_pnl"), - cfg.version, - &unrealized_profit, - &unrealized_loss, - ); + let net_unrealized_pnl = LazyBinaryFromHeightLast::from_computed_last::( + &cfg.name("net_unrealized_pnl"), + cfg.version, + &unrealized_profit, + &unrealized_loss, + ); + let total_unrealized_pnl = LazyBinaryFromHeightLast::from_computed_last::( + &cfg.name("total_unrealized_pnl"), + cfg.version, + &unrealized_profit, + &unrealized_loss, + ); // Peak regret: only for age-based UTXO cohorts let peak_regret = cfg .compute_peak_regret() .then(|| { - ComputedFromDateLast::forced_import( + ComputedFromHeightLast::forced_import( cfg.db, &cfg.name("unrealized_peak_regret"), cfg.version, @@ -212,11 +206,12 @@ impl UnrealizedMetrics { } /// Get minimum length across height-indexed vectors written in block loop. - pub fn min_stateful_height_len(&self) -> usize { + pub(crate) fn min_stateful_height_len(&self) -> usize { self.supply_in_profit + .sats .height .len() - .min(self.supply_in_loss.height.len()) + .min(self.supply_in_loss.sats.height.len()) .min(self.unrealized_profit.height.len()) .min(self.unrealized_loss.height.len()) .min(self.invested_capital_in_profit.height.len()) @@ -227,36 +222,14 @@ impl UnrealizedMetrics { .min(self.investor_cap_in_loss_raw.len()) } - /// Get minimum length across dateindex-indexed vectors written in block loop. 
- pub fn min_stateful_dateindex_len(&self) -> usize { - let mut min = self - .supply_in_profit - .indexes - .sats_dateindex - .len() - .min(self.supply_in_loss.indexes.sats_dateindex.len()) - .min(self.unrealized_profit.dateindex.len()) - .min(self.unrealized_loss.dateindex.len()) - .min(self.invested_capital_in_profit.dateindex.len()) - .min(self.invested_capital_in_loss.dateindex.len()); - if let Some(pr) = &self.peak_regret { - min = min.min(pr.dateindex.len()); - } - min - } - /// Push unrealized state values to height-indexed vectors. - pub fn truncate_push( - &mut self, - height: Height, - dateindex: Option, - height_state: &UnrealizedState, - date_state: Option<&UnrealizedState>, - ) -> Result<()> { + pub(crate) fn truncate_push(&mut self, height: Height, height_state: &UnrealizedState) -> Result<()> { self.supply_in_profit + .sats .height .truncate_push(height, height_state.supply_in_profit)?; self.supply_in_loss + .sats .height .truncate_push(height, height_state.supply_in_loss)?; self.unrealized_profit @@ -290,38 +263,14 @@ impl UnrealizedMetrics { CentsSquaredSats::new(height_state.investor_cap_in_loss_raw), )?; - if let (Some(dateindex), Some(date_state)) = (dateindex, date_state) { - self.supply_in_profit - .indexes - .sats_dateindex - .truncate_push(dateindex, date_state.supply_in_profit)?; - self.supply_in_loss - .indexes - .sats_dateindex - .truncate_push(dateindex, date_state.supply_in_loss)?; - self.unrealized_profit - .dateindex - .truncate_push(dateindex, date_state.unrealized_profit.to_dollars())?; - self.unrealized_loss - .dateindex - .truncate_push(dateindex, date_state.unrealized_loss.to_dollars())?; - self.invested_capital_in_profit.dateindex.truncate_push( - dateindex, - date_state.invested_capital_in_profit.to_dollars(), - )?; - self.invested_capital_in_loss - .dateindex - .truncate_push(dateindex, date_state.invested_capital_in_loss.to_dollars())?; - } - Ok(()) } /// Returns a parallel iterator over all vecs for parallel writing. 
- pub fn par_iter_mut(&mut self) -> impl ParallelIterator { + pub(crate) fn par_iter_mut(&mut self) -> impl ParallelIterator { let mut vecs: Vec<&mut dyn AnyStoredVec> = vec![ - &mut self.supply_in_profit.height, - &mut self.supply_in_loss.height, + &mut self.supply_in_profit.sats.height, + &mut self.supply_in_loss.sats.height, &mut self.unrealized_profit.height, &mut self.unrealized_loss.height, &mut self.invested_capital_in_profit.height, @@ -330,39 +279,33 @@ impl UnrealizedMetrics { &mut self.invested_capital_in_loss_raw, &mut self.investor_cap_in_profit_raw, &mut self.investor_cap_in_loss_raw, - &mut self.supply_in_profit.indexes.sats_dateindex, - &mut self.supply_in_loss.indexes.sats_dateindex, - &mut self.unrealized_profit.rest.dateindex, - &mut self.unrealized_loss.rest.dateindex, - &mut self.invested_capital_in_profit.rest.dateindex, - &mut self.invested_capital_in_loss.rest.dateindex, ]; if let Some(pr) = &mut self.peak_regret { - vecs.push(&mut pr.dateindex); + vecs.push(&mut pr.height); } vecs.into_par_iter() } /// Compute aggregate values from separate cohorts. 
- pub fn compute_from_stateful( + pub(crate) fn compute_from_stateful( &mut self, starting_indexes: &ComputeIndexes, others: &[&Self], exit: &Exit, ) -> Result<()> { - self.supply_in_profit.height.compute_sum_of_others( + self.supply_in_profit.sats.height.compute_sum_of_others( starting_indexes.height, &others .iter() - .map(|v| &v.supply_in_profit.height) + .map(|v| &v.supply_in_profit.sats.height) .collect::>(), exit, )?; - self.supply_in_loss.height.compute_sum_of_others( + self.supply_in_loss.sats.height.compute_sum_of_others( starting_indexes.height, &others .iter() - .map(|v| &v.supply_in_loss.height) + .map(|v| &v.supply_in_loss.sats.height) .collect::>(), exit, )?; @@ -402,19 +345,6 @@ impl UnrealizedMetrics { )?; // Raw values for aggregation - manually sum since BytesVec doesn't have compute_sum_of_others - // Create iterators for each source vec - let mut iters: Vec<_> = others - .iter() - .filter_map(|o| { - Some(( - o.invested_capital_in_profit_raw.iter().ok()?, - o.invested_capital_in_loss_raw.iter().ok()?, - o.investor_cap_in_profit_raw.iter().ok()?, - o.investor_cap_in_loss_raw.iter().ok()?, - )) - }) - .collect(); - // Start from where the target vecs left off (handles fresh/reset vecs) let start = self .invested_capital_in_profit_raw @@ -437,11 +367,11 @@ impl UnrealizedMetrics { let mut sum_investor_profit = CentsSquaredSats::ZERO; let mut sum_investor_loss = CentsSquaredSats::ZERO; - for (ip_iter, il_iter, cap_p_iter, cap_l_iter) in &mut iters { - sum_invested_profit += ip_iter.get_unwrap(height); - sum_invested_loss += il_iter.get_unwrap(height); - sum_investor_profit += cap_p_iter.get_unwrap(height); - sum_investor_loss += cap_l_iter.get_unwrap(height); + for o in others.iter() { + sum_invested_profit += o.invested_capital_in_profit_raw.collect_one_at(i).unwrap(); + sum_invested_loss += o.invested_capital_in_loss_raw.collect_one_at(i).unwrap(); + sum_investor_profit += o.investor_cap_in_profit_raw.collect_one_at(i).unwrap(); + 
sum_investor_loss += o.investor_cap_in_loss_raw.collect_one_at(i).unwrap(); } self.invested_capital_in_profit_raw @@ -454,75 +384,16 @@ impl UnrealizedMetrics { .truncate_push(height, sum_investor_loss)?; } - self.supply_in_profit - .indexes - .sats_dateindex - .compute_sum_of_others( - starting_indexes.dateindex, - &others - .iter() - .map(|v| &v.supply_in_profit.indexes.sats_dateindex) - .collect::>(), - exit, - )?; - self.supply_in_loss - .indexes - .sats_dateindex - .compute_sum_of_others( - starting_indexes.dateindex, - &others - .iter() - .map(|v| &v.supply_in_loss.indexes.sats_dateindex) - .collect::>(), - exit, - )?; - self.unrealized_profit.dateindex.compute_sum_of_others( - starting_indexes.dateindex, - &others - .iter() - .map(|v| &v.unrealized_profit.dateindex) - .collect::>(), - exit, - )?; - self.unrealized_loss.dateindex.compute_sum_of_others( - starting_indexes.dateindex, - &others - .iter() - .map(|v| &v.unrealized_loss.dateindex) - .collect::>(), - exit, - )?; - self.invested_capital_in_profit - .dateindex - .compute_sum_of_others( - starting_indexes.dateindex, - &others - .iter() - .map(|v| &v.invested_capital_in_profit.dateindex) - .collect::>(), - exit, - )?; - self.invested_capital_in_loss - .dateindex - .compute_sum_of_others( - starting_indexes.dateindex, - &others - .iter() - .map(|v| &v.invested_capital_in_loss.dateindex) - .collect::>(), - exit, - )?; - // Peak regret aggregation (only if this cohort has peak_regret) if let Some(pr) = &mut self.peak_regret { - let other_prs: Vec<_> = others.iter().filter_map(|v| v.peak_regret.as_ref()).collect(); + let other_prs: Vec<_> = others + .iter() + .filter_map(|v| v.peak_regret.as_ref()) + .collect(); if !other_prs.is_empty() { - pr.dateindex.compute_sum_of_others( - starting_indexes.dateindex, - &other_prs - .iter() - .map(|v| &v.dateindex) - .collect::>(), + pr.height.compute_sum_of_others( + starting_indexes.height, + &other_prs.iter().map(|v| &v.height).collect::>(), exit, )?; } @@ -532,71 
+403,53 @@ impl UnrealizedMetrics { } /// Compute derived metrics from stored values + price. - pub fn compute_rest( + pub(crate) fn compute_rest( &mut self, - indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.supply_in_profit - .compute_dollars_from_price(price, starting_indexes, exit)?; - - self.supply_in_loss - .compute_dollars_from_price(price, starting_indexes, exit)?; - - // Compute pain/greed/net from raw values + spot price - let Some(price) = price else { - return Ok(()); - }; + // Height-based types now have lazy day1, no compute_rest needed. // Pain index: investor_price_of_losers - spot - self.pain_index - .compute_all(indexes, starting_indexes, exit, |vec| { - Ok(vec.compute_transform3( - starting_indexes.height, - &self.investor_cap_in_loss_raw, - &self.invested_capital_in_loss_raw, - &price.cents.split.height.close, - |(h, investor_cap, invested_cap, spot, ..)| { - if invested_cap.inner() == 0 { - return (h, Dollars::ZERO); - } - let investor_price_losers = investor_cap.inner() / invested_cap.inner(); - let spot_u128 = (*spot).as_u128(); - ( - h, - CentsUnsigned::new((investor_price_losers - spot_u128) as u64) - .to_dollars(), - ) - }, - exit, - )?) 
- })?; + self.pain_index.height.compute_transform3( + starting_indexes.height, + &self.investor_cap_in_loss_raw, + &self.invested_capital_in_loss_raw, + &prices.cents.price, + |(h, investor_cap, invested_cap, spot, ..)| { + if invested_cap.inner() == 0 { + return (h, Dollars::ZERO); + } + let investor_price_losers = investor_cap.inner() / invested_cap.inner(); + let spot_u128 = spot.as_u128(); + ( + h, + Cents::new((investor_price_losers - spot_u128) as u64).to_dollars(), + ) + }, + exit, + )?; // Greed index: spot - investor_price_of_winners - self.greed_index - .compute_all(indexes, starting_indexes, exit, |vec| { - Ok(vec.compute_transform3( - starting_indexes.height, - &self.investor_cap_in_profit_raw, - &self.invested_capital_in_profit_raw, - &price.cents.split.height.close, - |(h, investor_cap, invested_cap, spot, ..)| { - if invested_cap.inner() == 0 { - return (h, Dollars::ZERO); - } - let investor_price_winners = investor_cap.inner() / invested_cap.inner(); - let spot_u128 = (*spot).as_u128(); - ( - h, - CentsUnsigned::new((spot_u128 - investor_price_winners) as u64) - .to_dollars(), - ) - }, - exit, - )?) - })?; + self.greed_index.height.compute_transform3( + starting_indexes.height, + &self.investor_cap_in_profit_raw, + &self.invested_capital_in_profit_raw, + &prices.cents.price, + |(h, investor_cap, invested_cap, spot, ..)| { + if invested_cap.inner() == 0 { + return (h, Dollars::ZERO); + } + let investor_price_winners = investor_cap.inner() / invested_cap.inner(); + let spot_u128 = spot.as_u128(); + ( + h, + Cents::new((spot_u128 - investor_price_winners) as u64).to_dollars(), + ) + }, + exit, + )?; // Net sentiment height (greed - pain) computed separately for separate cohorts only // Aggregate cohorts compute it via weighted average in compute_from_stateful @@ -607,7 +460,11 @@ impl UnrealizedMetrics { /// Compute net_sentiment.height for separate cohorts (greed - pain). 
/// Aggregate cohorts skip this - their height is computed via weighted average in compute_from_stateful. - pub fn compute_net_sentiment_height(&mut self, starting_indexes: &ComputeIndexes, exit: &Exit) -> Result<()> { + pub(crate) fn compute_net_sentiment_height( + &mut self, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { Ok(self.net_sentiment.height.compute_subtract( starting_indexes.height, &self.greed_index.height, @@ -615,15 +472,4 @@ impl UnrealizedMetrics { exit, )?) } - - /// Compute net_sentiment dateindex derivation from height. - /// Called for ALL cohorts after height is computed (either via greed-pain or weighted avg). - pub fn compute_net_sentiment_rest( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.net_sentiment.compute_rest(indexes, starting_indexes, exit) - } } diff --git a/crates/brk_computer/src/distribution/range_map.rs b/crates/brk_computer/src/distribution/range_map.rs index 2dcdab906..5df4849c8 100644 --- a/crates/brk_computer/src/distribution/range_map.rs +++ b/crates/brk_computer/src/distribution/range_map.rs @@ -35,7 +35,7 @@ impl Default for RangeMap { impl + Copy + Default> RangeMap { /// Create with pre-allocated capacity. - pub fn with_capacity(capacity: usize) -> Self { + pub(crate) fn with_capacity(capacity: usize) -> Self { Self { first_indexes: Vec::with_capacity(capacity), cache: [(I::default(), I::default(), V::default(), 0); CACHE_SIZE], @@ -47,7 +47,7 @@ impl + Copy + Default> RangeMap { /// Push a new first_index. Value is implicitly the current length. /// Must be called in order (first_index must be >= all previous). #[inline] - pub fn push(&mut self, first_index: I) { + pub(crate) fn push(&mut self, first_index: I) { debug_assert!( self.first_indexes .last() @@ -60,7 +60,7 @@ impl + Copy + Default> RangeMap { /// Look up value for an index, checking cache first. 
/// Returns the value (position) of the largest first_index <= given index. #[inline] - pub fn get(&mut self, index: I) -> Option { + pub(crate) fn get(&mut self, index: I) -> Option { if self.first_indexes.is_empty() { return None; } diff --git a/crates/brk_computer/src/distribution/state/block.rs b/crates/brk_computer/src/distribution/state/block.rs index a36dbd4da..ffa340b09 100644 --- a/crates/brk_computer/src/distribution/state/block.rs +++ b/crates/brk_computer/src/distribution/state/block.rs @@ -1,6 +1,6 @@ use std::ops::{Add, AddAssign, SubAssign}; -use brk_types::{CentsUnsigned, SupplyState, Timestamp}; +use brk_types::{Cents, SupplyState, Timestamp}; use serde::Serialize; #[derive(Debug, Clone, Serialize)] @@ -8,7 +8,7 @@ pub struct BlockState { #[serde(flatten)] pub supply: SupplyState, #[serde(skip)] - pub price: Option, + pub price: Cents, #[serde(skip)] pub timestamp: Timestamp, } diff --git a/crates/brk_computer/src/distribution/state/cohort/address.rs b/crates/brk_computer/src/distribution/state/cohort/address.rs index 3245f0cb2..323e15ff5 100644 --- a/crates/brk_computer/src/distribution/state/cohort/address.rs +++ b/crates/brk_computer/src/distribution/state/cohort/address.rs @@ -1,7 +1,7 @@ use std::path::Path; use brk_error::Result; -use brk_types::{Age, CentsUnsigned, FundedAddressData, Height, Sats, SupplyState}; +use brk_types::{Age, Cents, FundedAddressData, Sats, SupplyState}; use vecdb::unlikely; use super::{super::cost_basis::RealizedState, base::CohortState}; @@ -9,52 +9,45 @@ use super::{super::cost_basis::RealizedState, base::CohortState}; /// Significant digits for address cost basis prices (after rounding to dollars). 
const COST_BASIS_PRICE_DIGITS: i32 = 4; -#[derive(Clone)] pub struct AddressCohortState { pub addr_count: u64, pub inner: CohortState, } impl AddressCohortState { - pub fn new(path: &Path, name: &str, compute_dollars: bool) -> Self { + pub(crate) fn new(path: &Path, name: &str) -> Self { Self { addr_count: 0, - inner: CohortState::new(path, name, compute_dollars) + inner: CohortState::new(path, name) .with_price_rounding(COST_BASIS_PRICE_DIGITS), } } /// Reset state for fresh start. - pub fn reset(&mut self) { + pub(crate) fn reset(&mut self) { self.addr_count = 0; self.inner.supply = SupplyState::default(); self.inner.sent = Sats::ZERO; self.inner.satblocks_destroyed = Sats::ZERO; self.inner.satdays_destroyed = Sats::ZERO; - if let Some(realized) = self.inner.realized.as_mut() { - *realized = RealizedState::default(); - } + self.inner.realized = RealizedState::default(); } - pub fn reset_cost_basis_data_if_needed(&mut self) -> Result<()> { + pub(crate) fn reset_cost_basis_data_if_needed(&mut self) -> Result<()> { self.inner.reset_cost_basis_data_if_needed() } - pub fn reset_single_iteration_values(&mut self) { - self.inner.reset_single_iteration_values(); - } - - pub fn send( + pub(crate) fn send( &mut self, addressdata: &mut FundedAddressData, value: Sats, - current_price: CentsUnsigned, - prev_price: CentsUnsigned, - ath: CentsUnsigned, + current_price: Cents, + prev_price: Cents, + ath: Cents, age: Age, ) -> Result<()> { let prev = addressdata.cost_basis_snapshot(); - addressdata.send(value, Some(prev_price))?; + addressdata.send(value, prev_price)?; let current = addressdata.cost_basis_snapshot(); self.inner.send_address( @@ -73,24 +66,15 @@ impl AddressCohortState { Ok(()) } - pub fn receive( + pub(crate) fn receive_outputs( &mut self, address_data: &mut FundedAddressData, value: Sats, - price: CentsUnsigned, - ) { - self.receive_outputs(address_data, value, price, 1); - } - - pub fn receive_outputs( - &mut self, - address_data: &mut FundedAddressData, - 
value: Sats, - price: CentsUnsigned, + price: Cents, output_count: u32, ) { let prev = address_data.cost_basis_snapshot(); - address_data.receive_outputs(value, Some(price), output_count); + address_data.receive_outputs(value, price, output_count); let current = address_data.cost_basis_snapshot(); self.inner.receive_address( @@ -104,13 +88,13 @@ impl AddressCohortState { ); } - pub fn add(&mut self, addressdata: &FundedAddressData) { + pub(crate) fn add(&mut self, addressdata: &FundedAddressData) { self.addr_count += 1; self.inner .increment_snapshot(&addressdata.cost_basis_snapshot()); } - pub fn subtract(&mut self, addressdata: &FundedAddressData) { + pub(crate) fn subtract(&mut self, addressdata: &FundedAddressData) { let snapshot = addressdata.cost_basis_snapshot(); // Check for potential underflow before it happens @@ -157,7 +141,4 @@ impl AddressCohortState { self.inner.decrement_snapshot(&snapshot); } - pub fn write(&mut self, height: Height, cleanup: bool) -> Result<()> { - self.inner.write(height, cleanup) - } } diff --git a/crates/brk_computer/src/distribution/state/cohort/base.rs b/crates/brk_computer/src/distribution/state/cohort/base.rs index 2906ca940..7dc802f75 100644 --- a/crates/brk_computer/src/distribution/state/cohort/base.rs +++ b/crates/brk_computer/src/distribution/state/cohort/base.rs @@ -1,110 +1,89 @@ use std::path::Path; use brk_error::Result; -use brk_types::{Age, CentsSats, CentsUnsigned, CostBasisSnapshot, Height, Sats, SupplyState}; +use brk_types::{Age, CentsSats, Cents, CostBasisSnapshot, Height, Sats, SupplyState}; use super::super::cost_basis::{CostBasisData, Percentiles, RealizedState, UnrealizedState}; -#[derive(Clone)] pub struct CohortState { pub supply: SupplyState, - pub realized: Option, + pub realized: RealizedState, pub sent: Sats, pub satblocks_destroyed: Sats, pub satdays_destroyed: Sats, - cost_basis_data: Option, + cost_basis_data: CostBasisData, } impl CohortState { - pub fn new(path: &Path, name: &str, 
compute_dollars: bool) -> Self { + pub(crate) fn new(path: &Path, name: &str) -> Self { Self { supply: SupplyState::default(), - realized: compute_dollars.then_some(RealizedState::default()), + realized: RealizedState::default(), sent: Sats::ZERO, satblocks_destroyed: Sats::ZERO, satdays_destroyed: Sats::ZERO, - cost_basis_data: compute_dollars.then_some(CostBasisData::create(path, name)), + cost_basis_data: CostBasisData::create(path, name), } } /// Enable price rounding for cost basis data. - pub fn with_price_rounding(mut self, digits: i32) -> Self { - if let Some(data) = self.cost_basis_data.take() { - self.cost_basis_data = Some(data.with_price_rounding(digits)); - } + pub(crate) fn with_price_rounding(mut self, digits: i32) -> Self { + self.cost_basis_data = self.cost_basis_data.with_price_rounding(digits); self } - pub fn import_at_or_before(&mut self, height: Height) -> Result { - match self.cost_basis_data.as_mut() { - Some(p) => p.import_at_or_before(height), - None => Ok(height), - } + pub(crate) fn import_at_or_before(&mut self, height: Height) -> Result { + self.cost_basis_data.import_at_or_before(height) } /// Restore realized cap from cost_basis_data after import. /// Uses the exact persisted values instead of recomputing from the map. 
- pub fn restore_realized_cap(&mut self) { - if let Some(cost_basis_data) = self.cost_basis_data.as_ref() - && let Some(realized) = self.realized.as_mut() - { - realized.set_cap_raw(cost_basis_data.cap_raw()); - realized.set_investor_cap_raw(cost_basis_data.investor_cap_raw()); - } + pub(crate) fn restore_realized_cap(&mut self) { + self.realized.set_cap_raw(self.cost_basis_data.cap_raw()); + self.realized + .set_investor_cap_raw(self.cost_basis_data.investor_cap_raw()); } - pub fn reset_cost_basis_data_if_needed(&mut self) -> Result<()> { - if let Some(p) = self.cost_basis_data.as_mut() { - p.clean()?; - p.init(); - } + pub(crate) fn reset_cost_basis_data_if_needed(&mut self) -> Result<()> { + self.cost_basis_data.clean()?; + self.cost_basis_data.init(); Ok(()) } - pub fn apply_pending(&mut self) { - if let Some(p) = self.cost_basis_data.as_mut() { - p.apply_pending(); - } + pub(crate) fn apply_pending(&mut self) { + self.cost_basis_data.apply_pending(); } - pub fn cost_basis_data_first_key_value(&self) -> Option<(CentsUnsigned, &Sats)> { + pub(crate) fn cost_basis_data_first_key_value(&self) -> Option<(Cents, &Sats)> { self.cost_basis_data - .as_ref()? .first_key_value() .map(|(k, v)| (k.into(), v)) } - pub fn cost_basis_data_last_key_value(&self) -> Option<(CentsUnsigned, &Sats)> { + pub(crate) fn cost_basis_data_last_key_value(&self) -> Option<(Cents, &Sats)> { self.cost_basis_data - .as_ref()? 
.last_key_value() .map(|(k, v)| (k.into(), v)) } - pub fn reset_single_iteration_values(&mut self) { + pub(crate) fn reset_single_iteration_values(&mut self) { self.sent = Sats::ZERO; self.satdays_destroyed = Sats::ZERO; self.satblocks_destroyed = Sats::ZERO; - if let Some(realized) = self.realized.as_mut() { - realized.reset_single_iteration_values(); - } + self.realized.reset_single_iteration_values(); } - pub fn increment(&mut self, supply: &SupplyState, price: Option) { - match price { - Some(p) => self.increment_snapshot(&CostBasisSnapshot::from_utxo(p, supply)), - None => self.supply += supply, - } + pub(crate) fn increment(&mut self, supply: &SupplyState, price: Cents) { + self.increment_snapshot(&CostBasisSnapshot::from_utxo(price, supply)); } - pub fn increment_snapshot(&mut self, s: &CostBasisSnapshot) { + pub(crate) fn increment_snapshot(&mut self, s: &CostBasisSnapshot) { self.supply += &s.supply_state; - if s.supply_state.value > Sats::ZERO - && let Some(realized) = self.realized.as_mut() - { - realized.increment_snapshot(s.price_sats, s.investor_cap); - self.cost_basis_data.as_mut().unwrap().increment( + if s.supply_state.value > Sats::ZERO { + self.realized + .increment_snapshot(s.price_sats, s.investor_cap); + self.cost_basis_data.increment( s.realized_price, s.supply_state.value, s.price_sats, @@ -113,21 +92,17 @@ impl CohortState { } } - pub fn decrement(&mut self, supply: &SupplyState, price: Option) { - match price { - Some(p) => self.decrement_snapshot(&CostBasisSnapshot::from_utxo(p, supply)), - None => self.supply -= supply, - } + pub(crate) fn decrement(&mut self, supply: &SupplyState, price: Cents) { + self.decrement_snapshot(&CostBasisSnapshot::from_utxo(price, supply)); } - pub fn decrement_snapshot(&mut self, s: &CostBasisSnapshot) { + pub(crate) fn decrement_snapshot(&mut self, s: &CostBasisSnapshot) { self.supply -= &s.supply_state; - if s.supply_state.value > Sats::ZERO - && let Some(realized) = self.realized.as_mut() - { - 
realized.decrement_snapshot(s.price_sats, s.investor_cap); - self.cost_basis_data.as_mut().unwrap().decrement( + if s.supply_state.value > Sats::ZERO { + self.realized + .decrement_snapshot(s.price_sats, s.investor_cap); + self.cost_basis_data.decrement( s.realized_price, s.supply_state.value, s.price_sats, @@ -136,44 +111,37 @@ impl CohortState { } } - pub fn receive_utxo(&mut self, supply: &SupplyState, price: Option) { + pub(crate) fn receive_utxo(&mut self, supply: &SupplyState, price: Cents) { self.supply += supply; - if supply.value > Sats::ZERO - && let Some(realized) = self.realized.as_mut() - { - let price = price.unwrap(); + if supply.value > Sats::ZERO { let sats = supply.value; // Compute once using typed values let price_sats = CentsSats::from_price_sats(price, sats); let investor_cap = price_sats.to_investor_cap(price); - realized.receive(price, sats); + self.realized.receive(price, sats); self.cost_basis_data - .as_mut() - .unwrap() .increment(price, sats, price_sats, investor_cap); } } - pub fn receive_address( + pub(crate) fn receive_address( &mut self, supply: &SupplyState, - price: CentsUnsigned, + price: Cents, current: &CostBasisSnapshot, prev: &CostBasisSnapshot, ) { self.supply += supply; - if supply.value > Sats::ZERO - && let Some(realized) = self.realized.as_mut() - { - realized.receive(price, supply.value); + if supply.value > Sats::ZERO { + self.realized.receive(price, supply.value); if current.supply_state.value.is_not_zero() { - self.cost_basis_data.as_mut().unwrap().increment( + self.cost_basis_data.increment( current.realized_price, current.supply_state.value, current.price_sats, @@ -182,7 +150,7 @@ impl CohortState { } if prev.supply_state.value.is_not_zero() { - self.cost_basis_data.as_mut().unwrap().decrement( + self.cost_basis_data.decrement( prev.realized_price, prev.supply_state.value, prev.price_sats, @@ -192,12 +160,12 @@ impl CohortState { } } - pub fn send_utxo( + pub(crate) fn send_utxo( &mut self, supply: &SupplyState, - 
current_price: Option, - prev_price: Option, - ath: Option, + current_price: Cents, + prev_price: Cents, + ath: Cents, age: Age, ) { if supply.utxo_count == 0 { @@ -211,37 +179,32 @@ impl CohortState { self.satblocks_destroyed += age.satblocks_destroyed(supply.value); self.satdays_destroyed += age.satdays_destroyed(supply.value); - if let Some(realized) = self.realized.as_mut() { - let cp = current_price.unwrap(); - let pp = prev_price.unwrap(); - let ath_price = ath.unwrap(); - let sats = supply.value; + let cp = current_price; + let pp = prev_price; + let ath_price = ath; + let sats = supply.value; - // Compute ONCE using typed values - let current_ps = CentsSats::from_price_sats(cp, sats); - let prev_ps = CentsSats::from_price_sats(pp, sats); - let ath_ps = CentsSats::from_price_sats(ath_price, sats); - let prev_investor_cap = prev_ps.to_investor_cap(pp); + // Compute ONCE using typed values + let current_ps = CentsSats::from_price_sats(cp, sats); + let prev_ps = CentsSats::from_price_sats(pp, sats); + let ath_ps = CentsSats::from_price_sats(ath_price, sats); + let prev_investor_cap = prev_ps.to_investor_cap(pp); - realized.send(sats, current_ps, prev_ps, ath_ps, prev_investor_cap); + self.realized + .send(sats, current_ps, prev_ps, ath_ps, prev_investor_cap); - self.cost_basis_data.as_mut().unwrap().decrement( - pp, - sats, - prev_ps, - prev_investor_cap, - ); - } + self.cost_basis_data + .decrement(pp, sats, prev_ps, prev_investor_cap); } } #[allow(clippy::too_many_arguments)] - pub fn send_address( + pub(crate) fn send_address( &mut self, supply: &SupplyState, - current_price: CentsUnsigned, - prev_price: CentsUnsigned, - ath: CentsUnsigned, + current_price: Cents, + prev_price: Cents, + ath: Cents, age: Age, current: &CostBasisSnapshot, prev: &CostBasisSnapshot, @@ -257,80 +220,55 @@ impl CohortState { self.satblocks_destroyed += age.satblocks_destroyed(supply.value); self.satdays_destroyed += age.satdays_destroyed(supply.value); - if let Some(realized) = 
self.realized.as_mut() { - let sats = supply.value; + let sats = supply.value; - // Compute once for realized.send using typed values - let current_ps = CentsSats::from_price_sats(current_price, sats); - let prev_ps = CentsSats::from_price_sats(prev_price, sats); - let ath_ps = CentsSats::from_price_sats(ath, sats); - let prev_investor_cap = prev_ps.to_investor_cap(prev_price); + // Compute once for realized.send using typed values + let current_ps = CentsSats::from_price_sats(current_price, sats); + let prev_ps = CentsSats::from_price_sats(prev_price, sats); + let ath_ps = CentsSats::from_price_sats(ath, sats); + let prev_investor_cap = prev_ps.to_investor_cap(prev_price); - realized.send(sats, current_ps, prev_ps, ath_ps, prev_investor_cap); + self.realized + .send(sats, current_ps, prev_ps, ath_ps, prev_investor_cap); - if current.supply_state.value.is_not_zero() { - self.cost_basis_data.as_mut().unwrap().increment( - current.realized_price, - current.supply_state.value, - current.price_sats, - current.investor_cap, - ); - } + if current.supply_state.value.is_not_zero() { + self.cost_basis_data.increment( + current.realized_price, + current.supply_state.value, + current.price_sats, + current.investor_cap, + ); + } - if prev.supply_state.value.is_not_zero() { - self.cost_basis_data.as_mut().unwrap().decrement( - prev.realized_price, - prev.supply_state.value, - prev.price_sats, - prev.investor_cap, - ); - } + if prev.supply_state.value.is_not_zero() { + self.cost_basis_data.decrement( + prev.realized_price, + prev.supply_state.value, + prev.price_sats, + prev.investor_cap, + ); } } } - pub fn compute_percentiles(&self) -> Option { - self.cost_basis_data.as_ref()?.compute_percentiles() + pub(crate) fn compute_percentiles(&mut self) -> Option { + self.cost_basis_data.compute_percentiles() } - pub fn compute_unrealized_states( + pub(crate) fn compute_unrealized_states( &mut self, - height_price: CentsUnsigned, - date_price: Option, + height_price: Cents, + 
date_price: Option, ) -> (UnrealizedState, Option) { - match self.cost_basis_data.as_mut() { - Some(p) => p.compute_unrealized_states(height_price, date_price), - None => ( - UnrealizedState::ZERO, - date_price.map(|_| UnrealizedState::ZERO), - ), - } - } - - pub fn write(&mut self, height: Height, cleanup: bool) -> Result<()> { - if let Some(p) = self.cost_basis_data.as_mut() { - p.write(height, cleanup)?; - } - Ok(()) - } - - pub fn min_price(&self) -> Option { self.cost_basis_data - .as_ref()? - .first_key_value() - .map(|(k, _)| k.into()) + .compute_unrealized_states(height_price, date_price) } - pub fn max_price(&self) -> Option { - self.cost_basis_data - .as_ref()? - .last_key_value() - .map(|(k, _)| k.into()) + pub(crate) fn write(&mut self, height: Height, cleanup: bool) -> Result<()> { + self.cost_basis_data.write(height, cleanup) } - pub fn cost_basis_data_iter(&self) -> Option> { - self.cost_basis_data - .as_ref() - .map(|p| p.iter().map(|(k, v)| (k.into(), v))) + pub(crate) fn cost_basis_data_iter(&self) -> impl Iterator { + self.cost_basis_data.iter().map(|(k, v)| (k.into(), v)) } } diff --git a/crates/brk_computer/src/distribution/state/cohort/utxo.rs b/crates/brk_computer/src/distribution/state/cohort/utxo.rs index 0280a80bc..be104fa98 100644 --- a/crates/brk_computer/src/distribution/state/cohort/utxo.rs +++ b/crates/brk_computer/src/distribution/state/cohort/utxo.rs @@ -6,26 +6,24 @@ use derive_more::{Deref, DerefMut}; use super::{super::cost_basis::RealizedState, base::CohortState}; -#[derive(Clone, Deref, DerefMut)] +#[derive(Deref, DerefMut)] pub struct UTXOCohortState(CohortState); impl UTXOCohortState { - pub fn new(path: &Path, name: &str, compute_dollars: bool) -> Self { - Self(CohortState::new(path, name, compute_dollars)) + pub(crate) fn new(path: &Path, name: &str) -> Self { + Self(CohortState::new(path, name)) } - pub fn reset_cost_basis_data_if_needed(&mut self) -> Result<()> { + pub(crate) fn reset_cost_basis_data_if_needed(&mut self) 
-> Result<()> { self.0.reset_cost_basis_data_if_needed() } /// Reset state for fresh start. - pub fn reset(&mut self) { + pub(crate) fn reset(&mut self) { self.0.supply = SupplyState::default(); self.0.sent = Sats::ZERO; self.0.satblocks_destroyed = Sats::ZERO; self.0.satdays_destroyed = Sats::ZERO; - if let Some(realized) = self.0.realized.as_mut() { - *realized = RealizedState::default(); - } + self.0.realized = RealizedState::default(); } } diff --git a/crates/brk_computer/src/distribution/state/cost_basis/data.rs b/crates/brk_computer/src/distribution/state/cost_basis/data.rs index a082c6b10..b5c3fbf5f 100644 --- a/crates/brk_computer/src/distribution/state/cost_basis/data.rs +++ b/crates/brk_computer/src/distribution/state/cost_basis/data.rs @@ -6,8 +6,7 @@ use std::{ use brk_error::{Error, Result}; use brk_types::{ - CentsSats, CentsSquaredSats, CentsUnsigned, CentsUnsignedCompact, CostBasisDistribution, - Height, Sats, + CentsCompact, CentsSats, CentsSquaredSats, Cents, CostBasisDistribution, Height, Sats, }; use rustc_hash::FxHashMap; use vecdb::Bytes; @@ -17,7 +16,7 @@ use crate::utils::OptionExt; use super::{CachedUnrealizedState, Percentiles, UnrealizedState}; /// Type alias for the price-to-sats map used in cost basis data. 
-pub(super) type CostBasisMap = BTreeMap; +pub(super) type CostBasisMap = BTreeMap; #[derive(Clone, Debug, Default)] struct PendingRaw { @@ -31,40 +30,44 @@ struct PendingRaw { pub struct CostBasisData { pathbuf: PathBuf, state: Option, - pending: FxHashMap, + pending: FxHashMap, pending_raw: PendingRaw, cache: Option, + percentiles_dirty: bool, + cached_percentiles: Option, rounding_digits: Option, } const STATE_TO_KEEP: usize = 10; impl CostBasisData { - pub fn create(path: &Path, name: &str) -> Self { + pub(crate) fn create(path: &Path, name: &str) -> Self { Self { pathbuf: path.join(format!("{name}_cost_basis")), state: None, pending: FxHashMap::default(), pending_raw: PendingRaw::default(), cache: None, + percentiles_dirty: true, + cached_percentiles: None, rounding_digits: None, } } - pub fn with_price_rounding(mut self, digits: i32) -> Self { + pub(crate) fn with_price_rounding(mut self, digits: i32) -> Self { self.rounding_digits = Some(digits); self } #[inline] - fn round_price(&self, price: CentsUnsigned) -> CentsUnsigned { + fn round_price(&self, price: Cents) -> Cents { match self.rounding_digits { Some(digits) => price.round_to_dollar(digits), None => price, } } - pub fn import_at_or_before(&mut self, height: Height) -> Result { + pub(crate) fn import_at_or_before(&mut self, height: Height) -> Result { let files = self.read_dir(None)?; let (&height, path) = files.range(..=height).next_back().ok_or(Error::NotFound( "No cost basis state found at or before height".into(), @@ -73,6 +76,8 @@ impl CostBasisData { self.pending.clear(); self.pending_raw = PendingRaw::default(); self.cache = None; + self.percentiles_dirty = true; + self.cached_percentiles = None; Ok(height) } @@ -90,16 +95,16 @@ impl CostBasisData { && self.pending_raw.investor_cap_dec == CentsSquaredSats::ZERO } - pub fn iter(&self) -> impl Iterator { + pub(crate) fn iter(&self) -> impl Iterator { self.assert_pending_empty(); self.state.u().base.map.iter().map(|(&k, v)| (k, v)) } - pub fn 
is_empty(&self) -> bool { + pub(crate) fn is_empty(&self) -> bool { self.pending.is_empty() && self.state.u().base.map.is_empty() } - pub fn first_key_value(&self) -> Option<(CentsUnsignedCompact, &Sats)> { + pub(crate) fn first_key_value(&self) -> Option<(CentsCompact, &Sats)> { self.assert_pending_empty(); self.state .u() @@ -109,7 +114,7 @@ impl CostBasisData { .map(|(&k, v)| (k, v)) } - pub fn last_key_value(&self) -> Option<(CentsUnsignedCompact, &Sats)> { + pub(crate) fn last_key_value(&self) -> Option<(CentsCompact, &Sats)> { self.assert_pending_empty(); self.state .u() @@ -120,22 +125,22 @@ impl CostBasisData { } /// Get the exact cap_raw value (not recomputed from map). - pub fn cap_raw(&self) -> CentsSats { + pub(crate) fn cap_raw(&self) -> CentsSats { self.assert_pending_empty(); self.state.u().cap_raw } /// Get the exact investor_cap_raw value (not recomputed from map). - pub fn investor_cap_raw(&self) -> CentsSquaredSats { + pub(crate) fn investor_cap_raw(&self) -> CentsSquaredSats { self.assert_pending_empty(); self.state.u().investor_cap_raw } /// Increment with pre-computed typed values. /// Handles rounding and cache update. - pub fn increment( + pub(crate) fn increment( &mut self, - price: CentsUnsigned, + price: Cents, sats: Sats, price_sats: CentsSats, investor_cap: CentsSquaredSats, @@ -153,9 +158,9 @@ impl CostBasisData { /// Decrement with pre-computed typed values. /// Handles rounding and cache update. 
- pub fn decrement( + pub(crate) fn decrement( &mut self, - price: CentsUnsigned, + price: Cents, sats: Sats, price_sats: CentsSats, investor_cap: CentsSquaredSats, @@ -171,7 +176,10 @@ impl CostBasisData { } } - pub fn apply_pending(&mut self) { + pub(crate) fn apply_pending(&mut self) { + if !self.pending.is_empty() { + self.percentiles_dirty = true; + } for (cents, (inc, dec)) in self.pending.drain() { let entry = self.state.um().base.map.entry(cents).or_default(); *entry += inc; @@ -232,25 +240,35 @@ impl CostBasisData { self.pending_raw = PendingRaw::default(); } - pub fn init(&mut self) { + pub(crate) fn init(&mut self) { self.state.replace(State::default()); self.pending.clear(); self.pending_raw = PendingRaw::default(); self.cache = None; + self.percentiles_dirty = true; + self.cached_percentiles = None; } - pub fn compute_percentiles(&self) -> Option { + pub(crate) fn compute_percentiles(&mut self) -> Option { self.assert_pending_empty(); - Percentiles::compute(self.iter().map(|(k, &v)| (k, v))) + if !self.percentiles_dirty { + return self.cached_percentiles; + } + self.cached_percentiles = Percentiles::compute(self.iter().map(|(k, &v)| (k, v))); + self.percentiles_dirty = false; + self.cached_percentiles } - pub fn compute_unrealized_states( + pub(crate) fn compute_unrealized_states( &mut self, - height_price: CentsUnsigned, - date_price: Option, + height_price: Cents, + date_price: Option, ) -> (UnrealizedState, Option) { if self.is_empty() { - return (UnrealizedState::ZERO, date_price.map(|_| UnrealizedState::ZERO)); + return ( + UnrealizedState::ZERO, + date_price.map(|_| UnrealizedState::ZERO), + ); } let map = &self.state.u().base.map; @@ -270,7 +288,7 @@ impl CostBasisData { (height_state, date_state) } - pub fn clean(&mut self) -> Result<()> { + pub(crate) fn clean(&mut self) -> Result<()> { let _ = fs::remove_dir_all(&self.pathbuf); fs::create_dir_all(self.path_by_height())?; self.cache = None; @@ -304,7 +322,7 @@ impl CostBasisData { 
.collect::>()) } - pub fn write(&mut self, height: Height, cleanup: bool) -> Result<()> { + pub(crate) fn write(&mut self, height: Height, cleanup: bool) -> Result<()> { self.apply_pending(); if cleanup { diff --git a/crates/brk_computer/src/distribution/state/cost_basis/percentiles.rs b/crates/brk_computer/src/distribution/state/cost_basis/percentiles.rs index 556b0184f..d5ce601a1 100644 --- a/crates/brk_computer/src/distribution/state/cost_basis/percentiles.rs +++ b/crates/brk_computer/src/distribution/state/cost_basis/percentiles.rs @@ -1,19 +1,19 @@ -use brk_types::{CentsUnsigned, CentsUnsignedCompact, Sats}; +use brk_types::{Cents, CentsCompact, Sats}; use crate::internal::{PERCENTILES, PERCENTILES_LEN}; #[derive(Clone, Copy, Debug)] pub struct Percentiles { /// Sat-weighted: percentiles by coin count - pub sat_weighted: [CentsUnsigned; PERCENTILES_LEN], + pub sat_weighted: [Cents; PERCENTILES_LEN], /// USD-weighted: percentiles by invested capital (sats × price) - pub usd_weighted: [CentsUnsigned; PERCENTILES_LEN], + pub usd_weighted: [Cents; PERCENTILES_LEN], } impl Percentiles { /// Compute both sat-weighted and USD-weighted percentiles in a single pass. /// Takes an iterator over (price, sats) pairs, assumed sorted by price ascending. 
- pub fn compute(iter: impl Iterator) -> Option { + pub(crate) fn compute(iter: impl Iterator) -> Option { // Collect to allow two passes: one for totals, one for percentiles let entries: Vec<_> = iter.collect(); if entries.is_empty() { @@ -32,8 +32,8 @@ impl Percentiles { return None; } - let mut sat_weighted = [CentsUnsigned::ZERO; PERCENTILES_LEN]; - let mut usd_weighted = [CentsUnsigned::ZERO; PERCENTILES_LEN]; + let mut sat_weighted = [Cents::ZERO; PERCENTILES_LEN]; + let mut usd_weighted = [Cents::ZERO; PERCENTILES_LEN]; let mut cumsum_sats: u64 = 0; let mut cumsum_usd: u128 = 0; let mut sat_idx = 0; diff --git a/crates/brk_computer/src/distribution/state/cost_basis/realized.rs b/crates/brk_computer/src/distribution/state/cost_basis/realized.rs index 346125da0..32fb59a8e 100644 --- a/crates/brk_computer/src/distribution/state/cost_basis/realized.rs +++ b/crates/brk_computer/src/distribution/state/cost_basis/realized.rs @@ -1,6 +1,6 @@ use std::cmp::Ordering; -use brk_types::{CentsSats, CentsSquaredSats, CentsUnsigned, Sats}; +use brk_types::{CentsSats, CentsSquaredSats, Cents, Sats}; /// Realized state using u128 for raw cent*sat values internally. /// This avoids overflow and defers division to output time for efficiency. @@ -34,19 +34,19 @@ pub struct RealizedState { impl RealizedState { /// Get realized cap as CentsUnsigned (divides by ONE_BTC). #[inline] - pub fn cap(&self) -> CentsUnsigned { - CentsUnsigned::new((self.cap_raw / Sats::ONE_BTC_U128) as u64) + pub(crate) fn cap(&self) -> Cents { + Cents::new((self.cap_raw / Sats::ONE_BTC_U128) as u64) } /// Set cap_raw directly from persisted value. #[inline] - pub fn set_cap_raw(&mut self, cap_raw: CentsSats) { + pub(crate) fn set_cap_raw(&mut self, cap_raw: CentsSats) { self.cap_raw = cap_raw.inner(); } /// Set investor_cap_raw directly from persisted value. 
#[inline] - pub fn set_investor_cap_raw(&mut self, investor_cap_raw: CentsSquaredSats) { + pub(crate) fn set_investor_cap_raw(&mut self, investor_cap_raw: CentsSquaredSats) { self.investor_cap_raw = investor_cap_raw; } @@ -54,114 +54,84 @@ impl RealizedState { /// investor_price = Σ(price² × sats) / Σ(price × sats) /// This is the dollar-weighted average acquisition price. #[inline] - pub fn investor_price(&self) -> CentsUnsigned { + pub(crate) fn investor_price(&self) -> Cents { if self.cap_raw == 0 { - return CentsUnsigned::ZERO; + return Cents::ZERO; } - CentsUnsigned::new((self.investor_cap_raw / self.cap_raw) as u64) + Cents::new((self.investor_cap_raw / self.cap_raw) as u64) } /// Get raw realized cap for aggregation. #[inline] - pub fn cap_raw(&self) -> CentsSats { + pub(crate) fn cap_raw(&self) -> CentsSats { CentsSats::new(self.cap_raw) } /// Get raw investor cap for aggregation. #[inline] - pub fn investor_cap_raw(&self) -> CentsSquaredSats { + pub(crate) fn investor_cap_raw(&self) -> CentsSquaredSats { self.investor_cap_raw } /// Get realized profit as CentsUnsigned. #[inline] - pub fn profit(&self) -> CentsUnsigned { - CentsUnsigned::new((self.profit_raw / Sats::ONE_BTC_U128) as u64) + pub(crate) fn profit(&self) -> Cents { + Cents::new((self.profit_raw / Sats::ONE_BTC_U128) as u64) } /// Get realized loss as CentsUnsigned. #[inline] - pub fn loss(&self) -> CentsUnsigned { - CentsUnsigned::new((self.loss_raw / Sats::ONE_BTC_U128) as u64) - } - - /// Get value created as CentsUnsigned (derived from profit + loss splits). - #[inline] - pub fn value_created(&self) -> CentsUnsigned { - let raw = self.profit_value_created_raw + self.loss_value_created_raw; - CentsUnsigned::new((raw / Sats::ONE_BTC_U128) as u64) - } - - /// Get value destroyed as CentsUnsigned (derived from profit + loss splits). 
- #[inline] - pub fn value_destroyed(&self) -> CentsUnsigned { - let raw = self.profit_value_destroyed_raw + self.loss_value_destroyed_raw; - CentsUnsigned::new((raw / Sats::ONE_BTC_U128) as u64) + pub(crate) fn loss(&self) -> Cents { + Cents::new((self.loss_raw / Sats::ONE_BTC_U128) as u64) } /// Get profit value created as CentsUnsigned (sell_price × sats for profit cases). #[inline] - pub fn profit_value_created(&self) -> CentsUnsigned { - CentsUnsigned::new((self.profit_value_created_raw / Sats::ONE_BTC_U128) as u64) + pub(crate) fn profit_value_created(&self) -> Cents { + Cents::new((self.profit_value_created_raw / Sats::ONE_BTC_U128) as u64) } /// Get profit value destroyed as CentsUnsigned (cost_basis × sats for profit cases). /// This is also known as profit_flow. #[inline] - pub fn profit_value_destroyed(&self) -> CentsUnsigned { - CentsUnsigned::new((self.profit_value_destroyed_raw / Sats::ONE_BTC_U128) as u64) + pub(crate) fn profit_value_destroyed(&self) -> Cents { + Cents::new((self.profit_value_destroyed_raw / Sats::ONE_BTC_U128) as u64) } /// Get loss value created as CentsUnsigned (sell_price × sats for loss cases). #[inline] - pub fn loss_value_created(&self) -> CentsUnsigned { - CentsUnsigned::new((self.loss_value_created_raw / Sats::ONE_BTC_U128) as u64) + pub(crate) fn loss_value_created(&self) -> Cents { + Cents::new((self.loss_value_created_raw / Sats::ONE_BTC_U128) as u64) } /// Get loss value destroyed as CentsUnsigned (cost_basis × sats for loss cases). /// This is also known as capitulation_flow. #[inline] - pub fn loss_value_destroyed(&self) -> CentsUnsigned { - CentsUnsigned::new((self.loss_value_destroyed_raw / Sats::ONE_BTC_U128) as u64) - } - - /// Get capitulation flow as CentsUnsigned. - /// This is the invested capital (cost_basis × sats) sold at a loss. - /// Alias for loss_value_destroyed. - #[inline] - pub fn capitulation_flow(&self) -> CentsUnsigned { - self.loss_value_destroyed() - } - - /// Get profit flow as CentsUnsigned. 
- /// This is the invested capital (cost_basis × sats) sold at a profit. - /// Alias for profit_value_destroyed. - #[inline] - pub fn profit_flow(&self) -> CentsUnsigned { - self.profit_value_destroyed() + pub(crate) fn loss_value_destroyed(&self) -> Cents { + Cents::new((self.loss_value_destroyed_raw / Sats::ONE_BTC_U128) as u64) } /// Get realized peak regret as CentsUnsigned. /// This is Σ((peak - sell_price) × sats) - how much more could have been made /// by selling at peak instead of when actually sold. #[inline] - pub fn peak_regret(&self) -> CentsUnsigned { - CentsUnsigned::new((self.peak_regret_raw / Sats::ONE_BTC_U128) as u64) + pub(crate) fn peak_regret(&self) -> Cents { + Cents::new((self.peak_regret_raw / Sats::ONE_BTC_U128) as u64) } /// Get sats sent in profit. #[inline] - pub fn sent_in_profit(&self) -> Sats { + pub(crate) fn sent_in_profit(&self) -> Sats { self.sent_in_profit } /// Get sats sent in loss. #[inline] - pub fn sent_in_loss(&self) -> Sats { + pub(crate) fn sent_in_loss(&self) -> Sats { self.sent_in_loss } - pub fn reset_single_iteration_values(&mut self) { + pub(crate) fn reset_single_iteration_values(&mut self) { self.profit_raw = 0; self.loss_raw = 0; self.profit_value_created_raw = 0; @@ -175,7 +145,7 @@ impl RealizedState { /// Increment using pre-computed values (for UTXO path) #[inline] - pub fn increment(&mut self, price: CentsUnsigned, sats: Sats) { + pub(crate) fn increment(&mut self, price: Cents, sats: Sats) { if sats.is_zero() { return; } @@ -186,26 +156,26 @@ impl RealizedState { /// Increment using pre-computed snapshot values (for address path) #[inline] - pub fn increment_snapshot(&mut self, price_sats: CentsSats, investor_cap: CentsSquaredSats) { + pub(crate) fn increment_snapshot(&mut self, price_sats: CentsSats, investor_cap: CentsSquaredSats) { self.cap_raw += price_sats.as_u128(); self.investor_cap_raw += investor_cap; } /// Decrement using pre-computed snapshot values (for address path) #[inline] - pub fn 
decrement_snapshot(&mut self, price_sats: CentsSats, investor_cap: CentsSquaredSats) { + pub(crate) fn decrement_snapshot(&mut self, price_sats: CentsSats, investor_cap: CentsSquaredSats) { self.cap_raw -= price_sats.as_u128(); self.investor_cap_raw -= investor_cap; } #[inline] - pub fn receive(&mut self, price: CentsUnsigned, sats: Sats) { + pub(crate) fn receive(&mut self, price: Cents, sats: Sats) { self.increment(price, sats); } /// Send with pre-computed typed values. Inlines decrement to avoid recomputation. #[inline] - pub fn send( + pub(crate) fn send( &mut self, sats: Sats, current_ps: CentsSats, diff --git a/crates/brk_computer/src/distribution/state/cost_basis/unrealized.rs b/crates/brk_computer/src/distribution/state/cost_basis/unrealized.rs index 76b557d00..c903cfac7 100644 --- a/crates/brk_computer/src/distribution/state/cost_basis/unrealized.rs +++ b/crates/brk_computer/src/distribution/state/cost_basis/unrealized.rs @@ -1,6 +1,6 @@ use std::ops::Bound; -use brk_types::{CentsUnsigned, CentsUnsignedCompact, Sats}; +use brk_types::{Cents, CentsCompact, Sats}; use super::CostBasisMap; @@ -8,10 +8,10 @@ use super::CostBasisMap; pub struct UnrealizedState { pub supply_in_profit: Sats, pub supply_in_loss: Sats, - pub unrealized_profit: CentsUnsigned, - pub unrealized_loss: CentsUnsigned, - pub invested_capital_in_profit: CentsUnsigned, - pub invested_capital_in_loss: CentsUnsigned, + pub unrealized_profit: Cents, + pub unrealized_loss: Cents, + pub invested_capital_in_profit: Cents, + pub invested_capital_in_loss: Cents, /// Raw Σ(price² × sats) for UTXOs in profit. Used for aggregation. pub investor_cap_in_profit_raw: u128, /// Raw Σ(price² × sats) for UTXOs in loss. Used for aggregation. 
@@ -26,39 +26,16 @@ impl UnrealizedState { pub const ZERO: Self = Self { supply_in_profit: Sats::ZERO, supply_in_loss: Sats::ZERO, - unrealized_profit: CentsUnsigned::ZERO, - unrealized_loss: CentsUnsigned::ZERO, - invested_capital_in_profit: CentsUnsigned::ZERO, - invested_capital_in_loss: CentsUnsigned::ZERO, + unrealized_profit: Cents::ZERO, + unrealized_loss: Cents::ZERO, + invested_capital_in_profit: Cents::ZERO, + invested_capital_in_loss: Cents::ZERO, investor_cap_in_profit_raw: 0, investor_cap_in_loss_raw: 0, invested_capital_in_profit_raw: 0, invested_capital_in_loss_raw: 0, }; - /// Compute pain_index from raw values. - /// pain_index = investor_price_of_losers - spot - #[inline] - pub fn pain_index(&self, spot: CentsUnsigned) -> CentsUnsigned { - if self.invested_capital_in_loss_raw == 0 { - return CentsUnsigned::ZERO; - } - let investor_price_losers = - self.investor_cap_in_loss_raw / self.invested_capital_in_loss_raw; - CentsUnsigned::new((investor_price_losers - spot.as_u128()) as u64) - } - - /// Compute greed_index from raw values. - /// greed_index = spot - investor_price_of_winners - #[inline] - pub fn greed_index(&self, spot: CentsUnsigned) -> CentsUnsigned { - if self.invested_capital_in_profit_raw == 0 { - return CentsUnsigned::ZERO; - } - let investor_price_winners = - self.investor_cap_in_profit_raw / self.invested_capital_in_profit_raw; - CentsUnsigned::new((spot.as_u128() - investor_price_winners) as u64) - } } /// Internal cache state using u128 for raw cent*sat values. 
@@ -88,14 +65,12 @@ impl CachedStateRaw { UnrealizedState { supply_in_profit: self.supply_in_profit, supply_in_loss: self.supply_in_loss, - unrealized_profit: CentsUnsigned::new( - (self.unrealized_profit / Sats::ONE_BTC_U128) as u64, - ), - unrealized_loss: CentsUnsigned::new((self.unrealized_loss / Sats::ONE_BTC_U128) as u64), - invested_capital_in_profit: CentsUnsigned::new( + unrealized_profit: Cents::new((self.unrealized_profit / Sats::ONE_BTC_U128) as u64), + unrealized_loss: Cents::new((self.unrealized_loss / Sats::ONE_BTC_U128) as u64), + invested_capital_in_profit: Cents::new( (self.invested_capital_in_profit / Sats::ONE_BTC_U128) as u64, ), - invested_capital_in_loss: CentsUnsigned::new( + invested_capital_in_loss: Cents::new( (self.invested_capital_in_loss / Sats::ONE_BTC_U128) as u64, ), investor_cap_in_profit_raw: self.investor_cap_in_profit, @@ -109,12 +84,12 @@ impl CachedStateRaw { #[derive(Debug, Clone)] pub struct CachedUnrealizedState { state: CachedStateRaw, - at_price: CentsUnsignedCompact, + at_price: CentsCompact, } impl CachedUnrealizedState { - pub fn compute_fresh(price: CentsUnsigned, map: &CostBasisMap) -> Self { - let price: CentsUnsignedCompact = price.into(); + pub(crate) fn compute_fresh(price: Cents, map: &CostBasisMap) -> Self { + let price: CentsCompact = price.into(); let state = Self::compute_raw(price, map); Self { state, @@ -123,24 +98,20 @@ impl CachedUnrealizedState { } /// Get the current cached state as output (without price update). 
- pub fn current_state(&self) -> UnrealizedState { + pub(crate) fn current_state(&self) -> UnrealizedState { self.state.to_output() } - pub fn get_at_price( - &mut self, - new_price: CentsUnsigned, - map: &CostBasisMap, - ) -> UnrealizedState { - let new_price: CentsUnsignedCompact = new_price.into(); + pub(crate) fn get_at_price(&mut self, new_price: Cents, map: &CostBasisMap) -> UnrealizedState { + let new_price: CentsCompact = new_price.into(); if new_price != self.at_price { self.update_for_price_change(new_price, map); } self.state.to_output() } - pub fn on_receive(&mut self, price: CentsUnsigned, sats: Sats) { - let price: CentsUnsignedCompact = price.into(); + pub(crate) fn on_receive(&mut self, price: Cents, sats: Sats) { + let price: CentsCompact = price.into(); let sats_u128 = sats.as_u128(); let price_u128 = price.as_u128(); let invested_capital = price_u128 * sats_u128; @@ -163,8 +134,8 @@ impl CachedUnrealizedState { } } - pub fn on_send(&mut self, price: CentsUnsigned, sats: Sats) { - let price: CentsUnsignedCompact = price.into(); + pub(crate) fn on_send(&mut self, price: Cents, sats: Sats) { + let price: CentsCompact = price.into(); let sats_u128 = sats.as_u128(); let price_u128 = price.as_u128(); let invested_capital = price_u128 * sats_u128; @@ -187,7 +158,7 @@ impl CachedUnrealizedState { } } - fn update_for_price_change(&mut self, new_price: CentsUnsignedCompact, map: &CostBasisMap) { + fn update_for_price_change(&mut self, new_price: CentsCompact, map: &CostBasisMap) { let old_price = self.at_price; if new_price > old_price { @@ -198,7 +169,8 @@ impl CachedUnrealizedState { // First, process UTXOs crossing from loss to profit // Range (old_price, new_price] means: old_price < price <= new_price - for (&price, &sats) in map.range((Bound::Excluded(old_price), Bound::Included(new_price))) + for (&price, &sats) in + map.range((Bound::Excluded(old_price), Bound::Included(new_price))) { let sats_u128 = sats.as_u128(); let price_u128 = 
price.as_u128(); @@ -239,7 +211,8 @@ impl CachedUnrealizedState { // First, process UTXOs crossing from profit to loss // Range (new_price, old_price] means: new_price < price <= old_price - for (&price, &sats) in map.range((Bound::Excluded(new_price), Bound::Included(old_price))) + for (&price, &sats) in + map.range((Bound::Excluded(new_price), Bound::Included(old_price))) { let sats_u128 = sats.as_u128(); let price_u128 = price.as_u128(); @@ -278,7 +251,7 @@ impl CachedUnrealizedState { } /// Compute raw cached state from the map. - fn compute_raw(current_price: CentsUnsignedCompact, map: &CostBasisMap) -> CachedStateRaw { + fn compute_raw(current_price: CentsCompact, map: &CostBasisMap) -> CachedStateRaw { let mut state = CachedStateRaw::default(); for (&price, &sats) in map.iter() { @@ -309,8 +282,8 @@ impl CachedUnrealizedState { /// Compute final UnrealizedState directly (not cached). /// Used for date_state which doesn't use the cache. - pub fn compute_full_standalone( - current_price: CentsUnsignedCompact, + pub(crate) fn compute_full_standalone( + current_price: CentsCompact, map: &CostBasisMap, ) -> UnrealizedState { Self::compute_raw(current_price, map).to_output() diff --git a/crates/brk_computer/src/distribution/state/transacted.rs b/crates/brk_computer/src/distribution/state/transacted.rs index 3f567bfee..9f6039326 100644 --- a/crates/brk_computer/src/distribution/state/transacted.rs +++ b/crates/brk_computer/src/distribution/state/transacted.rs @@ -12,7 +12,7 @@ pub struct Transacted { impl Transacted { #[allow(clippy::inconsistent_digit_grouping)] - pub fn iterate(&mut self, value: Sats, _type: OutputType) { + pub(crate) fn iterate(&mut self, value: Sats, _type: OutputType) { let supply = SupplyState { utxo_count: 1, value, diff --git a/crates/brk_computer/src/distribution/vecs.rs b/crates/brk_computer/src/distribution/vecs.rs index b90d8c4c9..c4b237c47 100644 --- a/crates/brk_computer/src/distribution/vecs.rs +++ 
b/crates/brk_computer/src/distribution/vecs.rs @@ -4,13 +4,13 @@ use brk_error::Result; use brk_indexer::Indexer; use brk_traversable::Traversable; use brk_types::{ - DateIndex, EmptyAddressData, EmptyAddressIndex, FundedAddressData, FundedAddressIndex, Height, + Day1, EmptyAddressData, EmptyAddressIndex, FundedAddressData, FundedAddressIndex, Height, SupplyState, Version, }; use tracing::{debug, info}; use vecdb::{ - AnyVec, BytesVec, Database, Exit, GenericStoredVec, ImportableVec, IterableCloneableVec, - LazyVecFrom1, PAGE_SIZE, Stamp, TypedVecIterator, VecIndex, + AnyVec, BytesVec, Database, Exit, WritableVec, ImportableVec, ReadableCloneableVec, + ReadableVec, Rw, StorageMode, LazyVecFrom1, PAGE_SIZE, Stamp, }; use crate::{ @@ -19,7 +19,7 @@ use crate::{ compute::{StartMode, determine_start_mode, process_blocks, recover_state, reset_state}, state::BlockState, }, - indexes, inputs, outputs, price, transactions, + indexes, inputs, outputs, prices, transactions, }; use super::{ @@ -33,27 +33,27 @@ use super::{ const VERSION: Version = Version::new(22); /// Main struct holding all computed vectors and state for stateful computation. 
-#[derive(Clone, Traversable)] -pub struct Vecs { +#[derive(Traversable)] +pub struct Vecs { #[traversable(skip)] db: Database, #[traversable(skip)] pub states_path: PathBuf, - pub supply_state: BytesVec, - pub any_address_indexes: AnyAddressIndexesVecs, - pub addresses_data: AddressesDataVecs, - pub utxo_cohorts: UTXOCohorts, - pub address_cohorts: AddressCohorts, + pub supply_state: M::Stored>, + pub any_address_indexes: AnyAddressIndexesVecs, + pub addresses_data: AddressesDataVecs, + pub utxo_cohorts: UTXOCohorts, + pub address_cohorts: AddressCohorts, - pub addr_count: AddrCountsVecs, - pub empty_addr_count: AddrCountsVecs, - pub address_activity: AddressActivityVecs, + pub addr_count: AddrCountsVecs, + pub empty_addr_count: AddrCountsVecs, + pub address_activity: AddressActivityVecs, /// Total addresses ever seen (addr_count + empty_addr_count) - lazy, global + per-type pub total_addr_count: TotalAddrCountVecs, - /// New addresses per block (delta of total) - lazy height, stored dateindex stats, global + per-type - pub new_addr_count: NewAddrCountVecs, + /// New addresses per block (delta of total) - lazy height, stored day1 stats, global + per-type + pub new_addr_count: NewAddrCountVecs, /// Growth rate (new / addr_count) - lazy ratio with distribution stats, global + per-type pub growth_rate: GrowthRateVecs, @@ -66,11 +66,11 @@ pub struct Vecs { const SAVED_STAMPED_CHANGES: u16 = 10; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( parent: &Path, parent_version: Version, indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, ) -> Result { let db_path = parent.join(super::DB_NAME); let states_path = db_path.join("states"); @@ -81,14 +81,14 @@ impl Vecs { let version = parent_version + VERSION; - let utxo_cohorts = UTXOCohorts::forced_import(&db, version, indexes, price, &states_path)?; + let utxo_cohorts = UTXOCohorts::forced_import(&db, version, indexes, prices, &states_path)?; // Create address cohorts with 
reference to utxo "all" cohort's supply for global ratios let address_cohorts = AddressCohorts::forced_import( &db, version, indexes, - price, + prices, &states_path, Some(&utxo_cohorts.all.metrics.supply), )?; @@ -107,14 +107,14 @@ impl Vecs { let fundedaddressindex = LazyVecFrom1::init( "fundedaddressindex", version, - fundedaddressindex_to_fundedaddressdata.boxed_clone(), - |index, _| Some(index), + fundedaddressindex_to_fundedaddressdata.read_only_boxed_clone(), + |index, _| index, ); let emptyaddressindex = LazyVecFrom1::init( "emptyaddressindex", version, - emptyaddressindex_to_emptyaddressdata.boxed_clone(), - |index, _| Some(index), + emptyaddressindex_to_emptyaddressdata.read_only_boxed_clone(), + |index, _| index, ); let addr_count = AddrCountsVecs::forced_import(&db, "addr_count", version, indexes)?; @@ -125,7 +125,6 @@ impl Vecs { // Lazy total = addr_count + empty_addr_count (global + per-type, with all derived indexes) let total_addr_count = TotalAddrCountVecs::forced_import( - &db, version, indexes, &addr_count, @@ -138,7 +137,7 @@ impl Vecs { // Growth rate: new / addr_count (global + per-type) let growth_rate = - GrowthRateVecs::forced_import(&db, version, indexes, &new_addr_count, &addr_count)?; + GrowthRateVecs::forced_import(version, indexes, &new_addr_count, &addr_count)?; let this = Self { supply_state: BytesVec::forced_import_with( @@ -187,7 +186,7 @@ impl Vecs { /// 4. Computes aggregate cohorts from separate cohorts /// 5. Computes derived metrics #[allow(clippy::too_many_arguments)] - pub fn compute( + pub(crate) fn compute( &mut self, indexer: &Indexer, indexes: &indexes::Vecs, @@ -195,15 +194,13 @@ impl Vecs { outputs: &outputs::Vecs, transactions: &transactions::Vecs, blocks: &blocks::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, starting_indexes: &mut ComputeIndexes, exit: &Exit, ) -> Result<()> { // 1. 
Find minimum height we have data for across stateful vecs let current_height = Height::from(self.supply_state.len()); - let height_based_min = self.min_stateful_height_len(); - let dateindex_min = self.min_stateful_dateindex_len(); - let min_stateful = adjust_for_dateindex_gap(height_based_min, dateindex_min, indexes)?; + let min_stateful = self.min_stateful_height_len(); // 2. Determine start mode and recover/reset state // Clamp to starting_indexes.height to handle reorg (indexer may require earlier start) @@ -267,22 +264,21 @@ impl Vecs { // Recover chain_state from stored values debug!("recovering chain_state from stored values"); let height_to_timestamp = &blocks.time.timestamp_monotonic; - let height_to_price = price.map(|p| &p.cents.split.height.close); + let height_to_price = &prices.cents.price; - let mut height_to_timestamp_iter = height_to_timestamp.into_iter(); - let mut height_to_price_iter = height_to_price.map(|v| v.into_iter()); - let mut chain_state_iter = self.supply_state.into_iter(); + let end = usize::from(recovered_height); + let timestamp_data: Vec<_> = height_to_timestamp.collect_range_at(0, end); + let price_data: Vec<_> = height_to_price.collect_range_at(0, end); debug!("building supply_state vec for {} heights", recovered_height); - let chain_state = (0..recovered_height.to_usize()) - .map(|h| { - let h = Height::from(h); - let price = height_to_price_iter.as_mut().map(|v| *v.get_unwrap(h)); - BlockState { - supply: chain_state_iter.get_unwrap(h), - price, - timestamp: height_to_timestamp_iter.get_unwrap(h), - } + let supply_state_data: Vec<_> = self.supply_state.collect_range_at(0, end); + let chain_state = supply_state_data + .into_iter() + .enumerate() + .map(|(h, supply)| BlockState { + supply, + price: price_data[h], + timestamp: timestamp_data[h], }) .collect(); debug!("chain_state vec built"); @@ -293,14 +289,15 @@ impl Vecs { // Update starting_indexes if we need to recompute from an earlier point if starting_height < 
starting_indexes.height { starting_indexes.height = starting_height; - // Also update dateindex to match + // Also update day1 to match if starting_height.is_zero() { - starting_indexes.dateindex = DateIndex::from(0); + starting_indexes.day1 = Day1::from(0); } else { - starting_indexes.dateindex = indexes + starting_indexes.day1 = indexes .height - .dateindex - .read_once(starting_height.decremented().unwrap())?; + .day1 + .collect_one(starting_height.decremented().unwrap()) + .unwrap(); } } @@ -330,7 +327,7 @@ impl Vecs { outputs, transactions, blocks, - price, + prices, starting_height, last_height, &mut chain_state, @@ -346,62 +343,38 @@ impl Vecs { exit, )?; - // 6. Compute rest part1 (dateindex mappings) + // 6. Compute rest part1 (day1 mappings) aggregates::compute_rest_part1( &mut self.utxo_cohorts, &mut self.address_cohorts, - indexes, - price, + blocks, + prices, starting_indexes, exit, )?; - // 6b. Compute address count dateindex vecs (by addresstype + all) + // 6b. Compute address count day1 vecs (by addresstype + all) self.addr_count - .compute_rest(indexes, starting_indexes, exit)?; + .compute_rest(blocks, starting_indexes, exit)?; self.empty_addr_count - .compute_rest(indexes, starting_indexes, exit)?; - self.address_activity - .compute_rest(indexes, starting_indexes, exit)?; + .compute_rest(blocks, starting_indexes, exit)?; - // 6c. Derive total_addr_count dateindex stats (height is lazy sum) - self.total_addr_count - .derive_from(indexes, starting_indexes, exit)?; - - // 6d. Derive new_addr_count dateindex stats (height is lazy delta) + // 6d. Compute new_addr_count cumulative (height is lazy delta) self.new_addr_count - .derive_from(indexes, starting_indexes, exit)?; - - // 6e. Derive growth_rate dateindex stats (height is lazy ratio) - self.growth_rate - .derive_from(indexes, starting_indexes, exit)?; + .compute_cumulative(starting_indexes, exit)?; // 7. 
Compute rest part2 (relative metrics) let supply_metrics = &self.utxo_cohorts.all.metrics.supply; - let height_to_market_cap = supply_metrics - .total - .dollars - .as_ref() - .map(|d| d.height.clone()); - - let dateindex_to_market_cap = supply_metrics - .total - .dollars - .as_ref() - .map(|d| d.dateindex.0.clone()); - - let height_to_market_cap_ref = height_to_market_cap.as_ref(); - let dateindex_to_market_cap_ref = dateindex_to_market_cap.as_ref(); + let height_to_market_cap = supply_metrics.total.usd.height.clone(); aggregates::compute_rest_part2( &mut self.utxo_cohorts, &mut self.address_cohorts, - indexes, - price, + blocks, + prices, starting_indexes, - height_to_market_cap_ref, - dateindex_to_market_cap_ref, + Some(&height_to_market_cap), exit, )?; @@ -410,7 +383,7 @@ impl Vecs { Ok(()) } - pub fn flush(&self) -> Result<()> { + pub(crate) fn flush(&self) -> Result<()> { self.db.flush()?; Ok(()) } @@ -428,44 +401,4 @@ impl Vecs { .min(Height::from(self.address_activity.min_stateful_height())) } - /// Get minimum length across all dateindex-indexed stateful vectors. - fn min_stateful_dateindex_len(&self) -> usize { - self.utxo_cohorts - .min_separate_stateful_dateindex_len() - .min(self.utxo_cohorts.min_aggregate_stateful_dateindex_len()) - .min(self.address_cohorts.min_separate_stateful_dateindex_len()) - } -} - -/// Adjust start height if dateindex vecs are behind where they should be. -/// -/// To resume at height H (in day D), we need days 0..D-1 complete in dateindex vecs. -/// If dateindex vecs only have length N < D, restart from the first height of day N. 
-fn adjust_for_dateindex_gap( - height_based_min: Height, - dateindex_min: usize, - indexes: &indexes::Vecs, -) -> Result { - // Skip check if no dateindex vecs exist or starting from zero - if dateindex_min == usize::MAX || height_based_min.is_zero() { - return Ok(height_based_min); - } - - // Skip if height.dateindex doesn't cover height_based_min yet - if height_based_min.to_usize() >= indexes.height.dateindex.len() { - return Ok(height_based_min); - } - - // Get the dateindex at the height we want to resume at - let required_dateindex: usize = indexes.height.dateindex.read_once(height_based_min)?.into(); - - // If dateindex vecs are behind, restart from first height of the missing day - if dateindex_min < required_dateindex && dateindex_min < indexes.dateindex.first_height.len() { - Ok(indexes - .dateindex - .first_height - .read_once(dateindex_min.into())?) - } else { - Ok(height_based_min) - } } diff --git a/crates/brk_computer/src/indexes/address.rs b/crates/brk_computer/src/indexes/address.rs index a04ff751e..50ed9c97b 100644 --- a/crates/brk_computer/src/indexes/address.rs +++ b/crates/brk_computer/src/indexes/address.rs @@ -6,7 +6,7 @@ use brk_types::{ P2PKHBytes, P2SHAddressIndex, P2SHBytes, P2TRAddressIndex, P2TRBytes, P2WPKHAddressIndex, P2WPKHBytes, P2WSHAddressIndex, P2WSHBytes, TxIndex, UnknownOutputIndex, Version, }; -use vecdb::{IterableCloneableVec, LazyVecFrom1}; +use vecdb::{ReadableCloneableVec, LazyVecFrom1}; #[derive(Clone, Traversable)] pub struct Vecs { @@ -85,102 +85,102 @@ pub struct OpReturnVecs { } impl Vecs { - pub fn forced_import(version: Version, indexer: &Indexer) -> Self { + pub(crate) fn forced_import(version: Version, indexer: &Indexer) -> Self { Self { p2pk33: P2PK33Vecs { identity: LazyVecFrom1::init( "p2pk33addressindex", version, - indexer.vecs.addresses.p2pk33bytes.boxed_clone(), - |index, _| Some(index), + indexer.vecs.addresses.p2pk33bytes.read_only_boxed_clone(), + |index, _| index, ), }, p2pk65: P2PK65Vecs { identity: 
LazyVecFrom1::init( "p2pk65addressindex", version, - indexer.vecs.addresses.p2pk65bytes.boxed_clone(), - |index, _| Some(index), + indexer.vecs.addresses.p2pk65bytes.read_only_boxed_clone(), + |index, _| index, ), }, p2pkh: P2PKHVecs { identity: LazyVecFrom1::init( "p2pkhaddressindex", version, - indexer.vecs.addresses.p2pkhbytes.boxed_clone(), - |index, _| Some(index), + indexer.vecs.addresses.p2pkhbytes.read_only_boxed_clone(), + |index, _| index, ), }, p2sh: P2SHVecs { identity: LazyVecFrom1::init( "p2shaddressindex", version, - indexer.vecs.addresses.p2shbytes.boxed_clone(), - |index, _| Some(index), + indexer.vecs.addresses.p2shbytes.read_only_boxed_clone(), + |index, _| index, ), }, p2tr: P2TRVecs { identity: LazyVecFrom1::init( "p2traddressindex", version, - indexer.vecs.addresses.p2trbytes.boxed_clone(), - |index, _| Some(index), + indexer.vecs.addresses.p2trbytes.read_only_boxed_clone(), + |index, _| index, ), }, p2wpkh: P2WPKHVecs { identity: LazyVecFrom1::init( "p2wpkhaddressindex", version, - indexer.vecs.addresses.p2wpkhbytes.boxed_clone(), - |index, _| Some(index), + indexer.vecs.addresses.p2wpkhbytes.read_only_boxed_clone(), + |index, _| index, ), }, p2wsh: P2WSHVecs { identity: LazyVecFrom1::init( "p2wshaddressindex", version, - indexer.vecs.addresses.p2wshbytes.boxed_clone(), - |index, _| Some(index), + indexer.vecs.addresses.p2wshbytes.read_only_boxed_clone(), + |index, _| index, ), }, p2a: P2AVecs { identity: LazyVecFrom1::init( "p2aaddressindex", version, - indexer.vecs.addresses.p2abytes.boxed_clone(), - |index, _| Some(index), + indexer.vecs.addresses.p2abytes.read_only_boxed_clone(), + |index, _| index, ), }, p2ms: P2MSVecs { identity: LazyVecFrom1::init( "p2msoutputindex", version, - indexer.vecs.scripts.p2ms_to_txindex.boxed_clone(), - |index, _| Some(index), + indexer.vecs.scripts.p2ms_to_txindex.read_only_boxed_clone(), + |index, _| index, ), }, empty: EmptyVecs { identity: LazyVecFrom1::init( "emptyoutputindex", version, - 
indexer.vecs.scripts.empty_to_txindex.boxed_clone(), - |index, _| Some(index), + indexer.vecs.scripts.empty_to_txindex.read_only_boxed_clone(), + |index, _| index, ), }, unknown: UnknownVecs { identity: LazyVecFrom1::init( "unknownoutputindex", version, - indexer.vecs.scripts.unknown_to_txindex.boxed_clone(), - |index, _| Some(index), + indexer.vecs.scripts.unknown_to_txindex.read_only_boxed_clone(), + |index, _| index, ), }, opreturn: OpReturnVecs { identity: LazyVecFrom1::init( "opreturnindex", version, - indexer.vecs.scripts.opreturn_to_txindex.boxed_clone(), - |index, _| Some(index), + indexer.vecs.scripts.opreturn_to_txindex.read_only_boxed_clone(), + |index, _| index, ), }, } diff --git a/crates/brk_computer/src/indexes/dateindex.rs b/crates/brk_computer/src/indexes/dateindex.rs deleted file mode 100644 index 9b4158099..000000000 --- a/crates/brk_computer/src/indexes/dateindex.rs +++ /dev/null @@ -1,28 +0,0 @@ -use brk_traversable::Traversable; -use brk_types::{Date, DateIndex, Height, MonthIndex, StoredU64, Version, WeekIndex}; -use vecdb::{Database, EagerVec, ImportableVec, PcoVec}; - -use brk_error::Result; - -#[derive(Clone, Traversable)] -pub struct Vecs { - pub identity: EagerVec>, - pub date: EagerVec>, - pub first_height: EagerVec>, - pub height_count: EagerVec>, - pub weekindex: EagerVec>, - pub monthindex: EagerVec>, -} - -impl Vecs { - pub fn forced_import(db: &Database, version: Version) -> Result { - Ok(Self { - identity: EagerVec::forced_import(db, "dateindex", version)?, - date: EagerVec::forced_import(db, "date", version + Version::ONE)?, - first_height: EagerVec::forced_import(db, "first_height", version)?, - height_count: EagerVec::forced_import(db, "height_count", version)?, - weekindex: EagerVec::forced_import(db, "weekindex", version)?, - monthindex: EagerVec::forced_import(db, "monthindex", version)?, - }) - } -} diff --git a/crates/brk_computer/src/indexes/day1.rs b/crates/brk_computer/src/indexes/day1.rs new file mode 100644 index 
000000000..b25fb018c --- /dev/null +++ b/crates/brk_computer/src/indexes/day1.rs @@ -0,0 +1,24 @@ +use brk_traversable::Traversable; +use brk_types::{Date, Day1, Height, StoredU64, Version}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec, Rw, StorageMode}; + +use brk_error::Result; + +#[derive(Traversable)] +pub struct Vecs { + pub identity: M::Stored>>, + pub date: M::Stored>>, + pub first_height: M::Stored>>, + pub height_count: M::Stored>>, +} + +impl Vecs { + pub(crate) fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "day1", version)?, + date: EagerVec::forced_import(db, "date", version + Version::ONE)?, + first_height: EagerVec::forced_import(db, "first_height", version)?, + height_count: EagerVec::forced_import(db, "height_count", version)?, + }) + } +} diff --git a/crates/brk_computer/src/indexes/day3.rs b/crates/brk_computer/src/indexes/day3.rs new file mode 100644 index 000000000..90c6490ee --- /dev/null +++ b/crates/brk_computer/src/indexes/day3.rs @@ -0,0 +1,20 @@ +use brk_traversable::Traversable; +use brk_types::{Day3, Height, Version}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec, Rw, StorageMode}; + +use brk_error::Result; + +#[derive(Traversable)] +pub struct Vecs { + pub identity: M::Stored>>, + pub first_height: M::Stored>>, +} + +impl Vecs { + pub(crate) fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "day3", version)?, + first_height: EagerVec::forced_import(db, "day3_first_height", version)?, + }) + } +} diff --git a/crates/brk_computer/src/indexes/decadeindex.rs b/crates/brk_computer/src/indexes/decadeindex.rs deleted file mode 100644 index f920412a8..000000000 --- a/crates/brk_computer/src/indexes/decadeindex.rs +++ /dev/null @@ -1,24 +0,0 @@ -use brk_traversable::Traversable; -use brk_types::{Date, DecadeIndex, StoredU64, Version, YearIndex}; -use vecdb::{Database, EagerVec, ImportableVec, 
PcoVec}; - -use brk_error::Result; - -#[derive(Clone, Traversable)] -pub struct Vecs { - pub identity: EagerVec>, - pub date: EagerVec>, - pub first_yearindex: EagerVec>, - pub yearindex_count: EagerVec>, -} - -impl Vecs { - pub fn forced_import(db: &Database, version: Version) -> Result { - Ok(Self { - identity: EagerVec::forced_import(db, "decadeindex", version)?, - date: EagerVec::forced_import(db, "date", version)?, - first_yearindex: EagerVec::forced_import(db, "first_yearindex", version)?, - yearindex_count: EagerVec::forced_import(db, "yearindex_count", version)?, - }) - } -} diff --git a/crates/brk_computer/src/indexes/difficultyepoch.rs b/crates/brk_computer/src/indexes/difficultyepoch.rs index edff96bec..cd07683df 100644 --- a/crates/brk_computer/src/indexes/difficultyepoch.rs +++ b/crates/brk_computer/src/indexes/difficultyepoch.rs @@ -1,18 +1,18 @@ use brk_traversable::Traversable; use brk_types::{DifficultyEpoch, Height, StoredU64, Version}; -use vecdb::{Database, EagerVec, ImportableVec, PcoVec}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec, Rw, StorageMode}; use brk_error::Result; -#[derive(Clone, Traversable)] -pub struct Vecs { - pub identity: EagerVec>, - pub first_height: EagerVec>, - pub height_count: EagerVec>, +#[derive(Traversable)] +pub struct Vecs { + pub identity: M::Stored>>, + pub first_height: M::Stored>>, + pub height_count: M::Stored>>, } impl Vecs { - pub fn forced_import(db: &Database, version: Version) -> Result { + pub(crate) fn forced_import(db: &Database, version: Version) -> Result { Ok(Self { identity: EagerVec::forced_import(db, "difficultyepoch", version)?, first_height: EagerVec::forced_import(db, "first_height", version)?, diff --git a/crates/brk_computer/src/indexes/halvingepoch.rs b/crates/brk_computer/src/indexes/halvingepoch.rs index 4cf533637..d09c783d1 100644 --- a/crates/brk_computer/src/indexes/halvingepoch.rs +++ b/crates/brk_computer/src/indexes/halvingepoch.rs @@ -1,17 +1,17 @@ use 
brk_traversable::Traversable; use brk_types::{HalvingEpoch, Height, Version}; -use vecdb::{Database, EagerVec, ImportableVec, PcoVec}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec, Rw, StorageMode}; use brk_error::Result; -#[derive(Clone, Traversable)] -pub struct Vecs { - pub identity: EagerVec>, - pub first_height: EagerVec>, +#[derive(Traversable)] +pub struct Vecs { + pub identity: M::Stored>>, + pub first_height: M::Stored>>, } impl Vecs { - pub fn forced_import(db: &Database, version: Version) -> Result { + pub(crate) fn forced_import(db: &Database, version: Version) -> Result { Ok(Self { identity: EagerVec::forced_import(db, "halvingepoch", version)?, first_height: EagerVec::forced_import(db, "first_height", version)?, diff --git a/crates/brk_computer/src/indexes/height.rs b/crates/brk_computer/src/indexes/height.rs index 365c24155..d6bf8076d 100644 --- a/crates/brk_computer/src/indexes/height.rs +++ b/crates/brk_computer/src/indexes/height.rs @@ -1,25 +1,57 @@ use brk_traversable::Traversable; -use brk_types::{DateIndex, DifficultyEpoch, HalvingEpoch, Height, StoredU64, Version}; -use vecdb::{Database, EagerVec, ImportableVec, PcoVec}; +use brk_types::{ + Day1, Day3, Year10, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, + Minute1, Minute10, Minute30, Minute5, Month1, Month3, Month6, StoredU64, Version, Week1, + Year1, +}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec, Rw, StorageMode}; use brk_error::Result; -#[derive(Clone, Traversable)] -pub struct Vecs { - pub identity: EagerVec>, - pub dateindex: EagerVec>, - pub difficultyepoch: EagerVec>, - pub halvingepoch: EagerVec>, - pub txindex_count: EagerVec>, +#[derive(Traversable)] +pub struct Vecs { + pub identity: M::Stored>>, + pub minute1: M::Stored>>, + pub minute5: M::Stored>>, + pub minute10: M::Stored>>, + pub minute30: M::Stored>>, + pub hour1: M::Stored>>, + pub hour4: M::Stored>>, + pub hour12: M::Stored>>, + pub day1: M::Stored>>, + pub day3: M::Stored>>, + pub 
difficultyepoch: M::Stored>>, + pub halvingepoch: M::Stored>>, + pub week1: M::Stored>>, + pub month1: M::Stored>>, + pub month3: M::Stored>>, + pub month6: M::Stored>>, + pub year1: M::Stored>>, + pub year10: M::Stored>>, + pub txindex_count: M::Stored>>, } impl Vecs { - pub fn forced_import(db: &Database, version: Version) -> Result { + pub(crate) fn forced_import(db: &Database, version: Version) -> Result { Ok(Self { identity: EagerVec::forced_import(db, "height", version)?, - dateindex: EagerVec::forced_import(db, "dateindex", version)?, + minute1: EagerVec::forced_import(db, "minute1", version)?, + minute5: EagerVec::forced_import(db, "minute5", version)?, + minute10: EagerVec::forced_import(db, "minute10", version)?, + minute30: EagerVec::forced_import(db, "minute30", version)?, + hour1: EagerVec::forced_import(db, "hour1", version)?, + hour4: EagerVec::forced_import(db, "hour4", version)?, + hour12: EagerVec::forced_import(db, "hour12", version)?, + day1: EagerVec::forced_import(db, "day1", version)?, + day3: EagerVec::forced_import(db, "day3", version)?, difficultyepoch: EagerVec::forced_import(db, "difficultyepoch", version)?, halvingepoch: EagerVec::forced_import(db, "halvingepoch", version)?, + week1: EagerVec::forced_import(db, "week1", version)?, + month1: EagerVec::forced_import(db, "month1", version)?, + month3: EagerVec::forced_import(db, "month3", version)?, + month6: EagerVec::forced_import(db, "month6", version)?, + year1: EagerVec::forced_import(db, "year1", version)?, + year10: EagerVec::forced_import(db, "year10", version)?, txindex_count: EagerVec::forced_import(db, "txindex_count", version)?, }) } diff --git a/crates/brk_computer/src/indexes/hour1.rs b/crates/brk_computer/src/indexes/hour1.rs new file mode 100644 index 000000000..4d57d383f --- /dev/null +++ b/crates/brk_computer/src/indexes/hour1.rs @@ -0,0 +1,20 @@ +use brk_traversable::Traversable; +use brk_types::{Height, Hour1, Version}; +use vecdb::{Database, EagerVec, ImportableVec, 
PcoVec, Rw, StorageMode}; + +use brk_error::Result; + +#[derive(Traversable)] +pub struct Vecs { + pub identity: M::Stored>>, + pub first_height: M::Stored>>, +} + +impl Vecs { + pub(crate) fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "hour1", version)?, + first_height: EagerVec::forced_import(db, "hour1_first_height", version)?, + }) + } +} diff --git a/crates/brk_computer/src/indexes/hour12.rs b/crates/brk_computer/src/indexes/hour12.rs new file mode 100644 index 000000000..12dd4d640 --- /dev/null +++ b/crates/brk_computer/src/indexes/hour12.rs @@ -0,0 +1,20 @@ +use brk_traversable::Traversable; +use brk_types::{Height, Hour12, Version}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec, Rw, StorageMode}; + +use brk_error::Result; + +#[derive(Traversable)] +pub struct Vecs { + pub identity: M::Stored>>, + pub first_height: M::Stored>>, +} + +impl Vecs { + pub(crate) fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "hour12", version)?, + first_height: EagerVec::forced_import(db, "hour12_first_height", version)?, + }) + } +} diff --git a/crates/brk_computer/src/indexes/hour4.rs b/crates/brk_computer/src/indexes/hour4.rs new file mode 100644 index 000000000..a59acee41 --- /dev/null +++ b/crates/brk_computer/src/indexes/hour4.rs @@ -0,0 +1,20 @@ +use brk_traversable::Traversable; +use brk_types::{Height, Hour4, Version}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec, Rw, StorageMode}; + +use brk_error::Result; + +#[derive(Traversable)] +pub struct Vecs { + pub identity: M::Stored>>, + pub first_height: M::Stored>>, +} + +impl Vecs { + pub(crate) fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "hour4", version)?, + first_height: EagerVec::forced_import(db, "hour4_first_height", version)?, + }) + } +} diff --git a/crates/brk_computer/src/indexes/minute1.rs 
b/crates/brk_computer/src/indexes/minute1.rs new file mode 100644 index 000000000..c728e8550 --- /dev/null +++ b/crates/brk_computer/src/indexes/minute1.rs @@ -0,0 +1,20 @@ +use brk_traversable::Traversable; +use brk_types::{Height, Minute1, Version}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec, Rw, StorageMode}; + +use brk_error::Result; + +#[derive(Traversable)] +pub struct Vecs { + pub identity: M::Stored>>, + pub first_height: M::Stored>>, +} + +impl Vecs { + pub(crate) fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "minute1", version)?, + first_height: EagerVec::forced_import(db, "minute1_first_height", version)?, + }) + } +} diff --git a/crates/brk_computer/src/indexes/minute10.rs b/crates/brk_computer/src/indexes/minute10.rs new file mode 100644 index 000000000..bdbcfead1 --- /dev/null +++ b/crates/brk_computer/src/indexes/minute10.rs @@ -0,0 +1,20 @@ +use brk_traversable::Traversable; +use brk_types::{Height, Minute10, Version}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec, Rw, StorageMode}; + +use brk_error::Result; + +#[derive(Traversable)] +pub struct Vecs { + pub identity: M::Stored>>, + pub first_height: M::Stored>>, +} + +impl Vecs { + pub(crate) fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "minute10", version)?, + first_height: EagerVec::forced_import(db, "minute10_first_height", version)?, + }) + } +} diff --git a/crates/brk_computer/src/indexes/minute30.rs b/crates/brk_computer/src/indexes/minute30.rs new file mode 100644 index 000000000..4d6344eef --- /dev/null +++ b/crates/brk_computer/src/indexes/minute30.rs @@ -0,0 +1,20 @@ +use brk_traversable::Traversable; +use brk_types::{Height, Minute30, Version}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec, Rw, StorageMode}; + +use brk_error::Result; + +#[derive(Traversable)] +pub struct Vecs { + pub identity: M::Stored>>, + pub 
first_height: M::Stored>>, +} + +impl Vecs { + pub(crate) fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "minute30", version)?, + first_height: EagerVec::forced_import(db, "minute30_first_height", version)?, + }) + } +} diff --git a/crates/brk_computer/src/indexes/minute5.rs b/crates/brk_computer/src/indexes/minute5.rs new file mode 100644 index 000000000..82c060e29 --- /dev/null +++ b/crates/brk_computer/src/indexes/minute5.rs @@ -0,0 +1,20 @@ +use brk_traversable::Traversable; +use brk_types::{Height, Minute5, Version}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec, Rw, StorageMode}; + +use brk_error::Result; + +#[derive(Traversable)] +pub struct Vecs { + pub identity: M::Stored>>, + pub first_height: M::Stored>>, +} + +impl Vecs { + pub(crate) fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "minute5", version)?, + first_height: EagerVec::forced_import(db, "minute5_first_height", version)?, + }) + } +} diff --git a/crates/brk_computer/src/indexes/mod.rs b/crates/brk_computer/src/indexes/mod.rs index da9079597..9fe6e2573 100644 --- a/crates/brk_computer/src/indexes/mod.rs +++ b/crates/brk_computer/src/indexes/mod.rs @@ -1,68 +1,95 @@ mod address; -mod dateindex; -mod decadeindex; +mod day1; +mod day3; mod difficultyepoch; mod halvingepoch; mod height; -mod monthindex; -mod quarterindex; -mod semesterindex; +mod hour1; +mod hour12; +mod hour4; +mod minute1; +mod minute10; +mod minute30; +mod minute5; +mod month1; +mod month3; +mod month6; mod txindex; mod txinindex; mod txoutindex; -mod weekindex; -mod yearindex; +mod week1; +mod year1; +mod year10; use std::path::Path; use brk_error::Result; use brk_indexer::Indexer; use brk_traversable::Traversable; -use brk_types::{Date, DateIndex, Indexes, MonthIndex, Version, WeekIndex}; -use vecdb::{Database, Exit, IterableVec, PAGE_SIZE, TypedVecIterator}; +use brk_types::{ + Date, 
Day1, Day3, Hour1, Hour4, Hour12, Indexes, Minute1, Minute5, Minute10, Minute30, Month1, + Month3, Month6, Version, Week1, Year1, Year10, +}; +use vecdb::{Database, Exit, PAGE_SIZE, ReadableVec, Rw, StorageMode}; use crate::blocks; pub use address::Vecs as AddressVecs; pub use brk_types::ComputeIndexes; -pub use dateindex::Vecs as DateIndexVecs; -pub use decadeindex::Vecs as DecadeIndexVecs; +pub use day1::Vecs as Day1Vecs; +pub use day3::Vecs as Day3Vecs; pub use difficultyepoch::Vecs as DifficultyEpochVecs; pub use halvingepoch::Vecs as HalvingEpochVecs; pub use height::Vecs as HeightVecs; -pub use monthindex::Vecs as MonthIndexVecs; -pub use quarterindex::Vecs as QuarterIndexVecs; -pub use semesterindex::Vecs as SemesterIndexVecs; +pub use hour1::Vecs as Hour1Vecs; +pub use hour4::Vecs as Hour4Vecs; +pub use hour12::Vecs as Hour12Vecs; +pub use minute1::Vecs as Minute1Vecs; +pub use minute5::Vecs as Minute5Vecs; +pub use minute10::Vecs as Minute10Vecs; +pub use minute30::Vecs as Minute30Vecs; +pub use month1::Vecs as Month1Vecs; +pub use month3::Vecs as Month3Vecs; +pub use month6::Vecs as Month6Vecs; pub use txindex::Vecs as TxIndexVecs; pub use txinindex::Vecs as TxInIndexVecs; pub use txoutindex::Vecs as TxOutIndexVecs; -pub use weekindex::Vecs as WeekIndexVecs; -pub use yearindex::Vecs as YearIndexVecs; +pub use week1::Vecs as Week1Vecs; +pub use year1::Vecs as Year1Vecs; +pub use year10::Vecs as Year10Vecs; const VERSION: Version = Version::ZERO; pub const DB_NAME: &str = "indexes"; -#[derive(Clone, Traversable)] -pub struct Vecs { +#[derive(Traversable)] +pub struct Vecs { db: Database, pub address: AddressVecs, - pub height: HeightVecs, - pub difficultyepoch: DifficultyEpochVecs, - pub halvingepoch: HalvingEpochVecs, - pub dateindex: DateIndexVecs, - pub weekindex: WeekIndexVecs, - pub monthindex: MonthIndexVecs, - pub quarterindex: QuarterIndexVecs, - pub semesterindex: SemesterIndexVecs, - pub yearindex: YearIndexVecs, - pub decadeindex: 
DecadeIndexVecs, - pub txindex: TxIndexVecs, + pub height: HeightVecs, + pub difficultyepoch: DifficultyEpochVecs, + pub halvingepoch: HalvingEpochVecs, + pub minute1: Minute1Vecs, + pub minute5: Minute5Vecs, + pub minute10: Minute10Vecs, + pub minute30: Minute30Vecs, + pub hour1: Hour1Vecs, + pub hour4: Hour4Vecs, + pub hour12: Hour12Vecs, + pub day1: Day1Vecs, + pub day3: Day3Vecs, + pub week1: Week1Vecs, + pub month1: Month1Vecs, + pub month3: Month3Vecs, + pub month6: Month6Vecs, + pub year1: Year1Vecs, + pub year10: Year10Vecs, + pub txindex: TxIndexVecs, pub txinindex: TxInIndexVecs, pub txoutindex: TxOutIndexVecs, } impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( parent: &Path, parent_version: Version, indexer: &Indexer, @@ -77,13 +104,21 @@ impl Vecs { height: HeightVecs::forced_import(&db, version)?, difficultyepoch: DifficultyEpochVecs::forced_import(&db, version)?, halvingepoch: HalvingEpochVecs::forced_import(&db, version)?, - dateindex: DateIndexVecs::forced_import(&db, version)?, - weekindex: WeekIndexVecs::forced_import(&db, version)?, - monthindex: MonthIndexVecs::forced_import(&db, version)?, - quarterindex: QuarterIndexVecs::forced_import(&db, version)?, - semesterindex: SemesterIndexVecs::forced_import(&db, version)?, - yearindex: YearIndexVecs::forced_import(&db, version)?, - decadeindex: DecadeIndexVecs::forced_import(&db, version)?, + minute1: Minute1Vecs::forced_import(&db, version)?, + minute5: Minute5Vecs::forced_import(&db, version)?, + minute10: Minute10Vecs::forced_import(&db, version)?, + minute30: Minute30Vecs::forced_import(&db, version)?, + hour1: Hour1Vecs::forced_import(&db, version)?, + hour4: Hour4Vecs::forced_import(&db, version)?, + hour12: Hour12Vecs::forced_import(&db, version)?, + day1: Day1Vecs::forced_import(&db, version)?, + day3: Day3Vecs::forced_import(&db, version)?, + week1: Week1Vecs::forced_import(&db, version)?, + month1: Month1Vecs::forced_import(&db, version)?, + month3: 
Month3Vecs::forced_import(&db, version)?, + month6: Month6Vecs::forced_import(&db, version)?, + year1: Year1Vecs::forced_import(&db, version)?, + year10: Year10Vecs::forced_import(&db, version)?, txindex: TxIndexVecs::forced_import(&db, version, indexer)?, txinindex: TxInIndexVecs::forced_import(version, indexer), txoutindex: TxOutIndexVecs::forced_import(version, indexer), @@ -100,14 +135,17 @@ impl Vecs { Ok(this) } - pub fn compute( + pub(crate) fn compute( &mut self, indexer: &Indexer, - blocks_time: &blocks::time::Vecs, + blocks: &mut blocks::Vecs, starting_indexes: Indexes, exit: &Exit, ) -> Result { - let indexes = self.compute_(indexer, blocks_time, starting_indexes, exit)?; + blocks + .time + .compute(indexer, starting_indexes.height, exit)?; + let indexes = self.compute_(indexer, &blocks.time, starting_indexes, exit)?; let _lock = exit.lock(); self.db.compact()?; Ok(indexes) @@ -150,38 +188,100 @@ impl Vecs { let decremented_starting_height = starting_indexes.height.decremented().unwrap_or_default(); - // DateIndex (uses blocks_time.date_monotonic computed in blocks::time::compute_early) - let starting_dateindex = self - .height - .dateindex - .into_iter() - .get(decremented_starting_height) - .unwrap_or_default(); + // --- Timestamp-based height → period mappings --- - self.height.dateindex.compute_transform( + // Minute1 + self.height.minute1.compute_transform( starting_indexes.height, - &blocks_time.date, - |(h, d, ..)| (h, DateIndex::try_from(d).unwrap()), + &blocks_time.timestamp_monotonic, + |(h, ts, _)| (h, Minute1::from_timestamp(ts)), exit, )?; - let starting_dateindex = if let Some(dateindex) = self + // Minute5 + self.height.minute5.compute_transform( + starting_indexes.height, + &blocks_time.timestamp_monotonic, + |(h, ts, _)| (h, Minute5::from_timestamp(ts)), + exit, + )?; + + // Minute10 + self.height.minute10.compute_transform( + starting_indexes.height, + &blocks_time.timestamp_monotonic, + |(h, ts, _)| (h, Minute10::from_timestamp(ts)), + 
exit, + )?; + + // Minute30 + self.height.minute30.compute_transform( + starting_indexes.height, + &blocks_time.timestamp_monotonic, + |(h, ts, _)| (h, Minute30::from_timestamp(ts)), + exit, + )?; + + // Hour1 + self.height.hour1.compute_transform( + starting_indexes.height, + &blocks_time.timestamp_monotonic, + |(h, ts, _)| (h, Hour1::from_timestamp(ts)), + exit, + )?; + + // Hour4 + self.height.hour4.compute_transform( + starting_indexes.height, + &blocks_time.timestamp_monotonic, + |(h, ts, _)| (h, Hour4::from_timestamp(ts)), + exit, + )?; + + // Hour12 + self.height.hour12.compute_transform( + starting_indexes.height, + &blocks_time.timestamp_monotonic, + |(h, ts, _)| (h, Hour12::from_timestamp(ts)), + exit, + )?; + + // Day3 + self.height.day3.compute_transform( + starting_indexes.height, + &blocks_time.timestamp_monotonic, + |(h, ts, _)| (h, Day3::from_timestamp(ts)), + exit, + )?; + + // --- Calendar-based height → period mappings --- + + // Day1 (uses blocks_time.date computed in blocks::time::compute_early) + let starting_day1 = self .height - .dateindex - .into_iter() - .get(decremented_starting_height) - { - starting_dateindex.min(dateindex) - } else { - starting_dateindex - }; + .day1 + .collect_one(decremented_starting_height) + .unwrap_or_default(); + + self.height.day1.compute_transform( + starting_indexes.height, + &blocks_time.date, + |(h, d, ..)| (h, Day1::try_from(d).unwrap()), + exit, + )?; + + let starting_day1 = + if let Some(day1) = self.height.day1.collect_one(decremented_starting_height) { + starting_day1.min(day1) + } else { + starting_day1 + }; // Difficulty epoch let starting_difficultyepoch = self .height .difficultyepoch - .into_iter() - .get(decremented_starting_height) + .collect_one(decremented_starting_height) .unwrap_or_default(); self.height.difficultyepoch.compute_from_index( @@ -190,7 +290,7 @@ impl Vecs { exit, )?; - self.difficultyepoch.first_height.compute_coarser( + self.difficultyepoch.first_height.compute_first_per_index( 
starting_indexes.height, &self.height.difficultyepoch, exit, @@ -215,8 +315,7 @@ impl Vecs { let starting_halvingepoch = self .height .halvingepoch - .into_iter() - .get(decremented_starting_height) + .collect_one(decremented_starting_height) .unwrap_or_default(); self.height.halvingepoch.compute_from_index( @@ -225,7 +324,7 @@ impl Vecs { exit, )?; - self.halvingepoch.first_height.compute_coarser( + self.halvingepoch.first_height.compute_first_per_index( starting_indexes.height, &self.height.halvingepoch, exit, @@ -237,310 +336,356 @@ impl Vecs { exit, )?; - // Time indexes (depends on height.dateindex) - self.dateindex.first_height.compute_coarser( + // Height → period mappings (calendar-based, derived from height.day1) + self.height.week1.compute_transform( starting_indexes.height, - &self.height.dateindex, + &self.height.day1, + |(h, di, _)| (h, Week1::from(di)), + exit, + )?; + self.height.month1.compute_transform( + starting_indexes.height, + &self.height.day1, + |(h, di, _)| (h, Month1::from(di)), + exit, + )?; + self.height.month3.compute_transform( + starting_indexes.height, + &self.height.month1, + |(h, mi, _)| (h, Month3::from(mi)), + exit, + )?; + self.height.month6.compute_transform( + starting_indexes.height, + &self.height.month1, + |(h, mi, _)| (h, Month6::from(mi)), + exit, + )?; + self.height.year1.compute_transform( + starting_indexes.height, + &self.height.month1, + |(h, mi, _)| (h, Year1::from(mi)), + exit, + )?; + self.height.year10.compute_transform( + starting_indexes.height, + &self.height.year1, + |(h, yi, _)| (h, Year10::from(yi)), exit, )?; - self.dateindex.identity.compute_from_index( - starting_dateindex, - &self.dateindex.first_height, + // --- Starting values from height → period mappings --- + + let starting_minute1 = self + .height + .minute1 + .collect_one(decremented_starting_height) + .unwrap_or_default(); + let starting_minute5 = self + .height + .minute5 + .collect_one(decremented_starting_height) + .unwrap_or_default(); + let 
starting_minute10 = self + .height + .minute10 + .collect_one(decremented_starting_height) + .unwrap_or_default(); + let starting_minute30 = self + .height + .minute30 + .collect_one(decremented_starting_height) + .unwrap_or_default(); + let starting_hour1 = self + .height + .hour1 + .collect_one(decremented_starting_height) + .unwrap_or_default(); + let starting_hour4 = self + .height + .hour4 + .collect_one(decremented_starting_height) + .unwrap_or_default(); + let starting_hour12 = self + .height + .hour12 + .collect_one(decremented_starting_height) + .unwrap_or_default(); + let starting_day3 = self + .height + .day3 + .collect_one(decremented_starting_height) + .unwrap_or_default(); + let starting_week1 = self + .height + .week1 + .collect_one(decremented_starting_height) + .unwrap_or_default(); + let starting_month1 = self + .height + .month1 + .collect_one(decremented_starting_height) + .unwrap_or_default(); + let starting_month3 = self + .height + .month3 + .collect_one(decremented_starting_height) + .unwrap_or_default(); + let starting_month6 = self + .height + .month6 + .collect_one(decremented_starting_height) + .unwrap_or_default(); + let starting_year1 = self + .height + .year1 + .collect_one(decremented_starting_height) + .unwrap_or_default(); + let starting_year10 = self + .height + .year10 + .collect_one(decremented_starting_height) + .unwrap_or_default(); + + // --- Compute period-level vecs (first_height + identity) --- + + // Minute1 + self.minute1.first_height.compute_first_per_index( + starting_indexes.height, + &self.height.minute1, + exit, + )?; + self.minute1.identity.compute_from_index( + starting_minute1, + &self.minute1.first_height, exit, )?; - self.dateindex.date.compute_transform( - starting_dateindex, - &self.dateindex.identity, + // Minute5 + self.minute5.first_height.compute_first_per_index( + starting_indexes.height, + &self.height.minute5, + exit, + )?; + self.minute5.identity.compute_from_index( + starting_minute5, + 
&self.minute5.first_height, + exit, + )?; + + // Minute10 + self.minute10.first_height.compute_first_per_index( + starting_indexes.height, + &self.height.minute10, + exit, + )?; + self.minute10.identity.compute_from_index( + starting_minute10, + &self.minute10.first_height, + exit, + )?; + + // Minute30 + self.minute30.first_height.compute_first_per_index( + starting_indexes.height, + &self.height.minute30, + exit, + )?; + self.minute30.identity.compute_from_index( + starting_minute30, + &self.minute30.first_height, + exit, + )?; + + // Hour1 + self.hour1.first_height.compute_first_per_index( + starting_indexes.height, + &self.height.hour1, + exit, + )?; + self.hour1 + .identity + .compute_from_index(starting_hour1, &self.hour1.first_height, exit)?; + + // Hour4 + self.hour4.first_height.compute_first_per_index( + starting_indexes.height, + &self.height.hour4, + exit, + )?; + self.hour4 + .identity + .compute_from_index(starting_hour4, &self.hour4.first_height, exit)?; + + // Hour12 + self.hour12.first_height.compute_first_per_index( + starting_indexes.height, + &self.height.hour12, + exit, + )?; + self.hour12.identity.compute_from_index( + starting_hour12, + &self.hour12.first_height, + exit, + )?; + + // Day1 + self.day1.first_height.compute_first_per_index( + starting_indexes.height, + &self.height.day1, + exit, + )?; + self.day1 + .identity + .compute_from_index(starting_day1, &self.day1.first_height, exit)?; + self.day1.date.compute_transform( + starting_day1, + &self.day1.identity, |(di, ..)| (di, Date::from(di)), exit, )?; - - self.dateindex.height_count.compute_count_from_indexes( - starting_dateindex, - &self.dateindex.first_height, + self.day1.height_count.compute_count_from_indexes( + starting_day1, + &self.day1.first_height, &indexer.vecs.blocks.weight, exit, )?; + // Day3 + self.day3.first_height.compute_first_per_index( + starting_indexes.height, + &self.height.day3, + exit, + )?; + self.day3 + .identity + .compute_from_index(starting_day3, 
&self.day3.first_height, exit)?; + + let blocks_time_date = &blocks_time.date; + // Week - let starting_weekindex = self - .dateindex - .weekindex - .into_iter() - .get(starting_dateindex) - .unwrap_or_default(); - - self.dateindex.weekindex.compute_range( - starting_dateindex, - &self.dateindex.identity, - |i| (i, WeekIndex::from(i)), + self.week1.first_height.compute_first_per_index( + starting_indexes.height, + &self.height.week1, exit, )?; - - self.weekindex.first_dateindex.compute_coarser( - starting_dateindex, - &self.dateindex.weekindex, - exit, - )?; - - self.weekindex.identity.compute_from_index( - starting_weekindex, - &self.weekindex.first_dateindex, - exit, - )?; - - self.weekindex.date.compute_transform( - starting_weekindex, - &self.weekindex.first_dateindex, - |(wi, first_di, ..)| (wi, Date::from(first_di)), - exit, - )?; - - self.weekindex.dateindex_count.compute_count_from_indexes( - starting_weekindex, - &self.weekindex.first_dateindex, - &self.dateindex.date, + self.week1 + .identity + .compute_from_index(starting_week1, &self.week1.first_height, exit)?; + self.week1.date.compute_transform( + starting_week1, + &self.week1.first_height, + |(wi, first_h, _)| (wi, blocks_time_date.collect_one(first_h).unwrap()), exit, )?; // Month - let starting_monthindex = self - .dateindex - .monthindex - .into_iter() - .get(starting_dateindex) - .unwrap_or_default(); - - self.dateindex.monthindex.compute_range( - starting_dateindex, - &self.dateindex.identity, - |i| (i, MonthIndex::from(i)), + self.month1.first_height.compute_first_per_index( + starting_indexes.height, + &self.height.month1, exit, )?; - - self.monthindex.first_dateindex.compute_coarser( - starting_dateindex, - &self.dateindex.monthindex, + self.month1.identity.compute_from_index( + starting_month1, + &self.month1.first_height, exit, )?; - - self.monthindex.identity.compute_from_index( - starting_monthindex, - &self.monthindex.first_dateindex, - exit, - )?; - - 
self.monthindex.date.compute_transform( - starting_monthindex, - &self.monthindex.first_dateindex, - |(mi, first_di, ..)| (mi, Date::from(first_di)), - exit, - )?; - - self.monthindex.dateindex_count.compute_count_from_indexes( - starting_monthindex, - &self.monthindex.first_dateindex, - &self.dateindex.date, + self.month1.date.compute_transform( + starting_month1, + &self.month1.first_height, + |(mi, first_h, _)| (mi, blocks_time_date.collect_one(first_h).unwrap()), exit, )?; // Quarter - let starting_quarterindex = self - .monthindex - .quarterindex - .into_iter() - .get(starting_monthindex) - .unwrap_or_default(); - - self.monthindex.quarterindex.compute_from_index( - starting_monthindex, - &self.monthindex.first_dateindex, + self.month3.first_height.compute_first_per_index( + starting_indexes.height, + &self.height.month3, exit, )?; - - self.quarterindex.first_monthindex.compute_coarser( - starting_monthindex, - &self.monthindex.quarterindex, + self.month3.identity.compute_from_index( + starting_month3, + &self.month3.first_height, exit, )?; - - self.quarterindex.identity.compute_from_index( - starting_quarterindex, - &self.quarterindex.first_monthindex, + self.month3.date.compute_transform( + starting_month3, + &self.month3.first_height, + |(qi, first_h, _)| (qi, blocks_time_date.collect_one(first_h).unwrap()), exit, )?; - let monthindex_first_dateindex = &self.monthindex.first_dateindex; - self.quarterindex.date.compute_transform( - starting_quarterindex, - &self.quarterindex.first_monthindex, - |(qi, first_mi, _)| { - let first_di = monthindex_first_dateindex.iter().get_unwrap(first_mi); - (qi, Date::from(first_di)) - }, - exit, - )?; - - self.quarterindex - .monthindex_count - .compute_count_from_indexes( - starting_quarterindex, - &self.quarterindex.first_monthindex, - &self.monthindex.identity, - exit, - )?; - // Semester - let starting_semesterindex = self - .monthindex - .semesterindex - .into_iter() - .get(starting_monthindex) - .unwrap_or_default(); - 
- self.monthindex.semesterindex.compute_from_index( - starting_monthindex, - &self.monthindex.first_dateindex, + self.month6.first_height.compute_first_per_index( + starting_indexes.height, + &self.height.month6, exit, )?; - - self.semesterindex.first_monthindex.compute_coarser( - starting_monthindex, - &self.monthindex.semesterindex, + self.month6.identity.compute_from_index( + starting_month6, + &self.month6.first_height, exit, )?; - - self.semesterindex.identity.compute_from_index( - starting_semesterindex, - &self.semesterindex.first_monthindex, + self.month6.date.compute_transform( + starting_month6, + &self.month6.first_height, + |(si, first_h, _)| (si, blocks_time_date.collect_one(first_h).unwrap()), exit, )?; - let monthindex_first_dateindex = &self.monthindex.first_dateindex; - self.semesterindex.date.compute_transform( - starting_semesterindex, - &self.semesterindex.first_monthindex, - |(si, first_mi, _)| { - let first_di = monthindex_first_dateindex.iter().get_unwrap(first_mi); - (si, Date::from(first_di)) - }, - exit, - )?; - - self.semesterindex - .monthindex_count - .compute_count_from_indexes( - starting_semesterindex, - &self.semesterindex.first_monthindex, - &self.monthindex.identity, - exit, - )?; - // Year - let starting_yearindex = self - .monthindex - .yearindex - .into_iter() - .get(starting_monthindex) - .unwrap_or_default(); - - self.monthindex.yearindex.compute_from_index( - starting_monthindex, - &self.monthindex.first_dateindex, + self.year1.first_height.compute_first_per_index( + starting_indexes.height, + &self.height.year1, exit, )?; - - self.yearindex.first_monthindex.compute_coarser( - starting_monthindex, - &self.monthindex.yearindex, - exit, - )?; - - self.yearindex.identity.compute_from_index( - starting_yearindex, - &self.yearindex.first_monthindex, - exit, - )?; - - let monthindex_first_dateindex = &self.monthindex.first_dateindex; - self.yearindex.date.compute_transform( - starting_yearindex, - &self.yearindex.first_monthindex, 
- |(yi, first_mi, _)| { - let first_di = monthindex_first_dateindex.iter().get_unwrap(first_mi); - (yi, Date::from(first_di)) - }, - exit, - )?; - - self.yearindex.monthindex_count.compute_count_from_indexes( - starting_yearindex, - &self.yearindex.first_monthindex, - &self.monthindex.identity, + self.year1 + .identity + .compute_from_index(starting_year1, &self.year1.first_height, exit)?; + self.year1.date.compute_transform( + starting_year1, + &self.year1.first_height, + |(yi, first_h, _)| (yi, blocks_time_date.collect_one(first_h).unwrap()), exit, )?; // Decade - let starting_decadeindex = self - .yearindex - .decadeindex - .into_iter() - .get(starting_yearindex) - .unwrap_or_default(); - - self.yearindex.decadeindex.compute_from_index( - starting_yearindex, - &self.yearindex.first_monthindex, + self.year10.first_height.compute_first_per_index( + starting_indexes.height, + &self.height.year10, exit, )?; - - self.decadeindex.first_yearindex.compute_coarser( - starting_yearindex, - &self.yearindex.decadeindex, + self.year10.identity.compute_from_index( + starting_year10, + &self.year10.first_height, exit, )?; - - self.decadeindex.identity.compute_from_index( - starting_decadeindex, - &self.decadeindex.first_yearindex, + self.year10.date.compute_transform( + starting_year10, + &self.year10.first_height, + |(di, first_h, _)| (di, blocks_time_date.collect_one(first_h).unwrap()), exit, )?; - let yearindex_first_monthindex = &self.yearindex.first_monthindex; - let monthindex_first_dateindex = &self.monthindex.first_dateindex; - self.decadeindex.date.compute_transform( - starting_decadeindex, - &self.decadeindex.first_yearindex, - |(di, first_yi, _)| { - let first_mi = yearindex_first_monthindex.iter().get_unwrap(first_yi); - let first_di = monthindex_first_dateindex.iter().get_unwrap(first_mi); - (di, Date::from(first_di)) - }, - exit, - )?; - - self.decadeindex - .yearindex_count - .compute_count_from_indexes( - starting_decadeindex, - 
&self.decadeindex.first_yearindex, - &self.yearindex.identity, - exit, - )?; - Ok(ComputeIndexes::new( starting_indexes, - starting_dateindex, - starting_weekindex, - starting_monthindex, - starting_quarterindex, - starting_semesterindex, - starting_yearindex, - starting_decadeindex, - starting_difficultyepoch, + starting_minute1, + starting_minute5, + starting_minute10, + starting_minute30, + starting_hour1, + starting_hour4, + starting_hour12, + starting_day1, + starting_day3, + starting_week1, + starting_month1, + starting_month3, + starting_month6, + starting_year1, + starting_year10, starting_halvingepoch, + starting_difficultyepoch, )) } } diff --git a/crates/brk_computer/src/indexes/month1.rs b/crates/brk_computer/src/indexes/month1.rs new file mode 100644 index 000000000..8fbde6b4b --- /dev/null +++ b/crates/brk_computer/src/indexes/month1.rs @@ -0,0 +1,22 @@ +use brk_traversable::Traversable; +use brk_types::{Date, Height, Month1, Version}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec, Rw, StorageMode}; + +use brk_error::Result; + +#[derive(Traversable)] +pub struct Vecs { + pub identity: M::Stored>>, + pub date: M::Stored>>, + pub first_height: M::Stored>>, +} + +impl Vecs { + pub(crate) fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "month1", version)?, + date: EagerVec::forced_import(db, "date", version)?, + first_height: EagerVec::forced_import(db, "month1_first_height", version)?, + }) + } +} diff --git a/crates/brk_computer/src/indexes/month3.rs b/crates/brk_computer/src/indexes/month3.rs new file mode 100644 index 000000000..bb674ecbe --- /dev/null +++ b/crates/brk_computer/src/indexes/month3.rs @@ -0,0 +1,22 @@ +use brk_traversable::Traversable; +use brk_types::{Date, Height, Month3, Version}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec, Rw, StorageMode}; + +use brk_error::Result; + +#[derive(Traversable)] +pub struct Vecs { + pub identity: M::Stored>>, + pub 
date: M::Stored>>, + pub first_height: M::Stored>>, +} + +impl Vecs { + pub(crate) fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "month3", version)?, + date: EagerVec::forced_import(db, "date", version)?, + first_height: EagerVec::forced_import(db, "month3_first_height", version)?, + }) + } +} diff --git a/crates/brk_computer/src/indexes/month6.rs b/crates/brk_computer/src/indexes/month6.rs new file mode 100644 index 000000000..64f2cea96 --- /dev/null +++ b/crates/brk_computer/src/indexes/month6.rs @@ -0,0 +1,22 @@ +use brk_traversable::Traversable; +use brk_types::{Date, Height, Month6, Version}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec, Rw, StorageMode}; + +use brk_error::Result; + +#[derive(Traversable)] +pub struct Vecs { + pub identity: M::Stored>>, + pub date: M::Stored>>, + pub first_height: M::Stored>>, +} + +impl Vecs { + pub(crate) fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "month6", version)?, + date: EagerVec::forced_import(db, "date", version)?, + first_height: EagerVec::forced_import(db, "month6_first_height", version)?, + }) + } +} diff --git a/crates/brk_computer/src/indexes/monthindex.rs b/crates/brk_computer/src/indexes/monthindex.rs deleted file mode 100644 index bfcbfef60..000000000 --- a/crates/brk_computer/src/indexes/monthindex.rs +++ /dev/null @@ -1,32 +0,0 @@ -use brk_traversable::Traversable; -use brk_types::{ - Date, DateIndex, MonthIndex, QuarterIndex, SemesterIndex, StoredU64, Version, YearIndex, -}; -use vecdb::{Database, EagerVec, ImportableVec, PcoVec}; - -use brk_error::Result; - -#[derive(Clone, Traversable)] -pub struct Vecs { - pub identity: EagerVec>, - pub date: EagerVec>, - pub first_dateindex: EagerVec>, - pub dateindex_count: EagerVec>, - pub quarterindex: EagerVec>, - pub semesterindex: EagerVec>, - pub yearindex: EagerVec>, -} - -impl Vecs { - pub fn forced_import(db: 
&Database, version: Version) -> Result { - Ok(Self { - identity: EagerVec::forced_import(db, "monthindex", version)?, - date: EagerVec::forced_import(db, "date", version)?, - first_dateindex: EagerVec::forced_import(db, "first_dateindex", version)?, - dateindex_count: EagerVec::forced_import(db, "dateindex_count", version)?, - quarterindex: EagerVec::forced_import(db, "quarterindex", version)?, - semesterindex: EagerVec::forced_import(db, "semesterindex", version)?, - yearindex: EagerVec::forced_import(db, "yearindex", version)?, - }) - } -} diff --git a/crates/brk_computer/src/indexes/quarterindex.rs b/crates/brk_computer/src/indexes/quarterindex.rs deleted file mode 100644 index 979f62133..000000000 --- a/crates/brk_computer/src/indexes/quarterindex.rs +++ /dev/null @@ -1,24 +0,0 @@ -use brk_traversable::Traversable; -use brk_types::{Date, MonthIndex, QuarterIndex, StoredU64, Version}; -use vecdb::{Database, EagerVec, ImportableVec, PcoVec}; - -use brk_error::Result; - -#[derive(Clone, Traversable)] -pub struct Vecs { - pub identity: EagerVec>, - pub date: EagerVec>, - pub first_monthindex: EagerVec>, - pub monthindex_count: EagerVec>, -} - -impl Vecs { - pub fn forced_import(db: &Database, version: Version) -> Result { - Ok(Self { - identity: EagerVec::forced_import(db, "quarterindex", version)?, - date: EagerVec::forced_import(db, "date", version)?, - first_monthindex: EagerVec::forced_import(db, "first_monthindex", version)?, - monthindex_count: EagerVec::forced_import(db, "monthindex_count", version)?, - }) - } -} diff --git a/crates/brk_computer/src/indexes/semesterindex.rs b/crates/brk_computer/src/indexes/semesterindex.rs deleted file mode 100644 index 30c881621..000000000 --- a/crates/brk_computer/src/indexes/semesterindex.rs +++ /dev/null @@ -1,24 +0,0 @@ -use brk_traversable::Traversable; -use brk_types::{Date, MonthIndex, SemesterIndex, StoredU64, Version}; -use vecdb::{Database, EagerVec, ImportableVec, PcoVec}; - -use brk_error::Result; - 
-#[derive(Clone, Traversable)] -pub struct Vecs { - pub identity: EagerVec>, - pub date: EagerVec>, - pub first_monthindex: EagerVec>, - pub monthindex_count: EagerVec>, -} - -impl Vecs { - pub fn forced_import(db: &Database, version: Version) -> Result { - Ok(Self { - identity: EagerVec::forced_import(db, "semesterindex", version)?, - date: EagerVec::forced_import(db, "date", version)?, - first_monthindex: EagerVec::forced_import(db, "first_monthindex", version)?, - monthindex_count: EagerVec::forced_import(db, "monthindex_count", version)?, - }) - } -} diff --git a/crates/brk_computer/src/indexes/txindex.rs b/crates/brk_computer/src/indexes/txindex.rs index d85796c5f..e97ddcd7e 100644 --- a/crates/brk_computer/src/indexes/txindex.rs +++ b/crates/brk_computer/src/indexes/txindex.rs @@ -1,25 +1,25 @@ use brk_indexer::Indexer; use brk_traversable::Traversable; use brk_types::{StoredU64, TxIndex, Txid, Version}; -use vecdb::{Database, EagerVec, ImportableVec, IterableCloneableVec, LazyVecFrom1, PcoVec}; +use vecdb::{Database, EagerVec, ImportableVec, ReadableCloneableVec, LazyVecFrom1, PcoVec, Rw, StorageMode}; use brk_error::Result; -#[derive(Clone, Traversable)] -pub struct Vecs { +#[derive(Traversable)] +pub struct Vecs { pub identity: LazyVecFrom1, - pub input_count: EagerVec>, - pub output_count: EagerVec>, + pub input_count: M::Stored>>, + pub output_count: M::Stored>>, } impl Vecs { - pub fn forced_import(db: &Database, version: Version, indexer: &Indexer) -> Result { + pub(crate) fn forced_import(db: &Database, version: Version, indexer: &Indexer) -> Result { Ok(Self { identity: LazyVecFrom1::init( "txindex", version, - indexer.vecs.transactions.txid.boxed_clone(), - |index, _| Some(index), + indexer.vecs.transactions.txid.read_only_boxed_clone(), + |index, _| index, ), input_count: EagerVec::forced_import(db, "input_count", version)?, output_count: EagerVec::forced_import(db, "output_count", version)?, diff --git 
a/crates/brk_computer/src/indexes/txinindex.rs b/crates/brk_computer/src/indexes/txinindex.rs index d281dc762..eb12aa71f 100644 --- a/crates/brk_computer/src/indexes/txinindex.rs +++ b/crates/brk_computer/src/indexes/txinindex.rs @@ -1,7 +1,7 @@ use brk_indexer::Indexer; use brk_traversable::Traversable; use brk_types::{OutPoint, TxInIndex, Version}; -use vecdb::{IterableCloneableVec, LazyVecFrom1}; +use vecdb::{ReadableCloneableVec, LazyVecFrom1}; #[derive(Clone, Traversable)] pub struct Vecs { @@ -9,13 +9,13 @@ pub struct Vecs { } impl Vecs { - pub fn forced_import(version: Version, indexer: &Indexer) -> Self { + pub(crate) fn forced_import(version: Version, indexer: &Indexer) -> Self { Self { identity: LazyVecFrom1::init( "txinindex", version, - indexer.vecs.inputs.outpoint.boxed_clone(), - |index, _| Some(index), + indexer.vecs.inputs.outpoint.read_only_boxed_clone(), + |index, _| index, ), } } diff --git a/crates/brk_computer/src/indexes/txoutindex.rs b/crates/brk_computer/src/indexes/txoutindex.rs index 1baff3b25..b9b5ca1b8 100644 --- a/crates/brk_computer/src/indexes/txoutindex.rs +++ b/crates/brk_computer/src/indexes/txoutindex.rs @@ -1,7 +1,7 @@ use brk_indexer::Indexer; use brk_traversable::Traversable; use brk_types::{Sats, TxOutIndex, Version}; -use vecdb::{IterableCloneableVec, LazyVecFrom1}; +use vecdb::{ReadableCloneableVec, LazyVecFrom1}; #[derive(Clone, Traversable)] pub struct Vecs { @@ -9,13 +9,13 @@ pub struct Vecs { } impl Vecs { - pub fn forced_import(version: Version, indexer: &Indexer) -> Self { + pub(crate) fn forced_import(version: Version, indexer: &Indexer) -> Self { Self { identity: LazyVecFrom1::init( "txoutindex", version, - indexer.vecs.outputs.value.boxed_clone(), - |index, _| Some(index), + indexer.vecs.outputs.value.read_only_boxed_clone(), + |index, _| index, ), } } diff --git a/crates/brk_computer/src/indexes/week1.rs b/crates/brk_computer/src/indexes/week1.rs new file mode 100644 index 000000000..4cd7958aa --- /dev/null +++ 
b/crates/brk_computer/src/indexes/week1.rs @@ -0,0 +1,22 @@ +use brk_traversable::Traversable; +use brk_types::{Date, Height, Version, Week1}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec, Rw, StorageMode}; + +use brk_error::Result; + +#[derive(Traversable)] +pub struct Vecs { + pub identity: M::Stored>>, + pub date: M::Stored>>, + pub first_height: M::Stored>>, +} + +impl Vecs { + pub(crate) fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "week1", version)?, + date: EagerVec::forced_import(db, "date", version)?, + first_height: EagerVec::forced_import(db, "week1_first_height", version)?, + }) + } +} diff --git a/crates/brk_computer/src/indexes/weekindex.rs b/crates/brk_computer/src/indexes/weekindex.rs deleted file mode 100644 index af03e7478..000000000 --- a/crates/brk_computer/src/indexes/weekindex.rs +++ /dev/null @@ -1,24 +0,0 @@ -use brk_traversable::Traversable; -use brk_types::{Date, DateIndex, StoredU64, Version, WeekIndex}; -use vecdb::{Database, EagerVec, ImportableVec, PcoVec}; - -use brk_error::Result; - -#[derive(Clone, Traversable)] -pub struct Vecs { - pub identity: EagerVec>, - pub date: EagerVec>, - pub first_dateindex: EagerVec>, - pub dateindex_count: EagerVec>, -} - -impl Vecs { - pub fn forced_import(db: &Database, version: Version) -> Result { - Ok(Self { - identity: EagerVec::forced_import(db, "weekindex", version)?, - date: EagerVec::forced_import(db, "date", version)?, - first_dateindex: EagerVec::forced_import(db, "first_dateindex", version)?, - dateindex_count: EagerVec::forced_import(db, "dateindex_count", version)?, - }) - } -} diff --git a/crates/brk_computer/src/indexes/year1.rs b/crates/brk_computer/src/indexes/year1.rs new file mode 100644 index 000000000..c9a734ae5 --- /dev/null +++ b/crates/brk_computer/src/indexes/year1.rs @@ -0,0 +1,22 @@ +use brk_traversable::Traversable; +use brk_types::{Date, Height, Version, Year1}; +use vecdb::{Database, EagerVec, 
ImportableVec, PcoVec, Rw, StorageMode}; + +use brk_error::Result; + +#[derive(Traversable)] +pub struct Vecs { + pub identity: M::Stored>>, + pub date: M::Stored>>, + pub first_height: M::Stored>>, +} + +impl Vecs { + pub(crate) fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "year1", version)?, + date: EagerVec::forced_import(db, "date", version)?, + first_height: EagerVec::forced_import(db, "year1_first_height", version)?, + }) + } +} diff --git a/crates/brk_computer/src/indexes/year10.rs b/crates/brk_computer/src/indexes/year10.rs new file mode 100644 index 000000000..5d38b5758 --- /dev/null +++ b/crates/brk_computer/src/indexes/year10.rs @@ -0,0 +1,22 @@ +use brk_traversable::Traversable; +use brk_types::{Date, Year10, Height, Version}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec, Rw, StorageMode}; + +use brk_error::Result; + +#[derive(Traversable)] +pub struct Vecs { + pub identity: M::Stored>>, + pub date: M::Stored>>, + pub first_height: M::Stored>>, +} + +impl Vecs { + pub(crate) fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "year10", version)?, + date: EagerVec::forced_import(db, "date", version)?, + first_height: EagerVec::forced_import(db, "year10_first_height", version)?, + }) + } +} diff --git a/crates/brk_computer/src/indexes/yearindex.rs b/crates/brk_computer/src/indexes/yearindex.rs deleted file mode 100644 index 03c6f061b..000000000 --- a/crates/brk_computer/src/indexes/yearindex.rs +++ /dev/null @@ -1,26 +0,0 @@ -use brk_traversable::Traversable; -use brk_types::{Date, DecadeIndex, MonthIndex, StoredU64, Version, YearIndex}; -use vecdb::{Database, EagerVec, ImportableVec, PcoVec}; - -use brk_error::Result; - -#[derive(Clone, Traversable)] -pub struct Vecs { - pub identity: EagerVec>, - pub date: EagerVec>, - pub first_monthindex: EagerVec>, - pub monthindex_count: EagerVec>, - pub decadeindex: EagerVec>, 
-} - -impl Vecs { - pub fn forced_import(db: &Database, version: Version) -> Result { - Ok(Self { - identity: EagerVec::forced_import(db, "yearindex", version)?, - date: EagerVec::forced_import(db, "date", version)?, - first_monthindex: EagerVec::forced_import(db, "first_monthindex", version)?, - monthindex_count: EagerVec::forced_import(db, "monthindex_count", version)?, - decadeindex: EagerVec::forced_import(db, "decadeindex", version)?, - }) - } -} diff --git a/crates/brk_computer/src/inputs/compute.rs b/crates/brk_computer/src/inputs/compute.rs index 73e6bc17d..f546d3ba4 100644 --- a/crates/brk_computer/src/inputs/compute.rs +++ b/crates/brk_computer/src/inputs/compute.rs @@ -6,7 +6,7 @@ use super::Vecs; use crate::{indexes, ComputeIndexes}; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, indexer: &Indexer, indexes: &indexes::Vecs, diff --git a/crates/brk_computer/src/inputs/count/compute.rs b/crates/brk_computer/src/inputs/count/compute.rs index 2a38e4f34..31492a8e3 100644 --- a/crates/brk_computer/src/inputs/count/compute.rs +++ b/crates/brk_computer/src/inputs/count/compute.rs @@ -6,7 +6,7 @@ use super::Vecs; use crate::{ComputeIndexes, indexes}; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, indexer: &Indexer, indexes: &indexes::Vecs, diff --git a/crates/brk_computer/src/inputs/count/import.rs b/crates/brk_computer/src/inputs/count/import.rs index cfc4ab1a4..68eb17f5e 100644 --- a/crates/brk_computer/src/inputs/count/import.rs +++ b/crates/brk_computer/src/inputs/count/import.rs @@ -6,7 +6,7 @@ use super::Vecs; use crate::{indexes, internal::TxDerivedFull}; impl Vecs { - pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { + pub(crate) fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { Ok(Self(TxDerivedFull::forced_import( db, "input_count", diff --git a/crates/brk_computer/src/inputs/count/vecs.rs b/crates/brk_computer/src/inputs/count/vecs.rs 
index 238616ae3..2c0838dbc 100644 --- a/crates/brk_computer/src/inputs/count/vecs.rs +++ b/crates/brk_computer/src/inputs/count/vecs.rs @@ -1,8 +1,9 @@ use brk_traversable::Traversable; use brk_types::StoredU64; use derive_more::{Deref, DerefMut}; +use vecdb::{Rw, StorageMode}; use crate::internal::TxDerivedFull; -#[derive(Clone, Deref, DerefMut, Traversable)] -pub struct Vecs(pub TxDerivedFull); +#[derive(Deref, DerefMut, Traversable)] +pub struct Vecs(pub TxDerivedFull); diff --git a/crates/brk_computer/src/inputs/import.rs b/crates/brk_computer/src/inputs/import.rs index 62c82fe93..4304ab7a1 100644 --- a/crates/brk_computer/src/inputs/import.rs +++ b/crates/brk_computer/src/inputs/import.rs @@ -9,7 +9,7 @@ use super::{CountVecs, SpentVecs, Vecs}; use crate::indexes; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( parent_path: &Path, parent_version: Version, indexes: &indexes::Vecs, diff --git a/crates/brk_computer/src/inputs/mod.rs b/crates/brk_computer/src/inputs/mod.rs index 515cac15e..9cd02a8c9 100644 --- a/crates/brk_computer/src/inputs/mod.rs +++ b/crates/brk_computer/src/inputs/mod.rs @@ -5,18 +5,18 @@ mod compute; mod import; use brk_traversable::Traversable; -use vecdb::Database; +use vecdb::{Database, Rw, StorageMode}; pub use count::Vecs as CountVecs; pub use spent::Vecs as SpentVecs; pub const DB_NAME: &str = "inputs"; -#[derive(Clone, Traversable)] -pub struct Vecs { +#[derive(Traversable)] +pub struct Vecs { #[traversable(skip)] pub(crate) db: Database, - pub spent: SpentVecs, - pub count: CountVecs, + pub spent: SpentVecs, + pub count: CountVecs, } diff --git a/crates/brk_computer/src/inputs/spent/compute.rs b/crates/brk_computer/src/inputs/spent/compute.rs index 63e1cfc0b..69ded0a85 100644 --- a/crates/brk_computer/src/inputs/spent/compute.rs +++ b/crates/brk_computer/src/inputs/spent/compute.rs @@ -2,7 +2,7 @@ use brk_error::Result; use brk_indexer::Indexer; use brk_types::{Sats, TxInIndex, TxIndex, TxOutIndex, Vout}; use 
tracing::info; -use vecdb::{AnyStoredVec, AnyVec, Database, Exit, GenericStoredVec, TypedVecIterator, VecIndex}; +use vecdb::{AnyStoredVec, AnyVec, Database, Exit, WritableVec, ReadableVec, VecIndex}; use super::Vecs; use crate::ComputeIndexes; @@ -10,7 +10,7 @@ use crate::ComputeIndexes; const BATCH_SIZE: usize = 2 * 1024 * 1024 * 1024 / size_of::(); impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, db: &Database, indexer: &Indexer, @@ -39,19 +39,20 @@ impl Vecs { return Ok(()); } - let mut outpoint_iter = indexer.vecs.inputs.outpoint.iter()?; - let mut first_txoutindex_iter = indexer.vecs.transactions.first_txoutindex.iter()?; - let mut value_iter = indexer.vecs.outputs.value.iter()?; - let mut entries: Vec = Vec::with_capacity(BATCH_SIZE); + let first_txoutindex_reader = indexer.vecs.transactions.first_txoutindex.reader(); + let value_reader = indexer.vecs.outputs.value.reader(); + let actual_total = target - min; + let mut entries: Vec = Vec::with_capacity(actual_total.min(BATCH_SIZE)); let mut batch_start = min; while batch_start < target { let batch_end = (batch_start + BATCH_SIZE).min(target); + let outpoints = indexer.vecs.inputs.outpoint.collect_range_at(batch_start, batch_end); + entries.clear(); - for i in batch_start..batch_end { - let txinindex = TxInIndex::from(i); - let outpoint = outpoint_iter.get_unwrap(txinindex); + for (j, outpoint) in outpoints.into_iter().enumerate() { + let txinindex = TxInIndex::from(batch_start + j); entries.push(Entry { txinindex, txindex: outpoint.txindex(), @@ -67,7 +68,7 @@ impl Vecs { if entry.txindex.is_coinbase() { break; } - entry.txoutindex = first_txoutindex_iter.get_unwrap(entry.txindex) + entry.vout; + entry.txoutindex = first_txoutindex_reader.get(entry.txindex.to_usize()) + entry.vout; } entries.sort_unstable_by_key(|e| e.txoutindex); @@ -75,7 +76,7 @@ impl Vecs { if entry.txoutindex.is_coinbase() { break; } - entry.value = value_iter.get_unwrap(entry.txoutindex); + entry.value = 
value_reader.get(entry.txoutindex.to_usize()); } entries.sort_unstable_by_key(|e| e.txinindex); diff --git a/crates/brk_computer/src/inputs/spent/import.rs b/crates/brk_computer/src/inputs/spent/import.rs index 5b55fc9fb..535033db2 100644 --- a/crates/brk_computer/src/inputs/spent/import.rs +++ b/crates/brk_computer/src/inputs/spent/import.rs @@ -5,7 +5,7 @@ use vecdb::{Database, ImportableVec, PcoVec}; use super::Vecs; impl Vecs { - pub fn forced_import(db: &Database, version: Version) -> Result { + pub(crate) fn forced_import(db: &Database, version: Version) -> Result { Ok(Self { txoutindex: PcoVec::forced_import(db, "txoutindex", version)?, value: PcoVec::forced_import(db, "value", version)?, diff --git a/crates/brk_computer/src/inputs/spent/vecs.rs b/crates/brk_computer/src/inputs/spent/vecs.rs index 241a9fb82..a362fef0f 100644 --- a/crates/brk_computer/src/inputs/spent/vecs.rs +++ b/crates/brk_computer/src/inputs/spent/vecs.rs @@ -1,9 +1,9 @@ use brk_traversable::Traversable; use brk_types::{Sats, TxInIndex, TxOutIndex}; -use vecdb::PcoVec; +use vecdb::{PcoVec, Rw, StorageMode}; -#[derive(Clone, Traversable)] -pub struct Vecs { - pub txoutindex: PcoVec, - pub value: PcoVec, +#[derive(Traversable)] +pub struct Vecs { + pub txoutindex: M::Stored>, + pub value: M::Stored>, } diff --git a/crates/brk_computer/src/internal/compute.rs b/crates/brk_computer/src/internal/compute.rs index 3705b678d..e63cbef9b 100644 --- a/crates/brk_computer/src/internal/compute.rs +++ b/crates/brk_computer/src/internal/compute.rs @@ -7,7 +7,8 @@ use brk_error::Result; use brk_types::{CheckedSub, StoredU64}; use schemars::JsonSchema; use vecdb::{ - AnyStoredVec, AnyVec, EagerVec, Exit, GenericStoredVec, IterableVec, PcoVec, VecIndex, VecValue, + AnyStoredVec, AnyVec, EagerVec, Exit, WritableVec, ReadableVec, PcoVec, VecIndex, + VecValue, }; use crate::utils::get_percentile; @@ -33,11 +34,11 @@ fn validate_and_start( /// This is useful for excluding coinbase transactions (which have 0 
fee) from /// fee/feerate aggregations. #[allow(clippy::too_many_arguments)] -pub fn compute_aggregations( +pub(crate) fn compute_aggregations( max_from: I, - source: &impl IterableVec, - first_indexes: &impl IterableVec, - count_indexes: &impl IterableVec, + source: &impl ReadableVec, + first_indexes: &impl ReadableVec, + count_indexes: &impl ReadableVec, exit: &Exit, skip_count: usize, mut first: Option<&mut EagerVec>>, @@ -94,23 +95,20 @@ where return Ok(()); } - let mut source_iter = source.iter(); - let mut cumulative_val = cumulative.as_ref().map(|cumulative_vec| { index.decremented().map_or(T::from(0_usize), |idx| { - cumulative_vec.iter().get_unwrap(idx) + cumulative_vec.collect_one_at(idx.to_usize()).unwrap_or(T::from(0_usize)) }) }); - let mut count_indexes_iter = count_indexes.iter().skip(index.to_usize()); + let start = index.to_usize(); + let fi_len = first_indexes.len(); + let first_indexes_batch: Vec = first_indexes.collect_range_at(start, fi_len); + let count_indexes_batch: Vec = count_indexes.collect_range_at(start, fi_len); - first_indexes - .iter() - .enumerate() - .skip(index.to_usize()) - .try_for_each(|(idx, first_index)| -> Result<()> { - let count_index = count_indexes_iter.next().unwrap(); - let count = *count_index as usize; + first_indexes_batch.into_iter().zip(count_indexes_batch).enumerate().try_for_each(|(j, (first_index, count_index))| -> Result<()> { + let idx = start + j; + let count = u64::from(count_index) as usize; // Effective count after skipping (e.g., skip coinbase for fee calculations) let effective_count = count.saturating_sub(skip_count); @@ -118,7 +116,7 @@ where if let Some(ref mut first_vec) = first { let f = if effective_count > 0 { - source_iter.get_unwrap(effective_first_index) + source.collect_one_at(effective_first_index.to_usize()).unwrap() } else { T::from(0_usize) }; @@ -131,25 +129,25 @@ where last_vec.truncate_push_at(idx, T::from(0_usize))?; } else { let last_index = first_index + (count - 1); - let v = 
source_iter.get_unwrap(last_index); + let v = source.collect_one_at(last_index.to_usize()).unwrap(); last_vec.truncate_push_at(idx, v)?; } } // Fast path: only min/max needed, no sorting or allocation required if needs_minmax && !needs_percentiles && !needs_aggregates { - source_iter.set_position(effective_first_index); + let efi = effective_first_index.to_usize(); let mut min_val: Option = None; let mut max_val: Option = None; - for val in (&mut source_iter).take(effective_count) { + source.for_each_range_at(efi, efi + effective_count, |val| { if needs_min { min_val = Some(min_val.map_or(val, |m| if val < m { val } else { m })); } if needs_max { max_val = Some(max_val.map_or(val, |m| if val > m { val } else { m })); } - } + }); if let Some(ref mut min_vec) = min { let v = min_val.or(max_val).unwrap_or_else(|| T::from(0_usize)); @@ -159,9 +157,11 @@ where let v = max_val.or(min_val).unwrap_or_else(|| T::from(0_usize)); max_vec.truncate_push_at(idx, v)?; } - } else if needs_percentiles || needs_aggregates || needs_minmax { - source_iter.set_position(effective_first_index); - let values: Vec = (&mut source_iter).take(effective_count).collect(); + } else if needs_percentiles || needs_minmax { + let mut values: Vec = source.collect_range_at( + effective_first_index.to_usize(), + effective_first_index.to_usize() + effective_count, + ); if values.is_empty() { // Handle edge case where all items were skipped @@ -197,35 +197,41 @@ where cumulative_vec.truncate_push_at(idx, t)?; } } else if needs_percentiles { - let mut sorted_values = values.clone(); - sorted_values.sort_unstable(); + // Compute aggregates from unsorted values first to avoid clone + let aggregate_result = if needs_aggregates { + let len = values.len(); + let sum_val = values.iter().copied().fold(T::from(0), |a, b| a + b); + Some((len, sum_val)) + } else { + None + }; + + // Sort in-place — no clone needed + values.sort_unstable(); if let Some(ref mut max_vec) = max { - max_vec.truncate_push_at(idx, 
*sorted_values.last().unwrap())?; + max_vec.truncate_push_at(idx, *values.last().unwrap())?; } if let Some(ref mut pct90_vec) = pct90 { - pct90_vec.truncate_push_at(idx, get_percentile(&sorted_values, 0.90))?; + pct90_vec.truncate_push_at(idx, get_percentile(&values, 0.90))?; } if let Some(ref mut pct75_vec) = pct75 { - pct75_vec.truncate_push_at(idx, get_percentile(&sorted_values, 0.75))?; + pct75_vec.truncate_push_at(idx, get_percentile(&values, 0.75))?; } if let Some(ref mut median_vec) = median { - median_vec.truncate_push_at(idx, get_percentile(&sorted_values, 0.50))?; + median_vec.truncate_push_at(idx, get_percentile(&values, 0.50))?; } if let Some(ref mut pct25_vec) = pct25 { - pct25_vec.truncate_push_at(idx, get_percentile(&sorted_values, 0.25))?; + pct25_vec.truncate_push_at(idx, get_percentile(&values, 0.25))?; } if let Some(ref mut pct10_vec) = pct10 { - pct10_vec.truncate_push_at(idx, get_percentile(&sorted_values, 0.10))?; + pct10_vec.truncate_push_at(idx, get_percentile(&values, 0.10))?; } if let Some(ref mut min_vec) = min { - min_vec.truncate_push_at(idx, *sorted_values.first().unwrap())?; + min_vec.truncate_push_at(idx, *values.first().unwrap())?; } - if needs_aggregates { - let len = values.len(); - let sum_val = values.into_iter().fold(T::from(0), |a, b| a + b); - + if let Some((len, sum_val)) = aggregate_result { if let Some(ref mut average_vec) = average { average_vec.truncate_push_at(idx, sum_val / len)?; } @@ -268,23 +274,25 @@ where } } } - } else if needs_aggregates { - let len = values.len(); - let sum_val = values.into_iter().fold(T::from(0), |a, b| a + b); + } + } else if needs_aggregates { + // Aggregates only (sum/average/cumulative) — no Vec allocation needed + let efi = effective_first_index.to_usize(); + let (sum_val, len) = source.fold_range_at(efi, efi + effective_count, (T::from(0_usize), 0_usize), |(acc, cnt), val| (acc + val, cnt + 1)); - if let Some(ref mut average_vec) = average { - average_vec.truncate_push_at(idx, sum_val / 
len)?; + if let Some(ref mut average_vec) = average { + let avg = if len > 0 { sum_val / len } else { T::from(0_usize) }; + average_vec.truncate_push_at(idx, avg)?; + } + + if needs_sum_or_cumulative { + if let Some(ref mut sum_vec) = sum { + sum_vec.truncate_push_at(idx, sum_val)?; } - - if needs_sum_or_cumulative { - if let Some(ref mut sum_vec) = sum { - sum_vec.truncate_push_at(idx, sum_val)?; - } - if let Some(ref mut cumulative_vec) = cumulative { - let t = cumulative_val.unwrap() + sum_val; - cumulative_val.replace(t); - cumulative_vec.truncate_push_at(idx, t)?; - } + if let Some(ref mut cumulative_vec) = cumulative { + let t = cumulative_val.unwrap() + sum_val; + cumulative_val.replace(t); + cumulative_vec.truncate_push_at(idx, t)?; } } } @@ -306,179 +314,3 @@ where Ok(()) } - -/// Compute coarser aggregations from already-aggregated source data. -/// -/// This is used for dateindex → weekindex, monthindex, etc. where we derive -/// coarser aggregations from finer ones. -/// -/// NOTE: Percentiles are NOT supported - they cannot be derived from finer percentiles. 
-#[allow(clippy::too_many_arguments)] -pub fn compute_aggregations_from_aligned( - max_from: I, - first_indexes: &impl IterableVec, - count_indexes: &impl IterableVec, - exit: &Exit, - // Source vecs (already aggregated at finer level) - source_first: Option<&EagerVec>>, - source_last: Option<&EagerVec>>, - source_min: Option<&EagerVec>>, - source_max: Option<&EagerVec>>, - source_average: Option<&EagerVec>>, - source_sum: Option<&EagerVec>>, - // Target vecs - mut first: Option<&mut EagerVec>>, - mut last: Option<&mut EagerVec>>, - mut min: Option<&mut EagerVec>>, - mut max: Option<&mut EagerVec>>, - mut average: Option<&mut EagerVec>>, - mut sum: Option<&mut EagerVec>>, - mut cumulative: Option<&mut EagerVec>>, -) -> Result<()> -where - I: VecIndex, - T: ComputedVecValue + JsonSchema, - A: VecIndex + VecValue + CheckedSub, -{ - let combined_version = first_indexes.version() + count_indexes.version(); - - macro_rules! validate_vec { - ($($vec:ident),*) => {{ - let mut idx = max_from; - $(if let Some(ref mut v) = $vec { - idx = validate_and_start(v, combined_version, idx)?; - })* - idx - }}; - } - - let index = validate_vec!(first, last, min, max, average, sum, cumulative); - - let needs_first = first.is_some(); - let needs_last = last.is_some(); - let needs_min = min.is_some(); - let needs_max = max.is_some(); - let needs_average = average.is_some(); - let needs_sum = sum.is_some(); - let needs_cumulative = cumulative.is_some(); - - if !needs_first - && !needs_last - && !needs_min - && !needs_max - && !needs_average - && !needs_sum - && !needs_cumulative - { - return Ok(()); - } - - let mut source_first_iter = source_first.map(|f| f.iter()); - let mut source_last_iter = source_last.map(|f| f.iter()); - let mut source_min_iter = source_min.map(|f| f.iter()); - let mut source_max_iter = source_max.map(|f| f.iter()); - let mut source_average_iter = source_average.map(|f| f.iter()); - let mut source_sum_iter = source_sum.map(|f| f.iter()); - - let mut cumulative_val = 
cumulative.as_ref().map(|cumulative_vec| { - index.decremented().map_or(T::from(0_usize), |idx| { - cumulative_vec.iter().get_unwrap(idx) - }) - }); - - let mut count_indexes_iter = count_indexes.iter().skip(index.to_usize()); - - first_indexes - .iter() - .enumerate() - .skip(index.to_usize()) - .try_for_each(|(idx, first_index)| -> Result<()> { - let count_index = count_indexes_iter.next().unwrap(); - let count = *count_index as usize; - - if let Some(ref mut first_vec) = first { - let source_iter = source_first_iter - .as_mut() - .expect("source_first required for first"); - let v = source_iter.get_unwrap(first_index); - first_vec.truncate_push_at(idx, v)?; - } - - if let Some(ref mut last_vec) = last { - if count == 0 { - panic!("should not compute last if count can be 0"); - } - let last_index = first_index + (count - 1); - let source_iter = source_last_iter - .as_mut() - .expect("source_last required for last"); - let v = source_iter.get_unwrap(last_index); - last_vec.truncate_push_at(idx, v)?; - } - - if let Some(ref mut min_vec) = min { - let source_iter = source_min_iter - .as_mut() - .expect("source_min required for min"); - source_iter.set_position(first_index); - let min_val = source_iter.take(count).min().unwrap(); - min_vec.truncate_push_at(idx, min_val)?; - } - - if let Some(ref mut max_vec) = max { - let source_iter = source_max_iter - .as_mut() - .expect("source_max required for max"); - source_iter.set_position(first_index); - let max_val = source_iter.take(count).max().unwrap(); - max_vec.truncate_push_at(idx, max_val)?; - } - - if let Some(ref mut average_vec) = average { - let source_iter = source_average_iter - .as_mut() - .expect("source_average required for average"); - source_iter.set_position(first_index); - let mut len = 0usize; - let sum_val = (&mut *source_iter) - .take(count) - .inspect(|_| len += 1) - .fold(T::from(0), |a, b| a + b); - // TODO: Multiply by count then divide by cumulative for accuracy - let average = sum_val / len; - 
average_vec.truncate_push_at(idx, average)?; - } - - if needs_sum || needs_cumulative { - let source_iter = source_sum_iter - .as_mut() - .expect("source_sum required for sum/cumulative"); - source_iter.set_position(first_index); - let sum_val = source_iter.take(count).fold(T::from(0), |a, b| a + b); - - if let Some(ref mut sum_vec) = sum { - sum_vec.truncate_push_at(idx, sum_val)?; - } - - if let Some(ref mut cumulative_vec) = cumulative { - let t = cumulative_val.unwrap() + sum_val; - cumulative_val.replace(t); - cumulative_vec.truncate_push_at(idx, t)?; - } - } - - Ok(()) - })?; - - let _lock = exit.lock(); - - macro_rules! write_vec { - ($($vec:ident),*) => { - $(if let Some(v) = $vec { v.write()?; })* - }; - } - - write_vec!(first, last, min, max, average, sum, cumulative); - - Ok(()) -} diff --git a/crates/brk_computer/src/internal/multi/date_derived/average.rs b/crates/brk_computer/src/internal/multi/date_derived/average.rs deleted file mode 100644 index fe7705f05..000000000 --- a/crates/brk_computer/src/internal/multi/date_derived/average.rs +++ /dev/null @@ -1,58 +0,0 @@ -//! Derived date periods with average-value aggregation. 
- -use brk_traversable::Traversable; -use brk_types::{ - DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex, -}; -use schemars::JsonSchema; -use vecdb::{IterableBoxedVec, IterableCloneableVec}; - -use crate::{indexes, internal::LazyAverage}; - -use crate::internal::ComputedVecValue; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyDateDerivedAverage -where - T: ComputedVecValue + PartialOrd + JsonSchema, -{ - pub weekindex: LazyAverage, - pub monthindex: LazyAverage, - pub quarterindex: LazyAverage, - pub semesterindex: LazyAverage, - pub yearindex: LazyAverage, - pub decadeindex: LazyAverage, -} - -const VERSION: Version = Version::ZERO; - -impl LazyDateDerivedAverage -where - T: ComputedVecValue + JsonSchema + 'static, -{ - /// Create from an external dateindex source. - pub fn from_source( - name: &str, - version: Version, - dateindex_source: IterableBoxedVec, - indexes: &indexes::Vecs, - ) -> Self { - let v = version + VERSION; - - macro_rules! period { - ($idx:ident) => { - LazyAverage::from_source_raw(name, v, dateindex_source.clone(), indexes.$idx.identity.boxed_clone()) - }; - } - - Self { - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } -} diff --git a/crates/brk_computer/src/internal/multi/date_derived/first.rs b/crates/brk_computer/src/internal/multi/date_derived/first.rs deleted file mode 100644 index d47be80a2..000000000 --- a/crates/brk_computer/src/internal/multi/date_derived/first.rs +++ /dev/null @@ -1,58 +0,0 @@ -//! Derived date periods with first-value aggregation. 
- -use brk_traversable::Traversable; -use brk_types::{ - DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex, -}; -use schemars::JsonSchema; -use vecdb::{IterableBoxedVec, IterableCloneableVec}; - -use crate::{indexes, internal::LazyFirst}; - -use crate::internal::ComputedVecValue; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyDateDerivedFirst -where - T: ComputedVecValue + PartialOrd + JsonSchema, -{ - pub weekindex: LazyFirst, - pub monthindex: LazyFirst, - pub quarterindex: LazyFirst, - pub semesterindex: LazyFirst, - pub yearindex: LazyFirst, - pub decadeindex: LazyFirst, -} - -const VERSION: Version = Version::ZERO; - -impl LazyDateDerivedFirst -where - T: ComputedVecValue + JsonSchema + 'static, -{ - /// Create from an external dateindex source. - pub fn from_source( - name: &str, - version: Version, - dateindex_source: IterableBoxedVec, - indexes: &indexes::Vecs, - ) -> Self { - let v = version + VERSION; - - macro_rules! period { - ($idx:ident) => { - LazyFirst::from_source(name, v, dateindex_source.clone(), indexes.$idx.identity.boxed_clone()) - }; - } - - Self { - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } -} diff --git a/crates/brk_computer/src/internal/multi/date_derived/full.rs b/crates/brk_computer/src/internal/multi/date_derived/full.rs deleted file mode 100644 index 8d01b6a1e..000000000 --- a/crates/brk_computer/src/internal/multi/date_derived/full.rs +++ /dev/null @@ -1,68 +0,0 @@ -//! Derived date periods with full stats aggregation. 
- -use brk_traversable::Traversable; -use brk_types::{ - DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex, -}; -use schemars::JsonSchema; -use vecdb::{IterableBoxedVec, IterableCloneableVec}; - -use crate::{indexes, internal::LazyFull}; - -use crate::internal::ComputedVecValue; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyDateDerivedFull -where - T: ComputedVecValue + PartialOrd + JsonSchema, -{ - pub weekindex: LazyFull, - pub monthindex: LazyFull, - pub quarterindex: LazyFull, - pub semesterindex: LazyFull, - pub yearindex: LazyFull, - pub decadeindex: LazyFull, -} - -const VERSION: Version = Version::ZERO; - -impl LazyDateDerivedFull -where - T: ComputedVecValue + JsonSchema + 'static, -{ - /// Create from external dateindex sources for full stats. - #[allow(clippy::too_many_arguments)] - pub fn from_sources( - name: &str, - version: Version, - average_source: IterableBoxedVec, - min_source: IterableBoxedVec, - max_source: IterableBoxedVec, - sum_source: IterableBoxedVec, - cumulative_source: IterableBoxedVec, - indexes: &indexes::Vecs, - ) -> Self { - let v = version + VERSION; - - macro_rules! period { - ($idx:ident) => { - LazyFull::from_stats_aggregate( - name, v, - average_source.clone(), min_source.clone(), max_source.clone(), - sum_source.clone(), cumulative_source.clone(), - indexes.$idx.identity.boxed_clone(), - ) - }; - } - - Self { - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } -} diff --git a/crates/brk_computer/src/internal/multi/date_derived/last.rs b/crates/brk_computer/src/internal/multi/date_derived/last.rs deleted file mode 100644 index 06fa893fe..000000000 --- a/crates/brk_computer/src/internal/multi/date_derived/last.rs +++ /dev/null @@ -1,58 +0,0 @@ -//! 
Derived date periods with last-value aggregation. - -use brk_traversable::Traversable; -use brk_types::{ - DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex, -}; -use schemars::JsonSchema; -use vecdb::{IterableBoxedVec, IterableCloneableVec}; - -use crate::{indexes, internal::LazyLast}; - -use crate::internal::ComputedVecValue; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyDateDerivedLast -where - T: ComputedVecValue + PartialOrd + JsonSchema, -{ - pub weekindex: LazyLast, - pub monthindex: LazyLast, - pub quarterindex: LazyLast, - pub semesterindex: LazyLast, - pub yearindex: LazyLast, - pub decadeindex: LazyLast, -} - -const VERSION: Version = Version::ZERO; - -impl LazyDateDerivedLast -where - T: ComputedVecValue + JsonSchema + 'static, -{ - /// Create from an external dateindex source. - pub fn from_source( - name: &str, - version: Version, - dateindex_source: IterableBoxedVec, - indexes: &indexes::Vecs, - ) -> Self { - let v = version + VERSION; - - macro_rules! period { - ($idx:ident) => { - LazyLast::from_source(name, v, dateindex_source.clone(), indexes.$idx.identity.boxed_clone()) - }; - } - - Self { - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } -} diff --git a/crates/brk_computer/src/internal/multi/date_derived/max.rs b/crates/brk_computer/src/internal/multi/date_derived/max.rs deleted file mode 100644 index bc54745d7..000000000 --- a/crates/brk_computer/src/internal/multi/date_derived/max.rs +++ /dev/null @@ -1,82 +0,0 @@ -//! Derived date periods with max-value aggregation. 
- -use brk_traversable::Traversable; -use brk_types::{ - DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex, -}; -use schemars::JsonSchema; -use vecdb::{IterableBoxedVec, IterableCloneableVec}; - -use crate::{indexes, internal::LazyMax}; - -use crate::internal::ComputedVecValue; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyDateDerivedMax -where - T: ComputedVecValue + PartialOrd + JsonSchema, -{ - pub weekindex: LazyMax, - pub monthindex: LazyMax, - pub quarterindex: LazyMax, - pub semesterindex: LazyMax, - pub yearindex: LazyMax, - pub decadeindex: LazyMax, -} - -const VERSION: Version = Version::ZERO; - -impl LazyDateDerivedMax -where - T: ComputedVecValue + JsonSchema + 'static, -{ - /// Create from an external dateindex source. - pub fn from_source( - name: &str, - version: Version, - dateindex_source: IterableBoxedVec, - indexes: &indexes::Vecs, - ) -> Self { - Self::from_source_inner(name, version, dateindex_source, indexes, false) - } - - /// Create from an external dateindex source without adding _max suffix. - pub fn from_source_raw( - name: &str, - version: Version, - dateindex_source: IterableBoxedVec, - indexes: &indexes::Vecs, - ) -> Self { - Self::from_source_inner(name, version, dateindex_source, indexes, true) - } - - fn from_source_inner( - name: &str, - version: Version, - dateindex_source: IterableBoxedVec, - indexes: &indexes::Vecs, - raw: bool, - ) -> Self { - let v = version + VERSION; - - macro_rules! 
period { - ($idx:ident) => { - if raw { - LazyMax::from_source_raw(name, v, dateindex_source.clone(), indexes.$idx.identity.boxed_clone()) - } else { - LazyMax::from_source(name, v, dateindex_source.clone(), indexes.$idx.identity.boxed_clone()) - } - }; - } - - Self { - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } -} diff --git a/crates/brk_computer/src/internal/multi/date_derived/min.rs b/crates/brk_computer/src/internal/multi/date_derived/min.rs deleted file mode 100644 index 461186866..000000000 --- a/crates/brk_computer/src/internal/multi/date_derived/min.rs +++ /dev/null @@ -1,82 +0,0 @@ -//! Derived date periods with min-value aggregation. - -use brk_traversable::Traversable; -use brk_types::{ - DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex, -}; -use schemars::JsonSchema; -use vecdb::{IterableBoxedVec, IterableCloneableVec}; - -use crate::{indexes, internal::LazyMin}; - -use crate::internal::ComputedVecValue; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyDateDerivedMin -where - T: ComputedVecValue + PartialOrd + JsonSchema, -{ - pub weekindex: LazyMin, - pub monthindex: LazyMin, - pub quarterindex: LazyMin, - pub semesterindex: LazyMin, - pub yearindex: LazyMin, - pub decadeindex: LazyMin, -} - -const VERSION: Version = Version::ZERO; - -impl LazyDateDerivedMin -where - T: ComputedVecValue + JsonSchema + 'static, -{ - /// Create from an external dateindex source. - pub fn from_source( - name: &str, - version: Version, - dateindex_source: IterableBoxedVec, - indexes: &indexes::Vecs, - ) -> Self { - Self::from_source_inner(name, version, dateindex_source, indexes, false) - } - - /// Create from an external dateindex source without adding _min suffix. 
- pub fn from_source_raw( - name: &str, - version: Version, - dateindex_source: IterableBoxedVec, - indexes: &indexes::Vecs, - ) -> Self { - Self::from_source_inner(name, version, dateindex_source, indexes, true) - } - - fn from_source_inner( - name: &str, - version: Version, - dateindex_source: IterableBoxedVec, - indexes: &indexes::Vecs, - raw: bool, - ) -> Self { - let v = version + VERSION; - - macro_rules! period { - ($idx:ident) => { - if raw { - LazyMin::from_source_raw(name, v, dateindex_source.clone(), indexes.$idx.identity.boxed_clone()) - } else { - LazyMin::from_source(name, v, dateindex_source.clone(), indexes.$idx.identity.boxed_clone()) - } - }; - } - - Self { - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } -} diff --git a/crates/brk_computer/src/internal/multi/date_derived/mod.rs b/crates/brk_computer/src/internal/multi/date_derived/mod.rs deleted file mode 100644 index a7a435602..000000000 --- a/crates/brk_computer/src/internal/multi/date_derived/mod.rs +++ /dev/null @@ -1,19 +0,0 @@ -mod average; -mod first; -mod full; -mod last; -mod max; -mod min; -mod spread; -mod sum; -mod sum_cum; - -pub use average::*; -pub use first::*; -pub use full::*; -pub use last::*; -pub use max::*; -pub use min::*; -pub use spread::*; -pub use sum::*; -pub use sum_cum::*; diff --git a/crates/brk_computer/src/internal/multi/date_derived/spread.rs b/crates/brk_computer/src/internal/multi/date_derived/spread.rs deleted file mode 100644 index 167761a67..000000000 --- a/crates/brk_computer/src/internal/multi/date_derived/spread.rs +++ /dev/null @@ -1,67 +0,0 @@ -//! Derived date periods with distribution aggregation. 
- -use brk_traversable::Traversable; -use brk_types::{ - DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex, -}; -use schemars::JsonSchema; -use vecdb::{IterableBoxedVec, IterableCloneableVec}; - -use crate::{indexes, internal::LazySpread}; - -use crate::internal::ComputedVecValue; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyDateDerivedSpread -where - T: ComputedVecValue + PartialOrd + JsonSchema, -{ - pub weekindex: LazySpread, - pub monthindex: LazySpread, - pub quarterindex: LazySpread, - pub semesterindex: LazySpread, - pub yearindex: LazySpread, - pub decadeindex: LazySpread, -} - -const VERSION: Version = Version::ZERO; - -impl LazyDateDerivedSpread -where - T: ComputedVecValue + JsonSchema + 'static, -{ - /// Create from external dateindex sources for distribution stats. - pub fn from_sources( - name: &str, - version: Version, - average_source: IterableBoxedVec, - min_source: IterableBoxedVec, - max_source: IterableBoxedVec, - indexes: &indexes::Vecs, - ) -> Self { - let v = version + VERSION; - - macro_rules! period { - ($idx:ident) => { - LazySpread::from_distribution( - name, - v, - average_source.clone(), - min_source.clone(), - max_source.clone(), - indexes.$idx.identity.boxed_clone(), - ) - }; - } - - Self { - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } -} diff --git a/crates/brk_computer/src/internal/multi/date_derived/sum.rs b/crates/brk_computer/src/internal/multi/date_derived/sum.rs deleted file mode 100644 index c911c0833..000000000 --- a/crates/brk_computer/src/internal/multi/date_derived/sum.rs +++ /dev/null @@ -1,58 +0,0 @@ -//! Derived date periods with sum aggregation. 
- -use brk_traversable::Traversable; -use brk_types::{ - DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex, -}; -use schemars::JsonSchema; -use vecdb::{IterableBoxedVec, IterableCloneableVec}; - -use crate::{indexes, internal::LazySum}; - -use crate::internal::ComputedVecValue; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyDateDerivedSum -where - T: ComputedVecValue + PartialOrd + JsonSchema, -{ - pub weekindex: LazySum, - pub monthindex: LazySum, - pub quarterindex: LazySum, - pub semesterindex: LazySum, - pub yearindex: LazySum, - pub decadeindex: LazySum, -} - -const VERSION: Version = Version::ZERO; - -impl LazyDateDerivedSum -where - T: ComputedVecValue + JsonSchema + 'static, -{ - /// Create from an external dateindex source. - pub fn from_source( - name: &str, - version: Version, - dateindex_source: IterableBoxedVec, - indexes: &indexes::Vecs, - ) -> Self { - let v = version + VERSION; - - macro_rules! period { - ($idx:ident) => { - LazySum::from_source_raw(name, v, dateindex_source.clone(), indexes.$idx.identity.boxed_clone()) - }; - } - - Self { - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } -} diff --git a/crates/brk_computer/src/internal/multi/date_derived/sum_cum.rs b/crates/brk_computer/src/internal/multi/date_derived/sum_cum.rs deleted file mode 100644 index c265fa186..000000000 --- a/crates/brk_computer/src/internal/multi/date_derived/sum_cum.rs +++ /dev/null @@ -1,62 +0,0 @@ -//! Derived date periods with sum+cumulative aggregation. 
- -use brk_traversable::Traversable; -use brk_types::{ - DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex, -}; -use schemars::JsonSchema; -use vecdb::{IterableBoxedVec, IterableCloneableVec}; - -use crate::{indexes, internal::LazySumCum}; - -use crate::internal::ComputedVecValue; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyDateDerivedSumCum -where - T: ComputedVecValue + PartialOrd + JsonSchema, -{ - pub weekindex: LazySumCum, - pub monthindex: LazySumCum, - pub quarterindex: LazySumCum, - pub semesterindex: LazySumCum, - pub yearindex: LazySumCum, - pub decadeindex: LazySumCum, -} - -const VERSION: Version = Version::ZERO; - -impl LazyDateDerivedSumCum -where - T: ComputedVecValue + JsonSchema + 'static, -{ - /// Create from external dateindex sum and cumulative sources. - pub fn from_sources( - name: &str, - version: Version, - sum_source: IterableBoxedVec, - cumulative_source: IterableBoxedVec, - indexes: &indexes::Vecs, - ) -> Self { - let v = version + VERSION; - - macro_rules! period { - ($idx:ident) => { - LazySumCum::from_sources_sum_raw( - name, v, sum_source.clone(), cumulative_source.clone(), - indexes.$idx.identity.boxed_clone(), - ) - }; - } - - Self { - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } -} diff --git a/crates/brk_computer/src/internal/multi/from_date/average.rs b/crates/brk_computer/src/internal/multi/from_date/average.rs deleted file mode 100644 index fdd8583d8..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/average.rs +++ /dev/null @@ -1,64 +0,0 @@ -//! ComputedFromDateAverage - dateindex storage + lazy periods for average-value aggregation. 
- -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{DateIndex, Version}; -use derive_more::{Deref, DerefMut}; -use schemars::JsonSchema; -use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec}; - -use crate::{ComputeIndexes, indexes}; - -use crate::internal::{ComputedVecValue, LazyDateDerivedAverage}; - -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(merge)] -pub struct ComputedFromDateAverage -where - T: ComputedVecValue + PartialOrd + JsonSchema, -{ - pub dateindex: EagerVec>, - #[deref] - #[deref_mut] - #[traversable(flatten)] - pub rest: LazyDateDerivedAverage, -} - -const VERSION: Version = Version::ZERO; - -impl ComputedFromDateAverage -where - T: ComputedVecValue + JsonSchema + 'static, -{ - pub fn forced_import( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - ) -> Result { - let dateindex = EagerVec::forced_import(db, name, version + VERSION)?; - - Ok(Self { - rest: LazyDateDerivedAverage::from_source( - name, - version + VERSION, - dateindex.boxed_clone(), - indexes, - ), - dateindex, - }) - } - - pub fn compute_all( - &mut self, - _starting_indexes: &ComputeIndexes, - _exit: &Exit, - mut compute: F, - ) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { - compute(&mut self.dateindex)?; - Ok(()) - } -} diff --git a/crates/brk_computer/src/internal/multi/from_date/binary_last.rs b/crates/brk_computer/src/internal/multi/from_date/binary_last.rs deleted file mode 100644 index 0170e8fbd..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/binary_last.rs +++ /dev/null @@ -1,954 +0,0 @@ -//! Binary transform composite from DateIndex - Last aggregation only. 
- -use brk_traversable::Traversable; -use brk_types::{ - DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex, -}; -use schemars::JsonSchema; -use vecdb::{BinaryTransform, IterableBoxedVec, IterableCloneableVec, LazyVecFrom2}; - -use crate::internal::{ - ComputedFromHeightLast, ComputedFromHeightSum, ComputedFromDateLast, ComputedVecValue, - LazyBinaryComputedFromHeightLast, LazyBinaryComputedFromHeightSum, LazyBinaryTransformLast, - LazyDateDerivedLast, LazyDateDerivedSumCum, LazyFromDateLast, LazyFromHeightLast, NumericValue, -}; - -const VERSION: Version = Version::ZERO; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyBinaryFromDateLast -where - T: ComputedVecValue + PartialOrd + JsonSchema, - S1T: ComputedVecValue, - S2T: ComputedVecValue, -{ - pub dateindex: LazyVecFrom2, - pub weekindex: LazyBinaryTransformLast, - pub monthindex: LazyBinaryTransformLast, - pub quarterindex: LazyBinaryTransformLast, - pub semesterindex: LazyBinaryTransformLast, - pub yearindex: LazyBinaryTransformLast, - pub decadeindex: LazyBinaryTransformLast, -} - -impl LazyBinaryFromDateLast -where - T: ComputedVecValue + JsonSchema + 'static, - S1T: ComputedVecValue + JsonSchema, - S2T: ComputedVecValue + JsonSchema, -{ - pub fn from_computed_both_last>( - name: &str, - version: Version, - source1: &ComputedFromDateLast, - source2: &ComputedFromDateLast, - ) -> Self { - let v = version + VERSION; - - macro_rules! 
period { - ($p:ident) => { - LazyBinaryTransformLast::from_lazy_last::( - name, - v, - &source1.$p, - &source2.$p, - ) - }; - } - - Self { - dateindex: LazyVecFrom2::transformed::( - name, - v, - source1.dateindex.boxed_clone(), - source2.dateindex.boxed_clone(), - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - pub fn from_derived_last_and_computed_last>( - name: &str, - version: Version, - dateindex_source1: IterableBoxedVec, - source1: &LazyDateDerivedLast, - source2: &ComputedFromDateLast, - ) -> Self { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyBinaryTransformLast::from_lazy_last::( - name, - v, - &source1.$p, - &source2.$p, - ) - }; - } - - Self { - dateindex: LazyVecFrom2::transformed::( - name, - v, - dateindex_source1, - source2.dateindex.boxed_clone(), - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - pub fn from_derived_last_and_block_last>( - name: &str, - version: Version, - dateindex_source1: IterableBoxedVec, - source1: &LazyDateDerivedLast, - source2: &ComputedFromHeightLast, - ) -> Self - where - S2T: NumericValue, - { - let v = version + VERSION; - - macro_rules! 
period { - ($p:ident) => { - LazyBinaryTransformLast::from_lazy_last::( - name, - v, - &source1.$p, - &source2.$p, - ) - }; - } - - Self { - dateindex: LazyVecFrom2::transformed::( - name, - v, - dateindex_source1, - source2.dateindex.boxed_clone(), - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - pub fn from_both_derived_last>( - name: &str, - version: Version, - dateindex_source1: IterableBoxedVec, - source1: &LazyDateDerivedLast, - dateindex_source2: IterableBoxedVec, - source2: &LazyDateDerivedLast, - ) -> Self { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyBinaryTransformLast::from_lazy_last::( - name, - v, - &source1.$p, - &source2.$p, - ) - }; - } - - Self { - dateindex: LazyVecFrom2::transformed::( - name, - v, - dateindex_source1, - dateindex_source2, - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - pub fn from_height_and_dateindex_last>( - name: &str, - version: Version, - source1: &ComputedFromHeightLast, - source2: &ComputedFromDateLast, - ) -> Self - where - S1T: NumericValue, - { - let v = version + VERSION; - - macro_rules! 
period { - ($p:ident) => { - LazyBinaryTransformLast::from_lazy_last::( - name, - v, - &source1.$p, - &source2.$p, - ) - }; - } - - Self { - dateindex: LazyVecFrom2::transformed::( - name, - v, - source1.dateindex.boxed_clone(), - source2.dateindex.boxed_clone(), - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - pub fn from_lazy_height_and_dateindex_last( - name: &str, - version: Version, - source1: &LazyFromHeightLast, - source2: &ComputedFromDateLast, - ) -> Self - where - F: BinaryTransform, - S1SourceT: ComputedVecValue + JsonSchema, - { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.$p.boxed_clone(), - source2.$p.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyVecFrom2::transformed::( - name, - v, - source1.dateindex.boxed_clone(), - source2.dateindex.boxed_clone(), - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - pub fn from_dateindex_and_height_last>( - name: &str, - version: Version, - source1: &ComputedFromDateLast, - source2: &ComputedFromHeightLast, - ) -> Self - where - S2T: NumericValue, - { - let v = version + VERSION; - - macro_rules! 
period { - ($p:ident) => { - LazyBinaryTransformLast::from_lazy_last::( - name, - v, - &source1.$p, - &source2.$p, - ) - }; - } - - Self { - dateindex: LazyVecFrom2::transformed::( - name, - v, - source1.dateindex.boxed_clone(), - source2.dateindex.boxed_clone(), - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - pub fn from_both_block_last>( - name: &str, - version: Version, - source1: &ComputedFromHeightLast, - source2: &ComputedFromHeightLast, - ) -> Self - where - S1T: NumericValue, - S2T: NumericValue, - { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyBinaryTransformLast::from_lazy_last::( - name, - v, - &source1.$p, - &source2.$p, - ) - }; - } - - Self { - dateindex: LazyVecFrom2::transformed::( - name, - v, - source1.dateindex.boxed_clone(), - source2.dateindex.boxed_clone(), - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - pub fn from_dateindex_last_and_height_sum>( - name: &str, - version: Version, - source1: &ComputedFromDateLast, - source2: &ComputedFromHeightSum, - ) -> Self - where - S2T: NumericValue, - { - let v = version + VERSION; - - macro_rules! 
period { - ($p:ident) => { - LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.$p.boxed_clone(), - source2.$p.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyVecFrom2::transformed::( - name, - v, - source1.dateindex.boxed_clone(), - source2.dateindex.boxed_clone(), - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - pub fn from_block_last_and_height_sum>( - name: &str, - version: Version, - source1: &ComputedFromHeightLast, - source2: &ComputedFromHeightSum, - ) -> Self - where - S1T: NumericValue, - S2T: NumericValue, - { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.$p.boxed_clone(), - source2.$p.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyVecFrom2::transformed::( - name, - v, - source1.dateindex.boxed_clone(), - source2.dateindex.boxed_clone(), - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - pub fn from_both_sum_cum_cumulatives>( - name: &str, - version: Version, - dateindex_source1: IterableBoxedVec, - dates1: &LazyDateDerivedSumCum, - dateindex_source2: IterableBoxedVec, - dates2: &LazyDateDerivedSumCum, - ) -> Self - where - S1T: PartialOrd, - S2T: PartialOrd, - { - let v = version + VERSION; - - macro_rules! 
period { - ($p:ident) => { - LazyBinaryTransformLast::from_vecs::( - name, - v, - dates1.$p.cumulative.boxed_clone(), - dates2.$p.cumulative.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyVecFrom2::transformed::( - name, - v, - dateindex_source1, - dateindex_source2, - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - /// Create from a ComputedFromHeightLast (block last with derived dates) and a LazyFromHeightLast. - pub fn from_block_last_and_lazy_block_last( - name: &str, - version: Version, - source1: &ComputedFromHeightLast, - source2: &LazyFromHeightLast, - ) -> Self - where - F: BinaryTransform, - S1T: NumericValue, - S2SourceT: ComputedVecValue + JsonSchema, - { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.$p.boxed_clone(), - source2.$p.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyVecFrom2::transformed::( - name, - v, - source1.dateindex.boxed_clone(), - source2.dateindex.boxed_clone(), - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - /// Create from a LazyFromHeightLast and a ComputedFromHeightLast (reversed source order). - pub fn from_lazy_block_last_and_block_last( - name: &str, - version: Version, - source1: &LazyFromHeightLast, - source2: &ComputedFromHeightLast, - ) -> Self - where - F: BinaryTransform, - S2T: NumericValue, - S1SourceT: ComputedVecValue + JsonSchema, - { - let v = version + VERSION; - - macro_rules! 
period { - ($p:ident) => { - LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.$p.boxed_clone(), - source2.$p.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyVecFrom2::transformed::( - name, - v, - source1.dateindex.boxed_clone(), - source2.dateindex.boxed_clone(), - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - /// Create from a LazyDateDerivedLast source and a BinaryDateLast source. - pub fn from_derived_last_and_binary_last( - name: &str, - version: Version, - dateindex_source1: IterableBoxedVec, - source1: &LazyDateDerivedLast, - source2: &LazyBinaryFromDateLast, - ) -> Self - where - F: BinaryTransform, - S2aT: ComputedVecValue + JsonSchema, - S2bT: ComputedVecValue + JsonSchema, - { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.$p.boxed_clone(), - source2.$p.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyVecFrom2::transformed::( - name, - v, - dateindex_source1, - source2.dateindex.boxed_clone(), - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - /// Create from a BinaryDateLast source and a ComputedFromDateLast source. - pub fn from_binary_and_computed_last( - name: &str, - version: Version, - source1: &LazyBinaryFromDateLast, - source2: &ComputedFromDateLast, - ) -> Self - where - F: BinaryTransform, - S1aT: ComputedVecValue + JsonSchema, - S1bT: ComputedVecValue + JsonSchema, - { - let v = version + VERSION; - - macro_rules! 
period { - ($p:ident) => { - LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.$p.boxed_clone(), - source2.$p.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyVecFrom2::transformed::( - name, - v, - source1.dateindex.boxed_clone(), - source2.dateindex.boxed_clone(), - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - /// Create from a BinaryDateLast source and a ComputedFromHeightLast source. - pub fn from_binary_and_block_last( - name: &str, - version: Version, - source1: &LazyBinaryFromDateLast, - source2: &ComputedFromHeightLast, - ) -> Self - where - F: BinaryTransform, - S1aT: ComputedVecValue + JsonSchema, - S1bT: ComputedVecValue + JsonSchema, - S2T: NumericValue, - { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.$p.boxed_clone(), - source2.$p.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyVecFrom2::transformed::( - name, - v, - source1.dateindex.boxed_clone(), - source2.dateindex.boxed_clone(), - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - /// Create from a ComputedFromDateLast source and a BinaryDateLast source. - pub fn from_computed_and_binary_last( - name: &str, - version: Version, - source1: &ComputedFromDateLast, - source2: &LazyBinaryFromDateLast, - ) -> Self - where - F: BinaryTransform, - S2aT: ComputedVecValue + JsonSchema, - S2bT: ComputedVecValue + JsonSchema, - { - let v = version + VERSION; - - macro_rules! 
period { - ($p:ident) => { - LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.$p.boxed_clone(), - source2.$p.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyVecFrom2::transformed::( - name, - v, - source1.dateindex.boxed_clone(), - source2.dateindex.boxed_clone(), - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - /// Create from two BinaryDateLast sources. - pub fn from_both_binary_last( - name: &str, - version: Version, - source1: &LazyBinaryFromDateLast, - source2: &LazyBinaryFromDateLast, - ) -> Self - where - F: BinaryTransform, - S1aT: ComputedVecValue + JsonSchema, - S1bT: ComputedVecValue + JsonSchema, - S2aT: ComputedVecValue + JsonSchema, - S2bT: ComputedVecValue + JsonSchema, - { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.$p.boxed_clone(), - source2.$p.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyVecFrom2::transformed::( - name, - v, - source1.dateindex.boxed_clone(), - source2.dateindex.boxed_clone(), - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - /// Create from a BinaryDateLast source and a LazyDateDerivedLast source. - pub fn from_binary_and_derived_last( - name: &str, - version: Version, - source1: &LazyBinaryFromDateLast, - dateindex_source2: IterableBoxedVec, - source2: &LazyDateDerivedLast, - ) -> Self - where - F: BinaryTransform, - S1aT: ComputedVecValue + JsonSchema, - S1bT: ComputedVecValue + JsonSchema, - { - let v = version + VERSION; - - macro_rules! 
period { - ($p:ident) => { - LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.$p.boxed_clone(), - source2.$p.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyVecFrom2::transformed::( - name, - v, - source1.dateindex.boxed_clone(), - dateindex_source2, - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - /// Create from a LazyBinaryComputedFromHeightLast and a ComputedFromHeightSum. - pub fn from_lazy_binary_block_last_and_height_sum( - name: &str, - version: Version, - source1: &LazyBinaryComputedFromHeightLast, - source2: &ComputedFromHeightSum, - ) -> Self - where - F: BinaryTransform, - S1aT: ComputedVecValue + JsonSchema, - S1bT: ComputedVecValue + JsonSchema, - S2T: NumericValue, - { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.rest.dates.$p.boxed_clone(), - source2.$p.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyVecFrom2::transformed::( - name, - v, - source1.rest.dateindex.boxed_clone(), - source2.dateindex.boxed_clone(), - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - /// Create from a LazyBinaryComputedFromHeightLast and a LazyBinaryComputedFromHeightSum. 
- pub fn from_lazy_binary_block_last_and_lazy_binary_sum( - name: &str, - version: Version, - source1: &LazyBinaryComputedFromHeightLast, - source2: &LazyBinaryComputedFromHeightSum, - ) -> Self - where - F: BinaryTransform, - S1aT: ComputedVecValue + JsonSchema, - S1bT: ComputedVecValue + JsonSchema, - S2aT: ComputedVecValue + JsonSchema, - S2bT: ComputedVecValue + JsonSchema, - { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.rest.dates.$p.boxed_clone(), - source2.rest.dates.$p.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyVecFrom2::transformed::( - name, - v, - source1.rest.dateindex.boxed_clone(), - source2.rest.dateindex.boxed_clone(), - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - /// Create from a ComputedFromDateLast and a LazyFromDateLast. - pub fn from_computed_and_lazy_last( - name: &str, - version: Version, - source1: &ComputedFromDateLast, - source2: &LazyFromDateLast, - ) -> Self - where - F: BinaryTransform, - S2SourceT: ComputedVecValue + JsonSchema, - { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.rest.$p.boxed_clone(), - source2.$p.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyVecFrom2::transformed::( - name, - v, - source1.dateindex.boxed_clone(), - source2.dateindex.boxed_clone(), - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - /// Create from a ComputedFromDateLast and a LazyDateDerivedLast. 
- pub fn from_computed_and_derived_last>( - name: &str, - version: Version, - source1: &ComputedFromDateLast, - dateindex_source2: IterableBoxedVec, - source2: &LazyDateDerivedLast, - ) -> Self { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyBinaryTransformLast::from_lazy_last::( - name, - v, - &source1.$p, - &source2.$p, - ) - }; - } - - Self { - dateindex: LazyVecFrom2::transformed::( - name, - v, - source1.dateindex.boxed_clone(), - dateindex_source2, - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } -} diff --git a/crates/brk_computer/src/internal/multi/from_date/binary_sum.rs b/crates/brk_computer/src/internal/multi/from_date/binary_sum.rs deleted file mode 100644 index 767ff933d..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/binary_sum.rs +++ /dev/null @@ -1,134 +0,0 @@ -//! Binary transform for Sum-only pattern across date periods. 
- -use brk_traversable::Traversable; -use brk_types::{ - DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex, -}; -use schemars::JsonSchema; -use vecdb::{BinaryTransform, IterableCloneableVec}; - -use crate::internal::{ - ComputedFromHeightSumCum, ComputedHeightDerivedSum, ComputedVecValue, LazyBinaryTransformSum, - LazyFromHeightLast, NumericValue, -}; - -const VERSION: Version = Version::ZERO; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyBinaryFromDateSum -where - T: ComputedVecValue + PartialOrd + JsonSchema, - S1T: ComputedVecValue, - S2T: ComputedVecValue, -{ - pub dateindex: LazyBinaryTransformSum, - pub weekindex: LazyBinaryTransformSum, - pub monthindex: LazyBinaryTransformSum, - pub quarterindex: LazyBinaryTransformSum, - pub semesterindex: LazyBinaryTransformSum, - pub yearindex: LazyBinaryTransformSum, - pub decadeindex: LazyBinaryTransformSum, -} - -impl LazyBinaryFromDateSum -where - T: ComputedVecValue + JsonSchema + 'static, - S1T: NumericValue + JsonSchema, - S2T: NumericValue + JsonSchema, -{ - pub fn from_derived>( - name: &str, - version: Version, - source1: &ComputedHeightDerivedSum, - source2: &ComputedHeightDerivedSum, - ) -> Self { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyBinaryTransformSum::from_boxed::(name, v, source1.$p.boxed_clone(), source2.$p.boxed_clone()) - }; - } - - Self { - dateindex: LazyBinaryTransformSum::from_sum::(name, v, &source1.dateindex, &source2.dateindex), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - /// Create from two LazyBinaryFromDateSum sources. 
- pub fn from_binary( - name: &str, - version: Version, - source1: &LazyBinaryFromDateSum, - source2: &LazyBinaryFromDateSum, - ) -> Self - where - F: BinaryTransform, - S1aT: ComputedVecValue + JsonSchema, - S1bT: ComputedVecValue + JsonSchema, - S2aT: ComputedVecValue + JsonSchema, - S2bT: ComputedVecValue + JsonSchema, - { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyBinaryTransformSum::from_boxed::(name, v, source1.$p.boxed_clone(), source2.$p.boxed_clone()) - }; - } - - Self { - dateindex: period!(dateindex), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - /// Create from a SumCum source (using only sum) and a LazyLast source. - pub fn from_sumcum_lazy_last( - name: &str, - version: Version, - source1: &ComputedFromHeightSumCum, - source2: &LazyFromHeightLast, - ) -> Self - where - F: BinaryTransform, - S2ST: ComputedVecValue + JsonSchema, - { - let v = version + VERSION; - - // source1 has SumCum pattern with .dateindex.sum, .weekindex.sum, etc. - // source2 has Last pattern via deref chain: .dates.dateindex, .dates.weekindex, etc. - macro_rules! 
period { - ($p:ident) => { - LazyBinaryTransformSum::from_boxed::( - name, - v, - source1.$p.sum.boxed_clone(), - source2.dates.$p.boxed_clone(), - ) - }; - } - - Self { - dateindex: period!(dateindex), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } -} diff --git a/crates/brk_computer/src/internal/multi/from_date/binary_sum_cum.rs b/crates/brk_computer/src/internal/multi/from_date/binary_sum_cum.rs deleted file mode 100644 index 0b407af27..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/binary_sum_cum.rs +++ /dev/null @@ -1,324 +0,0 @@ -//! Binary transform for SumCum pattern across date periods. - -use brk_traversable::Traversable; -use brk_types::{ - DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex, -}; -use schemars::JsonSchema; -use vecdb::{BinaryTransform, IterableCloneableVec}; - -use crate::internal::{ - ComputedFromHeightLast, ComputedFromHeightSumCum, ComputedHeightDerivedLast, - ComputedHeightDerivedSumCum, ComputedVecValue, LazyBinaryTransformSumCum, LazyDateDerivedFull, - LazyDateDerivedSumCum, LazyFromHeightLast, NumericValue, SumCum, -}; - -const VERSION: Version = Version::ZERO; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyBinaryFromDateSumCum -where - T: ComputedVecValue + PartialOrd + JsonSchema, - S1T: ComputedVecValue, - S2T: ComputedVecValue, -{ - pub dateindex: LazyBinaryTransformSumCum, - pub weekindex: LazyBinaryTransformSumCum, - pub monthindex: LazyBinaryTransformSumCum, - pub quarterindex: LazyBinaryTransformSumCum, - pub semesterindex: LazyBinaryTransformSumCum, - pub yearindex: LazyBinaryTransformSumCum, - pub decadeindex: LazyBinaryTransformSumCum, -} - -impl LazyBinaryFromDateSumCum -where - T: ComputedVecValue + JsonSchema + 'static, - S1T: ComputedVecValue + JsonSchema, 
- S2T: ComputedVecValue + JsonSchema, -{ - #[allow(clippy::too_many_arguments)] - pub fn from_computed>( - name: &str, - version: Version, - dateindex1: &SumCum, - periods1: &LazyDateDerivedSumCum, - dateindex2: &SumCum, - periods2: &LazyDateDerivedSumCum, - ) -> Self { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyBinaryTransformSumCum::from_sources::( - name, v, - periods1.$p.sum.boxed_clone(), periods2.$p.sum.boxed_clone(), - periods1.$p.cumulative.boxed_clone(), periods2.$p.cumulative.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyBinaryTransformSumCum::from_sum_cum::(name, v, dateindex1, dateindex2), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - pub fn from_derived_full>( - name: &str, - version: Version, - dateindex1: &SumCum, - dates1: &LazyDateDerivedFull, - dateindex2: &SumCum, - dates2: &LazyDateDerivedFull, - ) -> Self { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyBinaryTransformSumCum::from_lazy_stats_aggregate::( - name, v, &dates1.$p, &dates2.$p, - ) - }; - } - - Self { - dateindex: LazyBinaryTransformSumCum::from_sum_cum::(name, v, dateindex1, dateindex2), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - // --- Raw variants (no _sum suffix) for pure SumCum types --- - - #[allow(clippy::too_many_arguments)] - pub fn from_computed_sum_raw>( - name: &str, - version: Version, - dateindex1: &SumCum, - periods1: &LazyDateDerivedSumCum, - dateindex2: &SumCum, - periods2: &LazyDateDerivedSumCum, - ) -> Self { - let v = version + VERSION; - - macro_rules! 
period { - ($p:ident) => { - LazyBinaryTransformSumCum::from_sources_sum_raw::( - name, v, - periods1.$p.sum.boxed_clone(), periods2.$p.sum.boxed_clone(), - periods1.$p.cumulative.boxed_clone(), periods2.$p.cumulative.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyBinaryTransformSumCum::from_sum_cum_sum_raw::(name, v, dateindex1, dateindex2), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - // --- Methods accepting SumCum + Last sources --- - - pub fn from_computed_last>( - name: &str, - version: Version, - source1: &ComputedFromHeightSumCum, - source2: &ComputedFromHeightLast, - ) -> Self - where - S1T: PartialOrd, - S2T: NumericValue, - { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyBinaryTransformSumCum::from_sources_last_sum_raw::( - name, v, - source1.rest.$p.sum.boxed_clone(), - source1.rest.$p.cumulative.boxed_clone(), - source2.rest.$p.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyBinaryTransformSumCum::from_sum_cum_last_sum_raw::( - name, v, &source1.dateindex, &source2.dateindex, - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - pub fn from_derived_computed_last>( - name: &str, - version: Version, - source1: &ComputedHeightDerivedSumCum, - source2: &ComputedFromHeightLast, - ) -> Self - where - S1T: NumericValue, - S2T: NumericValue, - { - let v = version + VERSION; - - macro_rules! 
period { - ($p:ident) => { - LazyBinaryTransformSumCum::from_sources_last_sum_raw::( - name, v, - source1.$p.sum.boxed_clone(), - source1.$p.cumulative.boxed_clone(), - source2.rest.$p.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyBinaryTransformSumCum::from_sum_cum_last_sum_raw::( - name, v, &source1.dateindex, &source2.dateindex, - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - pub fn from_computed_derived_last>( - name: &str, - version: Version, - source1: &ComputedFromHeightSumCum, - source2: &ComputedHeightDerivedLast, - ) -> Self - where - S1T: PartialOrd, - S2T: NumericValue, - { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyBinaryTransformSumCum::from_sources_last_sum_raw::( - name, v, - source1.rest.$p.sum.boxed_clone(), - source1.rest.$p.cumulative.boxed_clone(), - source2.$p.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyBinaryTransformSumCum::from_sum_cum_last_sum_raw::( - name, v, &source1.dateindex, &source2.dateindex, - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - pub fn from_derived_last>( - name: &str, - version: Version, - source1: &ComputedHeightDerivedSumCum, - source2: &ComputedHeightDerivedLast, - ) -> Self - where - S1T: NumericValue, - S2T: NumericValue, - { - let v = version + VERSION; - - macro_rules! 
period { - ($p:ident) => { - LazyBinaryTransformSumCum::from_sources_last_sum_raw::( - name, v, - source1.$p.sum.boxed_clone(), - source1.$p.cumulative.boxed_clone(), - source2.$p.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyBinaryTransformSumCum::from_sum_cum_last_sum_raw::( - name, v, &source1.dateindex, &source2.dateindex, - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - // --- Methods accepting SumCum + LazyLast sources --- - - pub fn from_computed_lazy_last( - name: &str, - version: Version, - source1: &ComputedFromHeightSumCum, - source2: &LazyFromHeightLast, - ) -> Self - where - F: BinaryTransform, - S1T: PartialOrd, - S2T: NumericValue, - S2ST: ComputedVecValue + JsonSchema, - { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyBinaryTransformSumCum::from_sources_last_sum_raw::( - name, v, - source1.rest.$p.sum.boxed_clone(), - source1.rest.$p.cumulative.boxed_clone(), - source2.rest.dates.$p.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyBinaryTransformSumCum::from_sources_last_sum_raw::( - name, v, - source1.dateindex.boxed_sum(), - source1.dateindex.boxed_cumulative(), - source2.rest.dates.dateindex.boxed_clone(), - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } -} diff --git a/crates/brk_computer/src/internal/multi/from_date/first.rs b/crates/brk_computer/src/internal/multi/from_date/first.rs deleted file mode 100644 index 7d683b58b..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/first.rs +++ /dev/null @@ -1,64 +0,0 @@ -//! ComputedVecsDate using only first-value aggregation. 
- -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{DateIndex, Version}; -use derive_more::{Deref, DerefMut}; -use schemars::JsonSchema; -use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec}; - -use crate::{ComputeIndexes, indexes}; - -use crate::internal::{ComputedVecValue, LazyDateDerivedFirst}; - -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(merge)] -pub struct ComputedFromDateFirst -where - T: ComputedVecValue + PartialOrd + JsonSchema, -{ - pub dateindex: EagerVec>, - #[deref] - #[deref_mut] - #[traversable(flatten)] - pub rest: LazyDateDerivedFirst, -} - -const VERSION: Version = Version::ZERO; - -impl ComputedFromDateFirst -where - T: ComputedVecValue + JsonSchema + 'static, -{ - pub fn forced_import( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - ) -> Result { - let dateindex = EagerVec::forced_import(db, name, version + VERSION)?; - - Ok(Self { - rest: LazyDateDerivedFirst::from_source( - name, - version + VERSION, - dateindex.boxed_clone(), - indexes, - ), - dateindex, - }) - } - - pub fn compute_all( - &mut self, - _starting_indexes: &ComputeIndexes, - _exit: &Exit, - mut compute: F, - ) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { - compute(&mut self.dateindex)?; - Ok(()) - } -} diff --git a/crates/brk_computer/src/internal/multi/from_date/last.rs b/crates/brk_computer/src/internal/multi/from_date/last.rs deleted file mode 100644 index 3df16e800..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/last.rs +++ /dev/null @@ -1,73 +0,0 @@ -//! ComputedVecsDate using only last-value aggregation. 
- -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{DateIndex, Version}; -use derive_more::{Deref, DerefMut}; -use schemars::JsonSchema; -use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, IterableVec, PcoVec}; - -use crate::{ComputeIndexes, indexes}; - -use crate::internal::{ComputedVecValue, LazyDateDerivedLast}; - -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(merge)] -pub struct ComputedFromDateLast -where - T: ComputedVecValue + PartialOrd + JsonSchema, -{ - pub dateindex: EagerVec>, - #[deref] - #[deref_mut] - #[traversable(flatten)] - pub rest: LazyDateDerivedLast, -} - -const VERSION: Version = Version::ZERO; - -impl ComputedFromDateLast -where - T: ComputedVecValue + JsonSchema + 'static, -{ - pub fn forced_import( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - ) -> Result { - let dateindex = EagerVec::forced_import(db, name, version + VERSION)?; - - Ok(Self { - rest: LazyDateDerivedLast::from_source( - name, - version + VERSION, - dateindex.boxed_clone(), - indexes, - ), - dateindex, - }) - } - - pub fn compute_all( - &mut self, - _starting_indexes: &ComputeIndexes, - _exit: &Exit, - mut compute: F, - ) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { - compute(&mut self.dateindex)?; - Ok(()) - } - - pub fn compute_rest( - &mut self, - _starting_indexes: &ComputeIndexes, - _exit: &Exit, - _dateindex: Option<&impl IterableVec>, - ) -> Result<()> { - Ok(()) - } -} diff --git a/crates/brk_computer/src/internal/multi/from_date/lazy.rs b/crates/brk_computer/src/internal/multi/from_date/lazy.rs deleted file mode 100644 index 0fd5bb51f..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/lazy.rs +++ /dev/null @@ -1,88 +0,0 @@ -//! Generic lazy vecs for all time period indexes. 
- -use brk_traversable::Traversable; -use brk_types::{ - DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex, -}; -use schemars::JsonSchema; -use serde::Serialize; -use vecdb::{ComputeFrom1, Formattable, IterableCloneableVec, LazyVecFrom1, VecValue}; - -use crate::indexes; - -/// Lazy vecs for all time period indexes (no height). -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyFromDate -where - T: VecValue + Formattable + Serialize + JsonSchema, -{ - pub dateindex: LazyVecFrom1, - pub weekindex: LazyVecFrom1, - pub monthindex: LazyVecFrom1, - pub quarterindex: LazyVecFrom1, - pub semesterindex: LazyVecFrom1, - pub yearindex: LazyVecFrom1, - pub decadeindex: LazyVecFrom1, -} - -impl LazyFromDate { - #[allow(clippy::too_many_arguments)] - pub fn new( - name: &str, - version: Version, - indexes: &indexes::Vecs, - dateindex_fn: ComputeFrom1, - weekindex_fn: ComputeFrom1, - monthindex_fn: ComputeFrom1, - quarterindex_fn: ComputeFrom1, - semesterindex_fn: ComputeFrom1, - yearindex_fn: ComputeFrom1, - decadeindex_fn: ComputeFrom1, - ) -> Self { - Self { - dateindex: LazyVecFrom1::init( - name, - version, - indexes.dateindex.identity.boxed_clone(), - dateindex_fn, - ), - weekindex: LazyVecFrom1::init( - name, - version, - indexes.weekindex.identity.boxed_clone(), - weekindex_fn, - ), - monthindex: LazyVecFrom1::init( - name, - version, - indexes.monthindex.identity.boxed_clone(), - monthindex_fn, - ), - quarterindex: LazyVecFrom1::init( - name, - version, - indexes.quarterindex.identity.boxed_clone(), - quarterindex_fn, - ), - semesterindex: LazyVecFrom1::init( - name, - version, - indexes.semesterindex.identity.boxed_clone(), - semesterindex_fn, - ), - yearindex: LazyVecFrom1::init( - name, - version, - indexes.yearindex.identity.boxed_clone(), - yearindex_fn, - ), - decadeindex: LazyVecFrom1::init( - name, - version, - indexes.decadeindex.identity.boxed_clone(), - decadeindex_fn, - ), - } - } -} diff --git 
a/crates/brk_computer/src/internal/multi/from_date/lazy_binary_price.rs b/crates/brk_computer/src/internal/multi/from_date/lazy_binary_price.rs deleted file mode 100644 index 2e12cb247..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/lazy_binary_price.rs +++ /dev/null @@ -1,152 +0,0 @@ -//! Lazy binary price wrapper with both USD and sats representations. -//! -//! For binary operations (e.g., price × ratio) that produce price values. -//! Both dollars and sats are computed lazily from the same sources. - -use brk_traversable::Traversable; -use brk_types::{Dollars, SatsFract, Version}; -use derive_more::{Deref, DerefMut}; -use schemars::JsonSchema; -use vecdb::{BinaryTransform, IterableCloneableVec, LazyVecFrom1}; - -use super::{ComputedFromDateLast, LazyBinaryFromDateLast}; -use crate::internal::{ComputedFromHeightLast, ComputedVecValue, DollarsToSatsFract, LazyFromHeightLast, LazyTransformLast, NumericValue}; - -/// Lazy binary price with both USD and sats representations. -/// -/// Wraps a binary operation that produces Dollars and lazily converts to sats. -/// Derefs to the dollars metric. -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(merge)] -pub struct LazyBinaryPrice -where - S1T: ComputedVecValue, - S2T: ComputedVecValue, -{ - #[deref] - #[deref_mut] - #[traversable(flatten)] - pub dollars: LazyBinaryFromDateLast, - pub sats: LazyUnaryFromBinaryLast, -} - -/// Lazy unary transform chain on a LazyBinaryFromDateLast output. 
-#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyUnaryFromBinaryLast -where - T: ComputedVecValue + PartialOrd + JsonSchema, - ST: ComputedVecValue, - S1T: ComputedVecValue, - S2T: ComputedVecValue, -{ - pub dateindex: LazyVecFrom1, - pub weekindex: LazyTransformLast, - pub monthindex: LazyTransformLast, - pub quarterindex: LazyTransformLast, - pub semesterindex: LazyTransformLast, - pub yearindex: LazyTransformLast, - pub decadeindex: LazyTransformLast, - _phantom: std::marker::PhantomData<(S1T, S2T)>, -} - -impl LazyBinaryPrice -where - S1T: ComputedVecValue + JsonSchema + 'static, - S2T: ComputedVecValue + JsonSchema + 'static, -{ - /// Create from height-based price and dateindex-based ratio sources. - pub fn from_height_and_dateindex_last>( - name: &str, - version: Version, - source1: &ComputedFromHeightLast, - source2: &ComputedFromDateLast, - ) -> Self - where - S1T: NumericValue, - { - let dollars = LazyBinaryFromDateLast::from_height_and_dateindex_last::( - name, version, source1, source2, - ); - Self::from_dollars(name, version, dollars) - } - - /// Create from lazy height-based price and dateindex-based ratio sources. - pub fn from_lazy_height_and_dateindex_last( - name: &str, - version: Version, - source1: &LazyFromHeightLast, - source2: &ComputedFromDateLast, - ) -> Self - where - F: BinaryTransform, - S1SourceT: ComputedVecValue + JsonSchema, - { - let dollars = LazyBinaryFromDateLast::from_lazy_height_and_dateindex_last::( - name, version, source1, source2, - ); - Self::from_dollars(name, version, dollars) - } - - /// Create from two computed dateindex sources. - pub fn from_computed_both_last>( - name: &str, - version: Version, - source1: &ComputedFromDateLast, - source2: &ComputedFromDateLast, - ) -> Self { - let dollars = LazyBinaryFromDateLast::from_computed_both_last::( - name, version, source1, source2, - ); - Self::from_dollars(name, version, dollars) - } - - /// Create sats version from dollars. 
- fn from_dollars( - name: &str, - version: Version, - dollars: LazyBinaryFromDateLast, - ) -> Self { - let sats_name = format!("{name}_sats"); - let sats = LazyUnaryFromBinaryLast { - dateindex: LazyVecFrom1::transformed::( - &sats_name, - version, - dollars.dateindex.boxed_clone(), - ), - weekindex: LazyTransformLast::from_boxed::( - &sats_name, - version, - dollars.weekindex.boxed_clone(), - ), - monthindex: LazyTransformLast::from_boxed::( - &sats_name, - version, - dollars.monthindex.boxed_clone(), - ), - quarterindex: LazyTransformLast::from_boxed::( - &sats_name, - version, - dollars.quarterindex.boxed_clone(), - ), - semesterindex: LazyTransformLast::from_boxed::( - &sats_name, - version, - dollars.semesterindex.boxed_clone(), - ), - yearindex: LazyTransformLast::from_boxed::( - &sats_name, - version, - dollars.yearindex.boxed_clone(), - ), - decadeindex: LazyTransformLast::from_boxed::( - &sats_name, - version, - dollars.decadeindex.boxed_clone(), - ), - _phantom: std::marker::PhantomData, - }; - - Self { dollars, sats } - } -} diff --git a/crates/brk_computer/src/internal/multi/from_date/lazy_distribution.rs b/crates/brk_computer/src/internal/multi/from_date/lazy_distribution.rs deleted file mode 100644 index 74d7676fb..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/lazy_distribution.rs +++ /dev/null @@ -1,69 +0,0 @@ -//! Lazy transform for Distribution date sources. -//! Like LazyFromDateFull but without sum/cumulative (for ratio/percentage metrics). - -use brk_traversable::Traversable; -use brk_types::{ - DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex, -}; -use schemars::JsonSchema; -use vecdb::{IterableCloneableVec, UnaryTransform}; - -use crate::internal::{ - ComputedVecValue, Full, LazyDateDerivedFull, LazyTransformDistribution, LazyTransformSpread, -}; - -const VERSION: Version = Version::ZERO; - -/// Distribution stats across date periods. 
Has average, min, max, percentiles but no sum/cumulative. -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyFromDateDistribution -where - T: ComputedVecValue + PartialOrd + JsonSchema, - S1T: ComputedVecValue, -{ - pub dateindex: LazyTransformDistribution, - pub weekindex: LazyTransformSpread, - pub monthindex: LazyTransformSpread, - pub quarterindex: LazyTransformSpread, - pub semesterindex: LazyTransformSpread, - pub yearindex: LazyTransformSpread, - pub decadeindex: LazyTransformSpread, -} - -impl LazyFromDateDistribution -where - T: ComputedVecValue + JsonSchema + 'static, - S1T: ComputedVecValue + JsonSchema, -{ - pub fn from_full>( - name: &str, - version: Version, - dateindex: &Full, - source: &LazyDateDerivedFull, - ) -> Self { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyTransformSpread::from_boxed::( - name, - v, - source.$p.average.boxed_clone(), - source.$p.min.boxed_clone(), - source.$p.max.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyTransformDistribution::from_stats_aggregate::(name, v, dateindex), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } -} diff --git a/crates/brk_computer/src/internal/multi/from_date/lazy_full.rs b/crates/brk_computer/src/internal/multi/from_date/lazy_full.rs deleted file mode 100644 index 67f3c7984..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/lazy_full.rs +++ /dev/null @@ -1,62 +0,0 @@ -//! Lazy transform for Full date sources. 
- -use brk_traversable::Traversable; -use brk_types::{DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex}; -use schemars::JsonSchema; -use vecdb::{IterableCloneableVec, UnaryTransform}; - -use crate::internal::{ComputedVecValue, Full, LazyDateDerivedFull, LazyTransformFull, LazyTransformStats}; - -const VERSION: Version = Version::ZERO; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyFromDateFull -where - T: ComputedVecValue + PartialOrd + JsonSchema, - S1T: ComputedVecValue, -{ - pub dateindex: LazyTransformFull, - pub weekindex: LazyTransformStats, - pub monthindex: LazyTransformStats, - pub quarterindex: LazyTransformStats, - pub semesterindex: LazyTransformStats, - pub yearindex: LazyTransformStats, - pub decadeindex: LazyTransformStats, -} - -impl LazyFromDateFull -where - T: ComputedVecValue + JsonSchema + 'static, - S1T: ComputedVecValue + JsonSchema, -{ - pub fn from_full>( - name: &str, - version: Version, - dateindex: &Full, - source: &LazyDateDerivedFull, - ) -> Self { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyTransformStats::from_boxed::( - name, v, - source.$p.average.boxed_clone(), source.$p.min.boxed_clone(), - source.$p.max.boxed_clone(), source.$p.sum.boxed_clone(), - source.$p.cumulative.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyTransformFull::from_stats_aggregate::(name, v, dateindex), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } -} diff --git a/crates/brk_computer/src/internal/multi/from_date/lazy_last.rs b/crates/brk_computer/src/internal/multi/from_date/lazy_last.rs deleted file mode 100644 index 487e686de..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/lazy_last.rs +++ /dev/null @@ -1,117 +0,0 @@ -//! 
Lazy transform for Last-only date sources. - -use brk_traversable::Traversable; -use brk_types::{ - DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex, -}; -use schemars::JsonSchema; -use vecdb::{IterableBoxedVec, IterableCloneableVec, UnaryTransform}; - -use crate::internal::{ComputedFromHeightLast, ComputedFromDateLast, ComputedVecValue, LazyBinaryFromDateLast, LazyDateDerivedLast, LazyTransformLast, NumericValue}; - -const VERSION: Version = Version::ZERO; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyFromDateLast -where - T: ComputedVecValue + PartialOrd + JsonSchema, - S1T: ComputedVecValue, -{ - pub dateindex: LazyTransformLast, - pub weekindex: LazyTransformLast, - pub monthindex: LazyTransformLast, - pub quarterindex: LazyTransformLast, - pub semesterindex: LazyTransformLast, - pub yearindex: LazyTransformLast, - pub decadeindex: LazyTransformLast, -} - -impl LazyFromDateLast -where - T: ComputedVecValue + JsonSchema + 'static, - S1T: ComputedVecValue + JsonSchema, -{ - pub fn from_source>( - name: &str, - version: Version, - source: &ComputedFromDateLast, - ) -> Self { - Self::from_computed::(name, version, source.dateindex.boxed_clone(), source) - } - - pub fn from_computed>( - name: &str, - version: Version, - dateindex_source: IterableBoxedVec, - source: &ComputedFromDateLast, - ) -> Self { - Self::from_derived::(name, version, dateindex_source, &source.rest) - } - - pub fn from_derived>( - name: &str, - version: Version, - dateindex_source: IterableBoxedVec, - source: &LazyDateDerivedLast, - ) -> Self { - let v = version + VERSION; - - macro_rules! 
period { - ($p:ident) => { - LazyTransformLast::from_lazy_last::(name, v, &source.$p) - }; - } - - Self { - dateindex: LazyTransformLast::from_boxed::(name, v, dateindex_source), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } - - pub fn from_block_source>( - name: &str, - version: Version, - source: &ComputedFromHeightLast, - ) -> Self - where - S1T: NumericValue, - { - Self::from_derived::(name, version, source.dateindex.boxed_clone(), &source.dates) - } - - /// Create by unary-transforming a LazyBinaryFromDateLast source. - pub fn from_binary( - name: &str, - version: Version, - source: &LazyBinaryFromDateLast, - ) -> Self - where - F: UnaryTransform, - S1aT: ComputedVecValue + JsonSchema, - S1bT: ComputedVecValue + JsonSchema, - { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyTransformLast::from_boxed::(name, v, source.$p.boxed_clone()) - }; - } - - Self { - dateindex: period!(dateindex), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } -} diff --git a/crates/brk_computer/src/internal/multi/from_date/lazy_price.rs b/crates/brk_computer/src/internal/multi/from_date/lazy_price.rs deleted file mode 100644 index f51843bdc..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/lazy_price.rs +++ /dev/null @@ -1,62 +0,0 @@ -//! Lazy price wrapper with both USD and sats representations. -//! -//! Both dollars and sats are computed from the same source. 
- -use std::marker::PhantomData; - -use brk_traversable::Traversable; -use brk_types::{Dollars, SatsFract, Version}; -use derive_more::{Deref, DerefMut}; -use vecdb::UnaryTransform; - -use super::{ComputedFromDateLast, LazyFromDateLast}; -use crate::internal::{ComputedVecValue, DollarsToSatsFract}; - -/// Lazy price with both USD and sats representations. -/// -/// Both are computed from the same source via separate transforms. -/// Derefs to the dollars metric. -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(merge)] -pub struct LazyPrice -where - ST: ComputedVecValue, -{ - #[deref] - #[deref_mut] - #[traversable(flatten)] - pub dollars: LazyFromDateLast, - pub sats: LazyFromDateLast, -} - -/// Composed transform: ST -> Dollars -> SatsFract -pub struct ComposedDollarsToSatsFract(PhantomData); - -impl UnaryTransform for ComposedDollarsToSatsFract -where - F: UnaryTransform, -{ - #[inline(always)] - fn apply(source: ST) -> SatsFract { - DollarsToSatsFract::apply(F::apply(source)) - } -} - -impl LazyPrice -where - ST: ComputedVecValue + schemars::JsonSchema + 'static, -{ - pub fn from_source>( - name: &str, - version: Version, - source: &ComputedFromDateLast, - ) -> Self { - let dollars = LazyFromDateLast::from_source::(name, version, source); - let sats = LazyFromDateLast::from_source::>( - &format!("{name}_sats"), - version, - source, - ); - Self { dollars, sats } - } -} diff --git a/crates/brk_computer/src/internal/multi/from_date/lazy_sum.rs b/crates/brk_computer/src/internal/multi/from_date/lazy_sum.rs deleted file mode 100644 index 7b5dc401c..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/lazy_sum.rs +++ /dev/null @@ -1,57 +0,0 @@ -//! Lazy transform for Sum-only date sources. 
- -use brk_traversable::Traversable; -use brk_types::{DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex}; -use schemars::JsonSchema; -use vecdb::{IterableBoxedVec, IterableCloneableVec, UnaryTransform}; - -use crate::internal::{ComputedVecValue, LazyDateDerivedSum, LazyTransformSum}; - -const VERSION: Version = Version::ZERO; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyFromDateSum -where - T: ComputedVecValue + PartialOrd + JsonSchema, - S1T: ComputedVecValue, -{ - pub dateindex: LazyTransformSum, - pub weekindex: LazyTransformSum, - pub monthindex: LazyTransformSum, - pub quarterindex: LazyTransformSum, - pub semesterindex: LazyTransformSum, - pub yearindex: LazyTransformSum, - pub decadeindex: LazyTransformSum, -} - -impl LazyFromDateSum -where - T: ComputedVecValue + JsonSchema + 'static, - S1T: ComputedVecValue + JsonSchema, -{ - pub fn from_derived>( - name: &str, - version: Version, - dateindex_source: IterableBoxedVec, - source: &LazyDateDerivedSum, - ) -> Self { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyTransformSum::from_boxed::(name, v, source.$p.boxed_clone()) - }; - } - - Self { - dateindex: LazyTransformSum::from_boxed::(name, v, dateindex_source), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } -} diff --git a/crates/brk_computer/src/internal/multi/from_date/lazy_sum_cum.rs b/crates/brk_computer/src/internal/multi/from_date/lazy_sum_cum.rs deleted file mode 100644 index d32d5764e..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/lazy_sum_cum.rs +++ /dev/null @@ -1,59 +0,0 @@ -//! Lazy transform for SumCum date sources. 
- -use brk_traversable::Traversable; -use brk_types::{DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex}; -use schemars::JsonSchema; -use vecdb::{IterableCloneableVec, UnaryTransform}; - -use crate::internal::{ComputedVecValue, LazyDateDerivedSumCum, LazyTransformSumCum, SumCum}; - -const VERSION: Version = Version::ZERO; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyFromDateSumCum -where - T: ComputedVecValue + PartialOrd + JsonSchema, - S1T: ComputedVecValue, -{ - pub dateindex: LazyTransformSumCum, - pub weekindex: LazyTransformSumCum, - pub monthindex: LazyTransformSumCum, - pub quarterindex: LazyTransformSumCum, - pub semesterindex: LazyTransformSumCum, - pub yearindex: LazyTransformSumCum, - pub decadeindex: LazyTransformSumCum, -} - -impl LazyFromDateSumCum -where - T: ComputedVecValue + JsonSchema + 'static, - S1T: ComputedVecValue + JsonSchema, -{ - pub fn from_sum_cum>( - name: &str, - version: Version, - dateindex: &SumCum, - source: &LazyDateDerivedSumCum, - ) -> Self { - let v = version + VERSION; - - macro_rules! period { - ($p:ident) => { - LazyTransformSumCum::from_boxed_sum_raw::( - name, v, source.$p.sum.boxed_clone(), source.$p.cumulative.boxed_clone(), - ) - }; - } - - Self { - dateindex: LazyTransformSumCum::from_sum_cum_sum_raw::(name, v, dateindex), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } -} diff --git a/crates/brk_computer/src/internal/multi/from_date/max.rs b/crates/brk_computer/src/internal/multi/from_date/max.rs deleted file mode 100644 index d39f9921f..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/max.rs +++ /dev/null @@ -1,95 +0,0 @@ -//! ComputedVecsDate using only max-value aggregation. 
- -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{DateIndex, Version}; -use derive_more::{Deref, DerefMut}; -use schemars::JsonSchema; -use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec}; - -use crate::{ComputeIndexes, indexes}; - -use crate::internal::{ComputedVecValue, LazyDateDerivedMax}; - -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(merge)] -pub struct ComputedFromDateMax -where - T: ComputedVecValue + PartialOrd + JsonSchema, -{ - pub dateindex: EagerVec>, - #[deref] - #[deref_mut] - #[traversable(flatten)] - pub rest: LazyDateDerivedMax, -} - -const VERSION: Version = Version::ZERO; - -impl ComputedFromDateMax -where - T: ComputedVecValue + JsonSchema + 'static, -{ - pub fn forced_import( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - ) -> Result { - Self::forced_import_inner(db, name, version, indexes, false) - } - - /// Import without adding _max suffix to lazy vecs. - pub fn forced_import_raw( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - ) -> Result { - Self::forced_import_inner(db, name, version, indexes, true) - } - - fn forced_import_inner( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - raw: bool, - ) -> Result { - let dateindex = EagerVec::forced_import(db, name, version + VERSION)?; - - let rest = if raw { - LazyDateDerivedMax::from_source_raw( - name, - version + VERSION, - dateindex.boxed_clone(), - indexes, - ) - } else { - LazyDateDerivedMax::from_source( - name, - version + VERSION, - dateindex.boxed_clone(), - indexes, - ) - }; - - Ok(Self { - rest, - dateindex, - }) - } - - pub fn compute_all( - &mut self, - _starting_indexes: &ComputeIndexes, - _exit: &Exit, - mut compute: F, - ) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { - compute(&mut self.dateindex)?; - Ok(()) - } -} diff --git a/crates/brk_computer/src/internal/multi/from_date/min.rs 
b/crates/brk_computer/src/internal/multi/from_date/min.rs deleted file mode 100644 index 44268f1d7..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/min.rs +++ /dev/null @@ -1,95 +0,0 @@ -//! ComputedVecsDate using only min-value aggregation. - -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{DateIndex, Version}; -use derive_more::{Deref, DerefMut}; -use schemars::JsonSchema; -use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec}; - -use crate::{ComputeIndexes, indexes}; - -use crate::internal::{ComputedVecValue, LazyDateDerivedMin}; - -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(merge)] -pub struct ComputedFromDateMin -where - T: ComputedVecValue + PartialOrd + JsonSchema, -{ - pub dateindex: EagerVec>, - #[deref] - #[deref_mut] - #[traversable(flatten)] - pub rest: LazyDateDerivedMin, -} - -const VERSION: Version = Version::ZERO; - -impl ComputedFromDateMin -where - T: ComputedVecValue + JsonSchema + 'static, -{ - pub fn forced_import( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - ) -> Result { - Self::forced_import_inner(db, name, version, indexes, false) - } - - /// Import without adding _min suffix to lazy vecs. 
- pub fn forced_import_raw( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - ) -> Result { - Self::forced_import_inner(db, name, version, indexes, true) - } - - fn forced_import_inner( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - raw: bool, - ) -> Result { - let dateindex = EagerVec::forced_import(db, name, version + VERSION)?; - - let rest = if raw { - LazyDateDerivedMin::from_source_raw( - name, - version + VERSION, - dateindex.boxed_clone(), - indexes, - ) - } else { - LazyDateDerivedMin::from_source( - name, - version + VERSION, - dateindex.boxed_clone(), - indexes, - ) - }; - - Ok(Self { - rest, - dateindex, - }) - } - - pub fn compute_all( - &mut self, - _starting_indexes: &ComputeIndexes, - _exit: &Exit, - mut compute: F, - ) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { - compute(&mut self.dateindex)?; - Ok(()) - } -} diff --git a/crates/brk_computer/src/internal/multi/from_date/mod.rs b/crates/brk_computer/src/internal/multi/from_date/mod.rs deleted file mode 100644 index e936df028..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/mod.rs +++ /dev/null @@ -1,53 +0,0 @@ -mod average; -mod binary_last; -mod binary_sum; -mod binary_sum_cum; -mod first; -mod last; -mod lazy; -mod lazy_binary_price; -mod lazy_distribution; -mod lazy_full; -mod lazy_last; -mod lazy_price; -mod lazy_sum; -mod lazy_sum_cum; -mod max; -mod min; -mod percentiles; -mod price; -mod ratio; -mod stddev; -mod unary_last; -mod value_change; -mod value_change_derived; -mod value_derived_last; -mod value_last; -mod value_lazy_last; - -pub use average::*; -pub use binary_last::*; -pub use binary_sum::*; -pub use binary_sum_cum::*; -pub use first::*; -pub use last::*; -pub use lazy::*; -pub use lazy_binary_price::*; -pub use lazy_distribution::*; -pub use lazy_full::*; -pub use lazy_last::*; -pub use lazy_price::*; -pub use lazy_sum::*; -pub use lazy_sum_cum::*; -pub use max::*; -pub use 
min::*; -pub use percentiles::*; -pub use price::*; -pub use ratio::*; -pub use stddev::*; -pub use unary_last::*; -pub use value_change::*; -pub use value_change_derived::*; -pub use value_derived_last::*; -pub use value_last::*; -pub use value_lazy_last::*; diff --git a/crates/brk_computer/src/internal/multi/from_date/price.rs b/crates/brk_computer/src/internal/multi/from_date/price.rs deleted file mode 100644 index d879c144e..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/price.rs +++ /dev/null @@ -1,48 +0,0 @@ -//! Price wrapper that provides both USD and sats representations. -//! -//! The struct derefs to dollars, making it transparent for existing code. -//! Access `.sats` for the sats representation. - -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{Dollars, SatsFract, Version}; -use derive_more::{Deref, DerefMut}; -use vecdb::Database; - -use super::{ComputedFromDateLast, LazyUnaryFromDateLast}; -use crate::{indexes, internal::DollarsToSatsFract}; - -/// Price metric with both USD and sats representations. -/// -/// Derefs to the dollars metric, so existing code works unchanged. -/// Access `.sats` for the sats exchange rate version. 
-#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(merge)] -pub struct Price { - #[deref] - #[deref_mut] - #[traversable(flatten)] - pub dollars: ComputedFromDateLast, - pub sats: LazyUnaryFromDateLast, -} - -impl Price { - pub fn forced_import( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - ) -> Result { - let dollars = ComputedFromDateLast::forced_import(db, name, version, indexes)?; - Ok(Self::from_computed(name, version, dollars)) - } - - pub fn from_computed(name: &str, version: Version, dollars: ComputedFromDateLast) -> Self { - let sats = LazyUnaryFromDateLast::from_computed_last::( - &format!("{name}_sats"), - version, - &dollars, - ); - Self { dollars, sats } - } -} diff --git a/crates/brk_computer/src/internal/multi/from_date/ratio.rs b/crates/brk_computer/src/internal/multi/from_date/ratio.rs deleted file mode 100644 index 4c66fb729..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/ratio.rs +++ /dev/null @@ -1,418 +0,0 @@ -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{Date, DateIndex, Dollars, StoredF32, Version}; -use vecdb::{ - AnyStoredVec, AnyVec, CollectableVec, Database, EagerVec, Exit, GenericStoredVec, IterableVec, - PcoVec, VecIndex, -}; - -use crate::{ - ComputeIndexes, indexes, - internal::{ - ComputedFromDateStdDev, LazyBinaryPrice, PriceTimesRatio, - StandardDeviationVecsOptions, - }, - price, - utils::get_percentile, -}; - -use super::{ComputedFromDateLast, Price}; -use crate::internal::{ComputedFromHeightLast, ComputedVecValue, LazyFromHeightLast}; -use schemars::JsonSchema; - -#[derive(Clone, Traversable)] -pub struct ComputedFromDateRatio { - pub price: Option, - - pub ratio: ComputedFromDateLast, - pub ratio_1w_sma: Option>, - pub ratio_1m_sma: Option>, - pub ratio_pct99: Option>, - pub ratio_pct98: Option>, - pub ratio_pct95: Option>, - pub ratio_pct5: Option>, - pub ratio_pct2: Option>, - pub ratio_pct1: Option>, - pub ratio_pct99_usd: Option>, - 
pub ratio_pct98_usd: Option>, - pub ratio_pct95_usd: Option>, - pub ratio_pct5_usd: Option>, - pub ratio_pct2_usd: Option>, - pub ratio_pct1_usd: Option>, - - pub ratio_sd: Option, - pub ratio_4y_sd: Option, - pub ratio_2y_sd: Option, - pub ratio_1y_sd: Option, -} - -const VERSION: Version = Version::TWO; - -impl ComputedFromDateRatio { - #[allow(clippy::too_many_arguments)] - pub fn forced_import( - db: &Database, - name: &str, - metric_price: Option<&ComputedFromHeightLast>, - version: Version, - indexes: &indexes::Vecs, - extended: bool, - ) -> Result { - let v = version + VERSION; - - macro_rules! import { - ($suffix:expr) => { - ComputedFromDateLast::forced_import(db, &format!("{name}_{}", $suffix), v, indexes) - .unwrap() - }; - } - // Create price sources first so lazy vecs can reference them - // Only compute internally when metric_price is None - let price = metric_price - .is_none() - .then(|| Price::forced_import(db, name, v, indexes).unwrap()); - - macro_rules! import_sd { - ($suffix:expr, $days:expr) => { - ComputedFromDateStdDev::forced_import( - db, - &format!("{name}_{}", $suffix), - $days, - v, - indexes, - StandardDeviationVecsOptions::default().add_all(), - metric_price, - price.as_ref().map(|p| &p.dollars), - ) - .unwrap() - }; - } - - let ratio_pct99 = extended.then(|| import!("ratio_pct99")); - let ratio_pct98 = extended.then(|| import!("ratio_pct98")); - let ratio_pct95 = extended.then(|| import!("ratio_pct95")); - let ratio_pct5 = extended.then(|| import!("ratio_pct5")); - let ratio_pct2 = extended.then(|| import!("ratio_pct2")); - let ratio_pct1 = extended.then(|| import!("ratio_pct1")); - - macro_rules! 
lazy_usd { - ($ratio:expr, $suffix:expr) => { - if let Some(mp) = metric_price { - $ratio.as_ref().map(|r| { - LazyBinaryPrice::from_height_and_dateindex_last::( - &format!("{name}_{}", $suffix), - v, - mp, - r, - ) - }) - } else { - price.as_ref().zip($ratio.as_ref()).map(|(p, r)| { - LazyBinaryPrice::from_computed_both_last::( - &format!("{name}_{}", $suffix), - v, - p, - r, - ) - }) - } - }; - } - - Ok(Self { - ratio: import!("ratio"), - ratio_1w_sma: extended.then(|| import!("ratio_1w_sma")), - ratio_1m_sma: extended.then(|| import!("ratio_1m_sma")), - ratio_sd: extended.then(|| import_sd!("ratio", usize::MAX)), - ratio_1y_sd: extended.then(|| import_sd!("ratio_1y", 365)), - ratio_2y_sd: extended.then(|| import_sd!("ratio_2y", 2 * 365)), - ratio_4y_sd: extended.then(|| import_sd!("ratio_4y", 4 * 365)), - ratio_pct99_usd: lazy_usd!(&ratio_pct99, "ratio_pct99_usd"), - ratio_pct98_usd: lazy_usd!(&ratio_pct98, "ratio_pct98_usd"), - ratio_pct95_usd: lazy_usd!(&ratio_pct95, "ratio_pct95_usd"), - ratio_pct5_usd: lazy_usd!(&ratio_pct5, "ratio_pct5_usd"), - ratio_pct2_usd: lazy_usd!(&ratio_pct2, "ratio_pct2_usd"), - ratio_pct1_usd: lazy_usd!(&ratio_pct1, "ratio_pct1_usd"), - price, - ratio_pct99, - ratio_pct98, - ratio_pct95, - ratio_pct5, - ratio_pct2, - ratio_pct1, - }) - } - - pub fn forced_import_from_lazy( - db: &Database, - name: &str, - metric_price: &LazyFromHeightLast, - version: Version, - indexes: &indexes::Vecs, - extended: bool, - ) -> Result { - let v = version + VERSION; - - macro_rules! import { - ($suffix:expr) => { - ComputedFromDateLast::forced_import(db, &format!("{name}_{}", $suffix), v, indexes) - .unwrap() - }; - } - - macro_rules! 
import_sd { - ($suffix:expr, $days:expr) => { - ComputedFromDateStdDev::forced_import_from_lazy( - db, - &format!("{name}_{}", $suffix), - $days, - v, - indexes, - StandardDeviationVecsOptions::default().add_all(), - Some(metric_price), - ) - .unwrap() - }; - } - - let ratio_pct99 = extended.then(|| import!("ratio_pct99")); - let ratio_pct98 = extended.then(|| import!("ratio_pct98")); - let ratio_pct95 = extended.then(|| import!("ratio_pct95")); - let ratio_pct5 = extended.then(|| import!("ratio_pct5")); - let ratio_pct2 = extended.then(|| import!("ratio_pct2")); - let ratio_pct1 = extended.then(|| import!("ratio_pct1")); - - macro_rules! lazy_usd { - ($ratio:expr, $suffix:expr) => { - $ratio.as_ref().map(|r| { - LazyBinaryPrice::from_lazy_height_and_dateindex_last::( - &format!("{name}_{}", $suffix), - v, - metric_price, - r, - ) - }) - }; - } - - Ok(Self { - ratio: import!("ratio"), - ratio_1w_sma: extended.then(|| import!("ratio_1w_sma")), - ratio_1m_sma: extended.then(|| import!("ratio_1m_sma")), - ratio_sd: extended.then(|| import_sd!("ratio", usize::MAX)), - ratio_1y_sd: extended.then(|| import_sd!("ratio_1y", 365)), - ratio_2y_sd: extended.then(|| import_sd!("ratio_2y", 2 * 365)), - ratio_4y_sd: extended.then(|| import_sd!("ratio_4y", 4 * 365)), - ratio_pct99_usd: lazy_usd!(&ratio_pct99, "ratio_pct99_usd"), - ratio_pct98_usd: lazy_usd!(&ratio_pct98, "ratio_pct98_usd"), - ratio_pct95_usd: lazy_usd!(&ratio_pct95, "ratio_pct95_usd"), - ratio_pct5_usd: lazy_usd!(&ratio_pct5, "ratio_pct5_usd"), - ratio_pct2_usd: lazy_usd!(&ratio_pct2, "ratio_pct2_usd"), - ratio_pct1_usd: lazy_usd!(&ratio_pct1, "ratio_pct1_usd"), - price: None, - ratio_pct99, - ratio_pct98, - ratio_pct95, - ratio_pct5, - ratio_pct2, - ratio_pct1, - }) - } - - pub fn compute_all( - &mut self, - price: &price::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - compute: F, - ) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { - self.price - .as_mut() - .unwrap() - 
.compute_all(starting_indexes, exit, compute)?; - - let date_to_price_opt: Option<&EagerVec>> = None; - self.compute_rest(price, starting_indexes, exit, date_to_price_opt) - } - - pub fn compute_rest( - &mut self, - price: &price::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - price_opt: Option<&impl IterableVec>, - ) -> Result<()> { - let closes = &price.usd.split.close.dateindex; - - let price = price_opt.unwrap_or_else(|| unsafe { - std::mem::transmute(&self.price.as_ref().unwrap().dateindex) - }); - - self.ratio.compute_all(starting_indexes, exit, |v| { - v.compute_transform2( - starting_indexes.dateindex, - closes, - price, - |(i, close, price, ..)| { - if price == Dollars::ZERO { - (i, StoredF32::from(1.0)) - } else { - (i, StoredF32::from(*close / price)) - } - }, - exit, - )?; - Ok(()) - })?; - - if self.ratio_1w_sma.is_none() { - return Ok(()); - } - - let min_ratio_date = DateIndex::try_from(Date::MIN_RATIO).unwrap(); - - self.ratio_1w_sma - .as_mut() - .unwrap() - .compute_all(starting_indexes, exit, |v| { - v.compute_sma_( - starting_indexes.dateindex, - &self.ratio.dateindex, - 7, - exit, - Some(min_ratio_date), - )?; - Ok(()) - })?; - - self.ratio_1m_sma - .as_mut() - .unwrap() - .compute_all(starting_indexes, exit, |v| { - v.compute_sma_( - starting_indexes.dateindex, - &self.ratio.dateindex, - 30, - exit, - Some(min_ratio_date), - )?; - Ok(()) - })?; - - let ratio_version = self.ratio.dateindex.version(); - self.mut_ratio_vecs() - .iter_mut() - .try_for_each(|v| -> Result<()> { - v.validate_computed_version_or_reset(ratio_version)?; - Ok(()) - })?; - - let starting_dateindex = self - .mut_ratio_vecs() - .iter() - .map(|v| DateIndex::from(v.len())) - .min() - .unwrap() - .min(starting_indexes.dateindex); - - let min_ratio_date_usize = min_ratio_date.to_usize(); - - let mut sorted = self.ratio.dateindex.collect_range( - Some(min_ratio_date_usize), - Some(starting_dateindex.to_usize()), - ); - - sorted.sort_unstable(); - - // Cache mutable 
refs before the loop to avoid repeated unwrap chains - let pct1_vec = &mut self.ratio_pct1.as_mut().unwrap().dateindex; - let pct2_vec = &mut self.ratio_pct2.as_mut().unwrap().dateindex; - let pct5_vec = &mut self.ratio_pct5.as_mut().unwrap().dateindex; - let pct95_vec = &mut self.ratio_pct95.as_mut().unwrap().dateindex; - let pct98_vec = &mut self.ratio_pct98.as_mut().unwrap().dateindex; - let pct99_vec = &mut self.ratio_pct99.as_mut().unwrap().dateindex; - - self.ratio - .dateindex - .iter() - .enumerate() - .skip(starting_dateindex.to_usize()) - .try_for_each(|(index, ratio)| -> Result<()> { - if index < min_ratio_date_usize { - pct1_vec.truncate_push_at(index, StoredF32::NAN)?; - pct2_vec.truncate_push_at(index, StoredF32::NAN)?; - pct5_vec.truncate_push_at(index, StoredF32::NAN)?; - pct95_vec.truncate_push_at(index, StoredF32::NAN)?; - pct98_vec.truncate_push_at(index, StoredF32::NAN)?; - pct99_vec.truncate_push_at(index, StoredF32::NAN)?; - } else { - let pos = sorted.binary_search(&ratio).unwrap_or_else(|pos| pos); - sorted.insert(pos, ratio); - - pct1_vec.truncate_push_at(index, get_percentile(&sorted, 0.01))?; - pct2_vec.truncate_push_at(index, get_percentile(&sorted, 0.02))?; - pct5_vec.truncate_push_at(index, get_percentile(&sorted, 0.05))?; - pct95_vec.truncate_push_at(index, get_percentile(&sorted, 0.95))?; - pct98_vec.truncate_push_at(index, get_percentile(&sorted, 0.98))?; - pct99_vec.truncate_push_at(index, get_percentile(&sorted, 0.99))?; - } - - Ok(()) - })?; - - { - let _lock = exit.lock(); - self.mut_ratio_vecs() - .into_iter() - .try_for_each(|v| v.flush())?; - } - - macro_rules! compute_pct_rest { - ($($field:ident),*) => { - $(self.$field.as_mut().unwrap().compute_rest( - starting_indexes, exit, None as Option<&EagerVec>>, - )?;)* - }; - } - compute_pct_rest!( - ratio_pct1, - ratio_pct2, - ratio_pct5, - ratio_pct95, - ratio_pct98, - ratio_pct99 - ); - - macro_rules! 
compute_sd { - ($($field:ident),*) => { - $(self.$field.as_mut().unwrap().compute_all( - starting_indexes, exit, &self.ratio.dateindex, - )?;)* - }; - } - compute_sd!(ratio_sd, ratio_4y_sd, ratio_2y_sd, ratio_1y_sd); - - Ok(()) - } - - fn mut_ratio_vecs(&mut self) -> Vec<&mut EagerVec>> { - macro_rules! collect_vecs { - ($($field:ident),*) => {{ - let mut vecs = Vec::with_capacity(6); - $(if let Some(v) = self.$field.as_mut() { vecs.push(&mut v.dateindex); })* - vecs - }}; - } - collect_vecs!( - ratio_pct1, - ratio_pct2, - ratio_pct5, - ratio_pct95, - ratio_pct98, - ratio_pct99 - ) - } -} diff --git a/crates/brk_computer/src/internal/multi/from_date/stddev.rs b/crates/brk_computer/src/internal/multi/from_date/stddev.rs deleted file mode 100644 index e9a070bef..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/stddev.rs +++ /dev/null @@ -1,503 +0,0 @@ -use std::mem; - -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{Date, DateIndex, Dollars, StoredF32, Version}; -use schemars::JsonSchema; -use vecdb::{ - AnyStoredVec, AnyVec, CollectableVec, Database, EagerVec, Exit, GenericStoredVec, IterableVec, - PcoVec, VecIndex, -}; - -use crate::{ComputeIndexes, indexes}; - -use crate::internal::{ - ComputedFromDateLast, ComputedFromHeightLast, ComputedVecValue, LazyBinaryPrice, - LazyFromHeightLast, PriceTimesRatio, -}; - -#[derive(Clone, Traversable)] -pub struct ComputedFromDateStdDev { - days: usize, - - pub sma: Option>, - - pub sd: ComputedFromDateLast, - - pub zscore: Option>, - - pub p0_5sd: Option>, - pub p1sd: Option>, - pub p1_5sd: Option>, - pub p2sd: Option>, - pub p2_5sd: Option>, - pub p3sd: Option>, - pub m0_5sd: Option>, - pub m1sd: Option>, - pub m1_5sd: Option>, - pub m2sd: Option>, - pub m2_5sd: Option>, - pub m3sd: Option>, - - pub _0sd_usd: Option>, - pub p0_5sd_usd: Option>, - pub p1sd_usd: Option>, - pub p1_5sd_usd: Option>, - pub p2sd_usd: Option>, - pub p2_5sd_usd: Option>, - pub p3sd_usd: Option>, - pub 
m0_5sd_usd: Option>, - pub m1sd_usd: Option>, - pub m1_5sd_usd: Option>, - pub m2sd_usd: Option>, - pub m2_5sd_usd: Option>, - pub m3sd_usd: Option>, -} - -#[derive(Debug, Default)] -pub struct StandardDeviationVecsOptions { - zscore: bool, - bands: bool, - price_bands: bool, -} - -impl StandardDeviationVecsOptions { - pub fn add_all(mut self) -> Self { - self.zscore = true; - self.bands = true; - self.price_bands = true; - self - } - - pub fn add_zscore(mut self) -> Self { - self.zscore = true; - self - } - - pub fn add_bands(mut self) -> Self { - self.bands = true; - self - } - - pub fn add_price_bands(mut self) -> Self { - self.bands = true; - self.price_bands = true; - self - } - - pub fn zscore(&self) -> bool { - self.zscore - } - - pub fn bands(&self) -> bool { - self.bands - } - - pub fn price_bands(&self) -> bool { - self.price_bands - } -} - -impl ComputedFromDateStdDev { - #[allow(clippy::too_many_arguments)] - pub fn forced_import( - db: &Database, - name: &str, - days: usize, - parent_version: Version, - indexes: &indexes::Vecs, - options: StandardDeviationVecsOptions, - metric_price: Option<&ComputedFromHeightLast>, - date_price: Option<&ComputedFromDateLast>, - ) -> Result { - let version = parent_version + Version::TWO; - - macro_rules! 
import { - ($suffix:expr) => { - ComputedFromDateLast::forced_import( - db, - &format!("{name}_{}", $suffix), - version, - indexes, - ) - .unwrap() - }; - } - - let sma_vec = Some(import!("sma")); - let p0_5sd = options.bands().then(|| import!("p0_5sd")); - let p1sd = options.bands().then(|| import!("p1sd")); - let p1_5sd = options.bands().then(|| import!("p1_5sd")); - let p2sd = options.bands().then(|| import!("p2sd")); - let p2_5sd = options.bands().then(|| import!("p2_5sd")); - let p3sd = options.bands().then(|| import!("p3sd")); - let m0_5sd = options.bands().then(|| import!("m0_5sd")); - let m1sd = options.bands().then(|| import!("m1sd")); - let m1_5sd = options.bands().then(|| import!("m1_5sd")); - let m2sd = options.bands().then(|| import!("m2sd")); - let m2_5sd = options.bands().then(|| import!("m2_5sd")); - let m3sd = options.bands().then(|| import!("m3sd")); - - // Create USD bands using the metric price (the denominator of the ratio). - // This converts ratio bands back to USD: usd_band = metric_price * ratio_band - macro_rules! 
lazy_usd { - ($band:expr, $suffix:expr) => { - if !options.price_bands() { - None - } else if let Some(mp) = metric_price { - $band.as_ref().map(|b| { - LazyBinaryPrice::from_height_and_dateindex_last::( - &format!("{name}_{}", $suffix), - version, - mp, - b, - ) - }) - } else if let Some(dp) = date_price { - $band.as_ref().map(|b| { - LazyBinaryPrice::from_computed_both_last::( - &format!("{name}_{}", $suffix), - version, - dp, - b, - ) - }) - } else { - None - } - }; - } - - Ok(Self { - days, - sd: import!("sd"), - zscore: options.zscore().then(|| import!("zscore")), - // Lazy USD vecs - _0sd_usd: lazy_usd!(&sma_vec, "0sd_usd"), - p0_5sd_usd: lazy_usd!(&p0_5sd, "p0_5sd_usd"), - p1sd_usd: lazy_usd!(&p1sd, "p1sd_usd"), - p1_5sd_usd: lazy_usd!(&p1_5sd, "p1_5sd_usd"), - p2sd_usd: lazy_usd!(&p2sd, "p2sd_usd"), - p2_5sd_usd: lazy_usd!(&p2_5sd, "p2_5sd_usd"), - p3sd_usd: lazy_usd!(&p3sd, "p3sd_usd"), - m0_5sd_usd: lazy_usd!(&m0_5sd, "m0_5sd_usd"), - m1sd_usd: lazy_usd!(&m1sd, "m1sd_usd"), - m1_5sd_usd: lazy_usd!(&m1_5sd, "m1_5sd_usd"), - m2sd_usd: lazy_usd!(&m2sd, "m2sd_usd"), - m2_5sd_usd: lazy_usd!(&m2_5sd, "m2_5sd_usd"), - m3sd_usd: lazy_usd!(&m3sd, "m3sd_usd"), - // Stored band sources - sma: sma_vec, - p0_5sd, - p1sd, - p1_5sd, - p2sd, - p2_5sd, - p3sd, - m0_5sd, - m1sd, - m1_5sd, - m2sd, - m2_5sd, - m3sd, - }) - } - - pub fn compute_all( - &mut self, - starting_indexes: &ComputeIndexes, - exit: &Exit, - source: &impl CollectableVec, - ) -> Result<()> { - let min_date = DateIndex::try_from(Date::MIN_RATIO).unwrap(); - - self.sma - .as_mut() - .unwrap() - .compute_all(starting_indexes, exit, |v| { - v.compute_sma_( - starting_indexes.dateindex, - source, - self.days, - exit, - Some(min_date), - )?; - Ok(()) - })?; - - let sma_opt: Option<&EagerVec>> = None; - self.compute_rest(starting_indexes, exit, sma_opt, source) - } - - pub fn compute_rest( - &mut self, - starting_indexes: &ComputeIndexes, - exit: &Exit, - sma_opt: Option<&impl IterableVec>, - source: &impl 
CollectableVec, - ) -> Result<()> { - let sma = sma_opt - .unwrap_or_else(|| unsafe { mem::transmute(&self.sma.as_ref().unwrap().dateindex) }); - - let min_date = DateIndex::try_from(Date::MIN_RATIO).unwrap(); - - let source_version = source.version(); - - self.mut_stateful_date_vecs() - .try_for_each(|v| -> Result<()> { - v.validate_computed_version_or_reset(source_version)?; - Ok(()) - })?; - - let starting_dateindex = self - .mut_stateful_date_vecs() - .map(|v| DateIndex::from(v.len())) - .min() - .unwrap() - .min(starting_indexes.dateindex); - - let mut sorted = source.collect_range( - Some(min_date.to_usize()), - Some(starting_dateindex.to_usize()), - ); - - sorted.sort_unstable(); - - macro_rules! band_ref { - ($field:ident) => { - self.$field.as_mut().map(|c| &mut c.dateindex) - }; - } - let mut p0_5sd = band_ref!(p0_5sd); - let mut p1sd = band_ref!(p1sd); - let mut p1_5sd = band_ref!(p1_5sd); - let mut p2sd = band_ref!(p2sd); - let mut p2_5sd = band_ref!(p2_5sd); - let mut p3sd = band_ref!(p3sd); - let mut m0_5sd = band_ref!(m0_5sd); - let mut m1sd = band_ref!(m1sd); - let mut m1_5sd = band_ref!(m1_5sd); - let mut m2sd = band_ref!(m2sd); - let mut m2_5sd = band_ref!(m2_5sd); - let mut m3sd = band_ref!(m3sd); - - let min_date_usize = min_date.to_usize(); - let mut sma_iter = sma.iter().skip(starting_dateindex.to_usize()); - - source - .iter() - .enumerate() - .skip(starting_dateindex.to_usize()) - .try_for_each(|(index, ratio)| -> Result<()> { - if index < min_date_usize { - self.sd.dateindex.truncate_push_at(index, StoredF32::NAN)?; - - macro_rules! push_nan { - ($($band:ident),*) => { - $(if let Some(v) = $band.as_mut() { v.truncate_push_at(index, StoredF32::NAN)? 
})* - }; - } - push_nan!(p0_5sd, p1sd, p1_5sd, p2sd, p2_5sd, p3sd, m0_5sd, m1sd, m1_5sd, m2sd, m2_5sd, m3sd); - - // Advance iterator to stay in sync - sma_iter.next(); - } else { - let pos = sorted.binary_search(&ratio).unwrap_or_else(|pos| pos); - sorted.insert(pos, ratio); - - let average = sma_iter.next().unwrap(); - - let population = - index.checked_sub(min_date_usize).unwrap().to_usize() as f32 + 1.0; - - let sd = StoredF32::from( - (sorted.iter().map(|v| (**v - *average).powi(2)).sum::() / population) - .sqrt(), - ); - - self.sd.dateindex.truncate_push_at(index, sd)?; - if let Some(v) = p0_5sd.as_mut() { - v.truncate_push_at(index, average + StoredF32::from(0.5 * *sd))? - } - if let Some(v) = p1sd.as_mut() { - v.truncate_push_at(index, average + sd)? - } - if let Some(v) = p1_5sd.as_mut() { - v.truncate_push_at(index, average + StoredF32::from(1.5 * *sd))? - } - if let Some(v) = p2sd.as_mut() { - v.truncate_push_at(index, average + 2 * sd)? - } - if let Some(v) = p2_5sd.as_mut() { - v.truncate_push_at(index, average + StoredF32::from(2.5 * *sd))? - } - if let Some(v) = p3sd.as_mut() { - v.truncate_push_at(index, average + 3 * sd)? - } - if let Some(v) = m0_5sd.as_mut() { - v.truncate_push_at(index, average - StoredF32::from(0.5 * *sd))? - } - if let Some(v) = m1sd.as_mut() { - v.truncate_push_at(index, average - sd)? - } - if let Some(v) = m1_5sd.as_mut() { - v.truncate_push_at(index, average - StoredF32::from(1.5 * *sd))? - } - if let Some(v) = m2sd.as_mut() { - v.truncate_push_at(index, average - 2 * sd)? - } - if let Some(v) = m2_5sd.as_mut() { - v.truncate_push_at(index, average - StoredF32::from(2.5 * *sd))? - } - if let Some(v) = m3sd.as_mut() { - v.truncate_push_at(index, average - 3 * sd)? 
- } - } - - Ok(()) - })?; - - drop(sma_iter); - - { - let _lock = exit.lock(); - self.mut_stateful_date_vecs().try_for_each(|v| v.flush())?; - } - - self.mut_stateful_computed().try_for_each(|v| { - v.compute_rest( - starting_indexes, - exit, - None as Option<&EagerVec>>, - ) - })?; - - if let Some(zscore) = self.zscore.as_mut() { - zscore.compute_all(starting_indexes, exit, |vec| { - vec.compute_zscore( - starting_indexes.dateindex, - source, - sma, - &self.sd.dateindex, - exit, - )?; - Ok(()) - })?; - } - - Ok(()) - } - - fn mut_stateful_computed(&mut self) -> impl Iterator> { - [ - Some(&mut self.sd), - self.p0_5sd.as_mut(), - self.p1sd.as_mut(), - self.p1_5sd.as_mut(), - self.p2sd.as_mut(), - self.p2_5sd.as_mut(), - self.p3sd.as_mut(), - self.m0_5sd.as_mut(), - self.m1sd.as_mut(), - self.m1_5sd.as_mut(), - self.m2sd.as_mut(), - self.m2_5sd.as_mut(), - self.m3sd.as_mut(), - ] - .into_iter() - .flatten() - } - - fn mut_stateful_date_vecs( - &mut self, - ) -> impl Iterator>> { - self.mut_stateful_computed().map(|c| &mut c.dateindex) - } - - #[allow(clippy::too_many_arguments)] - pub fn forced_import_from_lazy( - db: &Database, - name: &str, - days: usize, - parent_version: Version, - indexes: &indexes::Vecs, - options: StandardDeviationVecsOptions, - metric_price: Option<&LazyFromHeightLast>, - ) -> Result { - let version = parent_version + Version::TWO; - - macro_rules! 
import { - ($suffix:expr) => { - ComputedFromDateLast::forced_import( - db, - &format!("{name}_{}", $suffix), - version, - indexes, - ) - .unwrap() - }; - } - - let sma_vec = Some(import!("sma")); - let p0_5sd = options.bands().then(|| import!("p0_5sd")); - let p1sd = options.bands().then(|| import!("p1sd")); - let p1_5sd = options.bands().then(|| import!("p1_5sd")); - let p2sd = options.bands().then(|| import!("p2sd")); - let p2_5sd = options.bands().then(|| import!("p2_5sd")); - let p3sd = options.bands().then(|| import!("p3sd")); - let m0_5sd = options.bands().then(|| import!("m0_5sd")); - let m1sd = options.bands().then(|| import!("m1sd")); - let m1_5sd = options.bands().then(|| import!("m1_5sd")); - let m2sd = options.bands().then(|| import!("m2sd")); - let m2_5sd = options.bands().then(|| import!("m2_5sd")); - let m3sd = options.bands().then(|| import!("m3sd")); - - macro_rules! lazy_usd { - ($band:expr, $suffix:expr) => { - metric_price - .zip($band.as_ref()) - .filter(|_| options.price_bands()) - .map(|(mp, b)| { - LazyBinaryPrice::from_lazy_height_and_dateindex_last::( - &format!("{name}_{}", $suffix), - version, - mp, - b, - ) - }) - }; - } - - Ok(Self { - days, - sd: import!("sd"), - zscore: options.zscore().then(|| import!("zscore")), - _0sd_usd: lazy_usd!(&sma_vec, "0sd_usd"), - p0_5sd_usd: lazy_usd!(&p0_5sd, "p0_5sd_usd"), - p1sd_usd: lazy_usd!(&p1sd, "p1sd_usd"), - p1_5sd_usd: lazy_usd!(&p1_5sd, "p1_5sd_usd"), - p2sd_usd: lazy_usd!(&p2sd, "p2sd_usd"), - p2_5sd_usd: lazy_usd!(&p2_5sd, "p2_5sd_usd"), - p3sd_usd: lazy_usd!(&p3sd, "p3sd_usd"), - m0_5sd_usd: lazy_usd!(&m0_5sd, "m0_5sd_usd"), - m1sd_usd: lazy_usd!(&m1sd, "m1sd_usd"), - m1_5sd_usd: lazy_usd!(&m1_5sd, "m1_5sd_usd"), - m2sd_usd: lazy_usd!(&m2sd, "m2sd_usd"), - m2_5sd_usd: lazy_usd!(&m2_5sd, "m2_5sd_usd"), - m3sd_usd: lazy_usd!(&m3sd, "m3sd_usd"), - sma: sma_vec, - p0_5sd, - p1sd, - p1_5sd, - p2sd, - p2_5sd, - p3sd, - m0_5sd, - m1sd, - m1_5sd, - m2sd, - m2_5sd, - m3sd, - }) - } -} diff --git 
a/crates/brk_computer/src/internal/multi/from_date/unary_last.rs b/crates/brk_computer/src/internal/multi/from_date/unary_last.rs deleted file mode 100644 index 21cd3f98b..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/unary_last.rs +++ /dev/null @@ -1,58 +0,0 @@ -//! Unary transform composite from DateIndex - Last aggregation only. - -use brk_traversable::Traversable; -use brk_types::{ - DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex, -}; -use schemars::JsonSchema; -use vecdb::{IterableCloneableVec, LazyVecFrom1, UnaryTransform}; - -use crate::internal::{ComputedFromDateLast, ComputedVecValue, LazyTransformLast}; - -const VERSION: Version = Version::ZERO; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyUnaryFromDateLast -where - T: ComputedVecValue + PartialOrd + JsonSchema, - ST: ComputedVecValue, -{ - pub dateindex: LazyVecFrom1, - pub weekindex: LazyTransformLast, - pub monthindex: LazyTransformLast, - pub quarterindex: LazyTransformLast, - pub semesterindex: LazyTransformLast, - pub yearindex: LazyTransformLast, - pub decadeindex: LazyTransformLast, -} - -impl LazyUnaryFromDateLast -where - T: ComputedVecValue + JsonSchema + 'static, - ST: ComputedVecValue + JsonSchema, -{ - pub fn from_computed_last>( - name: &str, - version: Version, - source: &ComputedFromDateLast, - ) -> Self { - let v = version + VERSION; - - macro_rules! 
period { - ($p:ident) => { - LazyTransformLast::from_lazy_last::(name, v, &source.$p) - }; - } - - Self { - dateindex: LazyVecFrom1::transformed::(name, v, source.dateindex.boxed_clone()), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - } - } -} diff --git a/crates/brk_computer/src/internal/multi/from_date/value_change.rs b/crates/brk_computer/src/internal/multi/from_date/value_change.rs deleted file mode 100644 index ec56374dc..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/value_change.rs +++ /dev/null @@ -1,77 +0,0 @@ -//! Change values from DateIndex - stores signed sats (changes can be negative). - -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{DateIndex, Dollars, Sats, SatsSigned, Version}; -use vecdb::{CollectableVec, Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec}; - -use crate::{ComputeIndexes, indexes, price}; - -use super::LazyValueChangeDateDerived; - -const VERSION: Version = Version::ZERO; - -/// Change values indexed by date - uses signed sats since changes can be negative. -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct ValueChangeFromDate { - #[traversable(rename = "sats")] - pub sats: EagerVec>, - #[traversable(flatten)] - pub rest: LazyValueChangeDateDerived, -} - -impl ValueChangeFromDate { - pub fn forced_import( - db: &Database, - name: &str, - version: Version, - compute_dollars: bool, - indexes: &indexes::Vecs, - ) -> Result { - let sats = EagerVec::forced_import(db, name, version + VERSION)?; - - let rest = LazyValueChangeDateDerived::from_source( - db, - name, - sats.boxed_clone(), - version + VERSION, - compute_dollars, - indexes, - )?; - - Ok(Self { sats, rest }) - } - - /// Compute N-day change from unsigned sats source and optional dollars source. 
- pub fn compute_change( - &mut self, - starting_dateindex: DateIndex, - sats_source: &impl CollectableVec, - dollars_source: Option<&impl CollectableVec>, - period: usize, - exit: &Exit, - ) -> Result<()> { - self.sats - .compute_change(starting_dateindex, sats_source, period, exit)?; - - if let (Some(dollars), Some(source)) = (self.rest.dollars.as_mut(), dollars_source) { - dollars - .dateindex - .compute_change(starting_dateindex, source, period, exit)?; - } - - Ok(()) - } - - /// Compute dollars from price after sats change is computed. - pub fn compute_dollars_from_price( - &mut self, - price: Option<&price::Vecs>, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.rest - .compute_dollars_from_price(price, starting_indexes, exit) - } -} diff --git a/crates/brk_computer/src/internal/multi/from_date/value_change_derived.rs b/crates/brk_computer/src/internal/multi/from_date/value_change_derived.rs deleted file mode 100644 index f3c880000..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/value_change_derived.rs +++ /dev/null @@ -1,84 +0,0 @@ -//! Lazy derived values for change (bitcoin from sats, period aggregations). - -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{Bitcoin, DateIndex, Dollars, SatsSigned, Version}; -use vecdb::{Database, Exit, IterableBoxedVec}; - -use crate::{ - ComputeIndexes, indexes, - internal::{ComputedFromDateLast, LazyDateDerivedLast, LazyFromDateLast, SatsSignedToBitcoin}, - price, - traits::ComputeFromBitcoin, - utils::OptionExt, -}; - -const VERSION: Version = Version::ZERO; - -/// Lazy derived values for change (bitcoin from sats, period aggregations). 
-#[derive(Clone, Traversable)] -pub struct LazyValueChangeDateDerived { - pub sats: LazyDateDerivedLast, - pub bitcoin: LazyFromDateLast, - pub dollars: Option>, -} - -impl LazyValueChangeDateDerived { - pub fn from_source( - db: &Database, - name: &str, - source: IterableBoxedVec, - version: Version, - compute_dollars: bool, - indexes: &indexes::Vecs, - ) -> Result { - let sats = - LazyDateDerivedLast::from_source(name, version + VERSION, source.clone(), indexes); - - let bitcoin = LazyFromDateLast::from_derived::( - &format!("{name}_btc"), - version + VERSION, - source, - &sats, - ); - - let dollars = compute_dollars - .then(|| { - ComputedFromDateLast::forced_import( - db, - &format!("{name}_usd"), - version + VERSION, - indexes, - ) - }) - .transpose()?; - - Ok(Self { - sats, - bitcoin, - dollars, - }) - } - - pub fn compute_dollars_from_price( - &mut self, - price: Option<&price::Vecs>, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - if let Some(dollars) = self.dollars.as_mut() { - let dateindex_to_bitcoin = &*self.bitcoin.dateindex; - let dateindex_to_price_close = &price.u().usd.split.close.dateindex; - - dollars.compute_all(starting_indexes, exit, |v| { - v.compute_from_bitcoin( - starting_indexes.dateindex, - dateindex_to_bitcoin, - dateindex_to_price_close, - exit, - ) - })?; - } - Ok(()) - } -} diff --git a/crates/brk_computer/src/internal/multi/from_date/value_derived_last.rs b/crates/brk_computer/src/internal/multi/from_date/value_derived_last.rs deleted file mode 100644 index ec949cb3f..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/value_derived_last.rs +++ /dev/null @@ -1,86 +0,0 @@ -//! Value type for Derived Last pattern from DateIndex (when source is external). 
- -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{Bitcoin, DateIndex, Dollars, Sats, Version}; -use vecdb::{Database, Exit, IterableBoxedVec}; - -use crate::{ - ComputeIndexes, indexes, - internal::{ComputedFromDateLast, LazyDateDerivedLast, LazyFromDateLast, SatsToBitcoin}, - price, - traits::ComputeFromBitcoin, - utils::OptionExt, -}; - -#[derive(Clone, Traversable)] -pub struct LazyValueDateDerivedLast { - pub sats: LazyDateDerivedLast, - pub bitcoin: LazyFromDateLast, - pub dollars: Option>, -} - -const VERSION: Version = Version::ZERO; - -impl LazyValueDateDerivedLast { - pub fn from_source( - db: &Database, - name: &str, - source: IterableBoxedVec, - version: Version, - compute_dollars: bool, - indexes: &indexes::Vecs, - ) -> Result { - let sats = LazyDateDerivedLast::from_source(name, version + VERSION, source.clone(), indexes); - - let bitcoin = LazyFromDateLast::from_derived::( - &format!("{name}_btc"), - version + VERSION, - source, - &sats, - ); - - let dollars = compute_dollars.then(|| { - ComputedFromDateLast::forced_import(db, &format!("{name}_usd"), version + VERSION, indexes) - .unwrap() - }); - - Ok(Self { - sats, - bitcoin, - dollars, - }) - } - - pub fn compute_dollars(&mut self, mut compute: F) -> Result<()> - where - F: FnMut(&mut ComputedFromDateLast) -> Result<()>, - { - if let Some(dollars) = self.dollars.as_mut() { - compute(dollars)?; - } - Ok(()) - } - - pub fn compute_dollars_from_price( - &mut self, - price: Option<&price::Vecs>, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - if let Some(dollars) = self.dollars.as_mut() { - let dateindex_to_bitcoin = &*self.bitcoin.dateindex; - let dateindex_to_price_close = &price.u().usd.split.close.dateindex; - - dollars.compute_all(starting_indexes, exit, |v| { - v.compute_from_bitcoin( - starting_indexes.dateindex, - dateindex_to_bitcoin, - dateindex_to_price_close, - exit, - ) - })?; - } - Ok(()) - } -} diff --git 
a/crates/brk_computer/src/internal/multi/from_date/value_last.rs b/crates/brk_computer/src/internal/multi/from_date/value_last.rs deleted file mode 100644 index c2ee55ca6..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/value_last.rs +++ /dev/null @@ -1,146 +0,0 @@ -//! Value type for Last pattern from DateIndex. - -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{DateIndex, Dollars, Sats, Version}; -use derive_more::{Deref, DerefMut}; -use vecdb::{CollectableVec, Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec}; - -use crate::{ComputeIndexes, indexes, price}; - -use super::{ComputedFromDateLast, LazyValueDateDerivedLast}; - -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(merge)] -pub struct ValueFromDateLast { - #[traversable(rename = "sats")] - pub sats_dateindex: EagerVec>, - #[deref] - #[deref_mut] - pub rest: LazyValueDateDerivedLast, -} - -const VERSION: Version = Version::ZERO; - -impl ValueFromDateLast { - pub fn forced_import( - db: &Database, - name: &str, - version: Version, - compute_dollars: bool, - indexes: &indexes::Vecs, - ) -> Result { - let sats_dateindex = EagerVec::forced_import(db, name, version + VERSION)?; - - let rest = LazyValueDateDerivedLast::from_source( - db, - name, - sats_dateindex.boxed_clone(), - version + VERSION, - compute_dollars, - indexes, - )?; - - Ok(Self { - sats_dateindex, - rest, - }) - } - - pub fn compute_sats(&mut self, mut compute: F) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { - compute(&mut self.sats_dateindex) - } - - pub fn compute_all( - &mut self, - price: Option<&price::Vecs>, - starting_indexes: &ComputeIndexes, - exit: &Exit, - compute: F, - ) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { - self.compute_sats(compute)?; - self.compute_dollars_from_price(price, starting_indexes, exit) - } - - pub fn compute_dollars(&mut self, compute: F) -> Result<()> - where - F: FnMut(&mut 
ComputedFromDateLast) -> Result<()>, - { - self.rest.compute_dollars(compute) - } - - pub fn compute_dollars_from_price( - &mut self, - price: Option<&price::Vecs>, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.rest - .compute_dollars_from_price(price, starting_indexes, exit) - } - - /// Compute both sats and dollars using provided closures. - pub fn compute_both( - &mut self, - compute_sats: S, - compute_dollars: D, - ) -> Result<()> - where - S: FnOnce(&mut EagerVec>) -> Result<()>, - D: FnOnce(&mut ComputedFromDateLast) -> Result<()>, - { - compute_sats(&mut self.sats_dateindex)?; - if let Some(dollars) = self.rest.dollars.as_mut() { - compute_dollars(dollars)?; - } - Ok(()) - } - - /// Compute EMA for sats and optionally dollars from source vecs. - pub fn compute_ema( - &mut self, - starting_dateindex: DateIndex, - sats_source: &impl CollectableVec, - dollars_source: Option<&impl CollectableVec>, - period: usize, - exit: &Exit, - ) -> Result<()> { - self.sats_dateindex - .compute_ema(starting_dateindex, sats_source, period, exit)?; - - if let (Some(dollars), Some(source)) = (self.rest.dollars.as_mut(), dollars_source) { - dollars - .dateindex - .compute_ema(starting_dateindex, source, period, exit)?; - } - - Ok(()) - } - - /// Compute N-day change for sats and optionally dollars from source vecs. 
- pub fn compute_change( - &mut self, - starting_dateindex: DateIndex, - sats_source: &impl CollectableVec, - dollars_source: Option<&impl CollectableVec>, - period: usize, - exit: &Exit, - ) -> Result<()> { - self.sats_dateindex - .compute_change(starting_dateindex, sats_source, period, exit)?; - - if let (Some(dollars), Some(source)) = (self.rest.dollars.as_mut(), dollars_source) { - dollars - .dateindex - .compute_change(starting_dateindex, source, period, exit)?; - } - - Ok(()) - } -} diff --git a/crates/brk_computer/src/internal/multi/from_date/value_lazy_last.rs b/crates/brk_computer/src/internal/multi/from_date/value_lazy_last.rs deleted file mode 100644 index c0aaaaee7..000000000 --- a/crates/brk_computer/src/internal/multi/from_date/value_lazy_last.rs +++ /dev/null @@ -1,94 +0,0 @@ -//! Lazy value type for Last pattern from DateIndex. - -use brk_traversable::Traversable; -use brk_types::{Bitcoin, Dollars, Sats, Version}; -use vecdb::{IterableCloneableVec, UnaryTransform}; - -use crate::internal::{LazyFromDateLast, ValueFromHeightLast, ValueFromDateLast}; - -const VERSION: Version = Version::ZERO; - -#[derive(Clone, Traversable)] -pub struct LazyValueFromDateLast { - pub sats: LazyFromDateLast, - pub bitcoin: LazyFromDateLast, - pub dollars: Option>, -} - -impl LazyValueFromDateLast { - pub fn from_source( - name: &str, - source: &ValueFromDateLast, - version: Version, - ) -> Self - where - SatsTransform: UnaryTransform, - BitcoinTransform: UnaryTransform, - DollarsTransform: UnaryTransform, - { - let v = version + VERSION; - - let sats = LazyFromDateLast::from_derived::( - name, - v, - source.sats_dateindex.boxed_clone(), - &source.sats, - ); - - let bitcoin = LazyFromDateLast::from_derived::( - &format!("{name}_btc"), - v, - source.sats_dateindex.boxed_clone(), - &source.sats, - ); - - let dollars = source.dollars.as_ref().map(|dollars_source| { - LazyFromDateLast::from_computed::( - &format!("{name}_usd"), - v, - dollars_source.dateindex.boxed_clone(), - 
dollars_source, - ) - }); - - Self { sats, bitcoin, dollars } - } - - pub fn from_block_source( - name: &str, - source: &ValueFromHeightLast, - version: Version, - ) -> Self - where - SatsTransform: UnaryTransform, - BitcoinTransform: UnaryTransform, - DollarsTransform: UnaryTransform, - { - let v = version + VERSION; - - let sats = LazyFromDateLast::from_derived::( - name, - v, - source.sats.rest.dateindex.boxed_clone(), - &source.sats.rest.dates, - ); - - let bitcoin = LazyFromDateLast::from_derived::( - &format!("{name}_btc"), - v, - source.sats.rest.dateindex.boxed_clone(), - &source.sats.rest.dates, - ); - - let dollars = source.dollars.as_ref().map(|dollars_source| { - LazyFromDateLast::from_derived::( - &format!("{name}_usd"), - v, - dollars_source.rest.dateindex.boxed_clone(), - &dollars_source.rest.dates, - ) - }); - - Self { sats, bitcoin, dollars } - } -} diff --git a/crates/brk_computer/src/internal/multi/from_height/binary_full.rs b/crates/brk_computer/src/internal/multi/from_height/binary_full.rs index 3d344e8e4..820db075a 100644 --- a/crates/brk_computer/src/internal/multi/from_height/binary_full.rs +++ b/crates/brk_computer/src/internal/multi/from_height/binary_full.rs @@ -4,10 +4,11 @@ use brk_traversable::Traversable; use brk_types::{Height, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{BinaryTransform, IterableBoxedVec, LazyVecFrom2}; +use vecdb::{BinaryTransform, ReadableBoxedVec, LazyVecFrom2}; use crate::internal::{ - ComputedFromHeightFull, ComputedVecValue, TxDerivedFull, LazyBinaryHeightDerivedSumCum, NumericValue, + ComputedFromHeightFull, ComputedVecValue, TxDerivedFull, LazyBinaryHeightDerivedSumCum, + NumericValue, }; #[derive(Clone, Deref, DerefMut, Traversable)] @@ -22,7 +23,7 @@ where pub height: LazyVecFrom2, #[deref] #[deref_mut] - pub rest: LazyBinaryHeightDerivedSumCum, + pub rest: Box>, } const VERSION: Version = Version::ZERO; @@ -33,11 +34,11 @@ where S1T: NumericValue + JsonSchema, S2T: 
NumericValue + JsonSchema, { - pub fn from_height_and_txindex>( + pub(crate) fn from_height_and_txindex>( name: &str, version: Version, - height_source1: IterableBoxedVec, - height_source2: IterableBoxedVec, + height_source1: ReadableBoxedVec, + height_source2: ReadableBoxedVec, source1: &ComputedFromHeightFull, source2: &TxDerivedFull, ) -> Self { @@ -45,16 +46,9 @@ where Self { height: LazyVecFrom2::transformed::(name, v, height_source1, height_source2), - rest: LazyBinaryHeightDerivedSumCum::from_derived_full::( - name, - v, - &source1.dateindex.sum_cum, - &source1.rest, - &source1.difficultyepoch, - &source2.dateindex.sum_cum, - &source2.dates, - &source2.difficultyepoch, - ), + rest: Box::new(LazyBinaryHeightDerivedSumCum::from_full_sources::( + name, v, &source1.rest, source2, + )), } } } diff --git a/crates/brk_computer/src/internal/multi/from_height/binary_last.rs b/crates/brk_computer/src/internal/multi/from_height/binary_last.rs index 3f4196513..bb4aed44d 100644 --- a/crates/brk_computer/src/internal/multi/from_height/binary_last.rs +++ b/crates/brk_computer/src/internal/multi/from_height/binary_last.rs @@ -1,15 +1,16 @@ //! Lazy binary transform from two SumCum sources, producing Last (cumulative) ratios only. 
use brk_traversable::Traversable; -use brk_types::{DateIndex, DifficultyEpoch, Height, Version}; +use brk_types::{Height, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{BinaryTransform, IterableBoxedVec, IterableCloneableVec, LazyVecFrom2}; +use vecdb::{BinaryTransform, ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom2}; use crate::internal::{ - ComputedFromHeightLast, ComputedFromHeightSumCum, ComputedFromHeightAndDateLast, ComputedVecValue, - LazyBinaryComputedFromHeightLast, LazyBinaryFromDateLast, LazyBinaryHeightDerivedLast, - LazyBinaryTransformLast, LazyDateDerivedLast, LazyFromHeightLast, NumericValue, + ComputedFromHeightLast, ComputedFromHeightSumCum, ComputedHeightDerivedLast, ComputedVecValue, + LazyBinaryComputedFromHeightLast, LazyBinaryComputedFromHeightSum, + LazyBinaryHeightDerivedLast, LazyBinaryTransformLast, + LazyFromHeightLast, NumericValue, }; #[derive(Clone, Deref, DerefMut, Traversable)] @@ -23,18 +24,54 @@ where pub height: LazyVecFrom2, #[deref] #[deref_mut] - pub rest: LazyBinaryHeightDerivedLast, + pub rest: Box>, } const VERSION: Version = Version::ZERO; +/// Helper macro: given two deref-able sources whose `.$p` fields implement +/// `ReadableCloneableVec`, build all 17 period fields of a `LazyBinaryHeightDerivedLast`. +macro_rules! build_rest { + ($name:expr, $v:expr, $source1:expr, $source2:expr) => {{ + macro_rules! 
period { + ($p:ident) => { + LazyBinaryTransformLast::from_vecs::( + $name, + $v, + $source1.$p.read_only_boxed_clone(), + $source2.$p.read_only_boxed_clone(), + ) + }; + } + Box::new(LazyBinaryHeightDerivedLast { + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: period!(halvingepoch), + difficultyepoch: period!(difficultyepoch), + }) + }}; +} + impl LazyBinaryFromHeightLast where T: ComputedVecValue + JsonSchema + 'static, S1T: ComputedVecValue + JsonSchema, S2T: ComputedVecValue + JsonSchema, { - pub fn from_computed_sum_cum>( + pub(crate) fn from_computed_sum_cum>( name: &str, version: Version, source1: &ComputedFromHeightSumCum, @@ -50,14 +87,14 @@ where height: LazyVecFrom2::transformed::( name, v, - source1.height_cumulative.boxed_clone(), - source2.height_cumulative.boxed_clone(), + source1.height_cumulative.read_only_boxed_clone(), + source2.height_cumulative.read_only_boxed_clone(), ), - rest: LazyBinaryHeightDerivedLast::from_computed_sum_cum::(name, v, source1, source2), + rest: Box::new(LazyBinaryHeightDerivedLast::from_computed_sum_cum::(name, v, source1, source2)), } } - pub fn from_computed_last>( + pub(crate) fn from_computed_last>( name: &str, version: Version, source1: &ComputedFromHeightLast, @@ -73,14 +110,14 @@ where height: LazyVecFrom2::transformed::( name, v, - source1.height.boxed_clone(), - source2.height.boxed_clone(), + source1.height.read_only_boxed_clone(), + source2.height.read_only_boxed_clone(), ), - rest: LazyBinaryHeightDerivedLast::from_computed_last::(name, v, source1, source2), + rest: Box::new(LazyBinaryHeightDerivedLast::from_computed_last::(name, v, source1, 
source2)), } } - pub fn from_block_last_and_lazy_block_last( + pub(crate) fn from_block_last_and_lazy_block_last( name: &str, version: Version, source1: &ComputedFromHeightLast, @@ -97,16 +134,16 @@ where height: LazyVecFrom2::transformed::( name, v, - source1.height.boxed_clone(), - source2.height.boxed_clone(), + source1.height.read_only_boxed_clone(), + source2.height.read_only_boxed_clone(), ), - rest: LazyBinaryHeightDerivedLast::from_block_last_and_lazy_block_last::( + rest: Box::new(LazyBinaryHeightDerivedLast::from_block_last_and_lazy_block_last::( name, v, source1, source2, - ), + )), } } - pub fn from_lazy_block_last_and_block_last( + pub(crate) fn from_lazy_block_last_and_block_last( name: &str, version: Version, source1: &LazyFromHeightLast, @@ -123,50 +160,25 @@ where height: LazyVecFrom2::transformed::( name, v, - source1.height.boxed_clone(), - source2.height.boxed_clone(), + source1.height.read_only_boxed_clone(), + source2.height.read_only_boxed_clone(), ), - rest: LazyBinaryHeightDerivedLast::from_lazy_block_last_and_block_last::( + rest: Box::new(LazyBinaryHeightDerivedLast::from_lazy_block_last_and_block_last::( name, v, source1, source2, - ), + )), } } - pub fn from_computed_height_date_last>( + /// Create from a ComputedFromHeightLast and a LazyBinaryFromHeightLast. + pub(crate) fn from_block_last_and_binary_block( name: &str, version: Version, - source1: &ComputedFromHeightAndDateLast, - source2: &ComputedFromHeightAndDateLast, - ) -> Self - where - S1T: PartialOrd, - S2T: PartialOrd, - { - let v = version + VERSION; - - Self { - height: LazyVecFrom2::transformed::( - name, - v, - source1.height.boxed_clone(), - source2.height.boxed_clone(), - ), - rest: LazyBinaryHeightDerivedLast::from_computed_height_date_last::( - name, v, source1, source2, - ), - } - } - - /// Create from a ComputedFromHeightAndDateLast and a LazyBinaryFromHeightLast. 
- pub fn from_computed_height_date_and_binary_block( - name: &str, - version: Version, - source1: &ComputedFromHeightAndDateLast, + source1: &ComputedFromHeightLast, source2: &LazyBinaryFromHeightLast, ) -> Self where F: BinaryTransform, - S1T: PartialOrd, + S1T: NumericValue, S2aT: ComputedVecValue + JsonSchema, S2bT: ComputedVecValue + JsonSchema, { @@ -176,93 +188,15 @@ where height: LazyVecFrom2::transformed::( name, v, - source1.height.boxed_clone(), - source2.height.boxed_clone(), + source1.height.read_only_boxed_clone(), + source2.height.read_only_boxed_clone(), ), - rest: LazyBinaryHeightDerivedLast { - dates: LazyBinaryFromDateLast::from_computed_and_binary_last::( - name, - v, - &source1.rest, - &source2.rest.dates, - ), - difficultyepoch: LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.difficultyepoch.boxed_clone(), - source2.rest.difficultyepoch.boxed_clone(), - ), - }, - } - } - - /// Create from a ComputedFromHeightAndDateLast and a ComputedFromHeightLast. - pub fn from_computed_height_date_and_block_last>( - name: &str, - version: Version, - source1: &ComputedFromHeightAndDateLast, - source2: &ComputedFromHeightLast, - ) -> Self - where - S1T: PartialOrd, - S2T: NumericValue, - { - let v = version + VERSION; - - Self { - height: LazyVecFrom2::transformed::( - name, - v, - source1.height.boxed_clone(), - source2.height.boxed_clone(), - ), - rest: LazyBinaryHeightDerivedLast::from_computed_height_date_and_block_last::( - name, v, source1, source2, - ), - } - } - - /// Create from a LazyBinaryFromHeightLast and a ComputedFromHeightLast. 
- pub fn from_binary_block_and_computed_block_last( - name: &str, - version: Version, - source1: &LazyBinaryFromHeightLast, - source2: &ComputedFromHeightLast, - ) -> Self - where - F: BinaryTransform, - S1aT: ComputedVecValue + JsonSchema, - S1bT: ComputedVecValue + JsonSchema, - S2T: NumericValue, - { - let v = version + VERSION; - - Self { - height: LazyVecFrom2::transformed::( - name, - v, - source1.height.boxed_clone(), - source2.height.boxed_clone(), - ), - rest: LazyBinaryHeightDerivedLast { - dates: LazyBinaryFromDateLast::from_binary_and_block_last::( - name, - v, - &source1.rest.dates, - source2, - ), - difficultyepoch: LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.rest.difficultyepoch.boxed_clone(), - source2.difficultyepoch.boxed_clone(), - ), - }, + rest: build_rest!(name, v, source1, source2), } } /// Create from two LazyBinaryFromHeightLast sources. - pub fn from_both_binary_block( + pub(crate) fn from_both_binary_block( name: &str, version: Version, source1: &LazyBinaryFromHeightLast, @@ -281,75 +215,44 @@ where height: LazyVecFrom2::transformed::( name, v, - source1.height.boxed_clone(), - source2.height.boxed_clone(), + source1.height.read_only_boxed_clone(), + source2.height.read_only_boxed_clone(), ), - rest: LazyBinaryHeightDerivedLast { - dates: LazyBinaryFromDateLast::from_both_binary_last::( - name, - v, - &source1.rest.dates, - &source2.rest.dates, - ), - difficultyepoch: LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.rest.difficultyepoch.boxed_clone(), - source2.rest.difficultyepoch.boxed_clone(), - ), - }, + rest: build_rest!(name, v, source1, source2), } } - /// Create from separate height, difficultyepoch, and date sources. - /// - /// Use when sources are split across different types (e.g., ValueFromHeightAndDateLast + ComputedFromHeightLast). 
- #[allow(clippy::too_many_arguments)] - pub fn from_height_difficultyepoch_dates>( + /// Create from separate height sources and two `ComputedHeightDerivedLast` structs. + pub(crate) fn from_height_and_derived_last>( name: &str, version: Version, - height_source1: IterableBoxedVec, - height_source2: IterableBoxedVec, - difficultyepoch_source1: IterableBoxedVec, - difficultyepoch_source2: IterableBoxedVec, - dateindex_source1: IterableBoxedVec, - dates_source1: &LazyDateDerivedLast, - dateindex_source2: IterableBoxedVec, - dates_source2: &LazyDateDerivedLast, - ) -> Self { + height_source1: ReadableBoxedVec, + height_source2: ReadableBoxedVec, + derived1: &ComputedHeightDerivedLast, + derived2: &ComputedHeightDerivedLast, + ) -> Self + where + S1T: NumericValue, + S2T: NumericValue, + { let v = version + VERSION; Self { height: LazyVecFrom2::transformed::(name, v, height_source1, height_source2), - rest: LazyBinaryHeightDerivedLast { - dates: LazyBinaryFromDateLast::from_both_derived_last::( - name, - v, - dateindex_source1, - dates_source1, - dateindex_source2, - dates_source2, - ), - difficultyepoch: LazyBinaryTransformLast::from_vecs::( - name, - v, - difficultyepoch_source1, - difficultyepoch_source2, - ), - }, + rest: build_rest!(name, v, derived1, derived2), } } - /// Create from a ComputedFromHeightAndDateLast and a LazyBinaryComputedFromHeightLast. - pub fn from_computed_height_date_and_lazy_binary_block_last( + /// Create from a ComputedFromHeightLast and a LazyBinaryComputedFromHeightLast. 
+ pub(crate) fn from_block_last_and_lazy_binary_computed_block_last( name: &str, version: Version, - source1: &ComputedFromHeightAndDateLast, + source1: &ComputedFromHeightLast, source2: &LazyBinaryComputedFromHeightLast, ) -> Self where F: BinaryTransform, - S1T: PartialOrd, + S1T: NumericValue, S2aT: ComputedVecValue + JsonSchema, S2bT: ComputedVecValue + JsonSchema, { @@ -359,30 +262,42 @@ where height: LazyVecFrom2::transformed::( name, v, - source1.height.boxed_clone(), - source2.height.boxed_clone(), + source1.height.read_only_boxed_clone(), + source2.height.read_only_boxed_clone(), ), - rest: LazyBinaryHeightDerivedLast { - dates: LazyBinaryFromDateLast::from_both_derived_last::( - name, - v, - source1.rest.dateindex.boxed_clone(), - &source1.rest.rest, - source2.rest.dateindex.boxed_clone(), - &source2.rest.dates, - ), - difficultyepoch: LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.difficultyepoch.boxed_clone(), - source2.rest.difficultyepoch.boxed_clone(), - ), - }, + rest: build_rest!(name, v, source1, source2), + } + } + + /// Create from a LazyBinaryComputedFromHeightLast and a LazyBinaryComputedFromHeightSum. + pub(crate) fn from_lazy_binary_block_last_and_lazy_binary_sum( + name: &str, + version: Version, + source1: &LazyBinaryComputedFromHeightLast, + source2: &LazyBinaryComputedFromHeightSum, + ) -> Self + where + F: BinaryTransform, + S1aT: ComputedVecValue + JsonSchema, + S1bT: ComputedVecValue + JsonSchema, + S2aT: ComputedVecValue + JsonSchema, + S2bT: ComputedVecValue + JsonSchema, + { + let v = version + VERSION; + + Self { + height: LazyVecFrom2::transformed::( + name, + v, + source1.height.read_only_boxed_clone(), + source2.height.read_only_boxed_clone(), + ), + rest: build_rest!(name, v, source1, source2), } } /// Create from a LazyBinaryFromHeightLast and a LazyBinaryComputedFromHeightLast. 
- pub fn from_binary_block_and_lazy_binary_block_last( + pub(crate) fn from_binary_block_and_lazy_binary_block_last( name: &str, version: Version, source1: &LazyBinaryFromHeightLast, @@ -401,51 +316,10 @@ where height: LazyVecFrom2::transformed::( name, v, - source1.height.boxed_clone(), - source2.height.boxed_clone(), - ), - rest: LazyBinaryHeightDerivedLast { - dates: LazyBinaryFromDateLast::from_binary_and_derived_last::( - name, - v, - &source1.rest.dates, - source2.rest.dateindex.boxed_clone(), - &source2.rest.dates, - ), - difficultyepoch: LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.rest.difficultyepoch.boxed_clone(), - source2.rest.difficultyepoch.boxed_clone(), - ), - }, - } - } - - /// Create from a ComputedFromHeightAndDateLast and a LazyFromHeightLast. - pub fn from_computed_height_date_and_lazy_block_last( - name: &str, - version: Version, - source1: &ComputedFromHeightAndDateLast, - source2: &LazyFromHeightLast, - ) -> Self - where - F: BinaryTransform, - S1T: PartialOrd, - S2SourceT: ComputedVecValue + JsonSchema, - { - let v = version + VERSION; - - Self { - height: LazyVecFrom2::transformed::( - name, - v, - source1.height.boxed_clone(), - source2.height.boxed_clone(), - ), - rest: LazyBinaryHeightDerivedLast::from_computed_height_date_and_lazy_block_last::( - name, v, source1, source2, + source1.height.read_only_boxed_clone(), + source2.height.read_only_boxed_clone(), ), + rest: build_rest!(name, v, source1, source2), } } } diff --git a/crates/brk_computer/src/internal/multi/from_height/binary_sum.rs b/crates/brk_computer/src/internal/multi/from_height/binary_sum.rs index d6e7214b5..d746c0043 100644 --- a/crates/brk_computer/src/internal/multi/from_height/binary_sum.rs +++ b/crates/brk_computer/src/internal/multi/from_height/binary_sum.rs @@ -4,10 +4,10 @@ use brk_traversable::Traversable; use brk_types::{Height, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{BinaryTransform, IterableBoxedVec, 
IterableCloneableVec, LazyVecFrom2}; +use vecdb::{BinaryTransform, ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom2}; use crate::internal::{ - ComputedFromHeightSum, ComputedFromHeightSumCum, ComputedHeightDerivedSum, ComputedVecValue, + ComputedFromHeightSum, ComputedFromHeightSumCum, ComputedVecValue, LazyBinaryHeightDerivedSum, LazyFromHeightLast, NumericValue, }; @@ -25,7 +25,7 @@ where pub height: LazyVecFrom2, #[deref] #[deref_mut] - pub rest: LazyBinaryHeightDerivedSum, + pub rest: Box>, } impl LazyBinaryFromHeightSum @@ -34,23 +34,7 @@ where S1T: NumericValue + JsonSchema, S2T: NumericValue + JsonSchema, { - pub fn from_derived>( - name: &str, - version: Version, - height_source1: IterableBoxedVec, - height_source2: IterableBoxedVec, - source1: &ComputedHeightDerivedSum, - source2: &ComputedHeightDerivedSum, - ) -> Self { - let v = version + VERSION; - - Self { - height: LazyVecFrom2::transformed::(name, v, height_source1, height_source2), - rest: LazyBinaryHeightDerivedSum::from_derived::(name, v, source1, source2), - } - } - - pub fn from_computed>( + pub(crate) fn from_computed>( name: &str, version: Version, source1: &ComputedFromHeightSum, @@ -62,15 +46,15 @@ where height: LazyVecFrom2::transformed::( name, v, - source1.height.boxed_clone(), - source2.height.boxed_clone(), + source1.height.read_only_boxed_clone(), + source2.height.read_only_boxed_clone(), ), - rest: LazyBinaryHeightDerivedSum::from_derived::(name, v, &source1.rest, &source2.rest), + rest: Box::new(LazyBinaryHeightDerivedSum::from_derived::(name, v, &source1.rest, &source2.rest)), } } /// Create from two LazyBinaryFromHeightSum sources. 
- pub fn from_binary( + pub(crate) fn from_binary( name: &str, version: Version, source1: &LazyBinaryFromHeightSum, @@ -89,25 +73,25 @@ where height: LazyVecFrom2::transformed::( name, v, - source1.height.boxed_clone(), - source2.height.boxed_clone(), + source1.height.read_only_boxed_clone(), + source2.height.read_only_boxed_clone(), ), - rest: LazyBinaryHeightDerivedSum::from_binary::( + rest: Box::new(LazyBinaryHeightDerivedSum::from_binary::( name, v, &source1.rest, &source2.rest, - ), + )), } } /// Create from a SumCum source (using only sum) and a LazyLast source. /// Produces sum-only output (no cumulative). - pub fn from_sumcum_lazy_last( + pub(crate) fn from_sumcum_lazy_last( name: &str, version: Version, - height_source1: IterableBoxedVec, - height_source2: IterableBoxedVec, + height_source1: ReadableBoxedVec, + height_source2: ReadableBoxedVec, source1: &ComputedFromHeightSumCum, source2: &LazyFromHeightLast, ) -> Self @@ -119,12 +103,12 @@ where Self { height: LazyVecFrom2::transformed::(name, v, height_source1, height_source2), - rest: LazyBinaryHeightDerivedSum::from_sumcum_lazy_last::( + rest: Box::new(LazyBinaryHeightDerivedSum::from_sumcum_lazy_last::( name, v, source1, source2, - ), + )), } } } diff --git a/crates/brk_computer/src/internal/multi/from_height/binary_sum_cum.rs b/crates/brk_computer/src/internal/multi/from_height/binary_sum_cum.rs index 6a27ef95d..dd949f478 100644 --- a/crates/brk_computer/src/internal/multi/from_height/binary_sum_cum.rs +++ b/crates/brk_computer/src/internal/multi/from_height/binary_sum_cum.rs @@ -4,10 +4,10 @@ use brk_traversable::Traversable; use brk_types::{Height, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{BinaryTransform, IterableBoxedVec, IterableCloneableVec, LazyVecFrom2}; +use vecdb::{BinaryTransform, ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom2}; use crate::internal::{ - ComputedFromHeightLast, ComputedFromHeightSumCum, ComputedHeightDerivedLast, 
ComputedHeightDerivedSumCum, + ComputedFromHeightSumCum, ComputedHeightDerivedSumCum, ComputedVecValue, LazyBinaryHeightDerivedSumCum, LazyFromHeightLast, NumericValue, }; @@ -25,7 +25,7 @@ where pub height_cumulative: LazyVecFrom2, #[deref] #[deref_mut] - pub rest: LazyBinaryHeightDerivedSumCum, + pub rest: Box>, } const VERSION: Version = Version::ZERO; @@ -36,46 +36,11 @@ where S1T: ComputedVecValue + JsonSchema, S2T: ComputedVecValue + JsonSchema, { - pub fn from_computed>( + pub(crate) fn from_derived>( name: &str, version: Version, - height_source1: IterableBoxedVec, - height_source2: IterableBoxedVec, - source1: &ComputedFromHeightSumCum, - source2: &ComputedFromHeightSumCum, - ) -> Self - where - S1T: PartialOrd, - S2T: PartialOrd, - { - let v = version + VERSION; - - Self { - height: LazyVecFrom2::transformed::(name, v, height_source1, height_source2), - height_cumulative: LazyVecFrom2::transformed::( - &format!("{name}_cumulative"), - v, - source1.height_cumulative.boxed_clone(), - source2.height_cumulative.boxed_clone(), - ), - rest: LazyBinaryHeightDerivedSumCum::from_computed_sum_raw::( - name, - v, - &source1.dateindex, - &source1.rest, - &source1.difficultyepoch, - &source2.dateindex, - &source2.rest, - &source2.difficultyepoch, - ), - } - } - - pub fn from_derived>( - name: &str, - version: Version, - height_source1: IterableBoxedVec, - height_source2: IterableBoxedVec, + height_source1: ReadableBoxedVec, + height_source2: ReadableBoxedVec, source1: &ComputedHeightDerivedSumCum, source2: &ComputedHeightDerivedSumCum, ) -> Self @@ -90,131 +55,25 @@ where height_cumulative: LazyVecFrom2::transformed::( &format!("{name}_cumulative"), v, - source1.height_cumulative.boxed_clone(), - source2.height_cumulative.boxed_clone(), + source1.height_cumulative.read_only_boxed_clone(), + source2.height_cumulative.read_only_boxed_clone(), ), - rest: LazyBinaryHeightDerivedSumCum::from_computed_sum_raw::( + rest: 
Box::new(LazyBinaryHeightDerivedSumCum::from_computed_sum_raw::( name, v, - &source1.dateindex, source1, - &source1.difficultyepoch, - &source2.dateindex, source2, - &source2.difficultyepoch, - ), - } - } - - // --- Methods accepting SumCum + Last sources --- - - pub fn from_computed_last>( - name: &str, - version: Version, - height_source1: IterableBoxedVec, - height_source2: IterableBoxedVec, - source1: &ComputedFromHeightSumCum, - source2: &ComputedFromHeightLast, - ) -> Self - where - S1T: PartialOrd, - S2T: NumericValue, - { - let v = version + VERSION; - Self { - height: LazyVecFrom2::transformed::(name, v, height_source1, height_source2), - height_cumulative: LazyVecFrom2::transformed::( - &format!("{name}_cumulative"), - v, - source1.height_cumulative.boxed_clone(), - source2.height.boxed_clone(), - ), - rest: LazyBinaryHeightDerivedSumCum::from_computed_last::(name, v, source1, source2), - } - } - - pub fn from_derived_computed_last>( - name: &str, - version: Version, - height_source1: IterableBoxedVec, - height_source2: IterableBoxedVec, - source1: &ComputedHeightDerivedSumCum, - source2: &ComputedFromHeightLast, - ) -> Self - where - S1T: NumericValue, - S2T: NumericValue, - { - let v = version + VERSION; - Self { - height: LazyVecFrom2::transformed::(name, v, height_source1, height_source2), - height_cumulative: LazyVecFrom2::transformed::( - &format!("{name}_cumulative"), - v, - source1.height_cumulative.boxed_clone(), - source2.height.boxed_clone(), - ), - rest: LazyBinaryHeightDerivedSumCum::from_derived_computed_last::(name, v, source1, source2), - } - } - - pub fn from_derived_last>( - name: &str, - version: Version, - height_source1: IterableBoxedVec, - height_source2: IterableBoxedVec, - source1: &ComputedHeightDerivedSumCum, - source2: &ComputedHeightDerivedLast, - ) -> Self - where - S1T: NumericValue, - S2T: NumericValue, - { - let v = version + VERSION; - Self { - height: LazyVecFrom2::transformed::(name, v, height_source1.clone(), 
height_source2.clone()), - height_cumulative: LazyVecFrom2::transformed::( - &format!("{name}_cumulative"), - v, - source1.height_cumulative.boxed_clone(), - height_source2, - ), - rest: LazyBinaryHeightDerivedSumCum::from_derived_last::(name, v, source1, source2), - } - } - - pub fn from_computed_derived_last>( - name: &str, - version: Version, - height_source1: IterableBoxedVec, - height_source2: IterableBoxedVec, - source1: &ComputedFromHeightSumCum, - source2: &ComputedHeightDerivedLast, - ) -> Self - where - S1T: PartialOrd, - S2T: NumericValue, - { - let v = version + VERSION; - Self { - height: LazyVecFrom2::transformed::(name, v, height_source1.clone(), height_source2.clone()), - height_cumulative: LazyVecFrom2::transformed::( - &format!("{name}_cumulative"), - v, - source1.height_cumulative.boxed_clone(), - height_source2, - ), - rest: LazyBinaryHeightDerivedSumCum::from_computed_derived_last::(name, v, source1, source2), + )), } } // --- Methods accepting SumCum + LazyLast sources --- - pub fn from_computed_lazy_last( + pub(crate) fn from_computed_lazy_last( name: &str, version: Version, - height_source1: IterableBoxedVec, - height_source2: IterableBoxedVec, + height_source1: ReadableBoxedVec, + height_source2: ReadableBoxedVec, source1: &ComputedFromHeightSumCum, source2: &LazyFromHeightLast, ) -> Self @@ -230,10 +89,10 @@ where height_cumulative: LazyVecFrom2::transformed::( &format!("{name}_cumulative"), v, - source1.height_cumulative.boxed_clone(), - source2.height.boxed_clone(), + source1.height_cumulative.read_only_boxed_clone(), + source2.height.read_only_boxed_clone(), ), - rest: LazyBinaryHeightDerivedSumCum::from_computed_lazy_last::(name, v, source1, source2), + rest: Box::new(LazyBinaryHeightDerivedSumCum::from_computed_lazy_last::(name, v, source1, source2)), } } } diff --git a/crates/brk_computer/src/internal/multi/from_height/constant.rs b/crates/brk_computer/src/internal/multi/from_height/constant.rs new file mode 100644 index 
000000000..baeb229fb --- /dev/null +++ b/crates/brk_computer/src/internal/multi/from_height/constant.rs @@ -0,0 +1,98 @@ +use brk_traversable::Traversable; +use brk_types::{ + Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, Minute1, Minute10, + Minute30, Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10, +}; +use schemars::JsonSchema; +use serde::Serialize; +use vecdb::{Formattable, ReadableCloneableVec, LazyVecFrom1, UnaryTransform, VecValue}; + +use crate::indexes; + +/// Lazy constant vecs for all index levels. +/// Uses const generic transforms to return the same value for every index. +#[derive(Clone, Traversable)] +#[traversable(merge)] +pub struct ConstantVecs +where + T: VecValue + Formattable + Serialize + JsonSchema, +{ + pub height: LazyVecFrom1, + pub minute1: LazyVecFrom1, + pub minute5: LazyVecFrom1, + pub minute10: LazyVecFrom1, + pub minute30: LazyVecFrom1, + pub hour1: LazyVecFrom1, + pub hour4: LazyVecFrom1, + pub hour12: LazyVecFrom1, + pub day1: LazyVecFrom1, + pub day3: LazyVecFrom1, + pub week1: LazyVecFrom1, + pub month1: LazyVecFrom1, + pub month3: LazyVecFrom1, + pub month6: LazyVecFrom1, + pub year1: LazyVecFrom1, + pub year10: LazyVecFrom1, + pub halvingepoch: LazyVecFrom1, + pub difficultyepoch: LazyVecFrom1, +} + +impl ConstantVecs { + /// Create constant vecs using a transform that ignores input and returns a constant. + pub(crate) fn new(name: &str, version: Version, indexes: &indexes::Vecs) -> Self + where + F: UnaryTransform + + UnaryTransform + + UnaryTransform + + UnaryTransform + + UnaryTransform + + UnaryTransform + + UnaryTransform + + UnaryTransform + + UnaryTransform + + UnaryTransform + + UnaryTransform + + UnaryTransform + + UnaryTransform + + UnaryTransform + + UnaryTransform + + UnaryTransform + + UnaryTransform + + UnaryTransform, + { + macro_rules! 
period { + ($idx:ident, $I:ty) => { + LazyVecFrom1::transformed::( + name, + version, + indexes.$idx.identity.read_only_boxed_clone(), + ) + }; + } + + Self { + height: LazyVecFrom1::transformed::( + name, + version, + indexes.height.identity.read_only_boxed_clone(), + ), + minute1: period!(minute1, Minute1), + minute5: period!(minute5, Minute5), + minute10: period!(minute10, Minute10), + minute30: period!(minute30, Minute30), + hour1: period!(hour1, Hour1), + hour4: period!(hour4, Hour4), + hour12: period!(hour12, Hour12), + day1: period!(day1, Day1), + day3: period!(day3, Day3), + week1: period!(week1, Week1), + month1: period!(month1, Month1), + month3: period!(month3, Month3), + month6: period!(month6, Month6), + year1: period!(year1, Year1), + year10: period!(year10, Year10), + halvingepoch: period!(halvingepoch, HalvingEpoch), + difficultyepoch: period!(difficultyepoch, DifficultyEpoch), + } + } +} diff --git a/crates/brk_computer/src/internal/multi/from_height/distribution.rs b/crates/brk_computer/src/internal/multi/from_height/distribution.rs index 25e42903e..071a3941a 100644 --- a/crates/brk_computer/src/internal/multi/from_height/distribution.rs +++ b/crates/brk_computer/src/internal/multi/from_height/distribution.rs @@ -9,23 +9,23 @@ use brk_traversable::Traversable; use brk_types::{Height, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec, ReadableCloneableVec, Rw, StorageMode}; -use crate::{ComputeIndexes, indexes}; +use crate::indexes; use crate::internal::{ComputedHeightDerivedDistribution, ComputedVecValue, NumericValue}; -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Deref, DerefMut, Traversable)] #[traversable(merge)] -pub struct ComputedFromHeightDistribution +pub struct ComputedFromHeightDistribution where T: ComputedVecValue + PartialOrd + JsonSchema, { #[traversable(rename 
= "base")] - pub height: EagerVec>, + pub height: M::Stored>>, #[deref] #[deref_mut] - pub rest: ComputedHeightDerivedDistribution, + pub rest: Box>, } const VERSION: Version = Version::ZERO; @@ -34,7 +34,7 @@ impl ComputedFromHeightDistribution where T: NumericValue + JsonSchema, { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, version: Version, @@ -45,38 +45,12 @@ where let height: EagerVec> = EagerVec::forced_import(db, name, v)?; let rest = ComputedHeightDerivedDistribution::forced_import( - db, name, - height.boxed_clone(), + height.read_only_boxed_clone(), v, indexes, - )?; + ); - Ok(Self { height, rest }) - } - - pub fn compute_all( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - mut compute: F, - ) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { - compute(&mut self.height)?; - self.compute_rest(indexes, starting_indexes, exit) - } - - /// Compute rest from self.height (for stateful computation patterns). 
- pub fn compute_rest( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.rest - .derive_from(indexes, starting_indexes, &self.height, exit) + Ok(Self { height, rest: Box::new(rest) }) } } diff --git a/crates/brk_computer/src/internal/multi/from_height/full.rs b/crates/brk_computer/src/internal/multi/from_height/full.rs index 6c15b7b14..478c13caf 100644 --- a/crates/brk_computer/src/internal/multi/from_height/full.rs +++ b/crates/brk_computer/src/internal/multi/from_height/full.rs @@ -6,23 +6,25 @@ use brk_traversable::Traversable; use brk_types::{Height, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec}; +use vecdb::{ + Database, EagerVec, Exit, ImportableVec, PcoVec, ReadableCloneableVec, Rw, StorageMode, +}; use crate::{ComputeIndexes, indexes}; -use crate::internal::{ComputedVecValue, ComputedHeightDerivedFull, NumericValue}; +use crate::internal::{ComputedHeightDerivedFull, ComputedVecValue, NumericValue}; -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Deref, DerefMut, Traversable)] #[traversable(merge)] -pub struct ComputedFromHeightFull +pub struct ComputedFromHeightFull where T: ComputedVecValue + PartialOrd + JsonSchema, { #[traversable(rename = "base")] - pub height: EagerVec>, + pub height: M::Stored>>, #[deref] #[deref_mut] - pub rest: ComputedHeightDerivedFull, + pub rest: Box>, } const VERSION: Version = Version::ZERO; @@ -31,7 +33,7 @@ impl ComputedFromHeightFull where T: NumericValue + JsonSchema, { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, version: Version, @@ -44,25 +46,22 @@ where let rest = ComputedHeightDerivedFull::forced_import( db, name, - height.boxed_clone(), + height.read_only_boxed_clone(), v, indexes, )?; - Ok(Self { height, rest }) + Ok(Self { height, rest: Box::new(rest) }) } - pub fn compute_all( + pub(crate) fn 
compute( &mut self, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, - mut compute: F, - ) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { + mut compute: impl FnMut(&mut EagerVec>) -> Result<()>, + ) -> Result<()> { compute(&mut self.height)?; - self.rest.derive_from(indexes, starting_indexes, &self.height, exit) + self.rest.compute_cumulative(starting_indexes, &self.height, exit)?; + Ok(()) } } diff --git a/crates/brk_computer/src/internal/multi/from_height/last.rs b/crates/brk_computer/src/internal/multi/from_height/last.rs index be54a280b..0a4e23258 100644 --- a/crates/brk_computer/src/internal/multi/from_height/last.rs +++ b/crates/brk_computer/src/internal/multi/from_height/last.rs @@ -1,4 +1,4 @@ -//! ComputedFromHeight using only LastVec aggregation. +//! ComputedFromHeight using only Last aggregation. use brk_error::Result; @@ -6,23 +6,23 @@ use brk_traversable::Traversable; use brk_types::{Height, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec, ReadableCloneableVec, Rw, StorageMode}; -use crate::{ComputeIndexes, indexes}; +use crate::indexes; -use crate::internal::{ComputedVecValue, ComputedHeightDerivedLast, NumericValue}; +use crate::internal::{ComputedHeightDerivedLast, ComputedVecValue, NumericValue}; -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Deref, DerefMut, Traversable)] #[traversable(merge)] -pub struct ComputedFromHeightLast +pub struct ComputedFromHeightLast where T: ComputedVecValue + PartialOrd + JsonSchema, { - pub height: EagerVec>, + pub height: M::Stored>>, #[deref] #[deref_mut] #[traversable(flatten)] - pub rest: ComputedHeightDerivedLast, + pub rest: Box>, } const VERSION: Version = Version::ZERO; @@ -31,7 +31,7 @@ impl ComputedFromHeightLast where T: NumericValue + JsonSchema, { - pub fn forced_import( + pub(crate) 
fn forced_import( db: &Database, name: &str, version: Version, @@ -42,33 +42,8 @@ where let height: EagerVec> = EagerVec::forced_import(db, name, v)?; let rest = - ComputedHeightDerivedLast::forced_import(db, name, height.boxed_clone(), v, indexes)?; + ComputedHeightDerivedLast::forced_import(name, height.read_only_boxed_clone(), v, indexes); - Ok(Self { height, rest }) - } - - pub fn compute_all( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - mut compute: F, - ) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { - compute(&mut self.height)?; - self.compute_rest(indexes, starting_indexes, exit) - } - - /// Compute rest from self.height (for stateful computation patterns). - pub fn compute_rest( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.rest - .derive_from(indexes, starting_indexes, &self.height, exit) + Ok(Self { height, rest: Box::new(rest) }) } } diff --git a/crates/brk_computer/src/internal/multi/from_height/lazy_binary_computed_distribution.rs b/crates/brk_computer/src/internal/multi/from_height/lazy_binary_computed_distribution.rs index 3722a7a94..63a377d09 100644 --- a/crates/brk_computer/src/internal/multi/from_height/lazy_binary_computed_distribution.rs +++ b/crates/brk_computer/src/internal/multi/from_height/lazy_binary_computed_distribution.rs @@ -3,15 +3,14 @@ //! Height-level values are lazy: `transform(source1[h], source2[h])`. //! Uses Distribution aggregation (no sum/cumulative) - appropriate for ratios. 
-use brk_error::Result; use brk_traversable::Traversable; use brk_types::{Height, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{BinaryTransform, Database, Exit, IterableBoxedVec, IterableCloneableVec, LazyVecFrom2}; +use vecdb::{BinaryTransform, ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom2}; use crate::{ - ComputeIndexes, indexes, + indexes, internal::{ComputedHeightDerivedDistribution, ComputedVecValue, NumericValue}, }; @@ -30,7 +29,7 @@ where pub height: LazyVecFrom2, #[deref] #[deref_mut] - pub rest: ComputedHeightDerivedDistribution, + pub rest: Box>, } impl LazyBinaryComputedFromHeightDistribution @@ -39,36 +38,24 @@ where S1T: ComputedVecValue + JsonSchema, S2T: ComputedVecValue + JsonSchema, { - pub fn forced_import>( - db: &Database, + pub(crate) fn forced_import>( name: &str, version: Version, - source1: IterableBoxedVec, - source2: IterableBoxedVec, + source1: ReadableBoxedVec, + source2: ReadableBoxedVec, indexes: &indexes::Vecs, - ) -> Result { + ) -> Self { let v = version + VERSION; let height = LazyVecFrom2::transformed::(name, v, source1, source2); let rest = ComputedHeightDerivedDistribution::forced_import( - db, name, - height.boxed_clone(), + height.read_only_boxed_clone(), v, indexes, - )?; + ); - Ok(Self { height, rest }) - } - - pub fn derive_from( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.rest - .derive_from(indexes, starting_indexes, &self.height, exit) + Self { height, rest: Box::new(rest) } } } diff --git a/crates/brk_computer/src/internal/multi/from_height/lazy_binary_computed_full.rs b/crates/brk_computer/src/internal/multi/from_height/lazy_binary_computed_full.rs index 6fe0d3771..ba010320a 100644 --- a/crates/brk_computer/src/internal/multi/from_height/lazy_binary_computed_full.rs +++ b/crates/brk_computer/src/internal/multi/from_height/lazy_binary_computed_full.rs @@ -1,7 +1,7 @@ //! 
LazyBinaryComputedFromHeightFull - block full with lazy binary transform at height level. //! //! Height-level values are lazy: `transform(source1[h], source2[h])`. -//! Cumulative, dateindex stats, and difficultyepoch are stored since they +//! Cumulative, day1 stats, and difficultyepoch are stored since they //! require aggregation across heights. use brk_error::Result; @@ -9,7 +9,7 @@ use brk_traversable::Traversable; use brk_types::{Height, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{BinaryTransform, Database, Exit, IterableBoxedVec, IterableCloneableVec, LazyVecFrom2}; +use vecdb::{BinaryTransform, Database, Exit, ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom2, Rw, StorageMode}; use crate::{ ComputeIndexes, indexes, @@ -19,9 +19,9 @@ use crate::{ const VERSION: Version = Version::ZERO; /// Block full aggregation with lazy binary transform at height + computed derived indexes. -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Deref, DerefMut, Traversable)] #[traversable(merge)] -pub struct LazyBinaryComputedFromHeightFull +pub struct LazyBinaryComputedFromHeightFull where T: ComputedVecValue + PartialOrd + JsonSchema, S1T: ComputedVecValue, @@ -31,7 +31,7 @@ where pub height: LazyVecFrom2, #[deref] #[deref_mut] - pub rest: ComputedHeightDerivedFull, + pub rest: Box>, } impl LazyBinaryComputedFromHeightFull @@ -40,12 +40,12 @@ where S1T: ComputedVecValue + JsonSchema, S2T: ComputedVecValue + JsonSchema, { - pub fn forced_import>( + pub(crate) fn forced_import>( db: &Database, name: &str, version: Version, - source1: IterableBoxedVec, - source2: IterableBoxedVec, + source1: ReadableBoxedVec, + source2: ReadableBoxedVec, indexes: &indexes::Vecs, ) -> Result { let v = version + VERSION; @@ -53,18 +53,17 @@ where let height = LazyVecFrom2::transformed::(name, v, source1, source2); let rest = - ComputedHeightDerivedFull::forced_import(db, name, height.boxed_clone(), v, indexes)?; + 
ComputedHeightDerivedFull::forced_import(db, name, height.read_only_boxed_clone(), v, indexes)?; - Ok(Self { height, rest }) + Ok(Self { height, rest: Box::new(rest) }) } - pub fn derive_from( + pub(crate) fn compute_cumulative( &mut self, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { self.rest - .derive_from(indexes, starting_indexes, &self.height, exit) + .compute_cumulative(starting_indexes, &self.height, exit) } } diff --git a/crates/brk_computer/src/internal/multi/from_height/lazy_binary_computed_last.rs b/crates/brk_computer/src/internal/multi/from_height/lazy_binary_computed_last.rs index b0d564ad9..2f0e689ac 100644 --- a/crates/brk_computer/src/internal/multi/from_height/lazy_binary_computed_last.rs +++ b/crates/brk_computer/src/internal/multi/from_height/lazy_binary_computed_last.rs @@ -1,18 +1,17 @@ //! LazyBinaryComputedFromHeightLast - block last with lazy binary transform at height level. //! //! Height-level value is lazy: `transform(source1[h], source2[h])`. -//! DateIndex last is stored since it requires finding the last value within each date +//! Day1 last is stored since it requires finding the last value within each date //! (which may span multiple heights with varying prices). 
-use brk_error::Result; use brk_traversable::Traversable; use brk_types::{Height, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{BinaryTransform, Database, Exit, IterableBoxedVec, IterableCloneableVec, LazyVecFrom2}; +use vecdb::{BinaryTransform, ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom2}; use crate::{ - ComputeIndexes, indexes, + indexes, internal::{ComputedHeightDerivedLast, ComputedVecValue, NumericValue}, }; @@ -31,7 +30,7 @@ where #[deref] #[deref_mut] #[traversable(flatten)] - pub rest: ComputedHeightDerivedLast, + pub rest: Box>, } impl LazyBinaryComputedFromHeightLast @@ -40,31 +39,20 @@ where S1T: ComputedVecValue + JsonSchema, S2T: ComputedVecValue + JsonSchema, { - pub fn forced_import>( - db: &Database, + pub(crate) fn forced_import>( name: &str, version: Version, - source1: IterableBoxedVec, - source2: IterableBoxedVec, + source1: ReadableBoxedVec, + source2: ReadableBoxedVec, indexes: &indexes::Vecs, - ) -> Result { + ) -> Self { let v = version + VERSION; let height = LazyVecFrom2::transformed::(name, v, source1, source2); let rest = - ComputedHeightDerivedLast::forced_import(db, name, height.boxed_clone(), v, indexes)?; + ComputedHeightDerivedLast::forced_import(name, height.read_only_boxed_clone(), v, indexes); - Ok(Self { height, rest }) - } - - pub fn derive_from( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.rest - .derive_from(indexes, starting_indexes, &self.height, exit) + Self { height, rest: Box::new(rest) } } } diff --git a/crates/brk_computer/src/internal/multi/from_height/lazy_binary_computed_sum.rs b/crates/brk_computer/src/internal/multi/from_height/lazy_binary_computed_sum.rs index 7508fdcd8..9dae82554 100644 --- a/crates/brk_computer/src/internal/multi/from_height/lazy_binary_computed_sum.rs +++ b/crates/brk_computer/src/internal/multi/from_height/lazy_binary_computed_sum.rs @@ -1,17 +1,16 @@ //! 
LazyBinaryComputedFromHeightSum - block sum with lazy binary transform at height level. //! //! Height-level sum is lazy: `transform(source1[h], source2[h])`. -//! DateIndex stats are stored since they require aggregation across heights. +//! Day1 stats are stored since they require aggregation across heights. -use brk_error::Result; use brk_traversable::Traversable; use brk_types::{Height, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{BinaryTransform, Database, Exit, IterableBoxedVec, IterableCloneableVec, LazyVecFrom2}; +use vecdb::{BinaryTransform, ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom2}; use crate::{ - ComputeIndexes, indexes, + indexes, internal::{ComputedHeightDerivedSum, ComputedVecValue, NumericValue}, }; @@ -31,7 +30,7 @@ where #[deref] #[deref_mut] #[traversable(flatten)] - pub rest: ComputedHeightDerivedSum, + pub rest: Box>, } impl LazyBinaryComputedFromHeightSum @@ -40,31 +39,20 @@ where S1T: ComputedVecValue + JsonSchema, S2T: ComputedVecValue + JsonSchema, { - pub fn forced_import>( - db: &Database, + pub(crate) fn forced_import>( name: &str, version: Version, - source1: IterableBoxedVec, - source2: IterableBoxedVec, + source1: ReadableBoxedVec, + source2: ReadableBoxedVec, indexes: &indexes::Vecs, - ) -> Result { + ) -> Self { let v = version + VERSION; let height = LazyVecFrom2::transformed::(name, v, source1, source2); let rest = - ComputedHeightDerivedSum::forced_import(db, name, height.boxed_clone(), v, indexes)?; + ComputedHeightDerivedSum::forced_import(name, height.read_only_boxed_clone(), v, indexes); - Ok(Self { height, rest }) - } - - pub fn derive_from( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.rest - .derive_from(indexes, starting_indexes, &self.height, exit) + Self { height, rest: Box::new(rest) } } } diff --git a/crates/brk_computer/src/internal/multi/from_height/lazy_binary_computed_sum_cum.rs 
b/crates/brk_computer/src/internal/multi/from_height/lazy_binary_computed_sum_cum.rs index d0bc35a56..384840843 100644 --- a/crates/brk_computer/src/internal/multi/from_height/lazy_binary_computed_sum_cum.rs +++ b/crates/brk_computer/src/internal/multi/from_height/lazy_binary_computed_sum_cum.rs @@ -1,7 +1,7 @@ //! LazyBinaryComputedFromHeightSumCum - block sum_cum with lazy binary transform at height level. //! //! Height-level sum is lazy: `transform(source1[h], source2[h])`. -//! Cumulative and dateindex stats are stored since they require aggregation +//! Cumulative and day1 stats are stored since they require aggregation //! across heights. use brk_error::Result; @@ -9,19 +9,20 @@ use brk_traversable::Traversable; use brk_types::{Height, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{BinaryTransform, Database, Exit, IterableBoxedVec, IterableCloneableVec, LazyVecFrom2}; +use vecdb::{BinaryTransform, Database, Exit, ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom2, Rw, StorageMode}; use crate::{ - ComputeIndexes, indexes, + ComputeIndexes, + indexes, internal::{ComputedHeightDerivedSumCum, ComputedVecValue, NumericValue}, }; const VERSION: Version = Version::ZERO; /// Block sum_cum aggregation with lazy binary transform at height + computed derived indexes. 
-#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Deref, DerefMut, Traversable)] #[traversable(merge)] -pub struct LazyBinaryComputedFromHeightSumCum +pub struct LazyBinaryComputedFromHeightSumCum where T: ComputedVecValue + PartialOrd + JsonSchema, S1T: ComputedVecValue, @@ -32,7 +33,7 @@ where #[deref] #[deref_mut] #[traversable(flatten)] - pub rest: ComputedHeightDerivedSumCum, + pub rest: Box>, } impl LazyBinaryComputedFromHeightSumCum @@ -41,12 +42,12 @@ where S1T: ComputedVecValue + JsonSchema, S2T: ComputedVecValue + JsonSchema, { - pub fn forced_import>( + pub(crate) fn forced_import>( db: &Database, name: &str, version: Version, - source1: IterableBoxedVec, - source2: IterableBoxedVec, + source1: ReadableBoxedVec, + source2: ReadableBoxedVec, indexes: &indexes::Vecs, ) -> Result { let v = version + VERSION; @@ -54,18 +55,17 @@ where let height = LazyVecFrom2::transformed::(name, v, source1, source2); let rest = - ComputedHeightDerivedSumCum::forced_import(db, name, height.boxed_clone(), v, indexes)?; + ComputedHeightDerivedSumCum::forced_import(db, name, height.read_only_boxed_clone(), v, indexes)?; - Ok(Self { height, rest }) + Ok(Self { height, rest: Box::new(rest) }) } - pub fn derive_from( + pub(crate) fn compute_cumulative( &mut self, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { self.rest - .derive_from(indexes, starting_indexes, &self.height, exit) + .derive_from(starting_indexes, &self.height, exit) } } diff --git a/crates/brk_computer/src/internal/multi/from_height/lazy_binary_price.rs b/crates/brk_computer/src/internal/multi/from_height/lazy_binary_price.rs deleted file mode 100644 index db65b3372..000000000 --- a/crates/brk_computer/src/internal/multi/from_height/lazy_binary_price.rs +++ /dev/null @@ -1,72 +0,0 @@ -//! Fully lazy binary price wrapper with both USD and sats representations. -//! -//! All levels (height, dateindex, date periods, difficultyepoch) are lazy. -//! 
Derives dateindex from the two source dateindexes rather than storing it. - -use brk_traversable::Traversable; -use brk_types::{CentsUnsigned, Dollars, SatsFract, Version}; -use derive_more::{Deref, DerefMut}; -use vecdb::BinaryTransform; - -use crate::internal::{ - DollarsToSatsFract, LazyBinaryFromHeightLast, LazyFromHeightLast, LazyPriceFromCents, - PriceFromHeight, -}; - -/// Fully lazy binary price metric with both USD and sats representations. -/// -/// Dollars: lazy binary transform at all levels (height, dateindex, date periods, difficultyepoch). -/// Sats: lazy unary transform of dollars. -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(merge)] -pub struct LazyBinaryPriceFromHeight { - #[deref] - #[deref_mut] - #[traversable(flatten)] - pub dollars: LazyBinaryFromHeightLast, - pub sats: LazyFromHeightLast, -} - -impl LazyBinaryPriceFromHeight { - /// Create from a PriceFromHeight (source1) and a LazyPriceFromCents (source2). - pub fn from_price_and_lazy_price>( - name: &str, - version: Version, - source1: &PriceFromHeight, - source2: &LazyPriceFromCents, - ) -> Self { - let dollars = LazyBinaryFromHeightLast::from_block_last_and_lazy_block_last::< - F, - CentsUnsigned, - >(name, version, &source1.dollars, &source2.dollars); - - let sats = LazyFromHeightLast::from_binary::( - &format!("{name}_sats"), - version, - &dollars, - ); - - Self { dollars, sats } - } - - /// Create from a LazyPriceFromCents (source1) and a PriceFromHeight (source2). 
- pub fn from_lazy_price_and_price>( - name: &str, - version: Version, - source1: &LazyPriceFromCents, - source2: &PriceFromHeight, - ) -> Self { - let dollars = LazyBinaryFromHeightLast::from_lazy_block_last_and_block_last::< - F, - CentsUnsigned, - >(name, version, &source1.dollars, &source2.dollars); - - let sats = LazyFromHeightLast::from_binary::( - &format!("{name}_sats"), - version, - &dollars, - ); - - Self { dollars, sats } - } -} diff --git a/crates/brk_computer/src/internal/multi/from_height/lazy_computed_full.rs b/crates/brk_computer/src/internal/multi/from_height/lazy_computed_full.rs index 1c5c26333..3ece1eca6 100644 --- a/crates/brk_computer/src/internal/multi/from_height/lazy_computed_full.rs +++ b/crates/brk_computer/src/internal/multi/from_height/lazy_computed_full.rs @@ -5,19 +5,20 @@ use brk_traversable::Traversable; use brk_types::{Height, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{Database, Exit, IterableCloneableVec, LazyVecFrom1, UnaryTransform}; +use vecdb::{Database, Exit, ReadableCloneableVec, LazyVecFrom1, UnaryTransform, Rw, StorageMode}; use crate::{ - ComputeIndexes, indexes, + ComputeIndexes, + indexes, internal::{ComputedVecValue, ComputedHeightDerivedFull, NumericValue}, }; const VERSION: Version = Version::ZERO; /// Block full aggregation with lazy height transform + computed derived indexes. 
-#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Deref, DerefMut, Traversable)] #[traversable(merge)] -pub struct LazyComputedFromHeightFull +pub struct LazyComputedFromHeightFull where T: ComputedVecValue + PartialOrd + JsonSchema, S: ComputedVecValue, @@ -26,7 +27,7 @@ where pub height: LazyVecFrom1, #[deref] #[deref_mut] - pub rest: ComputedHeightDerivedFull, + pub rest: Box>, } impl LazyComputedFromHeightFull @@ -34,48 +35,29 @@ where T: NumericValue + JsonSchema, S: ComputedVecValue + JsonSchema, { - pub fn forced_import>( + pub(crate) fn forced_import>( db: &Database, name: &str, version: Version, - source: impl IterableCloneableVec, + source: &impl ReadableCloneableVec, indexes: &indexes::Vecs, ) -> Result { let v = version + VERSION; - let height = LazyVecFrom1::transformed::(name, v, source.boxed_clone()); + let height = LazyVecFrom1::transformed::(name, v, source.read_only_boxed_clone()); let rest = - ComputedHeightDerivedFull::forced_import(db, name, height.boxed_clone(), v, indexes)?; + ComputedHeightDerivedFull::forced_import(db, name, height.read_only_boxed_clone(), v, indexes)?; - Ok(Self { height, rest }) + Ok(Self { height, rest: Box::new(rest) }) } - pub fn forced_import_with_init( - db: &Database, - name: &str, - version: Version, - source: impl IterableCloneableVec, - indexes: &indexes::Vecs, - init_fn: vecdb::ComputeFrom1, - ) -> Result { - let v = version + VERSION; - - let height = LazyVecFrom1::init(name, v, source.boxed_clone(), init_fn); - - let rest = - ComputedHeightDerivedFull::forced_import(db, name, height.boxed_clone(), v, indexes)?; - - Ok(Self { height, rest }) - } - - pub fn derive_from( + pub(crate) fn compute_cumulative( &mut self, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { self.rest - .derive_from(indexes, starting_indexes, &self.height, exit) + .compute_cumulative(starting_indexes, &self.height, exit) } } diff --git 
a/crates/brk_computer/src/internal/multi/from_height/lazy_computed_sum_cum.rs b/crates/brk_computer/src/internal/multi/from_height/lazy_computed_sum_cum.rs index a30351b43..2dadaff3a 100644 --- a/crates/brk_computer/src/internal/multi/from_height/lazy_computed_sum_cum.rs +++ b/crates/brk_computer/src/internal/multi/from_height/lazy_computed_sum_cum.rs @@ -2,7 +2,7 @@ //! //! Use this when you need: //! - Lazy height (binary transform from two sources) -//! - Stored cumulative and dateindex aggregates +//! - Stored cumulative and day1 aggregates //! - Lazy coarser period lookups use brk_error::Result; @@ -10,7 +10,7 @@ use brk_traversable::Traversable; use brk_types::{Height, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{Database, Exit, IterableCloneableVec, LazyVecFrom2}; +use vecdb::{Database, Exit, ReadableCloneableVec, LazyVecFrom2, Rw, StorageMode}; use crate::{indexes, ComputeIndexes}; @@ -19,11 +19,11 @@ use crate::internal::{ComputedVecValue, ComputedHeightDerivedSumCum, NumericValu /// Block sum+cumulative with lazy binary height transform + computed derived indexes. /// /// Height is a lazy binary transform (e.g., mask × source, or price × sats). -/// Cumulative and dateindex are stored (computed from lazy height). +/// Cumulative and day1 are stored (computed from lazy height). /// Coarser periods are lazy lookups. 
-#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Deref, DerefMut, Traversable)] #[traversable(merge)] -pub struct LazyComputedFromHeightSumCum +pub struct LazyComputedFromHeightSumCum where T: ComputedVecValue + PartialOrd + JsonSchema, S1T: ComputedVecValue, @@ -33,7 +33,7 @@ where pub height: LazyVecFrom2, #[deref] #[deref_mut] - pub rest: ComputedHeightDerivedSumCum, + pub rest: Box>, } const VERSION: Version = Version::ZERO; @@ -44,7 +44,7 @@ where S1T: ComputedVecValue + JsonSchema, S2T: ComputedVecValue + JsonSchema, { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, version: Version, @@ -56,22 +56,20 @@ where let rest = ComputedHeightDerivedSumCum::forced_import( db, name, - height.boxed_clone(), + height.read_only_boxed_clone(), v, indexes, )?; - Ok(Self { height, rest }) + Ok(Self { height, rest: Box::new(rest) }) } - /// Derive aggregates from the lazy height source. - pub fn derive_from( + pub(crate) fn compute_cumulative( &mut self, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { self.rest - .derive_from(indexes, starting_indexes, &self.height, exit) + .derive_from(starting_indexes, &self.height, exit) } } diff --git a/crates/brk_computer/src/internal/multi/from_height/lazy_distribution.rs b/crates/brk_computer/src/internal/multi/from_height/lazy_distribution.rs deleted file mode 100644 index e9b74d1a3..000000000 --- a/crates/brk_computer/src/internal/multi/from_height/lazy_distribution.rs +++ /dev/null @@ -1,95 +0,0 @@ -//! LazyFromHeightDistribution - lazy height + derived distribution (avg/min/max) for indexes. 
- -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{Height, Version}; -use derive_more::{Deref, DerefMut}; -use schemars::JsonSchema; -use vecdb::{ComputeFrom1, Database, Exit, IterableCloneableVec, LazyVecFrom1, UnaryTransform}; - -use crate::{ - ComputeIndexes, indexes, - internal::{ComputedVecValue, ComputedHeightDerivedDistribution, NumericValue}, -}; - -const VERSION: Version = Version::ZERO; - -/// Lazy height + derived distribution for indexes. -/// -/// Height is a lazy transform from a source. -/// Indexes (dateindex + periods + difficultyepoch) store distribution stats (avg/min/max). -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(merge)] -pub struct LazyFromHeightDistribution -where - T: ComputedVecValue + PartialOrd + JsonSchema, - S: ComputedVecValue, -{ - #[traversable(rename = "base")] - pub height: LazyVecFrom1, - #[deref] - #[deref_mut] - #[traversable(flatten)] - pub rest: ComputedHeightDerivedDistribution, -} - -impl LazyFromHeightDistribution -where - T: NumericValue + JsonSchema, - S: ComputedVecValue + JsonSchema, -{ - pub fn forced_import>( - db: &Database, - name: &str, - version: Version, - source: impl IterableCloneableVec, - indexes: &indexes::Vecs, - ) -> Result { - let v = version + VERSION; - - let height = LazyVecFrom1::transformed::(name, v, source.boxed_clone()); - - let rest = ComputedHeightDerivedDistribution::forced_import( - db, - name, - height.boxed_clone(), - v, - indexes, - )?; - - Ok(Self { height, rest }) - } - - pub fn forced_import_with_init( - db: &Database, - name: &str, - version: Version, - source: impl IterableCloneableVec, - indexes: &indexes::Vecs, - init_fn: ComputeFrom1, - ) -> Result { - let v = version + VERSION; - - let height = LazyVecFrom1::init(name, v, source.boxed_clone(), init_fn); - - let rest = ComputedHeightDerivedDistribution::forced_import( - db, - name, - height.boxed_clone(), - v, - indexes, - )?; - - Ok(Self { height, rest }) - } - - pub fn derive_from( - 
&mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.rest - .derive_from(indexes, starting_indexes, &self.height, exit) - } -} diff --git a/crates/brk_computer/src/internal/multi/from_height/lazy_full.rs b/crates/brk_computer/src/internal/multi/from_height/lazy_full.rs index ca180a6a1..0b0cd70fb 100644 --- a/crates/brk_computer/src/internal/multi/from_height/lazy_full.rs +++ b/crates/brk_computer/src/internal/multi/from_height/lazy_full.rs @@ -4,10 +4,10 @@ use brk_traversable::Traversable; use brk_types::{Height, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{IterableBoxedVec, LazyVecFrom1, UnaryTransform}; +use vecdb::{ReadableBoxedVec, LazyVecFrom1, UnaryTransform}; use crate::internal::{ - ComputedFromHeightFull, ComputedHeightDerivedFull, ComputedVecValue, LazyHeightDerivedFull, + ComputedFromHeightFull, ComputedVecValue, LazyHeightDerivedFull, NumericValue, }; #[derive(Clone, Deref, DerefMut, Traversable)] @@ -21,7 +21,7 @@ where pub height: LazyVecFrom1, #[deref] #[deref_mut] - pub rest: LazyHeightDerivedFull, + pub rest: Box>, } const VERSION: Version = Version::ZERO; @@ -31,30 +31,11 @@ where T: ComputedVecValue + JsonSchema + 'static, S1T: ComputedVecValue + JsonSchema, { - pub fn from_computed>( + pub(crate) fn from_computed>( name: &str, version: Version, - height_source: IterableBoxedVec, + height_source: ReadableBoxedVec, source: &ComputedFromHeightFull, - ) -> Self { - let v = version + VERSION; - Self { - height: LazyVecFrom1::transformed::(name, v, height_source), - rest: LazyHeightDerivedFull::from_computed::( - name, - v, - &source.dateindex, - &source.rest, - &source.difficultyepoch, - ), - } - } - - pub fn from_derived>( - name: &str, - version: Version, - height_source: IterableBoxedVec, - source: &ComputedHeightDerivedFull, ) -> Self where S1T: NumericValue, @@ -62,7 +43,8 @@ where let v = version + VERSION; Self { height: 
LazyVecFrom1::transformed::(name, v, height_source), - rest: LazyHeightDerivedFull::from_derived_computed::(name, v, source), + rest: Box::new(LazyHeightDerivedFull::from_derived_computed::(name, v, &source.rest)), } } + } diff --git a/crates/brk_computer/src/internal/multi/from_height/lazy_last.rs b/crates/brk_computer/src/internal/multi/from_height/lazy_last.rs index 8879392a1..18eabe537 100644 --- a/crates/brk_computer/src/internal/multi/from_height/lazy_last.rs +++ b/crates/brk_computer/src/internal/multi/from_height/lazy_last.rs @@ -4,10 +4,10 @@ use brk_traversable::Traversable; use brk_types::{Height, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{IterableBoxedVec, IterableCloneableVec, LazyVecFrom1, UnaryTransform}; +use vecdb::{ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom1, UnaryTransform}; use crate::internal::{ - ComputedFromHeightAndDateLast, ComputedFromHeightLast, ComputedHeightDerivedLast, + ComputedFromHeightLast, ComputedVecValue, LazyBinaryComputedFromHeightLast, LazyBinaryFromHeightLast, LazyHeightDerivedLast, NumericValue, }; @@ -22,7 +22,7 @@ where #[deref] #[deref_mut] #[traversable(flatten)] - pub rest: LazyHeightDerivedLast, + pub rest: Box>, } const VERSION: Version = Version::ZERO; @@ -32,10 +32,10 @@ where T: ComputedVecValue + JsonSchema + 'static, S1T: ComputedVecValue + JsonSchema, { - pub fn from_computed>( + pub(crate) fn from_computed>( name: &str, version: Version, - height_source: IterableBoxedVec, + height_source: ReadableBoxedVec, source: &ComputedFromHeightLast, ) -> Self where @@ -44,45 +44,14 @@ where let v = version + VERSION; Self { height: LazyVecFrom1::transformed::(name, v, height_source), - rest: LazyHeightDerivedLast::from_computed::(name, v, source), + rest: Box::new(LazyHeightDerivedLast::from_computed::(name, v, source)), } } - pub fn from_derived>( + pub(crate) fn from_lazy_binary_computed( name: &str, version: Version, - height_source: IterableBoxedVec, - source: 
&ComputedHeightDerivedLast, - ) -> Self - where - S1T: NumericValue, - { - let v = version + VERSION; - Self { - height: LazyVecFrom1::transformed::(name, v, height_source), - rest: LazyHeightDerivedLast::from_derived_computed::(name, v, source), - } - } - - pub fn from_computed_height_date>( - name: &str, - version: Version, - source: &ComputedFromHeightAndDateLast, - ) -> Self - where - S1T: PartialOrd, - { - let v = version + VERSION; - Self { - height: LazyVecFrom1::transformed::(name, v, source.height.boxed_clone()), - rest: LazyHeightDerivedLast::from_computed_height_date::(name, v, source), - } - } - - pub fn from_lazy_binary_computed( - name: &str, - version: Version, - height_source: IterableBoxedVec, + height_source: ReadableBoxedVec, source: &LazyBinaryComputedFromHeightLast, ) -> Self where @@ -94,12 +63,29 @@ where let v = version + VERSION; Self { height: LazyVecFrom1::transformed::(name, v, height_source), - rest: LazyHeightDerivedLast::from_derived_computed::(name, v, &source.rest), + rest: Box::new(LazyHeightDerivedLast::from_derived_computed::(name, v, &source.rest)), + } + } + + /// Create by unary-transforming a LazyFromHeightLast source (chaining lazy vecs). + pub(crate) fn from_lazy( + name: &str, + version: Version, + source: &LazyFromHeightLast, + ) -> Self + where + F: UnaryTransform, + S2T: ComputedVecValue + JsonSchema, + { + let v = version + VERSION; + Self { + height: LazyVecFrom1::transformed::(name, v, source.height.read_only_boxed_clone()), + rest: Box::new(LazyHeightDerivedLast::from_lazy::(name, v, &source.rest)), } } /// Create by unary-transforming a LazyBinaryFromHeightLast source. 
- pub fn from_binary( + pub(crate) fn from_binary( name: &str, version: Version, source: &LazyBinaryFromHeightLast, @@ -111,8 +97,8 @@ where { let v = version + VERSION; Self { - height: LazyVecFrom1::transformed::(name, v, source.height.boxed_clone()), - rest: LazyHeightDerivedLast::from_binary::(name, v, &source.rest), + height: LazyVecFrom1::transformed::(name, v, source.height.read_only_boxed_clone()), + rest: Box::new(LazyHeightDerivedLast::from_binary::(name, v, &source.rest)), } } } diff --git a/crates/brk_computer/src/internal/multi/from_height/lazy_price_from_cents.rs b/crates/brk_computer/src/internal/multi/from_height/lazy_price_from_cents.rs deleted file mode 100644 index 946374608..000000000 --- a/crates/brk_computer/src/internal/multi/from_height/lazy_price_from_cents.rs +++ /dev/null @@ -1,47 +0,0 @@ -//! Lazy price wrapper for height-based metrics with both USD and sats representations. -//! Derives both from a cents base metric. - -use brk_traversable::Traversable; -use brk_types::{CentsUnsigned, Dollars, SatsFract, Version}; -use derive_more::{Deref, DerefMut}; -use vecdb::IterableCloneableVec; - -use super::{ComputedFromHeightLast, LazyFromHeightLast}; -use crate::internal::{CentsUnsignedToDollars, CentsUnsignedToSatsFract}; - -/// Lazy price metric (height-based) with both USD and sats representations. -/// Both are lazily derived from a cents base metric. -/// -/// Derefs to the dollars metric, so existing code works unchanged. -/// Access `.sats` for the sats exchange rate version. 
-#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(merge)] -pub struct LazyPriceFromCents { - #[deref] - #[deref_mut] - #[traversable(flatten)] - pub dollars: LazyFromHeightLast, - pub sats: LazyFromHeightLast, -} - -impl LazyPriceFromCents { - pub fn from_computed( - name: &str, - version: Version, - cents: &ComputedFromHeightLast, - ) -> Self { - let dollars = LazyFromHeightLast::from_computed::( - name, - version, - cents.height.boxed_clone(), - cents, - ); - let sats = LazyFromHeightLast::from_computed::( - &format!("{name}_sats"), - version, - cents.height.boxed_clone(), - cents, - ); - Self { dollars, sats } - } -} diff --git a/crates/brk_computer/src/internal/multi/from_height/lazy_sum.rs b/crates/brk_computer/src/internal/multi/from_height/lazy_sum.rs index 1372edaec..5b98c6760 100644 --- a/crates/brk_computer/src/internal/multi/from_height/lazy_sum.rs +++ b/crates/brk_computer/src/internal/multi/from_height/lazy_sum.rs @@ -4,10 +4,10 @@ use brk_traversable::Traversable; use brk_types::{Height, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{IterableBoxedVec, LazyVecFrom1, UnaryTransform}; +use vecdb::{ReadableBoxedVec, LazyVecFrom1, UnaryTransform}; use crate::internal::{ - ComputedFromHeightSum, ComputedHeightDerivedSum, ComputedVecValue, LazyHeightDerivedSum, NumericValue, + ComputedFromHeightSum, ComputedVecValue, LazyHeightDerivedSum, NumericValue, }; #[derive(Clone, Deref, DerefMut, Traversable)] #[traversable(merge)] @@ -20,7 +20,7 @@ where #[deref] #[deref_mut] #[traversable(flatten)] - pub rest: LazyHeightDerivedSum, + pub rest: Box>, } const VERSION: Version = Version::ZERO; @@ -30,30 +30,11 @@ where T: ComputedVecValue + JsonSchema + 'static, S1T: ComputedVecValue + JsonSchema, { - pub fn from_computed>( + pub(crate) fn from_computed>( name: &str, version: Version, - height_source: IterableBoxedVec, + height_source: ReadableBoxedVec, source: &ComputedFromHeightSum, - ) -> Self { - let v = version + 
VERSION; - Self { - height: LazyVecFrom1::transformed::(name, v, height_source), - rest: LazyHeightDerivedSum::from_computed::( - name, - v, - &source.dateindex, - &source.rest, - &source.difficultyepoch, - ), - } - } - - pub fn from_derived>( - name: &str, - version: Version, - height_source: IterableBoxedVec, - source: &ComputedHeightDerivedSum, ) -> Self where S1T: NumericValue, @@ -61,7 +42,8 @@ where let v = version + VERSION; Self { height: LazyVecFrom1::transformed::(name, v, height_source), - rest: LazyHeightDerivedSum::from_derived_computed::(name, v, source), + rest: Box::new(LazyHeightDerivedSum::from_derived_computed::(name, v, &source.rest)), } } + } diff --git a/crates/brk_computer/src/internal/multi/from_height/lazy_sum_cum.rs b/crates/brk_computer/src/internal/multi/from_height/lazy_sum_cum.rs index 215eda718..16d074539 100644 --- a/crates/brk_computer/src/internal/multi/from_height/lazy_sum_cum.rs +++ b/crates/brk_computer/src/internal/multi/from_height/lazy_sum_cum.rs @@ -4,7 +4,7 @@ use brk_traversable::Traversable; use brk_types::{Height, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{IterableBoxedVec, LazyVecFrom1, UnaryTransform}; +use vecdb::{ReadableBoxedVec, LazyVecFrom1, UnaryTransform}; use crate::internal::{ ComputedFromHeightSumCum, ComputedHeightDerivedSumCum, ComputedVecValue, @@ -22,7 +22,7 @@ where #[deref] #[deref_mut] #[traversable(flatten)] - pub rest: LazyHeightDerivedSumCum, + pub rest: Box>, } const VERSION: Version = Version::ZERO; @@ -32,29 +32,26 @@ where T: ComputedVecValue + JsonSchema + 'static, S1T: ComputedVecValue + JsonSchema, { - pub fn from_computed>( + pub(crate) fn from_computed>( name: &str, version: Version, - height_source: IterableBoxedVec, + height_source: ReadableBoxedVec, source: &ComputedFromHeightSumCum, - ) -> Self { + ) -> Self + where + S1T: NumericValue, + { let v = version + VERSION; Self { height: LazyVecFrom1::transformed::(name, v, height_source), - rest: 
LazyHeightDerivedSumCum::from_computed::( - name, - v, - &source.dateindex, - &source.rest, - &source.difficultyepoch, - ), + rest: Box::new(LazyHeightDerivedSumCum::from_derived_computed::(name, v, &source.rest)), } } - pub fn from_derived>( + pub(crate) fn from_derived>( name: &str, version: Version, - height_source: IterableBoxedVec, + height_source: ReadableBoxedVec, source: &ComputedHeightDerivedSumCum, ) -> Self where @@ -63,7 +60,7 @@ where let v = version + VERSION; Self { height: LazyVecFrom1::transformed::(name, v, height_source), - rest: LazyHeightDerivedSumCum::from_derived_computed::(name, v, source), + rest: Box::new(LazyHeightDerivedSumCum::from_derived_computed::(name, v, source)), } } } diff --git a/crates/brk_computer/src/internal/multi/from_height/lazy_transform_distribution.rs b/crates/brk_computer/src/internal/multi/from_height/lazy_transform_distribution.rs index bac64be95..1f19f9446 100644 --- a/crates/brk_computer/src/internal/multi/from_height/lazy_transform_distribution.rs +++ b/crates/brk_computer/src/internal/multi/from_height/lazy_transform_distribution.rs @@ -5,7 +5,7 @@ use brk_traversable::Traversable; use brk_types::{Height, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{IterableBoxedVec, LazyVecFrom1, UnaryTransform}; +use vecdb::{ReadableBoxedVec, LazyVecFrom1, UnaryTransform}; use crate::internal::{ ComputedHeightDerivedFull, ComputedVecValue, LazyHeightDerivedDistribution, NumericValue, @@ -22,7 +22,7 @@ where pub height: LazyVecFrom1, #[deref] #[deref_mut] - pub rest: LazyHeightDerivedDistribution, + pub rest: Box>, } const VERSION: Version = Version::ZERO; @@ -32,10 +32,10 @@ where T: ComputedVecValue + JsonSchema + 'static, S1T: ComputedVecValue + JsonSchema, { - pub fn from_derived>( + pub(crate) fn from_derived>( name: &str, version: Version, - height_source: IterableBoxedVec, + height_source: ReadableBoxedVec, source: &ComputedHeightDerivedFull, ) -> Self where @@ -44,7 +44,7 @@ where 
let v = version + VERSION; Self { height: LazyVecFrom1::transformed::(name, v, height_source), - rest: LazyHeightDerivedDistribution::from_derived_computed::(name, v, source), + rest: Box::new(LazyHeightDerivedDistribution::from_derived_computed::(name, v, source)), } } } diff --git a/crates/brk_computer/src/internal/multi/from_height/lazy_value.rs b/crates/brk_computer/src/internal/multi/from_height/lazy_value.rs index d0cd1ed19..c942c10ec 100644 --- a/crates/brk_computer/src/internal/multi/from_height/lazy_value.rs +++ b/crates/brk_computer/src/internal/multi/from_height/lazy_value.rs @@ -1,7 +1,7 @@ use brk_traversable::Traversable; -use brk_types::{Bitcoin, Close, Dollars, Height, Sats, Version}; +use brk_types::{Bitcoin, Dollars, Height, Sats, Version}; use derive_more::{Deref, DerefMut}; -use vecdb::{BinaryTransform, IterableBoxedVec, LazyVecFrom1, LazyVecFrom2, UnaryTransform}; +use vecdb::{BinaryTransform, ReadableBoxedVec, LazyVecFrom1, LazyVecFrom2, UnaryTransform}; use crate::internal::LazyDerivedValuesHeight; @@ -18,39 +18,37 @@ pub struct LazyFromHeightValue { } impl LazyFromHeightValue { - pub fn from_sources( + pub(crate) fn from_sources( name: &str, - sats_source: IterableBoxedVec, - price_source: Option>>, + sats_source: ReadableBoxedVec, + price_source: ReadableBoxedVec, version: Version, ) -> Self where SatsTransform: UnaryTransform, BitcoinTransform: UnaryTransform, - DollarsTransform: BinaryTransform, Sats, Dollars>, + DollarsTransform: BinaryTransform, { let v = version + VERSION; let sats = LazyVecFrom1::transformed::(name, v, sats_source.clone()); - let bitcoin = LazyVecFrom1::transformed::( + let btc = LazyVecFrom1::transformed::( &format!("{name}_btc"), v, sats_source.clone(), ); - let dollars = price_source.map(|price| { - LazyVecFrom2::transformed::( - &format!("{name}_usd"), - v, - price, - sats_source, - ) - }); + let usd = LazyVecFrom2::transformed::( + &format!("{name}_usd"), + v, + price_source, + sats_source, + ); Self { sats, - 
rest: LazyDerivedValuesHeight { bitcoin, dollars }, + rest: LazyDerivedValuesHeight { btc, usd }, } } } diff --git a/crates/brk_computer/src/internal/multi/from_height/mod.rs b/crates/brk_computer/src/internal/multi/from_height/mod.rs index 39fae111f..b2f558bfa 100644 --- a/crates/brk_computer/src/internal/multi/from_height/mod.rs +++ b/crates/brk_computer/src/internal/multi/from_height/mod.rs @@ -2,6 +2,7 @@ mod binary_full; mod binary_last; mod binary_sum; mod binary_sum_cum; +mod constant; mod distribution; mod full; mod last; @@ -10,22 +11,24 @@ mod lazy_binary_computed_full; mod lazy_binary_computed_last; mod lazy_binary_computed_sum; mod lazy_binary_computed_sum_cum; -mod lazy_binary_price; mod lazy_computed_full; mod lazy_computed_sum_cum; -mod lazy_distribution; mod lazy_full; mod lazy_last; -mod lazy_price_from_cents; mod lazy_sum; mod lazy_sum_cum; mod lazy_transform_distribution; mod lazy_value; +mod percentiles; mod price; +mod ratio; +mod stddev; +mod stored_value_last; mod sum; mod sum_cum; -mod unary_last; mod value_binary; +mod value_change; +mod value_ema; mod value_full; mod value_last; mod value_lazy_binary_last; @@ -39,6 +42,7 @@ pub use binary_full::*; pub use binary_last::*; pub use binary_sum::*; pub use binary_sum_cum::*; +pub use constant::*; pub use distribution::*; pub use full::*; pub use last::*; @@ -47,22 +51,24 @@ pub use lazy_binary_computed_full::*; pub use lazy_binary_computed_last::*; pub use lazy_binary_computed_sum::*; pub use lazy_binary_computed_sum_cum::*; -pub use lazy_binary_price::*; pub use lazy_computed_full::*; pub use lazy_computed_sum_cum::*; -pub use lazy_distribution::*; pub use lazy_full::*; pub use lazy_last::*; -pub use lazy_price_from_cents::*; pub use lazy_sum::*; pub use lazy_sum_cum::*; pub use lazy_transform_distribution::*; pub use lazy_value::*; +pub use percentiles::*; pub use price::*; +pub use ratio::*; +pub use stddev::*; +pub use stored_value_last::*; pub use sum::*; pub use sum_cum::*; -pub use 
unary_last::*; pub use value_binary::*; +pub use value_change::*; +pub use value_ema::*; pub use value_full::*; pub use value_last::*; pub use value_lazy_binary_last::*; diff --git a/crates/brk_computer/src/internal/multi/from_date/percentiles.rs b/crates/brk_computer/src/internal/multi/from_height/percentiles.rs similarity index 59% rename from crates/brk_computer/src/internal/multi/from_date/percentiles.rs rename to crates/brk_computer/src/internal/multi/from_height/percentiles.rs index 7ae6018be..f5d496cc1 100644 --- a/crates/brk_computer/src/internal/multi/from_date/percentiles.rs +++ b/crates/brk_computer/src/internal/multi/from_height/percentiles.rs @@ -1,14 +1,12 @@ use brk_error::Result; use brk_traversable::{Traversable, TreeNode}; -use brk_types::{DateIndex, Dollars, StoredF32, Version}; -use rayon::prelude::*; +use brk_types::{Dollars, Height, StoredF32, Version}; use vecdb::{ - AnyExportableVec, AnyStoredVec, AnyVec, Database, EagerVec, Exit, GenericStoredVec, PcoVec, + AnyExportableVec, AnyVec, Database, ReadOnlyClone, Ro, Rw, StorageMode, WritableVec, }; -use crate::{ComputeIndexes, indexes}; - -use super::Price; +use crate::indexes; +use crate::internal::{ComputedFromHeightLast, Price, PriceFromHeight}; pub const PERCENTILES: [u8; 19] = [ 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70, 75, 80, 85, 90, 95, @@ -17,7 +15,7 @@ pub const PERCENTILES_LEN: usize = PERCENTILES.len(); /// Compute spot percentile rank by interpolating within percentile bands. /// Returns a value between 0 and 100 indicating where spot sits in the distribution. 
-pub fn compute_spot_percentile_rank(percentile_prices: &[Dollars; PERCENTILES_LEN], spot: Dollars) -> StoredF32 { +pub(crate) fn compute_spot_percentile_rank(percentile_prices: &[Dollars; PERCENTILES_LEN], spot: Dollars) -> StoredF32 { if spot.is_nan() || percentile_prices[0].is_nan() { return StoredF32::NAN; } @@ -68,15 +66,14 @@ pub fn compute_spot_percentile_rank(percentile_prices: &[Dollars; PERCENTILES_LE StoredF32::NAN } -#[derive(Clone)] -pub struct PercentilesVecs { - pub vecs: [Option; PERCENTILES_LEN], +pub struct PercentilesVecs { + pub vecs: [Option>>; PERCENTILES_LEN], } -const VERSION: Version = Version::ZERO; +const VERSION: Version = Version::ONE; impl PercentilesVecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, prefix: &str, version: Version, @@ -86,85 +83,60 @@ impl PercentilesVecs { let vecs = PERCENTILES.map(|p| { compute.then(|| { let metric_name = format!("{prefix}_pct{p:02}"); - Price::forced_import(db, &metric_name, version + VERSION, indexes).unwrap() + PriceFromHeight::forced_import(db, &metric_name, version + VERSION, indexes).unwrap() }) }); Ok(Self { vecs }) } - /// Get minimum length across dateindex-indexed vectors written in block loop. - pub fn min_stateful_dateindex_len(&self) -> usize { + /// Get minimum length across height-indexed vectors written in block loop. + pub(crate) fn min_stateful_height_len(&self) -> usize { self.vecs .iter() .filter_map(|v| v.as_ref()) - .map(|v| v.dateindex.len()) + .map(|v| v.height.len()) .min() .unwrap_or(usize::MAX) } - /// Push percentile prices at date boundary. - /// Only called when dateindex is Some (last height of the day). - pub fn truncate_push( + /// Push percentile prices at this height. 
+ pub(crate) fn truncate_push( &mut self, - dateindex: DateIndex, + height: Height, percentile_prices: &[Dollars; PERCENTILES_LEN], ) -> Result<()> { for (i, vec) in self.vecs.iter_mut().enumerate() { if let Some(v) = vec { - v.dateindex.truncate_push(dateindex, percentile_prices[i])?; + v.height.truncate_push(height, percentile_prices[i])?; } } Ok(()) } - pub fn compute_rest(&mut self, starting_indexes: &ComputeIndexes, exit: &Exit) -> Result<()> { - for vec in self.vecs.iter_mut().flatten() { - vec.compute_rest( - starting_indexes, - exit, - None::<&EagerVec>>, - )?; - } - Ok(()) - } - - pub fn get(&self, percentile: u8) -> Option<&Price> { - PERCENTILES - .iter() - .position(|&p| p == percentile) - .and_then(|i| self.vecs[i].as_ref()) - } -} - -impl PercentilesVecs { - pub fn write(&mut self) -> Result<()> { - for vec in self.vecs.iter_mut().flatten() { - vec.dateindex.write()?; - } - Ok(()) - } - - /// Returns a parallel iterator over all vecs for parallel writing. - pub fn par_iter_mut(&mut self) -> impl ParallelIterator { - self.vecs - .iter_mut() - .flatten() - .map(|v| &mut v.dateindex as &mut dyn AnyStoredVec) - .collect::>() - .into_par_iter() - } - /// Validate computed versions or reset if mismatched. 
- pub fn validate_computed_version_or_reset(&mut self, version: Version) -> Result<()> { + pub(crate) fn validate_computed_version_or_reset(&mut self, version: Version) -> Result<()> { for vec in self.vecs.iter_mut().flatten() { - vec.dateindex.validate_computed_version_or_reset(version)?; + vec.height.validate_computed_version_or_reset(version)?; } Ok(()) } } -impl Traversable for PercentilesVecs { +impl ReadOnlyClone for PercentilesVecs { + type ReadOnly = PercentilesVecs; + + fn read_only_clone(&self) -> Self::ReadOnly { + PercentilesVecs { + vecs: self.vecs.each_ref().map(|v| v.as_ref().map(|p| p.read_only_clone())), + } + } +} + +impl Traversable for PercentilesVecs +where + Price>: Traversable, +{ fn to_tree_node(&self) -> TreeNode { TreeNode::Branch( PERCENTILES diff --git a/crates/brk_computer/src/internal/multi/from_height/price.rs b/crates/brk_computer/src/internal/multi/from_height/price.rs index b9a249678..481c4e87e 100644 --- a/crates/brk_computer/src/internal/multi/from_height/price.rs +++ b/crates/brk_computer/src/internal/multi/from_height/price.rs @@ -1,49 +1,193 @@ -//! Price wrapper for height-based metrics with both USD and sats representations. +//! Generic price wrapper with both USD and sats representations. +//! +//! All prices use this single struct with different USD types. +//! Sats is always lazily derived from USD via DollarsToSatsFract. 
use brk_error::Result; use brk_traversable::Traversable; -use brk_types::{Dollars, SatsFract, Version}; +use brk_types::{Cents, Dollars, SatsFract, Version}; use derive_more::{Deref, DerefMut}; -use vecdb::Database; +use schemars::JsonSchema; +use vecdb::{BinaryTransform, Database, ReadableCloneableVec, UnaryTransform}; -use super::{ComputedFromHeightLast, LazyUnaryFromHeightLast}; -use crate::{indexes, internal::DollarsToSatsFract}; +use super::{ComputedFromHeightLast, LazyBinaryFromHeightLast, LazyFromHeightLast}; +use crate::{ + indexes, + internal::{ComputedVecValue, DollarsToSatsFract, NumericValue}, +}; -/// Price metric (height-based) with both USD and sats representations. +/// Generic price metric with both USD and sats representations. /// -/// Derefs to the dollars metric, so existing code works unchanged. +/// Derefs to the usd metric, so existing code works unchanged. /// Access `.sats` for the sats exchange rate version. #[derive(Clone, Deref, DerefMut, Traversable)] #[traversable(merge)] -pub struct PriceFromHeight { +pub struct Price { #[deref] #[deref_mut] #[traversable(flatten)] - pub dollars: ComputedFromHeightLast, - pub sats: LazyUnaryFromHeightLast, + pub usd: U, + pub sats: LazyFromHeightLast, } -impl PriceFromHeight { - pub fn forced_import( +// --- PriceFromHeight --- + +pub type PriceFromHeight = Price>; + +impl Price> { + pub(crate) fn forced_import( db: &Database, name: &str, version: Version, indexes: &indexes::Vecs, ) -> Result { - let dollars = ComputedFromHeightLast::forced_import(db, name, version, indexes)?; - Ok(Self::from_computed(name, version, dollars)) + let usd = ComputedFromHeightLast::forced_import(db, name, version, indexes)?; + Ok(Self::from_computed(name, version, usd)) } - pub fn from_computed( + pub(crate) fn from_computed( name: &str, version: Version, - dollars: ComputedFromHeightLast, + usd: ComputedFromHeightLast, ) -> Self { - let sats = LazyUnaryFromHeightLast::from_computed_last::( + let sats = 
LazyFromHeightLast::from_computed::( &format!("{name}_sats"), version, - &dollars, + usd.height.read_only_boxed_clone(), + &usd, ); - Self { dollars, sats } + Self { usd, sats } + } +} + +// --- LazyPriceFromHeight --- + +pub type LazyPriceFromHeight = Price>; + +impl Price> +where + ST: ComputedVecValue + NumericValue + JsonSchema + 'static, +{ + pub(crate) fn from_computed>( + name: &str, + version: Version, + source: &ComputedFromHeightLast, + ) -> Self { + let usd = LazyFromHeightLast::from_computed::( + name, + version, + source.height.read_only_boxed_clone(), + source, + ); + let sats = LazyFromHeightLast::from_lazy::( + &format!("{name}_sats"), + version, + &usd, + ); + Self { usd, sats } + } +} + +// --- LazyPriceFromCents --- + +pub type LazyPriceFromCents = Price>; + +// --- LazyBinaryPriceFromHeight --- + +pub type LazyBinaryPriceFromHeight = Price>; + +impl Price> { + /// Create from a PriceFromHeight (source1) and a LazyPriceFromCents (source2). + pub(crate) fn from_price_and_lazy_price>( + name: &str, + version: Version, + source1: &PriceFromHeight, + source2: &LazyPriceFromCents, + ) -> Self { + let usd = LazyBinaryFromHeightLast::from_block_last_and_lazy_block_last::( + name, + version, + &source1.usd, + &source2.usd, + ); + + let sats = LazyFromHeightLast::from_binary::( + &format!("{name}_sats"), + version, + &usd, + ); + + Self { usd, sats } + } + + /// Create from a LazyPriceFromCents (source1) and a PriceFromHeight (source2). 
+ pub(crate) fn from_lazy_price_and_price>( + name: &str, + version: Version, + source1: &LazyPriceFromCents, + source2: &PriceFromHeight, + ) -> Self { + let usd = LazyBinaryFromHeightLast::from_lazy_block_last_and_block_last::( + name, + version, + &source1.usd, + &source2.usd, + ); + + let sats = LazyFromHeightLast::from_binary::( + &format!("{name}_sats"), + version, + &usd, + ); + + Self { usd, sats } + } +} + +// --- Price bands (for stddev/ratio) --- + +impl Price> +where + S2T: ComputedVecValue + NumericValue + JsonSchema, +{ + /// Create a price band from a computed price and a computed band. + pub(crate) fn from_computed_price_and_band>( + name: &str, + version: Version, + price: &ComputedFromHeightLast, + band: &ComputedFromHeightLast, + ) -> Self { + let usd = LazyBinaryFromHeightLast::from_computed_last::(name, version, price, band); + + let sats = LazyFromHeightLast::from_binary::( + &format!("{name}_sats"), + version, + &usd, + ); + + Self { usd, sats } + } + + /// Create a price band from a lazy price and a computed band. 
+ pub(crate) fn from_lazy_price_and_band, S1T>( + name: &str, + version: Version, + price: &LazyFromHeightLast, + band: &ComputedFromHeightLast, + ) -> Self + where + S1T: ComputedVecValue + JsonSchema, + { + let usd = LazyBinaryFromHeightLast::from_lazy_block_last_and_block_last::( + name, version, price, band, + ); + + let sats = LazyFromHeightLast::from_binary::( + &format!("{name}_sats"), + version, + &usd, + ); + + Self { usd, sats } } } diff --git a/crates/brk_computer/src/internal/multi/from_height/ratio.rs b/crates/brk_computer/src/internal/multi/from_height/ratio.rs new file mode 100644 index 000000000..cc369e004 --- /dev/null +++ b/crates/brk_computer/src/internal/multi/from_height/ratio.rs @@ -0,0 +1,418 @@ +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{Dollars, Height, StoredF32, Version}; +use schemars::JsonSchema; +use vecdb::{ + AnyStoredVec, AnyVec, Database, EagerVec, Exit, WritableVec, ReadableVec, + PcoVec, Rw, StorageMode, VecIndex, +}; + +use crate::{ + blocks, indexes, ComputeIndexes, + internal::{ + ComputedFromHeightStdDev, ComputedVecValue, LazyBinaryFromHeightLast, + LazyFromHeightLast, Price, PriceTimesRatio, StandardDeviationVecsOptions, + }, + prices, + utils::get_percentile, +}; + +use super::{ComputedFromHeightLast, PriceFromHeight}; + +#[derive(Traversable)] +pub struct ComputedFromHeightRatio { + pub price: Option>>, + + pub ratio: ComputedFromHeightLast, + pub ratio_1w_sma: Option>, + pub ratio_1m_sma: Option>, + pub ratio_pct99: Option>, + pub ratio_pct98: Option>, + pub ratio_pct95: Option>, + pub ratio_pct5: Option>, + pub ratio_pct2: Option>, + pub ratio_pct1: Option>, + pub ratio_pct99_usd: Option>>, + pub ratio_pct98_usd: Option>>, + pub ratio_pct95_usd: Option>>, + pub ratio_pct5_usd: Option>>, + pub ratio_pct2_usd: Option>>, + pub ratio_pct1_usd: Option>>, + + pub ratio_sd: Option>, + pub ratio_4y_sd: Option>, + pub ratio_2y_sd: Option>, + pub ratio_1y_sd: Option>, +} + +const VERSION: Version = 
Version::TWO; + +impl ComputedFromHeightRatio { + #[allow(clippy::too_many_arguments)] + pub(crate) fn forced_import( + db: &Database, + name: &str, + metric_price: Option<&ComputedFromHeightLast>, + version: Version, + indexes: &indexes::Vecs, + extended: bool, + ) -> Result { + let v = version + VERSION; + + macro_rules! import { + ($suffix:expr) => { + ComputedFromHeightLast::forced_import( + db, + &format!("{name}_{}", $suffix), + v, + indexes, + ) + .unwrap() + }; + } + + // Only compute price internally when metric_price is None + let price = metric_price + .is_none() + .then(|| PriceFromHeight::forced_import(db, name, v, indexes).unwrap()); + + // Use provided metric_price, falling back to internally computed price + let effective_price = metric_price.or(price.as_ref().map(|p| &p.usd)); + + macro_rules! import_sd { + ($suffix:expr, $days:expr) => { + ComputedFromHeightStdDev::forced_import( + db, + &format!("{name}_{}", $suffix), + $days, + v, + indexes, + StandardDeviationVecsOptions::default().add_all(), + effective_price, + ) + .unwrap() + }; + } + + let ratio_pct99 = extended.then(|| import!("ratio_pct99")); + let ratio_pct98 = extended.then(|| import!("ratio_pct98")); + let ratio_pct95 = extended.then(|| import!("ratio_pct95")); + let ratio_pct5 = extended.then(|| import!("ratio_pct5")); + let ratio_pct2 = extended.then(|| import!("ratio_pct2")); + let ratio_pct1 = extended.then(|| import!("ratio_pct1")); + + macro_rules! 
lazy_usd { + ($ratio:expr, $suffix:expr) => { + effective_price.zip($ratio.as_ref()).map(|(mp, r)| { + Price::from_computed_price_and_band::( + &format!("{name}_{}", $suffix), + v, + mp, + r, + ) + }) + }; + } + + Ok(Self { + ratio: import!("ratio"), + ratio_1w_sma: extended.then(|| import!("ratio_1w_sma")), + ratio_1m_sma: extended.then(|| import!("ratio_1m_sma")), + ratio_sd: extended.then(|| import_sd!("ratio", usize::MAX)), + ratio_1y_sd: extended.then(|| import_sd!("ratio_1y", 365)), + ratio_2y_sd: extended.then(|| import_sd!("ratio_2y", 2 * 365)), + ratio_4y_sd: extended.then(|| import_sd!("ratio_4y", 4 * 365)), + ratio_pct99_usd: lazy_usd!(&ratio_pct99, "ratio_pct99_usd"), + ratio_pct98_usd: lazy_usd!(&ratio_pct98, "ratio_pct98_usd"), + ratio_pct95_usd: lazy_usd!(&ratio_pct95, "ratio_pct95_usd"), + ratio_pct5_usd: lazy_usd!(&ratio_pct5, "ratio_pct5_usd"), + ratio_pct2_usd: lazy_usd!(&ratio_pct2, "ratio_pct2_usd"), + ratio_pct1_usd: lazy_usd!(&ratio_pct1, "ratio_pct1_usd"), + price, + ratio_pct99, + ratio_pct98, + ratio_pct95, + ratio_pct5, + ratio_pct2, + ratio_pct1, + }) + } + + pub(crate) fn forced_import_from_lazy( + db: &Database, + name: &str, + metric_price: &LazyFromHeightLast, + version: Version, + indexes: &indexes::Vecs, + extended: bool, + ) -> Result { + let v = version + VERSION; + + macro_rules! import { + ($suffix:expr) => { + ComputedFromHeightLast::forced_import( + db, + &format!("{name}_{}", $suffix), + v, + indexes, + ) + .unwrap() + }; + } + + macro_rules! 
import_sd { + ($suffix:expr, $days:expr) => { + ComputedFromHeightStdDev::forced_import_from_lazy( + db, + &format!("{name}_{}", $suffix), + $days, + v, + indexes, + StandardDeviationVecsOptions::default().add_all(), + Some(metric_price), + ) + .unwrap() + }; + } + + let ratio_pct99 = extended.then(|| import!("ratio_pct99")); + let ratio_pct98 = extended.then(|| import!("ratio_pct98")); + let ratio_pct95 = extended.then(|| import!("ratio_pct95")); + let ratio_pct5 = extended.then(|| import!("ratio_pct5")); + let ratio_pct2 = extended.then(|| import!("ratio_pct2")); + let ratio_pct1 = extended.then(|| import!("ratio_pct1")); + + macro_rules! lazy_usd { + ($ratio:expr, $suffix:expr) => { + $ratio.as_ref().map(|r| { + Price::from_lazy_price_and_band::( + &format!("{name}_{}", $suffix), + v, + metric_price, + r, + ) + }) + }; + } + + Ok(Self { + ratio: import!("ratio"), + ratio_1w_sma: extended.then(|| import!("ratio_1w_sma")), + ratio_1m_sma: extended.then(|| import!("ratio_1m_sma")), + ratio_sd: extended.then(|| import_sd!("ratio", usize::MAX)), + ratio_1y_sd: extended.then(|| import_sd!("ratio_1y", 365)), + ratio_2y_sd: extended.then(|| import_sd!("ratio_2y", 2 * 365)), + ratio_4y_sd: extended.then(|| import_sd!("ratio_4y", 4 * 365)), + ratio_pct99_usd: lazy_usd!(&ratio_pct99, "ratio_pct99_usd"), + ratio_pct98_usd: lazy_usd!(&ratio_pct98, "ratio_pct98_usd"), + ratio_pct95_usd: lazy_usd!(&ratio_pct95, "ratio_pct95_usd"), + ratio_pct5_usd: lazy_usd!(&ratio_pct5, "ratio_pct5_usd"), + ratio_pct2_usd: lazy_usd!(&ratio_pct2, "ratio_pct2_usd"), + ratio_pct1_usd: lazy_usd!(&ratio_pct1, "ratio_pct1_usd"), + price: None, + ratio_pct99, + ratio_pct98, + ratio_pct95, + ratio_pct5, + ratio_pct2, + ratio_pct1, + }) + } + + /// Compute all: computes price at height level, then ratio + rest. 
+ pub(crate) fn compute_all( + &mut self, + blocks: &blocks::Vecs, + prices: &prices::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + mut compute: F, + ) -> Result<()> + where + F: FnMut(&mut EagerVec>) -> Result<()>, + { + compute(&mut self.price.as_mut().unwrap().usd.height)?; + + let price_opt: Option<&EagerVec>> = None; + self.compute_rest(blocks, prices, starting_indexes, exit, price_opt) + } + + /// Compute ratio and derived metrics from an externally-provided or internal price. + pub(crate) fn compute_rest( + &mut self, + blocks: &blocks::Vecs, + prices: &prices::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + price_opt: Option<&impl ReadableVec>, + ) -> Result<()> { + let close_price = &prices.usd.price; + + let price = price_opt.unwrap_or_else(|| unsafe { + std::mem::transmute(&self.price.as_ref().unwrap().usd.height) + }); + + // Compute ratio = close_price / metric_price at height level + self.ratio.height.compute_transform2( + starting_indexes.height, + close_price, + price, + |(i, close, price, ..)| { + if price == Dollars::ZERO { + (i, StoredF32::from(1.0)) + } else { + (i, StoredF32::from(close / price)) + } + }, + exit, + )?; + + if self.ratio_1w_sma.is_none() { + return Ok(()); + } + + // SMA using lookback vecs + self.ratio_1w_sma + .as_mut() + .unwrap() + .height + .compute_rolling_average( + starting_indexes.height, + &blocks.count.height_1w_ago, + &self.ratio.height, + exit, + )?; + + self.ratio_1m_sma + .as_mut() + .unwrap() + .height + .compute_rolling_average( + starting_indexes.height, + &blocks.count.height_1m_ago, + &self.ratio.height, + exit, + )?; + + // Percentiles: insert into sorted array on day boundaries + let ratio_version = self.ratio.height.version(); + self.mut_ratio_vecs() + .iter_mut() + .try_for_each(|v| -> Result<()> { + v.validate_computed_version_or_reset(ratio_version)?; + Ok(()) + })?; + + let starting_height = self + .mut_ratio_vecs() + .iter() + .map(|v| Height::from(v.len())) + .min() + 
.unwrap() + .min(starting_indexes.height); + + let start = starting_height.to_usize(); + let day_start = &blocks.count.height_24h_ago; + + // Collect sorted history up to starting point (one per day boundary) + let mut sorted = { + let ratio_data = self.ratio.height.collect_range_at(0, start); + let day_start_hist = day_start.collect_range_at(0, start); + let mut sorted: Vec = Vec::new(); + let mut last_day_start = Height::from(0_usize); + for (h, ratio) in ratio_data.into_iter().enumerate() { + let cur_day_start = day_start_hist[h]; + if h == 0 || cur_day_start != last_day_start { + let pos = sorted.binary_search(&ratio).unwrap_or_else(|p| p); + sorted.insert(pos, ratio); + last_day_start = cur_day_start; + } + } + sorted + }; + + let pct1_vec = &mut self.ratio_pct1.as_mut().unwrap().height; + let pct2_vec = &mut self.ratio_pct2.as_mut().unwrap().height; + let pct5_vec = &mut self.ratio_pct5.as_mut().unwrap().height; + let pct95_vec = &mut self.ratio_pct95.as_mut().unwrap().height; + let pct98_vec = &mut self.ratio_pct98.as_mut().unwrap().height; + let pct99_vec = &mut self.ratio_pct99.as_mut().unwrap().height; + + let ratio_len = self.ratio.height.len(); + let ratio_data = self.ratio.height.collect_range_at(start, ratio_len); + let mut last_day_start = if start > 0 { + day_start + .collect_one_at(start - 1) + .unwrap_or(Height::from(0_usize)) + } else { + Height::from(0_usize) + }; + + let day_start_data = day_start.collect_range_at(start, ratio_len); + + for (offset, ratio) in ratio_data.into_iter().enumerate() { + let index = start + offset; + + // Insert into sorted history on day boundaries + let cur_day_start = day_start_data[offset]; + if index == 0 || cur_day_start != last_day_start { + let pos = sorted.binary_search(&ratio).unwrap_or_else(|p| p); + sorted.insert(pos, ratio); + last_day_start = cur_day_start; + } + + if sorted.is_empty() { + pct1_vec.truncate_push_at(index, StoredF32::NAN)?; + pct2_vec.truncate_push_at(index, StoredF32::NAN)?; + 
pct5_vec.truncate_push_at(index, StoredF32::NAN)?; + pct95_vec.truncate_push_at(index, StoredF32::NAN)?; + pct98_vec.truncate_push_at(index, StoredF32::NAN)?; + pct99_vec.truncate_push_at(index, StoredF32::NAN)?; + } else { + pct1_vec.truncate_push_at(index, get_percentile(&sorted, 0.01))?; + pct2_vec.truncate_push_at(index, get_percentile(&sorted, 0.02))?; + pct5_vec.truncate_push_at(index, get_percentile(&sorted, 0.05))?; + pct95_vec.truncate_push_at(index, get_percentile(&sorted, 0.95))?; + pct98_vec.truncate_push_at(index, get_percentile(&sorted, 0.98))?; + pct99_vec.truncate_push_at(index, get_percentile(&sorted, 0.99))?; + } + } + + { + let _lock = exit.lock(); + self.mut_ratio_vecs() + .into_iter() + .try_for_each(|v| v.flush())?; + } + + // Compute stddev at height level + macro_rules! compute_sd { + ($($field:ident),*) => { + $(self.$field.as_mut().unwrap().compute_all( + blocks, starting_indexes, exit, &self.ratio.height, + )?;)* + }; + } + compute_sd!(ratio_sd, ratio_4y_sd, ratio_2y_sd, ratio_1y_sd); + + Ok(()) + } + + fn mut_ratio_vecs(&mut self) -> Vec<&mut EagerVec>> { + macro_rules! 
collect_vecs { + ($($field:ident),*) => {{ + let mut vecs = Vec::with_capacity(6); + $(if let Some(v) = self.$field.as_mut() { vecs.push(&mut v.height); })* + vecs + }}; + } + collect_vecs!( + ratio_pct1, + ratio_pct2, + ratio_pct5, + ratio_pct95, + ratio_pct98, + ratio_pct99 + ) + } +} diff --git a/crates/brk_computer/src/internal/multi/from_height/stddev.rs b/crates/brk_computer/src/internal/multi/from_height/stddev.rs new file mode 100644 index 000000000..ae747904d --- /dev/null +++ b/crates/brk_computer/src/internal/multi/from_height/stddev.rs @@ -0,0 +1,501 @@ +use std::mem; + +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{Dollars, Height, StoredF32, Version}; +use schemars::JsonSchema; +use vecdb::{ + AnyStoredVec, AnyVec, Database, EagerVec, Exit, WritableVec, ReadableVec, + PcoVec, Rw, StorageMode, VecIndex, +}; + +use crate::{blocks, indexes, ComputeIndexes}; + +use crate::internal::{ + ComputedFromHeightLast, ComputedVecValue, LazyBinaryFromHeightLast, LazyFromHeightLast, + Price, PriceTimesRatio, +}; + +#[derive(Default)] +pub struct StandardDeviationVecsOptions { + zscore: bool, + bands: bool, + price_bands: bool, +} + +impl StandardDeviationVecsOptions { + pub(crate) fn add_all(mut self) -> Self { + self.zscore = true; + self.bands = true; + self.price_bands = true; + self + } + + pub(crate) fn zscore(&self) -> bool { + self.zscore + } + + pub(crate) fn bands(&self) -> bool { + self.bands + } + + pub(crate) fn price_bands(&self) -> bool { + self.price_bands + } +} + +#[derive(Traversable)] +pub struct ComputedFromHeightStdDev { + days: usize, + + pub sma: Option>, + + pub sd: ComputedFromHeightLast, + + pub zscore: Option>, + + pub p0_5sd: Option>, + pub p1sd: Option>, + pub p1_5sd: Option>, + pub p2sd: Option>, + pub p2_5sd: Option>, + pub p3sd: Option>, + pub m0_5sd: Option>, + pub m1sd: Option>, + pub m1_5sd: Option>, + pub m2sd: Option>, + pub m2_5sd: Option>, + pub m3sd: Option>, + + pub _0sd_usd: Option>>, + pub 
p0_5sd_usd: Option>>, + pub p1sd_usd: Option>>, + pub p1_5sd_usd: Option>>, + pub p2sd_usd: Option>>, + pub p2_5sd_usd: Option>>, + pub p3sd_usd: Option>>, + pub m0_5sd_usd: Option>>, + pub m1sd_usd: Option>>, + pub m1_5sd_usd: Option>>, + pub m2sd_usd: Option>>, + pub m2_5sd_usd: Option>>, + pub m3sd_usd: Option>>, +} + +impl ComputedFromHeightStdDev { + #[allow(clippy::too_many_arguments)] + pub(crate) fn forced_import( + db: &Database, + name: &str, + days: usize, + parent_version: Version, + indexes: &indexes::Vecs, + options: StandardDeviationVecsOptions, + metric_price: Option<&ComputedFromHeightLast>, + ) -> Result { + let version = parent_version + Version::TWO; + + macro_rules! import { + ($suffix:expr) => { + ComputedFromHeightLast::forced_import( + db, + &format!("{name}_{}", $suffix), + version, + indexes, + ) + .unwrap() + }; + } + + let sma_vec = Some(import!("sma")); + let p0_5sd = options.bands().then(|| import!("p0_5sd")); + let p1sd = options.bands().then(|| import!("p1sd")); + let p1_5sd = options.bands().then(|| import!("p1_5sd")); + let p2sd = options.bands().then(|| import!("p2sd")); + let p2_5sd = options.bands().then(|| import!("p2_5sd")); + let p3sd = options.bands().then(|| import!("p3sd")); + let m0_5sd = options.bands().then(|| import!("m0_5sd")); + let m1sd = options.bands().then(|| import!("m1sd")); + let m1_5sd = options.bands().then(|| import!("m1_5sd")); + let m2sd = options.bands().then(|| import!("m2sd")); + let m2_5sd = options.bands().then(|| import!("m2_5sd")); + let m3sd = options.bands().then(|| import!("m3sd")); + + // Create USD bands using the metric price (the denominator of the ratio). + // This converts ratio bands back to USD: usd_band = metric_price * ratio_band + macro_rules! 
lazy_usd { + ($band:expr, $suffix:expr) => { + if !options.price_bands() { + None + } else if let Some(mp) = metric_price { + $band.as_ref().map(|b| { + Price::from_computed_price_and_band::( + &format!("{name}_{}", $suffix), + version, + mp, + b, + ) + }) + } else { + None + } + }; + } + + Ok(Self { + days, + sd: import!("sd"), + zscore: options.zscore().then(|| import!("zscore")), + // Lazy USD vecs + _0sd_usd: lazy_usd!(&sma_vec, "0sd_usd"), + p0_5sd_usd: lazy_usd!(&p0_5sd, "p0_5sd_usd"), + p1sd_usd: lazy_usd!(&p1sd, "p1sd_usd"), + p1_5sd_usd: lazy_usd!(&p1_5sd, "p1_5sd_usd"), + p2sd_usd: lazy_usd!(&p2sd, "p2sd_usd"), + p2_5sd_usd: lazy_usd!(&p2_5sd, "p2_5sd_usd"), + p3sd_usd: lazy_usd!(&p3sd, "p3sd_usd"), + m0_5sd_usd: lazy_usd!(&m0_5sd, "m0_5sd_usd"), + m1sd_usd: lazy_usd!(&m1sd, "m1sd_usd"), + m1_5sd_usd: lazy_usd!(&m1_5sd, "m1_5sd_usd"), + m2sd_usd: lazy_usd!(&m2sd, "m2sd_usd"), + m2_5sd_usd: lazy_usd!(&m2_5sd, "m2_5sd_usd"), + m3sd_usd: lazy_usd!(&m3sd, "m3sd_usd"), + // Stored band sources + sma: sma_vec, + p0_5sd, + p1sd, + p1_5sd, + p2sd, + p2_5sd, + p3sd, + m0_5sd, + m1sd, + m1_5sd, + m2sd, + m2_5sd, + m3sd, + }) + } + + #[allow(clippy::too_many_arguments)] + pub(crate) fn forced_import_from_lazy( + db: &Database, + name: &str, + days: usize, + parent_version: Version, + indexes: &indexes::Vecs, + options: StandardDeviationVecsOptions, + metric_price: Option<&LazyFromHeightLast>, + ) -> Result { + let version = parent_version + Version::TWO; + + macro_rules! 
import { + ($suffix:expr) => { + ComputedFromHeightLast::forced_import( + db, + &format!("{name}_{}", $suffix), + version, + indexes, + ) + .unwrap() + }; + } + + let sma_vec = Some(import!("sma")); + let p0_5sd = options.bands().then(|| import!("p0_5sd")); + let p1sd = options.bands().then(|| import!("p1sd")); + let p1_5sd = options.bands().then(|| import!("p1_5sd")); + let p2sd = options.bands().then(|| import!("p2sd")); + let p2_5sd = options.bands().then(|| import!("p2_5sd")); + let p3sd = options.bands().then(|| import!("p3sd")); + let m0_5sd = options.bands().then(|| import!("m0_5sd")); + let m1sd = options.bands().then(|| import!("m1sd")); + let m1_5sd = options.bands().then(|| import!("m1_5sd")); + let m2sd = options.bands().then(|| import!("m2sd")); + let m2_5sd = options.bands().then(|| import!("m2_5sd")); + let m3sd = options.bands().then(|| import!("m3sd")); + + // For lazy metric price, use from_lazy_block_last_and_block_last. + // PriceTimesRatio: BinaryTransform + // source1 = metric_price (Dollars, lazy), source2 = band (StoredF32, computed) + macro_rules! 
lazy_usd { + ($band:expr, $suffix:expr) => { + metric_price + .zip($band.as_ref()) + .filter(|_| options.price_bands()) + .map(|(mp, b)| { + Price::from_lazy_price_and_band::( + &format!("{name}_{}", $suffix), + version, + mp, + b, + ) + }) + }; + } + + Ok(Self { + days, + sd: import!("sd"), + zscore: options.zscore().then(|| import!("zscore")), + _0sd_usd: lazy_usd!(&sma_vec, "0sd_usd"), + p0_5sd_usd: lazy_usd!(&p0_5sd, "p0_5sd_usd"), + p1sd_usd: lazy_usd!(&p1sd, "p1sd_usd"), + p1_5sd_usd: lazy_usd!(&p1_5sd, "p1_5sd_usd"), + p2sd_usd: lazy_usd!(&p2sd, "p2sd_usd"), + p2_5sd_usd: lazy_usd!(&p2_5sd, "p2_5sd_usd"), + p3sd_usd: lazy_usd!(&p3sd, "p3sd_usd"), + m0_5sd_usd: lazy_usd!(&m0_5sd, "m0_5sd_usd"), + m1sd_usd: lazy_usd!(&m1sd, "m1sd_usd"), + m1_5sd_usd: lazy_usd!(&m1_5sd, "m1_5sd_usd"), + m2sd_usd: lazy_usd!(&m2sd, "m2sd_usd"), + m2_5sd_usd: lazy_usd!(&m2_5sd, "m2_5sd_usd"), + m3sd_usd: lazy_usd!(&m3sd, "m3sd_usd"), + sma: sma_vec, + p0_5sd, + p1sd, + p1_5sd, + p2sd, + p2_5sd, + p3sd, + m0_5sd, + m1sd, + m1_5sd, + m2sd, + m2_5sd, + m3sd, + }) + } + + pub(crate) fn compute_all( + &mut self, + blocks: &blocks::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + source: &impl ReadableVec, + ) -> Result<()> { + // 1. 
Compute SMA using the appropriate lookback vec (or full-history SMA) + if self.days != usize::MAX { + let window_starts = blocks.count.start_vec(self.days); + self.sma + .as_mut() + .unwrap() + .height + .compute_rolling_average( + starting_indexes.height, + window_starts, + source, + exit, + )?; + } else { + // Full history SMA (days == usize::MAX) + self.sma + .as_mut() + .unwrap() + .height + .compute_sma_( + starting_indexes.height, + source, + self.days, + exit, + None, + )?; + } + + let sma_opt: Option<&EagerVec>> = None; + self.compute_rest(blocks, starting_indexes, exit, sma_opt, source) + } + + pub(crate) fn compute_rest( + &mut self, + blocks: &blocks::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + sma_opt: Option<&impl ReadableVec>, + source: &impl ReadableVec, + ) -> Result<()> { + let sma = sma_opt + .unwrap_or_else(|| unsafe { mem::transmute(&self.sma.as_ref().unwrap().height) }); + + let source_version = source.version(); + + self.mut_stateful_height_vecs() + .try_for_each(|v| -> Result<()> { + v.validate_computed_version_or_reset(source_version)?; + Ok(()) + })?; + + let starting_height = self + .mut_stateful_height_vecs() + .map(|v| Height::from(v.len())) + .min() + .unwrap() + .min(starting_indexes.height); + + // Reconstruct running statistics up to starting point. + // We accumulate one data point per day boundary, tracking sum and sum_sq + // for O(1) per-height SD computation (instead of O(n) sorted-array scan). 
+ let day_start = &blocks.count.height_24h_ago; + let start = starting_height.to_usize(); + + let mut n: usize = 0; + let mut welford_sum: f64 = 0.0; + let mut welford_sum_sq: f64 = 0.0; + if start > 0 { + let day_start_hist = day_start.collect_range_at(0, start); + let source_hist = source.collect_range_at(0, start); + let mut last_ds = Height::from(0_usize); + for h in 0..start { + let cur_ds = day_start_hist[h]; + if h == 0 || cur_ds != last_ds { + let val = *source_hist[h] as f64; + n += 1; + welford_sum += val; + welford_sum_sq += val * val; + last_ds = cur_ds; + } + } + } + + macro_rules! band_ref { + ($field:ident) => { + self.$field.as_mut().map(|c| &mut c.height) + }; + } + let mut p0_5sd = band_ref!(p0_5sd); + let mut p1sd = band_ref!(p1sd); + let mut p1_5sd = band_ref!(p1_5sd); + let mut p2sd = band_ref!(p2sd); + let mut p2_5sd = band_ref!(p2_5sd); + let mut p3sd = band_ref!(p3sd); + let mut m0_5sd = band_ref!(m0_5sd); + let mut m1sd = band_ref!(m1sd); + let mut m1_5sd = band_ref!(m1_5sd); + let mut m2sd = band_ref!(m2sd); + let mut m2_5sd = band_ref!(m2_5sd); + let mut m3sd = band_ref!(m3sd); + + let source_len = source.len(); + let source_data = source.collect_range_at(start, source_len); + let sma_data = sma.collect_range_at(start, sma.len()); + let mut last_day_start = if start > 0 { + day_start + .collect_one_at(start - 1) + .unwrap_or(Height::from(0_usize)) + } else { + Height::from(0_usize) + }; + + let day_start_data = day_start.collect_range_at(start, source_len); + + for (offset, ratio) in source_data.into_iter().enumerate() { + let index = start + offset; + // Update running statistics on day boundaries + let cur_day_start = day_start_data[offset]; + if index == 0 || cur_day_start != last_day_start { + let val = *ratio as f64; + n += 1; + welford_sum += val; + welford_sum_sq += val * val; + last_day_start = cur_day_start; + } + + let average = sma_data[offset]; + let avg_f64 = *average as f64; + + // SD = sqrt((sum_sq/n - 2*avg*sum/n + avg^2)) 
+ // This is the population SD of all daily values relative to the current SMA + let sd = if n > 0 { + let nf = n as f64; + let variance = welford_sum_sq / nf - 2.0 * avg_f64 * welford_sum / nf + avg_f64 * avg_f64; + StoredF32::from(variance.max(0.0).sqrt() as f32) + } else { + StoredF32::from(0.0_f32) + }; + + self.sd.height.truncate_push_at(index, sd)?; + if let Some(v) = p0_5sd.as_mut() { + v.truncate_push_at(index, average + StoredF32::from(0.5 * *sd))? + } + if let Some(v) = p1sd.as_mut() { + v.truncate_push_at(index, average + sd)? + } + if let Some(v) = p1_5sd.as_mut() { + v.truncate_push_at(index, average + StoredF32::from(1.5 * *sd))? + } + if let Some(v) = p2sd.as_mut() { + v.truncate_push_at(index, average + 2 * sd)? + } + if let Some(v) = p2_5sd.as_mut() { + v.truncate_push_at(index, average + StoredF32::from(2.5 * *sd))? + } + if let Some(v) = p3sd.as_mut() { + v.truncate_push_at(index, average + 3 * sd)? + } + if let Some(v) = m0_5sd.as_mut() { + v.truncate_push_at(index, average - StoredF32::from(0.5 * *sd))? + } + if let Some(v) = m1sd.as_mut() { + v.truncate_push_at(index, average - sd)? + } + if let Some(v) = m1_5sd.as_mut() { + v.truncate_push_at(index, average - StoredF32::from(1.5 * *sd))? + } + if let Some(v) = m2sd.as_mut() { + v.truncate_push_at(index, average - 2 * sd)? + } + if let Some(v) = m2_5sd.as_mut() { + v.truncate_push_at(index, average - StoredF32::from(2.5 * *sd))? + } + if let Some(v) = m3sd.as_mut() { + v.truncate_push_at(index, average - 3 * sd)? 
+ } + } + + { + let _lock = exit.lock(); + self.mut_stateful_height_vecs() + .try_for_each(|v| v.flush())?; + } + + if let Some(zscore) = self.zscore.as_mut() { + zscore.height.compute_zscore( + starting_indexes.height, + source, + sma, + &self.sd.height, + exit, + )?; + } + + Ok(()) + } + + fn mut_stateful_computed( + &mut self, + ) -> impl Iterator> { + [ + Some(&mut self.sd), + self.p0_5sd.as_mut(), + self.p1sd.as_mut(), + self.p1_5sd.as_mut(), + self.p2sd.as_mut(), + self.p2_5sd.as_mut(), + self.p3sd.as_mut(), + self.m0_5sd.as_mut(), + self.m1sd.as_mut(), + self.m1_5sd.as_mut(), + self.m2sd.as_mut(), + self.m2_5sd.as_mut(), + self.m3sd.as_mut(), + ] + .into_iter() + .flatten() + } + + fn mut_stateful_height_vecs( + &mut self, + ) -> impl Iterator>> { + self.mut_stateful_computed().map(|c| &mut c.height) + } +} diff --git a/crates/brk_computer/src/internal/multi/from_height/stored_value_last.rs b/crates/brk_computer/src/internal/multi/from_height/stored_value_last.rs new file mode 100644 index 000000000..538c56fd1 --- /dev/null +++ b/crates/brk_computer/src/internal/multi/from_height/stored_value_last.rs @@ -0,0 +1,65 @@ +//! Stored value type for Last pattern from Height. +//! +//! Both sats and USD are stored eagerly at the height level. +//! Used for rolling-window sums where USD = sum(usd_per_block), +//! NOT sats * current_price. 
+ +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{Bitcoin, Dollars, Height, Sats, Version}; +use vecdb::{Database, Exit, ReadableCloneableVec, ReadableVec, Rw, StorageMode}; + +use crate::{ + indexes, + internal::{ComputedFromHeightLast, LazyFromHeightLast, SatsToBitcoin}, +}; + +const VERSION: Version = Version::ZERO; + +#[derive(Traversable)] +pub struct StoredValueFromHeightLast { + pub sats: ComputedFromHeightLast, + pub btc: LazyFromHeightLast, + pub usd: ComputedFromHeightLast, +} + +impl StoredValueFromHeightLast { + pub(crate) fn forced_import( + db: &Database, + name: &str, + version: Version, + indexes: &indexes::Vecs, + ) -> Result { + let v = version + VERSION; + + let sats = ComputedFromHeightLast::forced_import(db, name, v, indexes)?; + + let btc = LazyFromHeightLast::from_computed::( + &format!("{name}_btc"), + v, + sats.height.read_only_boxed_clone(), + &sats, + ); + + let usd = ComputedFromHeightLast::forced_import(db, &format!("{name}_usd"), v, indexes)?; + + Ok(Self { sats, btc, usd }) + } + + pub(crate) fn compute_rolling_sum( + &mut self, + max_from: Height, + window_starts: &impl ReadableVec, + sats_source: &impl ReadableVec, + usd_source: &impl ReadableVec, + exit: &Exit, + ) -> Result<()> { + self.sats + .height + .compute_rolling_sum(max_from, window_starts, sats_source, exit)?; + self.usd + .height + .compute_rolling_sum(max_from, window_starts, usd_source, exit)?; + Ok(()) + } +} diff --git a/crates/brk_computer/src/internal/multi/from_height/sum.rs b/crates/brk_computer/src/internal/multi/from_height/sum.rs index 059aea8b5..ca9eaa59f 100644 --- a/crates/brk_computer/src/internal/multi/from_height/sum.rs +++ b/crates/brk_computer/src/internal/multi/from_height/sum.rs @@ -6,23 +6,23 @@ use brk_traversable::Traversable; use brk_types::{Height, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec}; +use 
vecdb::{Database, EagerVec, ImportableVec, PcoVec, ReadableCloneableVec, Rw, StorageMode}; -use crate::{ComputeIndexes, indexes}; +use crate::indexes; -use crate::internal::{ComputedVecValue, ComputedHeightDerivedSum, NumericValue}; +use crate::internal::{ComputedHeightDerivedSum, ComputedVecValue, NumericValue}; -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Deref, DerefMut, Traversable)] #[traversable(merge)] -pub struct ComputedFromHeightSum +pub struct ComputedFromHeightSum where T: ComputedVecValue + PartialOrd + JsonSchema, { - pub height: EagerVec>, + pub height: M::Stored>>, #[deref] #[deref_mut] #[traversable(flatten)] - pub rest: ComputedHeightDerivedSum, + pub rest: Box>, } const VERSION: Version = Version::ZERO; @@ -31,7 +31,7 @@ impl ComputedFromHeightSum where T: NumericValue + JsonSchema, { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, version: Version, @@ -42,33 +42,8 @@ where let height: EagerVec> = EagerVec::forced_import(db, name, v)?; let rest = - ComputedHeightDerivedSum::forced_import(db, name, height.boxed_clone(), v, indexes)?; + ComputedHeightDerivedSum::forced_import(name, height.read_only_boxed_clone(), v, indexes); - Ok(Self { height, rest }) - } - - pub fn compute_all( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - mut compute: F, - ) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { - compute(&mut self.height)?; - self.compute_rest(indexes, starting_indexes, exit) - } - - /// Compute rest from self.height (for stateful computation patterns). 
- pub fn compute_rest( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.rest - .derive_from(indexes, starting_indexes, &self.height, exit) + Ok(Self { height, rest: Box::new(rest) }) } } diff --git a/crates/brk_computer/src/internal/multi/from_height/sum_cum.rs b/crates/brk_computer/src/internal/multi/from_height/sum_cum.rs index a2dd5bfac..aa157cd19 100644 --- a/crates/brk_computer/src/internal/multi/from_height/sum_cum.rs +++ b/crates/brk_computer/src/internal/multi/from_height/sum_cum.rs @@ -7,26 +7,25 @@ use brk_types::{Height, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; use vecdb::{ - AnyStoredVec, AnyVec, Database, EagerVec, Exit, GenericStoredVec, ImportableVec, - IterableCloneableVec, IterableVec, PcoVec, VecIndex, + Database, EagerVec, Exit, ImportableVec, PcoVec, ReadableCloneableVec, Rw, StorageMode, }; use crate::{ComputeIndexes, indexes}; use crate::internal::{ComputedHeightDerivedSumCum, ComputedVecValue, NumericValue}; -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Deref, DerefMut, Traversable)] #[traversable(merge)] -pub struct ComputedFromHeightSumCum +pub struct ComputedFromHeightSumCum where T: ComputedVecValue + PartialOrd + JsonSchema, { #[traversable(rename = "sum")] - pub height: EagerVec>, + pub height: M::Stored>>, #[deref] #[deref_mut] #[traversable(flatten)] - pub rest: ComputedHeightDerivedSumCum, + pub rest: Box>, } const VERSION: Version = Version::ZERO; @@ -35,7 +34,7 @@ impl ComputedFromHeightSumCum where T: NumericValue + JsonSchema, { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, version: Version, @@ -46,59 +45,27 @@ where let height: EagerVec> = EagerVec::forced_import(db, name, v)?; let rest = - ComputedHeightDerivedSumCum::forced_import(db, name, height.boxed_clone(), v, indexes)?; + ComputedHeightDerivedSumCum::forced_import(db, name, height.read_only_boxed_clone(), v, indexes)?; - 
Ok(Self { height, rest }) + Ok(Self { height, rest: Box::new(rest) }) } - pub fn compute_all( + /// Compute height_cumulative from self.height. + pub(crate) fn compute_cumulative( &mut self, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, - mut compute: F, - ) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { + ) -> Result<()> { + self.rest.derive_from(starting_indexes, &self.height, exit) + } + + pub(crate) fn compute( + &mut self, + starting_indexes: &ComputeIndexes, + exit: &Exit, + mut compute: impl FnMut(&mut EagerVec>) -> Result<()>, + ) -> Result<()> { compute(&mut self.height)?; - self.compute_rest(indexes, starting_indexes, exit) - } - - /// Compute rest from self.height (for stateful computation patterns). - pub fn compute_rest( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.rest - .derive_from(indexes, starting_indexes, &self.height, exit) - } - - /// Derive from an external height source (e.g., a LazyVec). 
- pub fn derive_from( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - source: &impl IterableVec, - exit: &Exit, - ) -> Result<()> { - let target_len = source.len(); - let starting_height = starting_indexes.height.to_usize().min(self.height.len()); - - self.height - .validate_computed_version_or_reset(source.version())?; - - let mut source_iter = source.iter(); - for h_idx in starting_height..target_len { - let height = Height::from(h_idx); - let value = source_iter.get_unwrap(height); - self.height.truncate_push(height, value)?; - } - self.height.write()?; - - self.rest - .derive_from(indexes, starting_indexes, &self.height, exit) + self.compute_cumulative(starting_indexes, exit) } } diff --git a/crates/brk_computer/src/internal/multi/from_height/unary_last.rs b/crates/brk_computer/src/internal/multi/from_height/unary_last.rs deleted file mode 100644 index 1ded1006e..000000000 --- a/crates/brk_computer/src/internal/multi/from_height/unary_last.rs +++ /dev/null @@ -1,65 +0,0 @@ -//! Unary transform composite from Height - Last aggregation only. 
- -use brk_traversable::Traversable; -use brk_types::{ - DateIndex, DecadeIndex, DifficultyEpoch, Height, MonthIndex, QuarterIndex, SemesterIndex, - Version, WeekIndex, YearIndex, -}; -use schemars::JsonSchema; -use vecdb::{IterableCloneableVec, LazyVecFrom1, UnaryTransform}; - -use crate::internal::{ - ComputedFromHeightLast, ComputedVecValue, LazyTransformLast, NumericValue, -}; - -const VERSION: Version = Version::ZERO; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyUnaryFromHeightLast -where - T: ComputedVecValue + PartialOrd + JsonSchema, - ST: ComputedVecValue, -{ - pub height: LazyVecFrom1, - pub dateindex: LazyTransformLast, - pub weekindex: LazyTransformLast, - pub monthindex: LazyTransformLast, - pub quarterindex: LazyTransformLast, - pub semesterindex: LazyTransformLast, - pub yearindex: LazyTransformLast, - pub decadeindex: LazyTransformLast, - pub difficultyepoch: LazyTransformLast, -} - -impl LazyUnaryFromHeightLast -where - T: ComputedVecValue + JsonSchema + 'static, - ST: NumericValue + JsonSchema, -{ - pub fn from_computed_last>( - name: &str, - version: Version, - source: &ComputedFromHeightLast, - ) -> Self { - let v = version + VERSION; - - macro_rules! 
period { - ($p:ident) => { - LazyTransformLast::from_lazy_last::(name, v, &source.$p) - }; - } - - Self { - height: LazyVecFrom1::transformed::(name, v, source.height.boxed_clone()), - dateindex: LazyTransformLast::from_last_vec::(name, v, &source.rest.dateindex), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - difficultyepoch: period!(difficultyepoch), - } - } -} diff --git a/crates/brk_computer/src/internal/multi/from_height/value_binary.rs b/crates/brk_computer/src/internal/multi/from_height/value_binary.rs index 3684c7b19..f55ad98c5 100644 --- a/crates/brk_computer/src/internal/multi/from_height/value_binary.rs +++ b/crates/brk_computer/src/internal/multi/from_height/value_binary.rs @@ -1,132 +1,21 @@ use brk_traversable::Traversable; -use brk_types::{Bitcoin, Dollars, Height, Sats, Version}; +use brk_types::{Bitcoin, Dollars, Sats, Version}; use schemars::JsonSchema; -use vecdb::{BinaryTransform, IterableBoxedVec, IterableCloneableVec}; +use vecdb::{BinaryTransform, ReadableCloneableVec}; -use crate::internal::{ - ComputedVecValue, ValueHeightDerivedSumCum, LazyBinaryFromHeightSumCum, LazyValueFromHeightSumCum, - ValueFromHeightSumCum, -}; +use crate::internal::{ComputedVecValue, LazyBinaryFromHeightSumCum, LazyValueFromHeightSumCum}; /// Lazy value vecs computed from two ValueFromHeightSumCum sources via binary transforms. /// Used for computing coinbase = subsidy + fee. 
#[derive(Clone, Traversable)] pub struct ValueBinaryFromHeight { pub sats: LazyBinaryFromHeightSumCum, - pub bitcoin: LazyBinaryFromHeightSumCum, - pub dollars: Option>, + pub btc: LazyBinaryFromHeightSumCum, + pub usd: LazyBinaryFromHeightSumCum, } impl ValueBinaryFromHeight { - pub fn from_computed( - name: &str, - version: Version, - height_source1: IterableBoxedVec, - height_source2: IterableBoxedVec, - source1: &ValueFromHeightSumCum, - source2: &ValueFromHeightSumCum, - ) -> Self - where - SatsF: BinaryTransform, - BitcoinF: BinaryTransform, - DollarsF: BinaryTransform, - { - let sats = LazyBinaryFromHeightSumCum::from_computed::( - name, - version, - height_source1.boxed_clone(), - height_source2.boxed_clone(), - &source1.sats, - &source2.sats, - ); - - let bitcoin = LazyBinaryFromHeightSumCum::from_computed::( - &format!("{name}_btc"), - version, - height_source1, - height_source2, - &source1.sats, - &source2.sats, - ); - - // For dollars: use from_derived since the height is now lazy (LazyVecFrom2) - // The rest (cumulative, dateindex) is still ComputedHeightDerivedSumCum - let dollars = source1 - .dollars - .as_ref() - .zip(source2.dollars.as_ref()) - .map(|(d1, d2)| { - LazyBinaryFromHeightSumCum::from_derived::( - &format!("{name}_usd"), - version, - d1.height.boxed_clone(), - d2.height.boxed_clone(), - &d1.rest, - &d2.rest, - ) - }); - - Self { - sats, - bitcoin, - dollars, - } - } - - pub fn from_derived( - name: &str, - version: Version, - height_source1: IterableBoxedVec, - height_source2: IterableBoxedVec, - source1: &ValueHeightDerivedSumCum, - source2: &ValueHeightDerivedSumCum, - ) -> Self - where - SatsF: BinaryTransform, - BitcoinF: BinaryTransform, - DollarsF: BinaryTransform, - { - let sats = LazyBinaryFromHeightSumCum::from_derived::( - name, - version, - height_source1.boxed_clone(), - height_source2.boxed_clone(), - &source1.sats, - &source2.sats, - ); - - let bitcoin = LazyBinaryFromHeightSumCum::from_derived::( - &format!("{name}_btc"), 
- version, - height_source1, - height_source2, - &source1.sats, - &source2.sats, - ); - - let dollars = source1 - .dollars - .as_ref() - .zip(source2.dollars.as_ref()) - .map(|(d1, d2)| { - LazyBinaryFromHeightSumCum::from_derived::( - &format!("{name}_usd"), - version, - d1.height_cumulative.boxed_clone(), - d2.height_cumulative.boxed_clone(), - d1, - d2, - ) - }); - - Self { - sats, - bitcoin, - dollars, - } - } - - pub fn from_lazy( + pub(crate) fn from_lazy( name: &str, version: Version, source1: &LazyValueFromHeightSumCum, @@ -142,40 +31,34 @@ impl ValueBinaryFromHeight { let sats = LazyBinaryFromHeightSumCum::from_derived::( name, version, - source1.sats.height.boxed_clone(), - source2.sats.height.boxed_clone(), + source1.sats.height.read_only_boxed_clone(), + source2.sats.height.read_only_boxed_clone(), &source1.sats.rest, &source2.sats.rest, ); - let bitcoin = LazyBinaryFromHeightSumCum::from_derived::( + let btc = LazyBinaryFromHeightSumCum::from_derived::( &format!("{name}_btc"), version, - source1.sats.height.boxed_clone(), - source2.sats.height.boxed_clone(), + source1.sats.height.read_only_boxed_clone(), + source2.sats.height.read_only_boxed_clone(), &source1.sats.rest, &source2.sats.rest, ); - let dollars = source1 - .dollars - .as_ref() - .zip(source2.dollars.as_ref()) - .map(|(d1, d2)| { - LazyBinaryFromHeightSumCum::from_derived::( - &format!("{name}_usd"), - version, - d1.height.boxed_clone(), - d2.height.boxed_clone(), - &d1.rest, - &d2.rest, - ) - }); + let usd = LazyBinaryFromHeightSumCum::from_derived::( + &format!("{name}_usd"), + version, + source1.usd.height.read_only_boxed_clone(), + source2.usd.height.read_only_boxed_clone(), + &source1.usd.rest, + &source2.usd.rest, + ); Self { sats, - bitcoin, - dollars, + btc, + usd, } } } diff --git a/crates/brk_computer/src/internal/multi/from_height/value_change.rs b/crates/brk_computer/src/internal/multi/from_height/value_change.rs new file mode 100644 index 000000000..a78af87cd --- /dev/null +++ 
b/crates/brk_computer/src/internal/multi/from_height/value_change.rs @@ -0,0 +1,68 @@ +//! Change values from Height - stores signed sats and dollars (changes can be negative). + +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{Bitcoin, Dollars, Height, Sats, SatsSigned, Version}; +use vecdb::{Database, Exit, ReadableCloneableVec, ReadableVec, Rw, StorageMode}; + +use crate::{ + indexes, + internal::{ComputedFromHeightLast, LazyFromHeightLast, SatsSignedToBitcoin}, +}; + +const VERSION: Version = Version::ZERO; + +/// Change values indexed by height - sats (stored), btc (lazy), usd (stored). +#[derive(Traversable)] +pub struct ValueChangeFromHeight { + pub sats: ComputedFromHeightLast, + pub btc: LazyFromHeightLast, + pub usd: ComputedFromHeightLast, +} + +impl ValueChangeFromHeight { + pub(crate) fn forced_import( + db: &Database, + name: &str, + version: Version, + indexes: &indexes::Vecs, + ) -> Result { + let v = version + VERSION; + + let sats = ComputedFromHeightLast::forced_import(db, name, v, indexes)?; + + let btc = LazyFromHeightLast::from_computed::( + &format!("{name}_btc"), + v, + sats.height.read_only_boxed_clone(), + &sats, + ); + + let usd = ComputedFromHeightLast::forced_import( + db, + &format!("{name}_usd"), + v, + indexes, + )?; + + Ok(Self { sats, btc, usd }) + } + + /// Compute rolling change for both sats and dollars in one call. 
+ pub(crate) fn compute_rolling( + &mut self, + starting_height: Height, + window_starts: &impl ReadableVec, + sats_source: &impl ReadableVec, + dollars_source: &(impl ReadableVec + Sync), + exit: &Exit, + ) -> Result<()> { + self.sats + .height + .compute_rolling_change(starting_height, window_starts, sats_source, exit)?; + self.usd + .height + .compute_rolling_change(starting_height, window_starts, dollars_source, exit)?; + Ok(()) + } +} diff --git a/crates/brk_computer/src/internal/multi/from_height/value_ema.rs b/crates/brk_computer/src/internal/multi/from_height/value_ema.rs new file mode 100644 index 000000000..72faca466 --- /dev/null +++ b/crates/brk_computer/src/internal/multi/from_height/value_ema.rs @@ -0,0 +1,68 @@ +//! Rolling average values from Height - stores sats and dollars, btc is lazy. + +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{Bitcoin, Dollars, Height, Sats, Version}; +use vecdb::{Database, Exit, ReadableCloneableVec, ReadableVec, Rw, StorageMode}; + +use crate::{ + indexes, + internal::{ComputedFromHeightLast, LazyFromHeightLast, SatsToBitcoin}, +}; + +const VERSION: Version = Version::ZERO; + +/// Rolling average values indexed by height - sats (stored), btc (lazy), usd (stored). 
+#[derive(Traversable)] +pub struct ValueEmaFromHeight { + pub sats: ComputedFromHeightLast, + pub btc: LazyFromHeightLast, + pub usd: ComputedFromHeightLast, +} + +impl ValueEmaFromHeight { + pub(crate) fn forced_import( + db: &Database, + name: &str, + version: Version, + indexes: &indexes::Vecs, + ) -> Result { + let v = version + VERSION; + + let sats = ComputedFromHeightLast::forced_import(db, name, v, indexes)?; + + let btc = LazyFromHeightLast::from_computed::( + &format!("{name}_btc"), + v, + sats.height.read_only_boxed_clone(), + &sats, + ); + + let usd = ComputedFromHeightLast::forced_import( + db, + &format!("{name}_usd"), + v, + indexes, + )?; + + Ok(Self { sats, btc, usd }) + } + + /// Compute rolling average for both sats and dollars in one call. + pub(crate) fn compute_rolling_average( + &mut self, + starting_height: Height, + window_starts: &impl ReadableVec, + sats_source: &impl ReadableVec, + dollars_source: &(impl ReadableVec + Sync), + exit: &Exit, + ) -> Result<()> { + self.sats + .height + .compute_rolling_average(starting_height, window_starts, sats_source, exit)?; + self.usd + .height + .compute_rolling_average(starting_height, window_starts, dollars_source, exit)?; + Ok(()) + } +} diff --git a/crates/brk_computer/src/internal/multi/from_height/value_full.rs b/crates/brk_computer/src/internal/multi/from_height/value_full.rs index d74a63600..2ebcc609b 100644 --- a/crates/brk_computer/src/internal/multi/from_height/value_full.rs +++ b/crates/brk_computer/src/internal/multi/from_height/value_full.rs @@ -1,94 +1,76 @@ //! Value type for Full pattern from Height. //! //! Height-level USD stats are lazy: `sats * price`. -//! Cumulative and dateindex stats are stored since they require aggregation +//! Cumulative and day1 stats are stored since they require aggregation //! across heights with varying prices. 
use brk_error::Result; use brk_traversable::Traversable; -use brk_types::{Bitcoin, Close, Dollars, Height, Sats, Version}; -use vecdb::{Database, EagerVec, Exit, IterableCloneableVec, PcoVec}; +use brk_types::{Bitcoin, Dollars, Height, Sats, Version}; +use vecdb::{Database, EagerVec, Exit, ReadableCloneableVec, PcoVec, Rw, StorageMode}; use crate::{ ComputeIndexes, indexes, internal::{ ComputedFromHeightFull, LazyBinaryComputedFromHeightFull, LazyFromHeightFull, - SatsTimesClosePrice, SatsToBitcoin, + SatsTimesPrice, SatsToBitcoin, }, - price, + prices, }; -/// Lazy dollars type: `sats[h] * price[h]` at height level, stored derived. -pub type LazyDollarsFromHeightFull = - LazyBinaryComputedFromHeightFull>; - -#[derive(Clone, Traversable)] -pub struct ValueFromHeightFull { - pub sats: ComputedFromHeightFull, - pub bitcoin: LazyFromHeightFull, - pub dollars: Option, +#[derive(Traversable)] +pub struct ValueFromHeightFull { + pub sats: ComputedFromHeightFull, + pub btc: LazyFromHeightFull, + pub usd: LazyBinaryComputedFromHeightFull, } const VERSION: Version = Version::ONE; // Bumped for lazy height dollars impl ValueFromHeightFull { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, version: Version, indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, ) -> Result { let v = version + VERSION; let sats = ComputedFromHeightFull::forced_import(db, name, v, indexes)?; - let bitcoin = LazyFromHeightFull::from_computed::( + let btc = LazyFromHeightFull::from_computed::( &format!("{name}_btc"), v, - sats.height.boxed_clone(), + sats.height.read_only_boxed_clone(), &sats, ); - let dollars = price - .map(|price| { - LazyBinaryComputedFromHeightFull::forced_import::( - db, - &format!("{name}_usd"), - v, - sats.height.boxed_clone(), - price.usd.split.close.height.boxed_clone(), - indexes, - ) - }) - .transpose()?; + let usd = LazyBinaryComputedFromHeightFull::forced_import::( + db, + &format!("{name}_usd"), + v, + 
sats.height.read_only_boxed_clone(), + prices.usd.price.read_only_boxed_clone(), + indexes, + )?; Ok(Self { sats, - bitcoin, - dollars, + btc, + usd, }) } - pub fn compute_all( + pub(crate) fn compute( &mut self, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, - mut compute: F, - ) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { - // Compute sats - self.sats - .compute_all(indexes, starting_indexes, exit, |v| compute(v))?; - - // Derive dollars (height is lazy, just compute cumulative and dateindex) - if let Some(dollars) = self.dollars.as_mut() { - dollars.derive_from(indexes, starting_indexes, exit)?; - } - + mut compute: impl FnMut(&mut EagerVec>) -> Result<()>, + ) -> Result<()> { + compute(&mut self.sats.height)?; + self.sats.rest.compute_cumulative(starting_indexes, &self.sats.height, exit)?; + self.usd.compute_cumulative(starting_indexes, exit)?; Ok(()) } } diff --git a/crates/brk_computer/src/internal/multi/from_height/value_last.rs b/crates/brk_computer/src/internal/multi/from_height/value_last.rs index 55f2d4e44..a10107b9f 100644 --- a/crates/brk_computer/src/internal/multi/from_height/value_last.rs +++ b/crates/brk_computer/src/internal/multi/from_height/value_last.rs @@ -1,110 +1,62 @@ //! Value type for Last pattern from Height. //! //! Height-level USD value is lazy: `sats * price`. -//! DateIndex last is stored since it requires finding the last value within each date. +//! Day1 last is stored since it requires finding the last value within each date. 
use brk_error::Result; use brk_traversable::Traversable; -use brk_types::{Bitcoin, Close, Dollars, Height, Sats, Version}; -use vecdb::{Database, EagerVec, Exit, IterableCloneableVec, PcoVec}; +use brk_types::{Bitcoin, Dollars, Sats, Version}; +use vecdb::{Database, ReadableCloneableVec, Rw, StorageMode}; use crate::{ - ComputeIndexes, indexes, + indexes, internal::{ ComputedFromHeightLast, LazyBinaryComputedFromHeightLast, LazyFromHeightLast, - SatsTimesClosePrice, SatsToBitcoin, + SatsTimesPrice, SatsToBitcoin, }, - price, + prices, }; -/// Lazy dollars type: `sats[h] * price[h]` at height level, stored derived. -pub type LazyDollarsFromHeightLast = - LazyBinaryComputedFromHeightLast>; - -#[derive(Clone, Traversable)] -pub struct ValueFromHeightLast { - pub sats: ComputedFromHeightLast, - pub bitcoin: LazyFromHeightLast, - pub dollars: Option, +#[derive(Traversable)] +pub struct ValueFromHeightLast { + pub sats: ComputedFromHeightLast, + pub btc: LazyFromHeightLast, + pub usd: LazyBinaryComputedFromHeightLast, } const VERSION: Version = Version::ONE; // Bumped for lazy height dollars impl ValueFromHeightLast { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, version: Version, indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, ) -> Result { let v = version + VERSION; let sats = ComputedFromHeightLast::forced_import(db, name, v, indexes)?; - let bitcoin = LazyFromHeightLast::from_computed::( + let btc = LazyFromHeightLast::from_computed::( &format!("{name}_btc"), v, - sats.height.boxed_clone(), + sats.height.read_only_boxed_clone(), &sats, ); - let dollars = price - .map(|price| { - LazyBinaryComputedFromHeightLast::forced_import::( - db, - &format!("{name}_usd"), - v, - sats.height.boxed_clone(), - price.usd.split.close.height.boxed_clone(), - indexes, - ) - }) - .transpose()?; + let usd = LazyBinaryComputedFromHeightLast::forced_import::( + &format!("{name}_usd"), + v, + 
sats.height.read_only_boxed_clone(), + prices.usd.price.read_only_boxed_clone(), + indexes, + ); Ok(Self { sats, - bitcoin, - dollars, + btc, + usd, }) } - - pub fn compute_all( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - mut compute: F, - ) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { - // Compute sats (closure receives &mut height vec) - self.sats - .compute_all(indexes, starting_indexes, exit, |v| compute(v))?; - - // Derive dollars (height is lazy, just compute dateindex last) - if let Some(dollars) = self.dollars.as_mut() { - dollars.derive_from(indexes, starting_indexes, exit)?; - } - - Ok(()) - } - - /// Compute derived vecs from existing height data. - pub fn compute_rest( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.sats.compute_rest(indexes, starting_indexes, exit)?; - - // Derive dollars (height is lazy, just compute dateindex last) - if let Some(dollars) = self.dollars.as_mut() { - dollars.derive_from(indexes, starting_indexes, exit)?; - } - - Ok(()) - } } diff --git a/crates/brk_computer/src/internal/multi/from_height/value_lazy_binary_last.rs b/crates/brk_computer/src/internal/multi/from_height/value_lazy_binary_last.rs index 3e481e73a..f0e54665a 100644 --- a/crates/brk_computer/src/internal/multi/from_height/value_lazy_binary_last.rs +++ b/crates/brk_computer/src/internal/multi/from_height/value_lazy_binary_last.rs @@ -1,69 +1,67 @@ -//! Lazy binary value wrapper combining height (with price) + difficultyepoch + date last transforms. +//! Lazy binary value wrapper combining height (with price) + all derived last transforms. 
use brk_traversable::Traversable; -use brk_types::{Bitcoin, Close, Dollars, Sats, Version}; +use brk_types::{Bitcoin, Dollars, Sats, Version}; use derive_more::{Deref, DerefMut}; -use vecdb::{BinaryTransform, IterableCloneableVec, UnaryTransform}; +use vecdb::{BinaryTransform, ReadableCloneableVec, UnaryTransform}; use super::LazyFromHeightValue; -use crate::internal::{LazyTransformedValueDifficultyEpoch, LazyValueFromDateLast}; -use crate::{internal::ValueFromHeightLast, price}; +use crate::internal::{LazyValueHeightDerivedLast, ValueFromHeightLast}; +use crate::prices; const VERSION: Version = Version::ZERO; -/// Lazy binary value wrapper with height (using price binary transform) + difficultyepoch + date last transforms. +/// Lazy binary value wrapper with height (using price binary transform) + all derived last transforms. /// -/// Use this when the height-level dollars need a binary transform (e.g., price × sats) +/// Use this when the height-level dollars need a binary transform (e.g., price * sats) /// rather than a unary transform from existing dollars. /// -/// No merge at this level - denominations (sats, bitcoin, dollars) stay as separate branches. -/// Each inner field has merge which combines indexes within each denomination. +/// All coarser-than-height periods (minute1 through difficultyepoch) use unary transforms +/// on the pre-computed values from the source. 
#[derive(Clone, Deref, DerefMut, Traversable)] +#[traversable(merge)] pub struct LazyBinaryValueFromHeightLast { #[traversable(flatten)] pub height: LazyFromHeightValue, - #[traversable(flatten)] - pub difficultyepoch: LazyTransformedValueDifficultyEpoch, #[deref] #[deref_mut] #[traversable(flatten)] - pub dates: LazyValueFromDateLast, + pub rest: Box, } impl LazyBinaryValueFromHeightLast { - pub fn from_block_source( + pub(crate) fn from_block_source< + SatsTransform, + BitcoinTransform, + HeightDollarsTransform, + DateDollarsTransform, + >( name: &str, source: &ValueFromHeightLast, - price: Option<&price::Vecs>, + prices: &prices::Vecs, version: Version, ) -> Self where SatsTransform: UnaryTransform, BitcoinTransform: UnaryTransform, - HeightDollarsTransform: BinaryTransform, Sats, Dollars>, + HeightDollarsTransform: BinaryTransform, DateDollarsTransform: UnaryTransform, { let v = version + VERSION; - let price_source = price.map(|p| p.usd.split.close.height.boxed_clone()); + let price_source = prices.usd.price.read_only_boxed_clone(); - let height = LazyFromHeightValue::from_sources::( - name, - source.sats.height.boxed_clone(), - price_source, - v, - ); - - let difficultyepoch = LazyTransformedValueDifficultyEpoch::from_block_source::< + let height = LazyFromHeightValue::from_sources::< SatsTransform, BitcoinTransform, HeightDollarsTransform, - >(name, source, price, v); + >(name, source.sats.height.read_only_boxed_clone(), price_source, v); - let dates = LazyValueFromDateLast::from_block_source::( - name, source, v, - ); + let rest = + LazyValueHeightDerivedLast::from_block_source::( + name, source, v, + ); - Self { height, difficultyepoch, dates } + Self { height, rest: Box::new(rest) } } } diff --git a/crates/brk_computer/src/internal/multi/from_height/value_lazy_computed_sum_cum.rs b/crates/brk_computer/src/internal/multi/from_height/value_lazy_computed_sum_cum.rs index c79fdc97f..e7dffc632 100644 --- 
a/crates/brk_computer/src/internal/multi/from_height/value_lazy_computed_sum_cum.rs +++ b/crates/brk_computer/src/internal/multi/from_height/value_lazy_computed_sum_cum.rs @@ -8,90 +8,81 @@ use brk_error::Result; use brk_traversable::Traversable; -use brk_types::{Bitcoin, Close, Dollars, Sats, Version}; -use vecdb::{Database, Exit, IterableCloneableVec, LazyVecFrom2}; +use brk_types::{Bitcoin, Dollars, Sats, Version}; +use vecdb::{Database, Exit, ReadableCloneableVec, LazyVecFrom2, Rw, StorageMode}; use crate::{ ComputeIndexes, indexes, internal::{ - ClosePriceTimesSats, ComputedFromHeightSumCum, LazyFromHeightSumCum, LazyComputedFromHeightSumCum, - SatsToBitcoin, + ComputedFromHeightSumCum, LazyComputedFromHeightSumCum, LazyFromHeightSumCum, + PriceTimesSats, SatsToBitcoin, }, - price, + prices, }; /// Value wrapper with stored sats height + lazy dollars. /// /// Sats height is stored (computed directly or from stateful loop). /// Dollars height is lazy (price × sats). -/// Cumulative and dateindex aggregates are stored for both. -#[derive(Clone, Traversable)] -pub struct LazyComputedValueFromHeightSumCum { - pub sats: ComputedFromHeightSumCum, - pub bitcoin: LazyFromHeightSumCum, - pub dollars: Option, Sats>>, +/// Cumulative and day1 aggregates are stored for both. 
+#[derive(Traversable)] +pub struct LazyComputedValueFromHeightSumCum { + pub sats: ComputedFromHeightSumCum, + pub btc: LazyFromHeightSumCum, + pub usd: LazyComputedFromHeightSumCum, } const VERSION: Version = Version::ZERO; impl LazyComputedValueFromHeightSumCum { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, version: Version, indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, ) -> Result { let v = version + VERSION; let sats = ComputedFromHeightSumCum::forced_import(db, name, v, indexes)?; - let bitcoin = LazyFromHeightSumCum::from_computed::( + let btc = LazyFromHeightSumCum::from_computed::( &format!("{name}_btc"), v, - sats.height.boxed_clone(), + sats.height.read_only_boxed_clone(), &sats, ); - let dollars = if let Some(price) = price { - let dollars_height = LazyVecFrom2::transformed::( - &format!("{name}_usd"), - v, - price.usd.split.close.height.boxed_clone(), - sats.height.boxed_clone(), - ); + let usd_height = LazyVecFrom2::transformed::( + &format!("{name}_usd"), + v, + prices.usd.price.read_only_boxed_clone(), + sats.height.read_only_boxed_clone(), + ); - Some(LazyComputedFromHeightSumCum::forced_import( - db, - &format!("{name}_usd"), - v, - indexes, - dollars_height, - )?) - } else { - None - }; + let usd = LazyComputedFromHeightSumCum::forced_import( + db, + &format!("{name}_usd"), + v, + indexes, + usd_height, + )?; Ok(Self { sats, - bitcoin, - dollars, + btc, + usd, }) } - /// Compute rest (derived indexes) from already-computed height. - pub fn compute_rest( + /// Compute cumulative from already-computed height. 
+ pub(crate) fn compute_cumulative( &mut self, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.sats.compute_rest(indexes, starting_indexes, exit)?; - - if let Some(dollars) = self.dollars.as_mut() { - dollars.derive_from(indexes, starting_indexes, exit)?; - } - + self.sats.compute_cumulative(starting_indexes, exit)?; + self.usd.compute_cumulative(starting_indexes, exit)?; Ok(()) } } diff --git a/crates/brk_computer/src/internal/multi/from_height/value_lazy_last.rs b/crates/brk_computer/src/internal/multi/from_height/value_lazy_last.rs index 73357cd7a..80bd6efc9 100644 --- a/crates/brk_computer/src/internal/multi/from_height/value_lazy_last.rs +++ b/crates/brk_computer/src/internal/multi/from_height/value_lazy_last.rs @@ -5,11 +5,13 @@ use brk_types::{Dollars, Sats, Version}; use derive_more::{Deref, DerefMut}; use vecdb::UnaryTransform; -use crate::internal::{LazyValueFromDateLast, LazyValueHeight, SatsToBitcoin, ValueFromHeightLast}; +use crate::internal::{ + LazyValueHeight, LazyValueHeightDerivedLast, SatsToBitcoin, ValueFromHeightLast, +}; const VERSION: Version = Version::ZERO; -/// Lazy value wrapper with height + date last transforms from ValueFromHeightLast. +/// Lazy value wrapper with height + all derived last transforms from ValueFromHeightLast. 
#[derive(Clone, Deref, DerefMut, Traversable)] #[traversable(merge)] pub struct LazyValueFromHeightLast { @@ -18,11 +20,11 @@ pub struct LazyValueFromHeightLast { #[deref] #[deref_mut] #[traversable(flatten)] - pub dates: LazyValueFromDateLast, + pub rest: Box, } impl LazyValueFromHeightLast { - pub fn from_block_source( + pub(crate) fn from_block_source( name: &str, source: &ValueFromHeightLast, version: Version, @@ -36,10 +38,11 @@ impl LazyValueFromHeightLast { let height = LazyValueHeight::from_block_source::(name, source, v); - let dates = LazyValueFromDateLast::from_block_source::( - name, source, v, - ); + let rest = + LazyValueHeightDerivedLast::from_block_source::( + name, source, v, + ); - Self { height, dates } + Self { height, rest: Box::new(rest) } } } diff --git a/crates/brk_computer/src/internal/multi/from_height/value_lazy_sum_cum.rs b/crates/brk_computer/src/internal/multi/from_height/value_lazy_sum_cum.rs index 2f4017953..8932dc3af 100644 --- a/crates/brk_computer/src/internal/multi/from_height/value_lazy_sum_cum.rs +++ b/crates/brk_computer/src/internal/multi/from_height/value_lazy_sum_cum.rs @@ -4,35 +4,36 @@ use brk_error::Result; use brk_traversable::Traversable; -use brk_types::{Bitcoin, Close, Dollars, Height, Sats, Version}; +use brk_types::{Bitcoin, Dollars, Height, Sats, Version}; use schemars::JsonSchema; use vecdb::{ - BinaryTransform, Database, Exit, IterableBoxedVec, IterableCloneableVec, LazyVecFrom2, + BinaryTransform, Database, Exit, ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom2, Rw, + StorageMode, }; use crate::{ ComputeIndexes, indexes, internal::{ - ClosePriceTimesSats, ComputedVecValue, LazyFromHeightSumCum, LazyComputedFromHeightSumCum, + ComputedVecValue, LazyComputedFromHeightSumCum, LazyFromHeightSumCum, PriceTimesSats, SatsToBitcoin, }, - price, + prices, }; /// Value wrapper with lazy binary height + stored derived SumCum. /// /// Sats height is a lazy binary transform (e.g., mask × source). 
/// Dollars height is also lazy (price × sats). -/// Cumulative and dateindex are stored. -#[derive(Clone, Traversable)] -pub struct LazyValueFromHeightSumCum +/// Cumulative and day1 are stored. +#[derive(Traversable)] +pub struct LazyValueFromHeightSumCum where S1T: ComputedVecValue + JsonSchema, S2T: ComputedVecValue + JsonSchema, { - pub sats: LazyComputedFromHeightSumCum, - pub bitcoin: LazyFromHeightSumCum, - pub dollars: Option, Sats>>, + pub sats: LazyComputedFromHeightSumCum, + pub btc: LazyFromHeightSumCum, + pub usd: LazyComputedFromHeightSumCum, } const VERSION: Version = Version::ZERO; @@ -42,14 +43,14 @@ where S1T: ComputedVecValue + JsonSchema, S2T: ComputedVecValue + JsonSchema, { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, version: Version, indexes: &indexes::Vecs, - source1: IterableBoxedVec, - source2: IterableBoxedVec, - price: Option<&price::Vecs>, + source1: ReadableBoxedVec, + source2: ReadableBoxedVec, + prices: &prices::Vecs, ) -> Result where F: BinaryTransform, @@ -59,52 +60,42 @@ where let sats_height = LazyVecFrom2::transformed::(name, v, source1, source2); let sats = LazyComputedFromHeightSumCum::forced_import(db, name, v, indexes, sats_height)?; - let bitcoin = LazyFromHeightSumCum::from_derived::( + let btc = LazyFromHeightSumCum::from_derived::( &format!("{name}_btc"), v, - sats.height.boxed_clone(), + sats.height.read_only_boxed_clone(), &sats.rest, ); - let dollars = if let Some(price) = price { - let dollars_height = LazyVecFrom2::transformed::( - &format!("{name}_usd"), - v, - price.usd.split.close.height.boxed_clone(), - sats.height.boxed_clone(), - ); + let usd_height = LazyVecFrom2::transformed::( + &format!("{name}_usd"), + v, + prices.usd.price.read_only_boxed_clone(), + sats.height.read_only_boxed_clone(), + ); - Some(LazyComputedFromHeightSumCum::forced_import( - db, - &format!("{name}_usd"), - v, - indexes, - dollars_height, - )?) 
- } else { - None - }; + let usd = LazyComputedFromHeightSumCum::forced_import( + db, + &format!("{name}_usd"), + v, + indexes, + usd_height, + )?; Ok(Self { sats, - bitcoin, - dollars, + btc, + usd, }) } - /// Derive aggregates from the lazy sats height source. - pub fn derive_from( + pub(crate) fn compute_cumulative( &mut self, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.sats.derive_from(indexes, starting_indexes, exit)?; - - if let Some(dollars) = self.dollars.as_mut() { - dollars.derive_from(indexes, starting_indexes, exit)?; - } - + self.sats.compute_cumulative(starting_indexes, exit)?; + self.usd.compute_cumulative(starting_indexes, exit)?; Ok(()) } } diff --git a/crates/brk_computer/src/internal/multi/from_height/value_sum.rs b/crates/brk_computer/src/internal/multi/from_height/value_sum.rs index 5bf4eb65d..376b32a4b 100644 --- a/crates/brk_computer/src/internal/multi/from_height/value_sum.rs +++ b/crates/brk_computer/src/internal/multi/from_height/value_sum.rs @@ -1,93 +1,62 @@ //! Value type for Sum pattern from Height. //! //! Height-level USD value is lazy: `sats * price`. -//! DateIndex sum is stored since it requires aggregation across heights with varying prices. +//! Day1 sum is stored since it requires aggregation across heights with varying prices. 
use brk_error::Result; use brk_traversable::Traversable; -use brk_types::{Bitcoin, Close, Dollars, Height, Sats, Version}; -use vecdb::{Database, EagerVec, Exit, IterableCloneableVec, PcoVec}; +use brk_types::{Bitcoin, Dollars, Sats, Version}; +use vecdb::{Database, ReadableCloneableVec, Rw, StorageMode}; use crate::{ - ComputeIndexes, indexes, + indexes, internal::{ - ComputedFromHeightSum, LazyBinaryComputedFromHeightSum, LazyFromHeightSum, - SatsTimesClosePrice, SatsToBitcoin, + ComputedFromHeightSum, LazyBinaryComputedFromHeightSum, LazyFromHeightSum, SatsTimesPrice, + SatsToBitcoin, }, - price, + prices, }; -/// Lazy dollars type: `sats[h] * price[h]` at height level, stored derived. -pub type LazyDollarsFromHeightSum = - LazyBinaryComputedFromHeightSum>; - -#[derive(Clone, Traversable)] -pub struct ValueFromHeightSum { - pub sats: ComputedFromHeightSum, - pub bitcoin: LazyFromHeightSum, - pub dollars: Option, +#[derive(Traversable)] +pub struct ValueFromHeightSum { + pub sats: ComputedFromHeightSum, + pub btc: LazyFromHeightSum, + pub usd: LazyBinaryComputedFromHeightSum, } const VERSION: Version = Version::ONE; // Bumped for lazy height dollars impl ValueFromHeightSum { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, version: Version, indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, ) -> Result { let v = version + VERSION; let sats = ComputedFromHeightSum::forced_import(db, name, v, indexes)?; - let bitcoin = LazyFromHeightSum::from_computed::( + let btc = LazyFromHeightSum::from_computed::( &format!("{name}_btc"), v, - sats.height.boxed_clone(), + sats.height.read_only_boxed_clone(), &sats, ); - let dollars = price - .map(|price| { - LazyBinaryComputedFromHeightSum::forced_import::( - db, - &format!("{name}_usd"), - v, - sats.height.boxed_clone(), - price.usd.split.close.height.boxed_clone(), - indexes, - ) - }) - .transpose()?; + let usd = LazyBinaryComputedFromHeightSum::forced_import::( + 
&format!("{name}_usd"), + v, + sats.height.read_only_boxed_clone(), + prices.usd.price.read_only_boxed_clone(), + indexes, + ); Ok(Self { sats, - bitcoin, - dollars, + btc, + usd, }) } - - pub fn compute_all( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - mut compute: F, - ) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { - // Compute sats (closure receives &mut height vec) - self.sats - .compute_all(indexes, starting_indexes, exit, |v| compute(v))?; - - // Derive dollars (height is lazy, just compute dateindex sum) - if let Some(dollars) = self.dollars.as_mut() { - dollars.derive_from(indexes, starting_indexes, exit)?; - } - - Ok(()) - } } diff --git a/crates/brk_computer/src/internal/multi/from_height/value_sum_cum.rs b/crates/brk_computer/src/internal/multi/from_height/value_sum_cum.rs index c9e0054b8..2790fe51e 100644 --- a/crates/brk_computer/src/internal/multi/from_height/value_sum_cum.rs +++ b/crates/brk_computer/src/internal/multi/from_height/value_sum_cum.rs @@ -1,131 +1,78 @@ //! Value type for SumCum pattern from Height. //! //! Height-level USD sum is lazy: `sats * price`. -//! Cumulative and dateindex stats are stored since they require aggregation +//! Cumulative and day1 stats are stored since they require aggregation //! across heights with varying prices. 
use brk_error::Result; use brk_traversable::Traversable; -use brk_types::{Bitcoin, Close, Dollars, Height, Sats, Version}; -use vecdb::{Database, EagerVec, Exit, IterableCloneableVec, IterableVec, PcoVec}; +use brk_types::{Bitcoin, Dollars, Height, Sats, Version}; +use vecdb::{Database, EagerVec, Exit, ReadableCloneableVec, PcoVec, Rw, StorageMode}; use crate::{ - ComputeIndexes, indexes, + ComputeIndexes, + indexes, internal::{ ComputedFromHeightSumCum, LazyBinaryComputedFromHeightSumCum, LazyFromHeightSumCum, - SatsTimesClosePrice, SatsToBitcoin, + SatsTimesPrice, SatsToBitcoin, }, - price, + prices, }; -/// Lazy dollars type: `sats[h] * price[h]` at height level, stored derived. -pub type LazyDollarsFromHeightSumCum = - LazyBinaryComputedFromHeightSumCum>; - -#[derive(Clone, Traversable)] -pub struct ValueFromHeightSumCum { - pub sats: ComputedFromHeightSumCum, - pub bitcoin: LazyFromHeightSumCum, - pub dollars: Option, +#[derive(Traversable)] +pub struct ValueFromHeightSumCum { + pub sats: ComputedFromHeightSumCum, + pub btc: LazyFromHeightSumCum, + pub usd: LazyBinaryComputedFromHeightSumCum, } const VERSION: Version = Version::ONE; // Bumped for lazy height dollars impl ValueFromHeightSumCum { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, version: Version, indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, ) -> Result { let v = version + VERSION; let sats = ComputedFromHeightSumCum::forced_import(db, name, v, indexes)?; - let bitcoin = LazyFromHeightSumCum::from_computed::( + let btc = LazyFromHeightSumCum::from_computed::( &format!("{name}_btc"), v, - sats.height.boxed_clone(), + sats.height.read_only_boxed_clone(), &sats, ); - let dollars = price - .map(|price| { - LazyBinaryComputedFromHeightSumCum::forced_import::( - db, - &format!("{name}_usd"), - v, - sats.height.boxed_clone(), - price.usd.split.close.height.boxed_clone(), - indexes, - ) - }) - .transpose()?; + let usd = 
LazyBinaryComputedFromHeightSumCum::forced_import::( + db, + &format!("{name}_usd"), + v, + sats.height.read_only_boxed_clone(), + prices.usd.price.read_only_boxed_clone(), + indexes, + )?; + Ok(Self { sats, - bitcoin, - dollars, + btc, + usd, }) } - pub fn compute_all( + pub(crate) fn compute( &mut self, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, - mut compute: F, - ) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { - // Compute sats (closure receives &mut height vec) - self.sats - .compute_all(indexes, starting_indexes, exit, |v| compute(v))?; - - // Derive dollars (height is lazy, just compute cumulative and dateindex) - if let Some(dollars) = self.dollars.as_mut() { - dollars.derive_from(indexes, starting_indexes, exit)?; - } - - Ok(()) - } - - /// Derive from an external height source (e.g., a LazyVec). - pub fn derive_from( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - source: &impl IterableVec, - exit: &Exit, + mut compute: impl FnMut(&mut EagerVec>) -> Result<()>, ) -> Result<()> { - // Derive sats from source - self.sats - .derive_from(indexes, starting_indexes, source, exit)?; - - // Derive dollars (height is lazy, just compute cumulative and dateindex) - if let Some(dollars) = self.dollars.as_mut() { - dollars.derive_from(indexes, starting_indexes, exit)?; - } - - Ok(()) - } - - /// Compute rest (derived indexes) from already-computed height. 
- pub fn compute_rest( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.sats.compute_rest(indexes, starting_indexes, exit)?; - - // Derive dollars (height is lazy, just compute cumulative and dateindex) - if let Some(dollars) = self.dollars.as_mut() { - dollars.derive_from(indexes, starting_indexes, exit)?; - } - + compute(&mut self.sats.height)?; + self.sats.compute_cumulative(starting_indexes, exit)?; + self.usd.compute_cumulative(starting_indexes, exit)?; Ok(()) } } diff --git a/crates/brk_computer/src/internal/multi/from_height_and_date/binary_last.rs b/crates/brk_computer/src/internal/multi/from_height_and_date/binary_last.rs deleted file mode 100644 index 741c05289..000000000 --- a/crates/brk_computer/src/internal/multi/from_height_and_date/binary_last.rs +++ /dev/null @@ -1,76 +0,0 @@ -//! LazyBinaryFromHeightAndDateLast - height storage + binary transform lazy date periods. -//! -//! Use this when height is stored as EagerVec and date periods are lazy binary transforms. 
- -use brk_traversable::Traversable; -use brk_types::{Height, Version}; -use derive_more::{Deref, DerefMut}; -use schemars::JsonSchema; -use vecdb::{BinaryTransform, EagerVec, PcoVec}; - -use crate::internal::{ - ComputedFromDateLast, ComputedFromHeightAndDateLast, ComputedVecValue, LazyBinaryFromDateLast, -}; - -const VERSION: Version = Version::ZERO; - -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(merge)] -pub struct LazyBinaryFromHeightAndDateLast -where - T: ComputedVecValue + PartialOrd + JsonSchema, - S1T: ComputedVecValue, - S2T: ComputedVecValue, -{ - pub height: EagerVec>, - #[deref] - #[deref_mut] - #[traversable(flatten)] - pub rest: LazyBinaryFromDateLast, -} - -impl LazyBinaryFromHeightAndDateLast -where - T: ComputedVecValue + JsonSchema + 'static, - S1T: ComputedVecValue + JsonSchema, - S2T: ComputedVecValue + JsonSchema, -{ - pub fn from_computed_both_last>( - name: &str, - version: Version, - height: EagerVec>, - source1: &ComputedFromDateLast, - source2: &ComputedFromDateLast, - ) -> Self { - let v = version + VERSION; - - Self { - height, - rest: LazyBinaryFromDateLast::from_computed_both_last::(name, v, source1, source2), - } - } - - pub fn from_computed_height_date_last>( - name: &str, - version: Version, - height: EagerVec>, - source1: &ComputedFromHeightAndDateLast, - source2: &ComputedFromHeightAndDateLast, - ) -> Self - where - S1T: JsonSchema + 'static, - S2T: JsonSchema + 'static, - { - let v = version + VERSION; - - Self { - height, - rest: LazyBinaryFromDateLast::from_computed_both_last::( - name, - v, - &source1.rest, - &source2.rest, - ), - } - } -} diff --git a/crates/brk_computer/src/internal/multi/from_height_and_date/computed_ohlc.rs b/crates/brk_computer/src/internal/multi/from_height_and_date/computed_ohlc.rs deleted file mode 100644 index 0904f0c45..000000000 --- a/crates/brk_computer/src/internal/multi/from_height_and_date/computed_ohlc.rs +++ /dev/null @@ -1,68 +0,0 @@ -//! 
OHLC computed aggregations combining height, dateindex, and period indexes. - -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{Close, High, Low, Open, Version}; -use schemars::JsonSchema; -use vecdb::Database; - -use crate::indexes; -use crate::internal::{ - ComputedFromHeightAndDateFirst, ComputedFromHeightAndDateLast, ComputedFromHeightAndDateMax, ComputedFromHeightAndDateMin, - ComputedVecValue, -}; - -/// Combined OHLC computed vecs with all indexes (height + dateindex + periods + difficultyepoch). -/// -/// Access pattern: `ohlc.{open,high,low,close}.{height,dateindex,weekindex,...,difficultyepoch}` -#[derive(Clone, Traversable)] -pub struct ComputedOHLC -where - T: ComputedVecValue + PartialOrd + JsonSchema + From, - f64: From, -{ - pub open: ComputedFromHeightAndDateFirst>, - pub high: ComputedFromHeightAndDateMax>, - pub low: ComputedFromHeightAndDateMin>, - pub close: ComputedFromHeightAndDateLast>, -} - -impl ComputedOHLC -where - T: ComputedVecValue + PartialOrd + JsonSchema + From + 'static, - f64: From, -{ - pub fn forced_import( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - ) -> Result { - Ok(Self { - open: ComputedFromHeightAndDateFirst::forced_import( - db, - &format!("{name}_open"), - version, - indexes, - )?, - high: ComputedFromHeightAndDateMax::forced_import_raw( - db, - &format!("{name}_high"), - version, - indexes, - )?, - low: ComputedFromHeightAndDateMin::forced_import_raw( - db, - &format!("{name}_low"), - version, - indexes, - )?, - close: ComputedFromHeightAndDateLast::forced_import( - db, - &format!("{name}_close"), - version, - indexes, - )?, - }) - } -} diff --git a/crates/brk_computer/src/internal/multi/from_height_and_date/constant.rs b/crates/brk_computer/src/internal/multi/from_height_and_date/constant.rs deleted file mode 100644 index 326863b18..000000000 --- a/crates/brk_computer/src/internal/multi/from_height_and_date/constant.rs +++ /dev/null @@ -1,94 +0,0 @@ -use 
brk_traversable::Traversable; -use brk_types::{ - DateIndex, DecadeIndex, DifficultyEpoch, Height, MonthIndex, QuarterIndex, SemesterIndex, - Version, WeekIndex, YearIndex, -}; -use schemars::JsonSchema; -use serde::Serialize; -use vecdb::{Formattable, IterableCloneableVec, LazyVecFrom1, UnaryTransform, VecValue}; - -use crate::indexes; - -/// Lazy constant vecs for all index levels. -/// Uses const generic transforms to return the same value for every index. -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct ConstantVecs -where - T: VecValue + Formattable + Serialize + JsonSchema, -{ - pub height: LazyVecFrom1, - pub difficultyepoch: LazyVecFrom1, - pub dateindex: LazyVecFrom1, - pub weekindex: LazyVecFrom1, - pub monthindex: LazyVecFrom1, - pub quarterindex: LazyVecFrom1, - pub semesterindex: LazyVecFrom1, - pub yearindex: LazyVecFrom1, - pub decadeindex: LazyVecFrom1, -} - -impl ConstantVecs { - /// Create constant vecs using a transform that ignores input and returns a constant. 
- pub fn new(name: &str, version: Version, indexes: &indexes::Vecs) -> Self - where - F: UnaryTransform - + UnaryTransform - + UnaryTransform - + UnaryTransform - + UnaryTransform - + UnaryTransform - + UnaryTransform - + UnaryTransform - + UnaryTransform, - { - Self { - height: LazyVecFrom1::transformed::( - name, - version, - indexes.height.identity.boxed_clone(), - ), - difficultyepoch: LazyVecFrom1::transformed::( - name, - version, - indexes.difficultyepoch.identity.boxed_clone(), - ), - dateindex: LazyVecFrom1::transformed::( - name, - version, - indexes.dateindex.identity.boxed_clone(), - ), - weekindex: LazyVecFrom1::transformed::( - name, - version, - indexes.weekindex.identity.boxed_clone(), - ), - monthindex: LazyVecFrom1::transformed::( - name, - version, - indexes.monthindex.identity.boxed_clone(), - ), - quarterindex: LazyVecFrom1::transformed::( - name, - version, - indexes.quarterindex.identity.boxed_clone(), - ), - semesterindex: LazyVecFrom1::transformed::( - name, - version, - indexes.semesterindex.identity.boxed_clone(), - ), - yearindex: LazyVecFrom1::transformed::( - name, - version, - indexes.yearindex.identity.boxed_clone(), - ), - decadeindex: LazyVecFrom1::transformed::( - name, - version, - indexes.decadeindex.identity.boxed_clone(), - ), - } - } -} - diff --git a/crates/brk_computer/src/internal/multi/from_height_and_date/first.rs b/crates/brk_computer/src/internal/multi/from_height_and_date/first.rs deleted file mode 100644 index d1c59b5bd..000000000 --- a/crates/brk_computer/src/internal/multi/from_height_and_date/first.rs +++ /dev/null @@ -1,72 +0,0 @@ -//! ComputedFromHeightAndDateFirst - height storage + dateindex storage + lazy periods. -//! -//! Use this when both height and dateindex are stored EagerVecs with first-value aggregation. 
- -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{DateIndex, DifficultyEpoch, Height, Version}; -use derive_more::{Deref, DerefMut}; -use schemars::JsonSchema; -use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec}; - -use crate::{ComputeIndexes, indexes}; - -use crate::internal::{ComputedFromDateFirst, ComputedVecValue, LazyFirst}; - -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(merge)] -pub struct ComputedFromHeightAndDateFirst -where - T: ComputedVecValue + PartialOrd + JsonSchema, -{ - pub height: EagerVec>, - #[deref] - #[deref_mut] - #[traversable(flatten)] - pub rest: ComputedFromDateFirst, - pub difficultyepoch: LazyFirst, -} - -const VERSION: Version = Version::ZERO; - -impl ComputedFromHeightAndDateFirst -where - T: ComputedVecValue + JsonSchema + 'static, -{ - pub fn forced_import( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - ) -> Result { - let v = version + VERSION; - - let height: EagerVec> = EagerVec::forced_import(db, name, v)?; - let rest = ComputedFromDateFirst::forced_import(db, name, v, indexes)?; - let difficultyepoch = LazyFirst::from_source( - name, - v, - height.boxed_clone(), - indexes.difficultyepoch.identity.boxed_clone(), - ); - - Ok(Self { - height, - rest, - difficultyepoch, - }) - } - - /// Compute rest (dateindex + periods) with the given compute function. - pub fn compute_rest( - &mut self, - starting_indexes: &ComputeIndexes, - exit: &Exit, - compute: F, - ) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { - self.rest.compute_all(starting_indexes, exit, compute) - } -} diff --git a/crates/brk_computer/src/internal/multi/from_height_and_date/last.rs b/crates/brk_computer/src/internal/multi/from_height_and_date/last.rs deleted file mode 100644 index 744f34c32..000000000 --- a/crates/brk_computer/src/internal/multi/from_height_and_date/last.rs +++ /dev/null @@ -1,72 +0,0 @@ -//! 
ComputedFromHeightAndDateLast - height storage + dateindex storage + lazy periods. -//! -//! Use this when both height and dateindex are stored EagerVecs with last-value aggregation. - -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{DateIndex, DifficultyEpoch, Height, Version}; -use derive_more::{Deref, DerefMut}; -use schemars::JsonSchema; -use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec}; - -use crate::{indexes, internal::ComputedFromDateLast, ComputeIndexes}; - -use crate::internal::{ComputedVecValue, LazyLast}; - -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(merge)] -pub struct ComputedFromHeightAndDateLast -where - T: ComputedVecValue + PartialOrd + JsonSchema, -{ - pub height: EagerVec>, - #[deref] - #[deref_mut] - #[traversable(flatten)] - pub rest: ComputedFromDateLast, - pub difficultyepoch: LazyLast, -} - -const VERSION: Version = Version::ZERO; - -impl ComputedFromHeightAndDateLast -where - T: ComputedVecValue + JsonSchema + 'static, -{ - pub fn forced_import( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - ) -> Result { - let v = version + VERSION; - - let height: EagerVec> = EagerVec::forced_import(db, name, v)?; - let rest = ComputedFromDateLast::forced_import(db, name, v, indexes)?; - let difficultyepoch = LazyLast::from_source( - name, - v, - height.boxed_clone(), - indexes.difficultyepoch.identity.boxed_clone(), - ); - - Ok(Self { - height, - rest, - difficultyepoch, - }) - } - - /// Compute rest (dateindex + periods) with the given compute function. 
- pub fn compute_rest( - &mut self, - starting_indexes: &ComputeIndexes, - exit: &Exit, - compute: F, - ) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { - self.rest.compute_all(starting_indexes, exit, compute) - } -} diff --git a/crates/brk_computer/src/internal/multi/from_height_and_date/lazy_ohlc.rs b/crates/brk_computer/src/internal/multi/from_height_and_date/lazy_ohlc.rs deleted file mode 100644 index d2a0c0496..000000000 --- a/crates/brk_computer/src/internal/multi/from_height_and_date/lazy_ohlc.rs +++ /dev/null @@ -1,30 +0,0 @@ -//! OHLC period groupings for all time/chain periods. - -use brk_traversable::Traversable; -use brk_types::{ - DateIndex, DecadeIndex, DifficultyEpoch, Height, MonthIndex, QuarterIndex, SemesterIndex, - WeekIndex, YearIndex, -}; -use schemars::JsonSchema; -use serde::Serialize; -use vecdb::{BytesVec, BytesVecValue, EagerVec, Formattable}; - -/// Bundled OHLC vecs for all periods (time + chain based). -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyFromHeightAndDateOHLC -where - T: BytesVecValue + Formattable + Serialize + JsonSchema + 'static, -{ - // Time-based periods - pub dateindex: EagerVec>, - pub week: EagerVec>, - pub month: EagerVec>, - pub quarter: EagerVec>, - pub semester: EagerVec>, - pub year: EagerVec>, - pub decade: EagerVec>, - // Chain-based periods - pub height: EagerVec>, - pub difficultyepoch: EagerVec>, -} diff --git a/crates/brk_computer/src/internal/multi/from_height_and_date/max.rs b/crates/brk_computer/src/internal/multi/from_height_and_date/max.rs deleted file mode 100644 index 1e9039f03..000000000 --- a/crates/brk_computer/src/internal/multi/from_height_and_date/max.rs +++ /dev/null @@ -1,105 +0,0 @@ -//! ComputedFromHeightAndDateMax - height storage + dateindex storage + lazy periods. -//! -//! Use this when both height and dateindex are stored EagerVecs with max-value aggregation. 
- -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{DateIndex, DifficultyEpoch, Height, Version}; -use derive_more::{Deref, DerefMut}; -use schemars::JsonSchema; -use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec}; - -use crate::{ComputeIndexes, indexes}; - -use crate::internal::{ComputedFromDateMax, ComputedVecValue, LazyMax}; - -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(merge)] -pub struct ComputedFromHeightAndDateMax -where - T: ComputedVecValue + PartialOrd + JsonSchema, -{ - pub height: EagerVec>, - #[deref] - #[deref_mut] - #[traversable(flatten)] - pub rest: ComputedFromDateMax, - pub difficultyepoch: LazyMax, -} - -const VERSION: Version = Version::ZERO; - -impl ComputedFromHeightAndDateMax -where - T: ComputedVecValue + JsonSchema + 'static, -{ - pub fn forced_import( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - ) -> Result { - Self::forced_import_inner(db, name, version, indexes, false) - } - - /// Import without adding _max suffix to lazy vecs. - pub fn forced_import_raw( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - ) -> Result { - Self::forced_import_inner(db, name, version, indexes, true) - } - - fn forced_import_inner( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - raw: bool, - ) -> Result { - let v = version + VERSION; - - let height: EagerVec> = EagerVec::forced_import(db, name, v)?; - let rest = if raw { - ComputedFromDateMax::forced_import_raw(db, name, v, indexes)? - } else { - ComputedFromDateMax::forced_import(db, name, v, indexes)? 
- }; - let difficultyepoch = if raw { - LazyMax::from_source_raw( - name, - v, - height.boxed_clone(), - indexes.difficultyepoch.identity.boxed_clone(), - ) - } else { - LazyMax::from_source( - name, - v, - height.boxed_clone(), - indexes.difficultyepoch.identity.boxed_clone(), - ) - }; - - Ok(Self { - height, - rest, - difficultyepoch, - }) - } - - /// Compute rest (dateindex + periods) with the given compute function. - pub fn compute_rest( - &mut self, - starting_indexes: &ComputeIndexes, - exit: &Exit, - compute: F, - ) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { - self.rest.compute_all(starting_indexes, exit, compute) - } -} diff --git a/crates/brk_computer/src/internal/multi/from_height_and_date/min.rs b/crates/brk_computer/src/internal/multi/from_height_and_date/min.rs deleted file mode 100644 index 196de7fc0..000000000 --- a/crates/brk_computer/src/internal/multi/from_height_and_date/min.rs +++ /dev/null @@ -1,105 +0,0 @@ -//! ComputedFromHeightAndDateMin - height storage + dateindex storage + lazy periods. -//! -//! Use this when both height and dateindex are stored EagerVecs with min-value aggregation. 
- -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{DateIndex, DifficultyEpoch, Height, Version}; -use derive_more::{Deref, DerefMut}; -use schemars::JsonSchema; -use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec}; - -use crate::{ComputeIndexes, indexes}; - -use crate::internal::{ComputedFromDateMin, ComputedVecValue, LazyMin}; - -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(merge)] -pub struct ComputedFromHeightAndDateMin -where - T: ComputedVecValue + PartialOrd + JsonSchema, -{ - pub height: EagerVec>, - #[deref] - #[deref_mut] - #[traversable(flatten)] - pub rest: ComputedFromDateMin, - pub difficultyepoch: LazyMin, -} - -const VERSION: Version = Version::ZERO; - -impl ComputedFromHeightAndDateMin -where - T: ComputedVecValue + JsonSchema + 'static, -{ - pub fn forced_import( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - ) -> Result { - Self::forced_import_inner(db, name, version, indexes, false) - } - - /// Import without adding _min suffix to lazy vecs. - pub fn forced_import_raw( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - ) -> Result { - Self::forced_import_inner(db, name, version, indexes, true) - } - - fn forced_import_inner( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - raw: bool, - ) -> Result { - let v = version + VERSION; - - let height: EagerVec> = EagerVec::forced_import(db, name, v)?; - let rest = if raw { - ComputedFromDateMin::forced_import_raw(db, name, v, indexes)? - } else { - ComputedFromDateMin::forced_import(db, name, v, indexes)? 
- }; - let difficultyepoch = if raw { - LazyMin::from_source_raw( - name, - v, - height.boxed_clone(), - indexes.difficultyepoch.identity.boxed_clone(), - ) - } else { - LazyMin::from_source( - name, - v, - height.boxed_clone(), - indexes.difficultyepoch.identity.boxed_clone(), - ) - }; - - Ok(Self { - height, - rest, - difficultyepoch, - }) - } - - /// Compute rest (dateindex + periods) with the given compute function. - pub fn compute_rest( - &mut self, - starting_indexes: &ComputeIndexes, - exit: &Exit, - compute: F, - ) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { - self.rest.compute_all(starting_indexes, exit, compute) - } -} diff --git a/crates/brk_computer/src/internal/multi/from_height_and_date/mod.rs b/crates/brk_computer/src/internal/multi/from_height_and_date/mod.rs deleted file mode 100644 index d6b91de82..000000000 --- a/crates/brk_computer/src/internal/multi/from_height_and_date/mod.rs +++ /dev/null @@ -1,23 +0,0 @@ -mod binary_last; -mod computed_ohlc; -mod constant; -mod first; -mod last; -mod lazy_ohlc; -mod max; -mod min; -mod price; -mod unary_last; -mod value_last; - -pub use binary_last::*; -pub use computed_ohlc::*; -pub use constant::*; -pub use first::*; -pub use last::*; -pub use lazy_ohlc::*; -pub use max::*; -pub use min::*; -pub use price::*; -pub use unary_last::*; -pub use value_last::*; diff --git a/crates/brk_computer/src/internal/multi/from_height_and_date/price.rs b/crates/brk_computer/src/internal/multi/from_height_and_date/price.rs deleted file mode 100644 index 60f497771..000000000 --- a/crates/brk_computer/src/internal/multi/from_height_and_date/price.rs +++ /dev/null @@ -1,49 +0,0 @@ -//! Price wrapper for height+date-based metrics with both USD and sats representations. 
- -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{Dollars, SatsFract, Version}; -use derive_more::{Deref, DerefMut}; -use vecdb::Database; - -use super::{ComputedFromHeightAndDateLast, LazyUnaryFromHeightAndDateLast}; -use crate::{indexes, internal::DollarsToSatsFract}; - -/// Price metric (height+date-based) with both USD and sats representations. -/// -/// Derefs to the dollars metric, so existing code works unchanged. -/// Access `.sats` for the sats exchange rate version. -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(merge)] -pub struct PriceFromHeightAndDate { - #[deref] - #[deref_mut] - #[traversable(flatten)] - pub dollars: ComputedFromHeightAndDateLast, - pub sats: LazyUnaryFromHeightAndDateLast, -} - -impl PriceFromHeightAndDate { - pub fn forced_import( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - ) -> Result { - let dollars = ComputedFromHeightAndDateLast::forced_import(db, name, version, indexes)?; - Ok(Self::from_computed(name, version, dollars)) - } - - pub fn from_computed( - name: &str, - version: Version, - dollars: ComputedFromHeightAndDateLast, - ) -> Self { - let sats = LazyUnaryFromHeightAndDateLast::from_computed_last::( - &format!("{name}_sats"), - version, - &dollars, - ); - Self { dollars, sats } - } -} diff --git a/crates/brk_computer/src/internal/multi/from_height_and_date/unary_last.rs b/crates/brk_computer/src/internal/multi/from_height_and_date/unary_last.rs deleted file mode 100644 index 5330a5561..000000000 --- a/crates/brk_computer/src/internal/multi/from_height_and_date/unary_last.rs +++ /dev/null @@ -1,71 +0,0 @@ -//! Unary transform composite from Height+Date - Last aggregation only. 
- -use brk_traversable::Traversable; -use brk_types::{ - DateIndex, DecadeIndex, DifficultyEpoch, Height, MonthIndex, QuarterIndex, SemesterIndex, - Version, WeekIndex, YearIndex, -}; -use schemars::JsonSchema; -use vecdb::{IterableCloneableVec, LazyVecFrom1, UnaryTransform}; - -use crate::internal::{ComputedFromHeightAndDateLast, ComputedVecValue, LazyTransformLast}; - -const VERSION: Version = Version::ZERO; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyUnaryFromHeightAndDateLast -where - T: ComputedVecValue + PartialOrd + JsonSchema, - ST: ComputedVecValue, -{ - pub height: LazyVecFrom1, - pub dateindex: LazyTransformLast, - pub weekindex: LazyTransformLast, - pub monthindex: LazyTransformLast, - pub quarterindex: LazyTransformLast, - pub semesterindex: LazyTransformLast, - pub yearindex: LazyTransformLast, - pub decadeindex: LazyTransformLast, - pub difficultyepoch: LazyTransformLast, -} - -impl LazyUnaryFromHeightAndDateLast -where - T: ComputedVecValue + JsonSchema + 'static, - ST: ComputedVecValue + JsonSchema, -{ - pub fn from_computed_last>( - name: &str, - version: Version, - source: &ComputedFromHeightAndDateLast, - ) -> Self { - let v = version + VERSION; - - macro_rules! 
period { - ($p:ident) => { - LazyTransformLast::from_lazy_last::(name, v, &source.rest.$p) - }; - } - - Self { - height: LazyVecFrom1::transformed::(name, v, source.height.boxed_clone()), - dateindex: LazyTransformLast(LazyVecFrom1::transformed::( - name, - v, - source.rest.dateindex.boxed_clone(), - )), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), - difficultyepoch: LazyTransformLast::from_lazy_last::( - name, - v, - &source.difficultyepoch, - ), - } - } -} diff --git a/crates/brk_computer/src/internal/multi/from_height_and_date/value_last.rs b/crates/brk_computer/src/internal/multi/from_height_and_date/value_last.rs deleted file mode 100644 index 91327951b..000000000 --- a/crates/brk_computer/src/internal/multi/from_height_and_date/value_last.rs +++ /dev/null @@ -1,81 +0,0 @@ -//! Value type for stateful Last pattern - height and dateindex both stored independently. -//! -//! Use this when dateindex values are NOT derivable from height (e.g., unrealized metrics -//! where end-of-day state differs from last-block-of-day). - -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{Height, Sats, Version}; -use derive_more::{Deref, DerefMut}; -use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec}; - -use crate::{ComputeIndexes, indexes, price}; - -use crate::internal::{LazyDerivedValuesHeight, LazyValueDifficultyEpoch, ValueFromDateLast}; - -/// Value type where both height and dateindex are stored independently. -/// Dateindex values cannot be derived from height (e.g., unrealized P&L). 
-#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(merge)] -pub struct ValueFromHeightAndDateLast { - #[traversable(rename = "sats")] - pub height: EagerVec>, - #[traversable(flatten)] - pub height_value: LazyDerivedValuesHeight, - #[traversable(flatten)] - pub difficultyepoch: LazyValueDifficultyEpoch, - #[deref] - #[deref_mut] - #[traversable(flatten)] - pub indexes: ValueFromDateLast, -} - -const VERSION: Version = Version::ZERO; - -impl ValueFromHeightAndDateLast { - pub fn forced_import( - db: &Database, - name: &str, - version: Version, - compute_dollars: bool, - indexes: &indexes::Vecs, - price: Option<&price::Vecs>, - ) -> Result { - let v = version + VERSION; - - let height: EagerVec> = EagerVec::forced_import(db, name, v)?; - - let price_source = price.map(|p| p.usd.split.close.height.boxed_clone()); - - let height_value = - LazyDerivedValuesHeight::from_source(name, height.boxed_clone(), v, price_source); - - let difficultyepoch = LazyValueDifficultyEpoch::from_height_source( - name, - height.boxed_clone(), - indexes.difficultyepoch.identity.boxed_clone(), - price, - v, - ); - - let indexes = ValueFromDateLast::forced_import(db, name, v, compute_dollars, indexes)?; - - Ok(Self { - height, - height_value, - difficultyepoch, - indexes, - }) - } - - /// Compute derived periods from dateindex. 
- pub fn compute_dollars_from_price( - &mut self, - price: Option<&price::Vecs>, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.indexes - .compute_dollars_from_price(price, starting_indexes, exit) - } -} diff --git a/crates/brk_computer/src/internal/multi/from_tx/lazy_distribution.rs b/crates/brk_computer/src/internal/multi/from_tx/lazy_distribution.rs index 5c6f7e785..41306e8a0 100644 --- a/crates/brk_computer/src/internal/multi/from_tx/lazy_distribution.rs +++ b/crates/brk_computer/src/internal/multi/from_tx/lazy_distribution.rs @@ -6,7 +6,7 @@ use brk_traversable::Traversable; use brk_types::{TxIndex, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{CollectableVec, Database, Exit, LazyVecFrom2}; +use vecdb::{Database, Exit, LazyVecFrom2, ReadableVec, Rw, StorageMode}; use crate::{ ComputeIndexes, indexes, @@ -15,9 +15,9 @@ use crate::{ const VERSION: Version = Version::ZERO; -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Deref, DerefMut, Traversable)] #[traversable(merge)] -pub struct LazyFromTxDistribution +pub struct LazyFromTxDistribution where T: ComputedVecValue + PartialOrd + JsonSchema, S1: ComputedVecValue, @@ -27,7 +27,7 @@ where #[deref] #[deref_mut] #[traversable(flatten)] - pub distribution: TxDerivedDistribution, + pub distribution: TxDerivedDistribution, } impl LazyFromTxDistribution @@ -36,7 +36,7 @@ where S1: ComputedVecValue + JsonSchema, S2: ComputedVecValue + JsonSchema, { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, version: Version, @@ -51,7 +51,7 @@ where }) } - pub fn derive_from( + pub(crate) fn derive_from( &mut self, indexer: &Indexer, indexes: &indexes::Vecs, @@ -59,7 +59,7 @@ where exit: &Exit, ) -> Result<()> where - LazyVecFrom2: CollectableVec, + LazyVecFrom2: ReadableVec, { self.distribution .derive_from(indexer, indexes, starting_indexes, &self.txindex, exit) diff --git 
a/crates/brk_computer/src/internal/multi/from_tx/value_dollars.rs b/crates/brk_computer/src/internal/multi/from_tx/value_dollars.rs index 466e6b0ec..39e601588 100644 --- a/crates/brk_computer/src/internal/multi/from_tx/value_dollars.rs +++ b/crates/brk_computer/src/internal/multi/from_tx/value_dollars.rs @@ -1,70 +1,78 @@ -//! Dollars from TxIndex with lazy height stats and stored dateindex. +//! Dollars from TxIndex with lazy height stats and stored day1. //! //! Height-level USD stats (min/max/avg/sum/percentiles) are lazy: `sats_stat * price`. -//! Height cumulative and dateindex stats are stored since they require aggregation +//! Height cumulative and day1 stats are stored since they require aggregation //! across heights with varying prices. use brk_error::Result; -use brk_indexer::Indexer; use brk_traversable::Traversable; use brk_types::{ - Bitcoin, Close, DateIndex, DifficultyEpoch, Dollars, Height, Sats, TxIndex, Version, + Bitcoin, Day1, Day3, DifficultyEpoch, Dollars, HalvingEpoch, Height, Hour1, Hour4, Hour12, + Minute1, Minute5, Minute10, Minute30, Month1, Month3, Month6, Sats, TxIndex, Version, Week1, + Year1, Year10, }; -use derive_more::{Deref, DerefMut}; use vecdb::{ - Database, EagerVec, Exit, ImportableVec, IterableBoxedVec, IterableCloneableVec, LazyVecFrom3, + Database, Exit, LazyVecFrom3, ReadableBoxedVec, ReadableCloneableVec, Rw, StorageMode, }; use crate::{ - indexes, - internal::{ - CumulativeVec, Full, LazyBinaryTransformFull, LazyDateDerivedFull, LazyFull, - SatsTimesClosePrice, Stats, - }, - ComputeIndexes, + ComputeIndexes, indexes, + internal::{CumulativeVec, Full, LazyBinaryTransformFull, LazyFull, SatsTimesPrice}, }; /// Lazy dollars at TxIndex: `sats * price[height]` pub type LazyDollarsTxIndex = - LazyVecFrom3>; + LazyVecFrom3; /// Lazy dollars height stats: `sats_height_stat * price` -pub type LazyDollarsHeightFull = LazyBinaryTransformFull>; +pub type LazyDollarsHeightFull = LazyBinaryTransformFull; -/// Dollars with lazy 
txindex and height fields, stored dateindex. +/// Dollars with lazy txindex and height fields, stored day1. /// /// Height-level stats (except cumulative) are lazy: `sats * price[height]`. /// Cumulative at height level is stored since it requires summing historical values. -/// DateIndex stats are stored since they aggregate across heights with varying prices. -#[derive(Clone, Deref, DerefMut, Traversable)] +/// Day1 stats are stored since they aggregate across heights with varying prices. +#[derive(Traversable)] #[traversable(merge)] -pub struct ValueDollarsFromTxFull { +pub struct ValueDollarsFromTxFull { #[traversable(skip)] pub txindex: LazyDollarsTxIndex, #[traversable(flatten)] pub height: LazyDollarsHeightFull, #[traversable(rename = "cumulative")] - pub height_cumulative: CumulativeVec, + pub height_cumulative: CumulativeVec, + pub minute1: LazyFull, + pub minute5: LazyFull, + pub minute10: LazyFull, + pub minute30: LazyFull, + pub hour1: LazyFull, + pub hour4: LazyFull, + pub hour12: LazyFull, + pub day1: LazyFull, + pub day3: LazyFull, + pub week1: LazyFull, + pub month1: LazyFull, + pub month3: LazyFull, + pub month6: LazyFull, + pub year1: LazyFull, + pub year10: LazyFull, + pub halvingepoch: LazyFull, pub difficultyepoch: LazyFull, - pub dateindex: Stats, - #[deref] - #[deref_mut] - pub dates: LazyDateDerivedFull, } const VERSION: Version = Version::ONE; // Bumped for lazy height change impl ValueDollarsFromTxFull { #[allow(clippy::too_many_arguments)] - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, version: Version, indexes: &indexes::Vecs, sats_height: &Full, - height_to_price: IterableBoxedVec>, - sats_txindex: IterableBoxedVec, - txindex_to_height: IterableBoxedVec, + height_to_price: ReadableBoxedVec, + sats_txindex: ReadableBoxedVec, + txindex_to_height: ReadableBoxedVec, ) -> Result { let v = version + VERSION; @@ -77,7 +85,7 @@ impl ValueDollarsFromTxFull { ); // Lazy height stats: sats_stat * price - let 
height = LazyBinaryTransformFull::from_full_and_source::( + let height = LazyBinaryTransformFull::from_full_and_source::( name, v, sats_height, @@ -85,55 +93,85 @@ impl ValueDollarsFromTxFull { ); // Stored cumulative - must be computed by summing historical sum*price - let height_cumulative = CumulativeVec(EagerVec::forced_import( - db, - &format!("{name}_cumulative"), - v, - )?); + let height_cumulative = CumulativeVec::forced_import(db, name, v)?; - let dateindex = Stats::forced_import(db, name, v)?; + macro_rules! period { + ($idx:ident) => { + LazyFull::from_height_source( + name, + v, + height.boxed_sum(), + height_cumulative.read_only_boxed_clone(), + indexes.$idx.first_height.read_only_boxed_clone(), + ) + }; + } - let difficultyepoch = - LazyFull::::from_stats_aggregate( - name, - v, - height.boxed_average(), - height.boxed_min(), - height.boxed_max(), - height.boxed_sum(), - height_cumulative.0.boxed_clone(), - indexes.difficultyepoch.identity.boxed_clone(), - ); + macro_rules! 
epoch { + ($idx:ident) => { + LazyFull::from_stats_aggregate( + name, + v, + height.boxed_average(), + height.boxed_min(), + height.boxed_max(), + height.boxed_sum(), + height_cumulative.read_only_boxed_clone(), + height.boxed_average(), + indexes.$idx.identity.read_only_boxed_clone(), + ) + }; + } - let dates = LazyDateDerivedFull::from_sources( - name, - v, - dateindex.boxed_average(), - dateindex.boxed_min(), - dateindex.boxed_max(), - dateindex.boxed_sum(), - dateindex.boxed_cumulative(), - indexes, - ); + let minute1 = period!(minute1); + let minute5 = period!(minute5); + let minute10 = period!(minute10); + let minute30 = period!(minute30); + let hour1 = period!(hour1); + let hour4 = period!(hour4); + let hour12 = period!(hour12); + let day1 = period!(day1); + let day3 = period!(day3); + let week1 = period!(week1); + let month1 = period!(month1); + let month3 = period!(month3); + let month6 = period!(month6); + let year1 = period!(year1); + let year10 = period!(year10); + let halvingepoch = epoch!(halvingepoch); + let difficultyepoch = epoch!(difficultyepoch); Ok(Self { txindex, height, height_cumulative, + minute1, + minute5, + minute10, + minute30, + hour1, + hour4, + hour12, + day1, + day3, + week1, + month1, + month3, + month6, + year1, + year10, + halvingepoch, difficultyepoch, - dateindex, - dates, }) } - /// Compute stored fields (cumulative and dateindex) from lazy height stats. + /// Compute stored fields (cumulative and day1) from lazy height stats. /// /// This is MUCH faster than the old approach since it only iterates heights, /// not all transactions per block. 
- pub fn derive_from( + pub(crate) fn derive_from( &mut self, - _indexer: &Indexer, - indexes: &indexes::Vecs, + _indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { @@ -144,15 +182,6 @@ impl ValueDollarsFromTxFull { exit, )?; - // Compute dateindex stats by aggregating lazy height stats - self.dateindex.compute( - starting_indexes.dateindex, - &self.height.average, - &indexes.dateindex.first_height, - &indexes.dateindex.height_count, - exit, - )?; - Ok(()) } } @@ -160,9 +189,9 @@ impl ValueDollarsFromTxFull { fn create_lazy_txindex( name: &str, version: Version, - sats_txindex: IterableBoxedVec, - txindex_to_height: IterableBoxedVec, - height_to_price: IterableBoxedVec>, + sats_txindex: ReadableBoxedVec, + txindex_to_height: ReadableBoxedVec, + height_to_price: ReadableBoxedVec, ) -> LazyDollarsTxIndex { LazyVecFrom3::init( &format!("{name}_txindex"), @@ -170,14 +199,6 @@ fn create_lazy_txindex( sats_txindex, txindex_to_height, height_to_price, - |txindex, sats_iter, height_iter, price_iter| { - sats_iter.get(txindex).and_then(|sats| { - height_iter.get(txindex).and_then(|height| { - price_iter - .get(height) - .map(|close| *close * Bitcoin::from(sats)) - }) - }) - }, + |_index, sats, _height, close| close * Bitcoin::from(sats), ) } diff --git a/crates/brk_computer/src/internal/multi/from_tx/value_full.rs b/crates/brk_computer/src/internal/multi/from_tx/value_full.rs index 20a310b4a..aae3ef62b 100644 --- a/crates/brk_computer/src/internal/multi/from_tx/value_full.rs +++ b/crates/brk_computer/src/internal/multi/from_tx/value_full.rs @@ -5,55 +5,45 @@ use brk_indexer::Indexer; use brk_traversable::Traversable; use brk_types::{Sats, TxIndex, Version}; use derive_more::{Deref, DerefMut}; -use vecdb::{Database, EagerVec, Exit, ImportableVec, PcoVec}; +use vecdb::{Database, EagerVec, Exit, ImportableVec, PcoVec, Rw, StorageMode}; -use crate::{ComputeIndexes, indexes, internal::ValueTxDerivedFull, price}; +use 
crate::{ComputeIndexes, indexes, internal::ValueTxDerivedFull, prices}; const VERSION: Version = Version::ZERO; -#[derive(Clone, Deref, DerefMut, Traversable)] -pub struct ValueFromTxFull { +#[derive(Deref, DerefMut, Traversable)] +pub struct ValueFromTxFull { #[traversable(rename = "txindex")] - pub base: EagerVec>, + pub base: M::Stored>>, #[deref] #[deref_mut] #[traversable(flatten)] - pub indexes: ValueTxDerivedFull, + pub indexes: ValueTxDerivedFull, } impl ValueFromTxFull { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, version: Version, indexes: &indexes::Vecs, indexer: &Indexer, - price: Option<&price::Vecs>, + prices: &prices::Vecs, ) -> Result { let v = version + VERSION; let txindex = EagerVec::forced_import(db, name, v)?; let derived = - ValueTxDerivedFull::forced_import(db, name, v, indexes, indexer, price, &txindex)?; + ValueTxDerivedFull::forced_import(db, name, v, indexes, indexer, prices, &txindex)?; Ok(Self { base: txindex, indexes: derived, }) } - pub fn derive_from( - &mut self, - indexer: &Indexer, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.derive_from_with_skip(indexer, indexes, starting_indexes, exit, 0) - } - /// Derive from source, skipping first N transactions per block from all calculations. /// /// Use `skip_count: 1` to exclude coinbase transactions from fee/feerate stats. - pub fn derive_from_with_skip( + pub(crate) fn derive_from_with_skip( &mut self, indexer: &Indexer, indexes: &indexes::Vecs, diff --git a/crates/brk_computer/src/internal/multi/height_and_date/bytes.rs b/crates/brk_computer/src/internal/multi/height_and_date/bytes.rs deleted file mode 100644 index e1a8cc986..000000000 --- a/crates/brk_computer/src/internal/multi/height_and_date/bytes.rs +++ /dev/null @@ -1,37 +0,0 @@ -//! ComputedHeightAndDateBytes - height + dateindex BytesVec storage. -//! -//! 
Use this for simple cases where both height and dateindex are stored BytesVecs -//! without any lazy derivations. For OHLC-type data. - -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{DateIndex, Height, Version}; -use schemars::JsonSchema; -use serde::Serialize; -use vecdb::{BytesVec, BytesVecValue, Database, Formattable, ImportableVec}; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct ComputedHeightAndDateBytes -where - T: BytesVecValue + Formattable + Serialize + JsonSchema, -{ - pub height: BytesVec, - pub dateindex: BytesVec, -} - -const VERSION: Version = Version::ZERO; - -impl ComputedHeightAndDateBytes -where - T: BytesVecValue + Formattable + Serialize + JsonSchema + 'static, -{ - pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { - let v = version + VERSION; - - Ok(Self { - height: BytesVec::forced_import(db, name, v)?, - dateindex: BytesVec::forced_import(db, name, v)?, - }) - } -} diff --git a/crates/brk_computer/src/internal/multi/height_and_date/mod.rs b/crates/brk_computer/src/internal/multi/height_and_date/mod.rs deleted file mode 100644 index ed1a31441..000000000 --- a/crates/brk_computer/src/internal/multi/height_and_date/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod bytes; -mod ohlc; - -pub use bytes::*; -pub use ohlc::*; diff --git a/crates/brk_computer/src/internal/multi/height_and_date/ohlc.rs b/crates/brk_computer/src/internal/multi/height_and_date/ohlc.rs deleted file mode 100644 index 1a69d3013..000000000 --- a/crates/brk_computer/src/internal/multi/height_and_date/ohlc.rs +++ /dev/null @@ -1,21 +0,0 @@ -//! Lazy OHLC component extractors for height + dateindex only. - -use brk_traversable::Traversable; -use brk_types::{DateIndex, Height}; -use schemars::JsonSchema; -use serde::Serialize; -use vecdb::{BytesVecValue, Formattable}; - -use crate::internal::LazyOHLC; - -/// Lazy OHLC component extractors for height + dateindex. 
-#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyHeightAndDateOHLC -where - T: BytesVecValue + Formattable + Serialize + JsonSchema + 'static, - SourceT: BytesVecValue + Formattable + Serialize + JsonSchema + 'static, -{ - pub height: LazyOHLC, - pub dateindex: LazyOHLC, -} diff --git a/crates/brk_computer/src/internal/multi/height_derived/binary_last.rs b/crates/brk_computer/src/internal/multi/height_derived/binary_last.rs index c2f5ce699..0903878b6 100644 --- a/crates/brk_computer/src/internal/multi/height_derived/binary_last.rs +++ b/crates/brk_computer/src/internal/multi/height_derived/binary_last.rs @@ -1,17 +1,19 @@ //! Lazy binary transform for derived block with Last aggregation only. use brk_traversable::Traversable; -use brk_types::{DifficultyEpoch, Version}; -use derive_more::{Deref, DerefMut}; +use brk_types::{ + Day1, Day3, DifficultyEpoch, HalvingEpoch, Hour1, Hour12, Hour4, Minute1, Minute10, Minute30, + Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10, +}; use schemars::JsonSchema; -use vecdb::{BinaryTransform, IterableCloneableVec}; +use vecdb::{BinaryTransform, ReadableCloneableVec}; use crate::internal::{ - ComputedFromHeightLast, ComputedFromHeightSumCum, ComputedFromHeightAndDateLast, ComputedVecValue, - LazyBinaryFromDateLast, LazyBinaryTransformLast, LazyFromHeightLast, NumericValue, + ComputedFromHeightLast, ComputedFromHeightSumCum, ComputedVecValue, + LazyBinaryTransformLast, LazyFromHeightLast, NumericValue, }; -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Clone, Traversable)] #[traversable(merge)] pub struct LazyBinaryHeightDerivedLast where @@ -19,9 +21,22 @@ where S1T: ComputedVecValue, S2T: ComputedVecValue, { - #[deref] - #[deref_mut] - pub dates: LazyBinaryFromDateLast, + pub minute1: LazyBinaryTransformLast, + pub minute5: LazyBinaryTransformLast, + pub minute10: LazyBinaryTransformLast, + pub minute30: LazyBinaryTransformLast, + pub hour1: LazyBinaryTransformLast, + pub hour4: 
LazyBinaryTransformLast, + pub hour12: LazyBinaryTransformLast, + pub day1: LazyBinaryTransformLast, + pub day3: LazyBinaryTransformLast, + pub week1: LazyBinaryTransformLast, + pub month1: LazyBinaryTransformLast, + pub month3: LazyBinaryTransformLast, + pub month6: LazyBinaryTransformLast, + pub year1: LazyBinaryTransformLast, + pub year10: LazyBinaryTransformLast, + pub halvingepoch: LazyBinaryTransformLast, pub difficultyepoch: LazyBinaryTransformLast, } @@ -33,7 +48,7 @@ where S1T: ComputedVecValue + JsonSchema, S2T: ComputedVecValue + JsonSchema, { - pub fn from_computed_sum_cum>( + pub(crate) fn from_computed_sum_cum>( name: &str, version: Version, source1: &ComputedFromHeightSumCum, @@ -45,25 +60,39 @@ where { let v = version + VERSION; + macro_rules! period { + ($p:ident) => { + LazyBinaryTransformLast::from_vecs::( + name, + v, + source1.$p.cumulative.read_only_boxed_clone(), + source2.$p.cumulative.read_only_boxed_clone(), + ) + }; + } + Self { - dates: LazyBinaryFromDateLast::from_both_sum_cum_cumulatives::( - name, - v, - source1.dateindex.cumulative.boxed_clone(), - &source1.dates, - source2.dateindex.cumulative.boxed_clone(), - &source2.dates, - ), - difficultyepoch: LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.difficultyepoch.cumulative.boxed_clone(), - source2.difficultyepoch.cumulative.boxed_clone(), - ), + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: period!(halvingepoch), + difficultyepoch: period!(difficultyepoch), } } - pub fn from_computed_last>( + pub(crate) fn from_computed_last>( name: &str, version: Version, source1: &ComputedFromHeightLast, @@ -75,18 +104,39 
@@ where { let v = version + VERSION; + macro_rules! period { + ($p:ident) => { + LazyBinaryTransformLast::from_lazy_last::( + name, + v, + &source1.$p, + &source2.$p, + ) + }; + } + Self { - dates: LazyBinaryFromDateLast::from_both_block_last::(name, v, source1, source2), - difficultyepoch: LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.difficultyepoch.boxed_clone(), - source2.difficultyepoch.boxed_clone(), - ), + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: period!(halvingepoch), + difficultyepoch: period!(difficultyepoch), } } - pub fn from_lazy_block_last_and_block_last( + pub(crate) fn from_lazy_block_last_and_block_last( name: &str, version: Version, source1: &LazyFromHeightLast, @@ -99,48 +149,39 @@ where { let v = version + VERSION; + macro_rules! 
period { + ($p:ident) => { + LazyBinaryTransformLast::from_vecs::( + name, + v, + source1.$p.read_only_boxed_clone(), + source2.$p.read_only_boxed_clone(), + ) + }; + } + Self { - dates: LazyBinaryFromDateLast::from_lazy_block_last_and_block_last::( - name, v, source1, source2, - ), - difficultyepoch: LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.rest.difficultyepoch.boxed_clone(), - source2.rest.difficultyepoch.boxed_clone(), - ), + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: period!(halvingepoch), + difficultyepoch: period!(difficultyepoch), } } - pub fn from_computed_height_date_last>( - name: &str, - version: Version, - source1: &ComputedFromHeightAndDateLast, - source2: &ComputedFromHeightAndDateLast, - ) -> Self - where - S1T: PartialOrd, - S2T: PartialOrd, - { - let v = version + VERSION; - - Self { - dates: LazyBinaryFromDateLast::from_computed_both_last::( - name, - v, - &source1.rest, - &source2.rest, - ), - difficultyepoch: LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.difficultyepoch.boxed_clone(), - source2.difficultyepoch.boxed_clone(), - ), - } - } - - pub fn from_block_last_and_lazy_block_last( + pub(crate) fn from_block_last_and_lazy_block_last( name: &str, version: Version, source1: &ComputedFromHeightLast, @@ -153,74 +194,35 @@ where { let v = version + VERSION; - Self { - dates: LazyBinaryFromDateLast::from_block_last_and_lazy_block_last::( - name, v, source1, source2, - ), - difficultyepoch: LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.rest.difficultyepoch.boxed_clone(), - source2.rest.difficultyepoch.boxed_clone(), - ), + macro_rules! 
period { + ($p:ident) => { + LazyBinaryTransformLast::from_vecs::( + name, + v, + source1.$p.read_only_boxed_clone(), + source2.$p.read_only_boxed_clone(), + ) + }; } - } - - pub fn from_computed_height_date_and_block_last>( - name: &str, - version: Version, - source1: &ComputedFromHeightAndDateLast, - source2: &ComputedFromHeightLast, - ) -> Self - where - S1T: PartialOrd, - S2T: NumericValue, - { - let v = version + VERSION; Self { - dates: LazyBinaryFromDateLast::from_dateindex_and_height_last::( - name, - v, - &source1.rest, - source2, - ), - difficultyepoch: LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.difficultyepoch.boxed_clone(), - source2.difficultyepoch.boxed_clone(), - ), - } - } - - /// Create from a ComputedFromHeightAndDateLast and a LazyFromHeightLast. - pub fn from_computed_height_date_and_lazy_block_last( - name: &str, - version: Version, - source1: &ComputedFromHeightAndDateLast, - source2: &LazyFromHeightLast, - ) -> Self - where - F: BinaryTransform, - S1T: PartialOrd, - S2SourceT: ComputedVecValue + JsonSchema, - { - let v = version + VERSION; - - Self { - dates: LazyBinaryFromDateLast::from_computed_and_lazy_last::( - name, - v, - &source1.rest, - &source2.rest.dates, - ), - difficultyepoch: LazyBinaryTransformLast::from_vecs::( - name, - v, - source1.difficultyepoch.boxed_clone(), - source2.rest.difficultyepoch.boxed_clone(), - ), + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: period!(halvingepoch), + difficultyepoch: period!(difficultyepoch), } } } diff --git a/crates/brk_computer/src/internal/multi/height_derived/binary_sum.rs 
b/crates/brk_computer/src/internal/multi/height_derived/binary_sum.rs index 057c1b1d9..cfbb13eb5 100644 --- a/crates/brk_computer/src/internal/multi/height_derived/binary_sum.rs +++ b/crates/brk_computer/src/internal/multi/height_derived/binary_sum.rs @@ -1,19 +1,21 @@ //! Lazy aggregated binary transform for Sum-only pattern across all time periods. use brk_traversable::Traversable; -use brk_types::{DifficultyEpoch, Version}; -use derive_more::{Deref, DerefMut}; +use brk_types::{ + Day1, Day3, DifficultyEpoch, HalvingEpoch, Hour1, Hour12, Hour4, Minute1, Minute10, Minute30, + Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10, +}; use schemars::JsonSchema; -use vecdb::{BinaryTransform, IterableCloneableVec}; +use vecdb::{BinaryTransform, ReadableCloneableVec}; use crate::internal::{ - ComputedFromHeightSumCum, ComputedHeightDerivedSum, ComputedVecValue, LazyBinaryFromDateSum, - LazyBinaryTransformSum, LazyFromHeightLast, NumericValue, + ComputedFromHeightSumCum, ComputedHeightDerivedSum, ComputedVecValue, LazyBinaryTransformSum, + LazyFromHeightLast, NumericValue, }; const VERSION: Version = Version::ZERO; -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Clone, Traversable)] #[traversable(merge)] pub struct LazyBinaryHeightDerivedSum where @@ -21,9 +23,22 @@ where S1T: ComputedVecValue, S2T: ComputedVecValue, { - #[deref] - #[deref_mut] - pub dates: LazyBinaryFromDateSum, + pub minute1: LazyBinaryTransformSum, + pub minute5: LazyBinaryTransformSum, + pub minute10: LazyBinaryTransformSum, + pub minute30: LazyBinaryTransformSum, + pub hour1: LazyBinaryTransformSum, + pub hour4: LazyBinaryTransformSum, + pub hour12: LazyBinaryTransformSum, + pub day1: LazyBinaryTransformSum, + pub day3: LazyBinaryTransformSum, + pub week1: LazyBinaryTransformSum, + pub month1: LazyBinaryTransformSum, + pub month3: LazyBinaryTransformSum, + pub month6: LazyBinaryTransformSum, + pub year1: LazyBinaryTransformSum, + pub year10: LazyBinaryTransformSum, + pub 
halvingepoch: LazyBinaryTransformSum, pub difficultyepoch: LazyBinaryTransformSum, } @@ -33,7 +48,7 @@ where S1T: NumericValue + JsonSchema, S2T: NumericValue + JsonSchema, { - pub fn from_derived>( + pub(crate) fn from_derived>( name: &str, version: Version, source1: &ComputedHeightDerivedSum, @@ -41,19 +56,40 @@ where ) -> Self { let v = version + VERSION; + macro_rules! period { + ($p:ident) => { + LazyBinaryTransformSum::from_boxed::( + name, + v, + source1.$p.read_only_boxed_clone(), + source2.$p.read_only_boxed_clone(), + ) + }; + } + Self { - dates: LazyBinaryFromDateSum::from_derived::(name, v, source1, source2), - difficultyepoch: LazyBinaryTransformSum::from_boxed::( - name, - v, - source1.difficultyepoch.boxed_clone(), - source2.difficultyepoch.boxed_clone(), - ), + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: period!(halvingepoch), + difficultyepoch: period!(difficultyepoch), } } /// Create from two LazyBinaryHeightDerivedSum sources. - pub fn from_binary( + pub(crate) fn from_binary( name: &str, version: Version, source1: &LazyBinaryHeightDerivedSum, @@ -68,24 +104,40 @@ where { let v = version + VERSION; + macro_rules! 
period { + ($p:ident) => { + LazyBinaryTransformSum::from_boxed::( + name, + v, + source1.$p.read_only_boxed_clone(), + source2.$p.read_only_boxed_clone(), + ) + }; + } + Self { - dates: LazyBinaryFromDateSum::from_binary::( - name, - v, - &source1.dates, - &source2.dates, - ), - difficultyepoch: LazyBinaryTransformSum::from_boxed::( - name, - v, - source1.difficultyepoch.boxed_clone(), - source2.difficultyepoch.boxed_clone(), - ), + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: period!(halvingepoch), + difficultyepoch: period!(difficultyepoch), } } /// Create from a SumCum source (using only sum) and a LazyLast source. - pub fn from_sumcum_lazy_last( + pub(crate) fn from_sumcum_lazy_last( name: &str, version: Version, source1: &ComputedFromHeightSumCum, @@ -97,19 +149,35 @@ where { let v = version + VERSION; + macro_rules! 
period { + ($p:ident) => { + LazyBinaryTransformSum::from_boxed::( + name, + v, + source1.$p.sum.read_only_boxed_clone(), + source2.$p.read_only_boxed_clone(), + ) + }; + } + Self { - dates: LazyBinaryFromDateSum::from_sumcum_lazy_last::( - name, - v, - source1, - source2, - ), - difficultyepoch: LazyBinaryTransformSum::from_boxed::( - name, - v, - source1.difficultyepoch.sum.boxed_clone(), - source2.difficultyepoch.boxed_clone(), - ), + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: period!(halvingepoch), + difficultyepoch: period!(difficultyepoch), } } } diff --git a/crates/brk_computer/src/internal/multi/height_derived/binary_sum_cum.rs b/crates/brk_computer/src/internal/multi/height_derived/binary_sum_cum.rs index 85695f9cf..8732e42e8 100644 --- a/crates/brk_computer/src/internal/multi/height_derived/binary_sum_cum.rs +++ b/crates/brk_computer/src/internal/multi/height_derived/binary_sum_cum.rs @@ -1,20 +1,21 @@ //! Lazy aggregated SumCum - binary transform version. 
use brk_traversable::Traversable; -use brk_types::{DifficultyEpoch, Height, Version}; -use derive_more::{Deref, DerefMut}; +use brk_types::{ + Day1, Day3, DifficultyEpoch, HalvingEpoch, Hour1, Hour4, Hour12, Minute1, Minute5, Minute10, + Minute30, Month1, Month3, Month6, Version, Week1, Year1, Year10, +}; use schemars::JsonSchema; -use vecdb::{BinaryTransform, IterableCloneableVec}; +use vecdb::{BinaryTransform, ReadableCloneableVec}; use crate::internal::{ - ComputedFromHeightLast, ComputedFromHeightSumCum, ComputedHeightDerivedLast, ComputedHeightDerivedSumCum, - ComputedVecValue, LazyBinaryFromDateSumCum, LazyBinaryTransformSumCum, LazyFull, LazyDateDerivedFull, - LazyDateDerivedSumCum, LazyFromHeightLast, LazySumCum, NumericValue, SumCum, + ComputedFromHeightSumCum, ComputedHeightDerivedFull, ComputedHeightDerivedSumCum, + ComputedVecValue, LazyBinaryTransformSumCum, LazyFromHeightLast, NumericValue, TxDerivedFull, }; const VERSION: Version = Version::ZERO; -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Clone, Traversable)] #[traversable(merge)] pub struct LazyBinaryHeightDerivedSumCum where @@ -22,9 +23,22 @@ where S1T: ComputedVecValue, S2T: ComputedVecValue, { - #[deref] - #[deref_mut] - pub dates: LazyBinaryFromDateSumCum, + pub minute1: LazyBinaryTransformSumCum, + pub minute5: LazyBinaryTransformSumCum, + pub minute10: LazyBinaryTransformSumCum, + pub minute30: LazyBinaryTransformSumCum, + pub hour1: LazyBinaryTransformSumCum, + pub hour4: LazyBinaryTransformSumCum, + pub hour12: LazyBinaryTransformSumCum, + pub day1: LazyBinaryTransformSumCum, + pub day3: LazyBinaryTransformSumCum, + pub week1: LazyBinaryTransformSumCum, + pub month1: LazyBinaryTransformSumCum, + pub month3: LazyBinaryTransformSumCum, + pub month6: LazyBinaryTransformSumCum, + pub year1: LazyBinaryTransformSumCum, + pub year10: LazyBinaryTransformSumCum, + pub halvingepoch: LazyBinaryTransformSumCum, pub difficultyepoch: LazyBinaryTransformSumCum, } @@ -34,197 +48,94 @@ where 
S1T: ComputedVecValue + JsonSchema, S2T: ComputedVecValue + JsonSchema, { - #[allow(clippy::too_many_arguments)] - pub fn from_computed>( - name: &str, - version: Version, - dateindex1: &SumCum, - periods1: &LazyDateDerivedSumCum, - difficultyepoch1: &LazySumCum, - dateindex2: &SumCum, - periods2: &LazyDateDerivedSumCum, - difficultyepoch2: &LazySumCum, - ) -> Self { - let v = version + VERSION; - - Self { - dates: LazyBinaryFromDateSumCum::from_computed::( - name, v, dateindex1, periods1, dateindex2, periods2, - ), - difficultyepoch: LazyBinaryTransformSumCum::from_sources::( - name, - v, - difficultyepoch1.sum.boxed_clone(), - difficultyepoch2.sum.boxed_clone(), - difficultyepoch1.cumulative.boxed_clone(), - difficultyepoch2.cumulative.boxed_clone(), - ), - } - } - - #[allow(clippy::too_many_arguments)] - pub fn from_derived_full( - name: &str, - version: Version, - dateindex1: &SumCum, - dates1: &LazyDateDerivedFull, - difficultyepoch1: &LazyFull, - dateindex2: &SumCum, - dates2: &LazyDateDerivedFull, - difficultyepoch2: &LazyFull, - ) -> Self - where - F: BinaryTransform, - S1I: vecdb::VecIndex + 'static, - S1L: ComputedVecValue, - S2I: vecdb::VecIndex + 'static, - S2L: ComputedVecValue, - { - let v = version + VERSION; - - Self { - dates: LazyBinaryFromDateSumCum::from_derived_full::( - name, v, dateindex1, dates1, dateindex2, dates2, - ), - difficultyepoch: LazyBinaryTransformSumCum::from_lazy_stats_aggregate::( - name, - v, - difficultyepoch1, - difficultyepoch2, - ), - } - } - - /// Without _sum suffix for pure SumCum types. 
- #[allow(clippy::too_many_arguments)] - pub fn from_computed_sum_raw>( - name: &str, - version: Version, - dateindex1: &SumCum, - periods1: &LazyDateDerivedSumCum, - difficultyepoch1: &LazySumCum, - dateindex2: &SumCum, - periods2: &LazyDateDerivedSumCum, - difficultyepoch2: &LazySumCum, - ) -> Self { - let v = version + VERSION; - - Self { - dates: LazyBinaryFromDateSumCum::from_computed_sum_raw::( - name, v, dateindex1, periods1, dateindex2, periods2, - ), - difficultyepoch: LazyBinaryTransformSumCum::from_sources_sum_raw::( - name, - v, - difficultyepoch1.sum.boxed_clone(), - difficultyepoch2.sum.boxed_clone(), - difficultyepoch1.cumulative.boxed_clone(), - difficultyepoch2.cumulative.boxed_clone(), - ), - } - } - - // --- Methods accepting SumCum + Last sources --- - - pub fn from_computed_last>( - name: &str, - version: Version, - source1: &ComputedFromHeightSumCum, - source2: &ComputedFromHeightLast, - ) -> Self - where - S1T: PartialOrd, - S2T: NumericValue, - { - let v = version + VERSION; - - Self { - dates: LazyBinaryFromDateSumCum::from_computed_last::(name, v, source1, source2), - difficultyepoch: LazyBinaryTransformSumCum::from_sources_last_sum_raw::( - name, - v, - source1.difficultyepoch.sum.boxed_clone(), - source1.difficultyepoch.cumulative.boxed_clone(), - source2.difficultyepoch.boxed_clone(), - ), - } - } - - pub fn from_derived_computed_last>( + /// Create from two ComputedHeightDerivedSumCum sources. + pub(crate) fn from_computed_sum_raw>( name: &str, version: Version, source1: &ComputedHeightDerivedSumCum, - source2: &ComputedFromHeightLast, - ) -> Self - where - S1T: NumericValue, - S2T: NumericValue, - { + source2: &ComputedHeightDerivedSumCum, + ) -> Self { let v = version + VERSION; + macro_rules! 
period { + ($p:ident) => { + LazyBinaryTransformSumCum::from_sources_sum_raw::( + name, + v, + source1.$p.sum.read_only_boxed_clone(), + source2.$p.sum.read_only_boxed_clone(), + source1.$p.cumulative.read_only_boxed_clone(), + source2.$p.cumulative.read_only_boxed_clone(), + ) + }; + } + Self { - dates: LazyBinaryFromDateSumCum::from_derived_computed_last::(name, v, source1, source2), - difficultyepoch: LazyBinaryTransformSumCum::from_sources_last_sum_raw::( - name, - v, - source1.difficultyepoch.sum.boxed_clone(), - source1.difficultyepoch.cumulative.boxed_clone(), - source2.difficultyepoch.boxed_clone(), - ), + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: period!(halvingepoch), + difficultyepoch: period!(difficultyepoch), } } - pub fn from_computed_derived_last>( + /// Create from ComputedHeightDerivedFull + TxDerivedFull sources. + pub(crate) fn from_full_sources>( name: &str, version: Version, - source1: &ComputedFromHeightSumCum, - source2: &ComputedHeightDerivedLast, + source1: &ComputedHeightDerivedFull, + source2: &TxDerivedFull, ) -> Self where S1T: PartialOrd, - S2T: NumericValue, + S2T: PartialOrd, { let v = version + VERSION; - Self { - dates: LazyBinaryFromDateSumCum::from_computed_derived_last::(name, v, source1, source2), - difficultyepoch: LazyBinaryTransformSumCum::from_sources_last_sum_raw::( - name, - v, - source1.difficultyepoch.sum.boxed_clone(), - source1.difficultyepoch.cumulative.boxed_clone(), - source2.difficultyepoch.boxed_clone(), - ), + macro_rules! 
period { + ($p:ident) => { + LazyBinaryTransformSumCum::from_lazy_stats_aggregate::( + name, v, &source1.$p, &source2.$p, + ) + }; } - } - - pub fn from_derived_last>( - name: &str, - version: Version, - source1: &ComputedHeightDerivedSumCum, - source2: &ComputedHeightDerivedLast, - ) -> Self - where - S1T: NumericValue, - S2T: NumericValue, - { - let v = version + VERSION; Self { - dates: LazyBinaryFromDateSumCum::from_derived_last::(name, v, source1, source2), - difficultyepoch: LazyBinaryTransformSumCum::from_sources_last_sum_raw::( - name, - v, - source1.difficultyepoch.sum.boxed_clone(), - source1.difficultyepoch.cumulative.boxed_clone(), - source2.difficultyepoch.boxed_clone(), - ), + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: period!(halvingepoch), + difficultyepoch: period!(difficultyepoch), } } // --- Methods accepting SumCum + LazyLast sources --- - pub fn from_computed_lazy_last( + pub(crate) fn from_computed_lazy_last( name: &str, version: Version, source1: &ComputedFromHeightSumCum, @@ -238,15 +149,36 @@ where { let v = version + VERSION; + macro_rules! 
period { + ($p:ident) => { + LazyBinaryTransformSumCum::from_sources_last_sum_raw::( + name, + v, + source1.$p.sum.read_only_boxed_clone(), + source1.$p.cumulative.read_only_boxed_clone(), + source2.$p.read_only_boxed_clone(), + ) + }; + } + Self { - dates: LazyBinaryFromDateSumCum::from_computed_lazy_last::(name, v, source1, source2), - difficultyepoch: LazyBinaryTransformSumCum::from_sources_last_sum_raw::( - name, - v, - source1.difficultyepoch.sum.boxed_clone(), - source1.difficultyepoch.cumulative.boxed_clone(), - source2.rest.difficultyepoch.boxed_clone(), - ), + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: period!(halvingepoch), + difficultyepoch: period!(difficultyepoch), } } } diff --git a/crates/brk_computer/src/internal/multi/height_derived/distribution.rs b/crates/brk_computer/src/internal/multi/height_derived/distribution.rs index 294a546bd..9fdfc808a 100644 --- a/crates/brk_computer/src/internal/multi/height_derived/distribution.rs +++ b/crates/brk_computer/src/internal/multi/height_derived/distribution.rs @@ -1,29 +1,41 @@ -//! ComputedHeightDerivedDistribution - dateindex storage + lazy time periods + difficultyepoch. - -use brk_error::Result; +//! ComputedHeightDerivedDistribution - lazy time periods + epochs. 
use brk_traversable::Traversable; -use brk_types::{DateIndex, DifficultyEpoch, Height, Version}; -use derive_more::{Deref, DerefMut}; +use brk_types::{ + Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, Minute1, Minute10, + Minute30, Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10, +}; use schemars::JsonSchema; -use vecdb::{Database, Exit, IterableBoxedVec, IterableCloneableVec, IterableVec}; +use vecdb::{ReadableBoxedVec, ReadableCloneableVec}; use crate::{ - ComputeIndexes, indexes, - internal::{ComputedVecValue, Distribution, LazyDateDerivedSpread, LazySpread, NumericValue}, + indexes, + internal::{ComputedVecValue, LazyDistribution, NumericValue}, }; -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Clone, Traversable)] #[traversable(merge)] pub struct ComputedHeightDerivedDistribution where T: ComputedVecValue + PartialOrd + JsonSchema, { - pub dateindex: Distribution, - #[deref] - #[deref_mut] - pub dates: LazyDateDerivedSpread, - pub difficultyepoch: LazySpread, + pub minute1: LazyDistribution, + pub minute5: LazyDistribution, + pub minute10: LazyDistribution, + pub minute30: LazyDistribution, + pub hour1: LazyDistribution, + pub hour4: LazyDistribution, + pub hour12: LazyDistribution, + pub day1: LazyDistribution, + pub day3: LazyDistribution, + pub week1: LazyDistribution, + pub month1: LazyDistribution, + pub month3: LazyDistribution, + pub month6: LazyDistribution, + pub year1: LazyDistribution, + pub year10: LazyDistribution, + pub halvingepoch: LazyDistribution, + pub difficultyepoch: LazyDistribution, } const VERSION: Version = Version::ZERO; @@ -32,56 +44,54 @@ impl ComputedHeightDerivedDistribution where T: NumericValue + JsonSchema, { - pub fn forced_import( - db: &Database, + pub(crate) fn forced_import( name: &str, - height_source: IterableBoxedVec, + height_source: ReadableBoxedVec, version: Version, indexes: &indexes::Vecs, - ) -> Result { - let dateindex = Distribution::forced_import(db, name, 
version + VERSION)?; + ) -> Self { let v = version + VERSION; - let dates = LazyDateDerivedSpread::from_sources( - name, - v, - dateindex.boxed_average(), - dateindex.boxed_min(), - dateindex.boxed_max(), - indexes, - ); + macro_rules! period { + ($idx:ident) => { + LazyDistribution::from_height_source( + name, + v, + height_source.clone(), + indexes.$idx.first_height.read_only_boxed_clone(), + ) + }; + } - let difficultyepoch = LazySpread::from_distribution( - name, - v, - height_source.boxed_clone(), - height_source.boxed_clone(), - height_source, - indexes.difficultyepoch.identity.boxed_clone(), - ); + macro_rules! epoch { + ($idx:ident) => { + LazyDistribution::from_source( + name, + v, + height_source.clone(), + indexes.$idx.identity.read_only_boxed_clone(), + ) + }; + } - Ok(Self { - dateindex, - dates, - difficultyepoch, - }) - } - - pub fn derive_from( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - height_source: &impl IterableVec, - exit: &Exit, - ) -> Result<()> { - self.dateindex.compute( - starting_indexes.dateindex, - height_source, - &indexes.dateindex.first_height, - &indexes.dateindex.height_count, - exit, - )?; - - Ok(()) + Self { + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: epoch!(halvingepoch), + difficultyepoch: epoch!(difficultyepoch), + } } } diff --git a/crates/brk_computer/src/internal/multi/height_derived/first.rs b/crates/brk_computer/src/internal/multi/height_derived/first.rs index 2d4298b54..7625b49d6 100644 --- a/crates/brk_computer/src/internal/multi/height_derived/first.rs +++ 
b/crates/brk_computer/src/internal/multi/height_derived/first.rs @@ -1,29 +1,40 @@ -//! ComputedHeightDerivedFirst - dateindex storage + difficultyepoch + lazy time periods (first value). - -use brk_error::Result; +//! ComputedHeightDerivedFirst - lazy time periods + epochs (first value). use brk_traversable::Traversable; -use brk_types::{DateIndex, DifficultyEpoch, Height, Version}; -use derive_more::{Deref, DerefMut}; +use brk_types::{ + Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, Minute1, Minute10, + Minute30, Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10, +}; use schemars::JsonSchema; -use vecdb::{Database, Exit, IterableBoxedVec, IterableCloneableVec, IterableVec}; +use vecdb::{ReadableBoxedVec, ReadableCloneableVec}; use crate::{ - ComputeIndexes, indexes, - internal::{ComputedVecValue, LazyDateDerivedFirst, FirstVec, LazyFirst, NumericValue}, + indexes, + internal::{ComputedVecValue, LazyFirst, NumericValue}, }; -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Clone, Traversable)] #[traversable(merge)] pub struct ComputedHeightDerivedFirst where T: ComputedVecValue + PartialOrd + JsonSchema, { - pub dateindex: FirstVec, - #[deref] - #[deref_mut] - #[traversable(flatten)] - pub dates: LazyDateDerivedFirst, + pub minute1: LazyFirst, + pub minute5: LazyFirst, + pub minute10: LazyFirst, + pub minute30: LazyFirst, + pub hour1: LazyFirst, + pub hour4: LazyFirst, + pub hour12: LazyFirst, + pub day1: LazyFirst, + pub day3: LazyFirst, + pub week1: LazyFirst, + pub month1: LazyFirst, + pub month3: LazyFirst, + pub month6: LazyFirst, + pub year1: LazyFirst, + pub year10: LazyFirst, + pub halvingepoch: LazyFirst, pub difficultyepoch: LazyFirst, } @@ -33,50 +44,54 @@ impl ComputedHeightDerivedFirst where T: NumericValue + JsonSchema, { - pub fn forced_import( - db: &Database, + pub(crate) fn forced_import( name: &str, - height_source: IterableBoxedVec, + height_source: ReadableBoxedVec, version: Version, indexes: 
&indexes::Vecs, - ) -> Result { - let dateindex = FirstVec::forced_import(db, name, version + VERSION)?; + ) -> Self { let v = version + VERSION; - Ok(Self { - dates: LazyDateDerivedFirst::from_source(name, v, dateindex.boxed_clone(), indexes), - difficultyepoch: LazyFirst::from_source( - name, - v, - height_source, - indexes.difficultyepoch.identity.boxed_clone(), - ), - dateindex, - }) - } + macro_rules! period { + ($idx:ident) => { + LazyFirst::from_height_source( + name, + v, + height_source.clone(), + indexes.$idx.first_height.read_only_boxed_clone(), + ) + }; + } - pub fn derive_from( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - height_source: &impl IterableVec, - exit: &Exit, - ) -> Result<()> { - self.dateindex.compute_first( - starting_indexes.dateindex, - height_source, - &indexes.dateindex.first_height, - &indexes.dateindex.height_count, - exit, - )?; - Ok(()) - } + macro_rules! epoch { + ($idx:ident) => { + LazyFirst::from_source( + name, + v, + height_source.clone(), + indexes.$idx.identity.read_only_boxed_clone(), + ) + }; + } - pub fn compute_all(&mut self, mut compute: F) -> Result<()> - where - F: FnMut(&mut FirstVec) -> Result<()>, - { - compute(&mut self.dateindex)?; - Ok(()) + Self { + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: epoch!(halvingepoch), + difficultyepoch: epoch!(difficultyepoch), + } } } diff --git a/crates/brk_computer/src/internal/multi/height_derived/full.rs b/crates/brk_computer/src/internal/multi/height_derived/full.rs index f95f5160a..f0b6c633f 100644 --- a/crates/brk_computer/src/internal/multi/height_derived/full.rs +++ 
b/crates/brk_computer/src/internal/multi/height_derived/full.rs @@ -1,33 +1,45 @@ -//! ComputedHeightDerivedFull - height_cumulative + dateindex storage + difficultyepoch + lazy time periods. +//! ComputedHeightDerivedFull - height_cumulative (stored) + lazy time periods + epochs. use brk_error::Result; use brk_traversable::Traversable; -use brk_types::{DateIndex, DifficultyEpoch, Height, Version}; -use derive_more::{Deref, DerefMut}; +use brk_types::{ + Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, Minute1, Minute10, + Minute30, Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10, +}; use schemars::JsonSchema; -use vecdb::{Database, Exit, IterableBoxedVec, IterableCloneableVec, IterableVec}; +use vecdb::{Database, Exit, ReadableBoxedVec, ReadableCloneableVec, ReadableVec, Rw, StorageMode}; use crate::{ indexes, - internal::{ - ComputedVecValue, CumulativeVec, Full, LazyDateDerivedFull, LazyFull, NumericValue, - }, + internal::{ComputedVecValue, CumulativeVec, LazyFull, NumericValue}, ComputeIndexes, }; -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Traversable)] #[traversable(merge)] -pub struct ComputedHeightDerivedFull +pub struct ComputedHeightDerivedFull where T: ComputedVecValue + PartialOrd + JsonSchema, { #[traversable(rename = "cumulative")] - pub height_cumulative: CumulativeVec, - pub dateindex: Full, - #[deref] - #[deref_mut] - pub dates: LazyDateDerivedFull, + pub height_cumulative: CumulativeVec, + pub minute1: LazyFull, + pub minute5: LazyFull, + pub minute10: LazyFull, + pub minute30: LazyFull, + pub hour1: LazyFull, + pub hour4: LazyFull, + pub hour12: LazyFull, + pub day1: LazyFull, + pub day3: LazyFull, + pub week1: LazyFull, + pub month1: LazyFull, + pub month3: LazyFull, + pub month6: LazyFull, + pub year1: LazyFull, + pub year10: LazyFull, + pub halvingepoch: LazyFull, pub difficultyepoch: LazyFull, } @@ -37,74 +49,92 @@ impl ComputedHeightDerivedFull where T: NumericValue + JsonSchema, { - pub 
fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, - height_source: IterableBoxedVec, + height_source: ReadableBoxedVec, version: Version, indexes: &indexes::Vecs, ) -> Result { let v = version + VERSION; let height_cumulative = CumulativeVec::forced_import(db, name, v)?; - let dateindex = Full::forced_import(db, name, v)?; + + macro_rules! period { + ($idx:ident) => { + LazyFull::from_height_source( + name, + v, + height_source.clone(), + height_cumulative.read_only_boxed_clone(), + indexes.$idx.first_height.read_only_boxed_clone(), + ) + }; + } + + macro_rules! epoch { + ($idx:ident) => { + LazyFull::from_stats_aggregate( + name, + v, + height_source.clone(), + height_source.clone(), + height_source.clone(), + height_source.clone(), + height_cumulative.read_only_boxed_clone(), + height_source.clone(), + indexes.$idx.identity.read_only_boxed_clone(), + ) + }; + } + + let minute1 = period!(minute1); + let minute5 = period!(minute5); + let minute10 = period!(minute10); + let minute30 = period!(minute30); + let hour1 = period!(hour1); + let hour4 = period!(hour4); + let hour12 = period!(hour12); + let day1 = period!(day1); + let day3 = period!(day3); + let week1 = period!(week1); + let month1 = period!(month1); + let month3 = period!(month3); + let month6 = period!(month6); + let year1 = period!(year1); + let year10 = period!(year10); + let halvingepoch = epoch!(halvingepoch); + let difficultyepoch = epoch!(difficultyepoch); Ok(Self { - dates: LazyDateDerivedFull::from_sources( - name, - v, - dateindex.boxed_average(), - dateindex.boxed_min(), - dateindex.boxed_max(), - dateindex.boxed_sum(), - dateindex.boxed_cumulative(), - indexes, - ), - difficultyepoch: LazyFull::from_stats_aggregate( - name, - v, - height_source.boxed_clone(), - height_source.boxed_clone(), - height_source.boxed_clone(), - height_source.boxed_clone(), - height_cumulative.boxed_clone(), - indexes.difficultyepoch.identity.boxed_clone(), - ), height_cumulative, - 
dateindex, + minute1, + minute5, + minute10, + minute30, + hour1, + hour4, + hour12, + day1, + day3, + week1, + month1, + month3, + month6, + year1, + year10, + halvingepoch, + difficultyepoch, }) } - pub fn derive_from( + pub(crate) fn compute_cumulative( &mut self, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, - height_source: &impl IterableVec, + height_source: &impl ReadableVec, exit: &Exit, ) -> Result<()> { - // Compute height_cumulative from external source - self.compute_height_cumulative(starting_indexes.height, height_source, exit)?; - - // Compute dateindex aggregations - self.dateindex.compute( - starting_indexes.dateindex, - height_source, - &indexes.dateindex.first_height, - &indexes.dateindex.height_count, - exit, - )?; - - Ok(()) - } - - fn compute_height_cumulative( - &mut self, - max_from: Height, - height_source: &impl IterableVec, - exit: &Exit, - ) -> Result<()> { - self.height_cumulative + Ok(self.height_cumulative .0 - .compute_cumulative(max_from, height_source, exit)?; - Ok(()) + .compute_cumulative(starting_indexes.height, height_source, exit)?) } } diff --git a/crates/brk_computer/src/internal/multi/height_derived/last.rs b/crates/brk_computer/src/internal/multi/height_derived/last.rs index ea507372f..73e5422c9 100644 --- a/crates/brk_computer/src/internal/multi/height_derived/last.rs +++ b/crates/brk_computer/src/internal/multi/height_derived/last.rs @@ -1,29 +1,40 @@ -//! ComputedHeightDerivedLast - dateindex storage + difficultyepoch + lazy time periods. - -use brk_error::Result; +//! ComputedHeightDerivedLast - lazy time periods + epochs (last value). 
use brk_traversable::Traversable; -use brk_types::{DateIndex, DifficultyEpoch, Height, Version}; -use derive_more::{Deref, DerefMut}; +use brk_types::{ + Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, Minute1, Minute10, + Minute30, Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10, +}; use schemars::JsonSchema; -use vecdb::{Database, Exit, IterableBoxedVec, IterableCloneableVec, IterableVec}; +use vecdb::{ReadableBoxedVec, ReadableCloneableVec}; use crate::{ - ComputeIndexes, indexes, - internal::{ComputedVecValue, LazyDateDerivedLast, LastVec, LazyLast, NumericValue}, + indexes, + internal::{ComputedVecValue, LazyLast, NumericValue, SparseLast}, }; -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Clone, Traversable)] #[traversable(merge)] pub struct ComputedHeightDerivedLast where T: ComputedVecValue + PartialOrd + JsonSchema, { - pub dateindex: LastVec, - #[deref] - #[deref_mut] - #[traversable(flatten)] - pub dates: LazyDateDerivedLast, + pub minute1: LazyLast, + pub minute5: LazyLast, + pub minute10: LazyLast, + pub minute30: LazyLast, + pub hour1: LazyLast, + pub hour4: LazyLast, + pub hour12: LazyLast, + pub day1: LazyLast, + pub day3: LazyLast, + pub week1: LazyLast, + pub month1: LazyLast, + pub month3: LazyLast, + pub month6: LazyLast, + pub year1: LazyLast, + pub year10: LazyLast, + pub halvingepoch: LazyLast, pub difficultyepoch: LazyLast, } @@ -33,42 +44,54 @@ impl ComputedHeightDerivedLast where T: NumericValue + JsonSchema, { - pub fn forced_import( - db: &Database, + pub(crate) fn forced_import( name: &str, - height_source: IterableBoxedVec, + height_source: ReadableBoxedVec, version: Version, indexes: &indexes::Vecs, - ) -> Result { - let dateindex = LastVec::forced_import(db, name, version + VERSION)?; + ) -> Self { let v = version + VERSION; - Ok(Self { - dates: LazyDateDerivedLast::from_source(name, v, dateindex.boxed_clone(), indexes), - difficultyepoch: LazyLast::from_source( - name, - v, - 
height_source, - indexes.difficultyepoch.identity.boxed_clone(), - ), - dateindex, - }) - } + macro_rules! period { + ($idx:ident) => { + LazyLast::from_height_source( + name, + v, + height_source.clone(), + indexes.$idx.first_height.read_only_boxed_clone(), + ) + }; + } - pub fn derive_from( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - height_source: &impl IterableVec, - exit: &Exit, - ) -> Result<()> { - self.dateindex.compute_last( - starting_indexes.dateindex, - height_source, - &indexes.dateindex.first_height, - &indexes.dateindex.height_count, - exit, - )?; - Ok(()) + macro_rules! epoch { + ($idx:ident) => { + LazyLast::from_source( + name, + v, + height_source.clone(), + indexes.$idx.identity.read_only_boxed_clone(), + ) + }; + } + + Self { + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: epoch!(halvingepoch), + difficultyepoch: epoch!(difficultyepoch), + } } } diff --git a/crates/brk_computer/src/internal/multi/height_derived/lazy_distribution.rs b/crates/brk_computer/src/internal/multi/height_derived/lazy_distribution.rs index 170bfdf40..19a300501 100644 --- a/crates/brk_computer/src/internal/multi/height_derived/lazy_distribution.rs +++ b/crates/brk_computer/src/internal/multi/height_derived/lazy_distribution.rs @@ -2,27 +2,41 @@ //! Like LazyHeightDerivedFull but without sum/cumulative (for ratio/percentage metrics). 
use brk_traversable::Traversable; -use brk_types::{DateIndex, DifficultyEpoch, Version}; -use derive_more::{Deref, DerefMut}; +use brk_types::{ + Day1, Day3, DifficultyEpoch, HalvingEpoch, Hour1, Hour12, Hour4, Minute1, Minute10, Minute30, + Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10, +}; use schemars::JsonSchema; -use vecdb::{IterableCloneableVec, UnaryTransform}; +use vecdb::{ReadableCloneableVec, UnaryTransform}; use crate::internal::{ - ComputedHeightDerivedFull, ComputedVecValue, Full, LazyDateDerivedFull, - LazyFromDateDistribution, LazyTransformSpread, NumericValue, + ComputedHeightDerivedFull, ComputedVecValue, LazyTransformDistribution, NumericValue, }; -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Clone, Traversable)] #[traversable(merge)] pub struct LazyHeightDerivedDistribution where T: ComputedVecValue + PartialOrd + JsonSchema, S1T: ComputedVecValue, { - #[deref] - #[deref_mut] - pub dates: LazyFromDateDistribution, - pub difficultyepoch: LazyTransformSpread, + pub minute1: LazyTransformDistribution, + pub minute5: LazyTransformDistribution, + pub minute10: LazyTransformDistribution, + pub minute30: LazyTransformDistribution, + pub hour1: LazyTransformDistribution, + pub hour4: LazyTransformDistribution, + pub hour12: LazyTransformDistribution, + pub day1: LazyTransformDistribution, + pub day3: LazyTransformDistribution, + pub week1: LazyTransformDistribution, + pub month1: LazyTransformDistribution, + pub month3: LazyTransformDistribution, + pub month6: LazyTransformDistribution, + pub year1: LazyTransformDistribution, + pub year10: LazyTransformDistribution, + pub halvingepoch: LazyTransformDistribution, + pub difficultyepoch: LazyTransformDistribution, } const VERSION: Version = Version::ZERO; @@ -32,33 +46,7 @@ where T: ComputedVecValue + JsonSchema + 'static, S1T: ComputedVecValue + JsonSchema, { - pub fn from_computed>( - name: &str, - version: Version, - dateindex: &Full, - periods: &LazyDateDerivedFull, - 
difficultyepoch: &crate::internal::LazyFull< - DifficultyEpoch, - S1T, - brk_types::Height, - DifficultyEpoch, - >, - ) -> Self { - let v = version + VERSION; - - Self { - dates: LazyFromDateDistribution::from_full::(name, v, dateindex, periods), - difficultyepoch: LazyTransformSpread::from_boxed::( - name, - v, - difficultyepoch.average.boxed_clone(), - difficultyepoch.min.boxed_clone(), - difficultyepoch.max.boxed_clone(), - ), - } - } - - pub fn from_derived_computed>( + pub(crate) fn from_derived_computed>( name: &str, version: Version, source: &ComputedHeightDerivedFull, @@ -68,15 +56,41 @@ where { let v = version + VERSION; + macro_rules! period { + ($p:ident) => { + LazyTransformDistribution::from_boxed::( + name, + v, + source.$p.average.read_only_boxed_clone(), + source.$p.min.read_only_boxed_clone(), + source.$p.max.read_only_boxed_clone(), + source.$p.percentiles.pct10.read_only_boxed_clone(), + source.$p.percentiles.pct25.read_only_boxed_clone(), + source.$p.percentiles.median.read_only_boxed_clone(), + source.$p.percentiles.pct75.read_only_boxed_clone(), + source.$p.percentiles.pct90.read_only_boxed_clone(), + ) + }; + } + Self { - dates: LazyFromDateDistribution::from_full::(name, v, &source.dateindex, &source.dates), - difficultyepoch: LazyTransformSpread::from_boxed::( - name, - v, - source.difficultyepoch.average.boxed_clone(), - source.difficultyepoch.min.boxed_clone(), - source.difficultyepoch.max.boxed_clone(), - ), + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: period!(halvingepoch), + difficultyepoch: period!(difficultyepoch), } } } diff --git 
a/crates/brk_computer/src/internal/multi/height_derived/lazy_full.rs b/crates/brk_computer/src/internal/multi/height_derived/lazy_full.rs index bcb54070b..d17fbccac 100644 --- a/crates/brk_computer/src/internal/multi/height_derived/lazy_full.rs +++ b/crates/brk_computer/src/internal/multi/height_derived/lazy_full.rs @@ -1,27 +1,41 @@ //! Lazy aggregated Full for block-level sources. use brk_traversable::Traversable; -use brk_types::{DateIndex, DifficultyEpoch, Version}; -use derive_more::{Deref, DerefMut}; +use brk_types::{ + Day1, Day3, DifficultyEpoch, HalvingEpoch, Hour1, Hour12, Hour4, Minute1, Minute10, Minute30, + Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10, +}; use schemars::JsonSchema; -use vecdb::{IterableCloneableVec, UnaryTransform}; +use vecdb::{ReadableCloneableVec, UnaryTransform}; use crate::internal::{ - ComputedHeightDerivedFull, ComputedVecValue, Full, LazyFromDateFull, LazyDateDerivedFull, - LazyTransformStats, NumericValue, + ComputedHeightDerivedFull, ComputedVecValue, LazyTransformFull, NumericValue, }; -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Clone, Traversable)] #[traversable(merge)] pub struct LazyHeightDerivedFull where T: ComputedVecValue + PartialOrd + JsonSchema, S1T: ComputedVecValue, { - #[deref] - #[deref_mut] - pub dates: LazyFromDateFull, - pub difficultyepoch: LazyTransformStats, + pub minute1: LazyTransformFull, + pub minute5: LazyTransformFull, + pub minute10: LazyTransformFull, + pub minute30: LazyTransformFull, + pub hour1: LazyTransformFull, + pub hour4: LazyTransformFull, + pub hour12: LazyTransformFull, + pub day1: LazyTransformFull, + pub day3: LazyTransformFull, + pub week1: LazyTransformFull, + pub month1: LazyTransformFull, + pub month3: LazyTransformFull, + pub month6: LazyTransformFull, + pub year1: LazyTransformFull, + pub year10: LazyTransformFull, + pub halvingepoch: LazyTransformFull, + pub difficultyepoch: LazyTransformFull, } const VERSION: Version = Version::ZERO; @@ -31,35 
+45,7 @@ where T: ComputedVecValue + JsonSchema + 'static, S1T: ComputedVecValue + JsonSchema, { - pub fn from_computed>( - name: &str, - version: Version, - dateindex: &Full, - periods: &LazyDateDerivedFull, - difficultyepoch: &crate::internal::LazyFull< - DifficultyEpoch, - S1T, - brk_types::Height, - DifficultyEpoch, - >, - ) -> Self { - let v = version + VERSION; - - Self { - dates: LazyFromDateFull::from_full::(name, v, dateindex, periods), - difficultyepoch: LazyTransformStats::from_boxed::( - name, - v, - difficultyepoch.average.boxed_clone(), - difficultyepoch.min.boxed_clone(), - difficultyepoch.max.boxed_clone(), - difficultyepoch.sum.boxed_clone(), - difficultyepoch.cumulative.boxed_clone(), - ), - } - } - - pub fn from_derived_computed>( + pub(crate) fn from_derived_computed>( name: &str, version: Version, source: &ComputedHeightDerivedFull, @@ -69,17 +55,43 @@ where { let v = version + VERSION; + macro_rules! period { + ($p:ident) => { + LazyTransformFull::from_boxed::( + name, + v, + source.$p.average.read_only_boxed_clone(), + source.$p.min.read_only_boxed_clone(), + source.$p.max.read_only_boxed_clone(), + source.$p.percentiles.pct10.read_only_boxed_clone(), + source.$p.percentiles.pct25.read_only_boxed_clone(), + source.$p.percentiles.median.read_only_boxed_clone(), + source.$p.percentiles.pct75.read_only_boxed_clone(), + source.$p.percentiles.pct90.read_only_boxed_clone(), + source.$p.sum.read_only_boxed_clone(), + source.$p.cumulative.read_only_boxed_clone(), + ) + }; + } + Self { - dates: LazyFromDateFull::from_full::(name, v, &source.dateindex, &source.dates), - difficultyepoch: LazyTransformStats::from_boxed::( - name, - v, - source.difficultyepoch.average.boxed_clone(), - source.difficultyepoch.min.boxed_clone(), - source.difficultyepoch.max.boxed_clone(), - source.difficultyepoch.sum.boxed_clone(), - source.difficultyepoch.cumulative.boxed_clone(), - ), + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), 
+ minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: period!(halvingepoch), + difficultyepoch: period!(difficultyepoch), } } } diff --git a/crates/brk_computer/src/internal/multi/height_derived/lazy_last.rs b/crates/brk_computer/src/internal/multi/height_derived/lazy_last.rs index 0f9b98327..e0e8946cf 100644 --- a/crates/brk_computer/src/internal/multi/height_derived/lazy_last.rs +++ b/crates/brk_computer/src/internal/multi/height_derived/lazy_last.rs @@ -1,26 +1,41 @@ //! Lazy aggregated Last for block-level sources. use brk_traversable::Traversable; -use brk_types::{DifficultyEpoch, Version}; -use derive_more::{Deref, DerefMut}; +use brk_types::{ + Day1, Day3, DifficultyEpoch, HalvingEpoch, Hour1, Hour12, Hour4, Minute1, Minute10, Minute30, + Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10, +}; use schemars::JsonSchema; -use vecdb::{IterableCloneableVec, UnaryTransform}; +use vecdb::{ReadableCloneableVec, UnaryTransform}; use crate::internal::{ - ComputedFromHeightLast, ComputedHeightDerivedLast, ComputedFromHeightAndDateLast, ComputedVecValue, - LazyBinaryHeightDerivedLast, LazyFromDateLast, LazyTransformLast, NumericValue, + ComputedFromHeightLast, ComputedHeightDerivedLast, ComputedVecValue, + LazyBinaryHeightDerivedLast, LazyTransformLast, NumericValue, }; -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Clone, Traversable)] #[traversable(merge)] pub struct LazyHeightDerivedLast where T: ComputedVecValue + PartialOrd + JsonSchema, S1T: ComputedVecValue, { - #[deref] - #[deref_mut] - pub dates: LazyFromDateLast, + pub minute1: LazyTransformLast, + pub minute5: LazyTransformLast, + pub minute10: LazyTransformLast, + pub minute30: LazyTransformLast, + pub hour1: 
LazyTransformLast, + pub hour4: LazyTransformLast, + pub hour12: LazyTransformLast, + pub day1: LazyTransformLast, + pub day3: LazyTransformLast, + pub week1: LazyTransformLast, + pub month1: LazyTransformLast, + pub month3: LazyTransformLast, + pub month6: LazyTransformLast, + pub year1: LazyTransformLast, + pub year10: LazyTransformLast, + pub halvingepoch: LazyTransformLast, pub difficultyepoch: LazyTransformLast, } @@ -31,7 +46,7 @@ where T: ComputedVecValue + JsonSchema + 'static, S1T: ComputedVecValue + JsonSchema, { - pub fn from_computed>( + pub(crate) fn from_computed>( name: &str, version: Version, source: &ComputedFromHeightLast, @@ -41,22 +56,34 @@ where { let v = version + VERSION; + macro_rules! period { + ($p:ident) => { + LazyTransformLast::from_boxed::(name, v, source.rest.$p.read_only_boxed_clone()) + }; + } + Self { - dates: LazyFromDateLast::from_derived::( - name, - v, - source.dateindex.boxed_clone(), - &source.rest, - ), - difficultyepoch: LazyTransformLast::from_boxed::( - name, - v, - source.difficultyepoch.boxed_clone(), - ), + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: period!(halvingepoch), + difficultyepoch: period!(difficultyepoch), } } - pub fn from_derived_computed>( + pub(crate) fn from_derived_computed>( name: &str, version: Version, source: &ComputedHeightDerivedLast, @@ -66,23 +93,74 @@ where { let v = version + VERSION; + macro_rules! 
period { + ($p:ident) => { + LazyTransformLast::from_boxed::(name, v, source.$p.read_only_boxed_clone()) + }; + } + Self { - dates: LazyFromDateLast::from_derived::( - name, - v, - source.dateindex.boxed_clone(), - &source.dates, - ), - difficultyepoch: LazyTransformLast::from_boxed::( - name, - v, - source.difficultyepoch.boxed_clone(), - ), + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: period!(halvingepoch), + difficultyepoch: period!(difficultyepoch), + } + } + + /// Create by unary-transforming a LazyHeightDerivedLast source. + pub(crate) fn from_lazy( + name: &str, + version: Version, + source: &LazyHeightDerivedLast, + ) -> Self + where + F: UnaryTransform, + S2T: ComputedVecValue + JsonSchema, + { + let v = version + VERSION; + + macro_rules! period { + ($p:ident) => { + LazyTransformLast::from_boxed::(name, v, source.$p.read_only_boxed_clone()) + }; + } + + Self { + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: period!(halvingepoch), + difficultyepoch: period!(difficultyepoch), } } /// Create by unary-transforming a LazyBinaryHeightDerivedLast source. 
- pub fn from_binary( + pub(crate) fn from_binary( name: &str, version: Version, source: &LazyBinaryHeightDerivedLast, @@ -94,38 +172,31 @@ where { let v = version + VERSION; + macro_rules! period { + ($p:ident) => { + LazyTransformLast::from_boxed::(name, v, source.$p.read_only_boxed_clone()) + }; + } + Self { - dates: LazyFromDateLast::from_binary::(name, v, &source.dates), - difficultyepoch: LazyTransformLast::from_boxed::( - name, - v, - source.difficultyepoch.boxed_clone(), - ), + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: period!(halvingepoch), + difficultyepoch: period!(difficultyepoch), } } - pub fn from_computed_height_date>( - name: &str, - version: Version, - source: &ComputedFromHeightAndDateLast, - ) -> Self - where - S1T: PartialOrd, - { - let v = version + VERSION; - - Self { - dates: LazyFromDateLast::from_derived::( - name, - v, - source.dateindex.boxed_clone(), - &source.rest.rest, - ), - difficultyepoch: LazyTransformLast::from_boxed::( - name, - v, - source.difficultyepoch.boxed_clone(), - ), - } - } } diff --git a/crates/brk_computer/src/internal/multi/height_derived/lazy_sum.rs b/crates/brk_computer/src/internal/multi/height_derived/lazy_sum.rs index c2cc35693..ece9da82a 100644 --- a/crates/brk_computer/src/internal/multi/height_derived/lazy_sum.rs +++ b/crates/brk_computer/src/internal/multi/height_derived/lazy_sum.rs @@ -1,26 +1,40 @@ //! Lazy aggregated Sum for block-level sources. 
use brk_traversable::Traversable; -use brk_types::{DateIndex, DifficultyEpoch, Height, Version}; -use derive_more::{Deref, DerefMut}; +use brk_types::{ + Day1, Day3, DifficultyEpoch, HalvingEpoch, Hour1, Hour12, Hour4, Minute1, Minute10, Minute30, + Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10, +}; use schemars::JsonSchema; -use vecdb::{IterableCloneableVec, UnaryTransform}; +use vecdb::{ReadableCloneableVec, UnaryTransform}; use crate::internal::{ - ComputedHeightDerivedSum, ComputedVecValue, LazyFromDateSum, LazyDateDerivedSum, LazySum, - LazyTransformSum, NumericValue, SumVec, + ComputedHeightDerivedSum, ComputedVecValue, LazyTransformSum, NumericValue, }; -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Clone, Traversable)] #[traversable(merge)] pub struct LazyHeightDerivedSum where T: ComputedVecValue + PartialOrd + JsonSchema, S1T: ComputedVecValue, { - #[deref] - #[deref_mut] - pub dates: LazyFromDateSum, + pub minute1: LazyTransformSum, + pub minute5: LazyTransformSum, + pub minute10: LazyTransformSum, + pub minute30: LazyTransformSum, + pub hour1: LazyTransformSum, + pub hour4: LazyTransformSum, + pub hour12: LazyTransformSum, + pub day1: LazyTransformSum, + pub day3: LazyTransformSum, + pub week1: LazyTransformSum, + pub month1: LazyTransformSum, + pub month3: LazyTransformSum, + pub month6: LazyTransformSum, + pub year1: LazyTransformSum, + pub year10: LazyTransformSum, + pub halvingepoch: LazyTransformSum, pub difficultyepoch: LazyTransformSum, } @@ -31,26 +45,7 @@ where T: ComputedVecValue + JsonSchema + 'static, S1T: ComputedVecValue + JsonSchema, { - pub fn from_computed>( - name: &str, - version: Version, - dateindex: &SumVec, - periods: &LazyDateDerivedSum, - difficultyepoch: &LazySum, - ) -> Self { - let v = version + VERSION; - - Self { - dates: LazyFromDateSum::from_derived::(name, v, dateindex.boxed_clone(), periods), - difficultyepoch: LazyTransformSum::from_boxed::( - name, - v, - difficultyepoch.boxed_clone(), - ), 
- } - } - - pub fn from_derived_computed>( + pub(crate) fn from_derived_computed>( name: &str, version: Version, source: &ComputedHeightDerivedSum, @@ -60,18 +55,30 @@ where { let v = version + VERSION; + macro_rules! period { + ($p:ident) => { + LazyTransformSum::from_boxed::(name, v, source.$p.read_only_boxed_clone()) + }; + } + Self { - dates: LazyFromDateSum::from_derived::( - name, - v, - source.dateindex.boxed_clone(), - &source.dates, - ), - difficultyepoch: LazyTransformSum::from_boxed::( - name, - v, - source.difficultyepoch.boxed_clone(), - ), + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: period!(halvingepoch), + difficultyepoch: period!(difficultyepoch), } } } diff --git a/crates/brk_computer/src/internal/multi/height_derived/lazy_sum_cum.rs b/crates/brk_computer/src/internal/multi/height_derived/lazy_sum_cum.rs index 80545fbb0..c283ba99f 100644 --- a/crates/brk_computer/src/internal/multi/height_derived/lazy_sum_cum.rs +++ b/crates/brk_computer/src/internal/multi/height_derived/lazy_sum_cum.rs @@ -1,26 +1,40 @@ //! Lazy aggregated SumCum for block-level sources. 
use brk_traversable::Traversable; -use brk_types::{DateIndex, DifficultyEpoch, Height, Version}; -use derive_more::{Deref, DerefMut}; +use brk_types::{ + Day1, Day3, DifficultyEpoch, HalvingEpoch, Hour1, Hour12, Hour4, Minute1, Minute10, Minute30, + Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10, +}; use schemars::JsonSchema; -use vecdb::{IterableCloneableVec, UnaryTransform}; +use vecdb::{ReadableCloneableVec, UnaryTransform}; use crate::internal::{ - ComputedHeightDerivedSumCum, ComputedVecValue, LazyFromDateSumCum, LazyDateDerivedSumCum, LazySumCum, - LazyTransformSumCum, NumericValue, SumCum, + ComputedHeightDerivedSumCum, ComputedVecValue, LazyTransformSumCum, NumericValue, }; -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Clone, Traversable)] #[traversable(merge)] pub struct LazyHeightDerivedSumCum where T: ComputedVecValue + PartialOrd + JsonSchema, S1T: ComputedVecValue, { - #[deref] - #[deref_mut] - pub dates: LazyFromDateSumCum, + pub minute1: LazyTransformSumCum, + pub minute5: LazyTransformSumCum, + pub minute10: LazyTransformSumCum, + pub minute30: LazyTransformSumCum, + pub hour1: LazyTransformSumCum, + pub hour4: LazyTransformSumCum, + pub hour12: LazyTransformSumCum, + pub day1: LazyTransformSumCum, + pub day3: LazyTransformSumCum, + pub week1: LazyTransformSumCum, + pub month1: LazyTransformSumCum, + pub month3: LazyTransformSumCum, + pub month6: LazyTransformSumCum, + pub year1: LazyTransformSumCum, + pub year10: LazyTransformSumCum, + pub halvingepoch: LazyTransformSumCum, pub difficultyepoch: LazyTransformSumCum, } @@ -31,27 +45,7 @@ where T: ComputedVecValue + JsonSchema + 'static, S1T: ComputedVecValue + JsonSchema, { - pub fn from_computed>( - name: &str, - version: Version, - dateindex: &SumCum, - periods: &LazyDateDerivedSumCum, - difficultyepoch: &LazySumCum, - ) -> Self { - let v = version + VERSION; - - Self { - dates: LazyFromDateSumCum::from_sum_cum::(name, v, dateindex, periods), - difficultyepoch: 
LazyTransformSumCum::from_boxed_sum_raw::( - name, - v, - difficultyepoch.sum.boxed_clone(), - difficultyepoch.cumulative.boxed_clone(), - ), - } - } - - pub fn from_derived_computed>( + pub(crate) fn from_derived_computed>( name: &str, version: Version, source: &ComputedHeightDerivedSumCum, @@ -61,14 +55,35 @@ where { let v = version + VERSION; + macro_rules! period { + ($p:ident) => { + LazyTransformSumCum::from_boxed_sum_raw::( + name, + v, + source.$p.sum.read_only_boxed_clone(), + source.$p.cumulative.read_only_boxed_clone(), + ) + }; + } + Self { - dates: LazyFromDateSumCum::from_sum_cum::(name, v, &source.dateindex, &source.dates), - difficultyepoch: LazyTransformSumCum::from_boxed_sum_raw::( - name, - v, - source.difficultyepoch.sum.boxed_clone(), - source.difficultyepoch.cumulative.boxed_clone(), - ), + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: period!(halvingepoch), + difficultyepoch: period!(difficultyepoch), } } } diff --git a/crates/brk_computer/src/internal/multi/height_derived/max.rs b/crates/brk_computer/src/internal/multi/height_derived/max.rs new file mode 100644 index 000000000..d98b927c3 --- /dev/null +++ b/crates/brk_computer/src/internal/multi/height_derived/max.rs @@ -0,0 +1,97 @@ +//! ComputedHeightDerivedMax - lazy time periods + epochs (max value). 
+ +use brk_traversable::Traversable; +use brk_types::{ + Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, Minute1, Minute10, + Minute30, Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10, +}; +use schemars::JsonSchema; +use vecdb::{ReadableBoxedVec, ReadableCloneableVec}; + +use crate::{ + indexes, + internal::{ComputedVecValue, LazyMax, NumericValue}, +}; + +#[derive(Clone, Traversable)] +#[traversable(merge)] +pub struct ComputedHeightDerivedMax +where + T: ComputedVecValue + PartialOrd + JsonSchema, +{ + pub minute1: LazyMax, + pub minute5: LazyMax, + pub minute10: LazyMax, + pub minute30: LazyMax, + pub hour1: LazyMax, + pub hour4: LazyMax, + pub hour12: LazyMax, + pub day1: LazyMax, + pub day3: LazyMax, + pub week1: LazyMax, + pub month1: LazyMax, + pub month3: LazyMax, + pub month6: LazyMax, + pub year1: LazyMax, + pub year10: LazyMax, + pub halvingepoch: LazyMax, + pub difficultyepoch: LazyMax, +} + +const VERSION: Version = Version::ZERO; + +impl ComputedHeightDerivedMax +where + T: NumericValue + JsonSchema, +{ + pub(crate) fn forced_import( + name: &str, + height_source: ReadableBoxedVec, + version: Version, + indexes: &indexes::Vecs, + ) -> Self { + let v = version + VERSION; + + macro_rules! period { + ($idx:ident) => { + LazyMax::from_height_source_raw( + name, + v, + height_source.clone(), + indexes.$idx.first_height.read_only_boxed_clone(), + ) + }; + } + + macro_rules! 
epoch { + ($idx:ident) => { + LazyMax::from_source_raw( + name, + v, + height_source.clone(), + indexes.$idx.identity.read_only_boxed_clone(), + ) + }; + } + + Self { + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: epoch!(halvingepoch), + difficultyepoch: epoch!(difficultyepoch), + } + } +} diff --git a/crates/brk_computer/src/internal/multi/height_derived/min.rs b/crates/brk_computer/src/internal/multi/height_derived/min.rs new file mode 100644 index 000000000..5a50f365a --- /dev/null +++ b/crates/brk_computer/src/internal/multi/height_derived/min.rs @@ -0,0 +1,97 @@ +//! ComputedHeightDerivedMin - lazy time periods + epochs (min value). 
+ +use brk_traversable::Traversable; +use brk_types::{ + Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, Minute1, Minute10, + Minute30, Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10, +}; +use schemars::JsonSchema; +use vecdb::{ReadableBoxedVec, ReadableCloneableVec}; + +use crate::{ + indexes, + internal::{ComputedVecValue, LazyMin, NumericValue}, +}; + +#[derive(Clone, Traversable)] +#[traversable(merge)] +pub struct ComputedHeightDerivedMin +where + T: ComputedVecValue + PartialOrd + JsonSchema, +{ + pub minute1: LazyMin, + pub minute5: LazyMin, + pub minute10: LazyMin, + pub minute30: LazyMin, + pub hour1: LazyMin, + pub hour4: LazyMin, + pub hour12: LazyMin, + pub day1: LazyMin, + pub day3: LazyMin, + pub week1: LazyMin, + pub month1: LazyMin, + pub month3: LazyMin, + pub month6: LazyMin, + pub year1: LazyMin, + pub year10: LazyMin, + pub halvingepoch: LazyMin, + pub difficultyepoch: LazyMin, +} + +const VERSION: Version = Version::ZERO; + +impl ComputedHeightDerivedMin +where + T: NumericValue + JsonSchema, +{ + pub(crate) fn forced_import( + name: &str, + height_source: ReadableBoxedVec, + version: Version, + indexes: &indexes::Vecs, + ) -> Self { + let v = version + VERSION; + + macro_rules! period { + ($idx:ident) => { + LazyMin::from_height_source_raw( + name, + v, + height_source.clone(), + indexes.$idx.first_height.read_only_boxed_clone(), + ) + }; + } + + macro_rules! 
epoch { + ($idx:ident) => { + LazyMin::from_source_raw( + name, + v, + height_source.clone(), + indexes.$idx.identity.read_only_boxed_clone(), + ) + }; + } + + Self { + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: epoch!(halvingepoch), + difficultyepoch: epoch!(difficultyepoch), + } + } +} diff --git a/crates/brk_computer/src/internal/multi/height_derived/mod.rs b/crates/brk_computer/src/internal/multi/height_derived/mod.rs index 152fe42ed..61ec320fc 100644 --- a/crates/brk_computer/src/internal/multi/height_derived/mod.rs +++ b/crates/brk_computer/src/internal/multi/height_derived/mod.rs @@ -10,9 +10,13 @@ mod lazy_full; mod lazy_last; mod lazy_sum; mod lazy_sum_cum; +mod max; +mod min; +mod ohlc; +mod split_ohlc; mod sum; mod sum_cum; -mod value_sum_cum; +mod value_lazy_last; pub use binary_last::*; pub use binary_sum::*; @@ -26,6 +30,10 @@ pub use lazy_full::*; pub use lazy_last::*; pub use lazy_sum::*; pub use lazy_sum_cum::*; +pub use max::*; +pub use min::*; +pub use ohlc::*; +pub use split_ohlc::*; pub use sum::*; pub use sum_cum::*; -pub use value_sum_cum::*; +pub use value_lazy_last::*; diff --git a/crates/brk_computer/src/internal/multi/height_derived/ohlc.rs b/crates/brk_computer/src/internal/multi/height_derived/ohlc.rs new file mode 100644 index 000000000..a79c834b9 --- /dev/null +++ b/crates/brk_computer/src/internal/multi/height_derived/ohlc.rs @@ -0,0 +1,91 @@ +//! Lazy OHLC period groupings derived from height-level data. +//! +//! Each period's OHLC is computed lazily in a single pass over the source range: +//! open = first, high = max, low = min, close = last. 
+ +use brk_traversable::Traversable; +use brk_types::{ + Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, Minute1, Minute10, + Minute30, Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10, +}; +use schemars::JsonSchema; +use vecdb::{ReadableBoxedVec, ReadableCloneableVec}; + +use crate::{ + indexes, + internal::{ComputedVecValue, LazyOHLC, OHLCRecord}, +}; + +/// Lazy bundled OHLC vecs for all periods, derived from height-level data. +#[derive(Clone, Traversable)] +#[traversable(merge)] +pub struct ComputedHeightDerivedOHLC +where + OHLC: OHLCRecord + 'static, +{ + pub minute1: LazyOHLC, + pub minute5: LazyOHLC, + pub minute10: LazyOHLC, + pub minute30: LazyOHLC, + pub hour1: LazyOHLC, + pub hour4: LazyOHLC, + pub hour12: LazyOHLC, + pub day1: LazyOHLC, + pub day3: LazyOHLC, + pub week1: LazyOHLC, + pub month1: LazyOHLC, + pub month3: LazyOHLC, + pub month6: LazyOHLC, + pub year1: LazyOHLC, + pub year10: LazyOHLC, + pub halvingepoch: LazyOHLC, + pub difficultyepoch: LazyOHLC, +} + +const VERSION: Version = Version::ZERO; + +impl ComputedHeightDerivedOHLC +where + OHLC: OHLCRecord + 'static, + OHLC::Inner: ComputedVecValue + JsonSchema + 'static, +{ + pub(crate) fn forced_import( + name: &str, + version: Version, + indexes: &indexes::Vecs, + height_source: ReadableBoxedVec, + ) -> Self { + let v = version + VERSION; + + macro_rules! 
period { + ($idx:ident) => { + LazyOHLC::from_height_source( + name, + v, + height_source.clone(), + indexes.$idx.first_height.read_only_boxed_clone(), + ) + }; + } + + Self { + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: period!(halvingepoch), + difficultyepoch: period!(difficultyepoch), + } + } +} diff --git a/crates/brk_computer/src/internal/multi/height_derived/split_ohlc.rs b/crates/brk_computer/src/internal/multi/height_derived/split_ohlc.rs new file mode 100644 index 000000000..f0f752650 --- /dev/null +++ b/crates/brk_computer/src/internal/multi/height_derived/split_ohlc.rs @@ -0,0 +1,49 @@ +//! OHLC split into separate First/Last/Max/Min period groupings derived from height-level data. + +use brk_traversable::Traversable; +use brk_types::{Height, Version}; +use schemars::JsonSchema; +use vecdb::ReadableBoxedVec; + +use crate::{ + indexes, + internal::{ + ComputedHeightDerivedFirst, ComputedHeightDerivedLast, ComputedHeightDerivedMax, + ComputedHeightDerivedMin, ComputedVecValue, NumericValue, + }, +}; + +/// Split OHLC vecs for all periods, derived from height data. 
+#[derive(Clone, Traversable)] +pub struct ComputedHeightDerivedSplitOHLC +where + T: ComputedVecValue + PartialOrd + JsonSchema, +{ + pub open: ComputedHeightDerivedFirst, + pub high: ComputedHeightDerivedMax, + pub low: ComputedHeightDerivedMin, + pub close: ComputedHeightDerivedLast, +} + +const VERSION: Version = Version::ZERO; + +impl ComputedHeightDerivedSplitOHLC +where + T: NumericValue + JsonSchema, +{ + pub(crate) fn forced_import( + name: &str, + version: Version, + indexes: &indexes::Vecs, + height_source: ReadableBoxedVec, + ) -> Self { + let v = version + VERSION; + + Self { + open: ComputedHeightDerivedFirst::forced_import(&format!("{name}_open"), height_source.clone(), v, indexes), + high: ComputedHeightDerivedMax::forced_import(&format!("{name}_high"), height_source.clone(), v, indexes), + low: ComputedHeightDerivedMin::forced_import(&format!("{name}_low"), height_source.clone(), v, indexes), + close: ComputedHeightDerivedLast::forced_import(&format!("{name}_close"), height_source, v, indexes), + } + } +} diff --git a/crates/brk_computer/src/internal/multi/height_derived/sum.rs b/crates/brk_computer/src/internal/multi/height_derived/sum.rs index 4ced10d35..aa5528d2e 100644 --- a/crates/brk_computer/src/internal/multi/height_derived/sum.rs +++ b/crates/brk_computer/src/internal/multi/height_derived/sum.rs @@ -1,31 +1,40 @@ -//! ComputedHeightDerivedSum - dateindex storage + difficultyepoch + lazy time periods. - -use brk_error::Result; +//! ComputedHeightDerivedSum - lazy time periods + epochs. 
use brk_traversable::Traversable; -use brk_types::{DateIndex, DifficultyEpoch, Height, StoredU64, Version}; -use derive_more::{Deref, DerefMut}; -use schemars::JsonSchema; -use vecdb::{ - AnyStoredVec, AnyVec, Database, Exit, GenericStoredVec, IterableBoxedVec, IterableCloneableVec, - IterableVec, VecIndex, +use brk_types::{ + Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, Minute1, Minute10, + Minute30, Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10, }; +use schemars::JsonSchema; +use vecdb::{ReadableBoxedVec, ReadableCloneableVec}; use crate::{ - ComputeIndexes, indexes, - internal::{ComputedVecValue, LazyDateDerivedSum, LazySum, NumericValue, SumVec}, + indexes, + internal::{ComputedVecValue, LazySum, NumericValue}, }; -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Clone, Traversable)] #[traversable(merge)] pub struct ComputedHeightDerivedSum where T: ComputedVecValue + PartialOrd + JsonSchema, { - pub dateindex: SumVec, - #[deref] - #[deref_mut] - pub dates: LazyDateDerivedSum, + pub minute1: LazySum, + pub minute5: LazySum, + pub minute10: LazySum, + pub minute30: LazySum, + pub hour1: LazySum, + pub hour4: LazySum, + pub hour12: LazySum, + pub day1: LazySum, + pub day3: LazySum, + pub week1: LazySum, + pub month1: LazySum, + pub month3: LazySum, + pub month6: LazySum, + pub year1: LazySum, + pub year10: LazySum, + pub halvingepoch: LazySum, pub difficultyepoch: LazySum, } @@ -35,81 +44,54 @@ impl ComputedHeightDerivedSum where T: NumericValue + JsonSchema, { - pub fn forced_import( - db: &Database, + pub(crate) fn forced_import( name: &str, - height_source: IterableBoxedVec, + height_source: ReadableBoxedVec, version: Version, indexes: &indexes::Vecs, - ) -> Result { - let dateindex = SumVec::forced_import_raw(db, name, version + VERSION)?; + ) -> Self { let v = version + VERSION; - Ok(Self { - dates: LazyDateDerivedSum::from_source(name, v, dateindex.boxed_clone(), indexes), - difficultyepoch: 
LazySum::from_source_raw( - name, - v, - height_source, - indexes.difficultyepoch.identity.boxed_clone(), - ), - dateindex, - }) - } + macro_rules! period { + ($idx:ident) => { + LazySum::from_height_source_raw( + name, + v, + height_source.clone(), + indexes.$idx.first_height.read_only_boxed_clone(), + ) + }; + } - pub fn derive_from( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - height_source: &impl IterableVec, - exit: &Exit, - ) -> Result<()> { - self.compute_from( - starting_indexes.dateindex, - height_source, - &indexes.dateindex.first_height, - &indexes.dateindex.height_count, - exit, - ) - } + macro_rules! epoch { + ($idx:ident) => { + LazySum::from_source_raw( + name, + v, + height_source.clone(), + indexes.$idx.identity.read_only_boxed_clone(), + ) + }; + } - fn compute_from( - &mut self, - starting_dateindex: DateIndex, - height_source: &impl IterableVec, - first_indexes: &impl IterableVec, - count_indexes: &impl IterableVec, - exit: &Exit, - ) -> Result<()> { - let sum_vec = &mut self.dateindex.0; - - let combined_version = - height_source.version() + first_indexes.version() + count_indexes.version(); - sum_vec.validate_computed_version_or_reset(combined_version)?; - - let index = starting_dateindex.to_usize().min(sum_vec.len()); - - let mut source_iter = height_source.iter(); - let mut count_iter = count_indexes.iter().skip(index); - - first_indexes.iter().enumerate().skip(index).try_for_each( - |(idx, first_height)| -> Result<()> { - let count = *count_iter.next().unwrap() as usize; - - source_iter.set_position(first_height); - let sum: T = (&mut source_iter) - .take(count) - .fold(T::from(0_usize), |acc, v| acc + v); - - sum_vec.truncate_push_at(idx, sum)?; - - Ok(()) - }, - )?; - - let _lock = exit.lock(); - sum_vec.write()?; - - Ok(()) + Self { + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + 
hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: epoch!(halvingepoch), + difficultyepoch: epoch!(difficultyepoch), + } } } diff --git a/crates/brk_computer/src/internal/multi/height_derived/sum_cum.rs b/crates/brk_computer/src/internal/multi/height_derived/sum_cum.rs index 673f7072f..1cceed713 100644 --- a/crates/brk_computer/src/internal/multi/height_derived/sum_cum.rs +++ b/crates/brk_computer/src/internal/multi/height_derived/sum_cum.rs @@ -1,36 +1,45 @@ -//! ComputedHeightDerivedSumCum - aggregates derived from an external height source. +//! ComputedHeightDerivedSumCum - height cumulative (stored) + lazy time periods + epochs. use brk_error::Result; use brk_traversable::Traversable; -use brk_types::{DateIndex, DifficultyEpoch, Height, StoredU64, Version}; -use derive_more::{Deref, DerefMut}; -use schemars::JsonSchema; -use vecdb::{ - AnyStoredVec, AnyVec, Database, Exit, GenericStoredVec, IterableBoxedVec, IterableCloneableVec, - IterableVec, VecIndex, +use brk_types::{ + Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, Minute1, Minute10, + Minute30, Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10, }; +use schemars::JsonSchema; +use vecdb::{Database, Exit, ReadableBoxedVec, ReadableCloneableVec, ReadableVec, Rw, StorageMode}; use crate::{ indexes, - internal::{ - ComputedVecValue, CumulativeVec, LazyDateDerivedSumCum, LazySumCum, NumericValue, SumCum, - }, + internal::{ComputedVecValue, CumulativeVec, LazySumCum, NumericValue}, ComputeIndexes, }; -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Traversable)] #[traversable(merge)] -pub struct ComputedHeightDerivedSumCum +pub struct ComputedHeightDerivedSumCum where T: ComputedVecValue + PartialOrd + JsonSchema, { #[traversable(rename = "cumulative")] - pub 
height_cumulative: CumulativeVec, - pub dateindex: SumCum, - #[deref] - #[deref_mut] - pub dates: LazyDateDerivedSumCum, + pub height_cumulative: CumulativeVec, + pub minute1: LazySumCum, + pub minute5: LazySumCum, + pub minute10: LazySumCum, + pub minute30: LazySumCum, + pub hour1: LazySumCum, + pub hour4: LazySumCum, + pub hour12: LazySumCum, + pub day1: LazySumCum, + pub day3: LazySumCum, + pub week1: LazySumCum, + pub month1: LazySumCum, + pub month3: LazySumCum, + pub month6: LazySumCum, + pub year1: LazySumCum, + pub year10: LazySumCum, + pub halvingepoch: LazySumCum, pub difficultyepoch: LazySumCum, } @@ -40,122 +49,90 @@ impl ComputedHeightDerivedSumCum where T: NumericValue + JsonSchema, { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, - height_source: IterableBoxedVec, + height_source: ReadableBoxedVec, version: Version, indexes: &indexes::Vecs, ) -> Result { let v = version + VERSION; let height_cumulative = CumulativeVec::forced_import(db, name, v)?; - let dateindex = SumCum::forced_import_sum_raw(db, name, v)?; - let dates = LazyDateDerivedSumCum::from_sources( - name, - v, - dateindex.boxed_sum(), - dateindex.boxed_cumulative(), - indexes, - ); + macro_rules! period { + ($idx:ident) => { + LazySumCum::from_height_sources_sum_raw( + name, + v, + height_source.clone(), + height_cumulative.read_only_boxed_clone(), + indexes.$idx.first_height.read_only_boxed_clone(), + ) + }; + } - let difficultyepoch = LazySumCum::from_sources_sum_raw( - name, - v, - height_source.boxed_clone(), - height_cumulative.boxed_clone(), - indexes.difficultyepoch.identity.boxed_clone(), - ); + macro_rules! 
epoch { + ($idx:ident) => { + LazySumCum::from_sources_sum_raw( + name, + v, + height_source.clone(), + height_cumulative.read_only_boxed_clone(), + indexes.$idx.identity.read_only_boxed_clone(), + ) + }; + } + + let minute1 = period!(minute1); + let minute5 = period!(minute5); + let minute10 = period!(minute10); + let minute30 = period!(minute30); + let hour1 = period!(hour1); + let hour4 = period!(hour4); + let hour12 = period!(hour12); + let day1 = period!(day1); + let day3 = period!(day3); + let week1 = period!(week1); + let month1 = period!(month1); + let month3 = period!(month3); + let month6 = period!(month6); + let year1 = period!(year1); + let year10 = period!(year10); + let halvingepoch = epoch!(halvingepoch); + let difficultyepoch = epoch!(difficultyepoch); Ok(Self { height_cumulative, - dateindex, - dates, + minute1, + minute5, + minute10, + minute30, + hour1, + hour4, + hour12, + day1, + day3, + week1, + month1, + month3, + month6, + year1, + year10, + halvingepoch, difficultyepoch, }) } - pub fn derive_from( + pub(crate) fn derive_from( &mut self, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, - height_source: &impl IterableVec, - exit: &Exit, - ) -> Result<()> { - self.compute_height_cumulative(starting_indexes.height, height_source, exit)?; - self.compute_dateindex_sum_cum( - starting_indexes.dateindex, - height_source, - &indexes.dateindex.first_height, - &indexes.dateindex.height_count, - exit, - ) - } - - fn compute_height_cumulative( - &mut self, - max_from: Height, - source: &impl IterableVec, + height_source: &impl ReadableVec, exit: &Exit, ) -> Result<()> { self.height_cumulative .0 - .compute_cumulative(max_from, source, exit)?; - Ok(()) - } - - fn compute_dateindex_sum_cum( - &mut self, - starting_dateindex: DateIndex, - height_source: &impl IterableVec, - first_indexes: &impl IterableVec, - count_indexes: &impl IterableVec, - exit: &Exit, - ) -> Result<()> { - let sum_vec = &mut self.dateindex.sum.0; - let cumulative_vec = 
&mut self.dateindex.cumulative.0; - - let combined_version = - height_source.version() + first_indexes.version() + count_indexes.version(); - sum_vec.validate_computed_version_or_reset(combined_version)?; - cumulative_vec.validate_computed_version_or_reset(combined_version)?; - - let index = starting_dateindex - .to_usize() - .min(sum_vec.len()) - .min(cumulative_vec.len()); - - let mut cumulative = if index > 0 { - cumulative_vec.iter().get_unwrap((index - 1).into()) - } else { - T::from(0_usize) - }; - - let mut source_iter = height_source.iter(); - let mut count_iter = count_indexes.iter().skip(index); - - first_indexes.iter().enumerate().skip(index).try_for_each( - |(idx, first_height)| -> Result<()> { - let count = *count_iter.next().unwrap() as usize; - - source_iter.set_position(first_height); - let sum: T = (&mut source_iter) - .take(count) - .fold(T::from(0_usize), |acc, v| acc + v); - - cumulative += sum; - sum_vec.truncate_push_at(idx, sum)?; - cumulative_vec.truncate_push_at(idx, cumulative)?; - - Ok(()) - }, - )?; - - let _lock = exit.lock(); - sum_vec.write()?; - cumulative_vec.write()?; - + .compute_cumulative(starting_indexes.height, height_source, exit)?; Ok(()) } } diff --git a/crates/brk_computer/src/internal/multi/height_derived/value_lazy_last.rs b/crates/brk_computer/src/internal/multi/height_derived/value_lazy_last.rs new file mode 100644 index 000000000..7ecdf3d3c --- /dev/null +++ b/crates/brk_computer/src/internal/multi/height_derived/value_lazy_last.rs @@ -0,0 +1,51 @@ +//! Lazy value type for Last pattern across all height-derived indexes. 
+ +use brk_traversable::Traversable; +use brk_types::{Bitcoin, Dollars, Sats, Version}; +use vecdb::UnaryTransform; + +use crate::internal::{LazyHeightDerivedLast, ValueFromHeightLast}; + +const VERSION: Version = Version::ZERO; + +#[derive(Clone, Traversable)] +pub struct LazyValueHeightDerivedLast { + pub sats: LazyHeightDerivedLast, + pub btc: LazyHeightDerivedLast, + pub usd: LazyHeightDerivedLast, +} + +impl LazyValueHeightDerivedLast { + pub(crate) fn from_block_source( + name: &str, + source: &ValueFromHeightLast, + version: Version, + ) -> Self + where + SatsTransform: UnaryTransform, + BitcoinTransform: UnaryTransform, + DollarsTransform: UnaryTransform, + { + let v = version + VERSION; + + let sats = LazyHeightDerivedLast::from_derived_computed::( + name, + v, + &source.sats.rest, + ); + + let btc = LazyHeightDerivedLast::from_derived_computed::( + &format!("{name}_btc"), + v, + &source.sats.rest, + ); + + let usd = LazyHeightDerivedLast::from_derived_computed::( + &format!("{name}_usd"), + v, + &source.usd.rest, + ); + + Self { sats, btc, usd } + } +} diff --git a/crates/brk_computer/src/internal/multi/height_derived/value_sum_cum.rs b/crates/brk_computer/src/internal/multi/height_derived/value_sum_cum.rs deleted file mode 100644 index 6009e534d..000000000 --- a/crates/brk_computer/src/internal/multi/height_derived/value_sum_cum.rs +++ /dev/null @@ -1,96 +0,0 @@ -//! Value type for derived SumCum pattern (derives from external height source). - -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{Bitcoin, Close, Dollars, Height, Sats, Version}; -use vecdb::{Database, Exit, IterableBoxedVec, IterableCloneableVec, IterableVec, LazyVecFrom2}; - -use crate::{ - ComputeIndexes, indexes, - internal::{ - ClosePriceTimesSats, ComputedHeightDerivedSumCum, LazyFromHeightSumCum, LazyComputedFromHeightSumCum, - SatsToBitcoin, - }, - price, -}; - -/// Value wrapper for derived SumCum (derives from external height source). 
-#[derive(Clone, Traversable)] -pub struct ValueHeightDerivedSumCum { - pub sats: ComputedHeightDerivedSumCum, - pub bitcoin: LazyFromHeightSumCum, - pub dollars: Option, Sats>>, -} - -const VERSION: Version = Version::ZERO; - -impl ValueHeightDerivedSumCum { - pub fn forced_import( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - sats_source: IterableBoxedVec, - price: Option<&price::Vecs>, - ) -> Result { - let v = version + VERSION; - - let sats = ComputedHeightDerivedSumCum::forced_import( - db, - name, - sats_source.boxed_clone(), - v, - indexes, - )?; - - let bitcoin = LazyFromHeightSumCum::from_derived::( - &format!("{name}_btc"), - v, - sats_source.boxed_clone(), - &sats, - ); - - let dollars = if let Some(price) = price { - let dollars_height = LazyVecFrom2::transformed::( - &format!("{name}_usd"), - v, - price.usd.split.close.height.boxed_clone(), - sats_source.boxed_clone(), - ); - - Some(LazyComputedFromHeightSumCum::forced_import( - db, - &format!("{name}_usd"), - v, - indexes, - dollars_height, - )?) - } else { - None - }; - - Ok(Self { - sats, - bitcoin, - dollars, - }) - } - - /// Derive aggregates from caller-provided sats height source. - pub fn derive_from( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - sats_source: &impl IterableVec, - exit: &Exit, - ) -> Result<()> { - self.sats - .derive_from(indexes, starting_indexes, sats_source, exit)?; - - if let Some(dollars) = self.dollars.as_mut() { - dollars.derive_from(indexes, starting_indexes, exit)?; - } - - Ok(()) - } -} diff --git a/crates/brk_computer/src/internal/multi/mod.rs b/crates/brk_computer/src/internal/multi/mod.rs index 487798f2a..ea221528b 100644 --- a/crates/brk_computer/src/internal/multi/mod.rs +++ b/crates/brk_computer/src/internal/multi/mod.rs @@ -1,19 +1,11 @@ //! Multi-index composite types. 
-mod date_derived; -mod from_date; mod from_height; -mod from_height_and_date; mod from_tx; -mod height_and_date; mod height_derived; mod tx_derived; -pub use date_derived::*; -pub use from_date::*; pub use from_height::*; -pub use from_height_and_date::*; pub use from_tx::*; -pub use height_and_date::*; pub use height_derived::*; pub use tx_derived::*; diff --git a/crates/brk_computer/src/internal/multi/tx_derived/distribution.rs b/crates/brk_computer/src/internal/multi/tx_derived/distribution.rs index c902580f1..052f045d2 100644 --- a/crates/brk_computer/src/internal/multi/tx_derived/distribution.rs +++ b/crates/brk_computer/src/internal/multi/tx_derived/distribution.rs @@ -1,39 +1,45 @@ -//! TxDerivedDistribution - computes TxIndex data to height Distribution + dateindex MinMaxAverage + lazy aggregations. -//! -//! Note: Percentiles are computed at height level only. DateIndex and coarser -//! periods only have average+min+max since computing percentiles across all -//! transactions per day would be expensive. +//! TxDerivedDistribution - computes TxIndex data to height Distribution + lazy time periods + epochs. 
use brk_error::Result; use brk_indexer::Indexer; use brk_traversable::Traversable; -use brk_types::{DateIndex, DifficultyEpoch, Height, TxIndex, Version}; -use derive_more::{Deref, DerefMut}; +use brk_types::{ + Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, Minute1, Minute10, + Minute30, Minute5, Month1, Month3, Month6, TxIndex, Version, Week1, Year1, Year10, +}; use schemars::JsonSchema; -use vecdb::{CollectableVec, Database, Exit, IterableCloneableVec}; +use vecdb::{Database, Exit, ReadableCloneableVec, ReadableVec, Rw, StorageMode}; use crate::{ ComputeIndexes, indexes, - internal::{ - ComputedVecValue, Distribution, LazyDateDerivedSpread, LazySpread, MinMaxAverage, - NumericValue, - }, + internal::{ComputedVecValue, Distribution, LazyDistribution, NumericValue}, }; -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Traversable)] #[traversable(merge)] -pub struct TxDerivedDistribution +pub struct TxDerivedDistribution where T: ComputedVecValue + PartialOrd + JsonSchema, { - pub height: Distribution, - pub difficultyepoch: LazySpread, - pub dateindex: MinMaxAverage, - #[deref] - #[deref_mut] - #[traversable(flatten)] - pub dates: LazyDateDerivedSpread, + pub height: Distribution, + pub minute1: LazyDistribution, + pub minute5: LazyDistribution, + pub minute10: LazyDistribution, + pub minute30: LazyDistribution, + pub hour1: LazyDistribution, + pub hour4: LazyDistribution, + pub hour12: LazyDistribution, + pub day1: LazyDistribution, + pub day3: LazyDistribution, + pub week1: LazyDistribution, + pub month1: LazyDistribution, + pub month3: LazyDistribution, + pub month6: LazyDistribution, + pub year1: LazyDistribution, + pub year10: LazyDistribution, + pub halvingepoch: LazyDistribution, + pub difficultyepoch: LazyDistribution, } const VERSION: Version = Version::ZERO; @@ -42,49 +48,83 @@ impl TxDerivedDistribution where T: NumericValue + JsonSchema, { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: 
&str, version: Version, indexes: &indexes::Vecs, ) -> Result { let height = Distribution::forced_import(db, name, version + VERSION)?; - let dateindex = MinMaxAverage::forced_import(db, name, version + VERSION)?; let v = version + VERSION; - let difficultyepoch = - LazySpread::::from_distribution( - name, - v, - height.boxed_average(), - height.boxed_min(), - height.boxed_max(), - indexes.difficultyepoch.identity.boxed_clone(), - ); + macro_rules! period { + ($idx:ident) => { + LazyDistribution::from_height_source( + name, + v, + height.boxed_average(), + indexes.$idx.first_height.read_only_boxed_clone(), + ) + }; + } - let dates = LazyDateDerivedSpread::from_sources( - name, - v, - dateindex.boxed_average(), - dateindex.boxed_min(), - dateindex.boxed_max(), - indexes, - ); + macro_rules! epoch { + ($idx:ident) => { + LazyDistribution::from_source( + name, + v, + height.boxed_average(), + indexes.$idx.identity.read_only_boxed_clone(), + ) + }; + } + + let minute1 = period!(minute1); + let minute5 = period!(minute5); + let minute10 = period!(minute10); + let minute30 = period!(minute30); + let hour1 = period!(hour1); + let hour4 = period!(hour4); + let hour12 = period!(hour12); + let day1 = period!(day1); + let day3 = period!(day3); + let week1 = period!(week1); + let month1 = period!(month1); + let month3 = period!(month3); + let month6 = period!(month6); + let year1 = period!(year1); + let year10 = period!(year10); + let halvingepoch = epoch!(halvingepoch); + let difficultyepoch = epoch!(difficultyepoch); Ok(Self { height, + minute1, + minute5, + minute10, + minute30, + hour1, + hour4, + hour12, + day1, + day3, + week1, + month1, + month3, + month6, + year1, + year10, + halvingepoch, difficultyepoch, - dateindex, - dates, }) } - pub fn derive_from( + pub(crate) fn derive_from( &mut self, indexer: &Indexer, indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, - txindex_source: &impl CollectableVec, + txindex_source: &impl ReadableVec, exit: &Exit, ) -> 
Result<()> { self.derive_from_with_skip(indexer, indexes, starting_indexes, txindex_source, exit, 0) @@ -93,12 +133,12 @@ where /// Derive from source, skipping first N transactions per block from all calculations. /// /// Use `skip_count: 1` to exclude coinbase transactions from fee/feerate stats. - pub fn derive_from_with_skip( + pub(crate) fn derive_from_with_skip( &mut self, indexer: &Indexer, indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, - txindex_source: &impl CollectableVec, + txindex_source: &impl ReadableVec, exit: &Exit, skip_count: usize, ) -> Result<()> { @@ -111,14 +151,6 @@ where skip_count, )?; - self.dateindex.compute( - starting_indexes.dateindex, - &self.height.average().0, - &indexes.dateindex.first_height, - &indexes.dateindex.height_count, - exit, - )?; - Ok(()) } } diff --git a/crates/brk_computer/src/internal/multi/tx_derived/full.rs b/crates/brk_computer/src/internal/multi/tx_derived/full.rs index 7bcc34494..07bc81ec8 100644 --- a/crates/brk_computer/src/internal/multi/tx_derived/full.rs +++ b/crates/brk_computer/src/internal/multi/tx_derived/full.rs @@ -1,33 +1,46 @@ -//! TxDerivedFull - aggregates from TxIndex to height Full + dateindex Stats + lazy date periods. +//! TxDerivedFull - aggregates from TxIndex to height Full + lazy time periods + epochs. 
use brk_error::Result; use brk_indexer::Indexer; use brk_traversable::Traversable; -use brk_types::{DateIndex, DifficultyEpoch, Height, TxIndex, Version}; -use derive_more::{Deref, DerefMut}; +use brk_types::{ + Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, Minute1, Minute10, + Minute30, Minute5, Month1, Month3, Month6, TxIndex, Version, Week1, Year1, Year10, +}; use schemars::JsonSchema; -use vecdb::{CollectableVec, Database, Exit, IterableCloneableVec}; +use vecdb::{Database, Exit, ReadableCloneableVec, ReadableVec, Rw, StorageMode}; use crate::{ indexes, ComputeIndexes, - internal::{ComputedVecValue, LazyDateDerivedFull, Full, LazyFull, NumericValue, Stats}, + internal::{ComputedVecValue, Full, LazyFull, NumericValue}, }; -/// Aggregates from TxIndex to height/dateindex with full stats. -#[derive(Clone, Deref, DerefMut, Traversable)] +/// Aggregates from TxIndex to height/time periods with full stats. +#[derive(Traversable)] #[traversable(merge)] -pub struct TxDerivedFull +pub struct TxDerivedFull where T: ComputedVecValue + PartialOrd + JsonSchema, { - pub height: Full, + pub height: Full, + pub minute1: LazyFull, + pub minute5: LazyFull, + pub minute10: LazyFull, + pub minute30: LazyFull, + pub hour1: LazyFull, + pub hour4: LazyFull, + pub hour12: LazyFull, + pub day1: LazyFull, + pub day3: LazyFull, + pub week1: LazyFull, + pub month1: LazyFull, + pub month3: LazyFull, + pub month6: LazyFull, + pub year1: LazyFull, + pub year10: LazyFull, + pub halvingepoch: LazyFull, pub difficultyepoch: LazyFull, - pub dateindex: Stats, - #[deref] - #[deref_mut] - #[traversable(flatten)] - pub dates: LazyDateDerivedFull, } const VERSION: Version = Version::ONE; @@ -36,53 +49,89 @@ impl TxDerivedFull where T: NumericValue + JsonSchema, { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, version: Version, indexes: &indexes::Vecs, ) -> Result { let height = Full::forced_import(db, name, version + VERSION)?; - let 
dateindex = Stats::forced_import(db, name, version + VERSION)?; let v = version + VERSION; - let difficultyepoch = - LazyFull::::from_stats_aggregate( - name, - v, - height.boxed_average(), - height.boxed_min(), - height.boxed_max(), - height.boxed_sum(), - height.boxed_cumulative(), - indexes.difficultyepoch.identity.boxed_clone(), - ); + macro_rules! period { + ($idx:ident) => { + LazyFull::from_height_source( + name, + v, + height.boxed_sum(), + height.boxed_cumulative(), + indexes.$idx.first_height.read_only_boxed_clone(), + ) + }; + } - let dates = LazyDateDerivedFull::from_sources( - name, - v, - dateindex.boxed_average(), - dateindex.boxed_min(), - dateindex.boxed_max(), - dateindex.boxed_sum(), - dateindex.boxed_cumulative(), - indexes, - ); + macro_rules! epoch { + ($idx:ident) => { + LazyFull::from_stats_aggregate( + name, + v, + height.boxed_average(), + height.boxed_min(), + height.boxed_max(), + height.boxed_sum(), + height.boxed_cumulative(), + height.boxed_average(), + indexes.$idx.identity.read_only_boxed_clone(), + ) + }; + } + + let minute1 = period!(minute1); + let minute5 = period!(minute5); + let minute10 = period!(minute10); + let minute30 = period!(minute30); + let hour1 = period!(hour1); + let hour4 = period!(hour4); + let hour12 = period!(hour12); + let day1 = period!(day1); + let day3 = period!(day3); + let week1 = period!(week1); + let month1 = period!(month1); + let month3 = period!(month3); + let month6 = period!(month6); + let year1 = period!(year1); + let year10 = period!(year10); + let halvingepoch = epoch!(halvingepoch); + let difficultyepoch = epoch!(difficultyepoch); Ok(Self { height, + minute1, + minute5, + minute10, + minute30, + hour1, + hour4, + hour12, + day1, + day3, + week1, + month1, + month3, + month6, + year1, + year10, + halvingepoch, difficultyepoch, - dateindex, - dates, }) } - pub fn derive_from( + pub(crate) fn derive_from( &mut self, indexer: &Indexer, indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, - 
txindex_source: &impl CollectableVec, + txindex_source: &impl ReadableVec, exit: &Exit, ) -> Result<()> { self.derive_from_with_skip(indexer, indexes, starting_indexes, txindex_source, exit, 0) @@ -91,12 +140,12 @@ where /// Derive from source, skipping first N transactions per block from all calculations. /// /// Use `skip_count: 1` to exclude coinbase transactions from fee/feerate stats. - pub fn derive_from_with_skip( + pub(crate) fn derive_from_with_skip( &mut self, indexer: &Indexer, indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, - txindex_source: &impl CollectableVec, + txindex_source: &impl ReadableVec, exit: &Exit, skip_count: usize, ) -> Result<()> { @@ -109,14 +158,6 @@ where skip_count, )?; - self.dateindex.compute( - starting_indexes.dateindex, - &self.height.sum().0, - &indexes.dateindex.first_height, - &indexes.dateindex.height_count, - exit, - )?; - Ok(()) } } diff --git a/crates/brk_computer/src/internal/multi/tx_derived/lazy_full.rs b/crates/brk_computer/src/internal/multi/tx_derived/lazy_full.rs index 3223644a6..f84595e99 100644 --- a/crates/brk_computer/src/internal/multi/tx_derived/lazy_full.rs +++ b/crates/brk_computer/src/internal/multi/tx_derived/lazy_full.rs @@ -2,13 +2,13 @@ use brk_traversable::Traversable; use brk_types::{ - DateIndex, DecadeIndex, DifficultyEpoch, Height, MonthIndex, QuarterIndex, SemesterIndex, - Version, WeekIndex, YearIndex, + Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, Minute1, Minute10, + Minute30, Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10, }; use schemars::JsonSchema; -use vecdb::{IterableCloneableVec, UnaryTransform}; +use vecdb::{ReadableCloneableVec, UnaryTransform}; -use crate::internal::{ComputedVecValue, TxDerivedFull, LazyTransformFull, LazyTransformStats}; +use crate::internal::{ComputedVecValue, TxDerivedFull, LazyTransformFull}; #[derive(Clone, Traversable)] #[traversable(merge)] @@ -18,14 +18,23 @@ where S1T: ComputedVecValue, { pub height: 
LazyTransformFull, - pub difficultyepoch: LazyTransformStats, - pub dateindex: LazyTransformStats, - pub weekindex: LazyTransformStats, - pub monthindex: LazyTransformStats, - pub quarterindex: LazyTransformStats, - pub semesterindex: LazyTransformStats, - pub yearindex: LazyTransformStats, - pub decadeindex: LazyTransformStats, + pub minute1: LazyTransformFull, + pub minute5: LazyTransformFull, + pub minute10: LazyTransformFull, + pub minute30: LazyTransformFull, + pub hour1: LazyTransformFull, + pub hour4: LazyTransformFull, + pub hour12: LazyTransformFull, + pub day1: LazyTransformFull, + pub day3: LazyTransformFull, + pub week1: LazyTransformFull, + pub month1: LazyTransformFull, + pub month3: LazyTransformFull, + pub month6: LazyTransformFull, + pub year1: LazyTransformFull, + pub year10: LazyTransformFull, + pub halvingepoch: LazyTransformFull, + pub difficultyepoch: LazyTransformFull, } const VERSION: Version = Version::ZERO; @@ -35,7 +44,7 @@ where T: ComputedVecValue + JsonSchema + 'static, S1T: ComputedVecValue + JsonSchema, { - pub fn from_computed>( + pub(crate) fn from_computed>( name: &str, version: Version, source: &TxDerivedFull, @@ -44,32 +53,42 @@ where macro_rules! 
period { ($p:ident) => { - LazyTransformStats::from_boxed::( - name, v, - source.$p.average.boxed_clone(), source.$p.min.boxed_clone(), - source.$p.max.boxed_clone(), source.$p.sum.boxed_clone(), - source.$p.cumulative.boxed_clone(), + LazyTransformFull::from_boxed::( + name, + v, + source.$p.average.read_only_boxed_clone(), + source.$p.min.read_only_boxed_clone(), + source.$p.max.read_only_boxed_clone(), + source.$p.percentiles.pct10.read_only_boxed_clone(), + source.$p.percentiles.pct25.read_only_boxed_clone(), + source.$p.percentiles.median.read_only_boxed_clone(), + source.$p.percentiles.pct75.read_only_boxed_clone(), + source.$p.percentiles.pct90.read_only_boxed_clone(), + source.$p.sum.read_only_boxed_clone(), + source.$p.cumulative.read_only_boxed_clone(), ) }; } Self { height: LazyTransformFull::from_stats_aggregate::(name, v, &source.height), + minute1: period!(minute1), + minute5: period!(minute5), + minute10: period!(minute10), + minute30: period!(minute30), + hour1: period!(hour1), + hour4: period!(hour4), + hour12: period!(hour12), + day1: period!(day1), + day3: period!(day3), + week1: period!(week1), + month1: period!(month1), + month3: period!(month3), + month6: period!(month6), + year1: period!(year1), + year10: period!(year10), + halvingepoch: period!(halvingepoch), difficultyepoch: period!(difficultyepoch), - dateindex: LazyTransformStats::from_boxed::( - name, v, - source.dateindex.boxed_average(), - source.dateindex.boxed_min(), - source.dateindex.boxed_max(), - source.dateindex.boxed_sum(), - source.dateindex.boxed_cumulative(), - ), - weekindex: period!(weekindex), - monthindex: period!(monthindex), - quarterindex: period!(quarterindex), - semesterindex: period!(semesterindex), - yearindex: period!(yearindex), - decadeindex: period!(decadeindex), } } } diff --git a/crates/brk_computer/src/internal/multi/tx_derived/value_full.rs b/crates/brk_computer/src/internal/multi/tx_derived/value_full.rs index 746793bd9..5202f5c50 100644 --- 
a/crates/brk_computer/src/internal/multi/tx_derived/value_full.rs +++ b/crates/brk_computer/src/internal/multi/tx_derived/value_full.rs @@ -4,82 +4,67 @@ use brk_error::Result; use brk_indexer::Indexer; use brk_traversable::Traversable; use brk_types::{Bitcoin, Sats, TxIndex, Version}; -use vecdb::{CollectableVec, Database, Exit, IterableCloneableVec}; +use vecdb::{Database, Exit, ReadableCloneableVec, ReadableVec, Rw, StorageMode}; use crate::{ ComputeIndexes, indexes, - internal::{TxDerivedFull, ValueDollarsFromTxFull, LazyTxDerivedFull, SatsToBitcoin}, - price, + internal::{LazyTxDerivedFull, SatsToBitcoin, TxDerivedFull, ValueDollarsFromTxFull}, + prices, }; -#[derive(Clone, Traversable)] -pub struct ValueTxDerivedFull { - pub sats: TxDerivedFull, - pub bitcoin: LazyTxDerivedFull, - pub dollars: Option, +#[derive(Traversable)] +pub struct ValueTxDerivedFull { + pub sats: TxDerivedFull, + pub btc: LazyTxDerivedFull, + pub usd: ValueDollarsFromTxFull, } const VERSION: Version = Version::ZERO; impl ValueTxDerivedFull { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, version: Version, indexes: &indexes::Vecs, indexer: &Indexer, - price: Option<&price::Vecs>, - sats_txindex: &impl IterableCloneableVec, + prices: &prices::Vecs, + sats_txindex: &impl ReadableCloneableVec, ) -> Result { let v = version + VERSION; let sats = TxDerivedFull::forced_import(db, name, v, indexes)?; - let bitcoin = + let btc = LazyTxDerivedFull::from_computed::(&format!("{name}_btc"), v, &sats); - let dollars = price - .map(|price| { - ValueDollarsFromTxFull::forced_import( - db, - &format!("{name}_usd"), - v, - indexes, - &sats.height, - price.usd.split.close.height.boxed_clone(), - sats_txindex.boxed_clone(), - indexer.vecs.transactions.height.boxed_clone(), - ) - }) - .transpose()?; + let usd = ValueDollarsFromTxFull::forced_import( + db, + &format!("{name}_usd"), + v, + indexes, + &sats.height, + prices.usd.price.read_only_boxed_clone(), + 
sats_txindex.read_only_boxed_clone(), + indexer.vecs.transactions.height.read_only_boxed_clone(), + )?; Ok(Self { sats, - bitcoin, - dollars, + btc, + usd, }) } - pub fn derive_from( - &mut self, - indexer: &Indexer, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - txindex_source: &impl CollectableVec, - exit: &Exit, - ) -> Result<()> { - self.derive_from_with_skip(indexer, indexes, starting_indexes, txindex_source, exit, 0) - } - /// Derive from source, skipping first N transactions per block from all calculations. /// /// Use `skip_count: 1` to exclude coinbase transactions from fee/feerate stats. - pub fn derive_from_with_skip( + pub(crate) fn derive_from_with_skip( &mut self, indexer: &Indexer, indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, - txindex_source: &impl CollectableVec, + txindex_source: &impl ReadableVec, exit: &Exit, skip_count: usize, ) -> Result<()> { @@ -92,9 +77,7 @@ impl ValueTxDerivedFull { skip_count, )?; - if let Some(dollars) = self.dollars.as_mut() { - dollars.derive_from(indexer, indexes, starting_indexes, exit)?; - } + self.usd.derive_from(indexes, starting_indexes, exit)?; Ok(()) } diff --git a/crates/brk_computer/src/internal/single/difficultyepoch/lazy_value.rs b/crates/brk_computer/src/internal/single/difficultyepoch/lazy_value.rs deleted file mode 100644 index d9fade809..000000000 --- a/crates/brk_computer/src/internal/single/difficultyepoch/lazy_value.rs +++ /dev/null @@ -1,141 +0,0 @@ -//! Fully lazy value types for DifficultyEpoch indexing. -//! -//! Two variants exist for different source patterns: -//! - `LazyValueDifficultyEpochFromHeight`: For sources without dollars (computes from price × sats) -//! 
- `LazyTransformedValueDifficultyEpoch`: For transformed views (e.g., halved supply) - -use brk_traversable::Traversable; -use brk_types::{Bitcoin, Close, DifficultyEpoch, Dollars, Height, Sats, Version}; -use vecdb::{ - BinaryTransform, IterableBoxedVec, IterableCloneableVec, LazyVecFrom1, LazyVecFrom2, - UnaryTransform, -}; - -use crate::internal::{ClosePriceTimesSats, LazyLast, SatsToBitcoin, ValueFromHeightLast}; -use crate::price; - -const VERSION: Version = Version::ZERO; - -/// Lazy value type at difficultyepoch level - computed from height sats + price. -/// -/// Use this when the source only has height-indexed sats (e.g., ValueFromHeightAndDateLast). -/// Dollars are computed via price × sats binary transform. -#[derive(Clone, Traversable)] -pub struct LazyValueDifficultyEpoch { - pub sats: LazyLast, - pub bitcoin: LazyVecFrom1, - pub dollars: Option< - LazyVecFrom2< - DifficultyEpoch, - Dollars, - DifficultyEpoch, - Close, - DifficultyEpoch, - Sats, - >, - >, -} - -impl LazyValueDifficultyEpoch { - /// Create from height sats source and difficultyepoch identity. - /// Bitcoin is derived from sats. Dollars are computed from price × sats. - pub fn from_height_source( - name: &str, - height_sats: IterableBoxedVec, - difficultyepoch_identity: IterableBoxedVec, - price: Option<&price::Vecs>, - version: Version, - ) -> Self { - let v = version + VERSION; - - let sats = LazyLast::from_source(name, v, height_sats, difficultyepoch_identity); - - let bitcoin = LazyVecFrom1::transformed::( - &format!("{name}_btc"), - v, - sats.boxed_clone(), - ); - - let dollars = price.map(|p| { - LazyVecFrom2::transformed::( - &format!("{name}_usd"), - v, - p.usd.split.close.difficultyepoch.boxed_clone(), - sats.boxed_clone(), - ) - }); - - Self { - sats, - bitcoin, - dollars, - } - } -} - -/// Lazy value type at difficultyepoch level - transformed from existing difficultyepoch sources. 
-/// -/// Use this when creating transformed views (e.g., halved supply) from sources that -/// already have difficultyepoch aggregations. Applies transforms to the existing aggregations. -#[derive(Clone, Traversable)] -pub struct LazyTransformedValueDifficultyEpoch { - pub sats: LazyVecFrom1, - pub bitcoin: LazyVecFrom1, - pub dollars: Option< - LazyVecFrom2< - DifficultyEpoch, - Dollars, - DifficultyEpoch, - Close, - DifficultyEpoch, - Sats, - >, - >, -} - -impl LazyTransformedValueDifficultyEpoch { - /// Create transformed difficultyepoch values from a ValueFromHeightLast source. - /// SatsTransform is applied to the source's difficultyepoch sats. - /// BitcoinTransform converts source sats to bitcoin (should combine sats transform + conversion). - /// Dollars are computed from price × transformed sats. - pub fn from_block_source( - name: &str, - source: &ValueFromHeightLast, - price: Option<&price::Vecs>, - version: Version, - ) -> Self - where - SatsTransform: UnaryTransform, - BitcoinTransform: UnaryTransform, - DollarsTransform: BinaryTransform, Sats, Dollars>, - { - let v = version + VERSION; - - let sats = LazyVecFrom1::transformed::( - name, - v, - source.sats.rest.difficultyepoch.boxed_clone(), - ); - - let bitcoin = LazyVecFrom1::transformed::( - &format!("{name}_btc"), - v, - source.sats.rest.difficultyepoch.boxed_clone(), - ); - - let dollars = price.map(|p| { - LazyVecFrom2::transformed::( - &format!("{name}_usd"), - v, - p.usd.split.close.difficultyepoch.boxed_clone(), - source.sats.rest.difficultyepoch.boxed_clone(), - ) - }); - - Self { - sats, - bitcoin, - dollars, - } - } -} diff --git a/crates/brk_computer/src/internal/single/difficultyepoch/mod.rs b/crates/brk_computer/src/internal/single/difficultyepoch/mod.rs deleted file mode 100644 index 66af87422..000000000 --- a/crates/brk_computer/src/internal/single/difficultyepoch/mod.rs +++ /dev/null @@ -1,3 +0,0 @@ -mod lazy_value; - -pub use lazy_value::*; diff --git 
a/crates/brk_computer/src/internal/single/group/distribution.rs b/crates/brk_computer/src/internal/single/group/distribution.rs index a8d234221..c4dc8f575 100644 --- a/crates/brk_computer/src/internal/single/group/distribution.rs +++ b/crates/brk_computer/src/internal/single/group/distribution.rs @@ -1,55 +1,40 @@ use brk_error::Result; use brk_traversable::Traversable; use schemars::JsonSchema; -use vecdb::{AnyVec, Database, Exit, IterableBoxedVec, IterableVec, VecIndex, VecValue, Version}; +use vecdb::{ + Database, Exit, ReadableBoxedVec, ReadableVec, Ro, Rw, StorageMode, VecIndex, VecValue, Version, +}; -use crate::internal::{AverageVec, ComputedVecValue, MaxVec, MinVec}; +use crate::internal::ComputedVecValue; use super::{MinMaxAverage, Percentiles}; /// Distribution stats (average + minmax + percentiles) -#[derive(Clone, Traversable)] -pub struct Distribution { +#[derive(Traversable)] +pub struct Distribution { #[traversable(flatten)] - pub min_max_average: MinMaxAverage, + pub min_max_average: MinMaxAverage, #[traversable(flatten)] - pub percentiles: Percentiles, + pub percentiles: Percentiles, } impl Distribution { - pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { + pub(crate) fn forced_import(db: &Database, name: &str, version: Version) -> Result { Ok(Self { min_max_average: MinMaxAverage::forced_import(db, name, version)?, percentiles: Percentiles::forced_import(db, name, version)?, }) } - /// Compute distribution stats from source data. 
- /// - /// This computes: average, min, max, percentiles (pct10, pct25, median, pct75, pct90) - pub fn compute( - &mut self, - max_from: I, - source: &impl IterableVec, - first_indexes: &impl IterableVec, - count_indexes: &impl IterableVec, - exit: &Exit, - ) -> Result<()> - where - A: VecIndex + VecValue + brk_types::CheckedSub, - { - self.compute_with_skip(max_from, source, first_indexes, count_indexes, exit, 0) - } - /// Compute distribution stats, skipping first N items from all calculations. /// /// Use `skip_count: 1` to exclude coinbase transactions from fee/feerate stats. - pub fn compute_with_skip( + pub(crate) fn compute_with_skip( &mut self, max_from: I, - source: &impl IterableVec, - first_indexes: &impl IterableVec, - count_indexes: &impl IterableVec, + source: &impl ReadableVec, + first_indexes: &impl ReadableVec, + count_indexes: &impl ReadableVec, exit: &Exit, skip_count: usize, ) -> Result<()> @@ -78,43 +63,23 @@ impl Distribution { ) } - pub fn len(&self) -> usize { - self.min_max_average - .len() - .min(self.percentiles.pct10.0.len()) - .min(self.percentiles.pct25.0.len()) - .min(self.percentiles.median.0.len()) - .min(self.percentiles.pct75.0.len()) - .min(self.percentiles.pct90.0.len()) - } - - pub fn starting_index(&self, max_from: I) -> I { - max_from.min(I::from(self.len())) - } - - // Accessors - pub fn average(&self) -> &AverageVec { - &self.min_max_average.average - } - - pub fn min(&self) -> &MinVec { - self.min_max_average.min() - } - - pub fn max(&self) -> &MaxVec { - self.min_max_average.max() - } - // Boxed accessors - pub fn boxed_average(&self) -> IterableBoxedVec { + pub(crate) fn boxed_average(&self) -> ReadableBoxedVec { self.min_max_average.boxed_average() } - pub fn boxed_min(&self) -> IterableBoxedVec { + pub(crate) fn boxed_min(&self) -> ReadableBoxedVec { self.min_max_average.boxed_min() } - pub fn boxed_max(&self) -> IterableBoxedVec { + pub(crate) fn boxed_max(&self) -> ReadableBoxedVec { self.min_max_average.boxed_max() 
} + + pub fn read_only_clone(&self) -> Distribution { + Distribution { + min_max_average: self.min_max_average.read_only_clone(), + percentiles: self.percentiles.read_only_clone(), + } + } } diff --git a/crates/brk_computer/src/internal/single/group/full.rs b/crates/brk_computer/src/internal/single/group/full.rs index e6491bb92..0d6aea6fc 100644 --- a/crates/brk_computer/src/internal/single/group/full.rs +++ b/crates/brk_computer/src/internal/single/group/full.rs @@ -1,56 +1,42 @@ use brk_error::Result; use brk_traversable::Traversable; use schemars::JsonSchema; -use vecdb::{Database, Exit, IterableBoxedVec, IterableCloneableVec, IterableVec, VecIndex, VecValue, Version}; +use vecdb::{ + Database, Exit, ReadableBoxedVec, ReadableCloneableVec, ReadableVec, Ro, Rw, StorageMode, + VecIndex, VecValue, Version, +}; -use crate::internal::{AverageVec, ComputedVecValue, CumulativeVec, MaxVec, MinVec, SumVec}; +use crate::internal::ComputedVecValue; use super::{Distribution, SumCum}; /// Full stats aggregate: distribution + sum_cum /// Matches the common full_stats() pattern: average + minmax + percentiles + sum + cumulative -#[derive(Clone, Traversable)] -pub struct Full { +#[derive(Traversable)] +pub struct Full { #[traversable(flatten)] - pub distribution: Distribution, + pub distribution: Distribution, #[traversable(flatten)] - pub sum_cum: SumCum, + pub sum_cum: SumCum, } impl Full { - pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { + pub(crate) fn forced_import(db: &Database, name: &str, version: Version) -> Result { Ok(Self { distribution: Distribution::forced_import(db, name, version)?, sum_cum: SumCum::forced_import(db, name, version)?, }) } - /// Compute all stats from source data. 
- /// - /// This computes: average, min, max, percentiles (pct10, pct25, median, pct75, pct90), sum, cumulative - pub fn compute( - &mut self, - max_from: I, - source: &impl IterableVec, - first_indexes: &impl IterableVec, - count_indexes: &impl IterableVec, - exit: &Exit, - ) -> Result<()> - where - A: VecIndex + VecValue + brk_types::CheckedSub, - { - self.compute_with_skip(max_from, source, first_indexes, count_indexes, exit, 0) - } - /// Compute all stats, skipping first N items from all calculations. /// /// Use `skip_count: 1` to exclude coinbase transactions from fee/feerate stats. - pub fn compute_with_skip( + pub(crate) fn compute_with_skip( &mut self, max_from: I, - source: &impl IterableVec, - first_indexes: &impl IterableVec, - count_indexes: &impl IterableVec, + source: &impl ReadableVec, + first_indexes: &impl ReadableVec, + count_indexes: &impl ReadableVec, exit: &Exit, skip_count: usize, ) -> Result<()> @@ -79,91 +65,31 @@ impl Full { ) } - pub fn len(&self) -> usize { - self.distribution.len().min(self.sum_cum.len()) - } - - pub fn starting_index(&self, max_from: I) -> I { - max_from.min(I::from(self.len())) - } - - /// Compute from aligned source (for coarser time periods like week from dateindex). - /// - /// NOTE: Percentiles cannot be derived from finer percentiles - they are skipped. 
- pub fn compute_from_aligned( - &mut self, - max_from: I, - source: &Full, - first_indexes: &impl IterableVec, - count_indexes: &impl IterableVec, - exit: &Exit, - ) -> Result<()> - where - A: VecIndex + VecValue + brk_types::CheckedSub, - { - // Note: Percentiles cannot be derived from finer percentiles, so we skip them - crate::internal::compute_aggregations_from_aligned( - max_from, - first_indexes, - count_indexes, - exit, - // Source vecs - None, // first not in Full - None, // last not in Full - Some(&source.distribution.min_max_average.minmax.min.0), - Some(&source.distribution.min_max_average.minmax.max.0), - Some(&source.distribution.min_max_average.average.0), - Some(&source.sum_cum.sum.0), - // Target vecs - None, // first - None, // last - Some(&mut self.distribution.min_max_average.minmax.min.0), - Some(&mut self.distribution.min_max_average.minmax.max.0), - Some(&mut self.distribution.min_max_average.average.0), - Some(&mut self.sum_cum.sum.0), - Some(&mut self.sum_cum.cumulative.0), - ) - } - - // Accessors - pub fn average(&self) -> &AverageVec { - self.distribution.average() - } - - pub fn min(&self) -> &MinVec { - self.distribution.min() - } - - pub fn max(&self) -> &MaxVec { - self.distribution.max() - } - - pub fn sum(&self) -> &SumVec { - &self.sum_cum.sum - } - - pub fn cumulative(&self) -> &CumulativeVec { - &self.sum_cum.cumulative - } - // Boxed accessors - pub fn boxed_average(&self) -> IterableBoxedVec { + pub(crate) fn boxed_average(&self) -> ReadableBoxedVec { self.distribution.boxed_average() } - pub fn boxed_min(&self) -> IterableBoxedVec { + pub(crate) fn boxed_min(&self) -> ReadableBoxedVec { self.distribution.boxed_min() } - pub fn boxed_max(&self) -> IterableBoxedVec { + pub(crate) fn boxed_max(&self) -> ReadableBoxedVec { self.distribution.boxed_max() } - pub fn boxed_sum(&self) -> IterableBoxedVec { - self.sum_cum.sum.0.boxed_clone() + pub(crate) fn boxed_sum(&self) -> ReadableBoxedVec { + 
self.sum_cum.sum.0.read_only_boxed_clone() } - pub fn boxed_cumulative(&self) -> IterableBoxedVec { - self.sum_cum.cumulative.0.boxed_clone() + pub(crate) fn boxed_cumulative(&self) -> ReadableBoxedVec { + self.sum_cum.cumulative.0.read_only_boxed_clone() + } + + pub fn read_only_clone(&self) -> Full { + Full { + distribution: self.distribution.read_only_clone(), + sum_cum: self.sum_cum.read_only_clone(), + } } } diff --git a/crates/brk_computer/src/internal/single/group/min_max.rs b/crates/brk_computer/src/internal/single/group/min_max.rs index 325978e4b..af877b90b 100644 --- a/crates/brk_computer/src/internal/single/group/min_max.rs +++ b/crates/brk_computer/src/internal/single/group/min_max.rs @@ -1,24 +1,31 @@ use brk_error::Result; use brk_traversable::Traversable; use schemars::JsonSchema; -use vecdb::{Database, VecIndex, Version}; +use vecdb::{Database, Ro, Rw, StorageMode, VecIndex, Version}; use crate::internal::{ComputedVecValue, MaxVec, MinVec}; /// Min + Max -#[derive(Clone, Traversable)] -pub struct MinMax { +#[derive(Traversable)] +pub struct MinMax { #[traversable(flatten)] - pub min: MinVec, + pub min: MinVec, #[traversable(flatten)] - pub max: MaxVec, + pub max: MaxVec, } impl MinMax { - pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { + pub(crate) fn forced_import(db: &Database, name: &str, version: Version) -> Result { Ok(Self { min: MinVec::forced_import(db, name, version)?, max: MaxVec::forced_import(db, name, version)?, }) } + + pub fn read_only_clone(&self) -> MinMax { + MinMax { + min: self.min.read_only_clone(), + max: self.max.read_only_clone(), + } + } } diff --git a/crates/brk_computer/src/internal/single/group/min_max_average.rs b/crates/brk_computer/src/internal/single/group/min_max_average.rs index 8af45dc76..9a8079b52 100644 --- a/crates/brk_computer/src/internal/single/group/min_max_average.rs +++ b/crates/brk_computer/src/internal/single/group/min_max_average.rs @@ -1,128 +1,45 @@ use brk_error::Result; 
use brk_traversable::Traversable; use schemars::JsonSchema; -use vecdb::{AnyVec, Database, Exit, IterableBoxedVec, IterableCloneableVec, IterableVec, VecIndex, VecValue, Version}; +use vecdb::{Database, ReadableBoxedVec, ReadableCloneableVec, Ro, Rw, StorageMode, VecIndex, Version}; -use crate::internal::{AverageVec, ComputedVecValue, MaxVec, MinVec}; +use crate::internal::{AverageVec, ComputedVecValue}; use super::MinMax; -/// Average + MinMax (for TxIndex dateindex aggregation - no percentiles) -#[derive(Clone, Traversable)] -pub struct MinMaxAverage { - pub average: AverageVec, +/// Average + MinMax (for TxIndex day1 aggregation - no percentiles) +#[derive(Traversable)] +pub struct MinMaxAverage { + pub average: AverageVec, #[traversable(flatten)] - pub minmax: MinMax, + pub minmax: MinMax, } impl MinMaxAverage { - pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { + pub(crate) fn forced_import(db: &Database, name: &str, version: Version) -> Result { Ok(Self { average: AverageVec::forced_import(db, name, version)?, minmax: MinMax::forced_import(db, name, version)?, }) } - /// Compute average and minmax from source data. - pub fn compute( - &mut self, - max_from: I, - source: &impl IterableVec, - first_indexes: &impl IterableVec, - count_indexes: &impl IterableVec, - exit: &Exit, - ) -> Result<()> - where - A: VecIndex + VecValue + brk_types::CheckedSub, - { - crate::internal::compute_aggregations( - max_from, - source, - first_indexes, - count_indexes, - exit, - 0, // min_skip_count - None, // first - None, // last - Some(&mut self.minmax.min.0), - Some(&mut self.minmax.max.0), - Some(&mut self.average.0), - None, // sum - None, // cumulative - None, // median - None, // pct10 - None, // pct25 - None, // pct75 - None, // pct90 - ) - } - - /// Compute from aligned source (for coarser time periods). 
- pub fn compute_from_aligned( - &mut self, - max_from: I, - source: &MinMaxAverage, - first_indexes: &impl IterableVec, - count_indexes: &impl IterableVec, - exit: &Exit, - ) -> Result<()> - where - A: VecIndex + VecValue + brk_types::CheckedSub, - { - crate::internal::compute_aggregations_from_aligned( - max_from, - first_indexes, - count_indexes, - exit, - // Source vecs - None, // first - None, // last - Some(&source.minmax.min.0), - Some(&source.minmax.max.0), - Some(&source.average.0), - None, // sum - // Target vecs - None, // first - None, // last - Some(&mut self.minmax.min.0), - Some(&mut self.minmax.max.0), - Some(&mut self.average.0), - None, // sum - None, // cumulative - ) - } - - pub fn len(&self) -> usize { - self.average - .0 - .len() - .min(self.minmax.min.0.len()) - .min(self.minmax.max.0.len()) - } - - pub fn starting_index(&self, max_from: I) -> I { - max_from.min(I::from(self.len())) - } - - // Accessors - pub fn min(&self) -> &MinVec { - &self.minmax.min - } - - pub fn max(&self) -> &MaxVec { - &self.minmax.max - } - // Boxed accessors - pub fn boxed_average(&self) -> IterableBoxedVec { - self.average.0.boxed_clone() + pub(crate) fn boxed_average(&self) -> ReadableBoxedVec { + self.average.0.read_only_boxed_clone() } - pub fn boxed_min(&self) -> IterableBoxedVec { - self.minmax.min.0.boxed_clone() + pub(crate) fn boxed_min(&self) -> ReadableBoxedVec { + self.minmax.min.0.read_only_boxed_clone() } - pub fn boxed_max(&self) -> IterableBoxedVec { - self.minmax.max.0.boxed_clone() + pub(crate) fn boxed_max(&self) -> ReadableBoxedVec { + self.minmax.max.0.read_only_boxed_clone() + } + + pub fn read_only_clone(&self) -> MinMaxAverage { + MinMaxAverage { + average: self.average.read_only_clone(), + minmax: self.minmax.read_only_clone(), + } } } diff --git a/crates/brk_computer/src/internal/single/group/mod.rs b/crates/brk_computer/src/internal/single/group/mod.rs index a67e20f9d..d730abf80 100644 --- 
a/crates/brk_computer/src/internal/single/group/mod.rs +++ b/crates/brk_computer/src/internal/single/group/mod.rs @@ -3,7 +3,6 @@ mod full; mod min_max; mod min_max_average; mod percentiles; -mod stats; mod sum_cum; pub use distribution::*; @@ -11,5 +10,4 @@ pub use full::*; pub use min_max::*; pub use min_max_average::*; pub use percentiles::*; -pub use stats::*; pub use sum_cum::*; diff --git a/crates/brk_computer/src/internal/single/group/percentiles.rs b/crates/brk_computer/src/internal/single/group/percentiles.rs index 89462fe97..62bd85993 100644 --- a/crates/brk_computer/src/internal/single/group/percentiles.rs +++ b/crates/brk_computer/src/internal/single/group/percentiles.rs @@ -1,22 +1,24 @@ use brk_error::Result; use brk_traversable::Traversable; use schemars::JsonSchema; -use vecdb::{Database, IterableBoxedVec, IterableCloneableVec, VecIndex, Version}; +use vecdb::{ + Database, ReadableBoxedVec, ReadableCloneableVec, Ro, Rw, StorageMode, VecIndex, Version, +}; use crate::internal::{ComputedVecValue, MedianVec, Pct10Vec, Pct25Vec, Pct75Vec, Pct90Vec}; /// All percentiles (pct10, pct25, median, pct75, pct90) -#[derive(Clone, Traversable)] -pub struct Percentiles { - pub pct10: Pct10Vec, - pub pct25: Pct25Vec, - pub median: MedianVec, - pub pct75: Pct75Vec, - pub pct90: Pct90Vec, +#[derive(Traversable)] +pub struct Percentiles { + pub pct10: Pct10Vec, + pub pct25: Pct25Vec, + pub median: MedianVec, + pub pct75: Pct75Vec, + pub pct90: Pct90Vec, } impl Percentiles { - pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { + pub(crate) fn forced_import(db: &Database, name: &str, version: Version) -> Result { Ok(Self { pct10: Pct10Vec::forced_import(db, name, version)?, pct25: Pct25Vec::forced_import(db, name, version)?, @@ -27,23 +29,33 @@ impl Percentiles { } // Boxed accessors - pub fn boxed_pct10(&self) -> IterableBoxedVec { - self.pct10.0.boxed_clone() + pub(crate) fn boxed_pct10(&self) -> ReadableBoxedVec { + 
self.pct10.0.read_only_boxed_clone() } - pub fn boxed_pct25(&self) -> IterableBoxedVec { - self.pct25.0.boxed_clone() + pub(crate) fn boxed_pct25(&self) -> ReadableBoxedVec { + self.pct25.0.read_only_boxed_clone() } - pub fn boxed_median(&self) -> IterableBoxedVec { - self.median.0.boxed_clone() + pub(crate) fn boxed_median(&self) -> ReadableBoxedVec { + self.median.0.read_only_boxed_clone() } - pub fn boxed_pct75(&self) -> IterableBoxedVec { - self.pct75.0.boxed_clone() + pub(crate) fn boxed_pct75(&self) -> ReadableBoxedVec { + self.pct75.0.read_only_boxed_clone() } - pub fn boxed_pct90(&self) -> IterableBoxedVec { - self.pct90.0.boxed_clone() + pub(crate) fn boxed_pct90(&self) -> ReadableBoxedVec { + self.pct90.0.read_only_boxed_clone() + } + + pub fn read_only_clone(&self) -> Percentiles { + Percentiles { + pct10: self.pct10.read_only_clone(), + pct25: self.pct25.read_only_clone(), + median: self.median.read_only_clone(), + pct75: self.pct75.read_only_clone(), + pct90: self.pct90.read_only_clone(), + } } } diff --git a/crates/brk_computer/src/internal/single/group/stats.rs b/crates/brk_computer/src/internal/single/group/stats.rs deleted file mode 100644 index 072eb7cea..000000000 --- a/crates/brk_computer/src/internal/single/group/stats.rs +++ /dev/null @@ -1,110 +0,0 @@ -use brk_error::Result; -use brk_traversable::Traversable; -use schemars::JsonSchema; -use vecdb::{Database, Exit, IterableBoxedVec, IterableCloneableVec, IterableVec, VecIndex, VecValue, Version}; - -use crate::internal::{AverageVec, ComputedVecValue, CumulativeVec, MaxVec, MinVec, SumVec}; - -use super::{MinMaxAverage, SumCum}; - -/// Sum + Cumulative + Average + Min + Max. Like `Full` but without percentiles. 
-#[derive(Clone, Traversable)] -pub struct Stats { - #[traversable(flatten)] - pub sum_cum: SumCum, - #[traversable(flatten)] - pub min_max_average: MinMaxAverage, -} - -impl Stats { - pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { - Ok(Self { - sum_cum: SumCum::forced_import(db, name, version)?, - min_max_average: MinMaxAverage::forced_import(db, name, version)?, - }) - } - - /// Compute sum, cumulative, average, and minmax from source data. - pub fn compute( - &mut self, - max_from: I, - source: &impl IterableVec, - first_indexes: &impl IterableVec, - count_indexes: &impl IterableVec, - exit: &Exit, - ) -> Result<()> - where - A: VecIndex + VecValue + brk_types::CheckedSub, - { - crate::internal::compute_aggregations( - max_from, - source, - first_indexes, - count_indexes, - exit, - 0, // min_skip_count - None, // first - None, // last - Some(&mut self.min_max_average.minmax.min.0), - Some(&mut self.min_max_average.minmax.max.0), - Some(&mut self.min_max_average.average.0), - Some(&mut self.sum_cum.sum.0), - Some(&mut self.sum_cum.cumulative.0), - None, // median - None, // pct10 - None, // pct25 - None, // pct75 - None, // pct90 - ) - } - - pub fn len(&self) -> usize { - self.sum_cum.len().min(self.min_max_average.len()) - } - - pub fn starting_index(&self, max_from: I) -> I { - max_from.min(I::from(self.len())) - } - - // Accessors - pub fn average(&self) -> &AverageVec { - &self.min_max_average.average - } - - pub fn min(&self) -> &MinVec { - self.min_max_average.min() - } - - pub fn max(&self) -> &MaxVec { - self.min_max_average.max() - } - - pub fn sum(&self) -> &SumVec { - &self.sum_cum.sum - } - - pub fn cumulative(&self) -> &CumulativeVec { - &self.sum_cum.cumulative - } - - // Boxed accessors - pub fn boxed_average(&self) -> IterableBoxedVec { - self.min_max_average.boxed_average() - } - - pub fn boxed_min(&self) -> IterableBoxedVec { - self.min_max_average.boxed_min() - } - - pub fn boxed_max(&self) -> IterableBoxedVec { - 
self.min_max_average.boxed_max() - } - - pub fn boxed_sum(&self) -> IterableBoxedVec { - self.sum_cum.sum.0.boxed_clone() - } - - pub fn boxed_cumulative(&self) -> IterableBoxedVec { - self.sum_cum.cumulative.0.boxed_clone() - } -} diff --git a/crates/brk_computer/src/internal/single/group/sum_cum.rs b/crates/brk_computer/src/internal/single/group/sum_cum.rs index ae00e9aed..e4e98d96b 100644 --- a/crates/brk_computer/src/internal/single/group/sum_cum.rs +++ b/crates/brk_computer/src/internal/single/group/sum_cum.rs @@ -1,121 +1,31 @@ use brk_error::Result; use brk_traversable::Traversable; use schemars::JsonSchema; -use vecdb::{ - AnyVec, Database, Exit, IterableBoxedVec, IterableCloneableVec, IterableVec, VecIndex, - VecValue, Version, -}; +use vecdb::{Database, Ro, Rw, StorageMode, VecIndex, Version}; use crate::internal::{ComputedVecValue, CumulativeVec, SumVec}; /// Sum + Cumulative (12% of usage) -#[derive(Clone, Traversable)] -pub struct SumCum { +#[derive(Traversable)] +pub struct SumCum { #[traversable(flatten)] - pub sum: SumVec, + pub sum: SumVec, #[traversable(flatten)] - pub cumulative: CumulativeVec, + pub cumulative: CumulativeVec, } impl SumCum { - pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { + pub(crate) fn forced_import(db: &Database, name: &str, version: Version) -> Result { Ok(Self { sum: SumVec::forced_import(db, name, version)?, cumulative: CumulativeVec::forced_import(db, name, version)?, }) } - /// Import with raw sum name (no _sum suffix) for cases where sum should merge with base. - pub fn forced_import_sum_raw(db: &Database, name: &str, version: Version) -> Result { - Ok(Self { - sum: SumVec::forced_import_raw(db, name, version)?, - cumulative: CumulativeVec::forced_import(db, name, version)?, - }) - } - - /// Compute sum and cumulative from source data. 
- pub fn compute( - &mut self, - max_from: I, - source: &impl IterableVec, - first_indexes: &impl IterableVec, - count_indexes: &impl IterableVec, - exit: &Exit, - ) -> Result<()> - where - A: VecIndex + VecValue + brk_types::CheckedSub, - { - crate::internal::compute_aggregations( - max_from, - source, - first_indexes, - count_indexes, - exit, - 0, // min_skip_count - None, // first - None, // last - None, // min - None, // max - None, // average - Some(&mut self.sum.0), - Some(&mut self.cumulative.0), - None, // median - None, // pct10 - None, // pct25 - None, // pct75 - None, // pct90 - ) - } - - pub fn len(&self) -> usize { - self.sum.0.len().min(self.cumulative.0.len()) - } - - pub fn starting_index(&self, max_from: I) -> I { - max_from.min(I::from(self.len())) - } - - /// Compute from aligned source (for coarser time periods like week from dateindex). - pub fn compute_from_aligned( - &mut self, - max_from: I, - source: &SumCum, - first_indexes: &impl IterableVec, - count_indexes: &impl IterableVec, - exit: &Exit, - ) -> Result<()> - where - A: VecIndex + VecValue + brk_types::CheckedSub, - { - crate::internal::compute_aggregations_from_aligned( - max_from, - first_indexes, - count_indexes, - exit, - // Source vecs - None, // first - None, // last - None, // min - None, // max - None, // average - Some(&source.sum.0), - // Target vecs - None, // first - None, // last - None, // min - None, // max - None, // average - Some(&mut self.sum.0), - Some(&mut self.cumulative.0), - ) - } - - // Boxed accessors - pub fn boxed_sum(&self) -> IterableBoxedVec { - self.sum.0.boxed_clone() - } - - pub fn boxed_cumulative(&self) -> IterableBoxedVec { - self.cumulative.0.boxed_clone() + pub fn read_only_clone(&self) -> SumCum { + SumCum { + sum: self.sum.read_only_clone(), + cumulative: self.cumulative.read_only_clone(), + } } } diff --git a/crates/brk_computer/src/internal/single/height/derived_values.rs b/crates/brk_computer/src/internal/single/height/derived_values.rs index 
de08a29de..2efc723fc 100644 --- a/crates/brk_computer/src/internal/single/height/derived_values.rs +++ b/crates/brk_computer/src/internal/single/height/derived_values.rs @@ -1,39 +1,9 @@ use brk_traversable::Traversable; -use brk_types::{Bitcoin, Close, Dollars, Height, Sats, Version}; -use vecdb::{IterableBoxedVec, LazyVecFrom1, LazyVecFrom2}; - -use crate::internal::{ClosePriceTimesSats, SatsToBitcoin}; +use brk_types::{Bitcoin, Dollars, Height, Sats}; +use vecdb::{LazyVecFrom1, LazyVecFrom2}; #[derive(Clone, Traversable)] pub struct LazyDerivedValuesHeight { - pub bitcoin: LazyVecFrom1, - pub dollars: Option, Height, Sats>>, -} - -const VERSION: Version = Version::ZERO; - -impl LazyDerivedValuesHeight { - pub fn from_source( - name: &str, - sats_source: IterableBoxedVec, - version: Version, - price_source: Option>>, - ) -> Self { - let bitcoin = LazyVecFrom1::transformed::( - &format!("{name}_btc"), - version + VERSION, - sats_source.clone(), - ); - - let dollars = price_source.map(|price| { - LazyVecFrom2::transformed::( - &format!("{name}_usd"), - version + VERSION, - price, - sats_source, - ) - }); - - Self { bitcoin, dollars } - } + pub btc: LazyVecFrom1, + pub usd: LazyVecFrom2, } diff --git a/crates/brk_computer/src/internal/single/height/lazy_value.rs b/crates/brk_computer/src/internal/single/height/lazy_value.rs index b222f50f7..89909f5e4 100644 --- a/crates/brk_computer/src/internal/single/height/lazy_value.rs +++ b/crates/brk_computer/src/internal/single/height/lazy_value.rs @@ -2,7 +2,7 @@ use brk_traversable::Traversable; use brk_types::{Bitcoin, Dollars, Height, Sats, Version}; -use vecdb::{IterableCloneableVec, LazyVecFrom1, UnaryTransform}; +use vecdb::{ReadableCloneableVec, LazyVecFrom1, UnaryTransform}; use crate::internal::{SatsToBitcoin, ValueFromHeightLast}; @@ -14,12 +14,12 @@ const VERSION: Version = Version::ZERO; #[derive(Clone, Traversable)] pub struct LazyValueHeight { pub sats: LazyVecFrom1, - pub bitcoin: LazyVecFrom1, - pub dollars: 
Option>, + pub btc: LazyVecFrom1, + pub usd: LazyVecFrom1, } impl LazyValueHeight { - pub fn from_block_source( + pub(crate) fn from_block_source( name: &str, source: &ValueFromHeightLast, version: Version, @@ -31,22 +31,20 @@ impl LazyValueHeight { let v = version + VERSION; let sats = - LazyVecFrom1::transformed::(name, v, source.sats.height.boxed_clone()); + LazyVecFrom1::transformed::(name, v, source.sats.height.read_only_boxed_clone()); - let bitcoin = LazyVecFrom1::transformed::( + let btc = LazyVecFrom1::transformed::( &format!("{name}_btc"), v, - source.sats.height.boxed_clone(), + source.sats.height.read_only_boxed_clone(), ); - let dollars = source.dollars.as_ref().map(|d| { - LazyVecFrom1::transformed::( - &format!("{name}_usd"), - v, - d.height.boxed_clone(), - ) - }); + let usd = LazyVecFrom1::transformed::( + &format!("{name}_usd"), + v, + source.usd.height.read_only_boxed_clone(), + ); - Self { sats, bitcoin, dollars } + Self { sats, btc, usd } } } diff --git a/crates/brk_computer/src/internal/single/height/mod.rs b/crates/brk_computer/src/internal/single/height/mod.rs index 9e2522acd..04499ded4 100644 --- a/crates/brk_computer/src/internal/single/height/mod.rs +++ b/crates/brk_computer/src/internal/single/height/mod.rs @@ -1,7 +1,5 @@ mod derived_values; mod lazy_value; -mod value; pub use derived_values::*; pub use lazy_value::*; -pub use value::*; diff --git a/crates/brk_computer/src/internal/single/height/value.rs b/crates/brk_computer/src/internal/single/height/value.rs deleted file mode 100644 index 07a2bd765..000000000 --- a/crates/brk_computer/src/internal/single/height/value.rs +++ /dev/null @@ -1,49 +0,0 @@ -//! Value type for Height-only storage (no derived indexes). 
- -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{Bitcoin, Dollars, Height, Sats, Version}; -use vecdb::{Database, EagerVec, ImportableVec, IterableCloneableVec, LazyVecFrom1, PcoVec}; - -use crate::internal::SatsToBitcoin; - -const VERSION: Version = Version::ZERO; - -/// Value type with only height indexing (no derived dateindex/periods). -/// -/// Used for metrics that are computed per height but don't need index aggregations. -#[derive(Clone, Traversable)] -pub struct ValueHeight { - pub sats: EagerVec>, - pub bitcoin: LazyVecFrom1, - pub dollars: Option>>, -} - -impl ValueHeight { - pub fn forced_import( - db: &Database, - name: &str, - version: Version, - compute_dollars: bool, - ) -> Result { - let v = version + VERSION; - - let sats = EagerVec::forced_import(db, name, v)?; - - let bitcoin = LazyVecFrom1::transformed::( - &format!("{name}_btc"), - v, - sats.boxed_clone(), - ); - - let dollars = compute_dollars - .then(|| EagerVec::forced_import(db, &format!("{name}_usd"), v)) - .transpose()?; - - Ok(Self { - sats, - bitcoin, - dollars, - }) - } -} diff --git a/crates/brk_computer/src/internal/single/lazy/average.rs b/crates/brk_computer/src/internal/single/lazy/average.rs index 8b602ae36..f04dc31e4 100644 --- a/crates/brk_computer/src/internal/single/lazy/average.rs +++ b/crates/brk_computer/src/internal/single/lazy/average.rs @@ -1,23 +1,34 @@ //! Lazy average-value aggregation. 
+use std::sync::Arc; + use brk_traversable::Traversable; -use brk_types::Version; -use derive_more::{Deref, DerefMut}; +use brk_types::{Height, Version}; use schemars::JsonSchema; -use vecdb::{FromCoarserIndex, IterableBoxedVec, LazyVecFrom2, VecIndex, VecValue}; +use vecdb::{Cursor, FromCoarserIndex, ReadableBoxedVec, VecIndex, VecValue}; use crate::internal::ComputedVecValue; const VERSION: Version = Version::ZERO; -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(transparent)] -pub struct LazyAverage(pub LazyVecFrom2) +type ForEachRangeFn = + fn(usize, usize, &ReadableBoxedVec, &ReadableBoxedVec, &mut dyn FnMut(T)); + +pub struct LazyAverage where I: VecIndex, T: ComputedVecValue + JsonSchema, S1I: VecIndex, - S2T: VecValue; + S2T: VecValue, +{ + name: Arc, + version: Version, + source: ReadableBoxedVec, + mapping: ReadableBoxedVec, + for_each_range: ForEachRangeFn, +} + +impl_lazy_agg!(LazyAverage); impl LazyAverage where @@ -26,52 +37,120 @@ where S1I: VecIndex + 'static + FromCoarserIndex, S2T: VecValue, { - pub fn from_source( + pub(crate) fn from_source( name: &str, version: Version, - source: IterableBoxedVec, - len_source: IterableBoxedVec, + source: ReadableBoxedVec, + len_source: ReadableBoxedVec, ) -> Self { Self::from_source_inner(&format!("{name}_average"), version, source, len_source) } - pub fn from_source_raw( - name: &str, - version: Version, - source: IterableBoxedVec, - len_source: IterableBoxedVec, - ) -> Self { - Self::from_source_inner(name, version, source, len_source) - } - fn from_source_inner( name: &str, version: Version, - source: IterableBoxedVec, - len_source: IterableBoxedVec, + source: ReadableBoxedVec, + len_source: ReadableBoxedVec, ) -> Self { - Self(LazyVecFrom2::init( - name, - version + VERSION, + fn for_each_range< + I: VecIndex, + T: ComputedVecValue + JsonSchema, + S1I: VecIndex + FromCoarserIndex, + S2T: VecValue, + >( + from: usize, + to: usize, + source: &ReadableBoxedVec, + mapping: &ReadableBoxedVec, + f: 
&mut dyn FnMut(T), + ) { + let mapping_len = mapping.len(); + let source_len = source.len(); + let to = to.min(mapping_len); + if from >= to { + return; + } + let mut cursor = Cursor::from_dyn(&**source); + cursor.advance(S1I::min_from(I::from(from))); + for i in from..to { + let start = S1I::min_from(I::from(i)); + let end = S1I::max_from(I::from(i), source_len) + 1; + let count = end.saturating_sub(start); + if count == 0 || cursor.remaining() == 0 { + continue; + } + let sum = cursor.fold(count, T::from(0), |s, v| s + v); + f(sum / count); + } + } + Self { + name: Arc::from(name), + version: version + VERSION, source, - len_source, - |i: I, source, len_source| { - if i.to_usize() >= len_source.vec_len() { - return None; - } - let mut sum = T::from(0); - let mut len = 0usize; - for v in - S1I::inclusive_range_from(i, source.vec_len()).flat_map(|i| source.get_at(i)) - { - sum += v; - len += 1; - } - if len == 0 { - return None; - } - Some(sum / len) - }, - )) + mapping: len_source, + for_each_range: for_each_range::, + } + } +} + +impl LazyAverage +where + I: VecIndex, + T: ComputedVecValue + JsonSchema + 'static, +{ + pub(crate) fn from_height_source( + name: &str, + version: Version, + source: ReadableBoxedVec, + first_height: ReadableBoxedVec, + ) -> Self { + Self::from_height_source_inner(&format!("{name}_average"), version, source, first_height) + } + + fn from_height_source_inner( + name: &str, + version: Version, + source: ReadableBoxedVec, + first_height: ReadableBoxedVec, + ) -> Self { + fn for_each_range( + from: usize, + to: usize, + source: &ReadableBoxedVec, + mapping: &ReadableBoxedVec, + f: &mut dyn FnMut(T), + ) { + let map_end = (to + 1).min(mapping.len()); + let heights = mapping.collect_range_dyn(from, map_end); + let source_len = source.len(); + let Some(&first_h) = heights.first() else { + return; + }; + let mut cursor = Cursor::from_dyn(&**source); + cursor.advance(first_h.to_usize()); + for idx in 0..(to - from) { + let Some(&cur_h) = 
heights.get(idx) else { + continue; + }; + let first = cur_h.to_usize(); + let next_first = heights + .get(idx + 1) + .map(|h| h.to_usize()) + .unwrap_or(source_len); + let count = next_first.saturating_sub(first); + if count == 0 || cursor.remaining() == 0 { + continue; + } + let sum = cursor.fold(count, T::from(0), |s, v| s + v); + f(sum / count); + } + } + Self { + name: Arc::from(name), + version: version + VERSION, + source, + mapping: first_height, + for_each_range: for_each_range::, + } } } diff --git a/crates/brk_computer/src/internal/single/lazy/cumulative.rs b/crates/brk_computer/src/internal/single/lazy/cumulative.rs index 287552b59..6270964dd 100644 --- a/crates/brk_computer/src/internal/single/lazy/cumulative.rs +++ b/crates/brk_computer/src/internal/single/lazy/cumulative.rs @@ -1,23 +1,34 @@ //! Lazy cumulative-only aggregation (takes last value from cumulative source). +use std::sync::Arc; + use brk_traversable::Traversable; -use brk_types::Version; -use derive_more::{Deref, DerefMut}; +use brk_types::{Height, Version}; use schemars::JsonSchema; -use vecdb::{FromCoarserIndex, IterableBoxedVec, LazyVecFrom2, VecIndex, VecValue}; +use vecdb::{Cursor, FromCoarserIndex, ReadableBoxedVec, VecIndex, VecValue}; use crate::internal::ComputedVecValue; const VERSION: Version = Version::ZERO; -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(transparent)] -pub struct LazyCumulative(pub LazyVecFrom2) +type ForEachRangeFn = + fn(usize, usize, &ReadableBoxedVec, &ReadableBoxedVec, &mut dyn FnMut(T)); + +pub struct LazyCumulative where I: VecIndex, T: ComputedVecValue + JsonSchema, S1I: VecIndex, - S2T: VecValue; + S2T: VecValue, +{ + name: Arc, + version: Version, + source: ReadableBoxedVec, + mapping: ReadableBoxedVec, + for_each_range: ForEachRangeFn, +} + +impl_lazy_agg!(LazyCumulative); impl LazyCumulative where @@ -26,23 +37,101 @@ where S1I: VecIndex + 'static + FromCoarserIndex, S2T: VecValue, { - pub fn from_source( + pub(crate) fn 
from_source( name: &str, version: Version, - cumulative_source: IterableBoxedVec, - len_source: IterableBoxedVec, + cumulative_source: ReadableBoxedVec, + len_source: ReadableBoxedVec, ) -> Self { - Self(LazyVecFrom2::init( - &format!("{name}_cumulative"), - version + VERSION, - cumulative_source, - len_source, - |i: I, source, len_source| { - if i.to_usize() >= len_source.vec_len() { - return None; + fn for_each_range< + I: VecIndex, + T: VecValue, + S1I: VecIndex + FromCoarserIndex, + S2T: VecValue, + >( + from: usize, + to: usize, + source: &ReadableBoxedVec, + mapping: &ReadableBoxedVec, + f: &mut dyn FnMut(T), + ) { + let mapping_len = mapping.len(); + let source_len = source.len(); + let mut cursor = Cursor::from_dyn(&**source); + for i in from..to { + if i >= mapping_len { + break; } - source.get_at(S1I::max_from(i, source.vec_len())) - }, - )) + let target = S1I::max_from(I::from(i), source_len); + if cursor.position() <= target { + cursor.advance(target - cursor.position()); + if let Some(v) = cursor.next() { + f(v); + } + } else if let Some(v) = source.collect_one_at(target) { + f(v); + } + } + } + Self { + name: Arc::from(format!("{name}_cumulative")), + version: version + VERSION, + source: cumulative_source, + mapping: len_source, + for_each_range: for_each_range::, + } + } +} + +impl LazyCumulative +where + I: VecIndex, + T: ComputedVecValue + JsonSchema + 'static, +{ + /// Create from a height-indexed cumulative source using an explicit first_height mapping. + /// Looks up cumulative value at last height of the day. 
+ pub(crate) fn from_height_source( + name: &str, + version: Version, + cumulative_source: ReadableBoxedVec, + first_height: ReadableBoxedVec, + ) -> Self { + fn for_each_range( + from: usize, + to: usize, + source: &ReadableBoxedVec, + mapping: &ReadableBoxedVec, + f: &mut dyn FnMut(T), + ) { + let map_end = (to + 1).min(mapping.len()); + let heights = mapping.collect_range_dyn(from, map_end); + let source_len = source.len(); + let mut cursor = Cursor::from_dyn(&**source); + for idx in 0..(to - from) { + let next_first = heights + .get(idx + 1) + .map(|h| h.to_usize()) + .unwrap_or(source_len); + if next_first == 0 { + continue; + } + let target = next_first - 1; + if cursor.position() <= target { + cursor.advance(target - cursor.position()); + if let Some(v) = cursor.next() { + f(v); + } + } else if let Some(v) = source.collect_one_at(target) { + f(v); + } + } + } + Self { + name: Arc::from(format!("{name}_cumulative")), + version: version + VERSION, + source: cumulative_source, + mapping: first_height, + for_each_range: for_each_range::, + } } } diff --git a/crates/brk_computer/src/internal/single/lazy/distribution.rs b/crates/brk_computer/src/internal/single/lazy/distribution.rs new file mode 100644 index 000000000..aab1af02d --- /dev/null +++ b/crates/brk_computer/src/internal/single/lazy/distribution.rs @@ -0,0 +1,75 @@ +//! Lazy distribution pattern (average, min, max + percentiles). 
+ +use brk_traversable::Traversable; +use brk_types::{Height, Version}; +use schemars::JsonSchema; +use vecdb::{FromCoarserIndex, ReadableBoxedVec, VecIndex, VecValue}; + +use super::{LazyAggPercentiles, LazyAverage, LazyMax, LazyMin}; +use crate::internal::ComputedVecValue; + +const VERSION: Version = Version::ZERO; + +#[derive(Clone, Traversable)] +pub struct LazyDistribution +where + I: VecIndex, + T: ComputedVecValue + JsonSchema, + S1I: VecIndex, + S2T: VecValue, +{ + #[traversable(flatten)] + pub average: LazyAverage, + #[traversable(flatten)] + pub min: LazyMin, + #[traversable(flatten)] + pub max: LazyMax, + #[traversable(flatten)] + pub percentiles: LazyAggPercentiles, +} + +impl LazyDistribution +where + I: VecIndex, + T: ComputedVecValue + JsonSchema + 'static, + S1I: VecIndex + 'static + FromCoarserIndex, + S2T: VecValue, +{ + pub(crate) fn from_source( + name: &str, + version: Version, + source: ReadableBoxedVec, + len_source: ReadableBoxedVec, + ) -> Self { + let v = version + VERSION; + + Self { + average: LazyAverage::from_source(name, v, source.clone(), len_source.clone()), + min: LazyMin::from_source(name, v, source.clone(), len_source.clone()), + max: LazyMax::from_source(name, v, source.clone(), len_source.clone()), + percentiles: LazyAggPercentiles::from_source(name, v, source, len_source), + } + } +} + +impl LazyDistribution +where + I: VecIndex, + T: ComputedVecValue + JsonSchema + 'static, +{ + pub(crate) fn from_height_source( + name: &str, + version: Version, + source: ReadableBoxedVec, + first_height: ReadableBoxedVec, + ) -> Self { + let v = version + VERSION; + + Self { + average: LazyAverage::from_height_source(name, v, source.clone(), first_height.clone()), + min: LazyMin::from_height_source(name, v, source.clone(), first_height.clone()), + max: LazyMax::from_height_source(name, v, source.clone(), first_height.clone()), + percentiles: LazyAggPercentiles::from_height_source(name, v, source, first_height), + } + } +} diff --git 
a/crates/brk_computer/src/internal/single/lazy/first.rs b/crates/brk_computer/src/internal/single/lazy/first.rs index 67a6edd96..1ec3c7fe4 100644 --- a/crates/brk_computer/src/internal/single/lazy/first.rs +++ b/crates/brk_computer/src/internal/single/lazy/first.rs @@ -1,23 +1,34 @@ //! Lazy first-value aggregation. +use std::sync::Arc; + use brk_traversable::Traversable; -use brk_types::Version; -use derive_more::{Deref, DerefMut}; +use brk_types::{Height, Version}; use schemars::JsonSchema; -use vecdb::{FromCoarserIndex, IterableBoxedVec, LazyVecFrom2, VecIndex, VecValue}; +use vecdb::{Cursor, FromCoarserIndex, ReadableBoxedVec, VecIndex, VecValue}; use crate::internal::ComputedVecValue; const VERSION: Version = Version::ZERO; -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(transparent)] -pub struct LazyFirst(pub LazyVecFrom2) +type ForEachRangeFn = + fn(usize, usize, &ReadableBoxedVec, &ReadableBoxedVec, &mut dyn FnMut(T)); + +pub struct LazyFirst where I: VecIndex, T: ComputedVecValue + JsonSchema, S1I: VecIndex, - S2T: VecValue; + S2T: VecValue, +{ + name: Arc, + version: Version, + source: ReadableBoxedVec, + mapping: ReadableBoxedVec, + for_each_range: ForEachRangeFn, +} + +impl_lazy_agg!(LazyFirst); impl LazyFirst where @@ -26,23 +37,92 @@ where S1I: VecIndex + 'static + FromCoarserIndex, S2T: VecValue, { - pub fn from_source( + pub(crate) fn from_source( name: &str, version: Version, - source: IterableBoxedVec, - len_source: IterableBoxedVec, + source: ReadableBoxedVec, + len_source: ReadableBoxedVec, ) -> Self { - Self(LazyVecFrom2::init( - name, - version + VERSION, - source, - len_source, - |i: I, source, len_source| { - if i.to_usize() >= len_source.vec_len() { - return None; + fn for_each_range< + I: VecIndex, + T: VecValue, + S1I: VecIndex + FromCoarserIndex, + S2T: VecValue, + >( + from: usize, + to: usize, + source: &ReadableBoxedVec, + mapping: &ReadableBoxedVec, + f: &mut dyn FnMut(T), + ) { + let mapping_len = mapping.len(); + let 
mut cursor = Cursor::from_dyn(&**source); + for i in from..to { + if i >= mapping_len { + break; } - source.get_at(S1I::min_from(i)) - }, - )) + let target = S1I::min_from(I::from(i)); + if cursor.position() <= target { + cursor.advance(target - cursor.position()); + if let Some(v) = cursor.next() { + f(v); + } + } else if let Some(v) = source.collect_one_at(target) { + f(v); + } + } + } + Self { + name: Arc::from(name), + version: version + VERSION, + source, + mapping: len_source, + for_each_range: for_each_range::, + } + } +} + +impl LazyFirst +where + I: VecIndex, + T: ComputedVecValue + JsonSchema + 'static, +{ + pub(crate) fn from_height_source( + name: &str, + version: Version, + source: ReadableBoxedVec, + first_height: ReadableBoxedVec, + ) -> Self { + fn for_each_range( + from: usize, + to: usize, + source: &ReadableBoxedVec, + mapping: &ReadableBoxedVec, + f: &mut dyn FnMut(T), + ) { + let heights = mapping.collect_range_dyn(from, to.min(mapping.len())); + let mut cursor = Cursor::from_dyn(&**source); + for idx in 0..(to - from) { + let Some(&first_h) = heights.get(idx) else { + continue; + }; + let target = first_h.to_usize(); + if cursor.position() <= target { + cursor.advance(target - cursor.position()); + if let Some(v) = cursor.next() { + f(v); + } + } else if let Some(v) = source.collect_one_at(target) { + f(v); + } + } + } + Self { + name: Arc::from(name), + version: version + VERSION, + source, + mapping: first_height, + for_each_range: for_each_range::, + } } } diff --git a/crates/brk_computer/src/internal/single/lazy/full.rs b/crates/brk_computer/src/internal/single/lazy/full.rs index ad2aef93c..4bb7c9d1c 100644 --- a/crates/brk_computer/src/internal/single/lazy/full.rs +++ b/crates/brk_computer/src/internal/single/lazy/full.rs @@ -1,11 +1,11 @@ -//! Lazy stats aggregate pattern (average, min, max, sum, cumulative). +//! Lazy full stats aggregate (distribution + sum + cumulative). 
use brk_traversable::Traversable; -use brk_types::Version; +use brk_types::{Height, Version}; use schemars::JsonSchema; -use vecdb::{FromCoarserIndex, IterableBoxedVec, VecIndex, VecValue}; +use vecdb::{FromCoarserIndex, ReadableBoxedVec, VecIndex, VecValue}; -use super::{LazyAverage, LazyCumulative, LazyMax, LazyMin, LazySum}; +use super::{LazyAggPercentiles, LazyAverage, LazyCumulative, LazyMax, LazyMin, LazySum}; use crate::internal::ComputedVecValue; const VERSION: Version = Version::ZERO; @@ -25,6 +25,8 @@ where #[traversable(flatten)] pub max: LazyMax, #[traversable(flatten)] + pub percentiles: LazyAggPercentiles, + #[traversable(flatten)] pub sum: LazySum, #[traversable(flatten)] pub cumulative: LazyCumulative, @@ -38,15 +40,16 @@ where S2T: VecValue, { #[allow(clippy::too_many_arguments)] - pub fn from_stats_aggregate( + pub(crate) fn from_stats_aggregate( name: &str, version: Version, - source_average: IterableBoxedVec, - source_min: IterableBoxedVec, - source_max: IterableBoxedVec, - source_sum: IterableBoxedVec, - source_cumulative: IterableBoxedVec, - len_source: IterableBoxedVec, + source_average: ReadableBoxedVec, + source_min: ReadableBoxedVec, + source_max: ReadableBoxedVec, + source_sum: ReadableBoxedVec, + source_cumulative: ReadableBoxedVec, + source_all: ReadableBoxedVec, + len_source: ReadableBoxedVec, ) -> Self { let v = version + VERSION; @@ -54,8 +57,34 @@ where average: LazyAverage::from_source(name, v, source_average, len_source.clone()), min: LazyMin::from_source(name, v, source_min, len_source.clone()), max: LazyMax::from_source(name, v, source_max, len_source.clone()), + percentiles: LazyAggPercentiles::from_source(name, v, source_all, len_source.clone()), sum: LazySum::from_source(name, v, source_sum, len_source.clone()), cumulative: LazyCumulative::from_source(name, v, source_cumulative, len_source), } } } + +impl LazyFull +where + I: VecIndex, + T: ComputedVecValue + JsonSchema + 'static, +{ + pub(crate) fn from_height_source( + 
name: &str, + version: Version, + source: ReadableBoxedVec, + height_cumulative: ReadableBoxedVec, + first_height: ReadableBoxedVec, + ) -> Self { + let v = version + VERSION; + + Self { + average: LazyAverage::from_height_source(name, v, source.clone(), first_height.clone()), + min: LazyMin::from_height_source(name, v, source.clone(), first_height.clone()), + max: LazyMax::from_height_source(name, v, source.clone(), first_height.clone()), + percentiles: LazyAggPercentiles::from_height_source(name, v, source.clone(), first_height.clone()), + sum: LazySum::from_height_source(name, v, source, first_height.clone()), + cumulative: LazyCumulative::from_height_source(name, v, height_cumulative, first_height), + } + } +} diff --git a/crates/brk_computer/src/internal/single/lazy/last.rs b/crates/brk_computer/src/internal/single/lazy/last.rs index 989646863..740536336 100644 --- a/crates/brk_computer/src/internal/single/lazy/last.rs +++ b/crates/brk_computer/src/internal/single/lazy/last.rs @@ -1,23 +1,34 @@ //! Lazy last-value aggregation. 
+use std::sync::Arc; + use brk_traversable::Traversable; -use brk_types::Version; -use derive_more::{Deref, DerefMut}; +use brk_types::{Height, Version}; use schemars::JsonSchema; -use vecdb::{FromCoarserIndex, IterableBoxedVec, LazyVecFrom2, VecIndex, VecValue}; +use vecdb::{Cursor, FromCoarserIndex, ReadableBoxedVec, VecIndex, VecValue}; use crate::internal::ComputedVecValue; const VERSION: Version = Version::ZERO; -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(transparent)] -pub struct LazyLast(pub LazyVecFrom2) +type ForEachRangeFn = + fn(usize, usize, &ReadableBoxedVec, &ReadableBoxedVec, &mut dyn FnMut(T)); + +pub struct LazyLast where I: VecIndex, T: ComputedVecValue + JsonSchema, S1I: VecIndex, - S2T: VecValue; + S2T: VecValue, +{ + name: Arc, + version: Version, + source: ReadableBoxedVec, + mapping: ReadableBoxedVec, + for_each_range: ForEachRangeFn, +} + +impl_lazy_agg!(LazyLast); impl LazyLast where @@ -26,24 +37,101 @@ where S1I: VecIndex + 'static + FromCoarserIndex, S2T: VecValue, { - pub fn from_source( + pub(crate) fn from_source( name: &str, version: Version, - source: IterableBoxedVec, - len_source: IterableBoxedVec, + source: ReadableBoxedVec, + len_source: ReadableBoxedVec, ) -> Self { - Self(LazyVecFrom2::init( - name, - version + VERSION, - source, - len_source, - |i: I, source, len_source| { - if i.to_usize() >= len_source.vec_len() { - return None; + fn for_each_range< + I: VecIndex, + T: VecValue, + S1I: VecIndex + FromCoarserIndex, + S2T: VecValue, + >( + from: usize, + to: usize, + source: &ReadableBoxedVec, + mapping: &ReadableBoxedVec, + f: &mut dyn FnMut(T), + ) { + let mapping_len = mapping.len(); + let source_len = source.len(); + let mut cursor = Cursor::from_dyn(&**source); + for i in from..to { + if i >= mapping_len { + break; } - source.get_at(S1I::max_from(i, source.vec_len())) - }, - )) + let target = S1I::max_from(I::from(i), source_len); + if cursor.position() <= target { + cursor.advance(target - 
cursor.position()); + if let Some(v) = cursor.next() { + f(v); + } + } else if let Some(v) = source.collect_one_at(target) { + f(v); + } + } + } + Self { + name: Arc::from(name), + version: version + VERSION, + source, + mapping: len_source, + for_each_range: for_each_range::, + } } } +impl LazyLast +where + I: VecIndex, + T: ComputedVecValue + JsonSchema + 'static, +{ + /// Create from a height-indexed source using an explicit first_height mapping. + /// For day1 d, looks up value at `first_height[d+1] - 1` (last height of the day). + pub(crate) fn from_height_source( + name: &str, + version: Version, + source: ReadableBoxedVec, + first_height: ReadableBoxedVec, + ) -> Self { + fn for_each_range( + from: usize, + to: usize, + source: &ReadableBoxedVec, + mapping: &ReadableBoxedVec, + f: &mut dyn FnMut(T), + ) { + let map_end = (to + 1).min(mapping.len()); + let heights = mapping.collect_range_dyn(from, map_end); + let source_len = source.len(); + let mut cursor = Cursor::from_dyn(&**source); + for idx in 0..(to - from) { + let next_first = heights + .get(idx + 1) + .map(|h| h.to_usize()) + .unwrap_or(source_len); + if next_first == 0 { + continue; + } + let target = next_first - 1; + if cursor.position() <= target { + cursor.advance(target - cursor.position()); + if let Some(v) = cursor.next() { + f(v); + } + } else if let Some(v) = source.collect_one_at(target) { + f(v); + } + } + } + Self { + name: Arc::from(name), + version: version + VERSION, + source, + mapping: first_height, + for_each_range: for_each_range::, + } + } +} diff --git a/crates/brk_computer/src/internal/single/lazy/max.rs b/crates/brk_computer/src/internal/single/lazy/max.rs index db5f044cf..4cf8e7e32 100644 --- a/crates/brk_computer/src/internal/single/lazy/max.rs +++ b/crates/brk_computer/src/internal/single/lazy/max.rs @@ -1,23 +1,34 @@ //! Lazy max-value aggregation. 
+use std::sync::Arc; + use brk_traversable::Traversable; -use brk_types::Version; -use derive_more::{Deref, DerefMut}; +use brk_types::{Height, Version}; use schemars::JsonSchema; -use vecdb::{FromCoarserIndex, IterableBoxedVec, LazyVecFrom2, VecIndex, VecValue}; +use vecdb::{Cursor, FromCoarserIndex, ReadableBoxedVec, VecIndex, VecValue}; use crate::internal::ComputedVecValue; const VERSION: Version = Version::ZERO; -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(transparent)] -pub struct LazyMax(pub LazyVecFrom2) +type ForEachRangeFn = + fn(usize, usize, &ReadableBoxedVec, &ReadableBoxedVec, &mut dyn FnMut(T)); + +pub struct LazyMax where I: VecIndex, T: ComputedVecValue + JsonSchema, S1I: VecIndex, - S2T: VecValue; + S2T: VecValue, +{ + name: Arc, + version: Version, + source: ReadableBoxedVec, + mapping: ReadableBoxedVec, + for_each_range: ForEachRangeFn, +} + +impl_lazy_agg!(LazyMax); impl LazyMax where @@ -26,21 +37,20 @@ where S1I: VecIndex + 'static + FromCoarserIndex, S2T: VecValue, { - pub fn from_source( + pub(crate) fn from_source( name: &str, version: Version, - source: IterableBoxedVec, - len_source: IterableBoxedVec, + source: ReadableBoxedVec, + len_source: ReadableBoxedVec, ) -> Self { Self::from_source_inner(&format!("{name}_max"), version, source, len_source) } - /// Create from source without adding _max suffix. 
- pub fn from_source_raw( + pub(crate) fn from_source_raw( name: &str, version: Version, - source: IterableBoxedVec, - len_source: IterableBoxedVec, + source: ReadableBoxedVec, + len_source: ReadableBoxedVec, ) -> Self { Self::from_source_inner(name, version, source, len_source) } @@ -48,22 +58,119 @@ where fn from_source_inner( name: &str, version: Version, - source: IterableBoxedVec, - len_source: IterableBoxedVec, + source: ReadableBoxedVec, + len_source: ReadableBoxedVec, ) -> Self { - Self(LazyVecFrom2::init( - name, - version + VERSION, - source, - len_source, - |i: I, source, len_source| { - if i.to_usize() >= len_source.vec_len() { - return None; + fn for_each_range< + I: VecIndex, + T: ComputedVecValue + JsonSchema, + S1I: VecIndex + FromCoarserIndex, + S2T: VecValue, + >( + from: usize, + to: usize, + source: &ReadableBoxedVec, + mapping: &ReadableBoxedVec, + f: &mut dyn FnMut(T), + ) { + let mapping_len = mapping.len(); + let source_len = source.len(); + let to = to.min(mapping_len); + if from >= to { + return; + } + let mut cursor = Cursor::from_dyn(&**source); + cursor.advance(S1I::min_from(I::from(from))); + for i in from..to { + let start = S1I::min_from(I::from(i)); + let end = S1I::max_from(I::from(i), source_len) + 1; + let count = end.saturating_sub(start); + if count == 0 { + continue; } - S1I::inclusive_range_from(i, source.vec_len()) - .flat_map(|i| source.get_at(i)) - .max() - }, - )) + if let Some(first) = cursor.next() { + f(cursor.fold(count - 1, first, |m, v| if v > m { v } else { m })); + } + } + } + Self { + name: Arc::from(name), + version: version + VERSION, + source, + mapping: len_source, + for_each_range: for_each_range::, + } + } +} + +impl LazyMax +where + I: VecIndex, + T: ComputedVecValue + JsonSchema + 'static, +{ + pub(crate) fn from_height_source( + name: &str, + version: Version, + source: ReadableBoxedVec, + first_height: ReadableBoxedVec, + ) -> Self { + Self::from_height_source_inner(&format!("{name}_max"), version, 
source, first_height) + } + + pub(crate) fn from_height_source_raw( + name: &str, + version: Version, + source: ReadableBoxedVec, + first_height: ReadableBoxedVec, + ) -> Self { + Self::from_height_source_inner(name, version, source, first_height) + } + + fn from_height_source_inner( + name: &str, + version: Version, + source: ReadableBoxedVec, + first_height: ReadableBoxedVec, + ) -> Self { + fn for_each_range( + from: usize, + to: usize, + source: &ReadableBoxedVec, + mapping: &ReadableBoxedVec, + f: &mut dyn FnMut(T), + ) { + let map_end = (to + 1).min(mapping.len()); + let heights = mapping.collect_range_dyn(from, map_end); + let source_len = source.len(); + let Some(&first_h) = heights.first() else { + return; + }; + let mut cursor = Cursor::from_dyn(&**source); + cursor.advance(first_h.to_usize()); + for idx in 0..(to - from) { + let Some(&cur_h) = heights.get(idx) else { + continue; + }; + let first = cur_h.to_usize(); + let next_first = heights + .get(idx + 1) + .map(|h| h.to_usize()) + .unwrap_or(source_len); + let count = next_first.saturating_sub(first); + if count == 0 { + continue; + } + if let Some(first_val) = cursor.next() { + f(cursor.fold(count - 1, first_val, |m, v| if v > m { v } else { m })); + } + } + } + Self { + name: Arc::from(name), + version: version + VERSION, + source, + mapping: first_height, + for_each_range: for_each_range::, + } } } diff --git a/crates/brk_computer/src/internal/single/lazy/min.rs b/crates/brk_computer/src/internal/single/lazy/min.rs index dd530f8a5..dc52bf5b1 100644 --- a/crates/brk_computer/src/internal/single/lazy/min.rs +++ b/crates/brk_computer/src/internal/single/lazy/min.rs @@ -1,23 +1,34 @@ //! Lazy min-value aggregation. 
+use std::sync::Arc; + use brk_traversable::Traversable; -use brk_types::Version; -use derive_more::{Deref, DerefMut}; +use brk_types::{Height, Version}; use schemars::JsonSchema; -use vecdb::{FromCoarserIndex, IterableBoxedVec, LazyVecFrom2, VecIndex, VecValue}; +use vecdb::{Cursor, FromCoarserIndex, ReadableBoxedVec, VecIndex, VecValue}; use crate::internal::ComputedVecValue; const VERSION: Version = Version::ZERO; -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(transparent)] -pub struct LazyMin(pub LazyVecFrom2) +type ForEachRangeFn = + fn(usize, usize, &ReadableBoxedVec, &ReadableBoxedVec, &mut dyn FnMut(T)); + +pub struct LazyMin where I: VecIndex, T: ComputedVecValue + JsonSchema, S1I: VecIndex, - S2T: VecValue; + S2T: VecValue, +{ + name: Arc, + version: Version, + source: ReadableBoxedVec, + mapping: ReadableBoxedVec, + for_each_range: ForEachRangeFn, +} + +impl_lazy_agg!(LazyMin); impl LazyMin where @@ -26,21 +37,20 @@ where S1I: VecIndex + 'static + FromCoarserIndex, S2T: VecValue, { - pub fn from_source( + pub(crate) fn from_source( name: &str, version: Version, - source: IterableBoxedVec, - len_source: IterableBoxedVec, + source: ReadableBoxedVec, + len_source: ReadableBoxedVec, ) -> Self { Self::from_source_inner(&format!("{name}_min"), version, source, len_source) } - /// Create from source without adding _min suffix. 
- pub fn from_source_raw( + pub(crate) fn from_source_raw( name: &str, version: Version, - source: IterableBoxedVec, - len_source: IterableBoxedVec, + source: ReadableBoxedVec, + len_source: ReadableBoxedVec, ) -> Self { Self::from_source_inner(name, version, source, len_source) } @@ -48,22 +58,119 @@ where fn from_source_inner( name: &str, version: Version, - source: IterableBoxedVec, - len_source: IterableBoxedVec, + source: ReadableBoxedVec, + len_source: ReadableBoxedVec, ) -> Self { - Self(LazyVecFrom2::init( - name, - version + VERSION, - source, - len_source, - |i: I, source, len_source| { - if i.to_usize() >= len_source.vec_len() { - return None; + fn for_each_range< + I: VecIndex, + T: ComputedVecValue + JsonSchema, + S1I: VecIndex + FromCoarserIndex, + S2T: VecValue, + >( + from: usize, + to: usize, + source: &ReadableBoxedVec, + mapping: &ReadableBoxedVec, + f: &mut dyn FnMut(T), + ) { + let mapping_len = mapping.len(); + let source_len = source.len(); + let to = to.min(mapping_len); + if from >= to { + return; + } + let mut cursor = Cursor::from_dyn(&**source); + cursor.advance(S1I::min_from(I::from(from))); + for i in from..to { + let start = S1I::min_from(I::from(i)); + let end = S1I::max_from(I::from(i), source_len) + 1; + let count = end.saturating_sub(start); + if count == 0 { + continue; } - S1I::inclusive_range_from(i, source.vec_len()) - .flat_map(|i| source.get_at(i)) - .min() - }, - )) + if let Some(first) = cursor.next() { + f(cursor.fold(count - 1, first, |m, v| if v < m { v } else { m })); + } + } + } + Self { + name: Arc::from(name), + version: version + VERSION, + source, + mapping: len_source, + for_each_range: for_each_range::, + } + } +} + +impl LazyMin +where + I: VecIndex, + T: ComputedVecValue + JsonSchema + 'static, +{ + pub(crate) fn from_height_source( + name: &str, + version: Version, + source: ReadableBoxedVec, + first_height: ReadableBoxedVec, + ) -> Self { + Self::from_height_source_inner(&format!("{name}_min"), version, 
source, first_height) + } + + pub(crate) fn from_height_source_raw( + name: &str, + version: Version, + source: ReadableBoxedVec, + first_height: ReadableBoxedVec, + ) -> Self { + Self::from_height_source_inner(name, version, source, first_height) + } + + fn from_height_source_inner( + name: &str, + version: Version, + source: ReadableBoxedVec, + first_height: ReadableBoxedVec, + ) -> Self { + fn for_each_range( + from: usize, + to: usize, + source: &ReadableBoxedVec, + mapping: &ReadableBoxedVec, + f: &mut dyn FnMut(T), + ) { + let map_end = (to + 1).min(mapping.len()); + let heights = mapping.collect_range_dyn(from, map_end); + let source_len = source.len(); + let Some(&first_h) = heights.first() else { + return; + }; + let mut cursor = Cursor::from_dyn(&**source); + cursor.advance(first_h.to_usize()); + for idx in 0..(to - from) { + let Some(&cur_h) = heights.get(idx) else { + continue; + }; + let first = cur_h.to_usize(); + let next_first = heights + .get(idx + 1) + .map(|h| h.to_usize()) + .unwrap_or(source_len); + let count = next_first.saturating_sub(first); + if count == 0 { + continue; + } + if let Some(first_val) = cursor.next() { + f(cursor.fold(count - 1, first_val, |m, v| if v < m { v } else { m })); + } + } + } + Self { + name: Arc::from(name), + version: version + VERSION, + source, + mapping: first_height, + for_each_range: for_each_range::, + } } } diff --git a/crates/brk_computer/src/internal/single/lazy/mod.rs b/crates/brk_computer/src/internal/single/lazy/mod.rs index a7c0f0fe6..243bdbf0f 100644 --- a/crates/brk_computer/src/internal/single/lazy/mod.rs +++ b/crates/brk_computer/src/internal/single/lazy/mod.rs @@ -1,23 +1,209 @@ //! Lazy aggregation primitives (finer index → coarser index). +//! +//! These types implement GROUP BY: map each coarser output index to a range +//! in the finer source, then aggregate that range. They implement the vecdb +//! ReadableVec trait directly. + +/// Common trait implementations for lazy aggregation types. 
+/// +/// Provides: Clone, AnyVec, TypedVec, ReadableVec, Traversable. +/// The struct must have fields: name, version, source, mapping, for_each_range. +macro_rules! impl_lazy_agg { + ($name:ident) => { + impl Clone for $name + where + I: VecIndex, + T: ComputedVecValue + JsonSchema, + S1I: VecIndex, + S2T: VecValue, + { + fn clone(&self) -> Self { + Self { + name: self.name.clone(), + version: self.version, + source: self.source.clone(), + mapping: self.mapping.clone(), + for_each_range: self.for_each_range, + } + } + } + + impl vecdb::AnyVec for $name + where + I: VecIndex, + T: ComputedVecValue + JsonSchema, + S1I: VecIndex, + S2T: VecValue, + { + fn version(&self) -> Version { + self.version + self.source.version() + self.mapping.version() + } + fn name(&self) -> &str { + &self.name + } + fn index_type_to_string(&self) -> &'static str { + I::to_string() + } + fn len(&self) -> usize { + self.mapping.len() + } + #[inline] + fn value_type_to_size_of(&self) -> usize { + size_of::() + } + #[inline] + fn value_type_to_string(&self) -> &'static str { + vecdb::short_type_name::() + } + #[inline] + fn region_names(&self) -> Vec { + vec![] + } + } + + impl vecdb::TypedVec for $name + where + I: VecIndex, + T: ComputedVecValue + JsonSchema, + S1I: VecIndex, + S2T: VecValue, + { + type I = I; + type T = T; + } + + impl vecdb::ReadableVec for $name + where + I: VecIndex, + T: ComputedVecValue + JsonSchema, + S1I: VecIndex, + S2T: VecValue, + { + fn read_into_at(&self, from: usize, to: usize, buf: &mut Vec) { + let to = to.min(self.mapping.len()); + if from >= to { return; } + buf.reserve(to - from); + (self.for_each_range)(from, to, &self.source, &self.mapping, &mut |v| buf.push(v)); + } + + fn for_each_range_dyn_at(&self, from: usize, to: usize, f: &mut dyn FnMut(T)) { + let to = to.min(self.mapping.len()); + if from >= to { return; } + (self.for_each_range)(from, to, &self.source, &self.mapping, f); + } + + #[inline] + fn fold_range_at B>( + &self, + from: usize, + to: 
usize, + init: B, + mut f: F, + ) -> B + where + Self: Sized, + { + let to = to.min(self.mapping.len()); + if from >= to { return init; } + let mut acc = Some(init); + (self.for_each_range)(from, to, &self.source, &self.mapping, &mut |v| { + acc = Some(f(acc.take().unwrap(), v)); + }); + acc.unwrap() + } + + #[inline] + fn try_fold_range_at std::result::Result>( + &self, + from: usize, + to: usize, + init: B, + mut f: F, + ) -> std::result::Result + where + Self: Sized, + { + let to = to.min(self.mapping.len()); + if from >= to { return Ok(init); } + let mut acc: Option> = Some(Ok(init)); + (self.for_each_range)(from, to, &self.source, &self.mapping, &mut |v| { + if let Some(Ok(a)) = acc.take() { + acc = Some(f(a, v)); + } + }); + acc.unwrap() + } + + #[inline] + fn collect_one_at(&self, index: usize) -> Option { + if index >= self.mapping.len() { + return None; + } + let mut result = None; + (self.for_each_range)(index, index + 1, &self.source, &self.mapping, &mut |v| result = Some(v)); + result + } + } + + impl Traversable for $name + where + I: VecIndex, + T: ComputedVecValue + JsonSchema + 'static, + S1I: VecIndex + 'static, + S2T: VecValue, + { + fn iter_any_exportable(&self) -> impl Iterator { + std::iter::once(self as &dyn vecdb::AnyExportableVec) + } + + fn to_tree_node(&self) -> brk_types::TreeNode { + use vecdb::AnyVec; + let index_str = I::to_string(); + let index = brk_types::Index::try_from(index_str).ok(); + let indexes = index.into_iter().collect(); + let leaf = brk_types::MetricLeaf::new( + self.name().to_string(), + self.value_type_to_string().to_string(), + indexes, + ); + let schema = + schemars::SchemaGenerator::default().into_root_schema_for::(); + let schema_json = serde_json::to_value(schema).unwrap_or_default(); + brk_types::TreeNode::Leaf(brk_types::MetricLeafWithSchema::new( + leaf, + schema_json, + )) + } + } + }; +} mod average; mod cumulative; +mod distribution; mod first; mod full; mod last; mod max; mod min; -mod spread; +mod ohlc; 
+mod percentile; +mod percentiles; +mod sparse_last; mod sum; mod sum_cum; pub use average::*; pub use cumulative::*; +pub use distribution::*; pub use first::*; pub use full::*; pub use last::*; pub use max::*; pub use min::*; -pub use spread::*; +pub use ohlc::*; +pub use percentile::*; +pub use percentiles::*; +pub use sparse_last::*; pub use sum::*; pub use sum_cum::*; diff --git a/crates/brk_computer/src/internal/single/lazy/ohlc.rs b/crates/brk_computer/src/internal/single/lazy/ohlc.rs new file mode 100644 index 000000000..a4f563d24 --- /dev/null +++ b/crates/brk_computer/src/internal/single/lazy/ohlc.rs @@ -0,0 +1,370 @@ +//! Lazy OHLC aggregation — single-pass first/max/min/last from height-level data. + +use std::sync::Arc; + +use brk_traversable::Traversable; +use brk_types::{Height, Version}; +use schemars::JsonSchema; +use serde::Serialize; +use vecdb::{Cursor, Formattable, ReadableBoxedVec, VecIndex, VecValue}; + +use brk_types::{Cents, Close, Dollars, High, Low, OHLCCents, OHLCDollars, OHLCSats, Open, Sats}; + +use crate::internal::ComputedVecValue; + +/// Trait for OHLC bundle types that can be constructed from / decomposed into +/// their open/high/low/close components. 
+pub trait OHLCRecord: VecValue + Formattable + Serialize + JsonSchema { + type Inner: ComputedVecValue + JsonSchema + Copy; + fn ohlc_open(&self) -> Self::Inner; + fn ohlc_high(&self) -> Self::Inner; + fn ohlc_low(&self) -> Self::Inner; + fn ohlc_close(&self) -> Self::Inner; + fn from_parts( + open: Self::Inner, + high: Self::Inner, + low: Self::Inner, + close: Self::Inner, + ) -> Self; +} + +impl OHLCRecord for OHLCCents { + type Inner = Cents; + fn ohlc_open(&self) -> Cents { + *self.open + } + fn ohlc_high(&self) -> Cents { + *self.high + } + fn ohlc_low(&self) -> Cents { + *self.low + } + fn ohlc_close(&self) -> Cents { + *self.close + } + fn from_parts(open: Cents, high: Cents, low: Cents, close: Cents) -> Self { + Self { + open: Open::new(open), + high: High::new(high), + low: Low::new(low), + close: Close::new(close), + } + } +} + +impl OHLCRecord for OHLCDollars { + type Inner = Dollars; + fn ohlc_open(&self) -> Dollars { + *self.open + } + fn ohlc_high(&self) -> Dollars { + *self.high + } + fn ohlc_low(&self) -> Dollars { + *self.low + } + fn ohlc_close(&self) -> Dollars { + *self.close + } + fn from_parts(open: Dollars, high: Dollars, low: Dollars, close: Dollars) -> Self { + Self { + open: Open::new(open), + high: High::new(high), + low: Low::new(low), + close: Close::new(close), + } + } +} + +impl OHLCRecord for OHLCSats { + type Inner = Sats; + fn ohlc_open(&self) -> Sats { + *self.open + } + fn ohlc_high(&self) -> Sats { + *self.high + } + fn ohlc_low(&self) -> Sats { + *self.low + } + fn ohlc_close(&self) -> Sats { + *self.close + } + fn from_parts(open: Sats, high: Sats, low: Sats, close: Sats) -> Self { + Self { + open: Open::new(open), + high: High::new(high), + low: Low::new(low), + close: Close::new(close), + } + } +} + +const VERSION: Version = Version::ZERO; + +type ForEachRangeFn = + fn(usize, usize, &ReadableBoxedVec, &ReadableBoxedVec, &mut dyn FnMut(OHLC)); + +/// Lazy OHLC aggregation vec. 
For each coarser period, computes open (first), +/// high (max), low (min), close (last) in a single pass over the source range. +pub struct LazyOHLC +where + I: VecIndex, + OHLC: OHLCRecord, + S1I: VecIndex, + ST: VecValue, + S2T: VecValue, +{ + name: Arc, + version: Version, + source: ReadableBoxedVec, + mapping: ReadableBoxedVec, + for_each_range: ForEachRangeFn, +} + +// --- From height source (Day1, DifficultyEpoch) --- + +impl LazyOHLC +where + I: VecIndex, + OHLC: OHLCRecord + 'static, + T: ComputedVecValue + JsonSchema + 'static, +{ + pub(crate) fn from_height_source( + name: &str, + version: Version, + source: ReadableBoxedVec, + first_height: ReadableBoxedVec, + ) -> Self { + fn for_each_range< + I: VecIndex, + OHLC: OHLCRecord, + T: ComputedVecValue + JsonSchema, + >( + from: usize, + to: usize, + source: &ReadableBoxedVec, + mapping: &ReadableBoxedVec, + f: &mut dyn FnMut(OHLC), + ) { + let map_end = (to + 1).min(mapping.len()); + let heights = mapping.collect_range_dyn(from, map_end); + let source_len = source.len(); + let Some(&first_h) = heights.first() else { + return; + }; + let mut cursor = Cursor::from_dyn(&**source); + cursor.advance(first_h.to_usize()); + for idx in 0..(to - from) { + let Some(&cur_h) = heights.get(idx) else { + continue; + }; + let first = cur_h.to_usize(); + let next_first = heights + .get(idx + 1) + .map(|h| h.to_usize()) + .unwrap_or(source_len); + let count = next_first.saturating_sub(first); + if count == 0 { + continue; + } + if let Some(first_val) = cursor.next() { + let (high, low, close) = cursor.fold( + count - 1, + (first_val, first_val, first_val), + |(hi, lo, _), v| { + (if v > hi { v } else { hi }, if v < lo { v } else { lo }, v) + }, + ); + f(OHLC::from_parts(first_val, high, low, close)); + } + } + } + Self { + name: Arc::from(format!("{name}_ohlc")), + version: version + VERSION, + source, + mapping: first_height, + for_each_range: for_each_range::, + } + } +} + +// --- Trait implementations --- + +impl Clone 
for LazyOHLC +where + I: VecIndex, + OHLC: OHLCRecord, + S1I: VecIndex, + ST: VecValue, + S2T: VecValue, +{ + fn clone(&self) -> Self { + Self { + name: self.name.clone(), + version: self.version, + source: self.source.clone(), + mapping: self.mapping.clone(), + for_each_range: self.for_each_range, + } + } +} + +impl vecdb::AnyVec for LazyOHLC +where + I: VecIndex, + OHLC: OHLCRecord, + S1I: VecIndex, + ST: VecValue, + S2T: VecValue, +{ + fn version(&self) -> Version { + self.version + self.source.version() + self.mapping.version() + } + fn name(&self) -> &str { + &self.name + } + fn index_type_to_string(&self) -> &'static str { + I::to_string() + } + fn len(&self) -> usize { + self.mapping.len() + } + #[inline] + fn value_type_to_size_of(&self) -> usize { + size_of::() + } + #[inline] + fn value_type_to_string(&self) -> &'static str { + vecdb::short_type_name::() + } + #[inline] + fn region_names(&self) -> Vec { + vec![] + } +} + +impl vecdb::TypedVec for LazyOHLC +where + I: VecIndex, + OHLC: OHLCRecord, + S1I: VecIndex, + ST: VecValue, + S2T: VecValue, +{ + type I = I; + type T = OHLC; +} + +impl vecdb::ReadableVec for LazyOHLC +where + I: VecIndex, + OHLC: OHLCRecord, + S1I: VecIndex, + ST: VecValue, + S2T: VecValue, +{ + fn read_into_at(&self, from: usize, to: usize, buf: &mut Vec) { + let to = to.min(self.mapping.len()); + if from >= to { + return; + } + buf.reserve(to - from); + (self.for_each_range)(from, to, &self.source, &self.mapping, &mut |v| buf.push(v)); + } + + fn for_each_range_dyn_at(&self, from: usize, to: usize, f: &mut dyn FnMut(OHLC)) { + let to = to.min(self.mapping.len()); + if from >= to { + return; + } + (self.for_each_range)(from, to, &self.source, &self.mapping, f); + } + + #[inline] + fn fold_range_at B>( + &self, + from: usize, + to: usize, + init: B, + mut f: F, + ) -> B + where + Self: Sized, + { + let to = to.min(self.mapping.len()); + if from >= to { + return init; + } + let mut acc = Some(init); + (self.for_each_range)(from, to, 
&self.source, &self.mapping, &mut |v| { + acc = Some(f(acc.take().unwrap(), v)); + }); + acc.unwrap() + } + + #[inline] + fn try_fold_range_at std::result::Result>( + &self, + from: usize, + to: usize, + init: B, + mut f: F, + ) -> std::result::Result + where + Self: Sized, + { + let to = to.min(self.mapping.len()); + if from >= to { + return Ok(init); + } + let mut acc: Option> = Some(Ok(init)); + (self.for_each_range)(from, to, &self.source, &self.mapping, &mut |v| { + if let Some(Ok(a)) = acc.take() { + acc = Some(f(a, v)); + } + }); + acc.unwrap() + } + + #[inline] + fn collect_one_at(&self, index: usize) -> Option { + if index >= self.mapping.len() { + return None; + } + let mut result = None; + (self.for_each_range)(index, index + 1, &self.source, &self.mapping, &mut |v| { + result = Some(v) + }); + result + } +} + +impl Traversable for LazyOHLC +where + I: VecIndex + 'static, + OHLC: OHLCRecord + 'static, + S1I: VecIndex + 'static, + ST: VecValue, + S2T: VecValue, +{ + fn iter_any_exportable(&self) -> impl Iterator { + std::iter::once(self as &dyn vecdb::AnyExportableVec) + } + + fn to_tree_node(&self) -> brk_types::TreeNode { + use vecdb::AnyVec; + let index_str = I::to_string(); + let index = brk_types::Index::try_from(index_str).ok(); + let indexes = index.into_iter().collect(); + let leaf = brk_types::MetricLeaf::new( + self.name().to_string(), + self.value_type_to_string().to_string(), + indexes, + ); + let schema = schemars::SchemaGenerator::default().into_root_schema_for::(); + let schema_json = serde_json::to_value(schema).unwrap_or_default(); + brk_types::TreeNode::Leaf(brk_types::MetricLeafWithSchema::new(leaf, schema_json)) + } +} diff --git a/crates/brk_computer/src/internal/single/lazy/percentile.rs b/crates/brk_computer/src/internal/single/lazy/percentile.rs new file mode 100644 index 000000000..2ca490a02 --- /dev/null +++ b/crates/brk_computer/src/internal/single/lazy/percentile.rs @@ -0,0 +1,156 @@ +//! 
Lazy percentile aggregation via const-generic fn pointers. + +use std::sync::Arc; + +use brk_traversable::Traversable; +use brk_types::{Height, Version}; +use schemars::JsonSchema; +use vecdb::{Cursor, FromCoarserIndex, ReadableBoxedVec, VecIndex, VecValue}; + +use crate::internal::ComputedVecValue; + +const VERSION: Version = Version::ZERO; + +type ForEachRangeFn = + fn(usize, usize, &ReadableBoxedVec, &ReadableBoxedVec, &mut dyn FnMut(T)); + +pub struct LazyPercentile +where + I: VecIndex, + T: ComputedVecValue + JsonSchema, + S1I: VecIndex, + S2T: VecValue, +{ + name: Arc, + version: Version, + source: ReadableBoxedVec, + mapping: ReadableBoxedVec, + for_each_range: ForEachRangeFn, +} + +impl_lazy_agg!(LazyPercentile); + +fn select_and_pick(values: &mut [T], pct: u8) -> Option { + if values.is_empty() { + return None; + } + let idx = (values.len() - 1) * pct as usize / 100; + values.select_nth_unstable_by(idx, |a, b| { + a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal) + }); + Some(values[idx]) +} + +impl LazyPercentile +where + I: VecIndex, + T: ComputedVecValue + JsonSchema + 'static, + S1I: VecIndex + 'static + FromCoarserIndex, + S2T: VecValue, +{ + pub(crate) fn from_source( + name: &str, + version: Version, + source: ReadableBoxedVec, + len_source: ReadableBoxedVec, + ) -> Self { + fn for_each_range< + const PCT: u8, + I: VecIndex, + T: ComputedVecValue + JsonSchema, + S1I: VecIndex + FromCoarserIndex, + S2T: VecValue, + >( + from: usize, + to: usize, + source: &ReadableBoxedVec, + mapping: &ReadableBoxedVec, + f: &mut dyn FnMut(T), + ) { + let mapping_len = mapping.len(); + let source_len = source.len(); + let to = to.min(mapping_len); + if from >= to { + return; + } + let mut cursor = Cursor::from_dyn(&**source); + cursor.advance(S1I::min_from(I::from(from))); + let mut values = Vec::new(); + for i in from..to { + let start = S1I::min_from(I::from(i)); + let end = S1I::max_from(I::from(i), source_len) + 1; + if end <= start { + continue; + } + 
values.clear(); + cursor.for_each(end - start, |v| values.push(v)); + if let Some(v) = select_and_pick(&mut values, PCT) { + f(v); + } + } + } + Self { + name: Arc::from(name), + version: version + VERSION, + source, + mapping: len_source, + for_each_range: for_each_range::, + } + } +} + +impl LazyPercentile +where + I: VecIndex, + T: ComputedVecValue + JsonSchema + 'static, +{ + pub(crate) fn from_height_source( + name: &str, + version: Version, + source: ReadableBoxedVec, + first_height: ReadableBoxedVec, + ) -> Self { + fn for_each_range( + from: usize, + to: usize, + source: &ReadableBoxedVec, + mapping: &ReadableBoxedVec, + f: &mut dyn FnMut(T), + ) { + let map_end = (to + 1).min(mapping.len()); + let heights = mapping.collect_range_dyn(from, map_end); + let source_len = source.len(); + let Some(&first_h) = heights.first() else { + return; + }; + let mut cursor = Cursor::from_dyn(&**source); + cursor.advance(first_h.to_usize()); + let mut values = Vec::new(); + for idx in 0..(to - from) { + let Some(&cur_h) = heights.get(idx) else { + continue; + }; + let first = cur_h.to_usize(); + let next_first = heights + .get(idx + 1) + .map(|h| h.to_usize()) + .unwrap_or(source_len); + if next_first <= first { + continue; + } + values.clear(); + cursor.for_each(next_first - first, |v| values.push(v)); + if let Some(v) = select_and_pick(&mut values, PCT) { + f(v); + } + } + } + Self { + name: Arc::from(name), + version: version + VERSION, + source, + mapping: first_height, + for_each_range: for_each_range::, + } + } +} diff --git a/crates/brk_computer/src/internal/single/lazy/percentiles.rs b/crates/brk_computer/src/internal/single/lazy/percentiles.rs new file mode 100644 index 000000000..314a0c072 --- /dev/null +++ b/crates/brk_computer/src/internal/single/lazy/percentiles.rs @@ -0,0 +1,75 @@ +//! Lazy percentiles composite (pct10, pct25, median, pct75, pct90). 
+ +use brk_traversable::Traversable; +use brk_types::{Height, Version}; +use schemars::JsonSchema; +use vecdb::{FromCoarserIndex, ReadableBoxedVec, VecIndex, VecValue}; + +use crate::internal::ComputedVecValue; + +use super::LazyPercentile; + +const VERSION: Version = Version::ZERO; + +#[derive(Clone, Traversable)] +pub struct LazyAggPercentiles +where + I: VecIndex, + T: ComputedVecValue + JsonSchema, + S1I: VecIndex, + S2T: VecValue, +{ + pub pct10: LazyPercentile, + pub pct25: LazyPercentile, + pub median: LazyPercentile, + pub pct75: LazyPercentile, + pub pct90: LazyPercentile, +} + +impl LazyAggPercentiles +where + I: VecIndex, + T: ComputedVecValue + JsonSchema + 'static, + S1I: VecIndex + 'static + FromCoarserIndex, + S2T: VecValue, +{ + pub(crate) fn from_source( + name: &str, + version: Version, + source: ReadableBoxedVec, + len_source: ReadableBoxedVec, + ) -> Self { + let v = version + VERSION; + + Self { + pct10: LazyPercentile::from_source::<10>(&format!("{name}_pct10"), v, source.clone(), len_source.clone()), + pct25: LazyPercentile::from_source::<25>(&format!("{name}_pct25"), v, source.clone(), len_source.clone()), + median: LazyPercentile::from_source::<50>(&format!("{name}_median"), v, source.clone(), len_source.clone()), + pct75: LazyPercentile::from_source::<75>(&format!("{name}_pct75"), v, source.clone(), len_source.clone()), + pct90: LazyPercentile::from_source::<90>(&format!("{name}_pct90"), v, source, len_source), + } + } +} + +impl LazyAggPercentiles +where + I: VecIndex, + T: ComputedVecValue + JsonSchema + 'static, +{ + pub(crate) fn from_height_source( + name: &str, + version: Version, + source: ReadableBoxedVec, + first_height: ReadableBoxedVec, + ) -> Self { + let v = version + VERSION; + + Self { + pct10: LazyPercentile::from_height_source::<10>(&format!("{name}_pct10"), v, source.clone(), first_height.clone()), + pct25: LazyPercentile::from_height_source::<25>(&format!("{name}_pct25"), v, source.clone(), first_height.clone()), + 
median: LazyPercentile::from_height_source::<50>(&format!("{name}_median"), v, source.clone(), first_height.clone()), + pct75: LazyPercentile::from_height_source::<75>(&format!("{name}_pct75"), v, source.clone(), first_height.clone()), + pct90: LazyPercentile::from_height_source::<90>(&format!("{name}_pct90"), v, source, first_height), + } + } +} diff --git a/crates/brk_computer/src/internal/single/lazy/sparse_last.rs b/crates/brk_computer/src/internal/single/lazy/sparse_last.rs new file mode 100644 index 000000000..d0836e229 --- /dev/null +++ b/crates/brk_computer/src/internal/single/lazy/sparse_last.rs @@ -0,0 +1,279 @@ +//! Sparse last-value aggregation for time-based periods. +//! +//! Unlike [`LazyLast`], which skips empty periods, `SparseLast` produces +//! `Option` for every period slot: `Some(v)` when blocks exist, `None` +//! when a time period contains no blocks. This preserves dense positional +//! mapping (slot i = period start + i) required for correct serialization. + +use std::sync::Arc; + +use brk_traversable::Traversable; +use brk_types::{Height, Version}; +use schemars::JsonSchema; +use vecdb::{Cursor, ReadableBoxedVec, VecIndex}; + +use crate::internal::ComputedVecValue; + +const VERSION: Version = Version::ZERO; + +/// Lazy last-value aggregation that emits `Option` for every time period. +/// +/// For periods containing blocks: `Some(last_value_in_period)`. +/// For empty periods (no blocks mined): `None`. 
+pub struct SparseLast +where + I: VecIndex, + T: ComputedVecValue + JsonSchema, +{ + name: Arc, + version: Version, + source: ReadableBoxedVec, + first_height: ReadableBoxedVec, +} + +impl SparseLast +where + I: VecIndex, + T: ComputedVecValue + JsonSchema, +{ + pub(crate) fn new( + name: &str, + version: Version, + source: ReadableBoxedVec, + first_height: ReadableBoxedVec, + ) -> Self { + Self { + name: Arc::from(name), + version: version + VERSION, + source, + first_height, + } + } + + pub fn height_source(&self) -> &ReadableBoxedVec { + &self.source + } + + pub fn first_height(&self) -> &ReadableBoxedVec { + &self.first_height + } + + /// Dense iteration: calls `f` for every period in `[from, to)`, + /// including empty ones (with `None`). + fn for_each_impl( + from: usize, + to: usize, + source: &ReadableBoxedVec, + first_height: &ReadableBoxedVec, + f: &mut dyn FnMut(Option), + ) { + let map_end = (to + 1).min(first_height.len()); + let heights = first_height.collect_range_dyn(from, map_end); + let source_len = source.len(); + let mut cursor = Cursor::from_dyn(&**source); + + for idx in 0..(to - from) { + let current_first = heights[idx].to_usize(); + let next_first = heights + .get(idx + 1) + .map(|h| h.to_usize()) + .unwrap_or(source_len); + + // Empty period: no blocks belong to this time slot + if next_first == 0 || current_first >= next_first { + f(None); + continue; + } + + // Last height in this period + let target = next_first - 1; + + if cursor.position() <= target { + cursor.advance(target - cursor.position()); + match cursor.next() { + Some(v) => f(Some(v)), + None => f(None), + } + } else { + match source.collect_one_at(target) { + Some(v) => f(Some(v)), + None => f(None), + } + } + } + } +} + +impl Clone for SparseLast +where + I: VecIndex, + T: ComputedVecValue + JsonSchema, +{ + fn clone(&self) -> Self { + Self { + name: self.name.clone(), + version: self.version, + source: self.source.clone(), + first_height: self.first_height.clone(), + } + 
} +} + +impl vecdb::AnyVec for SparseLast +where + I: VecIndex, + T: ComputedVecValue + JsonSchema, +{ + fn version(&self) -> Version { + self.version + self.source.version() + self.first_height.version() + } + + fn name(&self) -> &str { + &self.name + } + + fn index_type_to_string(&self) -> &'static str { + I::to_string() + } + + fn len(&self) -> usize { + self.first_height.len() + } + + #[inline] + fn value_type_to_size_of(&self) -> usize { + size_of::>() + } + + #[inline] + fn value_type_to_string(&self) -> &'static str { + vecdb::short_type_name::() + } + + #[inline] + fn region_names(&self) -> Vec { + vec![] + } +} + +impl vecdb::TypedVec for SparseLast +where + I: VecIndex, + T: ComputedVecValue + JsonSchema, +{ + type I = I; + type T = Option; +} + +impl vecdb::ReadableVec> for SparseLast +where + I: VecIndex, + T: ComputedVecValue + JsonSchema, +{ + fn read_into_at(&self, from: usize, to: usize, buf: &mut Vec>) { + let to = to.min(self.first_height.len()); + if from >= to { + return; + } + buf.reserve(to - from); + Self::for_each_impl(from, to, &self.source, &self.first_height, &mut |v| { + buf.push(v) + }); + } + + fn for_each_range_dyn_at(&self, from: usize, to: usize, f: &mut dyn FnMut(Option)) { + let to = to.min(self.first_height.len()); + if from >= to { + return; + } + Self::for_each_impl(from, to, &self.source, &self.first_height, f); + } + + #[inline] + fn fold_range_at) -> B>( + &self, + from: usize, + to: usize, + init: B, + mut f: F, + ) -> B + where + Self: Sized, + { + let to = to.min(self.first_height.len()); + if from >= to { + return init; + } + let mut acc = Some(init); + Self::for_each_impl(from, to, &self.source, &self.first_height, &mut |v| { + acc = Some(f(acc.take().unwrap(), v)); + }); + acc.unwrap() + } + + #[inline] + fn try_fold_range_at) -> std::result::Result>( + &self, + from: usize, + to: usize, + init: B, + mut f: F, + ) -> std::result::Result + where + Self: Sized, + { + let to = to.min(self.first_height.len()); + if from >= 
to { + return Ok(init); + } + let mut acc: Option> = Some(Ok(init)); + Self::for_each_impl(from, to, &self.source, &self.first_height, &mut |v| { + if let Some(Ok(a)) = acc.take() { + acc = Some(f(a, v)); + } + }); + acc.unwrap() + } + + #[inline] + fn collect_one_at(&self, index: usize) -> Option> { + if index >= self.first_height.len() { + return None; + } + let current_first = self.first_height.collect_one_at(index)?.to_usize(); + let next_first = self + .first_height + .collect_one_at(index + 1) + .map(|h| h.to_usize()) + .unwrap_or(self.source.len()); + if next_first == 0 || current_first >= next_first { + return Some(None); + } + Some(self.source.collect_one_at(next_first - 1)) + } +} + +impl Traversable for SparseLast +where + I: VecIndex, + T: ComputedVecValue + JsonSchema + 'static, +{ + fn iter_any_exportable(&self) -> impl Iterator { + std::iter::once(self as &dyn vecdb::AnyExportableVec) + } + + fn to_tree_node(&self) -> brk_types::TreeNode { + use vecdb::AnyVec; + let index_str = I::to_string(); + let index = brk_types::Index::try_from(index_str).ok(); + let indexes = index.into_iter().collect(); + let leaf = brk_types::MetricLeaf::new( + self.name().to_string(), + self.value_type_to_string().to_string(), + indexes, + ); + let schema = schemars::SchemaGenerator::default().into_root_schema_for::>(); + let schema_json = serde_json::to_value(schema).unwrap_or_default(); + brk_types::TreeNode::Leaf(brk_types::MetricLeafWithSchema::new(leaf, schema_json)) + } +} diff --git a/crates/brk_computer/src/internal/single/lazy/spread.rs b/crates/brk_computer/src/internal/single/lazy/spread.rs deleted file mode 100644 index 05955e74a..000000000 --- a/crates/brk_computer/src/internal/single/lazy/spread.rs +++ /dev/null @@ -1,52 +0,0 @@ -//! Lazy distribution pattern (average, min, max). 
- -use brk_traversable::Traversable; -use brk_types::Version; -use schemars::JsonSchema; -use vecdb::{FromCoarserIndex, IterableBoxedVec, VecIndex}; - -use super::{LazyAverage, LazyMax, LazyMin}; -use crate::internal::ComputedVecValue; - -const VERSION: Version = Version::ZERO; - -#[derive(Clone, Traversable)] -pub struct LazySpread -where - I: VecIndex, - T: ComputedVecValue + JsonSchema, - S1I: VecIndex, - S2T: ComputedVecValue, -{ - #[traversable(flatten)] - pub average: LazyAverage, - #[traversable(flatten)] - pub min: LazyMin, - #[traversable(flatten)] - pub max: LazyMax, -} - -impl LazySpread -where - I: VecIndex, - T: ComputedVecValue + JsonSchema + 'static, - S1I: VecIndex + 'static + FromCoarserIndex, - S2T: ComputedVecValue, -{ - pub fn from_distribution( - name: &str, - version: Version, - source_average: IterableBoxedVec, - source_min: IterableBoxedVec, - source_max: IterableBoxedVec, - len_source: IterableBoxedVec, - ) -> Self { - let v = version + VERSION; - - Self { - average: LazyAverage::from_source(name, v, source_average, len_source.clone()), - min: LazyMin::from_source(name, v, source_min, len_source.clone()), - max: LazyMax::from_source(name, v, source_max, len_source), - } - } -} diff --git a/crates/brk_computer/src/internal/single/lazy/sum.rs b/crates/brk_computer/src/internal/single/lazy/sum.rs index 449db2dce..3017ffaca 100644 --- a/crates/brk_computer/src/internal/single/lazy/sum.rs +++ b/crates/brk_computer/src/internal/single/lazy/sum.rs @@ -1,23 +1,34 @@ //! Lazy sum-value aggregation. 
+use std::sync::Arc; + use brk_traversable::Traversable; -use brk_types::Version; -use derive_more::{Deref, DerefMut}; +use brk_types::{Height, Version}; use schemars::JsonSchema; -use vecdb::{FromCoarserIndex, IterableBoxedVec, LazyVecFrom2, VecIndex, VecValue}; +use vecdb::{Cursor, FromCoarserIndex, ReadableBoxedVec, VecIndex, VecValue}; use crate::internal::ComputedVecValue; const VERSION: Version = Version::ZERO; -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(transparent)] -pub struct LazySum(pub LazyVecFrom2) +type ForEachRangeFn = + fn(usize, usize, &ReadableBoxedVec, &ReadableBoxedVec, &mut dyn FnMut(T)); + +pub struct LazySum where I: VecIndex, T: ComputedVecValue + JsonSchema, S1I: VecIndex, - S2T: VecValue; + S2T: VecValue, +{ + name: Arc, + version: Version, + source: ReadableBoxedVec, + mapping: ReadableBoxedVec, + for_each_range: ForEachRangeFn, +} + +impl_lazy_agg!(LazySum); impl LazySum where @@ -26,21 +37,20 @@ where S1I: VecIndex + 'static + FromCoarserIndex, S2T: VecValue, { - pub fn from_source( + pub(crate) fn from_source( name: &str, version: Version, - source: IterableBoxedVec, - len_source: IterableBoxedVec, + source: ReadableBoxedVec, + len_source: ReadableBoxedVec, ) -> Self { Self::from_source_inner(&format!("{name}_sum"), version, source, len_source) } - /// Create from source without adding _sum suffix. 
- pub fn from_source_raw( + pub(crate) fn from_source_raw( name: &str, version: Version, - source: IterableBoxedVec, - len_source: IterableBoxedVec, + source: ReadableBoxedVec, + len_source: ReadableBoxedVec, ) -> Self { Self::from_source_inner(name, version, source, len_source) } @@ -48,32 +58,115 @@ where fn from_source_inner( name: &str, version: Version, - source: IterableBoxedVec, - len_source: IterableBoxedVec, + source: ReadableBoxedVec, + len_source: ReadableBoxedVec, ) -> Self { - Self(LazyVecFrom2::init( - name, - version + VERSION, + fn for_each_range< + I: VecIndex, + T: ComputedVecValue + JsonSchema, + S1I: VecIndex + FromCoarserIndex, + S2T: VecValue, + >( + from: usize, + to: usize, + source: &ReadableBoxedVec, + mapping: &ReadableBoxedVec, + f: &mut dyn FnMut(T), + ) { + let mapping_len = mapping.len(); + let source_len = source.len(); + let to = to.min(mapping_len); + if from >= to { + return; + } + let mut cursor = Cursor::from_dyn(&**source); + cursor.advance(S1I::min_from(I::from(from))); + for i in from..to { + let start = S1I::min_from(I::from(i)); + let end = S1I::max_from(I::from(i), source_len) + 1; + let count = end.saturating_sub(start); + if count == 0 || cursor.remaining() == 0 { + continue; + } + f(cursor.fold(count, T::from(0), |s, v| s + v)); + } + } + Self { + name: Arc::from(name), + version: version + VERSION, source, - len_source, - |i: I, source, len_source| { - if i.to_usize() >= len_source.vec_len() { - return None; - } - let mut sum = T::from(0); - let mut has_values = false; - for v in S1I::inclusive_range_from(i, source.vec_len()) - .flat_map(|i| source.get_at(i)) - { - sum += v; - has_values = true; - } - if !has_values { - return None; - } - Some(sum) - }, - )) + mapping: len_source, + for_each_range: for_each_range::, + } } } +impl LazySum +where + I: VecIndex, + T: ComputedVecValue + JsonSchema + 'static, +{ + pub(crate) fn from_height_source( + name: &str, + version: Version, + source: ReadableBoxedVec, + first_height: 
ReadableBoxedVec, + ) -> Self { + Self::from_height_source_inner(&format!("{name}_sum"), version, source, first_height) + } + + pub(crate) fn from_height_source_raw( + name: &str, + version: Version, + source: ReadableBoxedVec, + first_height: ReadableBoxedVec, + ) -> Self { + Self::from_height_source_inner(name, version, source, first_height) + } + + fn from_height_source_inner( + name: &str, + version: Version, + source: ReadableBoxedVec, + first_height: ReadableBoxedVec, + ) -> Self { + fn for_each_range( + from: usize, + to: usize, + source: &ReadableBoxedVec, + mapping: &ReadableBoxedVec, + f: &mut dyn FnMut(T), + ) { + let map_end = (to + 1).min(mapping.len()); + let heights = mapping.collect_range_dyn(from, map_end); + let source_len = source.len(); + let Some(&first_h) = heights.first() else { + return; + }; + let mut cursor = Cursor::from_dyn(&**source); + cursor.advance(first_h.to_usize()); + for idx in 0..(to - from) { + let Some(&cur_h) = heights.get(idx) else { + continue; + }; + let first = cur_h.to_usize(); + let next_first = heights + .get(idx + 1) + .map(|h| h.to_usize()) + .unwrap_or(source_len); + let count = next_first.saturating_sub(first); + if count == 0 || cursor.remaining() == 0 { + continue; + } + f(cursor.fold(count, T::from(0), |s, v| s + v)); + } + } + Self { + name: Arc::from(name), + version: version + VERSION, + source, + mapping: first_height, + for_each_range: for_each_range::, + } + } +} diff --git a/crates/brk_computer/src/internal/single/lazy/sum_cum.rs b/crates/brk_computer/src/internal/single/lazy/sum_cum.rs index 183e64547..6bef68309 100644 --- a/crates/brk_computer/src/internal/single/lazy/sum_cum.rs +++ b/crates/brk_computer/src/internal/single/lazy/sum_cum.rs @@ -1,9 +1,9 @@ //! Lazy sum + cumulative aggregation. 
use brk_traversable::Traversable; -use brk_types::Version; +use brk_types::{Height, Version}; use schemars::JsonSchema; -use vecdb::{FromCoarserIndex, IterableBoxedVec, VecIndex, VecValue}; +use vecdb::{FromCoarserIndex, ReadableBoxedVec, VecIndex, VecValue}; use crate::internal::{ComputedVecValue, LazyCumulative, LazySum}; @@ -30,31 +30,13 @@ where S1I: VecIndex + 'static + FromCoarserIndex, S2T: VecValue, { - pub fn from_sources( - name: &str, - version: Version, - sum_source: IterableBoxedVec, - cumulative_source: IterableBoxedVec, - len_source: IterableBoxedVec, - ) -> Self { - Self { - sum: LazySum::from_source(name, version + VERSION, sum_source, len_source.clone()), - cumulative: LazyCumulative::from_source( - name, - version + VERSION, - cumulative_source, - len_source, - ), - } - } - /// Create from sources without adding _sum suffix to sum vec. - pub fn from_sources_sum_raw( + pub(crate) fn from_sources_sum_raw( name: &str, version: Version, - sum_source: IterableBoxedVec, - cumulative_source: IterableBoxedVec, - len_source: IterableBoxedVec, + sum_source: ReadableBoxedVec, + cumulative_source: ReadableBoxedVec, + len_source: ReadableBoxedVec, ) -> Self { Self { sum: LazySum::from_source_raw(name, version + VERSION, sum_source, len_source.clone()), @@ -68,3 +50,31 @@ where } } +impl LazySumCum +where + I: VecIndex, + T: ComputedVecValue + JsonSchema + 'static, +{ + pub(crate) fn from_height_sources_sum_raw( + name: &str, + version: Version, + sum_source: ReadableBoxedVec, + cumulative_source: ReadableBoxedVec, + first_height: ReadableBoxedVec, + ) -> Self { + Self { + sum: LazySum::from_height_source_raw( + name, + version + VERSION, + sum_source, + first_height.clone(), + ), + cumulative: LazyCumulative::from_height_source( + name, + version + VERSION, + cumulative_source, + first_height, + ), + } + } +} diff --git a/crates/brk_computer/src/internal/single/lazy_transform/binary_full.rs 
b/crates/brk_computer/src/internal/single/lazy_transform/binary_full.rs index 750027017..ca80b71e8 100644 --- a/crates/brk_computer/src/internal/single/lazy_transform/binary_full.rs +++ b/crates/brk_computer/src/internal/single/lazy_transform/binary_full.rs @@ -7,7 +7,7 @@ use brk_traversable::Traversable; use brk_types::Version; use schemars::JsonSchema; -use vecdb::{BinaryTransform, IterableBoxedVec, IterableCloneableVec, LazyVecFrom2, VecIndex}; +use vecdb::{BinaryTransform, ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom2, VecIndex}; use crate::internal::{ComputedVecValue, Full}; @@ -43,11 +43,11 @@ where /// Create from Full source and a second source (e.g., price). /// /// The transform F is applied as `F(source1_stat, source2)` for each stat. - pub fn from_full_and_source>( + pub(crate) fn from_full_and_source>( name: &str, version: Version, source1: &Full, - source2: IterableBoxedVec, + source2: ReadableBoxedVec, ) -> Self { Self { average: LazyVecFrom2::transformed::( @@ -83,19 +83,19 @@ where } } - pub fn boxed_average(&self) -> IterableBoxedVec { - self.average.boxed_clone() + pub(crate) fn boxed_average(&self) -> ReadableBoxedVec { + self.average.read_only_boxed_clone() } - pub fn boxed_min(&self) -> IterableBoxedVec { - self.min.boxed_clone() + pub(crate) fn boxed_min(&self) -> ReadableBoxedVec { + self.min.read_only_boxed_clone() } - pub fn boxed_max(&self) -> IterableBoxedVec { - self.max.boxed_clone() + pub(crate) fn boxed_max(&self) -> ReadableBoxedVec { + self.max.read_only_boxed_clone() } - pub fn boxed_sum(&self) -> IterableBoxedVec { - self.sum.boxed_clone() + pub(crate) fn boxed_sum(&self) -> ReadableBoxedVec { + self.sum.read_only_boxed_clone() } } diff --git a/crates/brk_computer/src/internal/single/lazy_transform/binary_last.rs b/crates/brk_computer/src/internal/single/lazy_transform/binary_last.rs index 3efec0f74..44057d3b7 100644 --- a/crates/brk_computer/src/internal/single/lazy_transform/binary_last.rs +++ 
b/crates/brk_computer/src/internal/single/lazy_transform/binary_last.rs @@ -4,7 +4,7 @@ use brk_traversable::Traversable; use brk_types::Version; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{BinaryTransform, IterableBoxedVec, IterableCloneableVec, LazyVecFrom2, VecIndex}; +use vecdb::{BinaryTransform, ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom2, VecIndex}; use crate::internal::{ComputedVecValue, LazyLast}; @@ -24,7 +24,7 @@ where S1T: ComputedVecValue + JsonSchema, S2T: ComputedVecValue + JsonSchema, { - pub fn from_lazy_last< + pub(crate) fn from_lazy_last< F: BinaryTransform, S1I: VecIndex + 'static, S2I: VecIndex + 'static, @@ -43,16 +43,16 @@ where Self(LazyVecFrom2::transformed::( name, version, - source1.boxed_clone(), - source2.boxed_clone(), + source1.read_only_boxed_clone(), + source2.read_only_boxed_clone(), )) } - pub fn from_vecs>( + pub(crate) fn from_vecs>( name: &str, version: Version, - source1: IterableBoxedVec, - source2: IterableBoxedVec, + source1: ReadableBoxedVec, + source2: ReadableBoxedVec, ) -> Self { Self(LazyVecFrom2::transformed::( name, version, source1, source2, diff --git a/crates/brk_computer/src/internal/single/lazy_transform/binary_percentiles.rs b/crates/brk_computer/src/internal/single/lazy_transform/binary_percentiles.rs index 7f7e4f395..29a419f09 100644 --- a/crates/brk_computer/src/internal/single/lazy_transform/binary_percentiles.rs +++ b/crates/brk_computer/src/internal/single/lazy_transform/binary_percentiles.rs @@ -3,7 +3,7 @@ use brk_traversable::Traversable; use brk_types::Version; use schemars::JsonSchema; -use vecdb::{BinaryTransform, IterableBoxedVec, LazyVecFrom2, VecIndex}; +use vecdb::{BinaryTransform, ReadableBoxedVec, LazyVecFrom2, VecIndex}; use crate::internal::{ComputedVecValue, Percentiles}; @@ -29,11 +29,11 @@ where S1T: ComputedVecValue + JsonSchema, S2T: ComputedVecValue + JsonSchema, { - pub fn from_percentiles>( + pub(crate) fn from_percentiles>( name: &str, 
version: Version, source: &Percentiles, - source2: IterableBoxedVec, + source2: ReadableBoxedVec, ) -> Self { Self { pct10: LazyVecFrom2::transformed::( diff --git a/crates/brk_computer/src/internal/single/lazy_transform/binary_sum.rs b/crates/brk_computer/src/internal/single/lazy_transform/binary_sum.rs index f1dd6bf86..ed7c402e1 100644 --- a/crates/brk_computer/src/internal/single/lazy_transform/binary_sum.rs +++ b/crates/brk_computer/src/internal/single/lazy_transform/binary_sum.rs @@ -4,9 +4,9 @@ use brk_traversable::Traversable; use brk_types::Version; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{BinaryTransform, IterableBoxedVec, LazyVecFrom2, VecIndex}; +use vecdb::{BinaryTransform, ReadableBoxedVec, LazyVecFrom2, VecIndex}; -use crate::internal::{ComputedVecValue, SumVec}; +use crate::internal::ComputedVecValue; const VERSION: Version = Version::ZERO; @@ -26,27 +26,11 @@ where S1T: ComputedVecValue + JsonSchema, S2T: ComputedVecValue + JsonSchema, { - pub fn from_sum>( + pub(crate) fn from_boxed>( name: &str, version: Version, - source1: &SumVec, - source2: &SumVec, - ) -> Self { - let v = version + VERSION; - - Self(LazyVecFrom2::transformed::( - name, - v, - source1.boxed_clone(), - source2.boxed_clone(), - )) - } - - pub fn from_boxed>( - name: &str, - version: Version, - source1: IterableBoxedVec, - source2: IterableBoxedVec, + source1: ReadableBoxedVec, + source2: ReadableBoxedVec, ) -> Self { let v = version + VERSION; diff --git a/crates/brk_computer/src/internal/single/lazy_transform/binary_sum_cum.rs b/crates/brk_computer/src/internal/single/lazy_transform/binary_sum_cum.rs index 44b5d49e8..816b0f3c7 100644 --- a/crates/brk_computer/src/internal/single/lazy_transform/binary_sum_cum.rs +++ b/crates/brk_computer/src/internal/single/lazy_transform/binary_sum_cum.rs @@ -4,10 +4,10 @@ use brk_traversable::Traversable; use brk_types::Version; use schemars::JsonSchema; use vecdb::{ - BinaryTransform, IterableBoxedVec, 
IterableCloneableVec, LazyVecFrom2, VecIndex, VecValue, + BinaryTransform, LazyVecFrom2, ReadableBoxedVec, ReadableCloneableVec, VecIndex, VecValue, }; -use crate::internal::{ComputedVecValue, LazyFull, LastVec, SumCum}; +use crate::internal::{ComputedVecValue, LazyFull}; #[derive(Clone, Traversable)] pub struct LazyBinaryTransformSumCum @@ -28,83 +28,14 @@ where S1T: ComputedVecValue + JsonSchema, S2T: ComputedVecValue + JsonSchema, { - pub fn from_sum_cum>( - name: &str, - version: Version, - source1: &SumCum, - source2: &SumCum, - ) -> Self { - Self { - sum: LazyVecFrom2::transformed::( - &format!("{name}_sum"), - version, - source1.boxed_sum(), - source2.boxed_sum(), - ), - cumulative: LazyVecFrom2::transformed::( - &format!("{name}_cumulative"), - version, - source1.boxed_cumulative(), - source2.boxed_cumulative(), - ), - } - } - - /// Create from SumCum without adding _sum suffix. - pub fn from_sum_cum_sum_raw>( - name: &str, - version: Version, - source1: &SumCum, - source2: &SumCum, - ) -> Self { - Self { - sum: LazyVecFrom2::transformed::( - name, - version, - source1.boxed_sum(), - source2.boxed_sum(), - ), - cumulative: LazyVecFrom2::transformed::( - &format!("{name}_cumulative"), - version, - source1.boxed_cumulative(), - source2.boxed_cumulative(), - ), - } - } - - pub fn from_sources>( - name: &str, - version: Version, - sum_source1: IterableBoxedVec, - sum_source2: IterableBoxedVec, - cum_source1: IterableBoxedVec, - cum_source2: IterableBoxedVec, - ) -> Self { - Self { - sum: LazyVecFrom2::transformed::( - &format!("{name}_sum"), - version, - sum_source1, - sum_source2, - ), - cumulative: LazyVecFrom2::transformed::( - &format!("{name}_cumulative"), - version, - cum_source1, - cum_source2, - ), - } - } - /// Create from sources without adding _sum suffix. 
- pub fn from_sources_sum_raw>( + pub(crate) fn from_sources_sum_raw>( name: &str, version: Version, - sum_source1: IterableBoxedVec, - sum_source2: IterableBoxedVec, - cum_source1: IterableBoxedVec, - cum_source2: IterableBoxedVec, + sum_source1: ReadableBoxedVec, + sum_source2: ReadableBoxedVec, + cum_source1: ReadableBoxedVec, + cum_source2: ReadableBoxedVec, ) -> Self { Self { sum: LazyVecFrom2::transformed::(name, version, sum_source1, sum_source2), @@ -117,7 +48,7 @@ where } } - pub fn from_lazy_stats_aggregate( + pub(crate) fn from_lazy_stats_aggregate( name: &str, version: Version, source1: &LazyFull, @@ -134,125 +65,25 @@ where sum: LazyVecFrom2::transformed::( &format!("{name}_sum"), version, - source1.sum.boxed_clone(), - source2.sum.boxed_clone(), + source1.sum.read_only_boxed_clone(), + source2.sum.read_only_boxed_clone(), ), cumulative: LazyVecFrom2::transformed::( &format!("{name}_cumulative"), version, - source1.cumulative.boxed_clone(), - source2.cumulative.boxed_clone(), - ), - } - } - - /// Create from lazy stats aggregate without adding _sum suffix. 
- pub fn from_lazy_stats_aggregate_sum_raw( - name: &str, - version: Version, - source1: &LazyFull, - source2: &LazyFull, - ) -> Self - where - F: BinaryTransform, - S1I: VecIndex + 'static, - S1L: VecValue, - S2I: VecIndex + 'static, - S2L: VecValue, - { - Self { - sum: LazyVecFrom2::transformed::( - name, - version, - source1.sum.boxed_clone(), - source2.sum.boxed_clone(), - ), - cumulative: LazyVecFrom2::transformed::( - &format!("{name}_cumulative"), - version, - source1.cumulative.boxed_clone(), - source2.cumulative.boxed_clone(), - ), - } - } - - // --- Methods accepting SumCum + Last sources --- - - pub fn from_sum_cum_last>( - name: &str, - version: Version, - source1: &SumCum, - source2: &LastVec, - ) -> Self { - Self { - sum: LazyVecFrom2::transformed::( - &format!("{name}_sum"), - version, - source1.boxed_sum(), - source2.boxed_clone(), - ), - cumulative: LazyVecFrom2::transformed::( - &format!("{name}_cumulative"), - version, - source1.boxed_cumulative(), - source2.boxed_clone(), - ), - } - } - - /// Create from SumCum + Last without adding _sum suffix. 
- pub fn from_sum_cum_last_sum_raw>( - name: &str, - version: Version, - source1: &SumCum, - source2: &LastVec, - ) -> Self { - Self { - sum: LazyVecFrom2::transformed::( - name, - version, - source1.boxed_sum(), - source2.boxed_clone(), - ), - cumulative: LazyVecFrom2::transformed::( - &format!("{name}_cumulative"), - version, - source1.boxed_cumulative(), - source2.boxed_clone(), - ), - } - } - - pub fn from_sources_last>( - name: &str, - version: Version, - sum_source1: IterableBoxedVec, - cum_source1: IterableBoxedVec, - last_source: IterableBoxedVec, - ) -> Self { - Self { - sum: LazyVecFrom2::transformed::( - &format!("{name}_sum"), - version, - sum_source1, - last_source.clone(), - ), - cumulative: LazyVecFrom2::transformed::( - &format!("{name}_cumulative"), - version, - cum_source1, - last_source, + source1.cumulative.read_only_boxed_clone(), + source2.cumulative.read_only_boxed_clone(), ), } } /// Create from boxed SumCum + Last sources without adding _sum suffix. - pub fn from_sources_last_sum_raw>( + pub(crate) fn from_sources_last_sum_raw>( name: &str, version: Version, - sum_source1: IterableBoxedVec, - cum_source1: IterableBoxedVec, - last_source: IterableBoxedVec, + sum_source1: ReadableBoxedVec, + cum_source1: ReadableBoxedVec, + last_source: ReadableBoxedVec, ) -> Self { Self { sum: LazyVecFrom2::transformed::(name, version, sum_source1, last_source.clone()), diff --git a/crates/brk_computer/src/internal/single/lazy_transform/distribution.rs b/crates/brk_computer/src/internal/single/lazy_transform/distribution.rs index 7e0cf648d..7435a1827 100644 --- a/crates/brk_computer/src/internal/single/lazy_transform/distribution.rs +++ b/crates/brk_computer/src/internal/single/lazy_transform/distribution.rs @@ -5,9 +5,9 @@ use brk_traversable::Traversable; use brk_types::Version; use schemars::JsonSchema; -use vecdb::{LazyVecFrom1, UnaryTransform, VecIndex}; +use vecdb::{ReadableBoxedVec, LazyVecFrom1, UnaryTransform, VecIndex}; -use 
crate::internal::{ComputedVecValue, Distribution, Full}; +use crate::internal::ComputedVecValue; use super::LazyPercentiles; @@ -33,61 +33,24 @@ where T: ComputedVecValue + JsonSchema + 'static, S1T: ComputedVecValue + JsonSchema, { - pub fn from_stats_aggregate>( + #[allow(clippy::too_many_arguments)] + pub(crate) fn from_boxed>( name: &str, version: Version, - source: &Full, + average: ReadableBoxedVec, + min: ReadableBoxedVec, + max: ReadableBoxedVec, + pct10: ReadableBoxedVec, + pct25: ReadableBoxedVec, + median: ReadableBoxedVec, + pct75: ReadableBoxedVec, + pct90: ReadableBoxedVec, ) -> Self { Self { - average: LazyVecFrom1::transformed::( - &format!("{name}_average"), - version, - source.boxed_average(), - ), - min: LazyVecFrom1::transformed::( - &format!("{name}_min"), - version, - source.boxed_min(), - ), - max: LazyVecFrom1::transformed::( - &format!("{name}_max"), - version, - source.boxed_max(), - ), - percentiles: LazyPercentiles::from_percentiles::( - name, - version, - &source.distribution.percentiles, - ), - } - } - - pub fn from_distribution>( - name: &str, - version: Version, - source: &Distribution, - ) -> Self { - Self { - average: LazyVecFrom1::transformed::( - &format!("{name}_average"), - version, - source.boxed_average(), - ), - min: LazyVecFrom1::transformed::( - &format!("{name}_min"), - version, - source.boxed_min(), - ), - max: LazyVecFrom1::transformed::( - &format!("{name}_max"), - version, - source.boxed_max(), - ), - percentiles: LazyPercentiles::from_percentiles::( - name, - version, - &source.percentiles, - ), + average: LazyVecFrom1::transformed::(&format!("{name}_average"), version, average), + min: LazyVecFrom1::transformed::(&format!("{name}_min"), version, min), + max: LazyVecFrom1::transformed::(&format!("{name}_max"), version, max), + percentiles: LazyPercentiles::from_boxed::(name, version, pct10, pct25, median, pct75, pct90), } } } diff --git a/crates/brk_computer/src/internal/single/lazy_transform/full.rs 
b/crates/brk_computer/src/internal/single/lazy_transform/full.rs index 2d45dd90a..e67229587 100644 --- a/crates/brk_computer/src/internal/single/lazy_transform/full.rs +++ b/crates/brk_computer/src/internal/single/lazy_transform/full.rs @@ -3,7 +3,7 @@ use brk_traversable::Traversable; use brk_types::Version; use schemars::JsonSchema; -use vecdb::{LazyVecFrom1, UnaryTransform, VecIndex}; +use vecdb::{ReadableBoxedVec, LazyVecFrom1, UnaryTransform, VecIndex}; use crate::internal::{ComputedVecValue, Full}; @@ -31,43 +31,49 @@ where T: ComputedVecValue + JsonSchema + 'static, S1T: ComputedVecValue + JsonSchema, { - pub fn from_stats_aggregate>( + pub(crate) fn from_stats_aggregate>( name: &str, version: Version, source: &Full, ) -> Self { - Self { - average: LazyVecFrom1::transformed::( - &format!("{name}_average"), - version, - source.boxed_average(), - ), - min: LazyVecFrom1::transformed::( - &format!("{name}_min"), - version, - source.boxed_min(), - ), - max: LazyVecFrom1::transformed::( - &format!("{name}_max"), - version, - source.boxed_max(), - ), - percentiles: LazyPercentiles::from_percentiles::( - name, - version, - &source.distribution.percentiles, - ), - sum: LazyVecFrom1::transformed::( - &format!("{name}_sum"), - version, - source.boxed_sum(), - ), - cumulative: LazyVecFrom1::transformed::( - &format!("{name}_cumulative"), - version, - source.boxed_cumulative(), - ), - } + Self::from_boxed::( + name, + version, + source.boxed_average(), + source.boxed_min(), + source.boxed_max(), + source.distribution.percentiles.boxed_pct10(), + source.distribution.percentiles.boxed_pct25(), + source.distribution.percentiles.boxed_median(), + source.distribution.percentiles.boxed_pct75(), + source.distribution.percentiles.boxed_pct90(), + source.boxed_sum(), + source.boxed_cumulative(), + ) } + #[allow(clippy::too_many_arguments)] + pub(crate) fn from_boxed>( + name: &str, + version: Version, + average: ReadableBoxedVec, + min: ReadableBoxedVec, + max: ReadableBoxedVec, 
+ pct10: ReadableBoxedVec, + pct25: ReadableBoxedVec, + median: ReadableBoxedVec, + pct75: ReadableBoxedVec, + pct90: ReadableBoxedVec, + sum: ReadableBoxedVec, + cumulative: ReadableBoxedVec, + ) -> Self { + Self { + average: LazyVecFrom1::transformed::(&format!("{name}_average"), version, average), + min: LazyVecFrom1::transformed::(&format!("{name}_min"), version, min), + max: LazyVecFrom1::transformed::(&format!("{name}_max"), version, max), + percentiles: LazyPercentiles::from_boxed::(name, version, pct10, pct25, median, pct75, pct90), + sum: LazyVecFrom1::transformed::(&format!("{name}_sum"), version, sum), + cumulative: LazyVecFrom1::transformed::(&format!("{name}_cumulative"), version, cumulative), + } + } } diff --git a/crates/brk_computer/src/internal/single/lazy_transform/last.rs b/crates/brk_computer/src/internal/single/lazy_transform/last.rs index 04e315322..2cd301e8c 100644 --- a/crates/brk_computer/src/internal/single/lazy_transform/last.rs +++ b/crates/brk_computer/src/internal/single/lazy_transform/last.rs @@ -4,11 +4,9 @@ use brk_traversable::Traversable; use brk_types::Version; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{ - IterableBoxedVec, IterableCloneableVec, LazyVecFrom1, UnaryTransform, VecIndex, VecValue, -}; +use vecdb::{LazyVecFrom1, ReadableBoxedVec, UnaryTransform, VecIndex}; -use crate::internal::{ComputedVecValue, LastVec, LazyLast}; +use crate::internal::ComputedVecValue; #[derive(Clone, Deref, DerefMut, Traversable)] #[traversable(transparent)] @@ -24,34 +22,10 @@ where T: ComputedVecValue + JsonSchema + 'static, S1T: ComputedVecValue + JsonSchema, { - pub fn from_last_vec>( + pub(crate) fn from_boxed>( name: &str, version: Version, - source: &LastVec, - ) -> Self { - Self(LazyVecFrom1::transformed::( - name, - version, - source.boxed_clone(), - )) - } - - pub fn from_lazy_last, S1I: VecIndex + 'static, S1S2T: VecValue>( - name: &str, - version: Version, - source: &LazyLast, - ) -> Self { - 
Self(LazyVecFrom1::transformed::( - name, - version, - source.boxed_clone(), - )) - } - - pub fn from_boxed>( - name: &str, - version: Version, - last_source: IterableBoxedVec, + last_source: ReadableBoxedVec, ) -> Self { Self(LazyVecFrom1::transformed::(name, version, last_source)) } diff --git a/crates/brk_computer/src/internal/single/lazy_transform/mod.rs b/crates/brk_computer/src/internal/single/lazy_transform/mod.rs index 2305eee15..ba02a31ac 100644 --- a/crates/brk_computer/src/internal/single/lazy_transform/mod.rs +++ b/crates/brk_computer/src/internal/single/lazy_transform/mod.rs @@ -7,8 +7,6 @@ mod distribution; mod full; mod last; mod percentiles; -mod spread; -mod stats; mod sum; mod sum_cum; @@ -21,7 +19,5 @@ pub use distribution::*; pub use full::*; pub use last::*; pub use percentiles::*; -pub use spread::*; -pub use stats::*; pub use sum::*; pub use sum_cum::*; diff --git a/crates/brk_computer/src/internal/single/lazy_transform/percentiles.rs b/crates/brk_computer/src/internal/single/lazy_transform/percentiles.rs index 28dd8d6dd..a47c6d40f 100644 --- a/crates/brk_computer/src/internal/single/lazy_transform/percentiles.rs +++ b/crates/brk_computer/src/internal/single/lazy_transform/percentiles.rs @@ -3,9 +3,9 @@ use brk_traversable::Traversable; use brk_types::Version; use schemars::JsonSchema; -use vecdb::{LazyVecFrom1, UnaryTransform, VecIndex}; +use vecdb::{ReadableBoxedVec, LazyVecFrom1, UnaryTransform, VecIndex}; -use crate::internal::{ComputedVecValue, Percentiles}; +use crate::internal::ComputedVecValue; #[derive(Clone, Traversable)] pub struct LazyPercentiles @@ -27,37 +27,22 @@ where T: ComputedVecValue + JsonSchema + 'static, S1T: ComputedVecValue + JsonSchema, { - pub fn from_percentiles>( + #[allow(clippy::too_many_arguments)] + pub(crate) fn from_boxed>( name: &str, version: Version, - source: &Percentiles, + pct10: ReadableBoxedVec, + pct25: ReadableBoxedVec, + median: ReadableBoxedVec, + pct75: ReadableBoxedVec, + pct90: 
ReadableBoxedVec, ) -> Self { Self { - pct10: LazyVecFrom1::transformed::( - &format!("{name}_pct10"), - version, - source.boxed_pct10(), - ), - pct25: LazyVecFrom1::transformed::( - &format!("{name}_pct25"), - version, - source.boxed_pct25(), - ), - median: LazyVecFrom1::transformed::( - &format!("{name}_median"), - version, - source.boxed_median(), - ), - pct75: LazyVecFrom1::transformed::( - &format!("{name}_pct75"), - version, - source.boxed_pct75(), - ), - pct90: LazyVecFrom1::transformed::( - &format!("{name}_pct90"), - version, - source.boxed_pct90(), - ), + pct10: LazyVecFrom1::transformed::(&format!("{name}_pct10"), version, pct10), + pct25: LazyVecFrom1::transformed::(&format!("{name}_pct25"), version, pct25), + median: LazyVecFrom1::transformed::(&format!("{name}_median"), version, median), + pct75: LazyVecFrom1::transformed::(&format!("{name}_pct75"), version, pct75), + pct90: LazyVecFrom1::transformed::(&format!("{name}_pct90"), version, pct90), } } } diff --git a/crates/brk_computer/src/internal/single/lazy_transform/spread.rs b/crates/brk_computer/src/internal/single/lazy_transform/spread.rs deleted file mode 100644 index 729e6d17b..000000000 --- a/crates/brk_computer/src/internal/single/lazy_transform/spread.rs +++ /dev/null @@ -1,49 +0,0 @@ -//! Lazy unary transform for Spread metrics. -//! Has average, min, max only - no percentiles, no sum/cumulative. -//! Use for ratio/percentage metrics where you only need basic range info. - -use brk_traversable::Traversable; -use brk_types::Version; -use schemars::JsonSchema; -use vecdb::{IterableBoxedVec, LazyVecFrom1, UnaryTransform, VecIndex}; - -use crate::internal::ComputedVecValue; - -/// Spread stats: average, min, max only. -/// Excludes percentiles (no detailed distribution) and sum/cumulative (meaningless for ratios). 
-#[derive(Clone, Traversable)] -pub struct LazyTransformSpread -where - I: VecIndex, - T: ComputedVecValue + PartialOrd + JsonSchema, - S1T: ComputedVecValue, -{ - pub average: LazyVecFrom1, - pub min: LazyVecFrom1, - pub max: LazyVecFrom1, -} - -impl LazyTransformSpread -where - I: VecIndex, - T: ComputedVecValue + JsonSchema + 'static, - S1T: ComputedVecValue + JsonSchema, -{ - pub fn from_boxed>( - name: &str, - version: Version, - average_source: IterableBoxedVec, - min_source: IterableBoxedVec, - max_source: IterableBoxedVec, - ) -> Self { - Self { - average: LazyVecFrom1::transformed::( - &format!("{name}_average"), - version, - average_source, - ), - min: LazyVecFrom1::transformed::(&format!("{name}_min"), version, min_source), - max: LazyVecFrom1::transformed::(&format!("{name}_max"), version, max_source), - } - } -} diff --git a/crates/brk_computer/src/internal/single/lazy_transform/stats.rs b/crates/brk_computer/src/internal/single/lazy_transform/stats.rs deleted file mode 100644 index ff0fec161..000000000 --- a/crates/brk_computer/src/internal/single/lazy_transform/stats.rs +++ /dev/null @@ -1,56 +0,0 @@ -//! Lazy unary transform for Stats (without percentiles). - -use brk_traversable::Traversable; -use brk_types::Version; -use schemars::JsonSchema; -use vecdb::{IterableBoxedVec, LazyVecFrom1, UnaryTransform, VecIndex}; - -use crate::internal::ComputedVecValue; - -/// Lazy transform matching Stats structure: average, min, max, sum, cumulative (no percentiles). 
-#[derive(Clone, Traversable)] -pub struct LazyTransformStats -where - I: VecIndex, - T: ComputedVecValue + PartialOrd + JsonSchema, - S1T: ComputedVecValue, -{ - pub average: LazyVecFrom1, - pub min: LazyVecFrom1, - pub max: LazyVecFrom1, - pub sum: LazyVecFrom1, - pub cumulative: LazyVecFrom1, -} - -impl LazyTransformStats -where - I: VecIndex, - T: ComputedVecValue + JsonSchema + 'static, - S1T: ComputedVecValue + JsonSchema, -{ - pub fn from_boxed>( - name: &str, - version: Version, - average_source: IterableBoxedVec, - min_source: IterableBoxedVec, - max_source: IterableBoxedVec, - sum_source: IterableBoxedVec, - cumulative_source: IterableBoxedVec, - ) -> Self { - Self { - average: LazyVecFrom1::transformed::( - &format!("{name}_average"), - version, - average_source, - ), - min: LazyVecFrom1::transformed::(&format!("{name}_min"), version, min_source), - max: LazyVecFrom1::transformed::(&format!("{name}_max"), version, max_source), - sum: LazyVecFrom1::transformed::(&format!("{name}_sum"), version, sum_source), - cumulative: LazyVecFrom1::transformed::( - &format!("{name}_cumulative"), - version, - cumulative_source, - ), - } - } -} diff --git a/crates/brk_computer/src/internal/single/lazy_transform/sum.rs b/crates/brk_computer/src/internal/single/lazy_transform/sum.rs index 651bc79f3..ebbdce24f 100644 --- a/crates/brk_computer/src/internal/single/lazy_transform/sum.rs +++ b/crates/brk_computer/src/internal/single/lazy_transform/sum.rs @@ -4,9 +4,9 @@ use brk_traversable::Traversable; use brk_types::Version; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{IterableBoxedVec, LazyVecFrom1, UnaryTransform, VecIndex}; +use vecdb::{ReadableBoxedVec, LazyVecFrom1, UnaryTransform, VecIndex}; -use crate::internal::{ComputedVecValue, SumVec}; +use crate::internal::ComputedVecValue; #[derive(Clone, Deref, DerefMut, Traversable)] #[traversable(wrap = "sum")] @@ -22,22 +22,10 @@ where T: ComputedVecValue + JsonSchema + 'static, S1T: 
ComputedVecValue + JsonSchema, { - pub fn from_sum_vec>( + pub(crate) fn from_boxed>( name: &str, version: Version, - source: &SumVec, - ) -> Self { - Self(LazyVecFrom1::transformed::( - name, - version, - source.boxed_clone(), - )) - } - - pub fn from_boxed>( - name: &str, - version: Version, - sum_source: IterableBoxedVec, + sum_source: ReadableBoxedVec, ) -> Self { Self(LazyVecFrom1::transformed::(name, version, sum_source)) } diff --git a/crates/brk_computer/src/internal/single/lazy_transform/sum_cum.rs b/crates/brk_computer/src/internal/single/lazy_transform/sum_cum.rs index 885adc252..73570d241 100644 --- a/crates/brk_computer/src/internal/single/lazy_transform/sum_cum.rs +++ b/crates/brk_computer/src/internal/single/lazy_transform/sum_cum.rs @@ -3,9 +3,9 @@ use brk_traversable::Traversable; use brk_types::Version; use schemars::JsonSchema; -use vecdb::{IterableBoxedVec, LazyVecFrom1, UnaryTransform, VecIndex}; +use vecdb::{ReadableBoxedVec, LazyVecFrom1, UnaryTransform, VecIndex}; -use crate::internal::{ComputedVecValue, SumCum}; +use crate::internal::ComputedVecValue; #[derive(Clone, Traversable)] pub struct LazyTransformSumCum @@ -24,63 +24,11 @@ where T: ComputedVecValue + JsonSchema + 'static, S1T: ComputedVecValue + JsonSchema, { - pub fn from_sum_cum>( + pub(crate) fn from_boxed_sum_raw>( name: &str, version: Version, - source: &SumCum, - ) -> Self { - Self { - sum: LazyVecFrom1::transformed::( - &format!("{name}_sum"), - version, - source.boxed_sum(), - ), - cumulative: LazyVecFrom1::transformed::( - &format!("{name}_cumulative"), - version, - source.boxed_cumulative(), - ), - } - } - - /// Create from SumCum without adding _sum suffix. 
- pub fn from_sum_cum_sum_raw>( - name: &str, - version: Version, - source: &SumCum, - ) -> Self { - Self { - sum: LazyVecFrom1::transformed::(name, version, source.boxed_sum()), - cumulative: LazyVecFrom1::transformed::( - &format!("{name}_cumulative"), - version, - source.boxed_cumulative(), - ), - } - } - - pub fn from_boxed>( - name: &str, - version: Version, - sum_source: IterableBoxedVec, - cumulative_source: IterableBoxedVec, - ) -> Self { - Self { - sum: LazyVecFrom1::transformed::(&format!("{name}_sum"), version, sum_source), - cumulative: LazyVecFrom1::transformed::( - &format!("{name}_cumulative"), - version, - cumulative_source, - ), - } - } - - /// Create from boxed sources without adding _sum suffix. - pub fn from_boxed_sum_raw>( - name: &str, - version: Version, - sum_source: IterableBoxedVec, - cumulative_source: IterableBoxedVec, + sum_source: ReadableBoxedVec, + cumulative_source: ReadableBoxedVec, ) -> Self { Self { sum: LazyVecFrom1::transformed::(name, version, sum_source), diff --git a/crates/brk_computer/src/internal/single/mod.rs b/crates/brk_computer/src/internal/single/mod.rs index 0324abb84..2b14269a9 100644 --- a/crates/brk_computer/src/internal/single/mod.rs +++ b/crates/brk_computer/src/internal/single/mod.rs @@ -1,6 +1,5 @@ //! Single-index types and primitives. 
-mod difficultyepoch; mod group; mod height; mod lazy; @@ -9,7 +8,6 @@ mod transform; mod tx; mod vec; -pub use difficultyepoch::*; pub use group::*; pub use height::*; pub use lazy::*; diff --git a/crates/brk_computer/src/internal/single/transform/block_count_target.rs b/crates/brk_computer/src/internal/single/transform/block_count_target.rs new file mode 100644 index 000000000..52fc4ea07 --- /dev/null +++ b/crates/brk_computer/src/internal/single/transform/block_count_target.rs @@ -0,0 +1,142 @@ +use brk_types::{ + Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, Minute1, Minute10, + Minute30, Minute5, Month1, Month3, Month6, StoredU64, Week1, Year1, Year10, +}; +use vecdb::UnaryTransform; + +use crate::blocks::{ + TARGET_BLOCKS_PER_DAY, TARGET_BLOCKS_PER_DAY3, TARGET_BLOCKS_PER_DECADE, + TARGET_BLOCKS_PER_HALVING, TARGET_BLOCKS_PER_HOUR1, TARGET_BLOCKS_PER_HOUR12, + TARGET_BLOCKS_PER_HOUR4, TARGET_BLOCKS_PER_MINUTE1, TARGET_BLOCKS_PER_MINUTE10, + TARGET_BLOCKS_PER_MINUTE30, TARGET_BLOCKS_PER_MINUTE5, TARGET_BLOCKS_PER_MONTH, + TARGET_BLOCKS_PER_QUARTER, TARGET_BLOCKS_PER_SEMESTER, TARGET_BLOCKS_PER_WEEK, + TARGET_BLOCKS_PER_YEAR, +}; + +pub struct BlockCountTarget; + +impl UnaryTransform for BlockCountTarget { + #[inline(always)] + fn apply(_: Height) -> StoredU64 { + StoredU64::from(TARGET_BLOCKS_PER_DAY) + } +} + +impl UnaryTransform for BlockCountTarget { + #[inline(always)] + fn apply(_: Minute1) -> StoredU64 { + StoredU64::from(TARGET_BLOCKS_PER_MINUTE1) + } +} + +impl UnaryTransform for BlockCountTarget { + #[inline(always)] + fn apply(_: Minute5) -> StoredU64 { + StoredU64::from(TARGET_BLOCKS_PER_MINUTE5) + } +} + +impl UnaryTransform for BlockCountTarget { + #[inline(always)] + fn apply(_: Minute10) -> StoredU64 { + StoredU64::from(TARGET_BLOCKS_PER_MINUTE10) + } +} + +impl UnaryTransform for BlockCountTarget { + #[inline(always)] + fn apply(_: Minute30) -> StoredU64 { + StoredU64::from(TARGET_BLOCKS_PER_MINUTE30) + } +} + +impl 
UnaryTransform for BlockCountTarget { + #[inline(always)] + fn apply(_: Hour1) -> StoredU64 { + StoredU64::from(TARGET_BLOCKS_PER_HOUR1) + } +} + +impl UnaryTransform for BlockCountTarget { + #[inline(always)] + fn apply(_: Hour4) -> StoredU64 { + StoredU64::from(TARGET_BLOCKS_PER_HOUR4) + } +} + +impl UnaryTransform for BlockCountTarget { + #[inline(always)] + fn apply(_: Hour12) -> StoredU64 { + StoredU64::from(TARGET_BLOCKS_PER_HOUR12) + } +} + +impl UnaryTransform for BlockCountTarget { + #[inline(always)] + fn apply(_: Day1) -> StoredU64 { + StoredU64::from(TARGET_BLOCKS_PER_DAY) + } +} + +impl UnaryTransform for BlockCountTarget { + #[inline(always)] + fn apply(_: Day3) -> StoredU64 { + StoredU64::from(TARGET_BLOCKS_PER_DAY3) + } +} + +impl UnaryTransform for BlockCountTarget { + #[inline(always)] + fn apply(_: Week1) -> StoredU64 { + StoredU64::from(TARGET_BLOCKS_PER_WEEK) + } +} + +impl UnaryTransform for BlockCountTarget { + #[inline(always)] + fn apply(_: Month1) -> StoredU64 { + StoredU64::from(TARGET_BLOCKS_PER_MONTH) + } +} + +impl UnaryTransform for BlockCountTarget { + #[inline(always)] + fn apply(_: Month3) -> StoredU64 { + StoredU64::from(TARGET_BLOCKS_PER_QUARTER) + } +} + +impl UnaryTransform for BlockCountTarget { + #[inline(always)] + fn apply(_: Month6) -> StoredU64 { + StoredU64::from(TARGET_BLOCKS_PER_SEMESTER) + } +} + +impl UnaryTransform for BlockCountTarget { + #[inline(always)] + fn apply(_: Year1) -> StoredU64 { + StoredU64::from(TARGET_BLOCKS_PER_YEAR) + } +} + +impl UnaryTransform for BlockCountTarget { + #[inline(always)] + fn apply(_: Year10) -> StoredU64 { + StoredU64::from(TARGET_BLOCKS_PER_DECADE) + } +} + +impl UnaryTransform for BlockCountTarget { + #[inline(always)] + fn apply(_: HalvingEpoch) -> StoredU64 { + StoredU64::from(TARGET_BLOCKS_PER_HALVING) + } +} + +impl UnaryTransform for BlockCountTarget { + #[inline(always)] + fn apply(_: DifficultyEpoch) -> StoredU64 { + StoredU64::from(2016u64) + } +} diff --git 
a/crates/brk_computer/src/internal/single/transform/cents_unsigned_to_dollars.rs b/crates/brk_computer/src/internal/single/transform/cents_to_dollars.rs similarity index 53% rename from crates/brk_computer/src/internal/single/transform/cents_unsigned_to_dollars.rs rename to crates/brk_computer/src/internal/single/transform/cents_to_dollars.rs index 6e36040a2..b62cf003a 100644 --- a/crates/brk_computer/src/internal/single/transform/cents_unsigned_to_dollars.rs +++ b/crates/brk_computer/src/internal/single/transform/cents_to_dollars.rs @@ -1,12 +1,12 @@ -use brk_types::{CentsUnsigned, Dollars}; +use brk_types::{Cents, Dollars}; use vecdb::UnaryTransform; /// CentsUnsigned -> Dollars (convert cents to dollars for display) pub struct CentsUnsignedToDollars; -impl UnaryTransform for CentsUnsignedToDollars { +impl UnaryTransform for CentsUnsignedToDollars { #[inline(always)] - fn apply(cents: CentsUnsigned) -> Dollars { + fn apply(cents: Cents) -> Dollars { cents.into() } } diff --git a/crates/brk_computer/src/internal/single/transform/cents_to_sats.rs b/crates/brk_computer/src/internal/single/transform/cents_to_sats.rs new file mode 100644 index 000000000..e407a7c64 --- /dev/null +++ b/crates/brk_computer/src/internal/single/transform/cents_to_sats.rs @@ -0,0 +1,17 @@ +use brk_types::{Cents, Dollars, Sats}; +use vecdb::UnaryTransform; + +/// CentsUnsigned -> Sats (sats per dollar: 1 BTC / price) +pub struct CentsUnsignedToSats; + +impl UnaryTransform for CentsUnsignedToSats { + #[inline(always)] + fn apply(cents: Cents) -> Sats { + let dollars = Dollars::from(cents); + if dollars == Dollars::ZERO { + Sats::ZERO + } else { + Sats::ONE_BTC / dollars + } + } +} diff --git a/crates/brk_computer/src/internal/single/transform/cents_unsigned_to_sats_fract.rs b/crates/brk_computer/src/internal/single/transform/cents_unsigned_to_sats_fract.rs deleted file mode 100644 index 731e090e6..000000000 --- 
a/crates/brk_computer/src/internal/single/transform/cents_unsigned_to_sats_fract.rs +++ /dev/null @@ -1,19 +0,0 @@ -use brk_types::{CentsUnsigned, SatsFract}; -use vecdb::UnaryTransform; - -/// CentsUnsigned -> SatsFract (exchange rate: sats per dollar at this price level) -/// Formula: sats = 100_000_000 / dollars = 100_000_000 / (cents / 100) = 10_000_000_000 / cents -pub struct CentsUnsignedToSatsFract; - -impl UnaryTransform for CentsUnsignedToSatsFract { - #[inline(always)] - fn apply(cents: CentsUnsigned) -> SatsFract { - let cents_f64 = cents.inner() as f64; - if cents_f64 == 0.0 { - SatsFract::NAN - } else { - // sats = 1 BTC * 100 / cents = 10_000_000_000 / cents - SatsFract::new(SatsFract::SATS_PER_BTC * 100.0 / cents_f64) - } - } -} diff --git a/crates/brk_computer/src/internal/single/transform/close_price_times_sats.rs b/crates/brk_computer/src/internal/single/transform/close_price_times_sats.rs index bf38c0f4a..f0e1e65e1 100644 --- a/crates/brk_computer/src/internal/single/transform/close_price_times_sats.rs +++ b/crates/brk_computer/src/internal/single/transform/close_price_times_sats.rs @@ -1,13 +1,12 @@ -use brk_types::{Bitcoin, Close, Dollars, Sats}; +use brk_types::{Bitcoin, Dollars, Sats}; use vecdb::BinaryTransform; -/// Close * Sats -> Dollars (price × sats / 1e8) -/// Same as PriceTimesSats but accepts Close price source. 
-pub struct ClosePriceTimesSats; +/// Dollars * Sats -> Dollars (price × sats / 1e8) +pub struct PriceTimesSats; -impl BinaryTransform, Sats, Dollars> for ClosePriceTimesSats { +impl BinaryTransform for PriceTimesSats { #[inline(always)] - fn apply(price: Close, sats: Sats) -> Dollars { - *price * Bitcoin::from(sats) + fn apply(price: Dollars, sats: Sats) -> Dollars { + price * Bitcoin::from(sats) } } diff --git a/crates/brk_computer/src/internal/single/transform/half_close_price_times_sats.rs b/crates/brk_computer/src/internal/single/transform/half_close_price_times_sats.rs index 069d76720..0ff800da4 100644 --- a/crates/brk_computer/src/internal/single/transform/half_close_price_times_sats.rs +++ b/crates/brk_computer/src/internal/single/transform/half_close_price_times_sats.rs @@ -1,13 +1,12 @@ -use brk_types::{Bitcoin, Close, Dollars, Sats}; +use brk_types::{Bitcoin, Dollars, Sats}; use vecdb::BinaryTransform; -/// Close * Sats -> Dollars/2 (price × sats / 1e8 / 2) -/// Computes halved dollars directly from sats, avoiding lazy-from-lazy chains. 
-pub struct HalfClosePriceTimesSats; +/// Dollars * Sats -> Dollars/2 (price × sats / 1e8 / 2) +pub struct HalfPriceTimesSats; -impl BinaryTransform, Sats, Dollars> for HalfClosePriceTimesSats { +impl BinaryTransform for HalfPriceTimesSats { #[inline(always)] - fn apply(price: Close, sats: Sats) -> Dollars { - (*price * Bitcoin::from(sats)).halved() + fn apply(price: Dollars, sats: Sats) -> Dollars { + (price * Bitcoin::from(sats)).halved() } } diff --git a/crates/brk_computer/src/internal/single/transform/mod.rs b/crates/brk_computer/src/internal/single/transform/mod.rs index 019c8a4af..06e10ac56 100644 --- a/crates/brk_computer/src/internal/single/transform/mod.rs +++ b/crates/brk_computer/src/internal/single/transform/mod.rs @@ -1,5 +1,6 @@ -mod cents_unsigned_to_dollars; -mod cents_unsigned_to_sats_fract; +mod block_count_target; +mod cents_to_dollars; +mod cents_to_sats; mod close_price_times_sats; mod difference_f32; mod dollar_halve; @@ -11,7 +12,6 @@ mod dollars_squared_divide; mod dollars_to_sats_fract; mod f32_identity; mod half_close_price_times_sats; -mod ohlc; mod percentage_diff_close_dollars; mod percentage_dollars_f32; mod percentage_dollars_f32_neg; @@ -20,6 +20,7 @@ mod percentage_u32_f32; mod percentage_u64_f32; mod price_times_ratio; mod ratio32; +mod ratio64; mod ratio_f32; mod ratio_u64_f32; mod return_f32_tenths; @@ -40,9 +41,11 @@ mod volatility_sqrt30; mod volatility_sqrt365; mod volatility_sqrt7; mod weight_to_fullness; +mod weight_to_vbytes; -pub use cents_unsigned_to_dollars::*; -pub use cents_unsigned_to_sats_fract::*; +pub use block_count_target::*; +pub use cents_to_dollars::*; +pub use cents_to_sats::*; pub use close_price_times_sats::*; pub use difference_f32::*; pub use dollar_halve::*; @@ -54,7 +57,6 @@ pub use dollars_squared_divide::*; pub use dollars_to_sats_fract::*; pub use f32_identity::*; pub use half_close_price_times_sats::*; -pub use ohlc::*; pub use percentage_diff_close_dollars::*; pub use percentage_dollars_f32::*; 
pub use percentage_dollars_f32_neg::*; @@ -65,6 +67,7 @@ pub use price_times_ratio::*; pub use ratio_f32::*; pub use ratio_u64_f32::*; pub use ratio32::*; +pub use ratio64::*; pub use return_f32_tenths::*; pub use return_i8::*; pub use return_u16::*; @@ -83,3 +86,4 @@ pub use volatility_sqrt7::*; pub use volatility_sqrt30::*; pub use volatility_sqrt365::*; pub use weight_to_fullness::*; +pub use weight_to_vbytes::*; diff --git a/crates/brk_computer/src/internal/single/transform/ohlc.rs b/crates/brk_computer/src/internal/single/transform/ohlc.rs deleted file mode 100644 index 1bf5e3fb7..000000000 --- a/crates/brk_computer/src/internal/single/transform/ohlc.rs +++ /dev/null @@ -1,21 +0,0 @@ -//! Lazy OHLC component extractors. - -use brk_traversable::Traversable; -use brk_types::{Close, High, Low, Open}; -use schemars::JsonSchema; -use serde::Serialize; -use vecdb::{BytesVecValue, Formattable, LazyVecFrom1, VecIndex}; - -/// Lazy OHLC component extractors for a single index type. -#[derive(Clone, Traversable)] -pub struct LazyOHLC -where - I: VecIndex + BytesVecValue + Formattable + Serialize + JsonSchema + 'static, - T: BytesVecValue + Formattable + Serialize + JsonSchema + 'static, - SourceT: BytesVecValue + Formattable + Serialize + JsonSchema + 'static, -{ - pub open: LazyVecFrom1, I, SourceT>, - pub high: LazyVecFrom1, I, SourceT>, - pub low: LazyVecFrom1, I, SourceT>, - pub close: LazyVecFrom1, I, SourceT>, -} diff --git a/crates/brk_computer/src/internal/single/transform/percentage_diff_close_dollars.rs b/crates/brk_computer/src/internal/single/transform/percentage_diff_close_dollars.rs index 64446c8e9..0d4ac860a 100644 --- a/crates/brk_computer/src/internal/single/transform/percentage_diff_close_dollars.rs +++ b/crates/brk_computer/src/internal/single/transform/percentage_diff_close_dollars.rs @@ -1,18 +1,16 @@ -use brk_types::{Close, Dollars, StoredF32}; +use brk_types::{Dollars, StoredF32}; use vecdb::BinaryTransform; -/// (Close, Dollars) -> StoredF32 
percentage difference ((a/b - 1) × 100) -/// Used for DCA returns: (price / dca_average_price - 1) × 100 -/// Also used for drawdown: (close / ath - 1) × 100 (note: drawdown is typically negative) -pub struct PercentageDiffCloseDollars; +/// (Dollars, Dollars) -> StoredF32 percentage difference ((a/b - 1) × 100) +pub struct PercentageDiffDollars; -impl BinaryTransform, Dollars, StoredF32> for PercentageDiffCloseDollars { +impl BinaryTransform for PercentageDiffDollars { #[inline(always)] - fn apply(close: Close, base: Dollars) -> StoredF32 { + fn apply(close: Dollars, base: Dollars) -> StoredF32 { if base == Dollars::ZERO { StoredF32::default() } else { - StoredF32::from((**close / *base - 1.0) * 100.0) + StoredF32::from((*close / *base - 1.0) * 100.0) } } } diff --git a/crates/brk_computer/src/internal/single/transform/ratio64.rs b/crates/brk_computer/src/internal/single/transform/ratio64.rs new file mode 100644 index 000000000..f86e60e21 --- /dev/null +++ b/crates/brk_computer/src/internal/single/transform/ratio64.rs @@ -0,0 +1,13 @@ +use brk_types::{Dollars, StoredF64}; +use vecdb::BinaryTransform; + +/// (Dollars, Dollars) -> StoredF64 ratio +/// Used for computing ratios like SOPR where f64 precision is needed. 
+pub struct Ratio64; + +impl BinaryTransform for Ratio64 { + #[inline(always)] + fn apply(numerator: Dollars, denominator: Dollars) -> StoredF64 { + numerator / denominator + } +} diff --git a/crates/brk_computer/src/internal/single/transform/sats_times_close_price.rs b/crates/brk_computer/src/internal/single/transform/sats_times_close_price.rs index 3df760477..7474ee922 100644 --- a/crates/brk_computer/src/internal/single/transform/sats_times_close_price.rs +++ b/crates/brk_computer/src/internal/single/transform/sats_times_close_price.rs @@ -1,14 +1,12 @@ -use brk_types::{Bitcoin, Close, Dollars, Sats}; +use brk_types::{Bitcoin, Dollars, Sats}; use vecdb::BinaryTransform; -/// Sats * Close -> Dollars (sats / 1e8 × price) -/// Same as ClosePriceTimesSats but with swapped argument order. -/// Use when sats is the first source (e.g., Full) and price is second. -pub struct SatsTimesClosePrice; +/// Sats * Dollars -> Dollars (sats / 1e8 × price) +pub struct SatsTimesPrice; -impl BinaryTransform, Dollars> for SatsTimesClosePrice { +impl BinaryTransform for SatsTimesPrice { #[inline(always)] - fn apply(sats: Sats, price: Close) -> Dollars { - *price * Bitcoin::from(sats) + fn apply(sats: Sats, price: Dollars) -> Dollars { + price * Bitcoin::from(sats) } } diff --git a/crates/brk_computer/src/internal/single/transform/volatility_sqrt30.rs b/crates/brk_computer/src/internal/single/transform/volatility_sqrt30.rs index fd43dfda6..74a499566 100644 --- a/crates/brk_computer/src/internal/single/transform/volatility_sqrt30.rs +++ b/crates/brk_computer/src/internal/single/transform/volatility_sqrt30.rs @@ -7,6 +7,7 @@ pub struct StoredF32TimesSqrt30; impl UnaryTransform for StoredF32TimesSqrt30 { #[inline(always)] fn apply(v: StoredF32) -> StoredF32 { - (*v * 30.0_f32.sqrt()).into() + // 30.0_f32.sqrt() = 5.477226 + (*v * 5.477226_f32).into() } } diff --git a/crates/brk_computer/src/internal/single/transform/volatility_sqrt365.rs 
b/crates/brk_computer/src/internal/single/transform/volatility_sqrt365.rs index 97dd940f3..57e466511 100644 --- a/crates/brk_computer/src/internal/single/transform/volatility_sqrt365.rs +++ b/crates/brk_computer/src/internal/single/transform/volatility_sqrt365.rs @@ -7,6 +7,7 @@ pub struct StoredF32TimesSqrt365; impl UnaryTransform for StoredF32TimesSqrt365 { #[inline(always)] fn apply(v: StoredF32) -> StoredF32 { - (*v * 365.0_f32.sqrt()).into() + // 365.0_f32.sqrt() = 19.104973 + (*v * 19.104973_f32).into() } } diff --git a/crates/brk_computer/src/internal/single/transform/volatility_sqrt7.rs b/crates/brk_computer/src/internal/single/transform/volatility_sqrt7.rs index 83af6f972..d359978a0 100644 --- a/crates/brk_computer/src/internal/single/transform/volatility_sqrt7.rs +++ b/crates/brk_computer/src/internal/single/transform/volatility_sqrt7.rs @@ -7,6 +7,7 @@ pub struct StoredF32TimesSqrt7; impl UnaryTransform for StoredF32TimesSqrt7 { #[inline(always)] fn apply(v: StoredF32) -> StoredF32 { - (*v * 7.0_f32.sqrt()).into() + // 7.0_f32.sqrt() = 2.6457513 + (*v * 2.6457513_f32).into() } } diff --git a/crates/brk_computer/src/internal/single/transform/weight_to_vbytes.rs b/crates/brk_computer/src/internal/single/transform/weight_to_vbytes.rs new file mode 100644 index 000000000..b08a4d6c0 --- /dev/null +++ b/crates/brk_computer/src/internal/single/transform/weight_to_vbytes.rs @@ -0,0 +1,12 @@ +use brk_types::{StoredU64, Weight}; +use vecdb::UnaryTransform; + +/// Weight -> StoredU64 virtual bytes (vbytes = ceil(weight/4)) +pub struct WeightToVbytes; + +impl UnaryTransform for WeightToVbytes { + #[inline(always)] + fn apply(weight: Weight) -> StoredU64 { + StoredU64::from(weight.to_vbytes_floor()) + } +} diff --git a/crates/brk_computer/src/internal/single/tx/distribution.rs b/crates/brk_computer/src/internal/single/tx/distribution.rs index ea5fa2d9d..316652485 100644 --- a/crates/brk_computer/src/internal/single/tx/distribution.rs +++ 
b/crates/brk_computer/src/internal/single/tx/distribution.rs @@ -6,7 +6,7 @@ use brk_traversable::Traversable; use brk_types::{TxIndex, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{Database, EagerVec, Exit, ImportableVec, PcoVec}; +use vecdb::{Database, EagerVec, Exit, ImportableVec, PcoVec, Rw, StorageMode}; use crate::{ ComputeIndexes, indexes, @@ -15,24 +15,24 @@ use crate::{ const VERSION: Version = Version::ZERO; -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Deref, DerefMut, Traversable)] #[traversable(merge)] -pub struct ComputedFromTxDistribution +pub struct ComputedFromTxDistribution where T: ComputedVecValue + PartialOrd + JsonSchema, { - pub txindex: EagerVec>, + pub txindex: M::Stored>>, #[deref] #[deref_mut] #[traversable(flatten)] - pub distribution: TxDerivedDistribution, + pub distribution: TxDerivedDistribution, } impl ComputedFromTxDistribution where T: NumericValue + JsonSchema, { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, name: &str, version: Version, @@ -44,20 +44,10 @@ where Ok(Self { txindex, distribution }) } - pub fn derive_from( - &mut self, - indexer: &Indexer, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.derive_from_with_skip(indexer, indexes, starting_indexes, exit, 0) - } - /// Derive from source, skipping first N transactions per block from all calculations. /// /// Use `skip_count: 1` to exclude coinbase transactions from fee/feerate stats. 
- pub fn derive_from_with_skip( + pub(crate) fn derive_from_with_skip( &mut self, indexer: &Indexer, indexes: &indexes::Vecs, diff --git a/crates/brk_computer/src/internal/single/vec/average.rs b/crates/brk_computer/src/internal/single/vec/average.rs index a286dd7b2..1de7f1a3b 100644 --- a/crates/brk_computer/src/internal/single/vec/average.rs +++ b/crates/brk_computer/src/internal/single/vec/average.rs @@ -2,16 +2,20 @@ use brk_error::Result; use brk_traversable::Traversable; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{Database, EagerVec, ImportableVec, IterableBoxedVec, IterableCloneableVec, PcoVec, VecIndex, Version}; +use vecdb::{ + Database, EagerVec, ImportableVec, PcoVec, Ro, Rw, StorageMode, StoredVec, VecIndex, Version, +}; use crate::internal::ComputedVecValue; /// Average value in an aggregation period -#[derive(Clone, Deref, DerefMut, Traversable)] -pub struct AverageVec(pub EagerVec>); +#[derive(Deref, DerefMut, Traversable)] +pub struct AverageVec( + pub M::Stored>>, +); impl AverageVec { - pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { + pub(crate) fn forced_import(db: &Database, name: &str, version: Version) -> Result { Ok(Self(EagerVec::forced_import( db, &format!("{name}_average"), @@ -19,12 +23,7 @@ impl AverageVec { )?)) } - #[inline] - pub fn inner(&self) -> &EagerVec> { - &self.0 - } - - pub fn boxed_clone(&self) -> IterableBoxedVec { - self.0.boxed_clone() + pub fn read_only_clone(&self) -> AverageVec { + AverageVec(StoredVec::read_only_clone(&self.0)) } } diff --git a/crates/brk_computer/src/internal/single/vec/cumulative.rs b/crates/brk_computer/src/internal/single/vec/cumulative.rs index 9d418ad3a..679e2045c 100644 --- a/crates/brk_computer/src/internal/single/vec/cumulative.rs +++ b/crates/brk_computer/src/internal/single/vec/cumulative.rs @@ -2,28 +2,39 @@ use brk_error::Result; use brk_traversable::Traversable; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; 
-use vecdb::{Database, EagerVec, ImportableVec, IterableBoxedVec, IterableCloneableVec, PcoVec, VecIndex, Version}; +use vecdb::{ + Database, EagerVec, ImportableVec, PcoVec, ReadableBoxedVec, ReadableCloneableVec, Ro, Rw, + StorageMode, StoredVec, VecIndex, Version, +}; use crate::internal::ComputedVecValue; /// Cumulative sum across aggregation periods -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Deref, DerefMut, Traversable)] #[traversable(transparent)] -pub struct CumulativeVec( - pub EagerVec>, +pub struct CumulativeVec( + pub M::Stored>>, ); impl CumulativeVec { - pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { - Ok(Self(EagerVec::forced_import(db, &format!("{name}_cumulative"), version)?)) + pub(crate) fn forced_import(db: &Database, name: &str, version: Version) -> Result { + Ok(Self(EagerVec::forced_import( + db, + &format!("{name}_cumulative"), + version, + )?)) } #[inline] - pub fn inner(&self) -> &EagerVec> { + pub(crate) fn inner(&self) -> &EagerVec> { &self.0 } - pub fn boxed_clone(&self) -> IterableBoxedVec { - self.0.boxed_clone() + pub(crate) fn read_only_boxed_clone(&self) -> ReadableBoxedVec { + self.0.read_only_boxed_clone() + } + + pub fn read_only_clone(&self) -> CumulativeVec { + CumulativeVec(StoredVec::read_only_clone(&self.0)) } } diff --git a/crates/brk_computer/src/internal/single/vec/first.rs b/crates/brk_computer/src/internal/single/vec/first.rs deleted file mode 100644 index c0daf43c4..000000000 --- a/crates/brk_computer/src/internal/single/vec/first.rs +++ /dev/null @@ -1,78 +0,0 @@ -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::StoredU64; -use derive_more::{Deref, DerefMut}; -use schemars::JsonSchema; -use vecdb::{ - AnyStoredVec, AnyVec, Database, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableBoxedVec, - IterableCloneableVec, IterableVec, PcoVec, VecIndex, VecValue, Version, -}; - -use crate::internal::ComputedVecValue; - -/// First value in an 
aggregation period -#[derive(Clone, Deref, DerefMut, Traversable)] -pub struct FirstVec(pub EagerVec>); - -impl FirstVec { - pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { - Ok(Self(EagerVec::forced_import(db, name, version)?)) - } - - #[inline] - pub fn inner(&self) -> &EagerVec> { - &self.0 - } - - pub fn boxed_clone(&self) -> IterableBoxedVec { - self.0.boxed_clone() - } - - /// Compute first values from a source vec. - /// - /// For each output index I, takes the first value from the corresponding - /// range in the source vec (indexed by A). - pub fn compute_first( - &mut self, - max_from: I, - source: &impl IterableVec, - first_indexes: &impl IterableVec, - count_indexes: &impl IterableVec, - exit: &Exit, - ) -> Result<()> - where - A: VecIndex + VecValue, - { - self.0.validate_computed_version_or_reset( - source.version() + first_indexes.version() + count_indexes.version(), - )?; - - let index = max_from.min(I::from(self.0.len())); - - let mut source_iter = source.iter(); - let mut count_indexes_iter = count_indexes.iter().skip(index.to_usize()); - - first_indexes - .iter() - .enumerate() - .skip(index.to_usize()) - .try_for_each(|(i, first_index)| -> Result<()> { - let count_index = count_indexes_iter.next().unwrap(); - let count = *count_index as usize; - - if count == 0 { - panic!("should not compute first if count can be 0"); - } - - let v = source_iter.get_unwrap(first_index); - self.0.truncate_push_at(i, v)?; - - Ok(()) - })?; - - let _lock = exit.lock(); - self.0.write()?; - - Ok(()) - } -} diff --git a/crates/brk_computer/src/internal/single/vec/last.rs b/crates/brk_computer/src/internal/single/vec/last.rs deleted file mode 100644 index 6e1edf57d..000000000 --- a/crates/brk_computer/src/internal/single/vec/last.rs +++ /dev/null @@ -1,78 +0,0 @@ -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{CheckedSub, StoredU64}; -use derive_more::{Deref, DerefMut}; -use schemars::JsonSchema; -use 
vecdb::{AnyStoredVec, AnyVec, Database, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableBoxedVec, IterableCloneableVec, IterableVec, PcoVec, VecIndex, VecValue, Version}; - -use crate::internal::ComputedVecValue; - -/// Last value in an aggregation period -#[derive(Clone, Deref, DerefMut, Traversable)] -pub struct LastVec( - pub EagerVec>, -); - -impl LastVec { - pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { - Ok(Self(EagerVec::forced_import(db, name, version)?)) - } - - #[inline] - pub fn inner(&self) -> &EagerVec> { - &self.0 - } - - pub fn boxed_clone(&self) -> IterableBoxedVec { - self.0.boxed_clone() - } - - /// Compute last values from a source vec. - /// - /// For each output index I, takes the last value from the corresponding - /// range in the source vec (indexed by A). - pub fn compute_last( - &mut self, - max_from: I, - source: &impl IterableVec, - first_indexes: &impl IterableVec, - count_indexes: &impl IterableVec, - exit: &Exit, - ) -> Result<()> - where - A: VecIndex + VecValue + CheckedSub, - { - self.0.validate_computed_version_or_reset( - source.version() + first_indexes.version() + count_indexes.version(), - )?; - - let index = max_from.min(I::from(self.0.len())); - - let mut source_iter = source.iter(); - let mut count_indexes_iter = count_indexes.iter().skip(index.to_usize()); - - first_indexes - .iter() - .enumerate() - .skip(index.to_usize()) - .try_for_each(|(i, first_index)| -> Result<()> { - let count_index = count_indexes_iter.next().unwrap(); - let count = *count_index as usize; - - if count == 0 { - panic!("should not compute last if count can be 0"); - } - - let last_index = first_index + (count - 1); - let v = source_iter.get_unwrap(last_index); - self.0.truncate_push_at(i, v)?; - - Ok(()) - })?; - - let _lock = exit.lock(); - self.0.write()?; - - Ok(()) - } -} diff --git a/crates/brk_computer/src/internal/single/vec/max.rs b/crates/brk_computer/src/internal/single/vec/max.rs index 
bae57fd90..1b0e60e71 100644 --- a/crates/brk_computer/src/internal/single/vec/max.rs +++ b/crates/brk_computer/src/internal/single/vec/max.rs @@ -2,27 +2,24 @@ use brk_error::Result; use brk_traversable::Traversable; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{Database, EagerVec, ImportableVec, IterableBoxedVec, IterableCloneableVec, PcoVec, VecIndex, Version}; +use vecdb::{ + Database, EagerVec, ImportableVec, PcoVec, Ro, Rw, StorageMode, StoredVec, VecIndex, Version, +}; use crate::internal::ComputedVecValue; /// Maximum value in an aggregation period -#[derive(Clone, Deref, DerefMut, Traversable)] -pub struct MaxVec( - pub EagerVec>, +#[derive(Deref, DerefMut, Traversable)] +pub struct MaxVec( + pub M::Stored>>, ); impl MaxVec { - pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { + pub(crate) fn forced_import(db: &Database, name: &str, version: Version) -> Result { Ok(Self(EagerVec::forced_import(db, &format!("{name}_max"), version)?)) } - #[inline] - pub fn inner(&self) -> &EagerVec> { - &self.0 - } - - pub fn boxed_clone(&self) -> IterableBoxedVec { - self.0.boxed_clone() + pub fn read_only_clone(&self) -> MaxVec { + MaxVec(StoredVec::read_only_clone(&self.0)) } } diff --git a/crates/brk_computer/src/internal/single/vec/min.rs b/crates/brk_computer/src/internal/single/vec/min.rs index 06b43406c..99a049094 100644 --- a/crates/brk_computer/src/internal/single/vec/min.rs +++ b/crates/brk_computer/src/internal/single/vec/min.rs @@ -2,27 +2,24 @@ use brk_error::Result; use brk_traversable::Traversable; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{Database, EagerVec, ImportableVec, IterableBoxedVec, IterableCloneableVec, PcoVec, VecIndex, Version}; +use vecdb::{ + Database, EagerVec, ImportableVec, PcoVec, Ro, Rw, StorageMode, StoredVec, VecIndex, Version, +}; use crate::internal::ComputedVecValue; /// Minimum value in an aggregation period -#[derive(Clone, Deref, DerefMut, 
Traversable)] -pub struct MinVec( - pub EagerVec>, +#[derive(Deref, DerefMut, Traversable)] +pub struct MinVec( + pub M::Stored>>, ); impl MinVec { - pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { + pub(crate) fn forced_import(db: &Database, name: &str, version: Version) -> Result { Ok(Self(EagerVec::forced_import(db, &format!("{name}_min"), version)?)) } - #[inline] - pub fn inner(&self) -> &EagerVec> { - &self.0 - } - - pub fn boxed_clone(&self) -> IterableBoxedVec { - self.0.boxed_clone() + pub fn read_only_clone(&self) -> MinVec { + MinVec(StoredVec::read_only_clone(&self.0)) } } diff --git a/crates/brk_computer/src/internal/single/vec/mod.rs b/crates/brk_computer/src/internal/single/vec/mod.rs index 0bfdbd286..b82232b9d 100644 --- a/crates/brk_computer/src/internal/single/vec/mod.rs +++ b/crates/brk_computer/src/internal/single/vec/mod.rs @@ -1,7 +1,5 @@ mod average; mod cumulative; -mod first; -mod last; mod max; mod min; mod percentiles; @@ -9,8 +7,6 @@ mod sum; pub use average::*; pub use cumulative::*; -pub use first::*; -pub use last::*; pub use max::*; pub use min::*; pub use percentiles::*; diff --git a/crates/brk_computer/src/internal/single/vec/percentiles.rs b/crates/brk_computer/src/internal/single/vec/percentiles.rs index 8d051b350..90a2586dc 100644 --- a/crates/brk_computer/src/internal/single/vec/percentiles.rs +++ b/crates/brk_computer/src/internal/single/vec/percentiles.rs @@ -4,30 +4,27 @@ use brk_error::Result; use brk_traversable::Traversable; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{Database, EagerVec, ImportableVec, IterableBoxedVec, IterableCloneableVec, PcoVec, VecIndex, Version}; +use vecdb::{ + Database, EagerVec, ImportableVec, PcoVec, Ro, Rw, StorageMode, StoredVec, VecIndex, Version, +}; use crate::internal::ComputedVecValue; macro_rules! 
define_percentile_vec { ($name:ident, $suffix:literal, $doc:literal) => { #[doc = $doc] - #[derive(Clone, Deref, DerefMut, Traversable)] - pub struct $name( - pub EagerVec>, + #[derive(Deref, DerefMut, Traversable)] + pub struct $name( + pub M::Stored>>, ); impl $name { - pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { + pub(crate) fn forced_import(db: &Database, name: &str, version: Version) -> Result { Ok(Self(EagerVec::forced_import(db, &format!("{name}_{}", $suffix), version)?)) } - #[inline] - pub fn inner(&self) -> &EagerVec> { - &self.0 - } - - pub fn boxed_clone(&self) -> IterableBoxedVec { - self.0.boxed_clone() + pub fn read_only_clone(&self) -> $name { + $name(StoredVec::read_only_clone(&self.0)) } } }; diff --git a/crates/brk_computer/src/internal/single/vec/sum.rs b/crates/brk_computer/src/internal/single/vec/sum.rs index c3d29ea14..f7a9b2959 100644 --- a/crates/brk_computer/src/internal/single/vec/sum.rs +++ b/crates/brk_computer/src/internal/single/vec/sum.rs @@ -2,33 +2,34 @@ use brk_error::Result; use brk_traversable::Traversable; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{Database, EagerVec, ImportableVec, IterableBoxedVec, IterableCloneableVec, PcoVec, VecIndex, Version}; +use vecdb::{ + Database, EagerVec, ImportableVec, PcoVec, Ro, Rw, StorageMode, StoredVec, VecIndex, Version, +}; use crate::internal::ComputedVecValue; /// Sum of values in an aggregation period -#[derive(Clone, Deref, DerefMut, Traversable)] +#[derive(Deref, DerefMut, Traversable)] #[traversable(transparent)] -pub struct SumVec( - pub EagerVec>, +pub struct SumVec( + pub M::Stored>>, ); impl SumVec { - pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { - Ok(Self(EagerVec::forced_import(db, &format!("{name}_sum"), version)?)) - } - - /// Import with raw name (no suffix) for backwards compat - pub fn forced_import_raw(db: &Database, name: &str, version: Version) -> Result { - 
Ok(Self(EagerVec::forced_import(db, name, version)?)) + pub(crate) fn forced_import(db: &Database, name: &str, version: Version) -> Result { + Ok(Self(EagerVec::forced_import( + db, + &format!("{name}_sum"), + version, + )?)) } #[inline] - pub fn inner(&self) -> &EagerVec> { + pub(crate) fn inner(&self) -> &EagerVec> { &self.0 } - pub fn boxed_clone(&self) -> IterableBoxedVec { - self.0.boxed_clone() + pub fn read_only_clone(&self) -> SumVec { + SumVec(StoredVec::read_only_clone(&self.0)) } } diff --git a/crates/brk_computer/src/lib.rs b/crates/brk_computer/src/lib.rs index 6e9fe4947..413139e03 100644 --- a/crates/brk_computer/src/lib.rs +++ b/crates/brk_computer/src/lib.rs @@ -3,13 +3,12 @@ use std::{fs, path::Path, thread, time::Instant}; use brk_error::Result; -use brk_fetcher::Fetcher; use brk_indexer::Indexer; use brk_reader::Reader; use brk_traversable::Traversable; use brk_types::Version; use tracing::info; -use vecdb::Exit; +use vecdb::{Exit, Ro, Rw, StorageMode}; mod blocks; mod cointime; @@ -19,10 +18,11 @@ pub mod indexes; mod inputs; mod internal; mod market; +mod mining; mod outputs; mod pools; mod positions; -pub mod price; +pub mod prices; mod scripts; mod supply; mod traits; @@ -31,48 +31,45 @@ mod utils; use indexes::ComputeIndexes; -#[derive(Clone, Traversable)] -pub struct Computer { - pub blocks: blocks::Vecs, - pub transactions: transactions::Vecs, - pub scripts: scripts::Vecs, - pub positions: positions::Vecs, - pub cointime: cointime::Vecs, - pub constants: constants::Vecs, - pub indexes: indexes::Vecs, - pub market: market::Vecs, - pub pools: pools::Vecs, - pub price: Option, - pub distribution: distribution::Vecs, - pub supply: supply::Vecs, - pub inputs: inputs::Vecs, - pub outputs: outputs::Vecs, +#[derive(Traversable)] +pub struct Computer { + pub blocks: Box>, + pub mining: Box>, + pub transactions: Box>, + pub scripts: Box>, + pub positions: Box>, + pub cointime: Box>, + pub constants: Box, + pub indexes: Box>, + pub market: Box>, + 
pub pools: Box>, + pub prices: Box>, + pub distribution: Box>, + pub supply: Box>, + pub inputs: Box>, + pub outputs: Box>, } -const VERSION: Version = Version::new(4); +const VERSION: Version = Version::new(5); impl Computer { /// Do NOT import multiple times or things will break !!! - pub fn forced_import( - outputs_path: &Path, - indexer: &Indexer, - fetcher: Option, - ) -> Result { + pub fn forced_import(outputs_path: &Path, indexer: &Indexer) -> Result { info!("Importing computer..."); let import_start = Instant::now(); let computed_path = outputs_path.join("computed"); - const STACK_SIZE: usize = 512 * 1024 * 1024; + const STACK_SIZE: usize = 8 * 1024 * 1024; let big_thread = || thread::Builder::new().stack_size(STACK_SIZE); let i = Instant::now(); let (indexes, positions) = thread::scope(|s| -> Result<_> { - let positions_handle = big_thread().spawn_scoped(s, || { - positions::Vecs::forced_import(&computed_path, VERSION) + let positions_handle = big_thread().spawn_scoped(s, || -> Result<_> { + Ok(Box::new(positions::Vecs::forced_import(&computed_path, VERSION)?)) })?; - let indexes = indexes::Vecs::forced_import(&computed_path, VERSION, indexer)?; + let indexes = Box::new(indexes::Vecs::forced_import(&computed_path, VERSION, indexer)?); let positions = positions_handle.join().unwrap()?; Ok((indexes, positions)) @@ -82,12 +79,12 @@ impl Computer { // inputs/outputs need indexes for count imports let i = Instant::now(); let (inputs, outputs) = thread::scope(|s| -> Result<_> { - let inputs_handle = big_thread().spawn_scoped(s, || { - inputs::Vecs::forced_import(&computed_path, VERSION, &indexes) + let inputs_handle = big_thread().spawn_scoped(s, || -> Result<_> { + Ok(Box::new(inputs::Vecs::forced_import(&computed_path, VERSION, &indexes)?)) })?; - let outputs_handle = big_thread().spawn_scoped(s, || { - outputs::Vecs::forced_import(&computed_path, VERSION, &indexes) + let outputs_handle = big_thread().spawn_scoped(s, || -> Result<_> { + 
Ok(Box::new(outputs::Vecs::forced_import(&computed_path, VERSION, &indexes)?)) })?; let inputs = inputs_handle.join().unwrap()?; @@ -98,104 +95,104 @@ impl Computer { info!("Imported inputs/outputs in {:?}", i.elapsed()); let i = Instant::now(); - let constants = constants::Vecs::new(VERSION, &indexes); + let constants = Box::new(constants::Vecs::new(VERSION, &indexes)); // Price must be created before market since market's lazy vecs reference price - let price = price::Vecs::forced_import(&computed_path, VERSION, &indexes, fetcher)?; - let price = price.has_fetcher().then_some(price); + let prices = Box::new(prices::Vecs::forced_import(&computed_path, VERSION, &indexes)?); info!("Imported price/constants in {:?}", i.elapsed()); let i = Instant::now(); - let (blocks, transactions, scripts, pools, cointime) = thread::scope(|s| -> Result<_> { - // Import blocks module - let blocks_handle = big_thread().spawn_scoped(s, || { - blocks::Vecs::forced_import( - &computed_path, - VERSION, - indexer, - &indexes, - price.as_ref(), - ) - })?; + let (blocks, mining, transactions, scripts, pools, cointime) = + thread::scope(|s| -> Result<_> { + // Import blocks module (no longer needs prices) + let blocks_handle = big_thread().spawn_scoped(s, || -> Result<_> { + Ok(Box::new(blocks::Vecs::forced_import(&computed_path, VERSION, indexer, &indexes)?)) + })?; - // Import transactions module - let transactions_handle = big_thread().spawn_scoped(s, || { - transactions::Vecs::forced_import( - &computed_path, - VERSION, - indexer, - &indexes, - price.as_ref(), - ) - })?; + // Import mining module (separate database) + let mining_handle = big_thread().spawn_scoped(s, || -> Result<_> { + Ok(Box::new(mining::Vecs::forced_import(&computed_path, VERSION, &indexes, &prices)?)) + })?; - // Import scripts module (depends on outputs for adoption ratio denominators) - let scripts_handle = big_thread().spawn_scoped(s, || { - scripts::Vecs::forced_import( + // Import transactions module + let 
transactions_handle = big_thread().spawn_scoped(s, || -> Result<_> { + Ok(Box::new(transactions::Vecs::forced_import( + &computed_path, + VERSION, + indexer, + &indexes, + &prices, + )?)) + })?; + + // Import scripts module (depends on outputs for adoption ratio denominators) + let scripts_handle = big_thread().spawn_scoped(s, || -> Result<_> { + Ok(Box::new(scripts::Vecs::forced_import( + &computed_path, + VERSION, + &indexes, + &prices, + &outputs, + )?)) + })?; + + let cointime = Box::new( + cointime::Vecs::forced_import(&computed_path, VERSION, &indexes, &prices)? + ); + + let blocks = blocks_handle.join().unwrap()?; + let mining = mining_handle.join().unwrap()?; + let transactions = transactions_handle.join().unwrap()?; + let scripts = scripts_handle.join().unwrap()?; + + // pools depends on blocks, mining, and transactions for lazy dominance vecs + let pools = Box::new(pools::Vecs::forced_import( &computed_path, VERSION, &indexes, - price.as_ref(), - &outputs, - ) + &prices, + &blocks, + &mining, + &transactions, + )?); + + Ok((blocks, mining, transactions, scripts, pools, cointime)) })?; - - let cointime = - cointime::Vecs::forced_import(&computed_path, VERSION, &indexes, price.as_ref())?; - - let blocks = blocks_handle.join().unwrap()?; - let transactions = transactions_handle.join().unwrap()?; - let scripts = scripts_handle.join().unwrap()?; - - // pools depends on blocks and transactions for lazy dominance vecs - let pools = pools::Vecs::forced_import( - &computed_path, - VERSION, - &indexes, - price.as_ref(), - &blocks, - &transactions, - )?; - - Ok((blocks, transactions, scripts, pools, cointime)) - })?; info!( - "Imported blocks/transactions/scripts/pools/cointime in {:?}", + "Imported blocks/mining/transactions/scripts/pools/cointime in {:?}", i.elapsed() ); // Threads inside let i = Instant::now(); - let distribution = - distribution::Vecs::forced_import(&computed_path, VERSION, &indexes, price.as_ref())?; + let distribution = Box::new( + 
distribution::Vecs::forced_import(&computed_path, VERSION, &indexes, &prices)? + ); info!("Imported distribution in {:?}", i.elapsed()); // Supply must be imported after distribution (references distribution's supply) let i = Instant::now(); - let supply = supply::Vecs::forced_import( - &computed_path, - VERSION, - &indexes, - price.as_ref(), - &distribution, - )?; + let supply = Box::new( + supply::Vecs::forced_import(&computed_path, VERSION, &indexes, &prices, &distribution)? + ); info!("Imported supply in {:?}", i.elapsed()); // Market must be imported after distribution and transactions (for NVT indicator) let i = Instant::now(); - let market = market::Vecs::forced_import( + let market = Box::new(market::Vecs::forced_import( &computed_path, VERSION, &indexes, - price.as_ref(), + &prices, &distribution, &transactions, - )?; + )?); info!("Imported market in {:?}", i.elapsed()); info!("Total import time: {:?}", import_start.elapsed()); let this = Self { blocks, + mining, transactions, scripts, constants, @@ -207,7 +204,7 @@ impl Computer { cointime, indexes, inputs, - price, + prices, outputs, }; @@ -220,6 +217,7 @@ impl Computer { fn retain_databases(computed_path: &Path) -> Result<()> { const EXPECTED_DBS: &[&str] = &[ blocks::DB_NAME, + mining::DB_NAME, transactions::DB_NAME, scripts::DB_NAME, positions::DB_NAME, @@ -227,7 +225,7 @@ impl Computer { indexes::DB_NAME, market::DB_NAME, pools::DB_NAME, - price::DB_NAME, + prices::DB_NAME, distribution::DB_NAME, supply::DB_NAME, inputs::DB_NAME, @@ -266,34 +264,21 @@ impl Computer { ) -> Result<()> { let compute_start = Instant::now(); - // Compute blocks.time early (height_to_date, height_to_timestamp_monotonic, height_to_date_monotonic) - // These are needed by indexes::block to compute height_to_dateindex - info!("Computing blocks.time (early)..."); - let i = Instant::now(); - self.blocks - .time - .compute_early(indexer, starting_indexes.height, exit)?; - info!("Computed blocks.time (early) in {:?}", 
i.elapsed()); - + // 1. Indexes (absorbs blocks.time.compute — timestamp_monotonic) info!("Computing indexes..."); let i = Instant::now(); let mut starting_indexes = self.indexes - .compute(indexer, &self.blocks.time, starting_indexes, exit)?; + .compute(indexer, &mut self.blocks, starting_indexes, exit)?; info!("Computed indexes in {:?}", i.elapsed()); - if let Some(price) = self.price.as_mut() { - info!("Fetching prices..."); - let i = Instant::now(); - price.fetch(indexer, &self.indexes, &starting_indexes, exit)?; - info!("Fetched prices in {:?}", i.elapsed()); - - info!("Computing prices..."); - let i = Instant::now(); - price.compute(indexer, &self.indexes, &starting_indexes, exit)?; - info!("Computed prices in {:?}", i.elapsed()); - } + // 2. Prices + info!("Computing prices..."); + let i = Instant::now(); + self.prices.compute(indexer, &starting_indexes, exit)?; + info!("Computed prices in {:?}", i.elapsed()); + // 3. Main scope thread::scope(|scope| -> Result<()> { let positions = scope.spawn(|| -> Result<()> { info!("Computing positions metadata..."); @@ -304,39 +289,53 @@ impl Computer { Ok(()) }); - // Inputs must complete first - info!("Computing inputs..."); - let i = Instant::now(); - self.inputs - .compute(indexer, &self.indexes, &starting_indexes, exit)?; - info!("Computed inputs in {:?}", i.elapsed()); + // Nested scope: blocks (mut) runs in parallel with inputs chain + // The nested scope ensures blocks' mutable borrow ends before transactions + thread::scope(|inner| -> Result<()> { + let blocks = inner.spawn(|| -> Result<()> { + info!("Computing blocks..."); + let i = Instant::now(); + self.blocks + .compute(indexer, &self.indexes, &starting_indexes, exit)?; + info!("Computed blocks in {:?}", i.elapsed()); + Ok(()) + }); - // Scripts (needed for outputs.count.utxo_count) - info!("Computing scripts..."); - let i = Instant::now(); - self.scripts - .compute(indexer, &self.indexes, &starting_indexes, exit)?; - info!("Computed scripts in {:?}", 
i.elapsed()); + // Inputs → scripts → outputs (sequential) + info!("Computing inputs..."); + let i = Instant::now(); + self.inputs + .compute(indexer, &self.indexes, &starting_indexes, exit)?; + info!("Computed inputs in {:?}", i.elapsed()); - // Outputs depends on inputs and scripts (for utxo_count) - info!("Computing outputs..."); - let i = Instant::now(); - self.outputs.compute( - indexer, - &self.indexes, - &self.inputs, - &self.scripts, - &starting_indexes, - exit, - )?; - info!("Computed outputs in {:?}", i.elapsed()); + info!("Computing scripts..."); + let i = Instant::now(); + self.scripts.compute(indexer, &starting_indexes, exit)?; + info!("Computed scripts in {:?}", i.elapsed()); - // Transactions: count, versions, size, fees, volume + info!("Computing outputs..."); + let i = Instant::now(); + self.outputs.compute( + indexer, + &self.indexes, + &self.inputs, + &self.scripts, + &starting_indexes, + exit, + )?; + info!("Computed outputs in {:?}", i.elapsed()); + + blocks.join().unwrap()?; + Ok(()) + })?; + + // Transactions (needs blocks for count/interval) info!("Computing transactions..."); let i = Instant::now(); self.transactions.compute( indexer, &self.indexes, + &self.blocks, &self.inputs, &self.outputs, &starting_indexes, @@ -344,22 +343,28 @@ impl Computer { )?; info!("Computed transactions in {:?}", i.elapsed()); - // Blocks depends on transactions.fees for rewards computation - info!("Computing blocks..."); + // Mining (needs blocks + transactions) + info!("Computing mining..."); let i = Instant::now(); - self.blocks.compute( + self.mining.compute( indexer, &self.indexes, + &self.blocks, &self.transactions, &starting_indexes, exit, )?; - info!("Computed blocks in {:?}", i.elapsed()); + info!("Computed mining in {:?}", i.elapsed()); positions.join().unwrap()?; Ok(()) })?; + if true { + return Ok(()); + } + + // 4. 
Pools || distribution let starting_indexes_clone = starting_indexes.clone(); thread::scope(|scope| -> Result<()> { let pools = scope.spawn(|| -> Result<()> { @@ -385,7 +390,7 @@ impl Computer { &self.outputs, &self.transactions, &self.blocks, - self.price.as_ref(), + &self.prices, &mut starting_indexes, exit, )?; @@ -395,41 +400,49 @@ impl Computer { Ok(()) })?; - // Market must be computed after distribution (uses distribution data for gini) - if let Some(price) = self.price.as_ref() { - info!("Computing market..."); + // 5. Market and supply are independent — both depend on distribution but not each other + thread::scope(|scope| -> Result<()> { + let market = scope.spawn(|| -> Result<()> { + info!("Computing market..."); + let i = Instant::now(); + self.market.compute( + &self.indexes, + &self.prices, + &self.blocks, + &self.mining, + &self.distribution, + &starting_indexes, + exit, + )?; + info!("Computed market in {:?}", i.elapsed()); + Ok(()) + }); + + info!("Computing supply..."); let i = Instant::now(); - self.market.compute( - price, + self.supply.compute( + &self.scripts, &self.blocks, + &self.mining, + &self.transactions, &self.distribution, &starting_indexes, exit, )?; - info!("Computed market in {:?}", i.elapsed()); - } + info!("Computed supply in {:?}", i.elapsed()); - // Supply must be computed after distribution (uses actual circulating supply) - info!("Computing supply..."); - let i = Instant::now(); - self.supply.compute( - &self.indexes, - &self.scripts, - &self.blocks, - &self.transactions, - &self.distribution, - &starting_indexes, - exit, - )?; - info!("Computed supply in {:?}", i.elapsed()); + market.join().unwrap()?; + Ok(()) + })?; + // 6. 
Cointime (depends on supply, distribution, mining) info!("Computing cointime..."); let i = Instant::now(); self.cointime.compute( - &self.indexes, &starting_indexes, - self.price.as_ref(), + &self.prices, &self.blocks, + &self.mining, &self.supply, &self.distribution, exit, @@ -439,7 +452,9 @@ impl Computer { info!("Total compute time: {:?}", compute_start.elapsed()); Ok(()) } +} +impl Computer { /// Iterate over all exportable vecs with their database name. pub fn iter_named_exportable( &self, @@ -452,6 +467,11 @@ impl Computer { .iter_any_exportable() .map(|v| (blocks::DB_NAME, v)), ) + .chain( + self.mining + .iter_any_exportable() + .map(|v| (mining::DB_NAME, v)), + ) .chain( self.transactions .iter_any_exportable() @@ -493,9 +513,9 @@ impl Computer { .map(|v| (pools::DB_NAME, v)), ) .chain( - self.price + self.prices .iter_any_exportable() - .map(|v| (price::DB_NAME, v)), + .map(|v| (prices::DB_NAME, v)), ) .chain( self.distribution diff --git a/crates/brk_computer/src/market/ath/compute.rs b/crates/brk_computer/src/market/ath/compute.rs index ec961a3c8..61630ddf9 100644 --- a/crates/brk_computer/src/market/ath/compute.rs +++ b/crates/brk_computer/src/market/ath/compute.rs @@ -1,91 +1,67 @@ use brk_error::Result; use brk_types::StoredU16; -use vecdb::{Exit, GenericStoredVec, TypedVecIterator, VecIndex}; +use vecdb::{Exit, ReadableVec, VecIndex}; use super::Vecs; -use crate::{ComputeIndexes, price, traits::ComputeDrawdown}; +use crate::{ComputeIndexes, prices}; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, - price: &price::Vecs, + prices: &prices::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { self.price_ath.height.compute_all_time_high( starting_indexes.height, - &price.usd.split.high.height, + &prices.usd.price, exit, )?; - self.price_drawdown.height.compute_drawdown( + let mut prev = None; + self.days_since_price_ath.height.compute_transform2( starting_indexes.height, - &price.usd.split.close.height, 
&self.price_ath.height, + &prices.usd.price, + |(i, ath, price, slf)| { + if prev.is_none() { + let i = i.to_usize(); + prev.replace(if i > 0 { + slf.collect_one_at(i - 1).unwrap() + } else { + StoredU16::default() + }); + } + let days = if *price == *ath { + StoredU16::default() + } else { + prev.unwrap() + StoredU16::new(1) + }; + prev.replace(days); + (i, days) + }, exit, )?; - self.price_ath.compute_rest(starting_indexes, exit, |v| { - v.compute_all_time_high( - starting_indexes.dateindex, - &price.usd.split.high.dateindex, - exit, - )?; - Ok(()) - })?; - - self.days_since_price_ath - .compute_all(starting_indexes, exit, |v| { - let mut high_iter = price.usd.split.high.dateindex.into_iter(); - let mut prev = None; - v.compute_transform( - starting_indexes.dateindex, - &self.price_ath.dateindex, - |(i, ath, slf)| { - if prev.is_none() { - let i = i.to_usize(); - prev.replace(if i > 0 { - slf.get_pushed_or_read_at_unwrap_once(i - 1) - } else { - StoredU16::default() - }); - } - let days = if *high_iter.get_unwrap(i) == ath { - StoredU16::default() - } else { - prev.unwrap() + StoredU16::new(1) - }; - prev.replace(days); - (i, days) - }, - exit, - )?; - Ok(()) - })?; - - self.max_days_between_price_aths - .compute_all(starting_indexes, exit, |v| { - let mut prev = None; - v.compute_transform( - starting_indexes.dateindex, - &self.days_since_price_ath.dateindex, - |(i, days, slf)| { - if prev.is_none() { - let i = i.to_usize(); - prev.replace(if i > 0 { - slf.get_pushed_or_read_at_unwrap_once(i - 1) - } else { - StoredU16::ZERO - }); - } - let max = prev.unwrap().max(days); - prev.replace(max); - (i, max) - }, - exit, - )?; - Ok(()) - })?; + let mut prev = None; + self.max_days_between_price_aths.height.compute_transform( + starting_indexes.height, + &self.days_since_price_ath.height, + |(i, days, slf)| { + if prev.is_none() { + let i = i.to_usize(); + prev.replace(if i > 0 { + slf.collect_one_at(i - 1).unwrap() + } else { + StoredU16::ZERO + }); + } + let max = 
prev.unwrap().max(days); + prev.replace(max); + (i, max) + }, + exit, + )?; Ok(()) } diff --git a/crates/brk_computer/src/market/ath/import.rs b/crates/brk_computer/src/market/ath/import.rs index 6efa8390b..eeaf912d7 100644 --- a/crates/brk_computer/src/market/ath/import.rs +++ b/crates/brk_computer/src/market/ath/import.rs @@ -1,52 +1,56 @@ use brk_error::Result; use brk_types::Version; -use vecdb::{Database, EagerVec, ImportableVec, IterableCloneableVec}; +use vecdb::{Database, ReadableCloneableVec}; use super::Vecs; use crate::{ indexes, internal::{ - ComputedFromDateLast, LazyBinaryFromHeightAndDateLast, LazyFromDateLast, - PercentageDiffCloseDollars, PriceFromHeightAndDate, StoredU16ToYears, + ComputedFromHeightLast, LazyBinaryFromHeightLast, LazyHeightDerivedLast, + PercentageDiffDollars, PriceFromHeight, StoredU16ToYears, }, - price, + prices, }; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, indexes: &indexes::Vecs, - price: &price::Vecs, + prices: &prices::Vecs, ) -> Result { - let price_ath = PriceFromHeightAndDate::forced_import(db, "price_ath", version, indexes)?; + let price_ath = PriceFromHeight::forced_import(db, "price_ath", version, indexes)?; - let max_days_between_price_aths = - ComputedFromDateLast::forced_import(db, "max_days_between_price_aths", version, indexes)?; - - let max_years_between_price_aths = LazyFromDateLast::from_computed::( - "max_years_between_price_aths", + let max_days_between_price_aths = ComputedFromHeightLast::forced_import( + db, + "max_days_between_price_aths", version, - max_days_between_price_aths.dateindex.boxed_clone(), - &max_days_between_price_aths, - ); + indexes, + )?; + + let max_years_between_price_aths = + LazyHeightDerivedLast::from_computed::( + "max_years_between_price_aths", + version, + &max_days_between_price_aths, + ); let days_since_price_ath = - ComputedFromDateLast::forced_import(db, "days_since_price_ath", version, indexes)?; + 
ComputedFromHeightLast::forced_import(db, "days_since_price_ath", version, indexes)?; - let years_since_price_ath = LazyFromDateLast::from_computed::( + let years_since_price_ath = LazyHeightDerivedLast::from_computed::( "years_since_price_ath", version, - days_since_price_ath.dateindex.boxed_clone(), &days_since_price_ath, ); let price_drawdown = - LazyBinaryFromHeightAndDateLast::from_computed_both_last::( + LazyBinaryFromHeightLast::from_height_and_derived_last::( "price_drawdown", version, - EagerVec::forced_import(db, "price_drawdown", version)?, - &price.usd.split.close, + prices.usd.price.read_only_boxed_clone(), + price_ath.height.read_only_boxed_clone(), + &prices.usd.split.close, &price_ath.rest, ); diff --git a/crates/brk_computer/src/market/ath/vecs.rs b/crates/brk_computer/src/market/ath/vecs.rs index 5ff4281b1..7aa4dd3b8 100644 --- a/crates/brk_computer/src/market/ath/vecs.rs +++ b/crates/brk_computer/src/market/ath/vecs.rs @@ -1,17 +1,18 @@ use brk_traversable::Traversable; -use brk_types::{Close, Dollars, StoredF32, StoredU16}; +use brk_types::{Dollars, StoredF32, StoredU16}; +use vecdb::{Rw, StorageMode}; use crate::internal::{ - ComputedFromDateLast, LazyBinaryFromHeightAndDateLast, LazyFromDateLast, PriceFromHeightAndDate, + ComputedFromHeightLast, LazyBinaryFromHeightLast, LazyHeightDerivedLast, Price, }; /// All-time high related metrics -#[derive(Clone, Traversable)] -pub struct Vecs { - pub price_ath: PriceFromHeightAndDate, - pub price_drawdown: LazyBinaryFromHeightAndDateLast, Dollars>, - pub days_since_price_ath: ComputedFromDateLast, - pub years_since_price_ath: LazyFromDateLast, - pub max_days_between_price_aths: ComputedFromDateLast, - pub max_years_between_price_aths: LazyFromDateLast, +#[derive(Traversable)] +pub struct Vecs { + pub price_ath: Price>, + pub price_drawdown: LazyBinaryFromHeightLast, + pub days_since_price_ath: ComputedFromHeightLast, + pub years_since_price_ath: LazyHeightDerivedLast, + pub max_days_between_price_aths: 
ComputedFromHeightLast, + pub max_years_between_price_aths: LazyHeightDerivedLast, } diff --git a/crates/brk_computer/src/market/compute.rs b/crates/brk_computer/src/market/compute.rs index 612995d37..68fe242ac 100644 --- a/crates/brk_computer/src/market/compute.rs +++ b/crates/brk_computer/src/market/compute.rs @@ -1,46 +1,54 @@ use brk_error::Result; use vecdb::Exit; -use crate::{blocks, distribution, price, ComputeIndexes}; +use crate::{ComputeIndexes, blocks, distribution, indexes, mining, prices}; use super::Vecs; impl Vecs { - pub fn compute( + #[allow(clippy::too_many_arguments)] + pub(crate) fn compute( &mut self, - price: &price::Vecs, + indexes: &indexes::Vecs, + prices: &prices::Vecs, blocks: &blocks::Vecs, + mining: &mining::Vecs, distribution: &distribution::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { // ATH metrics (independent) - self.ath.compute(price, starting_indexes, exit)?; + self.ath.compute(prices, starting_indexes, exit)?; // Lookback metrics (independent) - self.lookback.compute(price, starting_indexes, exit)?; + self.lookback + .compute(blocks, prices, starting_indexes, exit)?; // Returns metrics (depends on lookback) - self.returns.compute(starting_indexes, exit)?; + self.returns + .compute(indexes, blocks, starting_indexes, exit)?; // Volatility: all fields are lazy (derived from returns SD) // Range metrics (independent) - self.range.compute(price, starting_indexes, exit)?; + self.range + .compute(prices, blocks, starting_indexes, exit)?; // Moving average metrics (independent) - self.moving_average.compute(price, starting_indexes, exit)?; + self.moving_average + .compute(blocks, prices, indexes, starting_indexes, exit)?; // DCA metrics (depends on lookback for lump sum comparison) self.dca - .compute(price, &self.lookback, starting_indexes, exit)?; + .compute(indexes, prices, blocks, &self.lookback, starting_indexes, exit)?; self.indicators.compute( - &blocks.rewards, + indexes, + &mining.rewards, 
&self.returns, - &self.moving_average, &self.range, - price, + prices, + blocks, distribution, starting_indexes, exit, diff --git a/crates/brk_computer/src/market/dca/by_class.rs b/crates/brk_computer/src/market/dca/by_class.rs index 5ac6939ed..a9b0c3ba2 100644 --- a/crates/brk_computer/src/market/dca/by_class.rs +++ b/crates/brk_computer/src/market/dca/by_class.rs @@ -1,5 +1,5 @@ use brk_traversable::Traversable; -use brk_types::{Date, DateIndex}; +use brk_types::{Date, Day1}; /// DCA class years pub const DCA_CLASS_YEARS: ByDcaClass = ByDcaClass { @@ -34,7 +34,7 @@ pub const DCA_CLASS_NAMES: ByDcaClass<&'static str> = ByDcaClass { }; /// Generic wrapper for DCA year class data -#[derive(Default, Clone, Traversable)] +#[derive(Clone, Default, Traversable)] pub struct ByDcaClass { pub _2015: T, pub _2016: T, @@ -51,55 +51,33 @@ pub struct ByDcaClass { } impl ByDcaClass { - pub fn new(mut create: F) -> Self + pub(crate) fn try_new(mut create: F) -> Result where - F: FnMut(&'static str, u16, DateIndex) -> T, - { - let n = DCA_CLASS_NAMES; - let y = DCA_CLASS_YEARS; - Self { - _2015: create(n._2015, y._2015, Self::dateindex(y._2015)), - _2016: create(n._2016, y._2016, Self::dateindex(y._2016)), - _2017: create(n._2017, y._2017, Self::dateindex(y._2017)), - _2018: create(n._2018, y._2018, Self::dateindex(y._2018)), - _2019: create(n._2019, y._2019, Self::dateindex(y._2019)), - _2020: create(n._2020, y._2020, Self::dateindex(y._2020)), - _2021: create(n._2021, y._2021, Self::dateindex(y._2021)), - _2022: create(n._2022, y._2022, Self::dateindex(y._2022)), - _2023: create(n._2023, y._2023, Self::dateindex(y._2023)), - _2024: create(n._2024, y._2024, Self::dateindex(y._2024)), - _2025: create(n._2025, y._2025, Self::dateindex(y._2025)), - _2026: create(n._2026, y._2026, Self::dateindex(y._2026)), - } - } - - pub fn try_new(mut create: F) -> Result - where - F: FnMut(&'static str, u16, DateIndex) -> Result, + F: FnMut(&'static str, u16, Day1) -> Result, { let n = 
DCA_CLASS_NAMES; let y = DCA_CLASS_YEARS; Ok(Self { - _2015: create(n._2015, y._2015, Self::dateindex(y._2015))?, - _2016: create(n._2016, y._2016, Self::dateindex(y._2016))?, - _2017: create(n._2017, y._2017, Self::dateindex(y._2017))?, - _2018: create(n._2018, y._2018, Self::dateindex(y._2018))?, - _2019: create(n._2019, y._2019, Self::dateindex(y._2019))?, - _2020: create(n._2020, y._2020, Self::dateindex(y._2020))?, - _2021: create(n._2021, y._2021, Self::dateindex(y._2021))?, - _2022: create(n._2022, y._2022, Self::dateindex(y._2022))?, - _2023: create(n._2023, y._2023, Self::dateindex(y._2023))?, - _2024: create(n._2024, y._2024, Self::dateindex(y._2024))?, - _2025: create(n._2025, y._2025, Self::dateindex(y._2025))?, - _2026: create(n._2026, y._2026, Self::dateindex(y._2026))?, + _2015: create(n._2015, y._2015, Self::day1(y._2015))?, + _2016: create(n._2016, y._2016, Self::day1(y._2016))?, + _2017: create(n._2017, y._2017, Self::day1(y._2017))?, + _2018: create(n._2018, y._2018, Self::day1(y._2018))?, + _2019: create(n._2019, y._2019, Self::day1(y._2019))?, + _2020: create(n._2020, y._2020, Self::day1(y._2020))?, + _2021: create(n._2021, y._2021, Self::day1(y._2021))?, + _2022: create(n._2022, y._2022, Self::day1(y._2022))?, + _2023: create(n._2023, y._2023, Self::day1(y._2023))?, + _2024: create(n._2024, y._2024, Self::day1(y._2024))?, + _2025: create(n._2025, y._2025, Self::day1(y._2025))?, + _2026: create(n._2026, y._2026, Self::day1(y._2026))?, }) } - pub fn dateindex(year: u16) -> DateIndex { - DateIndex::try_from(Date::new(year, 1, 1)).unwrap() + pub(crate) fn day1(year: u16) -> Day1 { + Day1::try_from(Date::new(year, 1, 1)).unwrap() } - pub fn iter(&self) -> impl Iterator { + pub(crate) fn iter(&self) -> impl Iterator { [ &self._2015, &self._2016, @@ -117,7 +95,7 @@ impl ByDcaClass { .into_iter() } - pub fn iter_mut(&mut self) -> impl Iterator { + pub(crate) fn iter_mut(&mut self) -> impl Iterator { [ &mut self._2015, &mut self._2016, @@ -135,61 
+113,25 @@ impl ByDcaClass { .into_iter() } - pub fn iter_mut_with_dateindex(&mut self) -> impl Iterator { + pub(crate) fn start_days() -> [Day1; 12] { let y = DCA_CLASS_YEARS; [ - (&mut self._2015, Self::dateindex(y._2015)), - (&mut self._2016, Self::dateindex(y._2016)), - (&mut self._2017, Self::dateindex(y._2017)), - (&mut self._2018, Self::dateindex(y._2018)), - (&mut self._2019, Self::dateindex(y._2019)), - (&mut self._2020, Self::dateindex(y._2020)), - (&mut self._2021, Self::dateindex(y._2021)), - (&mut self._2022, Self::dateindex(y._2022)), - (&mut self._2023, Self::dateindex(y._2023)), - (&mut self._2024, Self::dateindex(y._2024)), - (&mut self._2025, Self::dateindex(y._2025)), - (&mut self._2026, Self::dateindex(y._2026)), - ] - .into_iter() - } - - pub fn dateindexes() -> [DateIndex; 12] { - let y = DCA_CLASS_YEARS; - [ - Self::dateindex(y._2015), - Self::dateindex(y._2016), - Self::dateindex(y._2017), - Self::dateindex(y._2018), - Self::dateindex(y._2019), - Self::dateindex(y._2020), - Self::dateindex(y._2021), - Self::dateindex(y._2022), - Self::dateindex(y._2023), - Self::dateindex(y._2024), - Self::dateindex(y._2025), - Self::dateindex(y._2026), + Self::day1(y._2015), + Self::day1(y._2016), + Self::day1(y._2017), + Self::day1(y._2018), + Self::day1(y._2019), + Self::day1(y._2020), + Self::day1(y._2021), + Self::day1(y._2022), + Self::day1(y._2023), + Self::day1(y._2024), + Self::day1(y._2025), + Self::day1(y._2026), ] } - pub fn zip(self, other: ByDcaClass) -> ByDcaClass<(T, U)> { - ByDcaClass { - _2015: (self._2015, other._2015), - _2016: (self._2016, other._2016), - _2017: (self._2017, other._2017), - _2018: (self._2018, other._2018), - _2019: (self._2019, other._2019), - _2020: (self._2020, other._2020), - _2021: (self._2021, other._2021), - _2022: (self._2022, other._2022), - _2023: (self._2023, other._2023), - _2024: (self._2024, other._2024), - _2025: (self._2025, other._2025), - _2026: (self._2026, other._2026), - } - } - - pub fn zip_ref<'a, 
U>(&'a self, other: &'a ByDcaClass) -> ByDcaClass<(&'a T, &'a U)> { + pub(crate) fn zip_ref<'a, U>(&'a self, other: &'a ByDcaClass) -> ByDcaClass<(&'a T, &'a U)> { ByDcaClass { _2015: (&self._2015, &other._2015), _2016: (&self._2016, &other._2016), @@ -206,7 +148,7 @@ impl ByDcaClass { } } - pub fn map U>(self, mut f: F) -> ByDcaClass { + pub(crate) fn map U>(self, mut f: F) -> ByDcaClass { ByDcaClass { _2015: f(self._2015), _2016: f(self._2016), diff --git a/crates/brk_computer/src/market/dca/by_period.rs b/crates/brk_computer/src/market/dca/by_period.rs index 4ccb29b07..356e64666 100644 --- a/crates/brk_computer/src/market/dca/by_period.rs +++ b/crates/brk_computer/src/market/dca/by_period.rs @@ -55,7 +55,7 @@ pub const DCA_CAGR_NAMES: ByDcaCagr<&'static str> = ByDcaCagr { }; /// Generic wrapper for DCA period-based data -#[derive(Default, Clone, Traversable)] +#[derive(Clone, Default, Traversable)] pub struct ByDcaPeriod { pub _1w: T, pub _1m: T, @@ -72,29 +72,7 @@ pub struct ByDcaPeriod { } impl ByDcaPeriod { - pub fn new(mut create: F) -> Self - where - F: FnMut(&'static str, u32) -> T, - { - let n = DCA_PERIOD_NAMES; - let d = DCA_PERIOD_DAYS; - Self { - _1w: create(n._1w, d._1w), - _1m: create(n._1m, d._1m), - _3m: create(n._3m, d._3m), - _6m: create(n._6m, d._6m), - _1y: create(n._1y, d._1y), - _2y: create(n._2y, d._2y), - _3y: create(n._3y, d._3y), - _4y: create(n._4y, d._4y), - _5y: create(n._5y, d._5y), - _6y: create(n._6y, d._6y), - _8y: create(n._8y, d._8y), - _10y: create(n._10y, d._10y), - } - } - - pub fn try_new(mut create: F) -> Result + pub(crate) fn try_new(mut create: F) -> Result where F: FnMut(&'static str, u32) -> Result, { @@ -116,25 +94,7 @@ impl ByDcaPeriod { }) } - pub fn iter(&self) -> impl Iterator { - [ - &self._1w, - &self._1m, - &self._3m, - &self._6m, - &self._1y, - &self._2y, - &self._3y, - &self._4y, - &self._5y, - &self._6y, - &self._8y, - &self._10y, - ] - .into_iter() - } - - pub fn iter_mut(&mut self) -> impl Iterator { + 
pub(crate) fn iter_mut(&mut self) -> impl Iterator { [ &mut self._1w, &mut self._1m, @@ -152,7 +112,7 @@ impl ByDcaPeriod { .into_iter() } - pub fn iter_with_days(&self) -> impl Iterator { + pub(crate) fn iter_with_days(&self) -> impl Iterator { let d = DCA_PERIOD_DAYS; [ (&self._1w, d._1w), @@ -171,7 +131,7 @@ impl ByDcaPeriod { .into_iter() } - pub fn iter_mut_with_days(&mut self) -> impl Iterator { + pub(crate) fn iter_mut_with_days(&mut self) -> impl Iterator { let d = DCA_PERIOD_DAYS; [ (&mut self._1w, d._1w), @@ -190,25 +150,7 @@ impl ByDcaPeriod { .into_iter() } - pub fn zip_mut<'a, U>(&'a mut self, other: &'a ByDcaPeriod) -> impl Iterator { - [ - (&mut self._1w, &other._1w), - (&mut self._1m, &other._1m), - (&mut self._3m, &other._3m), - (&mut self._6m, &other._6m), - (&mut self._1y, &other._1y), - (&mut self._2y, &other._2y), - (&mut self._3y, &other._3y), - (&mut self._4y, &other._4y), - (&mut self._5y, &other._5y), - (&mut self._6y, &other._6y), - (&mut self._8y, &other._8y), - (&mut self._10y, &other._10y), - ] - .into_iter() - } - - pub fn zip_mut_with_days<'a, U>( + pub(crate) fn zip_mut_with_days<'a, U>( &'a mut self, other: &'a ByDcaPeriod, ) -> impl Iterator { @@ -230,7 +172,7 @@ impl ByDcaPeriod { .into_iter() } - pub fn zip_ref<'a, U>(&'a self, other: &'a ByDcaPeriod) -> ByDcaPeriod<(&'a T, &'a U)> { + pub(crate) fn zip_ref<'a, U>(&'a self, other: &'a ByDcaPeriod) -> ByDcaPeriod<(&'a T, &'a U)> { ByDcaPeriod { _1w: (&self._1w, &other._1w), _1m: (&self._1m, &other._1m), @@ -247,7 +189,7 @@ impl ByDcaPeriod { } } - pub fn map U>(self, mut f: F) -> ByDcaPeriod { + pub(crate) fn map U>(self, mut f: F) -> ByDcaPeriod { ByDcaPeriod { _1w: f(self._1w), _1m: f(self._1m), @@ -264,32 +206,10 @@ impl ByDcaPeriod { } } - pub fn zip_mut2_with_days<'a, U, V>( - &'a mut self, - other1: &'a ByDcaPeriod, - other2: &'a ByDcaPeriod, - ) -> impl Iterator { - let d = DCA_PERIOD_DAYS; - [ - (&mut self._1w, &other1._1w, &other2._1w, d._1w), - (&mut self._1m, 
&other1._1m, &other2._1m, d._1m), - (&mut self._3m, &other1._3m, &other2._3m, d._3m), - (&mut self._6m, &other1._6m, &other2._6m, d._6m), - (&mut self._1y, &other1._1y, &other2._1y, d._1y), - (&mut self._2y, &other1._2y, &other2._2y, d._2y), - (&mut self._3y, &other1._3y, &other2._3y, d._3y), - (&mut self._4y, &other1._4y, &other2._4y, d._4y), - (&mut self._5y, &other1._5y, &other2._5y, d._5y), - (&mut self._6y, &other1._6y, &other2._6y, d._6y), - (&mut self._8y, &other1._8y, &other2._8y, d._8y), - (&mut self._10y, &other1._10y, &other2._10y, d._10y), - ] - .into_iter() - } } /// Generic wrapper for DCA CAGR data (periods >= 2 years) -#[derive(Default, Clone, Traversable)] +#[derive(Clone, Default, Traversable)] pub struct ByDcaCagr { pub _2y: T, pub _3y: T, @@ -301,24 +221,7 @@ pub struct ByDcaCagr { } impl ByDcaCagr { - pub fn new(mut create: F) -> Self - where - F: FnMut(&'static str, u32) -> T, - { - let n = DCA_CAGR_NAMES; - let d = DCA_CAGR_DAYS; - Self { - _2y: create(n._2y, d._2y), - _3y: create(n._3y, d._3y), - _4y: create(n._4y, d._4y), - _5y: create(n._5y, d._5y), - _6y: create(n._6y, d._6y), - _8y: create(n._8y, d._8y), - _10y: create(n._10y, d._10y), - } - } - - pub fn try_new(mut create: F) -> Result + pub(crate) fn try_new(mut create: F) -> Result where F: FnMut(&'static str, u32) -> Result, { @@ -335,48 +238,8 @@ impl ByDcaCagr { }) } - pub fn iter(&self) -> impl Iterator { - [ - &self._2y, - &self._3y, - &self._4y, - &self._5y, - &self._6y, - &self._8y, - &self._10y, - ] - .into_iter() - } - - pub fn iter_mut(&mut self) -> impl Iterator { - [ - &mut self._2y, - &mut self._3y, - &mut self._4y, - &mut self._5y, - &mut self._6y, - &mut self._8y, - &mut self._10y, - ] - .into_iter() - } - - pub fn iter_mut_with_days(&mut self) -> impl Iterator { - let d = DCA_CAGR_DAYS; - [ - (&mut self._2y, d._2y), - (&mut self._3y, d._3y), - (&mut self._4y, d._4y), - (&mut self._5y, d._5y), - (&mut self._6y, d._6y), - (&mut self._8y, d._8y), - (&mut self._10y, 
d._10y), - ] - .into_iter() - } - /// Zip with the matching subset of a ByDcaPeriod - pub fn zip_mut_with_period<'a, U>( + pub(crate) fn zip_mut_with_period<'a, U>( &'a mut self, period: &'a ByDcaPeriod, ) -> impl Iterator { diff --git a/crates/brk_computer/src/market/dca/compute.rs b/crates/brk_computer/src/market/dca/compute.rs index e35b9e1ba..3fff02b99 100644 --- a/crates/brk_computer/src/market/dca/compute.rs +++ b/crates/brk_computer/src/market/dca/compute.rs @@ -1,138 +1,235 @@ use brk_error::Result; -use brk_types::{Close, Dollars, StoredF32, StoredU32}; -use vecdb::Exit; +use brk_types::{Bitcoin, Day1, Date, Dollars, Height, Sats, StoredF32, StoredU32}; +use vecdb::{AnyVec, EagerVec, Exit, ReadableVec, PcoVec, PcoVecValue, VecIndex}; use super::{ByDcaClass, ByDcaPeriod, Vecs}; use crate::{ - ComputeIndexes, - internal::{ComputedFromDateLast, LazyBinaryFromDateLast}, + ComputeIndexes, blocks, indexes, + internal::{ComputedFromHeightLast, LazyBinaryFromHeightLast}, market::lookback, - price, - traits::{ComputeDCAAveragePriceViaLen, ComputeDCAStackViaLen, ComputeLumpSumStackViaLen}, + prices, }; +const DCA_AMOUNT: Dollars = Dollars::mint(100.0); + impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, - price: &price::Vecs, + indexes: &indexes::Vecs, + prices: &prices::Vecs, + blocks: &blocks::Vecs, lookback: &lookback::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - let close = &price.usd.split.close.dateindex; + let h2d = &indexes.height.day1; + let close = &prices.usd.split.close.day1; - // DCA by period - stack - for (stack, days) in self.period_stack.iter_mut_with_days() { - stack.compute_all(Some(price), starting_indexes, exit, |v| { - v.compute_dca_stack_via_len( - starting_indexes.dateindex, - close, - days as usize, - exit, - )?; - Ok(()) - })?; + let first_price_di = Day1::try_from(Date::new(2010, 7, 12)) + .unwrap() + .to_usize(); + + // Compute per-height DCA sats contribution once (reused by all periods). 
+ // Value = sats_from_dca(close_price) on day-boundary blocks, Sats::ZERO otherwise. + { + let mut last_di: Option = None; + self.dca_sats_per_day.compute_transform( + starting_indexes.height, + h2d, + |(h, di, _)| { + let same_day = last_di.is_some_and(|prev| prev == di); + last_di = Some(di); + if same_day { + (h, Sats::ZERO) + } else { + let s = close.collect_one(di).map(sats_from_dca).unwrap_or(Sats::ZERO); + (h, s) + } + }, + exit, + )?; } - // DCA by period - average_price (needs stack's dateindex) + // DCA by period - stack (rolling sum via _start vecs) + for (stack, days) in self.period_stack.iter_mut_with_days() { + let window_starts = blocks.count.start_vec(days as usize); + stack.sats.height.compute_rolling_sum( + starting_indexes.height, + window_starts, + &self.dca_sats_per_day, + exit, + )?; + } + + // DCA by period - average price (derived from stack) + let sh = starting_indexes.height.to_usize(); for (average_price, stack, days) in self .period_average_price .zip_mut_with_days(&self.period_stack) { - average_price.compute_all(starting_indexes, exit, |v| { - v.compute_dca_average_price_via_len( - starting_indexes.dateindex, - &stack.sats_dateindex, - days as usize, - exit, - )?; - Ok(()) - })?; + let days = days as usize; + let stack_data = stack.sats.height.collect_range_at(sh, stack.sats.height.len()); + average_price.usd.height.compute_transform( + starting_indexes.height, + h2d, + |(h, di, _)| { + let di_usize = di.to_usize(); + let stack_sats = stack_data[h.to_usize() - sh]; + let avg = if di_usize > first_price_di { + let num_days = days + .min(di_usize + 1) + .min(di_usize + 1 - first_price_di); + DCA_AMOUNT * num_days / Bitcoin::from(stack_sats) + } else { + Dollars::NAN + }; + (h, avg) + }, + exit, + )?; } // DCA by period - CAGR (computed from returns) for (cagr, returns, days) in self.period_cagr.zip_mut_with_period(&self.period_returns) { - cagr.compute_all(starting_indexes, exit, |v| { - v.compute_cagr( - starting_indexes.dateindex, - 
&returns.dateindex, - days as usize, - exit, - )?; - Ok(()) - })?; + let years = days as f32 / 365.0; + let returns_data: Vec = returns.day1.collect(); + cagr.height.compute_transform( + starting_indexes.height, + h2d, + |(h, di, _)| { + let v = returns_data.get(di.to_usize()) + .map(|r| ((**r / 100.0 + 1.0).powf(1.0 / years) - 1.0) * 100.0) + .unwrap_or(0.0); + (h, StoredF32::from(v)) + }, + exit, + )?; } // DCA by period - profitability - compute_period_profitability( + compute_period_rolling( &mut self.period_days_in_profit, &mut self.period_days_in_loss, &mut self.period_min_return, &mut self.period_max_return, &self.period_returns, + blocks, + h2d, starting_indexes, exit, )?; - // Lump sum by period - stack (for comparison with DCA) + // Lump sum by period - stack let lookback_dca = lookback.price_ago.as_dca_period(); for (stack, lookback_price, days) in self.period_lump_sum_stack.zip_mut_with_days(&lookback_dca) { - stack.compute_all(Some(price), starting_indexes, exit, |v| { - v.compute_lump_sum_stack_via_len( - starting_indexes.dateindex, - close, - &lookback_price.dateindex, - days as usize, - exit, - )?; - Ok(()) - })?; + let total_invested = DCA_AMOUNT * days as usize; + let lookback_data = lookback_price.usd.height.collect_range_at(sh, lookback_price.usd.height.len()); + stack.sats.height.compute_transform( + starting_indexes.height, + h2d, + |(h, _di, _)| { + let lp = lookback_data[h.to_usize() - sh]; + let sats = if lp == Dollars::ZERO { + Sats::ZERO + } else { + Sats::from(Bitcoin::from(total_invested / lp)) + }; + (h, sats) + }, + exit, + )?; } // Lump sum by period - profitability - compute_period_profitability( + compute_period_rolling( &mut self.period_lump_sum_days_in_profit, &mut self.period_lump_sum_days_in_loss, &mut self.period_lump_sum_min_return, &mut self.period_lump_sum_max_return, &self.period_lump_sum_returns, + blocks, + h2d, starting_indexes, exit, )?; - // DCA by year class - stack and average_price - let dateindexes = 
super::ByDcaClass::<()>::dateindexes(); - for ((stack, average_price), dateindex) in self - .class_stack - .iter_mut() - .zip(self.class_average_price.iter_mut()) - .zip(dateindexes) - { - stack.compute_all(Some(price), starting_indexes, exit, |v| { - v.compute_dca_stack_via_from(starting_indexes.dateindex, close, dateindex, exit)?; - Ok(()) - })?; + // DCA by year class - stack (cumulative sum from class start date) + let start_days = super::ByDcaClass::<()>::start_days(); + for (stack, day1) in self.class_stack.iter_mut().zip(start_days) { + let mut last_di: Option = None; - average_price.compute_all(starting_indexes, exit, |v| { - v.compute_dca_average_price_via_from( - starting_indexes.dateindex, - &stack.sats_dateindex, - dateindex, - exit, - )?; - Ok(()) - })?; + stack.sats.height.compute_transform( + starting_indexes.height, + h2d, + |(h, di, this)| { + let hi = h.to_usize(); + + if last_di.is_none() && hi > 0 { + last_di = Some(h2d.collect_one_at(hi - 1).unwrap()); + } + + if di < day1 { + last_di = Some(di); + return (h, Sats::ZERO); + } + + let prev_di = last_di; + last_di = Some(di); + + let same_day = prev_di.is_some_and(|prev| prev == di); + if same_day { + (h, this.collect_one_at(hi - 1).unwrap_or_default()) + } else { + let prev = if hi > 0 && prev_di.is_some_and(|pd| pd >= day1) { + this.collect_one_at(hi - 1).unwrap_or_default() + } else { + Sats::ZERO + }; + let s = close.collect_one(di).map(sats_from_dca).unwrap_or(Sats::ZERO); + (h, prev + s) + } + }, + exit, + )?; + } + + // DCA by year class - average price (derived from stack) + let start_days = super::ByDcaClass::<()>::start_days(); + for ((average_price, stack), from) in self + .class_average_price + .iter_mut() + .zip(self.class_stack.iter()) + .zip(start_days) + { + let from_usize = from.to_usize(); + let stack_data = stack.sats.height.collect_range_at(sh, stack.sats.height.len()); + average_price.usd.height.compute_transform( + starting_indexes.height, + h2d, + |(h, di, _)| { + let 
di_usize = di.to_usize(); + if di_usize < from_usize { + return (h, Dollars::NAN); + } + let stack_sats = stack_data[h.to_usize() - sh]; + let num_days = di_usize + 1 - from_usize; + let avg = DCA_AMOUNT * num_days / Bitcoin::from(stack_sats); + (h, avg) + }, + exit, + )?; } // DCA by year class - profitability - compute_class_profitability( + compute_class_cumulative( &mut self.class_days_in_profit, &mut self.class_days_in_loss, &mut self.class_min_return, &mut self.class_max_return, &self.class_returns, + h2d, starting_indexes, exit, )?; @@ -141,12 +238,23 @@ impl Vecs { } } -fn compute_period_profitability( - days_in_profit: &mut ByDcaPeriod>, - days_in_loss: &mut ByDcaPeriod>, - min_return: &mut ByDcaPeriod>, - max_return: &mut ByDcaPeriod>, - returns: &ByDcaPeriod, Dollars>>, +fn sats_from_dca(price: Dollars) -> Sats { + if price == Dollars::ZERO { + Sats::ZERO + } else { + Sats::from(Bitcoin::from(DCA_AMOUNT / price)) + } +} + +#[allow(clippy::too_many_arguments)] +fn compute_period_rolling( + days_in_profit: &mut ByDcaPeriod>, + days_in_loss: &mut ByDcaPeriod>, + min_return: &mut ByDcaPeriod>, + max_return: &mut ByDcaPeriod>, + returns: &ByDcaPeriod>, + blocks: &blocks::Vecs, + h2d: &EagerVec>, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { @@ -157,57 +265,54 @@ fn compute_period_profitability( .zip(max_return.iter_mut()) .zip(returns.iter_with_days()) { - dip.compute_all(starting_indexes, exit, |v| { - Ok(v.compute_rolling_count( - starting_indexes.dateindex, - &ret.dateindex, - days as usize, - |r| f32::from(*r) > 0.0, - exit, - )?) - })?; + let window_starts = blocks.count.start_vec(days as usize); + let returns_data: Vec = ret.day1.collect(); - dil.compute_all(starting_indexes, exit, |v| { - Ok(v.compute_rolling_count( - starting_indexes.dateindex, - &ret.dateindex, - days as usize, - |r| f32::from(*r) < 0.0, - exit, - )?) 
- })?; + compute_rolling( + &mut dip.height, h2d, &returns_data, window_starts, starting_indexes.height, exit, + |buf| StoredU32::from(buf.iter().copied().filter(|r| **r > 0.0).count()), + )?; - minr.compute_all(starting_indexes, exit, |v| { - Ok(v.compute_min( - starting_indexes.dateindex, - &ret.dateindex, - days as usize, - exit, - )?) - })?; + compute_rolling( + &mut dil.height, h2d, &returns_data, window_starts, starting_indexes.height, exit, + |buf| StoredU32::from(buf.iter().copied().filter(|r| **r < 0.0).count()), + )?; - maxr.compute_all(starting_indexes, exit, |v| { - Ok(v.compute_max( - starting_indexes.dateindex, - &ret.dateindex, - days as usize, - exit, - )?) - })?; + compute_rolling( + &mut minr.height, h2d, &returns_data, window_starts, starting_indexes.height, exit, + |buf| { + buf.iter() + .copied() + .reduce(|a, b| if *b < *a { b } else { a }) + .unwrap_or_default() + }, + )?; + + compute_rolling( + &mut maxr.height, h2d, &returns_data, window_starts, starting_indexes.height, exit, + |buf| { + buf.iter() + .copied() + .reduce(|a, b| if *b > *a { b } else { a }) + .unwrap_or_default() + }, + )?; } Ok(()) } -fn compute_class_profitability( - days_in_profit: &mut ByDcaClass>, - days_in_loss: &mut ByDcaClass>, - min_return: &mut ByDcaClass>, - max_return: &mut ByDcaClass>, - returns: &ByDcaClass, Dollars>>, +#[allow(clippy::too_many_arguments)] +fn compute_class_cumulative( + days_in_profit: &mut ByDcaClass>, + days_in_loss: &mut ByDcaClass>, + min_return: &mut ByDcaClass>, + max_return: &mut ByDcaClass>, + returns: &ByDcaClass>, + h2d: &EagerVec>, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - let dateindexes = ByDcaClass::<()>::dateindexes(); + let start_days = ByDcaClass::<()>::start_days(); for (((((dip, dil), minr), maxr), ret), from) in days_in_profit .iter_mut() @@ -215,45 +320,133 @@ fn compute_class_profitability( .zip(min_return.iter_mut()) .zip(max_return.iter_mut()) .zip(returns.iter()) - .zip(dateindexes) + 
.zip(start_days) { - dip.compute_all(starting_indexes, exit, |v| { - Ok(v.compute_cumulative_count_from( - starting_indexes.dateindex, - &ret.dateindex, - from, - |r| f32::from(*r) > 0.0, - exit, - )?) - })?; + compute_cumulative( + &mut dip.height, h2d, &*ret.day1, from, starting_indexes.height, exit, + StoredU32::ZERO, + |prev, ret| if *ret > 0.0 { prev + StoredU32::ONE } else { prev }, + )?; - dil.compute_all(starting_indexes, exit, |v| { - Ok(v.compute_cumulative_count_from( - starting_indexes.dateindex, - &ret.dateindex, - from, - |r| f32::from(*r) < 0.0, - exit, - )?) - })?; + compute_cumulative( + &mut dil.height, h2d, &*ret.day1, from, starting_indexes.height, exit, + StoredU32::ZERO, + |prev, ret| if *ret < 0.0 { prev + StoredU32::ONE } else { prev }, + )?; - minr.compute_all(starting_indexes, exit, |v| { - Ok(v.compute_all_time_low_from( - starting_indexes.dateindex, - &ret.dateindex, - from, - exit, - )?) - })?; + compute_cumulative( + &mut minr.height, h2d, &*ret.day1, from, starting_indexes.height, exit, + StoredF32::from(f32::MAX), + |prev, ret| if *ret < *prev { ret } else { prev }, + )?; - maxr.compute_all(starting_indexes, exit, |v| { - Ok(v.compute_all_time_high_from( - starting_indexes.dateindex, - &ret.dateindex, - from, - exit, - )?) - })?; + compute_cumulative( + &mut maxr.height, h2d, &*ret.day1, from, starting_indexes.height, exit, + StoredF32::from(f32::MIN), + |prev, ret| if *ret > *prev { ret } else { prev }, + )?; } Ok(()) } + +/// Compute a rolling day-window metric at height level using _start vecs. +#[allow(clippy::too_many_arguments)] +fn compute_rolling( + output: &mut EagerVec>, + h2d: &EagerVec>, + returns_data: &[StoredF32], + window_starts: &EagerVec>, + starting_height: Height, + exit: &Exit, + mut aggregate: impl FnMut(&[StoredF32]) -> T, +) -> Result<()> { + // Cursor + cache avoids per-height PcoVec page decompression for the + // h2d lookback read. 
Window-start heights are non-decreasing so the + // cursor only moves forward; the cache handles repeated values. + let mut h2d_cursor = h2d.cursor(); + let mut last_ws = Height::ZERO; + let mut last_ws_di = Day1::default(); + + output.compute_transform2( + starting_height, + h2d, + window_starts, + |(h, di, window_start, ..)| { + let window_start_di = if window_start == last_ws { + last_ws_di + } else { + let target = window_start.to_usize(); + let ws_di = if target >= h2d_cursor.position() { + h2d_cursor.advance(target - h2d_cursor.position()); + h2d_cursor.next().unwrap_or_default() + } else { + // Cursor past target (batch boundary); rare fallback + h2d.collect_one(window_start).unwrap_or_default() + }; + last_ws = window_start; + last_ws_di = ws_di; + ws_di + }; + let start = window_start_di.to_usize(); + let end = di.to_usize() + 1; + if start >= end { + return (h, T::default()); + } + (h, aggregate(&returns_data[start..end])) + }, + exit, + )?; + + Ok(()) +} + +/// Compute a cumulative metric at height level starting from a fixed date. 
+#[allow(clippy::too_many_arguments)] +fn compute_cumulative( + output: &mut EagerVec>, + h2d: &EagerVec>, + returns: &impl ReadableVec, + from_day1: Day1, + starting_height: Height, + exit: &Exit, + initial: T, + mut accumulate: impl FnMut(T, StoredF32) -> T, +) -> Result<()> { + let mut last_di: Option = None; + + output.compute_transform( + starting_height, + h2d, + |(h, di, this)| { + let hi = h.to_usize(); + + if last_di.is_none() && hi > 0 { + last_di = Some(h2d.collect_one_at(hi - 1).unwrap()); + } + + if di < from_day1 { + last_di = Some(di); + return (h, T::default()); + } + + let prev_di = last_di; + last_di = Some(di); + + let same_day = prev_di.is_some_and(|prev| prev == di); + if same_day { + (h, this.collect_one_at(hi - 1).unwrap_or_default()) + } else { + let prev = if hi > 0 && prev_di.is_some_and(|pd| pd >= from_day1) { + this.collect_one_at(hi - 1).unwrap_or_default() + } else { + initial + }; + let ret = returns.collect_one(di).unwrap_or_default(); + (h, accumulate(prev, ret)) + } + }, + exit, + )?; + + Ok(()) +} diff --git a/crates/brk_computer/src/market/dca/import.rs b/crates/brk_computer/src/market/dca/import.rs index 29e7db226..4f7745274 100644 --- a/crates/brk_computer/src/market/dca/import.rs +++ b/crates/brk_computer/src/market/dca/import.rs @@ -1,56 +1,71 @@ use brk_error::Result; use brk_types::Version; -use vecdb::{Database, IterableCloneableVec}; +use vecdb::{Database, ImportableVec, ReadableCloneableVec}; use super::{ByDcaCagr, ByDcaClass, ByDcaPeriod, DCA_CLASS_NAMES, DCA_PERIOD_NAMES, Vecs}; use crate::{ indexes, internal::{ - ComputedFromDateLast, LazyBinaryFromDateLast, PercentageDiffCloseDollars, Price, - ValueFromDateLast, + ComputedFromHeightLast, LazyBinaryFromHeightLast, PercentageDiffDollars, PriceFromHeight, + ValueFromHeightLast, }, market::lookback, - price, + prices, }; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, indexes: &indexes::Vecs, - price: &price::Vecs, + 
prices: &prices::Vecs, lookback: &lookback::Vecs, ) -> Result { // DCA by period - stack (KISS) let period_stack = ByDcaPeriod::try_new(|name, _days| { - ValueFromDateLast::forced_import(db, &format!("{name}_dca_stack"), version, true, indexes) + ValueFromHeightLast::forced_import( + db, + &format!("{name}_dca_stack"), + version, + indexes, + prices, + ) })?; // DCA by period - average price let period_average_price = ByDcaPeriod::try_new(|name, _days| { - Price::forced_import(db, &format!("{name}_dca_average_price"), version, indexes) + PriceFromHeight::forced_import( + db, + &format!("{name}_dca_average_price"), + version, + indexes, + ) })?; let period_returns = DCA_PERIOD_NAMES .zip_ref(&period_average_price) .map(|(name, average_price)| { - LazyBinaryFromDateLast::from_computed_both_last::( + LazyBinaryFromHeightLast::from_height_and_derived_last::< + PercentageDiffDollars, + >( &format!("{name}_dca_returns"), version, - &price.usd.split.close, - average_price, + prices.usd.price.read_only_boxed_clone(), + average_price.height.read_only_boxed_clone(), + &prices.usd.split.close, + &average_price.rest, ) }); // DCA by period - CAGR let period_cagr = ByDcaCagr::try_new(|name, _days| { - ComputedFromDateLast::forced_import(db, &format!("{name}_dca_cagr"), version, indexes) + ComputedFromHeightLast::forced_import(db, &format!("{name}_dca_cagr"), version, indexes) })?; // DCA by period - profitability let period_days_in_profit = ByDcaPeriod::try_new(|name, _days| { - ComputedFromDateLast::forced_import( + ComputedFromHeightLast::forced_import( db, &format!("{name}_dca_days_in_profit"), version + Version::ONE, @@ -59,7 +74,7 @@ impl Vecs { })?; let period_days_in_loss = ByDcaPeriod::try_new(|name, _days| { - ComputedFromDateLast::forced_import( + ComputedFromHeightLast::forced_import( db, &format!("{name}_dca_days_in_loss"), version + Version::ONE, @@ -68,7 +83,7 @@ impl Vecs { })?; let period_min_return = ByDcaPeriod::try_new(|name, _days| { - 
ComputedFromDateLast::forced_import( + ComputedFromHeightLast::forced_import( db, &format!("{name}_dca_min_return"), version, @@ -77,7 +92,7 @@ impl Vecs { })?; let period_max_return = ByDcaPeriod::try_new(|name, _days| { - ComputedFromDateLast::forced_import( + ComputedFromHeightLast::forced_import( db, &format!("{name}_dca_max_return"), version, @@ -87,33 +102,36 @@ impl Vecs { // Lump sum by period - stack (KISS) let period_lump_sum_stack = ByDcaPeriod::try_new(|name, _days| { - ValueFromDateLast::forced_import( + ValueFromHeightLast::forced_import( db, &format!("{name}_lump_sum_stack"), version, - true, indexes, + prices, ) })?; // Lump sum by period - returns - let period_lump_sum_returns = DCA_PERIOD_NAMES - .zip_ref(&lookback.price_ago.as_dca_period()) - .map(|(name, lookback_price)| { - LazyBinaryFromDateLast::from_derived_last_and_computed_last::< - PercentageDiffCloseDollars, - >( - &format!("{name}_lump_sum_returns"), - version, - price.usd.split.close.dateindex.boxed_clone(), - &price.usd.split.close.rest, - lookback_price, - ) - }); + let lookback_dca = lookback.price_ago.as_dca_period(); + let period_lump_sum_returns = + DCA_PERIOD_NAMES + .zip_ref(&lookback_dca) + .map(|(name, lookback_price)| { + LazyBinaryFromHeightLast::from_height_and_derived_last::< + PercentageDiffDollars, + >( + &format!("{name}_lump_sum_returns"), + version, + prices.usd.price.read_only_boxed_clone(), + lookback_price.height.read_only_boxed_clone(), + &prices.usd.split.close, + &lookback_price.rest, + ) + }); // Lump sum by period - profitability let period_lump_sum_days_in_profit = ByDcaPeriod::try_new(|name, _days| { - ComputedFromDateLast::forced_import( + ComputedFromHeightLast::forced_import( db, &format!("{name}_lump_sum_days_in_profit"), version + Version::ONE, @@ -122,7 +140,7 @@ impl Vecs { })?; let period_lump_sum_days_in_loss = ByDcaPeriod::try_new(|name, _days| { - ComputedFromDateLast::forced_import( + ComputedFromHeightLast::forced_import( db, 
&format!("{name}_lump_sum_days_in_loss"), version + Version::ONE, @@ -131,7 +149,7 @@ impl Vecs { })?; let period_lump_sum_min_return = ByDcaPeriod::try_new(|name, _days| { - ComputedFromDateLast::forced_import( + ComputedFromHeightLast::forced_import( db, &format!("{name}_lump_sum_min_return"), version, @@ -140,7 +158,7 @@ impl Vecs { })?; let period_lump_sum_max_return = ByDcaPeriod::try_new(|name, _days| { - ComputedFromDateLast::forced_import( + ComputedFromHeightLast::forced_import( db, &format!("{name}_lump_sum_max_return"), version, @@ -149,30 +167,40 @@ impl Vecs { })?; // DCA by year class - stack (KISS) - let class_stack = ByDcaClass::try_new(|name, _year, _dateindex| { - ValueFromDateLast::forced_import(db, &format!("{name}_stack"), version, true, indexes) + let class_stack = ByDcaClass::try_new(|name, _year, _day1| { + ValueFromHeightLast::forced_import( + db, + &format!("{name}_stack"), + version, + indexes, + prices, + ) })?; // DCA by year class - average price - let class_average_price = ByDcaClass::try_new(|name, _year, _dateindex| { - Price::forced_import(db, &format!("{name}_average_price"), version, indexes) + let class_average_price = ByDcaClass::try_new(|name, _year, _day1| { + PriceFromHeight::forced_import(db, &format!("{name}_average_price"), version, indexes) })?; let class_returns = DCA_CLASS_NAMES .zip_ref(&class_average_price) .map(|(name, average_price)| { - LazyBinaryFromDateLast::from_computed_both_last::( + LazyBinaryFromHeightLast::from_height_and_derived_last::< + PercentageDiffDollars, + >( &format!("{name}_returns"), version, - &price.usd.split.close, - average_price, + prices.usd.price.read_only_boxed_clone(), + average_price.height.read_only_boxed_clone(), + &prices.usd.split.close, + &average_price.rest, ) }); // DCA by year class - profitability - let class_days_in_profit = ByDcaClass::try_new(|name, _year, _dateindex| { - ComputedFromDateLast::forced_import( + let class_days_in_profit = ByDcaClass::try_new(|name, _year, 
_day1| { + ComputedFromHeightLast::forced_import( db, &format!("{name}_days_in_profit"), version, @@ -180,8 +208,8 @@ impl Vecs { ) })?; - let class_days_in_loss = ByDcaClass::try_new(|name, _year, _dateindex| { - ComputedFromDateLast::forced_import( + let class_days_in_loss = ByDcaClass::try_new(|name, _year, _day1| { + ComputedFromHeightLast::forced_import( db, &format!("{name}_days_in_loss"), version, @@ -189,25 +217,16 @@ impl Vecs { ) })?; - let class_min_return = ByDcaClass::try_new(|name, _year, _dateindex| { - ComputedFromDateLast::forced_import( - db, - &format!("{name}_min_return"), - version, - indexes, - ) + let class_min_return = ByDcaClass::try_new(|name, _year, _day1| { + ComputedFromHeightLast::forced_import(db, &format!("{name}_min_return"), version, indexes) })?; - let class_max_return = ByDcaClass::try_new(|name, _year, _dateindex| { - ComputedFromDateLast::forced_import( - db, - &format!("{name}_max_return"), - version, - indexes, - ) + let class_max_return = ByDcaClass::try_new(|name, _year, _day1| { + ComputedFromHeightLast::forced_import(db, &format!("{name}_max_return"), version, indexes) })?; Ok(Self { + dca_sats_per_day: ImportableVec::forced_import(db, "dca_sats_per_day", version)?, period_stack, period_average_price, period_returns, diff --git a/crates/brk_computer/src/market/dca/vecs.rs b/crates/brk_computer/src/market/dca/vecs.rs index 1e2ba26e7..a66b05f04 100644 --- a/crates/brk_computer/src/market/dca/vecs.rs +++ b/crates/brk_computer/src/market/dca/vecs.rs @@ -1,42 +1,49 @@ use brk_traversable::Traversable; -use brk_types::{Close, Dollars, StoredF32, StoredU32}; +use brk_types::{Dollars, Height, Sats, StoredF32, StoredU32}; +use vecdb::{EagerVec, PcoVec, Rw, StorageMode}; use super::{ByDcaCagr, ByDcaClass, ByDcaPeriod}; -use crate::internal::{ComputedFromDateLast, LazyBinaryFromDateLast, Price, ValueFromDateLast}; +use crate::internal::{ + ComputedFromHeightLast, LazyBinaryFromHeightLast, Price, ValueFromHeightLast, +}; /// 
Dollar-cost averaging metrics by time period and year class -#[derive(Clone, Traversable)] -pub struct Vecs { +#[derive(Traversable)] +pub struct Vecs { + /// Per-height DCA sats contribution: sats_from_dca(close) on day boundaries, 0 otherwise. + /// Computed once, reused by all period rolling sums. + pub dca_sats_per_day: M::Stored>>, + // DCA by period - KISS types - pub period_stack: ByDcaPeriod, - pub period_average_price: ByDcaPeriod, - pub period_returns: ByDcaPeriod, Dollars>>, - pub period_cagr: ByDcaCagr>, + pub period_stack: ByDcaPeriod>, + pub period_average_price: ByDcaPeriod>>, + pub period_returns: ByDcaPeriod>, + pub period_cagr: ByDcaCagr>, // DCA by period - profitability - pub period_days_in_profit: ByDcaPeriod>, - pub period_days_in_loss: ByDcaPeriod>, - pub period_min_return: ByDcaPeriod>, - pub period_max_return: ByDcaPeriod>, + pub period_days_in_profit: ByDcaPeriod>, + pub period_days_in_loss: ByDcaPeriod>, + pub period_min_return: ByDcaPeriod>, + pub period_max_return: ByDcaPeriod>, // Lump sum by period (for comparison with DCA) - KISS types - pub period_lump_sum_stack: ByDcaPeriod, - pub period_lump_sum_returns: ByDcaPeriod, Dollars>>, + pub period_lump_sum_stack: ByDcaPeriod>, + pub period_lump_sum_returns: ByDcaPeriod>, // Lump sum by period - profitability - pub period_lump_sum_days_in_profit: ByDcaPeriod>, - pub period_lump_sum_days_in_loss: ByDcaPeriod>, - pub period_lump_sum_min_return: ByDcaPeriod>, - pub period_lump_sum_max_return: ByDcaPeriod>, + pub period_lump_sum_days_in_profit: ByDcaPeriod>, + pub period_lump_sum_days_in_loss: ByDcaPeriod>, + pub period_lump_sum_min_return: ByDcaPeriod>, + pub period_lump_sum_max_return: ByDcaPeriod>, // DCA by year class - KISS types - pub class_stack: ByDcaClass, - pub class_average_price: ByDcaClass, - pub class_returns: ByDcaClass, Dollars>>, + pub class_stack: ByDcaClass>, + pub class_average_price: ByDcaClass>>, + pub class_returns: ByDcaClass>, // DCA by year class - profitability - 
pub class_days_in_profit: ByDcaClass>, - pub class_days_in_loss: ByDcaClass>, - pub class_min_return: ByDcaClass>, - pub class_max_return: ByDcaClass>, + pub class_days_in_profit: ByDcaClass>, + pub class_days_in_loss: ByDcaClass>, + pub class_min_return: ByDcaClass>, + pub class_max_return: ByDcaClass>, } diff --git a/crates/brk_computer/src/market/import.rs b/crates/brk_computer/src/market/import.rs index 312571396..ca35f7dab 100644 --- a/crates/brk_computer/src/market/import.rs +++ b/crates/brk_computer/src/market/import.rs @@ -5,7 +5,7 @@ use brk_traversable::Traversable; use brk_types::Version; use vecdb::{Database, PAGE_SIZE}; -use crate::{distribution, indexes, price, transactions}; +use crate::{distribution, indexes, prices, transactions}; use super::{ AthVecs, DcaVecs, IndicatorsVecs, LookbackVecs, MovingAverageVecs, RangeVecs, ReturnsVecs, @@ -13,11 +13,11 @@ use super::{ }; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( parent_path: &Path, parent_version: Version, indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, distribution: &distribution::Vecs, transactions: &transactions::Vecs, ) -> Result { @@ -26,20 +26,17 @@ impl Vecs { let version = parent_version; - let price = price.expect("price required for market"); - - let ath = AthVecs::forced_import(&db, version, indexes, price)?; + let ath = AthVecs::forced_import(&db, version, indexes, prices)?; let lookback = LookbackVecs::forced_import(&db, version, indexes)?; - let returns = ReturnsVecs::forced_import(&db, version, indexes, price, &lookback)?; + let returns = ReturnsVecs::forced_import(&db, version, indexes, prices, &lookback)?; let volatility = VolatilityVecs::forced_import(version, &returns); let range = RangeVecs::forced_import(&db, version, indexes)?; let moving_average = MovingAverageVecs::forced_import(&db, version, indexes)?; - let dca = DcaVecs::forced_import(&db, version, indexes, price, &lookback)?; + let dca = DcaVecs::forced_import(&db, 
version, indexes, prices, &lookback)?; let indicators = IndicatorsVecs::forced_import( &db, version, indexes, - true, distribution, transactions, &moving_average, diff --git a/crates/brk_computer/src/market/indicators/compute.rs b/crates/brk_computer/src/market/indicators/compute.rs index d8e6318f9..14126e022 100644 --- a/crates/brk_computer/src/market/indicators/compute.rs +++ b/crates/brk_computer/src/market/indicators/compute.rs @@ -1,219 +1,99 @@ use brk_error::Result; -use brk_types::{StoredF32, Version}; -use vecdb::{AnyVec, Exit, TypedVecIterator}; +use brk_types::{Day1, StoredF32}; +use vecdb::{Exit, ReadableVec}; -use super::{ - super::{moving_average, range, returns::Vecs as ReturnsVecs}, - Vecs, -}; -use crate::{ComputeIndexes, blocks, distribution, price}; +use super::{super::range, Vecs}; +use crate::{ComputeIndexes, blocks, distribution, indexes, mining, prices}; impl Vecs { #[allow(clippy::too_many_arguments)] - pub fn compute( + pub(crate) fn compute( &mut self, - rewards: &blocks::RewardsVecs, - returns: &ReturnsVecs, - moving_average: &moving_average::Vecs, + indexes: &indexes::Vecs, + rewards: &mining::RewardsVecs, + returns: &super::super::returns::Vecs, range: &range::Vecs, - price: &price::Vecs, + prices: &prices::Vecs, + blocks: &blocks::Vecs, distribution: &distribution::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - if let (Some(puell), Some(sma), Some(coinbase_dollars)) = ( - self.puell_multiple.as_mut(), - rewards.subsidy_usd_1y_sma.as_ref(), - rewards.coinbase.dollars.as_ref(), - ) { - let date_to_coinbase_usd_sum = &coinbase_dollars.dateindex.sum_cum.sum.0; - - puell.compute_all(starting_indexes, exit, |v| { - v.compute_divide( - starting_indexes.dateindex, - date_to_coinbase_usd_sum, - &sma.dateindex, - exit, - )?; - Ok(()) - })?; - } - - let returns_dateindex = &returns.price_returns._1d.dateindex; - - self.rsi_gains.compute_transform( - starting_indexes.dateindex, - returns_dateindex, - |(i, ret, ..)| (i, 
StoredF32::from((*ret).max(0.0))), + self.puell_multiple.height.compute_divide( + starting_indexes.height, + &rewards.coinbase.usd.height, + &rewards.subsidy_usd_1y_sma.height, exit, )?; - self.rsi_losses.compute_transform( - starting_indexes.dateindex, - returns_dateindex, - |(i, ret, ..)| (i, StoredF32::from((-*ret).max(0.0))), - exit, - )?; - - self.rsi_average_gain_14d.compute_rma( - starting_indexes.dateindex, - &self.rsi_gains, - 14, - exit, - )?; - - self.rsi_average_loss_14d.compute_rma( - starting_indexes.dateindex, - &self.rsi_losses, - 14, - exit, - )?; - - let ema12 = &moving_average - .price_12d_ema - .price - .as_ref() - .unwrap() - .dateindex; - let ema26 = &moving_average - .price_26d_ema - .price - .as_ref() - .unwrap() - .dateindex; - - self.macd_line.compute_transform2( - starting_indexes.dateindex, - ema12, - ema26, - |(i, a, b, _)| (i, StoredF32::from(*a - *b)), - exit, - )?; - - self.macd_signal - .compute_ema(starting_indexes.dateindex, &self.macd_line, 9, exit)?; - - // Stochastic RSI: StochRSI = (RSI - min) / (max - min) * 100 - self.rsi_14d_min - .compute_min(starting_indexes.dateindex, &self.rsi_14d, 14, exit)?; - - self.rsi_14d_max - .compute_max(starting_indexes.dateindex, &self.rsi_14d, 14, exit)?; - - self.stoch_rsi.compute_transform3( - starting_indexes.dateindex, - &self.rsi_14d, - &self.rsi_14d_min, - &self.rsi_14d_max, - |(i, rsi, min, max, ..)| { - let range = *max - *min; - let stoch = if range == 0.0 { - StoredF32::from(50.0) - } else { - StoredF32::from((*rsi - *min) / range * 100.0) - }; - (i, stoch) - }, - exit, - )?; - - self.stoch_rsi_k - .compute_sma(starting_indexes.dateindex, &self.stoch_rsi, 3, exit)?; - - self.stoch_rsi_d - .compute_sma(starting_indexes.dateindex, &self.stoch_rsi_k, 3, exit)?; - - // Stochastic Oscillator: K = (close - low_14) / (high_14 - low_14) * 100 + // Stochastic Oscillator: K = (close - low_2w) / (high_2w - low_2w) * 100 { - let close = &price.usd.split.close.dateindex; - let low_2w = 
&range.price_2w_min.dateindex; - let high_2w = &range.price_2w_max.dateindex; - self.stoch_k.compute_transform3( - starting_indexes.dateindex, - close, - low_2w, - high_2w, - |(i, close, low, high, ..)| { + let price = &prices.usd.price; + self.stoch_k.height.compute_transform3( + starting_indexes.height, + price, + &range.price_2w_min.height, + &range.price_2w_max.height, + |(h, close, low, high, ..)| { let range = *high - *low; let stoch = if range == 0.0 { StoredF32::from(50.0) } else { - StoredF32::from((**close - *low) / range * 100.0) + StoredF32::from(((*close - *low) / range * 100.0) as f32) }; - (i, stoch) + (h, stoch) }, exit, )?; - self.stoch_d - .compute_sma(starting_indexes.dateindex, &self.stoch_k, 3, exit)?; - } - - let amount_range = &distribution.utxo_cohorts.amount_range; - - let supply_vecs: Vec<_> = amount_range - .iter() - .map(|c| &c.metrics.supply.total.sats.dateindex.0) - .collect(); - let count_vecs: Vec<_> = amount_range - .iter() - .map(|c| &c.metrics.outputs.utxo_count.dateindex) - .collect(); - - if let Some(first_supply) = supply_vecs.first() - && supply_vecs.len() == count_vecs.len() - { - let version = supply_vecs - .iter() - .fold(Version::ZERO, |acc, v| acc + v.version()) - + count_vecs - .iter() - .fold(Version::ZERO, |acc, v| acc + v.version()); - let mut supply_iters: Vec<_> = supply_vecs.iter().map(|v| v.into_iter()).collect(); - let mut count_iters: Vec<_> = count_vecs.iter().map(|v| v.into_iter()).collect(); - - self.gini.compute_to( - starting_indexes.dateindex, - first_supply.len(), - version, - |dateindex| { - let buckets: Vec<(u64, u64)> = supply_iters - .iter_mut() - .zip(count_iters.iter_mut()) - .map(|(s, c)| { - let count: u64 = *c.get_unwrap(dateindex); - let supply: u64 = *s.get_unwrap(dateindex); - (count, supply) - }) - .collect(); - (dateindex, StoredF32::from(gini_from_lorenz(&buckets))) - }, + self.stoch_d.height.compute_rolling_average( + starting_indexes.height, + &blocks.count.height_3d_ago, + 
&self.stoch_k.height, exit, )?; } + // Pre-collect Height→Day1 mapping + let h2d: Vec = indexes.height.day1.collect(); + let total_heights = h2d.len(); + + // RSI per timeframe + for (tf, rsi_chain) in self.rsi.iter_mut() { + super::rsi::compute( + rsi_chain, + tf, + returns, + &h2d, + total_heights, + starting_indexes, + exit, + )?; + } + + // MACD per timeframe + for (tf, macd_chain) in self.macd.iter_mut() { + super::macd::compute( + macd_chain, + tf, + prices, + &h2d, + total_heights, + starting_indexes, + exit, + )?; + } + + // Gini (daily only, expanded to Height) + super::gini::compute( + &mut self.gini, + distribution, + &h2d, + total_heights, + starting_indexes, + exit, + )?; + Ok(()) } } - -fn gini_from_lorenz(buckets: &[(u64, u64)]) -> f32 { - let total_count: u64 = buckets.iter().map(|(c, _)| c).sum(); - let total_supply: u64 = buckets.iter().map(|(_, s)| s).sum(); - - if total_count == 0 || total_supply == 0 { - return 0.0; - } - - let (mut cum_count, mut cum_supply, mut area) = (0u64, 0u64, 0.0f64); - let (tc, ts) = (total_count as f64, total_supply as f64); - - for &(count, supply) in buckets { - let (p0, w0) = (cum_count as f64 / tc, cum_supply as f64 / ts); - cum_count += count; - cum_supply += supply; - let (p1, w1) = (cum_count as f64 / tc, cum_supply as f64 / ts); - area += (p1 - p0) * (w0 + w1) / 2.0; - } - - (1.0 - 2.0 * area) as f32 -} diff --git a/crates/brk_computer/src/market/indicators/gini.rs b/crates/brk_computer/src/market/indicators/gini.rs new file mode 100644 index 000000000..d60c99288 --- /dev/null +++ b/crates/brk_computer/src/market/indicators/gini.rs @@ -0,0 +1,110 @@ +use brk_error::Result; +use brk_types::{Day1, Sats, StoredF32, StoredU64, Version}; +use vecdb::{AnyStoredVec, AnyVec, Exit, ReadableVec, VecIndex, WritableVec}; + +use crate::{ComputeIndexes, distribution, internal::ComputedFromHeightLast}; + +pub(super) fn compute( + gini: &mut ComputedFromHeightLast, + distribution: &distribution::Vecs, + h2d: &[Day1], + 
total_heights: usize, + starting_indexes: &ComputeIndexes, + exit: &Exit, +) -> Result<()> { + let amount_range = &distribution.utxo_cohorts.amount_range; + + let supply_vecs: Vec<&_> = amount_range + .iter() + .map(|c| &c.metrics.supply.total.sats.day1) + .collect(); + let count_vecs: Vec<&_> = amount_range + .iter() + .map(|c| &c.metrics.outputs.utxo_count.day1) + .collect(); + + if supply_vecs.is_empty() || supply_vecs.len() != count_vecs.len() { + return Ok(()); + } + + let source_version = supply_vecs + .iter() + .fold(Version::ZERO, |acc, v| acc + v.version()) + + count_vecs + .iter() + .fold(Version::ZERO, |acc, v| acc + v.version()); + + gini.height + .validate_computed_version_or_reset(source_version)?; + gini.height + .truncate_if_needed_at(gini.height.len().min(starting_indexes.height.to_usize()))?; + + let start_height = gini.height.len(); + if start_height >= total_heights { + return Ok(()); + } + + // Pre-collect all daily data + let supply_data: Vec> = supply_vecs + .iter() + .map(|v| ReadableVec::collect(*v)) + .collect(); + let count_data: Vec> = count_vecs + .iter() + .map(|v| ReadableVec::collect(*v)) + .collect(); + let num_days = supply_data.first().map_or(0, |v| v.len()); + + // Compute gini per day in-memory + let mut gini_daily = Vec::with_capacity(num_days); + let mut buckets: Vec<(u64, u64)> = Vec::with_capacity(supply_data.len()); + for di in 0..num_days { + buckets.clear(); + buckets.extend(supply_data.iter().zip(count_data.iter()).map(|(s, c)| { + let count: u64 = c[di].into(); + let supply: u64 = s[di].into(); + (count, supply) + })); + gini_daily.push(gini_from_lorenz(&buckets)); + } + + // Expand to Height + (start_height..total_heights).for_each(|h| { + let di = h2d[h].to_usize(); + let val = if di < gini_daily.len() { + StoredF32::from(gini_daily[di]) + } else { + StoredF32::NAN + }; + gini.height.push(val); + }); + + { + let _lock = exit.lock(); + gini.height.write()?; + } + + Ok(()) +} + +fn gini_from_lorenz(buckets: &[(u64, 
u64)]) -> f32 { + let total_count: u64 = buckets.iter().map(|(c, _)| c).sum(); + let total_supply: u64 = buckets.iter().map(|(_, s)| s).sum(); + + if total_count == 0 || total_supply == 0 { + return 0.0; + } + + let (mut cum_count, mut cum_supply, mut area) = (0u64, 0u64, 0.0f64); + let (tc, ts) = (total_count as f64, total_supply as f64); + + for &(count, supply) in buckets { + let (p0, w0) = (cum_count as f64 / tc, cum_supply as f64 / ts); + cum_count += count; + cum_supply += supply; + let (p1, w1) = (cum_count as f64 / tc, cum_supply as f64 / ts); + area += (p1 - p0) * (w0 + w1) / 2.0; + } + + (1.0 - 2.0 * area) as f32 +} diff --git a/crates/brk_computer/src/market/indicators/import.rs b/crates/brk_computer/src/market/indicators/import.rs index def74c71d..ceeb92bfe 100644 --- a/crates/brk_computer/src/market/indicators/import.rs +++ b/crates/brk_computer/src/market/indicators/import.rs @@ -1,113 +1,140 @@ use brk_error::Result; use brk_types::Version; -use vecdb::{Database, EagerVec, ImportableVec, IterableCloneableVec, LazyVecFrom2}; +use vecdb::{Database, ReadableCloneableVec, LazyVecFrom2}; -use super::{super::moving_average, Vecs}; +use super::{ByIndicatorTimeframe, MacdChain, RsiChain, Vecs}; use crate::{ distribution, indexes, - internal::{ComputedFromDateLast, DifferenceF32, LazyBinaryFromDateLast, Ratio32, RsiFormula}, + internal::{ComputedFromHeightLast, DifferenceF32, LazyBinaryFromHeightLast, Ratio32, RsiFormula}, transactions, }; -const VERSION: Version = Version::ZERO; +const VERSION: Version = Version::ONE; + +impl RsiChain { + fn forced_import( + db: &Database, + tf: &str, + version: Version, + indexes: &indexes::Vecs, + ) -> Result { + macro_rules! import { + ($name:expr) => { + ComputedFromHeightLast::forced_import( + db, + &format!("rsi_{}_{}", $name, tf), + version, + indexes, + )? 
+ }; + } + + let average_gain = import!("avg_gain"); + let average_loss = import!("avg_loss"); + + let rsi = LazyVecFrom2::transformed::( + &format!("rsi_{tf}"), + version, + average_gain.height.read_only_boxed_clone(), + average_loss.height.read_only_boxed_clone(), + ); + + Ok(Self { + gains: import!("gains"), + losses: import!("losses"), + average_gain, + average_loss, + rsi, + rsi_min: import!("rsi_min"), + rsi_max: import!("rsi_max"), + stoch_rsi: import!("stoch_rsi"), + stoch_rsi_k: import!("stoch_rsi_k"), + stoch_rsi_d: import!("stoch_rsi_d"), + }) + } +} + +impl MacdChain { + fn forced_import( + db: &Database, + tf: &str, + version: Version, + indexes: &indexes::Vecs, + ) -> Result { + let line = ComputedFromHeightLast::forced_import( + db, + &format!("macd_line_{tf}"), + version, + indexes, + )?; + let signal = ComputedFromHeightLast::forced_import( + db, + &format!("macd_signal_{tf}"), + version, + indexes, + )?; + + let histogram = LazyVecFrom2::transformed::( + &format!("macd_histogram_{tf}"), + version, + line.height.read_only_boxed_clone(), + signal.height.read_only_boxed_clone(), + ); + + Ok(Self { + line, + signal, + histogram, + }) + } +} impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, indexes: &indexes::Vecs, - compute_dollars: bool, distribution: &distribution::Vecs, transactions: &transactions::Vecs, - moving_average: &moving_average::Vecs, + moving_average: &super::super::moving_average::Vecs, ) -> Result { let v = version + VERSION; - // NVT = Market Cap (KISS DateIndex) / Volume (Height) - let nvt = distribution - .utxo_cohorts - .all - .metrics - .supply - .total - .dollars - .as_ref() - .zip(transactions.volume.sent_sum.dollars.as_ref()) - .map(|(market_cap, volume)| { - LazyBinaryFromDateLast::from_lazy_binary_block_last_and_lazy_binary_sum::< - Ratio32, - _, - _, - _, - _, - >("nvt", v, market_cap, volume) - }); - - let rsi_gains = EagerVec::forced_import(db, "rsi_gains", v)?; - let 
rsi_losses = EagerVec::forced_import(db, "rsi_losses", v)?; - // v1: Changed from SMA to RMA (Wilder's smoothing) - let rsi_average_gain_14d = - EagerVec::forced_import(db, "rsi_average_gain_14d", v + Version::ONE)?; - let rsi_average_loss_14d = - EagerVec::forced_import(db, "rsi_average_loss_14d", v + Version::ONE)?; - let rsi_14d = LazyVecFrom2::transformed::( - "rsi_14d", + let nvt = LazyBinaryFromHeightLast::from_lazy_binary_block_last_and_lazy_binary_sum::< + Ratio32, + _, + _, + _, + _, + >( + "nvt", v, - rsi_average_gain_14d.boxed_clone(), - rsi_average_loss_14d.boxed_clone(), + &distribution.utxo_cohorts.all.metrics.supply.total.usd, + &transactions.volume.sent_sum.usd, ); - let macd_line = EagerVec::forced_import(db, "macd_line", v)?; - let macd_signal = EagerVec::forced_import(db, "macd_signal", v)?; - let macd_histogram = LazyVecFrom2::transformed::( - "macd_histogram", + let rsi = ByIndicatorTimeframe::try_new(|tf| RsiChain::forced_import(db, tf, v, indexes))?; + let macd = ByIndicatorTimeframe::try_new(|tf| MacdChain::forced_import(db, tf, v, indexes))?; + + let stoch_k = ComputedFromHeightLast::forced_import(db, "stoch_k", v, indexes)?; + let stoch_d = ComputedFromHeightLast::forced_import(db, "stoch_d", v, indexes)?; + let gini = ComputedFromHeightLast::forced_import(db, "gini", v, indexes)?; + + let pi_cycle = LazyBinaryFromHeightLast::from_block_last_and_lazy_block_last::( + "pi_cycle", v, - macd_line.boxed_clone(), - macd_signal.boxed_clone(), + &moving_average.price_111d_sma.price.as_ref().unwrap().usd, + &moving_average.price_350d_sma_x2.usd, ); - let rsi_14d_min = EagerVec::forced_import(db, "rsi_14d_min", v)?; - let rsi_14d_max = EagerVec::forced_import(db, "rsi_14d_max", v)?; - let stoch_rsi = EagerVec::forced_import(db, "stoch_rsi", v)?; - let stoch_rsi_k = EagerVec::forced_import(db, "stoch_rsi_k", v)?; - let stoch_rsi_d = EagerVec::forced_import(db, "stoch_rsi_d", v)?; - - let stoch_k = EagerVec::forced_import(db, "stoch_k", v)?; - let 
stoch_d = EagerVec::forced_import(db, "stoch_d", v)?; - - let gini = EagerVec::forced_import(db, "gini", v)?; - - // Pi Cycle Top: 111d SMA / (2 * 350d SMA) - signals top when > 1 - let pi_cycle = moving_average.price_111d_sma.price.as_ref().map(|sma_111| { - LazyVecFrom2::transformed::( - "pi_cycle", - v, - sma_111.dateindex.boxed_clone(), - moving_average.price_350d_sma_x2.dateindex.boxed_clone(), - ) - }); - Ok(Self { - puell_multiple: compute_dollars - .then(|| ComputedFromDateLast::forced_import(db, "puell_multiple", v, indexes)) - .transpose()?, + puell_multiple: ComputedFromHeightLast::forced_import(db, "puell_multiple", v, indexes)?, nvt, - rsi_gains, - rsi_losses, - rsi_average_gain_14d, - rsi_average_loss_14d, - rsi_14d, - rsi_14d_min, - rsi_14d_max, - stoch_rsi, - stoch_rsi_k, - stoch_rsi_d, + rsi, stoch_k, stoch_d, pi_cycle, - macd_line, - macd_signal, - macd_histogram, + macd, gini, }) } diff --git a/crates/brk_computer/src/market/indicators/macd.rs b/crates/brk_computer/src/market/indicators/macd.rs new file mode 100644 index 000000000..99f9563ae --- /dev/null +++ b/crates/brk_computer/src/market/indicators/macd.rs @@ -0,0 +1,83 @@ +use brk_error::Result; +use brk_types::{Day1, StoredF32}; +use vecdb::{AnyStoredVec, AnyVec, Exit, VecIndex, WritableVec}; + +use super::{MacdChain, smoothing::compute_ema, timeframe::{collect_closes, date_to_period}}; +use crate::{ComputeIndexes, prices}; + +#[allow(clippy::too_many_arguments)] +pub(super) fn compute( + chain: &mut MacdChain, + tf: &str, + prices: &prices::Vecs, + h2d: &[Day1], + total_heights: usize, + starting_indexes: &ComputeIndexes, + exit: &Exit, +) -> Result<()> { + let source_version = prices.usd.price.version(); + + chain + .line + .height + .validate_computed_version_or_reset(source_version)?; + chain + .signal + .height + .validate_computed_version_or_reset(source_version)?; + + chain.line.height.truncate_if_needed_at( + chain + .line + .height + .len() + 
.min(starting_indexes.height.to_usize()), + )?; + chain.signal.height.truncate_if_needed_at( + chain + .signal + .height + .len() + .min(starting_indexes.height.to_usize()), + )?; + + let start_height = chain.line.height.len(); + if start_height >= total_heights { + return Ok(()); + } + + // Collect close prices at timeframe level + let closes = collect_closes(tf, prices); + let closes_f32: Vec = closes.iter().map(|d| **d as f32).collect(); + + // Compute MACD in-memory + let ema12 = compute_ema(&closes_f32, 12); + let ema26 = compute_ema(&closes_f32, 26); + + let macd_line: Vec = ema12.iter().zip(ema26.iter()).map(|(a, b)| a - b).collect(); + + let macd_signal = compute_ema(&macd_line, 9); + + // Expand to Height + (start_height..total_heights).for_each(|h| { + let pi = date_to_period(tf, h2d[h]); + chain.line.height.push(if pi < macd_line.len() { + StoredF32::from(macd_line[pi]) + } else { + StoredF32::NAN + }); + chain.signal.height.push(if pi < macd_signal.len() { + StoredF32::from(macd_signal[pi]) + } else { + StoredF32::NAN + }); + }); + + { + let _lock = exit.lock(); + chain.line.height.write()?; + chain.signal.height.write()?; + } + + Ok(()) +} diff --git a/crates/brk_computer/src/market/indicators/mod.rs b/crates/brk_computer/src/market/indicators/mod.rs index 1136f9ebd..2ea5670ee 100644 --- a/crates/brk_computer/src/market/indicators/mod.rs +++ b/crates/brk_computer/src/market/indicators/mod.rs @@ -1,5 +1,10 @@ mod compute; +mod gini; mod import; +mod macd; +mod rsi; +mod smoothing; +mod timeframe; mod vecs; -pub use vecs::Vecs; +pub use vecs::{ByIndicatorTimeframe, MacdChain, RsiChain, Vecs}; diff --git a/crates/brk_computer/src/market/indicators/rsi.rs b/crates/brk_computer/src/market/indicators/rsi.rs new file mode 100644 index 000000000..32af51e5c --- /dev/null +++ b/crates/brk_computer/src/market/indicators/rsi.rs @@ -0,0 +1,123 @@ +use brk_error::Result; +use brk_types::{Day1, StoredF32}; +use vecdb::{AnyStoredVec, AnyVec, Exit, VecIndex, 
WritableVec}; + +use super::{ + RsiChain, + smoothing::{compute_rma, compute_rolling_max, compute_rolling_min, compute_sma}, + timeframe::{collect_returns, date_to_period}, +}; +use crate::{ComputeIndexes, market::returns::Vecs as ReturnsVecs}; + +#[allow(clippy::too_many_arguments)] +pub(super) fn compute( + chain: &mut RsiChain, + tf: &str, + returns: &ReturnsVecs, + h2d: &[Day1], + total_heights: usize, + starting_indexes: &ComputeIndexes, + exit: &Exit, +) -> Result<()> { + let source_version = returns.price_returns._1d.height.version(); + + let vecs = [ + &mut chain.gains.height, + &mut chain.losses.height, + &mut chain.average_gain.height, + &mut chain.average_loss.height, + &mut chain.rsi_min.height, + &mut chain.rsi_max.height, + &mut chain.stoch_rsi.height, + &mut chain.stoch_rsi_k.height, + &mut chain.stoch_rsi_d.height, + ]; + + for v in vecs { + v.validate_computed_version_or_reset(source_version)?; + v.truncate_if_needed_at(v.len().min(starting_indexes.height.to_usize()))?; + } + + let start_height = chain.gains.height.len(); + if start_height >= total_heights { + return Ok(()); + } + + // Collect returns at the appropriate timeframe level + let period_returns = collect_returns(tf, returns); + + // Compute in-memory + let gains: Vec = period_returns.iter().map(|r| r.max(0.0)).collect(); + let losses: Vec = period_returns.iter().map(|r| (-r).max(0.0)).collect(); + let avg_gain = compute_rma(&gains, 14); + let avg_loss = compute_rma(&losses, 14); + + let rsi: Vec = avg_gain + .iter() + .zip(avg_loss.iter()) + .map(|(g, l)| { + let sum = g + l; + if sum == 0.0 { 50.0 } else { 100.0 * g / sum } + }) + .collect(); + + let rsi_min = compute_rolling_min(&rsi, 14); + let rsi_max = compute_rolling_max(&rsi, 14); + + let stoch_rsi: Vec = rsi + .iter() + .zip(rsi_min.iter()) + .zip(rsi_max.iter()) + .map(|((r, mn), mx)| { + let range = mx - mn; + if range == 0.0 { + 50.0 + } else { + (r - mn) / range * 100.0 + } + }) + .collect(); + + let stoch_rsi_k = 
compute_sma(&stoch_rsi, 3); + let stoch_rsi_d = compute_sma(&stoch_rsi_k, 3); + + // Expand to Height + macro_rules! expand { + ($target:expr, $buffer:expr) => { + for h in start_height..total_heights { + let pi = date_to_period(tf, h2d[h]); + let val = if pi < $buffer.len() { + StoredF32::from($buffer[pi]) + } else { + StoredF32::NAN + }; + $target.push(val); + } + }; + } + + expand!(chain.gains.height, gains); + expand!(chain.losses.height, losses); + expand!(chain.average_gain.height, avg_gain); + expand!(chain.average_loss.height, avg_loss); + expand!(chain.rsi_min.height, rsi_min); + expand!(chain.rsi_max.height, rsi_max); + expand!(chain.stoch_rsi.height, stoch_rsi); + expand!(chain.stoch_rsi_k.height, stoch_rsi_k); + expand!(chain.stoch_rsi_d.height, stoch_rsi_d); + + { + let _lock = exit.lock(); + chain.gains.height.write()?; + chain.losses.height.write()?; + chain.average_gain.height.write()?; + chain.average_loss.height.write()?; + chain.rsi_min.height.write()?; + chain.rsi_max.height.write()?; + chain.stoch_rsi.height.write()?; + chain.stoch_rsi_k.height.write()?; + chain.stoch_rsi_d.height.write()?; + } + + Ok(()) +} diff --git a/crates/brk_computer/src/market/indicators/smoothing.rs b/crates/brk_computer/src/market/indicators/smoothing.rs new file mode 100644 index 000000000..0b3b922ed --- /dev/null +++ b/crates/brk_computer/src/market/indicators/smoothing.rs @@ -0,0 +1,93 @@ +use std::collections::VecDeque; + +pub(super) fn compute_rma(source: &[f32], period: usize) -> Vec { + let mut result = Vec::with_capacity(source.len()); + let k = 1.0 / period as f32; + let mut sum = 0.0f32; + + for (i, &val) in source.iter().enumerate() { + if i < period { + sum += val; + result.push(sum / (i + 1) as f32); + } else { + let prev = result[i - 1]; + result.push(val * k + prev * (1.0 - k)); + } + } + + result +} + +pub(super) fn compute_ema(source: &[f32], period: usize) -> Vec { + let mut result = Vec::with_capacity(source.len()); + let k = 2.0 / (period as f32 + 
1.0); + let mut sum = 0.0f32; + + for (i, &val) in source.iter().enumerate() { + if i < period { + sum += val; + if i == period - 1 { + result.push(sum / period as f32); + } else { + result.push(val); + } + } else { + let prev = result[i - 1]; + result.push(val * k + prev * (1.0 - k)); + } + } + + result +} + +pub(super) fn compute_sma(source: &[f32], window: usize) -> Vec { + let mut result = Vec::with_capacity(source.len()); + let mut sum = 0.0f32; + + for (i, &val) in source.iter().enumerate() { + sum += val; + if i >= window { + sum -= source[i - window]; + } + let count = (i + 1).min(window); + result.push(sum / count as f32); + } + + result +} + +pub(super) fn compute_rolling_min(source: &[f32], window: usize) -> Vec { + let mut result = Vec::with_capacity(source.len()); + let mut deque = VecDeque::new(); + + for (i, &val) in source.iter().enumerate() { + while deque.back().is_some_and(|&(_, v): &(usize, f32)| v >= val) { + deque.pop_back(); + } + deque.push_back((i, val)); + if deque.front().unwrap().0 + window <= i { + deque.pop_front(); + } + result.push(deque.front().unwrap().1); + } + + result +} + +pub(super) fn compute_rolling_max(source: &[f32], window: usize) -> Vec { + let mut result = Vec::with_capacity(source.len()); + let mut deque = VecDeque::new(); + + for (i, &val) in source.iter().enumerate() { + while deque.back().is_some_and(|&(_, v): &(usize, f32)| v <= val) { + deque.pop_back(); + } + deque.push_back((i, val)); + if deque.front().unwrap().0 + window <= i { + deque.pop_front(); + } + result.push(deque.front().unwrap().1); + } + + result +} diff --git a/crates/brk_computer/src/market/indicators/timeframe.rs b/crates/brk_computer/src/market/indicators/timeframe.rs new file mode 100644 index 000000000..b2f64c316 --- /dev/null +++ b/crates/brk_computer/src/market/indicators/timeframe.rs @@ -0,0 +1,50 @@ +use brk_types::{Day1, Dollars, Month1, StoredF32, Week1, Year1}; +use vecdb::{ReadableVec, VecIndex}; + +use crate::{market::returns::Vecs as 
ReturnsVecs, prices}; + +/// Returns period-level returns data +pub(super) fn collect_returns(tf: &str, returns: &ReturnsVecs) -> Vec { + match tf { + "1d" => { + let data: Vec = returns.price_returns._1d.day1.collect(); + data.into_iter().map(|v| *v).collect() + } + "1w" => { + let data: Vec = returns.price_returns._1w.week1.collect(); + data.into_iter().map(|v| *v).collect() + } + "1m" => { + let data: Vec = returns.price_returns._1m.month1.collect(); + data.into_iter().map(|v| *v).collect() + } + "1y" => { + let data: Vec = returns.price_returns._1y.year1.collect(); + data.into_iter().map(|v| *v).collect() + } + _ => unreachable!(), + } +} + +/// Returns period-level close prices +pub(super) fn collect_closes(tf: &str, prices: &prices::Vecs) -> Vec { + match tf { + "1d" => prices.usd.split.close.day1.collect(), + "1w" => prices.usd.split.close.week1.collect(), + "1m" => prices.usd.split.close.month1.collect(), + "1y" => prices.usd.split.close.year1.collect(), + _ => unreachable!(), + } +} + +/// Maps a Day1 to a period-level index for the given timeframe +#[inline] +pub(super) fn date_to_period(tf: &str, di: Day1) -> usize { + match tf { + "1d" => di.to_usize(), + "1w" => Week1::from(di).to_usize(), + "1m" => Month1::from(di).to_usize(), + "1y" => Year1::from(Month1::from(di)).to_usize(), + _ => unreachable!(), + } +} diff --git a/crates/brk_computer/src/market/indicators/vecs.rs b/crates/brk_computer/src/market/indicators/vecs.rs index 5d8a14f5e..3a1891860 100644 --- a/crates/brk_computer/src/market/indicators/vecs.rs +++ b/crates/brk_computer/src/market/indicators/vecs.rs @@ -1,35 +1,74 @@ use brk_traversable::Traversable; -use brk_types::{DateIndex, Dollars, StoredF32}; -use vecdb::{EagerVec, LazyVecFrom2, PcoVec}; +use brk_types::{Dollars, Height, StoredF32}; +use vecdb::{LazyVecFrom2, Rw, StorageMode}; -use crate::internal::{ComputedFromDateLast, LazyBinaryFromDateLast}; +use crate::internal::{ComputedFromHeightLast, LazyBinaryFromHeightLast}; + +pub const 
TIMEFRAME_NAMES: [&str; 4] = ["1d", "1w", "1m", "1y"]; #[derive(Clone, Traversable)] -pub struct Vecs { - pub puell_multiple: Option>, - pub nvt: Option>, - - pub rsi_gains: EagerVec>, - pub rsi_losses: EagerVec>, - pub rsi_average_gain_14d: EagerVec>, - pub rsi_average_loss_14d: EagerVec>, - pub rsi_14d: LazyVecFrom2, - pub rsi_14d_min: EagerVec>, - pub rsi_14d_max: EagerVec>, - pub stoch_rsi: EagerVec>, - pub stoch_rsi_k: EagerVec>, - pub stoch_rsi_d: EagerVec>, - - pub stoch_k: EagerVec>, - pub stoch_d: EagerVec>, - - pub pi_cycle: - Option>, - - pub macd_line: EagerVec>, - pub macd_signal: EagerVec>, - pub macd_histogram: - LazyVecFrom2, - - pub gini: EagerVec>, +pub struct ByIndicatorTimeframe { + pub _1d: T, + pub _1w: T, + pub _1m: T, + pub _1y: T, +} + +impl ByIndicatorTimeframe { + pub fn try_new(mut create: impl FnMut(&str) -> Result) -> Result { + Ok(Self { + _1d: create(TIMEFRAME_NAMES[0])?, + _1w: create(TIMEFRAME_NAMES[1])?, + _1m: create(TIMEFRAME_NAMES[2])?, + _1y: create(TIMEFRAME_NAMES[3])?, + }) + } + + pub fn iter_mut(&mut self) -> impl Iterator { + [ + (TIMEFRAME_NAMES[0], &mut self._1d), + (TIMEFRAME_NAMES[1], &mut self._1w), + (TIMEFRAME_NAMES[2], &mut self._1m), + (TIMEFRAME_NAMES[3], &mut self._1y), + ] + .into_iter() + } +} + +#[derive(Traversable)] +pub struct RsiChain { + pub gains: ComputedFromHeightLast, + pub losses: ComputedFromHeightLast, + pub average_gain: ComputedFromHeightLast, + pub average_loss: ComputedFromHeightLast, + pub rsi: LazyVecFrom2, + pub rsi_min: ComputedFromHeightLast, + pub rsi_max: ComputedFromHeightLast, + pub stoch_rsi: ComputedFromHeightLast, + pub stoch_rsi_k: ComputedFromHeightLast, + pub stoch_rsi_d: ComputedFromHeightLast, +} + +#[derive(Traversable)] +pub struct MacdChain { + pub line: ComputedFromHeightLast, + pub signal: ComputedFromHeightLast, + pub histogram: LazyVecFrom2, +} + +#[derive(Traversable)] +pub struct Vecs { + pub puell_multiple: ComputedFromHeightLast, + pub nvt: 
LazyBinaryFromHeightLast, + + pub rsi: ByIndicatorTimeframe>, + + pub stoch_k: ComputedFromHeightLast, + pub stoch_d: ComputedFromHeightLast, + + pub pi_cycle: LazyBinaryFromHeightLast, + + pub macd: ByIndicatorTimeframe>, + + pub gini: ComputedFromHeightLast, } diff --git a/crates/brk_computer/src/market/lookback/by_period.rs b/crates/brk_computer/src/market/lookback/by_period.rs index ad4733ae9..270c13b34 100644 --- a/crates/brk_computer/src/market/lookback/by_period.rs +++ b/crates/brk_computer/src/market/lookback/by_period.rs @@ -37,7 +37,7 @@ pub const LOOKBACK_PERIOD_NAMES: ByLookbackPeriod<&'static str> = ByLookbackPeri }; /// Generic wrapper for lookback period-based data (includes 1d) -#[derive(Default, Clone, Traversable)] +#[derive(Clone, Default, Traversable)] pub struct ByLookbackPeriod { pub _1d: T, pub _1w: T, @@ -55,30 +55,7 @@ pub struct ByLookbackPeriod { } impl ByLookbackPeriod { - pub fn new(mut create: F) -> Self - where - F: FnMut(&'static str, u32) -> T, - { - let n = LOOKBACK_PERIOD_NAMES; - let d = LOOKBACK_PERIOD_DAYS; - Self { - _1d: create(n._1d, d._1d), - _1w: create(n._1w, d._1w), - _1m: create(n._1m, d._1m), - _3m: create(n._3m, d._3m), - _6m: create(n._6m, d._6m), - _1y: create(n._1y, d._1y), - _2y: create(n._2y, d._2y), - _3y: create(n._3y, d._3y), - _4y: create(n._4y, d._4y), - _5y: create(n._5y, d._5y), - _6y: create(n._6y, d._6y), - _8y: create(n._8y, d._8y), - _10y: create(n._10y, d._10y), - } - } - - pub fn try_new(mut create: F) -> Result + pub(crate) fn try_new(mut create: F) -> Result where F: FnMut(&'static str, u32) -> Result, { @@ -101,45 +78,7 @@ impl ByLookbackPeriod { }) } - pub fn iter(&self) -> impl Iterator { - [ - &self._1d, - &self._1w, - &self._1m, - &self._3m, - &self._6m, - &self._1y, - &self._2y, - &self._3y, - &self._4y, - &self._5y, - &self._6y, - &self._8y, - &self._10y, - ] - .into_iter() - } - - pub fn iter_mut(&mut self) -> impl Iterator { - [ - &mut self._1d, - &mut self._1w, - &mut self._1m, - &mut 
self._3m, - &mut self._6m, - &mut self._1y, - &mut self._2y, - &mut self._3y, - &mut self._4y, - &mut self._5y, - &mut self._6y, - &mut self._8y, - &mut self._10y, - ] - .into_iter() - } - - pub fn iter_mut_with_days(&mut self) -> impl Iterator { + pub(crate) fn iter_mut_with_days(&mut self) -> impl Iterator { let d = LOOKBACK_PERIOD_DAYS; [ (&mut self._1d, d._1d), @@ -160,7 +99,7 @@ impl ByLookbackPeriod { } /// Get the DCA-matching subset (excludes 1d) - pub fn as_dca_period(&self) -> ByDcaPeriod<&T> { + pub(crate) fn as_dca_period(&self) -> ByDcaPeriod<&T> { ByDcaPeriod { _1w: &self._1w, _1m: &self._1m, @@ -177,7 +116,7 @@ impl ByLookbackPeriod { } } - pub fn zip_ref<'a, U>(&'a self, other: &'a ByLookbackPeriod) -> ByLookbackPeriod<(&'a T, &'a U)> { + pub(crate) fn zip_ref<'a, U>(&'a self, other: &'a ByLookbackPeriod) -> ByLookbackPeriod<(&'a T, &'a U)> { ByLookbackPeriod { _1d: (&self._1d, &other._1d), _1w: (&self._1w, &other._1w), @@ -195,7 +134,7 @@ impl ByLookbackPeriod { } } - pub fn map U>(self, mut f: F) -> ByLookbackPeriod { + pub(crate) fn map U>(self, mut f: F) -> ByLookbackPeriod { ByLookbackPeriod { _1d: f(self._1d), _1w: f(self._1w), diff --git a/crates/brk_computer/src/market/lookback/compute.rs b/crates/brk_computer/src/market/lookback/compute.rs index 57315e9f8..d3e681882 100644 --- a/crates/brk_computer/src/market/lookback/compute.rs +++ b/crates/brk_computer/src/market/lookback/compute.rs @@ -1,23 +1,31 @@ use brk_error::Result; -use vecdb::Exit; +use brk_types::Dollars; +use vecdb::{Exit, ReadableVec, VecIndex}; use super::Vecs; -use crate::{ComputeIndexes, price}; +use crate::{blocks, ComputeIndexes, prices}; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, - price: &price::Vecs, + blocks: &blocks::Vecs, + prices: &prices::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - let close = &price.usd.split.close.dateindex; + let close_data: Vec = prices.usd.price.collect(); for (price_ago, days) in 
self.price_ago.iter_mut_with_days() { - price_ago.compute_all(starting_indexes, exit, |v| { - v.compute_previous_value(starting_indexes.dateindex, close, days as usize, exit)?; - Ok(()) - })?; + let window_starts = blocks.count.start_vec(days as usize); + price_ago.usd.height.compute_transform( + starting_indexes.height, + window_starts, + |(h, start_h, _)| { + let val = close_data[start_h.to_usize()]; + (h, val) + }, + exit, + )?; } Ok(()) diff --git a/crates/brk_computer/src/market/lookback/import.rs b/crates/brk_computer/src/market/lookback/import.rs index b0cb986cc..4bbb4b150 100644 --- a/crates/brk_computer/src/market/lookback/import.rs +++ b/crates/brk_computer/src/market/lookback/import.rs @@ -3,12 +3,12 @@ use brk_types::Version; use vecdb::Database; use super::{ByLookbackPeriod, Vecs}; -use crate::{indexes, internal::Price}; +use crate::{indexes, internal::PriceFromHeight}; impl Vecs { - pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { + pub(crate) fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { let price_ago = ByLookbackPeriod::try_new(|name, _days| { - Price::forced_import(db, &format!("price_{name}_ago"), version, indexes) + PriceFromHeight::forced_import(db, &format!("price_{name}_ago"), version, indexes) })?; Ok(Self { price_ago }) diff --git a/crates/brk_computer/src/market/lookback/vecs.rs b/crates/brk_computer/src/market/lookback/vecs.rs index 5eaa236f0..e15b30eb1 100644 --- a/crates/brk_computer/src/market/lookback/vecs.rs +++ b/crates/brk_computer/src/market/lookback/vecs.rs @@ -1,11 +1,13 @@ use brk_traversable::Traversable; +use brk_types::Dollars; +use vecdb::{Rw, StorageMode}; use super::ByLookbackPeriod; -use crate::internal::Price; +use crate::internal::{ComputedFromHeightLast, Price}; /// Price lookback metrics -#[derive(Clone, Traversable)] -pub struct Vecs { +#[derive(Traversable)] +pub struct Vecs { #[traversable(flatten)] - pub price_ago: ByLookbackPeriod, 
+ pub price_ago: ByLookbackPeriod>>, } diff --git a/crates/brk_computer/src/market/mod.rs b/crates/brk_computer/src/market/mod.rs index 4343b8625..7c4693560 100644 --- a/crates/brk_computer/src/market/mod.rs +++ b/crates/brk_computer/src/market/mod.rs @@ -10,7 +10,7 @@ pub mod returns; pub mod volatility; use brk_traversable::Traversable; -use vecdb::Database; +use vecdb::{Database, Rw, StorageMode}; pub use ath::Vecs as AthVecs; pub use dca::Vecs as DcaVecs; @@ -24,16 +24,16 @@ pub use volatility::Vecs as VolatilityVecs; pub const DB_NAME: &str = "market"; /// Main market metrics struct composed of sub-modules -#[derive(Clone, Traversable)] -pub struct Vecs { +#[derive(Traversable)] +pub struct Vecs { #[traversable(skip)] pub(crate) db: Database, - pub ath: AthVecs, - pub lookback: LookbackVecs, - pub returns: ReturnsVecs, + pub ath: AthVecs, + pub lookback: LookbackVecs, + pub returns: ReturnsVecs, pub volatility: VolatilityVecs, - pub range: RangeVecs, - pub moving_average: MovingAverageVecs, - pub dca: DcaVecs, - pub indicators: IndicatorsVecs, + pub range: RangeVecs, + pub moving_average: MovingAverageVecs, + pub dca: DcaVecs, + pub indicators: IndicatorsVecs, } diff --git a/crates/brk_computer/src/market/moving_average/compute.rs b/crates/brk_computer/src/market/moving_average/compute.rs index 0deee77b0..2750f7a49 100644 --- a/crates/brk_computer/src/market/moving_average/compute.rs +++ b/crates/brk_computer/src/market/moving_average/compute.rs @@ -1,17 +1,20 @@ use brk_error::Result; -use vecdb::Exit; +use brk_types::Dollars; +use vecdb::{Exit, ReadableVec, VecIndex}; use super::Vecs; -use crate::{ComputeIndexes, price}; +use crate::{ComputeIndexes, blocks, indexes, prices}; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, - price: &price::Vecs, + blocks: &blocks::Vecs, + prices: &prices::Vecs, + indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - let close = &price.usd.split.close.dateindex; + let 
close = &prices.usd.price; for (sma, period) in [ (&mut self.price_1w_sma, 7), @@ -31,12 +34,16 @@ impl Vecs { (&mut self.price_200w_sma, 200 * 7), (&mut self.price_4y_sma, 4 * 365), ] { - sma.compute_all(price, starting_indexes, exit, |v| { - v.compute_sma(starting_indexes.dateindex, close, period, exit)?; + let window_starts = blocks.count.start_vec(period); + sma.compute_all(blocks, prices, starting_indexes, exit, |v| { + v.compute_rolling_average(starting_indexes.height, window_starts, close, exit)?; Ok(()) })?; } + let h2d = &indexes.height.day1; + let closes: Vec = prices.usd.split.close.day1.collect(); + for (ema, period) in [ (&mut self.price_1w_ema, 7), (&mut self.price_8d_ema, 8), @@ -55,8 +62,18 @@ impl Vecs { (&mut self.price_200w_ema, 200 * 7), (&mut self.price_4y_ema, 4 * 365), ] { - ema.compute_all(price, starting_indexes, exit, |v| { - v.compute_ema(starting_indexes.dateindex, close, period, exit)?; + let k = 2.0f64 / (period as f64 + 1.0); + + // Compute date-level EMA, then expand to height level + let date_ema = compute_date_ema(&closes, k); + + ema.compute_all(blocks, prices, starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.height, + h2d, + |(h, date, ..)| (h, Dollars::from(date_ema[date.to_usize()])), + exit, + )?; Ok(()) })?; } @@ -64,3 +81,18 @@ impl Vecs { Ok(()) } } + +fn compute_date_ema(closes: &[Dollars], k: f64) -> Vec { + let mut date_ema: Vec = Vec::with_capacity(closes.len()); + let mut ema_val = 0.0f64; + for (d, close) in closes.iter().enumerate() { + let close = f64::from(*close); + if d == 0 { + ema_val = close; + } else { + ema_val = close * k + ema_val * (1.0 - k); + } + date_ema.push(ema_val); + } + date_ema +} diff --git a/crates/brk_computer/src/market/moving_average/import.rs b/crates/brk_computer/src/market/moving_average/import.rs index 1392b00e5..a960cb278 100644 --- a/crates/brk_computer/src/market/moving_average/import.rs +++ b/crates/brk_computer/src/market/moving_average/import.rs @@ -5,16 
+5,16 @@ use vecdb::Database; use super::Vecs; use crate::{ indexes, - internal::{ComputedFromDateRatio, DollarsTimesTenths, LazyPrice}, + internal::{ComputedFromHeightRatio, DollarsTimesTenths, LazyPriceFromHeight}, }; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, indexes: &indexes::Vecs, ) -> Result { - let price_1w_sma = ComputedFromDateRatio::forced_import( + let price_1w_sma = ComputedFromHeightRatio::forced_import( db, "price_1w_sma", None, @@ -22,7 +22,7 @@ impl Vecs { indexes, true, )?; - let price_8d_sma = ComputedFromDateRatio::forced_import( + let price_8d_sma = ComputedFromHeightRatio::forced_import( db, "price_8d_sma", None, @@ -30,7 +30,7 @@ impl Vecs { indexes, true, )?; - let price_13d_sma = ComputedFromDateRatio::forced_import( + let price_13d_sma = ComputedFromHeightRatio::forced_import( db, "price_13d_sma", None, @@ -38,7 +38,7 @@ impl Vecs { indexes, true, )?; - let price_21d_sma = ComputedFromDateRatio::forced_import( + let price_21d_sma = ComputedFromHeightRatio::forced_import( db, "price_21d_sma", None, @@ -46,7 +46,7 @@ impl Vecs { indexes, true, )?; - let price_1m_sma = ComputedFromDateRatio::forced_import( + let price_1m_sma = ComputedFromHeightRatio::forced_import( db, "price_1m_sma", None, @@ -54,7 +54,7 @@ impl Vecs { indexes, true, )?; - let price_34d_sma = ComputedFromDateRatio::forced_import( + let price_34d_sma = ComputedFromHeightRatio::forced_import( db, "price_34d_sma", None, @@ -62,7 +62,7 @@ impl Vecs { indexes, true, )?; - let price_55d_sma = ComputedFromDateRatio::forced_import( + let price_55d_sma = ComputedFromHeightRatio::forced_import( db, "price_55d_sma", None, @@ -70,7 +70,7 @@ impl Vecs { indexes, true, )?; - let price_89d_sma = ComputedFromDateRatio::forced_import( + let price_89d_sma = ComputedFromHeightRatio::forced_import( db, "price_89d_sma", None, @@ -78,7 +78,7 @@ impl Vecs { indexes, true, )?; - let price_111d_sma = ComputedFromDateRatio::forced_import( + 
let price_111d_sma = ComputedFromHeightRatio::forced_import( db, "price_111d_sma", None, @@ -86,7 +86,7 @@ impl Vecs { indexes, true, )?; - let price_144d_sma = ComputedFromDateRatio::forced_import( + let price_144d_sma = ComputedFromHeightRatio::forced_import( db, "price_144d_sma", None, @@ -94,7 +94,7 @@ impl Vecs { indexes, true, )?; - let price_200d_sma = ComputedFromDateRatio::forced_import( + let price_200d_sma = ComputedFromHeightRatio::forced_import( db, "price_200d_sma", None, @@ -102,7 +102,7 @@ impl Vecs { indexes, true, )?; - let price_350d_sma = ComputedFromDateRatio::forced_import( + let price_350d_sma = ComputedFromHeightRatio::forced_import( db, "price_350d_sma", None, @@ -110,7 +110,7 @@ impl Vecs { indexes, true, )?; - let price_1y_sma = ComputedFromDateRatio::forced_import( + let price_1y_sma = ComputedFromHeightRatio::forced_import( db, "price_1y_sma", None, @@ -118,7 +118,7 @@ impl Vecs { indexes, true, )?; - let price_2y_sma = ComputedFromDateRatio::forced_import( + let price_2y_sma = ComputedFromHeightRatio::forced_import( db, "price_2y_sma", None, @@ -126,7 +126,7 @@ impl Vecs { indexes, true, )?; - let price_200w_sma = ComputedFromDateRatio::forced_import( + let price_200w_sma = ComputedFromHeightRatio::forced_import( db, "price_200w_sma", None, @@ -134,7 +134,7 @@ impl Vecs { indexes, true, )?; - let price_4y_sma = ComputedFromDateRatio::forced_import( + let price_4y_sma = ComputedFromHeightRatio::forced_import( db, "price_4y_sma", None, @@ -143,7 +143,7 @@ impl Vecs { true, )?; - let price_1w_ema = ComputedFromDateRatio::forced_import( + let price_1w_ema = ComputedFromHeightRatio::forced_import( db, "price_1w_ema", None, @@ -151,7 +151,7 @@ impl Vecs { indexes, true, )?; - let price_8d_ema = ComputedFromDateRatio::forced_import( + let price_8d_ema = ComputedFromHeightRatio::forced_import( db, "price_8d_ema", None, @@ -159,7 +159,7 @@ impl Vecs { indexes, true, )?; - let price_12d_ema = ComputedFromDateRatio::forced_import( + let 
price_12d_ema = ComputedFromHeightRatio::forced_import( db, "price_12d_ema", None, @@ -167,7 +167,7 @@ impl Vecs { indexes, true, )?; - let price_13d_ema = ComputedFromDateRatio::forced_import( + let price_13d_ema = ComputedFromHeightRatio::forced_import( db, "price_13d_ema", None, @@ -175,7 +175,7 @@ impl Vecs { indexes, true, )?; - let price_21d_ema = ComputedFromDateRatio::forced_import( + let price_21d_ema = ComputedFromHeightRatio::forced_import( db, "price_21d_ema", None, @@ -183,7 +183,7 @@ impl Vecs { indexes, true, )?; - let price_26d_ema = ComputedFromDateRatio::forced_import( + let price_26d_ema = ComputedFromHeightRatio::forced_import( db, "price_26d_ema", None, @@ -191,7 +191,7 @@ impl Vecs { indexes, true, )?; - let price_1m_ema = ComputedFromDateRatio::forced_import( + let price_1m_ema = ComputedFromHeightRatio::forced_import( db, "price_1m_ema", None, @@ -199,7 +199,7 @@ impl Vecs { indexes, true, )?; - let price_34d_ema = ComputedFromDateRatio::forced_import( + let price_34d_ema = ComputedFromHeightRatio::forced_import( db, "price_34d_ema", None, @@ -207,7 +207,7 @@ impl Vecs { indexes, true, )?; - let price_55d_ema = ComputedFromDateRatio::forced_import( + let price_55d_ema = ComputedFromHeightRatio::forced_import( db, "price_55d_ema", None, @@ -215,7 +215,7 @@ impl Vecs { indexes, true, )?; - let price_89d_ema = ComputedFromDateRatio::forced_import( + let price_89d_ema = ComputedFromHeightRatio::forced_import( db, "price_89d_ema", None, @@ -223,7 +223,7 @@ impl Vecs { indexes, true, )?; - let price_144d_ema = ComputedFromDateRatio::forced_import( + let price_144d_ema = ComputedFromHeightRatio::forced_import( db, "price_144d_ema", None, @@ -231,7 +231,7 @@ impl Vecs { indexes, true, )?; - let price_200d_ema = ComputedFromDateRatio::forced_import( + let price_200d_ema = ComputedFromHeightRatio::forced_import( db, "price_200d_ema", None, @@ -239,7 +239,7 @@ impl Vecs { indexes, true, )?; - let price_1y_ema = ComputedFromDateRatio::forced_import( + 
let price_1y_ema = ComputedFromHeightRatio::forced_import( db, "price_1y_ema", None, @@ -247,7 +247,7 @@ impl Vecs { indexes, true, )?; - let price_2y_ema = ComputedFromDateRatio::forced_import( + let price_2y_ema = ComputedFromHeightRatio::forced_import( db, "price_2y_ema", None, @@ -255,7 +255,7 @@ impl Vecs { indexes, true, )?; - let price_200w_ema = ComputedFromDateRatio::forced_import( + let price_200w_ema = ComputedFromHeightRatio::forced_import( db, "price_200w_ema", None, @@ -263,7 +263,7 @@ impl Vecs { indexes, true, )?; - let price_4y_ema = ComputedFromDateRatio::forced_import( + let price_4y_ema = ComputedFromHeightRatio::forced_import( db, "price_4y_ema", None, @@ -272,20 +272,20 @@ impl Vecs { true, )?; - let price_200d_sma_source = price_200d_sma.price.as_ref().unwrap(); - let price_200d_sma_x2_4 = LazyPrice::from_source::>( + let price_200d_sma_source = &price_200d_sma.price.as_ref().unwrap().usd; + let price_200d_sma_x2_4 = LazyPriceFromHeight::from_computed::>( "price_200d_sma_x2_4", version, price_200d_sma_source, ); - let price_200d_sma_x0_8 = LazyPrice::from_source::>( + let price_200d_sma_x0_8 = LazyPriceFromHeight::from_computed::>( "price_200d_sma_x0_8", version, price_200d_sma_source, ); - let price_350d_sma_source = price_350d_sma.price.as_ref().unwrap(); - let price_350d_sma_x2 = LazyPrice::from_source::>( + let price_350d_sma_source = &price_350d_sma.price.as_ref().unwrap().usd; + let price_350d_sma_x2 = LazyPriceFromHeight::from_computed::>( "price_350d_sma_x2", version, price_350d_sma_source, diff --git a/crates/brk_computer/src/market/moving_average/vecs.rs b/crates/brk_computer/src/market/moving_average/vecs.rs index e86a20260..2189cbf20 100644 --- a/crates/brk_computer/src/market/moving_average/vecs.rs +++ b/crates/brk_computer/src/market/moving_average/vecs.rs @@ -1,46 +1,47 @@ use brk_traversable::Traversable; use brk_types::Dollars; +use vecdb::{Rw, StorageMode}; -use crate::internal::{ComputedFromDateRatio, LazyPrice}; +use 
crate::internal::{ComputedFromHeightRatio, LazyPriceFromHeight}; /// Simple and exponential moving average metrics -#[derive(Clone, Traversable)] -pub struct Vecs { - pub price_1w_sma: ComputedFromDateRatio, - pub price_8d_sma: ComputedFromDateRatio, - pub price_13d_sma: ComputedFromDateRatio, - pub price_21d_sma: ComputedFromDateRatio, - pub price_1m_sma: ComputedFromDateRatio, - pub price_34d_sma: ComputedFromDateRatio, - pub price_55d_sma: ComputedFromDateRatio, - pub price_89d_sma: ComputedFromDateRatio, - pub price_111d_sma: ComputedFromDateRatio, - pub price_144d_sma: ComputedFromDateRatio, - pub price_200d_sma: ComputedFromDateRatio, - pub price_350d_sma: ComputedFromDateRatio, - pub price_1y_sma: ComputedFromDateRatio, - pub price_2y_sma: ComputedFromDateRatio, - pub price_200w_sma: ComputedFromDateRatio, - pub price_4y_sma: ComputedFromDateRatio, +#[derive(Traversable)] +pub struct Vecs { + pub price_1w_sma: ComputedFromHeightRatio, + pub price_8d_sma: ComputedFromHeightRatio, + pub price_13d_sma: ComputedFromHeightRatio, + pub price_21d_sma: ComputedFromHeightRatio, + pub price_1m_sma: ComputedFromHeightRatio, + pub price_34d_sma: ComputedFromHeightRatio, + pub price_55d_sma: ComputedFromHeightRatio, + pub price_89d_sma: ComputedFromHeightRatio, + pub price_111d_sma: ComputedFromHeightRatio, + pub price_144d_sma: ComputedFromHeightRatio, + pub price_200d_sma: ComputedFromHeightRatio, + pub price_350d_sma: ComputedFromHeightRatio, + pub price_1y_sma: ComputedFromHeightRatio, + pub price_2y_sma: ComputedFromHeightRatio, + pub price_200w_sma: ComputedFromHeightRatio, + pub price_4y_sma: ComputedFromHeightRatio, - pub price_1w_ema: ComputedFromDateRatio, - pub price_8d_ema: ComputedFromDateRatio, - pub price_12d_ema: ComputedFromDateRatio, - pub price_13d_ema: ComputedFromDateRatio, - pub price_21d_ema: ComputedFromDateRatio, - pub price_26d_ema: ComputedFromDateRatio, - pub price_1m_ema: ComputedFromDateRatio, - pub price_34d_ema: ComputedFromDateRatio, - 
pub price_55d_ema: ComputedFromDateRatio, - pub price_89d_ema: ComputedFromDateRatio, - pub price_144d_ema: ComputedFromDateRatio, - pub price_200d_ema: ComputedFromDateRatio, - pub price_1y_ema: ComputedFromDateRatio, - pub price_2y_ema: ComputedFromDateRatio, - pub price_200w_ema: ComputedFromDateRatio, - pub price_4y_ema: ComputedFromDateRatio, + pub price_1w_ema: ComputedFromHeightRatio, + pub price_8d_ema: ComputedFromHeightRatio, + pub price_12d_ema: ComputedFromHeightRatio, + pub price_13d_ema: ComputedFromHeightRatio, + pub price_21d_ema: ComputedFromHeightRatio, + pub price_26d_ema: ComputedFromHeightRatio, + pub price_1m_ema: ComputedFromHeightRatio, + pub price_34d_ema: ComputedFromHeightRatio, + pub price_55d_ema: ComputedFromHeightRatio, + pub price_89d_ema: ComputedFromHeightRatio, + pub price_144d_ema: ComputedFromHeightRatio, + pub price_200d_ema: ComputedFromHeightRatio, + pub price_1y_ema: ComputedFromHeightRatio, + pub price_2y_ema: ComputedFromHeightRatio, + pub price_200w_ema: ComputedFromHeightRatio, + pub price_4y_ema: ComputedFromHeightRatio, - pub price_200d_sma_x2_4: LazyPrice, - pub price_200d_sma_x0_8: LazyPrice, - pub price_350d_sma_x2: LazyPrice, + pub price_200d_sma_x2_4: LazyPriceFromHeight, + pub price_200d_sma_x0_8: LazyPriceFromHeight, + pub price_350d_sma_x2: LazyPriceFromHeight, } diff --git a/crates/brk_computer/src/market/range/compute.rs b/crates/brk_computer/src/market/range/compute.rs index 73fda9fc1..a022a5746 100644 --- a/crates/brk_computer/src/market/range/compute.rs +++ b/crates/brk_computer/src/market/range/compute.rs @@ -1,103 +1,127 @@ use brk_error::Result; use brk_types::StoredF32; -use vecdb::Exit; +use vecdb::{Exit, ReadableVec, VecIndex}; use super::Vecs; -use crate::{ComputeIndexes, price}; +use crate::{ + blocks, ComputeIndexes, prices, + traits::{ComputeRollingMaxFromStarts, ComputeRollingMinFromStarts}, +}; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, - price: &price::Vecs, + prices: 
&prices::Vecs, + blocks: &blocks::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - let open = &price.usd.split.open.dateindex; - let low = &price.usd.split.low.dateindex; - let high = &price.usd.split.high.dateindex; + let price = &prices.usd.price; - self.price_1w_min.compute_all(starting_indexes, exit, |v| { - v.compute_min(starting_indexes.dateindex, low, 7, exit)?; - Ok(()) - })?; + self.price_1w_min.height.compute_rolling_min_from_starts( + starting_indexes.height, + &blocks.count.height_1w_ago, + price, + exit, + )?; - self.price_1w_max.compute_all(starting_indexes, exit, |v| { - v.compute_max(starting_indexes.dateindex, high, 7, exit)?; - Ok(()) - })?; + self.price_1w_max.height.compute_rolling_max_from_starts( + starting_indexes.height, + &blocks.count.height_1w_ago, + price, + exit, + )?; - self.price_2w_min.compute_all(starting_indexes, exit, |v| { - v.compute_min(starting_indexes.dateindex, low, 14, exit)?; - Ok(()) - })?; + self.price_2w_min.height.compute_rolling_min_from_starts( + starting_indexes.height, + &blocks.count.height_2w_ago, + price, + exit, + )?; - self.price_2w_max.compute_all(starting_indexes, exit, |v| { - v.compute_max(starting_indexes.dateindex, high, 14, exit)?; - Ok(()) - })?; + self.price_2w_max.height.compute_rolling_max_from_starts( + starting_indexes.height, + &blocks.count.height_2w_ago, + price, + exit, + )?; - self.price_1m_min.compute_all(starting_indexes, exit, |v| { - v.compute_min(starting_indexes.dateindex, low, 30, exit)?; - Ok(()) - })?; + self.price_1m_min.height.compute_rolling_min_from_starts( + starting_indexes.height, + &blocks.count.height_1m_ago, + price, + exit, + )?; - self.price_1m_max.compute_all(starting_indexes, exit, |v| { - v.compute_max(starting_indexes.dateindex, high, 30, exit)?; - Ok(()) - })?; + self.price_1m_max.height.compute_rolling_max_from_starts( + starting_indexes.height, + &blocks.count.height_1m_ago, + price, + exit, + )?; - 
self.price_1y_min.compute_all(starting_indexes, exit, |v| { - v.compute_min(starting_indexes.dateindex, low, 365, exit)?; - Ok(()) - })?; + self.price_1y_min.height.compute_rolling_min_from_starts( + starting_indexes.height, + &blocks.count.height_1y_ago, + price, + exit, + )?; - self.price_1y_max.compute_all(starting_indexes, exit, |v| { - v.compute_max(starting_indexes.dateindex, high, 365, exit)?; - Ok(()) - })?; + self.price_1y_max.height.compute_rolling_max_from_starts( + starting_indexes.height, + &blocks.count.height_1y_ago, + price, + exit, + )?; - self.price_true_range.compute_transform3( - starting_indexes.dateindex, - open, - high, - low, - |(i, open, high, low, ..)| { - let high_min_low = **high - **low; - let high_min_open = (**high - **open).abs(); - let low_min_open = (**low - **open).abs(); - (i, high_min_low.max(high_min_open).max(low_min_open).into()) + // True range at block level: |price[h] - price[h-1]| + let mut prev_price = None; + self.price_true_range.height.compute_transform( + starting_indexes.height, + price, + |(h, current, ..)| { + let prev = prev_price.unwrap_or_else(|| { + if h.to_usize() > 0 { + price.collect_one_at(h.to_usize() - 1).unwrap_or(current) + } else { + current + } + }); + prev_price = Some(current); + let tr = (*current - *prev).abs(); + (h, StoredF32::from(tr)) }, exit, )?; - self.price_true_range_2w_sum.compute_sum( - starting_indexes.dateindex, - &self.price_true_range, - 14, + // 2w rolling sum of true range + self.price_true_range_2w_sum.height.compute_rolling_sum( + starting_indexes.height, + &blocks.count.height_2w_ago, + &self.price_true_range.height, exit, )?; - self.price_2w_choppiness_index - .compute_all(starting_indexes, exit, |v| { - let n = 14; - let log10n = (n as f32).log10(); - v.compute_transform3( - starting_indexes.dateindex, - &self.price_true_range_2w_sum, - &self.price_2w_max.dateindex, - &self.price_2w_min.dateindex, - |(i, tr_sum, max, min, ..)| { - ( - i, - StoredF32::from( - 100.0 * (*tr_sum 
/ (*max - *min) as f32).log10() / log10n, - ), - ) - }, - exit, - )?; - Ok(()) - })?; + // Choppiness index: 100 * log10(tr_2w_sum / (price_2w_max - price_2w_min)) / log10(14) + let log10n = 14.0f32.log10(); + self.price_2w_choppiness_index.height.compute_transform3( + starting_indexes.height, + &self.price_true_range_2w_sum.height, + &self.price_2w_max.height, + &self.price_2w_min.height, + |(h, tr_sum, max, min, ..)| { + let range = *max - *min; + let ci = if range > 0.0 { + StoredF32::from( + 100.0 * (*tr_sum / range as f32).log10() / log10n, + ) + } else { + StoredF32::NAN + }; + (h, ci) + }, + exit, + )?; Ok(()) } diff --git a/crates/brk_computer/src/market/range/import.rs b/crates/brk_computer/src/market/range/import.rs index e039668b1..1e406ed32 100644 --- a/crates/brk_computer/src/market/range/import.rs +++ b/crates/brk_computer/src/market/range/import.rs @@ -1,30 +1,31 @@ use brk_error::Result; use brk_types::Version; -use vecdb::{Database, EagerVec, ImportableVec}; +use vecdb::Database; use super::Vecs; -use crate::{indexes, internal::{ComputedFromDateLast, Price}}; +use crate::{indexes, internal::{ComputedFromHeightLast, PriceFromHeight}}; impl Vecs { - pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { + pub(crate) fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { let v1 = Version::ONE; Ok(Self { - price_1w_min: Price::forced_import(db, "price_1w_min", version + v1, indexes)?, - price_1w_max: Price::forced_import(db, "price_1w_max", version + v1, indexes)?, - price_2w_min: Price::forced_import(db, "price_2w_min", version + v1, indexes)?, - price_2w_max: Price::forced_import(db, "price_2w_max", version + v1, indexes)?, - price_1m_min: Price::forced_import(db, "price_1m_min", version + v1, indexes)?, - price_1m_max: Price::forced_import(db, "price_1m_max", version + v1, indexes)?, - price_1y_min: Price::forced_import(db, "price_1y_min", version + v1, indexes)?, - price_1y_max: 
Price::forced_import(db, "price_1y_max", version + v1, indexes)?, - price_true_range: EagerVec::forced_import(db, "price_true_range", version)?, - price_true_range_2w_sum: EagerVec::forced_import(db, "price_true_range_2w_sum", version)?, - price_2w_choppiness_index: ComputedFromDateLast::forced_import( - db, - "price_2w_choppiness_index", - version + v1, - indexes, + price_1w_min: PriceFromHeight::forced_import(db, "price_1w_min", version + v1, indexes)?, + price_1w_max: PriceFromHeight::forced_import(db, "price_1w_max", version + v1, indexes)?, + price_2w_min: PriceFromHeight::forced_import(db, "price_2w_min", version + v1, indexes)?, + price_2w_max: PriceFromHeight::forced_import(db, "price_2w_max", version + v1, indexes)?, + price_1m_min: PriceFromHeight::forced_import(db, "price_1m_min", version + v1, indexes)?, + price_1m_max: PriceFromHeight::forced_import(db, "price_1m_max", version + v1, indexes)?, + price_1y_min: PriceFromHeight::forced_import(db, "price_1y_min", version + v1, indexes)?, + price_1y_max: PriceFromHeight::forced_import(db, "price_1y_max", version + v1, indexes)?, + price_true_range: ComputedFromHeightLast::forced_import( + db, "price_true_range", version + v1, indexes, + )?, + price_true_range_2w_sum: ComputedFromHeightLast::forced_import( + db, "price_true_range_2w_sum", version + v1, indexes, + )?, + price_2w_choppiness_index: ComputedFromHeightLast::forced_import( + db, "price_2w_choppiness_index", version + v1, indexes, )?, }) } diff --git a/crates/brk_computer/src/market/range/vecs.rs b/crates/brk_computer/src/market/range/vecs.rs index 34ed47a80..89271dfda 100644 --- a/crates/brk_computer/src/market/range/vecs.rs +++ b/crates/brk_computer/src/market/range/vecs.rs @@ -1,21 +1,21 @@ use brk_traversable::Traversable; -use brk_types::{DateIndex, StoredF32}; -use vecdb::{EagerVec, PcoVec}; +use brk_types::{Dollars, StoredF32}; +use vecdb::{Rw, StorageMode}; -use crate::internal::{ComputedFromDateLast, Price}; +use 
crate::internal::{ComputedFromHeightLast, Price}; /// Price range and choppiness metrics -#[derive(Clone, Traversable)] -pub struct Vecs { - pub price_1w_min: Price, - pub price_1w_max: Price, - pub price_2w_min: Price, - pub price_2w_max: Price, - pub price_1m_min: Price, - pub price_1m_max: Price, - pub price_1y_min: Price, - pub price_1y_max: Price, - pub price_true_range: EagerVec>, - pub price_true_range_2w_sum: EagerVec>, - pub price_2w_choppiness_index: ComputedFromDateLast, +#[derive(Traversable)] +pub struct Vecs { + pub price_1w_min: Price>, + pub price_1w_max: Price>, + pub price_2w_min: Price>, + pub price_2w_max: Price>, + pub price_1m_min: Price>, + pub price_1m_max: Price>, + pub price_1y_min: Price>, + pub price_1y_max: Price>, + pub price_true_range: ComputedFromHeightLast, + pub price_true_range_2w_sum: ComputedFromHeightLast, + pub price_2w_choppiness_index: ComputedFromHeightLast, } diff --git a/crates/brk_computer/src/market/returns/compute.rs b/crates/brk_computer/src/market/returns/compute.rs index 5ca50bf5b..0e8b58865 100644 --- a/crates/brk_computer/src/market/returns/compute.rs +++ b/crates/brk_computer/src/market/returns/compute.rs @@ -1,50 +1,68 @@ use brk_error::Result; use brk_types::StoredF32; -use vecdb::Exit; +use vecdb::{Exit, ReadableVec}; use super::Vecs; -use crate::ComputeIndexes; +use crate::{ComputeIndexes, blocks, indexes}; impl Vecs { - pub fn compute(&mut self, starting_indexes: &ComputeIndexes, exit: &Exit) -> Result<()> { + pub(crate) fn compute( + &mut self, + indexes: &indexes::Vecs, + blocks: &blocks::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { // CAGR computed from returns (2y+ periods only) + let h2d = &indexes.height.day1; let price_returns_dca = self.price_returns.as_dca_period(); for (cagr, returns, days) in self.cagr.zip_mut_with_period(&price_returns_dca) { - cagr.compute_all(starting_indexes, exit, |v| { - v.compute_cagr( - starting_indexes.dateindex, - &returns.dateindex, - 
days as usize, - exit, - )?; - Ok(()) - })?; + let years = days as f32 / 365.0; + let mut cached_di = None; + let mut cached_val = StoredF32::from(0.0); + cagr.height.compute_transform( + starting_indexes.height, + h2d, + |(h, di, _)| { + if cached_di != Some(di) { + cached_di = Some(di); + cached_val = StoredF32::from( + returns.day1 + .collect_one(di) + .map(|r| ((*r / 100.0 + 1.0).powf(1.0 / years) - 1.0) * 100.0) + .unwrap_or(0.0) + ); + } + (h, cached_val) + }, + exit, + )?; } - let _1d_price_returns_dateindex = &self.price_returns._1d.dateindex; + let _1d_price_returns_height = &self.price_returns._1d.height; self._1d_returns_1w_sd - .compute_all(starting_indexes, exit, _1d_price_returns_dateindex)?; + .compute_all(blocks, starting_indexes, exit, _1d_price_returns_height)?; self._1d_returns_1m_sd - .compute_all(starting_indexes, exit, _1d_price_returns_dateindex)?; + .compute_all(blocks, starting_indexes, exit, _1d_price_returns_height)?; self._1d_returns_1y_sd - .compute_all(starting_indexes, exit, _1d_price_returns_dateindex)?; + .compute_all(blocks, starting_indexes, exit, _1d_price_returns_height)?; // Downside returns: min(return, 0) self.downside_returns.compute_transform( - starting_indexes.dateindex, - _1d_price_returns_dateindex, + starting_indexes.height, + _1d_price_returns_height, |(i, ret, ..)| (i, StoredF32::from((*ret).min(0.0))), exit, )?; // Downside deviation (SD of downside returns) self.downside_1w_sd - .compute_all(starting_indexes, exit, &self.downside_returns)?; + .compute_all(blocks, starting_indexes, exit, &self.downside_returns)?; self.downside_1m_sd - .compute_all(starting_indexes, exit, &self.downside_returns)?; + .compute_all(blocks, starting_indexes, exit, &self.downside_returns)?; self.downside_1y_sd - .compute_all(starting_indexes, exit, &self.downside_returns)?; + .compute_all(blocks, starting_indexes, exit, &self.downside_returns)?; Ok(()) } diff --git a/crates/brk_computer/src/market/returns/import.rs 
b/crates/brk_computer/src/market/returns/import.rs index 1348ff45d..b81413fcd 100644 --- a/crates/brk_computer/src/market/returns/import.rs +++ b/crates/brk_computer/src/market/returns/import.rs @@ -1,25 +1,25 @@ use brk_error::Result; use brk_types::Version; -use vecdb::{Database, EagerVec, ImportableVec}; +use vecdb::{Database, EagerVec, ImportableVec, ReadableCloneableVec}; use super::super::lookback::{self, LOOKBACK_PERIOD_NAMES}; use super::Vecs; use crate::{ indexes, internal::{ - ComputedFromDateLast, ComputedFromDateStdDev, LazyBinaryFromDateLast, - PercentageDiffCloseDollars, StandardDeviationVecsOptions, + ComputedFromHeightLast, ComputedFromHeightStdDev, LazyBinaryFromHeightLast, + PercentageDiffDollars, StandardDeviationVecsOptions, }, market::dca::ByDcaCagr, - price, + prices, }; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, indexes: &indexes::Vecs, - price: &price::Vecs, + prices: &prices::Vecs, lookback: &lookback::Vecs, ) -> Result { let v1 = Version::ONE; @@ -28,20 +28,24 @@ impl Vecs { LOOKBACK_PERIOD_NAMES .zip_ref(&lookback.price_ago) .map(|(name, price_ago)| { - LazyBinaryFromDateLast::from_computed_both_last::( + LazyBinaryFromHeightLast::from_height_and_derived_last::< + PercentageDiffDollars, + >( &format!("{name}_price_returns"), version, - &price.usd.split.close, - price_ago, + prices.usd.price.read_only_boxed_clone(), + price_ago.height.read_only_boxed_clone(), + &prices.usd.split.close, + &price_ago.rest, ) }); // CAGR (computed, 2y+ only) let cagr = ByDcaCagr::try_new(|name, _days| { - ComputedFromDateLast::forced_import(db, &format!("{name}_cagr"), version, indexes) + ComputedFromHeightLast::forced_import(db, &format!("{name}_cagr"), version, indexes) })?; - let _1d_returns_1w_sd = ComputedFromDateStdDev::forced_import( + let _1d_returns_1w_sd = ComputedFromHeightStdDev::forced_import( db, "1d_returns_1w_sd", 7, @@ -49,9 +53,8 @@ impl Vecs { indexes, 
StandardDeviationVecsOptions::default(), None, - None, )?; - let _1d_returns_1m_sd = ComputedFromDateStdDev::forced_import( + let _1d_returns_1m_sd = ComputedFromHeightStdDev::forced_import( db, "1d_returns_1m_sd", 30, @@ -59,9 +62,8 @@ impl Vecs { indexes, StandardDeviationVecsOptions::default(), None, - None, )?; - let _1d_returns_1y_sd = ComputedFromDateStdDev::forced_import( + let _1d_returns_1y_sd = ComputedFromHeightStdDev::forced_import( db, "1d_returns_1y_sd", 365, @@ -69,11 +71,10 @@ impl Vecs { indexes, StandardDeviationVecsOptions::default(), None, - None, )?; let downside_returns = EagerVec::forced_import(db, "downside_returns", version)?; - let downside_1w_sd = ComputedFromDateStdDev::forced_import( + let downside_1w_sd = ComputedFromHeightStdDev::forced_import( db, "downside_1w_sd", 7, @@ -81,9 +82,8 @@ impl Vecs { indexes, StandardDeviationVecsOptions::default(), None, - None, )?; - let downside_1m_sd = ComputedFromDateStdDev::forced_import( + let downside_1m_sd = ComputedFromHeightStdDev::forced_import( db, "downside_1m_sd", 30, @@ -91,9 +91,8 @@ impl Vecs { indexes, StandardDeviationVecsOptions::default(), None, - None, )?; - let downside_1y_sd = ComputedFromDateStdDev::forced_import( + let downside_1y_sd = ComputedFromHeightStdDev::forced_import( db, "downside_1y_sd", 365, @@ -101,7 +100,6 @@ impl Vecs { indexes, StandardDeviationVecsOptions::default(), None, - None, )?; Ok(Self { diff --git a/crates/brk_computer/src/market/returns/vecs.rs b/crates/brk_computer/src/market/returns/vecs.rs index b22f3f02f..e041192fe 100644 --- a/crates/brk_computer/src/market/returns/vecs.rs +++ b/crates/brk_computer/src/market/returns/vecs.rs @@ -1,28 +1,28 @@ use brk_traversable::Traversable; -use brk_types::{Close, DateIndex, Dollars, StoredF32}; -use vecdb::{EagerVec, PcoVec}; +use brk_types::{Dollars, Height, StoredF32}; +use vecdb::{EagerVec, PcoVec, Rw, StorageMode}; use crate::{ - internal::{ComputedFromDateLast, ComputedFromDateStdDev, 
LazyBinaryFromDateLast}, + internal::{ComputedFromHeightLast, ComputedFromHeightStdDev, LazyBinaryFromHeightLast}, market::{dca::ByDcaCagr, lookback::ByLookbackPeriod}, }; /// Price returns, CAGR, and returns standard deviation metrics -#[derive(Clone, Traversable)] -pub struct Vecs { - pub price_returns: ByLookbackPeriod, Dollars>>, +#[derive(Traversable)] +pub struct Vecs { + pub price_returns: ByLookbackPeriod>, // CAGR (computed from returns, 2y+ only) - pub cagr: ByDcaCagr>, + pub cagr: ByDcaCagr>, // Returns standard deviation (computed from 1d returns) - pub _1d_returns_1w_sd: ComputedFromDateStdDev, - pub _1d_returns_1m_sd: ComputedFromDateStdDev, - pub _1d_returns_1y_sd: ComputedFromDateStdDev, + pub _1d_returns_1w_sd: ComputedFromHeightStdDev, + pub _1d_returns_1m_sd: ComputedFromHeightStdDev, + pub _1d_returns_1y_sd: ComputedFromHeightStdDev, // Downside returns and deviation (for Sortino ratio) - pub downside_returns: EagerVec>, - pub downside_1w_sd: ComputedFromDateStdDev, - pub downside_1m_sd: ComputedFromDateStdDev, - pub downside_1y_sd: ComputedFromDateStdDev, + pub downside_returns: M::Stored>>, + pub downside_1w_sd: ComputedFromHeightStdDev, + pub downside_1m_sd: ComputedFromHeightStdDev, + pub downside_1y_sd: ComputedFromHeightStdDev, } diff --git a/crates/brk_computer/src/market/volatility/import.rs b/crates/brk_computer/src/market/volatility/import.rs index fc11df8c9..9065c2d5b 100644 --- a/crates/brk_computer/src/market/volatility/import.rs +++ b/crates/brk_computer/src/market/volatility/import.rs @@ -1,75 +1,78 @@ use brk_types::Version; -use vecdb::{IterableCloneableVec, LazyVecFrom2}; +use vecdb::{ReadableCloneableVec, LazyVecFrom2}; use super::super::returns; use super::Vecs; use crate::internal::{ - LazyFromDateLast, RatioF32, StoredF32TimesSqrt7, StoredF32TimesSqrt30, StoredF32TimesSqrt365, + LazyFromHeightLast, RatioF32, StoredF32TimesSqrt7, StoredF32TimesSqrt30, StoredF32TimesSqrt365, }; impl Vecs { - pub fn forced_import(version: 
Version, returns: &returns::Vecs) -> Self { + pub(crate) fn forced_import(version: Version, returns: &returns::Vecs) -> Self { let v2 = Version::TWO; - let price_1w_volatility = LazyFromDateLast::from_source::( + let price_1w_volatility = LazyFromHeightLast::from_computed::( "price_1w_volatility", version + v2, + returns._1d_returns_1w_sd.sd.height.read_only_boxed_clone(), &returns._1d_returns_1w_sd.sd, ); - let price_1m_volatility = LazyFromDateLast::from_source::( + let price_1m_volatility = LazyFromHeightLast::from_computed::( "price_1m_volatility", version + v2, + returns._1d_returns_1m_sd.sd.height.read_only_boxed_clone(), &returns._1d_returns_1m_sd.sd, ); - let price_1y_volatility = LazyFromDateLast::from_source::( + let price_1y_volatility = LazyFromHeightLast::from_computed::( "price_1y_volatility", version + v2, + returns._1d_returns_1y_sd.sd.height.read_only_boxed_clone(), &returns._1d_returns_1y_sd.sd, ); let sharpe_1w = LazyVecFrom2::transformed::( "sharpe_1w", version + v2, - returns.price_returns._1w.dateindex.boxed_clone(), - price_1w_volatility.dateindex.boxed_clone(), + returns.price_returns._1w.height.read_only_boxed_clone(), + price_1w_volatility.height.read_only_boxed_clone(), ); let sharpe_1m = LazyVecFrom2::transformed::( "sharpe_1m", version + v2, - returns.price_returns._1m.dateindex.boxed_clone(), - price_1m_volatility.dateindex.boxed_clone(), + returns.price_returns._1m.height.read_only_boxed_clone(), + price_1m_volatility.height.read_only_boxed_clone(), ); let sharpe_1y = LazyVecFrom2::transformed::( "sharpe_1y", version + v2, - returns.price_returns._1y.dateindex.boxed_clone(), - price_1y_volatility.dateindex.boxed_clone(), + returns.price_returns._1y.height.read_only_boxed_clone(), + price_1y_volatility.height.read_only_boxed_clone(), ); // Sortino ratio = returns / downside volatility let sortino_1w = LazyVecFrom2::transformed::( "sortino_1w", version + v2, - returns.price_returns._1w.dateindex.boxed_clone(), - 
returns.downside_1w_sd.sd.dateindex.boxed_clone(), + returns.price_returns._1w.height.read_only_boxed_clone(), + returns.downside_1w_sd.sd.height.read_only_boxed_clone(), ); let sortino_1m = LazyVecFrom2::transformed::( "sortino_1m", version + v2, - returns.price_returns._1m.dateindex.boxed_clone(), - returns.downside_1m_sd.sd.dateindex.boxed_clone(), + returns.price_returns._1m.height.read_only_boxed_clone(), + returns.downside_1m_sd.sd.height.read_only_boxed_clone(), ); let sortino_1y = LazyVecFrom2::transformed::( "sortino_1y", version + v2, - returns.price_returns._1y.dateindex.boxed_clone(), - returns.downside_1y_sd.sd.dateindex.boxed_clone(), + returns.price_returns._1y.height.read_only_boxed_clone(), + returns.downside_1y_sd.sd.height.read_only_boxed_clone(), ); Self { diff --git a/crates/brk_computer/src/market/volatility/vecs.rs b/crates/brk_computer/src/market/volatility/vecs.rs index d1e1af918..1808b2703 100644 --- a/crates/brk_computer/src/market/volatility/vecs.rs +++ b/crates/brk_computer/src/market/volatility/vecs.rs @@ -1,21 +1,21 @@ use brk_traversable::Traversable; -use brk_types::{DateIndex, StoredF32}; +use brk_types::{Height, StoredF32}; use vecdb::LazyVecFrom2; -use crate::internal::LazyFromDateLast; +use crate::internal::LazyFromHeightLast; /// Price volatility metrics (derived from returns standard deviation) #[derive(Clone, Traversable)] pub struct Vecs { - pub price_1w_volatility: LazyFromDateLast, - pub price_1m_volatility: LazyFromDateLast, - pub price_1y_volatility: LazyFromDateLast, + pub price_1w_volatility: LazyFromHeightLast, + pub price_1m_volatility: LazyFromHeightLast, + pub price_1y_volatility: LazyFromHeightLast, - pub sharpe_1w: LazyVecFrom2, - pub sharpe_1m: LazyVecFrom2, - pub sharpe_1y: LazyVecFrom2, + pub sharpe_1w: LazyVecFrom2, + pub sharpe_1m: LazyVecFrom2, + pub sharpe_1y: LazyVecFrom2, - pub sortino_1w: LazyVecFrom2, - pub sortino_1m: LazyVecFrom2, - pub sortino_1y: LazyVecFrom2, + pub sortino_1w: LazyVecFrom2, + pub 
sortino_1m: LazyVecFrom2, + pub sortino_1y: LazyVecFrom2, } diff --git a/crates/brk_computer/src/mining/compute.rs b/crates/brk_computer/src/mining/compute.rs new file mode 100644 index 000000000..6f1b2e993 --- /dev/null +++ b/crates/brk_computer/src/mining/compute.rs @@ -0,0 +1,41 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use vecdb::Exit; + +use super::Vecs; +use crate::{ComputeIndexes, blocks, indexes, transactions}; + +impl Vecs { + pub(crate) fn compute( + &mut self, + indexer: &Indexer, + indexes: &indexes::Vecs, + blocks: &blocks::Vecs, + transactions: &transactions::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + // Block rewards (coinbase, subsidy, fee_dominance, etc.) + self.rewards.compute( + indexer, + indexes, + &blocks.count, + &transactions.fees, + starting_indexes, + exit, + )?; + + // Hashrate metrics (uses rewards.coinbase_24h_sum — disjoint field borrow) + self.hashrate.compute( + &blocks.count, + &blocks.difficulty, + &self.rewards.coinbase_24h_sum, + starting_indexes, + exit, + )?; + + let _lock = exit.lock(); + self.db.compact()?; + Ok(()) + } +} diff --git a/crates/brk_computer/src/mining/hashrate/compute.rs b/crates/brk_computer/src/mining/hashrate/compute.rs new file mode 100644 index 000000000..dd1bb7922 --- /dev/null +++ b/crates/brk_computer/src/mining/hashrate/compute.rs @@ -0,0 +1,173 @@ +use brk_error::Result; +use brk_types::{StoredF32, StoredF64}; +use vecdb::Exit; + +use super::Vecs; +use crate::{ + blocks::{self, ONE_TERA_HASH, TARGET_BLOCKS_PER_DAY_F64}, + internal::StoredValueFromHeightLast, + ComputeIndexes, + traits::ComputeDrawdown, +}; + +impl Vecs { + pub(crate) fn compute( + &mut self, + count_vecs: &blocks::CountVecs, + difficulty_vecs: &blocks::DifficultyVecs, + coinbase_sum_24h: &StoredValueFromHeightLast, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + self.hash_rate.height.compute_transform2( + starting_indexes.height, + 
&count_vecs.block_count_24h_sum.height, + &difficulty_vecs.as_hash.height, + |(i, block_count_sum, difficulty_as_hash, ..)| { + ( + i, + StoredF64::from( + (f64::from(block_count_sum) / TARGET_BLOCKS_PER_DAY_F64) + * f64::from(difficulty_as_hash), + ), + ) + }, + exit, + )?; + + self.hash_rate_1w_sma.height.compute_rolling_average( + starting_indexes.height, + &count_vecs.height_1w_ago, + &self.hash_rate.height, + exit, + )?; + + self.hash_rate_1m_sma.height.compute_rolling_average( + starting_indexes.height, + &count_vecs.height_1m_ago, + &self.hash_rate.height, + exit, + )?; + + self.hash_rate_2m_sma.height.compute_rolling_average( + starting_indexes.height, + &count_vecs.height_2m_ago, + &self.hash_rate.height, + exit, + )?; + + self.hash_rate_1y_sma.height.compute_rolling_average( + starting_indexes.height, + &count_vecs.height_1y_ago, + &self.hash_rate.height, + exit, + )?; + + self.hash_rate_ath.height.compute_all_time_high( + starting_indexes.height, + &self.hash_rate.height, + exit, + )?; + + self.hash_rate_drawdown.height.compute_drawdown( + starting_indexes.height, + &self.hash_rate.height, + &self.hash_rate_ath.height, + exit, + )?; + + self.hash_price_ths.height.compute_transform2( + starting_indexes.height, + &coinbase_sum_24h.usd.height, + &self.hash_rate.height, + |(i, coinbase_sum, hashrate, ..)| { + let hashrate_ths = *hashrate / ONE_TERA_HASH; + let price = if hashrate_ths == 0.0 { + StoredF32::NAN + } else { + (*coinbase_sum / hashrate_ths).into() + }; + (i, price) + }, + exit, + )?; + + self.hash_price_phs.height.compute_transform( + starting_indexes.height, + &self.hash_price_ths.height, + |(i, price, ..)| (i, (*price * 1000.0).into()), + exit, + )?; + + self.hash_value_ths.height.compute_transform2( + starting_indexes.height, + &coinbase_sum_24h.sats.height, + &self.hash_rate.height, + |(i, coinbase_sum, hashrate, ..)| { + let hashrate_ths = *hashrate / ONE_TERA_HASH; + let value = if hashrate_ths == 0.0 { + StoredF32::NAN + } else { + 
StoredF32::from(*coinbase_sum as f64 / hashrate_ths) + }; + (i, value) + }, + exit, + )?; + + self.hash_value_phs.height.compute_transform( + starting_indexes.height, + &self.hash_value_ths.height, + |(i, value, ..)| (i, (*value * 1000.0).into()), + exit, + )?; + + self.hash_price_ths_min.height.compute_all_time_low_( + starting_indexes.height, + &self.hash_price_ths.height, + exit, + true, + )?; + + self.hash_price_phs_min.height.compute_all_time_low_( + starting_indexes.height, + &self.hash_price_phs.height, + exit, + true, + )?; + + self.hash_value_ths_min.height.compute_all_time_low_( + starting_indexes.height, + &self.hash_value_ths.height, + exit, + true, + )?; + + self.hash_value_phs_min.height.compute_all_time_low_( + starting_indexes.height, + &self.hash_value_phs.height, + exit, + true, + )?; + + self.hash_price_rebound + .height + .compute_percentage_difference( + starting_indexes.height, + &self.hash_price_phs.height, + &self.hash_price_phs_min.height, + exit, + )?; + + self.hash_value_rebound + .height + .compute_percentage_difference( + starting_indexes.height, + &self.hash_value_phs.height, + &self.hash_value_phs_min.height, + exit, + )?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/blocks/mining/import.rs b/crates/brk_computer/src/mining/hashrate/import.rs similarity index 90% rename from crates/brk_computer/src/blocks/mining/import.rs rename to crates/brk_computer/src/mining/hashrate/import.rs index 76e3014c9..f2e785736 100644 --- a/crates/brk_computer/src/blocks/mining/import.rs +++ b/crates/brk_computer/src/mining/hashrate/import.rs @@ -5,11 +5,11 @@ use vecdb::Database; use super::Vecs; use crate::{ indexes, - internal::{ComputedFromHeightLast, ComputedFromDateLast}, + internal::ComputedFromHeightLast, }; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, indexes: &indexes::Vecs, @@ -19,25 +19,25 @@ impl Vecs { Ok(Self { hash_rate: ComputedFromHeightLast::forced_import(db, "hash_rate", 
version + v5, indexes)?, - hash_rate_1w_sma: ComputedFromDateLast::forced_import( + hash_rate_1w_sma: ComputedFromHeightLast::forced_import( db, "hash_rate_1w_sma", version, indexes, )?, - hash_rate_1m_sma: ComputedFromDateLast::forced_import( + hash_rate_1m_sma: ComputedFromHeightLast::forced_import( db, "hash_rate_1m_sma", version, indexes, )?, - hash_rate_2m_sma: ComputedFromDateLast::forced_import( + hash_rate_2m_sma: ComputedFromHeightLast::forced_import( db, "hash_rate_2m_sma", version, indexes, )?, - hash_rate_1y_sma: ComputedFromDateLast::forced_import( + hash_rate_1y_sma: ComputedFromHeightLast::forced_import( db, "hash_rate_1y_sma", version, diff --git a/crates/brk_computer/src/blocks/mining/mod.rs b/crates/brk_computer/src/mining/hashrate/mod.rs similarity index 100% rename from crates/brk_computer/src/blocks/mining/mod.rs rename to crates/brk_computer/src/mining/hashrate/mod.rs diff --git a/crates/brk_computer/src/mining/hashrate/vecs.rs b/crates/brk_computer/src/mining/hashrate/vecs.rs new file mode 100644 index 000000000..f5249962b --- /dev/null +++ b/crates/brk_computer/src/mining/hashrate/vecs.rs @@ -0,0 +1,27 @@ +use brk_traversable::Traversable; +use brk_types::{StoredF32, StoredF64}; +use vecdb::{Rw, StorageMode}; + +use crate::internal::ComputedFromHeightLast; + +/// Mining-related metrics: hash rate, hash price, hash value +#[derive(Traversable)] +pub struct Vecs { + pub hash_rate: ComputedFromHeightLast, + pub hash_rate_1w_sma: ComputedFromHeightLast, + pub hash_rate_1m_sma: ComputedFromHeightLast, + pub hash_rate_2m_sma: ComputedFromHeightLast, + pub hash_rate_1y_sma: ComputedFromHeightLast, + pub hash_rate_ath: ComputedFromHeightLast, + pub hash_rate_drawdown: ComputedFromHeightLast, + pub hash_price_ths: ComputedFromHeightLast, + pub hash_price_ths_min: ComputedFromHeightLast, + pub hash_price_phs: ComputedFromHeightLast, + pub hash_price_phs_min: ComputedFromHeightLast, + pub hash_price_rebound: ComputedFromHeightLast, + pub 
hash_value_ths: ComputedFromHeightLast, + pub hash_value_ths_min: ComputedFromHeightLast, + pub hash_value_phs: ComputedFromHeightLast, + pub hash_value_phs_min: ComputedFromHeightLast, + pub hash_value_rebound: ComputedFromHeightLast, +} diff --git a/crates/brk_computer/src/mining/import.rs b/crates/brk_computer/src/mining/import.rs new file mode 100644 index 000000000..c28ea19f2 --- /dev/null +++ b/crates/brk_computer/src/mining/import.rs @@ -0,0 +1,42 @@ +use std::path::Path; + +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::Version; +use vecdb::{Database, PAGE_SIZE}; + +use crate::{indexes, prices}; + +use super::{HashrateVecs, RewardsVecs, Vecs}; + +impl Vecs { + pub(crate) fn forced_import( + parent_path: &Path, + parent_version: Version, + indexes: &indexes::Vecs, + prices: &prices::Vecs, + ) -> Result { + let db = Database::open(&parent_path.join(super::DB_NAME))?; + db.set_min_len(PAGE_SIZE * 50_000_000)?; + + let version = parent_version; + + let rewards = RewardsVecs::forced_import(&db, version, indexes, prices)?; + let hashrate = HashrateVecs::forced_import(&db, version, indexes)?; + + let this = Self { + db, + rewards, + hashrate, + }; + + this.db.retain_regions( + this.iter_any_exportable() + .flat_map(|v| v.region_names()) + .collect(), + )?; + this.db.compact()?; + + Ok(this) + } +} diff --git a/crates/brk_computer/src/mining/mod.rs b/crates/brk_computer/src/mining/mod.rs new file mode 100644 index 000000000..c7d0ba6fa --- /dev/null +++ b/crates/brk_computer/src/mining/mod.rs @@ -0,0 +1,22 @@ +pub mod hashrate; +pub mod rewards; + +mod compute; +mod import; + +use brk_traversable::Traversable; +use vecdb::{Database, Rw, StorageMode}; + +pub use hashrate::Vecs as HashrateVecs; +pub use rewards::Vecs as RewardsVecs; + +pub const DB_NAME: &str = "mining"; + +#[derive(Traversable)] +pub struct Vecs { + #[traversable(skip)] + pub(crate) db: Database, + + pub rewards: RewardsVecs, + pub hashrate: HashrateVecs, +} diff --git 
a/crates/brk_computer/src/mining/rewards/compute.rs b/crates/brk_computer/src/mining/rewards/compute.rs new file mode 100644 index 000000000..d83ab6137 --- /dev/null +++ b/crates/brk_computer/src/mining/rewards/compute.rs @@ -0,0 +1,223 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use brk_types::{CheckedSub, HalvingEpoch, Sats, StoredF32}; +use vecdb::{Exit, ReadableVec, VecIndex}; + +use super::Vecs; +use crate::{ComputeIndexes, blocks, indexes, transactions}; + +impl Vecs { + pub(crate) fn compute( + &mut self, + indexer: &Indexer, + indexes: &indexes::Vecs, + count_vecs: &blocks::CountVecs, + transactions_fees: &transactions::FeesVecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + self.coinbase.compute(starting_indexes, exit, |vec| { + // Cursors avoid per-height PcoVec page decompression for the + // tx-indexed lookups. Coinbase txindex values are strictly + // increasing, so the cursors only advance forward. + let mut txout_cursor = indexer.vecs.transactions.first_txoutindex.cursor(); + let mut count_cursor = indexes.txindex.output_count.cursor(); + + vec.compute_transform( + starting_indexes.height, + &indexer.vecs.transactions.first_txindex, + |(height, txindex, ..)| { + let ti = txindex.to_usize(); + + txout_cursor.advance(ti - txout_cursor.position()); + let first_txoutindex = txout_cursor.next().unwrap().to_usize(); + + count_cursor.advance(ti - count_cursor.position()); + let output_count: usize = count_cursor.next().unwrap().into(); + + let sats = indexer.vecs.outputs.value.fold_range_at( + first_txoutindex, + first_txoutindex + output_count, + Sats::ZERO, + |acc, v| acc + v, + ); + (height, sats) + }, + exit, + )?; + Ok(()) + })?; + + self.coinbase_24h_sum.compute_rolling_sum( + starting_indexes.height, + &count_vecs.height_24h_ago, + &self.coinbase.sats.height, + &self.coinbase.usd.height, + exit, + )?; + self.coinbase_7d_sum.compute_rolling_sum( + starting_indexes.height, + &count_vecs.height_1w_ago, + 
&self.coinbase.sats.height, + &self.coinbase.usd.height, + exit, + )?; + self.coinbase_30d_sum.compute_rolling_sum( + starting_indexes.height, + &count_vecs.height_1m_ago, + &self.coinbase.sats.height, + &self.coinbase.usd.height, + exit, + )?; + self.coinbase_1y_sum.compute_rolling_sum( + starting_indexes.height, + &count_vecs.height_1y_ago, + &self.coinbase.sats.height, + &self.coinbase.usd.height, + exit, + )?; + + let fee_sats_source = transactions_fees.fee.sats.height.sum_cum.sum.inner(); + let fee_usd_source = &transactions_fees.fee.usd.height.sum; + self.fee_24h_sum.compute_rolling_sum( + starting_indexes.height, + &count_vecs.height_24h_ago, + fee_sats_source, + fee_usd_source, + exit, + )?; + self.fee_7d_sum.compute_rolling_sum( + starting_indexes.height, + &count_vecs.height_1w_ago, + fee_sats_source, + fee_usd_source, + exit, + )?; + self.fee_30d_sum.compute_rolling_sum( + starting_indexes.height, + &count_vecs.height_1m_ago, + fee_sats_source, + fee_usd_source, + exit, + )?; + self.fee_1y_sum.compute_rolling_sum( + starting_indexes.height, + &count_vecs.height_1y_ago, + fee_sats_source, + fee_usd_source, + exit, + )?; + + self.subsidy.compute(starting_indexes, exit, |vec| { + vec.compute_transform2( + starting_indexes.height, + &self.coinbase.sats.height, + transactions_fees.fee.sats.height.sum_cum.sum.inner(), + |(height, coinbase, fees, ..)| { + ( + height, + coinbase.checked_sub(fees).unwrap_or_else(|| { + dbg!(height, coinbase, fees); + panic!() + }), + ) + }, + exit, + )?; + Ok(()) + })?; + + self.unclaimed_rewards + .compute(starting_indexes, exit, |vec| { + vec.compute_transform( + starting_indexes.height, + &self.subsidy.sats.height, + |(height, subsidy, ..)| { + let halving = HalvingEpoch::from(height); + let expected = Sats::FIFTY_BTC / 2_usize.pow(halving.to_usize() as u32); + (height, expected.checked_sub(subsidy).unwrap()) + }, + exit, + )?; + Ok(()) + })?; + + // All-time cumulative fee dominance + 
self.fee_dominance.height.compute_percentage( + starting_indexes.height, + transactions_fees.fee.sats.height.sum_cum.cumulative.inner(), + self.coinbase.sats.rest.height_cumulative.inner(), + exit, + )?; + + // Rolling fee dominance = sum(fees) / sum(coinbase) * 100 + self.fee_dominance_24h.height.compute_percentage( + starting_indexes.height, + &self.fee_24h_sum.sats.height, + &self.coinbase_24h_sum.sats.height, + exit, + )?; + self.fee_dominance_7d.height.compute_percentage( + starting_indexes.height, + &self.fee_7d_sum.sats.height, + &self.coinbase_7d_sum.sats.height, + exit, + )?; + self.fee_dominance_30d.height.compute_percentage( + starting_indexes.height, + &self.fee_30d_sum.sats.height, + &self.coinbase_30d_sum.sats.height, + exit, + )?; + self.fee_dominance_1y.height.compute_percentage( + starting_indexes.height, + &self.fee_1y_sum.sats.height, + &self.coinbase_1y_sum.sats.height, + exit, + )?; + + // All-time cumulative subsidy dominance + self.subsidy_dominance.height.compute_percentage( + starting_indexes.height, + self.subsidy.sats.rest.height_cumulative.inner(), + self.coinbase.sats.rest.height_cumulative.inner(), + exit, + )?; + + // Rolling subsidy dominance = 100 - fee_dominance + let hundred = StoredF32::from(100u8); + self.subsidy_dominance_24h.height.compute_transform( + starting_indexes.height, + &self.fee_dominance_24h.height, + |(height, fee_dom, _)| (height, hundred - fee_dom), + exit, + )?; + self.subsidy_dominance_7d.height.compute_transform( + starting_indexes.height, + &self.fee_dominance_7d.height, + |(height, fee_dom, _)| (height, hundred - fee_dom), + exit, + )?; + self.subsidy_dominance_30d.height.compute_transform( + starting_indexes.height, + &self.fee_dominance_30d.height, + |(height, fee_dom, _)| (height, hundred - fee_dom), + exit, + )?; + self.subsidy_dominance_1y.height.compute_transform( + starting_indexes.height, + &self.fee_dominance_1y.height, + |(height, fee_dom, _)| (height, hundred - fee_dom), + exit, + )?; + + 
self.subsidy_usd_1y_sma.height.compute_rolling_average( + starting_indexes.height, + &count_vecs.height_1y_ago, + &self.coinbase.usd.height, + exit, + )?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/mining/rewards/import.rs b/crates/brk_computer/src/mining/rewards/import.rs new file mode 100644 index 000000000..bd7643925 --- /dev/null +++ b/crates/brk_computer/src/mining/rewards/import.rs @@ -0,0 +1,105 @@ +use brk_error::Result; +use brk_types::Version; +use vecdb::Database; + +use super::Vecs; +use crate::{ + indexes, + internal::{ComputedFromHeightLast, StoredValueFromHeightLast, ValueFromHeightFull, ValueFromHeightSumCum}, + prices, +}; + +impl Vecs { + pub(crate) fn forced_import( + db: &Database, + version: Version, + indexes: &indexes::Vecs, + prices: &prices::Vecs, + ) -> Result { + Ok(Self { + coinbase_24h_sum: StoredValueFromHeightLast::forced_import(db, "coinbase_24h_sum", version, indexes)?, + coinbase_7d_sum: StoredValueFromHeightLast::forced_import(db, "coinbase_7d_sum", version, indexes)?, + coinbase_30d_sum: StoredValueFromHeightLast::forced_import(db, "coinbase_30d_sum", version, indexes)?, + coinbase_1y_sum: StoredValueFromHeightLast::forced_import(db, "coinbase_1y_sum", version, indexes)?, + fee_24h_sum: StoredValueFromHeightLast::forced_import(db, "fee_24h_sum", version, indexes)?, + fee_7d_sum: StoredValueFromHeightLast::forced_import(db, "fee_7d_sum", version, indexes)?, + fee_30d_sum: StoredValueFromHeightLast::forced_import(db, "fee_30d_sum", version, indexes)?, + fee_1y_sum: StoredValueFromHeightLast::forced_import(db, "fee_1y_sum", version, indexes)?, + coinbase: ValueFromHeightFull::forced_import(db, "coinbase", version, indexes, prices)?, + subsidy: ValueFromHeightFull::forced_import(db, "subsidy", version, indexes, prices)?, + unclaimed_rewards: ValueFromHeightSumCum::forced_import( + db, + "unclaimed_rewards", + version, + indexes, + prices, + )?, + fee_dominance: ComputedFromHeightLast::forced_import( + db, + 
"fee_dominance", + version, + indexes, + )?, + fee_dominance_24h: ComputedFromHeightLast::forced_import( + db, + "fee_dominance_24h", + version, + indexes, + )?, + fee_dominance_7d: ComputedFromHeightLast::forced_import( + db, + "fee_dominance_7d", + version, + indexes, + )?, + fee_dominance_30d: ComputedFromHeightLast::forced_import( + db, + "fee_dominance_30d", + version, + indexes, + )?, + fee_dominance_1y: ComputedFromHeightLast::forced_import( + db, + "fee_dominance_1y", + version, + indexes, + )?, + subsidy_dominance: ComputedFromHeightLast::forced_import( + db, + "subsidy_dominance", + version, + indexes, + )?, + subsidy_dominance_24h: ComputedFromHeightLast::forced_import( + db, + "subsidy_dominance_24h", + version, + indexes, + )?, + subsidy_dominance_7d: ComputedFromHeightLast::forced_import( + db, + "subsidy_dominance_7d", + version, + indexes, + )?, + subsidy_dominance_30d: ComputedFromHeightLast::forced_import( + db, + "subsidy_dominance_30d", + version, + indexes, + )?, + subsidy_dominance_1y: ComputedFromHeightLast::forced_import( + db, + "subsidy_dominance_1y", + version, + indexes, + )?, + subsidy_usd_1y_sma: ComputedFromHeightLast::forced_import( + db, + "subsidy_usd_1y_sma", + version, + indexes, + )?, + }) + } +} diff --git a/crates/brk_computer/src/blocks/rewards/mod.rs b/crates/brk_computer/src/mining/rewards/mod.rs similarity index 100% rename from crates/brk_computer/src/blocks/rewards/mod.rs rename to crates/brk_computer/src/mining/rewards/mod.rs diff --git a/crates/brk_computer/src/mining/rewards/vecs.rs b/crates/brk_computer/src/mining/rewards/vecs.rs new file mode 100644 index 000000000..32cd24453 --- /dev/null +++ b/crates/brk_computer/src/mining/rewards/vecs.rs @@ -0,0 +1,32 @@ +use brk_traversable::Traversable; +use brk_types::{Dollars, StoredF32}; +use vecdb::{Rw, StorageMode}; + +use crate::internal::{ComputedFromHeightLast, StoredValueFromHeightLast, ValueFromHeightFull, ValueFromHeightSumCum}; + +/// Coinbase/subsidy/rewards 
metrics +#[derive(Traversable)] +pub struct Vecs { + pub coinbase_24h_sum: StoredValueFromHeightLast, + pub coinbase_7d_sum: StoredValueFromHeightLast, + pub coinbase_30d_sum: StoredValueFromHeightLast, + pub coinbase_1y_sum: StoredValueFromHeightLast, + pub fee_24h_sum: StoredValueFromHeightLast, + pub fee_7d_sum: StoredValueFromHeightLast, + pub fee_30d_sum: StoredValueFromHeightLast, + pub fee_1y_sum: StoredValueFromHeightLast, + pub coinbase: ValueFromHeightFull, + pub subsidy: ValueFromHeightFull, + pub unclaimed_rewards: ValueFromHeightSumCum, + pub fee_dominance: ComputedFromHeightLast, + pub fee_dominance_24h: ComputedFromHeightLast, + pub fee_dominance_7d: ComputedFromHeightLast, + pub fee_dominance_30d: ComputedFromHeightLast, + pub fee_dominance_1y: ComputedFromHeightLast, + pub subsidy_dominance: ComputedFromHeightLast, + pub subsidy_dominance_24h: ComputedFromHeightLast, + pub subsidy_dominance_7d: ComputedFromHeightLast, + pub subsidy_dominance_30d: ComputedFromHeightLast, + pub subsidy_dominance_1y: ComputedFromHeightLast, + pub subsidy_usd_1y_sma: ComputedFromHeightLast, +} diff --git a/crates/brk_computer/src/outputs/compute.rs b/crates/brk_computer/src/outputs/compute.rs index 7c71e3e9a..10d248e7e 100644 --- a/crates/brk_computer/src/outputs/compute.rs +++ b/crates/brk_computer/src/outputs/compute.rs @@ -6,7 +6,7 @@ use super::Vecs; use crate::{indexes, inputs, scripts, ComputeIndexes}; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, indexer: &Indexer, indexes: &indexes::Vecs, diff --git a/crates/brk_computer/src/outputs/count/compute.rs b/crates/brk_computer/src/outputs/count/compute.rs index 3b9e80946..ac0e1a12f 100644 --- a/crates/brk_computer/src/outputs/count/compute.rs +++ b/crates/brk_computer/src/outputs/count/compute.rs @@ -1,13 +1,13 @@ use brk_error::Result; use brk_indexer::Indexer; use brk_types::{Height, StoredU64}; -use vecdb::{Exit, TypedVecIterator}; +use vecdb::Exit; use super::Vecs; use crate::{ComputeIndexes, 
indexes, inputs, scripts}; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, indexer: &Indexer, indexes: &indexes::Vecs, @@ -24,46 +24,39 @@ impl Vecs { exit, )?; - self.utxo_count - .compute_all(indexes, starting_indexes, exit, |v| { - let mut input_count_iter = inputs_count.height.sum_cum.cumulative.0.into_iter(); - let mut opreturn_cumulative_iter = - scripts_count.opreturn.height_cumulative.0.into_iter(); - v.compute_transform( - starting_indexes.height, - &self.total_count.height.sum_cum.cumulative.0, - |(h, output_count, ..)| { - let input_count = input_count_iter.get_unwrap(h); - let opreturn_count = *opreturn_cumulative_iter.get_unwrap(h); - let block_count = u64::from(h + 1_usize); - // -1 > genesis output is unspendable - let mut utxo_count = - *output_count - (*input_count - block_count) - opreturn_count - 1; + self.utxo_count.height.compute_transform3( + starting_indexes.height, + &*self.total_count.height.sum_cum.cumulative, + &*inputs_count.height.sum_cum.cumulative, + &*scripts_count.opreturn.height_cumulative, + |(h, output_count, input_count, opreturn_count, ..)| { + let block_count = u64::from(h + 1_usize); + // -1 > genesis output is unspendable + let mut utxo_count = + *output_count - (*input_count - block_count) - *opreturn_count - 1; - // txid dup: e3bf3d07d4b0375638d5f1db5255fe07ba2c4cb067cd81b84ee974b6585fb468 - // Block 91_722 https://mempool.space/block/00000000000271a2dc26e7667f8419f2e15416dc6955e5a6c6cdf3f2574dd08e - // Block 91_880 https://mempool.space/block/00000000000743f190a18c5577a3c2d2a1f610ae9601ac046a38084ccb7cd721 - // - // txid dup: d5d27987d2a3dfc724e359870c6644b40e497bdc0589a033220fe15429d88599 - // Block 91_812 https://mempool.space/block/00000000000af0aed4792b1acee3d966af36cf5def14935db8de83d6f9306f2f - // Block 91_842 https://mempool.space/block/00000000000a4d0a398161ffc163c503763b1f4360639393e0e4c8e300e0caec - // - // Warning: Dups invalidate the previous coinbase according to - // 
https://chainquery.com/bitcoin-cli/gettxoutsetinfo + // txid dup: e3bf3d07d4b0375638d5f1db5255fe07ba2c4cb067cd81b84ee974b6585fb468 + // Block 91_722 https://mempool.space/block/00000000000271a2dc26e7667f8419f2e15416dc6955e5a6c6cdf3f2574dd08e + // Block 91_880 https://mempool.space/block/00000000000743f190a18c5577a3c2d2a1f610ae9601ac046a38084ccb7cd721 + // + // txid dup: d5d27987d2a3dfc724e359870c6644b40e497bdc0589a033220fe15429d88599 + // Block 91_812 https://mempool.space/block/00000000000af0aed4792b1acee3d966af36cf5def14935db8de83d6f9306f2f + // Block 91_842 https://mempool.space/block/00000000000a4d0a398161ffc163c503763b1f4360639393e0e4c8e300e0caec + // + // Warning: Dups invalidate the previous coinbase according to + // https://chainquery.com/bitcoin-cli/gettxoutsetinfo - if h >= Height::new(91_842) { - utxo_count -= 1; - } - if h >= Height::new(91_880) { - utxo_count -= 1; - } + if h >= Height::new(91_842) { + utxo_count -= 1; + } + if h >= Height::new(91_880) { + utxo_count -= 1; + } - (h, StoredU64::from(utxo_count)) - }, - exit, - )?; - Ok(()) - })?; + (h, StoredU64::from(utxo_count)) + }, + exit, + )?; Ok(()) } diff --git a/crates/brk_computer/src/outputs/count/import.rs b/crates/brk_computer/src/outputs/count/import.rs index 9d785954c..e7ec5ca83 100644 --- a/crates/brk_computer/src/outputs/count/import.rs +++ b/crates/brk_computer/src/outputs/count/import.rs @@ -9,7 +9,7 @@ use crate::{ }; impl Vecs { - pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { + pub(crate) fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { Ok(Self { total_count: TxDerivedFull::forced_import(db, "output_count", version, indexes)?, utxo_count: ComputedFromHeightLast::forced_import(db, "exact_utxo_count", version, indexes)?, diff --git a/crates/brk_computer/src/outputs/count/vecs.rs b/crates/brk_computer/src/outputs/count/vecs.rs index 53a130e2a..044dcf1cc 100644 --- 
a/crates/brk_computer/src/outputs/count/vecs.rs +++ b/crates/brk_computer/src/outputs/count/vecs.rs @@ -1,10 +1,11 @@ use brk_traversable::Traversable; use brk_types::StoredU64; +use vecdb::{Rw, StorageMode}; use crate::internal::{ComputedFromHeightLast, TxDerivedFull}; -#[derive(Clone, Traversable)] -pub struct Vecs { - pub total_count: TxDerivedFull, - pub utxo_count: ComputedFromHeightLast, +#[derive(Traversable)] +pub struct Vecs { + pub total_count: TxDerivedFull, + pub utxo_count: ComputedFromHeightLast, } diff --git a/crates/brk_computer/src/outputs/import.rs b/crates/brk_computer/src/outputs/import.rs index 3490c5302..f291bb609 100644 --- a/crates/brk_computer/src/outputs/import.rs +++ b/crates/brk_computer/src/outputs/import.rs @@ -9,7 +9,7 @@ use super::{CountVecs, SpentVecs, Vecs}; use crate::indexes; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( parent_path: &Path, parent_version: Version, indexes: &indexes::Vecs, diff --git a/crates/brk_computer/src/outputs/mod.rs b/crates/brk_computer/src/outputs/mod.rs index ab3e54d27..e32611856 100644 --- a/crates/brk_computer/src/outputs/mod.rs +++ b/crates/brk_computer/src/outputs/mod.rs @@ -5,18 +5,18 @@ mod compute; mod import; use brk_traversable::Traversable; -use vecdb::Database; +use vecdb::{Database, Rw, StorageMode}; pub use count::Vecs as CountVecs; pub use spent::Vecs as SpentVecs; pub const DB_NAME: &str = "outputs"; -#[derive(Clone, Traversable)] -pub struct Vecs { +#[derive(Traversable)] +pub struct Vecs { #[traversable(skip)] pub(crate) db: Database, - pub spent: SpentVecs, - pub count: CountVecs, + pub spent: SpentVecs, + pub count: CountVecs, } diff --git a/crates/brk_computer/src/outputs/spent/compute.rs b/crates/brk_computer/src/outputs/spent/compute.rs index 95a0184b1..3d9f28292 100644 --- a/crates/brk_computer/src/outputs/spent/compute.rs +++ b/crates/brk_computer/src/outputs/spent/compute.rs @@ -3,16 +3,16 @@ use brk_indexer::Indexer; use brk_types::{Height, TxInIndex, 
TxOutIndex}; use tracing::info; use vecdb::{ - AnyStoredVec, AnyVec, Database, Exit, GenericStoredVec, Stamp, TypedVecIterator, VecIndex, + AnyStoredVec, AnyVec, Database, Exit, WritableVec, ReadableVec, Stamp, VecIndex, }; use super::Vecs; -use crate::{ComputeIndexes, inputs}; +use crate::{inputs, ComputeIndexes}; const HEIGHT_BATCH: u32 = 10_000; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, db: &Database, indexer: &Indexer, @@ -36,19 +36,36 @@ impl Vecs { self.txinindex .truncate_if_needed(TxOutIndex::from(min_txoutindex))?; - let mut height_to_first_txoutindex = indexer.vecs.outputs.first_txoutindex.iter()?; - let mut height_to_first_txinindex = indexer.vecs.inputs.first_txinindex.iter()?; - let mut txinindex_to_txoutindex = inputs.spent.txoutindex.iter()?; + let txinindex_to_txoutindex = &inputs.spent.txoutindex; - // Find starting height from min_txoutindex - let mut min_height = Height::ZERO; - for h in 0..=target_height.to_usize() { - let txoutindex = height_to_first_txoutindex.get_unwrap(Height::from(h)); - if txoutindex.to_usize() > min_txoutindex { - break; + // Find min_height via binary search (first_txoutindex is monotonically non-decreasing) + let first_txoutindex_vec = &indexer.vecs.outputs.first_txoutindex; + let total_heights = target_height.to_usize() + 1; + let min_height = if min_txoutindex == 0 { + Height::ZERO + } else { + let mut lo = 0usize; + let mut hi = total_heights; + while lo < hi { + let mid = lo + (hi - lo) / 2; + if first_txoutindex_vec.collect_one_at(mid).unwrap().to_usize() <= min_txoutindex { + lo = mid + 1; + } else { + hi = mid; + } } - min_height = Height::from(h); - } + Height::from(lo.saturating_sub(1)) + }; + + // Only collect from min_height onward (not from 0) + let offset = min_height.to_usize(); + let first_txoutindex_data = first_txoutindex_vec + .collect_range_at(offset, target_height.to_usize() + 1); + let first_txinindex_data = indexer + .vecs + .inputs + .first_txinindex + 
.collect_range_at(offset, target_height.to_usize() + 2); // Validate: computed height must not exceed starting height assert!( @@ -68,30 +85,24 @@ impl Vecs { let batch_txoutindex = if batch_end_height >= target_height { indexer.vecs.outputs.value.len() } else { - height_to_first_txoutindex - .get_unwrap(batch_end_height + 1_u32) - .to_usize() + first_txoutindex_data[batch_end_height.to_usize() + 1 - offset].to_usize() }; self.txinindex .fill_to(batch_txoutindex, TxInIndex::UNSPENT)?; // Get txin range for this height batch - let txin_start = height_to_first_txinindex - .get_unwrap(batch_start_height) - .to_usize(); + let txin_start = first_txinindex_data[batch_start_height.to_usize() - offset].to_usize(); let txin_end = if batch_end_height >= target_height { inputs.spent.txoutindex.len() } else { - height_to_first_txinindex - .get_unwrap(batch_end_height + 1_u32) - .to_usize() + first_txinindex_data[batch_end_height.to_usize() + 1 - offset].to_usize() }; // Collect and process txins pairs.clear(); - for i in txin_start..txin_end { - let txinindex = TxInIndex::from(i); - let txoutindex = txinindex_to_txoutindex.get_unwrap(txinindex); + let txoutindexes: Vec = txinindex_to_txoutindex.collect_range_at(txin_start, txin_end); + for (j, txoutindex) in txoutindexes.into_iter().enumerate() { + let txinindex = TxInIndex::from(txin_start + j); if txoutindex.is_coinbase() { continue; diff --git a/crates/brk_computer/src/outputs/spent/import.rs b/crates/brk_computer/src/outputs/spent/import.rs index c99012ed1..88c517d84 100644 --- a/crates/brk_computer/src/outputs/spent/import.rs +++ b/crates/brk_computer/src/outputs/spent/import.rs @@ -5,7 +5,7 @@ use vecdb::{BytesVec, Database, ImportableVec}; use super::Vecs; impl Vecs { - pub fn forced_import(db: &Database, version: Version) -> Result { + pub(crate) fn forced_import(db: &Database, version: Version) -> Result { Ok(Self { txinindex: BytesVec::forced_import(db, "txinindex", version)?, }) diff --git 
a/crates/brk_computer/src/outputs/spent/vecs.rs b/crates/brk_computer/src/outputs/spent/vecs.rs index ffc2c5865..35a5f0731 100644 --- a/crates/brk_computer/src/outputs/spent/vecs.rs +++ b/crates/brk_computer/src/outputs/spent/vecs.rs @@ -1,8 +1,8 @@ use brk_traversable::Traversable; use brk_types::{TxInIndex, TxOutIndex}; -use vecdb::BytesVec; +use vecdb::{BytesVec, Rw, StorageMode}; -#[derive(Clone, Traversable)] -pub struct Vecs { - pub txinindex: BytesVec, +#[derive(Traversable)] +pub struct Vecs { + pub txinindex: M::Stored>, } diff --git a/crates/brk_computer/src/pools/mod.rs b/crates/brk_computer/src/pools/mod.rs index 24773458b..bd9190247 100644 --- a/crates/brk_computer/src/pools/mod.rs +++ b/crates/brk_computer/src/pools/mod.rs @@ -7,8 +7,8 @@ use brk_traversable::Traversable; use brk_types::{Address, AddressBytes, Height, OutputType, PoolSlug, Pools, TxOutIndex, pools}; use rayon::prelude::*; use vecdb::{ - AnyStoredVec, AnyVec, BytesVec, Database, Exit, GenericStoredVec, ImportableVec, IterableVec, - PAGE_SIZE, TypedVecIterator, VecIndex, Version, + AnyStoredVec, AnyVec, BytesVec, Database, Exit, WritableVec, ImportableVec, ReadableVec, + PAGE_SIZE, Rw, StorageMode, VecIndex, Version, }; mod vecs; @@ -16,34 +16,35 @@ mod vecs; use crate::{ blocks, indexes::{self, ComputeIndexes}, - price, transactions, + mining, prices, transactions, }; pub const DB_NAME: &str = "pools"; -#[derive(Clone, Traversable)] -pub struct Vecs { +#[derive(Traversable)] +pub struct Vecs { db: Database, pools: &'static Pools, - pub height_to_pool: BytesVec, - pub vecs: BTreeMap, + pub height_to_pool: M::Stored>, + pub vecs: BTreeMap>, } impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( parent_path: &Path, parent_version: Version, indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, blocks: &blocks::Vecs, + mining: &mining::Vecs, transactions: &transactions::Vecs, ) -> Result { let db = Database::open(&parent_path.join(DB_NAME))?; 
db.set_min_len(PAGE_SIZE * 1_000_000)?; let pools = pools(); - let version = parent_version + Version::new(3) + Version::new(pools.len() as u64); + let version = parent_version + Version::new(3) + Version::new(pools.len() as u32); let this = Self { height_to_pool: BytesVec::forced_import(&db, "pool", version)?, @@ -55,8 +56,9 @@ impl Vecs { pool.slug, version, indexes, - price, + prices, blocks, + mining, transactions, ) .map(|vecs| (pool.slug, vecs)) @@ -76,7 +78,7 @@ impl Vecs { Ok(this) } - pub fn compute( + pub(crate) fn compute( &mut self, indexer: &Indexer, indexes: &indexes::Vecs, @@ -101,7 +103,12 @@ impl Vecs { self.compute_height_to_pool(indexer, indexes, starting_indexes, exit)?; self.vecs.par_iter_mut().try_for_each(|(_, vecs)| { - vecs.compute(indexes, starting_indexes, &self.height_to_pool, blocks, exit) + vecs.compute( + starting_indexes, + &self.height_to_pool, + blocks, + exit, + ) })?; Ok(()) @@ -117,44 +124,21 @@ impl Vecs { self.height_to_pool .validate_computed_version_or_reset(indexer.stores.height_to_coinbase_tag.version())?; - let mut height_to_first_txindex_iter = indexer.vecs.transactions.first_txindex.iter()?; - let mut txindex_to_first_txoutindex_iter = - indexer.vecs.transactions.first_txoutindex.iter()?; - let mut txindex_to_output_count_iter = indexes.txindex.output_count.iter(); - let mut txoutindex_to_outputtype_iter = - indexer.vecs.outputs.outputtype.iter()?; - let mut txoutindex_to_typeindex_iter = indexer.vecs.outputs.typeindex.iter()?; - let mut p2pk65addressindex_to_p2pk65bytes_iter = indexer - .vecs - .addresses - .p2pk65bytes - .iter()?; - let mut p2pk33addressindex_to_p2pk33bytes_iter = indexer - .vecs - .addresses - .p2pk33bytes - .iter()?; - let mut p2pkhaddressindex_to_p2pkhbytes_iter = indexer - .vecs - .addresses - .p2pkhbytes - .iter()?; - let mut p2shaddressindex_to_p2shbytes_iter = - indexer.vecs.addresses.p2shbytes.iter()?; - let mut p2wpkhaddressindex_to_p2wpkhbytes_iter = indexer - .vecs - .addresses - 
.p2wpkhbytes - .iter()?; - let mut p2wshaddressindex_to_p2wshbytes_iter = indexer - .vecs - .addresses - .p2wshbytes - .iter()?; - let mut p2traddressindex_to_p2trbytes_iter = - indexer.vecs.addresses.p2trbytes.iter()?; - let mut p2aaddressindex_to_p2abytes_iter = - indexer.vecs.addresses.p2abytes.iter()?; + let txindex_to_first_txoutindex_reader = + indexer.vecs.transactions.first_txoutindex.reader(); + let txoutindex_to_outputtype_reader = indexer.vecs.outputs.outputtype.reader(); + let txoutindex_to_typeindex_reader = indexer.vecs.outputs.typeindex.reader(); + let p2pk65addressindex_to_p2pk65bytes_reader = + indexer.vecs.addresses.p2pk65bytes.reader(); + let p2pk33addressindex_to_p2pk33bytes_reader = + indexer.vecs.addresses.p2pk33bytes.reader(); + let p2pkhaddressindex_to_p2pkhbytes_reader = indexer.vecs.addresses.p2pkhbytes.reader(); + let p2shaddressindex_to_p2shbytes_reader = indexer.vecs.addresses.p2shbytes.reader(); + let p2wpkhaddressindex_to_p2wpkhbytes_reader = + indexer.vecs.addresses.p2wpkhbytes.reader(); + let p2wshaddressindex_to_p2wshbytes_reader = indexer.vecs.addresses.p2wshbytes.reader(); + let p2traddressindex_to_p2trbytes_reader = indexer.vecs.addresses.p2trbytes.reader(); + let p2aaddressindex_to_p2abytes_reader = indexer.vecs.addresses.p2abytes.reader(); let unknown = self.pools.get_unknown(); @@ -163,46 +147,57 @@ impl Vecs { .to_usize() .min(self.height_to_pool.len()); + // Cursors avoid per-height PcoVec page decompression. + // Heights are sequential, txindex values derived from them are monotonically + // increasing, so both cursors only advance forward. 
+ let mut first_txindex_cursor = indexer.vecs.transactions.first_txindex.cursor(); + first_txindex_cursor.advance(min); + let mut output_count_cursor = indexes.txindex.output_count.cursor(); + indexer .stores .height_to_coinbase_tag .iter() .skip(min) .try_for_each(|(height, coinbase_tag)| -> Result<()> { - let txindex = height_to_first_txindex_iter.get_unwrap(height); - let txoutindex = txindex_to_first_txoutindex_iter.get_unwrap(txindex); - let outputcount = txindex_to_output_count_iter.get_unwrap(txindex); + let txindex = first_txindex_cursor.next().unwrap(); + let txoutindex = txindex_to_first_txoutindex_reader.get(txindex.to_usize()); + + let ti = txindex.to_usize(); + output_count_cursor.advance(ti - output_count_cursor.position()); + let outputcount = output_count_cursor.next().unwrap(); let pool = (*txoutindex..(*txoutindex + *outputcount)) .map(TxOutIndex::from) .find_map(|txoutindex| { - let outputtype = txoutindex_to_outputtype_iter.get_unwrap(txoutindex); - let typeindex = txoutindex_to_typeindex_iter.get_unwrap(txoutindex); + let outputtype = txoutindex_to_outputtype_reader.get(txoutindex.to_usize()); + let typeindex = txoutindex_to_typeindex_reader.get(txoutindex.to_usize()); + let ti = usize::from(typeindex); match outputtype { OutputType::P2PK65 => Some(AddressBytes::from( - p2pk65addressindex_to_p2pk65bytes_iter.get_unwrap(typeindex.into()), + p2pk65addressindex_to_p2pk65bytes_reader.get(ti), )), OutputType::P2PK33 => Some(AddressBytes::from( - p2pk33addressindex_to_p2pk33bytes_iter.get_unwrap(typeindex.into()), + p2pk33addressindex_to_p2pk33bytes_reader.get(ti), )), OutputType::P2PKH => Some(AddressBytes::from( - p2pkhaddressindex_to_p2pkhbytes_iter.get_unwrap(typeindex.into()), + p2pkhaddressindex_to_p2pkhbytes_reader.get(ti), )), OutputType::P2SH => Some(AddressBytes::from( - p2shaddressindex_to_p2shbytes_iter.get_unwrap(typeindex.into()), + p2shaddressindex_to_p2shbytes_reader.get(ti), )), OutputType::P2WPKH => Some(AddressBytes::from( - 
p2wpkhaddressindex_to_p2wpkhbytes_iter.get_unwrap(typeindex.into()), + p2wpkhaddressindex_to_p2wpkhbytes_reader.get(ti), )), OutputType::P2WSH => Some(AddressBytes::from( - p2wshaddressindex_to_p2wshbytes_iter.get_unwrap(typeindex.into()), + p2wshaddressindex_to_p2wshbytes_reader.get(ti), )), OutputType::P2TR => Some(AddressBytes::from( - p2traddressindex_to_p2trbytes_iter.get_unwrap(typeindex.into()), + p2traddressindex_to_p2trbytes_reader.get(ti), )), OutputType::P2A => Some(AddressBytes::from( - p2aaddressindex_to_p2abytes_iter.get_unwrap(typeindex.into()), + p2aaddressindex_to_p2abytes_reader.get(ti), )), _ => None, } diff --git a/crates/brk_computer/src/pools/vecs.rs b/crates/brk_computer/src/pools/vecs.rs index 98418cef2..e7fb7cb13 100644 --- a/crates/brk_computer/src/pools/vecs.rs +++ b/crates/brk_computer/src/pools/vecs.rs @@ -2,50 +2,52 @@ use brk_error::Result; use brk_traversable::Traversable; use brk_types::{Height, PoolSlug, Sats, StoredF32, StoredU16, StoredU32}; use vecdb::{ - Database, Exit, GenericStoredVec, IterableCloneableVec, IterableVec, VecIndex, Version, + Database, Exit, ReadableCloneableVec, ReadableVec, Rw, StorageMode, Version, }; use crate::{ blocks, indexes::{self, ComputeIndexes}, internal::{ - ComputedFromHeightLast, ComputedFromHeightSumCum, ComputedFromDateLast, DollarsPlus, LazyBinaryFromHeightLast, - LazyValueFromHeightSumCum, MaskSats, PercentageU32F32, SatsPlus, SatsPlusToBitcoin, - ValueBinaryFromHeight, + ComputedFromHeightLast, ComputedFromHeightSumCum, DollarsPlus, + LazyBinaryFromHeightLast, LazyValueFromHeightSumCum, MaskSats, PercentageU32F32, SatsPlus, + SatsPlusToBitcoin, ValueBinaryFromHeight, }, - price, transactions, + mining, prices, transactions, }; -#[derive(Clone, Traversable)] -pub struct Vecs { +#[derive(Traversable)] +pub struct Vecs { slug: PoolSlug, - pub blocks_mined: ComputedFromHeightSumCum, - pub _24h_blocks_mined: ComputedFromHeightLast, - pub _1w_blocks_mined: ComputedFromHeightLast, - pub 
_1m_blocks_mined: ComputedFromHeightLast, - pub _1y_blocks_mined: ComputedFromHeightLast, - pub subsidy: LazyValueFromHeightSumCum, - pub fee: LazyValueFromHeightSumCum, + pub blocks_mined: ComputedFromHeightSumCum, + pub blocks_mined_24h_sum: ComputedFromHeightLast, + pub blocks_mined_1w_sum: ComputedFromHeightLast, + pub blocks_mined_1m_sum: ComputedFromHeightLast, + pub blocks_mined_1y_sum: ComputedFromHeightLast, + pub subsidy: LazyValueFromHeightSumCum, + pub fee: LazyValueFromHeightSumCum, pub coinbase: ValueBinaryFromHeight, pub dominance: LazyBinaryFromHeightLast, - pub _24h_dominance: LazyBinaryFromHeightLast, - pub _1w_dominance: LazyBinaryFromHeightLast, - pub _1m_dominance: LazyBinaryFromHeightLast, - pub _1y_dominance: LazyBinaryFromHeightLast, - pub blocks_since_block: ComputedFromHeightLast, - pub days_since_block: ComputedFromDateLast, + pub dominance_24h: LazyBinaryFromHeightLast, + pub dominance_1w: LazyBinaryFromHeightLast, + pub dominance_1m: LazyBinaryFromHeightLast, + pub dominance_1y: LazyBinaryFromHeightLast, + pub blocks_since_block: ComputedFromHeightLast, + pub days_since_block: ComputedFromHeightLast, } impl Vecs { - pub fn forced_import( + #[allow(clippy::too_many_arguments)] + pub(crate) fn forced_import( db: &Database, slug: PoolSlug, parent_version: Version, indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, blocks: &blocks::Vecs, + mining: &mining::Vecs, transactions: &transactions::Vecs, ) -> Result { let suffix = |s: &str| format!("{}_{s}", slug); @@ -54,23 +56,39 @@ impl Vecs { let blocks_mined = ComputedFromHeightSumCum::forced_import(db, &suffix("blocks_mined"), version, indexes)?; - let _24h_blocks_mined = - ComputedFromHeightLast::forced_import(db, &suffix("24h_blocks_mined"), version, indexes)?; - let _1w_blocks_mined = - ComputedFromHeightLast::forced_import(db, &suffix("1w_blocks_mined"), version, indexes)?; - let _1m_blocks_mined = - ComputedFromHeightLast::forced_import(db, 
&suffix("1m_blocks_mined"), version, indexes)?; - let _1y_blocks_mined = - ComputedFromHeightLast::forced_import(db, &suffix("1y_blocks_mined"), version, indexes)?; + let blocks_mined_24h_sum = ComputedFromHeightLast::forced_import( + db, + &suffix("blocks_mined_24h_sum"), + version, + indexes, + )?; + let blocks_mined_1w_sum = ComputedFromHeightLast::forced_import( + db, + &suffix("blocks_mined_1w_sum"), + version, + indexes, + )?; + let blocks_mined_1m_sum = ComputedFromHeightLast::forced_import( + db, + &suffix("blocks_mined_1m_sum"), + version, + indexes, + )?; + let blocks_mined_1y_sum = ComputedFromHeightLast::forced_import( + db, + &suffix("blocks_mined_1y_sum"), + version, + indexes, + )?; let subsidy = LazyValueFromHeightSumCum::forced_import::( db, &suffix("subsidy"), version, indexes, - blocks_mined.height.boxed_clone(), - blocks.rewards.subsidy.sats.height.boxed_clone(), - price, + blocks_mined.height.read_only_boxed_clone(), + mining.rewards.subsidy.sats.height.read_only_boxed_clone(), + prices, )?; let fee = LazyValueFromHeightSumCum::forced_import::( @@ -78,9 +96,9 @@ impl Vecs { &suffix("fee"), version, indexes, - blocks_mined.height.boxed_clone(), - transactions.fees.fee.sats.height.sum_cum.sum.0.boxed_clone(), - price, + blocks_mined.height.read_only_boxed_clone(), + transactions.fees.fee.sats.height.boxed_sum(), + prices, )?; Ok(Self { @@ -90,36 +108,36 @@ impl Vecs { &blocks_mined, &blocks.count.block_count, ), - _24h_dominance: LazyBinaryFromHeightLast::from_computed_last::( - &suffix("24h_dominance"), + dominance_24h: LazyBinaryFromHeightLast::from_computed_last::( + &suffix("dominance_24h"), version, - &_24h_blocks_mined, - &blocks.count._24h_block_count, + &blocks_mined_24h_sum, + &blocks.count.block_count_24h_sum, ), - _1w_dominance: LazyBinaryFromHeightLast::from_computed_last::( - &suffix("1w_dominance"), + dominance_1w: LazyBinaryFromHeightLast::from_computed_last::( + &suffix("dominance_1w"), version, - &_1w_blocks_mined, - 
&blocks.count._1w_block_count, + &blocks_mined_1w_sum, + &blocks.count.block_count_1w_sum, ), - _1m_dominance: LazyBinaryFromHeightLast::from_computed_last::( - &suffix("1m_dominance"), + dominance_1m: LazyBinaryFromHeightLast::from_computed_last::( + &suffix("dominance_1m"), version, - &_1m_blocks_mined, - &blocks.count._1m_block_count, + &blocks_mined_1m_sum, + &blocks.count.block_count_1m_sum, ), - _1y_dominance: LazyBinaryFromHeightLast::from_computed_last::( - &suffix("1y_dominance"), + dominance_1y: LazyBinaryFromHeightLast::from_computed_last::( + &suffix("dominance_1y"), version, - &_1y_blocks_mined, - &blocks.count._1y_block_count, + &blocks_mined_1y_sum, + &blocks.count.block_count_1y_sum, ), slug, blocks_mined, - _24h_blocks_mined, - _1w_blocks_mined, - _1m_blocks_mined, - _1y_blocks_mined, + blocks_mined_24h_sum, + blocks_mined_1w_sum, + blocks_mined_1m_sum, + blocks_mined_1y_sum, coinbase: ValueBinaryFromHeight::from_lazy::< SatsPlus, SatsPlusToBitcoin, @@ -135,7 +153,7 @@ impl Vecs { version, indexes, )?, - days_since_block: ComputedFromDateLast::forced_import( + days_since_block: ComputedFromHeightLast::forced_import( db, &suffix("days_since_block"), version, @@ -144,16 +162,15 @@ impl Vecs { }) } - pub fn compute( + pub(crate) fn compute( &mut self, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, - height_to_pool: &impl IterableVec, + height_to_pool: &impl ReadableVec, blocks: &blocks::Vecs, exit: &Exit, ) -> Result<()> { self.blocks_mined - .compute_all(indexes, starting_indexes, exit, |vec| { + .compute(starting_indexes, exit, |vec| { vec.compute_transform( starting_indexes.height, height_to_pool, @@ -173,99 +190,62 @@ impl Vecs { })?; // Compute rolling window blocks mined using the start heights from blocks.count - let blocks_mined_height = &self.blocks_mined.height.clone(); - self._24h_blocks_mined - .compute_all(indexes, starting_indexes, exit, |v| { - Ok(v.compute_rolling_sum( - starting_indexes.height, - 
&blocks.count._24h_start, - blocks_mined_height, - exit, - )?) - })?; + self.blocks_mined_24h_sum.height.compute_rolling_sum( + starting_indexes.height, + &blocks.count.height_24h_ago, + &self.blocks_mined.height, + exit, + )?; - self._1w_blocks_mined - .compute_all(indexes, starting_indexes, exit, |v| { - Ok(v.compute_rolling_sum( - starting_indexes.height, - &blocks.count._1w_start, - blocks_mined_height, - exit, - )?) - })?; + self.blocks_mined_1w_sum.height.compute_rolling_sum( + starting_indexes.height, + &blocks.count.height_1w_ago, + &self.blocks_mined.height, + exit, + )?; - self._1m_blocks_mined - .compute_all(indexes, starting_indexes, exit, |v| { - Ok(v.compute_rolling_sum( - starting_indexes.height, - &blocks.count._1m_start, - blocks_mined_height, - exit, - )?) - })?; + self.blocks_mined_1m_sum.height.compute_rolling_sum( + starting_indexes.height, + &blocks.count.height_1m_ago, + &self.blocks_mined.height, + exit, + )?; - self._1y_blocks_mined - .compute_all(indexes, starting_indexes, exit, |v| { - Ok(v.compute_rolling_sum( - starting_indexes.height, - &blocks.count._1y_start, - blocks_mined_height, - exit, - )?) 
- })?; + self.blocks_mined_1y_sum.height.compute_rolling_sum( + starting_indexes.height, + &blocks.count.height_1y_ago, + &self.blocks_mined.height, + exit, + )?; - self.subsidy.derive_from(indexes, starting_indexes, exit)?; + self.subsidy.compute_cumulative(starting_indexes, exit)?; - self.fee.derive_from(indexes, starting_indexes, exit)?; + self.fee.compute_cumulative(starting_indexes, exit)?; - self.blocks_since_block - .compute_all(indexes, starting_indexes, exit, |v| { - let mut prev = StoredU32::ZERO; - v.compute_transform( - starting_indexes.height, - blocks_mined_height, - |(h, mined, ..)| { - let blocks = if mined.is_zero() { - prev + StoredU32::ONE - } else { - StoredU32::ZERO - }; - prev = blocks; - (h, blocks) - }, - exit, - )?; - Ok(()) - })?; + { + let mut prev = StoredU32::ZERO; + self.blocks_since_block.height.compute_transform( + starting_indexes.height, + &self.blocks_mined.height, + |(h, mined, ..)| { + let blocks = if mined.is_zero() { + prev + StoredU32::ONE + } else { + StoredU32::ZERO + }; + prev = blocks; + (h, blocks) + }, + exit, + )?; + } - self.days_since_block - .compute_all(starting_indexes, exit, |v| { - let mut prev = None; - v.compute_transform2( - starting_indexes.dateindex, - self.blocks_mined.dateindex.sum.inner(), - self.blocks_mined.dateindex.cumulative.inner(), - |(i, sum, cumulative, slf)| { - if prev.is_none() { - let i = i.to_usize(); - prev.replace(if i > 0 { - slf.get_pushed_or_read_at_unwrap_once(i - 1) - } else { - StoredU16::ZERO - }); - } - let days = if !cumulative.is_zero() && sum.is_zero() { - prev.unwrap() + StoredU16::ONE - } else { - StoredU16::ZERO - }; - prev.replace(days); - (i, days) - }, - exit, - )?; - Ok(()) - })?; + self.days_since_block.height.compute_transform( + starting_indexes.height, + &self.blocks_since_block.height, + |(h, blocks, ..)| (h, StoredU16::from(u16::try_from(*blocks).unwrap_or(u16::MAX))), + exit, + )?; Ok(()) } diff --git a/crates/brk_computer/src/positions.rs 
b/crates/brk_computer/src/positions.rs index 07edeb5cc..8145ae2c4 100644 --- a/crates/brk_computer/src/positions.rs +++ b/crates/brk_computer/src/positions.rs @@ -6,24 +6,24 @@ use brk_reader::Reader; use brk_traversable::Traversable; use brk_types::{BlkPosition, Height, TxIndex, Version}; use vecdb::{ - AnyStoredVec, AnyVec, Database, Exit, GenericStoredVec, ImportableVec, PAGE_SIZE, PcoVec, - TypedVecIterator, + AnyStoredVec, AnyVec, Database, Exit, WritableVec, ImportableVec, PAGE_SIZE, PcoVec, + ReadableVec, Rw, StorageMode, VecIndex, }; use super::ComputeIndexes; pub const DB_NAME: &str = "positions"; -#[derive(Clone, Traversable)] -pub struct Vecs { +#[derive(Traversable)] +pub struct Vecs { db: Database, - pub block_position: PcoVec, - pub tx_position: PcoVec, + pub block_position: M::Stored>, + pub tx_position: M::Stored>, } impl Vecs { - pub fn forced_import(parent_path: &Path, parent_version: Version) -> Result { + pub(crate) fn forced_import(parent_path: &Path, parent_version: Version) -> Result { let db = Database::open(&parent_path.join(DB_NAME))?; db.set_min_len(PAGE_SIZE * 1_000_000)?; @@ -46,7 +46,7 @@ impl Vecs { Ok(this) } - pub fn compute( + pub(crate) fn compute( &mut self, indexer: &Indexer, starting_indexes: &ComputeIndexes, @@ -80,14 +80,16 @@ impl Vecs { .vecs .transactions .height - .iter()? - .get(min_txindex) - .map(|h| h.min(starting_indexes.height)) + .collect_one(min_txindex) + .map(|h: Height| h.min(starting_indexes.height)) else { return Ok(()); }; - let mut height_to_first_txindex_iter = indexer.vecs.transactions.first_txindex.iter()?; + // Cursor avoids per-height PcoVec page decompression. + // Heights are sequential, so the cursor only advances forward. 
+ let mut first_txindex_cursor = indexer.vecs.transactions.first_txindex.cursor(); + first_txindex_cursor.advance(min_height.to_usize()); parser .read( @@ -101,7 +103,7 @@ impl Vecs { self.block_position .truncate_push(height, block.metadata().position())?; - let txindex = height_to_first_txindex_iter.get_unwrap(height); + let txindex = first_txindex_cursor.next().unwrap(); block.tx_metadata().iter().enumerate().try_for_each( |(index, metadata)| -> Result<()> { diff --git a/crates/brk_computer/src/price/cents/import.rs b/crates/brk_computer/src/price/cents/import.rs deleted file mode 100644 index d1e2f865b..000000000 --- a/crates/brk_computer/src/price/cents/import.rs +++ /dev/null @@ -1,73 +0,0 @@ -use brk_error::Result; -use brk_types::{DateIndex, Height, OHLCCentsUnsigned, Version}; -use vecdb::{Database, IterableCloneableVec, LazyVecFrom1}; - -use super::Vecs; -use crate::internal::{ComputedHeightAndDateBytes, LazyHeightAndDateOHLC, LazyOHLC}; - -impl Vecs { - pub fn forced_import(db: &Database, version: Version) -> Result { - let ohlc: ComputedHeightAndDateBytes = - ComputedHeightAndDateBytes::forced_import(db, "ohlc_cents", version)?; - - let components = LazyHeightAndDateOHLC { - height: LazyOHLC { - open: LazyVecFrom1::init( - "price_open_cents", - version, - ohlc.height.boxed_clone(), - |h: Height, iter| iter.get(h).map(|o: OHLCCentsUnsigned| o.open), - ), - high: LazyVecFrom1::init( - "price_high_cents", - version, - ohlc.height.boxed_clone(), - |h: Height, iter| iter.get(h).map(|o: OHLCCentsUnsigned| o.high), - ), - low: LazyVecFrom1::init( - "price_low_cents", - version, - ohlc.height.boxed_clone(), - |h: Height, iter| iter.get(h).map(|o: OHLCCentsUnsigned| o.low), - ), - close: LazyVecFrom1::init( - "price_close_cents", - version, - ohlc.height.boxed_clone(), - |h: Height, iter| iter.get(h).map(|o: OHLCCentsUnsigned| o.close), - ), - }, - dateindex: LazyOHLC { - open: LazyVecFrom1::init( - "price_open_cents", - version, - ohlc.dateindex.boxed_clone(), 
- |di: DateIndex, iter| iter.get(di).map(|o: OHLCCentsUnsigned| o.open), - ), - high: LazyVecFrom1::init( - "price_high_cents", - version, - ohlc.dateindex.boxed_clone(), - |di: DateIndex, iter| iter.get(di).map(|o: OHLCCentsUnsigned| o.high), - ), - low: LazyVecFrom1::init( - "price_low_cents", - version, - ohlc.dateindex.boxed_clone(), - |di: DateIndex, iter| iter.get(di).map(|o: OHLCCentsUnsigned| o.low), - ), - close: LazyVecFrom1::init( - "price_close_cents", - version, - ohlc.dateindex.boxed_clone(), - |di: DateIndex, iter| iter.get(di).map(|o: OHLCCentsUnsigned| o.close), - ), - }, - }; - - Ok(Self { - split: components, - ohlc, - }) - } -} diff --git a/crates/brk_computer/src/price/cents/vecs.rs b/crates/brk_computer/src/price/cents/vecs.rs deleted file mode 100644 index 93831922b..000000000 --- a/crates/brk_computer/src/price/cents/vecs.rs +++ /dev/null @@ -1,10 +0,0 @@ -use brk_traversable::Traversable; -use brk_types::{CentsUnsigned, OHLCCentsUnsigned}; - -use crate::internal::{ComputedHeightAndDateBytes, LazyHeightAndDateOHLC}; - -#[derive(Clone, Traversable)] -pub struct Vecs { - pub split: LazyHeightAndDateOHLC, - pub ohlc: ComputedHeightAndDateBytes, -} diff --git a/crates/brk_computer/src/price/fetch.rs b/crates/brk_computer/src/price/fetch.rs deleted file mode 100644 index 396a49375..000000000 --- a/crates/brk_computer/src/price/fetch.rs +++ /dev/null @@ -1,99 +0,0 @@ -use brk_error::Result; -use brk_indexer::Indexer; -use brk_types::{DateIndex, Height, OHLCCentsUnsigned}; -use vecdb::{ - AnyStoredVec, AnyVec, Exit, GenericStoredVec, IterableVec, TypedVecIterator, VecIndex, -}; - -use crate::{ComputeIndexes, indexes, utils::OptionExt}; - -use super::Vecs; - -impl Vecs { - pub fn fetch( - &mut self, - indexer: &Indexer, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - let Some(fetcher) = self.fetcher.as_mut() else { - return Ok(()); - }; - - // Validate computed versions against dependencies - let 
height_dep_version = indexer.vecs.blocks.timestamp.version(); - self.cents - .ohlc - .height - .validate_computed_version_or_reset(height_dep_version)?; - - let dateindex_dep_version = indexes.dateindex.date.version(); - self.cents - .ohlc - .dateindex - .validate_computed_version_or_reset(dateindex_dep_version)?; - - let height_to_timestamp = &indexer.vecs.blocks.timestamp; - let index = starting_indexes - .height - .min(Height::from(self.cents.ohlc.height.len())); - let mut prev_timestamp = index - .decremented() - .map(|prev_i| height_to_timestamp.iter().unwrap().get_unwrap(prev_i)); - height_to_timestamp - .iter()? - .enumerate() - .skip(index.to_usize()) - .try_for_each(|(i, v)| -> Result<()> { - self.cents.ohlc.height.truncate_push_at( - i, - fetcher.get_height(i.into(), v, prev_timestamp).unwrap(), - )?; - prev_timestamp = Some(v); - Ok(()) - })?; - { - let _lock = exit.lock(); - self.cents.ohlc.height.write()?; - } - - let index = starting_indexes - .dateindex - .min(DateIndex::from(self.cents.ohlc.dateindex.len())); - let mut prev = Some(index.decremented().map_or(OHLCCentsUnsigned::default(), |prev_i| { - self.cents.ohlc.dateindex.iter().unwrap().get_unwrap(prev_i) - })); - indexes - .dateindex - .date - .iter() - .enumerate() - .skip(index.to_usize()) - .try_for_each(|(i, d)| -> Result<()> { - let ohlc = if i.to_usize() + 100 >= self.cents.ohlc.dateindex.len() - && let Ok(mut ohlc) = fetcher.get_date(d) - { - let prev_open = *prev.u().close; - *ohlc.open = prev_open; - *ohlc.high = (*ohlc.high).max(prev_open); - *ohlc.low = (*ohlc.low).min(prev_open); - ohlc - } else { - prev.clone().unwrap() - }; - - prev.replace(ohlc.clone()); - - self.cents.ohlc.dateindex.truncate_push_at(i, ohlc)?; - - Ok(()) - })?; - { - let _lock = exit.lock(); - self.cents.ohlc.dateindex.write()?; - } - - Ok(()) - } -} diff --git a/crates/brk_computer/src/price/oracle/compute.rs b/crates/brk_computer/src/price/oracle/compute.rs deleted file mode 100644 index 8d976b30b..000000000 
--- a/crates/brk_computer/src/price/oracle/compute.rs +++ /dev/null @@ -1,491 +0,0 @@ -use std::ops::Range; - -use brk_error::Result; -use brk_indexer::Indexer; -use brk_oracle::{Config, NUM_BINS, Oracle, START_HEIGHT, bin_to_cents, cents_to_bin}; -use brk_types::{ - CentsUnsigned, Close, DateIndex, Height, High, Low, OHLCCentsUnsigned, OHLCDollars, Open, - OutputType, Sats, TxIndex, TxOutIndex, -}; -use tracing::info; -use vecdb::{ - AnyStoredVec, AnyVec, Exit, GenericStoredVec, IterableVec, TypedVecIterator, VecIndex, - VecIterator, -}; - -use super::Vecs; -use crate::{ComputeIndexes, indexes}; - -impl Vecs { - pub fn compute( - &mut self, - indexer: &Indexer, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.compute_prices(indexer, starting_indexes, exit)?; - self.compute_daily_ohlc(indexes, starting_indexes, exit)?; - self.compute_split_and_ohlc(starting_indexes, exit)?; - Ok(()) - } - - fn compute_split_and_ohlc( - &mut self, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - // Destructure to allow simultaneous borrows of different fields - let Self { - price_cents, - ohlc_cents, - split, - ohlc, - ohlc_dollars, - } = self; - - // Open: first-value aggregation - split.open.height.compute_transform( - starting_indexes.height, - &*price_cents, - |(h, price, ..)| (h, Open::new(price)), - exit, - )?; - split.open.compute_rest(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - &*ohlc_cents, - |(di, ohlc_val, ..)| (di, ohlc_val.open), - exit, - )?; - Ok(()) - })?; - - // High: max-value aggregation - split.high.height.compute_transform( - starting_indexes.height, - &*price_cents, - |(h, price, ..)| (h, High::new(price)), - exit, - )?; - split.high.compute_rest(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - &*ohlc_cents, - |(di, ohlc_val, ..)| (di, ohlc_val.high), - exit, - )?; - Ok(()) - })?; - - // Low: min-value 
aggregation - split.low.height.compute_transform( - starting_indexes.height, - &*price_cents, - |(h, price, ..)| (h, Low::new(price)), - exit, - )?; - split.low.compute_rest(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - &*ohlc_cents, - |(di, ohlc_val, ..)| (di, ohlc_val.low), - exit, - )?; - Ok(()) - })?; - - // Close: last-value aggregation - split.close.height.compute_transform( - starting_indexes.height, - &*price_cents, - |(h, price, ..)| (h, Close::new(price)), - exit, - )?; - split.close.compute_rest(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - &*ohlc_cents, - |(di, ohlc_val, ..)| (di, ohlc_val.close), - exit, - )?; - Ok(()) - })?; - - // Period OHLC aggregates - time based - ohlc.dateindex.compute_transform4( - starting_indexes.dateindex, - &split.open.dateindex, - &split.high.dateindex, - &split.low.dateindex, - &split.close.dateindex, - |(i, open, high, low, close, _)| { - (i, OHLCCentsUnsigned { open, high, low, close }) - }, - exit, - )?; - - ohlc.week.compute_transform4( - starting_indexes.weekindex, - &*split.open.weekindex, - &*split.high.weekindex, - &*split.low.weekindex, - &*split.close.weekindex, - |(i, open, high, low, close, _)| { - (i, OHLCCentsUnsigned { open, high, low, close }) - }, - exit, - )?; - - ohlc.month.compute_transform4( - starting_indexes.monthindex, - &*split.open.monthindex, - &*split.high.monthindex, - &*split.low.monthindex, - &*split.close.monthindex, - |(i, open, high, low, close, _)| { - (i, OHLCCentsUnsigned { open, high, low, close }) - }, - exit, - )?; - - ohlc.quarter.compute_transform4( - starting_indexes.quarterindex, - &*split.open.quarterindex, - &*split.high.quarterindex, - &*split.low.quarterindex, - &*split.close.quarterindex, - |(i, open, high, low, close, _)| { - (i, OHLCCentsUnsigned { open, high, low, close }) - }, - exit, - )?; - - ohlc.semester.compute_transform4( - starting_indexes.semesterindex, - &*split.open.semesterindex, - 
&*split.high.semesterindex, - &*split.low.semesterindex, - &*split.close.semesterindex, - |(i, open, high, low, close, _)| { - (i, OHLCCentsUnsigned { open, high, low, close }) - }, - exit, - )?; - - ohlc.year.compute_transform4( - starting_indexes.yearindex, - &*split.open.yearindex, - &*split.high.yearindex, - &*split.low.yearindex, - &*split.close.yearindex, - |(i, open, high, low, close, _)| { - (i, OHLCCentsUnsigned { open, high, low, close }) - }, - exit, - )?; - - ohlc.decade.compute_transform4( - starting_indexes.decadeindex, - &*split.open.decadeindex, - &*split.high.decadeindex, - &*split.low.decadeindex, - &*split.close.decadeindex, - |(i, open, high, low, close, _)| { - (i, OHLCCentsUnsigned { open, high, low, close }) - }, - exit, - )?; - - // Period OHLC aggregates - chain based - ohlc.height.compute_transform4( - starting_indexes.height, - &split.open.height, - &split.high.height, - &split.low.height, - &split.close.height, - |(i, open, high, low, close, _)| { - (i, OHLCCentsUnsigned { open, high, low, close }) - }, - exit, - )?; - - ohlc.difficultyepoch.compute_transform4( - starting_indexes.difficultyepoch, - &*split.open.difficultyepoch, - &*split.high.difficultyepoch, - &*split.low.difficultyepoch, - &*split.close.difficultyepoch, - |(i, open, high, low, close, _)| { - (i, OHLCCentsUnsigned { open, high, low, close }) - }, - exit, - )?; - - // OHLC dollars - transform cents to dollars at every period level - macro_rules! 
cents_to_dollars { - ($field:ident, $idx:expr) => { - ohlc_dollars.$field.compute_transform( - $idx, - &ohlc.$field, - |(i, c, ..)| (i, OHLCDollars::from(c)), - exit, - )?; - }; - } - - cents_to_dollars!(dateindex, starting_indexes.dateindex); - cents_to_dollars!(week, starting_indexes.weekindex); - cents_to_dollars!(month, starting_indexes.monthindex); - cents_to_dollars!(quarter, starting_indexes.quarterindex); - cents_to_dollars!(semester, starting_indexes.semesterindex); - cents_to_dollars!(year, starting_indexes.yearindex); - cents_to_dollars!(decade, starting_indexes.decadeindex); - cents_to_dollars!(height, starting_indexes.height); - cents_to_dollars!(difficultyepoch, starting_indexes.difficultyepoch); - - Ok(()) - } - - fn compute_prices( - &mut self, - indexer: &Indexer, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - let source_version = - indexer.vecs.outputs.value.version() + indexer.vecs.outputs.outputtype.version(); - self.price_cents - .validate_computed_version_or_reset(source_version)?; - - let total_heights = indexer.vecs.blocks.timestamp.len(); - - if total_heights <= START_HEIGHT { - return Ok(()); - } - - // Reorg: truncate to starting_indexes - let truncate_to = self - .price_cents - .len() - .min(starting_indexes.height.to_usize()); - self.price_cents.truncate_if_needed_at(truncate_to)?; - - if self.price_cents.len() < START_HEIGHT { - for line in brk_oracle::PRICES.lines().skip(self.price_cents.len()) { - if self.price_cents.len() >= START_HEIGHT { - break; - } - let dollars: f64 = line.parse().unwrap_or(0.0); - let cents = (dollars * 100.0).round() as u64; - self.price_cents.push(CentsUnsigned::new(cents)); - } - } - - if self.price_cents.len() >= total_heights { - return Ok(()); - } - - let config = Config::default(); - let committed = self.price_cents.len(); - let prev_cents = self.price_cents - .iter()? 
- .get(Height::from(committed - 1)) - .unwrap(); - let seed_bin = cents_to_bin(prev_cents.inner() as f64); - let warmup = config.window_size.min(committed - START_HEIGHT); - let mut oracle = Oracle::from_checkpoint(seed_bin, config, |o| { - Self::feed_blocks(o, indexer, (committed - warmup)..committed); - }); - - let num_new = total_heights - committed; - info!( - "Computing oracle prices: {} to {} ({warmup} warmup)", - committed, total_heights - ); - - let ref_bins = Self::feed_blocks(&mut oracle, indexer, committed..total_heights); - - for (i, ref_bin) in ref_bins.into_iter().enumerate() { - self.price_cents.push(CentsUnsigned::new(bin_to_cents(ref_bin))); - - let progress = ((i + 1) * 100 / num_new) as u8; - if i > 0 && progress > ((i * 100 / num_new) as u8) { - info!("Oracle price computation: {}%", progress); - } - } - - { - let _lock = exit.lock(); - self.price_cents.write()?; - } - - info!( - "Oracle prices complete: {} committed", - self.price_cents.len() - ); - - Ok(()) - } - - /// Returns an Oracle seeded from the last committed price, with the last - /// window_size blocks already processed. Ready for additional blocks (e.g. mempool). - pub fn live_oracle(&self, indexer: &Indexer) -> Result { - let config = Config::default(); - let height = indexer.vecs.blocks.timestamp.len(); - let last_cents = self.price_cents - .iter()? - .get(Height::from(self.price_cents.len() - 1)) - .unwrap(); - let seed_bin = cents_to_bin(last_cents.inner() as f64); - let window_size = config.window_size; - let oracle = Oracle::from_checkpoint(seed_bin, config, |o| { - Self::feed_blocks(o, indexer, height.saturating_sub(window_size)..height); - }); - - Ok(oracle) - } - - /// Feed a range of blocks from the indexer into an Oracle (skipping coinbase), - /// returning per-block ref_bin values. 
- fn feed_blocks(oracle: &mut Oracle, indexer: &Indexer, range: Range) -> Vec { - let total_txs = indexer.vecs.transactions.height.len(); - let total_outputs = indexer.vecs.outputs.value.len(); - - let mut first_txindex_iter = indexer.vecs.transactions.first_txindex.into_iter(); - let mut first_txoutindex_iter = indexer.vecs.transactions.first_txoutindex.into_iter(); - let mut out_first_iter = indexer.vecs.outputs.first_txoutindex.into_iter(); - let mut value_iter = indexer.vecs.outputs.value.into_iter(); - let mut outputtype_iter = indexer.vecs.outputs.outputtype.into_iter(); - - let mut ref_bins = Vec::with_capacity(range.len()); - - for h in range { - let first_txindex: TxIndex = first_txindex_iter.get_at_unwrap(h); - let next_first_txindex = first_txindex_iter - .get_at(h + 1) - .unwrap_or(TxIndex::from(total_txs)); - - let out_start = if first_txindex.to_usize() + 1 < next_first_txindex.to_usize() { - first_txoutindex_iter - .get_at_unwrap(first_txindex.to_usize() + 1) - .to_usize() - } else { - out_first_iter - .get_at(h + 1) - .unwrap_or(TxOutIndex::from(total_outputs)) - .to_usize() - }; - let out_end = out_first_iter - .get_at(h + 1) - .unwrap_or(TxOutIndex::from(total_outputs)) - .to_usize(); - - let mut hist = [0u32; NUM_BINS]; - for i in out_start..out_end { - let sats: Sats = value_iter.get_at_unwrap(i); - let output_type: OutputType = outputtype_iter.get_at_unwrap(i); - if let Some(bin) = oracle.output_to_bin(sats, output_type) { - hist[bin] += 1; - } - } - - ref_bins.push(oracle.process_histogram(&hist)); - } - - ref_bins - } - - fn compute_daily_ohlc( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - let last_dateindex = DateIndex::from(indexes.dateindex.date.len()); - let start_dateindex = starting_indexes - .dateindex - .min(DateIndex::from(self.ohlc_cents.len())); - - if start_dateindex >= last_dateindex { - return Ok(()); - } - - let last_height = 
Height::from(self.price_cents.len()); - let mut height_to_price_iter = self.price_cents.iter()?; - let mut dateindex_to_first_height_iter = indexes.dateindex.first_height.iter(); - let mut height_count_iter = indexes.dateindex.height_count.iter(); - - for dateindex_usize in start_dateindex.to_usize()..last_dateindex.to_usize() { - let dateindex = DateIndex::from(dateindex_usize); - let first_height = dateindex_to_first_height_iter.get_unwrap(dateindex); - let count = height_count_iter.get_unwrap(dateindex); - - if *count == 0 || first_height >= last_height { - self.ohlc_cents - .truncate_push(dateindex, self.previous_ohlc(dateindex)?)?; - continue; - } - - let count = *count as usize; - let mut open = None; - let mut high = CentsUnsigned::ZERO; - let mut low = CentsUnsigned::MAX; - let mut close = CentsUnsigned::ZERO; - - for i in 0..count { - let height = first_height + Height::from(i); - if height >= last_height { - break; - } - - if let Some(price) = height_to_price_iter.get(height) { - if price == CentsUnsigned::ZERO { - continue; - } - if open.is_none() { - open = Some(price); - } - if price > high { - high = price; - } - if price < low { - low = price; - } - close = price; - } - } - - let ohlc = if let Some(open_price) = open { - OHLCCentsUnsigned { - open: Open::new(open_price), - high: High::new(high), - low: Low::new(low), - close: Close::new(close), - } - } else { - self.previous_ohlc(dateindex)? - }; - - self.ohlc_cents.truncate_push(dateindex, ohlc)?; - } - - { - let _lock = exit.lock(); - self.ohlc_cents.write()?; - } - - Ok(()) - } - - fn previous_ohlc(&self, dateindex: DateIndex) -> Result { - Ok(if dateindex > DateIndex::from(0usize) { - self.ohlc_cents - .iter()? 
- .get(dateindex.decremented().unwrap()) - .unwrap_or_default() - } else { - OHLCCentsUnsigned::default() - }) - } -} diff --git a/crates/brk_computer/src/price/oracle/import.rs b/crates/brk_computer/src/price/oracle/import.rs deleted file mode 100644 index e946666e3..000000000 --- a/crates/brk_computer/src/price/oracle/import.rs +++ /dev/null @@ -1,54 +0,0 @@ -use brk_error::Result; -use brk_types::Version; -use vecdb::{BytesVec, Database, EagerVec, ImportableVec, PcoVec}; - -use super::Vecs; -use crate::indexes; -use crate::internal::{ComputedOHLC, LazyFromHeightAndDateOHLC}; - -impl Vecs { - pub fn forced_import( - db: &Database, - parent_version: Version, - indexes: &indexes::Vecs, - ) -> Result { - let version = parent_version + Version::new(11); - - let price_cents = PcoVec::forced_import(db, "oracle_price_cents", version)?; - let ohlc_cents = BytesVec::forced_import(db, "oracle_ohlc_cents", version)?; - - let split = ComputedOHLC::forced_import(db, "oracle_price", version, indexes)?; - - let ohlc = LazyFromHeightAndDateOHLC { - dateindex: EagerVec::forced_import(db, "oracle_price_ohlc", version)?, - week: EagerVec::forced_import(db, "oracle_price_ohlc", version)?, - month: EagerVec::forced_import(db, "oracle_price_ohlc", version)?, - quarter: EagerVec::forced_import(db, "oracle_price_ohlc", version)?, - semester: EagerVec::forced_import(db, "oracle_price_ohlc", version)?, - year: EagerVec::forced_import(db, "oracle_price_ohlc", version)?, - decade: EagerVec::forced_import(db, "oracle_price_ohlc", version)?, - height: EagerVec::forced_import(db, "oracle_price_ohlc", version)?, - difficultyepoch: EagerVec::forced_import(db, "oracle_price_ohlc", version)?, - }; - - let ohlc_dollars = LazyFromHeightAndDateOHLC { - dateindex: EagerVec::forced_import(db, "oracle_ohlc_dollars", version)?, - week: EagerVec::forced_import(db, "oracle_ohlc_dollars", version)?, - month: EagerVec::forced_import(db, "oracle_ohlc_dollars", version)?, - quarter: EagerVec::forced_import(db, 
"oracle_ohlc_dollars", version)?, - semester: EagerVec::forced_import(db, "oracle_ohlc_dollars", version)?, - year: EagerVec::forced_import(db, "oracle_ohlc_dollars", version)?, - decade: EagerVec::forced_import(db, "oracle_ohlc_dollars", version)?, - height: EagerVec::forced_import(db, "oracle_ohlc_dollars", version)?, - difficultyepoch: EagerVec::forced_import(db, "oracle_ohlc_dollars", version)?, - }; - - Ok(Self { - price_cents, - ohlc_cents, - split, - ohlc, - ohlc_dollars, - }) - } -} diff --git a/crates/brk_computer/src/price/oracle/vecs.rs b/crates/brk_computer/src/price/oracle/vecs.rs deleted file mode 100644 index 3cf1fbeeb..000000000 --- a/crates/brk_computer/src/price/oracle/vecs.rs +++ /dev/null @@ -1,14 +0,0 @@ -use brk_traversable::Traversable; -use brk_types::{CentsUnsigned, DateIndex, Height, OHLCCentsUnsigned, OHLCDollars}; -use vecdb::{BytesVec, PcoVec}; - -use crate::internal::{ComputedOHLC, LazyFromHeightAndDateOHLC}; - -#[derive(Clone, Traversable)] -pub struct Vecs { - pub price_cents: PcoVec, - pub ohlc_cents: BytesVec, - pub split: ComputedOHLC, - pub ohlc: LazyFromHeightAndDateOHLC, - pub ohlc_dollars: LazyFromHeightAndDateOHLC, -} diff --git a/crates/brk_computer/src/price/sats/compute.rs b/crates/brk_computer/src/price/sats/compute.rs deleted file mode 100644 index 7137e9e07..000000000 --- a/crates/brk_computer/src/price/sats/compute.rs +++ /dev/null @@ -1,275 +0,0 @@ -use brk_error::Result; -use brk_types::{Close, High, Low, OHLCSats, Open, Sats}; -use vecdb::Exit; - -use super::super::usd; -use super::Vecs; -use crate::ComputeIndexes; - -impl Vecs { - pub fn compute( - &mut self, - starting_indexes: &ComputeIndexes, - usd: &usd::Vecs, - exit: &Exit, - ) -> Result<()> { - // Open: first-value aggregation (1 BTC / price) - self.split.open.height.compute_transform( - starting_indexes.height, - &usd.split.open.height, - |(i, open, ..)| (i, Open::new(Sats::ONE_BTC / *open)), - exit, - )?; - self.split - .open - 
.compute_rest(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - &usd.split.open.dateindex, - |(i, open, ..)| (i, Open::new(Sats::ONE_BTC / *open)), - exit, - )?; - Ok(()) - })?; - - // High: max-value aggregation (sats high = 1 BTC / usd low) - self.split.high.height.compute_transform( - starting_indexes.height, - &usd.split.low.height, - |(i, low, ..)| (i, High::new(Sats::ONE_BTC / *low)), - exit, - )?; - self.split - .high - .compute_rest(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - &usd.split.low.dateindex, - |(i, low, ..)| (i, High::new(Sats::ONE_BTC / *low)), - exit, - )?; - Ok(()) - })?; - - // Low: min-value aggregation (sats low = 1 BTC / usd high) - self.split.low.height.compute_transform( - starting_indexes.height, - &usd.split.high.height, - |(i, high, ..)| (i, Low::new(Sats::ONE_BTC / *high)), - exit, - )?; - self.split.low.compute_rest(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - &usd.split.high.dateindex, - |(i, high, ..)| (i, Low::new(Sats::ONE_BTC / *high)), - exit, - )?; - Ok(()) - })?; - - // Close: last-value aggregation - self.split.close.height.compute_transform( - starting_indexes.height, - &usd.split.close.height, - |(i, close, ..)| (i, Close::new(Sats::ONE_BTC / *close)), - exit, - )?; - self.split - .close - .compute_rest(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - &usd.split.close.dateindex, - |(i, close, ..)| (i, Close::new(Sats::ONE_BTC / *close)), - exit, - )?; - Ok(()) - })?; - - // Height OHLC in sats - self.ohlc.height.compute_transform4( - starting_indexes.height, - &self.split.open.height, - &self.split.high.height, - &self.split.low.height, - &self.split.close.height, - |(i, open, high, low, close, _)| { - ( - i, - OHLCSats { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - // DateIndex OHLC in sats - self.ohlc.dateindex.compute_transform4( - 
starting_indexes.dateindex, - &self.split.open.dateindex, - &self.split.high.dateindex, - &self.split.low.dateindex, - &self.split.close.dateindex, - |(i, open, high, low, close, _)| { - ( - i, - OHLCSats { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - // Period OHLC in sats - self.ohlc.week.compute_transform4( - starting_indexes.weekindex, - &*self.split.open.weekindex, - &*self.split.high.weekindex, - &*self.split.low.weekindex, - &*self.split.close.weekindex, - |(i, open, high, low, close, _)| { - ( - i, - OHLCSats { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.ohlc.difficultyepoch.compute_transform4( - starting_indexes.difficultyepoch, - &*self.split.open.difficultyepoch, - &*self.split.high.difficultyepoch, - &*self.split.low.difficultyepoch, - &*self.split.close.difficultyepoch, - |(i, open, high, low, close, _)| { - ( - i, - OHLCSats { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.ohlc.month.compute_transform4( - starting_indexes.monthindex, - &*self.split.open.monthindex, - &*self.split.high.monthindex, - &*self.split.low.monthindex, - &*self.split.close.monthindex, - |(i, open, high, low, close, _)| { - ( - i, - OHLCSats { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.ohlc.quarter.compute_transform4( - starting_indexes.quarterindex, - &*self.split.open.quarterindex, - &*self.split.high.quarterindex, - &*self.split.low.quarterindex, - &*self.split.close.quarterindex, - |(i, open, high, low, close, _)| { - ( - i, - OHLCSats { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.ohlc.semester.compute_transform4( - starting_indexes.semesterindex, - &*self.split.open.semesterindex, - &*self.split.high.semesterindex, - &*self.split.low.semesterindex, - &*self.split.close.semesterindex, - |(i, open, high, low, close, _)| { - ( - i, - OHLCSats { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.ohlc.year.compute_transform4( - 
starting_indexes.yearindex, - &*self.split.open.yearindex, - &*self.split.high.yearindex, - &*self.split.low.yearindex, - &*self.split.close.yearindex, - |(i, open, high, low, close, _)| { - ( - i, - OHLCSats { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.ohlc.decade.compute_transform4( - starting_indexes.decadeindex, - &*self.split.open.decadeindex, - &*self.split.high.decadeindex, - &*self.split.low.decadeindex, - &*self.split.close.decadeindex, - |(i, open, high, low, close, _)| { - ( - i, - OHLCSats { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - Ok(()) - } -} diff --git a/crates/brk_computer/src/price/sats/import.rs b/crates/brk_computer/src/price/sats/import.rs deleted file mode 100644 index 6f40a16c2..000000000 --- a/crates/brk_computer/src/price/sats/import.rs +++ /dev/null @@ -1,28 +0,0 @@ -use brk_error::Result; -use brk_types::Version; -use vecdb::{Database, EagerVec, ImportableVec}; - -use super::Vecs; -use crate::{ - indexes, - internal::{ComputedOHLC, LazyFromHeightAndDateOHLC}, -}; - -impl Vecs { - pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { - Ok(Self { - split: ComputedOHLC::forced_import(db, "price_sats", version, indexes)?, - ohlc: LazyFromHeightAndDateOHLC { - dateindex: EagerVec::forced_import(db, "price_ohlc_sats", version)?, - week: EagerVec::forced_import(db, "price_ohlc_sats", version)?, - month: EagerVec::forced_import(db, "price_ohlc_sats", version)?, - quarter: EagerVec::forced_import(db, "price_ohlc_sats", version)?, - semester: EagerVec::forced_import(db, "price_ohlc_sats", version)?, - year: EagerVec::forced_import(db, "price_ohlc_sats", version)?, - decade: EagerVec::forced_import(db, "price_ohlc_sats", version)?, - height: EagerVec::forced_import(db, "price_ohlc_sats", version)?, - difficultyepoch: EagerVec::forced_import(db, "price_ohlc_sats", version)?, - }, - }) - } -} diff --git a/crates/brk_computer/src/price/sats/vecs.rs 
b/crates/brk_computer/src/price/sats/vecs.rs deleted file mode 100644 index ce7065e14..000000000 --- a/crates/brk_computer/src/price/sats/vecs.rs +++ /dev/null @@ -1,10 +0,0 @@ -use brk_traversable::Traversable; -use brk_types::{OHLCSats, Sats}; - -use crate::internal::{ComputedOHLC, LazyFromHeightAndDateOHLC}; - -#[derive(Clone, Traversable)] -pub struct Vecs { - pub split: ComputedOHLC, - pub ohlc: LazyFromHeightAndDateOHLC, -} diff --git a/crates/brk_computer/src/price/usd/compute.rs b/crates/brk_computer/src/price/usd/compute.rs deleted file mode 100644 index 39b3ad5c1..000000000 --- a/crates/brk_computer/src/price/usd/compute.rs +++ /dev/null @@ -1,268 +0,0 @@ -use brk_error::Result; -use brk_types::{Close, Dollars, High, Low, OHLCDollars, Open}; -use vecdb::Exit; - -use super::super::cents; -use super::Vecs; -use crate::ComputeIndexes; - -impl Vecs { - pub fn compute( - &mut self, - starting_indexes: &ComputeIndexes, - cents: ¢s::Vecs, - exit: &Exit, - ) -> Result<()> { - // Open: first-value aggregation - self.split.open.height.compute_transform( - starting_indexes.height, - ¢s.split.height.open, - |(h, open, ..)| (h, Open::new(Dollars::from(*open))), - exit, - )?; - self.split.open.compute_rest(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - ¢s.split.dateindex.open, - |(di, open, ..)| (di, Open::new(Dollars::from(*open))), - exit, - )?; - Ok(()) - })?; - - // High: max-value aggregation - self.split.high.height.compute_transform( - starting_indexes.height, - ¢s.split.height.high, - |(h, high, ..)| (h, High::new(Dollars::from(*high))), - exit, - )?; - self.split.high.compute_rest(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - ¢s.split.dateindex.high, - |(di, high, ..)| (di, High::new(Dollars::from(*high))), - exit, - )?; - Ok(()) - })?; - - // Low: min-value aggregation - self.split.low.height.compute_transform( - starting_indexes.height, - ¢s.split.height.low, - |(h, low, ..)| (h, 
Low::new(Dollars::from(*low))), - exit, - )?; - self.split.low.compute_rest(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - ¢s.split.dateindex.low, - |(di, low, ..)| (di, Low::new(Dollars::from(*low))), - exit, - )?; - Ok(()) - })?; - - // Close: last-value aggregation - self.split.close.height.compute_transform( - starting_indexes.height, - ¢s.split.height.close, - |(h, close, ..)| (h, Close::new(Dollars::from(*close))), - exit, - )?; - self.split.close.compute_rest(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - ¢s.split.dateindex.close, - |(di, close, ..)| (di, Close::new(Dollars::from(*close))), - exit, - )?; - Ok(()) - })?; - - // Period OHLC aggregates - time based - self.ohlc.dateindex.compute_transform4( - starting_indexes.dateindex, - &self.split.open.dateindex, - &self.split.high.dateindex, - &self.split.low.dateindex, - &self.split.close.dateindex, - |(i, open, high, low, close, _)| { - ( - i, - OHLCDollars { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.ohlc.week.compute_transform4( - starting_indexes.weekindex, - &*self.split.open.weekindex, - &*self.split.high.weekindex, - &*self.split.low.weekindex, - &*self.split.close.weekindex, - |(i, open, high, low, close, _)| { - ( - i, - OHLCDollars { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.ohlc.month.compute_transform4( - starting_indexes.monthindex, - &*self.split.open.monthindex, - &*self.split.high.monthindex, - &*self.split.low.monthindex, - &*self.split.close.monthindex, - |(i, open, high, low, close, _)| { - ( - i, - OHLCDollars { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.ohlc.quarter.compute_transform4( - starting_indexes.quarterindex, - &*self.split.open.quarterindex, - &*self.split.high.quarterindex, - &*self.split.low.quarterindex, - &*self.split.close.quarterindex, - |(i, open, high, low, close, _)| { - ( - i, - OHLCDollars { - open, - high, - low, 
- close, - }, - ) - }, - exit, - )?; - - self.ohlc.semester.compute_transform4( - starting_indexes.semesterindex, - &*self.split.open.semesterindex, - &*self.split.high.semesterindex, - &*self.split.low.semesterindex, - &*self.split.close.semesterindex, - |(i, open, high, low, close, _)| { - ( - i, - OHLCDollars { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.ohlc.year.compute_transform4( - starting_indexes.yearindex, - &*self.split.open.yearindex, - &*self.split.high.yearindex, - &*self.split.low.yearindex, - &*self.split.close.yearindex, - |(i, open, high, low, close, _)| { - ( - i, - OHLCDollars { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.ohlc.decade.compute_transform4( - starting_indexes.decadeindex, - &*self.split.open.decadeindex, - &*self.split.high.decadeindex, - &*self.split.low.decadeindex, - &*self.split.close.decadeindex, - |(i, open, high, low, close, _)| { - ( - i, - OHLCDollars { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - // Period OHLC aggregates - chain based - self.ohlc.height.compute_transform4( - starting_indexes.height, - &self.split.open.height, - &self.split.high.height, - &self.split.low.height, - &self.split.close.height, - |(i, open, high, low, close, _)| { - ( - i, - OHLCDollars { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.ohlc.difficultyepoch.compute_transform4( - starting_indexes.difficultyepoch, - &*self.split.open.difficultyepoch, - &*self.split.high.difficultyepoch, - &*self.split.low.difficultyepoch, - &*self.split.close.difficultyepoch, - |(i, open, high, low, close, _)| { - ( - i, - OHLCDollars { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - Ok(()) - } -} diff --git a/crates/brk_computer/src/price/usd/import.rs b/crates/brk_computer/src/price/usd/import.rs deleted file mode 100644 index 5e65147ff..000000000 --- a/crates/brk_computer/src/price/usd/import.rs +++ /dev/null @@ -1,28 +0,0 @@ -use brk_error::Result; -use 
brk_types::Version; -use vecdb::{Database, EagerVec, ImportableVec}; - -use super::Vecs; -use crate::{ - indexes, - internal::{ComputedOHLC, LazyFromHeightAndDateOHLC}, -}; - -impl Vecs { - pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { - Ok(Self { - split: ComputedOHLC::forced_import(db, "price", version, indexes)?, - ohlc: LazyFromHeightAndDateOHLC { - dateindex: EagerVec::forced_import(db, "price_ohlc", version)?, - week: EagerVec::forced_import(db, "price_ohlc", version)?, - month: EagerVec::forced_import(db, "price_ohlc", version)?, - quarter: EagerVec::forced_import(db, "price_ohlc", version)?, - semester: EagerVec::forced_import(db, "price_ohlc", version)?, - year: EagerVec::forced_import(db, "price_ohlc", version)?, - decade: EagerVec::forced_import(db, "price_ohlc", version)?, - height: EagerVec::forced_import(db, "price_ohlc", version)?, - difficultyepoch: EagerVec::forced_import(db, "price_ohlc", version)?, - }, - }) - } -} diff --git a/crates/brk_computer/src/price/usd/mod.rs b/crates/brk_computer/src/price/usd/mod.rs deleted file mode 100644 index 1136f9ebd..000000000 --- a/crates/brk_computer/src/price/usd/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod compute; -mod import; -mod vecs; - -pub use vecs::Vecs; diff --git a/crates/brk_computer/src/price/usd/vecs.rs b/crates/brk_computer/src/price/usd/vecs.rs deleted file mode 100644 index 76865ca06..000000000 --- a/crates/brk_computer/src/price/usd/vecs.rs +++ /dev/null @@ -1,10 +0,0 @@ -use brk_traversable::Traversable; -use brk_types::{Dollars, OHLCDollars}; - -use crate::internal::{ComputedOHLC, LazyFromHeightAndDateOHLC}; - -#[derive(Clone, Traversable)] -pub struct Vecs { - pub split: ComputedOHLC, - pub ohlc: LazyFromHeightAndDateOHLC, -} diff --git a/crates/brk_computer/src/prices/cents/compute.rs b/crates/brk_computer/src/prices/cents/compute.rs new file mode 100644 index 000000000..861d0ae44 --- /dev/null +++ b/crates/brk_computer/src/prices/cents/compute.rs 
@@ -0,0 +1,191 @@ +use std::ops::Range; + +use brk_error::Result; +use brk_indexer::Indexer; +use brk_oracle::{Config, NUM_BINS, Oracle, START_HEIGHT, bin_to_cents, cents_to_bin}; +use brk_types::{Cents, OutputType, Sats, TxIndex, TxOutIndex}; +use tracing::info; +use vecdb::{AnyStoredVec, AnyVec, Exit, ReadableVec, StorageMode, WritableVec, VecIndex}; + +use super::Vecs; +use crate::ComputeIndexes; + +impl Vecs { + pub(crate) fn compute( + &mut self, + indexer: &Indexer, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + self.compute_prices(indexer, starting_indexes, exit)?; + Ok(()) + } + + fn compute_prices( + &mut self, + indexer: &Indexer, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + let source_version = + indexer.vecs.outputs.value.version() + indexer.vecs.outputs.outputtype.version(); + self.price + .validate_computed_version_or_reset(source_version)?; + + let total_heights = indexer.vecs.blocks.timestamp.len(); + + if total_heights <= START_HEIGHT { + return Ok(()); + } + + // Reorg: truncate to starting_indexes + let truncate_to = self.price.len().min(starting_indexes.height.to_usize()); + self.price.truncate_if_needed_at(truncate_to)?; + + if self.price.len() < START_HEIGHT { + for line in brk_oracle::PRICES.lines().skip(self.price.len()) { + if self.price.len() >= START_HEIGHT { + break; + } + let dollars: f64 = line.parse().unwrap_or(0.0); + let cents = (dollars * 100.0).round() as u64; + self.price.push(Cents::new(cents)); + } + } + + if self.price.len() >= total_heights { + return Ok(()); + } + + let config = Config::default(); + let committed = self.price.len(); + let prev_cents = self.price.collect_one_at(committed - 1).unwrap(); + let seed_bin = cents_to_bin(prev_cents.inner() as f64); + let warmup = config.window_size.min(committed - START_HEIGHT); + let mut oracle = Oracle::from_checkpoint(seed_bin, config, |o| { + Self::feed_blocks(o, indexer, (committed - warmup)..committed); + }); + + let 
num_new = total_heights - committed; + info!( + "Computing oracle prices: {} to {} ({warmup} warmup)", + committed, total_heights + ); + + let ref_bins = Self::feed_blocks(&mut oracle, indexer, committed..total_heights); + + for (i, ref_bin) in ref_bins.into_iter().enumerate() { + self.price.push(Cents::new(bin_to_cents(ref_bin))); + + let progress = ((i + 1) * 100 / num_new) as u8; + if i > 0 && progress > ((i * 100 / num_new) as u8) { + info!("Oracle price computation: {}%", progress); + } + } + + { + let _lock = exit.lock(); + self.price.write()?; + } + + info!("Oracle prices complete: {} committed", self.price.len()); + + Ok(()) + } + + /// Feed a range of blocks from the indexer into an Oracle (skipping coinbase), + /// returning per-block ref_bin values. + fn feed_blocks(oracle: &mut Oracle, indexer: &Indexer, range: Range) -> Vec { + let total_txs = indexer.vecs.transactions.height.len(); + let total_outputs = indexer.vecs.outputs.value.len(); + + // Pre-collect height-indexed data for the range (plus one extra for next-block lookups) + let collect_end = (range.end + 1).min(indexer.vecs.transactions.first_txindex.len()); + let first_txindexes: Vec = indexer + .vecs + .transactions + .first_txindex + .collect_range_at(range.start, collect_end); + + let out_firsts: Vec = indexer + .vecs + .outputs + .first_txoutindex + .collect_range_at(range.start, collect_end); + + let mut ref_bins = Vec::with_capacity(range.len()); + + // Cursor avoids per-block PcoVec page decompression for + // the tx-indexed first_txoutindex lookup. The accessed + // txindex values (first_txindex + 1) are strictly increasing + // across blocks, so the cursor only advances forward. 
+ let mut txout_cursor = indexer.vecs.transactions.first_txoutindex.cursor(); + + for (idx, _h) in range.enumerate() { + let first_txindex = first_txindexes[idx]; + let next_first_txindex = first_txindexes + .get(idx + 1) + .copied() + .unwrap_or(TxIndex::from(total_txs)); + + let out_start = if first_txindex.to_usize() + 1 < next_first_txindex.to_usize() { + let target = first_txindex.to_usize() + 1; + txout_cursor.advance(target - txout_cursor.position()); + txout_cursor.next().unwrap().to_usize() + } else { + out_firsts + .get(idx + 1) + .copied() + .unwrap_or(TxOutIndex::from(total_outputs)) + .to_usize() + }; + let out_end = out_firsts + .get(idx + 1) + .copied() + .unwrap_or(TxOutIndex::from(total_outputs)) + .to_usize(); + + let values: Vec = indexer + .vecs + .outputs + .value + .collect_range_at(out_start, out_end); + let output_types: Vec = indexer + .vecs + .outputs + .outputtype + .collect_range_at(out_start, out_end); + + let mut hist = [0u32; NUM_BINS]; + for i in 0..values.len() { + if let Some(bin) = oracle.output_to_bin(values[i], output_types[i]) { + hist[bin] += 1; + } + } + + ref_bins.push(oracle.process_histogram(&hist)); + } + + ref_bins + } +} + +impl Vecs { + /// Returns an Oracle seeded from the last committed price, with the last + /// window_size blocks already processed. Ready for additional blocks (e.g. mempool). 
+ pub fn live_oracle(&self, indexer: &Indexer) -> Result { + let config = Config::default(); + let height = indexer.vecs.blocks.timestamp.len(); + let last_cents = self + .price + .collect_one_at(self.price.len() - 1) + .unwrap(); + let seed_bin = cents_to_bin(last_cents.inner() as f64); + let window_size = config.window_size; + let oracle = Oracle::from_checkpoint(seed_bin, config, |o| { + Vecs::feed_blocks(o, indexer, height.saturating_sub(window_size)..height); + }); + + Ok(oracle) + } +} diff --git a/crates/brk_computer/src/prices/cents/import.rs b/crates/brk_computer/src/prices/cents/import.rs new file mode 100644 index 000000000..bcae71cc9 --- /dev/null +++ b/crates/brk_computer/src/prices/cents/import.rs @@ -0,0 +1,35 @@ +use brk_error::Result; +use brk_types::Version; +use vecdb::{Database, ImportableVec, ReadableCloneableVec, PcoVec}; + +use super::Vecs; +use crate::indexes; +use crate::internal::{ComputedHeightDerivedOHLC, ComputedHeightDerivedSplitOHLC}; + +impl Vecs { + pub(crate) fn forced_import( + db: &Database, + parent_version: Version, + indexes: &indexes::Vecs, + ) -> Result { + let version = parent_version + Version::new(11); + + let price = PcoVec::forced_import(db, "price_cents", version)?; + + let split = ComputedHeightDerivedSplitOHLC::forced_import( + "price_cents", + version, + indexes, + price.read_only_boxed_clone(), + ); + + let ohlc = ComputedHeightDerivedOHLC::forced_import( + "price_cents", + version, + indexes, + price.read_only_boxed_clone(), + ); + + Ok(Self { price, split, ohlc }) + } +} diff --git a/crates/brk_computer/src/price/oracle/mod.rs b/crates/brk_computer/src/prices/cents/mod.rs similarity index 100% rename from crates/brk_computer/src/price/oracle/mod.rs rename to crates/brk_computer/src/prices/cents/mod.rs diff --git a/crates/brk_computer/src/prices/cents/vecs.rs b/crates/brk_computer/src/prices/cents/vecs.rs new file mode 100644 index 000000000..87dc24117 --- /dev/null +++ 
b/crates/brk_computer/src/prices/cents/vecs.rs @@ -0,0 +1,12 @@ +use brk_traversable::Traversable; +use brk_types::{Cents, Height, OHLCCents}; +use vecdb::{PcoVec, Rw, StorageMode}; + +use crate::internal::{ComputedHeightDerivedOHLC, ComputedHeightDerivedSplitOHLC}; + +#[derive(Traversable)] +pub struct Vecs { + pub price: M::Stored>, + pub split: ComputedHeightDerivedSplitOHLC, + pub ohlc: ComputedHeightDerivedOHLC, +} diff --git a/crates/brk_computer/src/price/compute.rs b/crates/brk_computer/src/prices/compute.rs similarity index 50% rename from crates/brk_computer/src/price/compute.rs rename to crates/brk_computer/src/prices/compute.rs index 29af7f36d..27b322841 100644 --- a/crates/brk_computer/src/price/compute.rs +++ b/crates/brk_computer/src/prices/compute.rs @@ -3,22 +3,17 @@ use brk_indexer::Indexer; use vecdb::Exit; use super::Vecs; -use crate::{indexes, ComputeIndexes}; +use crate::ComputeIndexes; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, indexer: &Indexer, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.usd.compute(starting_indexes, &self.cents, exit)?; - - self.sats.compute(starting_indexes, &self.usd, exit)?; - - self.oracle - .compute(indexer, indexes, starting_indexes, exit)?; + self.cents + .compute(indexer, starting_indexes, exit)?; let _lock = exit.lock(); self.db().compact()?; diff --git a/crates/brk_computer/src/price/mod.rs b/crates/brk_computer/src/prices/mod.rs similarity index 59% rename from crates/brk_computer/src/price/mod.rs rename to crates/brk_computer/src/prices/mod.rs index 5b160c0fb..b8fd55667 100644 --- a/crates/brk_computer/src/price/mod.rs +++ b/crates/brk_computer/src/prices/mod.rs @@ -1,52 +1,43 @@ mod compute; -mod fetch; pub mod cents; -pub mod oracle; pub mod sats; pub mod usd; pub use cents::Vecs as CentsVecs; -pub use oracle::Vecs as OracleVecs; pub use sats::Vecs as SatsVecs; pub use usd::Vecs as UsdVecs; use std::path::Path; -use 
brk_fetcher::Fetcher; use brk_traversable::Traversable; use brk_types::Version; -use vecdb::{Database, PAGE_SIZE}; +use vecdb::{Database, Rw, StorageMode, PAGE_SIZE}; use crate::indexes; -pub const DB_NAME: &str = "price"; +pub const DB_NAME: &str = "prices"; -#[derive(Clone, Traversable)] -pub struct Vecs { +#[derive(Traversable)] +pub struct Vecs { #[traversable(skip)] pub(crate) db: Database, - #[traversable(skip)] - pub(crate) fetcher: Option, - - pub cents: CentsVecs, + pub cents: CentsVecs, pub usd: UsdVecs, pub sats: SatsVecs, - pub oracle: OracleVecs, } impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( parent: &Path, version: Version, indexes: &indexes::Vecs, - fetcher: Option, ) -> brk_error::Result { let db = Database::open(&parent.join(DB_NAME))?; db.set_min_len(PAGE_SIZE * 1_000_000)?; - let this = Self::forced_import_inner(&db, version, indexes, fetcher)?; + let this = Self::forced_import_inner(&db, version, indexes)?; this.db.retain_regions( this.iter_any_exportable() @@ -62,27 +53,19 @@ impl Vecs { db: &Database, version: Version, indexes: &indexes::Vecs, - fetcher: Option, ) -> brk_error::Result { - let cents = CentsVecs::forced_import(db, version)?; - let usd = UsdVecs::forced_import(db, version, indexes)?; - let sats = SatsVecs::forced_import(db, version, indexes)?; - let oracle = OracleVecs::forced_import(db, version, indexes)?; + let cents = CentsVecs::forced_import(db, version, indexes)?; + let usd = UsdVecs::forced_import(version, indexes, ¢s); + let sats = SatsVecs::forced_import(version, indexes, ¢s); Ok(Self { db: db.clone(), - fetcher, cents, usd, sats, - oracle, }) } - pub fn has_fetcher(&self) -> bool { - self.fetcher.is_some() - } - pub(crate) fn db(&self) -> &Database { &self.db } diff --git a/crates/brk_computer/src/prices/sats/import.rs b/crates/brk_computer/src/prices/sats/import.rs new file mode 100644 index 000000000..9d101c3a2 --- /dev/null +++ b/crates/brk_computer/src/prices/sats/import.rs @@ -0,0 +1,39 @@ +use 
brk_types::Version; +use vecdb::{ReadableCloneableVec, LazyVecFrom1}; + +use super::super::cents; +use super::Vecs; +use crate::{ + indexes, + internal::{CentsUnsignedToSats, ComputedHeightDerivedOHLC, ComputedHeightDerivedSplitOHLC}, +}; + +impl Vecs { + pub(crate) fn forced_import( + version: Version, + indexes: &indexes::Vecs, + cents: ¢s::Vecs, + ) -> Self { + let price = LazyVecFrom1::transformed::( + "price_sats", + version, + cents.price.read_only_boxed_clone(), + ); + + let split = ComputedHeightDerivedSplitOHLC::forced_import( + "price_sats", + version, + indexes, + price.read_only_boxed_clone(), + ); + + let ohlc = ComputedHeightDerivedOHLC::forced_import( + "price_sats", + version, + indexes, + price.read_only_boxed_clone(), + ); + + Self { price, split, ohlc } + } +} diff --git a/crates/brk_computer/src/price/cents/mod.rs b/crates/brk_computer/src/prices/sats/mod.rs similarity index 100% rename from crates/brk_computer/src/price/cents/mod.rs rename to crates/brk_computer/src/prices/sats/mod.rs diff --git a/crates/brk_computer/src/prices/sats/vecs.rs b/crates/brk_computer/src/prices/sats/vecs.rs new file mode 100644 index 000000000..ee2184460 --- /dev/null +++ b/crates/brk_computer/src/prices/sats/vecs.rs @@ -0,0 +1,12 @@ +use brk_traversable::Traversable; +use brk_types::{Cents, Height, OHLCSats, Sats}; +use vecdb::LazyVecFrom1; + +use crate::internal::{ComputedHeightDerivedOHLC, ComputedHeightDerivedSplitOHLC}; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub price: LazyVecFrom1, + pub split: ComputedHeightDerivedSplitOHLC, + pub ohlc: ComputedHeightDerivedOHLC, +} diff --git a/crates/brk_computer/src/prices/usd/import.rs b/crates/brk_computer/src/prices/usd/import.rs new file mode 100644 index 000000000..661acd242 --- /dev/null +++ b/crates/brk_computer/src/prices/usd/import.rs @@ -0,0 +1,39 @@ +use brk_types::Version; +use vecdb::{ReadableCloneableVec, LazyVecFrom1}; + +use super::super::cents; +use super::Vecs; +use crate::{ + indexes, + 
internal::{CentsUnsignedToDollars, ComputedHeightDerivedOHLC, ComputedHeightDerivedSplitOHLC}, +}; + +impl Vecs { + pub(crate) fn forced_import( + version: Version, + indexes: &indexes::Vecs, + cents: ¢s::Vecs, + ) -> Self { + let price = LazyVecFrom1::transformed::( + "price_usd", + version, + cents.price.read_only_boxed_clone(), + ); + + let split = ComputedHeightDerivedSplitOHLC::forced_import( + "price", + version, + indexes, + price.read_only_boxed_clone(), + ); + + let ohlc = ComputedHeightDerivedOHLC::forced_import( + "price_usd", + version, + indexes, + price.read_only_boxed_clone(), + ); + + Self { price, split, ohlc } + } +} diff --git a/crates/brk_computer/src/price/sats/mod.rs b/crates/brk_computer/src/prices/usd/mod.rs similarity index 76% rename from crates/brk_computer/src/price/sats/mod.rs rename to crates/brk_computer/src/prices/usd/mod.rs index 1136f9ebd..f8623047a 100644 --- a/crates/brk_computer/src/price/sats/mod.rs +++ b/crates/brk_computer/src/prices/usd/mod.rs @@ -1,4 +1,3 @@ -mod compute; mod import; mod vecs; diff --git a/crates/brk_computer/src/prices/usd/vecs.rs b/crates/brk_computer/src/prices/usd/vecs.rs new file mode 100644 index 000000000..629f21378 --- /dev/null +++ b/crates/brk_computer/src/prices/usd/vecs.rs @@ -0,0 +1,12 @@ +use brk_traversable::Traversable; +use brk_types::{Cents, Dollars, Height, OHLCDollars}; +use vecdb::LazyVecFrom1; + +use crate::internal::{ComputedHeightDerivedOHLC, ComputedHeightDerivedSplitOHLC}; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub price: LazyVecFrom1, + pub split: ComputedHeightDerivedSplitOHLC, + pub ohlc: ComputedHeightDerivedOHLC, +} diff --git a/crates/brk_computer/src/scripts/compute.rs b/crates/brk_computer/src/scripts/compute.rs index beeecca0c..b5f558825 100644 --- a/crates/brk_computer/src/scripts/compute.rs +++ b/crates/brk_computer/src/scripts/compute.rs @@ -2,23 +2,22 @@ use brk_error::Result; use brk_indexer::Indexer; use vecdb::Exit; -use crate::{indexes, 
ComputeIndexes}; +use crate::ComputeIndexes; use super::Vecs; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, indexer: &Indexer, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { self.count - .compute(indexer, indexes, starting_indexes, exit)?; + .compute(indexer, starting_indexes, exit)?; self.value - .compute(indexer, indexes, starting_indexes, exit)?; + .compute(indexer, starting_indexes, exit)?; let _lock = exit.lock(); self.db.compact()?; diff --git a/crates/brk_computer/src/scripts/count/compute.rs b/crates/brk_computer/src/scripts/count/compute.rs index e7b4c1da4..2d069457d 100644 --- a/crates/brk_computer/src/scripts/count/compute.rs +++ b/crates/brk_computer/src/scripts/count/compute.rs @@ -1,20 +1,19 @@ use brk_error::Result; use brk_indexer::Indexer; use brk_types::StoredU64; -use vecdb::{Exit, TypedVecIterator}; +use vecdb::Exit; use super::Vecs; -use crate::{ComputeIndexes, indexes}; +use crate::ComputeIndexes; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, indexer: &Indexer, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.p2a.compute_all(indexes, starting_indexes, exit, |v| { + self.p2a.compute(starting_indexes, exit,|v| { v.compute_count_from_indexes( starting_indexes.height, &indexer.vecs.addresses.first_p2aaddressindex, @@ -25,7 +24,7 @@ impl Vecs { })?; self.p2ms - .compute_all(indexes, starting_indexes, exit, |v| { + .compute(starting_indexes, exit,|v| { v.compute_count_from_indexes( starting_indexes.height, &indexer.vecs.scripts.first_p2msoutputindex, @@ -36,7 +35,7 @@ impl Vecs { })?; self.p2pk33 - .compute_all(indexes, starting_indexes, exit, |v| { + .compute(starting_indexes, exit,|v| { v.compute_count_from_indexes( starting_indexes.height, &indexer.vecs.addresses.first_p2pk33addressindex, @@ -47,7 +46,7 @@ impl Vecs { })?; self.p2pk65 - .compute_all(indexes, starting_indexes, exit, |v| { + 
.compute(starting_indexes, exit,|v| { v.compute_count_from_indexes( starting_indexes.height, &indexer.vecs.addresses.first_p2pk65addressindex, @@ -58,7 +57,7 @@ impl Vecs { })?; self.p2pkh - .compute_all(indexes, starting_indexes, exit, |v| { + .compute(starting_indexes, exit,|v| { v.compute_count_from_indexes( starting_indexes.height, &indexer.vecs.addresses.first_p2pkhaddressindex, @@ -69,7 +68,7 @@ impl Vecs { })?; self.p2sh - .compute_all(indexes, starting_indexes, exit, |v| { + .compute(starting_indexes, exit,|v| { v.compute_count_from_indexes( starting_indexes.height, &indexer.vecs.addresses.first_p2shaddressindex, @@ -80,7 +79,7 @@ impl Vecs { })?; self.p2tr - .compute_all(indexes, starting_indexes, exit, |v| { + .compute(starting_indexes, exit,|v| { v.compute_count_from_indexes( starting_indexes.height, &indexer.vecs.addresses.first_p2traddressindex, @@ -91,7 +90,7 @@ impl Vecs { })?; self.p2wpkh - .compute_all(indexes, starting_indexes, exit, |v| { + .compute(starting_indexes, exit,|v| { v.compute_count_from_indexes( starting_indexes.height, &indexer.vecs.addresses.first_p2wpkhaddressindex, @@ -102,7 +101,7 @@ impl Vecs { })?; self.p2wsh - .compute_all(indexes, starting_indexes, exit, |v| { + .compute(starting_indexes, exit,|v| { v.compute_count_from_indexes( starting_indexes.height, &indexer.vecs.addresses.first_p2wshaddressindex, @@ -113,7 +112,7 @@ impl Vecs { })?; self.opreturn - .compute_all(indexes, starting_indexes, exit, |v| { + .compute(starting_indexes, exit,|v| { v.compute_count_from_indexes( starting_indexes.height, &indexer.vecs.scripts.first_opreturnindex, @@ -124,7 +123,7 @@ impl Vecs { })?; self.unknownoutput - .compute_all(indexes, starting_indexes, exit, |v| { + .compute(starting_indexes, exit,|v| { v.compute_count_from_indexes( starting_indexes.height, &indexer.vecs.scripts.first_unknownoutputindex, @@ -135,7 +134,7 @@ impl Vecs { })?; self.emptyoutput - .compute_all(indexes, starting_indexes, exit, |v| { + .compute(starting_indexes, 
exit,|v| { v.compute_count_from_indexes( starting_indexes.height, &indexer.vecs.scripts.first_emptyoutputindex, @@ -146,17 +145,15 @@ impl Vecs { })?; // Compute segwit = p2wpkh + p2wsh + p2tr - let mut p2wsh_iter = self.p2wsh.height.into_iter(); - let mut p2tr_iter = self.p2tr.height.into_iter(); - self.segwit - .compute_all(indexes, starting_indexes, exit, |v| { - v.compute_transform( + .compute(starting_indexes, exit,|v| { + v.compute_transform3( starting_indexes.height, &self.p2wpkh.height, - |(h, p2wpkh, ..)| { - let sum = *p2wpkh + *p2wsh_iter.get_unwrap(h) + *p2tr_iter.get_unwrap(h); - (h, StoredU64::from(sum)) + &self.p2wsh.height, + &self.p2tr.height, + |(h, p2wpkh, p2wsh, p2tr, ..)| { + (h, StoredU64::from(*p2wpkh + *p2wsh + *p2tr)) }, exit, )?; diff --git a/crates/brk_computer/src/scripts/count/import.rs b/crates/brk_computer/src/scripts/count/import.rs index 647d154e5..5e34e1376 100644 --- a/crates/brk_computer/src/scripts/count/import.rs +++ b/crates/brk_computer/src/scripts/count/import.rs @@ -1,6 +1,6 @@ use brk_error::Result; use brk_types::Version; -use vecdb::{Database, IterableCloneableVec}; +use vecdb::{Database, ReadableCloneableVec}; use super::Vecs; use crate::{ @@ -10,7 +10,7 @@ use crate::{ }; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, indexes: &indexes::Vecs, @@ -31,20 +31,20 @@ impl Vecs { // Adoption ratios (lazy) // Uses outputs.count.count as denominator (total output count) - // At height level: per-block ratio; at dateindex level: sum-based ratio (% of new outputs) + // At height level: per-block ratio; at day1 level: sum-based ratio (% of new outputs) let taproot_adoption = LazyBinaryFromHeightFull::from_height_and_txindex::( "taproot_adoption", version, - p2tr.height.boxed_clone(), - outputs.count.total_count.height.sum_cum.sum.0.boxed_clone(), + p2tr.height.read_only_boxed_clone(), + outputs.count.total_count.height.sum_cum.sum.0.read_only_boxed_clone(), &p2tr, 
&outputs.count.total_count, ); let segwit_adoption = LazyBinaryFromHeightFull::from_height_and_txindex::( "segwit_adoption", version, - segwit.height.boxed_clone(), - outputs.count.total_count.height.sum_cum.sum.0.boxed_clone(), + segwit.height.read_only_boxed_clone(), + outputs.count.total_count.height.sum_cum.sum.0.read_only_boxed_clone(), &segwit, &outputs.count.total_count, ); diff --git a/crates/brk_computer/src/scripts/count/vecs.rs b/crates/brk_computer/src/scripts/count/vecs.rs index 149974c66..173f2f309 100644 --- a/crates/brk_computer/src/scripts/count/vecs.rs +++ b/crates/brk_computer/src/scripts/count/vecs.rs @@ -1,27 +1,28 @@ use brk_traversable::Traversable; use brk_types::{StoredF32, StoredU64}; +use vecdb::{Rw, StorageMode}; use crate::internal::{ComputedFromHeightFull, LazyBinaryFromHeightFull}; -#[derive(Clone, Traversable)] -pub struct Vecs { +#[derive(Traversable)] +pub struct Vecs { // Per-type output counts - pub p2a: ComputedFromHeightFull, - pub p2ms: ComputedFromHeightFull, - pub p2pk33: ComputedFromHeightFull, - pub p2pk65: ComputedFromHeightFull, - pub p2pkh: ComputedFromHeightFull, - pub p2sh: ComputedFromHeightFull, - pub p2tr: ComputedFromHeightFull, - pub p2wpkh: ComputedFromHeightFull, - pub p2wsh: ComputedFromHeightFull, - pub opreturn: ComputedFromHeightFull, - pub emptyoutput: ComputedFromHeightFull, - pub unknownoutput: ComputedFromHeightFull, + pub p2a: ComputedFromHeightFull, + pub p2ms: ComputedFromHeightFull, + pub p2pk33: ComputedFromHeightFull, + pub p2pk65: ComputedFromHeightFull, + pub p2pkh: ComputedFromHeightFull, + pub p2sh: ComputedFromHeightFull, + pub p2tr: ComputedFromHeightFull, + pub p2wpkh: ComputedFromHeightFull, + pub p2wsh: ComputedFromHeightFull, + pub opreturn: ComputedFromHeightFull, + pub emptyoutput: ComputedFromHeightFull, + pub unknownoutput: ComputedFromHeightFull, // Aggregate counts /// SegWit output count (p2wpkh + p2wsh + p2tr) - pub segwit: ComputedFromHeightFull, + pub segwit: 
ComputedFromHeightFull, // Adoption ratios pub taproot_adoption: LazyBinaryFromHeightFull, diff --git a/crates/brk_computer/src/scripts/import.rs b/crates/brk_computer/src/scripts/import.rs index b2c669814..31f391ffd 100644 --- a/crates/brk_computer/src/scripts/import.rs +++ b/crates/brk_computer/src/scripts/import.rs @@ -5,16 +5,16 @@ use brk_traversable::Traversable; use brk_types::Version; use vecdb::{Database, PAGE_SIZE}; -use crate::{indexes, outputs, price}; +use crate::{indexes, outputs, prices}; use super::{CountVecs, ValueVecs, Vecs}; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( parent_path: &Path, parent_version: Version, indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, outputs: &outputs::Vecs, ) -> Result { let db = Database::open(&parent_path.join(super::DB_NAME))?; @@ -23,7 +23,7 @@ impl Vecs { let version = parent_version; let count = CountVecs::forced_import(&db, version, indexes, outputs)?; - let value = ValueVecs::forced_import(&db, version, indexes, price)?; + let value = ValueVecs::forced_import(&db, version, indexes, prices)?; let this = Self { db, count, value }; diff --git a/crates/brk_computer/src/scripts/mod.rs b/crates/brk_computer/src/scripts/mod.rs index 2cd216732..1b7042e55 100644 --- a/crates/brk_computer/src/scripts/mod.rs +++ b/crates/brk_computer/src/scripts/mod.rs @@ -5,18 +5,18 @@ mod compute; mod import; use brk_traversable::Traversable; -use vecdb::Database; +use vecdb::{Database, Rw, StorageMode}; pub use count::Vecs as CountVecs; pub use value::Vecs as ValueVecs; pub const DB_NAME: &str = "scripts"; -#[derive(Clone, Traversable)] -pub struct Vecs { +#[derive(Traversable)] +pub struct Vecs { #[traversable(skip)] pub(crate) db: Database, - pub count: CountVecs, - pub value: ValueVecs, + pub count: CountVecs, + pub value: ValueVecs, } diff --git a/crates/brk_computer/src/scripts/value/compute.rs b/crates/brk_computer/src/scripts/value/compute.rs index 3b4489ce1..074de81ea 
100644 --- a/crates/brk_computer/src/scripts/value/compute.rs +++ b/crates/brk_computer/src/scripts/value/compute.rs @@ -1,21 +1,20 @@ use brk_error::Result; use brk_indexer::Indexer; use brk_types::{Height, OutputType, Sats, TxOutIndex}; -use vecdb::{AnyStoredVec, AnyVec, Exit, GenericStoredVec, TypedVecIterator, VecIndex}; +use vecdb::{AnyStoredVec, AnyVec, Exit, ReadableVec, WritableVec, VecIndex}; use super::Vecs; -use crate::{ComputeIndexes, indexes}; +use crate::ComputeIndexes; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, indexer: &Indexer, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { self.opreturn - .compute_all(indexes, starting_indexes, exit, |height_vec| { + .compute(starting_indexes, exit, |height_vec| { // Validate computed versions against dependencies let dep_version = indexer.vecs.outputs.first_txoutindex.version() + indexer.vecs.outputs.outputtype.version() @@ -38,35 +37,38 @@ impl Vecs { return Ok(()); } - // Prepare iterators - let mut height_to_first_txoutindex = - indexer.vecs.outputs.first_txoutindex.iter()?; - let mut txoutindex_to_outputtype = indexer.vecs.outputs.outputtype.iter()?; - let mut txoutindex_to_value = indexer.vecs.outputs.value.iter()?; + // Pre-collect height-indexed data + let first_txoutindexes: Vec = indexer.vecs.outputs.first_txoutindex + .collect_range_at(starting_height.to_usize(), target_height.to_usize() + 2.min(indexer.vecs.outputs.first_txoutindex.len())); // Iterate blocks for h in starting_height.to_usize()..=target_height.to_usize() { let height = Height::from(h); + let local_idx = h - starting_height.to_usize(); // Get output range for this block - let first_txoutindex = height_to_first_txoutindex.get_unwrap(height); - let next_first_txoutindex = if height < target_height { - height_to_first_txoutindex.get_unwrap(height.incremented()) + let first_txoutindex = first_txoutindexes[local_idx]; + let next_first_txoutindex = if let Some(&next) = 
first_txoutindexes.get(local_idx + 1) { + next } else { TxOutIndex::from(indexer.vecs.outputs.value.len()) }; - // Sum opreturn values - let mut opreturn_value = Sats::ZERO; - for i in first_txoutindex.to_usize()..next_first_txoutindex.to_usize() { - let txoutindex = TxOutIndex::from(i); - let outputtype = txoutindex_to_outputtype.get_unwrap(txoutindex); + let out_start = first_txoutindex.to_usize(); + let out_end = next_first_txoutindex.to_usize(); - if outputtype == OutputType::OpReturn { - let value = txoutindex_to_value.get_unwrap(txoutindex); - opreturn_value += value; - } - } + // Sum opreturn values — batch read both ranges for the block + let values = indexer.vecs.outputs.value.collect_range_at(out_start, out_end); + let opreturn_value = indexer.vecs.outputs.outputtype.fold_range_at( + out_start, out_end, + (Sats::ZERO, 0_usize), + |(mut sum, idx), ot| { + if ot == OutputType::OpReturn { + sum += values[idx]; + } + (sum, idx + 1) + }, + ).0; height_vec.truncate_push(height, opreturn_value)?; } diff --git a/crates/brk_computer/src/scripts/value/import.rs b/crates/brk_computer/src/scripts/value/import.rs index 64e04fe08..80558adce 100644 --- a/crates/brk_computer/src/scripts/value/import.rs +++ b/crates/brk_computer/src/scripts/value/import.rs @@ -3,14 +3,14 @@ use brk_types::Version; use vecdb::Database; use super::Vecs; -use crate::{indexes, internal::ValueFromHeightFull, price}; +use crate::{indexes, internal::ValueFromHeightFull, prices}; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, ) -> Result { Ok(Self { opreturn: ValueFromHeightFull::forced_import( @@ -18,7 +18,7 @@ impl Vecs { "opreturn_value", version, indexes, - price, + prices, )?, }) } diff --git a/crates/brk_computer/src/scripts/value/vecs.rs b/crates/brk_computer/src/scripts/value/vecs.rs index a6bd4ec43..1f570b72f 100644 --- 
a/crates/brk_computer/src/scripts/value/vecs.rs +++ b/crates/brk_computer/src/scripts/value/vecs.rs @@ -1,8 +1,9 @@ use brk_traversable::Traversable; +use vecdb::{Rw, StorageMode}; use crate::internal::ValueFromHeightFull; -#[derive(Clone, Traversable)] -pub struct Vecs { - pub opreturn: ValueFromHeightFull, +#[derive(Traversable)] +pub struct Vecs { + pub opreturn: ValueFromHeightFull, } diff --git a/crates/brk_computer/src/supply/burned/compute.rs b/crates/brk_computer/src/supply/burned/compute.rs index 8bd5c8860..a4751c277 100644 --- a/crates/brk_computer/src/supply/burned/compute.rs +++ b/crates/brk_computer/src/supply/burned/compute.rs @@ -1,22 +1,21 @@ use brk_error::Result; use brk_types::{Height, Sats}; -use vecdb::{AnyStoredVec, AnyVec, Exit, GenericStoredVec, TypedVecIterator, VecIndex}; +use vecdb::{AnyStoredVec, AnyVec, Exit, ReadableVec, WritableVec, VecIndex}; use super::Vecs; -use crate::{ComputeIndexes, blocks, indexes, scripts}; +use crate::{ComputeIndexes, mining, scripts}; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, - indexes: &indexes::Vecs, scripts: &scripts::Vecs, - blocks: &blocks::Vecs, + mining: &mining::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { // 1. 
Compute opreturn supply - copy per-block opreturn values from scripts self.opreturn - .compute_all(indexes, starting_indexes, exit, |height_vec| { + .compute(starting_indexes, exit, |height_vec| { // Validate computed versions against dependencies let opreturn_dep_version = scripts.value.opreturn.sats.height.version(); @@ -31,14 +30,15 @@ impl Vecs { Height::from(current_len.min(starting_indexes.height.to_usize())); if starting_height <= target_height { - let mut opreturn_value_iter = - scripts.value.opreturn.sats.height.into_iter(); - - for h in starting_height.to_usize()..=target_height.to_usize() { - let height = Height::from(h); - let value = opreturn_value_iter.get_unwrap(height); - height_vec.truncate_push(height, value)?; - } + let start = starting_height.to_usize(); + let end = target_height.to_usize() + 1; + scripts.value.opreturn.sats.height.fold_range_at( + start, end, start, + |idx, value| { + height_vec.truncate_push(Height::from(idx), value).unwrap(); + idx + 1 + }, + ); } } @@ -49,10 +49,10 @@ impl Vecs { // 2. 
Compute unspendable supply = opreturn + unclaimed_rewards + genesis (at height 0) // Get reference to opreturn height vec for computing unspendable let opreturn_height = &self.opreturn.sats.height; - let unclaimed_height = &blocks.rewards.unclaimed_rewards.sats.height; + let unclaimed_height = &mining.rewards.unclaimed_rewards.sats.height; self.unspendable - .compute_all(indexes, starting_indexes, exit, |height_vec| { + .compute(starting_indexes, exit, |height_vec| { let unspendable_dep_version = opreturn_height.version() + unclaimed_height.version(); height_vec.validate_computed_version_or_reset(unspendable_dep_version)?; @@ -65,29 +65,19 @@ impl Vecs { Height::from(current_len.min(starting_indexes.height.to_usize())); if starting_height <= target_height { - let mut opreturn_iter = opreturn_height.into_iter(); - - let mut unclaimed_rewards_iter = unclaimed_height.into_iter(); - - for h in starting_height.to_usize()..=target_height.to_usize() { - let height = Height::from(h); - - // Genesis block 50 BTC is unspendable (only at height 0) - let genesis = if height == Height::ZERO { - Sats::FIFTY_BTC - } else { - Sats::ZERO - }; - - // Per-block opreturn value - let opreturn = opreturn_iter.get_unwrap(height); - - // Per-block unclaimed rewards - let unclaimed = unclaimed_rewards_iter.get_unwrap(height); - - let unspendable = genesis + opreturn + unclaimed; - height_vec.truncate_push(height, unspendable)?; - } + let start = starting_height.to_usize(); + let end = target_height.to_usize() + 1; + let unclaimed_data = unclaimed_height.collect_range_at(start, end); + opreturn_height.fold_range_at( + start, end, start, + |idx, opreturn| { + let unclaimed = unclaimed_data[idx - start]; + let genesis = if idx == 0 { Sats::FIFTY_BTC } else { Sats::ZERO }; + let unspendable = genesis + opreturn + unclaimed; + height_vec.truncate_push(Height::from(idx), unspendable).unwrap(); + idx + 1 + }, + ); } } diff --git a/crates/brk_computer/src/supply/burned/import.rs 
b/crates/brk_computer/src/supply/burned/import.rs index 740f8ce76..dde2f9977 100644 --- a/crates/brk_computer/src/supply/burned/import.rs +++ b/crates/brk_computer/src/supply/burned/import.rs @@ -3,14 +3,14 @@ use brk_types::Version; use vecdb::Database; use super::Vecs; -use crate::{indexes, internal::ValueFromHeightSumCum, price}; +use crate::{indexes, internal::ValueFromHeightSumCum, prices}; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, ) -> Result { Ok(Self { opreturn: ValueFromHeightSumCum::forced_import( @@ -18,14 +18,14 @@ impl Vecs { "opreturn_supply", version, indexes, - price, + prices, )?, unspendable: ValueFromHeightSumCum::forced_import( db, "unspendable_supply", version, indexes, - price, + prices, )?, }) } diff --git a/crates/brk_computer/src/supply/burned/vecs.rs b/crates/brk_computer/src/supply/burned/vecs.rs index 5c6fddd0b..88692a984 100644 --- a/crates/brk_computer/src/supply/burned/vecs.rs +++ b/crates/brk_computer/src/supply/burned/vecs.rs @@ -1,10 +1,11 @@ use brk_traversable::Traversable; +use vecdb::{Rw, StorageMode}; use crate::internal::ValueFromHeightSumCum; /// Burned/unspendable supply metrics -#[derive(Clone, Traversable)] -pub struct Vecs { - pub opreturn: ValueFromHeightSumCum, - pub unspendable: ValueFromHeightSumCum, +#[derive(Traversable)] +pub struct Vecs { + pub opreturn: ValueFromHeightSumCum, + pub unspendable: ValueFromHeightSumCum, } diff --git a/crates/brk_computer/src/supply/compute.rs b/crates/brk_computer/src/supply/compute.rs index 1e4a569bd..6f438ac15 100644 --- a/crates/brk_computer/src/supply/compute.rs +++ b/crates/brk_computer/src/supply/compute.rs @@ -2,15 +2,15 @@ use brk_error::Result; use vecdb::Exit; use super::Vecs; -use crate::{ComputeIndexes, blocks, distribution, indexes, scripts, transactions}; +use crate::{ComputeIndexes, blocks, distribution, mining, scripts, 
transactions}; impl Vecs { #[allow(clippy::too_many_arguments)] - pub fn compute( + pub(crate) fn compute( &mut self, - indexes: &indexes::Vecs, scripts: &scripts::Vecs, blocks: &blocks::Vecs, + mining: &mining::Vecs, transactions: &transactions::Vecs, distribution: &distribution::Vecs, starting_indexes: &ComputeIndexes, @@ -18,60 +18,39 @@ impl Vecs { ) -> Result<()> { // 1. Compute burned/unspendable supply self.burned - .compute(indexes, scripts, blocks, starting_indexes, exit)?; + .compute(scripts, mining, starting_indexes, exit)?; - // 2. Compute inflation rate: daily_subsidy / circulating_supply * 365 * 100 + // 2. Compute inflation rate at height level: (supply[h] - supply[1y_ago]) / supply[1y_ago] * 100 let circulating_supply = &distribution.utxo_cohorts.all.metrics.supply.total.sats; - self.inflation.compute_all(starting_indexes, exit, |v| { - v.compute_transform2( - starting_indexes.dateindex, - &blocks.rewards.subsidy.sats.dateindex.sum_cum.sum.0, - &circulating_supply.dateindex.0, - |(i, subsidy_1d_sum, supply, ..)| { - let inflation = if *supply > 0 { - 365.0 * *subsidy_1d_sum as f64 / *supply as f64 * 100.0 - } else { - 0.0 - }; - (i, inflation.into()) - }, + self.inflation.height.compute_rolling_percentage_change( + starting_indexes.height, + &blocks.count.height_1y_ago, + &circulating_supply.height, + exit, + )?; + + // 3. Compute velocity at height level + self.velocity + .compute(blocks, transactions, distribution, starting_indexes, exit)?; + + // 4. Compute cap growth rates at height level using 1y lookback + self.market_cap_growth_rate + .height + .compute_rolling_percentage_change( + starting_indexes.height, + &blocks.count.height_1y_ago, + &self.market_cap.height, exit, )?; - Ok(()) - })?; - // 3. Compute velocity - self.velocity - .compute(transactions, distribution, starting_indexes, exit)?; - - // 4. 
Compute cap growth rates - if let Some(market_cap) = self.market_cap.as_ref() { - let mcap_dateindex = &market_cap.dateindex.0; - self.market_cap_growth_rate - .compute_all(starting_indexes, exit, |vec| { - vec.compute_percentage_change( - starting_indexes.dateindex, - mcap_dateindex, - 365, - exit, - )?; - Ok(()) - })?; - } - - if let Some(realized) = distribution.utxo_cohorts.all.metrics.realized.as_ref() { - let rcap_dateindex = &realized.realized_cap.dateindex.0; - self.realized_cap_growth_rate - .compute_all(starting_indexes, exit, |vec| { - vec.compute_percentage_change( - starting_indexes.dateindex, - rcap_dateindex, - 365, - exit, - )?; - Ok(()) - })?; - } + self.realized_cap_growth_rate + .height + .compute_rolling_percentage_change( + starting_indexes.height, + &blocks.count.height_1y_ago, + &distribution.utxo_cohorts.all.metrics.realized.realized_cap.height, + exit, + )?; // Note: circulating, market_cap, cap_growth_rate_diff are lazy diff --git a/crates/brk_computer/src/supply/import.rs b/crates/brk_computer/src/supply/import.rs index bc0c1a1a4..b7c28eb02 100644 --- a/crates/brk_computer/src/supply/import.rs +++ b/crates/brk_computer/src/supply/import.rs @@ -3,34 +3,33 @@ use std::path::Path; use brk_error::Result; use brk_traversable::Traversable; use brk_types::Version; -use vecdb::{Database, IterableCloneableVec, LazyVecFrom2, PAGE_SIZE}; +use vecdb::{Database, ReadableCloneableVec, PAGE_SIZE}; use super::Vecs; use crate::{ distribution, indexes, internal::{ - ComputedFromDateAverage, ComputedFromDateLast, DifferenceF32, DollarsIdentity, - LazyFromHeightLast, LazyValueFromHeightLast, SatsIdentity, + ComputedFromHeightLast, DifferenceF32, DollarsIdentity, + LazyBinaryComputedFromHeightLast, LazyFromHeightLast, LazyValueFromHeightLast, + SatsIdentity, }, - price, + prices, }; const VERSION: Version = Version::ONE; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( parent: &Path, parent_version: Version, indexes: &indexes::Vecs, - price: 
Option<&price::Vecs>, + prices: &prices::Vecs, distribution: &distribution::Vecs, ) -> Result { let db = Database::open(&parent.join(super::DB_NAME))?; db.set_min_len(PAGE_SIZE * 10_000_000)?; let version = parent_version + VERSION; - let compute_dollars = price.is_some(); - let supply_metrics = &distribution.utxo_cohorts.all.metrics.supply; // Circulating supply - lazy refs to distribution @@ -41,45 +40,45 @@ impl Vecs { ); // Burned/unspendable supply - computed from scripts - let burned = super::burned::Vecs::forced_import(&db, version, indexes, price)?; + let burned = super::burned::Vecs::forced_import(&db, version, indexes, prices)?; // Inflation rate let inflation = - ComputedFromDateAverage::forced_import(&db, "inflation_rate", version, indexes)?; + ComputedFromHeightLast::forced_import(&db, "inflation_rate", version, indexes)?; // Velocity let velocity = - super::velocity::Vecs::forced_import(&db, version, indexes, compute_dollars)?; + super::velocity::Vecs::forced_import(&db, version, indexes)?; // Market cap - lazy identity from distribution supply in USD - let market_cap = supply_metrics.total.dollars.as_ref().map(|d| { - LazyFromHeightLast::from_lazy_binary_computed::( - "market_cap", - version, - d.height.boxed_clone(), - d, - ) - }); + let market_cap = LazyFromHeightLast::from_lazy_binary_computed::( + "market_cap", + version, + supply_metrics.total.usd.height.read_only_boxed_clone(), + &supply_metrics.total.usd, + ); // Growth rates - let market_cap_growth_rate = ComputedFromDateLast::forced_import( + let market_cap_growth_rate = ComputedFromHeightLast::forced_import( &db, "market_cap_growth_rate", version + Version::ONE, indexes, )?; - let realized_cap_growth_rate = ComputedFromDateLast::forced_import( + let realized_cap_growth_rate = ComputedFromHeightLast::forced_import( &db, "realized_cap_growth_rate", version + Version::ONE, indexes, )?; - let cap_growth_rate_diff = LazyVecFrom2::transformed::( - "cap_growth_rate_diff", - version, - 
market_cap_growth_rate.dateindex.boxed_clone(), - realized_cap_growth_rate.dateindex.boxed_clone(), - ); + let cap_growth_rate_diff = + LazyBinaryComputedFromHeightLast::forced_import::( + "cap_growth_rate_diff", + version, + market_cap_growth_rate.height.read_only_boxed_clone(), + realized_cap_growth_rate.height.read_only_boxed_clone(), + indexes, + ); let this = Self { db, diff --git a/crates/brk_computer/src/supply/vecs.rs b/crates/brk_computer/src/supply/vecs.rs index 422db7bcb..e04d73303 100644 --- a/crates/brk_computer/src/supply/vecs.rs +++ b/crates/brk_computer/src/supply/vecs.rs @@ -1,24 +1,24 @@ use brk_traversable::Traversable; -use brk_types::{DateIndex, Dollars, StoredF32}; -use vecdb::{Database, LazyVecFrom2}; +use brk_types::{Dollars, StoredF32}; +use vecdb::{Database, Rw, StorageMode}; use super::{burned, velocity}; use crate::internal::{ - ComputedFromDateAverage, ComputedFromDateLast, LazyFromHeightLast, LazyValueFromHeightLast, + ComputedFromHeightLast, LazyBinaryComputedFromHeightLast, LazyFromHeightLast, + LazyValueFromHeightLast, }; -#[derive(Clone, Traversable)] -pub struct Vecs { +#[derive(Traversable)] +pub struct Vecs { #[traversable(skip)] pub(crate) db: Database, pub circulating: LazyValueFromHeightLast, - pub burned: burned::Vecs, - pub inflation: ComputedFromDateAverage, - pub velocity: velocity::Vecs, - pub market_cap: Option>, - pub market_cap_growth_rate: ComputedFromDateLast, - pub realized_cap_growth_rate: ComputedFromDateLast, - pub cap_growth_rate_diff: - LazyVecFrom2, + pub burned: burned::Vecs, + pub inflation: ComputedFromHeightLast, + pub velocity: velocity::Vecs, + pub market_cap: LazyFromHeightLast, + pub market_cap_growth_rate: ComputedFromHeightLast, + pub realized_cap_growth_rate: ComputedFromHeightLast, + pub cap_growth_rate_diff: LazyBinaryComputedFromHeightLast, } diff --git a/crates/brk_computer/src/supply/velocity/compute.rs b/crates/brk_computer/src/supply/velocity/compute.rs index 046c8475d..f9246b24f 100644 --- 
a/crates/brk_computer/src/supply/velocity/compute.rs +++ b/crates/brk_computer/src/supply/velocity/compute.rs @@ -2,45 +2,37 @@ use brk_error::Result; use vecdb::Exit; use super::Vecs; -use crate::{ComputeIndexes, distribution, transactions}; +use crate::{blocks, ComputeIndexes, distribution, transactions}; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, + blocks: &blocks::Vecs, transactions: &transactions::Vecs, distribution: &distribution::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - // velocity = annualized_volume / circulating_supply + // velocity = rolling_1y_sum(volume) / circulating_supply let circulating_supply = &distribution.utxo_cohorts.all.metrics.supply.total; - // BTC velocity - self.btc.compute_all(starting_indexes, exit, |v| { - v.compute_divide( - starting_indexes.dateindex, - &*transactions.volume.annualized_volume.bitcoin.dateindex, - &*circulating_supply.bitcoin.dateindex, - exit, - )?; - Ok(()) - })?; + // BTC velocity at height level + self.btc.height.compute_rolling_ratio( + starting_indexes.height, + &blocks.count.height_1y_ago, + &transactions.volume.sent_sum.sats.height, + &circulating_supply.sats.height, + exit, + )?; - // USD velocity - if let Some(usd_velocity) = self.usd.as_mut() - && let Some(supply_usd) = circulating_supply.dollars.as_ref() - && let Some(volume_usd) = transactions.volume.annualized_volume.dollars.as_ref() - { - usd_velocity.compute_all(starting_indexes, exit, |v| { - v.compute_divide( - starting_indexes.dateindex, - &volume_usd.dateindex, - &supply_usd.dateindex.0, - exit, - )?; - Ok(()) - })?; - } + // USD velocity at height level + self.usd.height.compute_rolling_ratio( + starting_indexes.height, + &blocks.count.height_1y_ago, + &transactions.volume.sent_sum.usd.height, + &circulating_supply.usd.height, + exit, + )?; Ok(()) } diff --git a/crates/brk_computer/src/supply/velocity/import.rs b/crates/brk_computer/src/supply/velocity/import.rs index ec9da80e8..4b09d7edb 
100644 --- a/crates/brk_computer/src/supply/velocity/import.rs +++ b/crates/brk_computer/src/supply/velocity/import.rs @@ -3,21 +3,17 @@ use brk_types::Version; use vecdb::Database; use super::Vecs; -use crate::{indexes, internal::ComputedFromDateAverage}; +use crate::{indexes, internal::ComputedFromHeightLast}; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, indexes: &indexes::Vecs, - compute_dollars: bool, ) -> Result { Ok(Self { - btc: ComputedFromDateAverage::forced_import(db, "btc_velocity", version, indexes)?, - usd: compute_dollars.then(|| { - ComputedFromDateAverage::forced_import(db, "usd_velocity", version, indexes) - .unwrap() - }), + btc: ComputedFromHeightLast::forced_import(db, "btc_velocity", version, indexes)?, + usd: ComputedFromHeightLast::forced_import(db, "usd_velocity", version, indexes)?, }) } } diff --git a/crates/brk_computer/src/supply/velocity/vecs.rs b/crates/brk_computer/src/supply/velocity/vecs.rs index cbaa852d6..96a49609b 100644 --- a/crates/brk_computer/src/supply/velocity/vecs.rs +++ b/crates/brk_computer/src/supply/velocity/vecs.rs @@ -1,11 +1,12 @@ use brk_traversable::Traversable; use brk_types::StoredF64; +use vecdb::{Rw, StorageMode}; -use crate::internal::ComputedFromDateAverage; +use crate::internal::ComputedFromHeightLast; /// Velocity metrics (annualized volume / circulating supply) -#[derive(Clone, Traversable)] -pub struct Vecs { - pub btc: ComputedFromDateAverage, - pub usd: Option>, +#[derive(Traversable)] +pub struct Vecs { + pub btc: ComputedFromHeightLast, + pub usd: ComputedFromHeightLast, } diff --git a/crates/brk_computer/src/traits/mod.rs b/crates/brk_computer/src/traits/mod.rs index 3320ed954..20e67865e 100644 --- a/crates/brk_computer/src/traits/mod.rs +++ b/crates/brk_computer/src/traits/mod.rs @@ -1,8 +1,7 @@ use brk_error::Result; -use brk_types::{Bitcoin, CheckedSub, Close, Date, DateIndex, Dollars, Sats, StoredF32}; +use brk_types::StoredF32; use vecdb::{ 
- AnyStoredVec, AnyVec, EagerVec, Exit, GenericStoredVec, IterableVec, PcoVec, VecIndex, VecValue, - Version, + AnyVec, EagerVec, Exit, PcoVec, PcoVecValue, ReadableVec, VecIndex, VecValue, WritableVec, }; mod pricing; @@ -10,297 +9,283 @@ mod pricing; // TODO: Re-export when Phase 3 (Pricing migration) is complete // pub use pricing::{Priced, Pricing, Unpriced}; -const DCA_AMOUNT: Dollars = Dollars::mint(100.0); - -pub trait ComputeDCAStackViaLen { - fn compute_dca_stack_via_len( - &mut self, - max_from: DateIndex, - closes: &impl IterableVec>, - len: usize, - exit: &Exit, - ) -> Result<()>; - - fn compute_dca_stack_via_from( - &mut self, - max_from: DateIndex, - closes: &impl IterableVec>, - from: DateIndex, - exit: &Exit, - ) -> Result<()>; -} - -impl ComputeDCAStackViaLen for EagerVec> { - fn compute_dca_stack_via_len( - &mut self, - max_from: DateIndex, - closes: &impl IterableVec>, - len: usize, - exit: &Exit, - ) -> Result<()> { - self.validate_computed_version_or_reset(closes.version())?; - - let index = max_from.to_usize().min(self.len()); - - // Initialize prev before the loop to avoid checking on every iteration - let mut prev = if index == 0 { - Sats::ZERO - } else { - self.read_at_unwrap_once(index - 1) - }; - - let mut lookback = closes.create_lookback(index, len, 0); - - closes - .iter() - .enumerate() - .skip(index) - .try_for_each(|(i, closes)| { - let price = *closes; - let i_usize = i.to_usize(); - - let mut stack = Sats::ZERO; - - if price != Dollars::ZERO { - stack = prev + Sats::from(Bitcoin::from(DCA_AMOUNT / price)); - - let prev_price = - *lookback.get_and_push(i_usize, Close::new(price), Close::default()); - if prev_price != Dollars::ZERO { - stack = stack - .checked_sub(Sats::from(Bitcoin::from(DCA_AMOUNT / prev_price))) - .unwrap(); - } - } - - prev = stack; - - self.truncate_push_at(i, stack) - })?; - - let _lock = exit.lock(); - self.write()?; - - Ok(()) - } - - fn compute_dca_stack_via_from( - &mut self, - max_from: DateIndex, - 
closes: &impl IterableVec>, - from: DateIndex, - exit: &Exit, - ) -> Result<()> { - self.validate_computed_version_or_reset(closes.version())?; - - let from = from.to_usize(); - let index = max_from.min(DateIndex::from(self.len())); - - // Initialize prev before the loop to avoid checking on every iteration - let mut prev = if index.to_usize() == 0 { - Sats::ZERO - } else { - self.read_at_unwrap_once(index.to_usize() - 1) - }; - - closes - .iter() - .enumerate() - .skip(index.to_usize()) - .try_for_each(|(i, closes)| { - let price = *closes; - - let mut stack = Sats::ZERO; - - if price != Dollars::ZERO && i >= from { - stack = prev + Sats::from(Bitcoin::from(DCA_AMOUNT / price)); - } - - prev = stack; - - self.truncate_push_at(i, stack) - })?; - - let _lock = exit.lock(); - self.write()?; - - Ok(()) - } -} - -pub trait ComputeDCAAveragePriceViaLen { - fn compute_dca_average_price_via_len( - &mut self, - max_from: DateIndex, - stacks: &impl IterableVec, - len: usize, - exit: &Exit, - ) -> Result<()>; - - fn compute_dca_average_price_via_from( - &mut self, - max_from: DateIndex, - stacks: &impl IterableVec, - from: DateIndex, - exit: &Exit, - ) -> Result<()>; -} - -impl ComputeDCAAveragePriceViaLen for EagerVec> { - fn compute_dca_average_price_via_len( - &mut self, - max_from: DateIndex, - stacks: &impl IterableVec, - len: usize, - exit: &Exit, - ) -> Result<()> { - self.validate_computed_version_or_reset(Version::ONE + stacks.version())?; - - let index = max_from.min(DateIndex::from(self.len())); - - let first_price_date = DateIndex::try_from(Date::new(2010, 7, 12)) - .unwrap() - .to_usize(); - - stacks - .iter() - .enumerate() - .skip(index.to_usize()) - .try_for_each(|(i, stack)| { - let mut average_price = Dollars::from(f64::NAN); - if i > first_price_date { - average_price = DCA_AMOUNT - * len - .min(i.to_usize() + 1) - .min(i.checked_sub(first_price_date).unwrap().to_usize() + 1) - / Bitcoin::from(stack); - } - self.truncate_push_at(i, average_price) - })?; - 
- let _lock = exit.lock(); - self.write()?; - - Ok(()) - } - - fn compute_dca_average_price_via_from( - &mut self, - max_from: DateIndex, - stacks: &impl IterableVec, - from: DateIndex, - exit: &Exit, - ) -> Result<()> { - self.validate_computed_version_or_reset(stacks.version())?; - - let index = max_from.min(DateIndex::from(self.len())); - - let from = from.to_usize(); - - stacks - .iter() - .enumerate() - .skip(index.to_usize()) - .try_for_each(|(i, stack)| { - let mut average_price = Dollars::from(f64::NAN); - if i >= from { - average_price = DCA_AMOUNT * (i.to_usize() + 1 - from) / Bitcoin::from(stack); - } - self.truncate_push_at(i, average_price) - })?; - - let _lock = exit.lock(); - self.write()?; - - Ok(()) - } -} - -pub trait ComputeLumpSumStackViaLen { - fn compute_lump_sum_stack_via_len( - &mut self, - max_from: DateIndex, - closes: &impl IterableVec>, - lookback_prices: &impl IterableVec, - len: usize, - exit: &Exit, - ) -> Result<()>; -} - -impl ComputeLumpSumStackViaLen for EagerVec> { - /// Compute lump sum stack: sats you would have if you invested (len * DCA_AMOUNT) at the lookback price - fn compute_lump_sum_stack_via_len( - &mut self, - max_from: DateIndex, - closes: &impl IterableVec>, - lookback_prices: &impl IterableVec, - len: usize, - exit: &Exit, - ) -> Result<()> { - self.validate_computed_version_or_reset(closes.version())?; - - let index = max_from.to_usize().min(self.len()); - let total_invested = DCA_AMOUNT * len; - - lookback_prices - .iter() - .enumerate() - .skip(index) - .try_for_each(|(i, lookback_price)| { - let stack = if lookback_price == Dollars::ZERO { - Sats::ZERO - } else { - Sats::from(Bitcoin::from(total_invested / lookback_price)) - }; - - self.truncate_push_at(i, stack) - })?; - - let _lock = exit.lock(); - self.write()?; - - Ok(()) - } -} - -pub trait ComputeFromBitcoin { - fn compute_from_bitcoin( +pub trait ComputeRollingMinFromStarts { + fn compute_rolling_min_from_starts( &mut self, max_from: I, - bitcoin: &impl 
IterableVec, - price: &impl IterableVec>, + window_starts: &impl ReadableVec, + values: &impl ReadableVec, exit: &Exit, - ) -> Result<()>; + ) -> Result<()> + where + A: VecValue + Ord, + T: From; } -impl ComputeFromBitcoin for EagerVec> +impl ComputeRollingMinFromStarts for EagerVec> where I: VecIndex, + T: PcoVecValue, { - fn compute_from_bitcoin( + fn compute_rolling_min_from_starts( &mut self, max_from: I, - bitcoin: &impl IterableVec, - price: &impl IterableVec>, + window_starts: &impl ReadableVec, + values: &impl ReadableVec, exit: &Exit, - ) -> Result<()> { - self.compute_transform2( - max_from, - bitcoin, - price, - |(i, bitcoin, price, _)| (i, *price * bitcoin), - exit, - )?; + ) -> Result<()> + where + A: VecValue + Ord, + T: From, + { + self.validate_computed_version_or_reset(window_starts.version() + values.version())?; + self.truncate_if_needed(max_from)?; + + self.repeat_until_complete(exit, |this| { + let skip = this.len(); + let mut deque: std::collections::VecDeque<(usize, A)> = + std::collections::VecDeque::new(); + + let start_offset = if skip > 0 { + window_starts.collect_one_at(skip - 1).unwrap().to_usize() + } else { + 0 + }; + + let end = window_starts.len().min(values.len()); + let starts_batch = window_starts.collect_range_at(start_offset, end); + let values_batch = values.collect_range_at(start_offset, end); + + for (j, (start, value)) in starts_batch.into_iter().zip(values_batch).enumerate() { + let i = start_offset + j; + let start_usize = start.to_usize(); + while let Some(&(idx, _)) = deque.front() { + if idx < start_usize { + deque.pop_front(); + } else { + break; + } + } + while let Some((_, back)) = deque.back() { + if *back >= value { + deque.pop_back(); + } else { + break; + } + } + deque.push_back((i, value)); + + if i >= skip { + let min_val = deque.front().unwrap().1.clone(); + this.checked_push_at(i, T::from(min_val))?; + if this.batch_limit_reached() { + break; + } + } + } + + Ok(()) + })?; + Ok(()) } } -pub trait 
ComputeDrawdown { +pub trait ComputeRollingMaxFromStarts { + fn compute_rolling_max_from_starts( + &mut self, + max_from: I, + window_starts: &impl ReadableVec, + values: &impl ReadableVec, + exit: &Exit, + ) -> Result<()> + where + A: VecValue + Ord, + T: From; +} + +impl ComputeRollingMaxFromStarts for EagerVec> +where + I: VecIndex, + T: PcoVecValue, +{ + fn compute_rolling_max_from_starts( + &mut self, + max_from: I, + window_starts: &impl ReadableVec, + values: &impl ReadableVec, + exit: &Exit, + ) -> Result<()> + where + A: VecValue + Ord, + T: From, + { + self.validate_computed_version_or_reset(window_starts.version() + values.version())?; + self.truncate_if_needed(max_from)?; + + self.repeat_until_complete(exit, |this| { + let skip = this.len(); + let mut deque: std::collections::VecDeque<(usize, A)> = + std::collections::VecDeque::new(); + + let start_offset = if skip > 0 { + window_starts.collect_one_at(skip - 1).unwrap().to_usize() + } else { + 0 + }; + + let end = window_starts.len().min(values.len()); + let starts_batch = window_starts.collect_range_at(start_offset, end); + let values_batch = values.collect_range_at(start_offset, end); + + for (j, (start, value)) in starts_batch.into_iter().zip(values_batch).enumerate() { + let i = start_offset + j; + let start_usize = start.to_usize(); + while let Some(&(idx, _)) = deque.front() { + if idx < start_usize { + deque.pop_front(); + } else { + break; + } + } + while let Some((_, back)) = deque.back() { + if *back <= value { + deque.pop_back(); + } else { + break; + } + } + deque.push_back((i, value)); + + if i >= skip { + let max_val = deque.front().unwrap().1.clone(); + this.checked_push_at(i, T::from(max_val))?; + if this.batch_limit_reached() { + break; + } + } + } + + Ok(()) + })?; + + Ok(()) + } +} + +pub trait ComputeRollingMedianFromStarts { + fn compute_rolling_median_from_starts( + &mut self, + max_from: I, + window_starts: &impl ReadableVec, + values: &impl ReadableVec, + exit: &Exit, + ) -> 
Result<()> + where + A: VecValue + Copy, + f64: From; +} + +impl ComputeRollingMedianFromStarts for EagerVec> +where + I: VecIndex, + T: PcoVecValue + From, +{ + fn compute_rolling_median_from_starts( + &mut self, + max_from: I, + window_starts: &impl ReadableVec, + values: &impl ReadableVec, + exit: &Exit, + ) -> Result<()> + where + A: VecValue + Copy, + f64: From, + { + self.validate_computed_version_or_reset(window_starts.version() + values.version())?; + + self.truncate_if_needed(max_from)?; + + self.repeat_until_complete(exit, |this| { + let skip = this.len(); + let end = window_starts.len().min(values.len()); + + // Only collect the range needed: from window start of previous + // element to end. For incremental (1 block) this is ~window_size + // instead of the full history. + let range_start = if skip > 0 { + window_starts.collect_one_at(skip - 1).unwrap().to_usize() + } else { + 0 + }; + let partial_values: Vec = values.collect_range_at(range_start, end); + + let mut sorted: Vec = Vec::new(); + let mut prev_start_usize: usize = range_start; + + // Reconstruct state from historical data + if skip > 0 { + (range_start..skip).for_each(|idx| { + let v = f64::from(partial_values[idx - range_start]); + let pos = sorted + .binary_search_by(|a| { + a.partial_cmp(&v).unwrap_or(std::cmp::Ordering::Equal) + }) + .unwrap_or_else(|x| x); + sorted.insert(pos, v); + }); + } + + let starts_batch = window_starts.collect_range_at(skip, end); + + for (j, start) in starts_batch.into_iter().enumerate() { + let i = skip + j; + let v = f64::from(partial_values[i - range_start]); + let pos = sorted + .binary_search_by(|a| a.partial_cmp(&v).unwrap_or(std::cmp::Ordering::Equal)) + .unwrap_or_else(|x| x); + sorted.insert(pos, v); + + let start_usize = start.to_usize(); + while prev_start_usize < start_usize { + let old = f64::from(partial_values[prev_start_usize - range_start]); + if let Ok(pos) = sorted.binary_search_by(|a| { + 
a.partial_cmp(&old).unwrap_or(std::cmp::Ordering::Equal) + }) { + sorted.remove(pos); + } + prev_start_usize += 1; + } + + let median = if sorted.is_empty() { + 0.0 + } else if sorted.len().is_multiple_of(2) { + let mid = sorted.len() / 2; + (sorted[mid - 1] + sorted[mid]) / 2.0 + } else { + sorted[sorted.len() / 2] + }; + + this.checked_push_at(i, T::from(median))?; + + if this.batch_limit_reached() { + break; + } + } + + Ok(()) + })?; + + Ok(()) + } +} + +pub trait ComputeDrawdown { fn compute_drawdown( &mut self, max_from: I, - current: &impl IterableVec, - ath: &impl IterableVec, + current: &impl ReadableVec, + ath: &impl ReadableVec, exit: &Exit, ) -> Result<()> where @@ -316,8 +301,8 @@ where fn compute_drawdown( &mut self, max_from: I, - current: &impl IterableVec, - ath: &impl IterableVec, + current: &impl ReadableVec, + ath: &impl ReadableVec, exit: &Exit, ) -> Result<()> where diff --git a/crates/brk_computer/src/traits/pricing.rs b/crates/brk_computer/src/traits/pricing.rs index 64ad4ce6a..ada2def3d 100644 --- a/crates/brk_computer/src/traits/pricing.rs +++ b/crates/brk_computer/src/traits/pricing.rs @@ -29,7 +29,7 @@ use brk_traversable::Traversable; /// | `Data` | Computer top-level | `PricingData` | `()` | /// | `PriceRef<'a>` | Function params | `&price::Vecs` | `()` | /// | `ComputedDollarsHeight` | Value wrappers (Height) | `ComputedFromHeight` | `()` | -/// | `ComputedDollarsDateIndex` | Value wrappers (DateIndex) | `ComputedVecsDate` | `()` | +/// | `ComputedDollarsDay1` | Value wrappers (Day1) | `ComputedVecsDate` | `()` | /// | `StdDevBandsUsd` | StdDev USD bands | `StdDevBandsUsdData` | `()` | /// | `RatioUsd` | Ratio USD variants | `RatioUsdData` | `()` | /// | `BasePriced` | Base metrics | `BasePricedData` | `()` | @@ -50,8 +50,8 @@ pub trait Pricing: 'static + Clone + Send + Sync { /// Computed dollars with Height index type ComputedDollarsHeight: Clone + Send + Sync + Traversable; - /// Computed dollars with DateIndex index - type 
ComputedDollarsDateIndex: Clone + Send + Sync + Traversable; + /// Computed dollars with Day1 index + type ComputedDollarsDay1: Clone + Send + Sync + Traversable; // === Specialized structs === @@ -100,7 +100,7 @@ impl Pricing for Priced { type Data = (); type PriceRef<'a> = (); type ComputedDollarsHeight = (); - type ComputedDollarsDateIndex = (); + type ComputedDollarsDay1 = (); type StdDevBandsUsd = (); type RatioUsd = (); type BasePriced = (); @@ -113,7 +113,7 @@ impl Pricing for Unpriced { type Data = (); type PriceRef<'a> = (); type ComputedDollarsHeight = (); - type ComputedDollarsDateIndex = (); + type ComputedDollarsDay1 = (); type StdDevBandsUsd = (); type RatioUsd = (); type BasePriced = (); diff --git a/crates/brk_computer/src/transactions/compute.rs b/crates/brk_computer/src/transactions/compute.rs index 8d1929dee..8eafec644 100644 --- a/crates/brk_computer/src/transactions/compute.rs +++ b/crates/brk_computer/src/transactions/compute.rs @@ -2,16 +2,17 @@ use brk_error::Result; use brk_indexer::Indexer; use vecdb::Exit; -use crate::{indexes, inputs, outputs, ComputeIndexes}; +use crate::{blocks, indexes, inputs, outputs, ComputeIndexes}; use super::Vecs; impl Vecs { #[allow(clippy::too_many_arguments)] - pub fn compute( + pub(crate) fn compute( &mut self, indexer: &Indexer, indexes: &indexes::Vecs, + blocks: &blocks::Vecs, inputs: &inputs::Vecs, outputs: &outputs::Vecs, starting_indexes: &ComputeIndexes, @@ -19,11 +20,11 @@ impl Vecs { ) -> Result<()> { // Count computes first self.count - .compute(indexer, indexes, starting_indexes, exit)?; + .compute(indexer, starting_indexes, exit)?; // Versions depends on count self.versions - .compute(indexer, indexes, starting_indexes, exit)?; + .compute(indexer, starting_indexes, exit)?; // Size computes next self.size @@ -39,10 +40,11 @@ impl Vecs { exit, )?; - // Volume depends on fees and input/output counts + // Volume depends on fees, counts, and blocks (lookback vecs, interval) self.volume.compute( 
indexer, indexes, + blocks, &self.count, &self.fees, &inputs.count, diff --git a/crates/brk_computer/src/transactions/count/compute.rs b/crates/brk_computer/src/transactions/count/compute.rs index abf91b9db..6190a689c 100644 --- a/crates/brk_computer/src/transactions/count/compute.rs +++ b/crates/brk_computer/src/transactions/count/compute.rs @@ -3,18 +3,17 @@ use brk_indexer::Indexer; use vecdb::Exit; use super::Vecs; -use crate::{ComputeIndexes, indexes}; +use crate::ComputeIndexes; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, indexer: &Indexer, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { self.tx_count - .compute_all(indexes, starting_indexes, exit, |v| { + .compute(starting_indexes, exit, |v| { v.compute_count_from_indexes( starting_indexes.height, &indexer.vecs.transactions.first_txindex, diff --git a/crates/brk_computer/src/transactions/count/import.rs b/crates/brk_computer/src/transactions/count/import.rs index 9ce41e2fa..3261aa30f 100644 --- a/crates/brk_computer/src/transactions/count/import.rs +++ b/crates/brk_computer/src/transactions/count/import.rs @@ -1,13 +1,13 @@ use brk_error::Result; use brk_indexer::Indexer; use brk_types::{StoredBool, TxIndex, Version}; -use vecdb::{Database, IterableCloneableVec, LazyVecFrom2}; +use vecdb::{Database, ReadableCloneableVec, LazyVecFrom2}; use super::Vecs; use crate::{indexes, internal::ComputedFromHeightFull}; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, indexer: &Indexer, @@ -16,13 +16,10 @@ impl Vecs { let txindex_to_is_coinbase = LazyVecFrom2::init( "is_coinbase", version, - indexer.vecs.transactions.height.boxed_clone(), - indexer.vecs.transactions.first_txindex.boxed_clone(), - |index: TxIndex, txindex_to_height_iter, height_to_first_txindex_iter| { - txindex_to_height_iter.get(index).map(|height| { - let txindex = height_to_first_txindex_iter.get_unwrap(height); - 
StoredBool::from(index == txindex) - }) + indexer.vecs.transactions.height.read_only_boxed_clone(), + indexer.vecs.transactions.first_txindex.read_only_boxed_clone(), + |index: TxIndex, _height, first_txindex| { + StoredBool::from(index == first_txindex) }, ); diff --git a/crates/brk_computer/src/transactions/count/vecs.rs b/crates/brk_computer/src/transactions/count/vecs.rs index 74ad02d45..d1291f7cc 100644 --- a/crates/brk_computer/src/transactions/count/vecs.rs +++ b/crates/brk_computer/src/transactions/count/vecs.rs @@ -1,11 +1,11 @@ use brk_traversable::Traversable; use brk_types::{Height, StoredBool, StoredU64, TxIndex}; -use vecdb::LazyVecFrom2; +use vecdb::{LazyVecFrom2, Rw, StorageMode}; use crate::internal::ComputedFromHeightFull; -#[derive(Clone, Traversable)] -pub struct Vecs { - pub tx_count: ComputedFromHeightFull, +#[derive(Traversable)] +pub struct Vecs { + pub tx_count: ComputedFromHeightFull, pub is_coinbase: LazyVecFrom2, } diff --git a/crates/brk_computer/src/transactions/fees/compute.rs b/crates/brk_computer/src/transactions/fees/compute.rs index e66963810..7998ae3d7 100644 --- a/crates/brk_computer/src/transactions/fees/compute.rs +++ b/crates/brk_computer/src/transactions/fees/compute.rs @@ -9,7 +9,7 @@ use crate::{ComputeIndexes, indexes, inputs}; impl Vecs { #[allow(clippy::too_many_arguments)] - pub fn compute( + pub(crate) fn compute( &mut self, indexer: &Indexer, indexes: &indexes::Vecs, diff --git a/crates/brk_computer/src/transactions/fees/import.rs b/crates/brk_computer/src/transactions/fees/import.rs index 02fe10b67..75aebddbe 100644 --- a/crates/brk_computer/src/transactions/fees/import.rs +++ b/crates/brk_computer/src/transactions/fees/import.rs @@ -4,24 +4,28 @@ use brk_types::Version; use vecdb::{Database, EagerVec, ImportableVec}; use super::Vecs; -use crate::{indexes, internal::{ComputedFromTxDistribution, ValueFromTxFull}, price}; +use crate::{ + indexes, + internal::{ComputedFromTxDistribution, ValueFromTxFull}, + prices, +}; 
/// Bump this when fee/feerate aggregation logic changes (e.g., skip coinbase). const VERSION: Version = Version::ONE; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, indexer: &Indexer, indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, ) -> Result { let v = version + VERSION; Ok(Self { input_value: EagerVec::forced_import(db, "input_value", version)?, output_value: EagerVec::forced_import(db, "output_value", version)?, - fee: ValueFromTxFull::forced_import(db, "fee", v, indexes, indexer, price)?, + fee: ValueFromTxFull::forced_import(db, "fee", v, indexes, indexer, prices)?, fee_rate: ComputedFromTxDistribution::forced_import(db, "fee_rate", v, indexes)?, }) } diff --git a/crates/brk_computer/src/transactions/fees/vecs.rs b/crates/brk_computer/src/transactions/fees/vecs.rs index fd58c6f1b..a4a22315c 100644 --- a/crates/brk_computer/src/transactions/fees/vecs.rs +++ b/crates/brk_computer/src/transactions/fees/vecs.rs @@ -1,13 +1,13 @@ use brk_traversable::Traversable; use brk_types::{FeeRate, Sats, TxIndex}; -use vecdb::{EagerVec, PcoVec}; +use vecdb::{EagerVec, PcoVec, Rw, StorageMode}; use crate::internal::{ComputedFromTxDistribution, ValueFromTxFull}; -#[derive(Clone, Traversable)] -pub struct Vecs { - pub input_value: EagerVec>, - pub output_value: EagerVec>, - pub fee: ValueFromTxFull, - pub fee_rate: ComputedFromTxDistribution, +#[derive(Traversable)] +pub struct Vecs { + pub input_value: M::Stored>>, + pub output_value: M::Stored>>, + pub fee: ValueFromTxFull, + pub fee_rate: ComputedFromTxDistribution, } diff --git a/crates/brk_computer/src/transactions/import.rs b/crates/brk_computer/src/transactions/import.rs index f9cc0fc8b..5e7478000 100644 --- a/crates/brk_computer/src/transactions/import.rs +++ b/crates/brk_computer/src/transactions/import.rs @@ -6,17 +6,17 @@ use brk_traversable::Traversable; use brk_types::Version; use vecdb::{Database, PAGE_SIZE}; -use 
crate::{indexes, price}; +use crate::{indexes, prices}; use super::{CountVecs, FeesVecs, SizeVecs, Vecs, VersionsVecs, VolumeVecs}; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( parent_path: &Path, parent_version: Version, indexer: &Indexer, indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, ) -> Result { let db = Database::open(&parent_path.join(super::DB_NAME))?; db.set_min_len(PAGE_SIZE * 50_000_000)?; @@ -25,9 +25,9 @@ impl Vecs { let count = CountVecs::forced_import(&db, version, indexer, indexes)?; let size = SizeVecs::forced_import(&db, version, indexer, indexes)?; - let fees = FeesVecs::forced_import(&db, version, indexer, indexes, price)?; + let fees = FeesVecs::forced_import(&db, version, indexer, indexes, prices)?; let versions = VersionsVecs::forced_import(&db, version, indexes)?; - let volume = VolumeVecs::forced_import(&db, version, indexes, price)?; + let volume = VolumeVecs::forced_import(&db, version, indexes, prices)?; let this = Self { db, diff --git a/crates/brk_computer/src/transactions/mod.rs b/crates/brk_computer/src/transactions/mod.rs index 6d95959b0..c926ecc5f 100644 --- a/crates/brk_computer/src/transactions/mod.rs +++ b/crates/brk_computer/src/transactions/mod.rs @@ -8,7 +8,7 @@ mod compute; mod import; use brk_traversable::Traversable; -use vecdb::Database; +use vecdb::{Database, Rw, StorageMode}; pub use count::Vecs as CountVecs; pub use fees::Vecs as FeesVecs; @@ -18,14 +18,14 @@ pub use volume::Vecs as VolumeVecs; pub const DB_NAME: &str = "transactions"; -#[derive(Clone, Traversable)] -pub struct Vecs { +#[derive(Traversable)] +pub struct Vecs { #[traversable(skip)] pub(crate) db: Database, - pub count: CountVecs, - pub size: SizeVecs, - pub fees: FeesVecs, - pub versions: VersionsVecs, - pub volume: VolumeVecs, + pub count: CountVecs, + pub size: SizeVecs, + pub fees: FeesVecs, + pub versions: VersionsVecs, + pub volume: VolumeVecs, } diff --git 
a/crates/brk_computer/src/transactions/size/compute.rs b/crates/brk_computer/src/transactions/size/compute.rs index f12604f61..dafd1f190 100644 --- a/crates/brk_computer/src/transactions/size/compute.rs +++ b/crates/brk_computer/src/transactions/size/compute.rs @@ -6,7 +6,7 @@ use super::Vecs; use crate::{indexes, ComputeIndexes}; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, indexer: &Indexer, indexes: &indexes::Vecs, diff --git a/crates/brk_computer/src/transactions/size/import.rs b/crates/brk_computer/src/transactions/size/import.rs index c0873c5a7..5473693dc 100644 --- a/crates/brk_computer/src/transactions/size/import.rs +++ b/crates/brk_computer/src/transactions/size/import.rs @@ -1,13 +1,13 @@ use brk_error::Result; use brk_indexer::Indexer; use brk_types::{TxIndex, VSize, Version, Weight}; -use vecdb::{Database, IterableCloneableVec, LazyVecFrom2, VecIndex}; +use vecdb::{Database, ReadableCloneableVec, LazyVecFrom2}; use super::Vecs; use crate::{indexes, internal::LazyFromTxDistribution}; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, indexer: &Indexer, @@ -16,28 +16,20 @@ impl Vecs { let txindex_to_weight = LazyVecFrom2::init( "tx_weight", version, - indexer.vecs.transactions.base_size.boxed_clone(), - indexer.vecs.transactions.total_size.boxed_clone(), - |index: TxIndex, base_size_iter, total_size_iter| { - let index = index.to_usize(); - base_size_iter.get_at(index).map(|base_size| { - let total_size = total_size_iter.get_at_unwrap(index); - Weight::from_sizes(*base_size, *total_size) - }) + indexer.vecs.transactions.base_size.read_only_boxed_clone(), + indexer.vecs.transactions.total_size.read_only_boxed_clone(), + |_index: TxIndex, base_size, total_size| { + Weight::from_sizes(*base_size, *total_size) }, ); let txindex_to_vsize = LazyVecFrom2::init( "tx_vsize", version, - indexer.vecs.transactions.base_size.boxed_clone(), - indexer.vecs.transactions.total_size.boxed_clone(), - 
|index: TxIndex, base_size_iter, total_size_iter| { - let index = index.to_usize(); - base_size_iter.get_at(index).map(|base_size| { - let total_size = total_size_iter.get_at_unwrap(index); - VSize::from(Weight::from_sizes(*base_size, *total_size)) - }) + indexer.vecs.transactions.base_size.read_only_boxed_clone(), + indexer.vecs.transactions.total_size.read_only_boxed_clone(), + |_index: TxIndex, base_size, total_size| { + VSize::from(Weight::from_sizes(*base_size, *total_size)) }, ); diff --git a/crates/brk_computer/src/transactions/size/vecs.rs b/crates/brk_computer/src/transactions/size/vecs.rs index 46094191a..c96049b51 100644 --- a/crates/brk_computer/src/transactions/size/vecs.rs +++ b/crates/brk_computer/src/transactions/size/vecs.rs @@ -1,10 +1,11 @@ use brk_traversable::Traversable; use brk_types::{StoredU32, VSize, Weight}; +use vecdb::{Rw, StorageMode}; use crate::internal::LazyFromTxDistribution; -#[derive(Clone, Traversable)] -pub struct Vecs { - pub vsize: LazyFromTxDistribution, - pub weight: LazyFromTxDistribution, +#[derive(Traversable)] +pub struct Vecs { + pub vsize: LazyFromTxDistribution, + pub weight: LazyFromTxDistribution, } diff --git a/crates/brk_computer/src/transactions/versions/compute.rs b/crates/brk_computer/src/transactions/versions/compute.rs index 8c290fb0f..c5dabefb0 100644 --- a/crates/brk_computer/src/transactions/versions/compute.rs +++ b/crates/brk_computer/src/transactions/versions/compute.rs @@ -1,29 +1,32 @@ use brk_error::Result; use brk_indexer::Indexer; use brk_types::{StoredU64, TxVersion}; -use vecdb::{Exit, TypedVecIterator}; +use vecdb::{Exit, ReadableVec, VecIndex}; use super::Vecs; -use crate::{ComputeIndexes, indexes, internal::ComputedFromHeightSumCum}; +use crate::{ComputeIndexes, internal::ComputedFromHeightSumCum}; impl Vecs { - pub fn compute( + pub(crate) fn compute( &mut self, indexer: &Indexer, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { let tx_vany = 
|tx_vany: &mut ComputedFromHeightSumCum, txversion: TxVersion| { - let mut txversion_iter = indexer.vecs.transactions.txversion.iter()?; - tx_vany.compute_all(indexes, starting_indexes, exit, |vec| { + let txversion_vec = &indexer.vecs.transactions.txversion; + // Cursor avoids per-transaction PcoVec page decompression. + // Txindex values are sequential, so the cursor only advances forward. + let mut cursor = txversion_vec.cursor(); + tx_vany.compute(starting_indexes, exit, |vec| { vec.compute_filtered_count_from_indexes( starting_indexes.height, &indexer.vecs.transactions.first_txindex, &indexer.vecs.transactions.txid, |txindex| { - let v = txversion_iter.get_unwrap(txindex); - v == txversion + let ti = txindex.to_usize(); + cursor.advance(ti - cursor.position()); + cursor.next().unwrap() == txversion }, exit, )?; diff --git a/crates/brk_computer/src/transactions/versions/import.rs b/crates/brk_computer/src/transactions/versions/import.rs index edebb01cf..78ab67f32 100644 --- a/crates/brk_computer/src/transactions/versions/import.rs +++ b/crates/brk_computer/src/transactions/versions/import.rs @@ -6,7 +6,7 @@ use super::Vecs; use crate::{indexes, internal::ComputedFromHeightSumCum}; impl Vecs { - pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { + pub(crate) fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { Ok(Self { v1: ComputedFromHeightSumCum::forced_import(db, "tx_v1", version, indexes)?, v2: ComputedFromHeightSumCum::forced_import(db, "tx_v2", version, indexes)?, diff --git a/crates/brk_computer/src/transactions/versions/vecs.rs b/crates/brk_computer/src/transactions/versions/vecs.rs index 468dbaeff..d174cd73e 100644 --- a/crates/brk_computer/src/transactions/versions/vecs.rs +++ b/crates/brk_computer/src/transactions/versions/vecs.rs @@ -1,11 +1,12 @@ use brk_traversable::Traversable; use brk_types::StoredU64; +use vecdb::{Rw, StorageMode}; use 
crate::internal::ComputedFromHeightSumCum; -#[derive(Clone, Traversable)] -pub struct Vecs { - pub v1: ComputedFromHeightSumCum, - pub v2: ComputedFromHeightSumCum, - pub v3: ComputedFromHeightSumCum, +#[derive(Traversable)] +pub struct Vecs { + pub v1: ComputedFromHeightSumCum, + pub v2: ComputedFromHeightSumCum, + pub v3: ComputedFromHeightSumCum, } diff --git a/crates/brk_computer/src/transactions/volume/compute.rs b/crates/brk_computer/src/transactions/volume/compute.rs index 2cf285a47..c0ac6552d 100644 --- a/crates/brk_computer/src/transactions/volume/compute.rs +++ b/crates/brk_computer/src/transactions/volume/compute.rs @@ -1,18 +1,19 @@ use brk_error::Result; use brk_indexer::Indexer; -use brk_types::{ONE_DAY_IN_SEC_F64, StoredF32}; +use brk_types::StoredF32; use vecdb::Exit; -use super::super::{count, fees}; use super::Vecs; -use crate::{ComputeIndexes, indexes, inputs, outputs}; +use crate::{blocks, ComputeIndexes, indexes, inputs, outputs}; +use crate::transactions::{count, fees}; impl Vecs { #[allow(clippy::too_many_arguments)] - pub fn compute( + pub(crate) fn compute( &mut self, indexer: &Indexer, indexes: &indexes::Vecs, + blocks: &blocks::Vecs, count_vecs: &count::Vecs, fees_vecs: &fees::Vecs, inputs_count: &inputs::CountVecs, @@ -20,115 +21,81 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.sent_sum - .compute_all(indexes, starting_indexes, exit, |v| { - v.compute_filtered_sum_from_indexes( - starting_indexes.height, - &indexer.vecs.transactions.first_txindex, - &indexes.height.txindex_count, - &fees_vecs.input_value, - |sats| !sats.is_max(), - exit, - )?; - Ok(()) - })?; + self.sent_sum.sats.height.compute_filtered_sum_from_indexes( + starting_indexes.height, + &indexer.vecs.transactions.first_txindex, + &indexes.height.txindex_count, + &fees_vecs.input_value, + |sats| !sats.is_max(), + exit, + )?; - self.received_sum - .compute_all(indexes, starting_indexes, exit, |v| { - v.compute_sum_from_indexes( - 
starting_indexes.height, - &indexer.vecs.transactions.first_txindex, - &indexes.height.txindex_count, - &fees_vecs.output_value, - exit, - )?; - Ok(()) - })?; + self.received_sum.sats.height.compute_sum_from_indexes( + starting_indexes.height, + &indexer.vecs.transactions.first_txindex, + &indexes.height.txindex_count, + &fees_vecs.output_value, + exit, + )?; - self.annualized_volume.compute_sats(|v| { - v.compute_sum( - starting_indexes.dateindex, - &self.sent_sum.sats.dateindex.0, - 365, - exit, - )?; - Ok(()) - })?; + // Annualized volume: rolling 1y sum of per-block sent volume + self.annualized_volume.sats.height.compute_rolling_sum( + starting_indexes.height, + &blocks.count.height_1y_ago, + &self.sent_sum.sats.height, + exit, + )?; - if let Some(sent_sum_dollars) = self.sent_sum.dollars.as_ref() { - self.annualized_volume.compute_dollars(|dollars| { - dollars.compute_all(starting_indexes, exit, |v| { - v.compute_sum( - starting_indexes.dateindex, - &sent_sum_dollars.dateindex.0, - 365, - exit, - )?; - Ok(()) - }) - })?; - } + // tx_per_sec: per-block tx count / block interval + self.tx_per_sec.height.compute_transform2( + starting_indexes.height, + &count_vecs.tx_count.height, + &blocks.interval.interval.height, + |(h, tx_count, interval, ..)| { + let interval_f64 = f64::from(*interval); + let per_sec = if interval_f64 > 0.0 { + StoredF32::from(*tx_count as f64 / interval_f64) + } else { + StoredF32::NAN + }; + (h, per_sec) + }, + exit, + )?; - self.tx_per_sec.compute_all(starting_indexes, exit, |v| { - v.compute_transform2( - starting_indexes.dateindex, - &count_vecs.tx_count.dateindex.sum_cum.sum.0, - &indexes.dateindex.date, - |(i, tx_count, date, ..)| { - let completion = date.completion(); - let per_sec = if completion == 0.0 { - StoredF32::NAN - } else { - StoredF32::from(*tx_count as f64 / (completion * ONE_DAY_IN_SEC_F64)) - }; - (i, per_sec) - }, - exit, - )?; - Ok(()) - })?; + // inputs_per_sec: per-block input count / block interval + 
self.inputs_per_sec.height.compute_transform2( + starting_indexes.height, + &inputs_count.height.sum_cum.sum.0, + &blocks.interval.interval.height, + |(h, input_count, interval, ..)| { + let interval_f64 = f64::from(*interval); + let per_sec = if interval_f64 > 0.0 { + StoredF32::from(*input_count as f64 / interval_f64) + } else { + StoredF32::NAN + }; + (h, per_sec) + }, + exit, + )?; - self.inputs_per_sec - .compute_all(starting_indexes, exit, |v| { - v.compute_transform2( - starting_indexes.dateindex, - &inputs_count.dateindex.sum_cum.sum.0, - &indexes.dateindex.date, - |(i, input_count, date, ..)| { - let completion = date.completion(); - let per_sec = if completion == 0.0 { - StoredF32::NAN - } else { - StoredF32::from(*input_count as f64 / (completion * ONE_DAY_IN_SEC_F64)) - }; - (i, per_sec) - }, - exit, - )?; - Ok(()) - })?; - - self.outputs_per_sec - .compute_all(starting_indexes, exit, |v| { - v.compute_transform2( - starting_indexes.dateindex, - &outputs_count.total_count.dateindex.sum_cum.sum.0, - &indexes.dateindex.date, - |(i, output_count, date, ..)| { - let completion = date.completion(); - let per_sec = if completion == 0.0 { - StoredF32::NAN - } else { - StoredF32::from( - *output_count as f64 / (completion * ONE_DAY_IN_SEC_F64), - ) - }; - (i, per_sec) - }, - exit, - )?; - Ok(()) - })?; + // outputs_per_sec: per-block output count / block interval + self.outputs_per_sec.height.compute_transform2( + starting_indexes.height, + &outputs_count.total_count.height.sum_cum.sum.0, + &blocks.interval.interval.height, + |(h, output_count, interval, ..)| { + let interval_f64 = f64::from(*interval); + let per_sec = if interval_f64 > 0.0 { + StoredF32::from(*output_count as f64 / interval_f64) + } else { + StoredF32::NAN + }; + (h, per_sec) + }, + exit, + )?; Ok(()) } diff --git a/crates/brk_computer/src/transactions/volume/import.rs b/crates/brk_computer/src/transactions/volume/import.rs index 9d0cf4b14..c2d93ab99 100644 --- 
a/crates/brk_computer/src/transactions/volume/import.rs +++ b/crates/brk_computer/src/transactions/volume/import.rs @@ -3,48 +3,49 @@ use brk_types::Version; use vecdb::Database; use super::Vecs; -use crate::{indexes, internal::{ComputedFromDateLast, ValueFromHeightSum, ValueFromDateLast}, price}; +use crate::{ + indexes, + internal::{ComputedFromHeightLast, ValueFromHeightLast, ValueFromHeightSum}, + prices, +}; impl Vecs { - pub fn forced_import( + pub(crate) fn forced_import( db: &Database, version: Version, indexes: &indexes::Vecs, - price: Option<&price::Vecs>, + prices: &prices::Vecs, ) -> Result { let v2 = Version::TWO; - let compute_dollars = price.is_some(); - Ok(Self { - sent_sum: ValueFromHeightSum::forced_import( - db, - "sent_sum", - version, - indexes, - price, - )?, + sent_sum: ValueFromHeightSum::forced_import(db, "sent_sum", version, indexes, prices)?, received_sum: ValueFromHeightSum::forced_import( db, "received_sum", version, indexes, - price, + prices, )?, - annualized_volume: ValueFromDateLast::forced_import( + annualized_volume: ValueFromHeightLast::forced_import( db, "annualized_volume", version, - compute_dollars, + indexes, + prices, + )?, + tx_per_sec: ComputedFromHeightLast::forced_import( + db, + "tx_per_sec", + version + v2, indexes, )?, - tx_per_sec: ComputedFromDateLast::forced_import(db, "tx_per_sec", version + v2, indexes)?, - outputs_per_sec: ComputedFromDateLast::forced_import( + outputs_per_sec: ComputedFromHeightLast::forced_import( db, "outputs_per_sec", version + v2, indexes, )?, - inputs_per_sec: ComputedFromDateLast::forced_import( + inputs_per_sec: ComputedFromHeightLast::forced_import( db, "inputs_per_sec", version + v2, diff --git a/crates/brk_computer/src/transactions/volume/vecs.rs b/crates/brk_computer/src/transactions/volume/vecs.rs index 07619ba18..54a8240e2 100644 --- a/crates/brk_computer/src/transactions/volume/vecs.rs +++ b/crates/brk_computer/src/transactions/volume/vecs.rs @@ -1,15 +1,16 @@ use 
brk_traversable::Traversable; use brk_types::StoredF32; +use vecdb::{Rw, StorageMode}; -use crate::internal::{ComputedFromDateLast, ValueFromHeightSum, ValueFromDateLast}; +use crate::internal::{ComputedFromHeightLast, ValueFromHeightLast, ValueFromHeightSum}; /// Volume metrics -#[derive(Clone, Traversable)] -pub struct Vecs { - pub sent_sum: ValueFromHeightSum, - pub received_sum: ValueFromHeightSum, - pub annualized_volume: ValueFromDateLast, - pub tx_per_sec: ComputedFromDateLast, - pub outputs_per_sec: ComputedFromDateLast, - pub inputs_per_sec: ComputedFromDateLast, +#[derive(Traversable)] +pub struct Vecs { + pub sent_sum: ValueFromHeightSum, + pub received_sum: ValueFromHeightSum, + pub annualized_volume: ValueFromHeightLast, + pub tx_per_sec: ComputedFromHeightLast, + pub outputs_per_sec: ComputedFromHeightLast, + pub inputs_per_sec: ComputedFromHeightLast, } diff --git a/crates/brk_computer/src/utils.rs b/crates/brk_computer/src/utils.rs index ec0df2f70..3890bbb8f 100644 --- a/crates/brk_computer/src/utils.rs +++ b/crates/brk_computer/src/utils.rs @@ -20,7 +20,7 @@ impl OptionExt for Option { } } -pub fn get_percentile(sorted: &[T], percentile: f64) -> T +pub(crate) fn get_percentile(sorted: &[T], percentile: f64) -> T where T: Clone + Div + Add, { diff --git a/crates/brk_error/Cargo.toml b/crates/brk_error/Cargo.toml index e6e13b5cc..6930c331b 100644 --- a/crates/brk_error/Cargo.toml +++ b/crates/brk_error/Cargo.toml @@ -10,6 +10,7 @@ repository.workspace = true [features] bitcoin = ["dep:bitcoin"] bitcoincore-rpc = ["dep:bitcoincore-rpc"] +corepc = ["dep:corepc-client"] fjall = ["dep:fjall"] jiff = ["dep:jiff"] minreq = ["dep:minreq"] @@ -21,6 +22,7 @@ vecdb = ["dep:vecdb"] [dependencies] bitcoin = { workspace = true, optional = true } bitcoincore-rpc = { workspace = true, optional = true } +corepc-client = { workspace = true, optional = true } fjall = { workspace = true, optional = true } jiff = { workspace = true, optional = true } minreq = { 
workspace = true, optional = true } diff --git a/crates/brk_error/src/lib.rs b/crates/brk_error/src/lib.rs index 4d68da6cb..4be758dfb 100644 --- a/crates/brk_error/src/lib.rs +++ b/crates/brk_error/src/lib.rs @@ -15,6 +15,10 @@ pub enum Error { #[error(transparent)] BitcoinRPC(#[from] bitcoincore_rpc::Error), + #[cfg(feature = "corepc")] + #[error(transparent)] + CorepcRPC(#[from] corepc_client::client_sync::Error), + #[cfg(feature = "jiff")] #[error(transparent)] Jiff(#[from] jiff::Error), @@ -166,6 +170,7 @@ impl Error { #[cfg(feature = "vecdb")] pub fn is_data_error(&self) -> bool { matches!(self, Error::VecDB(e) if e.is_data_error()) + || matches!(self, Error::VersionMismatch { .. }) } /// Returns true if this network/fetch error indicates a permanent/blocking condition diff --git a/crates/brk_fetcher/Cargo.toml b/crates/brk_fetcher/Cargo.toml index bf115c0a6..0c256aadc 100644 --- a/crates/brk_fetcher/Cargo.toml +++ b/crates/brk_fetcher/Cargo.toml @@ -6,6 +6,7 @@ edition.workspace = true license.workspace = true homepage.workspace = true repository.workspace = true +exclude = ["examples/"] [dependencies] brk_error = { workspace = true, features = ["minreq", "serde_json"] } diff --git a/crates/brk_fetcher/src/binance.rs b/crates/brk_fetcher/src/binance.rs index f3aaebd79..6a7ef63d6 100644 --- a/crates/brk_fetcher/src/binance.rs +++ b/crates/brk_fetcher/src/binance.rs @@ -6,7 +6,7 @@ use std::{ }; use brk_error::{Error, Result}; -use brk_types::{Date, Height, OHLCCentsUnsigned, Timestamp}; +use brk_types::{Date, Height, OHLCCents, Timestamp}; use serde_json::Value; use tracing::info; @@ -18,9 +18,9 @@ use crate::{ #[derive(Clone)] pub struct Binance { path: Option, - _1mn: Option>, - _1d: Option>, - har: Option>, + _1mn: Option>, + _1d: Option>, + har: Option>, } impl Binance { @@ -37,7 +37,7 @@ impl Binance { &mut self, timestamp: Timestamp, previous_timestamp: Option, - ) -> Result { + ) -> Result { // Try live API data first if self._1mn.is_none() || 
self._1mn.as_ref().unwrap().last_key_value().unwrap().0 <= ×tamp @@ -69,7 +69,7 @@ impl Binance { ) } - pub fn fetch_1mn() -> Result> { + pub fn fetch_1mn() -> Result> { default_retry(|_| { let url = Self::url("interval=1m&limit=1000"); info!("Fetching {url} ..."); @@ -79,7 +79,7 @@ impl Binance { }) } - pub fn get_from_1d(&mut self, date: &Date) -> Result { + pub fn get_from_1d(&mut self, date: &Date) -> Result { if self._1d.is_none() || self._1d.as_ref().unwrap().last_key_value().unwrap().0 <= date { self._1d.replace(Self::fetch_1d()?); } @@ -92,7 +92,7 @@ impl Binance { .ok_or(Error::NotFound("Couldn't find date".into())) } - pub fn fetch_1d() -> Result> { + pub fn fetch_1d() -> Result> { default_retry(|_| { let url = Self::url("interval=1d"); info!("Fetching {url} ..."); @@ -102,7 +102,7 @@ impl Binance { }) } - fn read_har(&self) -> Result> { + fn read_har(&self) -> Result> { if self.path.is_none() { return Err(Error::NotFound("HAR path not configured".into())); } @@ -179,7 +179,7 @@ impl Binance { }) } - fn parse_ohlc_array(json: &Value) -> Result> { + fn parse_ohlc_array(json: &Value) -> Result> { let result = json .as_array() .ok_or(Error::Parse("Expected JSON array".into()))? 
@@ -193,7 +193,7 @@ impl Binance { Ok(result) } - fn parse_date_ohlc_array(json: &Value) -> Result> { + fn parse_date_ohlc_array(json: &Value) -> Result> { Self::parse_ohlc_array(json).map(|map| { map.into_iter() .map(|(ts, ohlc)| (date_from_timestamp(ts), ohlc)) @@ -218,7 +218,7 @@ impl PriceSource for Binance { "Binance" } - fn get_date(&mut self, date: Date) -> Option> { + fn get_date(&mut self, date: Date) -> Option> { Some(self.get_from_1d(&date)) } @@ -226,11 +226,11 @@ impl PriceSource for Binance { &mut self, timestamp: Timestamp, previous_timestamp: Option, - ) -> Option> { + ) -> Option> { Some(self.get_from_1mn(timestamp, previous_timestamp)) } - fn get_height(&mut self, _height: Height) -> Option> { + fn get_height(&mut self, _height: Height) -> Option> { None // Binance doesn't support height-based queries } diff --git a/crates/brk_fetcher/src/brk.rs b/crates/brk_fetcher/src/brk.rs index 7de8cc001..97803f2bb 100644 --- a/crates/brk_fetcher/src/brk.rs +++ b/crates/brk_fetcher/src/brk.rs @@ -2,8 +2,8 @@ use std::collections::BTreeMap; use brk_error::{Error, Result}; use brk_types::{ - CentsUnsigned, CheckedSub, Close, Date, DateIndex, Dollars, Height, High, Low, - OHLCCentsUnsigned, Open, Timestamp, + Cents, CheckedSub, Close, Date, Day1, Dollars, Height, High, Low, OHLCCents, Open, + Timestamp, }; use serde_json::Value; use tracing::info; @@ -13,15 +13,15 @@ use crate::{PriceSource, check_response, default_retry}; #[derive(Default, Clone)] #[allow(clippy::upper_case_acronyms)] pub struct BRK { - height_to_ohlc: BTreeMap>, - dateindex_to_ohlc: BTreeMap>, + height_to_ohlc: BTreeMap>, + day1_to_ohlc: BTreeMap>, } const API_URL: &str = "https://bitview.space/api/vecs"; const CHUNK_SIZE: usize = 10_000; impl BRK { - pub fn get_from_height(&mut self, height: Height) -> Result { + pub fn get_from_height(&mut self, height: Height) -> Result { let key = height.checked_sub(height % CHUNK_SIZE).unwrap(); #[allow(clippy::map_entry)] @@ -40,7 +40,7 @@ impl BRK { 
.ok_or(Error::NotFound("Couldn't find height in BRK".into())) } - fn fetch_height_prices(height: Height) -> Result> { + fn fetch_height_prices(height: Height) -> Result> { default_retry(|_| { let url = format!( "{API_URL}/height-to-price-ohlc?from={}&to={}", @@ -60,33 +60,33 @@ impl BRK { }) } - pub fn get_from_date(&mut self, date: Date) -> Result { - let dateindex = DateIndex::try_from(date)?; + pub fn get_from_date(&mut self, date: Date) -> Result { + let day1 = Day1::try_from(date)?; - let key = dateindex.checked_sub(dateindex % CHUNK_SIZE).unwrap(); + let key = day1.checked_sub(day1 % CHUNK_SIZE).unwrap(); #[allow(clippy::map_entry)] - if !self.dateindex_to_ohlc.contains_key(&key) - || ((key + self.dateindex_to_ohlc.get(&key).unwrap().len()) <= dateindex) + if !self.day1_to_ohlc.contains_key(&key) + || ((key + self.day1_to_ohlc.get(&key).unwrap().len()) <= day1) { - self.dateindex_to_ohlc + self.day1_to_ohlc .insert(key, Self::fetch_date_prices(key)?); } - self.dateindex_to_ohlc + self.day1_to_ohlc .get(&key) .unwrap() - .get(usize::from(dateindex.checked_sub(key).unwrap())) + .get(usize::from(day1.checked_sub(key).unwrap())) .cloned() .ok_or(Error::NotFound("Couldn't find date in BRK".into())) } - fn fetch_date_prices(dateindex: DateIndex) -> Result> { + fn fetch_date_prices(day1: Day1) -> Result> { default_retry(|_| { let url = format!( - "{API_URL}/dateindex-to-price-ohlc?from={}&to={}", - dateindex, - dateindex + CHUNK_SIZE + "{API_URL}/day1-to-price-ohlc?from={}&to={}", + day1, + day1 + CHUNK_SIZE ); info!("Fetching {url}..."); @@ -101,13 +101,13 @@ impl BRK { }) } - fn value_to_ohlc(value: &Value) -> Result { + fn value_to_ohlc(value: &Value) -> Result { let ohlc = value .as_array() .ok_or(Error::Parse("Expected OHLC array".into()))?; let get_value = |index: usize| -> Result<_> { - Ok(CentsUnsigned::from(Dollars::from( + Ok(Cents::from(Dollars::from( ohlc.get(index) .ok_or(Error::Parse("Missing OHLC value at index".into()))? 
.as_f64() @@ -115,7 +115,7 @@ impl BRK { ))) }; - Ok(OHLCCentsUnsigned::from(( + Ok(OHLCCents::from(( Open::new(get_value(0)?), High::new(get_value(1)?), Low::new(get_value(2)?), @@ -134,7 +134,7 @@ impl PriceSource for BRK { "BRK" } - fn get_date(&mut self, date: Date) -> Option> { + fn get_date(&mut self, date: Date) -> Option> { Some(self.get_from_date(date)) } @@ -142,11 +142,11 @@ impl PriceSource for BRK { &mut self, _timestamp: Timestamp, _previous_timestamp: Option, - ) -> Option> { + ) -> Option> { None // BRK doesn't support timestamp-based queries } - fn get_height(&mut self, height: Height) -> Option> { + fn get_height(&mut self, height: Height) -> Option> { Some(self.get_from_height(height)) } @@ -156,6 +156,6 @@ impl PriceSource for BRK { fn clear(&mut self) { self.height_to_ohlc.clear(); - self.dateindex_to_ohlc.clear(); + self.day1_to_ohlc.clear(); } } diff --git a/crates/brk_fetcher/src/kraken.rs b/crates/brk_fetcher/src/kraken.rs index e3f3f3d06..ee20f8920 100644 --- a/crates/brk_fetcher/src/kraken.rs +++ b/crates/brk_fetcher/src/kraken.rs @@ -1,7 +1,7 @@ use std::collections::BTreeMap; use brk_error::{Error, Result}; -use brk_types::{Date, Height, OHLCCentsUnsigned, Timestamp}; +use brk_types::{Date, Height, OHLCCents, Timestamp}; use serde_json::Value; use tracing::info; @@ -12,8 +12,8 @@ use crate::{ #[derive(Default, Clone)] pub struct Kraken { - _1mn: Option>, - _1d: Option>, + _1mn: Option>, + _1d: Option>, } impl Kraken { @@ -21,7 +21,7 @@ impl Kraken { &mut self, timestamp: Timestamp, previous_timestamp: Option, - ) -> Result { + ) -> Result { if self._1mn.is_none() || self._1mn.as_ref().unwrap().last_key_value().unwrap().0 <= ×tamp { @@ -35,7 +35,7 @@ impl Kraken { ) } - pub fn fetch_1mn() -> Result> { + pub fn fetch_1mn() -> Result> { default_retry(|_| { let url = Self::url(1); info!("Fetching {url} ..."); @@ -45,7 +45,7 @@ impl Kraken { }) } - fn get_from_1d(&mut self, date: &Date) -> Result { + fn get_from_1d(&mut self, date: &Date) -> 
Result { if self._1d.is_none() || self._1d.as_ref().unwrap().last_key_value().unwrap().0 <= date { self._1d.replace(Self::fetch_1d()?); } @@ -57,7 +57,7 @@ impl Kraken { .ok_or(Error::NotFound("Couldn't find date".into())) } - pub fn fetch_1d() -> Result> { + pub fn fetch_1d() -> Result> { default_retry(|_| { let url = Self::url(1440); info!("Fetching {url} ..."); @@ -68,7 +68,7 @@ impl Kraken { } /// Parse Kraken's nested JSON response: { result: { XXBTZUSD: [...] } } - fn parse_ohlc_response(json: &Value) -> Result> { + fn parse_ohlc_response(json: &Value) -> Result> { let result = json .get("result") .and_then(|r| r.get("XXBTZUSD")) @@ -84,7 +84,7 @@ impl Kraken { Ok(result) } - fn parse_date_ohlc_response(json: &Value) -> Result> { + fn parse_date_ohlc_response(json: &Value) -> Result> { Self::parse_ohlc_response(json).map(|map| { map.into_iter() .map(|(ts, ohlc)| (date_from_timestamp(ts), ohlc)) @@ -109,7 +109,7 @@ impl PriceSource for Kraken { "Kraken" } - fn get_date(&mut self, date: Date) -> Option> { + fn get_date(&mut self, date: Date) -> Option> { Some(self.get_from_1d(&date)) } @@ -117,11 +117,11 @@ impl PriceSource for Kraken { &mut self, timestamp: Timestamp, previous_timestamp: Option, - ) -> Option> { + ) -> Option> { Some(self.get_from_1mn(timestamp, previous_timestamp)) } - fn get_height(&mut self, _height: Height) -> Option> { + fn get_height(&mut self, _height: Height) -> Option> { None // Kraken doesn't support height-based queries } diff --git a/crates/brk_fetcher/src/lib.rs b/crates/brk_fetcher/src/lib.rs index 28efca81f..abacf1823 100644 --- a/crates/brk_fetcher/src/lib.rs +++ b/crates/brk_fetcher/src/lib.rs @@ -3,7 +3,7 @@ use std::{path::Path, thread::sleep, time::Duration}; use brk_error::{Error, Result}; -use brk_types::{Date, Height, OHLCCentsUnsigned, Timestamp}; +use brk_types::{Date, Height, OHLCCents, Timestamp}; use tracing::info; mod binance; @@ -66,9 +66,9 @@ impl Fetcher { } /// Try fetching from each source in order, return 
first success - fn try_sources(&mut self, mut fetch: F) -> Option> + fn try_sources(&mut self, mut fetch: F) -> Option> where - F: FnMut(&mut dyn PriceSource) -> Option>, + F: FnMut(&mut dyn PriceSource) -> Option>, { if let Some(Ok(ohlc)) = fetch(&mut self.binance) { return Some(Ok(ohlc)); @@ -82,7 +82,7 @@ impl Fetcher { None } - pub fn get_date(&mut self, date: Date) -> Result { + pub fn get_date(&mut self, date: Date) -> Result { self.fetch_with_retry( |source| source.get_date(date), || format!("Failed to fetch price for date {date}"), @@ -94,7 +94,7 @@ impl Fetcher { height: Height, timestamp: Timestamp, previous_timestamp: Option, - ) -> Result { + ) -> Result { let timestamp = timestamp.floor_seconds(); let previous_timestamp = previous_timestamp.map(|t| t.floor_seconds()); @@ -133,9 +133,9 @@ How to fix this: } /// Try each source in order, with retries on total failure - fn fetch_with_retry(&mut self, mut fetch: F, error_message: E) -> Result + fn fetch_with_retry(&mut self, mut fetch: F, error_message: E) -> Result where - F: FnMut(&mut dyn PriceSource) -> Option>, + F: FnMut(&mut dyn PriceSource) -> Option>, E: Fn() -> String, { for retry in 0..=MAX_RETRIES { diff --git a/crates/brk_fetcher/src/ohlc.rs b/crates/brk_fetcher/src/ohlc.rs index c093e0603..526fd1ffe 100644 --- a/crates/brk_fetcher/src/ohlc.rs +++ b/crates/brk_fetcher/src/ohlc.rs @@ -1,11 +1,11 @@ use std::collections::BTreeMap; use brk_error::{Error, Result}; -use brk_types::{CentsUnsigned, Close, Date, Dollars, High, Low, OHLCCentsUnsigned, Open, Timestamp}; +use brk_types::{Cents, Close, Date, Dollars, High, Low, OHLCCents, Open, Timestamp}; /// Parse OHLC value from a JSON array element at given index -pub fn parse_cents(array: &[serde_json::Value], index: usize) -> CentsUnsigned { - CentsUnsigned::from(Dollars::from( +pub fn parse_cents(array: &[serde_json::Value], index: usize) -> Cents { + Cents::from(Dollars::from( array .get(index) .and_then(|v| v.as_str()) @@ -15,8 +15,8 @@ pub fn 
parse_cents(array: &[serde_json::Value], index: usize) -> CentsUnsigned { } /// Build OHLCCentsUnsigned from array indices 1-4 (open, high, low, close) -pub fn ohlc_from_array(array: &[serde_json::Value]) -> OHLCCentsUnsigned { - OHLCCentsUnsigned::from(( +pub fn ohlc_from_array(array: &[serde_json::Value]) -> OHLCCents { + OHLCCents::from(( Open::new(parse_cents(array, 1)), High::new(parse_cents(array, 2)), Low::new(parse_cents(array, 3)), @@ -27,13 +27,13 @@ pub fn ohlc_from_array(array: &[serde_json::Value]) -> OHLCCentsUnsigned { /// Compute OHLC for a block from a time series of minute data. /// Aggregates all candles between previous_timestamp and timestamp. pub fn compute_ohlc_from_range( - tree: &BTreeMap, + tree: &BTreeMap, timestamp: Timestamp, previous_timestamp: Option, source_name: &str, -) -> Result { - let previous_ohlc = previous_timestamp - .map_or(Some(OHLCCentsUnsigned::default()), |t| tree.get(&t).cloned()); +) -> Result { + let previous_ohlc = + previous_timestamp.map_or(Some(OHLCCents::default()), |t| tree.get(&t).cloned()); let last_ohlc = tree.get(×tamp); @@ -44,7 +44,7 @@ pub fn compute_ohlc_from_range( } let previous_ohlc = previous_ohlc.unwrap(); - let mut result = OHLCCentsUnsigned::from(previous_ohlc.close); + let mut result = OHLCCents::from(previous_ohlc.close); let start = previous_timestamp.unwrap_or(Timestamp::new(0)); let end = timestamp; diff --git a/crates/brk_fetcher/src/source.rs b/crates/brk_fetcher/src/source.rs index 36bfc12ac..0d4c8f8ea 100644 --- a/crates/brk_fetcher/src/source.rs +++ b/crates/brk_fetcher/src/source.rs @@ -1,7 +1,7 @@ use std::time::{Duration, Instant}; use brk_error::{Error, Result}; -use brk_types::{Date, Height, OHLCCentsUnsigned, Timestamp}; +use brk_types::{Date, Height, OHLCCents, Timestamp}; use tracing::info; /// Default cooldown period for unhealthy sources (5 minutes) @@ -12,17 +12,17 @@ pub trait PriceSource { fn name(&self) -> &'static str; /// Fetch daily OHLC for a date. 
Returns None if this source doesn't support date queries. - fn get_date(&mut self, date: Date) -> Option>; + fn get_date(&mut self, date: Date) -> Option>; /// Fetch minute OHLC for a timestamp range. Returns None if unsupported. fn get_1mn( &mut self, timestamp: Timestamp, previous_timestamp: Option, - ) -> Option>; + ) -> Option>; /// Fetch OHLC by block height. Returns None if unsupported. - fn get_height(&mut self, height: Height) -> Option>; + fn get_height(&mut self, height: Height) -> Option>; /// Check if the source is reachable fn ping(&self) -> Result<()>; @@ -115,7 +115,7 @@ impl PriceSource for TrackedSource { self.source.name() } - fn get_date(&mut self, date: Date) -> Option> { + fn get_date(&mut self, date: Date) -> Option> { self.try_fetch(|s| s.get_date(date)) } @@ -123,11 +123,11 @@ impl PriceSource for TrackedSource { &mut self, timestamp: Timestamp, previous_timestamp: Option, - ) -> Option> { + ) -> Option> { self.try_fetch(|s| s.get_1mn(timestamp, previous_timestamp)) } - fn get_height(&mut self, height: Height) -> Option> { + fn get_height(&mut self, height: Height) -> Option> { self.try_fetch(|s| s.get_height(height)) } diff --git a/crates/brk_indexer/src/lib.rs b/crates/brk_indexer/src/lib.rs index b11f83999..9f7ea8c4d 100644 --- a/crates/brk_indexer/src/lib.rs +++ b/crates/brk_indexer/src/lib.rs @@ -7,7 +7,7 @@ use brk_iterator::Blocks; use brk_rpc::Client; use brk_types::Height; use tracing::{debug, info}; -use vecdb::{Exit, ReadableVec}; +use vecdb::{Exit, ReadOnlyClone, ReadableVec, Ro, Rw, StorageMode}; mod constants; mod indexes; mod processor; @@ -24,12 +24,22 @@ pub use brk_types::Indexes; pub use stores::Stores; pub use vecs::*; -#[derive(Clone)] -pub struct Indexer { - pub vecs: Vecs, +pub struct Indexer { + pub vecs: Vecs, pub stores: Stores, } +impl ReadOnlyClone for Indexer { + type ReadOnly = Indexer; + + fn read_only_clone(&self) -> Indexer { + Indexer { + vecs: self.vecs.read_only_clone(), + stores: self.stores.clone(), + } 
+ } +} + impl Indexer { pub fn forced_import(outputs_dir: &Path) -> Result { Self::forced_import_inner(outputs_dir, true) diff --git a/crates/brk_indexer/src/vecs/addresses.rs b/crates/brk_indexer/src/vecs/addresses.rs index b9231ce42..f97fe5cc6 100644 --- a/crates/brk_indexer/src/vecs/addresses.rs +++ b/crates/brk_indexer/src/vecs/addresses.rs @@ -9,32 +9,32 @@ use brk_types::{ use rayon::prelude::*; use vecdb::{ AnyStoredVec, BytesVec, Database, WritableVec, ImportableVec, PcoVec, ReadableVec, - Stamp, VecIndex, + Rw, Stamp, StorageMode, VecIndex, }; use crate::readers::AddressReaders; use crate::parallel_import; -#[derive(Clone, Traversable)] -pub struct AddressesVecs { +#[derive(Traversable)] +pub struct AddressesVecs { // Height to first address index (per address type) - pub first_p2pk65addressindex: PcoVec, - pub first_p2pk33addressindex: PcoVec, - pub first_p2pkhaddressindex: PcoVec, - pub first_p2shaddressindex: PcoVec, - pub first_p2wpkhaddressindex: PcoVec, - pub first_p2wshaddressindex: PcoVec, - pub first_p2traddressindex: PcoVec, - pub first_p2aaddressindex: PcoVec, + pub first_p2pk65addressindex: M::Stored>, + pub first_p2pk33addressindex: M::Stored>, + pub first_p2pkhaddressindex: M::Stored>, + pub first_p2shaddressindex: M::Stored>, + pub first_p2wpkhaddressindex: M::Stored>, + pub first_p2wshaddressindex: M::Stored>, + pub first_p2traddressindex: M::Stored>, + pub first_p2aaddressindex: M::Stored>, // Address index to bytes (per address type) - pub p2pk65bytes: BytesVec, - pub p2pk33bytes: BytesVec, - pub p2pkhbytes: BytesVec, - pub p2shbytes: BytesVec, - pub p2wpkhbytes: BytesVec, - pub p2wshbytes: BytesVec, - pub p2trbytes: BytesVec, - pub p2abytes: BytesVec, + pub p2pk65bytes: M::Stored>, + pub p2pk33bytes: M::Stored>, + pub p2pkhbytes: M::Stored>, + pub p2shbytes: M::Stored>, + pub p2wpkhbytes: M::Stored>, + pub p2wshbytes: M::Stored>, + pub p2trbytes: M::Stored>, + pub p2abytes: M::Stored>, } impl AddressesVecs { diff --git 
a/crates/brk_indexer/src/vecs/blocks.rs b/crates/brk_indexer/src/vecs/blocks.rs index 58acaa601..25f31c33c 100644 --- a/crates/brk_indexer/src/vecs/blocks.rs +++ b/crates/brk_indexer/src/vecs/blocks.rs @@ -2,20 +2,20 @@ use brk_error::Result; use brk_traversable::Traversable; use brk_types::{BlockHash, Height, StoredF64, StoredU64, Timestamp, Version, Weight}; use rayon::prelude::*; -use vecdb::{AnyStoredVec, BytesVec, Database, WritableVec, ImportableVec, PcoVec, Stamp}; +use vecdb::{AnyStoredVec, BytesVec, Database, WritableVec, ImportableVec, PcoVec, Rw, Stamp, StorageMode}; use crate::parallel_import; -#[derive(Clone, Traversable)] -pub struct BlocksVecs { - pub blockhash: BytesVec, +#[derive(Traversable)] +pub struct BlocksVecs { + pub blockhash: M::Stored>, #[traversable(wrap = "difficulty", rename = "raw")] - pub difficulty: PcoVec, + pub difficulty: M::Stored>, /// Doesn't guarantee continuity due to possible reorgs and more generally the nature of mining #[traversable(wrap = "time")] - pub timestamp: PcoVec, - pub total_size: PcoVec, - pub weight: PcoVec, + pub timestamp: M::Stored>, + pub total_size: M::Stored>, + pub weight: M::Stored>, } impl BlocksVecs { diff --git a/crates/brk_indexer/src/vecs/inputs.rs b/crates/brk_indexer/src/vecs/inputs.rs index 515face8c..d2e298a04 100644 --- a/crates/brk_indexer/src/vecs/inputs.rs +++ b/crates/brk_indexer/src/vecs/inputs.rs @@ -2,17 +2,17 @@ use brk_error::Result; use brk_traversable::Traversable; use brk_types::{Height, OutPoint, OutputType, TxInIndex, TxIndex, TypeIndex, Version}; use rayon::prelude::*; -use vecdb::{AnyStoredVec, Database, WritableVec, ImportableVec, PcoVec, Stamp}; +use vecdb::{AnyStoredVec, Database, WritableVec, ImportableVec, PcoVec, Rw, Stamp, StorageMode}; use crate::parallel_import; -#[derive(Clone, Traversable)] -pub struct InputsVecs { - pub first_txinindex: PcoVec, - pub outpoint: PcoVec, - pub txindex: PcoVec, - pub outputtype: PcoVec, - pub typeindex: PcoVec, +#[derive(Traversable)] 
+pub struct InputsVecs { + pub first_txinindex: M::Stored>, + pub outpoint: M::Stored>, + pub txindex: M::Stored>, + pub outputtype: M::Stored>, + pub typeindex: M::Stored>, } impl InputsVecs { diff --git a/crates/brk_indexer/src/vecs/mod.rs b/crates/brk_indexer/src/vecs/mod.rs index 6d8597e4e..9497eca76 100644 --- a/crates/brk_indexer/src/vecs/mod.rs +++ b/crates/brk_indexer/src/vecs/mod.rs @@ -4,7 +4,7 @@ use brk_error::Result; use brk_traversable::Traversable; use brk_types::{AddressHash, Height, OutputType, Version}; use rayon::prelude::*; -use vecdb::{AnyStoredVec, Database, Stamp}; +use vecdb::{AnyStoredVec, Database, Rw, Stamp, StorageMode}; const PAGE_SIZE: usize = 4096; @@ -27,15 +27,16 @@ pub use transactions::*; use crate::Indexes; -#[derive(Clone, Traversable)] -pub struct Vecs { +#[derive(Traversable)] +pub struct Vecs { + #[traversable(skip)] db: Database, - pub blocks: BlocksVecs, - pub transactions: TransactionsVecs, - pub inputs: InputsVecs, - pub outputs: OutputsVecs, - pub addresses: AddressesVecs, - pub scripts: ScriptsVecs, + pub blocks: BlocksVecs, + pub transactions: TransactionsVecs, + pub inputs: InputsVecs, + pub outputs: OutputsVecs, + pub addresses: AddressesVecs, + pub scripts: ScriptsVecs, } impl Vecs { diff --git a/crates/brk_indexer/src/vecs/outputs.rs b/crates/brk_indexer/src/vecs/outputs.rs index 3b32b4354..a3b2c7051 100644 --- a/crates/brk_indexer/src/vecs/outputs.rs +++ b/crates/brk_indexer/src/vecs/outputs.rs @@ -2,17 +2,17 @@ use brk_error::Result; use brk_traversable::Traversable; use brk_types::{Height, OutputType, Sats, TxIndex, TxOutIndex, TypeIndex, Version}; use rayon::prelude::*; -use vecdb::{AnyStoredVec, BytesVec, Database, WritableVec, ImportableVec, PcoVec, Stamp}; +use vecdb::{AnyStoredVec, BytesVec, Database, WritableVec, ImportableVec, PcoVec, Rw, Stamp, StorageMode}; use crate::parallel_import; -#[derive(Clone, Traversable)] -pub struct OutputsVecs { - pub first_txoutindex: PcoVec, - pub value: BytesVec, - pub 
outputtype: BytesVec, - pub typeindex: BytesVec, - pub txindex: PcoVec, +#[derive(Traversable)] +pub struct OutputsVecs { + pub first_txoutindex: M::Stored>, + pub value: M::Stored>, + pub outputtype: M::Stored>, + pub typeindex: M::Stored>, + pub txindex: M::Stored>, } impl OutputsVecs { diff --git a/crates/brk_indexer/src/vecs/scripts.rs b/crates/brk_indexer/src/vecs/scripts.rs index b525709a2..a591cb07e 100644 --- a/crates/brk_indexer/src/vecs/scripts.rs +++ b/crates/brk_indexer/src/vecs/scripts.rs @@ -4,22 +4,22 @@ use brk_types::{ EmptyOutputIndex, Height, OpReturnIndex, P2MSOutputIndex, TxIndex, UnknownOutputIndex, Version, }; use rayon::prelude::*; -use vecdb::{AnyStoredVec, Database, WritableVec, ImportableVec, PcoVec, Stamp}; +use vecdb::{AnyStoredVec, Database, WritableVec, ImportableVec, PcoVec, Rw, Stamp, StorageMode}; use crate::parallel_import; -#[derive(Clone, Traversable)] -pub struct ScriptsVecs { +#[derive(Traversable)] +pub struct ScriptsVecs { // Height to first output index (per output type) - pub first_emptyoutputindex: PcoVec, - pub first_opreturnindex: PcoVec, - pub first_p2msoutputindex: PcoVec, - pub first_unknownoutputindex: PcoVec, + pub first_emptyoutputindex: M::Stored>, + pub first_opreturnindex: M::Stored>, + pub first_p2msoutputindex: M::Stored>, + pub first_unknownoutputindex: M::Stored>, // Output index to txindex (per output type) - pub empty_to_txindex: PcoVec, - pub opreturn_to_txindex: PcoVec, - pub p2ms_to_txindex: PcoVec, - pub unknown_to_txindex: PcoVec, + pub empty_to_txindex: M::Stored>, + pub opreturn_to_txindex: M::Stored>, + pub p2ms_to_txindex: M::Stored>, + pub unknown_to_txindex: M::Stored>, } impl ScriptsVecs { diff --git a/crates/brk_indexer/src/vecs/transactions.rs b/crates/brk_indexer/src/vecs/transactions.rs index c777fac09..d8fcd151e 100644 --- a/crates/brk_indexer/src/vecs/transactions.rs +++ b/crates/brk_indexer/src/vecs/transactions.rs @@ -5,22 +5,22 @@ use brk_types::{ Version, }; use rayon::prelude::*; 
-use vecdb::{AnyStoredVec, BytesVec, Database, ImportableVec, PcoVec, Stamp, WritableVec}; +use vecdb::{AnyStoredVec, BytesVec, Database, ImportableVec, PcoVec, Rw, Stamp, StorageMode, WritableVec}; use crate::parallel_import; -#[derive(Clone, Traversable)] -pub struct TransactionsVecs { - pub first_txindex: PcoVec, - pub height: PcoVec, - pub txid: BytesVec, - pub txversion: PcoVec, - pub rawlocktime: PcoVec, - pub base_size: PcoVec, - pub total_size: PcoVec, - pub is_explicitly_rbf: PcoVec, - pub first_txinindex: PcoVec, - pub first_txoutindex: BytesVec, +#[derive(Traversable)] +pub struct TransactionsVecs { + pub first_txindex: M::Stored>, + pub height: M::Stored>, + pub txid: M::Stored>, + pub txversion: M::Stored>, + pub rawlocktime: M::Stored>, + pub base_size: M::Stored>, + pub total_size: M::Stored>, + pub is_explicitly_rbf: M::Stored>, + pub first_txinindex: M::Stored>, + pub first_txoutindex: M::Stored>, } pub struct TxMetadataVecs<'a> { diff --git a/crates/brk_iterator/Cargo.toml b/crates/brk_iterator/Cargo.toml index a7fb1e2f5..359fe4ef6 100644 --- a/crates/brk_iterator/Cargo.toml +++ b/crates/brk_iterator/Cargo.toml @@ -6,9 +6,10 @@ edition.workspace = true license.workspace = true homepage.workspace = true repository.workspace = true +exclude = ["examples/"] [dependencies] brk_error = { workspace = true } brk_reader = { workspace = true } -brk_rpc = { workspace = true } +brk_rpc = { workspace = true, features = ["corepc"] } brk_types = { workspace = true } diff --git a/crates/brk_logger/Cargo.toml b/crates/brk_logger/Cargo.toml index 23cfb721c..3c7e984a9 100644 --- a/crates/brk_logger/Cargo.toml +++ b/crates/brk_logger/Cargo.toml @@ -6,10 +6,11 @@ edition.workspace = true license.workspace = true homepage.workspace = true repository.workspace = true +exclude = ["examples/"] [dependencies] jiff = { workspace = true } owo-colors = { workspace = true } tracing = { workspace = true } tracing-log = "0.2" -tracing-subscriber = { version = "0.3", 
default-features = false, features = ["fmt", "env-filter", "std"] } +tracing-subscriber = { version = "0.3", default-features = false, features = ["fmt", "std"] } diff --git a/crates/brk_logger/src/lib.rs b/crates/brk_logger/src/lib.rs index 237ac7495..3e4e966f8 100644 --- a/crates/brk_logger/src/lib.rs +++ b/crates/brk_logger/src/lib.rs @@ -6,7 +6,7 @@ mod rate_limit; use std::{io, path::Path, time::Duration}; -use tracing_subscriber::{EnvFilter, fmt, layer::SubscriberExt, util::SubscriberInitExt}; +use tracing_subscriber::{filter::Targets, fmt, layer::SubscriberExt, util::SubscriberInitExt}; use format::Formatter; use hook::{HookLayer, LOG_HOOK}; @@ -25,10 +25,14 @@ pub fn init(path: Option<&Path>) -> io::Result<()> { let level = std::env::var("LOG").unwrap_or_else(|_| DEFAULT_LEVEL.to_string()); - let filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| { - EnvFilter::new(format!( - "{level},bitcoin=off,bitcoincore-rpc=off,fjall=off,brk_fjall=off,lsm_tree=off,brk_rolldown=off,rolldown=off,tracing=off,aide=off,rustls=off,notify=off,oxc_resolver=off,tower_http=off" - )) + let directives = std::env::var("RUST_LOG").unwrap_or_else(|_| { + format!( + "{level},bitcoin=off,bitcoincore_rpc=off,corepc=off,fjall=off,brk_fjall=off,lsm_tree=off,brk_rolldown=off,rolldown=off,tracing=off,aide=off,rustls=off,notify=off,oxc_resolver=off,tower_http=off" + ) + }); + + let filter: Targets = directives.parse().unwrap_or_else(|_| { + Targets::new().with_default(tracing::Level::INFO) }); let registry = tracing_subscriber::registry() diff --git a/crates/brk_mempool/Cargo.toml b/crates/brk_mempool/Cargo.toml index 78734718c..2208a5a89 100644 --- a/crates/brk_mempool/Cargo.toml +++ b/crates/brk_mempool/Cargo.toml @@ -6,10 +6,11 @@ edition.workspace = true license.workspace = true homepage.workspace = true repository.workspace = true +exclude = ["examples/"] [dependencies] brk_error = { workspace = true } -brk_rpc = { workspace = true } +brk_rpc = { workspace = true, features = 
["corepc"] } brk_types = { workspace = true } derive_more = { workspace = true } tracing = { workspace = true } diff --git a/crates/brk_oracle/README.md b/crates/brk_oracle/README.md index 055762380..8792710ec 100644 --- a/crates/brk_oracle/README.md +++ b/crates/brk_oracle/README.md @@ -1,6 +1,6 @@ # brk_oracle -Pure on-chain BTC/USD price oracle. No exchange feeds, no external APIs. Derives the bitcoin price from transaction data alone. Tracks block by block from height 575,000 (May 2019) onward with 0.1% median error. +Pure on-chain BTC/USD price oracle. No exchange feeds, no external APIs. Derives the bitcoin price from transaction data alone. Tracks block by block from height 550,000 (November 2018) onward. Inspired by [UTXOracle](https://utxo.live/oracle/) by [@SteveSimple](https://x.com/SteveSimple), which proved the concept. brk_oracle takes the same core insight and redesigns the algorithm for per-block resolution and rolling operation. See [comparison](#comparison-with-utxoracle) below. @@ -40,37 +40,38 @@ The spacing between spikes is constant (set by the ratios between dollar amounts ## How it works +The oracle tracks the price incrementally, block by block, starting from a known seed price. Each new block nudges the estimate. The search window is narrow (about ±10 bins, or ±12%), so the oracle can only follow gradual movement — it cannot jump to an arbitrary price from scratch. This is by design: it makes the algorithm resistant to noise. + For each new block: ### 1. Filter outputs Skip the coinbase transaction, then exclude noisy outputs: script types dominated by protocol activity (P2TR, P2WSH by default), dust below 1,000 sats, and round BTC amounts (0.01, 0.1, 1.0 BTC, etc.) that create false spikes unrelated to dollar purchases. -### 2. Map to log-scale bins +### 2. 
Build a log-scale histogram -Each remaining output becomes a bin index in a 2,400-bin histogram: +Each remaining output becomes a bin index in a 2,400-bin histogram spanning 12 decades (1 sat to 10¹² sats): ``` bin = round(log₁₀(sats) × 200) 200 bins per decade ``` -### 3. Accumulate in ring buffer +### 3. Smooth over recent blocks -A single block is too sparse for a clean signal. The histogram goes into a ring buffer (default depth: 12 blocks) so the pattern accumulates over recent blocks. - -### 4. Compute EMA - -The buffered histograms combine into an exponential moving average, weighting recent blocks more heavily: +A single block has too few outputs for a clean signal. The oracle keeps a ring buffer of the last 12 block histograms and combines them into an exponential moving average (EMA) that weights recent blocks more heavily: ``` - weight = α × (1 − α)^age default α = 2/7 (~6-block span) + EMA[bin] = Σ weight(age) × histogram[age][bin] + age=0..11 + + weight(age) = α × (1 − α)^age default α = 2/7 (~6-block span) ``` -Fully recomputed from the ring buffer each block. +The EMA is recomputed from the ring buffer each block. This makes the oracle deterministic: since only the last 12 histograms matter, any oracle started from a known price converges to the exact same state after 12 blocks, regardless of prior history. This is what makes checkpointing and restoring possible. -### 5. Score with a 19-point stencil +### 4. Score with a 19-point stencil -The core detection step. A stencil encodes where spikes from 19 round-dollar amounts ($1 through $10,000) should appear relative to each other on the log scale: +The fixed ratios between round-dollar amounts ($1, $2, $3, $5, ... $10,000) create a fingerprint: a pattern of 19 spikes with known spacing on the log scale. A stencil encodes this spacing as bin offsets from a $100 reference point: ``` $1 $5 $10 $50 $100 $200 $1k $10k @@ -81,24 +82,38 @@ The core detection step. 
A stencil encodes where spikes from 19 round-dollar amo (19 offsets total) ``` -The oracle slides this stencil across the EMA histogram within a narrow search window around the previous estimate. At each candidate position it reads the EMA value at all 19 expected spike locations, divides each by that offset's peak in the window (so rare amounts like $3 get equal voting weight to common amounts like $100) and sums the normalized values into a score. +The oracle slides this stencil across the EMA histogram within the search window. At each candidate position: -### 6. Pick the best position +1. **Read** the EMA value at all 19 expected spike locations +2. **Normalize** each value by dividing by that offset's peak within the search window — this gives rare amounts like $3 equal voting weight to common amounts like $100 +3. **Sum** the 19 normalized values into a single score -The position with the highest score is the new price estimate. Parabolic interpolation between the best bin and its neighbors refines it to sub-bin precision: +The position with the highest score is where the fingerprint best matches the histogram. + +### 5. Convert bin to price + +A $100 purchase at price P produces `$100 / P × 10⁸` sats, which lands in bin: + +``` + bin = log₁₀($100 / P × 10⁸) × 200 + = (2 + 8 − log₁₀(P)) × 200 + = (10 − log₁₀(P)) × 200 +``` + +So the stencil's winning position — the bin where $100 purchases land — directly encodes the price: ``` price = 10^(10 − bin / 200) dollars ``` -The search window is bounded, so the oracle must track incrementally block by block from a known seed price. +Parabolic interpolation between the best bin and its two neighbors refines the estimate to sub-bin precision. 
## Pipeline ``` - block ──→ filter ──→ histogram ──→ ring ──→ EMA ──→ stencil ──→ best bin ──→ $ - outputs 2,400 bins buffer 19-point parabolic - log-scale ×12 scoring interpolation + block ──→ filter ──→ histogram ──→ ring buffer ──→ EMA ──→ stencil ──→ best bin ──→ $ + outputs 2,400 bins ×12 19-point parabolic + log-scale scoring interpolation ``` ## Input formats @@ -136,29 +151,29 @@ All parameters via `Config` with sensible defaults: | Stencil | 19 round-USD offsets ($1 to $10k), each normalized to its own peak | 803-point Gaussian + weighted spike template targeting 17 round-USD amounts | | Round BTC handling | Excluded from histogram entirely | Histogram bins smoothed by averaging neighbors | | Output filtering | Per-output: script type, dust threshold, round BTC | Per-tx: exactly 2 outputs, ≤5 inputs, no same-day inputs, ≤500-byte witness | -| Validated from | Height 575,000 (May 2019) | December 2023 | +| Validated from | Height 550,000 (November 2018) | December 2023 | | Language | Rust | Python | | Dependencies | None (pure computation, caller provides block data) | Bitcoin Core RPC | | Bins per decade | 200 | 200 | ## Accuracy -Tested over 361,245 blocks (heights 575,000 to 936,244, as of February 2026) against exchange OHLC data. Error is measured per block as distance from the oracle estimate to the exchange high/low range at that height. If the oracle falls within the range, the error is zero. +Tested over 386,251 blocks (heights 550,000 to 937,447, as of February 2026) against exchange OHLC data. Error is measured per block as distance from the oracle estimate to the exchange high/low range at that height. If the oracle falls within the range, the error is zero. 
### Per-block | Metric | Value | |--------|-------| -| Median error | 0.10% | -| 95th percentile | 0.55% | -| 99th percentile | 1.4% | -| 99.9th percentile | 4.4% | -| RMSE | 0.38% | -| Max error | 18.1% | -| Bias | +0.04 bins (essentially zero) | -| Blocks > 5% error | 237 (0.07%) | -| Blocks > 10% error | 22 (0.006%) | -| Blocks > 20% error | 0 | +| Median error | 0.11% | +| 95th percentile | 0.66% | +| 99th percentile | 1.6% | +| 99.9th percentile | 6.2% | +| RMSE | 0.52% | +| Max error | 33.4% | +| Bias | +0.01 bins (essentially zero) | +| Blocks > 5% error | 519 (0.13%) | +| Blocks > 10% error | 203 (0.05%) | +| Blocks > 20% error | 5 (0.001%) | ### Daily candles @@ -166,22 +181,30 @@ Oracle daily OHLC built from per-block prices vs exchange daily OHLC: | | Median | RMSE | Max | |-------|--------|------|-----| -| Open | 0.20% | 0.49% | 5.9% | -| High | 0.54% | 0.87% | 9.1% | -| Low | 0.48% | 1.31% | 19.7% | -| Close | 0.23% | 0.58% | 6.9% | +| Open | 0.21% | 0.59% | 15.4% | +| High | 0.53% | 1.18% | 28.0% | +| Low | 0.50% | 1.52% | 19.6% | +| Close | 0.24% | 0.74% | 15.5% | ### By year -| Year | Blocks | Median | RMSE | Max | >5% | >10% | Price range | -|------|--------|--------|------|-----|-----|------|-------------| -| 2019 | 35,764 | 0.10% | 0.61% | 17.2% | 103 | 16 | $5,656–$13,868 | -| 2020 | 53,102 | 0.10% | 0.48% | 18.2% | 85 | 15 | $3,858–$29,322 | -| 2021 | 52,733 | 0.07% | 0.47% | 14.4% | 38 | 9 | $27,678–$69,000 | -| 2022 | 53,230 | 0.07% | 0.32% | 6.8% | 10 | 0 | $15,460–$48,240 | -| 2023 | 54,032 | 0.10% | 0.25% | 6.7% | 5 | 0 | $16,490–$44,700 | -| 2024 | 53,367 | 0.11% | 0.31% | 9.7% | 16 | 0 | $38,555–$108,298 | -| 2025 | 53,113 | 0.11% | 0.25% | 5.8% | 4 | 0 | $74,409–$126,198 | -| 2026 | 5,904 | 0.11% | 0.27% | 3.3% | 0 | 0 | $60,000–$97,900 | +| Year | Blocks | Median | RMSE | Max | >5% | >10% | >20% | Price range | +|------|--------|--------|------|-----|-----|------|------|-------------| +| 2018 | 6,492 | 0.69% | 2.34% | 33.4% | 183 | 122 | 5 | 
$3,129–$6,293 | +| 2019 | 54,272 | 0.16% | 0.74% | 17.4% | 195 | 69 | 0 | $3,338–$13,868 | +| 2020 | 53,102 | 0.10% | 0.43% | 18.1% | 68 | 3 | 0 | $3,858–$29,322 | +| 2021 | 52,733 | 0.07% | 0.47% | 14.4% | 38 | 9 | 0 | $27,678–$69,000 | +| 2022 | 53,230 | 0.07% | 0.32% | 6.8% | 10 | 0 | 0 | $15,460–$48,240 | +| 2023 | 54,032 | 0.10% | 0.25% | 6.7% | 5 | 0 | 0 | $16,490–$44,700 | +| 2024 | 53,367 | 0.11% | 0.31% | 9.7% | 16 | 0 | 0 | $38,555–$108,298 | +| 2025 | 53,113 | 0.11% | 0.25% | 5.8% | 4 | 0 | 0 | $74,409–$126,198 | +| 2026 | 5,910 | 0.10% | 0.27% | 3.3% | 0 | 0 | 0 | $60,000–$97,900 | -Accuracy improves over time as on-chain transaction volume grows. Since 2022, zero blocks exceed 10% error. All worst-case errors occur during the fastest intraday price moves in 2019 to 2021. +The oracle is only as good as the signal it reads. In late 2018 on-chain transaction volume was low and the round-dollar pattern was weak, so the first few thousand blocks are noisy (33% max error, 2.3% RMSE). By 2020 the signal is strong enough for 0.1% median accuracy. Since 2022, zero blocks exceed 10% error. + +### Why no outlier smoothing? + +Post-hoc smoothing — for example, correcting any block whose price deviates more than 5% from both its neighbors — would improve the aggregate numbers. This is deliberately not done, for two reasons: + +1. **Simplicity**: The oracle is a single forward pass with no lookback corrections. Adding smoothing means defining thresholds, neighbor windows, and replacement strategies, all of which add complexity for marginal gain. +2. **Finality**: Each block's price is produced once and never revised (unless the block itself is reorged). Downstream consumers can treat the oracle output as append-only. Smoothing would require retroactively changing already-published prices, breaking that property. 
diff --git a/crates/brk_oracle/examples/compare_digits.rs b/crates/brk_oracle/examples/compare_digits.rs index 7fd44e206..99d584f81 100644 --- a/crates/brk_oracle/examples/compare_digits.rs +++ b/crates/brk_oracle/examples/compare_digits.rs @@ -8,7 +8,7 @@ use std::time::Instant; use brk_indexer::Indexer; use brk_oracle::{Config, NUM_BINS, Oracle, PRICES, START_HEIGHT, cents_to_bin, sats_to_bin}; use brk_types::{OutputType, Sats, TxIndex, TxOutIndex}; -use vecdb::{AnyVec, VecIndex, VecIterator}; +use vecdb::{AnyVec, ReadableVec, VecIndex}; const BINS_5PCT: f64 = 4.24; const BINS_10PCT: f64 = 8.28; @@ -153,47 +153,35 @@ fn main() { let total_txs = indexer.vecs.transactions.height.len(); let total_outputs = indexer.vecs.outputs.value.len(); - let mut first_txindex_iter = indexer.vecs.transactions.first_txindex.into_iter(); - let mut first_txoutindex_iter = indexer.vecs.transactions.first_txoutindex.into_iter(); - let mut out_first_iter = indexer.vecs.outputs.first_txoutindex.into_iter(); - let mut value_iter = indexer.vecs.outputs.value.into_iter(); - let mut outputtype_iter = indexer.vecs.outputs.outputtype.into_iter(); + let first_txindex: Vec = indexer.vecs.transactions.first_txindex.collect(); + let out_first: Vec = indexer.vecs.outputs.first_txoutindex.collect(); let ref_config = Config::default(); let earliest_start = *start_heights.iter().min().unwrap(); for h in START_HEIGHT..total_heights { - let first_txindex: TxIndex = first_txindex_iter.get_at_unwrap(h); - let next_first_txindex = first_txindex_iter - .get_at(h + 1) - .unwrap_or(TxIndex::from(total_txs)); + let ft = first_txindex[h]; + let next_ft = first_txindex.get(h + 1).copied().unwrap_or(TxIndex::from(total_txs)); - let out_start = if first_txindex.to_usize() + 1 < next_first_txindex.to_usize() { - first_txoutindex_iter - .get_at_unwrap(first_txindex.to_usize() + 1) - .to_usize() + let out_start = if ft.to_usize() + 1 < next_ft.to_usize() { + indexer.vecs.transactions.first_txoutindex.collect_one(ft 
+ 1).unwrap().to_usize() } else { - out_first_iter - .get_at(h + 1) - .unwrap_or(TxOutIndex::from(total_outputs)) - .to_usize() + out_first.get(h + 1).copied().unwrap_or(TxOutIndex::from(total_outputs)).to_usize() }; - let out_end = out_first_iter - .get_at(h + 1) - .unwrap_or(TxOutIndex::from(total_outputs)) - .to_usize(); + let out_end = out_first.get(h + 1).copied().unwrap_or(TxOutIndex::from(total_outputs)).to_usize(); if h < earliest_start { continue; } + let values: Vec = indexer.vecs.outputs.value.collect_range_at(out_start, out_end); + let output_types: Vec = indexer.vecs.outputs.outputtype.collect_range_at(out_start, out_end); + // Build full histogram and per-digit histograms. let mut full_hist = [0u32; NUM_BINS]; let mut digit_hist = [[0u32; NUM_BINS]; 9]; - for i in out_start..out_end { - let sats: Sats = value_iter.get_at_unwrap(i); - let output_type: OutputType = outputtype_iter.get_at_unwrap(i); + for (sats, output_type) in values.into_iter().zip(output_types) { if ref_config.excluded_output_types.contains(&output_type) { continue; } diff --git a/crates/brk_oracle/examples/determinism.rs b/crates/brk_oracle/examples/determinism.rs index 303e950ba..cdd14f738 100644 --- a/crates/brk_oracle/examples/determinism.rs +++ b/crates/brk_oracle/examples/determinism.rs @@ -13,7 +13,7 @@ use std::path::PathBuf; use brk_indexer::Indexer; use brk_oracle::{Config, NUM_BINS, Oracle, PRICES, START_HEIGHT, cents_to_bin, sats_to_bin}; use brk_types::{OutputType, Sats, TxIndex, TxOutIndex}; -use vecdb::{AnyVec, VecIndex, VecIterator}; +use vecdb::{AnyVec, ReadableVec, VecIndex}; fn seed_bin(height: usize) -> f64 { let price: f64 = PRICES @@ -49,11 +49,8 @@ fn main() { let total_txs = indexer.vecs.transactions.height.len(); let total_outputs = indexer.vecs.outputs.value.len(); - let mut first_txindex_iter = indexer.vecs.transactions.first_txindex.into_iter(); - let mut first_txoutindex_iter = indexer.vecs.transactions.first_txoutindex.into_iter(); - let mut 
out_first_iter = indexer.vecs.outputs.first_txoutindex.into_iter(); - let mut value_iter = indexer.vecs.outputs.value.into_iter(); - let mut outputtype_iter = indexer.vecs.outputs.outputtype.into_iter(); + let first_txindex: Vec = indexer.vecs.transactions.first_txindex.collect(); + let out_first: Vec = indexer.vecs.outputs.first_txoutindex.collect(); let ref_config = Config::default(); @@ -77,30 +74,21 @@ fn main() { let end_height = (last_start + window_size + 100).min(total_heights); for h in START_HEIGHT..end_height { - let first_txindex: TxIndex = first_txindex_iter.get_at_unwrap(h); - let next_first_txindex = first_txindex_iter - .get_at(h + 1) - .unwrap_or(TxIndex::from(total_txs)); + let ft = first_txindex[h]; + let next_ft = first_txindex.get(h + 1).copied().unwrap_or(TxIndex::from(total_txs)); - let out_start = if first_txindex.to_usize() + 1 < next_first_txindex.to_usize() { - first_txoutindex_iter - .get_at_unwrap(first_txindex.to_usize() + 1) - .to_usize() + let out_start = if ft.to_usize() + 1 < next_ft.to_usize() { + indexer.vecs.transactions.first_txoutindex.collect_one(ft + 1).unwrap().to_usize() } else { - out_first_iter - .get_at(h + 1) - .unwrap_or(TxOutIndex::from(total_outputs)) - .to_usize() + out_first.get(h + 1).copied().unwrap_or(TxOutIndex::from(total_outputs)).to_usize() }; - let out_end = out_first_iter - .get_at(h + 1) - .unwrap_or(TxOutIndex::from(total_outputs)) - .to_usize(); + let out_end = out_first.get(h + 1).copied().unwrap_or(TxOutIndex::from(total_outputs)).to_usize(); + + let values: Vec = indexer.vecs.outputs.value.collect_range_at(out_start, out_end); + let output_types: Vec = indexer.vecs.outputs.outputtype.collect_range_at(out_start, out_end); let mut hist = [0u32; NUM_BINS]; - for i in out_start..out_end { - let sats: Sats = value_iter.get_at_unwrap(i); - let output_type: OutputType = outputtype_iter.get_at_unwrap(i); + for (sats, output_type) in values.into_iter().zip(output_types) { if 
ref_config.excluded_output_types.contains(&output_type) { continue; } diff --git a/crates/brk_oracle/examples/noise.rs b/crates/brk_oracle/examples/noise.rs index 6f06163dc..77279d43d 100644 --- a/crates/brk_oracle/examples/noise.rs +++ b/crates/brk_oracle/examples/noise.rs @@ -114,13 +114,13 @@ fn main() { .vecs .transactions .first_txindex - .collect_one(h) + .collect_one_at(h) .unwrap(); let next_first_txindex: TxIndex = indexer .vecs .transactions .first_txindex - .collect_one(h + 1) + .collect_one_at(h + 1) .unwrap_or(TxIndex::from(total_txs)); let out_start = if first_txindex.to_usize() + 1 < next_first_txindex.to_usize() { @@ -132,7 +132,7 @@ fn main() { .vecs .outputs .first_txoutindex - .collect_one(h + 1) + .collect_one_at(h + 1) .unwrap_or(TxOutIndex::from(total_outputs)) .to_usize() }; @@ -140,7 +140,7 @@ fn main() { .vecs .outputs .first_txoutindex - .collect_one(h + 1) + .collect_one_at(h + 1) .unwrap_or(TxOutIndex::from(total_outputs)) .to_usize(); @@ -174,7 +174,7 @@ fn main() { low_bin, }); - if (h - lowest) % 50_000 == 0 { + if (h - lowest).is_multiple_of(50_000) { eprint!( "\r {}/{} ({:.0}%)", h - lowest, diff --git a/crates/brk_oracle/examples/report.rs b/crates/brk_oracle/examples/report.rs index b26e7fb3a..f4562a0fd 100644 --- a/crates/brk_oracle/examples/report.rs +++ b/crates/brk_oracle/examples/report.rs @@ -9,10 +9,10 @@ use brk_oracle::{ Config, NUM_BINS, Oracle, PRICES, START_HEIGHT, bin_to_cents, cents_to_bin, sats_to_bin, }; use brk_types::{OutputType, Sats, TxIndex, TxOutIndex}; -use vecdb::{AnyVec, VecIndex, VecIterator}; +use vecdb::{AnyVec, ReadableVec, VecIndex}; -/// DateIndex 1 = Jan 9, 2009 (block 1). For dates after genesis week: -/// dateindex = floor(timestamp / 86400) - 14252. +/// Day1 1 = Jan 9, 2009 (block 1). For dates after genesis week: +/// day1 = floor(timestamp / 86400) - 14252. 
const GENESIS_DAY: u32 = 14252; const BINS_5PCT: f64 = 4.24; @@ -108,7 +108,7 @@ impl YearStats { /// Oracle OHLC for a single day, built from per-block prices. struct DayCandle { - dateindex: usize, + day1: usize, open: f64, high: f64, low: f64, @@ -160,16 +160,16 @@ fn main() { }) .collect(); - // Read block timestamps for year + dateindex mapping. - let mut timestamp_iter = indexer.vecs.blocks.timestamp.into_iter(); - let mut height_years: Vec = Vec::with_capacity(total_heights); - let mut height_dateindexes: Vec = Vec::with_capacity(total_heights); - for h in 0..total_heights { - let ts: brk_types::Timestamp = timestamp_iter.get_at_unwrap(h); - let ts_u32 = *ts as u32; - height_years.push(timestamp_to_year(ts_u32)); - height_dateindexes.push((ts_u32 / 86400).saturating_sub(GENESIS_DAY) as usize); - } + // Read block timestamps for year + day1 mapping. + let timestamps: Vec = indexer.vecs.blocks.timestamp.collect(); + let height_years: Vec = timestamps + .iter() + .map(|ts| timestamp_to_year(**ts)) + .collect(); + let height_day1s: Vec = timestamps + .iter() + .map(|ts| (**ts / 86400).saturating_sub(GENESIS_DAY) as usize) + .collect(); let start_price: f64 = PRICES .lines() @@ -184,11 +184,9 @@ fn main() { let total_txs = indexer.vecs.transactions.height.len(); let total_outputs = indexer.vecs.outputs.value.len(); - let mut first_txindex_iter = indexer.vecs.transactions.first_txindex.into_iter(); - let mut first_txoutindex_iter = indexer.vecs.transactions.first_txoutindex.into_iter(); - let mut out_first_iter = indexer.vecs.outputs.first_txoutindex.into_iter(); - let mut value_iter = indexer.vecs.outputs.value.into_iter(); - let mut outputtype_iter = indexer.vecs.outputs.outputtype.into_iter(); + // Pre-collect height-indexed vecs (small). Transaction-indexed vecs are too large. 
+ let first_txindex: Vec = indexer.vecs.transactions.first_txindex.collect(); + let out_first: Vec = indexer.vecs.outputs.first_txoutindex.collect(); let ref_config = Config::default(); @@ -202,30 +200,46 @@ fn main() { let mut current_di: Option = None; for h in START_HEIGHT..total_heights { - let first_txindex: TxIndex = first_txindex_iter.get_at_unwrap(h); - let next_first_txindex = first_txindex_iter - .get_at(h + 1) + let ft = first_txindex[h]; + let next_ft = first_txindex + .get(h + 1) + .copied() .unwrap_or(TxIndex::from(total_txs)); - let out_start = if first_txindex.to_usize() + 1 < next_first_txindex.to_usize() { - first_txoutindex_iter - .get_at_unwrap(first_txindex.to_usize() + 1) + let out_start = if ft.to_usize() + 1 < next_ft.to_usize() { + indexer + .vecs + .transactions + .first_txoutindex + .collect_one(ft + 1) + .unwrap() .to_usize() } else { - out_first_iter - .get_at(h + 1) + out_first + .get(h + 1) + .copied() .unwrap_or(TxOutIndex::from(total_outputs)) .to_usize() }; - let out_end = out_first_iter - .get_at(h + 1) + let out_end = out_first + .get(h + 1) + .copied() .unwrap_or(TxOutIndex::from(total_outputs)) .to_usize(); + let values: Vec = indexer + .vecs + .outputs + .value + .collect_range_at(out_start, out_end); + let output_types: Vec = indexer + .vecs + .outputs + .outputtype + .collect_range_at(out_start, out_end); + let mut hist = [0u32; NUM_BINS]; - for i in out_start..out_end { - let sats: Sats = value_iter.get_at_unwrap(i); - let output_type: OutputType = outputtype_iter.get_at_unwrap(i); + for (sats, output_type) in values.into_iter().zip(output_types) { if ref_config.excluded_output_types.contains(&output_type) { continue; } @@ -243,11 +257,11 @@ fn main() { let oracle_price = bin_to_cents(ref_bin) as f64 / 100.0; // Build oracle daily candle. 
- let di = height_dateindexes[h]; + let di = height_day1s[h]; if current_di != Some(di) { current_di = Some(di); oracle_candles.push(DayCandle { - dateindex: di, + day1: di, open: oracle_price, high: oracle_price, low: oracle_price, @@ -319,7 +333,7 @@ fn main() { let mut daily_days = 0u64; for candle in &oracle_candles { - let di = candle.dateindex; + let di = candle.day1; if di >= daily_ohlc.len() { continue; } diff --git a/crates/brk_oracle/examples/sweep_digits.rs b/crates/brk_oracle/examples/sweep_digits.rs index 2c86f7612..b1b361248 100644 --- a/crates/brk_oracle/examples/sweep_digits.rs +++ b/crates/brk_oracle/examples/sweep_digits.rs @@ -14,7 +14,7 @@ use std::time::Instant; use brk_indexer::Indexer; use brk_oracle::{Config, NUM_BINS, Oracle, PRICES, START_HEIGHT, cents_to_bin, sats_to_bin}; use brk_types::{OutputType, Sats, TxIndex, TxOutIndex}; -use vecdb::{AnyVec, VecIndex, VecIterator}; +use vecdb::{AnyVec, ReadableVec, VecIndex}; const BINS_5PCT: f64 = 4.24; const BINS_10PCT: f64 = 8.28; @@ -166,47 +166,35 @@ fn main() { let total_txs = indexer.vecs.transactions.height.len(); let total_outputs = indexer.vecs.outputs.value.len(); - let mut first_txindex_iter = indexer.vecs.transactions.first_txindex.into_iter(); - let mut first_txoutindex_iter = indexer.vecs.transactions.first_txoutindex.into_iter(); - let mut out_first_iter = indexer.vecs.outputs.first_txoutindex.into_iter(); - let mut value_iter = indexer.vecs.outputs.value.into_iter(); - let mut outputtype_iter = indexer.vecs.outputs.outputtype.into_iter(); + let first_txindex: Vec = indexer.vecs.transactions.first_txindex.collect(); + let out_first: Vec = indexer.vecs.outputs.first_txoutindex.collect(); let ref_config = Config::default(); let total_blocks = total_heights - sweep_start; let mut blocks: Vec = Vec::with_capacity(total_blocks); for h in START_HEIGHT..total_heights { - let first_txindex: TxIndex = first_txindex_iter.get_at_unwrap(h); - let next_first_txindex = first_txindex_iter - 
.get_at(h + 1) - .unwrap_or(TxIndex::from(total_txs)); + let ft = first_txindex[h]; + let next_ft = first_txindex.get(h + 1).copied().unwrap_or(TxIndex::from(total_txs)); - let out_start = if first_txindex.to_usize() + 1 < next_first_txindex.to_usize() { - first_txoutindex_iter - .get_at_unwrap(first_txindex.to_usize() + 1) - .to_usize() + let out_start = if ft.to_usize() + 1 < next_ft.to_usize() { + indexer.vecs.transactions.first_txoutindex.collect_one(ft + 1).unwrap().to_usize() } else { - out_first_iter - .get_at(h + 1) - .unwrap_or(TxOutIndex::from(total_outputs)) - .to_usize() + out_first.get(h + 1).copied().unwrap_or(TxOutIndex::from(total_outputs)).to_usize() }; - let out_end = out_first_iter - .get_at(h + 1) - .unwrap_or(TxOutIndex::from(total_outputs)) - .to_usize(); + let out_end = out_first.get(h + 1).copied().unwrap_or(TxOutIndex::from(total_outputs)).to_usize(); if h < sweep_start { continue; } + let values: Vec = indexer.vecs.outputs.value.collect_range_at(out_start, out_end); + let output_types: Vec = indexer.vecs.outputs.outputtype.collect_range_at(out_start, out_end); + let mut full_hist = Box::new([0u32; NUM_BINS]); let mut round_outputs = Vec::new(); - for i in out_start..out_end { - let sats: Sats = value_iter.get_at_unwrap(i); - let output_type: OutputType = outputtype_iter.get_at_unwrap(i); + for (sats, output_type) in values.into_iter().zip(output_types) { if ref_config.excluded_output_types.contains(&output_type) { continue; } diff --git a/crates/brk_oracle/examples/sweep_tolerance.rs b/crates/brk_oracle/examples/sweep_tolerance.rs index 90671ffbc..0fcf6e981 100644 --- a/crates/brk_oracle/examples/sweep_tolerance.rs +++ b/crates/brk_oracle/examples/sweep_tolerance.rs @@ -14,7 +14,7 @@ use std::time::Instant; use brk_indexer::Indexer; use brk_oracle::{Config, NUM_BINS, Oracle, PRICES, START_HEIGHT, cents_to_bin, sats_to_bin}; use brk_types::{OutputType, Sats, TxIndex, TxOutIndex}; -use vecdb::{AnyVec, VecIndex, VecIterator}; +use 
vecdb::{AnyVec, ReadableVec, VecIndex}; const BINS_5PCT: f64 = 4.24; const BINS_10PCT: f64 = 8.28; @@ -164,11 +164,8 @@ fn main() { let total_txs = indexer.vecs.transactions.height.len(); let total_outputs = indexer.vecs.outputs.value.len(); - let mut first_txindex_iter = indexer.vecs.transactions.first_txindex.into_iter(); - let mut first_txoutindex_iter = indexer.vecs.transactions.first_txoutindex.into_iter(); - let mut out_first_iter = indexer.vecs.outputs.first_txoutindex.into_iter(); - let mut value_iter = indexer.vecs.outputs.value.into_iter(); - let mut outputtype_iter = indexer.vecs.outputs.outputtype.into_iter(); + let first_txindex: Vec = indexer.vecs.transactions.first_txindex.collect(); + let out_first: Vec = indexer.vecs.outputs.first_txoutindex.collect(); let ref_config = Config::default(); let total_blocks = total_heights - sweep_start; @@ -179,36 +176,27 @@ fn main() { let max_tolerance: f64 = 0.05; for h in START_HEIGHT..total_heights { - let first_txindex: TxIndex = first_txindex_iter.get_at_unwrap(h); - let next_first_txindex = first_txindex_iter - .get_at(h + 1) - .unwrap_or(TxIndex::from(total_txs)); + let ft = first_txindex[h]; + let next_ft = first_txindex.get(h + 1).copied().unwrap_or(TxIndex::from(total_txs)); - let out_start = if first_txindex.to_usize() + 1 < next_first_txindex.to_usize() { - first_txoutindex_iter - .get_at_unwrap(first_txindex.to_usize() + 1) - .to_usize() + let out_start = if ft.to_usize() + 1 < next_ft.to_usize() { + indexer.vecs.transactions.first_txoutindex.collect_one(ft + 1).unwrap().to_usize() } else { - out_first_iter - .get_at(h + 1) - .unwrap_or(TxOutIndex::from(total_outputs)) - .to_usize() + out_first.get(h + 1).copied().unwrap_or(TxOutIndex::from(total_outputs)).to_usize() }; - let out_end = out_first_iter - .get_at(h + 1) - .unwrap_or(TxOutIndex::from(total_outputs)) - .to_usize(); + let out_end = out_first.get(h + 1).copied().unwrap_or(TxOutIndex::from(total_outputs)).to_usize(); if h < sweep_start { 
continue; } + let values: Vec = indexer.vecs.outputs.value.collect_range_at(out_start, out_end); + let output_types: Vec = indexer.vecs.outputs.outputtype.collect_range_at(out_start, out_end); + let mut full_hist = Box::new([0u32; NUM_BINS]); let mut round_outputs = Vec::new(); - for i in out_start..out_end { - let sats: Sats = value_iter.get_at_unwrap(i); - let output_type: OutputType = outputtype_iter.get_at_unwrap(i); + for (sats, output_type) in values.into_iter().zip(output_types) { if ref_config.excluded_output_types.contains(&output_type) { continue; } diff --git a/crates/brk_oracle/examples/validate.rs b/crates/brk_oracle/examples/validate.rs index 36a9f70f2..ac7fce9f1 100644 --- a/crates/brk_oracle/examples/validate.rs +++ b/crates/brk_oracle/examples/validate.rs @@ -11,7 +11,7 @@ use std::path::PathBuf; use brk_indexer::Indexer; use brk_oracle::{cents_to_bin, sats_to_bin, Config, Oracle, NUM_BINS, PRICES, START_HEIGHT}; use brk_types::{OutputType, Sats, TxIndex, TxOutIndex}; -use vecdb::{AnyVec, VecIndex, VecIterator}; +use vecdb::{AnyVec, ReadableVec, VecIndex}; const BINS_5PCT: f64 = 4.24; const BINS_10PCT: f64 = 8.28; @@ -136,40 +136,29 @@ fn main() { let total_txs = indexer.vecs.transactions.height.len(); let total_outputs = indexer.vecs.outputs.value.len(); - let mut first_txindex_iter = indexer.vecs.transactions.first_txindex.into_iter(); - let mut first_txoutindex_iter = indexer.vecs.transactions.first_txoutindex.into_iter(); - let mut out_first_iter = indexer.vecs.outputs.first_txoutindex.into_iter(); - let mut value_iter = indexer.vecs.outputs.value.into_iter(); - let mut outputtype_iter = indexer.vecs.outputs.outputtype.into_iter(); + // Pre-collect height-indexed vecs (small). Transaction-indexed vecs are too large. 
+ let first_txindex: Vec = indexer.vecs.transactions.first_txindex.collect(); + let out_first: Vec = indexer.vecs.outputs.first_txoutindex.collect(); let ref_config = Config::default(); for h in START_HEIGHT..total_heights { - let first_txindex: TxIndex = first_txindex_iter.get_at_unwrap(h); - let next_first_txindex = first_txindex_iter - .get_at(h + 1) - .unwrap_or(TxIndex::from(total_txs)); + let ft = first_txindex[h]; + let next_ft = first_txindex.get(h + 1).copied().unwrap_or(TxIndex::from(total_txs)); - let out_start = if first_txindex.to_usize() + 1 < next_first_txindex.to_usize() { - first_txoutindex_iter - .get_at_unwrap(first_txindex.to_usize() + 1) - .to_usize() + let out_start = if ft.to_usize() + 1 < next_ft.to_usize() { + indexer.vecs.transactions.first_txoutindex.collect_one(ft + 1).unwrap().to_usize() } else { - out_first_iter - .get_at(h + 1) - .unwrap_or(TxOutIndex::from(total_outputs)) - .to_usize() + out_first.get(h + 1).copied().unwrap_or(TxOutIndex::from(total_outputs)).to_usize() }; - let out_end = out_first_iter - .get_at(h + 1) - .unwrap_or(TxOutIndex::from(total_outputs)) - .to_usize(); + let out_end = out_first.get(h + 1).copied().unwrap_or(TxOutIndex::from(total_outputs)).to_usize(); // Build filtered histogram once for all oracles. 
+ let values: Vec = indexer.vecs.outputs.value.collect_range_at(out_start, out_end); + let output_types: Vec = indexer.vecs.outputs.outputtype.collect_range_at(out_start, out_end); + let mut hist = [0u32; NUM_BINS]; - for i in out_start..out_end { - let sats: Sats = value_iter.get_at_unwrap(i); - let output_type: OutputType = outputtype_iter.get_at_unwrap(i); + for (sats, output_type) in values.into_iter().zip(output_types) { if ref_config.excluded_output_types.contains(&output_type) { continue; } diff --git a/crates/brk_query/Cargo.toml b/crates/brk_query/Cargo.toml index 08a997ab8..992d59d1a 100644 --- a/crates/brk_query/Cargo.toml +++ b/crates/brk_query/Cargo.toml @@ -6,6 +6,7 @@ edition.workspace = true version.workspace = true homepage.workspace = true repository.workspace = true +exclude = ["examples/"] [features] tokio = ["dep:tokio", "brk_error/tokio"] @@ -17,7 +18,7 @@ brk_error = { workspace = true, features = ["jiff", "vecdb"] } brk_indexer = { workspace = true } brk_mempool = { workspace = true } brk_reader = { workspace = true } -brk_rpc = { workspace = true } +brk_rpc = { workspace = true, features = ["corepc"] } brk_traversable = { workspace = true } brk_types = { workspace = true } derive_more = { workspace = true } diff --git a/crates/brk_query/README.md b/crates/brk_query/README.md index b44da23a0..b63302164 100644 --- a/crates/brk_query/README.md +++ b/crates/brk_query/README.md @@ -66,7 +66,7 @@ let height = async_query.inner().height(); ## Recommended: mimalloc v3 -Use [mimalloc v3](https://crates.io/crates/mimalloc) as the global allocator to reduce memory usage. +Use [mimalloc v3](https://crates.io/crates/mimalloc) as the global allocator. Query operations involve many short-lived allocations; mimalloc handles this with less fragmentation and lower peak memory than the system allocator. 
## Built On diff --git a/crates/brk_query/examples/query.rs b/crates/brk_query/examples/query.rs index b52c170fd..852e6725a 100644 --- a/crates/brk_query/examples/query.rs +++ b/crates/brk_query/examples/query.rs @@ -1,4 +1,4 @@ -use std::{env, fs, path::Path, thread}; +use std::{env, fs, path::Path}; use brk_computer::Computer; use brk_error::Result; @@ -11,15 +11,6 @@ use brk_types::{Address, OutputType}; use vecdb::Exit; pub fn main() -> Result<()> { - // Can't increase main thread's stack size, thus we need to use another thread - thread::Builder::new() - .stack_size(512 * 1024 * 1024) - .spawn(run)? - .join() - .unwrap() -} - -fn run() -> Result<()> { let bitcoin_dir = Client::default_bitcoin_path(); // let bitcoin_dir = Path::new("/Volumes/WD_BLACK1/bitcoin"); @@ -44,11 +35,11 @@ fn run() -> Result<()> { let indexer = Indexer::forced_import(&outputs_dir)?; - let computer = Computer::forced_import(&outputs_dir, &indexer, None)?; + let computer = Computer::forced_import(&outputs_dir, &indexer)?; let mempool = Mempool::new(&client); let mempool_clone = mempool.clone(); - thread::spawn(move || { + std::thread::spawn(move || { mempool_clone.start(); }); diff --git a/crates/brk_query/src/impl/address.rs b/crates/brk_query/src/impl/address.rs index f5123d3ff..684be2dd5 100644 --- a/crates/brk_query/src/impl/address.rs +++ b/crates/brk_query/src/impl/address.rs @@ -7,7 +7,7 @@ use brk_types::{ AddressIndexTxIndex, AddressStats, AnyAddressDataIndexEnum, OutputType, Sats, TxIndex, TxStatus, Txid, TypeIndex, Unit, Utxo, Vout, }; -use vecdb::TypedVecIterator; +use vecdb::{ReadableVec, VecIndex}; use crate::Query; @@ -62,14 +62,14 @@ impl Query { .distribution .addresses_data .funded - .iter()? - .get_unwrap(index), + .reader() + .get(usize::from(index)), AnyAddressDataIndexEnum::Empty(index) => computer .distribution .addresses_data .empty - .iter()? 
- .get_unwrap(index) + .reader() + .get(usize::from(index)) .into(), }; @@ -138,10 +138,10 @@ impl Query { .map(|(key, _)| key.txindex()) .collect(); - let mut txindex_to_txid_iter = indexer.vecs.transactions.txid.iter()?; + let txid_reader = indexer.vecs.transactions.txid.reader(); let txids: Vec = txindices .into_iter() - .map(|txindex| txindex_to_txid_iter.get_unwrap(txindex)) + .map(|txindex| txid_reader.get(txindex.to_usize())) .collect(); Ok(txids) @@ -166,23 +166,21 @@ impl Query { .map(|(key, _): (AddressIndexOutPoint, Unit)| (key.txindex(), key.vout())) .collect(); - let mut txindex_to_txid_iter = vecs.transactions.txid.iter()?; - let mut txindex_to_height_iter = vecs.transactions.height.iter()?; - let mut txindex_to_first_txoutindex_iter = vecs.transactions.first_txoutindex.iter()?; - let mut txoutindex_to_value_iter = vecs.outputs.value.iter()?; - let mut height_to_blockhash_iter = vecs.blocks.blockhash.iter()?; - let mut height_to_timestamp_iter = vecs.blocks.timestamp.iter()?; + let txid_reader = vecs.transactions.txid.reader(); + let first_txoutindex_reader = vecs.transactions.first_txoutindex.reader(); + let value_reader = vecs.outputs.value.reader(); + let blockhash_reader = vecs.blocks.blockhash.reader(); let utxos: Vec = outpoints .into_iter() .map(|(txindex, vout)| { - let txid: Txid = txindex_to_txid_iter.get_unwrap(txindex); - let height = txindex_to_height_iter.get_unwrap(txindex); - let first_txoutindex = txindex_to_first_txoutindex_iter.get_unwrap(txindex); + let txid: Txid = txid_reader.get(txindex.to_usize()); + let height = vecs.transactions.height.collect_one_at(txindex.to_usize()).unwrap(); + let first_txoutindex = first_txoutindex_reader.get(txindex.to_usize()); let txoutindex = first_txoutindex + vout; - let value: Sats = txoutindex_to_value_iter.get_unwrap(txoutindex); - let block_hash = height_to_blockhash_iter.get_unwrap(height); - let block_time = height_to_timestamp_iter.get_unwrap(height); + let value: Sats = 
value_reader.get(usize::from(txoutindex)); + let block_hash = blockhash_reader.get(usize::from(height)); + let block_time = vecs.blocks.timestamp.collect_one_at(usize::from(height)).unwrap(); Utxo { txid, diff --git a/crates/brk_query/src/impl/block/info.rs b/crates/brk_query/src/impl/block/info.rs index e056b4f7e..3463db3a0 100644 --- a/crates/brk_query/src/impl/block/info.rs +++ b/crates/brk_query/src/impl/block/info.rs @@ -1,6 +1,6 @@ use brk_error::{Error, Result}; use brk_types::{BlockHash, BlockHashPrefix, BlockInfo, Height, TxIndex}; -use vecdb::{AnyVec, GenericStoredVec, VecIndex}; +use vecdb::{AnyVec, ReadableVec, VecIndex}; use crate::Query; @@ -21,10 +21,10 @@ impl Query { } let blockhash = indexer.vecs.blocks.blockhash.read_once(height)?; - let difficulty = indexer.vecs.blocks.difficulty.read_once(height)?; - let timestamp = indexer.vecs.blocks.timestamp.read_once(height)?; - let size = indexer.vecs.blocks.total_size.read_once(height)?; - let weight = indexer.vecs.blocks.weight.read_once(height)?; + let difficulty = indexer.vecs.blocks.difficulty.collect_one(height).unwrap(); + let timestamp = indexer.vecs.blocks.timestamp.collect_one(height).unwrap(); + let size = indexer.vecs.blocks.total_size.collect_one(height).unwrap(); + let weight = indexer.vecs.blocks.weight.collect_one(height).unwrap(); let tx_count = self.tx_count_at_height(height, max_height)?; Ok(BlockInfo { @@ -45,12 +45,58 @@ impl Query { let start = start.min(max_height); let start_u32: u32 = start.into(); - let count = DEFAULT_BLOCK_COUNT.min(start_u32 + 1); + let count = DEFAULT_BLOCK_COUNT.min(start_u32 + 1) as usize; - let mut blocks = Vec::with_capacity(count as usize); - for i in 0..count { - let height = Height::from(start_u32 - i); - blocks.push(self.block_by_height(height)?); + if count == 0 { + return Ok(Vec::new()); + } + + let indexer = self.indexer(); + let computer = self.computer(); + + // Batch-read all PcoVec data for the contiguous range (avoids + // per-block page 
decompression — 4 reads instead of 4*count). + let end = start_u32 as usize + 1; + let begin = end - count; + + let difficulties = indexer.vecs.blocks.difficulty.collect_range_at(begin, end); + let timestamps = indexer.vecs.blocks.timestamp.collect_range_at(begin, end); + let sizes = indexer.vecs.blocks.total_size.collect_range_at(begin, end); + let weights = indexer.vecs.blocks.weight.collect_range_at(begin, end); + + // Batch-read first_txindex for tx_count computation (need one extra for next boundary) + let txindex_end = if end <= max_height.to_usize() { + end + 1 + } else { + end + }; + let first_txindexes: Vec = indexer + .vecs + .transactions + .first_txindex + .collect_range_at(begin, txindex_end); + let total_txs = computer.indexes.txindex.identity.len(); + + let mut blocks = Vec::with_capacity(count); + for i in (0..count).rev() { + let height = Height::from(begin + i); + let blockhash = indexer.vecs.blocks.blockhash.read_once(height)?; + + let tx_count = if i + 1 < first_txindexes.len() { + first_txindexes[i + 1].to_usize() - first_txindexes[i].to_usize() + } else { + total_txs - first_txindexes[i].to_usize() + }; + + blocks.push(BlockInfo { + id: blockhash, + height, + tx_count: tx_count as u32, + size: *sizes[i], + weight: weights[i], + timestamp: timestamps[i], + difficulty: *difficulties[i], + }); } Ok(blocks) @@ -72,27 +118,26 @@ impl Query { } fn max_height(&self) -> Height { - Height::from( - self.indexer() - .vecs - .blocks - .blockhash - .len() - .saturating_sub(1), - ) + Height::from(self.indexer().vecs.blocks.blockhash.len().saturating_sub(1)) } fn tx_count_at_height(&self, height: Height, max_height: Height) -> Result { let indexer = self.indexer(); let computer = self.computer(); - let first_txindex = indexer.vecs.transactions.first_txindex.read_once(height)?; + let first_txindex = indexer + .vecs + .transactions + .first_txindex + .collect_one(height) + .unwrap(); let next_first_txindex = if height < max_height { indexer .vecs .transactions 
.first_txindex - .read_once(height.incremented())? + .collect_one(height.incremented()) + .unwrap() } else { TxIndex::from(computer.indexes.txindex.identity.len()) }; diff --git a/crates/brk_query/src/impl/block/raw.rs b/crates/brk_query/src/impl/block/raw.rs index c18a0be7a..ef4ec0c5a 100644 --- a/crates/brk_query/src/impl/block/raw.rs +++ b/crates/brk_query/src/impl/block/raw.rs @@ -1,6 +1,6 @@ use brk_error::{Error, Result}; use brk_types::{BlockHash, Height}; -use vecdb::{AnyVec, GenericStoredVec}; +use vecdb::{AnyVec, ReadableVec}; use crate::Query; @@ -20,8 +20,8 @@ impl Query { return Err(Error::OutOfRange("Block height out of range".into())); } - let position = computer.positions.block_position.read_once(height)?; - let size = indexer.vecs.blocks.total_size.read_once(height)?; + let position = computer.positions.block_position.collect_one(height).unwrap(); + let size = indexer.vecs.blocks.total_size.collect_one(height).unwrap(); reader.read_raw_bytes(position, *size as usize) } diff --git a/crates/brk_query/src/impl/block/status.rs b/crates/brk_query/src/impl/block/status.rs index 7ae8ecfbf..51ec38167 100644 --- a/crates/brk_query/src/impl/block/status.rs +++ b/crates/brk_query/src/impl/block/status.rs @@ -1,6 +1,6 @@ use brk_error::Result; use brk_types::{BlockHash, BlockStatus, Height}; -use vecdb::{AnyVec, GenericStoredVec}; +use vecdb::AnyVec; use crate::Query; diff --git a/crates/brk_query/src/impl/block/timestamp.rs b/crates/brk_query/src/impl/block/timestamp.rs index c88b0511e..4d9b93b76 100644 --- a/crates/brk_query/src/impl/block/timestamp.rs +++ b/crates/brk_query/src/impl/block/timestamp.rs @@ -1,7 +1,7 @@ use brk_error::{Error, Result}; -use brk_types::{BlockTimestamp, Date, DateIndex, Height, Timestamp}; +use brk_types::{BlockTimestamp, Date, Day1, Height, Timestamp}; use jiff::Timestamp as JiffTimestamp; -use vecdb::{GenericStoredVec, TypedVecIterator}; +use vecdb::ReadableVec; use crate::Query; @@ -19,28 +19,26 @@ impl Query { let target = 
timestamp; let date = Date::from(target); - let dateindex = DateIndex::try_from(date).unwrap_or_default(); + let day1 = Day1::try_from(date).unwrap_or_default(); // Get first height of the target date let first_height_of_day = computer .indexes - .dateindex + .day1 .first_height - .read_once(dateindex) + .collect_one(day1) .unwrap_or(Height::from(0usize)); let start: usize = usize::from(first_height_of_day).min(max_height_usize); - // Use iterator for efficient sequential access - let mut timestamp_iter = indexer.vecs.blocks.timestamp.iter()?; + let timestamps = &indexer.vecs.blocks.timestamp; // Search forward from start to find the last block <= target timestamp let mut best_height = start; - let mut best_ts = timestamp_iter.get_unwrap(Height::from(start)); + let mut best_ts = timestamps.collect_one_at(start).unwrap(); for h in (start + 1)..=max_height_usize { - let height = Height::from(h); - let block_ts = timestamp_iter.get_unwrap(height); + let block_ts = timestamps.collect_one_at(h).unwrap(); if block_ts <= target { best_height = h; best_ts = block_ts; @@ -51,8 +49,7 @@ impl Query { // Check one block before start in case we need to go backward if start > 0 && best_ts > target { - let prev_height = Height::from(start - 1); - let prev_ts = timestamp_iter.get_unwrap(prev_height); + let prev_ts = timestamps.collect_one_at(start - 1).unwrap(); if prev_ts <= target { best_height = start - 1; best_ts = prev_ts; @@ -60,12 +57,7 @@ impl Query { } let height = Height::from(best_height); - let blockhash = indexer - .vecs - .blocks - .blockhash - .iter()? 
- .get_unwrap(height); + let blockhash = indexer.vecs.blocks.blockhash.reader().get(usize::from(height)); // Convert timestamp to ISO 8601 format let ts_secs: i64 = (*best_ts).into(); diff --git a/crates/brk_query/src/impl/block/txs.rs b/crates/brk_query/src/impl/block/txs.rs index 023b8e0ac..2639bc390 100644 --- a/crates/brk_query/src/impl/block/txs.rs +++ b/crates/brk_query/src/impl/block/txs.rs @@ -1,6 +1,6 @@ use brk_error::{Error, Result}; use brk_types::{BlockHash, Height, Transaction, TxIndex, Txid}; -use vecdb::{AnyVec, GenericStoredVec, TypedVecIterator}; +use vecdb::{AnyVec, ReadableVec}; use super::BLOCK_TXS_PAGE_SIZE; use crate::Query; @@ -31,26 +31,22 @@ impl Query { return Err(Error::OutOfRange("Block height out of range".into())); } - let first_txindex = indexer.vecs.transactions.first_txindex.read_once(height)?; + let first_txindex = indexer.vecs.transactions.first_txindex.collect_one(height).unwrap(); let next_first_txindex = indexer .vecs .transactions .first_txindex - .read_once(height.incremented()) - .unwrap_or_else(|_| TxIndex::from(indexer.vecs.transactions.txid.len())); + .collect_one(height.incremented()) + .unwrap_or_else(|| TxIndex::from(indexer.vecs.transactions.txid.len())); let first: usize = first_txindex.into(); let next: usize = next_first_txindex.into(); - let count = next - first; let txids: Vec = indexer .vecs .transactions .txid - .iter()? 
- .skip(first) - .take(count) - .collect(); + .collect_range_at(first, next); Ok(txids) } @@ -67,13 +63,13 @@ impl Query { return Err(Error::OutOfRange("Block height out of range".into())); } - let first_txindex = indexer.vecs.transactions.first_txindex.read_once(height)?; + let first_txindex = indexer.vecs.transactions.first_txindex.collect_one(height).unwrap(); let next_first_txindex = indexer .vecs .transactions .first_txindex - .read_once(height.incremented()) - .unwrap_or_else(|_| TxIndex::from(indexer.vecs.transactions.txid.len())); + .collect_one(height.incremented()) + .unwrap_or_else(|| TxIndex::from(indexer.vecs.transactions.txid.len())); let first: usize = first_txindex.into(); let next: usize = next_first_txindex.into(); @@ -104,13 +100,13 @@ impl Query { return Err(Error::OutOfRange("Block height out of range".into())); } - let first_txindex = indexer.vecs.transactions.first_txindex.read_once(height)?; + let first_txindex = indexer.vecs.transactions.first_txindex.collect_one(height).unwrap(); let next_first_txindex = indexer .vecs .transactions .first_txindex - .read_once(height.incremented()) - .unwrap_or_else(|_| TxIndex::from(indexer.vecs.transactions.txid.len())); + .collect_one(height.incremented()) + .unwrap_or_else(|| TxIndex::from(indexer.vecs.transactions.txid.len())); let first: usize = first_txindex.into(); let next: usize = next_first_txindex.into(); @@ -120,8 +116,8 @@ impl Query { return Err(Error::OutOfRange("Transaction index out of range".into())); } - let txindex = TxIndex::from(first + index); - let txid = indexer.vecs.transactions.txid.iter()?.get_unwrap(txindex); + let txindex = first + index; + let txid = indexer.vecs.transactions.txid.reader().get(txindex); Ok(txid) } diff --git a/crates/brk_query/src/impl/cost_basis.rs b/crates/brk_query/src/impl/cost_basis.rs index 1523b41b9..e833759d6 100644 --- a/crates/brk_query/src/impl/cost_basis.rs +++ b/crates/brk_query/src/impl/cost_basis.rs @@ -2,9 +2,9 @@ use std::{fs, path::PathBuf}; 
use brk_error::{Error, Result}; use brk_types::{ - CostBasisBucket, CostBasisDistribution, CostBasisFormatted, CostBasisValue, Date, DateIndex, + CostBasisBucket, CostBasisDistribution, CostBasisFormatted, CostBasisValue, Date, Day1, }; -use vecdb::IterableVec; +use vecdb::ReadableVec; use crate::Query; @@ -81,20 +81,14 @@ impl Query { value: CostBasisValue, ) -> Result { let distribution = self.cost_basis_distribution(cohort, date)?; - let dateindex = - DateIndex::try_from(date).map_err(|e| Error::Parse(e.to_string()))?; - let price = self - .computer() - .price - .as_ref() - .ok_or_else(|| Error::NotFound("Price data not available".to_string()))?; - let spot = *price + let day1 = Day1::try_from(date).map_err(|e| Error::Parse(e.to_string()))?; + let price = &self.computer().prices; + let spot = price .cents .split - .dateindex .close - .iter() - .get(dateindex) + .day1 + .collect_one(day1) .ok_or_else(|| Error::NotFound(format!("No price data for {date}")))?; Ok(distribution.format(bucket, value, spot)) } diff --git a/crates/brk_query/src/impl/metrics.rs b/crates/brk_query/src/impl/metrics.rs index 5bcb6f35a..8fa5136ff 100644 --- a/crates/brk_query/src/impl/metrics.rs +++ b/crates/brk_query/src/impl/metrics.rs @@ -3,13 +3,14 @@ use std::collections::BTreeMap; use brk_error::{Error, Result}; use brk_traversable::TreeNode; use brk_types::{ - DetailedMetricCount, Format, Index, IndexInfo, Limit, Metric, MetricData, MetricOutput, - MetricSelection, Output, PaginatedMetrics, Pagination, PaginationIndex, + DetailedMetricCount, Etag, Format, Index, IndexInfo, LegacyValue, Limit, Metric, MetricData, + MetricOutput, MetricOutputLegacy, MetricSelection, Output, OutputLegacy, PaginatedMetrics, + Pagination, PaginationIndex, Version, }; use vecdb::AnyExportableVec; use crate::{ - Query, ResolvedQuery, + Query, vecs::{IndexToVec, MetricToVec}, }; @@ -120,15 +121,11 @@ impl Query { /// Resolve query metadata without formatting (cheap). 
/// Use with `format` for lazy formatting after ETag check. - pub fn resolve( - &self, - params: MetricSelection, - max_weight: usize, - ) -> Result { + pub fn resolve(&self, params: MetricSelection, max_weight: usize) -> Result { let vecs = self.search(¶ms)?; let total = vecs.iter().map(|v| v.len()).min().unwrap_or(0); - let version: u64 = vecs.iter().map(|v| u64::from(v.version())).sum(); + let version: Version = vecs.iter().map(|v| v.version()).sum(); let start = params .start() @@ -182,12 +179,13 @@ impl Query { let output = match format { Format::CSV => Output::CSV(Self::columns_to_csv(&vecs, start, end)?), Format::JSON => { + let count = end.saturating_sub(start); if vecs.len() == 1 { - let mut buf = Vec::new(); + let mut buf = Vec::with_capacity(count * 12 + 256); MetricData::serialize(vecs[0], index, start, end, &mut buf)?; Output::Json(buf) } else { - let mut buf = Vec::new(); + let mut buf = Vec::with_capacity(count * 12 * vecs.len() + 256); buf.push(b'['); for (i, vec) in vecs.iter().enumerate() { if i > 0 { @@ -244,4 +242,87 @@ impl Query { pub fn metric_to_indexes(&self, metric: Metric) -> Option<&Vec> { self.vecs().metric_to_indexes(metric) } + + /// Deprecated - format a resolved query as legacy output (expensive). + pub fn format_legacy(&self, resolved: ResolvedQuery) -> Result { + let ResolvedQuery { + vecs, + format, + version, + total, + start, + end, + .. 
+ } = resolved; + + if vecs.is_empty() { + return Ok(MetricOutputLegacy { + output: OutputLegacy::default(format), + version: Version::ZERO, + total: 0, + start: 0, + end: 0, + }); + } + + let from = Some(start as i64); + let to = Some(end as i64); + + let output = match format { + Format::CSV => OutputLegacy::CSV(Self::columns_to_csv(&vecs, start, end)?), + Format::JSON => { + if vecs.len() == 1 { + let metric = vecs[0]; + let count = metric.range_count(from, to); + let mut buf = Vec::new(); + if count == 1 { + metric.write_json_value(Some(start), &mut buf)?; + OutputLegacy::Json(LegacyValue::Value(buf)) + } else { + metric.write_json(Some(start), Some(end), &mut buf)?; + OutputLegacy::Json(LegacyValue::List(buf)) + } + } else { + let mut values = Vec::with_capacity(vecs.len()); + for vec in &vecs { + let mut buf = Vec::new(); + vec.write_json(Some(start), Some(end), &mut buf)?; + values.push(buf); + } + OutputLegacy::Json(LegacyValue::Matrix(values)) + } + } + }; + + Ok(MetricOutputLegacy { + output, + version, + total, + start, + end, + }) + } +} + +/// A resolved metric query ready for formatting. +/// Contains the vecs and metadata needed to build an ETag or format the output. 
+pub struct ResolvedQuery { + pub vecs: Vec<&'static dyn AnyExportableVec>, + pub format: Format, + pub index: Index, + pub version: Version, + pub total: usize, + pub start: usize, + pub end: usize, + pub height: u32, +} + +impl ResolvedQuery { + pub fn etag(&self) -> Etag { + Etag::from_metric(self.version, self.total, self.start, self.end, self.height) + } + + pub fn format(&self) -> Format { + self.format + } } diff --git a/crates/brk_query/src/impl/metrics_legacy.rs b/crates/brk_query/src/impl/metrics_legacy.rs deleted file mode 100644 index 259601082..000000000 --- a/crates/brk_query/src/impl/metrics_legacy.rs +++ /dev/null @@ -1,66 +0,0 @@ -use brk_error::Result; -use brk_types::{Format, LegacyValue, MetricOutputLegacy, OutputLegacy}; - -use crate::{Query, ResolvedQuery}; - -impl Query { - /// Deprecated - format a resolved query as legacy output (expensive). - pub fn format_legacy(&self, resolved: ResolvedQuery) -> Result { - let ResolvedQuery { - vecs, - format, - version, - total, - start, - end, - .. 
- } = resolved; - - if vecs.is_empty() { - return Ok(MetricOutputLegacy { - output: OutputLegacy::default(format), - version: 0, - total: 0, - start: 0, - end: 0, - }); - } - - let from = Some(start as i64); - let to = Some(end as i64); - - let output = match format { - Format::CSV => OutputLegacy::CSV(Self::columns_to_csv(&vecs, start, end)?), - Format::JSON => { - if vecs.len() == 1 { - let metric = vecs[0]; - let count = metric.range_count(from, to); - let mut buf = Vec::new(); - if count == 1 { - metric.write_json_value(Some(start), &mut buf)?; - OutputLegacy::Json(LegacyValue::Value(buf)) - } else { - metric.write_json(Some(start), Some(end), &mut buf)?; - OutputLegacy::Json(LegacyValue::List(buf)) - } - } else { - let mut values = Vec::with_capacity(vecs.len()); - for vec in &vecs { - let mut buf = Vec::new(); - vec.write_json(Some(start), Some(end), &mut buf)?; - values.push(buf); - } - OutputLegacy::Json(LegacyValue::Matrix(values)) - } - } - }; - - Ok(MetricOutputLegacy { - output, - version, - total, - start, - end, - }) - } -} diff --git a/crates/brk_query/src/impl/mining/block_fee_rates.rs b/crates/brk_query/src/impl/mining/block_fee_rates.rs index a2d43537a..cbd735abb 100644 --- a/crates/brk_query/src/impl/mining/block_fee_rates.rs +++ b/crates/brk_query/src/impl/mining/block_fee_rates.rs @@ -1,9 +1,9 @@ -// TODO: INCOMPLETE - indexes_to_fee_rate.dateindex doesn't have percentile fields -// because from_txindex.rs calls remove_percentiles() before creating dateindex. +// TODO: INCOMPLETE - indexes_to_fee_rate.day1 doesn't have percentile fields +// because from_txindex.rs calls remove_percentiles() before creating day1. // Need to either: -// 1. Use .height instead and convert height to dateindex for iteration -// 2. Fix from_txindex.rs to preserve percentiles for dateindex -// 3. Create a separate dateindex computation path with percentiles +// 1. Use .height instead and convert height to day1 for iteration +// 2. 
Fix from_txindex.rs to preserve percentiles for day1 +// 3. Create a separate day1 computation path with percentiles #![allow(dead_code)] @@ -15,12 +15,11 @@ use brk_types::{ }; // use vecdb::{IterableVec, VecIndex}; -// use super::dateindex_iter::DateIndexIter; use crate::Query; impl Query { pub fn block_fee_rates(&self, _time_period: TimePeriod) -> Result> { - // Disabled until percentile data is available at dateindex level + // Disabled until percentile data is available at day1 level Ok(Vec::new()) // Original implementation: @@ -30,9 +29,9 @@ impl Query { // .to_usize() // .saturating_sub(time_period.block_count()); // - // let iter = DateIndexIter::new(computer, start, current_height.to_usize()); + // let iter = Day1Iter::new(computer, start, current_height.to_usize()); // - // let vecs = &computer.transactions.transaction.indexes_to_fee_rate.dateindex; + // let vecs = &computer.transactions.transaction.indexes_to_fee_rate.day1; // let mut min = vecs.unwrap_min().iter(); // let mut pct10 = vecs.unwrap_pct10().iter(); // let mut pct25 = vecs.unwrap_pct25().iter(); diff --git a/crates/brk_query/src/impl/mining/block_fees.rs b/crates/brk_query/src/impl/mining/block_fees.rs index 313fa5712..7f1621f6e 100644 --- a/crates/brk_query/src/impl/mining/block_fees.rs +++ b/crates/brk_query/src/impl/mining/block_fees.rs @@ -1,8 +1,8 @@ use brk_error::Result; use brk_types::{BlockFeesEntry, TimePeriod}; -use vecdb::{IterableVec, VecIndex}; +use vecdb::{ReadableVec, VecIndex}; -use super::dateindex_iter::DateIndexIter; +use super::day1_iter::Day1Iter; use crate::Query; impl Query { @@ -13,19 +13,18 @@ impl Query { .to_usize() .saturating_sub(time_period.block_count()); - let iter = DateIndexIter::new(computer, start, current_height.to_usize()); + let iter = Day1Iter::new(computer, start, current_height.to_usize()); - let mut fees = computer + let fees_vec = &computer .transactions .fees .fee .sats - .dateindex - .average() - .iter(); + .day1 + .average; 
Ok(iter.collect(|di, ts, h| { - fees.get(di).map(|fee| BlockFeesEntry { + fees_vec.collect_one(di).map(|fee| BlockFeesEntry { avg_height: h, timestamp: ts, avg_fees: fee, diff --git a/crates/brk_query/src/impl/mining/block_rewards.rs b/crates/brk_query/src/impl/mining/block_rewards.rs index 1d486d60b..c46a0c556 100644 --- a/crates/brk_query/src/impl/mining/block_rewards.rs +++ b/crates/brk_query/src/impl/mining/block_rewards.rs @@ -1,8 +1,8 @@ use brk_error::Result; use brk_types::{BlockRewardsEntry, TimePeriod}; -use vecdb::{IterableVec, VecIndex}; +use vecdb::{ReadableVec, VecIndex}; -use super::dateindex_iter::DateIndexIter; +use super::day1_iter::Day1Iter; use crate::Query; impl Query { @@ -13,20 +13,18 @@ impl Query { .to_usize() .saturating_sub(time_period.block_count()); - let iter = DateIndexIter::new(computer, start, current_height.to_usize()); + let iter = Day1Iter::new(computer, start, current_height.to_usize()); - let mut rewards = computer - .blocks + let rewards_vec = &computer + .mining .rewards .coinbase .sats - .dateindex - .distribution - .average() - .iter(); + .day1 + .average; Ok(iter.collect(|di, ts, h| { - rewards.get(di).map(|reward| BlockRewardsEntry { + rewards_vec.collect_one(di).map(|reward| BlockRewardsEntry { avg_height: h.into(), timestamp: *ts, avg_rewards: *reward, diff --git a/crates/brk_query/src/impl/mining/block_sizes.rs b/crates/brk_query/src/impl/mining/block_sizes.rs index dd41faf8b..501d6e668 100644 --- a/crates/brk_query/src/impl/mining/block_sizes.rs +++ b/crates/brk_query/src/impl/mining/block_sizes.rs @@ -1,8 +1,8 @@ use brk_error::Result; use brk_types::{BlockSizeEntry, BlockSizesWeights, BlockWeightEntry, TimePeriod}; -use vecdb::{IterableVec, VecIndex}; +use vecdb::{ReadableVec, VecIndex}; -use super::dateindex_iter::DateIndexIter; +use super::day1_iter::Day1Iter; use crate::Query; impl Query { @@ -13,34 +13,30 @@ impl Query { .to_usize() .saturating_sub(time_period.block_count()); - let iter = 
DateIndexIter::new(computer, start, current_height.to_usize()); + let iter = Day1Iter::new(computer, start, current_height.to_usize()); - let mut sizes_vec = computer + let sizes_vec = &computer .blocks .size .size - .dateindex - .distribution - .average() - .iter(); - let mut weights_vec = computer + .day1 + .average; + let weights_vec = &computer .blocks .weight .weight - .dateindex - .distribution - .average() - .iter(); + .day1 + .average; let entries: Vec<_> = iter.collect(|di, ts, h| { - let size = sizes_vec.get(di).map(|s| *s); - let weight = weights_vec.get(di).map(|w| *w); + let size = sizes_vec.collect_one(di).map(|s| *s); + let weight = weights_vec.collect_one(di).map(|w| *w); Some((h.into(), (*ts), size, weight)) }); let sizes = entries .iter() - .filter_map(|(h, ts, size, _)| { + .filter_map(|(h, ts, size, _): &(u32, _, _, _)| { size.map(|s| BlockSizeEntry { avg_height: *h, timestamp: *ts, @@ -51,7 +47,7 @@ impl Query { let weights = entries .iter() - .filter_map(|(h, ts, _, weight)| { + .filter_map(|(h, ts, _, weight): &(u32, _, _, _)| { weight.map(|w| BlockWeightEntry { avg_height: *h, timestamp: *ts, diff --git a/crates/brk_query/src/impl/mining/dateindex_iter.rs b/crates/brk_query/src/impl/mining/day1_iter.rs similarity index 56% rename from crates/brk_query/src/impl/mining/dateindex_iter.rs rename to crates/brk_query/src/impl/mining/day1_iter.rs index d5fa1761a..118cb2ef1 100644 --- a/crates/brk_query/src/impl/mining/dateindex_iter.rs +++ b/crates/brk_query/src/impl/mining/day1_iter.rs @@ -1,28 +1,28 @@ use brk_computer::Computer; -use brk_types::{DateIndex, Height, Timestamp}; -use vecdb::{GenericStoredVec, IterableVec, VecIndex}; +use brk_types::{Day1, Height, Timestamp}; +use vecdb::{ReadableVec, Ro, VecIndex}; -/// Helper for iterating over dateindex ranges with sampling. -pub struct DateIndexIter<'a> { - computer: &'a Computer, - start_di: DateIndex, - end_di: DateIndex, +/// Helper for iterating over day1 ranges with sampling. 
+pub struct Day1Iter<'a> { + computer: &'a Computer, + start_di: Day1, + end_di: Day1, step: usize, } -impl<'a> DateIndexIter<'a> { - pub fn new(computer: &'a Computer, start_height: usize, end_height: usize) -> Self { +impl<'a> Day1Iter<'a> { + pub fn new(computer: &'a Computer, start_height: usize, end_height: usize) -> Self { let start_di = computer .indexes .height - .dateindex - .read_once(Height::from(start_height)) + .day1 + .collect_one(Height::from(start_height)) .unwrap_or_default(); let end_di = computer .indexes .height - .dateindex - .read_once(Height::from(end_height)) + .day1 + .collect_one(Height::from(end_height)) .unwrap_or_default(); let total = end_di.to_usize().saturating_sub(start_di.to_usize()) + 1; @@ -39,22 +39,22 @@ impl<'a> DateIndexIter<'a> { /// Iterate and collect entries using the provided transform function. pub fn collect(&self, mut transform: F) -> Vec where - F: FnMut(DateIndex, Timestamp, Height) -> Option, + F: FnMut(Day1, Timestamp, Height) -> Option, { let total = self .end_di .to_usize() .saturating_sub(self.start_di.to_usize()) + 1; - let mut timestamps = self.computer.blocks.time.timestamp.dateindex.iter(); - let mut heights = self.computer.indexes.dateindex.first_height.iter(); + let timestamps = &self.computer.blocks.time.timestamp.day1; + let heights = &self.computer.indexes.day1.first_height; let mut entries = Vec::with_capacity(total / self.step + 1); let mut i = self.start_di.to_usize(); while i <= self.end_di.to_usize() { - let di = DateIndex::from(i); - if let (Some(ts), Some(h)) = (timestamps.get(di), heights.get(di)) + let di = Day1::from(i); + if let (Some(ts), Some(h)) = (timestamps.collect_one(di), heights.collect_one(di)) && let Some(entry) = transform(di, ts, h) { entries.push(entry); diff --git a/crates/brk_query/src/impl/mining/difficulty.rs b/crates/brk_query/src/impl/mining/difficulty.rs index 97f36863d..9b87b09a5 100644 --- a/crates/brk_query/src/impl/mining/difficulty.rs +++ 
b/crates/brk_query/src/impl/mining/difficulty.rs @@ -2,7 +2,7 @@ use std::time::{SystemTime, UNIX_EPOCH}; use brk_error::Result; use brk_types::{DifficultyAdjustment, DifficultyEpoch, Height}; -use vecdb::GenericStoredVec; +use vecdb::ReadableVec; use crate::Query; @@ -24,7 +24,8 @@ impl Query { .indexes .height .difficultyepoch - .read_once(current_height)?; + .collect_one(current_height) + .unwrap(); let current_epoch_usize: usize = current_epoch.into(); // Get epoch start height @@ -32,7 +33,8 @@ impl Query { .indexes .difficultyepoch .first_height - .read_once(current_epoch)?; + .collect_one(current_epoch) + .unwrap(); let epoch_start_u32: u32 = epoch_start_height.into(); // Calculate epoch progress @@ -47,12 +49,14 @@ impl Query { .time .timestamp .difficultyepoch - .read_once(current_epoch)?; + .collect_one(current_epoch) + .unwrap(); let current_timestamp = indexer .vecs .blocks .timestamp - .read_once(current_height)?; + .collect_one(current_height) + .unwrap(); // Calculate average block time in current epoch let elapsed_time = (*current_timestamp - *epoch_start_timestamp) as u64; @@ -88,18 +92,21 @@ impl Query { .indexes .difficultyepoch .first_height - .read_once(prev_epoch)?; + .collect_one(prev_epoch) + .unwrap(); let prev_difficulty = indexer .vecs .blocks .difficulty - .read_once(prev_epoch_start)?; + .collect_one(prev_epoch_start) + .unwrap(); let curr_difficulty = indexer .vecs .blocks .difficulty - .read_once(epoch_start_height)?; + .collect_one(epoch_start_height) + .unwrap(); if *prev_difficulty > 0.0 { ((*curr_difficulty / *prev_difficulty) - 1.0) * 100.0 diff --git a/crates/brk_query/src/impl/mining/epochs.rs b/crates/brk_query/src/impl/mining/epochs.rs index b05dd3814..68e174161 100644 --- a/crates/brk_query/src/impl/mining/epochs.rs +++ b/crates/brk_query/src/impl/mining/epochs.rs @@ -1,10 +1,10 @@ use brk_computer::Computer; use brk_types::{DifficultyAdjustmentEntry, DifficultyEpoch, Height}; -use vecdb::{GenericStoredVec, IterableVec, 
VecIndex}; +use vecdb::{ReadableVec, Ro, VecIndex}; /// Iterate over difficulty epochs within a height range. pub fn iter_difficulty_epochs( - computer: &Computer, + computer: &Computer, start_height: usize, end_height: usize, ) -> Vec { @@ -12,38 +12,34 @@ pub fn iter_difficulty_epochs( .indexes .height .difficultyepoch - .read_once(Height::from(start_height)) + .collect_one(Height::from(start_height)) .unwrap_or_default(); let end_epoch = computer .indexes .height .difficultyepoch - .read_once(Height::from(end_height)) + .collect_one(Height::from(end_height)) .unwrap_or_default(); - let mut epoch_to_height_iter = computer - .indexes - .difficultyepoch - .first_height - .iter(); - let mut epoch_to_timestamp_iter = computer.blocks.time.timestamp.difficultyepoch.iter(); - let mut epoch_to_difficulty_iter = computer.blocks.difficulty.raw.difficultyepoch.iter(); + let epoch_to_height = &computer.indexes.difficultyepoch.first_height; + let epoch_to_timestamp = &computer.blocks.time.timestamp.difficultyepoch; + let epoch_to_difficulty = &computer.blocks.difficulty.raw.difficultyepoch; let mut results = Vec::with_capacity(end_epoch.to_usize() - start_epoch.to_usize() + 1); let mut prev_difficulty: Option = None; for epoch_usize in start_epoch.to_usize()..=end_epoch.to_usize() { let epoch = DifficultyEpoch::from(epoch_usize); - let epoch_height = epoch_to_height_iter.get(epoch).unwrap_or_default(); + let epoch_height = epoch_to_height.collect_one(epoch).unwrap_or_default(); // Skip epochs before our start height but track difficulty if epoch_height.to_usize() < start_height { - prev_difficulty = epoch_to_difficulty_iter.get(epoch).map(|d| *d); + prev_difficulty = epoch_to_difficulty.collect_one(epoch).map(|d| *d); continue; } - let epoch_timestamp = epoch_to_timestamp_iter.get(epoch).unwrap_or_default(); - let epoch_difficulty = *epoch_to_difficulty_iter.get(epoch).unwrap_or_default(); + let epoch_timestamp = epoch_to_timestamp.collect_one(epoch).unwrap_or_default(); + 
let epoch_difficulty = *epoch_to_difficulty.collect_one(epoch).unwrap_or_default(); let change_percent = match prev_difficulty { Some(prev) if prev > 0.0 => ((epoch_difficulty / prev) - 1.0) * 100.0, diff --git a/crates/brk_query/src/impl/mining/hashrate.rs b/crates/brk_query/src/impl/mining/hashrate.rs index 974fee439..70c883394 100644 --- a/crates/brk_query/src/impl/mining/hashrate.rs +++ b/crates/brk_query/src/impl/mining/hashrate.rs @@ -1,6 +1,6 @@ use brk_error::Result; -use brk_types::{DateIndex, DifficultyEntry, HashrateEntry, HashrateSummary, Height, TimePeriod}; -use vecdb::{GenericStoredVec, IterableVec, VecIndex}; +use brk_types::{Day1, DifficultyEntry, HashrateEntry, HashrateSummary, Height, TimePeriod}; +use vecdb::{ReadableVec, VecIndex}; use super::epochs::iter_difficulty_epochs; use crate::Query; @@ -12,21 +12,23 @@ impl Query { let current_height = self.height(); // Get current difficulty - let current_difficulty = *indexer.vecs.blocks.difficulty.read_once(current_height)?; + let current_difficulty = *indexer.vecs.blocks.difficulty.collect_one(current_height).unwrap(); // Get current hashrate - let current_dateindex = computer + let current_day1 = computer .indexes .height - .dateindex - .read_once(current_height)?; + .day1 + .collect_one(current_height) + .unwrap(); let current_hashrate = *computer - .blocks .mining + .hashrate .hash_rate - .dateindex - .read_once(current_dateindex)? 
as u128; + .day1 + .collect_one(current_day1) + .unwrap() as u128; // Calculate start height based on time period let end = current_height.to_usize(); @@ -36,31 +38,30 @@ impl Query { }; // Get hashrate entries using iterators for efficiency - let start_dateindex = computer + let start_day1 = computer .indexes .height - .dateindex - .read_once(Height::from(start))?; - let end_dateindex = current_dateindex; + .day1 + .collect_one(Height::from(start)) + .unwrap(); + let end_day1 = current_day1; // Sample at regular intervals to avoid too many data points - let total_days = end_dateindex + let total_days = end_day1 .to_usize() - .saturating_sub(start_dateindex.to_usize()) + .saturating_sub(start_day1.to_usize()) + 1; let step = (total_days / 200).max(1); // Max ~200 data points - // Create iterators for the loop - let mut hashrate_iter = computer.blocks.mining.hash_rate.dateindex.iter(); - - let mut timestamp_iter = computer.blocks.time.timestamp.dateindex.iter(); + let hashrate_vec = &computer.mining.hashrate.hash_rate.day1; + let timestamp_vec = &computer.blocks.time.timestamp.day1; let mut hashrates = Vec::with_capacity(total_days / step + 1); - let mut di = start_dateindex.to_usize(); - while di <= end_dateindex.to_usize() { - let dateindex = DateIndex::from(di); + let mut di = start_day1.to_usize(); + while di <= end_day1.to_usize() { + let day1 = Day1::from(di); if let (Some(hr), Some(timestamp)) = - (hashrate_iter.get(dateindex), timestamp_iter.get(dateindex)) + (hashrate_vec.collect_one(day1), timestamp_vec.collect_one(day1)) { hashrates.push(HashrateEntry { timestamp, diff --git a/crates/brk_query/src/impl/mining/mod.rs b/crates/brk_query/src/impl/mining/mod.rs index ee08665f8..57b07986b 100644 --- a/crates/brk_query/src/impl/mining/mod.rs +++ b/crates/brk_query/src/impl/mining/mod.rs @@ -2,7 +2,7 @@ mod block_fee_rates; mod block_fees; mod block_rewards; mod block_sizes; -mod dateindex_iter; +mod day1_iter; mod difficulty; mod difficulty_adjustments; mod 
epochs; diff --git a/crates/brk_query/src/impl/mining/pools.rs b/crates/brk_query/src/impl/mining/pools.rs index 049cb51de..da5a91e13 100644 --- a/crates/brk_query/src/impl/mining/pools.rs +++ b/crates/brk_query/src/impl/mining/pools.rs @@ -3,7 +3,7 @@ use brk_types::{ Height, PoolBlockCounts, PoolBlockShares, PoolDetail, PoolDetailInfo, PoolInfo, PoolSlug, PoolStats, PoolsSummary, TimePeriod, pools, }; -use vecdb::{AnyVec, IterableVec, VecIndex}; +use vecdb::{AnyVec, ReadableVec, VecIndex}; use crate::Query; @@ -30,18 +30,16 @@ impl Query { // For each pool, get cumulative count at end and start, subtract to get range count for (pool_id, pool_vecs) in &computer.pools.vecs { - let mut cumulative = pool_vecs + let cumulative = &pool_vecs .blocks_mined - .height_cumulative - .inner() - .iter(); + .height_cumulative; - let count_at_end: u32 = *cumulative.get(current_height).unwrap_or_default(); + let count_at_end: u32 = *cumulative.collect_one(current_height).unwrap_or_default(); let count_at_start: u32 = if start == 0 { 0 } else { - *cumulative.get(Height::from(start - 1)).unwrap_or_default() + *cumulative.collect_one(Height::from(start - 1)).unwrap_or_default() }; let block_count = count_at_end.saturating_sub(count_at_start); @@ -100,14 +98,12 @@ impl Query { .get(&slug) .ok_or_else(|| Error::NotFound("Pool data not found".into()))?; - let mut cumulative = pool_vecs + let cumulative = &pool_vecs .blocks_mined - .height_cumulative - .inner() - .iter(); + .height_cumulative; // Get total blocks (all time) - let total_all: u32 = *cumulative.get(current_height).unwrap_or_default(); + let total_all: u32 = *cumulative.collect_one(current_height).unwrap_or_default(); // Get blocks for 24h (144 blocks) let start_24h = end.saturating_sub(144); @@ -115,7 +111,7 @@ impl Query { 0 } else { *cumulative - .get(Height::from(start_24h - 1)) + .collect_one(Height::from(start_24h - 1)) .unwrap_or_default() }; let total_24h = total_all.saturating_sub(count_before_24h); @@ -126,7 
+122,7 @@ impl Query { 0 } else { *cumulative - .get(Height::from(start_1w - 1)) + .collect_one(Height::from(start_1w - 1)) .unwrap_or_default() }; let total_1w = total_all.saturating_sub(count_before_1w); diff --git a/crates/brk_query/src/impl/mining/reward_stats.rs b/crates/brk_query/src/impl/mining/reward_stats.rs index 77e26200f..967bde264 100644 --- a/crates/brk_query/src/impl/mining/reward_stats.rs +++ b/crates/brk_query/src/impl/mining/reward_stats.rs @@ -1,6 +1,6 @@ use brk_error::Result; use brk_types::{Height, RewardStats, Sats}; -use vecdb::{IterableVec, VecIndex}; +use vecdb::{ReadableVec, VecIndex}; use crate::Query; @@ -12,39 +12,16 @@ impl Query { let end_block = current_height; let start_block = Height::from(current_height.to_usize().saturating_sub(block_count - 1)); - let mut coinbase_iter = computer.blocks.rewards.coinbase.sats.height.iter(); + let coinbase_vec = &computer.mining.rewards.coinbase.sats.height; + let fee_vec = &computer.transactions.fees.fee.sats.height.sum_cum.sum.0; + let tx_count_vec = &computer.transactions.count.tx_count.height; - let mut fee_iter = computer - .transactions - .fees - .fee - .sats - .height - .sum_cum - .sum - .0 - .iter(); - let mut tx_count_iter = computer.transactions.count.tx_count.height.iter(); + let start = start_block.to_usize(); + let end = end_block.to_usize() + 1; - let mut total_reward = Sats::ZERO; - let mut total_fee = Sats::ZERO; - let mut total_tx: u64 = 0; - - for height in start_block.to_usize()..=end_block.to_usize() { - let h = Height::from(height); - - if let Some(coinbase) = coinbase_iter.get(h) { - total_reward += coinbase; - } - - if let Some(fee) = fee_iter.get(h) { - total_fee += fee; - } - - if let Some(tx_count) = tx_count_iter.get(h) { - total_tx += *tx_count; - } - } + let total_reward = coinbase_vec.fold_range_at(start, end, Sats::ZERO, |acc, v| acc + v); + let total_fee = fee_vec.fold_range_at(start, end, Sats::ZERO, |acc, v| acc + v); + let total_tx = 
tx_count_vec.fold_range_at(start, end, 0u64, |acc, v| acc + *v); Ok(RewardStats { start_block, diff --git a/crates/brk_query/src/impl/mod.rs b/crates/brk_query/src/impl/mod.rs index caf43265d..2323b2dbb 100644 --- a/crates/brk_query/src/impl/mod.rs +++ b/crates/brk_query/src/impl/mod.rs @@ -3,7 +3,6 @@ mod block; mod cost_basis; mod mempool; mod metrics; -mod metrics_legacy; mod mining; mod price; mod transaction; diff --git a/crates/brk_query/src/impl/price.rs b/crates/brk_query/src/impl/price.rs index 9bc73bbe6..095c5d1d8 100644 --- a/crates/brk_query/src/impl/price.rs +++ b/crates/brk_query/src/impl/price.rs @@ -1,18 +1,11 @@ -use brk_error::{Error, Result}; +use brk_error::Result; use brk_types::Dollars; use crate::Query; impl Query { pub fn live_price(&self) -> Result { - let oracle_vecs = &self - .computer() - .price - .as_ref() - .ok_or_else(|| Error::OutOfRange("Oracle prices not computed yet".into()))? - .oracle; - - let mut oracle = oracle_vecs.live_oracle(self.indexer())?; + let mut oracle = self.computer().prices.cents.live_oracle(self.indexer())?; if let Some(mempool) = self.mempool() { let txs = mempool.get_txs(); diff --git a/crates/brk_query/src/impl/transaction.rs b/crates/brk_query/src/impl/transaction.rs index c35a97959..997fd9207 100644 --- a/crates/brk_query/src/impl/transaction.rs +++ b/crates/brk_query/src/impl/transaction.rs @@ -6,7 +6,7 @@ use brk_types::{ Sats, Transaction, TxIn, TxInIndex, TxIndex, TxOut, TxOutspend, TxStatus, Txid, TxidParam, TxidPrefix, Vin, Vout, Weight, }; -use vecdb::{GenericStoredVec, TypedVecIterator}; +use vecdb::{ReadableVec, VecIndex}; use crate::Query; @@ -55,9 +55,9 @@ impl Query { }; // Get block info for status - let height = indexer.vecs.transactions.height.read_once(txindex)?; + let height = indexer.vecs.transactions.height.collect_one(txindex).unwrap(); let block_hash = indexer.vecs.blocks.blockhash.read_once(height)?; - let block_time = indexer.vecs.blocks.timestamp.read_once(height)?; + let block_time = 
indexer.vecs.blocks.timestamp.collect_one(height).unwrap(); Ok(TxStatus { confirmed: true, @@ -120,11 +120,7 @@ impl Query { // Look up spend status let computer = self.computer(); - let txinindex = computer - .outputs - .spent - .txinindex - .read_once(txoutindex)?; + let txinindex = computer.outputs.spent.txinindex.read_once(txoutindex)?; if txinindex == TxInIndex::UNSPENT { return Ok(TxOutspend::UNSPENT); @@ -169,13 +165,12 @@ impl Query { // Get spend status for each output let computer = self.computer(); - let mut txoutindex_to_txinindex_iter = - computer.outputs.spent.txinindex.iter()?; + let txinindex_reader = computer.outputs.spent.txinindex.reader(); let mut outspends = Vec::with_capacity(output_count); for i in 0..output_count { let txoutindex = first_txoutindex + Vout::from(i); - let txinindex = txoutindex_to_txinindex_iter.get_unwrap(txoutindex); + let txinindex = txinindex_reader.get(usize::from(txoutindex)); if txinindex == TxInIndex::UNSPENT { outspends.push(TxOutspend::UNSPENT); @@ -194,22 +189,23 @@ impl Query { let reader = self.reader(); let computer = self.computer(); - // Get tx metadata using read_once for single lookups + // Get tx metadata using collect_one for PcoVec, read_once for BytesVec let txid = indexer.vecs.transactions.txid.read_once(txindex)?; - let height = indexer.vecs.transactions.height.read_once(txindex)?; - let version = indexer.vecs.transactions.txversion.read_once(txindex)?; - let lock_time = indexer.vecs.transactions.rawlocktime.read_once(txindex)?; - let total_size = indexer.vecs.transactions.total_size.read_once(txindex)?; + let height = indexer.vecs.transactions.height.collect_one(txindex).unwrap(); + let version = indexer.vecs.transactions.txversion.collect_one(txindex).unwrap(); + let lock_time = indexer.vecs.transactions.rawlocktime.collect_one(txindex).unwrap(); + let total_size = indexer.vecs.transactions.total_size.collect_one(txindex).unwrap(); let first_txinindex = indexer .vecs .transactions .first_txinindex - 
.read_once(txindex)?; - let position = computer.positions.tx_position.read_once(txindex)?; + .collect_one(txindex) + .unwrap(); + let position = computer.positions.tx_position.collect_one(txindex).unwrap(); // Get block info for status let block_hash = indexer.vecs.blocks.blockhash.read_once(height)?; - let block_time = indexer.vecs.blocks.timestamp.read_once(height)?; + let block_time = indexer.vecs.blocks.timestamp.collect_one(height).unwrap(); // Read and decode the raw transaction from blk file let buffer = reader.read_raw_bytes(position, *total_size as usize)?; @@ -217,12 +213,16 @@ impl Query { let tx = bitcoin::Transaction::consensus_decode(&mut cursor) .map_err(|_| Error::Parse("Failed to decode transaction".into()))?; - // For iterating through inputs, we need iterators (multiple lookups) - let mut txindex_to_txid_iter = indexer.vecs.transactions.txid.iter()?; - let mut txindex_to_first_txoutindex_iter = - indexer.vecs.transactions.first_txoutindex.iter()?; - let mut txinindex_to_outpoint_iter = indexer.vecs.inputs.outpoint.iter()?; - let mut txoutindex_to_value_iter = indexer.vecs.outputs.value.iter()?; + // Create readers for random access lookups + let txid_reader = indexer.vecs.transactions.txid.reader(); + let first_txoutindex_reader = indexer.vecs.transactions.first_txoutindex.reader(); + let value_reader = indexer.vecs.outputs.value.reader(); + + // Batch-read outpoints for all inputs (avoids per-input PcoVec page decompression) + let outpoints: Vec<_> = indexer.vecs.inputs.outpoint.collect_range_at( + usize::from(first_txinindex), + usize::from(first_txinindex) + tx.input.len(), + ); // Build inputs with prevout information let input: Vec = tx @@ -230,8 +230,7 @@ impl Query { .iter() .enumerate() .map(|(i, txin)| { - let txinindex = first_txinindex + i; - let outpoint = txinindex_to_outpoint_iter.get_unwrap(txinindex); + let outpoint = outpoints[i]; let is_coinbase = outpoint.is_coinbase(); @@ -241,15 +240,14 @@ impl Query { } else { let 
prev_txindex = outpoint.txindex(); let prev_vout = outpoint.vout(); - let prev_txid = txindex_to_txid_iter.get_unwrap(prev_txindex); + let prev_txid = txid_reader.get(prev_txindex.to_usize()); // Calculate the txoutindex for the prevout - let prev_first_txoutindex = - txindex_to_first_txoutindex_iter.get_unwrap(prev_txindex); + let prev_first_txoutindex = first_txoutindex_reader.get(prev_txindex.to_usize()); let prev_txoutindex = prev_first_txoutindex + prev_vout; // Get the value of the prevout - let prev_value = txoutindex_to_value_iter.get_unwrap(prev_txoutindex); + let prev_value = value_reader.get(usize::from(prev_txoutindex)); let prevout = Some(TxOut::from(( bitcoin::ScriptBuf::new(), // Placeholder - would need to reconstruct @@ -314,8 +312,8 @@ impl Query { let reader = self.reader(); let computer = self.computer(); - let total_size = indexer.vecs.transactions.total_size.read_once(txindex)?; - let position = computer.positions.tx_position.read_once(txindex)?; + let total_size = indexer.vecs.transactions.total_size.collect_one(txindex).unwrap(); + let position = computer.positions.tx_position.collect_one(txindex).unwrap(); let buffer = reader.read_raw_bytes(position, *total_size as usize)?; @@ -326,14 +324,15 @@ impl Query { let indexer = self.indexer(); // Look up spending txindex directly - let spending_txindex = indexer.vecs.inputs.txindex.read_once(txinindex)?; + let spending_txindex = indexer.vecs.inputs.txindex.collect_one(txinindex).unwrap(); // Calculate vin let spending_first_txinindex = indexer .vecs .transactions .first_txinindex - .read_once(spending_txindex)?; + .collect_one(spending_txindex) + .unwrap(); let vin = Vin::from(usize::from(txinindex) - usize::from(spending_first_txinindex)); // Get spending tx details @@ -342,9 +341,10 @@ impl Query { .vecs .transactions .height - .read_once(spending_txindex)?; + .collect_one(spending_txindex) + .unwrap(); let block_hash = indexer.vecs.blocks.blockhash.read_once(spending_height)?; - let block_time 
= indexer.vecs.blocks.timestamp.read_once(spending_height)?; + let block_time = indexer.vecs.blocks.timestamp.collect_one(spending_height).unwrap(); Ok(TxOutspend { spent: true, diff --git a/crates/brk_query/src/lib.rs b/crates/brk_query/src/lib.rs index b170d2ed1..10b3e25f9 100644 --- a/crates/brk_query/src/lib.rs +++ b/crates/brk_query/src/lib.rs @@ -9,11 +9,10 @@ use brk_mempool::Mempool; use brk_reader::Reader; use brk_rpc::Client; use brk_types::Height; -use vecdb::AnyStoredVec; +use vecdb::{ReadOnlyClone, Ro}; #[cfg(feature = "tokio")] mod r#async; -mod resolved; mod vecs; mod r#impl; @@ -21,7 +20,6 @@ mod r#impl; #[cfg(feature = "tokio")] pub use r#async::*; pub use r#impl::BLOCK_TXS_PAGE_SIZE; -use resolved::ResolvedQuery; pub use vecs::Vecs; #[derive(Clone)] @@ -30,8 +28,8 @@ struct QueryInner<'a> { vecs: &'a Vecs<'a>, client: Client, reader: Reader, - indexer: &'a Indexer, - computer: &'a Computer, + indexer: &'a Indexer, + computer: &'a Computer, mempool: Option, } @@ -44,8 +42,8 @@ impl Query { ) -> Self { let client = reader.client().clone(); let reader = reader.clone(); - let indexer = Box::leak(Box::new(indexer.clone())); - let computer = Box::leak(Box::new(computer.clone())); + let indexer = Box::leak(Box::new(indexer.read_only_clone())); + let computer = Box::leak(Box::new(computer.read_only_clone())); let vecs = Box::leak(Box::new(Vecs::build(indexer, computer))); Self(Arc::new(QueryInner { @@ -79,12 +77,12 @@ impl Query { } #[inline] - pub fn indexer(&self) -> &Indexer { + pub fn indexer(&self) -> &Indexer { self.0.indexer } #[inline] - pub fn computer(&self) -> &Computer { + pub fn computer(&self) -> &Computer { self.0.computer } diff --git a/crates/brk_query/src/resolved.rs b/crates/brk_query/src/resolved.rs deleted file mode 100644 index c0eb27216..000000000 --- a/crates/brk_query/src/resolved.rs +++ /dev/null @@ -1,25 +0,0 @@ -use brk_types::{Etag, Format, Index}; -use vecdb::AnyExportableVec; - -/// A resolved metric query ready for 
formatting. -/// Contains the vecs and metadata needed to build an ETag or format the output. -pub struct ResolvedQuery { - pub(crate) vecs: Vec<&'static dyn AnyExportableVec>, - pub(crate) format: Format, - pub(crate) index: Index, - pub(crate) version: u64, - pub(crate) total: usize, - pub(crate) start: usize, - pub(crate) end: usize, - pub(crate) height: u32, -} - -impl ResolvedQuery { - pub fn etag(&self) -> Etag { - Etag::from_metric(self.version, self.total, self.start, self.end, self.height) - } - - pub fn format(&self) -> Format { - self.format - } -} diff --git a/crates/brk_query/src/vecs.rs b/crates/brk_query/src/vecs.rs index ed5e1ee0b..cc2bf461d 100644 --- a/crates/brk_query/src/vecs.rs +++ b/crates/brk_query/src/vecs.rs @@ -8,7 +8,7 @@ use brk_types::{ }; use derive_more::{Deref, DerefMut}; use quickmatch::{QuickMatch, QuickMatchConfig}; -use vecdb::AnyExportableVec; +use vecdb::{AnyExportableVec, Ro}; #[derive(Default)] pub struct Vecs<'a> { @@ -25,7 +25,7 @@ pub struct Vecs<'a> { } impl<'a> Vecs<'a> { - pub fn build(indexer: &'a Indexer, computer: &'a Computer) -> Self { + pub fn build(indexer: &'a Indexer, computer: &'a Computer) -> Self { let mut this = Vecs::default(); indexer diff --git a/crates/brk_rpc/Cargo.toml b/crates/brk_rpc/Cargo.toml index 8aff82f66..238f5f1a9 100644 --- a/crates/brk_rpc/Cargo.toml +++ b/crates/brk_rpc/Cargo.toml @@ -1,17 +1,26 @@ [package] name = "brk_rpc" -description = "A thin wrapper around bitcoincore-rpc" +description = "A thin wrapper around bitcoincore-rpc or corepc-client" version.workspace = true edition.workspace = true license.workspace = true homepage.workspace = true repository.workspace = true +exclude = ["examples/"] + +[features] +bitcoincore-rpc = ["dep:bitcoincore-rpc", "brk_error/bitcoincore-rpc"] +corepc = ["dep:corepc-client", "dep:corepc-jsonrpc", "dep:serde_json", "dep:serde", "brk_error/corepc"] [dependencies] bitcoin = { workspace = true } -bitcoincore-rpc = { workspace = true } -brk_error = { 
workspace = true, features = ["bitcoincore-rpc"] } +bitcoincore-rpc = { workspace = true, optional = true } +corepc-client = { workspace = true, optional = true } +corepc-jsonrpc = { workspace = true, optional = true } +brk_error = { workspace = true } brk_logger = { workspace = true } brk_types = { workspace = true } tracing = { workspace = true } parking_lot = { workspace = true } +serde = { workspace = true, optional = true } +serde_json = { workspace = true, optional = true } diff --git a/crates/brk_rpc/examples/bench_hash_prefixes.rs b/crates/brk_rpc/examples/bench_hash_prefixes.rs new file mode 100644 index 000000000..9ef3c64e9 --- /dev/null +++ b/crates/brk_rpc/examples/bench_hash_prefixes.rs @@ -0,0 +1,62 @@ +use std::{ + collections::HashSet, + sync::atomic::{AtomicU32, Ordering}, + thread, + time::Instant, +}; + +use brk_rpc::{Auth, Client}; +use brk_types::BlockHashPrefix; + +fn main() { + let bitcoin_dir = Client::default_bitcoin_path(); + let client = Client::new( + Client::default_url(), + Auth::CookieFile(bitcoin_dir.join(".cookie")), + ) + .unwrap(); + + let tip = u32::from(client.get_last_height().unwrap()); + let num_threads = thread::available_parallelism().unwrap().get(); + + println!("Tip: {tip}, Threads: {num_threads}"); + + let counter = AtomicU32::new(0); + let start = Instant::now(); + + let results: Vec> = thread::scope(|s| { + (0..num_threads) + .map(|t| { + let client = &client; + let counter = &counter; + s.spawn(move || { + let mut local = Vec::new(); + let mut h = t as u32; + while h <= tip { + let hash = client.get_block_hash(h as u64).unwrap(); + local.push(BlockHashPrefix::from(hash)); + let c = counter.fetch_add(1, Ordering::Relaxed); + if c.is_multiple_of(50_000) && c > 0 { + let rate = c as f64 / start.elapsed().as_secs_f64(); + println!(" {c}/{tip} ({rate:.0}/s)"); + } + h += num_threads as u32; + } + local + }) + }) + .collect::>() + .into_iter() + .map(|h| h.join().unwrap()) + .collect() + }); + + let set: HashSet = 
results.into_iter().flatten().collect(); + + let elapsed = start.elapsed(); + let rate = tip as f64 / elapsed.as_secs_f64(); + + println!("\nDone in {elapsed:.2?}"); + println!(" {} prefixes at {rate:.0}/s", set.len()); + println!(" ~{:.1} MB", set.len() as f64 * 8.0 / 1_048_576.0); +} diff --git a/crates/brk_rpc/examples/compare_backends.rs b/crates/brk_rpc/examples/compare_backends.rs new file mode 100644 index 000000000..95ede759d --- /dev/null +++ b/crates/brk_rpc/examples/compare_backends.rs @@ -0,0 +1,267 @@ +//! Compares results from the bitcoincore-rpc and corepc backends. +//! +//! Run with: +//! cargo run -p brk_rpc --example compare_backends --features corepc + +use std::time::{Duration, Instant}; + +#[cfg(not(all(feature = "bitcoincore-rpc", feature = "corepc")))] +fn main() { + eprintln!("This example requires both features: --features bitcoincore-rpc,corepc"); + std::process::exit(1); +} + +#[cfg(all(feature = "bitcoincore-rpc", feature = "corepc"))] +fn main() { + use brk_rpc::backend::{self, Auth}; + + brk_logger::init(None).unwrap(); + + let bitcoin_dir = brk_rpc::Client::default_bitcoin_path(); + let auth = Auth::CookieFile(bitcoin_dir.join(".cookie")); + let url = brk_rpc::Client::default_url(); + + let bc = backend::bitcoincore::ClientInner::new(url, auth.clone(), 10, Duration::from_secs(1)) + .expect("bitcoincore client"); + let cp = backend::corepc::ClientInner::new(url, auth, 10, Duration::from_secs(1)) + .expect("corepc client"); + + println!("=== Comparing backends ===\n"); + + // --- get_blockchain_info --- + { + let (t1, r1) = timed(|| bc.get_blockchain_info()); + let (t2, r2) = timed(|| cp.get_blockchain_info()); + let r1 = r1.unwrap(); + let r2 = r2.unwrap(); + println!("get_blockchain_info:"); + println!( + " bitcoincore: headers={} blocks={} ({t1:?})", + r1.headers, r1.blocks + ); + println!( + " corepc: headers={} blocks={} ({t2:?})", + r2.headers, r2.blocks + ); + assert_eq!(r1.headers, r2.headers, "headers mismatch"); + 
assert_eq!(r1.blocks, r2.blocks, "blocks mismatch"); + println!(" MATCH\n"); + } + + // --- get_block_count --- + { + let (t1, r1) = timed(|| bc.get_block_count()); + let (t2, r2) = timed(|| cp.get_block_count()); + let r1 = r1.unwrap(); + let r2 = r2.unwrap(); + println!("get_block_count:"); + println!(" bitcoincore: {r1} ({t1:?})"); + println!(" corepc: {r2} ({t2:?})"); + assert_eq!(r1, r2, "block count mismatch"); + println!(" MATCH\n"); + } + + // --- get_block_hash (height 0) --- + let genesis_hash; + { + let (t1, r1) = timed(|| bc.get_block_hash(0)); + let (t2, r2) = timed(|| cp.get_block_hash(0)); + let r1 = r1.unwrap(); + let r2 = r2.unwrap(); + genesis_hash = r1; + println!("get_block_hash(0):"); + println!(" bitcoincore: {r1} ({t1:?})"); + println!(" corepc: {r2} ({t2:?})"); + assert_eq!(r1, r2, "genesis hash mismatch"); + println!(" MATCH\n"); + } + + // --- get_block_header --- + { + let (t1, r1) = timed(|| bc.get_block_header(&genesis_hash)); + let (t2, r2) = timed(|| cp.get_block_header(&genesis_hash)); + let r1 = r1.unwrap(); + let r2 = r2.unwrap(); + println!("get_block_header(genesis):"); + println!(" bitcoincore: prev={} ({t1:?})", r1.prev_blockhash); + println!(" corepc: prev={} ({t2:?})", r2.prev_blockhash); + assert_eq!(r1, r2, "header mismatch"); + println!(" MATCH\n"); + } + + // --- get_block_info --- + { + let (t1, r1) = timed(|| bc.get_block_info(&genesis_hash)); + let (t2, r2) = timed(|| cp.get_block_info(&genesis_hash)); + let r1 = r1.unwrap(); + let r2 = r2.unwrap(); + println!("get_block_info(genesis):"); + println!( + " bitcoincore: height={} confirmations={} ({t1:?})", + r1.height, r1.confirmations + ); + println!( + " corepc: height={} confirmations={} ({t2:?})", + r2.height, r2.confirmations + ); + assert_eq!(r1.height, r2.height, "height mismatch"); + // confirmations can drift by 1 between calls + assert!( + (r1.confirmations - r2.confirmations).abs() <= 1, + "confirmations mismatch: {} vs {}", + r1.confirmations, + 
r2.confirmations + ); + println!(" MATCH\n"); + } + + // --- get_block_header_info --- + { + let (t1, r1) = timed(|| bc.get_block_header_info(&genesis_hash)); + let (t2, r2) = timed(|| cp.get_block_header_info(&genesis_hash)); + let r1 = r1.unwrap(); + let r2 = r2.unwrap(); + println!("get_block_header_info(genesis):"); + println!( + " bitcoincore: height={} prev={:?} ({t1:?})", + r1.height, r1.previous_block_hash + ); + println!( + " corepc: height={} prev={:?} ({t2:?})", + r2.height, r2.previous_block_hash + ); + assert_eq!(r1.height, r2.height, "height mismatch"); + assert_eq!( + r1.previous_block_hash, r2.previous_block_hash, + "prev hash mismatch" + ); + println!(" MATCH\n"); + } + + // --- get_block (genesis) --- + { + let (t1, r1) = timed(|| bc.get_block(&genesis_hash)); + let (t2, r2) = timed(|| cp.get_block(&genesis_hash)); + let r1 = r1.unwrap(); + let r2 = r2.unwrap(); + println!("get_block(genesis):"); + println!(" bitcoincore: txs={} ({t1:?})", r1.txdata.len()); + println!(" corepc: txs={} ({t2:?})", r2.txdata.len()); + assert_eq!(r1, r2, "block mismatch"); + println!(" MATCH\n"); + } + + // --- get_raw_mempool --- + { + let (t1, r1) = timed(|| bc.get_raw_mempool()); + let (t2, r2) = timed(|| cp.get_raw_mempool()); + let r1 = r1.unwrap(); + let r2 = r2.unwrap(); + println!("get_raw_mempool:"); + println!(" bitcoincore: {} txs ({t1:?})", r1.len()); + println!(" corepc: {} txs ({t2:?})", r2.len()); + // Mempool can change between calls, just check they're reasonable + println!( + " {} (mempool is live, counts may differ slightly)\n", + if r1.len() == r2.len() { + "MATCH" + } else { + "CLOSE" + } + ); + } + + // --- get_raw_mempool_verbose --- + { + let (t1, r1) = timed(|| bc.get_raw_mempool_verbose()); + let (t2, r2) = timed(|| cp.get_raw_mempool_verbose()); + let r1 = r1.unwrap(); + let r2 = r2.unwrap(); + println!("get_raw_mempool_verbose:"); + println!(" bitcoincore: {} entries ({t1:?})", r1.len()); + println!(" corepc: {} entries ({t2:?})", 
r2.len()); + + // Compare a sample entry if both have data + if let (Some((txid1, e1)), Some(_)) = (r1.first(), r2.first()) + && let Some((_, e2)) = r2.iter().find(|(t, _)| t == txid1) + { + println!(" sample txid {txid1}:"); + println!( + " bitcoincore: vsize={} fee={} ancestor_count={}", + e1.vsize, e1.base_fee_sats, e1.ancestor_count + ); + println!( + " corepc: vsize={} fee={} ancestor_count={}", + e2.vsize, e2.base_fee_sats, e2.ancestor_count + ); + assert_eq!(e1.base_fee_sats, e2.base_fee_sats, "fee mismatch"); + assert_eq!( + e1.ancestor_count, e2.ancestor_count, + "ancestor_count mismatch" + ); + println!(" MATCH"); + } + println!(); + } + + // --- get_raw_transaction_hex (tx from block 1, genesis coinbase can't be retrieved) --- + let block1_hash; + { + block1_hash = bc.get_block_hash(1).unwrap(); + let block = bc.get_block(&block1_hash).unwrap(); + let coinbase_txid = block.txdata[0].compute_txid(); + let (t1, r1) = timed(|| bc.get_raw_transaction_hex(&coinbase_txid, Some(&block1_hash))); + let (t2, r2) = timed(|| cp.get_raw_transaction_hex(&coinbase_txid, Some(&block1_hash))); + let r1 = r1.unwrap(); + let r2 = r2.unwrap(); + println!("get_raw_transaction_hex(block 1 coinbase):"); + println!(" bitcoincore: {}... ({t1:?})", &r1[..40.min(r1.len())]); + println!(" corepc: {}... 
({t2:?})", &r2[..40.min(r2.len())]); + assert_eq!(r1, r2, "raw tx hex mismatch"); + println!(" MATCH\n"); + } + + // --- get_tx_out (genesis coinbase, likely unspendable but test the call) --- + { + let block = bc.get_block(&genesis_hash).unwrap(); + let coinbase_txid = block.txdata[0].compute_txid(); + let (t1, r1) = timed(|| bc.get_tx_out(&coinbase_txid, 0, Some(false))); + let (t2, r2) = timed(|| cp.get_tx_out(&coinbase_txid, 0, Some(false))); + let r1 = r1.unwrap(); + let r2 = r2.unwrap(); + println!("get_tx_out(genesis coinbase, vout=0):"); + match (&r1, &r2) { + (Some(a), Some(b)) => { + println!( + " bitcoincore: coinbase={} value={:?} ({t1:?})", + a.coinbase, a.value + ); + println!( + " corepc: coinbase={} value={:?} ({t2:?})", + b.coinbase, b.value + ); + assert_eq!(a.coinbase, b.coinbase, "coinbase mismatch"); + assert_eq!(a.value, b.value, "value mismatch"); + assert_eq!(a.script_pub_key, b.script_pub_key, "script mismatch"); + println!(" MATCH"); + } + (None, None) => { + println!(" both: None (spent) ({t1:?} / {t2:?})"); + println!(" MATCH"); + } + _ => { + println!(" MISMATCH: bitcoincore={r1:?}, corepc={r2:?}"); + panic!("get_tx_out mismatch"); + } + } + println!(); + } + + println!("=== All checks passed ==="); +} + +fn timed(f: impl FnOnce() -> T) -> (Duration, T) { + let start = Instant::now(); + let result = f(); + (start.elapsed(), result) +} diff --git a/crates/brk_rpc/examples/rpc.rs b/crates/brk_rpc/examples/rpc.rs index 897a620a3..e369e115c 100644 --- a/crates/brk_rpc/examples/rpc.rs +++ b/crates/brk_rpc/examples/rpc.rs @@ -1,4 +1,3 @@ -use bitcoincore_rpc::RpcApi; use brk_rpc::{Auth, Client}; fn main() { @@ -11,6 +10,6 @@ fn main() { let client = Client::new(Client::default_url(), auth).unwrap(); loop { - println!("{:?}", client.call(|c| c.get_block_count()).unwrap()); + println!("{:?}", client.get_block_count().unwrap()); } } diff --git a/crates/brk_rpc/src/backend/bitcoincore.rs b/crates/brk_rpc/src/backend/bitcoincore.rs new file mode 
100644 index 000000000..66e84e591 --- /dev/null +++ b/crates/brk_rpc/src/backend/bitcoincore.rs @@ -0,0 +1,243 @@ +use std::{thread::sleep, time::Duration}; + +use bitcoincore_rpc::{Client as CoreClient, Error as RpcError, RpcApi, jsonrpc}; +use brk_error::Result; +use brk_types::Sats; +use parking_lot::RwLock; +use tracing::info; + +use super::{Auth, BlockHeaderInfo, BlockInfo, BlockchainInfo, RawMempoolEntry, TxOutInfo}; + +fn to_rpc_auth(auth: &Auth) -> bitcoincore_rpc::Auth { + match auth { + Auth::None => bitcoincore_rpc::Auth::None, + Auth::UserPass(u, p) => bitcoincore_rpc::Auth::UserPass(u.clone(), p.clone()), + Auth::CookieFile(path) => bitcoincore_rpc::Auth::CookieFile(path.clone()), + } +} + +#[derive(Debug)] +pub struct ClientInner { + url: String, + auth: Auth, + client: RwLock, + max_retries: usize, + retry_delay: Duration, +} + +impl ClientInner { + pub fn new(url: &str, auth: Auth, max_retries: usize, retry_delay: Duration) -> Result { + let rpc_auth = to_rpc_auth(&auth); + let client = Self::retry(max_retries, retry_delay, || { + CoreClient::new(url, rpc_auth.clone()).map_err(Into::into) + })?; + + Ok(Self { + url: url.to_string(), + auth, + client: RwLock::new(client), + max_retries, + retry_delay, + }) + } + + fn recreate(&self) -> Result<()> { + *self.client.write() = CoreClient::new(&self.url, to_rpc_auth(&self.auth))?; + Ok(()) + } + + fn is_retriable(error: &RpcError) -> bool { + matches!( + error, + RpcError::JsonRpc(jsonrpc::Error::Rpc(e)) + if e.code == -32600 || e.code == 401 || e.code == -28 + ) || matches!(error, RpcError::JsonRpc(jsonrpc::Error::Transport(_))) + } + + fn retry(max_retries: usize, delay: Duration, mut f: F) -> Result + where + F: FnMut() -> Result, + { + let mut last_error = None; + + for attempt in 0..=max_retries { + if attempt > 0 { + info!( + "Retrying to connect to Bitcoin Core (attempt {}/{})", + attempt, max_retries + ); + sleep(delay); + } + + match f() { + Ok(value) => { + if attempt > 0 { + info!( + 
"Successfully connected to Bitcoin Core after {} retries", + attempt + ); + } + return Ok(value); + } + Err(e) => { + if attempt == 0 { + info!("Could not connect to Bitcoin Core, retrying: {}", e); + } + last_error = Some(e); + } + } + } + + let err = last_error.unwrap(); + info!( + "Failed to connect to Bitcoin Core after {} attempts", + max_retries + 1 + ); + Err(err) + } + + pub fn call_with_retry(&self, f: F) -> Result + where + F: Fn(&CoreClient) -> Result, + { + for attempt in 0..=self.max_retries { + if attempt > 0 { + info!( + "Trying to reconnect to Bitcoin Core (attempt {}/{})", + attempt, self.max_retries + ); + self.recreate().ok(); + sleep(self.retry_delay); + } + + match f(&self.client.read()) { + Ok(value) => { + if attempt > 0 { + info!( + "Successfully reconnected to Bitcoin Core after {} attempts", + attempt + ); + } + return Ok(value); + } + Err(e) if Self::is_retriable(&e) => { + if attempt == 0 { + info!("Lost connection to Bitcoin Core, reconnecting..."); + } + } + Err(e) => return Err(e), + } + } + + info!( + "Could not reconnect to Bitcoin Core after {} attempts", + self.max_retries + 1 + ); + Err(RpcError::JsonRpc(jsonrpc::Error::Rpc( + jsonrpc::error::RpcError { + code: -1, + message: "Max retries exceeded".to_string(), + data: None, + }, + ))) + } + + pub fn call_once(&self, f: F) -> Result + where + F: Fn(&CoreClient) -> Result, + { + f(&self.client.read()) + } + + // --- Wrapped methods returning shared types --- + + pub fn get_blockchain_info(&self) -> Result { + let r = self.call_with_retry(|c| c.get_blockchain_info())?; + Ok(BlockchainInfo { + headers: r.headers, + blocks: r.blocks, + }) + } + + pub fn get_block(&self, hash: &bitcoin::BlockHash) -> Result { + Ok(self.call_with_retry(|c| c.get_block(hash))?) + } + + pub fn get_block_count(&self) -> Result { + Ok(self.call_with_retry(|c| c.get_block_count())?) + } + + pub fn get_block_hash(&self, height: u64) -> Result { + Ok(self.call_with_retry(|c| c.get_block_hash(height))?) 
+ } + + pub fn get_block_header(&self, hash: &bitcoin::BlockHash) -> Result { + Ok(self.call_with_retry(|c| c.get_block_header(hash))?) + } + + pub fn get_block_info(&self, hash: &bitcoin::BlockHash) -> Result { + let r = self.call_with_retry(|c| c.get_block_info(hash))?; + Ok(BlockInfo { + height: r.height, + confirmations: r.confirmations as i64, + }) + } + + pub fn get_block_header_info(&self, hash: &bitcoin::BlockHash) -> Result { + let r = self.call_with_retry(|c| c.get_block_header_info(hash))?; + Ok(BlockHeaderInfo { + height: r.height, + confirmations: r.confirmations as i64, + previous_block_hash: r.previous_block_hash, + }) + } + + pub fn get_tx_out( + &self, + txid: &bitcoin::Txid, + vout: u32, + include_mempool: Option, + ) -> Result> { + let r = self.call_with_retry(|c| c.get_tx_out(txid, vout, include_mempool))?; + match r { + Some(r) => Ok(Some(TxOutInfo { + coinbase: r.coinbase, + value: Sats::from(r.value.to_sat()), + script_pub_key: r.script_pub_key.script()?, + })), + None => Ok(None), + } + } + + pub fn get_raw_mempool(&self) -> Result> { + Ok(self.call_with_retry(|c| c.get_raw_mempool())?) + } + + pub fn get_raw_mempool_verbose(&self) -> Result> { + let r = self.call_with_retry(|c| c.get_raw_mempool_verbose())?; + Ok(r.into_iter() + .map(|(txid, entry)| { + ( + txid, + RawMempoolEntry { + vsize: entry.vsize, + weight: entry.weight.unwrap_or(entry.vsize * 4), + base_fee_sats: entry.fees.base.to_sat(), + ancestor_count: entry.ancestor_count, + ancestor_size: entry.ancestor_size, + ancestor_fee_sats: entry.fees.ancestor.to_sat(), + depends: entry.depends.into_iter().collect(), + }, + ) + }) + .collect()) + } + + pub fn get_raw_transaction_hex( + &self, + txid: &bitcoin::Txid, + block_hash: Option<&bitcoin::BlockHash>, + ) -> Result { + Ok(self.call_with_retry(|c| c.get_raw_transaction_hex(txid, block_hash))?) 
+ } +} diff --git a/crates/brk_rpc/src/backend/corepc.rs b/crates/brk_rpc/src/backend/corepc.rs new file mode 100644 index 000000000..461600b73 --- /dev/null +++ b/crates/brk_rpc/src/backend/corepc.rs @@ -0,0 +1,318 @@ +use std::{thread::sleep, time::Duration}; + +use brk_error::Result; +use brk_types::Sats; +use corepc_client::client_sync::Auth as CorepcAuth; +use parking_lot::RwLock; +use tracing::info; + +use super::{Auth, BlockHeaderInfo, BlockInfo, BlockchainInfo, RawMempoolEntry, TxOutInfo}; + +type CoreClient = corepc_client::client_sync::v30::Client; +type CoreError = corepc_client::client_sync::Error; + +#[derive(Debug)] +pub struct ClientInner { + url: String, + auth: Auth, + client: RwLock, + max_retries: usize, + retry_delay: Duration, +} + +impl ClientInner { + pub fn new(url: &str, auth: Auth, max_retries: usize, retry_delay: Duration) -> Result { + let client = Self::retry(max_retries, retry_delay, || { + Self::create_client(url, &auth).map_err(Into::into) + })?; + + Ok(Self { + url: url.to_string(), + auth, + client: RwLock::new(client), + max_retries, + retry_delay, + }) + } + + fn create_client(url: &str, auth: &Auth) -> Result { + let corepc_auth = match auth { + Auth::None => CorepcAuth::None, + Auth::UserPass(u, p) => CorepcAuth::UserPass(u.clone(), p.clone()), + Auth::CookieFile(path) => CorepcAuth::CookieFile(path.clone()), + }; + match corepc_auth { + CorepcAuth::None => Ok(CoreClient::new(url)), + other => CoreClient::new_with_auth(url, other), + } + } + + fn recreate(&self) -> Result<()> { + *self.client.write() = Self::create_client(&self.url, &self.auth)?; + Ok(()) + } + + fn is_retriable(error: &CoreError) -> bool { + match error { + CoreError::JsonRpc(corepc_jsonrpc::error::Error::Rpc(e)) => { + e.code == -32600 || e.code == 401 || e.code == -28 + } + CoreError::JsonRpc(corepc_jsonrpc::error::Error::Transport(_)) => true, + _ => false, + } + } + + fn retry(max_retries: usize, delay: Duration, mut f: F) -> Result + where + F: FnMut() -> 
Result, + { + let mut last_error = None; + + for attempt in 0..=max_retries { + if attempt > 0 { + info!( + "Retrying to connect to Bitcoin Core (attempt {}/{})", + attempt, max_retries + ); + sleep(delay); + } + + match f() { + Ok(value) => { + if attempt > 0 { + info!( + "Successfully connected to Bitcoin Core after {} retries", + attempt + ); + } + return Ok(value); + } + Err(e) => { + if attempt == 0 { + info!("Could not connect to Bitcoin Core, retrying: {}", e); + } + last_error = Some(e); + } + } + } + + let err = last_error.unwrap(); + info!( + "Failed to connect to Bitcoin Core after {} attempts", + max_retries + 1 + ); + Err(err) + } + + fn call_with_retry(&self, f: F) -> Result + where + F: Fn(&CoreClient) -> Result, + { + for attempt in 0..=self.max_retries { + if attempt > 0 { + info!( + "Trying to reconnect to Bitcoin Core (attempt {}/{})", + attempt, self.max_retries + ); + self.recreate().ok(); + sleep(self.retry_delay); + } + + match f(&self.client.read()) { + Ok(value) => { + if attempt > 0 { + info!( + "Successfully reconnected to Bitcoin Core after {} attempts", + attempt + ); + } + return Ok(value); + } + Err(e) if Self::is_retriable(&e) => { + if attempt == 0 { + info!("Lost connection to Bitcoin Core, reconnecting..."); + } + } + Err(e) => return Err(e), + } + } + + info!( + "Could not reconnect to Bitcoin Core after {} attempts", + self.max_retries + 1 + ); + Err(CoreError::JsonRpc(corepc_jsonrpc::error::Error::Rpc( + corepc_jsonrpc::error::RpcError { + code: -1, + message: "Max retries exceeded".to_string(), + data: None, + }, + ))) + } + + // --- Wrapped methods returning shared types --- + + pub fn get_blockchain_info(&self) -> Result { + let r = self.call_with_retry(|c| c.get_blockchain_info())?; + Ok(BlockchainInfo { + headers: r.headers as u64, + blocks: r.blocks as u64, + }) + } + + pub fn get_block(&self, hash: &bitcoin::BlockHash) -> Result { + Ok(self.call_with_retry(|c| c.get_block(*hash))?) 
+ } + + pub fn get_block_count(&self) -> Result { + let r = self.call_with_retry(|c| c.get_block_count())?; + Ok(r.0) + } + + pub fn get_block_hash(&self, height: u64) -> Result { + let r = self.call_with_retry(|c| c.get_block_hash(height))?; + Ok(r.block_hash()?) + } + + pub fn get_block_header(&self, hash: &bitcoin::BlockHash) -> Result { + let r = self.call_with_retry(|c| c.get_block_header(hash))?; + r.block_header() + .map_err(|_| CoreError::UnexpectedStructure.into()) + } + + pub fn get_block_info(&self, hash: &bitcoin::BlockHash) -> Result { + let r = self.call_with_retry(|c| c.get_block_verbose_one(*hash))?; + Ok(BlockInfo { + height: r.height as usize, + confirmations: r.confirmations, + }) + } + + pub fn get_block_header_info(&self, hash: &bitcoin::BlockHash) -> Result { + let r = self.call_with_retry(|c| c.get_block_header_verbose(hash))?; + let previous_block_hash = r + .previous_block_hash + .map(|s| s.parse::()) + .transpose() + .map_err(|_| { + corepc_client::client_sync::Error::UnexpectedStructure + })?; + Ok(BlockHeaderInfo { + height: r.height as usize, + confirmations: r.confirmations, + previous_block_hash, + }) + } + + pub fn get_tx_out( + &self, + txid: &bitcoin::Txid, + vout: u32, + include_mempool: Option, + ) -> Result> { + // corepc's typed get_tx_out doesn't support include_mempool, so use raw call + let r: Option = self.call_with_retry(|c| { + let mut args = vec![ + serde_json::to_value(txid).map_err(CoreError::from)?, + serde_json::to_value(vout).map_err(CoreError::from)?, + ]; + if let Some(mempool) = include_mempool { + args.push(serde_json::to_value(mempool).map_err(CoreError::from)?); + } + c.call("gettxout", &args) + })?; + + match r { + Some(r) => { + let script_pub_key = + bitcoin::ScriptBuf::from_hex(&r.script_pub_key.hex).map_err(|_| { + corepc_client::client_sync::Error::UnexpectedStructure + })?; + let sats = (r.value * 100_000_000.0).round() as u64; + Ok(Some(TxOutInfo { + coinbase: r.coinbase, + value: Sats::from(sats), + 
script_pub_key, + })) + } + None => Ok(None), + } + } + + pub fn get_raw_mempool(&self) -> Result> { + let r = self.call_with_retry(|c| c.get_raw_mempool())?; + r.0.iter() + .map(|s| { + s.parse::().map_err(|_| { + corepc_client::client_sync::Error::UnexpectedStructure.into() + }) + }) + .collect() + } + + pub fn get_raw_mempool_verbose(&self) -> Result> { + let r = self.call_with_retry(|c| c.get_raw_mempool_verbose())?; + r.0.into_iter() + .map(|(txid_str, entry)| { + let txid = txid_str.parse::().map_err(|_| { + corepc_client::client_sync::Error::UnexpectedStructure + })?; + let depends = entry + .depends + .iter() + .map(|s| { + s.parse::().map_err(|_| { + corepc_client::client_sync::Error::UnexpectedStructure + }) + }) + .collect::, _>>()?; + Ok(( + txid, + RawMempoolEntry { + vsize: entry.vsize as u64, + weight: entry.weight as u64, + base_fee_sats: (entry.fees.base * 100_000_000.0).round() as u64, + ancestor_count: entry.ancestor_count as u64, + ancestor_size: entry.ancestor_size as u64, + ancestor_fee_sats: (entry.fees.ancestor * 100_000_000.0).round() as u64, + depends, + }, + )) + }) + .collect() + } + + pub fn get_raw_transaction_hex( + &self, + txid: &bitcoin::Txid, + block_hash: Option<&bitcoin::BlockHash>, + ) -> Result { + // corepc's get_raw_transaction doesn't support block_hash param, use raw call + let r: String = self.call_with_retry(|c| { + let mut args: Vec = vec![ + serde_json::to_value(txid).map_err(CoreError::from)?, + serde_json::Value::Bool(false), + ]; + if let Some(bh) = block_hash { + args.push(serde_json::to_value(bh).map_err(CoreError::from)?); + } + c.call("getrawtransaction", &args) + })?; + Ok(r) + } +} + +// Local deserialization structs for raw RPC responses + +#[derive(serde::Deserialize)] +struct TxOutResponse { + coinbase: bool, + value: f64, + #[serde(rename = "scriptPubKey")] + script_pub_key: TxOutScriptPubKey, +} + +#[derive(serde::Deserialize)] +struct TxOutScriptPubKey { + hex: String, +} diff --git 
a/crates/brk_rpc/src/backend/mod.rs b/crates/brk_rpc/src/backend/mod.rs new file mode 100644 index 000000000..71eac244e --- /dev/null +++ b/crates/brk_rpc/src/backend/mod.rs @@ -0,0 +1,64 @@ +use std::path::PathBuf; + +use bitcoin::ScriptBuf; +use brk_types::Sats; + +#[derive(Debug, Clone)] +pub struct BlockchainInfo { + pub headers: u64, + pub blocks: u64, +} + +#[derive(Debug, Clone)] +pub struct BlockInfo { + pub height: usize, + pub confirmations: i64, +} + +#[derive(Debug, Clone)] +pub struct BlockHeaderInfo { + pub height: usize, + pub confirmations: i64, + pub previous_block_hash: Option, +} + +#[derive(Debug, Clone)] +pub struct TxOutInfo { + pub coinbase: bool, + pub value: Sats, + pub script_pub_key: ScriptBuf, +} + +#[derive(Debug, Clone)] +pub struct RawMempoolEntry { + pub vsize: u64, + pub weight: u64, + pub base_fee_sats: u64, + pub ancestor_count: u64, + pub ancestor_size: u64, + pub ancestor_fee_sats: u64, + pub depends: Vec, +} + +#[derive(Clone, Debug)] +pub enum Auth { + None, + UserPass(String, String), + CookieFile(PathBuf), +} + +#[cfg(feature = "bitcoincore-rpc")] +pub mod bitcoincore; + +#[cfg(feature = "corepc")] +pub mod corepc; + +// Default ClientInner: prefer bitcoincore-rpc when both are enabled +#[cfg(feature = "bitcoincore-rpc")] +pub use bitcoincore::ClientInner; + +#[cfg(all(feature = "corepc", not(feature = "bitcoincore-rpc")))] +pub use corepc::ClientInner; + +#[cfg(not(any(feature = "bitcoincore-rpc", feature = "corepc")))] +compile_error!("brk_rpc requires either the `bitcoincore-rpc` or `corepc` feature"); diff --git a/crates/brk_rpc/src/inner.rs b/crates/brk_rpc/src/inner.rs deleted file mode 100644 index 48647f272..000000000 --- a/crates/brk_rpc/src/inner.rs +++ /dev/null @@ -1,141 +0,0 @@ -use std::{thread::sleep, time::Duration}; - -use bitcoincore_rpc::{Client as CoreClient, Error as RpcError, jsonrpc}; -use brk_error::Result; -use parking_lot::RwLock; -use tracing::info; - -pub use bitcoincore_rpc::Auth; - 
-#[derive(Debug)] -pub struct ClientInner { - url: String, - auth: Auth, - client: RwLock, - max_retries: usize, - retry_delay: Duration, -} - -impl ClientInner { - pub fn new(url: &str, auth: Auth, max_retries: usize, retry_delay: Duration) -> Result { - let client = Self::retry(max_retries, retry_delay, || { - CoreClient::new(url, auth.clone()).map_err(Into::into) - })?; - - Ok(Self { - url: url.to_string(), - auth, - client: RwLock::new(client), - max_retries, - retry_delay, - }) - } - - fn recreate(&self) -> Result<()> { - *self.client.write() = CoreClient::new(&self.url, self.auth.clone())?; - Ok(()) - } - - fn is_retriable(error: &RpcError) -> bool { - matches!( - error, - RpcError::JsonRpc(jsonrpc::Error::Rpc(e)) - if e.code == -32600 || e.code == 401 || e.code == -28 - ) || matches!(error, RpcError::JsonRpc(jsonrpc::Error::Transport(_))) - } - - fn retry(max_retries: usize, delay: Duration, mut f: F) -> Result - where - F: FnMut() -> Result, - { - let mut last_error = None; - - for attempt in 0..=max_retries { - if attempt > 0 { - info!( - "Retrying to connect to Bitcoin Core (attempt {}/{})", - attempt, max_retries - ); - sleep(delay); - } - - match f() { - Ok(value) => { - if attempt > 0 { - info!( - "Successfully connected to Bitcoin Core after {} retries", - attempt - ); - } - return Ok(value); - } - Err(e) => { - if attempt == 0 { - info!("Could not connect to Bitcoin Core, retrying: {}", e); - } - last_error = Some(e); - } - } - } - - let err = last_error.unwrap(); - info!( - "Failed to connect to Bitcoin Core after {} attempts", - max_retries + 1 - ); - Err(err) - } - - pub fn call_with_retry(&self, f: F) -> Result - where - F: Fn(&CoreClient) -> Result, - { - for attempt in 0..=self.max_retries { - if attempt > 0 { - info!( - "Trying to reconnect to Bitcoin Core (attempt {}/{})", - attempt, self.max_retries - ); - self.recreate().ok(); - sleep(self.retry_delay); - } - - match f(&self.client.read()) { - Ok(value) => { - if attempt > 0 { - info!( - 
"Successfully reconnected to Bitcoin Core after {} attempts", - attempt - ); - } - return Ok(value); - } - Err(e) if Self::is_retriable(&e) => { - if attempt == 0 { - info!("Lost connection to Bitcoin Core, reconnecting..."); - } - } - Err(e) => return Err(e), - } - } - - info!( - "Could not reconnect to Bitcoin Core after {} attempts", - self.max_retries + 1 - ); - Err(RpcError::JsonRpc(jsonrpc::Error::Rpc( - jsonrpc::error::RpcError { - code: -1, - message: "Max retries exceeded".to_string(), - data: None, - }, - ))) - } - - pub fn call_once(&self, f: F) -> Result - where - F: Fn(&CoreClient) -> Result, - { - f(&self.client.read()) - } -} diff --git a/crates/brk_rpc/src/lib.rs b/crates/brk_rpc/src/lib.rs index e35a6c529..34e263ea7 100644 --- a/crates/brk_rpc/src/lib.rs +++ b/crates/brk_rpc/src/lib.rs @@ -6,22 +6,18 @@ use std::{ time::Duration, }; -use bitcoin::{block::Header, consensus::encode}; -use bitcoincore_rpc::{ - json::{GetBlockHeaderResult, GetBlockResult, GetBlockchainInfoResult, GetTxOutResult}, - {Client as CoreClient, Error as RpcError, RpcApi}, -}; +use bitcoin::consensus::encode; use brk_error::{Error, Result}; use brk_types::{ BlockHash, Height, MempoolEntryInfo, Sats, Transaction, TxIn, TxOut, TxStatus, TxWithHex, Txid, Vout, }; -pub use bitcoincore_rpc::Auth; +pub mod backend; -mod inner; +pub use backend::{Auth, BlockHeaderInfo, BlockInfo, BlockchainInfo, TxOutInfo}; -use inner::ClientInner; +use backend::ClientInner; use tracing::{debug, info}; /// @@ -53,29 +49,25 @@ impl Client { /// Returns a data structure containing various state info regarding /// blockchain processing. 
- pub fn get_blockchain_info(&self) -> Result { - self.call(move |c| c.get_blockchain_info()) - .map_err(Into::into) + pub fn get_blockchain_info(&self) -> Result { + self.0.get_blockchain_info() } pub fn get_block<'a, H>(&self, hash: &'a H) -> Result where &'a H: Into<&'a bitcoin::BlockHash>, { - self.call(|c| c.get_block(hash.into())).map_err(Into::into) + self.0.get_block(hash.into()) } /// Returns the numbers of block in the longest chain. pub fn get_block_count(&self) -> Result { - self.call(|c| c.get_block_count()).map_err(Into::into) + self.0.get_block_count() } /// Returns the numbers of block in the longest chain. pub fn get_last_height(&self) -> Result { - // debug!("Get last height..."); - self.call(|c| c.get_block_count()) - .map(Height::from) - .map_err(Into::into) + self.0.get_block_count().map(Height::from) } /// Get block hash at a given height @@ -83,33 +75,30 @@ impl Client { where H: Into + Copy, { - self.call(|c| c.get_block_hash(height.into())) + self.0 + .get_block_hash(height.into()) .map(BlockHash::from) - .map_err(Into::into) } - pub fn get_block_header<'a, H>(&self, hash: &'a H) -> Result
+ pub fn get_block_header<'a, H>(&self, hash: &'a H) -> Result where &'a H: Into<&'a bitcoin::BlockHash>, { - self.call(|c| c.get_block_header(hash.into())) - .map_err(Into::into) + self.0.get_block_header(hash.into()) } - pub fn get_block_info<'a, H>(&self, hash: &'a H) -> Result + pub fn get_block_info<'a, H>(&self, hash: &'a H) -> Result where &'a H: Into<&'a bitcoin::BlockHash>, { - self.call(move |c| c.get_block_info(hash.into())) - .map_err(Into::into) + self.0.get_block_info(hash.into()) } - pub fn get_block_header_info<'a, H>(&self, hash: &'a H) -> Result + pub fn get_block_header_info<'a, H>(&self, hash: &'a H) -> Result where &'a H: Into<&'a bitcoin::BlockHash>, { - self.call(|c| c.get_block_header_info(hash.into())) - .map_err(Into::into) + self.0.get_block_header_info(hash.into()) } pub fn get_transaction<'a, T, H>( @@ -125,12 +114,9 @@ impl Client { Ok(tx) } - pub fn get_mempool_transaction<'a, T>(&self, txid: &'a T) -> Result - where - &'a T: Into<&'a bitcoin::Txid>, - { + pub fn get_mempool_transaction(&self, txid: &Txid) -> Result { // Get hex first, then deserialize from it - let hex = self.get_raw_transaction_hex(txid, None as Option<&'a BlockHash>)?; + let hex = self.get_raw_transaction_hex(txid, None as Option<&BlockHash>)?; let mut tx = encode::deserialize_hex::(&hex)?; let input = mem::take(&mut tx.input) @@ -146,8 +132,8 @@ impl Client { let txout = if let Some(txout_result) = txout_result { Some(TxOut::from(( - txout_result.script_pub_key.script()?, - Sats::from(txout_result.value.to_sat()), + txout_result.script_pub_key, + txout_result.value, ))) } else { None @@ -168,7 +154,7 @@ impl Client { let mut tx = Transaction { index: None, - txid: tx.compute_txid().into(), + txid: txid.clone(), version: tx.version.into(), total_sigop_cost: tx.total_sigop_cost(|_| None), weight: tx.weight().into(), @@ -190,31 +176,30 @@ impl Client { txid: &Txid, vout: Vout, include_mempool: Option, - ) -> Result> { - self.call(|c| c.get_tx_out(txid.into(), 
vout.into(), include_mempool)) - .map_err(Into::into) + ) -> Result> { + self.0.get_tx_out(txid.into(), vout.into(), include_mempool) } /// Get txids of all transactions in a memory pool pub fn get_raw_mempool(&self) -> Result> { - self.call(|c| c.get_raw_mempool()) + self.0 + .get_raw_mempool() .map(|v| unsafe { mem::transmute(v) }) - .map_err(Into::into) } /// Get all mempool entries with their fee data in a single RPC call pub fn get_raw_mempool_verbose(&self) -> Result> { - let result = self.call(|c| c.get_raw_mempool_verbose())?; + let result = self.0.get_raw_mempool_verbose()?; Ok(result .into_iter() - .map(|(txid, entry)| MempoolEntryInfo { + .map(|(txid, entry): (bitcoin::Txid, backend::RawMempoolEntry)| MempoolEntryInfo { txid: txid.into(), vsize: entry.vsize, - weight: entry.weight.unwrap_or(entry.vsize * 4), - fee: Sats::from(entry.fees.base.to_sat()), + weight: entry.weight, + fee: Sats::from(entry.base_fee_sats), ancestor_count: entry.ancestor_count, ancestor_size: entry.ancestor_size, - ancestor_fee: Sats::from(entry.fees.ancestor.to_sat()), + ancestor_fee: Sats::from(entry.ancestor_fee_sats), depends: entry.depends.into_iter().map(Txid::from).collect(), }) .collect()) @@ -243,8 +228,8 @@ impl Client { &'a T: Into<&'a bitcoin::Txid>, &'a H: Into<&'a bitcoin::BlockHash>, { - self.call(|c| c.get_raw_transaction_hex(txid.into(), block_hash.map(|h| h.into()))) - .map_err(Into::into) + self.0 + .get_raw_transaction_hex(txid.into(), block_hash.map(|h| h.into())) } /// Checks if a block is in the main chain (has positive confirmations) @@ -305,16 +290,18 @@ impl Client { Ok(()) } - pub fn call(&self, f: F) -> Result + #[cfg(feature = "bitcoincore-rpc")] + pub fn call(&self, f: F) -> Result where - F: Fn(&CoreClient) -> Result, + F: Fn(&bitcoincore_rpc::Client) -> Result, { self.0.call_with_retry(f) } - pub fn call_once(&self, f: F) -> Result + #[cfg(feature = "bitcoincore-rpc")] + pub fn call_once(&self, f: F) -> Result where - F: Fn(&CoreClient) -> Result, 
+ F: Fn(&bitcoincore_rpc::Client) -> Result, { self.0.call_once(f) } diff --git a/crates/brk_server/Cargo.toml b/crates/brk_server/Cargo.toml index f0dc26f42..585712f76 100644 --- a/crates/brk_server/Cargo.toml +++ b/crates/brk_server/Cargo.toml @@ -6,6 +6,7 @@ edition.workspace = true license.workspace = true homepage.workspace = true repository.workspace = true +exclude = ["examples/", "src/api/scalar.js"] [features] bindgen = ["dep:brk_bindgen"] @@ -16,12 +17,11 @@ axum = { workspace = true } brk_bindgen = { workspace = true, optional = true } brk_computer = { workspace = true } brk_error = { workspace = true, features = ["jiff", "serde_json", "tokio", "vecdb"] } -brk_fetcher = { workspace = true } brk_indexer = { workspace = true } brk_logger = { workspace = true } brk_query = { workspace = true } brk_reader = { workspace = true } -brk_rpc = { workspace = true } +brk_rpc = { workspace = true, features = ["corepc"] } brk_types = { workspace = true } brk_traversable = { workspace = true } brk_website = { workspace = true } diff --git a/crates/brk_server/examples/server.rs b/crates/brk_server/examples/server.rs index 0c19fa800..4433309db 100644 --- a/crates/brk_server/examples/server.rs +++ b/crates/brk_server/examples/server.rs @@ -1,8 +1,7 @@ -use std::{path::Path, thread}; +use std::path::Path; use brk_computer::Computer; use brk_error::Result; -use brk_fetcher::Fetcher; use brk_indexer::Indexer; use brk_mempool::Mempool; use brk_query::AsyncQuery; @@ -13,15 +12,6 @@ use tracing::info; use vecdb::Exit; pub fn main() -> Result<()> { - // Can't increase main thread's stack size, thus we need to use another thread - thread::Builder::new() - .stack_size(512 * 1024 * 1024) - .spawn(run)? 
- .join() - .unwrap() -} - -fn run() -> Result<()> { brk_logger::init(Some(Path::new(".log")))?; let bitcoin_dir = Client::default_bitcoin_path(); @@ -34,12 +24,11 @@ fn run() -> Result<()> { let reader = Reader::new(bitcoin_dir.join("blocks"), &client); let indexer = Indexer::forced_import(&outputs_dir)?; - let fetcher = Some(Fetcher::import(None)?); - let computer = Computer::forced_import(&outputs_dir, &indexer, fetcher)?; + let computer = Computer::forced_import(&outputs_dir, &indexer)?; let mempool = Mempool::new(&client); let mempool_clone = mempool.clone(); - thread::spawn(move || { + std::thread::spawn(move || { mempool_clone.start(); }); diff --git a/crates/brk_server/src/api/metrics/mod.rs b/crates/brk_server/src/api/metrics/mod.rs index 15973fbb0..f81e544a3 100644 --- a/crates/brk_server/src/api/metrics/mod.rs +++ b/crates/brk_server/src/api/metrics/mod.rs @@ -167,7 +167,7 @@ impl ApiMetricsRoutes for ApiRouter { .summary("Get supported indexes for a metric") .description( "Returns the list of indexes supported by the specified metric. \ - For example, `realized_price` might be available on dateindex, weekindex, and monthindex." + For example, `realized_price` might be available on day1, week1, and month1." ) .ok_response::>() .not_modified() @@ -262,7 +262,7 @@ impl ApiMetricsRoutes for ApiRouter { .description( "**DEPRECATED** - Use `/api/metric/{metric}/{index}` instead.\n\n\ Sunset date: 2027-01-01. May be removed earlier in case of abuse.\n\n\ - Legacy endpoint for querying metrics by variant path (e.g., `dateindex_to_price`). \ + Legacy endpoint for querying metrics by variant path (e.g., `day1_to_price`). \ Returns raw data without the MetricData wrapper." 
) .deprecated() diff --git a/crates/brk_server/src/api/mod.rs b/crates/brk_server/src/api/mod.rs index 3acac0c3b..bbb2d7442 100644 --- a/crates/brk_server/src/api/mod.rs +++ b/crates/brk_server/src/api/mod.rs @@ -83,8 +83,15 @@ impl ApiRoutes for ApiRouter { ), ) .route("/api", get(Html::from(include_str!("./scalar.html")))) + // Pre-compressed with: brotli -c -q 11 scalar.js > scalar.js.br .route("/scalar.js", get(|| async { - ([(header::CONTENT_TYPE, "application/javascript")], include_str!("./scalar.js")) + ( + [ + (header::CONTENT_TYPE, "application/javascript"), + (header::CONTENT_ENCODING, "br"), + ], + include_bytes!("./scalar.js.br").as_slice(), + ) })) .route( "/api/{*path}", diff --git a/crates/brk_server/src/api/scalar.html b/crates/brk_server/src/api/scalar.html index 14f1405ac..d073688c1 100644 --- a/crates/brk_server/src/api/scalar.html +++ b/crates/brk_server/src/api/scalar.html @@ -22,6 +22,9 @@ telemetry: false, withDefaultFonts: false, // showToolbar: "never", + agent: { + disabled: true, + }, }); diff --git a/crates/brk_server/src/api/scalar.js b/crates/brk_server/src/api/scalar.js index 2d64d5b2b..918ecc078 100644 --- a/crates/brk_server/src/api/scalar.js +++ b/crates/brk_server/src/api/scalar.js @@ -1,8 +1,8 @@ /** * Minified by jsDelivr using Terser v5.39.0. - * Original file: /npm/@scalar/api-reference@1.43.10/dist/browser/standalone.js + * Original file: /npm/@scalar/api-reference@1.44.25/dist/browser/standalone.js * * Do NOT use SRI with dynamically generated files! 
More information: https://www.jsdelivr.com/using-sri-with-dynamic-files */ -!function(){"use strict";try{if("undefined"!=typeof document){var e=document.createElement("style");e.appendChild(document.createTextNode('.references-classic-header[data-v-9198d025]{display:flex;align-items:center;gap:12px;max-width:var(--refs-content-max-width);margin:auto;padding:12px 0}.references-classic-header-content[data-v-9198d025]{display:flex;gap:12px;flex-grow:1}.references-classic-header-container[data-v-9198d025]{padding:0 60px}@container narrow-references-container (max-width: 900px){.references-classic-header[data-v-9198d025]{padding:12px 24px}.references-classic-header-container[data-v-9198d025]{padding:0}}.references-classic-header-icon[data-v-9198d025]{height:24px;color:var(--scalar-color-1)}.client-libraries-content[data-v-6a49c111]{container:client-libraries-content / inline-size;display:flex;justify-content:center;overflow:hidden;padding:0 12px;background-color:var(--scalar-background-1);border-left:var(--scalar-border-width) solid var(--scalar-border-color);border-right:var(--scalar-border-width) solid var(--scalar-border-color)}.client-libraries[data-v-6a49c111]{display:flex;align-items:center;justify-content:center;width:100%;position:relative;cursor:pointer;white-space:nowrap;padding:8px 2px;gap:6px;color:var(--scalar-color-3);border-bottom:1px solid transparent;-webkit-user-select:none;user-select:none}.client-libraries[data-v-6a49c111]:not(.client-libraries__active):hover:before{content:"";position:absolute;width:calc(100% - 4px);height:calc(100% - 4px);background:var(--scalar-background-2);left:2px;top:2px;z-index:0;border-radius:var(--scalar-radius)}.client-libraries[data-v-6a49c111]:active{color:var(--scalar-color-1)}.client-libraries[data-v-6a49c111]:focus-visible{outline:none;box-shadow:inset 0 0 0 1px var(--scalar-color-accent)}@media screen and 
(max-width:450px){.client-libraries[data-v-6a49c111]:nth-of-type(4),.client-libraries[data-v-6a49c111]:nth-of-type(5){display:none}}.client-libraries-icon[data-v-6a49c111]{max-width:14px;max-height:14px;min-width:14px;width:100%;aspect-ratio:1;display:flex;align-items:center;justify-content:center;position:relative;box-sizing:border-box;color:currentColor}.client-libraries-icon__more svg[data-v-6a49c111]{height:initial}@container client-libraries-content (width < 400px){.client-libraries__select[data-v-6a49c111]{width:fit-content}.client-libraries__select .client-libraries-icon__more+span[data-v-6a49c111]{display:none}}@container client-libraries-content (width < 380px){.client-libraries[data-v-6a49c111]{width:100%}.client-libraries span[data-v-6a49c111]{display:none}}.client-libraries__active[data-v-6a49c111]{color:var(--scalar-color-1);border-bottom:1px solid var(--scalar-color-1)}@keyframes codeloader-6a49c111{0%{transform:rotate(0)}to{transform:rotate(1turn)}}.client-libraries .client-libraries-text[data-v-6a49c111]{font-size:var(--scalar-small);position:relative;display:flex;align-items:center}.client-libraries__active .client-libraries-text[data-v-6a49c111]{color:var(--scalar-color-1);font-weight:var(--scalar-semibold)}@media screen and (max-width:600px){.references-classic .client-libraries[data-v-6a49c111]{flex-direction:column}}.selected-client[data-v-e59e2ca0]{color:var(--scalar-color-1);font-size:var(--scalar-small);font-family:var(--scalar-font-code);padding:9px 12px;border-top:none;white-space:nowrap;overflow:hidden;text-overflow:ellipsis;background:var(--scalar-background-1);border:var(--scalar-border-width) solid var(--scalar-border-color);border-bottom-left-radius:var(--scalar-radius-lg);border-bottom-right-radius:var(--scalar-radius-lg);min-height:fit-content}.client-libraries-heading[data-v-e59e2ca0]{font-size:var(--scalar-small);font-weight:var(--scalar-font-medium);color:var(--scalar-color-1);padding:9px 
12px;background-color:var(--scalar-background-2);display:flex;align-items:center;max-height:32px;border:var(--scalar-border-width) solid var(--scalar-border-color);border-top-left-radius:var(--scalar-radius-lg);border-top-right-radius:var(--scalar-radius-lg)}[data-v-e59e2ca0] .scalar-codeblock-pre .hljs{margin-top:8px}.badge[data-v-3dedb7e4]{color:var(--badge-text-color, var(--scalar-color-2));font-size:var(--scalar-mini);background:var(--badge-background-color, var(--scalar-background-2));border:var(--scalar-border-width) solid var(--badge-border-color, var(--scalar-border-color));padding:2px 6px;border-radius:12px;display:inline-block}.badge.text-orange[data-v-3dedb7e4]{background:color-mix(in srgb,var(--scalar-color-orange),transparent 90%);border:transparent}.badge.text-yellow[data-v-3dedb7e4]{background:color-mix(in srgb,var(--scalar-color-yellow),transparent 90%);border:transparent}.badge.text-red[data-v-3dedb7e4]{background:color-mix(in srgb,var(--scalar-color-red),transparent 90%);border:transparent}.badge.text-purple[data-v-3dedb7e4]{background:color-mix(in srgb,var(--scalar-color-purple),transparent 90%);border:transparent}.badge.text-green[data-v-3dedb7e4]{background:color-mix(in srgb,var(--scalar-color-green),transparent 90%);border:transparent}@layer properties{@supports (((-webkit-hyphens:none)) and (not (margin-trim:inline))) or ((-moz-orient:inline) and (not (color:rgb(from red r g b)))){[data-v-59b5011b],[data-v-59b5011b]:before,[data-v-59b5011b]:after,[data-v-59b5011b]::backdrop{--tw-outline-style:solid}}}.download-container[data-v-59b5011b]{z-index:1;flex-direction:column;gap:16px;width:fit-content;margin:0 .5px 8px;display:flex;position:relative}.download-container[data-v-59b5011b]:has(:focus-visible):before,.download-container.download-both[data-v-59b5011b]:hover:before{content:"";border-radius:var(--scalar-radius-lg);width:calc(100% + 
24px);height:90px;box-shadow:var(--scalar-shadow-2);pointer-events:none;background:var(--scalar-background-1);position:absolute;top:-11px;left:-12px}.download-button[data-v-59b5011b]{color:var(--scalar-link-color);cursor:pointer;outline:none;justify-content:center;align-items:center;gap:4px;height:fit-content;padding:0;display:flex;position:relative;white-space:nowrap!important}.download-button[data-v-59b5011b]:before{border-radius:var(--scalar-radius);content:"";width:calc(100% + 18px);height:calc(100% + 16px);position:absolute;top:-8px;left:-9px}.download-button[data-v-59b5011b]:last-of-type:before{width:calc(100% + 15px)}.download-button[data-v-59b5011b]:hover:before{background:var(--scalar-background-2);border:var(--scalar-border-width)solid var(--scalar-border-color)}.download-button[data-v-59b5011b]:focus-visible:before{background:var(--scalar-background-2);border:var(--scalar-border-width)solid var(--scalar-border-color);outline-style:var(--tw-outline-style);outline-width:1px}.download-button span[data-v-59b5011b]{--font-color:var(--scalar-link-color,var(--scalar-color-accent));--font-visited:var(--scalar-link-color-visited,var(--scalar-color-2));-webkit-text-decoration:var(--scalar-text-decoration);text-decoration:var(--scalar-text-decoration);color:var(--font-color);font-weight:var(--scalar-link-font-weight,var(--scalar-semibold));text-underline-offset:.25rem;text-decoration-thickness:1px;-webkit-text-decoration-color:var(--font-color);text-decoration-color:var(--font-color)}@supports (color:color-mix(in lab,red,red)){.download-button span[data-v-59b5011b]{-webkit-text-decoration-color:color-mix(in srgb,var(--font-color)30%,transparent);text-decoration-color:color-mix(in srgb,var(--font-color)30%,transparent)}}.download-button span[data-v-59b5011b]{z-index:1;align-items:center;gap:6px;line-height:1.625;display:flex}.download-button:hover 
span[data-v-59b5011b]{-webkit-text-decoration-color:var(--scalar-color-1,currentColor);text-decoration-color:var(--scalar-color-1,currentColor);color:var(--scalar-link-color-hover,var(--scalar-color-accent));-webkit-text-decoration:var(--scalar-text-decoration-hover);text-decoration:var(--scalar-text-decoration-hover)}.download-button[data-v-59b5011b]:nth-of-type(2){clip-path:inset(50%);white-space:nowrap;border-width:0;width:1px;height:1px;margin:-1px;padding:0;position:absolute;overflow:hidden}.download-container:has(:focus-visible) .download-button[data-v-59b5011b]:nth-of-type(2),.download-container:hover .download-button[data-v-59b5011b]:nth-of-type(2){clip-path:none;white-space:normal;width:auto;height:auto;margin:0;padding:0;position:absolute;top:42px;overflow:visible}.extension[data-v-59b5011b]{z-index:1;background:var(--scalar-link-color,var(--scalar-color-accent));color:var(--scalar-background-1)}.download-container:has(:focus-visible) .extension[data-v-59b5011b],.download-container:hover .extension[data-v-59b5011b]{opacity:1}.download-link[data-v-59b5011b]{--font-color:var(--scalar-link-color,var(--scalar-color-accent));--font-visited:var(--scalar-link-color-visited,var(--scalar-color-2));-webkit-text-decoration:var(--scalar-text-decoration);text-decoration:var(--scalar-text-decoration);color:var(--font-color);font-weight:var(--scalar-link-font-weight,var(--scalar-semibold));text-underline-offset:.25rem;text-decoration-thickness:1px;-webkit-text-decoration-color:var(--font-color);text-decoration-color:var(--font-color)}@supports (color:color-mix(in lab,red,red)){.download-link[data-v-59b5011b]{-webkit-text-decoration-color:color-mix(in srgb,var(--font-color)30%,transparent);text-decoration-color:color-mix(in 
srgb,var(--font-color)30%,transparent)}}.download-link[data-v-59b5011b]:hover{--font-color:var(--scalar-link-color,var(--scalar-color-accent));-webkit-text-decoration-color:var(--font-color);text-decoration-color:var(--font-color)}.introduction-card[data-v-a8605b85]{display:flex;flex-direction:column}.introduction-card-row[data-v-a8605b85]{gap:24px}@media(min-width:600px){.introduction-card-row[data-v-a8605b85]{flex-flow:row wrap}}.introduction-card-row[data-v-a8605b85]>*{flex:1}@media(min-width:600px){.introduction-card-row[data-v-a8605b85]>*{min-width:min-content}}@media(max-width:600px){.introduction-card-row[data-v-a8605b85]>*{max-width:100%}}@container (max-width: 900px){.introduction-card-row[data-v-a8605b85]{flex-direction:column;align-items:stretch;gap:0px}}.introduction-card[data-v-a8605b85] .security-scheme-label{text-transform:uppercase;font-weight:var(--scalar-semibold)}.introduction-card-row[data-v-a8605b85] .scalar-card:nth-of-type(2) .scalar-card-header{display:none}.introduction-card-row[data-v-a8605b85] .scalar-card:nth-of-type(2) .scalar-card-header.scalar-card--borderless+.scalar-card-content{margin-top:0}.section[data-v-be4443e9]{position:relative;display:flex;flex-direction:column;max-width:var(--refs-content-max-width);margin:auto;padding:90px 0;scroll-margin-top:var(--refs-viewport-offset)}.section[data-v-be4443e9]:has(~div.contents){border-bottom:var(--scalar-border-width) solid var(--scalar-border-color)}.references-classic .section[data-v-be4443e9]{padding:48px 0;gap:24px}@container narrow-references-container (max-width: 900px){.references-classic .section[data-v-be4443e9],.section[data-v-be4443e9]{padding:48px 24px}}.section[data-v-be4443e9]:not(:last-of-type){border-bottom:var(--scalar-border-width) solid 
var(--scalar-border-color)}.section-wrapper[data-v-ff689b94]{color:var(--scalar-color-1);padding-top:12px;margin-top:-12px}.section-accordion[data-v-ff689b94]{display:flex;flex-direction:column;border-radius:var(--scalar-radius-lg);background:var(--scalar-background-2);scroll-margin-top:var(--refs-viewport-offset)}.section-accordion-transparent[data-v-ff689b94]{background:transparent;border:var(--scalar-border-width) solid var(--scalar-border-color)}.section-accordion-button[data-v-ff689b94]{padding:6px}.section-accordion-button[data-v-ff689b94]{display:flex;align-items:center;gap:6px;cursor:pointer}.section-accordion-button-content[data-v-ff689b94]{flex:1;min-width:0}.section-accordion-button-actions[data-v-ff689b94]{display:flex;align-items:center;gap:6px;color:var(--scalar-color-3)}.section-accordion-chevron[data-v-ff689b94]{margin-right:4px;cursor:pointer;opacity:1;color:var(--scalar-color-3)}.section-accordion-button:hover .section-accordion-chevron[data-v-ff689b94]{color:var(--scalar-color-1)}.section-accordion-content[data-v-ff689b94]{border-top:var(--scalar-border-width) solid var(--scalar-border-color);display:flex;flex-direction:column}.section-accordion-description[data-v-ff689b94]{font-weight:var(--scalar-semibold);font-size:var(--scalar-mini);color:var(--scalar-color--1);padding:10px 12px 0}.section-accordion-content-card[data-v-ff689b94] .property:last-of-type{padding-bottom:9px}.section-column[data-v-699c28e3]{flex:1;min-width:0}@container narrow-references-container (max-width: 900px){.section-column[data-v-699c28e3]:nth-of-type(2){padding-top:0}}.section-columns[data-v-8b9602bf]{display:flex;gap:48px}@container narrow-references-container (max-width: 900px){.section-columns[data-v-8b9602bf]{flex-direction:column;gap:24px}}.section-container[data-v-20a1472a]{position:relative;padding:0 60px;width:100%;border-top:var(--scalar-border-width) solid 
var(--scalar-border-color)}.section-container[data-v-20a1472a]:has(.introduction-section){border-top:none}@container narrow-references-container (max-width: 900px){.section-container[data-v-20a1472a]{padding:0}}.section-accordion-wrapper[data-v-9419dd23]{padding:0 60px}.section-accordion[data-v-9419dd23]{position:relative;width:100%;max-width:var(--refs-content-max-width);margin:auto}.section-accordion-content[data-v-9419dd23]{display:flex;flex-direction:column;gap:12px;padding-top:12px}.section-accordion-button[data-v-9419dd23]{width:100%;display:flex;cursor:pointer;padding:6px 0;margin:-6px 0;border-radius:var(--scalar-radius)}.section-accordion-chevron[data-v-9419dd23]{position:absolute;left:-22px;top:12px;color:var(--scalar-color-3)}.section-accordion-button:hover .section-accordion-chevron[data-v-9419dd23]{color:var(--scalar-color-1)}.section-accordion-title[data-v-9419dd23]{display:flex;flex-direction:column;align-items:flex-start;flex:1;padding:0 6px}.section-accordion-title[data-v-9419dd23] .section-header-wrapper{grid-template-columns:1fr}.section-accordion-title[data-v-9419dd23] .section-header{margin-bottom:0}@container narrow-references-container (max-width: 900px){.section-accordion-chevron[data-v-9419dd23]{width:16px;left:-16px;top:14px}.section-accordion-wrapper[data-v-9419dd23]{padding:calc(var(--refs-viewport-offset)) 24px 0 24px}}.loading[data-v-8e0226d7]{background:var(--scalar-background-3);animation:loading-skeleton-8e0226d7 1.5s infinite alternate;border-radius:var(--scalar-radius-lg);min-height:1.6em;margin:.6em 0;max-width:100%}.loading[data-v-8e0226d7]:first-of-type{min-height:3em;margin-bottom:24px;margin-top:0}.loading[data-v-8e0226d7]:last-of-type{width:60%}.loading.single-line[data-v-8e0226d7]{min-height:3em;margin:.6em 0;max-width:80%}@keyframes loading-skeleton-8e0226d7{0%{opacity:1}to{opacity:.33}}@container narrow-references-container (max-width: 
900px){.section-content--with-columns[data-v-9735459e]{flex-direction:column;gap:24px}}.section-header-wrapper[data-v-465a7a78]{grid-template-columns:1fr;display:grid}@media(min-width:1200px){.section-header-wrapper[data-v-465a7a78]{grid-template-columns:repeat(2,1fr)}}.section-header[data-v-465a7a78]{font-size:var(--font-size,var(--scalar-heading-1));font-weight:var(--font-weight,var(--scalar-bold));color:var(--scalar-color-1);word-wrap:break-word;margin-top:0;margin-bottom:12px;line-height:1.45}.section-header.tight[data-v-465a7a78]{margin-bottom:6px}.section-header.loading[data-v-465a7a78]{width:80%}.section-header-label[data-v-f1ac6c38]{display:inline}.screenreader-only[data-v-df2e1026]{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);border:0}.collapsible-section[data-v-999a158a]{border-top:var(--scalar-border-width) solid var(--scalar-border-color);position:relative}.collapsible-section-header[data-v-999a158a]{color:var(--scalar-color-1)}.collapsible-section .collapsible-section-trigger[data-v-999a158a]{display:flex;align-items:center;cursor:pointer;padding:10px 0;font-size:var(--scalar-font-size-3);z-index:1;position:relative}.collapsible-section-trigger svg[data-v-999a158a]{color:var(--scalar-color-3);position:absolute;left:-19px}.collapsible-section:hover .collapsible-section-trigger svg[data-v-999a158a]{color:var(--scalar-color-1)}.collapsible-section .collapsible-section-trigger[data-v-999a158a] .anchor-copy{line-height:18.5px}.collapsible-section-content[data-v-999a158a]{padding:0;margin:0 0 10px;scroll-margin-top:140px}.references-classic .introduction-description[data-v-fe80002d] img{max-width:720px}.icons-only[data-v-0939d4d9] span{display:none}.sticky-cards[data-v-0b1e2255]{display:flex;flex-direction:column;position:sticky;top:calc(var(--refs-viewport-offset) + 
24px)}.introduction-card-item[data-v-708aae59]{display:flex;margin-bottom:12px;flex-direction:column;justify-content:start}.introduction-card-item[data-v-708aae59]:has(.description) .server-form-container{border-bottom-left-radius:0;border-bottom-right-radius:0}.introduction-card-item[data-v-708aae59] .request-item{border-bottom:0}.schema-type-icon[data-v-70cb5c13]{color:var(--scalar-color-1);display:none}.schema-type[data-v-70cb5c13]{font-family:var(--scalar-font-code);color:var(--scalar-color-1)}.property-enum-value[data-v-88bc950e]{color:var(--scalar-color-3);line-height:1.5;overflow-wrap:break-word;display:flex;align-items:stretch;position:relative;--decorator-width: 1px;--decorator-color: color-mix( in srgb, var(--scalar-background-1), var(--scalar-color-1) 25% )}.property-enum-value-content[data-v-88bc950e]{display:flex;flex-direction:column;padding:3px 0}.property-enum-value-label[data-v-88bc950e]{display:flex;font-family:var(--scalar-font-code);color:var(--scalar-color-1);position:relative}.property-enum-value:last-of-type .property-enum-value-label[data-v-88bc950e]{padding-bottom:0}.property-enum-value[data-v-88bc950e]:before{content:"";margin-right:12px;width:var(--decorator-width);display:block;background-color:var(--decorator-color)}.property-enum-value[data-v-88bc950e]:last-of-type:before,.property-enum-values:has(.enum-toggle-button) .property-enum-value[data-v-88bc950e]:nth-last-child(2):before{height:calc(.5lh + 4px)}.property-enum-value-label[data-v-88bc950e]:after{content:"";position:absolute;top:.5lh;left:-12px;width:8px;height:var(--decorator-width);background-color:var(--decorator-color)}.property-enum-value[data-v-88bc950e]:last-of-type:after{bottom:0;height:50%;background:var(--scalar-background-1);border-top:var(--scalar-border-width) solid 
var(--decorator-color)}.property-enum-value-description[data-v-88bc950e]{color:var(--scalar-color-3)}.property-heading:empty+.property-description[data-v-d4946030]:last-of-type,.property-description[data-v-d4946030]:first-of-type:last-of-type{margin-top:0}.property-list[data-v-d4946030]{border:var(--scalar-border-width) solid var(--scalar-border-color);border-radius:var(--scalar-radius);margin-top:10px}.property-list .property[data-v-d4946030]:last-of-type{padding-bottom:10px}.property-enum-values[data-v-d4946030]{font-size:var(--scalar-font-size-3);list-style:none;margin-top:8px;padding-left:2px}.enum-toggle-button[data-v-d4946030]:hover{color:var(--scalar-color-1)}.property-detail[data-v-827ea49d]{display:inline-flex}.property-detail+.property-detail[data-v-827ea49d]:before{display:block;content:"·";margin:0 .5ch}.property-detail-truncate[data-v-827ea49d]{overflow:hidden}.property-detail-truncate>.property-detail-value[data-v-827ea49d]{overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.property-detail-prefix[data-v-827ea49d]{color:var(--scalar-color-2)}code.property-detail-value[data-v-827ea49d]{font-family:var(--scalar-font-code);font-size:var(--scalar-font-size-3);color:var(--scalar-color-2);background:var(--scalar-background-3);padding:0 4px;border:.5px solid var(--scalar-border-color);border-radius:var(--scalar-radius)}.property-example[data-v-72def0ea]{display:flex;flex-direction:column;font-size:var(--scalar-mini);position:relative}.property-example[data-v-72def0ea]:hover:before{content:"";position:absolute;top:0;left:0;width:100%;height:20px;border-radius:var(--scalar-radius)}.property-example:hover .property-example-label span[data-v-72def0ea]{color:var(--scalar-color-1)}.property-example-label span[data-v-72def0ea]{color:var(--scalar-color-3);position:relative;border-bottom:var(--scalar-border-width) dotted 
currentColor}.property-example-value[data-v-72def0ea]{font-family:var(--scalar-font-code);display:flex;gap:8px;align-items:center;width:100%;padding:6px}.property-example-value span[data-v-72def0ea]{display:block;overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.property-example-value[data-v-72def0ea] svg{color:var(--scalar-color-3)}.property-example-value[data-v-72def0ea]:hover svg{color:var(--scalar-color-1)}.property-example-value[data-v-72def0ea]{background:var(--scalar-background-2);border:var(--scalar-border-width) solid var(--scalar-border-color);border-radius:var(--scalar-radius)}.property-example-value-list[data-v-72def0ea]{position:absolute;top:18px;left:50%;transform:translate3d(-50%,0,0);overflow:auto;background-color:var(--scalar-background-1);box-shadow:var(--scalar-shadow-1);border-radius:var(--scalar-radius-lg);border:var(--scalar-border-width) solid var(--scalar-border-color);padding:9px;min-width:200px;max-width:300px;flex-direction:column;gap:3px;display:none;z-index:2}.property-example:hover .property-example-value-list[data-v-72def0ea],.property-example:focus-within 
.property-example-value-list[data-v-72def0ea]{display:flex}.property-heading[data-v-5d03e993]{display:flex;flex-wrap:wrap;align-items:baseline;row-gap:9px;white-space:nowrap}.property-heading[data-v-5d03e993]:has(+.children),.property-heading[data-v-5d03e993]:has(+.property-rule){margin-bottom:9px}.property-heading[data-v-5d03e993]>*{margin-right:9px}.property-heading[data-v-5d03e993]:last-child{margin-right:0}.property-heading>.property-detail[data-v-5d03e993]:not(:last-of-type){margin-right:0}.property-name[data-v-5d03e993]{max-width:100%;font-family:var(--scalar-font-code);font-weight:var(--scalar-semibold);font-size:var(--scalar-font-size-3);white-space:normal;overflow-wrap:break-word}.property-additional[data-v-5d03e993]{font-family:var(--scalar-font-code)}.property-required[data-v-5d03e993],.property-optional[data-v-5d03e993]{color:var(--scalar-color-2)}.property-required[data-v-5d03e993]{font-size:var(--scalar-mini);color:var(--scalar-color-orange)}.property-read-only[data-v-5d03e993]{font-size:var(--scalar-mini);color:var(--scalar-color-blue)}.property-write-only[data-v-5d03e993]{font-size:var(--scalar-mini);color:var(--scalar-color-green)}.property-discriminator[data-v-5d03e993]{font-size:var(--scalar-mini);color:var(--scalar-color-purple)}.property-detail[data-v-5d03e993]{font-size:var(--scalar-mini);color:var(--scalar-color-2);display:flex;align-items:center;min-width:0}.property-const[data-v-5d03e993]{color:var(--scalar-color-1)}.deprecated[data-v-5d03e993]{text-decoration:line-through}.property[data-v-7ddcdaa6]{color:var(--scalar-color-1);display:flex;flex-direction:column;padding:8px;font-size:var(--scalar-small);position:relative}.property.property--level-0[data-v-7ddcdaa6]:has(>.property-rule>.schema-card>.schema-properties.schema-properties-open>ul>li.property){padding-top:0}.property--compact.property--level-0[data-v-7ddcdaa6],.property--compact.property--level-1[data-v-7ddcdaa6]{padding:8px 0}.composition-panel 
.property.property.property.property--level-0[data-v-7ddcdaa6]{padding:0}.property--compact.property--level-0 .composition-panel .property--compact.property--level-1[data-v-7ddcdaa6]{padding:8px}.property[data-v-7ddcdaa6]:has(>.property-rule:nth-of-type(1)):not(.property--compact){padding-top:8px;padding-bottom:8px}.property--deprecated[data-v-7ddcdaa6]{background:repeating-linear-gradient(-45deg,var(--scalar-background-2) 0,var(--scalar-background-2) 2px,transparent 2px,transparent 5px);background-size:100%}.property--deprecated[data-v-7ddcdaa6]>*{opacity:.75}.property-description[data-v-7ddcdaa6]{margin-top:6px;line-height:1.4;font-size:var(--scalar-small)}.property-description[data-v-7ddcdaa6]:has(+.property-rule){margin-bottom:9px}[data-v-7ddcdaa6] .property-description *{color:var(--scalar-color-2)!important}.property[data-v-7ddcdaa6]:not(:last-of-type){border-bottom:var(--scalar-border-width) solid var(--scalar-border-color)}.property-description+.children[data-v-7ddcdaa6],.children+.property-rule[data-v-7ddcdaa6]{margin-top:9px}.children[data-v-7ddcdaa6]{display:flex;flex-direction:column}.children .property--compact.property--level-1[data-v-7ddcdaa6]{padding:12px}.property-example-value[data-v-7ddcdaa6]{all:unset;font-family:var(--scalar-font-code);padding:6px;border-top:var(--scalar-border-width) solid var(--scalar-border-color)}.property-rule[data-v-7ddcdaa6]{border-radius:var(--scalar-radius-lg);display:flex;flex-direction:column}.property-rule[data-v-7ddcdaa6] .composition-panel .schema-card .schema-properties.schema-properties-open{border-top-left-radius:0;border-top-right-radius:0}.property-rule[data-v-7ddcdaa6] .composition-panel>.schema-card>.schema-card-description{padding-left:8px;padding-right:8px;border-left:1px solid var(--scalar-border-color);border-right:1px solid var(--scalar-border-color)}.property-rule[data-v-7ddcdaa6] 
.composition-panel>.schema-card>.schema-card-description+.schema-properties{margin-top:0}.property-example[data-v-7ddcdaa6]{background:transparent;border:none;display:flex;flex-direction:row;gap:8px}.property-example-label[data-v-7ddcdaa6],.property-example-value[data-v-7ddcdaa6]{padding:3px 0 0}.property-example-value[data-v-7ddcdaa6]{background:var(--scalar-background-2);border-top:0;border-radius:var(--scalar-radius);padding:3px 4px}.property-name[data-v-7ddcdaa6]{font-family:var(--scalar-font-code);font-weight:var(--scalar-semibold)}.property-name-additional-properties[data-v-7ddcdaa6]:before,.property-name-pattern-properties[data-v-7ddcdaa6]:before{text-transform:uppercase;font-size:var(--scalar-micro);display:inline-block;padding:2px 4px;border-radius:var(--scalar-radius);color:var(--scalar-color-1);border:1px solid var(--scalar-border-color);background-color:var(--scalar-background-2);margin-right:4px}.property-name-pattern-properties[data-v-7ddcdaa6]:before{content:"regex"}.property-name-additional-properties[data-v-7ddcdaa6]:before{content:"unknown property name"}.error[data-v-4d81600f]{background-color:var(--scalar-color-red)}.schema-card[data-v-4d81600f]{font-size:var(--scalar-font-size-4);color:var(--scalar-color-1)}.schema-card-title[data-v-4d81600f]{height:var(--schema-title-height);padding:6px 8px;display:flex;align-items:center;gap:4px;color:var(--scalar-color-2);font-weight:var(--scalar-semibold);font-size:var(--scalar-mini);border-bottom:var(--scalar-border-width) solid transparent}button.schema-card-title[data-v-4d81600f]{cursor:pointer}button.schema-card-title[data-v-4d81600f]:hover{color:var(--scalar-color-1)}.schema-card-title-icon--open[data-v-4d81600f]{transform:rotate(45deg)}.schema-properties-open>.schema-card-title[data-v-4d81600f]{border-bottom-left-radius:0;border-bottom-right-radius:0;border-bottom:var(--scalar-border-width) solid 
var(--scalar-border-color)}.schema-properties-open>.schema-properties[data-v-4d81600f]{width:fit-content}.schema-card-description[data-v-4d81600f]{color:var(--scalar-color-2)}.schema-card-description+.schema-properties[data-v-4d81600f]{width:fit-content}.schema-card-description+.schema-properties[data-v-4d81600f]{margin-top:8px}.schema-properties-open.schema-properties[data-v-4d81600f],.schema-properties-open>.schema-card--open[data-v-4d81600f]{width:100%}.schema-properties[data-v-4d81600f]{display:flex;flex-direction:column;border:var(--scalar-border-width) solid var(--scalar-border-color);border-radius:var(--scalar-radius-lg);width:fit-content}.schema-properties-name[data-v-4d81600f]{width:100%}.schema-properties .schema-properties[data-v-4d81600f]{border-radius:13.5px}.schema-properties .schema-properties.schema-properties-open[data-v-4d81600f]{border-radius:var(--scalar-radius-lg)}.schema-properties-open[data-v-4d81600f]{width:100%}.schema-card--compact[data-v-4d81600f]{align-self:flex-start}.schema-card--compact.schema-card--open[data-v-4d81600f]{align-self:initial}.schema-card-title--compact[data-v-4d81600f]{color:var(--scalar-color-2);padding:6px 10px 6px 8px;height:auto;border-bottom:none}.schema-card-title--compact>.schema-card-title-icon[data-v-4d81600f]{margin:0}.schema-card-title--compact>.schema-card-title-icon--open[data-v-4d81600f]{transform:rotate(45deg)}.schema-properties-open>.schema-card-title--compact[data-v-4d81600f]{position:static}.property--level-0>.schema-properties>.schema-card--level-0>.schema-properties[data-v-4d81600f]{border:none}.property--level-0 .schema-card--level-0:not(.schema-card--compact) .property--level-1[data-v-4d81600f]{padding:0 0 8px}:not(.composition-panel)>.schema-card--compact.schema-card--level-0>.schema-properties[data-v-4d81600f]{border:none}[data-v-4d81600f] .schema-card-description p{font-size:var(--scalar-small, 
var(--scalar-paragraph));color:var(--scalar-color-2);line-height:1.5;display:block;margin-bottom:6px}.children .schema-card-description[data-v-4d81600f]:first-of-type{padding-top:0}.reference-models-anchor[data-v-161968a4]{display:flex;align-items:center;font-size:20px;padding-left:6px;color:var(--scalar-color-1)}.reference-models-label[data-v-161968a4]{display:block;font-size:var(--scalar-mini)}.reference-models-label[data-v-161968a4] em{font-weight:var(--scalar-bold)}.show-more[data-v-d1c2b649]{appearance:none;border:none;border:var(--scalar-border-width) solid var(--scalar-border-color);margin:auto;padding:8px 12px 8px 16px;border-radius:30px;color:var(--scalar-color-1);font-weight:var(--scalar-semibold);font-size:var(--scalar-small);display:flex;align-items:center;justify-content:center;position:relative;gap:6px;top:-48px}.show-more[data-v-d1c2b649]:hover{background:var(--scalar-background-2);cursor:pointer}.show-more[data-v-d1c2b649]:active{box-shadow:0 0 0 1px var(--scalar-border-color)}@container narrow-references-container (max-width: 900px){.show-more[data-v-d1c2b649]{top:-24px}}.tag-section[data-v-1124be5d]{margin-bottom:48px}.tag-name[data-v-1124be5d]{text-transform:capitalize}.tag-description[data-v-1124be5d]{padding-bottom:4px;text-align:left}.endpoint[data-v-ad8530a6]{display:flex;white-space:nowrap;cursor:pointer;text-decoration:none}.endpoint:hover .endpoint-path[data-v-ad8530a6],.endpoint:focus-visible .endpoint-path[data-v-ad8530a6]{text-decoration:underline}.endpoint .post[data-v-ad8530a6],.endpoint .get[data-v-ad8530a6],.endpoint .delete[data-v-ad8530a6],.endpoint 
.put[data-v-ad8530a6]{white-space:nowrap}.endpoint-method[data-v-ad8530a6],.endpoint-path[data-v-ad8530a6]{color:var(--scalar-color-1);min-width:62px;display:inline-flex;line-height:1.55;font-family:var(--scalar-font-code);font-size:var(--scalar-small);cursor:pointer}.endpoint-method[data-v-ad8530a6]{text-align:right}.endpoint-path[data-v-ad8530a6]{margin-left:12px;text-transform:initial}.deprecated[data-v-ad8530a6]{text-decoration:line-through}.endpoints-card[data-v-f726f753]{position:sticky;top:calc(var(--refs-viewport-offset) + 24px);font-size:var(--scalar-font-size-3)}.endpoints[data-v-f726f753]{overflow:auto;background:var(--scalar-background-2);padding:10px 12px;width:100%}.section-container[data-v-d207e09e]{border-top:var(--scalar-border-width) solid var(--scalar-border-color)}.section-container[data-v-d207e09e]:has(.show-more){background-color:color-mix(in srgb,var(--scalar-background-2),transparent)}.operation-path[data-v-ec6c8861]{overflow:hidden;word-wrap:break-word;font-weight:var(--scalar-semibold);line-break:anywhere}.deprecated[data-v-ec6c8861]{text-decoration:line-through}.empty-state[data-v-ab9ce344]{margin:10px 0 10px 12px;text-align:center;font-size:var(--scalar-mini);min-height:56px;display:flex;align-items:center;justify-content:center;border-radius:var(--scalar-radius-lg);color:var(--scalar-color-2)}.rule-title[data-v-ab9ce344]{font-family:var(--scalar-font-code);color:var(--scalar-color-1);display:inline-block;margin:12px 0 6px;border-radius:var(--scalar-radius)}.rule[data-v-ab9ce344]{margin:0 12px;border-radius:var(--scalar-radius-lg)}.rule-items[data-v-ab9ce344]{counter-reset:list-number;display:flex;flex-direction:column;gap:12px;border-left:1px solid var(--scalar-border-color);padding:12px 0}.rule-item[data-v-ab9ce344]{counter-increment:list-number;border:1px solid var(--scalar-border-color);border-radius:var(--scalar-radius-lg);overflow:hidden;margin-left:24px}.rule-item[data-v-ab9ce344]:before{border:1px solid 
var(--scalar-border-color);border-top:0;border-right:0;content:" ";display:block;width:24px;height:6px;border-radius:0 0 0 var(--scalar-radius-lg);margin-top:6px;color:var(--scalar-color-2);transform:translate(-25px);color:var(--scalar-color-1);position:absolute}.tab[data-v-c8207e48]{background:none;border:none;font-size:var(--scalar-small);font-family:var(--scalar-font);font-weight:var(--scalar-font-normal);color:var(--scalar-color-2);line-height:calc(var(--scalar-small) + 2px);white-space:nowrap;cursor:pointer;padding:0;margin-right:3px;text-transform:uppercase;position:relative;line-height:22px}.tab[data-v-c8207e48]:before{content:"";position:absolute;z-index:0;left:-6px;top:-6px;width:calc(100% + 12px);height:calc(100% + 12px);border-radius:var(--scalar-radius);background:var(--scalar-background-3);opacity:0}.tab[data-v-c8207e48]:hover:before,.tab[data-v-c8207e48]:focus-visible:before{opacity:1}.tab[data-v-c8207e48]:focus-visible:before{outline:1px solid var(--scalar-color-accent)}.tab span[data-v-c8207e48]{z-index:1;position:relative}.tab-selected[data-v-c8207e48]{color:var(--scalar-color-1);font-weight:var(--scalar-semibold)}.tab-selected[data-v-c8207e48]:after{content:"";position:absolute;background:currentColor;width:100%;left:0;height:1px;bottom:calc(var(--tab-list-padding-y) * -1)}.tab-list[data-v-fec8fbbb]{display:flex;gap:6px;position:relative;flex:1;--tab-list-padding-y: 7px;--tab-list-padding-x: 12px;padding:var(--tab-list-padding-y) var(--tab-list-padding-x);overflow:auto}.scalar-card-header.scalar-card-header-tabs[data-v-fec8fbbb]{padding:0}.response-card[data-v-b2851076]{font-size:var(--scalar-font-size-3)}.markdown[data-v-b2851076] 
*{margin:0}.code-copy[data-v-b2851076]{display:flex;align-items:center;justify-content:center;appearance:none;-webkit-appearance:none;outline:none;background:transparent;cursor:pointer;color:var(--scalar-color-3);border:none;padding:0;margin-right:12px}.code-copy[data-v-b2851076]:hover{color:var(--scalar-color-1)}.code-copy svg[data-v-b2851076]{width:13px;height:13px}.response-card-footer[data-v-b2851076]{display:flex;flex-direction:row;justify-content:space-between;flex-shrink:0;padding:7px 12px;gap:8px}.response-example-selector[data-v-b2851076]{align-self:flex-start;margin:-4px}.response-description[data-v-b2851076]{font-weight:var(--scalar-semibold);font-size:var(--scalar-small);color:var(--scalar-color--1);display:flex;align-items:center;box-sizing:border-box}.schema-type[data-v-b2851076]{font-size:var(--scalar-micro);color:var(--scalar-color-2);font-weight:var(--scalar-semibold);background:var(--scalar-background-3);padding:2px 4px;border-radius:4px;margin-right:4px}.schema-example[data-v-b2851076]{font-size:var(--scalar-micro);color:var(--scalar-color-2);font-weight:var(--scalar-semibold)}.example-response-tab[data-v-b2851076]{display:block;margin:6px}.scalar-card-checkbox[data-v-b2851076]{display:flex;align-items:center;justify-content:center;position:relative;min-height:17px;cursor:pointer;-webkit-user-select:none;user-select:none;font-size:var(--scalar-small);font-weight:var(--scalar-font-normal);color:var(--scalar-color-2);width:fit-content;white-space:nowrap;gap:6px;padding:7px 6px}.scalar-card-checkbox:has(.scalar-card-checkbox-input:focus-visible) .scalar-card-checkbox-checkmark[data-v-b2851076]{outline:1px solid var(--scalar-color-accent)}.scalar-card-checkbox[data-v-b2851076]:hover{color:var(--scalar-color--1)}.scalar-card-checkbox 
.scalar-card-checkbox-input[data-v-b2851076]{position:absolute;opacity:0;cursor:pointer;height:0;width:0}.scalar-card-checkbox-checkmark[data-v-b2851076]{height:16px;width:16px;border-radius:var(--scalar-radius);background-color:transparent;background-color:var(--scalar-background-3);box-shadow:inset 0 0 0 var(--scalar-border-width) var(--scalar-border-color)}.scalar-card-checkbox[data-v-b2851076]:has(.scalar-card-checkbox-input:checked){color:var(--scalar-color-1);font-weight:var(--scalar-semibold)}.scalar-card-checkbox .scalar-card-checkbox-input:checked~.scalar-card-checkbox-checkmark[data-v-b2851076]{background-color:var(--scalar-button-1);box-shadow:none}.scalar-card-checkbox-checkmark[data-v-b2851076]:after{content:"";position:absolute;display:none}.scalar-card-checkbox .scalar-card-checkbox-input:checked~.scalar-card-checkbox-checkmark[data-v-b2851076]:after{display:block}.scalar-card-checkbox .scalar-card-checkbox-checkmark[data-v-b2851076]:after{right:11.5px;top:12.5px;width:5px;height:9px;border:solid 1px var(--scalar-button-1-color);border-width:0 1.5px 1.5px 0;transform:rotate(45deg)}.headers-card[data-v-6fb09984]{z-index:0;margin-top:12px;margin-bottom:6px;position:relative;font-size:var(--scalar-font-size-4);color:var(--scalar-color-1);align-self:flex-start}.headers-card.headers-card--open[data-v-6fb09984]{align-self:initial}.headers-card-title[data-v-6fb09984]{padding:6px 10px;display:flex;align-items:center;gap:4px;color:var(--scalar-color-3);font-weight:var(--scalar-semibold);font-size:var(--scalar-micro);border-radius:13.5px}button.headers-card-title[data-v-6fb09984]{cursor:pointer}button.headers-card-title[data-v-6fb09984]:hover{color:var(--scalar-color-1)}.headers-card-title-icon--open[data-v-6fb09984]{transform:rotate(45deg)}.headers-properties[data-v-6fb09984]{display:flex;flex-direction:column;border:var(--scalar-border-width) solid 
var(--scalar-border-color);border-radius:13.5px;width:fit-content}.headers-properties-open>.headers-card-title[data-v-6fb09984]{border-bottom-left-radius:0;border-bottom-right-radius:0;border-bottom:var(--scalar-border-width) solid var(--scalar-border-color)}.headers-properties-open[data-v-6fb09984]{border-radius:var(--scalar-radius-lg);width:100%}.headers-card .property[data-v-6fb09984]:last-of-type{padding-bottom:10px}.headers-card-title>.headers-card-title-icon[data-v-6fb09984]{width:10px;height:10px;margin:0}.headers-card-title>.headers-card-title-icon--open[data-v-6fb09984]{transform:rotate(45deg)}.parameter-item[data-v-2b6dfab9]{display:flex;flex-direction:column;border-top:var(--scalar-border-width) solid var(--scalar-border-color)}.parameter-item:last-of-type .parameter-schema[data-v-2b6dfab9]{padding-bottom:0}.parameter-item-container[data-v-2b6dfab9]{padding:0}.parameter-item-headers[data-v-2b6dfab9]{border:var(--scalar-border-width) solid var(--scalar-border-color)}.parameter-item-name[data-v-2b6dfab9]{position:relative;font-weight:var(--scalar-semibold);font-size:var(--scalar-font-size-3);font-family:var(--scalar-font-code);color:var(--scalar-color-1);overflow-wrap:break-word}.parameter-item-description[data-v-2b6dfab9],.parameter-item-description-summary[data-v-2b6dfab9]{font-size:var(--scalar-mini);color:var(--scalar-color-2)}.parameter-item-description-summary.parameter-item-description-summary[data-v-2b6dfab9]>*{--markdown-line-height: 1}.parameter-item-trigger+.parameter-item-container[data-v-2b6dfab9] .property--level-0>.property-heading 
.property-detail-value{font-size:var(--scalar-micro)}.parameter-item-required-optional[data-v-2b6dfab9]{color:var(--scalar-color-2);font-weight:var(--scalar-semibold);margin-right:6px;position:relative}.parameter-item--required[data-v-2b6dfab9]{text-transform:uppercase;font-size:var(--scalar-micro);font-weight:var(--scalar-semibold);color:var(--scalar-color-orange)}.parameter-item-description[data-v-2b6dfab9],.parameter-item-description[data-v-2b6dfab9] p{margin-top:4px;font-size:var(--scalar-small);color:var(--scalar-color-2);line-height:1.4}.parameter-schema[data-v-2b6dfab9]{padding-bottom:9px;margin-top:3px}.parameter-item-trigger[data-v-2b6dfab9]{display:flex;align-items:baseline;gap:6px;flex-wrap:wrap;padding:12px 0;outline:none}.parameter-item-trigger-open[data-v-2b6dfab9]{padding-bottom:0}.parameter-item-trigger[data-v-2b6dfab9]:after{content:"";position:absolute;height:10px;width:100%;bottom:0}.parameter-item-icon[data-v-2b6dfab9]{color:var(--scalar-color-3);left:-19px;top:.5lh;translate:0 -50%;position:absolute}.parameter-item-trigger:hover .parameter-item-icon[data-v-2b6dfab9],.parameter-item-trigger:focus-visible .parameter-item-icon[data-v-2b6dfab9]{color:var(--scalar-color-1)}.parameter-item-trigger:focus-visible .parameter-item-icon[data-v-2b6dfab9]{outline:1px solid var(--scalar-color-accent);outline-offset:2px;border-radius:var(--scalar-radius)}.request-body[data-v-e9972a68]{margin-top:24px}.request-body-header[data-v-e9972a68]{display:flex;align-items:center;justify-content:space-between;padding-bottom:12px;border-bottom:var(--scalar-border-width) solid 
var(--scalar-border-color);flex-flow:wrap}.request-body-title[data-v-e9972a68]{display:flex;align-items:center;gap:8px;font-size:var(--scalar-font-size-2);font-weight:var(--scalar-semibold);color:var(--scalar-color-1)}.request-body-required[data-v-e9972a68]{font-size:var(--scalar-micro);color:var(--scalar-color-orange);font-weight:400}.request-body-description[data-v-e9972a68]{margin-top:6px;font-size:var(--scalar-small);width:100%}.request-body-header+.request-body-schema[data-v-e9972a68]:has(>.schema-card>.schema-card-description),.request-body-header+.request-body-schema[data-v-e9972a68]:has(>.schema-card>.schema-properties>*>.property--level-0){padding-top:8px}.request-body-description[data-v-e9972a68] .markdown *{color:var(--scalar-color-2)!important}.callback-sticky-offset[data-v-102d2096]{top:var(--refs-viewport-offset, 0px);z-index:1}.callback-operation-container[data-v-102d2096] .request-body,.callback-operation-container[data-v-102d2096] .request-body-description,.callback-operation-container[data-v-102d2096] .request-body-header{margin-top:0}.callback-operation-container[data-v-102d2096] .request-body-header{--scalar-font-size-2: var(--scalar-font-size-4);padding:8px;border-bottom:none;border:.5px solid var(--scalar-border-color);border-radius:var(--scalar-radius-lg) var(--scalar-radius-lg) 0 0;background:color-mix(in srgb,var(--scalar-background-2) 50%,transparent)}.callback-operation-container[data-v-102d2096] .request-body-schema>.schema-card>.schema-card-description{padding-inline:8px}.callback-operation-container[data-v-102d2096] ul li.property.property--level-1{padding:8px}.callback-operation-container[data-v-102d2096] .request-body-schema{background-color:var(--scalar-background-1);border:var(--scalar-border-width) solid var(--scalar-border-color);border-top:none;overflow:hidden;border-radius:0 0 var(--scalar-radius-lg) var(--scalar-radius-lg)}.callback-operation-container[data-v-102d2096] 
.parameter-list{margin-top:0}.callback-operation-container[data-v-102d2096] .parameter-list-title{background:color-mix(in srgb,var(--scalar-background-2) 50%,transparent);border-radius:var(--scalar-radius-lg) var(--scalar-radius-lg) 0 0;padding:8px;margin-bottom:0;border:var(--scalar-border-width) solid var(--scalar-border-color);border-bottom:none;--scalar-font-size-2: var(--scalar-font-size-4)}.callback-operation-container[data-v-102d2096] .parameter-list-items{border:var(--scalar-border-width) solid var(--scalar-border-color);border-radius:0 0 var(--scalar-radius-lg) var(--scalar-radius-lg)}.callback-operation-container[data-v-102d2096] .parameter-list-items>li:first-of-type{border-top:none}.callback-operation-container[data-v-102d2096] .parameter-list-items>li{padding:0 8px}.show-api-client-button[data-v-e851edeb]{appearance:none;border:none;padding:1px 6px;white-space:nowrap;border-radius:var(--scalar-radius);display:flex;justify-content:center;align-items:center;font-weight:var(--scalar-semibold);font-size:var(--scalar-small);line-height:22px;color:var(--scalar-background-2);font-family:var(--scalar-font);background:var(--scalar-button-1);position:relative;cursor:pointer;box-sizing:border-box;box-shadow:inset 0 0 0 1px #0000001a;outline-offset:2px}.show-api-client-button span[data-v-e851edeb],.show-api-client-button svg[data-v-e851edeb]{fill:currentColor;color:var(--scalar-button-1-color);z-index:1}.show-api-client-button[data-v-e851edeb]:hover{background:var(--scalar-button-1-hover)}.show-api-client-button svg[data-v-e851edeb]{margin-right:4px}.operation-title[data-v-727857de]{justify-content:space-between;display:flex}.operation-details[data-v-727857de]{flex-shrink:1;align-items:center;gap:9px;min-width:0;margin-top:0;display:flex}.operation-details[data-v-727857de] .endpoint-anchor .scalar-button 
svg{width:16px;height:16px}.endpoint-type[data-v-727857de]{z-index:0;width:60px;font-size:var(--scalar-small);text-transform:uppercase;font-weight:var(--scalar-bold);font-family:var(--scalar-font);flex-shrink:0;justify-content:center;align-items:center;gap:6px;padding:6px;display:flex;position:relative}.endpoint-type[data-v-727857de]:after{content:"";z-index:-1;opacity:.15;border-radius:var(--scalar-radius);background:currentColor;position:absolute;inset:0}.endpoint-anchor[data-v-727857de]{flex-shrink:1;align-items:center;min-width:0;display:flex}.endpoint-anchor.label[data-v-727857de]{display:flex}.endpoint-label[data-v-727857de]{min-width:0;color:var(--scalar-color-1);flex-shrink:1;align-items:baseline;gap:9px;display:flex}.endpoint-label-path[data-v-727857de]{font-family:var(--scalar-font-code);font-size:var(--scalar-mini);text-overflow:ellipsis;white-space:nowrap;overflow:hidden}.endpoint-label-path[data-v-727857de] em{color:var(--scalar-color-2)}.endpoint-label-name[data-v-727857de]{color:var(--scalar-color-2);font-size:var(--scalar-small);text-overflow:ellipsis;white-space:nowrap;flex-shrink:1000000000;overflow:hidden}.endpoint-try-hint[data-v-727857de]{flex-shrink:0;padding:2px}.endpoint-copy[data-v-727857de]{color:currentColor}.endpoint-copy[data-v-727857de] svg{stroke-width:2px}.endpoint-content[data-v-727857de]{grid-auto-columns:1fr;grid-auto-flow:row;gap:9px;padding:9px;display:grid}@media(min-width:1000px){.endpoint-content[data-v-727857de]{grid-auto-flow:column}}@container (max-width:900px){.endpoint-content[data-v-727857de]{grid-template-columns:1fr}}.endpoint-content[data-v-727857de]>*{min-width:0}.operation-details-card[data-v-727857de]{flex-direction:column;gap:12px;min-width:0;display:flex}:is(.operation-details-card-item[data-v-727857de] .parameter-list,.operation-details-card-item[data-v-727857de] .callbacks-list){border:var(--scalar-border-width)solid 
var(--scalar-border-color);border-radius:var(--scalar-radius-lg);margin-top:0}.operation-details-card-item[data-v-727857de]{flex-direction:column;gap:12px;display:flex}.operation-details-card-item[data-v-727857de] .parameter-list-items{margin-bottom:0}.operation-details-card[data-v-727857de] .parameter-item:last-of-type .parameter-schema{padding-bottom:12px}.operation-details-card[data-v-727857de] .parameter-list .parameter-list{margin-bottom:12px}.operation-details-card[data-v-727857de] .parameter-item{margin:0;padding:0}.operation-details-card[data-v-727857de] .property{margin:0;padding:9px}:is(.operation-details-card[data-v-727857de] .parameter-list-title,.operation-details-card[data-v-727857de] .request-body-title,.operation-details-card[data-v-727857de] .callbacks-title){text-transform:uppercase;font-weight:var(--scalar-bold);font-size:var(--scalar-mini);color:var(--scalar-color-2);margin:0;padding:9px;line-height:1.33}.operation-details-card[data-v-727857de] .callback-list-item-title{padding-left:28px;padding-right:12px}.operation-details-card[data-v-727857de] .callback-list-item-icon{left:6px}.operation-details-card[data-v-727857de] .callback-operation-container{padding-inline:9px;padding-bottom:9px}:is(.operation-details-card[data-v-727857de] .callback-operation-container>.request-body,.operation-details-card[data-v-727857de] .callback-operation-container>.parameter-list){border:none}.operation-details-card[data-v-727857de] .callback-operation-container>.request-body>.request-body-header{border-bottom:var(--scalar-border-width)solid var(--scalar-border-color);padding:0 0 9px}.operation-details-card[data-v-727857de] .request-body-description{border-top:var(--scalar-border-width)solid var(--scalar-border-color);margin-top:0;padding:9px 9px 0}.operation-details-card[data-v-727857de] .request-body{border-radius:var(--scalar-radius-lg);border:var(--scalar-border-width)solid var(--scalar-border-color);margin-top:0}.operation-details-card[data-v-727857de] 
.request-body .schema-card--level-0>.schema-card-description{padding-inline:9px}.operation-details-card[data-v-727857de] .request-body-header{border-bottom:0;padding-bottom:0}.operation-details-card[data-v-727857de] .contents button{margin-right:9px}.operation-details-card[data-v-727857de] .schema-card--open+.schema-card:not(.schema-card--open){margin-inline:9px;margin-bottom:9px}.operation-details-card[data-v-727857de] .request-body-schema .property--level-0{padding:0}.operation-details-card[data-v-727857de] .selected-content-type{margin-right:9px}.operation-example-card[data-v-727857de]{top:calc(var(--refs-viewport-offset) + 24px);max-height:calc(var(--refs-viewport-height) - 48px);position:sticky}@media(max-width:600px){.operation-example-card[data-v-727857de]{max-height:unset;position:static}}.examples[data-v-6e4b7777]{position:sticky;top:calc(var(--refs-viewport-offset) + 24px)}.examples[data-v-6e4b7777]>*{max-height:calc((var(--refs-viewport-height) - 60px) / 2);position:relative}@media(max-width:600px){.examples[data-v-6e4b7777]>*{max-height:unset}}.deprecated[data-v-6e4b7777] *{text-decoration:line-through}.section-flare[data-v-2a9c8c02]{top:0;right:0;pointer-events:none}.narrow-references-container{container-name:narrow-references-container;container-type:inline-size}.ref-search-meta[data-v-c1c368f9]{background:var(--scalar-background-1);border-bottom-left-radius:var(--scalar-radius-lg);border-bottom-right-radius:var(--scalar-radius-lg);padding:6px 12px;font-size:var(--scalar-font-size-4);color:var(--scalar-color-3);font-weight:var(--scalar-semibold);display:flex;gap:12px;border-top:var(--scalar-border-width) solid var(--scalar-border-color)}@layer properties{@supports (((-webkit-hyphens:none)) and (not (margin-trim:inline))) or ((-moz-orient:inline) and (not (color:rgb(from red r g 
b)))){*,:before,:after,::backdrop{--tw-translate-x:0;--tw-translate-y:0;--tw-translate-z:0;--tw-rotate-x:initial;--tw-rotate-y:initial;--tw-rotate-z:initial;--tw-skew-x:initial;--tw-skew-y:initial;--tw-divide-x-reverse:0;--tw-border-style:solid;--tw-divide-y-reverse:0;--tw-gradient-position:initial;--tw-gradient-from:#0000;--tw-gradient-via:#0000;--tw-gradient-to:#0000;--tw-gradient-stops:initial;--tw-gradient-via-stops:initial;--tw-gradient-from-position:0%;--tw-gradient-via-position:50%;--tw-gradient-to-position:100%;--tw-leading:initial;--tw-font-weight:initial;--tw-shadow:0 0 #0000;--tw-shadow-color:initial;--tw-shadow-alpha:100%;--tw-inset-shadow:0 0 #0000;--tw-inset-shadow-color:initial;--tw-inset-shadow-alpha:100%;--tw-ring-color:initial;--tw-ring-shadow:0 0 #0000;--tw-inset-ring-color:initial;--tw-inset-ring-shadow:0 0 #0000;--tw-ring-inset:initial;--tw-ring-offset-width:0px;--tw-ring-offset-color:#fff;--tw-ring-offset-shadow:0 0 #0000;--tw-outline-style:solid;--tw-blur:initial;--tw-brightness:initial;--tw-contrast:initial;--tw-grayscale:initial;--tw-hue-rotate:initial;--tw-invert:initial;--tw-opacity:initial;--tw-saturate:initial;--tw-sepia:initial;--tw-drop-shadow:initial;--tw-drop-shadow-color:initial;--tw-drop-shadow-alpha:100%;--tw-drop-shadow-size:initial;--tw-backdrop-blur:initial;--tw-backdrop-brightness:initial;--tw-backdrop-contrast:initial;--tw-backdrop-grayscale:initial;--tw-backdrop-hue-rotate:initial;--tw-backdrop-invert:initial;--tw-backdrop-opacity:initial;--tw-backdrop-saturate:initial;--tw-backdrop-sepia:initial;--tw-duration:initial;--tw-ease:initial;--tw-content:"";--tw-scale-x:1;--tw-scale-y:1;--tw-scale-z:1;--tw-space-x-reverse:0}}}@layer scalar-base{@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-color-alert:color-mix(in srgb,var(--scalar-color-orange),var(--scalar-color-1)20%)}}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-color-danger:color-mix(in 
srgb,var(--scalar-color-red),var(--scalar-color-1)20%)}}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-background-alert:color-mix(in srgb,var(--scalar-color-orange),var(--scalar-background-1)95%)}}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-background-danger:color-mix(in srgb,var(--scalar-color-red),var(--scalar-background-1)95%)}}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-tooltip-background:color-mix(in srgb,var(--scalar-background-1),#fff 10%)}}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-color-danger:color-mix(in srgb,var(--scalar-color-red),var(--scalar-background-1)20%)}}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-background-alert:color-mix(in srgb,var(--scalar-color-orange),var(--scalar-background-1)95%)}}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-background-danger:color-mix(in srgb,var(--scalar-color-red),var(--scalar-background-1)95%)}}:root,:host{--leading-snug:1.375;--leading-normal:1.5;--leading-relaxed:1.625;--ease-in-out:cubic-bezier(.4,0,.2,1);--default-transition-duration:.15s;--default-transition-timing-function:cubic-bezier(.4,0,.2,1)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-color-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-color-alert:color-mix(in srgb,var(--scalar-color-orange),var(--scalar-color-1)20%)}}.light-mode{--scalar-color-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-color-danger:color-mix(in srgb,var(--scalar-color-red),var(--scalar-color-1)20%)}}.light-mode{--scalar-background-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-background-alert:color-mix(in srgb,var(--scalar-color-orange),var(--scalar-background-1)95%)}}.light-mode{--scalar-background-danger:var(--scalar-color-red)}@supports (color:color-mix(in 
lab,red,red)){.light-mode{--scalar-background-danger:color-mix(in srgb,var(--scalar-color-red),var(--scalar-background-1)95%)}}.dark-mode{--scalar-tooltip-background:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-tooltip-background:color-mix(in srgb,var(--scalar-background-1),#fff 10%)}}.dark-mode{--scalar-color-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-color-danger:color-mix(in srgb,var(--scalar-color-red),var(--scalar-background-1)20%)}}.dark-mode{--scalar-background-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-background-alert:color-mix(in srgb,var(--scalar-color-orange),var(--scalar-background-1)95%)}}.dark-mode{--scalar-background-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-background-danger:color-mix(in srgb,var(--scalar-color-red),var(--scalar-background-1)95%)}}}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-color-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-color-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-color-alert:color-mix(in srgb,var(--scalar-color-orange),var(--scalar-color-1)20%)}}}.light-mode{--scalar-color-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-color-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-color-danger:color-mix(in srgb,var(--scalar-color-red),var(--scalar-color-1)20%)}}}.light-mode{--scalar-background-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-background-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-background-alert:color-mix(in 
srgb,var(--scalar-color-orange),var(--scalar-background-1)95%)}}}.light-mode{--scalar-background-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-background-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-background-danger:color-mix(in srgb,var(--scalar-color-red),var(--scalar-background-1)95%)}}}.dark-mode{--scalar-tooltip-background:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-tooltip-background:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-tooltip-background:color-mix(in srgb,var(--scalar-background-1),#fff 10%)}}}.dark-mode{--scalar-color-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-color-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-color-danger:color-mix(in srgb,var(--scalar-color-red),var(--scalar-background-1)20%)}}}.dark-mode{--scalar-background-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-background-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-background-alert:color-mix(in srgb,var(--scalar-color-orange),var(--scalar-background-1)95%)}}}.dark-mode{--scalar-background-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-background-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-background-danger:color-mix(in srgb,var(--scalar-color-red),var(--scalar-background-1)95%)}}}}:root,:host{--leading-snug:1.375;--ease-in-out:cubic-bezier(.4,0,.2,1);--default-transition-duration:.15s;--default-transition-timing-function:cubic-bezier(.4,0,.2,1)}@supports (color:color-mix(in lab,red,red)){@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-color-alert:color-mix(in 
srgb,var(--scalar-color-orange),var(--scalar-color-1)20%)}}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-color-danger:color-mix(in srgb,var(--scalar-color-red),var(--scalar-color-1)20%)}}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-background-alert:color-mix(in srgb,var(--scalar-color-orange),var(--scalar-background-1)95%)}}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-background-danger:color-mix(in srgb,var(--scalar-color-red),var(--scalar-background-1)95%)}}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-tooltip-background:color-mix(in srgb,var(--scalar-background-1),#fff 10%)}}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-color-danger:color-mix(in srgb,var(--scalar-color-red),var(--scalar-background-1)20%)}}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-background-alert:color-mix(in srgb,var(--scalar-color-orange),var(--scalar-background-1)95%)}}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-background-danger:color-mix(in srgb,var(--scalar-color-red),var(--scalar-background-1)95%)}}.light-mode{--scalar-color-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-color-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-color-alert:color-mix(in srgb,var(--scalar-color-orange),var(--scalar-color-1)20%)}}}.light-mode{--scalar-color-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-color-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-color-danger:color-mix(in srgb,var(--scalar-color-red),var(--scalar-color-1)20%)}}}.light-mode{--scalar-background-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-background-alert:var(--scalar-color-orange)}@supports (color:color-mix(in 
lab,red,red)){.light-mode{--scalar-background-alert:color-mix(in srgb,var(--scalar-color-orange),var(--scalar-background-1)95%)}}}.light-mode{--scalar-background-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-background-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-background-danger:color-mix(in srgb,var(--scalar-color-red),var(--scalar-background-1)95%)}}}.dark-mode{--scalar-tooltip-background:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-tooltip-background:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-tooltip-background:color-mix(in srgb,var(--scalar-background-1),#fff 10%)}}}.dark-mode{--scalar-color-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-color-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-color-danger:color-mix(in srgb,var(--scalar-color-red),var(--scalar-background-1)20%)}}}.dark-mode{--scalar-background-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-background-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-background-alert:color-mix(in srgb,var(--scalar-color-orange),var(--scalar-background-1)95%)}}}.dark-mode{--scalar-background-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-background-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-background-danger:color-mix(in srgb,var(--scalar-color-red),var(--scalar-background-1)95%)}}}}body{line-height:inherit;margin:0}:root{--scalar-border-width:.5px;--scalar-radius:3px;--scalar-radius-lg:6px;--scalar-radius-xl:8px;--scalar-font:"Inter",-apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,Oxygen,Ubuntu,Cantarell,"Open Sans","Helvetica 
Neue",sans-serif;--scalar-font-code:"JetBrains Mono",ui-monospace,Menlo,Monaco,"Cascadia Mono","Segoe UI Mono","Roboto Mono","Oxygen Mono","Ubuntu Monospace","Source Code Pro","Fira Mono","Droid Sans Mono","Courier New",monospace;--scalar-heading-1:24px;--scalar-page-description:16px;--scalar-heading-2:20px;--scalar-heading-3:16px;--scalar-heading-4:16px;--scalar-heading-5:16px;--scalar-heading-6:16px;--scalar-paragraph:16px;--scalar-small:14px;--scalar-mini:13px;--scalar-micro:12px;--scalar-bold:600;--scalar-semibold:500;--scalar-regular:400;--scalar-font-size-1:21px;--scalar-font-size-2:16px;--scalar-font-size-3:14px;--scalar-font-size-4:13px;--scalar-font-size-5:12px;--scalar-font-size-6:12px;--scalar-font-size-7:10px;--scalar-line-height-1:32px;--scalar-line-height-2:24px;--scalar-line-height-3:20px;--scalar-line-height-4:18px;--scalar-line-height-5:16px;--scalar-font-normal:400;--scalar-font-medium:500;--scalar-font-bold:700;--scalar-text-decoration:none;--scalar-text-decoration-hover:underline;--scalar-link-font-weight:inherit;--scalar-sidebar-indent:20px}.dark-mode{color-scheme:dark;--scalar-scrollbar-color:#ffffff2e;--scalar-scrollbar-color-active:#ffffff5c;--scalar-button-1:#fff;--scalar-button-1-hover:#ffffffe6;--scalar-button-1-color:black;--scalar-shadow-1:0 1px 3px 0 #0000001a;--scalar-shadow-2:0 0 0 .5px var(--scalar-border-color),#0f0f0f33 0px 3px 6px,#0f0f0f66 0px 9px 24px;--scalar-lifted-brightness:1.45;--scalar-backdrop-brightness:.5;--scalar-text-decoration-color:currentColor;--scalar-text-decoration-color-hover:currentColor}.light-mode{color-scheme:light;--scalar-scrollbar-color-active:#0000005c;--scalar-scrollbar-color:#0000002e;--scalar-button-1:#000;--scalar-button-1-hover:#000c;--scalar-button-1-color:#ffffffe6;--scalar-shadow-1:0 1px 3px 0 #0000001c;--scalar-shadow-2:#00000014 0px 13px 20px 0px,#00000014 0px 3px 8px 0px,#eeeeed 0px 0 0 
.5px;--scalar-lifted-brightness:1;--scalar-backdrop-brightness:1;--scalar-text-decoration-color:currentColor;--scalar-text-decoration-color-hover:currentColor}.light-mode .dark-mode{color-scheme:dark!important}@media(max-width:460px){:root{--scalar-font-size-1:22px;--scalar-font-size-2:14px;--scalar-font-size-3:12px}}@media(max-width:720px){:root{--scalar-heading-1:24px;--scalar-page-description:20px}}:root{--scalar-text-decoration:underline;--scalar-text-decoration-hover:underline}.light-mode{--scalar-background-1:#fff;--scalar-background-2:#f6f6f6;--scalar-background-3:#e7e7e7;--scalar-background-accent:#8ab4f81f;--scalar-color-1:#1b1b1b;--scalar-color-2:#757575;--scalar-color-3:#8e8e8e;--scalar-color-accent:#09f;--scalar-border-color:#dfdfdf}.dark-mode{--scalar-background-1:#0f0f0f;--scalar-background-2:#1a1a1a;--scalar-background-3:#272727;--scalar-color-1:#e7e7e7;--scalar-color-2:#a4a4a4;--scalar-color-3:#797979;--scalar-color-accent:#00aeff;--scalar-background-accent:#3ea6ff1f;--scalar-border-color:#2d2d2d}.light-mode,.dark-mode{--scalar-sidebar-background-1:var(--scalar-background-1);--scalar-sidebar-color-1:var(--scalar-color-1);--scalar-sidebar-color-2:var(--scalar-color-2);--scalar-sidebar-border-color:var(--scalar-border-color);--scalar-sidebar-item-hover-background:var(--scalar-background-2);--scalar-sidebar-item-hover-color:var(--scalar-sidebar-color-2);--scalar-sidebar-item-active-background:var(--scalar-background-2);--scalar-sidebar-color-active:var(--scalar-sidebar-color-1);--scalar-sidebar-indent-border:var(--scalar-sidebar-border-color);--scalar-sidebar-indent-border-hover:var(--scalar-sidebar-border-color);--scalar-sidebar-indent-border-active:var(--scalar-sidebar-border-color);--scalar-sidebar-search-background:transparent;--scalar-sidebar-search-color:var(--scalar-color-3);--scalar-sidebar-search-border-color:var(--scalar-border-color)}.light-mode{--scalar-color-green:#069061;--scalar-color-red:#ef0006;--scalar-color-yellow:#edbe20;--scalar-col
or-blue:#0082d0;--scalar-color-orange:#ff5800;--scalar-color-purple:#5203d1;--scalar-link-color:var(--scalar-color-1);--scalar-link-color-hover:var(--scalar-link-color);--scalar-button-1:#000;--scalar-button-1-hover:#000c;--scalar-button-1-color:#ffffffe6;--scalar-tooltip-background:#1a1a1ae6;--scalar-tooltip-color:#ffffffd9;--scalar-color-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-color-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-color-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-color-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-color-alert:color-mix(in srgb,var(--scalar-color-orange),var(--scalar-color-1)20%)}}}}}.light-mode{--scalar-color-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-color-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-color-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-color-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-color-danger:color-mix(in srgb,var(--scalar-color-red),var(--scalar-color-1)20%)}}}}}.light-mode{--scalar-background-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-background-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-background-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-background-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-background-alert:color-mix(in srgb,var(--scalar-color-orange),var(--scalar-background-1)95%)}}}}}.light-mode{--scalar-background-danger:var(--scalar-color-red)}@supports (color:color-mix(in 
lab,red,red)){.light-mode{--scalar-background-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-background-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-background-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.light-mode{--scalar-background-danger:color-mix(in srgb,var(--scalar-color-red),var(--scalar-background-1)95%)}}}}}.dark-mode{--scalar-color-green:#00b648;--scalar-color-red:#dc1b19;--scalar-color-yellow:#ffc90d;--scalar-color-blue:#4eb3ec;--scalar-color-orange:#ff8d4d;--scalar-color-purple:#b191f9;--scalar-link-color:var(--scalar-color-1);--scalar-link-color-hover:var(--scalar-link-color);--scalar-button-1:#fff;--scalar-button-1-hover:#ffffffe6;--scalar-button-1-color:black;--scalar-tooltip-background:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-tooltip-background:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-tooltip-background:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-tooltip-background:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-tooltip-background:color-mix(in srgb,var(--scalar-background-1),#fff 10%)}}}}}.dark-mode{--scalar-tooltip-color:#fffffff2;--scalar-color-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-color-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-color-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-color-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-color-danger:color-mix(in srgb,var(--scalar-color-red),var(--scalar-background-1)20%)}}}}}.dark-mode{--scalar-background-alert:var(--scalar-color-orange)}@supports (color:color-mix(in 
lab,red,red)){.dark-mode{--scalar-background-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-background-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-background-alert:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-background-alert:color-mix(in srgb,var(--scalar-color-orange),var(--scalar-background-1)95%)}}}}}.dark-mode{--scalar-background-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-background-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-background-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-background-danger:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.dark-mode{--scalar-background-danger:color-mix(in srgb,var(--scalar-color-red),var(--scalar-background-1)95%)}}}}}@supports (color:color(display-p3 1 1 1)){.light-mode{--scalar-color-accent:color(display-p3 0 .6 1);--scalar-color-green:color(display-p3 .023529 .564706 .380392);--scalar-color-red:color(display-p3 .937255 0 .023529);--scalar-color-yellow:color(display-p3 .929412 .745098 .12549);--scalar-color-blue:color(display-p3 0 .509804 .815686);--scalar-color-orange:color(display-p3 1 .4 .02);--scalar-color-purple:color(display-p3 .321569 .011765 .819608)}.dark-mode{--scalar-color-accent:color(display-p3 .07 .67 1);--scalar-color-green:color(display-p3 0 .713725 .282353);--scalar-color-red:color(display-p3 .862745 .105882 .098039);--scalar-color-yellow:color(display-p3 1 .788235 .05098);--scalar-color-blue:color(display-p3 .305882 .701961 .92549);--scalar-color-orange:color(display-p3 1 .552941 .301961);--scalar-color-purple:color(display-p3 .694118 .568627 
.976471)}}:root,:host{--leading-snug:1.375;--ease-in-out:cubic-bezier(.4,0,.2,1);--default-transition-duration:.15s;--default-transition-timing-function:cubic-bezier(.4,0,.2,1);--leading-normal:1.5}body{background-color:var(--scalar-background-1);margin:0}}@layer scalar-theme;.scalar-app .\\@container{container-type:inline-size}.scalar-app .-top-2{top:-8px}.scalar-app .top-3\\.5{top:14px}.scalar-app .-left-4\\.5{left:-18px}.scalar-app .-left-5{left:-20px}.scalar-app .z-1000{z-index:1000}.scalar-app .order-789{order:789}.scalar-app .-m-1{margin:-4px}.scalar-app .-m-2{margin:-8px}.scalar-app .-mx-2{margin-inline:-8px}.scalar-app .my-2{margin-block:8px}.scalar-app .my-3{margin-block:12px}.scalar-app .-mt-1{margin-top:-4px}.scalar-app .mt-6{margin-top:24px}.scalar-app .mb-3{margin-bottom:12px}.scalar-app .size-4\\.5{width:18px;height:18px}.scalar-app .h-\\[calc\\(100\\%\\+16px\\)\\]{height:calc(100% + 16px)}.scalar-app .h-\\[var\\(--scalar-header-height\\)\\]{height:var(--scalar-header-height)}.scalar-app .max-h-\\[60vh\\]{max-height:60vh}.scalar-app .min-h-3{min-height:12px}.scalar-app .min-h-7{min-height:28px}.scalar-app .min-h-dvh{min-height:100dvh}.scalar-app .w-0{width:0}.scalar-app .w-4\\.5{width:18px}.scalar-app .w-96{width:384px}.scalar-app .w-110{width:440px}.scalar-app .w-120{width:480px}.scalar-app .max-w-\\(--refs-content-max-width\\){max-width:var(--refs-content-max-width)}.scalar-app .max-w-64{max-width:256px}.scalar-app .min-w-3{min-width:12px}.scalar-app .min-w-7{min-width:28px}.scalar-app .rotate-45{rotate:45deg}.scalar-app .scroll-mt-16{scroll-margin-top:64px}.scalar-app .scroll-mt-24{scroll-margin-top:96px}.scalar-app .list-none{list-style-type:none}.scalar-app .content-end{align-content:flex-end}.scalar-app .gap-7{gap:28px}.scalar-app .overflow-x-scroll{overflow-x:scroll}.scalar-app .rounded-b-none{border-bottom-right-radius:0;border-bottom-left-radius:0}@supports (color:color-mix(in lab,red,red)){.scalar-app 
.bg-b-1\\.5{background-color:color-mix(in srgb,var(--scalar-background-1),var(--scalar-background-2))}}.scalar-app .p-7{padding:28px}.scalar-app .px-15{padding-inline:60px}.scalar-app .py-2\\.25{padding-block:9px}.scalar-app .pt-1\\.5{padding-top:6px}.scalar-app .pb-12{padding-bottom:48px}.scalar-app .leading-\\[1\\.45\\]{--tw-leading:1.45;line-height:1.45}.scalar-app .leading-relaxed{--tw-leading:var(--leading-relaxed);line-height:var(--leading-relaxed)}.scalar-app .text-current{color:currentColor}.scalar-app .italic{font-style:italic}.scalar-app .\\[--scalar-address-bar-height\\:0px\\]{--scalar-address-bar-height:0px}.scalar-app .\\[grid-area\\:header\\]{grid-area:header}.scalar-app .\\[grid-area\\:navigation\\]{grid-area:navigation}:is(.scalar-app .\\*\\:\\!p-0>*){padding:0!important}.scalar-app .group-last\\:mr-0:is(:where(.group):last-child *){margin-right:0}.scalar-app .group-open\\:rotate-90:is(:where(.group):is([open],:popover-open,:open) *){rotate:90deg}.scalar-app .group-open\\:flex-wrap:is(:where(.group):is([open],:popover-open,:open) *){flex-wrap:wrap}.scalar-app .group-open\\:whitespace-normal:is(:where(.group):is([open],:popover-open,:open) *){white-space:normal}.scalar-app .group-focus-within\\/parameter-item\\:w-auto:is(:where(.group\\/parameter-item):focus-within *){width:auto}@media(hover:hover){.scalar-app .group-hover\\:flex:is(:where(.group):hover *){display:flex}.scalar-app .group-hover\\:text-c-1:is(:where(.group):hover *){color:var(--scalar-color-1)}.scalar-app .group-hover\\:opacity-100:is(:where(.group):hover *),.scalar-app .group-hover\\/heading\\:opacity-100:is(:where(.group\\/heading):hover *){opacity:1}.scalar-app .group-hover\\/parameter-item\\:w-auto:is(:where(.group\\/parameter-item):hover *){width:auto}}.scalar-app .group-has-focus-visible\\/heading\\:opacity-100:is(:where(.group\\/heading):has(:focus-visible) *){opacity:1}.scalar-app .empty\\:hidden:empty{display:none}@media(hover:hover){.scalar-app 
.hover\\:bg-b-2:hover{background-color:var(--scalar-background-2)}.scalar-app .hover\\:text-c-1:hover{color:var(--scalar-color-1)}}.scalar-app .has-focus\\:outline:has(:focus){outline-style:var(--tw-outline-style);outline-width:1px}@media(min-width:1200px){.scalar-app .xl\\:mb-1\\.5{margin-bottom:6px}.scalar-app .xl\\:gap-12{gap:48px}.scalar-app .xl\\:border-r{border-right-style:var(--tw-border-style);border-right-width:var(--scalar-border-width)}.scalar-app .xl\\:border-none{--tw-border-style:none;border-style:none}.scalar-app .xl\\:first\\:ml-auto:first-child{margin-left:auto}}.scalar-app .\\[\\&_a\\]\\:underline a{text-decoration-line:underline}.scalar-app .\\[\\&_a\\:hover\\]\\:text-c-1 a:hover{color:var(--scalar-color-1)}.scalar-app .\\[\\&_code\\]\\:font-code code{font-family:var(--scalar-font-code)}.scalar-app .\\[\\&_em\\]\\:text-c-1 em{color:var(--scalar-color-1)}.scalar-app .\\[\\&_em\\]\\:not-italic em{font-style:normal}@supports (color:color-mix(in lab,red,red)){.scalar-app .bg-b-1\\.5{background-color:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.scalar-app .bg-b-1\\.5{background-color:color-mix(in srgb,var(--scalar-background-1),var(--scalar-background-2))}}}@supports (color:color-mix(in lab,red,red)){.scalar-app .to-b-1\\.5{--tw-gradient-to:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.scalar-app .to-b-1\\.5{--tw-gradient-to:color-mix(in srgb,var(--scalar-background-1),var(--scalar-background-2))}}:is(.scalar-app .\\*\\:border-border-tooltip>*){border-color:var(--scalar-tooltip-color)}@supports (color:color-mix(in lab,red,red)){:is(.scalar-app .\\*\\:border-border-tooltip>*){border-color:color-mix(in srgb,var(--scalar-tooltip-color),var(--scalar-tooltip-background))}}}@property --tw-divide-x-reverse{syntax:"*";inherits:false;initial-value:0}@property --tw-divide-y-reverse{syntax:"*";inherits:false;initial-value:0}@property --tw-gradient-position{syntax:"*";inherits:false}@property 
--tw-gradient-from{syntax:"";inherits:false;initial-value:#0000}@property --tw-gradient-via{syntax:"";inherits:false;initial-value:#0000}@property --tw-gradient-to{syntax:"";inherits:false;initial-value:#0000}@property --tw-gradient-stops{syntax:"*";inherits:false}@property --tw-gradient-via-stops{syntax:"*";inherits:false}@property --tw-gradient-from-position{syntax:"";inherits:false;initial-value:0%}@property --tw-gradient-via-position{syntax:"";inherits:false;initial-value:50%}@property --tw-gradient-to-position{syntax:"";inherits:false;initial-value:100%}@property --tw-ease{syntax:"*";inherits:false}@keyframes fade-in-27df5cd8{0%{opacity:0}70%{opacity:0}to{opacity:1}}@keyframes rotate-27df5cd8{0%{transform:scale(3.5)rotate(0)}to{transform:scale(3.5)rotate(360deg)}}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown a{-webkit-text-decoration-color:color-mix(in srgb,var(--font-color)30%,transparent);text-decoration-color:color-mix(in srgb,var(--font-color)30%,transparent)}}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert{background-color:color-mix(in srgb,var(--scalar-background-2),transparent)}}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-note{background-color:color-mix(in srgb,var(--scalar-color-blue),transparent 97%)}}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-note{border:var(--scalar-border-width)solid color-mix(in srgb,var(--scalar-color-blue),transparent 50%)}}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-tip{background-color:color-mix(in srgb,var(--scalar-color-2),transparent 97%)}}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-tip{border:var(--scalar-border-width)solid color-mix(in srgb,var(--scalar-color-2),transparent 50%)}}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown 
.markdown-alert.markdown-alert-important,.scalar-app .markdown .markdown-alert.markdown-alert-warning{background-color:color-mix(in srgb,var(--scalar-color-orange),transparent 97%)}}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-important,.scalar-app .markdown .markdown-alert.markdown-alert-warning{border:var(--scalar-border-width)solid color-mix(in srgb,var(--scalar-color-orange),transparent 50%)}}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-caution{background-color:color-mix(in srgb,var(--scalar-color-red),transparent 97%)}}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-caution{border:var(--scalar-border-width)solid color-mix(in srgb,var(--scalar-color-red),transparent 50%)}}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-success{background-color:color-mix(in srgb,var(--scalar-color-green),transparent 97%)}}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-success{border:var(--scalar-border-width)solid color-mix(in srgb,var(--scalar-color-green),transparent 50%)}}@property --tw-backdrop-blur{syntax:"*";inherits:false}@property --tw-backdrop-brightness{syntax:"*";inherits:false}@property --tw-backdrop-contrast{syntax:"*";inherits:false}@property --tw-backdrop-grayscale{syntax:"*";inherits:false}@property --tw-backdrop-hue-rotate{syntax:"*";inherits:false}@property --tw-backdrop-invert{syntax:"*";inherits:false}@property --tw-backdrop-opacity{syntax:"*";inherits:false}@property --tw-backdrop-saturate{syntax:"*";inherits:false}@property --tw-backdrop-sepia{syntax:"*";inherits:false}@keyframes fadein-layout-c36b47da{0%{opacity:0}to{opacity:1}}@keyframes fadein-modal-c36b47da{0%{opacity:0;transform:translateY(10px)}to{opacity:1;transform:translate(0)}}@media(hover:hover){.scalar-app 
.group-hover\\/button\\:opacity-0:is(:where(.group\\/button):hover *){opacity:0}.scalar-app .group-hover\\/group-button\\:flex:is(:where(.group\\/group-button):hover *){display:flex}.scalar-app .group-hover\\/group-button\\:hidden:is(:where(.group\\/group-button):hover *){display:none}.scalar-app .peer-hover\\/button\\:opacity-100:is(:where(.peer\\/button):hover~*),.scalar-app .hover\\:opacity-100:hover{opacity:1}}@supports (color:color-mix(in lab,red,red)){.scalar-app .bg-b-1\\.5{background-color:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.scalar-app .bg-b-1\\.5{background-color:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.scalar-app .bg-b-1\\.5{background-color:color-mix(in srgb,var(--scalar-background-1),var(--scalar-background-2))}}}}@supports (color:color-mix(in lab,red,red)){.scalar-app .to-b-1\\.5{--tw-gradient-to:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.scalar-app .to-b-1\\.5{--tw-gradient-to:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.scalar-app .to-b-1\\.5{--tw-gradient-to:color-mix(in srgb,var(--scalar-background-1),var(--scalar-background-2))}}}:is(.scalar-app .\\*\\:border-border-tooltip>*){border-color:var(--scalar-tooltip-color)}@supports (color:color-mix(in lab,red,red)){:is(.scalar-app .\\*\\:border-border-tooltip>*){border-color:var(--scalar-tooltip-color)}@supports (color:color-mix(in lab,red,red)){:is(.scalar-app .\\*\\:border-border-tooltip>*){border-color:color-mix(in srgb,var(--scalar-tooltip-color),var(--scalar-tooltip-background))}}}}@property --tw-content{syntax:"*";inherits:false;initial-value:""}@supports (color:color-mix(in lab,red,red)){.scalar-app .bg-b-1\\.5{background-color:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.scalar-app .bg-b-1\\.5{background-color:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.scalar-app .bg-b-1\\.5{background-color:color-mix(in 
srgb,var(--scalar-background-1),var(--scalar-background-2))}}}.scalar-app .to-b-1\\.5{--tw-gradient-to:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.scalar-app .to-b-1\\.5{--tw-gradient-to:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.scalar-app .to-b-1\\.5{--tw-gradient-to:color-mix(in srgb,var(--scalar-background-1),var(--scalar-background-2))}}}:is(.scalar-app .\\*\\:border-border-tooltip>*){border-color:var(--scalar-tooltip-color)}@supports (color:color-mix(in lab,red,red)){:is(.scalar-app .\\*\\:border-border-tooltip>*){border-color:var(--scalar-tooltip-color)}@supports (color:color-mix(in lab,red,red)){:is(.scalar-app .\\*\\:border-border-tooltip>*){border-color:color-mix(in srgb,var(--scalar-tooltip-color),var(--scalar-tooltip-background))}}}}@media(hover:hover){.scalar-app .group-hover\\:text-c-1:is(:where(.group):hover *){color:var(--scalar-color-1)}.scalar-app .group-hover\\/button\\:bg-sidebar-indent-border-hover:is(:where(.group\\/button):hover *){background-color:var(--scalar-sidebar-indent-border-hover,var(--scalar-border-color))}.scalar-app .group-hover\\/button\\:text-c-1:is(:where(.group\\/button):hover *){color:var(--scalar-color-1)}.scalar-app .group-hover\\/code-block\\:opacity-100:is(:where(.group\\/code-block):hover *){opacity:1}.scalar-app .hover\\:bg-b-2:hover{background-color:var(--scalar-background-2)}.scalar-app .hover\\:bg-b-3:hover{background-color:var(--scalar-background-3)}.scalar-app .hover\\:bg-h-btn:hover{background-color:var(--scalar-button-1-hover)}.scalar-app .hover\\:bg-sidebar-b-1:hover{background-color:var(--scalar-sidebar-background-1,var(--scalar-background-1))}.scalar-app .hover\\:bg-sidebar-b-hover:hover{background-color:var(--scalar-sidebar-item-hover-background,var(--scalar-background-2))}.scalar-app .hover\\:bg-linear-to-b:hover{--tw-gradient-position:to bottom}@supports (background-image:linear-gradient(in lab,red,red)){.scalar-app 
.hover\\:bg-linear-to-b:hover{--tw-gradient-position:to bottom in oklab}}.scalar-app .hover\\:bg-linear-to-b:hover{background-image:linear-gradient(var(--tw-gradient-stops))}.scalar-app .hover\\:bg-linear-to-t:hover{--tw-gradient-position:to top}@supports (background-image:linear-gradient(in lab,red,red)){.scalar-app .hover\\:bg-linear-to-t:hover{--tw-gradient-position:to top in oklab}}.scalar-app .hover\\:bg-linear-to-t:hover{background-image:linear-gradient(var(--tw-gradient-stops))}.scalar-app .hover\\:text-c-1:hover{color:var(--scalar-color-1)}.scalar-app .hover\\:text-sidebar-c-1:hover{color:var(--scalar-sidebar-color-1,var(--scalar-color-1))}.scalar-app .hover\\:text-sidebar-c-hover:hover{color:var(--scalar-sidebar-item-hover-color,var(--scalar-sidebar-color-2))}.scalar-app .hover\\:underline:hover{text-decoration-line:underline}.scalar-app .hover\\:brightness-90:hover{--tw-brightness:brightness(90%);filter:var(--tw-blur,)var(--tw-brightness,)var(--tw-contrast,)var(--tw-grayscale,)var(--tw-hue-rotate,)var(--tw-invert,)var(--tw-saturate,)var(--tw-sepia,)var(--tw-drop-shadow,)}}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown a{-webkit-text-decoration-color:var(--font-color)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown a{-webkit-text-decoration-color:color-mix(in srgb,var(--font-color)30%,transparent)}}.scalar-app .markdown a{-webkit-text-decoration-color:var(--font-color);text-decoration-color:var(--font-color)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown a{-webkit-text-decoration-color:color-mix(in srgb,var(--font-color)30%,transparent);text-decoration-color:color-mix(in srgb,var(--font-color)30%,transparent)}}.scalar-app .markdown .markdown-alert{background-color:var(--scalar-background-2)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert{background-color:color-mix(in srgb,var(--scalar-background-2),transparent)}}.scalar-app .markdown 
.markdown-alert.markdown-alert-note{background-color:var(--scalar-color-blue)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-note{background-color:color-mix(in srgb,var(--scalar-color-blue),transparent 97%)}}.scalar-app .markdown .markdown-alert.markdown-alert-note{border:var(--scalar-border-width)solid var(--scalar-color-blue)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-note{border:var(--scalar-border-width)solid color-mix(in srgb,var(--scalar-color-blue),transparent 50%)}}.scalar-app .markdown .markdown-alert.markdown-alert-tip{background-color:var(--scalar-color-2)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-tip{background-color:color-mix(in srgb,var(--scalar-color-2),transparent 97%)}}.scalar-app .markdown .markdown-alert.markdown-alert-tip{border:var(--scalar-border-width)solid var(--scalar-color-2)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-tip{border:var(--scalar-border-width)solid color-mix(in srgb,var(--scalar-color-2),transparent 50%)}}.scalar-app .markdown .markdown-alert.markdown-alert-important,.scalar-app .markdown .markdown-alert.markdown-alert-warning{background-color:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-important,.scalar-app .markdown .markdown-alert.markdown-alert-warning{background-color:color-mix(in srgb,var(--scalar-color-orange),transparent 97%)}}.scalar-app .markdown .markdown-alert.markdown-alert-important,.scalar-app .markdown .markdown-alert.markdown-alert-warning{border:var(--scalar-border-width)solid var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-important,.scalar-app .markdown .markdown-alert.markdown-alert-warning{border:var(--scalar-border-width)solid color-mix(in 
srgb,var(--scalar-color-orange),transparent 50%)}}.scalar-app .markdown .markdown-alert.markdown-alert-caution{background-color:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-caution{background-color:color-mix(in srgb,var(--scalar-color-red),transparent 97%)}}.scalar-app .markdown .markdown-alert.markdown-alert-caution{border:var(--scalar-border-width)solid var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-caution{border:var(--scalar-border-width)solid color-mix(in srgb,var(--scalar-color-red),transparent 50%)}}.scalar-app .markdown .markdown-alert.markdown-alert-success{background-color:var(--scalar-color-green)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-success{background-color:color-mix(in srgb,var(--scalar-color-green),transparent 97%)}}.scalar-app .markdown .markdown-alert.markdown-alert-success{border:var(--scalar-border-width)solid var(--scalar-color-green)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-success{border:var(--scalar-border-width)solid color-mix(in srgb,var(--scalar-color-green),transparent 50%)}}}.scalar-app .right-0\\.75{right:3px}.scalar-app .ml-2{margin-left:8px}.scalar-app .self-start{align-self:flex-start}@media(hover:hover){.scalar-app .group-hover\\/button\\:opacity-0:is(:where(.group\\/button):hover *){opacity:0}.scalar-app .group-hover\\/group-button\\:flex:is(:where(.group\\/group-button):hover *){display:flex}.scalar-app .group-hover\\/group-button\\:hidden:is(:where(.group\\/group-button):hover *){display:none}}.scalar-app .group-focus-visible\\/button\\:opacity-0:is(:where(.group\\/button):focus-visible *),.scalar-app .group-has-\\[\\~\\*_\\[aria-expanded\\=true\\]\\]\\/button\\:opacity-0:is(:where(.group\\/button):has(~* [aria-expanded=true]) *),.scalar-app 
.group-has-\\[\\~\\*\\:focus-within\\]\\/button\\:opacity-0:is(:where(.group\\/button):has(~:focus-within) *),.scalar-app .group-has-\\[\\~\\*\\:hover\\]\\/button\\:opacity-0:is(:where(.group\\/button):has(~:hover) *){opacity:0}@media(hover:hover){.scalar-app .peer-hover\\/button\\:opacity-100:is(:where(.peer\\/button):hover~*){opacity:1}}.scalar-app .peer-focus-visible\\/button\\:opacity-100:is(:where(.peer\\/button):focus-visible~*){opacity:1}.scalar-app .after\\:pointer-events-none:after{content:var(--tw-content);pointer-events:none}.scalar-app .after\\:absolute:after{content:var(--tw-content);position:absolute}.scalar-app .after\\:inset-0:after{content:var(--tw-content);inset:0}.scalar-app .after\\:inset-x-0:after{content:var(--tw-content);inset-inline:0}.scalar-app .after\\:-top-0\\.5:after{content:var(--tw-content);top:-2px}.scalar-app .after\\:-bottom-0\\.5:after{content:var(--tw-content);bottom:-2px}.scalar-app .after\\:block:after{content:var(--tw-content);display:block}.scalar-app .after\\:h-0\\.75:after{content:var(--tw-content);height:3px}.scalar-app .after\\:rounded:after{content:var(--tw-content);border-radius:var(--scalar-radius)}.scalar-app .after\\:bg-blue:after{content:var(--tw-content);background-color:var(--scalar-color-blue)}.scalar-app .after\\:opacity-15:after{content:var(--tw-content);opacity:.15}.scalar-app .focus-within\\:opacity-100:focus-within{opacity:1}@media(hover:hover){.scalar-app .hover\\:opacity-100:hover{opacity:1}}.scalar-app .has-\\[\\&\\[aria-expanded\\=true\\]\\]\\:opacity-100:has([aria-expanded=true]){opacity:1}:where(.scalar-app){font-family:var(--scalar-font);color:var(--scalar-color-1);-webkit-text-size-adjust:100%;tab-size:4;line-height:1.15}:where(.scalar-app) *,:where(.scalar-app) :before,:where(.scalar-app) 
:after{box-sizing:border-box;border-style:solid;border-width:0;border-color:var(--scalar-border-color);outline-width:1px;outline-style:none;outline-color:var(--scalar-color-accent);font-feature-settings:inherit;font-variation-settings:inherit;font-family:inherit;font-size:inherit;font-weight:inherit;font-style:inherit;-webkit-text-decoration:inherit;text-decoration:inherit;text-align:inherit;line-height:inherit;color:inherit;margin:unset;padding:unset;text-rendering:optimizeLegibility;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}:where(.scalar-app) :before,:where(.scalar-app) :after{--tw-content:""}:where(.scalar-app) button,:where(.scalar-app) input,:where(.scalar-app) optgroup,:where(.scalar-app) select,:where(.scalar-app) textarea{background:0 0}:where(.scalar-app) ::file-selector-button{background:0 0}:where(.scalar-app) ol,:where(.scalar-app) ul,:where(.scalar-app) menu{list-style:none}:where(.scalar-app) input:where(:not([type=button],[type=reset],[type=submit])),:where(.scalar-app) select,:where(.scalar-app) textarea{border-radius:var(--scalar-radius);border-width:1px}:where(.scalar-app) input::placeholder{color:var(--scalar-color-3);font-family:var(--scalar-font)}:where(.scalar-app) input[type=search]::-webkit-search-cancel-button{appearance:none}:where(.scalar-app) input[type=search]::-webkit-search-decoration{appearance:none}:where(.scalar-app) summary::-webkit-details-marker{display:none}:where(.scalar-app) input:-webkit-autofill{-webkit-background-clip:text!important;background-clip:text!important}:where(.scalar-app) :focus-visible{border-radius:var(--scalar-radius);outline-style:solid}:where(.scalar-app) button:focus-visible,:where(.scalar-app) [role=button]:focus-visible{outline-offset:-1px}:where(.scalar-app) button,:where(.scalar-app) [role=button]{cursor:pointer}:where(.scalar-app) :disabled{cursor:default}:where(.scalar-app) img,:where(.scalar-app) svg,:where(.scalar-app) video,:where(.scalar-app) canvas,:where(.scalar-app) 
audio,:where(.scalar-app) iframe,:where(.scalar-app) embed,:where(.scalar-app) object{vertical-align:middle;display:block}:where(.scalar-app) [hidden]{display:none}.scalar-app .cm-scroller,.scalar-app .custom-scroll{scrollbar-color:transparent transparent;scrollbar-width:thin;-webkit-overflow-scrolling:touch;overflow-y:auto}.scalar-app .custom-scroll-self-contain-overflow{overscroll-behavior:contain}.scalar-app .cm-scroller:hover,.scalar-app .custom-scroll:hover,.scalar-app.scalar-scrollbars-obtrusive .cm-scroller,.scalar-app.scalar-scrollbars-obtrusive .custom-scroll{scrollbar-color:var(--scalar-scrollbar-color,transparent)transparent}.scalar-app .cm-scroller:hover::-webkit-scrollbar-thumb{background:var(--scalar-scrollbar-color);background-clip:content-box;border:3px solid #0000}.scalar-app .custom-scroll:hover::-webkit-scrollbar-thumb{background:var(--scalar-scrollbar-color);background-clip:content-box;border:3px solid #0000}.scalar-app .cm-scroller::-webkit-scrollbar-thumb:active{background:var(--scalar-scrollbar-color-active);background-clip:content-box;border:3px solid #0000}.scalar-app .custom-scroll::-webkit-scrollbar-thumb:active{background:var(--scalar-scrollbar-color-active);background-clip:content-box;border:3px solid #0000}.scalar-app .cm-scroller::-webkit-scrollbar-corner{background:0 0}.scalar-app .custom-scroll::-webkit-scrollbar-corner{background:0 0}.scalar-app .cm-scroller::-webkit-scrollbar{width:12px;height:12px}.scalar-app .custom-scroll::-webkit-scrollbar{width:12px;height:12px}.scalar-app .cm-scroller::-webkit-scrollbar-track{background:0 0}.scalar-app .custom-scroll::-webkit-scrollbar-track{background:0 0}.scalar-app .cm-scroller::-webkit-scrollbar-thumb{background:padding-box content-box;border:3px solid #0000;border-radius:20px}.scalar-app .custom-scroll::-webkit-scrollbar-thumb{background:padding-box content-box;border:3px solid #0000;border-radius:20px}@media(pointer:coarse){.scalar-app .cm-scroller,.scalar-app 
.custom-scroll{padding-right:12px}}.scalar-app .invisible{visibility:hidden}.scalar-app .inset-y-0{inset-block:0}.scalar-app .top-\\(--nested-items-offset\\){top:var(--nested-items-offset)}.scalar-app .top-0\\.5{top:2px}.scalar-app .top-1\\/2{top:50%}.scalar-app .top-22{top:88px}.scalar-app .top-\\[1lh\\]{top:1lh}.scalar-app .top-px{top:1px}.scalar-app .left-2{left:8px}.scalar-app .left-2\\.5{left:10px}.scalar-app .left-4{left:16px}.scalar-app .left-10{left:40px}.scalar-app .left-\\[calc\\(4px\\+var\\(--scalar-sidebar-indent\\)\\*var\\(--scalar-sidebar-level\\)\\)\\]{left:calc(4px + var(--scalar-sidebar-indent)*var(--scalar-sidebar-level))}.scalar-app .left-border{left:var(--scalar-border-width)}.scalar-app .left-px{left:1px}.scalar-app .z-tooltip{z-index:99999}.scalar-app .-m-1\\.5{margin:-6px}.scalar-app .-m-px{margin:-1px}.scalar-app .m-1{margin:4px}.scalar-app .-mx-0\\.75{margin-inline:-3px}.scalar-app .-mx-px{margin-inline:-1px}.scalar-app .mx-px{margin-inline:1px}.scalar-app .-my-1\\.5{margin-block:-6px}.scalar-app .-my-2{margin-block:-8px}.scalar-app .my-0\\.75{margin-block:3px}.scalar-app .-mt-1\\.5{margin-top:-6px}.scalar-app .mt-0{margin-top:0}.scalar-app .mt-\\[15svh\\]{margin-top:15svh}.scalar-app .mt-\\[20svh\\]{margin-top:20svh}.scalar-app .-mr-0\\.25{margin-right:-1px}.scalar-app .mr-0{margin-right:0}.scalar-app .mr-\\[calc\\(20px-var\\(--scalar-sidebar-indent\\)\\)\\]{margin-right:calc(20px - var(--scalar-sidebar-indent))}.scalar-app .-mb-1{margin-bottom:-4px}.scalar-app .-ml-0\\.75{margin-left:-3px}.scalar-app .line-clamp-\\(--markdown-clamp\\){-webkit-line-clamp:var(--markdown-clamp);-webkit-box-orient:vertical;display:-webkit-box;overflow:hidden}.scalar-app .\\!hidden{display:none!important}.scalar-app .size-2{width:8px;height:8px}.scalar-app .size-2\\.75{width:11px;height:11px}.scalar-app .size-3\\.25{width:13px;height:13px}.scalar-app .size-60{width:240px;height:240px}.scalar-app .size-\\[23px\\]{width:23px;height:23px}.scalar-app 
.h-0{height:0}.scalar-app .h-1{height:4px}.scalar-app .h-24{height:96px}.scalar-app .h-32{height:128px}.scalar-app .h-\\[1lh\\]{height:1lh}.scalar-app .h-border{height:var(--scalar-border-width)}.scalar-app .h-dvh{height:100dvh}.scalar-app .max-h-20{max-height:80px}.scalar-app .max-h-\\[80svh\\]{max-height:80svh}.scalar-app .max-h-\\[90svh\\]{max-height:90svh}.scalar-app .max-h-dvh{max-height:100dvh}.scalar-app .max-h-radix-popper{max-height:calc(var(--radix-popper-available-height) - 8px)}.scalar-app .min-h-96{min-height:384px}.scalar-app .min-h-header{min-height:48px}.scalar-app .w-12{width:48px}.scalar-app .w-24{width:96px}.scalar-app .w-32{width:128px}.scalar-app .w-40{width:160px}.scalar-app .w-48{width:192px}.scalar-app .w-\\[38px\\]{width:38px}.scalar-app .w-\\[calc\\(100vw-12px\\)\\]{width:calc(100vw - 12px)}.scalar-app .w-\\[var\\(--scalar-sidebar-indent\\)\\]{width:var(--scalar-sidebar-indent)}.scalar-app .w-border{width:var(--scalar-border-width)}.scalar-app .w-min{width:min-content}.scalar-app .w-screen{width:100vw}.scalar-app .max-w-\\[360px\\]{max-width:360px}.scalar-app .max-w-\\[480px\\]{max-width:480px}.scalar-app .max-w-\\[540px\\]{max-width:540px}.scalar-app .max-w-\\[640px\\]{max-width:640px}.scalar-app .max-w-\\[800px\\]{max-width:800px}.scalar-app .max-w-\\[1000px\\]{max-width:1000px}.scalar-app .max-w-\\[inherit\\]{max-width:inherit}.scalar-app .max-w-xs{max-width:320px}.scalar-app .min-w-6{min-width:24px}.scalar-app .min-w-40{min-width:160px}.scalar-app .min-w-min{min-width:min-content}.scalar-app .flex-shrink,.scalar-app .shrink{flex-shrink:1}.scalar-app .-translate-x-full{--tw-translate-x:-100%;translate:var(--tw-translate-x)var(--tw-translate-y)}.scalar-app .translate-x-2\\.5{--tw-translate-x:10px;translate:var(--tw-translate-x)var(--tw-translate-y)}.scalar-app .translate-x-\\[14px\\]{--tw-translate-x:14px;translate:var(--tw-translate-x)var(--tw-translate-y)}.scalar-app 
.translate-x-full{--tw-translate-x:100%;translate:var(--tw-translate-x)var(--tw-translate-y)}.scalar-app .-translate-y-1\\/2{--tw-translate-y:-50%;translate:var(--tw-translate-x)var(--tw-translate-y)}.scalar-app .appearance-none{appearance:none}.scalar-app .grid-flow-col{grid-auto-flow:column}.scalar-app .\\!items-end{align-items:flex-end!important}.scalar-app .\\!items-start{align-items:flex-start!important}.scalar-app .items-baseline{align-items:baseline}.scalar-app .\\!justify-end{justify-content:flex-end!important}.scalar-app .\\!justify-start{justify-content:flex-start!important}.scalar-app .gap-2\\.25{gap:9px}.scalar-app .gap-x-4{column-gap:16px}.scalar-app .gap-y-8{row-gap:32px}:where(.scalar-app .divide-x>:not(:last-child)){--tw-divide-x-reverse:0;border-inline-style:var(--tw-border-style);border-inline-start-width:calc(var(--scalar-border-width)*var(--tw-divide-x-reverse));border-inline-end-width:calc(var(--scalar-border-width)*calc(1 - var(--tw-divide-x-reverse)))}.scalar-app .self-end{align-self:flex-end}.scalar-app .overflow-x-clip{overflow-x:clip}.scalar-app .overflow-y-scroll{overflow-y:scroll}.scalar-app .overscroll-contain{overscroll-behavior:contain}.scalar-app .rounded-none{border-radius:0}.scalar-app .rounded-l-none{border-top-left-radius:0;border-bottom-left-radius:0}.scalar-app .border-1{border-style:var(--tw-border-style);border-width:1px}.scalar-app .border-solid{--tw-border-style:solid;border-style:solid}.scalar-app .border-\\(--scalar-background-3\\){border-color:var(--scalar-background-3)}.scalar-app .border-border{border-color:var(--scalar-border-color)}.scalar-app .border-c-alert{border-color:var(--scalar-color-alert)}.scalar-app .border-red{border-color:var(--scalar-color-red)}.scalar-app .border-sidebar-border{border-color:var(--scalar-sidebar-border-color,var(--scalar-border-color))}.scalar-app .border-sidebar-border-search{border-color:var(--scalar-sidebar-search-border-color,var(--scalar-border-color))}.scalar-app 
.bg-\\(--bg-light\\){background-color:var(--bg-light)}.scalar-app .bg-b-1,.scalar-app .bg-b-1\\.5{background-color:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.scalar-app .bg-b-1\\.5{background-color:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.scalar-app .bg-b-1\\.5{background-color:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.scalar-app .bg-b-1\\.5{background-color:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.scalar-app .bg-b-1\\.5{background-color:color-mix(in srgb,var(--scalar-background-1),var(--scalar-background-2))}}}}}.scalar-app .bg-b-alert{background-color:var(--scalar-background-alert)}.scalar-app .bg-b-btn{background-color:var(--scalar-button-1)}.scalar-app .bg-b-tooltip{background-color:var(--scalar-tooltip-background)}.scalar-app .bg-backdrop{background-color:#00000038}.scalar-app .bg-border{background-color:var(--scalar-border-color)}.scalar-app .bg-c-danger{background-color:var(--scalar-color-danger)}.scalar-app .bg-inherit{background-color:inherit}.scalar-app .bg-red{background-color:var(--scalar-color-red)}.scalar-app .bg-sidebar-b-search{background-color:var(--scalar-sidebar-search-background,var(--scalar-background-2))}.scalar-app .bg-sidebar-indent-border{background-color:var(--scalar-sidebar-indent-border,var(--scalar-border-color))}.scalar-app .bg-sidebar-indent-border-active{background-color:var(--scalar-sidebar-indent-border-active,var(--scalar-color-accent))}.scalar-app .bg-transparent{background-color:#0000}.scalar-app .bg-linear-to-b{--tw-gradient-position:to bottom}@supports (background-image:linear-gradient(in lab,red,red)){.scalar-app .bg-linear-to-b{--tw-gradient-position:to bottom in oklab}}.scalar-app .bg-linear-to-b{background-image:linear-gradient(var(--tw-gradient-stops))}.scalar-app 
.from-b-1{--tw-gradient-from:var(--scalar-background-1);--tw-gradient-stops:var(--tw-gradient-via-stops,var(--tw-gradient-position),var(--tw-gradient-from)var(--tw-gradient-from-position),var(--tw-gradient-to)var(--tw-gradient-to-position))}.scalar-app .to-b-1\\.5{--tw-gradient-to:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.scalar-app .to-b-1\\.5{--tw-gradient-to:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.scalar-app .to-b-1\\.5{--tw-gradient-to:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.scalar-app .to-b-1\\.5{--tw-gradient-to:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.scalar-app .to-b-1\\.5{--tw-gradient-to:color-mix(in srgb,var(--scalar-background-1),var(--scalar-background-2))}}}}}.scalar-app .to-b-1\\.5{--tw-gradient-stops:var(--tw-gradient-via-stops,var(--tw-gradient-position),var(--tw-gradient-from)var(--tw-gradient-from-position),var(--tw-gradient-to)var(--tw-gradient-to-position))}.scalar-app .to-b-2{--tw-gradient-to:var(--scalar-background-2);--tw-gradient-stops:var(--tw-gradient-via-stops,var(--tw-gradient-position),var(--tw-gradient-from)var(--tw-gradient-from-position),var(--tw-gradient-to)var(--tw-gradient-to-position))}.scalar-app .mask-repeat{-webkit-mask-repeat:repeat;mask-repeat:repeat}.scalar-app .p-0\\.25{padding:1px}.scalar-app .p-2\\.5{padding:10px}.scalar-app .p-6{padding:24px}.scalar-app .px-3\\.5{padding-inline:14px}.scalar-app .px-9{padding-inline:36px}.scalar-app .py-4{padding-block:16px}.scalar-app .py-\\[6\\.75px\\]{padding-block:6.75px}.scalar-app .pl-8{padding-left:32px}.scalar-app .text-base\\/5{font-size:var(--scalar-font-size-3);line-height:var(--scalar-line-height-5)}.scalar-app .text-sm\\/5{font-size:var(--scalar-font-size-4);line-height:var(--scalar-line-height-5)}.scalar-app .text-lg{font-size:var(--scalar-font-size-2)}.scalar-app 
.leading-5{--tw-leading:var(--scalar-line-height-5);line-height:var(--scalar-line-height-5)}.scalar-app .font-sidebar{--tw-font-weight:var(--scalar-sidebar-font-weight,var(--scalar-regular));font-weight:var(--scalar-sidebar-font-weight,var(--scalar-regular))}.scalar-app .font-sidebar-active{--tw-font-weight:var(--scalar-sidebar-font-weight-active,var(--scalar-semibold));font-weight:var(--scalar-sidebar-font-weight-active,var(--scalar-semibold))}.scalar-app .text-nowrap{text-wrap:nowrap}.scalar-app .text-wrap{text-wrap:wrap}.scalar-app .break-words,.scalar-app .wrap-break-word{overflow-wrap:break-word}.scalar-app .text-c-accent{color:var(--scalar-color-accent)}.scalar-app .text-c-alert{color:var(--scalar-color-alert)}.scalar-app .text-c-tooltip{color:var(--scalar-tooltip-color)}.scalar-app .text-sidebar-c-1{color:var(--scalar-sidebar-color-1,var(--scalar-color-1))}.scalar-app .text-sidebar-c-search{color:var(--scalar-sidebar-search-color,var(--scalar-color-3))}.scalar-app .text-white{color:#fff}.scalar-app .opacity-40{opacity:.4}.scalar-app .outline-offset-1{outline-offset:1px}.scalar-app .outline-offset-\\[-1px\\]{outline-offset:-1px}.scalar-app .backdrop-blur{--tw-backdrop-blur:blur(8px);-webkit-backdrop-filter:var(--tw-backdrop-blur,)var(--tw-backdrop-brightness,)var(--tw-backdrop-contrast,)var(--tw-backdrop-grayscale,)var(--tw-backdrop-hue-rotate,)var(--tw-backdrop-invert,)var(--tw-backdrop-opacity,)var(--tw-backdrop-saturate,)var(--tw-backdrop-sepia,);backdrop-filter:var(--tw-backdrop-blur,)var(--tw-backdrop-brightness,)var(--tw-backdrop-contrast,)var(--tw-backdrop-grayscale,)var(--tw-backdrop-hue-rotate,)var(--tw-backdrop-invert,)var(--tw-backdrop-opacity,)var(--tw-backdrop-saturate,)var(--tw-backdrop-sepia,)}:is(.scalar-app .\\*\\:size-3>*){width:12px;height:12px}:is(.scalar-app .\\*\\:size-4>*){width:16px;height:16px}:is(.scalar-app .\\*\\:h-5>*){height:20px}:is(.scalar-app .\\*\\:min-w-5>*){min-width:20px}:is(.scalar-app 
.\\*\\:flex-1>*){flex:1}:is(.scalar-app .\\*\\:justify-center>*){justify-content:center}:is(.scalar-app .\\*\\:gap-px>*){gap:1px}:is(.scalar-app .\\*\\:rounded>*){border-radius:var(--scalar-radius)}:is(.scalar-app .\\*\\:border>*){border-style:var(--tw-border-style);border-width:var(--scalar-border-width)}:is(.scalar-app .\\*\\:border-border-tooltip>*){border-color:var(--scalar-tooltip-color)}@supports (color:color-mix(in lab,red,red)){:is(.scalar-app .\\*\\:border-border-tooltip>*){border-color:var(--scalar-tooltip-color)}@supports (color:color-mix(in lab,red,red)){:is(.scalar-app .\\*\\:border-border-tooltip>*){border-color:var(--scalar-tooltip-color)}@supports (color:color-mix(in lab,red,red)){:is(.scalar-app .\\*\\:border-border-tooltip>*){border-color:var(--scalar-tooltip-color)}@supports (color:color-mix(in lab,red,red)){:is(.scalar-app .\\*\\:border-border-tooltip>*){border-color:color-mix(in srgb,var(--scalar-tooltip-color),var(--scalar-tooltip-background))}}}}}:is(.scalar-app .\\*\\:px-1>*){padding-inline:4px}:is(.scalar-app .\\*\\:text-xs>*){font-size:var(--scalar-font-size-5)}@media(hover:hover){.scalar-app .group-hover\\:text-c-1:is(:where(.group):hover *){color:var(--scalar-color-1)}.scalar-app .group-hover\\/button\\:bg-sidebar-indent-border-hover:is(:where(.group\\/button):hover *){background-color:var(--scalar-sidebar-indent-border-hover,var(--scalar-border-color))}.scalar-app .group-hover\\/button\\:text-c-1:is(:where(.group\\/button):hover *){color:var(--scalar-color-1)}.scalar-app .group-hover\\/code-block\\:opacity-100:is(:where(.group\\/code-block):hover *){opacity:1}}.scalar-app .group-focus-visible\\/toggle\\:outline:is(:where(.group\\/toggle):focus-visible *){outline-style:var(--tw-outline-style);outline-width:1px}.scalar-app .placeholder\\:font-\\[inherit\\]::placeholder{font-family:inherit}.scalar-app .first\\:rounded-t-\\[inherit\\]:first-child,:is(.scalar-app 
.\\*\\:first\\:rounded-t-\\[inherit\\]>*):first-child{border-top-left-radius:inherit;border-top-right-radius:inherit}.scalar-app .last\\:rounded-b-\\[inherit\\]:last-child,:is(.scalar-app .\\*\\:last\\:rounded-b-\\[inherit\\]>*):last-child{border-bottom-right-radius:inherit;border-bottom-left-radius:inherit}.scalar-app .focus-within\\:outline-none:focus-within{--tw-outline-style:none;outline-style:none}@media(hover:hover){.scalar-app .hover\\:bg-b-2:hover{background-color:var(--scalar-background-2)}.scalar-app .hover\\:bg-b-3:hover{background-color:var(--scalar-background-3)}.scalar-app .hover\\:bg-h-btn:hover{background-color:var(--scalar-button-1-hover)}.scalar-app .hover\\:bg-sidebar-b-1:hover{background-color:var(--scalar-sidebar-background-1,var(--scalar-background-1))}.scalar-app .hover\\:bg-sidebar-b-hover:hover{background-color:var(--scalar-sidebar-item-hover-background,var(--scalar-background-2))}.scalar-app .hover\\:bg-linear-to-b:hover{--tw-gradient-position:to bottom}@supports (background-image:linear-gradient(in lab,red,red)){.scalar-app .hover\\:bg-linear-to-b:hover{--tw-gradient-position:to bottom in oklab}}.scalar-app .hover\\:bg-linear-to-b:hover{background-image:linear-gradient(var(--tw-gradient-stops))}.scalar-app .hover\\:bg-linear-to-t:hover{--tw-gradient-position:to top}@supports (background-image:linear-gradient(in lab,red,red)){.scalar-app .hover\\:bg-linear-to-t:hover{--tw-gradient-position:to top in oklab}}.scalar-app .hover\\:bg-linear-to-t:hover{background-image:linear-gradient(var(--tw-gradient-stops))}.scalar-app .hover\\:text-c-1:hover{color:var(--scalar-color-1)}.scalar-app .hover\\:text-sidebar-c-1:hover{color:var(--scalar-sidebar-color-1,var(--scalar-color-1))}.scalar-app .hover\\:text-sidebar-c-hover:hover{color:var(--scalar-sidebar-item-hover-color,var(--scalar-sidebar-color-2))}.scalar-app .hover\\:underline:hover{text-decoration-line:underline}.scalar-app 
.hover\\:brightness-90:hover{--tw-brightness:brightness(90%);filter:var(--tw-blur,)var(--tw-brightness,)var(--tw-contrast,)var(--tw-grayscale,)var(--tw-hue-rotate,)var(--tw-invert,)var(--tw-saturate,)var(--tw-sepia,)var(--tw-drop-shadow,)}}.scalar-app .focus-visible\\:border-c-btn:focus-visible{border-color:var(--scalar-button-1-color)}.scalar-app .focus-visible\\:opacity-100:focus-visible{opacity:1}.scalar-app .focus-visible\\:outline:focus-visible{outline-style:var(--tw-outline-style);outline-width:1px}.scalar-app .active\\:bg-b-btn:active{background-color:var(--scalar-button-1)}.scalar-app .active\\:brightness-90:active{--tw-brightness:brightness(90%);filter:var(--tw-blur,)var(--tw-brightness,)var(--tw-contrast,)var(--tw-grayscale,)var(--tw-hue-rotate,)var(--tw-invert,)var(--tw-saturate,)var(--tw-sepia,)var(--tw-drop-shadow,)}.scalar-app .has-\\[\\:focus-visible\\]\\:bg-sidebar-b-1:has(:focus-visible){background-color:var(--scalar-sidebar-background-1,var(--scalar-background-1))}.scalar-app .has-\\[\\:focus-visible\\]\\:outline:has(:focus-visible),.scalar-app .has-\\[input\\:focus-visible\\]\\:outline:has(:is(input:focus-visible)){outline-style:var(--tw-outline-style);outline-width:1px}@media(min-width:800px){.scalar-app .md\\:w-\\[calc\\(100vw-16px\\)\\]{width:calc(100vw - 16px)}}@media(min-width:1000px){.scalar-app .lg\\:w-\\[calc\\(100vw-32px\\)\\]{width:calc(100vw - 32px)}.scalar-app .lg\\:w-full{width:100%}}.scalar-app .dark\\:bg-\\(--bg-dark\\):where(.dark-mode,.dark-mode *){background-color:var(--bg-dark)}.scalar-app .dark\\:bg-b-3:where(.dark-mode,.dark-mode *){background-color:var(--scalar-background-3)}.scalar-app .dark\\:bg-backdrop-dark:where(.dark-mode,.dark-mode *){background-color:#00000073}.scalar-app .dark\\:bg-linear-to-t:where(.dark-mode,.dark-mode *){--tw-gradient-position:to top}@supports (background-image:linear-gradient(in lab,red,red)){.scalar-app .dark\\:bg-linear-to-t:where(.dark-mode,.dark-mode *){--tw-gradient-position:to top in 
oklab}}.scalar-app .dark\\:bg-linear-to-t:where(.dark-mode,.dark-mode *){background-image:linear-gradient(var(--tw-gradient-stops))}@media(hover:hover){.scalar-app .dark\\:hover\\:bg-b-3:where(.dark-mode,.dark-mode *):hover{background-color:var(--scalar-background-3)}.scalar-app .dark\\:hover\\:bg-linear-to-b:where(.dark-mode,.dark-mode *):hover{--tw-gradient-position:to bottom}@supports (background-image:linear-gradient(in lab,red,red)){.scalar-app .dark\\:hover\\:bg-linear-to-b:where(.dark-mode,.dark-mode *):hover{--tw-gradient-position:to bottom in oklab}}.scalar-app .dark\\:hover\\:bg-linear-to-b:where(.dark-mode,.dark-mode *):hover{background-image:linear-gradient(var(--tw-gradient-stops))}.scalar-app .dark\\:hover\\:bg-linear-to-t:where(.dark-mode,.dark-mode *):hover{--tw-gradient-position:to top}@supports (background-image:linear-gradient(in lab,red,red)){.scalar-app .dark\\:hover\\:bg-linear-to-t:where(.dark-mode,.dark-mode *):hover{--tw-gradient-position:to top in oklab}}.scalar-app .dark\\:hover\\:bg-linear-to-t:where(.dark-mode,.dark-mode *):hover{background-image:linear-gradient(var(--tw-gradient-stops))}}@media(max-width:720px)and (max-height:480px){.scalar-app .zoomed\\:\\!whitespace-normal{white-space:normal!important}}.loader-wrapper[data-v-27df5cd8]{--loader-size:50%;justify-content:center;align-items:center;display:flex;position:relative}.svg-loader[data-v-27df5cd8]{width:var(--loader-size);height:var(--loader-size);fill:none;stroke:currentColor;background-color:#0000;top:1rem;right:.9rem;overflow:visible}.svg-path[data-v-27df5cd8]{stroke-width:12px;fill:none;transition:all .3s}.svg-x-mark[data-v-27df5cd8]{stroke-dasharray:57;stroke-dashoffset:57px;transition-delay:0s}.svg-check-mark[data-v-27df5cd8]{stroke-dasharray:149;stroke-dashoffset:149px;transition-delay:0s}.icon-is-invalid .svg-x-mark[data-v-27df5cd8],.icon-is-valid 
.svg-check-mark[data-v-27df5cd8]{stroke-dashoffset:0;transition-delay:.3s}.circular-loader[data-v-27df5cd8]{transform-origin:50%;background:0 0;animation:.7s linear infinite rotate-27df5cd8,.4s fade-in-27df5cd8;transform:scale(3.5)}.loader-path[data-v-27df5cd8]{stroke-dasharray:50 200;stroke-dashoffset:-100px;stroke-linecap:round}.loader-path-off[data-v-27df5cd8]{stroke-dasharray:50 200;stroke-dashoffset:-100px;opacity:0;transition:opacity .3s}.scalar-code-block:hover .scalar-code-copy[data-v-e1870266]{opacity:100}.copy-icon[data-v-e1870266],.check-icon[data-v-e1870266]{transition:transform .15s ease-in-out;position:absolute;top:50%;left:50%;transform:translate(-50%,-50%)scale(1)}.copy-icon.copied[data-v-e1870266],.check-icon[data-v-e1870266]{transform:translate(-50%,-50%)scale(0)}.check-icon.visible[data-v-e1870266]{transform:translate(-50%,-50%)scale(1)}.scalar-code-block{background:inherit;padding:10px 8px 12px 12px;position:relative;overflow:auto}.scalar-codeblock-pre{all:unset;text-wrap:nowrap;white-space-collapse:preserve;background:0 0;border-radius:0;width:fit-content;margin:0}.toggle-icon-ellipse[data-v-60be8692]{background:var(--scalar-background-1);border-radius:50%;width:7px;height:7px;transition:width .3s ease-in-out,height .3s ease-in-out;display:inline-block;position:relative;overflow:hidden;box-shadow:inset 0 0 0 1px}.toggle-icon-moon-mask[data-v-60be8692]{background:var(--scalar-background-1);border:1px solid;border-radius:50%;width:100%;height:100%;transition:transform .3s ease-in-out;display:block;position:absolute;bottom:2.5px;left:2.5px;transform:translate(4px,-4px)}.toggle-icon-sun-ray[data-v-60be8692]{background:currentColor;border-radius:8px;width:12px;height:1px;transition:transform .3s 
ease-in-out;position:absolute}.toggle-icon-sun-ray[data-v-60be8692]:nth-of-type(2){transform:rotate(90deg)}.toggle-icon-sun-ray[data-v-60be8692]:nth-of-type(3){transform:rotate(45deg)}.toggle-icon-sun-ray[data-v-60be8692]:nth-of-type(4){transform:rotate(-45deg)}.toggle-icon-dark .toggle-icon-ellipse[data-v-60be8692]{width:10px;height:10px;-webkit-mask-image:radial-gradient(circle at 0 100%,pink 10px,#0000 12px);mask-image:radial-gradient(circle at 0 100%,pink 10px,#0000 12px)}.toggle-icon-dark .toggle-icon-sun-ray[data-v-60be8692]{transform:scale(0)}.toggle-icon-dark .toggle-icon-moon-mask[data-v-60be8692]{transform:translateZ(0)}.scalar-icon[data-v-b651bb23],.scalar-icon[data-v-b651bb23] *{stroke-width:var(--c07589c2)}.scalar-app :where(code.hljs) *{font-size:inherit;font-family:var(--scalar-font-code);text-align:left;white-space:pre;word-spacing:normal;word-break:normal;word-wrap:normal;tab-size:4;line-height:1.4}.scalar-app code.hljs{all:unset;font-size:inherit;color:var(--scalar-color-2);font-family:var(--scalar-font-code);counter-reset:linenumber}.scalar-app .hljs{color:var(--scalar-color-2);background:0 0}.scalar-app .hljs .line:before{color:var(--scalar-color-3);counter-increment:linenumber;content:counter(linenumber);min-width:calc(var(--line-digits)*1ch);text-align:right;margin-right:.875rem;display:inline-block}.scalar-app .hljs-comment,.scalar-app .hljs-quote{color:var(--scalar-color-3);font-style:italic}.scalar-app .hljs-number{color:var(--scalar-color-orange)}.scalar-app .hljs-regexp,.scalar-app .hljs-string,.scalar-app .hljs-built_in{color:var(--scalar-color-blue)}.scalar-app .hljs-title.class_{color:var(--scalar-color-1)}.scalar-app .hljs-keyword{color:var(--scalar-color-purple)}.scalar-app .hljs-title.function_{color:var(--scalar-color-orange)}.scalar-app .hljs-subst,.scalar-app .hljs-name{color:var(--scalar-color-blue)}.scalar-app .hljs-attr,.scalar-app .hljs-attribute{color:var(--scalar-color-1)}.scalar-app .hljs-addition,.scalar-app 
.hljs-literal,.scalar-app .hljs-selector-tag,.scalar-app .hljs-type{color:var(--scalar-color-green)}.scalar-app .hljs-selector-attr,.scalar-app .hljs-selector-pseudo{color:var(--scalar-color-orange)}.scalar-app .hljs-doctag,.scalar-app .hljs-section,.scalar-app .hljs-title{color:var(--scalar-color-blue)}.scalar-app .hljs-selector-id,.scalar-app .hljs-template-variable,.scalar-app .hljs-variable{color:var(--scalar-color-1)}.scalar-app .hljs-name,.scalar-app .hljs-section,.scalar-app .hljs-strong{font-weight:var(--scalar-semibold)}.scalar-app .hljs-bullet,.scalar-app .hljs-link,.scalar-app .hljs-meta,.scalar-app .hljs-symbol{color:var(--scalar-color-blue)}.scalar-app .hljs-deletion{color:var(--scalar-color-red)}.scalar-app .hljs-formula{background:var(--scalar-color-1)}.scalar-app .hljs-emphasis{font-style:italic}.scalar-app .credential .credential-value{color:#0000;font-size:0}.scalar-app .credential:after{content:"·····";color:var(--scalar-color-3);-webkit-user-select:none;user-select:none}.hljs.language-html{color:var(--scalar-color-1)}.hljs.language-html .hljs-attr{color:var(--scalar-color-2)}.hljs.language-curl .hljs-string{color:var(--scalar-color-blue)}.hljs.language-curl .hljs-literal{color:var(--scalar-color-1)}.hljs.language-php .hljs-variable{color:var(--scalar-color-blue)}.hljs.language-objectivec .hljs-meta{color:var(--scalar-color-1)}.hljs.language-objectivec .hljs-built_in,.hljs-built_in{color:var(--scalar-color-orange)}.scalar-app .markdown{--scalar-refs-heading-spacing:24px;--markdown-border:var(--scalar-border-width)solid var(--scalar-border-color);--markdown-spacing-sm:12px;--markdown-spacing-md:16px;--markdown-line-height:1.625;--markdown-heading-line-height:1.15;font-family:var(--scalar-font);word-break:break-word;line-height:var(--markdown-line-height)}.scalar-app .markdown>*{margin-bottom:var(--markdown-spacing-md)}.scalar-app .markdown>:not(h1):not(h2):not(h3):not(h4):not(h5):not(h6):last-child{margin-bottom:0}.scalar-app .markdown 
h1,.scalar-app .markdown h2,.scalar-app .markdown h3,.scalar-app .markdown h4,.scalar-app .markdown h5,.scalar-app .markdown h6{font-weight:var(--scalar-bold);margin-top:var(--scalar-refs-heading-spacing);margin-bottom:var(--markdown-spacing-sm);line-height:var(--markdown-heading-line-height,1.15);scroll-margin-top:1rem;display:block}.scalar-app .markdown h1{font-size:1.5rem}.scalar-app .markdown h2,.scalar-app .markdown h3{font-size:1.25rem}.scalar-app .markdown h4,.scalar-app .markdown h5,.scalar-app .markdown h6{font-size:1rem}.scalar-app .markdown b,.scalar-app .markdown strong{font-weight:var(--scalar-bold)}.scalar-app .markdown p{color:inherit;line-height:var(--markdown-line-height);display:block}.scalar-app .markdown img{border-radius:var(--scalar-radius);max-width:100%;display:inline-block;overflow:hidden}.scalar-app .markdown ul,.scalar-app .markdown ol{line-height:var(--markdown-line-height);flex-direction:column;gap:2px;padding-left:1.6em;display:flex}.scalar-app .markdown li{margin-top:2px;padding-left:7px}.scalar-app ol>li::marker{font:var(--scalar-font);font-variant-numeric:tabular-nums;font-weight:var(--scalar-semibold);white-space:nowrap}.scalar-app ol>*>li::marker{font:var(--scalar-font);font-variant-numeric:tabular-nums;font-weight:var(--scalar-semibold);white-space:nowrap}.scalar-app .markdown ol{list-style-type:decimal}.scalar-app .markdown ol ol{list-style-type:lower-alpha}.scalar-app .markdown ol ol ol ol,.scalar-app .markdown ol ol ol ol ol ol ol{list-style-type:decimal}.scalar-app .markdown ol ol ol ol ol,.scalar-app .markdown ol ol ol ol ol ol ol ol{list-style-type:lower-alpha}.scalar-app .markdown ol ol ol,.scalar-app .markdown ol ol ol ol ol ol,.scalar-app .markdown ol ol ol ol ol ol ol ol ol{list-style-type:lower-roman}.scalar-app .markdown ul>li,.scalar-app .markdown ul>*>li{list-style-type:disc}.scalar-app .markdown table{table-layout:fixed;border:var(--scalar-border-width)solid 
var(--scalar-border-color);border-radius:var(--scalar-radius);border-spacing:0;width:100%;margin:1em 0;display:table;position:relative;overflow-x:auto}.scalar-app .markdown tbody,.scalar-app .markdown thead{vertical-align:middle}.scalar-app .markdown tbody{display:table-row-group}.scalar-app .markdown thead{display:table-header-group}.scalar-app .markdown tr{border-color:inherit;vertical-align:inherit;display:table-row}.scalar-app .markdown td,.scalar-app .markdown th{vertical-align:top;min-width:1em;line-height:var(--markdown-line-height);word-break:break-word;font-size:var(--scalar-small);color:var(--scalar-color-1);border-right:var(--markdown-border);border-bottom:var(--markdown-border);padding:8.5px 16px;display:table-cell;position:relative}.scalar-app .markdown td>*,.scalar-app .markdown th>*{margin-bottom:0}.scalar-app .markdown th:empty{display:none}.scalar-app .markdown td:first-of-type,.scalar-app .markdown th:first-of-type{border-left:none}.scalar-app .markdown td:last-of-type,.scalar-app .markdown th:last-of-type{border-right:none}.scalar-app .markdown tr:last-of-type td{border-bottom:none}.scalar-app .markdown th{font-weight:var(--scalar-bold);text-align:left;background:var(--scalar-background-2);border-left-color:#0000}.scalar-app .markdown th:first-of-type{border-top-left-radius:var(--scalar-radius)}.scalar-app .markdown th:last-of-type{border-top-right-radius:var(--scalar-radius)}.scalar-app .markdown tr>[align=left]{text-align:left}.scalar-app .markdown tr>[align=right]{text-align:right}.scalar-app .markdown tr>[align=center]{text-align:center}.scalar-app .markdown details{border:var(--markdown-border);border-radius:var(--scalar-radius);color:var(--scalar-color-1)}.scalar-app .markdown details>:not(summary){margin:var(--markdown-spacing-md);margin-bottom:0}.scalar-app .markdown details>p:has(>strong):not(:has(:not(strong))){margin-bottom:8px}.scalar-app .markdown details>p:has(>strong):not(:has(:not(strong)))+*{margin-top:0}.scalar-app .markdown 
details>table{width:calc(100% - calc(var(--markdown-spacing-md)*2))}.scalar-app .markdown summary{min-height:40px;font-weight:var(--scalar-semibold);line-height:var(--markdown-line-height);cursor:pointer;-webkit-user-select:none;user-select:none;border-radius:2.5px;align-items:flex-start;gap:8px;padding:7px 14px;display:flex;position:relative}.scalar-app .markdown summary:hover{background-color:var(--scalar-background-2)}.scalar-app .markdown details[open]{padding-bottom:var(--markdown-spacing-md)}.scalar-app .markdown details[open]>summary{border-bottom:var(--markdown-border);border-bottom-right-radius:0;border-bottom-left-radius:0}.scalar-app .markdown summary:before{content:"";width:var(--markdown-spacing-md);height:var(--markdown-spacing-md);background-color:var(--scalar-color-3);flex-shrink:0;margin-top:5px;display:block;-webkit-mask-image:url(\'data:image/svg+xml,\');mask-image:url(\'data:image/svg+xml,\')}.scalar-app .markdown summary:hover:before{background-color:var(--scalar-color-1)}.scalar-app .markdown details[open]>summary:before{transition:transform .1s ease-in-out;transform:rotate(90deg)}.scalar-app .markdown details:has(+details){border-bottom:0;border-bottom-right-radius:0;border-bottom-left-radius:0;margin-bottom:0}.scalar-app .markdown details:has(+details)+details,.scalar-app .markdown details:has(+details)+details>summary{border-top-left-radius:0;border-top-right-radius:0}.scalar-app .markdown a{--font-color:var(--scalar-link-color,var(--scalar-color-accent));--font-visited:var(--scalar-link-color-visited,var(--scalar-color-2));-webkit-text-decoration:var(--scalar-text-decoration);text-decoration:var(--scalar-text-decoration);color:var(--font-color);font-weight:var(--scalar-link-font-weight,var(--scalar-semibold));text-underline-offset:.25rem;text-decoration-thickness:1px;-webkit-text-decoration-color:var(--font-color);text-decoration-color:var(--font-color)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown 
a{-webkit-text-decoration-color:var(--font-color)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown a{-webkit-text-decoration-color:var(--font-color)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown a{-webkit-text-decoration-color:color-mix(in srgb,var(--font-color)30%,transparent)}}}.scalar-app .markdown a{-webkit-text-decoration-color:var(--font-color);text-decoration-color:var(--font-color)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown a{-webkit-text-decoration-color:var(--font-color)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown a{-webkit-text-decoration-color:color-mix(in srgb,var(--font-color)30%,transparent)}}.scalar-app .markdown a{-webkit-text-decoration-color:var(--font-color);text-decoration-color:var(--font-color)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown a{-webkit-text-decoration-color:color-mix(in srgb,var(--font-color)30%,transparent);text-decoration-color:color-mix(in srgb,var(--font-color)30%,transparent)}}}}.scalar-app .markdown a:hover{-webkit-text-decoration-color:var(--scalar-color-1,currentColor);text-decoration-color:var(--scalar-color-1,currentColor);color:var(--scalar-link-color-hover,var(--scalar-color-accent));-webkit-text-decoration:var(--scalar-text-decoration-hover);text-decoration:var(--scalar-text-decoration-hover)}.scalar-app .markdown a:visited{color:var(--font-visited)}.scalar-app .markdown em{font-style:italic}.scalar-app .markdown sup,.scalar-app .markdown sub{font-size:var(--scalar-micro);font-weight:450}.scalar-app .markdown sup{vertical-align:super}.scalar-app .markdown sub{vertical-align:sub}.scalar-app .markdown del{text-decoration:line-through}.scalar-app .markdown code{font-family:var(--scalar-font-code);background-color:var(--scalar-background-2);box-shadow:0 0 0 var(--scalar-border-width) var(--scalar-border-color);font-size:var(--scalar-micro);border-radius:2px;padding:0 3px}.scalar-app .markdown 
.hljs{font-size:var(--scalar-small)}.scalar-app .markdown pre code{white-space:pre;padding:var(--markdown-spacing-sm);margin:var(--markdown-spacing-sm)0;-webkit-overflow-scrolling:touch;min-width:100px;max-width:100%;line-height:1.5;display:block;overflow-x:auto}.scalar-app .markdown hr{border:none;border-bottom:var(--markdown-border)}.scalar-app .markdown blockquote{border-left:1px solid var(--scalar-color-1);padding-left:var(--markdown-spacing-md);font-weight:var(--scalar-bold);font-size:var(--scalar-font-size-2);margin:0;display:block}.scalar-app .markdown li.task-list-item{list-style:none;position:relative}.scalar-app .markdown li.task-list-item>input{appearance:none;width:var(--markdown-spacing-md);height:var(--markdown-spacing-md);border:1px solid var(--scalar-color-3);border-radius:var(--scalar-radius);display:inline;position:absolute;top:.225em;left:-1.4em}.scalar-app .markdown li.task-list-item>input[type=checkbox]:checked{background-color:var(--scalar-color-1);border-color:var(--scalar-color-1)}.scalar-app .markdown li.task-list-item>input[type=checkbox]:before{content:"";border:solid var(--scalar-background-1);opacity:0;border-width:0 1.5px 1.5px 0;width:5px;height:10px;position:absolute;top:1px;left:5px;transform:rotate(45deg)}.scalar-app .markdown li.task-list-item>input[type=checkbox]:checked:before{opacity:1}.scalar-app .markdown .markdown-alert{border-radius:var(--scalar-radius);background-color:var(--scalar-background-2);align-items:stretch}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert{background-color:var(--scalar-background-2)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert{background-color:var(--scalar-background-2)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert{background-color:color-mix(in srgb,var(--scalar-background-2),transparent)}}}}.scalar-app .markdown 
.markdown-alert{border:var(--markdown-border);gap:var(--markdown-spacing-sm);padding:10px 14px;display:flex;position:relative}.scalar-app .markdown .markdown-alert .markdown-alert-icon:before{content:"";background-color:currentColor;flex-shrink:0;width:18px;height:18px;margin-top:3px;display:block;-webkit-mask-position:50%;mask-position:50%;-webkit-mask-size:contain;mask-size:contain;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat}.scalar-app .markdown .markdown-alert.markdown-alert-note{background-color:var(--scalar-color-blue)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-note{background-color:var(--scalar-color-blue)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-note{background-color:var(--scalar-color-blue)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-note{background-color:color-mix(in srgb,var(--scalar-color-blue),transparent 97%)}}}}.scalar-app .markdown .markdown-alert.markdown-alert-note{border:var(--scalar-border-width)solid var(--scalar-color-blue)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-note{border:var(--scalar-border-width)solid var(--scalar-color-blue)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-note{border:var(--scalar-border-width)solid var(--scalar-color-blue)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-note{border:var(--scalar-border-width)solid color-mix(in srgb,var(--scalar-color-blue),transparent 50%)}}}}.scalar-app .markdown .markdown-alert.markdown-alert-tip{background-color:var(--scalar-color-2)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-tip{background-color:var(--scalar-color-2)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown 
.markdown-alert.markdown-alert-tip{background-color:var(--scalar-color-2)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-tip{background-color:color-mix(in srgb,var(--scalar-color-2),transparent 97%)}}}}.scalar-app .markdown .markdown-alert.markdown-alert-tip{border:var(--scalar-border-width)solid var(--scalar-color-2)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-tip{border:var(--scalar-border-width)solid var(--scalar-color-2)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-tip{border:var(--scalar-border-width)solid var(--scalar-color-2)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-tip{border:var(--scalar-border-width)solid color-mix(in srgb,var(--scalar-color-2),transparent 50%)}}}}.scalar-app .markdown .markdown-alert.markdown-alert-note .markdown-alert-icon:before,.scalar-app .markdown .markdown-alert.markdown-alert-tip .markdown-alert-icon:before{color:var(--scalar-color-blue);-webkit-mask-image:url(\'data:image/svg+xml,\');mask-image:url(\'data:image/svg+xml,\')}.scalar-app .markdown .markdown-alert.markdown-alert-important,.scalar-app .markdown .markdown-alert.markdown-alert-warning{background-color:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-important,.scalar-app .markdown .markdown-alert.markdown-alert-warning{background-color:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-important,.scalar-app .markdown .markdown-alert.markdown-alert-warning{background-color:var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-important,.scalar-app .markdown .markdown-alert.markdown-alert-warning{background-color:color-mix(in 
srgb,var(--scalar-color-orange),transparent 97%)}}}}.scalar-app .markdown .markdown-alert.markdown-alert-important,.scalar-app .markdown .markdown-alert.markdown-alert-warning{border:var(--scalar-border-width)solid var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-important,.scalar-app .markdown .markdown-alert.markdown-alert-warning{border:var(--scalar-border-width)solid var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-important,.scalar-app .markdown .markdown-alert.markdown-alert-warning{border:var(--scalar-border-width)solid var(--scalar-color-orange)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-important,.scalar-app .markdown .markdown-alert.markdown-alert-warning{border:var(--scalar-border-width)solid color-mix(in srgb,var(--scalar-color-orange),transparent 50%)}}}}.scalar-app .markdown .markdown-alert.markdown-alert-important .markdown-alert-icon:before,.scalar-app .markdown .markdown-alert.markdown-alert-warning .markdown-alert-icon:before{-webkit-mask-image:url(\'data:image/svg+xml,\');mask-image:url(\'data:image/svg+xml,\')}.scalar-app .markdown .markdown-alert.markdown-alert-caution{background-color:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-caution{background-color:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-caution{background-color:var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-caution{background-color:color-mix(in srgb,var(--scalar-color-red),transparent 97%)}}}}.scalar-app .markdown .markdown-alert.markdown-alert-caution{border:var(--scalar-border-width)solid var(--scalar-color-red)}@supports (color:color-mix(in 
lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-caution{border:var(--scalar-border-width)solid var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-caution{border:var(--scalar-border-width)solid var(--scalar-color-red)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-caution{border:var(--scalar-border-width)solid color-mix(in srgb,var(--scalar-color-red),transparent 50%)}}}}.scalar-app .markdown .markdown-alert.markdown-alert-caution .markdown-alert-icon:before{color:var(--scalar-color-red);-webkit-mask-image:url(\'data:image/svg+xml,\');mask-image:url(\'data:image/svg+xml,\')}.scalar-app .markdown .markdown-alert.markdown-alert-success{background-color:var(--scalar-color-green)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-success{background-color:var(--scalar-color-green)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-success{background-color:var(--scalar-color-green)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-success{background-color:color-mix(in srgb,var(--scalar-color-green),transparent 97%)}}}}.scalar-app .markdown .markdown-alert.markdown-alert-success{border:var(--scalar-border-width)solid var(--scalar-color-green)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-success{border:var(--scalar-border-width)solid var(--scalar-color-green)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-success{border:var(--scalar-border-width)solid var(--scalar-color-green)}@supports (color:color-mix(in lab,red,red)){.scalar-app .markdown .markdown-alert.markdown-alert-success{border:var(--scalar-border-width)solid color-mix(in srgb,var(--scalar-color-green),transparent 50%)}}}}.scalar-app 
.markdown .markdown-alert.markdown-alert-success .markdown-alert-icon:before{color:var(--scalar-color-green);-webkit-mask-image:url(\'data:image/svg+xml,\');mask-image:url(\'data:image/svg+xml,\')}.scalar-app .markdown .markdown-alert.markdown-alert-note .markdown-alert-icon:before{color:var(--scalar-color-blue)}.scalar-app .markdown .markdown-alert.markdown-alert-tip .markdown-alert-icon:before{color:var(--scalar-color-2)}.scalar-app .markdown .markdown-alert.markdown-alert-important .markdown-alert-icon:before{color:var(--scalar-color-purple)}.scalar-app .markdown .markdown-alert.markdown-alert-warning .markdown-alert-icon:before{color:var(--scalar-color-orange)}.scalar-app .markdown .markdown-alert .markdown-alert-content{line-height:var(--markdown-line-height);margin:0}.scalar-app .markdown.markdown-summary.markdown-summary :before,.scalar-app .markdown.markdown-summary.markdown-summary :after{content:none}.scalar-app .markdown.markdown-summary.markdown-summary :not(strong,em,a){font-size:inherit;font-weight:inherit;line-height:var(--markdown-line-height);display:contents}.scalar-app .markdown.markdown-summary.markdown-summary img,.scalar-app .markdown.markdown-summary.markdown-summary svg,.scalar-app .markdown.markdown-summary.markdown-summary hr,.scalar-app .markdown.markdown-summary.markdown-summary pre{display:none}.dark-mode .scalar-dropdown-item[data-v-6660bbc5]:hover{filter:brightness(1.1)}.group\\/item>*>.scalar-sidebar-indent .scalar-sidebar-indent-border[data-v-3e080c68]{inset-block:-1px}.group\\/item:first-child>*>.scalar-sidebar-indent .scalar-sidebar-indent-border[data-v-3e080c68]{top:0}.group\\/item:last-child>*>.scalar-sidebar-indent .scalar-sidebar-indent-border[data-v-3e080c68]{bottom:0}.group\\/items.-translate-x-full 
.group\\/button{transition-behavior:allow-discrete;max-height:0;transition-property:display,max-height;transition-duration:0s;transition-delay:.3s;display:none}.group\\/item.group\\/nested-items-open>*>.group\\/items.translate-x-0 .group\\/button{max-height:3.40282e38px;display:flex}.group\\/sidebar-section:first-of-type>.group\\/spacer-before,.group\\/sidebar-section:last-of-type>.group\\/spacer-after{height:0}.group\\/sidebar-section:has(+.group\\/sidebar-section)>.group\\/spacer-after{height:0;margin-bottom:-1px}:where(body)>.scalar-tooltip{--scalar-tooltip-padding:8px;padding:calc(var(--scalar-tooltip-padding) + var(--scalar-tooltip-offset));z-index:99999;max-width:320px;font-size:var(--scalar-font-size-5);--tw-leading:var(--scalar-line-height-5);line-height:var(--scalar-line-height-5);--tw-font-weight:var(--scalar-semibold);font-weight:var(--scalar-semibold);overflow-wrap:break-word;color:var(--scalar-tooltip-color)}:where(body)>.scalar-tooltip:before{content:"";inset:var(--scalar-tooltip-offset);z-index:-1;border-radius:var(--scalar-radius);background-color:var(--scalar-tooltip-background);--tw-backdrop-blur:blur(8px);-webkit-backdrop-filter:var(--tw-backdrop-blur,)var(--tw-backdrop-brightness,)var(--tw-backdrop-contrast,)var(--tw-backdrop-grayscale,)var(--tw-backdrop-hue-rotate,)var(--tw-backdrop-invert,)var(--tw-backdrop-opacity,)var(--tw-backdrop-saturate,)var(--tw-backdrop-sepia,);backdrop-filter:var(--tw-backdrop-blur,)var(--tw-backdrop-brightness,)var(--tw-backdrop-contrast,)var(--tw-backdrop-grayscale,)var(--tw-backdrop-hue-rotate,)var(--tw-backdrop-invert,)var(--tw-backdrop-opacity,)var(--tw-backdrop-saturate,)var(--tw-backdrop-sepia,);position:absolute}:where(body.dark-mode)>.scalar-tooltip:before{--tw-shadow:inset 0 0 0 var(--tw-shadow-color,calc(var(--scalar-border-width)*2))var(--scalar-border-color);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.dark-mode 
.scalar-dropdown-item[data-v-3402682d]:hover{filter:brightness(1.1)}.scalar-modal-layout[data-v-c36b47da]{animation:.3s ease-in-out forwards fadein-layout-c36b47da}.scalar-modal[data-v-c36b47da]{box-shadow:var(--scalar-shadow-2);animation:.3s ease-in-out .1s forwards fadein-modal-c36b47da;transform:translateY(10px)}.scalar-modal-layout-full[data-v-c36b47da]{opacity:1!important;background:0 0!important}.modal-content-search .modal-body[data-v-c36b47da]{flex-direction:column;max-height:440px;padding:0;display:flex;overflow:hidden}@media(max-width:720px)and (max-height:480px){.scalar-modal-layout .scalar-modal[data-v-c36b47da]{max-height:90svh;margin-top:5svh}}.full-size-styles[data-v-c36b47da]{margin:initial;border-right:var(--scalar-border-width)solid var(--scalar-border-color);animation:.3s ease-in-out forwards fadein-layout-c36b47da;left:0;transform:translate(0);background-color:var(--scalar-background-1)!important;max-height:100%!important;box-shadow:none!important;border-radius:0!important;position:absolute!important;top:0!important}@media(min-width:800px){.full-size-styles[data-v-c36b47da]{width:50dvw!important}}.full-size-styles[data-v-c36b47da]:after{content:"";width:50dvw;height:100dvh;position:absolute;top:0;right:-50dvw}.sidebar-heading-type[data-v-1857170e]{text-transform:uppercase;color:var(--method-color,var(--scalar-color-1));font-size:10px;line-height:14px;font-weight:var(--scalar-bold);font-family:var(--scalar-font-code);white-space:nowrap;flex-shrink:0;align-items:center;gap:4px;display:inline-flex;overflow:hidden}.scalar-app .pointer-events-auto{pointer-events:auto}.scalar-app .pointer-events-none{pointer-events:none}.scalar-app .collapse{visibility:collapse}.scalar-app .visible{visibility:visible}.scalar-app .floating-bg:before{background-color:var(--scalar-background-2);border-radius:var(--scalar-radius);content:"";opacity:0;z-index:1;width:calc(100% + 8px);height:calc(100% - 4px);transition:opacity .2s 
ease-in-out;position:absolute;top:2.5px;left:-4px}.scalar-app .floating-bg:hover:before{opacity:1}.scalar-app .centered{--tw-translate-y:-50%;--tw-translate-x:-50%;translate:var(--tw-translate-x)var(--tw-translate-y);position:absolute;top:50%;left:50%}.scalar-app .centered-y{--tw-translate-y:-50%;translate:var(--tw-translate-x)var(--tw-translate-y);position:absolute;top:50%}.scalar-app .centered-x{--tw-translate-x:-50%;translate:var(--tw-translate-x)var(--tw-translate-y);position:absolute;left:50%}.scalar-app .sr-only{clip-path:inset(50%);white-space:nowrap;border-width:0;width:1px;height:1px;margin:-1px;padding:0;position:absolute;overflow:hidden}.scalar-app .absolute{position:absolute}.scalar-app .fixed{position:fixed}.scalar-app .relative{position:relative}.scalar-app .static{position:static}.scalar-app .sticky{position:sticky}.scalar-app .inset-0{inset:0}.scalar-app .inset-x-0{inset-inline:0}.scalar-app .inset-x-1{inset-inline:4px}.scalar-app .-top-\\(--scalar-address-bar-height\\){top:calc(var(--scalar-address-bar-height)*-1)}.scalar-app .-top-\\[104px\\]{top:-104px}.scalar-app .top-0{top:0}.scalar-app .top-2{top:8px}.scalar-app .top-12{top:48px}.scalar-app .top-\\[calc\\(100\\%\\+4px\\)\\]{top:calc(100% + 4px)}.scalar-app .-right-\\[30px\\]{right:-30px}.scalar-app .right-0{right:0}.scalar-app .right-1{right:4px}.scalar-app .right-1\\.5{right:6px}.scalar-app .right-1\\/2{right:50%}.scalar-app .right-2{right:8px}.scalar-app .right-4{right:16px}.scalar-app .right-7{right:28px}.scalar-app .right-14{right:56px}.scalar-app .right-16{right:64px}.scalar-app .bottom-0{bottom:0}.scalar-app .bottom-1{bottom:4px}.scalar-app .bottom-1\\/2{bottom:50%}.scalar-app .bottom-\\[var\\(--scalar-border-width\\)\\]{bottom:var(--scalar-border-width)}.scalar-app .left-0{left:0}.scalar-app .left-1\\/2{left:50%}.scalar-app .left-3{left:12px}.scalar-app .-z-1{z-index:-1}.scalar-app .z-0{z-index:0}.scalar-app .z-1{z-index:1}.scalar-app .z-10{z-index:10}.scalar-app 
.z-20{z-index:20}.scalar-app .z-50{z-index:50}.scalar-app .z-\\[1\\]{z-index:1}.scalar-app .z-\\[1002\\]{z-index:1002}.scalar-app .z-\\[10000\\]{z-index:10000}.scalar-app .z-\\[10001\\]{z-index:10001}.scalar-app .z-context{z-index:1000}.scalar-app .z-context-plus{z-index:1001}.scalar-app .z-overlay{z-index:10000}.scalar-app .order-last{order:9999}.scalar-app .col-span-full{grid-column:1/-1}.scalar-app .container{width:100%}@media(min-width:400px){.scalar-app .container{max-width:400px}}@media(min-width:600px){.scalar-app .container{max-width:600px}}@media(min-width:800px){.scalar-app .container{max-width:800px}}@media(min-width:1000px){.scalar-app .container{max-width:1000px}}@media(min-width:1200px){.scalar-app .container{max-width:1200px}}@media(min-width:96rem){.scalar-app .container{max-width:96rem}}.scalar-app .\\!m-0{margin:0!important}.scalar-app .-m-0\\.5{margin:-2px}.scalar-app .m-0{margin:0}.scalar-app .m-4{margin:16px}.scalar-app .m-auto{margin:auto}.scalar-app .m-header{margin:48px}.scalar-app .-mx-0\\.25{margin-inline:-1px}.scalar-app .mx-1{margin-inline:4px}.scalar-app .mx-auto{margin-inline:auto}.scalar-app .-my-1{margin-block:-4px}.scalar-app .my-12{margin-block:48px}.scalar-app .-mt-\\[\\.5px\\]{margin-top:-.5px}.scalar-app .mt-0\\.25{margin-top:1px}.scalar-app .mt-1{margin-top:4px}.scalar-app .mt-1\\.5{margin-top:6px}.scalar-app .mt-2{margin-top:8px}.scalar-app .mt-3{margin-top:12px}.scalar-app .mt-5{margin-top:20px}.scalar-app .mt-10{margin-top:40px}.scalar-app .mt-\\[0\\.5px\\]{margin-top:.5px}.scalar-app .mt-auto{margin-top:auto}.scalar-app .\\!mr-0{margin-right:0!important}.scalar-app .-mr-0\\.5{margin-right:-2px}.scalar-app .-mr-1{margin-right:-4px}.scalar-app .-mr-1\\.5{margin-right:-6px}.scalar-app .-mr-3{margin-right:-12px}.scalar-app .mr-0\\.5{margin-right:2px}.scalar-app .mr-0\\.75{margin-right:3px}.scalar-app .mr-1{margin-right:4px}.scalar-app .mr-1\\.5{margin-right:6px}.scalar-app .mr-1\\.25{margin-right:5px}.scalar-app 
.mr-2{margin-right:8px}.scalar-app .mr-2\\.5{margin-right:10px}.scalar-app .mr-3{margin-right:12px}.scalar-app .mr-\\[6\\.25px\\]{margin-right:6.25px}.scalar-app .mr-auto{margin-right:auto}.scalar-app .\\!mb-0{margin-bottom:0!important}.scalar-app .-mb-\\[var\\(--scalar-border-width\\)\\]{margin-bottom:calc(var(--scalar-border-width)*-1)}.scalar-app .mb-0{margin-bottom:0}.scalar-app .mb-1{margin-bottom:4px}.scalar-app .mb-1\\.5{margin-bottom:6px}.scalar-app .mb-2{margin-bottom:8px}.scalar-app .mb-4{margin-bottom:16px}.scalar-app .mb-\\[\\.5px\\]{margin-bottom:.5px}.scalar-app .-ml-0\\.5{margin-left:-2px}.scalar-app .-ml-0\\.25{margin-left:-1px}.scalar-app .-ml-1{margin-left:-4px}.scalar-app .-ml-2{margin-left:-8px}.scalar-app .-ml-12{margin-left:-48px}.scalar-app .ml-0\\.5{margin-left:2px}.scalar-app .ml-0\\.75{margin-left:3px}.scalar-app .ml-1{margin-left:4px}.scalar-app .ml-1\\.25{margin-left:5px}.scalar-app .ml-3{margin-left:12px}.scalar-app .ml-auto{margin-left:auto}.scalar-app .box-border{box-sizing:border-box}.scalar-app .box-content{box-sizing:content-box}.scalar-app .flex-center{justify-content:center;align-items:center;display:flex}.scalar-app .line-clamp-1{-webkit-line-clamp:1;-webkit-box-orient:vertical;display:-webkit-box;overflow:hidden}.scalar-app .\\!block{display:block!important}.scalar-app .\\!flex{display:flex!important}.scalar-app .block{display:block}.scalar-app .contents{display:contents}.scalar-app .flex{display:flex}.scalar-app .grid{display:grid}.scalar-app .hidden{display:none}.scalar-app .inline{display:inline}.scalar-app .inline-block{display:inline-block}.scalar-app .inline-flex{display:inline-flex}.scalar-app .table{display:table}.scalar-app .aspect-\\[4\\/3\\]{aspect-ratio:4/3}.scalar-app .aspect-square{aspect-ratio:1}.scalar-app .size-2\\.5{width:10px;height:10px}.scalar-app .size-3{width:12px;height:12px}.scalar-app .size-3\\.5{width:14px;height:14px}.scalar-app .size-3\\/4{width:75%;height:75%}.scalar-app 
.size-4{width:16px;height:16px}.scalar-app .size-5{width:20px;height:20px}.scalar-app .size-6{width:24px;height:24px}.scalar-app .size-7{width:28px;height:28px}.scalar-app .size-8{width:32px;height:32px}.scalar-app .size-10{width:40px;height:40px}.scalar-app .size-full{width:100%;height:100%}.scalar-app .h-\\(--scalar-address-bar-height\\){height:var(--scalar-address-bar-height)}.scalar-app .h-1\\.5{height:6px}.scalar-app .h-2\\.5{height:10px}.scalar-app .h-2\\.25{height:9px}.scalar-app .h-3{height:12px}.scalar-app .h-3\\.5{height:14px}.scalar-app .h-4{height:16px}.scalar-app .h-5{height:20px}.scalar-app .h-6{height:24px}.scalar-app .h-7{height:28px}.scalar-app .h-8{height:32px}.scalar-app .h-9{height:36px}.scalar-app .h-10{height:40px}.scalar-app .h-12{height:48px}.scalar-app .h-16{height:64px}.scalar-app .h-64{height:256px}.scalar-app .h-\\[68px\\]{height:68px}.scalar-app .h-\\[calc\\(100\\%-273\\.5px\\)\\]{height:calc(100% - 273.5px)}.scalar-app .h-\\[calc\\(100\\%_-_50px\\)\\]{height:calc(100% - 50px)}.scalar-app .h-auto{height:auto}.scalar-app .h-fit{height:fit-content}.scalar-app .h-full{height:100%}.scalar-app .h-header{height:48px}.scalar-app .h-min{height:min-content}.scalar-app .h-px{height:1px}.scalar-app .h-screen{height:100vh}.scalar-app .\\!max-h-\\[initial\\]{max-height:initial!important}.scalar-app .max-h-8{max-height:32px}.scalar-app .max-h-40{max-height:160px}.scalar-app .max-h-80{max-height:320px}.scalar-app .max-h-\\[40dvh\\]{max-height:40dvh}.scalar-app .max-h-\\[50dvh\\]{max-height:50dvh}.scalar-app .max-h-\\[60svh\\]{max-height:60svh}.scalar-app .max-h-\\[auto\\]{max-height:auto}.scalar-app .max-h-\\[calc\\(100\\%-32px\\)\\]{max-height:calc(100% - 32px)}.scalar-app .max-h-\\[inherit\\]{max-height:inherit}.scalar-app .max-h-fit{max-height:fit-content}.scalar-app .max-h-screen{max-height:100vh}.scalar-app .\\!min-h-full{min-height:100%!important}.scalar-app .min-h-0{min-height:0}.scalar-app .min-h-8{min-height:32px}.scalar-app 
.min-h-10{min-height:40px}.scalar-app .min-h-11{min-height:44px}.scalar-app .min-h-12{min-height:48px}.scalar-app .min-h-16{min-height:64px}.scalar-app .min-h-20{min-height:80px}.scalar-app .min-h-\\[64px\\]{min-height:64px}.scalar-app .min-h-\\[65px\\]{min-height:65px}.scalar-app .min-h-\\[calc\\(1rem\\*4\\)\\]{min-height:4rem}.scalar-app .min-h-\\[calc\\(4rem\\+0\\.5px\\)\\]{min-height:calc(4rem + .5px)}.scalar-app .min-h-\\[calc\\(4rem\\+1px\\)\\]{min-height:calc(4rem + 1px)}.scalar-app .min-h-fit{min-height:fit-content}.scalar-app .\\!w-fit{width:fit-content!important}.scalar-app .w-0\\.5{width:2px}.scalar-app .w-1\\.5{width:6px}.scalar-app .w-1\\/2{width:50%}.scalar-app .w-2\\.5{width:10px}.scalar-app .w-2\\.25{width:9px}.scalar-app .w-3{width:12px}.scalar-app .w-3\\.5{width:14px}.scalar-app .w-4{width:16px}.scalar-app .w-5{width:20px}.scalar-app .w-6{width:24px}.scalar-app .w-7{width:28px}.scalar-app .w-8{width:32px}.scalar-app .w-10{width:40px}.scalar-app .w-16{width:64px}.scalar-app .w-20{width:80px}.scalar-app .w-56{width:224px}.scalar-app .w-64{width:256px}.scalar-app .w-72{width:288px}.scalar-app .w-\\[60px\\]{width:60px}.scalar-app .w-\\[100px\\]{width:100px}.scalar-app .w-\\[150px\\]{width:150px}.scalar-app .w-\\[calc\\(100\\%-10px\\)\\]{width:calc(100% - 10px)}.scalar-app .w-\\[calc\\(100\\%_-_8px\\)\\]{width:calc(100% - 8px)}.scalar-app .w-\\[inherit\\]{width:inherit}.scalar-app .w-auto{width:auto}.scalar-app .w-dvw{width:100dvw}.scalar-app .w-fit{width:fit-content}.scalar-app .w-full{width:100%}.scalar-app .w-max{width:max-content}.scalar-app .max-w-8{max-width:32px}.scalar-app .max-w-40{max-width:160px}.scalar-app .max-w-\\[14px\\]{max-width:14px}.scalar-app .max-w-\\[16rem\\]{max-width:16rem}.scalar-app .max-w-\\[37px\\]{max-width:37px}.scalar-app .max-w-\\[100\\%\\]{max-width:100%}.scalar-app .max-w-\\[150px\\]{max-width:150px}.scalar-app .max-w-\\[380px\\]{max-width:380px}.scalar-app .max-w-\\[420px\\]{max-width:420px}.scalar-app 
.max-w-\\[720px\\]{max-width:720px}.scalar-app .max-w-\\[calc\\(100dvw-24px\\)\\]{max-width:calc(100dvw - 24px)}.scalar-app .max-w-full{max-width:100%}.scalar-app .min-w-0{min-width:0}.scalar-app .min-w-2\\.25{min-width:9px}.scalar-app .min-w-3\\.5{min-width:14px}.scalar-app .min-w-4{min-width:16px}.scalar-app .min-w-8{min-width:32px}.scalar-app .min-w-32{min-width:128px}.scalar-app .min-w-48{min-width:192px}.scalar-app .min-w-\\[37px\\]{min-width:37px}.scalar-app .min-w-\\[100px\\]{min-width:100px}.scalar-app .min-w-\\[150px\\]{min-width:150px}.scalar-app .min-w-\\[296px\\]{min-width:296px}.scalar-app .min-w-fit{min-width:fit-content}.scalar-app .min-w-full{min-width:100%}.scalar-app .flex-1{flex:1}.scalar-app .flex-shrink{flex-shrink:1}.scalar-app .shrink-0{flex-shrink:0}.scalar-app .flex-grow{flex-grow:1}.scalar-app .-translate-x-1\\/2{--tw-translate-x:-50%;translate:var(--tw-translate-x)var(--tw-translate-y)}.scalar-app .translate-x-0{--tw-translate-x:0px;translate:var(--tw-translate-x)var(--tw-translate-y)}.scalar-app .translate-x-1\\/2{--tw-translate-x:50%;translate:var(--tw-translate-x)var(--tw-translate-y)}.scalar-app .translate-y-1\\/2{--tw-translate-y:50%;translate:var(--tw-translate-x)var(--tw-translate-y)}.scalar-app .scale-75{--tw-scale-x:75%;--tw-scale-y:75%;--tw-scale-z:75%;scale:var(--tw-scale-x)var(--tw-scale-y)}.scalar-app .rotate-90{rotate:90deg}.scalar-app .rotate-180{rotate:180deg}.scalar-app .transform{transform:var(--tw-rotate-x,)var(--tw-rotate-y,)var(--tw-rotate-z,)var(--tw-skew-x,)var(--tw-skew-y,)}.scalar-app .cursor-auto{cursor:auto}.scalar-app .cursor-default{cursor:default}.scalar-app .cursor-grab{cursor:grab}.scalar-app .cursor-help{cursor:help}.scalar-app .cursor-not-allowed{cursor:not-allowed}.scalar-app .cursor-pointer{cursor:pointer}.scalar-app .cursor-text{cursor:text}.scalar-app .resize{resize:both}.scalar-app .resize-none{resize:none}.scalar-app .auto-rows-\\[32px\\]{grid-auto-rows:32px}.scalar-app 
.auto-rows-auto{grid-auto-rows:auto}.scalar-app .grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.scalar-app .grid-cols-\\[44px_1fr_repeat\\(3\\,auto\\)\\]{grid-template-columns:44px 1fr repeat(3,auto)}.scalar-app .grid-cols-\\[auto_1fr\\]{grid-template-columns:auto 1fr}.scalar-app .grid-cols-\\[repeat\\(auto-fill\\,minmax\\(32px\\,1fr\\)\\)\\]{grid-template-columns:repeat(auto-fill,minmax(32px,1fr))}.scalar-app .flex-col{flex-direction:column}.scalar-app .flex-row{flex-direction:row}.scalar-app .flex-wrap{flex-wrap:wrap}.scalar-app .content-between{align-content:space-between}.scalar-app .content-start{align-content:flex-start}.scalar-app .items-center{align-items:center}.scalar-app .items-end{align-items:flex-end}.scalar-app .items-start{align-items:flex-start}.scalar-app .items-stretch{align-items:stretch}.scalar-app .justify-between{justify-content:space-between}.scalar-app .justify-center{justify-content:center}.scalar-app .justify-end{justify-content:flex-end}.scalar-app .justify-start{justify-content:flex-start}.scalar-app .justify-stretch{justify-content:stretch}.scalar-app .\\!gap-2{gap:8px!important}.scalar-app .gap-0\\.5{gap:2px}.scalar-app .gap-0\\.75{gap:3px}.scalar-app .gap-1{gap:4px}.scalar-app .gap-1\\.5{gap:6px}.scalar-app .gap-1\\.75{gap:7px}.scalar-app .gap-2{gap:8px}.scalar-app .gap-2\\.5{gap:10px}.scalar-app .gap-3{gap:12px}.scalar-app .gap-4{gap:16px}.scalar-app .gap-6{gap:24px}.scalar-app .gap-8{gap:32px}.scalar-app .gap-10{gap:40px}.scalar-app .gap-12{gap:48px}.scalar-app .gap-\\[1\\.5px\\]{gap:1.5px}.scalar-app .gap-px{gap:1px}.scalar-app .gap-x-2\\.5{column-gap:10px}:where(.scalar-app .space-x-1>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(4px*var(--tw-space-x-reverse));margin-inline-end:calc(4px*calc(1 - var(--tw-space-x-reverse)))}:where(.scalar-app 
.divide-y>:not(:last-child)){--tw-divide-y-reverse:0;border-bottom-style:var(--tw-border-style);border-top-style:var(--tw-border-style);border-top-width:calc(var(--scalar-border-width)*var(--tw-divide-y-reverse));border-bottom-width:calc(var(--scalar-border-width)*calc(1 - var(--tw-divide-y-reverse)))}.scalar-app .self-center{align-self:center}.scalar-app .truncate{text-overflow:ellipsis;white-space:nowrap;overflow:hidden}.scalar-app .overflow-auto{overflow:auto}.scalar-app .overflow-hidden{overflow:hidden}.scalar-app .overflow-visible{overflow:visible}.scalar-app .overflow-x-auto{overflow-x:auto}.scalar-app .overflow-y-auto{overflow-y:auto}.scalar-app .overflow-y-hidden{overflow-y:hidden}.scalar-app .\\!rounded-none{border-radius:0!important}.scalar-app .rounded{border-radius:var(--scalar-radius)}.scalar-app .rounded-\\[10px\\]{border-radius:10px}.scalar-app .rounded-full{border-radius:9999px}.scalar-app .rounded-lg{border-radius:var(--scalar-radius-lg)}.scalar-app .rounded-md{border-radius:var(--scalar-radius)}.scalar-app .rounded-px{border-radius:1px}.scalar-app .rounded-xl{border-radius:var(--scalar-radius-xl)}.scalar-app .rounded-t{border-top-left-radius:var(--scalar-radius);border-top-right-radius:var(--scalar-radius)}.scalar-app .rounded-t-lg{border-top-left-radius:var(--scalar-radius-lg);border-top-right-radius:var(--scalar-radius-lg)}.scalar-app .rounded-t-none{border-top-left-radius:0;border-top-right-radius:0}.scalar-app .rounded-b{border-bottom-right-radius:var(--scalar-radius);border-bottom-left-radius:var(--scalar-radius)}.scalar-app .rounded-b-lg{border-bottom-right-radius:var(--scalar-radius-lg);border-bottom-left-radius:var(--scalar-radius-lg)}.scalar-app .\\!border-0{border-style:var(--tw-border-style)!important;border-width:0!important}.scalar-app .border{border-style:var(--tw-border-style);border-width:var(--scalar-border-width)}.scalar-app .border-0{border-style:var(--tw-border-style);border-width:0}.scalar-app 
.border-\\[1\\.5px\\]{border-style:var(--tw-border-style);border-width:1.5px}.scalar-app .border-\\[1px\\]{border-style:var(--tw-border-style);border-width:1px}.scalar-app .border-x{border-inline-style:var(--tw-border-style);border-inline-width:var(--scalar-border-width)}.scalar-app .border-x-0{border-inline-style:var(--tw-border-style);border-inline-width:0}.scalar-app .border-y{border-block-style:var(--tw-border-style);border-block-width:var(--scalar-border-width)}.scalar-app .border-t{border-top-style:var(--tw-border-style);border-top-width:var(--scalar-border-width)}.scalar-app .border-t-0{border-top-style:var(--tw-border-style);border-top-width:0}.scalar-app .\\!border-r{border-right-style:var(--tw-border-style)!important;border-right-width:var(--scalar-border-width)!important}.scalar-app .border-r{border-right-style:var(--tw-border-style);border-right-width:var(--scalar-border-width)}.scalar-app .border-r-0{border-right-style:var(--tw-border-style);border-right-width:0}.scalar-app .border-r-1{border-right-style:var(--tw-border-style);border-right-width:1px}.scalar-app .border-b{border-bottom-style:var(--tw-border-style);border-bottom-width:var(--scalar-border-width)}.scalar-app .border-b-0{border-bottom-style:var(--tw-border-style);border-bottom-width:0}.scalar-app .border-b-\\[1px\\]{border-bottom-style:var(--tw-border-style);border-bottom-width:1px}.scalar-app .border-l{border-left-style:var(--tw-border-style);border-left-width:var(--scalar-border-width)}.scalar-app .border-l-0{border-left-style:var(--tw-border-style);border-left-width:0}.scalar-app .\\!border-none{--tw-border-style:none!important;border-style:none!important}.scalar-app .border-dashed{--tw-border-style:dashed;border-style:dashed}.scalar-app .border-none{--tw-border-style:none;border-style:none}.scalar-app .\\!border-current{border-color:currentColor!important}.scalar-app .border-c-1{border-color:var(--scalar-color-1)}.scalar-app .border-c-3{border-color:var(--scalar-color-3)}.scalar-app 
.border-c-danger{border-color:var(--scalar-color-danger)}.scalar-app .border-transparent{border-color:#0000}.scalar-app .border-r-transparent{border-right-color:#0000}.scalar-app .bg-b-1{background-color:var(--scalar-background-1)}.scalar-app .bg-b-2{background-color:var(--scalar-background-2)}.scalar-app .bg-b-3{background-color:var(--scalar-background-3)}.scalar-app .bg-b-danger{background-color:var(--scalar-background-danger)}.scalar-app .bg-c-3\\/5{background-color:var(--scalar-color-3)}@supports (color:color-mix(in lab,red,red)){.scalar-app .bg-c-3\\/5{background-color:var(--scalar-color-3)}@supports (color:color-mix(in lab,red,red)){.scalar-app .bg-c-3\\/5{background-color:color-mix(in oklab,var(--scalar-color-3)5%,transparent)}}}.scalar-app .bg-c-accent{background-color:var(--scalar-color-accent)}.scalar-app .bg-current{background-color:currentColor}.scalar-app .bg-grey{background-color:var(--scalar-color-3)}.scalar-app .bg-sidebar-b-1{background-color:var(--scalar-sidebar-background-1,var(--scalar-background-1))}.scalar-app .bg-sidebar-b-active{background-color:var(--scalar-sidebar-item-active-background,var(--scalar-background-2))}.scalar-app .bg-none{background-image:none}.scalar-app .fill-current{fill:currentColor}.scalar-app .stroke-2{stroke-width:2px}.scalar-app .stroke-\\[1\\.5\\]{stroke-width:1.5px}.scalar-app .stroke-\\[1\\.75\\]{stroke-width:1.75px}.scalar-app .stroke-\\[2\\.25\\]{stroke-width:2.25px}.scalar-app .object-contain{object-fit:contain}.scalar-app .\\!p-0{padding:0!important}.scalar-app .p-0{padding:0}.scalar-app .p-0\\.5{padding:2px}.scalar-app .p-0\\.75{padding:3px}.scalar-app .p-1{padding:4px}.scalar-app .p-1\\.5{padding:6px}.scalar-app .p-1\\.25{padding:5px}.scalar-app .p-1\\.75{padding:7px}.scalar-app .p-2{padding:8px}.scalar-app .p-3{padding:12px}.scalar-app .p-4{padding:16px}.scalar-app .p-\\[3px\\]{padding:3px}.scalar-app .p-\\[5px\\]{padding:5px}.scalar-app .p-px{padding:1px}.scalar-app 
.\\!px-3{padding-inline:12px!important}.scalar-app .px-0{padding-inline:0}.scalar-app .px-0\\.5{padding-inline:2px}.scalar-app .px-0\\.75{padding-inline:3px}.scalar-app .px-1{padding-inline:4px}.scalar-app .px-1\\.5{padding-inline:6px}.scalar-app .px-1\\.25{padding-inline:5px}.scalar-app .px-2{padding-inline:8px}.scalar-app .px-2\\.5{padding-inline:10px}.scalar-app .px-3{padding-inline:12px}.scalar-app .px-4{padding-inline:16px}.scalar-app .px-5{padding-inline:20px}.scalar-app .px-6{padding-inline:24px}.scalar-app .px-8{padding-inline:32px}.scalar-app .\\!py-1\\.5{padding-block:6px!important}.scalar-app .py-0{padding-block:0}.scalar-app .py-0\\.5{padding-block:2px}.scalar-app .py-0\\.25{padding-block:1px}.scalar-app .py-0\\.75{padding-block:3px}.scalar-app .py-1{padding-block:4px}.scalar-app .py-1\\.5{padding-block:6px}.scalar-app .py-1\\.25{padding-block:5px}.scalar-app .py-1\\.75{padding-block:7px}.scalar-app .py-2{padding-block:8px}.scalar-app .py-2\\.5{padding-block:10px}.scalar-app .py-3{padding-block:12px}.scalar-app .py-5{padding-block:20px}.scalar-app .py-8{padding-block:32px}.scalar-app .py-px{padding-block:1px}.scalar-app .\\!pt-0{padding-top:0!important}.scalar-app .pt-0{padding-top:0}.scalar-app .pt-2{padding-top:8px}.scalar-app .pt-3{padding-top:12px}.scalar-app .pt-4{padding-top:16px}.scalar-app .pt-6{padding-top:24px}.scalar-app .pt-8{padding-top:32px}.scalar-app .pt-px{padding-top:1px}.scalar-app .pr-0{padding-right:0}.scalar-app .pr-0\\.75{padding-right:3px}.scalar-app .pr-1{padding-right:4px}.scalar-app .pr-1\\.5{padding-right:6px}.scalar-app .pr-2{padding-right:8px}.scalar-app .pr-2\\.5{padding-right:10px}.scalar-app .pr-2\\.25{padding-right:9px}.scalar-app .pr-3{padding-right:12px}.scalar-app .pr-6{padding-right:24px}.scalar-app .pr-8{padding-right:32px}.scalar-app .pr-9{padding-right:36px}.scalar-app .pr-10{padding-right:40px}.scalar-app .pr-12{padding-right:48px}.scalar-app .pr-\\[26px\\]{padding-right:26px}.scalar-app 
.pb-0{padding-bottom:0}.scalar-app .pb-1\\.5{padding-bottom:6px}.scalar-app .pb-2{padding-bottom:8px}.scalar-app .pb-3{padding-bottom:12px}.scalar-app .pb-5{padding-bottom:20px}.scalar-app .pb-6{padding-bottom:24px}.scalar-app .pb-8{padding-bottom:32px}.scalar-app .pb-14{padding-bottom:56px}.scalar-app .pb-\\[75px\\]{padding-bottom:75px}.scalar-app .\\!pl-3{padding-left:12px!important}.scalar-app .pl-1{padding-left:4px}.scalar-app .pl-1\\.5{padding-left:6px}.scalar-app .pl-1\\.25{padding-left:5px}.scalar-app .pl-2{padding-left:8px}.scalar-app .pl-3{padding-left:12px}.scalar-app .pl-5{padding-left:20px}.scalar-app .pl-6{padding-left:24px}.scalar-app .pl-8\\.5{padding-left:34px}.scalar-app .pl-9{padding-left:36px}.scalar-app .pl-12{padding-left:48px}.scalar-app .pl-px{padding-left:1px}.scalar-app .text-center{text-align:center}.scalar-app .text-left{text-align:left}.scalar-app .text-right{text-align:right}.scalar-app .font-code{font-family:var(--scalar-font-code)}.scalar-app .font-sans{font-family:var(--scalar-font)}.scalar-app .text-3xs{font-size:var(--scalar-font-size-7)}.scalar-app .text-\\[6px\\]{font-size:6px}.scalar-app .text-\\[11px\\]{font-size:11px}.scalar-app .text-\\[21px\\]{font-size:21px}.scalar-app .text-base{font-size:var(--scalar-font-size-3)}.scalar-app .text-sm{font-size:var(--scalar-font-size-4)}.scalar-app .text-xl{font-size:var(--scalar-font-size-1)}.scalar-app .text-xs{font-size:var(--scalar-font-size-5)}.scalar-app .text-xxs{font-size:var(--scalar-font-size-6)}.scalar-app .\\!leading-\\[6px\\]{--tw-leading:6px!important;line-height:6px!important}.scalar-app .leading-2{--tw-leading:var(--scalar-line-height-2);line-height:var(--scalar-line-height-2)}.scalar-app .leading-3{--tw-leading:var(--scalar-line-height-3);line-height:var(--scalar-line-height-3)}.scalar-app .leading-\\[1\\.44\\]{--tw-leading:1.44;line-height:1.44}.scalar-app .leading-\\[7px\\]{--tw-leading:7px;line-height:7px}.scalar-app 
.leading-\\[20px\\]{--tw-leading:20px;line-height:20px}.scalar-app .leading-\\[21px\\]{--tw-leading:21px;line-height:21px}.scalar-app .leading-\\[22px\\]{--tw-leading:22px;line-height:22px}.scalar-app .leading-\\[normal\\]{--tw-leading:normal;line-height:normal}.scalar-app .leading-none{--tw-leading:1;line-height:1}.scalar-app .leading-normal{--tw-leading:var(--leading-normal);line-height:var(--leading-normal)}.scalar-app .leading-snug{--tw-leading:var(--leading-snug);line-height:var(--leading-snug)}.scalar-app .font-bold{--tw-font-weight:var(--scalar-bold);font-weight:var(--scalar-bold)}.scalar-app .font-medium{--tw-font-weight:var(--scalar-semibold);font-weight:var(--scalar-semibold)}.scalar-app .font-normal{--tw-font-weight:var(--scalar-regular);font-weight:var(--scalar-regular)}.scalar-app .text-balance{text-wrap:balance}.scalar-app .text-pretty{text-wrap:pretty}.scalar-app .break-words{overflow-wrap:break-word}.scalar-app .break-all{word-break:break-all}.scalar-app .text-ellipsis{text-overflow:ellipsis}.scalar-app .whitespace-nowrap{white-space:nowrap}.scalar-app .whitespace-pre{white-space:pre}.scalar-app .whitespace-pre-wrap{white-space:pre-wrap}.scalar-app .\\!text-c-1{color:var(--scalar-color-1)!important}.scalar-app .text-b-1{color:var(--scalar-background-1)}.scalar-app .text-blue{color:var(--scalar-color-blue)}.scalar-app .text-border{color:var(--scalar-border-color)}.scalar-app .text-c-1{color:var(--scalar-color-1)}.scalar-app .text-c-2{color:var(--scalar-color-2)}.scalar-app .text-c-3{color:var(--scalar-color-3)}.scalar-app .text-c-btn{color:var(--scalar-button-1-color)}.scalar-app .text-c-danger{color:var(--scalar-color-danger)}.scalar-app .text-green{color:var(--scalar-color-green)}.scalar-app .text-grey{color:var(--scalar-color-3)}.scalar-app .text-orange{color:var(--scalar-color-orange)}.scalar-app .text-purple{color:var(--scalar-color-purple)}.scalar-app .text-red{color:var(--scalar-color-red)}.scalar-app 
.text-sidebar-c-2{color:var(--scalar-sidebar-color-2,var(--scalar-color-2))}.scalar-app .text-sidebar-c-active{color:var(--scalar-sidebar-color-active,var(--scalar-sidebar-color-1))}.scalar-app .text-transparent{color:#0000}.scalar-app .text-yellow{color:var(--scalar-color-yellow)}.scalar-app .capitalize{text-transform:capitalize}.scalar-app .lowercase{text-transform:lowercase}.scalar-app .uppercase{text-transform:uppercase}.scalar-app .line-through{text-decoration-line:line-through}.scalar-app .no-underline{text-decoration-line:none}.scalar-app .underline{text-decoration-line:underline}.scalar-app .decoration-c-3{-webkit-text-decoration-color:var(--scalar-color-3);text-decoration-color:var(--scalar-color-3)}.scalar-app .underline-offset-2{text-underline-offset:2px}.scalar-app .opacity-0{opacity:0}.scalar-app .opacity-50{opacity:.5}.scalar-app .opacity-100{opacity:1}.scalar-app .bg-blend-normal{background-blend-mode:normal}.scalar-app .mix-blend-luminosity{mix-blend-mode:luminosity}.scalar-app .shadow{--tw-shadow:var(--scalar-shadow-1);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.scalar-app .shadow-\\[-8px_0_4px_var\\(--scalar-background-1\\)\\]{--tw-shadow:-8px 0 4px var(--tw-shadow-color,var(--scalar-background-1));box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.scalar-app .shadow-\\[0_-8px_0_8px_var\\(--scalar-background-1\\)\\,0_0_8px_8px_var\\(--scalar-background-1\\)\\]{--tw-shadow:0 -8px 0 8px var(--tw-shadow-color,var(--scalar-background-1)),0 0 8px 8px var(--tw-shadow-color,var(--scalar-background-1));box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.scalar-app .shadow-border{--tw-shadow:inset 0 0 0 
var(--tw-shadow-color,calc(var(--scalar-border-width)*2))var(--scalar-border-color);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.scalar-app .shadow-lg{--tw-shadow:var(--scalar-shadow-2);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.scalar-app .shadow-none{--tw-shadow:0 0 #0000;box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.scalar-app .outline{outline-style:var(--tw-outline-style);outline-width:1px}.scalar-app .-outline-offset-1{outline-offset:-1px}.scalar-app .-outline-offset-2{outline-offset:-2px}.scalar-app .outline-offset-2{outline-offset:2px}.scalar-app .outline-b-3{outline-color:var(--scalar-background-3)}.scalar-app .outline-c-danger{outline-color:var(--scalar-color-danger)}.scalar-app .blur{--tw-blur:blur(8px);filter:var(--tw-blur,)var(--tw-brightness,)var(--tw-contrast,)var(--tw-grayscale,)var(--tw-hue-rotate,)var(--tw-invert,)var(--tw-saturate,)var(--tw-sepia,)var(--tw-drop-shadow,)}.scalar-app .brightness-90{--tw-brightness:brightness(90%);filter:var(--tw-blur,)var(--tw-brightness,)var(--tw-contrast,)var(--tw-grayscale,)var(--tw-hue-rotate,)var(--tw-invert,)var(--tw-saturate,)var(--tw-sepia,)var(--tw-drop-shadow,)}.scalar-app .brightness-\\[\\.9\\]{--tw-brightness:brightness(.9);filter:var(--tw-blur,)var(--tw-brightness,)var(--tw-contrast,)var(--tw-grayscale,)var(--tw-hue-rotate,)var(--tw-invert,)var(--tw-saturate,)var(--tw-sepia,)var(--tw-drop-shadow,)}.scalar-app .brightness-lifted{--tw-brightness:brightness(var(--scalar-lifted-brightness));filter:var(--tw-blur,)var(--tw-brightness,)var(--tw-contrast,)var(--tw-grayscale,)var(--tw-hue-rotate,)var(--tw-invert,)var(--tw-saturate,)var(--tw-sepia,)var(--tw-drop-shadow,)}.scalar-app 
.filter{filter:var(--tw-blur,)var(--tw-brightness,)var(--tw-contrast,)var(--tw-grayscale,)var(--tw-hue-rotate,)var(--tw-invert,)var(--tw-saturate,)var(--tw-sepia,)var(--tw-drop-shadow,)}.scalar-app .backdrop-filter{-webkit-backdrop-filter:var(--tw-backdrop-blur,)var(--tw-backdrop-brightness,)var(--tw-backdrop-contrast,)var(--tw-backdrop-grayscale,)var(--tw-backdrop-hue-rotate,)var(--tw-backdrop-invert,)var(--tw-backdrop-opacity,)var(--tw-backdrop-saturate,)var(--tw-backdrop-sepia,);backdrop-filter:var(--tw-backdrop-blur,)var(--tw-backdrop-brightness,)var(--tw-backdrop-contrast,)var(--tw-backdrop-grayscale,)var(--tw-backdrop-hue-rotate,)var(--tw-backdrop-invert,)var(--tw-backdrop-opacity,)var(--tw-backdrop-saturate,)var(--tw-backdrop-sepia,)}.scalar-app .transition{transition-property:color,background-color,border-color,outline-color,text-decoration-color,fill,stroke,--tw-gradient-from,--tw-gradient-via,--tw-gradient-to,opacity,box-shadow,transform,translate,scale,rotate,filter,-webkit-backdrop-filter,backdrop-filter,display,content-visibility,overlay,pointer-events;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.scalar-app .transition-colors{transition-property:color,background-color,border-color,outline-color,text-decoration-color,fill,stroke,--tw-gradient-from,--tw-gradient-via,--tw-gradient-to;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.scalar-app .transition-opacity{transition-property:opacity;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.scalar-app 
.transition-transform{transition-property:transform,translate,scale,rotate;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.scalar-app .transition-none{transition-property:none}.scalar-app .duration-100{--tw-duration:.1s;transition-duration:.1s}.scalar-app .duration-150{--tw-duration:.15s;transition-duration:.15s}.scalar-app .duration-200{--tw-duration:.2s;transition-duration:.2s}.scalar-app .duration-300{--tw-duration:.3s;transition-duration:.3s}.scalar-app .ease-in-out{--tw-ease:var(--ease-in-out);transition-timing-function:var(--ease-in-out)}.scalar-app .outline-none{--tw-outline-style:none;outline-style:none}.scalar-app .select-none{-webkit-user-select:none;user-select:none}.scalar-app .\\[--scalar-address-bar-height\\:32px\\]{--scalar-address-bar-height:32px}.scalar-app .app-drag-region{-webkit-app-region:drag}.scalar-app .app-no-drag-region{-webkit-app-region:no-drag}:is(.scalar-app .\\*\\:flex>*){display:flex}:is(.scalar-app .\\*\\:h-8>*){height:32px}:is(.scalar-app .\\*\\:cursor-pointer>*){cursor:pointer}:is(.scalar-app .\\*\\:items-center>*){align-items:center}:is(.scalar-app .\\*\\:rounded-none>*){border-radius:0}:is(.scalar-app .\\*\\:border-t>*){border-top-style:var(--tw-border-style);border-top-width:var(--scalar-border-width)}:is(.scalar-app .\\*\\:border-b-0>*){border-bottom-style:var(--tw-border-style);border-bottom-width:0}:is(.scalar-app .\\*\\:px-1\\.5>*){padding-inline:6px}:is(.scalar-app .\\*\\:pl-4>*){padding-left:16px}.scalar-app .group-first\\/row\\:border-t-0:is(:where(.group\\/row):first-child *){border-top-style:var(--tw-border-style);border-top-width:0}.scalar-app .group-last\\:border-b-transparent:is(:where(.group):last-child *){border-bottom-color:#0000}.scalar-app .group-last\\/label\\:rounded-br-lg:is(:where(.group\\/label):last-child *){border-bottom-right-radius:var(--scalar-radius-lg)}.scalar-app 
.group-focus-within\\:flex:is(:where(.group):focus-within *){display:flex}@media(hover:hover){.scalar-app .group-hover\\:block:is(:where(.group):hover *){display:block}.scalar-app .group-hover\\:flex:is(:where(.group):hover *){display:flex}.scalar-app .group-hover\\:hidden:is(:where(.group):hover *){display:none}.scalar-app .group-hover\\:inline:is(:where(.group):hover *){display:inline}.scalar-app .group-hover\\:pr-5:is(:where(.group):hover *){padding-right:20px}.scalar-app .group-hover\\:pr-6:is(:where(.group):hover *){padding-right:24px}.scalar-app .group-hover\\:pr-10:is(:where(.group):hover *){padding-right:40px}.scalar-app .group-hover\\:text-c-1:is(:where(.group):hover *){color:var(--scalar-color-1)}.scalar-app .group-hover\\:opacity-80:is(:where(.group):hover *){opacity:.8}.scalar-app .group-hover\\:opacity-100:is(:where(.group):hover *){opacity:1}.scalar-app .group-hover\\/auth\\:absolute:is(:where(.group\\/auth):hover *){position:absolute}.scalar-app .group-hover\\/auth\\:h-auto:is(:where(.group\\/auth):hover *){height:auto}.scalar-app .group-hover\\/auth\\:border-b:is(:where(.group\\/auth):hover *){border-bottom-style:var(--tw-border-style);border-bottom-width:var(--scalar-border-width)}.scalar-app .group-hover\\/cell\\:opacity-100:is(:where(.group\\/cell):hover *){opacity:1}.scalar-app .group-hover\\/item\\:flex:is(:where(.group\\/item):hover *){display:flex}.scalar-app .group-hover\\/item\\:opacity-100:is(:where(.group\\/item):hover *),.scalar-app .group-hover\\/params\\:opacity-100:is(:where(.group\\/params):hover *){opacity:1}.scalar-app .group-hover\\/row\\:flex:is(:where(.group\\/row):hover *){display:flex}.scalar-app .group-hover\\/scopes-accordion\\:text-c-2:is(:where(.group\\/scopes-accordion):hover *){color:var(--scalar-color-2)}.scalar-app .group-hover\\/upload\\:block:is(:where(.group\\/upload):hover *){display:block}}.scalar-app .group-focus-visible\\:opacity-100:is(:where(.group):focus-visible *){opacity:1}.scalar-app 
.group-focus-visible\\:outline:is(:where(.group):focus-visible *){outline-style:var(--tw-outline-style);outline-width:1px}.scalar-app .group-has-\\[\\.cm-focused\\]\\:z-1:is(:where(.group):has(.cm-focused) *){z-index:1}.scalar-app .group-has-\\[\\.cm-focused\\]\\:flex:is(:where(.group):has(.cm-focused) *){display:flex}.scalar-app .group-has-\\[\\.cm-focused\\]\\:pr-6:is(:where(.group):has(.cm-focused) *){padding-right:24px}.scalar-app .group-has-\\[\\.cm-focused\\]\\:pr-10:is(:where(.group):has(.cm-focused) *){padding-right:40px}.scalar-app .group-has-\\[\\:focus-visible\\]\\:hidden:is(:where(.group):has(:focus-visible) *){display:none}.scalar-app .group-has-\\[\\:focus-visible\\]\\:opacity-100:is(:where(.group):has(:focus-visible) *){opacity:1}.scalar-app .group-has-\\[\\:focus-visible\\]\\/cell\\:border-c-accent:is(:where(.group\\/cell):has(:focus-visible) *){border-color:var(--scalar-color-accent)}.scalar-app .group-has-\\[\\:focus-visible\\]\\/cell\\:opacity-100:is(:where(.group\\/cell):has(:focus-visible) *){opacity:1}.scalar-app .group-has-\\[\\:focus-visible\\]\\/input\\:block:is(:where(.group\\/input):has(:focus-visible) *){display:block}.scalar-app .group-has-\\[input\\]\\/label\\:mr-0:is(:where(.group\\/label):has(:is(input)) *){margin-right:0}.scalar-app .group-aria-expanded\\/button\\:rotate-180:is(:where(.group\\/button)[aria-expanded=true] *),.scalar-app .group-aria-expanded\\/combobox-button\\:rotate-180:is(:where(.group\\/combobox-button)[aria-expanded=true] *){rotate:180deg}.scalar-app .group-\\[\\.alert\\]\\:bg-b-alert:is(:where(.group).alert *){background-color:var(--scalar-background-alert)}.scalar-app .group-\\[\\.alert\\]\\:bg-transparent:is(:where(.group).alert *){background-color:#0000}.scalar-app .group-\\[\\.alert\\]\\:shadow-none:is(:where(.group).alert *){--tw-shadow:0 0 #0000;box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.scalar-app 
.group-\\[\\.alert\\]\\:outline-orange:is(:where(.group).alert *){outline-color:var(--scalar-color-orange)}.scalar-app .group-\\[\\.error\\]\\:bg-b-danger:is(:where(.group).error *){background-color:var(--scalar-background-danger)}.scalar-app .group-\\[\\.error\\]\\:bg-transparent:is(:where(.group).error *){background-color:#0000}.scalar-app .group-\\[\\.error\\]\\:text-red:is(:where(.group).error *){color:var(--scalar-color-red)}.scalar-app .group-\\[\\.error\\]\\:shadow-none:is(:where(.group).error *){--tw-shadow:0 0 #0000;box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.scalar-app .group-\\[\\.error\\]\\:outline-red:is(:where(.group).error *){outline-color:var(--scalar-color-red)}.scalar-app .peer-checked\\:text-c-1:is(:where(.peer):checked~*){color:var(--scalar-color-1)}.scalar-app .peer-has-\\[\\.cm-focused\\]\\:opacity-0:is(:where(.peer):has(.cm-focused)~*){opacity:0}.scalar-app .peer-has-\\[\\.color-selector\\]\\:hidden:is(:where(.peer):has(.color-selector)~*){display:none}.scalar-app .before\\:pointer-events-none:before{content:var(--tw-content);pointer-events:none}.scalar-app .before\\:absolute:before{content:var(--tw-content);position:absolute}.scalar-app .before\\:top-0:before{content:var(--tw-content);top:0}.scalar-app .before\\:left-3:before{content:var(--tw-content);left:12px}.scalar-app .before\\:left-\\[calc\\(\\.75rem_\\+_\\.5px\\)\\]:before{content:var(--tw-content);left:calc(.75rem + .5px)}.scalar-app .before\\:z-1:before{content:var(--tw-content);z-index:1}.scalar-app .before\\:h-\\[calc\\(100\\%_\\+_\\.5px\\)\\]:before{content:var(--tw-content);height:calc(100% + .5px)}.scalar-app .before\\:w-\\[\\.5px\\]:before{content:var(--tw-content);width:.5px}.scalar-app .before\\:bg-border:before{content:var(--tw-content);background-color:var(--scalar-border-color)}.scalar-app 
.after\\:content-\\[\\\'\\:\\\'\\]:after{--tw-content:":";content:var(--tw-content)}:is(.scalar-app .\\*\\:first\\:line-clamp-1>*):first-child{-webkit-line-clamp:1;-webkit-box-orient:vertical;display:-webkit-box;overflow:hidden}:is(.scalar-app .\\*\\:first\\:rounded-l>*):first-child{border-top-left-radius:var(--scalar-radius);border-bottom-left-radius:var(--scalar-radius)}:is(.scalar-app .\\*\\:first\\:border-t-0>*):first-child,:is(.scalar-app .first\\:\\*\\:border-t-0:first-child>*){border-top-style:var(--tw-border-style);border-top-width:0}:is(.scalar-app .\\*\\:first\\:text-ellipsis>*):first-child{text-overflow:ellipsis}@media(hover:hover){:is(.scalar-app .group-hover\\/auth\\:\\*\\:first\\:line-clamp-none:is(:where(.group\\/auth):hover *)>*):first-child{-webkit-line-clamp:unset;-webkit-box-orient:horizontal;display:block;overflow:visible}}.scalar-app .last\\:mb-0:last-child{margin-bottom:0}.scalar-app .last\\:rounded-b-lg:last-child{border-bottom-right-radius:var(--scalar-radius-lg);border-bottom-left-radius:var(--scalar-radius-lg)}.scalar-app .last\\:border-r-0:last-child{border-right-style:var(--tw-border-style);border-right-width:0}:is(.scalar-app .\\*\\:last\\:rounded-r>*):last-child{border-top-right-radius:var(--scalar-radius);border-bottom-right-radius:var(--scalar-radius)}.scalar-app .last\\:before\\:h-full:last-child:before{content:var(--tw-content);height:100%}.scalar-app .last-of-type\\:first-of-type\\:border-b-0:last-of-type:first-of-type{border-bottom-style:var(--tw-border-style);border-bottom-width:0}.scalar-app .focus-within\\:z-20:focus-within{z-index:20}.scalar-app .focus-within\\:border-\\(--scalar-background-3\\):focus-within{border-color:var(--scalar-background-3)}.scalar-app .focus-within\\:bg-b-1:focus-within{background-color:var(--scalar-background-1)}.scalar-app .focus-within\\:text-c-1:focus-within{color:var(--scalar-color-1)}@media(hover:hover){.scalar-app .hover\\:cursor-default:hover{cursor:default}.scalar-app 
.hover\\:border-\\(--scalar-background-3\\):hover{border-color:var(--scalar-background-3)}.scalar-app .hover\\:border-inherit:hover{border-color:inherit}.scalar-app .hover\\:bg-b-2:hover{background-color:var(--scalar-background-2)}.scalar-app .hover\\:bg-b-3:hover{background-color:var(--scalar-background-3)}.scalar-app .hover\\:bg-inherit:hover{background-color:inherit}.scalar-app .hover\\:bg-sidebar-b-active:hover{background-color:var(--scalar-sidebar-item-active-background,var(--scalar-background-2))}.scalar-app .hover\\:whitespace-normal:hover{white-space:normal}.scalar-app .hover\\:text-c-1:hover{color:var(--scalar-color-1)}.scalar-app .hover\\:text-c-2:hover{color:var(--scalar-color-2)}.scalar-app .hover\\:underline:hover{text-decoration-line:underline}.scalar-app .hover\\:brightness-75:hover{--tw-brightness:brightness(75%);filter:var(--tw-blur,)var(--tw-brightness,)var(--tw-contrast,)var(--tw-grayscale,)var(--tw-hue-rotate,)var(--tw-invert,)var(--tw-saturate,)var(--tw-sepia,)var(--tw-drop-shadow,)}}.scalar-app .focus\\:border-b-1:focus{border-bottom-style:var(--tw-border-style);border-bottom-width:1px;border-color:var(--scalar-background-1)}.scalar-app .focus\\:text-c-1:focus{color:var(--scalar-color-1)}.scalar-app .focus\\:outline-none:focus{--tw-outline-style:none;outline-style:none}.scalar-app .focus-visible\\:z-10:focus-visible{z-index:10}.scalar-app .active\\:text-c-1:active{color:var(--scalar-color-1)}.scalar-app .disabled\\:cursor-default:disabled{cursor:default}.scalar-app .disabled\\:text-c-2:disabled{color:var(--scalar-color-2)}.scalar-app .has-\\[\\.empty-sidebar-item\\]\\:border-t:has(.empty-sidebar-item){border-top-style:var(--tw-border-style);border-top-width:var(--scalar-border-width)}.scalar-app .has-\\[\\:focus-visible\\]\\:absolute:has(:focus-visible){position:absolute}.scalar-app .has-\\[\\:focus-visible\\]\\:z-1:has(:focus-visible){z-index:1}.scalar-app 
.has-\\[\\:focus-visible\\]\\:rounded-\\[4px\\]:has(:focus-visible){border-radius:4px}.scalar-app .has-\\[\\:focus-visible\\]\\:bg-b-1:has(:focus-visible){background-color:var(--scalar-background-1)}.scalar-app .has-\\[\\:focus-visible\\]\\:opacity-100:has(:focus-visible){opacity:1}.scalar-app .has-\\[\\:focus-visible\\]\\:outline:has(:focus-visible){outline-style:var(--tw-outline-style);outline-width:1px}@media not all and (min-width:800px){.scalar-app .max-md\\:absolute\\!{position:absolute!important}.scalar-app .max-md\\:w-full\\!{width:100%!important}}@media(min-width:600px){.scalar-app .sm\\:not-sr-only{clip-path:none;white-space:normal;width:auto;height:auto;margin:0;padding:0;position:static;overflow:visible}.scalar-app .sm\\:order-none{order:0}.scalar-app .sm\\:mr-1\\.5{margin-right:6px}.scalar-app .sm\\:mb-1\\.5{margin-bottom:6px}.scalar-app .sm\\:ml-1\\.5{margin-left:6px}.scalar-app .sm\\:flex{display:flex}.scalar-app .sm\\:hidden{display:none}.scalar-app .sm\\:max-w-max{max-width:max-content}.scalar-app .sm\\:min-w-max{min-width:max-content}.scalar-app .sm\\:flex-col{flex-direction:column}.scalar-app .sm\\:flex-row{flex-direction:row}.scalar-app .sm\\:justify-between{justify-content:space-between}.scalar-app .sm\\:gap-px{gap:1px}.scalar-app .sm\\:rounded{border-radius:var(--scalar-radius)}.scalar-app .sm\\:rounded-lg{border-radius:var(--scalar-radius-lg)}.scalar-app .sm\\:px-2{padding-inline:8px}.scalar-app .sm\\:px-3{padding-inline:12px}.scalar-app .sm\\:py-1\\.5{padding-block:6px}:is(.scalar-app .sm\\:\\*\\:rounded-lg>*){border-radius:var(--scalar-radius-lg)}}@media(min-width:800px){.scalar-app .md\\:right-10{right:40px}.scalar-app .md\\:bottom-10{bottom:40px}.scalar-app .md\\:mx-auto{margin-inline:auto}.scalar-app .md\\:-ml-1\\.25{margin-left:-5px}.scalar-app .md\\:ml-1\\.5{margin-left:6px}.scalar-app .md\\:block{display:block}.scalar-app .md\\:flex{display:flex}.scalar-app .md\\:grid{display:grid}.scalar-app .md\\:w-full{width:100%}.scalar-app 
.md\\:max-w-\\[720px\\]{max-width:720px}.scalar-app .md\\:min-w-fit{min-width:fit-content}.scalar-app .md\\:flex-none{flex:none}.scalar-app .md\\:translate-x-0{--tw-translate-x:0px;translate:var(--tw-translate-x)var(--tw-translate-y)}.scalar-app .md\\:translate-y-0{--tw-translate-y:0px;translate:var(--tw-translate-x)var(--tw-translate-y)}.scalar-app .md\\:grid-cols-\\[1fr_720px_1fr\\]{grid-template-columns:1fr 720px 1fr}.scalar-app .md\\:flex-row{flex-direction:row}.scalar-app .md\\:border-r{border-right-style:var(--tw-border-style);border-right-width:var(--scalar-border-width)}.scalar-app .md\\:border-b-0{border-bottom-style:var(--tw-border-style);border-bottom-width:0}.scalar-app .md\\:p-1\\.5{padding:6px}.scalar-app .md\\:px-0{padding-inline:0}.scalar-app .md\\:px-1\\.5{padding-inline:6px}.scalar-app .md\\:px-2{padding-inline:8px}.scalar-app .md\\:px-2\\.5{padding-inline:10px}.scalar-app .md\\:px-4{padding-inline:16px}.scalar-app .md\\:px-\\[18px\\]{padding-inline:18px}.scalar-app .md\\:py-2\\.5{padding-block:10px}.scalar-app .md\\:pb-2\\.5{padding-bottom:10px}.scalar-app .md\\:pb-\\[37px\\]{padding-bottom:37px}.scalar-app .md\\:pl-0{padding-left:0}:is(.scalar-app .md\\:\\*\\:border-t-0>*){border-top-style:var(--tw-border-style);border-top-width:0}}@media(min-width:1000px){.scalar-app .lg\\:order-none{order:0}.scalar-app .lg\\:-mr-1{margin-right:-4px}.scalar-app .lg\\:mb-0{margin-bottom:0}.scalar-app .lg\\:flex{display:flex}.scalar-app .lg\\:min-h-header{min-height:48px}.scalar-app .lg\\:w-auto{width:auto}.scalar-app .lg\\:max-w-\\[580px\\]{max-width:580px}.scalar-app .lg\\:min-w-\\[580px\\]{min-width:580px}.scalar-app .lg\\:flex-1{flex:1}.scalar-app .lg\\:p-0{padding:0}.scalar-app .lg\\:p-1{padding:4px}.scalar-app .lg\\:px-1{padding-inline:4px}.scalar-app .lg\\:px-2\\.5{padding-inline:10px}.scalar-app .lg\\:pt-1{padding-top:4px}.scalar-app .lg\\:pr-24{padding-right:96px}}@media(min-width:1200px){.scalar-app .xl\\:\\!flex{display:flex!important}.scalar-app 
.xl\\:flex{display:flex}.scalar-app .xl\\:hidden{display:none}.scalar-app .xl\\:h-fit{height:fit-content}.scalar-app .xl\\:h-full{height:100%}.scalar-app .xl\\:min-h-header{min-height:48px}.scalar-app .xl\\:max-w-\\[720px\\]{max-width:720px}.scalar-app .xl\\:min-w-0{min-width:0}.scalar-app .xl\\:min-w-\\[720px\\]{min-width:720px}.scalar-app .xl\\:flex-row{flex-direction:row}.scalar-app .xl\\:overflow-auto{overflow:auto}.scalar-app .xl\\:overflow-hidden{overflow:hidden}.scalar-app .xl\\:rounded-none{border-radius:0}.scalar-app .xl\\:pr-0\\.5{padding-right:2px}.scalar-app .xl\\:pl-2{padding-left:8px}:is(.scalar-app .\\*\\:xl\\:border-t-0>*){border-top-style:var(--tw-border-style);border-top-width:0}:is(.scalar-app .\\*\\:xl\\:border-l>*){border-left-style:var(--tw-border-style);border-left-width:var(--scalar-border-width)}:is(.scalar-app .\\*\\:first\\:xl\\:border-l-0>*):first-child{border-left-style:var(--tw-border-style);border-left-width:0}}.scalar-app .dark\\:bg-b-2:where(.dark-mode,.dark-mode *){background-color:var(--scalar-background-2)}@media(hover:hover){.scalar-app .hover\\:dark\\:bg-b-2:hover:where(.dark-mode,.dark-mode *){background-color:var(--scalar-background-2)}}.scalar-app .ui-open\\:rotate-90[data-headlessui-state~=open],:where([data-headlessui-state~=open]) :is(.scalar-app .ui-open\\:rotate-90){rotate:90deg}.scalar-app .ui-open\\:rotate-180[data-headlessui-state~=open],:where([data-headlessui-state~=open]) :is(.scalar-app .ui-open\\:rotate-180){rotate:180deg}.scalar-app .last\\:ui-open\\:border-b-0:last-child[data-headlessui-state~=open],:where([data-headlessui-state~=open]) .scalar-app .last\\:ui-open\\:border-b-0:last-child{border-bottom-style:var(--tw-border-style);border-bottom-width:0}.scalar-app .ui-not-open\\:hidden[data-headlessui-state]:not([data-headlessui-state~=open]),:where([data-headlessui-state]:not([data-headlessui-state~=open])) :is(.scalar-app .ui-not-open\\:hidden):not([data-headlessui-state]){display:none}.scalar-app 
.ui-not-open\\:rotate-0[data-headlessui-state]:not([data-headlessui-state~=open]),:where([data-headlessui-state]:not([data-headlessui-state~=open])) :is(.scalar-app .ui-not-open\\:rotate-0):not([data-headlessui-state]){rotate:none}.scalar-app .ui-checked\\:bg-b-3[data-headlessui-state~=checked],:where([data-headlessui-state~=checked]) :is(.scalar-app .ui-checked\\:bg-b-3){background-color:var(--scalar-background-3)}.scalar-app .ui-active\\:bg-b-2[data-headlessui-state~=active],:where([data-headlessui-state~=active]) :is(.scalar-app .ui-active\\:bg-b-2),:is(.scalar-app .ui-active\\:\\*\\:bg-b-2[data-headlessui-state~=active]>*),:is(:where([data-headlessui-state~=active]) :is(.scalar-app .ui-active\\:\\*\\:bg-b-2)>*){background-color:var(--scalar-background-2)}@media(max-width:720px)and (max-height:480px){.scalar-app .zoomed\\:static{position:static}.scalar-app .zoomed\\:p-1{padding:4px}}.app-platform-mac :is(.scalar-app .mac\\:pl-\\[72px\\]){padding-left:72px}@property --tw-scale-x{syntax:"*";inherits:false;initial-value:1}@property --tw-scale-y{syntax:"*";inherits:false;initial-value:1}@property --tw-scale-z{syntax:"*";inherits:false;initial-value:1}@property --tw-space-x-reverse{syntax:"*";inherits:false;initial-value:0}.nav-item[data-v-507381a3]{cursor:pointer;border-radius:var(--scalar-radius-lg);background:var(--scalar-background-3);border:var(--scalar-border-width)solid var(--scalar-background-2);color:var(--scalar-color-3);flex:1;justify-content:center;align-items:center;min-width:0;padding:4.5px;display:flex;position:relative;overflow:hidden}.dark-mode .nav-item[data-v-507381a3]{background:var(--scalar-background-2)}@supports (color:color-mix(in lab,red,red)){.dark-mode .nav-item[data-v-507381a3]{background:color-mix(in srgb,var(--scalar-background-2),transparent)}}.nav-item-icon-copy[data-v-507381a3]{white-space:nowrap;max-width:100%;-webkit-mask-image:linear-gradient(to left,transparent 0,var(--scalar-background-2)20px);mask-image:linear-gradient(to 
left,transparent 0,var(--scalar-background-2)20px);overflow:hidden}.nav-item:hover .nav-item-icon-copy[data-v-507381a3]{-webkit-mask-image:linear-gradient(to left,transparent 20px,var(--scalar-background-2)40px);mask-image:linear-gradient(to left,transparent 20px,var(--scalar-background-2)40px)}.nav-item-copy[data-v-507381a3]{max-width:calc(100% - 20px)}.nav-item[data-v-507381a3]:hover{color:var(--scalar-color-1)}.nav-item__active[data-v-507381a3]{background-color:var(--scalar-background-1);color:var(--scalar-color-1);border-color:var(--scalar-border-color)}.dark-mode .nav-item__active[data-v-507381a3]{background-color:var(--scalar-background-2)}.nav-item-close[data-v-507381a3]{border-radius:var(--scalar-radius);stroke-width:1.5px;max-width:20px;color:var(--scalar-color-3);opacity:0;background:0 0;margin-left:-20px;padding:2px;position:absolute;right:3px}.nav-item:hover .nav-item-close[data-v-507381a3]{opacity:1}.nav-item-close[data-v-507381a3]:hover{background-color:var(--scalar-background-4)}.nav-item__active .nav-item-close[data-v-507381a3]:hover{background-color:var(--scalar-background-2)}.download-app-button[data-v-cb45fa05]{box-shadow:0 0 0 .5px var(--scalar-border-color);background:linear-gradient(#ffffffbf,#00000009)}.dark-mode .download-app-button[data-v-cb45fa05]{background:linear-gradient(#ffffff1a,#00000026)}.download-app-button[data-v-cb45fa05]:hover{background:linear-gradient(#00000009,#ffffffbf)}.dark-mode .download-app-button[data-v-cb45fa05]:hover{background:linear-gradient(#00000026,#ffffff1a)}.http-bg-gradient[data-v-076b14a1]{background:linear-gradient(#ffffffbf,#00000009)}.http-bg-gradient[data-v-076b14a1]:hover{background:linear-gradient(#00000009,#ffffffbf)}.dark-mode .http-bg-gradient[data-v-076b14a1]{background:linear-gradient(#ffffff09,#00000026)}.dark-mode .http-bg-gradient[data-v-076b14a1]:hover{background:linear-gradient(#00000026,#ffffff09)}.scroll-timeline-x[data-v-e0578855]{scroll-timeline:--scroll-timeline 
x;scroll-timeline:--scroll-timeline horizontal;-ms-overflow-style:none;scrollbar-width:none;overflow:auto}.commandmenu[data-v-dd90fe74]{box-shadow:var(--scalar-shadow-2);border-radius:var(--scalar-radius-lg);background-color:var(--scalar-background-1);opacity:0;width:100%;max-width:580px;max-height:60dvh;margin:12px;animation:.3s ease-in-out .1s forwards fadeincommandmenu-dd90fe74;position:fixed;top:150px;left:50%;transform:translate(-50%,10px)}.commandmenu-overlay[data-v-dd90fe74]{cursor:pointer;background:#0003;animation:.3s ease-in-out forwards fadeincommand-dd90fe74;position:fixed;inset:0}@keyframes fadeincommand-dd90fe74{0%{opacity:0}to{opacity:1}}@keyframes fadeincommandmenu-dd90fe74{0%{opacity:0;transform:translate(-50%,10px)}to{opacity:1;transform:translate(-50%)}}.scalar .scalar-app-layout[data-v-45e9730e]{background:var(--scalar-background-1);opacity:0;border:var(--scalar-border-width)solid var(--scalar-border-color);border-radius:8px;width:100%;max-width:1390px;height:calc(100% - 120px);margin:auto;animation:.35s forwards scalarapiclientfadein-45e9730e;position:relative;overflow:hidden}@media(max-width:720px)and (max-height:480px){.scalar .scalar-app-layout[data-v-45e9730e]{height:100%;max-height:90svh}}@keyframes scalarapiclientfadein-45e9730e{0%{opacity:0}to{opacity:1}}.scalar .scalar-app-exit[data-v-45e9730e]{cursor:pointer;z-index:-1;background:#00000038;width:100vw;height:100vh;transition:all .3s ease-in-out;animation:.35s forwards scalardrawerexitfadein-45e9730e;position:fixed;top:0;left:0}.dark-mode .scalar .scalar-app-exit[data-v-45e9730e]{background:#00000073}.scalar .scalar-app-exit[data-v-45e9730e]:before{text-align:center;color:#fff;opacity:.6;font-family:sans-serif;font-size:30px;font-weight:100;line-height:50px;position:absolute;top:0;right:12px}.scalar .scalar-app-exit[data-v-45e9730e]:hover:before{opacity:1}@keyframes 
scalardrawerexitfadein-45e9730e{0%{opacity:0}to{opacity:1}}.scalar-container[data-v-45e9730e]{visibility:visible;z-index:10000;justify-content:center;align-items:center;width:100%;height:100%;display:flex;position:fixed;top:0;bottom:0;left:0;overflow:hidden}.scalar .url-form-input[data-v-45e9730e]{min-height:auto!important}.scalar .scalar-container[data-v-45e9730e]{line-height:normal}.scalar .scalar-app-header span[data-v-45e9730e]{color:var(--scalar-color-3)}.scalar .scalar-app-header a[data-v-45e9730e]{color:var(--scalar-color-1)}.scalar .scalar-app-header a[data-v-45e9730e]:hover{text-decoration:underline}.scalar-activate[data-v-45e9730e]{cursor:pointer;align-items:center;gap:6px;width:fit-content;margin:0 .75rem .75rem auto;font-size:.875rem;font-weight:600;line-height:24px;display:flex}.scalar-activate-button[data-v-45e9730e]{color:var(--scalar-color-blue);appearance:none;background:0 0;border:none;outline:none;align-items:center;gap:6px;padding:0 .5rem;display:flex}.scalar-activate:hover .scalar-activate-button[data-v-45e9730e]{background:var(--scalar-background-3);border-radius:3px}.open-api-client-button[data-v-f016469d]{cursor:pointer;text-align:center;white-space:nowrap;width:100%;height:31px;font-size:var(--scalar-mini);font-weight:var(--scalar-semibold);border-radius:var(--scalar-radius);box-shadow:0 0 0 .5px var(--scalar-border-color);color:var(--scalar-sidebar-color-1);justify-content:center;align-items:center;gap:6px;padding:9px 12px;line-height:1.385;text-decoration:none;display:flex}.open-api-client-button[data-v-f016469d]:hover{background:var(--scalar-sidebar-item-hover-background,var(--scalar-background-2))}[data-v-103d9d56] .cm-editor{background:0 0;outline:none;height:100%;padding:0}[data-v-103d9d56] .cm-placeholder{color:var(--scalar-color-3)}[data-v-103d9d56] .cm-content{font-family:var(--scalar-font-code);font-size:var(--scalar-small);max-height:20px;padding:8px 0}[data-v-103d9d56] 
.cm-tooltip{filter:brightness(var(--scalar-lifted-brightness));border-radius:var(--scalar-radius);box-shadow:var(--scalar-shadow-2);background:0 0!important;border:none!important;outline:none!important;overflow:hidden!important}[data-v-103d9d56] .cm-tooltip-autocomplete ul li{padding:3px 6px!important}[data-v-103d9d56] .cm-completionIcon-type:after{color:var(--scalar-color-3)!important}[data-v-103d9d56] .cm-tooltip-autocomplete ul li[aria-selected]{background:var(--scalar-background-2)!important;color:var(--scalar-color-1)!important}[data-v-103d9d56] .cm-tooltip-autocomplete ul{position:relative;padding:6px!important}[data-v-103d9d56] .cm-tooltip-autocomplete ul li:hover{border-radius:3px;color:var(--scalar-color-1)!important;background:var(--scalar-background-3)!important}[data-v-103d9d56] .cm-activeLine,[data-v-103d9d56] .cm-activeLineGutter{background-color:#0000}[data-v-103d9d56] .cm-selectionMatch,[data-v-103d9d56] .cm-matchingBracket{border-radius:var(--scalar-radius);background:var(--scalar-background-4)!important}[data-v-103d9d56] .cm-css-color-picker-wrapper{outline:1px solid var(--scalar-background-3);border-radius:3px;display:inline-flex;overflow:hidden}[data-v-103d9d56] .cm-gutters{color:var(--scalar-color-3);font-size:var(--scalar-small);background-color:#0000;border-right:none;border-radius:0 0 0 3px;line-height:22px}[data-v-103d9d56] .cm-gutters:before{content:"";border-radius:var(--scalar-radius)0 0 var(--scalar-radius);background-color:var(--scalar-background-1);width:calc(100% - 2px);height:calc(100% - 4px);position:absolute;top:2px;left:2px}[data-v-103d9d56] .cm-gutterElement{justify-content:flex-end;align-items:center;display:flex;position:relative;font-family:var(--scalar-font-code)!important;padding-left:0!important;padding-right:6px!important}[data-v-103d9d56] .cm-lineNumbers .cm-gutterElement{min-width:fit-content}[data-v-103d9d56] .cm-gutter+.cm-gutter :not(.cm-foldGutter) .cm-gutterElement{padding-left:0!important}[data-v-103d9d56] 
.cm-scroller{overflow:auto}.line-wrapping[data-v-103d9d56]:focus-within .cm-content{white-space:break-spaces;word-break:break-all;min-height:fit-content;padding:3px 6px;display:inline-table}.address-bar-history-button[data-v-a93fa60f]:hover{background:var(--scalar-background-3)}.address-bar-history-button[data-v-a93fa60f]:focus-within{background:var(--scalar-background-2)}.description[data-v-92012388] .markdown{font-weight:var(--scalar-semibold);color:var(--scalar-color--1);padding:0;display:block}.description[data-v-92012388] .markdown>:first-child{margin-top:0}[data-v-cb2a35da] .cm-editor{outline:none;width:100%;height:100%}[data-v-cb2a35da] .cm-line{padding:0}[data-v-cb2a35da] .cm-content{font-size:var(--scalar-small);align-items:center;padding:0;display:flex}.scroll-timeline-x[data-v-cb2a35da]{scroll-timeline:--scroll-timeline x;scroll-timeline:--scroll-timeline horizontal;-ms-overflow-style:none}.scroll-timeline-x-hidden[data-v-cb2a35da]{overflow-x:auto}.scroll-timeline-x-hidden[data-v-cb2a35da] .cm-scroller{scrollbar-width:none;-ms-overflow-style:none;padding-right:20px;overflow:auto}.scroll-timeline-x-hidden[data-v-cb2a35da]::-webkit-scrollbar{width:0;height:0;display:none}.scroll-timeline-x-hidden[data-v-cb2a35da] .cm-scroller::-webkit-scrollbar{width:0;height:0;display:none}.scroll-timeline-x-address[data-v-cb2a35da]{scrollbar-width:none;line-height:27px}.scroll-timeline-x-address[data-v-cb2a35da]:after{content:"";cursor:text;width:24px;height:100%;position:absolute;right:0}.scroll-timeline-x-address[data-v-cb2a35da]:empty:before{content:"Enter URL or cURL 
request";color:var(--scalar-color-3);pointer-events:none}.fade-left[data-v-cb2a35da],.fade-right[data-v-cb2a35da]{content:"";pointer-events:none;z-index:1;height:100%;animation-name:fadein-cb2a35da;animation-duration:1ms;animation-direction:reverse;animation-timeline:--scroll-timeline;position:sticky}.fade-left[data-v-cb2a35da]{background:linear-gradient(-90deg,var(--scalar-address-bar-bg)0%,var(--scalar-address-bar-bg)30%,var(--scalar-address-bar-bg)100%)}@supports (color:color-mix(in lab,red,red)){.fade-left[data-v-cb2a35da]{background:linear-gradient(-90deg,color-mix(in srgb,var(--scalar-address-bar-bg),transparent 100%)0%,color-mix(in srgb,var(--scalar-address-bar-bg),transparent 20%)30%,var(--scalar-address-bar-bg)100%)}}.fade-left[data-v-cb2a35da]{min-width:6px;animation-direction:normal;left:-1px}.fade-right[data-v-cb2a35da]{background:linear-gradient(90deg,var(--scalar-address-bar-bg)0%,var(--scalar-address-bar-bg)30%,var(--scalar-address-bar-bg)100%)}@supports (color:color-mix(in lab,red,red)){.fade-right[data-v-cb2a35da]{background:linear-gradient(90deg,color-mix(in srgb,var(--scalar-address-bar-bg),transparent 100%)0%,color-mix(in srgb,var(--scalar-address-bar-bg),transparent 20%)30%,var(--scalar-address-bar-bg)100%)}}.fade-right[data-v-cb2a35da]{min-width:24px;right:-1px}@keyframes fadein-cb2a35da{0%{opacity:0}1%{opacity:1}}.address-bar-bg-states[data-v-cb2a35da]{--scalar-address-bar-bg:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.address-bar-bg-states[data-v-cb2a35da]{--scalar-address-bar-bg:color-mix(in srgb,var(--scalar-background-1),var(--scalar-background-2))}}.address-bar-bg-states[data-v-cb2a35da]{background:var(--scalar-address-bar-bg)}.address-bar-bg-states[data-v-cb2a35da]:has(.cm-focused){--scalar-address-bar-bg:var(--scalar-background-1);border-color:var(--scalar-border-color);outline:1px solid var(--scalar-color-accent)}.address-bar-bg-states:has(.cm-focused) 
.fade-left[data-v-cb2a35da],.address-bar-bg-states:has(.cm-focused) .fade-right[data-v-cb2a35da]{--scalar-address-bar-bg:var(--scalar-background-1)}.sidebar-height[data-v-dcff7b49]{min-height:100%}@media(min-width:800px){.sidebar-mask[data-v-dcff7b49]{-webkit-mask-image:linear-gradient(0,transparent 0,transparent 0,var(--scalar-background-2)30px);mask-image:linear-gradient(0,transparent 0,transparent 0,var(--scalar-background-2)30px)}}.resizer[data-v-dcff7b49]{cursor:col-resize;border-right:2px solid #0000;width:5px;transition:border-right-color .3s;position:absolute;top:0;bottom:0;right:0}.resizer[data-v-dcff7b49]:hover,.dragging .resizer[data-v-dcff7b49]{border-right-color:var(--scalar-background-3)}.dragging[data-v-dcff7b49]{cursor:col-resize}.dragging[data-v-dcff7b49]:before{content:"";width:100%;height:100%;display:block;position:absolute}[data-v-c1a50a6e] .cm-editor{padding:0}[data-v-c1a50a6e] .cm-content{font-family:var(--scalar-font);font-size:var(--scalar-small);background-color:#0000;align-items:center;width:100%;padding:5px 8px;display:flex}[data-v-c1a50a6e] .cm-content:has(.cm-pill){padding:5px 8px}[data-v-c1a50a6e] .cm-content .cm-pill:not(:last-of-type){margin-right:.5px}[data-v-c1a50a6e] .cm-content .cm-pill:not(:first-of-type){margin-left:.5px}[data-v-c1a50a6e] .cm-line{text-overflow:ellipsis;word-break:break-word;padding:0;overflow:hidden}.required[data-v-c1a50a6e]:after{content:"Required"}input[data-v-c1a50a6e]::placeholder{color:var(--scalar-color-3)}.scalar-password-input[data-v-c1a50a6e]{text-security:disc;-webkit-text-security:disc;-moz-text-security:disc}@media(min-width:800px){.has-no-import-url,.has-import-url{contain:paint;max-width:100dvw;overflow-x:hidden}.has-no-import-url .scalar-client>main{opacity:1;background:var(--scalar-background-1);animation:.3s ease-in-out forwards transform-restore-layout}.has-import-url .scalar-client>main{opacity:0;border:var(--scalar-border-width)solid 
var(--scalar-border-color);z-index:10000;border-radius:12px;animation:.3s ease-in-out forwards transform-fade-layout;overflow:hidden;transform:scale(.85)translate(calc(50dvw + 80px))}.has-import-url .scalar-client .sidenav{display:none}.has-no-import-url .scalar-app,.has-import-url .scalar-app{background:var(--scalar-background-1)!important}}@keyframes transform-fade-layout{0%{opacity:0;transform:scale(.85)translate(calc(50dvw + 80px),10px)}to{opacity:1;transform:scale(.85)translate(calc(50dvw + 80px))}}@keyframes transform-restore-layout{0%{opacity:1;transform:scale(.85)translate(calc(50dvw + 80px))}to{opacity:1;transform:scale(1)translate(0)}}.openapi-color{color:var(--scalar-color-green)}.section-flare{position:fixed;top:0;right:-50dvw}.address-bar-history-button[data-v-8cf04803]:hover{background:var(--scalar-background-3)}.address-bar-history-button[data-v-8cf04803]:focus-within{background:var(--scalar-background-2)}.description[data-v-1b7a32a4] .markdown{font-weight:var(--scalar-semibold);color:var(--scalar-color--1);padding:0;display:block}.description[data-v-1b7a32a4] .markdown>:first-child{margin-top:0}[data-v-e2bff922] .cm-editor{background:0 0;outline:none;height:100%;padding:0}[data-v-e2bff922] .cm-placeholder{color:var(--scalar-color-3)}[data-v-e2bff922] .cm-content{font-family:var(--scalar-font-code);font-size:var(--scalar-small);max-height:20px;padding:8px 0}[data-v-e2bff922] .cm-tooltip{filter:brightness(var(--scalar-lifted-brightness));border-radius:var(--scalar-radius);box-shadow:var(--scalar-shadow-2);background:0 0!important;border:none!important;outline:none!important;overflow:hidden!important}[data-v-e2bff922] .cm-tooltip-autocomplete ul li{padding:3px 6px!important}[data-v-e2bff922] .cm-completionIcon-type:after{color:var(--scalar-color-3)!important}[data-v-e2bff922] .cm-tooltip-autocomplete ul li[aria-selected]{background:var(--scalar-background-2)!important;color:var(--scalar-color-1)!important}[data-v-e2bff922] .cm-tooltip-autocomplete 
ul{position:relative;padding:6px!important}[data-v-e2bff922] .cm-tooltip-autocomplete ul li:hover{border-radius:3px;color:var(--scalar-color-1)!important;background:var(--scalar-background-3)!important}[data-v-e2bff922] .cm-activeLine,[data-v-e2bff922] .cm-activeLineGutter{background-color:#0000}[data-v-e2bff922] .cm-selectionMatch,[data-v-e2bff922] .cm-matchingBracket{border-radius:var(--scalar-radius);background:var(--scalar-background-4)!important}[data-v-e2bff922] .cm-css-color-picker-wrapper{outline:1px solid var(--scalar-background-3);border-radius:3px;display:inline-flex;overflow:hidden}[data-v-e2bff922] .cm-gutters{color:var(--scalar-color-3);font-size:var(--scalar-small);background-color:#0000;border-right:none;border-radius:0 0 0 3px;line-height:22px}[data-v-e2bff922] .cm-gutters:before{content:"";border-radius:var(--scalar-radius)0 0 var(--scalar-radius);background-color:var(--scalar-background-1);width:calc(100% - 2px);height:calc(100% - 4px);position:absolute;top:2px;left:2px}[data-v-e2bff922] .cm-gutterElement{justify-content:flex-end;align-items:center;display:flex;position:relative;font-family:var(--scalar-font-code)!important;padding-left:0!important;padding-right:6px!important}[data-v-e2bff922] .cm-lineNumbers .cm-gutterElement{min-width:fit-content}[data-v-e2bff922] .cm-gutter+.cm-gutter :not(.cm-foldGutter) .cm-gutterElement{padding-left:0!important}[data-v-e2bff922] .cm-scroller{overflow:auto}.line-wrapping[data-v-e2bff922]:focus-within .cm-content{white-space:break-spaces;word-break:break-all;min-height:fit-content;padding:3px 6px;display:inline-table}.cm-pill{--tw-bg-base:var(--scalar-color-1);color:var(--tw-bg-base);font-size:var(--scalar-small);border-radius:30px;padding:0 9px;display:inline-block;background:var(--tw-bg-base)!important}@supports (color:color-mix(in lab,red,red)){.cm-pill{background:color-mix(in srgb,var(--tw-bg-base),transparent 94%)!important}}.cm-pill.bg-grey{background:var(--scalar-background-3)!important}.dark-mode 
.cm-pill{background:var(--tw-bg-base)!important}@supports (color:color-mix(in lab,red,red)){.dark-mode .cm-pill{background:color-mix(in srgb,var(--tw-bg-base),transparent 90%)!important}}.cm-pill:first-of-type{margin-left:0}.cm-editor .cm-widgetBuffer{display:none}.cm-foldPlaceholder:hover{color:var(--scalar-color-1)}.cm-foldGutter .cm-gutterElement{font-size:var(--scalar-heading-4);padding:2px!important}.cm-foldGutter .cm-gutterElement:first-of-type{display:none}.cm-foldGutter .cm-gutterElement .cm-foldMarker{padding:2px}.cm-foldGutter .cm-gutterElement:hover .cm-foldMarker{background:var(--scalar-background-2);border-radius:var(--scalar-radius);color:var(--scalar-color-1)}[data-v-90999cb7] .cm-editor{outline:none;width:100%;height:100%}[data-v-90999cb7] .cm-line{padding:0}[data-v-90999cb7] .cm-content{font-size:var(--scalar-small);align-items:center;padding:0;display:flex}.scroll-timeline-x[data-v-90999cb7]{scroll-timeline:--scroll-timeline x;scroll-timeline:--scroll-timeline horizontal;-ms-overflow-style:none}.scroll-timeline-x-hidden[data-v-90999cb7]{overflow-x:auto}.scroll-timeline-x-hidden[data-v-90999cb7] .cm-scroller{scrollbar-width:none;-ms-overflow-style:none;padding-right:20px;overflow:auto}.scroll-timeline-x-hidden[data-v-90999cb7]::-webkit-scrollbar{width:0;height:0;display:none}.scroll-timeline-x-hidden[data-v-90999cb7] .cm-scroller::-webkit-scrollbar{width:0;height:0;display:none}.scroll-timeline-x-address[data-v-90999cb7]{scrollbar-width:none;line-height:27px}.scroll-timeline-x-address[data-v-90999cb7]:after{content:"";cursor:text;width:24px;height:100%;position:absolute;right:0}.scroll-timeline-x-address[data-v-90999cb7]:empty:before{content:"Enter URL or cURL 
request";color:var(--scalar-color-3);pointer-events:none}.fade-left[data-v-90999cb7],.fade-right[data-v-90999cb7]{content:"";pointer-events:none;z-index:1;height:100%;animation-name:fadein-90999cb7;animation-duration:1ms;animation-direction:reverse;animation-timeline:--scroll-timeline;position:sticky}.fade-left[data-v-90999cb7]{background:linear-gradient(-90deg,var(--scalar-address-bar-bg)0%,var(--scalar-address-bar-bg)30%,var(--scalar-address-bar-bg)100%)}@supports (color:color-mix(in lab,red,red)){.fade-left[data-v-90999cb7]{background:linear-gradient(-90deg,color-mix(in srgb,var(--scalar-address-bar-bg),transparent 100%)0%,color-mix(in srgb,var(--scalar-address-bar-bg),transparent 20%)30%,var(--scalar-address-bar-bg)100%)}}.fade-left[data-v-90999cb7]{min-width:6px;animation-direction:normal;left:-1px}.fade-right[data-v-90999cb7]{background:linear-gradient(90deg,var(--scalar-address-bar-bg)0%,var(--scalar-address-bar-bg)30%,var(--scalar-address-bar-bg)100%)}@supports (color:color-mix(in lab,red,red)){.fade-right[data-v-90999cb7]{background:linear-gradient(90deg,color-mix(in srgb,var(--scalar-address-bar-bg),transparent 100%)0%,color-mix(in srgb,var(--scalar-address-bar-bg),transparent 20%)30%,var(--scalar-address-bar-bg)100%)}}.fade-right[data-v-90999cb7]{min-width:24px;right:-1px}@keyframes fadein-90999cb7{0%{opacity:0}1%{opacity:1}}.address-bar-bg-states[data-v-90999cb7]{--scalar-address-bar-bg:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.address-bar-bg-states[data-v-90999cb7]{--scalar-address-bar-bg:color-mix(in srgb,var(--scalar-background-1),var(--scalar-background-2))}}.address-bar-bg-states[data-v-90999cb7]{background:var(--scalar-address-bar-bg)}.address-bar-bg-states[data-v-90999cb7]:has(.cm-focused){--scalar-address-bar-bg:var(--scalar-background-1);border-color:var(--scalar-border-color);outline-width:1px;outline-style:solid}.address-bar-bg-states:has(.cm-focused) 
.fade-left[data-v-90999cb7],.address-bar-bg-states:has(.cm-focused) .fade-right[data-v-90999cb7]{--scalar-address-bar-bg:var(--scalar-background-1)}.app-exit-button[data-v-d73c7e3d]{color:#fff;background:#0000001a}.app-exit-button[data-v-d73c7e3d]:hover{background:#ffffff1a}.fade-request-section-content[data-v-f97cc68c]{background:linear-gradient(to left,var(--scalar-background-1)64%,transparent)}.filter-hover[data-v-f97cc68c]{height:100%;padding-left:24px;padding-right:39px;transition:width 0s ease-in-out .2s;position:absolute;right:0;overflow:hidden}.filter-hover[data-v-f97cc68c]:hover,.filter-hover[data-v-f97cc68c]:has(:focus-visible){z-index:10;width:100%}.filter-hover[data-v-f97cc68c]:before{content:"";background-color:var(--scalar-background-1);opacity:0;pointer-events:none;width:100%;height:fit-content;transition:all .3s ease-in-out;position:absolute;top:0;left:0}.filter-hover-item[data-v-f97cc68c]{opacity:0}.filter-hover-item[data-v-f97cc68c]:not(:last-of-type){transform:translateY(3px)}.filter-hover:hover .filter-hover-item[data-v-f97cc68c]{transition:opacity .2s ease-in-out,transform .2s ease-in-out}.filter-hover:hover .filter-hover-item[data-v-f97cc68c]:last-of-type{transition-delay:50ms}.filter-hover:hover .filter-hover-item[data-v-f97cc68c]:nth-last-of-type(2){transition-delay:.1s}.filter-hover:hover .filter-hover-item[data-v-f97cc68c]:nth-last-of-type(3){transition-delay:.15s}.filter-hover:hover .filter-hover-item[data-v-f97cc68c]:nth-last-of-type(4){transition-delay:.2s}.filter-hover:hover .filter-hover-item[data-v-f97cc68c]:nth-last-of-type(5){transition-delay:.25s}.filter-hover:hover .filter-hover-item[data-v-f97cc68c]:nth-last-of-type(6){transition-delay:.3s}.filter-hover:hover .filter-hover-item[data-v-f97cc68c]:nth-last-of-type(7){transition-delay:.35s}.filter-hover:hover .filter-hover-item[data-v-f97cc68c],.filter-hover:has(:focus-visible) 
.filter-hover-item[data-v-f97cc68c]{opacity:1;transform:translateZ(0)}.filter-hover[data-v-f97cc68c]:hover:before,.filter-hover[data-v-f97cc68c]:has(:focus-visible):before{opacity:.9;-webkit-backdrop-filter:blur(10px);backdrop-filter:blur(10px)}.filter-button[data-v-f97cc68c]{top:50%;transform:translateY(-50%)}.context-bar-group:hover .context-bar-group-hover\\:text-c-1[data-v-f97cc68c],.context-bar-group:has(:focus-visible) .context-bar-group-hover\\:text-c-1[data-v-f97cc68c]{--tw-text-opacity:1;color:rgb(var(--scalar-color-1)/var(--tw-text-opacity))}.context-bar-group:hover .context-bar-group-hover\\:hidden[data-v-f97cc68c],.context-bar-group:has(:focus-visible) .context-bar-group-hover\\:hidden[data-v-f97cc68c]{display:none}.schema>span[data-v-f2ab7aa3]:not(:first-child):before{content:"·";margin:0 .5ch;display:block}.schema>span[data-v-f2ab7aa3]{white-space:nowrap;display:flex}[data-v-2dc74300] .cm-editor{padding:0}[data-v-2dc74300] .cm-content{font-family:var(--scalar-font);font-size:var(--scalar-small);background-color:#0000;align-items:center;width:100%;padding:5px 8px;display:flex}[data-v-2dc74300] .cm-content:has(.cm-pill){padding:5px 8px}[data-v-2dc74300] .cm-content .cm-pill:not(:last-of-type){margin-right:.5px}[data-v-2dc74300] .cm-content .cm-pill:not(:first-of-type){margin-left:.5px}[data-v-2dc74300] .cm-line{text-overflow:ellipsis;padding:0;overflow:hidden}.filemask[data-v-2dc74300]{-webkit-mask-image:linear-gradient(to right,transparent 0,var(--scalar-background-2)20px);mask-image:linear-gradient(to right,transparent 0,var(--scalar-background-2)20px)}[data-v-0a6f7696] .cm-content{font-size:var(--scalar-small)}.form-group[data-v-43df1726]{margin-bottom:1rem}.modal-actions[data-v-43df1726]{justify-content:flex-end;gap:1rem;display:flex}[data-v-3157c3c7] .cm-editor{padding:0}[data-v-3157c3c7] .cm-content{font-family:var(--scalar-font);font-size:var(--scalar-small);background-color:#0000;align-items:center;width:100%;padding:5px 
8px;display:flex}[data-v-3157c3c7] .cm-content:has(.cm-pill){padding:5px 8px}[data-v-3157c3c7] .cm-content .cm-pill:not(:last-of-type){margin-right:.5px}[data-v-3157c3c7] .cm-content .cm-pill:not(:first-of-type){margin-left:.5px}[data-v-3157c3c7] .cm-line{text-overflow:ellipsis;word-break:break-word;padding:0;overflow:hidden}.required[data-v-3157c3c7]:after{content:"Required"}input[data-v-3157c3c7]::placeholder{color:var(--scalar-color-3)}.scalar-password-input[data-v-3157c3c7]{text-security:disc;-webkit-text-security:disc;-moz-text-security:disc}.request-section-content[data-v-175541a7]{--scalar-border-width:.5px}.request-section-content-filter[data-v-175541a7]{box-shadow:0 -10px 0 10px var(--scalar-background-1)}.request-item:focus-within .request-meta-buttons[data-v-175541a7]{opacity:1}.group-hover-input[data-v-175541a7]{border-width:var(--scalar-border-width);border-color:#0000}.group:hover .group-hover-input[data-v-175541a7]{background:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.group:hover .group-hover-input[data-v-175541a7]{background:color-mix(in srgb,var(--scalar-background-1),var(--scalar-background-2))}}.group:hover .group-hover-input[data-v-175541a7]{border-color:var(--scalar-border-color)}.group-hover-input[data-v-175541a7]:focus{border-color:var(--scalar-border-color)!important;background:0 0!important}.light-mode .bg-preview[data-v-c02b5bb8]{background-image:url("data:image/svg+xml,%3Csvg xmlns=\'http://www.w3.org/2000/svg\' width=\'16\' height=\'16\' fill=\'%23000\' fill-opacity=\'10%25\'%3E%3Crect width=\'8\' height=\'8\' /%3E%3Crect x=\'8\' y=\'8\' width=\'8\' height=\'8\' /%3E%3C/svg%3E")}.dark-mode .bg-preview[data-v-c02b5bb8]{background-image:url("data:image/svg+xml,%3Csvg xmlns=\'http://www.w3.org/2000/svg\' width=\'16\' height=\'16\' fill=\'%23FFF\' fill-opacity=\'10%25\'%3E%3Crect width=\'8\' height=\'8\' /%3E%3Crect x=\'8\' y=\'8\' width=\'8\' height=\'8\' /%3E%3C/svg%3E")}[data-v-10022578] 
.cm-editor{font-size:var(--scalar-small);background-color:#0000;outline:none}[data-v-10022578] .cm-gutters{background-color:var(--scalar-background-1);border-radius:var(--scalar-radius)0 0 var(--scalar-radius)}.body-raw[data-v-10022578] .cm-scroller{min-width:100%;overflow:auto}.scalar-code-block[data-v-32d6d6ca] .hljs *{font-size:var(--scalar-small)}.ascii-art-animate .ascii-art-line[data-v-69ebd973]{border-right:1ch solid #0000;animation:4s step-end 1s both typewriter-69ebd973,.5s step-end infinite blinkTextCursor-69ebd973}@keyframes typewriter-69ebd973{0%{width:0}to{width:100%}}@keyframes blinkTextCursor-69ebd973{0%{border-right-color:currentColor}50%{border-right-color:#0000}}.keycap-n[data-v-b1211b87]{background:-webkit-linear-gradient(5deg,transparent 30%,var(--scalar-color-3)50%);-webkit-text-fill-color:transparent;-webkit-background-clip:text;background-clip:text}.keycap-hotkey[data-v-b1211b87]{line-height:26px;position:absolute;top:32px}.scalar-version-number[data-v-159bd9b3]{width:76px;height:76px;font-size:8px;font-family:var(--scalar-font-code);box-shadow:inset 2px 0 0 2px var(--scalar-background-2);text-align:center;text-transform:initial;-webkit-text-decoration-color:var(--scalar-color-3);text-decoration-color:var(--scalar-color-3);border-radius:9px 9px 16px 12px;flex-direction:column;justify-content:center;align-items:center;margin-top:-113px;margin-left:-36px;line-height:11px;display:flex;position:absolute;transform:skewY(13deg)}.scalar-version-number a[data-v-159bd9b3]{background:var(--scalar-background-2);border:.5px solid var(--scalar-border-color);border-radius:3px;padding:2px 4px;font-weight:700;text-decoration:none}.gitbook-show[data-v-159bd9b3]{display:none}.v-enter-active[data-v-57ced68a]{transition:opacity .5s}.v-enter-from[data-v-57ced68a]{opacity:0}.animate-response-heading .response-heading[data-v-aaba9a18]{opacity:1;animation:.2s ease-in-out forwards push-response-aaba9a18}@keyframes 
push-response-aaba9a18{0%{opacity:1;transform:translateY(0)}to{opacity:0;transform:translateY(-4px)}}.animate-response-heading .animate-response-children[data-v-aaba9a18]{opacity:0;animation:.2s ease-in-out 50ms forwards response-spans-aaba9a18}@keyframes response-spans-aaba9a18{0%{opacity:0;transform:translateY(4px)}to{opacity:1;transform:translateY(0)}}.request-card[data-v-59889f40]{font-size:var(--scalar-font-size-3)}.request-method[data-v-59889f40]{font-family:var(--scalar-font-code);text-transform:uppercase;margin-right:6px}.request-card-footer[data-v-59889f40]{flex-shrink:0;justify-content:flex-end;padding:6px;display:flex}.request-card-footer-addon[data-v-59889f40]{flex:1;align-items:center;min-width:0;display:flex}.request-editor-section[data-v-59889f40]{flex:1;display:flex}.request-card-simple[data-v-59889f40]{font-size:var(--scalar-small);justify-content:space-between;align-items:center;padding:8px 8px 8px 12px;display:flex}.code-snippet[data-v-59889f40]{flex-direction:column;width:100%;display:flex}.resizer[data-v-e2c54c18]{cursor:col-resize;z-index:100;border-right:2px solid #0000;width:5px;transition:border-right-color .3s;position:absolute;top:0;bottom:0;right:0}.scalar-dragging{cursor:col-resize}.resizer:hover,.scalar-dragging .resizer{border-right-color:var(--scalar-background-3)}.scalar-dragging:after{content:"";display:block;position:absolute;inset:0}.ref-search-meta[data-v-0c30b37a]{background:var(--scalar-background-1);border-bottom-left-radius:var(--scalar-radius-lg);border-bottom-right-radius:var(--scalar-radius-lg);font-size:var(--scalar-font-size-4);color:var(--scalar-color-3);font-weight:var(--scalar-semibold);border-top:var(--scalar-border-width)solid var(--scalar-border-color);gap:12px;padding:6px 12px;display:flex}.splash-screen[data-v-af32615f]{opacity:0;animation:.5s ease-in-out forwards fadeIn-af32615f}.logo-icon[data-v-af32615f]{opacity:0;animation:.6s ease-in-out .2s forwards fadeInLogo-af32615f,2s ease-in-out .8s infinite 
pulse-af32615f}@keyframes fadeIn-af32615f{0%{opacity:0}to{opacity:.9}}@keyframes fadeInLogo-af32615f{0%{opacity:0;transform:scale(.9)}to{opacity:.8;transform:scale(1)}}@keyframes pulse-af32615f{0%,to{opacity:.8}50%{opacity:.6}}.scroll-timeline-x[data-v-981120f2]{scroll-timeline:--scroll-timeline x;scroll-timeline:--scroll-timeline horizontal;-ms-overflow-style:none;scrollbar-width:none;overflow:auto}.scroll-timeline-x[data-v-981120f2]::-webkit-scrollbar{display:none}.commandmenu[data-v-7f3e459b]{box-shadow:var(--scalar-shadow-2);border-radius:var(--scalar-radius-lg);background-color:var(--scalar-background-1);opacity:0;width:100%;max-width:580px;max-height:60dvh;margin:12px;animation:.3s ease-in-out .1s forwards fadeincommandmenu-7f3e459b;position:fixed;top:150px;left:50%;transform:translate(-50%,10px)}.commandmenu-overlay[data-v-7f3e459b]{cursor:pointer;background:#0003;animation:.3s ease-in-out forwards fadeincommand-7f3e459b;position:fixed;inset:0}@keyframes fadeincommand-7f3e459b{0%{opacity:0}to{opacity:1}}@keyframes fadeincommandmenu-7f3e459b{0%{opacity:0;transform:translate(-50%,10px)}to{opacity:1;transform:translate(-50%)}}.empty-sidebar-item-content[data-v-59eb2624]{display:none}.empty-sidebar-item .empty-sidebar-item-content[data-v-59eb2624]{display:block}.rabbitjump[data-v-59eb2624]{opacity:0}.empty-sidebar-item:hover .rabbitjump[data-v-59eb2624]{opacity:1;animation:.5s step-end infinite rabbitAnimation-59eb2624}.empty-sidebar-item:hover .rabbitsit[data-v-59eb2624]{opacity:0;animation:.5s step-end infinite rabbitAnimation2-59eb2624}.empty-sidebar-item:hover .rabbit-ascii[data-v-59eb2624]{animation:8s linear infinite rabbitRun-59eb2624}@keyframes rabbitRun-59eb2624{0%{transform:translateZ(0)}25%{transform:translate(250px)}25.01%{transform:translate(-250px)}75%{transform:translate(250px)}75.01%{transform:translate(-250px)}to{transform:translateZ(0)}}@keyframes rabbitAnimation-59eb2624{0%,to{opacity:1}50%{opacity:0}}@keyframes 
rabbitAnimation2-59eb2624{0%,to{opacity:0}50%{opacity:1;transform:translateY(-8px)}}.nav-single-tab[data-v-2e741aab]{width:100%;height:100%;color:var(--scalar-color-1);justify-content:center;align-items:center;display:flex;overflow:hidden}.nav-item[data-v-2e741aab]{cursor:pointer;border-radius:var(--scalar-radius-lg);background:var(--scalar-background-3);border:var(--scalar-border-width)solid var(--scalar-background-2);color:var(--scalar-color-3);flex:1;justify-content:center;align-items:center;min-width:0;padding:4.5px 1rem;display:flex;position:relative;overflow:hidden}.dark-mode .nav-item[data-v-2e741aab]{background:var(--scalar-background-2)}@supports (color:color-mix(in lab,red,red)){.dark-mode .nav-item[data-v-2e741aab]{background:color-mix(in srgb,var(--scalar-background-2),transparent)}}.nav-item-icon-copy[data-v-2e741aab]{white-space:nowrap;max-width:100%;-webkit-mask-image:linear-gradient(to left,transparent 0,var(--scalar-background-2)20px);mask-image:linear-gradient(to left,transparent 0,var(--scalar-background-2)20px);overflow:hidden}.nav-item:hover .nav-item-icon-copy[data-v-2e741aab]{-webkit-mask-image:linear-gradient(to left,transparent 20px,var(--scalar-background-2)40px);mask-image:linear-gradient(to left,transparent 20px,var(--scalar-background-2)40px)}.nav-item-copy[data-v-2e741aab]{max-width:calc(100% - 20px)}.nav-item[data-v-2e741aab]:hover{color:var(--scalar-color-1)}.nav-item__active[data-v-2e741aab]{background-color:var(--scalar-background-1);color:var(--scalar-color-1);border-color:var(--scalar-border-color)}.dark-mode .nav-item__active[data-v-2e741aab]{background-color:var(--scalar-background-2)}.nav-item-close[data-v-2e741aab]{border-radius:var(--scalar-radius);stroke-width:1.5px;max-width:20px;color:var(--scalar-color-3);opacity:0;background:0 0;margin-left:-20px;padding:2px;position:absolute;right:3px}.nav-item:hover 
.nav-item-close[data-v-2e741aab]{opacity:1}.nav-item-close[data-v-2e741aab]:hover{background-color:var(--scalar-background-4)}.nav-item__active .nav-item-close[data-v-2e741aab]:hover{background-color:var(--scalar-background-2)}.download-app-button[data-v-d9bec97b]{box-shadow:0 0 0 .5px var(--scalar-border-color);background:linear-gradient(#ffffffbf,#00000009)}.dark-mode .download-app-button[data-v-d9bec97b]{background:linear-gradient(#ffffff1a,#00000026)}.download-app-button[data-v-d9bec97b]:hover{background:linear-gradient(#00000009,#ffffffbf)}.dark-mode .download-app-button[data-v-d9bec97b]:hover{background:linear-gradient(#00000026,#ffffff1a)}#scalar-client{background-color:var(--scalar-background-2);flex-direction:column;width:100dvw;height:100dvh;display:flex;position:relative}.dark-mode #scalar-client{background-color:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.dark-mode #scalar-client{background-color:color-mix(in srgb,var(--scalar-background-1)65%,black)}}.scalar-collection-auth[data-v-dc159f6f]{border:var(--scalar-border-width)solid var(--scalar-border-color);border-radius:var(--scalar-radius-lg);overflow:hidden}[data-v-6fc24fdf] .cm-editor{padding:0}[data-v-6fc24fdf] .cm-content{font-family:var(--scalar-font);font-size:var(--scalar-small);background-color:#0000;align-items:center;width:100%;padding:5px 8px;display:flex}[data-v-6fc24fdf] .cm-content:has(.cm-pill){padding:5px 8px}[data-v-6fc24fdf] .cm-content .cm-pill:not(:last-of-type){margin-right:.5px}[data-v-6fc24fdf] .cm-content .cm-pill:not(:first-of-type){margin-left:.5px}[data-v-6fc24fdf] .cm-line{text-overflow:ellipsis;padding:0;overflow:hidden}[data-v-28c8509c] .cm-editor{padding:0}[data-v-28c8509c] .cm-content{font-family:var(--scalar-font);font-size:var(--scalar-small);background-color:#0000;align-items:center;width:100%;padding:5px 8px;display:flex}[data-v-28c8509c] .cm-content:has(.cm-pill){padding:5px 8px}[data-v-28c8509c] .cm-content 
.cm-pill:not(:last-of-type){margin-right:.5px}[data-v-28c8509c] .cm-content .cm-pill:not(:first-of-type){margin-left:.5px}[data-v-28c8509c] .cm-line{text-overflow:ellipsis;padding:0;overflow:hidden}[data-v-7c1a2f6c] .cm-content{min-height:fit-content}[data-v-7c1a2f6c] .cm-scroller{max-width:100%;overflow:auto hidden}.group-hover-input[data-v-5a23cb87]{border-width:var(--scalar-border-width);border-color:#0000}.group:hover .group-hover-input[data-v-5a23cb87]{background:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.group:hover .group-hover-input[data-v-5a23cb87]{background:color-mix(in srgb,var(--scalar-background-1),var(--scalar-background-2))}}.group:hover .group-hover-input[data-v-5a23cb87]{border-color:var(--scalar-border-color)}.group-hover-input[data-v-5a23cb87]:focus{border-color:var(--scalar-border-color)!important;background:0 0!important}.scalar .scalar-app-layout[data-v-6c81e410]{background:var(--scalar-background-1);opacity:0;border:var(--scalar-border-width)solid var(--scalar-border-color);border-radius:8px;width:100%;max-width:1390px;height:calc(100% - 120px);margin:auto;animation:.35s forwards scalarapiclientfadein-6c81e410;position:relative;overflow:hidden}@media(max-width:720px)and (max-height:480px){.scalar .scalar-app-layout[data-v-6c81e410]{height:100%;max-height:90svh}}@keyframes scalarapiclientfadein-6c81e410{0%{opacity:0}to{opacity:1}}.scalar .scalar-app-exit[data-v-6c81e410]{cursor:pointer;z-index:-1;background:#00000038;width:100vw;height:100vh;transition:all .3s ease-in-out;animation:.35s forwards scalardrawerexitfadein-6c81e410;position:fixed;top:0;left:0}.dark-mode .scalar .scalar-app-exit[data-v-6c81e410]{background:#00000073}.scalar .scalar-app-exit[data-v-6c81e410]:before{text-align:center;color:#fff;opacity:.6;font-family:sans-serif;font-size:30px;font-weight:100;line-height:50px;position:absolute;top:0;right:12px}.scalar .scalar-app-exit[data-v-6c81e410]:hover:before{opacity:1}@keyframes 
scalardrawerexitfadein-6c81e410{0%{opacity:0}to{opacity:1}}.scalar-container[data-v-6c81e410]{visibility:visible;z-index:10000;justify-content:center;align-items:center;width:100%;height:100%;display:flex;position:fixed;top:0;bottom:0;left:0;overflow:hidden}.scalar .url-form-input[data-v-6c81e410]{min-height:auto!important}.scalar .scalar-container[data-v-6c81e410]{line-height:normal}.scalar .scalar-app-header span[data-v-6c81e410]{color:var(--scalar-color-3)}.scalar .scalar-app-header a[data-v-6c81e410]{color:var(--scalar-color-1)}.scalar .scalar-app-header a[data-v-6c81e410]:hover{text-decoration:underline}.scalar-activate[data-v-6c81e410]{cursor:pointer;align-items:center;gap:6px;width:fit-content;margin:0 .75rem .75rem auto;font-size:.875rem;font-weight:600;line-height:24px;display:flex}.scalar-activate-button[data-v-6c81e410]{color:var(--scalar-color-blue);appearance:none;background:0 0;border:none;outline:none;align-items:center;gap:6px;padding:0 .5rem;display:flex}.scalar-activate:hover .scalar-activate-button[data-v-6c81e410]{background:var(--scalar-background-3);border-radius:3px}.schema>span[data-v-4df72868]:not(:first-child):before{content:"·";margin:0 .5ch;display:block}.schema>span[data-v-4df72868]{white-space:nowrap;display:flex}[data-v-04661eb4] .cm-editor{padding:0}[data-v-04661eb4] .cm-content{font-family:var(--scalar-font);font-size:var(--scalar-small);background-color:#0000;align-items:center;width:100%;padding:5px 8px;display:flex}[data-v-04661eb4] .cm-content:has(.cm-pill){padding:5px 8px}[data-v-04661eb4] .cm-content .cm-pill:not(:last-of-type){margin-right:.5px}[data-v-04661eb4] .cm-content .cm-pill:not(:first-of-type){margin-left:.5px}[data-v-04661eb4] .cm-line{text-overflow:ellipsis;padding:0;overflow:hidden}.filemask[data-v-04661eb4]{-webkit-mask-image:linear-gradient(to right,transparent 0,var(--scalar-background-2)20px);mask-image:linear-gradient(to right,transparent 0,var(--scalar-background-2)20px)}[data-v-9aa4b63a] 
.cm-content{font-size:var(--scalar-small)}.auth-combobox-position[data-v-0bb98074]{margin-left:120px}.scroll-timeline-x[data-v-0bb98074]{scroll-timeline:--scroll-timeline x;scroll-timeline:--scroll-timeline horizontal;scrollbar-width:none;-ms-overflow-style:none;overflow:auto}.fade-left[data-v-0bb98074],.fade-right[data-v-0bb98074]{content:"";pointer-events:none;height:100%;min-height:24px;animation-name:fadein-0bb98074;animation-duration:1ms;animation-direction:reverse;animation-timeline:--scroll-timeline;position:sticky}.fade-left[data-v-0bb98074]{background:linear-gradient(-90deg,var(--scalar-background-1)0%,var(--scalar-background-1)60%,var(--scalar-background-1)100%)}@supports (color:color-mix(in lab,red,red)){.fade-left[data-v-0bb98074]{background:linear-gradient(-90deg,color-mix(in srgb,var(--scalar-background-1),transparent 100%)0%,color-mix(in srgb,var(--scalar-background-1),transparent 20%)60%,var(--scalar-background-1)100%)}}.fade-left[data-v-0bb98074]{min-width:3px;animation-direction:normal;left:-1px}.fade-right[data-v-0bb98074]{background:linear-gradient(90deg,var(--scalar-background-1)0%,var(--scalar-background-1)60%,var(--scalar-background-1)100%)}@supports (color:color-mix(in lab,red,red)){.fade-right[data-v-0bb98074]{background:linear-gradient(90deg,color-mix(in srgb,var(--scalar-background-1),transparent 100%)0%,color-mix(in srgb,var(--scalar-background-1),transparent 20%)60%,var(--scalar-background-1)100%)}}.fade-right[data-v-0bb98074]{min-width:24px;margin-left:-20px;top:0;right:-1px}@keyframes fadein-0bb98074{0%{opacity:0}15%{opacity:1}}.auth-combobox-position[data-v-3f1067a4]{margin-left:120px}.scroll-timeline-x[data-v-3f1067a4]{scroll-timeline:--scroll-timeline x;scroll-timeline:--scroll-timeline 
horizontal;scrollbar-width:none;-ms-overflow-style:none;overflow:auto}.fade-left[data-v-3f1067a4],.fade-right[data-v-3f1067a4]{content:"";pointer-events:none;height:100%;min-height:24px;animation-name:fadein-3f1067a4;animation-duration:1ms;animation-direction:reverse;animation-timeline:--scroll-timeline;position:sticky}.fade-left[data-v-3f1067a4]{background:linear-gradient(-90deg,var(--scalar-background-1)0%,var(--scalar-background-1)60%,var(--scalar-background-1)100%)}@supports (color:color-mix(in lab,red,red)){.fade-left[data-v-3f1067a4]{background:linear-gradient(-90deg,color-mix(in srgb,var(--scalar-background-1),transparent 100%)0%,color-mix(in srgb,var(--scalar-background-1),transparent 20%)60%,var(--scalar-background-1)100%)}}.fade-left[data-v-3f1067a4]{min-width:3px;animation-direction:normal;left:-1px}.fade-right[data-v-3f1067a4]{background:linear-gradient(90deg,var(--scalar-background-1)0%,var(--scalar-background-1)60%,var(--scalar-background-1)100%)}@supports (color:color-mix(in lab,red,red)){.fade-right[data-v-3f1067a4]{background:linear-gradient(90deg,color-mix(in srgb,var(--scalar-background-1),transparent 100%)0%,color-mix(in srgb,var(--scalar-background-1),transparent 20%)60%,var(--scalar-background-1)100%)}}.fade-right[data-v-3f1067a4]{min-width:24px;margin-left:-20px;top:0;right:-1px}@keyframes fadein-3f1067a4{0%{opacity:0}15%{opacity:1}}[data-v-2891f052] code.hljs *{font-size:var(--scalar-small)}.request-section-content[data-v-e85e2882]{--scalar-border-width:.5px}.request-section-content-filter[data-v-e85e2882]{box-shadow:0 -10px 0 10px var(--scalar-background-1)}.request-item:focus-within .request-meta-buttons[data-v-e85e2882]{opacity:1}.group-hover-input[data-v-e85e2882]{border-width:var(--scalar-border-width);border-color:#0000}.group:hover .group-hover-input[data-v-e85e2882]{background:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.group:hover .group-hover-input[data-v-e85e2882]{background:color-mix(in 
srgb,var(--scalar-background-1),var(--scalar-background-2))}}.group:hover .group-hover-input[data-v-e85e2882]{border-color:var(--scalar-border-color)}.group-hover-input[data-v-e85e2882]:focus{border-color:var(--scalar-border-color)!important;background:0 0!important}.light-mode .bg-preview[data-v-0956ad2d]{background-image:url("data:image/svg+xml,%3Csvg xmlns=\'http://www.w3.org/2000/svg\' width=\'16\' height=\'16\' fill=\'%23000\' fill-opacity=\'10%25\'%3E%3Crect width=\'8\' height=\'8\' /%3E%3Crect x=\'8\' y=\'8\' width=\'8\' height=\'8\' /%3E%3C/svg%3E")}.dark-mode .bg-preview[data-v-0956ad2d]{background-image:url("data:image/svg+xml,%3Csvg xmlns=\'http://www.w3.org/2000/svg\' width=\'16\' height=\'16\' fill=\'%23FFF\' fill-opacity=\'10%25\'%3E%3Crect width=\'8\' height=\'8\' /%3E%3Crect x=\'8\' y=\'8\' width=\'8\' height=\'8\' /%3E%3C/svg%3E")}[data-v-85d2902e] .cm-editor{font-size:var(--scalar-small);background-color:#0000;outline:none}[data-v-85d2902e] .cm-gutters{background-color:var(--scalar-background-1);border-radius:var(--scalar-radius)0 0 var(--scalar-radius)}.body-raw[data-v-85d2902e] .cm-scroller{min-width:100%;overflow:auto}.scalar-code-block[data-v-17966bf4] .hljs *{font-size:var(--scalar-small)}.response-body-virtual[data-headlessui-state=open],.response-body-virtual[data-headlessui-state=open] .diclosure-panel{flex-direction:column;flex-grow:1;display:flex}.scalar-version-number[data-v-6d2bdb61]{width:76px;height:76px;font-size:8px;font-family:var(--scalar-font-code);box-shadow:inset 2px 0 0 2px var(--scalar-background-2);text-align:center;text-transform:initial;-webkit-text-decoration-color:var(--scalar-color-3);text-decoration-color:var(--scalar-color-3);border-radius:9px 9px 16px 12px;flex-direction:column;justify-content:center;align-items:center;margin-top:-113px;margin-left:-36px;line-height:11px;display:flex;position:absolute;transform:skewY(13deg)}.scalar-version-number a[data-v-6d2bdb61]{background:var(--scalar-background-2);border:.5px 
solid var(--scalar-border-color);border-radius:3px;padding:2px 4px;font-weight:700;text-decoration:none}.gitbook-show[data-v-6d2bdb61]{display:none}.v-enter-active[data-v-7ec8af01]{transition:opacity .5s}.v-enter-from[data-v-7ec8af01]{opacity:0}.animate-response-heading .response-heading[data-v-7138ed84]{opacity:1;animation:.2s ease-in-out forwards push-response-7138ed84}@keyframes push-response-7138ed84{0%{opacity:1;transform:translateY(0)}to{opacity:0;transform:translateY(-4px)}}.animate-response-heading .animate-response-children[data-v-7138ed84]{opacity:0;animation:.2s ease-in-out 50ms forwards response-spans-7138ed84}@keyframes response-spans-7138ed84{0%{opacity:0;transform:translateY(4px)}to{opacity:1;transform:translateY(0)}}.ellipsis-position[data-v-01a1ab71]{transform:translate(calc(-100% - 4.5px))}.indent-border-line-offset[data-v-4f5a9d1f]:before{left:var(--v0bed2d4e)}.indent-padding-left[data-v-4f5a9d1f]{padding-left:calc(var(--v57ee1db0) + 6px)}.sidebar-folderitem[data-v-4f5a9d1f] .ellipsis-position{right:6px;transform:none}.search-button-fade[data-v-bca9c474]{background:linear-gradient(var(--scalar-background-1)32px,var(--scalar-background-1)38px,transparent)}@supports (color:color-mix(in lab,red,red)){.search-button-fade[data-v-bca9c474]{background:linear-gradient(var(--scalar-background-1)32px,color-mix(in srgb,var(--scalar-background-1),transparent)38px,transparent)}}.empty-sidebar-item-content[data-v-bca9c474]{display:none}.empty-sidebar-item .empty-sidebar-item-content[data-v-bca9c474]{display:block}.rabbitjump[data-v-bca9c474]{opacity:0}.empty-sidebar-item:hover .rabbitjump[data-v-bca9c474]{opacity:1;animation:.5s step-end infinite rabbitAnimation-bca9c474}.empty-sidebar-item:hover .rabbitsit[data-v-bca9c474]{opacity:0;animation:.5s step-end infinite rabbitAnimation2-bca9c474}.empty-sidebar-item:hover .rabbit-ascii[data-v-bca9c474]{animation:8s linear infinite rabbitRun-bca9c474}@keyframes 
rabbitRun-bca9c474{0%{transform:translateZ(0)}25%{transform:translate(250px)}25.01%{transform:translate(-250px)}75%{transform:translate(250px)}75.01%{transform:translate(-250px)}to{transform:translateZ(0)}}@keyframes rabbitAnimation-bca9c474{0%,to{opacity:1}50%{opacity:0}}@keyframes rabbitAnimation2-bca9c474{0%,to{opacity:0}50%{opacity:1;transform:translateY(-8px)}}.request-text-color-text[data-v-c508c571]{color:var(--scalar-color-1);background:linear-gradient(var(--scalar-background-1),var(--scalar-background-3));box-shadow:0 0 0 1px var(--scalar-border-color)}@media screen and (max-width:800px){.sidebar-active-hide-layout[data-v-c508c571]{display:none}.sidebar-active-width[data-v-c508c571]{width:100%}}.gitbook-show[data-v-c8df97c6]{display:none}.app-exit-button[data-v-c8df97c6]{color:#fff;background:#0000001a}.app-exit-button[data-v-c8df97c6]:hover{background:#ffffff1a}.request-text-color-text[data-v-57ae0d10]{color:var(--scalar-color-1);background:linear-gradient(var(--scalar-background-1),var(--scalar-background-3));box-shadow:0 0 0 1px var(--scalar-border-color)}@media screen and (max-width:800px){.sidebar-active-hide-layout[data-v-57ae0d10]{display:none}.sidebar-active-width[data-v-57ae0d10]{width:100%}}.group-hover-input[data-v-fced736a]{border-width:var(--scalar-border-width);border-color:#0000}.group:hover .group-hover-input[data-v-fced736a]{background:var(--scalar-background-1)}@supports (color:color-mix(in lab,red,red)){.group:hover .group-hover-input[data-v-fced736a]{background:color-mix(in srgb,var(--scalar-background-1),var(--scalar-background-2))}}.group:hover .group-hover-input[data-v-fced736a]{border-color:var(--scalar-border-color)}.group-hover-input[data-v-fced736a]:focus{border-color:var(--scalar-border-color)!important;background:0 0!important}[data-v-68d5218e] .markdown h2{font-size:var(--scalar-font-size-2)}[data-v-5997a667] .cm-content{min-height:fit-content}[data-v-5997a667] .cm-scroller{max-width:100%;overflow:auto hidden}[data-v-83bfcc8a] 
.cm-editor{padding:0}[data-v-83bfcc8a] .cm-content{font-family:var(--scalar-font);font-size:var(--scalar-small);background-color:#0000;align-items:center;width:100%;padding:5px 8px;display:flex}[data-v-83bfcc8a] .cm-content:has(.cm-pill){padding:5px 8px}[data-v-83bfcc8a] .cm-content .cm-pill:not(:last-of-type){margin-right:.5px}[data-v-83bfcc8a] .cm-content .cm-pill:not(:first-of-type){margin-left:.5px}[data-v-83bfcc8a] .cm-line{text-overflow:ellipsis;padding:0;overflow:hidden}.scalar-collection-auth[data-v-cc87292e]{border:var(--scalar-border-width)solid var(--scalar-border-color);border-radius:var(--scalar-radius-lg);overflow:hidden}.dragover-asChild[data-v-a89d6a6e],.dragover-above[data-v-a89d6a6e],.dragover-below[data-v-a89d6a6e]{position:relative}.dragover-above[data-v-a89d6a6e]:after,.dragover-below[data-v-a89d6a6e]:after{content:"";background:var(--scalar-color-blue);width:100%;height:3px;display:block;position:absolute;top:-1.5px}@supports (color:color-mix(in lab,red,red)){.dragover-above[data-v-a89d6a6e]:after,.dragover-below[data-v-a89d6a6e]:after{background:color-mix(in srgb,var(--scalar-color-blue),transparent 85%)}}.dragover-above[data-v-a89d6a6e]:after,.dragover-below[data-v-a89d6a6e]:after{pointer-events:none;border-radius:var(--scalar-radius)}.dragover-below[data-v-a89d6a6e]:after{top:initial;bottom:-1.5px}.dragover-asChild[data-v-a89d6a6e]:after{content:"";background:var(--scalar-color-blue);width:100%;height:100%;display:block;position:absolute;top:0;left:0}@supports (color:color-mix(in lab,red,red)){.dragover-asChild[data-v-a89d6a6e]:after{background:color-mix(in srgb,var(--scalar-color-blue),transparent 
85%)}}.dragover-asChild[data-v-a89d6a6e]:after{pointer-events:none;border-radius:var(--scalar-radius)}.empty-variable-name[data-v-0b6c70e4]:empty:before{content:"Untitled";color:var(--scalar-color-3)}.form-group[data-v-694018d6]{margin-bottom:1rem}.modal-actions[data-v-694018d6]{justify-content:flex-end;gap:1rem;display:flex}:root{--scalar-loaded-api-reference:true}@property --tw-translate-x{syntax:"*";inherits:false;initial-value:0}@property --tw-translate-y{syntax:"*";inherits:false;initial-value:0}@property --tw-translate-z{syntax:"*";inherits:false;initial-value:0}@property --tw-rotate-x{syntax:"*";inherits:false}@property --tw-rotate-y{syntax:"*";inherits:false}@property --tw-rotate-z{syntax:"*";inherits:false}@property --tw-skew-x{syntax:"*";inherits:false}@property --tw-skew-y{syntax:"*";inherits:false}@property --tw-border-style{syntax:"*";inherits:false;initial-value:solid}@property --tw-leading{syntax:"*";inherits:false}@property --tw-font-weight{syntax:"*";inherits:false}@property --tw-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-shadow-color{syntax:"*";inherits:false}@property --tw-shadow-alpha{syntax:"";inherits:false;initial-value:100%}@property --tw-inset-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-inset-shadow-color{syntax:"*";inherits:false}@property --tw-inset-shadow-alpha{syntax:"";inherits:false;initial-value:100%}@property --tw-ring-color{syntax:"*";inherits:false}@property --tw-ring-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-inset-ring-color{syntax:"*";inherits:false}@property --tw-inset-ring-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-ring-inset{syntax:"*";inherits:false}@property --tw-ring-offset-width{syntax:"";inherits:false;initial-value:0}@property --tw-ring-offset-color{syntax:"*";inherits:false;initial-value:#fff}@property --tw-ring-offset-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property 
--tw-outline-style{syntax:"*";inherits:false;initial-value:solid}@property --tw-blur{syntax:"*";inherits:false}@property --tw-brightness{syntax:"*";inherits:false}@property --tw-contrast{syntax:"*";inherits:false}@property --tw-grayscale{syntax:"*";inherits:false}@property --tw-hue-rotate{syntax:"*";inherits:false}@property --tw-invert{syntax:"*";inherits:false}@property --tw-opacity{syntax:"*";inherits:false}@property --tw-saturate{syntax:"*";inherits:false}@property --tw-sepia{syntax:"*";inherits:false}@property --tw-drop-shadow{syntax:"*";inherits:false}@property --tw-drop-shadow-color{syntax:"*";inherits:false}@property --tw-drop-shadow-alpha{syntax:"";inherits:false;initial-value:100%}@property --tw-drop-shadow-size{syntax:"*";inherits:false}@property --tw-duration{syntax:"*";inherits:false}@layer scalar-config{.scalar-api-reference[data-v-4acc19c4]{--refs-sidebar-width: var(--scalar-sidebar-width, 0px);--refs-header-height: calc( var(--scalar-custom-header-height, 0px) + var(--scalar-header-height, 0px) );--refs-viewport-offset: calc( var(--refs-header-height, 0px) + var(--refs-content-offset, 0px) );--refs-viewport-height: calc( var(--full-height, 100dvh) - var(--refs-viewport-offset, 0px) );--refs-content-max-width: var(--scalar-content-max-width, 1540px)}.scalar-api-reference.references-classic[data-v-4acc19c4]{--refs-content-max-width: var(--scalar-content-max-width, 1420px);min-height:100dvh;--refs-sidebar-width: 0}.scalar-api-reference[data-v-4acc19c4]:has(.api-reference-toolbar){--refs-content-offset: 48px}}.references-layout[data-v-4acc19c4]{min-height:100dvh;min-width:100%;max-width:100%;flex:1;--full-height: 100dvh;display:grid;grid-template-rows:var(--scalar-header-height, 0px) repeat(2,auto);grid-template-columns:auto 1fr;grid-template-areas:"header header" "navigation rendered" "footer 
footer";background:var(--scalar-background-1)}.references-editor[data-v-4acc19c4]{grid-area:editor;display:flex;min-width:0;background:var(--scalar-background-1)}.references-rendered[data-v-4acc19c4]{position:relative;grid-area:rendered;min-width:0;background:var(--scalar-background-1)}.scalar-api-reference.references-classic[data-v-4acc19c4],.references-classic .references-rendered[data-v-4acc19c4]{height:initial!important;max-height:initial!important}@layer scalar-config{.references-sidebar[data-v-4acc19c4]{--refs-sidebar-width: var(--scalar-sidebar-width, 280px)}}.references-footer[data-v-4acc19c4]{grid-area:footer}@media(max-width:1000px){.references-layout[data-v-4acc19c4]{grid-template-columns:100%;grid-template-rows:var(--scalar-header-height, 0px) 0px auto auto;grid-template-areas:"header" "navigation" "rendered" "footer"}.references-editable[data-v-4acc19c4]{grid-template-areas:"header" "navigation" "editor"}.references-rendered[data-v-4acc19c4]{position:static}}@media(max-width:1000px){.scalar-api-references-standalone-mobile[data-v-4acc19c4]{--scalar-header-height: 50px}}.darklight-reference[data-v-4acc19c4]{width:100%;margin-top:auto}')),document.head.appendChild(e)}}catch(e){console.error("vite-plugin-css-injected-by-js",e)}}(),function(e){"function"==typeof define&&define.amd?define(e):e()}((function(){"use strict";const e=Object.freeze({status:"aborted"});function t(e,t,n){function r(n,r){if(n._zod||Object.defineProperty(n,"_zod",{value:{def:r,constr:i,traits:new Set},enumerable:!1}),n._zod.traits.has(e))return;n._zod.traits.add(e),t(n,r);const a=i.prototype,o=Object.keys(a);for(let e=0;e!!(n?.Parent&&t instanceof n.Parent)||t?._zod?.traits?.has(e)}),Object.defineProperty(i,"name",{value:e}),i}const n=Symbol("zod_brand");class r extends Error{constructor(){super("Encountered Promise during synchronous parse. 
Use .parseAsync() instead.")}}class a extends Error{constructor(e){super(`Encountered unidirectional transform during encode: ${e}`),this.name="ZodEncodeError"}}const o={};function i(e){return e&&Object.assign(o,e),o}function s(e){const t=Object.values(e).filter((e=>"number"==typeof e));return Object.entries(e).filter((([e,n])=>-1===t.indexOf(+e))).map((([e,t])=>t))}function l(e,t="|"){return e.map((e=>$(e))).join(t)}function c(e,t){return"bigint"==typeof t?t.toString():t}function u(e){return{get value(){{const t=e();return Object.defineProperty(this,"value",{value:t}),t}}}}function d(e){return null==e}function p(e){const t=e.startsWith("^")?1:0,n=e.endsWith("$")?e.length-1:e.length;return e.slice(t,n)}function h(e,t){const n=(e.toString().split(".")[1]||"").length,r=t.toString();let a=(r.split(".")[1]||"").length;if(0===a&&/\d?e-\d?/.test(r)){const e=r.match(/\d?e-(\d?)/);e?.[1]&&(a=Number.parseInt(e[1]))}const o=n>a?n:a;return Number.parseInt(e.toFixed(o).replace(".",""))%Number.parseInt(t.toFixed(o).replace(".",""))/10**o}const f=Symbol("evaluating");function m(e,t,n){let r;Object.defineProperty(e,t,{get(){if(r!==f)return void 0===r&&(r=f,r=n()),r},set(n){Object.defineProperty(e,t,{value:n})},configurable:!0})}function g(e,t,n){Object.defineProperty(e,t,{value:n,writable:!0,enumerable:!0,configurable:!0})}function v(...e){const t={};for(const n of e){const e=Object.getOwnPropertyDescriptors(n);Object.assign(t,e)}return Object.defineProperties({},t)}function b(e){return JSON.stringify(e)}function y(e){return e.toLowerCase().trim().replace(/[^\w\s-]/g,"").replace(/[\s_-]+/g,"-").replace(/^-+|-+$/g,"")}const O="captureStackTrace"in Error?Error.captureStackTrace:(...e)=>{};function w(e){return"object"==typeof e&&null!==e&&!Array.isArray(e)}const x=u((()=>{if("undefined"!=typeof navigator&&navigator?.userAgent?.includes("Cloudflare"))return!1;try{return new Function(""),!0}catch(e){return!1}}));function k(e){if(!1===w(e))return!1;const t=e.constructor;if(void 
0===t)return!0;if("function"!=typeof t)return!0;const n=t.prototype;return!1!==w(n)&&!1!==Object.prototype.hasOwnProperty.call(n,"isPrototypeOf")}function S(e){return k(e)?{...e}:Array.isArray(e)?[...e]:e}const _=new Set(["string","number","symbol"]),T=new Set(["string","number","bigint","boolean","symbol","undefined"]);function E(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}function A(e,t,n){const r=new e._zod.constr(t??e._zod.def);return t&&!n?.parent||(r._zod.parent=e),r}function C(e){const t=e;if(!t)return{};if("string"==typeof t)return{error:()=>t};if(void 0!==t?.message){if(void 0!==t?.error)throw new Error("Cannot specify both `message` and `error` params");t.error=t.message}return delete t.message,"string"==typeof t.error?{...t,error:()=>t.error}:t}function $(e){return"bigint"==typeof e?e.toString()+"n":"string"==typeof e?`"${e}"`:`${e}`}function P(e){return Object.keys(e).filter((t=>"optional"===e[t]._zod.optin&&"optional"===e[t]._zod.optout))}const D={safeint:[Number.MIN_SAFE_INTEGER,Number.MAX_SAFE_INTEGER],int32:[-2147483648,2147483647],uint32:[0,4294967295],float32:[-34028234663852886e22,34028234663852886e22],float64:[-Number.MAX_VALUE,Number.MAX_VALUE]},I={int64:[BigInt("-9223372036854775808"),BigInt("9223372036854775807")],uint64:[BigInt(0),BigInt("18446744073709551615")]};function M(e,t){const n=e._zod.def,r=n.checks;if(r&&r.length>0)throw new Error(".pick() cannot be used on object schemas containing refinements");return A(e,v(e._zod.def,{get shape(){const e={};for(const r in t){if(!(r in n.shape))throw new Error(`Unrecognized key: "${r}"`);t[r]&&(e[r]=n.shape[r])}return g(this,"shape",e),e},checks:[]}))}function N(e,t){const n=e._zod.def,r=n.checks;if(r&&r.length>0)throw new Error(".omit() cannot be used on object schemas containing refinements");const a=v(e._zod.def,{get shape(){const r={...e._zod.def.shape};for(const e in t){if(!(e in n.shape))throw new Error(`Unrecognized key: "${e}"`);t[e]&&delete r[e]}return 
g(this,"shape",r),r},checks:[]});return A(e,a)}function R(e,t){if(!k(t))throw new Error("Invalid input to extend: expected a plain object");const n=e._zod.def.checks;if(n&&n.length>0){const n=e._zod.def.shape;for(const e in t)if(void 0!==Object.getOwnPropertyDescriptor(n,e))throw new Error("Cannot overwrite keys on object schemas containing refinements. Use `.safeExtend()` instead.")}const r=v(e._zod.def,{get shape(){const n={...e._zod.def.shape,...t};return g(this,"shape",n),n}});return A(e,r)}function L(e,t){if(!k(t))throw new Error("Invalid input to safeExtend: expected a plain object");const n=v(e._zod.def,{get shape(){const n={...e._zod.def.shape,...t};return g(this,"shape",n),n}});return A(e,n)}function B(e,t){const n=v(e._zod.def,{get shape(){const n={...e._zod.def.shape,...t._zod.def.shape};return g(this,"shape",n),n},get catchall(){return t._zod.def.catchall},checks:[]});return A(e,n)}function j(e,t,n){const r=t._zod.def.checks;if(r&&r.length>0)throw new Error(".partial() cannot be used on object schemas containing refinements");const a=v(t._zod.def,{get shape(){const r=t._zod.def.shape,a={...r};if(n)for(const t in n){if(!(t in r))throw new Error(`Unrecognized key: "${t}"`);n[t]&&(a[t]=e?new e({type:"optional",innerType:r[t]}):r[t])}else for(const t in r)a[t]=e?new e({type:"optional",innerType:r[t]}):r[t];return g(this,"shape",a),a},checks:[]});return A(t,a)}function U(e,t,n){const r=v(t._zod.def,{get shape(){const r=t._zod.def.shape,a={...r};if(n)for(const t in n){if(!(t in a))throw new Error(`Unrecognized key: "${t}"`);n[t]&&(a[t]=new e({type:"nonoptional",innerType:r[t]}))}else for(const t in r)a[t]=new e({type:"nonoptional",innerType:r[t]});return g(this,"shape",a),a}});return A(t,r)}function z(e,t=0){if(!0===e.aborted)return!0;for(let n=t;n{var n;return(n=t).path??(n.path=[]),t.path.unshift(e),t}))}function Z(e){return"string"==typeof e?e:e?.message}function Q(e,t,n){const r={...e,path:e.path??[]};if(!e.message){const 
a=Z(e.inst?._zod.def?.error?.(e))??Z(t?.error?.(e))??Z(n.customError?.(e))??Z(n.localeError?.(e))??"Invalid input";r.message=a}return delete r.inst,delete r.continue,t?.reportInput||delete r.input,r}function H(e){return e instanceof Set?"set":e instanceof Map?"map":e instanceof File?"file":"unknown"}function V(e){return Array.isArray(e)?"array":"string"==typeof e?"string":"unknown"}function q(e){const t=typeof e;switch(t){case"number":return Number.isNaN(e)?"nan":"number";case"object":{if(null===e)return"null";if(Array.isArray(e))return"array";const t=e;if(t&&Object.getPrototypeOf(t)!==Object.prototype&&"constructor"in t&&t.constructor)return t.constructor.name}}return t}function W(...e){const[t,n,r]=e;return"string"==typeof t?{message:t,code:"custom",input:n,inst:r}:{...t}}function X(e){const t=atob(e),n=new Uint8Array(t.length);for(let e=0;ee[t]));return Promise.all(n).then((e=>{const n={};for(let r=0;re.toString(16).padStart(2,"0"))).join("")},unwrapMessage:Z},Symbol.toStringTag,{value:"Module"})),K=(e,t)=>{e.name="$ZodError",Object.defineProperty(e,"_zod",{value:e._zod,enumerable:!1}),Object.defineProperty(e,"issues",{value:t,enumerable:!1}),e.message=JSON.stringify(t,c,2),Object.defineProperty(e,"toString",{value:()=>e.message,enumerable:!1})},J=t("$ZodError",K),ee=t("$ZodError",K,{Parent:Error});function te(e,t=e=>e.message){const n={},r=[];for(const a of e.issues)a.path.length>0?(n[a.path[0]]=n[a.path[0]]||[],n[a.path[0]].push(t(a))):r.push(t(a));return{formErrors:r,fieldErrors:n}}function ne(e,t=e=>e.message){const n={_errors:[]},r=e=>{for(const a of e.issues)if("invalid_union"===a.code&&a.errors.length)a.errors.map((e=>r({issues:e})));else if("invalid_key"===a.code)r({issues:a.issues});else if("invalid_element"===a.code)r({issues:a.issues});else if(0===a.path.length)n._errors.push(t(a));else{let e=n,r=0;for(;re.message){const n={errors:[]},r=(e,a=[])=>{var o,i;for(const s of 
e.issues)if("invalid_union"===s.code&&s.errors.length)s.errors.map((e=>r({issues:e},s.path)));else if("invalid_key"===s.code)r({issues:s.issues},s.path);else if("invalid_element"===s.code)r({issues:s.issues},s.path);else{const e=[...a,...s.path];if(0===e.length){n.errors.push(t(s));continue}let r=n,l=0;for(;l"object"==typeof e?e.key:e));for(const e of n)"number"==typeof e?t.push(`[${e}]`):"symbol"==typeof e?t.push(`[${JSON.stringify(String(e))}]`):/[^\w$]/.test(e)?t.push(`[${JSON.stringify(e)}]`):(t.length&&t.push("."),t.push(e));return t.join("")}function oe(e){const t=[],n=[...e.issues].sort(((e,t)=>(e.path??[]).length-(t.path??[]).length));for(const e of n)t.push(`✖ ${e.message}`),e.path?.length&&t.push(` → at ${ae(e.path)}`);return t.join("\n")}const ie=e=>(t,n,a,o)=>{const s=a?Object.assign(a,{async:!1}):{async:!1},l=t._zod.run({value:n,issues:[]},s);if(l instanceof Promise)throw new r;if(l.issues.length){const t=new(o?.Err??e)(l.issues.map((e=>Q(e,s,i()))));throw O(t,o?.callee),t}return l.value},se=ie(ee),le=e=>async(t,n,r,a)=>{const o=r?Object.assign(r,{async:!0}):{async:!0};let s=t._zod.run({value:n,issues:[]},o);if(s instanceof Promise&&(s=await s),s.issues.length){const t=new(a?.Err??e)(s.issues.map((e=>Q(e,o,i()))));throw O(t,a?.callee),t}return s.value},ce=le(ee),ue=e=>(t,n,a)=>{const o=a?{...a,async:!1}:{async:!1},s=t._zod.run({value:n,issues:[]},o);if(s instanceof Promise)throw new r;return s.issues.length?{success:!1,error:new(e??J)(s.issues.map((e=>Q(e,o,i()))))}:{success:!0,data:s.value}},de=ue(ee),pe=e=>async(t,n,r)=>{const a=r?Object.assign(r,{async:!0}):{async:!0};let o=t._zod.run({value:n,issues:[]},a);return o instanceof Promise&&(o=await o),o.issues.length?{success:!1,error:new e(o.issues.map((e=>Q(e,a,i()))))}:{success:!0,data:o.value}},he=pe(ee),fe=e=>(t,n,r)=>{const a=r?Object.assign(r,{direction:"backward"}):{direction:"backward"};return ie(e)(t,n,a)},me=fe(ee),ge=e=>(t,n,r)=>ie(e)(t,n,r),ve=ge(ee),be=e=>async(t,n,r)=>{const 
a=r?Object.assign(r,{direction:"backward"}):{direction:"backward"};return le(e)(t,n,a)},ye=be(ee),Oe=e=>async(t,n,r)=>le(e)(t,n,r),we=Oe(ee),xe=e=>(t,n,r)=>{const a=r?Object.assign(r,{direction:"backward"}):{direction:"backward"};return ue(e)(t,n,a)},ke=xe(ee),Se=e=>(t,n,r)=>ue(e)(t,n,r),_e=Se(ee),Te=e=>async(t,n,r)=>{const a=r?Object.assign(r,{direction:"backward"}):{direction:"backward"};return pe(e)(t,n,a)},Ee=Te(ee),Ae=e=>async(t,n,r)=>pe(e)(t,n,r),Ce=Ae(ee),$e=/^[cC][^\s-]{8,}$/,Pe=/^[0-9a-z]+$/,De=/^[0-9A-HJKMNP-TV-Za-hjkmnp-tv-z]{26}$/,Ie=/^[0-9a-vA-V]{20}$/,Me=/^[A-Za-z0-9]{27}$/,Ne=/^[a-zA-Z0-9_-]{21}$/,Re=/^P(?:(\d+W)|(?!.*W)(?=\d|T\d)(\d+Y)?(\d+M)?(\d+D)?(T(?=\d)(\d+H)?(\d+M)?(\d+([.,]\d+)?S)?)?)$/,Le=/^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})$/,Be=e=>e?new RegExp(`^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-${e}[0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12})$`):/^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-8][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}|00000000-0000-0000-0000-000000000000|ffffffff-ffff-ffff-ffff-ffffffffffff)$/,je=Be(4),Ue=Be(6),ze=Be(7),Fe=/^(?!\.)(?!.*\.\.)([A-Za-z0-9_'+\-\.]*)[A-Za-z0-9_+-]@([A-Za-z0-9][A-Za-z0-9\-]*\.)+[A-Za-z]{2,}$/,Ze=/^[^\s@"]{1,64}@[^\s@]{1,255}$/u,Qe=Ze;function He(){return new RegExp("^(\\p{Extended_Pictographic}|\\p{Emoji_Component})+$","u")}const Ve=/^(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\.){3}(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])$/,qe=/^(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:))$/,We=e=>{const t=E(e??":");return new 
RegExp(`^(?:[0-9A-F]{2}${t}){5}[0-9A-F]{2}$|^(?:[0-9a-f]{2}${t}){5}[0-9a-f]{2}$`)},Xe=/^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\.){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\/([0-9]|[1-2][0-9]|3[0-2])$/,Ge=/^(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}|::|([0-9a-fA-F]{1,4})?::([0-9a-fA-F]{1,4}:?){0,6})\/(12[0-8]|1[01][0-9]|[1-9]?[0-9])$/,Ye=/^$|^(?:[0-9a-zA-Z+/]{4})*(?:(?:[0-9a-zA-Z+/]{2}==)|(?:[0-9a-zA-Z+/]{3}=))?$/,Ke=/^[A-Za-z0-9_-]*$/,Je=/^(?=.{1,253}\.?$)[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[-0-9a-zA-Z]{0,61}[0-9a-zA-Z])?)*\.?$/,et=/^([a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{2,}$/,tt=/^\+[1-9]\d{6,14}$/,nt="(?:(?:\\d\\d[2468][048]|\\d\\d[13579][26]|\\d\\d0[48]|[02468][048]00|[13579][26]00)-02-29|\\d{4}-(?:(?:0[13578]|1[02])-(?:0[1-9]|[12]\\d|3[01])|(?:0[469]|11)-(?:0[1-9]|[12]\\d|30)|(?:02)-(?:0[1-9]|1\\d|2[0-8])))",rt=new RegExp(`^${nt}$`);function at(e){const t="(?:[01]\\d|2[0-3]):[0-5]\\d";return"number"==typeof e.precision?-1===e.precision?`${t}`:0===e.precision?`${t}:[0-5]\\d`:`${t}:[0-5]\\d\\.\\d{${e.precision}}`:`${t}(?::[0-5]\\d(?:\\.\\d+)?)?`}function ot(e){return new RegExp(`^${at(e)}$`)}function it(e){const t=at({precision:e.precision}),n=["Z"];e.local&&n.push(""),e.offset&&n.push("([+-](?:[01]\\d|2[0-3]):[0-5]\\d)");const r=`${t}(?:${n.join("|")})`;return new RegExp(`^${nt}T(?:${r})$`)}const st=e=>new RegExp(`^${e?`[\\s\\S]{${e?.minimum??0},${e?.maximum??""}}`:"[\\s\\S]*"}$`),lt=/^-?\d+n?$/,ct=/^-?\d+$/,ut=/^-?\d+(?:\.\d+)?$/,dt=/^(?:true|false)$/i,pt=/^null$/i,ht=/^undefined$/i,ft=/^[^A-Z]*$/,mt=/^[^a-z]*$/,gt=/^[0-9a-fA-F]*$/;function vt(e,t){return new RegExp(`^[A-Za-z0-9+/]{${e}}${t}$`)}function bt(e){return new RegExp(`^[A-Za-z0-9_-]{${e}}$`)}const 
yt=vt(22,"=="),Ot=bt(22),wt=vt(27,"="),xt=bt(27),kt=vt(43,"="),St=bt(43),_t=vt(64,""),Tt=bt(64),Et=vt(86,"=="),At=bt(86),Ct=Object.freeze(Object.defineProperty({__proto__:null,base64:Ye,base64url:Ke,bigint:lt,boolean:dt,browserEmail:/^[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/,cidrv4:Xe,cidrv6:Ge,cuid:$e,cuid2:Pe,date:rt,datetime:it,domain:et,duration:Re,e164:tt,email:Fe,emoji:He,extendedDuration:/^[-+]?P(?!$)(?:(?:[-+]?\d+Y)|(?:[-+]?\d+[.,]\d+Y$))?(?:(?:[-+]?\d+M)|(?:[-+]?\d+[.,]\d+M$))?(?:(?:[-+]?\d+W)|(?:[-+]?\d+[.,]\d+W$))?(?:(?:[-+]?\d+D)|(?:[-+]?\d+[.,]\d+D$))?(?:T(?=[\d+-])(?:(?:[-+]?\d+H)|(?:[-+]?\d+[.,]\d+H$))?(?:(?:[-+]?\d+M)|(?:[-+]?\d+[.,]\d+M$))?(?:[-+]?\d+(?:[.,]\d+)?S)?)??$/,guid:Le,hex:gt,hostname:Je,html5Email:/^[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/,idnEmail:Qe,integer:ct,ipv4:Ve,ipv6:qe,ksuid:Me,lowercase:ft,mac:We,md5_base64:yt,md5_base64url:Ot,md5_hex:/^[0-9a-fA-F]{32}$/,nanoid:Ne,null:pt,number:ut,rfc5322Email:/^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/,sha1_base64:wt,sha1_base64url:xt,sha1_hex:/^[0-9a-fA-F]{40}$/,sha256_base64:kt,sha256_base64url:St,sha256_hex:/^[0-9a-fA-F]{64}$/,sha384_base64:_t,sha384_base64url:Tt,sha384_hex:/^[0-9a-fA-F]{96}$/,sha512_base64:Et,sha512_base64url:At,sha512_hex:/^[0-9a-fA-F]{128}$/,string:st,time:ot,ulid:De,undefined:ht,unicodeEmail:Ze,uppercase:mt,uuid:Be,uuid4:je,uuid6:Ue,uuid7:ze,xid:Ie},Symbol.toStringTag,{value:"Module"})),$t=t("$ZodCheck",((e,t)=>{var n;e._zod??(e._zod={}),e._zod.def=t,(n=e._zod).onattach??(n.onattach=[])})),Pt={number:"number",bigint:"bigint",object:"date"},Dt=t("$ZodCheckLessThan",((e,t)=>{$t.init(e,t);const n=Pt[typeof t.value];e._zod.onattach.push((e=>{const 
n=e._zod.bag,r=(t.inclusive?n.maximum:n.exclusiveMaximum)??Number.POSITIVE_INFINITY;t.value{(t.inclusive?r.value<=t.value:r.value{$t.init(e,t);const n=Pt[typeof t.value];e._zod.onattach.push((e=>{const n=e._zod.bag,r=(t.inclusive?n.minimum:n.exclusiveMinimum)??Number.NEGATIVE_INFINITY;t.value>r&&(t.inclusive?n.minimum=t.value:n.exclusiveMinimum=t.value)})),e._zod.check=r=>{(t.inclusive?r.value>=t.value:r.value>t.value)||r.issues.push({origin:n,code:"too_small",minimum:"object"==typeof t.value?t.value.getTime():t.value,input:r.value,inclusive:t.inclusive,inst:e,continue:!t.abort})}})),Mt=t("$ZodCheckMultipleOf",((e,t)=>{$t.init(e,t),e._zod.onattach.push((e=>{var n;(n=e._zod.bag).multipleOf??(n.multipleOf=t.value)})),e._zod.check=n=>{if(typeof n.value!=typeof t.value)throw new Error("Cannot mix number and bigint in multiple_of check.");("bigint"==typeof n.value?n.value%t.value===BigInt(0):0===h(n.value,t.value))||n.issues.push({origin:typeof n.value,code:"not_multiple_of",divisor:t.value,input:n.value,inst:e,continue:!t.abort})}})),Nt=t("$ZodCheckNumberFormat",((e,t)=>{$t.init(e,t),t.format=t.format||"float64";const n=t.format?.includes("int"),r=n?"int":"number",[a,o]=D[t.format];e._zod.onattach.push((e=>{const r=e._zod.bag;r.format=t.format,r.minimum=a,r.maximum=o,n&&(r.pattern=ct)})),e._zod.check=i=>{const s=i.value;if(n){if(!Number.isInteger(s))return void i.issues.push({expected:r,format:t.format,code:"invalid_type",continue:!1,input:s,inst:e});if(!Number.isSafeInteger(s))return void(s>0?i.issues.push({input:s,code:"too_big",maximum:Number.MAX_SAFE_INTEGER,note:"Integers must be within the safe integer range.",inst:e,origin:r,inclusive:!0,continue:!t.abort}):i.issues.push({input:s,code:"too_small",minimum:Number.MIN_SAFE_INTEGER,note:"Integers must be within the safe integer 
range.",inst:e,origin:r,inclusive:!0,continue:!t.abort}))}so&&i.issues.push({origin:"number",input:s,code:"too_big",maximum:o,inclusive:!0,inst:e,continue:!t.abort})}})),Rt=t("$ZodCheckBigIntFormat",((e,t)=>{$t.init(e,t);const[n,r]=I[t.format];e._zod.onattach.push((e=>{const a=e._zod.bag;a.format=t.format,a.minimum=n,a.maximum=r})),e._zod.check=a=>{const o=a.value;or&&a.issues.push({origin:"bigint",input:o,code:"too_big",maximum:r,inclusive:!0,inst:e,continue:!t.abort})}})),Lt=t("$ZodCheckMaxSize",((e,t)=>{var n;$t.init(e,t),(n=e._zod.def).when??(n.when=e=>{const t=e.value;return!d(t)&&void 0!==t.size}),e._zod.onattach.push((e=>{const n=e._zod.bag.maximum??Number.POSITIVE_INFINITY;t.maximum{const r=n.value;r.size<=t.maximum||n.issues.push({origin:H(r),code:"too_big",maximum:t.maximum,inclusive:!0,input:r,inst:e,continue:!t.abort})}})),Bt=t("$ZodCheckMinSize",((e,t)=>{var n;$t.init(e,t),(n=e._zod.def).when??(n.when=e=>{const t=e.value;return!d(t)&&void 0!==t.size}),e._zod.onattach.push((e=>{const n=e._zod.bag.minimum??Number.NEGATIVE_INFINITY;t.minimum>n&&(e._zod.bag.minimum=t.minimum)})),e._zod.check=n=>{const r=n.value;r.size>=t.minimum||n.issues.push({origin:H(r),code:"too_small",minimum:t.minimum,inclusive:!0,input:r,inst:e,continue:!t.abort})}})),jt=t("$ZodCheckSizeEquals",((e,t)=>{var n;$t.init(e,t),(n=e._zod.def).when??(n.when=e=>{const t=e.value;return!d(t)&&void 0!==t.size}),e._zod.onattach.push((e=>{const n=e._zod.bag;n.minimum=t.size,n.maximum=t.size,n.size=t.size})),e._zod.check=n=>{const r=n.value,a=r.size;if(a===t.size)return;const o=a>t.size;n.issues.push({origin:H(r),...o?{code:"too_big",maximum:t.size}:{code:"too_small",minimum:t.size},inclusive:!0,exact:!0,input:n.value,inst:e,continue:!t.abort})}})),Ut=t("$ZodCheckMaxLength",((e,t)=>{var n;$t.init(e,t),(n=e._zod.def).when??(n.when=e=>{const t=e.value;return!d(t)&&void 0!==t.length}),e._zod.onattach.push((e=>{const n=e._zod.bag.maximum??Number.POSITIVE_INFINITY;t.maximum{const 
r=n.value;if(r.length<=t.maximum)return;const a=V(r);n.issues.push({origin:a,code:"too_big",maximum:t.maximum,inclusive:!0,input:r,inst:e,continue:!t.abort})}})),zt=t("$ZodCheckMinLength",((e,t)=>{var n;$t.init(e,t),(n=e._zod.def).when??(n.when=e=>{const t=e.value;return!d(t)&&void 0!==t.length}),e._zod.onattach.push((e=>{const n=e._zod.bag.minimum??Number.NEGATIVE_INFINITY;t.minimum>n&&(e._zod.bag.minimum=t.minimum)})),e._zod.check=n=>{const r=n.value;if(r.length>=t.minimum)return;const a=V(r);n.issues.push({origin:a,code:"too_small",minimum:t.minimum,inclusive:!0,input:r,inst:e,continue:!t.abort})}})),Ft=t("$ZodCheckLengthEquals",((e,t)=>{var n;$t.init(e,t),(n=e._zod.def).when??(n.when=e=>{const t=e.value;return!d(t)&&void 0!==t.length}),e._zod.onattach.push((e=>{const n=e._zod.bag;n.minimum=t.length,n.maximum=t.length,n.length=t.length})),e._zod.check=n=>{const r=n.value,a=r.length;if(a===t.length)return;const o=V(r),i=a>t.length;n.issues.push({origin:o,...i?{code:"too_big",maximum:t.length}:{code:"too_small",minimum:t.length},inclusive:!0,exact:!0,input:n.value,inst:e,continue:!t.abort})}})),Zt=t("$ZodCheckStringFormat",((e,t)=>{var n,r;$t.init(e,t),e._zod.onattach.push((e=>{const n=e._zod.bag;n.format=t.format,t.pattern&&(n.patterns??(n.patterns=new 
Set),n.patterns.add(t.pattern))})),t.pattern?(n=e._zod).check??(n.check=n=>{t.pattern.lastIndex=0,t.pattern.test(n.value)||n.issues.push({origin:"string",code:"invalid_format",format:t.format,input:n.value,...t.pattern?{pattern:t.pattern.toString()}:{},inst:e,continue:!t.abort})}):(r=e._zod).check??(r.check=()=>{})})),Qt=t("$ZodCheckRegex",((e,t)=>{Zt.init(e,t),e._zod.check=n=>{t.pattern.lastIndex=0,t.pattern.test(n.value)||n.issues.push({origin:"string",code:"invalid_format",format:"regex",input:n.value,pattern:t.pattern.toString(),inst:e,continue:!t.abort})}})),Ht=t("$ZodCheckLowerCase",((e,t)=>{t.pattern??(t.pattern=ft),Zt.init(e,t)})),Vt=t("$ZodCheckUpperCase",((e,t)=>{t.pattern??(t.pattern=mt),Zt.init(e,t)})),qt=t("$ZodCheckIncludes",((e,t)=>{$t.init(e,t);const n=E(t.includes),r=new RegExp("number"==typeof t.position?`^.{${t.position}}${n}`:n);t.pattern=r,e._zod.onattach.push((e=>{const t=e._zod.bag;t.patterns??(t.patterns=new Set),t.patterns.add(r)})),e._zod.check=n=>{n.value.includes(t.includes,t.position)||n.issues.push({origin:"string",code:"invalid_format",format:"includes",includes:t.includes,input:n.value,inst:e,continue:!t.abort})}})),Wt=t("$ZodCheckStartsWith",((e,t)=>{$t.init(e,t);const n=new RegExp(`^${E(t.prefix)}.*`);t.pattern??(t.pattern=n),e._zod.onattach.push((e=>{const t=e._zod.bag;t.patterns??(t.patterns=new Set),t.patterns.add(n)})),e._zod.check=n=>{n.value.startsWith(t.prefix)||n.issues.push({origin:"string",code:"invalid_format",format:"starts_with",prefix:t.prefix,input:n.value,inst:e,continue:!t.abort})}})),Xt=t("$ZodCheckEndsWith",((e,t)=>{$t.init(e,t);const n=new RegExp(`.*${E(t.suffix)}$`);t.pattern??(t.pattern=n),e._zod.onattach.push((e=>{const t=e._zod.bag;t.patterns??(t.patterns=new Set),t.patterns.add(n)})),e._zod.check=n=>{n.value.endsWith(t.suffix)||n.issues.push({origin:"string",code:"invalid_format",format:"ends_with",suffix:t.suffix,input:n.value,inst:e,continue:!t.abort})}}));function 
Gt(e,t,n){e.issues.length&&t.issues.push(...F(n,e.issues))}const Yt=t("$ZodCheckProperty",((e,t)=>{$t.init(e,t),e._zod.check=e=>{const n=t.schema._zod.run({value:e.value[t.property],issues:[]},{});if(n instanceof Promise)return n.then((n=>Gt(n,e,t.property)));Gt(n,e,t.property)}})),Kt=t("$ZodCheckMimeType",((e,t)=>{$t.init(e,t);const n=new Set(t.mime);e._zod.onattach.push((e=>{e._zod.bag.mime=t.mime})),e._zod.check=r=>{n.has(r.value.type)||r.issues.push({code:"invalid_value",values:t.mime,input:r.value.type,inst:e,continue:!t.abort})}})),Jt=t("$ZodCheckOverwrite",((e,t)=>{$t.init(e,t),e._zod.check=e=>{e.value=t.tx(e.value)}}));class en{constructor(e=[]){this.content=[],this.indent=0,this&&(this.args=e)}indented(e){this.indent+=1,e(this),this.indent-=1}write(e){if("function"==typeof e)return e(this,{execution:"sync"}),void e(this,{execution:"async"});const t=e.split("\n").filter((e=>e)),n=Math.min(...t.map((e=>e.length-e.trimStart().length))),r=t.map((e=>e.slice(n))).map((e=>" ".repeat(2*this.indent)+e));for(const e of r)this.content.push(e)}compile(){const e=Function,t=this?.args;return new e(...t,[...(this?.content??[""]).map((e=>` ${e}`))].join("\n"))}}const tn={major:4,minor:3,patch:5},nn=t("$ZodType",((e,t)=>{var n;e??(e={}),e._zod.def=t,e._zod.bag=e._zod.bag||{},e._zod.version=tn;const a=[...e._zod.def.checks??[]];e._zod.traits.has("$ZodCheck")&&a.unshift(e);for(const t of a)for(const n of t._zod.onattach)n(e);if(0===a.length)(n=e._zod).deferred??(n.deferred=[]),e._zod.deferred?.push((()=>{e._zod.run=e._zod.parse}));else{const t=(e,t,n)=>{let a,o=z(e);for(const i of t){if(i._zod.def.when){if(!i._zod.def.when(e))continue}else if(o)continue;const t=e.issues.length,s=i._zod.check(e);if(s instanceof Promise&&!1===n?.async)throw new r;if(a||s instanceof Promise)a=(a??Promise.resolve()).then((async()=>{await s,e.issues.length!==t&&(o||(o=z(e,t)))}));else{if(e.issues.length===t)continue;o||(o=z(e,t))}}return a?a.then((()=>e)):e},n=(n,o,i)=>{if(z(n))return 
n.aborted=!0,n;const s=t(o,a,i);if(s instanceof Promise){if(!1===i.async)throw new r;return s.then((t=>e._zod.parse(t,i)))}return e._zod.parse(s,i)};e._zod.run=(o,i)=>{if(i.skipChecks)return e._zod.parse(o,i);if("backward"===i.direction){const t=e._zod.parse({value:o.value,issues:[]},{...i,skipChecks:!0});return t instanceof Promise?t.then((e=>n(e,o,i))):n(t,o,i)}const s=e._zod.parse(o,i);if(s instanceof Promise){if(!1===i.async)throw new r;return s.then((e=>t(e,a,i)))}return t(s,a,i)}}m(e,"~standard",(()=>({validate:t=>{try{const n=de(e,t);return n.success?{value:n.data}:{issues:n.error?.issues}}catch(n){return he(e,t).then((e=>e.success?{value:e.data}:{issues:e.error?.issues}))}},vendor:"zod",version:1})))})),rn=t("$ZodString",((e,t)=>{nn.init(e,t),e._zod.pattern=[...e?._zod.bag?.patterns??[]].pop()??st(e._zod.bag),e._zod.parse=(n,r)=>{if(t.coerce)try{n.value=String(n.value)}catch(e){}return"string"==typeof n.value||n.issues.push({expected:"string",code:"invalid_type",input:n.value,inst:e}),n}})),an=t("$ZodStringFormat",((e,t)=>{Zt.init(e,t),rn.init(e,t)})),on=t("$ZodGUID",((e,t)=>{t.pattern??(t.pattern=Le),an.init(e,t)})),sn=t("$ZodUUID",((e,t)=>{if(t.version){const e={v1:1,v2:2,v3:3,v4:4,v5:5,v6:6,v7:7,v8:8}[t.version];if(void 0===e)throw new Error(`Invalid UUID version: "${t.version}"`);t.pattern??(t.pattern=Be(e))}else t.pattern??(t.pattern=Be());an.init(e,t)})),ln=t("$ZodEmail",((e,t)=>{t.pattern??(t.pattern=Fe),an.init(e,t)})),cn=t("$ZodURL",((e,t)=>{an.init(e,t),e._zod.check=n=>{try{const r=n.value.trim(),a=new URL(r);return t.hostname&&(t.hostname.lastIndex=0,t.hostname.test(a.hostname)||n.issues.push({code:"invalid_format",format:"url",note:"Invalid hostname",pattern:t.hostname.source,input:n.value,inst:e,continue:!t.abort})),t.protocol&&(t.protocol.lastIndex=0,t.protocol.test(a.protocol.endsWith(":")?a.protocol.slice(0,-1):a.protocol)||n.issues.push({code:"invalid_format",format:"url",note:"Invalid 
protocol",pattern:t.protocol.source,input:n.value,inst:e,continue:!t.abort})),void(t.normalize?n.value=a.href:n.value=r)}catch(r){n.issues.push({code:"invalid_format",format:"url",input:n.value,inst:e,continue:!t.abort})}}})),un=t("$ZodEmoji",((e,t)=>{t.pattern??(t.pattern=He()),an.init(e,t)})),dn=t("$ZodNanoID",((e,t)=>{t.pattern??(t.pattern=Ne),an.init(e,t)})),pn=t("$ZodCUID",((e,t)=>{t.pattern??(t.pattern=$e),an.init(e,t)})),hn=t("$ZodCUID2",((e,t)=>{t.pattern??(t.pattern=Pe),an.init(e,t)})),fn=t("$ZodULID",((e,t)=>{t.pattern??(t.pattern=De),an.init(e,t)})),mn=t("$ZodXID",((e,t)=>{t.pattern??(t.pattern=Ie),an.init(e,t)})),gn=t("$ZodKSUID",((e,t)=>{t.pattern??(t.pattern=Me),an.init(e,t)})),vn=t("$ZodISODateTime",((e,t)=>{t.pattern??(t.pattern=it(t)),an.init(e,t)})),bn=t("$ZodISODate",((e,t)=>{t.pattern??(t.pattern=rt),an.init(e,t)})),yn=t("$ZodISOTime",((e,t)=>{t.pattern??(t.pattern=ot(t)),an.init(e,t)})),On=t("$ZodISODuration",((e,t)=>{t.pattern??(t.pattern=Re),an.init(e,t)})),wn=t("$ZodIPv4",((e,t)=>{t.pattern??(t.pattern=Ve),an.init(e,t),e._zod.bag.format="ipv4"})),xn=t("$ZodIPv6",((e,t)=>{t.pattern??(t.pattern=qe),an.init(e,t),e._zod.bag.format="ipv6",e._zod.check=n=>{try{new URL(`http://[${n.value}]`)}catch{n.issues.push({code:"invalid_format",format:"ipv6",input:n.value,inst:e,continue:!t.abort})}}})),kn=t("$ZodMAC",((e,t)=>{t.pattern??(t.pattern=We(t.delimiter)),an.init(e,t),e._zod.bag.format="mac"})),Sn=t("$ZodCIDRv4",((e,t)=>{t.pattern??(t.pattern=Xe),an.init(e,t)})),_n=t("$ZodCIDRv6",((e,t)=>{t.pattern??(t.pattern=Ge),an.init(e,t),e._zod.check=n=>{const r=n.value.split("/");try{if(2!==r.length)throw new Error;const[e,t]=r;if(!t)throw new Error;const n=Number(t);if(`${n}`!==t)throw new Error;if(n<0||n>128)throw new Error;new URL(`http://[${e}]`)}catch{n.issues.push({code:"invalid_format",format:"cidrv6",input:n.value,inst:e,continue:!t.abort})}}}));function Tn(e){if(""===e)return!0;if(e.length%4!=0)return!1;try{return atob(e),!0}catch{return!1}}const 
En=t("$ZodBase64",((e,t)=>{t.pattern??(t.pattern=Ye),an.init(e,t),e._zod.bag.contentEncoding="base64",e._zod.check=n=>{Tn(n.value)||n.issues.push({code:"invalid_format",format:"base64",input:n.value,inst:e,continue:!t.abort})}}));function An(e){if(!Ke.test(e))return!1;const t=e.replace(/[-_]/g,(e=>"-"===e?"+":"/"));return Tn(t.padEnd(4*Math.ceil(t.length/4),"="))}const Cn=t("$ZodBase64URL",((e,t)=>{t.pattern??(t.pattern=Ke),an.init(e,t),e._zod.bag.contentEncoding="base64url",e._zod.check=n=>{An(n.value)||n.issues.push({code:"invalid_format",format:"base64url",input:n.value,inst:e,continue:!t.abort})}})),$n=t("$ZodE164",((e,t)=>{t.pattern??(t.pattern=tt),an.init(e,t)}));function Pn(e,t=null){try{const n=e.split(".");if(3!==n.length)return!1;const[r]=n;if(!r)return!1;const a=JSON.parse(atob(r));return!("typ"in a&&"JWT"!==a?.typ||!a.alg||t&&(!("alg"in a)||a.alg!==t))}catch{return!1}}const Dn=t("$ZodJWT",((e,t)=>{an.init(e,t),e._zod.check=n=>{Pn(n.value,t.alg)||n.issues.push({code:"invalid_format",format:"jwt",input:n.value,inst:e,continue:!t.abort})}})),In=t("$ZodCustomStringFormat",((e,t)=>{an.init(e,t),e._zod.check=n=>{t.fn(n.value)||n.issues.push({code:"invalid_format",format:t.format,input:n.value,inst:e,continue:!t.abort})}})),Mn=t("$ZodNumber",((e,t)=>{nn.init(e,t),e._zod.pattern=e._zod.bag.pattern??ut,e._zod.parse=(n,r)=>{if(t.coerce)try{n.value=Number(n.value)}catch(e){}const a=n.value;if("number"==typeof a&&!Number.isNaN(a)&&Number.isFinite(a))return n;const o="number"==typeof a?Number.isNaN(a)?"NaN":Number.isFinite(a)?void 0:"Infinity":void 0;return n.issues.push({expected:"number",code:"invalid_type",input:a,inst:e,...o?{received:o}:{}}),n}})),Nn=t("$ZodNumberFormat",((e,t)=>{Nt.init(e,t),Mn.init(e,t)})),Rn=t("$ZodBoolean",((e,t)=>{nn.init(e,t),e._zod.pattern=dt,e._zod.parse=(n,r)=>{if(t.coerce)try{n.value=Boolean(n.value)}catch(e){}const a=n.value;return"boolean"==typeof 
a||n.issues.push({expected:"boolean",code:"invalid_type",input:a,inst:e}),n}})),Ln=t("$ZodBigInt",((e,t)=>{nn.init(e,t),e._zod.pattern=lt,e._zod.parse=(n,r)=>{if(t.coerce)try{n.value=BigInt(n.value)}catch(e){}return"bigint"==typeof n.value||n.issues.push({expected:"bigint",code:"invalid_type",input:n.value,inst:e}),n}})),Bn=t("$ZodBigIntFormat",((e,t)=>{Rt.init(e,t),Ln.init(e,t)})),jn=t("$ZodSymbol",((e,t)=>{nn.init(e,t),e._zod.parse=(t,n)=>{const r=t.value;return"symbol"==typeof r||t.issues.push({expected:"symbol",code:"invalid_type",input:r,inst:e}),t}})),Un=t("$ZodUndefined",((e,t)=>{nn.init(e,t),e._zod.pattern=ht,e._zod.values=new Set([void 0]),e._zod.optin="optional",e._zod.optout="optional",e._zod.parse=(t,n)=>{const r=t.value;return void 0===r||t.issues.push({expected:"undefined",code:"invalid_type",input:r,inst:e}),t}})),zn=t("$ZodNull",((e,t)=>{nn.init(e,t),e._zod.pattern=pt,e._zod.values=new Set([null]),e._zod.parse=(t,n)=>{const r=t.value;return null===r||t.issues.push({expected:"null",code:"invalid_type",input:r,inst:e}),t}})),Fn=t("$ZodAny",((e,t)=>{nn.init(e,t),e._zod.parse=e=>e})),Zn=t("$ZodUnknown",((e,t)=>{nn.init(e,t),e._zod.parse=e=>e})),Qn=t("$ZodNever",((e,t)=>{nn.init(e,t),e._zod.parse=(t,n)=>(t.issues.push({expected:"never",code:"invalid_type",input:t.value,inst:e}),t)})),Hn=t("$ZodVoid",((e,t)=>{nn.init(e,t),e._zod.parse=(t,n)=>{const r=t.value;return void 0===r||t.issues.push({expected:"void",code:"invalid_type",input:r,inst:e}),t}})),Vn=t("$ZodDate",((e,t)=>{nn.init(e,t),e._zod.parse=(n,r)=>{if(t.coerce)try{n.value=new Date(n.value)}catch(e){}const a=n.value,o=a instanceof Date;return o&&!Number.isNaN(a.getTime())||n.issues.push({expected:"date",code:"invalid_type",input:a,...o?{received:"Invalid Date"}:{},inst:e}),n}}));function qn(e,t,n){e.issues.length&&t.issues.push(...F(n,e.issues)),t.value[n]=e.value}const Wn=t("$ZodArray",((e,t)=>{nn.init(e,t),e._zod.parse=(n,r)=>{const a=n.value;if(!Array.isArray(a))return 
n.issues.push({expected:"array",code:"invalid_type",input:a,inst:e}),n;n.value=Array(a.length);const o=[];for(let e=0;eqn(t,n,e)))):qn(s,n,e)}return o.length?Promise.all(o).then((()=>n)):n}}));function Xn(e,t,n,r,a){if(e.issues.length){if(a&&!(n in r))return;t.issues.push(...F(n,e.issues))}void 0===e.value?n in r&&(t.value[n]=void 0):t.value[n]=e.value}function Gn(e){const t=Object.keys(e.shape);for(const n of t)if(!e.shape?.[n]?._zod?.traits?.has("$ZodType"))throw new Error(`Invalid element at key "${n}": expected a Zod schema`);const n=P(e.shape);return{...e,keys:t,keySet:new Set(t),numKeys:t.length,optionalKeys:new Set(n)}}function Yn(e,t,n,r,a,o){const i=[],s=a.keySet,l=a.catchall._zod,c=l.def.type,u="optional"===l.optout;for(const a in t){if(s.has(a))continue;if("never"===c){i.push(a);continue}const o=l.run({value:t[a],issues:[]},r);o instanceof Promise?e.push(o.then((e=>Xn(e,n,a,t,u)))):Xn(o,n,a,t,u)}return i.length&&n.issues.push({code:"unrecognized_keys",keys:i,input:t,inst:o}),e.length?Promise.all(e).then((()=>n)):n}const Kn=t("$ZodObject",((e,t)=>{nn.init(e,t);const n=Object.getOwnPropertyDescriptor(t,"shape");if(!n?.get){const e=t.shape;Object.defineProperty(t,"shape",{get:()=>{const n={...e};return Object.defineProperty(t,"shape",{value:n}),n}})}const r=u((()=>Gn(t)));m(e._zod,"propValues",(()=>{const e=t.shape,n={};for(const t in e){const r=e[t]._zod;if(r.values){n[t]??(n[t]=new Set);for(const e of r.values)n[t].add(e)}}return n}));const a=w,o=t.catchall;let i;e._zod.parse=(t,n)=>{i??(i=r.value);const s=t.value;if(!a(s))return t.issues.push({expected:"object",code:"invalid_type",input:s,inst:e}),t;t.value={};const l=[],c=i.shape;for(const e of i.keys){const r=c[e],a="optional"===r._zod.optout,o=r._zod.run({value:s[e],issues:[]},n);o instanceof Promise?l.push(o.then((n=>Xn(n,t,e,s,a)))):Xn(o,t,e,s,a)}return o?Yn(l,s,t,n,r.value,e):l.length?Promise.all(l).then((()=>t)):t}})),Jn=t("$ZodObjectJIT",((e,t)=>{Kn.init(e,t);const 
n=e._zod.parse,r=u((()=>Gn(t)));let a;const i=w,s=!o.jitless,l=s&&x.value,c=t.catchall;let d;e._zod.parse=(o,u)=>{d??(d=r.value);const p=o.value;return i(p)?s&&l&&!1===u?.async&&!0!==u.jitless?(a||(a=(e=>{const t=new en(["shape","payload","ctx"]),n=r.value,a=e=>{const t=b(e);return`shape[${t}]._zod.run({ value: input[${t}], issues: [] }, ctx)`};t.write("const input = payload.value;");const o=Object.create(null);let i=0;for(const e of n.keys)o[e]="key_"+i++;t.write("const newResult = {};");for(const r of n.keys){const n=o[r],i=b(r),s=e[r],l="optional"===s?._zod?.optout;t.write(`const ${n} = ${a(r)};`),l?t.write(`\n if (${n}.issues.length) {\n if (${i} in input) {\n payload.issues = payload.issues.concat(${n}.issues.map(iss => ({\n ...iss,\n path: iss.path ? [${i}, ...iss.path] : [${i}]\n })));\n }\n }\n \n if (${n}.value === undefined) {\n if (${i} in input) {\n newResult[${i}] = undefined;\n }\n } else {\n newResult[${i}] = ${n}.value;\n }\n \n `):t.write(`\n if (${n}.issues.length) {\n payload.issues = payload.issues.concat(${n}.issues.map(iss => ({\n ...iss,\n path: iss.path ? 
[${i}, ...iss.path] : [${i}]\n })));\n }\n \n if (${n}.value === undefined) {\n if (${i} in input) {\n newResult[${i}] = undefined;\n }\n } else {\n newResult[${i}] = ${n}.value;\n }\n \n `)}t.write("payload.value = newResult;"),t.write("return payload;");const s=t.compile();return(t,n)=>s(e,t,n)})(t.shape)),o=a(o,u),c?Yn([],p,o,u,d,e):o):n(o,u):(o.issues.push({expected:"object",code:"invalid_type",input:p,inst:e}),o)}}));function er(e,t,n,r){for(const n of e)if(0===n.issues.length)return t.value=n.value,t;const a=e.filter((e=>!z(e)));return 1===a.length?(t.value=a[0].value,a[0]):(t.issues.push({code:"invalid_union",input:t.value,inst:n,errors:e.map((e=>e.issues.map((e=>Q(e,r,i())))))}),t)}const tr=t("$ZodUnion",((e,t)=>{nn.init(e,t),m(e._zod,"optin",(()=>t.options.some((e=>"optional"===e._zod.optin))?"optional":void 0)),m(e._zod,"optout",(()=>t.options.some((e=>"optional"===e._zod.optout))?"optional":void 0)),m(e._zod,"values",(()=>{if(t.options.every((e=>e._zod.values)))return new Set(t.options.flatMap((e=>Array.from(e._zod.values))))})),m(e._zod,"pattern",(()=>{if(t.options.every((e=>e._zod.pattern))){const e=t.options.map((e=>e._zod.pattern));return new RegExp(`^(${e.map((e=>p(e.source))).join("|")})$`)}}));const n=1===t.options.length,r=t.options[0]._zod.run;e._zod.parse=(a,o)=>{if(n)return r(a,o);let i=!1;const s=[];for(const e of t.options){const t=e._zod.run({value:a.value,issues:[]},o);if(t instanceof Promise)s.push(t),i=!0;else{if(0===t.issues.length)return t;s.push(t)}}return i?Promise.all(s).then((t=>er(t,a,e,o))):er(s,a,e,o)}}));function nr(e,t,n,r){const a=e.filter((e=>0===e.issues.length));return 1===a.length?(t.value=a[0].value,t):(0===a.length?t.issues.push({code:"invalid_union",input:t.value,inst:n,errors:e.map((e=>e.issues.map((e=>Q(e,r,i())))))}):t.issues.push({code:"invalid_union",input:t.value,inst:n,errors:[],inclusive:!1}),t)}const rr=t("$ZodXor",((e,t)=>{tr.init(e,t),t.inclusive=!1;const 
n=1===t.options.length,r=t.options[0]._zod.run;e._zod.parse=(a,o)=>{if(n)return r(a,o);let i=!1;const s=[];for(const e of t.options){const t=e._zod.run({value:a.value,issues:[]},o);t instanceof Promise?(s.push(t),i=!0):s.push(t)}return i?Promise.all(s).then((t=>nr(t,a,e,o))):nr(s,a,e,o)}})),ar=t("$ZodDiscriminatedUnion",((e,t)=>{t.inclusive=!1,tr.init(e,t);const n=e._zod.parse;m(e._zod,"propValues",(()=>{const e={};for(const n of t.options){const r=n._zod.propValues;if(!r||0===Object.keys(r).length)throw new Error(`Invalid discriminated union option at index "${t.options.indexOf(n)}"`);for(const[t,n]of Object.entries(r)){e[t]||(e[t]=new Set);for(const r of n)e[t].add(r)}}return e}));const r=u((()=>{const e=t.options,n=new Map;for(const r of e){const e=r._zod.propValues?.[t.discriminator];if(!e||0===e.size)throw new Error(`Invalid discriminated union option at index "${t.options.indexOf(r)}"`);for(const t of e){if(n.has(t))throw new Error(`Duplicate discriminator value "${String(t)}"`);n.set(t,r)}}return n}));e._zod.parse=(a,o)=>{const i=a.value;if(!w(i))return a.issues.push({code:"invalid_type",expected:"object",input:i,inst:e}),a;const s=r.value.get(i?.[t.discriminator]);return s?s._zod.run(a,o):t.unionFallback?n(a,o):(a.issues.push({code:"invalid_union",errors:[],note:"No matching discriminator",discriminator:t.discriminator,input:i,path:[t.discriminator],inst:e}),a)}})),or=t("$ZodIntersection",((e,t)=>{nn.init(e,t),e._zod.parse=(e,n)=>{const r=e.value,a=t.left._zod.run({value:r,issues:[]},n),o=t.right._zod.run({value:r,issues:[]},n);return a instanceof Promise||o instanceof Promise?Promise.all([a,o]).then((([t,n])=>sr(e,t,n))):sr(e,a,o)}}));function ir(e,t){if(e===t)return{valid:!0,data:e};if(e instanceof Date&&t instanceof Date&&+e==+t)return{valid:!0,data:e};if(k(e)&&k(t)){const n=Object.keys(t),r=Object.keys(e).filter((e=>-1!==n.indexOf(e))),a={...e,...t};for(const n of r){const 
r=ir(e[n],t[n]);if(!r.valid)return{valid:!1,mergeErrorPath:[n,...r.mergeErrorPath]};a[n]=r.data}return{valid:!0,data:a}}if(Array.isArray(e)&&Array.isArray(t)){if(e.length!==t.length)return{valid:!1,mergeErrorPath:[]};const n=[];for(let r=0;re.l&&e.r)).map((([e])=>e));if(o.length&&a&&e.issues.push({...a,keys:o}),z(e))return e;const i=ir(t.value,n.value);if(!i.valid)throw new Error(`Unmergable intersection. Error path: ${JSON.stringify(i.mergeErrorPath)}`);return e.value=i.data,e}const lr=t("$ZodTuple",((e,t)=>{nn.init(e,t);const n=t.items;e._zod.parse=(r,a)=>{const o=r.value;if(!Array.isArray(o))return r.issues.push({input:o,inst:e,expected:"tuple",code:"invalid_type"}),r;r.value=[];const i=[],s=[...n].reverse().findIndex((e=>"optional"!==e._zod.optin)),l=-1===s?0:n.length-s;if(!t.rest){const t=o.length>n.length,a=o.length=o.length&&c>=l)continue;const t=e._zod.run({value:o[c],issues:[]},a);t instanceof Promise?i.push(t.then((e=>cr(e,r,c)))):cr(t,r,c)}if(t.rest){const e=o.slice(n.length);for(const n of e){c++;const e=t.rest._zod.run({value:n,issues:[]},a);e instanceof Promise?i.push(e.then((e=>cr(e,r,c)))):cr(e,r,c)}}return i.length?Promise.all(i).then((()=>r)):r}}));function cr(e,t,n){e.issues.length&&t.issues.push(...F(n,e.issues)),t.value[n]=e.value}const ur=t("$ZodRecord",((e,t)=>{nn.init(e,t),e._zod.parse=(n,r)=>{const a=n.value;if(!k(a))return n.issues.push({expected:"record",code:"invalid_type",input:a,inst:e}),n;const o=[],s=t.keyType._zod.values;if(s){n.value={};const i=new Set;for(const e of s)if("string"==typeof e||"number"==typeof e||"symbol"==typeof e){i.add("number"==typeof e?e.toString():e);const s=t.valueType._zod.run({value:a[e],issues:[]},r);s instanceof Promise?o.push(s.then((t=>{t.issues.length&&n.issues.push(...F(e,t.issues)),n.value[e]=t.value}))):(s.issues.length&&n.issues.push(...F(e,s.issues)),n.value[e]=s.value)}let l;for(const e in 
a)i.has(e)||(l=l??[],l.push(e));l&&l.length>0&&n.issues.push({code:"unrecognized_keys",input:a,inst:e,keys:l})}else{n.value={};for(const s of Reflect.ownKeys(a)){if("__proto__"===s)continue;let l=t.keyType._zod.run({value:s,issues:[]},r);if(l instanceof Promise)throw new Error("Async schemas not supported in object keys currently");if("string"==typeof s&&ut.test(s)&&l.issues.length&&l.issues.some((e=>"invalid_type"===e.code&&"number"===e.expected))){const e=t.keyType._zod.run({value:Number(s),issues:[]},r);if(e instanceof Promise)throw new Error("Async schemas not supported in object keys currently");0===e.issues.length&&(l=e)}if(l.issues.length){"loose"===t.mode?n.value[s]=a[s]:n.issues.push({code:"invalid_key",origin:"record",issues:l.issues.map((e=>Q(e,r,i()))),input:s,path:[s],inst:e});continue}const c=t.valueType._zod.run({value:a[s],issues:[]},r);c instanceof Promise?o.push(c.then((e=>{e.issues.length&&n.issues.push(...F(s,e.issues)),n.value[l.value]=e.value}))):(c.issues.length&&n.issues.push(...F(s,c.issues)),n.value[l.value]=c.value)}}return o.length?Promise.all(o).then((()=>n)):n}})),dr=t("$ZodMap",((e,t)=>{nn.init(e,t),e._zod.parse=(n,r)=>{const a=n.value;if(!(a instanceof Map))return n.issues.push({expected:"map",code:"invalid_type",input:a,inst:e}),n;const o=[];n.value=new Map;for(const[i,s]of a){const l=t.keyType._zod.run({value:i,issues:[]},r),c=t.valueType._zod.run({value:s,issues:[]},r);l instanceof Promise||c instanceof Promise?o.push(Promise.all([l,c]).then((([t,o])=>{pr(t,o,n,i,a,e,r)}))):pr(l,c,n,i,a,e,r)}return o.length?Promise.all(o).then((()=>n)):n}}));function pr(e,t,n,r,a,o,s){e.issues.length&&(_.has(typeof r)?n.issues.push(...F(r,e.issues)):n.issues.push({code:"invalid_key",origin:"map",input:a,inst:o,issues:e.issues.map((e=>Q(e,s,i())))})),t.issues.length&&(_.has(typeof 
r)?n.issues.push(...F(r,t.issues)):n.issues.push({origin:"map",code:"invalid_element",input:a,inst:o,key:r,issues:t.issues.map((e=>Q(e,s,i())))})),n.value.set(e.value,t.value)}const hr=t("$ZodSet",((e,t)=>{nn.init(e,t),e._zod.parse=(n,r)=>{const a=n.value;if(!(a instanceof Set))return n.issues.push({input:a,inst:e,expected:"set",code:"invalid_type"}),n;const o=[];n.value=new Set;for(const e of a){const a=t.valueType._zod.run({value:e,issues:[]},r);a instanceof Promise?o.push(a.then((e=>fr(e,n)))):fr(a,n)}return o.length?Promise.all(o).then((()=>n)):n}}));function fr(e,t){e.issues.length&&t.issues.push(...e.issues),t.value.add(e.value)}const mr=t("$ZodEnum",((e,t)=>{nn.init(e,t);const n=s(t.entries),r=new Set(n);e._zod.values=r,e._zod.pattern=new RegExp(`^(${n.filter((e=>_.has(typeof e))).map((e=>"string"==typeof e?E(e):e.toString())).join("|")})$`),e._zod.parse=(t,a)=>{const o=t.value;return r.has(o)||t.issues.push({code:"invalid_value",values:n,input:o,inst:e}),t}})),gr=t("$ZodLiteral",((e,t)=>{if(nn.init(e,t),0===t.values.length)throw new Error("Cannot create literal schema with no valid values");const n=new Set(t.values);e._zod.values=n,e._zod.pattern=new RegExp(`^(${t.values.map((e=>"string"==typeof e?E(e):e?E(e.toString()):String(e))).join("|")})$`),e._zod.parse=(r,a)=>{const o=r.value;return n.has(o)||r.issues.push({code:"invalid_value",values:t.values,input:o,inst:e}),r}})),vr=t("$ZodFile",((e,t)=>{nn.init(e,t),e._zod.parse=(t,n)=>{const r=t.value;return r instanceof File||t.issues.push({expected:"file",code:"invalid_type",input:r,inst:e}),t}})),br=t("$ZodTransform",((e,t)=>{nn.init(e,t),e._zod.parse=(n,o)=>{if("backward"===o.direction)throw new a(e.constructor.name);const i=t.transform(n.value,n);if(o.async)return(i instanceof Promise?i:Promise.resolve(i)).then((e=>(n.value=e,n)));if(i instanceof Promise)throw new r;return n.value=i,n}}));function yr(e,t){return e.issues.length&&void 0===t?{issues:[],value:void 0}:e}const 
Or=t("$ZodOptional",((e,t)=>{nn.init(e,t),e._zod.optin="optional",e._zod.optout="optional",m(e._zod,"values",(()=>t.innerType._zod.values?new Set([...t.innerType._zod.values,void 0]):void 0)),m(e._zod,"pattern",(()=>{const e=t.innerType._zod.pattern;return e?new RegExp(`^(${p(e.source)})?$`):void 0})),e._zod.parse=(e,n)=>{if("optional"===t.innerType._zod.optin){const r=t.innerType._zod.run(e,n);return r instanceof Promise?r.then((t=>yr(t,e.value))):yr(r,e.value)}return void 0===e.value?e:t.innerType._zod.run(e,n)}})),wr=t("$ZodExactOptional",((e,t)=>{Or.init(e,t),m(e._zod,"values",(()=>t.innerType._zod.values)),m(e._zod,"pattern",(()=>t.innerType._zod.pattern)),e._zod.parse=(e,n)=>t.innerType._zod.run(e,n)})),xr=t("$ZodNullable",((e,t)=>{nn.init(e,t),m(e._zod,"optin",(()=>t.innerType._zod.optin)),m(e._zod,"optout",(()=>t.innerType._zod.optout)),m(e._zod,"pattern",(()=>{const e=t.innerType._zod.pattern;return e?new RegExp(`^(${p(e.source)}|null)$`):void 0})),m(e._zod,"values",(()=>t.innerType._zod.values?new Set([...t.innerType._zod.values,null]):void 0)),e._zod.parse=(e,n)=>null===e.value?e:t.innerType._zod.run(e,n)})),kr=t("$ZodDefault",((e,t)=>{nn.init(e,t),e._zod.optin="optional",m(e._zod,"values",(()=>t.innerType._zod.values)),e._zod.parse=(e,n)=>{if("backward"===n.direction)return t.innerType._zod.run(e,n);if(void 0===e.value)return e.value=t.defaultValue,e;const r=t.innerType._zod.run(e,n);return r instanceof Promise?r.then((e=>Sr(e,t))):Sr(r,t)}}));function Sr(e,t){return void 0===e.value&&(e.value=t.defaultValue),e}const _r=t("$ZodPrefault",((e,t)=>{nn.init(e,t),e._zod.optin="optional",m(e._zod,"values",(()=>t.innerType._zod.values)),e._zod.parse=(e,n)=>("backward"===n.direction||void 0===e.value&&(e.value=t.defaultValue),t.innerType._zod.run(e,n))})),Tr=t("$ZodNonOptional",((e,t)=>{nn.init(e,t),m(e._zod,"values",(()=>{const e=t.innerType._zod.values;return e?new Set([...e].filter((e=>void 0!==e))):void 0})),e._zod.parse=(n,r)=>{const 
a=t.innerType._zod.run(n,r);return a instanceof Promise?a.then((t=>Er(t,e))):Er(a,e)}}));function Er(e,t){return e.issues.length||void 0!==e.value||e.issues.push({code:"invalid_type",expected:"nonoptional",input:e.value,inst:t}),e}const Ar=t("$ZodSuccess",((e,t)=>{nn.init(e,t),e._zod.parse=(e,n)=>{if("backward"===n.direction)throw new a("ZodSuccess");const r=t.innerType._zod.run(e,n);return r instanceof Promise?r.then((t=>(e.value=0===t.issues.length,e))):(e.value=0===r.issues.length,e)}})),Cr=t("$ZodCatch",((e,t)=>{nn.init(e,t),m(e._zod,"optin",(()=>t.innerType._zod.optin)),m(e._zod,"optout",(()=>t.innerType._zod.optout)),m(e._zod,"values",(()=>t.innerType._zod.values)),e._zod.parse=(e,n)=>{if("backward"===n.direction)return t.innerType._zod.run(e,n);const r=t.innerType._zod.run(e,n);return r instanceof Promise?r.then((r=>(e.value=r.value,r.issues.length&&(e.value=t.catchValue({...e,error:{issues:r.issues.map((e=>Q(e,n,i())))},input:e.value}),e.issues=[]),e))):(e.value=r.value,r.issues.length&&(e.value=t.catchValue({...e,error:{issues:r.issues.map((e=>Q(e,n,i())))},input:e.value}),e.issues=[]),e)}})),$r=t("$ZodNaN",((e,t)=>{nn.init(e,t),e._zod.parse=(t,n)=>("number"==typeof t.value&&Number.isNaN(t.value)||t.issues.push({input:t.value,inst:e,expected:"nan",code:"invalid_type"}),t)})),Pr=t("$ZodPipe",((e,t)=>{nn.init(e,t),m(e._zod,"values",(()=>t.in._zod.values)),m(e._zod,"optin",(()=>t.in._zod.optin)),m(e._zod,"optout",(()=>t.out._zod.optout)),m(e._zod,"propValues",(()=>t.in._zod.propValues)),e._zod.parse=(e,n)=>{if("backward"===n.direction){const r=t.out._zod.run(e,n);return r instanceof Promise?r.then((e=>Dr(e,t.in,n))):Dr(r,t.in,n)}const r=t.in._zod.run(e,n);return r instanceof Promise?r.then((e=>Dr(e,t.out,n))):Dr(r,t.out,n)}}));function Dr(e,t,n){return e.issues.length?(e.aborted=!0,e):t._zod.run({value:e.value,issues:e.issues},n)}const 
Ir=t("$ZodCodec",((e,t)=>{nn.init(e,t),m(e._zod,"values",(()=>t.in._zod.values)),m(e._zod,"optin",(()=>t.in._zod.optin)),m(e._zod,"optout",(()=>t.out._zod.optout)),m(e._zod,"propValues",(()=>t.in._zod.propValues)),e._zod.parse=(e,n)=>{if("forward"===(n.direction||"forward")){const r=t.in._zod.run(e,n);return r instanceof Promise?r.then((e=>Mr(e,t,n))):Mr(r,t,n)}{const r=t.out._zod.run(e,n);return r instanceof Promise?r.then((e=>Mr(e,t,n))):Mr(r,t,n)}}}));function Mr(e,t,n){if(e.issues.length)return e.aborted=!0,e;if("forward"===(n.direction||"forward")){const r=t.transform(e.value,e);return r instanceof Promise?r.then((r=>Nr(e,r,t.out,n))):Nr(e,r,t.out,n)}{const r=t.reverseTransform(e.value,e);return r instanceof Promise?r.then((r=>Nr(e,r,t.in,n))):Nr(e,r,t.in,n)}}function Nr(e,t,n,r){return e.issues.length?(e.aborted=!0,e):n._zod.run({value:t,issues:e.issues},r)}const Rr=t("$ZodReadonly",((e,t)=>{nn.init(e,t),m(e._zod,"propValues",(()=>t.innerType._zod.propValues)),m(e._zod,"values",(()=>t.innerType._zod.values)),m(e._zod,"optin",(()=>t.innerType?._zod?.optin)),m(e._zod,"optout",(()=>t.innerType?._zod?.optout)),e._zod.parse=(e,n)=>{if("backward"===n.direction)return t.innerType._zod.run(e,n);const r=t.innerType._zod.run(e,n);return r instanceof Promise?r.then(Lr):Lr(r)}}));function Lr(e){return e.value=Object.freeze(e.value),e}const Br=t("$ZodTemplateLiteral",((e,t)=>{nn.init(e,t);const n=[];for(const e of t.parts)if("object"==typeof e&&null!==e){if(!e._zod.pattern)throw new Error(`Invalid template literal part, no pattern found: ${[...e._zod.traits].shift()}`);const t=e._zod.pattern instanceof RegExp?e._zod.pattern.source:e._zod.pattern;if(!t)throw new Error(`Invalid template literal part: ${e._zod.traits}`);const r=t.startsWith("^")?1:0,a=t.endsWith("$")?t.length-1:t.length;n.push(t.slice(r,a))}else{if(null!==e&&!T.has(typeof e))throw new Error(`Invalid template literal part: ${e}`);n.push(E(`${e}`))}e._zod.pattern=new 
RegExp(`^${n.join("")}$`),e._zod.parse=(n,r)=>"string"!=typeof n.value?(n.issues.push({input:n.value,inst:e,expected:"string",code:"invalid_type"}),n):(e._zod.pattern.lastIndex=0,e._zod.pattern.test(n.value)||n.issues.push({input:n.value,inst:e,code:"invalid_format",format:t.format??"template_literal",pattern:e._zod.pattern.source}),n)})),jr=t("$ZodFunction",((e,t)=>(nn.init(e,t),e._def=t,e._zod.def=t,e.implement=t=>{if("function"!=typeof t)throw new Error("implement() must be called with a function");return function(...n){const r=e._def.input?se(e._def.input,n):n,a=Reflect.apply(t,this,r);return e._def.output?se(e._def.output,a):a}},e.implementAsync=t=>{if("function"!=typeof t)throw new Error("implementAsync() must be called with a function");return async function(...n){const r=e._def.input?await ce(e._def.input,n):n,a=await Reflect.apply(t,this,r);return e._def.output?await ce(e._def.output,a):a}},e._zod.parse=(t,n)=>{if("function"!=typeof t.value)return t.issues.push({code:"invalid_type",expected:"function",input:t.value,inst:e}),t;const r=e._def.output&&"promise"===e._def.output._zod.def.type;return t.value=r?e.implementAsync(t.value):e.implement(t.value),t},e.input=(...t)=>{const n=e.constructor;return Array.isArray(t[0])?new n({type:"function",input:new lr({type:"tuple",items:t[0],rest:t[1]}),output:e._def.output}):new n({type:"function",input:t[0],output:e._def.output})},e.output=t=>new(0,e.constructor)({type:"function",input:e._def.input,output:t}),e))),Ur=t("$ZodPromise",((e,t)=>{nn.init(e,t),e._zod.parse=(e,n)=>Promise.resolve(e.value).then((e=>t.innerType._zod.run({value:e,issues:[]},n)))})),zr=t("$ZodLazy",((e,t)=>{nn.init(e,t),m(e._zod,"innerType",(()=>t.getter())),m(e._zod,"pattern",(()=>e._zod.innerType?._zod?.pattern)),m(e._zod,"propValues",(()=>e._zod.innerType?._zod?.propValues)),m(e._zod,"optin",(()=>e._zod.innerType?._zod?.optin??void 0)),m(e._zod,"optout",(()=>e._zod.innerType?._zod?.optout??void 
0)),e._zod.parse=(t,n)=>e._zod.innerType._zod.run(t,n)})),Fr=t("$ZodCustom",((e,t)=>{$t.init(e,t),nn.init(e,t),e._zod.parse=(e,t)=>e,e._zod.check=n=>{const r=n.value,a=t.fn(r);if(a instanceof Promise)return a.then((t=>Zr(t,n,r,e)));Zr(a,n,r,e)}}));function Zr(e,t,n,r){if(!e){const e={code:"custom",input:n,inst:r,path:[...r._zod.def.path??[]],continue:!r._zod.def.abort};r._zod.def.params&&(e.params=r._zod.def.params),t.issues.push(W(e))}}const Qr=()=>{const e={string:{unit:"حرف",verb:"أن يحوي"},file:{unit:"بايت",verb:"أن يحوي"},array:{unit:"عنصر",verb:"أن يحوي"},set:{unit:"عنصر",verb:"أن يحوي"}};function t(t){return e[t]??null}const n={regex:"مدخل",email:"بريد إلكتروني",url:"رابط",emoji:"إيموجي",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"تاريخ ووقت بمعيار ISO",date:"تاريخ بمعيار ISO",time:"وقت بمعيار ISO",duration:"مدة بمعيار ISO",ipv4:"عنوان IPv4",ipv6:"عنوان IPv6",cidrv4:"مدى عناوين بصيغة IPv4",cidrv6:"مدى عناوين بصيغة IPv6",base64:"نَص بترميز base64-encoded",base64url:"نَص بترميز base64url-encoded",json_string:"نَص على هيئة JSON",e164:"رقم هاتف بمعيار E.164",jwt:"JWT",template_literal:"مدخل"},r={nan:"NaN"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`مدخلات غير مقبولة: يفترض إدخال instanceof ${e.expected}، ولكن تم إدخال ${a}`:`مدخلات غير مقبولة: يفترض إدخال ${t}، ولكن تم إدخال ${a}`}case"invalid_value":return 1===e.values.length?`مدخلات غير مقبولة: يفترض إدخال ${$(e.values[0])}`:`اختيار غير مقبول: يتوقع انتقاء أحد هذه الخيارات: ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?` أكبر من اللازم: يفترض أن تكون ${e.origin??"القيمة"} ${n} ${e.maximum.toString()} ${r.unit??"عنصر"}`:`أكبر من اللازم: يفترض أن تكون ${e.origin??"القيمة"} ${n} ${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`أصغر من 
اللازم: يفترض لـ ${e.origin} أن يكون ${n} ${e.minimum.toString()} ${r.unit}`:`أصغر من اللازم: يفترض لـ ${e.origin} أن يكون ${n} ${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`نَص غير مقبول: يجب أن يبدأ بـ "${e.prefix}"`:"ends_with"===t.format?`نَص غير مقبول: يجب أن ينتهي بـ "${t.suffix}"`:"includes"===t.format?`نَص غير مقبول: يجب أن يتضمَّن "${t.includes}"`:"regex"===t.format?`نَص غير مقبول: يجب أن يطابق النمط ${t.pattern}`:`${n[t.format]??e.format} غير مقبول`}case"not_multiple_of":return`رقم غير مقبول: يجب أن يكون من مضاعفات ${e.divisor}`;case"unrecognized_keys":return`معرف${e.keys.length>1?"ات":""} غريب${e.keys.length>1?"ة":""}: ${l(e.keys,"، ")}`;case"invalid_key":return`معرف غير مقبول في ${e.origin}`;case"invalid_union":default:return"مدخل غير مقبول";case"invalid_element":return`مدخل غير مقبول في ${e.origin}`}}},Hr=()=>{const e={string:{unit:"simvol",verb:"olmalıdır"},file:{unit:"bayt",verb:"olmalıdır"},array:{unit:"element",verb:"olmalıdır"},set:{unit:"element",verb:"olmalıdır"}};function t(t){return e[t]??null}const n={regex:"input",email:"email address",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO datetime",date:"ISO date",time:"ISO time",duration:"ISO duration",ipv4:"IPv4 address",ipv6:"IPv6 address",cidrv4:"IPv4 range",cidrv6:"IPv6 range",base64:"base64-encoded string",base64url:"base64url-encoded string",json_string:"JSON string",e164:"E.164 number",jwt:"JWT",template_literal:"input"},r={nan:"NaN"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Yanlış dəyər: gözlənilən instanceof ${e.expected}, daxil olan ${a}`:`Yanlış dəyər: gözlənilən ${t}, daxil olan ${a}`}case"invalid_value":return 1===e.values.length?`Yanlış dəyər: gözlənilən ${$(e.values[0])}`:`Yanlış seçim: aşağıdakılardan biri olmalıdır: 
${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`Çox böyük: gözlənilən ${e.origin??"dəyər"} ${n}${e.maximum.toString()} ${r.unit??"element"}`:`Çox böyük: gözlənilən ${e.origin??"dəyər"} ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`Çox kiçik: gözlənilən ${e.origin} ${n}${e.minimum.toString()} ${r.unit}`:`Çox kiçik: gözlənilən ${e.origin} ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Yanlış mətn: "${t.prefix}" ilə başlamalıdır`:"ends_with"===t.format?`Yanlış mətn: "${t.suffix}" ilə bitməlidir`:"includes"===t.format?`Yanlış mətn: "${t.includes}" daxil olmalıdır`:"regex"===t.format?`Yanlış mətn: ${t.pattern} şablonuna uyğun olmalıdır`:`Yanlış ${n[t.format]??e.format}`}case"not_multiple_of":return`Yanlış ədəd: ${e.divisor} ilə bölünə bilən olmalıdır`;case"unrecognized_keys":return`Tanınmayan açar${e.keys.length>1?"lar":""}: ${l(e.keys,", ")}`;case"invalid_key":return`${e.origin} daxilində yanlış açar`;case"invalid_union":default:return"Yanlış dəyər";case"invalid_element":return`${e.origin} daxilində yanlış dəyər`}}};function Vr(e,t,n,r){const a=Math.abs(e),o=a%10,i=a%100;return i>=11&&i<=19?r:1===o?t:o>=2&&o<=4?n:r}const qr=()=>{const e={string:{unit:{one:"сімвал",few:"сімвалы",many:"сімвалаў"},verb:"мець"},array:{unit:{one:"элемент",few:"элементы",many:"элементаў"},verb:"мець"},set:{unit:{one:"элемент",few:"элементы",many:"элементаў"},verb:"мець"},file:{unit:{one:"байт",few:"байты",many:"байтаў"},verb:"мець"}};function t(t){return e[t]??null}const n={regex:"увод",email:"email адрас",url:"URL",emoji:"эмодзі",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO дата і час",date:"ISO дата",time:"ISO час",duration:"ISO працягласць",ipv4:"IPv4 адрас",ipv6:"IPv6 адрас",cidrv4:"IPv4 дыяпазон",cidrv6:"IPv6 дыяпазон",base64:"радок у фармаце 
base64",base64url:"радок у фармаце base64url",json_string:"JSON радок",e164:"нумар E.164",jwt:"JWT",template_literal:"увод"},r={nan:"NaN",number:"лік",array:"масіў"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Няправільны ўвод: чакаўся instanceof ${e.expected}, атрымана ${a}`:`Няправільны ўвод: чакаўся ${t}, атрымана ${a}`}case"invalid_value":return 1===e.values.length?`Няправільны ўвод: чакалася ${$(e.values[0])}`:`Няправільны варыянт: чакаўся адзін з ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);if(r){const t=Vr(Number(e.maximum),r.unit.one,r.unit.few,r.unit.many);return`Занадта вялікі: чакалася, што ${e.origin??"значэнне"} павінна ${r.verb} ${n}${e.maximum.toString()} ${t}`}return`Занадта вялікі: чакалася, што ${e.origin??"значэнне"} павінна быць ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);if(r){const t=Vr(Number(e.minimum),r.unit.one,r.unit.few,r.unit.many);return`Занадта малы: чакалася, што ${e.origin} павінна ${r.verb} ${n}${e.minimum.toString()} ${t}`}return`Занадта малы: чакалася, што ${e.origin} павінна быць ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Няправільны радок: павінен пачынацца з "${t.prefix}"`:"ends_with"===t.format?`Няправільны радок: павінен заканчвацца на "${t.suffix}"`:"includes"===t.format?`Няправільны радок: павінен змяшчаць "${t.includes}"`:"regex"===t.format?`Няправільны радок: павінен адпавядаць шаблону ${t.pattern}`:`Няправільны ${n[t.format]??e.format}`}case"not_multiple_of":return`Няправільны лік: павінен быць кратным ${e.divisor}`;case"unrecognized_keys":return`Нераспазнаны ${e.keys.length>1?"ключы":"ключ"}: ${l(e.keys,", ")}`;case"invalid_key":return`Няправільны ключ у ${e.origin}`;case"invalid_union":default:return"Няправільны ўвод";case"invalid_element":return`Няправільнае значэнне ў ${e.origin}`}}},Wr=()=>{const 
e={string:{unit:"символа",verb:"да съдържа"},file:{unit:"байта",verb:"да съдържа"},array:{unit:"елемента",verb:"да съдържа"},set:{unit:"елемента",verb:"да съдържа"}};function t(t){return e[t]??null}const n={regex:"вход",email:"имейл адрес",url:"URL",emoji:"емоджи",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO време",date:"ISO дата",time:"ISO време",duration:"ISO продължителност",ipv4:"IPv4 адрес",ipv6:"IPv6 адрес",cidrv4:"IPv4 диапазон",cidrv6:"IPv6 диапазон",base64:"base64-кодиран низ",base64url:"base64url-кодиран низ",json_string:"JSON низ",e164:"E.164 номер",jwt:"JWT",template_literal:"вход"},r={nan:"NaN",number:"число",array:"масив"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Невалиден вход: очакван instanceof ${e.expected}, получен ${a}`:`Невалиден вход: очакван ${t}, получен ${a}`}case"invalid_value":return 1===e.values.length?`Невалиден вход: очакван ${$(e.values[0])}`:`Невалидна опция: очаквано едно от ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`Твърде голямо: очаква се ${e.origin??"стойност"} да съдържа ${n}${e.maximum.toString()} ${r.unit??"елемента"}`:`Твърде голямо: очаква се ${e.origin??"стойност"} да бъде ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`Твърде малко: очаква се ${e.origin} да съдържа ${n}${e.minimum.toString()} ${r.unit}`:`Твърде малко: очаква се ${e.origin} да бъде ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;if("starts_with"===t.format)return`Невалиден низ: трябва да започва с "${t.prefix}"`;if("ends_with"===t.format)return`Невалиден низ: трябва да завършва с "${t.suffix}"`;if("includes"===t.format)return`Невалиден низ: трябва да включва "${t.includes}"`;if("regex"===t.format)return`Невалиден низ: трябва да съвпада с ${t.pattern}`;let 
r="Невалиден";return"emoji"===t.format&&(r="Невалидно"),"datetime"===t.format&&(r="Невалидно"),"date"===t.format&&(r="Невалидна"),"time"===t.format&&(r="Невалидно"),"duration"===t.format&&(r="Невалидна"),`${r} ${n[t.format]??e.format}`}case"not_multiple_of":return`Невалидно число: трябва да бъде кратно на ${e.divisor}`;case"unrecognized_keys":return`Неразпознат${e.keys.length>1?"и":""} ключ${e.keys.length>1?"ове":""}: ${l(e.keys,", ")}`;case"invalid_key":return`Невалиден ключ в ${e.origin}`;case"invalid_union":default:return"Невалиден вход";case"invalid_element":return`Невалидна стойност в ${e.origin}`}}},Xr=()=>{const e={string:{unit:"caràcters",verb:"contenir"},file:{unit:"bytes",verb:"contenir"},array:{unit:"elements",verb:"contenir"},set:{unit:"elements",verb:"contenir"}};function t(t){return e[t]??null}const n={regex:"entrada",email:"adreça electrònica",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"data i hora ISO",date:"data ISO",time:"hora ISO",duration:"durada ISO",ipv4:"adreça IPv4",ipv6:"adreça IPv6",cidrv4:"rang IPv4",cidrv6:"rang IPv6",base64:"cadena codificada en base64",base64url:"cadena codificada en base64url",json_string:"cadena JSON",e164:"número E.164",jwt:"JWT",template_literal:"entrada"},r={nan:"NaN"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Tipus invàlid: s'esperava instanceof ${e.expected}, s'ha rebut ${a}`:`Tipus invàlid: s'esperava ${t}, s'ha rebut ${a}`}case"invalid_value":return 1===e.values.length?`Valor invàlid: s'esperava ${$(e.values[0])}`:`Opció invàlida: s'esperava una de ${l(e.values," o ")}`;case"too_big":{const n=e.inclusive?"com a màxim":"menys de",r=t(e.origin);return r?`Massa gran: s'esperava que ${e.origin??"el valor"} contingués ${n} ${e.maximum.toString()} ${r.unit??"elements"}`:`Massa gran: s'esperava que 
${e.origin??"el valor"} fos ${n} ${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?"com a mínim":"més de",r=t(e.origin);return r?`Massa petit: s'esperava que ${e.origin} contingués ${n} ${e.minimum.toString()} ${r.unit}`:`Massa petit: s'esperava que ${e.origin} fos ${n} ${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Format invàlid: ha de començar amb "${t.prefix}"`:"ends_with"===t.format?`Format invàlid: ha d'acabar amb "${t.suffix}"`:"includes"===t.format?`Format invàlid: ha d'incloure "${t.includes}"`:"regex"===t.format?`Format invàlid: ha de coincidir amb el patró ${t.pattern}`:`Format invàlid per a ${n[t.format]??e.format}`}case"not_multiple_of":return`Número invàlid: ha de ser múltiple de ${e.divisor}`;case"unrecognized_keys":return`Clau${e.keys.length>1?"s":""} no reconeguda${e.keys.length>1?"s":""}: ${l(e.keys,", ")}`;case"invalid_key":return`Clau invàlida a ${e.origin}`;case"invalid_union":default:return"Entrada invàlida";case"invalid_element":return`Element invàlid a ${e.origin}`}}},Gr=()=>{const e={string:{unit:"znaků",verb:"mít"},file:{unit:"bajtů",verb:"mít"},array:{unit:"prvků",verb:"mít"},set:{unit:"prvků",verb:"mít"}};function t(t){return e[t]??null}const n={regex:"regulární výraz",email:"e-mailová adresa",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"datum a čas ve formátu ISO",date:"datum ve formátu ISO",time:"čas ve formátu ISO",duration:"doba trvání ISO",ipv4:"IPv4 adresa",ipv6:"IPv6 adresa",cidrv4:"rozsah IPv4",cidrv6:"rozsah IPv6",base64:"řetězec zakódovaný ve formátu base64",base64url:"řetězec zakódovaný ve formátu base64url",json_string:"řetězec ve formátu JSON",e164:"číslo E.164",jwt:"JWT",template_literal:"vstup"},r={nan:"NaN",number:"číslo",string:"řetězec",function:"funkce",array:"pole"};return e=>{switch(e.code){case"invalid_type":{const 
t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Neplatný vstup: očekáváno instanceof ${e.expected}, obdrženo ${a}`:`Neplatný vstup: očekáváno ${t}, obdrženo ${a}`}case"invalid_value":return 1===e.values.length?`Neplatný vstup: očekáváno ${$(e.values[0])}`:`Neplatná možnost: očekávána jedna z hodnot ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`Hodnota je příliš velká: ${e.origin??"hodnota"} musí mít ${n}${e.maximum.toString()} ${r.unit??"prvků"}`:`Hodnota je příliš velká: ${e.origin??"hodnota"} musí být ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`Hodnota je příliš malá: ${e.origin??"hodnota"} musí mít ${n}${e.minimum.toString()} ${r.unit??"prvků"}`:`Hodnota je příliš malá: ${e.origin??"hodnota"} musí být ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Neplatný řetězec: musí začínat na "${t.prefix}"`:"ends_with"===t.format?`Neplatný řetězec: musí končit na "${t.suffix}"`:"includes"===t.format?`Neplatný řetězec: musí obsahovat "${t.includes}"`:"regex"===t.format?`Neplatný řetězec: musí odpovídat vzoru ${t.pattern}`:`Neplatný formát ${n[t.format]??e.format}`}case"not_multiple_of":return`Neplatné číslo: musí být násobkem ${e.divisor}`;case"unrecognized_keys":return`Neznámé klíče: ${l(e.keys,", ")}`;case"invalid_key":return`Neplatný klíč v ${e.origin}`;case"invalid_union":default:return"Neplatný vstup";case"invalid_element":return`Neplatná hodnota v ${e.origin}`}}},Yr=()=>{const e={string:{unit:"tegn",verb:"havde"},file:{unit:"bytes",verb:"havde"},array:{unit:"elementer",verb:"indeholdt"},set:{unit:"elementer",verb:"indeholdt"}};function t(t){return e[t]??null}const n={regex:"input",email:"e-mailadresse",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO dato- og 
klokkeslæt",date:"ISO-dato",time:"ISO-klokkeslæt",duration:"ISO-varighed",ipv4:"IPv4-område",ipv6:"IPv6-område",cidrv4:"IPv4-spektrum",cidrv6:"IPv6-spektrum",base64:"base64-kodet streng",base64url:"base64url-kodet streng",json_string:"JSON-streng",e164:"E.164-nummer",jwt:"JWT",template_literal:"input"},r={nan:"NaN",string:"streng",number:"tal",boolean:"boolean",array:"liste",object:"objekt",set:"sæt",file:"fil"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Ugyldigt input: forventede instanceof ${e.expected}, fik ${a}`:`Ugyldigt input: forventede ${t}, fik ${a}`}case"invalid_value":return 1===e.values.length?`Ugyldig værdi: forventede ${$(e.values[0])}`:`Ugyldigt valg: forventede en af følgende ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",a=t(e.origin),o=r[e.origin]??e.origin;return a?`For stor: forventede ${o??"value"} ${a.verb} ${n} ${e.maximum.toString()} ${a.unit??"elementer"}`:`For stor: forventede ${o??"value"} havde ${n} ${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",a=t(e.origin),o=r[e.origin]??e.origin;return a?`For lille: forventede ${o} ${a.verb} ${n} ${e.minimum.toString()} ${a.unit}`:`For lille: forventede ${o} havde ${n} ${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Ugyldig streng: skal starte med "${t.prefix}"`:"ends_with"===t.format?`Ugyldig streng: skal ende med "${t.suffix}"`:"includes"===t.format?`Ugyldig streng: skal indeholde "${t.includes}"`:"regex"===t.format?`Ugyldig streng: skal matche mønsteret ${t.pattern}`:`Ugyldig ${n[t.format]??e.format}`}case"not_multiple_of":return`Ugyldigt tal: skal være deleligt med ${e.divisor}`;case"unrecognized_keys":return`${e.keys.length>1?"Ukendte nøgler":"Ukendt nøgle"}: ${l(e.keys,", ")}`;case"invalid_key":return`Ugyldig nøgle i ${e.origin}`;case"invalid_union":return"Ugyldigt input: matcher ingen af de tilladte 
typer";case"invalid_element":return`Ugyldig værdi i ${e.origin}`;default:return"Ugyldigt input"}}},Kr=()=>{const e={string:{unit:"Zeichen",verb:"zu haben"},file:{unit:"Bytes",verb:"zu haben"},array:{unit:"Elemente",verb:"zu haben"},set:{unit:"Elemente",verb:"zu haben"}};function t(t){return e[t]??null}const n={regex:"Eingabe",email:"E-Mail-Adresse",url:"URL",emoji:"Emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO-Datum und -Uhrzeit",date:"ISO-Datum",time:"ISO-Uhrzeit",duration:"ISO-Dauer",ipv4:"IPv4-Adresse",ipv6:"IPv6-Adresse",cidrv4:"IPv4-Bereich",cidrv6:"IPv6-Bereich",base64:"Base64-codierter String",base64url:"Base64-URL-codierter String",json_string:"JSON-String",e164:"E.164-Nummer",jwt:"JWT",template_literal:"Eingabe"},r={nan:"NaN",number:"Zahl",array:"Array"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Ungültige Eingabe: erwartet instanceof ${e.expected}, erhalten ${a}`:`Ungültige Eingabe: erwartet ${t}, erhalten ${a}`}case"invalid_value":return 1===e.values.length?`Ungültige Eingabe: erwartet ${$(e.values[0])}`:`Ungültige Option: erwartet eine von ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`Zu groß: erwartet, dass ${e.origin??"Wert"} ${n}${e.maximum.toString()} ${r.unit??"Elemente"} hat`:`Zu groß: erwartet, dass ${e.origin??"Wert"} ${n}${e.maximum.toString()} ist`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`Zu klein: erwartet, dass ${e.origin} ${n}${e.minimum.toString()} ${r.unit} hat`:`Zu klein: erwartet, dass ${e.origin} ${n}${e.minimum.toString()} ist`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Ungültiger String: muss mit "${t.prefix}" beginnen`:"ends_with"===t.format?`Ungültiger String: muss mit "${t.suffix}" enden`:"includes"===t.format?`Ungültiger String: muss 
"${t.includes}" enthalten`:"regex"===t.format?`Ungültiger String: muss dem Muster ${t.pattern} entsprechen`:`Ungültig: ${n[t.format]??e.format}`}case"not_multiple_of":return`Ungültige Zahl: muss ein Vielfaches von ${e.divisor} sein`;case"unrecognized_keys":return`${e.keys.length>1?"Unbekannte Schlüssel":"Unbekannter Schlüssel"}: ${l(e.keys,", ")}`;case"invalid_key":return`Ungültiger Schlüssel in ${e.origin}`;case"invalid_union":default:return"Ungültige Eingabe";case"invalid_element":return`Ungültiger Wert in ${e.origin}`}}},Jr=()=>{const e={string:{unit:"characters",verb:"to have"},file:{unit:"bytes",verb:"to have"},array:{unit:"items",verb:"to have"},set:{unit:"items",verb:"to have"},map:{unit:"entries",verb:"to have"}};function t(t){return e[t]??null}const n={regex:"input",email:"email address",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO datetime",date:"ISO date",time:"ISO time",duration:"ISO duration",ipv4:"IPv4 address",ipv6:"IPv6 address",mac:"MAC address",cidrv4:"IPv4 range",cidrv6:"IPv6 range",base64:"base64-encoded string",base64url:"base64url-encoded string",json_string:"JSON string",e164:"E.164 number",jwt:"JWT",template_literal:"input"},r={nan:"NaN"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input);return`Invalid input: expected ${t}, received ${r[n]??n}`}case"invalid_value":return 1===e.values.length?`Invalid input: expected ${$(e.values[0])}`:`Invalid option: expected one of ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`Too big: expected ${e.origin??"value"} to have ${n}${e.maximum.toString()} ${r.unit??"elements"}`:`Too big: expected ${e.origin??"value"} to be ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`Too small: expected ${e.origin} to have ${n}${e.minimum.toString()} ${r.unit}`:`Too 
small: expected ${e.origin} to be ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Invalid string: must start with "${t.prefix}"`:"ends_with"===t.format?`Invalid string: must end with "${t.suffix}"`:"includes"===t.format?`Invalid string: must include "${t.includes}"`:"regex"===t.format?`Invalid string: must match pattern ${t.pattern}`:`Invalid ${n[t.format]??e.format}`}case"not_multiple_of":return`Invalid number: must be a multiple of ${e.divisor}`;case"unrecognized_keys":return`Unrecognized key${e.keys.length>1?"s":""}: ${l(e.keys,", ")}`;case"invalid_key":return`Invalid key in ${e.origin}`;case"invalid_union":default:return"Invalid input";case"invalid_element":return`Invalid value in ${e.origin}`}}};function ea(){return{localeError:Jr()}}const ta=()=>{const e={string:{unit:"karaktrojn",verb:"havi"},file:{unit:"bajtojn",verb:"havi"},array:{unit:"elementojn",verb:"havi"},set:{unit:"elementojn",verb:"havi"}};function t(t){return e[t]??null}const n={regex:"enigo",email:"retadreso",url:"URL",emoji:"emoĝio",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO-datotempo",date:"ISO-dato",time:"ISO-tempo",duration:"ISO-daŭro",ipv4:"IPv4-adreso",ipv6:"IPv6-adreso",cidrv4:"IPv4-rango",cidrv6:"IPv6-rango",base64:"64-ume kodita karaktraro",base64url:"URL-64-ume kodita karaktraro",json_string:"JSON-karaktraro",e164:"E.164-nombro",jwt:"JWT",template_literal:"enigo"},r={nan:"NaN",number:"nombro",array:"tabelo",null:"senvalora"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Nevalida enigo: atendiĝis instanceof ${e.expected}, riceviĝis ${a}`:`Nevalida enigo: atendiĝis ${t}, riceviĝis ${a}`}case"invalid_value":return 1===e.values.length?`Nevalida enigo: atendiĝis ${$(e.values[0])}`:`Nevalida opcio: atendiĝis unu el ${l(e.values,"|")}`;case"too_big":{const 
n=e.inclusive?"<=":"<",r=t(e.origin);return r?`Tro granda: atendiĝis ke ${e.origin??"valoro"} havu ${n}${e.maximum.toString()} ${r.unit??"elementojn"}`:`Tro granda: atendiĝis ke ${e.origin??"valoro"} havu ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`Tro malgranda: atendiĝis ke ${e.origin} havu ${n}${e.minimum.toString()} ${r.unit}`:`Tro malgranda: atendiĝis ke ${e.origin} estu ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Nevalida karaktraro: devas komenciĝi per "${t.prefix}"`:"ends_with"===t.format?`Nevalida karaktraro: devas finiĝi per "${t.suffix}"`:"includes"===t.format?`Nevalida karaktraro: devas inkluzivi "${t.includes}"`:"regex"===t.format?`Nevalida karaktraro: devas kongrui kun la modelo ${t.pattern}`:`Nevalida ${n[t.format]??e.format}`}case"not_multiple_of":return`Nevalida nombro: devas esti oblo de ${e.divisor}`;case"unrecognized_keys":return`Nekonata${e.keys.length>1?"j":""} ŝlosilo${e.keys.length>1?"j":""}: ${l(e.keys,", ")}`;case"invalid_key":return`Nevalida ŝlosilo en ${e.origin}`;case"invalid_union":default:return"Nevalida enigo";case"invalid_element":return`Nevalida valoro en ${e.origin}`}}},na=()=>{const e={string:{unit:"caracteres",verb:"tener"},file:{unit:"bytes",verb:"tener"},array:{unit:"elementos",verb:"tener"},set:{unit:"elementos",verb:"tener"}};function t(t){return e[t]??null}const n={regex:"entrada",email:"dirección de correo electrónico",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"fecha y hora ISO",date:"fecha ISO",time:"hora ISO",duration:"duración ISO",ipv4:"dirección IPv4",ipv6:"dirección IPv6",cidrv4:"rango IPv4",cidrv6:"rango IPv6",base64:"cadena codificada en base64",base64url:"URL codificada en base64",json_string:"cadena JSON",e164:"número 
E.164",jwt:"JWT",template_literal:"entrada"},r={nan:"NaN",string:"texto",number:"número",boolean:"booleano",array:"arreglo",object:"objeto",set:"conjunto",file:"archivo",date:"fecha",bigint:"número grande",symbol:"símbolo",undefined:"indefinido",null:"nulo",function:"función",map:"mapa",record:"registro",tuple:"tupla",enum:"enumeración",union:"unión",literal:"literal",promise:"promesa",void:"vacío",never:"nunca",unknown:"desconocido",any:"cualquiera"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Entrada inválida: se esperaba instanceof ${e.expected}, recibido ${a}`:`Entrada inválida: se esperaba ${t}, recibido ${a}`}case"invalid_value":return 1===e.values.length?`Entrada inválida: se esperaba ${$(e.values[0])}`:`Opción inválida: se esperaba una de ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",a=t(e.origin),o=r[e.origin]??e.origin;return a?`Demasiado grande: se esperaba que ${o??"valor"} tuviera ${n}${e.maximum.toString()} ${a.unit??"elementos"}`:`Demasiado grande: se esperaba que ${o??"valor"} fuera ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",a=t(e.origin),o=r[e.origin]??e.origin;return a?`Demasiado pequeño: se esperaba que ${o} tuviera ${n}${e.minimum.toString()} ${a.unit}`:`Demasiado pequeño: se esperaba que ${o} fuera ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Cadena inválida: debe comenzar con "${t.prefix}"`:"ends_with"===t.format?`Cadena inválida: debe terminar en "${t.suffix}"`:"includes"===t.format?`Cadena inválida: debe incluir "${t.includes}"`:"regex"===t.format?`Cadena inválida: debe coincidir con el patrón ${t.pattern}`:`Inválido ${n[t.format]??e.format}`}case"not_multiple_of":return`Número inválido: debe ser múltiplo de ${e.divisor}`;case"unrecognized_keys":return`Llave${e.keys.length>1?"s":""} desconocida${e.keys.length>1?"s":""}: ${l(e.keys,", 
")}`;case"invalid_key":return`Llave inválida en ${r[e.origin]??e.origin}`;case"invalid_union":default:return"Entrada inválida";case"invalid_element":return`Valor inválido en ${r[e.origin]??e.origin}`}}},ra=()=>{const e={string:{unit:"کاراکتر",verb:"داشته باشد"},file:{unit:"بایت",verb:"داشته باشد"},array:{unit:"آیتم",verb:"داشته باشد"},set:{unit:"آیتم",verb:"داشته باشد"}};function t(t){return e[t]??null}const n={regex:"ورودی",email:"آدرس ایمیل",url:"URL",emoji:"ایموجی",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"تاریخ و زمان ایزو",date:"تاریخ ایزو",time:"زمان ایزو",duration:"مدت زمان ایزو",ipv4:"IPv4 آدرس",ipv6:"IPv6 آدرس",cidrv4:"IPv4 دامنه",cidrv6:"IPv6 دامنه",base64:"base64-encoded رشته",base64url:"base64url-encoded رشته",json_string:"JSON رشته",e164:"E.164 عدد",jwt:"JWT",template_literal:"ورودی"},r={nan:"NaN",number:"عدد",array:"آرایه"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`ورودی نامعتبر: می‌بایست instanceof ${e.expected} می‌بود، ${a} دریافت شد`:`ورودی نامعتبر: می‌بایست ${t} می‌بود، ${a} دریافت شد`}case"invalid_value":return 1===e.values.length?`ورودی نامعتبر: می‌بایست ${$(e.values[0])} می‌بود`:`گزینه نامعتبر: می‌بایست یکی از ${l(e.values,"|")} می‌بود`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`خیلی بزرگ: ${e.origin??"مقدار"} باید ${n}${e.maximum.toString()} ${r.unit??"عنصر"} باشد`:`خیلی بزرگ: ${e.origin??"مقدار"} باید ${n}${e.maximum.toString()} باشد`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`خیلی کوچک: ${e.origin} باید ${n}${e.minimum.toString()} ${r.unit} باشد`:`خیلی کوچک: ${e.origin} باید ${n}${e.minimum.toString()} باشد`}case"invalid_format":{const t=e;return"starts_with"===t.format?`رشته نامعتبر: باید با "${t.prefix}" شروع شود`:"ends_with"===t.format?`رشته نامعتبر: باید با "${t.suffix}" تمام 
شود`:"includes"===t.format?`رشته نامعتبر: باید شامل "${t.includes}" باشد`:"regex"===t.format?`رشته نامعتبر: باید با الگوی ${t.pattern} مطابقت داشته باشد`:`${n[t.format]??e.format} نامعتبر`}case"not_multiple_of":return`عدد نامعتبر: باید مضرب ${e.divisor} باشد`;case"unrecognized_keys":return`کلید${e.keys.length>1?"های":""} ناشناس: ${l(e.keys,", ")}`;case"invalid_key":return`کلید ناشناس در ${e.origin}`;case"invalid_union":default:return"ورودی نامعتبر";case"invalid_element":return`مقدار نامعتبر در ${e.origin}`}}},aa=()=>{const e={string:{unit:"merkkiä",subject:"merkkijonon"},file:{unit:"tavua",subject:"tiedoston"},array:{unit:"alkiota",subject:"listan"},set:{unit:"alkiota",subject:"joukon"},number:{unit:"",subject:"luvun"},bigint:{unit:"",subject:"suuren kokonaisluvun"},int:{unit:"",subject:"kokonaisluvun"},date:{unit:"",subject:"päivämäärän"}};function t(t){return e[t]??null}const n={regex:"säännöllinen lauseke",email:"sähköpostiosoite",url:"URL-osoite",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO-aikaleima",date:"ISO-päivämäärä",time:"ISO-aika",duration:"ISO-kesto",ipv4:"IPv4-osoite",ipv6:"IPv6-osoite",cidrv4:"IPv4-alue",cidrv6:"IPv6-alue",base64:"base64-koodattu merkkijono",base64url:"base64url-koodattu merkkijono",json_string:"JSON-merkkijono",e164:"E.164-luku",jwt:"JWT",template_literal:"templaattimerkkijono"},r={nan:"NaN"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Virheellinen tyyppi: odotettiin instanceof ${e.expected}, oli ${a}`:`Virheellinen tyyppi: odotettiin ${t}, oli ${a}`}case"invalid_value":return 1===e.values.length?`Virheellinen syöte: täytyy olla ${$(e.values[0])}`:`Virheellinen valinta: täytyy olla yksi seuraavista: ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`Liian suuri: ${r.subject} täytyy olla 
${n}${e.maximum.toString()} ${r.unit}`.trim():`Liian suuri: arvon täytyy olla ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`Liian pieni: ${r.subject} täytyy olla ${n}${e.minimum.toString()} ${r.unit}`.trim():`Liian pieni: arvon täytyy olla ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Virheellinen syöte: täytyy alkaa "${t.prefix}"`:"ends_with"===t.format?`Virheellinen syöte: täytyy loppua "${t.suffix}"`:"includes"===t.format?`Virheellinen syöte: täytyy sisältää "${t.includes}"`:"regex"===t.format?`Virheellinen syöte: täytyy vastata säännöllistä lauseketta ${t.pattern}`:`Virheellinen ${n[t.format]??e.format}`}case"not_multiple_of":return`Virheellinen luku: täytyy olla luvun ${e.divisor} monikerta`;case"unrecognized_keys":return`${e.keys.length>1?"Tuntemattomat avaimet":"Tuntematon avain"}: ${l(e.keys,", ")}`;case"invalid_key":return"Virheellinen avain tietueessa";case"invalid_union":return"Virheellinen unioni";case"invalid_element":return"Virheellinen arvo joukossa";default:return"Virheellinen syöte"}}},oa=()=>{const e={string:{unit:"caractères",verb:"avoir"},file:{unit:"octets",verb:"avoir"},array:{unit:"éléments",verb:"avoir"},set:{unit:"éléments",verb:"avoir"}};function t(t){return e[t]??null}const n={regex:"entrée",email:"adresse e-mail",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"date et heure ISO",date:"date ISO",time:"heure ISO",duration:"durée ISO",ipv4:"adresse IPv4",ipv6:"adresse IPv6",cidrv4:"plage IPv4",cidrv6:"plage IPv6",base64:"chaîne encodée en base64",base64url:"chaîne encodée en base64url",json_string:"chaîne JSON",e164:"numéro E.164",jwt:"JWT",template_literal:"entrée"},r={nan:"NaN",number:"nombre",array:"tableau"};return e=>{switch(e.code){case"invalid_type":{const 
t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Entrée invalide : instanceof ${e.expected} attendu, ${a} reçu`:`Entrée invalide : ${t} attendu, ${a} reçu`}case"invalid_value":return 1===e.values.length?`Entrée invalide : ${$(e.values[0])} attendu`:`Option invalide : une valeur parmi ${l(e.values,"|")} attendue`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`Trop grand : ${e.origin??"valeur"} doit ${r.verb} ${n}${e.maximum.toString()} ${r.unit??"élément(s)"}`:`Trop grand : ${e.origin??"valeur"} doit être ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`Trop petit : ${e.origin} doit ${r.verb} ${n}${e.minimum.toString()} ${r.unit}`:`Trop petit : ${e.origin} doit être ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Chaîne invalide : doit commencer par "${t.prefix}"`:"ends_with"===t.format?`Chaîne invalide : doit se terminer par "${t.suffix}"`:"includes"===t.format?`Chaîne invalide : doit inclure "${t.includes}"`:"regex"===t.format?`Chaîne invalide : doit correspondre au modèle ${t.pattern}`:`${n[t.format]??e.format} invalide`}case"not_multiple_of":return`Nombre invalide : doit être un multiple de ${e.divisor}`;case"unrecognized_keys":return`Clé${e.keys.length>1?"s":""} non reconnue${e.keys.length>1?"s":""} : ${l(e.keys,", ")}`;case"invalid_key":return`Clé invalide dans ${e.origin}`;case"invalid_union":default:return"Entrée invalide";case"invalid_element":return`Valeur invalide dans ${e.origin}`}}},ia=()=>{const e={string:{unit:"caractères",verb:"avoir"},file:{unit:"octets",verb:"avoir"},array:{unit:"éléments",verb:"avoir"},set:{unit:"éléments",verb:"avoir"}};function t(t){return e[t]??null}const n={regex:"entrée",email:"adresse courriel",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"date-heure 
ISO",date:"date ISO",time:"heure ISO",duration:"durée ISO",ipv4:"adresse IPv4",ipv6:"adresse IPv6",cidrv4:"plage IPv4",cidrv6:"plage IPv6",base64:"chaîne encodée en base64",base64url:"chaîne encodée en base64url",json_string:"chaîne JSON",e164:"numéro E.164",jwt:"JWT",template_literal:"entrée"},r={nan:"NaN"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Entrée invalide : attendu instanceof ${e.expected}, reçu ${a}`:`Entrée invalide : attendu ${t}, reçu ${a}`}case"invalid_value":return 1===e.values.length?`Entrée invalide : attendu ${$(e.values[0])}`:`Option invalide : attendu l'une des valeurs suivantes ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"≤":"<",r=t(e.origin);return r?`Trop grand : attendu que ${e.origin??"la valeur"} ait ${n}${e.maximum.toString()} ${r.unit}`:`Trop grand : attendu que ${e.origin??"la valeur"} soit ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?"≥":">",r=t(e.origin);return r?`Trop petit : attendu que ${e.origin} ait ${n}${e.minimum.toString()} ${r.unit}`:`Trop petit : attendu que ${e.origin} soit ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Chaîne invalide : doit commencer par "${t.prefix}"`:"ends_with"===t.format?`Chaîne invalide : doit se terminer par "${t.suffix}"`:"includes"===t.format?`Chaîne invalide : doit inclure "${t.includes}"`:"regex"===t.format?`Chaîne invalide : doit correspondre au motif ${t.pattern}`:`${n[t.format]??e.format} invalide`}case"not_multiple_of":return`Nombre invalide : doit être un multiple de ${e.divisor}`;case"unrecognized_keys":return`Clé${e.keys.length>1?"s":""} non reconnue${e.keys.length>1?"s":""} : ${l(e.keys,", ")}`;case"invalid_key":return`Clé invalide dans ${e.origin}`;case"invalid_union":default:return"Entrée invalide";case"invalid_element":return`Valeur invalide dans ${e.origin}`}}},sa=()=>{const 
e={string:{label:"מחרוזת",gender:"f"},number:{label:"מספר",gender:"m"},boolean:{label:"ערך בוליאני",gender:"m"},bigint:{label:"BigInt",gender:"m"},date:{label:"תאריך",gender:"m"},array:{label:"מערך",gender:"m"},object:{label:"אובייקט",gender:"m"},null:{label:"ערך ריק (null)",gender:"m"},undefined:{label:"ערך לא מוגדר (undefined)",gender:"m"},symbol:{label:"סימבול (Symbol)",gender:"m"},function:{label:"פונקציה",gender:"f"},map:{label:"מפה (Map)",gender:"f"},set:{label:"קבוצה (Set)",gender:"f"},file:{label:"קובץ",gender:"m"},promise:{label:"Promise",gender:"m"},NaN:{label:"NaN",gender:"m"},unknown:{label:"ערך לא ידוע",gender:"m"},value:{label:"ערך",gender:"m"}},t={string:{unit:"תווים",shortLabel:"קצר",longLabel:"ארוך"},file:{unit:"בייטים",shortLabel:"קטן",longLabel:"גדול"},array:{unit:"פריטים",shortLabel:"קטן",longLabel:"גדול"},set:{unit:"פריטים",shortLabel:"קטן",longLabel:"גדול"},number:{unit:"",shortLabel:"קטן",longLabel:"גדול"}},n=t=>t?e[t]:void 0,r=t=>{const r=n(t);return r?r.label:t??e.unknown.label},a=e=>`ה${r(e)}`,o=e=>{const t=n(e);return"f"===(t?.gender??"m")?"צריכה להיות":"צריך להיות"},i=e=>e?t[e]??null:null,s={regex:{label:"קלט",gender:"m"},email:{label:"כתובת אימייל",gender:"f"},url:{label:"כתובת רשת",gender:"f"},emoji:{label:"אימוג'י",gender:"m"},uuid:{label:"UUID",gender:"m"},nanoid:{label:"nanoid",gender:"m"},guid:{label:"GUID",gender:"m"},cuid:{label:"cuid",gender:"m"},cuid2:{label:"cuid2",gender:"m"},ulid:{label:"ULID",gender:"m"},xid:{label:"XID",gender:"m"},ksuid:{label:"KSUID",gender:"m"},datetime:{label:"תאריך וזמן ISO",gender:"m"},date:{label:"תאריך ISO",gender:"m"},time:{label:"זמן ISO",gender:"m"},duration:{label:"משך זמן ISO",gender:"m"},ipv4:{label:"כתובת IPv4",gender:"f"},ipv6:{label:"כתובת IPv6",gender:"f"},cidrv4:{label:"טווח IPv4",gender:"m"},cidrv6:{label:"טווח IPv6",gender:"m"},base64:{label:"מחרוזת בבסיס 64",gender:"f"},base64url:{label:"מחרוזת בבסיס 64 לכתובות רשת",gender:"f"},json_string:{label:"מחרוזת 
JSON",gender:"f"},e164:{label:"מספר E.164",gender:"m"},jwt:{label:"JWT",gender:"m"},ends_with:{label:"קלט",gender:"m"},includes:{label:"קלט",gender:"m"},lowercase:{label:"קלט",gender:"m"},starts_with:{label:"קלט",gender:"m"},uppercase:{label:"קלט",gender:"m"}},c={nan:"NaN"};return t=>{switch(t.code){case"invalid_type":{const n=t.expected,a=c[n??""]??r(n),o=q(t.input),i=c[o]??e[o]?.label??o;return/^[A-Z]/.test(t.expected)?`קלט לא תקין: צריך להיות instanceof ${t.expected}, התקבל ${i}`:`קלט לא תקין: צריך להיות ${a}, התקבל ${i}`}case"invalid_value":{if(1===t.values.length)return`ערך לא תקין: הערך חייב להיות ${$(t.values[0])}`;const e=t.values.map((e=>$(e)));if(2===t.values.length)return`ערך לא תקין: האפשרויות המתאימות הן ${e[0]} או ${e[1]}`;const n=e[e.length-1];return`ערך לא תקין: האפשרויות המתאימות הן ${e.slice(0,-1).join(", ")} או ${n}`}case"too_big":{const e=i(t.origin),n=a(t.origin??"value");if("string"===t.origin)return`${e?.longLabel??"ארוך"} מדי: ${n} צריכה להכיל ${t.maximum.toString()} ${e?.unit??""} ${t.inclusive?"או פחות":"לכל היותר"}`.trim();if("number"===t.origin)return`גדול מדי: ${n} צריך להיות ${t.inclusive?`קטן או שווה ל-${t.maximum}`:`קטן מ-${t.maximum}`}`;if("array"===t.origin||"set"===t.origin)return`גדול מדי: ${n} ${"set"===t.origin?"צריכה":"צריך"} להכיל ${t.inclusive?`${t.maximum} ${e?.unit??""} או פחות`:`פחות מ-${t.maximum} ${e?.unit??""}`}`.trim();const r=t.inclusive?"<=":"<",s=o(t.origin??"value");return e?.unit?`${e.longLabel} מדי: ${n} ${s} ${r}${t.maximum.toString()} ${e.unit}`:`${e?.longLabel??"גדול"} מדי: ${n} ${s} ${r}${t.maximum.toString()}`}case"too_small":{const e=i(t.origin),n=a(t.origin??"value");if("string"===t.origin)return`${e?.shortLabel??"קצר"} מדי: ${n} צריכה להכיל ${t.minimum.toString()} ${e?.unit??""} ${t.inclusive?"או יותר":"לפחות"}`.trim();if("number"===t.origin)return`קטן מדי: ${n} צריך להיות ${t.inclusive?`גדול או שווה ל-${t.minimum}`:`גדול מ-${t.minimum}`}`;if("array"===t.origin||"set"===t.origin){const 
r="set"===t.origin?"צריכה":"צריך";return 1===t.minimum&&t.inclusive?`קטן מדי: ${n} ${r} להכיל ${t.origin,"לפחות פריט אחד"}`:`קטן מדי: ${n} ${r} להכיל ${t.inclusive?`${t.minimum} ${e?.unit??""} או יותר`:`יותר מ-${t.minimum} ${e?.unit??""}`}`.trim()}const r=t.inclusive?">=":">",s=o(t.origin??"value");return e?.unit?`${e.shortLabel} מדי: ${n} ${s} ${r}${t.minimum.toString()} ${e.unit}`:`${e?.shortLabel??"קטן"} מדי: ${n} ${s} ${r}${t.minimum.toString()}`}case"invalid_format":{const e=t;if("starts_with"===e.format)return`המחרוזת חייבת להתחיל ב "${e.prefix}"`;if("ends_with"===e.format)return`המחרוזת חייבת להסתיים ב "${e.suffix}"`;if("includes"===e.format)return`המחרוזת חייבת לכלול "${e.includes}"`;if("regex"===e.format)return`המחרוזת חייבת להתאים לתבנית ${e.pattern}`;const n=s[e.format];return`${n?.label??e.format} לא ${"f"===(n?.gender??"m")?"תקינה":"תקין"}`}case"not_multiple_of":return`מספר לא תקין: חייב להיות מכפלה של ${t.divisor}`;case"unrecognized_keys":return`מפתח${t.keys.length>1?"ות":""} לא מזוה${t.keys.length>1?"ים":"ה"}: ${l(t.keys,", ")}`;case"invalid_key":return"שדה לא תקין באובייקט";case"invalid_union":default:return"קלט לא תקין";case"invalid_element":return`ערך לא תקין ב${a(t.origin??"array")}`}}},la=()=>{const e={string:{unit:"karakter",verb:"legyen"},file:{unit:"byte",verb:"legyen"},array:{unit:"elem",verb:"legyen"},set:{unit:"elem",verb:"legyen"}};function t(t){return e[t]??null}const n={regex:"bemenet",email:"email cím",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO időbélyeg",date:"ISO dátum",time:"ISO idő",duration:"ISO időintervallum",ipv4:"IPv4 cím",ipv6:"IPv6 cím",cidrv4:"IPv4 tartomány",cidrv6:"IPv6 tartomány",base64:"base64-kódolt string",base64url:"base64url-kódolt string",json_string:"JSON string",e164:"E.164 szám",jwt:"JWT",template_literal:"bemenet"},r={nan:"NaN",number:"szám",array:"tömb"};return 
e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Érvénytelen bemenet: a várt érték instanceof ${e.expected}, a kapott érték ${a}`:`Érvénytelen bemenet: a várt érték ${t}, a kapott érték ${a}`}case"invalid_value":return 1===e.values.length?`Érvénytelen bemenet: a várt érték ${$(e.values[0])}`:`Érvénytelen opció: valamelyik érték várt ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`Túl nagy: ${e.origin??"érték"} mérete túl nagy ${n}${e.maximum.toString()} ${r.unit??"elem"}`:`Túl nagy: a bemeneti érték ${e.origin??"érték"} túl nagy: ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`Túl kicsi: a bemeneti érték ${e.origin} mérete túl kicsi ${n}${e.minimum.toString()} ${r.unit}`:`Túl kicsi: a bemeneti érték ${e.origin} túl kicsi ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Érvénytelen string: "${t.prefix}" értékkel kell kezdődnie`:"ends_with"===t.format?`Érvénytelen string: "${t.suffix}" értékkel kell végződnie`:"includes"===t.format?`Érvénytelen string: "${t.includes}" értéket kell tartalmaznia`:"regex"===t.format?`Érvénytelen string: ${t.pattern} mintának kell megfelelnie`:`Érvénytelen ${n[t.format]??e.format}`}case"not_multiple_of":return`Érvénytelen szám: ${e.divisor} többszörösének kell lennie`;case"unrecognized_keys":return`Ismeretlen kulcs${e.keys.length>1?"s":""}: ${l(e.keys,", ")}`;case"invalid_key":return`Érvénytelen kulcs ${e.origin}`;case"invalid_union":default:return"Érvénytelen bemenet";case"invalid_element":return`Érvénytelen érték: ${e.origin}`}}};function ca(e,t,n){return 1===Math.abs(e)?t:n}function ua(e){if(!e)return"";const t=e[e.length-1];return e+(["ա","ե","ը","ի","ո","ու","օ"].includes(t)?"ն":"ը")}const da=()=>{const 
e={string:{unit:{one:"նշան",many:"նշաններ"},verb:"ունենալ"},file:{unit:{one:"բայթ",many:"բայթեր"},verb:"ունենալ"},array:{unit:{one:"տարր",many:"տարրեր"},verb:"ունենալ"},set:{unit:{one:"տարր",many:"տարրեր"},verb:"ունենալ"}};function t(t){return e[t]??null}const n={regex:"մուտք",email:"էլ. հասցե",url:"URL",emoji:"էմոջի",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO ամսաթիվ և ժամ",date:"ISO ամսաթիվ",time:"ISO ժամ",duration:"ISO տևողություն",ipv4:"IPv4 հասցե",ipv6:"IPv6 հասցե",cidrv4:"IPv4 միջակայք",cidrv6:"IPv6 միջակայք",base64:"base64 ձևաչափով տող",base64url:"base64url ձևաչափով տող",json_string:"JSON տող",e164:"E.164 համար",jwt:"JWT",template_literal:"մուտք"},r={nan:"NaN",number:"թիվ",array:"զանգված"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Սխալ մուտքագրում․ սպասվում էր instanceof ${e.expected}, ստացվել է ${a}`:`Սխալ մուտքագրում․ սպասվում էր ${t}, ստացվել է ${a}`}case"invalid_value":return 1===e.values.length?`Սխալ մուտքագրում․ սպասվում էր ${$(e.values[1])}`:`Սխալ տարբերակ․ սպասվում էր հետևյալներից մեկը՝ ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);if(r){const t=ca(Number(e.maximum),r.unit.one,r.unit.many);return`Չափազանց մեծ արժեք․ սպասվում է, որ ${ua(e.origin??"արժեք")} կունենա ${n}${e.maximum.toString()} ${t}`}return`Չափազանց մեծ արժեք․ սպասվում է, որ ${ua(e.origin??"արժեք")} լինի ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);if(r){const t=ca(Number(e.minimum),r.unit.one,r.unit.many);return`Չափազանց փոքր արժեք․ սպասվում է, որ ${ua(e.origin)} կունենա ${n}${e.minimum.toString()} ${t}`}return`Չափազանց փոքր արժեք․ սպասվում է, որ ${ua(e.origin)} լինի ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Սխալ տող․ պետք է սկսվի 
"${t.prefix}"-ով`:"ends_with"===t.format?`Սխալ տող․ պետք է ավարտվի "${t.suffix}"-ով`:"includes"===t.format?`Սխալ տող․ պետք է պարունակի "${t.includes}"`:"regex"===t.format?`Սխալ տող․ պետք է համապատասխանի ${t.pattern} ձևաչափին`:`Սխալ ${n[t.format]??e.format}`}case"not_multiple_of":return`Սխալ թիվ․ պետք է բազմապատիկ լինի ${e.divisor}-ի`;case"unrecognized_keys":return`Չճանաչված բանալի${e.keys.length>1?"ներ":""}. ${l(e.keys,", ")}`;case"invalid_key":return`Սխալ բանալի ${ua(e.origin)}-ում`;case"invalid_union":default:return"Սխալ մուտքագրում";case"invalid_element":return`Սխալ արժեք ${ua(e.origin)}-ում`}}},pa=()=>{const e={string:{unit:"karakter",verb:"memiliki"},file:{unit:"byte",verb:"memiliki"},array:{unit:"item",verb:"memiliki"},set:{unit:"item",verb:"memiliki"}};function t(t){return e[t]??null}const n={regex:"input",email:"alamat email",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"tanggal dan waktu format ISO",date:"tanggal format ISO",time:"jam format ISO",duration:"durasi format ISO",ipv4:"alamat IPv4",ipv6:"alamat IPv6",cidrv4:"rentang alamat IPv4",cidrv6:"rentang alamat IPv6",base64:"string dengan enkode base64",base64url:"string dengan enkode base64url",json_string:"string JSON",e164:"angka E.164",jwt:"JWT",template_literal:"input"},r={nan:"NaN"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Input tidak valid: diharapkan instanceof ${e.expected}, diterima ${a}`:`Input tidak valid: diharapkan ${t}, diterima ${a}`}case"invalid_value":return 1===e.values.length?`Input tidak valid: diharapkan ${$(e.values[0])}`:`Pilihan tidak valid: diharapkan salah satu dari ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`Terlalu besar: diharapkan ${e.origin??"value"} memiliki ${n}${e.maximum.toString()} ${r.unit??"elemen"}`:`Terlalu 
besar: diharapkan ${e.origin??"value"} menjadi ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`Terlalu kecil: diharapkan ${e.origin} memiliki ${n}${e.minimum.toString()} ${r.unit}`:`Terlalu kecil: diharapkan ${e.origin} menjadi ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`String tidak valid: harus dimulai dengan "${t.prefix}"`:"ends_with"===t.format?`String tidak valid: harus berakhir dengan "${t.suffix}"`:"includes"===t.format?`String tidak valid: harus menyertakan "${t.includes}"`:"regex"===t.format?`String tidak valid: harus sesuai pola ${t.pattern}`:`${n[t.format]??e.format} tidak valid`}case"not_multiple_of":return`Angka tidak valid: harus kelipatan dari ${e.divisor}`;case"unrecognized_keys":return`Kunci tidak dikenali ${e.keys.length>1?"s":""}: ${l(e.keys,", ")}`;case"invalid_key":return`Kunci tidak valid di ${e.origin}`;case"invalid_union":default:return"Input tidak valid";case"invalid_element":return`Nilai tidak valid di ${e.origin}`}}},ha=()=>{const e={string:{unit:"stafi",verb:"að hafa"},file:{unit:"bæti",verb:"að hafa"},array:{unit:"hluti",verb:"að hafa"},set:{unit:"hluti",verb:"að hafa"}};function t(t){return e[t]??null}const n={regex:"gildi",email:"netfang",url:"vefslóð",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO dagsetning og tími",date:"ISO dagsetning",time:"ISO tími",duration:"ISO tímalengd",ipv4:"IPv4 address",ipv6:"IPv6 address",cidrv4:"IPv4 range",cidrv6:"IPv6 range",base64:"base64-encoded strengur",base64url:"base64url-encoded strengur",json_string:"JSON strengur",e164:"E.164 tölugildi",jwt:"JWT",template_literal:"gildi"},r={nan:"NaN",number:"númer",array:"fylki"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Rangt gildi: Þú slóst inn ${a} þar 
sem á að vera instanceof ${e.expected}`:`Rangt gildi: Þú slóst inn ${a} þar sem á að vera ${t}`}case"invalid_value":return 1===e.values.length?`Rangt gildi: gert ráð fyrir ${$(e.values[0])}`:`Ógilt val: má vera eitt af eftirfarandi ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`Of stórt: gert er ráð fyrir að ${e.origin??"gildi"} hafi ${n}${e.maximum.toString()} ${r.unit??"hluti"}`:`Of stórt: gert er ráð fyrir að ${e.origin??"gildi"} sé ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`Of lítið: gert er ráð fyrir að ${e.origin} hafi ${n}${e.minimum.toString()} ${r.unit}`:`Of lítið: gert er ráð fyrir að ${e.origin} sé ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Ógildur strengur: verður að byrja á "${t.prefix}"`:"ends_with"===t.format?`Ógildur strengur: verður að enda á "${t.suffix}"`:"includes"===t.format?`Ógildur strengur: verður að innihalda "${t.includes}"`:"regex"===t.format?`Ógildur strengur: verður að fylgja mynstri ${t.pattern}`:`Rangt ${n[t.format]??e.format}`}case"not_multiple_of":return`Röng tala: verður að vera margfeldi af ${e.divisor}`;case"unrecognized_keys":return`Óþekkt ${e.keys.length>1?"ir lyklar":"ur lykill"}: ${l(e.keys,", ")}`;case"invalid_key":return`Rangur lykill í ${e.origin}`;case"invalid_union":default:return"Rangt gildi";case"invalid_element":return`Rangt gildi í ${e.origin}`}}},fa=()=>{const e={string:{unit:"caratteri",verb:"avere"},file:{unit:"byte",verb:"avere"},array:{unit:"elementi",verb:"avere"},set:{unit:"elementi",verb:"avere"}};function t(t){return e[t]??null}const n={regex:"input",email:"indirizzo email",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"data e ora ISO",date:"data ISO",time:"ora ISO",duration:"durata ISO",ipv4:"indirizzo IPv4",ipv6:"indirizzo 
IPv6",cidrv4:"intervallo IPv4",cidrv6:"intervallo IPv6",base64:"stringa codificata in base64",base64url:"URL codificata in base64",json_string:"stringa JSON",e164:"numero E.164",jwt:"JWT",template_literal:"input"},r={nan:"NaN",number:"numero",array:"vettore"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Input non valido: atteso instanceof ${e.expected}, ricevuto ${a}`:`Input non valido: atteso ${t}, ricevuto ${a}`}case"invalid_value":return 1===e.values.length?`Input non valido: atteso ${$(e.values[0])}`:`Opzione non valida: atteso uno tra ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`Troppo grande: ${e.origin??"valore"} deve avere ${n}${e.maximum.toString()} ${r.unit??"elementi"}`:`Troppo grande: ${e.origin??"valore"} deve essere ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`Troppo piccolo: ${e.origin} deve avere ${n}${e.minimum.toString()} ${r.unit}`:`Troppo piccolo: ${e.origin} deve essere ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Stringa non valida: deve iniziare con "${t.prefix}"`:"ends_with"===t.format?`Stringa non valida: deve terminare con "${t.suffix}"`:"includes"===t.format?`Stringa non valida: deve includere "${t.includes}"`:"regex"===t.format?`Stringa non valida: deve corrispondere al pattern ${t.pattern}`:`Invalid ${n[t.format]??e.format}`}case"not_multiple_of":return`Numero non valido: deve essere un multiplo di ${e.divisor}`;case"unrecognized_keys":return`Chiav${e.keys.length>1?"i":"e"} non riconosciut${e.keys.length>1?"e":"a"}: ${l(e.keys,", ")}`;case"invalid_key":return`Chiave non valida in ${e.origin}`;case"invalid_union":default:return"Input non valido";case"invalid_element":return`Valore non valido in ${e.origin}`}}},ma=()=>{const 
e={string:{unit:"文字",verb:"である"},file:{unit:"バイト",verb:"である"},array:{unit:"要素",verb:"である"},set:{unit:"要素",verb:"である"}};function t(t){return e[t]??null}const n={regex:"入力値",email:"メールアドレス",url:"URL",emoji:"絵文字",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO日時",date:"ISO日付",time:"ISO時刻",duration:"ISO期間",ipv4:"IPv4アドレス",ipv6:"IPv6アドレス",cidrv4:"IPv4範囲",cidrv6:"IPv6範囲",base64:"base64エンコード文字列",base64url:"base64urlエンコード文字列",json_string:"JSON文字列",e164:"E.164番号",jwt:"JWT",template_literal:"入力値"},r={nan:"NaN",number:"数値",array:"配列"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`無効な入力: instanceof ${e.expected}が期待されましたが、${a}が入力されました`:`無効な入力: ${t}が期待されましたが、${a}が入力されました`}case"invalid_value":return 1===e.values.length?`無効な入力: ${$(e.values[0])}が期待されました`:`無効な選択: ${l(e.values,"、")}のいずれかである必要があります`;case"too_big":{const n=e.inclusive?"以下である":"より小さい",r=t(e.origin);return r?`大きすぎる値: ${e.origin??"値"}は${e.maximum.toString()}${r.unit??"要素"}${n}必要があります`:`大きすぎる値: ${e.origin??"値"}は${e.maximum.toString()}${n}必要があります`}case"too_small":{const n=e.inclusive?"以上である":"より大きい",r=t(e.origin);return r?`小さすぎる値: ${e.origin}は${e.minimum.toString()}${r.unit}${n}必要があります`:`小さすぎる値: ${e.origin}は${e.minimum.toString()}${n}必要があります`}case"invalid_format":{const t=e;return"starts_with"===t.format?`無効な文字列: "${t.prefix}"で始まる必要があります`:"ends_with"===t.format?`無効な文字列: "${t.suffix}"で終わる必要があります`:"includes"===t.format?`無効な文字列: "${t.includes}"を含む必要があります`:"regex"===t.format?`無効な文字列: パターン${t.pattern}に一致する必要があります`:`無効な${n[t.format]??e.format}`}case"not_multiple_of":return`無効な数値: ${e.divisor}の倍数である必要があります`;case"unrecognized_keys":return`認識されていないキー${e.keys.length>1?"群":""}: ${l(e.keys,"、")}`;case"invalid_key":return`${e.origin}内の無効なキー`;case"invalid_union":default:return"無効な入力";case"invalid_element":return`${e.origin}内の無効な値`}}},ga=()=>{const 
e={string:{unit:"სიმბოლო",verb:"უნდა შეიცავდეს"},file:{unit:"ბაიტი",verb:"უნდა შეიცავდეს"},array:{unit:"ელემენტი",verb:"უნდა შეიცავდეს"},set:{unit:"ელემენტი",verb:"უნდა შეიცავდეს"}};function t(t){return e[t]??null}const n={regex:"შეყვანა",email:"ელ-ფოსტის მისამართი",url:"URL",emoji:"ემოჯი",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"თარიღი-დრო",date:"თარიღი",time:"დრო",duration:"ხანგრძლივობა",ipv4:"IPv4 მისამართი",ipv6:"IPv6 მისამართი",cidrv4:"IPv4 დიაპაზონი",cidrv6:"IPv6 დიაპაზონი",base64:"base64-კოდირებული სტრინგი",base64url:"base64url-კოდირებული სტრინგი",json_string:"JSON სტრინგი",e164:"E.164 ნომერი",jwt:"JWT",template_literal:"შეყვანა"},r={nan:"NaN",number:"რიცხვი",string:"სტრინგი",boolean:"ბულეანი",function:"ფუნქცია",array:"მასივი"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`არასწორი შეყვანა: მოსალოდნელი instanceof ${e.expected}, მიღებული ${a}`:`არასწორი შეყვანა: მოსალოდნელი ${t}, მიღებული ${a}`}case"invalid_value":return 1===e.values.length?`არასწორი შეყვანა: მოსალოდნელი ${$(e.values[0])}`:`არასწორი ვარიანტი: მოსალოდნელია ერთ-ერთი ${l(e.values,"|")}-დან`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`ზედმეტად დიდი: მოსალოდნელი ${e.origin??"მნიშვნელობა"} ${r.verb} ${n}${e.maximum.toString()} ${r.unit}`:`ზედმეტად დიდი: მოსალოდნელი ${e.origin??"მნიშვნელობა"} იყოს ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`ზედმეტად პატარა: მოსალოდნელი ${e.origin} ${r.verb} ${n}${e.minimum.toString()} ${r.unit}`:`ზედმეტად პატარა: მოსალოდნელი ${e.origin} იყოს ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`არასწორი სტრინგი: უნდა იწყებოდეს "${t.prefix}"-ით`:"ends_with"===t.format?`არასწორი სტრინგი: უნდა მთავრდებოდეს "${t.suffix}"-ით`:"includes"===t.format?`არასწორი სტრინგი: 
უნდა შეიცავდეს "${t.includes}"-ს`:"regex"===t.format?`არასწორი სტრინგი: უნდა შეესაბამებოდეს შაბლონს ${t.pattern}`:`არასწორი ${n[t.format]??e.format}`}case"not_multiple_of":return`არასწორი რიცხვი: უნდა იყოს ${e.divisor}-ის ჯერადი`;case"unrecognized_keys":return`უცნობი გასაღებ${e.keys.length>1?"ები":"ი"}: ${l(e.keys,", ")}`;case"invalid_key":return`არასწორი გასაღები ${e.origin}-ში`;case"invalid_union":default:return"არასწორი შეყვანა";case"invalid_element":return`არასწორი მნიშვნელობა ${e.origin}-ში`}}},va=()=>{const e={string:{unit:"តួអក្សរ",verb:"គួរមាន"},file:{unit:"បៃ",verb:"គួរមាន"},array:{unit:"ធាតុ",verb:"គួរមាន"},set:{unit:"ធាតុ",verb:"គួរមាន"}};function t(t){return e[t]??null}const n={regex:"ទិន្នន័យបញ្ចូល",email:"អាសយដ្ឋានអ៊ីមែល",url:"URL",emoji:"សញ្ញាអារម្មណ៍",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"កាលបរិច្ឆេទ និងម៉ោង ISO",date:"កាលបរិច្ឆេទ ISO",time:"ម៉ោង ISO",duration:"រយៈពេល ISO",ipv4:"អាសយដ្ឋាន IPv4",ipv6:"អាសយដ្ឋាន IPv6",cidrv4:"ដែនអាសយដ្ឋាន IPv4",cidrv6:"ដែនអាសយដ្ឋាន IPv6",base64:"ខ្សែអក្សរអ៊ិកូដ base64",base64url:"ខ្សែអក្សរអ៊ិកូដ base64url",json_string:"ខ្សែអក្សរ JSON",e164:"លេខ E.164",jwt:"JWT",template_literal:"ទិន្នន័យបញ្ចូល"},r={nan:"NaN",number:"លេខ",array:"អារេ (Array)",null:"គ្មានតម្លៃ (null)"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`ទិន្នន័យបញ្ចូលមិនត្រឹមត្រូវ៖ ត្រូវការ instanceof ${e.expected} ប៉ុន្តែទទួលបាន ${a}`:`ទិន្នន័យបញ្ចូលមិនត្រឹមត្រូវ៖ ត្រូវការ ${t} ប៉ុន្តែទទួលបាន ${a}`}case"invalid_value":return 1===e.values.length?`ទិន្នន័យបញ្ចូលមិនត្រឹមត្រូវ៖ ត្រូវការ ${$(e.values[0])}`:`ជម្រើសមិនត្រឹមត្រូវ៖ ត្រូវជាមួយក្នុងចំណោម ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`ធំពេក៖ ត្រូវការ ${e.origin??"តម្លៃ"} ${n} ${e.maximum.toString()} ${r.unit??"ធាតុ"}`:`ធំពេក៖ ត្រូវការ ${e.origin??"តម្លៃ"} ${n} 
${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`តូចពេក៖ ត្រូវការ ${e.origin} ${n} ${e.minimum.toString()} ${r.unit}`:`តូចពេក៖ ត្រូវការ ${e.origin} ${n} ${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`ខ្សែអក្សរមិនត្រឹមត្រូវ៖ ត្រូវចាប់ផ្តើមដោយ "${t.prefix}"`:"ends_with"===t.format?`ខ្សែអក្សរមិនត្រឹមត្រូវ៖ ត្រូវបញ្ចប់ដោយ "${t.suffix}"`:"includes"===t.format?`ខ្សែអក្សរមិនត្រឹមត្រូវ៖ ត្រូវមាន "${t.includes}"`:"regex"===t.format?`ខ្សែអក្សរមិនត្រឹមត្រូវ៖ ត្រូវតែផ្គូផ្គងនឹងទម្រង់ដែលបានកំណត់ ${t.pattern}`:`មិនត្រឹមត្រូវ៖ ${n[t.format]??e.format}`}case"not_multiple_of":return`លេខមិនត្រឹមត្រូវ៖ ត្រូវតែជាពហុគុណនៃ ${e.divisor}`;case"unrecognized_keys":return`រកឃើញសោមិនស្គាល់៖ ${l(e.keys,", ")}`;case"invalid_key":return`សោមិនត្រឹមត្រូវនៅក្នុង ${e.origin}`;case"invalid_union":default:return"ទិន្នន័យមិនត្រឹមត្រូវ";case"invalid_element":return`ទិន្នន័យមិនត្រឹមត្រូវនៅក្នុង ${e.origin}`}}};function ba(){return{localeError:va()}}const ya=()=>{const e={string:{unit:"문자",verb:"to have"},file:{unit:"바이트",verb:"to have"},array:{unit:"개",verb:"to have"},set:{unit:"개",verb:"to have"}};function t(t){return e[t]??null}const n={regex:"입력",email:"이메일 주소",url:"URL",emoji:"이모지",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO 날짜시간",date:"ISO 날짜",time:"ISO 시간",duration:"ISO 기간",ipv4:"IPv4 주소",ipv6:"IPv6 주소",cidrv4:"IPv4 범위",cidrv6:"IPv6 범위",base64:"base64 인코딩 문자열",base64url:"base64url 인코딩 문자열",json_string:"JSON 문자열",e164:"E.164 번호",jwt:"JWT",template_literal:"입력"},r={nan:"NaN"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`잘못된 입력: 예상 타입은 instanceof ${e.expected}, 받은 타입은 ${a}입니다`:`잘못된 입력: 예상 타입은 ${t}, 받은 타입은 ${a}입니다`}case"invalid_value":return 1===e.values.length?`잘못된 입력: 값은 ${$(e.values[0])} 이어야 합니다`:`잘못된 옵션: ${l(e.values,"또는 
")} 중 하나여야 합니다`;case"too_big":{const n=e.inclusive?"이하":"미만",r="미만"===n?"이어야 합니다":"여야 합니다",a=t(e.origin),o=a?.unit??"요소";return a?`${e.origin??"값"}이 너무 큽니다: ${e.maximum.toString()}${o} ${n}${r}`:`${e.origin??"값"}이 너무 큽니다: ${e.maximum.toString()} ${n}${r}`}case"too_small":{const n=e.inclusive?"이상":"초과",r="이상"===n?"이어야 합니다":"여야 합니다",a=t(e.origin),o=a?.unit??"요소";return a?`${e.origin??"값"}이 너무 작습니다: ${e.minimum.toString()}${o} ${n}${r}`:`${e.origin??"값"}이 너무 작습니다: ${e.minimum.toString()} ${n}${r}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`잘못된 문자열: "${t.prefix}"(으)로 시작해야 합니다`:"ends_with"===t.format?`잘못된 문자열: "${t.suffix}"(으)로 끝나야 합니다`:"includes"===t.format?`잘못된 문자열: "${t.includes}"을(를) 포함해야 합니다`:"regex"===t.format?`잘못된 문자열: 정규식 ${t.pattern} 패턴과 일치해야 합니다`:`잘못된 ${n[t.format]??e.format}`}case"not_multiple_of":return`잘못된 숫자: ${e.divisor}의 배수여야 합니다`;case"unrecognized_keys":return`인식할 수 없는 키: ${l(e.keys,", ")}`;case"invalid_key":return`잘못된 키: ${e.origin}`;case"invalid_union":default:return"잘못된 입력";case"invalid_element":return`잘못된 값: ${e.origin}`}}},Oa=e=>e.charAt(0).toUpperCase()+e.slice(1);function wa(e){const t=Math.abs(e),n=t%10,r=t%100;return r>=11&&r<=19||0===n?"many":1===n?"one":"few"}const xa=()=>{const e={string:{unit:{one:"simbolis",few:"simboliai",many:"simbolių"},verb:{smaller:{inclusive:"turi būti ne ilgesnė kaip",notInclusive:"turi būti trumpesnė kaip"},bigger:{inclusive:"turi būti ne trumpesnė kaip",notInclusive:"turi būti ilgesnė kaip"}}},file:{unit:{one:"baitas",few:"baitai",many:"baitų"},verb:{smaller:{inclusive:"turi būti ne didesnis kaip",notInclusive:"turi būti mažesnis kaip"},bigger:{inclusive:"turi būti ne mažesnis kaip",notInclusive:"turi būti didesnis kaip"}}},array:{unit:{one:"elementą",few:"elementus",many:"elementų"},verb:{smaller:{inclusive:"turi turėti ne daugiau kaip",notInclusive:"turi turėti mažiau kaip"},bigger:{inclusive:"turi turėti ne mažiau kaip",notInclusive:"turi turėti daugiau 
kaip"}}},set:{unit:{one:"elementą",few:"elementus",many:"elementų"},verb:{smaller:{inclusive:"turi turėti ne daugiau kaip",notInclusive:"turi turėti mažiau kaip"},bigger:{inclusive:"turi turėti ne mažiau kaip",notInclusive:"turi turėti daugiau kaip"}}}};function t(t,n,r,a){const o=e[t]??null;return null===o?o:{unit:o.unit[n],verb:o.verb[a][r?"inclusive":"notInclusive"]}}const n={regex:"įvestis",email:"el. pašto adresas",url:"URL",emoji:"jaustukas",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO data ir laikas",date:"ISO data",time:"ISO laikas",duration:"ISO trukmė",ipv4:"IPv4 adresas",ipv6:"IPv6 adresas",cidrv4:"IPv4 tinklo prefiksas (CIDR)",cidrv6:"IPv6 tinklo prefiksas (CIDR)",base64:"base64 užkoduota eilutė",base64url:"base64url užkoduota eilutė",json_string:"JSON eilutė",e164:"E.164 numeris",jwt:"JWT",template_literal:"įvestis"},r={nan:"NaN",number:"skaičius",bigint:"sveikasis skaičius",string:"eilutė",boolean:"loginė reikšmė",undefined:"neapibrėžta reikšmė",function:"funkcija",symbol:"simbolis",array:"masyvas",object:"objektas",null:"nulinė reikšmė"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Gautas tipas ${a}, o tikėtasi - instanceof ${e.expected}`:`Gautas tipas ${a}, o tikėtasi - ${t}`}case"invalid_value":return 1===e.values.length?`Privalo būti ${$(e.values[0])}`:`Privalo būti vienas iš ${l(e.values,"|")} pasirinkimų`;case"too_big":{const n=r[e.origin]??e.origin,a=t(e.origin,wa(Number(e.maximum)),e.inclusive??!1,"smaller");if(a?.verb)return`${Oa(n??e.origin??"reikšmė")} ${a.verb} ${e.maximum.toString()} ${a.unit??"elementų"}`;const o=e.inclusive?"ne didesnis kaip":"mažesnis kaip";return`${Oa(n??e.origin??"reikšmė")} turi būti ${o} ${e.maximum.toString()} ${a?.unit}`}case"too_small":{const 
n=r[e.origin]??e.origin,a=t(e.origin,wa(Number(e.minimum)),e.inclusive??!1,"bigger");if(a?.verb)return`${Oa(n??e.origin??"reikšmė")} ${a.verb} ${e.minimum.toString()} ${a.unit??"elementų"}`;const o=e.inclusive?"ne mažesnis kaip":"didesnis kaip";return`${Oa(n??e.origin??"reikšmė")} turi būti ${o} ${e.minimum.toString()} ${a?.unit}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Eilutė privalo prasidėti "${t.prefix}"`:"ends_with"===t.format?`Eilutė privalo pasibaigti "${t.suffix}"`:"includes"===t.format?`Eilutė privalo įtraukti "${t.includes}"`:"regex"===t.format?`Eilutė privalo atitikti ${t.pattern}`:`Neteisingas ${n[t.format]??e.format}`}case"not_multiple_of":return`Skaičius privalo būti ${e.divisor} kartotinis.`;case"unrecognized_keys":return`Neatpažint${e.keys.length>1?"i":"as"} rakt${e.keys.length>1?"ai":"as"}: ${l(e.keys,", ")}`;case"invalid_key":return"Rastas klaidingas raktas";case"invalid_union":default:return"Klaidinga įvestis";case"invalid_element":{const t=r[e.origin]??e.origin;return`${Oa(t??e.origin??"reikšmė")} turi klaidingą įvestį`}}}},ka=()=>{const e={string:{unit:"знаци",verb:"да имаат"},file:{unit:"бајти",verb:"да имаат"},array:{unit:"ставки",verb:"да имаат"},set:{unit:"ставки",verb:"да имаат"}};function t(t){return e[t]??null}const n={regex:"внес",email:"адреса на е-пошта",url:"URL",emoji:"емоџи",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO датум и време",date:"ISO датум",time:"ISO време",duration:"ISO времетраење",ipv4:"IPv4 адреса",ipv6:"IPv6 адреса",cidrv4:"IPv4 опсег",cidrv6:"IPv6 опсег",base64:"base64-енкодирана низа",base64url:"base64url-енкодирана низа",json_string:"JSON низа",e164:"E.164 број",jwt:"JWT",template_literal:"внес"},r={nan:"NaN",number:"број",array:"низа"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Грешен внес: се 
очекува instanceof ${e.expected}, примено ${a}`:`Грешен внес: се очекува ${t}, примено ${a}`}case"invalid_value":return 1===e.values.length?`Invalid input: expected ${$(e.values[0])}`:`Грешана опција: се очекува една ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`Премногу голем: се очекува ${e.origin??"вредноста"} да има ${n}${e.maximum.toString()} ${r.unit??"елементи"}`:`Премногу голем: се очекува ${e.origin??"вредноста"} да биде ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`Премногу мал: се очекува ${e.origin} да има ${n}${e.minimum.toString()} ${r.unit}`:`Премногу мал: се очекува ${e.origin} да биде ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Неважечка низа: мора да започнува со "${t.prefix}"`:"ends_with"===t.format?`Неважечка низа: мора да завршува со "${t.suffix}"`:"includes"===t.format?`Неважечка низа: мора да вклучува "${t.includes}"`:"regex"===t.format?`Неважечка низа: мора да одгоара на патернот ${t.pattern}`:`Invalid ${n[t.format]??e.format}`}case"not_multiple_of":return`Грешен број: мора да биде делив со ${e.divisor}`;case"unrecognized_keys":return`${e.keys.length>1?"Непрепознаени клучеви":"Непрепознаен клуч"}: ${l(e.keys,", ")}`;case"invalid_key":return`Грешен клуч во ${e.origin}`;case"invalid_union":default:return"Грешен внес";case"invalid_element":return`Грешна вредност во ${e.origin}`}}},Sa=()=>{const e={string:{unit:"aksara",verb:"mempunyai"},file:{unit:"bait",verb:"mempunyai"},array:{unit:"elemen",verb:"mempunyai"},set:{unit:"elemen",verb:"mempunyai"}};function t(t){return e[t]??null}const n={regex:"input",email:"alamat e-mel",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"tarikh masa ISO",date:"tarikh ISO",time:"masa ISO",duration:"tempoh ISO",ipv4:"alamat IPv4",ipv6:"alamat 
IPv6",cidrv4:"julat IPv4",cidrv6:"julat IPv6",base64:"string dikodkan base64",base64url:"string dikodkan base64url",json_string:"string JSON",e164:"nombor E.164",jwt:"JWT",template_literal:"input"},r={nan:"NaN",number:"nombor"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Input tidak sah: dijangka instanceof ${e.expected}, diterima ${a}`:`Input tidak sah: dijangka ${t}, diterima ${a}`}case"invalid_value":return 1===e.values.length?`Input tidak sah: dijangka ${$(e.values[0])}`:`Pilihan tidak sah: dijangka salah satu daripada ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`Terlalu besar: dijangka ${e.origin??"nilai"} ${r.verb} ${n}${e.maximum.toString()} ${r.unit??"elemen"}`:`Terlalu besar: dijangka ${e.origin??"nilai"} adalah ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`Terlalu kecil: dijangka ${e.origin} ${r.verb} ${n}${e.minimum.toString()} ${r.unit}`:`Terlalu kecil: dijangka ${e.origin} adalah ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`String tidak sah: mesti bermula dengan "${t.prefix}"`:"ends_with"===t.format?`String tidak sah: mesti berakhir dengan "${t.suffix}"`:"includes"===t.format?`String tidak sah: mesti mengandungi "${t.includes}"`:"regex"===t.format?`String tidak sah: mesti sepadan dengan corak ${t.pattern}`:`${n[t.format]??e.format} tidak sah`}case"not_multiple_of":return`Nombor tidak sah: perlu gandaan ${e.divisor}`;case"unrecognized_keys":return`Kunci tidak dikenali: ${l(e.keys,", ")}`;case"invalid_key":return`Kunci tidak sah dalam ${e.origin}`;case"invalid_union":default:return"Input tidak sah";case"invalid_element":return`Nilai tidak sah dalam ${e.origin}`}}},_a=()=>{const 
e={string:{unit:"tekens",verb:"heeft"},file:{unit:"bytes",verb:"heeft"},array:{unit:"elementen",verb:"heeft"},set:{unit:"elementen",verb:"heeft"}};function t(t){return e[t]??null}const n={regex:"invoer",email:"emailadres",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO datum en tijd",date:"ISO datum",time:"ISO tijd",duration:"ISO duur",ipv4:"IPv4-adres",ipv6:"IPv6-adres",cidrv4:"IPv4-bereik",cidrv6:"IPv6-bereik",base64:"base64-gecodeerde tekst",base64url:"base64 URL-gecodeerde tekst",json_string:"JSON string",e164:"E.164-nummer",jwt:"JWT",template_literal:"invoer"},r={nan:"NaN",number:"getal"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Ongeldige invoer: verwacht instanceof ${e.expected}, ontving ${a}`:`Ongeldige invoer: verwacht ${t}, ontving ${a}`}case"invalid_value":return 1===e.values.length?`Ongeldige invoer: verwacht ${$(e.values[0])}`:`Ongeldige optie: verwacht één van ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin),a="date"===e.origin?"laat":"string"===e.origin?"lang":"groot";return r?`Te ${a}: verwacht dat ${e.origin??"waarde"} ${n}${e.maximum.toString()} ${r.unit??"elementen"} ${r.verb}`:`Te ${a}: verwacht dat ${e.origin??"waarde"} ${n}${e.maximum.toString()} is`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin),a="date"===e.origin?"vroeg":"string"===e.origin?"kort":"klein";return r?`Te ${a}: verwacht dat ${e.origin} ${n}${e.minimum.toString()} ${r.unit} ${r.verb}`:`Te ${a}: verwacht dat ${e.origin} ${n}${e.minimum.toString()} is`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Ongeldige tekst: moet met "${t.prefix}" beginnen`:"ends_with"===t.format?`Ongeldige tekst: moet op "${t.suffix}" eindigen`:"includes"===t.format?`Ongeldige tekst: moet "${t.includes}" 
bevatten`:"regex"===t.format?`Ongeldige tekst: moet overeenkomen met patroon ${t.pattern}`:`Ongeldig: ${n[t.format]??e.format}`}case"not_multiple_of":return`Ongeldig getal: moet een veelvoud van ${e.divisor} zijn`;case"unrecognized_keys":return`Onbekende key${e.keys.length>1?"s":""}: ${l(e.keys,", ")}`;case"invalid_key":return`Ongeldige key in ${e.origin}`;case"invalid_union":default:return"Ongeldige invoer";case"invalid_element":return`Ongeldige waarde in ${e.origin}`}}},Ta=()=>{const e={string:{unit:"tegn",verb:"å ha"},file:{unit:"bytes",verb:"å ha"},array:{unit:"elementer",verb:"å inneholde"},set:{unit:"elementer",verb:"å inneholde"}};function t(t){return e[t]??null}const n={regex:"input",email:"e-postadresse",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO dato- og klokkeslett",date:"ISO-dato",time:"ISO-klokkeslett",duration:"ISO-varighet",ipv4:"IPv4-område",ipv6:"IPv6-område",cidrv4:"IPv4-spekter",cidrv6:"IPv6-spekter",base64:"base64-enkodet streng",base64url:"base64url-enkodet streng",json_string:"JSON-streng",e164:"E.164-nummer",jwt:"JWT",template_literal:"input"},r={nan:"NaN",number:"tall",array:"liste"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Ugyldig input: forventet instanceof ${e.expected}, fikk ${a}`:`Ugyldig input: forventet ${t}, fikk ${a}`}case"invalid_value":return 1===e.values.length?`Ugyldig verdi: forventet ${$(e.values[0])}`:`Ugyldig valg: forventet en av ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`For stor(t): forventet ${e.origin??"value"} til å ha ${n}${e.maximum.toString()} ${r.unit??"elementer"}`:`For stor(t): forventet ${e.origin??"value"} til å ha ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`For lite(n): forventet ${e.origin} 
til å ha ${n}${e.minimum.toString()} ${r.unit}`:`For lite(n): forventet ${e.origin} til å ha ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Ugyldig streng: må starte med "${t.prefix}"`:"ends_with"===t.format?`Ugyldig streng: må ende med "${t.suffix}"`:"includes"===t.format?`Ugyldig streng: må inneholde "${t.includes}"`:"regex"===t.format?`Ugyldig streng: må matche mønsteret ${t.pattern}`:`Ugyldig ${n[t.format]??e.format}`}case"not_multiple_of":return`Ugyldig tall: må være et multiplum av ${e.divisor}`;case"unrecognized_keys":return`${e.keys.length>1?"Ukjente nøkler":"Ukjent nøkkel"}: ${l(e.keys,", ")}`;case"invalid_key":return`Ugyldig nøkkel i ${e.origin}`;case"invalid_union":default:return"Ugyldig input";case"invalid_element":return`Ugyldig verdi i ${e.origin}`}}},Ea=()=>{const e={string:{unit:"harf",verb:"olmalıdır"},file:{unit:"bayt",verb:"olmalıdır"},array:{unit:"unsur",verb:"olmalıdır"},set:{unit:"unsur",verb:"olmalıdır"}};function t(t){return e[t]??null}const n={regex:"giren",email:"epostagâh",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO hengâmı",date:"ISO tarihi",time:"ISO zamanı",duration:"ISO müddeti",ipv4:"IPv4 nişânı",ipv6:"IPv6 nişânı",cidrv4:"IPv4 menzili",cidrv6:"IPv6 menzili",base64:"base64-şifreli metin",base64url:"base64url-şifreli metin",json_string:"JSON metin",e164:"E.164 sayısı",jwt:"JWT",template_literal:"giren"},r={nan:"NaN",number:"numara",array:"saf",null:"gayb"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Fâsit giren: umulan instanceof ${e.expected}, alınan ${a}`:`Fâsit giren: umulan ${t}, alınan ${a}`}case"invalid_value":return 1===e.values.length?`Fâsit giren: umulan ${$(e.values[0])}`:`Fâsit tercih: mûteberler ${l(e.values,"|")}`;case"too_big":{const 
n=e.inclusive?"<=":"<",r=t(e.origin);return r?`Fazla büyük: ${e.origin??"value"}, ${n}${e.maximum.toString()} ${r.unit??"elements"} sahip olmalıydı.`:`Fazla büyük: ${e.origin??"value"}, ${n}${e.maximum.toString()} olmalıydı.`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`Fazla küçük: ${e.origin}, ${n}${e.minimum.toString()} ${r.unit} sahip olmalıydı.`:`Fazla küçük: ${e.origin}, ${n}${e.minimum.toString()} olmalıydı.`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Fâsit metin: "${t.prefix}" ile başlamalı.`:"ends_with"===t.format?`Fâsit metin: "${t.suffix}" ile bitmeli.`:"includes"===t.format?`Fâsit metin: "${t.includes}" ihtivâ etmeli.`:"regex"===t.format?`Fâsit metin: ${t.pattern} nakşına uymalı.`:`Fâsit ${n[t.format]??e.format}`}case"not_multiple_of":return`Fâsit sayı: ${e.divisor} katı olmalıydı.`;case"unrecognized_keys":return`Tanınmayan anahtar ${e.keys.length>1?"s":""}: ${l(e.keys,", ")}`;case"invalid_key":return`${e.origin} için tanınmayan anahtar var.`;case"invalid_union":return"Giren tanınamadı.";case"invalid_element":return`${e.origin} için tanınmayan kıymet var.`;default:return"Kıymet tanınamadı."}}},Aa=()=>{const e={string:{unit:"توکي",verb:"ولري"},file:{unit:"بایټس",verb:"ولري"},array:{unit:"توکي",verb:"ولري"},set:{unit:"توکي",verb:"ولري"}};function t(t){return e[t]??null}const n={regex:"ورودي",email:"بریښنالیک",url:"یو آر ال",emoji:"ایموجي",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"نیټه او وخت",date:"نېټه",time:"وخت",duration:"موده",ipv4:"د IPv4 پته",ipv6:"د IPv6 پته",cidrv4:"د IPv4 ساحه",cidrv6:"د IPv6 ساحه",base64:"base64-encoded متن",base64url:"base64url-encoded متن",json_string:"JSON متن",e164:"د E.164 شمېره",jwt:"JWT",template_literal:"ورودي"},r={nan:"NaN",number:"عدد",array:"ارې"};return e=>{switch(e.code){case"invalid_type":{const 
t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`ناسم ورودي: باید instanceof ${e.expected} وای, مګر ${a} ترلاسه شو`:`ناسم ورودي: باید ${t} وای, مګر ${a} ترلاسه شو`}case"invalid_value":return 1===e.values.length?`ناسم ورودي: باید ${$(e.values[0])} وای`:`ناسم انتخاب: باید یو له ${l(e.values,"|")} څخه وای`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`ډیر لوی: ${e.origin??"ارزښت"} باید ${n}${e.maximum.toString()} ${r.unit??"عنصرونه"} ولري`:`ډیر لوی: ${e.origin??"ارزښت"} باید ${n}${e.maximum.toString()} وي`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`ډیر کوچنی: ${e.origin} باید ${n}${e.minimum.toString()} ${r.unit} ولري`:`ډیر کوچنی: ${e.origin} باید ${n}${e.minimum.toString()} وي`}case"invalid_format":{const t=e;return"starts_with"===t.format?`ناسم متن: باید د "${t.prefix}" سره پیل شي`:"ends_with"===t.format?`ناسم متن: باید د "${t.suffix}" سره پای ته ورسيږي`:"includes"===t.format?`ناسم متن: باید "${t.includes}" ولري`:"regex"===t.format?`ناسم متن: باید د ${t.pattern} سره مطابقت ولري`:`${n[t.format]??e.format} ناسم دی`}case"not_multiple_of":return`ناسم عدد: باید د ${e.divisor} مضرب وي`;case"unrecognized_keys":return`ناسم ${e.keys.length>1?"کلیډونه":"کلیډ"}: ${l(e.keys,", ")}`;case"invalid_key":return`ناسم کلیډ په ${e.origin} کې`;case"invalid_union":default:return"ناسمه ورودي";case"invalid_element":return`ناسم عنصر په ${e.origin} کې`}}},Ca=()=>{const e={string:{unit:"znaków",verb:"mieć"},file:{unit:"bajtów",verb:"mieć"},array:{unit:"elementów",verb:"mieć"},set:{unit:"elementów",verb:"mieć"}};function t(t){return e[t]??null}const n={regex:"wyrażenie",email:"adres email",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"data i godzina w formacie ISO",date:"data w formacie ISO",time:"godzina w formacie ISO",duration:"czas trwania ISO",ipv4:"adres IPv4",ipv6:"adres 
IPv6",cidrv4:"zakres IPv4",cidrv6:"zakres IPv6",base64:"ciąg znaków zakodowany w formacie base64",base64url:"ciąg znaków zakodowany w formacie base64url",json_string:"ciąg znaków w formacie JSON",e164:"liczba E.164",jwt:"JWT",template_literal:"wejście"},r={nan:"NaN",number:"liczba",array:"tablica"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Nieprawidłowe dane wejściowe: oczekiwano instanceof ${e.expected}, otrzymano ${a}`:`Nieprawidłowe dane wejściowe: oczekiwano ${t}, otrzymano ${a}`}case"invalid_value":return 1===e.values.length?`Nieprawidłowe dane wejściowe: oczekiwano ${$(e.values[0])}`:`Nieprawidłowa opcja: oczekiwano jednej z wartości ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`Za duża wartość: oczekiwano, że ${e.origin??"wartość"} będzie mieć ${n}${e.maximum.toString()} ${r.unit??"elementów"}`:`Zbyt duż(y/a/e): oczekiwano, że ${e.origin??"wartość"} będzie wynosić ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`Za mała wartość: oczekiwano, że ${e.origin??"wartość"} będzie mieć ${n}${e.minimum.toString()} ${r.unit??"elementów"}`:`Zbyt mał(y/a/e): oczekiwano, że ${e.origin??"wartość"} będzie wynosić ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Nieprawidłowy ciąg znaków: musi zaczynać się od "${t.prefix}"`:"ends_with"===t.format?`Nieprawidłowy ciąg znaków: musi kończyć się na "${t.suffix}"`:"includes"===t.format?`Nieprawidłowy ciąg znaków: musi zawierać "${t.includes}"`:"regex"===t.format?`Nieprawidłowy ciąg znaków: musi odpowiadać wzorcowi ${t.pattern}`:`Nieprawidłow(y/a/e) ${n[t.format]??e.format}`}case"not_multiple_of":return`Nieprawidłowa liczba: musi być wielokrotnością ${e.divisor}`;case"unrecognized_keys":return`Nierozpoznane klucze${e.keys.length>1?"s":""}: ${l(e.keys,", ")}`;case"invalid_key":return`Nieprawidłowy klucz w 
${e.origin}`;case"invalid_union":default:return"Nieprawidłowe dane wejściowe";case"invalid_element":return`Nieprawidłowa wartość w ${e.origin}`}}},$a=()=>{const e={string:{unit:"caracteres",verb:"ter"},file:{unit:"bytes",verb:"ter"},array:{unit:"itens",verb:"ter"},set:{unit:"itens",verb:"ter"}};function t(t){return e[t]??null}const n={regex:"padrão",email:"endereço de e-mail",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"data e hora ISO",date:"data ISO",time:"hora ISO",duration:"duração ISO",ipv4:"endereço IPv4",ipv6:"endereço IPv6",cidrv4:"faixa de IPv4",cidrv6:"faixa de IPv6",base64:"texto codificado em base64",base64url:"URL codificada em base64",json_string:"texto JSON",e164:"número E.164",jwt:"JWT",template_literal:"entrada"},r={nan:"NaN",number:"número",null:"nulo"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Tipo inválido: esperado instanceof ${e.expected}, recebido ${a}`:`Tipo inválido: esperado ${t}, recebido ${a}`}case"invalid_value":return 1===e.values.length?`Entrada inválida: esperado ${$(e.values[0])}`:`Opção inválida: esperada uma das ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`Muito grande: esperado que ${e.origin??"valor"} tivesse ${n}${e.maximum.toString()} ${r.unit??"elementos"}`:`Muito grande: esperado que ${e.origin??"valor"} fosse ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`Muito pequeno: esperado que ${e.origin} tivesse ${n}${e.minimum.toString()} ${r.unit}`:`Muito pequeno: esperado que ${e.origin} fosse ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Texto inválido: deve começar com "${t.prefix}"`:"ends_with"===t.format?`Texto inválido: deve terminar com 
"${t.suffix}"`:"includes"===t.format?`Texto inválido: deve incluir "${t.includes}"`:"regex"===t.format?`Texto inválido: deve corresponder ao padrão ${t.pattern}`:`${n[t.format]??e.format} inválido`}case"not_multiple_of":return`Número inválido: deve ser múltiplo de ${e.divisor}`;case"unrecognized_keys":return`Chave${e.keys.length>1?"s":""} desconhecida${e.keys.length>1?"s":""}: ${l(e.keys,", ")}`;case"invalid_key":return`Chave inválida em ${e.origin}`;case"invalid_union":return"Entrada inválida";case"invalid_element":return`Valor inválido em ${e.origin}`;default:return"Campo inválido"}}};function Pa(e,t,n,r){const a=Math.abs(e),o=a%10,i=a%100;return i>=11&&i<=19?r:1===o?t:o>=2&&o<=4?n:r}const Da=()=>{const e={string:{unit:{one:"символ",few:"символа",many:"символов"},verb:"иметь"},file:{unit:{one:"байт",few:"байта",many:"байт"},verb:"иметь"},array:{unit:{one:"элемент",few:"элемента",many:"элементов"},verb:"иметь"},set:{unit:{one:"элемент",few:"элемента",many:"элементов"},verb:"иметь"}};function t(t){return e[t]??null}const n={regex:"ввод",email:"email адрес",url:"URL",emoji:"эмодзи",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO дата и время",date:"ISO дата",time:"ISO время",duration:"ISO длительность",ipv4:"IPv4 адрес",ipv6:"IPv6 адрес",cidrv4:"IPv4 диапазон",cidrv6:"IPv6 диапазон",base64:"строка в формате base64",base64url:"строка в формате base64url",json_string:"JSON строка",e164:"номер E.164",jwt:"JWT",template_literal:"ввод"},r={nan:"NaN",number:"число",array:"массив"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Неверный ввод: ожидалось instanceof ${e.expected}, получено ${a}`:`Неверный ввод: ожидалось ${t}, получено ${a}`}case"invalid_value":return 1===e.values.length?`Неверный ввод: ожидалось ${$(e.values[0])}`:`Неверный вариант: ожидалось одно из 
${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);if(r){const t=Pa(Number(e.maximum),r.unit.one,r.unit.few,r.unit.many);return`Слишком большое значение: ожидалось, что ${e.origin??"значение"} будет иметь ${n}${e.maximum.toString()} ${t}`}return`Слишком большое значение: ожидалось, что ${e.origin??"значение"} будет ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);if(r){const t=Pa(Number(e.minimum),r.unit.one,r.unit.few,r.unit.many);return`Слишком маленькое значение: ожидалось, что ${e.origin} будет иметь ${n}${e.minimum.toString()} ${t}`}return`Слишком маленькое значение: ожидалось, что ${e.origin} будет ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Неверная строка: должна начинаться с "${t.prefix}"`:"ends_with"===t.format?`Неверная строка: должна заканчиваться на "${t.suffix}"`:"includes"===t.format?`Неверная строка: должна содержать "${t.includes}"`:"regex"===t.format?`Неверная строка: должна соответствовать шаблону ${t.pattern}`:`Неверный ${n[t.format]??e.format}`}case"not_multiple_of":return`Неверное число: должно быть кратным ${e.divisor}`;case"unrecognized_keys":return`Нераспознанн${e.keys.length>1?"ые":"ый"} ключ${e.keys.length>1?"и":""}: ${l(e.keys,", ")}`;case"invalid_key":return`Неверный ключ в ${e.origin}`;case"invalid_union":default:return"Неверные входные данные";case"invalid_element":return`Неверное значение в ${e.origin}`}}},Ia=()=>{const e={string:{unit:"znakov",verb:"imeti"},file:{unit:"bajtov",verb:"imeti"},array:{unit:"elementov",verb:"imeti"},set:{unit:"elementov",verb:"imeti"}};function t(t){return e[t]??null}const n={regex:"vnos",email:"e-poštni naslov",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO datum in čas",date:"ISO datum",time:"ISO čas",duration:"ISO trajanje",ipv4:"IPv4 naslov",ipv6:"IPv6 
naslov",cidrv4:"obseg IPv4",cidrv6:"obseg IPv6",base64:"base64 kodiran niz",base64url:"base64url kodiran niz",json_string:"JSON niz",e164:"E.164 številka",jwt:"JWT",template_literal:"vnos"},r={nan:"NaN",number:"število",array:"tabela"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Neveljaven vnos: pričakovano instanceof ${e.expected}, prejeto ${a}`:`Neveljaven vnos: pričakovano ${t}, prejeto ${a}`}case"invalid_value":return 1===e.values.length?`Neveljaven vnos: pričakovano ${$(e.values[0])}`:`Neveljavna možnost: pričakovano eno izmed ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`Preveliko: pričakovano, da bo ${e.origin??"vrednost"} imelo ${n}${e.maximum.toString()} ${r.unit??"elementov"}`:`Preveliko: pričakovano, da bo ${e.origin??"vrednost"} ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`Premajhno: pričakovano, da bo ${e.origin} imelo ${n}${e.minimum.toString()} ${r.unit}`:`Premajhno: pričakovano, da bo ${e.origin} ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Neveljaven niz: mora se začeti z "${t.prefix}"`:"ends_with"===t.format?`Neveljaven niz: mora se končati z "${t.suffix}"`:"includes"===t.format?`Neveljaven niz: mora vsebovati "${t.includes}"`:"regex"===t.format?`Neveljaven niz: mora ustrezati vzorcu ${t.pattern}`:`Neveljaven ${n[t.format]??e.format}`}case"not_multiple_of":return`Neveljavno število: mora biti večkratnik ${e.divisor}`;case"unrecognized_keys":return`Neprepoznan${e.keys.length>1?"i ključi":" ključ"}: ${l(e.keys,", ")}`;case"invalid_key":return`Neveljaven ključ v ${e.origin}`;case"invalid_union":default:return"Neveljaven vnos";case"invalid_element":return`Neveljavna vrednost v ${e.origin}`}}},Ma=()=>{const e={string:{unit:"tecken",verb:"att ha"},file:{unit:"bytes",verb:"att ha"},array:{unit:"objekt",verb:"att 
innehålla"},set:{unit:"objekt",verb:"att innehålla"}};function t(t){return e[t]??null}const n={regex:"reguljärt uttryck",email:"e-postadress",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO-datum och tid",date:"ISO-datum",time:"ISO-tid",duration:"ISO-varaktighet",ipv4:"IPv4-intervall",ipv6:"IPv6-intervall",cidrv4:"IPv4-spektrum",cidrv6:"IPv6-spektrum",base64:"base64-kodad sträng",base64url:"base64url-kodad sträng",json_string:"JSON-sträng",e164:"E.164-nummer",jwt:"JWT",template_literal:"mall-literal"},r={nan:"NaN",number:"antal",array:"lista"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Ogiltig inmatning: förväntat instanceof ${e.expected}, fick ${a}`:`Ogiltig inmatning: förväntat ${t}, fick ${a}`}case"invalid_value":return 1===e.values.length?`Ogiltig inmatning: förväntat ${$(e.values[0])}`:`Ogiltigt val: förväntade en av ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`För stor(t): förväntade ${e.origin??"värdet"} att ha ${n}${e.maximum.toString()} ${r.unit??"element"}`:`För stor(t): förväntat ${e.origin??"värdet"} att ha ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`För lite(t): förväntade ${e.origin??"värdet"} att ha ${n}${e.minimum.toString()} ${r.unit}`:`För lite(t): förväntade ${e.origin??"värdet"} att ha ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Ogiltig sträng: måste börja med "${t.prefix}"`:"ends_with"===t.format?`Ogiltig sträng: måste sluta med "${t.suffix}"`:"includes"===t.format?`Ogiltig sträng: måste innehålla "${t.includes}"`:"regex"===t.format?`Ogiltig sträng: måste matcha mönstret "${t.pattern}"`:`Ogiltig(t) ${n[t.format]??e.format}`}case"not_multiple_of":return`Ogiltigt tal: måste vara en 
multipel av ${e.divisor}`;case"unrecognized_keys":return`${e.keys.length>1?"Okända nycklar":"Okänd nyckel"}: ${l(e.keys,", ")}`;case"invalid_key":return`Ogiltig nyckel i ${e.origin??"värdet"}`;case"invalid_union":default:return"Ogiltig input";case"invalid_element":return`Ogiltigt värde i ${e.origin??"värdet"}`}}},Na=()=>{const e={string:{unit:"எழுத்துக்கள்",verb:"கொண்டிருக்க வேண்டும்"},file:{unit:"பைட்டுகள்",verb:"கொண்டிருக்க வேண்டும்"},array:{unit:"உறுப்புகள்",verb:"கொண்டிருக்க வேண்டும்"},set:{unit:"உறுப்புகள்",verb:"கொண்டிருக்க வேண்டும்"}};function t(t){return e[t]??null}const n={regex:"உள்ளீடு",email:"மின்னஞ்சல் முகவரி",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO தேதி நேரம்",date:"ISO தேதி",time:"ISO நேரம்",duration:"ISO கால அளவு",ipv4:"IPv4 முகவரி",ipv6:"IPv6 முகவரி",cidrv4:"IPv4 வரம்பு",cidrv6:"IPv6 வரம்பு",base64:"base64-encoded சரம்",base64url:"base64url-encoded சரம்",json_string:"JSON சரம்",e164:"E.164 எண்",jwt:"JWT",template_literal:"input"},r={nan:"NaN",number:"எண்",array:"அணி",null:"வெறுமை"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`தவறான உள்ளீடு: எதிர்பார்க்கப்பட்டது instanceof ${e.expected}, பெறப்பட்டது ${a}`:`தவறான உள்ளீடு: எதிர்பார்க்கப்பட்டது ${t}, பெறப்பட்டது ${a}`}case"invalid_value":return 1===e.values.length?`தவறான உள்ளீடு: எதிர்பார்க்கப்பட்டது ${$(e.values[0])}`:`தவறான விருப்பம்: எதிர்பார்க்கப்பட்டது ${l(e.values,"|")} இல் ஒன்று`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`மிக பெரியது: எதிர்பார்க்கப்பட்டது ${e.origin??"மதிப்பு"} ${n}${e.maximum.toString()} ${r.unit??"உறுப்புகள்"} ஆக இருக்க வேண்டும்`:`மிக பெரியது: எதிர்பார்க்கப்பட்டது ${e.origin??"மதிப்பு"} ${n}${e.maximum.toString()} ஆக இருக்க வேண்டும்`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`மிகச் சிறியது: 
எதிர்பார்க்கப்பட்டது ${e.origin} ${n}${e.minimum.toString()} ${r.unit} ஆக இருக்க வேண்டும்`:`மிகச் சிறியது: எதிர்பார்க்கப்பட்டது ${e.origin} ${n}${e.minimum.toString()} ஆக இருக்க வேண்டும்`}case"invalid_format":{const t=e;return"starts_with"===t.format?`தவறான சரம்: "${t.prefix}" இல் தொடங்க வேண்டும்`:"ends_with"===t.format?`தவறான சரம்: "${t.suffix}" இல் முடிவடைய வேண்டும்`:"includes"===t.format?`தவறான சரம்: "${t.includes}" ஐ உள்ளடக்க வேண்டும்`:"regex"===t.format?`தவறான சரம்: ${t.pattern} முறைபாட்டுடன் பொருந்த வேண்டும்`:`தவறான ${n[t.format]??e.format}`}case"not_multiple_of":return`தவறான எண்: ${e.divisor} இன் பலமாக இருக்க வேண்டும்`;case"unrecognized_keys":return`அடையாளம் தெரியாத விசை${e.keys.length>1?"கள்":""}: ${l(e.keys,", ")}`;case"invalid_key":return`${e.origin} இல் தவறான விசை`;case"invalid_union":default:return"தவறான உள்ளீடு";case"invalid_element":return`${e.origin} இல் தவறான மதிப்பு`}}},Ra=()=>{const e={string:{unit:"ตัวอักษร",verb:"ควรมี"},file:{unit:"ไบต์",verb:"ควรมี"},array:{unit:"รายการ",verb:"ควรมี"},set:{unit:"รายการ",verb:"ควรมี"}};function t(t){return e[t]??null}const n={regex:"ข้อมูลที่ป้อน",email:"ที่อยู่อีเมล",url:"URL",emoji:"อิโมจิ",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"วันที่เวลาแบบ ISO",date:"วันที่แบบ ISO",time:"เวลาแบบ ISO",duration:"ช่วงเวลาแบบ ISO",ipv4:"ที่อยู่ IPv4",ipv6:"ที่อยู่ IPv6",cidrv4:"ช่วง IP แบบ IPv4",cidrv6:"ช่วง IP แบบ IPv6",base64:"ข้อความแบบ Base64",base64url:"ข้อความแบบ Base64 สำหรับ URL",json_string:"ข้อความแบบ JSON",e164:"เบอร์โทรศัพท์ระหว่างประเทศ (E.164)",jwt:"โทเคน JWT",template_literal:"ข้อมูลที่ป้อน"},r={nan:"NaN",number:"ตัวเลข",array:"อาร์เรย์ (Array)",null:"ไม่มีค่า (null)"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`ประเภทข้อมูลไม่ถูกต้อง: ควรเป็น instanceof ${e.expected} แต่ได้รับ ${a}`:`ประเภทข้อมูลไม่ถูกต้อง: ควรเป็น 
${t} แต่ได้รับ ${a}`}case"invalid_value":return 1===e.values.length?`ค่าไม่ถูกต้อง: ควรเป็น ${$(e.values[0])}`:`ตัวเลือกไม่ถูกต้อง: ควรเป็นหนึ่งใน ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"ไม่เกิน":"น้อยกว่า",r=t(e.origin);return r?`เกินกำหนด: ${e.origin??"ค่า"} ควรมี${n} ${e.maximum.toString()} ${r.unit??"รายการ"}`:`เกินกำหนด: ${e.origin??"ค่า"} ควรมี${n} ${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?"อย่างน้อย":"มากกว่า",r=t(e.origin);return r?`น้อยกว่ากำหนด: ${e.origin} ควรมี${n} ${e.minimum.toString()} ${r.unit}`:`น้อยกว่ากำหนด: ${e.origin} ควรมี${n} ${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`รูปแบบไม่ถูกต้อง: ข้อความต้องขึ้นต้นด้วย "${t.prefix}"`:"ends_with"===t.format?`รูปแบบไม่ถูกต้อง: ข้อความต้องลงท้ายด้วย "${t.suffix}"`:"includes"===t.format?`รูปแบบไม่ถูกต้อง: ข้อความต้องมี "${t.includes}" อยู่ในข้อความ`:"regex"===t.format?`รูปแบบไม่ถูกต้อง: ต้องตรงกับรูปแบบที่กำหนด ${t.pattern}`:`รูปแบบไม่ถูกต้อง: ${n[t.format]??e.format}`}case"not_multiple_of":return`ตัวเลขไม่ถูกต้อง: ต้องเป็นจำนวนที่หารด้วย ${e.divisor} ได้ลงตัว`;case"unrecognized_keys":return`พบคีย์ที่ไม่รู้จัก: ${l(e.keys,", ")}`;case"invalid_key":return`คีย์ไม่ถูกต้องใน ${e.origin}`;case"invalid_union":return"ข้อมูลไม่ถูกต้อง: ไม่ตรงกับรูปแบบยูเนียนที่กำหนดไว้";case"invalid_element":return`ข้อมูลไม่ถูกต้องใน ${e.origin}`;default:return"ข้อมูลไม่ถูกต้อง"}}},La=()=>{const e={string:{unit:"karakter",verb:"olmalı"},file:{unit:"bayt",verb:"olmalı"},array:{unit:"öğe",verb:"olmalı"},set:{unit:"öğe",verb:"olmalı"}};function t(t){return e[t]??null}const n={regex:"girdi",email:"e-posta adresi",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO tarih ve saat",date:"ISO tarih",time:"ISO saat",duration:"ISO süre",ipv4:"IPv4 adresi",ipv6:"IPv6 adresi",cidrv4:"IPv4 aralığı",cidrv6:"IPv6 aralığı",base64:"base64 ile 
şifrelenmiş metin",base64url:"base64url ile şifrelenmiş metin",json_string:"JSON dizesi",e164:"E.164 sayısı",jwt:"JWT",template_literal:"Şablon dizesi"},r={nan:"NaN"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Geçersiz değer: beklenen instanceof ${e.expected}, alınan ${a}`:`Geçersiz değer: beklenen ${t}, alınan ${a}`}case"invalid_value":return 1===e.values.length?`Geçersiz değer: beklenen ${$(e.values[0])}`:`Geçersiz seçenek: aşağıdakilerden biri olmalı: ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`Çok büyük: beklenen ${e.origin??"değer"} ${n}${e.maximum.toString()} ${r.unit??"öğe"}`:`Çok büyük: beklenen ${e.origin??"değer"} ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`Çok küçük: beklenen ${e.origin} ${n}${e.minimum.toString()} ${r.unit}`:`Çok küçük: beklenen ${e.origin} ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Geçersiz metin: "${t.prefix}" ile başlamalı`:"ends_with"===t.format?`Geçersiz metin: "${t.suffix}" ile bitmeli`:"includes"===t.format?`Geçersiz metin: "${t.includes}" içermeli`:"regex"===t.format?`Geçersiz metin: ${t.pattern} desenine uymalı`:`Geçersiz ${n[t.format]??e.format}`}case"not_multiple_of":return`Geçersiz sayı: ${e.divisor} ile tam bölünebilmeli`;case"unrecognized_keys":return`Tanınmayan anahtar${e.keys.length>1?"lar":""}: ${l(e.keys,", ")}`;case"invalid_key":return`${e.origin} içinde geçersiz anahtar`;case"invalid_union":default:return"Geçersiz değer";case"invalid_element":return`${e.origin} içinde geçersiz değer`}}},Ba=()=>{const e={string:{unit:"символів",verb:"матиме"},file:{unit:"байтів",verb:"матиме"},array:{unit:"елементів",verb:"матиме"},set:{unit:"елементів",verb:"матиме"}};function t(t){return e[t]??null}const n={regex:"вхідні дані",email:"адреса електронної 
пошти",url:"URL",emoji:"емодзі",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"дата та час ISO",date:"дата ISO",time:"час ISO",duration:"тривалість ISO",ipv4:"адреса IPv4",ipv6:"адреса IPv6",cidrv4:"діапазон IPv4",cidrv6:"діапазон IPv6",base64:"рядок у кодуванні base64",base64url:"рядок у кодуванні base64url",json_string:"рядок JSON",e164:"номер E.164",jwt:"JWT",template_literal:"вхідні дані"},r={nan:"NaN",number:"число",array:"масив"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Неправильні вхідні дані: очікується instanceof ${e.expected}, отримано ${a}`:`Неправильні вхідні дані: очікується ${t}, отримано ${a}`}case"invalid_value":return 1===e.values.length?`Неправильні вхідні дані: очікується ${$(e.values[0])}`:`Неправильна опція: очікується одне з ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`Занадто велике: очікується, що ${e.origin??"значення"} ${r.verb} ${n}${e.maximum.toString()} ${r.unit??"елементів"}`:`Занадто велике: очікується, що ${e.origin??"значення"} буде ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`Занадто мале: очікується, що ${e.origin} ${r.verb} ${n}${e.minimum.toString()} ${r.unit}`:`Занадто мале: очікується, що ${e.origin} буде ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Неправильний рядок: повинен починатися з "${t.prefix}"`:"ends_with"===t.format?`Неправильний рядок: повинен закінчуватися на "${t.suffix}"`:"includes"===t.format?`Неправильний рядок: повинен містити "${t.includes}"`:"regex"===t.format?`Неправильний рядок: повинен відповідати шаблону ${t.pattern}`:`Неправильний ${n[t.format]??e.format}`}case"not_multiple_of":return`Неправильне число: повинно бути кратним 
${e.divisor}`;case"unrecognized_keys":return`Нерозпізнаний ключ${e.keys.length>1?"і":""}: ${l(e.keys,", ")}`;case"invalid_key":return`Неправильний ключ у ${e.origin}`;case"invalid_union":default:return"Неправильні вхідні дані";case"invalid_element":return`Неправильне значення у ${e.origin}`}}};function ja(){return{localeError:Ba()}}const Ua=()=>{const e={string:{unit:"حروف",verb:"ہونا"},file:{unit:"بائٹس",verb:"ہونا"},array:{unit:"آئٹمز",verb:"ہونا"},set:{unit:"آئٹمز",verb:"ہونا"}};function t(t){return e[t]??null}const n={regex:"ان پٹ",email:"ای میل ایڈریس",url:"یو آر ایل",emoji:"ایموجی",uuid:"یو یو آئی ڈی",uuidv4:"یو یو آئی ڈی وی 4",uuidv6:"یو یو آئی ڈی وی 6",nanoid:"نینو آئی ڈی",guid:"جی یو آئی ڈی",cuid:"سی یو آئی ڈی",cuid2:"سی یو آئی ڈی 2",ulid:"یو ایل آئی ڈی",xid:"ایکس آئی ڈی",ksuid:"کے ایس یو آئی ڈی",datetime:"آئی ایس او ڈیٹ ٹائم",date:"آئی ایس او تاریخ",time:"آئی ایس او وقت",duration:"آئی ایس او مدت",ipv4:"آئی پی وی 4 ایڈریس",ipv6:"آئی پی وی 6 ایڈریس",cidrv4:"آئی پی وی 4 رینج",cidrv6:"آئی پی وی 6 رینج",base64:"بیس 64 ان کوڈڈ سٹرنگ",base64url:"بیس 64 یو آر ایل ان کوڈڈ سٹرنگ",json_string:"جے ایس او این سٹرنگ",e164:"ای 164 نمبر",jwt:"جے ڈبلیو ٹی",template_literal:"ان پٹ"},r={nan:"NaN",number:"نمبر",array:"آرے",null:"نل"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`غلط ان پٹ: instanceof ${e.expected} متوقع تھا، ${a} موصول ہوا`:`غلط ان پٹ: ${t} متوقع تھا، ${a} موصول ہوا`}case"invalid_value":return 1===e.values.length?`غلط ان پٹ: ${$(e.values[0])} متوقع تھا`:`غلط آپشن: ${l(e.values,"|")} میں سے ایک متوقع تھا`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`بہت بڑا: ${e.origin??"ویلیو"} کے ${n}${e.maximum.toString()} ${r.unit??"عناصر"} ہونے متوقع تھے`:`بہت بڑا: ${e.origin??"ویلیو"} کا ${n}${e.maximum.toString()} ہونا متوقع تھا`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`بہت چھوٹا: ${e.origin} کے ${n}${e.minimum.toString()} 
${r.unit} ہونے متوقع تھے`:`بہت چھوٹا: ${e.origin} کا ${n}${e.minimum.toString()} ہونا متوقع تھا`}case"invalid_format":{const t=e;return"starts_with"===t.format?`غلط سٹرنگ: "${t.prefix}" سے شروع ہونا چاہیے`:"ends_with"===t.format?`غلط سٹرنگ: "${t.suffix}" پر ختم ہونا چاہیے`:"includes"===t.format?`غلط سٹرنگ: "${t.includes}" شامل ہونا چاہیے`:"regex"===t.format?`غلط سٹرنگ: پیٹرن ${t.pattern} سے میچ ہونا چاہیے`:`غلط ${n[t.format]??e.format}`}case"not_multiple_of":return`غلط نمبر: ${e.divisor} کا مضاعف ہونا چاہیے`;case"unrecognized_keys":return`غیر تسلیم شدہ کی${e.keys.length>1?"ز":""}: ${l(e.keys,"، ")}`;case"invalid_key":return`${e.origin} میں غلط کی`;case"invalid_union":default:return"غلط ان پٹ";case"invalid_element":return`${e.origin} میں غلط ویلیو`}}},za=()=>{const e={string:{unit:"belgi",verb:"bo‘lishi kerak"},file:{unit:"bayt",verb:"bo‘lishi kerak"},array:{unit:"element",verb:"bo‘lishi kerak"},set:{unit:"element",verb:"bo‘lishi kerak"}};function t(t){return e[t]??null}const n={regex:"kirish",email:"elektron pochta manzili",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO sana va vaqti",date:"ISO sana",time:"ISO vaqt",duration:"ISO davomiylik",ipv4:"IPv4 manzil",ipv6:"IPv6 manzil",mac:"MAC manzil",cidrv4:"IPv4 diapazon",cidrv6:"IPv6 diapazon",base64:"base64 kodlangan satr",base64url:"base64url kodlangan satr",json_string:"JSON satr",e164:"E.164 raqam",jwt:"JWT",template_literal:"kirish"},r={nan:"NaN",number:"raqam",array:"massiv"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Noto‘g‘ri kirish: kutilgan instanceof ${e.expected}, qabul qilingan ${a}`:`Noto‘g‘ri kirish: kutilgan ${t}, qabul qilingan ${a}`}case"invalid_value":return 1===e.values.length?`Noto‘g‘ri kirish: kutilgan ${$(e.values[0])}`:`Noto‘g‘ri variant: quyidagilardan biri kutilgan 
${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`Juda katta: kutilgan ${e.origin??"qiymat"} ${n}${e.maximum.toString()} ${r.unit} ${r.verb}`:`Juda katta: kutilgan ${e.origin??"qiymat"} ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`Juda kichik: kutilgan ${e.origin} ${n}${e.minimum.toString()} ${r.unit} ${r.verb}`:`Juda kichik: kutilgan ${e.origin} ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Noto‘g‘ri satr: "${t.prefix}" bilan boshlanishi kerak`:"ends_with"===t.format?`Noto‘g‘ri satr: "${t.suffix}" bilan tugashi kerak`:"includes"===t.format?`Noto‘g‘ri satr: "${t.includes}" ni o‘z ichiga olishi kerak`:"regex"===t.format?`Noto‘g‘ri satr: ${t.pattern} shabloniga mos kelishi kerak`:`Noto‘g‘ri ${n[t.format]??e.format}`}case"not_multiple_of":return`Noto‘g‘ri raqam: ${e.divisor} ning karralisi bo‘lishi kerak`;case"unrecognized_keys":return`Noma’lum kalit${e.keys.length>1?"lar":""}: ${l(e.keys,", ")}`;case"invalid_key":return`${e.origin} dagi kalit noto‘g‘ri`;case"invalid_union":default:return"Noto‘g‘ri kirish";case"invalid_element":return`${e.origin} da noto‘g‘ri qiymat`}}},Fa=()=>{const e={string:{unit:"ký tự",verb:"có"},file:{unit:"byte",verb:"có"},array:{unit:"phần tử",verb:"có"},set:{unit:"phần tử",verb:"có"}};function t(t){return e[t]??null}const n={regex:"đầu vào",email:"địa chỉ email",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ngày giờ ISO",date:"ngày ISO",time:"giờ ISO",duration:"khoảng thời gian ISO",ipv4:"địa chỉ IPv4",ipv6:"địa chỉ IPv6",cidrv4:"dải IPv4",cidrv6:"dải IPv6",base64:"chuỗi mã hóa base64",base64url:"chuỗi mã hóa base64url",json_string:"chuỗi JSON",e164:"số E.164",jwt:"JWT",template_literal:"đầu vào"},r={nan:"NaN",number:"số",array:"mảng"};return e=>{switch(e.code){case"invalid_type":{const 
t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Đầu vào không hợp lệ: mong đợi instanceof ${e.expected}, nhận được ${a}`:`Đầu vào không hợp lệ: mong đợi ${t}, nhận được ${a}`}case"invalid_value":return 1===e.values.length?`Đầu vào không hợp lệ: mong đợi ${$(e.values[0])}`:`Tùy chọn không hợp lệ: mong đợi một trong các giá trị ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`Quá lớn: mong đợi ${e.origin??"giá trị"} ${r.verb} ${n}${e.maximum.toString()} ${r.unit??"phần tử"}`:`Quá lớn: mong đợi ${e.origin??"giá trị"} ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`Quá nhỏ: mong đợi ${e.origin} ${r.verb} ${n}${e.minimum.toString()} ${r.unit}`:`Quá nhỏ: mong đợi ${e.origin} ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Chuỗi không hợp lệ: phải bắt đầu bằng "${t.prefix}"`:"ends_with"===t.format?`Chuỗi không hợp lệ: phải kết thúc bằng "${t.suffix}"`:"includes"===t.format?`Chuỗi không hợp lệ: phải bao gồm "${t.includes}"`:"regex"===t.format?`Chuỗi không hợp lệ: phải khớp với mẫu ${t.pattern}`:`${n[t.format]??e.format} không hợp lệ`}case"not_multiple_of":return`Số không hợp lệ: phải là bội số của ${e.divisor}`;case"unrecognized_keys":return`Khóa không được nhận dạng: ${l(e.keys,", ")}`;case"invalid_key":return`Khóa không hợp lệ trong ${e.origin}`;case"invalid_union":default:return"Đầu vào không hợp lệ";case"invalid_element":return`Giá trị không hợp lệ trong ${e.origin}`}}},Za=()=>{const e={string:{unit:"字符",verb:"包含"},file:{unit:"字节",verb:"包含"},array:{unit:"项",verb:"包含"},set:{unit:"项",verb:"包含"}};function t(t){return e[t]??null}const 
n={regex:"输入",email:"电子邮件",url:"URL",emoji:"表情符号",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO日期时间",date:"ISO日期",time:"ISO时间",duration:"ISO时长",ipv4:"IPv4地址",ipv6:"IPv6地址",cidrv4:"IPv4网段",cidrv6:"IPv6网段",base64:"base64编码字符串",base64url:"base64url编码字符串",json_string:"JSON字符串",e164:"E.164号码",jwt:"JWT",template_literal:"输入"},r={nan:"NaN",number:"数字",array:"数组",null:"空值(null)"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`无效输入:期望 instanceof ${e.expected},实际接收 ${a}`:`无效输入:期望 ${t},实际接收 ${a}`}case"invalid_value":return 1===e.values.length?`无效输入:期望 ${$(e.values[0])}`:`无效选项:期望以下之一 ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`数值过大:期望 ${e.origin??"值"} ${n}${e.maximum.toString()} ${r.unit??"个元素"}`:`数值过大:期望 ${e.origin??"值"} ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`数值过小:期望 ${e.origin} ${n}${e.minimum.toString()} ${r.unit}`:`数值过小:期望 ${e.origin} ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`无效字符串:必须以 "${t.prefix}" 开头`:"ends_with"===t.format?`无效字符串:必须以 "${t.suffix}" 结尾`:"includes"===t.format?`无效字符串:必须包含 "${t.includes}"`:"regex"===t.format?`无效字符串:必须满足正则表达式 ${t.pattern}`:`无效${n[t.format]??e.format}`}case"not_multiple_of":return`无效数字:必须是 ${e.divisor} 的倍数`;case"unrecognized_keys":return`出现未知的键(key): ${l(e.keys,", ")}`;case"invalid_key":return`${e.origin} 中的键(key)无效`;case"invalid_union":default:return"无效输入";case"invalid_element":return`${e.origin} 中包含无效值(value)`}}},Qa=()=>{const e={string:{unit:"字元",verb:"擁有"},file:{unit:"位元組",verb:"擁有"},array:{unit:"項目",verb:"擁有"},set:{unit:"項目",verb:"擁有"}};function t(t){return e[t]??null}const 
n={regex:"輸入",email:"郵件地址",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"ISO 日期時間",date:"ISO 日期",time:"ISO 時間",duration:"ISO 期間",ipv4:"IPv4 位址",ipv6:"IPv6 位址",cidrv4:"IPv4 範圍",cidrv6:"IPv6 範圍",base64:"base64 編碼字串",base64url:"base64url 編碼字串",json_string:"JSON 字串",e164:"E.164 數值",jwt:"JWT",template_literal:"輸入"},r={nan:"NaN"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`無效的輸入值:預期為 instanceof ${e.expected},但收到 ${a}`:`無效的輸入值:預期為 ${t},但收到 ${a}`}case"invalid_value":return 1===e.values.length?`無效的輸入值:預期為 ${$(e.values[0])}`:`無效的選項:預期為以下其中之一 ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`數值過大:預期 ${e.origin??"值"} 應為 ${n}${e.maximum.toString()} ${r.unit??"個元素"}`:`數值過大:預期 ${e.origin??"值"} 應為 ${n}${e.maximum.toString()}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`數值過小:預期 ${e.origin} 應為 ${n}${e.minimum.toString()} ${r.unit}`:`數值過小:預期 ${e.origin} 應為 ${n}${e.minimum.toString()}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`無效的字串:必須以 "${t.prefix}" 開頭`:"ends_with"===t.format?`無效的字串:必須以 "${t.suffix}" 結尾`:"includes"===t.format?`無效的字串:必須包含 "${t.includes}"`:"regex"===t.format?`無效的字串:必須符合格式 ${t.pattern}`:`無效的 ${n[t.format]??e.format}`}case"not_multiple_of":return`無效的數字:必須為 ${e.divisor} 的倍數`;case"unrecognized_keys":return`無法識別的鍵值${e.keys.length>1?"們":""}:${l(e.keys,"、")}`;case"invalid_key":return`${e.origin} 中有無效的鍵值`;case"invalid_union":default:return"無效的輸入值";case"invalid_element":return`${e.origin} 中有無效的值`}}},Ha=()=>{const e={string:{unit:"àmi",verb:"ní"},file:{unit:"bytes",verb:"ní"},array:{unit:"nkan",verb:"ní"},set:{unit:"nkan",verb:"ní"}};function t(t){return e[t]??null}const n={regex:"ẹ̀rọ ìbáwọlé",email:"àdírẹ́sì 
ìmẹ́lì",url:"URL",emoji:"emoji",uuid:"UUID",uuidv4:"UUIDv4",uuidv6:"UUIDv6",nanoid:"nanoid",guid:"GUID",cuid:"cuid",cuid2:"cuid2",ulid:"ULID",xid:"XID",ksuid:"KSUID",datetime:"àkókò ISO",date:"ọjọ́ ISO",time:"àkókò ISO",duration:"àkókò tó pé ISO",ipv4:"àdírẹ́sì IPv4",ipv6:"àdírẹ́sì IPv6",cidrv4:"àgbègbè IPv4",cidrv6:"àgbègbè IPv6",base64:"ọ̀rọ̀ tí a kọ́ ní base64",base64url:"ọ̀rọ̀ base64url",json_string:"ọ̀rọ̀ JSON",e164:"nọ́mbà E.164",jwt:"JWT",template_literal:"ẹ̀rọ ìbáwọlé"},r={nan:"NaN",number:"nọ́mbà",array:"akopọ"};return e=>{switch(e.code){case"invalid_type":{const t=r[e.expected]??e.expected,n=q(e.input),a=r[n]??n;return/^[A-Z]/.test(e.expected)?`Ìbáwọlé aṣìṣe: a ní láti fi instanceof ${e.expected}, àmọ̀ a rí ${a}`:`Ìbáwọlé aṣìṣe: a ní láti fi ${t}, àmọ̀ a rí ${a}`}case"invalid_value":return 1===e.values.length?`Ìbáwọlé aṣìṣe: a ní láti fi ${$(e.values[0])}`:`Àṣàyàn aṣìṣe: yan ọ̀kan lára ${l(e.values,"|")}`;case"too_big":{const n=e.inclusive?"<=":"<",r=t(e.origin);return r?`Tó pọ̀ jù: a ní láti jẹ́ pé ${e.origin??"iye"} ${r.verb} ${n}${e.maximum} ${r.unit}`:`Tó pọ̀ jù: a ní láti jẹ́ ${n}${e.maximum}`}case"too_small":{const n=e.inclusive?">=":">",r=t(e.origin);return r?`Kéré ju: a ní láti jẹ́ pé ${e.origin} ${r.verb} ${n}${e.minimum} ${r.unit}`:`Kéré ju: a ní láti jẹ́ ${n}${e.minimum}`}case"invalid_format":{const t=e;return"starts_with"===t.format?`Ọ̀rọ̀ aṣìṣe: gbọ́dọ̀ bẹ̀rẹ̀ pẹ̀lú "${t.prefix}"`:"ends_with"===t.format?`Ọ̀rọ̀ aṣìṣe: gbọ́dọ̀ parí pẹ̀lú "${t.suffix}"`:"includes"===t.format?`Ọ̀rọ̀ aṣìṣe: gbọ́dọ̀ ní "${t.includes}"`:"regex"===t.format?`Ọ̀rọ̀ aṣìṣe: gbọ́dọ̀ bá àpẹẹrẹ mu ${t.pattern}`:`Aṣìṣe: ${n[t.format]??e.format}`}case"not_multiple_of":return`Nọ́mbà aṣìṣe: gbọ́dọ̀ jẹ́ èyà pípín ti ${e.divisor}`;case"unrecognized_keys":return`Bọtìnì àìmọ̀: ${l(e.keys,", ")}`;case"invalid_key":return`Bọtìnì aṣìṣe nínú ${e.origin}`;case"invalid_union":default:return"Ìbáwọlé aṣìṣe";case"invalid_element":return`Iye aṣìṣe nínú 
${e.origin}`}}},Va=Object.freeze(Object.defineProperty({__proto__:null,ar:function(){return{localeError:Qr()}},az:function(){return{localeError:Hr()}},be:function(){return{localeError:qr()}},bg:function(){return{localeError:Wr()}},ca:function(){return{localeError:Xr()}},cs:function(){return{localeError:Gr()}},da:function(){return{localeError:Yr()}},de:function(){return{localeError:Kr()}},en:ea,eo:function(){return{localeError:ta()}},es:function(){return{localeError:na()}},fa:function(){return{localeError:ra()}},fi:function(){return{localeError:aa()}},fr:function(){return{localeError:oa()}},frCA:function(){return{localeError:ia()}},he:function(){return{localeError:sa()}},hu:function(){return{localeError:la()}},hy:function(){return{localeError:da()}},id:function(){return{localeError:pa()}},is:function(){return{localeError:ha()}},it:function(){return{localeError:fa()}},ja:function(){return{localeError:ma()}},ka:function(){return{localeError:ga()}},kh:function(){return ba()},km:ba,ko:function(){return{localeError:ya()}},lt:function(){return{localeError:xa()}},mk:function(){return{localeError:ka()}},ms:function(){return{localeError:Sa()}},nl:function(){return{localeError:_a()}},no:function(){return{localeError:Ta()}},ota:function(){return{localeError:Ea()}},pl:function(){return{localeError:Ca()}},ps:function(){return{localeError:Aa()}},pt:function(){return{localeError:$a()}},ru:function(){return{localeError:Da()}},sl:function(){return{localeError:Ia()}},sv:function(){return{localeError:Ma()}},ta:function(){return{localeError:Na()}},th:function(){return{localeError:Ra()}},tr:function(){return{localeError:La()}},ua:function(){return ja()},uk:ja,ur:function(){return{localeError:Ua()}},uz:function(){return{localeError:za()}},vi:function(){return{localeError:Fa()}},yo:function(){return{localeError:Ha()}},zhCN:function(){return{localeError:Za()}},zhTW:function(){return{localeError:Qa()}}},Symbol.toStringTag,{value:"Module"}));var qa;const 
Wa=Symbol("ZodOutput"),Xa=Symbol("ZodInput");class Ga{constructor(){this._map=new WeakMap,this._idmap=new Map}add(e,...t){const n=t[0];return this._map.set(e,n),n&&"object"==typeof n&&"id"in n&&this._idmap.set(n.id,e),this}clear(){return this._map=new WeakMap,this._idmap=new Map,this}remove(e){const t=this._map.get(e);return t&&"object"==typeof t&&"id"in t&&this._idmap.delete(t.id),this._map.delete(e),this}get(e){const t=e._zod.parent;if(t){const n={...this.get(t)??{}};delete n.id;const r={...n,...this._map.get(e)};return Object.keys(r).length?r:void 0}return this._map.get(e)}has(e){return this._map.has(e)}}function Ya(){return new Ga}(qa=globalThis).__zod_globalRegistry??(qa.__zod_globalRegistry=Ya());const Ka=globalThis.__zod_globalRegistry;function Ja(e,t){return new e({type:"string",...C(t)})}function eo(e,t){return new e({type:"string",coerce:!0,...C(t)})}function to(e,t){return new e({type:"string",format:"email",check:"string_format",abort:!1,...C(t)})}function no(e,t){return new e({type:"string",format:"guid",check:"string_format",abort:!1,...C(t)})}function ro(e,t){return new e({type:"string",format:"uuid",check:"string_format",abort:!1,...C(t)})}function ao(e,t){return new e({type:"string",format:"uuid",check:"string_format",abort:!1,version:"v4",...C(t)})}function oo(e,t){return new e({type:"string",format:"uuid",check:"string_format",abort:!1,version:"v6",...C(t)})}function io(e,t){return new e({type:"string",format:"uuid",check:"string_format",abort:!1,version:"v7",...C(t)})}function so(e,t){return new e({type:"string",format:"url",check:"string_format",abort:!1,...C(t)})}function lo(e,t){return new e({type:"string",format:"emoji",check:"string_format",abort:!1,...C(t)})}function co(e,t){return new e({type:"string",format:"nanoid",check:"string_format",abort:!1,...C(t)})}function uo(e,t){return new e({type:"string",format:"cuid",check:"string_format",abort:!1,...C(t)})}function po(e,t){return new 
e({type:"string",format:"cuid2",check:"string_format",abort:!1,...C(t)})}function ho(e,t){return new e({type:"string",format:"ulid",check:"string_format",abort:!1,...C(t)})}function fo(e,t){return new e({type:"string",format:"xid",check:"string_format",abort:!1,...C(t)})}function mo(e,t){return new e({type:"string",format:"ksuid",check:"string_format",abort:!1,...C(t)})}function go(e,t){return new e({type:"string",format:"ipv4",check:"string_format",abort:!1,...C(t)})}function vo(e,t){return new e({type:"string",format:"ipv6",check:"string_format",abort:!1,...C(t)})}function bo(e,t){return new e({type:"string",format:"mac",check:"string_format",abort:!1,...C(t)})}function yo(e,t){return new e({type:"string",format:"cidrv4",check:"string_format",abort:!1,...C(t)})}function Oo(e,t){return new e({type:"string",format:"cidrv6",check:"string_format",abort:!1,...C(t)})}function wo(e,t){return new e({type:"string",format:"base64",check:"string_format",abort:!1,...C(t)})}function xo(e,t){return new e({type:"string",format:"base64url",check:"string_format",abort:!1,...C(t)})}function ko(e,t){return new e({type:"string",format:"e164",check:"string_format",abort:!1,...C(t)})}function So(e,t){return new e({type:"string",format:"jwt",check:"string_format",abort:!1,...C(t)})}const _o={Any:null,Minute:-1,Second:0,Millisecond:3,Microsecond:6};function To(e,t){return new e({type:"string",format:"datetime",check:"string_format",offset:!1,local:!1,precision:null,...C(t)})}function Eo(e,t){return new e({type:"string",format:"date",check:"string_format",...C(t)})}function Ao(e,t){return new e({type:"string",format:"time",check:"string_format",precision:null,...C(t)})}function Co(e,t){return new e({type:"string",format:"duration",check:"string_format",...C(t)})}function $o(e,t){return new e({type:"number",checks:[],...C(t)})}function Po(e,t){return new e({type:"number",coerce:!0,checks:[],...C(t)})}function Do(e,t){return new 
e({type:"number",check:"number_format",abort:!1,format:"safeint",...C(t)})}function Io(e,t){return new e({type:"number",check:"number_format",abort:!1,format:"float32",...C(t)})}function Mo(e,t){return new e({type:"number",check:"number_format",abort:!1,format:"float64",...C(t)})}function No(e,t){return new e({type:"number",check:"number_format",abort:!1,format:"int32",...C(t)})}function Ro(e,t){return new e({type:"number",check:"number_format",abort:!1,format:"uint32",...C(t)})}function Lo(e,t){return new e({type:"boolean",...C(t)})}function Bo(e,t){return new e({type:"boolean",coerce:!0,...C(t)})}function jo(e,t){return new e({type:"bigint",...C(t)})}function Uo(e,t){return new e({type:"bigint",coerce:!0,...C(t)})}function zo(e,t){return new e({type:"bigint",check:"bigint_format",abort:!1,format:"int64",...C(t)})}function Fo(e,t){return new e({type:"bigint",check:"bigint_format",abort:!1,format:"uint64",...C(t)})}function Zo(e,t){return new e({type:"symbol",...C(t)})}function Qo(e,t){return new e({type:"undefined",...C(t)})}function Ho(e,t){return new e({type:"null",...C(t)})}function Vo(e){return new e({type:"any"})}function qo(e){return new e({type:"unknown"})}function Wo(e,t){return new e({type:"never",...C(t)})}function Xo(e,t){return new e({type:"void",...C(t)})}function Go(e,t){return new e({type:"date",...C(t)})}function Yo(e,t){return new e({type:"date",coerce:!0,...C(t)})}function Ko(e,t){return new e({type:"nan",...C(t)})}function Jo(e,t){return new Dt({check:"less_than",...C(t),value:e,inclusive:!1})}function ei(e,t){return new Dt({check:"less_than",...C(t),value:e,inclusive:!0})}function ti(e,t){return new It({check:"greater_than",...C(t),value:e,inclusive:!1})}function ni(e,t){return new It({check:"greater_than",...C(t),value:e,inclusive:!0})}function ri(e){return ti(0,e)}function ai(e){return Jo(0,e)}function oi(e){return ei(0,e)}function ii(e){return ni(0,e)}function si(e,t){return new Mt({check:"multiple_of",...C(t),value:e})}function 
li(e,t){return new Lt({check:"max_size",...C(t),maximum:e})}function ci(e,t){return new Bt({check:"min_size",...C(t),minimum:e})}function ui(e,t){return new jt({check:"size_equals",...C(t),size:e})}function di(e,t){return new Ut({check:"max_length",...C(t),maximum:e})}function pi(e,t){return new zt({check:"min_length",...C(t),minimum:e})}function hi(e,t){return new Ft({check:"length_equals",...C(t),length:e})}function fi(e,t){return new Qt({check:"string_format",format:"regex",...C(t),pattern:e})}function mi(e){return new Ht({check:"string_format",format:"lowercase",...C(e)})}function gi(e){return new Vt({check:"string_format",format:"uppercase",...C(e)})}function vi(e,t){return new qt({check:"string_format",format:"includes",...C(t),includes:e})}function bi(e,t){return new Wt({check:"string_format",format:"starts_with",...C(t),prefix:e})}function yi(e,t){return new Xt({check:"string_format",format:"ends_with",...C(t),suffix:e})}function Oi(e,t,n){return new Yt({check:"property",property:e,schema:t,...C(n)})}function wi(e,t){return new Kt({check:"mime_type",mime:e,...C(t)})}function xi(e){return new Jt({check:"overwrite",tx:e})}function ki(e){return xi((t=>t.normalize(e)))}function Si(){return xi((e=>e.trim()))}function _i(){return xi((e=>e.toLowerCase()))}function Ti(){return xi((e=>e.toUpperCase()))}function Ei(){return xi((e=>y(e)))}function Ai(e,t,n){return new e({type:"array",element:t,...C(n)})}function Ci(e,t){return new e({type:"file",...C(t)})}function $i(e,t,n){const r=C(n);return r.abort??(r.abort=!0),new e({type:"custom",check:"custom",fn:t,...r})}function Pi(e,t,n){return new e({type:"custom",check:"custom",fn:t,...C(n)})}function Di(e){const t=Ii((n=>(n.addIssue=e=>{if("string"==typeof e)n.issues.push(W(e,n.value,t._zod.def));else{const r=e;r.fatal&&(r.continue=!1),r.code??(r.code="custom"),r.input??(r.input=n.value),r.inst??(r.inst=t),r.continue??(r.continue=!t._zod.def.abort),n.issues.push(W(r))}},e(n.value,n))));return t}function Ii(e,t){const 
n=new $t({check:"custom",...C(t)});return n._zod.check=e,n}function Mi(e){const t=new $t({check:"describe"});return t._zod.onattach=[t=>{const n=Ka.get(t)??{};Ka.add(t,{...n,description:e})}],t._zod.check=()=>{},t}function Ni(e){const t=new $t({check:"meta"});return t._zod.onattach=[t=>{const n=Ka.get(t)??{};Ka.add(t,{...n,...e})}],t._zod.check=()=>{},t}function Ri(e,t){const n=C(t);let r=n.truthy??["true","1","yes","on","y","enabled"],a=n.falsy??["false","0","no","off","n","disabled"];"sensitive"!==n.case&&(r=r.map((e=>"string"==typeof e?e.toLowerCase():e)),a=a.map((e=>"string"==typeof e?e.toLowerCase():e)));const o=new Set(r),i=new Set(a),s=e.Codec??Ir,l=e.Boolean??Rn,c=new s({type:"pipe",in:new(e.String??rn)({type:"string",error:n.error}),out:new l({type:"boolean",error:n.error}),transform:(e,t)=>{let r=e;return"sensitive"!==n.case&&(r=r.toLowerCase()),!!o.has(r)||!i.has(r)&&(t.issues.push({code:"invalid_value",expected:"stringbool",values:[...o,...i],input:t.value,inst:c,continue:!1}),{})},reverseTransform:(e,t)=>!0===e?r[0]||"true":a[0]||"false",error:n.error});return c}function Li(e,t,n,r={}){const a=C(r),o={...C(r),check:"string_format",type:"string",format:t,fn:"function"==typeof n?n:e=>n.test(e),...a};return n instanceof RegExp&&(o.pattern=n),new e(o)}function Bi(e){let t=e?.target??"draft-2020-12";return"draft-4"===t&&(t="draft-04"),"draft-7"===t&&(t="draft-07"),{processors:e.processors??{},metadataRegistry:e?.metadata??Ka,target:t,unrepresentable:e?.unrepresentable??"throw",override:e?.override??(()=>{}),io:e?.io??"output",counter:0,seen:new Map,cycles:e?.cycles??"ref",reused:e?.reused??"inline",external:e?.external??void 0}}function ji(e,t,n={path:[],schemaPath:[]}){var r;const a=e._zod.def,o=t.seen.get(e);if(o)return o.count++,n.schemaPath.includes(e)&&(o.cycle=n.path),o.schema;const i={schema:{},count:1,cycle:void 0,path:n.path};t.seen.set(e,i);const s=e._zod.toJSONSchema?.();if(s)i.schema=s;else{const 
r={...n,schemaPath:[...n.schemaPath,e],path:n.path};if(e._zod.processJSONSchema)e._zod.processJSONSchema(t,i.schema,r);else{const n=i.schema,o=t.processors[a.type];if(!o)throw new Error(`[toJSONSchema]: Non-representable type encountered: ${a.type}`);o(e,t,n,r)}const o=e._zod.parent;o&&(i.ref||(i.ref=o),ji(o,t,r),t.seen.get(o).isParent=!0)}const l=t.metadataRegistry.get(e);return l&&Object.assign(i.schema,l),"input"===t.io&&Fi(e)&&(delete i.schema.examples,delete i.schema.default),"input"===t.io&&i.schema._prefault&&((r=i.schema).default??(r.default=i.schema._prefault)),delete i.schema._prefault,t.seen.get(e).schema}function Ui(e,t){const n=e.seen.get(t);if(!n)throw new Error("Unprocessed schema. This is a bug in Zod.");const r=new Map;for(const t of e.seen.entries()){const n=e.metadataRegistry.get(t[0])?.id;if(n){const e=r.get(n);if(e&&e!==t[0])throw new Error(`Duplicate schema id "${n}" detected during JSON Schema conversion. Two different schemas cannot share the same id when converted together.`);r.set(n,t[0])}}const a=t=>{if(t[1].schema.$ref)return;const r=t[1],{ref:a,defId:o}=(t=>{const r="draft-2020-12"===e.target?"$defs":"definitions";if(e.external){const n=e.external.registry.get(t[0])?.id,a=e.external.uri??(e=>e);if(n)return{ref:a(n)};const o=t[1].defId??t[1].schema.id??"schema"+e.counter++;return t[1].defId=o,{defId:o,ref:`${a("__shared")}#/${r}/${o}`}}if(t[1]===n)return{ref:"#"};const a=`#/${r}/`,o=t[1].schema.id??"__schema"+e.counter++;return{defId:o,ref:a+o}})(t);r.def={...r.schema},o&&(r.defId=o);const i=r.schema;for(const e in i)delete i[e];i.$ref=a};if("throw"===e.cycles)for(const t of e.seen.entries()){const e=t[1];if(e.cycle)throw new Error(`Cycle detected: #/${e.cycle?.join("/")}/\n\nSet the \`cycles\` parameter to \`"ref"\` to resolve cyclical schemas with defs.`)}for(const n of e.seen.entries()){const r=n[1];if(t===n[0]){a(n);continue}if(e.external){const r=e.external.registry.get(n[0])?.id;if(t!==n[0]&&r){a(n);continue}}const 
o=e.metadataRegistry.get(n[0])?.id;(o||r.cycle||r.count>1&&"ref"===e.reused)&&a(n)}}function zi(e,t){const n=e.seen.get(t);if(!n)throw new Error("Unprocessed schema. This is a bug in Zod.");const r=t=>{const n=e.seen.get(t);if(null===n.ref)return;const a=n.def??n.schema,o={...a},i=n.ref;if(n.ref=null,i){r(i);const n=e.seen.get(i),s=n.schema;if(!s.$ref||"draft-07"!==e.target&&"draft-04"!==e.target&&"openapi-3.0"!==e.target?Object.assign(a,s):(a.allOf=a.allOf??[],a.allOf.push(s)),Object.assign(a,o),t._zod.parent===i)for(const e in a)"$ref"!==e&&"allOf"!==e&&(e in o||delete a[e]);if(s.$ref)for(const e in a)"$ref"!==e&&"allOf"!==e&&e in n.def&&JSON.stringify(a[e])===JSON.stringify(n.def[e])&&delete a[e]}const s=t._zod.parent;if(s&&s!==i){r(s);const t=e.seen.get(s);if(t?.schema.$ref&&(a.$ref=t.schema.$ref,t.def))for(const e in a)"$ref"!==e&&"allOf"!==e&&e in t.def&&JSON.stringify(a[e])===JSON.stringify(t.def[e])&&delete a[e]}e.override({zodSchema:t,jsonSchema:a,path:n.path??[]})};for(const t of[...e.seen.entries()].reverse())r(t[0]);const a={};if("draft-2020-12"===e.target?a.$schema="https://json-schema.org/draft/2020-12/schema":"draft-07"===e.target?a.$schema="http://json-schema.org/draft-07/schema#":"draft-04"===e.target?a.$schema="http://json-schema.org/draft-04/schema#":e.target,e.external?.uri){const n=e.external.registry.get(t)?.id;if(!n)throw new Error("Schema is missing an `id` property");a.$id=e.external.uri(n)}Object.assign(a,n.def??n.schema);const o=e.external?.defs??{};for(const t of e.seen.entries()){const e=t[1];e.def&&e.defId&&(o[e.defId]=e.def)}e.external||Object.keys(o).length>0&&("draft-2020-12"===e.target?a.$defs=o:a.definitions=o);try{const n=JSON.parse(JSON.stringify(a));return Object.defineProperty(n,"~standard",{value:{...t["~standard"],jsonSchema:{input:Qi(t,"input",e.processors),output:Qi(t,"output",e.processors)}},enumerable:!1,writable:!1}),n}catch(e){throw new Error("Error converting schema to JSON.")}}function Fi(e,t){const n=t??{seen:new 
Set};if(n.seen.has(e))return!1;n.seen.add(e);const r=e._zod.def;if("transform"===r.type)return!0;if("array"===r.type)return Fi(r.element,n);if("set"===r.type)return Fi(r.valueType,n);if("lazy"===r.type)return Fi(r.getter(),n);if("promise"===r.type||"optional"===r.type||"nonoptional"===r.type||"nullable"===r.type||"readonly"===r.type||"default"===r.type||"prefault"===r.type)return Fi(r.innerType,n);if("intersection"===r.type)return Fi(r.left,n)||Fi(r.right,n);if("record"===r.type||"map"===r.type)return Fi(r.keyType,n)||Fi(r.valueType,n);if("pipe"===r.type)return Fi(r.in,n)||Fi(r.out,n);if("object"===r.type){for(const e in r.shape)if(Fi(r.shape[e],n))return!0;return!1}if("union"===r.type){for(const e of r.options)if(Fi(e,n))return!0;return!1}if("tuple"===r.type){for(const e of r.items)if(Fi(e,n))return!0;return!(!r.rest||!Fi(r.rest,n))}return!1}const Zi=(e,t={})=>n=>{const r=Bi({...n,processors:t});return ji(e,r),Ui(r,e),zi(r,e)},Qi=(e,t,n={})=>r=>{const{libraryOptions:a,target:o}=r??{},i=Bi({...a??{},target:o,io:t,processors:n});return ji(e,i),Ui(i,e),zi(i,e)},Hi={guid:"uuid",url:"uri",datetime:"date-time",json_string:"json-string",regex:""},Vi=(e,t,n,r)=>{const a=n;a.type="string";const{minimum:o,maximum:i,format:s,patterns:l,contentEncoding:c}=e._zod.bag;if("number"==typeof o&&(a.minLength=o),"number"==typeof i&&(a.maxLength=i),s&&(a.format=Hi[s]??s,""===a.format&&delete a.format,"time"===s&&delete a.format),c&&(a.contentEncoding=c),l&&l.size>0){const e=[...l];1===e.length?a.pattern=e[0].source:e.length>1&&(a.allOf=[...e.map((e=>({..."draft-07"===t.target||"draft-04"===t.target||"openapi-3.0"===t.target?{type:"string"}:{},pattern:e.source})))])}},qi=(e,t,n,r)=>{const a=n,{minimum:o,maximum:i,format:s,multipleOf:l,exclusiveMaximum:c,exclusiveMinimum:u}=e._zod.bag;"string"==typeof s&&s.includes("int")?a.type="integer":a.type="number","number"==typeof 
u&&("draft-04"===t.target||"openapi-3.0"===t.target?(a.minimum=u,a.exclusiveMinimum=!0):a.exclusiveMinimum=u),"number"==typeof o&&(a.minimum=o,"number"==typeof u&&"draft-04"!==t.target&&(u>=o?delete a.minimum:delete a.exclusiveMinimum)),"number"==typeof c&&("draft-04"===t.target||"openapi-3.0"===t.target?(a.maximum=c,a.exclusiveMaximum=!0):a.exclusiveMaximum=c),"number"==typeof i&&(a.maximum=i,"number"==typeof c&&"draft-04"!==t.target&&(c<=i?delete a.maximum:delete a.exclusiveMaximum)),"number"==typeof l&&(a.multipleOf=l)},Wi=(e,t,n,r)=>{n.type="boolean"},Xi=(e,t,n,r)=>{if("throw"===t.unrepresentable)throw new Error("BigInt cannot be represented in JSON Schema")},Gi=(e,t,n,r)=>{if("throw"===t.unrepresentable)throw new Error("Symbols cannot be represented in JSON Schema")},Yi=(e,t,n,r)=>{"openapi-3.0"===t.target?(n.type="string",n.nullable=!0,n.enum=[null]):n.type="null"},Ki=(e,t,n,r)=>{if("throw"===t.unrepresentable)throw new Error("Undefined cannot be represented in JSON Schema")},Ji=(e,t,n,r)=>{if("throw"===t.unrepresentable)throw new Error("Void cannot be represented in JSON Schema")},es=(e,t,n,r)=>{n.not={}},ts=(e,t,n,r)=>{if("throw"===t.unrepresentable)throw new Error("Date cannot be represented in JSON Schema")},ns=(e,t,n,r)=>{const a=s(e._zod.def.entries);a.every((e=>"number"==typeof e))&&(n.type="number"),a.every((e=>"string"==typeof e))&&(n.type="string"),n.enum=a},rs=(e,t,n,r)=>{const a=e._zod.def,o=[];for(const e of a.values)if(void 0===e){if("throw"===t.unrepresentable)throw new Error("Literal `undefined` cannot be represented in JSON Schema")}else if("bigint"==typeof e){if("throw"===t.unrepresentable)throw new Error("BigInt literals cannot be represented in JSON Schema");o.push(Number(e))}else o.push(e);if(0===o.length);else if(1===o.length){const e=o[0];n.type=null===e?"null":typeof e,"draft-04"===t.target||"openapi-3.0"===t.target?n.enum=[e]:n.const=e}else o.every((e=>"number"==typeof e))&&(n.type="number"),o.every((e=>"string"==typeof 
e))&&(n.type="string"),o.every((e=>"boolean"==typeof e))&&(n.type="boolean"),o.every((e=>null===e))&&(n.type="null"),n.enum=o},as=(e,t,n,r)=>{if("throw"===t.unrepresentable)throw new Error("NaN cannot be represented in JSON Schema")},os=(e,t,n,r)=>{const a=n,o=e._zod.pattern;if(!o)throw new Error("Pattern not found in template literal");a.type="string",a.pattern=o.source},is=(e,t,n,r)=>{const a=n,o={type:"string",format:"binary",contentEncoding:"binary"},{minimum:i,maximum:s,mime:l}=e._zod.bag;void 0!==i&&(o.minLength=i),void 0!==s&&(o.maxLength=s),l?1===l.length?(o.contentMediaType=l[0],Object.assign(a,o)):(Object.assign(a,o),a.anyOf=l.map((e=>({contentMediaType:e})))):Object.assign(a,o)},ss=(e,t,n,r)=>{n.type="boolean"},ls=(e,t,n,r)=>{if("throw"===t.unrepresentable)throw new Error("Custom types cannot be represented in JSON Schema")},cs=(e,t,n,r)=>{if("throw"===t.unrepresentable)throw new Error("Function types cannot be represented in JSON Schema")},us=(e,t,n,r)=>{if("throw"===t.unrepresentable)throw new Error("Transforms cannot be represented in JSON Schema")},ds=(e,t,n,r)=>{if("throw"===t.unrepresentable)throw new Error("Map cannot be represented in JSON Schema")},ps=(e,t,n,r)=>{if("throw"===t.unrepresentable)throw new Error("Set cannot be represented in JSON Schema")},hs=(e,t,n,r)=>{const a=n,o=e._zod.def,{minimum:i,maximum:s}=e._zod.bag;"number"==typeof i&&(a.minItems=i),"number"==typeof s&&(a.maxItems=s),a.type="array",a.items=ji(o.element,t,{...r,path:[...r.path,"items"]})},fs=(e,t,n,r)=>{const a=n,o=e._zod.def;a.type="object",a.properties={};const i=o.shape;for(const e in i)a.properties[e]=ji(i[e],t,{...r,path:[...r.path,"properties",e]});const s=new Set(Object.keys(i)),l=new Set([...s].filter((e=>{const n=o.shape[e]._zod;return"input"===t.io?void 0===n.optin:void 
0===n.optout})));l.size>0&&(a.required=Array.from(l)),"never"===o.catchall?._zod.def.type?a.additionalProperties=!1:o.catchall?o.catchall&&(a.additionalProperties=ji(o.catchall,t,{...r,path:[...r.path,"additionalProperties"]})):"output"===t.io&&(a.additionalProperties=!1)},ms=(e,t,n,r)=>{const a=e._zod.def,o=!1===a.inclusive,i=a.options.map(((e,n)=>ji(e,t,{...r,path:[...r.path,o?"oneOf":"anyOf",n]})));o?n.oneOf=i:n.anyOf=i},gs=(e,t,n,r)=>{const a=e._zod.def,o=ji(a.left,t,{...r,path:[...r.path,"allOf",0]}),i=ji(a.right,t,{...r,path:[...r.path,"allOf",1]}),s=e=>"allOf"in e&&1===Object.keys(e).length,l=[...s(o)?o.allOf:[o],...s(i)?i.allOf:[i]];n.allOf=l},vs=(e,t,n,r)=>{const a=n,o=e._zod.def;a.type="array";const i="draft-2020-12"===t.target?"prefixItems":"items",s="draft-2020-12"===t.target||"openapi-3.0"===t.target?"items":"additionalItems",l=o.items.map(((e,n)=>ji(e,t,{...r,path:[...r.path,i,n]}))),c=o.rest?ji(o.rest,t,{...r,path:[...r.path,s,..."openapi-3.0"===t.target?[o.items.length]:[]]}):null;"draft-2020-12"===t.target?(a.prefixItems=l,c&&(a.items=c)):"openapi-3.0"===t.target?(a.items={anyOf:l},c&&a.items.anyOf.push(c),a.minItems=l.length,c||(a.maxItems=l.length)):(a.items=l,c&&(a.additionalItems=c));const{minimum:u,maximum:d}=e._zod.bag;"number"==typeof u&&(a.minItems=u),"number"==typeof d&&(a.maxItems=d)},bs=(e,t,n,r)=>{const a=n,o=e._zod.def;a.type="object";const i=o.keyType,s=i._zod.bag,l=s?.patterns;if("loose"===o.mode&&l&&l.size>0){const e=ji(o.valueType,t,{...r,path:[...r.path,"patternProperties","*"]});a.patternProperties={};for(const t of l)a.patternProperties[t.source]=e}else"draft-07"!==t.target&&"draft-2020-12"!==t.target||(a.propertyNames=ji(o.keyType,t,{...r,path:[...r.path,"propertyNames"]})),a.additionalProperties=ji(o.valueType,t,{...r,path:[...r.path,"additionalProperties"]});const c=i._zod.values;if(c){const e=[...c].filter((e=>"string"==typeof e||"number"==typeof e));e.length>0&&(a.required=e)}},ys=(e,t,n,r)=>{const 
a=e._zod.def,o=ji(a.innerType,t,r),i=t.seen.get(e);"openapi-3.0"===t.target?(i.ref=a.innerType,n.nullable=!0):n.anyOf=[o,{type:"null"}]},Os=(e,t,n,r)=>{const a=e._zod.def;ji(a.innerType,t,r),t.seen.get(e).ref=a.innerType},ws=(e,t,n,r)=>{const a=e._zod.def;ji(a.innerType,t,r),t.seen.get(e).ref=a.innerType,n.default=JSON.parse(JSON.stringify(a.defaultValue))},xs=(e,t,n,r)=>{const a=e._zod.def;ji(a.innerType,t,r),t.seen.get(e).ref=a.innerType,"input"===t.io&&(n._prefault=JSON.parse(JSON.stringify(a.defaultValue)))},ks=(e,t,n,r)=>{const a=e._zod.def;let o;ji(a.innerType,t,r),t.seen.get(e).ref=a.innerType;try{o=a.catchValue(void 0)}catch{throw new Error("Dynamic catch values are not supported in JSON Schema")}n.default=o},Ss=(e,t,n,r)=>{const a=e._zod.def,o="input"===t.io?"transform"===a.in._zod.def.type?a.out:a.in:a.out;ji(o,t,r),t.seen.get(e).ref=o},_s=(e,t,n,r)=>{const a=e._zod.def;ji(a.innerType,t,r),t.seen.get(e).ref=a.innerType,n.readOnly=!0},Ts=(e,t,n,r)=>{const a=e._zod.def;ji(a.innerType,t,r),t.seen.get(e).ref=a.innerType},Es=(e,t,n,r)=>{const a=e._zod.def;ji(a.innerType,t,r),t.seen.get(e).ref=a.innerType},As=(e,t,n,r)=>{const a=e._zod.innerType;ji(a,t,r),t.seen.get(e).ref=a},Cs={string:Vi,number:qi,boolean:Wi,bigint:Xi,symbol:Gi,null:Yi,undefined:Ki,void:Ji,never:es,any:(e,t,n,r)=>{},unknown:(e,t,n,r)=>{},date:ts,enum:ns,literal:rs,nan:as,template_literal:os,file:is,success:ss,custom:ls,function:cs,transform:us,map:ds,set:ps,array:hs,object:fs,union:ms,intersection:gs,tuple:vs,record:bs,nullable:ys,nonoptional:Os,default:ws,prefault:xs,catch:ks,pipe:Ss,readonly:_s,promise:Ts,optional:Es,lazy:As};function $s(e,t){if("_idmap"in e){const n=e,r=Bi({...t,processors:Cs}),a={};for(const e of n._idmap.entries()){const[t,n]=e;ji(n,r)}const o={},i={registry:n,uri:t?.uri,defs:a};r.external=i;for(const e of n._idmap.entries()){const[t,n]=e;Ui(r,n),o[t]=zi(r,n)}if(Object.keys(a).length>0){const 
e="draft-2020-12"===r.target?"$defs":"definitions";o.__shared={[e]:a}}return{schemas:o}}const n=Bi({...t,processors:Cs});return ji(e,n),Ui(n,e),zi(n,e)}const Ps=Object.freeze(Object.defineProperty({__proto__:null},Symbol.toStringTag,{value:"Module"})),Ds=Object.freeze(Object.defineProperty({__proto__:null,$ZodAny:Fn,$ZodArray:Wn,$ZodAsyncError:r,$ZodBase64:En,$ZodBase64URL:Cn,$ZodBigInt:Ln,$ZodBigIntFormat:Bn,$ZodBoolean:Rn,$ZodCIDRv4:Sn,$ZodCIDRv6:_n,$ZodCUID:pn,$ZodCUID2:hn,$ZodCatch:Cr,$ZodCheck:$t,$ZodCheckBigIntFormat:Rt,$ZodCheckEndsWith:Xt,$ZodCheckGreaterThan:It,$ZodCheckIncludes:qt,$ZodCheckLengthEquals:Ft,$ZodCheckLessThan:Dt,$ZodCheckLowerCase:Ht,$ZodCheckMaxLength:Ut,$ZodCheckMaxSize:Lt,$ZodCheckMimeType:Kt,$ZodCheckMinLength:zt,$ZodCheckMinSize:Bt,$ZodCheckMultipleOf:Mt,$ZodCheckNumberFormat:Nt,$ZodCheckOverwrite:Jt,$ZodCheckProperty:Yt,$ZodCheckRegex:Qt,$ZodCheckSizeEquals:jt,$ZodCheckStartsWith:Wt,$ZodCheckStringFormat:Zt,$ZodCheckUpperCase:Vt,$ZodCodec:Ir,$ZodCustom:Fr,$ZodCustomStringFormat:In,$ZodDate:Vn,$ZodDefault:kr,$ZodDiscriminatedUnion:ar,$ZodE164:$n,$ZodEmail:ln,$ZodEmoji:un,$ZodEncodeError:a,$ZodEnum:mr,$ZodError:J,$ZodExactOptional:wr,$ZodFile:vr,$ZodFunction:jr,$ZodGUID:on,$ZodIPv4:wn,$ZodIPv6:xn,$ZodISODate:bn,$ZodISODateTime:vn,$ZodISODuration:On,$ZodISOTime:yn,$ZodIntersection:or,$ZodJWT:Dn,$ZodKSUID:gn,$ZodLazy:zr,$ZodLiteral:gr,$ZodMAC:kn,$ZodMap:dr,$ZodNaN:$r,$ZodNanoID:dn,$ZodNever:Qn,$ZodNonOptional:Tr,$ZodNull:zn,$ZodNullable:xr,$ZodNumber:Mn,$ZodNumberFormat:Nn,$ZodObject:Kn,$ZodObjectJIT:Jn,$ZodOptional:Or,$ZodPipe:Pr,$ZodPrefault:_r,$ZodPromise:Ur,$ZodReadonly:Rr,$ZodRealError:ee,$ZodRecord:ur,$ZodRegistry:Ga,$ZodSet:hr,$ZodString:rn,$ZodStringFormat:an,$ZodSuccess:Ar,$ZodSymbol:jn,$ZodTemplateLiteral:Br,$ZodTransform:br,$ZodTuple:lr,$ZodType:nn,$ZodULID:fn,$ZodURL:cn,$ZodUUID:sn,$ZodUndefined:Un,$ZodUnion:tr,$ZodUnknown:Zn,$ZodVoid:Hn,$ZodXID:mn,$ZodXor:rr,$brand:n,$constructor:t,$input:Xa,$output:Wa,Doc:en,JSONSchema:Ps,JSON
SchemaGenerator:class{get metadataRegistry(){return this.ctx.metadataRegistry}get target(){return this.ctx.target}get unrepresentable(){return this.ctx.unrepresentable}get override(){return this.ctx.override}get io(){return this.ctx.io}get counter(){return this.ctx.counter}set counter(e){this.ctx.counter=e}get seen(){return this.ctx.seen}constructor(e){let t=e?.target??"draft-2020-12";"draft-4"===t&&(t="draft-04"),"draft-7"===t&&(t="draft-07"),this.ctx=Bi({processors:Cs,target:t,...e?.metadata&&{metadata:e.metadata},...e?.unrepresentable&&{unrepresentable:e.unrepresentable},...e?.override&&{override:e.override},...e?.io&&{io:e.io}})}process(e,t={path:[],schemaPath:[]}){return ji(e,this.ctx,t)}emit(e,t){t&&(t.cycles&&(this.ctx.cycles=t.cycles),t.reused&&(this.ctx.reused=t.reused),t.external&&(this.ctx.external=t.external)),Ui(this.ctx,e);const n=zi(this.ctx,e),{"~standard":r,...a}=n;return a}},NEVER:e,TimePrecision:_o,_any:Vo,_array:Ai,_base64:wo,_base64url:xo,_bigint:jo,_boolean:Lo,_catch:function(e,t,n){return new e({type:"catch",innerType:t,catchValue:"function"==typeof n?n:()=>n})},_check:Ii,_cidrv4:yo,_cidrv6:Oo,_coercedBigint:Uo,_coercedBoolean:Bo,_coercedDate:Yo,_coercedNumber:Po,_coercedString:eo,_cuid:uo,_cuid2:po,_custom:$i,_date:Go,_decode:ge,_decodeAsync:Oe,_default:function(e,t,n){return new e({type:"default",innerType:t,get defaultValue(){return"function"==typeof n?n():S(n)}})},_discriminatedUnion:function(e,t,n,r){return new e({type:"union",options:n,discriminator:t,...C(r)})},_e164:ko,_email:to,_emoji:lo,_encode:fe,_encodeAsync:be,_endsWith:yi,_enum:function(e,t,n){return new e({type:"enum",entries:Array.isArray(t)?Object.fromEntries(t.map((e=>[e,e]))):t,...C(n)})},_file:Ci,_float32:Io,_float64:Mo,_gt:ti,_gte:ni,_guid:no,_includes:vi,_int:Do,_int32:No,_int64:zo,_intersection:function(e,t,n){return new 
e({type:"intersection",left:t,right:n})},_ipv4:go,_ipv6:vo,_isoDate:Eo,_isoDateTime:To,_isoDuration:Co,_isoTime:Ao,_jwt:So,_ksuid:mo,_lazy:function(e,t){return new e({type:"lazy",getter:t})},_length:hi,_literal:function(e,t,n){return new e({type:"literal",values:Array.isArray(t)?t:[t],...C(n)})},_lowercase:mi,_lt:Jo,_lte:ei,_mac:bo,_map:function(e,t,n,r){return new e({type:"map",keyType:t,valueType:n,...C(r)})},_max:ei,_maxLength:di,_maxSize:li,_mime:wi,_min:ni,_minLength:pi,_minSize:ci,_multipleOf:si,_nan:Ko,_nanoid:co,_nativeEnum:function(e,t,n){return new e({type:"enum",entries:t,...C(n)})},_negative:ai,_never:Wo,_nonnegative:ii,_nonoptional:function(e,t,n){return new e({type:"nonoptional",innerType:t,...C(n)})},_nonpositive:oi,_normalize:ki,_null:Ho,_nullable:function(e,t){return new e({type:"nullable",innerType:t})},_number:$o,_optional:function(e,t){return new e({type:"optional",innerType:t})},_overwrite:xi,_parse:ie,_parseAsync:le,_pipe:function(e,t,n){return new e({type:"pipe",in:t,out:n})},_positive:ri,_promise:function(e,t){return new e({type:"promise",innerType:t})},_property:Oi,_readonly:function(e,t){return new e({type:"readonly",innerType:t})},_record:function(e,t,n,r){return new e({type:"record",keyType:t,valueType:n,...C(r)})},_refine:Pi,_regex:fi,_safeDecode:Se,_safeDecodeAsync:Ae,_safeEncode:xe,_safeEncodeAsync:Te,_safeParse:ue,_safeParseAsync:pe,_set:function(e,t,n){return new e({type:"set",valueType:t,...C(n)})},_size:ui,_slugify:Ei,_startsWith:bi,_string:Ja,_stringFormat:Li,_stringbool:Ri,_success:function(e,t){return new e({type:"success",innerType:t})},_superRefine:Di,_symbol:Zo,_templateLiteral:function(e,t,n){return new e({type:"template_literal",parts:t,...C(n)})},_toLowerCase:_i,_toUpperCase:Ti,_transform:function(e,t){return new e({type:"transform",transform:t})},_trim:Si,_tuple:function(e,t,n,r){const a=n instanceof nn;return new 
e({type:"tuple",items:t,rest:a?n:null,...C(a?r:n)})},_uint32:Ro,_uint64:Fo,_ulid:ho,_undefined:Qo,_union:function(e,t,n){return new e({type:"union",options:t,...C(n)})},_unknown:qo,_uppercase:gi,_url:so,_uuid:ro,_uuidv4:ao,_uuidv6:oo,_uuidv7:io,_void:Xo,_xid:fo,_xor:function(e,t,n){return new e({type:"union",options:t,inclusive:!1,...C(n)})},clone:A,config:i,createStandardJSONSchemaMethod:Qi,createToJSONSchemaMethod:Zi,decode:ve,decodeAsync:we,describe:Mi,encode:me,encodeAsync:ye,extractDefs:Ui,finalize:zi,flattenError:te,formatError:ne,globalConfig:o,globalRegistry:Ka,initializeContext:Bi,isValidBase64:Tn,isValidBase64URL:An,isValidJWT:Pn,locales:Va,meta:Ni,parse:se,parseAsync:ce,prettifyError:oe,process:ji,regexes:Ct,registry:Ya,safeDecode:_e,safeDecodeAsync:Ce,safeEncode:ke,safeEncodeAsync:Ee,safeParse:de,safeParseAsync:he,toDotPath:ae,toJSONSchema:$s,treeifyError:re,util:Y,version:tn},Symbol.toStringTag,{value:"Module"})),Is=Object.freeze(Object.defineProperty({__proto__:null,endsWith:yi,gt:ti,gte:ni,includes:vi,length:hi,lowercase:mi,lt:Jo,lte:ei,maxLength:di,maxSize:li,mime:wi,minLength:pi,minSize:ci,multipleOf:si,negative:ai,nonnegative:ii,nonpositive:oi,normalize:ki,overwrite:xi,positive:ri,property:Oi,regex:fi,size:ui,slugify:Ei,startsWith:bi,toLowerCase:_i,toUpperCase:Ti,trim:Si,uppercase:gi},Symbol.toStringTag,{value:"Module"})),Ms=t("ZodISODateTime",((e,t)=>{vn.init(e,t),ll.init(e,t)}));function Ns(e){return To(Ms,e)}const Rs=t("ZodISODate",((e,t)=>{bn.init(e,t),ll.init(e,t)}));function Ls(e){return Eo(Rs,e)}const Bs=t("ZodISOTime",((e,t)=>{yn.init(e,t),ll.init(e,t)}));function js(e){return Ao(Bs,e)}const Us=t("ZodISODuration",((e,t)=>{On.init(e,t),ll.init(e,t)}));function zs(e){return Co(Us,e)}const 
Fs=Object.freeze(Object.defineProperty({__proto__:null,ZodISODate:Rs,ZodISODateTime:Ms,ZodISODuration:Us,ZodISOTime:Bs,date:Ls,datetime:Ns,duration:zs,time:js},Symbol.toStringTag,{value:"Module"})),Zs=(e,t)=>{J.init(e,t),e.name="ZodError",Object.defineProperties(e,{format:{value:t=>ne(e,t)},flatten:{value:t=>te(e,t)},addIssue:{value:t=>{e.issues.push(t),e.message=JSON.stringify(e.issues,c,2)}},addIssues:{value:t=>{e.issues.push(...t),e.message=JSON.stringify(e.issues,c,2)}},isEmpty:{get:()=>0===e.issues.length}})},Qs=t("ZodError",Zs),Hs=t("ZodError",Zs,{Parent:Error}),Vs=ie(Hs),qs=le(Hs),Ws=ue(Hs),Xs=pe(Hs),Gs=fe(Hs),Ys=ge(Hs),Ks=be(Hs),Js=Oe(Hs),el=xe(Hs),tl=Se(Hs),nl=Te(Hs),rl=Ae(Hs),al=t("ZodType",((e,t)=>(nn.init(e,t),Object.assign(e["~standard"],{jsonSchema:{input:Qi(e,"input"),output:Qi(e,"output")}}),e.toJSONSchema=Zi(e,{}),e.def=t,e.type=t.type,Object.defineProperty(e,"_def",{value:t}),e.check=(...n)=>e.clone(v(t,{checks:[...t.checks??[],...n.map((e=>"function"==typeof e?{_zod:{check:e,def:{check:"custom"},onattach:[]}}:e))]}),{parent:!0}),e.with=e.check,e.clone=(t,n)=>A(e,t,n),e.brand=()=>e,e.register=(t,n)=>(t.add(e,n),e),e.parse=(t,n)=>Vs(e,t,n,{callee:e.parse}),e.safeParse=(t,n)=>Ws(e,t,n),e.parseAsync=async(t,n)=>qs(e,t,n,{callee:e.parseAsync}),e.safeParseAsync=async(t,n)=>Xs(e,t,n),e.spa=e.safeParseAsync,e.encode=(t,n)=>Gs(e,t,n),e.decode=(t,n)=>Ys(e,t,n),e.encodeAsync=async(t,n)=>Ks(e,t,n),e.decodeAsync=async(t,n)=>Js(e,t,n),e.safeEncode=(t,n)=>el(e,t,n),e.safeDecode=(t,n)=>tl(e,t,n),e.safeEncodeAsync=async(t,n)=>nl(e,t,n),e.safeDecodeAsync=async(t,n)=>rl(e,t,n),e.refine=(t,n)=>e.check(Yu(t,n)),e.superRefine=t=>e.check(Ku(t)),e.overwrite=t=>e.check(xi(t)),e.optional=()=>gu(e),e.exactOptional=()=>bu(e),e.nullable=()=>Ou(e),e.nullish=()=>gu(Ou(e)),e.nonoptional=t=>Eu(e,t),e.array=()=>Rc(e),e.or=t=>Zc([e,t]),e.and=t=>Xc(e,t),e.transform=t=>Nu(e,fu(t)),e.default=t=>ku(e,t),e.prefault=t=>_u(e,t),e.catch=t=>Pu(e,t),e.pipe=t=>Nu(e,t),e.readonly=()=>ju(e),e.d
escribe=t=>{const n=e.clone();return Ka.add(n,{description:t}),n},Object.defineProperty(e,"description",{get:()=>Ka.get(e)?.description,configurable:!0}),e.meta=(...t)=>{if(0===t.length)return Ka.get(e);const n=e.clone();return Ka.add(n,t[0]),n},e.isOptional=()=>e.safeParse(void 0).success,e.isNullable=()=>e.safeParse(null).success,e.apply=t=>t(e),e))),ol=t("_ZodString",((e,t)=>{rn.init(e,t),al.init(e,t),e._zod.processJSONSchema=(t,n,r)=>Vi(e,t,n);const n=e._zod.bag;e.format=n.format??null,e.minLength=n.minimum??null,e.maxLength=n.maximum??null,e.regex=(...t)=>e.check(fi(...t)),e.includes=(...t)=>e.check(vi(...t)),e.startsWith=(...t)=>e.check(bi(...t)),e.endsWith=(...t)=>e.check(yi(...t)),e.min=(...t)=>e.check(pi(...t)),e.max=(...t)=>e.check(di(...t)),e.length=(...t)=>e.check(hi(...t)),e.nonempty=(...t)=>e.check(pi(1,...t)),e.lowercase=t=>e.check(mi(t)),e.uppercase=t=>e.check(gi(t)),e.trim=()=>e.check(Si()),e.normalize=(...t)=>e.check(ki(...t)),e.toLowerCase=()=>e.check(_i()),e.toUpperCase=()=>e.check(Ti()),e.slugify=()=>e.check(Ei())})),il=t("ZodString",((e,t)=>{rn.init(e,t),ol.init(e,t),e.email=t=>e.check(to(cl,t)),e.url=t=>e.check(so(bl,t)),e.jwt=t=>e.check(So(Yl,t)),e.emoji=t=>e.check(lo(wl,t)),e.guid=t=>e.check(no(dl,t)),e.uuid=t=>e.check(ro(hl,t)),e.uuidv4=t=>e.check(ao(hl,t)),e.uuidv6=t=>e.check(oo(hl,t)),e.uuidv7=t=>e.check(io(hl,t)),e.nanoid=t=>e.check(co(kl,t)),e.guid=t=>e.check(no(dl,t)),e.cuid=t=>e.check(uo(_l,t)),e.cuid2=t=>e.check(po(El,t)),e.ulid=t=>e.check(ho(Cl,t)),e.base64=t=>e.check(wo(Hl,t)),e.base64url=t=>e.check(xo(ql,t)),e.xid=t=>e.check(fo(Pl,t)),e.ksuid=t=>e.check(mo(Il,t)),e.ipv4=t=>e.check(go(Nl,t)),e.ipv6=t=>e.check(vo(jl,t)),e.cidrv4=t=>e.check(yo(zl,t)),e.cidrv6=t=>e.check(Oo(Zl,t)),e.e164=t=>e.check(ko(Xl,t)),e.datetime=t=>e.check(Ns(t)),e.date=t=>e.check(Ls(t)),e.time=t=>e.check(js(t)),e.duration=t=>e.check(zs(t))}));function sl(e){return Ja(il,e)}const 
ll=t("ZodStringFormat",((e,t)=>{an.init(e,t),ol.init(e,t)})),cl=t("ZodEmail",((e,t)=>{ln.init(e,t),ll.init(e,t)}));function ul(e){return to(cl,e)}const dl=t("ZodGUID",((e,t)=>{on.init(e,t),ll.init(e,t)}));function pl(e){return no(dl,e)}const hl=t("ZodUUID",((e,t)=>{sn.init(e,t),ll.init(e,t)}));function fl(e){return ro(hl,e)}function ml(e){return ao(hl,e)}function gl(e){return oo(hl,e)}function vl(e){return io(hl,e)}const bl=t("ZodURL",((e,t)=>{cn.init(e,t),ll.init(e,t)}));function yl(e){return so(bl,e)}function Ol(e){return so(bl,{protocol:/^https?$/,hostname:et,...C(e)})}const wl=t("ZodEmoji",((e,t)=>{un.init(e,t),ll.init(e,t)}));function xl(e){return lo(wl,e)}const kl=t("ZodNanoID",((e,t)=>{dn.init(e,t),ll.init(e,t)}));function Sl(e){return co(kl,e)}const _l=t("ZodCUID",((e,t)=>{pn.init(e,t),ll.init(e,t)}));function Tl(e){return uo(_l,e)}const El=t("ZodCUID2",((e,t)=>{hn.init(e,t),ll.init(e,t)}));function Al(e){return po(El,e)}const Cl=t("ZodULID",((e,t)=>{fn.init(e,t),ll.init(e,t)}));function $l(e){return ho(Cl,e)}const Pl=t("ZodXID",((e,t)=>{mn.init(e,t),ll.init(e,t)}));function Dl(e){return fo(Pl,e)}const Il=t("ZodKSUID",((e,t)=>{gn.init(e,t),ll.init(e,t)}));function Ml(e){return mo(Il,e)}const Nl=t("ZodIPv4",((e,t)=>{wn.init(e,t),ll.init(e,t)}));function Rl(e){return go(Nl,e)}const Ll=t("ZodMAC",((e,t)=>{kn.init(e,t),ll.init(e,t)}));function Bl(e){return bo(Ll,e)}const jl=t("ZodIPv6",((e,t)=>{xn.init(e,t),ll.init(e,t)}));function Ul(e){return vo(jl,e)}const zl=t("ZodCIDRv4",((e,t)=>{Sn.init(e,t),ll.init(e,t)}));function Fl(e){return yo(zl,e)}const Zl=t("ZodCIDRv6",((e,t)=>{_n.init(e,t),ll.init(e,t)}));function Ql(e){return Oo(Zl,e)}const Hl=t("ZodBase64",((e,t)=>{En.init(e,t),ll.init(e,t)}));function Vl(e){return wo(Hl,e)}const ql=t("ZodBase64URL",((e,t)=>{Cn.init(e,t),ll.init(e,t)}));function Wl(e){return xo(ql,e)}const Xl=t("ZodE164",((e,t)=>{$n.init(e,t),ll.init(e,t)}));function Gl(e){return ko(Xl,e)}const 
Yl=t("ZodJWT",((e,t)=>{Dn.init(e,t),ll.init(e,t)}));function Kl(e){return So(Yl,e)}const Jl=t("ZodCustomStringFormat",((e,t)=>{In.init(e,t),ll.init(e,t)}));function ec(e,t,n={}){return Li(Jl,e,t,n)}function tc(e){return Li(Jl,"hostname",Je,e)}function nc(e){return Li(Jl,"hex",gt,e)}function rc(e,t){const n=`${e}_${t?.enc??"hex"}`,r=Ct[n];if(!r)throw new Error(`Unrecognized hash format: ${n}`);return Li(Jl,n,r,t)}const ac=t("ZodNumber",((e,t)=>{Mn.init(e,t),al.init(e,t),e._zod.processJSONSchema=(t,n,r)=>qi(e,t,n),e.gt=(t,n)=>e.check(ti(t,n)),e.gte=(t,n)=>e.check(ni(t,n)),e.min=(t,n)=>e.check(ni(t,n)),e.lt=(t,n)=>e.check(Jo(t,n)),e.lte=(t,n)=>e.check(ei(t,n)),e.max=(t,n)=>e.check(ei(t,n)),e.int=t=>e.check(sc(t)),e.safe=t=>e.check(sc(t)),e.positive=t=>e.check(ti(0,t)),e.nonnegative=t=>e.check(ni(0,t)),e.negative=t=>e.check(Jo(0,t)),e.nonpositive=t=>e.check(ei(0,t)),e.multipleOf=(t,n)=>e.check(si(t,n)),e.step=(t,n)=>e.check(si(t,n)),e.finite=()=>e;const n=e._zod.bag;e.minValue=Math.max(n.minimum??Number.NEGATIVE_INFINITY,n.exclusiveMinimum??Number.NEGATIVE_INFINITY)??null,e.maxValue=Math.min(n.maximum??Number.POSITIVE_INFINITY,n.exclusiveMaximum??Number.POSITIVE_INFINITY)??null,e.isInt=(n.format??"").includes("int")||Number.isSafeInteger(n.multipleOf??.5),e.isFinite=!0,e.format=n.format??null}));function oc(e){return $o(ac,e)}const ic=t("ZodNumberFormat",((e,t)=>{Nn.init(e,t),ac.init(e,t)}));function sc(e){return Do(ic,e)}function lc(e){return Io(ic,e)}function cc(e){return Mo(ic,e)}function uc(e){return No(ic,e)}function dc(e){return Ro(ic,e)}const pc=t("ZodBoolean",((e,t)=>{Rn.init(e,t),al.init(e,t),e._zod.processJSONSchema=(e,t,n)=>Wi(0,0,t)}));function hc(e){return Lo(pc,e)}const 
fc=t("ZodBigInt",((e,t)=>{Ln.init(e,t),al.init(e,t),e._zod.processJSONSchema=(e,t,n)=>Xi(0,e),e.gte=(t,n)=>e.check(ni(t,n)),e.min=(t,n)=>e.check(ni(t,n)),e.gt=(t,n)=>e.check(ti(t,n)),e.gte=(t,n)=>e.check(ni(t,n)),e.min=(t,n)=>e.check(ni(t,n)),e.lt=(t,n)=>e.check(Jo(t,n)),e.lte=(t,n)=>e.check(ei(t,n)),e.max=(t,n)=>e.check(ei(t,n)),e.positive=t=>e.check(ti(BigInt(0),t)),e.negative=t=>e.check(Jo(BigInt(0),t)),e.nonpositive=t=>e.check(ei(BigInt(0),t)),e.nonnegative=t=>e.check(ni(BigInt(0),t)),e.multipleOf=(t,n)=>e.check(si(t,n));const n=e._zod.bag;e.minValue=n.minimum??null,e.maxValue=n.maximum??null,e.format=n.format??null}));function mc(e){return jo(fc,e)}const gc=t("ZodBigIntFormat",((e,t)=>{Bn.init(e,t),fc.init(e,t)}));function vc(e){return zo(gc,e)}function bc(e){return Fo(gc,e)}const yc=t("ZodSymbol",((e,t)=>{jn.init(e,t),al.init(e,t),e._zod.processJSONSchema=(e,t,n)=>Gi(0,e)}));function Oc(e){return Zo(yc,e)}const wc=t("ZodUndefined",((e,t)=>{Un.init(e,t),al.init(e,t),e._zod.processJSONSchema=(e,t,n)=>Ki(0,e)}));function xc(e){return Qo(wc,e)}const kc=t("ZodNull",((e,t)=>{zn.init(e,t),al.init(e,t),e._zod.processJSONSchema=(e,t,n)=>Yi(0,e,t)}));function Sc(e){return Ho(kc,e)}const _c=t("ZodAny",((e,t)=>{Fn.init(e,t),al.init(e,t),e._zod.processJSONSchema=(e,t,n)=>{}}));function Tc(){return Vo(_c)}const Ec=t("ZodUnknown",((e,t)=>{Zn.init(e,t),al.init(e,t),e._zod.processJSONSchema=(e,t,n)=>{}}));function Ac(){return qo(Ec)}const Cc=t("ZodNever",((e,t)=>{Qn.init(e,t),al.init(e,t),e._zod.processJSONSchema=(e,t,n)=>es(0,0,t)}));function $c(e){return Wo(Cc,e)}const Pc=t("ZodVoid",((e,t)=>{Hn.init(e,t),al.init(e,t),e._zod.processJSONSchema=(e,t,n)=>Ji(0,e)}));function Dc(e){return Xo(Pc,e)}const Ic=t("ZodDate",((e,t)=>{Vn.init(e,t),al.init(e,t),e._zod.processJSONSchema=(e,t,n)=>ts(0,e),e.min=(t,n)=>e.check(ni(t,n)),e.max=(t,n)=>e.check(ei(t,n));const n=e._zod.bag;e.minDate=n.minimum?new Date(n.minimum):null,e.maxDate=n.maximum?new Date(n.maximum):null}));function 
Mc(e){return Go(Ic,e)}const Nc=t("ZodArray",((e,t)=>{Wn.init(e,t),al.init(e,t),e._zod.processJSONSchema=(t,n,r)=>hs(e,t,n,r),e.element=t.element,e.min=(t,n)=>e.check(pi(t,n)),e.nonempty=t=>e.check(pi(1,t)),e.max=(t,n)=>e.check(di(t,n)),e.length=(t,n)=>e.check(hi(t,n)),e.unwrap=()=>e.element}));function Rc(e,t){return Ai(Nc,e,t)}function Lc(e){const t=e._zod.def.shape;return su(Object.keys(t))}const Bc=t("ZodObject",((e,t)=>{Jn.init(e,t),al.init(e,t),e._zod.processJSONSchema=(t,n,r)=>fs(e,t,n,r),m(e,"shape",(()=>t.shape)),e.keyof=()=>su(Object.keys(e._zod.def.shape)),e.catchall=t=>e.clone({...e._zod.def,catchall:t}),e.passthrough=()=>e.clone({...e._zod.def,catchall:Ac()}),e.loose=()=>e.clone({...e._zod.def,catchall:Ac()}),e.strict=()=>e.clone({...e._zod.def,catchall:$c()}),e.strip=()=>e.clone({...e._zod.def,catchall:void 0}),e.extend=t=>R(e,t),e.safeExtend=t=>L(e,t),e.merge=t=>B(e,t),e.pick=t=>M(e,t),e.omit=t=>N(e,t),e.partial=(...t)=>j(mu,e,t[0]),e.required=(...t)=>U(Tu,e,t[0])}));function jc(e,t){const n={type:"object",shape:e??{},...C(t)};return new Bc(n)}function Uc(e,t){return new Bc({type:"object",shape:e,catchall:$c(),...C(t)})}function zc(e,t){return new Bc({type:"object",shape:e,catchall:Ac(),...C(t)})}const Fc=t("ZodUnion",((e,t)=>{tr.init(e,t),al.init(e,t),e._zod.processJSONSchema=(t,n,r)=>ms(e,t,n,r),e.options=t.options}));function Zc(e,t){return new Fc({type:"union",options:e,...C(t)})}const Qc=t("ZodXor",((e,t)=>{Fc.init(e,t),rr.init(e,t),e._zod.processJSONSchema=(t,n,r)=>ms(e,t,n,r),e.options=t.options}));function Hc(e,t){return new Qc({type:"union",options:e,inclusive:!1,...C(t)})}const Vc=t("ZodDiscriminatedUnion",((e,t)=>{Fc.init(e,t),ar.init(e,t)}));function qc(e,t,n){return new Vc({type:"union",options:t,discriminator:e,...C(n)})}const Wc=t("ZodIntersection",((e,t)=>{or.init(e,t),al.init(e,t),e._zod.processJSONSchema=(t,n,r)=>gs(e,t,n,r)}));function Xc(e,t){return new Wc({type:"intersection",left:e,right:t})}const 
Gc=t("ZodTuple",((e,t)=>{lr.init(e,t),al.init(e,t),e._zod.processJSONSchema=(t,n,r)=>vs(e,t,n,r),e.rest=t=>e.clone({...e._zod.def,rest:t})}));function Yc(e,t,n){const r=t instanceof nn;return new Gc({type:"tuple",items:e,rest:r?t:null,...C(r?n:t)})}const Kc=t("ZodRecord",((e,t)=>{ur.init(e,t),al.init(e,t),e._zod.processJSONSchema=(t,n,r)=>bs(e,t,n,r),e.keyType=t.keyType,e.valueType=t.valueType}));function Jc(e,t,n){return new Kc({type:"record",keyType:e,valueType:t,...C(n)})}function eu(e,t,n){const r=A(e);return r._zod.values=void 0,new Kc({type:"record",keyType:r,valueType:t,...C(n)})}function tu(e,t,n){return new Kc({type:"record",keyType:e,valueType:t,mode:"loose",...C(n)})}const nu=t("ZodMap",((e,t)=>{dr.init(e,t),al.init(e,t),e._zod.processJSONSchema=(e,t,n)=>ds(0,e),e.keyType=t.keyType,e.valueType=t.valueType,e.min=(...t)=>e.check(ci(...t)),e.nonempty=t=>e.check(ci(1,t)),e.max=(...t)=>e.check(li(...t)),e.size=(...t)=>e.check(ui(...t))}));function ru(e,t,n){return new nu({type:"map",keyType:e,valueType:t,...C(n)})}const au=t("ZodSet",((e,t)=>{hr.init(e,t),al.init(e,t),e._zod.processJSONSchema=(e,t,n)=>ps(0,e),e.min=(...t)=>e.check(ci(...t)),e.nonempty=t=>e.check(ci(1,t)),e.max=(...t)=>e.check(li(...t)),e.size=(...t)=>e.check(ui(...t))}));function ou(e,t){return new au({type:"set",valueType:e,...C(t)})}const iu=t("ZodEnum",((e,t)=>{mr.init(e,t),al.init(e,t),e._zod.processJSONSchema=(t,n,r)=>ns(e,0,n),e.enum=t.entries,e.options=Object.values(t.entries);const n=new Set(Object.keys(t.entries));e.extract=(e,r)=>{const a={};for(const r of e){if(!n.has(r))throw new Error(`Key ${r} not found in enum`);a[r]=t.entries[r]}return new iu({...t,checks:[],...C(r),entries:a})},e.exclude=(e,r)=>{const a={...t.entries};for(const t of e){if(!n.has(t))throw new Error(`Key ${t} not found in enum`);delete a[t]}return new iu({...t,checks:[],...C(r),entries:a})}}));function su(e,t){const n=Array.isArray(e)?Object.fromEntries(e.map((e=>[e,e]))):e;return new 
iu({type:"enum",entries:n,...C(t)})}function lu(e,t){return new iu({type:"enum",entries:e,...C(t)})}const cu=t("ZodLiteral",((e,t)=>{gr.init(e,t),al.init(e,t),e._zod.processJSONSchema=(t,n,r)=>rs(e,t,n),e.values=new Set(t.values),Object.defineProperty(e,"value",{get(){if(t.values.length>1)throw new Error("This schema contains multiple valid literal values. Use `.values` instead.");return t.values[0]}})}));function uu(e,t){return new cu({type:"literal",values:Array.isArray(e)?e:[e],...C(t)})}const du=t("ZodFile",((e,t)=>{vr.init(e,t),al.init(e,t),e._zod.processJSONSchema=(t,n,r)=>is(e,0,n),e.min=(t,n)=>e.check(ci(t,n)),e.max=(t,n)=>e.check(li(t,n)),e.mime=(t,n)=>e.check(wi(Array.isArray(t)?t:[t],n))}));function pu(e){return Ci(du,e)}const hu=t("ZodTransform",((e,t)=>{br.init(e,t),al.init(e,t),e._zod.processJSONSchema=(e,t,n)=>us(0,e),e._zod.parse=(n,r)=>{if("backward"===r.direction)throw new a(e.constructor.name);n.addIssue=r=>{if("string"==typeof r)n.issues.push(W(r,n.value,t));else{const t=r;t.fatal&&(t.continue=!1),t.code??(t.code="custom"),t.input??(t.input=n.value),t.inst??(t.inst=e),n.issues.push(W(t))}};const o=t.transform(n.value,n);return o instanceof Promise?o.then((e=>(n.value=e,n))):(n.value=o,n)}}));function fu(e){return new hu({type:"transform",transform:e})}const mu=t("ZodOptional",((e,t)=>{Or.init(e,t),al.init(e,t),e._zod.processJSONSchema=(t,n,r)=>Es(e,t,0,r),e.unwrap=()=>e._zod.def.innerType}));function gu(e){return new mu({type:"optional",innerType:e})}const vu=t("ZodExactOptional",((e,t)=>{wr.init(e,t),al.init(e,t),e._zod.processJSONSchema=(t,n,r)=>Es(e,t,0,r),e.unwrap=()=>e._zod.def.innerType}));function bu(e){return new vu({type:"optional",innerType:e})}const yu=t("ZodNullable",((e,t)=>{xr.init(e,t),al.init(e,t),e._zod.processJSONSchema=(t,n,r)=>ys(e,t,n,r),e.unwrap=()=>e._zod.def.innerType}));function Ou(e){return new yu({type:"nullable",innerType:e})}function wu(e){return gu(Ou(e))}const 
xu=t("ZodDefault",((e,t)=>{kr.init(e,t),al.init(e,t),e._zod.processJSONSchema=(t,n,r)=>ws(e,t,n,r),e.unwrap=()=>e._zod.def.innerType,e.removeDefault=e.unwrap}));function ku(e,t){return new xu({type:"default",innerType:e,get defaultValue(){return"function"==typeof t?t():S(t)}})}const Su=t("ZodPrefault",((e,t)=>{_r.init(e,t),al.init(e,t),e._zod.processJSONSchema=(t,n,r)=>xs(e,t,n,r),e.unwrap=()=>e._zod.def.innerType}));function _u(e,t){return new Su({type:"prefault",innerType:e,get defaultValue(){return"function"==typeof t?t():S(t)}})}const Tu=t("ZodNonOptional",((e,t)=>{Tr.init(e,t),al.init(e,t),e._zod.processJSONSchema=(t,n,r)=>Os(e,t,0,r),e.unwrap=()=>e._zod.def.innerType}));function Eu(e,t){return new Tu({type:"nonoptional",innerType:e,...C(t)})}const Au=t("ZodSuccess",((e,t)=>{Ar.init(e,t),al.init(e,t),e._zod.processJSONSchema=(e,t,n)=>ss(0,0,t),e.unwrap=()=>e._zod.def.innerType}));function Cu(e){return new Au({type:"success",innerType:e})}const $u=t("ZodCatch",((e,t)=>{Cr.init(e,t),al.init(e,t),e._zod.processJSONSchema=(t,n,r)=>ks(e,t,n,r),e.unwrap=()=>e._zod.def.innerType,e.removeCatch=e.unwrap}));function Pu(e,t){return new $u({type:"catch",innerType:e,catchValue:"function"==typeof t?t:()=>t})}const Du=t("ZodNaN",((e,t)=>{$r.init(e,t),al.init(e,t),e._zod.processJSONSchema=(e,t,n)=>as(0,e)}));function Iu(e){return Ko(Du,e)}const Mu=t("ZodPipe",((e,t)=>{Pr.init(e,t),al.init(e,t),e._zod.processJSONSchema=(t,n,r)=>Ss(e,t,0,r),e.in=t.in,e.out=t.out}));function Nu(e,t){return new Mu({type:"pipe",in:e,out:t})}const Ru=t("ZodCodec",((e,t)=>{Mu.init(e,t),Ir.init(e,t)}));function Lu(e,t,n){return new Ru({type:"pipe",in:e,out:t,transform:n.decode,reverseTransform:n.encode})}const Bu=t("ZodReadonly",((e,t)=>{Rr.init(e,t),al.init(e,t),e._zod.processJSONSchema=(t,n,r)=>_s(e,t,n,r),e.unwrap=()=>e._zod.def.innerType}));function ju(e){return new Bu({type:"readonly",innerType:e})}const 
Uu=t("ZodTemplateLiteral",((e,t)=>{Br.init(e,t),al.init(e,t),e._zod.processJSONSchema=(t,n,r)=>os(e,0,n)}));function zu(e,t){return new Uu({type:"template_literal",parts:e,...C(t)})}const Fu=t("ZodLazy",((e,t)=>{zr.init(e,t),al.init(e,t),e._zod.processJSONSchema=(t,n,r)=>As(e,t,0,r),e.unwrap=()=>e._zod.def.getter()}));function Zu(e){return new Fu({type:"lazy",getter:e})}const Qu=t("ZodPromise",((e,t)=>{Ur.init(e,t),al.init(e,t),e._zod.processJSONSchema=(t,n,r)=>Ts(e,t,0,r),e.unwrap=()=>e._zod.def.innerType}));function Hu(e){return new Qu({type:"promise",innerType:e})}const Vu=t("ZodFunction",((e,t)=>{jr.init(e,t),al.init(e,t),e._zod.processJSONSchema=(e,t,n)=>cs(0,e)}));function qu(e){return new Vu({type:"function",input:Array.isArray(e?.input)?Yc(e?.input):e?.input??Rc(Ac()),output:e?.output??Ac()})}const Wu=t("ZodCustom",((e,t)=>{Fr.init(e,t),al.init(e,t),e._zod.processJSONSchema=(e,t,n)=>ls(0,e)}));function Xu(e){const t=new $t({check:"custom"});return t._zod.check=e,t}function Gu(e,t){return $i(Wu,e??(()=>!0),t)}function Yu(e,t={}){return Pi(Wu,e,t)}function Ku(e){return Di(e)}const Ju=Mi,ed=Ni;function td(e,t={}){const n=new Wu({type:"custom",check:"custom",fn:t=>t instanceof e,abort:!0,...C(t)});return n._zod.bag.Class=e,n._zod.check=t=>{t.value instanceof e||t.issues.push({code:"invalid_type",expected:e.name,input:t.value,inst:n,path:[...n._zod.def.path??[]]})},n}const nd=(...e)=>Ri({Codec:Ru,Boolean:pc,String:il},...e);function rd(e){const t=Zu((()=>Zc([sl(e),oc(),hc(),Sc(),Rc(t),Jc(sl(),t)])));return t}function ad(e,t){return Nu(fu(e),t)}const 
od=Object.freeze(Object.defineProperty({__proto__:null,ZodAny:_c,ZodArray:Nc,ZodBase64:Hl,ZodBase64URL:ql,ZodBigInt:fc,ZodBigIntFormat:gc,ZodBoolean:pc,ZodCIDRv4:zl,ZodCIDRv6:Zl,ZodCUID:_l,ZodCUID2:El,ZodCatch:$u,ZodCodec:Ru,ZodCustom:Wu,ZodCustomStringFormat:Jl,ZodDate:Ic,ZodDefault:xu,ZodDiscriminatedUnion:Vc,ZodE164:Xl,ZodEmail:cl,ZodEmoji:wl,ZodEnum:iu,ZodExactOptional:vu,ZodFile:du,ZodFunction:Vu,ZodGUID:dl,ZodIPv4:Nl,ZodIPv6:jl,ZodIntersection:Wc,ZodJWT:Yl,ZodKSUID:Il,ZodLazy:Fu,ZodLiteral:cu,ZodMAC:Ll,ZodMap:nu,ZodNaN:Du,ZodNanoID:kl,ZodNever:Cc,ZodNonOptional:Tu,ZodNull:kc,ZodNullable:yu,ZodNumber:ac,ZodNumberFormat:ic,ZodObject:Bc,ZodOptional:mu,ZodPipe:Mu,ZodPrefault:Su,ZodPromise:Qu,ZodReadonly:Bu,ZodRecord:Kc,ZodSet:au,ZodString:il,ZodStringFormat:ll,ZodSuccess:Au,ZodSymbol:yc,ZodTemplateLiteral:Uu,ZodTransform:hu,ZodTuple:Gc,ZodType:al,ZodULID:Cl,ZodURL:bl,ZodUUID:hl,ZodUndefined:wc,ZodUnion:Fc,ZodUnknown:Ec,ZodVoid:Pc,ZodXID:Pl,ZodXor:Qc,_ZodString:ol,_default:ku,_function:qu,any:Tc,array:Rc,base64:Vl,base64url:Wl,bigint:mc,boolean:hc,catch:Pu,check:Xu,cidrv4:Fl,cidrv6:Ql,codec:Lu,cuid:Tl,cuid2:Al,custom:Gu,date:Mc,describe:Ju,discriminatedUnion:qc,e164:Gl,email:ul,emoji:xl,enum:su,exactOptional:bu,file:pu,float32:lc,float64:cc,function:qu,guid:pl,hash:rc,hex:nc,hostname:tc,httpUrl:Ol,instanceof:td,int:sc,int32:uc,int64:vc,intersection:Xc,ipv4:Rl,ipv6:Ul,json:rd,jwt:Kl,keyof:Lc,ksuid:Ml,lazy:Zu,literal:uu,looseObject:zc,looseRecord:tu,mac:Bl,map:ru,meta:ed,nan:Iu,nanoid:Sl,nativeEnum:lu,never:$c,nonoptional:Eu,null:Sc,nullable:Ou,nullish:wu,number:oc,object:jc,optional:gu,partialRecord:eu,pipe:Nu,prefault:_u,preprocess:ad,promise:Hu,readonly:ju,record:Jc,refine:Yu,set:ou,strictObject:Uc,string:sl,stringFormat:ec,stringbool:nd,success:Cu,superRefine:Ku,symbol:Oc,templateLiteral:zu,transform:fu,tuple:Yc,uint32:dc,uint64:bc,ulid:$l,undefined:xc,union:Zc,unknown:Ac,url:yl,uuid:fl,uuidv4:ml,uuidv6:gl,uuidv7:vl,void:Dc,xid:Dl,xor:Hc},Symbol.toStringTag,{valu
e:"Module"}));var id;id||(id={});const sd={...od,...Is,iso:Fs},ld=new Set(["$schema","$ref","$defs","definitions","$id","id","$comment","$anchor","$vocabulary","$dynamicRef","$dynamicAnchor","type","enum","const","anyOf","oneOf","allOf","not","properties","required","additionalProperties","patternProperties","propertyNames","minProperties","maxProperties","items","prefixItems","additionalItems","minItems","maxItems","uniqueItems","contains","minContains","maxContains","minLength","maxLength","pattern","format","minimum","maximum","exclusiveMinimum","exclusiveMaximum","multipleOf","description","default","contentEncoding","contentMediaType","contentSchema","unevaluatedItems","unevaluatedProperties","if","then","else","dependentSchemas","dependentRequired","nullable","readOnly"]);function cd(e,t){if(void 0!==e.not){if("object"==typeof e.not&&0===Object.keys(e.not).length)return sd.never();throw new Error("not is not supported in Zod (except { not: {} } for never)")}if(void 0!==e.unevaluatedItems)throw new Error("unevaluatedItems is not supported");if(void 0!==e.unevaluatedProperties)throw new Error("unevaluatedProperties is not supported");if(void 0!==e.if||void 0!==e.then||void 0!==e.else)throw new Error("Conditional schemas (if/then/else) are not supported");if(void 0!==e.dependentSchemas||void 0!==e.dependentRequired)throw new Error("dependentSchemas and dependentRequired are not supported");if(e.$ref){const n=e.$ref;if(t.refs.has(n))return t.refs.get(n);if(t.processing.has(n))return sd.lazy((()=>{if(!t.refs.has(n))throw new Error(`Circular reference not resolved: ${n}`);return t.refs.get(n)}));t.processing.add(n);const r=function(e,t){if(!e.startsWith("#"))throw new Error("External $ref is not supported, only local refs (#/...) 
are allowed");const n=e.slice(1).split("/").filter(Boolean);if(0===n.length)return t.rootSchema;const r="draft-2020-12"===t.version?"$defs":"definitions";if(n[0]===r){const r=n[1];if(!r||!t.defs[r])throw new Error(`Reference not found: ${e}`);return t.defs[r]}throw new Error(`Reference not found: ${e}`)}(n,t),a=ud(r,t);return t.refs.set(n,a),t.processing.delete(n),a}if(void 0!==e.enum){const n=e.enum;if("openapi-3.0"===t.version&&!0===e.nullable&&1===n.length&&null===n[0])return sd.null();if(0===n.length)return sd.never();if(1===n.length)return sd.literal(n[0]);if(n.every((e=>"string"==typeof e)))return sd.enum(n);const r=n.map((e=>sd.literal(e)));return r.length<2?r[0]:sd.union([r[0],r[1],...r.slice(2)])}if(void 0!==e.const)return sd.literal(e.const);const n=e.type;if(Array.isArray(n)){const r=n.map((n=>cd({...e,type:n},t)));return 0===r.length?sd.never():1===r.length?r[0]:sd.union(r)}if(!n)return sd.any();let r;switch(n){case"string":{let t=sd.string();if(e.format){const n=e.format;"email"===n?t=t.check(sd.email()):"uri"===n||"uri-reference"===n?t=t.check(sd.url()):"uuid"===n||"guid"===n?t=t.check(sd.uuid()):"date-time"===n?t=t.check(sd.iso.datetime()):"date"===n?t=t.check(sd.iso.date()):"time"===n?t=t.check(sd.iso.time()):"duration"===n?t=t.check(sd.iso.duration()):"ipv4"===n?t=t.check(sd.ipv4()):"ipv6"===n?t=t.check(sd.ipv6()):"mac"===n?t=t.check(sd.mac()):"cidr"===n?t=t.check(sd.cidrv4()):"cidr-v6"===n?t=t.check(sd.cidrv6()):"base64"===n?t=t.check(sd.base64()):"base64url"===n?t=t.check(sd.base64url()):"e164"===n?t=t.check(sd.e164()):"jwt"===n?t=t.check(sd.jwt()):"emoji"===n?t=t.check(sd.emoji()):"nanoid"===n?t=t.check(sd.nanoid()):"cuid"===n?t=t.check(sd.cuid()):"cuid2"===n?t=t.check(sd.cuid2()):"ulid"===n?t=t.check(sd.ulid()):"xid"===n?t=t.check(sd.xid()):"ksuid"===n&&(t=t.check(sd.ksuid()))}"number"==typeof e.minLength&&(t=t.min(e.minLength)),"number"==typeof e.maxLength&&(t=t.max(e.maxLength)),e.pattern&&(t=t.regex(new 
RegExp(e.pattern))),r=t;break}case"number":case"integer":{let t="integer"===n?sd.number().int():sd.number();"number"==typeof e.minimum&&(t=t.min(e.minimum)),"number"==typeof e.maximum&&(t=t.max(e.maximum)),"number"==typeof e.exclusiveMinimum?t=t.gt(e.exclusiveMinimum):!0===e.exclusiveMinimum&&"number"==typeof e.minimum&&(t=t.gt(e.minimum)),"number"==typeof e.exclusiveMaximum?t=t.lt(e.exclusiveMaximum):!0===e.exclusiveMaximum&&"number"==typeof e.maximum&&(t=t.lt(e.maximum)),"number"==typeof e.multipleOf&&(t=t.multipleOf(e.multipleOf)),r=t;break}case"boolean":r=sd.boolean();break;case"null":r=sd.null();break;case"object":{const n={},a=e.properties||{},o=new Set(e.required||[]);for(const[e,r]of Object.entries(a)){const a=ud(r,t);n[e]=o.has(e)?a:a.optional()}if(e.propertyNames){const a=ud(e.propertyNames,t),o=e.additionalProperties&&"object"==typeof e.additionalProperties?ud(e.additionalProperties,t):sd.any();if(0===Object.keys(n).length){r=sd.record(a,o);break}const i=sd.object(n).passthrough(),s=sd.looseRecord(a,o);r=sd.intersection(i,s);break}if(e.patternProperties){const a=e.patternProperties,o=Object.keys(a),i=[];for(const e of o){const n=ud(a[e],t),r=sd.string().regex(new RegExp(e));i.push(sd.looseRecord(r,n))}const s=[];if(Object.keys(n).length>0&&s.push(sd.object(n).passthrough()),s.push(...i),0===s.length)r=sd.object({}).passthrough();else if(1===s.length)r=s[0];else{let e=sd.intersection(s[0],s[1]);for(let t=2;tud(e,t))),i=a&&"object"==typeof a&&!Array.isArray(a)?ud(a,t):void 0;r=i?sd.tuple(o).rest(i):sd.tuple(o),"number"==typeof e.minItems&&(r=r.check(sd.minLength(e.minItems))),"number"==typeof e.maxItems&&(r=r.check(sd.maxLength(e.maxItems)))}else if(Array.isArray(a)){const n=a.map((e=>ud(e,t))),o=e.additionalItems&&"object"==typeof e.additionalItems?ud(e.additionalItems,t):void 0;r=o?sd.tuple(n).rest(o):sd.tuple(n),"number"==typeof e.minItems&&(r=r.check(sd.minLength(e.minItems))),"number"==typeof e.maxItems&&(r=r.check(sd.maxLength(e.maxItems)))}else 
if(void 0!==a){const n=ud(a,t);let o=sd.array(n);"number"==typeof e.minItems&&(o=o.min(e.minItems)),"number"==typeof e.maxItems&&(o=o.max(e.maxItems)),r=o}else r=sd.array(sd.any());break}default:throw new Error(`Unsupported type: ${n}`)}return e.description&&(r=r.describe(e.description)),void 0!==e.default&&(r=r.default(e.default)),r}function ud(e,t){if("boolean"==typeof e)return e?sd.any():sd.never();let n=cd(e,t);const r=e.type||void 0!==e.enum||void 0!==e.const;if(e.anyOf&&Array.isArray(e.anyOf)){const a=e.anyOf.map((e=>ud(e,t))),o=sd.union(a);n=r?sd.intersection(n,o):o}if(e.oneOf&&Array.isArray(e.oneOf)){const a=e.oneOf.map((e=>ud(e,t))),o=sd.xor(a);n=r?sd.intersection(n,o):o}if(e.allOf&&Array.isArray(e.allOf))if(0===e.allOf.length)n=r?n:sd.any();else{let a=r?n:ud(e.allOf[0],t);for(let n=r?0:1;n0&&t.registry.add(n,a),n}function dd(e){return eo(il,e)}const pd=Object.freeze(Object.defineProperty({__proto__:null,bigint:function(e){return Uo(fc,e)},boolean:function(e){return Bo(pc,e)},date:function(e){return Yo(Ic,e)},number:function(e){return Po(ac,e)},string:dd},Symbol.toStringTag,{value:"Module"}));i(ea());const hd=Object.freeze(Object.defineProperty({__proto__:null,$brand:n,$input:Xa,$output:Wa,NEVER:e,TimePrecision:_o,ZodAny:_c,ZodArray:Nc,ZodBase64:Hl,ZodBase64URL:ql,ZodBigInt:fc,ZodBigIntFormat:gc,ZodBoolean:pc,ZodCIDRv4:zl,ZodCIDRv6:Zl,ZodCUID:_l,ZodCUID2:El,ZodCatch:$u,ZodCodec:Ru,ZodCustom:Wu,ZodCustomStringFormat:Jl,ZodDate:Ic,ZodDefault:xu,ZodDiscriminatedUnion:Vc,ZodE164:Xl,ZodEmail:cl,ZodEmoji:wl,ZodEnum:iu,ZodError:Qs,ZodExactOptional:vu,ZodFile:du,get ZodFirstPartyTypeKind(){return 
id},ZodFunction:Vu,ZodGUID:dl,ZodIPv4:Nl,ZodIPv6:jl,ZodISODate:Rs,ZodISODateTime:Ms,ZodISODuration:Us,ZodISOTime:Bs,ZodIntersection:Wc,ZodIssueCode:{invalid_type:"invalid_type",too_big:"too_big",too_small:"too_small",invalid_format:"invalid_format",not_multiple_of:"not_multiple_of",unrecognized_keys:"unrecognized_keys",invalid_union:"invalid_union",invalid_key:"invalid_key",invalid_element:"invalid_element",invalid_value:"invalid_value",custom:"custom"},ZodJWT:Yl,ZodKSUID:Il,ZodLazy:Fu,ZodLiteral:cu,ZodMAC:Ll,ZodMap:nu,ZodNaN:Du,ZodNanoID:kl,ZodNever:Cc,ZodNonOptional:Tu,ZodNull:kc,ZodNullable:yu,ZodNumber:ac,ZodNumberFormat:ic,ZodObject:Bc,ZodOptional:mu,ZodPipe:Mu,ZodPrefault:Su,ZodPromise:Qu,ZodReadonly:Bu,ZodRealError:Hs,ZodRecord:Kc,ZodSet:au,ZodString:il,ZodStringFormat:ll,ZodSuccess:Au,ZodSymbol:yc,ZodTemplateLiteral:Uu,ZodTransform:hu,ZodTuple:Gc,ZodType:al,ZodULID:Cl,ZodURL:bl,ZodUUID:hl,ZodUndefined:wc,ZodUnion:Fc,ZodUnknown:Ec,ZodVoid:Pc,ZodXID:Pl,ZodXor:Qc,_ZodString:ol,_default:ku,_function:qu,any:Tc,array:Rc,base64:Vl,base64url:Wl,bigint:mc,boolean:hc,catch:Pu,check:Xu,cidrv4:Fl,cidrv6:Ql,clone:A,codec:Lu,coerce:pd,config:i,core:Ds,cuid:Tl,cuid2:Al,custom:Gu,date:Mc,decode:Ys,decodeAsync:Js,describe:Ju,discriminatedUnion:qc,e164:Gl,email:ul,emoji:xl,encode:Gs,encodeAsync:Ks,endsWith:yi,enum:su,exactOptional:bu,file:pu,flattenError:te,float32:lc,float64:cc,formatError:ne,fromJSONSchema:function(e,t){if("boolean"==typeof e)return e?sd.any():sd.never();const n=function(e,t){const n=e.$schema;return"https://json-schema.org/draft/2020-12/schema"===n?"draft-2020-12":"http://json-schema.org/draft-07/schema#"===n?"draft-7":"http://json-schema.org/draft-04/schema#"===n?"draft-4":t??"draft-2020-12"}(e,t?.defaultTarget);return ud(e,{version:n,defs:e.$defs||e.definitions||{},refs:new Map,processing:new Set,rootSchema:e,registry:t?.registry??Ka})},function:qu,getErrorMap:function(){return 
i().customError},globalRegistry:Ka,gt:ti,gte:ni,guid:pl,hash:rc,hex:nc,hostname:tc,httpUrl:Ol,includes:vi,instanceof:td,int:sc,int32:uc,int64:vc,intersection:Xc,ipv4:Rl,ipv6:Ul,iso:Fs,json:rd,jwt:Kl,keyof:Lc,ksuid:Ml,lazy:Zu,length:hi,literal:uu,locales:Va,looseObject:zc,looseRecord:tu,lowercase:mi,lt:Jo,lte:ei,mac:Bl,map:ru,maxLength:di,maxSize:li,meta:ed,mime:wi,minLength:pi,minSize:ci,multipleOf:si,nan:Iu,nanoid:Sl,nativeEnum:lu,negative:ai,never:$c,nonnegative:ii,nonoptional:Eu,nonpositive:oi,normalize:ki,null:Sc,nullable:Ou,nullish:wu,number:oc,object:jc,optional:gu,overwrite:xi,parse:Vs,parseAsync:qs,partialRecord:eu,pipe:Nu,positive:ri,prefault:_u,preprocess:ad,prettifyError:oe,promise:Hu,property:Oi,readonly:ju,record:Jc,refine:Yu,regex:fi,regexes:Ct,registry:Ya,safeDecode:tl,safeDecodeAsync:rl,safeEncode:el,safeEncodeAsync:nl,safeParse:Ws,safeParseAsync:Xs,set:ou,setErrorMap:function(e){i({customError:e})},size:ui,slugify:Ei,startsWith:bi,strictObject:Uc,string:sl,stringFormat:ec,stringbool:nd,success:Cu,superRefine:Ku,symbol:Oc,templateLiteral:zu,toJSONSchema:$s,toLowerCase:_i,toUpperCase:Ti,transform:fu,treeifyError:re,trim:Si,tuple:Yc,uint32:dc,uint64:bc,ulid:$l,undefined:xc,union:Zc,unknown:Ac,uppercase:gi,url:yl,util:Y,uuid:fl,uuidv4:ml,uuidv6:gl,uuidv7:vl,void:Dc,xid:Dl,xor:Hc},Symbol.toStringTag,{value:"Module"})),fd=jc({title:sl().optional(),component:Ac(),props:Jc(sl(),Tc()).optional()}),md=jc({"request.section":Rc(fd).optional(),"response.section":Rc(fd).optional()}),gd=jc({onBeforeRequest:qu({input:[jc({request:td(Request)})]}).optional(),onResponseReceived:qu({input:[jc({response:td(Response),operation:Jc(sl(),Tc())})]}).optional()}),vd=qu({input:[],output:jc({name:sl(),views:md.optional(),hooks:gd.optional()})}),bd="https://api.scalar.com/request-proxy",yd="https://proxy.scalar.com",Od=hd.object({title:hd.string().optional(),slug:hd.string().optional(),authentication:hd.any().optional(),baseServerURL:hd.string().optional(),hideClientButton:hd.b
oolean().optional().default(!1).catch(!1),proxyUrl:hd.string().optional(),searchHotKey:hd.enum(["a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s","t","u","v","w","x","y","z"]).optional(),servers:hd.array(hd.any()).optional(),showSidebar:hd.boolean().optional().default(!0).catch(!0),showDeveloperTools:hd.enum(["always","localhost","never"]).optional().default("localhost").catch("localhost"),showToolbar:hd.enum(["always","localhost","never"]).optional().default("localhost").catch("localhost"),operationTitleSource:hd.enum(["summary","path"]).optional().default("summary").catch("summary"),theme:hd.enum(["alternate","default","moon","purple","solarized","bluePlanet","deepSpace","saturn","kepler","elysiajs","fastify","mars","laserwave","none"]).optional().default("default").catch("default"),_integration:hd.enum(["adonisjs","astro","docusaurus","dotnet","elysiajs","express","fastapi","fastify","go","hono","html","laravel","litestar","nestjs","nextjs","nitro","nuxt","platformatic","react","rust","svelte","vue"]).nullable().optional(),onRequestSent:hd.function({input:[hd.string()],output:hd.void()}).optional(),persistAuth:hd.boolean().optional().default(!1).catch(!1),plugins:hd.array(vd).optional(),telemetry:hd.boolean().optional().default(!0)}),wd=hd.object({default:hd.boolean().default(!1).optional().catch(!1),url:hd.string().optional(),content:hd.union([hd.string(),hd.null(),hd.record(hd.string(),hd.any()),hd.function({input:[],output:hd.record(hd.string(),hd.any())})]).optional(),title:hd.string().optional(),slug:hd.string().optional(),spec:hd.object({url:hd.string().optional(),content:hd.union([hd.string(),hd.null(),hd.record(hd.string(),hd.any()),hd.function({input:[],output:hd.record(hd.string(),hd.any())})]).optional()}).optional()});Od.extend(wd.shape);const 
xd=jc({name:sl().regex(/^x-/),component:Ac(),renderer:Ac().optional()}),kd=jc({component:Ac(),renderer:Ac().optional(),props:Jc(sl(),Tc()).optional()}),Sd=jc({"content.end":Rc(kd).optional()}),_d=qu({input:[],output:jc({name:sl(),extensions:Rc(xd),views:Sd.optional()})}),Td=Gu(),Ed=Od.extend({layout:su(["modern","classic"]).optional().default("modern").catch("modern"),proxy:sl().optional(),fetch:Td.optional(),plugins:Rc(_d).optional(),isEditable:hc().optional().default(!1).catch(!1),isLoading:hc().optional().default(!1).catch(!1),hideModels:hc().optional().default(!1).catch(!1),documentDownloadType:su(["yaml","json","both","direct","none"]).optional().default("both").catch("both"),hideDownloadButton:hc().optional(),hideTestRequestButton:hc().optional().default(!1).catch(!1),hideSearch:hc().optional().default(!1).catch(!1),showOperationId:hc().optional().default(!1).catch(!1),darkMode:hc().optional(),forceDarkModeState:su(["dark","light"]).optional(),hideDarkModeToggle:hc().optional().default(!1).catch(!1),metaData:Tc().optional(),favicon:sl().optional(),hiddenClients:Zc([Jc(sl(),Zc([hc(),Rc(sl())])),Rc(sl()),uu(!0)]).optional(),defaultHttpClient:jc({targetKey:Gu(),clientKey:sl()}).optional(),customCss:sl().optional(),onSpecUpdate:qu({input:[sl()],output:Dc()}).optional(),onServerChange:qu({input:[sl()],output:Dc()}).optional(),onDocumentSelect:qu({input:[]}).optional(),onLoaded:qu().optional(),onBeforeRequest:qu({input:[jc({request:td(Request)})]}).optional(),onShowMore:qu({input:[sl()]}).optional(),onSidebarClick:qu({input:[sl()]}).optional(),pathRouting:jc({basePath:sl()}).optional(),generateHeadingSlug:qu({input:[jc({slug:sl().default("headingSlug")})],output:sl()}).optional(),generateModelSlug:qu({input:[jc({name:sl().default("modelName")})],output:sl()}).optional(),generateTagSlug:qu({input:[jc({name:sl().default("tagName")})],output:sl()}).optional(),generateOperationSlug:qu({input:[jc({path:sl(),operationId:sl().optional(),method:sl(),summary:sl().optional()}
)],output:sl()}).optional(),generateWebhookSlug:qu({input:[jc({name:sl(),method:sl().optional()})],output:sl()}).optional(),redirect:qu({input:[sl()],output:sl().nullable().optional()}).optional(),withDefaultFonts:hc().optional().default(!0).catch(!0),defaultOpenAllTags:hc().optional().default(!1).catch(!1),expandAllModelSections:hc().optional().default(!1).catch(!1),expandAllResponses:hc().optional().default(!1).catch(!1),tagsSorter:Zc([uu("alpha"),qu({input:[Tc(),Tc()],output:oc()})]).optional(),operationsSorter:Zc([uu("alpha"),uu("method"),qu({input:[Tc(),Tc()],output:oc()})]).optional(),orderSchemaPropertiesBy:Zc([uu("alpha"),uu("preserve")]).optional().default("alpha").catch("alpha"),orderRequiredPropertiesFirst:hc().optional().default(!0).catch(!0)}),Ad=Ed.extend(wd.shape).transform((e=>(e.hideDownloadButton&&(console.warn("[DEPRECATED] You're using the deprecated 'hideDownloadButton' attribute. Use 'documentDownloadType: 'none'' instead."),e.documentDownloadType="none"),e.spec?.url&&(console.warn("[DEPRECATED] You're using the deprecated 'spec.url' attribute. Remove the spec prefix and move the 'url' attribute to the top level."),e.url=e.spec.url,delete e.spec),e.spec?.content&&(console.warn("[DEPRECATED] You're using the deprecated 'spec.content' attribute. Remove the spec prefix and move the 'content' attribute to the top level."),e.content=e.spec.content,delete e.spec),e.proxy&&(console.warn("[DEPRECATED] You're using the deprecated 'proxy' attribute, rename it to 'proxyUrl' or update the package."),e.proxyUrl||(e.proxyUrl=e.proxy),delete e.proxy),e.proxyUrl===bd&&(console.warn(`[DEPRECATED] Warning: configuration.proxyUrl points to our old proxy (${bd}).`),console.warn(`[DEPRECATED] We are overwriting the value and use the new proxy URL (${yd}) instead.`),console.warn(`[DEPRECATED] Action Required: You should manually update your configuration to use the new URL (${yd}). 
Read more: https://github.com/scalar/scalar`),e.proxyUrl=yd),e.showToolbar&&"localhost"!==e.showToolbar&&(console.warn("[DEPRECATED] You're using the deprecated 'showToolbar' attribute. Use 'showDeveloperTools' instead."),e.showDeveloperTools=e.showToolbar,delete e.showToolbar),e)));function Cd(e,t){const n=[],r=t.resolveKeyData||(e=>e.key),a=t.resolveValueData||(e=>e.value);for(const[o,i]of Object.entries(e))n.push(...(Array.isArray(i)?i:[i]).map((e=>{const n={key:o,value:e},i=a(n);return"object"==typeof i?Cd(i,t):Array.isArray(i)?i:{["function"==typeof t.key?t.key(n):t.key]:r(n),["function"==typeof t.value?t.value(n):t.value]:i}})).flat());return n}function $d(e,t){return Object.entries(e).map((([e,n])=>{if("object"==typeof n&&(n=$d(n,t)),t.resolve){const r=t.resolve({key:e,value:n});if(void 0!==r)return r}return"number"==typeof n&&(n=n.toString()),"string"==typeof n&&t.wrapValue&&(n=n.replace(new RegExp(t.wrapValue,"g"),`\\${t.wrapValue}`),n=`${t.wrapValue}${n}${t.wrapValue}`),`${e}${t.keyValueSeparator||""}${n}`})).join(t.entrySeparator||"")}jc({cdn:sl().optional().default("https://cdn.jsdelivr.net/npm/@scalar/api-reference"),pageTitle:sl().optional().default("Scalar API Reference")});const Pd=new Set(["title","titleTemplate","script","style","noscript"]),Dd=new Set(["base","meta","link","style","script","noscript"]),Id=new Set(["title","titleTemplate","templateParams","base","htmlAttrs","bodyAttrs","meta","link","style","script","noscript"]),Md=new Set(["base","title","titleTemplate","bodyAttrs","htmlAttrs","templateParams"]),Nd=new Set(["tagPosition","tagPriority","tagDuplicateStrategy","children","innerHTML","textContent","processTemplateParams"]),Rd="undefined"!=typeof window;function Ld(e){let t=9;for(let n=0;n>>9)).toString(16).substring(1,8).toLowerCase()}function Bd(e){if(e._h)return e._h;if(e._d)return Ld(e._d);let t=`${e.tag}:${e.textContent||e.innerHTML||""}:`;for(const n in e.props)t+=`${n}:${String(e.props[n])},`;return Ld(t)}const 
jd=e=>({keyValue:e,metaKey:"property"}),Ud=e=>({keyValue:e}),zd={appleItunesApp:{unpack:{entrySeparator:", ",resolve:({key:e,value:t})=>`${Qd(e)}=${t}`}},articleExpirationTime:jd("article:expiration_time"),articleModifiedTime:jd("article:modified_time"),articlePublishedTime:jd("article:published_time"),bookReleaseDate:jd("book:release_date"),charset:{metaKey:"charset"},contentSecurityPolicy:{unpack:{entrySeparator:"; ",resolve:({key:e,value:t})=>`${Qd(e)} ${t}`},metaKey:"http-equiv"},contentType:{metaKey:"http-equiv"},defaultStyle:{metaKey:"http-equiv"},fbAppId:jd("fb:app_id"),msapplicationConfig:Ud("msapplication-Config"),msapplicationTileColor:Ud("msapplication-TileColor"),msapplicationTileImage:Ud("msapplication-TileImage"),ogAudioSecureUrl:jd("og:audio:secure_url"),ogAudioUrl:jd("og:audio"),ogImageSecureUrl:jd("og:image:secure_url"),ogImageUrl:jd("og:image"),ogSiteName:jd("og:site_name"),ogVideoSecureUrl:jd("og:video:secure_url"),ogVideoUrl:jd("og:video"),profileFirstName:jd("profile:first_name"),profileLastName:jd("profile:last_name"),profileUsername:jd("profile:username"),refresh:{metaKey:"http-equiv",unpack:{entrySeparator:";",resolve({key:e,value:t}){if("seconds"===e)return`${t}`}}},robots:{unpack:{entrySeparator:", ",resolve:({key:e,value:t})=>"boolean"==typeof t?`${Qd(e)}`:`${Qd(e)}:${t}`}},xUaCompatible:{metaKey:"http-equiv"}},Fd=new Set(["og","book","article","profile"]);function Zd(e){const t=Qd(e),n=t.indexOf(":");return Fd.has(t.substring(0,n))?"property":zd[e]?.metaKey||"name"}function Qd(e){const t=e.replace(/([A-Z])/g,"-$1").toLowerCase(),n=t.indexOf("-"),r=t.substring(0,n);return"twitter"===r||Fd.has(r)?e.replace(/([A-Z])/g,":$1").toLowerCase():t}function Hd(e){if(Array.isArray(e))return e.map((e=>Hd(e)));if("object"!=typeof e||Array.isArray(e))return e;const t={};for(const n in e)Object.prototype.hasOwnProperty.call(e,n)&&(t[Qd(n)]=Hd(e[n]));return t}function Vd(e,t){const 
n=zd[t];return"refresh"===t?`${e.seconds};url=${e.url}`:$d(Hd(e),{keyValueSeparator:"=",entrySeparator:", ",resolve:({value:e,key:t})=>null===e?"":"boolean"==typeof e?`${t}`:void 0,...n?.unpack})}const qd=new Set(["og:image","og:video","og:audio","twitter:image"]);function Wd(e){const t={};for(const n in e){if(!Object.prototype.hasOwnProperty.call(e,n))continue;const r=e[n];"false"!==String(r)&&n&&(t[n]=r)}return t}function Xd(e,t){const n=Wd(t),r=Qd(e),a=Zd(r);if(qd.has(r)){const t={};for(const r in n)Object.prototype.hasOwnProperty.call(n,r)&&(t[`${e}${"url"===r?"":`${r[0].toUpperCase()}${r.slice(1)}`}`]=n[r]);return Gd(t).sort(((e,t)=>(e[a]?.length||0)-(t[a]?.length||0)))}return[{[a]:r,...n}]}function Gd(e){const t=[],n={};for(const r in e){if(!Object.prototype.hasOwnProperty.call(e,r))continue;const a=e[r];if(Array.isArray(a))for(const e of a)t.push(..."string"==typeof e?Gd({[r]:e}):Xd(r,e));else if("object"==typeof a&&a){if(qd.has(Qd(r))){t.push(...Xd(r,a));continue}n[r]=Wd(a)}else n[r]=a}const r=Cd(n,{key:({key:e})=>Zd(e),value:({key:e})=>"charset"===e?"charset":"content",resolveKeyData:({key:e})=>function(e){return zd[e]?.keyValue||Qd(e)}(e),resolveValueData:({value:e,key:t})=>null===e?"_null":"object"==typeof e?Vd(e,t):"number"==typeof e?e.toString():e});return[...t,...r].map((e=>("_null"===e.content&&(e.content=null),e)))}function Yd(e,t,n,r){const a=r||ep("object"!=typeof t||"function"==typeof t||t instanceof Promise?{["script"===e||"noscript"===e||"style"===e?"innerHTML":"textContent"]:t}:{...t},"templateParams"===e||"titleTemplate"===e);if(a instanceof Promise)return a.then((r=>Yd(e,t,n,r)));const o={tag:e,props:a};for(const e of Nd){const t=void 0!==o.props[e]?o.props[e]:n[e];void 0!==t&&(("innerHTML"!==e&&"textContent"!==e&&"children"!==e||Pd.has(o.tag))&&(o["children"===e?"innerHTML":e]=t),delete o.props[e])}return o.props.body&&(o.tagPosition="bodyClose",delete o.props.body),"script"===o.tag&&"object"==typeof 
o.innerHTML&&(o.innerHTML=JSON.stringify(o.innerHTML),o.props.type=o.props.type||"application/json"),Array.isArray(o.props.content)?o.props.content.map((e=>({...o,props:{...o.props,content:e}}))):o}function Kd(e,t){const n="class"===e?" ":";";return t&&"object"==typeof t&&!Array.isArray(t)&&(t=Object.entries(t).filter((([,e])=>e)).map((([t,n])=>"style"===e?`${t}:${n}`:t))),String(Array.isArray(t)?t.join(n):t)?.split(n).filter((e=>Boolean(e.trim()))).join(n)}function Jd(e,t,n,r){for(let a=r;a(e[r]=o,Jd(e,t,n,a))));if(!t&&!Nd.has(r)){const t=String(e[r]),n=r.startsWith("data-");"true"===t||""===t?e[r]=!n||"true":e[r]||(n&&"false"===t?e[r]="false":delete e[r])}}else e[r]=Kd(r,e[r])}}function ep(e,t=!1){const n=Jd(e,t,Object.keys(e),0);return n instanceof Promise?n.then((()=>e)):e}function tp(e,t,n){for(let r=n;r(t[r]=n,tp(e,t,r))));Array.isArray(n)?e.push(...n):e.push(n)}}function np(e){const t=[],n=e.resolvedInput;for(const r in n){if(!Object.prototype.hasOwnProperty.call(n,r))continue;const a=n[r];if(void 0!==a&&Id.has(r))if(Array.isArray(a))for(const n of a)t.push(Yd(r,n,e));else t.push(Yd(r,a,e))}if(0===t.length)return[];const r=[];return o=()=>r.map(((t,n)=>(t._e=e._i,e.mode&&(t._m=e.mode),t._p=(e._i<<10)+n,t))),(a=tp(r,t,0))instanceof Promise?a.then(o):o();var a,o}const rp=new Set(["onload","onerror","onabort","onprogress","onloadstart"]),ap={base:-10,title:10},op={critical:-80,high:-10,low:20};function ip(e){const t=e.tagPriority;if("number"==typeof t)return t;let n=100;return"meta"===e.tag?"content-security-policy"===e.props["http-equiv"]?n=-30:e.props.charset?n=-20:"viewport"===e.props.name&&(n=-15):"link"===e.tag&&"preconnect"===e.props.rel?n=20:e.tag in ap&&(n=ap[e.tag]),t&&t in op?n+op[t]:n}const sp=[{prefix:"before:",offset:-1},{prefix:"after:",offset:1}],lp=["name","property","http-equiv"];function cp(e){const{props:t,tag:n}=e;if(Md.has(n))return 
n;if("link"===n&&"canonical"===t.rel)return"canonical";if(t.charset)return"charset";if(t.id)return`${n}:id:${t.id}`;for(const e of lp)if(void 0!==t[e])return`${n}:${e}:${t[e]}`;return!1}const up="%separator",dp=new RegExp(`${up}(?:\\s*${up})*`,"g");function pp(e,t,n,r=!1){if("string"!=typeof e||!e.includes("%"))return e;let a=e;try{a=decodeURI(e)}catch{}const o=a.match(/%\w+(?:\.\w+)?/g);if(!o)return e;const i=e.includes(up);return e=e.replace(/%\w+(?:\.\w+)?/g,(e=>{if(e===up||!o.includes(e))return e;const n=function(e,t,n=!1){let r;if("s"===t||"pageTitle"===t)r=e.pageTitle;else if(t.includes(".")){const n=t.indexOf(".");r=e[t.substring(0,n)]?.[t.substring(n+1)]}else r=e[t];if(void 0!==r)return n?(r||"").replace(/"/g,'\\"'):r||""}(t,e.slice(1),r);return void 0!==n?n:e})).trim(),i&&(e.endsWith(up)&&(e=e.slice(0,-10)),e.startsWith(up)&&(e=e.slice(10)),e=e.replace(dp,n).trim()),e}function hp(e,t){return null==e?t||null:"function"==typeof e?e(t):e}function fp(e){return t=>{const n=t.resolvedOptions.document?.head.querySelector('script[id="unhead:payload"]')?.innerHTML||!1;return n&&t.push(JSON.parse(n)),{mode:"client",hooks:{"entries:updated":t=>{!function(e,t={}){const n=t.delayFn||(e=>setTimeout(e,10));e._domDebouncedUpdatePromise=e._domDebouncedUpdatePromise||new Promise((r=>n((()=>async function(e,t={}){const n=t.document||e.resolvedOptions.document;if(!n||!e.dirty)return;const r={shouldRender:!0,tags:[]};return await e.hooks.callHook("dom:beforeRender",r),r.shouldRender?(e._domUpdatePromise||(e._domUpdatePromise=new Promise((async t=>{const r=(await e.resolveTags()).map((e=>({tag:e,id:Dd.has(e.tag)?Bd(e):e.tag,shouldRender:!0})));let a=e._dom;if(!a){a={elMap:{htmlAttrs:n.documentElement,bodyAttrs:n.body}};const e=new Set;for(const t of["body","head"]){const r=n[t]?.children;for(const t of r){const n=t.tagName.toLowerCase();if(!Dd.has(n))continue;const r={tag:n,props:await 
ep(t.getAttributeNames().reduce(((e,n)=>({...e,[n]:t.getAttribute(n)})),{})),innerHTML:t.innerHTML},o=cp(r);let i=o,s=1;for(;i&&e.has(i);)i=`${o}:${s++}`;i&&(r._d=i,e.add(i)),a.elMap[t.getAttribute("data-hid")||Bd(r)]=t}}}function o(e,t,n){const r=`${e}:${t}`;a.sideEffects[r]=n,delete a.pendingSideEffects[r]}function i({id:e,$el:t,tag:r}){const i=r.tag.endsWith("Attrs");if(a.elMap[e]=t,i||(r.textContent&&r.textContent!==t.textContent&&(t.textContent=r.textContent),r.innerHTML&&r.innerHTML!==t.innerHTML&&(t.innerHTML=r.innerHTML),o(e,"el",(()=>{a.elMap[e]?.remove(),delete a.elMap[e]}))),r._eventHandlers)for(const e in r._eventHandlers)Object.prototype.hasOwnProperty.call(r._eventHandlers,e)&&""!==t.getAttribute(`data-${e}`)&&(("bodyAttrs"===r.tag?n.defaultView:t).addEventListener(e.substring(2),r._eventHandlers[e].bind(t)),t.setAttribute(`data-${e}`,""));for(const n in r.props){if(!Object.prototype.hasOwnProperty.call(r.props,n))continue;const a=r.props[n],s=`attr:${n}`;if("class"===n){if(!a)continue;for(const n of a.split(" "))i&&o(e,`${s}:${n}`,(()=>t.classList.remove(n))),!t.classList.contains(n)&&t.classList.add(n)}else if("style"===n){if(!a)continue;for(const n of a.split(";")){const r=n.indexOf(":"),a=n.substring(0,r).trim(),i=n.substring(r+1).trim();o(e,`${s}:${a}`,(()=>{t.style.removeProperty(a)})),t.style.setProperty(a,i)}}else t.getAttribute(n)!==a&&t.setAttribute(n,!0===a?"":String(a)),i&&o(e,s,(()=>t.removeAttribute(n)))}}a.pendingSideEffects={...a.sideEffects},a.sideEffects={};const s=[],l={bodyClose:void 0,bodyOpen:void 0,head:void 0};for(const e of r){const{tag:t,shouldRender:r,id:o}=e;r&&("title"!==t.tag?(e.$el=e.$el||a.elMap[o],e.$el?i(e):Dd.has(t.tag)&&s.push(e)):n.title=t.textContent)}for(const e of s){const t=e.tag.tagPosition||"head";e.$el=n.createElement(e.tag.tag),i(e),l[t]=l[t]||n.createDocumentFragment(),l[t].appendChild(e.$el)}for(const t of r)await 
e.hooks.callHook("dom:renderTag",t,n,o);l.head&&n.head.appendChild(l.head),l.bodyOpen&&n.body.insertBefore(l.bodyOpen,n.body.firstChild),l.bodyClose&&n.body.appendChild(l.bodyClose);for(const e in a.pendingSideEffects)a.pendingSideEffects[e]();e._dom=a,await e.hooks.callHook("dom:rendered",{renders:r}),t()})).finally((()=>{e._domUpdatePromise=void 0,e.dirty=!1}))),e._domUpdatePromise):void 0}(e,t).then((()=>{delete e._domDebouncedUpdatePromise,r()}))))))}(t,e)}}}}}function mp(e,t={},n){for(const r in e){const a=e[r],o=n?`${n}:${r}`:r;"object"==typeof a&&null!==a?mp(a,t,o):"function"==typeof a&&(t[o]=a)}return t}const gp={run:e=>e()},vp=void 0!==console.createTask?console.createTask:()=>gp;function bp(e,t){const n=t.shift(),r=vp(n);return e.reduce(((e,n)=>e.then((()=>r.run((()=>n(...t)))))),Promise.resolve())}function yp(e,t){const n=t.shift(),r=vp(n);return Promise.all(e.map((e=>r.run((()=>e(...t))))))}function Op(e,t){for(const n of[...e])n(t)}class wp{constructor(){this._hooks={},this._before=void 0,this._after=void 0,this._deprecatedMessages=void 0,this._deprecatedHooks={},this.hook=this.hook.bind(this),this.callHook=this.callHook.bind(this),this.callHookWith=this.callHookWith.bind(this)}hook(e,t,n={}){if(!e||"function"!=typeof t)return()=>{};const r=e;let a;for(;this._deprecatedHooks[e];)a=this._deprecatedHooks[e],e=a.to;if(a&&!n.allowDeprecated){let e=a.message;e||(e=`${r} hook has been deprecated`+(a.to?`, please use ${a.to}`:"")),this._deprecatedMessages||(this._deprecatedMessages=new Set),this._deprecatedMessages.has(e)||(console.warn(e),this._deprecatedMessages.add(e))}if(!t.name)try{Object.defineProperty(t,"name",{get:()=>"_"+e.replace(/\W+/g,"_")+"_hook_cb",configurable:!0})}catch{}return this._hooks[e]=this._hooks[e]||[],this._hooks[e].push(t),()=>{t&&(this.removeHook(e,t),t=void 0)}}hookOnce(e,t){let n,r=(...e)=>("function"==typeof n&&n(),n=void 0,r=void 0,t(...e));return n=this.hook(e,r),n}removeHook(e,t){if(this._hooks[e]){const 
n=this._hooks[e].indexOf(t);-1!==n&&this._hooks[e].splice(n,1),0===this._hooks[e].length&&delete this._hooks[e]}}deprecateHook(e,t){this._deprecatedHooks[e]="string"==typeof t?{to:t}:t;const n=this._hooks[e]||[];delete this._hooks[e];for(const t of n)this.hook(e,t)}deprecateHooks(e){Object.assign(this._deprecatedHooks,e);for(const t in e)this.deprecateHook(t,e[t])}addHooks(e){const t=mp(e),n=Object.keys(t).map((e=>this.hook(e,t[e])));return()=>{for(const e of n.splice(0,n.length))e()}}removeHooks(e){const t=mp(e);for(const e in t)this.removeHook(e,t[e])}removeAllHooks(){for(const e in this._hooks)delete this._hooks[e]}callHook(e,...t){return t.unshift(e),this.callHookWith(bp,e,...t)}callHookParallel(e,...t){return t.unshift(e),this.callHookWith(yp,e,...t)}callHookWith(e,t,...n){const r=this._before||this._after?{name:t,args:n,context:{}}:void 0;this._before&&Op(this._before,r);const a=e(t in this._hooks?[...this._hooks[t]]:[],n);return a instanceof Promise?a.finally((()=>{this._after&&r&&Op(this._after,r)})):(this._after&&r&&Op(this._after,r),a)}beforeEach(e){return this._before=this._before||[],this._before.push(e),()=>{if(void 0!==this._before){const t=this._before.indexOf(e);-1!==t&&this._before.splice(t,1)}}}afterEach(e){return this._after=this._after||[],this._after.push(e),()=>{if(void 0!==this._after){const t=this._after.indexOf(e);-1!==t&&this._after.splice(t,1)}}}}const xp=new Set(["templateParams","htmlAttrs","bodyAttrs"]),kp={hooks:{"tag:normalise":({tag:e})=>{e.props.hid&&(e.key=e.props.hid,delete e.props.hid),e.props.vmid&&(e.key=e.props.vmid,delete e.props.vmid),e.props.key&&(e.key=e.props.key,delete e.props.key);const t=cp(e);!t||t.startsWith("meta:og:")||t.startsWith("meta:twitter:")||delete e.key;const n=t||!!e.key&&`${e.tag}:${e.key}`;n&&(e._d=n)},"tags:resolve":e=>{const t=Object.create(null);for(const n of e.tags){const e=(n.key?`${n.tag}:${n.key}`:n._d)||Bd(n),r=t[e];if(r){let 
a=n?.tagDuplicateStrategy;if(!a&&xp.has(n.tag)&&(a="merge"),"merge"===a){const a=r.props;a.style&&n.props.style&&(";"!==a.style[a.style.length-1]&&(a.style+=";"),n.props.style=`${a.style} ${n.props.style}`),a.class&&n.props.class?n.props.class=`${a.class} ${n.props.class}`:a.class&&(n.props.class=a.class),t[e].props={...a,...n.props};continue}if(n._e===r._e){r._duped=r._duped||[],n._d=`${r._d}:${r._duped.length+1}`,r._duped.push(n);continue}if(ip(n)>ip(r))continue}n.innerHTML||n.textContent||0!==Object.keys(n.props).length||!Dd.has(n.tag)?t[e]=n:delete t[e]}const n=[];for(const e in t){const r=t[e],a=r._duped;n.push(r),a&&(delete r._duped,n.push(...a))}e.tags=n,e.tags=e.tags.filter((e=>!("meta"===e.tag&&(e.props.name||e.props.property)&&!e.props.content)))}}},Sp=new Set(["script","link","bodyAttrs"]),_p=e=>({hooks:{"tags:resolve":t=>{for(const n of t.tags){if(!Sp.has(n.tag))continue;const t=n.props;for(const r in t){if("o"!==r[0]||"n"!==r[1])continue;if(!Object.prototype.hasOwnProperty.call(t,r))continue;const a=t[r];"function"==typeof a&&(e.ssr&&rp.has(r)?t[r]=`this.dataset.${r}fired = true`:delete t[r],n._eventHandlers=n._eventHandlers||{},n._eventHandlers[r]=a)}e.ssr&&n._eventHandlers&&(n.props.src||n.props.href)&&(n.key=n.key||Ld(n.props.src||n.props.href))}},"dom:renderTag":({$el:e,tag:t})=>{const n=e?.dataset;if(n)for(const r in n){if(!r.endsWith("fired"))continue;const n=r.slice(0,-5);rp.has(n)&&t._eventHandlers?.[n]?.call(e,new Event(n.substring(2)))}}}}),Tp=new Set(["link","style","script","noscript"]),Ep={hooks:{"tag:normalise":({tag:e})=>{e.key&&Tp.has(e.tag)&&(e.props["data-hid"]=e._h=Ld(e.key))}}},Ap={mode:"server",hooks:{"tags:beforeResolve":e=>{const t={};let n=!1;for(const r of 
e.tags)"server"!==r._m||"titleTemplate"!==r.tag&&"templateParams"!==r.tag&&"title"!==r.tag||(t[r.tag]="title"===r.tag||"titleTemplate"===r.tag?r.textContent:r.props,n=!0);n&&e.tags.push({tag:"script",innerHTML:JSON.stringify(t),props:{id:"unhead:payload",type:"application/json"}})}}},Cp={hooks:{"tags:resolve":e=>{for(const t of e.tags)if("string"==typeof t.tagPriority)for(const{prefix:n,offset:r}of sp){if(!t.tagPriority.startsWith(n))continue;const a=t.tagPriority.substring(n.length),o=e.tags.find((e=>e._d===a))?._p;if(void 0!==o){t._p=o+r;break}}e.tags.sort(((e,t)=>{const n=ip(e),r=ip(t);return nr?1:e._p-t._p}))}}},$p={meta:"content",link:"href",htmlAttrs:"lang"},Pp=["innerHTML","textContent"],Dp=e=>({hooks:{"tags:resolve":t=>{const{tags:n}=t;let r;for(let e=0;e"title"===e.tag))?.textContent||"",a,o);for(const e of n){if(!1===e.processTemplateParams)continue;const t=$p[e.tag];if(t&&"string"==typeof e.props[t])e.props[t]=pp(e.props[t],a,o);else if(e.processTemplateParams||"titleTemplate"===e.tag||"title"===e.tag)for(const t of Pp)"string"==typeof e[t]&&(e[t]=pp(e[t],a,o,"script"===e.tag&&e.props.type.endsWith("json")))}e._templateParams=a,e._separator=o},"tags:afterResolve":({tags:t})=>{let n;for(let e=0;e{const{tags:t}=e;let n,r;for(let e=0;e{for(const t of e.tags)"string"==typeof t.innerHTML&&(!t.innerHTML||"application/ld+json"!==t.props.type&&"application/json"!==t.props.type?t.innerHTML=t.innerHTML.replace(new RegExp(`e in t}const Bp={},jp=[],Up=()=>{},zp=()=>!1,Fp=e=>111===e.charCodeAt(0)&&110===e.charCodeAt(1)&&(e.charCodeAt(2)>122||e.charCodeAt(2)<97),Zp=e=>e.startsWith("onUpdate:"),Qp=Object.assign,Hp=(e,t)=>{const n=e.indexOf(t);n>-1&&e.splice(n,1)},Vp=Object.prototype.hasOwnProperty,qp=(e,t)=>Vp.call(e,t),Wp=Array.isArray,Xp=e=>"[object Map]"===ah(e),Gp=e=>"[object Set]"===ah(e),Yp=e=>"[object Date]"===ah(e),Kp=e=>"function"==typeof e,Jp=e=>"string"==typeof e,eh=e=>"symbol"==typeof e,th=e=>null!==e&&"object"==typeof 
e,nh=e=>(th(e)||Kp(e))&&Kp(e.then)&&Kp(e.catch),rh=Object.prototype.toString,ah=e=>rh.call(e),oh=e=>"[object Object]"===ah(e),ih=e=>Jp(e)&&"NaN"!==e&&"-"!==e[0]&&""+parseInt(e,10)===e,sh=Lp(",key,ref,ref_for,ref_key,onVnodeBeforeMount,onVnodeMounted,onVnodeBeforeUpdate,onVnodeUpdated,onVnodeBeforeUnmount,onVnodeUnmounted"),lh=e=>{const t=Object.create(null);return n=>t[n]||(t[n]=e(n))},ch=/-\w/g,uh=lh((e=>e.replace(ch,(e=>e.slice(1).toUpperCase())))),dh=/\B([A-Z])/g,ph=lh((e=>e.replace(dh,"-$1").toLowerCase())),hh=lh((e=>e.charAt(0).toUpperCase()+e.slice(1))),fh=lh((e=>e?`on${hh(e)}`:"")),mh=(e,t)=>!Object.is(e,t),gh=(e,...t)=>{for(let n=0;n{Object.defineProperty(e,t,{configurable:!0,enumerable:!1,writable:r,value:n})},bh=e=>{const t=parseFloat(e);return isNaN(t)?e:t};let yh;const Oh=()=>yh||(yh="undefined"!=typeof globalThis?globalThis:"undefined"!=typeof self?self:"undefined"!=typeof window?window:"undefined"!=typeof global?global:{});function wh(e){if(Wp(e)){const t={};for(let n=0;n{if(e){const n=e.split(kh);n.length>1&&(t[n[0].trim()]=n[1].trim())}})),t}function Th(e){let t="";if(Jp(e))t=e;else if(Wp(e))for(let n=0;n$h(e,t)))}const Dh=e=>!(!e||!0!==e.__v_isRef),Ih=e=>Jp(e)?e:null==e?"":Wp(e)||th(e)&&(e.toString===rh||!Kp(e.toString))?Dh(e)?Ih(e.value):JSON.stringify(e,Mh,2):String(e),Mh=(e,t)=>Dh(t)?Mh(e,t.value):Xp(t)?{[`Map(${t.size})`]:[...t.entries()].reduce(((e,[t,n],r)=>(e[Nh(t,r)+" =>"]=n,e)),{})}:Gp(t)?{[`Set(${t.size})`]:[...t.values()].map((e=>Nh(e)))}:eh(t)?Nh(t):!th(t)||Wp(t)||oh(t)?t:String(t),Nh=(e,t="")=>{var n;return eh(e)?`Symbol(${null!=(n=e.description)?n:t})`:e};function Rh(e){return null==e?"initial":"string"==typeof e?""===e?" 
":e:String(e)}let Lh,Bh;class jh{constructor(e=!1){this.detached=e,this._active=!0,this._on=0,this.effects=[],this.cleanups=[],this._isPaused=!1,this.parent=Lh,!e&&Lh&&(this.index=(Lh.scopes||(Lh.scopes=[])).push(this)-1)}get active(){return this._active}pause(){if(this._active){let e,t;if(this._isPaused=!0,this.scopes)for(e=0,t=this.scopes.length;e0&&0==--this._on&&(Lh=this.prevScope,this.prevScope=void 0)}stop(e){if(this._active){let t,n;for(this._active=!1,t=0,n=this.effects.length;t0)return;if(Vh){let e=Vh;for(Vh=void 0;e;){const t=e.next;e.next=void 0,e.flags&=-9,e=t}}let e;for(;Hh;){let t=Hh;for(Hh=void 0;t;){const n=t.next;if(t.next=void 0,t.flags&=-9,1&t.flags)try{t.trigger()}catch(t){e||(e=t)}t=n}}if(e)throw e}function Yh(e){for(let t=e.deps;t;t=t.nextDep)t.version=-1,t.prevActiveLink=t.dep.activeLink,t.dep.activeLink=t}function Kh(e){let t,n=e.depsTail,r=n;for(;r;){const e=r.prevDep;-1===r.version?(r===n&&(n=e),tf(r),nf(r)):t=r,r.dep.activeLink=r.prevActiveLink,r.prevActiveLink=void 0,r=e}e.deps=t,e.depsTail=n}function Jh(e){for(let t=e.deps;t;t=t.nextDep)if(t.dep.version!==t.version||t.dep.computed&&(ef(t.dep.computed)||t.dep.version!==t.version))return!0;return!!e._dirty}function ef(e){if(4&e.flags&&!(16&e.flags))return;if(e.flags&=-17,e.globalVersion===cf)return;if(e.globalVersion=cf,!e.isSSR&&128&e.flags&&(!e.deps&&!e._dirty||!Jh(e)))return;e.flags|=2;const t=e.dep,n=Bh,r=rf;Bh=e,rf=!0;try{Yh(e);const n=e.fn(e._value);(0===t.version||mh(n,e._value))&&(e.flags|=128,e._value=n,t.version++)}catch(e){throw t.version++,e}finally{Bh=n,rf=r,Kh(e),e.flags&=-3}}function tf(e,t=!1){const{dep:n,prevSub:r,nextSub:a}=e;if(r&&(r.nextSub=a,e.prevSub=void 0),a&&(a.prevSub=r,e.nextSub=void 0),n.subs===e&&(n.subs=r,!r&&n.computed)){n.computed.flags&=-5;for(let e=n.computed.deps;e;e=e.nextDep)tf(e,!0)}t||--n.sc||!n.map||n.map.delete(n.key)}function nf(e){const{prevDep:t,nextDep:n}=e;t&&(t.nextDep=n,e.prevDep=void 0),n&&(n.prevDep=t,e.nextDep=void 0)}let rf=!0;const 
af=[];function of(){af.push(rf),rf=!1}function sf(){const e=af.pop();rf=void 0===e||e}function lf(e){const{cleanup:t}=e;if(e.cleanup=void 0,t){const e=Bh;Bh=void 0;try{t()}finally{Bh=e}}}let cf=0;class uf{constructor(e,t){this.sub=e,this.dep=t,this.version=t.version,this.nextDep=this.prevDep=this.nextSub=this.prevSub=this.prevActiveLink=void 0}}class df{constructor(e){this.computed=e,this.version=0,this.activeLink=void 0,this.subs=void 0,this.map=void 0,this.key=void 0,this.sc=0,this.__v_skip=!0}track(e){if(!Bh||!rf||Bh===this.computed)return;let t=this.activeLink;if(void 0===t||t.sub!==Bh)t=this.activeLink=new uf(Bh,this),Bh.deps?(t.prevDep=Bh.depsTail,Bh.depsTail.nextDep=t,Bh.depsTail=t):Bh.deps=Bh.depsTail=t,pf(t);else if(-1===t.version&&(t.version=this.version,t.nextDep)){const e=t.nextDep;e.prevDep=t.prevDep,t.prevDep&&(t.prevDep.nextDep=e),t.prevDep=Bh.depsTail,t.nextDep=void 0,Bh.depsTail.nextDep=t,Bh.depsTail=t,Bh.deps===t&&(Bh.deps=e)}return t}trigger(e){this.version++,cf++,this.notify(e)}notify(e){Xh();try{for(let e=this.subs;e;e=e.prevSub)e.sub.notify()&&e.sub.dep.notify()}finally{Gh()}}}function pf(e){if(e.dep.sc++,4&e.sub.flags){const t=e.dep.computed;if(t&&!e.dep.subs){t.flags|=20;for(let e=t.deps;e;e=e.nextDep)pf(e)}const n=e.dep.subs;n!==e&&(e.prevSub=n,n&&(n.nextSub=e)),e.dep.subs=e}}const hf=new WeakMap,ff=Symbol(""),mf=Symbol(""),gf=Symbol("");function vf(e,t,n){if(rf&&Bh){let t=hf.get(e);t||hf.set(e,t=new Map);let r=t.get(n);r||(t.set(n,r=new df),r.map=t,r.key=n),r.track()}}function bf(e,t,n,r,a,o){const i=hf.get(e);if(!i)return void cf++;const s=e=>{e&&e.trigger()};if(Xh(),"clear"===t)i.forEach(s);else{const a=Wp(e),o=a&&ih(n);if(a&&"length"===n){const e=Number(r);i.forEach(((t,n)=>{("length"===n||n===gf||!eh(n)&&n>=e)&&s(t)}))}else switch((void 0!==n||i.has(void 
0))&&s(i.get(n)),o&&s(i.get(gf)),t){case"add":a?o&&s(i.get("length")):(s(i.get(ff)),Xp(e)&&s(i.get(mf)));break;case"delete":a||(s(i.get(ff)),Xp(e)&&s(i.get(mf)));break;case"set":Xp(e)&&s(i.get(ff))}}Gh()}function yf(e){const t=om(e);return t===e?t:(vf(t,0,gf),rm(e)?t:t.map(sm))}function Of(e){return vf(e=om(e),0,gf),e}function wf(e,t){return nm(e)?tm(e)?lm(sm(t)):lm(t):sm(t)}const xf={__proto__:null,[Symbol.iterator](){return kf(this,Symbol.iterator,(e=>wf(this,e)))},concat(...e){return yf(this).concat(...e.map((e=>Wp(e)?yf(e):e)))},entries(){return kf(this,"entries",(e=>(e[1]=wf(this,e[1]),e)))},every(e,t){return _f(this,"every",e,t,void 0,arguments)},filter(e,t){return _f(this,"filter",e,t,(e=>e.map((e=>wf(this,e)))),arguments)},find(e,t){return _f(this,"find",e,t,(e=>wf(this,e)),arguments)},findIndex(e,t){return _f(this,"findIndex",e,t,void 0,arguments)},findLast(e,t){return _f(this,"findLast",e,t,(e=>wf(this,e)),arguments)},findLastIndex(e,t){return _f(this,"findLastIndex",e,t,void 0,arguments)},forEach(e,t){return _f(this,"forEach",e,t,void 0,arguments)},includes(...e){return Ef(this,"includes",e)},indexOf(...e){return Ef(this,"indexOf",e)},join(e){return yf(this).join(e)},lastIndexOf(...e){return Ef(this,"lastIndexOf",e)},map(e,t){return _f(this,"map",e,t,void 0,arguments)},pop(){return Af(this,"pop")},push(...e){return Af(this,"push",e)},reduce(e,...t){return Tf(this,"reduce",e,t)},reduceRight(e,...t){return Tf(this,"reduceRight",e,t)},shift(){return Af(this,"shift")},some(e,t){return _f(this,"some",e,t,void 0,arguments)},splice(...e){return Af(this,"splice",e)},toReversed(){return yf(this).toReversed()},toSorted(e){return yf(this).toSorted(e)},toSpliced(...e){return yf(this).toSpliced(...e)},unshift(...e){return Af(this,"unshift",e)},values(){return kf(this,"values",(e=>wf(this,e)))}};function kf(e,t,n){const r=Of(e),a=r[t]();return r===e||rm(e)||(a._next=a.next,a.next=()=>{const e=a._next();return e.done||(e.value=n(e.value)),e}),a}const 
Sf=Array.prototype;function _f(e,t,n,r,a,o){const i=Of(e),s=i!==e&&!rm(e),l=i[t];if(l!==Sf[t]){const t=l.apply(e,o);return s?sm(t):t}let c=n;i!==e&&(s?c=function(t,r){return n.call(this,wf(e,t),r,e)}:n.length>2&&(c=function(t,r){return n.call(this,t,r,e)}));const u=l.call(i,c,r);return s&&a?a(u):u}function Tf(e,t,n,r){const a=Of(e);let o=n;return a!==e&&(rm(e)?n.length>3&&(o=function(t,r,a){return n.call(this,t,r,a,e)}):o=function(t,r,a){return n.call(this,t,wf(e,r),a,e)}),a[t](o,...r)}function Ef(e,t,n){const r=om(e);vf(r,0,gf);const a=r[t](...n);return-1!==a&&!1!==a||!am(n[0])?a:(n[0]=om(n[0]),r[t](...n))}function Af(e,t,n=[]){of(),Xh();const r=om(e)[t].apply(e,n);return Gh(),sf(),r}const Cf=Lp("__proto__,__v_isRef,__isVue"),$f=new Set(Object.getOwnPropertyNames(Symbol).filter((e=>"arguments"!==e&&"caller"!==e)).map((e=>Symbol[e])).filter(eh));function Pf(e){eh(e)||(e=String(e));const t=om(this);return vf(t,0,e),t.hasOwnProperty(e)}class Df{constructor(e=!1,t=!1){this._isReadonly=e,this._isShallow=t}get(e,t,n){if("__v_skip"===t)return e.__v_skip;const r=this._isReadonly,a=this._isShallow;if("__v_isReactive"===t)return!r;if("__v_isReadonly"===t)return r;if("__v_isShallow"===t)return a;if("__v_raw"===t)return n===(r?a?Gf:Xf:a?Wf:qf).get(e)||Object.getPrototypeOf(e)===Object.getPrototypeOf(n)?e:void 0;const o=Wp(e);if(!r){let e;if(o&&(e=xf[t]))return e;if("hasOwnProperty"===t)return Pf}const i=Reflect.get(e,t,cm(e)?e:n);if(eh(t)?$f.has(t):Cf(t))return i;if(r||vf(e,0,t),a)return i;if(cm(i)){const e=o&&ih(t)?i:i.value;return r&&th(e)?Kf(e):e}return th(i)?r?Kf(i):Yf(i):i}}class If extends Df{constructor(e=!1){super(!1,e)}set(e,t,n,r){let a=e[t];const o=Wp(e)&&ih(t);if(!this._isShallow){const e=nm(a);if(rm(n)||nm(n)||(a=om(a),n=om(n)),!o&&cm(a)&&!cm(n))return e||(a.value=n),!0}const i=o?Number(t)e,Uf=e=>Reflect.getPrototypeOf(e);function zf(e){return function(...t){return"delete"!==e&&("clear"===e?void 0:this)}}function Ff(e,t){const n=function(e,t){const 
n={get(n){const r=this.__v_raw,a=om(r),o=om(n);e||(mh(n,o)&&vf(a,0,n),vf(a,0,o));const{has:i}=Uf(a),s=t?jf:e?lm:sm;return i.call(a,n)?s(r.get(n)):i.call(a,o)?s(r.get(o)):void(r!==a&&r.get(n))},get size(){const t=this.__v_raw;return!e&&vf(om(t),0,ff),t.size},has(t){const n=this.__v_raw,r=om(n),a=om(t);return e||(mh(t,a)&&vf(r,0,t),vf(r,0,a)),t===a?n.has(t):n.has(t)||n.has(a)},forEach(n,r){const a=this,o=a.__v_raw,i=om(o),s=t?jf:e?lm:sm;return!e&&vf(i,0,ff),o.forEach(((e,t)=>n.call(r,s(e),s(t),a)))}};return Qp(n,e?{add:zf("add"),set:zf("set"),delete:zf("delete"),clear:zf("clear")}:{add(e){t||rm(e)||nm(e)||(e=om(e));const n=om(this);return Uf(n).has.call(n,e)||(n.add(e),bf(n,"add",e,e)),this},set(e,n){t||rm(n)||nm(n)||(n=om(n));const r=om(this),{has:a,get:o}=Uf(r);let i=a.call(r,e);i||(e=om(e),i=a.call(r,e));const s=o.call(r,e);return r.set(e,n),i?mh(n,s)&&bf(r,"set",e,n):bf(r,"add",e,n),this},delete(e){const t=om(this),{has:n,get:r}=Uf(t);let a=n.call(t,e);a||(e=om(e),a=n.call(t,e)),r&&r.call(t,e);const o=t.delete(e);return a&&bf(t,"delete",e,void 0),o},clear(){const e=om(this),t=0!==e.size,n=e.clear();return t&&bf(e,"clear",void 0,void 0),n}}),["keys","values","entries",Symbol.iterator].forEach((r=>{n[r]=function(e,t,n){return function(...r){const a=this.__v_raw,o=om(a),i=Xp(o),s="entries"===e||e===Symbol.iterator&&i,l="keys"===e&&i,c=a[e](...r),u=n?jf:t?lm:sm;return!t&&vf(o,0,l?mf:ff),{next(){const{value:e,done:t}=c.next();return t?{value:e,done:t}:{value:s?[u(e[0]),u(e[1])]:u(e),done:t}},[Symbol.iterator](){return this}}}}(r,e,t)})),n}(e,t);return(t,r,a)=>"__v_isReactive"===r?!e:"__v_isReadonly"===r?e:"__v_raw"===r?t:Reflect.get(qp(n,r)&&r in t?n:t,r,a)}const Zf={get:Ff(!1,!1)},Qf={get:Ff(!1,!0)},Hf={get:Ff(!0,!1)},Vf={get:Ff(!0,!0)},qf=new WeakMap,Wf=new WeakMap,Xf=new WeakMap,Gf=new WeakMap;function Yf(e){return nm(e)?e:em(e,!1,Nf,Zf,qf)}function Kf(e){return em(e,!0,Rf,Hf,Xf)}function Jf(e){return em(e,!0,Bf,Vf,Gf)}function em(e,t,n,r,a){if(!th(e))return 
e;if(e.__v_raw&&(!t||!e.__v_isReactive))return e;const o=(i=e).__v_skip||!Object.isExtensible(i)?0:function(e){switch(e){case"Object":case"Array":return 1;case"Map":case"Set":case"WeakMap":case"WeakSet":return 2;default:return 0}}((e=>ah(e).slice(8,-1))(i));var i;if(0===o)return e;const s=a.get(e);if(s)return s;const l=new Proxy(e,2===o?r:n);return a.set(e,l),l}function tm(e){return nm(e)?tm(e.__v_raw):!(!e||!e.__v_isReactive)}function nm(e){return!(!e||!e.__v_isReadonly)}function rm(e){return!(!e||!e.__v_isShallow)}function am(e){return!!e&&!!e.__v_raw}function om(e){const t=e&&e.__v_raw;return t?om(t):e}function im(e){return!qp(e,"__v_skip")&&Object.isExtensible(e)&&vh(e,"__v_skip",!0),e}const sm=e=>th(e)?Yf(e):e,lm=e=>th(e)?Kf(e):e;function cm(e){return!!e&&!0===e.__v_isRef}function um(e){return pm(e,!1)}function dm(e){return pm(e,!0)}function pm(e,t){return cm(e)?e:new hm(e,t)}class hm{constructor(e,t){this.dep=new df,this.__v_isRef=!0,this.__v_isShallow=!1,this._rawValue=t?e:om(e),this._value=t?e:sm(e),this.__v_isShallow=t}get value(){return this.dep.track(),this._value}set value(e){const t=this._rawValue,n=this.__v_isShallow||rm(e)||nm(e);e=n?e:om(e),mh(e,t)&&(this._rawValue=e,this._value=n?e:sm(e),this.dep.trigger())}}function fm(e){return cm(e)?e.value:e}function mm(e){return Kp(e)?e():fm(e)}const gm={get:(e,t,n)=>"__v_raw"===t?e:fm(Reflect.get(e,t,n)),set:(e,t,n,r)=>{const a=e[t];return cm(a)&&!cm(n)?(a.value=n,!0):Reflect.set(e,t,n,r)}};function vm(e){return tm(e)?e:new Proxy(e,gm)}class bm{constructor(e){this.__v_isRef=!0,this._value=void 0;const t=this.dep=new df,{get:n,set:r}=e(t.track.bind(t),t.trigger.bind(t));this._get=n,this._set=r}get value(){return this._value=this._get()}set value(e){this._set(e)}}function ym(e){return new bm(e)}function Om(e){const t=Wp(e)?new Array(e.length):{};for(const n in e)t[n]=Sm(e,n);return t}class wm{constructor(e,t,n){this._object=e,this._key=t,this._defaultValue=n,this.__v_isRef=!0,this._value=void 
0,this._raw=om(e);let r=!0,a=e;if(!Wp(e)||!ih(String(t)))do{r=!am(a)||rm(a)}while(r&&(a=a.__v_raw));this._shallow=r}get value(){let e=this._object[this._key];return this._shallow&&(e=fm(e)),this._value=void 0===e?this._defaultValue:e}set value(e){if(this._shallow&&cm(this._raw[this._key])){const t=this._object[this._key];if(cm(t))return void(t.value=e)}this._object[this._key]=e}get dep(){return function(e,t){const n=hf.get(e);return n&&n.get(t)}(this._raw,this._key)}}class xm{constructor(e){this._getter=e,this.__v_isRef=!0,this.__v_isReadonly=!0,this._value=void 0}get value(){return this._value=this._getter()}}function km(e,t,n){return cm(e)?e:Kp(e)?new xm(e):th(e)&&arguments.length>1?Sm(e,t,n):um(e)}function Sm(e,t,n){return new wm(e,t,n)}class _m{constructor(e,t,n){this.fn=e,this.setter=t,this._value=void 0,this.dep=new df(this),this.__v_isRef=!0,this.deps=void 0,this.depsTail=void 0,this.flags=16,this.globalVersion=cf-1,this.next=void 0,this.effect=this,this.__v_isReadonly=!t,this.isSSR=n}notify(){if(this.flags|=16,!(8&this.flags)&&Bh!==this)return Wh(this,!0),!0}get value(){const e=this.dep.track();return ef(this),e&&(e.version=this.dep.version),this._value}set value(e){this.setter&&this.setter(e)}}const Tm={},Em=new WeakMap;let Am;function Cm(e,t=1/0,n){if(t<=0||!th(e)||e.__v_skip)return e;if(((n=n||new Map).get(e)||0)>=t)return e;if(n.set(e,t),t--,cm(e))Cm(e.value,t,n);else if(Wp(e))for(let r=0;r{Cm(e,t,n)}));else if(oh(e)){for(const r in e)Cm(e[r],t,n);for(const r of Object.getOwnPropertySymbols(e))Object.prototype.propertyIsEnumerable.call(e,r)&&Cm(e[r],t,n)}return e}function $m(e,t,n,r){try{return r?e(...r):e()}catch(e){Dm(e,t,n)}}function Pm(e,t,n,r){if(Kp(e)){const a=$m(e,t,n,r);return a&&nh(a)&&a.catch((e=>{Dm(e,t,n)})),a}if(Wp(e)){const a=[];for(let o=0;o=Vm(n)?Im.push(e):Im.splice(function(e){let t=Mm+1,n=Im.length;for(;t>>1,a=Im[r],o=Vm(a);oVm(e)-Vm(t)));if(Nm.length=0,Rm)return void 
Rm.push(...e);for(Rm=e,Lm=0;Lmnull==e.id?2&e.flags?-1:1/0:e.id;function qm(e){try{for(Mm=0;Mm{r._d&&Ab(-1);const a=Gm(t);let o;try{o=e(...n)}finally{Gm(a),r._d&&Ab(1)}return o};return r._n=!0,r._c=!0,r._d=!0,r}function Km(e,t){if(null===Wm)return e;const n=ly(Wm),r=e.dirs||(e.dirs=[]);for(let e=0;e1)return n&&Kp(t)?t.call(r&&r.proxy):t}}function ng(){return!(!Yb()&&!Uv)}const rg=Symbol.for("v-scx");function ag(e,t){return ig(e,null,t)}function og(e,t,n){return ig(e,t,n)}function ig(e,t,n=Bp){const{immediate:r,deep:a,flush:o,once:i}=n,s=Qp({},n),l=t&&r||!t&&"post"!==o;let c;if(ry)if("sync"===o){const e=tg(rg);c=e.__watcherHandles||(e.__watcherHandles=[])}else if(!l){const e=()=>{};return e.stop=Up,e.resume=Up,e.pause=Up,e}const u=Gb;s.call=(e,t,n)=>Pm(e,u,t,n);let d=!1;"post"===o?s.scheduler=e=>{db(e,u&&u.suspense)}:"sync"!==o&&(d=!0,s.scheduler=(e,t)=>{t?e():zm(e)}),s.augmentJob=e=>{t&&(e.flags|=4),d&&(e.flags|=2,u&&(e.id=u.uid,e.i=u))};const p=function(e,t,n=Bp){const{immediate:r,deep:a,once:o,scheduler:i,augmentJob:s,call:l}=n,c=e=>a?e:rm(e)||!1===a||0===a?Cm(e,1):Cm(e);let u,d,p,h,f=!1,m=!1;if(cm(e)?(d=()=>e.value,f=rm(e)):tm(e)?(d=()=>c(e),f=!0):Wp(e)?(m=!0,f=e.some((e=>tm(e)||rm(e))),d=()=>e.map((e=>cm(e)?e.value:tm(e)?c(e):Kp(e)?l?l(e,2):e():void 0))):d=Kp(e)?t?l?()=>l(e,2):e:()=>{if(p){of();try{p()}finally{sf()}}const t=Am;Am=u;try{return l?l(e,3,[h]):e(h)}finally{Am=t}}:Up,t&&a){const e=d,t=!0===a?1/0:a;d=()=>Cm(e(),t)}const g=zh(),v=()=>{u.stop(),g&&g.active&&Hp(g.effects,u)};if(o&&t){const e=t;t=(...t)=>{e(...t),v()}}let b=m?new Array(e.length).fill(Tm):Tm;const y=e=>{if(1&u.flags&&(u.dirty||e))if(t){const e=u.run();if(a||f||(m?e.some(((e,t)=>mh(e,b[t]))):mh(e,b))){p&&p();const n=Am;Am=u;try{const n=[e,b===Tm?void 0:m&&b[0]===Tm?[]:b,h];b=e,l?l(t,3,n):t(...n)}finally{Am=n}}}else u.run()};return s&&s(y),u=new Qh(d),u.scheduler=i?()=>i(y,!1):y,h=e=>function(e,t=!1,n=Am){if(n){let t=Em.get(n);t||Em.set(n,t=[]),t.push(e)}}(e,!1,u),p=u.onStop=()=>{const 
e=Em.get(u);if(e){if(l)l(e,4);else for(const t of e)t();Em.delete(u)}},t?r?y(!0):b=u.run():i?i(y.bind(null,!0),!0):u.run(),v.pause=u.pause.bind(u),v.resume=u.resume.bind(u),v.stop=v,v}(e,t,s);return ry&&(c?c.push(p):l&&p()),p}function sg(e,t,n){const r=this.proxy,a=Jp(e)?e.includes(".")?lg(r,e):()=>r[e]:e.bind(r,r);let o;Kp(t)?o=t:(o=t.handler,n=t);const i=ey(this),s=ig(a,o.bind(r),n);return i(),s}function lg(e,t){const n=t.split(".");return()=>{let t=e;for(let e=0;ee.__isTeleport,dg=e=>e&&(e.disabled||""===e.disabled),pg=e=>e&&(e.defer||""===e.defer),hg=e=>"undefined"!=typeof SVGElement&&e instanceof SVGElement,fg=e=>"function"==typeof MathMLElement&&e instanceof MathMLElement,mg=(e,t)=>{const n=e&&e.to;return Jp(n)?t?t(n):null:n},gg={name:"Teleport",__isTeleport:!0,process(e,t,n,r,a,o,i,s,l,c){const{mc:u,pc:d,pbc:p,o:{insert:h,querySelector:f,createText:m,createComment:g}}=c,v=dg(t.props);let{shapeFlag:b,children:y,dynamicChildren:O}=t;if(null==e){const e=t.el=m(""),c=t.anchor=m("");h(e,n,r),h(c,n,r);const d=(e,t)=>{16&b&&u(y,e,t,a,o,i,s,l)},p=()=>{const e=t.target=mg(t.props,f),n=Og(e,t,m,h);e&&("svg"!==i&&hg(e)?i="svg":"mathml"!==i&&fg(e)&&(i="mathml"),a&&a.isCE&&(a.ce._teleportTargets||(a.ce._teleportTargets=new Set)).add(e),v||(d(e,n),yg(t,!1)))};v&&(d(n,c),yg(t,!0)),pg(t.props)?(t.el.__isMounted=!1,db((()=>{p(),delete t.el.__isMounted}),o)):p()}else{if(pg(t.props)&&!1===e.el.__isMounted)return void db((()=>{gg.process(e,t,n,r,a,o,i,s,l,c)}),o);t.el=e.el,t.targetStart=e.targetStart;const u=t.anchor=e.anchor,h=t.target=e.target,m=t.targetAnchor=e.targetAnchor,g=dg(e.props),b=g?n:h,y=g?u:m;if("svg"===i||hg(h)?i="svg":("mathml"===i||fg(h))&&(i="mathml"),O?(p(e.dynamicChildren,O,b,a,o,i,s),mb(e,t,!0)):l||d(e,t,b,y,a,o,i,s,!1),v)g?t.props&&e.props&&t.props.to!==e.props.to&&(t.props.to=e.props.to):vg(t,n,u,c,1);else if((t.props&&t.props.to)!==(e.props&&e.props.to)){const e=t.target=mg(t.props,f);e&&vg(t,e,null,c,0)}else 
g&&vg(t,h,m,c,1);yg(t,v)}},remove(e,t,n,{um:r,o:{remove:a}},o){const{shapeFlag:i,children:s,anchor:l,targetStart:c,targetAnchor:u,target:d,props:p}=e;if(d&&(a(c),a(u)),o&&a(l),16&i){const e=o||!dg(p);for(let a=0;a{const t=e.subTree;return t.component?_g(t.component):t};function Tg(e){let t=e[0];if(e.length>1)for(const n of e)if(n.type!==xb){t=n;break}return t}const Eg={name:"BaseTransition",props:Sg,setup(e,{slots:t}){const n=Yb(),r=function(){const e={isMounted:!1,isLeaving:!1,isUnmounting:!1,leavingVNodes:new Map};return Gg((()=>{e.isMounted=!0})),Jg((()=>{e.isUnmounting=!0})),e}();return()=>{const a=t.default&&Ig(t.default(),!0);if(!a||!a.length)return;const o=Tg(a),i=om(e),{mode:s}=i;if(r.isLeaving)return $g(o);const l=Pg(o);if(!l)return $g(o);let c=Cg(l,i,r,n,(e=>c=e));l.type!==xb&&Dg(l,c);let u=n.subTree&&Pg(n.subTree);if(u&&u.type!==xb&&!Ib(u,l)&&_g(n).type!==xb){let e=Cg(u,i,r,n);if(Dg(u,e),"out-in"===s&&l.type!==xb)return r.isLeaving=!0,e.afterLeave=()=>{r.isLeaving=!1,8&n.job.flags||n.update(),delete e.afterLeave,u=void 0},$g(o);"in-out"===s&&l.type!==xb?e.delayLeave=(e,t,n)=>{Ag(r,u)[String(u.key)]=u,e[wg]=()=>{t(),e[wg]=void 0,delete c.delayedLeave,u=void 0},c.delayedLeave=()=>{n(),delete c.delayedLeave,u=void 0}}:u=void 0}else u&&(u=void 0);return o}}};function Ag(e,t){const{leavingVNodes:n}=e;let r=n.get(t.type);return r||(r=Object.create(null),n.set(t.type,r)),r}function Cg(e,t,n,r,a){const{appear:o,mode:i,persisted:s=!1,onBeforeEnter:l,onEnter:c,onAfterEnter:u,onEnterCancelled:d,onBeforeLeave:p,onLeave:h,onAfterLeave:f,onLeaveCancelled:m,onBeforeAppear:g,onAppear:v,onAfterAppear:b,onAppearCancelled:y}=t,O=String(e.key),w=Ag(n,e),x=(e,t)=>{e&&Pm(e,r,9,t)},k=(e,t)=>{const n=t[1];x(e,t),Wp(e)?e.every((e=>e.length<=1))&&n():e.length<=1&&n()},S={mode:i,persisted:s,beforeEnter(t){let r=l;if(!n.isMounted){if(!o)return;r=g||l}t[wg]&&t[wg](!0);const a=w[O];a&&Ib(e,a)&&a.el[wg]&&a.el[wg](),x(r,[t])},enter(e){let 
t=c,r=u,a=d;if(!n.isMounted){if(!o)return;t=v||c,r=b||u,a=y||d}let i=!1;const s=e[xg]=t=>{i||(i=!0,x(t?a:r,[e]),S.delayedLeave&&S.delayedLeave(),e[xg]=void 0)};t?k(t,[e,s]):s()},leave(t,r){const a=String(e.key);if(t[xg]&&t[xg](!0),n.isUnmounting)return r();x(p,[t]);let o=!1;const i=t[wg]=n=>{o||(o=!0,r(),x(n?m:f,[t]),t[wg]=void 0,w[a]===e&&delete w[a])};w[a]=e,h?k(h,[t,i]):i()},clone(e){const o=Cg(e,t,n,r,a);return a&&a(o),o}};return S}function $g(e){if(Fg(e))return(e=jb(e)).children=null,e}function Pg(e){if(!Fg(e))return ug(e.type)&&e.children?Tg(e.children):e;if(e.component)return e.component.subTree;const{shapeFlag:t,children:n}=e;if(n){if(16&t)return n[0];if(32&t&&Kp(n.default))return n.default()}}function Dg(e,t){6&e.shapeFlag&&e.component?(e.transition=t,Dg(e.component.subTree,t)):128&e.shapeFlag?(e.ssContent.transition=t.clone(e.ssContent),e.ssFallback.transition=t.clone(e.ssFallback)):e.transition=t}function Ig(e,t=!1,n){let r=[],a=0;for(let o=0;o1)for(let e=0;en.value,set:e=>n.value=e})}return n}const Bg=new WeakMap;function jg(e,t,n,r,a=!1){if(Wp(e))return void e.forEach(((e,o)=>jg(e,t&&(Wp(t)?t[o]:t),n,r,a)));if(zg(r)&&!a)return void(512&r.shapeFlag&&r.type.__asyncResolved&&r.component.subTree.component&&jg(e,t,n,r.component.subTree));const o=4&r.shapeFlag?ly(r.component):r.el,i=a?null:o,{i:s,r:l}=e,c=t&&t.r,u=s.refs===Bp?s.refs={}:s.refs,d=s.setupState,p=om(d),h=d===Bp?zp:e=>qp(p,e);if(null!=c&&c!==l)if(Ug(t),Jp(c))u[c]=null,h(c)&&(d[c]=null);else if(cm(c)){c.value=null;const e=t;e.k&&(u[e.k]=null)}if(Kp(l))$m(l,s,12,[i,u]);else{const t=Jp(l),r=cm(l);if(t||r){const s=()=>{if(e.f){const n=t?h(l)?d[l]:u[l]:l.value;if(a)Wp(n)&&Hp(n,o);else if(Wp(n))n.includes(o)||n.push(o);else if(t)u[l]=[o],h(l)&&(d[l]=u[l]);else{const t=[o];l.value=t,e.k&&(u[e.k]=t)}}else t?(u[l]=i,h(l)&&(d[l]=i)):r&&(l.value=i,e.k&&(u[e.k]=i))};if(i){const t=()=>{s(),Bg.delete(e)};t.id=-1,Bg.set(e,t),db(t,n)}else Ug(e),s()}}}function Ug(e){const 
t=Bg.get(e);t&&(t.flags|=8,Bg.delete(e))}Oh().requestIdleCallback,Oh().cancelIdleCallback;const zg=e=>!!e.type.__asyncLoader,Fg=e=>e.type.__isKeepAlive;function Zg(e,t){Hg(e,"a",t)}function Qg(e,t){Hg(e,"da",t)}function Hg(e,t,n=Gb){const r=e.__wdc||(e.__wdc=()=>{let t=n;for(;t;){if(t.isDeactivated)return;t=t.parent}return e()});if(qg(t,r,n),n){let e=n.parent;for(;e&&e.parent;)Fg(e.parent.vnode)&&Vg(r,t,n,e),e=e.parent}}function Vg(e,t,n,r){const a=qg(t,e,r,!0);ev((()=>{Hp(r[t],a)}),n)}function qg(e,t,n=Gb,r=!1){if(n){const a=n[e]||(n[e]=[]),o=t.__weh||(t.__weh=(...r)=>{of();const a=ey(n),o=Pm(t,n,e,r);return a(),sf(),o});return r?a.unshift(o):a.push(o),o}}const Wg=e=>(t,n=Gb)=>{ry&&"sp"!==e||qg(e,((...e)=>t(...e)),n)},Xg=Wg("bm"),Gg=Wg("m"),Yg=Wg("bu"),Kg=Wg("u"),Jg=Wg("bum"),ev=Wg("um"),tv=Wg("sp"),nv=Wg("rtg"),rv=Wg("rtc");function av(e,t=Gb){qg("ec",e,t)}const ov="components";function iv(e,t){return cv(ov,e,!0,t)||e}const sv=Symbol.for("v-ndc");function lv(e){return Jp(e)?cv(ov,e,!1)||e:e||sv}function cv(e,t,n=!0,r=!1){const a=Wm||Gb;if(a){const n=a.type;{const e=function(e,t=!0){return Kp(e)?e.displayName||e.name:e.name||t&&e.__name}(n,!1);if(e&&(e===t||e===uh(t)||e===hh(uh(t))))return n}const o=uv(a[e]||n[e],t)||uv(a.appContext[e],t);return!o&&r?n:o}}function uv(e,t){return e&&(e[t]||e[uh(t)]||e[hh(uh(t))])}function dv(e,t,n,r){let a;const o=n,i=Wp(e);if(i||Jp(e)){let n=!1,r=!1;i&&tm(e)&&(n=!rm(e),r=nm(e),e=Of(e)),a=new Array(e.length);for(let i=0,s=e.length;it(e,n,void 0,o)));else{const n=Object.keys(e);a=new Array(n.length);for(let r=0,i=n.length;r{const t=r.fn(...e);return t&&(t.key=r.key),t}:r.fn)}return e}function hv(e,t,n={},r,a){if(Wm.ce||Wm.parent&&zg(Wm.parent)&&Wm.parent.ce){const e=Object.keys(n).length>0;return"default"!==t&&(n.name=t),Tb(),Pb(Ob,null,[Lb("slot",n,r&&r())],e?-2:64)}let o=e[t];o&&o._c&&(o._d=!1),Tb();const 
i=o&&fv(o(n)),s=n.key||i&&i.key,l=Pb(Ob,{key:(s&&!eh(s)?s:`_${t}`)+(!i&&r?"_fb":"")},i||(r?r():[]),i&&1===e._?64:-2);return!a&&l.scopeId&&(l.slotScopeIds=[l.scopeId+"-s"]),o&&o._c&&(o._d=!0),l}function fv(e){return e.some((e=>!Db(e)||e.type!==xb&&!(e.type===Ob&&!fv(e.children))))?e:null}function mv(e,t){const n={};for(const t in e)n[fh(t)]=e[t];return n}const gv=e=>e?ny(e)?ly(e):gv(e.parent):null,vv=Qp(Object.create(null),{$:e=>e,$el:e=>e.vnode.el,$data:e=>e.data,$props:e=>e.props,$attrs:e=>e.attrs,$slots:e=>e.slots,$refs:e=>e.refs,$parent:e=>gv(e.parent),$root:e=>gv(e.root),$host:e=>e.ce,$emit:e=>e.emit,$options:e=>Cv(e),$forceUpdate:e=>e.f||(e.f=()=>{zm(e.update)}),$nextTick:e=>e.n||(e.n=Um.bind(e.proxy)),$watch:e=>sg.bind(e)}),bv=(e,t)=>e!==Bp&&!e.__isScriptSetup&&qp(e,t),yv={get({_:e},t){if("__v_skip"===t)return!0;const{ctx:n,setupState:r,data:a,props:o,accessCache:i,type:s,appContext:l}=e;if("$"!==t[0]){const e=i[t];if(void 0!==e)switch(e){case 1:return r[t];case 2:return a[t];case 4:return n[t];case 3:return o[t]}else{if(bv(r,t))return i[t]=1,r[t];if(a!==Bp&&qp(a,t))return i[t]=2,a[t];if(qp(o,t))return i[t]=3,o[t];if(n!==Bp&&qp(n,t))return i[t]=4,n[t];Tv&&(i[t]=0)}}const c=vv[t];let u,d;return c?("$attrs"===t&&vf(e.attrs,0,""),c(e)):(u=s.__cssModules)&&(u=u[t])?u:n!==Bp&&qp(n,t)?(i[t]=4,n[t]):(d=l.config.globalProperties,qp(d,t)?d[t]:void 0)},set({_:e},t,n){const{data:r,setupState:a,ctx:o}=e;return bv(a,t)?(a[t]=n,!0):r!==Bp&&qp(r,t)?(r[t]=n,!0):!(qp(e.props,t)||"$"===t[0]&&t.slice(1)in e||(o[t]=n,0))},has({_:{data:e,setupState:t,accessCache:n,ctx:r,appContext:a,props:o,type:i}},s){let l;return!!(n[s]||e!==Bp&&"$"!==s[0]&&qp(e,s)||bv(t,s)||qp(o,s)||qp(r,s)||qp(vv,s)||qp(a.config.globalProperties,s)||(l=i.__cssModules)&&l[s])},defineProperty(e,t,n){return null!=n.get?e._.accessCache[t]=0:qp(n,"value")&&this.set(e,t,n.value,null),Reflect.defineProperty(e,t,n)}};function Ov(){return xv().slots}function wv(){return xv().attrs}function xv(e){const t=Yb();return 
t.setupContext||(t.setupContext=sy(t))}function kv(e){return Wp(e)?e.reduce(((e,t)=>(e[t]=null,e)),{}):e}function Sv(e,t){const n=kv(e);for(const e in t){if(e.startsWith("__skip"))continue;let r=n[e];r?Wp(r)||Kp(r)?r=n[e]={type:r,default:t[e]}:r.default=t[e]:null===r&&(r=n[e]={default:t[e]}),r&&t[`__skip_${e}`]&&(r.skipFactory=!0)}return n}function _v(e,t){return e&&t?Wp(e)&&Wp(t)?e.concat(t):Qp({},kv(e),kv(t)):e||t}let Tv=!0;function Ev(e,t,n){Pm(Wp(e)?e.map((e=>e.bind(t.proxy))):e.bind(t.proxy),t,n)}function Av(e,t,n,r){let a=r.includes(".")?lg(n,r):()=>n[r];if(Jp(e)){const n=t[e];Kp(n)&&og(a,n)}else if(Kp(e))og(a,e.bind(n));else if(th(e))if(Wp(e))e.forEach((e=>Av(e,t,n,r)));else{const r=Kp(e.handler)?e.handler.bind(n):t[e.handler];Kp(r)&&og(a,r,e)}}function Cv(e){const t=e.type,{mixins:n,extends:r}=t,{mixins:a,optionsCache:o,config:{optionMergeStrategies:i}}=e.appContext,s=o.get(t);let l;return s?l=s:a.length||n||r?(l={},a.length&&a.forEach((e=>$v(l,e,i,!0))),$v(l,t,i)):l=t,th(t)&&o.set(t,l),l}function $v(e,t,n,r=!1){const{mixins:a,extends:o}=t;o&&$v(e,o,n,!0),a&&a.forEach((t=>$v(e,t,n,!0)));for(const a in t)if(r&&"expose"===a);else{const r=Pv[a]||n&&n[a];e[a]=r?r(e[a],t[a]):t[a]}return e}const Pv={data:Dv,props:Rv,emits:Rv,methods:Nv,computed:Nv,beforeCreate:Mv,created:Mv,beforeMount:Mv,mounted:Mv,beforeUpdate:Mv,updated:Mv,beforeDestroy:Mv,beforeUnmount:Mv,destroyed:Mv,unmounted:Mv,activated:Mv,deactivated:Mv,errorCaptured:Mv,serverPrefetch:Mv,components:Nv,directives:Nv,watch:function(e,t){if(!e)return t;if(!t)return e;const n=Qp(Object.create(null),e);for(const r in t)n[r]=Mv(e[r],t[r]);return n},provide:Dv,inject:function(e,t){return Nv(Iv(e),Iv(t))}};function Dv(e,t){return t?e?function(){return Qp(Kp(e)?e.call(this,this):e,Kp(t)?t.call(this,this):t)}:t:e}function Iv(e){if(Wp(e)){const t={};for(let 
n=0;n(a.has(e)||(e&&Kp(e.install)?(a.add(e),e.install(s,...t)):Kp(e)&&(a.add(e),e(s,...t))),s),mixin:e=>(r.mixins.includes(e)||r.mixins.push(e),s),component:(e,t)=>t?(r.components[e]=t,s):r.components[e],directive:(e,t)=>t?(r.directives[e]=t,s):r.directives[e],mount(a,o,l){if(!i){const o=s._ceVNode||Lb(t,n);return o.appContext=r,!0===l?l="svg":!1===l&&(l=void 0),e(o,a,l),i=!0,s._container=a,a.__vue_app__=s,ly(o.component)}},onUnmount(e){o.push(e)},unmount(){i&&(Pm(o,s._instance,16),e(null,s._container),delete s._container.__vue_app__)},provide:(e,t)=>(r.provides[e]=t,s),runWithContext(e){const t=Uv;Uv=s;try{return e()}finally{Uv=t}}};return s}}let Uv=null;function zv(e,t,n=Bp){const r=Yb(),a=uh(t),o=ph(t),i=Fv(e,a),s=ym(((i,s)=>{let l,c,u=Bp;return ig((()=>{const t=e[a];mh(l,t)&&(l=t,s())}),null,{flush:"sync"}),{get:()=>(i(),n.get?n.get(l):l),set(e){const i=n.set?n.set(e):e;if(!(mh(i,l)||u!==Bp&&mh(e,u)))return;const d=r.vnode.props;d&&(t in d||a in d||o in d)&&(`onUpdate:${t}`in d||`onUpdate:${a}`in d||`onUpdate:${o}`in d)||(l=e,s()),r.emit(`update:${t}`,i),mh(e,i)&&mh(e,u)&&!mh(i,c)&&s(),u=e,c=i}}}));return s[Symbol.iterator]=()=>{let e=0;return{next:()=>e<2?{value:e++?i||Bp:s,done:!1}:{done:!0}}},s}const Fv=(e,t)=>"modelValue"===t||"model-value"===t?e.modelModifiers:e[`${t}Modifiers`]||e[`${uh(t)}Modifiers`]||e[`${ph(t)}Modifiers`];function Zv(e,t,...n){if(e.isUnmounted)return;const r=e.vnode.props||Bp;let a=n;const o=t.startsWith("update:"),i=o&&Fv(r,t.slice(7));let s;i&&(i.trim&&(a=n.map((e=>Jp(e)?e.trim():e))),i.number&&(a=n.map(bh)));let l=r[s=fh(t)]||r[s=fh(uh(t))];!l&&o&&(l=r[s=fh(ph(t))]),l&&Pm(l,e,6,a);const c=r[s+"Once"];if(c){if(e.emitted){if(e.emitted[s])return}else e.emitted={};e.emitted[s]=!0,Pm(c,e,6,a)}}const Qv=new WeakMap;function Hv(e,t,n=!1){const r=n?Qv:t.emitsCache,a=r.get(e);if(void 0!==a)return a;const o=e.emits;let i={},s=!1;if(!Kp(e)){const r=e=>{const 
n=Hv(e,t,!0);n&&(s=!0,Qp(i,n))};!n&&t.mixins.length&&t.mixins.forEach(r),e.extends&&r(e.extends),e.mixins&&e.mixins.forEach(r)}return o||s?(Wp(o)?o.forEach((e=>i[e]=null)):Qp(i,o),th(e)&&r.set(e,i),i):(th(e)&&r.set(e,null),null)}function Vv(e,t){return!(!e||!Fp(t))&&(t=t.slice(2).replace(/Once$/,""),qp(e,t[0].toLowerCase()+t.slice(1))||qp(e,ph(t))||qp(e,t))}function qv(e){const{type:t,vnode:n,proxy:r,withProxy:a,propsOptions:[o],slots:i,attrs:s,emit:l,render:c,renderCache:u,props:d,data:p,setupState:h,ctx:f,inheritAttrs:m}=e,g=Gm(e);let v,b;try{if(4&n.shapeFlag){const e=a||r,t=e;v=Zb(c.call(t,e,u,d,h,p,f)),b=s}else{const e=t;v=Zb(e.length>1?e(d,{attrs:s,slots:i,emit:l}):e(d,null)),b=t.props?s:Wv(s)}}catch(t){Sb.length=0,Dm(t,e,1),v=Lb(xb)}let y=v;if(b&&!1!==m){const e=Object.keys(b),{shapeFlag:t}=y;e.length&&7&t&&(o&&e.some(Zp)&&(b=Xv(b,o)),y=jb(y,b,!1,!0))}return n.dirs&&(y=jb(y,null,!1,!0),y.dirs=y.dirs?y.dirs.concat(n.dirs):n.dirs),n.transition&&Dg(y,n.transition),v=y,Gm(g),v}const Wv=e=>{let t;for(const n in e)("class"===n||"style"===n||Fp(n))&&((t||(t={}))[n]=e[n]);return t},Xv=(e,t)=>{const n={};for(const r in e)Zp(r)&&r.slice(9)in t||(n[r]=e[r]);return n};function Gv(e,t,n){const r=Object.keys(t);if(r.length!==Object.keys(e).length)return!0;for(let a=0;aObject.create(Yv),Jv=e=>Object.getPrototypeOf(e)===Yv;function eb(e,t,n,r){const[a,o]=e.propsOptions;let i,s=!1;if(t)for(let l in t){if(sh(l))continue;const c=t[l];let u;a&&qp(a,u=uh(l))?o&&o.includes(u)?(i||(i={}))[u]=c:n[u]=c:Vv(e.emitsOptions,l)||l in r&&c===r[l]||(r[l]=c,s=!0)}if(o){const t=om(n),r=i||Bp;for(let i=0;i{l=!0;const[n,r]=rb(e,t,!0);Qp(i,n),r&&s.push(...r)};!n&&t.mixins.length&&t.mixins.forEach(r),e.extends&&r(e.extends),e.mixins&&e.mixins.forEach(r)}if(!o&&!l)return th(e)&&r.set(e,jp),jp;if(Wp(o))for(let e=0;e"_"===e||"_ctx"===e||"$stable"===e,ib=e=>Wp(e)?e.map(Zb):[Zb(e)],sb=(e,t,n)=>{if(t._n)return t;const r=Ym(((...e)=>ib(t(...e))),n);return r._c=!1,r},lb=(e,t,n)=>{const r=e._ctx;for(const 
n in e){if(ob(n))continue;const a=e[n];if(Kp(a))t[n]=sb(0,a,r);else if(null!=a){const e=ib(a);t[n]=()=>e}}},cb=(e,t)=>{const n=ib(t);e.slots.default=()=>n},ub=(e,t,n)=>{for(const r in t)!n&&ob(r)||(e[r]=t[r])},db=function(e,t){t&&t.pendingBranch?Wp(e)?t.effects.push(...e):t.effects.push(e):Zm(e)};function pb(e){return function(e){Oh().__VUE__=!0;const{insert:t,remove:n,patchProp:r,createElement:a,createText:o,createComment:i,setText:s,setElementText:l,parentNode:c,nextSibling:u,setScopeId:d=Up,insertStaticContent:p}=e,h=(e,t,n,r=null,a=null,o=null,i=void 0,s=null,l=!!t.dynamicChildren)=>{if(e===t)return;e&&!Ib(e,t)&&(r=U(e),N(e,a,o,!0),e=null),-2===t.patchFlag&&(l=!1,t.dynamicChildren=null);const{type:c,ref:u,shapeFlag:d}=t;switch(c){case wb:f(e,t,n,r);break;case xb:m(e,t,n,r);break;case kb:null==e&&g(t,n,r,i);break;case Ob:_(e,t,n,r,a,o,i,s,l);break;default:1&d?b(e,t,n,r,a,o,i,s,l):6&d?T(e,t,n,r,a,o,i,s,l):(64&d||128&d)&&c.process(e,t,n,r,a,o,i,s,l,Z)}null!=u&&a?jg(u,e&&e.ref,o,t||e,!t):null==u&&e&&null!=e.ref&&jg(e.ref,null,o,e,!0)},f=(e,n,r,a)=>{if(null==e)t(n.el=o(n.children),r,a);else{const t=n.el=e.el;n.children!==e.children&&s(t,n.children)}},m=(e,n,r,a)=>{null==e?t(n.el=i(n.children||""),r,a):n.el=e.el},g=(e,t,n,r)=>{[e.el,e.anchor]=p(e.children,t,n,r,e.el,e.anchor)},v=({el:e,anchor:n},r,a)=>{let o;for(;e&&e!==n;)o=u(e),t(e,r,a),e=o;t(n,r,a)},b=(e,t,n,r,a,o,i,s,l)=>{if("svg"===t.type?i="svg":"math"===t.type&&(i="mathml"),null==e)y(t,n,r,a,o,i,s,l);else{const n=e.el&&e.el._isVueCE?e.el:null;try{n&&n._beginPatch(),x(e,t,a,o,i,s,l)}finally{n&&n._endPatch()}}},y=(e,n,o,i,s,c,u,d)=>{let p,h;const{props:f,shapeFlag:m,transition:g,dirs:v}=e;if(p=e.el=a(e.type,c,f&&f.is,f),8&m?l(p,e.children):16&m&&w(e.children,p,null,i,s,hb(e,c),u,d),v&&Jm(e,null,i,"created"),O(p,e,e.scopeId,u,i),f){for(const e in f)"value"===e||sh(e)||r(p,e,null,f[e],c,i);"value"in f&&r(p,"value",null,f.value,c),(h=f.onVnodeBeforeMount)&&qb(h,i,e)}v&&Jm(e,null,i,"beforeMount");const 
b=function(e,t){return(!e||e&&!e.pendingBranch)&&t&&!t.persisted}(s,g);b&&g.beforeEnter(p),t(p,n,o),((h=f&&f.onVnodeMounted)||b||v)&&db((()=>{h&&qb(h,i,e),b&&g.enter(p),v&&Jm(e,null,i,"mounted")}),s)},O=(e,t,n,r,a)=>{if(n&&d(e,n),r)for(let t=0;t{for(let c=l;c{const c=t.el=e.el;let{patchFlag:u,dynamicChildren:d,dirs:p}=t;u|=16&e.patchFlag;const h=e.props||Bp,f=t.props||Bp;let m;if(n&&fb(n,!1),(m=f.onVnodeBeforeUpdate)&&qb(m,n,t,e),p&&Jm(t,e,n,"beforeUpdate"),n&&fb(n,!0),(h.innerHTML&&null==f.innerHTML||h.textContent&&null==f.textContent)&&l(c,""),d?k(e.dynamicChildren,d,c,n,a,hb(t,o),i):s||P(e,t,c,null,n,a,hb(t,o),i,!1),u>0){if(16&u)S(c,h,f,n,o);else if(2&u&&h.class!==f.class&&r(c,"class",null,f.class,o),4&u&&r(c,"style",h.style,f.style,o),8&u){const e=t.dynamicProps;for(let t=0;t{m&&qb(m,n,t,e),p&&Jm(t,e,n,"updated")}),a)},k=(e,t,n,r,a,o,i)=>{for(let s=0;s{if(t!==n){if(t!==Bp)for(const i in t)sh(i)||i in n||r(e,i,t[i],null,o,a);for(const i in n){if(sh(i))continue;const s=n[i],l=t[i];s!==l&&"value"!==i&&r(e,i,l,s,o,a)}"value"in n&&r(e,"value",t.value,n.value,o)}},_=(e,n,r,a,i,s,l,c,u)=>{const d=n.el=e?e.el:o(""),p=n.anchor=e?e.anchor:o("");let{patchFlag:h,dynamicChildren:f,slotScopeIds:m}=n;m&&(c=c?c.concat(m):m),null==e?(t(d,r,a),t(p,r,a),w(n.children||[],r,p,i,s,l,c,u)):h>0&&64&h&&f&&e.dynamicChildren&&e.dynamicChildren.length===f.length?(k(e.dynamicChildren,f,r,i,s,l,c),(null!=n.key||i&&n===i.subTree)&&mb(e,n,!0)):P(e,n,r,p,i,s,l,c,u)},T=(e,t,n,r,a,o,i,s,l)=>{t.slotScopeIds=s,null==e?512&t.shapeFlag?a.ctx.activate(t,n,r,i,l):E(t,n,r,a,o,i,l):A(e,t,l)},E=(e,t,n,r,a,o,i)=>{const s=e.component=function(e,t,n){const r=e.type,a=(t?t.appContext:e.appContext)||Wb,o={uid:Xb++,vnode:e,type:r,parent:t,appContext:a,root:null,next:null,subTree:null,effect:null,update:null,job:null,scope:new 
jh(!0),render:null,proxy:null,exposed:null,exposeProxy:null,withProxy:null,provides:t?t.provides:Object.create(a.provides),ids:t?t.ids:["",0,0],accessCache:null,renderCache:[],components:null,directives:null,propsOptions:rb(r,a),emitsOptions:Hv(r,a),emit:null,emitted:null,propsDefaults:Bp,inheritAttrs:r.inheritAttrs,ctx:Bp,data:Bp,props:Bp,attrs:Bp,slots:Bp,refs:Bp,setupState:Bp,setupContext:null,suspense:n,suspenseId:n?n.pendingId:0,asyncDep:null,asyncResolved:!1,isMounted:!1,isUnmounted:!1,isDeactivated:!1,bc:null,c:null,bm:null,m:null,bu:null,u:null,um:null,bum:null,da:null,a:null,rtg:null,rtc:null,ec:null,sp:null};return o.ctx={_:o},o.root=t?t.root:o,o.emit=Zv.bind(null,o),e.ce&&e.ce(o),o}(e,r,a);if(Fg(e)&&(s.ctx.renderer=Z),function(e,t=!1,n=!1){t&&Jb(t);const{props:r,children:a}=e.vnode,o=ny(e);(function(e,t,n,r=!1){const a={},o=Kv();e.propsDefaults=Object.create(null),eb(e,t,a,o);for(const t in e.propsOptions[0])t in a||(a[t]=void 0);n?e.props=r?a:em(a,!1,Lf,Qf,Wf):e.type.props?e.props=a:e.props=o,e.attrs=o})(e,r,o,t),((e,t,n)=>{const r=e.slots=Kv();if(32&e.vnode.shapeFlag){const e=t._;e?(ub(r,t,n),n&&vh(r,"_",e,!0)):lb(t,r)}else t&&cb(e,t)})(e,a,n||t);o&&function(e,t){const n=e.type;e.accessCache=Object.create(null),e.proxy=new Proxy(e.ctx,yv);const{setup:r}=n;if(r){of();const n=e.setupContext=r.length>1?sy(e):null,a=ey(e),o=$m(r,e,0,[e.props,n]),i=nh(o);if(sf(),a(),!i&&!e.sp||zg(e)||Rg(e),i){if(o.then(ty,ty),t)return o.then((t=>{ay(e,t)})).catch((t=>{Dm(t,e,0)}));e.asyncDep=o}else ay(e,o)}else oy(e)}(e,t);t&&Jb(!1)}(s,!1,i),s.asyncDep){if(a&&a.registerDep(s,C,i),!e.el){const r=s.subTree=Lb(xb);m(null,r,t,n),e.placeholder=r.el}}else C(s,e,t,n,a,o,i)},A=(e,t,n)=>{const r=t.component=e.component;if(function(e,t,n){const{props:r,children:a,component:o}=e,{props:i,children:s,patchFlag:l}=t,c=o.emitsOptions;if(t.dirs||t.transition)return!0;if(!(n&&l>=0))return!(!a&&!s||s&&s.$stable)||r!==i&&(r?!i||Gv(r,i,c):!!i);if(1024&l)return!0;if(16&l)return 
r?Gv(r,i,c):!!i;if(8&l){const e=t.dynamicProps;for(let t=0;t{const s=()=>{if(e.isMounted){let{next:t,bu:n,u:r,parent:l,vnode:u}=e;{const n=gb(e);if(n)return t&&(t.el=u.el,$(e,t,i)),void n.asyncDep.then((()=>{e.isUnmounted||s()}))}let d,p=t;fb(e,!1),t?(t.el=u.el,$(e,t,i)):t=u,n&&gh(n),(d=t.props&&t.props.onVnodeBeforeUpdate)&&qb(d,l,t,u),fb(e,!0);const f=qv(e),m=e.subTree;e.subTree=f,h(m,f,c(m.el),U(m),e,a,o),t.el=f.el,null===p&&function({vnode:e,parent:t},n){for(;t;){const r=t.subTree;if(r.suspense&&r.suspense.activeBranch===e&&(r.el=e.el),r!==e)break;(e=t.vnode).el=n,t=t.parent}}(e,f.el),r&&db(r,a),(d=t.props&&t.props.onVnodeUpdated)&&db((()=>qb(d,l,t,u)),a)}else{let i;const{el:s,props:l}=t,{bm:c,m:u,parent:d,root:p,type:f}=e,m=zg(t);fb(e,!1),c&&gh(c),!m&&(i=l&&l.onVnodeBeforeMount)&&qb(i,d,t),fb(e,!0);{p.ce&&!1!==p.ce._def.shadowRoot&&p.ce._injectChildStyle(f);const i=e.subTree=qv(e);h(null,i,n,r,e,a,o),t.el=i.el}if(u&&db(u,a),!m&&(i=l&&l.onVnodeMounted)){const e=t;db((()=>qb(i,d,e)),a)}(256&t.shapeFlag||d&&zg(d.vnode)&&256&d.vnode.shapeFlag)&&e.a&&db(e.a,a),e.isMounted=!0,t=n=r=null}};e.scope.on();const l=e.effect=new Qh(s);e.scope.off();const u=e.update=l.run.bind(l),d=e.job=l.runIfDirty.bind(l);d.i=e,d.id=e.uid,l.scheduler=()=>zm(d),fb(e,!0),u()},$=(e,t,n)=>{t.component=e;const r=e.vnode.props;e.vnode=t,e.next=null,function(e,t,n,r){const{props:a,attrs:o,vnode:{patchFlag:i}}=e,s=om(a),[l]=e.propsOptions;let c=!1;if(!(r||i>0)||16&i){let r;eb(e,t,a,o)&&(c=!0);for(const o in s)t&&(qp(t,o)||(r=ph(o))!==o&&qp(t,r))||(l?!n||void 0===n[o]&&void 0===n[r]||(a[o]=tb(l,s,o,void 0,e,!0)):delete a[o]);if(o!==s)for(const e in o)t&&qp(t,e)||(delete o[e],c=!0)}else if(8&i){const n=e.vnode.dynamicProps;for(let r=0;r{const{vnode:r,slots:a}=e;let o=!0,i=Bp;if(32&r.shapeFlag){const e=t._;e?n&&1===e?o=!1:ub(a,t,n):(o=!t.$stable,lb(t,a)),i=t}else t&&(cb(e,t),i={default:1});if(o)for(const e in a)ob(e)||null!=i[e]||delete 
a[e]})(e,t.children,n),of(),Qm(e),sf()},P=(e,t,n,r,a,o,i,s,c=!1)=>{const u=e&&e.children,d=e?e.shapeFlag:0,p=t.children,{patchFlag:h,shapeFlag:f}=t;if(h>0){if(128&h)return void I(u,p,n,r,a,o,i,s,c);if(256&h)return void D(u,p,n,r,a,o,i,s,c)}8&f?(16&d&&j(u,a,o),p!==u&&l(n,p)):16&d?16&f?I(u,p,n,r,a,o,i,s,c):j(u,a,o,!0):(8&d&&l(n,""),16&f&&w(p,n,r,a,o,i,s,c))},D=(e,t,n,r,a,o,i,s,l)=>{t=t||jp;const c=(e=e||jp).length,u=t.length,d=Math.min(c,u);let p;for(p=0;pu?j(e,a,o,!0,!1,d):w(t,n,r,a,o,i,s,l,d)},I=(e,t,n,r,a,o,i,s,l)=>{let c=0;const u=t.length;let d=e.length-1,p=u-1;for(;c<=d&&c<=p;){const r=e[c],u=t[c]=l?Qb(t[c]):Zb(t[c]);if(!Ib(r,u))break;h(r,u,n,null,a,o,i,s,l),c++}for(;c<=d&&c<=p;){const r=e[d],c=t[p]=l?Qb(t[p]):Zb(t[p]);if(!Ib(r,c))break;h(r,c,n,null,a,o,i,s,l),d--,p--}if(c>d){if(c<=p){const e=p+1,d=ep)for(;c<=d;)N(e[c],a,o,!0),c++;else{const f=c,m=c,g=new Map;for(c=m;c<=p;c++){const e=t[c]=l?Qb(t[c]):Zb(t[c]);null!=e.key&&g.set(e.key,c)}let v,b=0;const y=p-m+1;let O=!1,w=0;const x=new Array(y);for(c=0;c=y){N(r,a,o,!0);continue}let u;if(null!=r.key)u=g.get(r.key);else for(v=m;v<=p;v++)if(0===x[v-m]&&Ib(r,t[v])){u=v;break}void 0===u?N(r,a,o,!0):(x[u-m]=c+1,u>=w?w=u:O=!0,h(r,t[u],n,null,a,o,i,s,l),b++)}const k=O?function(e){const t=e.slice(),n=[0];let r,a,o,i,s;const l=e.length;for(r=0;r>1,e[n[s]]0&&(t[r]=n[o-1]),n[o]=r)}}for(o=n.length,i=n[o-1];o-- >0;)n[o]=i,i=t[i];return n}(x):jp;for(v=k.length-1,c=y-1;c>=0;c--){const e=m+c,d=t[e],p=t[e+1],f=e+1{const{el:s,type:l,transition:c,children:u,shapeFlag:d}=e;if(6&d)M(e.component.subTree,r,a,o);else if(128&d)e.suspense.move(r,a,o);else if(64&d)l.move(e,r,a,Z);else if(l!==Ob)if(l!==kb)if(2!==o&&1&d&&c)if(0===o)c.beforeEnter(s),t(s,r,a),db((()=>c.enter(s)),i);else{const{leave:o,delayLeave:i,afterLeave:l}=c,u=()=>{e.ctx.isUnmounted?n(s):t(s,r,a)},d=()=>{s._isLeaving&&s[wg](!0),o(s,(()=>{u(),l&&l()}))};i?i(s,u,d):d()}else t(s,r,a);else v(e,r,a);else{t(s,r,a);for(let 
e=0;e{const{type:o,props:i,ref:s,children:l,dynamicChildren:c,shapeFlag:u,patchFlag:d,dirs:p,cacheIndex:h}=e;if(-2===d&&(a=!1),null!=s&&(of(),jg(s,null,n,e,!0),sf()),null!=h&&(t.renderCache[h]=void 0),256&u)return void t.ctx.deactivate(e);const f=1&u&&p,m=!zg(e);let g;if(m&&(g=i&&i.onVnodeBeforeUnmount)&&qb(g,t,e),6&u)B(e.component,n,r);else{if(128&u)return void e.suspense.unmount(n,r);f&&Jm(e,null,t,"beforeUnmount"),64&u?e.type.remove(e,t,n,Z,r):c&&!c.hasOnce&&(o!==Ob||d>0&&64&d)?j(c,t,n,!1,!0):(o===Ob&&384&d||!a&&16&u)&&j(l,t,n),r&&R(e)}(m&&(g=i&&i.onVnodeUnmounted)||f)&&db((()=>{g&&qb(g,t,e),f&&Jm(e,null,t,"unmounted")}),n)},R=e=>{const{type:t,el:r,anchor:a,transition:o}=e;if(t===Ob)return void L(r,a);if(t===kb)return void(({el:e,anchor:t})=>{let r;for(;e&&e!==t;)r=u(e),n(e),e=r;n(t)})(e);const i=()=>{n(r),o&&!o.persisted&&o.afterLeave&&o.afterLeave()};if(1&e.shapeFlag&&o&&!o.persisted){const{leave:t,delayLeave:n}=o,a=()=>t(r,i);n?n(e.el,i,a):a()}else i()},L=(e,t)=>{let r;for(;e!==t;)r=u(e),n(e),e=r;n(t)},B=(e,t,n)=>{const{bum:r,scope:a,job:o,subTree:i,um:s,m:l,a:c}=e;vb(l),vb(c),r&&gh(r),a.stop(),o&&(o.flags|=8,N(i,e,t,n)),s&&db(s,t),db((()=>{e.isUnmounted=!0}),t)},j=(e,t,n,r=!1,a=!1,o=0)=>{for(let i=o;i{if(6&e.shapeFlag)return U(e.component.subTree);if(128&e.shapeFlag)return e.suspense.next();const t=u(e.anchor||e.el),n=t&&t[cg];return n?u(n):t};let z=!1;const F=(e,t,n)=>{let r;null==e?t._vnode&&(N(t._vnode,null,null,!0),r=t._vnode.component):h(t._vnode||null,e,t,null,null,null,n),t._vnode=e,z||(z=!0,Qm(r),Hm(),z=!1)},Z={p:h,um:N,m:M,r:R,mt:E,mc:w,pc:P,pbc:k,n:U,o:e};return{render:F,hydrate:undefined,createApp:jv(F)}}(e)}function hb({type:e,props:t},n){return"svg"===n&&"foreignObject"===e||"mathml"===n&&"annotation-xml"===e&&t&&t.encoding&&t.encoding.includes("html")?void 0:n}function fb({effect:e,job:t},n){n?(e.flags|=32,t.flags|=4):(e.flags&=-33,t.flags&=-5)}function mb(e,t,n=!1){const r=e.children,a=t.children;if(Wp(r)&&Wp(a))for(let 
t=0;te.__isSuspense,Ob=Symbol.for("v-fgt"),wb=Symbol.for("v-txt"),xb=Symbol.for("v-cmt"),kb=Symbol.for("v-stc"),Sb=[];let _b=null;function Tb(e=!1){Sb.push(_b=e?null:[])}let Eb=1;function Ab(e,t=!1){Eb+=e,e<0&&_b&&t&&(_b.hasOnce=!0)}function Cb(e){return e.dynamicChildren=Eb>0?_b||jp:null,Sb.pop(),_b=Sb[Sb.length-1]||null,Eb>0&&_b&&_b.push(e),e}function $b(e,t,n,r,a,o){return Cb(Rb(e,t,n,r,a,o,!0))}function Pb(e,t,n,r,a){return Cb(Lb(e,t,n,r,a,!0))}function Db(e){return!!e&&!0===e.__v_isVNode}function Ib(e,t){return e.type===t.type&&e.key===t.key}const Mb=({key:e})=>null!=e?e:null,Nb=({ref:e,ref_key:t,ref_for:n})=>("number"==typeof e&&(e=""+e),null!=e?Jp(e)||cm(e)||Kp(e)?{i:Wm,r:e,k:t,f:!!n}:e:null);function Rb(e,t=null,n=null,r=0,a=null,o=(e===Ob?0:1),i=!1,s=!1){const l={__v_isVNode:!0,__v_skip:!0,type:e,props:t,key:t&&Mb(t),ref:t&&Nb(t),scopeId:Xm,slotScopeIds:null,children:n,component:null,suspense:null,ssContent:null,ssFallback:null,dirs:null,transition:null,el:null,anchor:null,target:null,targetStart:null,targetAnchor:null,staticCount:0,shapeFlag:o,patchFlag:r,dynamicProps:a,dynamicChildren:null,appContext:null,ctx:Wm};return s?(Hb(l,n),128&o&&e.normalize(l)):n&&(l.shapeFlag|=Jp(n)?8:16),Eb>0&&!i&&_b&&(l.patchFlag>0||6&o)&&32!==l.patchFlag&&_b.push(l),l}const Lb=function(e,t=null,n=null,r=0,a=null,o=!1){if(e&&e!==sv||(e=xb),Db(e)){const r=jb(e,t,!0);return n&&Hb(r,n),Eb>0&&!o&&_b&&(6&r.shapeFlag?_b[_b.indexOf(e)]=r:_b.push(r)),r.patchFlag=-2,r}var i;if(Kp(i=e)&&"__vccOpts"in i&&(e=e.__vccOpts),t){t=Bb(t);let{class:e,style:n}=t;e&&!Jp(e)&&(t.class=Th(e)),th(n)&&(am(n)&&!Wp(n)&&(n=Qp({},n)),t.style=wh(n))}return Rb(e,t,n,r,a,Jp(e)?1:yb(e)?128:ug(e)?64:th(e)?4:Kp(e)?2:0,o,!0)};function Bb(e){return e?am(e)||Jv(e)?Qp({},e):e:null}function 
jb(e,t,n=!1,r=!1){const{props:a,ref:o,patchFlag:i,children:s,transition:l}=e,c=t?Vb(a||{},t):a,u={__v_isVNode:!0,__v_skip:!0,type:e.type,props:c,key:c&&Mb(c),ref:t&&t.ref?n&&o?Wp(o)?o.concat(Nb(t)):[o,Nb(t)]:Nb(t):o,scopeId:e.scopeId,slotScopeIds:e.slotScopeIds,children:s,target:e.target,targetStart:e.targetStart,targetAnchor:e.targetAnchor,staticCount:e.staticCount,shapeFlag:e.shapeFlag,patchFlag:t&&e.type!==Ob?-1===i?16:16|i:i,dynamicProps:e.dynamicProps,dynamicChildren:e.dynamicChildren,appContext:e.appContext,dirs:e.dirs,transition:l,component:e.component,suspense:e.suspense,ssContent:e.ssContent&&jb(e.ssContent),ssFallback:e.ssFallback&&jb(e.ssFallback),placeholder:e.placeholder,el:e.el,anchor:e.anchor,ctx:e.ctx,ce:e.ce};return l&&r&&Dg(u,l.clone(u)),u}function Ub(e=" ",t=0){return Lb(wb,null,e,t)}function zb(e,t){const n=Lb(kb,null,e);return n.staticCount=t,n}function Fb(e="",t=!1){return t?(Tb(),Pb(xb,null,e)):Lb(xb,null,e)}function Zb(e){return null==e||"boolean"==typeof e?Lb(xb):Wp(e)?Lb(Ob,null,e.slice()):Db(e)?Qb(e):Lb(wb,null,String(e))}function Qb(e){return null===e.el&&-1!==e.patchFlag||e.memo?e:jb(e)}function Hb(e,t){let n=0;const{shapeFlag:r}=e;if(null==t)t=null;else if(Wp(t))n=16;else if("object"==typeof t){if(65&r){const n=t.default;return void(n&&(n._c&&(n._d=!1),Hb(e,n()),n._c&&(n._d=!0)))}{n=32;const r=t._;r||Jv(t)?3===r&&Wm&&(1===Wm.slots._?t._=1:(t._=2,e.patchFlag|=1024)):t._ctx=Wm}}else Kp(t)?(t={default:t,_ctx:Wm},n=32):(t=String(t),64&r?(n=16,t=[Ub(t)]):n=8);e.children=t,e.shapeFlag|=n}function Vb(...e){const t={};for(let n=0;nGb||Wm;let Kb,Jb;{const e=Oh(),t=(t,n)=>{let r;return(r=e[t])||(r=e[t]=[]),r.push(n),e=>{r.length>1?r.forEach((t=>t(e))):r[0](e)}};Kb=t("__VUE_INSTANCE_SETTERS__",(e=>Gb=e)),Jb=t("__VUE_SSR_SETTERS__",(e=>ry=e))}const ey=e=>{const t=Gb;return Kb(e),e.scope.on(),()=>{e.scope.off(),Kb(t)}},ty=()=>{Gb&&Gb.scope.off(),Kb(null)};function ny(e){return 4&e.vnode.shapeFlag}let ry=!1;function 
ay(e,t,n){Kp(t)?e.type.__ssrInlineRender?e.ssrRender=t:e.render=t:th(t)&&(e.setupState=vm(t)),oy(e)}function oy(e,t,n){const r=e.type;e.render||(e.render=r.render||Up);{const t=ey(e);of();try{!function(e){const t=Cv(e),n=e.proxy,r=e.ctx;Tv=!1,t.beforeCreate&&Ev(t.beforeCreate,e,"bc");const{data:a,computed:o,methods:i,watch:s,provide:l,inject:c,created:u,beforeMount:d,mounted:p,beforeUpdate:h,updated:f,activated:m,deactivated:g,beforeDestroy:v,beforeUnmount:b,destroyed:y,unmounted:O,render:w,renderTracked:x,renderTriggered:k,errorCaptured:S,serverPrefetch:_,expose:T,inheritAttrs:E,components:A,directives:C,filters:$}=t;if(c&&function(e,t){Wp(e)&&(e=Iv(e));for(const n in e){const r=e[n];let a;a=th(r)?"default"in r?tg(r.from||n,r.default,!0):tg(r.from||n):tg(r),cm(a)?Object.defineProperty(t,n,{enumerable:!0,configurable:!0,get:()=>a.value,set:e=>a.value=e}):t[n]=a}}(c,r),i)for(const e in i){const t=i[e];Kp(t)&&(r[e]=t.bind(n))}if(a){const t=a.call(n,n);th(t)&&(e.data=Yf(t))}if(Tv=!0,o)for(const e in o){const t=o[e],a=Kp(t)?t.bind(n,n):Kp(t.get)?t.get.bind(n,n):Up,i=!Kp(t)&&Kp(t.set)?t.set.bind(n):Up,s=cy({get:a,set:i});Object.defineProperty(r,e,{enumerable:!0,configurable:!0,get:()=>s.value,set:e=>s.value=e})}if(s)for(const e in s)Av(s[e],r,n,e);if(l){const e=Kp(l)?l.call(n):l;Reflect.ownKeys(e).forEach((t=>{eg(t,e[t])}))}function P(e,t){Wp(t)?t.forEach((t=>e(t.bind(n)))):t&&e(t.bind(n))}if(u&&Ev(u,e,"c"),P(Xg,d),P(Gg,p),P(Yg,h),P(Kg,f),P(Zg,m),P(Qg,g),P(av,S),P(rv,x),P(nv,k),P(Jg,b),P(ev,O),P(tv,_),Wp(T))if(T.length){const t=e.exposed||(e.exposed={});T.forEach((e=>{Object.defineProperty(t,e,{get:()=>n[e],set:t=>n[e]=t,enumerable:!0})}))}else e.exposed||(e.exposed={});w&&e.render===Up&&(e.render=w),null!=E&&(e.inheritAttrs=E),A&&(e.components=A),C&&(e.directives=C),_&&Rg(e)}(e)}finally{sf(),t()}}}const iy={get:(e,t)=>(vf(e,0,""),e[t])};function sy(e){return{attrs:new Proxy(e.attrs,iy),slots:e.slots,emit:e.emit,expose:t=>{e.exposed=t||{}}}}function ly(e){return 
e.exposed?e.exposeProxy||(e.exposeProxy=new Proxy(vm(im(e.exposed)),{get:(t,n)=>n in t?t[n]:n in vv?vv[n](e):void 0,has:(e,t)=>t in e||t in vv})):e.proxy}const cy=(e,t)=>{const n=function(e,t,n=!1){let r,a;return Kp(e)?r=e:(r=e.get,a=e.set),new _m(r,a,n)}(e,0,ry);return n};function uy(e,t,n){try{Ab(-1);const r=arguments.length;return 2===r?th(t)&&!Wp(t)?Db(t)?Lb(e,null,[t]):Lb(e,t):Lb(e,null,t):(r>3?n=Array.prototype.slice.call(arguments,2):3===r&&Db(n)&&(n=[n]),Lb(e,t,n))}finally{Ab(1)}}const dy="3.5.26";let py;const hy="undefined"!=typeof window&&window.trustedTypes;if(hy)try{py=hy.createPolicy("vue",{createHTML:e=>e})}catch(hw){}const fy=py?e=>py.createHTML(e):e=>e,my="undefined"!=typeof document?document:null,gy=my&&my.createElement("template"),vy={insert:(e,t,n)=>{t.insertBefore(e,n||null)},remove:e=>{const t=e.parentNode;t&&t.removeChild(e)},createElement:(e,t,n,r)=>{const a="svg"===t?my.createElementNS("http://www.w3.org/2000/svg",e):"mathml"===t?my.createElementNS("http://www.w3.org/1998/Math/MathML",e):n?my.createElement(e,{is:n}):my.createElement(e);return"select"===e&&r&&null!=r.multiple&&a.setAttribute("multiple",r.multiple),a},createText:e=>my.createTextNode(e),createComment:e=>my.createComment(e),setText:(e,t)=>{e.nodeValue=t},setElementText:(e,t)=>{e.textContent=t},parentNode:e=>e.parentNode,nextSibling:e=>e.nextSibling,querySelector:e=>my.querySelector(e),setScopeId(e,t){e.setAttribute(t,"")},insertStaticContent(e,t,n,r,a,o){const i=n?n.previousSibling:t.lastChild;if(a&&(a===o||a.nextSibling))for(;t.insertBefore(a.cloneNode(!0),n),a!==o&&(a=a.nextSibling););else{gy.innerHTML=fy("svg"===r?`${e}`:"mathml"===r?`${e}`:e);const a=gy.content;if("svg"===r||"mathml"===r){const 
e=a.firstChild;for(;e.firstChild;)a.appendChild(e.firstChild);a.removeChild(e)}t.insertBefore(a,n)}return[i?i.nextSibling:t.firstChild,n?n.previousSibling:t.lastChild]}},by="transition",yy="animation",Oy=Symbol("_vtc"),wy={name:String,type:String,css:{type:Boolean,default:!0},duration:[String,Number,Object],enterFromClass:String,enterActiveClass:String,enterToClass:String,appearFromClass:String,appearActiveClass:String,appearToClass:String,leaveFromClass:String,leaveActiveClass:String,leaveToClass:String},xy=Qp({},Sg,wy),ky=(e=>(e.displayName="Transition",e.props=xy,e))(((e,{slots:t})=>uy(Eg,function(e){const t={};for(const n in e)n in wy||(t[n]=e[n]);if(!1===e.css)return t;const{name:n="v",type:r,duration:a,enterFromClass:o=`${n}-enter-from`,enterActiveClass:i=`${n}-enter-active`,enterToClass:s=`${n}-enter-to`,appearFromClass:l=o,appearActiveClass:c=i,appearToClass:u=s,leaveFromClass:d=`${n}-leave-from`,leaveActiveClass:p=`${n}-leave-active`,leaveToClass:h=`${n}-leave-to`}=e,f=function(e){if(null==e)return null;if(th(e))return[Ty(e.enter),Ty(e.leave)];{const t=Ty(e);return[t,t]}}(a),m=f&&f[0],g=f&&f[1],{onBeforeEnter:v,onEnter:b,onEnterCancelled:y,onLeave:O,onLeaveCancelled:w,onBeforeAppear:x=v,onAppear:k=b,onAppearCancelled:S=y}=t,_=(e,t,n,r)=>{e._enterCancelled=r,Ay(e,t?u:s),Ay(e,t?c:i),n&&n()},T=(e,t)=>{e._isLeaving=!1,Ay(e,d),Ay(e,h),Ay(e,p),t&&t()},E=e=>(t,n)=>{const a=e?k:b,i=()=>_(t,e,n);Sy(a,[t,i]),Cy((()=>{Ay(t,e?l:o),Ey(t,e?u:s),_y(a)||Py(t,r,m,i)}))};return Qp(t,{onBeforeEnter(e){Sy(v,[e]),Ey(e,o),Ey(e,i)},onBeforeAppear(e){Sy(x,[e]),Ey(e,l),Ey(e,c)},onEnter:E(!1),onAppear:E(!0),onLeave(e,t){e._isLeaving=!0;const n=()=>T(e,t);Ey(e,d),e._enterCancelled?(Ey(e,p),My(e)):(My(e),Ey(e,p)),Cy((()=>{e._isLeaving&&(Ay(e,d),Ey(e,h),_y(O)||Py(e,r,g,n))})),Sy(O,[e,n])},onEnterCancelled(e){_(e,!1,void 0,!0),Sy(y,[e])},onAppearCancelled(e){_(e,!0,void 
0,!0),Sy(S,[e])},onLeaveCancelled(e){T(e),Sy(w,[e])}})}(e),t))),Sy=(e,t=[])=>{Wp(e)?e.forEach((e=>e(...t))):e&&e(...t)},_y=e=>!!e&&(Wp(e)?e.some((e=>e.length>1)):e.length>1);function Ty(e){return(e=>{const t=Jp(e)?Number(e):NaN;return isNaN(t)?e:t})(e)}function Ey(e,t){t.split(/\s+/).forEach((t=>t&&e.classList.add(t))),(e[Oy]||(e[Oy]=new Set)).add(t)}function Ay(e,t){t.split(/\s+/).forEach((t=>t&&e.classList.remove(t)));const n=e[Oy];n&&(n.delete(t),n.size||(e[Oy]=void 0))}function Cy(e){requestAnimationFrame((()=>{requestAnimationFrame(e)}))}let $y=0;function Py(e,t,n,r){const a=e._endId=++$y,o=()=>{a===e._endId&&r()};if(null!=n)return setTimeout(o,n);const{type:i,timeout:s,propCount:l}=function(e,t){const n=window.getComputedStyle(e),r=e=>(n[e]||"").split(", "),a=r(`${by}Delay`),o=r(`${by}Duration`),i=Dy(a,o),s=r(`${yy}Delay`),l=r(`${yy}Duration`),c=Dy(s,l);let u=null,d=0,p=0;t===by?i>0&&(u=by,d=i,p=o.length):t===yy?c>0&&(u=yy,d=c,p=l.length):(d=Math.max(i,c),u=d>0?i>c?by:yy:null,p=u?u===by?o.length:l.length:0);return{type:u,timeout:d,propCount:p,hasTransform:u===by&&/\b(?:transform|all)(?:,|$)/.test(r(`${by}Property`).toString())}}(e,t);if(!i)return r();const c=i+"end";let u=0;const d=()=>{e.removeEventListener(c,p),o()},p=t=>{t.target===e&&++u>=l&&d()};setTimeout((()=>{uIy(t)+Iy(e[n]))))}function Iy(e){return"auto"===e?0:1e3*Number(e.slice(0,-1).replace(",","."))}function My(e){return(e?e.ownerDocument:document).body.offsetHeight}const Ny=Symbol("_vod"),Ry=Symbol("_vsh"),Ly={name:"show",beforeMount(e,{value:t},{transition:n}){e[Ny]="none"===e.style.display?"":e.style.display,n&&t?n.beforeEnter(e):By(e,t)},mounted(e,{value:t},{transition:n}){n&&t&&n.enter(e)},updated(e,{value:t,oldValue:n},{transition:r}){!t!=!n&&(r?t?(r.beforeEnter(e),By(e,!0),r.enter(e)):r.leave(e,(()=>{By(e,!1)})):By(e,t))},beforeUnmount(e,{value:t}){By(e,t)}};function By(e,t){e.style.display=t?e[Ny]:"none",e[Ry]=!t}const jy=Symbol("");function Uy(e){const t=Yb();if(!t)return;const 
n=t.ut=(n=e(t.proxy))=>{Array.from(document.querySelectorAll(`[data-v-owner="${t.uid}"]`)).forEach((e=>Fy(e,n)))},r=()=>{const r=e(t.proxy);t.ce?Fy(t.ce,r):zy(t.subTree,r),n(r)};Yg((()=>{Zm(r)})),Gg((()=>{og(r,Up,{flush:"post"});const e=new MutationObserver(r);e.observe(t.subTree.el.parentNode,{childList:!0}),ev((()=>e.disconnect()))}))}function zy(e,t){if(128&e.shapeFlag){const n=e.suspense;e=n.activeBranch,n.pendingBranch&&!n.isHydrating&&n.effects.push((()=>{zy(n.activeBranch,t)}))}for(;e.component;)e=e.component.subTree;if(1&e.shapeFlag&&e.el)Fy(e.el,t);else if(e.type===Ob)e.children.forEach((e=>zy(e,t)));else if(e.type===kb){let{el:n,anchor:r}=e;for(;n&&(Fy(n,t),n!==r);)n=n.nextSibling}}function Fy(e,t){if(1===e.nodeType){const n=e.style;let r="";for(const e in t){const a=Rh(t[e]);n.setProperty(`--${e}`,a),r+=`--${e}: ${a};`}n[jy]=r}}const Zy=/(?:^|;)\s*display\s*:/,Qy=/\s*!important$/;function Hy(e,t,n){if(Wp(n))n.forEach((n=>Hy(e,t,n)));else if(null==n&&(n=""),t.startsWith("--"))e.setProperty(t,n);else{const r=function(e,t){const n=qy[t];if(n)return n;let r=uh(t);if("filter"!==r&&r in e)return qy[t]=r;r=hh(r);for(let n=0;neO||(tO.then((()=>eO=0)),eO=Date.now()),rO=e=>111===e.charCodeAt(0)&&110===e.charCodeAt(1)&&e.charCodeAt(2)>96&&e.charCodeAt(2)<123,aO=e=>{const t=e.props["onUpdate:modelValue"]||!1;return Wp(t)?e=>gh(t,e):t};function oO(e){e.target.composing=!0}function iO(e){const t=e.target;t.composing&&(t.composing=!1,t.dispatchEvent(new Event("input")))}const sO=Symbol("_assign");function lO(e,t,n){return t&&(e=e.trim()),n&&(e=bh(e)),e}const cO={created(e,{modifiers:{lazy:t,trim:n,number:r}},a){e[sO]=aO(a);const 
o=r||a.props&&"number"===a.props.type;Yy(e,t?"change":"input",(t=>{t.target.composing||e[sO](lO(e.value,n,o))})),(n||o)&&Yy(e,"change",(()=>{e.value=lO(e.value,n,o)})),t||(Yy(e,"compositionstart",oO),Yy(e,"compositionend",iO),Yy(e,"change",iO))},mounted(e,{value:t}){e.value=null==t?"":t},beforeUpdate(e,{value:t,oldValue:n,modifiers:{lazy:r,trim:a,number:o}},i){if(e[sO]=aO(i),e.composing)return;const s=null==t?"":t;if((!o&&"number"!==e.type||/^0\d/.test(e.value)?e.value:bh(e.value))!==s){if(document.activeElement===e&&"range"!==e.type){if(r&&t===n)return;if(a&&e.value.trim()===s)return}e.value=s}}},uO={deep:!0,created(e,t,n){e[sO]=aO(n),Yy(e,"change",(()=>{const t=e._modelValue,n=mO(e),r=e.checked,a=e[sO];if(Wp(t)){const e=Ph(t,n),o=-1!==e;if(r&&!o)a(t.concat(n));else if(!r&&o){const n=[...t];n.splice(e,1),a(n)}}else if(Gp(t)){const e=new Set(t);r?e.add(n):e.delete(n),a(e)}else a(gO(e,r))}))},mounted:dO,beforeUpdate(e,t,n){e[sO]=aO(n),dO(e,t,n)}};function dO(e,{value:t,oldValue:n},r){let a;if(e._modelValue=t,Wp(t))a=Ph(t,r.props.value)>-1;else if(Gp(t))a=t.has(r.props.value);else{if(t===n)return;a=$h(t,gO(e,!0))}e.checked!==a&&(e.checked=a)}const pO={created(e,{value:t},n){e.checked=$h(t,n.props.value),e[sO]=aO(n),Yy(e,"change",(()=>{e[sO](mO(e))}))},beforeUpdate(e,{value:t,oldValue:n},r){e[sO]=aO(r),t!==n&&(e.checked=$h(t,r.props.value))}},hO={deep:!0,created(e,{value:t,modifiers:{number:n}},r){const a=Gp(t);Yy(e,"change",(()=>{const t=Array.prototype.filter.call(e.options,(e=>e.selected)).map((e=>n?bh(mO(e)):mO(e)));e[sO](e.multiple?a?new Set(t):t:t[0]),e._assigning=!0,Um((()=>{e._assigning=!1}))})),e[sO]=aO(r)},mounted(e,{value:t}){fO(e,t)},beforeUpdate(e,t,n){e[sO]=aO(n)},updated(e,{value:t}){e._assigning||fO(e,t)}};function fO(e,t){const n=e.multiple,r=Wp(t);if(!n||r||Gp(t)){for(let a=0,o=e.options.length;aString(e)===String(i))):Ph(t,i)>-1}else o.selected=t.has(i);else if($h(mO(o),t))return 
void(e.selectedIndex!==a&&(e.selectedIndex=a))}n||-1===e.selectedIndex||(e.selectedIndex=-1)}}function mO(e){return"_value"in e?e._value:e.value}function gO(e,t){const n=t?"_trueValue":"_falseValue";return n in e?e[n]:t}const vO={created(e,t,n){bO(e,t,n,null,"created")},mounted(e,t,n){bO(e,t,n,null,"mounted")},beforeUpdate(e,t,n,r){bO(e,t,n,r,"beforeUpdate")},updated(e,t,n,r){bO(e,t,n,r,"updated")}};function bO(e,t,n,r,a){const o=function(e,t){switch(e){case"SELECT":return hO;case"TEXTAREA":return cO;default:switch(t){case"checkbox":return uO;case"radio":return pO;default:return cO}}}(e.tagName,n.props&&n.props.type)[a];o&&o(e,t,n,r)}const yO=["ctrl","shift","alt","meta"],OO={stop:e=>e.stopPropagation(),prevent:e=>e.preventDefault(),self:e=>e.target!==e.currentTarget,ctrl:e=>!e.ctrlKey,shift:e=>!e.shiftKey,alt:e=>!e.altKey,meta:e=>!e.metaKey,left:e=>"button"in e&&0!==e.button,middle:e=>"button"in e&&1!==e.button,right:e=>"button"in e&&2!==e.button,exact:(e,t)=>yO.some((n=>e[`${n}Key`]&&!t.includes(n)))},wO=(e,t)=>{const n=e._withMods||(e._withMods={}),r=t.join(".");return n[r]||(n[r]=(n,...r)=>{for(let e=0;e{const n=e._withKeys||(e._withKeys={}),r=t.join(".");return n[r]||(n[r]=n=>{if(!("key"in n))return;const r=ph(n.key);return t.some((e=>e===r||xO[e]===r))?e(n):void 0})},SO=Qp({patchProp:(e,t,n,r,a,o)=>{const i="svg"===a;"class"===t?function(e,t,n){const r=e[Oy];r&&(t=(t?[t,...r]:[...r]).join(" ")),null==t?e.removeAttribute("class"):n?e.setAttribute("class",t):e.className=t}(e,r,i):"style"===t?function(e,t,n){const r=e.style,a=Jp(n);let o=!1;if(n&&!a){if(t)if(Jp(t))for(const e of t.split(";")){const t=e.slice(0,e.indexOf(":")).trim();null==n[t]&&Hy(r,t,"")}else for(const e in t)null==n[e]&&Hy(r,e,"");for(const e in n)"display"===e&&(o=!0),Hy(r,e,n[e])}else if(a){if(t!==n){const e=r[jy];e&&(n+=";"+e),r.cssText=n,o=Zy.test(n)}}else t&&e.removeAttribute("style");Ny in 
e&&(e[Ny]=o?r.display:"",e[Ry]&&(r.display="none"))}(e,n,r):Fp(t)?Zp(t)||function(e,t,n,r,a=null){const o=e[Ky]||(e[Ky]={}),i=o[t];if(r&&i)i.value=r;else{const[n,s]=function(e){let t;if(Jy.test(e)){let n;for(t={};n=e.match(Jy);)e=e.slice(0,e.length-n[0].length),t[n[0].toLowerCase()]=!0}return[":"===e[2]?e.slice(3):ph(e.slice(2)),t]}(t);if(r){const i=o[t]=function(e,t){const n=e=>{if(e._vts){if(e._vts<=n.attached)return}else e._vts=Date.now();Pm(function(e,t){if(Wp(t)){const n=e.stopImmediatePropagation;return e.stopImmediatePropagation=()=>{n.call(e),e._stopped=!0},t.map((e=>t=>!t._stopped&&e&&e(t)))}return t}(e,n.value),t,5,[e])};return n.value=e,n.attached=nO(),n}(r,a);Yy(e,n,i,s)}else i&&(function(e,t,n,r){e.removeEventListener(t,n,r)}(e,n,i,s),o[t]=void 0)}}(e,t,0,r,o):("."===t[0]?(t=t.slice(1),1):"^"===t[0]?(t=t.slice(1),0):function(e,t,n,r){if(r)return"innerHTML"===t||"textContent"===t||!!(t in e&&rO(t)&&Kp(n));if("spellcheck"===t||"draggable"===t||"translate"===t||"autocorrect"===t)return!1;if("sandbox"===t&&"IFRAME"===e.tagName)return!1;if("form"===t)return!1;if("list"===t&&"INPUT"===e.tagName)return!1;if("type"===t&&"TEXTAREA"===e.tagName)return!1;if("width"===t||"height"===t){const t=e.tagName;if("IMG"===t||"VIDEO"===t||"CANVAS"===t||"SOURCE"===t)return!1}return(!rO(t)||!Jp(n))&&t in e}(e,t,r,i))?(Gy(e,t,r),e.tagName.includes("-")||"value"!==t&&"checked"!==t&&"selected"!==t||Xy(e,t,r,i,0,"value"!==t)):!e._isVueCE||!/[A-Z]/.test(t)&&Jp(r)?("true-value"===t?e._trueValue=r:"false-value"===t&&(e._falseValue=r),Xy(e,t,r,i)):Gy(e,uh(t),r,0,t)}},vy);let _O;function TO(){return _O||(_O=pb(SO))}const EO=(...e)=>{TO().render(...e)},AO=(...e)=>{const t=TO().createApp(...e),{mount:n}=t;return t.mount=e=>{const r=function(e){return Jp(e)?document.querySelector(e):e}(e);if(!r)return;const a=t._component;Kp(a)||a.render||a.template||(a.template=r.innerHTML),1===r.nodeType&&(r.textContent="");const o=n(r,!1,function(e){return e instanceof 
SVGElement?"svg":"function"==typeof MathMLElement&&e instanceof MathMLElement?"mathml":void 0}(r));return r instanceof Element&&(r.removeAttribute("v-cloak"),r.setAttribute("data-v-app","")),o},t},CO="3"===dy[0];function $O(e){if(e instanceof Promise||e instanceof Date||e instanceof RegExp)return e;const t="function"==typeof(n=e)?n():fm(n);var n;if(!e||!t)return t;if(Array.isArray(t))return t.map((e=>$O(e)));if("object"==typeof t){const e={};for(const n in t)Object.prototype.hasOwnProperty.call(t,n)&&("titleTemplate"===n||"o"===n[0]&&"n"===n[1]?e[n]=fm(t[n]):e[n]=$O(t[n]));return e}return t}const PO={hooks:{"entries:resolve":e=>{for(const t of e.entries)t.resolvedInput=$O(t.input)}}},DO="usehead";function IO(e={}){e.domDelayFn=e.domDelayFn||(e=>Um((()=>setTimeout((()=>e()),0))));const t=function(e={}){const t=function(e={}){const t=new wp;t.addHooks(e.hooks||{}),e.document=e.document||(Rd?document:void 0);const n=!e.document,r=()=>{s.dirty=!0,t.callHook("entries:updated",s)};let a=0,o=[];const i=[],s={plugins:i,dirty:!1,resolvedOptions:e,hooks:t,headEntries:()=>o,use(e){const r="function"==typeof e?e(s):e;r.key&&i.some((e=>e.key===r.key))||(i.push(r),Rp(r.mode,n)&&t.addHooks(r.hooks||{}))},push(e,t){delete t?.head;const i={_i:a++,input:e,...t};return Rp(i.mode,n)&&(o.push(i),r()),{dispose(){o=o.filter((e=>e._i!==i._i)),r()},patch(e){for(const t of o)t._i===i._i&&(t.input=i.input=e);r()}}},async resolveTags(){const e={tags:[],entries:[...o]};await t.callHook("entries:resolve",e);for(const n of e.entries){const r=n.resolvedInput||n.input;if(n.resolvedInput=await(n.transform?n.transform(r):r),n.resolvedInput)for(const r of await np(n)){const a={tag:r,entry:n,resolvedOptions:s.resolvedOptions};await t.callHook("tag:normalise",a),e.tags.push(a.tag)}}return await t.callHook("tags:beforeResolve",e),await t.callHook("tags:resolve",e),await 
t.callHook("tags:afterResolve",e),e.tags},ssr:n};return[kp,Ap,_p,Ep,Cp,Dp,Ip,Mp,...e?.plugins||[]].forEach((e=>s.use(e))),s.hooks.callHook("init",s),s}(e);return t.use(fp()),Np=t}(e);return t.use(PO),t.install=function(e){return{install(t){CO&&(t.config.globalProperties.$unhead=e,t.config.globalProperties.$head=e,t.provide(DO,e))}}.install}(t),t}const MO="undefined"!=typeof globalThis?globalThis:"undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:{},NO="__unhead_injection_handler__";function RO(e,t={}){const n=t.head||(NO in MO?MO[NO]():tg(DO)||Np);if(n)return n.ssr?n.push(e,t):function(e,t,n={}){const r=um(!1),a=um({});ag((()=>{a.value=r.value?{}:$O(t)}));const o=e.push(a.value,n);return og(a,(e=>{o.patch(e)})),Yb()&&(Jg((()=>{o.dispose()})),Qg((()=>{r.value=!0})),Zg((()=>{r.value=!1}))),o}(n,e,t)}var LO;let BO=Symbol("headlessui.useid"),jO=0;const UO=null!=(LO=Ng)?LO:function(){return tg(BO,(()=>""+ ++jO))()};function zO(e){var t;if(null==e||null==e.value)return null;let n=null!=(t=e.value.$el)?t:e.value;return n instanceof Node?n:null}function FO(e,t,...n){if(e in t){let r=t[e];return"function"==typeof r?r(...n):r}let r=new Error(`Tried to handle "${e}" but there is no handler defined. 
Only defined handlers are: ${Object.keys(t).map((e=>`"${e}"`)).join(", ")}.`);throw Error.captureStackTrace&&Error.captureStackTrace(r,FO),r}var ZO=Object.defineProperty,QO=(e,t,n)=>(((e,t,n)=>{t in e?ZO(e,t,{enumerable:!0,configurable:!0,writable:!0,value:n}):e[t]=n})(e,"symbol"!=typeof t?t+"":t,n),n);let HO=new class{constructor(){QO(this,"current",this.detect()),QO(this,"currentId",0)}set(e){this.current!==e&&(this.currentId=0,this.current=e)}reset(){this.set(this.detect())}nextId(){return++this.currentId}get isServer(){return"server"===this.current}get isClient(){return"client"===this.current}detect(){return"undefined"==typeof window||"undefined"==typeof document?"server":"client"}};function VO(e){if(HO.isServer)return null;if(e instanceof Node)return e.ownerDocument;if(null!=e&&e.hasOwnProperty("value")){let t=zO(e);if(t)return t.ownerDocument}return document}let qO=["[contentEditable=true]","[tabindex]","a[href]","area[href]","button:not([disabled])","iframe","input:not([disabled])","select:not([disabled])","textarea:not([disabled])"].map((e=>`${e}:not([tabindex='-1'])`)).join(",");var WO,XO,GO,YO=((GO=YO||{})[GO.First=1]="First",GO[GO.Previous=2]="Previous",GO[GO.Next=4]="Next",GO[GO.Last=8]="Last",GO[GO.WrapAround=16]="WrapAround",GO[GO.NoScroll=32]="NoScroll",GO),KO=((XO=KO||{})[XO.Error=0]="Error",XO[XO.Overflow=1]="Overflow",XO[XO.Success=2]="Success",XO[XO.Underflow=3]="Underflow",XO),JO=((WO=JO||{})[WO.Previous=-1]="Previous",WO[WO.Next=1]="Next",WO);function ew(e=document.body){return null==e?[]:Array.from(e.querySelectorAll(qO)).sort(((e,t)=>Math.sign((e.tabIndex||Number.MAX_SAFE_INTEGER)-(t.tabIndex||Number.MAX_SAFE_INTEGER))))}var tw=(e=>(e[e.Strict=0]="Strict",e[e.Loose=1]="Loose",e))(tw||{});function nw(e,t=0){var n;return e!==(null==(n=VO(e))?void 0:n.body)&&FO(t,{0:()=>e.matches(qO),1(){let t=e;for(;null!==t;){if(t.matches(qO))return!0;t=t.parentElement}return!1}})}function rw(e){let 
t=VO(e);Um((()=>{t&&!nw(t.activeElement,0)&&function(e){null==e||e.focus({preventScroll:!0})}(e)}))}var aw=(e=>(e[e.Keyboard=0]="Keyboard",e[e.Mouse=1]="Mouse",e))(aw||{});"undefined"!=typeof window&&"undefined"!=typeof document&&(document.addEventListener("keydown",(e=>{e.metaKey||e.altKey||e.ctrlKey||(document.documentElement.dataset.headlessuiFocusVisible="")}),!0),document.addEventListener("click",(e=>{1===e.detail?delete document.documentElement.dataset.headlessuiFocusVisible:0===e.detail&&(document.documentElement.dataset.headlessuiFocusVisible="")}),!0));let ow=["textarea","input"].join(",");function iw(e,t=e=>e){return e.slice().sort(((e,n)=>{let r=t(e),a=t(n);if(null===r||null===a)return 0;let o=r.compareDocumentPosition(a);return o&Node.DOCUMENT_POSITION_FOLLOWING?-1:o&Node.DOCUMENT_POSITION_PRECEDING?1:0}))}function sw(e,t,{sorted:n=!0,relativeTo:r=null,skipElements:a=[]}={}){var o;let i=null!=(o=Array.isArray(e)?e.length>0?e[0].ownerDocument:document:null==e?void 0:e.ownerDocument)?o:document,s=Array.isArray(e)?n?iw(e):e:ew(e);a.length>0&&s.length>1&&(s=s.filter((e=>!a.includes(e)))),r=null!=r?r:i.activeElement;let l,c=(()=>{if(5&t)return 1;if(10&t)return-1;throw new Error("Missing Focus.First, Focus.Previous, Focus.Next or Focus.Last")})(),u=(()=>{if(1&t)return 0;if(2&t)return Math.max(0,s.indexOf(r))-1;if(4&t)return Math.max(0,s.indexOf(r))+1;if(8&t)return s.length-1;throw new Error("Missing Focus.First, Focus.Previous, Focus.Next or Focus.Last")})(),d=32&t?{preventScroll:!0}:{},p=0,h=s.length;do{if(p>=h||p+h<=0)return 0;let e=u+p;if(16&t)e=(e+h)%h;else{if(e<0)return 3;if(e>=h)return 1}l=s[e],null==l||l.focus(d),p+=c}while(l!==i.activeElement);return 6&t&&function(e){var t,n;return null!=(n=null==(t=null==e?void 0:e.matches)?void 0:t.call(e,ow))&&n}(l)&&l.select(),2}function lw(e,t,n){HO.isServer||ag((r=>{document.addEventListener(e,t,n),r((()=>document.removeEventListener(e,t,n)))}))}function cw(e,t,n=cy((()=>!0))){function 
r(r,a){if(!n.value||r.defaultPrevented)return;let o=a(r);if(null===o||!o.getRootNode().contains(o))return;let i=function e(t){return"function"==typeof t?e(t()):Array.isArray(t)||t instanceof Set?t:[t]}(e);for(let e of i){if(null===e)continue;let t=e instanceof HTMLElement?e:zO(e);if(null!=t&&t.contains(o)||r.composed&&r.composedPath().includes(t))return}return!nw(o,tw.Loose)&&-1!==o.tabIndex&&r.preventDefault(),t(r,o)}let a=um(null);lw("pointerdown",(e=>{var t,r;n.value&&(a.value=(null==(r=null==(t=e.composedPath)?void 0:t.call(e))?void 0:r[0])||e.target)}),!0),lw("mousedown",(e=>{var t,r;n.value&&(a.value=(null==(r=null==(t=e.composedPath)?void 0:t.call(e))?void 0:r[0])||e.target)}),!0),lw("click",(e=>{/iPhone/gi.test(window.navigator.platform)||/Mac/gi.test(window.navigator.platform)&&window.navigator.maxTouchPoints>0||/Android/gi.test(window.navigator.userAgent)||a.value&&(r(e,(()=>a.value)),a.value=null)}),!0),lw("touchend",(e=>r(e,(()=>e.target instanceof HTMLElement?e.target:null))),!0),function(e,t,n){HO.isServer||ag((r=>{window.addEventListener(e,t,n),r((()=>window.removeEventListener(e,t,n)))}))}("blur",(e=>r(e,(()=>window.document.activeElement instanceof HTMLIFrameElement?window.document.activeElement:null))),!0)}function uw(e,t){if(e)return e;let n=null!=t?t:"button";return"string"==typeof n&&"button"===n.toLowerCase()?"button":void 0}function dw(e,t){let n=um(uw(e.value.type,e.value.as));return Gg((()=>{n.value=uw(e.value.type,e.value.as)})),ag((()=>{var e;n.value||zO(t)&&zO(t)instanceof HTMLButtonElement&&(null==(e=zO(t))||!e.hasAttribute("type"))&&(n.value="button")})),n}function pw(e){return[e.screenX,e.screenY]}var hw,fw=(e=>(e[e.None=0]="None",e[e.RenderStrategy=1]="RenderStrategy",e[e.Static=2]="Static",e))(fw||{}),mw=((hw=mw||{})[hw.Unmount=0]="Unmount",hw[hw.Hidden=1]="Hidden",hw);function gw({visible:e=!0,features:t=0,ourProps:n,theirProps:r,...a}){var o;let i=yw(r,n),s=Object.assign(a,{props:i});return 
e||2&t&&i.static?vw(s):1&t?FO(null==(o=i.unmount)||o?0:1,{0:()=>null,1:()=>vw({...a,props:{...i,hidden:!0,style:{display:"none"}}})}):vw(s)}function vw({props:e,attrs:t,slots:n,slot:r,name:a}){var o,i;let{as:s,...l}=Ow(e,["unmount","static"]),c=null==(o=n.default)?void 0:o.call(n,r),u={};if(r){let e=!1,t=[];for(let[n,a]of Object.entries(r))"boolean"==typeof a&&(e=!0),!0===a&&t.push(n);e&&(u["data-headlessui-state"]=t.join(" "))}if("template"===s){if(c=bw(null!=c?c:[]),Object.keys(l).length>0||Object.keys(t).length>0){let[e,...n]=null!=c?c:[];if(!function(e){return null!=e&&("string"==typeof e.type||"object"==typeof e.type||"function"==typeof e.type)}(e)||n.length>0)throw new Error(['Passing props on "template"!',"",`The current component <${a} /> is rendering a "template".`,"However we need to passthrough the following props:",Object.keys(l).concat(Object.keys(t)).map((e=>e.trim())).filter(((e,t,n)=>n.indexOf(e)===t)).sort(((e,t)=>e.localeCompare(t))).map((e=>` - ${e}`)).join("\n"),"","You can apply a few solutions:",['Add an `as="..."` prop, to ensure that we render an actual element instead of a "template".',"Render a single element as the child so that we can forward the props onto that element."].map((e=>` - ${e}`)).join("\n")].join("\n"));let r=yw(null!=(i=e.props)?i:{},l,u),o=jb(e,r,!0);for(let e in r)e.startsWith("on")&&(o.props||(o.props={}),o.props[e]=r[e]);return o}return Array.isArray(c)&&1===c.length?c[0]:c}return uy(s,Object.assign({},l,u),{default:()=>c})}function bw(e){return e.flatMap((e=>e.type===Ob?bw(e.children):[e]))}function yw(...e){if(0===e.length)return{};if(1===e.length)return e[0];let t={},n={};for(let r of e)for(let e in r)e.startsWith("on")&&"function"==typeof r[e]?(null!=n[e]||(n[e]=[]),n[e].push(r[e])):t[e]=r[e];if(t.disabled||t["aria-disabled"])return Object.assign(t,Object.fromEntries(Object.keys(n).map((e=>[e,void 0]))));for(let e in n)Object.assign(t,{[e](t,...r){let a=n[e];for(let e of a){if(t instanceof 
Event&&t.defaultPrevented)return;e(t,...r)}}});return t}function Ow(e,t=[]){let n=Object.assign({},e);for(let e of t)e in n&&delete n[e];return n}var ww=(e=>(e[e.None=1]="None",e[e.Focusable=2]="Focusable",e[e.Hidden=4]="Hidden",e))(ww||{});let xw=Mg({name:"Hidden",props:{as:{type:[Object,String],default:"div"},features:{type:Number,default:1}},setup:(e,{slots:t,attrs:n})=>()=>{var r;let{features:a,...o}=e;return gw({ourProps:{"aria-hidden":!(2&~a)||(null!=(r=o["aria-hidden"])?r:void 0),hidden:!(4&~a)||void 0,style:{position:"fixed",top:1,left:1,width:1,height:0,padding:0,margin:-1,overflow:"hidden",clip:"rect(0, 0, 0, 0)",whiteSpace:"nowrap",borderWidth:"0",...!(4&~a)&&!!(2&~a)&&{display:"none"}}},theirProps:o,slot:{},attrs:n,slots:t,name:"Hidden"})}}),kw=Symbol("Context");var Sw=(e=>(e[e.Open=1]="Open",e[e.Closed=2]="Closed",e[e.Closing=4]="Closing",e[e.Opening=8]="Opening",e))(Sw||{});function _w(){return tg(kw,null)}function Tw(e){eg(kw,e)}var Ew,Aw,Cw=((Ew=Cw||{}).Space=" ",Ew.Enter="Enter",Ew.Escape="Escape",Ew.Backspace="Backspace",Ew.Delete="Delete",Ew.ArrowLeft="ArrowLeft",Ew.ArrowUp="ArrowUp",Ew.ArrowRight="ArrowRight",Ew.ArrowDown="ArrowDown",Ew.Home="Home",Ew.End="End",Ew.PageUp="PageUp",Ew.PageDown="PageDown",Ew.Tab="Tab",Ew),$w=((Aw=$w||{})[Aw.First=0]="First",Aw[Aw.Previous=1]="Previous",Aw[Aw.Next=2]="Next",Aw[Aw.Last=3]="Last",Aw[Aw.Specific=4]="Specific",Aw[Aw.Nothing=5]="Nothing",Aw);var Pw=(e=>(e[e.Open=0]="Open",e[e.Closed=1]="Closed",e))(Pw||{});let Dw=Symbol("DisclosureContext");function Iw(e){let t=tg(Dw,null);if(null===t){let t=new Error(`<${e} /> is missing a parent component.`);throw Error.captureStackTrace&&Error.captureStackTrace(t,Iw),t}return t}let Mw=Symbol("DisclosurePanelContext"),Nw=Mg({name:"Disclosure",props:{as:{type:[Object,String],default:"template"},defaultOpen:{type:[Boolean],default:!1}},setup(e,{slots:t,attrs:n}){let 
r=um(e.defaultOpen?0:1),a=um(null),o=um(null),i={buttonId:um(`headlessui-disclosure-button-${UO()}`),panelId:um(`headlessui-disclosure-panel-${UO()}`),disclosureState:r,panel:a,button:o,toggleDisclosure(){r.value=FO(r.value,{0:1,1:0})},closeDisclosure(){1!==r.value&&(r.value=1)},close(e){i.closeDisclosure();let t=e?e instanceof HTMLElement?e:e.value instanceof HTMLElement?zO(e):zO(i.button):zO(i.button);null==t||t.focus()}};return eg(Dw,i),Tw(cy((()=>FO(r.value,{0:Sw.Open,1:Sw.Closed})))),()=>{let{defaultOpen:a,...o}=e;return gw({theirProps:o,ourProps:{},slot:{open:0===r.value,close:i.close},slots:t,attrs:n,name:"Disclosure"})}}}),Rw=Mg({name:"DisclosureButton",props:{as:{type:[Object,String],default:"button"},disabled:{type:[Boolean],default:!1},id:{type:String,default:null}},setup(e,{attrs:t,slots:n,expose:r}){let a=Iw("DisclosureButton"),o=tg(Mw,null),i=cy((()=>null!==o&&o.value===a.panelId.value));Gg((()=>{i.value||null!==e.id&&(a.buttonId.value=e.id)})),ev((()=>{i.value||(a.buttonId.value=null)}));let s=um(null);r({el:s,$el:s}),i.value||ag((()=>{a.button.value=s.value}));let l=dw(cy((()=>({as:e.as,type:t.type}))),s);function c(){var t;e.disabled||(i.value?(a.toggleDisclosure(),null==(t=zO(a.button))||t.focus()):a.toggleDisclosure())}function u(t){var n;if(!e.disabled)if(i.value)switch(t.key){case Cw.Space:case Cw.Enter:t.preventDefault(),t.stopPropagation(),a.toggleDisclosure(),null==(n=zO(a.button))||n.focus()}else switch(t.key){case Cw.Space:case Cw.Enter:t.preventDefault(),t.stopPropagation(),a.toggleDisclosure()}}function d(e){e.key===Cw.Space&&e.preventDefault()}return()=>{var r;let o={open:0===a.disclosureState.value},{id:p,...h}=e;return gw({ourProps:i.value?{ref:s,type:l.value,onClick:c,onKeydown:u}:{id:null!=(r=a.buttonId.value)?r:p,ref:s,type:l.value,"aria-expanded":0===a.disclosureState.value,"aria-controls":0===a.disclosureState.value||zO(a.panel)?a.panelId.value:void 0,disabled:!!e.disabled||void 
0,onClick:c,onKeydown:u,onKeyup:d},theirProps:h,slot:o,attrs:t,slots:n,name:"DisclosureButton"})}}}),Lw=Mg({name:"DisclosurePanel",props:{as:{type:[Object,String],default:"div"},static:{type:Boolean,default:!1},unmount:{type:Boolean,default:!0},id:{type:String,default:null}},setup(e,{attrs:t,slots:n,expose:r}){let a=Iw("DisclosurePanel");Gg((()=>{null!==e.id&&(a.panelId.value=e.id)})),ev((()=>{a.panelId.value=null})),r({el:a.panel,$el:a.panel}),eg(Mw,a.panelId);let o=_w(),i=cy((()=>null!==o?(o.value&Sw.Open)===Sw.Open:0===a.disclosureState.value));return()=>{var r;let o={open:0===a.disclosureState.value,close:a.close},{id:s,...l}=e;return gw({ourProps:{id:null!=(r=a.panelId.value)?r:s,ref:a.panel},theirProps:l,slot:o,attrs:t,slots:n,features:fw.RenderStrategy|fw.Static,visible:i.value,name:"DisclosurePanel"})}}}),Bw=/([\u2700-\u27BF]|[\uE000-\uF8FF]|\uD83C[\uDC00-\uDFFF]|\uD83D[\uDC00-\uDFFF]|[\u2011-\u26FF]|\uD83E[\uDD10-\uDDFF])/g;function jw(e){var t,n;let r=null!=(t=e.innerText)?t:"",a=e.cloneNode(!0);if(!(a instanceof HTMLElement))return r;let o=!1;for(let e of a.querySelectorAll('[hidden],[aria-hidden],[role="img"]'))e.remove(),o=!0;let i=o?null!=(n=a.innerText)?n:"":r;return Bw.test(i)&&(i=i.replace(Bw,"")),i}var Uw=(e=>(e[e.Open=0]="Open",e[e.Closed=1]="Closed",e))(Uw||{}),zw=(e=>(e[e.Pointer=0]="Pointer",e[e.Other=1]="Other",e))(zw||{});let Fw=Symbol("MenuContext");function Zw(e){let t=tg(Fw,null);if(null===t){let t=new Error(`<${e} /> is missing a parent component.`);throw Error.captureStackTrace&&Error.captureStackTrace(t,Zw),t}return t}let Qw=Mg({name:"Menu",props:{as:{type:[Object,String],default:"template"}},setup(e,{slots:t,attrs:n}){let r=um(1),a=um(null),o=um(null),i=um([]),s=um(""),l=um(null),c=um(1);function u(e=e=>e){let t=null!==l.value?i.value[l.value]:null,n=iw(e(i.value.slice()),(e=>zO(e.dataRef.domRef))),r=t?n.indexOf(t):null;return-1===r&&(r=null),{items:n,activeItemIndex:r}}let 
d={menuState:r,buttonRef:a,itemsRef:o,items:i,searchQuery:s,activeItemIndex:l,activationTrigger:c,closeMenu:()=>{r.value=1,l.value=null},openMenu:()=>r.value=0,goToItem(e,t,n){let r=u(),a=function(e,t){let n=t.resolveItems();if(n.length<=0)return null;let r=t.resolveActiveIndex(),a=null!=r?r:-1;switch(e.focus){case 0:for(let e=0;e=0;--e)if(!t.resolveDisabled(n[e],e,n))return e;return r;case 2:for(let e=a+1;e=0;--e)if(!t.resolveDisabled(n[e],e,n))return e;return r;case 4:for(let r=0;rr.items,resolveActiveIndex:()=>r.activeItemIndex,resolveId:e=>e.id,resolveDisabled:e=>e.dataRef.disabled});s.value="",l.value=a,c.value=null!=n?n:1,i.value=r.items},search(e){let t=""!==s.value?0:1;s.value+=e.toLowerCase();let n=(null!==l.value?i.value.slice(l.value+t).concat(i.value.slice(0,l.value+t)):i.value).find((e=>e.dataRef.textValue.startsWith(s.value)&&!e.dataRef.disabled)),r=n?i.value.indexOf(n):-1;-1===r||r===l.value||(l.value=r,c.value=1)},clearSearch(){s.value=""},registerItem(e,t){let n=u((n=>[...n,{id:e,dataRef:t}]));i.value=n.items,l.value=n.activeItemIndex,c.value=1},unregisterItem(e){let t=u((t=>{let n=t.findIndex((t=>t.id===e));return-1!==n&&t.splice(n,1),t}));i.value=t.items,l.value=t.activeItemIndex,c.value=1}};return cw([a,o],((e,t)=>{var n;d.closeMenu(),nw(t,tw.Loose)||(e.preventDefault(),null==(n=zO(a))||n.focus())}),cy((()=>0===r.value))),eg(Fw,d),Tw(cy((()=>FO(r.value,{0:Sw.Open,1:Sw.Closed})))),()=>{let a={open:0===r.value,close:d.closeMenu};return gw({ourProps:{},theirProps:e,slot:a,slots:t,attrs:n,name:"Menu"})}}}),Hw=Mg({name:"MenuButton",props:{disabled:{type:Boolean,default:!1},as:{type:[Object,String],default:"button"},id:{type:String,default:null}},setup(e,{attrs:t,slots:n,expose:r}){var a;let o=null!=(a=e.id)?a:`headlessui-menu-button-${UO()}`,i=Zw("MenuButton");function s(e){switch(e.key){case Cw.Space:case Cw.Enter:case Cw.ArrowDown:e.preventDefault(),e.stopPropagation(),i.openMenu(),Um((()=>{var 
e;null==(e=zO(i.itemsRef))||e.focus({preventScroll:!0}),i.goToItem($w.First)}));break;case Cw.ArrowUp:e.preventDefault(),e.stopPropagation(),i.openMenu(),Um((()=>{var e;null==(e=zO(i.itemsRef))||e.focus({preventScroll:!0}),i.goToItem($w.Last)}))}}function l(e){e.key===Cw.Space&&e.preventDefault()}function c(t){e.disabled||(0===i.menuState.value?(i.closeMenu(),Um((()=>{var e;return null==(e=zO(i.buttonRef))?void 0:e.focus({preventScroll:!0})}))):(t.preventDefault(),i.openMenu(),function(e){requestAnimationFrame((()=>requestAnimationFrame(e)))}((()=>{var e;return null==(e=zO(i.itemsRef))?void 0:e.focus({preventScroll:!0})}))))}r({el:i.buttonRef,$el:i.buttonRef});let u=dw(cy((()=>({as:e.as,type:t.type}))),i.buttonRef);return()=>{var r;let a={open:0===i.menuState.value},{...d}=e;return gw({ourProps:{ref:i.buttonRef,id:o,type:u.value,"aria-haspopup":"menu","aria-controls":null==(r=zO(i.itemsRef))?void 0:r.id,"aria-expanded":0===i.menuState.value,onKeydown:s,onKeyup:l,onClick:c},theirProps:d,slot:a,attrs:t,slots:n,name:"MenuButton"})}}}),Vw=Mg({name:"MenuItems",props:{as:{type:[Object,String],default:"div"},static:{type:Boolean,default:!1},unmount:{type:Boolean,default:!0},id:{type:String,default:null}},setup(e,{attrs:t,slots:n,expose:r}){var a;let o=null!=(a=e.id)?a:`headlessui-menu-items-${UO()}`,i=Zw("MenuItems"),s=um(null);function l(e){var t;switch(s.value&&clearTimeout(s.value),e.key){case Cw.Space:if(""!==i.searchQuery.value)return e.preventDefault(),e.stopPropagation(),i.search(e.key);case Cw.Enter:e.preventDefault(),e.stopPropagation(),null!==i.activeItemIndex.value&&(null==(t=zO(i.items.value[i.activeItemIndex.value].dataRef.domRef))||t.click()),i.closeMenu(),rw(zO(i.buttonRef));break;case Cw.ArrowDown:return e.preventDefault(),e.stopPropagation(),i.goToItem($w.Next);case Cw.ArrowUp:return e.preventDefault(),e.stopPropagation(),i.goToItem($w.Previous);case Cw.Home:case Cw.PageUp:return e.preventDefault(),e.stopPropagation(),i.goToItem($w.First);case Cw.End:case 
Cw.PageDown:return e.preventDefault(),e.stopPropagation(),i.goToItem($w.Last);case Cw.Escape:e.preventDefault(),e.stopPropagation(),i.closeMenu(),Um((()=>{var e;return null==(e=zO(i.buttonRef))?void 0:e.focus({preventScroll:!0})}));break;case Cw.Tab:e.preventDefault(),e.stopPropagation(),i.closeMenu(),Um((()=>function(e,t){return sw(ew(),t,{relativeTo:e})}(zO(i.buttonRef),e.shiftKey?YO.Previous:YO.Next)));break;default:1===e.key.length&&(i.search(e.key),s.value=setTimeout((()=>i.clearSearch()),350))}}function c(e){e.key===Cw.Space&&e.preventDefault()}r({el:i.itemsRef,$el:i.itemsRef}),function({container:e,accept:t,walk:n,enabled:r}){ag((()=>{let a=e.value;if(!a||void 0!==r&&!r.value)return;let o=VO(e);if(!o)return;let i=Object.assign((e=>t(e)),{acceptNode:t}),s=o.createTreeWalker(a,NodeFilter.SHOW_ELEMENT,i,!1);for(;s.nextNode();)n(s.currentNode)}))}({container:cy((()=>zO(i.itemsRef))),enabled:cy((()=>0===i.menuState.value)),accept:e=>"menuitem"===e.getAttribute("role")?NodeFilter.FILTER_REJECT:e.hasAttribute("role")?NodeFilter.FILTER_SKIP:NodeFilter.FILTER_ACCEPT,walk(e){e.setAttribute("role","none")}});let u=_w(),d=cy((()=>null!==u?(u.value&Sw.Open)===Sw.Open:0===i.menuState.value));return()=>{var r,a;let s={open:0===i.menuState.value},{...u}=e;return gw({ourProps:{"aria-activedescendant":null===i.activeItemIndex.value||null==(r=i.items.value[i.activeItemIndex.value])?void 0:r.id,"aria-labelledby":null==(a=zO(i.buttonRef))?void 0:a.id,id:o,onKeydown:l,onKeyup:c,role:"menu",tabIndex:0,ref:i.itemsRef},theirProps:u,slot:s,attrs:t,slots:n,features:fw.RenderStrategy|fw.Static,visible:d.value,name:"MenuItems"})}}}),qw=Mg({name:"MenuItem",inheritAttrs:!1,props:{as:{type:[Object,String],default:"template"},disabled:{type:Boolean,default:!1},id:{type:String,default:null}},setup(e,{slots:t,attrs:n,expose:r}){var a;let o=null!=(a=e.id)?a:`headlessui-menu-item-${UO()}`,i=Zw("MenuItem"),s=um(null);r({el:s,$el:s});let 
l=cy((()=>null!==i.activeItemIndex.value&&i.items.value[i.activeItemIndex.value].id===o)),c=function(e){let t=um(""),n=um("");return()=>{let r=zO(e);if(!r)return"";let a=r.innerText;if(t.value===a)return n.value;let o=function(e){let t=e.getAttribute("aria-label");if("string"==typeof t)return t.trim();let n=e.getAttribute("aria-labelledby");if(n){let e=n.split(" ").map((e=>{let t=document.getElementById(e);if(t){let e=t.getAttribute("aria-label");return"string"==typeof e?e.trim():jw(t).trim()}return null})).filter(Boolean);if(e.length>0)return e.join(", ")}return jw(e).trim()}(r).trim().toLowerCase();return t.value=a,n.value=o,o}}(s),u=cy((()=>({disabled:e.disabled,get textValue(){return c()},domRef:s})));function d(t){if(e.disabled)return t.preventDefault();i.closeMenu(),rw(zO(i.buttonRef))}function p(){if(e.disabled)return i.goToItem($w.Nothing);i.goToItem($w.Specific,o)}Gg((()=>i.registerItem(o,u))),ev((()=>i.unregisterItem(o))),ag((()=>{0===i.menuState.value&&l.value&&0!==i.activationTrigger.value&&Um((()=>{var e,t;return null==(t=null==(e=zO(s))?void 0:e.scrollIntoView)?void 0:t.call(e,{block:"nearest"})}))}));let h=function(){let e=um([-1,-1]);return{wasMoved(t){let n=pw(t);return(e.value[0]!==n[0]||e.value[1]!==n[1])&&(e.value=n,!0)},update(t){e.value=pw(t)}}}();function f(e){h.update(e)}function m(t){h.wasMoved(t)&&(e.disabled||l.value||i.goToItem($w.Specific,o,0))}function g(t){h.wasMoved(t)&&(e.disabled||l.value&&i.goToItem($w.Nothing))}return()=>{let{disabled:r,...a}=e,c={active:l.value,disabled:r,close:i.closeMenu};return gw({ourProps:{id:o,ref:s,role:"menuitem",tabIndex:!0===r?void 0:-1,"aria-disabled":!0===r||void 0,onClick:d,onFocus:p,onPointerenter:f,onMouseenter:f,onPointermove:m,onMousemove:m,onPointerleave:g,onMouseleave:g},theirProps:{...n,...a},slot:c,attrs:n,slots:t,name:"MenuItem"})}}}),Ww=Mg({props:{onFocus:{type:Function,required:!0}},setup(e){let 
t=um(!0);return()=>t.value?uy(xw,{as:"button",type:"button",features:ww.Focusable,onFocus(n){n.preventDefault();let r,a=50;r=requestAnimationFrame((function n(){var o;if(!(a--<=0))return null!=(o=e.onFocus)&&o.call(e)?(t.value=!1,void cancelAnimationFrame(r)):void(r=requestAnimationFrame(n));r&&cancelAnimationFrame(r)}))}}):null}});var Xw,Gw,Yw=((Gw=Yw||{})[Gw.Forwards=0]="Forwards",Gw[Gw.Backwards=1]="Backwards",Gw),Kw=((Xw=Kw||{})[Xw.Less=-1]="Less",Xw[Xw.Equal=0]="Equal",Xw[Xw.Greater=1]="Greater",Xw);let Jw=Symbol("TabsContext");function ex(e){let t=tg(Jw,null);if(null===t){let t=new Error(`<${e} /> is missing a parent component.`);throw Error.captureStackTrace&&Error.captureStackTrace(t,ex),t}return t}let tx=Symbol("TabsSSRContext"),nx=Mg({name:"TabGroup",emits:{change:e=>!0},props:{as:{type:[Object,String],default:"template"},selectedIndex:{type:[Number],default:null},defaultIndex:{type:[Number],default:0},vertical:{type:[Boolean],default:!1},manual:{type:[Boolean],default:!1}},inheritAttrs:!1,setup(e,{slots:t,attrs:n,emit:r}){var a;let o=um(null!=(a=e.selectedIndex)?a:e.defaultIndex),i=um([]),s=um([]),l=cy((()=>null!==e.selectedIndex)),c=cy((()=>l.value?e.selectedIndex:o.value));function u(e){var t;let n=iw(d.tabs.value,zO),r=iw(d.panels.value,zO),a=n.filter((e=>{var t;return!(null!=(t=zO(e))&&t.hasAttribute("disabled"))}));if(e<0||e>n.length-1){let t=FO(null===o.value?0:Math.sign(e-o.value),{[-1]:()=>1,0:()=>FO(Math.sign(e),{[-1]:()=>0,0:()=>0,1:()=>1}),1:()=>0}),i=FO(t,{0:()=>n.indexOf(a[0]),1:()=>n.indexOf(a[a.length-1])});-1!==i&&(o.value=i),d.tabs.value=n,d.panels.value=r}else{let i=n.slice(0,e),s=[...n.slice(e),...i].find((e=>a.includes(e)));if(!s)return;let l=null!=(t=n.indexOf(s))?t:d.selectedIndex.value;-1===l&&(l=d.selectedIndex.value),o.value=l,d.tabs.value=n,d.panels.value=r}}let d={selectedIndex:cy((()=>{var t,n;return 
null!=(n=null!=(t=o.value)?t:e.defaultIndex)?n:null})),orientation:cy((()=>e.vertical?"vertical":"horizontal")),activation:cy((()=>e.manual?"manual":"auto")),tabs:i,panels:s,setSelectedIndex(e){c.value!==e&&r("change",e),l.value||u(e)},registerTab(e){var t;if(i.value.includes(e))return;let n=i.value[o.value];if(i.value.push(e),i.value=iw(i.value,zO),!l.value){let e=null!=(t=i.value.indexOf(n))?t:o.value;-1!==e&&(o.value=e)}},unregisterTab(e){let t=i.value.indexOf(e);-1!==t&&i.value.splice(t,1)},registerPanel(e){s.value.includes(e)||(s.value.push(e),s.value=iw(s.value,zO))},unregisterPanel(e){let t=s.value.indexOf(e);-1!==t&&s.value.splice(t,1)}};eg(Jw,d);let p=um({tabs:[],panels:[]}),h=um(!1);Gg((()=>{h.value=!0})),eg(tx,cy((()=>h.value?null:p.value)));let f=cy((()=>e.selectedIndex));return Gg((()=>{og([f],(()=>{var t;return u(null!=(t=e.selectedIndex)?t:e.defaultIndex)}),{immediate:!0})})),ag((()=>{if(!l.value||null==c.value||d.tabs.value.length<=0)return;let e=iw(d.tabs.value,zO);e.some(((e,t)=>zO(d.tabs.value[t])!==zO(e)))&&d.setSelectedIndex(e.findIndex((e=>zO(e)===zO(d.tabs.value[c.value]))))})),()=>{let r={selectedIndex:o.value};return uy(Ob,[i.value.length<=0&&uy(Ww,{onFocus:()=>{for(let e of i.value){let t=zO(e);if(0===(null==t?void 0:t.tabIndex))return t.focus(),!0}return!1}}),gw({theirProps:{...n,...Ow(e,["selectedIndex","defaultIndex","manual","vertical","onChange"])},ourProps:{},slot:r,slots:t,attrs:n,name:"TabGroup"})])}}}),rx=Mg({name:"TabList",props:{as:{type:[Object,String],default:"div"}},setup(e,{attrs:t,slots:n}){let r=ex("TabList");return()=>{let a={selectedIndex:r.selectedIndex.value};return gw({ourProps:{role:"tablist","aria-orientation":r.orientation.value},theirProps:e,slot:a,attrs:t,slots:n,name:"TabList"})}}}),ax=Mg({name:"Tab",props:{as:{type:[Object,String],default:"button"},disabled:{type:[Boolean],default:!1},id:{type:String,default:null}},setup(e,{attrs:t,slots:n,expose:r}){var a;let 
o=null!=(a=e.id)?a:`headlessui-tabs-tab-${UO()}`,i=ex("Tab"),s=um(null);r({el:s,$el:s}),Gg((()=>i.registerTab(s))),ev((()=>i.unregisterTab(s)));let l=tg(tx),c=cy((()=>{if(l.value){let e=l.value.tabs.indexOf(o);return-1===e?l.value.tabs.push(o)-1:e}return-1})),u=cy((()=>{let e=i.tabs.value.indexOf(s);return-1===e?c.value:e})),d=cy((()=>u.value===i.selectedIndex.value));function p(e){var t;let n=e();if(n===KO.Success&&"auto"===i.activation.value){let e=null==(t=VO(s))?void 0:t.activeElement,n=i.tabs.value.findIndex((t=>zO(t)===e));-1!==n&&i.setSelectedIndex(n)}return n}function h(e){let t=i.tabs.value.map((e=>zO(e))).filter(Boolean);if(e.key===Cw.Space||e.key===Cw.Enter)return e.preventDefault(),e.stopPropagation(),void i.setSelectedIndex(u.value);switch(e.key){case Cw.Home:case Cw.PageUp:return e.preventDefault(),e.stopPropagation(),p((()=>sw(t,YO.First)));case Cw.End:case Cw.PageDown:return e.preventDefault(),e.stopPropagation(),p((()=>sw(t,YO.Last)))}return p((()=>FO(i.orientation.value,{vertical:()=>e.key===Cw.ArrowUp?sw(t,YO.Previous|YO.WrapAround):e.key===Cw.ArrowDown?sw(t,YO.Next|YO.WrapAround):KO.Error,horizontal:()=>e.key===Cw.ArrowLeft?sw(t,YO.Previous|YO.WrapAround):e.key===Cw.ArrowRight?sw(t,YO.Next|YO.WrapAround):KO.Error})))===KO.Success?e.preventDefault():void 0}let f=um(!1);function m(){var t;f.value||(f.value=!0,!e.disabled&&(null==(t=zO(s))||t.focus({preventScroll:!0}),i.setSelectedIndex(u.value),function(e){"function"==typeof queueMicrotask?queueMicrotask(e):Promise.resolve().then(e).catch((e=>setTimeout((()=>{throw e}))))}((()=>{f.value=!1}))))}function g(e){e.preventDefault()}let v=dw(cy((()=>({as:e.as,type:t.type}))),s);return()=>{var r,a;let l={selected:d.value,disabled:null!=(r=e.disabled)&&r},{...c}=e;return gw({ourProps:{ref:s,onKeydown:h,onMousedown:g,onClick:m,id:o,role:"tab",type:v.value,"aria-controls":null==(a=zO(i.panels.value[u.value]))?void 0:a.id,"aria-selected":d.value,tabIndex:d.value?0:-1,disabled:!!e.disabled||void 
0},theirProps:c,slot:l,attrs:t,slots:n,name:"Tab"})}}}),ox=Mg({name:"TabPanels",props:{as:{type:[Object,String],default:"div"}},setup(e,{slots:t,attrs:n}){let r=ex("TabPanels");return()=>{let a={selectedIndex:r.selectedIndex.value};return gw({theirProps:e,ourProps:{},slot:a,attrs:n,slots:t,name:"TabPanels"})}}}),ix=Mg({name:"TabPanel",props:{as:{type:[Object,String],default:"div"},static:{type:Boolean,default:!1},unmount:{type:Boolean,default:!0},id:{type:String,default:null},tabIndex:{type:Number,default:0}},setup(e,{attrs:t,slots:n,expose:r}){var a;let o=null!=(a=e.id)?a:`headlessui-tabs-panel-${UO()}`,i=ex("TabPanel"),s=um(null);r({el:s,$el:s}),Gg((()=>i.registerPanel(s))),ev((()=>i.unregisterPanel(s)));let l=tg(tx),c=cy((()=>{if(l.value){let e=l.value.panels.indexOf(o);return-1===e?l.value.panels.push(o)-1:e}return-1})),u=cy((()=>{let e=i.panels.value.indexOf(s);return-1===e?c.value:e})),d=cy((()=>u.value===i.selectedIndex.value));return()=>{var r;let a={selected:d.value},{tabIndex:l,...c}=e,p={ref:s,id:o,role:"tabpanel","aria-labelledby":null==(r=zO(i.tabs.value[u.value]))?void 0:r.id,tabIndex:d.value?l:-1};return d.value||!e.unmount||e.static?gw({ourProps:p,theirProps:c,slot:a,attrs:t,slots:n,features:fw.Static|fw.RenderStrategy,visible:d.value,name:"TabPanel"}):uy(xw,{as:"span","aria-hidden":!0,...p})}}});function sx(e){var t,n,r="";if("string"==typeof e||"number"==typeof e)r+=e;else if("object"==typeof e)if(Array.isArray(e)){var a=e.length;for(t=0;t"boolean"==typeof e?`${e}`:0===e?"0":e,ux=e=>{const t=fx(e),{conflictingClassGroups:n,conflictingClassGroupModifiers:r}=e;return{getClassGroupId:e=>{const n=e.split("-");return""===n[0]&&1!==n.length&&n.shift(),dx(n,t)||hx(e)},getConflictingClassGroupIds:(e,t)=>{const a=n[e]||[];return t&&r[e]?[...a,...r[e]]:a}}},dx=(e,t)=>{if(0===e.length)return t.classGroupId;const n=e[0],r=t.nextPart.get(n),a=r?dx(e.slice(1),r):void 0;if(a)return a;if(0===t.validators.length)return;const o=e.join("-");return 
t.validators.find((({validator:e})=>e(o)))?.classGroupId},px=/^\[(.+)\]$/,hx=e=>{if(px.test(e)){const t=px.exec(e)[1],n=t?.substring(0,t.indexOf(":"));if(n)return"arbitrary.."+n}},fx=e=>{const{theme:t,prefix:n}=e,r={nextPart:new Map,validators:[]};return bx(Object.entries(e.classGroups),n).forEach((([e,n])=>{mx(n,r,e,t)})),r},mx=(e,t,n,r)=>{e.forEach((e=>{if("string"!=typeof e){if("function"==typeof e)return vx(e)?void mx(e(r),t,n,r):void t.validators.push({validator:e,classGroupId:n});Object.entries(e).forEach((([e,a])=>{mx(a,gx(t,e),n,r)}))}else(""===e?t:gx(t,e)).classGroupId=n}))},gx=(e,t)=>{let n=e;return t.split("-").forEach((e=>{n.nextPart.has(e)||n.nextPart.set(e,{nextPart:new Map,validators:[]}),n=n.nextPart.get(e)})),n},vx=e=>e.isThemeGetter,bx=(e,t)=>t?e.map((([e,n])=>[e,n.map((e=>"string"==typeof e?t+e:"object"==typeof e?Object.fromEntries(Object.entries(e).map((([e,n])=>[t+e,n]))):e))])):e,yx=e=>{if(e<1)return{get:()=>{},set:()=>{}};let t=0,n=new Map,r=new Map;const a=(a,o)=>{n.set(a,o),t++,t>e&&(t=0,r=n,n=new Map)};return{get(e){let t=n.get(e);return void 0!==t?t:void 0!==(t=r.get(e))?(a(e,t),t):void 0},set(e,t){n.has(e)?n.set(e,t):a(e,t)}}},Ox=e=>{const{separator:t,experimentalParseClassName:n}=e,r=1===t.length,a=t[0],o=t.length,i=e=>{const n=[];let i,s=0,l=0;for(let c=0;cl?i-l:void 0}};return n?e=>n({className:e,parseClassName:i}):i},wx=e=>{if(e.length<=1)return e;const t=[];let n=[];return e.forEach((e=>{"["===e[0]?(t.push(...n.sort(),e),n=[]):n.push(e)})),t.push(...n.sort()),t},xx=/\s+/;function kx(){let e,t,n=0,r="";for(;n{if("string"==typeof e)return e;let t,n="";for(let r=0;rt(e)),e());return n=(e=>({cache:yx(e.cacheSize),parseClassName:Ox(e),...ux(e)}))(l),r=n.cache.get,a=n.cache.set,o=i,i(s)};function i(e){const t=r(e);if(t)return t;const o=((e,t)=>{const{parseClassName:n,getClassGroupId:r,getConflictingClassGroupIds:a}=t,o=[],i=e.trim().split(xx);let s="";for(let e=i.length-1;e>=0;e-=1){const 
t=i[e],{modifiers:l,hasImportantModifier:c,baseClassName:u,maybePostfixModifierPosition:d}=n(t);let p=Boolean(d),h=r(p?u.substring(0,d):u);if(!h){if(!p){s=t+(s.length>0?" "+s:s);continue}if(h=r(u),!h){s=t+(s.length>0?" "+s:s);continue}p=!1}const f=wx(l).join(":"),m=c?f+"!":f,g=m+h;if(o.includes(g))continue;o.push(g);const v=a(h,p);for(let e=0;e0?" "+s:s)}return s})(e,n);return a(e,o),o}return function(){return o(kx.apply(null,arguments))}}const Tx=e=>{const t=t=>t[e]||[];return t.isThemeGetter=!0,t},Ex=/^\[(?:([a-z-]+):)?(.+)\]$/i,Ax=/^\d+\/\d+$/,Cx=new Set(["px","full","screen"]),$x=/^(\d+(\.\d+)?)?(xs|sm|md|lg|xl)$/,Px=/\d+(%|px|r?em|[sdl]?v([hwib]|min|max)|pt|pc|in|cm|mm|cap|ch|ex|r?lh|cq(w|h|i|b|min|max))|\b(calc|min|max|clamp)\(.+\)|^0$/,Dx=/^(rgba?|hsla?|hwb|(ok)?(lab|lch))\(.+\)$/,Ix=/^(inset_)?-?((\d+)?\.?(\d+)[a-z]+|0)_-?((\d+)?\.?(\d+)[a-z]+|0)/,Mx=/^(url|image|image-set|cross-fade|element|(repeating-)?(linear|radial|conic)-gradient)\(.+\)$/,Nx=e=>Lx(e)||Cx.has(e)||Ax.test(e),Rx=e=>Gx(e,"length",Yx),Lx=e=>Boolean(e)&&!Number.isNaN(Number(e)),Bx=e=>Gx(e,"number",Lx),jx=e=>Boolean(e)&&Number.isInteger(Number(e)),Ux=e=>e.endsWith("%")&&Lx(e.slice(0,-1)),zx=e=>Ex.test(e),Fx=e=>$x.test(e),Zx=new Set(["length","size","percentage"]),Qx=e=>Gx(e,Zx,Kx),Hx=e=>Gx(e,"position",Kx),Vx=new Set(["image","url"]),qx=e=>Gx(e,Vx,ek),Wx=e=>Gx(e,"",Jx),Xx=()=>!0,Gx=(e,t,n)=>{const r=Ex.exec(e);return!!r&&(r[1]?"string"==typeof t?r[1]===t:t.has(r[1]):n(r[2]))},Yx=e=>Px.test(e)&&!Dx.test(e),Kx=()=>!1,Jx=e=>Ix.test(e),ek=e=>Mx.test(e),tk=()=>{const 
e=Tx("colors"),t=Tx("spacing"),n=Tx("blur"),r=Tx("brightness"),a=Tx("borderColor"),o=Tx("borderRadius"),i=Tx("borderSpacing"),s=Tx("borderWidth"),l=Tx("contrast"),c=Tx("grayscale"),u=Tx("hueRotate"),d=Tx("invert"),p=Tx("gap"),h=Tx("gradientColorStops"),f=Tx("gradientColorStopPositions"),m=Tx("inset"),g=Tx("margin"),v=Tx("opacity"),b=Tx("padding"),y=Tx("saturate"),O=Tx("scale"),w=Tx("sepia"),x=Tx("skew"),k=Tx("space"),S=Tx("translate"),_=()=>["auto",zx,t],T=()=>[zx,t],E=()=>["",Nx,Rx],A=()=>["auto",Lx,zx],C=()=>["","0",zx],$=()=>[Lx,zx];return{cacheSize:500,separator:":",theme:{colors:[Xx],spacing:[Nx,Rx],blur:["none","",Fx,zx],brightness:$(),borderColor:[e],borderRadius:["none","","full",Fx,zx],borderSpacing:T(),borderWidth:E(),contrast:$(),grayscale:C(),hueRotate:$(),invert:C(),gap:T(),gradientColorStops:[e],gradientColorStopPositions:[Ux,Rx],inset:_(),margin:_(),opacity:$(),padding:T(),saturate:$(),scale:$(),sepia:C(),skew:$(),space:T(),translate:T()},classGroups:{aspect:[{aspect:["auto","square","video",zx]}],container:["container"],columns:[{columns:[Fx]}],"break-after":[{"break-after":["auto","avoid","all","avoid-page","page","left","right","column"]}],"break-before":[{"break-before":["auto","avoid","all","avoid-page","page","left","right","column"]}],"break-inside":[{"break-inside":["auto","avoid","avoid-page","avoid-column"]}],"box-decoration":[{"box-decoration":["slice","clone"]}],box:[{box:["border","content"]}],display:["block","inline-block","inline","flex","inline-flex","table","inline-table","table-caption","table-cell","table-column","table-column-group","table-footer-group","table-header-group","table-row-group","table-row","flow-root","grid","inline-grid","contents","list-item","hidden"],float:[{float:["right","left","none","start","end"]}],clear:[{clear:["left","right","both","none","start","end"]}],isolation:["isolate","isolation-auto"],"object-fit":[{object:["contain","cover","fill","none","scale-down"]}],"object-position":[{object:["bottom","cent
er","left","left-bottom","left-top","right","right-bottom","right-top","top",zx]}],overflow:[{overflow:["auto","hidden","clip","visible","scroll"]}],"overflow-x":[{"overflow-x":["auto","hidden","clip","visible","scroll"]}],"overflow-y":[{"overflow-y":["auto","hidden","clip","visible","scroll"]}],overscroll:[{overscroll:["auto","contain","none"]}],"overscroll-x":[{"overscroll-x":["auto","contain","none"]}],"overscroll-y":[{"overscroll-y":["auto","contain","none"]}],position:["static","fixed","absolute","relative","sticky"],inset:[{inset:[m]}],"inset-x":[{"inset-x":[m]}],"inset-y":[{"inset-y":[m]}],start:[{start:[m]}],end:[{end:[m]}],top:[{top:[m]}],right:[{right:[m]}],bottom:[{bottom:[m]}],left:[{left:[m]}],visibility:["visible","invisible","collapse"],z:[{z:["auto",jx,zx]}],basis:[{basis:_()}],"flex-direction":[{flex:["row","row-reverse","col","col-reverse"]}],"flex-wrap":[{flex:["wrap","wrap-reverse","nowrap"]}],flex:[{flex:["1","auto","initial","none",zx]}],grow:[{grow:C()}],shrink:[{shrink:C()}],order:[{order:["first","last","none",jx,zx]}],"grid-cols":[{"grid-cols":[Xx]}],"col-start-end":[{col:["auto",{span:["full",jx,zx]},zx]}],"col-start":[{"col-start":A()}],"col-end":[{"col-end":A()}],"grid-rows":[{"grid-rows":[Xx]}],"row-start-end":[{row:["auto",{span:[jx,zx]},zx]}],"row-start":[{"row-start":A()}],"row-end":[{"row-end":A()}],"grid-flow":[{"grid-flow":["row","col","dense","row-dense","col-dense"]}],"auto-cols":[{"auto-cols":["auto","min","max","fr",zx]}],"auto-rows":[{"auto-rows":["auto","min","max","fr",zx]}],gap:[{gap:[p]}],"gap-x":[{"gap-x":[p]}],"gap-y":[{"gap-y":[p]}],"justify-content":[{justify:["normal","start","end","center","between","around","evenly","stretch"]}],"justify-items":[{"justify-items":["start","end","center","stretch"]}],"justify-self":[{"justify-self":["auto","start","end","center","stretch"]}],"align-content":[{content:["normal","start","end","center","between","around","evenly","stretch","baseline"]}],"align-items":[{items:["start","e
nd","center","baseline","stretch"]}],"align-self":[{self:["auto","start","end","center","stretch","baseline"]}],"place-content":[{"place-content":["start","end","center","between","around","evenly","stretch","baseline"]}],"place-items":[{"place-items":["start","end","center","baseline","stretch"]}],"place-self":[{"place-self":["auto","start","end","center","stretch"]}],p:[{p:[b]}],px:[{px:[b]}],py:[{py:[b]}],ps:[{ps:[b]}],pe:[{pe:[b]}],pt:[{pt:[b]}],pr:[{pr:[b]}],pb:[{pb:[b]}],pl:[{pl:[b]}],m:[{m:[g]}],mx:[{mx:[g]}],my:[{my:[g]}],ms:[{ms:[g]}],me:[{me:[g]}],mt:[{mt:[g]}],mr:[{mr:[g]}],mb:[{mb:[g]}],ml:[{ml:[g]}],"space-x":[{"space-x":[k]}],"space-x-reverse":["space-x-reverse"],"space-y":[{"space-y":[k]}],"space-y-reverse":["space-y-reverse"],w:[{w:["auto","min","max","fit","svw","lvw","dvw",zx,t]}],"min-w":[{"min-w":[zx,t,"min","max","fit"]}],"max-w":[{"max-w":[zx,t,"none","full","min","max","fit","prose",{screen:[Fx]},Fx]}],h:[{h:[zx,t,"auto","min","max","fit","svh","lvh","dvh"]}],"min-h":[{"min-h":[zx,t,"min","max","fit","svh","lvh","dvh"]}],"max-h":[{"max-h":[zx,t,"min","max","fit","svh","lvh","dvh"]}],size:[{size:[zx,t,"auto","min","max","fit"]}],"font-size":[{text:["base",Fx,Rx]}],"font-smoothing":["antialiased","subpixel-antialiased"],"font-style":["italic","not-italic"],"font-weight":[{font:["thin","extralight","light","normal","medium","semibold","bold","extrabold","black",Bx]}],"font-family":[{font:[Xx]}],"fvn-normal":["normal-nums"],"fvn-ordinal":["ordinal"],"fvn-slashed-zero":["slashed-zero"],"fvn-figure":["lining-nums","oldstyle-nums"],"fvn-spacing":["proportional-nums","tabular-nums"],"fvn-fraction":["diagonal-fractions","stacked-fractions"],tracking:[{tracking:["tighter","tight","normal","wide","wider","widest",zx]}],"line-clamp":[{"line-clamp":["none",Lx,Bx]}],leading:[{leading:["none","tight","snug","normal","relaxed","loose",Nx,zx]}],"list-image":[{"list-image":["none",zx]}],"list-style-type":[{list:["none","disc","decimal",zx]}],"list-style-positio
n":[{list:["inside","outside"]}],"placeholder-color":[{placeholder:[e]}],"placeholder-opacity":[{"placeholder-opacity":[v]}],"text-alignment":[{text:["left","center","right","justify","start","end"]}],"text-color":[{text:[e]}],"text-opacity":[{"text-opacity":[v]}],"text-decoration":["underline","overline","line-through","no-underline"],"text-decoration-style":[{decoration:["solid","dashed","dotted","double","none","wavy"]}],"text-decoration-thickness":[{decoration:["auto","from-font",Nx,Rx]}],"underline-offset":[{"underline-offset":["auto",Nx,zx]}],"text-decoration-color":[{decoration:[e]}],"text-transform":["uppercase","lowercase","capitalize","normal-case"],"text-overflow":["truncate","text-ellipsis","text-clip"],"text-wrap":[{text:["wrap","nowrap","balance","pretty"]}],indent:[{indent:T()}],"vertical-align":[{align:["baseline","top","middle","bottom","text-top","text-bottom","sub","super",zx]}],whitespace:[{whitespace:["normal","nowrap","pre","pre-line","pre-wrap","break-spaces"]}],break:[{break:["normal","words","all","keep"]}],hyphens:[{hyphens:["none","manual","auto"]}],content:[{content:["none",zx]}],"bg-attachment":[{bg:["fixed","local","scroll"]}],"bg-clip":[{"bg-clip":["border","padding","content","text"]}],"bg-opacity":[{"bg-opacity":[v]}],"bg-origin":[{"bg-origin":["border","padding","content"]}],"bg-position":[{bg:["bottom","center","left","left-bottom","left-top","right","right-bottom","right-top","top",Hx]}],"bg-repeat":[{bg:["no-repeat",{repeat:["","x","y","round","space"]}]}],"bg-size":[{bg:["auto","cover","contain",Qx]}],"bg-image":[{bg:["none",{"gradient-to":["t","tr","r","br","b","bl","l","tl"]},qx]}],"bg-color":[{bg:[e]}],"gradient-from-pos":[{from:[f]}],"gradient-via-pos":[{via:[f]}],"gradient-to-pos":[{to:[f]}],"gradient-from":[{from:[h]}],"gradient-via":[{via:[h]}],"gradient-to":[{to:[h]}],rounded:[{rounded:[o]}],"rounded-s":[{"rounded-s":[o]}],"rounded-e":[{"rounded-e":[o]}],"rounded-t":[{"rounded-t":[o]}],"rounded-r":[{"rounded-r":[o]}],"ro
unded-b":[{"rounded-b":[o]}],"rounded-l":[{"rounded-l":[o]}],"rounded-ss":[{"rounded-ss":[o]}],"rounded-se":[{"rounded-se":[o]}],"rounded-ee":[{"rounded-ee":[o]}],"rounded-es":[{"rounded-es":[o]}],"rounded-tl":[{"rounded-tl":[o]}],"rounded-tr":[{"rounded-tr":[o]}],"rounded-br":[{"rounded-br":[o]}],"rounded-bl":[{"rounded-bl":[o]}],"border-w":[{border:[s]}],"border-w-x":[{"border-x":[s]}],"border-w-y":[{"border-y":[s]}],"border-w-s":[{"border-s":[s]}],"border-w-e":[{"border-e":[s]}],"border-w-t":[{"border-t":[s]}],"border-w-r":[{"border-r":[s]}],"border-w-b":[{"border-b":[s]}],"border-w-l":[{"border-l":[s]}],"border-opacity":[{"border-opacity":[v]}],"border-style":[{border:["solid","dashed","dotted","double","none","hidden"]}],"divide-x":[{"divide-x":[s]}],"divide-x-reverse":["divide-x-reverse"],"divide-y":[{"divide-y":[s]}],"divide-y-reverse":["divide-y-reverse"],"divide-opacity":[{"divide-opacity":[v]}],"divide-style":[{divide:["solid","dashed","dotted","double","none"]}],"border-color":[{border:[a]}],"border-color-x":[{"border-x":[a]}],"border-color-y":[{"border-y":[a]}],"border-color-s":[{"border-s":[a]}],"border-color-e":[{"border-e":[a]}],"border-color-t":[{"border-t":[a]}],"border-color-r":[{"border-r":[a]}],"border-color-b":[{"border-b":[a]}],"border-color-l":[{"border-l":[a]}],"divide-color":[{divide:[a]}],"outline-style":[{outline:["","solid","dashed","dotted","double","none"]}],"outline-offset":[{"outline-offset":[Nx,zx]}],"outline-w":[{outline:[Nx,Rx]}],"outline-color":[{outline:[e]}],"ring-w":[{ring:E()}],"ring-w-inset":["ring-inset"],"ring-color":[{ring:[e]}],"ring-opacity":[{"ring-opacity":[v]}],"ring-offset-w":[{"ring-offset":[Nx,Rx]}],"ring-offset-color":[{"ring-offset":[e]}],shadow:[{shadow:["","inner","none",Fx,Wx]}],"shadow-color":[{shadow:[Xx]}],opacity:[{opacity:[v]}],"mix-blend":[{"mix-blend":["normal","multiply","screen","overlay","darken","lighten","color-dodge","color-burn","hard-light","soft-light","difference","exclusion","hue","saturation
","color","luminosity","plus-lighter","plus-darker"]}],"bg-blend":[{"bg-blend":["normal","multiply","screen","overlay","darken","lighten","color-dodge","color-burn","hard-light","soft-light","difference","exclusion","hue","saturation","color","luminosity"]}],filter:[{filter:["","none"]}],blur:[{blur:[n]}],brightness:[{brightness:[r]}],contrast:[{contrast:[l]}],"drop-shadow":[{"drop-shadow":["","none",Fx,zx]}],grayscale:[{grayscale:[c]}],"hue-rotate":[{"hue-rotate":[u]}],invert:[{invert:[d]}],saturate:[{saturate:[y]}],sepia:[{sepia:[w]}],"backdrop-filter":[{"backdrop-filter":["","none"]}],"backdrop-blur":[{"backdrop-blur":[n]}],"backdrop-brightness":[{"backdrop-brightness":[r]}],"backdrop-contrast":[{"backdrop-contrast":[l]}],"backdrop-grayscale":[{"backdrop-grayscale":[c]}],"backdrop-hue-rotate":[{"backdrop-hue-rotate":[u]}],"backdrop-invert":[{"backdrop-invert":[d]}],"backdrop-opacity":[{"backdrop-opacity":[v]}],"backdrop-saturate":[{"backdrop-saturate":[y]}],"backdrop-sepia":[{"backdrop-sepia":[w]}],"border-collapse":[{border:["collapse","separate"]}],"border-spacing":[{"border-spacing":[i]}],"border-spacing-x":[{"border-spacing-x":[i]}],"border-spacing-y":[{"border-spacing-y":[i]}],"table-layout":[{table:["auto","fixed"]}],caption:[{caption:["top","bottom"]}],transition:[{transition:["none","all","","colors","opacity","shadow","transform",zx]}],duration:[{duration:$()}],ease:[{ease:["linear","in","out","in-out",zx]}],delay:[{delay:$()}],animate:[{animate:["none","spin","ping","pulse","bounce",zx]}],transform:[{transform:["","gpu","none"]}],scale:[{scale:[O]}],"scale-x":[{"scale-x":[O]}],"scale-y":[{"scale-y":[O]}],rotate:[{rotate:[jx,zx]}],"translate-x":[{"translate-x":[S]}],"translate-y":[{"translate-y":[S]}],"skew-x":[{"skew-x":[x]}],"skew-y":[{"skew-y":[x]}],"transform-origin":[{origin:["center","top","top-right","right","bottom-right","bottom","bottom-left","left","top-left",zx]}],accent:[{accent:["auto",e]}],appearance:[{appearance:["none","auto"]}],cursor:[
{cursor:["auto","default","pointer","wait","text","move","help","not-allowed","none","context-menu","progress","cell","crosshair","vertical-text","alias","copy","no-drop","grab","grabbing","all-scroll","col-resize","row-resize","n-resize","e-resize","s-resize","w-resize","ne-resize","nw-resize","se-resize","sw-resize","ew-resize","ns-resize","nesw-resize","nwse-resize","zoom-in","zoom-out",zx]}],"caret-color":[{caret:[e]}],"pointer-events":[{"pointer-events":["none","auto"]}],resize:[{resize:["none","y","x",""]}],"scroll-behavior":[{scroll:["auto","smooth"]}],"scroll-m":[{"scroll-m":T()}],"scroll-mx":[{"scroll-mx":T()}],"scroll-my":[{"scroll-my":T()}],"scroll-ms":[{"scroll-ms":T()}],"scroll-me":[{"scroll-me":T()}],"scroll-mt":[{"scroll-mt":T()}],"scroll-mr":[{"scroll-mr":T()}],"scroll-mb":[{"scroll-mb":T()}],"scroll-ml":[{"scroll-ml":T()}],"scroll-p":[{"scroll-p":T()}],"scroll-px":[{"scroll-px":T()}],"scroll-py":[{"scroll-py":T()}],"scroll-ps":[{"scroll-ps":T()}],"scroll-pe":[{"scroll-pe":T()}],"scroll-pt":[{"scroll-pt":T()}],"scroll-pr":[{"scroll-pr":T()}],"scroll-pb":[{"scroll-pb":T()}],"scroll-pl":[{"scroll-pl":T()}],"snap-align":[{snap:["start","end","center","align-none"]}],"snap-stop":[{snap:["normal","always"]}],"snap-type":[{snap:["none","x","y","both"]}],"snap-strictness":[{snap:["mandatory","proximity"]}],touch:[{touch:["auto","none","manipulation"]}],"touch-x":[{"touch-pan":["x","left","right"]}],"touch-y":[{"touch-pan":["y","up","down"]}],"touch-pz":["touch-pinch-zoom"],select:[{select:["none","text","all","auto"]}],"will-change":[{"will-change":["auto","scroll","contents","transform",zx]}],fill:[{fill:[e,"none"]}],"stroke-w":[{stroke:[Nx,Rx,Bx]}],stroke:[{stroke:[e,"none"]}],sr:["sr-only","not-sr-only"],"forced-color-adjust":[{"forced-color-adjust":["auto","none"]}]},conflictingClassGroups:{overflow:["overflow-x","overflow-y"],overscroll:["overscroll-x","overscroll-y"],inset:["inset-x","inset-y","start","end","top","right","bottom","left"],"inset-x":["r
ight","left"],"inset-y":["top","bottom"],flex:["basis","grow","shrink"],gap:["gap-x","gap-y"],p:["px","py","ps","pe","pt","pr","pb","pl"],px:["pr","pl"],py:["pt","pb"],m:["mx","my","ms","me","mt","mr","mb","ml"],mx:["mr","ml"],my:["mt","mb"],size:["w","h"],"font-size":["leading"],"fvn-normal":["fvn-ordinal","fvn-slashed-zero","fvn-figure","fvn-spacing","fvn-fraction"],"fvn-ordinal":["fvn-normal"],"fvn-slashed-zero":["fvn-normal"],"fvn-figure":["fvn-normal"],"fvn-spacing":["fvn-normal"],"fvn-fraction":["fvn-normal"],"line-clamp":["display","overflow"],rounded:["rounded-s","rounded-e","rounded-t","rounded-r","rounded-b","rounded-l","rounded-ss","rounded-se","rounded-ee","rounded-es","rounded-tl","rounded-tr","rounded-br","rounded-bl"],"rounded-s":["rounded-ss","rounded-es"],"rounded-e":["rounded-se","rounded-ee"],"rounded-t":["rounded-tl","rounded-tr"],"rounded-r":["rounded-tr","rounded-br"],"rounded-b":["rounded-br","rounded-bl"],"rounded-l":["rounded-tl","rounded-bl"],"border-spacing":["border-spacing-x","border-spacing-y"],"border-w":["border-w-s","border-w-e","border-w-t","border-w-r","border-w-b","border-w-l"],"border-w-x":["border-w-r","border-w-l"],"border-w-y":["border-w-t","border-w-b"],"border-color":["border-color-s","border-color-e","border-color-t","border-color-r","border-color-b","border-color-l"],"border-color-x":["border-color-r","border-color-l"],"border-color-y":["border-color-t","border-color-b"],"scroll-m":["scroll-mx","scroll-my","scroll-ms","scroll-me","scroll-mt","scroll-mr","scroll-mb","scroll-ml"],"scroll-mx":["scroll-mr","scroll-ml"],"scroll-my":["scroll-mt","scroll-mb"],"scroll-p":["scroll-px","scroll-py","scroll-ps","scroll-pe","scroll-pt","scroll-pr","scroll-pb","scroll-pl"],"scroll-px":["scroll-pr","scroll-pl"],"scroll-py":["scroll-pt","scroll-pb"],touch:["touch-x","touch-y","touch-pz"],"touch-x":["touch"],"touch-y":["touch"],"touch-pz":["touch"]},conflictingClassGroupModifiers:{"font-size":["leading"]}}},nk=(e,t,n)=>{void 
0!==n&&(e[t]=n)},rk=(e,t)=>{if(t)for(const n in t)nk(e,n,t[n])},ak=(e,t)=>{if(t)for(const n in t){const r=t[n];void 0!==r&&(e[n]=(e[n]||[]).concat(r))}},ok=((e,...t)=>"function"==typeof e?_x(tk,e,...t):_x((()=>((e,{cacheSize:t,prefix:n,separator:r,experimentalParseClassName:a,extend:o={},override:i={}})=>{nk(e,"cacheSize",t),nk(e,"prefix",n),nk(e,"separator",r),nk(e,"experimentalParseClassName",a);for(const t in i)rk(e[t],i[t]);for(const t in o)ak(e[t],o[t]);return e})(tk(),e)),...t))({extend:{classGroups:{"font-size":["text-3xs","text-xxs"],"font-weight":["font-sidebar","font-sidebar-active"]}}}),{cva:ik,cx:sk}=(e=>{const t=function(){for(var t=arguments.length,n=new Array(t),r=0;r{const r=Object.fromEntries(Object.entries(e||{}).filter((e=>{let[t]=e;return!["class","className"].includes(t)})));return t(n.map((e=>e(r))),null==e?void 0:e.class,null==e?void 0:e.className)}},cva:e=>n=>{var r;if(null==(null==e?void 0:e.variants))return t(null==e?void 0:e.base,null==n?void 0:n.class,null==n?void 0:n.className);const{variants:a,defaultVariants:o}=e,i=Object.keys(a).map((e=>{const t=null==n?void 0:n[e],r=null==o?void 0:o[e],i=cx(t)||cx(r);return a[e][i]})),s={...o,...n&&Object.entries(n).reduce(((e,t)=>{let[n,r]=t;return void 0===r?e:{...e,[n]:r}}),{})},l=null==e||null===(r=e.compoundVariants)||void 0===r?void 0:r.reduce(((e,t)=>{let{class:n,className:r,...a}=t;return Object.entries(a).every((e=>{let[t,n]=e;const r=s[t];return Array.isArray(n)?n.includes(r):r===n}))?[...e,n,r]:e}),[]);return t(null==e?void 0:e.base,i,l,null==n?void 0:n.class,null==n?void 0:n.className)},cx:t}})({hooks:{onComplete:e=>ok(e)}});function lk(){const e=wv(),t=cy((()=>{const{class:t,style:n,...r}=e;return{class:t||"",style:n,rest:r}}));return{cx:function(...e){return{class:sk(...e,t.value.class),style:t.value.style,...t.value.rest}},stylingAttrsCx:function(...e){return{class:sk(...e,t.value.class),style:t.value.style}},otherAttrs:cy((()=>t.value.rest))}}const 
ck={solid:["scalar-button-solid","bg-b-btn text-c-btn focus-visible:border-c-btn active:bg-b-btn hover:bg-h-btn outline-offset-1"],outlined:["scalar-button-outlined","active:bg-btn-1 border border-solid border-border bg-b-1 text-c-1 hover:bg-b-2"],ghost:["scalar-button-ghost","bg-transparent text-c-3 active:text-c-1 hover:text-c-1"],gradient:["scalar-button-gradient","border bg-b-1.5 bg-linear-to-b from-b-1 to-b-2 hover:bg-linear-to-t","dark:bg-linear-to-t dark:hover:bg-linear-to-b"],danger:["scalar-button-danger","bg-c-danger text-white active:brightness-90 hover:brightness-90"]},uk={class:"circular-loader"},dk=Mg({inheritAttrs:!1,__name:"ScalarLoading",props:{loader:{},size:{}},setup(e){const{cx:t}=lk(),n=ik({variants:{size:{xs:"size-3",sm:"size-3.5",md:"size-4",lg:"size-5",xl:"size-6","2xl":"size-8","3xl":"size-10",full:"size-full"}},defaultVariants:{size:"full"}});return(r,a)=>e.loader?(Tb(),$b("div",Eh(Vb({key:0},fm(t)("loader-wrapper",fm(n)({size:e.size})))),[(Tb(),$b("svg",{class:Th(["svg-loader",{"icon-is-valid":e.loader.isValid,"icon-is-invalid":e.loader.isInvalid}]),viewBox:"0 0 100 100",xmlns:"http://www.w3.org/2000/svg","xmlns:xlink":"http://www.w3.org/1999/xlink"},[a[0]||(a[0]=zb('',5)),Rb("g",uk,[Rb("circle",{class:Th(["loader-path",{"loader-path-off":!e.loader.isLoading}]),cx:"50",cy:"50",fill:"none",r:"20","stroke-width":"3"},null,2)])],2))],16)):Fb("",!0)}}),pk=(e,t)=>{const n=e.__vccOpts||e;for(const[e,r]of t)n[e]=r;return n},hk=pk(dk,[["__scopeId","data-v-27df5cd8"]]),fk={key:3,class:"centered"},mk=Mg({inheritAttrs:!1,__name:"ScalarButton",props:{is:{default:"button"},variant:{default:"solid"},size:{default:"md"},disabled:{type:Boolean},icon:{type:[Object,Function]},loader:{}},setup(e){const t=ik({base:"scalar-button flex cursor-pointer items-center justify-center rounded font-medium -outline-offset-1",variants:{disabled:{true:"bg-b-2 text-color-3 shadow-none"},size:{xs:"px-2 py-1 text-xs leading-5",sm:"px-3.5 py-2 text-sm leading-5",md:"px-5 
py-3 text-sm leading-5"},variant:ck},compoundVariants:[{disabled:!0,variant:["solid","outlined","ghost","gradient","danger"],class:"bg-b-2 text-c-3 shadow-none hover:bg-b-[_] cursor-not-allowed active:bg-b-[_] hover:text-c-[_] active:text-c-[_]"},{disabled:!0,variant:["gradient"],class:"to-b-1.5 bg-linear-to-b hover:bg-linear-to-b dark:hover:bg-linear-to-t"}]}),n=ik({base:"shrink-0",variants:{size:{xs:"size-2.75 -ml-0.25 mr-1",sm:"size-3.25 -ml-0.5 mr-1.5",md:"size-3.5 -ml-0.5 mr-1.5"}}}),r=ik({variants:{size:{xs:"size-4",sm:"size-5",md:"size-6"}}}),{cx:a}=lk();return(o,i)=>(Tb(),Pb(lv(e.is),Vb({"aria-disabled":e.disabled||void 0,type:"button"===e.is?"button":void 0},fm(a)(fm(t)({disabled:e.disabled,size:e.size,variant:e.variant}),{relative:e.loader?.isActive})),{default:Ym((()=>[o.$slots.icon||e.icon?(Tb(),$b("div",{key:0,class:Th([fm(n)({size:e.size}),{invisible:e.loader?.isActive}])},[hv(o.$slots,"icon",{},(()=>[(Tb(),Pb(lv(e.icon),{class:"size-full"}))]))],2)):Fb("",!0),e.loader?(Tb(),$b("span",{key:1,class:Th({invisible:e.loader?.isActive})},[hv(o.$slots,"default")],2)):hv(o.$slots,"default",{key:2}),e.loader?.isActive?(Tb(),$b("div",fk,[Lb(fm(hk),{class:Th(fm(r)({size:e.size})),loader:e.loader},null,8,["class","loader"])])):Fb("",!0)])),_:3},16,["aria-disabled","type"]))}}),gk=Symbol(),vk=Mg({inheritAttrs:!1,__name:"ScalarCard",props:{label:{}},setup(e){const{id:t}=(()=>{const e=um();return eg(gk,e),{id:e}})(),n=cy((()=>e.label?{"aria-label":e.label}:t.value?{"aria-labelledby":t.value}:{})),{cx:r}=lk();return(e,t)=>(Tb(),$b("div",Vb({role:"group"},{...n.value,...fm(r)("scalar-card bg-b-2 flex flex-col divide-y rounded-lg border *:first:rounded-t-[inherit] *:last:rounded-b-[inherit]")}),[hv(e.$slots,"default")],16))}}),bk=Mg({inheritAttrs:!1,__name:"ScalarCardSection",setup(e){const{cx:t}=lk();return(e,n)=>(Tb(),$b("div",Eh(Bb(fm(t)("scalar-card-content flex 
overflow-auto"))),[hv(e.$slots,"default")],16))}}),yk=Mg({inheritAttrs:!1,__name:"ScalarCardFooter",setup(e){const{cx:t}=lk();return(e,n)=>(Tb(),Pb(bk,Eh(Bb(fm(t)("scalar-card-footer"))),{default:Ym((()=>[hv(e.$slots,"default")])),_:3},16))}}),Ok=["id"],wk={key:0,class:"flex"},xk=Mg({inheritAttrs:!1,__name:"ScalarCardHeader",setup(e){const{cx:t}=lk(),n=Ng();return(e=>{const t=tg(gk,void 0);t&&(t.value=e)})(n),(e,r)=>(Tb(),Pb(bk,Eh(Bb(fm(t)("scalar-card-header leading-[22px] font-medium py-[6.75px] px-3 shrink-0"))),{default:Ym((()=>[Rb("div",{id:fm(n),class:"scalar-card-header-title min-w-0 flex-1 truncate"},[hv(e.$slots,"default")],8,Ok),e.$slots.actions?(Tb(),$b("div",wk,[hv(e.$slots,"actions")])):Fb("",!0)])),_:3},16))}});function kk(e={}){const t=cy((()=>e.label?{"aria-label":e.label}:{"aria-hidden":!0,role:"presentation"}));return{bind:cy((()=>({width:"1em",height:"1em",...t.value}))),weight:cy((()=>e.weight??"regular"))}}const Sk={key:0},_k={key:1},Tk={key:2},Ek={key:3},Ak={key:4},Ck={key:5},$k=Mg({name:"ScalarIconArrowUpRight",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 
256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",Sk,[...t[0]||(t[0]=[Rb("path",{d:"M204,64V168a12,12,0,0,1-24,0V93L72.49,200.49a12,12,0,0,1-17-17L163,76H88a12,12,0,0,1,0-24H192A12,12,0,0,1,204,64Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",_k,[...t[1]||(t[1]=[Rb("path",{d:"M192,64V168L88,64Z",opacity:"0.2"},null,-1),Rb("path",{d:"M192,56H88a8,8,0,0,0-5.66,13.66L128.69,116,58.34,186.34a8,8,0,0,0,11.32,11.32L140,127.31l46.34,46.35A8,8,0,0,0,200,168V64A8,8,0,0,0,192,56Zm-8,92.69-38.34-38.34h0L107.31,72H184Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",Tk,[...t[2]||(t[2]=[Rb("path",{d:"M200,64V168a8,8,0,0,1-13.66,5.66L140,127.31,69.66,197.66a8,8,0,0,1-11.32-11.32L128.69,116,82.34,69.66A8,8,0,0,1,88,56H192A8,8,0,0,1,200,64Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",Ek,[...t[3]||(t[3]=[Rb("path",{d:"M198,64V168a6,6,0,0,1-12,0V78.48L68.24,196.24a6,6,0,0,1-8.48-8.48L177.52,70H88a6,6,0,0,1,0-12H192A6,6,0,0,1,198,64Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",Ak,[...t[4]||(t[4]=[Rb("path",{d:"M200,64V168a8,8,0,0,1-16,0V83.31L69.66,197.66a8,8,0,0,1-11.32-11.32L172.69,72H88a8,8,0,0,1,0-16H192A8,8,0,0,1,200,64Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",Ck,[...t[5]||(t[5]=[Rb("path",{d:"M196,64V168a4,4,0,0,1-8,0V73.66L66.83,194.83a4,4,0,0,1-5.66-5.66L182.34,68H88a4,4,0,0,1,0-8H192A4,4,0,0,1,196,64Z"},null,-1)])])):Fb("",!0)],16))}}),Pk={key:0},Dk={key:1},Ik={key:2},Mk={key:3},Nk={key:4},Rk={key:5},Lk=Mg({name:"ScalarIconBook",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 
256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",Pk,[...t[0]||(t[0]=[Rb("path",{d:"M208,20H72A36,36,0,0,0,36,56V224a12,12,0,0,0,12,12H192a12,12,0,0,0,0-24H60v-4a12,12,0,0,1,12-12H208a12,12,0,0,0,12-12V32A12,12,0,0,0,208,20ZM196,172H72a35.59,35.59,0,0,0-12,2.06V56A12,12,0,0,1,72,44H196Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",Dk,[...t[1]||(t[1]=[Rb("path",{d:"M208,32V192H72a24,24,0,0,0-24,24V56A24,24,0,0,1,72,32Z",opacity:"0.2"},null,-1),Rb("path",{d:"M208,24H72A32,32,0,0,0,40,56V224a8,8,0,0,0,8,8H192a8,8,0,0,0,0-16H56a16,16,0,0,1,16-16H208a8,8,0,0,0,8-8V32A8,8,0,0,0,208,24Zm-8,160H72a31.82,31.82,0,0,0-16,4.29V56A16,16,0,0,1,72,40H200Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",Ik,[...t[2]||(t[2]=[Rb("path",{d:"M216,32V192a8,8,0,0,1-8,8H72a16,16,0,0,0-16,16H192a8,8,0,0,1,0,16H48a8,8,0,0,1-8-8V56A32,32,0,0,1,72,24H208A8,8,0,0,1,216,32Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",Mk,[...t[3]||(t[3]=[Rb("path",{d:"M208,26H72A30,30,0,0,0,42,56V224a6,6,0,0,0,6,6H192a6,6,0,0,0,0-12H54v-2a18,18,0,0,1,18-18H208a6,6,0,0,0,6-6V32A6,6,0,0,0,208,26Zm-6,160H72a29.87,29.87,0,0,0-18,6V56A18,18,0,0,1,72,38H202Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",Nk,[...t[4]||(t[4]=[Rb("path",{d:"M208,24H72A32,32,0,0,0,40,56V224a8,8,0,0,0,8,8H192a8,8,0,0,0,0-16H56a16,16,0,0,1,16-16H208a8,8,0,0,0,8-8V32A8,8,0,0,0,208,24Zm-8,160H72a31.82,31.82,0,0,0-16,4.29V56A16,16,0,0,1,72,40H200Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",Rk,[...t[5]||(t[5]=[Rb("path",{d:"M208,28H72A28,28,0,0,0,44,56V224a4,4,0,0,0,4,4H192a4,4,0,0,0,0-8H52v-4a20,20,0,0,1,20-20H208a4,4,0,0,0,4-4V32A4,4,0,0,0,208,28Zm-4,160H72a27.94,27.94,0,0,0-20,8.42V56A20,20,0,0,1,72,36H204Z"},null,-1)])])):Fb("",!0)],16))}}),Bk={key:0},jk={key:1},Uk={key:2},zk={key:3},Fk={key:4},Zk={key:5},Qk=Mg({name:"ScalarIconBookOpenText",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 
256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",Bk,[...t[0]||(t[0]=[Rb("path",{d:"M232,44H160a43.86,43.86,0,0,0-32,13.85A43.86,43.86,0,0,0,96,44H24A12,12,0,0,0,12,56V200a12,12,0,0,0,12,12H96a20,20,0,0,1,20,20,12,12,0,0,0,24,0,20,20,0,0,1,20-20h72a12,12,0,0,0,12-12V56A12,12,0,0,0,232,44ZM96,188H36V68H96a20,20,0,0,1,20,20V192.81A43.79,43.79,0,0,0,96,188Zm124,0H160a43.71,43.71,0,0,0-20,4.83V88a20,20,0,0,1,20-20h60ZM164,96h32a12,12,0,0,1,0,24H164a12,12,0,0,1,0-24Zm44,52a12,12,0,0,1-12,12H164a12,12,0,0,1,0-24h32A12,12,0,0,1,208,148Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",jk,[...t[1]||(t[1]=[Rb("path",{d:"M232,56V200H160a32,32,0,0,0-32,32V88a32,32,0,0,1,32-32Z",opacity:"0.2"},null,-1),Rb("path",{d:"M232,48H160a40,40,0,0,0-32,16A40,40,0,0,0,96,48H24a8,8,0,0,0-8,8V200a8,8,0,0,0,8,8H96a24,24,0,0,1,24,24,8,8,0,0,0,16,0,24,24,0,0,1,24-24h72a8,8,0,0,0,8-8V56A8,8,0,0,0,232,48ZM96,192H32V64H96a24,24,0,0,1,24,24V200A39.81,39.81,0,0,0,96,192Zm128,0H160a39.81,39.81,0,0,0-24,8V88a24,24,0,0,1,24-24h64ZM160,88h40a8,8,0,0,1,0,16H160a8,8,0,0,1,0-16Zm48,40a8,8,0,0,1-8,8H160a8,8,0,0,1,0-16h40A8,8,0,0,1,208,128Zm0,32a8,8,0,0,1-8,8H160a8,8,0,0,1,0-16h40A8,8,0,0,1,208,160Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",Uk,[...t[2]||(t[2]=[Rb("path",{d:"M232,48H168a32,32,0,0,0-32,32v87.73a8.17,8.17,0,0,1-7.47,8.25,8,8,0,0,1-8.53-8V80A32,32,0,0,0,88,48H24a8,8,0,0,0-8,8V200a8,8,0,0,0,8,8H96a24,24,0,0,1,24,23.94,7.9,7.9,0,0,0,5.12,7.55A8,8,0,0,0,136,232a24,24,0,0,1,24-24h72a8,8,0,0,0,8-8V56A8,8,0,0,0,232,48ZM208,168H168.27a8.17,8.17,0,0,1-8.25-7.47,8,8,0,0,1,8-8.53h39.73a8.17,8.17,0,0,1,8.25,7.47A8,8,0,0,1,208,168Zm0-32H168.27a8.17,8.17,0,0,1-8.25-7.47,8,8,0,0,1,8-8.53h39.73a8.17,8.17,0,0,1,8.25,7.47A8,8,0,0,1,208,136Zm0-32H168.27A8.17,8.17,0,0,1,160,96.53,8,8,0,0,1,168,88h39.73A8.17,8.17,0,0,1,216,95.47,8,8,0,0,1,208,104Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",zk,[...t[3]||(t[3]=[Rb("path",{d:"M232,50H160a38,38,0,0,0-32,17.55A38,38,0,0,0,96,50H24a6
,6,0,0,0-6,6V200a6,6,0,0,0,6,6H96a26,26,0,0,1,26,26,6,6,0,0,0,12,0,26,26,0,0,1,26-26h72a6,6,0,0,0,6-6V56A6,6,0,0,0,232,50ZM96,194H30V62H96a26,26,0,0,1,26,26V204.31A37.86,37.86,0,0,0,96,194Zm130,0H160a37.87,37.87,0,0,0-26,10.32V88a26,26,0,0,1,26-26h66ZM160,90h40a6,6,0,0,1,0,12H160a6,6,0,0,1,0-12Zm46,38a6,6,0,0,1-6,6H160a6,6,0,0,1,0-12h40A6,6,0,0,1,206,128Zm0,32a6,6,0,0,1-6,6H160a6,6,0,0,1,0-12h40A6,6,0,0,1,206,160Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",Fk,[...t[4]||(t[4]=[Rb("path",{d:"M232,48H160a40,40,0,0,0-32,16A40,40,0,0,0,96,48H24a8,8,0,0,0-8,8V200a8,8,0,0,0,8,8H96a24,24,0,0,1,24,24,8,8,0,0,0,16,0,24,24,0,0,1,24-24h72a8,8,0,0,0,8-8V56A8,8,0,0,0,232,48ZM96,192H32V64H96a24,24,0,0,1,24,24V200A39.81,39.81,0,0,0,96,192Zm128,0H160a39.81,39.81,0,0,0-24,8V88a24,24,0,0,1,24-24h64ZM160,88h40a8,8,0,0,1,0,16H160a8,8,0,0,1,0-16Zm48,40a8,8,0,0,1-8,8H160a8,8,0,0,1,0-16h40A8,8,0,0,1,208,128Zm0,32a8,8,0,0,1-8,8H160a8,8,0,0,1,0-16h40A8,8,0,0,1,208,160Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",Zk,[...t[5]||(t[5]=[Rb("path",{d:"M232,52H160a36,36,0,0,0-32,19.54A36,36,0,0,0,96,52H24a4,4,0,0,0-4,4V200a4,4,0,0,0,4,4H96a28,28,0,0,1,28,28,4,4,0,0,0,8,0,28,28,0,0,1,28-28h72a4,4,0,0,0,4-4V56A4,4,0,0,0,232,52ZM96,196H28V60H96a28,28,0,0,1,28,28V209.4A35.94,35.94,0,0,0,96,196Zm132,0H160a35.94,35.94,0,0,0-28,13.41V88a28,28,0,0,1,28-28h68ZM160,92h40a4,4,0,0,1,0,8H160a4,4,0,0,1,0-8Zm44,36a4,4,0,0,1-4,4H160a4,4,0,0,1,0-8h40A4,4,0,0,1,204,128Zm0,32a4,4,0,0,1-4,4H160a4,4,0,0,1,0-8h40A4,4,0,0,1,204,160Z"},null,-1)])])):Fb("",!0)],16))}}),Hk={key:0},Vk={key:1},qk={key:2},Wk={key:3},Xk={key:4},Gk={key:5},Yk=Mg({name:"ScalarIconBracketsCurly",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 
256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",Hk,[...t[0]||(t[0]=[Rb("path",{d:"M54.8,119.49A35.06,35.06,0,0,1,49.05,128a35.06,35.06,0,0,1,5.75,8.51C60,147.24,60,159.83,60,172c0,25.94,1.84,32,20,32a12,12,0,0,1,0,24c-19.14,0-32.2-6.9-38.8-20.51C36,196.76,36,184.17,36,172c0-25.94-1.84-32-20-32a12,12,0,0,1,0-24c18.16,0,20-6.06,20-32,0-12.17,0-24.76,5.2-35.49C47.8,34.9,60.86,28,80,28a12,12,0,0,1,0,24c-18.16,0-20,6.06-20,32C60,96.17,60,108.76,54.8,119.49ZM240,116c-18.16,0-20-6.06-20-32,0-12.17,0-24.76-5.2-35.49C208.2,34.9,195.14,28,176,28a12,12,0,0,0,0,24c18.16,0,20,6.06,20,32,0,12.17,0,24.76,5.2,35.49A35.06,35.06,0,0,0,207,128a35.06,35.06,0,0,0-5.75,8.51C196,147.24,196,159.83,196,172c0,25.94-1.84,32-20,32a12,12,0,0,0,0,24c19.14,0,32.2-6.9,38.8-20.51C220,196.76,220,184.17,220,172c0-25.94,1.84-32,20-32a12,12,0,0,0,0-24Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",Vk,[...t[1]||(t[1]=[Rb("path",{d:"M240,128c-64,0,0,88-64,88H80c-64,0,0-88-64-88,64,0,0-88,64-88h96C240,40,176,128,240,128Z",opacity:"0.2"},null,-1),Rb("path",{d:"M43.18,128a29.78,29.78,0,0,1,8,10.26c4.8,9.9,4.8,22,4.8,33.74,0,24.31,1,36,24,36a8,8,0,0,1,0,16c-17.48,0-29.32-6.14-35.2-18.26-4.8-9.9-4.8-22-4.8-33.74,0-24.31-1-36-24-36a8,8,0,0,1,0-16c23,0,24-11.69,24-36,0-11.72,0-23.84,4.8-33.74C50.68,38.14,62.52,32,80,32a8,8,0,0,1,0,16C57,48,56,59.69,56,84c0,11.72,0,23.84-4.8,33.74A29.78,29.78,0,0,1,43.18,128ZM240,120c-23,0-24-11.69-24-36,0-11.72,0-23.84-4.8-33.74C205.32,38.14,193.48,32,176,32a8,8,0,0,0,0,16c23,0,24,11.69,24,36,0,11.72,0,23.84,4.8,33.74a29.78,29.78,0,0,0,8,10.26,29.78,29.78,0,0,0-8,10.26c-4.8,9.9-4.8,22-4.8,33.74,0,24.31-1,36-24,36a8,8,0,0,0,0,16c17.48,0,29.32-6.14,35.2-18.26,4.8-9.9,4.8-22,4.8-33.74,0-24.31,1-36,24-36a8,8,0,0,0,0-16Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",qk,[...t[2]||(t[2]=[Rb("path",{d:"M216,40H40A16,16,0,0,0,24,56V200a16,16,0,0,0,16,16H216a16,16,0,0,0,16-16V56A16,16,0,0,0,216,40ZM88,155.84c.29,14.26.41,20.16,16,20.16a8,8,0,0,1,0,16c-
31.27,0-31.72-22.43-32-35.84C71.71,141.9,71.59,136,56,136a8,8,0,0,1,0-16c15.59,0,15.71-5.9,16-20.16C72.28,86.43,72.73,64,104,64a8,8,0,0,1,0,16c-15.59,0-15.71,5.9-16,20.16-.17,8.31-.41,20.09-8,27.84C87.59,135.75,87.83,147.53,88,155.84ZM200,136c-15.59,0-15.71,5.9-16,20.16-.28,13.41-.73,35.84-32,35.84a8,8,0,0,1,0-16c15.59,0,15.71-5.9,16-20.16.17-8.31.41-20.09,8-27.84-7.6-7.75-7.84-19.53-8-27.84C167.71,85.9,167.59,80,152,80a8,8,0,0,1,0-16c31.27,0,31.72,22.43,32,35.84.29,14.26.41,20.16,16,20.16a8,8,0,0,1,0,16Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",Wk,[...t[3]||(t[3]=[Rb("path",{d:"M39.91,128a27.68,27.68,0,0,1,9.49,11.13C54,148.62,54,160.51,54,172c0,24.27,1.21,38,26,38a6,6,0,0,1,0,12c-16.88,0-27.81-5.6-33.4-17.13C42,195.38,42,183.49,42,172c0-24.27-1.21-38-26-38a6,6,0,0,1,0-12c24.79,0,26-13.73,26-38,0-11.49,0-23.38,4.6-32.87C52.19,39.6,63.12,34,80,34a6,6,0,0,1,0,12C55.21,46,54,59.73,54,84c0,11.49,0,23.38-4.6,32.87A27.68,27.68,0,0,1,39.91,128ZM240,122c-24.79,0-26-13.73-26-38,0-11.49,0-23.38-4.6-32.87C203.81,39.6,192.88,34,176,34a6,6,0,0,0,0,12c24.79,0,26,13.73,26,38,0,11.49,0,23.38,4.6,32.87A27.68,27.68,0,0,0,216.09,128a27.68,27.68,0,0,0-9.49,11.13C202,148.62,202,160.51,202,172c0,24.27-1.21,38-26,38a6,6,0,0,0,0,12c16.88,0,27.81-5.6,33.4-17.13,4.6-9.49,4.6-21.38,4.6-32.87,0-24.27,1.21-38,26-38a6,6,0,0,0,0-12Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",Xk,[...t[4]||(t[4]=[Rb("path",{d:"M43.18,128a29.78,29.78,0,0,1,8,10.26c4.8,9.9,4.8,22,4.8,33.74,0,24.31,1,36,24,36a8,8,0,0,1,0,16c-17.48,0-29.32-6.14-35.2-18.26-4.8-9.9-4.8-22-4.8-33.74,0-24.31-1-36-24-36a8,8,0,0,1,0-16c23,0,24-11.69,24-36,0-11.72,0-23.84,4.8-33.74C50.68,38.14,62.52,32,80,32a8,8,0,0,1,0,16C57,48,56,59.69,56,84c0,11.72,0,23.84-4.8,33.74A29.78,29.78,0,0,1,43.18,128ZM240,120c-23,0-24-11.69-24-36,0-11.72,0-23.84-4.8-33.74C205.32,38.14,193.48,32,176,32a8,8,0,0,0,0,16c23,0,24,11.69,24,36,0,11.72,0,23.84,4.8,33.74a29.78,29.78,0,0,0,8,10.26,29.78,29.78,0,0,0-8,10.26c-4.8,9.9-4.8,22-4.8,33.74,0,24.31-1,36-24,
36a8,8,0,0,0,0,16c17.48,0,29.32-6.14,35.2-18.26,4.8-9.9,4.8-22,4.8-33.74,0-24.31,1-36,24-36a8,8,0,0,0,0-16Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",Gk,[...t[5]||(t[5]=[Rb("path",{d:"M35.89,128C52,136.23,52,155.64,52,172c0,24.8,1.35,40,28,40a4,4,0,0,1,0,8c-36,0-36-26.61-36-48,0-24.8-1.35-40-28-40a4,4,0,0,1,0-8c26.65,0,28-15.2,28-40,0-21.39,0-48,36-48a4,4,0,0,1,0,8C53.35,44,52,59.2,52,84,52,100.36,52,119.77,35.89,128ZM240,124c-26.65,0-28-15.2-28-40,0-21.39,0-48-36-48a4,4,0,0,0,0,8c26.65,0,28,15.2,28,40,0,16.36,0,35.77,16.11,44C204,136.23,204,155.64,204,172c0,24.8-1.35,40-28,40a4,4,0,0,0,0,8c36,0,36-26.61,36-48,0-24.8,1.35-40,28-40a4,4,0,0,0,0-8Z"},null,-1)])])):Fb("",!0)],16))}}),Kk={key:0},Jk={key:1},eS={key:2},tS={key:3},nS={key:4},rS={key:5},aS=Mg({name:"ScalarIconCaretDown",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",Kk,[...t[0]||(t[0]=[Rb("path",{d:"M216.49,104.49l-80,80a12,12,0,0,1-17,0l-80-80a12,12,0,0,1,17-17L128,159l71.51-71.52a12,12,0,0,1,17,17Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",Jk,[...t[1]||(t[1]=[Rb("path",{d:"M208,96l-80,80L48,96Z",opacity:"0.2"},null,-1),Rb("path",{d:"M215.39,92.94A8,8,0,0,0,208,88H48a8,8,0,0,0-5.66,13.66l80,80a8,8,0,0,0,11.32,0l80-80A8,8,0,0,0,215.39,92.94ZM128,164.69,67.31,104H188.69Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",eS,[...t[2]||(t[2]=[Rb("path",{d:"M213.66,101.66l-80,80a8,8,0,0,1-11.32,0l-80-80A8,8,0,0,1,48,88H208a8,8,0,0,1,5.66,13.66Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",tS,[...t[3]||(t[3]=[Rb("path",{d:"M212.24,100.24l-80,80a6,6,0,0,1-8.48,0l-80-80a6,6,0,0,1,8.48-8.48L128,167.51l75.76-75.75a6,6,0,0,1,8.48,8.48Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",nS,[...t[4]||(t[4]=[Rb("path",{d:"M213.66,101.66l-80,80a8,8,0,0,1-11.32,0l-80-80A8,8,0,0,1,53.66,90.34L128,164.69l74.34-74.35a8,8,0,0,1,11.32,11.32Z"}
,null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",rS,[...t[5]||(t[5]=[Rb("path",{d:"M210.83,98.83l-80,80a4,4,0,0,1-5.66,0l-80-80a4,4,0,0,1,5.66-5.66L128,170.34l77.17-77.17a4,4,0,1,1,5.66,5.66Z"},null,-1)])])):Fb("",!0)],16))}}),oS={key:0},iS={key:1},sS={key:2},lS={key:3},cS={key:4},uS={key:5},dS=Mg({name:"ScalarIconCaretRight",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",oS,[...t[0]||(t[0]=[Rb("path",{d:"M184.49,136.49l-80,80a12,12,0,0,1-17-17L159,128,87.51,56.49a12,12,0,1,1,17-17l80,80A12,12,0,0,1,184.49,136.49Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",iS,[...t[1]||(t[1]=[Rb("path",{d:"M176,128,96,208V48Z",opacity:"0.2"},null,-1),Rb("path",{d:"M181.66,122.34l-80-80A8,8,0,0,0,88,48V208a8,8,0,0,0,13.66,5.66l80-80A8,8,0,0,0,181.66,122.34ZM104,188.69V67.31L164.69,128Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",sS,[...t[2]||(t[2]=[Rb("path",{d:"M181.66,133.66l-80,80A8,8,0,0,1,88,208V48a8,8,0,0,1,13.66-5.66l80,80A8,8,0,0,1,181.66,133.66Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",lS,[...t[3]||(t[3]=[Rb("path",{d:"M180.24,132.24l-80,80a6,6,0,0,1-8.48-8.48L167.51,128,91.76,52.24a6,6,0,0,1,8.48-8.48l80,80A6,6,0,0,1,180.24,132.24Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",cS,[...t[4]||(t[4]=[Rb("path",{d:"M181.66,133.66l-80,80a8,8,0,0,1-11.32-11.32L164.69,128,90.34,53.66a8,8,0,0,1,11.32-11.32l80,80A8,8,0,0,1,181.66,133.66Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",uS,[...t[5]||(t[5]=[Rb("path",{d:"M178.83,130.83l-80,80a4,4,0,0,1-5.66-5.66L170.34,128,93.17,50.83a4,4,0,0,1,5.66-5.66l80,80A4,4,0,0,1,178.83,130.83Z"},null,-1)])])):Fb("",!0)],16))}}),pS={key:0},hS={key:1},fS={key:2},mS={key:3},gS={key:4},vS={key:5},bS=Mg({name:"ScalarIconCaretUpDown",props:{label:{},weight:{}},setup(e){const 
t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",pS,[...t[0]||(t[0]=[Rb("path",{d:"M184.49,167.51a12,12,0,0,1,0,17l-48,48a12,12,0,0,1-17,0l-48-48a12,12,0,0,1,17-17L128,207l39.51-39.52A12,12,0,0,1,184.49,167.51Zm-96-79L128,49l39.51,39.52a12,12,0,0,0,17-17l-48-48a12,12,0,0,0-17,0l-48,48a12,12,0,0,0,17,17Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",hS,[...t[1]||(t[1]=[Rb("path",{d:"M80,176h96l-48,48ZM128,32,80,80h96Z",opacity:"0.2"},null,-1),Rb("path",{d:"M176,168H80a8,8,0,0,0-5.66,13.66l48,48a8,8,0,0,0,11.32,0l48-48A8,8,0,0,0,176,168Zm-48,44.69L99.31,184h57.38ZM80,88h96a8,8,0,0,0,5.66-13.66l-48-48a8,8,0,0,0-11.32,0l-48,48A8,8,0,0,0,80,88Zm48-44.69L156.69,72H99.31Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",fS,[...t[2]||(t[2]=[Rb("path",{d:"M72.61,83.06a8,8,0,0,1,1.73-8.72l48-48a8,8,0,0,1,11.32,0l48,48A8,8,0,0,1,176,88H80A8,8,0,0,1,72.61,83.06ZM176,168H80a8,8,0,0,0-5.66,13.66l48,48a8,8,0,0,0,11.32,0l48-48A8,8,0,0,0,176,168Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",mS,[...t[3]||(t[3]=[Rb("path",{d:"M180.24,171.76a6,6,0,0,1,0,8.48l-48,48a6,6,0,0,1-8.48,0l-48-48a6,6,0,0,1,8.48-8.48L128,215.51l43.76-43.75A6,6,0,0,1,180.24,171.76Zm-96-87.52L128,40.49l43.76,43.75a6,6,0,0,0,8.48-8.48l-48-48a6,6,0,0,0-8.48,0l-48,48a6,6,0,0,0,8.48,8.48Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",gS,[...t[4]||(t[4]=[Rb("path",{d:"M181.66,170.34a8,8,0,0,1,0,11.32l-48,48a8,8,0,0,1-11.32,0l-48-48a8,8,0,0,1,11.32-11.32L128,212.69l42.34-42.35A8,8,0,0,1,181.66,170.34Zm-96-84.68L128,43.31l42.34,42.35a8,8,0,0,0,11.32-11.32l-48-48a8,8,0,0,0-11.32,0l-48,48A8,8,0,0,0,85.66,85.66Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",vS,[...t[5]||(t[5]=[Rb("path",{d:"M178.83,173.17a4,4,0,0,1,0,5.66l-48,48a4,4,0,0,1-5.66,0l-48-48a4,4,0,0,1,5.66-5.66L128,218.34l45.17-45.17A4,4,0,0,1,178.83,173.17Zm-96-90.34L128,37.66l45.17,45.17a4,4,0,1,0,5.66-5.66l-48-48a4,4,0
,0,0-5.66,0l-48,48a4,4,0,0,0,5.66,5.66Z"},null,-1)])])):Fb("",!0)],16))}}),yS={key:0},OS={key:1},wS={key:2},xS={key:3},kS={key:4},SS={key:5},_S=Mg({name:"ScalarIconCheck",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",yS,[...t[0]||(t[0]=[Rb("path",{d:"M232.49,80.49l-128,128a12,12,0,0,1-17,0l-56-56a12,12,0,1,1,17-17L96,183,215.51,63.51a12,12,0,0,1,17,17Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",OS,[...t[1]||(t[1]=[Rb("path",{d:"M232,56V200a16,16,0,0,1-16,16H40a16,16,0,0,1-16-16V56A16,16,0,0,1,40,40H216A16,16,0,0,1,232,56Z",opacity:"0.2"},null,-1),Rb("path",{d:"M205.66,85.66l-96,96a8,8,0,0,1-11.32,0l-40-40a8,8,0,0,1,11.32-11.32L104,164.69l90.34-90.35a8,8,0,0,1,11.32,11.32Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",wS,[...t[2]||(t[2]=[Rb("path",{d:"M216,40H40A16,16,0,0,0,24,56V200a16,16,0,0,0,16,16H216a16,16,0,0,0,16-16V56A16,16,0,0,0,216,40ZM205.66,85.66l-96,96a8,8,0,0,1-11.32,0l-40-40a8,8,0,0,1,11.32-11.32L104,164.69l90.34-90.35a8,8,0,0,1,11.32,11.32Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",xS,[...t[3]||(t[3]=[Rb("path",{d:"M228.24,76.24l-128,128a6,6,0,0,1-8.48,0l-56-56a6,6,0,0,1,8.48-8.48L96,191.51,219.76,67.76a6,6,0,0,1,8.48,8.48Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",kS,[...t[4]||(t[4]=[Rb("path",{d:"M229.66,77.66l-128,128a8,8,0,0,1-11.32,0l-56-56a8,8,0,0,1,11.32-11.32L96,188.69,218.34,66.34a8,8,0,0,1,11.32,11.32Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",SS,[...t[5]||(t[5]=[Rb("path",{d:"M226.83,74.83l-128,128a4,4,0,0,1-5.66,0l-56-56a4,4,0,0,1,5.66-5.66L96,194.34,221.17,69.17a4,4,0,1,1,5.66,5.66Z"},null,-1)])])):Fb("",!0)],16))}}),TS={key:0},ES={key:1},AS={key:2},CS={key:3},$S={key:4},PS={key:5},DS=Mg({name:"ScalarIconCopy",props:{label:{},weight:{}},setup(e){const 
t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",TS,[...t[0]||(t[0]=[Rb("path",{d:"M216,28H88A12,12,0,0,0,76,40V76H40A12,12,0,0,0,28,88V216a12,12,0,0,0,12,12H168a12,12,0,0,0,12-12V180h36a12,12,0,0,0,12-12V40A12,12,0,0,0,216,28ZM156,204H52V100H156Zm48-48H180V88a12,12,0,0,0-12-12H100V52H204Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",ES,[...t[1]||(t[1]=[Rb("path",{d:"M216,40V168H168V88H88V40Z",opacity:"0.2"},null,-1),Rb("path",{d:"M216,32H88a8,8,0,0,0-8,8V80H40a8,8,0,0,0-8,8V216a8,8,0,0,0,8,8H168a8,8,0,0,0,8-8V176h40a8,8,0,0,0,8-8V40A8,8,0,0,0,216,32ZM160,208H48V96H160Zm48-48H176V88a8,8,0,0,0-8-8H96V48H208Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",AS,[...t[2]||(t[2]=[Rb("path",{d:"M216,32H88a8,8,0,0,0-8,8V80H40a8,8,0,0,0-8,8V216a8,8,0,0,0,8,8H168a8,8,0,0,0,8-8V176h40a8,8,0,0,0,8-8V40A8,8,0,0,0,216,32Zm-8,128H176V88a8,8,0,0,0-8-8H96V48H208Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",CS,[...t[3]||(t[3]=[Rb("path",{d:"M216,34H88a6,6,0,0,0-6,6V82H40a6,6,0,0,0-6,6V216a6,6,0,0,0,6,6H168a6,6,0,0,0,6-6V174h42a6,6,0,0,0,6-6V40A6,6,0,0,0,216,34ZM162,210H46V94H162Zm48-48H174V88a6,6,0,0,0-6-6H94V46H210Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",$S,[...t[4]||(t[4]=[Rb("path",{d:"M216,32H88a8,8,0,0,0-8,8V80H40a8,8,0,0,0-8,8V216a8,8,0,0,0,8,8H168a8,8,0,0,0,8-8V176h40a8,8,0,0,0,8-8V40A8,8,0,0,0,216,32ZM160,208H48V96H160Zm48-48H176V88a8,8,0,0,0-8-8H96V48H208Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",PS,[...t[5]||(t[5]=[Rb("path",{d:"M216,36H88a4,4,0,0,0-4,4V84H40a4,4,0,0,0-4,4V216a4,4,0,0,0,4,4H168a4,4,0,0,0,4-4V172h44a4,4,0,0,0,4-4V40A4,4,0,0,0,216,36ZM164,212H44V92H164Zm48-48H172V88a4,4,0,0,0-4-4H92V44H212Z"},null,-1)])])):Fb("",!0)],16))}}),IS={key:0},MS={key:1},NS={key:2},RS={key:3},LS={key:4},BS={key:5},jS=Mg({name:"ScalarIconDiscordLogo",props:{label:{},weight:{}},setup(e){const 
t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",IS,[...t[0]||(t[0]=[Rb("path",{d:"M108,136a16,16,0,1,1-16-16A16,16,0,0,1,108,136Zm56-16a16,16,0,1,0,16,16A16,16,0,0,0,164,120Zm76.07,76.56-67,29.71A20.15,20.15,0,0,1,146,214.9l-8.54-23.13c-3.13.14-6.27.24-9.45.24s-6.32-.1-9.45-.24L110,214.9a20.19,20.19,0,0,1-27.08,11.37l-67-29.71A19.93,19.93,0,0,1,4.62,173.41L34.15,57A20,20,0,0,1,50.37,42.19l36.06-5.93A20.26,20.26,0,0,1,109.22,51.1l4.41,17.41c4.74-.33,9.52-.51,14.37-.51s9.63.18,14.37.51l4.41-17.41a20.25,20.25,0,0,1,22.79-14.84l36.06,5.93A20,20,0,0,1,221.85,57l29.53,116.38A19.93,19.93,0,0,1,240.07,196.56ZM227.28,176,199.23,65.46l-30.07-4.94-2.84,11.17c2.9.58,5.78,1.2,8.61,1.92a12,12,0,1,1-5.86,23.27A168.43,168.43,0,0,0,128,92a168.43,168.43,0,0,0-41.07,4.88,12,12,0,0,1-5.86-23.27c2.83-.72,5.71-1.34,8.61-1.92L86.85,60.52,56.77,65.46,28.72,176l60.22,26.7,5-13.57c-4.37-.76-8.67-1.65-12.88-2.71a12,12,0,0,1,5.86-23.28A168.43,168.43,0,0,0,128,168a168.43,168.43,0,0,0,41.07-4.88,12,12,0,0,1,5.86,23.28c-4.21,1.06-8.51,1.95-12.88,2.71l5,13.57Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",MS,[...t[1]||(t[1]=[Rb("path",{d:"M235.21,185.59l-67,29.7a8.15,8.15,0,0,1-11-4.56L147,183.06a190.5,190.5,0,0,1-19,.94,190.5,190.5,0,0,1-19-.94L98.75,210.73a8.15,8.15,0,0,1-11,4.56l-67-29.7a8,8,0,0,1-4.55-9.24L45.77,60A8.08,8.08,0,0,1,52.31,54l36.06-5.92a8.1,8.1,0,0,1,9.21,6l5,19.63a192.32,192.32,0,0,1,50.88,0l5-19.63a8.1,8.1,0,0,1,9.21-6L203.69,54A8.08,8.08,0,0,1,210.23,60l29.53,116.37A8,8,0,0,1,235.21,185.59Z",opacity:"0.2"},null,-1),Rb("path",{d:"M104,140a12,12,0,1,1-12-12A12,12,0,0,1,104,140Zm60-12a12,12,0,1,0,12,12A12,12,0,0,0,164,128Zm74.45,64.9-67,29.71a16.17,16.17,0,0,1-21.71-9.1l-8.11-22q-6.72.45-13.63.46t-13.63-.46l-8.11,22a16.18,16.18,0,0,1-21.71,9.1l-67-29.71a15.94,15.94,0,0,1-9.06-18.51L38,58A16.08,16.08,0,0,1,51,46.13l36.06-5.92a16.21,1
6.21,0,0,1,18.26,11.88l3.26,12.83Q118.11,64,128,64t19.4.92l3.26-12.83a16.22,16.22,0,0,1,18.26-11.88L205,46.13A16.08,16.08,0,0,1,218,58l29.53,116.38A15.94,15.94,0,0,1,238.45,192.9ZM232,178.28,202.47,62s0,0-.08,0L166.33,56a.17.17,0,0,0-.17,0l-2.83,11.14c5,.94,10,2.06,14.83,3.42A8,8,0,0,1,176,86.31a8.09,8.09,0,0,1-2.16-.3A172.25,172.25,0,0,0,128,80a172.25,172.25,0,0,0-45.84,6,8,8,0,1,1-4.32-15.4c4.82-1.36,9.78-2.48,14.82-3.42L89.83,56a.21.21,0,0,0-.12,0h0L53.61,61.92a.24.24,0,0,0-.09,0L24,178.33,91,208a.21.21,0,0,0,.22,0L98,189.72a173.2,173.2,0,0,1-20.14-4.32A8,8,0,0,1,82.16,170,171.85,171.85,0,0,0,128,176a171.85,171.85,0,0,0,45.84-6,8,8,0,0,1,4.32,15.41A173.2,173.2,0,0,1,158,189.72L164.75,208a.22.22,0,0,0,.21,0Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",NS,[...t[2]||(t[2]=[Rb("path",{d:"M247.51,174.39,218,58a16.08,16.08,0,0,0-13-11.88l-36.06-5.92a16.22,16.22,0,0,0-18.26,11.88l-.21.85a4,4,0,0,0,3.27,4.93,155.62,155.62,0,0,1,24.41,5.62,8.2,8.2,0,0,1,5.62,9.7,8,8,0,0,1-10.19,5.64,155.4,155.4,0,0,0-90.8-.1,8.22,8.22,0,0,1-10.28-4.81,8,8,0,0,1,5.08-10.33,156.85,156.85,0,0,1,24.72-5.72,4,4,0,0,0,3.27-4.93l-.21-.85A16.21,16.21,0,0,0,87.08,40.21L51,46.13A16.08,16.08,0,0,0,38,58L8.49,174.39a15.94,15.94,0,0,0,9.06,18.51l67,29.71a16.17,16.17,0,0,0,21.71-9.1l3.49-9.45a4,4,0,0,0-3.27-5.35,158.13,158.13,0,0,1-28.63-6.2,8.2,8.2,0,0,1-5.61-9.67,8,8,0,0,1,10.2-5.66,155.59,155.59,0,0,0,91.12,0,8,8,0,0,1,10.19,5.65,8.19,8.19,0,0,1-5.61,9.68,157.84,157.84,0,0,1-28.62,6.2,4,4,0,0,0-3.27,5.35l3.49,9.45a16.18,16.18,0,0,0,21.71,9.1l67-29.71A15.94,15.94,0,0,0,247.51,174.39ZM92,152a12,12,0,1,1,12-12A12,12,0,0,1,92,152Zm72,0a12,12,0,1,1,12-12A12,12,0,0,1,164,152Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",RS,[...t[3]||(t[3]=[Rb("path",{d:"M102,140a10,10,0,1,1-10-10A10,10,0,0,1,102,140Zm62-10a10,10,0,1,0,10,10A10,10,0,0,0,164,130Zm73.64,61.08-67,29.71a14.43,14.43,0,0,1-5.77,1.21,14.13,14.13,0,0,1-13.25-9.18L143,189.43c-4.93.37-9.92.58-15,.58s-10.06-.21-15-.58l-8.63,23.39A14.13,14.13,0,0,1,91
.13,222a14.43,14.43,0,0,1-5.77-1.21l-67-29.71a14,14,0,0,1-7.93-16.2L40,58.5A14.07,14.07,0,0,1,51.34,48.11L87.4,42.19a14.19,14.19,0,0,1,16,10.39l3.69,14.53a197.5,197.5,0,0,1,41.82,0l3.69-14.53a14.19,14.19,0,0,1,16-10.39l36.06,5.92A14.07,14.07,0,0,1,216,58.5l29.53,116.38A14,14,0,0,1,237.64,191.08Zm-3.7-13.25L204.41,61.45a2.08,2.08,0,0,0-1.7-1.5L166.65,54a2.13,2.13,0,0,0-2.42,1.5l-3.36,13.24a169.28,169.28,0,0,1,16.75,3.76A6,6,0,0,1,176,84.31a5.71,5.71,0,0,1-1.62-.23A174.26,174.26,0,0,0,128,78a174.26,174.26,0,0,0-46.38,6.08,6,6,0,1,1-3.24-11.55,169.28,169.28,0,0,1,16.75-3.76L91.77,55.53A2.12,2.12,0,0,0,89.35,54L53.29,60a2.08,2.08,0,0,0-1.7,1.5L22.06,177.83a2,2,0,0,0,1.16,2.28l67,29.7a2.19,2.19,0,0,0,1.76,0,2.07,2.07,0,0,0,1.14-1.17l7.58-20.55a171.46,171.46,0,0,1-22.33-4.64,6,6,0,1,1,3.24-11.55A174.26,174.26,0,0,0,128,178a174.26,174.26,0,0,0,46.38-6.08,6,6,0,1,1,3.24,11.55,171.46,171.46,0,0,1-22.33,4.64l7.58,20.55a2.07,2.07,0,0,0,1.14,1.17,2.19,2.19,0,0,0,1.76,0l67-29.7A2,2,0,0,0,233.94,177.83Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",LS,[...t[4]||(t[4]=[Rb("path",{d:"M104,140a12,12,0,1,1-12-12A12,12,0,0,1,104,140Zm60-12a12,12,0,1,0,12,12A12,12,0,0,0,164,128Zm74.45,64.9-67,29.71a16.17,16.17,0,0,1-21.71-9.1l-8.11-22q-6.72.45-13.63.46t-13.63-.46l-8.11,22a16.18,16.18,0,0,1-21.71,9.1l-67-29.71a15.93,15.93,0,0,1-9.06-18.51L38,58A16.07,16.07,0,0,1,51,46.14l36.06-5.93a16.22,16.22,0,0,1,18.26,11.88l3.26,12.84Q118.11,64,128,64t19.4.93l3.26-12.84a16.21,16.21,0,0,1,18.26-11.88L205,46.14A16.07,16.07,0,0,1,218,58l29.53,116.38A15.93,15.93,0,0,1,238.45,192.9ZM232,178.28,202.47,62s0,0-.08,0L166.33,56a.17.17,0,0,0-.17,0l-2.83,11.14c5,.94,10,2.06,14.83,3.42A8,8,0,0,1,176,86.31a8.09,8.09,0,0,1-2.16-.3A172.25,172.25,0,0,0,128,80a172.25,172.25,0,0,0-45.84,6,8,8,0,1,1-4.32-15.4c4.82-1.36,9.78-2.48,14.82-3.42L89.83,56s0,0-.12,0h0L53.61,61.93a.17.17,0,0,0-.09,0L24,178.33,91,208a.23.23,0,0,0,.22,0L98,189.72a173.2,173.2,0,0,1-20.14-4.32A8,8,0,0,1,82.16,170,171.85,171.85,0,0,0,128,176a171.85
,171.85,0,0,0,45.84-6,8,8,0,0,1,4.32,15.41A173.2,173.2,0,0,1,158,189.72L164.75,208a.22.22,0,0,0,.21,0Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",BS,[...t[5]||(t[5]=[Rb("path",{d:"M100,140a8,8,0,1,1-8-8A8,8,0,0,1,100,140Zm64-8a8,8,0,1,0,8,8A8,8,0,0,0,164,132Zm72.83,57.25-67,29.71a12.36,12.36,0,0,1-5,1,12.13,12.13,0,0,1-11.38-7.88l-9.15-24.81c-5.36.45-10.81.69-16.34.69s-11-.24-16.34-.69l-9.15,24.81A12.13,12.13,0,0,1,91.13,220a12.36,12.36,0,0,1-5-1l-67-29.71a12,12,0,0,1-6.8-13.88L41.9,59a12.06,12.06,0,0,1,9.77-8.91l36.06-5.92a12.18,12.18,0,0,1,13.73,8.91l4.12,16.22a195.47,195.47,0,0,1,44.84,0l4.12-16.22a12.18,12.18,0,0,1,13.73-8.91l36.06,5.92A12.06,12.06,0,0,1,214.1,59l29.53,116.38A12,12,0,0,1,236.83,189.25Zm-1-11.91L206.35,61A4.07,4.07,0,0,0,203,58L167,52.05a4.15,4.15,0,0,0-4.69,3L158.4,70.38a166.74,166.74,0,0,1,18.68,4.08,4,4,0,1,1-2.16,7.7A176.21,176.21,0,0,0,128,76a176.21,176.21,0,0,0-46.92,6.16,4,4,0,1,1-2.16-7.7A166.74,166.74,0,0,1,97.6,70.38L93.71,55a4.15,4.15,0,0,0-4.69-3L53,58a4.07,4.07,0,0,0-3.31,3L20.12,177.34a4,4,0,0,0,2.29,4.59l67,29.71a4.16,4.16,0,0,0,3.35,0A4,4,0,0,0,95,209.35l8.45-22.88a171.49,171.49,0,0,1-24.53-4.92,4,4,0,0,1,2.16-7.71A176.21,176.21,0,0,0,128,180a176.21,176.21,0,0,0,46.92-6.16,4,4,0,0,1,2.16,7.71,171.49,171.49,0,0,1-24.53,4.92L161,209.35a4,4,0,0,0,2.23,2.32,4.16,4.16,0,0,0,3.35,0l67-29.71A4,4,0,0,0,235.88,177.34Z"},null,-1)])])):Fb("",!0)],16))}}),US={key:0},zS={key:1},FS={key:2},ZS={key:3},QS={key:4},HS={key:5},VS=Mg({name:"ScalarIconEnvelopeSimple",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 
256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",US,[...t[0]||(t[0]=[Rb("path",{d:"M224,44H32A12,12,0,0,0,20,56V192a20,20,0,0,0,20,20H216a20,20,0,0,0,20-20V56A12,12,0,0,0,224,44ZM193.15,68,128,127.72,62.85,68ZM44,188V83.28l75.89,69.57a12,12,0,0,0,16.22,0L212,83.28V188Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",zS,[...t[1]||(t[1]=[Rb("path",{d:"M224,56l-96,88L32,56Z",opacity:"0.2"},null,-1),Rb("path",{d:"M224,48H32a8,8,0,0,0-8,8V192a16,16,0,0,0,16,16H216a16,16,0,0,0,16-16V56A8,8,0,0,0,224,48ZM203.43,64,128,133.15,52.57,64ZM216,192H40V74.19l82.59,75.71a8,8,0,0,0,10.82,0L216,74.19V192Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",FS,[...t[2]||(t[2]=[Rb("path",{d:"M224,48H32a8,8,0,0,0-8,8V192a16,16,0,0,0,16,16H216a16,16,0,0,0,16-16V56A8,8,0,0,0,224,48Zm-8,144H40V74.19l82.59,75.71a8,8,0,0,0,10.82,0L216,74.19V192Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",ZS,[...t[3]||(t[3]=[Rb("path",{d:"M224,50H32a6,6,0,0,0-6,6V192a14,14,0,0,0,14,14H216a14,14,0,0,0,14-14V56A6,6,0,0,0,224,50ZM208.58,62,128,135.86,47.42,62ZM216,194H40a2,2,0,0,1-2-2V69.64l86,78.78a6,6,0,0,0,8.1,0L218,69.64V192A2,2,0,0,1,216,194Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",QS,[...t[4]||(t[4]=[Rb("path",{d:"M224,48H32a8,8,0,0,0-8,8V192a16,16,0,0,0,16,16H216a16,16,0,0,0,16-16V56A8,8,0,0,0,224,48ZM203.43,64,128,133.15,52.57,64ZM216,192H40V74.19l82.59,75.71a8,8,0,0,0,10.82,0L216,74.19V192Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",HS,[...t[5]||(t[5]=[Rb("path",{d:"M224,52H32a4,4,0,0,0-4,4V192a12,12,0,0,0,12,12H216a12,12,0,0,0,12-12V56A4,4,0,0,0,224,52Zm-10.28,8L128,138.57,42.28,60ZM216,196H40a4,4,0,0,1-4-4V65.09L125.3,147a4,4,0,0,0,5.4,0L220,65.09V192A4,4,0,0,1,216,196Z"},null,-1)])])):Fb("",!0)],16))}}),qS={key:0},WS={key:1},XS={key:2},GS={key:3},YS={key:4},KS={key:5},JS=Mg({name:"ScalarIconEye",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 
256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",qS,[...t[0]||(t[0]=[Rb("path",{d:"M251,123.13c-.37-.81-9.13-20.26-28.48-39.61C196.63,57.67,164,44,128,44S59.37,57.67,33.51,83.52C14.16,102.87,5.4,122.32,5,123.13a12.08,12.08,0,0,0,0,9.75c.37.82,9.13,20.26,28.49,39.61C59.37,198.34,92,212,128,212s68.63-13.66,94.48-39.51c19.36-19.35,28.12-38.79,28.49-39.61A12.08,12.08,0,0,0,251,123.13Zm-46.06,33C183.47,177.27,157.59,188,128,188s-55.47-10.73-76.91-31.88A130.36,130.36,0,0,1,29.52,128,130.45,130.45,0,0,1,51.09,99.89C72.54,78.73,98.41,68,128,68s55.46,10.73,76.91,31.89A130.36,130.36,0,0,1,226.48,128,130.45,130.45,0,0,1,204.91,156.12ZM128,84a44,44,0,1,0,44,44A44.05,44.05,0,0,0,128,84Zm0,64a20,20,0,1,1,20-20A20,20,0,0,1,128,148Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",WS,[...t[1]||(t[1]=[Rb("path",{d:"M128,56C48,56,16,128,16,128s32,72,112,72,112-72,112-72S208,56,128,56Zm0,112a40,40,0,1,1,40-40A40,40,0,0,1,128,168Z",opacity:"0.2"},null,-1),Rb("path",{d:"M247.31,124.76c-.35-.79-8.82-19.58-27.65-38.41C194.57,61.26,162.88,48,128,48S61.43,61.26,36.34,86.35C17.51,105.18,9,124,8.69,124.76a8,8,0,0,0,0,6.5c.35.79,8.82,19.57,27.65,38.4C61.43,194.74,93.12,208,128,208s66.57-13.26,91.66-38.34c18.83-18.83,27.3-37.61,27.65-38.4A8,8,0,0,0,247.31,124.76ZM128,192c-30.78,0-57.67-11.19-79.93-33.25A133.47,133.47,0,0,1,25,128,133.33,133.33,0,0,1,48.07,97.25C70.33,75.19,97.22,64,128,64s57.67,11.19,79.93,33.25A133.46,133.46,0,0,1,231.05,128C223.84,141.46,192.43,192,128,192Zm0-112a48,48,0,1,0,48,48A48.05,48.05,0,0,0,128,80Zm0,80a32,32,0,1,1,32-32A32,32,0,0,1,128,160Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",XS,[...t[2]||(t[2]=[Rb("path",{d:"M247.31,124.76c-.35-.79-8.82-19.58-27.65-38.41C194.57,61.26,162.88,48,128,48S61.43,61.26,36.34,86.35C17.51,105.18,9,124,8.69,124.76a8,8,0,0,0,0,6.5c.35.79,8.82,19.57,27.65,38.4C61.43,194.74,93.12,208,128,208s66.57-13.26,91.66-38.34c18.83-18.83,27.3-37.61,27.65-38.4A8,8,0,0,0,247.31,124.76ZM128,168a40,40,0,1,1,40-40A40,40
,0,0,1,128,168Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",GS,[...t[3]||(t[3]=[Rb("path",{d:"M245.48,125.57c-.34-.78-8.66-19.23-27.24-37.81C201,70.54,171.38,50,128,50S55,70.54,37.76,87.76c-18.58,18.58-26.9,37-27.24,37.81a6,6,0,0,0,0,4.88c.34.77,8.66,19.22,27.24,37.8C55,185.47,84.62,206,128,206s73-20.53,90.24-37.75c18.58-18.58,26.9-37,27.24-37.8A6,6,0,0,0,245.48,125.57ZM128,194c-31.38,0-58.78-11.42-81.45-33.93A134.77,134.77,0,0,1,22.69,128,134.56,134.56,0,0,1,46.55,95.94C69.22,73.42,96.62,62,128,62s58.78,11.42,81.45,33.94A134.56,134.56,0,0,1,233.31,128C226.94,140.21,195,194,128,194Zm0-112a46,46,0,1,0,46,46A46.06,46.06,0,0,0,128,82Zm0,80a34,34,0,1,1,34-34A34,34,0,0,1,128,162Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",YS,[...t[4]||(t[4]=[Rb("path",{d:"M247.31,124.76c-.35-.79-8.82-19.58-27.65-38.41C194.57,61.26,162.88,48,128,48S61.43,61.26,36.34,86.35C17.51,105.18,9,124,8.69,124.76a8,8,0,0,0,0,6.5c.35.79,8.82,19.57,27.65,38.4C61.43,194.74,93.12,208,128,208s66.57-13.26,91.66-38.34c18.83-18.83,27.3-37.61,27.65-38.4A8,8,0,0,0,247.31,124.76ZM128,192c-30.78,0-57.67-11.19-79.93-33.25A133.47,133.47,0,0,1,25,128,133.33,133.33,0,0,1,48.07,97.25C70.33,75.19,97.22,64,128,64s57.67,11.19,79.93,33.25A133.46,133.46,0,0,1,231.05,128C223.84,141.46,192.43,192,128,192Zm0-112a48,48,0,1,0,48,48A48.05,48.05,0,0,0,128,80Zm0,80a32,32,0,1,1,32-32A32,32,0,0,1,128,160Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",KS,[...t[5]||(t[5]=[Rb("path",{d:"M243.66,126.38c-.34-.76-8.52-18.89-26.83-37.2C199.87,72.22,170.7,52,128,52S56.13,72.22,39.17,89.18c-18.31,18.31-26.49,36.44-26.83,37.2a4.08,4.08,0,0,0,0,3.25c.34.77,8.52,18.89,26.83,37.2,17,17,46.14,37.17,88.83,37.17s71.87-20.21,88.83-37.17c18.31-18.31,26.49-36.43,26.83-37.2A4.08,4.08,0,0,0,243.66,126.38Zm-32.7,35c-23.07,23-51,34.62-83,34.62s-59.89-11.65-83-34.62A135.71,135.71,0,0,1,20.44,128,135.69,135.69,0,0,1,45,94.62C68.11,71.65,96,60,128,60s59.89,11.65,83,34.62A135.79,135.79,0,0,1,235.56,128,135.71,135.71,0,0,1,211,161.38ZM128,84a44,44,0,1,0,4
4,44A44.05,44.05,0,0,0,128,84Zm0,80a36,36,0,1,1,36-36A36,36,0,0,1,128,164Z"},null,-1)])])):Fb("",!0)],16))}}),e_={key:0},t_={key:1},n_={key:2},r_={key:3},a_={key:4},o_={key:5},i_=Mg({name:"ScalarIconEyeSlash",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",e_,[...t[0]||(t[0]=[Rb("path",{d:"M56.88,31.93A12,12,0,1,0,39.12,48.07l16,17.65C20.67,88.66,5.72,121.58,5,123.13a12.08,12.08,0,0,0,0,9.75c.37.82,9.13,20.26,28.49,39.61C59.37,198.34,92,212,128,212a131.34,131.34,0,0,0,51-10l20.09,22.1a12,12,0,0,0,17.76-16.14ZM128,188c-29.59,0-55.47-10.73-76.91-31.88A130.69,130.69,0,0,1,29.52,128c5.27-9.31,18.79-29.9,42-44.29l90.09,99.11A109.33,109.33,0,0,1,128,188Zm123-55.12c-.36.81-9,20-28,39.16a12,12,0,1,1-17-16.9A130.48,130.48,0,0,0,226.48,128a130.36,130.36,0,0,0-21.57-28.12C183.46,78.73,157.59,68,128,68c-3.35,0-6.7.14-10,.42a12,12,0,1,1-2-23.91c3.93-.34,8-.51,12-.51,36,0,68.63,13.67,94.49,39.52,19.35,19.35,28.11,38.8,28.48,39.61A12.08,12.08,0,0,1,251,132.88Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",t_,[...t[1]||(t[1]=[Rb("path",{d:"M128,56C48,56,16,128,16,128s32,72,112,72,112-72,112-72S208,56,128,56Zm0,112a40,40,0,1,1,40-40A40,40,0,0,1,128,168Z",opacity:"0.2"},null,-1),Rb("path",{d:"M53.92,34.62A8,8,0,1,0,42.08,45.38L61.32,66.55C25,88.84,9.38,123.2,8.69,124.76a8,8,0,0,0,0,6.5c.35.79,8.82,19.57,27.65,38.4C61.43,194.74,93.12,208,128,208a127.11,127.11,0,0,0,52.07-10.83l22,24.21a8,8,0,1,0,11.84-10.76Zm47.33,75.84,41.67,45.85a32,32,0,0,1-41.67-45.85ZM128,192c-30.78,0-57.67-11.19-79.93-33.25A133.16,133.16,0,0,1,25,128c4.69-8.79,19.66-33.39,47.35-49.38l18,19.75a48,48,0,0,0,63.66,70l14.73,16.2A112,112,0,0,1,128,192Zm6-95.43a8,8,0,0,1,3-15.72,48.16,48.16,0,0,1,38.77,42.64,8,8,0,0,1-7.22,8.71,6.39,6.39,0,0,1-.75,0,8,8,0,0,1-8-7.26A32.09,32.09,0,0,0,134,96.57Zm113.28,34.69c-.42.94-10.55,23.37-33.
36,43.8a8,8,0,1,1-10.67-11.92A132.77,132.77,0,0,0,231.05,128a133.15,133.15,0,0,0-23.12-30.77C185.67,75.19,158.78,64,128,64a118.37,118.37,0,0,0-19.36,1.57A8,8,0,1,1,106,49.79,134,134,0,0,1,128,48c34.88,0,66.57,13.26,91.66,38.35,18.83,18.83,27.3,37.62,27.65,38.41A8,8,0,0,1,247.31,131.26Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",n_,[...t[2]||(t[2]=[Rb("path",{d:"M96.68,57.87a4,4,0,0,1,2.08-6.6A130.13,130.13,0,0,1,128,48c34.88,0,66.57,13.26,91.66,38.35,18.83,18.83,27.3,37.62,27.65,38.41a8,8,0,0,1,0,6.5c-.35.79-8.82,19.57-27.65,38.4q-4.28,4.26-8.79,8.07a4,4,0,0,1-5.55-.36ZM213.92,210.62a8,8,0,1,1-11.84,10.76L180,197.13A127.21,127.21,0,0,1,128,208c-34.88,0-66.57-13.26-91.66-38.34C17.51,150.83,9,132.05,8.69,131.26a8,8,0,0,1,0-6.5C9,124,17.51,105.18,36.34,86.35a135,135,0,0,1,25-19.78L42.08,45.38A8,8,0,1,1,53.92,34.62Zm-65.49-48.25-52.69-58a40,40,0,0,0,52.69,58Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",r_,[...t[3]||(t[3]=[Rb("path",{d:"M52.44,36A6,6,0,0,0,43.56,44L64.44,67c-37.28,21.9-53.23,57-53.92,58.57a6,6,0,0,0,0,4.88c.34.77,8.66,19.22,27.24,37.8C55,185.47,84.62,206,128,206a124.91,124.91,0,0,0,52.57-11.25l23,25.29a6,6,0,0,0,8.88-8.08Zm48.62,71.32,45,49.52a34,34,0,0,1-45-49.52ZM128,194c-31.38,0-58.78-11.42-81.45-33.93A134.57,134.57,0,0,1,22.69,128c4.29-8.2,20.1-35.18,50-51.91L92.89,98.3a46,46,0,0,0,61.35,67.48l17.81,19.6A113.47,113.47,0,0,1,128,194Zm6.4-99.4a6,6,0,0,1,2.25-11.79,46.17,46.17,0,0,1,37.15,40.87,6,6,0,0,1-5.42,6.53l-.56,0a6,6,0,0,1-6-5.45A34.1,34.1,0,0,0,134.4,94.6Zm111.08,35.85c-.41.92-10.37,23-32.86,43.12a6,6,0,1,1-8-8.94A134.07,134.07,0,0,0,233.31,128a134.67,134.67,0,0,0-23.86-32.07C186.78,73.42,159.38,62,128,62a120.19,120.19,0,0,0-19.69,1.6,6,6,0,1,1-2-11.83A131.12,131.12,0,0,1,128,50c43.38,0,73,20.54,90.24,37.76,18.58,18.58,26.9,37,27.24,37.81A6,6,0,0,1,245.48,130.45Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",a_,[...t[4]||(t[4]=[Rb("path",{d:"M53.92,34.62A8,8,0,1,0,42.08,45.38L61.32,66.55C25,88.84,9.38,123.2,8.69,124.76a8,8,0,0,0,0,6.5c.35.79
,8.82,19.57,27.65,38.4C61.43,194.74,93.12,208,128,208a127.11,127.11,0,0,0,52.07-10.83l22,24.21a8,8,0,1,0,11.84-10.76Zm47.33,75.84,41.67,45.85a32,32,0,0,1-41.67-45.85ZM128,192c-30.78,0-57.67-11.19-79.93-33.25A133.16,133.16,0,0,1,25,128c4.69-8.79,19.66-33.39,47.35-49.38l18,19.75a48,48,0,0,0,63.66,70l14.73,16.2A112,112,0,0,1,128,192Zm6-95.43a8,8,0,0,1,3-15.72,48.16,48.16,0,0,1,38.77,42.64,8,8,0,0,1-7.22,8.71,6.39,6.39,0,0,1-.75,0,8,8,0,0,1-8-7.26A32.09,32.09,0,0,0,134,96.57Zm113.28,34.69c-.42.94-10.55,23.37-33.36,43.8a8,8,0,1,1-10.67-11.92A132.77,132.77,0,0,0,231.05,128a133.15,133.15,0,0,0-23.12-30.77C185.67,75.19,158.78,64,128,64a118.37,118.37,0,0,0-19.36,1.57A8,8,0,1,1,106,49.79,134,134,0,0,1,128,48c34.88,0,66.57,13.26,91.66,38.35,18.83,18.83,27.3,37.62,27.65,38.41A8,8,0,0,1,247.31,131.26Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",o_,[...t[5]||(t[5]=[Rb("path",{d:"M51,37.31A4,4,0,0,0,45,42.69L67.59,67.5C29.34,89,13,124.81,12.34,126.38a4.08,4.08,0,0,0,0,3.25c.34.77,8.52,18.89,26.83,37.2,17,17,46.14,37.17,88.83,37.17a122.59,122.59,0,0,0,53.06-11.69l24,26.38a4,4,0,1,0,5.92-5.38ZM149.1,157.16A36,36,0,0,1,101,104.22ZM128,196c-32,0-59.89-11.65-83-34.62A135.81,135.81,0,0,1,20.44,128c3.65-7.23,20.09-36.81,52.68-54.43l22.45,24.7a44,44,0,0,0,59,64.83l20.89,23A114.94,114.94,0,0,1,128,196Zm6.78-103.36a4,4,0,0,1,1.49-7.86,44.15,44.15,0,0,1,35.54,39.09,4,4,0,0,1-3.61,4.35l-.38,0a4,4,0,0,1-4-3.63A36.1,36.1,0,0,0,134.78,92.64Zm108.88,37c-.41.91-10.2,22.58-32.38,42.45a4,4,0,0,1-2.67,1,4,4,0,0,1-2.67-7A136.71,136.71,0,0,0,235.56,128,136.07,136.07,0,0,0,211,94.62C187.89,71.65,160,60,128,60a122,122,0,0,0-20,1.63,4,4,0,0,1-1.32-7.89A129.3,129.3,0,0,1,128,52c42.7,0,71.87,20.22,88.83,37.18,18.31,18.31,26.49,36.44,26.83,37.2A4.08,4.08,0,0,1,243.66,129.63Z"},null,-1)])])):Fb("",!0)],16))}}),s_={key:0},l_={key:1},c_={key:2},u_={key:3},d_={key:4},p_={key:5},h_=Mg({name:"ScalarIconFileMd",props:{label:{},weight:{}},setup(e){const 
t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",s_,[...t[0]||(t[0]=[Rb("path",{d:"M100,152v56a12,12,0,0,1-24,0V190.07l-6.17,8.81a12,12,0,0,1-19.66,0L44,190.07V208a12,12,0,0,1-24,0V152a12,12,0,0,1,21.83-6.88L60,171.07l18.17-25.95A12,12,0,0,1,100,152Zm84,28a40,40,0,0,1-40,40H128a12,12,0,0,1-12-12V152a12,12,0,0,1,12-12h16A40,40,0,0,1,184,180Zm-24,0a16,16,0,0,0-16-16h-4v32h4A16,16,0,0,0,160,180Zm60-92V224a12,12,0,0,1-24,0V104H148a12,12,0,0,1-12-12V44H60v64a12,12,0,0,1-24,0V40A20,20,0,0,1,56,20h96a12,12,0,0,1,8.49,3.52l56,56A12,12,0,0,1,220,88Zm-60-8h23L160,57Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",l_,[...t[1]||(t[1]=[Rb("path",{d:"M208,88H152V32Z",opacity:"0.2"},null,-1),Rb("path",{d:"M213.66,82.34l-56-56A8,8,0,0,0,152,24H56A16,16,0,0,0,40,40v72a8,8,0,0,0,16,0V40h88V88a8,8,0,0,0,8,8h48V224a8,8,0,0,0,16,0V88A8,8,0,0,0,213.66,82.34ZM160,51.31,188.69,80H160ZM144,144H128a8,8,0,0,0-8,8v56a8,8,0,0,0,8,8h16a36,36,0,0,0,0-72Zm0,56h-8V160h8a20,20,0,0,1,0,40Zm-40-48v56a8,8,0,0,1-16,0V177.38L74.55,196.59a8,8,0,0,1-13.1,0L48,177.38V208a8,8,0,0,1-16,0V152a8,8,0,0,1,14.55-4.59L68,178.05l21.45-30.64A8,8,0,0,1,104,152Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",c_,[...t[2]||(t[2]=[Rb("path",{d:"M213.66,82.34l-56-56A8,8,0,0,0,152,24H56A16,16,0,0,0,40,40v76a4,4,0,0,0,4,4H196a4,4,0,0,1,4,4V224a8,8,0,0,0,9.19,7.91,8.15,8.15,0,0,0,6.81-8.16V88A8,8,0,0,0,213.66,82.34ZM152,88V44l44,44Zm-8,56H128a8,8,0,0,0-8,8v56a8,8,0,0,0,8,8h15.32c19.66,0,36.21-15.48,36.67-35.13A36,36,0,0,0,144,144Zm-.49,56H136V160h8a20,20,0,0,1,20,20.77C163.58,191.59,154.34,200,143.51,200ZM104,152v55.73A8.17,8.17,0,0,1,96.53,216,8,8,0,0,1,88,208V177.38l-13.32,19a8.3,8.3,0,0,1-4.2,3.2,8,8,0,0,1-9-3L48,177.38v30.35A8.17,8.17,0,0,1,40.53,216,8,8,0,0,1,32,208V152.31a8.27,8.27,0,0,1,4.56-7.53,8,8,0,0,1,10,2.63L68,178.05l21.27-30.39a8.28,8.28,0,0,1,8.06-3.55A8,8,0,0,1,104,1
52Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",u_,[...t[3]||(t[3]=[Rb("path",{d:"M212.24,83.76l-56-56A6,6,0,0,0,152,26H56A14,14,0,0,0,42,40v72a6,6,0,0,0,12,0V40a2,2,0,0,1,2-2h90V88a6,6,0,0,0,6,6h50V224a6,6,0,0,0,12,0V88A6,6,0,0,0,212.24,83.76ZM158,46.48,193.52,82H158ZM144,146H128a6,6,0,0,0-6,6v56a6,6,0,0,0,6,6h16a34,34,0,0,0,0-68Zm0,56H134V158h10a22,22,0,0,1,0,44Zm-42-50v56a6,6,0,0,1-12,0V171L72.92,195.44a6,6,0,0,1-9.84,0L46,171v37a6,6,0,0,1-12,0V152a6,6,0,0,1,10.92-3.44l23.08,33,23.08-33A6,6,0,0,1,102,152Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",d_,[...t[4]||(t[4]=[Rb("path",{d:"M213.66,82.34l-56-56A8,8,0,0,0,152,24H56A16,16,0,0,0,40,40v72a8,8,0,0,0,16,0V40h88V88a8,8,0,0,0,8,8h48V224a8,8,0,0,0,16,0V88A8,8,0,0,0,213.66,82.34ZM160,51.31,188.69,80H160ZM144,144H128a8,8,0,0,0-8,8v56a8,8,0,0,0,8,8h16a36,36,0,0,0,0-72Zm0,56h-8V160h8a20,20,0,0,1,0,40Zm-40-48v56a8,8,0,0,1-16,0V177.38L74.55,196.59a8,8,0,0,1-13.1,0L48,177.38V208a8,8,0,0,1-16,0V152a8,8,0,0,1,14.55-4.59L68,178.05l21.45-30.64A8,8,0,0,1,104,152Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",p_,[...t[5]||(t[5]=[Rb("path",{d:"M210.83,85.17l-56-56A4,4,0,0,0,152,28H56A12,12,0,0,0,44,40v72a4,4,0,0,0,8,0V40a4,4,0,0,1,4-4h92V88a4,4,0,0,0,4,4h52V224a4,4,0,0,0,8,0V88A4,4,0,0,0,210.83,85.17ZM156,41.65,198.34,84H156ZM144,148H128a4,4,0,0,0-4,4v56a4,4,0,0,0,4,4h16a32,32,0,0,0,0-64Zm0,56H132V156h12a24,24,0,0,1,0,48Zm-44-52v56a4,4,0,0,1-8,0V164.69l-20.72,29.6a4,4,0,0,1-6.56,0L44,164.69V208a4,4,0,0,1-8,0V152a4,4,0,0,1,7.28-2.29L68,185l24.72-35.31A4,4,0,0,1,100,152Z"},null,-1)])])):Fb("",!0)],16))}}),f_={key:0},m_={key:1},g_={key:2},v_={key:3},b_={key:4},y_={key:5},O_=Mg({name:"ScalarIconFileText",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 
256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",f_,[...t[0]||(t[0]=[Rb("path",{d:"M216.49,79.52l-56-56A12,12,0,0,0,152,20H56A20,20,0,0,0,36,40V216a20,20,0,0,0,20,20H200a20,20,0,0,0,20-20V88A12,12,0,0,0,216.49,79.52ZM160,57l23,23H160ZM60,212V44h76V92a12,12,0,0,0,12,12h48V212Zm112-80a12,12,0,0,1-12,12H96a12,12,0,0,1,0-24h64A12,12,0,0,1,172,132Zm0,40a12,12,0,0,1-12,12H96a12,12,0,0,1,0-24h64A12,12,0,0,1,172,172Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",m_,[...t[1]||(t[1]=[Rb("path",{d:"M208,88H152V32Z",opacity:"0.2"},null,-1),Rb("path",{d:"M213.66,82.34l-56-56A8,8,0,0,0,152,24H56A16,16,0,0,0,40,40V216a16,16,0,0,0,16,16H200a16,16,0,0,0,16-16V88A8,8,0,0,0,213.66,82.34ZM160,51.31,188.69,80H160ZM200,216H56V40h88V88a8,8,0,0,0,8,8h48V216Zm-32-80a8,8,0,0,1-8,8H96a8,8,0,0,1,0-16h64A8,8,0,0,1,168,136Zm0,32a8,8,0,0,1-8,8H96a8,8,0,0,1,0-16h64A8,8,0,0,1,168,168Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",g_,[...t[2]||(t[2]=[Rb("path",{d:"M213.66,82.34l-56-56A8,8,0,0,0,152,24H56A16,16,0,0,0,40,40V216a16,16,0,0,0,16,16H200a16,16,0,0,0,16-16V88A8,8,0,0,0,213.66,82.34ZM160,176H96a8,8,0,0,1,0-16h64a8,8,0,0,1,0,16Zm0-32H96a8,8,0,0,1,0-16h64a8,8,0,0,1,0,16Zm-8-56V44l44,44Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",v_,[...t[3]||(t[3]=[Rb("path",{d:"M212.24,83.76l-56-56A6,6,0,0,0,152,26H56A14,14,0,0,0,42,40V216a14,14,0,0,0,14,14H200a14,14,0,0,0,14-14V88A6,6,0,0,0,212.24,83.76ZM158,46.48,193.52,82H158ZM200,218H56a2,2,0,0,1-2-2V40a2,2,0,0,1,2-2h90V88a6,6,0,0,0,6,6h50V216A2,2,0,0,1,200,218Zm-34-82a6,6,0,0,1-6,6H96a6,6,0,0,1,0-12h64A6,6,0,0,1,166,136Zm0,32a6,6,0,0,1-6,6H96a6,6,0,0,1,0-12h64A6,6,0,0,1,166,168Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",b_,[...t[4]||(t[4]=[Rb("path",{d:"M213.66,82.34l-56-56A8,8,0,0,0,152,24H56A16,16,0,0,0,40,40V216a16,16,0,0,0,16,16H200a16,16,0,0,0,16-16V88A8,8,0,0,0,213.66,82.34ZM160,51.31,188.69,80H160ZM200,216H56V40h88V88a8,8,0,0,0,8,8h48V216Zm-32-80a8,8,0,0,1-8,8H96a8,8,0,0,1,0-16h64A8,8,0,0,1,168,136Zm0,32a8
,8,0,0,1-8,8H96a8,8,0,0,1,0-16h64A8,8,0,0,1,168,168Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",y_,[...t[5]||(t[5]=[Rb("path",{d:"M210.83,85.17l-56-56A4,4,0,0,0,152,28H56A12,12,0,0,0,44,40V216a12,12,0,0,0,12,12H200a12,12,0,0,0,12-12V88A4,4,0,0,0,210.83,85.17ZM156,41.65,198.34,84H156ZM200,220H56a4,4,0,0,1-4-4V40a4,4,0,0,1,4-4h92V88a4,4,0,0,0,4,4h52V216A4,4,0,0,1,200,220Zm-36-84a4,4,0,0,1-4,4H96a4,4,0,0,1,0-8h64A4,4,0,0,1,164,136Zm0,32a4,4,0,0,1-4,4H96a4,4,0,0,1,0-8h64A4,4,0,0,1,164,168Z"},null,-1)])])):Fb("",!0)],16))}}),w_={key:0},x_={key:1},k_={key:2},S_={key:3},__={key:4},T_={key:5},E_=Mg({name:"ScalarIconGavel",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",w_,[...t[0]||(t[0]=[Rb("path",{d:"M246.14,113.86l-16-16a20,20,0,0,0-23.06-3.75l-45.2-45.2a20,20,0,0,0-3.74-23.06l-16-16a20,20,0,0,0-28.28,0l-64,64a20,20,0,0,0,0,28.28l16,16a20,20,0,0,0,23,3.79L29.36,181.38a32,32,0,0,0,45.26,45.26L134,167.21a20,20,0,0,0,3.81,22.94l16,16a20,20,0,0,0,28.29,0l64-64a20,20,0,0,0,0-28.29ZM80,98.34,69.64,88,128,29.65,138.34,40ZM57.64,209.67a8,8,0,0,1-11.31-11.32l59.52-59.52,11.31,11.32Zm92.7-60.29-43.72-43.72,39-39,43.72,43.72Zm17.65,37L157.65,176,216,117.66,226.34,128Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",x_,[...t[1]||(t[1]=[Rb("path",{d:"M149.66,45.66l-64,64a8,8,0,0,1-11.32,0l-16-16a8,8,0,0,1,0-11.32l64-64a8,8,0,0,1,11.32,0l16,16A8,8,0,0,1,149.66,45.66Zm88,76.68-16-16a8,8,0,0,0-11.32,0l-64,64a8,8,0,0,0,0,11.32l16,16a8,8,0,0,0,11.32,0l64-64A8,8,0,0,0,237.66,122.34Z",opacity:"0.2"},null,-1),Rb("path",{d:"M243.32,116.69l-16-16a16,16,0,0,0-20.84-1.53L156.84,49.52a16,16,0,0,0-1.52-20.84l-16-16a16,16,0,0,0-22.63,0l-64,64a16,16,0,0,0,0,22.63l16,16a16,16,0,0,0,20.83,1.52L96.69,124,31.31,189.38A25,25,0,0,0,66.63,224.7L132,159.32l7.17,7.16a16,16,0,0,0,1.52,20.84l16,16a16,16,0,0,0,22.63,0l64-64
A16,16,0,0,0,243.32,116.69ZM80,104,64,88l64-64,16,16ZM55.32,213.38a9,9,0,0,1-12.69,0,9,9,0,0,1,0-12.68L108,135.32,120.69,148ZM101,105.66,145.66,61,195,110.34,150.35,155ZM168,192l-16-16,4-4h0l56-56h0l4-4,16,16Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",k_,[...t[2]||(t[2]=[Rb("path",{d:"M52.69,99.31a16,16,0,0,1,0-22.63l64-64a16,16,0,0,1,22.63,22.63l-64,64a16,16,0,0,1-22.63,0Zm190.63,17.37a16,16,0,0,0-22.63,0l-64,64a16,16,0,0,0,0,22.63h0a16,16,0,0,0,22.63,0l64-64A16,16,0,0,0,243.32,116.68Zm-35.11-15.8L155.12,47.79a4,4,0,0,0-5.66,0L87.8,109.45a4,4,0,0,0,0,5.66L103,130.34,28.69,204.69a16,16,0,0,0,22.62,22.62L125.66,153l15.23,15.23a4,4,0,0,0,5.66,0l61.66-61.66A4,4,0,0,0,208.21,100.88Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",S_,[...t[3]||(t[3]=[Rb("path",{d:"M241.91,118.1l-16-16a14,14,0,0,0-19.55-.23L154.13,49.64a14,14,0,0,0-.23-19.55l-16-16a14,14,0,0,0-19.8,0l-64,64a14,14,0,0,0,0,19.8l16,16a14,14,0,0,0,19.55.23L99.52,124,32.73,190.79a23,23,0,0,0,32.48,32.49L132,156.49l9.87,9.87a14,14,0,0,0,.23,19.55l16,16a14,14,0,0,0,19.8,0l64-64A14,14,0,0,0,241.91,118.1Zm-91.56,39.76-52.21-52.2,47.52-47.52,52.2,52.2ZM78.59,105.41l-16-16a2,2,0,0,1,0-2.83l64-64a2,2,0,0,1,2.83,0l16,16a2,2,0,0,1,0,2.83l-64,64A2,2,0,0,1,78.59,105.41ZM56.73,214.8a11,11,0,0,1-15.52-15.52L108,132.49,123.52,148Zm176.69-85.38-64,64a2,2,0,0,1-2.83,0l-16-16a2,2,0,0,1,0-2.83l64-64a2,2,0,0,1,2.83,0l16,16A2,2,0,0,1,233.42,129.42Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",__,[...t[4]||(t[4]=[Rb("path",{d:"M243.32,116.69l-16-16a16,16,0,0,0-20.84-1.53L156.84,49.52a16,16,0,0,0-1.52-20.84l-16-16a16,16,0,0,0-22.63,0l-64,64a16,16,0,0,0,0,22.63l16,16a16,16,0,0,0,20.83,1.52L96.69,124,31.31,189.38A25,25,0,0,0,66.63,224.7L132,159.32l7.17,7.16a16,16,0,0,0,1.52,20.84l16,16a16,16,0,0,0,22.63,0l64-64A16,16,0,0,0,243.32,116.69ZM80,104,64,88l64-64,16,16ZM55.32,213.38a9,9,0,0,1-12.69,0,9,9,0,0,1,0-12.68L108,135.32,120.69,148ZM101,105.66,145.66,61,195,110.34,150.35,155ZM168,192l-16-16,4-4h0l56-56h0l4-4,16,16Z"},null,-1)])])):
"thin"===fm(r)?(Tb(),$b("g",T_,[...t[5]||(t[5]=[Rb("path",{d:"M240.49,119.52l-16-16a12,12,0,0,0-17,0l-1.17,1.17-55-55,1.18-1.17a12,12,0,0,0,0-17l-16-16a12,12,0,0,0-17,0l-64,64a12,12,0,0,0,0,17l16,16a12,12,0,0,0,17,0l1.17-1.18L102.34,124l-68.2,68.21A21,21,0,0,0,63.8,221.87L132,153.66l12.69,12.69-1.18,1.17a12,12,0,0,0,0,17l16,16a12,12,0,0,0,17,0l64-64a12,12,0,0,0,0-17ZM77.17,106.83l-16-16a4,4,0,0,1,0-5.66l64-64a4,4,0,0,1,5.66,0l16,16a4,4,0,0,1,0,5.65l-64,64A4,4,0,0,1,77.17,106.83Zm-19,109.38A13,13,0,1,1,39.8,197.87L108,129.66,126.34,148ZM95.31,105.66l50.35-50.35,55,55-50.35,50.35Zm139.52,25.17-64,64a4,4,0,0,1-5.66,0l-16-16a4,4,0,0,1,0-5.65l64-64a4,4,0,0,1,5.66,0l16,16a4,4,0,0,1,0,5.66Z"},null,-1)])])):Fb("",!0)],16))}}),A_={key:0},C_={key:1},$_={key:2},P_={key:3},D_={key:4},I_={key:5},M_=Mg({name:"ScalarIconGear",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",A_,[...t[0]||(t[0]=[Rb("path",{d:"M128,76a52,52,0,1,0,52,52A52.06,52.06,0,0,0,128,76Zm0,80a28,28,0,1,1,28-28A28,28,0,0,1,128,156Zm92-27.21v-1.58l14-17.51a12,12,0,0,0,2.23-10.59A111.75,111.75,0,0,0,225,71.89,12,12,0,0,0,215.89,66L193.61,63.5l-1.11-1.11L190,40.1A12,12,0,0,0,184.11,31a111.67,111.67,0,0,0-27.23-11.27A12,12,0,0,0,146.3,22L128.79,36h-1.58L109.7,22a12,12,0,0,0-10.59-2.23A111.75,111.75,0,0,0,71.89,31.05,12,12,0,0,0,66,40.11L63.5,62.39,62.39,63.5,40.1,66A12,12,0,0,0,31,71.89,111.67,111.67,0,0,0,19.77,99.12,12,12,0,0,0,22,109.7l14,17.51v1.58L22,146.3a12,12,0,0,0-2.23,10.59,111.75,111.75,0,0,0,11.29,27.22A12,12,0,0,0,40.11,190l22.28,2.48,1.11,1.11L66,215.9A12,12,0,0,0,71.89,225a111.67,111.67,0,0,0,27.23,11.27A12,12,0,0,0,109.7,234l17.51-14h1.58l17.51,14a12,12,0,0,0,10.59,2.23A111.75,111.75,0,0,0,184.11,225a12,12,0,0,0,5.91-9.06l2.48-22.28,1.11-1.11L215.9,190a12,12,0,0,0,9.06-5.91,111.67,111.67,0,0,0,11.27-27.23A12,12,0
,0,0,234,146.3Zm-24.12-4.89a70.1,70.1,0,0,1,0,8.2,12,12,0,0,0,2.61,8.22l12.84,16.05A86.47,86.47,0,0,1,207,166.86l-20.43,2.27a12,12,0,0,0-7.65,4,69,69,0,0,1-5.8,5.8,12,12,0,0,0-4,7.65L166.86,207a86.47,86.47,0,0,1-10.49,4.35l-16.05-12.85a12,12,0,0,0-7.5-2.62c-.24,0-.48,0-.72,0a70.1,70.1,0,0,1-8.2,0,12.06,12.06,0,0,0-8.22,2.6L99.63,211.33A86.47,86.47,0,0,1,89.14,207l-2.27-20.43a12,12,0,0,0-4-7.65,69,69,0,0,1-5.8-5.8,12,12,0,0,0-7.65-4L49,166.86a86.47,86.47,0,0,1-4.35-10.49l12.84-16.05a12,12,0,0,0,2.61-8.22,70.1,70.1,0,0,1,0-8.2,12,12,0,0,0-2.61-8.22L44.67,99.63A86.47,86.47,0,0,1,49,89.14l20.43-2.27a12,12,0,0,0,7.65-4,69,69,0,0,1,5.8-5.8,12,12,0,0,0,4-7.65L89.14,49a86.47,86.47,0,0,1,10.49-4.35l16.05,12.85a12.06,12.06,0,0,0,8.22,2.6,70.1,70.1,0,0,1,8.2,0,12,12,0,0,0,8.22-2.6l16.05-12.85A86.47,86.47,0,0,1,166.86,49l2.27,20.43a12,12,0,0,0,4,7.65,69,69,0,0,1,5.8,5.8,12,12,0,0,0,7.65,4L207,89.14a86.47,86.47,0,0,1,4.35,10.49l-12.84,16.05A12,12,0,0,0,195.88,123.9Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",C_,[...t[1]||(t[1]=[Rb("path",{d:"M207.86,123.18l16.78-21a99.14,99.14,0,0,0-10.07-24.29l-26.7-3a81,81,0,0,0-6.81-6.81l-3-26.71a99.43,99.43,0,0,0-24.3-10l-21,16.77a81.59,81.59,0,0,0-9.64,0l-21-16.78A99.14,99.14,0,0,0,77.91,41.43l-3,26.7a81,81,0,0,0-6.81,6.81l-26.71,3a99.43,99.43,0,0,0-10,24.3l16.77,21a81.59,81.59,0,0,0,0,9.64l-16.78,21a99.14,99.14,0,0,0,10.07,24.29l26.7,3a81,81,0,0,0,6.81,6.81l3,26.71a99.43,99.43,0,0,0,24.3,10l21-16.77a81.59,81.59,0,0,0,9.64,0l21,16.78a99.14,99.14,0,0,0,24.29-10.07l3-26.7a81,81,0,0,0,6.81-6.81l26.71-3a99.43,99.43,0,0,0,10-24.3l-16.77-21A81.59,81.59,0,0,0,207.86,123.18ZM128,168a40,40,0,1,1,40-40A40,40,0,0,1,128,168Z",opacity:"0.2"},null,-1),Rb("path",{d:"M128,80a48,48,0,1,0,48,48A48.05,48.05,0,0,0,128,80Zm0,80a32,32,0,1,1,32-32A32,32,0,0,1,128,160Zm88-29.84q.06-2.16,0-4.32l14.92-18.64a8,8,0,0,0,1.48-7.06,107.6,107.6,0,0,0-10.88-26.25,8,8,0,0,0-6-3.93l-23.72-2.64q-1.48-1.56-3-3L186,40.54a8,8,0,0,0-3.94-6,107.29,107.29,0,0,0-26.25-10.86,8,8,
0,0,0-7.06,1.48L130.16,40Q128,40,125.84,40L107.2,25.11a8,8,0,0,0-7.06-1.48A107.6,107.6,0,0,0,73.89,34.51a8,8,0,0,0-3.93,6L67.32,64.27q-1.56,1.49-3,3L40.54,70a8,8,0,0,0-6,3.94,107.71,107.71,0,0,0-10.87,26.25,8,8,0,0,0,1.49,7.06L40,125.84Q40,128,40,130.16L25.11,148.8a8,8,0,0,0-1.48,7.06,107.6,107.6,0,0,0,10.88,26.25,8,8,0,0,0,6,3.93l23.72,2.64q1.49,1.56,3,3L70,215.46a8,8,0,0,0,3.94,6,107.71,107.71,0,0,0,26.25,10.87,8,8,0,0,0,7.06-1.49L125.84,216q2.16.06,4.32,0l18.64,14.92a8,8,0,0,0,7.06,1.48,107.21,107.21,0,0,0,26.25-10.88,8,8,0,0,0,3.93-6l2.64-23.72q1.56-1.48,3-3L215.46,186a8,8,0,0,0,6-3.94,107.71,107.71,0,0,0,10.87-26.25,8,8,0,0,0-1.49-7.06Zm-16.1-6.5a73.93,73.93,0,0,1,0,8.68,8,8,0,0,0,1.74,5.48l14.19,17.73a91.57,91.57,0,0,1-6.23,15L187,173.11a8,8,0,0,0-5.1,2.64,74.11,74.11,0,0,1-6.14,6.14,8,8,0,0,0-2.64,5.1l-2.51,22.58a91.32,91.32,0,0,1-15,6.23l-17.74-14.19a8,8,0,0,0-5-1.75h-.48a73.93,73.93,0,0,1-8.68,0,8.06,8.06,0,0,0-5.48,1.74L100.45,215.8a91.57,91.57,0,0,1-15-6.23L82.89,187a8,8,0,0,0-2.64-5.1,74.11,74.11,0,0,1-6.14-6.14,8,8,0,0,0-5.1-2.64L46.43,170.6a91.32,91.32,0,0,1-6.23-15l14.19-17.74a8,8,0,0,0,1.74-5.48,73.93,73.93,0,0,1,0-8.68,8,8,0,0,0-1.74-5.48L40.2,100.45a91.57,91.57,0,0,1,6.23-15L69,82.89a8,8,0,0,0,5.1-2.64,74.11,74.11,0,0,1,6.14-6.14A8,8,0,0,0,82.89,69L85.4,46.43a91.32,91.32,0,0,1,15-6.23l17.74,14.19a8,8,0,0,0,5.48,1.74,73.93,73.93,0,0,1,8.68,0,8.06,8.06,0,0,0,5.48-1.74L155.55,40.2a91.57,91.57,0,0,1,15,6.23L173.11,69a8,8,0,0,0,2.64,5.1,74.11,74.11,0,0,1,6.14,6.14,8,8,0,0,0,5.1,2.64l22.58,2.51a91.32,91.32,0,0,1,6.23,15l-14.19,17.74A8,8,0,0,0,199.87,123.66Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",$_,[...t[2]||(t[2]=[Rb("path",{d:"M216,130.16q.06-2.16,0-4.32l14.92-18.64a8,8,0,0,0,1.48-7.06,107.6,107.6,0,0,0-10.88-26.25,8,8,0,0,0-6-3.93l-23.72-2.64q-1.48-1.56-3-3L186,40.54a8,8,0,0,0-3.94-6,107.29,107.29,0,0,0-26.25-10.86,8,8,0,0,0-7.06,1.48L130.16,40Q128,40,125.84,40L107.2,25.11a8,8,0,0,0-7.06-1.48A107.6,107.6,0,0,0,73.89,34.51a8,8,0,0,0-3.93,6L67.32,6
4.27q-1.56,1.49-3,3L40.54,70a8,8,0,0,0-6,3.94,107.71,107.71,0,0,0-10.87,26.25,8,8,0,0,0,1.49,7.06L40,125.84Q40,128,40,130.16L25.11,148.8a8,8,0,0,0-1.48,7.06,107.6,107.6,0,0,0,10.88,26.25,8,8,0,0,0,6,3.93l23.72,2.64q1.49,1.56,3,3L70,215.46a8,8,0,0,0,3.94,6,107.71,107.71,0,0,0,26.25,10.87,8,8,0,0,0,7.06-1.49L125.84,216q2.16.06,4.32,0l18.64,14.92a8,8,0,0,0,7.06,1.48,107.21,107.21,0,0,0,26.25-10.88,8,8,0,0,0,3.93-6l2.64-23.72q1.56-1.48,3-3L215.46,186a8,8,0,0,0,6-3.94,107.71,107.71,0,0,0,10.87-26.25,8,8,0,0,0-1.49-7.06ZM128,168a40,40,0,1,1,40-40A40,40,0,0,1,128,168Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",P_,[...t[3]||(t[3]=[Rb("path",{d:"M128,82a46,46,0,1,0,46,46A46.06,46.06,0,0,0,128,82Zm0,80a34,34,0,1,1,34-34A34,34,0,0,1,128,162ZM214,130.84c.06-1.89.06-3.79,0-5.68L229.33,106a6,6,0,0,0,1.11-5.29A105.34,105.34,0,0,0,219.76,74.9a6,6,0,0,0-4.53-3l-24.45-2.71q-1.93-2.07-4-4l-2.72-24.46a6,6,0,0,0-3-4.53,105.65,105.65,0,0,0-25.77-10.66A6,6,0,0,0,150,26.68l-19.2,15.37c-1.89-.06-3.79-.06-5.68,0L106,26.67a6,6,0,0,0-5.29-1.11A105.34,105.34,0,0,0,74.9,36.24a6,6,0,0,0-3,4.53L69.23,65.22q-2.07,1.94-4,4L40.76,72a6,6,0,0,0-4.53,3,105.65,105.65,0,0,0-10.66,25.77A6,6,0,0,0,26.68,106l15.37,19.2c-.06,1.89-.06,3.79,0,5.68L26.67,150.05a6,6,0,0,0-1.11,5.29A105.34,105.34,0,0,0,36.24,181.1a6,6,0,0,0,4.53,3l24.45,2.71q1.94,2.07,4,4L72,215.24a6,6,0,0,0,3,4.53,105.65,105.65,0,0,0,25.77,10.66,6,6,0,0,0,5.29-1.11L125.16,214c1.89.06,3.79.06,5.68,0l19.21,15.38a6,6,0,0,0,3.75,1.31,6.2,6.2,0,0,0,1.54-.2,105.34,105.34,0,0,0,25.76-10.68,6,6,0,0,0,3-4.53l2.71-24.45q2.07-1.93,4-4l24.46-2.72a6,6,0,0,0,4.53-3,105.49,105.49,0,0,0,10.66-25.77,6,6,0,0,0-1.11-5.29Zm-3.1,41.63-23.64,2.63a6,6,0,0,0-3.82,2,75.14,75.14,0,0,1-6.31,6.31,6,6,0,0,0-2,3.82l-2.63,23.63A94.28,94.28,0,0,1,155.14,218l-18.57-14.86a6,6,0,0,0-3.75-1.31h-.36a78.07,78.07,0,0,1-8.92,0,6,6,0,0,0-4.11,1.3L100.87,218a94.13,94.13,0,0,1-17.34-7.17L80.9,187.21a6,6,0,0,0-2-3.82,75.14,75.14,0,0,1-6.31-6.31,6,6,0,0,0-3.82-2l-23.63-2.63A94.28,94.28,0,
0,1,38,155.14l14.86-18.57a6,6,0,0,0,1.3-4.11,78.07,78.07,0,0,1,0-8.92,6,6,0,0,0-1.3-4.11L38,100.87a94.13,94.13,0,0,1,7.17-17.34L68.79,80.9a6,6,0,0,0,3.82-2,75.14,75.14,0,0,1,6.31-6.31,6,6,0,0,0,2-3.82l2.63-23.63A94.28,94.28,0,0,1,100.86,38l18.57,14.86a6,6,0,0,0,4.11,1.3,78.07,78.07,0,0,1,8.92,0,6,6,0,0,0,4.11-1.3L155.13,38a94.13,94.13,0,0,1,17.34,7.17l2.63,23.64a6,6,0,0,0,2,3.82,75.14,75.14,0,0,1,6.31,6.31,6,6,0,0,0,3.82,2l23.63,2.63A94.28,94.28,0,0,1,218,100.86l-14.86,18.57a6,6,0,0,0-1.3,4.11,78.07,78.07,0,0,1,0,8.92,6,6,0,0,0,1.3,4.11L218,155.13A94.13,94.13,0,0,1,210.85,172.47Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",D_,[...t[4]||(t[4]=[Rb("path",{d:"M128,80a48,48,0,1,0,48,48A48.05,48.05,0,0,0,128,80Zm0,80a32,32,0,1,1,32-32A32,32,0,0,1,128,160Zm88-29.84q.06-2.16,0-4.32l14.92-18.64a8,8,0,0,0,1.48-7.06,107.21,107.21,0,0,0-10.88-26.25,8,8,0,0,0-6-3.93l-23.72-2.64q-1.48-1.56-3-3L186,40.54a8,8,0,0,0-3.94-6,107.71,107.71,0,0,0-26.25-10.87,8,8,0,0,0-7.06,1.49L130.16,40Q128,40,125.84,40L107.2,25.11a8,8,0,0,0-7.06-1.48A107.6,107.6,0,0,0,73.89,34.51a8,8,0,0,0-3.93,6L67.32,64.27q-1.56,1.49-3,3L40.54,70a8,8,0,0,0-6,3.94,107.71,107.71,0,0,0-10.87,26.25,8,8,0,0,0,1.49,7.06L40,125.84Q40,128,40,130.16L25.11,148.8a8,8,0,0,0-1.48,7.06,107.21,107.21,0,0,0,10.88,26.25,8,8,0,0,0,6,3.93l23.72,2.64q1.49,1.56,3,3L70,215.46a8,8,0,0,0,3.94,6,107.71,107.71,0,0,0,26.25,10.87,8,8,0,0,0,7.06-1.49L125.84,216q2.16.06,4.32,0l18.64,14.92a8,8,0,0,0,7.06,1.48,107.21,107.21,0,0,0,26.25-10.88,8,8,0,0,0,3.93-6l2.64-23.72q1.56-1.48,3-3L215.46,186a8,8,0,0,0,6-3.94,107.71,107.71,0,0,0,10.87-26.25,8,8,0,0,0-1.49-7.06Zm-16.1-6.5a73.93,73.93,0,0,1,0,8.68,8,8,0,0,0,1.74,5.48l14.19,17.73a91.57,91.57,0,0,1-6.23,15L187,173.11a8,8,0,0,0-5.1,2.64,74.11,74.11,0,0,1-6.14,6.14,8,8,0,0,0-2.64,5.1l-2.51,22.58a91.32,91.32,0,0,1-15,6.23l-17.74-14.19a8,8,0,0,0-5-1.75h-.48a73.93,73.93,0,0,1-8.68,0,8,8,0,0,0-5.48,1.74L100.45,215.8a91.57,91.57,0,0,1-15-6.23L82.89,187a8,8,0,0,0-2.64-5.1,74.11,74.11,0,0,1-6.14-6.14,8,8,
0,0,0-5.1-2.64L46.43,170.6a91.32,91.32,0,0,1-6.23-15l14.19-17.74a8,8,0,0,0,1.74-5.48,73.93,73.93,0,0,1,0-8.68,8,8,0,0,0-1.74-5.48L40.2,100.45a91.57,91.57,0,0,1,6.23-15L69,82.89a8,8,0,0,0,5.1-2.64,74.11,74.11,0,0,1,6.14-6.14A8,8,0,0,0,82.89,69L85.4,46.43a91.32,91.32,0,0,1,15-6.23l17.74,14.19a8,8,0,0,0,5.48,1.74,73.93,73.93,0,0,1,8.68,0,8,8,0,0,0,5.48-1.74L155.55,40.2a91.57,91.57,0,0,1,15,6.23L173.11,69a8,8,0,0,0,2.64,5.1,74.11,74.11,0,0,1,6.14,6.14,8,8,0,0,0,5.1,2.64l22.58,2.51a91.32,91.32,0,0,1,6.23,15l-14.19,17.74A8,8,0,0,0,199.87,123.66Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",I_,[...t[5]||(t[5]=[Rb("path",{d:"M128,84a44,44,0,1,0,44,44A44.05,44.05,0,0,0,128,84Zm0,80a36,36,0,1,1,36-36A36,36,0,0,1,128,164Zm83.93-32.49q.13-3.51,0-7l15.83-19.79a4,4,0,0,0,.75-3.53A103.64,103.64,0,0,0,218,75.9a4,4,0,0,0-3-2l-25.19-2.8c-1.58-1.71-3.24-3.37-4.95-4.95L182.07,41a4,4,0,0,0-2-3A104,104,0,0,0,154.82,27.5a4,4,0,0,0-3.53.74L131.51,44.07q-3.51-.14-7,0L104.7,28.24a4,4,0,0,0-3.53-.75A103.64,103.64,0,0,0,75.9,38a4,4,0,0,0-2,3l-2.8,25.19c-1.71,1.58-3.37,3.24-4.95,4.95L41,73.93a4,4,0,0,0-3,2A104,104,0,0,0,27.5,101.18a4,4,0,0,0,.74,3.53l15.83,19.78q-.14,3.51,0,7L28.24,151.3a4,4,0,0,0-.75,3.53A103.64,103.64,0,0,0,38,180.1a4,4,0,0,0,3,2l25.19,2.8c1.58,1.71,3.24,3.37,4.95,4.95l2.8,25.2a4,4,0,0,0,2,3,104,104,0,0,0,25.28,10.46,4,4,0,0,0,3.53-.74l19.78-15.83q3.51.13,7,0l19.79,15.83a4,4,0,0,0,2.5.88,4,4,0,0,0,1-.13A103.64,103.64,0,0,0,180.1,218a4,4,0,0,0,2-3l2.8-25.19c1.71-1.58,3.37-3.24,4.95-4.95l25.2-2.8a4,4,0,0,0,3-2,104,104,0,0,0,10.46-25.28,4,4,0,0,0-.74-3.53Zm.17,42.83-24.67,2.74a4,4,0,0,0-2.55,1.32,76.2,76.2,0,0,1-6.48,6.48,4,4,0,0,0-1.32,2.55l-2.74,24.66a95.45,95.45,0,0,1-19.64,8.15l-19.38-15.51a4,4,0,0,0-2.5-.87h-.24a73.67,73.67,0,0,1-9.16,0,4,4,0,0,0-2.74.87l-19.37,15.5a95.33,95.33,0,0,1-19.65-8.13l-2.74-24.67a4,4,0,0,0-1.32-2.55,76.2,76.2,0,0,1-6.48-6.48,4,4,0,0,0-2.55-1.32l-24.66-2.74a95.45,95.45,0,0,1-8.15-19.64l15.51-19.38a4,4,0,0,0,.87-2.74,77.76,77.76,0,0,1,0-9.16,4,4,0,0,
0-.87-2.74l-15.5-19.37A95.33,95.33,0,0,1,43.9,81.66l24.67-2.74a4,4,0,0,0,2.55-1.32,76.2,76.2,0,0,1,6.48-6.48,4,4,0,0,0,1.32-2.55l2.74-24.66a95.45,95.45,0,0,1,19.64-8.15l19.38,15.51a4,4,0,0,0,2.74.87,73.67,73.67,0,0,1,9.16,0,4,4,0,0,0,2.74-.87l19.37-15.5a95.33,95.33,0,0,1,19.65,8.13l2.74,24.67a4,4,0,0,0,1.32,2.55,76.2,76.2,0,0,1,6.48,6.48,4,4,0,0,0,2.55,1.32l24.66,2.74a95.45,95.45,0,0,1,8.15,19.64l-15.51,19.38a4,4,0,0,0-.87,2.74,77.76,77.76,0,0,1,0,9.16,4,4,0,0,0,.87,2.74l15.5,19.37A95.33,95.33,0,0,1,212.1,174.34Z"},null,-1)])])):Fb("",!0)],16))}}),N_={key:0},R_={key:1},L_={key:2},B_={key:3},j_={key:4},U_={key:5},z_=Mg({name:"ScalarIconGitBranch",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",N_,[...t[0]||(t[0]=[Rb("path",{d:"M236,64a36,36,0,1,0-48,33.94V112a4,4,0,0,1-4,4H96a27.8,27.8,0,0,0-4,.29V97.94a36,36,0,1,0-24,0v60.12a36,36,0,1,0,24,0V144a4,4,0,0,1,4-4h88a28,28,0,0,0,28-28V97.94A36.07,36.07,0,0,0,236,64ZM80,52A12,12,0,1,1,68,64,12,12,0,0,1,80,52Zm0,152a12,12,0,1,1,12-12A12,12,0,0,1,80,204ZM200,76a12,12,0,1,1,12-12A12,12,0,0,1,200,76Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",R_,[...t[1]||(t[1]=[Rb("path",{d:"M224,64a24,24,0,1,1-24-24A24,24,0,0,1,224,64Z",opacity:"0.2"},null,-1),Rb("path",{d:"M232,64a32,32,0,1,0-40,31v17a8,8,0,0,1-8,8H96a23.84,23.84,0,0,0-8,1.38V95a32,32,0,1,0-16,0v66a32,32,0,1,0,16,0V144a8,8,0,0,1,8-8h88a24,24,0,0,0,24-24V95A32.06,32.06,0,0,0,232,64ZM64,64A16,16,0,1,1,80,80,16,16,0,0,1,64,64ZM96,192a16,16,0,1,1-16-16A16,16,0,0,1,96,192ZM200,80a16,16,0,1,1,16-16A16,16,0,0,1,200,80Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",L_,[...t[2]||(t[2]=[Rb("path",{d:"M232,64a32,32,0,1,0-40,31v17a8,8,0,0,1-8,8H96a23.84,23.84,0,0,0-8,1.38V95a32,32,0,1,0-16,0v66a32,32,0,1,0,16,0V144a8,8,0,0,1,8-8h88a24,24,0,0,0,24-24V95A32.06,32.06,0,0,0,232,64ZM64,64A16,16,0,1,1
,80,80,16,16,0,0,1,64,64ZM96,192a16,16,0,1,1-16-16A16,16,0,0,1,96,192Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",B_,[...t[3]||(t[3]=[Rb("path",{d:"M230,64a30,30,0,1,0-36,29.4V112a10,10,0,0,1-10,10H96a21.84,21.84,0,0,0-10,2.42v-31a30,30,0,1,0-12,0v69.2a30,30,0,1,0,12,0V144a10,10,0,0,1,10-10h88a22,22,0,0,0,22-22V93.4A30.05,30.05,0,0,0,230,64ZM62,64A18,18,0,1,1,80,82,18,18,0,0,1,62,64ZM98,192a18,18,0,1,1-18-18A18,18,0,0,1,98,192ZM200,82a18,18,0,1,1,18-18A18,18,0,0,1,200,82Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",j_,[...t[4]||(t[4]=[Rb("path",{d:"M232,64a32,32,0,1,0-40,31v17a8,8,0,0,1-8,8H96a23.84,23.84,0,0,0-8,1.38V95a32,32,0,1,0-16,0v66a32,32,0,1,0,16,0V144a8,8,0,0,1,8-8h88a24,24,0,0,0,24-24V95A32.06,32.06,0,0,0,232,64ZM64,64A16,16,0,1,1,80,80,16,16,0,0,1,64,64ZM96,192a16,16,0,1,1-16-16A16,16,0,0,1,96,192ZM200,80a16,16,0,1,1,16-16A16,16,0,0,1,200,80Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",U_,[...t[5]||(t[5]=[Rb("path",{d:"M228,64a28,28,0,1,0-32,27.71V112a12,12,0,0,1-12,12H96a19.91,19.91,0,0,0-12,4V91.71a28,28,0,1,0-8,0v72.58a28,28,0,1,0,8,0V144a12,12,0,0,1,12-12h88a20,20,0,0,0,20-20V91.71A28,28,0,0,0,228,64ZM60,64A20,20,0,1,1,80,84,20,20,0,0,1,60,64Zm40,128a20,20,0,1,1-20-20A20,20,0,0,1,100,192ZM200,84a20,20,0,1,1,20-20A20,20,0,0,1,200,84Z"},null,-1)])])):Fb("",!0)],16))}}),F_={key:0},Z_={key:1},Q_={key:2},H_={key:3},V_={key:4},q_={key:5},W_=Mg({name:"ScalarIconGithubLogo",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 
256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",F_,[...t[0]||(t[0]=[Rb("path",{d:"M212.62,75.17A63.7,63.7,0,0,0,206.39,26,12,12,0,0,0,196,20a63.71,63.71,0,0,0-50,24H126A63.71,63.71,0,0,0,76,20a12,12,0,0,0-10.39,6,63.7,63.7,0,0,0-6.23,49.17A61.5,61.5,0,0,0,52,104v8a60.1,60.1,0,0,0,45.76,58.28A43.66,43.66,0,0,0,92,192v4H76a20,20,0,0,1-20-20,44.05,44.05,0,0,0-44-44,12,12,0,0,0,0,24,20,20,0,0,1,20,20,44.05,44.05,0,0,0,44,44H92v12a12,12,0,0,0,24,0V192a20,20,0,0,1,40,0v40a12,12,0,0,0,24,0V192a43.66,43.66,0,0,0-5.76-21.72A60.1,60.1,0,0,0,220,112v-8A61.5,61.5,0,0,0,212.62,75.17ZM196,112a36,36,0,0,1-36,36H112a36,36,0,0,1-36-36v-8a37.87,37.87,0,0,1,6.13-20.12,11.65,11.65,0,0,0,1.58-11.49,39.9,39.9,0,0,1-.4-27.72,39.87,39.87,0,0,1,26.41,17.8A12,12,0,0,0,119.82,68h32.35a12,12,0,0,0,10.11-5.53,39.84,39.84,0,0,1,26.41-17.8,39.9,39.9,0,0,1-.4,27.72,12,12,0,0,0,1.61,11.53A37.85,37.85,0,0,1,196,104Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",Z_,[...t[1]||(t[1]=[Rb("path",{d:"M208,104v8a48,48,0,0,1-48,48H136a32,32,0,0,1,32,32v40H104V192a32,32,0,0,1,32-32H112a48,48,0,0,1-48-48v-8a49.28,49.28,0,0,1,8.51-27.3A51.92,51.92,0,0,1,76,32a52,52,0,0,1,43.83,24h32.34A52,52,0,0,1,196,32a51.92,51.92,0,0,1,3.49,44.7A49.28,49.28,0,0,1,208,104Z",opacity:"0.2"},null,-1),Rb("path",{d:"M208.3,75.68A59.74,59.74,0,0,0,202.93,28,8,8,0,0,0,196,24a59.75,59.75,0,0,0-48,24H124A59.75,59.75,0,0,0,76,24a8,8,0,0,0-6.93,4,59.78,59.78,0,0,0-5.38,47.68A58.14,58.14,0,0,0,56,104v8a56.06,56.06,0,0,0,48.44,55.47A39.8,39.8,0,0,0,96,192v8H72a24,24,0,0,1-24-24A40,40,0,0,0,8,136a8,8,0,0,0,0,16,24,24,0,0,1,24,24,40,40,0,0,0,40,40H96v16a8,8,0,0,0,16,0V192a24,24,0,0,1,48,0v40a8,8,0,0,0,16,0V192a39.8,39.8,0,0,0-8.44-24.53A56.06,56.06,0,0,0,216,112v-8A58,58,0,0,0,208.3,75.68ZM200,112a40,40,0,0,1-40,40H112a40,40,0,0,1-40-40v-8a41.74,41.74,0,0,1,6.9-22.48A8,8,0,0,0,80,73.83a43.81,43.81,0,0,1,.79-33.58,43.88,43.88,0,0,1,32.32,20.06A8,8,0,0,0,119.82,64h32.35a8,8,0,0,0,6.74-3.69,43.87,43.87,0,
0,1,32.32-20.06A43.81,43.81,0,0,1,192,73.83a8.09,8.09,0,0,0,1,7.65A41.76,41.76,0,0,1,200,104Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",Q_,[...t[2]||(t[2]=[Rb("path",{d:"M216,104v8a56.06,56.06,0,0,1-48.44,55.47A39.8,39.8,0,0,1,176,192v40a8,8,0,0,1-8,8H104a8,8,0,0,1-8-8V216H72a40,40,0,0,1-40-40A24,24,0,0,0,8,152a8,8,0,0,1,0-16,40,40,0,0,1,40,40,24,24,0,0,0,24,24H96v-8a39.8,39.8,0,0,1,8.44-24.53A56.06,56.06,0,0,1,56,112v-8a58.14,58.14,0,0,1,7.69-28.32A59.78,59.78,0,0,1,69.07,28,8,8,0,0,1,76,24a59.75,59.75,0,0,1,48,24h24a59.75,59.75,0,0,1,48-24,8,8,0,0,1,6.93,4,59.74,59.74,0,0,1,5.37,47.68A58,58,0,0,1,216,104Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",H_,[...t[3]||(t[3]=[Rb("path",{d:"M206.13,75.92A57.79,57.79,0,0,0,201.2,29a6,6,0,0,0-5.2-3,57.77,57.77,0,0,0-47,24H123A57.77,57.77,0,0,0,76,26a6,6,0,0,0-5.2,3,57.79,57.79,0,0,0-4.93,46.92A55.88,55.88,0,0,0,58,104v8a54.06,54.06,0,0,0,50.45,53.87A37.85,37.85,0,0,0,98,192v10H72a26,26,0,0,1-26-26A38,38,0,0,0,8,138a6,6,0,0,0,0,12,26,26,0,0,1,26,26,38,38,0,0,0,38,38H98v18a6,6,0,0,0,12,0V192a26,26,0,0,1,52,0v40a6,6,0,0,0,12,0V192a37.85,37.85,0,0,0-10.45-26.13A54.06,54.06,0,0,0,214,112v-8A55.88,55.88,0,0,0,206.13,75.92ZM202,112a42,42,0,0,1-42,42H112a42,42,0,0,1-42-42v-8a43.86,43.86,0,0,1,7.3-23.69,6,6,0,0,0,.81-5.76,45.85,45.85,0,0,1,1.43-36.42,45.85,45.85,0,0,1,35.23,21.1A6,6,0,0,0,119.83,62h32.34a6,6,0,0,0,5.06-2.76,45.83,45.83,0,0,1,35.23-21.11,45.85,45.85,0,0,1,1.43,36.42,6,6,0,0,0,.79,5.74A43.78,43.78,0,0,1,202,104Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",V_,[...t[4]||(t[4]=[Rb("path",{d:"M208.31,75.68A59.78,59.78,0,0,0,202.93,28,8,8,0,0,0,196,24a59.75,59.75,0,0,0-48,24H124A59.75,59.75,0,0,0,76,24a8,8,0,0,0-6.93,4,59.78,59.78,0,0,0-5.38,47.68A58.14,58.14,0,0,0,56,104v8a56.06,56.06,0,0,0,48.44,55.47A39.8,39.8,0,0,0,96,192v8H72a24,24,0,0,1-24-24A40,40,0,0,0,8,136a8,8,0,0,0,0,16,24,24,0,0,1,24,24,40,40,0,0,0,40,40H96v16a8,8,0,0,0,16,0V192a24,24,0,0,1,48,0v40a8,8,0,0,0,16,0V192a39.8,39.8,0,0,0-8.44-24.53A56.06,56.06,
0,0,0,216,112v-8A58.14,58.14,0,0,0,208.31,75.68ZM200,112a40,40,0,0,1-40,40H112a40,40,0,0,1-40-40v-8a41.74,41.74,0,0,1,6.9-22.48A8,8,0,0,0,80,73.83a43.81,43.81,0,0,1,.79-33.58,43.88,43.88,0,0,1,32.32,20.06A8,8,0,0,0,119.82,64h32.35a8,8,0,0,0,6.74-3.69,43.87,43.87,0,0,1,32.32-20.06A43.81,43.81,0,0,1,192,73.83a8.09,8.09,0,0,0,1,7.65A41.72,41.72,0,0,1,200,104Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",q_,[...t[5]||(t[5]=[Rb("path",{d:"M203.94,76.16A55.73,55.73,0,0,0,199.46,30,4,4,0,0,0,196,28a55.78,55.78,0,0,0-46,24H122A55.78,55.78,0,0,0,76,28a4,4,0,0,0-3.46,2,55.73,55.73,0,0,0-4.48,46.16A53.78,53.78,0,0,0,60,104v8a52.06,52.06,0,0,0,52,52h1.41A36,36,0,0,0,100,192v12H72a28,28,0,0,1-28-28A36,36,0,0,0,8,140a4,4,0,0,0,0,8,28,28,0,0,1,28,28,36,36,0,0,0,36,36h28v20a4,4,0,0,0,8,0V192a28,28,0,0,1,56,0v40a4,4,0,0,0,8,0V192a36,36,0,0,0-13.41-28H160a52.06,52.06,0,0,0,52-52v-8A53.78,53.78,0,0,0,203.94,76.16ZM204,112a44.05,44.05,0,0,1-44,44H112a44.05,44.05,0,0,1-44-44v-8a45.76,45.76,0,0,1,7.71-24.89,4,4,0,0,0,.53-3.84,47.82,47.82,0,0,1,2.1-39.21,47.8,47.8,0,0,1,38.12,22.1A4,4,0,0,0,119.83,60h32.34a4,4,0,0,0,3.37-1.84,47.8,47.8,0,0,1,38.12-22.1,47.82,47.82,0,0,1,2.1,39.21,4,4,0,0,0,.53,3.83A45.85,45.85,0,0,1,204,104Z"},null,-1)])])):Fb("",!0)],16))}}),X_={key:0},G_={key:1},Y_={key:2},K_={key:3},J_={key:4},eT={key:5},tT=Mg({name:"ScalarIconGlobe",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 
256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",X_,[...t[0]||(t[0]=[Rb("path",{d:"M128,20A108,108,0,1,0,236,128,108.12,108.12,0,0,0,128,20Zm0,187a113.4,113.4,0,0,1-20.39-35h40.82a116.94,116.94,0,0,1-10,20.77A108.61,108.61,0,0,1,128,207Zm-26.49-59a135.42,135.42,0,0,1,0-40h53a135.42,135.42,0,0,1,0,40ZM44,128a83.49,83.49,0,0,1,2.43-20H77.25a160.63,160.63,0,0,0,0,40H46.43A83.49,83.49,0,0,1,44,128Zm84-79a113.4,113.4,0,0,1,20.39,35H107.59a116.94,116.94,0,0,1,10-20.77A108.61,108.61,0,0,1,128,49Zm50.73,59h30.82a83.52,83.52,0,0,1,0,40H178.75a160.63,160.63,0,0,0,0-40Zm20.77-24H173.71a140.82,140.82,0,0,0-15.5-34.36A84.51,84.51,0,0,1,199.52,84ZM97.79,49.64A140.82,140.82,0,0,0,82.29,84H56.48A84.51,84.51,0,0,1,97.79,49.64ZM56.48,172H82.29a140.82,140.82,0,0,0,15.5,34.36A84.51,84.51,0,0,1,56.48,172Zm101.73,34.36A140.82,140.82,0,0,0,173.71,172h25.81A84.51,84.51,0,0,1,158.21,206.36Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",G_,[...t[1]||(t[1]=[Rb("path",{d:"M224,128a96,96,0,1,1-96-96A96,96,0,0,1,224,128Z",opacity:"0.2"},null,-1),Rb("path",{d:"M128,24h0A104,104,0,1,0,232,128,104.12,104.12,0,0,0,128,24Zm88,104a87.61,87.61,0,0,1-3.33,24H174.16a157.44,157.44,0,0,0,0-48h38.51A87.61,87.61,0,0,1,216,128ZM102,168H154a115.11,115.11,0,0,1-26,45A115.27,115.27,0,0,1,102,168Zm-3.9-16a140.84,140.84,0,0,1,0-48h59.88a140.84,140.84,0,0,1,0,48ZM40,128a87.61,87.61,0,0,1,3.33-24H81.84a157.44,157.44,0,0,0,0,48H43.33A87.61,87.61,0,0,1,40,128ZM154,88H102a115.11,115.11,0,0,1,26-45A115.27,115.27,0,0,1,154,88Zm52.33,0H170.71a135.28,135.28,0,0,0-22.3-45.6A88.29,88.29,0,0,1,206.37,88ZM107.59,42.4A135.28,135.28,0,0,0,85.29,88H49.63A88.29,88.29,0,0,1,107.59,42.4ZM49.63,168H85.29a135.28,135.28,0,0,0,22.3,45.6A88.29,88.29,0,0,1,49.63,168Zm98.78,45.6a135.28,135.28,0,0,0,22.3-45.6h35.66A88.29,88.29,0,0,1,148.41,213.6Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",Y_,[...t[2]||(t[2]=[Rb("path",{d:"M128,24h0A104,104,0,1,0,232,128,104.12,104.12,0,0,0,128,24Zm78.36,64H170.71a135.28
,135.28,0,0,0-22.3-45.6A88.29,88.29,0,0,1,206.37,88ZM216,128a87.61,87.61,0,0,1-3.33,24H174.16a157.44,157.44,0,0,0,0-48h38.51A87.61,87.61,0,0,1,216,128ZM128,43a115.27,115.27,0,0,1,26,45H102A115.11,115.11,0,0,1,128,43ZM102,168H154a115.11,115.11,0,0,1-26,45A115.27,115.27,0,0,1,102,168Zm-3.9-16a140.84,140.84,0,0,1,0-48h59.88a140.84,140.84,0,0,1,0,48Zm50.35,61.6a135.28,135.28,0,0,0,22.3-45.6h35.66A88.29,88.29,0,0,1,148.41,213.6Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",K_,[...t[3]||(t[3]=[Rb("path",{d:"M128,26A102,102,0,1,0,230,128,102.12,102.12,0,0,0,128,26Zm81.57,64H169.19a132.58,132.58,0,0,0-25.73-50.67A90.29,90.29,0,0,1,209.57,90ZM218,128a89.7,89.7,0,0,1-3.83,26H171.81a155.43,155.43,0,0,0,0-52h42.36A89.7,89.7,0,0,1,218,128Zm-90,87.83a110,110,0,0,1-15.19-19.45A124.24,124.24,0,0,1,99.35,166h57.3a124.24,124.24,0,0,1-13.46,30.38A110,110,0,0,1,128,215.83ZM96.45,154a139.18,139.18,0,0,1,0-52h63.1a139.18,139.18,0,0,1,0,52ZM38,128a89.7,89.7,0,0,1,3.83-26H84.19a155.43,155.43,0,0,0,0,52H41.83A89.7,89.7,0,0,1,38,128Zm90-87.83a110,110,0,0,1,15.19,19.45A124.24,124.24,0,0,1,156.65,90H99.35a124.24,124.24,0,0,1,13.46-30.38A110,110,0,0,1,128,40.17Zm-15.46-.84A132.58,132.58,0,0,0,86.81,90H46.43A90.29,90.29,0,0,1,112.54,39.33ZM46.43,166H86.81a132.58,132.58,0,0,0,25.73,50.67A90.29,90.29,0,0,1,46.43,166Zm97,50.67A132.58,132.58,0,0,0,169.19,166h40.38A90.29,90.29,0,0,1,143.46,216.67Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",J_,[...t[4]||(t[4]=[Rb("path",{d:"M128,24h0A104,104,0,1,0,232,128,104.12,104.12,0,0,0,128,24Zm88,104a87.61,87.61,0,0,1-3.33,24H174.16a157.44,157.44,0,0,0,0-48h38.51A87.61,87.61,0,0,1,216,128ZM102,168H154a115.11,115.11,0,0,1-26,45A115.27,115.27,0,0,1,102,168Zm-3.9-16a140.84,140.84,0,0,1,0-48h59.88a140.84,140.84,0,0,1,0,48ZM40,128a87.61,87.61,0,0,1,3.33-24H81.84a157.44,157.44,0,0,0,0,48H43.33A87.61,87.61,0,0,1,40,128ZM154,88H102a115.11,115.11,0,0,1,26-45A115.27,115.27,0,0,1,154,88Zm52.33,0H170.71a135.28,135.28,0,0,0-22.3-45.6A88.29,88.29,0,0,1,206.37,88ZM107.59,4
2.4A135.28,135.28,0,0,0,85.29,88H49.63A88.29,88.29,0,0,1,107.59,42.4ZM49.63,168H85.29a135.28,135.28,0,0,0,22.3,45.6A88.29,88.29,0,0,1,49.63,168Zm98.78,45.6a135.28,135.28,0,0,0,22.3-45.6h35.66A88.29,88.29,0,0,1,148.41,213.6Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",eT,[...t[5]||(t[5]=[Rb("path",{d:"M128,28h0A100,100,0,1,0,228,128,100.11,100.11,0,0,0,128,28Zm0,190.61c-6.33-6.09-23-24.41-31.27-54.61h62.54C151,194.2,134.33,212.52,128,218.61ZM94.82,156a140.42,140.42,0,0,1,0-56h66.36a140.42,140.42,0,0,1,0,56ZM128,37.39c6.33,6.09,23,24.41,31.27,54.61H96.73C105,61.8,121.67,43.48,128,37.39ZM169.41,100h46.23a92.09,92.09,0,0,1,0,56H169.41a152.65,152.65,0,0,0,0-56Zm43.25-8h-45a129.39,129.39,0,0,0-29.19-55.4A92.25,92.25,0,0,1,212.66,92ZM117.54,36.6A129.39,129.39,0,0,0,88.35,92h-45A92.25,92.25,0,0,1,117.54,36.6ZM40.36,100H86.59a152.65,152.65,0,0,0,0,56H40.36a92.09,92.09,0,0,1,0-56Zm3,64h45a129.39,129.39,0,0,0,29.19,55.4A92.25,92.25,0,0,1,43.34,164Zm95.12,55.4A129.39,129.39,0,0,0,167.65,164h45A92.25,92.25,0,0,1,138.46,219.4Z"},null,-1)])])):Fb("",!0)],16))}}),nT={key:0},rT={key:1},aT={key:2},oT={key:3},iT={key:4},sT={key:5},lT=Mg({name:"ScalarIconGlobeSimple",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 
256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",nT,[...t[0]||(t[0]=[Rb("path",{d:"M128,20A108,108,0,1,0,236,128,108.12,108.12,0,0,0,128,20Zm83.13,96H179.56a144.3,144.3,0,0,0-21.35-66.36A84.22,84.22,0,0,1,211.13,116ZM128,207c-9.36-10.81-24.46-33.13-27.45-67h54.94a119.74,119.74,0,0,1-17.11,52.77A108.61,108.61,0,0,1,128,207Zm-27.45-91a119.74,119.74,0,0,1,17.11-52.77A108.61,108.61,0,0,1,128,49c9.36,10.81,24.46,33.13,27.45,67ZM97.79,49.64A144.3,144.3,0,0,0,76.44,116H44.87A84.22,84.22,0,0,1,97.79,49.64ZM44.87,140H76.44a144.3,144.3,0,0,0,21.35,66.36A84.22,84.22,0,0,1,44.87,140Zm113.34,66.36A144.3,144.3,0,0,0,179.56,140h31.57A84.22,84.22,0,0,1,158.21,206.36Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",rT,[...t[1]||(t[1]=[Rb("path",{d:"M224,128a96,96,0,1,1-96-96A96,96,0,0,1,224,128Z",opacity:"0.2"},null,-1),Rb("path",{d:"M128,24h0A104,104,0,1,0,232,128,104.12,104.12,0,0,0,128,24Zm87.62,96H175.79C174,83.49,159.94,57.67,148.41,42.4A88.19,88.19,0,0,1,215.63,120ZM96.23,136h63.54c-2.31,41.61-22.23,67.11-31.77,77C118.45,203.1,98.54,177.6,96.23,136Zm0-16C98.54,78.39,118.46,52.89,128,43c9.55,9.93,29.46,35.43,31.77,77Zm11.36-77.6C96.06,57.67,82,83.49,80.21,120H40.37A88.19,88.19,0,0,1,107.59,42.4ZM40.37,136H80.21c1.82,36.51,15.85,62.33,27.38,77.6A88.19,88.19,0,0,1,40.37,136Zm108,77.6c11.53-15.27,25.56-41.09,27.38-77.6h39.84A88.19,88.19,0,0,1,148.41,213.6Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",aT,[...t[2]||(t[2]=[Rb("path",{d:"M128,24h0A104,104,0,1,0,232,128,104.12,104.12,0,0,0,128,24Zm87.62,96H175.79C174,83.49,159.94,57.67,148.41,42.4A88.19,88.19,0,0,1,215.63,120ZM96.23,136h63.54c-2.31,41.61-22.23,67.11-31.77,77C118.45,203.1,98.54,177.6,96.23,136Zm0-16C98.54,78.39,118.46,52.89,128,43c9.55,9.93,29.46,35.43,31.77,77Zm52.18,93.6c11.53-15.27,25.56-41.09,27.38-77.6h39.84A88.19,88.19,0,0,1,148.41,213.6Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",oT,[...t[3]||(t[3]=[Rb("path",{d:"M128,26A102,102,0,1,0,230,128,102.12,102.12,0,0,0,128,26Zm89.8,96
H173.89c-1.54-40.77-18.48-68.23-30.43-82.67A90.19,90.19,0,0,1,217.8,122ZM128,215.83a110,110,0,0,1-15.19-19.45A128.37,128.37,0,0,1,94.13,134h67.74a128.37,128.37,0,0,1-18.68,62.38A110,110,0,0,1,128,215.83ZM94.13,122a128.37,128.37,0,0,1,18.68-62.38A110,110,0,0,1,128,40.17a110,110,0,0,1,15.19,19.45A128.37,128.37,0,0,1,161.87,122Zm18.41-82.67c-12,14.44-28.89,41.9-30.43,82.67H38.2A90.19,90.19,0,0,1,112.54,39.33ZM38.2,134H82.11c1.54,40.77,18.48,68.23,30.43,82.67A90.19,90.19,0,0,1,38.2,134Zm105.26,82.67c11.95-14.44,28.89-41.9,30.43-82.67H217.8A90.19,90.19,0,0,1,143.46,216.67Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",iT,[...t[4]||(t[4]=[Rb("path",{d:"M128,24h0A104,104,0,1,0,232,128,104.12,104.12,0,0,0,128,24Zm87.62,96H175.79C174,83.49,159.94,57.67,148.41,42.4A88.19,88.19,0,0,1,215.63,120ZM96.23,136h63.54c-2.31,41.61-22.23,67.11-31.77,77C118.45,203.1,98.54,177.6,96.23,136Zm0-16C98.54,78.39,118.46,52.89,128,43c9.55,9.93,29.46,35.43,31.77,77Zm11.36-77.6C96.06,57.67,82,83.49,80.21,120H40.37A88.19,88.19,0,0,1,107.59,42.4ZM40.37,136H80.21c1.82,36.51,15.85,62.33,27.38,77.6A88.19,88.19,0,0,1,40.37,136Zm108,77.6c11.53-15.27,25.56-41.09,27.38-77.6h39.84A88.19,88.19,0,0,1,148.41,213.6Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",sT,[...t[5]||(t[5]=[Rb("path",{d:"M128,28h0A100,100,0,1,0,228,128,100.11,100.11,0,0,0,128,28Zm91.9,96h-48c-1.15-45.55-21.74-74.52-33.48-87.4A92.14,92.14,0,0,1,219.91,124ZM128,218.61c-8.32-8-34.57-37.13-35.93-86.61h71.86C162.57,181.48,136.32,210.61,128,218.61ZM92.07,124C93.43,74.52,119.68,45.39,128,37.39c8.32,8,34.57,37.13,35.93,86.61Zm25.47-87.4C105.8,49.48,85.21,78.45,84.06,124h-48A92.14,92.14,0,0,1,117.54,36.6ZM36.09,132h48c1.15,45.55,21.74,74.52,33.48,87.4A92.14,92.14,0,0,1,36.09,132Zm102.37,87.4c11.74-12.88,32.33-41.85,33.48-87.4h48A92.14,92.14,0,0,1,138.46,219.4Z"},null,-1)])])):Fb("",!0)],16))}}),cT={key:0},uT={key:1},dT={key:2},pT={key:3},hT={key:4},fT={key:5},mT=Mg({name:"ScalarIconHash",props:{label:{},weight:{}},setup(e){const 
t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",cT,[...t[0]||(t[0]=[Rb("path",{d:"M224,84H180.2l7.61-41.85a12,12,0,0,0-23.62-4.3L155.8,84H116.2l7.61-41.85a12,12,0,1,0-23.62-4.3L91.8,84H48a12,12,0,0,0,0,24H87.44l-7.27,40H32a12,12,0,0,0,0,24H75.8l-7.61,41.85a12,12,0,0,0,9.66,14A11.43,11.43,0,0,0,80,228a12,12,0,0,0,11.8-9.86L100.2,172h39.6l-7.61,41.85a12,12,0,0,0,9.66,14,11.43,11.43,0,0,0,2.16.2,12,12,0,0,0,11.8-9.86L164.2,172H208a12,12,0,0,0,0-24H168.56l7.27-40H224a12,12,0,0,0,0-24Zm-79.83,64H104.56l7.27-40h39.61Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",uT,[...t[1]||(t[1]=[Rb("path",{d:"M165.82,96l-11.64,64h-64l11.64-64Z",opacity:"0.2"},null,-1),Rb("path",{d:"M224,88H175.4l8.47-46.57a8,8,0,0,0-15.74-2.86l-9,49.43H111.4l8.47-46.57a8,8,0,0,0-15.74-2.86L95.14,88H48a8,8,0,0,0,0,16H92.23L83.5,152H32a8,8,0,0,0,0,16H80.6l-8.47,46.57a8,8,0,0,0,6.44,9.3A7.79,7.79,0,0,0,80,224a8,8,0,0,0,7.86-6.57l9-49.43H144.6l-8.47,46.57a8,8,0,0,0,6.44,9.3A7.79,7.79,0,0,0,144,224a8,8,0,0,0,7.86-6.57l9-49.43H208a8,8,0,0,0,0-16H163.77l8.73-48H224a8,8,0,0,0,0-16Zm-76.5,64H99.77l8.73-48h47.73Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",dT,[...t[2]||(t[2]=[Rb("path",{d:"M116.25,112h31.5l-8,32h-31.5ZM224,48V208a16,16,0,0,1-16,16H48a16,16,0,0,1-16-16V48A16,16,0,0,1,48,32H208A16,16,0,0,1,224,48Zm-16,56a8,8,0,0,0-8-8H168.25l7.51-30.06a8,8,0,0,0-15.52-3.88L151.75,96h-31.5l7.51-30.06a8,8,0,0,0-15.52-3.88L103.75,96H64a8,8,0,0,0,0,16H99.75l-8,32H56a8,8,0,0,0,0,16H87.75l-7.51,30.06a8,8,0,0,0,5.82,9.7,8.13,8.13,0,0,0,2,.24,8,8,0,0,0,7.75-6.06L104.25,160h31.5l-7.51,30.06a8,8,0,0,0,5.82,9.7A8.13,8.13,0,0,0,136,200a8,8,0,0,0,7.75-6.06L152.25,160H192a8,8,0,0,0,0-16H156.25l8-32H200A8,8,0,0,0,208,104Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",pT,[...t[3]||(t[3]=[Rb("path",{d:"M224,90H173l8.89-48.93a6,6,0,1,0-11.8-2.14L160.81,90H109l8.89-48.93a6,6,0,0,0-11.8
-2.14L96.81,90H48a6,6,0,0,0,0,12H94.63l-9.46,52H32a6,6,0,0,0,0,12H83L74.1,214.93a6,6,0,0,0,4.83,7A5.64,5.64,0,0,0,80,222a6,6,0,0,0,5.89-4.93L95.19,166H147l-8.89,48.93a6,6,0,0,0,4.83,7,5.64,5.64,0,0,0,1.08.1,6,6,0,0,0,5.89-4.93L159.19,166H208a6,6,0,0,0,0-12H161.37l9.46-52H224a6,6,0,0,0,0-12Zm-74.83,64H97.37l9.46-52h51.8Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",hT,[...t[4]||(t[4]=[Rb("path",{d:"M224,88H175.4l8.47-46.57a8,8,0,0,0-15.74-2.86l-9,49.43H111.4l8.47-46.57a8,8,0,0,0-15.74-2.86L95.14,88H48a8,8,0,0,0,0,16H92.23L83.5,152H32a8,8,0,0,0,0,16H80.6l-8.47,46.57a8,8,0,0,0,6.44,9.3A7.79,7.79,0,0,0,80,224a8,8,0,0,0,7.86-6.57l9-49.43H144.6l-8.47,46.57a8,8,0,0,0,6.44,9.3A7.79,7.79,0,0,0,144,224a8,8,0,0,0,7.86-6.57l9-49.43H208a8,8,0,0,0,0-16H163.77l8.73-48H224a8,8,0,0,0,0-16Zm-76.5,64H99.77l8.73-48h47.73Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",fT,[...t[5]||(t[5]=[Rb("path",{d:"M224,92H170.61l9.33-51.28a4,4,0,1,0-7.88-1.44L162.48,92H106.61l9.33-51.28a4,4,0,1,0-7.88-1.44L98.48,92H48a4,4,0,0,0,0,8H97L86.84,156H32a4,4,0,0,0,0,8H85.39l-9.33,51.28a4,4,0,0,0,3.22,4.65A3.65,3.65,0,0,0,80,220a4,4,0,0,0,3.94-3.29L93.52,164h55.87l-9.33,51.28a4,4,0,0,0,3.22,4.65,3.65,3.65,0,0,0,.72.07,4,4,0,0,0,3.94-3.29L157.52,164H208a4,4,0,0,0,0-8H159l10.19-56H224a4,4,0,0,0,0-8Zm-73.16,64H95l10.19-56H161Z"},null,-1)])])):Fb("",!0)],16))}}),gT={key:0},vT={key:1},bT={key:2},yT={key:3},OT={key:4},wT={key:5},xT=Mg({name:"ScalarIconHouse",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 
256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",gT,[...t[0]||(t[0]=[Rb("path",{d:"M222.14,105.85l-80-80a20,20,0,0,0-28.28,0l-80,80A19.86,19.86,0,0,0,28,120v96a12,12,0,0,0,12,12h64a12,12,0,0,0,12-12V164h24v52a12,12,0,0,0,12,12h64a12,12,0,0,0,12-12V120A19.86,19.86,0,0,0,222.14,105.85ZM204,204H164V152a12,12,0,0,0-12-12H104a12,12,0,0,0-12,12v52H52V121.65l76-76,76,76Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",vT,[...t[1]||(t[1]=[Rb("path",{d:"M216,120v96H152V152H104v64H40V120a8,8,0,0,1,2.34-5.66l80-80a8,8,0,0,1,11.32,0l80,80A8,8,0,0,1,216,120Z",opacity:"0.2"},null,-1),Rb("path",{d:"M219.31,108.68l-80-80a16,16,0,0,0-22.62,0l-80,80A15.87,15.87,0,0,0,32,120v96a8,8,0,0,0,8,8h64a8,8,0,0,0,8-8V160h32v56a8,8,0,0,0,8,8h64a8,8,0,0,0,8-8V120A15.87,15.87,0,0,0,219.31,108.68ZM208,208H160V152a8,8,0,0,0-8-8H104a8,8,0,0,0-8,8v56H48V120l80-80,80,80Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",bT,[...t[2]||(t[2]=[Rb("path",{d:"M224,120v96a8,8,0,0,1-8,8H160a8,8,0,0,1-8-8V164a4,4,0,0,0-4-4H108a4,4,0,0,0-4,4v52a8,8,0,0,1-8,8H40a8,8,0,0,1-8-8V120a16,16,0,0,1,4.69-11.31l80-80a16,16,0,0,1,22.62,0l80,80A16,16,0,0,1,224,120Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",yT,[...t[3]||(t[3]=[Rb("path",{d:"M217.9,110.1l-80-80a14,14,0,0,0-19.8,0l-80,80A13.92,13.92,0,0,0,34,120v96a6,6,0,0,0,6,6h64a6,6,0,0,0,6-6V158h36v58a6,6,0,0,0,6,6h64a6,6,0,0,0,6-6V120A13.92,13.92,0,0,0,217.9,110.1ZM210,210H158V152a6,6,0,0,0-6-6H104a6,6,0,0,0-6,6v58H46V120a2,2,0,0,1,.58-1.42l80-80a2,2,0,0,1,2.84,0l80,80A2,2,0,0,1,210,120Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",OT,[...t[4]||(t[4]=[Rb("path",{d:"M219.31,108.68l-80-80a16,16,0,0,0-22.62,0l-80,80A15.87,15.87,0,0,0,32,120v96a8,8,0,0,0,8,8h64a8,8,0,0,0,8-8V160h32v56a8,8,0,0,0,8,8h64a8,8,0,0,0,8-8V120A15.87,15.87,0,0,0,219.31,108.68ZM208,208H160V152a8,8,0,0,0-8-8H104a8,8,0,0,0-8,8v56H48V120l80-80,80,80Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",wT,[...t[5]||(t[5]=[Rb("path",{d:"M216.49,111.51l-80-80a12,12,0,0,0-17,0l-80,8
0A12,12,0,0,0,36,120v96a4,4,0,0,0,4,4h64a4,4,0,0,0,4-4V156h40v60a4,4,0,0,0,4,4h64a4,4,0,0,0,4-4V120A12,12,0,0,0,216.49,111.51ZM212,212H156V152a4,4,0,0,0-4-4H104a4,4,0,0,0-4,4v60H44V120a4,4,0,0,1,1.17-2.83l80-80a4,4,0,0,1,5.66,0l80,80A4,4,0,0,1,212,120Z"},null,-1)])])):Fb("",!0)],16))}}),kT={key:0},ST={key:1},_T={key:2},TT={key:3},ET={key:4},AT={key:5},CT=Mg({name:"ScalarIconInfo",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",kT,[...t[0]||(t[0]=[Rb("path",{d:"M108,84a16,16,0,1,1,16,16A16,16,0,0,1,108,84Zm128,44A108,108,0,1,1,128,20,108.12,108.12,0,0,1,236,128Zm-24,0a84,84,0,1,0-84,84A84.09,84.09,0,0,0,212,128Zm-72,36.68V132a20,20,0,0,0-20-20,12,12,0,0,0-4,23.32V168a20,20,0,0,0,20,20,12,12,0,0,0,4-23.32Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",ST,[...t[1]||(t[1]=[Rb("path",{d:"M224,128a96,96,0,1,1-96-96A96,96,0,0,1,224,128Z",opacity:"0.2"},null,-1),Rb("path",{d:"M144,176a8,8,0,0,1-8,8,16,16,0,0,1-16-16V128a8,8,0,0,1,0-16,16,16,0,0,1,16,16v40A8,8,0,0,1,144,176Zm88-48A104,104,0,1,1,128,24,104.11,104.11,0,0,1,232,128Zm-16,0a88,88,0,1,0-88,88A88.1,88.1,0,0,0,216,128ZM124,96a12,12,0,1,0-12-12A12,12,0,0,0,124,96Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",_T,[...t[2]||(t[2]=[Rb("path",{d:"M128,24A104,104,0,1,0,232,128,104.11,104.11,0,0,0,128,24Zm-4,48a12,12,0,1,1-12,12A12,12,0,0,1,124,72Zm12,112a16,16,0,0,1-16-16V128a8,8,0,0,1,0-16,16,16,0,0,1,16,16v40a8,8,0,0,1,0,16Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",TT,[...t[3]||(t[3]=[Rb("path",{d:"M142,176a6,6,0,0,1-6,6,14,14,0,0,1-14-14V128a2,2,0,0,0-2-2,6,6,0,0,1,0-12,14,14,0,0,1,14,14v40a2,2,0,0,0,2,2A6,6,0,0,1,142,176ZM124,94a10,10,0,1,0-10-10A10,10,0,0,0,124,94Zm106,34A102,102,0,1,1,128,26,102.12,102.12,0,0,1,230,128Zm-12,0a90,90,0,1,0-90,90A90.1,90.1,0,0,0,218,128Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",ET,[...t[4]
||(t[4]=[Rb("path",{d:"M128,24A104,104,0,1,0,232,128,104.11,104.11,0,0,0,128,24Zm0,192a88,88,0,1,1,88-88A88.1,88.1,0,0,1,128,216Zm16-40a8,8,0,0,1-8,8,16,16,0,0,1-16-16V128a8,8,0,0,1,0-16,16,16,0,0,1,16,16v40A8,8,0,0,1,144,176ZM112,84a12,12,0,1,1,12,12A12,12,0,0,1,112,84Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",AT,[...t[5]||(t[5]=[Rb("path",{d:"M140,176a4,4,0,0,1-4,4,12,12,0,0,1-12-12V128a4,4,0,0,0-4-4,4,4,0,0,1,0-8,12,12,0,0,1,12,12v40a4,4,0,0,0,4,4A4,4,0,0,1,140,176ZM124,92a8,8,0,1,0-8-8A8,8,0,0,0,124,92Zm104,36A100,100,0,1,1,128,28,100.11,100.11,0,0,1,228,128Zm-8,0a92,92,0,1,0-92,92A92.1,92.1,0,0,0,220,128Z"},null,-1)])])):Fb("",!0)],16))}}),$T={key:0},PT={key:1},DT={key:2},IT={key:3},MT={key:4},NT={key:5},RT=Mg({name:"ScalarIconLink",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",$T,[...t[0]||(t[0]=[Rb("path",{d:"M117.18,188.74a12,12,0,0,1,0,17l-5.12,5.12A58.26,58.26,0,0,1,70.6,228h0A58.62,58.62,0,0,1,29.14,127.92L63.89,93.17a58.64,58.64,0,0,1,98.56,28.11,12,12,0,1,1-23.37,5.44,34.65,34.65,0,0,0-58.22-16.58L46.11,144.89A34.62,34.62,0,0,0,70.57,204h0a34.41,34.41,0,0,0,24.49-10.14l5.11-5.12A12,12,0,0,1,117.18,188.74ZM226.83,45.17a58.65,58.65,0,0,0-82.93,0l-5.11,5.11a12,12,0,0,0,17,17l5.12-5.12a34.63,34.63,0,1,1,49,49L175.1,145.86A34.39,34.39,0,0,1,150.61,156h0a34.63,34.63,0,0,1-33.69-26.72,12,12,0,0,0-23.38,5.44A58.64,58.64,0,0,0,150.56,180h.05a58.28,58.28,0,0,0,41.47-17.17l34.75-34.75a58.62,58.62,0,0,0,0-82.91Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",PT,[...t[1]||(t[1]=[Rb("path",{d:"M218.34,119.6,183.6,154.34a46.58,46.58,0,0,1-44.31,12.26c-.31.34-.62.67-.95,1L103.6,202.34A46.63,46.63,0,1,1,37.66,136.4L72.4,101.66A46.6,46.6,0,0,1,116.71,89.4c.31-.34.62-.67,1-1L152.4,53.66a46.63,46.63,0,0,1,65.94,65.94Z",opacity:"0.2"},null,-1),Rb("path",{d:"M240,88.23a54.43,54.43,0
,0,1-16,37L189.25,160a54.27,54.27,0,0,1-38.63,16h-.05A54.63,54.63,0,0,1,96,119.84a8,8,0,0,1,16,.45A38.62,38.62,0,0,0,150.58,160h0a38.39,38.39,0,0,0,27.31-11.31l34.75-34.75a38.63,38.63,0,0,0-54.63-54.63l-11,11A8,8,0,0,1,135.7,59l11-11A54.65,54.65,0,0,1,224,48,54.86,54.86,0,0,1,240,88.23ZM109,185.66l-11,11A38.41,38.41,0,0,1,70.6,208h0a38.63,38.63,0,0,1-27.29-65.94L78,107.31A38.63,38.63,0,0,1,144,135.71a8,8,0,0,0,7.78,8.22H152a8,8,0,0,0,8-7.78A54.86,54.86,0,0,0,144,96a54.65,54.65,0,0,0-77.27,0L32,130.75A54.62,54.62,0,0,0,70.56,224h0a54.28,54.28,0,0,0,38.64-16l11-11A8,8,0,0,0,109,185.66Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",DT,[...t[2]||(t[2]=[Rb("path",{d:"M208,32H48A16,16,0,0,0,32,48V208a16,16,0,0,0,16,16H208a16,16,0,0,0,16-16V48A16,16,0,0,0,208,32ZM115.7,192.49a43.31,43.31,0,0,1-55-66.43l25.37-25.37a43.35,43.35,0,0,1,61.25,0,42.9,42.9,0,0,1,9.95,15.43,8,8,0,1,1-15,5.6A27.33,27.33,0,0,0,97.37,112L72,137.37a27.32,27.32,0,0,0,34.68,41.91,8,8,0,1,1,9,13.21Zm79.61-62.55-25.37,25.37A43,43,0,0,1,139.32,168h0a43.35,43.35,0,0,1-40.53-28.12,8,8,0,1,1,15-5.6A27.35,27.35,0,0,0,139.28,152h0a27.14,27.14,0,0,0,19.32-8L184,118.63a27.32,27.32,0,0,0-34.68-41.91,8,8,0,1,1-9-13.21,43.32,43.32,0,0,1,55,66.43Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",IT,[...t[3]||(t[3]=[Rb("path",{d:"M238,88.18a52.42,52.42,0,0,1-15.4,35.66l-34.75,34.75A52.28,52.28,0,0,1,150.62,174h-.05A52.63,52.63,0,0,1,98,119.9a6,6,0,0,1,6-5.84h.17a6,6,0,0,1,5.83,6.16A40.62,40.62,0,0,0,150.58,162h0a40.4,40.4,0,0,0,28.73-11.9l34.75-34.74A40.63,40.63,0,0,0,156.63,57.9l-11,11a6,6,0,0,1-8.49-8.49l11-11a52.62,52.62,0,0,1,74.43,0A52.83,52.83,0,0,1,238,88.18Zm-127.62,98.9-11,11A40.36,40.36,0,0,1,70.6,210h0a40.63,40.63,0,0,1-28.7-69.36L76.62,105.9A40.63,40.63,0,0,1,146,135.77a6,6,0,0,0,5.83,6.16H152a6,6,0,0,0,6-5.84A52.63,52.63,0,0,0,68.14,97.42L33.38,132.16A52.63,52.63,0,0,0,70.56,222h0a52.26,52.26,0,0,0,37.22-15.42l11-11a6,6,0,1,0-8.49-8.48Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",MT,[...t[4]||(t[4]=[Rb("path
",{d:"M240,88.23a54.43,54.43,0,0,1-16,37L189.25,160a54.27,54.27,0,0,1-38.63,16h-.05A54.63,54.63,0,0,1,96,119.84a8,8,0,0,1,16,.45A38.62,38.62,0,0,0,150.58,160h0a38.39,38.39,0,0,0,27.31-11.31l34.75-34.75a38.63,38.63,0,0,0-54.63-54.63l-11,11A8,8,0,0,1,135.7,59l11-11A54.65,54.65,0,0,1,224,48,54.86,54.86,0,0,1,240,88.23ZM109,185.66l-11,11A38.41,38.41,0,0,1,70.6,208h0a38.63,38.63,0,0,1-27.29-65.94L78,107.31A38.63,38.63,0,0,1,144,135.71a8,8,0,0,0,16,.45A54.86,54.86,0,0,0,144,96a54.65,54.65,0,0,0-77.27,0L32,130.75A54.62,54.62,0,0,0,70.56,224h0a54.28,54.28,0,0,0,38.64-16l11-11A8,8,0,0,0,109,185.66Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",NT,[...t[5]||(t[5]=[Rb("path",{d:"M236,88.12a50.44,50.44,0,0,1-14.81,34.31l-34.75,34.74A50.33,50.33,0,0,1,150.62,172h-.05A50.63,50.63,0,0,1,100,120a4,4,0,0,1,4-3.89h.11a4,4,0,0,1,3.89,4.11A42.64,42.64,0,0,0,150.58,164h0a42.32,42.32,0,0,0,30.14-12.49l34.75-34.74a42.63,42.63,0,1,0-60.29-60.28l-11,11a4,4,0,0,1-5.66-5.65l11-11A50.64,50.64,0,0,1,236,88.12ZM111.78,188.49l-11,11A42.33,42.33,0,0,1,70.6,212h0a42.63,42.63,0,0,1-30.11-72.77l34.75-34.74A42.63,42.63,0,0,1,148,135.82a4,4,0,0,0,8,.23A50.64,50.64,0,0,0,69.55,98.83L34.8,133.57A50.63,50.63,0,0,0,70.56,220h0a50.33,50.33,0,0,0,35.81-14.83l11-11a4,4,0,1,0-5.65-5.66Z"},null,-1)])])):Fb("",!0)],16))}}),LT={key:0},BT={key:1},jT={key:2},UT={key:3},zT={key:4},FT={key:5},ZT=Mg({name:"ScalarIconList",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 
256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",LT,[...t[0]||(t[0]=[Rb("path",{d:"M228,128a12,12,0,0,1-12,12H40a12,12,0,0,1,0-24H216A12,12,0,0,1,228,128ZM40,76H216a12,12,0,0,0,0-24H40a12,12,0,0,0,0,24ZM216,180H40a12,12,0,0,0,0,24H216a12,12,0,0,0,0-24Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",BT,[...t[1]||(t[1]=[Rb("path",{d:"M216,64V192H40V64Z",opacity:"0.2"},null,-1),Rb("path",{d:"M224,128a8,8,0,0,1-8,8H40a8,8,0,0,1,0-16H216A8,8,0,0,1,224,128ZM40,72H216a8,8,0,0,0,0-16H40a8,8,0,0,0,0,16ZM216,184H40a8,8,0,0,0,0,16H216a8,8,0,0,0,0-16Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",jT,[...t[2]||(t[2]=[Rb("path",{d:"M208,32H48A16,16,0,0,0,32,48V208a16,16,0,0,0,16,16H208a16,16,0,0,0,16-16V48A16,16,0,0,0,208,32ZM192,184H64a8,8,0,0,1,0-16H192a8,8,0,0,1,0,16Zm0-48H64a8,8,0,0,1,0-16H192a8,8,0,0,1,0,16Zm0-48H64a8,8,0,0,1,0-16H192a8,8,0,0,1,0,16Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",UT,[...t[3]||(t[3]=[Rb("path",{d:"M222,128a6,6,0,0,1-6,6H40a6,6,0,0,1,0-12H216A6,6,0,0,1,222,128ZM40,70H216a6,6,0,0,0,0-12H40a6,6,0,0,0,0,12ZM216,186H40a6,6,0,0,0,0,12H216a6,6,0,0,0,0-12Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",zT,[...t[4]||(t[4]=[Rb("path",{d:"M224,128a8,8,0,0,1-8,8H40a8,8,0,0,1,0-16H216A8,8,0,0,1,224,128ZM40,72H216a8,8,0,0,0,0-16H40a8,8,0,0,0,0,16ZM216,184H40a8,8,0,0,0,0,16H216a8,8,0,0,0,0-16Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",FT,[...t[5]||(t[5]=[Rb("path",{d:"M220,128a4,4,0,0,1-4,4H40a4,4,0,0,1,0-8H216A4,4,0,0,1,220,128ZM40,68H216a4,4,0,0,0,0-8H40a4,4,0,0,0,0,8ZM216,188H40a4,4,0,0,0,0,8H216a4,4,0,0,0,0-8Z"},null,-1)])])):Fb("",!0)],16))}}),QT={key:0},HT={key:1},VT={key:2},qT={key:3},WT={key:4},XT={key:5},GT=Mg({name:"ScalarIconLockSimple",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 
256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",QT,[...t[0]||(t[0]=[Rb("path",{d:"M208,76H180V56A52,52,0,0,0,76,56V76H48A20,20,0,0,0,28,96V208a20,20,0,0,0,20,20H208a20,20,0,0,0,20-20V96A20,20,0,0,0,208,76ZM100,56a28,28,0,0,1,56,0V76H100ZM204,204H52V100H204Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",HT,[...t[1]||(t[1]=[Rb("path",{d:"M216,96V208a8,8,0,0,1-8,8H48a8,8,0,0,1-8-8V96a8,8,0,0,1,8-8H208A8,8,0,0,1,216,96Z",opacity:"0.2"},null,-1),Rb("path",{d:"M208,80H176V56a48,48,0,0,0-96,0V80H48A16,16,0,0,0,32,96V208a16,16,0,0,0,16,16H208a16,16,0,0,0,16-16V96A16,16,0,0,0,208,80ZM96,56a32,32,0,0,1,64,0V80H96ZM208,208H48V96H208V208Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",VT,[...t[2]||(t[2]=[Rb("path",{d:"M208,80H176V56a48,48,0,0,0-96,0V80H48A16,16,0,0,0,32,96V208a16,16,0,0,0,16,16H208a16,16,0,0,0,16-16V96A16,16,0,0,0,208,80ZM96,56a32,32,0,0,1,64,0V80H96Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",qT,[...t[3]||(t[3]=[Rb("path",{d:"M208,82H174V56a46,46,0,0,0-92,0V82H48A14,14,0,0,0,34,96V208a14,14,0,0,0,14,14H208a14,14,0,0,0,14-14V96A14,14,0,0,0,208,82ZM94,56a34,34,0,0,1,68,0V82H94ZM210,208a2,2,0,0,1-2,2H48a2,2,0,0,1-2-2V96a2,2,0,0,1,2-2H208a2,2,0,0,1,2,2Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",WT,[...t[4]||(t[4]=[Rb("path",{d:"M208,80H176V56a48,48,0,0,0-96,0V80H48A16,16,0,0,0,32,96V208a16,16,0,0,0,16,16H208a16,16,0,0,0,16-16V96A16,16,0,0,0,208,80ZM96,56a32,32,0,0,1,64,0V80H96ZM208,208H48V96H208V208Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",XT,[...t[5]||(t[5]=[Rb("path",{d:"M208,84H172V56a44,44,0,0,0-88,0V84H48A12,12,0,0,0,36,96V208a12,12,0,0,0,12,12H208a12,12,0,0,0,12-12V96A12,12,0,0,0,208,84ZM92,56a36,36,0,0,1,72,0V84H92ZM212,208a4,4,0,0,1-4,4H48a4,4,0,0,1-4-4V96a4,4,0,0,1,4-4H208a4,4,0,0,1,4,4Z"},null,-1)])])):Fb("",!0)],16))}}),YT={key:0},KT={key:1},JT={key:2},eE={key:3},tE={key:4},nE={key:5},rE=Mg({name:"ScalarIconMagnifyingGlass",props:{label:{},weight:{}},setup(e){const 
t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",YT,[...t[0]||(t[0]=[Rb("path",{d:"M232.49,215.51,185,168a92.12,92.12,0,1,0-17,17l47.53,47.54a12,12,0,0,0,17-17ZM44,112a68,68,0,1,1,68,68A68.07,68.07,0,0,1,44,112Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",KT,[...t[1]||(t[1]=[Rb("path",{d:"M192,112a80,80,0,1,1-80-80A80,80,0,0,1,192,112Z",opacity:"0.2"},null,-1),Rb("path",{d:"M229.66,218.34,179.6,168.28a88.21,88.21,0,1,0-11.32,11.31l50.06,50.07a8,8,0,0,0,11.32-11.32ZM40,112a72,72,0,1,1,72,72A72.08,72.08,0,0,1,40,112Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",JT,[...t[2]||(t[2]=[Rb("path",{d:"M168,112a56,56,0,1,1-56-56A56,56,0,0,1,168,112Zm61.66,117.66a8,8,0,0,1-11.32,0l-50.06-50.07a88,88,0,1,1,11.32-11.31l50.06,50.06A8,8,0,0,1,229.66,229.66ZM112,184a72,72,0,1,0-72-72A72.08,72.08,0,0,0,112,184Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",eE,[...t[3]||(t[3]=[Rb("path",{d:"M228.24,219.76l-51.38-51.38a86.15,86.15,0,1,0-8.48,8.48l51.38,51.38a6,6,0,0,0,8.48-8.48ZM38,112a74,74,0,1,1,74,74A74.09,74.09,0,0,1,38,112Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",tE,[...t[4]||(t[4]=[Rb("path",{d:"M229.66,218.34l-50.07-50.06a88.11,88.11,0,1,0-11.31,11.31l50.06,50.07a8,8,0,0,0,11.32-11.32ZM40,112a72,72,0,1,1,72,72A72.08,72.08,0,0,1,40,112Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",nE,[...t[5]||(t[5]=[Rb("path",{d:"M226.83,221.17l-52.7-52.7a84.1,84.1,0,1,0-5.66,5.66l52.7,52.7a4,4,0,0,0,5.66-5.66ZM36,112a76,76,0,1,1,76,76A76.08,76.08,0,0,1,36,112Z"},null,-1)])])):Fb("",!0)],16))}}),aE={key:0},oE={key:1},iE={key:2},sE={key:3},lE={key:4},cE={key:5},uE=Mg({name:"ScalarIconPencilSimple",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 
256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",aE,[...t[0]||(t[0]=[Rb("path",{d:"M230.14,70.54,185.46,25.85a20,20,0,0,0-28.29,0L33.86,149.17A19.85,19.85,0,0,0,28,163.31V208a20,20,0,0,0,20,20H92.69a19.86,19.86,0,0,0,14.14-5.86L230.14,98.82a20,20,0,0,0,0-28.28ZM91,204H52V165l84-84,39,39ZM192,103,153,64l18.34-18.34,39,39Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",oE,[...t[1]||(t[1]=[Rb("path",{d:"M221.66,90.34,192,120,136,64l29.66-29.66a8,8,0,0,1,11.31,0L221.66,79A8,8,0,0,1,221.66,90.34Z",opacity:"0.2"},null,-1),Rb("path",{d:"M227.31,73.37,182.63,28.68a16,16,0,0,0-22.63,0L36.69,152A15.86,15.86,0,0,0,32,163.31V208a16,16,0,0,0,16,16H92.69A15.86,15.86,0,0,0,104,219.31L227.31,96a16,16,0,0,0,0-22.63ZM92.69,208H48V163.31l88-88L180.69,120ZM192,108.68,147.31,64l24-24L216,84.68Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",iE,[...t[2]||(t[2]=[Rb("path",{d:"M227.31,73.37,182.63,28.68a16,16,0,0,0-22.63,0L36.69,152A15.86,15.86,0,0,0,32,163.31V208a16,16,0,0,0,16,16H92.69A15.86,15.86,0,0,0,104,219.31L227.31,96a16,16,0,0,0,0-22.63ZM192,108.68,147.31,64l24-24L216,84.68Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",sE,[...t[3]||(t[3]=[Rb("path",{d:"M225.9,74.78,181.21,30.09a14,14,0,0,0-19.8,0L38.1,153.41a13.94,13.94,0,0,0-4.1,9.9V208a14,14,0,0,0,14,14H92.69a13.94,13.94,0,0,0,9.9-4.1L225.9,94.58a14,14,0,0,0,0-19.8ZM94.1,209.41a2,2,0,0,1-1.41.59H48a2,2,0,0,1-2-2V163.31a2,2,0,0,1,.59-1.41L136,72.48,183.51,120ZM217.41,86.1,192,111.51,144.49,64,169.9,38.58a2,2,0,0,1,2.83,0l44.68,44.69a2,2,0,0,1,0,2.83Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",lE,[...t[4]||(t[4]=[Rb("path",{d:"M227.31,73.37,182.63,28.68a16,16,0,0,0-22.63,0L36.69,152A15.86,15.86,0,0,0,32,163.31V208a16,16,0,0,0,16,16H92.69A15.86,15.86,0,0,0,104,219.31L227.31,96a16,16,0,0,0,0-22.63ZM92.69,208H48V163.31l88-88L180.69,120ZM192,108.68,147.31,64l24-24L216,84.68Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",cE,[...t[5]||(t[5]=[Rb("path",{d:"M224.49,76.2,179.8,31.51a12,12,0,0,0-17,0L133.1
7,61.17h0L39.52,154.83A11.9,11.9,0,0,0,36,163.31V208a12,12,0,0,0,12,12H92.69a12,12,0,0,0,8.48-3.51L224.48,93.17a12,12,0,0,0,0-17Zm-129,134.63A4,4,0,0,1,92.69,212H48a4,4,0,0,1-4-4V163.31a4,4,0,0,1,1.17-2.83L136,69.65,186.34,120ZM218.83,87.51,192,114.34,141.66,64l26.82-26.83a4,4,0,0,1,5.66,0l44.69,44.68a4,4,0,0,1,0,5.66Z"},null,-1)])])):Fb("",!0)],16))}}),dE={key:0},pE={key:1},hE={key:2},fE={key:3},mE={key:4},gE={key:5},vE=Mg({name:"ScalarIconPlay",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",dE,[...t[0]||(t[0]=[Rb("path",{d:"M234.49,111.07,90.41,22.94A20,20,0,0,0,60,39.87V216.13a20,20,0,0,0,30.41,16.93l144.08-88.13a19.82,19.82,0,0,0,0-33.86ZM84,208.85V47.15L216.16,128Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",pE,[...t[1]||(t[1]=[Rb("path",{d:"M228.23,134.69,84.15,222.81A8,8,0,0,1,72,216.12V39.88a8,8,0,0,1,12.15-6.69l144.08,88.12A7.82,7.82,0,0,1,228.23,134.69Z",opacity:"0.2"},null,-1),Rb("path",{d:"M232.4,114.49,88.32,26.35a16,16,0,0,0-16.2-.3A15.86,15.86,0,0,0,64,39.87V216.13A15.94,15.94,0,0,0,80,232a16.07,16.07,0,0,0,8.36-2.35L232.4,141.51a15.81,15.81,0,0,0,0-27ZM80,215.94V40l143.83,88Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",hE,[...t[2]||(t[2]=[Rb("path",{d:"M240,128a15.74,15.74,0,0,1-7.6,13.51L88.32,229.65a16,16,0,0,1-16.2.3A15.86,15.86,0,0,1,64,216.13V39.87a15.86,15.86,0,0,1,8.12-13.82,16,16,0,0,1,16.2.3L232.4,114.49A15.74,15.74,0,0,1,240,128Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",fE,[...t[3]||(t[3]=[Rb("path",{d:"M231.36,116.19,87.28,28.06a14,14,0,0,0-14.18-.27A13.69,13.69,0,0,0,66,39.87V216.13a13.69,13.69,0,0,0,7.1,12.08,14,14,0,0,0,14.18-.27l144.08-88.13a13.82,13.82,0,0,0,0-23.62Zm-6.26,13.38L81,217.7a2,2,0,0,1-2.06,0,1.78,1.78,0,0,1-1-1.61V39.87a1.78,1.78,0,0,1,1-1.61A2.06,2.06,0,0,1,80,38a2,2,0,0,1,1,.31L225.1,126.43a1.82,1.82,0,0,1,0,3.14Z"},null,-1)
])])):"regular"===fm(r)?(Tb(),$b("g",mE,[...t[4]||(t[4]=[Rb("path",{d:"M232.4,114.49,88.32,26.35a16,16,0,0,0-16.2-.3A15.86,15.86,0,0,0,64,39.87V216.13A15.94,15.94,0,0,0,80,232a16.07,16.07,0,0,0,8.36-2.35L232.4,141.51a15.81,15.81,0,0,0,0-27ZM80,215.94V40l143.83,88Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",gE,[...t[5]||(t[5]=[Rb("path",{d:"M230.32,117.9,86.24,29.79a11.91,11.91,0,0,0-12.17-.23A11.71,11.71,0,0,0,68,39.89V216.11a11.71,11.71,0,0,0,6.07,10.33,11.91,11.91,0,0,0,12.17-.23L230.32,138.1a11.82,11.82,0,0,0,0-20.2Zm-4.18,13.37L82.06,219.39a4,4,0,0,1-4.07.07,3.77,3.77,0,0,1-2-3.35V39.89a3.77,3.77,0,0,1,2-3.35,4,4,0,0,1,4.07.07l144.08,88.12a3.8,3.8,0,0,1,0,6.54Z"},null,-1)])])):Fb("",!0)],16))}}),bE={key:0},yE={key:1},OE={key:2},wE={key:3},xE={key:4},kE={key:5},SE=Mg({name:"ScalarIconPlus",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",bE,[...t[0]||(t[0]=[Rb("path",{d:"M228,128a12,12,0,0,1-12,12H140v76a12,12,0,0,1-24,0V140H40a12,12,0,0,1,0-24h76V40a12,12,0,0,1,24,0v76h76A12,12,0,0,1,228,128Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",yE,[...t[1]||(t[1]=[Rb("path",{d:"M216,56V200a16,16,0,0,1-16,16H56a16,16,0,0,1-16-16V56A16,16,0,0,1,56,40H200A16,16,0,0,1,216,56Z",opacity:"0.2"},null,-1),Rb("path",{d:"M224,128a8,8,0,0,1-8,8H136v80a8,8,0,0,1-16,0V136H40a8,8,0,0,1,0-16h80V40a8,8,0,0,1,16,0v80h80A8,8,0,0,1,224,128Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",OE,[...t[2]||(t[2]=[Rb("path",{d:"M208,32H48A16,16,0,0,0,32,48V208a16,16,0,0,0,16,16H208a16,16,0,0,0,16-16V48A16,16,0,0,0,208,32ZM184,136H136v48a8,8,0,0,1-16,0V136H72a8,8,0,0,1,0-16h48V72a8,8,0,0,1,16,0v48h48a8,8,0,0,1,0,16Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",wE,[...t[3]||(t[3]=[Rb("path",{d:"M222,128a6,6,0,0,1-6,6H134v82a6,6,0,0,1-12,0V134H40a6,6,0,0,1,0-12h82V40a6,6,0,0,1,12,0v82h82A6,6,0,0,1,222,128Z"},null,
-1)])])):"regular"===fm(r)?(Tb(),$b("g",xE,[...t[4]||(t[4]=[Rb("path",{d:"M224,128a8,8,0,0,1-8,8H136v80a8,8,0,0,1-16,0V136H40a8,8,0,0,1,0-16h80V40a8,8,0,0,1,16,0v80h80A8,8,0,0,1,224,128Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",kE,[...t[5]||(t[5]=[Rb("path",{d:"M220,128a4,4,0,0,1-4,4H132v84a4,4,0,0,1-8,0V132H40a4,4,0,0,1,0-8h84V40a4,4,0,0,1,8,0v84h84A4,4,0,0,1,220,128Z"},null,-1)])])):Fb("",!0)],16))}}),_E={key:0},TE={key:1},EE={key:2},AE={key:3},CE={key:4},$E={key:5},PE=Mg({name:"ScalarIconScroll",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",_E,[...t[0]||(t[0]=[Rb("path",{d:"M92,92a12,12,0,0,1,12-12h60a12,12,0,0,1,0,24H104A12,12,0,0,1,92,92Zm12,52h60a12,12,0,0,0,0-24H104a12,12,0,0,0,0,24Zm132,48a36,36,0,0,1-36,36H88a36,36,0,0,1-36-36V64a12,12,0,0,0-24,0c0,3.73,3.35,6.51,3.38,6.54l-.18-.14h0A12,12,0,1,1,16.81,89.59h0C15.49,88.62,4,79.55,4,64A36,36,0,0,1,40,28H176a36,36,0,0,1,36,36V164h4a12,12,0,0,1,7.2,2.4C224.51,167.38,236,176.45,236,192ZM92.62,172.2A12,12,0,0,1,104,164h84V64a12,12,0,0,0-12-12H73.94A35.88,35.88,0,0,1,76,64V192a12,12,0,0,0,24,0c0-3.58-3.17-6.38-3.2-6.4A12,12,0,0,1,92.62,172.2ZM212,192a7.69,7.69,0,0,0-1.24-4h-87a30.32,30.32,0,0,1,.26,4,35.84,35.84,0,0,1-2.06,12H200A12,12,0,0,0,212,192Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",TE,[...t[1]||(t[1]=[Rb("path",{d:"M200,176H104s8,6,8,16a24,24,0,0,1-48,0V64A24,24,0,0,0,40,40H176a24,24,0,0,1,24,24Z",opacity:"0.2"},null,-1),Rb("path",{d:"M96,104a8,8,0,0,1,8-8h64a8,8,0,0,1,0,16H104A8,8,0,0,1,96,104Zm8,40h64a8,8,0,0,0,0-16H104a8,8,0,0,0,0,16Zm128,48a32,32,0,0,1-32,32H88a32,32,0,0,1-32-32V64a16,16,0,0,0-32,0c0,5.74,4.83,9.62,4.88,9.66h0A8,8,0,0,1,24,88a7.89,7.89,0,0,1-4.79-1.61h0C18.05,85.54,8,77.61,8,64A32,32,0,0,1,40,32H176a32,32,0,0,1,32,32V168h8a8,8,0,0,1,4.8,1.6C222,170.46,232,178.39,232,192ZM96.26,173.48A8.
07,8.07,0,0,1,104,168h88V64a16,16,0,0,0-16-16H67.69A31.71,31.71,0,0,1,72,64V192a16,16,0,0,0,32,0c0-5.74-4.83-9.62-4.88-9.66A7.82,7.82,0,0,1,96.26,173.48ZM216,192a12.58,12.58,0,0,0-3.23-8h-94a26.92,26.92,0,0,1,1.21,8,31.82,31.82,0,0,1-4.29,16H200A16,16,0,0,0,216,192Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",EE,[...t[2]||(t[2]=[Rb("path",{d:"M220.8,169.6A8,8,0,0,0,216,168h-8V64a32,32,0,0,0-32-32H40A32,32,0,0,0,8,64C8,77.61,18.05,85.54,19.2,86.4h0A7.89,7.89,0,0,0,24,88a8,8,0,0,0,4.87-14.33h0C28.83,73.62,24,69.74,24,64a16,16,0,0,1,32,0V192a32,32,0,0,0,32,32H200a32,32,0,0,0,32-32C232,178.39,222,170.46,220.8,169.6ZM104,96h64a8,8,0,0,1,0,16H104a8,8,0,0,1,0-16Zm-8,40a8,8,0,0,1,8-8h64a8,8,0,0,1,0,16H104A8,8,0,0,1,96,136Zm104,72H107.71A31.82,31.82,0,0,0,112,192a26.92,26.92,0,0,0-1.21-8h102a12.58,12.58,0,0,1,3.23,8A16,16,0,0,1,200,208Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",AE,[...t[3]||(t[3]=[Rb("path",{d:"M98,136a6,6,0,0,1,6-6h64a6,6,0,0,1,0,12H104A6,6,0,0,1,98,136Zm6-26h64a6,6,0,0,0,0-12H104a6,6,0,0,0,0,12Zm126,82a30,30,0,0,1-30,30H88a30,30,0,0,1-30-30V64a18,18,0,0,0-36,0c0,6.76,5.58,11.19,5.64,11.23A6,6,0,1,1,20.4,84.8C20,84.48,10,76.85,10,64A30,30,0,0,1,40,34H176a30,30,0,0,1,30,30V170h10a6,6,0,0,1,3.6,1.2C220,171.52,230,179.15,230,192Zm-124,0c0-6.76-5.59-11.19-5.64-11.23A6,6,0,0,1,104,170h90V64a18,18,0,0,0-18-18H64a29.82,29.82,0,0,1,6,18V192a18,18,0,0,0,36,0Zm112,0a14.94,14.94,0,0,0-4.34-10H115.88A24.83,24.83,0,0,1,118,192a29.87,29.87,0,0,1-6,18h88A18,18,0,0,0,218,192Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",CE,[...t[4]||(t[4]=[Rb("path",{d:"M96,104a8,8,0,0,1,8-8h64a8,8,0,0,1,0,16H104A8,8,0,0,1,96,104Zm8,40h64a8,8,0,0,0,0-16H104a8,8,0,0,0,0,16Zm128,48a32,32,0,0,1-32,32H88a32,32,0,0,1-32-32V64a16,16,0,0,0-32,0c0,5.74,4.83,9.62,4.88,9.66h0A8,8,0,0,1,24,88a7.89,7.89,0,0,1-4.79-1.61h0C18.05,85.54,8,77.61,8,64A32,32,0,0,1,40,32H176a32,32,0,0,1,32,32V168h8a8,8,0,0,1,4.8,1.6C222,170.46,232,178.39,232,192ZM96.26,173.48A8.07,8.07,0,0,1,104,168h88V64a16,16,0,0,0-16-
16H67.69A31.71,31.71,0,0,1,72,64V192a16,16,0,0,0,32,0c0-5.74-4.83-9.62-4.88-9.66A7.82,7.82,0,0,1,96.26,173.48ZM216,192a12.58,12.58,0,0,0-3.23-8h-94a26.92,26.92,0,0,1,1.21,8,31.82,31.82,0,0,1-4.29,16H200A16,16,0,0,0,216,192Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",$E,[...t[5]||(t[5]=[Rb("path",{d:"M100,104a4,4,0,0,1,4-4h64a4,4,0,0,1,0,8H104A4,4,0,0,1,100,104Zm4,36h64a4,4,0,0,0,0-8H104a4,4,0,0,0,0,8Zm124,52a28,28,0,0,1-28,28H88a28,28,0,0,1-28-28V64a20,20,0,0,0-40,0c0,7.78,6.34,12.75,6.4,12.8a4,4,0,1,1-4.8,6.4C21.21,82.91,12,75.86,12,64A28,28,0,0,1,40,36H176a28,28,0,0,1,28,28V172h12a4,4,0,0,1,2.4.8C218.79,173.09,228,180.14,228,192Zm-120,0c0-7.78-6.34-12.75-6.4-12.8A4,4,0,0,1,104,172h92V64a20,20,0,0,0-20-20H59.57A27.9,27.9,0,0,1,68,64V192a20,20,0,0,0,40,0Zm112,0c0-6-3.74-10.3-5.5-12H112.61A23.31,23.31,0,0,1,116,192a27.94,27.94,0,0,1-8.42,20H200A20,20,0,0,0,220,192Z"},null,-1)])])):Fb("",!0)],16))}}),DE={key:0},IE={key:1},ME={key:2},NE={key:3},RE={key:4},LE={key:5},BE=Mg({name:"ScalarIconSwap",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 
256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",DE,[...t[0]||(t[0]=[Rb("path",{d:"M228,48V152a20,20,0,0,1-20,20H112.92a12,12,0,0,1-17.41,16.49l-20-20a12,12,0,0,1,0-17l20-20A12,12,0,0,1,112.92,148H204V52H100a12,12,0,0,1-24,0V48A20,20,0,0,1,96,28H208A20,20,0,0,1,228,48ZM168,192a12,12,0,0,0-12,12H52V108h91.08a12,12,0,0,0,17.41,16.49l20-20a12,12,0,0,0,0-17l-20-20A12,12,0,0,0,143.08,84H48a20,20,0,0,0-20,20V208a20,20,0,0,0,20,20H160a20,20,0,0,0,20-20v-4A12,12,0,0,0,168,192Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",IE,[...t[1]||(t[1]=[Rb("path",{d:"M216,48V152a8,8,0,0,1-8,8H168v48a8,8,0,0,1-8,8H48a8,8,0,0,1-8-8V104a8,8,0,0,1,8-8H88V48a8,8,0,0,1,8-8H208A8,8,0,0,1,216,48Z",opacity:"0.2"},null,-1),Rb("path",{d:"M224,48V152a16,16,0,0,1-16,16H99.31l10.35,10.34a8,8,0,0,1-11.32,11.32l-24-24a8,8,0,0,1,0-11.32l24-24a8,8,0,0,1,11.32,11.32L99.31,152H208V48H96v8a8,8,0,0,1-16,0V48A16,16,0,0,1,96,32H208A16,16,0,0,1,224,48ZM168,192a8,8,0,0,0-8,8v8H48V104H156.69l-10.35,10.34a8,8,0,0,0,11.32,11.32l24-24a8,8,0,0,0,0-11.32l-24-24a8,8,0,0,0-11.32,11.32L156.69,88H48a16,16,0,0,0-16,16V208a16,16,0,0,0,16,16H160a16,16,0,0,0,16-16v-8A8,8,0,0,0,168,192Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",ME,[...t[2]||(t[2]=[Rb("path",{d:"M224,48V152a16,16,0,0,1-16,16H112v16a8,8,0,0,1-13.66,5.66l-24-24a8,8,0,0,1,0-11.32l24-24A8,8,0,0,1,112,136v16h96V48H96v8a8,8,0,0,1-16,0V48A16,16,0,0,1,96,32H208A16,16,0,0,1,224,48ZM168,192a8,8,0,0,0-8,8v8H48V104h96v16a8,8,0,0,0,13.66,5.66l24-24a8,8,0,0,0,0-11.32l-24-24A8,8,0,0,0,144,72V88H48a16,16,0,0,0-16,16V208a16,16,0,0,0,16,16H160a16,16,0,0,0,16-16v-8A8,8,0,0,0,168,192Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",NE,[...t[3]||(t[3]=[Rb("path",{d:"M222,48V152a14,14,0,0,1-14,14H94.49l13.75,13.76a6,6,0,1,1-8.48,8.48l-24-24a6,6,0,0,1,0-8.48l24-24a6,6,0,0,1,8.48,8.48L94.49,154H208a2,2,0,0,0,2-2V48a2,2,0,0,0-2-2H96a2,2,0,0,0-2,2v8a6,6,0,0,1-12,0V48A14,14,0,0,1,96,34H208A14,14,0,0,1,222,48ZM168,194a6,6,0,0,0-6,6v8a2,2,0,0,1-2,2H48a2
,2,0,0,1-2-2V104a2,2,0,0,1,2-2H161.51l-13.75,13.76a6,6,0,1,0,8.48,8.48l24-24a6,6,0,0,0,0-8.48l-24-24a6,6,0,0,0-8.48,8.48L161.51,90H48a14,14,0,0,0-14,14V208a14,14,0,0,0,14,14H160a14,14,0,0,0,14-14v-8A6,6,0,0,0,168,194Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",RE,[...t[4]||(t[4]=[Rb("path",{d:"M224,48V152a16,16,0,0,1-16,16H99.31l10.35,10.34a8,8,0,0,1-11.32,11.32l-24-24a8,8,0,0,1,0-11.32l24-24a8,8,0,0,1,11.32,11.32L99.31,152H208V48H96v8a8,8,0,0,1-16,0V48A16,16,0,0,1,96,32H208A16,16,0,0,1,224,48ZM168,192a8,8,0,0,0-8,8v8H48V104H156.69l-10.35,10.34a8,8,0,0,0,11.32,11.32l24-24a8,8,0,0,0,0-11.32l-24-24a8,8,0,0,0-11.32,11.32L156.69,88H48a16,16,0,0,0-16,16V208a16,16,0,0,0,16,16H160a16,16,0,0,0,16-16v-8A8,8,0,0,0,168,192Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",LE,[...t[5]||(t[5]=[Rb("path",{d:"M220,48V152a12,12,0,0,1-12,12H89.66l17.17,17.17a4,4,0,0,1-5.66,5.66l-24-24a4,4,0,0,1,0-5.66l24-24a4,4,0,0,1,5.66,5.66L89.66,156H208a4,4,0,0,0,4-4V48a4,4,0,0,0-4-4H96a4,4,0,0,0-4,4v8a4,4,0,0,1-8,0V48A12,12,0,0,1,96,36H208A12,12,0,0,1,220,48ZM168,196a4,4,0,0,0-4,4v8a4,4,0,0,1-4,4H48a4,4,0,0,1-4-4V104a4,4,0,0,1,4-4H166.34l-17.17,17.17a4,4,0,0,0,5.66,5.66l24-24a4,4,0,0,0,0-5.66l-24-24a4,4,0,0,0-5.66,5.66L166.34,92H48a12,12,0,0,0-12,12V208a12,12,0,0,0,12,12H160a12,12,0,0,0,12-12v-8A4,4,0,0,0,168,196Z"},null,-1)])])):Fb("",!0)],16))}}),jE={key:0},UE={key:1},zE={key:2},FE={key:3},ZE={key:4},QE={key:5},HE=Mg({name:"ScalarIconTag",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 
256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",jE,[...t[0]||(t[0]=[Rb("path",{d:"M246.15,133.18,146.83,33.86A19.85,19.85,0,0,0,132.69,28H40A12,12,0,0,0,28,40v92.69a19.85,19.85,0,0,0,5.86,14.14l99.32,99.32a20,20,0,0,0,28.28,0l84.69-84.69A20,20,0,0,0,246.15,133.18Zm-98.83,93.17L52,131V52h79l95.32,95.32ZM104,88A16,16,0,1,1,88,72,16,16,0,0,1,104,88Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",UE,[...t[1]||(t[1]=[Rb("path",{d:"M237.66,153,153,237.66a8,8,0,0,1-11.31,0L42.34,138.34A8,8,0,0,1,40,132.69V40h92.69a8,8,0,0,1,5.65,2.34l99.32,99.32A8,8,0,0,1,237.66,153Z",opacity:"0.2"},null,-1),Rb("path",{d:"M243.31,136,144,36.69A15.86,15.86,0,0,0,132.69,32H40a8,8,0,0,0-8,8v92.69A15.86,15.86,0,0,0,36.69,144L136,243.31a16,16,0,0,0,22.63,0l84.68-84.68a16,16,0,0,0,0-22.63Zm-96,96L48,132.69V48h84.69L232,147.31ZM96,84A12,12,0,1,1,84,72,12,12,0,0,1,96,84Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",zE,[...t[2]||(t[2]=[Rb("path",{d:"M243.31,136,144,36.69A15.86,15.86,0,0,0,132.69,32H40a8,8,0,0,0-8,8v92.69A15.86,15.86,0,0,0,36.69,144L136,243.31a16,16,0,0,0,22.63,0l84.68-84.68a16,16,0,0,0,0-22.63ZM84,96A12,12,0,1,1,96,84,12,12,0,0,1,84,96Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",FE,[...t[3]||(t[3]=[Rb("path",{d:"M241.91,137.42,142.59,38.1a13.94,13.94,0,0,0-9.9-4.1H40a6,6,0,0,0-6,6v92.69a13.94,13.94,0,0,0,4.1,9.9l99.32,99.32a14,14,0,0,0,19.8,0l84.69-84.69A14,14,0,0,0,241.91,137.42Zm-8.49,11.31-84.69,84.69a2,2,0,0,1-2.83,0L46.59,134.1a2,2,0,0,1-.59-1.41V46h86.69a2,2,0,0,1,1.41.59l99.32,99.31A2,2,0,0,1,233.42,148.73ZM94,84A10,10,0,1,1,84,74,10,10,0,0,1,94,84Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",ZE,[...t[4]||(t[4]=[Rb("path",{d:"M243.31,136,144,36.69A15.86,15.86,0,0,0,132.69,32H40a8,8,0,0,0-8,8v92.69A15.86,15.86,0,0,0,36.69,144L136,243.31a16,16,0,0,0,22.63,0l84.68-84.68a16,16,0,0,0,0-22.63Zm-96,96L48,132.69V48h84.69L232,147.31ZM96,84A12,12,0,1,1,84,72,12,12,0,0,1,96,84Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",QE,[...t[5]||(t[5]=[Rb("
path",{d:"M240.49,138.83,141.17,39.51A11.93,11.93,0,0,0,132.69,36H40a4,4,0,0,0-4,4v92.69a11.93,11.93,0,0,0,3.51,8.48l99.32,99.32a12,12,0,0,0,17,0l84.69-84.69a12,12,0,0,0,0-17Zm-5.66,11.31-84.69,84.69a4,4,0,0,1-5.65,0L45.17,135.51A4,4,0,0,1,44,132.69V44h88.69a4,4,0,0,1,2.82,1.17l99.32,99.32A4,4,0,0,1,234.83,150.14ZM92,84a8,8,0,1,1-8-8A8,8,0,0,1,92,84Z"},null,-1)])])):Fb("",!0)],16))}}),VE={key:0},qE={key:1},WE={key:2},XE={key:3},GE={key:4},YE={key:5},KE=Mg({name:"ScalarIconTerminalWindow",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",VE,[...t[0]||(t[0]=[Rb("path",{d:"M72.5,150.63,100.79,128,72.5,105.37a12,12,0,1,1,15-18.74l40,32a12,12,0,0,1,0,18.74l-40,32a12,12,0,0,1-15-18.74ZM144,172h32a12,12,0,0,0,0-24H144a12,12,0,0,0,0,24ZM236,56V200a20,20,0,0,1-20,20H40a20,20,0,0,1-20-20V56A20,20,0,0,1,40,36H216A20,20,0,0,1,236,56Zm-24,4H44V196H212Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",qE,[...t[1]||(t[1]=[Rb("path",{d:"M224,56V200a8,8,0,0,1-8,8H40a8,8,0,0,1-8-8V56a8,8,0,0,1,8-8H216A8,8,0,0,1,224,56Z",opacity:"0.2"},null,-1),Rb("path",{d:"M128,128a8,8,0,0,1-3,6.25l-40,32a8,8,0,1,1-10-12.5L107.19,128,75,102.25a8,8,0,1,1,10-12.5l40,32A8,8,0,0,1,128,128Zm48,24H136a8,8,0,0,0,0,16h40a8,8,0,0,0,0-16Zm56-96V200a16,16,0,0,1-16,16H40a16,16,0,0,1-16-16V56A16,16,0,0,1,40,40H216A16,16,0,0,1,232,56ZM216,200V56H40V200H216Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",WE,[...t[2]||(t[2]=[Rb("path",{d:"M216,40H40A16,16,0,0,0,24,56V200a16,16,0,0,0,16,16H216a16,16,0,0,0,16-16V56A16,16,0,0,0,216,40Zm-91,94.25-40,32a8,8,0,1,1-10-12.5L107.19,128,75,102.25a8,8,0,1,1,10-12.5l40,32a8,8,0,0,1,0,12.5ZM176,168H136a8,8,0,0,1,0-16h40a8,8,0,0,1,0,16Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",XE,[...t[3]||(t[3]=[Rb("path",{d:"M126,128a6,6,0,0,1-2.25,4.69l-40,32a6,6,0,0,1-7.5-9.38L110.4,128,76.25,100.69a6,6,0,1
,1,7.5-9.38l40,32A6,6,0,0,1,126,128Zm50,26H136a6,6,0,0,0,0,12h40a6,6,0,0,0,0-12Zm54-98V200a14,14,0,0,1-14,14H40a14,14,0,0,1-14-14V56A14,14,0,0,1,40,42H216A14,14,0,0,1,230,56Zm-12,0a2,2,0,0,0-2-2H40a2,2,0,0,0-2,2V200a2,2,0,0,0,2,2H216a2,2,0,0,0,2-2Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",GE,[...t[4]||(t[4]=[Rb("path",{d:"M128,128a8,8,0,0,1-3,6.25l-40,32a8,8,0,1,1-10-12.5L107.19,128,75,102.25a8,8,0,1,1,10-12.5l40,32A8,8,0,0,1,128,128Zm48,24H136a8,8,0,0,0,0,16h40a8,8,0,0,0,0-16Zm56-96V200a16,16,0,0,1-16,16H40a16,16,0,0,1-16-16V56A16,16,0,0,1,40,40H216A16,16,0,0,1,232,56ZM216,200V56H40V200H216Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",YE,[...t[5]||(t[5]=[Rb("path",{d:"M122.5,124.88a4,4,0,0,1,0,6.24l-40,32a4,4,0,0,1-5-6.24L113.6,128,77.5,99.12a4,4,0,0,1,5-6.24ZM176,156H136a4,4,0,0,0,0,8h40a4,4,0,0,0,0-8ZM228,56V200a12,12,0,0,1-12,12H40a12,12,0,0,1-12-12V56A12,12,0,0,1,40,44H216A12,12,0,0,1,228,56Zm-8,0a4,4,0,0,0-4-4H40a4,4,0,0,0-4,4V200a4,4,0,0,0,4,4H216a4,4,0,0,0,4-4Z"},null,-1)])])):Fb("",!0)],16))}}),JE={key:0},eA={key:1},tA={key:2},nA={key:3},rA={key:4},aA={key:5},oA=Mg({name:"ScalarIconTextAlignLeft",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 
256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",JE,[...t[0]||(t[0]=[Rb("path",{d:"M28,64A12,12,0,0,1,40,52H216a12,12,0,0,1,0,24H40A12,12,0,0,1,28,64Zm12,52H168a12,12,0,0,0,0-24H40a12,12,0,0,0,0,24Zm176,16H40a12,12,0,0,0,0,24H216a12,12,0,0,0,0-24Zm-48,40H40a12,12,0,0,0,0,24H168a12,12,0,0,0,0-24Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",eA,[...t[1]||(t[1]=[Rb("path",{d:"M216,64V168a16,16,0,0,1-16,16H40V64Z",opacity:"0.2"},null,-1),Rb("path",{d:"M32,64a8,8,0,0,1,8-8H216a8,8,0,0,1,0,16H40A8,8,0,0,1,32,64Zm8,48H168a8,8,0,0,0,0-16H40a8,8,0,0,0,0,16Zm176,24H40a8,8,0,0,0,0,16H216a8,8,0,0,0,0-16Zm-48,40H40a8,8,0,0,0,0,16H168a8,8,0,0,0,0-16Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",tA,[...t[2]||(t[2]=[Rb("path",{d:"M208,32H48A16,16,0,0,0,32,48V208a16,16,0,0,0,16,16H208a16,16,0,0,0,16-16V48A16,16,0,0,0,208,32ZM160,184H64a8,8,0,0,1,0-16h96a8,8,0,0,1,0,16Zm32-32H64a8,8,0,0,1,0-16H192a8,8,0,0,1,0,16ZM56,112a8,8,0,0,1,8-8h96a8,8,0,0,1,0,16H64A8,8,0,0,1,56,112ZM192,88H64a8,8,0,0,1,0-16H192a8,8,0,0,1,0,16Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",nA,[...t[3]||(t[3]=[Rb("path",{d:"M34,64a6,6,0,0,1,6-6H216a6,6,0,0,1,0,12H40A6,6,0,0,1,34,64Zm6,46H168a6,6,0,0,0,0-12H40a6,6,0,0,0,0,12Zm176,28H40a6,6,0,0,0,0,12H216a6,6,0,0,0,0-12Zm-48,40H40a6,6,0,0,0,0,12H168a6,6,0,0,0,0-12Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",rA,[...t[4]||(t[4]=[Rb("path",{d:"M32,64a8,8,0,0,1,8-8H216a8,8,0,0,1,0,16H40A8,8,0,0,1,32,64Zm8,48H168a8,8,0,0,0,0-16H40a8,8,0,0,0,0,16Zm176,24H40a8,8,0,0,0,0,16H216a8,8,0,0,0,0-16Zm-48,40H40a8,8,0,0,0,0,16H168a8,8,0,0,0,0-16Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",aA,[...t[5]||(t[5]=[Rb("path",{d:"M36,64a4,4,0,0,1,4-4H216a4,4,0,0,1,0,8H40A4,4,0,0,1,36,64Zm4,44H168a4,4,0,0,0,0-8H40a4,4,0,0,0,0,8Zm176,32H40a4,4,0,0,0,0,8H216a4,4,0,0,0,0-8Zm-48,40H40a4,4,0,0,0,0,8H168a4,4,0,0,0,0-8Z"},null,-1)])])):Fb("",!0)],16))}}),iA={key:0},sA={key:1},lA={key:2},cA={key:3},uA={key:4},dA={key:5},pA=Mg({name:"ScalarIconTrash",props:{l
abel:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",iA,[...t[0]||(t[0]=[Rb("path",{d:"M216,48H180V36A28,28,0,0,0,152,8H104A28,28,0,0,0,76,36V48H40a12,12,0,0,0,0,24h4V208a20,20,0,0,0,20,20H192a20,20,0,0,0,20-20V72h4a12,12,0,0,0,0-24ZM100,36a4,4,0,0,1,4-4h48a4,4,0,0,1,4,4V48H100Zm88,168H68V72H188ZM116,104v64a12,12,0,0,1-24,0V104a12,12,0,0,1,24,0Zm48,0v64a12,12,0,0,1-24,0V104a12,12,0,0,1,24,0Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",sA,[...t[1]||(t[1]=[Rb("path",{d:"M200,56V208a8,8,0,0,1-8,8H64a8,8,0,0,1-8-8V56Z",opacity:"0.2"},null,-1),Rb("path",{d:"M216,48H176V40a24,24,0,0,0-24-24H104A24,24,0,0,0,80,40v8H40a8,8,0,0,0,0,16h8V208a16,16,0,0,0,16,16H192a16,16,0,0,0,16-16V64h8a8,8,0,0,0,0-16ZM96,40a8,8,0,0,1,8-8h48a8,8,0,0,1,8,8v8H96Zm96,168H64V64H192ZM112,104v64a8,8,0,0,1-16,0V104a8,8,0,0,1,16,0Zm48,0v64a8,8,0,0,1-16,0V104a8,8,0,0,1,16,0Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",lA,[...t[2]||(t[2]=[Rb("path",{d:"M216,48H176V40a24,24,0,0,0-24-24H104A24,24,0,0,0,80,40v8H40a8,8,0,0,0,0,16h8V208a16,16,0,0,0,16,16H192a16,16,0,0,0,16-16V64h8a8,8,0,0,0,0-16ZM112,168a8,8,0,0,1-16,0V104a8,8,0,0,1,16,0Zm48,0a8,8,0,0,1-16,0V104a8,8,0,0,1,16,0Zm0-120H96V40a8,8,0,0,1,8-8h48a8,8,0,0,1,8,8Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",cA,[...t[3]||(t[3]=[Rb("path",{d:"M216,50H174V40a22,22,0,0,0-22-22H104A22,22,0,0,0,82,40V50H40a6,6,0,0,0,0,12H50V208a14,14,0,0,0,14,14H192a14,14,0,0,0,14-14V62h10a6,6,0,0,0,0-12ZM94,40a10,10,0,0,1,10-10h48a10,10,0,0,1,10,10V50H94ZM194,208a2,2,0,0,1-2,2H64a2,2,0,0,1-2-2V62H194ZM110,104v64a6,6,0,0,1-12,0V104a6,6,0,0,1,12,0Zm48,0v64a6,6,0,0,1-12,0V104a6,6,0,0,1,12,0Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",uA,[...t[4]||(t[4]=[Rb("path",{d:"M216,48H176V40a24,24,0,0,0-24-24H104A24,24,0,0,0,80,40v8H40a8,8,0,0,0,0,16h8V208a16,16,0,0,0,16,16H192a16,16,0,0,0,16-16V64h8a8,
8,0,0,0,0-16ZM96,40a8,8,0,0,1,8-8h48a8,8,0,0,1,8,8v8H96Zm96,168H64V64H192ZM112,104v64a8,8,0,0,1-16,0V104a8,8,0,0,1,16,0Zm48,0v64a8,8,0,0,1-16,0V104a8,8,0,0,1,16,0Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",dA,[...t[5]||(t[5]=[Rb("path",{d:"M216,52H172V40a20,20,0,0,0-20-20H104A20,20,0,0,0,84,40V52H40a4,4,0,0,0,0,8H52V208a12,12,0,0,0,12,12H192a12,12,0,0,0,12-12V60h12a4,4,0,0,0,0-8ZM92,40a12,12,0,0,1,12-12h48a12,12,0,0,1,12,12V52H92ZM196,208a4,4,0,0,1-4,4H64a4,4,0,0,1-4-4V60H196ZM108,104v64a4,4,0,0,1-8,0V104a4,4,0,0,1,8,0Zm48,0v64a4,4,0,0,1-8,0V104a4,4,0,0,1,8,0Z"},null,-1)])])):Fb("",!0)],16))}}),hA={key:0},fA={key:1},mA={key:2},gA={key:3},vA={key:4},bA={key:5},yA=Mg({name:"ScalarIconWarning",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",hA,[...t[0]||(t[0]=[Rb("path",{d:"M240.26,186.1,152.81,34.23h0a28.74,28.74,0,0,0-49.62,0L15.74,186.1a27.45,27.45,0,0,0,0,27.71A28.31,28.31,0,0,0,40.55,228h174.9a28.31,28.31,0,0,0,24.79-14.19A27.45,27.45,0,0,0,240.26,186.1Zm-20.8,15.7a4.46,4.46,0,0,1-4,2.2H40.55a4.46,4.46,0,0,1-4-2.2,3.56,3.56,0,0,1,0-3.73L124,46.2a4.77,4.77,0,0,1,8,0l87.44,151.87A3.56,3.56,0,0,1,219.46,201.8ZM116,136V104a12,12,0,0,1,24,0v32a12,12,0,0,1-24,0Zm28,40a16,16,0,1,1-16-16A16,16,0,0,1,144,176Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",fA,[...t[1]||(t[1]=[Rb("path",{d:"M215.46,216H40.54C27.92,216,20,202.79,26.13,192.09L113.59,40.22c6.3-11,22.52-11,28.82,0l87.46,151.87C236,202.79,228.08,216,215.46,216Z",opacity:"0.2"},null,-1),Rb("path",{d:"M236.8,188.09,149.35,36.22h0a24.76,24.76,0,0,0-42.7,0L19.2,188.09a23.51,23.51,0,0,0,0,23.72A24.35,24.35,0,0,0,40.55,224h174.9a24.35,24.35,0,0,0,21.33-12.19A23.51,23.51,0,0,0,236.8,188.09ZM222.93,203.8a8.5,8.5,0,0,1-7.48,4.2H40.55a8.5,8.5,0,0,1-7.48-4.2,7.59,7.59,0,0,1,0-7.72L120.52,44.21a8.75,8.75,0,0,1,15,0l87.45,151.87A7.59,7.
59,0,0,1,222.93,203.8ZM120,144V104a8,8,0,0,1,16,0v40a8,8,0,0,1-16,0Zm20,36a12,12,0,1,1-12-12A12,12,0,0,1,140,180Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",mA,[...t[2]||(t[2]=[Rb("path",{d:"M236.8,188.09,149.35,36.22h0a24.76,24.76,0,0,0-42.7,0L19.2,188.09a23.51,23.51,0,0,0,0,23.72A24.35,24.35,0,0,0,40.55,224h174.9a24.35,24.35,0,0,0,21.33-12.19A23.51,23.51,0,0,0,236.8,188.09ZM120,104a8,8,0,0,1,16,0v40a8,8,0,0,1-16,0Zm8,88a12,12,0,1,1,12-12A12,12,0,0,1,128,192Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",gA,[...t[3]||(t[3]=[Rb("path",{d:"M235.07,189.09,147.61,37.22h0a22.75,22.75,0,0,0-39.22,0L20.93,189.09a21.53,21.53,0,0,0,0,21.72A22.35,22.35,0,0,0,40.55,222h174.9a22.35,22.35,0,0,0,19.6-11.19A21.53,21.53,0,0,0,235.07,189.09ZM224.66,204.8a10.46,10.46,0,0,1-9.21,5.2H40.55a10.46,10.46,0,0,1-9.21-5.2,9.51,9.51,0,0,1,0-9.72L118.79,43.21a10.75,10.75,0,0,1,18.42,0l87.46,151.87A9.51,9.51,0,0,1,224.66,204.8ZM122,144V104a6,6,0,0,1,12,0v40a6,6,0,0,1-12,0Zm16,36a10,10,0,1,1-10-10A10,10,0,0,1,138,180Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",vA,[...t[4]||(t[4]=[Rb("path",{d:"M236.8,188.09,149.35,36.22h0a24.76,24.76,0,0,0-42.7,0L19.2,188.09a23.51,23.51,0,0,0,0,23.72A24.35,24.35,0,0,0,40.55,224h174.9a24.35,24.35,0,0,0,21.33-12.19A23.51,23.51,0,0,0,236.8,188.09ZM222.93,203.8a8.5,8.5,0,0,1-7.48,4.2H40.55a8.5,8.5,0,0,1-7.48-4.2,7.59,7.59,0,0,1,0-7.72L120.52,44.21a8.75,8.75,0,0,1,15,0l87.45,151.87A7.59,7.59,0,0,1,222.93,203.8ZM120,144V104a8,8,0,0,1,16,0v40a8,8,0,0,1-16,0Zm20,36a12,12,0,1,1-12-12A12,12,0,0,1,140,180Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",bA,[...t[5]||(t[5]=[Rb("path",{d:"M233.34,190.09,145.88,38.22h0a20.75,20.75,0,0,0-35.76,0L22.66,190.09a19.52,19.52,0,0,0,0,19.71A20.36,20.36,0,0,0,40.54,220H215.46a20.36,20.36,0,0,0,17.86-10.2A19.52,19.52,0,0,0,233.34,190.09ZM226.4,205.8a12.47,12.47,0,0,1-10.94,6.2H40.54a12.47,12.47,0,0,1-10.94-6.2,11.45,11.45,0,0,1,0-11.72L117.05,42.21a12.76,12.76,0,0,1,21.9,0L226.4,194.08A11.45,11.45,0,0,1,226.4,205.8ZM124,144V104a4,4,0,
0,1,8,0v40a4,4,0,0,1-8,0Zm12,36a8,8,0,1,1-8-8A8,8,0,0,1,136,180Z"},null,-1)])])):Fb("",!0)],16))}}),OA={key:0},wA={key:1},xA={key:2},kA={key:3},SA={key:4},_A={key:5},TA=Mg({name:"ScalarIconWarningCircle",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",OA,[...t[0]||(t[0]=[Rb("path",{d:"M128,20A108,108,0,1,0,236,128,108.12,108.12,0,0,0,128,20Zm0,192a84,84,0,1,1,84-84A84.09,84.09,0,0,1,128,212Zm-12-80V80a12,12,0,0,1,24,0v52a12,12,0,0,1-24,0Zm28,40a16,16,0,1,1-16-16A16,16,0,0,1,144,172Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",wA,[...t[1]||(t[1]=[Rb("path",{d:"M224,128a96,96,0,1,1-96-96A96,96,0,0,1,224,128Z",opacity:"0.2"},null,-1),Rb("path",{d:"M128,24A104,104,0,1,0,232,128,104.11,104.11,0,0,0,128,24Zm0,192a88,88,0,1,1,88-88A88.1,88.1,0,0,1,128,216Zm-8-80V80a8,8,0,0,1,16,0v56a8,8,0,0,1-16,0Zm20,36a12,12,0,1,1-12-12A12,12,0,0,1,140,172Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",xA,[...t[2]||(t[2]=[Rb("path",{d:"M128,24A104,104,0,1,0,232,128,104.11,104.11,0,0,0,128,24Zm-8,56a8,8,0,0,1,16,0v56a8,8,0,0,1-16,0Zm8,104a12,12,0,1,1,12-12A12,12,0,0,1,128,184Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",kA,[...t[3]||(t[3]=[Rb("path",{d:"M128,26A102,102,0,1,0,230,128,102.12,102.12,0,0,0,128,26Zm0,192a90,90,0,1,1,90-90A90.1,90.1,0,0,1,128,218Zm-6-82V80a6,6,0,0,1,12,0v56a6,6,0,0,1-12,0Zm16,36a10,10,0,1,1-10-10A10,10,0,0,1,138,172Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",SA,[...t[4]||(t[4]=[Rb("path",{d:"M128,24A104,104,0,1,0,232,128,104.11,104.11,0,0,0,128,24Zm0,192a88,88,0,1,1,88-88A88.1,88.1,0,0,1,128,216Zm-8-80V80a8,8,0,0,1,16,0v56a8,8,0,0,1-16,0Zm20,36a12,12,0,1,1-12-12A12,12,0,0,1,140,172Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",_A,[...t[5]||(t[5]=[Rb("path",{d:"M128,28A100,100,0,1,0,228,128,100.11,100.11,0,0,0,128,28Zm0,192a92,92,0,1,1,92-92A92.1,92.1,0,0,1,128,220Zm-4
-84V80a4,4,0,0,1,8,0v56a4,4,0,0,1-8,0Zm12,36a8,8,0,1,1-8-8A8,8,0,0,1,136,172Z"},null,-1)])])):Fb("",!0)],16))}}),EA={key:0},AA={key:1},CA={key:2},$A={key:3},PA={key:4},DA={key:5},IA=Mg({name:"ScalarIconWarningOctagon",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",EA,[...t[0]||(t[0]=[Rb("path",{d:"M116,132V80a12,12,0,0,1,24,0v52a12,12,0,0,1-24,0ZM236,91.55v72.9a19.86,19.86,0,0,1-5.86,14.14l-51.55,51.55A19.85,19.85,0,0,1,164.45,236H91.55a19.85,19.85,0,0,1-14.14-5.86L25.86,178.59A19.86,19.86,0,0,1,20,164.45V91.55a19.86,19.86,0,0,1,5.86-14.14L77.41,25.86A19.85,19.85,0,0,1,91.55,20h72.9a19.85,19.85,0,0,1,14.14,5.86l51.55,51.55A19.86,19.86,0,0,1,236,91.55Zm-24,1.66L162.79,44H93.21L44,93.21v69.58L93.21,212h69.58L212,162.79ZM128,156a16,16,0,1,0,16,16A16,16,0,0,0,128,156Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",AA,[...t[1]||(t[1]=[Rb("path",{d:"M224,91.55v72.9a8,8,0,0,1-2.34,5.66l-51.55,51.55a8,8,0,0,1-5.66,2.34H91.55a8,8,0,0,1-5.66-2.34L34.34,170.11A8,8,0,0,1,32,164.45V91.55a8,8,0,0,1,2.34-5.66L85.89,34.34A8,8,0,0,1,91.55,32h72.9a8,8,0,0,1,5.66,2.34l51.55,51.55A8,8,0,0,1,224,91.55Z",opacity:"0.2"},null,-1),Rb("path",{d:"M120,136V80a8,8,0,0,1,16,0v56a8,8,0,0,1-16,0ZM232,91.55v72.9a15.86,15.86,0,0,1-4.69,11.31l-51.55,51.55A15.86,15.86,0,0,1,164.45,232H91.55a15.86,15.86,0,0,1-11.31-4.69L28.69,175.76A15.86,15.86,0,0,1,24,164.45V91.55a15.86,15.86,0,0,1,4.69-11.31L80.24,28.69A15.86,15.86,0,0,1,91.55,24h72.9a15.86,15.86,0,0,1,11.31,4.69l51.55,51.55A15.86,15.86,0,0,1,232,91.55Zm-16,0L164.45,40H91.55L40,91.55v72.9L91.55,216h72.9L216,164.45ZM128,160a12,12,0,1,0,12,12A12,12,0,0,0,128,160Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",CA,[...t[2]||(t[2]=[Rb("path",{d:"M227.31,80.23,175.77,28.69A16.13,16.13,0,0,0,164.45,24H91.55a16.13,16.13,0,0,0-11.32,4.69L28.69,80.23A16.13,16.13,0,0,0,24,91
.55v72.9a16.13,16.13,0,0,0,4.69,11.32l51.54,51.54A16.13,16.13,0,0,0,91.55,232h72.9a16.13,16.13,0,0,0,11.32-4.69l51.54-51.54A16.13,16.13,0,0,0,232,164.45V91.55A16.13,16.13,0,0,0,227.31,80.23ZM120,80a8,8,0,0,1,16,0v56a8,8,0,0,1-16,0Zm8,104a12,12,0,1,1,12-12A12,12,0,0,1,128,184Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",$A,[...t[3]||(t[3]=[Rb("path",{d:"M122,136V80a6,6,0,0,1,12,0v56a6,6,0,0,1-12,0ZM230,91.55v72.9a13.92,13.92,0,0,1-4.1,9.9L174.35,225.9a13.92,13.92,0,0,1-9.9,4.1H91.55a13.92,13.92,0,0,1-9.9-4.1L30.1,174.35a13.92,13.92,0,0,1-4.1-9.9V91.55a13.92,13.92,0,0,1,4.1-9.9L81.65,30.1a13.92,13.92,0,0,1,9.9-4.1h72.9a13.92,13.92,0,0,1,9.9,4.1L225.9,81.65A13.92,13.92,0,0,1,230,91.55Zm-12,0a2,2,0,0,0-.59-1.42L165.87,38.59a2,2,0,0,0-1.42-.59H91.55a2,2,0,0,0-1.41.59L38.58,90.13A2,2,0,0,0,38,91.55v72.9a2,2,0,0,0,.59,1.42l51.54,51.54a2,2,0,0,0,1.42.59h72.9a2,2,0,0,0,1.41-.59l51.56-51.54a2,2,0,0,0,.58-1.42ZM128,162a10,10,0,1,0,10,10A10,10,0,0,0,128,162Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",PA,[...t[4]||(t[4]=[Rb("path",{d:"M120,136V80a8,8,0,0,1,16,0v56a8,8,0,0,1-16,0ZM232,91.55v72.9a15.86,15.86,0,0,1-4.69,11.31l-51.55,51.55A15.86,15.86,0,0,1,164.45,232H91.55a15.86,15.86,0,0,1-11.31-4.69L28.69,175.76A15.86,15.86,0,0,1,24,164.45V91.55a15.86,15.86,0,0,1,4.69-11.31L80.24,28.69A15.86,15.86,0,0,1,91.55,24h72.9a15.86,15.86,0,0,1,11.31,4.69l51.55,51.55A15.86,15.86,0,0,1,232,91.55Zm-16,0L164.45,40H91.55L40,91.55v72.9L91.55,216h72.9L216,164.45ZM128,160a12,12,0,1,0,12,12A12,12,0,0,0,128,160Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",DA,[...t[5]||(t[5]=[Rb("path",{d:"M124,136V80a4,4,0,0,1,8,0v56a4,4,0,0,1-8,0ZM228,91.55v72.9a12,12,0,0,1-3.51,8.49l-51.55,51.55a12,12,0,0,1-8.49,3.51H91.55a12,12,0,0,1-8.49-3.51L31.51,172.94A12,12,0,0,1,28,164.45V91.55a12,12,0,0,1,3.51-8.49L83.06,31.51A12,12,0,0,1,91.55,28h72.9a12,12,0,0,1,8.49,3.51l51.55,51.55A12,12,0,0,1,228,91.55Zm-8,0a4,4,0,0,0-1.17-2.83L167.28,37.17A4.06,4.06,0,0,0,164.45,36H91.55a4.06,4.06,0,0,0-2.83,1.17L37.17,88.72A4,
4,0,0,0,36,91.55v72.9a4,4,0,0,0,1.17,2.83l51.55,51.55A4.06,4.06,0,0,0,91.55,220h72.9a4.06,4.06,0,0,0,2.83-1.17l51.55-51.55a4,4,0,0,0,1.17-2.83ZM128,164a8,8,0,1,0,8,8A8,8,0,0,0,128,164Z"},null,-1)])])):Fb("",!0)],16))}}),MA={key:0},NA={key:1},RA={key:2},LA={key:3},BA={key:4},jA={key:5},UA=Mg({name:"ScalarIconWebhooksLogo",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",MA,[...t[0]||(t[0]=[Rb("path",{d:"M192,180H118.71a56,56,0,1,1-104.6-37.46,12,12,0,1,1,21.37,10.92A31.64,31.64,0,0,0,32,168a32,32,0,0,0,64,0,12,12,0,0,1,12-12h84a12,12,0,0,1,0,24Zm0-68a55.9,55.9,0,0,0-18.45,3.12L138.22,57.71a12,12,0,0,0-20.44,12.58l40.94,66.52a12,12,0,0,0,16.52,3.93,32,32,0,1,1,19.68,59.13A12,12,0,0,0,196,223.82a10.05,10.05,0,0,0,1.09,0A56,56,0,0,0,192,112ZM57.71,178.22a12,12,0,0,0,16.51-3.93l40.94-66.52a12,12,0,0,0-3.92-16.51,32,32,0,1,1,45.28-41.8,12,12,0,1,0,21.37-10.92A56,56,0,1,0,89.1,104.32L53.78,161.71A12,12,0,0,0,57.71,178.22Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",NA,[...t[1]||(t[1]=[Rb("path",{d:"M128,104a40,40,0,1,1,40-40A40,40,0,0,1,128,104Zm64,24a40,40,0,1,0,40,40A40,40,0,0,0,192,128ZM64,128a40,40,0,1,0,40,40A40,40,0,0,0,64,128Z",opacity:"0.2"},null,-1),Rb("path",{d:"M178.16,176H111.32A48,48,0,1,1,25.6,139.19a8,8,0,0,1,12.8,9.61A31.69,31.69,0,0,0,32,168a32,32,0,0,0,64,0,8,8,0,0,1,8-8h74.16a16,16,0,1,1,0,16ZM64,184a16,16,0,0,0,14.08-23.61l35.77-58.14a8,8,0,0,0-2.62-11,32,32,0,1,1,46.1-40.06A8,8,0,1,0,172,44.79a48,48,0,1,0-75.62,55.33L64.44,152c-.15,0-.29,0-.44,0a16,16,0,0,0,0,32Zm128-64a48.18,48.18,0,0,0-18,3.49L142.08,71.6A16,16,0,1,0,128,80l.44,0,35.78,58.15a8,8,0,0,0,11,2.61A32,32,0,1,1,192,200a8,8,0,0,0,0,16,48,48,0,0,0,0-96Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",RA,[...t[2]||(t[2]=[Rb("path",{d:"M50.15,160,89.07,92.57l-2.24-3.88a48,48,0,1,1,85.05-44.17,8.17,8.17,0,0,1-
3.19,10.4,8,8,0,0,1-11.35-3.72,32,32,0,1,0-56.77,29.3.57.57,0,0,1,.08.13l13.83,23.94a8,8,0,0,1,0,8L77.86,176a16,16,0,0,1-27.71-16Zm141-40H178.81L141.86,56a16,16,0,0,0-27.71,16l34.64,60a8,8,0,0,0,6.92,4h35.63c17.89,0,32.95,14.64,32.66,32.53A32,32,0,0,1,192.31,200a8.23,8.23,0,0,0-8.28,7.33,8,8,0,0,0,8,8.67,48.05,48.05,0,0,0,48-48.93C239.49,140.79,217.48,120,191.19,120ZM208,167.23c-.4-8.61-7.82-15.23-16.43-15.23H114.81a8,8,0,0,0-6.93,4L91.72,184h0a32,32,0,1,1-53.47-35,8.2,8.2,0,0,0-.92-11,8,8,0,0,0-11.72,1.17A47.63,47.63,0,0,0,16,167.54,48,48,0,0,0,105.55,192v0l4.62-8H192A16,16,0,0,0,208,167.23Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",LA,[...t[3]||(t[3]=[Rb("path",{d:"M179.37,174H109.6a46,46,0,1,1-82.4-33.61,6,6,0,0,1,9.6,7.21A33.68,33.68,0,0,0,30,168a34,34,0,0,0,68,0,6,6,0,0,1,6-6h75.37a14,14,0,1,1,0,12ZM64,182a14,14,0,0,0,11.73-21.62l36.42-59.18a6,6,0,0,0-2-8.25,34,34,0,1,1,49-42.57,6,6,0,1,0,11-4.79A46,46,0,1,0,99,99.7L65.52,154.08c-.5-.05-1-.08-1.52-.08a14,14,0,0,0,0,28Zm128-60a46,46,0,0,0-18.8,4L139.73,71.61A14,14,0,1,0,128,78a12.79,12.79,0,0,0,1.52-.09l36.4,59.17a6.05,6.05,0,0,0,3.73,2.69,6,6,0,0,0,4.53-.73A34,34,0,1,1,192,202a6,6,0,0,0,0,12,46,46,0,0,0,0-92Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",BA,[...t[4]||(t[4]=[Rb("path",{d:"M178.16,176H111.32A48,48,0,1,1,25.6,139.19a8,8,0,0,1,12.8,9.61A31.69,31.69,0,0,0,32,168a32,32,0,0,0,64,0,8,8,0,0,1,8-8h74.16a16,16,0,1,1,0,16ZM64,184a16,16,0,0,0,14.08-23.61l35.77-58.14a8,8,0,0,0-2.62-11,32,32,0,1,1,46.1-40.06A8,8,0,1,0,172,44.79a48,48,0,1,0-75.62,55.33L64.44,152c-.15,0-.29,0-.44,0a16,16,0,0,0,0,32Zm128-64a48.18,48.18,0,0,0-18,3.49L142.08,71.6A16,16,0,1,0,128,80l.44,0,35.78,58.15a8,8,0,0,0,11,2.61A32,32,0,1,1,192,200a8,8,0,0,0,0,16,48,48,0,0,0,0-96Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",jA,[...t[5]||(t[5]=[Rb("path",{d:"M180.7,172H107.81a44,44,0,1,1-79-30.41,4,4,0,0,1,6.4,4.81A35.67,35.67,0,0,0,28,168a36,36,0,0,0,72,0,4,4,0,0,1,4-4h76.7a12,12,0,1,1,0,8ZM64,180a12,12,0,0,0,9.33-19.54l37.11-60.3a4,4,0,0
,0-1.31-5.51A36,36,0,1,1,161,49.58a4,4,0,1,0,7.33-3.19,44,44,0,1,0-66.71,52.83l-35.1,57.05A11.58,11.58,0,0,0,64,156a12,12,0,0,0,0,24Zm128-56a44,44,0,0,0-19.56,4.58l-35.11-57A12,12,0,1,0,128,76a12.24,12.24,0,0,0,2.52-.27L167.63,136a4,4,0,0,0,5.5,1.31A36,36,0,1,1,192,204a4,4,0,0,0,0,8,44,44,0,0,0,0-88Z"},null,-1)])])):Fb("",!0)],16))}}),zA={key:0},FA={key:1},ZA={key:2},QA={key:3},HA={key:4},VA={key:5},qA=Mg({name:"ScalarIconX",props:{label:{},weight:{}},setup(e){const t=e,{bind:n,weight:r}=kk(t);return(e,t)=>(Tb(),$b("svg",Vb({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 256 256",fill:"currentColor"},fm(n)),[hv(e.$slots,"default"),"bold"===fm(r)?(Tb(),$b("g",zA,[...t[0]||(t[0]=[Rb("path",{d:"M208.49,191.51a12,12,0,0,1-17,17L128,145,64.49,208.49a12,12,0,0,1-17-17L111,128,47.51,64.49a12,12,0,0,1,17-17L128,111l63.51-63.52a12,12,0,0,1,17,17L145,128Z"},null,-1)])])):"duotone"===fm(r)?(Tb(),$b("g",FA,[...t[1]||(t[1]=[Rb("path",{d:"M216,56V200a16,16,0,0,1-16,16H56a16,16,0,0,1-16-16V56A16,16,0,0,1,56,40H200A16,16,0,0,1,216,56Z",opacity:"0.2"},null,-1),Rb("path",{d:"M205.66,194.34a8,8,0,0,1-11.32,11.32L128,139.31,61.66,205.66a8,8,0,0,1-11.32-11.32L116.69,128,50.34,61.66A8,8,0,0,1,61.66,50.34L128,116.69l66.34-66.35a8,8,0,0,1,11.32,11.32L139.31,128Z"},null,-1)])])):"fill"===fm(r)?(Tb(),$b("g",ZA,[...t[2]||(t[2]=[Rb("path",{d:"M208,32H48A16,16,0,0,0,32,48V208a16,16,0,0,0,16,16H208a16,16,0,0,0,16-16V48A16,16,0,0,0,208,32ZM181.66,170.34a8,8,0,0,1-11.32,11.32L128,139.31,85.66,181.66a8,8,0,0,1-11.32-11.32L116.69,128,74.34,85.66A8,8,0,0,1,85.66,74.34L128,116.69l42.34-42.35a8,8,0,0,1,11.32,11.32L139.31,128Z"},null,-1)])])):"light"===fm(r)?(Tb(),$b("g",QA,[...t[3]||(t[3]=[Rb("path",{d:"M204.24,195.76a6,6,0,1,1-8.48,8.48L128,136.49,60.24,204.24a6,6,0,0,1-8.48-8.48L119.51,128,51.76,60.24a6,6,0,0,1,8.48-8.48L128,119.51l67.76-67.75a6,6,0,0,1,8.48,8.48L136.49,128Z"},null,-1)])])):"regular"===fm(r)?(Tb(),$b("g",HA,[...t[4]||(t[4]=[Rb("path",{d:"M205.66,194.34a8,8,0,0,1-11.32,11.32L128,139.
31,61.66,205.66a8,8,0,0,1-11.32-11.32L116.69,128,50.34,61.66A8,8,0,0,1,61.66,50.34L128,116.69l66.34-66.35a8,8,0,0,1,11.32,11.32L139.31,128Z"},null,-1)])])):"thin"===fm(r)?(Tb(),$b("g",VA,[...t[5]||(t[5]=[Rb("path",{d:"M202.83,197.17a4,4,0,0,1-5.66,5.66L128,133.66,58.83,202.83a4,4,0,0,1-5.66-5.66L122.34,128,53.17,58.83a4,4,0,0,1,5.66-5.66L128,122.34l69.17-69.17a4,4,0,1,1,5.66,5.66L133.66,128Z"},null,-1)])])):Fb("",!0)],16))}}),WA=Mg({__name:"ScalarCheckbox",props:{selected:{type:Boolean},type:{default:"checkbox"}},setup:e=>(t,n)=>(Tb(),$b("div",{class:Th(["flex size-4 items-center justify-center p-0.75",[e.selected?"bg-c-accent text-b-1":"text-transparent shadow-border","checkbox"===e.type?"rounded":"rounded-full"]])},[e.selected?(Tb(),Pb(fm(_S),{key:0,class:"size-3",weight:"bold"})):Fb("",!0)],2))}),XA=Symbol(),GA=Mg({inheritAttrs:!1,__name:"ScalarFormInput",props:{is:{default:"button"}},setup(e){const{cx:t}=lk(),n=tg(XA,!1),r=ik({base:["bg-b-1.5 flex items-center text-c-2 gap-0.75 px-3 py-2.5 ","outline-offset-[-1px] has-[:focus-visible]:outline"],variants:{grouped:{true:"first:rounded-t-[inherit] last:rounded-b-[inherit]",false:"rounded border"},button:{true:"cursor-pointer hover:bg-b-2"}}});return(a,o)=>(Tb(),Pb(lv(e.is),Vb({type:"button"===e.is?"button":void 0},fm(t)(fm(r)({button:"button"===e.is,grouped:fm(n)}))),{default:Ym((()=>[hv(a.$slots,"default")])),_:3},16,["type"]))}}),YA={class:"flex-1 text-left min-w-0 truncate"},KA=["type"],JA=Mg({inheritAttrs:!1,__name:"ScalarCheckboxInput",props:_v({type:{default:"checkbox"}},{modelValue:{type:Boolean},modelModifiers:{}}),emits:["update:modelValue"],setup(e){const t=zv(e,"modelValue"),{stylingAttrsCx:n,otherAttrs:r}=lk();return(a,o)=>(Tb(),Pb(fm(GA),Vb({is:"label"},fm(n)("cursor-pointer gap-2 
hover:bg-b-2",{"text-c-1":t.value})),{default:Ym((()=>[Lb(WA,{class:"shrink-0",selected:t.value,type:e.type},null,8,["selected","type"]),Rb("div",YA,[hv(a.$slots,"default")]),Km(Rb("input",Vb({"onUpdate:modelValue":o[0]||(o[0]=e=>t.value=e),class:"sr-only",type:e.type},fm(r)),null,16,KA),[[vO,t.value]])])),_:3},16))}}),eC=Mg({inheritAttrs:!1,__name:"ScalarFormInputGroup",props:{is:{default:"div"}},setup(e){const{cx:t}=lk();return eg(XA,!0),(n,r)=>(Tb(),Pb(lv(e.is),Eh(Bb(fm(t)("flex flex-col border rounded divide-y"))),{default:Ym((()=>[hv(n.$slots,"default")])),_:3},16))}}),tC=Mg({__name:"ScalarCheckboxRadioGroup",props:_v({options:{default:()=>[]}},{modelValue:{},modelModifiers:{}}),emits:["update:modelValue"],setup(e){const t=zv(e,"modelValue"),n=Ng();return(r,a)=>(Tb(),Pb(fm(eC),null,{default:Ym((()=>[(Tb(!0),$b(Ob,null,dv(e.options,(e=>(Tb(),Pb(JA,{key:e.value,modelValue:t.value?.value===e.value,name:fm(n),type:"radio",value:e.value,"onUpdate:modelValue":n=>t.value=n?e:void 0},{default:Ym((()=>[Ub(Ih(e.label),1)])),_:2},1032,["modelValue","name","value","onUpdate:modelValue"])))),128))])),_:1}))}});let nC=class{constructor(e,t,n){this.property=e,this.normal=t,n&&(this.space=n)}};function rC(e,t){const n={},r={};let a=-1;for(;++a"xlink:"+t.slice(5).toLowerCase(),properties:{xLinkActuate:null,xLinkArcRole:null,xLinkHref:null,xLinkRole:null,xLinkShow:null,xLinkTitle:null,xLinkType:null}}),xC=OC({space:"xml",transform:(e,t)=>"xml:"+t.slice(3).toLowerCase(),properties:{xmlLang:null,xmlBase:null,xmlSpace:null}});function kC(e,t){return t in e?e[t]:t}function SC(e,t){return kC(e,t.toLowerCase())}const 
_C=OC({space:"xmlns",attributes:{xmlnsxlink:"xmlns:xlink"},transform:SC,properties:{xmlns:null,xmlnsXLink:null}}),TC=OC({transform:(e,t)=>"role"===t?t:"aria-"+t.slice(4).toLowerCase(),properties:{ariaActiveDescendant:null,ariaAtomic:lC,ariaAutoComplete:null,ariaBusy:lC,ariaChecked:lC,ariaColCount:uC,ariaColIndex:uC,ariaColSpan:uC,ariaControls:dC,ariaCurrent:null,ariaDescribedBy:dC,ariaDetails:null,ariaDisabled:lC,ariaDropEffect:dC,ariaErrorMessage:null,ariaExpanded:lC,ariaFlowTo:dC,ariaGrabbed:lC,ariaHasPopup:null,ariaHidden:lC,ariaInvalid:null,ariaKeyShortcuts:null,ariaLabel:null,ariaLabelledBy:dC,ariaLevel:uC,ariaLive:null,ariaModal:lC,ariaMultiLine:lC,ariaMultiSelectable:lC,ariaOrientation:null,ariaOwns:dC,ariaPlaceholder:null,ariaPosInSet:uC,ariaPressed:lC,ariaReadOnly:lC,ariaRelevant:null,ariaRequired:lC,ariaRoleDescription:dC,ariaRowCount:uC,ariaRowIndex:uC,ariaRowSpan:uC,ariaSelected:lC,ariaSetSize:uC,ariaSort:null,ariaValueMax:uC,ariaValueMin:uC,ariaValueNow:uC,ariaValueText:null,role:null}}),EC=OC({space:"html",attributes:{acceptcharset:"accept-charset",classname:"class",htmlfor:"for",httpequiv:"http-equiv"},transform:SC,mustUseProperty:["checked","multiple","muted","selected"],properties:{abbr:null,accept:pC,acceptCharset:dC,accessKey:dC,action:null,allow:null,allowFullScreen:sC,allowPaymentRequest:sC,allowUserMedia:sC,alt:null,as:null,async:sC,autoCapitalize:null,autoComplete:dC,autoFocus:sC,autoPlay:sC,blocking:dC,capture:null,charSet:null,checked:sC,cite:null,className:dC,cols:uC,colSpan:null,content:null,contentEditable:lC,controls:sC,controlsList:dC,coords:uC|pC,crossOrigin:null,data:null,dateTime:null,decoding:null,default:sC,defer:sC,dir:null,dirName:null,disabled:sC,download:cC,draggable:lC,encType:null,enterKeyHint:null,fetchPriority:null,form:null,formAction:null,formEncType:null,formMethod:null,formNoValidate:sC,formTarget:null,headers:dC,height:uC,hidden:sC,high:uC,href:null,hrefLang:null,htmlFor:dC,httpEquiv:dC,id:null,imageSizes:null,imageSrc
Set:null,inert:sC,inputMode:null,integrity:null,is:null,isMap:sC,itemId:null,itemProp:dC,itemRef:dC,itemScope:sC,itemType:dC,kind:null,label:null,lang:null,language:null,list:null,loading:null,loop:sC,low:uC,manifest:null,max:null,maxLength:uC,media:null,method:null,min:null,minLength:uC,multiple:sC,muted:sC,name:null,nonce:null,noModule:sC,noValidate:sC,onAbort:null,onAfterPrint:null,onAuxClick:null,onBeforeMatch:null,onBeforePrint:null,onBeforeToggle:null,onBeforeUnload:null,onBlur:null,onCancel:null,onCanPlay:null,onCanPlayThrough:null,onChange:null,onClick:null,onClose:null,onContextLost:null,onContextMenu:null,onContextRestored:null,onCopy:null,onCueChange:null,onCut:null,onDblClick:null,onDrag:null,onDragEnd:null,onDragEnter:null,onDragExit:null,onDragLeave:null,onDragOver:null,onDragStart:null,onDrop:null,onDurationChange:null,onEmptied:null,onEnded:null,onError:null,onFocus:null,onFormData:null,onHashChange:null,onInput:null,onInvalid:null,onKeyDown:null,onKeyPress:null,onKeyUp:null,onLanguageChange:null,onLoad:null,onLoadedData:null,onLoadedMetadata:null,onLoadEnd:null,onLoadStart:null,onMessage:null,onMessageError:null,onMouseDown:null,onMouseEnter:null,onMouseLeave:null,onMouseMove:null,onMouseOut:null,onMouseOver:null,onMouseUp:null,onOffline:null,onOnline:null,onPageHide:null,onPageShow:null,onPaste:null,onPause:null,onPlay:null,onPlaying:null,onPopState:null,onProgress:null,onRateChange:null,onRejectionHandled:null,onReset:null,onResize:null,onScroll:null,onScrollEnd:null,onSecurityPolicyViolation:null,onSeeked:null,onSeeking:null,onSelect:null,onSlotChange:null,onStalled:null,onStorage:null,onSubmit:null,onSuspend:null,onTimeUpdate:null,onToggle:null,onUnhandledRejection:null,onUnload:null,onVolumeChange:null,onWaiting:null,onWheel:null,open:sC,optimum:uC,pattern:null,ping:dC,placeholder:null,playsInline:sC,popover:null,popoverTarget:null,popoverTargetAction:null,poster:null,preload:null,readOnly:sC,referrerPolicy:null,rel:dC,required:sC,reversed:sC,r
ows:uC,rowSpan:uC,sandbox:dC,scope:null,scoped:sC,seamless:sC,selected:sC,shadowRootClonable:sC,shadowRootDelegatesFocus:sC,shadowRootMode:null,shape:null,size:uC,sizes:null,slot:null,span:uC,spellCheck:lC,src:null,srcDoc:null,srcLang:null,srcSet:null,start:uC,step:null,style:null,tabIndex:uC,target:null,title:null,translate:null,type:null,typeMustMatch:sC,useMap:null,value:lC,width:uC,wrap:null,writingSuggestions:null,align:null,aLink:null,archive:dC,axis:null,background:null,bgColor:null,border:uC,borderColor:null,bottomMargin:uC,cellPadding:null,cellSpacing:null,char:null,charOff:null,classId:null,clear:null,code:null,codeBase:null,codeType:null,color:null,compact:sC,declare:sC,event:null,face:null,frame:null,frameBorder:null,hSpace:uC,leftMargin:uC,link:null,longDesc:null,lowSrc:null,marginHeight:uC,marginWidth:uC,noResize:sC,noHref:sC,noShade:sC,noWrap:sC,object:null,profile:null,prompt:null,rev:null,rightMargin:uC,rules:null,scheme:null,scrolling:lC,standby:null,summary:null,text:null,topMargin:uC,valueType:null,version:null,vAlign:null,vLink:null,vSpace:uC,allowTransparency:null,autoCorrect:null,autoSave:null,disablePictureInPicture:sC,disableRemotePlayback:sC,prefix:null,property:null,results:uC,security:null,unselectable:null}}),AC=OC({space:"svg",attributes:{accentHeight:"accent-height",alignmentBaseline:"alignment-baseline",arabicForm:"arabic-form",baselineShift:"baseline-shift",capHeight:"cap-height",className:"class",clipPath:"clip-path",clipRule:"clip-rule",colorInterpolation:"color-interpolation",colorInterpolationFilters:"color-interpolation-filters",colorProfile:"color-profile",colorRendering:"color-rendering",crossOrigin:"crossorigin",dataType:"datatype",dominantBaseline:"dominant-baseline",enableBackground:"enable-background",fillOpacity:"fill-opacity",fillRule:"fill-rule",floodColor:"flood-color",floodOpacity:"flood-opacity",fontFamily:"font-family",fontSize:"font-size",fontSizeAdjust:"font-size-adjust",fontStretch:"font-stretch",fontStyle:"font-
style",fontVariant:"font-variant",fontWeight:"font-weight",glyphName:"glyph-name",glyphOrientationHorizontal:"glyph-orientation-horizontal",glyphOrientationVertical:"glyph-orientation-vertical",hrefLang:"hreflang",horizAdvX:"horiz-adv-x",horizOriginX:"horiz-origin-x",horizOriginY:"horiz-origin-y",imageRendering:"image-rendering",letterSpacing:"letter-spacing",lightingColor:"lighting-color",markerEnd:"marker-end",markerMid:"marker-mid",markerStart:"marker-start",navDown:"nav-down",navDownLeft:"nav-down-left",navDownRight:"nav-down-right",navLeft:"nav-left",navNext:"nav-next",navPrev:"nav-prev",navRight:"nav-right",navUp:"nav-up",navUpLeft:"nav-up-left",navUpRight:"nav-up-right",onAbort:"onabort",onActivate:"onactivate",onAfterPrint:"onafterprint",onBeforePrint:"onbeforeprint",onBegin:"onbegin",onCancel:"oncancel",onCanPlay:"oncanplay",onCanPlayThrough:"oncanplaythrough",onChange:"onchange",onClick:"onclick",onClose:"onclose",onCopy:"oncopy",onCueChange:"oncuechange",onCut:"oncut",onDblClick:"ondblclick",onDrag:"ondrag",onDragEnd:"ondragend",onDragEnter:"ondragenter",onDragExit:"ondragexit",onDragLeave:"ondragleave",onDragOver:"ondragover",onDragStart:"ondragstart",onDrop:"ondrop",onDurationChange:"ondurationchange",onEmptied:"onemptied",onEnd:"onend",onEnded:"onended",onError:"onerror",onFocus:"onfocus",onFocusIn:"onfocusin",onFocusOut:"onfocusout",onHashChange:"onhashchange",onInput:"oninput",onInvalid:"oninvalid",onKeyDown:"onkeydown",onKeyPress:"onkeypress",onKeyUp:"onkeyup",onLoad:"onload",onLoadedData:"onloadeddata",onLoadedMetadata:"onloadedmetadata",onLoadStart:"onloadstart",onMessage:"onmessage",onMouseDown:"onmousedown",onMouseEnter:"onmouseenter",onMouseLeave:"onmouseleave",onMouseMove:"onmousemove",onMouseOut:"onmouseout",onMouseOver:"onmouseover",onMouseUp:"onmouseup",onMouseWheel:"onmousewheel",onOffline:"onoffline",onOnline:"ononline",onPageHide:"onpagehide",onPageShow:"onpageshow",onPaste:"onpaste",onPause:"onpause",onPlay:"onplay",onPlaying:"onplaying
",onPopState:"onpopstate",onProgress:"onprogress",onRateChange:"onratechange",onRepeat:"onrepeat",onReset:"onreset",onResize:"onresize",onScroll:"onscroll",onSeeked:"onseeked",onSeeking:"onseeking",onSelect:"onselect",onShow:"onshow",onStalled:"onstalled",onStorage:"onstorage",onSubmit:"onsubmit",onSuspend:"onsuspend",onTimeUpdate:"ontimeupdate",onToggle:"ontoggle",onUnload:"onunload",onVolumeChange:"onvolumechange",onWaiting:"onwaiting",onZoom:"onzoom",overlinePosition:"overline-position",overlineThickness:"overline-thickness",paintOrder:"paint-order",panose1:"panose-1",pointerEvents:"pointer-events",referrerPolicy:"referrerpolicy",renderingIntent:"rendering-intent",shapeRendering:"shape-rendering",stopColor:"stop-color",stopOpacity:"stop-opacity",strikethroughPosition:"strikethrough-position",strikethroughThickness:"strikethrough-thickness",strokeDashArray:"stroke-dasharray",strokeDashOffset:"stroke-dashoffset",strokeLineCap:"stroke-linecap",strokeLineJoin:"stroke-linejoin",strokeMiterLimit:"stroke-miterlimit",strokeOpacity:"stroke-opacity",strokeWidth:"stroke-width",tabIndex:"tabindex",textAnchor:"text-anchor",textDecoration:"text-decoration",textRendering:"text-rendering",transformOrigin:"transform-origin",typeOf:"typeof",underlinePosition:"underline-position",underlineThickness:"underline-thickness",unicodeBidi:"unicode-bidi",unicodeRange:"unicode-range",unitsPerEm:"units-per-em",vAlphabetic:"v-alphabetic",vHanging:"v-hanging",vIdeographic:"v-ideographic",vMathematical:"v-mathematical",vectorEffect:"vector-effect",vertAdvY:"vert-adv-y",vertOriginX:"vert-origin-x",vertOriginY:"vert-origin-y",wordSpacing:"word-spacing",writingMode:"writing-mode",xHeight:"x-height",playbackOrder:"playbackorder",timelineBegin:"timelinebegin"},transform:kC,properties:{about:hC,accentHeight:uC,accumulate:null,additive:null,alignmentBaseline:null,alphabetic:uC,amplitude:uC,arabicForm:null,ascent:uC,attributeName:null,attributeType:null,azimuth:uC,bandwidth:null,baselineShift:null,base
Frequency:null,baseProfile:null,bbox:null,begin:null,bias:uC,by:null,calcMode:null,capHeight:uC,className:dC,clip:null,clipPath:null,clipPathUnits:null,clipRule:null,color:null,colorInterpolation:null,colorInterpolationFilters:null,colorProfile:null,colorRendering:null,content:null,contentScriptType:null,contentStyleType:null,crossOrigin:null,cursor:null,cx:null,cy:null,d:null,dataType:null,defaultAction:null,descent:uC,diffuseConstant:uC,direction:null,display:null,dur:null,divisor:uC,dominantBaseline:null,download:sC,dx:null,dy:null,edgeMode:null,editable:null,elevation:uC,enableBackground:null,end:null,event:null,exponent:uC,externalResourcesRequired:null,fill:null,fillOpacity:uC,fillRule:null,filter:null,filterRes:null,filterUnits:null,floodColor:null,floodOpacity:null,focusable:null,focusHighlight:null,fontFamily:null,fontSize:null,fontSizeAdjust:null,fontStretch:null,fontStyle:null,fontVariant:null,fontWeight:null,format:null,fr:null,from:null,fx:null,fy:null,g1:pC,g2:pC,glyphName:pC,glyphOrientationHorizontal:null,glyphOrientationVertical:null,glyphRef:null,gradientTransform:null,gradientUnits:null,handler:null,hanging:uC,hatchContentUnits:null,hatchUnits:null,height:null,href:null,hrefLang:null,horizAdvX:uC,horizOriginX:uC,horizOriginY:uC,id:null,ideographic:uC,imageRendering:null,initialVisibility:null,in:null,in2:null,intercept:uC,k:uC,k1:uC,k2:uC,k3:uC,k4:uC,kernelMatrix:hC,kernelUnitLength:null,keyPoints:null,keySplines:null,keyTimes:null,kerning:null,lang:null,lengthAdjust:null,letterSpacing:null,lightingColor:null,limitingConeAngle:uC,local:null,markerEnd:null,markerMid:null,markerStart:null,markerHeight:null,markerUnits:null,markerWidth:null,mask:null,maskContentUnits:null,maskUnits:null,mathematical:null,max:null,media:null,mediaCharacterEncoding:null,mediaContentEncodings:null,mediaSize:uC,mediaTime:null,method:null,min:null,mode:null,name:null,navDown:null,navDownLeft:null,navDownRight:null,navLeft:null,navNext:null,navPrev:null,navRight:null,navUp
:null,navUpLeft:null,navUpRight:null,numOctaves:null,observer:null,offset:null,onAbort:null,onActivate:null,onAfterPrint:null,onBeforePrint:null,onBegin:null,onCancel:null,onCanPlay:null,onCanPlayThrough:null,onChange:null,onClick:null,onClose:null,onCopy:null,onCueChange:null,onCut:null,onDblClick:null,onDrag:null,onDragEnd:null,onDragEnter:null,onDragExit:null,onDragLeave:null,onDragOver:null,onDragStart:null,onDrop:null,onDurationChange:null,onEmptied:null,onEnd:null,onEnded:null,onError:null,onFocus:null,onFocusIn:null,onFocusOut:null,onHashChange:null,onInput:null,onInvalid:null,onKeyDown:null,onKeyPress:null,onKeyUp:null,onLoad:null,onLoadedData:null,onLoadedMetadata:null,onLoadStart:null,onMessage:null,onMouseDown:null,onMouseEnter:null,onMouseLeave:null,onMouseMove:null,onMouseOut:null,onMouseOver:null,onMouseUp:null,onMouseWheel:null,onOffline:null,onOnline:null,onPageHide:null,onPageShow:null,onPaste:null,onPause:null,onPlay:null,onPlaying:null,onPopState:null,onProgress:null,onRateChange:null,onRepeat:null,onReset:null,onResize:null,onScroll:null,onSeeked:null,onSeeking:null,onSelect:null,onShow:null,onStalled:null,onStorage:null,onSubmit:null,onSuspend:null,onTimeUpdate:null,onToggle:null,onUnload:null,onVolumeChange:null,onWaiting:null,onZoom:null,opacity:null,operator:null,order:null,orient:null,orientation:null,origin:null,overflow:null,overlay:null,overlinePosition:uC,overlineThickness:uC,paintOrder:null,panose1:null,path:null,pathLength:uC,patternContentUnits:null,patternTransform:null,patternUnits:null,phase:null,ping:dC,pitch:null,playbackOrder:null,pointerEvents:null,points:null,pointsAtX:uC,pointsAtY:uC,pointsAtZ:uC,preserveAlpha:null,preserveAspectRatio:null,primitiveUnits:null,propagate:null,property:hC,r:null,radius:null,referrerPolicy:null,refX:null,refY:null,rel:hC,rev:hC,renderingIntent:null,repeatCount:null,repeatDur:null,requiredExtensions:hC,requiredFeatures:hC,requiredFonts:hC,requiredFormats:hC,resource:null,restart:null,result:null,r
otate:null,rx:null,ry:null,scale:null,seed:null,shapeRendering:null,side:null,slope:null,snapshotTime:null,specularConstant:uC,specularExponent:uC,spreadMethod:null,spacing:null,startOffset:null,stdDeviation:null,stemh:null,stemv:null,stitchTiles:null,stopColor:null,stopOpacity:null,strikethroughPosition:uC,strikethroughThickness:uC,string:null,stroke:null,strokeDashArray:hC,strokeDashOffset:null,strokeLineCap:null,strokeLineJoin:null,strokeMiterLimit:uC,strokeOpacity:uC,strokeWidth:null,style:null,surfaceScale:uC,syncBehavior:null,syncBehaviorDefault:null,syncMaster:null,syncTolerance:null,syncToleranceDefault:null,systemLanguage:hC,tabIndex:uC,tableValues:null,target:null,targetX:uC,targetY:uC,textAnchor:null,textDecoration:null,textRendering:null,textLength:null,timelineBegin:null,title:null,transformBehavior:null,type:null,typeOf:hC,to:null,transform:null,transformOrigin:null,u1:null,u2:null,underlinePosition:uC,underlineThickness:uC,unicode:null,unicodeBidi:null,unicodeRange:null,unitsPerEm:uC,values:null,vAlphabetic:uC,vMathematical:uC,vectorEffect:null,vHanging:uC,vIdeographic:uC,version:null,vertAdvY:uC,vertOriginX:uC,vertOriginY:uC,viewBox:null,viewTarget:null,visibility:null,width:null,widths:null,wordSpacing:null,writingMode:null,x:null,x1:null,x2:null,xChannelSelector:null,xHeight:uC,y:null,y1:null,y2:null,yChannelSelector:null,z:null,zoomAndPan:null}}),CC=/^data[-\w.:]+$/i,$C=/-[a-z]/g,PC=/[A-Z]/g;function DC(e,t){const n=aC(t);let r=t,a=oC;if(n in e.normal)return e.property[e.normal[n]];if(n.length>4&&"data"===n.slice(0,4)&&CC.test(t)){if("-"===t.charAt(4)){const e=t.slice(5).replace($C,MC);r="data"+e.charAt(0).toUpperCase()+e.slice(1)}else{const e=t.slice(4);if(!$C.test(e)){let n=e.replace(PC,IC);"-"!==n.charAt(0)&&(n="-"+n),t="data"+n}}a=vC}return new a(r,t)}function IC(e){return"-"+e.toLowerCase()}function MC(e){return e.charAt(1).toUpperCase()}const NC=rC([xC,wC,_C,TC,EC],"html"),RC=rC([xC,wC,_C,TC,AC],"svg");function LC(e){const 
t=[],n=String(e||"");let r=n.indexOf(","),a=0,o=!1;for(;!o;){-1===r&&(r=n.length,o=!0);const e=n.slice(a,r).trim();!e&&o||t.push(e),a=r+1,r=n.indexOf(",",a)}return t}function BC(e,t){const n=t||{};return(""===e[e.length-1]?[...e,""]:e).join((n.padRight?" ":"")+","+(!1===n.padLeft?"":" ")).trim()}const jC=/[#.]/g;function UC(e){const t=String(e||"").trim();return t?t.split(/[ \t\n\r\f]+/g):[]}function zC(e){return e.join(" ").trim()}const FC=new Set(["button","menu","reset","submit"]),ZC={}.hasOwnProperty;function QC(e,t,n){const r=n&&function(e){const t={};let n=-1;for(;++n-1&&ee)return{line:t+1,column:e-(t>0?n[t-1]:0)+1,offset:e}},toOffset:function(e){const t=e&&e.line,r=e&&e.column;if("number"==typeof t&&"number"==typeof r&&!Number.isNaN(t)&&!Number.isNaN(r)&&t-1 in n){const e=(n[t-2]||0)+r-1||0;if(e>-1&&e=55296&&e<=57343}function f$(e){return 32!==e&&10!==e&&13!==e&&9!==e&&12!==e&&e>=1&&e<=31||e>=127&&e<=159}function m$(e){return e>=64976&&e<=65007||i$.has(e)}var g$,v$,b$,y$;(v$=g$||(g$={})).controlCharacterInInputStream="control-character-in-input-stream",v$.noncharacterInInputStream="noncharacter-in-input-stream",v$.surrogateInInputStream="surrogate-in-input-stream",v$.nonVoidHtmlElementStartTagWithTrailingSolidus="non-void-html-element-start-tag-with-trailing-solidus",v$.endTagWithAttributes="end-tag-with-attributes",v$.endTagWithTrailingSolidus="end-tag-with-trailing-solidus",v$.unexpectedSolidusInTag="unexpected-solidus-in-tag",v$.unexpectedNullCharacter="unexpected-null-character",v$.unexpectedQuestionMarkInsteadOfTagName="unexpected-question-mark-instead-of-tag-name",v$.invalidFirstCharacterOfTagName="invalid-first-character-of-tag-name",v$.unexpectedEqualsSignBeforeAttributeName="unexpected-equals-sign-before-attribute-name",v$.missingEndTagName="missing-end-tag-name",v$.unexpectedCharacterInAttributeName="unexpected-character-in-attribute-name",v$.unknownNamedCharacterReference="unknown-named-character-reference",v$.missingSemicolonAfterCharacterReferenc
e="missing-semicolon-after-character-reference",v$.unexpectedCharacterAfterDoctypeSystemIdentifier="unexpected-character-after-doctype-system-identifier",v$.unexpectedCharacterInUnquotedAttributeValue="unexpected-character-in-unquoted-attribute-value",v$.eofBeforeTagName="eof-before-tag-name",v$.eofInTag="eof-in-tag",v$.missingAttributeValue="missing-attribute-value",v$.missingWhitespaceBetweenAttributes="missing-whitespace-between-attributes",v$.missingWhitespaceAfterDoctypePublicKeyword="missing-whitespace-after-doctype-public-keyword",v$.missingWhitespaceBetweenDoctypePublicAndSystemIdentifiers="missing-whitespace-between-doctype-public-and-system-identifiers",v$.missingWhitespaceAfterDoctypeSystemKeyword="missing-whitespace-after-doctype-system-keyword",v$.missingQuoteBeforeDoctypePublicIdentifier="missing-quote-before-doctype-public-identifier",v$.missingQuoteBeforeDoctypeSystemIdentifier="missing-quote-before-doctype-system-identifier",v$.missingDoctypePublicIdentifier="missing-doctype-public-identifier",v$.missingDoctypeSystemIdentifier="missing-doctype-system-identifier",v$.abruptDoctypePublicIdentifier="abrupt-doctype-public-identifier",v$.abruptDoctypeSystemIdentifier="abrupt-doctype-system-identifier",v$.cdataInHtmlContent="cdata-in-html-content",v$.incorrectlyOpenedComment="incorrectly-opened-comment",v$.eofInScriptHtmlCommentLikeText="eof-in-script-html-comment-like-text",v$.eofInDoctype="eof-in-doctype",v$.nestedComment="nested-comment",v$.abruptClosingOfEmptyComment="abrupt-closing-of-empty-comment",v$.eofInComment="eof-in-comment",v$.incorrectlyClosedComment="incorrectly-closed-comment",v$.eofInCdata="eof-in-cdata",v$.absenceOfDigitsInNumericCharacterReference="absence-of-digits-in-numeric-character-reference",v$.nullCharacterReference="null-character-reference",v$.surrogateCharacterReference="surrogate-character-reference",v$.characterReferenceOutsideUnicodeRange="character-reference-outside-unicode-range",v$.controlCharacterReference="control-chara
cter-reference",v$.noncharacterCharacterReference="noncharacter-character-reference",v$.missingWhitespaceBeforeDoctypeName="missing-whitespace-before-doctype-name",v$.missingDoctypeName="missing-doctype-name",v$.invalidCharacterSequenceAfterDoctypeName="invalid-character-sequence-after-doctype-name",v$.duplicateAttribute="duplicate-attribute",v$.nonConformingDoctype="non-conforming-doctype",v$.missingDoctype="missing-doctype",v$.misplacedDoctype="misplaced-doctype",v$.endTagWithoutMatchingOpenElement="end-tag-without-matching-open-element",v$.closingOfElementWithOpenChildElements="closing-of-element-with-open-child-elements",v$.disallowedContentInNoscriptInHead="disallowed-content-in-noscript-in-head",v$.openElementsLeftAfterEof="open-elements-left-after-eof",v$.abandonedHeadElementChild="abandoned-head-element-child",v$.misplacedStartTagForHeadElement="misplaced-start-tag-for-head-element",v$.nestedNoscriptInHead="nested-noscript-in-head",v$.eofInElementThatCanContainOnlyText="eof-in-element-that-can-contain-only-text";class O${constructor(e){this.handler=e,this.html="",this.pos=-1,this.lastGapPos=-2,this.gapStack=[],this.skipNextNewLine=!1,this.lastChunkWritten=!1,this.endOfChunkHit=!1,this.bufferWaterline=65536,this.isEol=!1,this.lineStartPos=0,this.droppedBufferSize=0,this.line=1,this.lastErrOffset=-1}get col(){return this.pos-this.lineStartPos+Number(this.lastGapPos!==this.pos)}get offset(){return this.droppedBufferSize+this.pos}getError(e,t){const{line:n,col:r,offset:a}=this,o=r+t,i=a+t;return{code:e,startLine:n,endLine:n,startCol:o,endCol:o,startOffset:i,endOffset:i}}_err(e){this.handler.onParseError&&this.lastErrOffset!==this.offset&&(this.lastErrOffset=this.offset,this.handler.onParseError(this.getError(e,0)))}_addGap(){this.gapStack.push(this.lastGapPos),this.lastGapPos=this.pos}_processSurrogate(e){if(this.pos!==this.html.length-1){const t=this.html.charCodeAt(this.pos+1);if(function(e){return e>=56320&&e<=57343}(t))return 
this.pos++,this._addGap(),1024*(e-55296)+9216+t}else if(!this.lastChunkWritten)return this.endOfChunkHit=!0,l$.EOF;return this._err(g$.surrogateInInputStream),e}willDropParsedChunk(){return this.pos>this.bufferWaterline}dropParsedChunk(){this.willDropParsedChunk()&&(this.html=this.html.substring(this.pos),this.lineStartPos-=this.pos,this.droppedBufferSize+=this.pos,this.pos=0,this.lastGapPos=-2,this.gapStack.length=0)}write(e,t){this.html.length>0?this.html+=e:this.html=e,this.endOfChunkHit=!1,this.lastChunkWritten=t}insertHtmlAtCurrentPos(e){this.html=this.html.substring(0,this.pos+1)+e+this.html.substring(this.pos+1),this.endOfChunkHit=!1}startsWith(e,t){if(this.pos+e.length>this.html.length)return this.endOfChunkHit=!this.lastChunkWritten,!1;if(t)return this.html.startsWith(e,this.pos);for(let t=0;t=this.html.length)return this.endOfChunkHit=!this.lastChunkWritten,l$.EOF;const n=this.html.charCodeAt(t);return n===l$.CARRIAGE_RETURN?l$.LINE_FEED:n}advance(){if(this.pos++,this.isEol&&(this.isEol=!1,this.line++,this.lineStartPos=this.pos),this.pos>=this.html.length)return this.endOfChunkHit=!this.lastChunkWritten,l$.EOF;let e=this.html.charCodeAt(this.pos);return e===l$.CARRIAGE_RETURN?(this.isEol=!0,this.skipNextNewLine=!0,l$.LINE_FEED):e===l$.LINE_FEED&&(this.isEol=!0,this.skipNextNewLine)?(this.line--,this.skipNextNewLine=!1,this._addGap(),this.advance()):(this.skipNextNewLine=!1,h$(e)&&(e=this._processSurrogate(e)),null===this.handler.onParseError||e>31&&e<127||e===l$.LINE_FEED||e===l$.CARRIAGE_RETURN||e>159&&e<64976||this._checkForProblematicCharacters(e),e)}_checkForProblematicCharacters(e){f$(e)?this._err(g$.controlCharacterInInputStream):m$(e)&&this._err(g$.noncharacterInInputStream)}retreat(e){for(this.pos-=e;this.pos=0;n--)if(e.attrs[n].name===t)return e.attrs[n].value;return 
null}(y$=b$||(b$={}))[y$.CHARACTER=0]="CHARACTER",y$[y$.NULL_CHARACTER=1]="NULL_CHARACTER",y$[y$.WHITESPACE_CHARACTER=2]="WHITESPACE_CHARACTER",y$[y$.START_TAG=3]="START_TAG",y$[y$.END_TAG=4]="END_TAG",y$[y$.COMMENT=5]="COMMENT",y$[y$.DOCTYPE=6]="DOCTYPE",y$[y$.EOF=7]="EOF",y$[y$.HIBERNATION=8]="HIBERNATION";const x$=new Uint16Array('ᵁ<Õıʊҝջאٵ۞ޢߖࠏ੊ઑඡ๭༉༦჊ረዡᐕᒝᓃᓟᔥ\0\0\0\0\0\0ᕫᛍᦍᰒᷝ὾⁠↰⊍⏀⏻⑂⠤⤒ⴈ⹈⿎〖㊺㘹㞬㣾㨨㩱㫠㬮ࠀEMabcfglmnoprstu\\bfms„‹•˜¦³¹ÈÏlig耻Æ䃆P耻&䀦cute耻Á䃁reve;䄂Āiyx}rc耻Â䃂;䐐r;쀀𝔄rave耻À䃀pha;䎑acr;䄀d;橓Āgp¡on;䄄f;쀀𝔸plyFunction;恡ing耻Å䃅Ācs¾Ãr;쀀𝒜ign;扔ilde耻Ã䃃ml耻Ä䃄ЀaceforsuåûþėĜĢħĪĀcrêòkslash;或Ŷöø;櫧ed;挆y;䐑ƀcrtąċĔause;戵noullis;愬a;䎒r;쀀𝔅pf;쀀𝔹eve;䋘còēmpeq;扎܀HOacdefhilorsuōőŖƀƞƢƵƷƺǜȕɳɸɾcy;䐧PY耻©䂩ƀcpyŝŢźute;䄆Ā;iŧŨ拒talDifferentialD;慅leys;愭ȀaeioƉƎƔƘron;䄌dil耻Ç䃇rc;䄈nint;戰ot;䄊ĀdnƧƭilla;䂸terDot;䂷òſi;䎧rcleȀDMPTLJNjǑǖot;抙inus;抖lus;投imes;抗oĀcsǢǸkwiseContourIntegral;戲eCurlyĀDQȃȏoubleQuote;思uote;怙ȀlnpuȞȨɇɕonĀ;eȥȦ户;橴ƀgitȯȶȺruent;扡nt;戯ourIntegral;戮ĀfrɌɎ;愂oduct;成nterClockwiseContourIntegral;戳oss;樯cr;쀀𝒞pĀ;Cʄʅ拓ap;才րDJSZacefiosʠʬʰʴʸˋ˗ˡ˦̳ҍĀ;oŹʥtrahd;椑cy;䐂cy;䐅cy;䐏ƀgrsʿ˄ˇger;怡r;憡hv;櫤Āayː˕ron;䄎;䐔lĀ;t˝˞戇a;䎔r;쀀𝔇Āaf˫̧Ācm˰̢riticalȀADGT̖̜̀̆cute;䂴oŴ̋̍;䋙bleAcute;䋝rave;䁠ilde;䋜ond;拄ferentialD;慆Ѱ̽\0\0\0͔͂\0Ѕf;쀀𝔻ƀ;DE͈͉͍䂨ot;惜qual;扐blèCDLRUVͣͲ΂ϏϢϸontourIntegraìȹoɴ͹\0\0ͻ»͉nArrow;懓Āeo·ΤftƀARTΐΖΡrrow;懐ightArrow;懔eåˊngĀLRΫτeftĀARγιrrow;柸ightArrow;柺ightArrow;柹ightĀATϘϞrrow;懒ee;抨pɁϩ\0\0ϯrrow;懑ownArrow;懕erticalBar;戥ǹABLRTaВЪаўѿͼrrowƀ;BUНОТ憓ar;椓pArrow;懵reve;䌑eft˒к\0ц\0ѐightVector;楐eeVector;楞ectorĀ;Bљњ憽ar;楖ightǔѧ\0ѱeeVector;楟ectorĀ;BѺѻ懁ar;楗eeĀ;A҆҇护rrow;憧ĀctҒҗr;쀀𝒟rok;䄐ࠀNTacdfglmopqstuxҽӀӄӋӞӢӧӮӵԡԯԶՒ՝ՠեG;䅊H耻Ð䃐cute耻É䃉ƀaiyӒӗӜron;䄚rc耻Ê䃊;䐭ot;䄖r;쀀𝔈rave耻È䃈ement;戈ĀapӺӾcr;䄒tyɓԆ\0\0ԒmallSquare;旻erySmallSquare;斫ĀgpԦԪon;䄘f;쀀𝔼silon;䎕uĀaiԼՉlĀ;TՂՃ橵ilde;扂librium;懌Āci՗՚r;愰m;橳a;䎗ml耻Ë䃋Āipժկsts;戃onentialE;慇ʀcfiosօֈ֍ֲ׌y;䐤r;쀀𝔉lledɓ֗\0\0֣mallSquare;旼erySmallSquare;斪Ͱֺ\0ֿ\0\0ׄf;쀀𝔽All;戀riertrf;愱cò׋؀JTabcdfgorstר׬ׯ׺؀ؒؖ؛؝أ٬ٲcy;䐃耻>䀾mmaĀ;d׷׸䎓;䏜reve;䄞ƀeiy؇،ؐdil;䄢rc;䄜;䐓ot;䄠r;쀀𝔊;拙pf;쀀𝔾eater̀EFGLSTصلَٖٛ٦qualĀ;Lؾؿ扥ess;招ullEqual;执reater;檢ess;扷lantEqual;橾ilde;扳cr;쀀𝒢;扫ЀAac
fiosuڅڋږڛڞڪھۊRDcy;䐪Āctڐڔek;䋇;䁞irc;䄤r;愌lbertSpace;愋ǰگ\0ڲf;愍izontalLine;攀Āctۃۅòکrok;䄦mpńېۘownHumðįqual;扏܀EJOacdfgmnostuۺ۾܃܇܎ܚܞܡܨ݄ݸދޏޕcy;䐕lig;䄲cy;䐁cute耻Í䃍Āiyܓܘrc耻Î䃎;䐘ot;䄰r;愑rave耻Ì䃌ƀ;apܠܯܿĀcgܴܷr;䄪inaryI;慈lieóϝǴ݉\0ݢĀ;eݍݎ戬Āgrݓݘral;戫section;拂isibleĀCTݬݲomma;恣imes;恢ƀgptݿރވon;䄮f;쀀𝕀a;䎙cr;愐ilde;䄨ǫޚ\0ޞcy;䐆l耻Ï䃏ʀcfosuެ޷޼߂ߐĀiyޱ޵rc;䄴;䐙r;쀀𝔍pf;쀀𝕁ǣ߇\0ߌr;쀀𝒥rcy;䐈kcy;䐄΀HJacfosߤߨ߽߬߱ࠂࠈcy;䐥cy;䐌ppa;䎚Āey߶߻dil;䄶;䐚r;쀀𝔎pf;쀀𝕂cr;쀀𝒦րJTaceflmostࠥࠩࠬࡐࡣ঳সে্਷ੇcy;䐉耻<䀼ʀcmnpr࠷࠼ࡁࡄࡍute;䄹bda;䎛g;柪lacetrf;愒r;憞ƀaeyࡗ࡜ࡡron;䄽dil;䄻;䐛Āfsࡨ॰tԀACDFRTUVarࡾࢩࢱࣦ࣠ࣼयज़ΐ४Ānrࢃ࢏gleBracket;柨rowƀ;BR࢙࢚࢞憐ar;懤ightArrow;懆eiling;挈oǵࢷ\0ࣃbleBracket;柦nǔࣈ\0࣒eeVector;楡ectorĀ;Bࣛࣜ懃ar;楙loor;挊ightĀAV࣯ࣵrrow;憔ector;楎Āerँगeƀ;AVउऊऐ抣rrow;憤ector;楚iangleƀ;BEतथऩ抲ar;槏qual;抴pƀDTVषूौownVector;楑eeVector;楠ectorĀ;Bॖॗ憿ar;楘ectorĀ;B॥०憼ar;楒ightáΜs̀EFGLSTॾঋকঝঢভqualGreater;拚ullEqual;扦reater;扶ess;檡lantEqual;橽ilde;扲r;쀀𝔏Ā;eঽা拘ftarrow;懚idot;䄿ƀnpw৔ਖਛgȀLRlr৞৷ਂਐeftĀAR০৬rrow;柵ightArrow;柷ightArrow;柶eftĀarγਊightáοightáϊf;쀀𝕃erĀLRਢਬeftArrow;憙ightArrow;憘ƀchtਾੀੂòࡌ;憰rok;䅁;扪Ѐacefiosuਗ਼੝੠੷੼અઋ઎p;椅y;䐜Ādl੥੯iumSpace;恟lintrf;愳r;쀀𝔐nusPlus;戓pf;쀀𝕄cò੶;䎜ҀJacefostuણધભીଔଙඑ඗ඞcy;䐊cute;䅃ƀaey઴હાron;䅇dil;䅅;䐝ƀgswે૰଎ativeƀMTV૓૟૨ediumSpace;怋hiĀcn૦૘ë૙eryThiî૙tedĀGL૸ଆreaterGreateòٳessLesóੈLine;䀊r;쀀𝔑ȀBnptଢନଷ଺reak;恠BreakingSpace;䂠f;愕ڀ;CDEGHLNPRSTV୕ୖ୪୼஡௫ఄ౞಄ದ೘ൡඅ櫬Āou୛୤ngruent;扢pCap;扭oubleVerticalBar;戦ƀlqxஃஊ஛ement;戉ualĀ;Tஒஓ扠ilde;쀀≂̸ists;戄reater΀;EFGLSTஶஷ஽௉௓௘௥扯qual;扱ullEqual;쀀≧̸reater;쀀≫̸ess;批lantEqual;쀀⩾̸ilde;扵umpń௲௽ownHump;쀀≎̸qual;쀀≏̸eĀfsఊధtTriangleƀ;BEచఛడ拪ar;쀀⧏̸qual;括s̀;EGLSTవశ఼ౄోౘ扮qual;扰reater;扸ess;쀀≪̸lantEqual;쀀⩽̸ilde;扴estedĀGL౨౹reaterGreater;쀀⪢̸essLess;쀀⪡̸recedesƀ;ESಒಓಛ技qual;쀀⪯̸lantEqual;拠ĀeiಫಹverseElement;戌ghtTriangleƀ;BEೋೌ೒拫ar;쀀⧐̸qual;拭ĀquೝഌuareSuĀbp೨೹setĀ;E೰ೳ쀀⊏̸qual;拢ersetĀ;Eഃആ쀀⊐̸qual;拣ƀbcpഓതൎsetĀ;Eഛഞ쀀⊂⃒qual;抈ceedsȀ;ESTലള഻െ抁qual;쀀⪰̸lantEqual;拡ilde;쀀≿̸ersetĀ;E൘൛쀀⊃⃒qual;抉ildeȀ;EFT൮൯൵ൿ扁qual;扄ullEqual;扇ilde;扉erticalBar;戤cr;쀀𝒩ilde耻Ñ䃑;䎝܀Eacdfgmoprstuvලෂ෉෕ෛ෠෧෼ขภยา฿ไlig;䅒cute耻Ó䃓Āiy෎ීrc耻Ô䃔;䐞blac;䅐r;쀀𝔒rave耻Ò䃒ƀaei෮ෲ෶cr;䅌ga;䎩cron;䎟pf;쀀𝕆enCurlyĀDQฎบoubleQuote;怜uote;怘;橔Āclวฬr;쀀𝒪ash耻Ø䃘iŬื฼de耻Õ䃕es;樷ml耻Ö䃖erĀBP๋๠Āar๐๓
r;怾acĀek๚๜;揞et;掴arenthesis;揜Ҁacfhilors๿ງຊຏຒດຝະ໼rtialD;戂y;䐟r;쀀𝔓i;䎦;䎠usMinus;䂱Āipຢອncareplanåڝf;愙Ȁ;eio຺ູ໠໤檻cedesȀ;EST່້໏໚扺qual;檯lantEqual;扼ilde;找me;怳Ādp໩໮uct;戏ortionĀ;aȥ໹l;戝Āci༁༆r;쀀𝒫;䎨ȀUfos༑༖༛༟OT耻"䀢r;쀀𝔔pf;愚cr;쀀𝒬؀BEacefhiorsu༾གྷཇའཱིྦྷྪྭ႖ႩႴႾarr;椐G耻®䂮ƀcnrཎནབute;䅔g;柫rĀ;tཛྷཝ憠l;椖ƀaeyཧཬཱron;䅘dil;䅖;䐠Ā;vླྀཹ愜erseĀEUྂྙĀlq྇ྎement;戋uilibrium;懋pEquilibrium;楯r»ཹo;䎡ghtЀACDFTUVa࿁࿫࿳ဢဨၛႇϘĀnr࿆࿒gleBracket;柩rowƀ;BL࿜࿝࿡憒ar;懥eftArrow;懄eiling;按oǵ࿹\0စbleBracket;柧nǔည\0နeeVector;楝ectorĀ;Bဝသ懂ar;楕loor;挋Āerိ၃eƀ;AVဵံြ抢rrow;憦ector;楛iangleƀ;BEၐၑၕ抳ar;槐qual;抵pƀDTVၣၮၸownVector;楏eeVector;楜ectorĀ;Bႂႃ憾ar;楔ectorĀ;B႑႒懀ar;楓Āpuႛ႞f;愝ndImplies;楰ightarrow;懛ĀchႹႼr;愛;憱leDelayed;槴ڀHOacfhimoqstuფჱჷჽᄙᄞᅑᅖᅡᅧᆵᆻᆿĀCcჩხHcy;䐩y;䐨FTcy;䐬cute;䅚ʀ;aeiyᄈᄉᄎᄓᄗ檼ron;䅠dil;䅞rc;䅜;䐡r;쀀𝔖ortȀDLRUᄪᄴᄾᅉownArrow»ОeftArrow»࢚ightArrow»࿝pArrow;憑gma;䎣allCircle;战pf;쀀𝕊ɲᅭ\0\0ᅰt;戚areȀ;ISUᅻᅼᆉᆯ斡ntersection;抓uĀbpᆏᆞsetĀ;Eᆗᆘ抏qual;抑ersetĀ;Eᆨᆩ抐qual;抒nion;抔cr;쀀𝒮ar;拆ȀbcmpᇈᇛሉላĀ;sᇍᇎ拐etĀ;Eᇍᇕqual;抆ĀchᇠህeedsȀ;ESTᇭᇮᇴᇿ扻qual;檰lantEqual;扽ilde;承Tháྌ;我ƀ;esሒሓሣ拑rsetĀ;Eሜም抃qual;抇et»ሓրHRSacfhiorsሾቄ቉ቕ቞ቱቶኟዂወዑORN耻Þ䃞ADE;愢ĀHc቎ቒcy;䐋y;䐦Ābuቚቜ;䀉;䎤ƀaeyብቪቯron;䅤dil;䅢;䐢r;쀀𝔗Āeiቻ኉Dzኀ\0ኇefore;戴a;䎘Ācn኎ኘkSpace;쀀  
Space;怉ldeȀ;EFTካኬኲኼ戼qual;扃ullEqual;扅ilde;扈pf;쀀𝕋ipleDot;惛Āctዖዛr;쀀𝒯rok;䅦ૡዷጎጚጦ\0ጬጱ\0\0\0\0\0ጸጽ፷ᎅ\0᏿ᐄᐊᐐĀcrዻጁute耻Ú䃚rĀ;oጇገ憟cir;楉rǣጓ\0጖y;䐎ve;䅬Āiyጞጣrc耻Û䃛;䐣blac;䅰r;쀀𝔘rave耻Ù䃙acr;䅪Ādiፁ፩erĀBPፈ፝Āarፍፐr;䁟acĀekፗፙ;揟et;掵arenthesis;揝onĀ;P፰፱拃lus;抎Āgp፻፿on;䅲f;쀀𝕌ЀADETadps᎕ᎮᎸᏄϨᏒᏗᏳrrowƀ;BDᅐᎠᎤar;椒ownArrow;懅ownArrow;憕quilibrium;楮eeĀ;AᏋᏌ报rrow;憥ownáϳerĀLRᏞᏨeftArrow;憖ightArrow;憗iĀ;lᏹᏺ䏒on;䎥ing;䅮cr;쀀𝒰ilde;䅨ml耻Ü䃜ҀDbcdefosvᐧᐬᐰᐳᐾᒅᒊᒐᒖash;披ar;櫫y;䐒ashĀ;lᐻᐼ抩;櫦Āerᑃᑅ;拁ƀbtyᑌᑐᑺar;怖Ā;iᑏᑕcalȀBLSTᑡᑥᑪᑴar;戣ine;䁼eparator;杘ilde;所ThinSpace;怊r;쀀𝔙pf;쀀𝕍cr;쀀𝒱dash;抪ʀcefosᒧᒬᒱᒶᒼirc;䅴dge;拀r;쀀𝔚pf;쀀𝕎cr;쀀𝒲Ȁfiosᓋᓐᓒᓘr;쀀𝔛;䎞pf;쀀𝕏cr;쀀𝒳ҀAIUacfosuᓱᓵᓹᓽᔄᔏᔔᔚᔠcy;䐯cy;䐇cy;䐮cute耻Ý䃝Āiyᔉᔍrc;䅶;䐫r;쀀𝔜pf;쀀𝕐cr;쀀𝒴ml;䅸ЀHacdefosᔵᔹᔿᕋᕏᕝᕠᕤcy;䐖cute;䅹Āayᕄᕉron;䅽;䐗ot;䅻Dzᕔ\0ᕛoWidtè૙a;䎖r;愨pf;愤cr;쀀𝒵௡ᖃᖊᖐ\0ᖰᖶᖿ\0\0\0\0ᗆᗛᗫᙟ᙭\0ᚕ᚛ᚲᚹ\0ᚾcute耻á䃡reve;䄃̀;Ediuyᖜᖝᖡᖣᖨᖭ戾;쀀∾̳;房rc耻â䃢te肻´̆;䐰lig耻æ䃦Ā;r²ᖺ;쀀𝔞rave耻à䃠ĀepᗊᗖĀfpᗏᗔsym;愵èᗓha;䎱ĀapᗟcĀclᗤᗧr;䄁g;樿ɤᗰ\0\0ᘊʀ;adsvᗺᗻᗿᘁᘇ戧nd;橕;橜lope;橘;橚΀;elmrszᘘᘙᘛᘞᘿᙏᙙ戠;榤e»ᘙsdĀ;aᘥᘦ戡ѡᘰᘲᘴᘶᘸᘺᘼᘾ;榨;榩;榪;榫;榬;榭;榮;榯tĀ;vᙅᙆ戟bĀ;dᙌᙍ抾;榝Āptᙔᙗh;戢»¹arr;捼Āgpᙣᙧon;䄅f;쀀𝕒΀;Eaeiop዁ᙻᙽᚂᚄᚇᚊ;橰cir;橯;扊d;手s;䀧roxĀ;e዁ᚒñᚃing耻å䃥ƀctyᚡᚦᚨr;쀀𝒶;䀪mpĀ;e዁ᚯñʈilde耻ã䃣ml耻ä䃤Āciᛂᛈoninôɲnt;樑ࠀNabcdefiklnoprsu᛭ᛱᜰ᜼ᝃᝈ᝸᝽០៦ᠹᡐᜍ᤽᥈ᥰot;櫭Ācrᛶ᜞kȀcepsᜀᜅᜍᜓong;扌psilon;䏶rime;怵imĀ;e᜚᜛戽q;拍Ŷᜢᜦee;抽edĀ;gᜬᜭ挅e»ᜭrkĀ;t፜᜷brk;掶Āoyᜁᝁ;䐱quo;怞ʀcmprtᝓ᝛ᝡᝤᝨausĀ;eĊĉptyv;榰séᜌnoõēƀahwᝯ᝱ᝳ;䎲;愶een;扬r;쀀𝔟g΀costuvwឍឝឳេ៕៛៞ƀaiuបពរðݠrc;旯p»፱ƀdptឤឨឭot;樀lus;樁imes;樂ɱឹ\0\0ើcup;樆ar;昅riangleĀdu៍្own;施p;斳plus;樄eåᑄåᒭarow;植ƀako៭ᠦᠵĀcn៲ᠣkƀlst៺֫᠂ozenge;槫riangleȀ;dlr᠒᠓᠘᠝斴own;斾eft;旂ight;斸k;搣Ʊᠫ\0ᠳƲᠯ\0ᠱ;斒;斑4;斓ck;斈ĀeoᠾᡍĀ;qᡃᡆ쀀=⃥uiv;쀀≡⃥t;挐Ȁptwxᡙᡞᡧᡬf;쀀𝕓Ā;tᏋᡣom»Ꮜtie;拈؀DHUVbdhmptuvᢅᢖᢪᢻᣗᣛᣬ᣿ᤅᤊᤐᤡȀLRlrᢎᢐᢒᢔ;敗;敔;敖;敓ʀ;DUduᢡᢢᢤᢦᢨ敐;敦;敩;敤;敧ȀLRlrᢳᢵᢷᢹ;敝;敚;敜;教΀;HLRhlrᣊᣋᣍᣏᣑᣓᣕ救;敬;散;敠;敫;敢;敟ox;槉ȀLRlrᣤᣦᣨᣪ;敕;敒;攐;攌ʀ;DUduڽ᣷᣹᣻᣽;敥;敨;攬;攴inus;抟lus;択imes;抠ȀLRlrᤙᤛᤝ᤟;敛;敘;攘;攔΀;HLRhlrᤰᤱᤳᤵᤷ᤻᤹攂;敪;敡;敞;攼;攤;攜Āevģ᥂bar耻¦䂦Ȁceioᥑᥖᥚᥠr;쀀𝒷mi;恏mĀ;e᜚᜜lƀ;bhᥨᥩᥫ䁜;槅sub;柈Ŭᥴ᥾lĀ;e᥹᥺怢t»᥺pƀ;Eeįᦅᦇ;檮Ā;qۜۛೡᦧ\0᧨ᨑᨕᨲ\0ᨷᩐ\0\0᪴\0\0᫁\0\0ᬡᬮ᭍᭒\0᯽\0ᰌƀcpr᦭ᦲ᧝ute;䄇̀;abcdsᦿᧀᧄ᧊᧕᧙戩nd;橄rcup;橉Āau᧏᧒p;橋p;橇ot;橀;쀀∩︀Āeo᧢᧥t;恁îړȀaeiu᧰᧻ᨁᨅǰ᧵\0᧸s;橍on;䄍dil耻ç䃧rc;䄉psĀ;sᨌᨍ橌m;橐ot;䄋ƀdmnᨛᨠᨦil肻¸ƭptyv;榲t脀¢;eᨭᨮ䂢räƲr;쀀𝔠ƀceiᨽᩀᩍy;䑇ckĀ;mᩇᩈ朓ark»ᩈ;䏇r΀;Ecefms᩟᩠ᩢᩫ᪤᪪᪮
旋;槃ƀ;elᩩᩪᩭ䋆q;扗eɡᩴ\0\0᪈rrowĀlr᩼᪁eft;憺ight;憻ʀRSacd᪒᪔᪖᪚᪟»ཇ;擈st;抛irc;抚ash;抝nint;樐id;櫯cir;槂ubsĀ;u᪻᪼晣it»᪼ˬ᫇᫔᫺\0ᬊonĀ;eᫍᫎ䀺Ā;qÇÆɭ᫙\0\0᫢aĀ;t᫞᫟䀬;䁀ƀ;fl᫨᫩᫫戁îᅠeĀmx᫱᫶ent»᫩eóɍǧ᫾\0ᬇĀ;dኻᬂot;橭nôɆƀfryᬐᬔᬗ;쀀𝕔oäɔ脀©;sŕᬝr;愗Āaoᬥᬩrr;憵ss;朗Ācuᬲᬷr;쀀𝒸Ābpᬼ᭄Ā;eᭁᭂ櫏;櫑Ā;eᭉᭊ櫐;櫒dot;拯΀delprvw᭠᭬᭷ᮂᮬᯔ᯹arrĀlr᭨᭪;椸;椵ɰ᭲\0\0᭵r;拞c;拟arrĀ;p᭿ᮀ憶;椽̀;bcdosᮏᮐᮖᮡᮥᮨ截rcap;橈Āauᮛᮞp;橆p;橊ot;抍r;橅;쀀∪︀Ȁalrv᮵ᮿᯞᯣrrĀ;mᮼᮽ憷;椼yƀevwᯇᯔᯘqɰᯎ\0\0ᯒreã᭳uã᭵ee;拎edge;拏en耻¤䂤earrowĀlrᯮ᯳eft»ᮀight»ᮽeäᯝĀciᰁᰇoninôǷnt;戱lcty;挭ঀAHabcdefhijlorstuwz᰸᰻᰿ᱝᱩᱵᲊᲞᲬᲷ᳻᳿ᴍᵻᶑᶫᶻ᷆᷍rò΁ar;楥Ȁglrs᱈ᱍ᱒᱔ger;怠eth;愸òᄳhĀ;vᱚᱛ怐»ऊūᱡᱧarow;椏aã̕Āayᱮᱳron;䄏;䐴ƀ;ao̲ᱼᲄĀgrʿᲁr;懊tseq;橷ƀglmᲑᲔᲘ耻°䂰ta;䎴ptyv;榱ĀirᲣᲨsht;楿;쀀𝔡arĀlrᲳᲵ»ࣜ»သʀaegsv᳂͸᳖᳜᳠mƀ;oș᳊᳔ndĀ;ș᳑uit;晦amma;䏝in;拲ƀ;io᳧᳨᳸䃷de脀÷;o᳧ᳰntimes;拇nø᳷cy;䑒cɯᴆ\0\0ᴊrn;挞op;挍ʀlptuwᴘᴝᴢᵉᵕlar;䀤f;쀀𝕕ʀ;emps̋ᴭᴷᴽᵂqĀ;d͒ᴳot;扑inus;戸lus;戔quare;抡blebarwedgåúnƀadhᄮᵝᵧownarrowóᲃarpoonĀlrᵲᵶefôᲴighôᲶŢᵿᶅkaro÷གɯᶊ\0\0ᶎrn;挟op;挌ƀcotᶘᶣᶦĀryᶝᶡ;쀀𝒹;䑕l;槶rok;䄑Ādrᶰᶴot;拱iĀ;fᶺ᠖斿Āah᷀᷃ròЩaòྦangle;榦Āci᷒ᷕy;䑟grarr;柿ऀDacdefglmnopqrstuxḁḉḙḸոḼṉṡṾấắẽỡἪἷὄ὎὚ĀDoḆᴴoôᲉĀcsḎḔute耻é䃩ter;橮ȀaioyḢḧḱḶron;䄛rĀ;cḭḮ扖耻ê䃪lon;払;䑍ot;䄗ĀDrṁṅot;扒;쀀𝔢ƀ;rsṐṑṗ檚ave耻è䃨Ā;dṜṝ檖ot;檘Ȁ;ilsṪṫṲṴ檙nters;揧;愓Ā;dṹṺ檕ot;檗ƀapsẅẉẗcr;䄓tyƀ;svẒẓẕ戅et»ẓpĀ1;ẝẤijạả;怄;怅怃ĀgsẪẬ;䅋p;怂ĀgpẴẸon;䄙f;쀀𝕖ƀalsỄỎỒrĀ;sỊị拕l;槣us;橱iƀ;lvỚớở䎵on»ớ;䏵ȀcsuvỪỳἋἣĀioữḱrc»Ḯɩỹ\0\0ỻíՈantĀglἂἆtr»ṝess»Ṻƀaeiἒ἖Ἒls;䀽st;扟vĀ;DȵἠD;橸parsl;槥ĀDaἯἳot;打rr;楱ƀcdiἾὁỸr;愯oô͒ĀahὉὋ;䎷耻ð䃰Āmrὓὗl耻ë䃫o;悬ƀcipὡὤὧl;䀡sôծĀeoὬὴctatioîՙnentialåչৡᾒ\0ᾞ\0ᾡᾧ\0\0ῆῌ\0ΐ\0ῦῪ \0 
⁚llingdotseñṄy;䑄male;晀ƀilrᾭᾳ῁lig;耀ffiɩᾹ\0\0᾽g;耀ffig;耀ffl;쀀𝔣lig;耀filig;쀀fjƀaltῙ῜ῡt;晭ig;耀flns;斱of;䆒ǰ΅\0ῳf;쀀𝕗ĀakֿῷĀ;vῼ´拔;櫙artint;樍Āao‌⁕Ācs‑⁒ႉ‸⁅⁈\0⁐β•‥‧‪‬\0‮耻½䂽;慓耻¼䂼;慕;慙;慛Ƴ‴\0‶;慔;慖ʴ‾⁁\0\0⁃耻¾䂾;慗;慜5;慘ƶ⁌\0⁎;慚;慝8;慞l;恄wn;挢cr;쀀𝒻ࢀEabcdefgijlnorstv₂₉₟₥₰₴⃰⃵⃺⃿℃ℒℸ̗ℾ⅒↞Ā;lٍ₇;檌ƀcmpₐₕ₝ute;䇵maĀ;dₜ᳚䎳;檆reve;䄟Āiy₪₮rc;䄝;䐳ot;䄡Ȁ;lqsؾق₽⃉ƀ;qsؾٌ⃄lanô٥Ȁ;cdl٥⃒⃥⃕c;檩otĀ;o⃜⃝檀Ā;l⃢⃣檂;檄Ā;e⃪⃭쀀⋛︀s;檔r;쀀𝔤Ā;gٳ؛mel;愷cy;䑓Ȁ;Eajٚℌℎℐ;檒;檥;檤ȀEaesℛℝ℩ℴ;扩pĀ;p℣ℤ檊rox»ℤĀ;q℮ℯ檈Ā;q℮ℛim;拧pf;쀀𝕘Āci⅃ⅆr;愊mƀ;el٫ⅎ⅐;檎;檐茀>;cdlqr׮ⅠⅪⅮⅳⅹĀciⅥⅧ;檧r;橺ot;拗Par;榕uest;橼ʀadelsↄⅪ←ٖ↛ǰ↉\0↎proø₞r;楸qĀlqؿ↖lesó₈ií٫Āen↣↭rtneqq;쀀≩︀Å↪ԀAabcefkosy⇄⇇⇱⇵⇺∘∝∯≨≽ròΠȀilmr⇐⇔⇗⇛rsðᒄf»․ilôکĀdr⇠⇤cy;䑊ƀ;cwࣴ⇫⇯ir;楈;憭ar;意irc;䄥ƀalr∁∎∓rtsĀ;u∉∊晥it»∊lip;怦con;抹r;쀀𝔥sĀew∣∩arow;椥arow;椦ʀamopr∺∾≃≞≣rr;懿tht;戻kĀlr≉≓eftarrow;憩ightarrow;憪f;쀀𝕙bar;怕ƀclt≯≴≸r;쀀𝒽asè⇴rok;䄧Ābp⊂⊇ull;恃hen»ᱛૡ⊣\0⊪\0⊸⋅⋎\0⋕⋳\0\0⋸⌢⍧⍢⍿\0⎆⎪⎴cute耻í䃭ƀ;iyݱ⊰⊵rc耻î䃮;䐸Ācx⊼⊿y;䐵cl耻¡䂡ĀfrΟ⋉;쀀𝔦rave耻ì䃬Ȁ;inoܾ⋝⋩⋮Āin⋢⋦nt;樌t;戭fin;槜ta;愩lig;䄳ƀaop⋾⌚⌝ƀcgt⌅⌈⌗r;䄫ƀelpܟ⌏⌓inåގarôܠh;䄱f;抷ed;䆵ʀ;cfotӴ⌬⌱⌽⍁are;愅inĀ;t⌸⌹戞ie;槝doô⌙ʀ;celpݗ⍌⍐⍛⍡al;抺Āgr⍕⍙eróᕣã⍍arhk;樗rod;樼Ȁcgpt⍯⍲⍶⍻y;䑑on;䄯f;쀀𝕚a;䎹uest耻¿䂿Āci⎊⎏r;쀀𝒾nʀ;EdsvӴ⎛⎝⎡ӳ;拹ot;拵Ā;v⎦⎧拴;拳Ā;iݷ⎮lde;䄩ǫ⎸\0⎼cy;䑖l耻ï䃯̀cfmosu⏌⏗⏜⏡⏧⏵Āiy⏑⏕rc;䄵;䐹r;쀀𝔧ath;䈷pf;쀀𝕛ǣ⏬\0⏱r;쀀𝒿rcy;䑘kcy;䑔Ѐacfghjos␋␖␢␧␭␱␵␻ppaĀ;v␓␔䎺;䏰Āey␛␠dil;䄷;䐺r;쀀𝔨reen;䄸cy;䑅cy;䑜pf;쀀𝕜cr;쀀𝓀஀ABEHabcdefghjlmnoprstuv⑰⒁⒆⒍⒑┎┽╚▀♎♞♥♹♽⚚⚲⛘❝❨➋⟀⠁⠒ƀart⑷⑺⑼rò৆òΕail;椛arr;椎Ā;gঔ⒋;檋ar;楢ॣ⒥\0⒪\0⒱\0\0\0\0\0⒵Ⓔ\0ⓆⓈⓍ\0⓹ute;䄺mptyv;榴raîࡌbda;䎻gƀ;dlࢎⓁⓃ;榑åࢎ;檅uo耻«䂫rЀ;bfhlpst࢙ⓞⓦⓩ⓫⓮⓱⓵Ā;f࢝ⓣs;椟s;椝ë≒p;憫l;椹im;楳l;憢ƀ;ae⓿─┄檫il;椙Ā;s┉┊檭;쀀⪭︀ƀabr┕┙┝rr;椌rk;杲Āak┢┬cĀek┨┪;䁻;䁛Āes┱┳;榋lĀdu┹┻;榏;榍Ȁaeuy╆╋╖╘ron;䄾Ādi═╔il;䄼ìࢰâ┩;䐻Ȁcqrs╣╦╭╽a;椶uoĀ;rนᝆĀdu╲╷har;楧shar;楋h;憲ʀ;fgqs▋▌উ◳◿扤tʀahlrt▘▤▷◂◨rrowĀ;t࢙□aé⓶arpoonĀdu▯▴own»њp»०eftarrows;懇ightƀahs◍◖◞rrowĀ;sࣴࢧarpoonó྘quigarro÷⇰hreetimes;拋ƀ;qs▋ও◺lanôবʀ;cdgsব☊☍☝☨c;檨otĀ;o☔☕橿Ā;r☚☛檁;檃Ā;e☢☥쀀⋚︀s;檓ʀadegs☳☹☽♉♋pproøⓆot;拖qĀgq♃♅ôউgtò⒌ôছiíলƀilr♕࣡♚sht;楼;쀀𝔩Ā;Eজ♣;檑š♩♶rĀdu▲♮Ā;l॥♳;楪lk;斄cy;䑙ʀ;achtੈ⚈⚋⚑⚖rò◁orneòᴈard;楫ri;旺Āio⚟⚤dot;䅀ustĀ;a⚬⚭掰che»⚭ȀEaes⚻⚽⛉⛔;扨pĀ;p⛃⛄檉rox»⛄Ā;q⛎⛏檇Ā;q⛎⚻im;拦Ѐabnoptwz⛩⛴⛷✚✯❁❇❐Ānr⛮⛱g;柬r;懽rëࣁgƀlmr⛿✍✔eftĀar০✇ightá৲apsto;柼ightá৽parrowĀlr✥✩efô⓭ight;憬ƀafl✶✹✽r;榅;쀀𝕝us;樭imes;樴š❋❏st;戗áፎƀ;e
f❗❘᠀旊nge»❘arĀ;l❤❥䀨t;榓ʀachmt❳❶❼➅➇ròࢨorneòᶌarĀ;d྘➃;業;怎ri;抿̀achiqt➘➝ੀ➢➮➻quo;怹r;쀀𝓁mƀ;egল➪➬;檍;檏Ābu┪➳oĀ;rฟ➹;怚rok;䅂萀<;cdhilqrࠫ⟒☹⟜⟠⟥⟪⟰Āci⟗⟙;檦r;橹reå◲mes;拉arr;楶uest;橻ĀPi⟵⟹ar;榖ƀ;ef⠀भ᠛旃rĀdu⠇⠍shar;楊har;楦Āen⠗⠡rtneqq;쀀≨︀Å⠞܀Dacdefhilnopsu⡀⡅⢂⢎⢓⢠⢥⢨⣚⣢⣤ઃ⣳⤂Dot;戺Ȁclpr⡎⡒⡣⡽r耻¯䂯Āet⡗⡙;時Ā;e⡞⡟朠se»⡟Ā;sျ⡨toȀ;dluျ⡳⡷⡻owîҌefôएðᏑker;斮Āoy⢇⢌mma;権;䐼ash;怔asuredangle»ᘦr;쀀𝔪o;愧ƀcdn⢯⢴⣉ro耻µ䂵Ȁ;acdᑤ⢽⣀⣄sôᚧir;櫰ot肻·Ƶusƀ;bd⣒ᤃ⣓戒Ā;uᴼ⣘;横ţ⣞⣡p;櫛ò−ðઁĀdp⣩⣮els;抧f;쀀𝕞Āct⣸⣽r;쀀𝓂pos»ᖝƀ;lm⤉⤊⤍䎼timap;抸ఀGLRVabcdefghijlmoprstuvw⥂⥓⥾⦉⦘⧚⧩⨕⨚⩘⩝⪃⪕⪤⪨⬄⬇⭄⭿⮮ⰴⱧⱼ⳩Āgt⥇⥋;쀀⋙̸Ā;v⥐௏쀀≫⃒ƀelt⥚⥲⥶ftĀar⥡⥧rrow;懍ightarrow;懎;쀀⋘̸Ā;v⥻ే쀀≪⃒ightarrow;懏ĀDd⦎⦓ash;抯ash;抮ʀbcnpt⦣⦧⦬⦱⧌la»˞ute;䅄g;쀀∠⃒ʀ;Eiop඄⦼⧀⧅⧈;쀀⩰̸d;쀀≋̸s;䅉roø඄urĀ;a⧓⧔普lĀ;s⧓ସdz⧟\0⧣p肻 ଷmpĀ;e௹ఀʀaeouy⧴⧾⨃⨐⨓ǰ⧹\0⧻;橃on;䅈dil;䅆ngĀ;dൾ⨊ot;쀀⩭̸p;橂;䐽ash;怓΀;Aadqsxஒ⨩⨭⨻⩁⩅⩐rr;懗rĀhr⨳⨶k;椤Ā;oᏲᏰot;쀀≐̸uiöୣĀei⩊⩎ar;椨í஘istĀ;s஠டr;쀀𝔫ȀEest௅⩦⩹⩼ƀ;qs஼⩭௡ƀ;qs஼௅⩴lanô௢ií௪Ā;rஶ⪁»ஷƀAap⪊⪍⪑rò⥱rr;憮ar;櫲ƀ;svྍ⪜ྌĀ;d⪡⪢拼;拺cy;䑚΀AEadest⪷⪺⪾⫂⫅⫶⫹rò⥦;쀀≦̸rr;憚r;急Ȁ;fqs఻⫎⫣⫯tĀar⫔⫙rro÷⫁ightarro÷⪐ƀ;qs఻⪺⫪lanôౕĀ;sౕ⫴»శiíౝĀ;rవ⫾iĀ;eచథiäඐĀpt⬌⬑f;쀀𝕟膀¬;in⬙⬚⬶䂬nȀ;Edvஉ⬤⬨⬮;쀀⋹̸ot;쀀⋵̸ǡஉ⬳⬵;拷;拶iĀ;vಸ⬼ǡಸ⭁⭃;拾;拽ƀaor⭋⭣⭩rȀ;ast୻⭕⭚⭟lleì୻l;쀀⫽⃥;쀀∂̸lint;樔ƀ;ceಒ⭰⭳uåಥĀ;cಘ⭸Ā;eಒ⭽ñಘȀAait⮈⮋⮝⮧rò⦈rrƀ;cw⮔⮕⮙憛;쀀⤳̸;쀀↝̸ghtarrow»⮕riĀ;eೋೖ΀chimpqu⮽⯍⯙⬄୸⯤⯯Ȁ;cerല⯆ഷ⯉uå൅;쀀𝓃ortɭ⬅\0\0⯖ará⭖mĀ;e൮⯟Ā;q൴൳suĀbp⯫⯭å೸åഋƀbcp⯶ⰑⰙȀ;Ees⯿ⰀഢⰄ抄;쀀⫅̸etĀ;eഛⰋqĀ;qണⰀcĀ;eലⰗñസȀ;EesⰢⰣൟⰧ抅;쀀⫆̸etĀ;e൘ⰮqĀ;qൠⰣȀgilrⰽⰿⱅⱇìௗlde耻ñ䃱çృiangleĀlrⱒⱜeftĀ;eచⱚñదightĀ;eೋⱥñ೗Ā;mⱬⱭ䎽ƀ;esⱴⱵⱹ䀣ro;愖p;怇ҀDHadgilrsⲏⲔⲙⲞⲣⲰⲶⳓⳣash;抭arr;椄p;쀀≍⃒ash;抬ĀetⲨⲬ;쀀≥⃒;쀀>⃒nfin;槞ƀAetⲽⳁⳅrr;椂;쀀≤⃒Ā;rⳊⳍ쀀<⃒ie;쀀⊴⃒ĀAtⳘⳜrr;椃rie;쀀⊵⃒im;쀀∼⃒ƀAan⳰⳴ⴂrr;懖rĀhr⳺⳽k;椣Ā;oᏧᏥear;椧ቓ᪕\0\0\0\0\0\0\0\0\0\0\0\0\0ⴭ\0ⴸⵈⵠⵥ⵲ⶄᬇ\0\0ⶍⶫ\0ⷈⷎ\0ⷜ⸙⸫⸾⹃Ācsⴱ᪗ute耻ó䃳ĀiyⴼⵅrĀ;c᪞ⵂ耻ô䃴;䐾ʀabios᪠ⵒⵗLjⵚlac;䅑v;樸old;榼lig;䅓Ācr⵩⵭ir;榿;쀀𝔬ͯ⵹\0\0⵼\0ⶂn;䋛ave耻ò䃲;槁Ābmⶈ෴ar;榵Ȁacitⶕ⶘ⶥⶨrò᪀Āir⶝ⶠr;榾oss;榻nå๒;槀ƀaeiⶱⶵⶹcr;䅍ga;䏉ƀcdnⷀⷅǍron;䎿;榶pf;쀀𝕠ƀaelⷔ⷗ǒr;榷rp;榹΀;adiosvⷪⷫⷮ⸈⸍⸐⸖戨rò᪆Ȁ;efmⷷⷸ⸂⸅橝rĀ;oⷾⷿ愴f»ⷿ耻ª䂪耻º䂺gof;抶r;橖lope;橗;橛ƀclo⸟⸡⸧ò⸁ash耻ø䃸l;折iŬⸯ⸴de耻õ䃵esĀ;aǛ⸺s;樶ml耻ö䃶bar;挽ૡ⹞\0⹽\0⺀⺝\0⺢⺹\0\0⻋ຜ\0⼓\0\0⼫⾼\0⿈rȀ;astЃ⹧⹲຅脀¶;l⹭⹮䂶leìЃɩ⹸\0\0⹻m;櫳;櫽y;䐿rʀcimpt⺋⺏⺓ᡥ⺗nt;䀥od;䀮il;怰enk;怱r;쀀𝔭ƀimo⺨⺰⺴Ā;v⺭⺮䏆;䏕maô੶ne;明ƀ;tv⺿⻀⻈䏀chfork»´;䏖Āau⻏⻟nĀck⻕⻝kĀ;h⇴⻛;愎ö⇴sҀ;abcdemst⻳⻴ᤈ⻹⻽⼄⼆⼊⼎䀫cir;樣ir;樢Āouᵀ⼂;樥;橲n
肻±ຝim;樦wo;樧ƀipu⼙⼠⼥ntint;樕f;쀀𝕡nd耻£䂣Ԁ;Eaceinosu່⼿⽁⽄⽇⾁⾉⾒⽾⾶;檳p;檷uå໙Ā;c໎⽌̀;acens່⽙⽟⽦⽨⽾pproø⽃urlyeñ໙ñ໎ƀaes⽯⽶⽺pprox;檹qq;檵im;拨iíໟmeĀ;s⾈ຮ怲ƀEas⽸⾐⽺ð⽵ƀdfp໬⾙⾯ƀals⾠⾥⾪lar;挮ine;挒urf;挓Ā;t໻⾴ï໻rel;抰Āci⿀⿅r;쀀𝓅;䏈ncsp;怈̀fiopsu⿚⋢⿟⿥⿫⿱r;쀀𝔮pf;쀀𝕢rime;恗cr;쀀𝓆ƀaeo⿸〉〓tĀei⿾々rnionóڰnt;樖stĀ;e【】䀿ñἙô༔઀ABHabcdefhilmnoprstux぀けさすムㄎㄫㅇㅢㅲㆎ㈆㈕㈤㈩㉘㉮㉲㊐㊰㊷ƀartぇおがròႳòϝail;検aròᱥar;楤΀cdenqrtとふへみわゔヌĀeuねぱ;쀀∽̱te;䅕iãᅮmptyv;榳gȀ;del࿑らるろ;榒;榥å࿑uo耻»䂻rր;abcfhlpstw࿜ガクシスゼゾダッデナp;極Ā;f࿠ゴs;椠;椳s;椞ë≝ð✮l;楅im;楴l;憣;憝Āaiパフil;椚oĀ;nホボ戶aló༞ƀabrョリヮrò៥rk;杳ĀakンヽcĀekヹ・;䁽;䁝Āes㄂㄄;榌lĀduㄊㄌ;榎;榐Ȁaeuyㄗㄜㄧㄩron;䅙Ādiㄡㄥil;䅗ì࿲âヺ;䑀Ȁclqsㄴㄷㄽㅄa;椷dhar;楩uoĀ;rȎȍh;憳ƀacgㅎㅟངlȀ;ipsླྀㅘㅛႜnåႻarôྩt;断ƀilrㅩဣㅮsht;楽;쀀𝔯ĀaoㅷㆆrĀduㅽㅿ»ѻĀ;l႑ㆄ;楬Ā;vㆋㆌ䏁;䏱ƀgns㆕ㇹㇼht̀ahlrstㆤㆰ㇂㇘㇤㇮rrowĀ;t࿜ㆭaéトarpoonĀduㆻㆿowîㅾp»႒eftĀah㇊㇐rrowó࿪arpoonóՑightarrows;應quigarro÷ニhreetimes;拌g;䋚ingdotseñἲƀahm㈍㈐㈓rò࿪aòՑ;怏oustĀ;a㈞㈟掱che»㈟mid;櫮Ȁabpt㈲㈽㉀㉒Ānr㈷㈺g;柭r;懾rëဃƀafl㉇㉊㉎r;榆;쀀𝕣us;樮imes;樵Āap㉝㉧rĀ;g㉣㉤䀩t;榔olint;樒arò㇣Ȁachq㉻㊀Ⴜ㊅quo;怺r;쀀𝓇Ābu・㊊oĀ;rȔȓƀhir㊗㊛㊠reåㇸmes;拊iȀ;efl㊪ၙᠡ㊫方tri;槎luhar;楨;愞ൡ㋕㋛㋟㌬㌸㍱\0㍺㎤\0\0㏬㏰\0㐨㑈㑚㒭㒱㓊㓱\0㘖\0\0㘳cute;䅛quï➺Ԁ;Eaceinpsyᇭ㋳㋵㋿㌂㌋㌏㌟㌦㌩;檴ǰ㋺\0㋼;檸on;䅡uåᇾĀ;dᇳ㌇il;䅟rc;䅝ƀEas㌖㌘㌛;檶p;檺im;择olint;樓iíሄ;䑁otƀ;be㌴ᵇ㌵担;橦΀Aacmstx㍆㍊㍗㍛㍞㍣㍭rr;懘rĀhr㍐㍒ë∨Ā;oਸ਼਴t耻§䂧i;䀻war;椩mĀin㍩ðnuóñt;朶rĀ;o㍶⁕쀀𝔰Ȁacoy㎂㎆㎑㎠rp;景Āhy㎋㎏cy;䑉;䑈rtɭ㎙\0\0㎜iäᑤaraì⹯耻­䂭Āgm㎨㎴maƀ;fv㎱㎲㎲䏃;䏂Ѐ;deglnprካ㏅㏉㏎㏖㏞㏡㏦ot;橪Ā;q኱ኰĀ;E㏓㏔檞;檠Ā;E㏛㏜檝;檟e;扆lus;樤arr;楲aròᄽȀaeit㏸㐈㐏㐗Āls㏽㐄lsetmé㍪hp;樳parsl;槤Ādlᑣ㐔e;挣Ā;e㐜㐝檪Ā;s㐢㐣檬;쀀⪬︀ƀflp㐮㐳㑂tcy;䑌Ā;b㐸㐹䀯Ā;a㐾㐿槄r;挿f;쀀𝕤aĀdr㑍ЂesĀ;u㑔㑕晠it»㑕ƀcsu㑠㑹㒟Āau㑥㑯pĀ;sᆈ㑫;쀀⊓︀pĀ;sᆴ㑵;쀀⊔︀uĀbp㑿㒏ƀ;esᆗᆜ㒆etĀ;eᆗ㒍ñᆝƀ;esᆨᆭ㒖etĀ;eᆨ㒝ñᆮƀ;afᅻ㒦ְrť㒫ֱ»ᅼaròᅈȀcemt㒹㒾㓂㓅r;쀀𝓈tmîñiì㐕aræᆾĀar㓎㓕rĀ;f㓔ឿ昆Āan㓚㓭ightĀep㓣㓪psiloîỠhé⺯s»⡒ʀbcmnp㓻㕞ሉ㖋㖎Ҁ;Edemnprs㔎㔏㔑㔕㔞㔣㔬㔱㔶抂;櫅ot;檽Ā;dᇚ㔚ot;櫃ult;櫁ĀEe㔨㔪;櫋;把lus;檿arr;楹ƀeiu㔽㕒㕕tƀ;en㔎㕅㕋qĀ;qᇚ㔏eqĀ;q㔫㔨m;櫇Ābp㕚㕜;櫕;櫓c̀;acensᇭ㕬㕲㕹㕻㌦pproø㋺urlyeñᇾñᇳƀaes㖂㖈㌛pproø㌚qñ㌗g;晪ڀ123;Edehlmnps㖩㖬㖯ሜ㖲㖴㗀㗉㗕㗚㗟㗨㗭耻¹䂹耻²䂲耻³䂳;櫆Āos㖹㖼t;檾ub;櫘Ā;dሢ㗅ot;櫄sĀou㗏㗒l;柉b;櫗arr;楻ult;櫂ĀEe㗤㗦;櫌;抋lus;櫀ƀeiu㗴㘉㘌tƀ;enሜ㗼㘂qĀ;qሢ㖲eqĀ;q㗧㗤m;櫈Ābp㘑㘓;櫔;櫖ƀAan㘜㘠㘭rr;懙rĀhr㘦㘨ë∮Ā;oਫ਩war;椪lig耻ß䃟௡㙑㙝㙠ዎ㙳㙹\0㙾㛂\0\0\0\0\0㛛㜃\0㜉㝬\0\0\0㞇ɲ㙖\0\0㙛get;挖;䏄rë๟ƀaey㙦㙫㙰ron;䅥dil;䅣;䑂lrec;挕r;쀀𝔱Ȁeiko㚆㚝㚵㚼Dz㚋\0㚑eĀ4fኄኁaƀ;sv㚘㚙㚛䎸ym;䏑Ācn㚢㚲kĀas㚨㚮pproø዁im»ኬsðኞĀas㚺㚮ð዁rn耻þ䃾Ǭ̟㛆⋧es膀×
;bd㛏㛐㛘䃗Ā;aᤏ㛕r;樱;樰ƀeps㛡㛣㜀á⩍Ȁ;bcf҆㛬㛰㛴ot;挶ir;櫱Ā;o㛹㛼쀀𝕥rk;櫚á㍢rime;怴ƀaip㜏㜒㝤dåቈ΀adempst㜡㝍㝀㝑㝗㝜㝟ngleʀ;dlqr㜰㜱㜶㝀㝂斵own»ᶻeftĀ;e⠀㜾ñम;扜ightĀ;e㊪㝋ñၚot;旬inus;樺lus;樹b;槍ime;樻ezium;揢ƀcht㝲㝽㞁Āry㝷㝻;쀀𝓉;䑆cy;䑛rok;䅧Āio㞋㞎xô᝷headĀlr㞗㞠eftarro÷ࡏightarrow»ཝऀAHabcdfghlmoprstuw㟐㟓㟗㟤㟰㟼㠎㠜㠣㠴㡑㡝㡫㢩㣌㣒㣪㣶ròϭar;楣Ācr㟜㟢ute耻ú䃺òᅐrǣ㟪\0㟭y;䑞ve;䅭Āiy㟵㟺rc耻û䃻;䑃ƀabh㠃㠆㠋ròᎭlac;䅱aòᏃĀir㠓㠘sht;楾;쀀𝔲rave耻ù䃹š㠧㠱rĀlr㠬㠮»ॗ»ႃlk;斀Āct㠹㡍ɯ㠿\0\0㡊rnĀ;e㡅㡆挜r»㡆op;挏ri;旸Āal㡖㡚cr;䅫肻¨͉Āgp㡢㡦on;䅳f;쀀𝕦̀adhlsuᅋ㡸㡽፲㢑㢠ownáᎳarpoonĀlr㢈㢌efô㠭ighô㠯iƀ;hl㢙㢚㢜䏅»ᏺon»㢚parrows;懈ƀcit㢰㣄㣈ɯ㢶\0\0㣁rnĀ;e㢼㢽挝r»㢽op;挎ng;䅯ri;旹cr;쀀𝓊ƀdir㣙㣝㣢ot;拰lde;䅩iĀ;f㜰㣨»᠓Āam㣯㣲rò㢨l耻ü䃼angle;榧ހABDacdeflnoprsz㤜㤟㤩㤭㦵㦸㦽㧟㧤㧨㧳㧹㧽㨁㨠ròϷarĀ;v㤦㤧櫨;櫩asèϡĀnr㤲㤷grt;榜΀eknprst㓣㥆㥋㥒㥝㥤㦖appá␕othinçẖƀhir㓫⻈㥙opô⾵Ā;hᎷ㥢ïㆍĀiu㥩㥭gmá㎳Ābp㥲㦄setneqĀ;q㥽㦀쀀⊊︀;쀀⫋︀setneqĀ;q㦏㦒쀀⊋︀;쀀⫌︀Āhr㦛㦟etá㚜iangleĀlr㦪㦯eft»थight»ၑy;䐲ash»ံƀelr㧄㧒㧗ƀ;beⷪ㧋㧏ar;抻q;扚lip;拮Ābt㧜ᑨaòᑩr;쀀𝔳tré㦮suĀbp㧯㧱»ജ»൙pf;쀀𝕧roð໻tré㦴Ācu㨆㨋r;쀀𝓋Ābp㨐㨘nĀEe㦀㨖»㥾nĀEe㦒㨞»㦐igzag;榚΀cefoprs㨶㨻㩖㩛㩔㩡㩪irc;䅵Ādi㩀㩑Ābg㩅㩉ar;機eĀ;qᗺ㩏;扙erp;愘r;쀀𝔴pf;쀀𝕨Ā;eᑹ㩦atèᑹcr;쀀𝓌ૣណ㪇\0㪋\0㪐㪛\0\0㪝㪨㪫㪯\0\0㫃㫎\0㫘ៜ៟tré៑r;쀀𝔵ĀAa㪔㪗ròσrò৶;䎾ĀAa㪡㪤ròθrò৫að✓is;拻ƀdptឤ㪵㪾Āfl㪺ឩ;쀀𝕩imåឲĀAa㫇㫊ròώròਁĀcq㫒ីr;쀀𝓍Āpt៖㫜ré។Ѐacefiosu㫰㫽㬈㬌㬑㬕㬛㬡cĀuy㫶㫻te耻ý䃽;䑏Āiy㬂㬆rc;䅷;䑋n耻¥䂥r;쀀𝔶cy;䑗pf;쀀𝕪cr;쀀𝓎Ācm㬦㬩y;䑎l耻ÿ䃿Ԁacdefhiosw㭂㭈㭔㭘㭤㭩㭭㭴㭺㮀cute;䅺Āay㭍㭒ron;䅾;䐷ot;䅼Āet㭝㭡træᕟa;䎶r;쀀𝔷cy;䐶grarr;懝pf;쀀𝕫cr;쀀𝓏Ājn㮅㮇;怍j;怌'.split("").map((e=>e.charCodeAt(0)))),k$=new Map([[0,65533],[128,8364],[130,8218],[131,402],[132,8222],[133,8230],[134,8224],[135,8225],[136,710],[137,8240],[138,352],[139,8249],[140,338],[142,381],[145,8216],[146,8217],[147,8220],[148,8221],[149,8226],[150,8211],[151,8212],[152,732],[153,8482],[154,353],[155,8250],[156,339],[158,382],[159,376]]);var S$,_$,T$,E$,A$,C$,$$,P$,D$,I$,M$,N$,R$,L$,B$,j$,U$,z$;function F$(e){return e>=S$.ZERO&&e<=S$.NINE}function Z$(e){return e===S$.EQUALS||function(e){return 
e>=S$.UPPER_A&&e<=S$.UPPER_Z||e>=S$.LOWER_A&&e<=S$.LOWER_Z||F$(e)}(e)}(_$=S$||(S$={}))[_$.NUM=35]="NUM",_$[_$.SEMI=59]="SEMI",_$[_$.EQUALS=61]="EQUALS",_$[_$.ZERO=48]="ZERO",_$[_$.NINE=57]="NINE",_$[_$.LOWER_A=97]="LOWER_A",_$[_$.LOWER_F=102]="LOWER_F",_$[_$.LOWER_X=120]="LOWER_X",_$[_$.LOWER_Z=122]="LOWER_Z",_$[_$.UPPER_A=65]="UPPER_A",_$[_$.UPPER_F=70]="UPPER_F",_$[_$.UPPER_Z=90]="UPPER_Z",(E$=T$||(T$={}))[E$.VALUE_LENGTH=49152]="VALUE_LENGTH",E$[E$.BRANCH_LENGTH=16256]="BRANCH_LENGTH",E$[E$.JUMP_TABLE=127]="JUMP_TABLE",(C$=A$||(A$={}))[C$.EntityStart=0]="EntityStart",C$[C$.NumericStart=1]="NumericStart",C$[C$.NumericDecimal=2]="NumericDecimal",C$[C$.NumericHex=3]="NumericHex",C$[C$.NamedEntity=4]="NamedEntity",(P$=$$||($$={}))[P$.Legacy=0]="Legacy",P$[P$.Strict=1]="Strict",P$[P$.Attribute=2]="Attribute";class Q${constructor(e,t,n){this.decodeTree=e,this.emitCodePoint=t,this.errors=n,this.state=A$.EntityStart,this.consumed=1,this.result=0,this.treeIndex=0,this.excess=1,this.decodeMode=$$.Strict}startEntity(e){this.decodeMode=e,this.state=A$.EntityStart,this.result=0,this.treeIndex=0,this.excess=1,this.consumed=1}write(e,t){switch(this.state){case A$.EntityStart:return e.charCodeAt(t)===S$.NUM?(this.state=A$.NumericStart,this.consumed+=1,this.stateNumericStart(e,t+1)):(this.state=A$.NamedEntity,this.stateNamedEntity(e,t));case A$.NumericStart:return this.stateNumericStart(e,t);case A$.NumericDecimal:return this.stateNumericDecimal(e,t);case A$.NumericHex:return this.stateNumericHex(e,t);case A$.NamedEntity:return this.stateNamedEntity(e,t)}}stateNumericStart(e,t){return t>=e.length?-1:(32|e.charCodeAt(t))===S$.LOWER_X?(this.state=A$.NumericHex,this.consumed+=1,this.stateNumericHex(e,t+1)):(this.state=A$.NumericDecimal,this.stateNumericDecimal(e,t))}addToNumericResult(e,t,n,r){if(t!==n){const a=n-t;this.result=this.result*Math.pow(r,a)+Number.parseInt(e.substr(t,a),r),this.consumed+=a}}stateNumericHex(e,t){const 
n=t;for(;t=S$.UPPER_A&&r<=S$.UPPER_F||r>=S$.LOWER_A&&r<=S$.LOWER_F)))return this.addToNumericResult(e,n,t,16),this.emitNumericEntity(a,3);t+=1}var r;return this.addToNumericResult(e,n,t,16),-1}stateNumericDecimal(e,t){const n=t;for(;t=55296&&e<=57343||e>1114111?65533:null!==(t=k$.get(e))&&void 0!==t?t:e}(this.result),this.consumed),this.errors&&(e!==S$.SEMI&&this.errors.missingSemicolonAfterCharacterReference(),this.errors.validateNumericCharacterReference(this.result)),this.consumed}stateNamedEntity(e,t){const{decodeTree:n}=this;let r=n[this.treeIndex],a=(r&T$.VALUE_LENGTH)>>14;for(;t>14,0!==a){if(o===S$.SEMI)return this.emitNamedEntityData(this.treeIndex,a,this.consumed+this.excess);this.decodeMode!==$$.Strict&&(this.result=this.treeIndex,this.consumed+=this.excess,this.excess=0)}}return-1}emitNotTerminatedNamedEntity(){var e;const{result:t,decodeTree:n}=this,r=(n[t]&T$.VALUE_LENGTH)>>14;return this.emitNamedEntityData(t,r,this.consumed),null===(e=this.errors)||void 0===e||e.missingSemicolonAfterCharacterReference(),this.consumed}emitNamedEntityData(e,t,n){const{decodeTree:r}=this;return this.emitCodePoint(1===t?r[e]&~T$.VALUE_LENGTH:r[e+1],n),3===t&&this.emitCodePoint(r[e+2],n),n}end(){var e;switch(this.state){case A$.NamedEntity:return 0===this.result||this.decodeMode===$$.Attribute&&this.result!==this.treeIndex?0:this.emitNotTerminatedNamedEntity();case A$.NumericDecimal:return this.emitNumericEntity(0,2);case A$.NumericHex:return this.emitNumericEntity(0,3);case A$.NumericStart:return null===(e=this.errors)||void 0===e||e.absenceOfDigitsInNumericCharacterReference(this.consumed),0;case A$.EntityStart:return 0}}}function H$(e,t,n,r){const a=(t&T$.BRANCH_LENGTH)>>7,o=t&T$.JUMP_TABLE;if(0===a)return 0!==o&&r===o?n:-1;if(o){const t=r-o;return t<0||t>=a?-1:e[n+t]-1}let i=n,s=i+a-1;for(;i<=s;){const t=i+s>>>1,n=e[t];if(nr))return 
e[t+a];s=t-1}}return-1}(I$=D$||(D$={})).HTML="http://www.w3.org/1999/xhtml",I$.MATHML="http://www.w3.org/1998/Math/MathML",I$.SVG="http://www.w3.org/2000/svg",I$.XLINK="http://www.w3.org/1999/xlink",I$.XML="http://www.w3.org/XML/1998/namespace",I$.XMLNS="http://www.w3.org/2000/xmlns/",(N$=M$||(M$={})).TYPE="type",N$.ACTION="action",N$.ENCODING="encoding",N$.PROMPT="prompt",N$.NAME="name",N$.COLOR="color",N$.FACE="face",N$.SIZE="size",(L$=R$||(R$={})).NO_QUIRKS="no-quirks",L$.QUIRKS="quirks",L$.LIMITED_QUIRKS="limited-quirks",(j$=B$||(B$={})).A="a",j$.ADDRESS="address",j$.ANNOTATION_XML="annotation-xml",j$.APPLET="applet",j$.AREA="area",j$.ARTICLE="article",j$.ASIDE="aside",j$.B="b",j$.BASE="base",j$.BASEFONT="basefont",j$.BGSOUND="bgsound",j$.BIG="big",j$.BLOCKQUOTE="blockquote",j$.BODY="body",j$.BR="br",j$.BUTTON="button",j$.CAPTION="caption",j$.CENTER="center",j$.CODE="code",j$.COL="col",j$.COLGROUP="colgroup",j$.DD="dd",j$.DESC="desc",j$.DETAILS="details",j$.DIALOG="dialog",j$.DIR="dir",j$.DIV="div",j$.DL="dl",j$.DT="dt",j$.EM="em",j$.EMBED="embed",j$.FIELDSET="fieldset",j$.FIGCAPTION="figcaption",j$.FIGURE="figure",j$.FONT="font",j$.FOOTER="footer",j$.FOREIGN_OBJECT="foreignObject",j$.FORM="form",j$.FRAME="frame",j$.FRAMESET="frameset",j$.H1="h1",j$.H2="h2",j$.H3="h3",j$.H4="h4",j$.H5="h5",j$.H6="h6",j$.HEAD="head",j$.HEADER="header",j$.HGROUP="hgroup",j$.HR="hr",j$.HTML="html",j$.I="i",j$.IMG="img",j$.IMAGE="image",j$.INPUT="input",j$.IFRAME="iframe",j$.KEYGEN="keygen",j$.LABEL="label",j$.LI="li",j$.LINK="link",j$.LISTING="listing",j$.MAIN="main",j$.MALIGNMARK="malignmark",j$.MARQUEE="marquee",j$.MATH="math",j$.MENU="menu",j$.META="meta",j$.MGLYPH="mglyph",j$.MI="mi",j$.MO="mo",j$.MN="mn",j$.MS="ms",j$.MTEXT="mtext",j$.NAV="nav",j$.NOBR="nobr",j$.NOFRAMES="noframes",j$.NOEMBED="noembed",j$.NOSCRIPT="noscript",j$.OBJECT="object",j$.OL="ol",j$.OPTGROUP="optgroup",j$.OPTION="option",j$.P="p",j$.PARAM="param",j$.PLAINTEXT="plaintext",j$.PRE="pre",j$.RB="rb",j$.RP="
rp",j$.RT="rt",j$.RTC="rtc",j$.RUBY="ruby",j$.S="s",j$.SCRIPT="script",j$.SEARCH="search",j$.SECTION="section",j$.SELECT="select",j$.SOURCE="source",j$.SMALL="small",j$.SPAN="span",j$.STRIKE="strike",j$.STRONG="strong",j$.STYLE="style",j$.SUB="sub",j$.SUMMARY="summary",j$.SUP="sup",j$.TABLE="table",j$.TBODY="tbody",j$.TEMPLATE="template",j$.TEXTAREA="textarea",j$.TFOOT="tfoot",j$.TD="td",j$.TH="th",j$.THEAD="thead",j$.TITLE="title",j$.TR="tr",j$.TRACK="track",j$.TT="tt",j$.U="u",j$.UL="ul",j$.SVG="svg",j$.VAR="var",j$.WBR="wbr",j$.XMP="xmp",(z$=U$||(U$={}))[z$.UNKNOWN=0]="UNKNOWN",z$[z$.A=1]="A",z$[z$.ADDRESS=2]="ADDRESS",z$[z$.ANNOTATION_XML=3]="ANNOTATION_XML",z$[z$.APPLET=4]="APPLET",z$[z$.AREA=5]="AREA",z$[z$.ARTICLE=6]="ARTICLE",z$[z$.ASIDE=7]="ASIDE",z$[z$.B=8]="B",z$[z$.BASE=9]="BASE",z$[z$.BASEFONT=10]="BASEFONT",z$[z$.BGSOUND=11]="BGSOUND",z$[z$.BIG=12]="BIG",z$[z$.BLOCKQUOTE=13]="BLOCKQUOTE",z$[z$.BODY=14]="BODY",z$[z$.BR=15]="BR",z$[z$.BUTTON=16]="BUTTON",z$[z$.CAPTION=17]="CAPTION",z$[z$.CENTER=18]="CENTER",z$[z$.CODE=19]="CODE",z$[z$.COL=20]="COL",z$[z$.COLGROUP=21]="COLGROUP",z$[z$.DD=22]="DD",z$[z$.DESC=23]="DESC",z$[z$.DETAILS=24]="DETAILS",z$[z$.DIALOG=25]="DIALOG",z$[z$.DIR=26]="DIR",z$[z$.DIV=27]="DIV",z$[z$.DL=28]="DL",z$[z$.DT=29]="DT",z$[z$.EM=30]="EM",z$[z$.EMBED=31]="EMBED",z$[z$.FIELDSET=32]="FIELDSET",z$[z$.FIGCAPTION=33]="FIGCAPTION",z$[z$.FIGURE=34]="FIGURE",z$[z$.FONT=35]="FONT",z$[z$.FOOTER=36]="FOOTER",z$[z$.FOREIGN_OBJECT=37]="FOREIGN_OBJECT",z$[z$.FORM=38]="FORM",z$[z$.FRAME=39]="FRAME",z$[z$.FRAMESET=40]="FRAMESET",z$[z$.H1=41]="H1",z$[z$.H2=42]="H2",z$[z$.H3=43]="H3",z$[z$.H4=44]="H4",z$[z$.H5=45]="H5",z$[z$.H6=46]="H6",z$[z$.HEAD=47]="HEAD",z$[z$.HEADER=48]="HEADER",z$[z$.HGROUP=49]="HGROUP",z$[z$.HR=50]="HR",z$[z$.HTML=51]="HTML",z$[z$.I=52]="I",z$[z$.IMG=53]="IMG",z$[z$.IMAGE=54]="IMAGE",z$[z$.INPUT=55]="INPUT",z$[z$.IFRAME=56]="IFRAME",z$[z$.KEYGEN=57]="KEYGEN",z$[z$.LABEL=58]="LABEL",z$[z$.LI=59]="LI",z$[z$.LINK=60]="LINK",z$[
z$.LISTING=61]="LISTING",z$[z$.MAIN=62]="MAIN",z$[z$.MALIGNMARK=63]="MALIGNMARK",z$[z$.MARQUEE=64]="MARQUEE",z$[z$.MATH=65]="MATH",z$[z$.MENU=66]="MENU",z$[z$.META=67]="META",z$[z$.MGLYPH=68]="MGLYPH",z$[z$.MI=69]="MI",z$[z$.MO=70]="MO",z$[z$.MN=71]="MN",z$[z$.MS=72]="MS",z$[z$.MTEXT=73]="MTEXT",z$[z$.NAV=74]="NAV",z$[z$.NOBR=75]="NOBR",z$[z$.NOFRAMES=76]="NOFRAMES",z$[z$.NOEMBED=77]="NOEMBED",z$[z$.NOSCRIPT=78]="NOSCRIPT",z$[z$.OBJECT=79]="OBJECT",z$[z$.OL=80]="OL",z$[z$.OPTGROUP=81]="OPTGROUP",z$[z$.OPTION=82]="OPTION",z$[z$.P=83]="P",z$[z$.PARAM=84]="PARAM",z$[z$.PLAINTEXT=85]="PLAINTEXT",z$[z$.PRE=86]="PRE",z$[z$.RB=87]="RB",z$[z$.RP=88]="RP",z$[z$.RT=89]="RT",z$[z$.RTC=90]="RTC",z$[z$.RUBY=91]="RUBY",z$[z$.S=92]="S",z$[z$.SCRIPT=93]="SCRIPT",z$[z$.SEARCH=94]="SEARCH",z$[z$.SECTION=95]="SECTION",z$[z$.SELECT=96]="SELECT",z$[z$.SOURCE=97]="SOURCE",z$[z$.SMALL=98]="SMALL",z$[z$.SPAN=99]="SPAN",z$[z$.STRIKE=100]="STRIKE",z$[z$.STRONG=101]="STRONG",z$[z$.STYLE=102]="STYLE",z$[z$.SUB=103]="SUB",z$[z$.SUMMARY=104]="SUMMARY",z$[z$.SUP=105]="SUP",z$[z$.TABLE=106]="TABLE",z$[z$.TBODY=107]="TBODY",z$[z$.TEMPLATE=108]="TEMPLATE",z$[z$.TEXTAREA=109]="TEXTAREA",z$[z$.TFOOT=110]="TFOOT",z$[z$.TD=111]="TD",z$[z$.TH=112]="TH",z$[z$.THEAD=113]="THEAD",z$[z$.TITLE=114]="TITLE",z$[z$.TR=115]="TR",z$[z$.TRACK=116]="TRACK",z$[z$.TT=117]="TT",z$[z$.U=118]="U",z$[z$.UL=119]="UL",z$[z$.SVG=120]="SVG",z$[z$.VAR=121]="VAR",z$[z$.WBR=122]="WBR",z$[z$.XMP=123]="XMP";const V$=new 
Map([[B$.A,U$.A],[B$.ADDRESS,U$.ADDRESS],[B$.ANNOTATION_XML,U$.ANNOTATION_XML],[B$.APPLET,U$.APPLET],[B$.AREA,U$.AREA],[B$.ARTICLE,U$.ARTICLE],[B$.ASIDE,U$.ASIDE],[B$.B,U$.B],[B$.BASE,U$.BASE],[B$.BASEFONT,U$.BASEFONT],[B$.BGSOUND,U$.BGSOUND],[B$.BIG,U$.BIG],[B$.BLOCKQUOTE,U$.BLOCKQUOTE],[B$.BODY,U$.BODY],[B$.BR,U$.BR],[B$.BUTTON,U$.BUTTON],[B$.CAPTION,U$.CAPTION],[B$.CENTER,U$.CENTER],[B$.CODE,U$.CODE],[B$.COL,U$.COL],[B$.COLGROUP,U$.COLGROUP],[B$.DD,U$.DD],[B$.DESC,U$.DESC],[B$.DETAILS,U$.DETAILS],[B$.DIALOG,U$.DIALOG],[B$.DIR,U$.DIR],[B$.DIV,U$.DIV],[B$.DL,U$.DL],[B$.DT,U$.DT],[B$.EM,U$.EM],[B$.EMBED,U$.EMBED],[B$.FIELDSET,U$.FIELDSET],[B$.FIGCAPTION,U$.FIGCAPTION],[B$.FIGURE,U$.FIGURE],[B$.FONT,U$.FONT],[B$.FOOTER,U$.FOOTER],[B$.FOREIGN_OBJECT,U$.FOREIGN_OBJECT],[B$.FORM,U$.FORM],[B$.FRAME,U$.FRAME],[B$.FRAMESET,U$.FRAMESET],[B$.H1,U$.H1],[B$.H2,U$.H2],[B$.H3,U$.H3],[B$.H4,U$.H4],[B$.H5,U$.H5],[B$.H6,U$.H6],[B$.HEAD,U$.HEAD],[B$.HEADER,U$.HEADER],[B$.HGROUP,U$.HGROUP],[B$.HR,U$.HR],[B$.HTML,U$.HTML],[B$.I,U$.I],[B$.IMG,U$.IMG],[B$.IMAGE,U$.IMAGE],[B$.INPUT,U$.INPUT],[B$.IFRAME,U$.IFRAME],[B$.KEYGEN,U$.KEYGEN],[B$.LABEL,U$.LABEL],[B$.LI,U$.LI],[B$.LINK,U$.LINK],[B$.LISTING,U$.LISTING],[B$.MAIN,U$.MAIN],[B$.MALIGNMARK,U$.MALIGNMARK],[B$.MARQUEE,U$.MARQUEE],[B$.MATH,U$.MATH],[B$.MENU,U$.MENU],[B$.META,U$.META],[B$.MGLYPH,U$.MGLYPH],[B$.MI,U$.MI],[B$.MO,U$.MO],[B$.MN,U$.MN],[B$.MS,U$.MS],[B$.MTEXT,U$.MTEXT],[B$.NAV,U$.NAV],[B$.NOBR,U$.NOBR],[B$.NOFRAMES,U$.NOFRAMES],[B$.NOEMBED,U$.NOEMBED],[B$.NOSCRIPT,U$.NOSCRIPT],[B$.OBJECT,U$.OBJECT],[B$.OL,U$.OL],[B$.OPTGROUP,U$.OPTGROUP],[B$.OPTION,U$.OPTION],[B$.P,U$.P],[B$.PARAM,U$.PARAM],[B$.PLAINTEXT,U$.PLAINTEXT],[B$.PRE,U$.PRE],[B$.RB,U$.RB],[B$.RP,U$.RP],[B$.RT,U$.RT],[B$.RTC,U$.RTC],[B$.RUBY,U$.RUBY],[B$.S,U$.S],[B$.SCRIPT,U$.SCRIPT],[B$.SEARCH,U$.SEARCH],[B$.SECTION,U$.SECTION],[B$.SELECT,U$.SELECT],[B$.SOURCE,U$.SOURCE],[B$.SMALL,U$.SMALL],[B$.SPAN,U$.SPAN],[B$.STRIKE,U$.STRIKE],[B$.STRONG,U$.STRONG],[B$.STYLE,U$.STYL
E],[B$.SUB,U$.SUB],[B$.SUMMARY,U$.SUMMARY],[B$.SUP,U$.SUP],[B$.TABLE,U$.TABLE],[B$.TBODY,U$.TBODY],[B$.TEMPLATE,U$.TEMPLATE],[B$.TEXTAREA,U$.TEXTAREA],[B$.TFOOT,U$.TFOOT],[B$.TD,U$.TD],[B$.TH,U$.TH],[B$.THEAD,U$.THEAD],[B$.TITLE,U$.TITLE],[B$.TR,U$.TR],[B$.TRACK,U$.TRACK],[B$.TT,U$.TT],[B$.U,U$.U],[B$.UL,U$.UL],[B$.SVG,U$.SVG],[B$.VAR,U$.VAR],[B$.WBR,U$.WBR],[B$.XMP,U$.XMP]]);function q$(e){var t;return null!==(t=V$.get(e))&&void 0!==t?t:U$.UNKNOWN}const W$=U$,X$={[D$.HTML]:new Set([W$.ADDRESS,W$.APPLET,W$.AREA,W$.ARTICLE,W$.ASIDE,W$.BASE,W$.BASEFONT,W$.BGSOUND,W$.BLOCKQUOTE,W$.BODY,W$.BR,W$.BUTTON,W$.CAPTION,W$.CENTER,W$.COL,W$.COLGROUP,W$.DD,W$.DETAILS,W$.DIR,W$.DIV,W$.DL,W$.DT,W$.EMBED,W$.FIELDSET,W$.FIGCAPTION,W$.FIGURE,W$.FOOTER,W$.FORM,W$.FRAME,W$.FRAMESET,W$.H1,W$.H2,W$.H3,W$.H4,W$.H5,W$.H6,W$.HEAD,W$.HEADER,W$.HGROUP,W$.HR,W$.HTML,W$.IFRAME,W$.IMG,W$.INPUT,W$.LI,W$.LINK,W$.LISTING,W$.MAIN,W$.MARQUEE,W$.MENU,W$.META,W$.NAV,W$.NOEMBED,W$.NOFRAMES,W$.NOSCRIPT,W$.OBJECT,W$.OL,W$.P,W$.PARAM,W$.PLAINTEXT,W$.PRE,W$.SCRIPT,W$.SECTION,W$.SELECT,W$.SOURCE,W$.STYLE,W$.SUMMARY,W$.TABLE,W$.TBODY,W$.TD,W$.TEMPLATE,W$.TEXTAREA,W$.TFOOT,W$.TH,W$.THEAD,W$.TITLE,W$.TR,W$.TRACK,W$.UL,W$.WBR,W$.XMP]),[D$.MATHML]:new Set([W$.MI,W$.MO,W$.MN,W$.MS,W$.MTEXT,W$.ANNOTATION_XML]),[D$.SVG]:new Set([W$.TITLE,W$.FOREIGN_OBJECT,W$.DESC]),[D$.XLINK]:new Set,[D$.XML]:new Set,[D$.XMLNS]:new Set},G$=new Set([W$.H1,W$.H2,W$.H3,W$.H4,W$.H5,W$.H6]);var 
Y$,K$;B$.STYLE,B$.SCRIPT,B$.XMP,B$.IFRAME,B$.NOEMBED,B$.NOFRAMES,B$.PLAINTEXT,(K$=Y$||(Y$={}))[K$.DATA=0]="DATA",K$[K$.RCDATA=1]="RCDATA",K$[K$.RAWTEXT=2]="RAWTEXT",K$[K$.SCRIPT_DATA=3]="SCRIPT_DATA",K$[K$.PLAINTEXT=4]="PLAINTEXT",K$[K$.TAG_OPEN=5]="TAG_OPEN",K$[K$.END_TAG_OPEN=6]="END_TAG_OPEN",K$[K$.TAG_NAME=7]="TAG_NAME",K$[K$.RCDATA_LESS_THAN_SIGN=8]="RCDATA_LESS_THAN_SIGN",K$[K$.RCDATA_END_TAG_OPEN=9]="RCDATA_END_TAG_OPEN",K$[K$.RCDATA_END_TAG_NAME=10]="RCDATA_END_TAG_NAME",K$[K$.RAWTEXT_LESS_THAN_SIGN=11]="RAWTEXT_LESS_THAN_SIGN",K$[K$.RAWTEXT_END_TAG_OPEN=12]="RAWTEXT_END_TAG_OPEN",K$[K$.RAWTEXT_END_TAG_NAME=13]="RAWTEXT_END_TAG_NAME",K$[K$.SCRIPT_DATA_LESS_THAN_SIGN=14]="SCRIPT_DATA_LESS_THAN_SIGN",K$[K$.SCRIPT_DATA_END_TAG_OPEN=15]="SCRIPT_DATA_END_TAG_OPEN",K$[K$.SCRIPT_DATA_END_TAG_NAME=16]="SCRIPT_DATA_END_TAG_NAME",K$[K$.SCRIPT_DATA_ESCAPE_START=17]="SCRIPT_DATA_ESCAPE_START",K$[K$.SCRIPT_DATA_ESCAPE_START_DASH=18]="SCRIPT_DATA_ESCAPE_START_DASH",K$[K$.SCRIPT_DATA_ESCAPED=19]="SCRIPT_DATA_ESCAPED",K$[K$.SCRIPT_DATA_ESCAPED_DASH=20]="SCRIPT_DATA_ESCAPED_DASH",K$[K$.SCRIPT_DATA_ESCAPED_DASH_DASH=21]="SCRIPT_DATA_ESCAPED_DASH_DASH",K$[K$.SCRIPT_DATA_ESCAPED_LESS_THAN_SIGN=22]="SCRIPT_DATA_ESCAPED_LESS_THAN_SIGN",K$[K$.SCRIPT_DATA_ESCAPED_END_TAG_OPEN=23]="SCRIPT_DATA_ESCAPED_END_TAG_OPEN",K$[K$.SCRIPT_DATA_ESCAPED_END_TAG_NAME=24]="SCRIPT_DATA_ESCAPED_END_TAG_NAME",K$[K$.SCRIPT_DATA_DOUBLE_ESCAPE_START=25]="SCRIPT_DATA_DOUBLE_ESCAPE_START",K$[K$.SCRIPT_DATA_DOUBLE_ESCAPED=26]="SCRIPT_DATA_DOUBLE_ESCAPED",K$[K$.SCRIPT_DATA_DOUBLE_ESCAPED_DASH=27]="SCRIPT_DATA_DOUBLE_ESCAPED_DASH",K$[K$.SCRIPT_DATA_DOUBLE_ESCAPED_DASH_DASH=28]="SCRIPT_DATA_DOUBLE_ESCAPED_DASH_DASH",K$[K$.SCRIPT_DATA_DOUBLE_ESCAPED_LESS_THAN_SIGN=29]="SCRIPT_DATA_DOUBLE_ESCAPED_LESS_THAN_SIGN",K$[K$.SCRIPT_DATA_DOUBLE_ESCAPE_END=30]="SCRIPT_DATA_DOUBLE_ESCAPE_END",K$[K$.BEFORE_ATTRIBUTE_NAME=31]="BEFORE_ATTRIBUTE_NAME",K$[K$.ATTRIBUTE_NAME=32]="ATTRIBUTE_NAME",K$[K$.AFTER_ATTRIBUTE_NAME=33]="
AFTER_ATTRIBUTE_NAME",K$[K$.BEFORE_ATTRIBUTE_VALUE=34]="BEFORE_ATTRIBUTE_VALUE",K$[K$.ATTRIBUTE_VALUE_DOUBLE_QUOTED=35]="ATTRIBUTE_VALUE_DOUBLE_QUOTED",K$[K$.ATTRIBUTE_VALUE_SINGLE_QUOTED=36]="ATTRIBUTE_VALUE_SINGLE_QUOTED",K$[K$.ATTRIBUTE_VALUE_UNQUOTED=37]="ATTRIBUTE_VALUE_UNQUOTED",K$[K$.AFTER_ATTRIBUTE_VALUE_QUOTED=38]="AFTER_ATTRIBUTE_VALUE_QUOTED",K$[K$.SELF_CLOSING_START_TAG=39]="SELF_CLOSING_START_TAG",K$[K$.BOGUS_COMMENT=40]="BOGUS_COMMENT",K$[K$.MARKUP_DECLARATION_OPEN=41]="MARKUP_DECLARATION_OPEN",K$[K$.COMMENT_START=42]="COMMENT_START",K$[K$.COMMENT_START_DASH=43]="COMMENT_START_DASH",K$[K$.COMMENT=44]="COMMENT",K$[K$.COMMENT_LESS_THAN_SIGN=45]="COMMENT_LESS_THAN_SIGN",K$[K$.COMMENT_LESS_THAN_SIGN_BANG=46]="COMMENT_LESS_THAN_SIGN_BANG",K$[K$.COMMENT_LESS_THAN_SIGN_BANG_DASH=47]="COMMENT_LESS_THAN_SIGN_BANG_DASH",K$[K$.COMMENT_LESS_THAN_SIGN_BANG_DASH_DASH=48]="COMMENT_LESS_THAN_SIGN_BANG_DASH_DASH",K$[K$.COMMENT_END_DASH=49]="COMMENT_END_DASH",K$[K$.COMMENT_END=50]="COMMENT_END",K$[K$.COMMENT_END_BANG=51]="COMMENT_END_BANG",K$[K$.DOCTYPE=52]="DOCTYPE",K$[K$.BEFORE_DOCTYPE_NAME=53]="BEFORE_DOCTYPE_NAME",K$[K$.DOCTYPE_NAME=54]="DOCTYPE_NAME",K$[K$.AFTER_DOCTYPE_NAME=55]="AFTER_DOCTYPE_NAME",K$[K$.AFTER_DOCTYPE_PUBLIC_KEYWORD=56]="AFTER_DOCTYPE_PUBLIC_KEYWORD",K$[K$.BEFORE_DOCTYPE_PUBLIC_IDENTIFIER=57]="BEFORE_DOCTYPE_PUBLIC_IDENTIFIER",K$[K$.DOCTYPE_PUBLIC_IDENTIFIER_DOUBLE_QUOTED=58]="DOCTYPE_PUBLIC_IDENTIFIER_DOUBLE_QUOTED",K$[K$.DOCTYPE_PUBLIC_IDENTIFIER_SINGLE_QUOTED=59]="DOCTYPE_PUBLIC_IDENTIFIER_SINGLE_QUOTED",K$[K$.AFTER_DOCTYPE_PUBLIC_IDENTIFIER=60]="AFTER_DOCTYPE_PUBLIC_IDENTIFIER",K$[K$.BETWEEN_DOCTYPE_PUBLIC_AND_SYSTEM_IDENTIFIERS=61]="BETWEEN_DOCTYPE_PUBLIC_AND_SYSTEM_IDENTIFIERS",K$[K$.AFTER_DOCTYPE_SYSTEM_KEYWORD=62]="AFTER_DOCTYPE_SYSTEM_KEYWORD",K$[K$.BEFORE_DOCTYPE_SYSTEM_IDENTIFIER=63]="BEFORE_DOCTYPE_SYSTEM_IDENTIFIER",K$[K$.DOCTYPE_SYSTEM_IDENTIFIER_DOUBLE_QUOTED=64]="DOCTYPE_SYSTEM_IDENTIFIER_DOUBLE_QUOTED",K$[K$.DOCTYPE_SYSTEM_IDENTIF
IER_SINGLE_QUOTED=65]="DOCTYPE_SYSTEM_IDENTIFIER_SINGLE_QUOTED",K$[K$.AFTER_DOCTYPE_SYSTEM_IDENTIFIER=66]="AFTER_DOCTYPE_SYSTEM_IDENTIFIER",K$[K$.BOGUS_DOCTYPE=67]="BOGUS_DOCTYPE",K$[K$.CDATA_SECTION=68]="CDATA_SECTION",K$[K$.CDATA_SECTION_BRACKET=69]="CDATA_SECTION_BRACKET",K$[K$.CDATA_SECTION_END=70]="CDATA_SECTION_END",K$[K$.CHARACTER_REFERENCE=71]="CHARACTER_REFERENCE",K$[K$.AMBIGUOUS_AMPERSAND=72]="AMBIGUOUS_AMPERSAND";const J$={DATA:Y$.DATA,RCDATA:Y$.RCDATA,RAWTEXT:Y$.RAWTEXT,SCRIPT_DATA:Y$.SCRIPT_DATA,PLAINTEXT:Y$.PLAINTEXT,CDATA_SECTION:Y$.CDATA_SECTION};function eP(e){return e>=l$.LATIN_CAPITAL_A&&e<=l$.LATIN_CAPITAL_Z}function tP(e){return function(e){return e>=l$.LATIN_SMALL_A&&e<=l$.LATIN_SMALL_Z}(e)||eP(e)}function nP(e){return tP(e)||function(e){return e>=l$.DIGIT_0&&e<=l$.DIGIT_9}(e)}function rP(e){return e+32}function aP(e){return e===l$.SPACE||e===l$.LINE_FEED||e===l$.TABULATION||e===l$.FORM_FEED}function oP(e){return aP(e)||e===l$.SOLIDUS||e===l$.GREATER_THAN_SIGN}class iP{constructor(e,t){this.options=e,this.handler=t,this.paused=!1,this.inLoop=!1,this.inForeignNode=!1,this.lastStartTagName="",this.active=!1,this.state=Y$.DATA,this.returnState=Y$.DATA,this.entityStartPos=0,this.consumedAfterSnapshot=-1,this.currentCharacterToken=null,this.currentToken=null,this.currentAttr={name:"",value:""},this.preprocessor=new O$(t),this.currentLocation=this.getCurrentLocation(-1),this.entityDecoder=new Q$(x$,((e,t)=>{this.preprocessor.pos=this.entityStartPos+t-1,this._flushCodePointConsumedAsCharacterReference(e)}),t.onParseError?{missingSemicolonAfterCharacterReference:()=>{this._err(g$.missingSemicolonAfterCharacterReference,1)},absenceOfDigitsInNumericCharacterReference:e=>{this._err(g$.absenceOfDigitsInNumericCharacterReference,this.entityStartPos-this.preprocessor.pos+e)},validateNumericCharacterReference:e=>{const t=function(e){return 
e===l$.NULL?g$.nullCharacterReference:e>1114111?g$.characterReferenceOutsideUnicodeRange:h$(e)?g$.surrogateCharacterReference:m$(e)?g$.noncharacterCharacterReference:f$(e)||e===l$.CARRIAGE_RETURN?g$.controlCharacterReference:null}(e);t&&this._err(t,1)}}:void 0)}_err(e,t=0){var n,r;null===(r=(n=this.handler).onParseError)||void 0===r||r.call(n,this.preprocessor.getError(e,t))}getCurrentLocation(e){return this.options.sourceCodeLocationInfo?{startLine:this.preprocessor.line,startCol:this.preprocessor.col-e,startOffset:this.preprocessor.offset-e,endLine:-1,endCol:-1,endOffset:-1}:null}_runParsingLoop(){if(!this.inLoop){for(this.inLoop=!0;this.active&&!this.paused;){this.consumedAfterSnapshot=0;const e=this._consume();this._ensureHibernation()||this._callState(e)}this.inLoop=!1}}pause(){this.paused=!0}resume(e){if(!this.paused)throw new Error("Parser was already resumed");this.paused=!1,this.inLoop||(this._runParsingLoop(),this.paused||null==e||e())}write(e,t,n){this.active=!0,this.preprocessor.write(e,t),this._runParsingLoop(),this.paused||null==n||n()}insertHtmlAtCurrentPos(e){this.active=!0,this.preprocessor.insertHtmlAtCurrentPos(e),this._runParsingLoop()}_ensureHibernation(){return!!this.preprocessor.endOfChunkHit&&(this.preprocessor.retreat(this.consumedAfterSnapshot),this.consumedAfterSnapshot=0,this.active=!1,!0)}_consume(){return this.consumedAfterSnapshot++,this.preprocessor.advance()}_advanceBy(e){this.consumedAfterSnapshot+=e;for(let 
t=0;t0&&this._err(g$.endTagWithAttributes),e.selfClosing&&this._err(g$.endTagWithTrailingSolidus),this.handler.onEndTag(e)),this.preprocessor.dropParsedChunk()}emitCurrentComment(e){this.prepareToken(e),this.handler.onComment(e),this.preprocessor.dropParsedChunk()}emitCurrentDoctype(e){this.prepareToken(e),this.handler.onDoctype(e),this.preprocessor.dropParsedChunk()}_emitCurrentCharacterToken(e){if(this.currentCharacterToken){switch(e&&this.currentCharacterToken.location&&(this.currentCharacterToken.location.endLine=e.startLine,this.currentCharacterToken.location.endCol=e.startCol,this.currentCharacterToken.location.endOffset=e.startOffset),this.currentCharacterToken.type){case b$.CHARACTER:this.handler.onCharacter(this.currentCharacterToken);break;case b$.NULL_CHARACTER:this.handler.onNullCharacter(this.currentCharacterToken);break;case b$.WHITESPACE_CHARACTER:this.handler.onWhitespaceCharacter(this.currentCharacterToken)}this.currentCharacterToken=null}}_emitEOFToken(){const e=this.getCurrentLocation(0);e&&(e.endLine=e.startLine,e.endCol=e.startCol,e.endOffset=e.startOffset),this._emitCurrentCharacterToken(e),this.handler.onEof({type:b$.EOF,location:e}),this.active=!1}_appendCharToCurrentCharacterToken(e,t){if(this.currentCharacterToken){if(this.currentCharacterToken.type===e)return void(this.currentCharacterToken.chars+=t);this.currentLocation=this.getCurrentLocation(0),this._emitCurrentCharacterToken(this.currentLocation),this.preprocessor.dropParsedChunk()}this._createCharacterToken(e,t)}_emitCodePoint(e){const 
t=aP(e)?b$.WHITESPACE_CHARACTER:e===l$.NULL?b$.NULL_CHARACTER:b$.CHARACTER;this._appendCharToCurrentCharacterToken(t,String.fromCodePoint(e))}_emitChars(e){this._appendCharToCurrentCharacterToken(b$.CHARACTER,e)}_startCharacterReference(){this.returnState=this.state,this.state=Y$.CHARACTER_REFERENCE,this.entityStartPos=this.preprocessor.pos,this.entityDecoder.startEntity(this._isCharacterReferenceInAttribute()?$$.Attribute:$$.Legacy)}_isCharacterReferenceInAttribute(){return this.returnState===Y$.ATTRIBUTE_VALUE_DOUBLE_QUOTED||this.returnState===Y$.ATTRIBUTE_VALUE_SINGLE_QUOTED||this.returnState===Y$.ATTRIBUTE_VALUE_UNQUOTED}_flushCodePointConsumedAsCharacterReference(e){this._isCharacterReferenceInAttribute()?this.currentAttr.value+=String.fromCodePoint(e):this._emitCodePoint(e)}_callState(e){switch(this.state){case Y$.DATA:this._stateData(e);break;case Y$.RCDATA:this._stateRcdata(e);break;case Y$.RAWTEXT:this._stateRawtext(e);break;case Y$.SCRIPT_DATA:this._stateScriptData(e);break;case Y$.PLAINTEXT:this._statePlaintext(e);break;case Y$.TAG_OPEN:this._stateTagOpen(e);break;case Y$.END_TAG_OPEN:this._stateEndTagOpen(e);break;case Y$.TAG_NAME:this._stateTagName(e);break;case Y$.RCDATA_LESS_THAN_SIGN:this._stateRcdataLessThanSign(e);break;case Y$.RCDATA_END_TAG_OPEN:this._stateRcdataEndTagOpen(e);break;case Y$.RCDATA_END_TAG_NAME:this._stateRcdataEndTagName(e);break;case Y$.RAWTEXT_LESS_THAN_SIGN:this._stateRawtextLessThanSign(e);break;case Y$.RAWTEXT_END_TAG_OPEN:this._stateRawtextEndTagOpen(e);break;case Y$.RAWTEXT_END_TAG_NAME:this._stateRawtextEndTagName(e);break;case Y$.SCRIPT_DATA_LESS_THAN_SIGN:this._stateScriptDataLessThanSign(e);break;case Y$.SCRIPT_DATA_END_TAG_OPEN:this._stateScriptDataEndTagOpen(e);break;case Y$.SCRIPT_DATA_END_TAG_NAME:this._stateScriptDataEndTagName(e);break;case Y$.SCRIPT_DATA_ESCAPE_START:this._stateScriptDataEscapeStart(e);break;case Y$.SCRIPT_DATA_ESCAPE_START_DASH:this._stateScriptDataEscapeStartDash(e);break;case 
Y$.SCRIPT_DATA_ESCAPED:this._stateScriptDataEscaped(e);break;case Y$.SCRIPT_DATA_ESCAPED_DASH:this._stateScriptDataEscapedDash(e);break;case Y$.SCRIPT_DATA_ESCAPED_DASH_DASH:this._stateScriptDataEscapedDashDash(e);break;case Y$.SCRIPT_DATA_ESCAPED_LESS_THAN_SIGN:this._stateScriptDataEscapedLessThanSign(e);break;case Y$.SCRIPT_DATA_ESCAPED_END_TAG_OPEN:this._stateScriptDataEscapedEndTagOpen(e);break;case Y$.SCRIPT_DATA_ESCAPED_END_TAG_NAME:this._stateScriptDataEscapedEndTagName(e);break;case Y$.SCRIPT_DATA_DOUBLE_ESCAPE_START:this._stateScriptDataDoubleEscapeStart(e);break;case Y$.SCRIPT_DATA_DOUBLE_ESCAPED:this._stateScriptDataDoubleEscaped(e);break;case Y$.SCRIPT_DATA_DOUBLE_ESCAPED_DASH:this._stateScriptDataDoubleEscapedDash(e);break;case Y$.SCRIPT_DATA_DOUBLE_ESCAPED_DASH_DASH:this._stateScriptDataDoubleEscapedDashDash(e);break;case Y$.SCRIPT_DATA_DOUBLE_ESCAPED_LESS_THAN_SIGN:this._stateScriptDataDoubleEscapedLessThanSign(e);break;case Y$.SCRIPT_DATA_DOUBLE_ESCAPE_END:this._stateScriptDataDoubleEscapeEnd(e);break;case Y$.BEFORE_ATTRIBUTE_NAME:this._stateBeforeAttributeName(e);break;case Y$.ATTRIBUTE_NAME:this._stateAttributeName(e);break;case Y$.AFTER_ATTRIBUTE_NAME:this._stateAfterAttributeName(e);break;case Y$.BEFORE_ATTRIBUTE_VALUE:this._stateBeforeAttributeValue(e);break;case Y$.ATTRIBUTE_VALUE_DOUBLE_QUOTED:this._stateAttributeValueDoubleQuoted(e);break;case Y$.ATTRIBUTE_VALUE_SINGLE_QUOTED:this._stateAttributeValueSingleQuoted(e);break;case Y$.ATTRIBUTE_VALUE_UNQUOTED:this._stateAttributeValueUnquoted(e);break;case Y$.AFTER_ATTRIBUTE_VALUE_QUOTED:this._stateAfterAttributeValueQuoted(e);break;case Y$.SELF_CLOSING_START_TAG:this._stateSelfClosingStartTag(e);break;case Y$.BOGUS_COMMENT:this._stateBogusComment(e);break;case Y$.MARKUP_DECLARATION_OPEN:this._stateMarkupDeclarationOpen(e);break;case Y$.COMMENT_START:this._stateCommentStart(e);break;case Y$.COMMENT_START_DASH:this._stateCommentStartDash(e);break;case Y$.COMMENT:this._stateComment(e);break;case 
Y$.COMMENT_LESS_THAN_SIGN:this._stateCommentLessThanSign(e);break;case Y$.COMMENT_LESS_THAN_SIGN_BANG:this._stateCommentLessThanSignBang(e);break;case Y$.COMMENT_LESS_THAN_SIGN_BANG_DASH:this._stateCommentLessThanSignBangDash(e);break;case Y$.COMMENT_LESS_THAN_SIGN_BANG_DASH_DASH:this._stateCommentLessThanSignBangDashDash(e);break;case Y$.COMMENT_END_DASH:this._stateCommentEndDash(e);break;case Y$.COMMENT_END:this._stateCommentEnd(e);break;case Y$.COMMENT_END_BANG:this._stateCommentEndBang(e);break;case Y$.DOCTYPE:this._stateDoctype(e);break;case Y$.BEFORE_DOCTYPE_NAME:this._stateBeforeDoctypeName(e);break;case Y$.DOCTYPE_NAME:this._stateDoctypeName(e);break;case Y$.AFTER_DOCTYPE_NAME:this._stateAfterDoctypeName(e);break;case Y$.AFTER_DOCTYPE_PUBLIC_KEYWORD:this._stateAfterDoctypePublicKeyword(e);break;case Y$.BEFORE_DOCTYPE_PUBLIC_IDENTIFIER:this._stateBeforeDoctypePublicIdentifier(e);break;case Y$.DOCTYPE_PUBLIC_IDENTIFIER_DOUBLE_QUOTED:this._stateDoctypePublicIdentifierDoubleQuoted(e);break;case Y$.DOCTYPE_PUBLIC_IDENTIFIER_SINGLE_QUOTED:this._stateDoctypePublicIdentifierSingleQuoted(e);break;case Y$.AFTER_DOCTYPE_PUBLIC_IDENTIFIER:this._stateAfterDoctypePublicIdentifier(e);break;case Y$.BETWEEN_DOCTYPE_PUBLIC_AND_SYSTEM_IDENTIFIERS:this._stateBetweenDoctypePublicAndSystemIdentifiers(e);break;case Y$.AFTER_DOCTYPE_SYSTEM_KEYWORD:this._stateAfterDoctypeSystemKeyword(e);break;case Y$.BEFORE_DOCTYPE_SYSTEM_IDENTIFIER:this._stateBeforeDoctypeSystemIdentifier(e);break;case Y$.DOCTYPE_SYSTEM_IDENTIFIER_DOUBLE_QUOTED:this._stateDoctypeSystemIdentifierDoubleQuoted(e);break;case Y$.DOCTYPE_SYSTEM_IDENTIFIER_SINGLE_QUOTED:this._stateDoctypeSystemIdentifierSingleQuoted(e);break;case Y$.AFTER_DOCTYPE_SYSTEM_IDENTIFIER:this._stateAfterDoctypeSystemIdentifier(e);break;case Y$.BOGUS_DOCTYPE:this._stateBogusDoctype(e);break;case Y$.CDATA_SECTION:this._stateCdataSection(e);break;case Y$.CDATA_SECTION_BRACKET:this._stateCdataSectionBracket(e);break;case 
Y$.CDATA_SECTION_END:this._stateCdataSectionEnd(e);break;case Y$.CHARACTER_REFERENCE:this._stateCharacterReference();break;case Y$.AMBIGUOUS_AMPERSAND:this._stateAmbiguousAmpersand(e);break;default:throw new Error("Unknown state")}}_stateData(e){switch(e){case l$.LESS_THAN_SIGN:this.state=Y$.TAG_OPEN;break;case l$.AMPERSAND:this._startCharacterReference();break;case l$.NULL:this._err(g$.unexpectedNullCharacter),this._emitCodePoint(e);break;case l$.EOF:this._emitEOFToken();break;default:this._emitCodePoint(e)}}_stateRcdata(e){switch(e){case l$.AMPERSAND:this._startCharacterReference();break;case l$.LESS_THAN_SIGN:this.state=Y$.RCDATA_LESS_THAN_SIGN;break;case l$.NULL:this._err(g$.unexpectedNullCharacter),this._emitChars(s$);break;case l$.EOF:this._emitEOFToken();break;default:this._emitCodePoint(e)}}_stateRawtext(e){switch(e){case l$.LESS_THAN_SIGN:this.state=Y$.RAWTEXT_LESS_THAN_SIGN;break;case l$.NULL:this._err(g$.unexpectedNullCharacter),this._emitChars(s$);break;case l$.EOF:this._emitEOFToken();break;default:this._emitCodePoint(e)}}_stateScriptData(e){switch(e){case l$.LESS_THAN_SIGN:this.state=Y$.SCRIPT_DATA_LESS_THAN_SIGN;break;case l$.NULL:this._err(g$.unexpectedNullCharacter),this._emitChars(s$);break;case l$.EOF:this._emitEOFToken();break;default:this._emitCodePoint(e)}}_statePlaintext(e){switch(e){case l$.NULL:this._err(g$.unexpectedNullCharacter),this._emitChars(s$);break;case l$.EOF:this._emitEOFToken();break;default:this._emitCodePoint(e)}}_stateTagOpen(e){if(tP(e))this._createStartTagToken(),this.state=Y$.TAG_NAME,this._stateTagName(e);else switch(e){case l$.EXCLAMATION_MARK:this.state=Y$.MARKUP_DECLARATION_OPEN;break;case l$.SOLIDUS:this.state=Y$.END_TAG_OPEN;break;case l$.QUESTION_MARK:this._err(g$.unexpectedQuestionMarkInsteadOfTagName),this._createCommentToken(1),this.state=Y$.BOGUS_COMMENT,this._stateBogusComment(e);break;case 
l$.EOF:this._err(g$.eofBeforeTagName),this._emitChars("<"),this._emitEOFToken();break;default:this._err(g$.invalidFirstCharacterOfTagName),this._emitChars("<"),this.state=Y$.DATA,this._stateData(e)}}_stateEndTagOpen(e){if(tP(e))this._createEndTagToken(),this.state=Y$.TAG_NAME,this._stateTagName(e);else switch(e){case l$.GREATER_THAN_SIGN:this._err(g$.missingEndTagName),this.state=Y$.DATA;break;case l$.EOF:this._err(g$.eofBeforeTagName),this._emitChars("");break;case l$.NULL:this._err(g$.unexpectedNullCharacter),this.state=Y$.SCRIPT_DATA_ESCAPED,this._emitChars(s$);break;case l$.EOF:this._err(g$.eofInScriptHtmlCommentLikeText),this._emitEOFToken();break;default:this.state=Y$.SCRIPT_DATA_ESCAPED,this._emitCodePoint(e)}}_stateScriptDataEscapedLessThanSign(e){e===l$.SOLIDUS?this.state=Y$.SCRIPT_DATA_ESCAPED_END_TAG_OPEN:tP(e)?(this._emitChars("<"),this.state=Y$.SCRIPT_DATA_DOUBLE_ESCAPE_START,this._stateScriptDataDoubleEscapeStart(e)):(this._emitChars("<"),this.state=Y$.SCRIPT_DATA_ESCAPED,this._stateScriptDataEscaped(e))}_stateScriptDataEscapedEndTagOpen(e){tP(e)?(this.state=Y$.SCRIPT_DATA_ESCAPED_END_TAG_NAME,this._stateScriptDataEscapedEndTagName(e)):(this._emitChars("");break;case l$.NULL:this._err(g$.unexpectedNullCharacter),this.state=Y$.SCRIPT_DATA_DOUBLE_ESCAPED,this._emitChars(s$);break;case l$.EOF:this._err(g$.eofInScriptHtmlCommentLikeText),this._emitEOFToken();break;default:this.state=Y$.SCRIPT_DATA_DOUBLE_ESCAPED,this._emitCodePoint(e)}}_stateScriptDataDoubleEscapedLessThanSign(e){e===l$.SOLIDUS?(this.state=Y$.SCRIPT_DATA_DOUBLE_ESCAPE_END,this._emitChars("/")):(this.state=Y$.SCRIPT_DATA_DOUBLE_ESCAPED,this._stateScriptDataDoubleEscaped(e))}_stateScriptDataDoubleEscapeEnd(e){if(this.preprocessor.startsWith(p$,!1)&&oP(this.preprocessor.peek(6))){this._emitCodePoint(e);for(let e=0;e<6;e++)this._emitCodePoint(this._consume());this.state=Y$.SCRIPT_DATA_ESCAPED}else 
this._ensureHibernation()||(this.state=Y$.SCRIPT_DATA_DOUBLE_ESCAPED,this._stateScriptDataDoubleEscaped(e))}_stateBeforeAttributeName(e){switch(e){case l$.SPACE:case l$.LINE_FEED:case l$.TABULATION:case l$.FORM_FEED:break;case l$.SOLIDUS:case l$.GREATER_THAN_SIGN:case l$.EOF:this.state=Y$.AFTER_ATTRIBUTE_NAME,this._stateAfterAttributeName(e);break;case l$.EQUALS_SIGN:this._err(g$.unexpectedEqualsSignBeforeAttributeName),this._createAttr("="),this.state=Y$.ATTRIBUTE_NAME;break;default:this._createAttr(""),this.state=Y$.ATTRIBUTE_NAME,this._stateAttributeName(e)}}_stateAttributeName(e){switch(e){case l$.SPACE:case l$.LINE_FEED:case l$.TABULATION:case l$.FORM_FEED:case l$.SOLIDUS:case l$.GREATER_THAN_SIGN:case l$.EOF:this._leaveAttrName(),this.state=Y$.AFTER_ATTRIBUTE_NAME,this._stateAfterAttributeName(e);break;case l$.EQUALS_SIGN:this._leaveAttrName(),this.state=Y$.BEFORE_ATTRIBUTE_VALUE;break;case l$.QUOTATION_MARK:case l$.APOSTROPHE:case l$.LESS_THAN_SIGN:this._err(g$.unexpectedCharacterInAttributeName),this.currentAttr.name+=String.fromCodePoint(e);break;case l$.NULL:this._err(g$.unexpectedNullCharacter),this.currentAttr.name+=s$;break;default:this.currentAttr.name+=String.fromCodePoint(eP(e)?rP(e):e)}}_stateAfterAttributeName(e){switch(e){case l$.SPACE:case l$.LINE_FEED:case l$.TABULATION:case l$.FORM_FEED:break;case l$.SOLIDUS:this.state=Y$.SELF_CLOSING_START_TAG;break;case l$.EQUALS_SIGN:this.state=Y$.BEFORE_ATTRIBUTE_VALUE;break;case l$.GREATER_THAN_SIGN:this.state=Y$.DATA,this.emitCurrentTagToken();break;case l$.EOF:this._err(g$.eofInTag),this._emitEOFToken();break;default:this._createAttr(""),this.state=Y$.ATTRIBUTE_NAME,this._stateAttributeName(e)}}_stateBeforeAttributeValue(e){switch(e){case l$.SPACE:case l$.LINE_FEED:case l$.TABULATION:case l$.FORM_FEED:break;case l$.QUOTATION_MARK:this.state=Y$.ATTRIBUTE_VALUE_DOUBLE_QUOTED;break;case l$.APOSTROPHE:this.state=Y$.ATTRIBUTE_VALUE_SINGLE_QUOTED;break;case 
l$.GREATER_THAN_SIGN:this._err(g$.missingAttributeValue),this.state=Y$.DATA,this.emitCurrentTagToken();break;default:this.state=Y$.ATTRIBUTE_VALUE_UNQUOTED,this._stateAttributeValueUnquoted(e)}}_stateAttributeValueDoubleQuoted(e){switch(e){case l$.QUOTATION_MARK:this.state=Y$.AFTER_ATTRIBUTE_VALUE_QUOTED;break;case l$.AMPERSAND:this._startCharacterReference();break;case l$.NULL:this._err(g$.unexpectedNullCharacter),this.currentAttr.value+=s$;break;case l$.EOF:this._err(g$.eofInTag),this._emitEOFToken();break;default:this.currentAttr.value+=String.fromCodePoint(e)}}_stateAttributeValueSingleQuoted(e){switch(e){case l$.APOSTROPHE:this.state=Y$.AFTER_ATTRIBUTE_VALUE_QUOTED;break;case l$.AMPERSAND:this._startCharacterReference();break;case l$.NULL:this._err(g$.unexpectedNullCharacter),this.currentAttr.value+=s$;break;case l$.EOF:this._err(g$.eofInTag),this._emitEOFToken();break;default:this.currentAttr.value+=String.fromCodePoint(e)}}_stateAttributeValueUnquoted(e){switch(e){case l$.SPACE:case l$.LINE_FEED:case l$.TABULATION:case l$.FORM_FEED:this._leaveAttrValue(),this.state=Y$.BEFORE_ATTRIBUTE_NAME;break;case l$.AMPERSAND:this._startCharacterReference();break;case l$.GREATER_THAN_SIGN:this._leaveAttrValue(),this.state=Y$.DATA,this.emitCurrentTagToken();break;case l$.NULL:this._err(g$.unexpectedNullCharacter),this.currentAttr.value+=s$;break;case l$.QUOTATION_MARK:case l$.APOSTROPHE:case l$.LESS_THAN_SIGN:case l$.EQUALS_SIGN:case l$.GRAVE_ACCENT:this._err(g$.unexpectedCharacterInUnquotedAttributeValue),this.currentAttr.value+=String.fromCodePoint(e);break;case l$.EOF:this._err(g$.eofInTag),this._emitEOFToken();break;default:this.currentAttr.value+=String.fromCodePoint(e)}}_stateAfterAttributeValueQuoted(e){switch(e){case l$.SPACE:case l$.LINE_FEED:case l$.TABULATION:case l$.FORM_FEED:this._leaveAttrValue(),this.state=Y$.BEFORE_ATTRIBUTE_NAME;break;case l$.SOLIDUS:this._leaveAttrValue(),this.state=Y$.SELF_CLOSING_START_TAG;break;case 
l$.GREATER_THAN_SIGN:this._leaveAttrValue(),this.state=Y$.DATA,this.emitCurrentTagToken();break;case l$.EOF:this._err(g$.eofInTag),this._emitEOFToken();break;default:this._err(g$.missingWhitespaceBetweenAttributes),this.state=Y$.BEFORE_ATTRIBUTE_NAME,this._stateBeforeAttributeName(e)}}_stateSelfClosingStartTag(e){switch(e){case l$.GREATER_THAN_SIGN:this.currentToken.selfClosing=!0,this.state=Y$.DATA,this.emitCurrentTagToken();break;case l$.EOF:this._err(g$.eofInTag),this._emitEOFToken();break;default:this._err(g$.unexpectedSolidusInTag),this.state=Y$.BEFORE_ATTRIBUTE_NAME,this._stateBeforeAttributeName(e)}}_stateBogusComment(e){const t=this.currentToken;switch(e){case l$.GREATER_THAN_SIGN:this.state=Y$.DATA,this.emitCurrentComment(t);break;case l$.EOF:this.emitCurrentComment(t),this._emitEOFToken();break;case l$.NULL:this._err(g$.unexpectedNullCharacter),t.data+=s$;break;default:t.data+=String.fromCodePoint(e)}}_stateMarkupDeclarationOpen(e){this._consumeSequenceIfMatch("--",!0)?(this._createCommentToken(3),this.state=Y$.COMMENT_START):this._consumeSequenceIfMatch(d$,!1)?(this.currentLocation=this.getCurrentLocation(8),this.state=Y$.DOCTYPE):this._consumeSequenceIfMatch(u$,!0)?this.inForeignNode?this.state=Y$.CDATA_SECTION:(this._err(g$.cdataInHtmlContent),this._createCommentToken(8),this.currentToken.data="[CDATA[",this.state=Y$.BOGUS_COMMENT):this._ensureHibernation()||(this._err(g$.incorrectlyOpenedComment),this._createCommentToken(2),this.state=Y$.BOGUS_COMMENT,this._stateBogusComment(e))}_stateCommentStart(e){switch(e){case l$.HYPHEN_MINUS:this.state=Y$.COMMENT_START_DASH;break;case l$.GREATER_THAN_SIGN:{this._err(g$.abruptClosingOfEmptyComment),this.state=Y$.DATA;const e=this.currentToken;this.emitCurrentComment(e);break}default:this.state=Y$.COMMENT,this._stateComment(e)}}_stateCommentStartDash(e){const t=this.currentToken;switch(e){case l$.HYPHEN_MINUS:this.state=Y$.COMMENT_END;break;case 
l$.GREATER_THAN_SIGN:this._err(g$.abruptClosingOfEmptyComment),this.state=Y$.DATA,this.emitCurrentComment(t);break;case l$.EOF:this._err(g$.eofInComment),this.emitCurrentComment(t),this._emitEOFToken();break;default:t.data+="-",this.state=Y$.COMMENT,this._stateComment(e)}}_stateComment(e){const t=this.currentToken;switch(e){case l$.HYPHEN_MINUS:this.state=Y$.COMMENT_END_DASH;break;case l$.LESS_THAN_SIGN:t.data+="<",this.state=Y$.COMMENT_LESS_THAN_SIGN;break;case l$.NULL:this._err(g$.unexpectedNullCharacter),t.data+=s$;break;case l$.EOF:this._err(g$.eofInComment),this.emitCurrentComment(t),this._emitEOFToken();break;default:t.data+=String.fromCodePoint(e)}}_stateCommentLessThanSign(e){const t=this.currentToken;switch(e){case l$.EXCLAMATION_MARK:t.data+="!",this.state=Y$.COMMENT_LESS_THAN_SIGN_BANG;break;case l$.LESS_THAN_SIGN:t.data+="<";break;default:this.state=Y$.COMMENT,this._stateComment(e)}}_stateCommentLessThanSignBang(e){e===l$.HYPHEN_MINUS?this.state=Y$.COMMENT_LESS_THAN_SIGN_BANG_DASH:(this.state=Y$.COMMENT,this._stateComment(e))}_stateCommentLessThanSignBangDash(e){e===l$.HYPHEN_MINUS?this.state=Y$.COMMENT_LESS_THAN_SIGN_BANG_DASH_DASH:(this.state=Y$.COMMENT_END_DASH,this._stateCommentEndDash(e))}_stateCommentLessThanSignBangDashDash(e){e!==l$.GREATER_THAN_SIGN&&e!==l$.EOF&&this._err(g$.nestedComment),this.state=Y$.COMMENT_END,this._stateCommentEnd(e)}_stateCommentEndDash(e){const t=this.currentToken;switch(e){case l$.HYPHEN_MINUS:this.state=Y$.COMMENT_END;break;case l$.EOF:this._err(g$.eofInComment),this.emitCurrentComment(t),this._emitEOFToken();break;default:t.data+="-",this.state=Y$.COMMENT,this._stateComment(e)}}_stateCommentEnd(e){const t=this.currentToken;switch(e){case l$.GREATER_THAN_SIGN:this.state=Y$.DATA,this.emitCurrentComment(t);break;case l$.EXCLAMATION_MARK:this.state=Y$.COMMENT_END_BANG;break;case l$.HYPHEN_MINUS:t.data+="-";break;case 
l$.EOF:this._err(g$.eofInComment),this.emitCurrentComment(t),this._emitEOFToken();break;default:t.data+="--",this.state=Y$.COMMENT,this._stateComment(e)}}_stateCommentEndBang(e){const t=this.currentToken;switch(e){case l$.HYPHEN_MINUS:t.data+="--!",this.state=Y$.COMMENT_END_DASH;break;case l$.GREATER_THAN_SIGN:this._err(g$.incorrectlyClosedComment),this.state=Y$.DATA,this.emitCurrentComment(t);break;case l$.EOF:this._err(g$.eofInComment),this.emitCurrentComment(t),this._emitEOFToken();break;default:t.data+="--!",this.state=Y$.COMMENT,this._stateComment(e)}}_stateDoctype(e){switch(e){case l$.SPACE:case l$.LINE_FEED:case l$.TABULATION:case l$.FORM_FEED:this.state=Y$.BEFORE_DOCTYPE_NAME;break;case l$.GREATER_THAN_SIGN:this.state=Y$.BEFORE_DOCTYPE_NAME,this._stateBeforeDoctypeName(e);break;case l$.EOF:{this._err(g$.eofInDoctype),this._createDoctypeToken(null);const e=this.currentToken;e.forceQuirks=!0,this.emitCurrentDoctype(e),this._emitEOFToken();break}default:this._err(g$.missingWhitespaceBeforeDoctypeName),this.state=Y$.BEFORE_DOCTYPE_NAME,this._stateBeforeDoctypeName(e)}}_stateBeforeDoctypeName(e){if(eP(e))this._createDoctypeToken(String.fromCharCode(rP(e))),this.state=Y$.DOCTYPE_NAME;else switch(e){case l$.SPACE:case l$.LINE_FEED:case l$.TABULATION:case l$.FORM_FEED:break;case l$.NULL:this._err(g$.unexpectedNullCharacter),this._createDoctypeToken(s$),this.state=Y$.DOCTYPE_NAME;break;case l$.GREATER_THAN_SIGN:{this._err(g$.missingDoctypeName),this._createDoctypeToken(null);const e=this.currentToken;e.forceQuirks=!0,this.emitCurrentDoctype(e),this.state=Y$.DATA;break}case l$.EOF:{this._err(g$.eofInDoctype),this._createDoctypeToken(null);const e=this.currentToken;e.forceQuirks=!0,this.emitCurrentDoctype(e),this._emitEOFToken();break}default:this._createDoctypeToken(String.fromCodePoint(e)),this.state=Y$.DOCTYPE_NAME}}_stateDoctypeName(e){const t=this.currentToken;switch(e){case l$.SPACE:case l$.LINE_FEED:case l$.TABULATION:case 
l$.FORM_FEED:this.state=Y$.AFTER_DOCTYPE_NAME;break;case l$.GREATER_THAN_SIGN:this.state=Y$.DATA,this.emitCurrentDoctype(t);break;case l$.NULL:this._err(g$.unexpectedNullCharacter),t.name+=s$;break;case l$.EOF:this._err(g$.eofInDoctype),t.forceQuirks=!0,this.emitCurrentDoctype(t),this._emitEOFToken();break;default:t.name+=String.fromCodePoint(eP(e)?rP(e):e)}}_stateAfterDoctypeName(e){const t=this.currentToken;switch(e){case l$.SPACE:case l$.LINE_FEED:case l$.TABULATION:case l$.FORM_FEED:break;case l$.GREATER_THAN_SIGN:this.state=Y$.DATA,this.emitCurrentDoctype(t);break;case l$.EOF:this._err(g$.eofInDoctype),t.forceQuirks=!0,this.emitCurrentDoctype(t),this._emitEOFToken();break;default:this._consumeSequenceIfMatch("public",!1)?this.state=Y$.AFTER_DOCTYPE_PUBLIC_KEYWORD:this._consumeSequenceIfMatch("system",!1)?this.state=Y$.AFTER_DOCTYPE_SYSTEM_KEYWORD:this._ensureHibernation()||(this._err(g$.invalidCharacterSequenceAfterDoctypeName),t.forceQuirks=!0,this.state=Y$.BOGUS_DOCTYPE,this._stateBogusDoctype(e))}}_stateAfterDoctypePublicKeyword(e){const t=this.currentToken;switch(e){case l$.SPACE:case l$.LINE_FEED:case l$.TABULATION:case l$.FORM_FEED:this.state=Y$.BEFORE_DOCTYPE_PUBLIC_IDENTIFIER;break;case l$.QUOTATION_MARK:this._err(g$.missingWhitespaceAfterDoctypePublicKeyword),t.publicId="",this.state=Y$.DOCTYPE_PUBLIC_IDENTIFIER_DOUBLE_QUOTED;break;case l$.APOSTROPHE:this._err(g$.missingWhitespaceAfterDoctypePublicKeyword),t.publicId="",this.state=Y$.DOCTYPE_PUBLIC_IDENTIFIER_SINGLE_QUOTED;break;case l$.GREATER_THAN_SIGN:this._err(g$.missingDoctypePublicIdentifier),t.forceQuirks=!0,this.state=Y$.DATA,this.emitCurrentDoctype(t);break;case l$.EOF:this._err(g$.eofInDoctype),t.forceQuirks=!0,this.emitCurrentDoctype(t),this._emitEOFToken();break;default:this._err(g$.missingQuoteBeforeDoctypePublicIdentifier),t.forceQuirks=!0,this.state=Y$.BOGUS_DOCTYPE,this._stateBogusDoctype(e)}}_stateBeforeDoctypePublicIdentifier(e){const t=this.currentToken;switch(e){case l$.SPACE:case 
l$.LINE_FEED:case l$.TABULATION:case l$.FORM_FEED:break;case l$.QUOTATION_MARK:t.publicId="",this.state=Y$.DOCTYPE_PUBLIC_IDENTIFIER_DOUBLE_QUOTED;break;case l$.APOSTROPHE:t.publicId="",this.state=Y$.DOCTYPE_PUBLIC_IDENTIFIER_SINGLE_QUOTED;break;case l$.GREATER_THAN_SIGN:this._err(g$.missingDoctypePublicIdentifier),t.forceQuirks=!0,this.state=Y$.DATA,this.emitCurrentDoctype(t);break;case l$.EOF:this._err(g$.eofInDoctype),t.forceQuirks=!0,this.emitCurrentDoctype(t),this._emitEOFToken();break;default:this._err(g$.missingQuoteBeforeDoctypePublicIdentifier),t.forceQuirks=!0,this.state=Y$.BOGUS_DOCTYPE,this._stateBogusDoctype(e)}}_stateDoctypePublicIdentifierDoubleQuoted(e){const t=this.currentToken;switch(e){case l$.QUOTATION_MARK:this.state=Y$.AFTER_DOCTYPE_PUBLIC_IDENTIFIER;break;case l$.NULL:this._err(g$.unexpectedNullCharacter),t.publicId+=s$;break;case l$.GREATER_THAN_SIGN:this._err(g$.abruptDoctypePublicIdentifier),t.forceQuirks=!0,this.emitCurrentDoctype(t),this.state=Y$.DATA;break;case l$.EOF:this._err(g$.eofInDoctype),t.forceQuirks=!0,this.emitCurrentDoctype(t),this._emitEOFToken();break;default:t.publicId+=String.fromCodePoint(e)}}_stateDoctypePublicIdentifierSingleQuoted(e){const t=this.currentToken;switch(e){case l$.APOSTROPHE:this.state=Y$.AFTER_DOCTYPE_PUBLIC_IDENTIFIER;break;case l$.NULL:this._err(g$.unexpectedNullCharacter),t.publicId+=s$;break;case l$.GREATER_THAN_SIGN:this._err(g$.abruptDoctypePublicIdentifier),t.forceQuirks=!0,this.emitCurrentDoctype(t),this.state=Y$.DATA;break;case l$.EOF:this._err(g$.eofInDoctype),t.forceQuirks=!0,this.emitCurrentDoctype(t),this._emitEOFToken();break;default:t.publicId+=String.fromCodePoint(e)}}_stateAfterDoctypePublicIdentifier(e){const t=this.currentToken;switch(e){case l$.SPACE:case l$.LINE_FEED:case l$.TABULATION:case l$.FORM_FEED:this.state=Y$.BETWEEN_DOCTYPE_PUBLIC_AND_SYSTEM_IDENTIFIERS;break;case l$.GREATER_THAN_SIGN:this.state=Y$.DATA,this.emitCurrentDoctype(t);break;case 
l$.QUOTATION_MARK:this._err(g$.missingWhitespaceBetweenDoctypePublicAndSystemIdentifiers),t.systemId="",this.state=Y$.DOCTYPE_SYSTEM_IDENTIFIER_DOUBLE_QUOTED;break;case l$.APOSTROPHE:this._err(g$.missingWhitespaceBetweenDoctypePublicAndSystemIdentifiers),t.systemId="",this.state=Y$.DOCTYPE_SYSTEM_IDENTIFIER_SINGLE_QUOTED;break;case l$.EOF:this._err(g$.eofInDoctype),t.forceQuirks=!0,this.emitCurrentDoctype(t),this._emitEOFToken();break;default:this._err(g$.missingQuoteBeforeDoctypeSystemIdentifier),t.forceQuirks=!0,this.state=Y$.BOGUS_DOCTYPE,this._stateBogusDoctype(e)}}_stateBetweenDoctypePublicAndSystemIdentifiers(e){const t=this.currentToken;switch(e){case l$.SPACE:case l$.LINE_FEED:case l$.TABULATION:case l$.FORM_FEED:break;case l$.GREATER_THAN_SIGN:this.emitCurrentDoctype(t),this.state=Y$.DATA;break;case l$.QUOTATION_MARK:t.systemId="",this.state=Y$.DOCTYPE_SYSTEM_IDENTIFIER_DOUBLE_QUOTED;break;case l$.APOSTROPHE:t.systemId="",this.state=Y$.DOCTYPE_SYSTEM_IDENTIFIER_SINGLE_QUOTED;break;case l$.EOF:this._err(g$.eofInDoctype),t.forceQuirks=!0,this.emitCurrentDoctype(t),this._emitEOFToken();break;default:this._err(g$.missingQuoteBeforeDoctypeSystemIdentifier),t.forceQuirks=!0,this.state=Y$.BOGUS_DOCTYPE,this._stateBogusDoctype(e)}}_stateAfterDoctypeSystemKeyword(e){const t=this.currentToken;switch(e){case l$.SPACE:case l$.LINE_FEED:case l$.TABULATION:case l$.FORM_FEED:this.state=Y$.BEFORE_DOCTYPE_SYSTEM_IDENTIFIER;break;case l$.QUOTATION_MARK:this._err(g$.missingWhitespaceAfterDoctypeSystemKeyword),t.systemId="",this.state=Y$.DOCTYPE_SYSTEM_IDENTIFIER_DOUBLE_QUOTED;break;case l$.APOSTROPHE:this._err(g$.missingWhitespaceAfterDoctypeSystemKeyword),t.systemId="",this.state=Y$.DOCTYPE_SYSTEM_IDENTIFIER_SINGLE_QUOTED;break;case l$.GREATER_THAN_SIGN:this._err(g$.missingDoctypeSystemIdentifier),t.forceQuirks=!0,this.state=Y$.DATA,this.emitCurrentDoctype(t);break;case 
l$.EOF:this._err(g$.eofInDoctype),t.forceQuirks=!0,this.emitCurrentDoctype(t),this._emitEOFToken();break;default:this._err(g$.missingQuoteBeforeDoctypeSystemIdentifier),t.forceQuirks=!0,this.state=Y$.BOGUS_DOCTYPE,this._stateBogusDoctype(e)}}_stateBeforeDoctypeSystemIdentifier(e){const t=this.currentToken;switch(e){case l$.SPACE:case l$.LINE_FEED:case l$.TABULATION:case l$.FORM_FEED:break;case l$.QUOTATION_MARK:t.systemId="",this.state=Y$.DOCTYPE_SYSTEM_IDENTIFIER_DOUBLE_QUOTED;break;case l$.APOSTROPHE:t.systemId="",this.state=Y$.DOCTYPE_SYSTEM_IDENTIFIER_SINGLE_QUOTED;break;case l$.GREATER_THAN_SIGN:this._err(g$.missingDoctypeSystemIdentifier),t.forceQuirks=!0,this.state=Y$.DATA,this.emitCurrentDoctype(t);break;case l$.EOF:this._err(g$.eofInDoctype),t.forceQuirks=!0,this.emitCurrentDoctype(t),this._emitEOFToken();break;default:this._err(g$.missingQuoteBeforeDoctypeSystemIdentifier),t.forceQuirks=!0,this.state=Y$.BOGUS_DOCTYPE,this._stateBogusDoctype(e)}}_stateDoctypeSystemIdentifierDoubleQuoted(e){const t=this.currentToken;switch(e){case l$.QUOTATION_MARK:this.state=Y$.AFTER_DOCTYPE_SYSTEM_IDENTIFIER;break;case l$.NULL:this._err(g$.unexpectedNullCharacter),t.systemId+=s$;break;case l$.GREATER_THAN_SIGN:this._err(g$.abruptDoctypeSystemIdentifier),t.forceQuirks=!0,this.emitCurrentDoctype(t),this.state=Y$.DATA;break;case l$.EOF:this._err(g$.eofInDoctype),t.forceQuirks=!0,this.emitCurrentDoctype(t),this._emitEOFToken();break;default:t.systemId+=String.fromCodePoint(e)}}_stateDoctypeSystemIdentifierSingleQuoted(e){const t=this.currentToken;switch(e){case l$.APOSTROPHE:this.state=Y$.AFTER_DOCTYPE_SYSTEM_IDENTIFIER;break;case l$.NULL:this._err(g$.unexpectedNullCharacter),t.systemId+=s$;break;case l$.GREATER_THAN_SIGN:this._err(g$.abruptDoctypeSystemIdentifier),t.forceQuirks=!0,this.emitCurrentDoctype(t),this.state=Y$.DATA;break;case 
l$.EOF:this._err(g$.eofInDoctype),t.forceQuirks=!0,this.emitCurrentDoctype(t),this._emitEOFToken();break;default:t.systemId+=String.fromCodePoint(e)}}_stateAfterDoctypeSystemIdentifier(e){const t=this.currentToken;switch(e){case l$.SPACE:case l$.LINE_FEED:case l$.TABULATION:case l$.FORM_FEED:break;case l$.GREATER_THAN_SIGN:this.emitCurrentDoctype(t),this.state=Y$.DATA;break;case l$.EOF:this._err(g$.eofInDoctype),t.forceQuirks=!0,this.emitCurrentDoctype(t),this._emitEOFToken();break;default:this._err(g$.unexpectedCharacterAfterDoctypeSystemIdentifier),this.state=Y$.BOGUS_DOCTYPE,this._stateBogusDoctype(e)}}_stateBogusDoctype(e){const t=this.currentToken;switch(e){case l$.GREATER_THAN_SIGN:this.emitCurrentDoctype(t),this.state=Y$.DATA;break;case l$.NULL:this._err(g$.unexpectedNullCharacter);break;case l$.EOF:this.emitCurrentDoctype(t),this._emitEOFToken()}}_stateCdataSection(e){switch(e){case l$.RIGHT_SQUARE_BRACKET:this.state=Y$.CDATA_SECTION_BRACKET;break;case l$.EOF:this._err(g$.eofInCdata),this._emitEOFToken();break;default:this._emitCodePoint(e)}}_stateCdataSectionBracket(e){e===l$.RIGHT_SQUARE_BRACKET?this.state=Y$.CDATA_SECTION_END:(this._emitChars("]"),this.state=Y$.CDATA_SECTION,this._stateCdataSection(e))}_stateCdataSectionEnd(e){switch(e){case l$.GREATER_THAN_SIGN:this.state=Y$.DATA;break;case l$.RIGHT_SQUARE_BRACKET:this._emitChars("]");break;default:this._emitChars("]]"),this.state=Y$.CDATA_SECTION,this._stateCdataSection(e)}}_stateCharacterReference(){let e=this.entityDecoder.write(this.preprocessor.html,this.preprocessor.pos);if(e<0){if(!this.preprocessor.lastChunkWritten)return 
this.active=!1,this.preprocessor.pos=this.preprocessor.html.length-1,this.consumedAfterSnapshot=0,void(this.preprocessor.endOfChunkHit=!0);e=this.entityDecoder.end()}0===e?(this.preprocessor.pos=this.entityStartPos,this._flushCodePointConsumedAsCharacterReference(l$.AMPERSAND),this.state=!this._isCharacterReferenceInAttribute()&&nP(this.preprocessor.peek(1))?Y$.AMBIGUOUS_AMPERSAND:this.returnState):this.state=this.returnState}_stateAmbiguousAmpersand(e){nP(e)?this._flushCodePointConsumedAsCharacterReference(e):(e===l$.SEMICOLON&&this._err(g$.unknownNamedCharacterReference),this.state=this.returnState,this._callState(e))}}const sP=new Set([U$.DD,U$.DT,U$.LI,U$.OPTGROUP,U$.OPTION,U$.P,U$.RB,U$.RP,U$.RT,U$.RTC]),lP=new Set([...sP,U$.CAPTION,U$.COLGROUP,U$.TBODY,U$.TD,U$.TFOOT,U$.TH,U$.THEAD,U$.TR]),cP=new Set([U$.APPLET,U$.CAPTION,U$.HTML,U$.MARQUEE,U$.OBJECT,U$.TABLE,U$.TD,U$.TEMPLATE,U$.TH]),uP=new Set([...cP,U$.OL,U$.UL]),dP=new Set([...cP,U$.BUTTON]),pP=new Set([U$.ANNOTATION_XML,U$.MI,U$.MN,U$.MO,U$.MS,U$.MTEXT]),hP=new Set([U$.DESC,U$.FOREIGN_OBJECT,U$.TITLE]),fP=new Set([U$.TR,U$.TEMPLATE,U$.HTML]),mP=new Set([U$.TBODY,U$.TFOOT,U$.THEAD,U$.TEMPLATE,U$.HTML]),gP=new Set([U$.TABLE,U$.TEMPLATE,U$.HTML]),vP=new Set([U$.TD,U$.TH]);class bP{get currentTmplContentOrNode(){return this._isInTemplate()?this.treeAdapter.getTemplateContent(this.current):this.current}constructor(e,t,n){this.treeAdapter=t,this.handler=n,this.items=[],this.tagIDs=[],this.stackTop=-1,this.tmplCount=0,this.currentTagId=U$.UNKNOWN,this.current=e}_indexOf(e){return this.items.lastIndexOf(e,this.stackTop)}_isInTemplate(){return 
this.currentTagId===U$.TEMPLATE&&this.treeAdapter.getNamespaceURI(this.current)===D$.HTML}_updateCurrentElement(){this.current=this.items[this.stackTop],this.currentTagId=this.tagIDs[this.stackTop]}push(e,t){this.stackTop++,this.items[this.stackTop]=e,this.current=e,this.tagIDs[this.stackTop]=t,this.currentTagId=t,this._isInTemplate()&&this.tmplCount++,this.handler.onItemPush(e,t,!0)}pop(){const e=this.current;this.tmplCount>0&&this._isInTemplate()&&this.tmplCount--,this.stackTop--,this._updateCurrentElement(),this.handler.onItemPop(e,!0)}replace(e,t){const n=this._indexOf(e);this.items[n]=t,n===this.stackTop&&(this.current=t)}insertAfter(e,t,n){const r=this._indexOf(e)+1;this.items.splice(r,0,t),this.tagIDs.splice(r,0,n),this.stackTop++,r===this.stackTop&&this._updateCurrentElement(),this.current&&void 0!==this.currentTagId&&this.handler.onItemPush(this.current,this.currentTagId,r===this.stackTop)}popUntilTagNamePopped(e){let t=this.stackTop+1;do{t=this.tagIDs.lastIndexOf(e,t-1)}while(t>0&&this.treeAdapter.getNamespaceURI(this.items[t])!==D$.HTML);this.shortenToLength(Math.max(t,0))}shortenToLength(e){for(;this.stackTop>=e;){const t=this.current;this.tmplCount>0&&this._isInTemplate()&&(this.tmplCount-=1),this.stackTop--,this._updateCurrentElement(),this.handler.onItemPop(t,this.stackTop=0;n--)if(e.has(this.tagIDs[n])&&this.treeAdapter.getNamespaceURI(this.items[n])===t)return n;return-1}clearBackTo(e,t){const n=this._indexOfTagNames(e,t);this.shortenToLength(n+1)}clearBackToTableContext(){this.clearBackTo(gP,D$.HTML)}clearBackToTableBodyContext(){this.clearBackTo(mP,D$.HTML)}clearBackToTableRowContext(){this.clearBackTo(fP,D$.HTML)}remove(e){const t=this._indexOf(e);t>=0&&(t===this.stackTop?this.pop():(this.items.splice(t,1),this.tagIDs.splice(t,1),this.stackTop--,this._updateCurrentElement(),this.handler.onItemPop(e,!1)))}tryPeekProperlyNestedBodyElement(){return this.stackTop>=1&&this.tagIDs[1]===U$.BODY?this.items[1]:null}contains(e){return 
this._indexOf(e)>-1}getCommonAncestor(e){const t=this._indexOf(e)-1;return t>=0?this.items[t]:null}isRootHtmlElementCurrent(){return 0===this.stackTop&&this.tagIDs[0]===U$.HTML}hasInDynamicScope(e,t){for(let n=this.stackTop;n>=0;n--){const r=this.tagIDs[n];switch(this.treeAdapter.getNamespaceURI(this.items[n])){case D$.HTML:if(r===e)return!0;if(t.has(r))return!1;break;case D$.SVG:if(hP.has(r))return!1;break;case D$.MATHML:if(pP.has(r))return!1}}return!0}hasInScope(e){return this.hasInDynamicScope(e,cP)}hasInListItemScope(e){return this.hasInDynamicScope(e,uP)}hasInButtonScope(e){return this.hasInDynamicScope(e,dP)}hasNumberedHeaderInScope(){for(let e=this.stackTop;e>=0;e--){const t=this.tagIDs[e];switch(this.treeAdapter.getNamespaceURI(this.items[e])){case D$.HTML:if(G$.has(t))return!0;if(cP.has(t))return!1;break;case D$.SVG:if(hP.has(t))return!1;break;case D$.MATHML:if(pP.has(t))return!1}}return!0}hasInTableScope(e){for(let t=this.stackTop;t>=0;t--)if(this.treeAdapter.getNamespaceURI(this.items[t])===D$.HTML)switch(this.tagIDs[t]){case e:return!0;case U$.TABLE:case U$.HTML:return!1}return!0}hasTableBodyContextInTableScope(){for(let e=this.stackTop;e>=0;e--)if(this.treeAdapter.getNamespaceURI(this.items[e])===D$.HTML)switch(this.tagIDs[e]){case U$.TBODY:case U$.THEAD:case U$.TFOOT:return!0;case U$.TABLE:case U$.HTML:return!1}return!0}hasInSelectScope(e){for(let t=this.stackTop;t>=0;t--)if(this.treeAdapter.getNamespaceURI(this.items[t])===D$.HTML)switch(this.tagIDs[t]){case e:return!0;case U$.OPTION:case U$.OPTGROUP:break;default:return!1}return!0}generateImpliedEndTags(){for(;void 0!==this.currentTagId&&sP.has(this.currentTagId);)this.pop()}generateImpliedEndTagsThoroughly(){for(;void 0!==this.currentTagId&&lP.has(this.currentTagId);)this.pop()}generateImpliedEndTagsWithExclusion(e){for(;void 0!==this.currentTagId&&this.currentTagId!==e&&lP.has(this.currentTagId);)this.pop()}}var yP,OP;(OP=yP||(yP={}))[OP.Marker=0]="Marker",OP[OP.Element=1]="Element";const 
wP={type:yP.Marker};class xP{constructor(e){this.treeAdapter=e,this.entries=[],this.bookmark=null}_getNoahArkConditionCandidates(e,t){const n=[],r=t.length,a=this.treeAdapter.getTagName(e),o=this.treeAdapter.getNamespaceURI(e);for(let e=0;e[e.name,e.value])));let a=0;for(let e=0;er.get(e.name)===e.value))&&(a+=1,a>=3&&this.entries.splice(t.idx,1))}}insertMarker(){this.entries.unshift(wP)}pushElement(e,t){this._ensureNoahArkCondition(e),this.entries.unshift({type:yP.Element,element:e,token:t})}insertElementAfterBookmark(e,t){const n=this.entries.indexOf(this.bookmark);this.entries.splice(n,0,{type:yP.Element,element:e,token:t})}removeEntry(e){const t=this.entries.indexOf(e);-1!==t&&this.entries.splice(t,1)}clearToLastMarker(){const e=this.entries.indexOf(wP);-1===e?this.entries.length=0:this.entries.splice(0,e+1)}getElementEntryInScopeWithTagName(e){const t=this.entries.find((t=>t.type===yP.Marker||this.treeAdapter.getTagName(t.element)===e));return t&&t.type===yP.Element?t:null}getElementEntry(e){return this.entries.find((t=>t.type===yP.Element&&t.element===e))}}const kP={createDocument:()=>({nodeName:"#document",mode:R$.NO_QUIRKS,childNodes:[]}),createDocumentFragment:()=>({nodeName:"#document-fragment",childNodes:[]}),createElement:(e,t,n)=>({nodeName:e,tagName:e,attrs:n,namespaceURI:t,childNodes:[],parentNode:null}),createCommentNode:e=>({nodeName:"#comment",data:e,parentNode:null}),createTextNode:e=>({nodeName:"#text",value:e,parentNode:null}),appendChild(e,t){e.childNodes.push(t),t.parentNode=e},insertBefore(e,t,n){const r=e.childNodes.indexOf(n);e.childNodes.splice(r,0,t),t.parentNode=e},setTemplateContent(e,t){e.content=t},getTemplateContent:e=>e.content,setDocumentType(e,t,n,r){const a=e.childNodes.find((e=>"#documentType"===e.nodeName));if(a)a.name=t,a.publicId=n,a.systemId=r;else{const 
a={nodeName:"#documentType",name:t,publicId:n,systemId:r,parentNode:null};kP.appendChild(e,a)}},setDocumentMode(e,t){e.mode=t},getDocumentMode:e=>e.mode,detachNode(e){if(e.parentNode){const t=e.parentNode.childNodes.indexOf(e);e.parentNode.childNodes.splice(t,1),e.parentNode=null}},insertText(e,t){if(e.childNodes.length>0){const n=e.childNodes[e.childNodes.length-1];if(kP.isTextNode(n))return void(n.value+=t)}kP.appendChild(e,kP.createTextNode(t))},insertTextBefore(e,t,n){const r=e.childNodes[e.childNodes.indexOf(n)-1];r&&kP.isTextNode(r)?r.value+=t:kP.insertBefore(e,kP.createTextNode(t),n)},adoptAttributes(e,t){const n=new Set(e.attrs.map((e=>e.name)));for(let r=0;re.childNodes[0],getChildNodes:e=>e.childNodes,getParentNode:e=>e.parentNode,getAttrList:e=>e.attrs,getTagName:e=>e.tagName,getNamespaceURI:e=>e.namespaceURI,getTextNodeContent:e=>e.value,getCommentNodeContent:e=>e.data,getDocumentTypeNodeName:e=>e.name,getDocumentTypeNodePublicId:e=>e.publicId,getDocumentTypeNodeSystemId:e=>e.systemId,isTextNode:e=>"#text"===e.nodeName,isCommentNode:e=>"#comment"===e.nodeName,isDocumentTypeNode:e=>"#documentType"===e.nodeName,isElementNode:e=>Object.prototype.hasOwnProperty.call(e,"tagName"),setNodeSourceCodeLocation(e,t){e.sourceCodeLocation=t},getNodeSourceCodeLocation:e=>e.sourceCodeLocation,updateNodeSourceCodeLocation(e,t){e.sourceCodeLocation={...e.sourceCodeLocation,...t}}},SP="html",_P=["+//silmaril//dtd html pro v0r11 19970101//","-//as//dtd html 3.0 aswedit + extensions//","-//advasoft ltd//dtd html 3.0 aswedit + extensions//","-//ietf//dtd html 2.0 level 1//","-//ietf//dtd html 2.0 level 2//","-//ietf//dtd html 2.0 strict level 1//","-//ietf//dtd html 2.0 strict level 2//","-//ietf//dtd html 2.0 strict//","-//ietf//dtd html 2.0//","-//ietf//dtd html 2.1e//","-//ietf//dtd html 3.0//","-//ietf//dtd html 3.2 final//","-//ietf//dtd html 3.2//","-//ietf//dtd html 3//","-//ietf//dtd html level 0//","-//ietf//dtd html level 1//","-//ietf//dtd html level 
2//","-//ietf//dtd html level 3//","-//ietf//dtd html strict level 0//","-//ietf//dtd html strict level 1//","-//ietf//dtd html strict level 2//","-//ietf//dtd html strict level 3//","-//ietf//dtd html strict//","-//ietf//dtd html//","-//metrius//dtd metrius presentational//","-//microsoft//dtd internet explorer 2.0 html strict//","-//microsoft//dtd internet explorer 2.0 html//","-//microsoft//dtd internet explorer 2.0 tables//","-//microsoft//dtd internet explorer 3.0 html strict//","-//microsoft//dtd internet explorer 3.0 html//","-//microsoft//dtd internet explorer 3.0 tables//","-//netscape comm. corp.//dtd html//","-//netscape comm. corp.//dtd strict html//","-//o'reilly and associates//dtd html 2.0//","-//o'reilly and associates//dtd html extended 1.0//","-//o'reilly and associates//dtd html extended relaxed 1.0//","-//sq//dtd html 2.0 hotmetal + extensions//","-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//","-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//","-//spyglass//dtd html 2.0 extended//","-//sun microsystems corp.//dtd hotjava html//","-//sun microsystems corp.//dtd hotjava strict html//","-//w3c//dtd html 3 1995-03-24//","-//w3c//dtd html 3.2 draft//","-//w3c//dtd html 3.2 final//","-//w3c//dtd html 3.2//","-//w3c//dtd html 3.2s draft//","-//w3c//dtd html 4.0 frameset//","-//w3c//dtd html 4.0 transitional//","-//w3c//dtd html experimental 19960712//","-//w3c//dtd html experimental 970421//","-//w3c//dtd w3 html//","-//w3o//dtd w3 html 3.0//","-//webtechs//dtd mozilla html 2.0//","-//webtechs//dtd mozilla html//"],TP=[..._P,"-//w3c//dtd html 4.01 frameset//","-//w3c//dtd html 4.01 transitional//"],EP=new Set(["-//w3o//dtd w3 html strict 3.0//en//","-/w3c/dtd html 4.0 transitional/en","html"]),AP=["-//w3c//dtd xhtml 1.0 frameset//","-//w3c//dtd xhtml 1.0 transitional//"],CP=[...AP,"-//w3c//dtd html 4.01 frameset//","-//w3c//dtd html 4.01 transitional//"];function $P(e,t){return 
t.some((t=>e.startsWith(t)))}const PP="text/html",DP="application/xhtml+xml",IP=new Map(["attributeName","attributeType","baseFrequency","baseProfile","calcMode","clipPathUnits","diffuseConstant","edgeMode","filterUnits","glyphRef","gradientTransform","gradientUnits","kernelMatrix","kernelUnitLength","keyPoints","keySplines","keyTimes","lengthAdjust","limitingConeAngle","markerHeight","markerUnits","markerWidth","maskContentUnits","maskUnits","numOctaves","pathLength","patternContentUnits","patternTransform","patternUnits","pointsAtX","pointsAtY","pointsAtZ","preserveAlpha","preserveAspectRatio","primitiveUnits","refX","refY","repeatCount","repeatDur","requiredExtensions","requiredFeatures","specularConstant","specularExponent","spreadMethod","startOffset","stdDeviation","stitchTiles","surfaceScale","systemLanguage","tableValues","targetX","targetY","textLength","viewBox","viewTarget","xChannelSelector","yChannelSelector","zoomAndPan"].map((e=>[e.toLowerCase(),e]))),MP=new Map([["xlink:actuate",{prefix:"xlink",name:"actuate",namespace:D$.XLINK}],["xlink:arcrole",{prefix:"xlink",name:"arcrole",namespace:D$.XLINK}],["xlink:href",{prefix:"xlink",name:"href",namespace:D$.XLINK}],["xlink:role",{prefix:"xlink",name:"role",namespace:D$.XLINK}],["xlink:show",{prefix:"xlink",name:"show",namespace:D$.XLINK}],["xlink:title",{prefix:"xlink",name:"title",namespace:D$.XLINK}],["xlink:type",{prefix:"xlink",name:"type",namespace:D$.XLINK}],["xml:lang",{prefix:"xml",name:"lang",namespace:D$.XML}],["xml:space",{prefix:"xml",name:"space",namespace:D$.XML}],["xmlns",{prefix:"",name:"xmlns",namespace:D$.XMLNS}],["xmlns:xlink",{prefix:"xmlns",name:"xlink",namespace:D$.XMLNS}]]),NP=new 
Map(["altGlyph","altGlyphDef","altGlyphItem","animateColor","animateMotion","animateTransform","clipPath","feBlend","feColorMatrix","feComponentTransfer","feComposite","feConvolveMatrix","feDiffuseLighting","feDisplacementMap","feDistantLight","feFlood","feFuncA","feFuncB","feFuncG","feFuncR","feGaussianBlur","feImage","feMerge","feMergeNode","feMorphology","feOffset","fePointLight","feSpecularLighting","feSpotLight","feTile","feTurbulence","foreignObject","glyphRef","linearGradient","radialGradient","textPath"].map((e=>[e.toLowerCase(),e]))),RP=new Set([U$.B,U$.BIG,U$.BLOCKQUOTE,U$.BODY,U$.BR,U$.CENTER,U$.CODE,U$.DD,U$.DIV,U$.DL,U$.DT,U$.EM,U$.EMBED,U$.H1,U$.H2,U$.H3,U$.H4,U$.H5,U$.H6,U$.HEAD,U$.HR,U$.I,U$.IMG,U$.LI,U$.LISTING,U$.MENU,U$.META,U$.NOBR,U$.OL,U$.P,U$.PRE,U$.RUBY,U$.S,U$.SMALL,U$.SPAN,U$.STRONG,U$.STRIKE,U$.SUB,U$.SUP,U$.TABLE,U$.TT,U$.U,U$.UL,U$.VAR]);function LP(e){for(let t=0;t0&&this._setContextModes(e,t)}onItemPop(e,t){var n,r;if(this.options.sourceCodeLocationInfo&&this._setEndLocation(e,this.currentToken),null===(r=(n=this.treeAdapter).onItemPop)||void 0===r||r.call(n,e,this.openElements.current),t){let e,t;0===this.openElements.stackTop&&this.fragmentContext?(e=this.fragmentContext,t=this.fragmentContextID):({current:e,currentTagId:t}=this.openElements),this._setContextModes(e,t)}}_setContextModes(e,t){const n=e===this.document||e&&this.treeAdapter.getNamespaceURI(e)===D$.HTML;this.currentNotInHTML=!n,this.tokenizer.inForeignNode=!n&&void 0!==e&&void 0!==t&&!this._isIntegrationPoint(t,e)}_switchToTextParsing(e,t){this._insertElement(e,D$.HTML),this.tokenizer.state=t,this.originalInsertionMode=this.insertionMode,this.insertionMode=UP.TEXT}switchToPlaintextParsing(){this.insertionMode=UP.TEXT,this.originalInsertionMode=UP.IN_BODY,this.tokenizer.state=J$.PLAINTEXT}_getAdjustedCurrentElement(){return 0===this.openElements.stackTop&&this.fragmentContext?this.fragmentContext:this.openElements.current}_findFormInFragmentContext(){let 
e=this.fragmentContext;for(;e;){if(this.treeAdapter.getTagName(e)===B$.FORM){this.formElement=e;break}e=this.treeAdapter.getParentNode(e)}}_initTokenizerForFragmentParsing(){if(this.fragmentContext&&this.treeAdapter.getNamespaceURI(this.fragmentContext)===D$.HTML)switch(this.fragmentContextID){case U$.TITLE:case U$.TEXTAREA:this.tokenizer.state=J$.RCDATA;break;case U$.STYLE:case U$.XMP:case U$.IFRAME:case U$.NOEMBED:case U$.NOFRAMES:case U$.NOSCRIPT:this.tokenizer.state=J$.RAWTEXT;break;case U$.SCRIPT:this.tokenizer.state=J$.SCRIPT_DATA;break;case U$.PLAINTEXT:this.tokenizer.state=J$.PLAINTEXT}}_setDocumentType(e){const t=e.name||"",n=e.publicId||"",r=e.systemId||"";if(this.treeAdapter.setDocumentType(this.document,t,n,r),e.location){const t=this.treeAdapter.getChildNodes(this.document).find((e=>this.treeAdapter.isDocumentTypeNode(e)));t&&this.treeAdapter.setNodeSourceCodeLocation(t,e.location)}}_attachElementToTree(e,t){if(this.options.sourceCodeLocationInfo){const n=t&&{...t,startTag:t};this.treeAdapter.setNodeSourceCodeLocation(e,n)}if(this._shouldFosterParentOnInsertion())this._fosterParentElement(e);else{const t=this.openElements.currentTmplContentOrNode;this.treeAdapter.appendChild(null!=t?t:this.document,e)}}_appendElement(e,t){const n=this.treeAdapter.createElement(e.tagName,t,e.attrs);this._attachElementToTree(n,e.location)}_insertElement(e,t){const n=this.treeAdapter.createElement(e.tagName,t,e.attrs);this._attachElementToTree(n,e.location),this.openElements.push(n,e.tagID)}_insertFakeElement(e,t){const n=this.treeAdapter.createElement(e,D$.HTML,[]);this._attachElementToTree(n,null),this.openElements.push(n,t)}_insertTemplate(e){const 
t=this.treeAdapter.createElement(e.tagName,D$.HTML,e.attrs),n=this.treeAdapter.createDocumentFragment();this.treeAdapter.setTemplateContent(t,n),this._attachElementToTree(t,e.location),this.openElements.push(t,e.tagID),this.options.sourceCodeLocationInfo&&this.treeAdapter.setNodeSourceCodeLocation(n,null)}_insertFakeRootElement(){const e=this.treeAdapter.createElement(B$.HTML,D$.HTML,[]);this.options.sourceCodeLocationInfo&&this.treeAdapter.setNodeSourceCodeLocation(e,null),this.treeAdapter.appendChild(this.openElements.current,e),this.openElements.push(e,U$.HTML)}_appendCommentNode(e,t){const n=this.treeAdapter.createCommentNode(e.data);this.treeAdapter.appendChild(t,n),this.options.sourceCodeLocationInfo&&this.treeAdapter.setNodeSourceCodeLocation(n,e.location)}_insertCharacters(e){let t,n;if(this._shouldFosterParentOnInsertion()?(({parent:t,beforeElement:n}=this._findFosterParentingLocation()),n?this.treeAdapter.insertTextBefore(t,e.chars,n):this.treeAdapter.insertText(t,e.chars)):(t=this.openElements.currentTmplContentOrNode,this.treeAdapter.insertText(t,e.chars)),!e.location)return;const r=this.treeAdapter.getChildNodes(t),a=n?r.lastIndexOf(n):r.length,o=r[a-1];if(this.treeAdapter.getNodeSourceCodeLocation(o)){const{endLine:t,endCol:n,endOffset:r}=e.location;this.treeAdapter.updateNodeSourceCodeLocation(o,{endLine:t,endCol:n,endOffset:r})}else this.options.sourceCodeLocationInfo&&this.treeAdapter.setNodeSourceCodeLocation(o,e.location)}_adoptNodes(e,t){for(let n=this.treeAdapter.getFirstChild(e);n;n=this.treeAdapter.getFirstChild(e))this.treeAdapter.detachNode(n),this.treeAdapter.appendChild(t,n)}_setEndLocation(e,t){if(this.treeAdapter.getNodeSourceCodeLocation(e)&&t.location){const 
n=t.location,r=this.treeAdapter.getTagName(e),a=t.type===b$.END_TAG&&r===t.tagName?{endTag:{...n},endLine:n.endLine,endCol:n.endCol,endOffset:n.endOffset}:{endLine:n.startLine,endCol:n.startCol,endOffset:n.startOffset};this.treeAdapter.updateNodeSourceCodeLocation(e,a)}}shouldProcessStartTagTokenInForeignContent(e){if(!this.currentNotInHTML)return!1;let t,n;return 0===this.openElements.stackTop&&this.fragmentContext?(t=this.fragmentContext,n=this.fragmentContextID):({current:t,currentTagId:n}=this.openElements),(e.tagID!==U$.SVG||this.treeAdapter.getTagName(t)!==B$.ANNOTATION_XML||this.treeAdapter.getNamespaceURI(t)!==D$.MATHML)&&(this.tokenizer.inForeignNode||(e.tagID===U$.MGLYPH||e.tagID===U$.MALIGNMARK)&&void 0!==n&&!this._isIntegrationPoint(n,t,D$.HTML))}_processToken(e){switch(e.type){case b$.CHARACTER:this.onCharacter(e);break;case b$.NULL_CHARACTER:this.onNullCharacter(e);break;case b$.COMMENT:this.onComment(e);break;case b$.DOCTYPE:this.onDoctype(e);break;case b$.START_TAG:this._processStartTag(e);break;case b$.END_TAG:this.onEndTag(e);break;case b$.EOF:this.onEof(e);break;case b$.WHITESPACE_CHARACTER:this.onWhitespaceCharacter(e)}}_isIntegrationPoint(e,t,n){return function(e,t,n,r){return(!r||r===D$.HTML)&&function(e,t,n){if(t===D$.MATHML&&e===U$.ANNOTATION_XML)for(let e=0;ee.type===yP.Marker||this.openElements.contains(e.element)));for(let n=-1===t?e-1:t-1;n>=0;n--){const e=this.activeFormattingElements.entries[n];this._insertElement(e.token,this.treeAdapter.getNamespaceURI(e.element)),e.element=this.openElements.current}}}_closeTableCell(){this.openElements.generateImpliedEndTags(),this.openElements.popUntilTableCellPopped(),this.activeFormattingElements.clearToLastMarker(),this.insertionMode=UP.IN_ROW}_closePElement(){this.openElements.generateImpliedEndTagsWithExclusion(U$.P),this.openElements.popUntilTagNamePopped(U$.P)}_resetInsertionMode(){for(let 
e=this.openElements.stackTop;e>=0;e--)switch(0===e&&this.fragmentContext?this.fragmentContextID:this.openElements.tagIDs[e]){case U$.TR:return void(this.insertionMode=UP.IN_ROW);case U$.TBODY:case U$.THEAD:case U$.TFOOT:return void(this.insertionMode=UP.IN_TABLE_BODY);case U$.CAPTION:return void(this.insertionMode=UP.IN_CAPTION);case U$.COLGROUP:return void(this.insertionMode=UP.IN_COLUMN_GROUP);case U$.TABLE:return void(this.insertionMode=UP.IN_TABLE);case U$.BODY:return void(this.insertionMode=UP.IN_BODY);case U$.FRAMESET:return void(this.insertionMode=UP.IN_FRAMESET);case U$.SELECT:return void this._resetInsertionModeForSelect(e);case U$.TEMPLATE:return void(this.insertionMode=this.tmplInsertionModeStack[0]);case U$.HTML:return void(this.insertionMode=this.headElement?UP.AFTER_HEAD:UP.BEFORE_HEAD);case U$.TD:case U$.TH:if(e>0)return void(this.insertionMode=UP.IN_CELL);break;case U$.HEAD:if(e>0)return void(this.insertionMode=UP.IN_HEAD)}this.insertionMode=UP.IN_BODY}_resetInsertionModeForSelect(e){if(e>0)for(let t=e-1;t>0;t--){const e=this.openElements.tagIDs[t];if(e===U$.TEMPLATE)break;if(e===U$.TABLE)return void(this.insertionMode=UP.IN_SELECT_IN_TABLE)}this.insertionMode=UP.IN_SELECT}_isElementCausesFosterParenting(e){return ZP.has(e)}_shouldFosterParentOnInsertion(){return this.fosterParentingEnabled&&void 0!==this.openElements.currentTagId&&this._isElementCausesFosterParenting(this.openElements.currentTagId)}_findFosterParentingLocation(){for(let e=this.openElements.stackTop;e>=0;e--){const t=this.openElements.items[e];switch(this.openElements.tagIDs[e]){case U$.TEMPLATE:if(this.treeAdapter.getNamespaceURI(t)===D$.HTML)return{parent:this.treeAdapter.getTemplateContent(t),beforeElement:null};break;case U$.TABLE:{const n=this.treeAdapter.getParentNode(t);return n?{parent:n,beforeElement:t}:{parent:this.openElements.items[e-1],beforeElement:null}}}}return{parent:this.openElements.items[0],beforeElement:null}}_fosterParentElement(e){const 
t=this._findFosterParentingLocation();t.beforeElement?this.treeAdapter.insertBefore(t.parent,e,t.beforeElement):this.treeAdapter.appendChild(t.parent,e)}_isSpecialElement(e,t){const n=this.treeAdapter.getNamespaceURI(e);return X$[n].has(t)}onCharacter(e){if(this.skipNextNewLine=!1,this.tokenizer.inForeignNode)!function(e,t){e._insertCharacters(t),e.framesetOk=!1}(this,e);else switch(this.insertionMode){case UP.INITIAL:tD(this,e);break;case UP.BEFORE_HTML:nD(this,e);break;case UP.BEFORE_HEAD:rD(this,e);break;case UP.IN_HEAD:iD(this,e);break;case UP.IN_HEAD_NO_SCRIPT:sD(this,e);break;case UP.AFTER_HEAD:lD(this,e);break;case UP.IN_BODY:case UP.IN_CAPTION:case UP.IN_CELL:case UP.IN_TEMPLATE:dD(this,e);break;case UP.TEXT:case UP.IN_SELECT:case UP.IN_SELECT_IN_TABLE:this._insertCharacters(e);break;case UP.IN_TABLE:case UP.IN_TABLE_BODY:case UP.IN_ROW:OD(this,e);break;case UP.IN_TABLE_TEXT:_D(this,e);break;case UP.IN_COLUMN_GROUP:CD(this,e);break;case UP.AFTER_BODY:BD(this,e);break;case UP.AFTER_AFTER_BODY:jD(this,e)}}onNullCharacter(e){if(this.skipNextNewLine=!1,this.tokenizer.inForeignNode)!function(e,t){t.chars=s$,e._insertCharacters(t)}(this,e);else switch(this.insertionMode){case UP.INITIAL:tD(this,e);break;case UP.BEFORE_HTML:nD(this,e);break;case UP.BEFORE_HEAD:rD(this,e);break;case UP.IN_HEAD:iD(this,e);break;case UP.IN_HEAD_NO_SCRIPT:sD(this,e);break;case UP.AFTER_HEAD:lD(this,e);break;case UP.TEXT:this._insertCharacters(e);break;case UP.IN_TABLE:case UP.IN_TABLE_BODY:case UP.IN_ROW:OD(this,e);break;case UP.IN_COLUMN_GROUP:CD(this,e);break;case UP.AFTER_BODY:BD(this,e);break;case UP.AFTER_AFTER_BODY:jD(this,e)}}onComment(e){if(this.skipNextNewLine=!1,this.currentNotInHTML)JP(this,e);else switch(this.insertionMode){case UP.INITIAL:case UP.BEFORE_HTML:case UP.BEFORE_HEAD:case UP.IN_HEAD:case UP.IN_HEAD_NO_SCRIPT:case UP.AFTER_HEAD:case UP.IN_BODY:case UP.IN_TABLE:case UP.IN_CAPTION:case UP.IN_COLUMN_GROUP:case UP.IN_TABLE_BODY:case UP.IN_ROW:case UP.IN_CELL:case 
UP.IN_SELECT:case UP.IN_SELECT_IN_TABLE:case UP.IN_TEMPLATE:case UP.IN_FRAMESET:case UP.AFTER_FRAMESET:JP(this,e);break;case UP.IN_TABLE_TEXT:TD(this,e);break;case UP.AFTER_BODY:!function(e,t){e._appendCommentNode(t,e.openElements.items[0])}(this,e);break;case UP.AFTER_AFTER_BODY:case UP.AFTER_AFTER_FRAMESET:!function(e,t){e._appendCommentNode(t,e.document)}(this,e)}}onDoctype(e){switch(this.skipNextNewLine=!1,this.insertionMode){case UP.INITIAL:!function(e,t){e._setDocumentType(t);const n=t.forceQuirks?R$.QUIRKS:function(e){if(e.name!==SP)return R$.QUIRKS;const{systemId:t}=e;if(t&&"http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd"===t.toLowerCase())return R$.QUIRKS;let{publicId:n}=e;if(null!==n){if(n=n.toLowerCase(),EP.has(n))return R$.QUIRKS;let e=null===t?TP:_P;if($P(n,e))return R$.QUIRKS;if(e=null===t?AP:CP,$P(n,e))return R$.LIMITED_QUIRKS}return R$.NO_QUIRKS}(t);(function(e){return e.name===SP&&null===e.publicId&&(null===e.systemId||"about:legacy-compat"===e.systemId)})(t)||e._err(t,g$.nonConformingDoctype),e.treeAdapter.setDocumentMode(e.document,n),e.insertionMode=UP.BEFORE_HTML}(this,e);break;case UP.BEFORE_HEAD:case UP.IN_HEAD:case UP.IN_HEAD_NO_SCRIPT:case UP.AFTER_HEAD:this._err(e,g$.misplacedDoctype);break;case UP.IN_TABLE_TEXT:TD(this,e)}}onStartTag(e){this.skipNextNewLine=!1,this.currentToken=e,this._processStartTag(e),e.selfClosing&&!e.ackSelfClosing&&this._err(e,g$.nonVoidHtmlElementStartTagWithTrailingSolidus)}_processStartTag(e){this.shouldProcessStartTagTokenInForeignContent(e)?function(e,t){if(function(e){const t=e.tagID;return t===U$.FONT&&e.attrs.some((({name:e})=>e===M$.COLOR||e===M$.SIZE||e===M$.FACE))||RP.has(t)}(t))UD(e),e._startTagOutsideForeignContent(t);else{const n=e._getAdjustedCurrentElement(),r=e.treeAdapter.getNamespaceURI(n);r===D$.MATHML?LP(t):r===D$.SVG&&(function(e){const 
t=NP.get(e.tagName);null!=t&&(e.tagName=t,e.tagID=q$(e.tagName))}(t),BP(t)),jP(t),t.selfClosing?e._appendElement(t,r):e._insertElement(t,r),t.ackSelfClosing=!0}}(this,e):this._startTagOutsideForeignContent(e)}_startTagOutsideForeignContent(e){switch(this.insertionMode){case UP.INITIAL:tD(this,e);break;case UP.BEFORE_HTML:!function(e,t){t.tagID===U$.HTML?(e._insertElement(t,D$.HTML),e.insertionMode=UP.BEFORE_HEAD):nD(e,t)}(this,e);break;case UP.BEFORE_HEAD:!function(e,t){switch(t.tagID){case U$.HTML:gD(e,t);break;case U$.HEAD:e._insertElement(t,D$.HTML),e.headElement=e.openElements.current,e.insertionMode=UP.IN_HEAD;break;default:rD(e,t)}}(this,e);break;case UP.IN_HEAD:aD(this,e);break;case UP.IN_HEAD_NO_SCRIPT:!function(e,t){switch(t.tagID){case U$.HTML:gD(e,t);break;case U$.BASEFONT:case U$.BGSOUND:case U$.HEAD:case U$.LINK:case U$.META:case U$.NOFRAMES:case U$.STYLE:aD(e,t);break;case U$.NOSCRIPT:e._err(t,g$.nestedNoscriptInHead);break;default:sD(e,t)}}(this,e);break;case UP.AFTER_HEAD:!function(e,t){switch(t.tagID){case U$.HTML:gD(e,t);break;case U$.BODY:e._insertElement(t,D$.HTML),e.framesetOk=!1,e.insertionMode=UP.IN_BODY;break;case U$.FRAMESET:e._insertElement(t,D$.HTML),e.insertionMode=UP.IN_FRAMESET;break;case U$.BASE:case U$.BASEFONT:case U$.BGSOUND:case U$.LINK:case U$.META:case U$.NOFRAMES:case U$.SCRIPT:case U$.STYLE:case U$.TEMPLATE:case U$.TITLE:e._err(t,g$.abandonedHeadElementChild),e.openElements.push(e.headElement,U$.HEAD),aD(e,t),e.openElements.remove(e.headElement);break;case U$.HEAD:e._err(t,g$.misplacedStartTagForHeadElement);break;default:lD(e,t)}}(this,e);break;case UP.IN_BODY:gD(this,e);break;case UP.IN_TABLE:wD(this,e);break;case UP.IN_TABLE_TEXT:TD(this,e);break;case UP.IN_CAPTION:!function(e,t){const 
n=t.tagID;ED.has(n)?e.openElements.hasInTableScope(U$.CAPTION)&&(e.openElements.generateImpliedEndTags(),e.openElements.popUntilTagNamePopped(U$.CAPTION),e.activeFormattingElements.clearToLastMarker(),e.insertionMode=UP.IN_TABLE,wD(e,t)):gD(e,t)}(this,e);break;case UP.IN_COLUMN_GROUP:AD(this,e);break;case UP.IN_TABLE_BODY:$D(this,e);break;case UP.IN_ROW:DD(this,e);break;case UP.IN_CELL:!function(e,t){const n=t.tagID;ED.has(n)?(e.openElements.hasInTableScope(U$.TD)||e.openElements.hasInTableScope(U$.TH))&&(e._closeTableCell(),DD(e,t)):gD(e,t)}(this,e);break;case UP.IN_SELECT:MD(this,e);break;case UP.IN_SELECT_IN_TABLE:!function(e,t){const n=t.tagID;n===U$.CAPTION||n===U$.TABLE||n===U$.TBODY||n===U$.TFOOT||n===U$.THEAD||n===U$.TR||n===U$.TD||n===U$.TH?(e.openElements.popUntilTagNamePopped(U$.SELECT),e._resetInsertionMode(),e._processStartTag(t)):MD(e,t)}(this,e);break;case UP.IN_TEMPLATE:!function(e,t){switch(t.tagID){case U$.BASE:case U$.BASEFONT:case U$.BGSOUND:case U$.LINK:case U$.META:case U$.NOFRAMES:case U$.SCRIPT:case U$.STYLE:case U$.TEMPLATE:case U$.TITLE:aD(e,t);break;case U$.CAPTION:case U$.COLGROUP:case U$.TBODY:case U$.TFOOT:case U$.THEAD:e.tmplInsertionModeStack[0]=UP.IN_TABLE,e.insertionMode=UP.IN_TABLE,wD(e,t);break;case U$.COL:e.tmplInsertionModeStack[0]=UP.IN_COLUMN_GROUP,e.insertionMode=UP.IN_COLUMN_GROUP,AD(e,t);break;case U$.TR:e.tmplInsertionModeStack[0]=UP.IN_TABLE_BODY,e.insertionMode=UP.IN_TABLE_BODY,$D(e,t);break;case U$.TD:case U$.TH:e.tmplInsertionModeStack[0]=UP.IN_ROW,e.insertionMode=UP.IN_ROW,DD(e,t);break;default:e.tmplInsertionModeStack[0]=UP.IN_BODY,e.insertionMode=UP.IN_BODY,gD(e,t)}}(this,e);break;case UP.AFTER_BODY:!function(e,t){t.tagID===U$.HTML?gD(e,t):BD(e,t)}(this,e);break;case UP.IN_FRAMESET:!function(e,t){switch(t.tagID){case U$.HTML:gD(e,t);break;case U$.FRAMESET:e._insertElement(t,D$.HTML);break;case U$.FRAME:e._appendElement(t,D$.HTML),t.ackSelfClosing=!0;break;case U$.NOFRAMES:aD(e,t)}}(this,e);break;case 
UP.AFTER_FRAMESET:!function(e,t){switch(t.tagID){case U$.HTML:gD(e,t);break;case U$.NOFRAMES:aD(e,t)}}(this,e);break;case UP.AFTER_AFTER_BODY:!function(e,t){t.tagID===U$.HTML?gD(e,t):jD(e,t)}(this,e);break;case UP.AFTER_AFTER_FRAMESET:!function(e,t){switch(t.tagID){case U$.HTML:gD(e,t);break;case U$.NOFRAMES:aD(e,t)}}(this,e)}}onEndTag(e){this.skipNextNewLine=!1,this.currentToken=e,this.currentNotInHTML?function(e,t){if(t.tagID===U$.P||t.tagID===U$.BR)return UD(e),void e._endTagOutsideForeignContent(t);for(let n=e.openElements.stackTop;n>0;n--){const r=e.openElements.items[n];if(e.treeAdapter.getNamespaceURI(r)===D$.HTML){e._endTagOutsideForeignContent(t);break}const a=e.treeAdapter.getTagName(r);if(a.toLowerCase()===t.tagName){t.tagName=a,e.openElements.shortenToLength(n);break}}}(this,e):this._endTagOutsideForeignContent(e)}_endTagOutsideForeignContent(e){switch(this.insertionMode){case UP.INITIAL:tD(this,e);break;case UP.BEFORE_HTML:!function(e,t){const n=t.tagID;n!==U$.HTML&&n!==U$.HEAD&&n!==U$.BODY&&n!==U$.BR||nD(e,t)}(this,e);break;case UP.BEFORE_HEAD:!function(e,t){const n=t.tagID;n===U$.HEAD||n===U$.BODY||n===U$.HTML||n===U$.BR?rD(e,t):e._err(t,g$.endTagWithoutMatchingOpenElement)}(this,e);break;case UP.IN_HEAD:!function(e,t){switch(t.tagID){case U$.HEAD:e.openElements.pop(),e.insertionMode=UP.AFTER_HEAD;break;case U$.BODY:case U$.BR:case U$.HTML:iD(e,t);break;case U$.TEMPLATE:oD(e,t);break;default:e._err(t,g$.endTagWithoutMatchingOpenElement)}}(this,e);break;case UP.IN_HEAD_NO_SCRIPT:!function(e,t){switch(t.tagID){case U$.NOSCRIPT:e.openElements.pop(),e.insertionMode=UP.IN_HEAD;break;case U$.BR:sD(e,t);break;default:e._err(t,g$.endTagWithoutMatchingOpenElement)}}(this,e);break;case UP.AFTER_HEAD:!function(e,t){switch(t.tagID){case U$.BODY:case U$.HTML:case U$.BR:lD(e,t);break;case U$.TEMPLATE:oD(e,t);break;default:e._err(t,g$.endTagWithoutMatchingOpenElement)}}(this,e);break;case UP.IN_BODY:bD(this,e);break;case UP.TEXT:!function(e,t){var 
n;t.tagID===U$.SCRIPT&&(null===(n=e.scriptHandler)||void 0===n||n.call(e,e.openElements.current)),e.openElements.pop(),e.insertionMode=e.originalInsertionMode}(this,e);break;case UP.IN_TABLE:xD(this,e);break;case UP.IN_TABLE_TEXT:TD(this,e);break;case UP.IN_CAPTION:!function(e,t){const n=t.tagID;switch(n){case U$.CAPTION:case U$.TABLE:e.openElements.hasInTableScope(U$.CAPTION)&&(e.openElements.generateImpliedEndTags(),e.openElements.popUntilTagNamePopped(U$.CAPTION),e.activeFormattingElements.clearToLastMarker(),e.insertionMode=UP.IN_TABLE,n===U$.TABLE&&xD(e,t));break;case U$.BODY:case U$.COL:case U$.COLGROUP:case U$.HTML:case U$.TBODY:case U$.TD:case U$.TFOOT:case U$.TH:case U$.THEAD:case U$.TR:break;default:bD(e,t)}}(this,e);break;case UP.IN_COLUMN_GROUP:!function(e,t){switch(t.tagID){case U$.COLGROUP:e.openElements.currentTagId===U$.COLGROUP&&(e.openElements.pop(),e.insertionMode=UP.IN_TABLE);break;case U$.TEMPLATE:oD(e,t);break;case U$.COL:break;default:CD(e,t)}}(this,e);break;case UP.IN_TABLE_BODY:PD(this,e);break;case UP.IN_ROW:ID(this,e);break;case UP.IN_CELL:!function(e,t){const n=t.tagID;switch(n){case U$.TD:case U$.TH:e.openElements.hasInTableScope(n)&&(e.openElements.generateImpliedEndTags(),e.openElements.popUntilTagNamePopped(n),e.activeFormattingElements.clearToLastMarker(),e.insertionMode=UP.IN_ROW);break;case U$.TABLE:case U$.TBODY:case U$.TFOOT:case U$.THEAD:case U$.TR:e.openElements.hasInTableScope(n)&&(e._closeTableCell(),ID(e,t));break;case U$.BODY:case U$.CAPTION:case U$.COL:case U$.COLGROUP:case U$.HTML:break;default:bD(e,t)}}(this,e);break;case UP.IN_SELECT:ND(this,e);break;case UP.IN_SELECT_IN_TABLE:!function(e,t){const n=t.tagID;n===U$.CAPTION||n===U$.TABLE||n===U$.TBODY||n===U$.TFOOT||n===U$.THEAD||n===U$.TR||n===U$.TD||n===U$.TH?e.openElements.hasInTableScope(n)&&(e.openElements.popUntilTagNamePopped(U$.SELECT),e._resetInsertionMode(),e.onEndTag(t)):ND(e,t)}(this,e);break;case 
UP.IN_TEMPLATE:!function(e,t){t.tagID===U$.TEMPLATE&&oD(e,t)}(this,e);break;case UP.AFTER_BODY:LD(this,e);break;case UP.IN_FRAMESET:!function(e,t){t.tagID!==U$.FRAMESET||e.openElements.isRootHtmlElementCurrent()||(e.openElements.pop(),e.fragmentContext||e.openElements.currentTagId===U$.FRAMESET||(e.insertionMode=UP.AFTER_FRAMESET))}(this,e);break;case UP.AFTER_FRAMESET:!function(e,t){t.tagID===U$.HTML&&(e.insertionMode=UP.AFTER_AFTER_FRAMESET)}(this,e);break;case UP.AFTER_AFTER_BODY:jD(this,e)}}onEof(e){switch(this.insertionMode){case UP.INITIAL:tD(this,e);break;case UP.BEFORE_HTML:nD(this,e);break;case UP.BEFORE_HEAD:rD(this,e);break;case UP.IN_HEAD:iD(this,e);break;case UP.IN_HEAD_NO_SCRIPT:sD(this,e);break;case UP.AFTER_HEAD:lD(this,e);break;case UP.IN_BODY:case UP.IN_TABLE:case UP.IN_CAPTION:case UP.IN_COLUMN_GROUP:case UP.IN_TABLE_BODY:case UP.IN_ROW:case UP.IN_CELL:case UP.IN_SELECT:case UP.IN_SELECT_IN_TABLE:yD(this,e);break;case UP.TEXT:!function(e,t){e._err(t,g$.eofInElementThatCanContainOnlyText),e.openElements.pop(),e.insertionMode=e.originalInsertionMode,e.onEof(t)}(this,e);break;case UP.IN_TABLE_TEXT:TD(this,e);break;case UP.IN_TEMPLATE:RD(this,e);break;case UP.AFTER_BODY:case UP.IN_FRAMESET:case UP.AFTER_FRAMESET:case UP.AFTER_AFTER_BODY:case UP.AFTER_AFTER_FRAMESET:eD(this,e)}}onWhitespaceCharacter(e){if(this.skipNextNewLine&&(this.skipNextNewLine=!1,e.chars.charCodeAt(0)===l$.LINE_FEED)){if(1===e.chars.length)return;e.chars=e.chars.substr(1)}if(this.tokenizer.inForeignNode)this._insertCharacters(e);else switch(this.insertionMode){case UP.IN_HEAD:case UP.IN_HEAD_NO_SCRIPT:case UP.AFTER_HEAD:case UP.TEXT:case UP.IN_COLUMN_GROUP:case UP.IN_SELECT:case UP.IN_SELECT_IN_TABLE:case UP.IN_FRAMESET:case UP.AFTER_FRAMESET:this._insertCharacters(e);break;case UP.IN_BODY:case UP.IN_CAPTION:case UP.IN_CELL:case UP.IN_TEMPLATE:case UP.AFTER_BODY:case UP.AFTER_AFTER_BODY:case UP.AFTER_AFTER_FRAMESET:uD(this,e);break;case UP.IN_TABLE:case UP.IN_TABLE_BODY:case 
UP.IN_ROW:OD(this,e);break;case UP.IN_TABLE_TEXT:SD(this,e)}}};function VP(e,t){let n=e.activeFormattingElements.getElementEntryInScopeWithTagName(t.tagName);return n?e.openElements.contains(n.element)?e.openElements.hasInScope(t.tagID)||(n=null):(e.activeFormattingElements.removeEntry(n),n=null):vD(e,t),n}function qP(e,t){let n=null,r=e.openElements.stackTop;for(;r>=0;r--){const a=e.openElements.items[r];if(a===t.element)break;e._isSpecialElement(a,e.openElements.tagIDs[r])&&(n=a)}return n||(e.openElements.shortenToLength(Math.max(r,0)),e.activeFormattingElements.removeEntry(t)),n}function WP(e,t,n){let r=t,a=e.openElements.getCommonAncestor(t);for(let o=0,i=a;i!==n;o++,i=a){a=e.openElements.getCommonAncestor(i);const n=e.activeFormattingElements.getElementEntry(i),s=n&&o>=3;!n||s?(s&&e.activeFormattingElements.removeEntry(n),e.openElements.remove(i)):(i=XP(e,n),r===t&&(e.activeFormattingElements.bookmark=n),e.treeAdapter.detachNode(r),e.treeAdapter.appendChild(i,r),r=i)}return r}function XP(e,t){const n=e.treeAdapter.getNamespaceURI(t.element),r=e.treeAdapter.createElement(t.token.tagName,n,t.token.attrs);return e.openElements.replace(t.element,r),t.element=r,r}function GP(e,t,n){const r=q$(e.treeAdapter.getTagName(t));if(e._isElementCausesFosterParenting(r))e._fosterParentElement(n);else{const a=e.treeAdapter.getNamespaceURI(t);r===U$.TEMPLATE&&a===D$.HTML&&(t=e.treeAdapter.getTemplateContent(t)),e.treeAdapter.appendChild(t,n)}}function YP(e,t,n){const r=e.treeAdapter.getNamespaceURI(n.element),{token:a}=n,o=e.treeAdapter.createElement(a.tagName,r,a.attrs);e._adoptNodes(t,o),e.treeAdapter.appendChild(t,o),e.activeFormattingElements.insertElementAfterBookmark(o,a),e.activeFormattingElements.removeEntry(n),e.openElements.remove(n.element),e.openElements.insertAfter(t,o,a.tagID)}function KP(e,t){for(let n=0;n<8;n++){const n=VP(e,t);if(!n)break;const r=qP(e,n);if(!r)break;e.activeFormattingElements.bookmark=n;const 
a=WP(e,r,n.element),o=e.openElements.getCommonAncestor(n.element);e.treeAdapter.detachNode(a),o&&GP(e,o,a),YP(e,r,n)}}function JP(e,t){e._appendCommentNode(t,e.openElements.currentTmplContentOrNode)}function eD(e,t){if(e.stopped=!0,t.location){const n=e.fragmentContext?0:2;for(let r=e.openElements.stackTop;r>=n;r--)e._setEndLocation(e.openElements.items[r],t);if(!e.fragmentContext&&e.openElements.stackTop>=0){const n=e.openElements.items[0],r=e.treeAdapter.getNodeSourceCodeLocation(n);if(r&&!r.endTag&&(e._setEndLocation(n,t),e.openElements.stackTop>=1)){const n=e.openElements.items[1],r=e.treeAdapter.getNodeSourceCodeLocation(n);r&&!r.endTag&&e._setEndLocation(n,t)}}}}function tD(e,t){e._err(t,g$.missingDoctype,!0),e.treeAdapter.setDocumentMode(e.document,R$.QUIRKS),e.insertionMode=UP.BEFORE_HTML,e._processToken(t)}function nD(e,t){e._insertFakeRootElement(),e.insertionMode=UP.BEFORE_HEAD,e._processToken(t)}function rD(e,t){e._insertFakeElement(B$.HEAD,U$.HEAD),e.headElement=e.openElements.current,e.insertionMode=UP.IN_HEAD,e._processToken(t)}function aD(e,t){switch(t.tagID){case U$.HTML:gD(e,t);break;case U$.BASE:case U$.BASEFONT:case U$.BGSOUND:case U$.LINK:case U$.META:e._appendElement(t,D$.HTML),t.ackSelfClosing=!0;break;case U$.TITLE:e._switchToTextParsing(t,J$.RCDATA);break;case U$.NOSCRIPT:e.options.scriptingEnabled?e._switchToTextParsing(t,J$.RAWTEXT):(e._insertElement(t,D$.HTML),e.insertionMode=UP.IN_HEAD_NO_SCRIPT);break;case U$.NOFRAMES:case U$.STYLE:e._switchToTextParsing(t,J$.RAWTEXT);break;case U$.SCRIPT:e._switchToTextParsing(t,J$.SCRIPT_DATA);break;case U$.TEMPLATE:e._insertTemplate(t),e.activeFormattingElements.insertMarker(),e.framesetOk=!1,e.insertionMode=UP.IN_TEMPLATE,e.tmplInsertionModeStack.unshift(UP.IN_TEMPLATE);break;case U$.HEAD:e._err(t,g$.misplacedStartTagForHeadElement);break;default:iD(e,t)}}function 
oD(e,t){e.openElements.tmplCount>0?(e.openElements.generateImpliedEndTagsThoroughly(),e.openElements.currentTagId!==U$.TEMPLATE&&e._err(t,g$.closingOfElementWithOpenChildElements),e.openElements.popUntilTagNamePopped(U$.TEMPLATE),e.activeFormattingElements.clearToLastMarker(),e.tmplInsertionModeStack.shift(),e._resetInsertionMode()):e._err(t,g$.endTagWithoutMatchingOpenElement)}function iD(e,t){e.openElements.pop(),e.insertionMode=UP.AFTER_HEAD,e._processToken(t)}function sD(e,t){const n=t.type===b$.EOF?g$.openElementsLeftAfterEof:g$.disallowedContentInNoscriptInHead;e._err(t,n),e.openElements.pop(),e.insertionMode=UP.IN_HEAD,e._processToken(t)}function lD(e,t){e._insertFakeElement(B$.BODY,U$.BODY),e.insertionMode=UP.IN_BODY,cD(e,t)}function cD(e,t){switch(t.type){case b$.CHARACTER:dD(e,t);break;case b$.WHITESPACE_CHARACTER:uD(e,t);break;case b$.COMMENT:JP(e,t);break;case b$.START_TAG:gD(e,t);break;case b$.END_TAG:bD(e,t);break;case b$.EOF:yD(e,t)}}function uD(e,t){e._reconstructActiveFormattingElements(),e._insertCharacters(t)}function dD(e,t){e._reconstructActiveFormattingElements(),e._insertCharacters(t),e.framesetOk=!1}function pD(e,t){e._reconstructActiveFormattingElements(),e._appendElement(t,D$.HTML),e.framesetOk=!1,t.ackSelfClosing=!0}function hD(e){const t=w$(e,M$.TYPE);return null!=t&&"hidden"===t.toLowerCase()}function fD(e,t){e._switchToTextParsing(t,J$.RAWTEXT)}function mD(e,t){e._reconstructActiveFormattingElements(),e._insertElement(t,D$.HTML)}function gD(e,t){switch(t.tagID){case U$.I:case U$.S:case U$.B:case U$.U:case U$.EM:case U$.TT:case U$.BIG:case U$.CODE:case U$.FONT:case U$.SMALL:case U$.STRIKE:case U$.STRONG:!function(e,t){e._reconstructActiveFormattingElements(),e._insertElement(t,D$.HTML),e.activeFormattingElements.pushElement(e.openElements.current,t)}(e,t);break;case U$.A:!function(e,t){const 
n=e.activeFormattingElements.getElementEntryInScopeWithTagName(B$.A);n&&(KP(e,t),e.openElements.remove(n.element),e.activeFormattingElements.removeEntry(n)),e._reconstructActiveFormattingElements(),e._insertElement(t,D$.HTML),e.activeFormattingElements.pushElement(e.openElements.current,t)}(e,t);break;case U$.H1:case U$.H2:case U$.H3:case U$.H4:case U$.H5:case U$.H6:!function(e,t){e.openElements.hasInButtonScope(U$.P)&&e._closePElement(),void 0!==e.openElements.currentTagId&&G$.has(e.openElements.currentTagId)&&e.openElements.pop(),e._insertElement(t,D$.HTML)}(e,t);break;case U$.P:case U$.DL:case U$.OL:case U$.UL:case U$.DIV:case U$.DIR:case U$.NAV:case U$.MAIN:case U$.MENU:case U$.ASIDE:case U$.CENTER:case U$.FIGURE:case U$.FOOTER:case U$.HEADER:case U$.HGROUP:case U$.DIALOG:case U$.DETAILS:case U$.ADDRESS:case U$.ARTICLE:case U$.SEARCH:case U$.SECTION:case U$.SUMMARY:case U$.FIELDSET:case U$.BLOCKQUOTE:case U$.FIGCAPTION:!function(e,t){e.openElements.hasInButtonScope(U$.P)&&e._closePElement(),e._insertElement(t,D$.HTML)}(e,t);break;case U$.LI:case U$.DD:case U$.DT:!function(e,t){e.framesetOk=!1;const n=t.tagID;for(let t=e.openElements.stackTop;t>=0;t--){const r=e.openElements.tagIDs[t];if(n===U$.LI&&r===U$.LI||(n===U$.DD||n===U$.DT)&&(r===U$.DD||r===U$.DT)){e.openElements.generateImpliedEndTagsWithExclusion(r),e.openElements.popUntilTagNamePopped(r);break}if(r!==U$.ADDRESS&&r!==U$.DIV&&r!==U$.P&&e._isSpecialElement(e.openElements.items[t],r))break}e.openElements.hasInButtonScope(U$.P)&&e._closePElement(),e._insertElement(t,D$.HTML)}(e,t);break;case U$.BR:case U$.IMG:case U$.WBR:case U$.AREA:case U$.EMBED:case U$.KEYGEN:pD(e,t);break;case U$.HR:!function(e,t){e.openElements.hasInButtonScope(U$.P)&&e._closePElement(),e._appendElement(t,D$.HTML),e.framesetOk=!1,t.ackSelfClosing=!0}(e,t);break;case U$.RB:case U$.RTC:!function(e,t){e.openElements.hasInScope(U$.RUBY)&&e.openElements.generateImpliedEndTags(),e._insertElement(t,D$.HTML)}(e,t);break;case U$.RT:case 
U$.RP:!function(e,t){e.openElements.hasInScope(U$.RUBY)&&e.openElements.generateImpliedEndTagsWithExclusion(U$.RTC),e._insertElement(t,D$.HTML)}(e,t);break;case U$.PRE:case U$.LISTING:!function(e,t){e.openElements.hasInButtonScope(U$.P)&&e._closePElement(),e._insertElement(t,D$.HTML),e.skipNextNewLine=!0,e.framesetOk=!1}(e,t);break;case U$.XMP:!function(e,t){e.openElements.hasInButtonScope(U$.P)&&e._closePElement(),e._reconstructActiveFormattingElements(),e.framesetOk=!1,e._switchToTextParsing(t,J$.RAWTEXT)}(e,t);break;case U$.SVG:!function(e,t){e._reconstructActiveFormattingElements(),BP(t),jP(t),t.selfClosing?e._appendElement(t,D$.SVG):e._insertElement(t,D$.SVG),t.ackSelfClosing=!0}(e,t);break;case U$.HTML:!function(e,t){0===e.openElements.tmplCount&&e.treeAdapter.adoptAttributes(e.openElements.items[0],t.attrs)}(e,t);break;case U$.BASE:case U$.LINK:case U$.META:case U$.STYLE:case U$.TITLE:case U$.SCRIPT:case U$.BGSOUND:case U$.BASEFONT:case U$.TEMPLATE:aD(e,t);break;case U$.BODY:!function(e,t){const n=e.openElements.tryPeekProperlyNestedBodyElement();n&&0===e.openElements.tmplCount&&(e.framesetOk=!1,e.treeAdapter.adoptAttributes(n,t.attrs))}(e,t);break;case U$.FORM:!function(e,t){const n=e.openElements.tmplCount>0;e.formElement&&!n||(e.openElements.hasInButtonScope(U$.P)&&e._closePElement(),e._insertElement(t,D$.HTML),n||(e.formElement=e.openElements.current))}(e,t);break;case U$.NOBR:!function(e,t){e._reconstructActiveFormattingElements(),e.openElements.hasInScope(U$.NOBR)&&(KP(e,t),e._reconstructActiveFormattingElements()),e._insertElement(t,D$.HTML),e.activeFormattingElements.pushElement(e.openElements.current,t)}(e,t);break;case U$.MATH:!function(e,t){e._reconstructActiveFormattingElements(),LP(t),jP(t),t.selfClosing?e._appendElement(t,D$.MATHML):e._insertElement(t,D$.MATHML),t.ackSelfClosing=!0}(e,t);break;case 
U$.TABLE:!function(e,t){e.treeAdapter.getDocumentMode(e.document)!==R$.QUIRKS&&e.openElements.hasInButtonScope(U$.P)&&e._closePElement(),e._insertElement(t,D$.HTML),e.framesetOk=!1,e.insertionMode=UP.IN_TABLE}(e,t);break;case U$.INPUT:!function(e,t){e._reconstructActiveFormattingElements(),e._appendElement(t,D$.HTML),hD(t)||(e.framesetOk=!1),t.ackSelfClosing=!0}(e,t);break;case U$.PARAM:case U$.TRACK:case U$.SOURCE:!function(e,t){e._appendElement(t,D$.HTML),t.ackSelfClosing=!0}(e,t);break;case U$.IMAGE:!function(e,t){t.tagName=B$.IMG,t.tagID=U$.IMG,pD(e,t)}(e,t);break;case U$.BUTTON:!function(e,t){e.openElements.hasInScope(U$.BUTTON)&&(e.openElements.generateImpliedEndTags(),e.openElements.popUntilTagNamePopped(U$.BUTTON)),e._reconstructActiveFormattingElements(),e._insertElement(t,D$.HTML),e.framesetOk=!1}(e,t);break;case U$.APPLET:case U$.OBJECT:case U$.MARQUEE:!function(e,t){e._reconstructActiveFormattingElements(),e._insertElement(t,D$.HTML),e.activeFormattingElements.insertMarker(),e.framesetOk=!1}(e,t);break;case U$.IFRAME:!function(e,t){e.framesetOk=!1,e._switchToTextParsing(t,J$.RAWTEXT)}(e,t);break;case U$.SELECT:!function(e,t){e._reconstructActiveFormattingElements(),e._insertElement(t,D$.HTML),e.framesetOk=!1,e.insertionMode=e.insertionMode===UP.IN_TABLE||e.insertionMode===UP.IN_CAPTION||e.insertionMode===UP.IN_TABLE_BODY||e.insertionMode===UP.IN_ROW||e.insertionMode===UP.IN_CELL?UP.IN_SELECT_IN_TABLE:UP.IN_SELECT}(e,t);break;case U$.OPTION:case U$.OPTGROUP:!function(e,t){e.openElements.currentTagId===U$.OPTION&&e.openElements.pop(),e._reconstructActiveFormattingElements(),e._insertElement(t,D$.HTML)}(e,t);break;case U$.NOEMBED:case U$.NOFRAMES:fD(e,t);break;case U$.FRAMESET:!function(e,t){const n=e.openElements.tryPeekProperlyNestedBodyElement();e.framesetOk&&n&&(e.treeAdapter.detachNode(n),e.openElements.popAllUpToHtmlElement(),e._insertElement(t,D$.HTML),e.insertionMode=UP.IN_FRAMESET)}(e,t);break;case 
U$.TEXTAREA:!function(e,t){e._insertElement(t,D$.HTML),e.skipNextNewLine=!0,e.tokenizer.state=J$.RCDATA,e.originalInsertionMode=e.insertionMode,e.framesetOk=!1,e.insertionMode=UP.TEXT}(e,t);break;case U$.NOSCRIPT:e.options.scriptingEnabled?fD(e,t):mD(e,t);break;case U$.PLAINTEXT:!function(e,t){e.openElements.hasInButtonScope(U$.P)&&e._closePElement(),e._insertElement(t,D$.HTML),e.tokenizer.state=J$.PLAINTEXT}(e,t);break;case U$.COL:case U$.TH:case U$.TD:case U$.TR:case U$.HEAD:case U$.FRAME:case U$.TBODY:case U$.TFOOT:case U$.THEAD:case U$.CAPTION:case U$.COLGROUP:break;default:mD(e,t)}}function vD(e,t){const n=t.tagName,r=t.tagID;for(let t=e.openElements.stackTop;t>0;t--){const a=e.openElements.items[t],o=e.openElements.tagIDs[t];if(r===o&&(r!==U$.UNKNOWN||e.treeAdapter.getTagName(a)===n)){e.openElements.generateImpliedEndTagsWithExclusion(r),e.openElements.stackTop>=t&&e.openElements.shortenToLength(t);break}if(e._isSpecialElement(a,o))break}}function bD(e,t){switch(t.tagID){case U$.A:case U$.B:case U$.I:case U$.S:case U$.U:case U$.EM:case U$.TT:case U$.BIG:case U$.CODE:case U$.FONT:case U$.NOBR:case U$.SMALL:case U$.STRIKE:case U$.STRONG:KP(e,t);break;case U$.P:!function(e){e.openElements.hasInButtonScope(U$.P)||e._insertFakeElement(B$.P,U$.P),e._closePElement()}(e);break;case U$.DL:case U$.UL:case U$.OL:case U$.DIR:case U$.DIV:case U$.NAV:case U$.PRE:case U$.MAIN:case U$.MENU:case U$.ASIDE:case U$.BUTTON:case U$.CENTER:case U$.FIGURE:case U$.FOOTER:case U$.HEADER:case U$.HGROUP:case U$.DIALOG:case U$.ADDRESS:case U$.ARTICLE:case U$.DETAILS:case U$.SEARCH:case U$.SECTION:case U$.SUMMARY:case U$.LISTING:case U$.FIELDSET:case U$.BLOCKQUOTE:case U$.FIGCAPTION:!function(e,t){const n=t.tagID;e.openElements.hasInScope(n)&&(e.openElements.generateImpliedEndTags(),e.openElements.popUntilTagNamePopped(n))}(e,t);break;case 
U$.LI:!function(e){e.openElements.hasInListItemScope(U$.LI)&&(e.openElements.generateImpliedEndTagsWithExclusion(U$.LI),e.openElements.popUntilTagNamePopped(U$.LI))}(e);break;case U$.DD:case U$.DT:!function(e,t){const n=t.tagID;e.openElements.hasInScope(n)&&(e.openElements.generateImpliedEndTagsWithExclusion(n),e.openElements.popUntilTagNamePopped(n))}(e,t);break;case U$.H1:case U$.H2:case U$.H3:case U$.H4:case U$.H5:case U$.H6:!function(e){e.openElements.hasNumberedHeaderInScope()&&(e.openElements.generateImpliedEndTags(),e.openElements.popUntilNumberedHeaderPopped())}(e);break;case U$.BR:!function(e){e._reconstructActiveFormattingElements(),e._insertFakeElement(B$.BR,U$.BR),e.openElements.pop(),e.framesetOk=!1}(e);break;case U$.BODY:!function(e,t){if(e.openElements.hasInScope(U$.BODY)&&(e.insertionMode=UP.AFTER_BODY,e.options.sourceCodeLocationInfo)){const n=e.openElements.tryPeekProperlyNestedBodyElement();n&&e._setEndLocation(n,t)}}(e,t);break;case U$.HTML:!function(e,t){e.openElements.hasInScope(U$.BODY)&&(e.insertionMode=UP.AFTER_BODY,LD(e,t))}(e,t);break;case U$.FORM:!function(e){const t=e.openElements.tmplCount>0,{formElement:n}=e;t||(e.formElement=null),(n||t)&&e.openElements.hasInScope(U$.FORM)&&(e.openElements.generateImpliedEndTags(),t?e.openElements.popUntilTagNamePopped(U$.FORM):n&&e.openElements.remove(n))}(e);break;case U$.APPLET:case U$.OBJECT:case U$.MARQUEE:!function(e,t){const n=t.tagID;e.openElements.hasInScope(n)&&(e.openElements.generateImpliedEndTags(),e.openElements.popUntilTagNamePopped(n),e.activeFormattingElements.clearToLastMarker())}(e,t);break;case U$.TEMPLATE:oD(e,t);break;default:vD(e,t)}}function yD(e,t){e.tmplInsertionModeStack.length>0?RD(e,t):eD(e,t)}function OD(e,t){if(void 0!==e.openElements.currentTagId&&ZP.has(e.openElements.currentTagId))switch(e.pendingCharacterTokens.length=0,e.hasNonWhitespacePendingCharacterToken=!1,e.originalInsertionMode=e.insertionMode,e.insertionMode=UP.IN_TABLE_TEXT,t.type){case 
b$.CHARACTER:_D(e,t);break;case b$.WHITESPACE_CHARACTER:SD(e,t)}else kD(e,t)}function wD(e,t){switch(t.tagID){case U$.TD:case U$.TH:case U$.TR:!function(e,t){e.openElements.clearBackToTableContext(),e._insertFakeElement(B$.TBODY,U$.TBODY),e.insertionMode=UP.IN_TABLE_BODY,$D(e,t)}(e,t);break;case U$.STYLE:case U$.SCRIPT:case U$.TEMPLATE:aD(e,t);break;case U$.COL:!function(e,t){e.openElements.clearBackToTableContext(),e._insertFakeElement(B$.COLGROUP,U$.COLGROUP),e.insertionMode=UP.IN_COLUMN_GROUP,AD(e,t)}(e,t);break;case U$.FORM:!function(e,t){e.formElement||0!==e.openElements.tmplCount||(e._insertElement(t,D$.HTML),e.formElement=e.openElements.current,e.openElements.pop())}(e,t);break;case U$.TABLE:!function(e,t){e.openElements.hasInTableScope(U$.TABLE)&&(e.openElements.popUntilTagNamePopped(U$.TABLE),e._resetInsertionMode(),e._processStartTag(t))}(e,t);break;case U$.TBODY:case U$.TFOOT:case U$.THEAD:!function(e,t){e.openElements.clearBackToTableContext(),e._insertElement(t,D$.HTML),e.insertionMode=UP.IN_TABLE_BODY}(e,t);break;case U$.INPUT:!function(e,t){hD(t)?e._appendElement(t,D$.HTML):kD(e,t),t.ackSelfClosing=!0}(e,t);break;case U$.CAPTION:!function(e,t){e.openElements.clearBackToTableContext(),e.activeFormattingElements.insertMarker(),e._insertElement(t,D$.HTML),e.insertionMode=UP.IN_CAPTION}(e,t);break;case U$.COLGROUP:!function(e,t){e.openElements.clearBackToTableContext(),e._insertElement(t,D$.HTML),e.insertionMode=UP.IN_COLUMN_GROUP}(e,t);break;default:kD(e,t)}}function xD(e,t){switch(t.tagID){case U$.TABLE:e.openElements.hasInTableScope(U$.TABLE)&&(e.openElements.popUntilTagNamePopped(U$.TABLE),e._resetInsertionMode());break;case U$.TEMPLATE:oD(e,t);break;case U$.BODY:case U$.CAPTION:case U$.COL:case U$.COLGROUP:case U$.HTML:case U$.TBODY:case U$.TD:case U$.TFOOT:case U$.TH:case U$.THEAD:case U$.TR:break;default:kD(e,t)}}function kD(e,t){const n=e.fosterParentingEnabled;e.fosterParentingEnabled=!0,cD(e,t),e.fosterParentingEnabled=n}function 
SD(e,t){e.pendingCharacterTokens.push(t)}function _D(e,t){e.pendingCharacterTokens.push(t),e.hasNonWhitespacePendingCharacterToken=!0}function TD(e,t){let n=0;if(e.hasNonWhitespacePendingCharacterToken)for(;n0&&e.openElements.currentTagId===U$.OPTION&&e.openElements.tagIDs[e.openElements.stackTop-1]===U$.OPTGROUP&&e.openElements.pop(),e.openElements.currentTagId===U$.OPTGROUP&&e.openElements.pop();break;case U$.OPTION:e.openElements.currentTagId===U$.OPTION&&e.openElements.pop();break;case U$.SELECT:e.openElements.hasInSelectScope(U$.SELECT)&&(e.openElements.popUntilTagNamePopped(U$.SELECT),e._resetInsertionMode());break;case U$.TEMPLATE:oD(e,t)}}function RD(e,t){e.openElements.tmplCount>0?(e.openElements.popUntilTagNamePopped(U$.TEMPLATE),e.activeFormattingElements.clearToLastMarker(),e.tmplInsertionModeStack.shift(),e._resetInsertionMode(),e.onEof(t)):eD(e,t)}function LD(e,t){var n;if(t.tagID===U$.HTML){if(e.fragmentContext||(e.insertionMode=UP.AFTER_AFTER_BODY),e.options.sourceCodeLocationInfo&&e.openElements.tagIDs[0]===U$.HTML){e._setEndLocation(e.openElements.items[0],t);const r=e.openElements.items[1];r&&!(null===(n=e.treeAdapter.getNodeSourceCodeLocation(r))||void 0===n?void 0:n.endTag)&&e._setEndLocation(r,t)}}else BD(e,t)}function BD(e,t){e.insertionMode=UP.IN_BODY,cD(e,t)}function jD(e,t){e.insertionMode=UP.IN_BODY,cD(e,t)}function UD(e){for(;e.treeAdapter.getNamespaceURI(e.openElements.current)!==D$.HTML&&void 0!==e.openElements.currentTagId&&!e._isIntegrationPoint(e.openElements.currentTagId,e.openElements.current);)e.openElements.pop()}function zD(e,t){return HP.parse(e,t)}function FD(e,t,n){"string"==typeof e&&(n=t,t=e,e=null);const r=HP.getFragmentParser(e,n);return r.tokenizer.write(t,!0),r.getFragment()}function ZD(e){return e&&"object"==typeof e?"position"in e||"type"in e?HD(e.position):"start"in e||"end"in e?HD(e):"line"in e||"column"in e?QD(e):"":""}function QD(e){return VD(e&&e.line)+":"+VD(e&&e.column)}function HD(e){return 
QD(e&&e.start)+"-"+QD(e&&e.end)}function VD(e){return e&&"number"==typeof e?e:1}B$.AREA,B$.BASE,B$.BASEFONT,B$.BGSOUND,B$.BR,B$.COL,B$.EMBED,B$.FRAME,B$.HR,B$.IMG,B$.INPUT,B$.KEYGEN,B$.LINK,B$.META,B$.PARAM,B$.SOURCE,B$.TRACK,B$.WBR;class qD extends Error{constructor(e,t,n){super(),"string"==typeof t&&(n=t,t=void 0);let r="",a={},o=!1;if(t&&(a="line"in t&&"column"in t||"start"in t&&"end"in t?{place:t}:"type"in t?{ancestors:[t],place:t.position}:{...t}),"string"==typeof e?r=e:!a.cause&&e&&(o=!0,r=e.message,a.cause=e),!a.ruleId&&!a.source&&"string"==typeof n){const e=n.indexOf(":");-1===e?a.ruleId=n:(a.source=n.slice(0,e),a.ruleId=n.slice(e+1))}if(!a.place&&a.ancestors&&a.ancestors){const e=a.ancestors[a.ancestors.length-1];e&&(a.place=e.position)}const i=a.place&&"start"in a.place?a.place.start:a.place;this.ancestors=a.ancestors||void 0,this.cause=a.cause||void 0,this.column=i?i.column:void 0,this.fatal=void 0,this.file,this.message=r,this.line=i?i.line:void 0,this.name=ZD(a.place)||"1:1",this.place=a.place||void 0,this.reason=this.message,this.ruleId=a.ruleId||void 0,this.source=a.source||void 0,this.stack=o&&a.cause&&"string"==typeof a.cause.stack?a.cause.stack:"",this.actual,this.expected,this.note,this.url}}qD.prototype.file="",qD.prototype.name="",qD.prototype.reason="",qD.prototype.message="",qD.prototype.stack="",qD.prototype.column=void 0,qD.prototype.line=void 0,qD.prototype.ancestors=void 0,qD.prototype.cause=void 0,qD.prototype.fatal=void 0,qD.prototype.place=void 0,qD.prototype.ruleId=void 0,qD.prototype.source=void 0;const WD=function(e,t){if(void 0!==t&&"string"!=typeof t)throw new TypeError('"ext" argument must be a string');JD(e);let n,r=0,a=-1,o=e.length;if(void 0===t||0===t.length||t.length>e.length){for(;o--;)if(47===e.codePointAt(o)){if(n){r=o+1;break}}else a<0&&(n=!0,a=o+1);return a<0?"":e.slice(r,a)}if(t===e)return"";let i=-1,s=t.length-1;for(;o--;)if(47===e.codePointAt(o)){if(n){r=o+1;break}}else 
i<0&&(n=!0,i=o+1),s>-1&&(e.codePointAt(o)===t.codePointAt(s--)?s<0&&(a=o):(s=-1,a=i));return r===a?a=i:a<0&&(a=e.length),e.slice(r,a)},XD=function(e){if(JD(e),0===e.length)return".";let t,n=-1,r=e.length;for(;--r;)if(47===e.codePointAt(r)){if(t){n=r;break}}else t||(t=!0);return n<0?47===e.codePointAt(0)?"/":".":1===n&&47===e.codePointAt(0)?"//":e.slice(0,n)},GD=function(e){JD(e);let t,n=e.length,r=-1,a=0,o=-1,i=0;for(;n--;){const s=e.codePointAt(n);if(47!==s)r<0&&(t=!0,r=n+1),46===s?o<0?o=n:1!==i&&(i=1):o>-1&&(i=-1);else if(t){a=n+1;break}}return o<0||r<0||0===i||1===i&&o===r-1&&o===a+1?"":e.slice(o,r)},YD=function(...e){let t,n=-1;for(;++n2){if(r=a.lastIndexOf("/"),r!==a.length-1){r<0?(a="",o=0):(a=a.slice(0,r),o=a.length-1-a.lastIndexOf("/")),i=l,s=0;continue}}else if(a.length>0){a="",o=0,i=l,s=0;continue}t&&(a=a.length>0?a+"/..":"..",o=2)}else a.length>0?a+="/"+e.slice(i+1,l):a=e.slice(i+1,l),o=l-i-1;i=l,s=0}else 46===n&&s>-1?s++:s=-1}return a}(e,!t);return 0!==n.length||t||(n="."),n.length>0&&47===e.codePointAt(e.length-1)&&(n+="/"),t?"/"+n:n}(t)},KD="/";function JD(e){if("string"!=typeof e)throw new TypeError("Path must be a string. Received "+JSON.stringify(e))}const eI=function(){return"/"};function tI(e){return Boolean(null!==e&&"object"==typeof e&&"href"in e&&e.href&&"protocol"in e&&e.protocol&&void 0===e.auth)}const nI=["history","path","basename","stem","extname","dirname"];class rI{constructor(e){let t;t=e?tI(e)?{path:e}:"string"==typeof e||function(e){return Boolean(e&&"object"==typeof e&&"byteLength"in e&&"byteOffset"in e)}(e)?{value:e}:e:{},this.cwd="cwd"in t?"":eI(),this.data={},this.history=[],this.messages=[],this.value,this.map,this.result,this.stored;let n,r=-1;for(;++r`",url:!1},abruptClosingOfEmptyComment:{reason:"Unexpected abruptly closed empty comment",description:"Unexpected `>` or `->`. Expected `--\x3e` to close comments"},abruptDoctypePublicIdentifier:{reason:"Unexpected abruptly closed public identifier",description:"Unexpected `>`. 
Expected a closing `\"` or `'` after the public identifier"},abruptDoctypeSystemIdentifier:{reason:"Unexpected abruptly closed system identifier",description:"Unexpected `>`. Expected a closing `\"` or `'` after the identifier identifier"},absenceOfDigitsInNumericCharacterReference:{reason:"Unexpected non-digit at start of numeric character reference",description:"Unexpected `%c`. Expected `[0-9]` for decimal references or `[0-9a-fA-F]` for hexadecimal references"},cdataInHtmlContent:{reason:"Unexpected CDATA section in HTML",description:"Unexpected `` in ``",description:"Unexpected text character `%c`. Only use text in `