diff --git a/crates/brk_client/src/lib.rs b/crates/brk_client/src/lib.rs index dea3fce40..9a16b146e 100644 --- a/crates/brk_client/src/lib.rs +++ b/crates/brk_client/src/lib.rs @@ -2219,6 +2219,42 @@ impl _10y1m1w1y2y3m3y4y5y6m6y8yPattern3 { } } +/// Pattern struct for repeated tree structure. +pub struct _10y1m1w1y2y3m3y4y5y6m6y8yPattern2 { + pub _10y: MetricPattern1, + pub _1m: MetricPattern1, + pub _1w: MetricPattern1, + pub _1y: MetricPattern1, + pub _2y: MetricPattern1, + pub _3m: MetricPattern1, + pub _3y: MetricPattern1, + pub _4y: MetricPattern1, + pub _5y: MetricPattern1, + pub _6m: MetricPattern1, + pub _6y: MetricPattern1, + pub _8y: MetricPattern1, +} + +impl _10y1m1w1y2y3m3y4y5y6m6y8yPattern2 { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + _10y: MetricPattern1::new(client.clone(), _p("10y", &acc)), + _1m: MetricPattern1::new(client.clone(), _p("1m", &acc)), + _1w: MetricPattern1::new(client.clone(), _p("1w", &acc)), + _1y: MetricPattern1::new(client.clone(), _p("1y", &acc)), + _2y: MetricPattern1::new(client.clone(), _p("2y", &acc)), + _3m: MetricPattern1::new(client.clone(), _p("3m", &acc)), + _3y: MetricPattern1::new(client.clone(), _p("3y", &acc)), + _4y: MetricPattern1::new(client.clone(), _p("4y", &acc)), + _5y: MetricPattern1::new(client.clone(), _p("5y", &acc)), + _6m: MetricPattern1::new(client.clone(), _p("6m", &acc)), + _6y: MetricPattern1::new(client.clone(), _p("6y", &acc)), + _8y: MetricPattern1::new(client.clone(), _p("8y", &acc)), + } + } +} + /// Pattern struct for repeated tree structure. pub struct InvestedNegNetNuplSupplyUnrealizedPattern { pub invested_capital_in_loss_pct: MetricPattern1, @@ -2255,78 +2291,6 @@ impl InvestedNegNetNuplSupplyUnrealizedPattern { } } -/// Pattern struct for repeated tree structure. 
-pub struct _10y1m1w1y2y3m3y4y5y6m6y8yPattern2 { - pub _10y: MetricPattern1, - pub _1m: MetricPattern1, - pub _1w: MetricPattern1, - pub _1y: MetricPattern1, - pub _2y: MetricPattern1, - pub _3m: MetricPattern1, - pub _3y: MetricPattern1, - pub _4y: MetricPattern1, - pub _5y: MetricPattern1, - pub _6m: MetricPattern1, - pub _6y: MetricPattern1, - pub _8y: MetricPattern1, -} - -impl _10y1m1w1y2y3m3y4y5y6m6y8yPattern2 { - /// Create a new pattern node with accumulated metric name. - pub fn new(client: Arc, acc: String) -> Self { - Self { - _10y: MetricPattern1::new(client.clone(), _p("10y", &acc)), - _1m: MetricPattern1::new(client.clone(), _p("1m", &acc)), - _1w: MetricPattern1::new(client.clone(), _p("1w", &acc)), - _1y: MetricPattern1::new(client.clone(), _p("1y", &acc)), - _2y: MetricPattern1::new(client.clone(), _p("2y", &acc)), - _3m: MetricPattern1::new(client.clone(), _p("3m", &acc)), - _3y: MetricPattern1::new(client.clone(), _p("3y", &acc)), - _4y: MetricPattern1::new(client.clone(), _p("4y", &acc)), - _5y: MetricPattern1::new(client.clone(), _p("5y", &acc)), - _6m: MetricPattern1::new(client.clone(), _p("6m", &acc)), - _6y: MetricPattern1::new(client.clone(), _p("6y", &acc)), - _8y: MetricPattern1::new(client.clone(), _p("8y", &acc)), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct _201520162017201820192020202120222023202420252026Pattern2 { - pub _2015: MetricPattern1, - pub _2016: MetricPattern1, - pub _2017: MetricPattern1, - pub _2018: MetricPattern1, - pub _2019: MetricPattern1, - pub _2020: MetricPattern1, - pub _2021: MetricPattern1, - pub _2022: MetricPattern1, - pub _2023: MetricPattern1, - pub _2024: MetricPattern1, - pub _2025: MetricPattern1, - pub _2026: MetricPattern1, -} - -impl _201520162017201820192020202120222023202420252026Pattern2 { - /// Create a new pattern node with accumulated metric name. 
- pub fn new(client: Arc, acc: String) -> Self { - Self { - _2015: MetricPattern1::new(client.clone(), _m(&acc, "2015_returns")), - _2016: MetricPattern1::new(client.clone(), _m(&acc, "2016_returns")), - _2017: MetricPattern1::new(client.clone(), _m(&acc, "2017_returns")), - _2018: MetricPattern1::new(client.clone(), _m(&acc, "2018_returns")), - _2019: MetricPattern1::new(client.clone(), _m(&acc, "2019_returns")), - _2020: MetricPattern1::new(client.clone(), _m(&acc, "2020_returns")), - _2021: MetricPattern1::new(client.clone(), _m(&acc, "2021_returns")), - _2022: MetricPattern1::new(client.clone(), _m(&acc, "2022_returns")), - _2023: MetricPattern1::new(client.clone(), _m(&acc, "2023_returns")), - _2024: MetricPattern1::new(client.clone(), _m(&acc, "2024_returns")), - _2025: MetricPattern1::new(client.clone(), _m(&acc, "2025_returns")), - _2026: MetricPattern1::new(client.clone(), _m(&acc, "2026_returns")), - } - } -} - /// Pattern struct for repeated tree structure. pub struct AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90RollingSumPattern { pub average: MetricPattern18, @@ -2859,6 +2823,28 @@ impl InvestedMaxMinPercentilesSpotPattern { } } +/// Pattern struct for repeated tree structure. +pub struct EmaHistogramLineSignalPattern { + pub ema_fast: MetricPattern1, + pub ema_slow: MetricPattern1, + pub histogram: MetricPattern1, + pub line: MetricPattern1, + pub signal: MetricPattern1, +} + +impl EmaHistogramLineSignalPattern { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + ema_fast: MetricPattern1::new(client.clone(), _m(&acc, "ema_fast_1y")), + ema_slow: MetricPattern1::new(client.clone(), _m(&acc, "ema_slow_1y")), + histogram: MetricPattern1::new(client.clone(), _m(&acc, "histogram_1y")), + line: MetricPattern1::new(client.clone(), _m(&acc, "line_1y")), + signal: MetricPattern1::new(client.clone(), _m(&acc, "signal_1y")), + } + } +} + /// Pattern struct for repeated tree structure. 
pub struct _1y24h30d7dPattern2 { pub _1y: BtcCentsSatsUsdPattern, @@ -2991,24 +2977,6 @@ impl CentsSatsUsdPattern { } } -/// Pattern struct for repeated tree structure. -pub struct HistogramLineSignalPattern { - pub histogram: MetricPattern1, - pub line: MetricPattern1, - pub signal: MetricPattern1, -} - -impl HistogramLineSignalPattern { - /// Create a new pattern node with accumulated metric name. - pub fn new(client: Arc, acc: String) -> Self { - Self { - histogram: MetricPattern1::new(client.clone(), _m(&acc, "histogram_1y")), - line: MetricPattern1::new(client.clone(), _m(&acc, "line_1y")), - signal: MetricPattern1::new(client.clone(), _m(&acc, "signal_1y")), - } - } -} - /// Pattern struct for repeated tree structure. pub struct _6bBlockTxindexPattern { pub _6b: AverageMaxMedianMinPct10Pct25Pct75Pct90Pattern, @@ -3252,7 +3220,7 @@ impl MetricsTree_Blocks { /// Metrics tree node. pub struct MetricsTree_Blocks_Difficulty { pub raw: MetricPattern1, - pub as_hash: MetricPattern1, + pub as_hash: MetricPattern1, pub adjustment: MetricPattern1, pub epoch: MetricPattern1, pub blocks_before_next_adjustment: MetricPattern1, @@ -3342,22 +3310,34 @@ pub struct MetricsTree_Blocks_Count { pub height_34d_ago: MetricPattern18, pub height_55d_ago: MetricPattern18, pub height_2m_ago: MetricPattern18, + pub height_9w_ago: MetricPattern18, + pub height_12w_ago: MetricPattern18, pub height_89d_ago: MetricPattern18, + pub height_3m_ago: MetricPattern18, + pub height_14w_ago: MetricPattern18, pub height_111d_ago: MetricPattern18, pub height_144d_ago: MetricPattern18, - pub height_3m_ago: MetricPattern18, pub height_6m_ago: MetricPattern18, + pub height_26w_ago: MetricPattern18, pub height_200d_ago: MetricPattern18, + pub height_9m_ago: MetricPattern18, pub height_350d_ago: MetricPattern18, + pub height_12m_ago: MetricPattern18, pub height_1y_ago: MetricPattern18, + pub height_14m_ago: MetricPattern18, pub height_2y_ago: MetricPattern18, - pub height_200w_ago: MetricPattern18, + pub 
height_26m_ago: MetricPattern18, pub height_3y_ago: MetricPattern18, + pub height_200w_ago: MetricPattern18, pub height_4y_ago: MetricPattern18, pub height_5y_ago: MetricPattern18, pub height_6y_ago: MetricPattern18, pub height_8y_ago: MetricPattern18, + pub height_9y_ago: MetricPattern18, pub height_10y_ago: MetricPattern18, + pub height_12y_ago: MetricPattern18, + pub height_14y_ago: MetricPattern18, + pub height_26y_ago: MetricPattern18, } impl MetricsTree_Blocks_Count { @@ -3381,22 +3361,34 @@ impl MetricsTree_Blocks_Count { height_34d_ago: MetricPattern18::new(client.clone(), "height_34d_ago".to_string()), height_55d_ago: MetricPattern18::new(client.clone(), "height_55d_ago".to_string()), height_2m_ago: MetricPattern18::new(client.clone(), "height_2m_ago".to_string()), + height_9w_ago: MetricPattern18::new(client.clone(), "height_9w_ago".to_string()), + height_12w_ago: MetricPattern18::new(client.clone(), "height_12w_ago".to_string()), height_89d_ago: MetricPattern18::new(client.clone(), "height_89d_ago".to_string()), + height_3m_ago: MetricPattern18::new(client.clone(), "height_3m_ago".to_string()), + height_14w_ago: MetricPattern18::new(client.clone(), "height_14w_ago".to_string()), height_111d_ago: MetricPattern18::new(client.clone(), "height_111d_ago".to_string()), height_144d_ago: MetricPattern18::new(client.clone(), "height_144d_ago".to_string()), - height_3m_ago: MetricPattern18::new(client.clone(), "height_3m_ago".to_string()), height_6m_ago: MetricPattern18::new(client.clone(), "height_6m_ago".to_string()), + height_26w_ago: MetricPattern18::new(client.clone(), "height_26w_ago".to_string()), height_200d_ago: MetricPattern18::new(client.clone(), "height_200d_ago".to_string()), + height_9m_ago: MetricPattern18::new(client.clone(), "height_9m_ago".to_string()), height_350d_ago: MetricPattern18::new(client.clone(), "height_350d_ago".to_string()), + height_12m_ago: MetricPattern18::new(client.clone(), "height_12m_ago".to_string()), height_1y_ago: 
MetricPattern18::new(client.clone(), "height_1y_ago".to_string()), + height_14m_ago: MetricPattern18::new(client.clone(), "height_14m_ago".to_string()), height_2y_ago: MetricPattern18::new(client.clone(), "height_2y_ago".to_string()), - height_200w_ago: MetricPattern18::new(client.clone(), "height_200w_ago".to_string()), + height_26m_ago: MetricPattern18::new(client.clone(), "height_26m_ago".to_string()), height_3y_ago: MetricPattern18::new(client.clone(), "height_3y_ago".to_string()), + height_200w_ago: MetricPattern18::new(client.clone(), "height_200w_ago".to_string()), height_4y_ago: MetricPattern18::new(client.clone(), "height_4y_ago".to_string()), height_5y_ago: MetricPattern18::new(client.clone(), "height_5y_ago".to_string()), height_6y_ago: MetricPattern18::new(client.clone(), "height_6y_ago".to_string()), height_8y_ago: MetricPattern18::new(client.clone(), "height_8y_ago".to_string()), + height_9y_ago: MetricPattern18::new(client.clone(), "height_9y_ago".to_string()), height_10y_ago: MetricPattern18::new(client.clone(), "height_10y_ago".to_string()), + height_12y_ago: MetricPattern18::new(client.clone(), "height_12y_ago".to_string()), + height_14y_ago: MetricPattern18::new(client.clone(), "height_14y_ago".to_string()), + height_26y_ago: MetricPattern18::new(client.clone(), "height_26y_ago".to_string()), } } } @@ -3873,9 +3865,9 @@ impl MetricsTree_Mining_Rewards { pub struct MetricsTree_Mining_Hashrate { pub hash_rate: MetricPattern1, pub hash_rate_1w_sma: MetricPattern1, - pub hash_rate_1m_sma: MetricPattern1, - pub hash_rate_2m_sma: MetricPattern1, - pub hash_rate_1y_sma: MetricPattern1, + pub hash_rate_1m_sma: MetricPattern1, + pub hash_rate_2m_sma: MetricPattern1, + pub hash_rate_1y_sma: MetricPattern1, pub hash_rate_ath: MetricPattern1, pub hash_rate_drawdown: MetricPattern1, pub hash_price_ths: MetricPattern1, @@ -5012,25 +5004,13 @@ pub struct MetricsTree_Market_Dca { pub dca_sats_per_day: MetricPattern18, pub period_stack: 
_10y1m1w1y2y3m3y4y5y6m6y8yPattern3, pub period_average_price: MetricsTree_Market_Dca_PeriodAveragePrice, - pub period_returns: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2, + pub period_returns: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2, pub period_cagr: _10y2y3y4y5y6y8yPattern, - pub period_days_in_profit: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2, - pub period_days_in_loss: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2, - pub period_min_return: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2, - pub period_max_return: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2, pub period_lump_sum_stack: _10y1m1w1y2y3m3y4y5y6m6y8yPattern3, - pub period_lump_sum_returns: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2, - pub period_lump_sum_days_in_profit: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2, - pub period_lump_sum_days_in_loss: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2, - pub period_lump_sum_min_return: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2, - pub period_lump_sum_max_return: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2, + pub period_lump_sum_returns: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2, pub class_stack: MetricsTree_Market_Dca_ClassStack, pub class_average_price: MetricsTree_Market_Dca_ClassAveragePrice, - pub class_returns: _201520162017201820192020202120222023202420252026Pattern2, - pub class_days_in_profit: MetricsTree_Market_Dca_ClassDaysInProfit, - pub class_days_in_loss: MetricsTree_Market_Dca_ClassDaysInLoss, - pub class_min_return: MetricsTree_Market_Dca_ClassMinReturn, - pub class_max_return: MetricsTree_Market_Dca_ClassMaxReturn, + pub class_returns: MetricsTree_Market_Dca_ClassReturns, } impl MetricsTree_Market_Dca { @@ -5041,23 +5021,11 @@ impl MetricsTree_Market_Dca { period_average_price: MetricsTree_Market_Dca_PeriodAveragePrice::new(client.clone(), format!("{base_path}_period_average_price")), period_returns: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2::new(client.clone(), "dca_returns".to_string()), period_cagr: _10y2y3y4y5y6y8yPattern::new(client.clone(), "dca_cagr".to_string()), - period_days_in_profit: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2::new(client.clone(), 
"dca_days_in_profit".to_string()), - period_days_in_loss: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2::new(client.clone(), "dca_days_in_loss".to_string()), - period_min_return: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2::new(client.clone(), "dca_min_return".to_string()), - period_max_return: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2::new(client.clone(), "dca_max_return".to_string()), period_lump_sum_stack: _10y1m1w1y2y3m3y4y5y6m6y8yPattern3::new(client.clone(), "lump_sum_stack".to_string()), period_lump_sum_returns: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2::new(client.clone(), "lump_sum_returns".to_string()), - period_lump_sum_days_in_profit: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2::new(client.clone(), "lump_sum_days_in_profit".to_string()), - period_lump_sum_days_in_loss: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2::new(client.clone(), "lump_sum_days_in_loss".to_string()), - period_lump_sum_min_return: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2::new(client.clone(), "lump_sum_min_return".to_string()), - period_lump_sum_max_return: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2::new(client.clone(), "lump_sum_max_return".to_string()), class_stack: MetricsTree_Market_Dca_ClassStack::new(client.clone(), format!("{base_path}_class_stack")), class_average_price: MetricsTree_Market_Dca_ClassAveragePrice::new(client.clone(), format!("{base_path}_class_average_price")), - class_returns: _201520162017201820192020202120222023202420252026Pattern2::new(client.clone(), "dca_class".to_string()), - class_days_in_profit: MetricsTree_Market_Dca_ClassDaysInProfit::new(client.clone(), format!("{base_path}_class_days_in_profit")), - class_days_in_loss: MetricsTree_Market_Dca_ClassDaysInLoss::new(client.clone(), format!("{base_path}_class_days_in_loss")), - class_min_return: MetricsTree_Market_Dca_ClassMinReturn::new(client.clone(), format!("{base_path}_class_min_return")), - class_max_return: MetricsTree_Market_Dca_ClassMaxReturn::new(client.clone(), format!("{base_path}_class_max_return")), + class_returns: 
MetricsTree_Market_Dca_ClassReturns::new(client.clone(), format!("{base_path}_class_returns")), } } } @@ -5168,77 +5136,7 @@ impl MetricsTree_Market_Dca_ClassAveragePrice { } /// Metrics tree node. -pub struct MetricsTree_Market_Dca_ClassDaysInProfit { - pub _2015: MetricPattern1, - pub _2016: MetricPattern1, - pub _2017: MetricPattern1, - pub _2018: MetricPattern1, - pub _2019: MetricPattern1, - pub _2020: MetricPattern1, - pub _2021: MetricPattern1, - pub _2022: MetricPattern1, - pub _2023: MetricPattern1, - pub _2024: MetricPattern1, - pub _2025: MetricPattern1, - pub _2026: MetricPattern1, -} - -impl MetricsTree_Market_Dca_ClassDaysInProfit { - pub fn new(client: Arc, base_path: String) -> Self { - Self { - _2015: MetricPattern1::new(client.clone(), "dca_class_2015_days_in_profit".to_string()), - _2016: MetricPattern1::new(client.clone(), "dca_class_2016_days_in_profit".to_string()), - _2017: MetricPattern1::new(client.clone(), "dca_class_2017_days_in_profit".to_string()), - _2018: MetricPattern1::new(client.clone(), "dca_class_2018_days_in_profit".to_string()), - _2019: MetricPattern1::new(client.clone(), "dca_class_2019_days_in_profit".to_string()), - _2020: MetricPattern1::new(client.clone(), "dca_class_2020_days_in_profit".to_string()), - _2021: MetricPattern1::new(client.clone(), "dca_class_2021_days_in_profit".to_string()), - _2022: MetricPattern1::new(client.clone(), "dca_class_2022_days_in_profit".to_string()), - _2023: MetricPattern1::new(client.clone(), "dca_class_2023_days_in_profit".to_string()), - _2024: MetricPattern1::new(client.clone(), "dca_class_2024_days_in_profit".to_string()), - _2025: MetricPattern1::new(client.clone(), "dca_class_2025_days_in_profit".to_string()), - _2026: MetricPattern1::new(client.clone(), "dca_class_2026_days_in_profit".to_string()), - } - } -} - -/// Metrics tree node. 
-pub struct MetricsTree_Market_Dca_ClassDaysInLoss { - pub _2015: MetricPattern1, - pub _2016: MetricPattern1, - pub _2017: MetricPattern1, - pub _2018: MetricPattern1, - pub _2019: MetricPattern1, - pub _2020: MetricPattern1, - pub _2021: MetricPattern1, - pub _2022: MetricPattern1, - pub _2023: MetricPattern1, - pub _2024: MetricPattern1, - pub _2025: MetricPattern1, - pub _2026: MetricPattern1, -} - -impl MetricsTree_Market_Dca_ClassDaysInLoss { - pub fn new(client: Arc, base_path: String) -> Self { - Self { - _2015: MetricPattern1::new(client.clone(), "dca_class_2015_days_in_loss".to_string()), - _2016: MetricPattern1::new(client.clone(), "dca_class_2016_days_in_loss".to_string()), - _2017: MetricPattern1::new(client.clone(), "dca_class_2017_days_in_loss".to_string()), - _2018: MetricPattern1::new(client.clone(), "dca_class_2018_days_in_loss".to_string()), - _2019: MetricPattern1::new(client.clone(), "dca_class_2019_days_in_loss".to_string()), - _2020: MetricPattern1::new(client.clone(), "dca_class_2020_days_in_loss".to_string()), - _2021: MetricPattern1::new(client.clone(), "dca_class_2021_days_in_loss".to_string()), - _2022: MetricPattern1::new(client.clone(), "dca_class_2022_days_in_loss".to_string()), - _2023: MetricPattern1::new(client.clone(), "dca_class_2023_days_in_loss".to_string()), - _2024: MetricPattern1::new(client.clone(), "dca_class_2024_days_in_loss".to_string()), - _2025: MetricPattern1::new(client.clone(), "dca_class_2025_days_in_loss".to_string()), - _2026: MetricPattern1::new(client.clone(), "dca_class_2026_days_in_loss".to_string()), - } - } -} - -/// Metrics tree node. 
-pub struct MetricsTree_Market_Dca_ClassMinReturn { +pub struct MetricsTree_Market_Dca_ClassReturns { pub _2015: MetricPattern1, pub _2016: MetricPattern1, pub _2017: MetricPattern1, @@ -5253,56 +5151,21 @@ pub struct MetricsTree_Market_Dca_ClassMinReturn { pub _2026: MetricPattern1, } -impl MetricsTree_Market_Dca_ClassMinReturn { +impl MetricsTree_Market_Dca_ClassReturns { pub fn new(client: Arc, base_path: String) -> Self { Self { - _2015: MetricPattern1::new(client.clone(), "dca_class_2015_min_return".to_string()), - _2016: MetricPattern1::new(client.clone(), "dca_class_2016_min_return".to_string()), - _2017: MetricPattern1::new(client.clone(), "dca_class_2017_min_return".to_string()), - _2018: MetricPattern1::new(client.clone(), "dca_class_2018_min_return".to_string()), - _2019: MetricPattern1::new(client.clone(), "dca_class_2019_min_return".to_string()), - _2020: MetricPattern1::new(client.clone(), "dca_class_2020_min_return".to_string()), - _2021: MetricPattern1::new(client.clone(), "dca_class_2021_min_return".to_string()), - _2022: MetricPattern1::new(client.clone(), "dca_class_2022_min_return".to_string()), - _2023: MetricPattern1::new(client.clone(), "dca_class_2023_min_return".to_string()), - _2024: MetricPattern1::new(client.clone(), "dca_class_2024_min_return".to_string()), - _2025: MetricPattern1::new(client.clone(), "dca_class_2025_min_return".to_string()), - _2026: MetricPattern1::new(client.clone(), "dca_class_2026_min_return".to_string()), - } - } -} - -/// Metrics tree node. 
-pub struct MetricsTree_Market_Dca_ClassMaxReturn { - pub _2015: MetricPattern1, - pub _2016: MetricPattern1, - pub _2017: MetricPattern1, - pub _2018: MetricPattern1, - pub _2019: MetricPattern1, - pub _2020: MetricPattern1, - pub _2021: MetricPattern1, - pub _2022: MetricPattern1, - pub _2023: MetricPattern1, - pub _2024: MetricPattern1, - pub _2025: MetricPattern1, - pub _2026: MetricPattern1, -} - -impl MetricsTree_Market_Dca_ClassMaxReturn { - pub fn new(client: Arc, base_path: String) -> Self { - Self { - _2015: MetricPattern1::new(client.clone(), "dca_class_2015_max_return".to_string()), - _2016: MetricPattern1::new(client.clone(), "dca_class_2016_max_return".to_string()), - _2017: MetricPattern1::new(client.clone(), "dca_class_2017_max_return".to_string()), - _2018: MetricPattern1::new(client.clone(), "dca_class_2018_max_return".to_string()), - _2019: MetricPattern1::new(client.clone(), "dca_class_2019_max_return".to_string()), - _2020: MetricPattern1::new(client.clone(), "dca_class_2020_max_return".to_string()), - _2021: MetricPattern1::new(client.clone(), "dca_class_2021_max_return".to_string()), - _2022: MetricPattern1::new(client.clone(), "dca_class_2022_max_return".to_string()), - _2023: MetricPattern1::new(client.clone(), "dca_class_2023_max_return".to_string()), - _2024: MetricPattern1::new(client.clone(), "dca_class_2024_max_return".to_string()), - _2025: MetricPattern1::new(client.clone(), "dca_class_2025_max_return".to_string()), - _2026: MetricPattern1::new(client.clone(), "dca_class_2026_max_return".to_string()), + _2015: MetricPattern1::new(client.clone(), "dca_class_2015_returns".to_string()), + _2016: MetricPattern1::new(client.clone(), "dca_class_2016_returns".to_string()), + _2017: MetricPattern1::new(client.clone(), "dca_class_2017_returns".to_string()), + _2018: MetricPattern1::new(client.clone(), "dca_class_2018_returns".to_string()), + _2019: MetricPattern1::new(client.clone(), "dca_class_2019_returns".to_string()), + _2020: 
MetricPattern1::new(client.clone(), "dca_class_2020_returns".to_string()), + _2021: MetricPattern1::new(client.clone(), "dca_class_2021_returns".to_string()), + _2022: MetricPattern1::new(client.clone(), "dca_class_2022_returns".to_string()), + _2023: MetricPattern1::new(client.clone(), "dca_class_2023_returns".to_string()), + _2024: MetricPattern1::new(client.clone(), "dca_class_2024_returns".to_string()), + _2025: MetricPattern1::new(client.clone(), "dca_class_2025_returns".to_string()), + _2026: MetricPattern1::new(client.clone(), "dca_class_2026_returns".to_string()), } } } @@ -5451,7 +5314,7 @@ pub struct MetricsTree_Market_Indicators_Macd { pub _1d: MetricsTree_Market_Indicators_Macd_1d, pub _1w: MetricsTree_Market_Indicators_Macd_1w, pub _1m: MetricsTree_Market_Indicators_Macd_1m, - pub _1y: HistogramLineSignalPattern, + pub _1y: EmaHistogramLineSignalPattern, } impl MetricsTree_Market_Indicators_Macd { @@ -5460,13 +5323,15 @@ impl MetricsTree_Market_Indicators_Macd { _1d: MetricsTree_Market_Indicators_Macd_1d::new(client.clone(), format!("{base_path}_1d")), _1w: MetricsTree_Market_Indicators_Macd_1w::new(client.clone(), format!("{base_path}_1w")), _1m: MetricsTree_Market_Indicators_Macd_1m::new(client.clone(), format!("{base_path}_1m")), - _1y: HistogramLineSignalPattern::new(client.clone(), "macd".to_string()), + _1y: EmaHistogramLineSignalPattern::new(client.clone(), "macd".to_string()), } } } /// Metrics tree node. 
pub struct MetricsTree_Market_Indicators_Macd_1d { + pub ema_fast: MetricPattern1, + pub ema_slow: MetricPattern1, pub line: MetricPattern1, pub signal: MetricPattern1, pub histogram: MetricPattern1, @@ -5475,6 +5340,8 @@ pub struct MetricsTree_Market_Indicators_Macd_1d { impl MetricsTree_Market_Indicators_Macd_1d { pub fn new(client: Arc, base_path: String) -> Self { Self { + ema_fast: MetricPattern1::new(client.clone(), "macd_ema_fast_1d".to_string()), + ema_slow: MetricPattern1::new(client.clone(), "macd_ema_slow_1d".to_string()), line: MetricPattern1::new(client.clone(), "macd_line_1d".to_string()), signal: MetricPattern1::new(client.clone(), "macd_signal_1d".to_string()), histogram: MetricPattern1::new(client.clone(), "macd_histogram_1d".to_string()), @@ -5484,6 +5351,8 @@ impl MetricsTree_Market_Indicators_Macd_1d { /// Metrics tree node. pub struct MetricsTree_Market_Indicators_Macd_1w { + pub ema_fast: MetricPattern1, + pub ema_slow: MetricPattern1, pub line: MetricPattern1, pub signal: MetricPattern1, pub histogram: MetricPattern1, @@ -5492,6 +5361,8 @@ pub struct MetricsTree_Market_Indicators_Macd_1w { impl MetricsTree_Market_Indicators_Macd_1w { pub fn new(client: Arc, base_path: String) -> Self { Self { + ema_fast: MetricPattern1::new(client.clone(), "macd_ema_fast_1w".to_string()), + ema_slow: MetricPattern1::new(client.clone(), "macd_ema_slow_1w".to_string()), line: MetricPattern1::new(client.clone(), "macd_line_1w".to_string()), signal: MetricPattern1::new(client.clone(), "macd_signal_1w".to_string()), histogram: MetricPattern1::new(client.clone(), "macd_histogram_1w".to_string()), @@ -5501,6 +5372,8 @@ impl MetricsTree_Market_Indicators_Macd_1w { /// Metrics tree node. 
pub struct MetricsTree_Market_Indicators_Macd_1m { + pub ema_fast: MetricPattern1, + pub ema_slow: MetricPattern1, pub line: MetricPattern1, pub signal: MetricPattern1, pub histogram: MetricPattern1, @@ -5509,6 +5382,8 @@ pub struct MetricsTree_Market_Indicators_Macd_1m { impl MetricsTree_Market_Indicators_Macd_1m { pub fn new(client: Arc, base_path: String) -> Self { Self { + ema_fast: MetricPattern1::new(client.clone(), "macd_ema_fast_1m".to_string()), + ema_slow: MetricPattern1::new(client.clone(), "macd_ema_slow_1m".to_string()), line: MetricPattern1::new(client.clone(), "macd_line_1m".to_string()), signal: MetricPattern1::new(client.clone(), "macd_signal_1m".to_string()), histogram: MetricPattern1::new(client.clone(), "macd_histogram_1m".to_string()), diff --git a/crates/brk_computer/src/blocks/count/compute.rs b/crates/brk_computer/src/blocks/count/compute.rs index 0f7f6dafc..e94dd7949 100644 --- a/crates/brk_computer/src/blocks/count/compute.rs +++ b/crates/brk_computer/src/blocks/count/compute.rs @@ -72,97 +72,93 @@ impl Vecs { self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 55, |s| { &mut s.height_55d_ago })?; - self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 2 * 30, |s| { + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 60, |s| { &mut s.height_2m_ago })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 63, |s| { + &mut s.height_9w_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 84, |s| { + &mut s.height_12w_ago + })?; self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 89, |s| { &mut s.height_89d_ago })?; - self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 3 * 30, |s| { + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 90, |s| { &mut s.height_3m_ago })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 98, |s| { + 
&mut s.height_14w_ago + })?; self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 111, |s| { &mut s.height_111d_ago })?; self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 144, |s| { &mut s.height_144d_ago })?; - self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 6 * 30, |s| { + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 180, |s| { &mut s.height_6m_ago })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 182, |s| { + &mut s.height_26w_ago + })?; self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 200, |s| { &mut s.height_200d_ago })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 270, |s| { + &mut s.height_9m_ago + })?; self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 350, |s| { &mut s.height_350d_ago })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 360, |s| { + &mut s.height_12m_ago + })?; self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 365, |s| { &mut s.height_1y_ago })?; - self.compute_rolling_start( - &monotonic_data, - time, - starting_indexes, - exit, - 2 * 365, - |s| &mut s.height_2y_ago, - )?; - self.compute_rolling_start( - &monotonic_data, - time, - starting_indexes, - exit, - 200 * 7, - |s| &mut s.height_200w_ago, - )?; - self.compute_rolling_start( - &monotonic_data, - time, - starting_indexes, - exit, - 3 * 365, - |s| &mut s.height_3y_ago, - )?; - self.compute_rolling_start( - &monotonic_data, - time, - starting_indexes, - exit, - 4 * 365, - |s| &mut s.height_4y_ago, - )?; - self.compute_rolling_start( - &monotonic_data, - time, - starting_indexes, - exit, - 5 * 365, - |s| &mut s.height_5y_ago, - )?; - self.compute_rolling_start( - &monotonic_data, - time, - starting_indexes, - exit, - 6 * 365, - |s| &mut s.height_6y_ago, - )?; - self.compute_rolling_start( - &monotonic_data, - time, - 
starting_indexes, - exit, - 8 * 365, - |s| &mut s.height_8y_ago, - )?; - self.compute_rolling_start( - &monotonic_data, - time, - starting_indexes, - exit, - 10 * 365, - |s| &mut s.height_10y_ago, - )?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 420, |s| { + &mut s.height_14m_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 730, |s| { + &mut s.height_2y_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 780, |s| { + &mut s.height_26m_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 1095, |s| { + &mut s.height_3y_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 1400, |s| { + &mut s.height_200w_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 1460, |s| { + &mut s.height_4y_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 1825, |s| { + &mut s.height_5y_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 2190, |s| { + &mut s.height_6y_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 2920, |s| { + &mut s.height_8y_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 3285, |s| { + &mut s.height_9y_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 3650, |s| { + &mut s.height_10y_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 4380, |s| { + &mut s.height_12y_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 5110, |s| { + &mut s.height_14y_ago + })?; + self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 9490, |s| { + &mut s.height_26y_ago + })?; // Compute rolling window block counts (both block_count's own rolling + separate block_count_sum) let ws = crate::internal::WindowStarts { 
diff --git a/crates/brk_computer/src/blocks/count/import.rs b/crates/brk_computer/src/blocks/count/import.rs index fdac4dbdc..637af210f 100644 --- a/crates/brk_computer/src/blocks/count/import.rs +++ b/crates/brk_computer/src/blocks/count/import.rs @@ -37,22 +37,34 @@ impl Vecs { height_34d_ago: ImportableVec::forced_import(db, "height_34d_ago", version)?, height_55d_ago: ImportableVec::forced_import(db, "height_55d_ago", version)?, height_2m_ago: ImportableVec::forced_import(db, "height_2m_ago", version)?, + height_9w_ago: ImportableVec::forced_import(db, "height_9w_ago", version)?, + height_12w_ago: ImportableVec::forced_import(db, "height_12w_ago", version)?, height_89d_ago: ImportableVec::forced_import(db, "height_89d_ago", version)?, + height_3m_ago: ImportableVec::forced_import(db, "height_3m_ago", version)?, + height_14w_ago: ImportableVec::forced_import(db, "height_14w_ago", version)?, height_111d_ago: ImportableVec::forced_import(db, "height_111d_ago", version)?, height_144d_ago: ImportableVec::forced_import(db, "height_144d_ago", version)?, - height_3m_ago: ImportableVec::forced_import(db, "height_3m_ago", version)?, height_6m_ago: ImportableVec::forced_import(db, "height_6m_ago", version)?, + height_26w_ago: ImportableVec::forced_import(db, "height_26w_ago", version)?, height_200d_ago: ImportableVec::forced_import(db, "height_200d_ago", version)?, + height_9m_ago: ImportableVec::forced_import(db, "height_9m_ago", version)?, height_350d_ago: ImportableVec::forced_import(db, "height_350d_ago", version)?, + height_12m_ago: ImportableVec::forced_import(db, "height_12m_ago", version)?, height_1y_ago: ImportableVec::forced_import(db, "height_1y_ago", version)?, + height_14m_ago: ImportableVec::forced_import(db, "height_14m_ago", version)?, height_2y_ago: ImportableVec::forced_import(db, "height_2y_ago", version)?, - height_200w_ago: ImportableVec::forced_import(db, "height_200w_ago", version)?, + height_26m_ago: ImportableVec::forced_import(db, 
"height_26m_ago", version)?, height_3y_ago: ImportableVec::forced_import(db, "height_3y_ago", version)?, + height_200w_ago: ImportableVec::forced_import(db, "height_200w_ago", version)?, height_4y_ago: ImportableVec::forced_import(db, "height_4y_ago", version)?, height_5y_ago: ImportableVec::forced_import(db, "height_5y_ago", version)?, height_6y_ago: ImportableVec::forced_import(db, "height_6y_ago", version)?, height_8y_ago: ImportableVec::forced_import(db, "height_8y_ago", version)?, + height_9y_ago: ImportableVec::forced_import(db, "height_9y_ago", version)?, height_10y_ago: ImportableVec::forced_import(db, "height_10y_ago", version)?, + height_12y_ago: ImportableVec::forced_import(db, "height_12y_ago", version)?, + height_14y_ago: ImportableVec::forced_import(db, "height_14y_ago", version)?, + height_26y_ago: ImportableVec::forced_import(db, "height_26y_ago", version)?, block_count_sum: RollingWindows::forced_import( db, "block_count_sum", diff --git a/crates/brk_computer/src/blocks/count/vecs.rs b/crates/brk_computer/src/blocks/count/vecs.rs index 99a4bc160..170497e7a 100644 --- a/crates/brk_computer/src/blocks/count/vecs.rs +++ b/crates/brk_computer/src/blocks/count/vecs.rs @@ -12,37 +12,50 @@ pub struct Vecs { pub block_count: ComputedFromHeightCumulativeSum, pub block_count_sum: RollingWindows, + // Window starts sorted by duration pub height_1h_ago: M::Stored>>, - pub height_24h_ago: M::Stored>>, + pub height_24h_ago: M::Stored>>, // 1d pub height_3d_ago: M::Stored>>, - pub height_1w_ago: M::Stored>>, + pub height_1w_ago: M::Stored>>, // 7d pub height_8d_ago: M::Stored>>, pub height_9d_ago: M::Stored>>, pub height_12d_ago: M::Stored>>, pub height_13d_ago: M::Stored>>, - pub height_2w_ago: M::Stored>>, + pub height_2w_ago: M::Stored>>, // 14d pub height_21d_ago: M::Stored>>, pub height_26d_ago: M::Stored>>, - pub height_1m_ago: M::Stored>>, + pub height_1m_ago: M::Stored>>, // 30d pub height_34d_ago: M::Stored>>, pub height_55d_ago: M::Stored>>, - pub 
height_2m_ago: M::Stored>>, + pub height_2m_ago: M::Stored>>, // 60d + pub height_9w_ago: M::Stored>>, // 63d + pub height_12w_ago: M::Stored>>, // 84d pub height_89d_ago: M::Stored>>, + pub height_3m_ago: M::Stored>>, // 90d + pub height_14w_ago: M::Stored>>, // 98d pub height_111d_ago: M::Stored>>, pub height_144d_ago: M::Stored>>, - pub height_3m_ago: M::Stored>>, - pub height_6m_ago: M::Stored>>, + pub height_6m_ago: M::Stored>>, // 180d + pub height_26w_ago: M::Stored>>, // 182d pub height_200d_ago: M::Stored>>, + pub height_9m_ago: M::Stored>>, // 270d pub height_350d_ago: M::Stored>>, - pub height_1y_ago: M::Stored>>, - pub height_2y_ago: M::Stored>>, - pub height_200w_ago: M::Stored>>, - pub height_3y_ago: M::Stored>>, - pub height_4y_ago: M::Stored>>, - pub height_5y_ago: M::Stored>>, - pub height_6y_ago: M::Stored>>, - pub height_8y_ago: M::Stored>>, - pub height_10y_ago: M::Stored>>, + pub height_12m_ago: M::Stored>>, // 360d + pub height_1y_ago: M::Stored>>, // 365d + pub height_14m_ago: M::Stored>>, // 420d + pub height_2y_ago: M::Stored>>, // 730d + pub height_26m_ago: M::Stored>>, // 780d + pub height_3y_ago: M::Stored>>, // 1095d + pub height_200w_ago: M::Stored>>, // 1400d + pub height_4y_ago: M::Stored>>, // 1460d + pub height_5y_ago: M::Stored>>, // 1825d + pub height_6y_ago: M::Stored>>, // 2190d + pub height_8y_ago: M::Stored>>, // 2920d + pub height_9y_ago: M::Stored>>, // 3285d + pub height_10y_ago: M::Stored>>, // 3650d + pub height_12y_ago: M::Stored>>, // 4380d + pub height_14y_ago: M::Stored>>, // 5110d + pub height_26y_ago: M::Stored>>, // 9490d } impl Vecs { @@ -72,22 +85,34 @@ impl Vecs { 34 => &self.height_34d_ago, 55 => &self.height_55d_ago, 60 => &self.height_2m_ago, + 63 => &self.height_9w_ago, + 84 => &self.height_12w_ago, 89 => &self.height_89d_ago, 90 => &self.height_3m_ago, + 98 => &self.height_14w_ago, 111 => &self.height_111d_ago, 144 => &self.height_144d_ago, 180 => &self.height_6m_ago, + 182 => &self.height_26w_ago, 200 => 
&self.height_200d_ago, + 270 => &self.height_9m_ago, 350 => &self.height_350d_ago, + 360 => &self.height_12m_ago, 365 => &self.height_1y_ago, + 420 => &self.height_14m_ago, 730 => &self.height_2y_ago, + 780 => &self.height_26m_ago, 1095 => &self.height_3y_ago, 1400 => &self.height_200w_ago, 1460 => &self.height_4y_ago, 1825 => &self.height_5y_ago, 2190 => &self.height_6y_ago, 2920 => &self.height_8y_ago, + 3285 => &self.height_9y_ago, 3650 => &self.height_10y_ago, + 4380 => &self.height_12y_ago, + 5110 => &self.height_14y_ago, + 9490 => &self.height_26y_ago, _ => panic!("No start vec for {days} days"), } } diff --git a/crates/brk_computer/src/blocks/difficulty/compute.rs b/crates/brk_computer/src/blocks/difficulty/compute.rs index 264610003..e9118efa6 100644 --- a/crates/brk_computer/src/blocks/difficulty/compute.rs +++ b/crates/brk_computer/src/blocks/difficulty/compute.rs @@ -1,6 +1,6 @@ use brk_error::Result; use brk_indexer::Indexer; -use brk_types::{StoredF32, StoredU32}; +use brk_types::{StoredF64, StoredU32}; use vecdb::Exit; use super::super::TARGET_BLOCKS_PER_DAY_F32; @@ -22,7 +22,7 @@ impl Vecs { self.as_hash.height.compute_transform( starting_indexes.height, &indexer.vecs.blocks.difficulty, - |(i, v, ..)| (i, StoredF32::from(*v * multiplier)), + |(i, v, ..)| (i, StoredF64::from(*v * multiplier)), exit, )?; diff --git a/crates/brk_computer/src/blocks/difficulty/vecs.rs b/crates/brk_computer/src/blocks/difficulty/vecs.rs index d96682bc3..55a1971cc 100644 --- a/crates/brk_computer/src/blocks/difficulty/vecs.rs +++ b/crates/brk_computer/src/blocks/difficulty/vecs.rs @@ -8,7 +8,7 @@ use crate::internal::{ComputedFromHeight, ComputedHeightDerived}; #[derive(Traversable)] pub struct Vecs { pub raw: ComputedHeightDerived, - pub as_hash: ComputedFromHeight, + pub as_hash: ComputedFromHeight, pub adjustment: ComputedFromHeight, pub epoch: ComputedFromHeight, pub blocks_before_next_adjustment: ComputedFromHeight, diff --git 
a/crates/brk_computer/src/distribution/compute/block_loop.rs b/crates/brk_computer/src/distribution/compute/block_loop.rs index d02bd0778..2d7dd6a76 100644 --- a/crates/brk_computer/src/distribution/compute/block_loop.rs +++ b/crates/brk_computer/src/distribution/compute/block_loop.rs @@ -48,6 +48,7 @@ pub(crate) fn process_blocks( starting_height: Height, last_height: Height, chain_state: &mut Vec, + txindex_to_height: &mut RangeMap, exit: &Exit, ) -> Result<()> { // Create computation context with pre-computed vectors for thread-safe access @@ -110,26 +111,28 @@ pub(crate) fn process_blocks( let mut vr = VecsReaders::new(&vecs.any_address_indexes, &vecs.addresses_data); debug!("VecsReaders created"); - // Build txindex -> height lookup map for efficient prev_height computation - debug!("building txindex_to_height RangeMap"); - let mut txindex_to_height: RangeMap = { - let first_txindex_len = indexer.vecs.transactions.first_txindex.len(); - let all_first_txindexes: Vec = indexer + // Extend txindex_to_height RangeMap with new entries (incremental, O(new_blocks)) + let target_len = indexer.vecs.transactions.first_txindex.len(); + let current_len = txindex_to_height.len(); + if current_len < target_len { + debug!("extending txindex_to_height RangeMap from {} to {}", current_len, target_len); + let new_entries: Vec = indexer .vecs .transactions .first_txindex - .collect_range_at(0, first_txindex_len); - let mut map = RangeMap::with_capacity(first_txindex_len); - for first_txindex in all_first_txindexes { - map.push(first_txindex); + .collect_range_at(current_len, target_len); + for first_txindex in new_entries { + txindex_to_height.push(first_txindex); } - map - }; - debug!("txindex_to_height RangeMap built"); + } else if current_len > target_len { + debug!("truncating txindex_to_height RangeMap from {} to {}", current_len, target_len); + txindex_to_height.truncate(target_len); + } + debug!("txindex_to_height RangeMap ready ({} entries)", txindex_to_height.len()); // 
Create reusable iterators and buffers for per-block reads let mut txout_iters = TxOutReaders::new(indexer); - let mut txin_iters = TxInReaders::new(indexer, inputs, &mut txindex_to_height); + let mut txin_iters = TxInReaders::new(indexer, inputs, txindex_to_height); let mut txout_to_txindex_buf = IndexToTxIndexBuf::new(); let mut txin_to_txindex_buf = IndexToTxIndexBuf::new(); diff --git a/crates/brk_computer/src/distribution/range_map.rs b/crates/brk_computer/src/distribution/range_map.rs index 5df4849c8..5b5e72a62 100644 --- a/crates/brk_computer/src/distribution/range_map.rs +++ b/crates/brk_computer/src/distribution/range_map.rs @@ -12,7 +12,7 @@ const CACHE_SIZE: usize = 8; /// /// Includes an LRU cache of recently accessed ranges to avoid binary search /// when there's locality in access patterns. -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct RangeMap { /// Sorted vec of first_index values. Position in vec = value. first_indexes: Vec, @@ -44,6 +44,17 @@ impl + Copy + Default> RangeMap { } } + /// Number of ranges stored. + pub(crate) fn len(&self) -> usize { + self.first_indexes.len() + } + + /// Truncate to `new_len` ranges and clear the cache. + pub(crate) fn truncate(&mut self, new_len: usize) { + self.first_indexes.truncate(new_len); + self.cache_len = 0; + } + /// Push a new first_index. Value is implicitly the current length. /// Must be called in order (first_index must be >= all previous). 
#[inline] diff --git a/crates/brk_computer/src/distribution/vecs.rs b/crates/brk_computer/src/distribution/vecs.rs index f10418798..1861c84cc 100644 --- a/crates/brk_computer/src/distribution/vecs.rs +++ b/crates/brk_computer/src/distribution/vecs.rs @@ -5,7 +5,7 @@ use brk_indexer::Indexer; use brk_traversable::Traversable; use brk_types::{ Day1, EmptyAddressData, EmptyAddressIndex, FundedAddressData, FundedAddressIndex, Height, - SupplyState, Version, + SupplyState, TxIndex, Version, }; use tracing::{debug, info}; use vecdb::{ @@ -23,7 +23,7 @@ use crate::{ }; use super::{ - AddressCohorts, AddressesDataVecs, AnyAddressIndexesVecs, UTXOCohorts, + AddressCohorts, AddressesDataVecs, AnyAddressIndexesVecs, RangeMap, UTXOCohorts, address::{ AddrCountsVecs, AddressActivityVecs, GrowthRateVecs, NewAddrCountVecs, TotalAddrCountVecs, }, @@ -61,6 +61,14 @@ pub struct Vecs { LazyVecFrom1, pub emptyaddressindex: LazyVecFrom1, + + /// In-memory block state for UTXO processing. Persisted via supply_state. + /// Kept across compute() calls to avoid O(n) rebuild on resume. + #[traversable(skip)] + chain_state: Vec, + /// In-memory txindex→height reverse lookup. Kept across compute() calls. 
+ #[traversable(skip)] + txindex_to_height: RangeMap, } const SAVED_STAMPED_CHANGES: u16 = 10; @@ -148,6 +156,9 @@ impl Vecs { fundedaddressindex, emptyaddressindex, + chain_state: Vec::new(), + txindex_to_height: RangeMap::default(), + db, states_path, }; @@ -230,8 +241,12 @@ impl Vecs { debug!("recovered_height={}", recovered_height); - // Fresh start: reset all state - let (starting_height, mut chain_state) = if recovered_height.is_zero() { + // Take chain_state and txindex_to_height out of self to avoid borrow conflicts + let mut chain_state = std::mem::take(&mut self.chain_state); + let mut txindex_to_height = std::mem::take(&mut self.txindex_to_height); + + // Recover or reuse chain_state + let starting_height = if recovered_height.is_zero() { self.supply_state.reset()?; self.addr_count.reset_height()?; self.empty_addr_count.reset_height()?; @@ -243,11 +258,18 @@ impl Vecs { &mut self.address_cohorts, )?; + chain_state.clear(); + txindex_to_height.truncate(0); + info!("State recovery: fresh start"); - (Height::ZERO, vec![]) + Height::ZERO + } else if chain_state.len() == usize::from(recovered_height) { + // Normal resume: chain_state already matches, reuse as-is + debug!("reusing in-memory chain_state ({} entries)", chain_state.len()); + recovered_height } else { - // Recover chain_state from stored values - debug!("recovering chain_state from stored values"); + // Rollback or first run after restart: rebuild from supply_state + debug!("rebuilding chain_state from stored values"); let height_to_timestamp = &blocks.time.timestamp_monotonic; let height_to_price = &prices.price.cents.height; @@ -257,7 +279,7 @@ impl Vecs { debug!("building supply_state vec for {} heights", recovered_height); let supply_state_data: Vec<_> = self.supply_state.collect_range_at(0, end); - let chain_state = supply_state_data + chain_state = supply_state_data .into_iter() .enumerate() .map(|(h, supply)| BlockState { @@ -266,9 +288,12 @@ impl Vecs { timestamp: timestamp_data[h], }) 
.collect(); - debug!("chain_state vec built"); + debug!("chain_state rebuilt"); - (recovered_height, chain_state) + // Truncate RangeMap to match (entries are immutable, safe to keep) + txindex_to_height.truncate(end); + + recovered_height }; // Update starting_indexes if we need to recompute from an earlier point @@ -316,10 +341,15 @@ impl Vecs { starting_height, last_height, &mut chain_state, + &mut txindex_to_height, exit, )?; } + // Put chain_state and txindex_to_height back + self.chain_state = chain_state; + self.txindex_to_height = txindex_to_height; + // 5. Compute aggregates (overlapping cohorts from separate cohorts) aggregates::compute_overlapping( &mut self.utxo_cohorts, diff --git a/crates/brk_computer/src/internal/from_height/bps.rs b/crates/brk_computer/src/internal/from_height/bps.rs new file mode 100644 index 000000000..58797a6b1 --- /dev/null +++ b/crates/brk_computer/src/internal/from_height/bps.rs @@ -0,0 +1,46 @@ +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{StoredF32, Version}; +use schemars::JsonSchema; +use vecdb::{Database, ReadableCloneableVec, Rw, StorageMode, UnaryTransform}; + +use crate::indexes; + +use super::{ComputedFromHeight, LazyFromHeight}; +use crate::internal::NumericValue; + +/// Basis-point storage with lazy float view. +/// +/// Stores integer basis points on disk (Pco-compressed), +/// exposes a lazy StoredF32 view (bps / 100). 
+#[derive(Traversable)] +pub struct BpsFromHeight +where + B: NumericValue + JsonSchema, +{ + pub bps: ComputedFromHeight, + pub float: LazyFromHeight, +} + +impl BpsFromHeight +where + B: NumericValue + JsonSchema, +{ + pub(crate) fn forced_import>( + db: &Database, + name: &str, + version: Version, + indexes: &indexes::Vecs, + ) -> Result { + let bps = ComputedFromHeight::forced_import(db, name, version, indexes)?; + + let float = LazyFromHeight::from_computed::( + &format!("{name}_float"), + version, + bps.height.read_only_boxed_clone(), + &bps, + ); + + Ok(Self { bps, float }) + } +} diff --git a/crates/brk_computer/src/internal/from_height/mod.rs b/crates/brk_computer/src/internal/from_height/mod.rs index 34b750254..c303c1efa 100644 --- a/crates/brk_computer/src/internal/from_height/mod.rs +++ b/crates/brk_computer/src/internal/from_height/mod.rs @@ -1,5 +1,6 @@ mod aggregated; mod base; +mod bps; mod by_unit; mod constant; mod cumulative; @@ -16,6 +17,7 @@ mod value; pub use aggregated::*; pub use base::*; +pub use bps::*; pub use by_unit::*; pub use constant::*; pub use cumulative::*; diff --git a/crates/brk_computer/src/internal/from_height/ratio/extension.rs b/crates/brk_computer/src/internal/from_height/ratio/extension.rs index 57c9ae1b7..ce28d42d1 100644 --- a/crates/brk_computer/src/internal/from_height/ratio/extension.rs +++ b/crates/brk_computer/src/internal/from_height/ratio/extension.rs @@ -7,7 +7,6 @@ use crate::{ ComputeIndexes, blocks, indexes, internal::{ComputedFromHeightStdDevExtended, Price}, }; -use brk_types::get_percentile; use super::super::ComputedFromHeight; @@ -34,7 +33,7 @@ pub struct ComputedFromHeightRatioExtension { pub ratio_1y_sd: ComputedFromHeightStdDevExtended, } -const VERSION: Version = Version::TWO; +const VERSION: Version = Version::new(3); impl ComputedFromHeightRatioExtension { pub(crate) fn forced_import( @@ -119,7 +118,8 @@ impl ComputedFromHeightRatioExtension { exit, )?; - // Percentiles: insert into sorted array on 
day boundaries + // Percentiles via order-statistic Fenwick tree with coordinate compression. + // O(n log n) total vs O(n²) for the naive sorted-insert approach. let ratio_version = ratio_source.version(); self.mut_ratio_vecs() .try_for_each(|v| -> Result<()> { @@ -135,68 +135,85 @@ impl ComputedFromHeightRatioExtension { .min(starting_indexes.height); let start = starting_height.to_usize(); - let day_start = &blocks.count.height_24h_ago; + let ratio_len = ratio_source.len(); - // Collect sorted history up to starting point (one per day boundary) - let mut sorted = { - let ratio_data = ratio_source.collect_range_at(0, start); - let day_start_hist = day_start.collect_range_at(0, start); - let mut sorted: Vec = Vec::new(); - let mut last_day_start = Height::from(0_usize); - for (h, ratio) in ratio_data.into_iter().enumerate() { - let cur_day_start = day_start_hist[h]; - if h == 0 || cur_day_start != last_day_start { - sorted.push(ratio); - last_day_start = cur_day_start; + if ratio_len > start { + let all_ratios = ratio_source.collect_range_at(0, ratio_len); + + // Coordinate compression: unique sorted values → integer ranks + let coords = { + let mut c = all_ratios.clone(); + c.sort_unstable(); + c.dedup(); + c + }; + let m = coords.len(); + + // Build Fenwick tree (BIT) from elements [0, start) in O(m) + let mut bit = vec![0u32; m + 1]; // 1-indexed + for &v in &all_ratios[..start] { + bit[coords.binary_search(&v).unwrap() + 1] += 1; + } + for i in 1..=m { + let j = i + (i & i.wrapping_neg()); + if j <= m { + bit[j] += bit[i]; } } - sorted.sort_unstable(); - sorted - }; - let pct1_vec = &mut self.ratio_pct1.height; - let pct2_vec = &mut self.ratio_pct2.height; - let pct5_vec = &mut self.ratio_pct5.height; - let pct95_vec = &mut self.ratio_pct95.height; - let pct98_vec = &mut self.ratio_pct98.height; - let pct99_vec = &mut self.ratio_pct99.height; + // Highest power of 2 <= m (for binary-lifting kth query) + let log2 = { + let mut b = 1usize; + while b <= m { + b 
<<= 1; + } + b >> 1 + }; - let ratio_len = ratio_source.len(); - let ratio_data = ratio_source.collect_range_at(start, ratio_len); - let mut last_day_start = if start > 0 { - day_start - .collect_one_at(start - 1) - .unwrap_or(Height::from(0_usize)) - } else { - Height::from(0_usize) - }; + // Find rank of k-th smallest element (k is 1-indexed) in O(log m) + let kth = |bit: &[u32], mut k: u32| -> usize { + let mut pos = 0; + let mut b = log2; + while b > 0 { + let next = pos + b; + if next <= m && bit[next] < k { + k -= bit[next]; + pos = next; + } + b >>= 1; + } + pos + }; - let day_start_data = day_start.collect_range_at(start, ratio_len); + let mut pct_vecs: [&mut EagerVec>; 6] = [ + &mut self.ratio_pct1.height, + &mut self.ratio_pct2.height, + &mut self.ratio_pct5.height, + &mut self.ratio_pct95.height, + &mut self.ratio_pct98.height, + &mut self.ratio_pct99.height, + ]; + const PCTS: [f64; 6] = [0.01, 0.02, 0.05, 0.95, 0.98, 0.99]; - for (offset, ratio) in ratio_data.into_iter().enumerate() { - let index = start + offset; + let mut count = start; + for (offset, &ratio) in all_ratios[start..].iter().enumerate() { + count += 1; - let cur_day_start = day_start_data[offset]; - if index == 0 || cur_day_start != last_day_start { - let pos = sorted.binary_search(&ratio).unwrap_or_else(|p| p); - sorted.insert(pos, ratio); - last_day_start = cur_day_start; - } + // Insert into Fenwick tree: O(log m) + let mut i = coords.binary_search(&ratio).unwrap() + 1; + while i <= m { + bit[i] += 1; + i += i & i.wrapping_neg(); + } - if sorted.is_empty() { - pct1_vec.truncate_push_at(index, StoredF32::NAN)?; - pct2_vec.truncate_push_at(index, StoredF32::NAN)?; - pct5_vec.truncate_push_at(index, StoredF32::NAN)?; - pct95_vec.truncate_push_at(index, StoredF32::NAN)?; - pct98_vec.truncate_push_at(index, StoredF32::NAN)?; - pct99_vec.truncate_push_at(index, StoredF32::NAN)?; - } else { - pct1_vec.truncate_push_at(index, get_percentile(&sorted, 0.01))?; - 
pct2_vec.truncate_push_at(index, get_percentile(&sorted, 0.02))?; - pct5_vec.truncate_push_at(index, get_percentile(&sorted, 0.05))?; - pct95_vec.truncate_push_at(index, get_percentile(&sorted, 0.95))?; - pct98_vec.truncate_push_at(index, get_percentile(&sorted, 0.98))?; - pct99_vec.truncate_push_at(index, get_percentile(&sorted, 0.99))?; + // Nearest-rank percentile: one kth query each + let idx = start + offset; + let cf = count as f64; + for (vec, &pct) in pct_vecs.iter_mut().zip(PCTS.iter()) { + let k = (cf * pct).ceil().max(1.0) as u32; + let val = coords[kth(&bit, k)]; + vec.truncate_push_at(idx, val)?; + } } } diff --git a/crates/brk_computer/src/internal/from_height/stddev/mod.rs b/crates/brk_computer/src/internal/from_height/stddev/mod.rs index 39d79b40f..96a1483ee 100644 --- a/crates/brk_computer/src/internal/from_height/stddev/mod.rs +++ b/crates/brk_computer/src/internal/from_height/stddev/mod.rs @@ -5,7 +5,7 @@ pub use extended::*; use brk_error::Result; use brk_traversable::Traversable; use brk_types::{Height, StoredF32, Version}; -use vecdb::{AnyStoredVec, AnyVec, Database, Exit, ReadableVec, Rw, StorageMode, VecIndex, WritableVec}; +use vecdb::{Database, Exit, ReadableVec, Rw, StorageMode}; use crate::{ComputeIndexes, blocks, indexes}; @@ -51,118 +51,23 @@ impl ComputedFromHeightStdDev { exit: &Exit, source: &impl ReadableVec, ) -> Result<()> { - // 1. 
Compute SMA using the appropriate lookback vec (or full-history SMA) - if self.days != usize::MAX { - let window_starts = blocks.count.start_vec(self.days); - self.sma.height.compute_rolling_average( - starting_indexes.height, - window_starts, - source, - exit, - )?; - } else { - // Full history SMA (days == usize::MAX) - self.sma.height.compute_sma_( - starting_indexes.height, - source, - self.days, - exit, - None, - )?; - } + let window_starts = blocks.count.start_vec(self.days); - // Split borrows: sd is mutated, sma is read - compute_sd( - &mut self.sd, - blocks, - starting_indexes, - exit, - &self.sma.height, + self.sma.height.compute_rolling_average( + starting_indexes.height, + window_starts, source, - ) + exit, + )?; + + self.sd.height.compute_rolling_sd( + starting_indexes.height, + window_starts, + source, + &self.sma.height, + exit, + )?; + + Ok(()) } } - -fn compute_sd( - sd: &mut ComputedFromHeight, - blocks: &blocks::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - sma: &impl ReadableVec, - source: &impl ReadableVec, -) -> Result<()> { - let source_version = source.version(); - - sd.height - .validate_computed_version_or_reset(source_version)?; - - let starting_height = Height::from(sd.height.len()).min(starting_indexes.height); - - let day_start = &blocks.count.height_24h_ago; - let start = starting_height.to_usize(); - - let mut n: usize = 0; - let mut welford_sum: f64 = 0.0; - let mut welford_sum_sq: f64 = 0.0; - if start > 0 { - let day_start_hist = day_start.collect_range_at(0, start); - let source_hist = source.collect_range_at(0, start); - let mut last_ds = Height::from(0_usize); - for h in 0..start { - let cur_ds = day_start_hist[h]; - if h == 0 || cur_ds != last_ds { - let val = *source_hist[h] as f64; - n += 1; - welford_sum += val; - welford_sum_sq += val * val; - last_ds = cur_ds; - } - } - } - - let source_len = source.len(); - let source_data = source.collect_range_at(start, source_len); - let sma_data = 
sma.collect_range_at(start, sma.len()); - let mut last_day_start = if start > 0 { - day_start - .collect_one_at(start - 1) - .unwrap_or(Height::from(0_usize)) - } else { - Height::from(0_usize) - }; - - let day_start_data = day_start.collect_range_at(start, source_len); - - for (offset, ratio) in source_data.into_iter().enumerate() { - let index = start + offset; - let cur_day_start = day_start_data[offset]; - if index == 0 || cur_day_start != last_day_start { - let val = *ratio as f64; - n += 1; - welford_sum += val; - welford_sum_sq += val * val; - last_day_start = cur_day_start; - } - - let average = sma_data[offset]; - let avg_f64 = *average as f64; - - let sd_val = if n > 0 { - let nf = n as f64; - let variance = - welford_sum_sq / nf - 2.0 * avg_f64 * welford_sum / nf + avg_f64 * avg_f64; - StoredF32::from(variance.max(0.0).sqrt() as f32) - } else { - StoredF32::from(0.0_f32) - }; - - sd.height.truncate_push_at(index, sd_val)?; - } - - { - let _lock = exit.lock(); - sd.height.flush()?; - } - - Ok(()) -} diff --git a/crates/brk_computer/src/internal/transform/bp16_to_float.rs b/crates/brk_computer/src/internal/transform/bp16_to_float.rs new file mode 100644 index 000000000..20c8357ad --- /dev/null +++ b/crates/brk_computer/src/internal/transform/bp16_to_float.rs @@ -0,0 +1,11 @@ +use brk_types::{BasisPoints16, StoredF32}; +use vecdb::UnaryTransform; + +pub struct Bp16ToFloat; + +impl UnaryTransform for Bp16ToFloat { + #[inline(always)] + fn apply(bp: BasisPoints16) -> StoredF32 { + StoredF32::from(bp.to_f32()) + } +} diff --git a/crates/brk_computer/src/internal/transform/bp32_to_float.rs b/crates/brk_computer/src/internal/transform/bp32_to_float.rs new file mode 100644 index 000000000..9226be12e --- /dev/null +++ b/crates/brk_computer/src/internal/transform/bp32_to_float.rs @@ -0,0 +1,11 @@ +use brk_types::{BasisPoints32, StoredF32}; +use vecdb::UnaryTransform; + +pub struct Bp32ToFloat; + +impl UnaryTransform for Bp32ToFloat { + #[inline(always)] + fn 
apply(bp: BasisPoints32) -> StoredF32 { + StoredF32::from(bp.to_f32()) + } +} diff --git a/crates/brk_computer/src/internal/transform/bps16_to_float.rs b/crates/brk_computer/src/internal/transform/bps16_to_float.rs new file mode 100644 index 000000000..7a3882cdc --- /dev/null +++ b/crates/brk_computer/src/internal/transform/bps16_to_float.rs @@ -0,0 +1,11 @@ +use brk_types::{BasisPointsSigned16, StoredF32}; +use vecdb::UnaryTransform; + +pub struct Bps16ToFloat; + +impl UnaryTransform for Bps16ToFloat { + #[inline(always)] + fn apply(bp: BasisPointsSigned16) -> StoredF32 { + StoredF32::from(bp.to_f32()) + } +} diff --git a/crates/brk_computer/src/internal/transform/bps32_to_float.rs b/crates/brk_computer/src/internal/transform/bps32_to_float.rs new file mode 100644 index 000000000..a43f1fc07 --- /dev/null +++ b/crates/brk_computer/src/internal/transform/bps32_to_float.rs @@ -0,0 +1,11 @@ +use brk_types::{BasisPointsSigned32, StoredF32}; +use vecdb::UnaryTransform; + +pub struct Bps32ToFloat; + +impl UnaryTransform for Bps32ToFloat { + #[inline(always)] + fn apply(bp: BasisPointsSigned32) -> StoredF32 { + StoredF32::from(bp.to_f32()) + } +} diff --git a/crates/brk_computer/src/internal/transform/mod.rs b/crates/brk_computer/src/internal/transform/mod.rs index 14fbf8840..07c9816f3 100644 --- a/crates/brk_computer/src/internal/transform/mod.rs +++ b/crates/brk_computer/src/internal/transform/mod.rs @@ -1,3 +1,7 @@ +mod bp16_to_float; +mod bp32_to_float; +mod bps16_to_float; +mod bps32_to_float; mod block_count_target; mod cents_halve; mod cents_identity; @@ -42,6 +46,10 @@ mod volatility_sqrt30; mod volatility_sqrt365; mod volatility_sqrt7; +pub use bp16_to_float::*; +pub use bp32_to_float::*; +pub use bps16_to_float::*; +pub use bps32_to_float::*; pub use block_count_target::*; pub use cents_halve::*; pub use cents_identity::*; diff --git a/crates/brk_computer/src/market/ath/compute.rs b/crates/brk_computer/src/market/ath/compute.rs index 713931542..16f2ba198 100644 
--- a/crates/brk_computer/src/market/ath/compute.rs +++ b/crates/brk_computer/src/market/ath/compute.rs @@ -1,14 +1,15 @@ use brk_error::Result; -use brk_types::StoredU16; +use brk_types::{Day1, StoredU16}; use vecdb::{Exit, ReadableVec, VecIndex}; use super::Vecs; -use crate::{ComputeIndexes, prices, traits::ComputeDrawdown}; +use crate::{ComputeIndexes, indexes, prices, traits::ComputeDrawdown}; impl Vecs { pub(crate) fn compute( &mut self, prices: &prices::Vecs, + indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { @@ -18,27 +19,29 @@ impl Vecs { exit, )?; - let mut prev = None; - self.days_since_price_ath.height.compute_transform2( + let mut ath_day: Option = None; + self.days_since_price_ath.height.compute_transform3( starting_indexes.height, &self.price_ath.cents.height, &prices.price.cents.height, - |(i, ath, price, slf)| { - if prev.is_none() { - let i = i.to_usize(); - prev.replace(if i > 0 { - slf.collect_one_at(i - 1).unwrap() + &indexes.height.day1, + |(i, ath, price, day, slf)| { + if ath_day.is_none() { + let idx = i.to_usize(); + ath_day = Some(if idx > 0 { + let prev_days_since = slf.collect_one_at(idx - 1).unwrap(); + Day1::from(day.to_usize().saturating_sub(usize::from(prev_days_since))) } else { - StoredU16::default() + day }); } - let days = if price == ath { - StoredU16::default() + if price == ath { + ath_day = Some(day); + (i, StoredU16::default()) } else { - prev.unwrap() + StoredU16::new(1) - }; - prev.replace(days); - (i, days) + let days_since = (day.to_usize() - ath_day.unwrap().to_usize()) as u16; + (i, StoredU16::from(days_since)) + } }, exit, )?; diff --git a/crates/brk_computer/src/market/compute.rs b/crates/brk_computer/src/market/compute.rs index e7d13618a..a9e1077b0 100644 --- a/crates/brk_computer/src/market/compute.rs +++ b/crates/brk_computer/src/market/compute.rs @@ -19,7 +19,7 @@ impl Vecs { exit: &Exit, ) -> Result<()> { // ATH metrics (independent) - self.ath.compute(prices, 
starting_indexes, exit)?; + self.ath.compute(prices, indexes, starting_indexes, exit)?; // Lookback metrics (independent) self.lookback @@ -27,7 +27,7 @@ impl Vecs { // Returns metrics (depends on lookback) self.returns - .compute(indexes, prices, blocks, &self.lookback, starting_indexes, exit)?; + .compute(prices, blocks, &self.lookback, starting_indexes, exit)?; // Volatility (depends on returns) self.volatility @@ -39,7 +39,7 @@ impl Vecs { // Moving average metrics (independent) self.moving_average - .compute(blocks, prices, indexes, starting_indexes, exit)?; + .compute(blocks, prices, starting_indexes, exit)?; // DCA metrics (depends on lookback for lump sum comparison) self.dca diff --git a/crates/brk_computer/src/market/dca/compute.rs b/crates/brk_computer/src/market/dca/compute.rs index 54aea1ac3..2e4e92031 100644 --- a/crates/brk_computer/src/market/dca/compute.rs +++ b/crates/brk_computer/src/market/dca/compute.rs @@ -1,8 +1,8 @@ use brk_error::Result; -use brk_types::{Bitcoin, Cents, Day1, Date, Dollars, Height, Sats, StoredF32, StoredU32}; -use vecdb::{AnyVec, EagerVec, Exit, ReadableOptionVec, ReadableVec, PcoVec, PcoVecValue, VecIndex}; +use brk_types::{Bitcoin, Cents, Date, Day1, Dollars, Sats, StoredF32}; +use vecdb::{AnyVec, Exit, ReadableOptionVec, ReadableVec, VecIndex}; -use super::{ByDcaClass, ByDcaPeriod, Vecs}; +use super::Vecs; use crate::{ ComputeIndexes, blocks, indexes, internal::{ComputedFromHeight, PercentageDiffCents}, @@ -103,36 +103,20 @@ impl Vecs { )?; } - // DCA by period - CAGR (computed from returns) + // DCA by period - CAGR (computed from returns at height level) for (cagr, returns, days) in self.period_cagr.zip_mut_with_period(&self.period_returns) { let years = days as f32 / 365.0; - let returns_data: Vec = returns.day1.collect_or_default(); cagr.height.compute_transform( starting_indexes.height, - h2d, - |(h, di, _)| { - let v = returns_data.get(di.to_usize()) - .map(|r| ((**r / 100.0 + 1.0).powf(1.0 / years) - 1.0) * 
100.0) - .unwrap_or(0.0); + &returns.height, + |(h, r, ..)| { + let v = ((*r / 100.0 + 1.0).powf(1.0 / years) - 1.0) * 100.0; (h, StoredF32::from(v)) }, exit, )?; } - // DCA by period - profitability - compute_period_rolling( - &mut self.period_days_in_profit, - &mut self.period_days_in_loss, - &mut self.period_min_return, - &mut self.period_max_return, - &self.period_returns, - blocks, - h2d, - starting_indexes, - exit, - )?; - // Lump sum by period - stack let lookback_dca = lookback.price_ago.as_dca_period(); for (stack, lookback_price, days) in @@ -171,19 +155,6 @@ impl Vecs { )?; } - // Lump sum by period - profitability - compute_period_rolling( - &mut self.period_lump_sum_days_in_profit, - &mut self.period_lump_sum_days_in_loss, - &mut self.period_lump_sum_min_return, - &mut self.period_lump_sum_max_return, - &self.period_lump_sum_returns, - blocks, - h2d, - starting_indexes, - exit, - )?; - // DCA by year class - stack (cumulative sum from class start date) let start_days = super::ByDcaClass::<()>::start_days(); for (stack, day1) in self.class_stack.iter_mut().zip(start_days) { @@ -265,7 +236,6 @@ impl Vecs { .iter_mut() .zip(self.class_average_price.iter()) { - returns.compute_binary::( starting_indexes.height, &prices.price.cents.height, @@ -274,18 +244,6 @@ impl Vecs { )?; } - // DCA by year class - profitability - compute_class_cumulative( - &mut self.class_days_in_profit, - &mut self.class_days_in_loss, - &mut self.class_min_return, - &mut self.class_max_return, - &self.class_returns, - h2d, - starting_indexes, - exit, - )?; - Ok(()) } } @@ -297,217 +255,3 @@ fn sats_from_dca(price: Dollars) -> Sats { Sats::from(Bitcoin::from(DCA_AMOUNT / price)) } } - -#[allow(clippy::too_many_arguments)] -fn compute_period_rolling( - days_in_profit: &mut ByDcaPeriod>, - days_in_loss: &mut ByDcaPeriod>, - min_return: &mut ByDcaPeriod>, - max_return: &mut ByDcaPeriod>, - returns: &ByDcaPeriod>, - blocks: &blocks::Vecs, - h2d: &EagerVec>, - starting_indexes: 
&ComputeIndexes, - exit: &Exit, -) -> Result<()> { - for ((((dip, dil), minr), maxr), (ret, days)) in days_in_profit - .iter_mut() - .zip(days_in_loss.iter_mut()) - .zip(min_return.iter_mut()) - .zip(max_return.iter_mut()) - .zip(returns.iter_with_days()) - { - let window_starts = blocks.count.start_vec(days as usize); - let returns_data: Vec = ret.day1.collect_or_default(); - - compute_rolling( - &mut dip.height, h2d, &returns_data, window_starts, starting_indexes.height, exit, - |buf| StoredU32::from(buf.iter().copied().filter(|r| **r > 0.0).count()), - )?; - - compute_rolling( - &mut dil.height, h2d, &returns_data, window_starts, starting_indexes.height, exit, - |buf| StoredU32::from(buf.iter().copied().filter(|r| **r < 0.0).count()), - )?; - - compute_rolling( - &mut minr.height, h2d, &returns_data, window_starts, starting_indexes.height, exit, - |buf| { - buf.iter() - .copied() - .reduce(|a, b| if *b < *a { b } else { a }) - .unwrap_or_default() - }, - )?; - - compute_rolling( - &mut maxr.height, h2d, &returns_data, window_starts, starting_indexes.height, exit, - |buf| { - buf.iter() - .copied() - .reduce(|a, b| if *b > *a { b } else { a }) - .unwrap_or_default() - }, - )?; - } - Ok(()) -} - -#[allow(clippy::too_many_arguments)] -fn compute_class_cumulative( - days_in_profit: &mut ByDcaClass>, - days_in_loss: &mut ByDcaClass>, - min_return: &mut ByDcaClass>, - max_return: &mut ByDcaClass>, - returns: &ByDcaClass>, - h2d: &EagerVec>, - starting_indexes: &ComputeIndexes, - exit: &Exit, -) -> Result<()> { - let start_days = ByDcaClass::<()>::start_days(); - - for (((((dip, dil), minr), maxr), ret), from) in days_in_profit - .iter_mut() - .zip(days_in_loss.iter_mut()) - .zip(min_return.iter_mut()) - .zip(max_return.iter_mut()) - .zip(returns.iter()) - .zip(start_days) - { - compute_cumulative( - &mut dip.height, h2d, &ret.day1, from, starting_indexes.height, exit, - StoredU32::ZERO, - |prev, ret| if *ret > 0.0 { prev + StoredU32::ONE } else { prev }, - )?; - - 
compute_cumulative( - &mut dil.height, h2d, &ret.day1, from, starting_indexes.height, exit, - StoredU32::ZERO, - |prev, ret| if *ret < 0.0 { prev + StoredU32::ONE } else { prev }, - )?; - - compute_cumulative( - &mut minr.height, h2d, &ret.day1, from, starting_indexes.height, exit, - StoredF32::from(f32::MAX), - |prev, ret| if *ret < *prev { ret } else { prev }, - )?; - - compute_cumulative( - &mut maxr.height, h2d, &ret.day1, from, starting_indexes.height, exit, - StoredF32::from(f32::MIN), - |prev, ret| if *ret > *prev { ret } else { prev }, - )?; - } - Ok(()) -} - -/// Compute a rolling day-window metric at height level using _start vecs. -#[allow(clippy::too_many_arguments)] -fn compute_rolling( - output: &mut EagerVec>, - h2d: &EagerVec>, - returns_data: &[StoredF32], - window_starts: &EagerVec>, - starting_height: Height, - exit: &Exit, - mut aggregate: impl FnMut(&[StoredF32]) -> T, -) -> Result<()> { - // Cursor + cache avoids per-height PcoVec page decompression for the - // h2d lookback read. Window-start heights are non-decreasing so the - // cursor only moves forward; the cache handles repeated values. 
- let mut h2d_cursor = h2d.cursor(); - let mut last_ws = Height::ZERO; - let mut last_ws_di = Day1::default(); - - output.compute_transform2( - starting_height, - h2d, - window_starts, - |(h, di, window_start, ..)| { - let window_start_di = if window_start == last_ws { - last_ws_di - } else { - let target = window_start.to_usize(); - let ws_di = if target >= h2d_cursor.position() { - h2d_cursor.advance(target - h2d_cursor.position()); - h2d_cursor.next().unwrap_or_default() - } else { - // Cursor past target (batch boundary); rare fallback - h2d.collect_one(window_start).unwrap_or_default() - }; - last_ws = window_start; - last_ws_di = ws_di; - ws_di - }; - let start = window_start_di.to_usize(); - let end = di.to_usize() + 1; - if start >= end { - return (h, T::default()); - } - (h, aggregate(&returns_data[start..end])) - }, - exit, - )?; - - Ok(()) -} - -/// Compute a cumulative metric at height level starting from a fixed date. -#[allow(clippy::too_many_arguments)] -fn compute_cumulative( - output: &mut EagerVec>, - h2d: &EagerVec>, - returns: &impl ReadableOptionVec, - from_day1: Day1, - starting_height: Height, - exit: &Exit, - initial: T, - mut accumulate: impl FnMut(T, StoredF32) -> T, -) -> Result<()> { - let mut last_di: Option = None; - let sh = starting_height.to_usize(); - let mut prev_value = if sh > 0 { - output.collect_one_at(sh - 1).unwrap_or_default() - } else { - T::default() - }; - - output.compute_transform( - starting_height, - h2d, - |(h, di, _)| { - let hi = h.to_usize(); - - if last_di.is_none() && hi > 0 { - last_di = Some(h2d.collect_one_at(hi - 1).unwrap()); - } - - if di < from_day1 { - last_di = Some(di); - prev_value = T::default(); - return (h, T::default()); - } - - let prev_di = last_di; - last_di = Some(di); - - let same_day = prev_di.is_some_and(|prev| prev == di); - let result = if same_day { - prev_value - } else { - let prev = if hi > 0 && prev_di.is_some_and(|pd| pd >= from_day1) { - prev_value - } else { - initial - }; - let 
ret = returns.collect_one_flat(di).unwrap_or_default(); - accumulate(prev, ret) - }; - prev_value = result; - (h, result) - }, - exit, - )?; - - Ok(()) -} diff --git a/crates/brk_computer/src/market/dca/import.rs b/crates/brk_computer/src/market/dca/import.rs index 6b3f82b16..2fc87a0d4 100644 --- a/crates/brk_computer/src/market/dca/import.rs +++ b/crates/brk_computer/src/market/dca/import.rs @@ -5,10 +5,7 @@ use vecdb::{Database, ImportableVec}; use super::{ByDcaCagr, ByDcaClass, ByDcaPeriod, Vecs}; use crate::{ indexes, - internal::{ - ComputedFromHeight, Price, - ValueFromHeight, - }, + internal::{ComputedFromHeight, Price, ValueFromHeight}, }; impl Vecs { @@ -17,88 +14,26 @@ impl Vecs { version: Version, indexes: &indexes::Vecs, ) -> Result { - // DCA by period - stack (KISS) let period_stack = ByDcaPeriod::try_new(|name, _days| { - ValueFromHeight::forced_import( - db, - &format!("{name}_dca_stack"), - version, - indexes, - ) + ValueFromHeight::forced_import(db, &format!("{name}_dca_stack"), version, indexes) })?; - // DCA by period - average price let period_average_price = ByDcaPeriod::try_new(|name, _days| { - Price::forced_import( - db, - &format!("{name}_dca_average_price"), - version, - indexes, - ) + Price::forced_import(db, &format!("{name}_dca_average_price"), version, indexes) })?; let period_returns = ByDcaPeriod::try_new(|name, _days| { - ComputedFromHeight::forced_import( - db, - &format!("{name}_dca_returns"), - version, - indexes, - ) + ComputedFromHeight::forced_import(db, &format!("{name}_dca_returns"), version, indexes) })?; - // DCA by period - CAGR let period_cagr = ByDcaCagr::try_new(|name, _days| { ComputedFromHeight::forced_import(db, &format!("{name}_dca_cagr"), version, indexes) })?; - // DCA by period - profitability - let period_days_in_profit = ByDcaPeriod::try_new(|name, _days| { - ComputedFromHeight::forced_import( - db, - &format!("{name}_dca_days_in_profit"), - version + Version::ONE, - indexes, - ) - })?; - - let 
period_days_in_loss = ByDcaPeriod::try_new(|name, _days| { - ComputedFromHeight::forced_import( - db, - &format!("{name}_dca_days_in_loss"), - version + Version::ONE, - indexes, - ) - })?; - - let period_min_return = ByDcaPeriod::try_new(|name, _days| { - ComputedFromHeight::forced_import( - db, - &format!("{name}_dca_min_return"), - version, - indexes, - ) - })?; - - let period_max_return = ByDcaPeriod::try_new(|name, _days| { - ComputedFromHeight::forced_import( - db, - &format!("{name}_dca_max_return"), - version, - indexes, - ) - })?; - - // Lump sum by period - stack (KISS) let period_lump_sum_stack = ByDcaPeriod::try_new(|name, _days| { - ValueFromHeight::forced_import( - db, - &format!("{name}_lump_sum_stack"), - version, - indexes, - ) + ValueFromHeight::forced_import(db, &format!("{name}_lump_sum_stack"), version, indexes) })?; - // Lump sum by period - returns let period_lump_sum_returns = ByDcaPeriod::try_new(|name, _days| { ComputedFromHeight::forced_import( db, @@ -108,92 +43,16 @@ impl Vecs { ) })?; - // Lump sum by period - profitability - let period_lump_sum_days_in_profit = ByDcaPeriod::try_new(|name, _days| { - ComputedFromHeight::forced_import( - db, - &format!("{name}_lump_sum_days_in_profit"), - version + Version::ONE, - indexes, - ) - })?; - - let period_lump_sum_days_in_loss = ByDcaPeriod::try_new(|name, _days| { - ComputedFromHeight::forced_import( - db, - &format!("{name}_lump_sum_days_in_loss"), - version + Version::ONE, - indexes, - ) - })?; - - let period_lump_sum_min_return = ByDcaPeriod::try_new(|name, _days| { - ComputedFromHeight::forced_import( - db, - &format!("{name}_lump_sum_min_return"), - version, - indexes, - ) - })?; - - let period_lump_sum_max_return = ByDcaPeriod::try_new(|name, _days| { - ComputedFromHeight::forced_import( - db, - &format!("{name}_lump_sum_max_return"), - version, - indexes, - ) - })?; - - // DCA by year class - stack (KISS) let class_stack = ByDcaClass::try_new(|name, _year, _day1| { - 
ValueFromHeight::forced_import( - db, - &format!("{name}_stack"), - version, - indexes, - ) + ValueFromHeight::forced_import(db, &format!("{name}_stack"), version, indexes) })?; - // DCA by year class - average price let class_average_price = ByDcaClass::try_new(|name, _year, _day1| { Price::forced_import(db, &format!("{name}_average_price"), version, indexes) })?; let class_returns = ByDcaClass::try_new(|name, _year, _day1| { - ComputedFromHeight::forced_import( - db, - &format!("{name}_returns"), - version, - indexes, - ) - })?; - - // DCA by year class - profitability - let class_days_in_profit = ByDcaClass::try_new(|name, _year, _day1| { - ComputedFromHeight::forced_import( - db, - &format!("{name}_days_in_profit"), - version, - indexes, - ) - })?; - - let class_days_in_loss = ByDcaClass::try_new(|name, _year, _day1| { - ComputedFromHeight::forced_import( - db, - &format!("{name}_days_in_loss"), - version, - indexes, - ) - })?; - - let class_min_return = ByDcaClass::try_new(|name, _year, _day1| { - ComputedFromHeight::forced_import(db, &format!("{name}_min_return"), version, indexes) - })?; - - let class_max_return = ByDcaClass::try_new(|name, _year, _day1| { - ComputedFromHeight::forced_import(db, &format!("{name}_max_return"), version, indexes) + ComputedFromHeight::forced_import(db, &format!("{name}_returns"), version, indexes) })?; Ok(Self { @@ -202,23 +61,11 @@ impl Vecs { period_average_price, period_returns, period_cagr, - period_days_in_profit, - period_days_in_loss, - period_min_return, - period_max_return, period_lump_sum_stack, period_lump_sum_returns, - period_lump_sum_days_in_profit, - period_lump_sum_days_in_loss, - period_lump_sum_min_return, - period_lump_sum_max_return, class_stack, class_average_price, class_returns, - class_days_in_profit, - class_days_in_loss, - class_min_return, - class_max_return, }) } } diff --git a/crates/brk_computer/src/market/dca/vecs.rs b/crates/brk_computer/src/market/dca/vecs.rs index 5e6ab962c..caa4bb561 100644 
--- a/crates/brk_computer/src/market/dca/vecs.rs +++ b/crates/brk_computer/src/market/dca/vecs.rs @@ -1,11 +1,9 @@ use brk_traversable::Traversable; -use brk_types::{Cents, Height, Sats, StoredF32, StoredU32}; +use brk_types::{Cents, Height, Sats, StoredF32}; use vecdb::{EagerVec, PcoVec, Rw, StorageMode}; use super::{ByDcaCagr, ByDcaClass, ByDcaPeriod}; -use crate::internal::{ - ComputedFromHeight, Price, ValueFromHeight, -}; +use crate::internal::{ComputedFromHeight, Price, ValueFromHeight}; /// Dollar-cost averaging metrics by time period and year class #[derive(Traversable)] @@ -14,36 +12,18 @@ pub struct Vecs { /// Computed once, reused by all period rolling sums. pub dca_sats_per_day: M::Stored>>, - // DCA by period - KISS types + // DCA by period pub period_stack: ByDcaPeriod>, pub period_average_price: ByDcaPeriod>>, pub period_returns: ByDcaPeriod>, pub period_cagr: ByDcaCagr>, - // DCA by period - profitability - pub period_days_in_profit: ByDcaPeriod>, - pub period_days_in_loss: ByDcaPeriod>, - pub period_min_return: ByDcaPeriod>, - pub period_max_return: ByDcaPeriod>, - - // Lump sum by period (for comparison with DCA) - KISS types + // Lump sum by period (for comparison with DCA) pub period_lump_sum_stack: ByDcaPeriod>, pub period_lump_sum_returns: ByDcaPeriod>, - // Lump sum by period - profitability - pub period_lump_sum_days_in_profit: ByDcaPeriod>, - pub period_lump_sum_days_in_loss: ByDcaPeriod>, - pub period_lump_sum_min_return: ByDcaPeriod>, - pub period_lump_sum_max_return: ByDcaPeriod>, - - // DCA by year class - KISS types + // DCA by year class pub class_stack: ByDcaClass>, pub class_average_price: ByDcaClass>>, pub class_returns: ByDcaClass>, - - // DCA by year class - profitability - pub class_days_in_profit: ByDcaClass>, - pub class_days_in_loss: ByDcaClass>, - pub class_min_return: ByDcaClass>, - pub class_max_return: ByDcaClass>, } diff --git a/crates/brk_computer/src/market/indicators/compute.rs 
b/crates/brk_computer/src/market/indicators/compute.rs index b4026a265..08f55a4a2 100644 --- a/crates/brk_computer/src/market/indicators/compute.rs +++ b/crates/brk_computer/src/market/indicators/compute.rs @@ -9,6 +9,16 @@ use crate::{ mining, prices, transactions, }; +fn tf_multiplier(tf: &str) -> usize { + match tf { + "1d" => 1, + "1w" => 7, + "1m" => 30, + "1y" => 365, + _ => unreachable!(), + } +} + impl Vecs { #[allow(clippy::too_many_arguments)] pub(crate) fn compute( @@ -60,18 +70,22 @@ impl Vecs { )?; } - // Pre-collect Height→Day1 mapping - let h2d: Vec = indexes.height.day1.collect(); - let total_heights = h2d.len(); - // RSI per timeframe for (tf, rsi_chain) in self.rsi.iter_mut() { + let m = tf_multiplier(tf); + let returns_source = match tf { + "1d" => &returns.price_returns._24h.height, + "1w" => &returns.price_returns._1w.height, + "1m" => &returns.price_returns._1m.height, + "1y" => &returns.price_returns._1y.height, + _ => unreachable!(), + }; super::rsi::compute( rsi_chain, - tf, - returns, - &h2d, - total_heights, + blocks, + returns_source, + 14 * m, + 3 * m, starting_indexes, exit, )?; @@ -79,18 +93,22 @@ impl Vecs { // MACD per timeframe for (tf, macd_chain) in self.macd.iter_mut() { + let m = tf_multiplier(tf); super::macd::compute( macd_chain, - tf, + blocks, prices, - &h2d, - total_heights, + 12 * m, + 26 * m, + 9 * m, starting_indexes, exit, )?; } - // Gini (daily only, expanded to Height) + // Gini (daily, expanded to Height) + let h2d: Vec = indexes.height.day1.collect(); + let total_heights = h2d.len(); super::gini::compute( &mut self.gini, distribution, diff --git a/crates/brk_computer/src/market/indicators/gini.rs b/crates/brk_computer/src/market/indicators/gini.rs index 075fc8fb8..aad84d69b 100644 --- a/crates/brk_computer/src/market/indicators/gini.rs +++ b/crates/brk_computer/src/market/indicators/gini.rs @@ -1,5 +1,5 @@ use brk_error::Result; -use brk_types::{Day1, Sats, StoredF32, StoredU64, Version}; +use brk_types::{Day1, 
StoredF32, Version}; use vecdb::{AnyStoredVec, AnyVec, Exit, ReadableOptionVec, VecIndex, WritableVec}; use crate::{ComputeIndexes, distribution, internal::ComputedFromHeight}; @@ -44,35 +44,39 @@ pub(super) fn compute( return Ok(()); } - // Pre-collect all daily data - let supply_data: Vec> = supply_vecs + let num_days = supply_vecs .iter() - .map(|v| v.collect_or_default()) - .collect(); - let count_data: Vec> = count_vecs - .iter() - .map(|v| v.collect_or_default()) - .collect(); - let num_days = supply_data.first().map_or(0, |v| v.len()); + .map(|v| v.len()) + .min() + .unwrap_or(0) + .min(count_vecs.iter().map(|v| v.len()).min().unwrap_or(0)); - // Compute gini per day in-memory - let mut gini_daily = Vec::with_capacity(num_days); - let mut buckets: Vec<(u64, u64)> = Vec::with_capacity(supply_data.len()); - for di in 0..num_days { + // Only compute gini for new days (each day is independent) + let start_day = if start_height > 0 { + h2d[start_height].to_usize() + } else { + 0 + }; + + let mut gini_new: Vec = Vec::with_capacity(num_days.saturating_sub(start_day)); + let mut buckets: Vec<(u64, u64)> = Vec::with_capacity(supply_vecs.len()); + for di in start_day..num_days { buckets.clear(); - buckets.extend(supply_data.iter().zip(count_data.iter()).map(|(s, c)| { - let count: u64 = c[di].into(); - let supply: u64 = s[di].into(); - (count, supply) - })); - gini_daily.push(gini_from_lorenz(&buckets)); + let day = Day1::from(di); + for (sv, cv) in supply_vecs.iter().zip(count_vecs.iter()) { + let supply: u64 = sv.collect_one_flat(day).unwrap_or_default().into(); + let count: u64 = cv.collect_one_flat(day).unwrap_or_default().into(); + buckets.push((count, supply)); + } + gini_new.push(gini_from_lorenz(&buckets)); } // Expand to Height (start_height..total_heights).for_each(|h| { let di = h2d[h].to_usize(); - let val = if di < gini_daily.len() { - StoredF32::from(gini_daily[di]) + let offset = di.saturating_sub(start_day); + let val = if offset < gini_new.len() { + 
StoredF32::from(gini_new[offset]) } else { StoredF32::NAN }; @@ -92,7 +96,7 @@ fn gini_from_lorenz(buckets: &[(u64, u64)]) -> f32 { let total_supply: u64 = buckets.iter().map(|(_, s)| s).sum(); if total_count == 0 || total_supply == 0 { - return 0.0; + return f32::NAN; } let (mut cumulative_count, mut cumulative_supply, mut area) = (0u64, 0u64, 0.0f64); diff --git a/crates/brk_computer/src/market/indicators/import.rs b/crates/brk_computer/src/market/indicators/import.rs index 2f0f7fe47..13209c46c 100644 --- a/crates/brk_computer/src/market/indicators/import.rs +++ b/crates/brk_computer/src/market/indicators/import.rs @@ -81,6 +81,18 @@ impl MacdChain { )?; Ok(Self { + ema_fast: ComputedFromHeight::forced_import( + db, + &format!("macd_ema_fast_{tf}"), + version, + indexes, + )?, + ema_slow: ComputedFromHeight::forced_import( + db, + &format!("macd_ema_slow_{tf}"), + version, + indexes, + )?, line, signal, histogram, diff --git a/crates/brk_computer/src/market/indicators/macd.rs b/crates/brk_computer/src/market/indicators/macd.rs index 6eaa7d708..3431ee0ed 100644 --- a/crates/brk_computer/src/market/indicators/macd.rs +++ b/crates/brk_computer/src/market/indicators/macd.rs @@ -1,103 +1,61 @@ use brk_error::Result; -use brk_types::{Day1, StoredF32}; -use vecdb::{AnyStoredVec, AnyVec, Exit, VecIndex, WritableVec}; +use vecdb::Exit; -use super::{MacdChain, smoothing::compute_ema, timeframe::{collect_closes, date_to_period}}; -use crate::{ComputeIndexes, prices}; +use super::MacdChain; +use crate::{ComputeIndexes, blocks, prices}; -#[allow(clippy::too_many_arguments)] pub(super) fn compute( chain: &mut MacdChain, - tf: &str, + blocks: &blocks::Vecs, prices: &prices::Vecs, - h2d: &[Day1], - total_heights: usize, + fast_days: usize, + slow_days: usize, + signal_days: usize, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - let source_version = prices.price.usd.height.version(); + let close = &prices.price.usd.height; + let ws_fast = 
blocks.count.start_vec(fast_days); + let ws_slow = blocks.count.start_vec(slow_days); + let ws_signal = blocks.count.start_vec(signal_days); - chain - .line - .height - .validate_computed_version_or_reset(source_version)?; - chain - .signal - .height - .validate_computed_version_or_reset(source_version)?; - - chain.line.height.truncate_if_needed_at( - chain - .line - .height - .len() - .min(starting_indexes.height.to_usize()), - )?; - chain.signal.height.truncate_if_needed_at( - chain - .signal - .height - .len() - .min(starting_indexes.height.to_usize()), + chain.ema_fast.height.compute_rolling_ema( + starting_indexes.height, + ws_fast, + close, + exit, )?; - chain - .histogram - .height - .validate_computed_version_or_reset(source_version)?; - chain.histogram.height.truncate_if_needed_at( - chain - .histogram - .height - .len() - .min(starting_indexes.height.to_usize()), + chain.ema_slow.height.compute_rolling_ema( + starting_indexes.height, + ws_slow, + close, + exit, )?; - let start_height = chain.line.height.len(); - if start_height >= total_heights { - return Ok(()); - } + // MACD line = ema_fast - ema_slow + chain.line.height.compute_subtract( + starting_indexes.height, + &chain.ema_fast.height, + &chain.ema_slow.height, + exit, + )?; - // Collect close prices at timeframe level - let closes = collect_closes(tf, prices); - let closes_f32: Vec = closes.iter().map(|d| **d as f32).collect(); + // Signal = EMA of MACD line + chain.signal.height.compute_rolling_ema( + starting_indexes.height, + ws_signal, + &chain.line.height, + exit, + )?; - // Compute MACD in-memory - let ema12 = compute_ema(&closes_f32, 12); - let ema26 = compute_ema(&closes_f32, 26); - - let macd_line: Vec = ema12.iter().zip(ema26.iter()).map(|(a, b)| a - b).collect(); - - let macd_signal = compute_ema(&macd_line, 9); - - let macd_histogram: Vec = macd_line.iter().zip(macd_signal.iter()).map(|(a, b)| a - b).collect(); - - // Expand to Height - (start_height..total_heights).for_each(|h| { - 
let pi = date_to_period(tf, h2d[h]); - chain.line.height.push(if pi < macd_line.len() { - StoredF32::from(macd_line[pi]) - } else { - StoredF32::NAN - }); - chain.signal.height.push(if pi < macd_signal.len() { - StoredF32::from(macd_signal[pi]) - } else { - StoredF32::NAN - }); - chain.histogram.height.push(if pi < macd_histogram.len() { - StoredF32::from(macd_histogram[pi]) - } else { - StoredF32::NAN - }); - }); - - { - let _lock = exit.lock(); - chain.line.height.write()?; - chain.signal.height.write()?; - chain.histogram.height.write()?; - } + // Histogram = line - signal + chain.histogram.height.compute_subtract( + starting_indexes.height, + &chain.line.height, + &chain.signal.height, + exit, + )?; Ok(()) } diff --git a/crates/brk_computer/src/market/indicators/mod.rs b/crates/brk_computer/src/market/indicators/mod.rs index 2ea5670ee..982f5726a 100644 --- a/crates/brk_computer/src/market/indicators/mod.rs +++ b/crates/brk_computer/src/market/indicators/mod.rs @@ -3,8 +3,6 @@ mod gini; mod import; mod macd; mod rsi; -mod smoothing; -mod timeframe; mod vecs; pub use vecs::{ByIndicatorTimeframe, MacdChain, RsiChain, Vecs}; diff --git a/crates/brk_computer/src/market/indicators/rsi.rs b/crates/brk_computer/src/market/indicators/rsi.rs index 0d38c6993..81663f867 100644 --- a/crates/brk_computer/src/market/indicators/rsi.rs +++ b/crates/brk_computer/src/market/indicators/rsi.rs @@ -1,126 +1,115 @@ use brk_error::Result; -use brk_types::{Day1, StoredF32}; -use vecdb::{AnyStoredVec, AnyVec, Exit, VecIndex, WritableVec}; +use brk_types::{Height, StoredF32}; +use vecdb::{Exit, ReadableVec}; -use super::{ - RsiChain, - smoothing::{compute_rma, compute_rolling_max, compute_rolling_min, compute_sma}, - timeframe::{collect_returns, date_to_period}, -}; -use crate::{ComputeIndexes, market::returns::Vecs as ReturnsVecs}; +use super::RsiChain; +use crate::{ComputeIndexes, blocks}; -#[allow(clippy::too_many_arguments)] pub(super) fn compute( chain: &mut RsiChain, - tf: &str, - 
returns: &ReturnsVecs, - h2d: &[Day1], - total_heights: usize, + blocks: &blocks::Vecs, + returns_source: &impl ReadableVec, + rma_days: usize, + stoch_sma_days: usize, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - let source_version = returns.price_returns._24h.height.version(); + let ws_rma = blocks.count.start_vec(rma_days); + let ws_sma = blocks.count.start_vec(stoch_sma_days); - let vecs = [ - &mut chain.gains.height, - &mut chain.losses.height, - &mut chain.average_gain.height, - &mut chain.average_loss.height, - &mut chain.rsi.height, - &mut chain.rsi_min.height, - &mut chain.rsi_max.height, - &mut chain.stoch_rsi.height, - &mut chain.stoch_rsi_k.height, - &mut chain.stoch_rsi_d.height, - ]; + // Gains = max(return, 0) + chain.gains.height.compute_transform( + starting_indexes.height, + returns_source, + |(h, r, ..)| (h, StoredF32::from((*r).max(0.0))), + exit, + )?; - for v in vecs { - v.validate_computed_version_or_reset(source_version)?; - v.truncate_if_needed_at(v.len().min(starting_indexes.height.to_usize()))?; - } + // Losses = max(-return, 0) + chain.losses.height.compute_transform( + starting_indexes.height, + returns_source, + |(h, r, ..)| (h, StoredF32::from((-*r).max(0.0))), + exit, + )?; - let start_height = chain.gains.height.len(); - if start_height >= total_heights { - return Ok(()); - } + // Average gain = RMA of gains + chain.average_gain.height.compute_rolling_rma( + starting_indexes.height, + ws_rma, + &chain.gains.height, + exit, + )?; - // Collect returns at the appropriate timeframe level - let period_returns = collect_returns(tf, returns); + // Average loss = RMA of losses + chain.average_loss.height.compute_rolling_rma( + starting_indexes.height, + ws_rma, + &chain.losses.height, + exit, + )?; - // Compute in-memory - let gains: Vec = period_returns.iter().map(|r| r.max(0.0)).collect(); - let losses: Vec = period_returns.iter().map(|r| (-r).max(0.0)).collect(); - let avg_gain = compute_rma(&gains, 14); - let 
avg_loss = compute_rma(&losses, 14); + // RSI = 100 * avg_gain / (avg_gain + avg_loss) + chain.rsi.height.compute_transform2( + starting_indexes.height, + &chain.average_gain.height, + &chain.average_loss.height, + |(h, g, l, ..)| { + let sum = *g + *l; + let rsi = if sum == 0.0 { 50.0 } else { 100.0 * *g / sum }; + (h, StoredF32::from(rsi)) + }, + exit, + )?; - let rsi: Vec = avg_gain - .iter() - .zip(avg_loss.iter()) - .map(|(g, l)| { - let sum = g + l; - if sum == 0.0 { 50.0 } else { 100.0 * g / sum } - }) - .collect(); + // Rolling min/max of RSI over rma_days window + chain.rsi_min.height.compute_rolling_min_from_starts( + starting_indexes.height, + ws_rma, + &chain.rsi.height, + exit, + )?; - let rsi_min = compute_rolling_min(&rsi, 14); - let rsi_max = compute_rolling_max(&rsi, 14); + chain.rsi_max.height.compute_rolling_max_from_starts( + starting_indexes.height, + ws_rma, + &chain.rsi.height, + exit, + )?; - let stoch_rsi: Vec = rsi - .iter() - .zip(rsi_min.iter()) - .zip(rsi_max.iter()) - .map(|((r, mn), mx)| { - let range = mx - mn; - if range == 0.0 { - f32::NAN + // StochRSI = (rsi - rsi_min) / (rsi_max - rsi_min) * 100 + chain.stoch_rsi.height.compute_transform3( + starting_indexes.height, + &chain.rsi.height, + &chain.rsi_min.height, + &chain.rsi_max.height, + |(h, r, mn, mx, ..)| { + let range = *mx - *mn; + let stoch = if range == 0.0 { + StoredF32::NAN } else { - (r - mn) / range * 100.0 - } - }) - .collect(); + StoredF32::from((*r - *mn) / range * 100.0) + }; + (h, stoch) + }, + exit, + )?; - let stoch_rsi_k = compute_sma(&stoch_rsi, 3); - let stoch_rsi_d = compute_sma(&stoch_rsi_k, 3); + // StochRSI K = SMA of StochRSI + chain.stoch_rsi_k.height.compute_rolling_average( + starting_indexes.height, + ws_sma, + &chain.stoch_rsi.height, + exit, + )?; - // Expand to Height - macro_rules! 
expand { - ($target:expr, $buffer:expr) => { - for h in start_height..total_heights { - let pi = date_to_period(tf, h2d[h]); - let val = if pi < $buffer.len() { - StoredF32::from($buffer[pi]) - } else { - StoredF32::NAN - }; - $target.push(val); - } - }; - } - - expand!(chain.gains.height, gains); - expand!(chain.losses.height, losses); - expand!(chain.average_gain.height, avg_gain); - expand!(chain.average_loss.height, avg_loss); - expand!(chain.rsi.height, rsi); - expand!(chain.rsi_min.height, rsi_min); - expand!(chain.rsi_max.height, rsi_max); - expand!(chain.stoch_rsi.height, stoch_rsi); - expand!(chain.stoch_rsi_k.height, stoch_rsi_k); - expand!(chain.stoch_rsi_d.height, stoch_rsi_d); - - { - let _lock = exit.lock(); - chain.gains.height.write()?; - chain.losses.height.write()?; - chain.average_gain.height.write()?; - chain.average_loss.height.write()?; - chain.rsi.height.write()?; - chain.rsi_min.height.write()?; - chain.rsi_max.height.write()?; - chain.stoch_rsi.height.write()?; - chain.stoch_rsi_k.height.write()?; - chain.stoch_rsi_d.height.write()?; - } + // StochRSI D = SMA of K + chain.stoch_rsi_d.height.compute_rolling_average( + starting_indexes.height, + ws_sma, + &chain.stoch_rsi_k.height, + exit, + )?; Ok(()) } diff --git a/crates/brk_computer/src/market/indicators/smoothing.rs b/crates/brk_computer/src/market/indicators/smoothing.rs deleted file mode 100644 index f0910d9dd..000000000 --- a/crates/brk_computer/src/market/indicators/smoothing.rs +++ /dev/null @@ -1,89 +0,0 @@ -use std::collections::VecDeque; - -pub(super) fn compute_rma(source: &[f32], period: usize) -> Vec { - let mut result = Vec::with_capacity(source.len()); - let k = 1.0 / period as f32; - let mut sum = 0.0f32; - - for (i, &val) in source.iter().enumerate() { - if i < period { - sum += val; - result.push(sum / (i + 1) as f32); - } else { - let prev = result[i - 1]; - result.push(val * k + prev * (1.0 - k)); - } - } - - result -} - -pub(super) fn compute_ema(source: &[f32], 
period: usize) -> Vec { - let mut result = Vec::with_capacity(source.len()); - let k = 2.0 / (period as f32 + 1.0); - let mut sum = 0.0f32; - - for (i, &val) in source.iter().enumerate() { - if i < period { - sum += val; - result.push(sum / (i + 1) as f32); - } else { - let prev = result[i - 1]; - result.push(val * k + prev * (1.0 - k)); - } - } - - result -} - -pub(super) fn compute_sma(source: &[f32], window: usize) -> Vec { - let mut result = Vec::with_capacity(source.len()); - let mut sum = 0.0f32; - - for (i, &val) in source.iter().enumerate() { - sum += val; - if i >= window { - sum -= source[i - window]; - } - let count = (i + 1).min(window); - result.push(sum / count as f32); - } - - result -} - -pub(super) fn compute_rolling_min(source: &[f32], window: usize) -> Vec { - let mut result = Vec::with_capacity(source.len()); - let mut deque = VecDeque::new(); - - for (i, &val) in source.iter().enumerate() { - while deque.back().is_some_and(|&(_, v): &(usize, f32)| v >= val) { - deque.pop_back(); - } - deque.push_back((i, val)); - if deque.front().unwrap().0 + window <= i { - deque.pop_front(); - } - result.push(deque.front().unwrap().1); - } - - result -} - -pub(super) fn compute_rolling_max(source: &[f32], window: usize) -> Vec { - let mut result = Vec::with_capacity(source.len()); - let mut deque = VecDeque::new(); - - for (i, &val) in source.iter().enumerate() { - while deque.back().is_some_and(|&(_, v): &(usize, f32)| v <= val) { - deque.pop_back(); - } - deque.push_back((i, val)); - if deque.front().unwrap().0 + window <= i { - deque.pop_front(); - } - result.push(deque.front().unwrap().1); - } - - result -} diff --git a/crates/brk_computer/src/market/indicators/timeframe.rs b/crates/brk_computer/src/market/indicators/timeframe.rs deleted file mode 100644 index 898426a83..000000000 --- a/crates/brk_computer/src/market/indicators/timeframe.rs +++ /dev/null @@ -1,36 +0,0 @@ -use brk_types::{Day1, Dollars, Month1, StoredF32, Week1, Year1}; -use 
vecdb::{ReadableOptionVec, VecIndex}; - -use crate::{market::returns::Vecs as ReturnsVecs, prices}; - -pub(super) fn collect_returns(tf: &str, returns: &ReturnsVecs) -> Vec { - let data: Vec = match tf { - "1d" => returns.price_returns._24h.day1.collect_or_default(), - "1w" => returns.price_returns._1w.week1.collect_or_default(), - "1m" => returns.price_returns._1m.month1.collect_or_default(), - "1y" => returns.price_returns._1y.year1.collect_or_default(), - _ => unreachable!(), - }; - data.into_iter().map(|v| *v).collect() -} - -pub(super) fn collect_closes(tf: &str, prices: &prices::Vecs) -> Vec { - match tf { - "1d" => prices.split.close.usd.day1.collect_or_default(), - "1w" => prices.split.close.usd.week1.collect_or_default(), - "1m" => prices.split.close.usd.month1.collect_or_default(), - "1y" => prices.split.close.usd.year1.collect_or_default(), - _ => unreachable!(), - } -} - -#[inline] -pub(super) fn date_to_period(tf: &str, di: Day1) -> usize { - match tf { - "1d" => di.to_usize(), - "1w" => Week1::from(di).to_usize(), - "1m" => Month1::from(di).to_usize(), - "1y" => Year1::from(Month1::from(di)).to_usize(), - _ => unreachable!(), - } -} diff --git a/crates/brk_computer/src/market/indicators/vecs.rs b/crates/brk_computer/src/market/indicators/vecs.rs index c7292db7b..0e32445b4 100644 --- a/crates/brk_computer/src/market/indicators/vecs.rs +++ b/crates/brk_computer/src/market/indicators/vecs.rs @@ -51,6 +51,8 @@ pub struct RsiChain { #[derive(Traversable)] pub struct MacdChain { + pub ema_fast: ComputedFromHeight, + pub ema_slow: ComputedFromHeight, pub line: ComputedFromHeight, pub signal: ComputedFromHeight, pub histogram: ComputedFromHeight, diff --git a/crates/brk_computer/src/market/lookback/compute.rs b/crates/brk_computer/src/market/lookback/compute.rs index 150f759f7..76943d566 100644 --- a/crates/brk_computer/src/market/lookback/compute.rs +++ b/crates/brk_computer/src/market/lookback/compute.rs @@ -1,6 +1,5 @@ use brk_error::Result; -use 
brk_types::Cents; -use vecdb::{Exit, ReadableVec, VecIndex}; +use vecdb::Exit; use super::Vecs; use crate::{blocks, ComputeIndexes, prices}; @@ -13,17 +12,14 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - let close_data: Vec = prices.price.cents.height.collect(); + let price = &prices.price.cents.height; for (price_ago, days) in self.price_ago.iter_mut_with_days() { let window_starts = blocks.count.start_vec(days as usize); - price_ago.cents.height.compute_transform( + price_ago.cents.height.compute_lookback( starting_indexes.height, window_starts, - |(h, start_h, _)| { - let val = close_data[start_h.to_usize()]; - (h, val) - }, + price, exit, )?; } diff --git a/crates/brk_computer/src/market/moving_average/compute.rs b/crates/brk_computer/src/market/moving_average/compute.rs index 8e4dbe14a..7b717dea5 100644 --- a/crates/brk_computer/src/market/moving_average/compute.rs +++ b/crates/brk_computer/src/market/moving_average/compute.rs @@ -1,16 +1,14 @@ use brk_error::Result; -use brk_types::Cents; -use vecdb::{Exit, ReadableOptionVec, VecIndex}; +use vecdb::Exit; use super::Vecs; -use crate::{ComputeIndexes, blocks, indexes, prices}; +use crate::{ComputeIndexes, blocks, prices}; impl Vecs { pub(crate) fn compute( &mut self, blocks: &blocks::Vecs, prices: &prices::Vecs, - indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { @@ -41,9 +39,6 @@ impl Vecs { })?; } - let h2d = &indexes.height.day1; - let closes: Vec = prices.split.close.cents.day1.collect_or_default(); - for (ema, period) in [ (&mut self.price_1w_ema, 7), (&mut self.price_8d_ema, 8), @@ -62,18 +57,9 @@ impl Vecs { (&mut self.price_200w_ema, 200 * 7), (&mut self.price_4y_ema, 4 * 365), ] { - let k = 2.0f64 / (period as f64 + 1.0); - - // Compute date-level EMA, then expand to height level - let date_ema = compute_date_ema(&closes, k); - + let window_starts = blocks.count.start_vec(period); ema.compute_all(blocks, prices, 
starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.height, - h2d, - |(h, date, ..)| (h, Cents::from(date_ema[date.to_usize()])), - exit, - )?; + v.compute_rolling_ema(starting_indexes.height, window_starts, close, exit)?; Ok(()) })?; } @@ -81,18 +67,3 @@ impl Vecs { Ok(()) } } - -fn compute_date_ema(closes: &[Cents], k: f64) -> Vec { - let mut date_ema: Vec = Vec::with_capacity(closes.len()); - let mut ema_val = 0.0f64; - for (d, close) in closes.iter().enumerate() { - let close = f64::from(*close); - if d == 0 { - ema_val = close; - } else { - ema_val = close * k + ema_val * (1.0 - k); - } - date_ema.push(ema_val); - } - date_ema -} diff --git a/crates/brk_computer/src/market/range/compute.rs b/crates/brk_computer/src/market/range/compute.rs index 57d79d35b..dc9513987 100644 --- a/crates/brk_computer/src/market/range/compute.rs +++ b/crates/brk_computer/src/market/range/compute.rs @@ -3,10 +3,7 @@ use brk_types::StoredF32; use vecdb::{Exit, ReadableVec, VecIndex}; use super::Vecs; -use crate::{ - blocks, ComputeIndexes, prices, - traits::{ComputeRollingMaxFromStarts, ComputeRollingMinFromStarts}, -}; +use crate::{blocks, ComputeIndexes, prices}; impl Vecs { pub(crate) fn compute( diff --git a/crates/brk_computer/src/market/returns/compute.rs b/crates/brk_computer/src/market/returns/compute.rs index af35cb4a7..71038cf6e 100644 --- a/crates/brk_computer/src/market/returns/compute.rs +++ b/crates/brk_computer/src/market/returns/compute.rs @@ -1,14 +1,13 @@ use brk_error::Result; use brk_types::{Dollars, StoredF32}; -use vecdb::{Exit, ReadableOptionVec}; +use vecdb::Exit; use super::Vecs; -use crate::{ComputeIndexes, blocks, indexes, internal::PercentageDiffDollars, market::lookback, prices}; +use crate::{ComputeIndexes, blocks, internal::PercentageDiffDollars, market::lookback, prices}; impl Vecs { pub(crate) fn compute( &mut self, - indexes: &indexes::Vecs, prices: &prices::Vecs, blocks: &blocks::Vecs, lookback: &lookback::Vecs, @@ -29,27 +28,16 
@@ impl Vecs { )?; } - // CAGR computed from returns (2y+ periods only) - let h2d = &indexes.height.day1; + // CAGR computed from returns at height level (2y+ periods only) let price_returns_dca = self.price_returns.as_dca_period(); for (cagr, returns, days) in self.cagr.zip_mut_with_period(&price_returns_dca) { let years = days as f32 / 365.0; - let mut cached_di = None; - let mut cached_val = StoredF32::from(0.0); cagr.height.compute_transform( starting_indexes.height, - h2d, - |(h, di, _)| { - if cached_di != Some(di) { - cached_di = Some(di); - cached_val = StoredF32::from( - returns.day1 - .collect_one_flat(di) - .map(|r| ((*r / 100.0 + 1.0).powf(1.0 / years) - 1.0) * 100.0) - .unwrap_or(0.0) - ); - } - (h, cached_val) + &returns.height, + |(h, r, ..)| { + let v = ((*r / 100.0 + 1.0).powf(1.0 / years) - 1.0) * 100.0; + (h, StoredF32::from(v)) }, exit, )?; diff --git a/crates/brk_computer/src/market/volatility/compute.rs b/crates/brk_computer/src/market/volatility/compute.rs index 926e804c0..3124b5fa9 100644 --- a/crates/brk_computer/src/market/volatility/compute.rs +++ b/crates/brk_computer/src/market/volatility/compute.rs @@ -1,6 +1,6 @@ use brk_error::Result; use brk_types::{Height, StoredF32}; -use vecdb::Exit; +use vecdb::{EagerVec, Exit, PcoVec, ReadableVec}; use super::super::returns; use super::Vecs; @@ -13,73 +13,39 @@ impl Vecs { exit: &Exit, ) -> Result<()> { // Sharpe ratios: returns / volatility - self.sharpe_1w.height.compute_transform2( - starting_indexes_height, - &returns.price_returns._1w.height, - &self.price_1w_volatility.height, - |(h, ret, vol, ..)| { - let ratio = if *vol == 0.0 { 0.0 } else { *ret / *vol }; - (h, StoredF32::from(ratio)) - }, - exit, - )?; - - self.sharpe_1m.height.compute_transform2( - starting_indexes_height, - &returns.price_returns._1m.height, - &self.price_1m_volatility.height, - |(h, ret, vol, ..)| { - let ratio = if *vol == 0.0 { 0.0 } else { *ret / *vol }; - (h, StoredF32::from(ratio)) - }, - exit, - )?; - - 
self.sharpe_1y.height.compute_transform2( - starting_indexes_height, - &returns.price_returns._1y.height, - &self.price_1y_volatility.height, - |(h, ret, vol, ..)| { - let ratio = if *vol == 0.0 { 0.0 } else { *ret / *vol }; - (h, StoredF32::from(ratio)) - }, - exit, - )?; + for (out, ret, vol) in [ + (&mut self.sharpe_1w, &returns.price_returns._1w.height, &self.price_1w_volatility.height), + (&mut self.sharpe_1m, &returns.price_returns._1m.height, &self.price_1m_volatility.height), + (&mut self.sharpe_1y, &returns.price_returns._1y.height, &self.price_1y_volatility.height), + ] { + compute_ratio(&mut out.height, starting_indexes_height, ret, vol, exit)?; + } // Sortino ratios: returns / downside volatility - self.sortino_1w.height.compute_transform2( - starting_indexes_height, - &returns.price_returns._1w.height, - &returns.downside_1w_sd.sd.height, - |(h, ret, vol, ..)| { - let ratio = if *vol == 0.0 { 0.0 } else { *ret / *vol }; - (h, StoredF32::from(ratio)) - }, - exit, - )?; - - self.sortino_1m.height.compute_transform2( - starting_indexes_height, - &returns.price_returns._1m.height, - &returns.downside_1m_sd.sd.height, - |(h, ret, vol, ..)| { - let ratio = if *vol == 0.0 { 0.0 } else { *ret / *vol }; - (h, StoredF32::from(ratio)) - }, - exit, - )?; - - self.sortino_1y.height.compute_transform2( - starting_indexes_height, - &returns.price_returns._1y.height, - &returns.downside_1y_sd.sd.height, - |(h, ret, vol, ..)| { - let ratio = if *vol == 0.0 { 0.0 } else { *ret / *vol }; - (h, StoredF32::from(ratio)) - }, - exit, - )?; + compute_ratio(&mut self.sortino_1w.height, starting_indexes_height, &returns.price_returns._1w.height, &returns.downside_1w_sd.sd.height, exit)?; + compute_ratio(&mut self.sortino_1m.height, starting_indexes_height, &returns.price_returns._1m.height, &returns.downside_1m_sd.sd.height, exit)?; + compute_ratio(&mut self.sortino_1y.height, starting_indexes_height, &returns.price_returns._1y.height, &returns.downside_1y_sd.sd.height, exit)?; 
Ok(()) } } + +fn compute_ratio( + out: &mut EagerVec>, + starting_indexes_height: Height, + ret: &impl ReadableVec, + vol: &impl ReadableVec, + exit: &Exit, +) -> Result<()> { + out.compute_transform2( + starting_indexes_height, + ret, + vol, + |(h, ret, vol, ..)| { + let ratio = if *vol == 0.0 { 0.0 } else { *ret / *vol }; + (h, StoredF32::from(ratio)) + }, + exit, + )?; + Ok(()) +} diff --git a/crates/brk_computer/src/mining/hashrate/vecs.rs b/crates/brk_computer/src/mining/hashrate/vecs.rs index 1c5de513c..da66f1cbb 100644 --- a/crates/brk_computer/src/mining/hashrate/vecs.rs +++ b/crates/brk_computer/src/mining/hashrate/vecs.rs @@ -9,9 +9,9 @@ use crate::internal::ComputedFromHeight; pub struct Vecs { pub hash_rate: ComputedFromHeight, pub hash_rate_1w_sma: ComputedFromHeight, - pub hash_rate_1m_sma: ComputedFromHeight, - pub hash_rate_2m_sma: ComputedFromHeight, - pub hash_rate_1y_sma: ComputedFromHeight, + pub hash_rate_1m_sma: ComputedFromHeight, + pub hash_rate_2m_sma: ComputedFromHeight, + pub hash_rate_1y_sma: ComputedFromHeight, pub hash_rate_ath: ComputedFromHeight, pub hash_rate_drawdown: ComputedFromHeight, pub hash_price_ths: ComputedFromHeight, diff --git a/crates/brk_computer/src/traits/mod.rs b/crates/brk_computer/src/traits/mod.rs index 902f31a3b..4ec19b20e 100644 --- a/crates/brk_computer/src/traits/mod.rs +++ b/crates/brk_computer/src/traits/mod.rs @@ -7,155 +7,6 @@ use vecdb::{ use crate::internal::sliding_window::SlidingWindowSorted; -/// Unified rolling extremum (min or max) from window starts. 
-/// -/// `should_replace` determines whether to evict the deque back: -/// - For min: `|back, new| *back >= *new` -/// - For max: `|back, new| *back <= *new` -pub fn compute_rolling_extremum_from_starts( - out: &mut EagerVec>, - max_from: I, - window_starts: &impl ReadableVec, - values: &impl ReadableVec, - should_replace: fn(&A, &A) -> bool, - exit: &Exit, -) -> Result<()> -where - I: VecIndex, - T: PcoVecValue + From, - A: VecValue + Ord, -{ - out.validate_and_truncate(window_starts.version() + values.version(), max_from)?; - - out.repeat_until_complete(exit, |this| { - let skip = this.len(); - let mut deque: std::collections::VecDeque<(usize, A)> = - std::collections::VecDeque::new(); - - let start_offset = if skip > 0 { - window_starts.collect_one_at(skip - 1).unwrap().to_usize() - } else { - 0 - }; - - let end = window_starts.len().min(values.len()); - let starts_batch = window_starts.collect_range_at(start_offset, end); - let values_batch = values.collect_range_at(start_offset, end); - - for (j, (start, value)) in starts_batch.into_iter().zip(values_batch).enumerate() { - let i = start_offset + j; - let start_usize = start.to_usize(); - while let Some(&(idx, _)) = deque.front() { - if idx < start_usize { - deque.pop_front(); - } else { - break; - } - } - while let Some((_, back)) = deque.back() { - if should_replace(back, &value) { - deque.pop_back(); - } else { - break; - } - } - deque.push_back((i, value)); - - if i >= skip { - let extremum = deque.front().unwrap().1.clone(); - this.checked_push_at(i, T::from(extremum))?; - if this.batch_limit_reached() { - break; - } - } - } - - Ok(()) - })?; - - Ok(()) -} - -pub trait ComputeRollingMinFromStarts { - fn compute_rolling_min_from_starts( - &mut self, - max_from: I, - window_starts: &impl ReadableVec, - values: &impl ReadableVec, - exit: &Exit, - ) -> Result<()> - where - A: VecValue + Ord, - T: From; -} - -impl ComputeRollingMinFromStarts for EagerVec> -where - I: VecIndex, - T: PcoVecValue, -{ - fn 
compute_rolling_min_from_starts( - &mut self, - max_from: I, - window_starts: &impl ReadableVec, - values: &impl ReadableVec, - exit: &Exit, - ) -> Result<()> - where - A: VecValue + Ord, - T: From, - { - compute_rolling_extremum_from_starts( - self, - max_from, - window_starts, - values, - |back, new| *back >= *new, - exit, - ) - } -} - -pub trait ComputeRollingMaxFromStarts { - fn compute_rolling_max_from_starts( - &mut self, - max_from: I, - window_starts: &impl ReadableVec, - values: &impl ReadableVec, - exit: &Exit, - ) -> Result<()> - where - A: VecValue + Ord, - T: From; -} - -impl ComputeRollingMaxFromStarts for EagerVec> -where - I: VecIndex, - T: PcoVecValue, -{ - fn compute_rolling_max_from_starts( - &mut self, - max_from: I, - window_starts: &impl ReadableVec, - values: &impl ReadableVec, - exit: &Exit, - ) -> Result<()> - where - A: VecValue + Ord, - T: From, - { - compute_rolling_extremum_from_starts( - self, - max_from, - window_starts, - values, - |back, new| *back <= *new, - exit, - ) - } -} - pub trait ComputeRollingMedianFromStarts { fn compute_rolling_median_from_starts( &mut self, diff --git a/crates/brk_server/src/api/mod.rs b/crates/brk_server/src/api/mod.rs index bbb2d7442..de217a099 100644 --- a/crates/brk_server/src/api/mod.rs +++ b/crates/brk_server/src/api/mod.rs @@ -93,6 +93,10 @@ impl ApiRoutes for ApiRouter { include_bytes!("./scalar.js.br").as_slice(), ) })) + .route( + "/.well-known/openapi.json", + get(|| async { Redirect::permanent("/openapi.json") }), + ) .route( "/api/{*path}", get(|| async { Redirect::permanent("/api") }), diff --git a/crates/brk_server/src/api/openapi/compact.rs b/crates/brk_server/src/api/openapi/compact.rs index ddb168e2d..acec004e8 100644 --- a/crates/brk_server/src/api/openapi/compact.rs +++ b/crates/brk_server/src/api/openapi/compact.rs @@ -40,7 +40,7 @@ impl ApiJson { /// 16. Remove required arrays from schemas /// 17. Remove redundant "type": "object" when properties exist /// 18. 
Flatten single-element type arrays -/// 19. Replace large enums (>20 values) with string type +/// 19. Replace large enums (>40 values) with string type fn compact_json(json: &str) -> String { let mut spec: Value = serde_json::from_str(json).expect("Invalid OpenAPI JSON"); @@ -175,9 +175,9 @@ fn compact_value(value: &mut Value) { obj.remove("type"); } - // Step 19: Replace large enums (>20 values) with just string type + // Step 19: Replace large enums (>40 values) with just string type if let Some(Value::Array(enum_values)) = obj.get("enum") - && enum_values.len() > 20 + && enum_values.len() > 40 { obj.remove("enum"); } diff --git a/crates/brk_server/src/api/openapi/mod.rs b/crates/brk_server/src/api/openapi/mod.rs index 4a12e5eb9..b0ace4475 100644 --- a/crates/brk_server/src/api/openapi/mod.rs +++ b/crates/brk_server/src/api/openapi/mod.rs @@ -29,7 +29,7 @@ pub fn create_openapi() -> OpenApi { - **Metrics**: Thousands of time-series metrics across multiple indexes (date, block height, etc.) - **[Mempool.space](https://mempool.space/docs/api/rest) compatible** (WIP): Most non-metrics endpoints follow the mempool.space API format - **Multiple formats**: JSON and CSV output -- **LLM-optimized**: Compact OpenAPI spec at [`/api.json`](/api.json) for AI tools (full spec at [`/openapi.json`](/openapi.json)) +- **LLM-optimized**: [`/llms.txt`](/llms.txt) for discovery, [`/api.json`](/api.json) compact OpenAPI spec for tool use (full spec at [`/openapi.json`](/openapi.json)) ### Client Libraries diff --git a/crates/brk_types/src/basis_points_16.rs b/crates/brk_types/src/basis_points_16.rs new file mode 100644 index 000000000..c715064a4 --- /dev/null +++ b/crates/brk_types/src/basis_points_16.rs @@ -0,0 +1,139 @@ +use std::ops::{Add, AddAssign, Div}; + +use derive_more::Deref; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use vecdb::{CheckedSub, Formattable, Pco}; + +use super::StoredF32; + +/// Unsigned basis points stored as u16. +/// 1 bp = 0.01%. 
Range: 0–655.35%. +/// Use for bounded 0–100% values (dominance, adoption, RSI, etc.). +#[derive( + Debug, + Deref, + Clone, + Default, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + Hash, + Serialize, + Deserialize, + Pco, + JsonSchema, +)] +pub struct BasisPoints16(u16); + +impl BasisPoints16 { + pub const ZERO: Self = Self(0); + + #[inline] + pub const fn new(value: u16) -> Self { + Self(value) + } + + #[inline(always)] + pub const fn inner(self) -> u16 { + self.0 + } + + /// Convert to f32: divide by 100. + #[inline] + pub fn to_f32(self) -> f32 { + self.0 as f32 / 100.0 + } +} + +impl From for BasisPoints16 { + #[inline] + fn from(value: usize) -> Self { + Self(value as u16) + } +} + +impl From for BasisPoints16 { + #[inline] + fn from(value: u16) -> Self { + Self(value) + } +} + +impl From for u16 { + #[inline] + fn from(value: BasisPoints16) -> Self { + value.0 + } +} + +/// Convert from float: multiply by 100 and round. +/// Input is in "display" form (e.g., 45.23 for 45.23%). 
+impl From for BasisPoints16 { + #[inline] + fn from(value: f64) -> Self { + debug_assert!(value >= 0.0 && value <= u16::MAX as f64 / 100.0, "f64 out of BasisPoints16 range: {value}"); + Self((value * 100.0).round() as u16) + } +} + +impl From for f64 { + #[inline] + fn from(value: BasisPoints16) -> Self { + value.0 as f64 / 100.0 + } +} + +impl From for StoredF32 { + #[inline] + fn from(value: BasisPoints16) -> Self { + StoredF32::from(value.to_f32()) + } +} + +impl Add for BasisPoints16 { + type Output = Self; + #[inline] + fn add(self, rhs: Self) -> Self::Output { + Self(self.0 + rhs.0) + } +} + +impl AddAssign for BasisPoints16 { + #[inline] + fn add_assign(&mut self, rhs: Self) { + self.0 += rhs.0; + } +} + +impl Div for BasisPoints16 { + type Output = Self; + #[inline] + fn div(self, rhs: usize) -> Self::Output { + Self(self.0 / rhs as u16) + } +} + +impl CheckedSub for BasisPoints16 { + fn checked_sub(self, rhs: Self) -> Option { + self.0.checked_sub(rhs.0).map(Self) + } +} + +impl std::fmt::Display for BasisPoints16 { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut buf = itoa::Buffer::new(); + let str = buf.format(self.0); + f.write_str(str) + } +} + +impl Formattable for BasisPoints16 { + #[inline(always)] + fn fmt_csv(&self, f: &mut String) -> std::fmt::Result { + use std::fmt::Write; + write!(f, "{}", self) + } +} diff --git a/crates/brk_types/src/basis_points_32.rs b/crates/brk_types/src/basis_points_32.rs new file mode 100644 index 000000000..f1d377438 --- /dev/null +++ b/crates/brk_types/src/basis_points_32.rs @@ -0,0 +1,142 @@ +use std::ops::{Add, AddAssign, Div}; + +use derive_more::Deref; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use vecdb::{CheckedSub, Formattable, Pco}; + +use super::StoredF32; + +/// Unsigned basis points stored as u32. +/// 1 bp = 0.01%. Range: 0–42,949,672.95%. +/// Use for unbounded unsigned ratios (MVRV, NVT, SOPR, etc.). 
+#[derive( + Debug, + Deref, + Clone, + Default, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + Hash, + Serialize, + Deserialize, + Pco, + JsonSchema, +)] +pub struct BasisPoints32(u32); + +impl BasisPoints32 { + pub const ZERO: Self = Self(0); + + #[inline] + pub const fn new(value: u32) -> Self { + Self(value) + } + + #[inline(always)] + pub const fn inner(self) -> u32 { + self.0 + } + + /// Convert to f32: divide by 100. + #[inline] + pub fn to_f32(self) -> f32 { + self.0 as f32 / 100.0 + } +} + +impl From for BasisPoints32 { + #[inline] + fn from(value: usize) -> Self { + Self(value as u32) + } +} + +impl From for BasisPoints32 { + #[inline] + fn from(value: u32) -> Self { + Self(value) + } +} + +impl From for u32 { + #[inline] + fn from(value: BasisPoints32) -> Self { + value.0 + } +} + +/// Convert from float: multiply by 100 and round. +/// Input is in "display" form (e.g., 450.23 for 450.23%). +impl From for BasisPoints32 { + #[inline] + fn from(value: f64) -> Self { + debug_assert!( + value >= 0.0 && value <= u32::MAX as f64 / 100.0, + "f64 out of BasisPoints32 range: {value}" + ); + Self((value * 100.0).round() as u32) + } +} + +impl From for f64 { + #[inline] + fn from(value: BasisPoints32) -> Self { + value.0 as f64 / 100.0 + } +} + +impl From for StoredF32 { + #[inline] + fn from(value: BasisPoints32) -> Self { + StoredF32::from(value.to_f32()) + } +} + +impl Add for BasisPoints32 { + type Output = Self; + #[inline] + fn add(self, rhs: Self) -> Self::Output { + Self(self.0 + rhs.0) + } +} + +impl AddAssign for BasisPoints32 { + #[inline] + fn add_assign(&mut self, rhs: Self) { + self.0 += rhs.0; + } +} + +impl Div for BasisPoints32 { + type Output = Self; + #[inline] + fn div(self, rhs: usize) -> Self::Output { + Self(self.0 / rhs as u32) + } +} + +impl CheckedSub for BasisPoints32 { + fn checked_sub(self, rhs: Self) -> Option { + self.0.checked_sub(rhs.0).map(Self) + } +} + +impl std::fmt::Display for BasisPoints32 { + fn fmt(&self, f: &mut 
std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut buf = itoa::Buffer::new(); + let str = buf.format(self.0); + f.write_str(str) + } +} + +impl Formattable for BasisPoints32 { + #[inline(always)] + fn fmt_csv(&self, f: &mut String) -> std::fmt::Result { + use std::fmt::Write; + write!(f, "{}", self) + } +} diff --git a/crates/brk_types/src/basis_points_signed_16.rs b/crates/brk_types/src/basis_points_signed_16.rs new file mode 100644 index 000000000..8c2af427b --- /dev/null +++ b/crates/brk_types/src/basis_points_signed_16.rs @@ -0,0 +1,162 @@ +use std::ops::{Add, AddAssign, Div, Sub, SubAssign}; + +use derive_more::Deref; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use vecdb::{CheckedSub, Formattable, Pco}; + +use super::StoredF32; + +/// Signed basis points stored as i16. +/// 1 bp = 0.01%. Range: -327.67% to +327.67%. +/// Use for signed bounded values (NUPL, net PnL ratios, etc.). +#[derive( + Debug, + Deref, + Clone, + Default, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + Hash, + Serialize, + Deserialize, + Pco, + JsonSchema, +)] +pub struct BasisPointsSigned16(i16); + +impl BasisPointsSigned16 { + pub const ZERO: Self = Self(0); + + #[inline] + pub const fn new(value: i16) -> Self { + Self(value) + } + + #[inline(always)] + pub const fn inner(self) -> i16 { + self.0 + } + + #[inline] + pub fn is_negative(self) -> bool { + self.0 < 0 + } + + /// Convert to f32: divide by 100. + #[inline] + pub fn to_f32(self) -> f32 { + self.0 as f32 / 100.0 + } +} + +impl From for BasisPointsSigned16 { + #[inline] + fn from(value: usize) -> Self { + Self(value as i16) + } +} + +impl From for BasisPointsSigned16 { + #[inline] + fn from(value: i16) -> Self { + Self(value) + } +} + +impl From for i16 { + #[inline] + fn from(value: BasisPointsSigned16) -> Self { + value.0 + } +} + +/// Convert from float: multiply by 100 and round. +/// Input is in "display" form (e.g., -45.23 for -45.23%). 
+impl From for BasisPointsSigned16 { + #[inline] + fn from(value: f64) -> Self { + debug_assert!( + value >= i16::MIN as f64 / 100.0 && value <= i16::MAX as f64 / 100.0, + "f64 out of BasisPointsSigned16 range: {value}" + ); + Self((value * 100.0).round() as i16) + } +} + +impl From for f64 { + #[inline] + fn from(value: BasisPointsSigned16) -> Self { + value.0 as f64 / 100.0 + } +} + +impl From for StoredF32 { + #[inline] + fn from(value: BasisPointsSigned16) -> Self { + StoredF32::from(value.to_f32()) + } +} + +impl Add for BasisPointsSigned16 { + type Output = Self; + #[inline] + fn add(self, rhs: Self) -> Self::Output { + Self(self.0 + rhs.0) + } +} + +impl AddAssign for BasisPointsSigned16 { + #[inline] + fn add_assign(&mut self, rhs: Self) { + self.0 += rhs.0; + } +} + +impl Sub for BasisPointsSigned16 { + type Output = Self; + #[inline] + fn sub(self, rhs: Self) -> Self::Output { + Self(self.0 - rhs.0) + } +} + +impl SubAssign for BasisPointsSigned16 { + #[inline] + fn sub_assign(&mut self, rhs: Self) { + self.0 -= rhs.0; + } +} + +impl Div for BasisPointsSigned16 { + type Output = Self; + #[inline] + fn div(self, rhs: usize) -> Self::Output { + Self(self.0 / rhs as i16) + } +} + +impl CheckedSub for BasisPointsSigned16 { + fn checked_sub(self, rhs: Self) -> Option { + self.0.checked_sub(rhs.0).map(Self) + } +} + +impl std::fmt::Display for BasisPointsSigned16 { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut buf = itoa::Buffer::new(); + let str = buf.format(self.0); + f.write_str(str) + } +} + +impl Formattable for BasisPointsSigned16 { + #[inline(always)] + fn fmt_csv(&self, f: &mut String) -> std::fmt::Result { + use std::fmt::Write; + write!(f, "{}", self) + } +} diff --git a/crates/brk_types/src/basis_points_signed_32.rs b/crates/brk_types/src/basis_points_signed_32.rs new file mode 100644 index 000000000..cf703c725 --- /dev/null +++ b/crates/brk_types/src/basis_points_signed_32.rs @@ -0,0 +1,162 @@ +use std::ops::{Add, 
AddAssign, Div, Sub, SubAssign}; + +use derive_more::Deref; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use vecdb::{CheckedSub, Formattable, Pco}; + +use super::StoredF32; + +/// Signed basis points stored as i32. +/// 1 bp = 0.01%. Range: -21,474,836.47% to +21,474,836.47%. +/// Use for unbounded signed values (returns, growth rates, volatility, z-scores, etc.). +#[derive( + Debug, + Deref, + Clone, + Default, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + Hash, + Serialize, + Deserialize, + Pco, + JsonSchema, +)] +pub struct BasisPointsSigned32(i32); + +impl BasisPointsSigned32 { + pub const ZERO: Self = Self(0); + + #[inline] + pub const fn new(value: i32) -> Self { + Self(value) + } + + #[inline(always)] + pub const fn inner(self) -> i32 { + self.0 + } + + #[inline] + pub fn is_negative(self) -> bool { + self.0 < 0 + } + + /// Convert to f32: divide by 100. + #[inline] + pub fn to_f32(self) -> f32 { + self.0 as f32 / 100.0 + } +} + +impl From for BasisPointsSigned32 { + #[inline] + fn from(value: usize) -> Self { + Self(value as i32) + } +} + +impl From for BasisPointsSigned32 { + #[inline] + fn from(value: i32) -> Self { + Self(value) + } +} + +impl From for i32 { + #[inline] + fn from(value: BasisPointsSigned32) -> Self { + value.0 + } +} + +/// Convert from float: multiply by 100 and round. +/// Input is in "display" form (e.g., -5000.23 for -5000.23%). 
+impl From for BasisPointsSigned32 { + #[inline] + fn from(value: f64) -> Self { + debug_assert!( + value >= i32::MIN as f64 / 100.0 && value <= i32::MAX as f64 / 100.0, + "f64 out of BasisPointsSigned32 range: {value}" + ); + Self((value * 100.0).round() as i32) + } +} + +impl From for f64 { + #[inline] + fn from(value: BasisPointsSigned32) -> Self { + value.0 as f64 / 100.0 + } +} + +impl From for StoredF32 { + #[inline] + fn from(value: BasisPointsSigned32) -> Self { + StoredF32::from(value.to_f32()) + } +} + +impl Add for BasisPointsSigned32 { + type Output = Self; + #[inline] + fn add(self, rhs: Self) -> Self::Output { + Self(self.0 + rhs.0) + } +} + +impl AddAssign for BasisPointsSigned32 { + #[inline] + fn add_assign(&mut self, rhs: Self) { + self.0 += rhs.0; + } +} + +impl Sub for BasisPointsSigned32 { + type Output = Self; + #[inline] + fn sub(self, rhs: Self) -> Self::Output { + Self(self.0 - rhs.0) + } +} + +impl SubAssign for BasisPointsSigned32 { + #[inline] + fn sub_assign(&mut self, rhs: Self) { + self.0 -= rhs.0; + } +} + +impl Div for BasisPointsSigned32 { + type Output = Self; + #[inline] + fn div(self, rhs: usize) -> Self::Output { + Self(self.0 / rhs as i32) + } +} + +impl CheckedSub for BasisPointsSigned32 { + fn checked_sub(self, rhs: Self) -> Option { + self.0.checked_sub(rhs.0).map(Self) + } +} + +impl std::fmt::Display for BasisPointsSigned32 { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut buf = itoa::Buffer::new(); + let str = buf.format(self.0); + f.write_str(str) + } +} + +impl Formattable for BasisPointsSigned32 { + #[inline(always)] + fn fmt_csv(&self, f: &mut String) -> std::fmt::Result { + use std::fmt::Write; + write!(f, "{}", self) + } +} diff --git a/crates/brk_types/src/datarange.rs b/crates/brk_types/src/datarange.rs index 257deeb6f..0b5ea9609 100644 --- a/crates/brk_types/src/datarange.rs +++ b/crates/brk_types/src/datarange.rs @@ -6,17 +6,17 @@ use crate::{de_unquote_i64, de_unquote_limit, 
Limit}; /// Range parameters for slicing data #[derive(Default, Debug, Deserialize, JsonSchema)] pub struct DataRange { - /// Inclusive starting index, if negative counts from end + /// Inclusive starting index, if negative counts from end. Aliases: `from`, `f`, `s` #[serde(default, alias = "s", alias = "from", alias = "f", deserialize_with = "de_unquote_i64")] #[schemars(example = 0, example = -1, example = -10, example = -1000)] start: Option, - /// Exclusive ending index, if negative counts from end + /// Exclusive ending index, if negative counts from end. Aliases: `to`, `t`, `e` #[serde(default, alias = "e", alias = "to", alias = "t", deserialize_with = "de_unquote_i64")] #[schemars(example = 1000)] end: Option, - /// Maximum number of values to return (ignored if `end` is set) + /// Maximum number of values to return (ignored if `end` is set). Aliases: `count`, `c`, `l` #[serde(default, alias = "l", alias = "count", alias = "c", deserialize_with = "de_unquote_limit")] limit: Option, } diff --git a/crates/brk_types/src/lib.rs b/crates/brk_types/src/lib.rs index 762e0559f..3230ec02c 100644 --- a/crates/brk_types/src/lib.rs +++ b/crates/brk_types/src/lib.rs @@ -15,6 +15,10 @@ mod addresstxidsparam; mod addressvalidation; mod age; mod anyaddressindex; +mod basis_points_16; +mod basis_points_32; +mod basis_points_signed_16; +mod basis_points_signed_32; mod bitcoin; mod blkmetadata; mod blkposition; @@ -202,6 +206,10 @@ pub use addresstxidsparam::*; pub use addressvalidation::*; pub use age::*; pub use anyaddressindex::*; +pub use basis_points_16::*; +pub use basis_points_32::*; +pub use basis_points_signed_16::*; +pub use basis_points_signed_32::*; pub use bitcoin::*; pub use blkmetadata::*; pub use blkposition::*; diff --git a/crates/brk_types/src/percentile.rs b/crates/brk_types/src/percentile.rs index 3f4f77929..0e4137c8f 100644 --- a/crates/brk_types/src/percentile.rs +++ b/crates/brk_types/src/percentile.rs @@ -30,7 +30,7 @@ where if fract != 0.0 { let left = 
sorted.get(index as usize).unwrap().clone(); let right = sorted.get(index.ceil() as usize).unwrap().clone(); - left / 2 + right / 2 + (left + right) / 2 } else { sorted.get(index as usize).unwrap().clone() } diff --git a/docs/README.md b/docs/README.md index fdfc01262..680c119a6 100644 --- a/docs/README.md +++ b/docs/README.md @@ -43,7 +43,7 @@ curl https://bitview.space/api/mempool/price Query metrics and blockchain data in JSON or CSV. -[Documentation](https://bitview.space/api) · [JavaScript](https://www.npmjs.com/package/brk-client) · [Python](https://pypi.org/project/brk-client) · [Rust](https://crates.io/crates/brk_client) · [LLM-friendly schema](https://bitview.space/api#tag/server/GET/api.json) +[Documentation](https://bitview.space/api) · [JavaScript](https://www.npmjs.com/package/brk-client) · [Python](https://pypi.org/project/brk-client) · [Rust](https://crates.io/crates/brk_client) · [llms.txt](https://bitview.space/llms.txt) · [LLM-friendly schema](https://bitview.space/api.json) ### Self-host diff --git a/modules/brk-client/index.js b/modules/brk-client/index.js index e6ace7d53..ca6ae56e2 100644 --- a/modules/brk-client/index.js +++ b/modules/brk-client/index.js @@ -2866,6 +2866,45 @@ function create_10y1m1w1y2y3m3y4y5y6m6y8yPattern3(client, acc) { }; } +/** + * @typedef {Object} _10y1m1w1y2y3m3y4y5y6m6y8yPattern2 + * @property {MetricPattern1} _10y + * @property {MetricPattern1} _1m + * @property {MetricPattern1} _1w + * @property {MetricPattern1} _1y + * @property {MetricPattern1} _2y + * @property {MetricPattern1} _3m + * @property {MetricPattern1} _3y + * @property {MetricPattern1} _4y + * @property {MetricPattern1} _5y + * @property {MetricPattern1} _6m + * @property {MetricPattern1} _6y + * @property {MetricPattern1} _8y + */ + +/** + * Create a _10y1m1w1y2y3m3y4y5y6m6y8yPattern2 pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {_10y1m1w1y2y3m3y4y5y6m6y8yPattern2} + */ +function 
create_10y1m1w1y2y3m3y4y5y6m6y8yPattern2(client, acc) { + return { + _10y: createMetricPattern1(client, _p('10y', acc)), + _1m: createMetricPattern1(client, _p('1m', acc)), + _1w: createMetricPattern1(client, _p('1w', acc)), + _1y: createMetricPattern1(client, _p('1y', acc)), + _2y: createMetricPattern1(client, _p('2y', acc)), + _3m: createMetricPattern1(client, _p('3m', acc)), + _3y: createMetricPattern1(client, _p('3y', acc)), + _4y: createMetricPattern1(client, _p('4y', acc)), + _5y: createMetricPattern1(client, _p('5y', acc)), + _6m: createMetricPattern1(client, _p('6m', acc)), + _6y: createMetricPattern1(client, _p('6y', acc)), + _8y: createMetricPattern1(client, _p('8y', acc)), + }; +} + /** * @typedef {Object} InvestedNegNetNuplSupplyUnrealizedPattern * @property {MetricPattern1} investedCapitalInLossPct @@ -2905,88 +2944,6 @@ function createInvestedNegNetNuplSupplyUnrealizedPattern(client, acc) { }; } -/** - * @template T - * @typedef {Object} _10y1m1w1y2y3m3y4y5y6m6y8yPattern2 - * @property {MetricPattern1} _10y - * @property {MetricPattern1} _1m - * @property {MetricPattern1} _1w - * @property {MetricPattern1} _1y - * @property {MetricPattern1} _2y - * @property {MetricPattern1} _3m - * @property {MetricPattern1} _3y - * @property {MetricPattern1} _4y - * @property {MetricPattern1} _5y - * @property {MetricPattern1} _6m - * @property {MetricPattern1} _6y - * @property {MetricPattern1} _8y - */ - -/** - * Create a _10y1m1w1y2y3m3y4y5y6m6y8yPattern2 pattern node - * @template T - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {_10y1m1w1y2y3m3y4y5y6m6y8yPattern2} - */ -function create_10y1m1w1y2y3m3y4y5y6m6y8yPattern2(client, acc) { - return { - _10y: createMetricPattern1(client, _p('10y', acc)), - _1m: createMetricPattern1(client, _p('1m', acc)), - _1w: createMetricPattern1(client, _p('1w', acc)), - _1y: createMetricPattern1(client, _p('1y', acc)), - _2y: createMetricPattern1(client, _p('2y', acc)), - _3m: 
createMetricPattern1(client, _p('3m', acc)), - _3y: createMetricPattern1(client, _p('3y', acc)), - _4y: createMetricPattern1(client, _p('4y', acc)), - _5y: createMetricPattern1(client, _p('5y', acc)), - _6m: createMetricPattern1(client, _p('6m', acc)), - _6y: createMetricPattern1(client, _p('6y', acc)), - _8y: createMetricPattern1(client, _p('8y', acc)), - }; -} - -/** - * @template T - * @typedef {Object} _201520162017201820192020202120222023202420252026Pattern2 - * @property {MetricPattern1} _2015 - * @property {MetricPattern1} _2016 - * @property {MetricPattern1} _2017 - * @property {MetricPattern1} _2018 - * @property {MetricPattern1} _2019 - * @property {MetricPattern1} _2020 - * @property {MetricPattern1} _2021 - * @property {MetricPattern1} _2022 - * @property {MetricPattern1} _2023 - * @property {MetricPattern1} _2024 - * @property {MetricPattern1} _2025 - * @property {MetricPattern1} _2026 - */ - -/** - * Create a _201520162017201820192020202120222023202420252026Pattern2 pattern node - * @template T - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {_201520162017201820192020202120222023202420252026Pattern2} - */ -function create_201520162017201820192020202120222023202420252026Pattern2(client, acc) { - return { - _2015: createMetricPattern1(client, _m(acc, '2015_returns')), - _2016: createMetricPattern1(client, _m(acc, '2016_returns')), - _2017: createMetricPattern1(client, _m(acc, '2017_returns')), - _2018: createMetricPattern1(client, _m(acc, '2018_returns')), - _2019: createMetricPattern1(client, _m(acc, '2019_returns')), - _2020: createMetricPattern1(client, _m(acc, '2020_returns')), - _2021: createMetricPattern1(client, _m(acc, '2021_returns')), - _2022: createMetricPattern1(client, _m(acc, '2022_returns')), - _2023: createMetricPattern1(client, _m(acc, '2023_returns')), - _2024: createMetricPattern1(client, _m(acc, '2024_returns')), - _2025: createMetricPattern1(client, _m(acc, '2025_returns')), - _2026: 
createMetricPattern1(client, _m(acc, '2026_returns')), - }; -} - /** * @typedef {Object} AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90RollingSumPattern * @property {MetricPattern18} average @@ -3580,6 +3537,31 @@ function createInvestedMaxMinPercentilesSpotPattern(client, acc) { }; } +/** + * @typedef {Object} EmaHistogramLineSignalPattern + * @property {MetricPattern1} emaFast + * @property {MetricPattern1} emaSlow + * @property {MetricPattern1} histogram + * @property {MetricPattern1} line + * @property {MetricPattern1} signal + */ + +/** + * Create a EmaHistogramLineSignalPattern pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {EmaHistogramLineSignalPattern} + */ +function createEmaHistogramLineSignalPattern(client, acc) { + return { + emaFast: createMetricPattern1(client, _m(acc, 'ema_fast_1y')), + emaSlow: createMetricPattern1(client, _m(acc, 'ema_slow_1y')), + histogram: createMetricPattern1(client, _m(acc, 'histogram_1y')), + line: createMetricPattern1(client, _m(acc, 'line_1y')), + signal: createMetricPattern1(client, _m(acc, 'signal_1y')), + }; +} + /** * @typedef {Object} _1y24h30d7dPattern2 * @property {BtcCentsSatsUsdPattern} _1y @@ -3735,27 +3717,6 @@ function createCentsSatsUsdPattern(client, acc) { }; } -/** - * @typedef {Object} HistogramLineSignalPattern - * @property {MetricPattern1} histogram - * @property {MetricPattern1} line - * @property {MetricPattern1} signal - */ - -/** - * Create a HistogramLineSignalPattern pattern node - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {HistogramLineSignalPattern} - */ -function createHistogramLineSignalPattern(client, acc) { - return { - histogram: createMetricPattern1(client, _m(acc, 'histogram_1y')), - line: createMetricPattern1(client, _m(acc, 'line_1y')), - signal: createMetricPattern1(client, _m(acc, 'signal_1y')), - }; -} - /** * @template T * @typedef {Object} _6bBlockTxindexPattern @@ 
-3994,7 +3955,7 @@ function createRatioPattern2(client, acc) { /** * @typedef {Object} MetricsTree_Blocks_Difficulty * @property {MetricPattern1} raw - * @property {MetricPattern1} asHash + * @property {MetricPattern1} asHash * @property {MetricPattern1} adjustment * @property {MetricPattern1} epoch * @property {MetricPattern1} blocksBeforeNextAdjustment @@ -4043,22 +4004,34 @@ function createRatioPattern2(client, acc) { * @property {MetricPattern18} height34dAgo * @property {MetricPattern18} height55dAgo * @property {MetricPattern18} height2mAgo + * @property {MetricPattern18} height9wAgo + * @property {MetricPattern18} height12wAgo * @property {MetricPattern18} height89dAgo + * @property {MetricPattern18} height3mAgo + * @property {MetricPattern18} height14wAgo * @property {MetricPattern18} height111dAgo * @property {MetricPattern18} height144dAgo - * @property {MetricPattern18} height3mAgo * @property {MetricPattern18} height6mAgo + * @property {MetricPattern18} height26wAgo * @property {MetricPattern18} height200dAgo + * @property {MetricPattern18} height9mAgo * @property {MetricPattern18} height350dAgo + * @property {MetricPattern18} height12mAgo * @property {MetricPattern18} height1yAgo + * @property {MetricPattern18} height14mAgo * @property {MetricPattern18} height2yAgo - * @property {MetricPattern18} height200wAgo + * @property {MetricPattern18} height26mAgo * @property {MetricPattern18} height3yAgo + * @property {MetricPattern18} height200wAgo * @property {MetricPattern18} height4yAgo * @property {MetricPattern18} height5yAgo * @property {MetricPattern18} height6yAgo * @property {MetricPattern18} height8yAgo + * @property {MetricPattern18} height9yAgo * @property {MetricPattern18} height10yAgo + * @property {MetricPattern18} height12yAgo + * @property {MetricPattern18} height14yAgo + * @property {MetricPattern18} height26yAgo */ /** @@ -4269,9 +4242,9 @@ function createRatioPattern2(client, acc) { * @typedef {Object} MetricsTree_Mining_Hashrate * 
@property {MetricPattern1} hashRate * @property {MetricPattern1} hashRate1wSma - * @property {MetricPattern1} hashRate1mSma - * @property {MetricPattern1} hashRate2mSma - * @property {MetricPattern1} hashRate1ySma + * @property {MetricPattern1} hashRate1mSma + * @property {MetricPattern1} hashRate2mSma + * @property {MetricPattern1} hashRate1ySma * @property {MetricPattern1} hashRateAth * @property {MetricPattern1} hashRateDrawdown * @property {MetricPattern1} hashPriceThs @@ -4761,25 +4734,13 @@ function createRatioPattern2(client, acc) { * @property {MetricPattern18} dcaSatsPerDay * @property {_10y1m1w1y2y3m3y4y5y6m6y8yPattern3} periodStack * @property {MetricsTree_Market_Dca_PeriodAveragePrice} periodAveragePrice - * @property {_10y1m1w1y2y3m3y4y5y6m6y8yPattern2} periodReturns + * @property {_10y1m1w1y2y3m3y4y5y6m6y8yPattern2} periodReturns * @property {_10y2y3y4y5y6y8yPattern} periodCagr - * @property {_10y1m1w1y2y3m3y4y5y6m6y8yPattern2} periodDaysInProfit - * @property {_10y1m1w1y2y3m3y4y5y6m6y8yPattern2} periodDaysInLoss - * @property {_10y1m1w1y2y3m3y4y5y6m6y8yPattern2} periodMinReturn - * @property {_10y1m1w1y2y3m3y4y5y6m6y8yPattern2} periodMaxReturn * @property {_10y1m1w1y2y3m3y4y5y6m6y8yPattern3} periodLumpSumStack - * @property {_10y1m1w1y2y3m3y4y5y6m6y8yPattern2} periodLumpSumReturns - * @property {_10y1m1w1y2y3m3y4y5y6m6y8yPattern2} periodLumpSumDaysInProfit - * @property {_10y1m1w1y2y3m3y4y5y6m6y8yPattern2} periodLumpSumDaysInLoss - * @property {_10y1m1w1y2y3m3y4y5y6m6y8yPattern2} periodLumpSumMinReturn - * @property {_10y1m1w1y2y3m3y4y5y6m6y8yPattern2} periodLumpSumMaxReturn + * @property {_10y1m1w1y2y3m3y4y5y6m6y8yPattern2} periodLumpSumReturns * @property {MetricsTree_Market_Dca_ClassStack} classStack * @property {MetricsTree_Market_Dca_ClassAveragePrice} classAveragePrice - * @property {_201520162017201820192020202120222023202420252026Pattern2} classReturns - * @property {MetricsTree_Market_Dca_ClassDaysInProfit} classDaysInProfit - * @property 
{MetricsTree_Market_Dca_ClassDaysInLoss} classDaysInLoss - * @property {MetricsTree_Market_Dca_ClassMinReturn} classMinReturn - * @property {MetricsTree_Market_Dca_ClassMaxReturn} classMaxReturn + * @property {MetricsTree_Market_Dca_ClassReturns} classReturns */ /** @@ -4831,55 +4792,7 @@ function createRatioPattern2(client, acc) { */ /** - * @typedef {Object} MetricsTree_Market_Dca_ClassDaysInProfit - * @property {MetricPattern1} _2015 - * @property {MetricPattern1} _2016 - * @property {MetricPattern1} _2017 - * @property {MetricPattern1} _2018 - * @property {MetricPattern1} _2019 - * @property {MetricPattern1} _2020 - * @property {MetricPattern1} _2021 - * @property {MetricPattern1} _2022 - * @property {MetricPattern1} _2023 - * @property {MetricPattern1} _2024 - * @property {MetricPattern1} _2025 - * @property {MetricPattern1} _2026 - */ - -/** - * @typedef {Object} MetricsTree_Market_Dca_ClassDaysInLoss - * @property {MetricPattern1} _2015 - * @property {MetricPattern1} _2016 - * @property {MetricPattern1} _2017 - * @property {MetricPattern1} _2018 - * @property {MetricPattern1} _2019 - * @property {MetricPattern1} _2020 - * @property {MetricPattern1} _2021 - * @property {MetricPattern1} _2022 - * @property {MetricPattern1} _2023 - * @property {MetricPattern1} _2024 - * @property {MetricPattern1} _2025 - * @property {MetricPattern1} _2026 - */ - -/** - * @typedef {Object} MetricsTree_Market_Dca_ClassMinReturn - * @property {MetricPattern1} _2015 - * @property {MetricPattern1} _2016 - * @property {MetricPattern1} _2017 - * @property {MetricPattern1} _2018 - * @property {MetricPattern1} _2019 - * @property {MetricPattern1} _2020 - * @property {MetricPattern1} _2021 - * @property {MetricPattern1} _2022 - * @property {MetricPattern1} _2023 - * @property {MetricPattern1} _2024 - * @property {MetricPattern1} _2025 - * @property {MetricPattern1} _2026 - */ - -/** - * @typedef {Object} MetricsTree_Market_Dca_ClassMaxReturn + * @typedef {Object} 
MetricsTree_Market_Dca_ClassReturns * @property {MetricPattern1} _2015 * @property {MetricPattern1} _2016 * @property {MetricPattern1} _2017 @@ -4961,11 +4874,13 @@ function createRatioPattern2(client, acc) { * @property {MetricsTree_Market_Indicators_Macd_1d} _1d * @property {MetricsTree_Market_Indicators_Macd_1w} _1w * @property {MetricsTree_Market_Indicators_Macd_1m} _1m - * @property {HistogramLineSignalPattern} _1y + * @property {EmaHistogramLineSignalPattern} _1y */ /** * @typedef {Object} MetricsTree_Market_Indicators_Macd_1d + * @property {MetricPattern1} emaFast + * @property {MetricPattern1} emaSlow * @property {MetricPattern1} line * @property {MetricPattern1} signal * @property {MetricPattern1} histogram @@ -4973,6 +4888,8 @@ function createRatioPattern2(client, acc) { /** * @typedef {Object} MetricsTree_Market_Indicators_Macd_1w + * @property {MetricPattern1} emaFast + * @property {MetricPattern1} emaSlow * @property {MetricPattern1} line * @property {MetricPattern1} signal * @property {MetricPattern1} histogram @@ -4980,6 +4897,8 @@ function createRatioPattern2(client, acc) { /** * @typedef {Object} MetricsTree_Market_Indicators_Macd_1m + * @property {MetricPattern1} emaFast + * @property {MetricPattern1} emaSlow * @property {MetricPattern1} line * @property {MetricPattern1} signal * @property {MetricPattern1} histogram @@ -6582,22 +6501,34 @@ class BrkClient extends BrkClientBase { height34dAgo: createMetricPattern18(this, 'height_34d_ago'), height55dAgo: createMetricPattern18(this, 'height_55d_ago'), height2mAgo: createMetricPattern18(this, 'height_2m_ago'), + height9wAgo: createMetricPattern18(this, 'height_9w_ago'), + height12wAgo: createMetricPattern18(this, 'height_12w_ago'), height89dAgo: createMetricPattern18(this, 'height_89d_ago'), + height3mAgo: createMetricPattern18(this, 'height_3m_ago'), + height14wAgo: createMetricPattern18(this, 'height_14w_ago'), height111dAgo: createMetricPattern18(this, 'height_111d_ago'), height144dAgo: 
createMetricPattern18(this, 'height_144d_ago'), - height3mAgo: createMetricPattern18(this, 'height_3m_ago'), height6mAgo: createMetricPattern18(this, 'height_6m_ago'), + height26wAgo: createMetricPattern18(this, 'height_26w_ago'), height200dAgo: createMetricPattern18(this, 'height_200d_ago'), + height9mAgo: createMetricPattern18(this, 'height_9m_ago'), height350dAgo: createMetricPattern18(this, 'height_350d_ago'), + height12mAgo: createMetricPattern18(this, 'height_12m_ago'), height1yAgo: createMetricPattern18(this, 'height_1y_ago'), + height14mAgo: createMetricPattern18(this, 'height_14m_ago'), height2yAgo: createMetricPattern18(this, 'height_2y_ago'), - height200wAgo: createMetricPattern18(this, 'height_200w_ago'), + height26mAgo: createMetricPattern18(this, 'height_26m_ago'), height3yAgo: createMetricPattern18(this, 'height_3y_ago'), + height200wAgo: createMetricPattern18(this, 'height_200w_ago'), height4yAgo: createMetricPattern18(this, 'height_4y_ago'), height5yAgo: createMetricPattern18(this, 'height_5y_ago'), height6yAgo: createMetricPattern18(this, 'height_6y_ago'), height8yAgo: createMetricPattern18(this, 'height_8y_ago'), + height9yAgo: createMetricPattern18(this, 'height_9y_ago'), height10yAgo: createMetricPattern18(this, 'height_10y_ago'), + height12yAgo: createMetricPattern18(this, 'height_12y_ago'), + height14yAgo: createMetricPattern18(this, 'height_14y_ago'), + height26yAgo: createMetricPattern18(this, 'height_26y_ago'), }, interval: createAverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern(this, 'block_interval'), halving: { @@ -7111,16 +7042,8 @@ class BrkClient extends BrkClientBase { }, periodReturns: create_10y1m1w1y2y3m3y4y5y6m6y8yPattern2(this, 'dca_returns'), periodCagr: create_10y2y3y4y5y6y8yPattern(this, 'dca_cagr'), - periodDaysInProfit: create_10y1m1w1y2y3m3y4y5y6m6y8yPattern2(this, 'dca_days_in_profit'), - periodDaysInLoss: create_10y1m1w1y2y3m3y4y5y6m6y8yPattern2(this, 'dca_days_in_loss'), - periodMinReturn: 
create_10y1m1w1y2y3m3y4y5y6m6y8yPattern2(this, 'dca_min_return'), - periodMaxReturn: create_10y1m1w1y2y3m3y4y5y6m6y8yPattern2(this, 'dca_max_return'), periodLumpSumStack: create_10y1m1w1y2y3m3y4y5y6m6y8yPattern3(this, 'lump_sum_stack'), periodLumpSumReturns: create_10y1m1w1y2y3m3y4y5y6m6y8yPattern2(this, 'lump_sum_returns'), - periodLumpSumDaysInProfit: create_10y1m1w1y2y3m3y4y5y6m6y8yPattern2(this, 'lump_sum_days_in_profit'), - periodLumpSumDaysInLoss: create_10y1m1w1y2y3m3y4y5y6m6y8yPattern2(this, 'lump_sum_days_in_loss'), - periodLumpSumMinReturn: create_10y1m1w1y2y3m3y4y5y6m6y8yPattern2(this, 'lump_sum_min_return'), - periodLumpSumMaxReturn: create_10y1m1w1y2y3m3y4y5y6m6y8yPattern2(this, 'lump_sum_max_return'), classStack: { _2015: createBtcCentsSatsUsdPattern(this, 'dca_class_2015_stack'), _2016: createBtcCentsSatsUsdPattern(this, 'dca_class_2016_stack'), @@ -7149,62 +7072,19 @@ class BrkClient extends BrkClientBase { _2025: createCentsSatsUsdPattern(this, 'dca_class_2025_average_price'), _2026: createCentsSatsUsdPattern(this, 'dca_class_2026_average_price'), }, - classReturns: create_201520162017201820192020202120222023202420252026Pattern2(this, 'dca_class'), - classDaysInProfit: { - _2015: createMetricPattern1(this, 'dca_class_2015_days_in_profit'), - _2016: createMetricPattern1(this, 'dca_class_2016_days_in_profit'), - _2017: createMetricPattern1(this, 'dca_class_2017_days_in_profit'), - _2018: createMetricPattern1(this, 'dca_class_2018_days_in_profit'), - _2019: createMetricPattern1(this, 'dca_class_2019_days_in_profit'), - _2020: createMetricPattern1(this, 'dca_class_2020_days_in_profit'), - _2021: createMetricPattern1(this, 'dca_class_2021_days_in_profit'), - _2022: createMetricPattern1(this, 'dca_class_2022_days_in_profit'), - _2023: createMetricPattern1(this, 'dca_class_2023_days_in_profit'), - _2024: createMetricPattern1(this, 'dca_class_2024_days_in_profit'), - _2025: createMetricPattern1(this, 'dca_class_2025_days_in_profit'), - _2026: 
createMetricPattern1(this, 'dca_class_2026_days_in_profit'), - }, - classDaysInLoss: { - _2015: createMetricPattern1(this, 'dca_class_2015_days_in_loss'), - _2016: createMetricPattern1(this, 'dca_class_2016_days_in_loss'), - _2017: createMetricPattern1(this, 'dca_class_2017_days_in_loss'), - _2018: createMetricPattern1(this, 'dca_class_2018_days_in_loss'), - _2019: createMetricPattern1(this, 'dca_class_2019_days_in_loss'), - _2020: createMetricPattern1(this, 'dca_class_2020_days_in_loss'), - _2021: createMetricPattern1(this, 'dca_class_2021_days_in_loss'), - _2022: createMetricPattern1(this, 'dca_class_2022_days_in_loss'), - _2023: createMetricPattern1(this, 'dca_class_2023_days_in_loss'), - _2024: createMetricPattern1(this, 'dca_class_2024_days_in_loss'), - _2025: createMetricPattern1(this, 'dca_class_2025_days_in_loss'), - _2026: createMetricPattern1(this, 'dca_class_2026_days_in_loss'), - }, - classMinReturn: { - _2015: createMetricPattern1(this, 'dca_class_2015_min_return'), - _2016: createMetricPattern1(this, 'dca_class_2016_min_return'), - _2017: createMetricPattern1(this, 'dca_class_2017_min_return'), - _2018: createMetricPattern1(this, 'dca_class_2018_min_return'), - _2019: createMetricPattern1(this, 'dca_class_2019_min_return'), - _2020: createMetricPattern1(this, 'dca_class_2020_min_return'), - _2021: createMetricPattern1(this, 'dca_class_2021_min_return'), - _2022: createMetricPattern1(this, 'dca_class_2022_min_return'), - _2023: createMetricPattern1(this, 'dca_class_2023_min_return'), - _2024: createMetricPattern1(this, 'dca_class_2024_min_return'), - _2025: createMetricPattern1(this, 'dca_class_2025_min_return'), - _2026: createMetricPattern1(this, 'dca_class_2026_min_return'), - }, - classMaxReturn: { - _2015: createMetricPattern1(this, 'dca_class_2015_max_return'), - _2016: createMetricPattern1(this, 'dca_class_2016_max_return'), - _2017: createMetricPattern1(this, 'dca_class_2017_max_return'), - _2018: createMetricPattern1(this, 
'dca_class_2018_max_return'), - _2019: createMetricPattern1(this, 'dca_class_2019_max_return'), - _2020: createMetricPattern1(this, 'dca_class_2020_max_return'), - _2021: createMetricPattern1(this, 'dca_class_2021_max_return'), - _2022: createMetricPattern1(this, 'dca_class_2022_max_return'), - _2023: createMetricPattern1(this, 'dca_class_2023_max_return'), - _2024: createMetricPattern1(this, 'dca_class_2024_max_return'), - _2025: createMetricPattern1(this, 'dca_class_2025_max_return'), - _2026: createMetricPattern1(this, 'dca_class_2026_max_return'), + classReturns: { + _2015: createMetricPattern1(this, 'dca_class_2015_returns'), + _2016: createMetricPattern1(this, 'dca_class_2016_returns'), + _2017: createMetricPattern1(this, 'dca_class_2017_returns'), + _2018: createMetricPattern1(this, 'dca_class_2018_returns'), + _2019: createMetricPattern1(this, 'dca_class_2019_returns'), + _2020: createMetricPattern1(this, 'dca_class_2020_returns'), + _2021: createMetricPattern1(this, 'dca_class_2021_returns'), + _2022: createMetricPattern1(this, 'dca_class_2022_returns'), + _2023: createMetricPattern1(this, 'dca_class_2023_returns'), + _2024: createMetricPattern1(this, 'dca_class_2024_returns'), + _2025: createMetricPattern1(this, 'dca_class_2025_returns'), + _2026: createMetricPattern1(this, 'dca_class_2026_returns'), }, }, indicators: { @@ -7254,21 +7134,27 @@ class BrkClient extends BrkClientBase { piCycle: createMetricPattern1(this, 'pi_cycle'), macd: { _1d: { + emaFast: createMetricPattern1(this, 'macd_ema_fast_1d'), + emaSlow: createMetricPattern1(this, 'macd_ema_slow_1d'), line: createMetricPattern1(this, 'macd_line_1d'), signal: createMetricPattern1(this, 'macd_signal_1d'), histogram: createMetricPattern1(this, 'macd_histogram_1d'), }, _1w: { + emaFast: createMetricPattern1(this, 'macd_ema_fast_1w'), + emaSlow: createMetricPattern1(this, 'macd_ema_slow_1w'), line: createMetricPattern1(this, 'macd_line_1w'), signal: createMetricPattern1(this, 'macd_signal_1w'), 
histogram: createMetricPattern1(this, 'macd_histogram_1w'), }, _1m: { + emaFast: createMetricPattern1(this, 'macd_ema_fast_1m'), + emaSlow: createMetricPattern1(this, 'macd_ema_slow_1m'), line: createMetricPattern1(this, 'macd_line_1m'), signal: createMetricPattern1(this, 'macd_signal_1m'), histogram: createMetricPattern1(this, 'macd_histogram_1m'), }, - _1y: createHistogramLineSignalPattern(this, 'macd'), + _1y: createEmaHistogramLineSignalPattern(this, 'macd'), }, gini: createMetricPattern1(this, 'gini'), }, diff --git a/modules/brk-client/package.json b/modules/brk-client/package.json index f5447d19f..7a2e44be2 100644 --- a/modules/brk-client/package.json +++ b/modules/brk-client/package.json @@ -7,7 +7,7 @@ "test:basic": "node tests/basic.js", "test:tree": "node tests/tree.js" }, - "description": "BRK JavaScript client", + "description": "Bitcoin on-chain analytics client — thousands of metrics, block explorer, and address index", "engines": { "node": ">=18" }, @@ -23,7 +23,13 @@ "brk", "bitcoin", "blockchain", - "research" + "research", + "on-chain", + "analytics", + "metrics", + "api", + "data", + "cryptocurrency" ], "license": "MIT", "main": "index.js", diff --git a/packages/brk_client/brk_client/__init__.py b/packages/brk_client/brk_client/__init__.py index a35db6ca3..bfd287f03 100644 --- a/packages/brk_client/brk_client/__init__.py +++ b/packages/brk_client/brk_client/__init__.py @@ -2688,6 +2688,24 @@ class _10y1m1w1y2y3m3y4y5y6m6y8yPattern3: self._6y: BtcCentsSatsUsdPattern = BtcCentsSatsUsdPattern(client, _p('6y', acc)) self._8y: BtcCentsSatsUsdPattern = BtcCentsSatsUsdPattern(client, _p('8y', acc)) +class _10y1m1w1y2y3m3y4y5y6m6y8yPattern2: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self._10y: MetricPattern1[StoredF32] = MetricPattern1(client, _p('10y', acc)) + self._1m: MetricPattern1[StoredF32] = MetricPattern1(client, 
_p('1m', acc)) + self._1w: MetricPattern1[StoredF32] = MetricPattern1(client, _p('1w', acc)) + self._1y: MetricPattern1[StoredF32] = MetricPattern1(client, _p('1y', acc)) + self._2y: MetricPattern1[StoredF32] = MetricPattern1(client, _p('2y', acc)) + self._3m: MetricPattern1[StoredF32] = MetricPattern1(client, _p('3m', acc)) + self._3y: MetricPattern1[StoredF32] = MetricPattern1(client, _p('3y', acc)) + self._4y: MetricPattern1[StoredF32] = MetricPattern1(client, _p('4y', acc)) + self._5y: MetricPattern1[StoredF32] = MetricPattern1(client, _p('5y', acc)) + self._6m: MetricPattern1[StoredF32] = MetricPattern1(client, _p('6m', acc)) + self._6y: MetricPattern1[StoredF32] = MetricPattern1(client, _p('6y', acc)) + self._8y: MetricPattern1[StoredF32] = MetricPattern1(client, _p('8y', acc)) + class InvestedNegNetNuplSupplyUnrealizedPattern: """Pattern struct for repeated tree structure.""" @@ -2706,42 +2724,6 @@ class InvestedNegNetNuplSupplyUnrealizedPattern: self.unrealized_loss_rel_to_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_loss_rel_to_market_cap')) self.unrealized_profit_rel_to_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_profit_rel_to_market_cap')) -class _10y1m1w1y2y3m3y4y5y6m6y8yPattern2(Generic[T]): - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self._10y: MetricPattern1[T] = MetricPattern1(client, _p('10y', acc)) - self._1m: MetricPattern1[T] = MetricPattern1(client, _p('1m', acc)) - self._1w: MetricPattern1[T] = MetricPattern1(client, _p('1w', acc)) - self._1y: MetricPattern1[T] = MetricPattern1(client, _p('1y', acc)) - self._2y: MetricPattern1[T] = MetricPattern1(client, _p('2y', acc)) - self._3m: MetricPattern1[T] = MetricPattern1(client, _p('3m', acc)) - self._3y: MetricPattern1[T] = MetricPattern1(client, _p('3y', acc)) - self._4y: MetricPattern1[T] = 
MetricPattern1(client, _p('4y', acc)) - self._5y: MetricPattern1[T] = MetricPattern1(client, _p('5y', acc)) - self._6m: MetricPattern1[T] = MetricPattern1(client, _p('6m', acc)) - self._6y: MetricPattern1[T] = MetricPattern1(client, _p('6y', acc)) - self._8y: MetricPattern1[T] = MetricPattern1(client, _p('8y', acc)) - -class _201520162017201820192020202120222023202420252026Pattern2(Generic[T]): - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self._2015: MetricPattern1[T] = MetricPattern1(client, _m(acc, '2015_returns')) - self._2016: MetricPattern1[T] = MetricPattern1(client, _m(acc, '2016_returns')) - self._2017: MetricPattern1[T] = MetricPattern1(client, _m(acc, '2017_returns')) - self._2018: MetricPattern1[T] = MetricPattern1(client, _m(acc, '2018_returns')) - self._2019: MetricPattern1[T] = MetricPattern1(client, _m(acc, '2019_returns')) - self._2020: MetricPattern1[T] = MetricPattern1(client, _m(acc, '2020_returns')) - self._2021: MetricPattern1[T] = MetricPattern1(client, _m(acc, '2021_returns')) - self._2022: MetricPattern1[T] = MetricPattern1(client, _m(acc, '2022_returns')) - self._2023: MetricPattern1[T] = MetricPattern1(client, _m(acc, '2023_returns')) - self._2024: MetricPattern1[T] = MetricPattern1(client, _m(acc, '2024_returns')) - self._2025: MetricPattern1[T] = MetricPattern1(client, _m(acc, '2025_returns')) - self._2026: MetricPattern1[T] = MetricPattern1(client, _m(acc, '2026_returns')) - class AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90RollingSumPattern: """Pattern struct for repeated tree structure.""" @@ -3008,6 +2990,17 @@ class InvestedMaxMinPercentilesSpotPattern: self.spot_cost_basis_percentile: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'spot_cost_basis_percentile')) self.spot_invested_capital_percentile: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'spot_invested_capital_percentile')) 
+class EmaHistogramLineSignalPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.ema_fast: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'ema_fast_1y')) + self.ema_slow: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'ema_slow_1y')) + self.histogram: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'histogram_1y')) + self.line: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'line_1y')) + self.signal: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'signal_1y')) + class _1y24h30d7dPattern2: """Pattern struct for repeated tree structure.""" @@ -3074,15 +3067,6 @@ class CentsSatsUsdPattern: self.sats: MetricPattern1[SatsFract] = MetricPattern1(client, _m(acc, 'sats')) self.usd: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'usd')) -class HistogramLineSignalPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.histogram: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'histogram_1y')) - self.line: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'line_1y')) - self.signal: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'signal_1y')) - class _6bBlockTxindexPattern(Generic[T]): """Pattern struct for repeated tree structure.""" @@ -3171,7 +3155,7 @@ class MetricsTree_Blocks_Difficulty: def __init__(self, client: BrkClientBase, base_path: str = ''): self.raw: MetricPattern1[StoredF64] = MetricPattern1(client, 'difficulty') - self.as_hash: MetricPattern1[StoredF32] = MetricPattern1(client, 'difficulty_as_hash') + self.as_hash: MetricPattern1[StoredF64] = MetricPattern1(client, 'difficulty_as_hash') self.adjustment: MetricPattern1[StoredF32] = MetricPattern1(client, 'difficulty_adjustment') self.epoch: 
MetricPattern1[DifficultyEpoch] = MetricPattern1(client, 'difficulty_epoch') self.blocks_before_next_adjustment: MetricPattern1[StoredU32] = MetricPattern1(client, 'blocks_before_next_difficulty_adjustment') @@ -3223,22 +3207,34 @@ class MetricsTree_Blocks_Count: self.height_34d_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_34d_ago') self.height_55d_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_55d_ago') self.height_2m_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_2m_ago') + self.height_9w_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_9w_ago') + self.height_12w_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_12w_ago') self.height_89d_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_89d_ago') + self.height_3m_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_3m_ago') + self.height_14w_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_14w_ago') self.height_111d_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_111d_ago') self.height_144d_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_144d_ago') - self.height_3m_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_3m_ago') self.height_6m_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_6m_ago') + self.height_26w_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_26w_ago') self.height_200d_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_200d_ago') + self.height_9m_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_9m_ago') self.height_350d_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_350d_ago') + self.height_12m_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_12m_ago') self.height_1y_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_1y_ago') + self.height_14m_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_14m_ago') self.height_2y_ago: 
MetricPattern18[Height] = MetricPattern18(client, 'height_2y_ago') - self.height_200w_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_200w_ago') + self.height_26m_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_26m_ago') self.height_3y_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_3y_ago') + self.height_200w_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_200w_ago') self.height_4y_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_4y_ago') self.height_5y_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_5y_ago') self.height_6y_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_6y_ago') self.height_8y_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_8y_ago') + self.height_9y_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_9y_ago') self.height_10y_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_10y_ago') + self.height_12y_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_12y_ago') + self.height_14y_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_14y_ago') + self.height_26y_ago: MetricPattern18[Height] = MetricPattern18(client, 'height_26y_ago') class MetricsTree_Blocks_Halving: """Metrics tree node.""" @@ -3479,9 +3475,9 @@ class MetricsTree_Mining_Hashrate: def __init__(self, client: BrkClientBase, base_path: str = ''): self.hash_rate: MetricPattern1[StoredF64] = MetricPattern1(client, 'hash_rate') self.hash_rate_1w_sma: MetricPattern1[StoredF64] = MetricPattern1(client, 'hash_rate_1w_sma') - self.hash_rate_1m_sma: MetricPattern1[StoredF32] = MetricPattern1(client, 'hash_rate_1m_sma') - self.hash_rate_2m_sma: MetricPattern1[StoredF32] = MetricPattern1(client, 'hash_rate_2m_sma') - self.hash_rate_1y_sma: MetricPattern1[StoredF32] = MetricPattern1(client, 'hash_rate_1y_sma') + self.hash_rate_1m_sma: MetricPattern1[StoredF64] = MetricPattern1(client, 'hash_rate_1m_sma') + self.hash_rate_2m_sma: 
MetricPattern1[StoredF64] = MetricPattern1(client, 'hash_rate_2m_sma') + self.hash_rate_1y_sma: MetricPattern1[StoredF64] = MetricPattern1(client, 'hash_rate_1y_sma') self.hash_rate_ath: MetricPattern1[StoredF64] = MetricPattern1(client, 'hash_rate_ath') self.hash_rate_drawdown: MetricPattern1[StoredF32] = MetricPattern1(client, 'hash_rate_drawdown') self.hash_price_ths: MetricPattern1[StoredF32] = MetricPattern1(client, 'hash_price_ths') @@ -4061,73 +4057,22 @@ class MetricsTree_Market_Dca_ClassAveragePrice: self._2025: CentsSatsUsdPattern = CentsSatsUsdPattern(client, 'dca_class_2025_average_price') self._2026: CentsSatsUsdPattern = CentsSatsUsdPattern(client, 'dca_class_2026_average_price') -class MetricsTree_Market_Dca_ClassDaysInProfit: +class MetricsTree_Market_Dca_ClassReturns: """Metrics tree node.""" def __init__(self, client: BrkClientBase, base_path: str = ''): - self._2015: MetricPattern1[StoredU32] = MetricPattern1(client, 'dca_class_2015_days_in_profit') - self._2016: MetricPattern1[StoredU32] = MetricPattern1(client, 'dca_class_2016_days_in_profit') - self._2017: MetricPattern1[StoredU32] = MetricPattern1(client, 'dca_class_2017_days_in_profit') - self._2018: MetricPattern1[StoredU32] = MetricPattern1(client, 'dca_class_2018_days_in_profit') - self._2019: MetricPattern1[StoredU32] = MetricPattern1(client, 'dca_class_2019_days_in_profit') - self._2020: MetricPattern1[StoredU32] = MetricPattern1(client, 'dca_class_2020_days_in_profit') - self._2021: MetricPattern1[StoredU32] = MetricPattern1(client, 'dca_class_2021_days_in_profit') - self._2022: MetricPattern1[StoredU32] = MetricPattern1(client, 'dca_class_2022_days_in_profit') - self._2023: MetricPattern1[StoredU32] = MetricPattern1(client, 'dca_class_2023_days_in_profit') - self._2024: MetricPattern1[StoredU32] = MetricPattern1(client, 'dca_class_2024_days_in_profit') - self._2025: MetricPattern1[StoredU32] = MetricPattern1(client, 'dca_class_2025_days_in_profit') - self._2026: 
MetricPattern1[StoredU32] = MetricPattern1(client, 'dca_class_2026_days_in_profit') - -class MetricsTree_Market_Dca_ClassDaysInLoss: - """Metrics tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self._2015: MetricPattern1[StoredU32] = MetricPattern1(client, 'dca_class_2015_days_in_loss') - self._2016: MetricPattern1[StoredU32] = MetricPattern1(client, 'dca_class_2016_days_in_loss') - self._2017: MetricPattern1[StoredU32] = MetricPattern1(client, 'dca_class_2017_days_in_loss') - self._2018: MetricPattern1[StoredU32] = MetricPattern1(client, 'dca_class_2018_days_in_loss') - self._2019: MetricPattern1[StoredU32] = MetricPattern1(client, 'dca_class_2019_days_in_loss') - self._2020: MetricPattern1[StoredU32] = MetricPattern1(client, 'dca_class_2020_days_in_loss') - self._2021: MetricPattern1[StoredU32] = MetricPattern1(client, 'dca_class_2021_days_in_loss') - self._2022: MetricPattern1[StoredU32] = MetricPattern1(client, 'dca_class_2022_days_in_loss') - self._2023: MetricPattern1[StoredU32] = MetricPattern1(client, 'dca_class_2023_days_in_loss') - self._2024: MetricPattern1[StoredU32] = MetricPattern1(client, 'dca_class_2024_days_in_loss') - self._2025: MetricPattern1[StoredU32] = MetricPattern1(client, 'dca_class_2025_days_in_loss') - self._2026: MetricPattern1[StoredU32] = MetricPattern1(client, 'dca_class_2026_days_in_loss') - -class MetricsTree_Market_Dca_ClassMinReturn: - """Metrics tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self._2015: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2015_min_return') - self._2016: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2016_min_return') - self._2017: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2017_min_return') - self._2018: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2018_min_return') - self._2019: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2019_min_return') - 
self._2020: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2020_min_return') - self._2021: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2021_min_return') - self._2022: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2022_min_return') - self._2023: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2023_min_return') - self._2024: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2024_min_return') - self._2025: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2025_min_return') - self._2026: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2026_min_return') - -class MetricsTree_Market_Dca_ClassMaxReturn: - """Metrics tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self._2015: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2015_max_return') - self._2016: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2016_max_return') - self._2017: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2017_max_return') - self._2018: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2018_max_return') - self._2019: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2019_max_return') - self._2020: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2020_max_return') - self._2021: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2021_max_return') - self._2022: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2022_max_return') - self._2023: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2023_max_return') - self._2024: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2024_max_return') - self._2025: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2025_max_return') - self._2026: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2026_max_return') + self._2015: MetricPattern1[StoredF32] = MetricPattern1(client, 
'dca_class_2015_returns') + self._2016: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2016_returns') + self._2017: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2017_returns') + self._2018: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2018_returns') + self._2019: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2019_returns') + self._2020: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2020_returns') + self._2021: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2021_returns') + self._2022: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2022_returns') + self._2023: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2023_returns') + self._2024: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2024_returns') + self._2025: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2025_returns') + self._2026: MetricPattern1[StoredF32] = MetricPattern1(client, 'dca_class_2026_returns') class MetricsTree_Market_Dca: """Metrics tree node.""" @@ -4136,25 +4081,13 @@ class MetricsTree_Market_Dca: self.dca_sats_per_day: MetricPattern18[Sats] = MetricPattern18(client, 'dca_sats_per_day') self.period_stack: _10y1m1w1y2y3m3y4y5y6m6y8yPattern3 = _10y1m1w1y2y3m3y4y5y6m6y8yPattern3(client, 'dca_stack') self.period_average_price: MetricsTree_Market_Dca_PeriodAveragePrice = MetricsTree_Market_Dca_PeriodAveragePrice(client) - self.period_returns: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2[StoredF32] = _10y1m1w1y2y3m3y4y5y6m6y8yPattern2(client, 'dca_returns') + self.period_returns: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2 = _10y1m1w1y2y3m3y4y5y6m6y8yPattern2(client, 'dca_returns') self.period_cagr: _10y2y3y4y5y6y8yPattern = _10y2y3y4y5y6y8yPattern(client, 'dca_cagr') - self.period_days_in_profit: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2[StoredU32] = _10y1m1w1y2y3m3y4y5y6m6y8yPattern2(client, 'dca_days_in_profit') - self.period_days_in_loss: 
_10y1m1w1y2y3m3y4y5y6m6y8yPattern2[StoredU32] = _10y1m1w1y2y3m3y4y5y6m6y8yPattern2(client, 'dca_days_in_loss') - self.period_min_return: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2[StoredF32] = _10y1m1w1y2y3m3y4y5y6m6y8yPattern2(client, 'dca_min_return') - self.period_max_return: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2[StoredF32] = _10y1m1w1y2y3m3y4y5y6m6y8yPattern2(client, 'dca_max_return') self.period_lump_sum_stack: _10y1m1w1y2y3m3y4y5y6m6y8yPattern3 = _10y1m1w1y2y3m3y4y5y6m6y8yPattern3(client, 'lump_sum_stack') - self.period_lump_sum_returns: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2[StoredF32] = _10y1m1w1y2y3m3y4y5y6m6y8yPattern2(client, 'lump_sum_returns') - self.period_lump_sum_days_in_profit: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2[StoredU32] = _10y1m1w1y2y3m3y4y5y6m6y8yPattern2(client, 'lump_sum_days_in_profit') - self.period_lump_sum_days_in_loss: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2[StoredU32] = _10y1m1w1y2y3m3y4y5y6m6y8yPattern2(client, 'lump_sum_days_in_loss') - self.period_lump_sum_min_return: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2[StoredF32] = _10y1m1w1y2y3m3y4y5y6m6y8yPattern2(client, 'lump_sum_min_return') - self.period_lump_sum_max_return: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2[StoredF32] = _10y1m1w1y2y3m3y4y5y6m6y8yPattern2(client, 'lump_sum_max_return') + self.period_lump_sum_returns: _10y1m1w1y2y3m3y4y5y6m6y8yPattern2 = _10y1m1w1y2y3m3y4y5y6m6y8yPattern2(client, 'lump_sum_returns') self.class_stack: MetricsTree_Market_Dca_ClassStack = MetricsTree_Market_Dca_ClassStack(client) self.class_average_price: MetricsTree_Market_Dca_ClassAveragePrice = MetricsTree_Market_Dca_ClassAveragePrice(client) - self.class_returns: _201520162017201820192020202120222023202420252026Pattern2[StoredF32] = _201520162017201820192020202120222023202420252026Pattern2(client, 'dca_class') - self.class_days_in_profit: MetricsTree_Market_Dca_ClassDaysInProfit = MetricsTree_Market_Dca_ClassDaysInProfit(client) - self.class_days_in_loss: MetricsTree_Market_Dca_ClassDaysInLoss = 
MetricsTree_Market_Dca_ClassDaysInLoss(client) - self.class_min_return: MetricsTree_Market_Dca_ClassMinReturn = MetricsTree_Market_Dca_ClassMinReturn(client) - self.class_max_return: MetricsTree_Market_Dca_ClassMaxReturn = MetricsTree_Market_Dca_ClassMaxReturn(client) + self.class_returns: MetricsTree_Market_Dca_ClassReturns = MetricsTree_Market_Dca_ClassReturns(client) class MetricsTree_Market_Indicators_Rsi_1d: """Metrics tree node.""" @@ -4214,6 +4147,8 @@ class MetricsTree_Market_Indicators_Macd_1d: """Metrics tree node.""" def __init__(self, client: BrkClientBase, base_path: str = ''): + self.ema_fast: MetricPattern1[StoredF32] = MetricPattern1(client, 'macd_ema_fast_1d') + self.ema_slow: MetricPattern1[StoredF32] = MetricPattern1(client, 'macd_ema_slow_1d') self.line: MetricPattern1[StoredF32] = MetricPattern1(client, 'macd_line_1d') self.signal: MetricPattern1[StoredF32] = MetricPattern1(client, 'macd_signal_1d') self.histogram: MetricPattern1[StoredF32] = MetricPattern1(client, 'macd_histogram_1d') @@ -4222,6 +4157,8 @@ class MetricsTree_Market_Indicators_Macd_1w: """Metrics tree node.""" def __init__(self, client: BrkClientBase, base_path: str = ''): + self.ema_fast: MetricPattern1[StoredF32] = MetricPattern1(client, 'macd_ema_fast_1w') + self.ema_slow: MetricPattern1[StoredF32] = MetricPattern1(client, 'macd_ema_slow_1w') self.line: MetricPattern1[StoredF32] = MetricPattern1(client, 'macd_line_1w') self.signal: MetricPattern1[StoredF32] = MetricPattern1(client, 'macd_signal_1w') self.histogram: MetricPattern1[StoredF32] = MetricPattern1(client, 'macd_histogram_1w') @@ -4230,6 +4167,8 @@ class MetricsTree_Market_Indicators_Macd_1m: """Metrics tree node.""" def __init__(self, client: BrkClientBase, base_path: str = ''): + self.ema_fast: MetricPattern1[StoredF32] = MetricPattern1(client, 'macd_ema_fast_1m') + self.ema_slow: MetricPattern1[StoredF32] = MetricPattern1(client, 'macd_ema_slow_1m') self.line: MetricPattern1[StoredF32] = MetricPattern1(client, 
'macd_line_1m') self.signal: MetricPattern1[StoredF32] = MetricPattern1(client, 'macd_signal_1m') self.histogram: MetricPattern1[StoredF32] = MetricPattern1(client, 'macd_histogram_1m') @@ -4241,7 +4180,7 @@ class MetricsTree_Market_Indicators_Macd: self._1d: MetricsTree_Market_Indicators_Macd_1d = MetricsTree_Market_Indicators_Macd_1d(client) self._1w: MetricsTree_Market_Indicators_Macd_1w = MetricsTree_Market_Indicators_Macd_1w(client) self._1m: MetricsTree_Market_Indicators_Macd_1m = MetricsTree_Market_Indicators_Macd_1m(client) - self._1y: HistogramLineSignalPattern = HistogramLineSignalPattern(client, 'macd') + self._1y: EmaHistogramLineSignalPattern = EmaHistogramLineSignalPattern(client, 'macd') class MetricsTree_Market_Indicators: """Metrics tree node.""" diff --git a/packages/brk_client/pyproject.toml b/packages/brk_client/pyproject.toml index c0c1e9274..ea6ed9984 100644 --- a/packages/brk_client/pyproject.toml +++ b/packages/brk_client/pyproject.toml @@ -1,11 +1,11 @@ [project] name = "brk-client" version = "0.1.9" -description = "Python client for the Bitcoin Research Kit" +description = "Bitcoin on-chain analytics client — thousands of metrics, block explorer, and address index" readme = "README.md" requires-python = ">=3.9" license = "MIT" -keywords = ["bitcoin", "blockchain", "analytics", "on-chain"] +keywords = ["bitcoin", "blockchain", "analytics", "on-chain", "metrics", "api", "data", "cryptocurrency", "brk"] classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", diff --git a/website/.well-known/ai-plugin.json b/website/.well-known/ai-plugin.json new file mode 100644 index 000000000..1588d4a91 --- /dev/null +++ b/website/.well-known/ai-plugin.json @@ -0,0 +1,15 @@ +{ + "schema_version": "v1", + "name_for_human": "Bitcoin Research Kit", + "name_for_model": "bitcoin_research_kit", + "description_for_human": "Query Bitcoin on-chain analytics: thousands of metrics, block explorer, address index.", + 
"description_for_model": "Bitcoin on-chain analytics API. Search metrics via /api/metrics/search/{query}, fetch data via /api/metric/{metric}/{index}. No auth required.", + "auth": { "type": "none" }, + "api": { + "type": "openapi", + "url": "https://bitview.space/openapi.json" + }, + "logo_url": "https://bitview.space/assets/favicon-196.png", + "contact_email": "hello@bitcoinresearchkit.org", + "legal_info_url": "https://github.com/bitcoinresearchkit/brk/blob/main/docs/LICENSE.md" +} diff --git a/website/llms.txt b/website/llms.txt new file mode 100644 index 000000000..d15bbb9ae --- /dev/null +++ b/website/llms.txt @@ -0,0 +1,72 @@ +# Bitcoin Research Kit (BRK) + +> Open-source Bitcoin on-chain analytics API. Thousands of metrics, block explorer, address index — all computed from a Bitcoin Core node, including the price. + +Base URL: https://bitview.space + +## API + +Free, no auth required. JSON and CSV output. Mempool.space compatible for block/tx/address endpoints. + +- [Interactive docs](https://bitview.space/api) +- [OpenAPI spec (compact, LLM-optimized)](https://bitview.space/api.json) +- [OpenAPI spec (full)](https://bitview.space/openapi.json) + +## Discover + +Search for metrics by keyword: + + GET /api/metrics/search/{query} + +Browse all available metrics: + + GET /api/metrics/list + +List available indexes (date, height, etc.): + + GET /api/metrics/indexes + +Browse the full metric catalog as a tree: + + GET /api/metrics + +## Query + +Get a metric by name and index: + + GET /api/metric/{metric}/{index} + GET /api/metric/{metric}/{index}?start=-30 + +Fetch multiple metrics at once: + + GET /api/metrics/bulk?index={index}&metrics={metric1},{metric2} + +See the `MetricData` schema and query parameters (`start`, `end`, `limit`) in the [OpenAPI spec](https://bitview.space/api.json). 
+ +## Block Explorer + + GET /api/block-height/{height} + GET /api/block/{hash} + GET /api/tx/{txid} + GET /api/address/{address} + +## Mempool + + GET /api/mempool/price + GET /api/mempool/info + GET /api/v1/fees/recommended + +## Client Libraries + +- JavaScript: https://www.npmjs.com/package/brk-client +- Python: https://pypi.org/project/brk-client/ +- Rust: https://crates.io/crates/brk_client + +## Metric Categories + +Market, supply, mining, network activity, UTXO age bands, cointime economics, holder cohorts, cost basis distributions, and more. Use the discovery endpoints above to explore. + +## Source + +- GitHub: https://github.com/bitcoinresearchkit/brk +- License: MIT diff --git a/website/robots.txt b/website/robots.txt index 14267e903..cd2cf2066 100644 --- a/website/robots.txt +++ b/website/robots.txt @@ -1,2 +1,7 @@ User-agent: * -Allow: / \ No newline at end of file +Allow: / + +# LLM-friendly resources +# llms.txt: https://llmstxt.org/ +# api.json: Compact OpenAPI spec optimized for LLM consumption +# llms.txt index: https://bitview.space/llms.txt \ No newline at end of file