global: snapshot

This commit is contained in:
nym21
2026-01-04 11:51:22 +01:00
parent 3cae817915
commit 13ab7d39d7
26 changed files with 1696 additions and 1467 deletions

86
Cargo.lock generated
View File

@@ -211,7 +211,7 @@ checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -378,7 +378,7 @@ dependencies = [
"regex",
"rustc-hash",
"shlex",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -872,7 +872,7 @@ dependencies = [
"proc-macro2",
"quote",
"serde_json",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -1325,7 +1325,7 @@ version = "0.1.0-alpha.1"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -1518,7 +1518,7 @@ dependencies = [
"heck",
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -1845,7 +1845,7 @@ dependencies = [
"proc-macro2",
"quote",
"strsim",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -1856,7 +1856,7 @@ checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81"
dependencies = [
"darling_core",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -1890,7 +1890,7 @@ checksum = "1e567bd82dcff979e4b03460c307b3cdc9e96fde3d73bed1496d2bc75d9dd62a"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -1923,7 +1923,7 @@ dependencies = [
"proc-macro2",
"quote",
"rustc_version",
"syn 2.0.112",
"syn 2.0.113",
"unicode-xid",
]
@@ -1978,7 +1978,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -2041,7 +2041,7 @@ dependencies = [
"once_cell",
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -2265,7 +2265,7 @@ checksum = "1a5c6c585bc94aaf2c7b51dd4c2ba22680844aba4c687be581871a6f518c5742"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -2359,7 +2359,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -2851,7 +2851,7 @@ checksum = "b787bebb543f8969132630c51fd0afab173a86c6abae56ff3b9e5e3e3f9f6e58"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -3285,7 +3285,7 @@ dependencies = [
"proc-macro-crate",
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -3441,7 +3441,7 @@ checksum = "003b4612827f6501183873fb0735da92157e3c7daa71c40921c7d2758fec2229"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -3485,7 +3485,7 @@ dependencies = [
"phf",
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -3995,7 +3995,7 @@ dependencies = [
"phf_shared",
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -4024,7 +4024,7 @@ checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -4306,6 +4306,8 @@ dependencies = [
[[package]]
name = "rawdb"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63786b0b37f520a26c26787dc0de01a3c2a704706964455d556155a4deed2c6f"
dependencies = [
"libc",
"log",
@@ -4382,7 +4384,7 @@ checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -4593,7 +4595,7 @@ dependencies = [
"proc-macro2",
"quote",
"serde_derive_internals",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -4698,7 +4700,7 @@ checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -4709,7 +4711,7 @@ checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -4930,7 +4932,7 @@ dependencies = [
"heck",
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -4955,9 +4957,9 @@ dependencies = [
[[package]]
name = "syn"
version = "2.0.112"
version = "2.0.113"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21f182278bf2d2bcb3c88b1b08a37df029d71ce3d3ae26168e3c653b213b99d4"
checksum = "678faa00651c9eb72dd2020cbdf275d92eccb2400d568e419efdd64838145cb4"
dependencies = [
"proc-macro2",
"quote",
@@ -4978,7 +4980,7 @@ checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -5040,7 +5042,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -5051,7 +5053,7 @@ checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -5115,7 +5117,7 @@ checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -5261,7 +5263,7 @@ checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -5351,7 +5353,7 @@ checksum = "ee6ff59666c9cbaec3533964505d39154dc4e0a56151fdea30a09ed0301f62e2"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
"termcolor",
]
@@ -5498,6 +5500,8 @@ checksum = "8f54a172d0620933a27a4360d3db3e2ae0dd6cceae9730751a036bbf182c4b23"
[[package]]
name = "vecdb"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d4c6a6a5e26cf1c7f13e9ea470c94153514ea750e491fb3c5b2846c69db5d17f"
dependencies = [
"ctrlc",
"log",
@@ -5517,9 +5521,11 @@ dependencies = [
[[package]]
name = "vecdb_derive"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "374b601ba9563eaba6f46b452a905dee8e7cda6b3321f40be99e50cbccb36cd4"
dependencies = [
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -5606,7 +5612,7 @@ dependencies = [
"bumpalo",
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
"wasm-bindgen-shared",
]
@@ -5725,7 +5731,7 @@ checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -5736,7 +5742,7 @@ checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -6010,7 +6016,7 @@ checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
"synstructure",
]
@@ -6031,7 +6037,7 @@ checksum = "d8a8d209fdf45cf5138cbb5a506f6b52522a25afccc534d1475dad8e31105c6a"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]
@@ -6051,7 +6057,7 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
"synstructure",
]
@@ -6085,7 +6091,7 @@ checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.112",
"syn 2.0.113",
]
[[package]]

View File

@@ -81,8 +81,8 @@ serde_derive = "1.0.228"
serde_json = { version = "1.0.148", features = ["float_roundtrip"] }
smallvec = "1.15.1"
tokio = { version = "1.49.0", features = ["rt-multi-thread"] }
# vecdb = { version = "0.5.2", features = ["derive", "serde_json", "pco", "schemars"] }
vecdb = { path = "../anydb/crates/vecdb", features = ["derive", "serde_json", "pco", "schemars"] }
vecdb = { version = "0.5.4", features = ["derive", "serde_json", "pco", "schemars"] }
# vecdb = { path = "../anydb/crates/vecdb", features = ["derive", "serde_json", "pco", "schemars"] }
# vecdb = { git = "https://github.com/anydb-rs/anydb", features = ["derive", "serde_json", "pco"] }
[workspace.metadata.release]

File diff suppressed because it is too large Load Diff

View File

@@ -1,66 +1,71 @@
use std::ops::Range;
use brk_types::Age;
use brk_traversable::Traversable;
use rayon::iter::{IntoParallelIterator, ParallelIterator};
use serde::Serialize;
use super::{CohortName, Filter, TimeFilter};
// Age boundary constants in days
pub const DAYS_1D: usize = 1;
pub const DAYS_1W: usize = 7;
pub const DAYS_1M: usize = 30;
pub const DAYS_2M: usize = 2 * 30;
pub const DAYS_3M: usize = 3 * 30;
pub const DAYS_4M: usize = 4 * 30;
pub const DAYS_5M: usize = 5 * 30;
pub const DAYS_6M: usize = 6 * 30;
pub const DAYS_1Y: usize = 365;
pub const DAYS_2Y: usize = 2 * 365;
pub const DAYS_3Y: usize = 3 * 365;
pub const DAYS_4Y: usize = 4 * 365;
pub const DAYS_5Y: usize = 5 * 365;
pub const DAYS_6Y: usize = 6 * 365;
pub const DAYS_7Y: usize = 7 * 365;
pub const DAYS_8Y: usize = 8 * 365;
pub const DAYS_10Y: usize = 10 * 365;
pub const DAYS_12Y: usize = 12 * 365;
pub const DAYS_15Y: usize = 15 * 365;
// Age boundary constants in hours
pub const HOURS_1H: usize = 1;
pub const HOURS_1D: usize = 24;
pub const HOURS_1W: usize = 24 * 7;
pub const HOURS_1M: usize = 24 * 30;
pub const HOURS_2M: usize = 24 * 2 * 30;
pub const HOURS_3M: usize = 24 * 3 * 30;
pub const HOURS_4M: usize = 24 * 4 * 30;
pub const HOURS_5M: usize = 24 * 5 * 30; // STH/LTH threshold
pub const HOURS_6M: usize = 24 * 6 * 30;
pub const HOURS_1Y: usize = 24 * 365;
pub const HOURS_2Y: usize = 24 * 2 * 365;
pub const HOURS_3Y: usize = 24 * 3 * 365;
pub const HOURS_4Y: usize = 24 * 4 * 365;
pub const HOURS_5Y: usize = 24 * 5 * 365;
pub const HOURS_6Y: usize = 24 * 6 * 365;
pub const HOURS_7Y: usize = 24 * 7 * 365;
pub const HOURS_8Y: usize = 24 * 8 * 365;
pub const HOURS_10Y: usize = 24 * 10 * 365;
pub const HOURS_12Y: usize = 24 * 12 * 365;
pub const HOURS_15Y: usize = 24 * 15 * 365;
/// Age boundaries in days. Defines the cohort ranges:
/// [0, B[0]), [B[0], B[1]), [B[1], B[2]), ..., [B[n-1], ∞)
pub const AGE_BOUNDARIES: [usize; 19] = [
DAYS_1D, DAYS_1W, DAYS_1M, DAYS_2M, DAYS_3M, DAYS_4M, DAYS_5M, DAYS_6M, DAYS_1Y, DAYS_2Y,
DAYS_3Y, DAYS_4Y, DAYS_5Y, DAYS_6Y, DAYS_7Y, DAYS_8Y, DAYS_10Y, DAYS_12Y, DAYS_15Y,
/// Age boundaries in hours. Defines the cohort ranges:
/// [0, 1h), [1h, 1d), [1d, 1w), [1w, 1m), ..., [15y, ∞)
pub const AGE_BOUNDARIES: [usize; 20] = [
HOURS_1H, HOURS_1D, HOURS_1W, HOURS_1M, HOURS_2M, HOURS_3M, HOURS_4M,
HOURS_5M, HOURS_6M, HOURS_1Y, HOURS_2Y, HOURS_3Y, HOURS_4Y, HOURS_5Y,
HOURS_6Y, HOURS_7Y, HOURS_8Y, HOURS_10Y, HOURS_12Y, HOURS_15Y,
];
/// Age range bounds (end = usize::MAX means unbounded)
pub const AGE_RANGE_BOUNDS: ByAgeRange<Range<usize>> = ByAgeRange {
up_to_1d: 0..DAYS_1D,
_1d_to_1w: DAYS_1D..DAYS_1W,
_1w_to_1m: DAYS_1W..DAYS_1M,
_1m_to_2m: DAYS_1M..DAYS_2M,
_2m_to_3m: DAYS_2M..DAYS_3M,
_3m_to_4m: DAYS_3M..DAYS_4M,
_4m_to_5m: DAYS_4M..DAYS_5M,
_5m_to_6m: DAYS_5M..DAYS_6M,
_6m_to_1y: DAYS_6M..DAYS_1Y,
_1y_to_2y: DAYS_1Y..DAYS_2Y,
_2y_to_3y: DAYS_2Y..DAYS_3Y,
_3y_to_4y: DAYS_3Y..DAYS_4Y,
_4y_to_5y: DAYS_4Y..DAYS_5Y,
_5y_to_6y: DAYS_5Y..DAYS_6Y,
_6y_to_7y: DAYS_6Y..DAYS_7Y,
_7y_to_8y: DAYS_7Y..DAYS_8Y,
_8y_to_10y: DAYS_8Y..DAYS_10Y,
_10y_to_12y: DAYS_10Y..DAYS_12Y,
_12y_to_15y: DAYS_12Y..DAYS_15Y,
from_15y: DAYS_15Y..usize::MAX,
up_to_1h: 0..HOURS_1H,
_1h_to_1d: HOURS_1H..HOURS_1D,
_1d_to_1w: HOURS_1D..HOURS_1W,
_1w_to_1m: HOURS_1W..HOURS_1M,
_1m_to_2m: HOURS_1M..HOURS_2M,
_2m_to_3m: HOURS_2M..HOURS_3M,
_3m_to_4m: HOURS_3M..HOURS_4M,
_4m_to_5m: HOURS_4M..HOURS_5M,
_5m_to_6m: HOURS_5M..HOURS_6M,
_6m_to_1y: HOURS_6M..HOURS_1Y,
_1y_to_2y: HOURS_1Y..HOURS_2Y,
_2y_to_3y: HOURS_2Y..HOURS_3Y,
_3y_to_4y: HOURS_3Y..HOURS_4Y,
_4y_to_5y: HOURS_4Y..HOURS_5Y,
_5y_to_6y: HOURS_5Y..HOURS_6Y,
_6y_to_7y: HOURS_6Y..HOURS_7Y,
_7y_to_8y: HOURS_7Y..HOURS_8Y,
_8y_to_10y: HOURS_8Y..HOURS_10Y,
_10y_to_12y: HOURS_10Y..HOURS_12Y,
_12y_to_15y: HOURS_12Y..HOURS_15Y,
from_15y: HOURS_15Y..usize::MAX,
};
/// Age range filters
pub const AGE_RANGE_FILTERS: ByAgeRange<Filter> = ByAgeRange {
up_to_1d: Filter::Time(TimeFilter::Range(AGE_RANGE_BOUNDS.up_to_1d)),
up_to_1h: Filter::Time(TimeFilter::Range(AGE_RANGE_BOUNDS.up_to_1h)),
_1h_to_1d: Filter::Time(TimeFilter::Range(AGE_RANGE_BOUNDS._1h_to_1d)),
_1d_to_1w: Filter::Time(TimeFilter::Range(AGE_RANGE_BOUNDS._1d_to_1w)),
_1w_to_1m: Filter::Time(TimeFilter::Range(AGE_RANGE_BOUNDS._1w_to_1m)),
_1m_to_2m: Filter::Time(TimeFilter::Range(AGE_RANGE_BOUNDS._1m_to_2m)),
@@ -84,7 +89,8 @@ pub const AGE_RANGE_FILTERS: ByAgeRange<Filter> = ByAgeRange {
/// Age range names
pub const AGE_RANGE_NAMES: ByAgeRange<CohortName> = ByAgeRange {
up_to_1d: CohortName::new("up_to_1d_old", "<1d", "Up to 1 Day Old"),
up_to_1h: CohortName::new("up_to_1h_old", "<1h", "Up to 1 Hour Old"),
_1h_to_1d: CohortName::new("at_least_1h_up_to_1d_old", "1h-1d", "1 Hour to 1 Day Old"),
_1d_to_1w: CohortName::new("at_least_1d_up_to_1w_old", "1d-1w", "1 Day to 1 Week Old"),
_1w_to_1m: CohortName::new("at_least_1w_up_to_1m_old", "1w-1m", "1 Week to 1 Month Old"),
_1m_to_2m: CohortName::new("at_least_1m_up_to_2m_old", "1m-2m", "1 to 2 Months Old"),
@@ -114,7 +120,8 @@ impl ByAgeRange<CohortName> {
#[derive(Default, Clone, Traversable, Serialize)]
pub struct ByAgeRange<T> {
pub up_to_1d: T,
pub up_to_1h: T,
pub _1h_to_1d: T,
pub _1d_to_1w: T,
pub _1w_to_1m: T,
pub _1m_to_2m: T,
@@ -137,33 +144,62 @@ pub struct ByAgeRange<T> {
}
impl<T> ByAgeRange<T> {
/// Get mutable reference by days old. O(1).
/// Get mutable reference by Age. O(1).
#[inline]
pub fn get_mut_by_days_old(&mut self, days_old: usize) -> &mut T {
match days_old {
0..DAYS_1D => &mut self.up_to_1d,
DAYS_1D..DAYS_1W => &mut self._1d_to_1w,
DAYS_1W..DAYS_1M => &mut self._1w_to_1m,
DAYS_1M..DAYS_2M => &mut self._1m_to_2m,
DAYS_2M..DAYS_3M => &mut self._2m_to_3m,
DAYS_3M..DAYS_4M => &mut self._3m_to_4m,
DAYS_4M..DAYS_5M => &mut self._4m_to_5m,
DAYS_5M..DAYS_6M => &mut self._5m_to_6m,
DAYS_6M..DAYS_1Y => &mut self._6m_to_1y,
DAYS_1Y..DAYS_2Y => &mut self._1y_to_2y,
DAYS_2Y..DAYS_3Y => &mut self._2y_to_3y,
DAYS_3Y..DAYS_4Y => &mut self._3y_to_4y,
DAYS_4Y..DAYS_5Y => &mut self._4y_to_5y,
DAYS_5Y..DAYS_6Y => &mut self._5y_to_6y,
DAYS_6Y..DAYS_7Y => &mut self._6y_to_7y,
DAYS_7Y..DAYS_8Y => &mut self._7y_to_8y,
DAYS_8Y..DAYS_10Y => &mut self._8y_to_10y,
DAYS_10Y..DAYS_12Y => &mut self._10y_to_12y,
DAYS_12Y..DAYS_15Y => &mut self._12y_to_15y,
pub fn get_mut(&mut self, age: Age) -> &mut T {
match age.hours() {
0..HOURS_1H => &mut self.up_to_1h,
HOURS_1H..HOURS_1D => &mut self._1h_to_1d,
HOURS_1D..HOURS_1W => &mut self._1d_to_1w,
HOURS_1W..HOURS_1M => &mut self._1w_to_1m,
HOURS_1M..HOURS_2M => &mut self._1m_to_2m,
HOURS_2M..HOURS_3M => &mut self._2m_to_3m,
HOURS_3M..HOURS_4M => &mut self._3m_to_4m,
HOURS_4M..HOURS_5M => &mut self._4m_to_5m,
HOURS_5M..HOURS_6M => &mut self._5m_to_6m,
HOURS_6M..HOURS_1Y => &mut self._6m_to_1y,
HOURS_1Y..HOURS_2Y => &mut self._1y_to_2y,
HOURS_2Y..HOURS_3Y => &mut self._2y_to_3y,
HOURS_3Y..HOURS_4Y => &mut self._3y_to_4y,
HOURS_4Y..HOURS_5Y => &mut self._4y_to_5y,
HOURS_5Y..HOURS_6Y => &mut self._5y_to_6y,
HOURS_6Y..HOURS_7Y => &mut self._6y_to_7y,
HOURS_7Y..HOURS_8Y => &mut self._7y_to_8y,
HOURS_8Y..HOURS_10Y => &mut self._8y_to_10y,
HOURS_10Y..HOURS_12Y => &mut self._10y_to_12y,
HOURS_12Y..HOURS_15Y => &mut self._12y_to_15y,
_ => &mut self.from_15y,
}
}
/// Get reference by Age. O(1).
#[inline]
pub fn get(&self, age: Age) -> &T {
match age.hours() {
0..HOURS_1H => &self.up_to_1h,
HOURS_1H..HOURS_1D => &self._1h_to_1d,
HOURS_1D..HOURS_1W => &self._1d_to_1w,
HOURS_1W..HOURS_1M => &self._1w_to_1m,
HOURS_1M..HOURS_2M => &self._1m_to_2m,
HOURS_2M..HOURS_3M => &self._2m_to_3m,
HOURS_3M..HOURS_4M => &self._3m_to_4m,
HOURS_4M..HOURS_5M => &self._4m_to_5m,
HOURS_5M..HOURS_6M => &self._5m_to_6m,
HOURS_6M..HOURS_1Y => &self._6m_to_1y,
HOURS_1Y..HOURS_2Y => &self._1y_to_2y,
HOURS_2Y..HOURS_3Y => &self._2y_to_3y,
HOURS_3Y..HOURS_4Y => &self._3y_to_4y,
HOURS_4Y..HOURS_5Y => &self._4y_to_5y,
HOURS_5Y..HOURS_6Y => &self._5y_to_6y,
HOURS_6Y..HOURS_7Y => &self._6y_to_7y,
HOURS_7Y..HOURS_8Y => &self._7y_to_8y,
HOURS_8Y..HOURS_10Y => &self._8y_to_10y,
HOURS_10Y..HOURS_12Y => &self._10y_to_12y,
HOURS_12Y..HOURS_15Y => &self._12y_to_15y,
_ => &self.from_15y,
}
}
pub fn new<F>(mut create: F) -> Self
where
F: FnMut(Filter, &'static str) -> T,
@@ -171,7 +207,8 @@ impl<T> ByAgeRange<T> {
let f = AGE_RANGE_FILTERS;
let n = AGE_RANGE_NAMES;
Self {
up_to_1d: create(f.up_to_1d.clone(), n.up_to_1d.id),
up_to_1h: create(f.up_to_1h.clone(), n.up_to_1h.id),
_1h_to_1d: create(f._1h_to_1d.clone(), n._1h_to_1d.id),
_1d_to_1w: create(f._1d_to_1w.clone(), n._1d_to_1w.id),
_1w_to_1m: create(f._1w_to_1m.clone(), n._1w_to_1m.id),
_1m_to_2m: create(f._1m_to_2m.clone(), n._1m_to_2m.id),
@@ -201,7 +238,8 @@ impl<T> ByAgeRange<T> {
let f = AGE_RANGE_FILTERS;
let n = AGE_RANGE_NAMES;
Ok(Self {
up_to_1d: create(f.up_to_1d.clone(), n.up_to_1d.id)?,
up_to_1h: create(f.up_to_1h.clone(), n.up_to_1h.id)?,
_1h_to_1d: create(f._1h_to_1d.clone(), n._1h_to_1d.id)?,
_1d_to_1w: create(f._1d_to_1w.clone(), n._1d_to_1w.id)?,
_1w_to_1m: create(f._1w_to_1m.clone(), n._1w_to_1m.id)?,
_1m_to_2m: create(f._1m_to_2m.clone(), n._1m_to_2m.id)?,
@@ -226,7 +264,8 @@ impl<T> ByAgeRange<T> {
pub fn iter(&self) -> impl Iterator<Item = &T> {
[
&self.up_to_1d,
&self.up_to_1h,
&self._1h_to_1d,
&self._1d_to_1w,
&self._1w_to_1m,
&self._1m_to_2m,
@@ -252,7 +291,8 @@ impl<T> ByAgeRange<T> {
pub fn iter_mut(&mut self) -> impl Iterator<Item = &mut T> {
[
&mut self.up_to_1d,
&mut self.up_to_1h,
&mut self._1h_to_1d,
&mut self._1d_to_1w,
&mut self._1w_to_1m,
&mut self._1m_to_2m,
@@ -281,7 +321,8 @@ impl<T> ByAgeRange<T> {
T: Send + Sync,
{
[
&mut self.up_to_1d,
&mut self.up_to_1h,
&mut self._1h_to_1d,
&mut self._1d_to_1w,
&mut self._1w_to_1m,
&mut self._1m_to_2m,
@@ -305,4 +346,3 @@ impl<T> ByAgeRange<T> {
.into_par_iter()
}
}

View File

@@ -3,53 +3,53 @@ use rayon::prelude::*;
use serde::Serialize;
use super::{
CohortName, Filter, TimeFilter, DAYS_10Y, DAYS_12Y, DAYS_15Y, DAYS_1M, DAYS_1W, DAYS_1Y,
DAYS_2M, DAYS_2Y, DAYS_3M, DAYS_3Y, DAYS_4M, DAYS_4Y, DAYS_5M, DAYS_5Y, DAYS_6M, DAYS_6Y,
DAYS_7Y, DAYS_8Y,
CohortName, Filter, TimeFilter, HOURS_10Y, HOURS_12Y, HOURS_15Y, HOURS_1M, HOURS_1W, HOURS_1Y,
HOURS_2M, HOURS_2Y, HOURS_3M, HOURS_3Y, HOURS_4M, HOURS_4Y, HOURS_5M, HOURS_5Y, HOURS_6M,
HOURS_6Y, HOURS_7Y, HOURS_8Y,
};
/// Max age thresholds in days
pub const MAX_AGE_DAYS: ByMaxAge<usize> = ByMaxAge {
_1w: DAYS_1W,
_1m: DAYS_1M,
_2m: DAYS_2M,
_3m: DAYS_3M,
_4m: DAYS_4M,
_5m: DAYS_5M,
_6m: DAYS_6M,
_1y: DAYS_1Y,
_2y: DAYS_2Y,
_3y: DAYS_3Y,
_4y: DAYS_4Y,
_5y: DAYS_5Y,
_6y: DAYS_6Y,
_7y: DAYS_7Y,
_8y: DAYS_8Y,
_10y: DAYS_10Y,
_12y: DAYS_12Y,
_15y: DAYS_15Y,
/// Max age thresholds in hours
pub const MAX_AGE_HOURS: ByMaxAge<usize> = ByMaxAge {
_1w: HOURS_1W,
_1m: HOURS_1M,
_2m: HOURS_2M,
_3m: HOURS_3M,
_4m: HOURS_4M,
_5m: HOURS_5M,
_6m: HOURS_6M,
_1y: HOURS_1Y,
_2y: HOURS_2Y,
_3y: HOURS_3Y,
_4y: HOURS_4Y,
_5y: HOURS_5Y,
_6y: HOURS_6Y,
_7y: HOURS_7Y,
_8y: HOURS_8Y,
_10y: HOURS_10Y,
_12y: HOURS_12Y,
_15y: HOURS_15Y,
};
/// Max age filters (LowerThan threshold)
/// Max age filters (LowerThan threshold in hours)
pub const MAX_AGE_FILTERS: ByMaxAge<Filter> = ByMaxAge {
_1w: Filter::Time(TimeFilter::LowerThan(MAX_AGE_DAYS._1w)),
_1m: Filter::Time(TimeFilter::LowerThan(MAX_AGE_DAYS._1m)),
_2m: Filter::Time(TimeFilter::LowerThan(MAX_AGE_DAYS._2m)),
_3m: Filter::Time(TimeFilter::LowerThan(MAX_AGE_DAYS._3m)),
_4m: Filter::Time(TimeFilter::LowerThan(MAX_AGE_DAYS._4m)),
_5m: Filter::Time(TimeFilter::LowerThan(MAX_AGE_DAYS._5m)),
_6m: Filter::Time(TimeFilter::LowerThan(MAX_AGE_DAYS._6m)),
_1y: Filter::Time(TimeFilter::LowerThan(MAX_AGE_DAYS._1y)),
_2y: Filter::Time(TimeFilter::LowerThan(MAX_AGE_DAYS._2y)),
_3y: Filter::Time(TimeFilter::LowerThan(MAX_AGE_DAYS._3y)),
_4y: Filter::Time(TimeFilter::LowerThan(MAX_AGE_DAYS._4y)),
_5y: Filter::Time(TimeFilter::LowerThan(MAX_AGE_DAYS._5y)),
_6y: Filter::Time(TimeFilter::LowerThan(MAX_AGE_DAYS._6y)),
_7y: Filter::Time(TimeFilter::LowerThan(MAX_AGE_DAYS._7y)),
_8y: Filter::Time(TimeFilter::LowerThan(MAX_AGE_DAYS._8y)),
_10y: Filter::Time(TimeFilter::LowerThan(MAX_AGE_DAYS._10y)),
_12y: Filter::Time(TimeFilter::LowerThan(MAX_AGE_DAYS._12y)),
_15y: Filter::Time(TimeFilter::LowerThan(MAX_AGE_DAYS._15y)),
_1w: Filter::Time(TimeFilter::LowerThan(MAX_AGE_HOURS._1w)),
_1m: Filter::Time(TimeFilter::LowerThan(MAX_AGE_HOURS._1m)),
_2m: Filter::Time(TimeFilter::LowerThan(MAX_AGE_HOURS._2m)),
_3m: Filter::Time(TimeFilter::LowerThan(MAX_AGE_HOURS._3m)),
_4m: Filter::Time(TimeFilter::LowerThan(MAX_AGE_HOURS._4m)),
_5m: Filter::Time(TimeFilter::LowerThan(MAX_AGE_HOURS._5m)),
_6m: Filter::Time(TimeFilter::LowerThan(MAX_AGE_HOURS._6m)),
_1y: Filter::Time(TimeFilter::LowerThan(MAX_AGE_HOURS._1y)),
_2y: Filter::Time(TimeFilter::LowerThan(MAX_AGE_HOURS._2y)),
_3y: Filter::Time(TimeFilter::LowerThan(MAX_AGE_HOURS._3y)),
_4y: Filter::Time(TimeFilter::LowerThan(MAX_AGE_HOURS._4y)),
_5y: Filter::Time(TimeFilter::LowerThan(MAX_AGE_HOURS._5y)),
_6y: Filter::Time(TimeFilter::LowerThan(MAX_AGE_HOURS._6y)),
_7y: Filter::Time(TimeFilter::LowerThan(MAX_AGE_HOURS._7y)),
_8y: Filter::Time(TimeFilter::LowerThan(MAX_AGE_HOURS._8y)),
_10y: Filter::Time(TimeFilter::LowerThan(MAX_AGE_HOURS._10y)),
_12y: Filter::Time(TimeFilter::LowerThan(MAX_AGE_HOURS._12y)),
_15y: Filter::Time(TimeFilter::LowerThan(MAX_AGE_HOURS._15y)),
};
/// Max age names

View File

@@ -3,53 +3,53 @@ use rayon::prelude::*;
use serde::Serialize;
use super::{
CohortName, Filter, TimeFilter, DAYS_10Y, DAYS_12Y, DAYS_1D, DAYS_1M, DAYS_1W, DAYS_1Y,
DAYS_2M, DAYS_2Y, DAYS_3M, DAYS_3Y, DAYS_4M, DAYS_4Y, DAYS_5M, DAYS_5Y, DAYS_6M, DAYS_6Y,
DAYS_7Y, DAYS_8Y,
CohortName, Filter, TimeFilter, HOURS_10Y, HOURS_12Y, HOURS_1D, HOURS_1M, HOURS_1W, HOURS_1Y,
HOURS_2M, HOURS_2Y, HOURS_3M, HOURS_3Y, HOURS_4M, HOURS_4Y, HOURS_5M, HOURS_5Y, HOURS_6M,
HOURS_6Y, HOURS_7Y, HOURS_8Y,
};
/// Min age thresholds in days
pub const MIN_AGE_DAYS: ByMinAge<usize> = ByMinAge {
_1d: DAYS_1D,
_1w: DAYS_1W,
_1m: DAYS_1M,
_2m: DAYS_2M,
_3m: DAYS_3M,
_4m: DAYS_4M,
_5m: DAYS_5M,
_6m: DAYS_6M,
_1y: DAYS_1Y,
_2y: DAYS_2Y,
_3y: DAYS_3Y,
_4y: DAYS_4Y,
_5y: DAYS_5Y,
_6y: DAYS_6Y,
_7y: DAYS_7Y,
_8y: DAYS_8Y,
_10y: DAYS_10Y,
_12y: DAYS_12Y,
/// Min age thresholds in hours
pub const MIN_AGE_HOURS: ByMinAge<usize> = ByMinAge {
_1d: HOURS_1D,
_1w: HOURS_1W,
_1m: HOURS_1M,
_2m: HOURS_2M,
_3m: HOURS_3M,
_4m: HOURS_4M,
_5m: HOURS_5M,
_6m: HOURS_6M,
_1y: HOURS_1Y,
_2y: HOURS_2Y,
_3y: HOURS_3Y,
_4y: HOURS_4Y,
_5y: HOURS_5Y,
_6y: HOURS_6Y,
_7y: HOURS_7Y,
_8y: HOURS_8Y,
_10y: HOURS_10Y,
_12y: HOURS_12Y,
};
/// Min age filters (GreaterOrEqual threshold)
/// Min age filters (GreaterOrEqual threshold in hours)
pub const MIN_AGE_FILTERS: ByMinAge<Filter> = ByMinAge {
_1d: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_DAYS._1d)),
_1w: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_DAYS._1w)),
_1m: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_DAYS._1m)),
_2m: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_DAYS._2m)),
_3m: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_DAYS._3m)),
_4m: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_DAYS._4m)),
_5m: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_DAYS._5m)),
_6m: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_DAYS._6m)),
_1y: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_DAYS._1y)),
_2y: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_DAYS._2y)),
_3y: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_DAYS._3y)),
_4y: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_DAYS._4y)),
_5y: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_DAYS._5y)),
_6y: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_DAYS._6y)),
_7y: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_DAYS._7y)),
_8y: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_DAYS._8y)),
_10y: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_DAYS._10y)),
_12y: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_DAYS._12y)),
_1d: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_HOURS._1d)),
_1w: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_HOURS._1w)),
_1m: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_HOURS._1m)),
_2m: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_HOURS._2m)),
_3m: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_HOURS._3m)),
_4m: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_HOURS._4m)),
_5m: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_HOURS._5m)),
_6m: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_HOURS._6m)),
_1y: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_HOURS._1y)),
_2y: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_HOURS._2y)),
_3y: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_HOURS._3y)),
_4y: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_HOURS._4y)),
_5y: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_HOURS._5y)),
_6y: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_HOURS._6y)),
_7y: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_HOURS._7y)),
_8y: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_HOURS._8y)),
_10y: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_HOURS._10y)),
_12y: Filter::Time(TimeFilter::GreaterOrEqual(MIN_AGE_HOURS._12y)),
};
/// Min age names

View File

@@ -29,13 +29,13 @@ impl Filter {
}
}
/// Check if a time value (days) is contained by this filter
pub fn contains_time(&self, days: usize) -> bool {
/// Check if a time value (hours) is contained by this filter
pub fn contains_time(&self, hours: usize) -> bool {
match self {
Filter::All => true,
Filter::Term(Term::Sth) => days < Term::THRESHOLD_DAYS,
Filter::Term(Term::Lth) => days >= Term::THRESHOLD_DAYS,
Filter::Time(t) => t.contains(days),
Filter::Term(Term::Sth) => hours < Term::THRESHOLD_HOURS,
Filter::Term(Term::Lth) => hours >= Term::THRESHOLD_HOURS,
Filter::Time(t) => t.contains(hours),
_ => false,
}
}
@@ -54,12 +54,12 @@ impl Filter {
match (self, other) {
(Filter::All, _) => true,
(Filter::Term(Term::Sth), Filter::Time(t)) => {
matches!(t, TimeFilter::LowerThan(d) if *d <= Term::THRESHOLD_DAYS)
|| matches!(t, TimeFilter::Range(r) if r.end <= Term::THRESHOLD_DAYS)
matches!(t, TimeFilter::LowerThan(h) if *h <= Term::THRESHOLD_HOURS)
|| matches!(t, TimeFilter::Range(r) if r.end <= Term::THRESHOLD_HOURS)
}
(Filter::Term(Term::Lth), Filter::Time(t)) => {
matches!(t, TimeFilter::GreaterOrEqual(d) if *d >= Term::THRESHOLD_DAYS)
|| matches!(t, TimeFilter::Range(r) if r.start >= Term::THRESHOLD_DAYS)
matches!(t, TimeFilter::GreaterOrEqual(h) if *h >= Term::THRESHOLD_HOURS)
|| matches!(t, TimeFilter::Range(r) if r.start >= Term::THRESHOLD_HOURS)
}
(Filter::Time(t1), Filter::Time(t2)) => t1.includes(t2),
(Filter::Amount(a1), Filter::Amount(a2)) => a1.includes(a2),
@@ -89,17 +89,17 @@ impl Filter {
}
/// Whether to compute adjusted metrics (adjusted SOPR, adjusted value created/destroyed)
/// For UTXO context: true for All, Term, max_age (LowerThan), and up_to_1d age range
/// For UTXO context: true for All, STH, and max_age (LowerThan)
/// For Address context: always false
/// Note: LTH doesn't need adjusted (everything >= 5 months is already > 1 hour)
/// Note: age ranges don't need adjusted (0-1h data lives in its own cohort)
pub fn compute_adjusted(&self, context: CohortContext) -> bool {
match context {
CohortContext::Address => false,
CohortContext::Utxo => match self {
Filter::All | Filter::Term(_) => true,
Filter::Time(TimeFilter::LowerThan(_)) => true,
Filter::Time(TimeFilter::Range(r)) if r.start == 0 => true,
_ => false,
},
CohortContext::Utxo => matches!(
self,
Filter::All | Filter::Term(Term::Sth) | Filter::Time(TimeFilter::LowerThan(_))
),
}
}
}

View File

@@ -21,10 +21,11 @@ mod cohort_name;
mod filter;
mod filtered;
mod state_level;
mod term;
mod time_filter;
mod utxo;
pub use brk_types::{Age, Term};
pub use address::*;
pub use amount_filter::*;
pub use by_address_type::*;
@@ -33,7 +34,6 @@ pub use by_amount_range::*;
pub use by_any_address::*;
pub use by_epoch::*;
pub use by_ge_amount::*;
pub use by_year::*;
pub use by_lt_amount::*;
pub use by_max_age::*;
pub use by_min_age::*;
@@ -41,11 +41,11 @@ pub use by_spendable_type::*;
pub use by_term::*;
pub use by_type::*;
pub use by_unspendable_type::*;
pub use by_year::*;
pub use cohort_context::*;
pub use cohort_name::*;
pub use filter::*;
pub use filtered::*;
pub use state_level::*;
pub use term::*;
pub use time_filter::*;
pub use utxo::*;

View File

@@ -1,7 +1,7 @@
use brk_cohort::{AmountBucket, ByAddressType};
use brk_error::Result;
use brk_types::{CheckedSub, Dollars, Height, Sats, Timestamp, TypeIndex};
use vecdb::{VecIndex, unlikely};
use brk_types::{Age, CheckedSub, Dollars, Height, Sats, Timestamp, TypeIndex};
use vecdb::{unlikely, VecIndex};
use crate::distribution::{address::HeightToAddressTypeToVec, cohorts::AddressCohorts};
@@ -11,7 +11,7 @@ use super::super::cache::AddressLookup;
///
/// For each spent UTXO:
/// 1. Look up address data
/// 2. Calculate age metrics (blocks_old, days_old)
/// 2. Calculate age metrics
/// 3. Update address balance and cohort membership
/// 4. Handle addresses becoming empty
///
@@ -33,13 +33,8 @@ pub fn process_sent(
for (prev_height, by_type) in sent_data.into_iter() {
let prev_price = height_to_price.map(|v| v[prev_height.to_usize()]);
let prev_timestamp = height_to_timestamp[prev_height.to_usize()];
let blocks_old = current_height.to_usize() - prev_height.to_usize();
let days_old = current_timestamp.difference_in_days_between_float(prev_timestamp);
let older_than_hour = current_timestamp
.checked_sub(prev_timestamp)
.unwrap()
.is_more_than_hour();
let age = Age::new(current_timestamp, prev_timestamp, blocks_old);
for (output_type, vec) in by_type.unwrap().into_iter() {
// Cache mutable refs for this address type
@@ -123,15 +118,7 @@ pub fn process_sent(
.state
.as_mut()
.unwrap()
.send(
addr_data,
value,
current_price,
prev_price,
blocks_old,
days_old,
older_than_hour,
)?;
.send(addr_data, value, current_price, prev_price, age)?;
}
}
}

View File

@@ -72,6 +72,7 @@ impl AddressCohortVecs {
version,
indexes,
price,
up_to_1h_realized: None,
};
let height_to_addr_count =

View File

@@ -38,7 +38,33 @@ impl UTXOCohorts {
) -> Result<Self> {
let v = version + VERSION;
// Create "all" cohort first - it doesn't need global sources (it IS the global source)
// Phase 1: Import base cohorts that don't need adjusted (age_range, amount_range, etc.)
// These are the source cohorts for overlapping computations.
let base = |f: Filter, name: &'static str| {
UTXOCohortVecs::forced_import(
db,
f,
name,
v,
indexes,
price,
states_path,
StateLevel::Full,
None,
None,
)
};
let age_range = ByAgeRange::try_new(&base)?;
let amount_range = ByAmountRange::try_new(&base)?;
let epoch = ByEpoch::try_new(&base)?;
let year = ByYear::try_new(&base)?;
let type_ = BySpendableType::try_new(&base)?;
// Get up_to_1h realized for adjusted computation (cohort - up_to_1h)
let up_to_1h_realized = age_range.up_to_1h.metrics.realized.as_ref();
// Phase 2: Import "all" cohort (needs up_to_1h for adjusted, is global supply source)
let all = UTXOCohortVecs::forced_import(
db,
Filter::All,
@@ -49,13 +75,13 @@ impl UTXOCohorts {
states_path,
StateLevel::PriceOnly,
None,
up_to_1h_realized,
)?;
// Get reference to all's supply for other cohorts to use as global source
let all_supply = Some(&all.metrics.supply);
// Create all cohorts first (while borrowing all_supply), then assemble struct
let price_only = |f: Filter, name: &'static str| {
// Phase 3: Import cohorts that need adjusted and/or all_supply
let price_only_adjusted = |f: Filter, name: &'static str| {
UTXOCohortVecs::forced_import(
db,
f,
@@ -66,12 +92,13 @@ impl UTXOCohorts {
states_path,
StateLevel::PriceOnly,
all_supply,
up_to_1h_realized,
)
};
let term = ByTerm::try_new(&price_only)?;
let term = ByTerm::try_new(&price_only_adjusted)?;
let full = |f: Filter, name: &'static str| {
let none_adjusted = |f: Filter, name: &'static str| {
UTXOCohortVecs::forced_import(
db,
f,
@@ -80,10 +107,15 @@ impl UTXOCohorts {
indexes,
price,
states_path,
StateLevel::Full,
StateLevel::None,
all_supply,
up_to_1h_realized,
)
};
let max_age = ByMaxAge::try_new(&none_adjusted)?;
// Phase 4: Import remaining cohorts (no adjusted needed)
let none = |f: Filter, name: &'static str| {
UTXOCohortVecs::forced_import(
db,
@@ -95,16 +127,11 @@ impl UTXOCohorts {
states_path,
StateLevel::None,
all_supply,
None,
)
};
let epoch = ByEpoch::try_new(&full)?;
let year = ByYear::try_new(&full)?;
let type_ = BySpendableType::try_new(&full)?;
let max_age = ByMaxAge::try_new(&none)?;
let min_age = ByMinAge::try_new(&none)?;
let age_range = ByAgeRange::try_new(&full)?;
let amount_range = ByAmountRange::try_new(&full)?;
let lt_amount = ByLowerThanAmount::try_new(&none)?;
let ge_amount = ByGreatEqualAmount::try_new(&none)?;

View File

@@ -8,7 +8,7 @@ impl UTXOCohorts {
/// Process received outputs for this block.
///
/// New UTXOs are added to:
/// - The "up_to_1d" age cohort (all new UTXOs start at 0 days old)
/// - The "up_to_1h" age cohort (all new UTXOs start at 0 hours old)
/// - The appropriate epoch cohort based on block height
/// - The appropriate year cohort based on block timestamp
/// - The appropriate output type cohort (P2PKH, P2SH, etc.)
@@ -22,9 +22,9 @@ impl UTXOCohorts {
) {
let supply_state = received.spendable_supply;
// New UTXOs go into up_to_1d, current epoch, and current year
// New UTXOs go into up_to_1h, current epoch, and current year
[
&mut self.0.age_range.up_to_1d,
&mut self.0.age_range.up_to_1h,
self.0.epoch.mut_vec_from_height(height),
self.0.year.mut_vec_from_timestamp(timestamp),
]

View File

@@ -1,4 +1,4 @@
use brk_types::{CheckedSub, Height};
use brk_types::{Age, Height};
use rustc_hash::FxHashMap;
use vecdb::VecIndex;
@@ -35,37 +35,22 @@ impl UTXOCohorts {
let block_state = &chain_state[height.to_usize()];
let prev_price = block_state.price;
let blocks_old = chain_len - 1 - height.to_usize();
let days_old = last_timestamp.difference_in_days_between(block_state.timestamp);
let days_old_float =
last_timestamp.difference_in_days_between_float(block_state.timestamp);
let older_than_hour = last_timestamp
.checked_sub(block_state.timestamp)
.unwrap()
.is_more_than_hour();
let age = Age::new(last_timestamp, block_state.timestamp, blocks_old);
// Update age range cohort (direct index lookup)
self.0
.age_range
.get_mut_by_days_old(days_old)
.state
.um()
.send(
&sent.spendable_supply,
current_price,
prev_price,
blocks_old,
days_old_float,
older_than_hour,
);
self.0.age_range.get_mut(age).state.um().send(
&sent.spendable_supply,
current_price,
prev_price,
age,
);
// Update epoch cohort (direct lookup by height)
self.0.epoch.mut_vec_from_height(height).state.um().send(
&sent.spendable_supply,
current_price,
prev_price,
blocks_old,
days_old_float,
older_than_hour,
age,
);
// Update year cohort (direct lookup by timestamp)
@@ -74,42 +59,31 @@ impl UTXOCohorts {
.mut_vec_from_timestamp(block_state.timestamp)
.state
.um()
.send(
&sent.spendable_supply,
current_price,
prev_price,
blocks_old,
days_old_float,
older_than_hour,
);
.send(&sent.spendable_supply, current_price, prev_price, age);
// Update output type cohorts
sent.by_type
.spendable
.iter_typed()
.for_each(|(output_type, supply_state)| {
self.0.type_.get_mut(output_type).state.um().send(
supply_state,
current_price,
prev_price,
blocks_old,
days_old_float,
older_than_hour,
)
self.0
.type_
.get_mut(output_type)
.state
.um()
.send(supply_state, current_price, prev_price, age)
});
// Update amount range cohorts
sent.by_size_group
.iter_typed()
.for_each(|(group, supply_state)| {
self.0.amount_range.get_mut(group).state.um().send(
supply_state,
current_price,
prev_price,
blocks_old,
days_old_float,
older_than_hour,
);
self.0
.amount_range
.get_mut(group)
.state
.um()
.send(supply_state, current_price, prev_price, age);
});
}
}

View File

@@ -1,5 +1,5 @@
use brk_cohort::AGE_BOUNDARIES;
use brk_types::{ONE_DAY_IN_SEC, Timestamp};
use brk_types::{ONE_HOUR_IN_SEC, Timestamp};
use crate::distribution::state::BlockState;
@@ -8,11 +8,11 @@ use super::groups::UTXOCohorts;
impl UTXOCohorts {
/// Handle age transitions when processing a new block.
///
/// UTXOs age with each block. When they cross day boundaries,
/// they move between age-based cohorts (e.g., from "0-1d" to "1-7d").
/// UTXOs age with each block. When they cross hour boundaries,
/// they move between age-based cohorts (e.g., from "0-1h" to "1h-1d").
///
/// Complexity: O(k * (log n + m)) where:
/// - k = 19 boundaries to check
/// - k = 20 boundaries to check
/// - n = total blocks in chain_state
/// - m = blocks crossing each boundary (typically 0-2 per boundary per block)
pub fn tick_tock_next_block(&mut self, chain_state: &[BlockState], timestamp: Timestamp) {
@@ -28,19 +28,19 @@ impl UTXOCohorts {
return;
}
// Get age_range cohort states (indexed 0..20)
// Cohort i covers days [BOUNDARIES[i-1], BOUNDARIES[i])
// Cohort 0 covers [0, 1) days
// Cohort 19 covers [15*365, infinity) days
// Get age_range cohort states (indexed 0..21)
// Cohort i covers hours [BOUNDARIES[i-1], BOUNDARIES[i])
// Cohort 0 covers [0, 1) hours
// Cohort 20 covers [15*365*24, infinity) hours
let mut age_cohorts: Vec<_> = self.0.age_range.iter_mut().map(|v| &mut v.state).collect();
// For each boundary, find blocks that just crossed it
for (boundary_idx, &boundary_days) in AGE_BOUNDARIES.iter().enumerate() {
let boundary_seconds = (boundary_days as u32) * ONE_DAY_IN_SEC;
// For each boundary (in hours), find blocks that just crossed it
for (boundary_idx, &boundary_hours) in AGE_BOUNDARIES.iter().enumerate() {
let boundary_seconds = (boundary_hours as u32) * ONE_HOUR_IN_SEC;
// Blocks crossing boundary B have timestamps in (prev - B*DAY, curr - B*DAY]
// prev_days < B and curr_days >= B
// means: block was younger than B days, now is B days or older
// Blocks crossing boundary B have timestamps in (prev - B*HOUR, curr - B*HOUR]
// prev_hours < B and curr_hours >= B
// means: block was younger than B hours, now is B hours or older
let upper_timestamp = (*timestamp).saturating_sub(boundary_seconds);
let lower_timestamp = (*prev_timestamp).saturating_sub(boundary_seconds);
@@ -53,23 +53,11 @@ impl UTXOCohorts {
let start_idx = chain_state.partition_point(|b| *b.timestamp <= lower_timestamp);
let end_idx = chain_state.partition_point(|b| *b.timestamp <= upper_timestamp);
// Process blocks that crossed this boundary
// Move supply from younger cohort to older cohort
for block_state in &chain_state[start_idx..end_idx] {
// Double-check the day boundary was actually crossed
// (handles edge cases with day boundaries)
let prev_days = prev_timestamp.difference_in_days_between(block_state.timestamp);
let curr_days = timestamp.difference_in_days_between(block_state.timestamp);
if prev_days >= boundary_days || curr_days < boundary_days {
continue;
}
// Block crossed from cohort[boundary_idx] to cohort[boundary_idx + 1]
// Decrement from the "younger" cohort
if let Some(state) = age_cohorts[boundary_idx].as_mut() {
state.decrement(&block_state.supply, block_state.price);
}
// Increment in the "older" cohort
if let Some(state) = age_cohorts[boundary_idx + 1].as_mut() {
state.increment(&block_state.supply, block_state.price);
}

View File

@@ -9,7 +9,7 @@ use vecdb::{AnyStoredVec, Database, Exit, IterableVec};
use crate::{ComputeIndexes, indexes, price, distribution::state::UTXOCohortState};
use crate::distribution::metrics::{CohortMetrics, ImportConfig, SupplyMetrics};
use crate::distribution::metrics::{CohortMetrics, ImportConfig, RealizedMetrics, SupplyMetrics};
use super::super::traits::{CohortVecs, DynCohortVecs};
@@ -33,6 +33,9 @@ impl UTXOCohortVecs {
///
/// `all_supply` is the supply metrics from the "all" cohort, used as global
/// sources for `*_rel_to_market_cap` ratios. Pass `None` for the "all" cohort itself.
///
/// `up_to_1h_realized` is used for cohorts where `compute_adjusted()` is true,
/// to create lazy adjusted vecs: adjusted = cohort - up_to_1h.
#[allow(clippy::too_many_arguments)]
pub fn forced_import(
db: &Database,
@@ -44,6 +47,7 @@ impl UTXOCohortVecs {
states_path: &Path,
state_level: StateLevel,
all_supply: Option<&SupplyMetrics>,
up_to_1h_realized: Option<&RealizedMetrics>,
) -> Result<Self> {
let compute_dollars = price.is_some();
let full_name = CohortContext::Utxo.full_name(&filter, name);
@@ -56,6 +60,7 @@ impl UTXOCohortVecs {
version,
indexes,
price,
up_to_1h_realized,
};
Ok(Self {

View File

@@ -4,6 +4,8 @@ use vecdb::Database;
use crate::{indexes, price};
use super::RealizedMetrics;
/// Configuration for importing metrics.
pub struct ImportConfig<'a> {
pub db: &'a Database,
@@ -13,6 +15,9 @@ pub struct ImportConfig<'a> {
pub version: Version,
pub indexes: &'a indexes::Vecs,
pub price: Option<&'a price::Vecs>,
/// Source for lazy adjusted computation: adjusted = cohort - up_to_1h.
/// Required for cohorts where `compute_adjusted()` is true.
pub up_to_1h_realized: Option<&'a RealizedMetrics>,
}
impl<'a> ImportConfig<'a> {

View File

@@ -13,8 +13,8 @@ use crate::{
indexes,
internal::{
ComputedRatioVecsFromDateIndex, ComputedVecsFromDateIndex, ComputedVecsFromHeight,
LazyVecsFrom2FromHeight, LazyVecsFromDateIndex, LazyVecsFromHeight, PercentageDollarsF32,
Source, StoredF32Identity, VecBuilderOptions,
DollarsMinus, LazyVecsFrom2FromHeight, LazyVecsFromDateIndex, LazyVecsFromHeight,
PercentageDollarsF32, Source, StoredF32Identity, VecBuilderOptions,
},
price,
utils::OptionExt,
@@ -64,11 +64,11 @@ pub struct RealizedMetrics {
pub height_to_value_destroyed: EagerVec<PcoVec<Height, Dollars>>,
pub indexes_to_value_destroyed: ComputedVecsFromHeight<Dollars>,
// === Adjusted Value (optional) ===
pub height_to_adjusted_value_created: Option<EagerVec<PcoVec<Height, Dollars>>>,
pub indexes_to_adjusted_value_created: Option<ComputedVecsFromHeight<Dollars>>,
pub height_to_adjusted_value_destroyed: Option<EagerVec<PcoVec<Height, Dollars>>>,
pub indexes_to_adjusted_value_destroyed: Option<ComputedVecsFromHeight<Dollars>>,
// === Adjusted Value (lazy: cohort - up_to_1h) ===
pub indexes_to_adjusted_value_created:
Option<LazyVecsFrom2FromHeight<Dollars, Dollars, Dollars>>,
pub indexes_to_adjusted_value_destroyed:
Option<LazyVecsFrom2FromHeight<Dollars, Dollars, Dollars>>,
// === SOPR (Spent Output Profit Ratio) ===
pub dateindex_to_sopr: EagerVec<PcoVec<DateIndex, StoredF64>>,
@@ -226,16 +226,48 @@ impl RealizedMetrics {
let height_to_value_destroyed =
EagerVec::forced_import(cfg.db, &cfg.name("value_destroyed"), cfg.version)?;
let height_to_adjusted_value_created = compute_adjusted
let indexes_to_value_created = ComputedVecsFromHeight::forced_import(
cfg.db,
&cfg.name("value_created"),
Source::Vec(height_to_value_created.boxed_clone()),
cfg.version,
cfg.indexes,
sum,
)?;
let indexes_to_value_destroyed = ComputedVecsFromHeight::forced_import(
cfg.db,
&cfg.name("value_destroyed"),
Source::Vec(height_to_value_destroyed.boxed_clone()),
cfg.version,
cfg.indexes,
sum,
)?;
// Create lazy adjusted vecs if compute_adjusted and up_to_1h is available
let indexes_to_adjusted_value_created = (compute_adjusted && cfg.up_to_1h_realized.is_some())
.then(|| {
EagerVec::forced_import(cfg.db, &cfg.name("adjusted_value_created"), cfg.version)
})
.transpose()?;
let height_to_adjusted_value_destroyed = compute_adjusted
.then(|| {
EagerVec::forced_import(cfg.db, &cfg.name("adjusted_value_destroyed"), cfg.version)
})
.transpose()?;
let up_to_1h = cfg.up_to_1h_realized.unwrap();
LazyVecsFrom2FromHeight::from_computed::<DollarsMinus>(
&cfg.name("adjusted_value_created"),
cfg.version,
height_to_value_created.boxed_clone(),
up_to_1h.height_to_value_created.boxed_clone(),
&indexes_to_value_created,
&up_to_1h.indexes_to_value_created,
)
});
let indexes_to_adjusted_value_destroyed =
(compute_adjusted && cfg.up_to_1h_realized.is_some()).then(|| {
let up_to_1h = cfg.up_to_1h_realized.unwrap();
LazyVecsFrom2FromHeight::from_computed::<DollarsMinus>(
&cfg.name("adjusted_value_destroyed"),
cfg.version,
height_to_value_destroyed.boxed_clone(),
up_to_1h.height_to_value_destroyed.boxed_clone(),
&indexes_to_value_destroyed,
&up_to_1h.indexes_to_value_destroyed,
)
});
// Create realized_price_extra first so we can reference its ratio for MVRV proxy
let indexes_to_realized_price_extra = ComputedRatioVecsFromDateIndex::forced_import(
@@ -317,62 +349,14 @@ impl RealizedMetrics {
.transpose()?,
// === Value Created/Destroyed ===
indexes_to_value_created: ComputedVecsFromHeight::forced_import(
cfg.db,
&cfg.name("value_created"),
Source::Vec(height_to_value_created.boxed_clone()),
cfg.version,
cfg.indexes,
sum,
)?,
indexes_to_value_destroyed: ComputedVecsFromHeight::forced_import(
cfg.db,
&cfg.name("value_destroyed"),
Source::Vec(height_to_value_destroyed.boxed_clone()),
cfg.version,
cfg.indexes,
sum,
)?,
height_to_value_created,
indexes_to_value_created,
height_to_value_destroyed,
indexes_to_value_destroyed,
// === Adjusted Value (optional) ===
indexes_to_adjusted_value_created: compute_adjusted
.then(|| {
ComputedVecsFromHeight::forced_import(
cfg.db,
&cfg.name("adjusted_value_created"),
Source::Vec(
height_to_adjusted_value_created
.as_ref()
.unwrap()
.boxed_clone(),
),
cfg.version,
cfg.indexes,
sum,
)
})
.transpose()?,
indexes_to_adjusted_value_destroyed: compute_adjusted
.then(|| {
ComputedVecsFromHeight::forced_import(
cfg.db,
&cfg.name("adjusted_value_destroyed"),
Source::Vec(
height_to_adjusted_value_destroyed
.as_ref()
.unwrap()
.boxed_clone(),
),
cfg.version,
cfg.indexes,
sum,
)
})
.transpose()?,
height_to_adjusted_value_created,
height_to_adjusted_value_destroyed,
// === Adjusted Value (lazy: cohort - up_to_1h) ===
indexes_to_adjusted_value_created,
indexes_to_adjusted_value_destroyed,
// === SOPR ===
dateindex_to_sopr: EagerVec::forced_import(
@@ -464,22 +448,12 @@ impl RealizedMetrics {
/// Get minimum length across height-indexed vectors written in block loop.
pub fn min_stateful_height_len(&self) -> usize {
let mut min = self
.height_to_realized_cap
self.height_to_realized_cap
.len()
.min(self.height_to_realized_profit.len())
.min(self.height_to_realized_loss.len())
.min(self.height_to_value_created.len())
.min(self.height_to_value_destroyed.len());
if let Some(v) = &self.height_to_adjusted_value_created {
min = min.min(v.len());
}
if let Some(v) = &self.height_to_adjusted_value_destroyed {
min = min.min(v.len());
}
min
.min(self.height_to_value_destroyed.len())
}
/// Push realized state values to height-indexed vectors.
@@ -495,13 +469,6 @@ impl RealizedMetrics {
self.height_to_value_destroyed
.truncate_push(height, state.value_destroyed)?;
if let Some(v) = self.height_to_adjusted_value_created.as_mut() {
v.truncate_push(height, state.adj_value_created)?;
}
if let Some(v) = self.height_to_adjusted_value_destroyed.as_mut() {
v.truncate_push(height, state.adj_value_destroyed)?;
}
Ok(())
}
@@ -512,31 +479,19 @@ impl RealizedMetrics {
self.height_to_realized_loss.write()?;
self.height_to_value_created.write()?;
self.height_to_value_destroyed.write()?;
if let Some(v) = self.height_to_adjusted_value_created.as_mut() {
v.write()?;
}
if let Some(v) = self.height_to_adjusted_value_destroyed.as_mut() {
v.write()?;
}
Ok(())
}
/// Returns a parallel iterator over all vecs for parallel writing.
pub fn par_iter_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
let mut vecs: Vec<&mut dyn AnyStoredVec> = vec![
&mut self.height_to_realized_cap,
[
&mut self.height_to_realized_cap as &mut dyn AnyStoredVec,
&mut self.height_to_realized_profit,
&mut self.height_to_realized_loss,
&mut self.height_to_value_created,
&mut self.height_to_value_destroyed,
];
if let Some(v) = self.height_to_adjusted_value_created.as_mut() {
vecs.push(v);
}
if let Some(v) = self.height_to_adjusted_value_destroyed.as_mut() {
vecs.push(v);
}
vecs.into_par_iter()
]
.into_par_iter()
}
/// Validate computed versions against base version.
@@ -593,37 +548,6 @@ impl RealizedMetrics {
exit,
)?;
if self.height_to_adjusted_value_created.is_some() {
self.height_to_adjusted_value_created
.um()
.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| {
v.height_to_adjusted_value_created
.as_ref()
.unwrap_or(&v.height_to_value_created)
})
.collect::<Vec<_>>(),
exit,
)?;
self.height_to_adjusted_value_destroyed
.um()
.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| {
v.height_to_adjusted_value_destroyed
.as_ref()
.unwrap_or(&v.height_to_value_destroyed)
})
.collect::<Vec<_>>(),
exit,
)?;
}
Ok(())
}
@@ -695,25 +619,6 @@ impl RealizedMetrics {
Some(&self.height_to_value_destroyed),
)?;
// Optional: adjusted value
if let Some(adjusted_value_created) = self.indexes_to_adjusted_value_created.as_mut() {
adjusted_value_created.compute_rest(
indexes,
starting_indexes,
exit,
self.height_to_adjusted_value_created.as_ref(),
)?;
}
if let Some(adjusted_value_destroyed) = self.indexes_to_adjusted_value_destroyed.as_mut() {
adjusted_value_destroyed.compute_rest(
indexes,
starting_indexes,
exit,
self.height_to_adjusted_value_destroyed.as_ref(),
)?;
}
Ok(())
}
@@ -784,16 +689,20 @@ impl RealizedMetrics {
exit,
)?;
// Optional: adjusted SOPR
// Optional: adjusted SOPR (lazy: cohort - up_to_1h)
if let (Some(adjusted_sopr), Some(adj_created), Some(adj_destroyed)) = (
self.dateindex_to_adjusted_sopr.as_mut(),
self.indexes_to_adjusted_value_created.as_ref(),
self.indexes_to_adjusted_value_destroyed.as_ref(),
self.indexes_to_adjusted_value_created
.as_ref()
.and_then(|v| v.dateindex.sum.as_ref()),
self.indexes_to_adjusted_value_destroyed
.as_ref()
.and_then(|v| v.dateindex.sum.as_ref()),
) {
adjusted_sopr.compute_divide(
starting_indexes.dateindex,
adj_created.dateindex.unwrap_sum(),
adj_destroyed.dateindex.unwrap_sum(),
adj_created.as_ref(),
adj_destroyed.as_ref(),
exit,
)?;

View File

@@ -1,13 +1,10 @@
use std::path::Path;
use brk_error::Result;
use brk_types::{Dollars, Height, LoadedAddressData, Sats, SupplyState};
use brk_types::{Age, Dollars, Height, LoadedAddressData, Sats, SupplyState};
use vecdb::unlikely;
use super::{
super::cost_basis::RealizedState,
base::CohortState,
};
use super::{super::cost_basis::RealizedState, base::CohortState};
#[derive(Clone)]
pub struct AddressCohortState {
@@ -43,16 +40,13 @@ impl AddressCohortState {
self.inner.reset_single_iteration_values();
}
#[allow(clippy::too_many_arguments)]
pub fn send(
&mut self,
addressdata: &mut LoadedAddressData,
value: Sats,
current_price: Option<Dollars>,
prev_price: Option<Dollars>,
blocks_old: usize,
days_old: f64,
older_than_hour: bool,
age: Age,
) -> Result<()> {
let compute_price = current_price.is_some();
@@ -76,9 +70,7 @@ impl AddressCohortState {
},
current_price,
prev_price,
blocks_old,
days_old,
older_than_hour,
age,
compute_price.then(|| (addressdata.realized_price(), &supply_state)),
prev_realized_price.map(|prev_price| (prev_price, &prev_supply_state)),
);

View File

@@ -1,7 +1,7 @@
use std::path::Path;
use brk_error::Result;
use brk_types::{Dollars, Height, Sats, SupplyState};
use brk_types::{Age, Dollars, Height, Sats, SupplyState};
use crate::internal::PERCENTILES_LEN;
@@ -246,17 +246,13 @@ impl CohortState {
supply: &SupplyState,
current_price: Option<Dollars>,
prev_price: Option<Dollars>,
blocks_old: usize,
days_old: f64,
older_than_hour: bool,
age: Age,
) {
self.send_(
supply,
current_price,
prev_price,
blocks_old,
days_old,
older_than_hour,
age,
None,
prev_price.map(|prev_price| (prev_price, supply)),
);
@@ -269,9 +265,7 @@ impl CohortState {
supply: &SupplyState,
current_price: Option<Dollars>,
prev_price: Option<Dollars>,
blocks_old: usize,
days_old: f64,
older_than_hour: bool,
age: Age,
price_to_amount_increment: Option<(Dollars, &SupplyState)>,
price_to_amount_decrement: Option<(Dollars, &SupplyState)>,
) {
@@ -283,14 +277,13 @@ impl CohortState {
if supply.value > Sats::ZERO {
self.sent += supply.value;
self.satblocks_destroyed += supply.value * blocks_old;
self.satdays_destroyed +=
Sats::from((u64::from(supply.value) as f64 * days_old).floor() as u64);
self.satblocks_destroyed += age.satblocks_destroyed(supply.value);
self.satdays_destroyed += age.satdays_destroyed(supply.value);
if let Some(realized) = self.realized.as_mut() {
let current_price = current_price.unwrap();
let prev_price = prev_price.unwrap();
realized.send(supply, current_price, prev_price, older_than_hour);
realized.send(supply, current_price, prev_price);
if let Some((price, supply)) = price_to_amount_increment
&& supply.value.is_not_zero()

View File

@@ -8,9 +8,7 @@ pub struct RealizedState {
pub profit: Dollars,
pub loss: Dollars,
pub value_created: Dollars,
pub adj_value_created: Dollars,
pub value_destroyed: Dollars,
pub adj_value_destroyed: Dollars,
}
impl RealizedState {
@@ -19,9 +17,7 @@ impl RealizedState {
profit: Dollars::NAN,
loss: Dollars::NAN,
value_created: Dollars::NAN,
adj_value_created: Dollars::NAN,
value_destroyed: Dollars::NAN,
adj_value_destroyed: Dollars::NAN,
};
pub fn reset_single_iteration_values(&mut self) {
@@ -29,9 +25,7 @@ impl RealizedState {
self.profit = Dollars::ZERO;
self.loss = Dollars::ZERO;
self.value_created = Dollars::ZERO;
self.adj_value_created = Dollars::ZERO;
self.value_destroyed = Dollars::ZERO;
self.adj_value_destroyed = Dollars::ZERO;
}
}
@@ -49,9 +43,7 @@ impl RealizedState {
self.profit = Dollars::ZERO;
self.loss = Dollars::ZERO;
self.value_created = Dollars::ZERO;
self.adj_value_created = Dollars::ZERO;
self.value_destroyed = Dollars::ZERO;
self.adj_value_destroyed = Dollars::ZERO;
}
self.cap += realized_cap;
@@ -74,7 +66,6 @@ impl RealizedState {
supply_state: &SupplyState,
current_price: Dollars,
prev_price: Dollars,
older_than_hour: bool,
) {
let current_value = current_price * supply_state.value;
let prev_value = prev_price * supply_state.value;
@@ -82,11 +73,6 @@ impl RealizedState {
self.value_created += current_value;
self.value_destroyed += prev_value;
if older_than_hour {
self.adj_value_created += current_value;
self.adj_value_destroyed += prev_value;
}
match current_price.cmp(&prev_price) {
Ordering::Greater => {
self.profit += current_value.checked_sub(prev_value).unwrap();

View File

@@ -0,0 +1,65 @@
use crate::{Sats, Term, Timestamp};
/// Represents the age of a UTXO or address balance.
///
/// Encapsulates all age-related calculations in one type-safe struct, so
/// callers pass a single `Age` value instead of separate
/// `blocks_old` / `days_old` / `older_than_hour` arguments.
#[derive(Debug, Clone, Copy)]
pub struct Age {
    /// Age in hours (primary internal unit for cohort boundaries)
    hours: usize,
    /// Age in blocks (for satblocks_destroyed calculation)
    blocks: usize,
    /// Age in days as float (for satdays_destroyed - established terminology)
    days: f64,
}

impl Age {
    /// Create from timestamps and block count.
    ///
    /// NOTE(review): assumes `current_timestamp >= prev_timestamp`; the
    /// underlying `Timestamp` difference helpers subtract directly, and
    /// block timestamps are not strictly monotonic — confirm callers
    /// guarantee ordering.
    #[inline]
    pub fn new(current_timestamp: Timestamp, prev_timestamp: Timestamp, blocks: usize) -> Self {
        Self {
            hours: current_timestamp.difference_in_hours_between(prev_timestamp),
            blocks,
            days: current_timestamp.difference_in_days_between_float(prev_timestamp),
        }
    }

    /// Hours old (for cohort bucket lookup via HOURS_* boundaries)
    #[inline]
    pub fn hours(&self) -> usize {
        self.hours
    }

    /// Blocks old (for satblocks_destroyed calculation)
    #[inline]
    pub fn blocks(&self) -> usize {
        self.blocks
    }

    /// Days old as float (for satdays_destroyed - established terminology)
    #[inline]
    pub fn days(&self) -> f64 {
        self.days
    }

    /// STH or LTH based on age (5 months = 3600 hours threshold)
    #[inline]
    pub fn term(&self) -> Term {
        if self.hours >= Term::THRESHOLD_HOURS {
            Term::Lth
        } else {
            Term::Sth
        }
    }

    /// Calculate satblocks destroyed for given supply.
    /// NOTE(review): plain `u64` multiply — assumes `supply * blocks` fits
    /// in `u64`; confirm against maximum realistic values.
    #[inline]
    pub fn satblocks_destroyed(&self, supply: Sats) -> Sats {
        Sats::from(u64::from(supply) * self.blocks as u64)
    }

    /// Calculate satdays destroyed for given supply.
    /// Floors the fractional product, matching the previous inline
    /// computation this helper replaced.
    #[inline]
    pub fn satdays_destroyed(&self, supply: Sats) -> Sats {
        Sats::from((u64::from(supply) as f64 * self.days).floor() as u64)
    }
}

View File

@@ -4,6 +4,7 @@ pub use vecdb::{CheckedSub, Exit, PrintableIndex, Version};
mod address;
mod addressbytes;
mod age;
mod addresschainstats;
mod addresshash;
mod addressindexoutpoint;
@@ -131,6 +132,7 @@ mod stored_u8;
mod supply_state;
mod timeperiod;
mod timeperiodparam;
mod term;
mod timestamp;
mod timestampparam;
mod treenode;
@@ -163,6 +165,7 @@ mod yearindex;
pub use address::*;
pub use addressbytes::*;
pub use age::*;
pub use addresschainstats::*;
pub use addresshash::*;
pub use addressindexoutpoint::*;
@@ -288,6 +291,7 @@ pub use stored_u16::*;
pub use stored_u32::*;
pub use stored_u64::*;
pub use supply_state::*;
pub use term::*;
pub use timeperiod::*;
pub use timeperiodparam::*;
pub use timestamp::*;

View File

@@ -1,7 +1,5 @@
use crate::DAYS_5M;
/// Classification for short-term vs long-term holders.
/// The threshold is 150 days (approximately 5 months).
/// The threshold is 150 days (approximately 5 months) = 3600 hours.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Term {
/// Short-Term Holder: < 150 days
@@ -11,5 +9,6 @@ pub enum Term {
}
impl Term {
    /// Threshold in days (150 days, approximately 5 months).
    pub const THRESHOLD_DAYS: usize = DAYS_5M;
    /// Threshold in hours (150 days * 24 hours = 3600 hours).
    /// Derived from `THRESHOLD_DAYS` so the two constants cannot drift apart.
    pub const THRESHOLD_HOURS: usize = Self::THRESHOLD_DAYS * 24; // 3600
}

View File

@@ -66,6 +66,11 @@ impl Timestamp {
(self.0 - older.0) as f64 / ONE_DAY_IN_SEC_F64
}
#[inline]
pub fn difference_in_hours_between(&self, older: Self) -> usize {
((self.0 - older.0) / ONE_HOUR_IN_SEC) as usize
}
#[inline]
pub fn is_more_than_hour(&self) -> bool {
self.0 >= ONE_HOUR_IN_SEC

File diff suppressed because it is too large Load Diff

View File

@@ -2094,8 +2094,8 @@ class RealizedPattern3:
self.adjusted_sopr: MetricPattern21[StoredF64] = MetricPattern21(client, _m(acc, 'adjusted_sopr'))
self.adjusted_sopr_30d_ema: MetricPattern21[StoredF64] = MetricPattern21(client, _m(acc, 'adjusted_sopr_30d_ema'))
self.adjusted_sopr_7d_ema: MetricPattern21[StoredF64] = MetricPattern21(client, _m(acc, 'adjusted_sopr_7d_ema'))
self.adjusted_value_created: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'adjusted_value_created'))
self.adjusted_value_destroyed: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'adjusted_value_destroyed'))
self.adjusted_value_created: TotalRealizedPnlPattern[Dollars] = TotalRealizedPnlPattern(client, _m(acc, 'adjusted_value_created'))
self.adjusted_value_destroyed: TotalRealizedPnlPattern[Dollars] = TotalRealizedPnlPattern(client, _m(acc, 'adjusted_value_destroyed'))
self.mvrv: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'mvrv'))
self.neg_realized_loss: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'neg_realized_loss'))
self.net_realized_pnl: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'net_realized_pnl'))
@@ -2132,8 +2132,8 @@ class RealizedPattern4:
self.adjusted_sopr: MetricPattern21[StoredF64] = MetricPattern21(client, _m(acc, 'adjusted_sopr'))
self.adjusted_sopr_30d_ema: MetricPattern21[StoredF64] = MetricPattern21(client, _m(acc, 'adjusted_sopr_30d_ema'))
self.adjusted_sopr_7d_ema: MetricPattern21[StoredF64] = MetricPattern21(client, _m(acc, 'adjusted_sopr_7d_ema'))
self.adjusted_value_created: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'adjusted_value_created'))
self.adjusted_value_destroyed: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'adjusted_value_destroyed'))
self.adjusted_value_created: TotalRealizedPnlPattern[Dollars] = TotalRealizedPnlPattern(client, _m(acc, 'adjusted_value_created'))
self.adjusted_value_destroyed: TotalRealizedPnlPattern[Dollars] = TotalRealizedPnlPattern(client, _m(acc, 'adjusted_value_destroyed'))
self.mvrv: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'mvrv'))
self.neg_realized_loss: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'neg_realized_loss'))
self.net_realized_pnl: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'net_realized_pnl'))
@@ -2284,31 +2284,6 @@ class Price111dSmaPattern:
self.ratio_pct99_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'ratio_pct99_usd'))
self.ratio_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, _m(acc, 'ratio'))
class PercentilesPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.cost_basis_pct05: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct05'))
self.cost_basis_pct10: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct10'))
self.cost_basis_pct15: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct15'))
self.cost_basis_pct20: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct20'))
self.cost_basis_pct25: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct25'))
self.cost_basis_pct30: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct30'))
self.cost_basis_pct35: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct35'))
self.cost_basis_pct40: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct40'))
self.cost_basis_pct45: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct45'))
self.cost_basis_pct50: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct50'))
self.cost_basis_pct55: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct55'))
self.cost_basis_pct60: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct60'))
self.cost_basis_pct65: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct65'))
self.cost_basis_pct70: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct70'))
self.cost_basis_pct75: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct75'))
self.cost_basis_pct80: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct80'))
self.cost_basis_pct85: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct85'))
self.cost_basis_pct90: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct90'))
self.cost_basis_pct95: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct95'))
class ActivePriceRatioPattern:
"""Pattern struct for repeated tree structure."""
@@ -2334,7 +2309,32 @@ class ActivePriceRatioPattern:
self.ratio_pct99_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct99_usd'))
self.ratio_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, acc)
class RelativePattern2:
class PercentilesPattern:
    """Pattern struct for repeated tree structure.

    Exposes the cost-basis percentile metrics pct05..pct95 (5% steps),
    one explicitly annotated attribute per percentile so typed clients
    keep full autocompletion.
    """
    def __init__(self, client: BrkClientBase, acc: str):
        """Create pattern node with accumulated metric name.

        Each attribute ``cost_basis_pctNN`` resolves the metric name
        ``pctNN`` appended to the accumulated prefix *acc*.
        """
        self.cost_basis_pct05: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct05'))
        self.cost_basis_pct10: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct10'))
        self.cost_basis_pct15: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct15'))
        self.cost_basis_pct20: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct20'))
        self.cost_basis_pct25: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct25'))
        self.cost_basis_pct30: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct30'))
        self.cost_basis_pct35: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct35'))
        self.cost_basis_pct40: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct40'))
        self.cost_basis_pct45: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct45'))
        self.cost_basis_pct50: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct50'))
        self.cost_basis_pct55: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct55'))
        self.cost_basis_pct60: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct60'))
        self.cost_basis_pct65: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct65'))
        self.cost_basis_pct70: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct70'))
        self.cost_basis_pct75: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct75'))
        self.cost_basis_pct80: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct80'))
        self.cost_basis_pct85: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct85'))
        self.cost_basis_pct90: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct90'))
        self.cost_basis_pct95: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct95'))
class RelativePattern5:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
@@ -2432,6 +2432,23 @@ class PeriodAvgPricePattern(Generic[T]):
self._6y: MetricPattern4[T] = MetricPattern4(client, (f'6y_{{acc}}' if acc else '6y'))
self._8y: MetricPattern4[T] = MetricPattern4(client, (f'8y_{{acc}}' if acc else '8y'))
class BitcoinPattern(Generic[T]):
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.average: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'avg'))
self.base: MetricPattern25[T] = MetricPattern25(client, acc)
self.cumulative: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'cumulative'))
self.max: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'max'))
self.median: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'median'))
self.min: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'min'))
self.pct10: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'pct10'))
self.pct25: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'pct25'))
self.pct75: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'pct75'))
self.pct90: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'pct90'))
self.sum: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'sum'))
class ClassAvgPricePattern(Generic[T]):
"""Pattern struct for repeated tree structure."""
@@ -2449,22 +2466,21 @@ class ClassAvgPricePattern(Generic[T]):
self._2024: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2024_avg_price'))
self._2025: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2025_avg_price'))
class BitcoinPattern(Generic[T]):
class RelativePattern2:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.average: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'avg'))
self.base: MetricPattern25[T] = MetricPattern25(client, acc)
self.cumulative: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'cumulative'))
self.max: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'max'))
self.median: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'median'))
self.min: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'min'))
self.pct10: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'pct10'))
self.pct25: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'pct25'))
self.pct75: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'pct75'))
self.pct90: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'pct90'))
self.sum: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'sum'))
self.neg_unrealized_loss_rel_to_own_market_cap: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'neg_unrealized_loss_rel_to_own_market_cap'))
self.neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'neg_unrealized_loss_rel_to_own_total_unrealized_pnl'))
self.net_unrealized_pnl_rel_to_own_market_cap: MetricPattern3[StoredF32] = MetricPattern3(client, _m(acc, 'net_unrealized_pnl_rel_to_own_market_cap'))
self.net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern3[StoredF32] = MetricPattern3(client, _m(acc, 'net_unrealized_pnl_rel_to_own_total_unrealized_pnl'))
self.supply_in_loss_rel_to_own_supply: MetricPattern5[StoredF64] = MetricPattern5(client, _m(acc, 'supply_in_loss_rel_to_own_supply'))
self.supply_in_profit_rel_to_own_supply: MetricPattern5[StoredF64] = MetricPattern5(client, _m(acc, 'supply_in_profit_rel_to_own_supply'))
self.unrealized_loss_rel_to_own_market_cap: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'unrealized_loss_rel_to_own_market_cap'))
self.unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'unrealized_loss_rel_to_own_total_unrealized_pnl'))
self.unrealized_profit_rel_to_own_market_cap: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'unrealized_profit_rel_to_own_market_cap'))
self.unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'unrealized_profit_rel_to_own_total_unrealized_pnl'))
class RelativePattern:
"""Pattern struct for repeated tree structure."""
@@ -2487,16 +2503,16 @@ class BlockSizePattern(Generic[T]):
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.average: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'avg'))
self.average: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'avg'))
self.cumulative: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'cumulative'))
self.max: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'max'))
self.median: MetricPattern25[T] = MetricPattern25(client, _m(acc, 'median'))
self.min: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'min'))
self.pct10: MetricPattern25[T] = MetricPattern25(client, _m(acc, 'pct10'))
self.pct25: MetricPattern25[T] = MetricPattern25(client, _m(acc, 'pct25'))
self.pct75: MetricPattern25[T] = MetricPattern25(client, _m(acc, 'pct75'))
self.pct90: MetricPattern25[T] = MetricPattern25(client, _m(acc, 'pct90'))
self.sum: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'sum'))
self.max: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'max'))
self.median: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'median'))
self.min: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'min'))
self.pct10: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'pct10'))
self.pct25: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'pct25'))
self.pct75: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'pct75'))
self.pct90: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'pct90'))
self.sum: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'sum'))
class UnrealizedPattern:
"""Pattern struct for repeated tree structure."""
@@ -2518,41 +2534,28 @@ class AddresstypeToHeightToAddrCountPattern(Generic[T]):
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.p2a: MetricPattern25[T] = MetricPattern25(client, (f'p2a_{{acc}}' if acc else 'p2a'))
self.p2pk33: MetricPattern25[T] = MetricPattern25(client, (f'p2pk33_{{acc}}' if acc else 'p2pk33'))
self.p2pk65: MetricPattern25[T] = MetricPattern25(client, (f'p2pk65_{{acc}}' if acc else 'p2pk65'))
self.p2pkh: MetricPattern25[T] = MetricPattern25(client, (f'p2pkh_{{acc}}' if acc else 'p2pkh'))
self.p2sh: MetricPattern25[T] = MetricPattern25(client, (f'p2sh_{{acc}}' if acc else 'p2sh'))
self.p2tr: MetricPattern25[T] = MetricPattern25(client, (f'p2tr_{{acc}}' if acc else 'p2tr'))
self.p2wpkh: MetricPattern25[T] = MetricPattern25(client, (f'p2wpkh_{{acc}}' if acc else 'p2wpkh'))
self.p2wsh: MetricPattern25[T] = MetricPattern25(client, (f'p2wsh_{{acc}}' if acc else 'p2wsh'))
self.p2a: MetricPattern29[T] = MetricPattern29(client, (f'p2a_{{acc}}' if acc else 'p2a'))
self.p2pk33: MetricPattern31[T] = MetricPattern31(client, (f'p2pk33_{{acc}}' if acc else 'p2pk33'))
self.p2pk65: MetricPattern32[T] = MetricPattern32(client, (f'p2pk65_{{acc}}' if acc else 'p2pk65'))
self.p2pkh: MetricPattern33[T] = MetricPattern33(client, (f'p2pkh_{{acc}}' if acc else 'p2pkh'))
self.p2sh: MetricPattern34[T] = MetricPattern34(client, (f'p2sh_{{acc}}' if acc else 'p2sh'))
self.p2tr: MetricPattern35[T] = MetricPattern35(client, (f'p2tr_{{acc}}' if acc else 'p2tr'))
self.p2wpkh: MetricPattern36[T] = MetricPattern36(client, (f'p2wpkh_{{acc}}' if acc else 'p2wpkh'))
self.p2wsh: MetricPattern37[T] = MetricPattern37(client, (f'p2wsh_{{acc}}' if acc else 'p2wsh'))
class BlockIntervalPattern(Generic[T]):
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.average: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'avg'))
self.max: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'max'))
self.median: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'median'))
self.min: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'min'))
self.pct10: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'pct10'))
self.pct25: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'pct25'))
self.pct75: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'pct75'))
self.pct90: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'pct90'))
class PeriodCagrPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self._10y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'10y_{{acc}}' if acc else '10y'))
self._2y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'2y_{{acc}}' if acc else '2y'))
self._3y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'3y_{{acc}}' if acc else '3y'))
self._4y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'4y_{{acc}}' if acc else '4y'))
self._5y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'5y_{{acc}}' if acc else '5y'))
self._6y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'6y_{{acc}}' if acc else '6y'))
self._8y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'8y_{{acc}}' if acc else '8y'))
self.average: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'avg'))
self.max: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'max'))
self.median: MetricPattern25[T] = MetricPattern25(client, _m(acc, 'median'))
self.min: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'min'))
self.pct10: MetricPattern25[T] = MetricPattern25(client, _m(acc, 'pct10'))
self.pct25: MetricPattern25[T] = MetricPattern25(client, _m(acc, 'pct25'))
self.pct75: MetricPattern25[T] = MetricPattern25(client, _m(acc, 'pct75'))
self.pct90: MetricPattern25[T] = MetricPattern25(client, _m(acc, 'pct90'))
class _0satsPattern:
"""Pattern struct for repeated tree structure."""
@@ -2567,15 +2570,28 @@ class _0satsPattern:
self.supply: SupplyPattern3 = SupplyPattern3(client, acc)
self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc)
class UpTo1dPattern:
class PeriodCagrPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self._10y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'10y_{{acc}}' if acc else '10y'))
self._2y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'2y_{{acc}}' if acc else '2y'))
self._3y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'3y_{{acc}}' if acc else '3y'))
self._4y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'4y_{{acc}}' if acc else '4y'))
self._5y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'5y_{{acc}}' if acc else '5y'))
self._6y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'6y_{{acc}}' if acc else '6y'))
self._8y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'8y_{{acc}}' if acc else '8y'))
class _0satsPattern2:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.activity: ActivityPattern2 = ActivityPattern2(client, acc)
self.cost_basis: CostBasisPattern2 = CostBasisPattern2(client, acc)
self.realized: RealizedPattern3 = RealizedPattern3(client, acc)
self.relative: RelativePattern2 = RelativePattern2(client, acc)
self.cost_basis: CostBasisPattern = CostBasisPattern(client, acc)
self.realized: RealizedPattern = RealizedPattern(client, acc)
self.relative: RelativePattern4 = RelativePattern4(client, _m(acc, 'supply_in'))
self.supply: SupplyPattern3 = SupplyPattern3(client, acc)
self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc)
@@ -2591,6 +2607,18 @@ class _10yPattern:
self.supply: SupplyPattern3 = SupplyPattern3(client, acc)
self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc)
class _100btcPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.activity: ActivityPattern2 = ActivityPattern2(client, acc)
self.cost_basis: CostBasisPattern = CostBasisPattern(client, acc)
self.realized: RealizedPattern = RealizedPattern(client, acc)
self.relative: RelativePattern = RelativePattern(client, acc)
self.supply: SupplyPattern3 = SupplyPattern3(client, acc)
self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc)
class _10yTo12yPattern:
"""Pattern struct for repeated tree structure."""
@@ -2603,18 +2631,6 @@ class _10yTo12yPattern:
self.supply: SupplyPattern3 = SupplyPattern3(client, acc)
self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc)
class _0satsPattern2:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.activity: ActivityPattern2 = ActivityPattern2(client, acc)
self.cost_basis: CostBasisPattern = CostBasisPattern(client, acc)
self.realized: RealizedPattern = RealizedPattern(client, acc)
self.relative: RelativePattern = RelativePattern(client, acc)
self.supply: SupplyPattern3 = SupplyPattern3(client, acc)
self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc)
class SegwitAdoptionPattern(Generic[T]):
"""Pattern struct for repeated tree structure."""
@@ -2627,17 +2643,6 @@ class SegwitAdoptionPattern(Generic[T]):
self.min: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'min'))
self.sum: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'sum'))
class SupplyPattern3:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.supply: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply'))
self.supply_half: ActiveSupplyPattern = ActiveSupplyPattern(client, _m(acc, 'supply_half'))
self.supply_half_value: ActiveSupplyPattern = ActiveSupplyPattern(client, _m(acc, 'supply_half'))
self.supply_value: SupplyValuePattern = SupplyValuePattern(client, _m(acc, 'supply'))
self.utxo_count: MetricPattern1[StoredU64] = MetricPattern1(client, _m(acc, 'utxo_count'))
class ActivityPattern2:
"""Pattern struct for repeated tree structure."""
@@ -2649,6 +2654,17 @@ class ActivityPattern2:
self.satdays_destroyed: MetricPattern25[Sats] = MetricPattern25(client, _m(acc, 'satdays_destroyed'))
self.sent: SentPattern = SentPattern(client, _m(acc, 'sent'))
class SupplyPattern3:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.supply: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply'))
self.supply_half: ActiveSupplyPattern = ActiveSupplyPattern(client, _m(acc, 'supply_half'))
self.supply_half_value: ActiveSupplyPattern = ActiveSupplyPattern(client, _m(acc, 'supply_half'))
self.supply_value: SupplyValuePattern = SupplyValuePattern(client, _m(acc, 'supply'))
self.utxo_count: MetricPattern1[StoredU64] = MetricPattern1(client, _m(acc, 'utxo_count'))
class SupplyPattern2:
"""Pattern struct for repeated tree structure."""
@@ -2659,16 +2675,6 @@ class SupplyPattern2:
self.dollars: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'usd'))
self.sats: MetricPattern4[Sats] = MetricPattern4(client, acc)
class SentPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.base: MetricPattern25[Sats] = MetricPattern25(client, acc)
self.bitcoin: BlockCountPattern[Bitcoin] = BlockCountPattern(client, _m(acc, 'btc'))
self.dollars: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'usd'))
self.sats: SatsPattern = SatsPattern(client, acc)
class OpreturnPattern:
"""Pattern struct for repeated tree structure."""
@@ -2679,14 +2685,15 @@ class OpreturnPattern:
self.dollars: BitcoinPattern2[Dollars] = BitcoinPattern2(client, _m(acc, 'usd'))
self.sats: SatsPattern4 = SatsPattern4(client, acc)
class CostBasisPattern2:
class SentPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.max_cost_basis: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'max_cost_basis'))
self.min_cost_basis: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'min_cost_basis'))
self.percentiles: PercentilesPattern = PercentilesPattern(client, _m(acc, 'cost_basis'))
self.base: MetricPattern25[Sats] = MetricPattern25(client, acc)
self.bitcoin: BlockCountPattern[Bitcoin] = BlockCountPattern(client, _m(acc, 'btc'))
self.dollars: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'usd'))
self.sats: SatsPattern = SatsPattern(client, acc)
class CoinbasePattern:
"""Pattern struct for repeated tree structure."""
@@ -2715,6 +2722,15 @@ class ActiveSupplyPattern:
self.dollars: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'usd'))
self.sats: MetricPattern1[Sats] = MetricPattern1(client, acc)
class CostBasisPattern2:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.max_cost_basis: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'max_cost_basis'))
self.min_cost_basis: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'min_cost_basis'))
self.percentiles: PercentilesPattern = PercentilesPattern(client, _m(acc, 'cost_basis'))
class BlockCountPattern(Generic[T]):
"""Pattern struct for repeated tree structure."""
@@ -2733,14 +2749,6 @@ class BitcoinPattern2(Generic[T]):
self.cumulative: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'cumulative'))
self.last: MetricPattern2[T] = MetricPattern2(client, acc)
class CostBasisPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.max_cost_basis: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'max_cost_basis'))
self.min_cost_basis: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'min_cost_basis'))
class SatsPattern4:
"""Pattern struct for repeated tree structure."""
@@ -2749,13 +2757,13 @@ class SatsPattern4:
self.cumulative: MetricPattern1[Sats] = MetricPattern1(client, _m(acc, 'cumulative'))
self.last: MetricPattern2[Sats] = MetricPattern2(client, acc)
class SupplyValuePattern:
class RelativePattern4:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.bitcoin: MetricPattern25[Bitcoin] = MetricPattern25(client, _m(acc, 'btc'))
self.dollars: MetricPattern25[Dollars] = MetricPattern25(client, _m(acc, 'usd'))
self.supply_in_loss_rel_to_own_supply: MetricPattern5[StoredF64] = MetricPattern5(client, _m(acc, 'loss_rel_to_own_supply'))
self.supply_in_profit_rel_to_own_supply: MetricPattern5[StoredF64] = MetricPattern5(client, _m(acc, 'profit_rel_to_own_supply'))
class SatsPattern:
"""Pattern struct for repeated tree structure."""
@@ -2765,6 +2773,22 @@ class SatsPattern:
self.cumulative: MetricPattern1[Sats] = MetricPattern1(client, _m(acc, 'cumulative'))
self.sum: MetricPattern2[Sats] = MetricPattern2(client, acc)
class CostBasisPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.max_cost_basis: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'max_cost_basis'))
self.min_cost_basis: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'min_cost_basis'))
class SupplyValuePattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.bitcoin: MetricPattern25[Bitcoin] = MetricPattern25(client, _m(acc, 'btc'))
self.dollars: MetricPattern25[Dollars] = MetricPattern25(client, _m(acc, 'usd'))
class _1dReturns1mSdPattern:
"""Pattern struct for repeated tree structure."""
@@ -3126,6 +3150,7 @@ class CatalogTree_Computed_Distribution_UtxoCohorts_AgeRange:
self._10y_to_12y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_10y_up_to_12y_old')
self._12y_to_15y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_12y_up_to_15y_old')
self._1d_to_1w: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_1d_up_to_1w_old')
self._1h_to_1d: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_1h_up_to_1d_old')
self._1m_to_2m: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_1m_up_to_2m_old')
self._1w_to_1m: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_1w_up_to_1m_old')
self._1y_to_2y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_1y_up_to_2y_old')
@@ -3142,7 +3167,7 @@ class CatalogTree_Computed_Distribution_UtxoCohorts_AgeRange:
self._7y_to_8y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_7y_up_to_8y_old')
self._8y_to_10y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_8y_up_to_10y_old')
self.from_15y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_15y_old')
self.up_to_1d: UpTo1dPattern = UpTo1dPattern(client, 'utxos_up_to_1d_old')
self.up_to_1h: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_up_to_1h_old')
class CatalogTree_Computed_Distribution_UtxoCohorts_All:
"""Catalog tree node."""
@@ -3200,37 +3225,37 @@ class CatalogTree_Computed_Distribution_UtxoCohorts_GeAmount:
"""Catalog tree node."""
def __init__(self, client: BrkClientBase, base_path: str = ''):
self._100btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_100btc')
self._100k_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_100k_sats')
self._100sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_100sats')
self._10btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_10btc')
self._10k_btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_10k_btc')
self._10k_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_10k_sats')
self._10m_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_10m_sats')
self._10sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_10sats')
self._1btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_1btc')
self._1k_btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_1k_btc')
self._1k_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_1k_sats')
self._1m_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_1m_sats')
self._1sat: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_1sat')
self._100btc: _100btcPattern = _100btcPattern(client, 'utxos_above_100btc')
self._100k_sats: _100btcPattern = _100btcPattern(client, 'utxos_above_100k_sats')
self._100sats: _100btcPattern = _100btcPattern(client, 'utxos_above_100sats')
self._10btc: _100btcPattern = _100btcPattern(client, 'utxos_above_10btc')
self._10k_btc: _100btcPattern = _100btcPattern(client, 'utxos_above_10k_btc')
self._10k_sats: _100btcPattern = _100btcPattern(client, 'utxos_above_10k_sats')
self._10m_sats: _100btcPattern = _100btcPattern(client, 'utxos_above_10m_sats')
self._10sats: _100btcPattern = _100btcPattern(client, 'utxos_above_10sats')
self._1btc: _100btcPattern = _100btcPattern(client, 'utxos_above_1btc')
self._1k_btc: _100btcPattern = _100btcPattern(client, 'utxos_above_1k_btc')
self._1k_sats: _100btcPattern = _100btcPattern(client, 'utxos_above_1k_sats')
self._1m_sats: _100btcPattern = _100btcPattern(client, 'utxos_above_1m_sats')
self._1sat: _100btcPattern = _100btcPattern(client, 'utxos_above_1sat')
class CatalogTree_Computed_Distribution_UtxoCohorts_LtAmount:
"""Catalog tree node."""
def __init__(self, client: BrkClientBase, base_path: str = ''):
self._100btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_100btc')
self._100k_btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_100k_btc')
self._100k_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_100k_sats')
self._100sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_100sats')
self._10btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_10btc')
self._10k_btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_10k_btc')
self._10k_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_10k_sats')
self._10m_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_10m_sats')
self._10sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_10sats')
self._1btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_1btc')
self._1k_btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_1k_btc')
self._1k_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_1k_sats')
self._1m_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_1m_sats')
self._100btc: _100btcPattern = _100btcPattern(client, 'utxos_under_100btc')
self._100k_btc: _100btcPattern = _100btcPattern(client, 'utxos_under_100k_btc')
self._100k_sats: _100btcPattern = _100btcPattern(client, 'utxos_under_100k_sats')
self._100sats: _100btcPattern = _100btcPattern(client, 'utxos_under_100sats')
self._10btc: _100btcPattern = _100btcPattern(client, 'utxos_under_10btc')
self._10k_btc: _100btcPattern = _100btcPattern(client, 'utxos_under_10k_btc')
self._10k_sats: _100btcPattern = _100btcPattern(client, 'utxos_under_10k_sats')
self._10m_sats: _100btcPattern = _100btcPattern(client, 'utxos_under_10m_sats')
self._10sats: _100btcPattern = _100btcPattern(client, 'utxos_under_10sats')
self._1btc: _100btcPattern = _100btcPattern(client, 'utxos_under_1btc')
self._1k_btc: _100btcPattern = _100btcPattern(client, 'utxos_under_1k_btc')
self._1k_sats: _100btcPattern = _100btcPattern(client, 'utxos_under_1k_sats')
self._1m_sats: _100btcPattern = _100btcPattern(client, 'utxos_under_1m_sats')
class CatalogTree_Computed_Distribution_UtxoCohorts_MaxAge:
"""Catalog tree node."""
@@ -3259,31 +3284,53 @@ class CatalogTree_Computed_Distribution_UtxoCohorts_MinAge:
"""Catalog tree node."""
def __init__(self, client: BrkClientBase, base_path: str = ''):
self._10y: _0satsPattern2 = _0satsPattern2(client, 'utxos_at_least_10y_old')
self._12y: _0satsPattern2 = _0satsPattern2(client, 'utxos_at_least_12y_old')
self._1d: _0satsPattern2 = _0satsPattern2(client, 'utxos_at_least_1d_old')
self._1m: _0satsPattern2 = _0satsPattern2(client, 'utxos_at_least_1m_old')
self._1w: _0satsPattern2 = _0satsPattern2(client, 'utxos_at_least_1w_old')
self._1y: _0satsPattern2 = _0satsPattern2(client, 'utxos_at_least_1y_old')
self._2m: _0satsPattern2 = _0satsPattern2(client, 'utxos_at_least_2m_old')
self._2y: _0satsPattern2 = _0satsPattern2(client, 'utxos_at_least_2y_old')
self._3m: _0satsPattern2 = _0satsPattern2(client, 'utxos_at_least_3m_old')
self._3y: _0satsPattern2 = _0satsPattern2(client, 'utxos_at_least_3y_old')
self._4m: _0satsPattern2 = _0satsPattern2(client, 'utxos_at_least_4m_old')
self._4y: _0satsPattern2 = _0satsPattern2(client, 'utxos_at_least_4y_old')
self._5m: _0satsPattern2 = _0satsPattern2(client, 'utxos_at_least_5m_old')
self._5y: _0satsPattern2 = _0satsPattern2(client, 'utxos_at_least_5y_old')
self._6m: _0satsPattern2 = _0satsPattern2(client, 'utxos_at_least_6m_old')
self._6y: _0satsPattern2 = _0satsPattern2(client, 'utxos_at_least_6y_old')
self._7y: _0satsPattern2 = _0satsPattern2(client, 'utxos_at_least_7y_old')
self._8y: _0satsPattern2 = _0satsPattern2(client, 'utxos_at_least_8y_old')
self._10y: _100btcPattern = _100btcPattern(client, 'utxos_at_least_10y_old')
self._12y: _100btcPattern = _100btcPattern(client, 'utxos_at_least_12y_old')
self._1d: _100btcPattern = _100btcPattern(client, 'utxos_at_least_1d_old')
self._1m: _100btcPattern = _100btcPattern(client, 'utxos_at_least_1m_old')
self._1w: _100btcPattern = _100btcPattern(client, 'utxos_at_least_1w_old')
self._1y: _100btcPattern = _100btcPattern(client, 'utxos_at_least_1y_old')
self._2m: _100btcPattern = _100btcPattern(client, 'utxos_at_least_2m_old')
self._2y: _100btcPattern = _100btcPattern(client, 'utxos_at_least_2y_old')
self._3m: _100btcPattern = _100btcPattern(client, 'utxos_at_least_3m_old')
self._3y: _100btcPattern = _100btcPattern(client, 'utxos_at_least_3y_old')
self._4m: _100btcPattern = _100btcPattern(client, 'utxos_at_least_4m_old')
self._4y: _100btcPattern = _100btcPattern(client, 'utxos_at_least_4y_old')
self._5m: _100btcPattern = _100btcPattern(client, 'utxos_at_least_5m_old')
self._5y: _100btcPattern = _100btcPattern(client, 'utxos_at_least_5y_old')
self._6m: _100btcPattern = _100btcPattern(client, 'utxos_at_least_6m_old')
self._6y: _100btcPattern = _100btcPattern(client, 'utxos_at_least_6y_old')
self._7y: _100btcPattern = _100btcPattern(client, 'utxos_at_least_7y_old')
self._8y: _100btcPattern = _100btcPattern(client, 'utxos_at_least_8y_old')
class CatalogTree_Computed_Distribution_UtxoCohorts_Term:
"""Catalog tree node."""
def __init__(self, client: BrkClientBase, base_path: str = ''):
self.long: UpTo1dPattern = UpTo1dPattern(client, 'lth')
self.short: UpTo1dPattern = UpTo1dPattern(client, 'sth')
self.long: CatalogTree_Computed_Distribution_UtxoCohorts_Term_Long = CatalogTree_Computed_Distribution_UtxoCohorts_Term_Long(client, f'{base_path}_long')
self.short: CatalogTree_Computed_Distribution_UtxoCohorts_Term_Short = CatalogTree_Computed_Distribution_UtxoCohorts_Term_Short(client, f'{base_path}_short')
class CatalogTree_Computed_Distribution_UtxoCohorts_Term_Long:
"""Catalog tree node."""
def __init__(self, client: BrkClientBase, base_path: str = ''):
self.activity: ActivityPattern2 = ActivityPattern2(client, 'lth')
self.cost_basis: CostBasisPattern2 = CostBasisPattern2(client, 'lth')
self.realized: RealizedPattern2 = RealizedPattern2(client, 'lth')
self.relative: RelativePattern5 = RelativePattern5(client, 'lth')
self.supply: SupplyPattern3 = SupplyPattern3(client, 'lth')
self.unrealized: UnrealizedPattern = UnrealizedPattern(client, 'lth')
class CatalogTree_Computed_Distribution_UtxoCohorts_Term_Short:
"""Catalog tree node."""
def __init__(self, client: BrkClientBase, base_path: str = ''):
self.activity: ActivityPattern2 = ActivityPattern2(client, 'sth')
self.cost_basis: CostBasisPattern2 = CostBasisPattern2(client, 'sth')
self.realized: RealizedPattern3 = RealizedPattern3(client, 'sth')
self.relative: RelativePattern5 = RelativePattern5(client, 'sth')
self.supply: SupplyPattern3 = SupplyPattern3(client, 'sth')
self.unrealized: UnrealizedPattern = UnrealizedPattern(client, 'sth')
class CatalogTree_Computed_Distribution_UtxoCohorts_Type:
"""Catalog tree node."""
@@ -4475,10 +4522,15 @@ class BrkClient(BrkClientBase):
}
AGE_RANGE_NAMES = {
"up_to_1d": {
"id": "up_to_1d_old",
"short": "<1d",
"long": "Up to 1 Day Old"
"up_to_1h": {
"id": "up_to_1h_old",
"short": "<1h",
"long": "Up to 1 Hour Old"
},
"_1h_to_1d": {
"id": "at_least_1h_up_to_1d_old",
"short": "1h-1d",
"long": "1 Hour to 1 Day Old"
},
"_1d_to_1w": {
"id": "at_least_1d_up_to_1w_old",