global: snapshot

This commit is contained in:
nym21
2026-01-20 23:05:21 +01:00
parent 9613fce919
commit 2edd9ed2d7
33 changed files with 1020 additions and 1108 deletions

View File

@@ -13,3 +13,6 @@ rustflags = ["-C", "target-cpu=native", "-C", "target-feature=+bmi1,+bmi2,+avx2"
[target.x86_64-pc-windows-gnu]
rustflags = ["-C", "target-cpu=native", "-C", "target-feature=+bmi1,+bmi2,+avx2"]
[alias]
dev = "run -p brk_cli --features brk_server/bindgen"

125
Cargo.lock generated
View File

@@ -54,7 +54,7 @@ dependencies = [
"serde",
"serde_json",
"serde_qs",
"thiserror 2.0.18",
"thiserror",
"tower-layer",
"tower-service",
"tracing",
@@ -463,7 +463,6 @@ dependencies = [
"color-eyre",
"derive_more",
"pco",
"plotters",
"rayon",
"rustc-hash",
"schemars",
@@ -483,7 +482,7 @@ dependencies = [
"jiff",
"minreq",
"serde_json",
"thiserror 2.0.18",
"thiserror",
"tokio",
"vecdb",
]
@@ -540,10 +539,8 @@ name = "brk_logger"
version = "0.1.0-alpha.6"
dependencies = [
"jiff",
"logroller",
"owo-colors",
"tracing",
"tracing-appender",
"tracing-log",
"tracing-subscriber",
]
@@ -1071,15 +1068,6 @@ dependencies = [
"parking_lot_core",
]
[[package]]
name = "deranged"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ececcb659e7ba858fb4f10388c250a7252eb0a27373f1a72b8748afdd248e587"
dependencies = [
"powerfmt",
]
[[package]]
name = "derive_more"
version = "2.1.1"
@@ -1981,18 +1969,6 @@ version = "0.4.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897"
[[package]]
name = "logroller"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83db12bbf439ebe64c0b0e4402f435b6f866db498fc1ae17e1b5d1a01625e2be"
dependencies = [
"chrono",
"flate2",
"regex",
"thiserror 1.0.69",
]
[[package]]
name = "lsm-tree"
version = "3.0.1"
@@ -2137,12 +2113,6 @@ dependencies = [
"minimal-lexical",
]
[[package]]
name = "num-conv"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
[[package]]
name = "num-traits"
version = "0.2.19"
@@ -2372,12 +2342,6 @@ dependencies = [
"zerovec",
]
[[package]]
name = "powerfmt"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
[[package]]
name = "ppv-lite86"
version = "0.2.21"
@@ -2482,7 +2446,9 @@ dependencies = [
[[package]]
name = "rawdb"
version = "0.5.11"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fdd8290a282cf2ea860ee2e787b3229731db7dac73a16c9240c545e20e91b302"
dependencies = [
"libc",
"log",
@@ -2490,7 +2456,7 @@ dependencies = [
"parking_lot",
"rayon",
"smallvec",
"thiserror 2.0.18",
"thiserror",
]
[[package]]
@@ -2530,7 +2496,7 @@ checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac"
dependencies = [
"getrandom 0.2.17",
"libredox",
"thiserror 2.0.18",
"thiserror",
]
[[package]]
@@ -2843,7 +2809,7 @@ dependencies = [
"futures",
"percent-encoding",
"serde",
"thiserror 2.0.18",
"thiserror",
]
[[package]]
@@ -2998,33 +2964,13 @@ dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "thiserror"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
dependencies = [
"thiserror-impl 1.0.69",
]
[[package]]
name = "thiserror"
version = "2.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4"
dependencies = [
"thiserror-impl 2.0.18",
]
[[package]]
name = "thiserror-impl"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
dependencies = [
"proc-macro2",
"quote",
"syn",
"thiserror-impl",
]
[[package]]
@@ -3047,37 +2993,6 @@ dependencies = [
"cfg-if",
]
[[package]]
name = "time"
version = "0.3.45"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9e442fc33d7fdb45aa9bfeb312c095964abdf596f7567261062b2a7107aaabd"
dependencies = [
"deranged",
"itoa",
"num-conv",
"powerfmt",
"serde_core",
"time-core",
"time-macros",
]
[[package]]
name = "time-core"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b36ee98fd31ec7426d599183e8fe26932a8dc1fb76ddb6214d05493377d34ca"
[[package]]
name = "time-macros"
version = "0.2.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "71e552d1249bf61ac2a52db88179fd0673def1e1ad8243a00d9ec9ed71fee3dd"
dependencies = [
"num-conv",
"time-core",
]
[[package]]
name = "tinystr"
version = "0.8.2"
@@ -3225,18 +3140,6 @@ dependencies = [
"tracing-core",
]
[[package]]
name = "tracing-appender"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "786d480bce6247ab75f005b14ae1624ad978d3029d9113f0a22fa1ac773faeaf"
dependencies = [
"crossbeam-channel",
"thiserror 2.0.18",
"time",
"tracing-subscriber",
]
[[package]]
name = "tracing-attributes"
version = "0.1.31"
@@ -3363,7 +3266,9 @@ checksum = "8f54a172d0620933a27a4360d3db3e2ae0dd6cceae9730751a036bbf182c4b23"
[[package]]
name = "vecdb"
version = "0.5.11"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81910b96a48ea197d1871259164b957c05f3e94d94cd107c4b87cf24e7f2968f"
dependencies = [
"ctrlc",
"log",
@@ -3374,7 +3279,7 @@ dependencies = [
"schemars",
"serde",
"serde_json",
"thiserror 2.0.18",
"thiserror",
"vecdb_derive",
"zerocopy",
"zstd",
@@ -3382,7 +3287,9 @@ dependencies = [
[[package]]
name = "vecdb_derive"
version = "0.5.11"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ab7250822f3caf8795728690804d39ab5c72c51f5558b90788a79bc99776d55"
dependencies = [
"quote",
"syn",

View File

@@ -81,8 +81,8 @@ tokio = { version = "1.49.0", features = ["rt-multi-thread"] }
tracing = { version = "0.1", default-features = false, features = ["std"] }
tower-http = { version = "0.6.8", features = ["catch-panic", "compression-br", "compression-gzip", "compression-zstd", "cors", "normalize-path", "timeout", "trace"] }
tower-layer = "0.3"
# vecdb = { version = "0.5.11", features = ["derive", "serde_json", "pco", "schemars"] }
vecdb = { path = "../anydb/crates/vecdb", features = ["derive", "serde_json", "pco", "schemars"] }
vecdb = { version = "0.6.0", features = ["derive", "serde_json", "pco", "schemars"] }
# vecdb = { path = "../anydb/crates/vecdb", features = ["derive", "serde_json", "pco", "schemars"] }
[workspace.metadata.release]
shared-version = true

View File

@@ -51,9 +51,11 @@ class BrkError extends Error {{
/**
* @template T
* @typedef {{Object}} MetricData
* @property {{number}} version - Version of the metric data
* @property {{number}} total - Total number of data points
* @property {{number}} start - Start index (inclusive)
* @property {{number}} end - End index (exclusive)
* @property {{string}} stamp - ISO 8601 timestamp of when the response was generated
* @property {{T[]}} data - The metric data
*/
/** @typedef {{MetricData<any>}} AnyMetricData */

View File

@@ -133,9 +133,11 @@ pub fn generate_endpoint_class(output: &mut String) {
output,
r#"class MetricData(TypedDict, Generic[T]):
"""Metric data with range information."""
version: int
total: int
start: int
end: int
stamp: str
data: List[T]

View File

@@ -51,6 +51,22 @@ pub fn run() -> anyhow::Result<()> {
let mut indexer = Indexer::forced_import(&config.brkdir())?;
#[cfg(not(debug_assertions))]
{
// Pre-run indexer if too far behind, then drop and reimport to reduce memory
let chain_height = client.get_last_height()?;
let indexed_height = indexer.vecs.starting_height();
let blocks_behind = chain_height.saturating_sub(*indexed_height);
if blocks_behind > 1000 {
info!("Indexing {blocks_behind} blocks before starting server...");
sleep(Duration::from_secs(3));
indexer.index(&blocks, &client, &exit)?;
drop(indexer);
Mimalloc::collect();
indexer = Indexer::forced_import(&config.brkdir())?;
}
}
let mut computer = Computer::forced_import(&config.brkdir(), &indexer, config.fetcher())?;
let mempool = Mempool::new(&client);

View File

@@ -32,6 +32,5 @@ vecdb = { workspace = true }
[dev-dependencies]
brk_alloc = { workspace = true }
plotters = "0.3"
brk_bencher = { workspace = true }
color-eyre = { workspace = true }

View File

@@ -70,7 +70,9 @@ where
}};
}
let index = validate_vec!(first, last, min, max, average, sum, cumulative, median, pct10, pct25, pct75, pct90);
let index = validate_vec!(
first, last, min, max, average, sum, cumulative, median, pct10, pct25, pct75, pct90
);
let needs_first = first.is_some();
let needs_last = last.is_some();
@@ -298,7 +300,9 @@ where
};
}
write_vec!(first, last, min, max, average, sum, cumulative, median, pct10, pct25, pct75, pct90);
write_vec!(
first, last, min, max, average, sum, cumulative, median, pct10, pct25, pct75, pct90
);
Ok(())
}
@@ -306,7 +310,7 @@ where
/// Compute cumulative extension from a source vec.
///
/// Used when only cumulative needs to be extended from an existing source.
pub fn compute_cumulative_extend<I, T>(
pub fn compute_cumulative<I, T>(
max_from: I,
source: &impl IterableVec<I, T>,
cumulative: &mut EagerVec<PcoVec<I, T>>,
@@ -340,6 +344,47 @@ where
Ok(())
}
/// Compute cumulative from binary transform of two source vecs.
///
/// For each index `i`, adds `transform(source1[i], source2[i])` to a running
/// total and writes it to `cumulative[i]`. Resumes from the already-computed
/// prefix of `cumulative` (capped at `max_from`) instead of recomputing from 0.
pub fn compute_cumulative_transform2<I, T, S1, S2, F>(
max_from: I,
source1: &impl IterableVec<I, S1>,
source2: &impl IterableVec<I, S2>,
cumulative: &mut EagerVec<PcoVec<I, T>>,
transform: F,
exit: &Exit,
) -> Result<()>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema,
S1: VecValue,
S2: VecValue,
F: Fn(S1, S2) -> T,
{
// Sum of source versions: if either source changed, the cached cumulative is reset.
let combined_version = source1.version() + source2.version();
cumulative.validate_computed_version_or_reset(combined_version)?;
// Resume point: never past what cumulative already holds.
let index = max_from.min(I::from(cumulative.len()));
// Only iterate over indexes present in BOTH sources.
let target_len = source1.len().min(source2.len());
// Seed the running total with the value just before the resume point,
// or T::from(0) when starting from the beginning.
let mut cumulative_val = index
.decremented()
.map_or(T::from(0_usize), |idx| cumulative.read_unwrap_once(idx));
let mut iter1 = source1.iter();
let mut iter2 = source2.iter();
for i in index.to_usize()..target_len {
let idx = I::from(i);
cumulative_val += transform(iter1.get_unwrap(idx), iter2.get_unwrap(idx));
// truncate_push_at discards any stale tail beyond i before appending.
cumulative.truncate_push_at(i, cumulative_val)?;
}
// Hold the exit lock while flushing so shutdown cannot interrupt a partial write.
let _lock = exit.lock();
cumulative.write()?;
Ok(())
}
/// Compute coarser aggregations from already-aggregated source data.
///
/// This is used for dateindex → weekindex, monthindex, etc. where we derive

View File

@@ -12,15 +12,14 @@ use brk_types::{
};
use derive_more::{Deref, DerefMut};
use vecdb::{
AnyStoredVec, AnyVec, Database, EagerVec, Exit, GenericStoredVec, ImportableVec,
IterableBoxedVec, IterableCloneableVec, IterableVec, LazyVecFrom3,
Database, EagerVec, Exit, ImportableVec, IterableBoxedVec, IterableCloneableVec, LazyVecFrom3,
};
use crate::{
ComputeIndexes, indexes,
internal::{
CumulativeVec, Full, LazyBinaryTransformFull, LazyDateDerivedFull, LazyFull,
SatsTimesClosePrice, Stats,
SatsTimesClosePrice, Stats, compute_cumulative,
},
};
@@ -137,7 +136,12 @@ impl ValueDollarsFromTxFull {
exit: &Exit,
) -> Result<()> {
// Compute height cumulative by summing lazy height.sum values
self.compute_height_cumulative(starting_indexes.height, exit)?;
compute_cumulative(
starting_indexes.height,
&self.height.sum,
&mut self.height_cumulative.0,
exit,
)?;
// Compute dateindex stats by aggregating lazy height stats
self.dateindex.compute(
@@ -150,30 +154,6 @@ impl ValueDollarsFromTxFull {
Ok(())
}
/// Compute cumulative USD by summing `sum_sats[h] * price[h]` for all heights.
fn compute_height_cumulative(&mut self, max_from: Height, exit: &Exit) -> Result<()> {
let starting_height = max_from.min(Height::from(self.height_cumulative.0.len()));
let mut cumulative = starting_height.decremented().map_or(Dollars::ZERO, |h| {
self.height_cumulative.0.iter().get_unwrap(h)
});
let mut sum_iter = self.height.sum.iter();
let start_idx = *starting_height as usize;
let end_idx = sum_iter.len();
for h in start_idx..end_idx {
let sum_usd = sum_iter.get_unwrap(Height::from(h));
cumulative += sum_usd;
self.height_cumulative.0.truncate_push_at(h, cumulative)?;
}
let _lock = exit.lock();
self.height_cumulative.0.write()?;
Ok(())
}
}
fn create_lazy_txindex(

View File

@@ -11,8 +11,8 @@ use vecdb::{Database, Exit, IterableBoxedVec, IterableCloneableVec, IterableVec}
use crate::{
ComputeIndexes, indexes,
internal::{
ComputedVecValue, CumulativeVec, LazyDateDerivedFull, Full, LazyFull, NumericValue,
compute_cumulative_extend,
ComputedVecValue, CumulativeVec, Full, LazyDateDerivedFull, LazyFull, NumericValue,
compute_cumulative,
},
};
@@ -102,6 +102,6 @@ where
height_source: &impl IterableVec<Height, T>,
exit: &Exit,
) -> Result<()> {
compute_cumulative_extend(max_from, height_source, &mut self.height_cumulative.0, exit)
compute_cumulative(max_from, height_source, &mut self.height_cumulative.0, exit)
}
}

View File

@@ -15,7 +15,7 @@ use crate::{
ComputeIndexes, indexes,
internal::{
ComputedVecValue, CumulativeVec, LazyDateDerivedSumCum, LazySumCum, NumericValue, SumCum,
compute_cumulative_extend,
compute_cumulative,
},
};
@@ -99,7 +99,7 @@ where
source: &impl IterableVec<Height, T>,
exit: &Exit,
) -> Result<()> {
compute_cumulative_extend(max_from, source, &mut self.height_cumulative.0, exit)
compute_cumulative(max_from, source, &mut self.height_cumulative.0, exit)
}
fn compute_dateindex_sum_cum(

View File

@@ -1,7 +1,10 @@
use brk_error::Result;
use brk_traversable::Traversable;
use schemars::JsonSchema;
use vecdb::{AnyVec, Database, Exit, IterableBoxedVec, IterableCloneableVec, IterableVec, VecIndex, VecValue, Version};
use vecdb::{
AnyVec, Database, Exit, IterableBoxedVec, IterableCloneableVec, IterableVec, VecIndex,
VecValue, Version,
};
use crate::internal::{ComputedVecValue, CumulativeVec, SumVec};
@@ -48,7 +51,7 @@ impl<I: VecIndex, T: ComputedVecValue + JsonSchema> SumCum<I, T> {
first_indexes,
count_indexes,
exit,
0, // min_skip_count
0, // min_skip_count
None, // first
None, // last
None, // min
@@ -64,16 +67,6 @@ impl<I: VecIndex, T: ComputedVecValue + JsonSchema> SumCum<I, T> {
)
}
/// Extend cumulative from an existing source vec.
pub fn extend_cumulative(
&mut self,
max_from: I,
source: &impl IterableVec<I, T>,
exit: &Exit,
) -> Result<()> {
crate::internal::compute_cumulative_extend(max_from, source, &mut self.cumulative.0, exit)
}
pub fn len(&self) -> usize {
self.sum.0.len().min(self.cumulative.0.len())
}

View File

@@ -9,9 +9,7 @@ repository.workspace = true
[dependencies]
jiff = { workspace = true }
logroller = "0.1"
owo-colors = "4.2.3"
tracing = { workspace = true }
tracing-appender = "0.2"
tracing-log = "0.2"
tracing-subscriber = { version = "0.3", default-features = false, features = ["fmt", "env-filter", "std"] }

View File

@@ -0,0 +1,168 @@
use std::fmt::Write;
use jiff::{Timestamp, tz};
use owo_colors::OwoColorize;
use tracing::{Event, Level, Subscriber, field::Field};
use tracing_subscriber::{
fmt::{FmtContext, FormatEvent, FormatFields, format::Writer},
registry::LookupSpan,
};
// Don't remove, used to know the target of unwanted logs
const WITH_TARGET: bool = false;
// const WITH_TARGET: bool = true;
/// Map a tracing `Level` to a fixed-width (5-char) lowercase label
/// so the level column stays aligned across log lines.
const fn level_str(level: Level) -> &'static str {
match level {
Level::ERROR => "error",
// Shorter names carry a trailing space to keep the 5-char width.
Level::WARN => "warn ",
Level::INFO => "info ",
Level::DEBUG => "debug",
Level::TRACE => "trace",
}
}
pub struct Formatter<const ANSI: bool>;
// Event formatter: `<timestamp> - <level> <fields>`. The const generic ANSI
// selects between colored (terminal) and plain (file) output at compile time.
impl<S, N, const ANSI: bool> FormatEvent<S, N> for Formatter<ANSI>
where
S: Subscriber + for<'a> LookupSpan<'a>,
N: for<'a> FormatFields<'a> + 'static,
{
fn format_event(
&self,
_ctx: &FmtContext<'_, S, N>,
mut writer: Writer<'_>,
event: &Event<'_>,
) -> std::fmt::Result {
// Local (system time zone) timestamp, second precision.
let ts = Timestamp::now()
.to_zoned(tz::TimeZone::system())
.strftime("%Y-%m-%d %H:%M:%S")
.to_string();
let level = *event.metadata().level();
let level_str = level_str(level);
if ANSI {
// Color the level label by severity.
let level_colored = match level {
Level::ERROR => level_str.red().to_string(),
Level::WARN => level_str.yellow().to_string(),
Level::INFO => level_str.green().to_string(),
Level::DEBUG => level_str.blue().to_string(),
Level::TRACE => level_str.cyan().to_string(),
};
// WITH_TARGET is a compile-time debug switch (see const above) that
// inserts the event's module target between timestamp and level.
if WITH_TARGET {
write!(
writer,
"{} {} {} {level_colored} ",
ts.bright_black(),
event.metadata().target(),
"-".bright_black(),
)?;
} else {
write!(
writer,
"{} {} {level_colored} ",
ts.bright_black(),
"-".bright_black()
)?;
}
} else if WITH_TARGET {
write!(writer, "{ts} {} - {level_str} ", event.metadata().target())?;
} else {
write!(writer, "{ts} - {level_str} ")?;
}
// Collect the event's fields (message / status / uri / latency) and
// render them either as an HTTP-style line or plain key=value pairs.
let mut visitor = FieldVisitor::<ANSI>::new();
event.record(&mut visitor);
write!(writer, "{}", visitor.finish())?;
writeln!(writer)
}
}
struct FieldVisitor<const ANSI: bool> {
result: String,
status: Option<u64>,
uri: Option<String>,
latency: Option<String>,
}
impl<const ANSI: bool> FieldVisitor<ANSI> {
// Start with everything empty; fields are filled in by the Visit impl below.
fn new() -> Self {
Self {
result: String::new(),
status: None,
uri: None,
latency: None,
}
}
// Render the collected fields. If a `status` field was seen, the event is
// treated as an HTTP access log ("<status> <uri> <latency>"); otherwise the
// accumulated key=value/message text is returned as-is.
fn finish(self) -> String {
if let Some(status) = self.status {
let status_str = if ANSI {
// Green for success, dim for redirects, red for everything else
// (4xx/5xx and any non-standard code).
match status {
200..=299 => status.green().to_string(),
300..=399 => status.bright_black().to_string(),
_ => status.red().to_string(),
}
} else {
status.to_string()
};
// Missing uri/latency degrade to empty strings rather than erroring.
let uri = self.uri.as_deref().unwrap_or("");
let latency = self.latency.as_deref().unwrap_or("");
if ANSI {
format!("{status_str} {uri} {}", latency.bright_black())
} else {
format!("{status_str} {uri} {latency}")
}
} else {
self.result
}
}
}
// Field visitor: captures `status`/`uri`/`latency` for HTTP-style lines,
// appends `message` verbatim, renders other fields as `key=value `, and
// drops synthetic `log.*` fields injected by the log-crate bridge.
impl<const ANSI: bool> tracing::field::Visit for FieldVisitor<ANSI> {
fn record_u64(&mut self, field: &Field, value: u64) {
let name = field.name();
if name == "status" {
self.status = Some(value);
} else if !name.starts_with("log.") {
let _ = write!(self.result, "{}={} ", name, value);
}
}
fn record_i64(&mut self, field: &Field, value: i64) {
let name = field.name();
if !name.starts_with("log.") {
let _ = write!(self.result, "{}={} ", name, value);
}
}
fn record_str(&mut self, field: &Field, value: &str) {
let name = field.name();
if name == "uri" {
self.uri = Some(value.to_string());
} else if name == "message" {
// The human-readable message is appended without a key prefix.
let _ = write!(self.result, "{value}");
} else if !name.starts_with("log.") {
let _ = write!(self.result, "{}={} ", name, value);
}
}
// Fallback for fields recorded as Debug (e.g. latency Durations).
fn record_debug(&mut self, field: &Field, value: &dyn std::fmt::Debug) {
let name = field.name();
match name {
"uri" => self.uri = Some(format!("{value:?}")),
"latency" => self.latency = Some(format!("{value:?}")),
"message" => {
let _ = write!(self.result, "{value:?}");
}
_ if name.starts_with("log.") => {}
_ => {
let _ = write!(self.result, "{}={:?} ", name, value);
}
}
}
}

View File

@@ -0,0 +1,30 @@
use std::{fmt::Write, sync::OnceLock};
use tracing::{Event, Subscriber, field::Field};
type LogHook = Box<dyn Fn(&str) + Send + Sync>;
pub static LOG_HOOK: OnceLock<LogHook> = OnceLock::new();
pub struct HookLayer;
// Subscriber layer that forwards every event's `message` field to the
// globally registered LOG_HOOK (if one was set). No-op until a hook exists.
impl<S: Subscriber> tracing_subscriber::Layer<S> for HookLayer {
fn on_event(&self, event: &Event<'_>, _: tracing_subscriber::layer::Context<'_, S>) {
if let Some(hook) = LOG_HOOK.get() {
// Extract only the message text; other fields are ignored.
let mut msg = String::new();
event.record(&mut MessageVisitor(&mut msg));
hook(&msg);
}
}
}
struct MessageVisitor<'a>(&'a mut String);
// Minimal visitor: keeps only the last-seen `message` field, Debug-formatted.
impl tracing::field::Visit for MessageVisitor<'_> {
fn record_debug(&mut self, field: &Field, value: &dyn std::fmt::Debug) {
if field.name() == "message" {
// Clear first so repeated message fields don't concatenate.
self.0.clear();
let _ = write!(self.0, "{value:?}");
}
}
}

View File

@@ -1,215 +1,21 @@
#![doc = include_str!("../README.md")]
use std::{fmt::Write as _, io, path::Path, sync::OnceLock};
mod format;
mod hook;
mod rate_limit;
use jiff::{Timestamp, tz};
use logroller::{LogRollerBuilder, Rotation, RotationSize};
use owo_colors::OwoColorize;
use tracing::{Event, Level, Subscriber, field::Field};
use tracing_appender::non_blocking::WorkerGuard;
use tracing_subscriber::{
EnvFilter,
fmt::{self, FmtContext, FormatEvent, FormatFields, format::Writer},
layer::SubscriberExt,
registry::LookupSpan,
util::SubscriberInitExt,
};
use std::{io, path::Path, time::Duration};
type LogHook = Box<dyn Fn(&str) + Send + Sync>;
use tracing_subscriber::{EnvFilter, fmt, layer::SubscriberExt, util::SubscriberInitExt};
static GUARD: OnceLock<WorkerGuard> = OnceLock::new();
static LOG_HOOK: OnceLock<LogHook> = OnceLock::new();
use format::Formatter;
use hook::{HookLayer, LOG_HOOK};
use rate_limit::RateLimitedFile;
const MAX_LOG_FILES: u64 = 5;
const MAX_FILE_SIZE_MB: u64 = 42;
// Don't remove, used to know the target of unwanted logs
const WITH_TARGET: bool = false;
// const WITH_TARGET: bool = true;
const fn level_str(level: Level) -> &'static str {
match level {
Level::ERROR => "error",
Level::WARN => "warn ",
Level::INFO => "info ",
Level::DEBUG => "debug",
Level::TRACE => "trace",
}
}
struct Formatter<const ANSI: bool>;
/// Visitor that collects structured fields for colored formatting
struct FieldVisitor<const ANSI: bool> {
result: String,
status: Option<u64>,
uri: Option<String>,
latency: Option<String>,
}
impl<const ANSI: bool> FieldVisitor<ANSI> {
fn new() -> Self {
Self {
result: String::new(),
status: None,
uri: None,
latency: None,
}
}
fn finish(self) -> String {
// Format HTTP-style log if we have status
if let Some(status) = self.status {
let status_str = if ANSI {
match status {
200..=299 => status.green().to_string(),
300..=399 => status.bright_black().to_string(),
_ => status.red().to_string(),
}
} else {
status.to_string()
};
let uri = self.uri.as_deref().unwrap_or("");
let latency = self.latency.as_deref().unwrap_or("");
if ANSI {
format!("{status_str} {uri} {}", latency.bright_black())
} else {
format!("{status_str} {uri} {latency}")
}
} else {
self.result
}
}
}
impl<const ANSI: bool> tracing::field::Visit for FieldVisitor<ANSI> {
fn record_u64(&mut self, field: &Field, value: u64) {
let name = field.name();
if name == "status" {
self.status = Some(value);
} else if !name.starts_with("log.") {
let _ = write!(self.result, "{}={} ", name, value);
}
}
fn record_i64(&mut self, field: &Field, value: i64) {
let name = field.name();
if !name.starts_with("log.") {
let _ = write!(self.result, "{}={} ", name, value);
}
}
fn record_str(&mut self, field: &Field, value: &str) {
let name = field.name();
if name == "uri" {
self.uri = Some(value.to_string());
} else if name == "message" {
let _ = write!(self.result, "{value}");
} else if !name.starts_with("log.") {
let _ = write!(self.result, "{}={} ", name, value);
}
}
fn record_debug(&mut self, field: &Field, value: &dyn std::fmt::Debug) {
let name = field.name();
match name {
"uri" => self.uri = Some(format!("{value:?}")),
"latency" => self.latency = Some(format!("{value:?}")),
"message" => {
let _ = write!(self.result, "{value:?}");
}
_ if name.starts_with("log.") => {}
_ => {
let _ = write!(self.result, "{}={:?} ", name, value);
}
}
}
}
impl<S, N, const ANSI: bool> FormatEvent<S, N> for Formatter<ANSI>
where
S: Subscriber + for<'a> LookupSpan<'a>,
N: for<'a> FormatFields<'a> + 'static,
{
fn format_event(
&self,
_ctx: &FmtContext<'_, S, N>,
mut writer: Writer<'_>,
event: &Event<'_>,
) -> std::fmt::Result {
let ts = Timestamp::now()
.to_zoned(tz::TimeZone::system())
.strftime("%Y-%m-%d %H:%M:%S")
.to_string();
let level = *event.metadata().level();
let level_str = level_str(level);
if ANSI {
let level_colored = match level {
Level::ERROR => level_str.red().to_string(),
Level::WARN => level_str.yellow().to_string(),
Level::INFO => level_str.green().to_string(),
Level::DEBUG => level_str.blue().to_string(),
Level::TRACE => level_str.cyan().to_string(),
};
if WITH_TARGET {
write!(
writer,
"{} {} {} {level_colored} ",
ts.bright_black(),
event.metadata().target(),
"-".bright_black(),
)?;
} else {
write!(
writer,
"{} {} {level_colored} ",
ts.bright_black(),
"-".bright_black()
)?;
}
} else if WITH_TARGET {
write!(writer, "{ts} {} - {level_str} ", event.metadata().target())?;
} else {
write!(writer, "{ts} - {level_str} ")?;
}
let mut visitor = FieldVisitor::<ANSI>::new();
event.record(&mut visitor);
write!(writer, "{}", visitor.finish())?;
writeln!(writer)
}
}
struct HookLayer;
impl<S: Subscriber> tracing_subscriber::Layer<S> for HookLayer {
fn on_event(&self, event: &Event<'_>, _: tracing_subscriber::layer::Context<'_, S>) {
if let Some(hook) = LOG_HOOK.get() {
let mut msg = String::new();
event.record(&mut MessageVisitor(&mut msg));
hook(&msg);
}
}
}
struct MessageVisitor<'a>(&'a mut String);
impl tracing::field::Visit for MessageVisitor<'_> {
fn record_debug(&mut self, field: &tracing::field::Field, value: &dyn std::fmt::Debug) {
use std::fmt::Write;
if field.name() == "message" {
self.0.clear();
let _ = write!(self.0, "{value:?}");
}
}
}
/// Days to keep log files before cleanup
const MAX_LOG_AGE_DAYS: u64 = 7;
pub fn init(path: Option<&Path>) -> io::Result<()> {
// Bridge log crate to tracing (for vecdb and other log-based crates)
tracing_log::LogTracer::init().ok();
#[cfg(debug_assertions)]
@@ -217,12 +23,11 @@ pub fn init(path: Option<&Path>) -> io::Result<()> {
#[cfg(not(debug_assertions))]
const DEFAULT_LEVEL: &str = "info";
let default_filter = format!(
"{DEFAULT_LEVEL},bitcoin=off,bitcoincore-rpc=off,fjall=off,brk_fjall=off,lsm_tree=off,brk_rolldown=off,rolldown=off,tracing=off,aide=off,rustls=off,notify=off,oxc_resolver=off,tower_http=off"
);
let filter =
EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new(default_filter));
let filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| {
EnvFilter::new(format!(
"{DEFAULT_LEVEL},bitcoin=off,bitcoincore-rpc=off,fjall=off,brk_fjall=off,lsm_tree=off,brk_rolldown=off,rolldown=off,tracing=off,aide=off,rustls=off,notify=off,oxc_resolver=off,tower_http=off"
))
});
let registry = tracing_subscriber::registry()
.with(filter)
@@ -231,25 +36,20 @@ pub fn init(path: Option<&Path>) -> io::Result<()> {
if let Some(path) = path {
let dir = path.parent().unwrap_or(Path::new("."));
let filename = path
let prefix = path
.file_name()
.and_then(|s| s.to_str())
.unwrap_or("app.log");
let roller = LogRollerBuilder::new(dir, Path::new(filename))
.rotation(Rotation::SizeBased(RotationSize::MB(MAX_FILE_SIZE_MB)))
.max_keep_files(MAX_LOG_FILES)
.build()
.map_err(io::Error::other)?;
cleanup_old_logs(dir, prefix);
let (non_blocking, guard) = tracing_appender::non_blocking(roller);
GUARD.set(guard).ok();
let writer = RateLimitedFile::new(dir, prefix);
registry
.with(
fmt::layer()
.event_format(Formatter::<false>)
.with_writer(non_blocking),
.with_writer(writer),
)
.init();
} else {
@@ -260,7 +60,6 @@ pub fn init(path: Option<&Path>) -> io::Result<()> {
}
/// Register a hook that gets called for every log message.
/// Can only be called once.
pub fn register_hook<F>(hook: F) -> Result<(), &'static str>
where
F: Fn(&str) + Send + Sync + 'static,
@@ -269,3 +68,29 @@ where
.set(Box::new(hook))
.map_err(|_| "Hook already registered")
}
/// Delete log files in `dir` whose names start with `prefix` (but are not
/// exactly `prefix`, so the active base file is spared) and whose mtime is
/// older than MAX_LOG_AGE_DAYS. All failures are ignored: cleanup is
/// best-effort and must never prevent logger initialization.
fn cleanup_old_logs(dir: &Path, prefix: &str) {
let max_age = Duration::from_secs(MAX_LOG_AGE_DAYS * 24 * 60 * 60);
let Ok(entries) = std::fs::read_dir(dir) else {
return;
};
for entry in entries.flatten() {
let path = entry.path();
// Skip entries with non-UTF-8 names.
let Some(name) = path.file_name().and_then(|n| n.to_str()) else {
continue;
};
if !name.starts_with(prefix) || name == prefix {
continue;
}
// elapsed() errors when mtime is in the future; such files are kept.
if let Ok(meta) = path.metadata()
&& let Ok(modified) = meta.modified()
&& let Ok(age) = modified.elapsed()
&& age > max_age
{
let _ = std::fs::remove_file(&path);
}
}
}

View File

@@ -0,0 +1,90 @@
use std::{
fs::OpenOptions,
io::{self, Write},
path::PathBuf,
sync::{
Arc,
atomic::{AtomicU64, Ordering},
},
time::{SystemTime, UNIX_EPOCH},
};
use jiff::{Timestamp, tz};
use tracing_subscriber::fmt::MakeWriter;
const MAX_WRITES_PER_SEC: u64 = 100;
struct Inner {
dir: PathBuf,
prefix: String,
count: AtomicU64,
last_second: AtomicU64,
}
impl Inner {
// Token-bucket-per-second rate limit: allow at most MAX_WRITES_PER_SEC
// writes per wall-clock second; excess writes are silently dropped.
// NOTE(review): the load/store pair on last_second+count is not atomic as
// a unit, so concurrent writers crossing a second boundary can race and
// briefly over/under-count — presumably acceptable for log throttling.
fn can_write(&self) -> bool {
let now = SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_secs();
let last = self.last_second.load(Ordering::Relaxed);
if now != last {
// New second: reset the window and count this write as the first.
self.last_second.store(now, Ordering::Relaxed);
self.count.store(1, Ordering::Relaxed);
true
} else {
// fetch_add returns the previous count, so exactly
// MAX_WRITES_PER_SEC writes pass per second.
self.count.fetch_add(1, Ordering::Relaxed) < MAX_WRITES_PER_SEC
}
}
// Daily log file path: "<dir>/<prefix>.<YYYY-MM-DD>" in the system time zone.
// Recomputed per write, so the file rolls over naturally at midnight.
fn path(&self) -> PathBuf {
let date = Timestamp::now()
.to_zoned(tz::TimeZone::system())
.strftime("%Y-%m-%d")
.to_string();
self.dir.join(format!("{}.{}", self.prefix, date))
}
}
#[derive(Clone)]
pub struct RateLimitedFile(Arc<Inner>);
impl RateLimitedFile {
pub fn new(dir: &std::path::Path, prefix: &str) -> Self {
Self(Arc::new(Inner {
dir: dir.to_path_buf(),
prefix: prefix.to_string(),
count: AtomicU64::new(0),
last_second: AtomicU64::new(0),
}))
}
}
pub struct FileWriter(Arc<Inner>);
impl Write for FileWriter {
// Append `buf` to today's log file, opening (and creating) it per write.
// Rate-limited writes pretend to succeed (return buf.len()) so tracing
// never sees an error for a deliberately dropped line.
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
if !self.0.can_write() {
return Ok(buf.len());
}
// NOTE(review): uses write(), which may write fewer bytes than buf —
// a truncated log line is possible; write_all would avoid that. Confirm
// whether tracing-appender-style callers retry short writes.
OpenOptions::new()
.create(true)
.append(true)
.open(self.0.path())?
.write(buf)
}
// Nothing buffered here: each write opens, appends, and closes the file.
fn flush(&mut self) -> io::Result<()> {
Ok(())
}
}
// MakeWriter integration for tracing-subscriber: each event gets a FileWriter
// sharing the same Arc<Inner>, so the rate-limit counters are global.
impl<'a> MakeWriter<'a> for RateLimitedFile {
type Writer = FileWriter;
fn make_writer(&'a self) -> Self::Writer {
FileWriter(Arc::clone(&self.0))
}
}

View File

@@ -20,7 +20,9 @@ pub use constants::{HeatmapFilter, NUM_BINS, ROUND_USD_AMOUNTS};
pub use filters::FILTERS;
pub use oracle::{
derive_daily_ohlc, derive_daily_ohlc_with_confidence, derive_height_price,
derive_ohlc_from_height_prices, derive_price_from_histogram, OracleConfig, OracleResult,
derive_height_price_with_confidence, derive_ohlc_from_height_prices,
derive_ohlc_from_height_prices_with_confidence, derive_price_from_histogram,
HeightPriceResult, OracleConfig, OracleResult,
};
pub use signal::{compute_expected_bins_per_day, usd_to_bin};
pub use histogram::load_or_compute_output_conditions;

View File

@@ -7,10 +7,13 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[features]
bindgen = ["dep:brk_bindgen"]
[dependencies]
aide = { workspace = true }
axum = { workspace = true }
brk_bindgen = { workspace = true }
brk_bindgen = { workspace = true, optional = true }
brk_computer = { workspace = true }
brk_error = { workspace = true, features = ["jiff", "serde_json", "tokio", "vecdb"] }
brk_fetcher = { workspace = true }

View File

@@ -1,7 +1,6 @@
#![doc = include_str!("../README.md")]
use std::{
panic,
path::PathBuf,
sync::Arc,
time::{Duration, Instant},
@@ -62,6 +61,9 @@ impl Server {
pub async fn serve(self, port: Option<Port>) -> brk_error::Result<()> {
let state = self.0;
#[cfg(feature = "bindgen")]
let vecs = state.query.inner().vecs();
let compression_layer = CompressionLayer::new().br(true).gzip(true).zstd(true);
let response_uri_layer = axum::middleware::from_fn(
@@ -96,8 +98,6 @@ impl Server {
)
.on_eos(());
let vecs = state.query.inner().vecs();
let website_router = brk_website::router(state.website.clone());
let mut router = ApiRouter::new().add_api_routes();
if !state.website.is_enabled() {
@@ -141,28 +141,33 @@ impl Server {
let mut openapi = create_openapi();
let router = router.finish_api(&mut openapi);
let workspace_root: PathBuf = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
.parent()
.and_then(|p| p.parent())
.unwrap()
.into();
let output_paths = brk_bindgen::ClientOutputPaths::new()
.rust(workspace_root.join("crates/brk_client/src/lib.rs"))
.javascript(workspace_root.join("modules/brk-client/index.js"))
.python(workspace_root.join("packages/brk_client/brk_client/__init__.py"));
#[cfg(feature = "bindgen")]
{
let workspace_root = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
.parent()
.and_then(|p| p.parent())
.unwrap()
.to_path_buf();
let output_paths = brk_bindgen::ClientOutputPaths::new()
.rust(workspace_root.join("crates/brk_client/src/lib.rs"))
.javascript(workspace_root.join("modules/brk-client/index.js"))
.python(workspace_root.join("packages/brk_client/brk_client/__init__.py"));
let openapi_json = serde_json::to_string(&openapi).unwrap();
let result = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
brk_bindgen::generate_clients(vecs, &openapi_json, &output_paths)
}));
match result {
Ok(Ok(())) => info!("Generated clients"),
Ok(Err(e)) => error!("Failed to generate clients: {e}"),
Err(_) => error!("Client generation panicked"),
}
}
let api_json = Arc::new(ApiJson::new(&openapi));
let openapi_json = serde_json::to_string(&openapi).unwrap();
let result = panic::catch_unwind(panic::AssertUnwindSafe(|| {
brk_bindgen::generate_clients(vecs, &openapi_json, &output_paths)
}));
match result {
Ok(Ok(())) => info!("Generated clients"),
Ok(Err(e)) => error!("Failed to generate clients: {e}"),
Err(_) => error!("Client generation panicked"),
}
let router = router
.layer(Extension(Arc::new(openapi)))

View File

@@ -839,9 +839,11 @@ class BrkError extends Error {
/**
* @template T
* @typedef {Object} MetricData
* @property {number} version - Version of the metric data
* @property {number} total - Total number of data points
* @property {number} start - Start index (inclusive)
* @property {number} end - End index (exclusive)
* @property {string} stamp - ISO 8601 timestamp of when the response was generated
* @property {T[]} data - The metric data
*/
/** @typedef {MetricData<any>} AnyMetricData */

View File

@@ -1041,9 +1041,11 @@ def _p(prefix: str, acc: str) -> str:
class MetricData(TypedDict, Generic[T]):
    """Metric data with range information."""
    version: int  # version of the metric data
    total: int  # total number of data points
    start: int  # start index (inclusive)
    end: int  # end index (exclusive)
    stamp: str  # ISO 8601 timestamp of when the response was generated
    data: List[T]  # the metric data points

View File

@@ -1,13 +1,14 @@
import { ios } from "../../utils/env.js";
import { domToBlob } from "../../modules/modern-screenshot/4.6.7/dist/index.mjs";
import { ios, canShare } from "../utils/env.js";
import { domToBlob } from "../modules/modern-screenshot/4.6.7/dist/index.mjs";
export const canCapture = !ios || canShare;
/**
* @param {Object} args
* @param {Element} args.element
* @param {string} args.name
* @param {string} args.title
*/
export async function screenshot({ element, name, title }) {
export async function capture({ element, name }) {
const blob = await domToBlob(element, {
scale: 2,
});
@@ -16,15 +17,13 @@ export async function screenshot({ element, name, title }) {
const file = new File(
[blob],
`bitview-${name}-${new Date().toJSON().split(".")[0]}.png`,
{
type: "image/png",
},
{ type: "image/png" },
);
try {
await navigator.share({
files: [file],
title: `${title} on ${window.document.location.hostname}`,
title: `${name} on ${window.document.location.hostname}`,
});
return;
} catch (err) {

View File

@@ -1,8 +1,26 @@
import { oklchToRgba } from "./oklch.js";
/** @type {Map<string, string>} */
const rgbaCache = new Map();
/**
 * Convert an oklch color string to rgba, memoizing the conversion.
 * "transparent" is passed through untouched.
 * @param {string} color - oklch color string
 */
function toRgba(color) {
  if (color === "transparent") return color;
  const hit = rgbaCache.get(color);
  if (hit) return hit;
  const converted = oklchToRgba(color);
  rgbaCache.set(color, converted);
  return converted;
}
/**
* Reduce color opacity to 50% for dimming effect
* @param {string} color - oklch color string
*/
export function tameColor(color) {
function tameColor(color) {
if (color === "transparent") return color;
return `${color.slice(0, -1)} / 50%)`;
}
@@ -23,9 +41,10 @@ export function tameColor(color) {
* @returns {Color}
*/
function createColor(getter) {
const color = /** @type {Color} */ (() => getter());
color.tame = () => tameColor(getter());
color.highlight = (highlighted) => highlighted ? getter() : tameColor(getter());
const color = /** @type {Color} */ (() => toRgba(getter()));
color.tame = () => toRgba(tameColor(getter()));
color.highlight = (highlighted) =>
highlighted ? toRgba(getter()) : toRgba(tameColor(getter()));
return color;
}

View File

@@ -1,18 +1,16 @@
import {
createChart as _createChart,
createSeriesMarkers,
createChart as untypedLcCreateChart,
CandlestickSeries,
HistogramSeries,
LineSeries,
BaselineSeries,
// } from "../modules/lightweight-charts/5.1.0/dist/lightweight-charts.standalone.development.mjs";
} from "../modules/lightweight-charts/5.1.0/dist/lightweight-charts.standalone.production.mjs";
import { createMinMaxMarkers } from "./markers.js";
import { createLegend } from "./legend.js";
import { capture, canCapture } from "./capture.js";
const createChart = /** @type {CreateChart} */ (_createChart);
const lcCreateChart = /** @type {CreateLCChart} */ (untypedLcCreateChart);
import { createChoiceField } from "../utils/dom.js";
import { createOklchToRGBA } from "./oklch.js";
import { throttle } from "../utils/timing.js";
import { serdeBool } from "../utils/serde.js";
import { stringToId, numberToShortUSFormat } from "../utils/format.js";
@@ -43,8 +41,6 @@ import { resources } from "../resources.js";
* @property {Signal<string | null>} url
* @property {() => readonly T[]} getData
* @property {(data: T) => void} update
* @property {(markers: TimeSeriesMarker[]) => void} setMarkers
* @property {VoidFunction} clearMarkers
* @property {VoidFunction} remove
*/
@@ -65,8 +61,6 @@ import { resources } from "../resources.js";
* @property {function(number): void} removeFrom
*/
const oklchToRGBA = createOklchToRGBA();
const lineWidth = /** @type {any} */ (1.5);
/**
@@ -80,9 +74,10 @@ const lineWidth = /** @type {any} */ (1.5);
* @param {((unknownTimeScaleCallback: VoidFunction) => void)} [args.timeScaleSetCallback]
* @param {number | null} [args.initialVisibleBarsCount]
* @param {true} [args.fitContent]
* @param {HTMLElement} [args.captureElement]
* @param {{unit: Unit; blueprints: AnySeriesBlueprint[]}[]} [args.config]
*/
export function createChartElement({
export function createChart({
parent,
signals,
colors,
@@ -92,6 +87,7 @@ export function createChartElement({
timeScaleSetCallback,
initialVisibleBarsCount,
fitContent,
captureElement,
config,
}) {
const div = window.document.createElement("div");
@@ -112,7 +108,7 @@ export function createChartElement({
const legendBottom = createLegend(signals);
div.append(legendBottom.element);
const ichart = createChart(
const ichart = lcCreateChart(
chartDiv,
/** @satisfies {DeepPartial<ChartOptions>} */ ({
autoSize: true,
@@ -120,8 +116,6 @@ export function createChartElement({
fontFamily: style.fontFamily,
background: { color: "transparent" },
attributionLogo: false,
colorSpace: "display-p3",
colorParsers: [oklchToRGBA],
},
grid: {
vertLines: { visible: false },
@@ -133,6 +127,7 @@ export function createChartElement({
timeScale: {
borderVisible: false,
enableConflation: true,
// conflationThresholdFactor: 8,
...(fitContent
? {
minBarSpacing: 0.001,
@@ -144,7 +139,7 @@ export function createChartElement({
locale: "en-us",
},
crosshair: {
mode: 3,
mode: 0,
},
...(fitContent
? {
@@ -170,18 +165,6 @@ export function createChartElement({
});
};
const seriesList = signals.createSignal(
/** @type {Set<AnySeries>} */ (new Set()),
{ equals: false },
);
const seriesCount = signals.createMemo(() => seriesList().size);
const markers = createMinMaxMarkers({
chart: ichart,
seriesList,
colors,
formatValue: numberToShortUSFormat,
});
const visibleBarsCount = signals.createSignal(
initialVisibleBarsCount ?? Infinity,
);
@@ -193,20 +176,11 @@ export function createChartElement({
const shouldShowLine = signals.createMemo(
() => visibleBarsCountBucket() >= 2,
);
const shouldUpdateMarkers = signals.createMemo(
() => visibleBarsCount() * seriesCount() <= 20_000,
);
signals.createEffect(shouldUpdateMarkers, (should) => {
if (should) markers.update();
else markers.clear();
});
ichart.timeScale().subscribeVisibleLogicalRangeChange(
throttle((range) => {
if (range) {
visibleBarsCount.set(range.to - range.from);
if (shouldUpdateMarkers()) markers.update();
}
}, 100),
);
@@ -273,7 +247,7 @@ export function createChartElement({
activeResources.forEach((v) => {
v.fetch();
});
}),
}, 10_000),
);
if (fitContent) {
@@ -386,12 +360,10 @@ export function createChartElement({
* @param {Accessor<WhitespaceData[]>} [args.data]
* @param {number} args.paneIndex
* @param {boolean} [args.defaultActive]
* @param {(ctx: { active: Signal<boolean>, highlighted: Signal<boolean> }) => void} args.setup
* @param {(ctx: { active: Signal<boolean>, highlighted: Signal<boolean>, zOrder: number }) => void} args.setup
* @param {() => readonly any[]} args.getData
* @param {(data: any[]) => void} args.setData
* @param {(data: any) => void} args.update
* @param {(markers: TimeSeriesMarker[]) => void} args.setMarkers
* @param {VoidFunction} args.clearMarkers
* @param {() => void} args.onRemove
*/
function addSeries({
@@ -408,8 +380,6 @@ export function createChartElement({
getData,
setData,
update,
setMarkers,
clearMarkers,
onRemove,
}) {
return signals.createRoot((dispose) => {
@@ -430,12 +400,7 @@ export function createChartElement({
const highlighted = signals.createSignal(true);
setup({ active, highlighted });
// Update markers when active changes
signals.createEffect(active, () => {
if (shouldUpdateMarkers()) markers.scheduleUpdate();
});
setup({ active, highlighted, zOrder: -order });
const hasData = signals.createSignal(false);
let lastTime = -Infinity;
@@ -453,22 +418,15 @@ export function createChartElement({
url: signals.createSignal(/** @type {string | null} */ (null)),
getData,
update,
setMarkers,
clearMarkers,
remove() {
dispose();
onRemove();
if (_valuesResource) {
activeResources.delete(_valuesResource);
}
seriesList().delete(series);
seriesList.set(seriesList());
},
};
seriesList().add(series);
seriesList.set(seriesList());
if (metric) {
signals.createScopedEffect(index, (index) => {
// Get timestamp metric from tree based on index type
@@ -496,138 +454,149 @@ export function createChartElement({
return `${base}${valuesResource.path}`;
});
signals.createScopedEffect(active, (active) => {
if (active) {
timeResource.fetch();
valuesResource.fetch();
activeResources.add(valuesResource);
const timeRange = timeResource.range();
const valuesRange = valuesResource.range();
const valuesCacheKey = signals.createMemo(() => {
const res = valuesRange.response();
if (!res?.data?.length) return null;
if (!timeRange.response()?.data?.length) return null;
return `${res.version}|${res.stamp}|${res.total}|${res.start}|${res.end}`;
});
signals.createEffect(valuesCacheKey, (cacheKey) => {
if (!cacheKey) return;
const _indexes = timeRange.response()?.data;
const values = valuesRange.response()?.data;
if (!_indexes?.length || !values?.length) return;
const indexes = /** @type {number[]} */ (_indexes);
const length = Math.min(indexes.length, values.length);
// Find start index for processing
let startIdx = 0;
if (hasData()) {
// Binary search to find first index where time >= lastTime
let lo = 0;
let hi = length;
while (lo < hi) {
const mid = (lo + hi) >>> 1;
if (indexes[mid] < lastTime) {
lo = mid + 1;
} else {
hi = mid;
}
}
startIdx = lo;
if (startIdx >= length) return; // No new data
}
/**
* @param {number} i
* @param {(number | null | [number, number, number, number])[]} vals
* @returns {LineData | CandlestickData}
*/
function buildDataPoint(i, vals) {
const time = /** @type {Time} */ (indexes[i]);
const v = vals[i];
if (v === null) {
return { time, value: NaN };
} else if (typeof v === "number") {
return { time, value: v };
} else {
if (!Array.isArray(v) || v.length !== 4)
throw new Error(`Expected OHLC tuple, got: ${v}`);
const [open, high, low, close] = v;
return { time, open, high, low, close };
}
}
if (!hasData()) {
// Initial load: build full array
const data = /** @type {LineData[] | CandlestickData[]} */ (
Array.from({ length })
);
let prevTime = null;
let timeOffset = 0;
for (let i = 0; i < length; i++) {
const time = indexes[i];
const sameTime = prevTime === time;
if (sameTime) {
timeOffset += 1;
}
const offsetedI = i - timeOffset;
const point = buildDataPoint(i, values);
if (sameTime && "open" in point) {
const prev = /** @type {CandlestickData} */ (
data[offsetedI]
);
point.open = prev.open;
point.high = Math.max(prev.high, point.high);
point.low = Math.min(prev.low, point.low);
}
data[offsetedI] = point;
prevTime = time;
}
data.length -= timeOffset;
setData(data);
hasData.set(true);
if (shouldUpdateMarkers()) markers.scheduleUpdate();
lastTime =
/** @type {number} */ (data.at(-1)?.time) ?? -Infinity;
if (fitContent) {
ichart.timeScale().fitContent();
}
timeScaleSetCallback?.(() => {
if (
index === "quarterindex" ||
index === "semesterindex" ||
index === "yearindex" ||
index === "decadeindex"
) {
setVisibleLogicalRange({ from: -1, to: data.length });
}
});
} else {
// Incremental update: only process new data points
for (let i = startIdx; i < length; i++) {
const point = buildDataPoint(i, values);
update(point);
lastTime = /** @type {number} */ (point.time);
}
}
});
} else {
activeResources.delete(valuesResource);
}
// Create memo outside active check (cheap, just checks data existence)
const timeRange = timeResource.range();
const valuesRange = valuesResource.range();
const valuesCacheKey = signals.createMemo(() => {
const res = valuesRange.response();
if (!res?.data?.length) return null;
if (!timeRange.response()?.data?.length) return null;
return `${res.version}|${res.stamp}|${res.total}|${res.start}|${res.end}`;
});
// Combined effect for active + data processing (flat, uses prev comparison)
signals.createEffect(
() => ({ isActive: active(), cacheKey: valuesCacheKey() }),
(curr, prev) => {
const becameActive = curr.isActive && (!prev || !prev.isActive);
const becameInactive = !curr.isActive && prev?.isActive;
if (becameInactive) {
activeResources.delete(valuesResource);
return;
}
if (!curr.isActive) return;
if (becameActive) {
timeResource.fetch();
valuesResource.fetch();
activeResources.add(valuesResource);
}
// Process data only if cacheKey changed
if (!curr.cacheKey || curr.cacheKey === prev?.cacheKey) return;
const _indexes = timeRange.response()?.data;
const values = valuesRange.response()?.data;
if (!_indexes?.length || !values?.length) return;
const indexes = /** @type {number[]} */ (_indexes);
const length = Math.min(indexes.length, values.length);
// Find start index for processing
let startIdx = 0;
if (hasData()) {
// Binary search to find first index where time >= lastTime
let lo = 0;
let hi = length;
while (lo < hi) {
const mid = (lo + hi) >>> 1;
if (indexes[mid] < lastTime) {
lo = mid + 1;
} else {
hi = mid;
}
}
startIdx = lo;
if (startIdx >= length) return; // No new data
}
/**
* @param {number} i
* @param {(number | null | [number, number, number, number])[]} vals
* @returns {LineData | CandlestickData}
*/
function buildDataPoint(i, vals) {
const time = /** @type {Time} */ (indexes[i]);
const v = vals[i];
if (v === null) {
return { time, value: NaN };
} else if (typeof v === "number") {
return { time, value: v };
} else {
if (!Array.isArray(v) || v.length !== 4)
throw new Error(`Expected OHLC tuple, got: ${v}`);
const [open, high, low, close] = v;
return { time, open, high, low, close };
}
}
if (!hasData()) {
// Initial load: build full array
const data = /** @type {LineData[] | CandlestickData[]} */ (
Array.from({ length })
);
let prevTime = null;
let timeOffset = 0;
for (let i = 0; i < length; i++) {
const time = indexes[i];
const sameTime = prevTime === time;
if (sameTime) {
timeOffset += 1;
}
const offsetedI = i - timeOffset;
const point = buildDataPoint(i, values);
if (sameTime && "open" in point) {
const prev = /** @type {CandlestickData} */ (
data[offsetedI]
);
point.open = prev.open;
point.high = Math.max(prev.high, point.high);
point.low = Math.min(prev.low, point.low);
}
data[offsetedI] = point;
prevTime = time;
}
data.length -= timeOffset;
setData(data);
hasData.set(true);
lastTime =
/** @type {number} */ (data.at(-1)?.time) ?? -Infinity;
if (fitContent) {
ichart.timeScale().fitContent();
}
timeScaleSetCallback?.(() => {
if (
index === "quarterindex" ||
index === "semesterindex" ||
index === "yearindex" ||
index === "decadeindex"
) {
setVisibleLogicalRange({ from: -1, to: data.length });
}
});
} else {
// Incremental update: only process new data points
for (let i = startIdx; i < length; i++) {
const point = buildDataPoint(i, values);
update(point);
lastTime = /** @type {number} */ (point.time);
}
}
},
);
});
} else if (data) {
signals.createEffect(data, (data) => {
setData(data);
hasData.set(true);
if (shouldUpdateMarkers()) markers.scheduleUpdate();
if (fitContent) {
ichart.timeScale().fitContent();
}
@@ -698,7 +667,6 @@ export function createChartElement({
const defaultRed = inverse ? colors.green : colors.red;
const upColor = customColors?.[0] ?? defaultGreen;
const downColor = customColors?.[1] ?? defaultRed;
let showLine = shouldShowLine();
/** @type {CandlestickISeries} */
const candlestickISeries = /** @type {any} */ (
@@ -710,7 +678,7 @@ export function createChartElement({
wickUpColor: upColor(),
wickDownColor: downColor(),
borderVisible: false,
visible: false,
visible: defaultActive !== false,
...options,
},
paneIndex,
@@ -731,8 +699,7 @@ export function createChartElement({
)
);
// Marker plugin always on candlestick (has true min/max via high/low)
const markerPlugin = createSeriesMarkers(candlestickISeries, [], { autoScale: false });
let showLine = false;
const series = addSeries({
colors: [upColor, downColor],
@@ -744,38 +711,28 @@ export function createChartElement({
data,
defaultActive,
metric,
setup: ({ active, highlighted }) => {
candlestickISeries.setSeriesOrder(order);
lineISeries.setSeriesOrder(order);
setup: ({ active, highlighted, zOrder }) => {
candlestickISeries.setSeriesOrder(zOrder);
lineISeries.setSeriesOrder(zOrder);
signals.createEffect(
() => ({
shouldShow: shouldShowLine(),
active: active(),
highlighted: highlighted(),
barsCount: visibleBarsCount(),
}),
({ shouldShow, active, highlighted, barsCount }) => {
if (barsCount === Infinity) return;
const wasLine = showLine;
({ shouldShow, active, highlighted }) => {
showLine = shouldShow;
// Use transparent when showing the other mode, otherwise use highlight
const up = showLine ? "transparent" : upColor.highlight(highlighted);
const down = showLine ? "transparent" : downColor.highlight(highlighted);
const line = showLine ? colors.default.highlight(highlighted) : "transparent";
candlestickISeries.applyOptions({
visible: active,
upColor: up,
downColor: down,
wickUpColor: up,
wickDownColor: down,
visible: active && !showLine,
upColor: upColor.highlight(highlighted),
downColor: downColor.highlight(highlighted),
wickUpColor: upColor.highlight(highlighted),
wickDownColor: downColor.highlight(highlighted),
});
lineISeries.applyOptions({
visible: active,
color: line,
priceLineVisible: active && showLine,
visible: active && showLine,
color: colors.default.highlight(highlighted),
});
if (wasLine !== showLine && shouldUpdateMarkers())
markers.scheduleUpdate();
},
);
},
@@ -789,8 +746,6 @@ export function createChartElement({
lineISeries.update({ time: data.time, value: data.close });
},
getData: () => candlestickISeries.data(),
setMarkers: (m) => markerPlugin.setMarkers(m),
clearMarkers: () => markerPlugin.setMarkers([]),
onRemove: () => {
ichart.removeSeries(candlestickISeries);
ichart.removeSeries(lineISeries);
@@ -839,8 +794,6 @@ export function createChartElement({
)
);
const markerPlugin = createSeriesMarkers(iseries, [], { autoScale: false });
const series = addSeries({
colors: isDualColor ? [positiveColor, negativeColor] : [positiveColor],
name,
@@ -851,8 +804,8 @@ export function createChartElement({
data,
defaultActive,
metric,
setup: ({ active, highlighted }) => {
iseries.setSeriesOrder(order);
setup: ({ active, highlighted, zOrder }) => {
iseries.setSeriesOrder(zOrder);
signals.createEffect(
() => ({ active: active(), highlighted: highlighted() }),
({ active, highlighted }) => {
@@ -880,8 +833,6 @@ export function createChartElement({
},
update: (data) => iseries.update(data),
getData: () => iseries.data(),
setMarkers: (m) => markerPlugin.setMarkers(m),
clearMarkers: () => markerPlugin.setMarkers([]),
onRemove: () => ichart.removeSeries(iseries),
});
return series;
@@ -926,8 +877,6 @@ export function createChartElement({
)
);
const markerPlugin = createSeriesMarkers(iseries, [], { autoScale: false });
const series = addSeries({
colors: [color],
name,
@@ -938,8 +887,8 @@ export function createChartElement({
data,
defaultActive,
metric,
setup: ({ active, highlighted }) => {
iseries.setSeriesOrder(order);
setup: ({ active, highlighted, zOrder }) => {
iseries.setSeriesOrder(zOrder);
signals.createEffect(
() => ({ active: active(), highlighted: highlighted() }),
({ active, highlighted }) => {
@@ -953,8 +902,6 @@ export function createChartElement({
setData: (data) => iseries.setData(data),
update: (data) => iseries.update(data),
getData: () => iseries.data(),
setMarkers: (m) => markerPlugin.setMarkers(m),
clearMarkers: () => markerPlugin.setMarkers([]),
onRemove: () => ichart.removeSeries(iseries),
});
return series;
@@ -1001,8 +948,6 @@ export function createChartElement({
)
);
const markerPlugin = createSeriesMarkers(iseries, [], { autoScale: false });
const series = addSeries({
colors: [color],
name,
@@ -1013,8 +958,8 @@ export function createChartElement({
data,
defaultActive,
metric,
setup: ({ active, highlighted }) => {
iseries.setSeriesOrder(order);
setup: ({ active, highlighted, zOrder }) => {
iseries.setSeriesOrder(zOrder);
signals.createEffect(
() => ({ active: active(), highlighted: highlighted() }),
({ active, highlighted }) => {
@@ -1032,8 +977,6 @@ export function createChartElement({
setData: (data) => iseries.setData(data),
update: (data) => iseries.update(data),
getData: () => iseries.data(),
setMarkers: (m) => markerPlugin.setMarkers(m),
clearMarkers: () => markerPlugin.setMarkers([]),
onRemove: () => ichart.removeSeries(iseries),
});
return series;
@@ -1089,8 +1032,6 @@ export function createChartElement({
)
);
const markerPlugin = createSeriesMarkers(iseries, [], { autoScale: false });
const series = addSeries({
colors: [topColor, bottomColor],
name,
@@ -1101,8 +1042,8 @@ export function createChartElement({
data,
defaultActive,
metric,
setup: ({ active, highlighted }) => {
iseries.setSeriesOrder(order);
setup: ({ active, highlighted, zOrder }) => {
iseries.setSeriesOrder(zOrder);
signals.createEffect(
() => ({ active: active(), highlighted: highlighted() }),
({ active, highlighted }) => {
@@ -1117,8 +1058,6 @@ export function createChartElement({
setData: (data) => iseries.setData(data),
update: (data) => iseries.update(data),
getData: () => iseries.data(),
setMarkers: (m) => markerPlugin.setMarkers(m),
clearMarkers: () => markerPlugin.setMarkers([]),
onRemove: () => ichart.removeSeries(iseries),
});
return series;
@@ -1179,10 +1118,38 @@ export function createChartElement({
});
});
if (captureElement && canCapture) {
const domain = window.document.createElement("p");
domain.innerText = window.location.host;
domain.id = "domain";
addFieldsetIfNeeded({
id: "capture",
paneIndex: 0,
position: "ne",
createChild() {
const button = window.document.createElement("button");
button.id = "capture";
button.innerText = "capture";
button.title = "Capture chart as image";
button.addEventListener("click", async () => {
captureElement.dataset.screenshot = "true";
captureElement.append(domain);
try {
await capture({ element: captureElement, name: chartId });
} catch {}
captureElement.removeChild(domain);
captureElement.dataset.screenshot = "false";
});
return button;
},
});
}
return chart;
}
/**
* @typedef {typeof createChartElement} CreateChartElement
* @typedef {ReturnType<createChartElement>} Chart
* @typedef {typeof createChart} CreateChart
* @typedef {ReturnType<createChart>} Chart
*/

View File

@@ -1,129 +0,0 @@
import { throttle } from "../utils/timing.js";
/**
 * Tracks the minimum and maximum values visible on the chart and renders
 * them as small arrow markers — at most one min and one max per pane.
 *
 * @param {Object} args
 * @param {IChartApi} args.chart - chart whose time scale defines the visible window
 * @param {Accessor<Set<AnySeries>>} args.seriesList - reactive set of all series on the chart
 * @param {Colors} args.colors - palette; markers are drawn with `colors.gray()`
 * @param {(value: number) => string} args.formatValue - renders the extreme value as marker text
 */
export function createMinMaxMarkers({ chart, seriesList, colors, formatValue }) {
  // Series that received markers during the previous update(), kept so
  // stale markers can be cleared once a series no longer owns an extreme.
  /** @type {Set<AnySeries>} */
  const prevMarkerSeries = new Set();
  function update() {
    const timeScale = chart.timeScale();
    const width = timeScale.width();
    const range = timeScale.getVisibleRange();
    if (!range) return;
    // Inset the scan window by 30px on each side so markers are not drawn
    // clipped at the chart edges; fall back to the raw visible range when
    // a coordinate cannot be mapped back to a time.
    const tLeft = timeScale.coordinateToTime(30);
    const tRight = timeScale.coordinateToTime(width - 30);
    const t0 = /** @type {number} */ (tLeft ?? range.from);
    const t1 = /** @type {number} */ (tRight ?? range.to);
    const color = colors.gray();
    // Running extrema per pane: value, its time, and the owning series.
    /** @type {Map<number, { minV: number, minT: Time, minS: AnySeries, maxV: number, maxT: Time, maxS: AnySeries }>} */
    const byPane = new Map();
    for (const series of seriesList()) {
      if (!series.active() || !series.hasData()) continue;
      const data = series.getData();
      const len = data.length;
      if (!len) continue;
      // Binary search for start: first point with time >= t0 (data is time-ordered)
      let lo = 0, hi = len;
      while (lo < hi) {
        const mid = (lo + hi) >>> 1;
        if (/** @type {number} */ (data[mid].time) < t0) lo = mid + 1;
        else hi = mid;
      }
      if (lo >= len) continue;
      const paneIndex = series.paneIndex;
      let pane = byPane.get(paneIndex);
      if (!pane) {
        pane = {
          minV: Infinity,
          minT: /** @type {Time} */ (0),
          minS: series,
          maxV: -Infinity,
          maxT: /** @type {Time} */ (0),
          maxS: series,
        };
        byPane.set(paneIndex, pane);
      }
      for (let i = lo; i < len; i++) {
        const pt = data[i];
        if (/** @type {number} */ (pt.time) > t1) break;
        // Candlesticks carry high/low; line-like points only have `value`.
        // NOTE(review): the truthiness guards below also skip exact-zero
        // values — presumably intended to ignore empty points; confirm
        // that 0 is never a legitimate extreme here.
        const v = pt.low ?? pt.value;
        const h = pt.high ?? pt.value;
        if (v && v < pane.minV) {
          pane.minV = v;
          pane.minT = pt.time;
          pane.minS = series;
        }
        if (h && h > pane.maxV) {
          pane.maxV = h;
          pane.maxT = pt.time;
          pane.maxS = series;
        }
      }
    }
    // Set new markers
    /** @type {Set<AnySeries>} */
    const used = new Set();
    for (const { minV, minT, minS, maxV, maxT, maxS } of byPane.values()) {
      // Skip panes with no finite extrema, or where min and max land on
      // the same bar (nothing meaningful to annotate).
      if (!Number.isFinite(minV) || !Number.isFinite(maxV) || minT === maxT)
        continue;
      const minM = /** @type {TimeSeriesMarker} */ ({
        time: minT,
        position: "belowBar",
        shape: "arrowUp",
        color,
        size: 0,
        text: formatValue(minV),
      });
      const maxM = /** @type {TimeSeriesMarker} */ ({
        time: maxT,
        position: "aboveBar",
        shape: "arrowDown",
        color,
        size: 0,
        text: formatValue(maxV),
      });
      used.add(minS);
      used.add(maxS);
      // setMarkers replaces a series' markers wholesale, so when one
      // series owns both extrema they must go in a single call.
      if (minS === maxS) {
        minS.setMarkers([minM, maxM]);
      } else {
        minS.setMarkers([minM]);
        maxS.setMarkers([maxM]);
      }
    }
    // Clear stale
    for (const s of prevMarkerSeries) {
      if (!used.has(s)) s.clearMarkers();
    }
    prevMarkerSeries.clear();
    for (const s of used) prevMarkerSeries.add(s);
  }
  function clear() {
    for (const s of prevMarkerSeries) s.clearMarkers();
    prevMarkerSeries.clear();
  }
  return {
    update,
    // Throttled variant for high-frequency callers such as visible-range changes.
    scheduleUpdate: throttle(update, 100),
    clear,
  };
}

View File

@@ -1,100 +1,107 @@
/**
 * Build an `oklch(...)` string parser that returns an `[r, g, b, alpha]`
 * tuple: r/g/b as 0-255 clamped integers, alpha in 0-1.
 *
 * Conversion pipeline: oklch -> oklab -> CIE XYZ (D65) -> linear sRGB -> sRGB.
 *
 * Fix: alpha given as a plain number (`/ 0.5`) was previously divided by
 * 100 as if it were a percentage; only `%`-suffixed alphas are now scaled.
 */
export function createOklchToRGBA() {
  /**
   * Multiply a row-major 3x3 matrix by a 3-vector.
   * @param {readonly [number, number, number, number, number, number, number, number, number]} A
   * @param {readonly [number, number, number]} B
   */
  function multiplyMatrices(A, B) {
    return /** @type {const} */ ([
      A[0] * B[0] + A[1] * B[1] + A[2] * B[2],
      A[3] * B[0] + A[4] * B[1] + A[5] * B[2],
      A[6] * B[0] + A[7] * B[1] + A[8] * B[2],
    ]);
  }
  /**
   * oklch (polar) -> oklab (cartesian). A NaN hue collapses chroma to 0.
   * @param {readonly [number, number, number]} param0
   */
  function oklch2oklab([l, c, h]) {
    return /** @type {const} */ ([
      l,
      isNaN(h) ? 0 : c * Math.cos((h * Math.PI) / 180),
      isNaN(h) ? 0 : c * Math.sin((h * Math.PI) / 180),
    ]);
  }
  /**
   * Apply the sRGB transfer curve to linear-light channel values.
   * @param {readonly [number, number, number]} rgb
   */
  function srgbLinear2rgb(rgb) {
    return rgb.map((c) =>
      Math.abs(c) > 0.0031308
        ? (c < 0 ? -1 : 1) * (1.055 * Math.abs(c) ** (1 / 2.4) - 0.055)
        : 12.92 * c,
    );
  }
  /**
   * oklab -> CIE XYZ (D65) through the non-linear LMS space.
   * @param {readonly [number, number, number]} lab
   */
  function oklab2xyz(lab) {
    const LMSg = multiplyMatrices(
      /** @type {const} */ ([
        1, 0.3963377773761749, 0.2158037573099136, 1, -0.1055613458156586,
        -0.0638541728258133, 1, -0.0894841775298119, -1.2914855480194092,
      ]),
      lab,
    );
    const LMS = /** @type {[number, number, number]} */ (
      LMSg.map((val) => val ** 3)
    );
    return multiplyMatrices(
      /** @type {const} */ ([
        1.2268798758459243, -0.5578149944602171, 0.2813910456659647,
        -0.0405757452148008, 1.112286803280317, -0.0717110580655164,
        -0.0763729366746601, -0.4214933324022432, 1.5869240198367816,
      ]),
      LMS,
    );
  }
  /**
   * CIE XYZ (D65) -> linear sRGB.
   * @param {readonly [number, number, number]} xyz
   */
  function xyz2rgbLinear(xyz) {
    return multiplyMatrices(
      [
        3.2409699419045226, -1.537383177570094, -0.4986107602930034,
        -0.9692436362808796, 1.8759675015077202, 0.04155505740717559,
        0.05563007969699366, -0.20397695888897652, 1.0569715142428786,
      ],
      xyz,
    );
  }
  /** @param {string} oklch */
  return function (oklch) {
    oklch = oklch.replace("oklch(", "");
    oklch = oklch.replace(")", "");
    let splitOklch = oklch.split(" / ");
    let alpha = 1;
    if (splitOklch.length === 2) {
      // Alpha may be "50%" or a plain number like "0.5"; only divide by
      // 100 when a percent sign is present.
      const rawAlpha = /** @type {string} */ (splitOklch.pop());
      alpha = rawAlpha.includes("%")
        ? Number(rawAlpha.replace("%", "")) / 100
        : Number(rawAlpha);
    }
    splitOklch = oklch.split(" ");
    const lch = splitOklch.map((v, i) => {
      // Lightness (first component) may be expressed as a percentage.
      if (!i && v.includes("%")) {
        return Number(v.replace("%", "")) / 100;
      } else {
        return Number(v);
      }
    });
    const rgb = srgbLinear2rgb(
      xyz2rgbLinear(
        oklab2xyz(oklch2oklab(/** @type {[number, number, number]} */ (lch))),
      ),
    ).map((v) => {
      // Clamp to a displayable 8-bit channel.
      return Math.max(Math.min(Math.round(v * 255), 255), 0);
    });
    return [...rgb, alpha];
  };
}
/**
 * Multiply a 3x3 matrix (row-major, 9 elements) by a 3-vector.
 * @param {readonly [number, number, number, number, number, number, number, number, number]} A
 * @param {readonly [number, number, number]} B
 */
function multiplyMatrices(A, B) {
  const [x, y, z] = B;
  return /** @type {const} */ ([
    A[0] * x + A[1] * y + A[2] * z,
    A[3] * x + A[4] * y + A[5] * z,
    A[6] * x + A[7] * y + A[8] * z,
  ]);
}
/**
 * Convert an oklch triple (polar) to oklab (cartesian a/b components).
 * A NaN hue means "powerless" and collapses chroma to zero.
 * @param {readonly [number, number, number]} param0
 */
function oklch2oklab([l, c, h]) {
  const hueKnown = !isNaN(h);
  const radians = (h * Math.PI) / 180;
  return /** @type {const} */ ([
    l,
    hueKnown ? c * Math.cos(radians) : 0,
    hueKnown ? c * Math.sin(radians) : 0,
  ]);
}
/**
 * Apply the sRGB transfer function to linear-light channel values
 * (linear segment below the 0.0031308 threshold, gamma curve above).
 * @param {readonly [number, number, number]} rgb
 */
function srgbLinear2rgb(rgb) {
  return rgb.map((channel) => {
    if (Math.abs(channel) <= 0.0031308) return 12.92 * channel;
    const sign = channel < 0 ? -1 : 1;
    return sign * (1.055 * Math.abs(channel) ** (1 / 2.4) - 0.055);
  });
}
/**
 * Convert oklab to CIE XYZ (D65) via the non-linear LMS intermediate space.
 * @param {readonly [number, number, number]} lab
 */
function oklab2xyz(lab) {
  const OKLAB_TO_LMS = [
    1, 0.3963377773761749, 0.2158037573099136,
    1, -0.1055613458156586, -0.0638541728258133,
    1, -0.0894841775298119, -1.2914855480194092,
  ];
  const LMS_TO_XYZ = [
    1.2268798758459243, -0.5578149944602171, 0.2813910456659647,
    -0.0405757452148008, 1.112286803280317, -0.0717110580655164,
    -0.0763729366746601, -0.4214933324022432, 1.5869240198367816,
  ];
  // Cube the non-linear LMS response before mapping into XYZ.
  const lmsNonLinear = multiplyMatrices(OKLAB_TO_LMS, lab);
  const lms = /** @type {[number, number, number]} */ (
    lmsNonLinear.map((val) => val ** 3)
  );
  return multiplyMatrices(LMS_TO_XYZ, lms);
}
/**
 * Convert CIE XYZ (D65) to linear-light sRGB.
 * @param {readonly [number, number, number]} xyz
 */
function xyz2rgbLinear(xyz) {
  const XYZ_TO_SRGB = [
    3.2409699419045226, -1.537383177570094, -0.4986107602930034,
    -0.9692436362808796, 1.8759675015077202, 0.04155505740717559,
    0.05563007969699366, -0.20397695888897652, 1.0569715142428786,
  ];
  return multiplyMatrices(XYZ_TO_SRGB, xyz);
}
/** @type {Map<string, [number, number, number, number]>} */
const conversionCache = new Map();
/**
 * Parse an `oklch(...)` string into an rgba tuple, memoizing results.
 * Returns null for anything that is not an oklch() color.
 * @param {string} oklch
 * @returns {[number, number, number, number] | null}
 */
function parseOklch(oklch) {
  if (!oklch.startsWith("oklch(")) return null;
  const hit = conversionCache.get(oklch);
  if (hit) return hit;
  // Strip the "oklch(" prefix and the trailing ")".
  let body = oklch.slice(6, -1);
  let alpha = 1;
  const sep = body.indexOf(" / ");
  if (sep !== -1) {
    // Alpha is either "50%" or a plain number like "0.5".
    const rawAlpha = body.slice(sep + 3);
    alpha = rawAlpha.includes("%")
      ? Number(rawAlpha.replace("%", "")) / 100
      : Number(rawAlpha);
    body = body.slice(0, sep);
  }
  const [rawL, rawC, rawH] = body.split(" ");
  // Lightness may be given as a percentage.
  const l = rawL.includes("%") ? Number(rawL.replace("%", "")) / 100 : Number(rawL);
  const lab = oklch2oklab([l, Number(rawC), Number(rawH)]);
  const channels = srgbLinear2rgb(xyz2rgbLinear(oklab2xyz(lab))).map((v) =>
    Math.max(Math.min(Math.round(v * 255), 255), 0),
  );
  const rgba = /** @type {[number, number, number, number]} */ ([
    ...channels,
    alpha,
  ]);
  conversionCache.set(oklch, rgba);
  return rgba;
}
/**
 * Convert an oklch string to a CSS rgb()/rgba() string.
 * Non-oklch input is returned unchanged.
 * @param {string} oklch
 * @returns {string}
 */
export function oklchToRgba(oklch) {
  const parsed = parseOklch(oklch);
  if (!parsed) return oklch;
  const [r, g, b, a] = parsed;
  if (a === 1) return `rgb(${r}, ${g}, ${b})`;
  return `rgba(${r}, ${g}, ${b}, ${a})`;
}

View File

@@ -1,7 +1,7 @@
/**
* @import * as _ from "./modules/leeoniya-ufuzzy/1.0.19/dist/uFuzzy.d.ts"
*
* @import { IChartApi, ISeriesApi as _ISeriesApi, SeriesDefinition, SingleValueData as _SingleValueData, CandlestickData as _CandlestickData, BaselineData as _BaselineData, HistogramData as _HistogramData, SeriesType as LCSeriesType, IPaneApi, LineSeriesPartialOptions as _LineSeriesPartialOptions, HistogramSeriesPartialOptions as _HistogramSeriesPartialOptions, BaselineSeriesPartialOptions as _BaselineSeriesPartialOptions, CandlestickSeriesPartialOptions as _CandlestickSeriesPartialOptions, WhitespaceData, DeepPartial, ChartOptions, Time, LineData as _LineData, createChart as CreateChart, LineStyle, createSeriesMarkers as CreateSeriesMarkers, SeriesMarker, ISeriesMarkersPluginApi } from './modules/lightweight-charts/5.1.0/dist/typings.js'
* @import { IChartApi, ISeriesApi as _ISeriesApi, SeriesDefinition, SingleValueData as _SingleValueData, CandlestickData as _CandlestickData, BaselineData as _BaselineData, HistogramData as _HistogramData, SeriesType as LCSeriesType, IPaneApi, LineSeriesPartialOptions as _LineSeriesPartialOptions, HistogramSeriesPartialOptions as _HistogramSeriesPartialOptions, BaselineSeriesPartialOptions as _BaselineSeriesPartialOptions, CandlestickSeriesPartialOptions as _CandlestickSeriesPartialOptions, WhitespaceData, DeepPartial, ChartOptions, Time, LineData as _LineData, createChart as CreateLCChart, LineStyle, createSeriesMarkers as CreateSeriesMarkers, SeriesMarker, ISeriesMarkersPluginApi } from './modules/lightweight-charts/5.1.0/dist/typings.js'
*
* @import { Signal, Signals, Accessor } from "./signals.js";
*
@@ -10,9 +10,9 @@
*
* @import { Resources, MetricResource } from './resources.js'
*
* @import { SingleValueData, CandlestickData, Series, AnySeries, ISeries, HistogramData, LineData, BaselineData, LineSeriesPartialOptions, BaselineSeriesPartialOptions, HistogramSeriesPartialOptions, CandlestickSeriesPartialOptions, CreateChartElement, Chart, Legend } from "./chart/index.js"
* @import { SingleValueData, CandlestickData, Series, AnySeries, ISeries, HistogramData, LineData, BaselineData, LineSeriesPartialOptions, BaselineSeriesPartialOptions, HistogramSeriesPartialOptions, CandlestickSeriesPartialOptions, Chart, Legend } from "./chart/index.js"
*
* @import { Color, ColorName, Colors } from "./utils/colors.js"
* @import { Color, ColorName, Colors } from "./chart/colors.js"
*
* @import { WebSockets } from "./utils/ws.js"
*
@@ -22,7 +22,7 @@
*
* @import { UnitObject as Unit } from "./utils/units.js"
*
* @import { ChartableIndexName } from "./panes/chart/index.js";
* @import { ChartableIndexName } from "./panes/chart.js";
*/
// import uFuzzy = require("./modules/leeoniya-ufuzzy/1.0.19/dist/uFuzzy.d.ts");

View File

@@ -1,4 +1,4 @@
import { createColors } from "./utils/colors.js";
import { createColors } from "./chart/colors.js";
import { webSockets } from "./utils/ws.js";
import * as formatters from "./utils/format.js";
import { onFirstIntersection, getElementById, isHidden } from "./utils/dom.js";
@@ -8,7 +8,7 @@ import { initOptions } from "./options/full.js";
import ufuzzy from "./modules/leeoniya-ufuzzy/1.0.19/dist/uFuzzy.mjs";
import * as leanQr from "./modules/lean-qr/2.7.1/index.mjs";
import { init as initExplorer } from "./panes/_explorer.js";
import { init as initChart } from "./panes/chart/index.js";
import { init as initChart } from "./panes/chart.js";
import { init as initTable } from "./panes/table.js";
import { init as initSimulation } from "./panes/_simulation.js";
import { next } from "./utils/timing.js";

View File

@@ -19,7 +19,7 @@ import {
} from "../utils/format.js";
import { serdeDate, serdeOptDate, serdeOptNumber } from "../utils/serde.js";
import signals from "../signals.js";
import { createChartElement } from "../chart/index.js";
import { createChart } from "../chart/index.js";
import { resources } from "../resources.js";
/**
@@ -684,7 +684,7 @@ export function init({ colors }) {
/** @type {() => IndexName} */
const index = () => "dateindex";
createChartElement({
createChart({
index,
parent: resultsElement,
signals,
@@ -727,7 +727,7 @@ export function init({ colors }) {
],
});
createChartElement({
createChart({
index,
parent: resultsElement,
signals,
@@ -750,7 +750,7 @@ export function init({ colors }) {
],
});
createChartElement({
createChart({
index,
parent: resultsElement,
signals,
@@ -779,7 +779,7 @@ export function init({ colors }) {
],
});
createChartElement({
createChart({
index,
parent: resultsElement,
signals,
@@ -801,7 +801,7 @@ export function init({ colors }) {
],
});
createChartElement({
createChart({
index,
parent: resultsElement,
signals,

View File

@@ -1,18 +1,12 @@
import {
createShadow,
createChoiceField,
createHeader,
} from "../../utils/dom.js";
import { chartElement } from "../../utils/elements.js";
import { ios, canShare } from "../../utils/env.js";
import { serdeChartableIndex } from "../../utils/serde.js";
import { Unit } from "../../utils/units.js";
import signals from "../../signals.js";
import { createChartElement } from "../../chart/index.js";
import { createChartState } from "../../chart/state.js";
import { webSockets } from "../../utils/ws.js";
import { screenshot } from "./screenshot.js";
import { debounce } from "../../utils/timing.js";
import { createShadow, createChoiceField, createHeader } from "../utils/dom.js";
import { chartElement } from "../utils/elements.js";
import { serdeChartableIndex } from "../utils/serde.js";
import { Unit } from "../utils/units.js";
import signals from "../signals.js";
import { createChart } from "../chart/index.js";
import { createChartState } from "../chart/state.js";
import { webSockets } from "../utils/ws.js";
import { debounce } from "../utils/timing.js";
const keyPrefix = "chart";
const ONE_BTC_IN_SATS = 100_000_000;
@@ -39,15 +33,15 @@ export function init({ colors, option, brk }) {
const { from, to } = state.range();
const chart = createChartElement({
const chart = createChart({
parent: chartElement,
signals,
colors,
id: "charts",
brk,
index,
initialVisibleBarsCount:
from !== null && to !== null ? to - from : null,
initialVisibleBarsCount: from !== null && to !== null ? to - from : null,
captureElement: chartElement,
timeScaleSetCallback: (unknownTimeScaleCallback) => {
const { from, to } = state.range();
if (from !== null && to !== null) {
@@ -58,42 +52,11 @@ export function init({ colors, option, brk }) {
},
});
if (!(ios && !canShare)) {
const domain = window.document.createElement("p");
domain.innerText = `${window.location.host}`;
domain.id = "domain";
chart.addFieldsetIfNeeded({
id: "capture",
paneIndex: 0,
position: "ne",
createChild() {
const button = window.document.createElement("button");
button.id = "capture";
button.innerText = "capture";
button.title = "Capture chart as image";
button.addEventListener("click", async () => {
chartElement.dataset.screenshot = "true";
chartElement.append(domain);
try {
await screenshot({
element: chartElement,
name: option().path.join("-"),
title: option().title,
});
} catch {}
chartElement.removeChild(domain);
chartElement.dataset.screenshot = "false";
});
return button;
},
});
}
// Sync chart → state.range on user pan/zoom
// Debounce to avoid rapid URL updates while panning
const debouncedSetRange = debounce(
(/** @type {{ from: number, to: number }} */ range) => state.setRange(range),
(/** @type {{ from: number, to: number }} */ range) =>
state.setRange(range),
500,
);
chart.onVisibleLogicalRangeChange((t) => {
@@ -110,7 +73,8 @@ export function init({ colors, option, brk }) {
storageKey: `${keyPrefix}-price`,
urlKey: "price",
serialize: (u) => u.id,
deserialize: (s) => /** @type {Unit} */ (unitChoices.find((u) => u.id === s) ?? Unit.usd),
deserialize: (s) =>
/** @type {Unit} */ (unitChoices.find((u) => u.id === s) ?? Unit.usd),
});
const topUnitField = createChoiceField({
defaultValue: Unit.usd,
@@ -222,28 +186,33 @@ export function init({ colors, option, brk }) {
const bottomUnits = Array.from(option.bottom.keys());
/** @type {{ field: HTMLDivElement, selected: Signal<Unit> } | undefined} */
let bottomUnitSelector;
/** @type {Signal<Unit> | undefined} */
let bottomUnit;
if (bottomUnits.length) {
const selected = signals.createPersistedSignal({
// Storage key based on unit group (sorted unit IDs) so each group remembers its selection
const unitGroupKey = bottomUnits
.map((u) => u.id)
.sort()
.join("-");
bottomUnit = signals.createPersistedSignal({
defaultValue: bottomUnits[0],
storageKey: `${keyPrefix}-unit`,
storageKey: `${keyPrefix}-unit-${unitGroupKey}`,
urlKey: "unit",
serialize: (u) => u.id,
deserialize: (s) => bottomUnits.find((u) => u.id === s) ?? bottomUnits[0],
deserialize: (s) =>
bottomUnits.find((u) => u.id === s) ?? bottomUnits[0],
});
const field = createChoiceField({
defaultValue: bottomUnits[0],
choices: bottomUnits,
toKey: (u) => u.id,
toLabel: (u) => u.name,
selected,
selected: bottomUnit,
signals,
sorted: true,
type: "select",
});
bottomUnitSelector = { field, selected };
chart.addFieldsetIfNeeded({
id: "charts-unit-1",
paneIndex: 1,
@@ -259,17 +228,132 @@ export function init({ colors, option, brk }) {
chart.legendBottom.removeFrom(0);
}
signals.createScopedEffect(index, (index) => {
signals.createScopedEffect(topUnit, (topUnit) => {
/**
* @param {Object} args
* @param {Map<Unit, AnyFetchedSeriesBlueprint[]>} args.blueprints
* @param {number} args.paneIndex
* @param {Unit} args.unit
* @param {IndexName} args.idx
* @param {AnySeries[]} args.seriesList
* @param {number} args.orderStart
* @param {Legend} args.legend
*/
function createSeriesFromBlueprints({
blueprints,
paneIndex,
unit,
idx,
seriesList,
orderStart,
legend,
}) {
legend.removeFrom(orderStart);
seriesList.splice(orderStart).forEach((series) => series.remove());
blueprints.get(unit)?.forEach((blueprint, order) => {
order += orderStart;
const options = blueprint.options;
const indexes = Object.keys(blueprint.metric.by);
if (indexes.includes(idx)) {
switch (blueprint.type) {
case "Baseline": {
seriesList.push(
chart.addBaselineSeries({
metric: blueprint.metric,
name: blueprint.title,
unit,
defaultActive: blueprint.defaultActive,
paneIndex,
options: {
...options,
topLineColor:
blueprint.color?.() ?? blueprint.colors?.[0](),
bottomLineColor:
blueprint.color?.() ?? blueprint.colors?.[1](),
},
order,
}),
);
break;
}
case "Histogram": {
seriesList.push(
chart.addHistogramSeries({
metric: blueprint.metric,
name: blueprint.title,
unit,
color: blueprint.color,
defaultActive: blueprint.defaultActive,
paneIndex,
options,
order,
}),
);
break;
}
case "Candlestick": {
seriesList.push(
chart.addCandlestickSeries({
metric: blueprint.metric,
name: blueprint.title,
unit,
colors: blueprint.colors,
defaultActive: blueprint.defaultActive,
paneIndex,
options,
order,
}),
);
break;
}
case "Dots": {
seriesList.push(
chart.addDotsSeries({
metric: blueprint.metric,
color: blueprint.color,
name: blueprint.title,
unit,
defaultActive: blueprint.defaultActive,
paneIndex,
options,
order,
}),
);
break;
}
case "Line":
case undefined:
seriesList.push(
chart.addLineSeries({
metric: blueprint.metric,
color: blueprint.color,
name: blueprint.title,
unit,
defaultActive: blueprint.defaultActive,
paneIndex,
options,
order,
}),
);
}
}
});
}
// Price series + top pane blueprints: combined effect on index + topUnit
signals.createScopedEffect(
() => ({ idx: index(), unit: topUnit() }),
({ idx, unit }) => {
// Create price series
/** @type {AnySeries | undefined} */
let series;
switch (topUnit) {
switch (unit) {
case Unit.usd: {
series = chart.addCandlestickSeries({
metric: brk.metrics.price.usd.ohlc,
name: "Price",
unit: topUnit,
unit,
order: 0,
});
break;
@@ -278,19 +362,19 @@ export function init({ colors, option, brk }) {
series = chart.addCandlestickSeries({
metric: brk.metrics.price.sats.ohlc,
name: "Price",
unit: topUnit,
unit,
inverse: true,
order: 0,
});
break;
}
}
if (!series) throw Error("Unreachable");
seriesListTop[0]?.remove();
seriesListTop[0] = series;
// Live price update effect
signals.createEffect(
() => ({
latest: webSockets.kraken1dCandle.latest(),
@@ -298,151 +382,40 @@ export function init({ colors, option, brk }) {
}),
({ latest, hasData }) => {
if (!series || !latest || !hasData) return;
printLatest({ series, unit: topUnit, index });
printLatest({ series, unit, index: idx });
},
);
});
/**
* @param {Object} args
* @param {Map<Unit, AnyFetchedSeriesBlueprint[]>} args.blueprints
* @param {number} args.paneIndex
* @param {Accessor<Unit>} args.unit
* @param {AnySeries[]} args.seriesList
* @param {number} args.orderStart
* @param {Legend} args.legend
*/
function processPane({
blueprints,
paneIndex,
unit,
seriesList,
orderStart,
legend,
}) {
signals.createScopedEffect(unit, (unit) => {
legend.removeFrom(orderStart);
seriesList.splice(orderStart).forEach((series) => {
series.remove();
});
blueprints.get(unit)?.forEach((blueprint, order) => {
order += orderStart;
const options = blueprint.options;
// Tree-first: metric is now an accessor with .by property
const indexes = Object.keys(blueprint.metric.by);
if (indexes.includes(index)) {
switch (blueprint.type) {
case "Baseline": {
seriesList.push(
chart.addBaselineSeries({
metric: blueprint.metric,
name: blueprint.title,
unit,
defaultActive: blueprint.defaultActive,
paneIndex,
options: {
...options,
topLineColor:
blueprint.color?.() ?? blueprint.colors?.[0](),
bottomLineColor:
blueprint.color?.() ?? blueprint.colors?.[1](),
},
order,
}),
);
break;
}
case "Histogram": {
seriesList.push(
chart.addHistogramSeries({
metric: blueprint.metric,
name: blueprint.title,
unit,
color: blueprint.color,
defaultActive: blueprint.defaultActive,
paneIndex,
options,
order,
}),
);
break;
}
case "Candlestick": {
seriesList.push(
chart.addCandlestickSeries({
metric: blueprint.metric,
name: blueprint.title,
unit,
colors: blueprint.colors,
defaultActive: blueprint.defaultActive,
paneIndex,
options,
order,
}),
);
break;
}
case "Dots": {
seriesList.push(
chart.addDotsSeries({
metric: blueprint.metric,
color: blueprint.color,
name: blueprint.title,
unit,
defaultActive: blueprint.defaultActive,
paneIndex,
options,
order,
}),
);
break;
}
case "Line":
case undefined:
seriesList.push(
chart.addLineSeries({
metric: blueprint.metric,
color: blueprint.color,
name: blueprint.title,
unit,
defaultActive: blueprint.defaultActive,
paneIndex,
options,
order,
}),
);
}
}
});
// Top pane blueprint series
createSeriesFromBlueprints({
blueprints: option.top,
paneIndex: 0,
unit,
idx,
seriesList: seriesListTop,
orderStart: 1,
legend: chart.legendTop,
});
}
},
);
processPane({
blueprints: option.top,
paneIndex: 0,
unit: topUnit,
seriesList: seriesListTop,
orderStart: 1,
legend: chart.legendTop,
});
if (bottomUnitSelector) {
processPane({
blueprints: option.bottom,
paneIndex: 1,
unit: bottomUnitSelector.selected,
seriesList: seriesListBottom,
orderStart: 0,
legend: chart.legendBottom,
});
}
});
// Bottom pane blueprints: combined effect on index + bottomUnit
if (bottomUnit) {
signals.createScopedEffect(
() => ({ idx: index(), unit: bottomUnit() }),
({ idx, unit }) => {
createSeriesFromBlueprints({
blueprints: option.bottom,
paneIndex: 1,
unit,
idx,
seriesList: seriesListBottom,
orderStart: 0,
legend: chart.legendBottom,
});
},
);
}
});
}

View File

@@ -87,7 +87,7 @@ function useMetricEndpoint(endpoint) {
* @param {number} [to]
* @returns {RangeState<T>}
*/
function range(from, to) {
function range(from = -10000, to) {
const key = `${from}-${to ?? ""}`;
const existing = ranges.get(key);
if (existing) return existing;
@@ -111,7 +111,7 @@ function useMetricEndpoint(endpoint) {
* @param {number} [start=-10000]
* @param {number} [end]
*/
async fetch(start, end) {
async fetch(start = -10000, end) {
const r = range(start, end);
r.loading.set(true);
try {

View File

@@ -22,18 +22,22 @@ export function throttle(callback, wait = 1000) {
let timeoutId = null;
/** @type {Parameters<F>} */
let latestArgs;
let hasTrailing = false;
return (/** @type {Parameters<F>} */ ...args) => {
latestArgs = args;
if (!timeoutId) {
// Otherwise it optimizes away timeoutId in Chrome and FF
timeoutId = timeoutId;
timeoutId = setTimeout(() => {
callback(...latestArgs); // Execute with latest args
timeoutId = null;
}, wait);
if (timeoutId) {
hasTrailing = true;
return;
}
callback(...latestArgs);
timeoutId = setTimeout(() => {
timeoutId = null;
if (hasTrailing) {
hasTrailing = false;
callback(...latestArgs);
}
}, wait);
};
}