global: fixes + snapshot + packages

This commit is contained in:
nym21
2025-04-26 17:22:58 +02:00
parent 07618ebe43
commit 82bcc55645
44 changed files with 6462 additions and 1973 deletions

View File

@@ -1,6 +1,9 @@
use std::{fs, path::Path}; use std::{fs, path::Path};
use brk_core::{CheckedSub, Height, StoredU32, StoredU64, StoredUsize, Timestamp, Weight}; use brk_core::{
CheckedSub, DifficultyEpoch, HalvingEpoch, Height, StoredU32, StoredU64, StoredUsize,
Timestamp, Weight,
};
use brk_exit::Exit; use brk_exit::Exit;
use brk_indexer::Indexer; use brk_indexer::Indexer;
use brk_parser::bitcoin; use brk_parser::bitcoin;
@@ -8,19 +11,22 @@ use brk_vec::{Compressed, Version};
use super::{ use super::{
EagerVec, Indexes, EagerVec, Indexes,
grouped::{ComputedVecsFromHeight, StorableVecGeneatorOptions}, grouped::{ComputedVecsFromDateindex, ComputedVecsFromHeight, StorableVecGeneatorOptions},
indexes, indexes,
}; };
#[derive(Clone)] #[derive(Clone)]
pub struct Vecs { pub struct Vecs {
pub height_to_interval: EagerVec<Height, Timestamp>, pub height_to_interval: EagerVec<Height, Timestamp>,
pub indexes_to_block_interval: ComputedVecsFromHeight<Timestamp>,
pub indexes_to_block_count: ComputedVecsFromHeight<StoredU32>,
pub indexes_to_block_weight: ComputedVecsFromHeight<Weight>,
pub height_to_vbytes: EagerVec<Height, StoredU64>, pub height_to_vbytes: EagerVec<Height, StoredU64>,
pub indexes_to_block_vbytes: ComputedVecsFromHeight<StoredU64>, pub difficultyepoch_to_timestamp: EagerVec<DifficultyEpoch, Timestamp>,
pub halvingepoch_to_timestamp: EagerVec<HalvingEpoch, Timestamp>,
pub timeindexes_to_timestamp: ComputedVecsFromDateindex<Timestamp>,
pub indexes_to_block_count: ComputedVecsFromHeight<StoredU32>,
pub indexes_to_block_interval: ComputedVecsFromHeight<Timestamp>,
pub indexes_to_block_size: ComputedVecsFromHeight<StoredUsize>, pub indexes_to_block_size: ComputedVecsFromHeight<StoredUsize>,
pub indexes_to_block_vbytes: ComputedVecsFromHeight<StoredU64>,
pub indexes_to_block_weight: ComputedVecsFromHeight<Weight>,
} }
impl Vecs { impl Vecs {
@@ -33,6 +39,13 @@ impl Vecs {
Version::ZERO, Version::ZERO,
compressed, compressed,
)?, )?,
timeindexes_to_timestamp: ComputedVecsFromDateindex::forced_import(
path,
"timestamp",
Version::ZERO,
compressed,
StorableVecGeneatorOptions::default().add_first(),
)?,
indexes_to_block_interval: ComputedVecsFromHeight::forced_import( indexes_to_block_interval: ComputedVecsFromHeight::forced_import(
path, path,
"block_interval", "block_interval",
@@ -81,6 +94,16 @@ impl Vecs {
compressed, compressed,
StorableVecGeneatorOptions::default().add_sum().add_total(), StorableVecGeneatorOptions::default().add_sum().add_total(),
)?, )?,
difficultyepoch_to_timestamp: EagerVec::forced_import(
&path.join("difficultyepoch_to_timestamp"),
Version::ZERO,
compressed,
)?,
halvingepoch_to_timestamp: EagerVec::forced_import(
&path.join("halvingepoch_to_timestamp"),
Version::ZERO,
compressed,
)?,
}) })
} }
@@ -91,9 +114,43 @@ impl Vecs {
starting_indexes: &Indexes, starting_indexes: &Indexes,
exit: &Exit, exit: &Exit,
) -> color_eyre::Result<()> { ) -> color_eyre::Result<()> {
self.timeindexes_to_timestamp.compute(
indexer,
indexes,
starting_indexes,
exit,
|vec, _, indexes, starting_indexes, exit| {
vec.compute_transform(
starting_indexes.dateindex,
indexes.dateindex_to_date.mut_vec(),
|(di, d, ..)| (di, Timestamp::from(d)),
exit,
)
},
)?;
self.indexes_to_block_count.compute_all(
indexer,
indexes,
starting_indexes,
exit,
|v, indexer, _, starting_indexes, exit| {
let indexer_vecs = indexer.mut_vecs();
v.compute_range(
starting_indexes.height,
indexer_vecs.height_to_weight.mut_vec(),
|h| (h, StoredU32::from(1_u32)),
exit,
)
},
)?;
let indexer_vecs = indexer.mut_vecs();
self.height_to_interval.compute_transform( self.height_to_interval.compute_transform(
starting_indexes.height, starting_indexes.height,
indexer.mut_vecs().height_to_timestamp.mut_vec(), indexer_vecs.height_to_timestamp.mut_vec(),
|(height, timestamp, _, height_to_timestamp)| { |(height, timestamp, _, height_to_timestamp)| {
let interval = height.decremented().map_or(Timestamp::ZERO, |prev_h| { let interval = height.decremented().map_or(Timestamp::ZERO, |prev_h| {
let prev_timestamp = height_to_timestamp.double_unwrap_cached_get(prev_h); let prev_timestamp = height_to_timestamp.double_unwrap_cached_get(prev_h);
@@ -113,38 +170,23 @@ impl Vecs {
Some(self.height_to_interval.mut_vec()), Some(self.height_to_interval.mut_vec()),
)?; )?;
self.indexes_to_block_count.compute_all(
indexer,
indexes,
starting_indexes,
exit,
|v, indexer, _, starting_indexes, exit| {
v.compute_range(
starting_indexes.height,
indexer.mut_vecs().height_to_weight.mut_vec(),
|h| (h, StoredU32::from(1_u32)),
exit,
)
},
)?;
self.indexes_to_block_weight.compute_rest( self.indexes_to_block_weight.compute_rest(
indexes, indexes,
starting_indexes, starting_indexes,
exit, exit,
Some(indexer.mut_vecs().height_to_weight.mut_vec()), Some(indexer_vecs.height_to_weight.mut_vec()),
)?; )?;
self.indexes_to_block_size.compute_rest( self.indexes_to_block_size.compute_rest(
indexes, indexes,
starting_indexes, starting_indexes,
exit, exit,
Some(indexer.mut_vecs().height_to_total_size.mut_vec()), Some(indexer_vecs.height_to_total_size.mut_vec()),
)?; )?;
self.height_to_vbytes.compute_transform( self.height_to_vbytes.compute_transform(
starting_indexes.height, starting_indexes.height,
indexer.mut_vecs().height_to_weight.mut_vec(), indexer_vecs.height_to_weight.mut_vec(),
|(h, w, ..)| { |(h, w, ..)| {
( (
h, h,
@@ -161,6 +203,30 @@ impl Vecs {
Some(self.height_to_vbytes.mut_vec()), Some(self.height_to_vbytes.mut_vec()),
)?; )?;
self.difficultyepoch_to_timestamp.compute_transform(
starting_indexes.difficultyepoch,
indexes.difficultyepoch_to_first_height.mut_vec(),
|(i, h, ..)| {
(
i,
indexer_vecs.height_to_timestamp.double_unwrap_cached_get(h),
)
},
exit,
)?;
self.halvingepoch_to_timestamp.compute_transform(
starting_indexes.halvingepoch,
indexes.halvingepoch_to_first_height.mut_vec(),
|(i, h, ..)| {
(
i,
indexer_vecs.height_to_timestamp.double_unwrap_cached_get(h),
)
},
exit,
)?;
Ok(()) Ok(())
} }
@@ -169,12 +235,15 @@ impl Vecs {
vec![ vec![
self.height_to_interval.any_vec(), self.height_to_interval.any_vec(),
self.height_to_vbytes.any_vec(), self.height_to_vbytes.any_vec(),
self.difficultyepoch_to_timestamp.any_vec(),
self.halvingepoch_to_timestamp.any_vec(),
], ],
self.indexes_to_block_interval.any_vecs(), self.timeindexes_to_timestamp.any_vecs(),
self.indexes_to_block_count.any_vecs(), self.indexes_to_block_count.any_vecs(),
self.indexes_to_block_weight.any_vecs(), self.indexes_to_block_interval.any_vecs(),
self.indexes_to_block_size.any_vecs(), self.indexes_to_block_size.any_vecs(),
self.indexes_to_block_vbytes.any_vecs(), self.indexes_to_block_vbytes.any_vecs(),
self.indexes_to_block_weight.any_vecs(),
] ]
.concat() .concat()
} }

View File

@@ -19,21 +19,17 @@ pub struct Vecs {
pub dateindex_to_first_height: EagerVec<DateIndex, Height>, pub dateindex_to_first_height: EagerVec<DateIndex, Height>,
pub dateindex_to_last_height: EagerVec<DateIndex, Height>, pub dateindex_to_last_height: EagerVec<DateIndex, Height>,
pub dateindex_to_monthindex: EagerVec<DateIndex, MonthIndex>, pub dateindex_to_monthindex: EagerVec<DateIndex, MonthIndex>,
pub dateindex_to_timestamp: EagerVec<DateIndex, Timestamp>,
pub dateindex_to_weekindex: EagerVec<DateIndex, WeekIndex>, pub dateindex_to_weekindex: EagerVec<DateIndex, WeekIndex>,
pub decadeindex_to_decadeindex: EagerVec<DecadeIndex, DecadeIndex>, pub decadeindex_to_decadeindex: EagerVec<DecadeIndex, DecadeIndex>,
pub decadeindex_to_first_yearindex: EagerVec<DecadeIndex, YearIndex>, pub decadeindex_to_first_yearindex: EagerVec<DecadeIndex, YearIndex>,
pub decadeindex_to_last_yearindex: EagerVec<DecadeIndex, YearIndex>, pub decadeindex_to_last_yearindex: EagerVec<DecadeIndex, YearIndex>,
pub decadeindex_to_timestamp: EagerVec<DecadeIndex, Timestamp>,
pub difficultyepoch_to_difficultyepoch: EagerVec<DifficultyEpoch, DifficultyEpoch>, pub difficultyepoch_to_difficultyepoch: EagerVec<DifficultyEpoch, DifficultyEpoch>,
pub difficultyepoch_to_first_height: EagerVec<DifficultyEpoch, Height>, pub difficultyepoch_to_first_height: EagerVec<DifficultyEpoch, Height>,
pub difficultyepoch_to_last_height: EagerVec<DifficultyEpoch, Height>, pub difficultyepoch_to_last_height: EagerVec<DifficultyEpoch, Height>,
pub difficultyepoch_to_timestamp: EagerVec<DifficultyEpoch, Timestamp>,
pub emptyoutputindex_to_emptyoutputindex: EagerVec<EmptyOutputIndex, EmptyOutputIndex>, pub emptyoutputindex_to_emptyoutputindex: EagerVec<EmptyOutputIndex, EmptyOutputIndex>,
pub halvingepoch_to_first_height: EagerVec<HalvingEpoch, Height>, pub halvingepoch_to_first_height: EagerVec<HalvingEpoch, Height>,
pub halvingepoch_to_halvingepoch: EagerVec<HalvingEpoch, HalvingEpoch>, pub halvingepoch_to_halvingepoch: EagerVec<HalvingEpoch, HalvingEpoch>,
pub halvingepoch_to_last_height: EagerVec<HalvingEpoch, Height>, pub halvingepoch_to_last_height: EagerVec<HalvingEpoch, Height>,
pub halvingepoch_to_timestamp: EagerVec<HalvingEpoch, Timestamp>,
pub height_to_date: EagerVec<Height, Date>, pub height_to_date: EagerVec<Height, Date>,
pub height_to_date_fixed: EagerVec<Height, Date>, pub height_to_date_fixed: EagerVec<Height, Date>,
pub height_to_dateindex: EagerVec<Height, DateIndex>, pub height_to_dateindex: EagerVec<Height, DateIndex>,
@@ -47,7 +43,6 @@ pub struct Vecs {
pub monthindex_to_last_dateindex: EagerVec<MonthIndex, DateIndex>, pub monthindex_to_last_dateindex: EagerVec<MonthIndex, DateIndex>,
pub monthindex_to_monthindex: EagerVec<MonthIndex, MonthIndex>, pub monthindex_to_monthindex: EagerVec<MonthIndex, MonthIndex>,
pub monthindex_to_quarterindex: EagerVec<MonthIndex, QuarterIndex>, pub monthindex_to_quarterindex: EagerVec<MonthIndex, QuarterIndex>,
pub monthindex_to_timestamp: EagerVec<MonthIndex, Timestamp>,
pub monthindex_to_yearindex: EagerVec<MonthIndex, YearIndex>, pub monthindex_to_yearindex: EagerVec<MonthIndex, YearIndex>,
pub opreturnindex_to_opreturnindex: EagerVec<OpReturnIndex, OpReturnIndex>, pub opreturnindex_to_opreturnindex: EagerVec<OpReturnIndex, OpReturnIndex>,
pub outputindex_to_outputindex: EagerVec<OutputIndex, OutputIndex>, pub outputindex_to_outputindex: EagerVec<OutputIndex, OutputIndex>,
@@ -63,7 +58,6 @@ pub struct Vecs {
pub quarterindex_to_first_monthindex: EagerVec<QuarterIndex, MonthIndex>, pub quarterindex_to_first_monthindex: EagerVec<QuarterIndex, MonthIndex>,
pub quarterindex_to_last_monthindex: EagerVec<QuarterIndex, MonthIndex>, pub quarterindex_to_last_monthindex: EagerVec<QuarterIndex, MonthIndex>,
pub quarterindex_to_quarterindex: EagerVec<QuarterIndex, QuarterIndex>, pub quarterindex_to_quarterindex: EagerVec<QuarterIndex, QuarterIndex>,
pub quarterindex_to_timestamp: EagerVec<QuarterIndex, Timestamp>,
pub txindex_to_height: EagerVec<TxIndex, Height>, pub txindex_to_height: EagerVec<TxIndex, Height>,
pub txindex_to_last_inputindex: EagerVec<TxIndex, InputIndex>, pub txindex_to_last_inputindex: EagerVec<TxIndex, InputIndex>,
pub txindex_to_last_outputindex: EagerVec<TxIndex, OutputIndex>, pub txindex_to_last_outputindex: EagerVec<TxIndex, OutputIndex>,
@@ -71,12 +65,10 @@ pub struct Vecs {
pub unknownoutputindex_to_unknownoutputindex: EagerVec<UnknownOutputIndex, UnknownOutputIndex>, pub unknownoutputindex_to_unknownoutputindex: EagerVec<UnknownOutputIndex, UnknownOutputIndex>,
pub weekindex_to_first_dateindex: EagerVec<WeekIndex, DateIndex>, pub weekindex_to_first_dateindex: EagerVec<WeekIndex, DateIndex>,
pub weekindex_to_last_dateindex: EagerVec<WeekIndex, DateIndex>, pub weekindex_to_last_dateindex: EagerVec<WeekIndex, DateIndex>,
pub weekindex_to_timestamp: EagerVec<WeekIndex, Timestamp>,
pub weekindex_to_weekindex: EagerVec<WeekIndex, WeekIndex>, pub weekindex_to_weekindex: EagerVec<WeekIndex, WeekIndex>,
pub yearindex_to_decadeindex: EagerVec<YearIndex, DecadeIndex>, pub yearindex_to_decadeindex: EagerVec<YearIndex, DecadeIndex>,
pub yearindex_to_first_monthindex: EagerVec<YearIndex, MonthIndex>, pub yearindex_to_first_monthindex: EagerVec<YearIndex, MonthIndex>,
pub yearindex_to_last_monthindex: EagerVec<YearIndex, MonthIndex>, pub yearindex_to_last_monthindex: EagerVec<YearIndex, MonthIndex>,
pub yearindex_to_timestamp: EagerVec<YearIndex, Timestamp>,
pub yearindex_to_yearindex: EagerVec<YearIndex, YearIndex>, pub yearindex_to_yearindex: EagerVec<YearIndex, YearIndex>,
} }
@@ -265,41 +257,6 @@ impl Vecs {
Version::ZERO, Version::ZERO,
compressed, compressed,
)?, )?,
dateindex_to_timestamp: EagerVec::forced_import(
&path.join("dateindex_to_timestamp"),
Version::ZERO,
compressed,
)?,
decadeindex_to_timestamp: EagerVec::forced_import(
&path.join("decadeindex_to_timestamp"),
Version::ZERO,
compressed,
)?,
difficultyepoch_to_timestamp: EagerVec::forced_import(
&path.join("difficultyepoch_to_timestamp"),
Version::ZERO,
compressed,
)?,
halvingepoch_to_timestamp: EagerVec::forced_import(
&path.join("halvingepoch_to_timestamp"),
Version::ZERO,
compressed,
)?,
monthindex_to_timestamp: EagerVec::forced_import(
&path.join("monthindex_to_timestamp"),
Version::ZERO,
compressed,
)?,
weekindex_to_timestamp: EagerVec::forced_import(
&path.join("weekindex_to_timestamp"),
Version::ZERO,
compressed,
)?,
yearindex_to_timestamp: EagerVec::forced_import(
&path.join("yearindex_to_timestamp"),
Version::ZERO,
compressed,
)?,
height_to_timestamp_fixed: EagerVec::forced_import( height_to_timestamp_fixed: EagerVec::forced_import(
&path.join("height_to_timestamp_fixed"), &path.join("height_to_timestamp_fixed"),
Version::ZERO, Version::ZERO,
@@ -325,11 +282,6 @@ impl Vecs {
Version::ZERO, Version::ZERO,
compressed, compressed,
)?, )?,
quarterindex_to_timestamp: EagerVec::forced_import(
&path.join("quarterindex_to_timestamp"),
Version::ZERO,
compressed,
)?,
p2pk33index_to_p2pk33index: EagerVec::forced_import( p2pk33index_to_p2pk33index: EagerVec::forced_import(
&path.join("p2pk33index_to_p2pk33index"), &path.join("p2pk33index_to_p2pk33index"),
Version::ZERO, Version::ZERO,
@@ -421,6 +373,158 @@ impl Vecs {
let inputindexes_count = indexer_vecs.inputindex_to_outputindex.len(); let inputindexes_count = indexer_vecs.inputindex_to_outputindex.len();
let outputindexes_count = indexer_vecs.outputindex_to_value.len(); let outputindexes_count = indexer_vecs.outputindex_to_value.len();
// ---
// OutputIndex
// ---
self.outputindex_to_outputindex.compute_range(
starting_indexes.outputindex,
indexer_vecs.outputindex_to_value.mut_vec(),
|i| (i, i),
exit,
)?;
self.p2pk33index_to_p2pk33index.compute_range(
starting_indexes.p2pk33index,
indexer_vecs.p2pk33index_to_p2pk33bytes.mut_vec(),
|i| (i, i),
exit,
)?;
self.p2pk65index_to_p2pk65index.compute_range(
starting_indexes.p2pk65index,
indexer_vecs.p2pk65index_to_p2pk65bytes.mut_vec(),
|i| (i, i),
exit,
)?;
self.p2pkhindex_to_p2pkhindex.compute_range(
starting_indexes.p2pkhindex,
indexer_vecs.p2pkhindex_to_p2pkhbytes.mut_vec(),
|i| (i, i),
exit,
)?;
self.p2shindex_to_p2shindex.compute_range(
starting_indexes.p2shindex,
indexer_vecs.p2shindex_to_p2shbytes.mut_vec(),
|i| (i, i),
exit,
)?;
self.p2trindex_to_p2trindex.compute_range(
starting_indexes.p2trindex,
indexer_vecs.p2trindex_to_p2trbytes.mut_vec(),
|i| (i, i),
exit,
)?;
self.p2wpkhindex_to_p2wpkhindex.compute_range(
starting_indexes.p2wpkhindex,
indexer_vecs.p2wpkhindex_to_p2wpkhbytes.mut_vec(),
|i| (i, i),
exit,
)?;
self.p2wshindex_to_p2wshindex.compute_range(
starting_indexes.p2wshindex,
indexer_vecs.p2wshindex_to_p2wshbytes.mut_vec(),
|i| (i, i),
exit,
)?;
self.emptyoutputindex_to_emptyoutputindex.compute_range(
starting_indexes.emptyoutputindex,
indexer_vecs.emptyoutputindex_to_txindex.mut_vec(),
|i| (i, i),
exit,
)?;
self.p2msindex_to_p2msindex.compute_range(
starting_indexes.p2msindex,
indexer_vecs.p2msindex_to_txindex.mut_vec(),
|i| (i, i),
exit,
)?;
self.opreturnindex_to_opreturnindex.compute_range(
starting_indexes.opreturnindex,
indexer_vecs.opreturnindex_to_txindex.mut_vec(),
|i| (i, i),
exit,
)?;
self.p2aindex_to_p2aindex.compute_range(
starting_indexes.p2aindex,
indexer_vecs.p2aindex_to_p2abytes.mut_vec(),
|i| (i, i),
exit,
)?;
self.unknownoutputindex_to_unknownoutputindex
.compute_range(
starting_indexes.unknownoutputindex,
indexer_vecs.unknownoutputindex_to_txindex.mut_vec(),
|i| (i, i),
exit,
)?;
// ---
// InputIndex
// ---
self.inputindex_to_inputindex.compute_range(
starting_indexes.inputindex,
indexer_vecs.inputindex_to_outputindex.mut_vec(),
|i| (i, i),
exit,
)?;
// ---
// TxIndex
// ---
self.txindex_to_last_inputindex
.compute_last_index_from_first(
starting_indexes.txindex,
indexer_vecs.txindex_to_first_inputindex.mut_vec(),
inputindexes_count,
exit,
)?;
self.txindex_to_last_outputindex
.compute_last_index_from_first(
starting_indexes.txindex,
indexer_vecs.txindex_to_first_outputindex.mut_vec(),
outputindexes_count,
exit,
)?;
self.txindex_to_txindex.compute_range(
starting_indexes.txindex,
self.txindex_to_last_inputindex.mut_vec(),
|i| (i, i),
exit,
)?;
self.height_to_last_txindex.compute_last_index_from_first(
starting_indexes.height,
indexer_vecs.height_to_first_txindex.mut_vec(),
txindexes_count,
exit,
)?;
self.txindex_to_height.compute_inverse_less_to_more(
starting_indexes.height,
indexer_vecs.height_to_first_txindex.mut_vec(),
self.height_to_last_txindex.mut_vec(),
exit,
)?;
// ---
// Height
// ---
self.height_to_height.compute_range( self.height_to_height.compute_range(
starting_indexes.height, starting_indexes.height,
indexer_vecs.height_to_timestamp.mut_vec(), indexer_vecs.height_to_timestamp.mut_vec(),
@@ -457,6 +561,10 @@ impl Vecs {
let decremented_starting_height = starting_indexes.height.decremented().unwrap_or_default(); let decremented_starting_height = starting_indexes.height.decremented().unwrap_or_default();
// ---
// DateIndex
// ---
let starting_dateindex = self let starting_dateindex = self
.height_to_dateindex .height_to_dateindex
.unwrap_cached_get(decremented_starting_height) .unwrap_cached_get(decremented_starting_height)
@@ -509,36 +617,8 @@ impl Vecs {
exit, exit,
)?; )?;
self.dateindex_to_timestamp.compute_transform( // ---
starting_dateindex, // WeekIndex
self.dateindex_to_date.mut_vec(),
|(di, d, ..)| (di, Timestamp::from(d)),
exit,
)?;
self.txindex_to_last_inputindex
.compute_last_index_from_first(
starting_indexes.txindex,
indexer_vecs.txindex_to_first_inputindex.mut_vec(),
inputindexes_count,
exit,
)?;
self.txindex_to_last_outputindex
.compute_last_index_from_first(
starting_indexes.txindex,
indexer_vecs.txindex_to_first_outputindex.mut_vec(),
outputindexes_count,
exit,
)?;
self.height_to_last_txindex.compute_last_index_from_first(
starting_indexes.height,
indexer_vecs.height_to_first_txindex.mut_vec(),
txindexes_count,
exit,
)?;
// --- // ---
let starting_weekindex = self let starting_weekindex = self
@@ -575,13 +655,46 @@ impl Vecs {
exit, exit,
)?; )?;
self.weekindex_to_timestamp.compute_transform( // ---
starting_weekindex, // DifficultyEpoch
self.weekindex_to_first_dateindex.mut_vec(), // ---
|(i, d, ..)| (i, self.dateindex_to_timestamp.double_unwrap_cached_get(d)),
let starting_difficultyepoch = self
.height_to_difficultyepoch
.unwrap_cached_get(decremented_starting_height)
.unwrap_or_default();
self.height_to_difficultyepoch.compute_range(
starting_indexes.height,
self.height_to_height.mut_vec(),
|h| (h, DifficultyEpoch::from(h)),
exit, exit,
)?; )?;
self.difficultyepoch_to_first_height
.compute_inverse_more_to_less(
starting_indexes.height,
self.height_to_difficultyepoch.mut_vec(),
exit,
)?;
self.difficultyepoch_to_last_height
.compute_last_index_from_first(
starting_difficultyepoch,
self.difficultyepoch_to_first_height.mut_vec(),
height_count,
exit,
)?;
self.difficultyepoch_to_difficultyepoch.compute_range(
starting_difficultyepoch,
self.difficultyepoch_to_first_height.mut_vec(),
|i| (i, i),
exit,
)?;
// ---
// MonthIndex
// --- // ---
let starting_monthindex = self let starting_monthindex = self
@@ -620,13 +733,8 @@ impl Vecs {
exit, exit,
)?; )?;
self.monthindex_to_timestamp.compute_transform( // ---
starting_monthindex, // QuarterIndex
self.monthindex_to_first_dateindex.mut_vec(),
|(i, d, ..)| (i, self.dateindex_to_timestamp.double_unwrap_cached_get(d)),
exit,
)?;
// --- // ---
let starting_quarterindex = self let starting_quarterindex = self
@@ -665,13 +773,8 @@ impl Vecs {
exit, exit,
)?; )?;
self.quarterindex_to_timestamp.compute_transform( // ---
starting_quarterindex, // YearIndex
self.quarterindex_to_first_monthindex.mut_vec(),
|(i, m, ..)| (i, self.monthindex_to_timestamp.double_unwrap_cached_get(m)),
exit,
)?;
// --- // ---
let starting_yearindex = self let starting_yearindex = self
@@ -710,104 +813,8 @@ impl Vecs {
exit, exit,
)?; )?;
self.yearindex_to_timestamp.compute_transform(
starting_yearindex,
self.yearindex_to_first_monthindex.mut_vec(),
|(i, m, ..)| (i, self.monthindex_to_timestamp.double_unwrap_cached_get(m)),
exit,
)?;
// --- // ---
// HalvingEpoch
let starting_decadeindex = self
.yearindex_to_decadeindex
.unwrap_cached_get(starting_yearindex)
.unwrap_or_default();
self.yearindex_to_decadeindex.compute_range(
starting_yearindex,
self.yearindex_to_yearindex.mut_vec(),
|i| (i, DecadeIndex::from(i)),
exit,
)?;
self.decadeindex_to_first_yearindex
.compute_inverse_more_to_less(
starting_yearindex,
self.yearindex_to_decadeindex.mut_vec(),
exit,
)?;
self.decadeindex_to_last_yearindex
.compute_last_index_from_first(
starting_decadeindex,
self.decadeindex_to_first_yearindex.mut_vec(),
year_count,
exit,
)?;
self.decadeindex_to_decadeindex.compute_range(
starting_decadeindex,
self.decadeindex_to_first_yearindex.mut_vec(),
|i| (i, i),
exit,
)?;
self.decadeindex_to_timestamp.compute_transform(
starting_decadeindex,
self.decadeindex_to_first_yearindex.mut_vec(),
|(i, y, ..)| (i, self.yearindex_to_timestamp.double_unwrap_cached_get(y)),
exit,
)?;
// ---
let starting_difficultyepoch = self
.height_to_difficultyepoch
.unwrap_cached_get(decremented_starting_height)
.unwrap_or_default();
self.height_to_difficultyepoch.compute_range(
starting_indexes.height,
self.height_to_height.mut_vec(),
|h| (h, DifficultyEpoch::from(h)),
exit,
)?;
self.difficultyepoch_to_first_height
.compute_inverse_more_to_less(
starting_indexes.height,
self.height_to_difficultyepoch.mut_vec(),
exit,
)?;
self.difficultyepoch_to_last_height
.compute_last_index_from_first(
starting_difficultyepoch,
self.difficultyepoch_to_first_height.mut_vec(),
height_count,
exit,
)?;
self.difficultyepoch_to_difficultyepoch.compute_range(
starting_difficultyepoch,
self.difficultyepoch_to_first_height.mut_vec(),
|i| (i, i),
exit,
)?;
self.difficultyepoch_to_timestamp.compute_transform(
starting_difficultyepoch,
self.difficultyepoch_to_first_height.mut_vec(),
|(i, h, ..)| {
(
i,
indexer_vecs.height_to_timestamp.double_unwrap_cached_get(h),
)
},
exit,
)?;
// --- // ---
let starting_halvingepoch = self let starting_halvingepoch = self
@@ -844,112 +851,44 @@ impl Vecs {
exit, exit,
)?; )?;
// self.difficultyepoch_to_timestamp.compute_transform( // ---
// starting_difficultyepoch, // DecadeIndex
// self.difficultyepoch_to_first_height.mut_vec(),
// |(i, h, ..)| {
// (
// i,
// *indexer_vecs.height_to_timestamp.unwraped_cached_get(h).unwrap().unwrap(),
// )
// },
// exit,
// )?;
// --- // ---
self.outputindex_to_outputindex.compute_range( let starting_decadeindex = self
starting_indexes.outputindex, .yearindex_to_decadeindex
indexer_vecs.outputindex_to_value.mut_vec(), .unwrap_cached_get(starting_yearindex)
|i| (i, i), .unwrap_or_default();
self.yearindex_to_decadeindex.compute_range(
starting_yearindex,
self.yearindex_to_yearindex.mut_vec(),
|i| (i, DecadeIndex::from(i)),
exit, exit,
)?; )?;
self.p2pk33index_to_p2pk33index.compute_range(
starting_indexes.p2pk33index, self.decadeindex_to_first_yearindex
indexer_vecs.p2pk33index_to_p2pk33bytes.mut_vec(), .compute_inverse_more_to_less(
|i| (i, i), starting_yearindex,
exit, self.yearindex_to_decadeindex.mut_vec(),
)?;
self.p2pk65index_to_p2pk65index.compute_range(
starting_indexes.p2pk65index,
indexer_vecs.p2pk65index_to_p2pk65bytes.mut_vec(),
|i| (i, i),
exit,
)?;
self.p2pkhindex_to_p2pkhindex.compute_range(
starting_indexes.p2pkhindex,
indexer_vecs.p2pkhindex_to_p2pkhbytes.mut_vec(),
|i| (i, i),
exit,
)?;
self.p2shindex_to_p2shindex.compute_range(
starting_indexes.p2shindex,
indexer_vecs.p2shindex_to_p2shbytes.mut_vec(),
|i| (i, i),
exit,
)?;
self.p2trindex_to_p2trindex.compute_range(
starting_indexes.p2trindex,
indexer_vecs.p2trindex_to_p2trbytes.mut_vec(),
|i| (i, i),
exit,
)?;
self.p2wpkhindex_to_p2wpkhindex.compute_range(
starting_indexes.p2wpkhindex,
indexer_vecs.p2wpkhindex_to_p2wpkhbytes.mut_vec(),
|i| (i, i),
exit,
)?;
self.p2wshindex_to_p2wshindex.compute_range(
starting_indexes.p2wshindex,
indexer_vecs.p2wshindex_to_p2wshbytes.mut_vec(),
|i| (i, i),
exit,
)?;
self.txindex_to_txindex.compute_range(
starting_indexes.txindex,
self.txindex_to_height.mut_vec(),
|i| (i, i),
exit,
)?;
self.inputindex_to_inputindex.compute_range(
starting_indexes.inputindex,
indexer_vecs.inputindex_to_outputindex.mut_vec(),
|i| (i, i),
exit,
)?;
self.emptyoutputindex_to_emptyoutputindex.compute_range(
starting_indexes.emptyoutputindex,
indexer_vecs.emptyoutputindex_to_txindex.mut_vec(),
|i| (i, i),
exit,
)?;
self.p2msindex_to_p2msindex.compute_range(
starting_indexes.p2msindex,
indexer_vecs.p2msindex_to_txindex.mut_vec(),
|i| (i, i),
exit,
)?;
self.opreturnindex_to_opreturnindex.compute_range(
starting_indexes.opreturnindex,
indexer_vecs.opreturnindex_to_txindex.mut_vec(),
|i| (i, i),
exit,
)?;
self.p2aindex_to_p2aindex.compute_range(
starting_indexes.p2aindex,
indexer_vecs.p2aindex_to_p2abytes.mut_vec(),
|i| (i, i),
exit,
)?;
self.unknownoutputindex_to_unknownoutputindex
.compute_range(
starting_indexes.unknownoutputindex,
indexer_vecs.unknownoutputindex_to_txindex.mut_vec(),
|i| (i, i),
exit, exit,
)?; )?;
self.decadeindex_to_last_yearindex
.compute_last_index_from_first(
starting_decadeindex,
self.decadeindex_to_first_yearindex.mut_vec(),
year_count,
exit,
)?;
self.decadeindex_to_decadeindex.compute_range(
starting_decadeindex,
self.decadeindex_to_first_yearindex.mut_vec(),
|i| (i, i),
exit,
)?;
Ok(Indexes { Ok(Indexes {
indexes: starting_indexes, indexes: starting_indexes,
dateindex: starting_dateindex, dateindex: starting_dateindex,
@@ -1000,19 +939,11 @@ impl Vecs {
self.decadeindex_to_decadeindex.any_vec(), self.decadeindex_to_decadeindex.any_vec(),
self.difficultyepoch_to_difficultyepoch.any_vec(), self.difficultyepoch_to_difficultyepoch.any_vec(),
self.halvingepoch_to_halvingepoch.any_vec(), self.halvingepoch_to_halvingepoch.any_vec(),
self.dateindex_to_timestamp.any_vec(),
self.decadeindex_to_timestamp.any_vec(),
self.difficultyepoch_to_timestamp.any_vec(),
self.halvingepoch_to_timestamp.any_vec(),
self.monthindex_to_timestamp.any_vec(),
self.weekindex_to_timestamp.any_vec(),
self.yearindex_to_timestamp.any_vec(),
self.height_to_timestamp_fixed.any_vec(), self.height_to_timestamp_fixed.any_vec(),
self.monthindex_to_quarterindex.any_vec(), self.monthindex_to_quarterindex.any_vec(),
self.quarterindex_to_first_monthindex.any_vec(), self.quarterindex_to_first_monthindex.any_vec(),
self.quarterindex_to_last_monthindex.any_vec(), self.quarterindex_to_last_monthindex.any_vec(),
self.quarterindex_to_quarterindex.any_vec(), self.quarterindex_to_quarterindex.any_vec(),
self.quarterindex_to_timestamp.any_vec(),
self.p2pk33index_to_p2pk33index.any_vec(), self.p2pk33index_to_p2pk33index.any_vec(),
self.p2pk65index_to_p2pk65index.any_vec(), self.p2pk65index_to_p2pk65index.any_vec(),
self.p2pkhindex_to_p2pkhindex.any_vec(), self.p2pkhindex_to_p2pkhindex.any_vec(),

View File

@@ -0,0 +1,122 @@
use std::{fs, path::Path};
use brk_core::{DifficultyEpoch, HalvingEpoch, StoredF64};
use brk_exit::Exit;
use brk_indexer::Indexer;
use brk_vec::{Compressed, DynamicVec, Version};
use super::{
Indexes,
grouped::{ComputedVecsFromDateindex, ComputedVecsFromHeight, StorableVecGeneatorOptions},
indexes,
};
#[derive(Clone)]
pub struct Vecs {
pub indexes_to_difficulty: ComputedVecsFromHeight<StoredF64>,
pub indexes_to_difficultyepoch: ComputedVecsFromDateindex<DifficultyEpoch>,
pub indexes_to_halvingepoch: ComputedVecsFromDateindex<HalvingEpoch>,
}
impl Vecs {
pub fn forced_import(path: &Path, compressed: Compressed) -> color_eyre::Result<Self> {
fs::create_dir_all(path)?;
Ok(Self {
indexes_to_difficulty: ComputedVecsFromHeight::forced_import(
path,
"difficulty",
false,
Version::ZERO,
compressed,
StorableVecGeneatorOptions::default().add_last(),
)?,
indexes_to_difficultyepoch: ComputedVecsFromDateindex::forced_import(
path,
"difficultyepoch",
Version::ZERO,
compressed,
StorableVecGeneatorOptions::default().add_last(),
)?,
indexes_to_halvingepoch: ComputedVecsFromDateindex::forced_import(
path,
"halvingepoch",
Version::ZERO,
compressed,
StorableVecGeneatorOptions::default().add_last(),
)?,
})
}
pub fn compute(
&mut self,
indexer: &mut Indexer,
indexes: &mut indexes::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> color_eyre::Result<()> {
self.indexes_to_difficultyepoch.compute(
indexer,
indexes,
starting_indexes,
exit,
|vec, _, indexes, starting_indexes, exit| {
vec.compute_transform(
starting_indexes.dateindex,
indexes.dateindex_to_last_height.mut_vec(),
|(di, height, ..)| {
(
di,
indexes
.height_to_difficultyepoch
.mut_vec()
.double_unwrap_cached_get(height),
)
},
exit,
)
},
)?;
self.indexes_to_halvingepoch.compute(
indexer,
indexes,
starting_indexes,
exit,
|vec, _, indexes, starting_indexes, exit| {
vec.compute_transform(
starting_indexes.dateindex,
indexes.dateindex_to_last_height.mut_vec(),
|(di, height, ..)| {
(
di,
indexes
.height_to_halvingepoch
.mut_vec()
.double_unwrap_cached_get(height),
)
},
exit,
)
},
)?;
self.indexes_to_difficulty.compute_rest(
indexes,
starting_indexes,
exit,
Some(indexer.mut_vecs().height_to_difficulty.mut_vec()),
)?;
Ok(())
}
pub fn as_any_vecs(&self) -> Vec<&dyn brk_vec::AnyStoredVec> {
[
self.indexes_to_difficulty.any_vecs(),
self.indexes_to_difficultyepoch.any_vecs(),
self.indexes_to_halvingepoch.any_vecs(),
]
.concat()
}
}

View File

@@ -9,6 +9,7 @@ pub mod blocks;
pub mod grouped; pub mod grouped;
pub mod indexes; pub mod indexes;
pub mod marketprice; pub mod marketprice;
pub mod mining;
pub mod transactions; pub mod transactions;
pub mod vec; pub mod vec;
@@ -19,7 +20,8 @@ pub use vec::*;
pub struct Vecs { pub struct Vecs {
pub blocks: blocks::Vecs, pub blocks: blocks::Vecs,
pub indexes: indexes::Vecs, pub indexes: indexes::Vecs,
pub transactions: transactions::Vecs, pub mining: mining::Vecs,
// pub transactions: transactions::Vecs,
pub marketprice: Option<marketprice::Vecs>, pub marketprice: Option<marketprice::Vecs>,
} }
@@ -30,7 +32,8 @@ impl Vecs {
Ok(Self { Ok(Self {
blocks: blocks::Vecs::forced_import(path, compressed)?, blocks: blocks::Vecs::forced_import(path, compressed)?,
indexes: indexes::Vecs::forced_import(path, compressed)?, indexes: indexes::Vecs::forced_import(path, compressed)?,
transactions: transactions::Vecs::forced_import(path, compressed, fetch)?, mining: mining::Vecs::forced_import(path, compressed)?,
// transactions: transactions::Vecs::forced_import(path, compressed, fetch)?,
marketprice: fetch.then(|| marketprice::Vecs::forced_import(path, compressed).unwrap()), marketprice: fetch.then(|| marketprice::Vecs::forced_import(path, compressed).unwrap()),
}) })
} }
@@ -47,6 +50,9 @@ impl Vecs {
self.blocks self.blocks
.compute(indexer, &mut self.indexes, &starting_indexes, exit)?; .compute(indexer, &mut self.indexes, &starting_indexes, exit)?;
self.mining
.compute(indexer, &mut self.indexes, &starting_indexes, exit)?;
if let Some(marketprice) = self.marketprice.as_mut() { if let Some(marketprice) = self.marketprice.as_mut() {
marketprice.compute( marketprice.compute(
indexer, indexer,
@@ -57,13 +63,13 @@ impl Vecs {
)?; )?;
} }
self.transactions.compute( // self.transactions.compute(
indexer, // indexer,
&mut self.indexes, // &mut self.indexes,
&starting_indexes, // &starting_indexes,
&mut self.marketprice.as_mut(), // &mut self.marketprice.as_mut(),
exit, // exit,
)?; // )?;
Ok(()) Ok(())
} }
@@ -72,7 +78,8 @@ impl Vecs {
[ [
self.indexes.as_any_vecs(), self.indexes.as_any_vecs(),
self.blocks.as_any_vecs(), self.blocks.as_any_vecs(),
self.transactions.as_any_vecs(), self.mining.as_any_vecs(),
// self.transactions.as_any_vecs(),
self.marketprice self.marketprice
.as_ref() .as_ref()
.map_or(vec![], |v| v.as_any_vecs()), .map_or(vec![], |v| v.as_any_vecs()),

View File

@@ -25,11 +25,11 @@ pub struct Vecs {
pub indexes_to_feerate: ComputedVecsFromTxindex<Feerate>, pub indexes_to_feerate: ComputedVecsFromTxindex<Feerate>,
pub indexes_to_input_value: ComputedVecsFromTxindex<Sats>, pub indexes_to_input_value: ComputedVecsFromTxindex<Sats>,
pub indexes_to_output_value: ComputedVecsFromTxindex<Sats>, pub indexes_to_output_value: ComputedVecsFromTxindex<Sats>,
// pub txindex_to_is_v1: ComputedVec<Txindex, bool>, // pub txindex_to_is_v1: LazyVec<Txindex, bool>,
pub indexes_to_tx_v1: ComputedVecsFromHeight<StoredU32>, pub indexes_to_tx_v1: ComputedVecsFromHeight<StoredU32>,
// pub txindex_to_is_v2: ComputedVec<Txindex, bool>, // pub txindex_to_is_v2: LazyVec<Txindex, bool>,
pub indexes_to_tx_v2: ComputedVecsFromHeight<StoredU32>, pub indexes_to_tx_v2: ComputedVecsFromHeight<StoredU32>,
// pub txindex_to_is_v3: ComputedVec<Txindex, bool>, // pub txindex_to_is_v3: LazyVec<Txindex, bool>,
pub indexes_to_tx_v3: ComputedVecsFromHeight<StoredU32>, pub indexes_to_tx_v3: ComputedVecsFromHeight<StoredU32>,
pub indexes_to_tx_vsize: ComputedVecsFromTxindex<StoredUsize>, pub indexes_to_tx_vsize: ComputedVecsFromTxindex<StoredUsize>,
pub indexes_to_tx_weight: ComputedVecsFromTxindex<Weight>, pub indexes_to_tx_weight: ComputedVecsFromTxindex<Weight>,

View File

@@ -252,7 +252,7 @@ where
first_indexes.iter_from(index, |(value, first_index, ..)| { first_indexes.iter_from(index, |(value, first_index, ..)| {
let first_index = (first_index).to_usize()?; let first_index = (first_index).to_usize()?;
let last_index = (last_indexes.double_unwrap_cached_get(value)).to_usize()?; let last_index = (last_indexes.double_unwrap_cached_get(value)).to_usize()?;
(first_index..last_index) (first_index..=last_index)
.try_for_each(|index| self.forced_push_at(I::from(index), value, exit)) .try_for_each(|index| self.forced_push_at(I::from(index), value, exit))
})?; })?;

View File

@@ -1,4 +1,7 @@
use std::{fmt::Debug, ops::Add}; use std::{
fmt::Debug,
ops::{Add, Div},
};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout};
@@ -43,6 +46,14 @@ impl From<DifficultyEpoch> for usize {
} }
} }
impl Add for DifficultyEpoch {
type Output = Self;
fn add(self, rhs: Self) -> Self::Output {
Self::from(self.0 + rhs.0)
}
}
impl Add<usize> for DifficultyEpoch { impl Add<usize> for DifficultyEpoch {
type Output = Self; type Output = Self;
@@ -51,6 +62,13 @@ impl Add<usize> for DifficultyEpoch {
} }
} }
impl Div<usize> for DifficultyEpoch {
type Output = Self;
fn div(self, rhs: usize) -> Self::Output {
Self::from(self.0 as usize / rhs)
}
}
impl From<Height> for DifficultyEpoch { impl From<Height> for DifficultyEpoch {
fn from(value: Height) -> Self { fn from(value: Height) -> Self {
Self((u32::from(value) / 2016) as u16) Self((u32::from(value) / 2016) as u16)

View File

@@ -1,4 +1,7 @@
use std::{fmt::Debug, ops::Add}; use std::{
fmt::Debug,
ops::{Add, Div},
};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout};
@@ -43,6 +46,14 @@ impl From<HalvingEpoch> for usize {
} }
} }
impl Add for HalvingEpoch {
type Output = Self;
fn add(self, rhs: Self) -> Self::Output {
Self::from(self.0 + rhs.0)
}
}
impl Add<usize> for HalvingEpoch { impl Add<usize> for HalvingEpoch {
type Output = Self; type Output = Self;
@@ -62,3 +73,10 @@ impl CheckedSub for HalvingEpoch {
self.0.checked_sub(rhs.0).map(Self) self.0.checked_sub(rhs.0).map(Self)
} }
} }
impl Div<usize> for HalvingEpoch {
type Output = Self;
fn div(self, rhs: usize) -> Self::Output {
Self::from(self.0 as usize / rhs)
}
}

View File

@@ -24,6 +24,7 @@ mod outputtypeindex;
mod quarterindex; mod quarterindex;
mod rawlocktime; mod rawlocktime;
mod sats; mod sats;
mod stored_f64;
mod stored_u32; mod stored_u32;
mod stored_u64; mod stored_u64;
mod stored_u8; mod stored_u8;
@@ -66,6 +67,7 @@ pub use outputtypeindex::*;
pub use quarterindex::*; pub use quarterindex::*;
pub use rawlocktime::*; pub use rawlocktime::*;
pub use sats::*; pub use sats::*;
pub use stored_f64::*;
pub use stored_u8::*; pub use stored_u8::*;
pub use stored_u32::*; pub use stored_u32::*;
pub use stored_u64::*; pub use stored_u64::*;

View File

@@ -6,7 +6,7 @@ use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout};
use super::{Cents, Dollars, Sats}; use super::{Cents, Dollars, Sats};
#[derive(Debug, Default, Clone, FromBytes, Immutable, IntoBytes, KnownLayout, Serialize)] #[derive(Debug, Default, Clone, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)] #[repr(C)]
pub struct OHLCCents { pub struct OHLCCents {
pub open: Open<Cents>, pub open: Open<Cents>,
@@ -37,6 +37,20 @@ impl From<Close<Cents>> for OHLCCents {
} }
} }
impl Serialize for OHLCCents {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut tup = serializer.serialize_tuple(4)?;
tup.serialize_element(&self.open)?;
tup.serialize_element(&self.high)?;
tup.serialize_element(&self.low)?;
tup.serialize_element(&self.close)?;
tup.end()
}
}
#[derive(Debug, Default, Clone, FromBytes, Immutable, IntoBytes, KnownLayout)] #[derive(Debug, Default, Clone, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)] #[repr(C)]
pub struct OHLCDollars { pub struct OHLCDollars {

View File

@@ -0,0 +1,69 @@
use std::ops::{Add, Div};
use derive_deref::Deref;
use serde::Serialize;
use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::CheckedSub;
#[derive(
Debug,
Deref,
Clone,
Copy,
PartialEq,
PartialOrd,
FromBytes,
Immutable,
IntoBytes,
KnownLayout,
Serialize,
)]
pub struct StoredF64(f64);
impl From<f64> for StoredF64 {
fn from(value: f64) -> Self {
Self(value)
}
}
impl From<usize> for StoredF64 {
fn from(value: usize) -> Self {
Self(value as f64)
}
}
impl CheckedSub<StoredF64> for StoredF64 {
fn checked_sub(self, rhs: Self) -> Option<Self> {
Some(Self(self.0 - rhs.0))
}
}
impl Div<usize> for StoredF64 {
type Output = Self;
fn div(self, rhs: usize) -> Self::Output {
Self(self.0 / rhs as f64)
}
}
impl Add for StoredF64 {
type Output = Self;
fn add(self, rhs: Self) -> Self::Output {
Self(self.0 + rhs.0)
}
}
impl From<StoredF64> for f64 {
fn from(value: StoredF64) -> Self {
value.0
}
}
impl Eq for StoredF64 {}
#[allow(clippy::derive_ord_xor_partial_ord)]
impl Ord for StoredF64 {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.0.partial_cmp(&other.0).unwrap()
}
}

View File

@@ -58,10 +58,9 @@ Stores: `src/storage/stores/mod.rs`
## Benchmark ## Benchmark
### Result 1 - 2025-04-12 ### `v0.0.21`
- version: `v0.0.21` - machine: `MBP M3 Pro (36GB RAM)`
- machine: `Macbook Pro M3 Pro (36GB RAM)`
- mode: `raw` - mode: `raw`
- from: `0` - from: `0`
- to: `892_098` - to: `892_098`
@@ -69,3 +68,10 @@ Stores: `src/storage/stores/mod.rs`
- peak memory: `6.1GB` - peak memory: `6.1GB`
- disk usage: `270 GB` - disk usage: `270 GB`
- overhead: `36%` (`270 GB / 741 GB`) - overhead: `36%` (`270 GB / 741 GB`)
### `v0.0.31`
- machine: `MBP M3 Pro (36GB RAM)`
- mode: `raw`
- disk usage: `208 GB`
- overhead: `28%` (`208 GB / 744 GB`)

View File

@@ -32,7 +32,7 @@ pub use stores::*;
pub use vecs::*; pub use vecs::*;
const SNAPSHOT_BLOCK_RANGE: usize = 1000; const SNAPSHOT_BLOCK_RANGE: usize = 1000;
const COLLISIONS_CHECKED_UP_TO: u32 = 0; const COLLISIONS_CHECKED_UP_TO: u32 = 893_000;
#[derive(Clone)] #[derive(Clone)]
pub struct Indexer { pub struct Indexer {
@@ -166,39 +166,35 @@ impl Indexer {
vecs.height_to_blockhash.push_if_needed(height, blockhash)?; vecs.height_to_blockhash.push_if_needed(height, blockhash)?;
vecs.height_to_difficulty vecs.height_to_difficulty
.push_if_needed(height, block.header.difficulty_float())?; .push_if_needed(height, block.header.difficulty_float().into())?;
vecs.height_to_timestamp vecs.height_to_timestamp
.push_if_needed(height, Timestamp::from(block.header.time))?; .push_if_needed(height, Timestamp::from(block.header.time))?;
vecs.height_to_total_size.push_if_needed(height, block.total_size().into())?; vecs.height_to_total_size.push_if_needed(height, block.total_size().into())?;
vecs.height_to_weight.push_if_needed(height, block.weight().into())?; vecs.height_to_weight.push_if_needed(height, block.weight().into())?;
let (inputs, outputs) = thread::scope(|s| { let inputs = block
let inputs_handle = s.spawn(|| block .txdata
.txdata .iter()
.iter() .enumerate()
.enumerate() .flat_map(|(index, tx)| {
.flat_map(|(index, tx)| { tx.input
tx.input .iter()
.iter() .enumerate()
.enumerate() .map(move |(vin, txin)| (TxIndex::from(index), Vin::from(vin), txin, tx))
.map(move |(vin, txin)| (TxIndex::from(index), Vin::from(vin), txin, tx)) })
}) .collect::<Vec<_>>();
.collect::<Vec<_>>());
let outputs_handle = s.spawn(|| block let outputs = block
.txdata .txdata
.iter() .iter()
.enumerate() .enumerate()
.flat_map(|(index, tx)| { .flat_map(|(index, tx)| {
tx.output tx.output
.iter() .iter()
.enumerate() .enumerate()
.map(move |(vout, txout)| (TxIndex::from(index), Vout::from(vout), txout, tx)) .map(move |(vout, txout)| (TxIndex::from(index), Vout::from(vout), txout, tx))
}) })
.collect::<Vec<_>>()); .collect::<Vec<_>>();
(inputs_handle.join().unwrap(), outputs_handle.join().unwrap())
});
let tx_len = block.txdata.len(); let tx_len = block.txdata.len();
let outputs_len = outputs.len(); let outputs_len = outputs.len();

View File

@@ -4,8 +4,8 @@ use brk_core::{
AddressBytes, BlockHash, EmptyOutputIndex, Height, InputIndex, OpReturnIndex, OutputIndex, AddressBytes, BlockHash, EmptyOutputIndex, Height, InputIndex, OpReturnIndex, OutputIndex,
OutputType, OutputTypeIndex, P2ABytes, P2AIndex, P2MSIndex, P2PK33Bytes, P2PK33Index, OutputType, OutputTypeIndex, P2ABytes, P2AIndex, P2MSIndex, P2PK33Bytes, P2PK33Index,
P2PK65Bytes, P2PK65Index, P2PKHBytes, P2PKHIndex, P2SHBytes, P2SHIndex, P2TRBytes, P2TRIndex, P2PK65Bytes, P2PK65Index, P2PKHBytes, P2PKHIndex, P2SHBytes, P2SHIndex, P2TRBytes, P2TRIndex,
P2WPKHBytes, P2WPKHIndex, P2WSHBytes, P2WSHIndex, RawLockTime, Sats, StoredU32, StoredUsize, P2WPKHBytes, P2WPKHIndex, P2WSHBytes, P2WSHIndex, RawLockTime, Sats, StoredF64, StoredU32,
Timestamp, TxIndex, TxVersion, Txid, UnknownOutputIndex, Weight, StoredUsize, Timestamp, TxIndex, TxVersion, Txid, UnknownOutputIndex, Weight,
}; };
use brk_vec::{AnyStoredVec, Compressed, Result, Version}; use brk_vec::{AnyStoredVec, Compressed, Result, Version};
use rayon::prelude::*; use rayon::prelude::*;
@@ -20,7 +20,7 @@ pub use base::*;
pub struct Vecs { pub struct Vecs {
pub emptyoutputindex_to_txindex: IndexedVec<EmptyOutputIndex, TxIndex>, pub emptyoutputindex_to_txindex: IndexedVec<EmptyOutputIndex, TxIndex>,
pub height_to_blockhash: IndexedVec<Height, BlockHash>, pub height_to_blockhash: IndexedVec<Height, BlockHash>,
pub height_to_difficulty: IndexedVec<Height, f64>, pub height_to_difficulty: IndexedVec<Height, StoredF64>,
pub height_to_first_emptyoutputindex: IndexedVec<Height, EmptyOutputIndex>, pub height_to_first_emptyoutputindex: IndexedVec<Height, EmptyOutputIndex>,
pub height_to_first_inputindex: IndexedVec<Height, InputIndex>, pub height_to_first_inputindex: IndexedVec<Height, InputIndex>,
pub height_to_first_opreturnindex: IndexedVec<Height, OpReturnIndex>, pub height_to_first_opreturnindex: IndexedVec<Height, OpReturnIndex>,

View File

@@ -0,0 +1,69 @@
use bitcoincore_rpc::{Auth, Client};
use brk_core::{Height, OutputType, default_bitcoin_path};
use brk_parser::Parser;
fn main() {
let i = std::time::Instant::now();
let bitcoin_dir = default_bitcoin_path();
let rpc = Box::leak(Box::new(
Client::new(
"http://localhost:8332",
Auth::CookieFile(bitcoin_dir.join(".cookie")),
)
.unwrap(),
));
// let start = None;
// let end = None;
let parser = Parser::new(bitcoin_dir.join("blocks"), rpc);
// parser
// .parse(start, end)
// .iter()
// .for_each(|(height, _block, hash)| {
// println!("{height}: {hash}");
// });
// println!(
// "{}",
// parser
// .get(Height::new(0))
// .txdata
// .first()
// .unwrap()
// .output
// .first()
// .unwrap()
// .script_pubkey
// );
let block_850_000 = parser.get(Height::new(850_000));
let tx = block_850_000.txdata.iter().find(|tx| {
tx.compute_txid().to_string()
== "b10c0000004da5a9d1d9b4ae32e09f0b3e62d21a5cce5428d4ad714fb444eb5d"
});
let output = tx.unwrap().tx_out(7).unwrap();
dbg!(OutputType::from(&output.script_pubkey));
dbg!(output);
// println!(
// "{}",
// .txdata
// .first()
// .unwrap()
// .output
// .first()
// .unwrap()
// .value
// );
dbg!(i.elapsed());
}

View File

@@ -4,92 +4,89 @@ use color_eyre::eyre::eyre;
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum Index { pub enum Index {
Addressindex, DateIndex,
Dateindex, DecadeIndex,
DifficultyEpoch,
EmptyOutputIndex,
HalvingEpoch,
Height, Height,
P2PK33index, InputIndex,
P2PK65index, MonthIndex,
P2PKHindex, OpReturnIndex,
P2SHindex, OutputIndex,
P2TRindex, P2AIndex,
P2WPKHindex, P2MSIndex,
P2WSHindex, P2PK33Index,
Txindex, P2PK65Index,
Inputindex, P2PKHIndex,
Outputindex, P2SHIndex,
Weekindex, P2TRIndex,
Monthindex, P2WPKHIndex,
Quarterindex, P2WSHIndex,
Yearindex, QuarterIndex,
Decadeindex, TxIndex,
Difficultyepoch, UnknownOutputIndex,
Halvingepoch, WeekIndex,
Emptyindex, YearIndex,
P2MSindex,
Opreturnindex,
Pushonlyindex,
Unknownindex,
} }
impl Index { impl Index {
pub fn all() -> [Self; 25] { pub fn all() -> [Self; 24] {
[ [
Self::DateIndex,
Self::DecadeIndex,
Self::DifficultyEpoch,
Self::EmptyOutputIndex,
Self::HalvingEpoch,
Self::Height, Self::Height,
Self::Dateindex, Self::InputIndex,
Self::Weekindex, Self::MonthIndex,
Self::Difficultyepoch, Self::OpReturnIndex,
Self::Monthindex, Self::OutputIndex,
Self::Quarterindex, Self::P2AIndex,
Self::Yearindex, Self::P2MSIndex,
Self::Decadeindex, Self::P2PK33Index,
Self::Halvingepoch, Self::P2PK65Index,
Self::Addressindex, Self::P2PKHIndex,
Self::P2PK33index, Self::P2SHIndex,
Self::P2PK65index, Self::P2TRIndex,
Self::P2PKHindex, Self::P2WPKHIndex,
Self::P2SHindex, Self::P2WSHIndex,
Self::P2TRindex, Self::QuarterIndex,
Self::P2WPKHindex, Self::TxIndex,
Self::P2WSHindex, Self::UnknownOutputIndex,
Self::Txindex, Self::WeekIndex,
Self::Inputindex, Self::YearIndex,
Self::Outputindex,
Self::Emptyindex,
Self::P2MSindex,
Self::Opreturnindex,
Self::Pushonlyindex,
Self::Unknownindex,
] ]
} }
pub fn possible_values(&self) -> &[&str] { pub fn possible_values(&self) -> &[&str] {
// Always have the "correct" id at the end // Always have the "correct" id at the end
match self { match self {
Self::DateIndex => &["d", "date", "dateindex"],
Self::DecadeIndex => &["decade", "decadeindex"],
Self::DifficultyEpoch => &["difficulty", "difficultyepoch"],
Self::EmptyOutputIndex => &["empty", "emptyoutputindex"],
Self::HalvingEpoch => &["h", "halving", "halvingepoch"],
Self::Height => &["h", "height"], Self::Height => &["h", "height"],
Self::Dateindex => &["d", "date", "dateindex"], Self::InputIndex => &["txin", "inputindex"],
Self::Weekindex => &["w", "week", "weekindex"], Self::MonthIndex => &["m", "month", "monthindex"],
Self::Difficultyepoch => &["difficulty", "difficultyepoch"], Self::OpReturnIndex => &["opreturn", "opreturnindex"],
Self::Monthindex => &["m", "month", "monthindex"], Self::OutputIndex => &["txout", "outputindex"],
Self::Quarterindex => &["q", "quarter", "quarterindex"], Self::P2AIndex => &["p2a", "p2aindex"],
Self::Yearindex => &["y", "year", "yearindex"], Self::P2MSIndex => &["p2ms", "p2msindex"],
Self::Decadeindex => &["decade", "decadeindex"], Self::P2PK33Index => &["p2pk33", "p2pk33index"],
Self::Halvingepoch => &["h", "halving", "halvingepoch"], Self::P2PK65Index => &["p2pk65", "p2pk65index"],
Self::Txindex => &["tx", "txindex"], Self::P2PKHIndex => &["p2pkh", "p2pkhindex"],
Self::Inputindex => &["txin", "inputindex"], Self::P2SHIndex => &["p2sh", "p2shindex"],
Self::Outputindex => &["txout", "outputindex"], Self::P2TRIndex => &["p2tr", "p2trindex"],
Self::Addressindex => &["a", "address", "addressindex"], Self::P2WPKHIndex => &["p2wpkh", "p2wpkhindex"],
Self::P2PK33index => &["p2pk33", "p2pk33index"], Self::P2WSHIndex => &["p2wsh", "p2wshindex"],
Self::P2PK65index => &["p2pk65", "p2pk65index"], Self::QuarterIndex => &["q", "quarter", "quarterindex"],
Self::P2PKHindex => &["p2pkh", "p2pkhindex"], Self::TxIndex => &["tx", "txindex"],
Self::P2SHindex => &["p2sh", "p2shindex"], Self::UnknownOutputIndex => &["unknown", "unknownoutputindex"],
Self::P2TRindex => &["p2tr", "p2trindex"], Self::WeekIndex => &["w", "week", "weekindex"],
Self::P2WPKHindex => &["p2wpkh", "p2wpkhindex"], Self::YearIndex => &["y", "year", "yearindex"],
Self::P2WSHindex => &["p2wsh", "p2wshindex"],
Self::Emptyindex => &["empty", "emptyoutputindex"],
Self::P2MSindex => &["multisig", "p2msindex"],
Self::Opreturnindex => &["opreturn", "opreturnindex"],
Self::Pushonlyindex => &["pushonly", "pushonlyindex"],
Self::Unknownindex => &["unknown", "unknownoutputindex"],
} }
} }
@@ -117,32 +114,33 @@ impl TryFrom<&str> for Index {
type Error = color_eyre::Report; type Error = color_eyre::Report;
fn try_from(value: &str) -> Result<Self, Self::Error> { fn try_from(value: &str) -> Result<Self, Self::Error> {
Ok(match value.to_lowercase().as_str() { Ok(match value.to_lowercase().as_str() {
v if (Self::Dateindex).possible_values().contains(&v) => Self::Dateindex, v if (Self::DateIndex).possible_values().contains(&v) => Self::DateIndex,
v if (Self::DecadeIndex).possible_values().contains(&v) => Self::DecadeIndex,
v if (Self::DifficultyEpoch).possible_values().contains(&v) => Self::DifficultyEpoch,
v if (Self::EmptyOutputIndex).possible_values().contains(&v) => Self::EmptyOutputIndex,
v if (Self::HalvingEpoch).possible_values().contains(&v) => Self::HalvingEpoch,
v if (Self::Height).possible_values().contains(&v) => Self::Height, v if (Self::Height).possible_values().contains(&v) => Self::Height,
v if (Self::Txindex).possible_values().contains(&v) => Self::Txindex, v if (Self::InputIndex).possible_values().contains(&v) => Self::InputIndex,
v if (Self::Inputindex).possible_values().contains(&v) => Self::Inputindex, v if (Self::MonthIndex).possible_values().contains(&v) => Self::MonthIndex,
v if (Self::Outputindex).possible_values().contains(&v) => Self::Outputindex, v if (Self::OpReturnIndex).possible_values().contains(&v) => Self::OpReturnIndex,
v if (Self::Addressindex).possible_values().contains(&v) => Self::Addressindex, v if (Self::OutputIndex).possible_values().contains(&v) => Self::OutputIndex,
v if (Self::P2PK33index).possible_values().contains(&v) => Self::P2PK33index, v if (Self::P2AIndex).possible_values().contains(&v) => Self::P2AIndex,
v if (Self::P2PK65index).possible_values().contains(&v) => Self::P2PK65index, v if (Self::P2MSIndex).possible_values().contains(&v) => Self::P2MSIndex,
v if (Self::P2PKHindex).possible_values().contains(&v) => Self::P2PKHindex, v if (Self::P2PK33Index).possible_values().contains(&v) => Self::P2PK33Index,
v if (Self::P2SHindex).possible_values().contains(&v) => Self::P2SHindex, v if (Self::P2PK65Index).possible_values().contains(&v) => Self::P2PK65Index,
v if (Self::P2TRindex).possible_values().contains(&v) => Self::P2TRindex, v if (Self::P2PKHIndex).possible_values().contains(&v) => Self::P2PKHIndex,
v if (Self::P2WPKHindex).possible_values().contains(&v) => Self::P2WPKHindex, v if (Self::P2SHIndex).possible_values().contains(&v) => Self::P2SHIndex,
v if (Self::P2WSHindex).possible_values().contains(&v) => Self::P2WSHindex, v if (Self::P2TRIndex).possible_values().contains(&v) => Self::P2TRIndex,
v if (Self::Weekindex).possible_values().contains(&v) => Self::Weekindex, v if (Self::P2WPKHIndex).possible_values().contains(&v) => Self::P2WPKHIndex,
v if (Self::Monthindex).possible_values().contains(&v) => Self::Monthindex, v if (Self::P2WSHIndex).possible_values().contains(&v) => Self::P2WSHIndex,
v if (Self::Yearindex).possible_values().contains(&v) => Self::Yearindex, v if (Self::QuarterIndex).possible_values().contains(&v) => Self::QuarterIndex,
v if (Self::Decadeindex).possible_values().contains(&v) => Self::Decadeindex, v if (Self::QuarterIndex).possible_values().contains(&v) => Self::QuarterIndex,
v if (Self::Difficultyepoch).possible_values().contains(&v) => Self::Difficultyepoch, v if (Self::TxIndex).possible_values().contains(&v) => Self::TxIndex,
v if (Self::Halvingepoch).possible_values().contains(&v) => Self::Halvingepoch, v if (Self::WeekIndex).possible_values().contains(&v) => Self::WeekIndex,
v if (Self::Quarterindex).possible_values().contains(&v) => Self::Quarterindex, v if (Self::YearIndex).possible_values().contains(&v) => Self::YearIndex,
v if (Self::Quarterindex).possible_values().contains(&v) => Self::Quarterindex, v if (Self::UnknownOutputIndex).possible_values().contains(&v) => {
v if (Self::Emptyindex).possible_values().contains(&v) => Self::Emptyindex, Self::UnknownOutputIndex
v if (Self::P2MSindex).possible_values().contains(&v) => Self::P2MSindex, }
v if (Self::Opreturnindex).possible_values().contains(&v) => Self::Opreturnindex,
v if (Self::Pushonlyindex).possible_values().contains(&v) => Self::Pushonlyindex,
v if (Self::Unknownindex).possible_values().contains(&v) => Self::Unknownindex,
_ => return Err(eyre!("Bad index")), _ => return Err(eyre!("Bad index")),
}) })
} }

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

View File

@@ -98,17 +98,17 @@ export default import("./v5.0.5-treeshaked/script.js").then((lc) => {
timeScale: { timeScale: {
borderVisible: false, borderVisible: false,
timeVisible: timeVisible:
index === /** @satisfies {Height} */ (0) || index === /** @satisfies {Height} */ (5) ||
index === /** @satisfies {Difficultyepoch} */ (3) || index === /** @satisfies {DifficultyEpoch} */ (2) ||
index === /** @satisfies {Halvingepoch} */ (8), index === /** @satisfies {HalvingEpoch} */ (4),
minBarSpacing: minBarSpacing:
index === /** @satisfies {Monthindex} */ (4) index === /** @satisfies {MonthIndex} */ (7)
? 1 ? 1
: index === /** @satisfies {Quarterindex} */ (5) : index === /** @satisfies {QuarterIndex} */ (19)
? 3 ? 3
: index === /** @satisfies {Yearindex} */ (6) : index === /** @satisfies {YearIndex} */ (23)
? 12 ? 12
: index === /** @satisfies {Decadeindex} */ (7) : index === /** @satisfies {DecadeIndex} */ (1)
? 120 ? 120
: undefined, : undefined,
}, },
@@ -199,8 +199,8 @@ export default import("./v5.0.5-treeshaked/script.js").then((lc) => {
signals.runWithOwner(owner, () => signals.runWithOwner(owner, () =>
signals.createEffect( signals.createEffect(
() => [ () => [
timeResource?.fetched[fetchedKey](), timeResource?.fetched[fetchedKey].vec(),
valuesResource.fetched[fetchedKey](), valuesResource.fetched[fetchedKey].vec(),
], ],
([indexes, _ohlcs]) => { ([indexes, _ohlcs]) => {
if (!ichart) throw Error("IChart should be initialized"); if (!ichart) throw Error("IChart should be initialized");
@@ -238,9 +238,9 @@ export default import("./v5.0.5-treeshaked/script.js").then((lc) => {
timeScaleSetCallback?.(() => { timeScaleSetCallback?.(() => {
if ( if (
!timeScaleSet && !timeScaleSet &&
(vecIndex === /** @satisfies {Quarterindex} */ (5) || (vecIndex === /** @satisfies {QuarterIndex} */ (19) ||
vecIndex === /** @satisfies {Yearindex} */ (6) || vecIndex === /** @satisfies {YearIndex} */ (23) ||
vecIndex === /** @satisfies {Decadeindex} */ (7)) vecIndex === /** @satisfies {DecadeIndex} */ (1))
) { ) {
ichart ichart
?.timeScale() ?.timeScale()
@@ -278,8 +278,8 @@ export default import("./v5.0.5-treeshaked/script.js").then((lc) => {
timeResource = vecsResources.getOrCreate( timeResource = vecsResources.getOrCreate(
vecIndex, vecIndex,
vecIndex === /** @satisfies {Height} */ (0) vecIndex === /** @satisfies {Height} */ (5)
? "fixed-timestamp" ? "timestamp-fixed"
: "timestamp", : "timestamp",
); );
timeResource.fetch(); timeResource.fetch();
@@ -648,7 +648,7 @@ export default import("./v5.0.5-treeshaked/script.js").then((lc) => {
}; };
config?.forEach(({ unit, blueprints }, paneIndex) => { config?.forEach(({ unit, blueprints }, paneIndex) => {
chart.create({ index: /** @satisfies {Dateindex} */ (1) }); chart.create({ index: /** @satisfies {DateIndex} */ (0) });
blueprints.forEach((blueprint) => { blueprints.forEach((blueprint) => {
if (blueprint.type === "Candlestick") { if (blueprint.type === "Candlestick") {

View File

@@ -0,0 +1,686 @@
// @ts-nocheck
// src/core/error.ts
var NotReadyError = class extends Error {
};
var EffectError = class extends Error {
constructor(effect, cause) {
super("");
this.cause = cause;
}
};
// src/core/constants.ts
var STATE_CLEAN = 0;
var STATE_CHECK = 1;
var STATE_DIRTY = 2;
var STATE_DISPOSED = 3;
var EFFECT_PURE = 0;
var EFFECT_RENDER = 1;
var EFFECT_USER = 2;
// src/core/scheduler.ts
var clock = 0;
function getClock() {
return clock;
}
function incrementClock() {
clock++;
}
var scheduled = false;
function schedule() {
if (scheduled)
return;
scheduled = true;
if (!globalQueue.y)
queueMicrotask(flushSync);
}
var Queue = class {
i = null;
y = false;
m = [[], [], []];
v = [];
created = clock;
enqueue(type, node) {
this.m[0].push(node);
if (type)
this.m[type].push(node);
schedule();
}
run(type) {
if (this.m[type].length) {
if (type === EFFECT_PURE) {
runPureQueue(this.m[type]);
this.m[type] = [];
} else {
const effects = this.m[type];
this.m[type] = [];
runEffectQueue(effects);
}
}
let rerun = false;
for (let i = 0; i < this.v.length; i++) {
rerun = this.v[i].run(type) || rerun;
}
if (type === EFFECT_PURE)
return rerun || !!this.m[type].length;
}
flush() {
if (this.y)
return;
this.y = true;
try {
while (this.run(EFFECT_PURE)) {
}
incrementClock();
scheduled = false;
this.run(EFFECT_RENDER);
this.run(EFFECT_USER);
} finally {
this.y = false;
}
}
addChild(child) {
this.v.push(child);
child.i = this;
}
removeChild(child) {
const index = this.v.indexOf(child);
if (index >= 0)
this.v.splice(index, 1);
}
notify(...args) {
if (this.i)
return this.i.notify(...args);
return false;
}
};
var globalQueue = new Queue();
function flushSync() {
while (scheduled) {
globalQueue.flush();
}
}
function runTop(node) {
const ancestors = [];
for (let current = node; current !== null; current = current.i) {
if (current.a !== STATE_CLEAN) {
ancestors.push(current);
}
}
for (let i = ancestors.length - 1; i >= 0; i--) {
if (ancestors[i].a !== STATE_DISPOSED)
ancestors[i].p();
}
}
function runPureQueue(queue) {
for (let i = 0; i < queue.length; i++) {
if (queue[i].a !== STATE_CLEAN)
runTop(queue[i]);
}
}
function runEffectQueue(queue) {
for (let i = 0; i < queue.length; i++)
queue[i].L();
}
// src/core/owner.ts
var currentOwner = null;
var defaultContext = {};
function getOwner() {
return currentOwner;
}
function setOwner(owner) {
const out = currentOwner;
currentOwner = owner;
return out;
}
function formatId(prefix, id) {
const num = id.toString(36), len = num.length - 1;
return prefix + (len ? String.fromCharCode(64 + len) : "") + num;
}
var Owner = class {
// We flatten the owner tree into a linked list so that we don't need a pointer to .firstChild
// However, the children are actually added in reverse creation order
// See comment at the top of the file for an example of the _nextSibling traversal
i = null;
g = null;
n = null;
a = STATE_CLEAN;
h = null;
j = defaultContext;
f = globalQueue;
G = null;
M = 0;
id = null;
constructor(id = null, skipAppend = false) {
this.id = id;
if (currentOwner && !skipAppend)
currentOwner.append(this);
}
append(child) {
child.i = this;
child.n = this;
if (this.id) {
child.G = this.g ? this.g.G + 1 : 0;
child.id = formatId(this.id, child.G);
}
if (this.g)
this.g.n = child;
child.g = this.g;
this.g = child;
if (child.j !== this.j) {
child.j = { ...this.j, ...child.j };
}
if (this.f)
child.f = this.f;
}
dispose(self = true) {
if (this.a === STATE_DISPOSED)
return;
let head = self ? this.n || this.i : this, current = this.g, next = null;
while (current && current.i === this) {
current.dispose(true);
current.q();
next = current.g;
current.g = null;
current = next;
}
this.M = 0;
if (self)
this.q();
if (current)
current.n = !self ? this : this.n;
if (head)
head.g = current;
}
q() {
if (this.n)
this.n.g = null;
this.i = null;
this.n = null;
this.j = defaultContext;
this.a = STATE_DISPOSED;
this.emptyDisposal();
}
emptyDisposal() {
if (!this.h)
return;
if (Array.isArray(this.h)) {
for (let i = 0; i < this.h.length; i++) {
const callable = this.h[i];
callable.call(callable);
}
} else {
this.h.call(this.h);
}
this.h = null;
}
getNextChildId() {
if (this.id)
return formatId(this.id + "-", this.M++);
throw new Error("Cannot get child id from owner without an id");
}
};
function onCleanup(fn) {
if (!currentOwner)
return fn;
const node = currentOwner;
if (!node.h) {
node.h = fn;
} else if (Array.isArray(node.h)) {
node.h.push(fn);
} else {
node.h = [node.h, fn];
}
return fn;
}
// src/core/flags.ts
var ERROR_OFFSET = 0;
var ERROR_BIT = 1 << ERROR_OFFSET;
var LOADING_OFFSET = 1;
var LOADING_BIT = 1 << LOADING_OFFSET;
var UNINITIALIZED_OFFSET = 2;
var UNINITIALIZED_BIT = 1 << UNINITIALIZED_OFFSET;
var DEFAULT_FLAGS = ERROR_BIT;
// src/core/core.ts
var currentObserver = null;
var currentMask = DEFAULT_FLAGS;
var newSources = null;
var newSourcesIndex = 0;
var newFlags = 0;
var notStale = false;
var UNCHANGED = Symbol(0);
var Computation = class extends Owner {
b = null;
c = null;
e;
w;
r;
// Used in __DEV__ mode, hopefully removed in production
Q;
// Using false is an optimization as an alternative to _equals: () => false
// which could enable more efficient DIRTY notification
H = isEqual;
N;
/** Whether the computation is an error or has ancestors that are unresolved */
d = 0;
/** Which flags raised by sources are handled, vs. being passed through. */
I = DEFAULT_FLAGS;
s = -1;
z = false;
constructor(initialValue, compute2, options) {
super(null, compute2 === null);
this.r = compute2;
this.a = compute2 ? STATE_DIRTY : STATE_CLEAN;
this.d = compute2 && initialValue === void 0 ? UNINITIALIZED_BIT : 0;
this.e = initialValue;
if (options?.equals !== void 0)
this.H = options.equals;
if (options?.unobserved)
this.N = options?.unobserved;
}
O() {
if (this.r) {
if (this.d & ERROR_BIT && this.s <= getClock())
update(this);
else
this.p();
}
track(this);
newFlags |= this.d & ~currentMask;
if (this.d & ERROR_BIT) {
throw this.w;
} else {
return this.e;
}
}
/**
* Return the current value of this computation
* Automatically re-executes the surrounding computation when the value changes
*/
read() {
return this.O();
}
/**
* Return the current value of this computation
* Automatically re-executes the surrounding computation when the value changes
*
* If the computation has any unresolved ancestors, this function waits for the value to resolve
* before continuing
*/
wait() {
if (this.r && this.d & ERROR_BIT && this.s <= getClock()) {
update(this);
} else {
this.p();
}
track(this);
if ((notStale || this.d & UNINITIALIZED_BIT) && this.d & LOADING_BIT) {
throw new NotReadyError();
}
return this.O();
}
/** Update the computation with a new value. */
write(value, flags = 0, raw = false) {
  // Unless `raw`, a function value is treated as an updater of the previous value.
  const newValue = !raw && typeof value === "function" ? value(this.e) : value;
  // The value counts as changed when it is not the UNCHANGED sentinel and:
  // the node was never initialized, the LOADING bit is being cleared,
  // custom equality is disabled (false), or the equality check fails.
  const valueChanged = newValue !== UNCHANGED && (!!(this.d & UNINITIALIZED_BIT) || this.d & LOADING_BIT & ~flags || this.H === false || !this.H(this.e, newValue));
  if (valueChanged) {
    this.e = newValue;
    // A successful write clears any stored error.
    this.w = void 0;
  }
  const changedFlagsMask = this.d ^ flags, changedFlags = changedFlagsMask & flags;
  this.d = flags;
  // Stamp with the next clock tick so observers see this write as fresh.
  this.s = getClock() + 1;
  if (this.c) {
    for (let i = 0; i < this.c.length; i++) {
      if (valueChanged) {
        // A value change dirties every observer.
        this.c[i].l(STATE_DIRTY);
      } else if (changedFlagsMask) {
        // Flag-only changes are forwarded without forcing recomputation.
        this.c[i].P(changedFlagsMask, changedFlags);
      }
    }
  }
  return this.e;
}
/**
* Set the current node's state, and recursively mark all of this node's observers as STATE_CHECK
*/
l(state, skipQueue) {
if (this.a >= state && !this.z)
return;
this.z = !!skipQueue;
this.a = state;
if (this.c) {
for (let i = 0; i < this.c.length; i++) {
this.c[i].l(STATE_CHECK, skipQueue);
}
}
}
/**
 * Notify the computation that one of its sources has changed flags.
 *
 * @param mask A bitmask for which flag(s) were changed.
 * @param newFlags The source's new flags, masked to just the changed ones.
 */
P(mask, newFlags2) {
  // Already fully dirty: a recompute will pick up the new flags anyway.
  if (this.a >= STATE_DIRTY)
    return;
  // If this node handles any of the changed flags itself, it must recompute.
  if (mask & this.I) {
    this.l(STATE_DIRTY);
    return;
  }
  if (this.a >= STATE_CHECK)
    return;
  const prevFlags = this.d & mask;
  const deltaFlags = prevFlags ^ newFlags2;
  // Empty statement: no actual change within the mask, nothing to do.
  if (newFlags2 === prevFlags) ; else if (deltaFlags & prevFlags & mask) {
    // A flag this node was passing through got cleared: sources must be re-checked.
    this.l(STATE_CHECK);
  } else {
    // Pure flag addition: absorb the delta and forward to our own observers.
    this.d ^= deltaFlags;
    if (this.c) {
      for (let i = 0; i < this.c.length; i++) {
        this.c[i].P(mask, newFlags2);
      }
    }
  }
}
J(error) {
  // Record the error, then flip the node into an errored, uninitialized,
  // non-loading state via the normal write path so observers are notified.
  this.w = error;
  const flags = (this.d & ~LOADING_BIT) | ERROR_BIT | UNINITIALIZED_BIT;
  this.write(UNCHANGED, flags);
}
/**
* This is the core part of the reactivity system, which makes sure that the values are updated
* before they are read. We've also adapted it to return the loading state of the computation,
* so that we can propagate that to the computation's observers.
*
* This function will ensure that the value and states we read from the computation are up to date
*/
p() {
if (!this.r) {
return;
}
if (this.a === STATE_DISPOSED) {
return;
}
if (this.a === STATE_CLEAN) {
return;
}
let observerFlags = 0;
if (this.a === STATE_CHECK) {
for (let i = 0; i < this.b.length; i++) {
this.b[i].p();
observerFlags |= this.b[i].d;
if (this.a === STATE_DIRTY) {
break;
}
}
}
if (this.a === STATE_DIRTY) {
update(this);
} else {
this.write(UNCHANGED, observerFlags);
this.a = STATE_CLEAN;
}
}
/**
* Remove ourselves from the owner graph and the computation graph
*/
q() {
if (this.a === STATE_DISPOSED)
return;
if (this.b)
removeSourceObservers(this, 0);
super.q();
}
};
function track(computation) {
  // No observer running: reads are untracked.
  if (!currentObserver) return;
  if (newSources) {
    // Already diverged from the observer's previous source list: append,
    // unless this is an immediate duplicate of the last tracked source.
    if (computation !== newSources[newSources.length - 1]) {
      newSources.push(computation);
    }
    return;
  }
  const prevList = currentObserver.b;
  if (prevList && prevList[newSourcesIndex] === computation) {
    // Still matching the previous source list in order: advance the cursor.
    newSourcesIndex++;
  } else {
    // First divergence: start collecting the new source list.
    newSources = [computation];
  }
}
function update(node) {
  // Save the module-level tracking state so nested updates do not clobber it.
  const prevSources = newSources, prevSourcesIndex = newSourcesIndex, prevFlags = newFlags;
  newSources = null;
  newSourcesIndex = 0;
  newFlags = 0;
  try {
    // Dispose children and run cleanups from the previous execution.
    node.dispose(false);
    node.emptyDisposal();
    const result = compute(node, node.r, node);
    // Commit the new value together with whatever flags the sources raised.
    node.write(result, newFlags, true);
  } catch (error) {
    if (error instanceof NotReadyError) {
      // An async source is not ready yet: keep the old value, mark loading,
      // and preserve the uninitialized bit.
      node.write(UNCHANGED, newFlags | LOADING_BIT | node.d & UNINITIALIZED_BIT);
    } else {
      node.J(error);
    }
  } finally {
    // Reconcile the source list gathered during compute with the old one.
    if (newSources) {
      // Unsubscribe from old sources past the unchanged prefix.
      if (node.b)
        removeSourceObservers(node, newSourcesIndex);
      if (node.b && newSourcesIndex > 0) {
        // Keep the shared prefix, then splice in the newly tracked sources.
        node.b.length = newSourcesIndex + newSources.length;
        for (let i = 0; i < newSources.length; i++) {
          node.b[newSourcesIndex + i] = newSources[i];
        }
      } else {
        node.b = newSources;
      }
      let source;
      // Subscribe this node to every newly added source.
      for (let i = newSourcesIndex; i < node.b.length; i++) {
        source = node.b[i];
        if (!source.c)
          source.c = [node];
        else
          source.c.push(node);
      }
    } else if (node.b && newSourcesIndex < node.b.length) {
      // Fewer sources were read this run: drop the now-unused tail.
      removeSourceObservers(node, newSourcesIndex);
      node.b.length = newSourcesIndex;
    }
    // Restore the outer tracking context; stamp the node fresh and clean.
    newSources = prevSources;
    newSourcesIndex = prevSourcesIndex;
    newFlags = prevFlags;
    node.s = getClock() + 1;
    node.a = STATE_CLEAN;
  }
}
function removeSourceObservers(node, index) {
  // Unsubscribe `node` from every source at position `index` and beyond.
  for (let i = index; i < node.b.length; i++) {
    const source = node.b[i];
    const observers = source.c;
    if (!observers) continue;
    // Swap-remove: overwrite our slot with the last observer, then pop.
    const at = observers.indexOf(node);
    observers[at] = observers[observers.length - 1];
    observers.pop();
    // If that was the last observer, fire the source's unobserved callback.
    if (!observers.length) source.N?.();
  }
}
function isEqual(a, b) {
  // Default equality used by computations: strict identity.
  const same = a === b;
  return same;
}
function untrack(fn) {
  // With no active observer there is nothing to shield `fn` from; otherwise
  // run it under the current owner with a null observer so reads register
  // no dependencies.
  return currentObserver === null ? fn() : compute(getOwner(), fn, null);
}
function latest(fn, fallback) {
  // arguments.length distinguishes "no fallback" from an explicit undefined.
  const hasFallback = arguments.length > 1;
  const savedFlags = newFlags;
  const savedNotStale = notStale;
  // Tolerate stale async values while `fn` runs.
  notStale = false;
  try {
    return fn();
  } catch (err) {
    if (hasFallback && err instanceof NotReadyError) {
      return fallback;
    }
    throw err;
  } finally {
    newFlags = savedFlags;
    notStale = savedNotStale;
  }
}
function compute(owner, fn, observer) {
  // Swap in the owner/observer context, guaranteeing restoration on exit.
  const prevOwner = setOwner(owner);
  const prevObserver = currentObserver;
  const prevMask = currentMask;
  const prevNotStale = notStale;
  currentObserver = observer;
  const mask = observer?.I;
  currentMask = mask ?? DEFAULT_FLAGS;
  notStale = true;
  try {
    // An observing computation receives its previous value as the argument.
    return fn(observer ? observer.e : void 0);
  } finally {
    setOwner(prevOwner);
    currentObserver = prevObserver;
    currentMask = prevMask;
    notStale = prevNotStale;
  }
}
// src/core/effect.ts
var Effect = class extends Computation {
  /** Side-effect handler run after the compute produces a new value (`_effect`). */
  A;
  /** Optional error handler (`_onerror`). */
  B;
  /** Cleanup function returned by the last effect/error handler run (`_cleanup`). */
  t;
  /** Whether the computed value changed since the effect last ran (`_modified`). */
  K = false;
  /** Previous value handed to the effect handler (`_prevValue`). */
  C;
  /** Queue type: EFFECT_RENDER or EFFECT_USER (`_type`). */
  o;
  constructor(initialValue, compute2, effect, error, options) {
    super(initialValue, compute2, options);
    this.A = effect;
    this.B = error;
    this.C = initialValue;
    this.o = options?.render ? EFFECT_RENDER : EFFECT_USER;
    if (this.o === EFFECT_RENDER) {
      // Render effects tolerate stale async values once past their creation
      // tick, unless the node is currently in an error state.
      this.r = (p) => getClock() > this.f.created && !(this.d & ERROR_BIT) ? latest(() => compute2(p)) : compute2(p);
    }
    this.p();
    // Unless deferred: user effects are queued, render effects run immediately.
    !options?.defer && (this.o === EFFECT_USER ? this.f.enqueue(this.o, this) : this.L());
  }
  write(value, flags = 0) {
    if (this.a === STATE_DIRTY) {
      // Fix: removed a no-op `this.d;` expression statement that preceded
      // this assignment in the generated output (dead property read).
      this.d = flags;
      if (this.o === EFFECT_RENDER) {
        this.f.notify(this, LOADING_BIT | ERROR_BIT, flags);
      }
    }
    if (value === UNCHANGED)
      return this.e;
    this.e = value;
    // Mark modified so the queued effect handler actually runs.
    this.K = true;
    return value;
  }
  l(state, skipQueue) {
    if (this.a >= state || skipQueue)
      return;
    // Only enqueue on the clean -> check/dirty transition (once per flush).
    if (this.a === STATE_CLEAN)
      this.f.enqueue(this.o, this);
    this.a = state;
  }
  J(error) {
    this.w = error;
    // Run pending cleanup and clear the loading notification before erroring.
    this.t?.();
    this.f.notify(this, LOADING_BIT, 0);
    this.d = ERROR_BIT;
    if (this.o === EFFECT_USER) {
      try {
        // A user-provided error handler may return a cleanup function.
        return this.B ? this.t = this.B(error) : console.error(new EffectError(this.A, error));
      } catch (e) {
        error = e;
      }
    }
    // Unhandled: let an ancestor boundary take it, otherwise rethrow.
    if (!this.f.notify(this, ERROR_BIT, ERROR_BIT))
      throw error;
  }
  q() {
    if (this.a === STATE_DISPOSED)
      return;
    // Drop handler references so they can be garbage collected.
    this.A = void 0;
    this.C = void 0;
    this.B = void 0;
    this.t?.();
    this.t = void 0;
    super.q();
  }
  L() {
    // Run the effect handler only when the value actually changed.
    if (this.K && this.a !== STATE_DISPOSED) {
      this.t?.();
      try {
        this.t = this.A(this.e, this.C);
      } catch (e) {
        if (!this.f.notify(this, ERROR_BIT, ERROR_BIT))
          throw e;
      } finally {
        this.C = this.e;
        this.K = false;
      }
    }
  }
};
// src/signals.ts
function createSignal(first, second, third) {
  if (typeof first === "function") {
    // Derived-signal form: rebuild the underlying node whenever the memo
    // re-evaluates, seeding it from the previous signal's untracked value.
    const memo = createMemo((p) => {
      const seed = p ? untrack(p[0]) : second;
      const node2 = new Computation(first(seed), null, third);
      return [node2.read.bind(node2), node2.write.bind(node2)];
    });
    const get = () => memo()[0]();
    const set = (value) => memo()[1](value);
    return [get, set];
  }
  // Plain signal: a compute-less Computation with bound accessors.
  const node = new Computation(first, null, second);
  return [node.read.bind(node), node.write.bind(node)];
}
function createMemo(compute2, value, options) {
  let node = new Computation(value, compute2, options);
  let lastValue;
  return () => {
    if (!node) return lastValue;
    if (node.a === STATE_DISPOSED) {
      // Node was disposed externally: freeze on the last resolved value.
      node = void 0;
      return lastValue;
    }
    lastValue = node.wait();
    // A memo with no remaining sources can never change again: dispose the
    // node and keep serving the cached value.
    if (!node.b?.length && node.g?.i !== node) {
      node.dispose();
      node = void 0;
    }
    return lastValue;
  };
}
function createEffect(compute2, effect, error, value, options) {
  // The Effect constructor registers and schedules itself; no handle is kept.
  void new Effect(value, compute2, effect, error, options);
}
function createRoot(init, options) {
  const owner = new Owner(options?.id);
  // A zero-arg init is called directly; otherwise it receives a disposer.
  const body = init.length === 0 ? init : () => init(() => owner.dispose());
  return compute(owner, body, null);
}
function runWithOwner(owner, run) {
  // Execute `run` under `owner` with tracking disabled (null observer).
  const result = compute(owner, run, null);
  return result;
}
export { Owner, createEffect, createMemo, createRoot, createSignal, getOwner, onCleanup, runWithOwner, untrack };

View File

@@ -0,0 +1,17 @@
import { Computation } from "./core.js";
import { type Effect } from "./effect.js";
import { Queue } from "./scheduler.js";
export declare class CollectionQueue extends Queue {
_collectionType: number;
_nodes: Set<Effect>;
_disabled: Computation<boolean>;
constructor(type: number);
notify(node: Effect, type: number, flags: number): boolean;
}
export declare enum BoundaryMode {
VISIBLE = "visible",
HIDDEN = "hidden"
}
export declare function createBoundary<T>(fn: () => T, condition: () => BoundaryMode): () => T | undefined;
export declare function createSuspense(fn: () => any, fallback: () => any): () => any;
export declare function createErrorBoundary<U>(fn: () => any, fallback: (error: unknown, reset: () => void) => U): () => any;

View File

@@ -0,0 +1,14 @@
/**
* See https://dev.to/modderme123/super-charging-fine-grained-reactive-performance-47ph
* State clean corresponds to a node where all the sources are fully up to date
* State check corresponds to a node where some sources (including grandparents) may have changed
* State dirty corresponds to a node where the direct parents of a node has changed
*/
export declare const STATE_CLEAN = 0;
export declare const STATE_CHECK = 1;
export declare const STATE_DIRTY = 2;
export declare const STATE_DISPOSED = 3;
export declare const EFFECT_PURE = 0;
export declare const EFFECT_RENDER = 1;
export declare const EFFECT_USER = 2;
export declare const SUPPORTS_PROXY: boolean;

View File

@@ -0,0 +1,158 @@
/**
* Nodes for constructing a graph of reactive values and reactive computations.
*
* - The graph is acyclic.
 * - The user inputs new values into the graph by calling .write() on one or more computation nodes.
* - The user retrieves computed results from the graph by calling .read() on one or more computation nodes.
* - The library is responsible for running any necessary computations so that .read() is up to date
* with all prior .write() calls anywhere in the graph.
* - We call the input nodes 'roots' and the output nodes 'leaves' of the graph here.
* - Changes flow from roots to leaves. It would be effective but inefficient to immediately
* propagate all changes from a root through the graph to descendant leaves. Instead, we defer
 * most change propagation computation until a leaf is accessed. This allows us to
* coalesce computations and skip altogether recalculating unused sections of the graph.
* - Each computation node tracks its sources and its observers (observers are other
* elements that have this node as a source). Source and observer links are updated automatically
* as observer computations re-evaluate and call get() on their sources.
* - Each node stores a cache state (clean/check/dirty) to support the change propagation algorithm:
*
* In general, execution proceeds in three passes:
*
* 1. write() propagates changes down the graph to the leaves
* direct children are marked as dirty and their deeper descendants marked as check
* (no computations are evaluated)
* 2. read() requests that parent nodes updateIfNecessary(), which proceeds recursively up the tree
* to decide whether the node is clean (parents unchanged) or dirty (parents changed)
* 3. updateIfNecessary() evaluates the computation if the node is dirty (the computations are
* executed in root to leaf order)
*/
import { type Flags } from "./flags.js";
import { Owner } from "./owner.js";
export interface SignalOptions<T> {
name?: string;
equals?: ((prev: T, next: T) => boolean) | false;
unobserved?: () => void;
}
interface SourceType {
_observers: ObserverType[] | null;
_unobserved?: () => void;
_updateIfNecessary: () => void;
_stateFlags: Flags;
_time: number;
}
interface ObserverType {
_sources: SourceType[] | null;
_notify: (state: number, skipQueue?: boolean) => void;
_handlerMask: Flags;
_notifyFlags: (mask: Flags, newFlags: Flags) => void;
_time: number;
}
/**
* Returns the current observer.
*/
export declare function getObserver(): Computation | null;
export declare const UNCHANGED: unique symbol;
export type UNCHANGED = typeof UNCHANGED;
export declare class Computation<T = any> extends Owner implements SourceType, ObserverType {
_sources: SourceType[] | null;
_observers: ObserverType[] | null;
_value: T | undefined;
_error: unknown;
_compute: null | ((p?: T) => T);
_name: string | undefined;
_equals: false | ((a: T, b: T) => boolean);
_unobserved: (() => void) | undefined;
/** Whether the computation is an error or has ancestors that are unresolved */
_stateFlags: number;
/** Which flags raised by sources are handled, vs. being passed through. */
_handlerMask: number;
_time: number;
_forceNotify: boolean;
constructor(initialValue: T | undefined, compute: null | ((p?: T) => T), options?: SignalOptions<T>);
_read(): T;
/**
* Return the current value of this computation
* Automatically re-executes the surrounding computation when the value changes
*/
read(): T;
/**
* Return the current value of this computation
* Automatically re-executes the surrounding computation when the value changes
*
* If the computation has any unresolved ancestors, this function waits for the value to resolve
* before continuing
*/
wait(): T;
/** Update the computation with a new value. */
write(value: T | ((currentValue: T) => T) | UNCHANGED, flags?: number, raw?: boolean): T;
/**
* Set the current node's state, and recursively mark all of this node's observers as STATE_CHECK
*/
_notify(state: number, skipQueue?: boolean): void;
/**
* Notify the computation that one of its sources has changed flags.
*
* @param mask A bitmask for which flag(s) were changed.
* @param newFlags The source's new flags, masked to just the changed ones.
*/
_notifyFlags(mask: Flags, newFlags: Flags): void;
_setError(error: unknown): void;
/**
* This is the core part of the reactivity system, which makes sure that the values are updated
* before they are read. We've also adapted it to return the loading state of the computation,
* so that we can propagate that to the computation's observers.
*
* This function will ensure that the value and states we read from the computation are up to date
*/
_updateIfNecessary(): void;
/**
* Remove ourselves from the owner graph and the computation graph
*/
_disposeNode(): void;
}
/**
* Reruns a computation's _compute function, producing a new value and keeping track of dependencies.
*
* It handles the updating of sources and observers, disposal of previous executions,
* and error handling if the _compute function throws. It also sets the node as loading
* if it reads any parents that are currently loading.
*/
export declare function update<T>(node: Computation<T>): void;
export declare function isEqual<T>(a: T, b: T): boolean;
/**
* Returns the current value stored inside the given compute function without triggering any
* dependencies. Use `untrack` if you want to also disable owner tracking.
*/
export declare function untrack<T>(fn: () => T): T;
/**
 * Returns true if the given function contains signals that have been updated since the last time
* the parent computation was run.
*/
export declare function hasUpdated(fn: () => any): boolean;
/**
 * Returns an accessor that is true if any async signals read by the given function are out of date.
*/
export declare function isPending(fn: () => any): boolean;
export declare function isPending(fn: () => any, loadingValue: boolean): boolean;
/**
* Attempts to resolve value of expression synchronously returning the last resolved value for any async computation.
*/
export declare function latest<T>(fn: () => T): T;
export declare function latest<T, U>(fn: () => T, fallback: U): T | U;
/**
* Runs the given function in the given observer.
*
* Warning: Usually there are simpler ways of modeling a problem that avoid using this function
*/
export declare function runWithObserver<T>(observer: Computation, run: () => T): T | undefined;
/**
* A convenient wrapper that calls `compute` with the `owner` and `observer` and is guaranteed
* to reset the global context after the computation is finished even if an error is thrown.
*/
export declare function compute<T>(owner: Owner | null, fn: (val: T) => T, observer: Computation<T>): T;
export declare function compute<T>(owner: Owner | null, fn: (val: undefined) => T, observer: null): T;
export declare function flatten(children: any, options?: {
skipNonRendered?: boolean;
doNotUnwrap?: boolean;
}): any;
export {};

View File

@@ -0,0 +1,34 @@
import { EFFECT_RENDER, EFFECT_USER } from "./constants.js";
import { Computation, type SignalOptions } from "./core.js";
/**
* Effects are the leaf nodes of our reactive graph. When their sources change, they are
* automatically added to the queue of effects to re-execute, which will cause them to fetch their
* sources and recompute
*/
export declare class Effect<T = any> extends Computation<T> {
_effect: (val: T, prev: T | undefined) => void | (() => void);
_onerror: ((err: unknown) => void | (() => void)) | undefined;
_cleanup: (() => void) | undefined;
_modified: boolean;
_prevValue: T | undefined;
_type: typeof EFFECT_RENDER | typeof EFFECT_USER;
constructor(initialValue: T, compute: (val?: T) => T, effect: (val: T, prev: T | undefined) => void | (() => void), error?: (err: unknown) => void | (() => void), options?: SignalOptions<T> & {
render?: boolean;
defer?: boolean;
});
write(value: T, flags?: number): T;
_notify(state: number, skipQueue?: boolean): void;
_setError(error: unknown): void;
_disposeNode(): void;
_runEffect(): void;
}
export declare class EagerComputation<T = any> extends Computation<T> {
constructor(initialValue: T, compute: () => T, options?: SignalOptions<T> & {
defer?: boolean;
});
_notify(state: number, skipQueue?: boolean): void;
}
export declare class ProjectionComputation extends Computation {
constructor(compute: () => void);
_notify(state: number, skipQueue?: boolean): void;
}

View File

@@ -0,0 +1,15 @@
import type { Owner } from "./owner.js";
export declare class NotReadyError extends Error {
}
export declare class NoOwnerError extends Error {
constructor();
}
export declare class ContextNotFoundError extends Error {
constructor();
}
export declare class EffectError extends Error {
constructor(effect: Function, cause: unknown);
}
export interface ErrorHandler {
(error: unknown, node: Owner): void;
}

View File

@@ -0,0 +1,11 @@
export type Flags = number;
export declare const ERROR_OFFSET = 0;
export declare const ERROR_BIT: number;
export declare const ERROR: unique symbol;
export declare const LOADING_OFFSET = 1;
export declare const LOADING_BIT: number;
export declare const LOADING: unique symbol;
export declare const UNINITIALIZED_OFFSET = 2;
export declare const UNINITIALIZED_BIT: number;
export declare const UNINITIALIZED: unique symbol;
export declare const DEFAULT_FLAGS: number;

View File

@@ -0,0 +1,9 @@
export { ContextNotFoundError, NoOwnerError, NotReadyError, type ErrorHandler } from "./error.js";
export { Owner, createContext, getContext, setContext, hasContext, getOwner, onCleanup, type Context, type ContextRecord, type Disposable } from "./owner.js";
export { Computation, getObserver, isEqual, untrack, hasUpdated, isPending, latest, flatten, UNCHANGED, compute, runWithObserver, type SignalOptions } from "./core.js";
export { Effect, EagerComputation } from "./effect.js";
export { flushSync, type IQueue, Queue } from "./scheduler.js";
export { createSuspense, createErrorBoundary, createBoundary } from "./boundaries.js";
export { SUPPORTS_PROXY } from "./constants.js";
export { tryCatch, type TryCatchResult } from "./utils.js";
export * from "./flags.js";

View File

@@ -0,0 +1,96 @@
/**
* Owner tracking is used to enable nested tracking scopes with automatic cleanup.
* We also use owners to also keep track of which error handling context we are in.
*
* If you write the following
*
* const a = createOwner(() => {
* const b = createOwner(() => {});
*
* const c = createOwner(() => {
* const d = createOwner(() => {});
* });
*
* const e = createOwner(() => {});
* });
*
* The owner tree will look like this:
*
* a
* /|\
* b-c-e
* |
* d
*
* Following the _nextSibling pointers of each owner will first give you its children, and then its siblings (in reverse).
* a -> e -> c -> d -> b
*
* Note that the owner tree is largely orthogonal to the reactivity tree, and is much closer to the component tree.
*/
import { type IQueue } from "./scheduler.js";
export type ContextRecord = Record<string | symbol, unknown>;
export interface Disposable {
(): void;
}
/**
* Returns the currently executing parent owner.
*/
export declare function getOwner(): Owner | null;
export declare function setOwner(owner: Owner | null): Owner | null;
export declare class Owner {
_parent: Owner | null;
_nextSibling: Owner | null;
_prevSibling: Owner | null;
_state: number;
_disposal: Disposable | Disposable[] | null;
_context: ContextRecord;
_queue: IQueue;
_siblingCount: number | null;
_childCount: number;
id: string | null;
constructor(id?: string | null, skipAppend?: boolean);
append(child: Owner): void;
dispose(this: Owner, self?: boolean): void;
_disposeNode(): void;
emptyDisposal(): void;
getNextChildId(): string;
}
export interface Context<T> {
readonly id: symbol;
readonly defaultValue: T | undefined;
}
/**
* Context provides a form of dependency injection. It is used to save from needing to pass
* data as props through intermediate components. This function creates a new context object
* that can be used with `getContext` and `setContext`.
*
* A default value can be provided here which will be used when a specific value is not provided
* via a `setContext` call.
*/
export declare function createContext<T>(defaultValue?: T, description?: string): Context<T>;
/**
* Attempts to get a context value for the given key.
*
* @throws `NoOwnerError` if there's no owner at the time of call.
* @throws `ContextNotFoundError` if a context value has not been set yet.
*/
export declare function getContext<T>(context: Context<T>, owner?: Owner | null): T;
/**
* Attempts to set a context value on the parent scope with the given key.
*
* @throws `NoOwnerError` if there's no owner at the time of call.
*/
export declare function setContext<T>(context: Context<T>, value?: T, owner?: Owner | null): void;
/**
* Whether the given context is currently defined.
*/
export declare function hasContext(context: Context<any>, owner?: Owner | null): boolean;
/**
* Runs an effect once before the reactive scope is disposed
* @param fn an effect that should run only once on cleanup
*
* @returns the same {@link fn} function that was passed in
*
* @description https://docs.solidjs.com/reference/lifecycle/on-cleanup
*/
export declare function onCleanup(fn: Disposable): Disposable;

View File

@@ -0,0 +1,33 @@
import type { Computation } from "./core.js";
import type { Effect } from "./effect.js";
export declare function getClock(): number;
export declare function incrementClock(): void;
export interface IQueue {
enqueue<T extends Computation | Effect>(type: number, node: T): void;
run(type: number): boolean | void;
flush(): void;
addChild(child: IQueue): void;
removeChild(child: IQueue): void;
created: number;
notify(...args: any[]): boolean;
_parent: IQueue | null;
}
export declare class Queue implements IQueue {
_parent: IQueue | null;
_running: boolean;
_queues: [Computation[], Effect[], Effect[]];
_children: IQueue[];
created: number;
enqueue<T extends Computation | Effect>(type: number, node: T): void;
run(type: number): boolean | undefined;
flush(): void;
addChild(child: IQueue): void;
removeChild(child: IQueue): void;
notify(...args: any[]): boolean;
}
export declare const globalQueue: Queue;
/**
* By default, changes are batched on the microtask queue which is an async process. You can flush
* the queue synchronously to get the latest updates by calling `flushSync()`.
*/
export declare function flushSync(): void;

View File

@@ -0,0 +1,4 @@
export declare function isUndefined(value: any): value is undefined;
export type TryCatchResult<T, E> = [undefined, T] | [E];
export declare function tryCatch<T, E = Error>(fn: () => Promise<T>): Promise<TryCatchResult<T, E>>;
export declare function tryCatch<T, E = Error>(fn: () => T): TryCatchResult<T, E>;

View File

@@ -0,0 +1,3 @@
export { Owner, getOwner, onCleanup, untrack } from "./core/index.js";
export type { ErrorHandler, SignalOptions, Context, ContextRecord, Disposable, IQueue } from "./core/index.js";
export * from "./signals.js";

View File

@@ -0,0 +1,22 @@
import type { Accessor } from "./signals.js";
export type Maybe<T> = T | void | null | undefined | false;
/**
* Reactively transforms an array with a callback function - underlying helper for the `<For>` control flow
*
* similar to `Array.prototype.map`, but gets the value and index as accessors, transforms only values that changed and returns an accessor and reactively tracks changes to the list.
*
* @description https://docs.solidjs.com/reference/reactive-utilities/map-array
*/
export declare function mapArray<Item, MappedItem>(list: Accessor<Maybe<readonly Item[]>>, map: (value: Accessor<Item>, index: Accessor<number>) => MappedItem, options?: {
keyed?: boolean | ((item: Item) => any);
fallback?: Accessor<any>;
}): Accessor<MappedItem[]>;
/**
* Reactively repeats a callback function the count provided - underlying helper for the `<Repeat>` control flow
*
* @description https://docs.solidjs.com/reference/reactive-utilities/repeat
*/
export declare function repeat(count: Accessor<number>, map: (index: number) => any, options?: {
from?: Accessor<number | undefined>;
fallback?: Accessor<any>;
}): Accessor<any[]>;

View File

@@ -0,0 +1,102 @@
import type { SignalOptions } from "./core/index.js";
import { Owner } from "./core/index.js";
export type Accessor<T> = () => T;
export type Setter<in out T> = {
<U extends T>(...args: undefined extends T ? [] : [value: Exclude<U, Function> | ((prev: T) => U)]): undefined extends T ? undefined : U;
<U extends T>(value: (prev: T) => U): U;
<U extends T>(value: Exclude<U, Function>): U;
<U extends T>(value: Exclude<U, Function> | ((prev: T) => U)): U;
};
export type Signal<T> = [get: Accessor<T>, set: Setter<T>];
export type ComputeFunction<Prev, Next extends Prev = Prev> = (v: Prev) => Next;
export type EffectFunction<Prev, Next extends Prev = Prev> = (v: Next, p?: Prev) => (() => void) | void;
export interface EffectOptions {
name?: string;
defer?: boolean;
}
export interface MemoOptions<T> {
name?: string;
equals?: false | ((prev: T, next: T) => boolean);
}
export type NoInfer<T extends any> = [T][T extends any ? 0 : never];
/**
* Creates a simple reactive state with a getter and setter
* ```typescript
* const [state: Accessor<T>, setState: Setter<T>] = createSignal<T>(
* value: T,
* options?: { name?: string, equals?: false | ((prev: T, next: T) => boolean) }
* )
* ```
* @param value initial value of the state; if empty, the state's type will automatically extended with undefined; otherwise you need to extend the type manually if you want setting to undefined not be an error
* @param options optional object with a name for debugging purposes and equals, a comparator function for the previous and next value to allow fine-grained control over the reactivity
*
* @returns ```typescript
* [state: Accessor<T>, setState: Setter<T>]
* ```
* * the Accessor is a function that returns the current value and registers each call to the reactive root
* * the Setter is a function that allows directly setting or mutating the value:
* ```typescript
* const [count, setCount] = createSignal(0);
* setCount(count => count + 1);
* ```
*
* @description https://docs.solidjs.com/reference/basic-reactivity/create-signal
*/
export declare function createSignal<T>(): Signal<T | undefined>;
export declare function createSignal<T>(value: Exclude<T, Function>, options?: SignalOptions<T>): Signal<T>;
export declare function createSignal<T>(fn: ComputeFunction<T>, initialValue?: T, options?: SignalOptions<T>): Signal<T>;
/**
* Creates a readonly derived reactive memoized signal
* ```typescript
* export function createMemo<T>(
* compute: (v: T) => T,
* value?: T,
* options?: { name?: string, equals?: false | ((prev: T, next: T) => boolean) }
* ): () => T;
* ```
* @param compute a function that receives its previous or the initial value, if set, and returns a new value used to react on a computation
* @param value an optional initial value for the computation; if set, fn will never receive undefined as first argument
* @param options allows to set a name in dev mode for debugging purposes and use a custom comparison function in equals
*
* @description https://docs.solidjs.com/reference/basic-reactivity/create-memo
*/
export declare function createMemo<Next extends Prev, Prev = Next>(compute: ComputeFunction<undefined | NoInfer<Prev>, Next>): Accessor<Next>;
export declare function createMemo<Next extends Prev, Init = Next, Prev = Next>(compute: ComputeFunction<Init | Prev, Next>, value: Init, options?: MemoOptions<Next>): Accessor<Next>;
/**
* Creates a reactive effect that runs after the render phase
* ```typescript
* export function createEffect<T>(
* compute: (prev: T) => T,
* effect: (v: T, prev: T) => (() => void) | void,
* value?: T,
* options?: { name?: string }
* ): void;
* ```
* @param compute a function that receives its previous or the initial value, if set, and returns a new value used to react on a computation
* @param effect a function that receives the new value and is used to perform side effects, return a cleanup function to run on disposal
* @param error an optional function that receives an error if thrown during the computation
* @param value an optional initial value for the computation; if set, fn will never receive undefined as first argument
* @param options allows to set a name in dev mode for debugging purposes
*
* @description https://docs.solidjs.com/reference/basic-reactivity/create-effect
*/
export declare function createEffect<Next>(compute: ComputeFunction<undefined | NoInfer<Next>, Next>, effect: EffectFunction<NoInfer<Next>, Next>, error?: (err: unknown) => void): void;
export declare function createEffect<Next, Init = Next>(compute: ComputeFunction<Init | Next, Next>, effect: EffectFunction<Next, Next>, error: ((err: unknown) => void) | undefined, value: Init, options?: EffectOptions): void;
/**
* Creates a new non-tracked reactive context with manual disposal
*
* @param fn a function in which the reactive state is scoped
* @returns the output of `fn`.
*
* @description https://docs.solidjs.com/reference/reactive-utilities/create-root
*/
export declare function createRoot<T>(init: ((dispose: () => void) => T) | (() => T), options?: {
id: string;
}): T;
/**
* Runs the given function in the given owner to move ownership of nested primitives and cleanups.
* This method untracks the current scope.
*
* Warning: Usually there are simpler ways of modeling a problem that avoid using this function
*/
export declare function runWithOwner<T>(owner: Owner | null, run: () => T): T;

View File

@@ -0,0 +1,6 @@
export type { Store, StoreSetter, StoreNode, NotWrappable, SolidStore } from "./store.js";
export type { Merge, Omit } from "./utils.js";
export { unwrap, isWrappable, createStore, deep, $RAW, $TRACK, $PROXY, $TARGET } from "./store.js";
export { createProjection } from "./projection.js";
export { reconcile } from "./reconcile.js";
export { merge, omit } from "./utils.js";

View File

@@ -0,0 +1,8 @@
import { type Store, type StoreSetter } from "./store.js";
/**
* Creates a mutable derived value
*
* @see {@link https://github.com/solidjs/x-reactivity#createprojection}
*/
export declare function createProjection<T extends Object>(fn: (draft: T) => void, initialValue?: T): Store<T>;
export declare function wrapProjection<T>(fn: (draft: T) => void, store: Store<T>, setStore: StoreSetter<T>): [Store<T>, StoreSetter<T>];

View File

@@ -0,0 +1 @@
export declare function reconcile<T extends U, U>(value: T, key: string | ((item: NonNullable<any>) => any)): (state: U) => T;

View File

@@ -0,0 +1,35 @@
import { Computation } from "../core/index.js";
/** Read-only view of a store's state. */
export type Store<T> = Readonly<T>;
/** Setter that updates the store by mutating a draft in place. */
export type StoreSetter<T> = (fn: (state: T) => void) => void;
type DataNode = Computation<any>;
type DataNodes = Record<PropertyKey, DataNode>;
// Internal marker symbols used by the store proxy layer.
declare const $RAW: unique symbol, $TRACK: unique symbol, $TARGET: unique symbol, $PROXY: unique symbol;
// Property keys of the internal StoreNode record (value / per-key nodes / has-tracking nodes).
export declare const STORE_VALUE = "v", STORE_NODE = "n", STORE_HAS = "h";
export { $PROXY, $TRACK, $RAW, $TARGET };
// Internal backing record behind a store proxy: the raw value plus lazily
// created reactive nodes for property reads and `in`/has checks.
export type StoreNode = {
    [STORE_VALUE]: Record<PropertyKey, any>;
    [STORE_NODE]?: DataNodes;
    [STORE_HAS]?: DataNodes;
};
export declare namespace SolidStore {
    // Extension point: augment this interface to opt types out of wrapping.
    interface Unwrappable {
    }
}
// Types that are never wrapped in a store proxy (primitives, functions, and
// anything registered on SolidStore.Unwrappable).
export type NotWrappable = string | number | bigint | symbol | boolean | Function | null | undefined | SolidStore.Unwrappable[keyof SolidStore.Unwrappable];
/** Wraps a plain record in a store proxy. */
export declare function wrap<T extends Record<PropertyKey, any>>(value: T): T;
/** Type guard: true when `obj` is a value the store can proxy. */
export declare function isWrappable<T>(obj: T | NotWrappable): obj is T;
/**
 * Returns the underlying data in the store without a proxy.
 * @param item store proxy object
 * @example
 * ```js
 * const initial = {...};
 * const [state, setState] = createStore(initial);
 * initial === state; // => false
 * initial === unwrap(state); // => true
 * ```
 */
export declare function unwrap<T>(item: T, deep?: boolean, set?: Set<unknown>): T;
/** Creates a reactive store from an initial object, returning [read proxy, setter]. */
export declare function createStore<T extends object = {}>(store: T | Store<T>): [get: Store<T>, set: StoreSetter<T>];
/** Overload: first argument is an initializer function applied to `store`. */
export declare function createStore<T extends object = {}>(fn: (store: T) => void, store: T | Store<T>): [get: Store<T>, set: StoreSetter<T>];
/**
 * NOTE(review): declaration only — presumably subscribes to the store deeply;
 * confirm the tracking semantics against the implementation.
 */
export declare function deep<T extends object>(store: Store<T>): Store<any>;

View File

@@ -0,0 +1,29 @@
// Type machinery backing `merge` and `omit`.
/** Prefers the override value T unless it is `undefined`, then falls back to F. */
type DistributeOverride<T, F> = T extends undefined ? F : T;
// Merges two object types key-by-key, letting U override T except where U's
// value is `undefined` (distributes over unions of T and U).
type Override<T, U> = T extends any ? U extends any ? {
    [K in keyof T]: K extends keyof U ? DistributeOverride<U[K], T[K]> : T[K];
} & {
    [K in keyof U]: K extends keyof T ? DistributeOverride<U[K], T[K]> : U[K];
} : T & U : T & U;
// Like Override, but for a spread of sources whose keys are only optionally
// present (used for the variadic tail of _Merge).
type OverrideSpread<T, U> = T extends any ? {
    [K in keyof ({
        [K in keyof T]: any;
    } & {
        [K in keyof U]?: any;
    } & {
        [K in U extends any ? keyof U : keyof U]?: any;
    })]: K extends keyof T ? Exclude<U extends any ? U[K & keyof U] : never, undefined> | T[K] : U extends any ? U[K & keyof U] : never;
} : T & U;
/** Flattens an intersection into a single mapped object type for readability. */
type Simplify<T> = T extends any ? {
    [K in keyof T]: T[K];
} : T;
// Folds a tuple of sources (values or thunks returning values) left-to-right
// into one merged type; a non-tuple array tail is handled via OverrideSpread.
type _Merge<T extends unknown[], Curr = {}> = T extends [
    infer Next | (() => infer Next),
    ...infer Rest
] ? _Merge<Rest, Override<Curr, Next>> : T extends [...infer Rest, infer Next | (() => infer Next)] ? Override<_Merge<Rest, Curr>, Next> : T extends [] ? Curr : T extends (infer I | (() => infer I))[] ? OverrideSpread<Curr, I> : Curr;
/** Resulting type of merging the tuple of sources T, later sources winning. */
export type Merge<T extends unknown[]> = Simplify<_Merge<T>>;
/** Merges the given sources into one object; later sources override earlier ones. */
export declare function merge<T extends unknown[]>(...sources: T): Merge<T>;
/** Object type T with the keys listed in K removed. */
export type Omit<T, K extends readonly (keyof T)[]> = {
    [P in keyof T as Exclude<P, K[number]>]: T[P];
};
/** Returns a copy of `props` without the given `keys`. */
export declare function omit<T extends Record<any, any>, K extends readonly (keyof T)[]>(props: T, ...keys: K): Omit<T, K>;
export {};

View File

@@ -295,19 +295,19 @@ function createIndexSelector({ elements, signals, utils }) {
/** @returns {ChartableIndex} */ () => { /** @returns {ChartableIndex} */ () => {
switch (selected()) { switch (selected()) {
case "timestamp": case "timestamp":
return /** @satisfies {Height} */ (0); return /** @satisfies {Height} */ (5);
case "date": case "date":
return /** @satisfies {Dateindex} */ (1); return /** @satisfies {DateIndex} */ (0);
case "week": case "week":
return /** @satisfies {Weekindex} */ (2); return /** @satisfies {WeekIndex} */ (22);
case "month": case "month":
return /** @satisfies {Monthindex} */ (4); return /** @satisfies {MonthIndex} */ (7);
case "quarter": case "quarter":
return /** @satisfies {Quarterindex} */ (5); return /** @satisfies {QuarterIndex} */ (19);
case "year": case "year":
return /** @satisfies {Yearindex} */ (6); return /** @satisfies {YearIndex} */ (23);
case "decade": case "decade":
return /** @satisfies {Decadeindex} */ (7); return /** @satisfies {DecadeIndex} */ (1);
} }
}, },
); );

View File

@@ -9,7 +9,7 @@
* @import {Signal, Signals} from "../packages/solid-signals/types"; * @import {Signal, Signals} from "../packages/solid-signals/types";
* @import { getOwner as GetOwner, onCleanup as OnCleanup, Owner } from "../packages/solid-signals/v0.2.4-treeshaked/types/core/owner" * @import { getOwner as GetOwner, onCleanup as OnCleanup, Owner } from "../packages/solid-signals/v0.2.4-treeshaked/types/core/owner"
* @import { createEffect as CreateEffect, Accessor, Setter, createMemo as CreateMemo } from "../packages/solid-signals/v0.2.4-treeshaked/types/signals"; * @import { createEffect as CreateEffect, Accessor, Setter, createMemo as CreateMemo } from "../packages/solid-signals/v0.2.4-treeshaked/types/signals";
* @import {Addressindex, Dateindex, Decadeindex, Difficultyepoch, Index, Halvingepoch, Height, Monthindex, P2PK33index, P2PK65index, P2PKHindex, P2SHindex, P2TRindex, P2WPKHindex, P2WSHindex, Txindex, Inputindex, Outputindex, VecId, Weekindex, Yearindex, VecIdToIndexes, Quarterindex, Emptyindex, P2MSindex, Opreturnindex, Pushonlyindex, Unknownindex} from "./vecid-to-indexes" * @import {DateIndex, DecadeIndex, DifficultyEpoch, Index, HalvingEpoch, Height, MonthIndex, P2PK33Index, P2PK65Index, P2PKHIndex, P2SHIndex, P2MSIndex, P2AIndex, P2TRIndex, P2WPKHIndex, P2WSHIndex, TxIndex, InputIndex, OutputIndex, VecId, WeekIndex, YearIndex, VecIdToIndexes, QuarterIndex, EmptyOutputIndex, OpReturnIndex, UnknownOutputIndex} from "./vecid-to-indexes"
*/ */
/** /**
@@ -686,11 +686,12 @@ function createUtils() {
unit = "Index"; unit = "Index";
} else if (id.includes("type")) { } else if (id.includes("type")) {
unit = "Type"; unit = "Type";
} else if (id === "locktime") { } else if (id === "rawlocktime") {
unit = "Locktime"; unit = "Locktime";
} else if (id.startsWith("is-")) { } else if (id.startsWith("is-")) {
unit = "Bool"; unit = "Bool";
} else if ( } else if (
id.includes("bytes") ||
id.includes("hash") || id.includes("hash") ||
id.includes("address") || id.includes("address") ||
id.includes("txid") id.includes("txid")
@@ -1079,8 +1080,9 @@ function createUtils() {
* @template T * @template T
* @param {(value: T) => void} callback * @param {(value: T) => void} callback
* @param {string} path * @param {string} path
* @param {boolean} [mustBeArray]
*/ */
async function fetchApi(callback, path) { async function fetchApi(callback, path, mustBeArray) {
const url = `/api${path}`; const url = `/api${path}`;
/** @type {T | null} */ /** @type {T | null} */
@@ -1119,7 +1121,10 @@ function createUtils() {
let fetchedJson = /** @type {T | null} */ (null); let fetchedJson = /** @type {T | null} */ (null);
try { try {
fetchedJson = /** @type {T} */ (await fetchedResponse.json()); const f = await fetchedResponse.json();
fetchedJson = /** @type {T} */ (
mustBeArray && !Array.isArray(f) ? [f] : f
);
} catch (_) { } catch (_) {
return cachedJson; return cachedJson;
} }
@@ -1166,46 +1171,54 @@ function createUtils() {
*/ */
function vecIndexToString(index) { function vecIndexToString(index) {
switch (index) { switch (index) {
case /** @satisfies {Addressindex} */ (9): case /** @satisfies {DateIndex} */ (0):
return "Addressindex"; return "dateindex";
case /** @satisfies {Dateindex} */ (1): case /** @satisfies {DecadeIndex} */ (1):
return "Dateindex"; return "decadeindex";
case /** @satisfies {Height} */ (0): case /** @satisfies {DifficultyEpoch} */ (2):
return "Height"; return "difficultyepoch";
case /** @satisfies {P2PK33index} */ (10): case /** @satisfies {EmptyOutputIndex} */ (3):
return "P2PK33index"; return "emptyoutputindex";
case /** @satisfies {P2PK65index} */ (11): case /** @satisfies {HalvingEpoch} */ (4):
return "P2PK65index"; return "halvingepoch";
case /** @satisfies {P2PKHindex} */ (12): case /** @satisfies {Height} */ (5):
return "P2PKHindex"; return "height";
case /** @satisfies {P2SHindex} */ (13): case /** @satisfies {InputIndex} */ (6):
return "P2SHindex"; return "inputindex";
case /** @satisfies {P2TRindex} */ (14): case /** @satisfies {MonthIndex} */ (7):
return "P2TRindex"; return "monthindex";
case /** @satisfies {P2WPKHindex} */ (15): case /** @satisfies {OpReturnIndex} */ (8):
return "P2WPKHindex"; return "opreturnindex";
case /** @satisfies {P2WSHindex} */ (16): case /** @satisfies {OutputIndex} */ (9):
return "P2WSHindex"; return "outputindex";
case /** @satisfies {Txindex} */ (17): case /** @satisfies {P2AIndex} */ (10):
return "Txindex"; return "p2aindex";
case /** @satisfies {Inputindex} */ (18): case /** @satisfies {P2MSIndex} */ (11):
return "Inputindex"; return "p2msindex";
case /** @satisfies {Outputindex} */ (19): case /** @satisfies {P2PK33Index} */ (12):
return "Outputindex"; return "p2pk33index";
case /** @satisfies {Weekindex} */ (2): case /** @satisfies {P2PK65Index} */ (13):
return "Weekindex"; return "p2pk65index";
case /** @satisfies {Monthindex} */ (4): case /** @satisfies {P2PKHIndex} */ (14):
return "Monthindex"; return "p2pkhindex";
case /** @satisfies {Quarterindex} */ (5): case /** @satisfies {P2SHIndex} */ (15):
return "Quarterindex"; return "p2shindex";
case /** @satisfies {Yearindex} */ (6): case /** @satisfies {P2TRIndex} */ (16):
return "Yearindex"; return "p2trindex";
case /** @satisfies {Decadeindex} */ (7): case /** @satisfies {P2WPKHIndex} */ (17):
return "Decadeindex"; return "p2wpkhindex";
case /** @satisfies {Difficultyepoch} */ (3): case /** @satisfies {P2WSHIndex} */ (18):
return "Difficultyepoch"; return "p2wshindex";
case /** @satisfies {Halvingepoch} */ (8): case /** @satisfies {QuarterIndex} */ (19):
return "Halvingepoch"; return "quarterindex";
case /** @satisfies {TxIndex} */ (20):
return "txindex";
case /** @satisfies {UnknownOutputIndex} */ (21):
return "unknownoutputindex";
case /** @satisfies {WeekIndex} */ (22):
return "weekindex";
case /** @satisfies {YearIndex} */ (23):
return "yearindex";
} }
} }
@@ -1244,7 +1257,7 @@ function createUtils() {
* @param {number} [to] * @param {number} [to]
*/ */
fetchVec(callback, index, vecId, from, to) { fetchVec(callback, index, vecId, from, to) {
return fetchApi(callback, genPath(index, vecId, from, to)); return fetchApi(callback, genPath(index, vecId, from, to), true);
}, },
/** /**
* @template {number | OHLCTuple} [T=number] * @template {number | OHLCTuple} [T=number]
@@ -1314,12 +1327,12 @@ function createVecsResources(signals, utils) {
return signals.runWithOwner(owner, () => { return signals.runWithOwner(owner, () => {
/** @typedef {T extends number ? SingleValueData : CandlestickData} Value */ /** @typedef {T extends number ? SingleValueData : CandlestickData} Value */
let loading = false; const fetchedRecord =
let at = /** @type {Date | null} */ (null); /** @type {Record<string, {loading: boolean, at: Date | null, vec: Signal<T[] | null>}>} */ ({});
return { return {
url: utils.api.genUrl(index, id, defaultFrom), url: utils.api.genUrl(index, id, defaultFrom),
fetched: /** @type {Record<string, Signal<T[] | null>>} */ ({}), fetched: fetchedRecord,
/** /**
* Defaults * Defaults
* - from: -10_000 * - from: -10_000
@@ -1333,21 +1346,23 @@ function createVecsResources(signals, utils) {
const from = args?.from ?? defaultFrom; const from = args?.from ?? defaultFrom;
const to = args?.to ?? defaultTo; const to = args?.to ?? defaultTo;
const fetchedKey = genFetchedKey({ from, to }); const fetchedKey = genFetchedKey({ from, to });
this.fetched[fetchedKey] ??= signals.createSignal( fetchedRecord[fetchedKey] ??= {
/** @type {T[] | null} */ (null), loading: false,
); at: null,
const fetched = this.fetched[fetchedKey]; vec: signals.createSignal(/** @type {T[] | null} */ (null)),
if (loading) return fetched(); };
if (at) { const fetched = fetchedRecord[fetchedKey];
const diff = new Date().getTime() - at.getTime(); if (fetched.loading) return fetched.vec();
if (fetched.at) {
const diff = new Date().getTime() - fetched.at.getTime();
const ONE_MINUTE_IN_MS = 60_000; const ONE_MINUTE_IN_MS = 60_000;
if (diff < ONE_MINUTE_IN_MS) return fetched(); if (diff < ONE_MINUTE_IN_MS) return fetched.vec();
} }
loading = true; fetched.loading = true;
const res = /** @type {T[] | null} */ ( const res = /** @type {T[] | null} */ (
await utils.api.fetchVec( await utils.api.fetchVec(
(values) => { (values) => {
fetched.set(/** @type {T[]} */ (values)); fetched.vec.set(/** @type {T[]} */ (values));
}, },
index, index,
id, id,
@@ -1355,8 +1370,8 @@ function createVecsResources(signals, utils) {
to, to,
) )
); );
at = new Date(); fetched.at = new Date();
loading = false; fetched.loading = false;
return res; return res;
}, },
}; };
@@ -1967,7 +1982,7 @@ function main() {
(h) => { (h) => {
lastHeight.set(h); lastHeight.set(h);
}, },
/** @satisfies {Height} */ (0), /** @satisfies {Height} */ (5),
"height", "height",
); );
} }

View File

@@ -1,7 +1,7 @@
// @ts-check // @ts-check
/** /**
* @typedef {Height | Dateindex | Weekindex | Difficultyepoch | Monthindex | Quarterindex | Yearindex | Decadeindex | Halvingepoch} ChartableIndex * @typedef {Height | DateIndex | WeekIndex | DifficultyEpoch | MonthIndex | QuarterIndex | YearIndex | HalvingEpoch | DecadeIndex} ChartableIndex
*/ */
/** /**
* @template {readonly unknown[]} T * @template {readonly unknown[]} T
@@ -356,156 +356,191 @@ function createPartialOptions(colors) {
{ {
name: "Transaction", name: "Transaction",
tree: [ tree: [
{ // {
name: "Count", // name: "Count",
title: "Transaction Count", // title: "Transaction Count",
bottom: createBaseAverageSumTotalMinMaxPercentilesSeries({ // bottom: createBaseAverageSumTotalMinMaxPercentilesSeries({
key: "tx-count", // key: "tx-count",
name: "Count", // name: "Count",
}), // }),
}, // },
{ // {
name: "Subsidy", // name: "Subsidy",
title: "Subsidy", // title: "Subsidy",
bottom: [ // bottom: [
...createBaseAverageSumTotalMinMaxPercentilesSeries({ // ...createBaseAverageSumTotalMinMaxPercentilesSeries({
key: "subsidy", // key: "subsidy",
name: "Subsidy", // name: "Subsidy",
}), // }),
...createBaseAverageSumTotalMinMaxPercentilesSeries({ // ...createBaseAverageSumTotalMinMaxPercentilesSeries({
key: "subsidy-in-btc", // key: "subsidy-in-btc",
name: "Subsidy", // name: "Subsidy",
}), // }),
...createBaseAverageSumTotalMinMaxPercentilesSeries({ // ...createBaseAverageSumTotalMinMaxPercentilesSeries({
key: "subsidy-in-usd", // key: "subsidy-in-usd",
name: "Subsidy", // name: "Subsidy",
}), // }),
], // ],
}, // },
{ // {
name: "Coinbase", // name: "Coinbase",
title: "Coinbase", // title: "Coinbase",
bottom: [ // bottom: [
...createBaseAverageSumTotalMinMaxPercentilesSeries({ // ...createBaseAverageSumTotalMinMaxPercentilesSeries({
key: "coinbase", // key: "coinbase",
name: "Coinbase", // name: "Coinbase",
}), // }),
...createBaseAverageSumTotalMinMaxPercentilesSeries({ // ...createBaseAverageSumTotalMinMaxPercentilesSeries({
key: "coinbase-in-btc", // key: "coinbase-in-btc",
name: "Coinbase", // name: "Coinbase",
}), // }),
...createBaseAverageSumTotalMinMaxPercentilesSeries({ // ...createBaseAverageSumTotalMinMaxPercentilesSeries({
key: "coinbase-in-usd", // key: "coinbase-in-usd",
name: "Coinbase", // name: "Coinbase",
}), // }),
], // ],
}, // },
{ // {
name: "Fee", // name: "Fee",
title: "Transaction Fee", // title: "Transaction Fee",
bottom: [ // bottom: [
...createAverageSumTotalMinMaxPercentilesSeries("fee"), // ...createAverageSumTotalMinMaxPercentilesSeries("fee"),
...createAverageSumTotalMinMaxPercentilesSeries("fee-in-btc"), // ...createAverageSumTotalMinMaxPercentilesSeries("fee-in-btc"),
...createAverageSumTotalMinMaxPercentilesSeries("fee-in-usd"), // ...createAverageSumTotalMinMaxPercentilesSeries("fee-in-usd"),
], // ],
}, // },
{ // {
name: "Feerate", // name: "Feerate",
title: "Transaction Fee Rate", // title: "Transaction Fee Rate",
bottom: [ // bottom: [
createAverageSeries({ concat: "feerate" }), // createAverageSeries({ concat: "feerate" }),
...createMinMaxPercentilesSeries({ // ...createMinMaxPercentilesSeries({
concat: "feerate", // concat: "feerate",
}), // }),
], // ],
}, // },
{ // {
name: "Weight", // name: "Weight",
title: "Transaction Weight", // title: "Transaction Weight",
bottom: [ // bottom: [
createAverageSeries({ concat: "tx-weight" }), // createAverageSeries({ concat: "tx-weight" }),
...createMinMaxPercentilesSeries({ // ...createMinMaxPercentilesSeries({
concat: "tx-weight", // concat: "tx-weight",
}), // }),
], // ],
}, // },
{ // {
name: "vsize", // name: "vsize",
title: "Transaction Virtual Size", // title: "Transaction Virtual Size",
bottom: [ // bottom: [
createAverageSeries({ concat: "tx-vsize" }), // createAverageSeries({ concat: "tx-vsize" }),
...createMinMaxPercentilesSeries({ // ...createMinMaxPercentilesSeries({
concat: "tx-vsize", // concat: "tx-vsize",
}), // }),
], // ],
}, // },
{ // {
name: "Versions", // name: "Versions",
title: "Transaction Versions", // title: "Transaction Versions",
bottom: [ // bottom: [
createBaseSeries({ // createBaseSeries({
key: "tx-v1", // key: "tx-v1",
name: "v1 Count", // name: "v1 Count",
}), // }),
...createSumTotalSeries({ concat: "tx-v1", name: "v1" }), // ...createSumTotalSeries({ concat: "tx-v1", name: "v1" }),
createBaseSeries({ // createBaseSeries({
key: "tx-v2", // key: "tx-v2",
name: "v2 Count", // name: "v2 Count",
}), // }),
...createSumTotalSeries({ concat: "tx-v2", name: "v2" }), // ...createSumTotalSeries({ concat: "tx-v2", name: "v2" }),
createBaseSeries({ // createBaseSeries({
key: "tx-v3", // key: "tx-v3",
name: "v3 Count", // name: "v3 Count",
}), // }),
...createSumTotalSeries({ concat: "tx-v3", name: "v3" }), // ...createSumTotalSeries({ concat: "tx-v3", name: "v3" }),
], // ],
}, // },
], ],
}, },
{ {
name: "Input", name: "Input",
tree: [ tree: [
{ // {
name: "Count", // name: "Count",
title: "Transaction Input Count", // title: "Transaction Input Count",
bottom: [ // bottom: [
createAverageSeries({ concat: "input-count" }), // createAverageSeries({ concat: "input-count" }),
...createSumTotalSeries({ concat: "input-count" }), // ...createSumTotalSeries({ concat: "input-count" }),
...createMinMaxPercentilesSeries({ // ...createMinMaxPercentilesSeries({
concat: "input-count", // concat: "input-count",
}), // }),
], // ],
}, // },
{ // {
name: "Value", // name: "Value",
title: "Transaction Input Value", // title: "Transaction Input Value",
bottom: [ // bottom: [
createAverageSeries({ concat: "input-value" }), // createAverageSeries({ concat: "input-value" }),
...createSumTotalSeries({ concat: "input-value" }), // ...createSumTotalSeries({ concat: "input-value" }),
], // ],
}, // },
], ],
}, },
{ {
name: "Output", name: "Output",
tree: [
// {
// name: "Count",
// title: "Transaction Output Count",
// bottom: [
// createAverageSeries({ concat: "output-count" }),
// ...createSumTotalSeries({ concat: "output-count" }),
// ...createMinMaxPercentilesSeries({
// concat: "output-count",
// }),
// ],
// },
// {
// name: "Value",
// title: "Transaction Output Value",
// bottom: [
// createAverageSeries({ concat: "output-value" }),
// ...createSumTotalSeries({ concat: "output-value" }),
// ],
// },
],
},
{
name: "Mining",
tree: [ tree: [
{ {
name: "Count", name: "Difficulty",
title: "Transaction Output Count", title: "Difficulty",
bottom: [ bottom: [
createAverageSeries({ concat: "output-count" }), createBaseSeries({
...createSumTotalSeries({ concat: "output-count" }), key: "difficulty",
...createMinMaxPercentilesSeries({ name: "Value",
concat: "output-count",
}), }),
], ],
}, },
{ {
name: "Value", name: "Difficulty Epoch",
title: "Transaction Output Value", title: "Difficulty Epoch",
bottom: [ bottom: [
createAverageSeries({ concat: "output-value" }), createBaseSeries({
...createSumTotalSeries({ concat: "output-value" }), key: "difficultyepoch",
name: "Epoch",
}),
],
},
{
name: "Halving Epoch",
title: "Halving Epoch",
bottom: [
createBaseSeries({
key: "halvingepoch",
name: "Epoch",
}),
], ],
}, },
], ],

View File

@@ -852,7 +852,7 @@ export function init({
}); });
vecsResources vecsResources
.getOrCreate(/** @satisfies {Dateindex} */ (1), "close") .getOrCreate(/** @satisfies {DateIndex} */ (0), "close")
.fetch() .fetch()
.then((_closes) => { .then((_closes) => {
if (!_closes) return; if (!_closes) return;

View File

@@ -301,7 +301,7 @@ function createTable({
return l; return l;
}); });
signals.createEffect(vec.fetched[fetchedKey], (vec) => { signals.createEffect(vec.fetched[fetchedKey].vec, (vec) => {
if (!vec) return; if (!vec) return;
const thIndex = colIndex() + 1; const thIndex = colIndex() + 1;
@@ -396,16 +396,18 @@ export function init({
function createSerializedIndexes() { function createSerializedIndexes() {
return /** @type {const} */ ([ return /** @type {const} */ ([
/** @satisfies {VecId} */ ("height"),
/** @satisfies {VecId} */ ("dateindex"), /** @satisfies {VecId} */ ("dateindex"),
/** @satisfies {VecId} */ ("weekindex"),
/** @satisfies {VecId} */ ("difficultyepoch"),
/** @satisfies {VecId} */ ("monthindex"),
/** @satisfies {VecId} */ ("quarterindex"),
/** @satisfies {VecId} */ ("yearindex"),
/** @satisfies {VecId} */ ("decadeindex"), /** @satisfies {VecId} */ ("decadeindex"),
/** @satisfies {VecId} */ ("difficultyepoch"),
/** @satisfies {VecId} */ ("emptyoutputindex"),
/** @satisfies {VecId} */ ("halvingepoch"), /** @satisfies {VecId} */ ("halvingepoch"),
/** @satisfies {VecId} */ ("addressindex"), /** @satisfies {VecId} */ ("height"),
/** @satisfies {VecId} */ ("inputindex"),
/** @satisfies {VecId} */ ("monthindex"),
/** @satisfies {VecId} */ ("opreturnindex"),
/** @satisfies {VecId} */ ("outputindex"),
/** @satisfies {VecId} */ ("p2aindex"),
/** @satisfies {VecId} */ ("p2msindex"),
/** @satisfies {VecId} */ ("p2pk33index"), /** @satisfies {VecId} */ ("p2pk33index"),
/** @satisfies {VecId} */ ("p2pk65index"), /** @satisfies {VecId} */ ("p2pk65index"),
/** @satisfies {VecId} */ ("p2pkhindex"), /** @satisfies {VecId} */ ("p2pkhindex"),
@@ -413,14 +415,11 @@ function createSerializedIndexes() {
/** @satisfies {VecId} */ ("p2trindex"), /** @satisfies {VecId} */ ("p2trindex"),
/** @satisfies {VecId} */ ("p2wpkhindex"), /** @satisfies {VecId} */ ("p2wpkhindex"),
/** @satisfies {VecId} */ ("p2wshindex"), /** @satisfies {VecId} */ ("p2wshindex"),
/** @satisfies {VecId} */ ("quarterindex"),
/** @satisfies {VecId} */ ("txindex"), /** @satisfies {VecId} */ ("txindex"),
/** @satisfies {VecId} */ ("inputindex"),
/** @satisfies {VecId} */ ("outputindex"),
/** @satisfies {VecId} */ ("emptyoutputindex"),
/** @satisfies {VecId} */ ("p2msindex"),
/** @satisfies {VecId} */ ("opreturnindex"),
/** @satisfies {VecId} */ ("pushonlyindex"),
/** @satisfies {VecId} */ ("unknownoutputindex"), /** @satisfies {VecId} */ ("unknownoutputindex"),
/** @satisfies {VecId} */ ("weekindex"),
/** @satisfies {VecId} */ ("yearindex"),
]); ]);
} }
/** @typedef {ReturnType<typeof createSerializedIndexes>} SerializedIndexes */ /** @typedef {ReturnType<typeof createSerializedIndexes>} SerializedIndexes */
@@ -433,55 +432,53 @@ function createSerializedIndexes() {
function serializedIndexToIndex(serializedIndex) { function serializedIndexToIndex(serializedIndex) {
switch (serializedIndex) { switch (serializedIndex) {
case "height": case "height":
return /** @satisfies {Height} */ (0); return /** @satisfies {Height} */ (5);
case "dateindex": case "dateindex":
return /** @satisfies {Dateindex} */ (1); return /** @satisfies {DateIndex} */ (0);
case "weekindex": case "weekindex":
return /** @satisfies {Weekindex} */ (2); return /** @satisfies {WeekIndex} */ (22);
case "difficultyepoch": case "difficultyepoch":
return /** @satisfies {Difficultyepoch} */ (3); return /** @satisfies {DifficultyEpoch} */ (2);
case "monthindex": case "monthindex":
return /** @satisfies {Monthindex} */ (4); return /** @satisfies {MonthIndex} */ (7);
case "quarterindex": case "quarterindex":
return /** @satisfies {Quarterindex} */ (5); return /** @satisfies {QuarterIndex} */ (19);
case "yearindex": case "yearindex":
return /** @satisfies {Yearindex} */ (6); return /** @satisfies {YearIndex} */ (23);
case "decadeindex": case "decadeindex":
return /** @satisfies {Decadeindex} */ (7); return /** @satisfies {DecadeIndex} */ (1);
case "halvingepoch": case "halvingepoch":
return /** @satisfies {Halvingepoch} */ (8); return /** @satisfies {HalvingEpoch} */ (4);
case "addressindex":
return /** @satisfies {Addressindex} */ (9);
case "p2pk33index":
return /** @satisfies {P2PK33index} */ (10);
case "p2pk65index":
return /** @satisfies {P2PK65index} */ (11);
case "p2pkhindex":
return /** @satisfies {P2PKHindex} */ (12);
case "p2shindex":
return /** @satisfies {P2SHindex} */ (13);
case "p2trindex":
return /** @satisfies {P2TRindex} */ (14);
case "p2wpkhindex":
return /** @satisfies {P2WPKHindex} */ (15);
case "p2wshindex":
return /** @satisfies {P2WSHindex} */ (16);
case "txindex": case "txindex":
return /** @satisfies {Txindex} */ (17); return /** @satisfies {TxIndex} */ (20);
case "inputindex": case "inputindex":
return /** @satisfies {Inputindex} */ (18); return /** @satisfies {InputIndex} */ (6);
case "outputindex": case "outputindex":
return /** @satisfies {Outputindex} */ (19); return /** @satisfies {OutputIndex} */ (9);
case "emptyoutputindex": case "p2pk33index":
return /** @satisfies {Emptyindex} */ (20); return /** @satisfies {P2PK33Index} */ (12);
case "p2pk65index":
return /** @satisfies {P2PK65Index} */ (13);
case "p2pkhindex":
return /** @satisfies {P2PKHIndex} */ (14);
case "p2shindex":
return /** @satisfies {P2SHIndex} */ (15);
case "p2trindex":
return /** @satisfies {P2TRIndex} */ (16);
case "p2wpkhindex":
return /** @satisfies {P2WPKHIndex} */ (17);
case "p2wshindex":
return /** @satisfies {P2WSHIndex} */ (18);
case "p2aindex":
return /** @satisfies {P2AIndex} */ (10);
case "p2msindex": case "p2msindex":
return /** @satisfies {P2MSindex} */ (21); return /** @satisfies {P2MSIndex} */ (11);
case "opreturnindex": case "opreturnindex":
return /** @satisfies {Opreturnindex} */ (22); return /** @satisfies {OpReturnIndex} */ (8);
case "pushonlyindex": case "emptyoutputindex":
return /** @satisfies {Pushonlyindex} */ (23); return /** @satisfies {EmptyOutputIndex} */ (3);
case "unknownoutputindex": case "unknownoutputindex":
return /** @satisfies {Unknownindex} */ (24); return /** @satisfies {UnknownOutputIndex} */ (21);
} }
} }

File diff suppressed because it is too large Load Diff