diff --git a/CHANGELOG.md b/CHANGELOG.md index 3bc96f93d..76e70d914 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -48,6 +48,7 @@ - Added `--recompute_computed true` argument, to allow recomputation of computed datasets in case of a bug - Fixed not saved arguments, not being processed properly - Fixed bug in `generic_map.multi_insert_simple_average` +- Added defragmentation of databases to save space ## Server diff --git a/parser/src/actions/iter_blocks.rs b/parser/src/actions/iter_blocks.rs index b5a9b5a90..0132e760f 100644 --- a/parser/src/actions/iter_blocks.rs +++ b/parser/src/actions/iter_blocks.rs @@ -31,6 +31,14 @@ pub fn iter_blocks( let mut databases = Databases::import(); + if config.first_defragment() { + databases.defragment(); + + if true { + panic!("Done"); + } + } + log("Imported databases"); let mut states = States::import().unwrap_or_default(); @@ -177,7 +185,11 @@ pub fn iter_blocks( .as_ref() .map_or(true, |date| date.is_first_of_month()); - if is_check_point || height.is_close_to_end(approx_block_count) { + let ran_for_at_least_a_minute = instant.elapsed().as_secs() >= 60; + + if (is_check_point && ran_for_at_least_a_minute) + || height.is_close_to_end(approx_block_count) + { break 'days; } diff --git a/parser/src/databases/_database.rs b/parser/src/databases/_database.rs index 387c2cabc..c23207b9c 100644 --- a/parser/src/databases/_database.rs +++ b/parser/src/databases/_database.rs @@ -1,12 +1,13 @@ use std::{ collections::{BTreeMap, BTreeSet}, fmt::Debug, - fs, + fs, mem, }; use allocative::Allocative; use derive_deref::{Deref, DerefMut}; +use itertools::Itertools; // https://docs.rs/sanakirja/latest/sanakirja/index.html // https://pijul.org/posts/2021-02-06-rethinking-sanakirja/ // @@ -31,6 +32,8 @@ where { pub cached_puts: BTreeMap, pub cached_dels: BTreeSet, + folder: String, + file: String, #[allocative(skip)] db: Db_>, #[allocative(skip)] @@ -53,6 +56,8 @@ where .unwrap_or_else(|| unsafe { btree::create_db_(&mut txn).unwrap() }); 
Ok(Self { + folder: folder.to_owned(), + file: file.to_owned(), cached_puts: BTreeMap::default(), cached_dels: BTreeSet::default(), db, @@ -64,6 +69,16 @@ where btree::iter(&self.txn, &self.db, None).unwrap() } + fn iter_collect(&self) -> BTreeMap + where + Value: Clone, + { + self.iter() + .map(|r| r.unwrap()) + .map(|(key, value)| (key.clone(), value.clone())) + .collect::<_>() + } + pub fn get(&self, key: &Key) -> Option<&Value> { if let Some(cached_put) = self.get_from_puts(key) { return Some(cached_put); @@ -72,6 +87,17 @@ where self.db_get(key) } + fn destroy(self) { + let path = self.path(); + + drop(self); + + fs::remove_file(&path).unwrap_or_else(|_| { + dbg!(path); + panic!("Error"); + }); + } + pub fn db_get(&self, key: &Key) -> Option<&Value> { let option = btree::get(&self.txn, &self.db, key, None).unwrap(); @@ -114,6 +140,14 @@ where self.cached_puts.insert(key, value) } + fn len(&self) -> usize { + self.iter().try_len().unwrap_or_else(|e| e.0) + } + + fn is_empty(&self) -> bool { + self.len() == 0 + } + #[inline(always)] pub fn remove_from_puts(&mut self, key: &Key) -> Option { self.cached_puts.remove(key) @@ -131,6 +165,10 @@ where self.cached_puts.insert(key, value) } + fn path(&self) -> String { + format!("{}/{}", databases_folder_path(&self.folder), self.file) + } + fn init_txn(folder: &str, file: &str) -> color_eyre::Result> { let path = databases_folder_path(folder); @@ -143,31 +181,78 @@ where Ok(txn) } - pub fn export(mut self) -> color_eyre::Result<(), Error> { + fn db_multi_put(&mut self, tree: BTreeMap) -> Result<(), Error> { + tree.into_iter() + .try_for_each(|(key, value)| -> Result<(), Error> { + btree::put(&mut self.txn, &mut self.db, &key, &value)?; + Ok(()) + }) + } + + fn db_multi_del(&mut self, tree: BTreeSet) -> Result<(), Error> { + tree.into_iter().try_for_each(|key| -> Result<(), Error> { + btree::del(&mut self.txn, &mut self.db, &key, None)?; + Ok(()) + }) + } +} + +pub trait AnyDatabase { + fn export(self) -> 
color_eyre::Result<(), Error>; + fn boxed_export(self: Box) -> color_eyre::Result<(), Error>; + #[allow(unused)] + fn defragment(self); + fn boxed_defragment(self: Box); +} + +impl AnyDatabase for Database +where + Key: Ord + Clone + Debug + Storable, + Value: Storable + PartialEq + Clone, +{ + fn export(self) -> color_eyre::Result<(), Error> { + Box::new(self).boxed_export() + } + + fn boxed_export(mut self: Box) -> color_eyre::Result<(), Error> { if self.cached_dels.is_empty() && self.cached_puts.is_empty() { return Ok(()); } - self.cached_dels - .into_iter() - .try_for_each(|key| -> Result<(), Error> { - btree::del(&mut self.txn, &mut self.db, &key, None)?; + let cached_dels = mem::take(&mut self.cached_dels); + self.db_multi_del(cached_dels)?; - Ok(()) - })?; - - self.cached_puts - .into_iter() - .try_for_each(|(key, value)| -> Result<(), Error> { - btree::put(&mut self.txn, &mut self.db, &key, &value)?; - - Ok(()) - })?; + let cached_puts = mem::take(&mut self.cached_puts); + self.db_multi_put(cached_puts)?; self.txn.set_root(ROOT_DB, self.db.db.into()); self.txn.commit() } + + fn defragment(self) { + Box::new(self).boxed_defragment() + } + + fn boxed_defragment(self: Box) { + let btree = self.iter_collect(); + + let folder = self.folder.to_owned(); + let file = self.file.to_owned(); + + self.destroy(); + + let mut s = Self::open(&folder, &file).unwrap(); + + if !s.is_empty() { + dbg!(s.len()); + panic!("Database isn't empty"); + } + + s.db_multi_put(btree).unwrap(); + + s.export().unwrap(); + } } #[derive( diff --git a/parser/src/databases/_trait.rs b/parser/src/databases/_trait.rs index a7c632979..41c78aff8 100644 --- a/parser/src/databases/_trait.rs +++ b/parser/src/databases/_trait.rs @@ -5,7 +5,7 @@ use crate::{ utils::log, }; -use super::databases_folder_path; +use super::{databases_folder_path, AnyDatabase}; pub trait AnyDatabaseGroup where @@ -13,10 +13,15 @@ where { fn import() -> Self; - fn export(&mut self, height: Height, date: Date) -> 
color_eyre::Result<()>; - fn folder<'a>() -> &'a str; + fn drain_to_vec(&mut self) -> Vec>; + fn open_all(&mut self); + + fn export_metadata(&mut self, height: Height, date: Date) -> color_eyre::Result<()>; + // fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()>; + // fn defragment(&mut self); + fn reset(&mut self) -> color_eyre::Result<(), io::Error> { log(&format!("Reset {}", Self::folder())); diff --git a/parser/src/databases/address_index_to_address_data.rs b/parser/src/databases/address_index_to_address_data.rs index 444a25444..791fa9eb6 100644 --- a/parser/src/databases/address_index_to_address_data.rs +++ b/parser/src/databases/address_index_to_address_data.rs @@ -5,6 +5,7 @@ use std::{ }; use allocative::Allocative; +use itertools::Itertools; use rayon::prelude::*; use crate::{ @@ -13,7 +14,7 @@ use crate::{ utils::time, }; -use super::{AnyDatabaseGroup, Database as _Database, Metadata}; +use super::{AnyDatabase, AnyDatabaseGroup, Database as _Database, Metadata}; type Key = u32; type Value = AddressData; @@ -97,18 +98,45 @@ impl AddressIndexToAddressData { .iter() .map(|r| r.unwrap().1) .for_each(|address_data| s.increment(address_data).unwrap()); + s }) .sum() }) } - pub fn open_all(&mut self) { + fn db_index(key: &Key) -> usize { + *key as usize / ADDRESS_INDEX_DB_MAX_SIZE + } +} + +impl AnyDatabaseGroup for AddressIndexToAddressData { + fn import() -> Self { + Self { + metadata: Metadata::import(&Self::full_path(), 1), + + map: BTreeMap::default(), + } + } + + fn reset_metadata(&mut self) { + self.metadata.reset(); + } + + fn folder<'a>() -> &'a str { + "address_index_to_address_data" + } + + fn open_all(&mut self) { let path = Self::full_path(); - fs::create_dir_all(&path).unwrap(); + let folder = fs::read_dir(path); - fs::read_dir(path) + if folder.is_err() { + return; + } + + folder .unwrap() .map(|entry| { entry @@ -126,35 +154,14 @@ impl AddressIndexToAddressData { }); } - fn db_index(key: &Key) -> usize { - *key as usize / 
ADDRESS_INDEX_DB_MAX_SIZE - } -} - -impl AnyDatabaseGroup for AddressIndexToAddressData { - fn import() -> Self { - Self { - metadata: Metadata::import(&Self::full_path(), 1), - - map: BTreeMap::default(), - } - } - - fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> { + fn drain_to_vec(&mut self) -> Vec> { mem::take(&mut self.map) - .into_par_iter() - .try_for_each(|(_, db)| db.export())?; - - self.metadata.export(height, date).unwrap(); - - Ok(()) + .into_values() + .map(|db| Box::new(db) as Box) + .collect_vec() } - fn reset_metadata(&mut self) { - self.metadata.reset(); - } - - fn folder<'a>() -> &'a str { - "address_index_to_address_data" + fn export_metadata(&mut self, height: Height, date: Date) -> color_eyre::Result<()> { + self.metadata.export(height, date) } } diff --git a/parser/src/databases/address_index_to_empty_address_data.rs b/parser/src/databases/address_index_to_empty_address_data.rs index f9a284684..337a568f1 100644 --- a/parser/src/databases/address_index_to_empty_address_data.rs +++ b/parser/src/databases/address_index_to_empty_address_data.rs @@ -1,15 +1,17 @@ use std::{ collections::BTreeMap, - mem, + fs, mem, ops::{Deref, DerefMut}, }; use allocative::Allocative; -use rayon::prelude::*; +use itertools::Itertools; use crate::structs::{Date, EmptyAddressData, Height}; -use super::{AnyDatabaseGroup, Database as _Database, Metadata, ADDRESS_INDEX_DB_MAX_SIZE}; +use super::{ + AnyDatabase, AnyDatabaseGroup, Database as _Database, Metadata, ADDRESS_INDEX_DB_MAX_SIZE, +}; type Key = u32; type Value = EmptyAddressData; @@ -96,16 +98,6 @@ impl AnyDatabaseGroup for AddressIndexToEmptyAddressData { } } - fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> { - mem::take(&mut self.map) - .into_par_iter() - .try_for_each(|(_, db)| db.export())?; - - self.metadata.export(height, date)?; - - Ok(()) - } - fn reset_metadata(&mut self) { self.metadata.reset(); } @@ -113,4 +105,60 @@ impl AnyDatabaseGroup for 
AddressIndexToEmptyAddressData { fn folder<'a>() -> &'a str { "address_index_to_empty_address_data" } + + fn open_all(&mut self) { + let path = Self::full_path(); + + let folder = fs::read_dir(path); + + if folder.is_err() { + return; + } + + folder + .unwrap() + .map(|entry| { + entry + .unwrap() + .path() + .file_name() + .unwrap() + .to_str() + .unwrap() + .to_owned() + }) + .filter(|file_name| file_name.contains("..")) + .for_each(|path| { + self.open_db(&path.split("..").next().unwrap().parse::().unwrap()); + }); + } + + fn drain_to_vec(&mut self) -> Vec> { + mem::take(&mut self.map) + .into_values() + .map(|db| Box::new(db) as Box) + .collect_vec() + } + + fn export_metadata(&mut self, height: Height, date: Date) -> color_eyre::Result<()> { + self.metadata.export(height, date) + } + + // fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> { + // self.drain_to_vec() + // .into_par_iter() + // .try_for_each(AnyDatabase::boxed_export)?; + + // self.metadata.export(height, date)?; + + // Ok(()) + // } + + // fn defragment(&mut self) { + // self.open_all(); + + // self.drain_to_vec() + // .into_par_iter() + // .for_each(AnyDatabase::boxed_defragment); + // } } diff --git a/parser/src/databases/address_to_address_index.rs b/parser/src/databases/address_to_address_index.rs index b28c9709d..d3a559c67 100644 --- a/parser/src/databases/address_to_address_index.rs +++ b/parser/src/databases/address_to_address_index.rs @@ -1,11 +1,13 @@ -use std::{collections::BTreeMap, mem, thread}; +use std::{collections::BTreeMap, fs, mem}; use allocative::Allocative; -use rayon::prelude::*; +use itertools::Itertools; use crate::structs::{Address, Date, Height}; -use super::{AnyDatabaseGroup, Database, Metadata, U8x19, U8x31}; +use super::{ + databases_folder_path, AnyDatabase, AnyDatabaseGroup, Database, Metadata, U8x19, U8x31, +}; type Value = u32; type U8x19Database = Database; @@ -146,64 +148,161 @@ impl AddressToAddressIndex { } } - pub fn open_p2pk(&mut 
self, prefix: u16) -> &mut P2PKDatabase { - self.p2pk.entry(prefix).or_insert_with(|| { - Database::open( - &format!("{}/{}", Self::folder(), "p2pk"), - &prefix.to_string(), - ) + fn path_to_group_prefixes(path: &str) -> Vec { + let path = databases_folder_path(path); + + let folder = fs::read_dir(path); + + if folder.is_err() { + return vec![]; + } + + folder .unwrap() - }) + .map(|entry| { + entry + .unwrap() + .path() + .file_name() + .unwrap() + .to_str() + .unwrap() + .to_owned() + .parse::() + .unwrap() + }) + .collect_vec() + } + + fn path_p2pk() -> String { + format!("{}/{}", Self::folder(), "p2pk") + } + + pub fn open_p2pk(&mut self, prefix: u16) -> &mut P2PKDatabase { + let path = Self::path_p2pk(); + self.p2pk + .entry(prefix) + .or_insert_with(|| Database::open(&path, &prefix.to_string()).unwrap()) + } + + fn open_all_p2pk(&mut self) { + let path = Self::path_p2pk(); + Self::path_to_group_prefixes(&path) + .into_iter() + .for_each(|prefix| { + self.p2pk + .insert(prefix, Database::open(&path, &prefix.to_string()).unwrap()); + }); + } + + fn path_p2pkh() -> String { + format!("{}/{}", Self::folder(), "p2pkh") } pub fn open_p2pkh(&mut self, prefix: u16) -> &mut P2PKHDatabase { - self.p2pkh.entry(prefix).or_insert_with(|| { - Database::open( - &format!("{}/{}", Self::folder(), "p2pkh"), - &prefix.to_string(), - ) - .unwrap() - }) + let path = Self::path_p2pkh(); + + self.p2pkh + .entry(prefix) + .or_insert_with(|| Database::open(&path, &prefix.to_string()).unwrap()) + } + + fn open_all_p2pkh(&mut self) { + let path = Self::path_p2pkh(); + Self::path_to_group_prefixes(&path) + .into_iter() + .for_each(|prefix| { + self.p2pkh + .insert(prefix, Database::open(&path, &prefix.to_string()).unwrap()); + }); + } + + fn path_p2sh() -> String { + format!("{}/{}", Self::folder(), "p2sh") } pub fn open_p2sh(&mut self, prefix: u16) -> &mut P2SHDatabase { - self.p2sh.entry(prefix).or_insert_with(|| { - Database::open( - &format!("{}/{}", Self::folder(), "p2sh"), - 
&prefix.to_string(), - ) - .unwrap() - }) + let path = Self::path_p2sh(); + + self.p2sh + .entry(prefix) + .or_insert_with(|| Database::open(&path, &prefix.to_string()).unwrap()) + } + + fn open_all_p2sh(&mut self) { + let path = Self::path_p2sh(); + Self::path_to_group_prefixes(&path) + .into_iter() + .for_each(|prefix| { + self.p2sh + .insert(prefix, Database::open(&path, &prefix.to_string()).unwrap()); + }); + } + + fn path_p2wpkh() -> String { + format!("{}/{}", Self::folder(), "p2wpkh") } pub fn open_p2wpkh(&mut self, prefix: u16) -> &mut P2WPKHDatabase { - self.p2wpkh.entry(prefix).or_insert_with(|| { - Database::open( - &format!("{}/{}", Self::folder(), "p2wpkh"), - &prefix.to_string(), - ) - .unwrap() - }) + let path = Self::path_p2wpkh(); + + self.p2wpkh + .entry(prefix) + .or_insert_with(|| Database::open(&path, &prefix.to_string()).unwrap()) + } + + fn open_all_p2wpkh(&mut self) { + let path = Self::path_p2wpkh(); + Self::path_to_group_prefixes(&path) + .into_iter() + .for_each(|prefix| { + self.p2wpkh + .insert(prefix, Database::open(&path, &prefix.to_string()).unwrap()); + }); + } + + fn path_p2wsh() -> String { + format!("{}/{}", Self::folder(), "p2wsh") } pub fn open_p2wsh(&mut self, prefix: u16) -> &mut P2WSHDatabase { - self.p2wsh.entry(prefix).or_insert_with(|| { - Database::open( - &format!("{}/{}", Self::folder(), "p2wsh"), - &prefix.to_string(), - ) - .unwrap() - }) + let path = Self::path_p2wsh(); + + self.p2wsh + .entry(prefix) + .or_insert_with(|| Database::open(&path, &prefix.to_string()).unwrap()) + } + + fn open_all_p2wsh(&mut self) { + let path = Self::path_p2wsh(); + Self::path_to_group_prefixes(&path) + .into_iter() + .for_each(|prefix| { + self.p2wsh + .insert(prefix, Database::open(&path, &prefix.to_string()).unwrap()); + }); + } + + fn path_p2tr() -> String { + format!("{}/{}", Self::folder(), "p2tr") } pub fn open_p2tr(&mut self, prefix: u16) -> &mut P2TRDatabase { - self.p2tr.entry(prefix).or_insert_with(|| { - Database::open( - 
&format!("{}/{}", Self::folder(), "p2tr"), - &prefix.to_string(), - ) - .unwrap() - }) + let path = Self::path_p2tr(); + + self.p2tr + .entry(prefix) + .or_insert_with(|| Database::open(&path, &prefix.to_string()).unwrap()) + } + + fn open_all_p2tr(&mut self) { + let path = Self::path_p2tr(); + Self::path_to_group_prefixes(&path) + .into_iter() + .for_each(|prefix| { + self.p2tr + .insert(prefix, Database::open(&path, &prefix.to_string()).unwrap()); + }); } pub fn open_unknown(&mut self) -> &mut UnknownDatabase { @@ -251,44 +350,6 @@ impl AnyDatabaseGroup for AddressToAddressIndex { } } - fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> { - thread::scope(|s| { - s.spawn(|| { - mem::take(&mut self.p2pk) - .into_par_iter() - .chain(mem::take(&mut self.p2pkh).into_par_iter()) - .chain(mem::take(&mut self.p2sh).into_par_iter()) - .chain(mem::take(&mut self.p2wpkh).into_par_iter()) - .try_for_each(|(_, db)| db.export()) - }); - - s.spawn(|| { - mem::take(&mut self.p2wsh) - .into_par_iter() - .chain(mem::take(&mut self.p2tr).into_par_iter()) - .try_for_each(|(_, db)| db.export()) - }); - - s.spawn(|| { - [ - self.unknown.take(), - self.op_return.take(), - self.push_only.take(), - self.empty.take(), - ] - .into_par_iter() - .flatten() - .try_for_each(|db| db.export()) - }); - - self.multisig.take().map(|db| db.export()); - }); - - self.metadata.export(height, date)?; - - Ok(()) - } - fn reset_metadata(&mut self) { self.metadata.reset() } @@ -296,4 +357,61 @@ impl AnyDatabaseGroup for AddressToAddressIndex { fn folder<'a>() -> &'a str { "address_to_address_index" } + + fn drain_to_vec(&mut self) -> Vec> { + mem::take(&mut self.p2pk) + .into_values() + .map(|db| Box::new(db) as Box) + .chain( + mem::take(&mut self.p2pkh) + .into_values() + .map(|db| Box::new(db) as Box), + ) + .chain( + mem::take(&mut self.p2sh) + .into_values() + .map(|db| Box::new(db) as Box), + ) + .chain( + mem::take(&mut self.p2wpkh) + .into_values() + .map(|db| Box::new(db) 
as Box), + ) + .chain( + mem::take(&mut self.p2wsh) + .into_values() + .map(|db| Box::new(db) as Box), + ) + .chain( + mem::take(&mut self.p2tr) + .into_values() + .map(|db| Box::new(db) as Box), + ) + .chain( + [ + self.unknown.take(), + self.op_return.take(), + self.push_only.take(), + self.empty.take(), + self.multisig.take(), + ] + .into_iter() + .flatten() + .map(|db| Box::new(db) as Box), + ) + .collect_vec() + } + + fn open_all(&mut self) { + self.open_all_p2pk(); + self.open_all_p2pkh(); + self.open_all_p2wpkh(); + self.open_all_p2wsh(); + self.open_all_p2sh(); + self.open_all_p2tr(); + } + + fn export_metadata(&mut self, height: Height, date: Date) -> color_eyre::Result<()> { + self.metadata.export(height, date) + } } diff --git a/parser/src/databases/mod.rs b/parser/src/databases/mod.rs index e436b8fd3..9d10bda11 100644 --- a/parser/src/databases/mod.rs +++ b/parser/src/databases/mod.rs @@ -17,12 +17,15 @@ use _trait::*; pub use address_index_to_address_data::*; pub use address_index_to_empty_address_data::*; pub use address_to_address_index::*; +use itertools::Itertools; use metadata::*; +use rayon::iter::{IntoParallelIterator, ParallelIterator}; pub use txid_to_tx_data::*; pub use txout_index_to_address_index::*; pub use txout_index_to_amount::*; use crate::{ + log, structs::{Date, Height}, utils::time, }; @@ -61,51 +64,96 @@ impl Databases { } } + pub fn drain_to_vec(&mut self) -> Vec> { + self.txid_to_tx_data + .drain_to_vec() + .into_iter() + .chain(self.txout_index_to_amount.drain_to_vec()) + .chain(self.address_to_address_index.drain_to_vec()) + .chain(self.address_index_to_address_data.drain_to_vec()) + .chain(self.address_index_to_empty_address_data.drain_to_vec()) + .chain(self.txout_index_to_address_index.drain_to_vec()) + .collect_vec() + } + + fn export_metadata(&mut self, height: Height, date: Date) -> color_eyre::Result<()> { + self.txid_to_tx_data.export_metadata(height, date)?; + self.txout_index_to_amount.export_metadata(height, date)?; 
+ self.address_index_to_address_data + .export_metadata(height, date)?; + self.address_index_to_empty_address_data + .export_metadata(height, date)?; + self.address_to_address_index + .export_metadata(height, date)?; + self.txout_index_to_address_index + .export_metadata(height, date)?; + Ok(()) + } + pub fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> { - thread::scope(|s| { - s.spawn(|| { - time("> Database txid_to_tx_data", || { - self.txid_to_tx_data.export(height, date) - }) - }); + self.export_metadata(height, date)?; - s.spawn(|| { - time("> Database txout_index_to_amount", || { - self.txout_index_to_amount.export(height, date) - }) - }); - }); - - thread::scope(|s| { - s.spawn(|| { - time("> Database address_index_to_address_data", || { - self.address_index_to_address_data.export(height, date) - }) - }); - - s.spawn(|| { - time("> Database address_index_to_empty_address_data", || { - self.address_index_to_empty_address_data - .export(height, date) - }) - }); - - s.spawn(|| { - time("> Database address_to_address_index", || { - self.address_to_address_index.export(height, date) - }) - }); - - s.spawn(|| { - time("> Database txout_index_to_address_index", || { - self.txout_index_to_address_index.export(height, date) - }) - }); - }); + self.drain_to_vec() + .into_par_iter() + .try_for_each(AnyDatabase::boxed_export)?; Ok(()) } + fn open_all(&mut self) { + thread::scope(|s| { + s.spawn(|| { + time("Opening all address_index_to_address_data", || { + self.address_index_to_address_data.open_all() + }) + }); + + s.spawn(|| { + time("Opening all address_index_to_empty_address_data", || { + self.address_index_to_empty_address_data.open_all() + }) + }); + + s.spawn(|| { + time("Opening all address_to_address_index", || { + self.address_to_address_index.open_all() + }) + }); + + s.spawn(|| { + time("Opening all txid_to_tx_data", || { + self.txid_to_tx_data.open_all() + }) + }); + + s.spawn(|| { + time("Opening all 
txout_index_to_address_index", || { + self.txout_index_to_address_index.open_all() + }) + }); + + s.spawn(|| { + time("Opening all txout_index_to_amount", || { + self.txout_index_to_amount.open_all() + }) + }); + }); + } + + pub fn defragment(&mut self) { + log("Databases defragmentation"); + + time("Defragmenting databases", || { + time("Opened all databases", || self.open_all()); + + log("Defragmenting..."); + + self.drain_to_vec() + .into_par_iter() + .for_each(AnyDatabase::boxed_defragment); + }) + } + pub fn reset(&mut self, include_addresses: bool) { if include_addresses { let _ = self.address_index_to_address_data.reset(); diff --git a/parser/src/databases/txid_to_tx_data.rs b/parser/src/databases/txid_to_tx_data.rs index de77c523a..37d38b024 100644 --- a/parser/src/databases/txid_to_tx_data.rs +++ b/parser/src/databases/txid_to_tx_data.rs @@ -1,16 +1,16 @@ use std::{ collections::BTreeMap, - mem, + fs, mem, ops::{Deref, DerefMut}, }; use allocative::Allocative; use biter::bitcoin::Txid; -use rayon::prelude::*; +use itertools::Itertools; use crate::structs::{Date, Height, TxData}; -use super::{AnyDatabaseGroup, Database as _Database, Metadata, U8x31}; +use super::{AnyDatabase, AnyDatabaseGroup, Database as _Database, Metadata, U8x31}; type Key = U8x31; type Value = TxData; @@ -104,7 +104,11 @@ impl TxidToTxData { #[inline(always)] pub fn open_db(&mut self, txid: &Txid) -> &mut Database { let db_index = Self::db_index(txid); + self._open_db(db_index) + } + #[inline(always)] + pub fn _open_db(&mut self, db_index: u16) -> &mut Database { self.entry(db_index) .or_insert_with(|| Database::open(Self::folder(), &db_index.to_string()).unwrap()) } @@ -127,16 +131,6 @@ impl AnyDatabaseGroup for TxidToTxData { } } - fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> { - mem::take(&mut self.map) - .into_par_iter() - .try_for_each(|(_, db)| db.export())?; - - self.metadata.export(height, date)?; - - Ok(()) - } - fn reset_metadata(&mut self) { 
self.metadata.reset(); } @@ -144,4 +138,42 @@ impl AnyDatabaseGroup for TxidToTxData { fn folder<'a>() -> &'a str { "txid_to_tx_data" } + + fn open_all(&mut self) { + let path = Self::full_path(); + + let folder = fs::read_dir(path); + + if folder.is_err() { + return; + } + + folder + .unwrap() + .flat_map(|entry| { + entry + .unwrap() + .path() + .file_name() + .unwrap() + .to_str() + .unwrap() + .to_owned() + .parse::() + }) + .for_each(|db_index| { + self._open_db(db_index); + }); + } + + fn drain_to_vec(&mut self) -> Vec> { + mem::take(&mut self.map) + .into_values() + .map(|db| Box::new(db) as Box) + .collect_vec() + } + + fn export_metadata(&mut self, height: Height, date: Date) -> color_eyre::Result<()> { + self.metadata.export(height, date) + } } diff --git a/parser/src/databases/txout_index_to_address_index.rs b/parser/src/databases/txout_index_to_address_index.rs index 38f1d0c3a..495770035 100644 --- a/parser/src/databases/txout_index_to_address_index.rs +++ b/parser/src/databases/txout_index_to_address_index.rs @@ -1,15 +1,15 @@ use std::{ collections::BTreeMap, - mem, + fs, mem, ops::{Deref, DerefMut}, }; use allocative::Allocative; -use rayon::prelude::*; +use itertools::Itertools; use crate::structs::{Date, Height, TxoutIndex}; -use super::{AnyDatabaseGroup, Database as _Database, Metadata}; +use super::{AnyDatabase, AnyDatabaseGroup, Database as _Database, Metadata}; type Key = TxoutIndex; type Value = u32; @@ -95,15 +95,15 @@ impl AnyDatabaseGroup for TxoutIndexToAddressIndex { } } - fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> { - mem::take(&mut self.map) - .into_par_iter() - .try_for_each(|(_, db)| db.export())?; + // fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> { + // mem::take(&mut self.map) + // .into_par_iter() + // .try_for_each(|(_, db)| db.export())?; - self.metadata.export(height, date)?; + // self.metadata.export(height, date)?; - Ok(()) - } + // Ok(()) + // } fn 
reset_metadata(&mut self) { self.metadata.reset(); @@ -112,4 +112,50 @@ impl AnyDatabaseGroup for TxoutIndexToAddressIndex { fn folder<'a>() -> &'a str { "txout_index_to_address_index" } + + fn open_all(&mut self) { + let path = Self::full_path(); + + let folder = fs::read_dir(path); + + if folder.is_err() { + return; + } + + folder + .unwrap() + .map(|entry| { + entry + .unwrap() + .path() + .file_name() + .unwrap() + .to_str() + .unwrap() + .to_owned() + }) + .filter(|file_name| file_name.contains("..")) + .for_each(|path| { + self.open_db( + &path + .split("..") + .next() + .unwrap() + .parse::() + .unwrap() + .into(), + ); + }); + } + + fn drain_to_vec(&mut self) -> Vec> { + mem::take(&mut self.map) + .into_values() + .map(|db| Box::new(db) as Box) + .collect_vec() + } + + fn export_metadata(&mut self, height: Height, date: Date) -> color_eyre::Result<()> { + self.metadata.export(height, date) + } } diff --git a/parser/src/databases/txout_index_to_amount.rs b/parser/src/databases/txout_index_to_amount.rs index 6d469f65a..6344edfd4 100644 --- a/parser/src/databases/txout_index_to_amount.rs +++ b/parser/src/databases/txout_index_to_amount.rs @@ -1,15 +1,15 @@ use std::{ collections::BTreeMap, - mem, + fs, mem, ops::{Deref, DerefMut}, }; use allocative::Allocative; -use rayon::prelude::*; +use itertools::Itertools; use crate::structs::{Amount, Date, Height, TxoutIndex}; -use super::{AnyDatabaseGroup, Database as _Database, Metadata}; +use super::{AnyDatabase, AnyDatabaseGroup, Database as _Database, Metadata}; type Key = TxoutIndex; type Value = Amount; @@ -95,16 +95,6 @@ impl AnyDatabaseGroup for TxoutIndexToAmount { } } - fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> { - mem::take(&mut self.map) - .into_par_iter() - .try_for_each(|(_, db)| db.export())?; - - self.metadata.export(height, date)?; - - Ok(()) - } - fn reset_metadata(&mut self) { self.metadata.reset(); } @@ -112,4 +102,50 @@ impl AnyDatabaseGroup for TxoutIndexToAmount { 
fn folder<'a>() -> &'a str { "txout_index_to_amount" } + + fn open_all(&mut self) { + let path = Self::full_path(); + + let folder = fs::read_dir(path); + + if folder.is_err() { + return; + } + + folder + .unwrap() + .map(|entry| { + entry + .unwrap() + .path() + .file_name() + .unwrap() + .to_str() + .unwrap() + .to_owned() + }) + .filter(|file_name| file_name.contains("..")) + .for_each(|path| { + self.open_db( + &path + .split("..") + .next() + .unwrap() + .parse::() + .unwrap() + .into(), + ); + }); + } + + fn drain_to_vec(&mut self) -> Vec> { + mem::take(&mut self.map) + .into_values() + .map(|db| Box::new(db) as Box) + .collect_vec() + } + + fn export_metadata(&mut self, height: Height, date: Date) -> color_eyre::Result<()> { + self.metadata.export(height, date) + } } diff --git a/parser/src/structs/config.rs b/parser/src/structs/config.rs index bbfb6b76b..179126ef9 100644 --- a/parser/src/structs/config.rs +++ b/parser/src/structs/config.rs @@ -32,10 +32,9 @@ pub struct Config { #[arg(long, value_name = "SECONDS")] pub delay: Option, - /// Maximum ram you want the program to use in GB, default: 50% of total, not saved + // Maximum ram you want the program to use in GB, default: 50% of total, not saved // #[arg(long, value_name = "GB")] // pub max_ram: Option, - /// Start a dry run, default: false, not saved #[arg(long, value_name = "BOOL")] dry_run: Option, @@ -47,6 +46,10 @@ pub struct Config { /// Recompute all computed datasets, default: false, not saved #[arg(long, value_name = "BOOL")] recompute_computed: Option, + + /// Start the program by defragmenting all databases to reduce their footprint, default: false, not saved + #[arg(long, value_name = "BOOL")] + first_defragment: Option, } impl Config { @@ -99,6 +102,7 @@ impl Config { config.dry_run = config_args.dry_run.take(); config.record_ram_usage = config_args.record_ram_usage.take(); config.recompute_computed = config_args.recompute_computed.take(); + config.first_defragment = 
config_args.first_defragment.take(); log("---"); log("Configuration:"); @@ -115,6 +119,7 @@ impl Config { "recompute_computed: {:?}", config.recompute_computed )); + log(&format!("first_defragment: {:?}", config.first_defragment)); log("---"); if config_args != Config::default() { @@ -162,4 +167,8 @@ impl Config { pub fn recompute_computed(&self) -> bool { self.recompute_computed.is_some_and(|b| b) } + + pub fn first_defragment(&self) -> bool { + self.first_defragment.is_some_and(|b| b) + } } diff --git a/parser/src/structs/txout_index.rs b/parser/src/structs/txout_index.rs index 8cfb05990..6da7f588d 100644 --- a/parser/src/structs/txout_index.rs +++ b/parser/src/structs/txout_index.rs @@ -9,6 +9,9 @@ pub struct TxoutIndex { } direct_repr!(TxoutIndex); +const SHIFT: u64 = 16; +const AND: u64 = (1 << SHIFT) - 1; + impl TxoutIndex { #[inline(always)] pub fn new(tx_index: u32, vout: u16) -> Self { @@ -17,6 +20,15 @@ impl TxoutIndex { #[inline(always)] pub fn as_u64(&self) -> u64 { - ((self.tx_index as u64) << 16_u64) + self.vout as u64 + ((self.tx_index as u64) << SHIFT) + self.vout as u64 + } +} + +impl From for TxoutIndex { + fn from(value: u64) -> Self { + Self { + tx_index: (value >> SHIFT) as u32, + vout: (value & AND) as u16, + } } } diff --git a/website/scripts/main.js b/website/scripts/main.js index 77fcb2e4b..a3035f4fc 100644 --- a/website/scripts/main.js +++ b/website/scripts/main.js @@ -6,14 +6,14 @@ * @import * as _ from "./packages/ufuzzy/v1.0.14/types" * @import { DeepPartial, ChartOptions, IChartApi, IHorzScaleBehavior, WhitespaceData, SingleValueData, ISeriesApi, Time, LogicalRange, SeriesMarker, CandlestickData, SeriesType, BaselineStyleOptions, SeriesOptionsCommon } from "./packages/lightweight-charts/v4.2.0/types" * @import { DatePath, HeightPath, LastPath } from "./types/paths"; - * @import { SignalOptions, untrack as Untrack } from "./packages/solid-signals/2024-04-17/types/core" - * @import { getOwner as GetOwner, onCleanup as OnCleanup, Owner } 
from "./packages/solid-signals/2024-04-17/types/owner" - * @import { createSignal as CreateSignal, createEffect as CreateEffect, Accessor, Setter, createMemo as CreateMemo, createRoot as CreateRoot, runWithOwner as RunWithOwner } from "./packages/solid-signals/2024-04-17/types/signals"; + * @import { SignalOptions, untrack as Untrack } from "./packages/solid-signals/2024-10-28/types/core" + * @import { getOwner as GetOwner, onCleanup as OnCleanup, Owner } from "./packages/solid-signals/2024-10-28/types/owner" + * @import { createSignal as CreateSignal, createEffect as CreateEffect, Accessor, Setter, createMemo as CreateMemo, createRoot as CreateRoot, runWithOwner as RunWithOwner } from "./packages/solid-signals/2024-10-28/types/signals"; */ function initPackages() { async function importSignals() { - return import("./packages/solid-signals/2024-04-17/script.js").then( + return import("./packages/solid-signals/2024-10-28/script.js").then( (_signals) => { const signals = { createSolidSignal: /** @type {CreateSignal} */ ( diff --git a/website/scripts/packages/solid-signals/2024-10-28/script.js b/website/scripts/packages/solid-signals/2024-10-28/script.js new file mode 100644 index 000000000..02bb65516 --- /dev/null +++ b/website/scripts/packages/solid-signals/2024-10-28/script.js @@ -0,0 +1,771 @@ +// src/error.ts +var NotReadyError = class extends Error { +}; +var NoOwnerError = class extends Error { + constructor() { + super( + "" + ); + } +}; +var ContextNotFoundError = class extends Error { + constructor() { + super( + "" + ); + } +}; + +// src/constants.ts +var STATE_CLEAN = 0; +var STATE_CHECK = 1; +var STATE_DIRTY = 2; +var STATE_DISPOSED = 3; + +// src/utils.ts +function isUndefined(value) { + return typeof value === "undefined"; +} + +// src/owner.ts +var currentOwner = null; +var defaultContext = {}; +function getOwner() { + return currentOwner; +} +function setOwner(owner) { + const out = currentOwner; + currentOwner = owner; + return out; +} +var Owner = 
class { + // We flatten the owner tree into a linked list so that we don't need a pointer to .firstChild + // However, the children are actually added in reverse creation order + // See comment at the top of the file for an example of the _nextSibling traversal + k = null; + g = null; + j = null; + a = STATE_CLEAN; + e = null; + h = defaultContext; + f = null; + constructor(signal = false) { + if (currentOwner && !signal) + currentOwner.append(this); + } + append(child) { + child.k = this; + child.j = this; + if (this.g) + this.g.j = child; + child.g = this.g; + this.g = child; + if (child.h !== this.h) { + child.h = { ...this.h, ...child.h }; + } + if (this.f) { + child.f = !child.f ? this.f : [...child.f, ...this.f]; + } + } + dispose(self = true) { + if (this.a === STATE_DISPOSED) + return; + let head = self ? this.j || this.k : this, current = this.g, next = null; + while (current && current.k === this) { + current.dispose(true); + current.n(); + next = current.g; + current.g = null; + current = next; + } + if (self) + this.n(); + if (current) + current.j = !self ? 
this : this.j; + if (head) + head.g = current; + } + n() { + if (this.j) + this.j.g = null; + this.k = null; + this.j = null; + this.h = defaultContext; + this.f = null; + this.a = STATE_DISPOSED; + this.emptyDisposal(); + } + emptyDisposal() { + if (!this.e) + return; + if (Array.isArray(this.e)) { + for (let i = 0; i < this.e.length; i++) { + const callable = this.e[i]; + callable.call(callable); + } + } else { + this.e.call(this.e); + } + this.e = null; + } + handleError(error) { + if (!this.f) + throw error; + let i = 0, len = this.f.length; + for (i = 0; i < len; i++) { + try { + this.f[i](error); + break; + } catch (e) { + error = e; + } + } + if (i === len) + throw error; + } +}; +function createContext(defaultValue, description) { + return { id: Symbol(description), defaultValue }; +} +function getContext(context, owner = currentOwner) { + if (!owner) { + throw new NoOwnerError(); + } + const value = hasContext(context, owner) ? owner.h[context.id] : context.defaultValue; + if (isUndefined(value)) { + throw new ContextNotFoundError(); + } + return value; +} +function setContext(context, value, owner = currentOwner) { + if (!owner) { + throw new NoOwnerError(); + } + owner.h = { + ...owner.h, + [context.id]: isUndefined(value) ? 
context.defaultValue : value + }; +} +function hasContext(context, owner = currentOwner) { + return !isUndefined(owner?.h[context.id]); +} +function onCleanup(disposable) { + if (!currentOwner) + return; + const node = currentOwner; + if (!node.e) { + node.e = disposable; + } else if (Array.isArray(node.e)) { + node.e.push(disposable); + } else { + node.e = [node.e, disposable]; + } +} + +// src/flags.ts +var ERROR_OFFSET = 0; +var ERROR_BIT = 1 << ERROR_OFFSET; +var LOADING_OFFSET = 1; +var LOADING_BIT = 1 << LOADING_OFFSET; +var DEFAULT_FLAGS = ERROR_BIT; + +// src/scheduler.ts +var scheduled = false; +var runningScheduled = false; +var Computations = []; +var RenderEffects = []; +var Effects = []; +function flushSync() { + if (!runningScheduled) + runScheduled(); +} +function flushQueue() { + if (scheduled) + return; + scheduled = true; + queueMicrotask(runScheduled); +} +function runTop(node) { + const ancestors = []; + for (let current = node; current !== null; current = current.k) { + if (current.a !== STATE_CLEAN) { + ancestors.push(current); + } + } + for (let i = ancestors.length - 1; i >= 0; i--) { + if (ancestors[i].a !== STATE_DISPOSED) + ancestors[i].l(); + } +} +function runScheduled() { + if (!Effects.length && !RenderEffects.length && !Computations.length) { + scheduled = false; + return; + } + runningScheduled = true; + try { + runPureQueue(Computations); + runPureQueue(RenderEffects); + runPureQueue(Effects); + } finally { + const renderEffects = RenderEffects; + const effects = Effects; + Computations = []; + Effects = []; + RenderEffects = []; + scheduled = false; + runningScheduled = false; + incrementClock(); + runEffectQueue(renderEffects); + runEffectQueue(effects); + } +} +function runPureQueue(queue) { + for (let i = 0; i < queue.length; i++) { + if (queue[i].a !== STATE_CLEAN) + runTop(queue[i]); + } +} +function runEffectQueue(queue) { + for (let i = 0; i < queue.length; i++) { + if (queue[i].q && queue[i].a !== STATE_DISPOSED) { + 
queue[i].r(queue[i].d, queue[i].o); + queue[i].q = false; + queue[i].o = queue[i].d; + } + } +} + +// src/core.ts +var currentObserver = null; +var currentMask = DEFAULT_FLAGS; +var newSources = null; +var newSourcesIndex = 0; +var newFlags = 0; +var clock = 0; +var updateCheck = null; +function getObserver() { + return currentObserver; +} +function incrementClock() { + clock++; +} +var UNCHANGED = Symbol(0); +var Computation2 = class extends Owner { + b = null; + c = null; + d; + s; + // Used in __DEV__ mode, hopefully removed in production + B; + // Using false is an optimization as an alternative to _equals: () => false + // which could enable more efficient DIRTY notification + t = isEqual; + x; + /** Whether the computation is an error or has ancestors that are unresolved */ + i = 0; + /** Which flags raised by sources are handled, vs. being passed through. */ + p = DEFAULT_FLAGS; + u = null; + v = null; + w = -1; + constructor(initialValue, compute2, options) { + super(compute2 === null); + this.s = compute2; + this.a = compute2 ? 
STATE_DIRTY : STATE_CLEAN; + this.d = initialValue; + if (options?.equals !== void 0) + this.t = options.equals; + if (options?.unobserved) + this.x = options?.unobserved; + } + y() { + if (this.s) + this.l(); + if (!this.b || this.b.length) + track(this); + newFlags |= this.i & ~currentMask; + if (this.i & ERROR_BIT) { + throw this.d; + } else { + return this.d; + } + } + /** + * Return the current value of this computation + * Automatically re-executes the surrounding computation when the value changes + */ + read() { + return this.y(); + } + /** + * Return the current value of this computation + * Automatically re-executes the surrounding computation when the value changes + * + * If the computation has any unresolved ancestors, this function waits for the value to resolve + * before continuing + */ + wait() { + if (this.loading()) { + throw new NotReadyError(); + } + return this.y(); + } + /** + * Return true if the computation is the value is dependent on an unresolved promise + * Triggers re-execution of the computation when the loading state changes + * + * This is useful especially when effects want to re-execute when a computation's + * loading state changes + */ + loading() { + if (this.v === null) { + this.v = loadingState(this); + } + return this.v.read(); + } + /** + * Return true if the computation is the computation threw an error + * Triggers re-execution of the computation when the error state changes + */ + error() { + if (this.u === null) { + this.u = errorState(this); + } + return this.u.read(); + } + /** Update the computation with a new value. */ + write(value, flags = 0, raw = false) { + const newValue = !raw && typeof value === "function" ? 
value(this.d) : value; + const valueChanged = newValue !== UNCHANGED && (!!(flags & ERROR_BIT) || this.t === false || !this.t(this.d, newValue)); + if (valueChanged) + this.d = newValue; + const changedFlagsMask = this.i ^ flags, changedFlags = changedFlagsMask & flags; + this.i = flags; + this.w = clock + 1; + if (this.c) { + for (let i = 0; i < this.c.length; i++) { + if (valueChanged) { + this.c[i].m(STATE_DIRTY); + } else if (changedFlagsMask) { + this.c[i].z(changedFlagsMask, changedFlags); + } + } + } + return this.d; + } + /** + * Set the current node's state, and recursively mark all of this node's observers as STATE_CHECK + */ + m(state) { + if (this.a >= state) + return; + this.a = state; + if (this.c) { + for (let i = 0; i < this.c.length; i++) { + this.c[i].m(STATE_CHECK); + } + } + } + /** + * Notify the computation that one of its sources has changed flags. + * + * @param mask A bitmask for which flag(s) were changed. + * @param newFlags The source's new flags, masked to just the changed ones. + */ + z(mask, newFlags2) { + if (this.a >= STATE_DIRTY) + return; + if (mask & this.p) { + this.m(STATE_DIRTY); + return; + } + if (this.a >= STATE_CHECK) + return; + const prevFlags = this.i & mask; + const deltaFlags = prevFlags ^ newFlags2; + if (newFlags2 === prevFlags) ; else if (deltaFlags & prevFlags & mask) { + this.m(STATE_CHECK); + } else { + this.i ^= deltaFlags; + if (this.c) { + for (let i = 0; i < this.c.length; i++) { + this.c[i].z(mask, newFlags2); + } + } + } + } + A(error) { + this.write(error, this.i | ERROR_BIT); + } + /** + * This is the core part of the reactivity system, which makes sure that the values are updated + * before they are read. We've also adapted it to return the loading state of the computation, + * so that we can propagate that to the computation's observers. 
+ * + * This function will ensure that the value and states we read from the computation are up to date + */ + l() { + if (this.a === STATE_DISPOSED) { + throw new Error("Tried to read a disposed computation"); + } + if (this.a === STATE_CLEAN) { + return; + } + let observerFlags = 0; + if (this.a === STATE_CHECK) { + for (let i = 0; i < this.b.length; i++) { + this.b[i].l(); + observerFlags |= this.b[i].i; + if (this.a === STATE_DIRTY) { + break; + } + } + } + if (this.a === STATE_DIRTY) { + update(this); + } else { + this.write(UNCHANGED, observerFlags); + this.a = STATE_CLEAN; + } + } + /** + * Remove ourselves from the owner graph and the computation graph + */ + n() { + if (this.a === STATE_DISPOSED) + return; + if (this.b) + removeSourceObservers(this, 0); + super.n(); + } +}; +function loadingState(node) { + const prevOwner = setOwner(node.k); + const options = void 0; + const computation = new Computation2( + void 0, + () => { + track(node); + node.l(); + return !!(node.i & LOADING_BIT); + }, + options + ); + computation.p = ERROR_BIT | LOADING_BIT; + setOwner(prevOwner); + return computation; +} +function errorState(node) { + const prevOwner = setOwner(node.k); + const options = void 0; + const computation = new Computation2( + void 0, + () => { + track(node); + node.l(); + return !!(node.i & ERROR_BIT); + }, + options + ); + computation.p = ERROR_BIT; + setOwner(prevOwner); + return computation; +} +function track(computation) { + if (currentObserver) { + if (!newSources && currentObserver.b && currentObserver.b[newSourcesIndex] === computation) { + newSourcesIndex++; + } else if (!newSources) + newSources = [computation]; + else if (computation !== newSources[newSources.length - 1]) { + newSources.push(computation); + } + if (updateCheck) { + updateCheck.d = computation.w > currentObserver.w; + } + } +} +function update(node) { + const prevSources = newSources, prevSourcesIndex = newSourcesIndex, prevFlags = newFlags; + newSources = null; + 
newSourcesIndex = 0; + newFlags = 0; + try { + node.dispose(false); + node.emptyDisposal(); + const result = compute(node, node.s, node); + node.write(result, newFlags, true); + } catch (error) { + if (error instanceof NotReadyError) { + node.write(UNCHANGED, newFlags | LOADING_BIT); + } else { + node.A(error); + } + } finally { + if (newSources) { + if (node.b) + removeSourceObservers(node, newSourcesIndex); + if (node.b && newSourcesIndex > 0) { + node.b.length = newSourcesIndex + newSources.length; + for (let i = 0; i < newSources.length; i++) { + node.b[newSourcesIndex + i] = newSources[i]; + } + } else { + node.b = newSources; + } + let source; + for (let i = newSourcesIndex; i < node.b.length; i++) { + source = node.b[i]; + if (!source.c) + source.c = [node]; + else + source.c.push(node); + } + } else if (node.b && newSourcesIndex < node.b.length) { + removeSourceObservers(node, newSourcesIndex); + node.b.length = newSourcesIndex; + } + newSources = prevSources; + newSourcesIndex = prevSourcesIndex; + newFlags = prevFlags; + node.a = STATE_CLEAN; + } +} +function removeSourceObservers(node, index) { + let source; + let swap; + for (let i = index; i < node.b.length; i++) { + source = node.b[i]; + if (source.c) { + swap = source.c.indexOf(node); + source.c[swap] = source.c[source.c.length - 1]; + source.c.pop(); + if (!source.c.length) + source.x?.(); + } + } +} +function isEqual(a, b) { + return a === b; +} +function untrack(fn) { + if (currentObserver === null) + return fn(); + return compute(getOwner(), fn, null); +} +function hasUpdated(fn) { + const current = updateCheck; + updateCheck = { d: false }; + try { + fn(); + return updateCheck.d; + } finally { + updateCheck = current; + } +} +function compute(owner, compute2, observer) { + const prevOwner = setOwner(owner), prevObserver = currentObserver, prevMask = currentMask; + currentObserver = observer; + currentMask = observer?.p ?? DEFAULT_FLAGS; + try { + return compute2(observer ? 
observer.d : void 0); + } finally { + setOwner(prevOwner); + currentObserver = prevObserver; + currentMask = prevMask; + } +} +var EagerComputation = class extends Computation2 { + constructor(initialValue, compute2, options) { + super(initialValue, compute2, options); + this.l(); + Computations.push(this); + } + m(state) { + if (this.a >= state) + return; + if (this.a === STATE_CLEAN) { + Computations.push(this); + flushQueue(); + } + this.a = state; + } +}; + +// src/effect.ts +var BaseEffect = class extends Computation2 { + r; + q = false; + o; + constructor(initialValue, compute2, effect, options) { + super(initialValue, compute2, options); + this.r = effect; + this.o = initialValue; + } + write(value) { + if (value === UNCHANGED) + return this.d; + this.d = value; + this.q = true; + return value; + } + A(error) { + this.handleError(error); + } + n() { + this.r = void 0; + this.o = void 0; + super.n(); + } +}; +var Effect = class extends BaseEffect { + constructor(initialValue, compute2, effect, options) { + super(initialValue, compute2, effect, options); + Effects.push(this); + flushQueue(); + } + m(state) { + if (this.a >= state) + return; + if (this.a === STATE_CLEAN) { + Effects.push(this); + flushQueue(); + } + this.a = state; + } +}; +var RenderEffect = class extends BaseEffect { + constructor(initialValue, compute2, effect, options) { + super(initialValue, compute2, effect, options); + this.l(); + RenderEffects.push(this); + } + m(state) { + if (this.a >= state) + return; + if (this.a === STATE_CLEAN) { + RenderEffects.push(this); + flushQueue(); + } + this.a = state; + } +}; + +// src/signals.ts +function createSignal(first, second, third) { + if (typeof first === "function") { + const memo = createMemo((p) => { + const node2 = new Computation2( + first(p ? 
untrack(p[0]) : second), + null, + third + ); + return [node2.read.bind(node2), node2.write.bind(node2)]; + }); + return [() => memo()[0](), (value) => memo()[1](value)]; + } + const node = new Computation2(first, null, second); + return [node.read.bind(node), node.write.bind(node)]; +} +function createAsync(fn, initial, options) { + const lhs = new EagerComputation(void 0, () => { + const source = fn(initial); + const isPromise = source instanceof Promise; + const iterator = source[Symbol.asyncIterator]; + if (!isPromise && !iterator) { + return { + wait() { + return source; + } + }; + } + const signal = new Computation2(initial, null, options); + signal.write(UNCHANGED, LOADING_BIT); + if (isPromise) { + source.then( + (value) => { + signal.write(value, 0); + }, + (error) => { + signal.write(error, ERROR_BIT); + } + ); + } else { + let abort = false; + onCleanup(() => abort = true); + (async () => { + try { + for await (let value of source) { + if (abort) + return; + signal.write(value, 0); + } + } catch (error) { + signal.write(error, ERROR_BIT); + } + })(); + } + return signal; + }); + return () => lhs.wait().wait(); +} +function createMemo(compute2, initialValue, options) { + let node = new Computation2( + initialValue, + compute2, + options + ); + let value; + return () => { + if (node) { + value = node.wait(); + if (!node.b?.length) + node = void 0; + } + return value; + }; +} +function createEffect(compute2, effect, initialValue, options) { + void new Effect( + initialValue, + compute2, + effect, + void 0 + ); +} +function createRenderEffect(compute2, effect, initialValue, options) { + void new RenderEffect( + initialValue, + compute2, + effect, + void 0 + ); +} +function createRoot(init) { + const owner = new Owner(); + return compute( + owner, + !init.length ? 
init : () => init(() => owner.dispose()), + null + ); +} +function runWithOwner(owner, run) { + try { + return compute(owner, run, null); + } catch (error) { + owner?.handleError(error); + return void 0; + } +} +function catchError(fn, handler) { + const owner = new Owner(); + owner.f = owner.f ? [handler, ...owner.f] : [handler]; + try { + compute(owner, fn, null); + } catch (error) { + owner.handleError(error); + } +} + +export { Computation2 as Computation, ContextNotFoundError, Effect, NoOwnerError, NotReadyError, Owner, RenderEffect, catchError, compute, createAsync, createContext, createEffect, createMemo, createRenderEffect, createRoot, createSignal, flushSync, getContext, getObserver, getOwner, hasContext, hasUpdated, isEqual, onCleanup, runWithOwner, setContext, setOwner, untrack }; diff --git a/website/scripts/packages/solid-signals/2024-10-28/types/constants.d.ts b/website/scripts/packages/solid-signals/2024-10-28/types/constants.d.ts new file mode 100644 index 000000000..c27fe1bc4 --- /dev/null +++ b/website/scripts/packages/solid-signals/2024-10-28/types/constants.d.ts @@ -0,0 +1,10 @@ +/** + * See https://dev.to/modderme123/super-charging-fine-grained-reactive-performance-47ph + * State clean corresponds to a node where all the sources are fully up to date + * State check corresponds to a node where some sources (including grandparents) may have changed + * State dirty corresponds to a node where the direct parents of a node has changed + */ +export declare const STATE_CLEAN = 0; +export declare const STATE_CHECK = 1; +export declare const STATE_DIRTY = 2; +export declare const STATE_DISPOSED = 3; diff --git a/website/scripts/packages/solid-signals/2024-10-28/types/core.d.ts b/website/scripts/packages/solid-signals/2024-10-28/types/core.d.ts new file mode 100644 index 000000000..c2554cf5a --- /dev/null +++ b/website/scripts/packages/solid-signals/2024-10-28/types/core.d.ts @@ -0,0 +1,152 @@ +/** + * Nodes for constructing a graph of reactive values 
and reactive computations. + * + * - The graph is acyclic. + * - The user inputs new values into the graph by calling .write() on one more computation nodes. + * - The user retrieves computed results from the graph by calling .read() on one or more computation nodes. + * - The library is responsible for running any necessary computations so that .read() is up to date + * with all prior .write() calls anywhere in the graph. + * - We call the input nodes 'roots' and the output nodes 'leaves' of the graph here. + * - Changes flow from roots to leaves. It would be effective but inefficient to immediately + * propagate all changes from a root through the graph to descendant leaves. Instead, we defer + * change most change propagation computation until a leaf is accessed. This allows us to + * coalesce computations and skip altogether recalculating unused sections of the graph. + * - Each computation node tracks its sources and its observers (observers are other + * elements that have this node as a source). Source and observer links are updated automatically + * as observer computations re-evaluate and call get() on their sources. + * - Each node stores a cache state (clean/check/dirty) to support the change propagation algorithm: + * + * In general, execution proceeds in three passes: + * + * 1. write() propagates changes down the graph to the leaves + * direct children are marked as dirty and their deeper descendants marked as check + * (no computations are evaluated) + * 2. read() requests that parent nodes updateIfNecessary(), which proceeds recursively up the tree + * to decide whether the node is clean (parents unchanged) or dirty (parents changed) + * 3. 
updateIfNecessary() evaluates the computation if the node is dirty (the computations are + * executed in root to leaf order) + */ +import { type Flags } from './flags'; +import { Owner } from './owner'; +export interface SignalOptions { + name?: string; + equals?: ((prev: T, next: T) => boolean) | false; + unobserved?: () => void; +} +interface SourceType { + _observers: ObserverType[] | null; + _unobserved?: () => void; + _updateIfNecessary: () => void; + _stateFlags: Flags; + _time: number; +} +interface ObserverType { + _sources: SourceType[] | null; + _notify: (state: number) => void; + _handlerMask: Flags; + _notifyFlags: (mask: Flags, newFlags: Flags) => void; + _time: number; +} +/** + * Returns the current observer. + */ +export declare function getObserver(): ObserverType | null; +export declare function incrementClock(): void; +export declare const UNCHANGED: unique symbol; +export type UNCHANGED = typeof UNCHANGED; +export declare class Computation extends Owner implements SourceType, ObserverType { + _sources: SourceType[] | null; + _observers: ObserverType[] | null; + _value: T | undefined; + _compute: null | (() => T); + _name: string | undefined; + _equals: false | ((a: T, b: T) => boolean); + _unobserved: (() => void) | undefined; + /** Whether the computation is an error or has ancestors that are unresolved */ + _stateFlags: number; + /** Which flags raised by sources are handled, vs. being passed through. 
*/ + _handlerMask: number; + _error: Computation | null; + _loading: Computation | null; + _time: number; + constructor(initialValue: T | undefined, compute: null | (() => T), options?: SignalOptions); + _read(): T; + /** + * Return the current value of this computation + * Automatically re-executes the surrounding computation when the value changes + */ + read(): T; + /** + * Return the current value of this computation + * Automatically re-executes the surrounding computation when the value changes + * + * If the computation has any unresolved ancestors, this function waits for the value to resolve + * before continuing + */ + wait(): T; + /** + * Return true if the computation is the value is dependent on an unresolved promise + * Triggers re-execution of the computation when the loading state changes + * + * This is useful especially when effects want to re-execute when a computation's + * loading state changes + */ + loading(): boolean; + /** + * Return true if the computation is the computation threw an error + * Triggers re-execution of the computation when the error state changes + */ + error(): boolean; + /** Update the computation with a new value. */ + write(value: T | ((currentValue: T) => T) | UNCHANGED, flags?: number, raw?: boolean): T; + /** + * Set the current node's state, and recursively mark all of this node's observers as STATE_CHECK + */ + _notify(state: number): void; + /** + * Notify the computation that one of its sources has changed flags. + * + * @param mask A bitmask for which flag(s) were changed. + * @param newFlags The source's new flags, masked to just the changed ones. + */ + _notifyFlags(mask: Flags, newFlags: Flags): void; + _setError(error: unknown): void; + /** + * This is the core part of the reactivity system, which makes sure that the values are updated + * before they are read. We've also adapted it to return the loading state of the computation, + * so that we can propagate that to the computation's observers. 
+ * + * This function will ensure that the value and states we read from the computation are up to date + */ + _updateIfNecessary(): void; + /** + * Remove ourselves from the owner graph and the computation graph + */ + _disposeNode(): void; +} +/** + * Reruns a computation's _compute function, producing a new value and keeping track of dependencies. + * + * It handles the updating of sources and observers, disposal of previous executions, + * and error handling if the _compute function throws. It also sets the node as loading + * if it reads any parents that are currently loading. + */ +export declare function update(node: Computation): void; +export declare function isEqual(a: T, b: T): boolean; +/** + * Returns the current value stored inside the given compute function without triggering any + * dependencies. Use `untrack` if you want to also disable owner tracking. + */ +export declare function untrack(fn: () => T): T; +export declare function hasUpdated(fn: () => any): Boolean; +/** + * A convenient wrapper that calls `compute` with the `owner` and `observer` and is guaranteed + * to reset the global context after the computation is finished even if an error is thrown. 
+ */ +export declare function compute(owner: Owner | null, compute: (val: T) => T, observer: Computation): T; +export declare function compute(owner: Owner | null, compute: (val: undefined) => T, observer: null): T; +export declare class EagerComputation extends Computation { + constructor(initialValue: T, compute: () => T, options?: SignalOptions); + _notify(state: number): void; +} +export {}; diff --git a/website/scripts/packages/solid-signals/2024-10-28/types/effect.d.ts b/website/scripts/packages/solid-signals/2024-10-28/types/effect.d.ts new file mode 100644 index 000000000..7e44c88cb --- /dev/null +++ b/website/scripts/packages/solid-signals/2024-10-28/types/effect.d.ts @@ -0,0 +1,23 @@ +import { Computation, type SignalOptions } from './core'; +/** + * Effects are the leaf nodes of our reactive graph. When their sources change, they are + * automatically added to the queue of effects to re-execute, which will cause them to fetch their + * sources and recompute + */ +export declare class BaseEffect extends Computation { + _effect: (val: T, prev: T | undefined) => void; + _modified: boolean; + _prevValue: T | undefined; + constructor(initialValue: T, compute: () => T, effect: (val: T, prev: T | undefined) => void, options?: SignalOptions); + write(value: T): T; + _setError(error: unknown): void; + _disposeNode(): void; +} +export declare class Effect extends BaseEffect { + constructor(initialValue: T, compute: () => T, effect: (val: T, prev: T | undefined) => void, options?: SignalOptions); + _notify(state: number): void; +} +export declare class RenderEffect extends BaseEffect { + constructor(initialValue: T, compute: () => T, effect: (val: T, prev: T | undefined) => void, options?: SignalOptions); + _notify(state: number): void; +} diff --git a/website/scripts/packages/solid-signals/2024-10-28/types/error.d.ts b/website/scripts/packages/solid-signals/2024-10-28/types/error.d.ts new file mode 100644 index 000000000..b91a59791 --- /dev/null +++ 
b/website/scripts/packages/solid-signals/2024-10-28/types/error.d.ts @@ -0,0 +1,11 @@ +export declare class NotReadyError extends Error { +} +export declare class NoOwnerError extends Error { + constructor(); +} +export declare class ContextNotFoundError extends Error { + constructor(); +} +export interface ErrorHandler { + (error: unknown): void; +} diff --git a/website/scripts/packages/solid-signals/2024-10-28/types/flags.d.ts b/website/scripts/packages/solid-signals/2024-10-28/types/flags.d.ts new file mode 100644 index 000000000..b11fbb1d1 --- /dev/null +++ b/website/scripts/packages/solid-signals/2024-10-28/types/flags.d.ts @@ -0,0 +1,8 @@ +export type Flags = number; +export declare const ERROR_OFFSET = 0; +export declare const ERROR_BIT: number; +export declare const ERROR: unique symbol; +export declare const LOADING_OFFSET = 1; +export declare const LOADING_BIT: number; +export declare const LOADING: unique symbol; +export declare const DEFAULT_FLAGS: number; diff --git a/website/scripts/packages/solid-signals/2024-10-28/types/index.d.ts b/website/scripts/packages/solid-signals/2024-10-28/types/index.d.ts new file mode 100644 index 000000000..53b9c5a00 --- /dev/null +++ b/website/scripts/packages/solid-signals/2024-10-28/types/index.d.ts @@ -0,0 +1,6 @@ +export { ContextNotFoundError, NoOwnerError, NotReadyError, type ErrorHandler, } from './error'; +export { Owner, createContext, getContext, setContext, hasContext, getOwner, setOwner, onCleanup, type Context, type ContextRecord, type Disposable, } from './owner'; +export { Computation, compute, getObserver, isEqual, untrack, hasUpdated, type SignalOptions, } from './core'; +export { Effect, RenderEffect } from './effect'; +export { flushSync } from './scheduler'; +export * from './signals'; diff --git a/website/scripts/packages/solid-signals/2024-10-28/types/map.d.ts b/website/scripts/packages/solid-signals/2024-10-28/types/map.d.ts new file mode 100644 index 000000000..b42a53237 --- /dev/null +++ 
b/website/scripts/packages/solid-signals/2024-10-28/types/map.d.ts @@ -0,0 +1,12 @@ +import type { Accessor } from './signals'; +export type Maybe = T | void | null | undefined | false; +/** + * Reactive map helper that caches each list item by reference to reduce unnecessary mapping on + * updates. + * + * @see {@link https://github.com/solidjs/x-reactivity#maparray} + */ +export declare function mapArray(list: Accessor>, map: (value: Accessor, index: Accessor) => MappedItem, options?: { + keyed?: boolean | ((item: Item) => any); + name?: string; +}): Accessor; diff --git a/website/scripts/packages/solid-signals/2024-10-28/types/owner.d.ts b/website/scripts/packages/solid-signals/2024-10-28/types/owner.d.ts new file mode 100644 index 000000000..96e76dbe1 --- /dev/null +++ b/website/scripts/packages/solid-signals/2024-10-28/types/owner.d.ts @@ -0,0 +1,88 @@ +/** + * Owner tracking is used to enable nested tracking scopes with automatic cleanup. + * We also use owners to also keep track of which error handling context we are in. + * + * If you write the following + * + * const a = createOwner(() => { + * const b = createOwner(() => {}); + * + * const c = createOwner(() => { + * const d = createOwner(() => {}); + * }); + * + * const e = createOwner(() => {}); + * }); + * + * The owner tree will look like this: + * + * a + * /|\ + * b-c-e + * | + * d + * + * Following the _nextSibling pointers of each owner will first give you its children, and then its siblings (in reverse). + * a -> e -> c -> d -> b + * + * Note that the owner tree is largely orthogonal to the reactivity tree, and is much closer to the component tree. + */ +import { type ErrorHandler } from './error'; +export type ContextRecord = Record; +export interface Disposable { + (): void; +} +/** + * Returns the currently executing parent owner. 
+ */ +export declare function getOwner(): Owner | null; +export declare function setOwner(owner: Owner | null): Owner | null; +export declare class Owner { + _parent: Owner | null; + _nextSibling: Owner | null; + _prevSibling: Owner | null; + _state: number; + _disposal: Disposable | Disposable[] | null; + _context: ContextRecord; + _handlers: ErrorHandler[] | null; + constructor(signal?: boolean); + append(child: Owner): void; + dispose(this: Owner, self?: boolean): void; + _disposeNode(): void; + emptyDisposal(): void; + handleError(error: unknown): void; +} +export interface Context { + readonly id: symbol; + readonly defaultValue: T | undefined; +} +/** + * Context provides a form of dependency injection. It is used to save from needing to pass + * data as props through intermediate components. This function creates a new context object + * that can be used with `getContext` and `setContext`. + * + * A default value can be provided here which will be used when a specific value is not provided + * via a `setContext` call. + */ +export declare function createContext(defaultValue?: T, description?: string): Context; +/** + * Attempts to get a context value for the given key. + * + * @throws `NoOwnerError` if there's no owner at the time of call. + * @throws `ContextNotFoundError` if a context value has not been set yet. + */ +export declare function getContext(context: Context, owner?: Owner | null): T; +/** + * Attempts to set a context value on the parent scope with the given key. + * + * @throws `NoOwnerError` if there's no owner at the time of call. + */ +export declare function setContext(context: Context, value?: T, owner?: Owner | null): void; +/** + * Whether the given context is currently defined. + */ +export declare function hasContext(context: Context, owner?: Owner | null): boolean; +/** + * Runs the given function when the parent owner computation is being disposed. 
+ */ +export declare function onCleanup(disposable: Disposable): void; diff --git a/website/scripts/packages/solid-signals/2024-10-28/types/scheduler.d.ts b/website/scripts/packages/solid-signals/2024-10-28/types/scheduler.d.ts new file mode 100644 index 000000000..091a4feea --- /dev/null +++ b/website/scripts/packages/solid-signals/2024-10-28/types/scheduler.d.ts @@ -0,0 +1,9 @@ +import { Computation } from './core'; +import type { Effect, RenderEffect } from './effect'; +export declare let Computations: Computation[], RenderEffects: RenderEffect[], Effects: Effect[]; +/** + * By default, changes are batched on the microtask queue which is an async process. You can flush + * the queue synchronously to get the latest updates by calling `flushSync()`. + */ +export declare function flushSync(): void; +export declare function flushQueue(): void; diff --git a/website/scripts/packages/solid-signals/2024-10-28/types/signals.d.ts b/website/scripts/packages/solid-signals/2024-10-28/types/signals.d.ts new file mode 100644 index 000000000..3e706dcb5 --- /dev/null +++ b/website/scripts/packages/solid-signals/2024-10-28/types/signals.d.ts @@ -0,0 +1,56 @@ +import type { SignalOptions } from './core'; +import { Owner } from './owner'; +export interface Accessor { + (): T; +} +export interface Setter { + (value: T | SetValue): T; +} +export interface SetValue { + (currentValue: T): T; +} +export type Signal = [read: Accessor, write: Setter]; +/** + * Wraps the given value into a signal. The signal will return the current value when invoked + * `fn()`, and provide a simple write API via `write()`. The value can now be observed + * when used inside other computations created with `computed` and `effect`. 
 + */ +export declare function createSignal(initialValue: Exclude, options?: SignalOptions): Signal; +export declare function createSignal(fn: (prev?: T) => T, initialValue?: T, options?: SignalOptions): Signal; +export declare function createAsync(fn: (prev?: T) => Promise | AsyncIterable | T, initial?: T, options?: SignalOptions): Accessor; +/** + * Creates a new computation whose value is computed and returned by the given function. The given + * compute function is _only_ re-run when one of its dependencies is updated. Dependencies + * are all signals that are read during execution. + */ +export declare function createMemo(compute: (prev?: T) => T, initialValue?: T, options?: SignalOptions): Accessor; +/** + * Invokes the given function each time any of the signals that are read inside are updated + * (i.e., their value changes). The effect is immediately invoked on initialization. + */ +export declare function createEffect(compute: () => T, effect: (v: T) => (() => void) | void, initialValue?: T, options?: { + name?: string; +}): void; +/** + * Invokes the given function each time any of the signals that are read inside are updated + * (i.e., their value changes). The effect is immediately invoked on initialization. + */ +export declare function createRenderEffect(compute: () => T, effect: (v: T) => (() => void) | void, initialValue?: T, options?: { + name?: string; +}): void; +/** + * Creates a computation root which is given a `dispose()` function to dispose of all inner + * computations. + */ +export declare function createRoot(init: ((dispose: () => void) => T) | (() => T)): T; +/** + * Runs the given function in the given owner so that error handling and cleanups continue to work. + * + * Warning: Usually there are simpler ways of modeling a problem that avoid using this function + */ +export declare function runWithOwner(owner: Owner | null, run: () => T): T | undefined; +/** + * Runs the given function when an error is thrown in a child owner. 
If the error is thrown again + * inside the error handler, it will trigger the next available parent owner handler. + */ +export declare function catchError(fn: () => T, handler: (error: unknown) => void): void; diff --git a/website/scripts/packages/solid-signals/2024-10-28/types/store.d.ts b/website/scripts/packages/solid-signals/2024-10-28/types/store.d.ts new file mode 100644 index 000000000..4e05cd1d8 --- /dev/null +++ b/website/scripts/packages/solid-signals/2024-10-28/types/store.d.ts @@ -0,0 +1,31 @@ +export type Store = Readonly; +export type StoreSetter = (fn: (state: T) => void) => void; +declare const $TRACK: unique symbol, $PROXY: unique symbol; +export { $PROXY, $TRACK }; +export type StoreNode = Record; +export declare namespace SolidStore { + interface Unwrappable { + } +} +export type NotWrappable = string | number | bigint | symbol | boolean | Function | null | undefined | SolidStore.Unwrappable[keyof SolidStore.Unwrappable]; +export declare function isWrappable(obj: T | NotWrappable): obj is T; +/** + * Returns the underlying data in the store without a proxy. 
+ * @param item store proxy object + * @example + * ```js + * const initial = {z...}; + * const [state, setState] = createStore(initial); + * initial === state; // => false + * initial === unwrap(state); // => true + * ``` + */ +export declare function unwrap(item: T, set?: Set): T; +export declare function createStore(store: T | Store): [get: Store, set: StoreSetter]; +export declare function createStore(fn: (store: T) => void, store: T | Store): [get: Store, set: StoreSetter]; +/** + * Creates a mutable derived value + * + * @see {@link https://github.com/solidjs/x-reactivity#createprojection} + */ +export declare function createProjection(fn: (draft: T) => void, initialValue: T): Readonly; diff --git a/website/scripts/packages/solid-signals/2024-10-28/types/utils.d.ts b/website/scripts/packages/solid-signals/2024-10-28/types/utils.d.ts new file mode 100644 index 000000000..6db6309f2 --- /dev/null +++ b/website/scripts/packages/solid-signals/2024-10-28/types/utils.d.ts @@ -0,0 +1 @@ +export declare function isUndefined(value: any): value is undefined; diff --git a/website/scripts/service-worker.js b/website/scripts/service-worker.js index 3af08c3b2..348171cb2 100644 --- a/website/scripts/service-worker.js +++ b/website/scripts/service-worker.js @@ -19,7 +19,7 @@ self.addEventListener("install", (_event) => { "/styles/chart.css", "/scripts/packages/lean-qr/v2.3.4/script.js", "/scripts/packages/lightweight-charts/v4.2.0/script.js", - "/scripts/packages/solid-signals/2024-04-17/script.js", + "/scripts/packages/solid-signals/2024-10-28/script.js", "/scripts/packages/ufuzzy/v1.0.14/script.js", ]); }), diff --git a/website/scripts/types/self.d.ts b/website/scripts/types/self.d.ts index 39723ce95..3b176a8a6 100644 --- a/website/scripts/types/self.d.ts +++ b/website/scripts/types/self.d.ts @@ -1,7 +1,7 @@ import { Accessor, Setter, -} from "../packages/solid-signals/2024-04-17/types/signals"; +} from "../packages/solid-signals/2024-10-28/types/signals"; import { 
DeepPartial, BaselineStyleOptions, @@ -17,7 +17,7 @@ import { ISeriesApi, } from "../packages/lightweight-charts/v4.2.0/types"; import { DatePath, HeightPath, LastPath } from "./paths"; -import { Owner } from "../packages/solid-signals/2024-04-17/types/owner"; +import { Owner } from "../packages/solid-signals/2024-10-28/types/owner"; type GrowToSize = A["length"] extends N ? A