diff --git a/.gitignore b/.gitignore
index 4f590c2e7..d4d3b3b90 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,7 @@
.DS_Store
+/app-next
+/app-html
/datasets
/datasets2
/datasets_*
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1c84b7eae..1bc191e76 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -13,6 +13,7 @@
- Auto fetch prices from the main Satonomics instance if missing instead of only trying Kraken's and Binance's API which are limited to the last 16 hours
- Merged the core of `HeightMap` and `DateMap` structs into `GenericMap`
- Added `Height` struct and many others
+ - Reorganized outputs of both the parser and the server for ease of use and easier sync compatibility
- CLI
- Added an argument parser for improved UX with several options
- Datasets
@@ -76,7 +77,7 @@
- Run file
- Only run with a watcher if `cargo watch` is available
-- Added trigger folder to automatically restart when a new dataset has been added in the parser
+ - Removed id_to_path file in favor of only `paths.d.ts` in `app/src/types`
## v. 0.2.0 | [851286](https://mempool.space/block/0000000000000000000281ca7f1bf8c50702bfca168c7af1bdc67c977c1ac8ed) - 2024/07/08
diff --git a/app/.gitignore b/app/.gitignore
index 71ed004fc..f695b26ba 100644
--- a/app/.gitignore
+++ b/app/.gitignore
@@ -6,4 +6,5 @@ dev-dist
visualizer
# Local Netlify folder
.netlify
-.wrangler
\ No newline at end of file
+.wrangler
+paths.d.ts
\ No newline at end of file
diff --git a/app/index.html b/app/index.html
index 9e7b83e58..2c8a5f98e 100644
--- a/app/index.html
+++ b/app/index.html
@@ -1,5 +1,5 @@
-
+
;
joined: string;
}) {
- const seconds = joined.length * 2;
+ const p = createRWS(undefined as HTMLParagraphElement | undefined);
+
+ const seconds = createRWS(joined.length * 2);
const wasOnceOn = createRWS(false);
@@ -133,13 +135,18 @@ function TextWrapper({
}
});
+ onMount(() => {
+ seconds.set(Math.round(p()!.clientWidth / 20));
+ });
+
return (
({
- scale,
- groupedKeysToURLPath,
-}: {
- scale: Scale;
- groupedKeysToURLPath: GroupedKeysToURLPath[Scale];
-}) {
- type Key = keyof typeof groupedKeysToURLPath;
- type ResourceData = ReturnType>;
-
- type ResourceDatasets = Record, ResourceData>;
-
- const datasets = groupedKeysToURLPath as any as ResourceDatasets;
-
- for (const key in groupedKeysToURLPath) {
- if ((key as Key) !== "price") {
- datasets[key as unknown as Exclude] = createResourceDataset(
- {
- scale,
- path: groupedKeysToURLPath[key as Key] as any,
- },
- );
- }
- }
-
- const price = createResourceDataset({
- scale,
- path: `/${scale}-to-price`,
- });
-
- Object.assign(datasets, { price });
-
- return datasets;
-}
diff --git a/app/src/scripts/datasets/date.ts b/app/src/scripts/datasets/date.ts
deleted file mode 100644
index f5f5a8382..000000000
--- a/app/src/scripts/datasets/date.ts
+++ /dev/null
@@ -1,36 +0,0 @@
-import { createResourceDataset } from "./resource";
-
-export { averages } from "./consts/averages";
-
-export function createDateDatasets({
- groupedKeysToURLPath,
-}: {
- groupedKeysToURLPath: GroupedKeysToURLPath["date"];
-}) {
- type Key = keyof typeof groupedKeysToURLPath;
- type ResourceData = ReturnType>;
-
- type ResourceDatasets = Record, ResourceData>;
-
- const datasets = groupedKeysToURLPath as any as ResourceDatasets;
-
- for (const key in groupedKeysToURLPath) {
- if ((key as Key) !== "price") {
- datasets[key as Exclude] = createResourceDataset<"date">({
- scale: "date",
- path: groupedKeysToURLPath[key as Key],
- });
- }
- }
-
- const price = createResourceDataset<"date", OHLC>({
- scale: "date",
- path: "/date-to-price",
- });
-
- Object.assign(datasets, { price });
-
- return datasets as ResourceDatasets & {
- price: ResourceDataset<"date", OHLC>;
- };
-}
diff --git a/app/src/scripts/datasets/height.ts b/app/src/scripts/datasets/height.ts
deleted file mode 100644
index 0cc38813b..000000000
--- a/app/src/scripts/datasets/height.ts
+++ /dev/null
@@ -1,34 +0,0 @@
-import { createResourceDataset } from "./resource";
-
-export function createHeightDatasets({
- groupedKeysToURLPath,
-}: {
- groupedKeysToURLPath: GroupedKeysToURLPath["height"];
-}) {
- type Key = keyof typeof groupedKeysToURLPath;
- type ResourceData = ReturnType>;
-
- type ResourceDatasets = Record, ResourceData>;
-
- const datasets = groupedKeysToURLPath as any as ResourceDatasets;
-
- for (const key in groupedKeysToURLPath) {
- if ((key as Key) !== "price") {
- datasets[key as Exclude] = createResourceDataset<"height">({
- scale: "height",
- path: groupedKeysToURLPath[key as Key],
- });
- }
- }
-
- const price = createResourceDataset<"height", OHLC>({
- scale: "height",
- path: "/height-to-price",
- });
-
- Object.assign(datasets, { price });
-
- return datasets as ResourceDatasets & {
- price: ResourceDataset<"height", OHLC>;
- };
-}
diff --git a/app/src/scripts/datasets/index.ts b/app/src/scripts/datasets/index.ts
index abdfc1f52..6889e170d 100644
--- a/app/src/scripts/datasets/index.ts
+++ b/app/src/scripts/datasets/index.ts
@@ -5,8 +5,8 @@ export const scales = ["date" as const, "height" as const];
export const HEIGHT_CHUNK_SIZE = 10_000;
export function createDatasets() {
- const date = new Map>();
- const height = new Map>();
+ const date = new Map>();
+ const height = new Map>();
function getOrImport(
scale: Scale,
diff --git a/app/src/scripts/datasets/types.d.ts b/app/src/scripts/datasets/types.d.ts
index 1a944898b..643ecf247 100644
--- a/app/src/scripts/datasets/types.d.ts
+++ b/app/src/scripts/datasets/types.d.ts
@@ -68,17 +68,8 @@ interface OHLC {
close: number;
}
-type GroupedKeysToURLPath =
- typeof import("/src/../../datasets/grouped_keys_to_url_path.json");
-
-type DateDatasetPath = import("/src/../../datasets/paths").DatePath;
-
-type HeightDatasetPath = import("/src/../../datasets/paths").HeightPath;
-
-type LastDataPath = import("/src/../../datasets/paths").LastPath;
-
type DatasetPath = Scale extends "date"
- ? DateDatasetPath
- : HeightDatasetPath;
+ ? DatePath
+ : HeightPath;
-type AnyDatasetPath = DateDatasetPath | HeightDatasetPath;
+type AnyDatasetPath = DatePath | HeightPath;
diff --git a/app/src/scripts/presets/hodlers/index.ts b/app/src/scripts/presets/hodlers/index.ts
index d620dc55a..cccd5a318 100644
--- a/app/src/scripts/presets/hodlers/index.ts
+++ b/app/src/scripts/presets/hodlers/index.ts
@@ -18,7 +18,7 @@ export function createPresets(scale: ResourceScale) {
title: `Hodl Supply`,
description: "",
icon: IconTablerRipple,
- unit: "Bitcoin",
+ unit: "Percentage",
bottom: [
{
title: `24h`,
diff --git a/app/src/scripts/presets/market/averages/index.ts b/app/src/scripts/presets/market/averages/index.ts
index 987a7e97d..27b920b1f 100644
--- a/app/src/scripts/presets/market/averages/index.ts
+++ b/app/src/scripts/presets/market/averages/index.ts
@@ -1,4 +1,4 @@
-import { averages } from "/src/scripts/datasets/date";
+import { averages } from "/src/scripts/datasets/consts/averages";
import { colors } from "/src/scripts/utils/colors";
import { createRatioFolder } from "../../templates/ratio";
diff --git a/app/src/types/self.d.ts b/app/src/types/self.d.ts
index 13e6b64ce..f9b9655a9 100644
--- a/app/src/types/self.d.ts
+++ b/app/src/types/self.d.ts
@@ -1,11 +1,3 @@
-interface Dated {
- date: string;
-}
-
-interface Heighted {
- height: number;
-}
-
interface Valued {
value: number;
}
diff --git a/parser/.gitignore b/parser/.gitignore
index 7ff0b82bf..47cec896f 100644
--- a/parser/.gitignore
+++ b/parser/.gitignore
@@ -8,7 +8,9 @@ flamegraph.svg
/profile.json
/inputs*/
+/in
/outputs*/
+/out
/snapshots*/
/exports*/
/imports*/
diff --git a/parser/src/actions/iter_blocks.rs b/parser/src/actions/iter_blocks.rs
index 41279f207..cb39c76d6 100644
--- a/parser/src/actions/iter_blocks.rs
+++ b/parser/src/actions/iter_blocks.rs
@@ -73,26 +73,6 @@ pub fn iter_blocks(bitcoin_db: &BitcoinDB, block_count: usize) -> color_eyre::Re
let current_block_date = Date::from_timestamp(timestamp);
let current_block_height = height + blocks_loop_i;
- if states.address_cohorts_durable_states.is_none()
- && datasets
- .address
- .needs_durable_states(current_block_height, current_block_date)
- {
- states.address_cohorts_durable_states =
- Some(AddressCohortsDurableStates::init(
- &mut databases.address_index_to_address_data,
- ));
- }
-
- if states.utxo_cohorts_durable_states.is_none()
- && datasets
- .utxo
- .needs_durable_states(current_block_height, current_block_date)
- {
- states.utxo_cohorts_durable_states =
- Some(UTXOCohortsDurableStates::init(&states.date_data_vec));
- }
-
let next_block_date = next_block_opt
.as_ref()
.map(|next_block| Date::from_timestamp(next_block.header.time));
@@ -137,6 +117,27 @@ pub fn iter_blocks(bitcoin_db: &BitcoinDB, block_count: usize) -> color_eyre::Re
blocks_loop_date,
);
+ if states.address_cohorts_durable_states.is_none()
+ && (compute_addresses
+ || datasets
+ .address
+ .needs_durable_states(current_block_height, current_block_date))
+ {
+ states.address_cohorts_durable_states =
+ Some(AddressCohortsDurableStates::init(
+ &mut databases.address_index_to_address_data,
+ ));
+ }
+
+ if states.utxo_cohorts_durable_states.is_none()
+ && datasets
+ .utxo
+ .needs_durable_states(current_block_height, current_block_date)
+ {
+ states.utxo_cohorts_durable_states =
+ Some(UTXOCohortsDurableStates::init(&states.date_data_vec));
+ }
+
parse(ParseData {
bitcoin_db,
block: current_block,
diff --git a/parser/src/datasets/mod.rs b/parser/src/datasets/mod.rs
index 93f6750aa..eaa8f5f1b 100644
--- a/parser/src/datasets/mod.rs
+++ b/parser/src/datasets/mod.rs
@@ -277,13 +277,16 @@ impl AllDatasets {
let datasets_len = path_to_type.len();
- Json::export("../datasets/disk_path_to_type.json", &path_to_type)?;
+ let server_inputs_path = "../server/in";
- let server_trigger_path = "../server/.trigger";
+ fs::create_dir_all(server_inputs_path)?;
- fs::create_dir_all(server_trigger_path)?;
+ Json::export(
+ &format!("{server_inputs_path}/disk_path_to_type.json"),
+ &path_to_type,
+ )?;
- let datasets_len_path = format!("{server_trigger_path}/datasets_len.txt");
+ let datasets_len_path = format!("{server_inputs_path}/datasets_len.txt");
if let Ok(len) = fs::read_to_string(&datasets_len_path) {
if let Ok(len) = len.parse::<usize>() {
diff --git a/parser/src/datasets/price/mod.rs b/parser/src/datasets/price/mod.rs
index d8fbf1249..c171128a2 100644
--- a/parser/src/datasets/price/mod.rs
+++ b/parser/src/datasets/price/mod.rs
@@ -26,6 +26,7 @@ pub struct PriceDatasets {
kraken_daily: Option>,
kraken_1mn: Option>,
binance_1mn: Option>,
+ binance_daily: Option>,
binance_har: Option>,
satonomics_by_height: BTreeMap>,
satonomics_by_date: BTreeMap>,
@@ -89,6 +90,7 @@ impl PriceDatasets {
min_initial_states: MinInitialStates::default(),
binance_1mn: None,
+ binance_daily: None,
binance_har: None,
kraken_1mn: None,
kraken_daily: None,
@@ -310,8 +312,9 @@ impl PriceDatasets {
Ok(self.ohlcs.date.get(&date).unwrap().to_owned())
} else {
let ohlc = self
- .get_from_date_satonomics(&date)
- .or_else(|_| self.get_from_daily_kraken(&date))?;
+ .get_from_daily_kraken(&date)
+ .or_else(|_| self.get_from_daily_binance(&date))
+ .or_else(|_| self.get_from_date_satonomics(&date))?;
self.ohlcs.date.insert(date, ohlc);
@@ -323,7 +326,16 @@ impl PriceDatasets {
let chunk_id = date.to_chunk_id();
#[allow(clippy::map_entry)]
- if !self.satonomics_by_date.contains_key(&chunk_id) {
+ if !self.satonomics_by_date.contains_key(&chunk_id)
+ || self
+ .satonomics_by_date
+ .get(&chunk_id)
+ .unwrap()
+ .last_key_value()
+ .unwrap()
+ .0
+ <= date
+ {
self.satonomics_by_date
.insert(chunk_id, Satonomics::fetch_date_prices(chunk_id)?);
}
@@ -337,9 +349,17 @@ impl PriceDatasets {
}
fn get_from_daily_kraken(&mut self, date: &Date) -> color_eyre::Result {
- if self.kraken_daily.is_none() {
- self.kraken_daily
- .replace(Kraken::fetch_daily_prices().or_else(|_| Binance::fetch_daily_prices())?);
+ if self.kraken_daily.is_none()
+ || self
+ .kraken_daily
+ .as_ref()
+ .unwrap()
+ .last_key_value()
+ .unwrap()
+ .0
+ <= date
+ {
+ self.kraken_daily.replace(Kraken::fetch_daily_prices()?);
}
self.kraken_daily
@@ -350,6 +370,28 @@ impl PriceDatasets {
.ok_or(Error::msg("Couldn't find date"))
}
+ fn get_from_daily_binance(&mut self, date: &Date) -> color_eyre::Result {
+ if self.binance_daily.is_none()
+ || self
+ .binance_daily
+ .as_ref()
+ .unwrap()
+ .last_key_value()
+ .unwrap()
+ .0
+ <= date
+ {
+ self.binance_daily.replace(Binance::fetch_daily_prices()?);
+ }
+
+ self.binance_daily
+ .as_ref()
+ .unwrap()
+ .get(date)
+ .cloned()
+ .ok_or(Error::msg("Couldn't find date"))
+ }
+
pub fn get_height_ohlc(
&mut self,
height: Height,
@@ -380,13 +422,13 @@ impl PriceDatasets {
let previous_timestamp = previous_timestamp.map(clean_timestamp);
let ohlc = self
- .get_from_height_satonomics(&height)
+ .get_from_1mn_kraken(timestamp, previous_timestamp)
.unwrap_or_else(|_| {
- self.get_from_1mn_kraken(timestamp, previous_timestamp)
+ self.get_from_1mn_binance(timestamp, previous_timestamp)
.unwrap_or_else(|_| {
- self.get_from_1mn_binance(timestamp, previous_timestamp)
+ self.get_from_har_binance(timestamp, previous_timestamp)
.unwrap_or_else(|_| {
- self.get_from_har_binance(timestamp, previous_timestamp)
+ self.get_from_height_satonomics(&height)
.unwrap_or_else(|_| {
let date = Date::from_timestamp(timestamp);
@@ -419,7 +461,10 @@ How to fix this:
let chunk_id = height.to_chunk_id();
#[allow(clippy::map_entry)]
- if !self.satonomics_by_height.contains_key(&chunk_id) {
+ if !self.satonomics_by_height.contains_key(&chunk_id)
+ || ((chunk_id.to_usize() + self.satonomics_by_height.get(&chunk_id).unwrap().len())
+ <= height.to_usize())
+ {
self.satonomics_by_height
.insert(chunk_id, Satonomics::fetch_height_prices(chunk_id)?);
}
@@ -437,7 +482,16 @@ How to fix this:
timestamp: u32,
previous_timestamp: Option,
) -> color_eyre::Result {
- if self.kraken_1mn.is_none() {
+ if self.kraken_1mn.is_none()
+ || self
+ .kraken_1mn
+ .as_ref()
+ .unwrap()
+ .last_key_value()
+ .unwrap()
+ .0
+ <= &timestamp
+ {
self.kraken_1mn.replace(Kraken::fetch_1mn_prices()?);
}
@@ -449,7 +503,16 @@ How to fix this:
timestamp: u32,
previous_timestamp: Option,
) -> color_eyre::Result {
- if self.binance_1mn.is_none() {
+ if self.binance_1mn.is_none()
+ || self
+ .binance_1mn
+ .as_ref()
+ .unwrap()
+ .last_key_value()
+ .unwrap()
+ .0
+ <= &timestamp
+ {
self.binance_1mn.replace(Binance::fetch_1mn_prices()?);
}
diff --git a/parser/src/io/consts.rs b/parser/src/io/consts.rs
index 293a07581..62a50ceb6 100644
--- a/parser/src/io/consts.rs
+++ b/parser/src/io/consts.rs
@@ -1,2 +1,2 @@
-pub const IMPORTS_FOLDER_PATH: &str = "./imports";
+pub const INPUTS_FOLDER_PATH: &str = "./in";
pub const OUTPUTS_FOLDER_PATH: &str = "./target/outputs";
diff --git a/parser/src/price/binance.rs b/parser/src/price/binance.rs
index 6c8934f2b..4ef3e9e80 100644
--- a/parser/src/price/binance.rs
+++ b/parser/src/price/binance.rs
@@ -1,6 +1,6 @@
#![allow(dead_code)]
-use std::{collections::BTreeMap, path::Path};
+use std::{collections::BTreeMap, fs, path::Path};
use color_eyre::eyre::ContextCompat;
use itertools::Itertools;
@@ -8,7 +8,7 @@ use serde_json::Value;
use crate::{
datasets::OHLC,
- io::{Json, IMPORTS_FOLDER_PATH},
+ io::{Json, INPUTS_FOLDER_PATH},
structs::Date,
utils::{log, retry},
};
@@ -19,7 +19,9 @@ impl Binance {
pub fn read_har_file() -> color_eyre::Result> {
log("binance: read har file");
- let path_binance_har = Path::new(IMPORTS_FOLDER_PATH).join("binance.har");
+ fs::create_dir_all(INPUTS_FOLDER_PATH)?;
+
+ let path_binance_har = Path::new(INPUTS_FOLDER_PATH).join("binance.har");
let json: BTreeMap =
Json::import(path_binance_har.to_str().unwrap()).unwrap_or_default();
@@ -195,7 +197,7 @@ impl Binance {
.collect::>())
},
10,
- 5,
+ 10,
)
}
}
diff --git a/parser/src/price/kraken.rs b/parser/src/price/kraken.rs
index cd29a8e5b..da51face3 100644
--- a/parser/src/price/kraken.rs
+++ b/parser/src/price/kraken.rs
@@ -62,7 +62,7 @@ impl Kraken {
.collect::>())
},
10,
- 5,
+ 10,
)
}
@@ -117,7 +117,7 @@ impl Kraken {
.collect::>())
},
10,
- 5,
+ 10,
)
}
}
diff --git a/parser/src/price/satonomics.rs b/parser/src/price/satonomics.rs
index 2453b9481..4a02a1011 100644
--- a/parser/src/price/satonomics.rs
+++ b/parser/src/price/satonomics.rs
@@ -21,11 +21,11 @@ const RETRIES: usize = 10;
impl Satonomics {
fn get_base_url(try_index: usize) -> &'static str {
- if try_index < RETRIES / 2 {
- SATONOMICS_OFFICIAL_URL
- } else {
- SATONOMICS_OFFICIAL_BACKUP_URL
- }
+ // if try_index < RETRIES / 2 {
+ SATONOMICS_OFFICIAL_URL
+ // } else {
+ // SATONOMICS_OFFICIAL_BACKUP_URL
+ // }
}
pub fn fetch_height_prices(chunk_id: HeightMapChunkId) -> color_eyre::Result> {
diff --git a/server/.gitignore b/server/.gitignore
index 12c3a4b28..9788728f4 100644
--- a/server/.gitignore
+++ b/server/.gitignore
@@ -1,4 +1,4 @@
/target
.DS_Store
/parser.log
-.trigger
+/in
diff --git a/server/run.sh b/server/run.sh
index 4f525d71b..754d9c1b1 100755
--- a/server/run.sh
+++ b/server/run.sh
@@ -1,5 +1,5 @@
if cargo watch --help &> /dev/null; then
- cargo watch --no-vcs-ignores -w "./src" -w "./run.sh" -w ".trigger" -x "run -r"
+ cargo watch --no-vcs-ignores -w "./src" -w "./run.sh" -w "./in/datasets_len.txt" -x "run -r"
else
cargo run -r
fi
diff --git a/server/src/main.rs b/server/src/main.rs
index da6916a19..f9d217790 100644
--- a/server/src/main.rs
+++ b/server/src/main.rs
@@ -38,7 +38,7 @@ async fn main() -> color_eyre::Result<()> {
let routes = Routes::build();
- routes.generate_front_end_files();
+ routes.generate_dts_file();
let state = AppState {
routes: Arc::new(routes),
diff --git a/server/src/routes.rs b/server/src/routes.rs
index 466a46203..6d5b76dd5 100644
--- a/server/src/routes.rs
+++ b/server/src/routes.rs
@@ -20,12 +20,13 @@ pub struct Route {
#[derive(Clone, Default, Deref, DerefMut)]
pub struct Routes(pub Grouped>);
-const DATASETS_PATH: &str = "../datasets";
+const INPUTS_PATH: &str = "./in";
+const APP_TYPES_PATH: &str = "../app/src/types";
impl Routes {
pub fn build() -> Self {
let path_to_type: BTreeMap =
- Json::import(&format!("{DATASETS_PATH}/disk_path_to_type.json")).unwrap();
+ Json::import(&format!("{INPUTS_PATH}/disk_path_to_type.json")).unwrap();
let mut routes = Routes::default();
@@ -99,42 +100,14 @@ impl Routes {
routes
}
- pub fn generate_front_end_files(&self) {
- self.generate_json_group_files();
- self.generate_definition_files();
- }
-
- fn generate_json_group_files(&self) {
- let map_to_group = |map: &HashMap| -> BTreeMap {
- map.iter()
- .map(|(key, route)| (key.to_owned(), route.url_path.to_owned()))
- .collect()
- };
-
- let date_group = map_to_group(&self.date);
- let height_group = map_to_group(&self.height);
- let last_group = map_to_group(&self.last);
-
- let groups = Paths(Grouped {
- date: date_group,
- height: height_group,
- last: last_group,
- });
-
- let _ = Json::export(
- &format!("{DATASETS_PATH}/grouped_keys_to_url_path.json"),
- &groups,
- );
- }
-
- fn generate_definition_files(&self) {
+ pub fn generate_dts_file(&self) {
let map_to_type = |name: &str, map: &HashMap| -> String {
let paths = map
.values()
.map(|route| format!("\"{}\"", route.url_path))
.join(" | ");
- format!("export type {}Path = {};\n", name, paths)
+ format!("type {}Path = {};\n", name, paths)
};
let date_type = map_to_type("Date", &self.date);
@@ -144,7 +117,7 @@ impl Routes {
let last_type = map_to_type("Last", &self.last);
fs::write(
- format!("{DATASETS_PATH}/paths.d.ts"),
+ format!("{APP_TYPES_PATH}/paths.d.ts"),
format!("{date_type}\n{height_type}\n{last_type}"),
)
.unwrap();