bitview: reorg part 6

This commit is contained in:
nym21
2025-09-27 19:52:11 +02:00
parent dfe5148f17
commit 9d03fdf31d
95 changed files with 4395 additions and 441 deletions

View File

@@ -1,154 +0,0 @@
use std::{fs, io, path::Path};
use brk_interface::{Index, Interface};
use brk_server::VERSION;
use brk_structs::pools;
use crate::website::Website;
// Folder (relative to a website's root) that receives the generated JS modules.
const BRIDGE_PATH: &str = "scripts/bridge";

/// Generates the JS "bridge" files (pools/metrics modules) consumed by a website's frontend.
#[allow(clippy::upper_case_acronyms)]
pub trait Bridge {
    /// Writes the generated bridge files under `websites_path`.
    ///
    /// Per the impl below: a no-op (`Ok(())`) when `website.is_none()` or when
    /// the website's folder does not exist under `websites_path`.
    fn generate_bridge_files(&self, website: Website, websites_path: &Path) -> io::Result<()>;
}
impl Bridge for Interface<'static> {
    /// Writes `pools.js` and `metrics.js` into the website's bridge folder.
    ///
    /// Returns early with `Ok(())` when no website is configured, or when the
    /// website's folder is missing under `websites_path`.
    fn generate_bridge_files(&self, website: Website, websites_path: &Path) -> io::Result<()> {
        if website.is_none() {
            return Ok(());
        }

        let site_dir = websites_path.join(website.to_folder_name());
        if !fs::exists(&site_dir)? {
            return Ok(());
        }

        // Ensure scripts/bridge exists before writing the generated modules.
        let bridge_dir = site_dir.join(BRIDGE_PATH);
        fs::create_dir_all(&bridge_dir)?;

        generate_metrics_file(self, &bridge_dir)?;
        generate_pools_file(&bridge_dir)
    }
}
/// Writes `pools.js` into `parent`: an auto-generated module exporting
/// `createPools`, mapping each mining-pool serialized id to its display name,
/// sorted case-insensitively by name.
fn generate_pools_file(parent: &Path) -> io::Result<()> {
    // `join` takes `impl AsRef<Path>`; wrapping in `Path::new` was redundant.
    let path = parent.join("pools.js");
    let pools = pools();

    let mut contents = "//
// File auto-generated, any modifications will be overwritten
//
"
    .to_string();
    contents += "
/** @typedef {ReturnType<typeof createPools>} Pools */
/** @typedef {keyof Pools} Pool */
export function createPools() {
return /** @type {const} */ ({
";

    // Case-insensitive ordering; `sort_by_cached_key` lowercases each name
    // once instead of twice per comparison (stable, like the former `sort_by`).
    let mut sorted_pools: Vec<_> = pools.iter().collect();
    sorted_pools.sort_by_cached_key(|pool| pool.name.to_lowercase());

    contents += &sorted_pools
        .iter()
        .map(|pool| {
            let id = pool.serialized_id();
            format!(" {id}: \"{}\",", pool.name)
        })
        .collect::<Vec<_>>()
        .join("\n");

    contents += "\n });\n}\n";
    fs::write(path, contents)
}
/// Writes `metrics.js` into `parent`: an auto-generated module exporting the
/// server `VERSION`, a numeric JSDoc typedef per `Index`, `createIndexes`
/// (index name -> number), and `createMetricToIndexes` (metric id -> supported
/// index numbers).
fn generate_metrics_file(interface: &Interface<'static>, parent: &Path) -> io::Result<()> {
    // `join` takes `impl AsRef<Path>`; wrapping in `Path::new` was redundant.
    let path = parent.join("metrics.js");
    let indexes = Index::all();

    // Generated-file banner plus the crate version exported to JS.
    let mut contents = format!(
        "//
// File auto-generated, any modifications will be overwritten
//
export const VERSION = \"v{VERSION}\";
"
    );

    // One numeric typedef per index, e.g. `/** @typedef {3} Height */`.
    contents += &indexes
        .iter()
        .enumerate()
        .map(|(i_of_i, i)| format!("/** @typedef {{{i_of_i}}} {i} */"))
        .collect::<Vec<_>>()
        .join("\n");

    // Union typedef over every index name.
    contents += &format!(
        "\n\n/** @typedef {{{}}} Index */\n",
        indexes
            .iter()
            .map(|i| i.to_string())
            .collect::<Vec<_>>()
            .join(" | ")
    );

    contents += "
/** @typedef {ReturnType<typeof createIndexes>} Indexes */
export function createIndexes() {
return {
";

    // `name: number` entries so JS can refer to each index symbolically.
    contents += &indexes
        .iter()
        .enumerate()
        .map(|(i_of_i, i)| {
            let lowered = i.to_string().to_lowercase();
            format!(" {lowered}: /** @satisfies {{{i}}} */ ({i_of_i}),")
        })
        .collect::<Vec<_>>()
        .join("\n");
    contents += " };\n}\n";

    // BUGFIX: the `MetricToIndexes` typedef was missing its closing `*/`, so
    // in the emitted JS the comment ran on and swallowed the `Metric` typedef.
    contents += "
/** @typedef {ReturnType<typeof createMetricToIndexes>} MetricToIndexes */
/** @typedef {keyof MetricToIndexes} Metric */
/**
* @returns {Record<any, number[]>}
*/
export function createMetricToIndexes() {
return {
";

    interface
        .id_to_index_to_vec()
        .iter()
        .for_each(|(id, index_to_vec)| {
            // Emit each supported index as its numeric discriminant.
            let indexes = index_to_vec
                .keys()
                .map(|i| (*i as u8).to_string())
                .collect::<Vec<_>>()
                .join(", ");
            contents += &format!(" \"{id}\": [{indexes}],\n");
        });
    contents += " };\n}\n";

    fs::write(path, contents)
}

View File

@@ -9,6 +9,7 @@ use std::{
};
use bitcoincore_rpc::{self, RpcApi};
use brk_bridge::Bridge;
use brk_bundler::bundle;
use brk_computer::Computer;
use brk_indexer::Indexer;
@@ -18,12 +19,11 @@ use brk_server::{Server, VERSION};
use log::info;
use vecdb::Exit;
mod bridge;
mod config;
mod paths;
mod website;
use crate::{bridge::Bridge, config::Config, paths::*};
use crate::{config::Config, paths::*};
pub fn main() -> color_eyre::Result<()> {
color_eyre::install()?;
@@ -51,97 +51,107 @@ pub fn run() -> color_eyre::Result<()> {
let mut indexer = Indexer::forced_import(&config.brkdir())?;
let wait_for_synced_node = |rpc_client: &bitcoincore_rpc::Client| -> color_eyre::Result<()> {
let is_synced = || -> color_eyre::Result<bool> {
let info = rpc_client.get_blockchain_info()?;
Ok(info.headers == info.blocks)
let mut computer = Computer::forced_import(&config.brkdir(), &indexer, config.fetcher())?;
let interface = Interface::build(&parser, &indexer, &computer);
let website = config.website();
let downloads_path = config.downloads_dir();
let future = async {
let bundle_path = if website.is_some() {
let websites_dev_path = Path::new("../../websites");
let packages_dev_path = Path::new("../../packages");
let websites_path;
let packages_path;
if fs::exists(websites_dev_path)? {
websites_path = websites_dev_path.to_path_buf();
packages_path = packages_dev_path.to_path_buf();
} else {
let downloaded_brk_path = downloads_path.join(format!("brk-{VERSION}"));
let downloaded_websites_path = downloaded_brk_path.join("websites");
let downloaded_packages_path = downloaded_brk_path.join("packages");
if !fs::exists(&downloaded_websites_path)? {
info!("Downloading source from Github...");
let url = format!(
"https://github.com/bitcoinresearchkit/brk/archive/refs/tags/v{VERSION}.zip",
);
let response = minreq::get(url).send()?;
let bytes = response.as_bytes();
let cursor = Cursor::new(bytes);
let mut zip = zip::ZipArchive::new(cursor).unwrap();
zip.extract(downloads_path).unwrap();
}
websites_path = downloaded_websites_path;
packages_path = downloaded_packages_path;
}
interface.generate_js_files(&packages_path)?;
Some(bundle(&websites_path, website.to_folder_name(), true).await?)
} else {
None
};
if !is_synced()? {
info!("Waiting for node to be synced...");
while !is_synced()? {
let server = Server::new(interface, bundle_path);
tokio::spawn(async move {
server.serve(true).await.unwrap();
});
sleep(Duration::from_secs(1));
loop {
wait_for_synced_node(rpc)?;
let block_count = rpc.get_block_count()?;
info!("{} blocks found.", block_count + 1);
let starting_indexes = indexer
.index(&parser, rpc, &exit, config.check_collisions())
.unwrap();
computer
.compute(&indexer, starting_indexes, &parser, &exit)
.unwrap();
info!("Waiting for new blocks...");
while block_count == rpc.get_block_count()? {
sleep(Duration::from_secs(1))
}
}
Ok(())
};
let mut computer = Computer::forced_import(&config.brkdir(), &indexer, config.fetcher())?;
tokio::runtime::Builder::new_multi_thread()
.enable_all()
.build()?
.block_on(async {
let interface = Interface::build(&parser, &indexer, &computer);
let website = config.website();
let downloads_path = config.downloads_dir();
let bundle_path = if website.is_some() {
let websites_dev_path = Path::new("../../websites");
let websites_path = if fs::exists(websites_dev_path)? {
websites_dev_path.to_path_buf()
} else {
let downloaded_websites_path =
downloads_path.join(format!("brk-{VERSION}")).join("websites");
if !fs::exists(&downloaded_websites_path)? {
info!("Downloading websites from Github...");
let url = format!(
"https://github.com/bitcoinresearchkit/brk/archive/refs/tags/v{VERSION}.zip",
);
let response = minreq::get(url).send()?;
let bytes = response.as_bytes();
let cursor = Cursor::new(bytes);
let mut zip = zip::ZipArchive::new(cursor).unwrap();
zip.extract(downloads_path).unwrap();
}
downloaded_websites_path
};
interface.generate_bridge_files(website, websites_path.as_path())?;
Some(bundle(&websites_path, website.to_folder_name(), true).await?)
} else {
None
};
let server = Server::new(
interface,
bundle_path,
);
tokio::spawn(async move {
server.serve(true).await.unwrap();
});
sleep(Duration::from_secs(1));
loop {
wait_for_synced_node(rpc)?;
let block_count = rpc.get_block_count()?;
info!("{} blocks found.", block_count + 1);
let starting_indexes =
indexer.index(&parser, rpc, &exit, config.check_collisions()).unwrap();
computer.compute(&indexer, starting_indexes, &parser, &exit).unwrap();
info!("Waiting for new blocks...");
while block_count == rpc.get_block_count()? {
sleep(Duration::from_secs(1))
}
}
})
.block_on(future)
}
/// Blocks until the node's header count matches its block count, i.e. the
/// node reports itself fully synced. Logs once, then polls every second.
fn wait_for_synced_node(rpc_client: &bitcoincore_rpc::Client) -> color_eyre::Result<()> {
    let node_caught_up = || -> color_eyre::Result<bool> {
        let info = rpc_client.get_blockchain_info()?;
        Ok(info.blocks == info.headers)
    };

    // Guard clause: already synced, nothing to wait for.
    if node_caught_up()? {
        return Ok(());
    }

    info!("Waiting for node to be synced...");
    while !node_caught_up()? {
        sleep(Duration::from_secs(1))
    }
    Ok(())
}