global: snapshot

This commit is contained in:
nym21
2026-01-15 23:34:43 +01:00
parent b0d933a7ab
commit 967d2c7f35
67 changed files with 6854 additions and 5210 deletions

View File

@@ -1,59 +0,0 @@
// use lru::LruCache;
// use std::num::NonZeroUsize;
// struct SmartBlkReader {
// // LRU cache of recently accessed files (memory mapped)
// mmap_cache: LruCache<String, memmap2::Mmap>,
// // Fallback to direct file I/O for cache misses
// max_cached_files: usize,
// }
// impl SmartBlkReader {
// fn new(max_cached: usize) -> Self {
// Self {
// mmap_cache: LruCache::new(NonZeroUsize::new(max_cached).unwrap()),
// max_cached_files: max_cached,
// }
// }
// fn get_transaction(
// &mut self,
// file_path: &str,
// offset: u64,
// length: usize,
// ) -> Result<Transaction, Box<dyn std::error::Error>> {
// // Try cache first
// if let Some(mmap) = self.mmap_cache.get(file_path) {
// let tx_data = &mmap[offset as usize..(offset as usize + length)];
// let mut cursor = std::io::Cursor::new(tx_data);
// return Ok(bitcoin::consensus::Decodable::consensus_decode(
// &mut cursor,
// )?);
// }
// // Cache miss - use direct I/O and potentially cache the file
// let mut file = File::open(file_path)?;
// file.seek(SeekFrom::Start(offset))?;
// let mut buffer = vec![0u8; length];
// file.read_exact(&mut buffer)?;
// // Optionally add to cache (based on access patterns)
// if self.should_cache_file(file_path) {
// let file_for_mmap = File::open(file_path)?;
// if let Ok(mmap) = unsafe { memmap2::MmapOptions::new().map(&file_for_mmap) } {
// self.mmap_cache.put(file_path.to_string(), mmap);
// }
// }
// let mut cursor = std::io::Cursor::new(&buffer);
// Ok(bitcoin::consensus::Decodable::consensus_decode(
// &mut cursor,
// )?)
// }
// fn should_cache_file(&self, _file_path: &str) -> bool {
// // Implement logic: recent files, frequently accessed files, etc.
// true
// }
// }

View File

@@ -44,7 +44,7 @@ impl BlockRoutes for ApiRouter<AppState> {
async |headers: HeaderMap,
Path(path): Path<BlockHashParam>,
State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::Height, move |q| q.block(&path.hash)).await
state.cached_json(&headers, CacheStrategy::Static, move |q| q.block(&path.hash)).await
},
|op| {
op.id("get_block")
@@ -135,7 +135,7 @@ impl BlockRoutes for ApiRouter<AppState> {
async |headers: HeaderMap,
Path(path): Path<BlockHashParam>,
State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::Height, move |q| q.block_txids(&path.hash)).await
state.cached_json(&headers, CacheStrategy::Static, move |q| q.block_txids(&path.hash)).await
},
|op| {
op.id("get_block_txids")
@@ -158,7 +158,7 @@ impl BlockRoutes for ApiRouter<AppState> {
async |headers: HeaderMap,
Path(path): Path<BlockHashStartIndex>,
State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::Height, move |q| q.block_txs(&path.hash, path.start_index)).await
state.cached_json(&headers, CacheStrategy::Static, move |q| q.block_txs(&path.hash, path.start_index)).await
},
|op| {
op.id("get_block_txs")
@@ -182,7 +182,7 @@ impl BlockRoutes for ApiRouter<AppState> {
async |headers: HeaderMap,
Path(path): Path<BlockHashTxIndex>,
State(state): State<AppState>| {
state.cached_text(&headers, CacheStrategy::Height, move |q| q.block_txid_at_index(&path.hash, path.index).map(|t| t.to_string())).await
state.cached_text(&headers, CacheStrategy::Static, move |q| q.block_txid_at_index(&path.hash, path.index).map(|t| t.to_string())).await
},
|op| {
op.id("get_block_txid")
@@ -226,7 +226,7 @@ impl BlockRoutes for ApiRouter<AppState> {
async |headers: HeaderMap,
Path(path): Path<BlockHashParam>,
State(state): State<AppState>| {
state.cached_bytes(&headers, CacheStrategy::Height, move |q| q.block_raw(&path.hash)).await
state.cached_bytes(&headers, CacheStrategy::Static, move |q| q.block_raw(&path.hash)).await
},
|op| {
op.id("get_block_raw")

View File

@@ -6,12 +6,12 @@ use axum::{
http::{HeaderMap, StatusCode, Uri},
response::{IntoResponse, Response},
};
use brk_query::{MetricSelection, Output};
use brk_types::Format;
use brk_types::{Format, MetricSelection, Output};
use quick_cache::sync::GuardResult;
use crate::{
CacheStrategy, api::metrics::MAX_WEIGHT, cache::CacheParams, extended::HeaderMapExtended,
api::metrics::{CACHE_CONTROL, MAX_WEIGHT},
extended::HeaderMapExtended,
};
use super::AppState;
@@ -39,35 +39,32 @@ async fn req_to_response_res(
Query(params): Query<MetricSelection>,
AppState { query, cache, .. }: AppState,
) -> brk_error::Result<Response> {
let format = params.format();
let height = query.sync(|q| q.height());
// Phase 1: Search and resolve metadata (cheap)
let resolved = query
.run(move |q| q.resolve(params, MAX_WEIGHT))
.await?;
let cache_params =
CacheParams::resolve(&CacheStrategy::height_with(params.etag_suffix()), || {
height.into()
});
let format = resolved.format();
let etag = resolved.etag();
if cache_params.matches_etag(&headers) {
// Check if client has fresh cache
if headers.has_etag(etag.as_str()) {
let mut response = (StatusCode::NOT_MODIFIED, "").into_response();
response.headers_mut().insert_cors();
return Ok(response);
}
let cache_key = format!(
"{}{}{}",
uri.path(),
uri.query().unwrap_or(""),
cache_params.etag_str()
);
// Check server-side cache
let cache_key = format!("bulk-{}{}{}", uri.path(), uri.query().unwrap_or(""), etag);
let guard_res = cache.get_value_or_guard(&cache_key, Some(Duration::from_millis(50)));
let mut response = if let GuardResult::Value(v) = guard_res {
Response::new(Body::from(v))
} else {
match query
.run(move |q| q.search_and_format_bulk_checked(params, MAX_WEIGHT))
.await?
{
// Phase 2: Format (expensive, only on cache miss)
let metric_output = query.run(move |q| q.format(resolved)).await?;
match metric_output.output {
Output::CSV(s) => {
if let GuardResult::Guard(g) = guard_res {
let _ = g.insert(s.clone().into());
@@ -75,21 +72,18 @@ async fn req_to_response_res(
s.into_response()
}
Output::Json(v) => {
let json = v.to_vec();
if let GuardResult::Guard(g) = guard_res {
let _ = g.insert(json.clone().into());
let _ = g.insert(v.clone().into());
}
json.into_response()
Response::new(Body::from(v))
}
}
};
let headers = response.headers_mut();
headers.insert_cors();
if let Some(etag) = &cache_params.etag {
headers.insert_etag(etag);
}
headers.insert_cache_control(&cache_params.cache_control);
headers.insert_etag(etag.as_str());
headers.insert_cache_control(CACHE_CONTROL);
match format {
Format::CSV => {

View File

@@ -6,12 +6,12 @@ use axum::{
http::{HeaderMap, StatusCode, Uri},
response::{IntoResponse, Response},
};
use brk_query::{MetricSelection, Output};
use brk_types::Format;
use brk_types::{Format, MetricSelection, Output};
use quick_cache::sync::GuardResult;
use crate::{
CacheStrategy, api::metrics::MAX_WEIGHT, cache::CacheParams, extended::HeaderMapExtended,
api::metrics::{CACHE_CONTROL, MAX_WEIGHT},
extended::HeaderMapExtended,
};
use super::AppState;
@@ -39,35 +39,32 @@ async fn req_to_response_res(
Query(params): Query<MetricSelection>,
AppState { query, cache, .. }: AppState,
) -> brk_error::Result<Response> {
let format = params.format();
let height = query.sync(|q| q.height());
// Phase 1: Search and resolve metadata (cheap)
let resolved = query
.run(move |q| q.resolve(params, MAX_WEIGHT))
.await?;
let cache_params =
CacheParams::resolve(&CacheStrategy::height_with(params.etag_suffix()), || {
height.into()
});
let format = resolved.format();
let etag = resolved.etag();
if cache_params.matches_etag(&headers) {
// Check if client has fresh cache
if headers.has_etag(etag.as_str()) {
let mut response = (StatusCode::NOT_MODIFIED, "").into_response();
response.headers_mut().insert_cors();
return Ok(response);
}
let cache_key = format!(
"single-{}{}{}",
uri.path(),
uri.query().unwrap_or(""),
cache_params.etag_str()
);
// Check server-side cache
let cache_key = format!("single-{}{}{}", uri.path(), uri.query().unwrap_or(""), etag);
let guard_res = cache.get_value_or_guard(&cache_key, Some(Duration::from_millis(50)));
let mut response = if let GuardResult::Value(v) = guard_res {
Response::new(Body::from(v))
} else {
match query
.run(move |q| q.search_and_format_checked(params, MAX_WEIGHT))
.await?
{
// Phase 2: Format (expensive, only on cache miss)
let metric_output = query.run(move |q| q.format(resolved)).await?;
match metric_output.output {
Output::CSV(s) => {
if let GuardResult::Guard(g) = guard_res {
let _ = g.insert(s.clone().into());
@@ -75,21 +72,18 @@ async fn req_to_response_res(
s.into_response()
}
Output::Json(v) => {
let json = v.to_vec();
if let GuardResult::Guard(g) = guard_res {
let _ = g.insert(json.clone().into());
let _ = g.insert(v.clone().into());
}
json.into_response()
Response::new(Body::from(v))
}
}
};
let headers = response.headers_mut();
headers.insert_cors();
if let Some(etag) = &cache_params.etag {
headers.insert_etag(etag);
}
headers.insert_cache_control(&cache_params.cache_control);
headers.insert_etag(etag.as_str());
headers.insert_cache_control(CACHE_CONTROL);
match format {
Format::CSV => {

View File

@@ -6,12 +6,12 @@ use axum::{
http::{HeaderMap, StatusCode, Uri},
response::{IntoResponse, Response},
};
use brk_query::{MetricSelection, OutputLegacy};
use brk_types::Format;
use brk_types::{Format, MetricSelection, OutputLegacy};
use quick_cache::sync::GuardResult;
use crate::{
CacheStrategy, api::metrics::MAX_WEIGHT, cache::CacheParams, extended::HeaderMapExtended,
api::metrics::{CACHE_CONTROL, MAX_WEIGHT},
extended::HeaderMapExtended,
};
use super::AppState;
@@ -39,35 +39,34 @@ async fn req_to_response_res(
Query(params): Query<MetricSelection>,
AppState { query, cache, .. }: AppState,
) -> brk_error::Result<Response> {
let format = params.format();
let height = query.sync(|q| q.height());
// Phase 1: Search and resolve metadata (cheap)
let resolved = query
.run(move |q| q.resolve(params, MAX_WEIGHT))
.await?;
let cache_params =
CacheParams::resolve(&CacheStrategy::height_with(params.etag_suffix()), || {
height.into()
});
let format = resolved.format();
let etag = resolved.etag();
if cache_params.matches_etag(&headers) {
// Check if client has fresh cache
if headers.has_etag(etag.as_str()) {
let mut response = (StatusCode::NOT_MODIFIED, "").into_response();
response.headers_mut().insert_cors();
return Ok(response);
}
let cache_key = format!(
"legacy-{}{}{}",
uri.path(),
uri.query().unwrap_or(""),
cache_params.etag_str()
);
// Check server-side cache
let cache_key = format!("legacy-{}{}{}", uri.path(), uri.query().unwrap_or(""), etag);
let guard_res = cache.get_value_or_guard(&cache_key, Some(Duration::from_millis(50)));
let mut response = if let GuardResult::Value(v) = guard_res {
Response::new(Body::from(v))
} else {
match query
.run(move |q| q.search_and_format_legacy_checked(params, MAX_WEIGHT))
.await?
{
// Phase 2: Format (expensive, only on cache miss)
let metric_output = query
.run(move |q| q.format_legacy(resolved))
.await?;
match metric_output.output {
OutputLegacy::CSV(s) => {
if let GuardResult::Guard(g) = guard_res {
let _ = g.insert(s.clone().into());
@@ -86,10 +85,8 @@ async fn req_to_response_res(
let headers = response.headers_mut();
headers.insert_cors();
if let Some(etag) = &cache_params.etag {
headers.insert_etag(etag);
}
headers.insert_cache_control(&cache_params.cache_control);
headers.insert_etag(etag.as_str());
headers.insert_cache_control(CACHE_CONTROL);
match format {
Format::CSV => {

View File

@@ -4,13 +4,10 @@ use axum::{
http::{HeaderMap, Uri},
response::{IntoResponse, Response},
};
use brk_query::{
DataRangeFormat, MetricSelection, MetricSelectionLegacy, PaginatedMetrics, Pagination,
};
use brk_traversable::TreeNode;
use brk_types::{
Index, IndexInfo, LimitParam, Metric, MetricCount, MetricData, MetricParam, MetricWithIndex,
Metrics,
DataRangeFormat, Index, IndexInfo, LimitParam, Metric, MetricCount, MetricData, MetricParam,
MetricSelection, MetricSelectionLegacy, MetricWithIndex, Metrics, PaginatedMetrics, Pagination,
};
use crate::{CacheStrategy, extended::TransformResponseExtended};
@@ -23,6 +20,8 @@ mod legacy;
/// Maximum allowed request weight in bytes (650KB)
const MAX_WEIGHT: usize = 65 * 10_000;
/// Cache control header for metric data responses
const CACHE_CONTROL: &str = "public, max-age=1, must-revalidate";
pub trait ApiMetricsRoutes {
fn add_metrics_routes(self) -> Self;
@@ -250,7 +249,8 @@ impl ApiMetricsRoutes for ApiRouter<AppState> {
Query(params): Query<MetricSelectionLegacy>,
state: State<AppState>|
-> Response {
legacy::handler(uri, headers, Query(params.into()), state).await
let params: MetricSelection = params.into();
legacy::handler(uri, headers, Query(params), state).await
},
|op| op
.metrics_tag()

View File

@@ -64,7 +64,7 @@ impl MiningRoutes for ApiRouter<AppState> {
"/api/v1/mining/pools/{time_period}",
get_with(
async |headers: HeaderMap, Path(path): Path<TimePeriodParam>, State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::height_with(format!("{:?}", path.time_period)), move |q| q.mining_pools(path.time_period)).await
state.cached_json(&headers, CacheStrategy::Height, move |q| q.mining_pools(path.time_period)).await
},
|op| {
op.id("get_pool_stats")
@@ -81,7 +81,7 @@ impl MiningRoutes for ApiRouter<AppState> {
"/api/v1/mining/pool/{slug}",
get_with(
async |headers: HeaderMap, Path(path): Path<PoolSlugParam>, State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::height_with(path.slug), move |q| q.pool_detail(path.slug)).await
state.cached_json(&headers, CacheStrategy::Height, move |q| q.pool_detail(path.slug)).await
},
|op| {
op.id("get_pool")
@@ -99,7 +99,7 @@ impl MiningRoutes for ApiRouter<AppState> {
"/api/v1/mining/hashrate",
get_with(
async |headers: HeaderMap, State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::height_with("hashrate"), |q| q.hashrate(None)).await
state.cached_json(&headers, CacheStrategy::Height, |q| q.hashrate(None)).await
},
|op| {
op.id("get_hashrate")
@@ -116,7 +116,7 @@ impl MiningRoutes for ApiRouter<AppState> {
"/api/v1/mining/hashrate/{time_period}",
get_with(
async |headers: HeaderMap, Path(path): Path<TimePeriodParam>, State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::height_with(format!("hashrate-{:?}", path.time_period)), move |q| q.hashrate(Some(path.time_period))).await
state.cached_json(&headers, CacheStrategy::Height, move |q| q.hashrate(Some(path.time_period))).await
},
|op| {
op.id("get_hashrate_by_period")
@@ -133,7 +133,7 @@ impl MiningRoutes for ApiRouter<AppState> {
"/api/v1/mining/difficulty-adjustments",
get_with(
async |headers: HeaderMap, State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::height_with("diff-adj"), |q| q.difficulty_adjustments(None)).await
state.cached_json(&headers, CacheStrategy::Height, |q| q.difficulty_adjustments(None)).await
},
|op| {
op.id("get_difficulty_adjustments")
@@ -150,7 +150,7 @@ impl MiningRoutes for ApiRouter<AppState> {
"/api/v1/mining/difficulty-adjustments/{time_period}",
get_with(
async |headers: HeaderMap, Path(path): Path<TimePeriodParam>, State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::height_with(format!("diff-adj-{:?}", path.time_period)), move |q| q.difficulty_adjustments(Some(path.time_period))).await
state.cached_json(&headers, CacheStrategy::Height, move |q| q.difficulty_adjustments(Some(path.time_period))).await
},
|op| {
op.id("get_difficulty_adjustments_by_period")
@@ -167,7 +167,7 @@ impl MiningRoutes for ApiRouter<AppState> {
"/api/v1/mining/blocks/fees/{time_period}",
get_with(
async |headers: HeaderMap, Path(path): Path<TimePeriodParam>, State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::height_with(format!("fees-{:?}", path.time_period)), move |q| q.block_fees(path.time_period)).await
state.cached_json(&headers, CacheStrategy::Height, move |q| q.block_fees(path.time_period)).await
},
|op| {
op.id("get_block_fees")
@@ -184,7 +184,7 @@ impl MiningRoutes for ApiRouter<AppState> {
"/api/v1/mining/blocks/rewards/{time_period}",
get_with(
async |headers: HeaderMap, Path(path): Path<TimePeriodParam>, State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::height_with(format!("rewards-{:?}", path.time_period)), move |q| q.block_rewards(path.time_period)).await
state.cached_json(&headers, CacheStrategy::Height, move |q| q.block_rewards(path.time_period)).await
},
|op| {
op.id("get_block_rewards")
@@ -219,7 +219,7 @@ impl MiningRoutes for ApiRouter<AppState> {
"/api/v1/mining/blocks/sizes-weights/{time_period}",
get_with(
async |headers: HeaderMap, Path(path): Path<TimePeriodParam>, State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::height_with(format!("sizes-{:?}", path.time_period)), move |q| q.block_sizes_weights(path.time_period)).await
state.cached_json(&headers, CacheStrategy::Height, move |q| q.block_sizes_weights(path.time_period)).await
},
|op| {
op.id("get_block_sizes_weights")
@@ -236,7 +236,7 @@ impl MiningRoutes for ApiRouter<AppState> {
"/api/v1/mining/reward-stats/{block_count}",
get_with(
async |headers: HeaderMap, Path(path): Path<BlockCountParam>, State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::height_with(format!("reward-stats-{}", path.block_count)), move |q| q.reward_stats(path.block_count)).await
state.cached_json(&headers, CacheStrategy::Height, move |q| q.reward_stats(path.block_count)).await
},
|op| {
op.id("get_reward_stats")

View File

@@ -1,7 +1,7 @@
use std::sync::Arc;
use aide::{
axum::ApiRouter,
axum::{ApiRouter, routing::get_with},
openapi::OpenApi,
};
use axum::{
@@ -12,13 +12,12 @@ use axum::{
};
use crate::{
VERSION,
api::{
addresses::AddressRoutes, blocks::BlockRoutes, mempool::MempoolRoutes,
metrics::ApiMetricsRoutes, mining::MiningRoutes, server::ServerRoutes,
transactions::TxRoutes,
},
extended::{HeaderMapExtended, ResponseExtended},
extended::{ResponseExtended, TransformResponseExtended},
};
use super::AppState;
@@ -48,19 +47,39 @@ impl ApiRoutes for ApiRouter<AppState> {
.add_metrics_routes()
.add_server_routes()
.route("/api/server", get(Redirect::temporary("/api#tag/server")))
.route(
.api_route(
"/api.json",
get(
get_with(
async |headers: HeaderMap,
Extension(api): Extension<Arc<OpenApi>>|
-> Response { Response::static_json(&headers, &*api) },
|op| {
op.id("get_openapi")
.server_tag()
.summary("OpenAPI specification")
.description("Full OpenAPI 3.1 specification for this API.")
},
),
)
.api_route(
"/api.trimmed.json",
get_with(
async |headers: HeaderMap,
Extension(api_trimmed): Extension<Arc<String>>|
-> Response {
let etag = VERSION;
if headers.has_etag(etag) {
return Response::new_not_modified();
}
Response::new_json(&api, etag)
let value: serde_json::Value =
serde_json::from_str(&api_trimmed).unwrap();
Response::static_json(&headers, &value)
},
|op| {
op.id("get_openapi_trimmed")
.server_tag()
.summary("Trimmed OpenAPI specification")
.description(
"Compact OpenAPI specification optimized for LLM consumption. \
Removes redundant fields while preserving essential API information.",
)
.ok_response::<serde_json::Value>()
},
),
)

View File

@@ -1,5 +1,3 @@
use aide::openapi::{Contact, Info, License, OpenApi, Tag};
//
// https://docs.rs/schemars/latest/schemars/derive.JsonSchema.html
//
@@ -12,6 +10,12 @@ use aide::openapi::{Contact, Info, License, OpenApi, Tag};
// - https://api.supabase.com/api/v1
//
mod trim;
pub use trim::trim_openapi_json;
use aide::openapi::{Contact, Info, License, OpenApi, Tag};
use crate::VERSION;
pub fn create_openapi() -> OpenApi {
@@ -25,10 +29,11 @@ pub fn create_openapi() -> OpenApi {
- **Metrics**: Thousands of time-series metrics across multiple indexes (date, block height, etc.)
- **[Mempool.space](https://mempool.space/docs/api/rest) compatible** (WIP): Most non-metrics endpoints follow the mempool.space API format
- **Multiple formats**: JSON and CSV output
- **LLM-optimized**: Compact OpenAPI spec at [`/api.trimmed.json`](/api.trimmed.json) for AI tools
### Client Libraries
- [JavaScript/TypeScript](https://www.npmjs.com/package/brk-client)
- [JavaScript](https://www.npmjs.com/package/brk-client)
- [Python](https://pypi.org/project/brk-client/)
- [Rust](https://crates.io/crates/brk_client)
@@ -56,6 +61,13 @@ pub fn create_openapi() -> OpenApi {
};
let tags = vec![
Tag {
name: "Server".to_string(),
description: Some(
"API metadata, health monitoring, and OpenAPI specifications.".to_string(),
),
..Default::default()
},
Tag {
name: "Metrics".to_string(),
description: Some(
@@ -115,14 +127,6 @@ pub fn create_openapi() -> OpenApi {
),
..Default::default()
},
Tag {
name: "Server".to_string(),
description: Some(
"API metadata and health monitoring. Version information and service status."
.to_string(),
),
..Default::default()
},
];
OpenApi {

View File

@@ -0,0 +1,447 @@
use serde_json::{Map, Value};
/// Produces a compacted OpenAPI spec JSON, sized for LLM consumption.
///
/// Parses the input, rewrites the document tree in place via [`trim_value`],
/// and re-serializes it. Redundant fields are dropped while the essential
/// API surface is preserved. Transformations applied (in order):
///
/// 1. Remove error responses (304, 400, 404, 500)
/// 2. Compact responses to `"returns": "Type"`
/// 3. Remove per-endpoint tags and style
/// 4. Simplify parameter schema to type, remove param descriptions
/// 5. Remove summary
/// 6. Remove examples, replace `$ref` with type
/// 7. Flatten single-item `allOf`
/// 8. Flatten `anyOf` to type array
/// 9. Remove format
/// 10. Remove property descriptions
/// 11. Simplify properties to direct types
///
/// # Panics
///
/// Panics if `json` is not valid JSON. Serialization of the trimmed tree
/// cannot fail, so the final `unwrap` is infallible.
pub fn trim_openapi_json(json: &str) -> String {
    let mut spec = serde_json::from_str::<Value>(json).expect("Invalid OpenAPI JSON");
    trim_value(&mut spec);
    serde_json::to_string(&spec).unwrap()
}
/// Recursively trims a JSON value in place, applying the transformations
/// documented on [`trim_openapi_json`].
///
/// Objects are rewritten before recursing so removed keys are never visited;
/// arrays simply recurse into their items; scalars are left untouched.
fn trim_value(value: &mut Value) {
    match value {
        Value::Object(obj) => {
            // Step 1: Remove error responses.
            if let Some(Value::Object(responses)) = obj.get_mut("responses") {
                for code in &["304", "400", "404", "500"] {
                    responses.remove(*code);
                }
            }
            // Step 2: Compact responses to "returns": "Type".
            // NOTE: the responses object is removed even when no JSON 200
            // schema exists (e.g. text/CSV endpoints) — deliberate size
            // reduction; the error responses were stripped above anyway.
            if let Some(Value::Object(responses)) = obj.remove("responses")
                && let Some(returns) = extract_return_type(&responses)
            {
                obj.insert("returns".to_string(), Value::String(returns));
            }
            // Step 3: Remove per-endpoint tags and style.
            // Per-endpoint tags are arrays of *strings* (e.g. ["Blocks"]);
            // the top-level `tags` definition is an array of Tag objects and
            // must be preserved, so only all-string arrays are removed.
            if matches!(
                obj.get("tags"),
                Some(Value::Array(arr)) if arr.iter().all(Value::is_string)
            ) {
                obj.remove("tags");
            }
            obj.remove("style");
            // Step 4: Simplify parameters (schema to type, remove descriptions).
            if let Some(Value::Array(params)) = obj.get_mut("parameters") {
                for param in params {
                    simplify_parameter(param);
                }
            }
            // Step 5: Remove summary.
            obj.remove("summary");
            // Step 6: Remove examples, replace $ref with its bare type name.
            obj.remove("example");
            obj.remove("examples");
            if let Some(Value::String(ref_path)) = obj.remove("$ref") {
                let type_name = ref_path.split('/').next_back().unwrap_or("any");
                obj.insert("type".to_string(), Value::String(type_name.to_string()));
            }
            // Step 7: Flatten single-item allOf. A multi-item allOf is left
            // in place rather than being removed and silently discarded.
            if matches!(obj.get("allOf"), Some(Value::Array(a)) if a.len() == 1)
                && let Some(Value::Array(all_of)) = obj.remove("allOf")
                && let Some(Value::Object(inner)) = all_of.into_iter().next()
            {
                for (k, v) in inner {
                    obj.insert(k, v);
                }
            }
            // Step 8: Flatten anyOf to a type array ($refs become type names).
            if let Some(Value::Array(any_of)) = obj.remove("anyOf") {
                let types: Vec<Value> = any_of
                    .into_iter()
                    .filter_map(|item| {
                        if let Value::Object(o) = item {
                            if let Some(Value::String(ref_path)) = o.get("$ref") {
                                return Some(Value::String(
                                    ref_path.split('/').next_back().unwrap_or("any").to_string(),
                                ));
                            }
                            o.get("type").cloned()
                        } else {
                            None
                        }
                    })
                    .collect();
                if !types.is_empty() {
                    obj.insert("type".to_string(), Value::Array(types));
                }
            }
            // Step 9: Remove format.
            obj.remove("format");
            // Steps 10 & 11: Simplify properties (remove descriptions,
            // collapse to direct type names where possible).
            if let Some(Value::Object(props)) = obj.get_mut("properties") {
                simplify_properties(props);
            }
            // Recurse into whatever keys survived the rewrites above.
            for (_, v) in obj.iter_mut() {
                trim_value(v);
            }
        }
        Value::Array(arr) => {
            for item in arr {
                trim_value(item);
            }
        }
        _ => {}
    }
}
fn extract_return_type(responses: &Map<String, Value>) -> Option<String> {
let resp_200 = responses.get("200")?;
let content = resp_200.get("content")?;
let json_content = content.get("application/json")?;
let schema = json_content.get("schema")?;
Some(schema_to_type_string(schema))
}
/// Renders a schema as a compact type string.
///
/// `$ref` wins and collapses to its final path segment; arrays render as
/// `array[Item]` (recursing into `items`); any other `type` string is used
/// verbatim; everything else falls back to `"any"`.
fn schema_to_type_string(schema: &Value) -> String {
    if let Some(Value::String(ref_path)) = schema.get("$ref") {
        return ref_path.split('/').next_back().unwrap_or("any").to_string();
    }
    match schema.get("type") {
        Some(Value::String(t)) if t == "array" => match schema.get("items") {
            Some(items) => format!("array[{}]", schema_to_type_string(items)),
            // "array" with no items: keep the bare type name.
            None => t.clone(),
        },
        Some(Value::String(t)) => t.clone(),
        _ => "any".to_string(),
    }
}
/// Compacts one parameter object in place: drops its description and
/// replaces the nested `schema` with a flat `type` entry.
/// Non-object parameters are left untouched.
fn simplify_parameter(param: &mut Value) {
    let Value::Object(obj) = param else { return };
    obj.remove("description");
    if let Some(schema) = obj.remove("schema") {
        let ty = extract_type_from_schema(&schema);
        obj.insert("type".to_string(), ty);
    }
}
/// Derives a compact `type` value from a parameter schema.
///
/// Resolution order: `anyOf` (variant type names, unwrapped when there is
/// exactly one), then `$ref` (final path segment), then a plain `type`,
/// falling back to `"any"`.
fn extract_type_from_schema(schema: &Value) -> Value {
    let Value::Object(obj) = schema else {
        return Value::String("any".to_string());
    };
    // anyOf — typically an optional field like [Type, null].
    if let Some(Value::Array(variants)) = obj.get("anyOf") {
        let mut types = Vec::new();
        for variant in variants {
            let Value::Object(v) = variant else { continue };
            if let Some(Value::String(ref_path)) = v.get("$ref") {
                types.push(Value::String(
                    ref_path.split('/').next_back().unwrap_or("any").to_string(),
                ));
            } else if let Some(t) = v.get("type") {
                types.push(t.clone());
            }
        }
        return if types.len() == 1 {
            types.pop().unwrap()
        } else {
            Value::Array(types)
        };
    }
    // $ref — collapse to the bare type name.
    if let Some(Value::String(ref_path)) = obj.get("$ref") {
        return Value::String(ref_path.split('/').next_back().unwrap_or("any").to_string());
    }
    // Plain type, or "any" when nothing usable is present.
    obj.get("type")
        .cloned()
        .unwrap_or_else(|| Value::String("any".to_string()))
}
/// Compacts every object-valued property in place: strips its description
/// and replaces it with the simplified form from [`simplify_property_value`].
/// Keys are never added or removed, so iterating values directly is safe.
fn simplify_properties(props: &mut Map<String, Value>) {
    for prop_value in props.values_mut() {
        if let Value::Object(prop_obj) = prop_value {
            prop_obj.remove("description");
            let simplified = simplify_property_value(prop_obj);
            *prop_value = simplified;
        }
    }
}
/// Collapses one property schema to its most compact representation.
///
/// Returns the replacement `Value` for the property. The input map is
/// consumed destructively: keys are removed as they are handled and, when
/// no compact form applies, the remaining map is *moved* (not cloned) into
/// the return value. Callers must overwrite the property with the returned
/// value, as [`simplify_properties`] does.
fn simplify_property_value(obj: &mut Map<String, Value>) -> Value {
    // Remove validation constraints that carry no type information.
    for key in &["default", "minItems", "maxItems", "uniqueItems"] {
        obj.remove(*key);
    }
    // $ref — collapse to the bare type name (runs before recursion would).
    if let Some(Value::String(ref_path)) = obj.remove("$ref") {
        let type_name = ref_path.split('/').next_back().unwrap_or("any");
        return Value::String(type_name.to_string());
    }
    // Single-item allOf — flatten and extract the inner type. A multi-item
    // allOf is left in place rather than being removed and silently dropped.
    if matches!(obj.get("allOf"), Some(Value::Array(a)) if a.len() == 1)
        && let Some(Value::Array(all_of)) = obj.remove("allOf")
        && let Some(Value::Object(inner)) = all_of.into_iter().next()
    {
        if let Some(Value::String(ref_path)) = inner.get("$ref") {
            let type_name = ref_path.split('/').next_back().unwrap_or("any");
            return Value::String(type_name.to_string());
        }
        if let Some(t) = inner.get("type") {
            return t.clone();
        }
    }
    // anyOf — flatten to a type array (runs before recursion would).
    if let Some(Value::Array(any_of)) = obj.remove("anyOf") {
        let types: Vec<Value> = any_of
            .into_iter()
            .filter_map(|item| {
                if let Value::Object(o) = item {
                    if let Some(Value::String(ref_path)) = o.get("$ref") {
                        return Some(Value::String(
                            ref_path.split('/').next_back().unwrap_or("any").to_string(),
                        ));
                    }
                    o.get("type").cloned()
                } else {
                    None
                }
            })
            .collect();
        return Value::Array(types);
    }
    // If only "type" remains, return just the type value.
    if obj.len() == 1
        && let Some(t) = obj.get("type")
    {
        return t.clone();
    }
    // Array whose items are a bare type or $ref → "array[Type]".
    if obj.get("type") == Some(&Value::String("array".to_string()))
        && let Some(Value::Object(items_obj)) = obj.get("items")
        && items_obj.len() == 1
    {
        if let Some(Value::String(item_type)) = items_obj.get("type") {
            return Value::String(format!("array[{item_type}]"));
        }
        if let Some(Value::String(ref_path)) = items_obj.get("$ref") {
            let type_name = ref_path.split('/').next_back().unwrap_or("any");
            return Value::String(format!("array[{type_name}]"));
        }
    }
    // No compact form — move the trimmed map out (cheaper than cloning;
    // the caller replaces the property with the returned value anyway).
    Value::Object(std::mem::take(obj))
}
#[cfg(test)]
mod tests {
    use super::*;

    // Each test feeds a minimal OpenAPI fragment through `trim_openapi_json`
    // and checks the compacted shape of the re-parsed output.

    // A property-level anyOf must collapse to a bare type-name array.
    #[test]
    fn test_trim_property_anyof() {
        let input = r##"{
            "type": "object",
            "properties": {
                "index": {
                    "anyOf": [
                        {"type": "TxIndex"},
                        {"type": "null"}
                    ]
                }
            }
        }"##;
        let result = trim_openapi_json(input);
        let parsed: Value = serde_json::from_str(&result).unwrap();
        // Property should be simplified to array, not {"type": [...]}
        let index = &parsed["properties"]["index"];
        assert!(index.is_array(), "Expected array, got: {}", index);
        assert_eq!(index[0], "TxIndex");
        assert_eq!(index[1], "null");
    }

    // A parameter schema with anyOf becomes a flat "type" array; $refs
    // collapse to their final path segment.
    #[test]
    fn test_trim_parameter_anyof() {
        let input = r##"{
            "parameters": [{
                "in": "query",
                "name": "after_txid",
                "schema": {
                    "anyOf": [
                        {"$ref": "#/components/schemas/Txid"},
                        {"type": "null"}
                    ]
                }
            }]
        }"##;
        let result = trim_openapi_json(input);
        let parsed: Value = serde_json::from_str(&result).unwrap();
        // Parameter should have type array including null
        let param = &parsed["parameters"][0];
        assert_eq!(param["name"], "after_txid");
        assert_eq!(param["type"][0], "Txid");
        assert_eq!(param["type"][1], "null");
    }

    // A bare $ref property collapses to just the referenced type name.
    #[test]
    fn test_trim_property_ref() {
        let input = r##"{
            "type": "object",
            "properties": {
                "txid": {
                    "$ref": "#/components/schemas/Txid"
                }
            }
        }"##;
        let result = trim_openapi_json(input);
        let parsed: Value = serde_json::from_str(&result).unwrap();
        // Property with $ref should be simplified to just the type name
        assert_eq!(parsed["properties"]["txid"], "Txid");
    }

    // Simplification must reach schemas nested under components/schemas,
    // exercising the recursive walk, not just top-level objects.
    #[test]
    fn test_trim_nested_component_schema() {
        // This matches the actual API structure: components > schemas > Type > properties
        let input = r##"{
            "components": {
                "schemas": {
                    "AddressStats": {
                        "type": "object",
                        "properties": {
                            "address": {
                                "$ref": "#/components/schemas/Address"
                            },
                            "chain_stats": {
                                "$ref": "#/components/schemas/AddressChainStats"
                            }
                        }
                    }
                }
            }
        }"##;
        let result = trim_openapi_json(input);
        let parsed: Value = serde_json::from_str(&result).unwrap();
        let props = &parsed["components"]["schemas"]["AddressStats"]["properties"];
        assert_eq!(props["address"], "Address", "address should be simplified");
        assert_eq!(props["chain_stats"], "AddressChainStats", "chain_stats should be simplified");
    }

    // Single-item allOf wrappers (the form aide/schemars emits around $refs)
    // must flatten to the inner type name; the description is dropped too.
    #[test]
    fn test_trim_property_allof_with_ref() {
        // Real API uses allOf wrapper around $ref
        let input = r##"{
            "type": "object",
            "properties": {
                "address": {
                    "description": "Bitcoin address string",
                    "allOf": [
                        {"$ref": "#/components/schemas/Address"}
                    ]
                }
            }
        }"##;
        let result = trim_openapi_json(input);
        let parsed: Value = serde_json::from_str(&result).unwrap();
        assert_eq!(parsed["properties"]["address"], "Address");
    }

    // Arrays whose items are a $ref render as "array[Type]".
    #[test]
    fn test_trim_property_array_with_ref() {
        let input = r##"{
            "type": "object",
            "properties": {
                "vin": {
                    "type": "array",
                    "items": {
                        "$ref": "#/components/schemas/TxIn"
                    }
                }
            }
        }"##;
        let result = trim_openapi_json(input);
        let parsed: Value = serde_json::from_str(&result).unwrap();
        // Array with $ref items should be simplified to "array[Type]"
        assert_eq!(parsed["properties"]["vin"], "array[TxIn]");
    }

    // The responses object is replaced wholesale by a "returns" entry built
    // from the 200 JSON schema; error responses disappear with it.
    #[test]
    fn test_trim_responses_to_returns() {
        let input = r##"{
            "responses": {
                "200": {
                    "content": {
                        "application/json": {
                            "schema": {"$ref": "#/components/schemas/Block"}
                        }
                    }
                },
                "400": {"description": "Bad request"},
                "500": {"description": "Error"}
            }
        }"##;
        let result = trim_openapi_json(input);
        let parsed: Value = serde_json::from_str(&result).unwrap();
        assert_eq!(parsed["returns"], "Block");
        assert!(parsed.get("responses").is_none());
    }
}