server: mcp + global: refactor

This commit is contained in:
nym21
2025-06-21 12:43:14 +02:00
parent c9e0f9d985
commit c3ae3cb768
92 changed files with 13601 additions and 12554 deletions

View File

@@ -0,0 +1,39 @@
use color_eyre::eyre::eyre;
use rmcp::schemars::JsonSchema;
use serde::Deserialize;
/// Output format requested by a client.
///
/// Variant names are upper-case acronyms on purpose, hence the allow below.
#[allow(clippy::upper_case_acronyms)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, JsonSchema)]
pub enum Format {
    /// JSON output.
    #[serde(alias = "json")]
    JSON,
    /// Comma-separated values.
    #[serde(alias = "csv")]
    CSV,
    /// Tab-separated values.
    #[serde(alias = "tsv")]
    TSV,
    /// Markdown table.
    #[serde(alias = "md", alias = "markdown")]
    MD,
}
impl TryFrom<Option<String>> for Format {
    type Error = color_eyre::Report;

    /// Parses an optional, case-insensitive format name into a [`Format`].
    ///
    /// Accepted names mirror the serde aliases declared on the enum:
    /// `json`, `csv`, `tsv`, `md`/`markdown`.
    ///
    /// # Errors
    /// Returns an error when the value is `None` or is not a recognized
    /// format name (the previous implementation only reported "Fail").
    fn try_from(value: Option<String>) -> Result<Self, Self::Error> {
        let value = value.ok_or_else(|| eyre!("Missing format"))?;
        match value.to_lowercase().as_str() {
            "md" | "markdown" => Ok(Self::MD),
            "csv" => Ok(Self::CSV),
            "tsv" => Ok(Self::TSV),
            "json" => Ok(Self::JSON),
            other => Err(eyre!("Unknown format: {other:?}")),
        }
    }
}

View File

@@ -0,0 +1,173 @@
use std::fmt::{self, Debug};
use brk_core::{
DateIndex, DecadeIndex, DifficultyEpoch, EmptyOutputIndex, HalvingEpoch, Height, InputIndex,
MonthIndex, OpReturnIndex, OutputIndex, P2AIndex, P2MSIndex, P2PK33Index, P2PK65Index,
P2PKHIndex, P2SHIndex, P2TRIndex, P2WPKHIndex, P2WSHIndex, Printable, QuarterIndex, TxIndex,
UnknownOutputIndex, WeekIndex, YearIndex,
};
use color_eyre::eyre::eyre;
use schemars::JsonSchema;
use serde::{Deserialize, de::Error};
/// Every index an `AnyCollectableVec` can be keyed by.
///
/// Each variant mirrors the `brk_core` index type of the same name.
/// `Deserialize` is implemented by hand (below) so all serialized aliases
/// from `to_possible_strings()` are accepted.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, JsonSchema)]
pub enum Index {
    DateIndex,
    DecadeIndex,
    DifficultyEpoch,
    EmptyOutputIndex,
    HalvingEpoch,
    Height,
    InputIndex,
    MonthIndex,
    OpReturnIndex,
    OutputIndex,
    P2AIndex,
    P2MSIndex,
    P2PK33Index,
    P2PK65Index,
    P2PKHIndex,
    P2SHIndex,
    P2TRIndex,
    P2WPKHIndex,
    P2WSHIndex,
    QuarterIndex,
    TxIndex,
    UnknownOutputIndex,
    WeekIndex,
    YearIndex,
}
impl Index {
    /// Every variant, in declaration order.
    ///
    /// Keep the array length in sync with the enum when adding variants.
    pub fn all() -> [Self; 24] {
        [
            Self::DateIndex,
            Self::DecadeIndex,
            Self::DifficultyEpoch,
            Self::EmptyOutputIndex,
            Self::HalvingEpoch,
            Self::Height,
            Self::InputIndex,
            Self::MonthIndex,
            Self::OpReturnIndex,
            Self::OutputIndex,
            Self::P2AIndex,
            Self::P2MSIndex,
            Self::P2PK33Index,
            Self::P2PK65Index,
            Self::P2PKHIndex,
            Self::P2SHIndex,
            Self::P2TRIndex,
            Self::P2WPKHIndex,
            Self::P2WSHIndex,
            Self::QuarterIndex,
            Self::TxIndex,
            Self::UnknownOutputIndex,
            Self::WeekIndex,
            Self::YearIndex,
        ]
    }
    /// All serialized names this index answers to, delegated to the
    /// matching `brk_core` type's `Printable::to_possible_strings`.
    pub fn possible_values(&self) -> &'static [&'static str] {
        match self {
            Self::DateIndex => DateIndex::to_possible_strings(),
            Self::DecadeIndex => DecadeIndex::to_possible_strings(),
            Self::DifficultyEpoch => DifficultyEpoch::to_possible_strings(),
            Self::EmptyOutputIndex => EmptyOutputIndex::to_possible_strings(),
            Self::HalvingEpoch => HalvingEpoch::to_possible_strings(),
            Self::Height => Height::to_possible_strings(),
            Self::InputIndex => InputIndex::to_possible_strings(),
            Self::MonthIndex => MonthIndex::to_possible_strings(),
            Self::OpReturnIndex => OpReturnIndex::to_possible_strings(),
            Self::OutputIndex => OutputIndex::to_possible_strings(),
            Self::P2AIndex => P2AIndex::to_possible_strings(),
            Self::P2MSIndex => P2MSIndex::to_possible_strings(),
            Self::P2PK33Index => P2PK33Index::to_possible_strings(),
            Self::P2PK65Index => P2PK65Index::to_possible_strings(),
            Self::P2PKHIndex => P2PKHIndex::to_possible_strings(),
            Self::P2SHIndex => P2SHIndex::to_possible_strings(),
            Self::P2TRIndex => P2TRIndex::to_possible_strings(),
            Self::P2WPKHIndex => P2WPKHIndex::to_possible_strings(),
            Self::P2WSHIndex => P2WSHIndex::to_possible_strings(),
            Self::QuarterIndex => QuarterIndex::to_possible_strings(),
            Self::TxIndex => TxIndex::to_possible_strings(),
            Self::UnknownOutputIndex => UnknownOutputIndex::to_possible_strings(),
            Self::WeekIndex => WeekIndex::to_possible_strings(),
            Self::YearIndex => YearIndex::to_possible_strings(),
        }
    }
    /// Flattened list of every serialized name across every index.
    pub fn all_possible_values() -> Vec<&'static str> {
        Self::all()
            .into_iter()
            .flat_map(|i| i.possible_values().iter().cloned())
            .collect::<Vec<_>>()
    }
    /// Shortest multi-character serialized name.
    ///
    /// NOTE(review): assumes `possible_values()` is ordered shortest-first
    /// and contains at least one name longer than one character — the
    /// `unwrap` panics otherwise. TODO confirm against `Printable`.
    pub fn serialize_short(&self) -> &'static str {
        self.possible_values()
            .iter()
            .find(|str| str.len() > 1)
            .unwrap()
    }
    /// Canonical (last, presumably longest) serialized name.
    pub fn serialize_long(&self) -> &'static str {
        self.possible_values().last().unwrap()
    }
}
impl TryFrom<&str> for Index {
    type Error = color_eyre::Report;

    /// Parses a serialized index name (any value listed by
    /// [`Index::possible_values`]), case-insensitively.
    ///
    /// # Errors
    /// Returns an error when no index accepts the given name.
    fn try_from(value: &str) -> Result<Self, Self::Error> {
        let value = value.to_lowercase();
        let v = value.as_str();
        // Scanning `all()` replaces the previous 24 hand-written guard arms,
        // which had drifted out of sync (the QuarterIndex arm was duplicated).
        // Assumes the possible-value sets are disjoint across indexes, in
        // which case the scan order cannot change the result.
        Self::all()
            .into_iter()
            .find(|index| index.possible_values().contains(&v))
            .ok_or_else(|| eyre!("Bad index"))
    }
}
impl fmt::Display for Index {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}", self)
}
}
impl<'de> Deserialize<'de> for Index {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let str = String::deserialize(deserializer)?;
if let Ok(index) = Index::try_from(str.as_str()) {
Ok(index)
} else {
Err(Error::custom("Bad index"))
}
}
}

View File

@@ -0,0 +1,212 @@
#![doc = include_str!("../README.md")]
#![doc = "\n## Example\n\n```rust"]
#![doc = include_str!("../examples/main.rs")]
#![doc = "```"]
use std::collections::BTreeMap;
use brk_computer::Computer;
use brk_core::Result;
use brk_indexer::Indexer;
use brk_vec::AnyCollectableVec;
use tabled::settings::Style;
mod format;
mod index;
mod maybe_ids;
mod output;
mod params;
mod table;
mod vecs;
pub use format::Format;
pub use index::Index;
pub use output::{Output, Value};
pub use params::{Pagination, Params, ParamsOpt};
pub use table::Tabled;
use vecs::Vecs;
use crate::vecs::{IdToVec, IndexToVec};
/// Query facade over every vec exposed by the indexer and the computer.
pub struct Interface<'a> {
    // Lookup tables built once from both sources.
    vecs: Vecs<'a>,
    // Kept only to tie the borrow lifetimes to the sources.
    _indexer: &'a Indexer,
    _computer: &'a Computer,
}
impl<'a> Interface<'a> {
    /// Builds the interface, collecting every vec from both sources.
    pub fn build(indexer: &'a Indexer, computer: &'a Computer) -> Self {
        Self {
            vecs: Vecs::build(indexer, computer),
            _indexer: indexer,
            _computer: computer,
        }
    }
    /// Resolves the ids in `params` to the vecs registered for
    /// `params.index`.
    ///
    /// Ids are lowercased, `-` is normalized to `_`, and each id string may
    /// contain several ids separated by whitespace, `,` or `+`. An id that
    /// is itself an index name is resolved through that index's canonical
    /// serialization. Unknown ids are silently skipped.
    pub fn search(&self, params: &Params) -> Vec<(String, &&dyn AnyCollectableVec)> {
        let tuples = params
            .ids
            .iter()
            .flat_map(|s| {
                s.to_lowercase()
                    .replace("-", "_")
                    .split_whitespace()
                    .flat_map(|s| {
                        s.split(',')
                            .flat_map(|s| s.split('+').map(|s| s.to_string()))
                    })
                    .collect::<Vec<_>>()
            })
            // filter_map replaces the previous filter(is_some) + map(unwrap).
            .filter_map(|mut id| {
                let mut res = self.vecs.id_to_index_to_vec.get(id.as_str());
                if res.is_none() {
                    // Fallback: the id may be an index name; retry with its
                    // canonical (last possible-value) serialization.
                    if let Ok(index) = Index::try_from(id.as_str()) {
                        id = index.possible_values().last().unwrap().to_string();
                        res = self.vecs.id_to_index_to_vec.get(id.as_str())
                    }
                }
                res.map(|index_to_vec| (id, index_to_vec))
            })
            .collect::<Vec<_>>();
        // Keep only ids that actually have a vec for the requested index.
        tuples
            .iter()
            .flat_map(|(str, i_to_v)| i_to_v.get(&params.index).map(|vec| (str.to_owned(), vec)))
            .collect::<Vec<_>>()
    }
    /// Collects the `[from, to)` range of each vec and renders it in the
    /// requested format (JSON by default).
    ///
    /// NOTE(review): the text formats assume every collected vec has the same
    /// length as the first one — `v.get(i).unwrap()` panics otherwise; TODO
    /// confirm the collectors guarantee equal lengths.
    ///
    /// # Errors
    /// Propagates any collection failure from the underlying vecs.
    pub fn format(
        &self,
        vecs: Vec<(String, &&dyn AnyCollectableVec)>,
        params: &ParamsOpt,
    ) -> color_eyre::Result<Output> {
        let from = params.from();
        let to = params.to();
        let format = params.format();
        let mut values = vecs
            .iter()
            .map(|(_, vec)| -> Result<Vec<serde_json::Value>> {
                vec.collect_range_serde_json(from, to)
            })
            .collect::<Result<Vec<_>>>()?;
        if values.is_empty() {
            return Ok(Output::default(format));
        }
        let ids_last_i = vecs.len() - 1;
        Ok(match format {
            Some(Format::CSV) | Some(Format::TSV) => {
                let delimiter = if format == Some(Format::CSV) {
                    ','
                } else {
                    '\t'
                };
                // Header row: the requested ids.
                let mut text = vecs
                    .iter()
                    .map(|(id, _)| id.to_owned())
                    .collect::<Vec<_>>()
                    .join(&delimiter.to_string());
                text.push('\n');
                // Body: one row per collected position, one column per id.
                let values_len = values.first().unwrap().len();
                (0..values_len).for_each(|i| {
                    let mut line = "".to_string();
                    values.iter().enumerate().for_each(|(id_i, v)| {
                        line += &v.get(i).unwrap().to_string();
                        if id_i == ids_last_i {
                            line.push('\n');
                        } else {
                            line.push(delimiter);
                        }
                    });
                    text += &line;
                });
                if format == Some(Format::CSV) {
                    Output::CSV(text)
                } else {
                    Output::TSV(text)
                }
            }
            Some(Format::MD) => {
                let mut table =
                    values.to_table(vecs.iter().map(|(s, _)| s.to_owned()).collect::<Vec<_>>());
                table.with(Style::markdown());
                Output::MD(table.to_string())
            }
            Some(Format::JSON) | None => {
                // Collapse the matrix when possible: one vec -> list,
                // one vec of one value -> single value.
                if values.len() == 1 {
                    let mut values = values.pop().unwrap();
                    if values.len() == 1 {
                        let value = values.pop().unwrap();
                        Output::Json(Value::Single(value))
                    } else {
                        Output::Json(Value::List(values))
                    }
                } else {
                    Output::Json(Value::Matrix(values))
                }
            }
        })
    }
    /// Convenience: [`Self::search`] then [`Self::format`].
    pub fn search_and_format(&self, params: Params) -> color_eyre::Result<Output> {
        self.format(self.search(&params), &params.rest)
    }
    /// Id -> (index -> vec) lookup table.
    pub fn id_to_index_to_vec(&self) -> &BTreeMap<&str, IndexToVec<'_>> {
        &self.vecs.id_to_index_to_vec
    }
    /// Index -> (id -> vec) lookup table.
    pub fn index_to_id_to_vec(&self) -> &BTreeMap<Index, IdToVec<'_>> {
        &self.vecs.index_to_id_to_vec
    }
    /// Number of distinct vec ids.
    pub fn get_vecid_count(&self) -> usize {
        self.vecs.id_count
    }
    /// Number of distinct indexes.
    pub fn get_index_count(&self) -> usize {
        self.vecs.index_count
    }
    /// Total number of (id, index) vec registrations.
    pub fn get_vec_count(&self) -> usize {
        self.vecs.vec_count
    }
    /// Canonical serialized name of every index in use.
    pub fn get_indexes(&self) -> &[&'static str] {
        &self.vecs.indexes
    }
    /// Canonical index name -> all accepted serialized aliases.
    pub fn get_accepted_indexes(&self) -> &BTreeMap<&'static str, &'static [&'static str]> {
        &self.vecs.accepted_indexes
    }
    /// One page of vec ids (sorted by length then lexicographically).
    pub fn get_vecids(&self, pagination: Pagination) -> &[&str] {
        self.vecs.ids(pagination)
    }
    /// One page of index -> ids mappings.
    pub fn get_indexes_to_vecids(
        &self,
        pagination: Pagination,
    ) -> BTreeMap<&'static str, Vec<&str>> {
        self.vecs.indexes_to_ids(pagination)
    }
    /// One page of id -> indexes mappings.
    pub fn get_vecids_to_indexes(
        &self,
        pagination: Pagination,
    ) -> BTreeMap<&str, Vec<&'static str>> {
        self.vecs.ids_to_indexes(pagination)
    }
}

View File

@@ -0,0 +1,30 @@
use derive_deref::Deref;
use schemars::JsonSchema;
use serde::Deserialize;
/// A list of vec ids, deserializable from a single comma-separated string.
#[derive(Debug, Deref, JsonSchema)]
pub struct MaybeIds(Vec<String>);
impl From<String> for MaybeIds {
fn from(value: String) -> Self {
Self(vec![value])
}
}
impl<'a> From<Vec<&'a str>> for MaybeIds {
    /// Copies each borrowed id into an owned `String`.
    fn from(value: Vec<&'a str>) -> Self {
        let owned = value.into_iter().map(String::from).collect();
        Self(owned)
    }
}
impl<'de> Deserialize<'de> for MaybeIds {
    /// Deserializes from a single string, splitting it on commas.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let raw = String::deserialize(deserializer)?;
        let ids = raw.split(',').map(str::to_string).collect::<Vec<_>>();
        Ok(MaybeIds(ids))
    }
}

View File

@@ -0,0 +1,43 @@
use std::fmt;
use serde::Serialize;
use tabled::Tabled as TabledTabled;
use crate::Format;
#[derive(Debug, Serialize)]
pub enum Output {
Json(Value),
CSV(String),
TSV(String),
MD(String),
}
/// JSON payload shape: a matrix (many vecs), a list (one vec), or a single
/// value (one vec, one element). `untagged` so it serializes transparently.
#[derive(Debug, Serialize, TabledTabled)]
#[serde(untagged)]
pub enum Value {
    Matrix(Vec<Vec<serde_json::Value>>),
    List(Vec<serde_json::Value>),
    Single(serde_json::Value),
}
impl Output {
    /// Returns the empty output for the requested format: an empty string
    /// for the text formats, JSON `null` for JSON or no format.
    pub fn default(format: Option<Format>) -> Self {
        match format {
            Some(Format::CSV) => Output::CSV(String::new()),
            Some(Format::TSV) => Output::TSV(String::new()),
            // Previously fell through to JSON null; an empty markdown body
            // is the consistent counterpart of the CSV/TSV cases.
            Some(Format::MD) => Output::MD(String::new()),
            Some(Format::JSON) | None => Output::Json(Value::Single(serde_json::Value::Null)),
        }
    }
}
impl fmt::Display for Output {
    /// Pretty-prints JSON; writes the text formats through unchanged.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Json(value) => {
                let pretty = serde_json::to_string_pretty(value).unwrap();
                write!(f, "{}", pretty)
            }
            Self::CSV(text) | Self::TSV(text) | Self::MD(text) => write!(f, "{}", text),
        }
    }
}

View File

@@ -0,0 +1,175 @@
use std::ops::Deref;
use rmcp::schemars::{self, JsonSchema};
use serde::{Deserialize, Deserializer};
use crate::{Format, Index, maybe_ids::MaybeIds};
/// Required query parameters for a vec request.
#[derive(Debug, Deserialize, JsonSchema)]
pub struct Params {
    #[serde(alias = "i")]
    #[schemars(description = "Index of the values requested")]
    pub index: Index,
    #[serde(alias = "v")]
    #[schemars(description = "Ids of the requested vecs")]
    pub ids: MaybeIds,
    /// Optional parameters (from/to/count/format), flattened into the map.
    #[serde(flatten)]
    pub rest: ParamsOpt,
}
// Generates a `deserialize_rest` helper that accepts `rest` either flattened
// into the parent map or under an explicit "rest" key.
// NOTE(review): `deserialize_rest` is not referenced anywhere in this file —
// confirm it is used (e.g. via `deserialize_with`) or remove the invocation.
serde_with::flattened_maybe!(deserialize_rest, "rest");
// Lets callers read the optional parameters (from/to/count/format) directly
// on `Params` without going through `.rest`.
impl Deref for Params {
    type Target = ParamsOpt;
    fn deref(&self) -> &Self::Target {
        &self.rest
    }
}
impl From<((Index, String), ParamsOpt)> for Params {
    /// Builds `Params` from a single (index, id) pair plus optional params.
    fn from(((index, id), rest): ((Index, String), ParamsOpt)) -> Self {
        let ids = MaybeIds::from(id);
        Self { index, ids, rest }
    }
}
/// Optional query parameters shared by every vec request.
///
/// The `de_unquote_*` deserializers accept both JSON numbers and
/// (possibly quoted) numeric strings.
#[derive(Default, Debug, Deserialize, JsonSchema)]
pub struct ParamsOpt {
    #[serde(default, alias = "f", deserialize_with = "de_unquote_i64")]
    /// Inclusive starting index, if negative will be from the end
    from: Option<i64>,
    #[serde(default, alias = "t", deserialize_with = "de_unquote_i64")]
    /// Exclusive ending index, if negative will be from the end, overrides 'count'
    to: Option<i64>,
    #[serde(default, alias = "c", deserialize_with = "de_unquote_usize")]
    /// Number of values
    count: Option<usize>,
    /// Format of the output
    format: Option<Format>,
}
impl ParamsOpt {
    /// Sets the inclusive starting index (builder style).
    pub fn set_from(mut self, from: i64) -> Self {
        self.from.replace(from);
        self
    }
    /// Sets the exclusive ending index (builder style).
    pub fn set_to(mut self, to: i64) -> Self {
        self.to.replace(to);
        self
    }
    /// Sets the number of values requested (builder style).
    pub fn set_count(mut self, count: usize) -> Self {
        self.count.replace(count);
        self
    }
    /// Sets the output format (builder style).
    pub fn set_format(mut self, format: Format) -> Self {
        self.format.replace(format);
        self
    }
    /// Inclusive starting index; negative counts from the end.
    pub fn from(&self) -> Option<i64> {
        self.from
    }
    /// Exclusive ending index. When unset, it is derived from `count`:
    /// no `from` means the first `count` values; a non-negative `from`, or a
    /// negative `from` farther from the end than `count`, yields
    /// `from + count`; otherwise the window would cross the end, so `None`
    /// (read to the end) is returned.
    pub fn to(&self) -> Option<i64> {
        if self.to.is_none() {
            if let Some(c) = self.count {
                let c = c as i64;
                if let Some(f) = self.from {
                    // `f >= 0` rather than `f.is_positive()` so an explicit
                    // `from = 0` still honors `count` (was silently ignored).
                    if f >= 0 || f.abs() > c {
                        return Some(f + c);
                    }
                } else {
                    return Some(c);
                }
            }
        }
        self.to
    }
    /// Requested output format, if any.
    pub fn format(&self) -> Option<Format> {
        self.format
    }
}
fn de_unquote_i64<'de, D>(deserializer: D) -> Result<Option<i64>, D::Error>
where
D: Deserializer<'de>,
{
let value: Option<serde_json::Value> = Option::deserialize(deserializer)?;
match value {
None => Ok(None),
Some(serde_json::Value::String(mut s)) => {
if s.starts_with('"') && s.ends_with('"') && s.len() >= 2 {
s = s[1..s.len() - 1].to_string();
}
s.parse::<i64>().map(Some).map_err(serde::de::Error::custom)
}
Some(serde_json::Value::Number(n)) => {
// If it's a number, convert it to i64
n.as_i64()
.ok_or_else(|| serde::de::Error::custom("number out of range"))
.map(Some)
}
_ => Err(serde::de::Error::custom("expected a string or number")),
}
}
fn de_unquote_usize<'de, D>(deserializer: D) -> Result<Option<usize>, D::Error>
where
D: Deserializer<'de>,
{
let value: Option<serde_json::Value> = Option::deserialize(deserializer)?;
match value {
None => Ok(None),
Some(serde_json::Value::String(mut s)) => {
if s.starts_with('"') && s.ends_with('"') && s.len() >= 2 {
s = s[1..s.len() - 1].to_string();
}
s.parse::<usize>()
.map(Some)
.map_err(serde::de::Error::custom)
}
Some(serde_json::Value::Number(n)) => {
// If it's a number, convert it to usize
n.as_u64()
.ok_or_else(|| serde::de::Error::custom("number out of range"))
.map(|v| v as usize)
.map(Some)
}
_ => {
dbg!(value);
Err(serde::de::Error::custom("expected a string or number"))
}
}
}
/// Zero-based page selector for the list endpoints.
#[derive(Debug, Default, Deserialize, JsonSchema)]
pub struct Pagination {
    #[serde(alias = "p")]
    #[schemars(description = "Pagination index")]
    pub page: usize,
}
impl Pagination {
    /// Number of items per page.
    const PER_PAGE: usize = 1_000;

    /// First item index of this page, capped at `len`.
    pub fn start(&self, len: usize) -> usize {
        // saturating_mul: `page` is client-controlled and must not be able
        // to trigger a debug-mode overflow panic. `.min(len)` replaces the
        // former `.clamp(0, len)`, whose lower bound was a no-op on usize.
        self.page.saturating_mul(Self::PER_PAGE).min(len)
    }

    /// One-past-the-last item index of this page, capped at `len`.
    pub fn end(&self, len: usize) -> usize {
        self.page
            .saturating_add(1)
            .saturating_mul(Self::PER_PAGE)
            .min(len)
    }
}

View File

@@ -0,0 +1,26 @@
use tabled::{Table, builder::Builder};
/// Renders column-major data as a `tabled::Table` with the given header ids.
pub trait Tabled {
    fn to_table(&self, ids: Vec<String>) -> Table;
}
impl Tabled for Vec<Vec<serde_json::Value>> {
    /// Builds a table whose header row is `ids` and whose body is this
    /// column-major data transposed into rows. JSON double quotes are
    /// stripped from each rendered cell.
    fn to_table(&self, ids: Vec<String>) -> Table {
        let mut builder = Builder::default();
        builder.push_record(ids);
        if let Some(first) = self.first() {
            // Row count comes from the first column; columns shorter than it
            // now render empty cells instead of panicking on `unwrap`.
            let len = first.len();
            (0..len).for_each(|index| {
                builder.push_record(self.iter().map(|column| {
                    column
                        .get(index)
                        .map(|value| value.to_string().replace("\"", ""))
                        .unwrap_or_default()
                }));
            });
        }
        builder.build()
    }
}

View File

@@ -0,0 +1,189 @@
use std::collections::BTreeMap;
use brk_computer::Computer;
use brk_indexer::Indexer;
use brk_vec::AnyCollectableVec;
use derive_deref::{Deref, DerefMut};
use crate::params::Pagination;
use super::index::Index;
/// All vec lookup tables, built once at startup from the indexer and the
/// computer, plus cached serializations used by the list endpoints.
#[derive(Default)]
pub struct Vecs<'a> {
    pub id_to_index_to_vec: BTreeMap<&'a str, IndexToVec<'a>>,
    pub index_to_id_to_vec: BTreeMap<Index, IdToVec<'a>>,
    // All ids, sorted by length then lexicographically.
    pub ids: Vec<&'a str>,
    // Canonical serialized name of every index in use.
    pub indexes: Vec<&'static str>,
    // Canonical index name -> all accepted serialized aliases.
    pub accepted_indexes: BTreeMap<&'static str, &'static [&'static str]>,
    pub index_count: usize,
    pub id_count: usize,
    pub vec_count: usize,
    // Pre-serialized mappings served (paginated) by the list endpoints.
    serialized_id_to_indexes: BTreeMap<&'a str, Vec<&'static str>>,
    serialized_indexes_to_ids: BTreeMap<&'static str, Vec<&'a str>>,
}
impl<'a> Vecs<'a> {
    /// Collects every vec exposed by the indexer and the computer, then
    /// builds all lookup tables, counts, and cached serializations.
    pub fn build(indexer: &'a Indexer, computer: &'a Computer) -> Self {
        let mut this = Vecs::default();
        indexer
            .vecs
            .vecs()
            .into_iter()
            .for_each(|vec| this.insert(vec));
        computer
            .vecs
            .vecs()
            .into_iter()
            .for_each(|vec| this.insert(vec));
        // Sort ids by length first, then lexicographically.
        let mut ids = this.id_to_index_to_vec.keys().cloned().collect::<Vec<_>>();
        ids.sort_unstable_by(|a, b| a.len().cmp(&b.len()).then_with(|| a.cmp(b)));
        this.ids = ids;
        // BUGFIX: id_count was computed from index_to_id_to_vec, making it
        // always equal to index_count; it now counts the distinct ids.
        this.id_count = this.id_to_index_to_vec.len();
        this.index_count = this.index_to_id_to_vec.len();
        this.vec_count = this
            .index_to_id_to_vec
            .values()
            .map(|tree| tree.len())
            .sum::<usize>();
        this.indexes = this
            .index_to_id_to_vec
            .keys()
            .map(|i| i.serialize_long())
            .collect::<Vec<_>>();
        this.accepted_indexes = this
            .index_to_id_to_vec
            .keys()
            .map(|i| (i.serialize_long(), i.possible_values()))
            .collect::<BTreeMap<_, _>>();
        this.serialized_id_to_indexes = this
            .id_to_index_to_vec
            .iter()
            .map(|(id, index_to_vec)| {
                (
                    *id,
                    index_to_vec
                        .keys()
                        .map(|i| i.serialize_long())
                        .collect::<Vec<_>>(),
                )
            })
            .collect();
        this.serialized_indexes_to_ids = this
            .index_to_id_to_vec
            .iter()
            .map(|(index, id_to_vec)| {
                (
                    index.serialize_long(),
                    id_to_vec.keys().cloned().collect::<Vec<_>>(),
                )
            })
            .collect();
        this
    }
    // Registers one vec under both lookup tables.
    // Not the most performant or type safe but only built once so that's okay.
    fn insert(&mut self, vec: &'a dyn AnyCollectableVec) {
        let name = vec.name();
        let serialized_index = vec.index_type_to_string();
        // Build-time sanity check: vec names containing "_to_" must match one
        // of the known naming patterns; anything else is a programming error.
        let split = name.split("_to_").collect::<Vec<_>>();
        if split.len() != 1
            && !(split.len() == 2
                && split.first().is_some_and(|s| {
                    s == &"up"
                        || s == &"start"
                        || s.ends_with("relative")
                        || s.starts_with("from")
                        || s == &"cumulative_up"
                        || s.starts_with("cumulative_start")
                        || s.starts_with("cumulative_from")
                        || s == &"activity"
                }))
            && !(split.len() == 3
                && split.first().is_some_and(|s| {
                    s == &"up"
                        || s == &"start"
                        || s.starts_with("from")
                        || s == &"cumulative_up"
                        || s == &"cumulative_start"
                        || s.starts_with("cumulative_from")
                })
                && split.get(1).is_some_and(|s| s.ends_with("relative")))
        {
            dbg!((&serialized_index, &name, &split));
            unreachable!();
        }
        let index = Index::try_from(serialized_index)
            .inspect_err(|_| {
                dbg!(&serialized_index);
            })
            .unwrap();
        // Duplicate (id, index) registrations are programming errors too.
        let prev = self
            .id_to_index_to_vec
            .entry(name)
            .or_default()
            .insert(index, vec);
        if prev.is_some() {
            dbg!(serialized_index, name);
            panic!()
        }
        let prev = self
            .index_to_id_to_vec
            .entry(index)
            .or_default()
            .insert(name, vec);
        if prev.is_some() {
            dbg!(serialized_index, name);
            panic!()
        }
    }
    /// One page of ids.
    pub fn ids(&self, pagination: Pagination) -> &[&'_ str] {
        let len = self.ids.len();
        let start = pagination.start(len);
        let end = pagination.end(len);
        &self.ids[start..end]
    }
    /// One page of id -> serialized-indexes mappings.
    pub fn ids_to_indexes(&self, pagination: Pagination) -> BTreeMap<&'_ str, Vec<&'static str>> {
        let len = self.serialized_id_to_indexes.len();
        let start = pagination.start(len);
        let end = pagination.end(len);
        self.serialized_id_to_indexes
            .iter()
            .skip(start)
            // BUGFIX: was `.take(end)`, which returned up to `end` items
            // after the skip (too many for every page past the first).
            .take(end - start)
            .map(|(ids, indexes)| (*ids, indexes.clone()))
            .collect()
    }
    /// One page of serialized-index -> ids mappings.
    pub fn indexes_to_ids(&self, pagination: Pagination) -> BTreeMap<&'static str, Vec<&'a str>> {
        let len = self.serialized_indexes_to_ids.len();
        let start = pagination.start(len);
        let end = pagination.end(len);
        self.serialized_indexes_to_ids
            .iter()
            .skip(start)
            // BUGFIX: same off-by-page error as in `ids_to_indexes`.
            .take(end - start)
            .map(|(index, ids)| (*index, ids.clone()))
            .collect()
    }
}
/// Index -> vec map for a single id (newtype for `Deref` convenience).
#[derive(Default, Deref, DerefMut)]
pub struct IndexToVec<'a>(BTreeMap<Index, &'a dyn AnyCollectableVec>);
/// Id -> vec map for a single index (newtype for `Deref` convenience).
#[derive(Default, Deref, DerefMut)]
pub struct IdToVec<'a>(BTreeMap<&'a str, &'a dyn AnyCollectableVec>);