clients: snapshot

This commit is contained in:
nym21
2026-01-11 23:08:08 +01:00
parent 325811fee7
commit 5826d78e35
38 changed files with 10018 additions and 11139 deletions

View File

@@ -12,7 +12,7 @@ pub fn generate_main_client(output: &mut String, endpoints: &[Endpoint]) {
writeln!(output, "class BrkClient(BrkClientBase):").unwrap();
writeln!(
output,
" \"\"\"Main BRK client with catalog tree and API methods.\"\"\""
" \"\"\"Main BRK client with metrics tree and API methods.\"\"\""
)
.unwrap();
writeln!(output).unwrap();
@@ -26,7 +26,7 @@ pub fn generate_main_client(output: &mut String, endpoints: &[Endpoint]) {
)
.unwrap();
writeln!(output, " super().__init__(base_url, timeout)").unwrap();
writeln!(output, " self.tree = CatalogTree(self)").unwrap();
writeln!(output, " self.metrics = MetricsTree(self)").unwrap();
writeln!(output).unwrap();
// Generate API methods
@@ -41,7 +41,7 @@ pub fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) {
}
let method_name = endpoint_to_method_name(endpoint);
let return_type = normalize_return_type(
let base_return_type = normalize_return_type(
&endpoint
.response_type
.as_deref()
@@ -49,6 +49,12 @@ pub fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) {
.unwrap_or_else(|| "Any".to_string()),
);
let return_type = if endpoint.supports_csv {
format!("Union[{}, str]", base_return_type)
} else {
base_return_type
};
// Build method signature
let params = build_method_params(endpoint);
writeln!(
@@ -79,9 +85,9 @@ pub fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) {
if endpoint.query_params.is_empty() {
if endpoint.path_params.is_empty() {
writeln!(output, " return self.get('{}')", path).unwrap();
writeln!(output, " return self.get_json('{}')", path).unwrap();
} else {
writeln!(output, " return self.get(f'{}')", path).unwrap();
writeln!(output, " return self.get_json(f'{}')", path).unwrap();
}
} else {
writeln!(output, " params = []").unwrap();
@@ -107,10 +113,18 @@ pub fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) {
writeln!(output, " query = '&'.join(params)").unwrap();
writeln!(
output,
" return self.get(f'{}{{\"?\" + query if query else \"\"}}')",
" path = f'{}{{\"?\" + query if query else \"\"}}'",
path
)
.unwrap();
if endpoint.supports_csv {
writeln!(output, " if format == 'csv':").unwrap();
writeln!(output, " return self.get_text(path)").unwrap();
writeln!(output, " return self.get_json(path)").unwrap();
} else {
writeln!(output, " return self.get_json(path)").unwrap();
}
}
writeln!(output).unwrap();

View File

@@ -81,25 +81,48 @@ class BrkClientBase:
"""Base HTTP client for making requests."""
def __init__(self, base_url: str, timeout: float = 30.0):
self.base_url = base_url
self.timeout = timeout
self._client = httpx.Client(timeout=timeout)
parsed = urlparse(base_url)
self._host = parsed.netloc
self._secure = parsed.scheme == 'https'
self._timeout = timeout
self._conn: Optional[Union[HTTPSConnection, HTTPConnection]] = None
def get(self, path: str) -> Any:
"""Make a GET request."""
def _connect(self) -> Union[HTTPSConnection, HTTPConnection]:
"""Get or create HTTP connection."""
if self._conn is None:
if self._secure:
self._conn = HTTPSConnection(self._host, timeout=self._timeout)
else:
self._conn = HTTPConnection(self._host, timeout=self._timeout)
return self._conn
def get(self, path: str) -> bytes:
"""Make a GET request and return raw bytes."""
try:
base = self.base_url.rstrip('/')
response = self._client.get(f"{{base}}{{path}}")
response.raise_for_status()
return response.json()
except httpx.HTTPStatusError as e:
raise BrkError(f"HTTP error: {{e.response.status_code}}", e.response.status_code)
except httpx.RequestError as e:
conn = self._connect()
conn.request("GET", path)
res = conn.getresponse()
data = res.read()
if res.status >= 400:
raise BrkError(f"HTTP error: {{res.status}}", res.status)
return data
except (ConnectionError, OSError, TimeoutError) as e:
self._conn = None
raise BrkError(str(e))
def get_json(self, path: str) -> Any:
"""Make a GET request and return JSON."""
return json.loads(self.get(path))
def get_text(self, path: str) -> str:
"""Make a GET request and return text."""
return self.get(path).decode()
def close(self):
"""Close the HTTP client."""
self._client.close()
if self._conn:
self._conn.close()
self._conn = None
def __enter__(self):
return self
@@ -124,8 +147,8 @@ pub fn generate_endpoint_class(output: &mut String) {
r#"class MetricData(TypedDict, Generic[T]):
"""Metric data with range information."""
total: int
from_: int # 'from' is reserved in Python
to: int
start: int
end: int
data: List[T]
@@ -143,18 +166,18 @@ class MetricEndpoint(Generic[T]):
def get(self) -> MetricData[T]:
"""Fetch all data points for this metric/index."""
return self._client.get(self.path())
return self._client.get_json(self.path())
def range(self, from_val: Optional[int] = None, to_val: Optional[int] = None) -> MetricData[T]:
def range(self, start: Optional[int] = None, end: Optional[int] = None) -> MetricData[T]:
"""Fetch data points within a range."""
params = []
if from_val is not None:
params.append(f"from={{from_val}}")
if to_val is not None:
params.append(f"to={{to_val}}")
if start is not None:
params.append(f"start={{start}}")
if end is not None:
params.append(f"end={{end}}")
query = "&".join(params)
p = self.path()
return self._client.get(f"{{p}}?{{query}}" if query else p)
return self._client.get_json(f"{{p}}?{{query}}" if query else p)
def path(self) -> str:
"""Get the endpoint path."""

View File

@@ -24,13 +24,14 @@ pub fn generate_python_client(
writeln!(output, "# Auto-generated BRK Python client").unwrap();
writeln!(output, "# Do not edit manually\n").unwrap();
writeln!(output, "from __future__ import annotations").unwrap();
writeln!(
output,
"from typing import TypeVar, Generic, Any, Optional, List, Literal, TypedDict, Union, Protocol"
)
.unwrap();
writeln!(output, "import httpx\n").unwrap();
writeln!(output, "from http.client import HTTPSConnection, HTTPConnection").unwrap();
writeln!(output, "from urllib.parse import urlparse").unwrap();
writeln!(output, "import json\n").unwrap();
writeln!(output, "T = TypeVar('T')\n").unwrap();
types::generate_type_definitions(&mut output, schemas);

View File

@@ -12,13 +12,13 @@ use crate::{
/// Generate tree classes
pub fn generate_tree_classes(output: &mut String, catalog: &TreeNode, metadata: &ClientMetadata) {
writeln!(output, "# Catalog tree classes\n").unwrap();
writeln!(output, "# Metrics tree classes\n").unwrap();
let pattern_lookup = metadata.pattern_lookup();
let mut generated = HashSet::new();
generate_tree_class(
output,
"CatalogTree",
"MetricsTree",
catalog,
&pattern_lookup,
metadata,
@@ -39,8 +39,30 @@ fn generate_tree_class(
return;
};
// Generate child classes FIRST (post-order traversal)
// This ensures children are defined before parent references them
for (child_name, child_node) in ctx.children.iter() {
if let TreeNode::Branch(grandchildren) = child_node {
let child_fields = get_node_fields(grandchildren, pattern_lookup);
// Generate inline class if no pattern match OR pattern is not parameterizable
if !metadata.is_parameterizable_fields(&child_fields) {
let child_class = child_type_name(name, child_name);
generate_tree_class(
output,
&child_class,
child_node,
pattern_lookup,
metadata,
generated,
);
}
}
}
// THEN generate the current class (after all children are defined)
writeln!(output, "class {}:", name).unwrap();
writeln!(output, " \"\"\"Catalog tree node.\"\"\"").unwrap();
writeln!(output, " \"\"\"Metrics tree node.\"\"\"").unwrap();
writeln!(output, " ").unwrap();
writeln!(
output,
@@ -92,24 +114,4 @@ fn generate_tree_class(
}
writeln!(output).unwrap();
// Generate child classes
for (child_name, child_node) in ctx.children {
if let TreeNode::Branch(grandchildren) = child_node {
let child_fields = get_node_fields(grandchildren, pattern_lookup);
// Generate inline class if no pattern match OR pattern is not parameterizable
if !metadata.is_parameterizable_fields(&child_fields) {
let child_class = child_type_name(name, child_name);
generate_tree_class(
output,
&child_class,
child_node,
pattern_lookup,
metadata,
generated,
);
}
}
}
}

View File

@@ -17,67 +17,81 @@ pub fn generate_type_definitions(output: &mut String, schemas: &TypeSchemas) {
let sorted_names = topological_sort_schemas(schemas);
for name in sorted_names {
if MANUAL_GENERIC_TYPES.contains(&name.as_str()) {
continue;
}
// Partition into simple type aliases and TypedDict classes
// Generate type aliases first to avoid forward reference issues
let (type_aliases, typed_dicts): (Vec<_>, Vec<_>) = sorted_names
.into_iter()
.filter(|name| !MANUAL_GENERIC_TYPES.contains(&name.as_str()))
.filter(|name| schemas.contains_key(name))
.partition(|name| {
schemas
.get(name)
.map(|s| s.get("properties").is_none())
.unwrap_or(false)
});
let Some(schema) = schemas.get(&name) else {
continue;
};
// Generate simple type aliases first
// Quote references to TypedDicts since they're defined after
let typed_dict_set: HashSet<_> = typed_dicts.iter().cloned().collect();
for name in type_aliases {
let schema = &schemas[&name];
let type_desc = schema.get("description").and_then(|d| d.as_str());
if let Some(props) = schema.get("properties").and_then(|p| p.as_object()) {
writeln!(output, "class {}(TypedDict):", name).unwrap();
// Collect field descriptions for Attributes section
let field_docs: Vec<(String, Option<&str>)> = props
.iter()
.map(|(prop_name, prop_schema)| {
let safe_name = escape_python_keyword(prop_name);
let desc = prop_schema.get("description").and_then(|d| d.as_str());
(safe_name, desc)
})
.collect();
let has_field_docs = field_docs.iter().any(|(_, d)| d.is_some());
// Generate docstring if we have type description or field descriptions
if type_desc.is_some() || has_field_docs {
writeln!(output, " \"\"\"").unwrap();
if let Some(desc) = type_desc {
for line in desc.lines() {
writeln!(output, " {}", line).unwrap();
}
}
if has_field_docs {
if type_desc.is_some() {
writeln!(output).unwrap();
}
writeln!(output, " Attributes:").unwrap();
for (field_name, desc) in &field_docs {
if let Some(d) = desc {
writeln!(output, " {}: {}", field_name, d).unwrap();
}
}
}
writeln!(output, " \"\"\"").unwrap();
let py_type = schema_to_python_type_quoting(schema, Some(&name), &typed_dict_set);
if let Some(desc) = type_desc {
for line in desc.lines() {
writeln!(output, "# {}", line).unwrap();
}
}
writeln!(output, "{} = {}", name, py_type).unwrap();
}
for (prop_name, prop_schema) in props {
let prop_type = schema_to_python_type_ctx(prop_schema, Some(&name));
// Then generate TypedDict classes
for name in typed_dicts {
let schema = &schemas[&name];
let type_desc = schema.get("description").and_then(|d| d.as_str());
let props = schema.get("properties").and_then(|p| p.as_object()).unwrap();
writeln!(output, "class {}(TypedDict):", name).unwrap();
// Collect field descriptions for Attributes section
let field_docs: Vec<(String, Option<&str>)> = props
.iter()
.map(|(prop_name, prop_schema)| {
let safe_name = escape_python_keyword(prop_name);
writeln!(output, " {}: {}", safe_name, prop_type).unwrap();
}
writeln!(output).unwrap();
} else {
let py_type = schema_to_python_type_ctx(schema, Some(&name));
let desc = prop_schema.get("description").and_then(|d| d.as_str());
(safe_name, desc)
})
.collect();
let has_field_docs = field_docs.iter().any(|(_, d)| d.is_some());
// Generate docstring if we have type description or field descriptions
if type_desc.is_some() || has_field_docs {
writeln!(output, " \"\"\"").unwrap();
if let Some(desc) = type_desc {
for line in desc.lines() {
writeln!(output, "# {}", line).unwrap();
writeln!(output, " {}", line).unwrap();
}
}
writeln!(output, "{} = {}", name, py_type).unwrap();
if has_field_docs {
if type_desc.is_some() {
writeln!(output).unwrap();
}
writeln!(output, " Attributes:").unwrap();
for (field_name, desc) in &field_docs {
if let Some(d) = desc {
writeln!(output, " {}: {}", field_name, d).unwrap();
}
}
}
writeln!(output, " \"\"\"").unwrap();
}
for (prop_name, prop_schema) in props {
let prop_type = schema_to_python_type_ctx(prop_schema, Some(&name));
let safe_name = escape_python_keyword(prop_name);
writeln!(output, " {}: {}", safe_name, prop_type).unwrap();
}
writeln!(output).unwrap();
}
writeln!(output).unwrap();
}
@@ -194,6 +208,70 @@ fn json_type_to_python(ty: &str, schema: &Value, current_type: Option<&str>) ->
}
}
/// Convert a JSON Schema fragment to a Python type expression, quoting any
/// type names in the given set so they become forward references.
///
/// `current_type` is the name of the type currently being emitted (used to
/// detect self-references); `quote_types` is the set of TypedDict names that
/// are generated after the type aliases, so `$ref`s to them must be written
/// as quoted strings to avoid Python forward-reference errors.
fn schema_to_python_type_quoting(
    schema: &Value,
    current_type: Option<&str>,
    quote_types: &HashSet<String>,
) -> String {
    // allOf: take the first constituent that resolves to something more
    // specific than "Any"; if none does, fall through to the checks below.
    if let Some(all_of) = schema.get("allOf").and_then(|v| v.as_array()) {
        for item in all_of {
            let resolved = schema_to_python_type_quoting(item, current_type, quote_types);
            if resolved != "Any" {
                return resolved;
            }
        }
    }
    // Handle $ref: resolve to a type name, quoting it when it refers back to
    // the type being generated or to a not-yet-defined TypedDict.
    if let Some(ref_path) = schema.get("$ref").and_then(|r| r.as_str()) {
        let type_name = ref_to_type_name(ref_path).unwrap_or("Any");
        // Quote self-references or types in quote_types set
        if current_type == Some(type_name) || quote_types.contains(type_name) {
            return format!("\"{}\"", type_name);
        }
        return type_name.to_string();
    }
    // Handle enum (array of string values) as a Literal[...]; non-string
    // members are skipped, and if none were strings we fall through.
    if let Some(enum_values) = schema.get("enum").and_then(|e| e.as_array()) {
        let literals: Vec<String> = enum_values
            .iter()
            .filter_map(|v| v.as_str())
            .map(|s| format!("\"{}\"", s))
            .collect();
        if !literals.is_empty() {
            return format!("Literal[{}]", literals.join(", "));
        }
    }
    // anyOf / oneOf become a Python Union. Variants resolving to "Any" are
    // dropped when at least one variant is more specific; otherwise the
    // unfiltered list is used so the emitted Union is still well-formed.
    if let Some(variants) = schema
        .get("anyOf")
        .or_else(|| schema.get("oneOf"))
        .and_then(|v| v.as_array())
    {
        let types: Vec<String> = variants
            .iter()
            .map(|v| schema_to_python_type_quoting(v, current_type, quote_types))
            .collect();
        let filtered: Vec<_> = types.iter().filter(|t| *t != "Any").collect();
        if !filtered.is_empty() {
            return format!(
                "Union[{}]",
                filtered
                    .iter()
                    .map(|s| s.as_str())
                    .collect::<Vec<_>>()
                    .join(", ")
            );
        }
        return format!("Union[{}]", types.join(", "));
    }
    // Fall back to regular conversion for other cases
    // (no quoting needed for anything the context-aware converter handles).
    schema_to_python_type_ctx(schema, current_type)
}
/// Convert JSON Schema to Python type with context for detecting self-references
pub fn schema_to_python_type_ctx(schema: &Value, current_type: Option<&str>) -> String {
if let Some(all_of) = schema.get("allOf").and_then(|v| v.as_array()) {