diff --git a/.github/workflows/release-python.yml b/.github/workflows/release-python.yml index fc97db05..48a1ed90 100644 --- a/.github/workflows/release-python.yml +++ b/.github/workflows/release-python.yml @@ -40,8 +40,7 @@ jobs: git push origin "pctx-py-v${{ steps.get_version.outputs.version }}" - name: Build package - working-directory: pctx-py - run: uv build + run: ./scripts/build-python.sh - name: Publish to PyPI working-directory: pctx-py env: diff --git a/CHANGELOG.md b/CHANGELOG.md index 7e3171ca..089c6f2f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,31 +9,18 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added -### Changed - -### Fixed - -## [v0.6.0-beta.2] - 2026-02-17 - -### Added - -- Update the fs-mode generated readme to provide clarity on using cat to read about type definitions. Avoids parameter hallucinations. - -### Changed - -### Fixed - -## [v0.6.0-beta.1] - 2026-02-14 - -### Added - - Python `@tool` decorator now parses docstrings (Google, NumPy, reStructuredText, and Epydoc formats) to extract parameter descriptions, return value descriptions, and detailed function descriptions into tool schemas - Make code mode config and all tools / descriptions easily configurable from python client -- Add just-bash and new execute_bash tool to explore filesystem of the generated sdk +- `ToolDisclosure` support in python client and unified mcp with `pctx mcp start` ### Changed +- Centralized tool descriptions and workflows in root of the repo and loaded by the various `pctx` surfaces. + ### Fixed + +- Various `pctx mcp dev` rendering issues. + ## [v0.5.0] - 2026-02-14 ### Added @@ -50,6 +37,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - When removing a websocket session, graceful cancel of all pending client tool executions - JS runtime race condition by moving the V8 mutex to be held for the entire typecheck/execute process. 
This previously caused a panic: `../../../../third_party/libc++/src/include/__vector/vector.h:416: libc++ Hardening assertion __n < size() failed: vector[] index out of bounds` + ## [v0.4.3] - 2026-01-27 ### Added diff --git a/Cargo.lock b/Cargo.lock index 14ef29e7..a1421df2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2435,6 +2435,12 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" +[[package]] +name = "futures-timer" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" + [[package]] name = "futures-util" version = "0.3.31" @@ -4133,7 +4139,7 @@ checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3" [[package]] name = "pctx" -version = "0.6.0-beta.2" +version = "0.6.0" dependencies = [ "anstyle", "anyhow", @@ -4159,6 +4165,7 @@ dependencies = [ "pctx_mcp_server", "pctx_session_server", "ratatui", + "regex", "rmcp", "rustls", "serde", @@ -4180,12 +4187,13 @@ dependencies = [ [[package]] name = "pctx_code_execution_runtime" -version = "0.1.3" +version = "0.2.0" dependencies = [ "anyhow", "deno_core", "deno_error", "pctx_config", + "pctx_registry", "rmcp", "rustls", "serde", @@ -4198,14 +4206,14 @@ dependencies = [ [[package]] name = "pctx_code_mode" -version = "0.2.3" +version = "0.3.0" dependencies = [ "anyhow", "futures", - "pctx_code_execution_runtime", "pctx_codegen", "pctx_config", "pctx_executor", + "pctx_registry", "schemars 1.1.0", "serde", "serde_json", @@ -4218,7 +4226,7 @@ dependencies = [ [[package]] name = "pctx_codegen" -version = "0.2.0" +version = "0.3.0" dependencies = [ "biome_formatter", "biome_js_formatter", @@ -4242,7 +4250,7 @@ dependencies = [ [[package]] name = "pctx_config" -version = "0.1.3" +version = "0.1.4" dependencies = [ "anyhow", "base64 0.22.1", @@ -4274,15 +4282,15 @@ dependencies = [ 
[[package]] name = "pctx_executor" -version = "0.1.3" +version = "0.2.0" dependencies = [ "deno_core", "deno_resolver", "futures", "node_resolver", "pctx_code_execution_runtime", - "pctx_config", "pctx_deno_transpiler", + "pctx_registry", "pctx_type_check_runtime", "regex", "rustls", @@ -4306,10 +4314,7 @@ dependencies = [ "anyhow", "axum", "opentelemetry", - "pctx_code_execution_runtime", "pctx_code_mode", - "pctx_codegen", - "pctx_config", "rmcp", "serde", "serde_json", @@ -4322,6 +4327,19 @@ dependencies = [ "tracing-opentelemetry", ] +[[package]] +name = "pctx_registry" +version = "0.1.0" +dependencies = [ + "deno_error", + "pctx_config", + "rmcp", + "serde_json", + "thiserror 2.0.17", + "tokio", + "tracing", +] + [[package]] name = "pctx_session_server" version = "0.1.0" @@ -4334,11 +4352,9 @@ dependencies = [ "futures", "http-body-util", "opentelemetry", - "pctx_code_execution_runtime", "pctx_code_mode", - "pctx_codegen", - "pctx_config", "rmcp", + "rstest", "serde", "serde_json", "serial_test", @@ -4360,7 +4376,7 @@ dependencies = [ [[package]] name = "pctx_type_check_runtime" -version = "0.1.2" +version = "0.1.3" dependencies = [ "deno_ast", "deno_core", @@ -4585,6 +4601,15 @@ dependencies = [ "syn 2.0.111", ] +[[package]] +name = "proc-macro-crate" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983" +dependencies = [ + "toml_edit", +] + [[package]] name = "proc-macro-error" version = "1.0.4" @@ -4977,9 +5002,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.12.2" +version = "1.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" +checksum = "e10754a14b9137dd7b1e3e5b0493cc9171fdd105e0ab477f51b72e7f3ac0e276" dependencies = [ "aho-corasick", "memchr", @@ -5004,6 +5029,12 @@ version = "0.8.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" +[[package]] +name = "relative-path" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba39f3699c378cd8970968dcbff9c43159ea4cfbd88d43c00b22f2ef10a435d2" + [[package]] name = "reqwest" version = "0.12.28" @@ -5163,6 +5194,36 @@ dependencies = [ "syn 2.0.111", ] +[[package]] +name = "rstest" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a2c585be59b6b5dd66a9d2084aa1d8bd52fbdb806eafdeffb52791147862035" +dependencies = [ + "futures", + "futures-timer", + "rstest_macros", + "rustc_version", +] + +[[package]] +name = "rstest_macros" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "825ea780781b15345a146be27eaefb05085e337e869bff01b4306a4fd4a9ad5a" +dependencies = [ + "cfg-if", + "glob", + "proc-macro-crate", + "proc-macro2", + "quote", + "regex", + "relative-path", + "rustc_version", + "syn 2.0.111", + "unicode-ident", +] + [[package]] name = "rust-embed" version = "8.9.0" @@ -5321,7 +5382,7 @@ dependencies = [ "security-framework 3.5.1", "security-framework-sys", "webpki-root-certs", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] @@ -6836,6 +6897,36 @@ dependencies = [ "tokio", ] +[[package]] +name = "toml_datetime" +version = "0.7.5+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347" +dependencies = [ + "serde_core", +] + +[[package]] +name = "toml_edit" +version = "0.23.10+spec-1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84c8b9f757e028cee9fa244aea147aab2a9ec09d5325a9b01e0a49730c2b5269" +dependencies = [ + "indexmap 2.12.1", + "toml_datetime", + "toml_parser", + "winnow", +] + +[[package]] +name = "toml_parser" +version = 
"1.0.9+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "702d4415e08923e7e1ef96cd5727c0dfed80b4d2fa25db9647fe5eb6f7c5a4c4" +dependencies = [ + "winnow", +] + [[package]] name = "tonic" version = "0.14.2" @@ -7988,6 +8079,15 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" +[[package]] +name = "winnow" +version = "0.7.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829" +dependencies = [ + "memchr", +] + [[package]] name = "winsafe" version = "0.0.19" diff --git a/Makefile b/Makefile index b8f90a72..120e5e73 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -.PHONY: help release publish-crates docs test-python test-python-integration format-python test-cli +.PHONY: help release publish-crates docs test-python test-python-integration format-python test-cli build-python # Default target - show help when running just 'make' .DEFAULT_GOAL := help @@ -14,6 +14,7 @@ help: @echo " make test-cli - Run CLI integration tests (pctx mcp start)" @echo " make release - Interactive release script (bump version, update changelog)" @echo " make publish-crates - Publish pctx_code_mode + dependencies to crates.io (runs locally)" + @echo " make build-python - Build Python package (resolves symlinks before build)" @echo "" # Generate CLI and Python documentation @@ -48,3 +49,7 @@ release: publish-crates: @./scripts/publish-crates.sh +# Build Python package (resolves _tool_descriptions/data symlink before build, restores after) +build-python: + @./scripts/build-python.sh + diff --git a/crates/pctx/Cargo.toml b/crates/pctx/Cargo.toml index ed890dc6..60ca8d69 100644 --- a/crates/pctx/Cargo.toml +++ b/crates/pctx/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pctx" -version = "0.6.0-beta.2" +version = "0.6.0" edition = "2024" rust-version = 
"1.89" license = "MIT" @@ -95,6 +95,7 @@ tokio = { workspace = true, features = [ # Errors anyhow = { workspace = true } +regex = "1.12.3" [target.'cfg(all(not(target_env = "msvc"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dependencies] diff --git a/crates/pctx/src/commands/mcp/add.rs b/crates/pctx/src/commands/mcp/add.rs index eb3f7661..47e04e43 100644 --- a/crates/pctx/src/commands/mcp/add.rs +++ b/crates/pctx/src/commands/mcp/add.rs @@ -9,7 +9,7 @@ use crate::{ utils::{ prompts, spinner::Spinner, - styles::{fmt_bold, fmt_dimmed, fmt_success}, + styles::{fmt_bold, fmt_good_check, fmt_literal}, }, }; use pctx_config::{ @@ -174,10 +174,10 @@ impl AddCmd { cfg.save()?; info!( "{}", - fmt_success(&format!( + fmt_good_check(&format!( "{name} upstream MCP added to {path}", name = fmt_bold(&self.name), - path = fmt_dimmed(cfg.path().as_str()), + path = fmt_literal(cfg.path().as_str()), )) ); } diff --git a/crates/pctx/src/commands/mcp/dev/app.rs b/crates/pctx/src/commands/mcp/dev/app.rs index f156e802..4dde6b4e 100644 --- a/crates/pctx/src/commands/mcp/dev/app.rs +++ b/crates/pctx/src/commands/mcp/dev/app.rs @@ -9,7 +9,7 @@ use anyhow::Result; use camino::Utf8PathBuf; use chrono::{DateTime, Utc}; use pctx_codegen::{Tool, ToolSet}; -use pctx_config::logger::LogLevel; +use pctx_config::{Config, logger::LogLevel}; use ratatui::{layout::Rect, widgets::ListState}; use super::log_entry::LogEntry; @@ -20,7 +20,7 @@ use pctx_code_mode::CodeMode; #[derive(Clone)] pub(super) enum AppMessage { ServerStarting, - ServerReady(CodeMode), + ServerReady(Config, CodeMode), ServerFailed(String), ServerStopped, ConfigChanged, @@ -28,7 +28,7 @@ pub(super) enum AppMessage { #[derive(Debug, Clone, Copy, PartialEq)] pub(super) enum FocusPanel { - Tools, + Namespaces, Logs, ToolDetail, Documentation, @@ -47,12 +47,14 @@ pub(super) struct ToolUsage { pub(super) struct App { pub(super) logs: Vec, - pub(super) tools: CodeMode, + pub(super) code_mode: 
CodeMode, + pub(super) config: Config, pub(super) server_ready: bool, pub(super) host: String, pub(super) port: u16, pub(super) start_time: Option, pub(super) log_scroll_offset: usize, + pub(super) log_visible_height: usize, pub(super) log_file_path: Utf8PathBuf, pub(super) log_file_pos: u64, @@ -65,6 +67,7 @@ pub(super) struct App { pub(super) selected_tool_index: Option, pub(super) selected_namespace_index: usize, // Index of currently selected namespace/server pub(super) detail_scroll_offset: usize, + pub(super) detail_max_scroll: usize, // Tool usage tracking pub(super) tool_usage: HashMap, @@ -74,32 +77,41 @@ pub(super) struct App { pub(super) logs_rect: Option, pub(super) namespace_rects: Vec, // Rectangles for each namespace column pub(super) docs_rect: Option, // Rectangle for documentation column + pub(super) url_rect: Option, // Rectangle for server URL box + + // Clipboard feedback + pub(super) copied_at: Option, } impl App { pub(super) fn new(host: String, port: u16, log_file_path: Utf8PathBuf) -> Self { Self { logs: Vec::new(), - tools: CodeMode::default(), + code_mode: CodeMode::default(), // set on first ServerReady event + config: Config::default(), // set on first ServerReady event server_ready: false, host, port, start_time: None, error: None, log_scroll_offset: 0, + log_visible_height: 0, log_file_path, log_file_pos: 0, - focused_panel: FocusPanel::Logs, + focused_panel: FocusPanel::Namespaces, log_filter: LogLevel::Info, tools_list_state: ListState::default(), selected_tool_index: None, selected_namespace_index: 0, detail_scroll_offset: 0, + detail_max_scroll: 0, tool_usage: HashMap::new(), tools_rect: None, logs_rect: None, namespace_rects: Vec::new(), docs_rect: None, + url_rect: None, + copied_at: None, } } @@ -159,6 +171,7 @@ impl App { Ok(()) } + // TODO: track tool usage from pctx_registry logs. 
pub(super) fn track_tool_usage(&mut self, entry: &LogEntry) { // Look for code execution logs that contain upstream tool calls if let Some(code_from_llm) = entry @@ -170,89 +183,89 @@ impl App { tracing::trace!( "Found code_from_llm field (length={}), checking for tool usage. Servers available: {}", code_from_llm.len(), - self.tools.tool_sets().len() + self.code_mode.tool_sets().len() ); - // Parse the code to find upstream tool calls like "Banking.getAccountBalance" + // Parse the code to find upstream tool calls like "await Banking.getAccountBalance(" (fs or catalog mode) + // or "await invoke({ name: 'banking__get_account_balance'," (sidecar) // Pattern: namespace.methodName( - for tool_set in self.tools.tool_sets() { - let namespace_pattern = format!("{}.", &tool_set.namespace); + for tool_set in self.code_mode.tool_sets() { tracing::trace!( - "Checking for server '{}' with namespace pattern '{namespace_pattern}' in code", + "Checking for server '{:?}' for usage in code", &tool_set.name ); - if code_from_llm.contains(&namespace_pattern) { + for tool in &tool_set.tools { + let namespace_pattern = regex::Regex::new(&format!( + r"await\s+{}\.{}\(", + tool_set.pascal_namespace(), + &tool.fn_name + )) + .unwrap(); + let sidecar_pattern = regex::Regex::new(&format!( + r#"await\s+invoke\(\s*\{{\s*["']?name["']?:\s*["']{}["']"#, + tool.id(tool_set.name.as_deref()) + )) + .unwrap(); + tracing::trace!( - "✓ Found {} namespace in code_from_llm, checking {} tools", - &tool_set.namespace, - tool_set.tools.len() + "Checking for tool '{}' (namespace_pattern={}, sidecar_pattern={})", + &tool.name, + namespace_pattern, + sidecar_pattern, ); - // Find all method calls for this server - for tool in &tool_set.tools { - // Check if this function is called in the code - let method_pattern = format!("{}.{}(", &tool_set.namespace, &tool.fn_name); - tracing::trace!( - "Checking for method pattern '{}' for tool '{}'", - method_pattern, - &tool.name - ); - - if 
code_from_llm.contains(&method_pattern) { - tracing::trace!( - "✓ Found tool usage: {}.{} (tool_name={})", - &tool_set.namespace, - &tool.fn_name, - &tool.name - ); - - // Extract a snippet of the call - if let Some(idx) = code_from_llm.find(&method_pattern) { - let snippet_start = idx.saturating_sub(10); - let snippet_end = - (idx + method_pattern.len() + 50).min(code_from_llm.len()); - let code_snippet = code_from_llm[snippet_start..snippet_end] - .lines() - .next() - .unwrap_or("") - .trim() - .to_string(); - - let key = format!("{}::{}", tool_set.name, tool.name); - - self.tool_usage - .entry(key.clone()) - .and_modify(|usage| { - usage.count += 1; - usage.last_used = entry.timestamp; - if !code_snippet.is_empty() - && !usage.code_snippets.contains(&code_snippet) - { - usage.code_snippets.push(code_snippet.clone()); - } - }) - .or_insert_with(|| ToolUsage { - tool_name: tool.name.clone(), - server_name: tool_set.name.clone(), - count: 1, - last_used: entry.timestamp, - code_snippets: if code_snippet.is_empty() { - vec![] - } else { - vec![code_snippet] - }, - }); - - tracing::trace!("✓ Tracked tool usage for key: {}", key); - } + // Only one pattern will ever match (namespace vs sidecar style), + // but there may be multiple call-sites in the code. 
+ let matches: Vec<_> = { + let ns = namespace_pattern + .find_iter(code_from_llm) + .collect::>(); + if !ns.is_empty() { + ns + } else { + sidecar_pattern.find_iter(code_from_llm).collect() } + }; + + for m in matches { + let snippet_start = m.start().saturating_sub(10); + let snippet_end = (m.end() + 50).min(code_from_llm.len()); + let code_snippet = code_from_llm[snippet_start..snippet_end] + .lines() + .next() + .unwrap_or("") + .trim() + .to_string(); + + let key = tool.id(tool_set.name.as_deref()); + + self.tool_usage + .entry(key.clone()) + .and_modify(|usage| { + usage.count += 1; + tracing::trace!("usage count: {}", usage.count); + usage.last_used = entry.timestamp; + if !code_snippet.is_empty() + && !usage.code_snippets.contains(&code_snippet) + { + usage.code_snippets.push(code_snippet.clone()); + } + }) + .or_insert_with(|| ToolUsage { + tool_name: tool.name.clone(), + server_name: tool_set.name.clone().unwrap_or_default(), + count: 1, + last_used: entry.timestamp, + code_snippets: if code_snippet.is_empty() { + vec![] + } else { + vec![code_snippet] + }, + }); + + tracing::debug!("✓ Tracked tool usage for key: {key}"); } - } else { - tracing::trace!( - "Namespace pattern '{}' not found in code_from_llm", - namespace_pattern - ); } } } @@ -284,21 +297,32 @@ impl App { pub(super) fn filtered_logs(&self) -> Vec<&LogEntry> { self.logs .iter() - .filter(|l| self.log_filter <= l.level) + .filter(|l| self.log_filter <= l.level && !l.fields.message.is_empty()) .collect() } pub(super) fn handle_message(&mut self, msg: AppMessage) { match msg { - AppMessage::ServerReady(tools) => { + AppMessage::ServerReady(cfg, code_mode) => { self.server_ready = true; self.error = None; - self.tools = tools; + self.code_mode = code_mode; + self.config = cfg; + + // Auto-select the first tool + let has_tools = self + .code_mode + .tool_sets() + .iter() + .any(|ts| !ts.tools.is_empty()); + if has_tools && self.selected_tool_index.is_none() { + self.selected_tool_index = Some(0); 
+ } // Re-process all existing logs now that we have server metadata tracing::info!( "ServerConnected: {} servers available. Re-processing existing logs for tool usage tracking.", - self.tools.tool_sets().len() + self.code_mode.tool_sets().len() ); self.reprocess_logs_for_tool_usage(); } @@ -317,7 +341,7 @@ impl App { AppMessage::ConfigChanged => { tracing::info!("Configuration file changed, reloading servers..."); // Clear existing servers - they will be repopulated when reconnection completes - self.tools = CodeMode::default(); + self.code_mode = CodeMode::default(); self.selected_tool_index = None; self.selected_namespace_index = 0; } @@ -326,8 +350,10 @@ impl App { pub(super) fn scroll_logs_up(&mut self) { // Scroll up = go back in time = increase offset + // Stop when the first log is already in frame let filtered_count = self.filtered_logs().len(); - if self.log_scroll_offset < filtered_count.saturating_sub(1) { + let max_offset = filtered_count.saturating_sub(self.log_visible_height); + if self.log_scroll_offset < max_offset { self.log_scroll_offset += 1; } } @@ -349,8 +375,8 @@ impl App { pub(super) fn next_panel(&mut self) { self.focused_panel = match self.focused_panel { - FocusPanel::Tools => FocusPanel::Logs, - FocusPanel::Logs => FocusPanel::Tools, + FocusPanel::Namespaces => FocusPanel::Logs, + FocusPanel::Logs => FocusPanel::Namespaces, FocusPanel::ToolDetail => FocusPanel::ToolDetail, // Stay in detail view FocusPanel::Documentation => FocusPanel::Documentation, // Stay in docs view }; @@ -358,8 +384,8 @@ impl App { pub(super) fn prev_panel(&mut self) { self.focused_panel = match self.focused_panel { - FocusPanel::Tools => FocusPanel::Logs, - FocusPanel::Logs => FocusPanel::Tools, + FocusPanel::Namespaces => FocusPanel::Logs, + FocusPanel::Logs => FocusPanel::Namespaces, FocusPanel::ToolDetail => FocusPanel::ToolDetail, // Stay in detail view FocusPanel::Documentation => FocusPanel::Documentation, // Stay in docs view }; @@ -378,11 +404,11 @@ impl 
App { } pub(super) fn close_tool_detail(&mut self) { - self.focused_panel = FocusPanel::Tools; + self.focused_panel = FocusPanel::Namespaces; } pub(super) fn close_documentation(&mut self) { - self.focused_panel = FocusPanel::Tools; + self.focused_panel = FocusPanel::Namespaces; } pub(super) fn scroll_detail_up(&mut self) { @@ -392,12 +418,12 @@ impl App { pub(super) fn scroll_detail_down(&mut self) { // Scroll faster (3 lines at a time) for better UX - self.detail_scroll_offset += 3; + self.detail_scroll_offset = (self.detail_scroll_offset + 3).min(self.detail_max_scroll); } pub(super) fn scroll_tools_down(&mut self) { // Sort servers alphabetically (same as rendering) - let mut sorted: Vec = self.tools.tool_sets().iter().cloned().collect(); + let mut sorted: Vec = self.code_mode.tool_sets().iter().cloned().collect(); sorted.sort_by_key(|s| s.name.clone()); if sorted.is_empty() { @@ -433,7 +459,7 @@ impl App { pub(super) fn scroll_tools_up(&mut self) { // Sort servers alphabetically (same as rendering) - let mut sorted: Vec = self.tools.tool_sets().iter().cloned().collect(); + let mut sorted: Vec = self.code_mode.tool_sets().iter().cloned().collect(); sorted.sort_by_key(|s| s.name.clone()); if sorted.is_empty() { @@ -462,12 +488,12 @@ impl App { } pub(super) fn move_to_next_namespace(&mut self) { - if self.tools.tool_sets().is_empty() { + if self.code_mode.tool_sets().is_empty() { return; } // Sort servers alphabetically (same as rendering) - let mut sorted: Vec = self.tools.tool_sets().iter().cloned().collect(); + let mut sorted: Vec = self.code_mode.tool_sets().iter().cloned().collect(); sorted.sort_by_key(|s| s.name.clone()); let num_namespaces = sorted.len(); @@ -483,12 +509,12 @@ impl App { } pub(super) fn move_to_prev_namespace(&mut self) { - if self.tools.tool_sets().is_empty() { + if self.code_mode.tool_sets().is_empty() { return; } // Sort servers alphabetically (same as rendering) - let mut sorted: Vec = self.tools.tool_sets().iter().cloned().collect(); 
+ let mut sorted: Vec = self.code_mode.tool_sets().iter().cloned().collect(); sorted.sort_by_key(|s| s.name.clone()); let num_namespaces = sorted.len(); @@ -509,7 +535,7 @@ impl App { pub(super) fn select_first_tool_in_current_namespace(&mut self) { // Sort servers alphabetically (same as rendering) - let mut sorted: Vec = self.tools.tool_sets().iter().cloned().collect(); + let mut sorted: Vec = self.code_mode.tool_sets().iter().cloned().collect(); sorted.sort_by_key(|s| s.name.clone()); if self.selected_namespace_index >= sorted.len() { @@ -538,7 +564,7 @@ impl App { let mut counter = 0; // Sort servers alphabetically (same as rendering) - let mut sorted: Vec = self.tools.tool_sets().iter().cloned().collect(); + let mut sorted: Vec = self.code_mode.tool_sets().iter().cloned().collect(); sorted.sort_by_key(|s| s.name.clone()); for tool_set in sorted { @@ -547,7 +573,7 @@ impl App { .tools .iter() .map(|tool| { - let usage_key = format!("{}::{}", tool_set.name, tool.name); + let usage_key = tool.id(tool_set.name.as_deref()); let usage_count = self.tool_usage.get(&usage_key).map_or(0, |u| u.count); (tool.clone(), usage_count) }) @@ -566,6 +592,19 @@ impl App { } pub(super) fn handle_mouse_click(&mut self, x: u16, y: u16) { + // Check URL box click — copy to clipboard + if let Some(rect) = self.url_rect + && self.server_ready + && x >= rect.x + && x < rect.x + rect.width + && y >= rect.y + && y < rect.y + rect.height + { + let _ = self.copy_server_url_to_clipboard(); + self.copied_at = Some(Instant::now()); + return; + } + // Always check the back button first (available in all views) if let Some(rect) = self.docs_rect && x >= rect.x @@ -597,7 +636,7 @@ impl App { && y >= rect.y && y < rect.y + rect.height { - self.focused_panel = FocusPanel::Tools; + self.focused_panel = FocusPanel::Namespaces; // Check which namespace was clicked within the tools panel for (idx, namespace_rect) in self.namespace_rects.iter().enumerate() { diff --git 
a/crates/pctx/src/commands/mcp/dev/log_entry.rs b/crates/pctx/src/commands/mcp/dev/log_entry.rs index ac098598..848d5a7d 100644 --- a/crates/pctx/src/commands/mcp/dev/log_entry.rs +++ b/crates/pctx/src/commands/mcp/dev/log_entry.rs @@ -1,6 +1,5 @@ use std::collections::HashMap; -use super::{SECONDARY, TERTIARY}; use chrono::{DateTime, Utc}; use pctx_config::logger::LogLevel; use ratatui::{ @@ -34,8 +33,8 @@ impl LogEntry { pub(super) fn color(&self) -> Color { match &self.level { LogLevel::Trace => Color::LightMagenta, - LogLevel::Debug => SECONDARY, - LogLevel::Info => TERTIARY, + LogLevel::Debug => Color::Blue, + LogLevel::Info => Color::Green, LogLevel::Warn => Color::Yellow, LogLevel::Error => Color::Red, } diff --git a/crates/pctx/src/commands/mcp/dev/mod.rs b/crates/pctx/src/commands/mcp/dev/mod.rs index 1ce62b2e..aa17cb1b 100644 --- a/crates/pctx/src/commands/mcp/dev/mod.rs +++ b/crates/pctx/src/commands/mcp/dev/mod.rs @@ -12,8 +12,8 @@ use camino::Utf8PathBuf; use clap::Parser; use crossterm::{ event::{ - self, DisableMouseCapture, EnableMouseCapture, Event, KeyCode, KeyEventKind, MouseButton, - MouseEventKind, + self, DisableMouseCapture, EnableMouseCapture, Event, KeyCode, KeyEventKind, KeyModifiers, + MouseButton, MouseEventKind, }, execute, terminal::{EnterAlternateScreen, LeaveAlternateScreen, disable_raw_mode, enable_raw_mode}, @@ -27,11 +27,10 @@ use crate::commands::mcp::start::StartCmd; use app::{App, AppMessage, FocusPanel}; use pctx_mcp_server::PctxMcpServer; -#[allow(unused)] -const PRIMARY: Color = Color::Rgb(0, 43, 86); // #002B56 -const SECONDARY: Color = Color::Rgb(24, 66, 137); // #184289 -const TERTIARY: Color = Color::Rgb(30, 105, 105); // #1E6969 -const TEXT_COLOR: Color = Color::Rgb(1, 46, 88); // #012E58 +const BORDER_SELECTED: Color = Color::Cyan; +const BORDER: Color = Color::Reset; +const SELECTED_LINE_BG: Color = Color::Blue; +const SELECTED_LINE_FG: Color = Color::White; type ServerControl = Arc< Mutex< @@ -246,7 +245,7 @@ fn 
run_ui( if key.kind == KeyEventKind::Press { let mut app = app.lock().unwrap(); match key.code { - KeyCode::Char('q') => { + KeyCode::Char('c') if key.modifiers.contains(KeyModifiers::CONTROL) => { break; } KeyCode::Esc | KeyCode::Backspace => { @@ -259,7 +258,7 @@ fn run_ui( } } KeyCode::Enter => { - if app.focused_panel == FocusPanel::Tools { + if app.focused_panel == FocusPanel::Namespaces { app.show_tool_detail(); } } @@ -271,13 +270,13 @@ fn run_ui( } KeyCode::Up => match app.focused_panel { FocusPanel::Logs => app.scroll_logs_up(), - FocusPanel::Tools => app.scroll_tools_up(), + FocusPanel::Namespaces => app.scroll_tools_up(), FocusPanel::ToolDetail => app.scroll_detail_up(), FocusPanel::Documentation => app.scroll_detail_up(), }, KeyCode::Down => match app.focused_panel { FocusPanel::Logs => app.scroll_logs_down(), - FocusPanel::Tools => app.scroll_tools_down(), + FocusPanel::Namespaces => app.scroll_tools_down(), FocusPanel::ToolDetail => app.scroll_detail_down(), FocusPanel::Documentation => app.scroll_detail_down(), }, @@ -300,12 +299,12 @@ fn run_ui( _ => {} }, KeyCode::Left => { - if app.focused_panel == FocusPanel::Tools { + if app.focused_panel == FocusPanel::Namespaces { app.move_to_prev_namespace(); } } KeyCode::Right => { - if app.focused_panel == FocusPanel::Tools { + if app.focused_panel == FocusPanel::Namespaces { app.move_to_next_namespace(); } } @@ -473,7 +472,7 @@ fn spawn_server_task( let handle = tokio::spawn(async move { tx.send(AppMessage::ServerStarting).ok(); - let tools = match load_code_mode_for_dev(&cfg).await { + let code_mode = match load_code_mode_for_dev(&cfg).await { Ok(loaded) => loaded, Err(err) => { tx.send(AppMessage::ServerFailed(format!( @@ -485,12 +484,12 @@ fn spawn_server_task( }; // Run server with shutdown signal - let pctx_mcp = PctxMcpServer::new(&host, port, false); + let pctx_mcp = PctxMcpServer::new(&host, port, false, &cfg, code_mode.clone()); - tx.send(AppMessage::ServerReady(tools.clone())).ok(); + 
tx.send(AppMessage::ServerReady(cfg, code_mode)).ok(); if let Err(e) = pctx_mcp - .serve_with_shutdown(&cfg, tools, async move { + .serve_with_shutdown(async move { let _ = shutdown_rx.await; }) .await @@ -525,7 +524,7 @@ mod tests { use pctx_config::logger::LogLevel; use serde_json::json; - fn create_pctx_tools() -> CodeMode { + fn create_test_code_mode() -> CodeMode { let account_schema = json!({ "type": "object", "required": ["account_id", "opened_at", "balance", "status"], @@ -537,7 +536,7 @@ mod tests { } }); let tools = vec![ - Tool::new_mcp( + Tool::new( "get_account_balance", Some("Retrieves the balance for an account".into()), serde_json::from_value(json!({ @@ -551,7 +550,7 @@ mod tests { Some(serde_json::from_value(account_schema.clone()).unwrap()), ) .unwrap(), - Tool::new_mcp( + Tool::new( "freeze_account", Some("Freezes an account".into()), serde_json::from_value(json!({ @@ -568,8 +567,12 @@ mod tests { ]; let mut cm = CodeMode::default(); - cm.add_tool_set(ToolSet::new("banking", "Banking MCP Server", tools)) - .unwrap(); + cm.add_tool_set(ToolSet::new( + Some("banking".into()), + "Banking MCP Server", + tools, + )) + .unwrap(); cm } @@ -582,7 +585,7 @@ mod tests { let mut app = App::new("localhost".to_string(), 8080, log_file); // Add the test server - app.tools = create_pctx_tools(); + app.code_mode = create_test_code_mode(); // Create a log entry with code_from_llm field containing Banking.getAccountBalance let log_entry = LogEntry { @@ -604,7 +607,7 @@ mod tests { app.track_tool_usage(&log_entry); // Verify that the tool was tracked - let key = "banking::get_account_balance"; + let key = "banking__get_account_balance"; assert!( app.tool_usage.contains_key(key), "Expected tool_usage to contain key '{}', but it doesn't. 
Keys present: {:?}", @@ -627,7 +630,7 @@ mod tests { let mut app = App::new("localhost".to_string(), 8080, log_file); // Add the test server - app.tools = create_pctx_tools(); + app.code_mode = create_test_code_mode(); // Create a log entry with Banking.freezeAccount let log_entry = LogEntry { @@ -647,7 +650,7 @@ mod tests { app.track_tool_usage(&log_entry); // Verify that the tool was tracked - let key = "banking::freeze_account"; + let key = "banking__freeze_account"; assert!( app.tool_usage.contains_key(key), "Expected tool_usage to contain key '{}', but it doesn't. Keys present: {:?}", @@ -669,7 +672,7 @@ mod tests { let mut app = App::new("localhost".to_string(), 8080, log_file); // Add the test server - app.tools = create_pctx_tools(); + app.code_mode = create_test_code_mode(); // First call let log_entry1 = LogEntry { @@ -702,7 +705,7 @@ mod tests { app.track_tool_usage(&log_entry1); app.track_tool_usage(&log_entry2); - let key = "banking::get_account_balance"; + let key = "banking__get_account_balance"; let usage = app.tool_usage.get(key).unwrap(); assert_eq!(usage.count, 2, "Expected count to be 2 after two calls"); assert_eq!( diff --git a/crates/pctx/src/commands/mcp/dev/renderers.rs b/crates/pctx/src/commands/mcp/dev/renderers.rs index 5ebe24cb..c4f191cc 100644 --- a/crates/pctx/src/commands/mcp/dev/renderers.rs +++ b/crates/pctx/src/commands/mcp/dev/renderers.rs @@ -8,7 +8,7 @@ use ratatui::{ }; use super::{ - SECONDARY, TERTIARY, TEXT_COLOR, + BORDER, BORDER_SELECTED, SELECTED_LINE_BG, SELECTED_LINE_FG, app::{App, FocusPanel}, }; @@ -81,32 +81,35 @@ pub(super) fn ui(f: &mut Frame, app: &mut App) { } fn render_header(f: &mut Frame, app: &mut App, area: Rect) { - // Create a 3-column layout: Title | Server | Docs let chunks = Layout::default() .direction(Direction::Horizontal) .constraints([ Constraint::Min(20), // Title (flexible) Constraint::Length(50), // Server URL - Constraint::Length(12), // Docs button ]) .split(area); // Title let title = vec![ 
- Span::styled("PCTX ", Style::default().fg(SECONDARY).bold()), - Span::styled("Dev Mode", Style::default().fg(TEXT_COLOR)), + Span::styled("PCTX ", Style::default().fg(Color::Blue).bold()), + Span::styled("Dev Mode", Style::default().fg(Color::Blue)), ]; let title_widget = Paragraph::new(Line::from(title)) .block(Block::default().borders(Borders::ALL)) .alignment(Alignment::Center); f.render_widget(title_widget, chunks[0]); - // Url - - let url_span = if app.server_ready { + // Server URL — show "Copied!" for 2s after click, then URL + let copied = app + .copied_at + .map(|t| t.elapsed().as_secs() < 2) + .unwrap_or(false); + let url_span = if copied { + Span::styled("Copied!", Style::default().fg(Color::LightGreen).bold()) + } else if app.server_ready { Span::styled( - format!("{} [c]", app.get_server_url()), - Style::default().fg(TERTIARY).bold(), + format!("{}", app.get_server_url()), + Style::default().fg(Color::Blue), ) } else { Span::raw("") @@ -116,34 +119,16 @@ fn render_header(f: &mut Frame, app: &mut App, area: Rect) { .alignment(Alignment::Center); f.render_widget(url_widget, chunks[1]); - // Docs/Back button with keyboard shortcut hint - // In ToolDetail view: show "Back" (goes to Tools) - // In Documentation view: show "Back" (goes to Tools) - // In Tools/Logs: show "Docs" (opens documentation) - let (docs_text, docs_color) = match app.focused_panel { - FocusPanel::ToolDetail => ("[d] Back", TERTIARY), - FocusPanel::Documentation => ("[d] Back", TERTIARY), - _ => ("[d] Docs", SECONDARY), - }; - let docs_content = vec![Span::styled( - docs_text, - Style::default().fg(docs_color).add_modifier(Modifier::BOLD), - )]; - let docs_widget = Paragraph::new(Line::from(docs_content)) - .block(Block::default().borders(Borders::ALL)) - .alignment(Alignment::Center); - f.render_widget(docs_widget, chunks[2]); - - // Store docs button rectangle for click detection - app.docs_rect = Some(chunks[2]); + app.url_rect = Some(chunks[1]); + app.docs_rect = None; } fn 
render_tools_panel(f: &mut Frame, app: &mut App, area: Rect) { - let is_focused = app.focused_panel == FocusPanel::Tools; + let is_focused = app.focused_panel == FocusPanel::Namespaces; let border_style = if is_focused { - Style::default().fg(SECONDARY) + Style::default().fg(BORDER_SELECTED) } else { - Style::default() + Style::default().fg(BORDER) }; if let Some(err) = &app.error { @@ -154,7 +139,6 @@ fn render_tools_panel(f: &mut Frame, app: &mut App, area: Rect) { .border_style(border_style) .title("Error"), ) - .style(Style::default().red()) .alignment(Alignment::Center); f.render_widget(placeholder, area); return; @@ -166,80 +150,39 @@ fn render_tools_panel(f: &mut Frame, app: &mut App, area: Rect) { .borders(Borders::ALL) .border_style(border_style), ) - .style(Style::default().yellow()) .alignment(Alignment::Center); f.render_widget(placeholder, area); return; } - let total_tools: usize = app.tools.tool_sets().iter().map(|s| s.tools.len()).sum(); + let total_tools: usize = app + .code_mode + .tool_sets() + .iter() + .map(|s| s.tools.len()) + .sum(); let title = format!("MCP Tools [{total_tools} total]"); // Sort servers alphabetically by name - let mut sorted: Vec = app.tools.tool_sets().iter().cloned().collect(); + let mut sorted: Vec = app.code_mode.tool_sets().iter().cloned().collect(); sorted.sort_by_key(|s| s.name.clone()); if sorted.is_empty() { let help_lines = vec![ - Line::from(vec![Span::styled( - "No MCP servers connected", - Style::default() - .fg(Color::Yellow) - .add_modifier(Modifier::BOLD), - )]), Line::from(""), Line::from(vec![Span::styled( - "To add upstream MCP servers:", - Style::default().fg(TEXT_COLOR), + " No MCP servers connected", + Style::default().add_modifier(Modifier::BOLD), )]), Line::from(""), - Line::from(vec![ - Span::styled("1. 
", Style::default().fg(SECONDARY)), - Span::raw("Edit your "), - Span::styled( - "pctx.json", - Style::default().fg(TERTIARY).add_modifier(Modifier::BOLD), - ), - Span::raw(" config file"), - ]), - Line::from(vec![ - Span::styled("2. ", Style::default().fg(SECONDARY)), - Span::raw("Add servers to the "), - Span::styled("\"upstreams\"", Style::default().fg(TERTIARY)), - Span::raw(" array"), - ]), - Line::from(vec![ - Span::styled("3. ", Style::default().fg(SECONDARY)), - Span::raw("Server will restart automatically"), - ]), - Line::from(""), - Line::from(vec![Span::styled( - "Example config:", - Style::default().fg(SECONDARY).add_modifier(Modifier::BOLD), - )]), - Line::from(vec![Span::styled( - r#" "upstreams": [{"#, - Style::default().fg(Color::DarkGray), - )]), Line::from(vec![Span::styled( - r" {", - Style::default().fg(Color::DarkGray), - )]), - Line::from(vec![Span::styled( - r#" "name": "my-server","#, - Style::default().fg(Color::DarkGray), - )]), - Line::from(vec![Span::styled( - r#" "url": "http://localhost:3000""#, - Style::default().fg(Color::DarkGray), - )]), - Line::from(vec![Span::styled( - r" }", - Style::default().fg(Color::DarkGray), + " To add upstream MCP servers:", + Style::default(), )]), + Line::from(""), Line::from(vec![Span::styled( - r" ]", - Style::default().fg(Color::DarkGray), + " pctx mcp add [URL]", + Style::default().fg(Color::LightBlue), )]), ]; @@ -276,27 +219,12 @@ fn render_tools_panel(f: &mut Frame, app: &mut App, area: Rect) { for (idx, tool_set) in sorted.iter().enumerate() { let mut items: Vec = Vec::new(); - // Server header - let status = if tool_set.tools.is_empty() { - "!" 
- } else { - "✓" - }; - - items.push(ListItem::new(Line::from(vec![ - Span::styled(format!("{status} "), Style::default().fg(TERTIARY)), - Span::styled( - &tool_set.name, - Style::default().fg(SECONDARY).add_modifier(Modifier::BOLD), - ), - ]))); - // Sort tools by usage count (descending) let mut tools_with_usage: Vec<_> = tool_set .tools .iter() .map(|tool| { - let usage_key = format!("{}::{}", tool_set.name, tool.name); + let usage_key = tool.id(tool_set.name.as_deref()); let usage_count = app.tool_usage.get(&usage_key).map_or(0, |u| u.count); (tool, usage_count) }) @@ -310,13 +238,13 @@ fn render_tools_panel(f: &mut Frame, app: &mut App, area: Rect) { for (tool, usage_count) in tools_with_usage { let is_selected_tool = app.selected_tool_index == Some(global_tool_index); - let mut spans = vec![Span::styled(&tool.fn_name, Style::default().fg(TERTIARY))]; + let mut spans = vec![Span::styled(&tool.fn_name, Style::default())]; // Add usage count in gray if > 0 if usage_count > 0 { spans.push(Span::styled( format!(" ({usage_count} calls)"), - Style::default().fg(Color::DarkGray), + Style::default().dim(), )); } @@ -324,7 +252,9 @@ fn render_tools_panel(f: &mut Frame, app: &mut App, area: Rect) { if is_selected_tool && is_focused { spans.push(Span::styled( " [enter]", - Style::default().fg(TERTIARY).add_modifier(Modifier::DIM), + Style::default() + .fg(Color::White) + .add_modifier(Modifier::DIM), )); } @@ -332,53 +262,54 @@ fn render_tools_panel(f: &mut Frame, app: &mut App, area: Rect) { global_tool_index += 1; } - let namespace_title = format!("{} ({} tools)", tool_set.name, tool_set.tools.len()); + let namespace_title = format!( + "{} ({} tools)", + tool_set.pascal_namespace(), + tool_set.tools.len() + ); // Check if a tool in this namespace is selected let selected_in_this_namespace = app .selected_tool_index .filter(|&idx| idx >= tools_start_index && idx < global_tool_index) - .map(|idx| idx - tools_start_index + 1); // +1 to account for header row + .map(|idx| 
idx - tools_start_index); let mut list_state = ListState::default(); if let Some(local_idx) = selected_in_this_namespace { list_state.select(Some(local_idx)); } - // Highlight border of active namespace - let namespace_border_style = if is_focused && idx == app.selected_namespace_index { - Style::default().fg(TERTIARY).add_modifier(Modifier::BOLD) - } else { - border_style - }; - let list = List::new(items) .block( Block::default() .borders(Borders::ALL) - .border_style(namespace_border_style) + .border_style(border_style) .title(namespace_title), ) - .highlight_style( + .highlight_style(if is_focused { Style::default() - .bg(Color::DarkGray) - .add_modifier(Modifier::BOLD), - ); + .bg(SELECTED_LINE_BG) + .fg(SELECTED_LINE_FG) + .add_modifier(Modifier::BOLD) + } else { + Style::default() + }); f.render_stateful_widget(list, namespace_chunks[idx], &mut list_state); } } -fn render_logs_panel(f: &mut Frame, app: &App, area: Rect) { +fn render_logs_panel(f: &mut Frame, app: &mut App, area: Rect) { let is_focused = app.focused_panel == FocusPanel::Logs; let border_style = if is_focused { - Style::default().fg(SECONDARY) + Style::default().fg(BORDER_SELECTED) } else { - Style::default() + Style::default().fg(BORDER) }; - let filtered_logs = app.filtered_logs(); let visible_height = area.height.saturating_sub(2) as usize; + app.log_visible_height = visible_height; + let filtered_logs = app.filtered_logs(); // Show most recent logs at the bottom let total_logs = filtered_logs.len(); @@ -407,34 +338,33 @@ fn render_logs_panel(f: &mut Frame, app: &App, area: Rect) { .wrap(Wrap { trim: false }); f.render_widget(logs, area); + + let placeholder = Paragraph::new("").block( + Block::default() + .borders(Borders::ALL) + .border_style(border_style) + .title(format!("Logs [{}]", app.log_filter.as_str().to_uppercase())), + ); + f.render_widget(placeholder, area); } -fn render_tool_detail(f: &mut Frame, app: &App, area: Rect) { +fn render_tool_detail(f: &mut Frame, app: &mut App, 
area: Rect) { if let Some((tool_set, tool)) = app.get_selected_tool() { - let usage_key = format!("{}::{}", tool_set.name, tool.name); + let usage_key = tool.id(tool_set.name.as_deref()); let usage = app.tool_usage.get(&usage_key); let mut lines: Vec = vec![ // Tool header Line::from(vec![ - Span::styled( - "Server: ", - Style::default().fg(SECONDARY).add_modifier(Modifier::BOLD), - ), - Span::raw(&tool_set.name), + Span::styled("Server: ", Style::default().add_modifier(Modifier::BOLD)), + Span::raw(tool_set.pascal_namespace()), ]), Line::from(vec![ - Span::styled( - "Function: ", - Style::default().fg(TERTIARY).add_modifier(Modifier::BOLD), - ), + Span::styled("Function: ", Style::default().add_modifier(Modifier::BOLD)), Span::raw(&tool.fn_name), ]), Line::from(vec![ - Span::styled( - "Tool Name: ", - Style::default().fg(TERTIARY).add_modifier(Modifier::BOLD), - ), + Span::styled("Tool Name: ", Style::default().add_modifier(Modifier::BOLD)), Span::raw(&tool.name), ]), Line::from(""), @@ -444,9 +374,11 @@ fn render_tool_detail(f: &mut Frame, app: &App, area: Rect) { if let Some(desc) = &tool.description { lines.push(Line::from(vec![Span::styled( "Description:", - Style::default().fg(TERTIARY).add_modifier(Modifier::BOLD), + Style::default().add_modifier(Modifier::BOLD), )])); - lines.push(Line::from(Span::raw(desc))); + for desc_line in desc.lines() { + lines.push(Line::from(Span::raw(desc_line.to_owned()))); + } lines.push(Line::from("")); } @@ -454,7 +386,7 @@ fn render_tool_detail(f: &mut Frame, app: &App, area: Rect) { if let Some(usage) = usage { lines.push(Line::from(vec![Span::styled( "Usage Stats:", - Style::default().fg(SECONDARY).add_modifier(Modifier::BOLD), + Style::default().add_modifier(Modifier::BOLD), )])); lines.push(Line::from(format!(" Calls: {}", usage.count))); lines.push(Line::from(format!( @@ -466,7 +398,7 @@ fn render_tool_detail(f: &mut Frame, app: &App, area: Rect) { if !usage.code_snippets.is_empty() { 
lines.push(Line::from(vec![Span::styled( "Example Usage:", - Style::default().fg(SECONDARY).add_modifier(Modifier::BOLD), + Style::default().add_modifier(Modifier::BOLD), )])); for snippet in &usage.code_snippets { lines.push(Line::from(format!(" {snippet}"))); @@ -478,7 +410,7 @@ fn render_tool_detail(f: &mut Frame, app: &App, area: Rect) { // Input type lines.push(Line::from(vec![Span::styled( "Input Type:", - Style::default().fg(SECONDARY).add_modifier(Modifier::BOLD), + Style::default().add_modifier(Modifier::BOLD), )])); if let Some(i) = &tool.input_signature() { lines.push(Line::from(format!(" {i}"))); @@ -490,7 +422,7 @@ fn render_tool_detail(f: &mut Frame, app: &App, area: Rect) { // Output type lines.push(Line::from(vec![Span::styled( "Output Type:", - Style::default().fg(SECONDARY).add_modifier(Modifier::BOLD), + Style::default().add_modifier(Modifier::BOLD), )])); lines.push(Line::from(format!(" {}", tool.output_signature()))); lines.push(Line::from("")); @@ -498,14 +430,17 @@ fn render_tool_detail(f: &mut Frame, app: &App, area: Rect) { // TypeScript types lines.push(Line::from(vec![Span::styled( "TypeScript Definition:", - Style::default().fg(TERTIARY).add_modifier(Modifier::BOLD), + Style::default().add_modifier(Modifier::BOLD), )])); for line in tool.types().lines() { lines.push(Line::from(format!(" {line}"))); } + lines.push(Line::from(format!(""))); + lines.push(Line::from(format!(""))); // Apply scroll let visible_height = area.height.saturating_sub(2) as usize; + app.detail_max_scroll = lines.len().saturating_sub(visible_height); let start_idx = app.detail_scroll_offset.min(lines.len().saturating_sub(1)); let end_idx = (start_idx + visible_height).min(lines.len()); @@ -515,13 +450,8 @@ fn render_tool_detail(f: &mut Frame, app: &App, area: Rect) { .block( Block::default() .borders(Borders::ALL) - .border_style(Style::default().fg(SECONDARY)) - .title(format!( - "Tool Detail - {} [{}/{}]", - tool.name, - app.detail_scroll_offset + 1, - lines.len() 
- )), + .border_style(Style::default().fg(BORDER_SELECTED)) + .title(format!("Tool Detail - {}", tool.name)), ) .wrap(Wrap { trim: false }); @@ -529,13 +459,13 @@ fn render_tool_detail(f: &mut Frame, app: &App, area: Rect) { } else { let placeholder = Paragraph::new("No tool selected") .block(Block::default().borders(Borders::ALL).title("Tool Detail")) - .style(Style::default().fg(Color::DarkGray)) + .style(Style::default()) .alignment(Alignment::Center); f.render_widget(placeholder, area); } } -fn render_documentation(f: &mut Frame, app: &App, area: Rect) { +fn render_documentation(f: &mut Frame, app: &mut App, area: Rect) { // Read and render the CLI.md documentation const CLI_DOCS: &str = include_str!("../../../../../../docs/CLI.md"); @@ -545,6 +475,7 @@ fn render_documentation(f: &mut Frame, app: &App, area: Rect) { // Apply scroll let visible_height = area.height.saturating_sub(2) as usize; + app.detail_max_scroll = total_lines.saturating_sub(visible_height); let start_idx = app.detail_scroll_offset.min(total_lines.saturating_sub(1)); let end_idx = (start_idx + visible_height).min(total_lines); @@ -564,7 +495,7 @@ fn render_documentation(f: &mut Frame, app: &App, area: Rect) { } fn render_footer(f: &mut Frame, app: &App, area: Rect) { - let mut help_text = vec![Span::raw("[q] Quit ")]; + let mut help_text = vec![Span::raw("[^C] Quit ")]; // Always show copy URL if server is running if app.server_ready { @@ -576,7 +507,7 @@ fn render_footer(f: &mut Frame, app: &App, area: Rect) { let fast_scroll = Span::raw("[PgUp/PgDn] Fast Scroll "); let select_text = Span::raw("[Mouse] Select Text "); let docs = Span::raw("[d] Docs "); - let filter_level = Span::raw("[f] Filter Level "); + let filter_level = Span::raw("[f] Log Level "); let switch_panel = Span::raw("[Tab] Switch Panel "); let navigate = Span::raw("[↑/↓] Navigate "); let switch_namespace = Span::raw("[←/→] Switch Namespace "); @@ -592,14 +523,14 @@ fn render_footer(f: &mut Frame, app: &App, area: Rect) { 
FocusPanel::Logs => { help_text.extend([docs, switch_panel, navigate, filter_level]); } - FocusPanel::Tools => { + FocusPanel::Namespaces => { help_text.extend([docs, switch_panel, navigate, switch_namespace, view_details]); } } let footer = Paragraph::new(Line::from(help_text)) .block(Block::default().borders(Borders::ALL)) - .style(Style::default().fg(Color::White)); + .style(Style::default()); f.render_widget(footer, area); } diff --git a/crates/pctx/src/commands/mcp/init.rs b/crates/pctx/src/commands/mcp/init.rs index 6312f1a5..2d930e9f 100644 --- a/crates/pctx/src/commands/mcp/init.rs +++ b/crates/pctx/src/commands/mcp/init.rs @@ -8,7 +8,7 @@ use crate::{ commands::{USER_CANCELLED, mcp::add::AddCmd}, utils::{ prompts, - styles::{fmt_bold, fmt_dimmed, fmt_success}, + styles::{fmt_bold, fmt_dimmed, fmt_good_check, fmt_literal}, }, }; @@ -154,7 +154,7 @@ impl InitCmd { cfg = updated; info!( "{}", - fmt_success(&format!("Added {name}", name = fmt_bold(&name))) + fmt_good_check(&format!("Added {name}", name = fmt_bold(&name))) ); } Err(e) => warn!("{e}"), @@ -170,10 +170,10 @@ impl InitCmd { info!( "{}", - fmt_success(&format!( + fmt_good_check(&format!( "{name} configuration created: {path}", name = fmt_bold("pctx"), - path = fmt_dimmed(cfg.path().as_str()), + path = fmt_literal(cfg.path().as_str()), )) ); diff --git a/crates/pctx/src/commands/mcp/list.rs b/crates/pctx/src/commands/mcp/list.rs index 0262c9ca..28c04ed1 100644 --- a/crates/pctx/src/commands/mcp/list.rs +++ b/crates/pctx/src/commands/mcp/list.rs @@ -2,7 +2,10 @@ use std::fmt::Display; use crate::utils::{ spinner::Spinner, - styles::{fmt_bold, fmt_cyan, fmt_dimmed, fmt_error, fmt_green, fmt_success}, + styles::{ + cargo_styles, fmt_bold, fmt_context, fmt_dimmed, fmt_error_x, fmt_good_check, fmt_header, + fmt_literal, fmt_style, + }, }; use anyhow::Result; use clap::Parser; @@ -23,7 +26,7 @@ impl ListCmd { info!(""); info!( "Run {cmd} to add some to your configuration", - cmd = fmt_bold("pctx add ") + 
cmd = fmt_style("pctx add ", &cargo_styles::LITERAL) ); return Ok(cfg); } @@ -96,28 +99,28 @@ impl UpstreamMcpSummary { impl Display for UpstreamMcpSummary { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut fields = vec![]; - let target_field = format!("{}: {}", fmt_bold("Target"), fmt_cyan(&self.target)); - let transport_field = format!("{}: {}", fmt_bold("Transport"), self.transport); + let target_field = format!("{}: {}", fmt_literal("Target"), fmt_bold(&self.target)); + let transport_field = format!("{}: {}", fmt_literal("Transport"), self.transport); if let Some(e) = &self.error { - fields.extend([fmt_error(e), target_field, transport_field]); + fields.extend([fmt_error_x(e), target_field, transport_field]); } else { - fields.extend([fmt_success("Connected"), target_field, transport_field]); + fields.extend([fmt_good_check("Connected"), target_field, transport_field]); if let Some(init_res) = &self.init_res { fields.push(format!( "{}: {}", - fmt_bold("Upstream Name"), + fmt_literal("Upstream Name"), &init_res.server_info.name )); fields.push(format!( "{}: {}", - fmt_bold("Upstream Version"), + fmt_literal("Upstream Version"), &init_res.server_info.version )); fields.push(format!( "{}: {}", - fmt_bold("Upstream Title"), + fmt_literal("Upstream Title"), init_res .server_info .title @@ -133,27 +136,27 @@ impl Display for UpstreamMcpSummary { }); fields.push(format!( "{}: {instructions}", - fmt_bold("Upstream Instructions"), + fmt_literal("Upstream Instructions"), )); } if self.tools.is_empty() { - fields.push(format!("{}: {}", fmt_bold("Tools"), fmt_dimmed("none"))); + fields.push(format!("{}: {}", fmt_literal("Tools"), fmt_dimmed("none"))); } else { let tool_display = self .tools .iter() .take(5) - .map(|t| fmt_green(t)) + .map(|t| fmt_context(t)) .collect::>() .join(", "); fields.push(format!( "{} ({}): {tool_display}{}", - fmt_bold("Tools"), + fmt_literal("Tools"), self.tools.len(), if self.tools.len() > 5 { - format!(", {}", 
fmt_green("...")) + format!(", {}", fmt_context("...")) } else { String::new() } @@ -174,6 +177,6 @@ impl Display for UpstreamMcpSummary { .collect::>() .join("\n"); - write!(f, "{}\n{tree}", fmt_cyan(&self.name)) + write!(f, "{}\n{tree}", fmt_header(&self.name)) } } diff --git a/crates/pctx/src/commands/mcp/remove.rs b/crates/pctx/src/commands/mcp/remove.rs index b6defe85..6712f64e 100644 --- a/crates/pctx/src/commands/mcp/remove.rs +++ b/crates/pctx/src/commands/mcp/remove.rs @@ -3,7 +3,7 @@ use clap::Parser; use pctx_config::Config; use tracing::info; -use crate::utils::styles::{fmt_bold, fmt_dimmed, fmt_success}; +use crate::utils::styles::{fmt_bold, fmt_good_check, fmt_literal}; #[derive(Debug, Clone, Parser)] pub struct RemoveCmd { @@ -19,10 +19,10 @@ impl RemoveCmd { info!( "{}", - fmt_success(&format!( + fmt_good_check(&format!( "{name} MCP Server removed from {path}", name = fmt_bold(&self.name), - path = fmt_dimmed(cfg.path().as_str()), + path = fmt_literal(cfg.path().as_str()), )) ); diff --git a/crates/pctx/src/commands/mcp/start.rs b/crates/pctx/src/commands/mcp/start.rs index 847ee4a0..10b85e71 100644 --- a/crates/pctx/src/commands/mcp/start.rs +++ b/crates/pctx/src/commands/mcp/start.rs @@ -51,11 +51,11 @@ impl StartCmd { let code_mode = StartCmd::load_code_mode(&cfg).await?; - let server = PctxMcpServer::new(&self.host, self.port, !self.no_banner); + let server = PctxMcpServer::new(&self.host, self.port, !self.no_banner, &cfg, code_mode); if self.stdio { - server.serve_stdio(&cfg, code_mode).await?; + server.serve_stdio().await?; } else { - server.serve(&cfg, code_mode).await?; + server.serve().await?; } info!("Shutting down..."); diff --git a/crates/pctx/src/lib.rs b/crates/pctx/src/lib.rs index 5680db46..f8ad2e75 100644 --- a/crates/pctx/src/lib.rs +++ b/crates/pctx/src/lib.rs @@ -175,7 +175,6 @@ pub enum McpCommands { /// Start the PCTX MCP server #[command(long_about = "Start the PCTX MCP server (exposes /mcp endpoint).")] 
Start(commands::mcp::StartCmd), - /// Start the PCTX MCP server with terminal UI #[command( long_about = "Start the PCTX MCP server in development mode with an interactive terminal UI with data and logging." diff --git a/crates/pctx/src/utils/prompts.rs b/crates/pctx/src/utils/prompts.rs index b899468d..d78780d3 100644 --- a/crates/pctx/src/utils/prompts.rs +++ b/crates/pctx/src/utils/prompts.rs @@ -4,7 +4,7 @@ use pctx_codegen::case::Case; use pctx_config::auth::{AuthConfig, AuthSecret, SecretString, write_to_keychain}; use tracing::info; -use crate::utils::styles::{fmt_dimmed, fmt_success}; +use crate::utils::styles::{fmt_dimmed, fmt_good_check}; pub(crate) fn prompt_auth(server_name: &str) -> Result { let options = vec![ @@ -107,7 +107,7 @@ pub(crate) fn prompt_secret(msg: &str, prefix: &str, key: &str) -> Result>>(&mut self, msg: M) { - let symbol = fmt_green(CHECK); + let symbol = fmt_good(CHECK); if let Some(sp) = self.sp.as_mut() { sp.stop_and_persist(&symbol, &msg.into()); } else { @@ -50,7 +50,7 @@ impl Spinner { } pub(crate) fn stop_warn>>(&mut self, msg: M) { - let symbol = fmt_yellow("ø"); + let symbol = fmt_warn("ø"); if let Some(sp) = self.sp.as_mut() { sp.stop_and_persist(&symbol, &msg.into()); } else { @@ -59,7 +59,7 @@ impl Spinner { } pub(crate) fn stop_error>>(&mut self, msg: M) { - let symbol = fmt_red(MARK); + let symbol = fmt_error(MARK); if let Some(sp) = self.sp.as_mut() { sp.stop_and_persist(&symbol, &msg.into()); } else { diff --git a/crates/pctx/src/utils/styles.rs b/crates/pctx/src/utils/styles.rs index 3a73cd98..22298561 100644 --- a/crates/pctx/src/utils/styles.rs +++ b/crates/pctx/src/utils/styles.rs @@ -1,108 +1,102 @@ -use anstyle::{AnsiColor, Color, RgbColor, Style}; -use clap::builder::Styles; - +#![allow(dead_code)] use crate::utils::{CHECK, MARK}; -// Brand colors -#[allow(dead_code)] -const PRIMARY: RgbColor = RgbColor(0, 43, 86); // #002B56 -const SECONDARY: RgbColor = RgbColor(24, 66, 137); // #184289 -const TERTIARY: 
RgbColor = RgbColor(30, 105, 105); // #1E6969 -const TEXT_COLOR: RgbColor = RgbColor(1, 46, 88); // #012E58 - -pub(crate) fn get_styles() -> Styles { - Styles::styled() - .usage( - Style::new() - .bold() - .underline() - .fg_color(Some(Color::Rgb(SECONDARY))), - ) - .header( - Style::new() - .bold() - .underline() - .fg_color(Some(Color::Rgb(SECONDARY))), - ) - .literal(Style::new().fg_color(Some(Color::Rgb(TERTIARY)))) - .invalid( - Style::new() - .bold() - .fg_color(Some(Color::Ansi(AnsiColor::Red))), - ) - .error( - Style::new() - .bold() - .fg_color(Some(Color::Ansi(AnsiColor::Red))), - ) - .valid( - Style::new() - .bold() - .underline() - .fg_color(Some(Color::Rgb(TERTIARY))), - ) - .placeholder(Style::new().fg_color(Some(Color::Ansi(AnsiColor::White)))) -} - -fn fmt_style(msg: &str, style: &Style) -> String { - format!("{style}{msg}{style:#}") -} - -#[allow(dead_code)] -pub(crate) fn fmt_primary(msg: &str) -> String { - let style = Style::new().fg_color(Some(Color::Rgb(PRIMARY))); - fmt_style(msg, &style) -} - -pub(crate) fn fmt_secondary(msg: &str) -> String { - let style = Style::new().fg_color(Some(Color::Rgb(SECONDARY))); - fmt_style(msg, &style) -} - -pub(crate) fn fmt_tertiary(msg: &str) -> String { - let style = Style::new().fg_color(Some(Color::Rgb(TERTIARY))); - fmt_style(msg, &style) -} +// CLI Styling copied from cargo #[allow(dead_code)] -pub(crate) fn fmt_text(msg: &str) -> String { - let style = Style::new().fg_color(Some(Color::Rgb(TEXT_COLOR))); - fmt_style(msg, &style) +pub(crate) mod cargo_styles { + use anstyle::*; + + pub(crate) const NOP: Style = Style::new(); + pub(crate) const HEADER: Style = AnsiColor::BrightGreen.on_default().effects(Effects::BOLD); + pub(crate) const USAGE: Style = AnsiColor::BrightGreen.on_default().effects(Effects::BOLD); + pub(crate) const LITERAL: Style = AnsiColor::BrightCyan.on_default().effects(Effects::BOLD); + pub(crate) const PLACEHOLDER: Style = AnsiColor::Cyan.on_default(); + pub(crate) const ERROR: Style 
= AnsiColor::BrightRed.on_default().effects(Effects::BOLD); + pub(crate) const WARN: Style = AnsiColor::Yellow.on_default(); + pub(crate) const NOTE: Style = AnsiColor::BrightGreen.on_default().effects(Effects::BOLD); + pub(crate) const GOOD: Style = AnsiColor::BrightGreen.on_default().effects(Effects::BOLD); + pub(crate) const VALID: Style = AnsiColor::BrightCyan.on_default().effects(Effects::BOLD); + pub(crate) const INVALID: Style = AnsiColor::Yellow.on_default(); + pub(crate) const TRANSIENT: Style = AnsiColor::BrightCyan.on_default().effects(Effects::BOLD); + pub(crate) const CONTEXT: Style = AnsiColor::BrightBlue.on_default().effects(Effects::BOLD); + pub(crate) const UPDATE_ADDED: Style = NOTE; + pub(crate) const UPDATE_REMOVED: Style = ERROR; + pub(crate) const UPDATE_UPGRADED: Style = GOOD; + pub(crate) const UPDATE_DOWNGRADED: Style = WARN; + pub(crate) const UPDATE_UNCHANGED: Style = anstyle::Style::new().bold(); + pub(crate) const DEP_NORMAL: Style = anstyle::Style::new().effects(anstyle::Effects::DIMMED); + pub(crate) const DEP_BUILD: Style = anstyle::AnsiColor::Blue + .on_default() + .effects(anstyle::Effects::BOLD); + pub(crate) const DEP_DEV: Style = anstyle::AnsiColor::Cyan + .on_default() + .effects(anstyle::Effects::BOLD); + pub(crate) const DEP_FEATURE: Style = anstyle::AnsiColor::Magenta + .on_default() + .effects(anstyle::Effects::DIMMED); } -// Legacy color functions - map to brand colors -pub(crate) fn fmt_green(msg: &str) -> String { - fmt_tertiary(msg) +pub(crate) fn get_styles() -> clap::builder::Styles { + clap::builder::styling::Styles::styled() + .header(cargo_styles::HEADER) + .usage(cargo_styles::USAGE) + .literal(cargo_styles::LITERAL) + .placeholder(cargo_styles::PLACEHOLDER) + .error(cargo_styles::ERROR) + .valid(cargo_styles::VALID) + .invalid(cargo_styles::INVALID) } -pub(crate) fn fmt_cyan(msg: &str) -> String { - fmt_secondary(msg) +pub(crate) fn fmt_style(msg: &str, style: &anstyle::Style) -> String { + 
format!("{style}{msg}{style:#}") } -pub(crate) fn fmt_red(msg: &str) -> String { - let red = Style::new().fg_color(Some(Color::Ansi(AnsiColor::Red))); - fmt_style(msg, &red) +macro_rules! make_fmt { + ($($fn_name:ident => $const:ident),* $(,)?) => { + $(pub(crate) fn $fn_name(msg: &str) -> String { + fmt_style(msg, &cargo_styles::$const) + })* + }; } -pub(crate) fn fmt_yellow(msg: &str) -> String { - let yellow = Style::new().fg_color(Some(Color::Ansi(AnsiColor::Yellow))); - fmt_style(msg, &yellow) +make_fmt! { + fmt_nop => NOP, + fmt_header => HEADER, + fmt_usage => USAGE, + fmt_literal => LITERAL, + fmt_placeholder => PLACEHOLDER, + fmt_warn => WARN, + fmt_note => NOTE, + fmt_good => GOOD, + fmt_valid => VALID, + fmt_invalid => INVALID, + fmt_transient => TRANSIENT, + fmt_context => CONTEXT, + fmt_update_added => UPDATE_ADDED, + fmt_update_removed => UPDATE_REMOVED, + fmt_update_upgraded => UPDATE_UPGRADED, + fmt_update_downgraded => UPDATE_DOWNGRADED, + fmt_update_unchanged => UPDATE_UNCHANGED, + fmt_dep_normal => DEP_NORMAL, + fmt_dep_build => DEP_BUILD, + fmt_dep_dev => DEP_DEV, + fmt_dep_feature => DEP_FEATURE, + // ERROR is omitted — fmt_error adds an icon and has different semantics + fmt_error => ERROR, } pub(crate) fn fmt_bold(msg: &str) -> String { - let bold = Style::new().bold().fg_color(Some(Color::Rgb(TEXT_COLOR))); - fmt_style(msg, &bold) + fmt_update_unchanged(msg) } - pub(crate) fn fmt_dimmed(msg: &str) -> String { - let dimmed = Style::new().dimmed(); - fmt_style(msg, &dimmed) + fmt_dep_normal(msg) } -pub(crate) fn fmt_success(msg: &str) -> String { - format!("{} {msg}", fmt_tertiary(CHECK)) +pub(crate) fn fmt_good_check(msg: &str) -> String { + format!("{} {msg}", fmt_good(CHECK)) } -pub(crate) fn fmt_error(msg: &str) -> String { - format!("{} {msg}", fmt_red(MARK)) +pub(crate) fn fmt_error_x(msg: &str) -> String { + format!("{} {msg}", fmt_error(MARK)) } diff --git a/crates/pctx_code_execution_runtime/Cargo.toml 
b/crates/pctx_code_execution_runtime/Cargo.toml index 5e199a34..2664df75 100644 --- a/crates/pctx_code_execution_runtime/Cargo.toml +++ b/crates/pctx_code_execution_runtime/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pctx_code_execution_runtime" -version = "0.1.3" +version = "0.2.0" edition = "2024" rust-version = "1.89" license = "MIT" @@ -13,7 +13,8 @@ categories = ["development-tools"] path = "src/lib.rs" [dependencies] -pctx_config = { version = "^0.1.3", path = "../pctx_config" } +pctx_registry = { version = "^0.1.0", path = "../pctx_registry" } +pctx_config = { version = "^0.1.4", path = "../pctx_config" } deno_core = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } @@ -26,7 +27,7 @@ tracing = { workspace = true } tokio = { workspace = true, features = ["time"] } [build-dependencies] -pctx_config = { version = "^0.1.3", path = "../pctx_config" } +pctx_config = { version = "^0.1.4", path = "../pctx_config" } deno_core = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } diff --git a/crates/pctx_code_execution_runtime/build.rs b/crates/pctx_code_execution_runtime/build.rs index 536cc3fc..0d38327d 100644 --- a/crates/pctx_code_execution_runtime/build.rs +++ b/crates/pctx_code_execution_runtime/build.rs @@ -16,11 +16,11 @@ use deno_core::snapshot::create_snapshot; use rmcp::model::JsonObject; -/// Call an MCP tool (async stub) +/// invoke a registered action #[deno_core::op2(async)] #[serde] #[allow(clippy::unused_async)] -async fn op_call_mcp_tool( +async fn op_invoke( #[string] _server_name: String, #[string] _tool_name: String, #[serde] _args: Option, @@ -28,17 +28,6 @@ async fn op_call_mcp_tool( serde_json::Value::Null } -/// Invoke callback (stub) -#[deno_core::op2(async)] -#[serde] -#[allow(clippy::unused_async)] -async fn op_invoke_callback( - #[string] _id: String, - #[serde] _arguments: Option, -) -> serde_json::Value { - serde_json::Value::Null -} - /// Sleep (stub for timers) 
#[deno_core::op2(async)] #[allow(clippy::unused_async)] @@ -50,8 +39,7 @@ extension!( pctx_runtime_snapshot, ops = [ // Op declarations - these will be registered but not executed during snapshot - op_call_mcp_tool, - op_invoke_callback, + op_invoke, op_sleep, ], esm_entry_point = "ext:pctx_runtime_snapshot/runtime.js", diff --git a/crates/pctx_code_execution_runtime/src/callback_registry.rs b/crates/pctx_code_execution_runtime/src/callback_registry.rs deleted file mode 100644 index fbeecd4e..00000000 --- a/crates/pctx_code_execution_runtime/src/callback_registry.rs +++ /dev/null @@ -1,128 +0,0 @@ -use serde_json::json; -use std::{ - collections::HashMap, - future::Future, - pin::Pin, - sync::{Arc, RwLock}, -}; -use tracing::instrument; - -use crate::error::McpError; - -pub type CallbackFn = Arc< - dyn Fn( - Option, - ) -> Pin> + Send>> - + Send - + Sync, ->; - -/// Singleton registry for callbacks -#[derive(Clone, Default)] -pub struct CallbackRegistry { - callbacks: Arc>>, -} - -impl CallbackRegistry { - /// Returns the ids of this [`CallbackRegistry`]. 
- /// - /// # Panics - /// - /// Panics if it fails acquiring the lock - pub fn ids(&self) -> Vec { - self.callbacks - .read() - .unwrap() - .keys() - .map(String::from) - .collect() - } - - /// Adds callback to registry - /// - /// # Panics - /// - /// Panics if cannot obtain lock - /// - /// # Errors - /// - /// This function will return an error if a callback already exists with the same ID - pub fn add( - &self, - id: &str, // namespace.name - callback: CallbackFn, - ) -> Result<(), McpError> { - let mut callbacks = self.callbacks.write().map_err(|e| { - McpError::Config(format!( - "Failed obtaining write lock on callback registry: {e}" - )) - })?; - - if callbacks.contains_key(id) { - return Err(McpError::Config(format!( - "Callback with id \"{id}\" is already registered" - ))); - } - - callbacks.insert(id.into(), callback); - - Ok(()) - } - - /// Remove a callback from the registry by id - /// - /// # Panics - /// - /// Panics if cannot obtain lock - pub fn remove(&self, id: &str) -> Option { - let mut callbacks = self.callbacks.write().unwrap(); - callbacks.remove(id) - } - - /// Get a Callback from the registry by id - /// - /// # Panics - /// - /// Panics if the internal lock is poisoned (i.e., a thread panicked while holding the lock) - pub fn get(&self, id: &str) -> Option { - let callbacks = self.callbacks.read().unwrap(); - callbacks.get(id).cloned() - } - - /// Confirms the callback registry contains a given id - /// - /// # Panics - /// - /// Panics if the internal lock is poisoned (i.e., a thread panicked while holding the lock) - pub fn has(&self, id: &str) -> bool { - let callbacks = self.callbacks.read().unwrap(); - callbacks.contains_key(id) - } - - /// invokes the callback with the provided args - /// - /// # Errors - /// - /// This function will return an error if a callback by the provided id doesn't exist - /// or if the callback itself fails - #[instrument( - name = "invoke_callback_tool", - skip_all, - fields(id=id, args = 
json!(args).to_string()), - ret(Display), - err - )] - pub async fn invoke( - &self, - id: &str, - args: Option, - ) -> Result { - let callback = self.get(id).ok_or_else(|| { - McpError::ToolCall(format!("Callback with id \"{id}\" does not exist")) - })?; - - callback(args).await.map_err(|e| { - McpError::ExecutionError(format!("Failed calling callback with id \"{id}\": {e}",)) - }) - } -} diff --git a/crates/pctx_code_execution_runtime/src/error.rs b/crates/pctx_code_execution_runtime/src/error.rs deleted file mode 100644 index 27248fe1..00000000 --- a/crates/pctx_code_execution_runtime/src/error.rs +++ /dev/null @@ -1,29 +0,0 @@ -//! Error types for PCTX runtime - -use pctx_config::server::McpConnectionError; - -/// Error type for MCP operations -#[derive(Debug, thiserror::Error)] -pub enum McpError { - /// Server configuration error (e.g., duplicate name) - #[error("MCP configuration error: {0}")] - Config(String), - /// Server connection error - #[error("MCP connection error: {0}")] - Connection(String), - /// Tool call error (HTTP, parsing, etc.) - #[error("MCP tool call error: {0}")] - ToolCall(String), - /// Local tool execution error - #[error("Local tool execution error: {0}")] - ExecutionError(String), -} - -impl From for McpError { - fn from(value: McpConnectionError) -> Self { - Self::Connection(value.to_string()) - } -} - -// Use the shared macro for JsErrorClass implementation -crate::impl_js_error_class!(McpError); diff --git a/crates/pctx_code_execution_runtime/src/callback_ops.rs b/crates/pctx_code_execution_runtime/src/invoke_ops.rs similarity index 65% rename from crates/pctx_code_execution_runtime/src/callback_ops.rs rename to crates/pctx_code_execution_runtime/src/invoke_ops.rs index 1bb976d4..a6550074 100644 --- a/crates/pctx_code_execution_runtime/src/callback_ops.rs +++ b/crates/pctx_code_execution_runtime/src/invoke_ops.rs @@ -4,21 +4,22 @@ //! Callbacks handle their own execution logic (WebSocket RPC, MCP calls, etc.) 
use deno_core::{OpState, op2}; +use rmcp::model::JsonObject; use std::cell::RefCell; use std::rc::Rc; -use crate::{CallbackRegistry, error::McpError}; +use pctx_registry::{PctxRegistry, RegistryError}; #[op2(async)] #[serde] -pub(crate) async fn op_invoke_callback( +pub(crate) async fn op_invoke( state: Rc>, #[string] id: String, - #[serde] arguments: Option, -) -> Result { + #[serde] arguments: Option, +) -> Result { let registry = { let borrowed = state.borrow(); - borrowed.borrow::().clone() + borrowed.borrow::().clone() }; registry.invoke(&id, arguments).await diff --git a/crates/pctx_code_execution_runtime/src/js_error_impl.rs b/crates/pctx_code_execution_runtime/src/js_error_impl.rs deleted file mode 100644 index a81397dc..00000000 --- a/crates/pctx_code_execution_runtime/src/js_error_impl.rs +++ /dev/null @@ -1,34 +0,0 @@ -//! Shared macro for implementing `JsErrorClass` for error types - -/// Macro to implement `JsErrorClass` with standard behavior -/// -/// This macro provides a standard implementation of `JsErrorClass` that: -/// - Returns "Error" as the class -/// - Uses the error's Display implementation for the message -/// - Has no additional properties -/// - Returns self as the error reference -#[macro_export] -macro_rules! 
impl_js_error_class { - ($error_type:ty) => { - impl deno_error::JsErrorClass for $error_type { - fn get_class(&self) -> std::borrow::Cow<'static, str> { - std::borrow::Cow::Borrowed("Error") - } - - fn get_message(&self) -> std::borrow::Cow<'static, str> { - std::borrow::Cow::Owned(self.to_string()) - } - - fn get_additional_properties( - &self, - ) -> Box, deno_error::PropertyValue)>> - { - Box::new(std::iter::empty()) - } - - fn get_ref(&self) -> &(dyn std::error::Error + Send + Sync + 'static) { - self - } - } - }; -} diff --git a/crates/pctx_code_execution_runtime/src/lib.rs b/crates/pctx_code_execution_runtime/src/lib.rs index 3f011f28..62296304 100644 --- a/crates/pctx_code_execution_runtime/src/lib.rs +++ b/crates/pctx_code_execution_runtime/src/lib.rs @@ -3,70 +3,42 @@ //! # PCTX Runtime //! -//! A Deno extension providing MCP (Model Context Protocol) client functionality and console output capturing. +//! A Deno extension providing registry-backed tool invocation and console output capturing. //! //! ## Overview //! //! This crate provides a pre-compiled V8 snapshot containing: -//! - **MCP Client API**: Register and call MCP tools from JavaScript -//! - **Network Fetch**: Host-permission-based fetch with security controls +//! - **Registry Invocation**: Call any registered action (local callbacks, MCP tools, etc.) from JavaScript //! - **Console Capturing**: Automatic stdout/stderr capture for testing and logging +//! - **Bash Execution**: `justBash` global for running shell commands via `just-bash` +//! - **Timers**: `setTimeout`/`setInterval` support //! -//! The runtime is designed to be embedded in Deno-based JavaScript execution environments, -//! providing a secure sandbox with controlled access to external services. +//! The extension is initialized with a `PctxRegistry` that routes `invokeInternal` calls +//! to the appropriate handler at runtime. //! -//! ## Features +//! ## JavaScript API //! -//! 
- **MCP Integration**: Full Model Context Protocol client with server registry -//! - **Permission System**: Host-based network access controls for fetch operations -//! - **Output Capturing**: Automatic console.log/error capture to buffers -//! - **V8 Snapshot**: Pre-compiled runtime for instant startup -//! - **Type Safety**: Full TypeScript type definitions included +//! The runtime exposes the following globals: //! -//! -//! ## MCP API -//! -//! The runtime exposes the following global functions to JavaScript: -//! -//! - `registerMCP(config)` - Register an MCP server -//! - `callMCPTool(call)` - Call a tool on a registered server -//! - `REGISTRY.has(name)` - Check if a server is registered -//! - `REGISTRY.get(name)` - Get server configuration -//! - `REGISTRY.delete(name)` - Remove a server -//! - `REGISTRY.clear()` - Remove all servers -//! - `fetch(url, options)` - Fetch with host permission checks +//! - `invokeInternal({ name, arguments })` - Invoke a registered action by ID +//! - `justBash` - Shell execution via the `just-bash` library //! //! ## Console Capturing //! -//! All `console.log()` and `console.error()` calls are automatically captured: +//! All console methods are overridden and captured to arrays: //! //! ```javascript -//! console.log("Hello", "World"); // Captured to globalThis.__stdout -//! console.error("Error!"); // Captured to globalThis.__stderr +//! console.log("hello"); // -> globalThis.__stdout +//! console.error("oops"); // -> globalThis.__stderr +//! console.warn("warn"); // -> globalThis.__stderr +//! console.info("info"); // -> globalThis.__stdout +//! console.debug("dbg"); // -> globalThis.__stdout //! ``` -//! -//! ## Security -//! -//! - Network access is controlled via `AllowedHosts` whitelist -//! - Each runtime instance has its own isolated MCP registry -//! - No file system access is provided by default -//! -//! ## Performance -//! -//! - **Startup**: Instant (V8 snapshot pre-compiled) -//! 
- **Memory**: ~2MB base runtime overhead -//! - **Operations**: Rust ops provide native performance -mod callback_ops; -mod callback_registry; -mod error; -mod js_error_impl; -pub mod mcp_ops; -mod mcp_registry; +mod invoke_ops; mod timer_ops; -pub use callback_registry::{CallbackFn, CallbackRegistry}; -pub use mcp_registry::MCPRegistry; +pub use pctx_registry::*; /// Pre-compiled V8 snapshot containing the PCTX runtime /// @@ -87,8 +59,7 @@ pub static RUNTIME_SNAPSHOT: &[u8] = deno_core::extension!( pctx_runtime_snapshot, ops = [ - mcp_ops::op_call_mcp_tool, - callback_ops::op_invoke_callback, + invoke_ops::op_invoke, timer_ops::op_sleep, ], esm_entry_point = "ext:pctx_runtime_snapshot/runtime.js", @@ -100,11 +71,9 @@ deno_core::extension!( "just-bash/node_zlib_stub.js", ], options = { - registry: MCPRegistry, - callback_registry: CallbackRegistry, + registry: PctxRegistry, }, state = |state, options| { state.put(options.registry); - state.put(options.callback_registry); }, ); diff --git a/crates/pctx_code_execution_runtime/src/mcp_ops.rs b/crates/pctx_code_execution_runtime/src/mcp_ops.rs index 449b8453..342465f9 100644 --- a/crates/pctx_code_execution_runtime/src/mcp_ops.rs +++ b/crates/pctx_code_execution_runtime/src/mcp_ops.rs @@ -8,8 +8,7 @@ use rmcp::model::JsonObject; use std::cell::RefCell; use std::rc::Rc; -use crate::error::McpError; -use crate::mcp_registry::MCPRegistry; +use pctx_registry::{MCPRegistry, RegistryError}; /// Call an MCP tool (async op) #[op2(async)] @@ -19,10 +18,10 @@ pub(crate) async fn op_call_mcp_tool( #[string] server_name: String, #[string] tool_name: String, #[serde] args: Option, -) -> Result { +) -> Result { let registry = { let borrowed = state.borrow(); borrowed.borrow::().clone() }; - crate::mcp_registry::call_mcp_tool(®istry, &server_name, &tool_name, args).await + pctx_registry::call_mcp_tool(®istry, &server_name, &tool_name, args).await } diff --git a/crates/pctx_code_execution_runtime/src/runtime.js 
b/crates/pctx_code_execution_runtime/src/runtime.js index 562c9aad..33b51606 100644 --- a/crates/pctx_code_execution_runtime/src/runtime.js +++ b/crates/pctx_code_execution_runtime/src/runtime.js @@ -72,39 +72,21 @@ console.debug = (...args) => { }; // ============================================================================ -// MCP & Callback Operations +// Registry Operation // ============================================================================ /** - * Call an MCP tool + * Invoke a registered action * @template T - * @param {Object} call - Tool call configuration - * @param {string} call.serverName - Name of the registered MCP server - * @param {string} call.toolName - Name of the registered tool to call - * @param {Object?} [call.arguments] - Arguments to pass to the tool - * @returns {Promise} The tool's response + * @param {Object} call - Action invocation configuration + * @param {string} call.name - ID of the action + * @param {Object?} [call.arguments] - Arguments to pass to the action + * @returns {Promise} The action's response */ -export async function callMCPTool(call) { - return await ops.op_call_mcp_tool( - call.serverName, - call.toolName, - call.arguments, - ); -} - -/** - * Call an MCP tool - * @template T - * @param {Object} call - Tool call configuration - * @param {string} call.id - ID of the callback - * @param {Object?} [call.arguments] - Arguments to pass to the callback - * @returns {Promise} The tool's response - */ -export async function invokeCallback(call) { - return await ops.op_invoke_callback(call.id, call.arguments); +export async function invokeInternal(call) { + return await ops.op_invoke(call.name, call.arguments); } // Make APIs available globally for convenience (matching original behavior) -globalThis.callMCPTool = callMCPTool; -globalThis.invokeCallback = invokeCallback; +globalThis.invokeInternal = invokeInternal; globalThis.justBash = Bash; // lowercase to avoid any clashes with namespaces diff --git 
a/crates/pctx_code_mode/Cargo.toml b/crates/pctx_code_mode/Cargo.toml index 8655b5a8..029a890d 100644 --- a/crates/pctx_code_mode/Cargo.toml +++ b/crates/pctx_code_mode/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pctx_code_mode" -version = "0.2.3" +version = "0.3.0" edition = "2024" license = "MIT" description = "TypeScript code execution engine for AI agents with tool schemas, sandboxed Deno runtime, and Rust callbacks" @@ -11,10 +11,10 @@ categories = ["development-tools", "api-bindings"] [dependencies] # local -pctx_config = { version = "^0.1.3", path = "../pctx_config" } -pctx_codegen = { version = "^0.2.0", path = "../pctx_codegen" } -pctx_executor = { version = "^0.1.3", path = "../pctx_executor" } -pctx_code_execution_runtime = { version = "^0.1.3", path = "../pctx_code_execution_runtime" } +pctx_config = { version = "^0.1.4", path = "../pctx_config" } +pctx_codegen = { version = "^0.3.0", path = "../pctx_codegen" } +pctx_executor = { version = "^0.2.0", path = "../pctx_executor" } +pctx_registry = { path = "../pctx_registry" } # general thiserror = { workspace = true } diff --git a/crates/pctx_code_mode/README.md b/crates/pctx_code_mode/README.md index 899c216d..0821e08c 100644 --- a/crates/pctx_code_mode/README.md +++ b/crates/pctx_code_mode/README.md @@ -5,7 +5,9 @@ A TypeScript code execution engine that enables AI agents to dynamically call to ## Quick Start ```rust -use pctx_code_mode::{CodeMode, CallbackRegistry}; +use pctx_code_mode::{CodeMode}; +use pctx_code_mode::registry::PctxRegistry; +use pctx_code_mode::config::ToolDisclosure; use pctx_code_mode::model::CallbackConfig; use serde_json::json; use std::sync::Arc; @@ -34,8 +36,8 @@ async fn main() -> anyhow::Result<()> { code_mode.add_callback(&callback)?; // 3. 
Register callback functions that execute when tools are called - let registry = CallbackRegistry::default(); - registry.add(&callback.id(), Arc::new(|args| { + let registry = PctxRegistry::default(); + registry.add_callback(&callback.id(), Arc::new(|args| { Box::pin(async move { let name = args .and_then(|v| v.get("name")) @@ -43,7 +45,7 @@ async fn main() -> anyhow::Result<()> { .unwrap_or("World"); Ok(serde_json::json!({ "message": format!("Hello, {name}!") })) }) - }))?; + })); // 4. Execute LLM-generated TypeScript code let code = r#" @@ -53,7 +55,7 @@ async fn main() -> anyhow::Result<()> { } "#; - let output = code_mode.execute(code, Some(registry)).await?; + let output = code_mode.execute_typescript(code, ToolDisclosure::default(), registry).await?; if output.success { println!("Result: {}", serde_json::to_string_pretty(&output.output)?); @@ -69,7 +71,7 @@ async fn main() -> anyhow::Result<()> { ### 1. CodeMode -The [`CodeMode`] struct is the main execution engine. It provides: +The `CodeMode` struct is the main execution engine. It provides: **Builder methods** (chainable): @@ -84,15 +86,18 @@ The [`CodeMode`] struct is the main execution engine. 
It provides: **Accessor methods**: -- `tool_sets()` - Get registered ToolSets +- `tool_sets()` - Get all registered ToolSets +- `server_tool_sets()` - Get only MCP server ToolSets - `servers()` - Get registered server configurations - `callbacks()` - Get registered callback configurations +- `virtual_fs()` - Get the virtual filesystem used by bash execution **Execution methods**: - `list_functions()` - List all available functions with minimal interfaces - `get_function_details()` - Get full typed interfaces for specific functions -- `execute()` - Execute TypeScript code in the sandbox +- `execute_typescript()` - Execute TypeScript code in the sandbox +- `execute_bash()` - Execute a bash command in the virtual filesystem ```rust use pctx_code_mode::CodeMode; @@ -129,24 +134,41 @@ let details = code_mode.get_function_details(GetFunctionDetailsInput { println!("TypeScript definitions:\n{}", details.code); ``` -### 2. Tools and ToolSets +### 2. ToolDisclosure -[`Tool`]s represent individual functions callable from TypeScript. -They are organized into [`ToolSet`]s (namespaces). Tools can be: +`ToolDisclosure` controls how tools are presented to the LLM and how generated TypeScript code invokes them. Choose the mode that matches your agent's workflow: + +- **`Catalog`** (default) - Tools are discovered via `list_tools` → `get_tool_details`, then called through typed TypeScript namespaces (e.g. `await Greeter.greet({ name: "Alice" })`). +- **`Filesystem`** - Like `Catalog` but the agent works within a virtual filesystem via `execute_bash` before invoking TypeScript. +- **`Sidecar`** - Tools are passed as original MCP descriptions. The generated code uses an `InvokeMap` type and a type-safe `invoke()` function rather than namespace methods. 
+ +```rust +use pctx_code_mode::config::ToolDisclosure; + +// Default catalog mode — typed namespaces +let output = code_mode.execute_typescript(code, ToolDisclosure::Catalog, registry).await?; + +// Sidecar mode — InvokeMap / invoke() interface +let output = code_mode.execute_typescript(code, ToolDisclosure::Sidecar, registry).await?; +``` + +### 3. Tools and ToolSets + +`Tool`s represent individual functions callable from TypeScript. +They are organized into `ToolSet`s (namespaces). Tools can be: - **MCP tools**: Loaded from MCP servers via `add_server()` - **Callback tools**: Defined via `CallbackConfig` and `add_callback()` -### 3. Callbacks +### 4. PctxRegistry -[`CallbackFn`] are Rust async functions that execute when TypeScript code calls callback tools. -Register them in a [`CallbackRegistry`] and pass it to `execute()`. +`PctxRegistry` is a thread-safe registry that routes TypeScript function calls to either local Rust callbacks or upstream MCP servers. Pass it to `execute_typescript()`. ```rust -use pctx_code_mode::{CallbackRegistry, CallbackFn}; +use pctx_code_mode::{PctxRegistry, CallbackFn}; use std::sync::Arc; -let registry = CallbackRegistry::default(); +let registry = PctxRegistry::default(); let callback: CallbackFn = Arc::new(|args| { Box::pin(async move { @@ -165,14 +187,19 @@ let callback: CallbackFn = Arc::new(|args| { }); // Register with namespace.function format -registry.add("DataApi.fetchData", callback)?; +registry.add_callback("DataApi.fetchData", callback); + +// Register MCP tools from a server +registry.add_mcp(tool_names, server_config); ``` -### 4. Code Execution +### 5. Code Execution Execute LLM-generated TypeScript code that calls your registered tools. 
```rust +use pctx_code_mode::config::ToolDisclosure; + let code = r#" async function run() { // Call your registered tools @@ -190,7 +217,7 @@ let code = r#" } "#; -let output = code_mode.execute(code, Some(registry)).await?; +let output = code_mode.execute_typescript(code, ToolDisclosure::default(), registry).await?; match output.success { true => println!("Success: {:?}", output.output), @@ -297,12 +324,14 @@ let details = code_mode.get_function_details(GetFunctionDetailsInput { println!("TypeScript code:\n{}", details.code); ``` -#### `execute(code: &str, callbacks: Option) -> Result` +#### `execute_typescript(code: &str, disclosure: ToolDisclosure, registry: PctxRegistry) -> Result` Executes TypeScript code in a sandboxed Deno runtime. ```rust -let output = code_mode.execute(typescript_code, Some(callback_registry)).await?; +use pctx_code_mode::config::ToolDisclosure; + +let output = code_mode.execute_typescript(typescript_code, ToolDisclosure::default(), registry).await?; if output.success { println!("Return value: {:?}", output.output); @@ -312,49 +341,70 @@ if output.success { } ``` +#### `execute_bash(command: &str) -> Result` + +Executes a bash command in the virtual filesystem (used with `ToolDisclosure::Filesystem`). + +```rust +let output = code_mode.execute_bash("ls -la /workspace").await?; +``` + #### Accessor Methods ```rust -// Get registered tool sets +// Get all registered tool sets let tool_sets: &[ToolSet] = code_mode.tool_sets(); +// Get only MCP server tool sets +let server_tool_sets: &[ToolSet] = code_mode.server_tool_sets(); + // Get registered server configurations let servers: &[ServerConfig] = code_mode.servers(); // Get registered callback configurations let callbacks: &[CallbackConfig] = code_mode.callbacks(); + +// Get the virtual filesystem +let vfs = code_mode.virtual_fs(); ``` -### CallbackRegistry +### PctxRegistry -Thread-safe registry for managing callback functions. 
+Thread-safe registry that routes tool calls to Rust callbacks or MCP servers. -#### `default() -> CallbackRegistry` +#### `default() -> PctxRegistry` ```rust -let registry = CallbackRegistry::default(); +let registry = PctxRegistry::default(); ``` -#### `add(id: &str, callback: CallbackFn) -> Result<()>` +#### `add_callback(id: &str, callback: CallbackFn)` Registers a callback with a specific ID (format: `Namespace.functionName`). ```rust -registry.add("DataApi.fetchData", Arc::new(|args| { +registry.add_callback("DataApi.fetchData", Arc::new(|args| { Box::pin(async move { - // Your implementation Ok(serde_json::json!({"result": "data"})) }) -}))?; +})); ``` +#### `add_mcp(tool_names, cfg)` + +Registers MCP tools from a server configuration. + +#### `invoke(id: &str, args: Option) -> Result` + +Dispatches a call by tool ID. Used internally during execution. + #### `has(id: &str) -> bool` -Checks if a callback is registered. +Checks if a tool is registered. ```rust if registry.has("DataApi.fetchData") { - println!("Callback is registered"); + println!("Tool is registered"); } ``` @@ -374,23 +424,6 @@ pub struct CallbackConfig { } ``` -```rust -use pctx_code_mode::model::CallbackConfig; -use serde_json::json; - -let config = CallbackConfig { - namespace: "MyNamespace".to_string(), - name: "myFunction".to_string(), - description: Some("Does something useful".to_string()), - input_schema: Some(json!({ - "type": "object", - "properties": { "id": { "type": "integer" } }, - "required": ["id"] - })), - output_schema: None, -}; -``` - #### `Tool` and `ToolSet` Tools represent individual functions callable from TypeScript. They are organized into ToolSets (namespaces). These are typically created internally when you call `add_callback()` or `add_server()`. 
@@ -468,7 +501,7 @@ for config in tool_configs { // Register the corresponding callback function let callback_id = format!("{}.{}", config.namespace, config.name); - registry.add(&callback_id, create_callback_for_config(&config))?; + registry.add_callback(&callback_id, create_callback_for_config(&config)); } ``` @@ -477,7 +510,7 @@ for config in tool_configs { Callbacks support full async operations: ```rust -registry.add("Database.query", Arc::new(|args| { +registry.add_callback("Database.query", Arc::new(|args| { Box::pin(async move { let query = args .and_then(|v| v.get("sql")) @@ -493,13 +526,15 @@ registry.add("Database.query", Arc::new(|args| { Ok(serde_json::to_value(rows)?) }) -}))?; +})); ``` ### Error Handling ```rust -let output = code_mode.execute(code, Some(registry)).await?; +use pctx_code_mode::config::ToolDisclosure; + +let output = code_mode.execute_typescript(code, ToolDisclosure::default(), registry).await?; if !output.success { // Check stderr for execution errors @@ -527,39 +562,46 @@ async function run() { } ``` +In `Sidecar` mode, use the `invoke()` function instead of namespace methods: + +```typescript +async function run() { + const result = await invoke("Namespace.toolName", { param: value }); + return result; +} +``` + The code execution engine: -- Wraps your code with namespace implementations +- Wraps your code with generated namespace implementations or an `InvokeMap` (depending on `ToolDisclosure`) - Automatically calls `run()` and captures its return value - Provides the return value in `ExecuteOutput.output` ## Architecture 1. **Tool Definition**: Tools are defined with JSON Schemas for inputs/outputs -2. **Code Generation**: TypeScript interface definitions are generated from schemas -3. **Code Execution**: User code is wrapped with namespace implementations and executed in Deno -4. **Callback Routing**: Function calls in TypeScript are routed to Rust callbacks or MCP servers -5. 
**Result Marshaling**: JSON values are passed between TypeScript and Rust +2. **Disclosure Mode**: `ToolDisclosure` determines how tools are surfaced and the TypeScript code generation strategy used +3. **Code Generation**: TypeScript interface definitions are generated from schemas; `Catalog`/`Filesystem` modes emit full namespace implementations, `Sidecar` emits an `InvokeMap` +4. **Code Execution**: User code is wrapped with generated bindings and executed in Deno +5. **Call Routing**: TypeScript function calls are dispatched through `PctxRegistry` to Rust callbacks or MCP servers +6. **Result Marshaling**: JSON values are passed between TypeScript and Rust ## Sandbox Security Code is executed in a Deno runtime with: - Network access restricted to allowed hosts (from registered MCP servers) -- No file system access +- No file system access (use `execute_bash` with the virtual filesystem instead) - No subprocess spawning - Isolated V8 context per execution -```rust -// Add servers -code_mode.add_server(&server_config).await?; -``` - ## Examples ### Multi-Tool Workflow ```rust +use pctx_code_mode::config::ToolDisclosure; + let code = r#" async function run() { // Fetch user data @@ -585,12 +627,14 @@ let code = r#" } "#; -let output = code_mode.execute(code, Some(registry)).await?; +let output = code_mode.execute_typescript(code, ToolDisclosure::Catalog, registry).await?; ``` ### Error Recovery ```rust +use pctx_code_mode::config::ToolDisclosure; + let code = r#" async function run() { try { @@ -603,7 +647,7 @@ let code = r#" } "#; -let output = code_mode.execute(code, Some(registry)).await?; +let output = code_mode.execute_typescript(code, ToolDisclosure::default(), registry).await?; // Check console output if !output.stdout.is_empty() { @@ -630,10 +674,10 @@ let code = r#" ## Related Crates -- `pctx_config`: Server configuration types (`ServerConfig`) +- `pctx_config`: Server configuration types (`ServerConfig`, `ToolDisclosure`) - `pctx_codegen`: TypeScript 
code generation from JSON schemas - `pctx_executor`: Deno runtime execution engine -- `pctx_code_execution_runtime`: Runtime environment and callback system +- `pctx_code_execution_runtime`: Runtime environment (`PctxRegistry`, `CallbackFn`) ## License diff --git a/crates/pctx_code_mode/src/code_mode.rs b/crates/pctx_code_mode/src/code_mode.rs index ebd54ada..92646b08 100644 --- a/crates/pctx_code_mode/src/code_mode.rs +++ b/crates/pctx_code_mode/src/code_mode.rs @@ -1,13 +1,12 @@ +use pctx_codegen::{Tool, ToolSet}; +use pctx_config::{ToolDisclosure, server::ServerConfig}; +use pctx_registry::PctxRegistry; +use serde::{Deserialize, Serialize}; +use serde_json::json; use std::{ collections::{HashMap, HashSet}, time::Duration, }; - -use pctx_code_execution_runtime::CallbackRegistry; -use pctx_codegen::{Tool, ToolSet}; -use pctx_config::server::ServerConfig; -use serde::{Deserialize, Serialize}; -use serde_json::json; use tracing::{debug, info, instrument, warn}; use crate::{ @@ -75,9 +74,7 @@ impl CodeMode { ) -> Result<()> { let timeout = Duration::from_secs(timeout_secs); let mut tasks = vec![]; - let mut servers_to_add = vec![]; for server in servers { - servers_to_add.push(server.clone()); let server = server.clone(); let task = tokio::spawn(async move { let result = tokio::time::timeout(timeout, Self::server_to_toolset(&server)).await; @@ -98,26 +95,24 @@ impl CodeMode { } // join and unpack results - let results = futures::future::join_all(tasks).await; - let mut tool_sets = vec![]; - for result in results { - tool_sets.push(result.map_err(|e| { + let joined_results = futures::future::join_all(tasks).await; + let mut results = vec![]; + for result in joined_results { + results.push(result.map_err(|e| { Error::Message(format!("Failed joining parallel MCP registration: {e:?}")) })??); } // check for ToolSet conflicts & add to self - for tool_set in tool_sets { + for (tool_set, server_cfg) in results { self.add_tool_set(tool_set)?; + self.servers.push(server_cfg) 
} - // add server configs - self.servers.extend(servers_to_add); - Ok(()) } - async fn server_to_toolset(server: &ServerConfig) -> Result { + async fn server_to_toolset(server: &ServerConfig) -> Result<(ToolSet, ServerConfig)> { // Connect to the MCP server (this is the slow operation) debug!( "Connecting to MCP server '{}'({})...", @@ -137,7 +132,7 @@ impl CodeMode { // Convert MCP tools to pctx tools let mut tools = vec![]; - for mcp_tool in listed_tools { + for mcp_tool in &listed_tools { let input_schema = serde_json::from_value::(json!(mcp_tool.input_schema)) .map_err(|e| { @@ -160,9 +155,9 @@ impl CodeMode { }; tools.push( - Tool::new_mcp( + Tool::new( &mcp_tool.name, - mcp_tool.description.map(String::from), + mcp_tool.description.clone().map(String::from), Some(input_schema), output_schema, ) @@ -177,7 +172,7 @@ impl CodeMode { .and_then(|p| p.server_info.title.clone()) .unwrap_or(format!("MCP server at {}", server.display_target())); - let tool_set = ToolSet::new(&server.name, &description, tools); + let tool_set = ToolSet::new(Some(server.name.clone()), &description, tools); info!( "Successfully initialized MCP server '{}' with {} tools", @@ -185,7 +180,7 @@ impl CodeMode { tool_set.tools.len() ); - Ok(tool_set) + Ok((tool_set, server.clone())) } pub fn add_callbacks<'a>( @@ -210,7 +205,7 @@ impl CodeMode { .unwrap_or_else(|| { let idx = self.tool_sets.len(); self.tool_sets - .push(ToolSet::new(&callback.namespace, "", vec![])); + .push(ToolSet::new(callback.namespace.clone(), "", vec![])); idx }); let tool_set = &mut self.tool_sets[idx]; @@ -218,7 +213,8 @@ impl CodeMode { if tool_set.tools.iter().any(|t| t.name == callback.name) { return Err(Error::Message(format!( "ToolSet `{}` already has a tool with name `{}`. 
Tool names must be unique within tool sets", - &tool_set.name, &callback.name + tool_set.name.as_deref().unwrap_or_default(), + &callback.name ))); } @@ -244,7 +240,7 @@ impl CodeMode { } else { None }; - let tool = Tool::new_callback( + let tool = Tool::new( &callback.name, callback.description.clone(), input_schema, @@ -263,7 +259,7 @@ impl CodeMode { if self.tool_sets.iter().any(|t| t.name == tool_set.name) { return Err(Error::Message(format!( "CodeMode already has ToolSet with name: {}", - tool_set.name + tool_set.name.unwrap_or_default() ))); } @@ -299,7 +295,7 @@ impl CodeMode { } // e.g. "## Tools" - readme.push_str(&format!("## {}\n", tool_set.namespace)); + readme.push_str(&format!("## {}\n", tool_set.pascal_namespace())); // One line per function: "Namespace/fn.d.ts # description" // `cat` omitted since it's stated once in the header above. @@ -318,15 +314,20 @@ impl CodeMode { // Create file for this function under /sdk/ let tool_file_path = - format!("/sdk/{}/{}.d.ts", tool_set.namespace, tool.fn_name); - let tool_code = tool.fn_signature(true); + format!("/sdk/{}/{}.d.ts", tool_set.pascal_namespace(), tool.fn_name); + let tool_code = tool.ts_fn_signature(true); let formatted = pctx_codegen::format::format_d_ts(&tool_code); files.insert(tool_file_path, formatted); if desc.is_empty() { - format!("{}/{}.d.ts", tool_set.namespace, tool.fn_name) + format!("{}/{}.d.ts", tool_set.pascal_namespace(), tool.fn_name) } else { - format!("{}/{}.d.ts # {}", tool_set.namespace, tool.fn_name, desc) + format!( + "{}/{}.d.ts # {}", + tool_set.pascal_namespace(), + tool.fn_name, + desc + ) } }) .collect(); @@ -346,6 +347,29 @@ impl CodeMode { &self.tool_sets } + /// Returns an immutable reference to the registered ToolSets + /// representing upstream servers + pub fn server_tool_sets(&self) -> Vec<(&ServerConfig, &pctx_codegen::ToolSet)> { + // let server_names: Vec<&str> = self.servers.iter().map(|s| s.0.name.as_str()).collect(); + self.tool_sets + .iter() + 
.filter_map(|ts| { + if let Some(server_cfg) = self + .servers + .iter() + .find(|s| Some(s.name.as_str()) == ts.name.as_deref()) + { + Some((server_cfg, ts)) + } else { + None + } + // ts.name + // .as_deref() + // .is_some_and(|n| server_names.contains(&n)) + }) + .collect() + } + /// Returns an immutable reference to the registered server configurations pub fn servers(&self) -> &[ServerConfig] { &self.servers @@ -362,25 +386,14 @@ impl CodeMode { &self.virtual_fs } - pub fn allowed_hosts(&self) -> HashSet { - self.servers - .iter() - .filter_map(|s| { - let http_cfg = s.http()?; - let host = http_cfg.url.host()?; - let allowed = if let Some(port) = http_cfg.url.port() { - format!("{host}:{port}") - } else { - let default_port = if http_cfg.url.scheme() == "https" { - 443 - } else { - 80 - }; - format!("{host}:{default_port}") - }; - Some(allowed) - }) - .collect() + // --------------- Utilities --------------- + pub fn add_mcp_servers_to_registry(&self, registry: &mut PctxRegistry) -> Result<()> { + for (cfg, tool_set) in self.server_tool_sets() { + let tool_names: Vec = tool_set.tools.iter().map(|t| t.name.clone()).collect(); + registry.add_mcp(&tool_names, cfg.clone())?; + } + + Ok(()) } // --------------- Code-Mode Tools --------------- @@ -396,10 +409,10 @@ impl CodeMode { continue; } - namespaces.push(tool_set.namespace_interface(false)); + namespaces.push(tool_set.ts_namespace_declaration(false)); functions.extend(tool_set.tools.iter().map(|t| ListedFunction { - namespace: tool_set.namespace.clone(), + namespace: tool_set.pascal_namespace(), name: t.fn_name.clone(), description: t.description.clone(), })); @@ -426,7 +439,7 @@ impl CodeMode { let mut functions = vec![]; for tool_set in &self.tool_sets { - if let Some(fn_names) = by_mod.get(&tool_set.namespace) { + if let Some(fn_names) = by_mod.get(&tool_set.pascal_namespace()) { // filter tools based on requested fn names let tools: Vec<&pctx_codegen::Tool> = tool_set .tools @@ -437,13 +450,13 @@ impl 
CodeMode { if !tools.is_empty() { // code definition let fn_details: Vec = - tools.iter().map(|t| t.fn_signature(true)).collect(); - namespaces.push(tool_set.wrap_with_namespace(&fn_details.join("\n\n"))); + tools.iter().map(|t| t.ts_fn_signature(true)).collect(); + namespaces.push(tool_set.ts_wrap_with_namespace(&fn_details.join("\n\n"))); // struct output functions.extend(tools.iter().map(|t| FunctionDetails { listed: ListedFunction { - namespace: tool_set.namespace.clone(), + namespace: tool_set.pascal_namespace(), name: t.fn_name.clone(), description: t.description.clone(), }, @@ -469,10 +482,6 @@ impl CodeMode { pub async fn execute_bash(&self, command: &str) -> Result { debug!(command = %command, "Executing bash command"); - // Serialize virtual_fs for injection into JavaScript - let virtual_fs_json = - serde_json::to_string(&self.virtual_fs).unwrap_or_else(|_| "{}".to_string()); - // Wrap bash command in async IIFE and export the result // The result from bashFs.exec() contains: { stdout: string, stderr: string, exitCode: number } let to_execute = format!( @@ -488,17 +497,14 @@ const result = await (async () => {{ }})(); export default result;"#, - virtual_fs_json = virtual_fs_json, - command = serde_json::to_string(command).unwrap_or_else(|_| "\"\"".to_string()), + virtual_fs_json = json!(self.virtual_fs), + command = json!(command), ); debug!(to_execute = %to_execute, "Executing bash in sandbox"); - let options = pctx_executor::ExecuteOptions::new() - .with_allowed_hosts(self.allowed_hosts().into_iter().collect()) - .with_servers(self.servers.clone()); - - let execution_res = pctx_executor::execute(&to_execute, options).await?; + let execution_res = + pctx_executor::execute(&to_execute, pctx_executor::ExecuteOptions::new()).await?; // Extract stdout and stderr from the bash result object // The output field contains the result object: { stdout, stderr, exitCode } @@ -551,13 +557,16 @@ export default result;"#, } /// Execute TypeScript code with access to 
registered tools and virtual filesystem - #[instrument(skip(self, callback_registry), ret(Display), err)] + #[instrument(skip(self, registry), ret(Display), err)] pub async fn execute_typescript( &self, code: &str, - callback_registry: Option, + disclosure: ToolDisclosure, + registry: Option, ) -> Result { - let registry = callback_registry.unwrap_or_default(); + let mut registry = registry.unwrap_or_default(); + self.add_mcp_servers_to_registry(&mut registry)?; + // Format for logging only let formatted_code = pctx_codegen::format::format_ts(code); @@ -566,77 +575,104 @@ export default result;"#, formatted_code = %formatted_code, code_length = code.len(), callbacks =? registry.ids(), + disclosure =? disclosure, "Received TypeScript code to execute" ); - // confirm all configured callbacks in the CodeMode interface have - // registered callback functions - let missing_ids: Vec = self - .callbacks + // confirm all configured tool IDs the CodeMode interface have + // registered actions + let all_ids: Vec = self.tool_sets.iter().flat_map(|ts| ts.tool_ids()).collect(); + let missing_ids: Vec = all_ids .iter() .filter_map(|c| { - if registry.has(&c.id()) { + if registry.has(c) { None } else { - Some(c.id()) + Some(c.clone()) } }) .collect(); if !missing_ids.is_empty() { return Err(Error::Message(format!( - "Missing configured callbacks in registry with ids: {missing_ids:?}" + "Registry missing ids: {missing_ids:?}" ))); } - // generate the full script to be executed - let namespaces: Vec = self - .tool_sets - .iter() - .filter_map(|s| { - if s.tools.is_empty() { - None - } else { - Some(s.namespace()) - } - }) - .collect(); - - // Serialize virtual_fs for injection into JavaScript - let virtual_fs_json = - serde_json::to_string(&self.virtual_fs).unwrap_or_else(|_| "{}".to_string()); - - // Initialize bashFs with tool definitions, then user code, then namespaces - let to_execute = format!( - r#"// TypeScript declaration for bashFs -declare global {{ - var bashFs: 
InstanceType; -}} - -// Initialize bash filesystem with tool definitions -const bashFs = new justBash({{ - files: {virtual_fs_json}, - cwd: "/sdk", -}}); -globalThis.bashFs = bashFs; - -{code} + let to_execute = match disclosure { + ToolDisclosure::Catalog | ToolDisclosure::Filesystem => { + // generate the full script to be executed + let namespaces: Vec = self + .tool_sets + .iter() + .filter_map(|s| { + if s.tools.is_empty() { + None + } else { + Some(s.ts_namespace_impl()) + } + }) + .collect(); -{namespaces} + format!( + "{code}\n\n{namespaces}\n\nexport default await run();", + namespaces = pctx_codegen::format::format_ts(&namespaces.join("\n\n")), + ) + } + ToolDisclosure::Sidecar => { + let invoke_map_entries: Vec = self + .tool_sets + .iter() + .flat_map(|ts| { + ts.tools + .iter() + .map(|t| t.ts_invoke_map_entry(ts.name.as_deref())) + }) + .collect(); + let types: Vec = self + .tool_sets + .iter() + .flat_map(|ts| { + ts.tools.iter().filter_map(|t| { + let types = t.types(); + if types.is_empty() { None } else { Some(types) } + }) + }) + .collect(); -export default await run(); -"#, - virtual_fs_json = virtual_fs_json, - namespaces = namespaces.join("\n\n"), - ); + let invoke_interface = format!( + r#" + type InvokeMap = {{ + {invoke_map_entries} + }}; + + type InvokeCall = + undefined extends InvokeMap[K]["args"] + ? 
{{ name: K; arguments?: InvokeMap[K]["args"] }} + : {{ name: K; arguments: InvokeMap[K]["args"] }}; + + async function invoke(call: InvokeCall): Promise {{ + return await invokeInternal(call); + }} + + {types} + "#, + invoke_map_entries = invoke_map_entries.join("\n "), + types = types.join("\n\n") + ); + format!( + "{code}\n\n{invoke_interface}\n\nexport default await run();", + invoke_interface = pctx_codegen::format::format_ts(&invoke_interface) + ) + } + }; debug!(to_execute = %to_execute, "Executing TypeScript in sandbox"); - let options = pctx_executor::ExecuteOptions::new() - .with_allowed_hosts(self.allowed_hosts().into_iter().collect()) - .with_servers(self.servers.clone()) - .with_callbacks(registry); - - let execution_res = pctx_executor::execute(&to_execute, options).await?; + let execution_res = pctx_executor::execute( + &to_execute, + pctx_executor::ExecuteOptions::new().with_registry(registry), + ) + .await?; if execution_res.success { debug!("TypeScript execution completed successfully"); @@ -651,16 +687,4 @@ export default await run(); output: execution_res.output, }) } - - /// Main execute function that routes to bash or typescript execution - /// Defaults to TypeScript for backward compatibility - #[instrument(skip(self, callback_registry), ret(Display), err)] - pub async fn execute( - &self, - code: &str, - callback_registry: Option, - ) -> Result { - // Default to TypeScript execution for backward compatibility - self.execute_typescript(code, callback_registry).await - } } diff --git a/crates/pctx_code_mode/src/descriptions.rs b/crates/pctx_code_mode/src/descriptions.rs new file mode 100644 index 00000000..0bc4297c --- /dev/null +++ b/crates/pctx_code_mode/src/descriptions.rs @@ -0,0 +1,42 @@ +pub mod tools { + use pctx_config::ToolDisclosure; + + pub const EXECUTE_BASH: &str = include_str!("../../../descriptions/tools/execute_bash/v1.txt"); + pub const EXECUTE_TYPESCRIPT_CATALOG: &str = + 
include_str!("../../../descriptions/tools/execute_typescript_catalog/v1.txt"); + pub const EXECUTE_TYPESCRIPT_FILESYSTEM: &str = + include_str!("../../../descriptions/tools/execute_typescript_filesystem/v1.txt"); + pub const EXECUTE_TYPESCRIPT_SIDECAR: &str = + include_str!("../../../descriptions/tools/execute_typescript_sidecar/v1.txt"); + pub const GET_FUNCTION_DETAILS: &str = + include_str!("../../../descriptions/tools/get_function_details/v1.txt"); + pub const LIST_FUNCTIONS: &str = + include_str!("../../../descriptions/tools/list_functions/v1.txt"); + + pub fn disclosure_execute_description(disclosure: ToolDisclosure) -> String { + match disclosure { + ToolDisclosure::Catalog => EXECUTE_TYPESCRIPT_CATALOG.into(), + ToolDisclosure::Filesystem => EXECUTE_TYPESCRIPT_FILESYSTEM.into(), + ToolDisclosure::Sidecar => EXECUTE_TYPESCRIPT_SIDECAR.into(), + } + } +} + +pub mod workflow { + use pctx_config::ToolDisclosure; + + const COMMON: &str = include_str!("../../../descriptions/workflows/common.txt"); + const CATALOG: &str = include_str!("../../../descriptions/workflows/catalog/v1.txt"); + const FILESYSTEM: &str = include_str!("../../../descriptions/workflows/filesystem/v1.txt"); + const SIDECAR: &str = include_str!("../../../descriptions/workflows/sidecar/v1.txt"); + + pub fn get_workflow_description(disclosure: ToolDisclosure) -> String { + let disclosure_specific = match disclosure { + ToolDisclosure::Catalog => CATALOG, + ToolDisclosure::Filesystem => FILESYSTEM, + ToolDisclosure::Sidecar => SIDECAR, + }; + + format!("{COMMON}\n{disclosure_specific}") + } +} diff --git a/crates/pctx_code_mode/src/lib.rs b/crates/pctx_code_mode/src/lib.rs index b39a3654..13ea60d7 100644 --- a/crates/pctx_code_mode/src/lib.rs +++ b/crates/pctx_code_mode/src/lib.rs @@ -6,8 +6,10 @@ //! //! ## Quick Start //! -//! ```ignore -//! use pctx_code_mode::{CodeMode, CallbackRegistry}; +//! ``` +//! use pctx_code_mode::CodeMode; +//! use pctx_code_mode::registry::PctxRegistry; +//! 
use pctx_code_mode::config::ToolDisclosure; //! use pctx_code_mode::model::CallbackConfig; //! use serde_json::json; //! use std::sync::Arc; @@ -16,7 +18,7 @@ //! async fn main() -> anyhow::Result<()> { //! // 1. Define callback tools with JSON schemas //! let callback = CallbackConfig { -//! namespace: "Greeter".to_string(), +//! namespace: Some("Greeter".to_string()), //! name: "greet".to_string(), //! description: Some("Greets a person by name".to_string()), //! input_schema: Some(json!({ @@ -32,16 +34,16 @@ //! code_mode.add_callback(&callback)?; //! //! // 3. Register callback functions that execute when tools are called -//! let registry = CallbackRegistry::default(); -//! registry.add("Greeter.greet", Arc::new(|args| { +//! let registry = PctxRegistry::default(); +//! registry.add_callback(&callback.id(), Arc::new(|args: Option| { //! Box::pin(async move { -//! let name = args +//! let name = args.as_ref() //! .and_then(|v| v.get("name")) //! .and_then(|v| v.as_str()) //! .unwrap_or("World"); //! Ok(serde_json::json!({ "message": format!("Hello, {name}!") })) //! }) -//! }))?; +//! })); //! //! // 4. Execute LLM-generated TypeScript code //! let code = r#" @@ -51,7 +53,7 @@ //! } //! "#; //! -//! let output = code_mode.execute(code, Some(registry)).await?; +//! let output = code_mode.execute_typescript(code, ToolDisclosure::default(), Some(registry)).await?; //! //! if output.success { //! println!("Result: {}", serde_json::to_string_pretty(&output.output)?); @@ -79,15 +81,29 @@ //! - [`CodeMode::add_tool_set`] - Add a pre-built ToolSet directly //! //! **Accessor methods**: -//! - [`CodeMode::tool_sets`] - Get registered ToolSets +//! - [`CodeMode::tool_sets`] - Get all registered ToolSets +//! - [`CodeMode::server_tool_sets`] - Get only MCP server ToolSets //! - [`CodeMode::servers`] - Get registered server configurations //! - [`CodeMode::callbacks`] - Get registered callback configurations -//! 
- [`CodeMode::allowed_hosts`] - Get allowed network hosts +//! - [`CodeMode::virtual_fs`] - Get the virtual filesystem used by bash execution //! //! **Execution methods**: //! - [`CodeMode::list_functions`] - List all available functions with minimal interfaces //! - [`CodeMode::get_function_details`] - Get full typed interfaces for specific functions -//! - [`CodeMode::execute`] - Execute TypeScript code in the sandbox +//! - [`CodeMode::execute_typescript`] - Execute TypeScript code in the sandbox +//! - [`CodeMode::execute_bash`] - Execute a bash command in the virtual filesystem +//! +//! ### ToolDisclosure +//! +//! [`config::ToolDisclosure`] controls how tools are presented to the LLM and how generated +//! TypeScript code invokes them. Choose the mode that matches your agent's workflow: +//! +//! - **`Catalog`** (default) - Tools are discovered via `list_tools` → `get_tool_details`, then +//! called through typed TypeScript namespaces (e.g. `await Greeter.greet({ name: "Alice" })`). +//! - **`Filesystem`** - Like `Catalog` but the agent works within a virtual filesystem via +//! `execute_bash` before invoking TypeScript. +//! - **`Sidecar`** - Tools are passed as original MCP descriptions. The generated code uses an +//! `InvokeMap` type and a type-safe `invoke()` function rather than namespace methods. //! //! ### Tools and ToolSets //! @@ -96,78 +112,85 @@ //! - **MCP tools**: Loaded from MCP servers via [`CodeMode::add_server`] //! - **Callback tools**: Defined via [`CallbackConfig`](model::CallbackConfig) and [`CodeMode::add_callback`] //! -//! ### Callbacks +//! ### PctxRegistry +//! +//! [`PctxRegistry`] is a thread-safe registry that routes TypeScript function calls to either +//! local Rust callbacks or upstream MCP servers. Pass it to [`CodeMode::execute_typescript`]. //! -//! [`CallbackFn`] are Rust async functions that execute when TypeScript code calls callback tools. -//! 
Register them in a [`CallbackRegistry`] and pass it to [`CodeMode::execute`]. +//! - `add_callback(id, fn)` - Register a [`CallbackFn`] for a tool ID (e.g. `"Greeter.greet"`) +//! - `add_mcp(tool_names, cfg)` - Register MCP tools from a server +//! - `invoke(id, args)` - Dispatch a call by tool ID (used internally during execution) //! //! ## Examples //! //! ### Multi-Tool Workflow //! -//! ```ignore -//! # use pctx_code_mode::{CodeMode, CallbackRegistry}; -//! # async fn example(code_mode: CodeMode, registry: CallbackRegistry) -> anyhow::Result<()> { -//! let code = r#" -//! async function run() { -//! // Fetch user data -//! const user = await UserApi.getUser({ id: 123 }); -//! -//! // Process the data -//! const processed = await DataProcessor.transform({ -//! input: user.data, -//! format: "normalized" -//! }); -//! -//! // Save results -//! const saved = await Storage.save({ -//! key: `user_${user.id}`, -//! value: processed -//! }); -//! -//! return { -//! userId: user.id, -//! saved: saved.success, -//! location: saved.url -//! }; -//! } -//! "#; -//! -//! let output = code_mode.execute(code, Some(registry)).await?; -//! # Ok(()) -//! # } +//! ``` +//! use pctx_code_mode::CodeMode; +//! use pctx_code_mode::registry::PctxRegistry; +//! use pctx_code_mode::config::ToolDisclosure; +//! async fn example(code_mode: CodeMode, registry: PctxRegistry) -> anyhow::Result<()> { +//! let code = r#" +//! async function run() { +//! // Fetch user data +//! const user = await UserApi.getUser({ id: 123 }); +//! +//! // Process the data +//! const processed = await DataProcessor.transform({ +//! input: user.data, +//! format: "normalized" +//! }); +//! +//! // Save results +//! const saved = await Storage.save({ +//! key: `user_${user.id}`, +//! value: processed +//! }); +//! +//! return { +//! userId: user.id, +//! saved: saved.success, +//! location: saved.url +//! }; +//! } +//! "#; +//! +//! 
let output = code_mode.execute_typescript(code, ToolDisclosure::Catalog, Some(registry)).await?; +//! Ok(()) +//! } //! ``` //! //! ## Architecture //! //! 1. **Tool Definition**: Tools are defined with JSON Schemas for inputs/outputs -//! 2. **Code Generation**: TypeScript interface definitions are generated from schemas -//! 3. **Code Execution**: User code is wrapped with namespace implementations and executed in Deno -//! 4. **Callback Routing**: Function calls in TypeScript are routed to Rust callbacks or MCP servers -//! 5. **Result Marshaling**: JSON values are passed between TypeScript and Rust +//! 2. **Disclosure Mode**: [`config::ToolDisclosure`] determines how tools are surfaced and the +//! TypeScript code generation strategy used +//! 3. **Code Generation**: TypeScript interface definitions are generated from schemas; `Catalog`/ +//! `Filesystem` modes emit full namespace implementations, `Sidecar` emits an `InvokeMap` +//! 4. **Code Execution**: User code is wrapped with generated bindings and executed in Deno +//! 5. **Call Routing**: TypeScript function calls are dispatched through [`PctxRegistry`] to Rust +//! callbacks or MCP servers +//! 6. **Result Marshaling**: JSON values are passed between TypeScript and Rust //! //! ## Sandbox Security //! //! Code is executed in a Deno runtime with: //! - Network access restricted to allowed hosts (from registered MCP servers) -//! - No file system access +//! - No file system access (use `execute_bash` with the virtual filesystem instead) //! - No subprocess spawning //! 
- Isolated V8 context per execution mod code_mode; +pub mod descriptions; pub mod model; // Core execution API pub use code_mode::CodeMode; -// Re-export config, runtime and codegen crates -pub use pctx_code_execution_runtime as runtime; +// Re-export config, codegen, registry, crates pub use pctx_codegen as codegen; pub use pctx_config as config; - -// Re-export commonly used types for backwards compatibility -pub use pctx_code_execution_runtime::{CallbackFn, CallbackRegistry}; -pub use pctx_codegen::{RootSchema, Tool, ToolSet, case}; +pub use pctx_registry as registry; pub type Result = std::result::Result; #[derive(Debug, thiserror::Error)] @@ -178,6 +201,8 @@ pub enum Error { McpService(#[from] pctx_config::server::ServiceError), #[error("Codegen error: {0}")] Codegen(#[from] pctx_codegen::CodegenError), + #[error("Registry error: {0}")] + Registry(#[from] registry::RegistryError), #[error("Execution error: {0:?}")] Execution(#[from] pctx_executor::DenoExecutorError), #[error("Error: {0}")] diff --git a/crates/pctx_code_mode/src/model.rs b/crates/pctx_code_mode/src/model.rs index 852d6090..47e7c277 100644 --- a/crates/pctx_code_mode/src/model.rs +++ b/crates/pctx_code_mode/src/model.rs @@ -109,6 +109,14 @@ pub struct FunctionDetails { // -------------- Execute -------------- +#[allow(clippy::doc_markdown)] +#[derive(Debug, Default, Serialize, Deserialize, schemars::JsonSchema, ToSchema)] +#[serde(default)] +pub struct ExecuteBashInput { + /// Bash command to execute + pub command: String, +} + #[allow(clippy::doc_markdown)] #[derive(Debug, Default, Serialize, Deserialize, schemars::JsonSchema, ToSchema)] #[serde(default)] @@ -117,7 +125,7 @@ pub struct ExecuteInput { /// /// REQUIRED FORMAT: /// async function ``run()`` { - /// // YOUR CODE GOES HERE e.g. const result await ``Namespace.method();`` + /// // YOUR CODE GOES HERE /// // ALWAYS RETURN THE RESULT e.g. 
return result; /// } /// @@ -174,13 +182,20 @@ impl Display for ExecuteOutput { #[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] pub struct CallbackConfig { pub name: String, - pub namespace: String, + pub namespace: Option, pub description: Option, pub input_schema: Option, pub output_schema: Option, } impl CallbackConfig { pub fn id(&self) -> String { - format!("{}.{}", &self.namespace, &self.name) + format!( + "{}{}", + self.namespace + .as_ref() + .map(|n| format!("{n}__")) + .unwrap_or_default(), + &self.name + ) } } diff --git a/crates/pctx_codegen/Cargo.toml b/crates/pctx_codegen/Cargo.toml index c7b416e3..7f56ccc0 100644 --- a/crates/pctx_codegen/Cargo.toml +++ b/crates/pctx_codegen/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pctx_codegen" -version = "0.2.0" +version = "0.3.0" edition = "2024" license = "MIT" description = "Code generation utilities for pctx" diff --git a/crates/pctx_codegen/src/lib.rs b/crates/pctx_codegen/src/lib.rs index 434b7b5a..6c7f7a9d 100644 --- a/crates/pctx_codegen/src/lib.rs +++ b/crates/pctx_codegen/src/lib.rs @@ -12,7 +12,7 @@ use thiserror::Error; // re-export RootSchema pub use schemars::schema::RootSchema; -pub use tools::{Tool, ToolSet, ToolVariant}; +pub use tools::{Tool, ToolSet}; pub type SchemaDefinitions = IndexMap; pub type CodegenResult = Result; @@ -26,7 +26,8 @@ pub enum CodegenError { TypeGen(String), } -pub fn generate_docstring(content: &str) -> String { +/// Generates a code-safe docstring for the given string in Typescript +pub fn ts_generate_docstring(content: &str) -> String { let mut lines = vec!["/**".to_string()]; let replace_pat = regex::Regex::new(r"\*\/").expect("invalid docstring replace_pat"); diff --git a/crates/pctx_codegen/src/tools.rs b/crates/pctx_codegen/src/tools.rs index 771d6878..f6b442e6 100644 --- a/crates/pctx_codegen/src/tools.rs +++ b/crates/pctx_codegen/src/tools.rs @@ -6,51 +6,77 @@ use tracing::debug; use crate::{ CodegenResult, case::Case, - generate_docstring, + 
ts_generate_docstring, typegen::{TypegenResult, generate_types}, }; +pub const DEFAULT_NAMESPACE: &str = "Tools"; + #[derive(Clone, Debug, Serialize, Deserialize)] pub struct ToolSet { - pub name: String, - pub namespace: String, + pub name: Option, pub description: String, pub tools: Vec, } impl ToolSet { - pub fn new(name: &str, description: &str, tools: Vec) -> Self { + pub fn new(name: Option, description: &str, tools: Vec) -> Self { Self { - name: name.into(), - namespace: Case::Pascal.sanitize(name), + name, description: description.into(), tools, } } - pub fn namespace_interface(&self, include_types: bool) -> String { + pub fn tool_ids(&self) -> Vec { + self.tools + .iter() + .map(|t| t.id(self.name.as_deref())) + .collect() + } + + /// Returns the pascal case of the registered namespace + /// falling back on `Tools` if not present + pub fn pascal_namespace(&self) -> String { + self.name + .as_ref() + .map(|n| Case::Pascal.sanitize(n)) + .unwrap_or(DEFAULT_NAMESPACE.to_string()) + } + + // ------------- Typescript-Specific Code Generation ------------- + + /// Returns the generated typescript declaration (`.d.ts`) code for the ToolSet + /// as a typescript `namespace` + pub fn ts_namespace_declaration(&self, include_types: bool) -> String { let fns: Vec = self .tools .iter() - .map(|t| t.fn_signature(include_types)) + .map(|t| t.ts_fn_signature(include_types)) .collect(); - self.wrap_with_namespace(&fns.join("\n\n")) + self.ts_wrap_with_namespace(&fns.join("\n\n")) } - pub fn namespace(&self) -> String { - let fns: Vec = self.tools.iter().map(|t| t.fn_impl(&self.name)).collect(); - self.wrap_with_namespace(&fns.join("\n\n")) + /// Returns the full generated typescript code for this ToolSet as + /// a typescript `namespace` + pub fn ts_namespace_impl(&self) -> String { + let fns: Vec = self + .tools + .iter() + .map(|t| t.ts_fn_impl(self.name.as_deref())) + .collect(); + self.ts_wrap_with_namespace(&fns.join("\n\n")) } - pub fn wrap_with_namespace(&self, 
content: &str) -> String { + pub fn ts_wrap_with_namespace(&self, content: &str) -> String { format!( "{docstring} namespace {namespace} {{ {content} }}", - docstring = generate_docstring(&self.description), - namespace = &self.namespace, + docstring = ts_generate_docstring(&self.description), + namespace = self.pascal_namespace(), ) } } @@ -60,7 +86,6 @@ pub struct Tool { pub name: String, pub fn_name: String, pub description: Option, - pub variant: ToolVariant, pub input_schema: Option, pub output_schema: Option, @@ -70,36 +95,14 @@ pub struct Tool { } impl Tool { - pub fn new_mcp( - name: &str, - description: Option, - input: Option, - output: Option, - ) -> CodegenResult { - Self::_new(name, description, input, output, ToolVariant::Mcp) - } - - pub fn new_callback( - name: &str, - description: Option, - input: Option, - output: Option, - ) -> CodegenResult { - Self::_new(name, description, input, output, ToolVariant::Callback) - } - - fn _new( + pub fn new( name: &str, description: Option, input: Option, output: Option, - variant: ToolVariant, ) -> CodegenResult { let fn_name = Case::Camel.sanitize(name); - debug!( - variant =? variant, - "Generating Typescript interface for tool: '{name}' -> function {fn_name}", - ); + debug!("Generating Typescript interface for tool: '{name}' -> function {fn_name}",); let input_type = if let Some(i) = &input { Some(generate_types(i.clone(), &format!("{fn_name}Input"))?) 
@@ -121,10 +124,17 @@ impl Tool { fn_name, input_type, output_type, - variant, }) } + pub fn id(&self, toolset_name: Option<&str>) -> String { + format!( + "{}{}", + toolset_name.map(|n| format!("{n}__")).unwrap_or_default(), + &self.name + ) + } + pub fn input_signature(&self) -> Option { // No input schema -> no params for the generated function self.input_type.as_ref().map(|i| i.type_signature.clone()) @@ -138,6 +148,7 @@ impl Tool { .unwrap_or("any".into()) } + /// Returns all the input and output types as a string for the Tool pub fn types(&self) -> String { let mut type_defs = String::new(); if let Some(i) = &self.input_type { @@ -150,7 +161,19 @@ impl Tool { type_defs } - pub fn fn_signature(&self, include_types: bool) -> String { + /// Returns the typescript function signature for the Tool with a docstring + /// + /// e.g. + /// ```typescript + /// /** + /// * function docstring + /// */ + /// export async function myFunction(input: InputType): Promise + /// ``` + /// + /// The function signature has no trailing `;` or `{` so can be used for either + /// .ts or .d.ts generation + pub fn ts_fn_signature(&self, include_types: bool) -> String { let docstring_content = self.description.clone().unwrap_or_default(); let mut types = String::new(); @@ -166,54 +189,44 @@ impl Tool { format!( "{types}{docstring}\nexport async function {fn_name}({params}): Promise<{output}>", - docstring = generate_docstring(&docstring_content), + docstring = ts_generate_docstring(&docstring_content), fn_name = &self.fn_name, output = &self.output_signature(), ) } - pub fn fn_impl(&self, toolset_name: &str) -> String { + /// Returns the typescript function implementation including + /// the function signature, input/output types, and internal tool + /// functionality. 
+ pub fn ts_fn_impl(&self, toolset_name: Option<&str>) -> String { let arguments = self .input_schema .as_ref() .map(|_| "arguments: input,".to_string()) .unwrap_or_default(); - match self.variant { - ToolVariant::Mcp => { - format!( - "{fn_sig} {{ - return await callMCPTool<{output}>({{ - serverName: {name}, - toolName: {tool}, - {arguments} - }}); -}}", - fn_sig = self.fn_signature(true), - name = json!(toolset_name), - tool = json!(&self.name), - output = &self.output_signature(), - ) - } - ToolVariant::Callback => { - format!( - "{fn_sig} {{ - return await invokeCallback<{output}>({{ - id: {id}, - {arguments} - }}); + + format!( + "{fn_sig} {{ + return await invokeInternal({{ name: {name}, {arguments} }}); }}", - fn_sig = self.fn_signature(true), - id = json!(format!("{toolset_name}.{}", &self.name)), - output = &self.output_signature(), - ) - } - } + fn_sig = self.ts_fn_signature(true), + name = json!(self.id(toolset_name)) + ) } -} -#[derive(Clone, Debug, Serialize, Deserialize)] -#[serde(rename_all = "snake_case")] -pub enum ToolVariant { - Mcp, - Callback, + /// Creates a typescript type map entry for the given tool, + /// meant to be wrapped by `type InvokeMap { ...entries } ` + pub fn ts_invoke_map_entry(&self, toolset_name: Option<&str>) -> String { + let args = match &self.input_type { + Some(i) if i.all_optional => format!("{} | undefined", &i.type_signature), + Some(i) => format!("{}", &i.type_signature), + None => format!("any | undefined"), + }; + + format!( + "{name}: {{ args: {args}, returns: {returns} }};", + name = json!(self.id(toolset_name)), + returns = self.output_signature() + ) + } } diff --git a/crates/pctx_codegen/src/typegen/schema_data.rs b/crates/pctx_codegen/src/typegen/schema_data.rs index bdf50cea..6c4c66bb 100644 --- a/crates/pctx_codegen/src/typegen/schema_data.rs +++ b/crates/pctx_codegen/src/typegen/schema_data.rs @@ -3,8 +3,9 @@ use schemars::schema::Schema; use serde::{Deserialize, Serialize}; use crate::{ - CodegenResult, 
SchemaDefinitions, generate_docstring, + CodegenResult, SchemaDefinitions, schema_type::{ObjectSchemaType, SchemaType}, + ts_generate_docstring, utils::get_description, }; @@ -33,7 +34,7 @@ impl ObjectSchemaData { name: prop_name.clone(), sig: prop_st.type_signature(required, defs)?, doc_string: get_description(&prop_schema.clone().into_object(), defs)? - .map(|desc| generate_docstring(&desc)), + .map(|desc| ts_generate_docstring(&desc)), required, nullable: prop_st.is_nullable(), }; @@ -54,7 +55,7 @@ impl ObjectSchemaData { Ok(Self { name: obj_st.type_name.clone(), doc_string: get_description(&obj_st.schema_obj, defs)? - .map(|desc| generate_docstring(&desc)), + .map(|desc| ts_generate_docstring(&desc)), properties, additional_props_sig, }) diff --git a/crates/pctx_codegen/tests/snapshots/tool__test_all_optional_input__fn_impl.ts.snap b/crates/pctx_codegen/tests/snapshots/tool__test_all_optional_input__fn_impl.ts.snap index 84bedcc2..2de4266f 100644 --- a/crates/pctx_codegen/tests/snapshots/tool__test_all_optional_input__fn_impl.ts.snap +++ b/crates/pctx_codegen/tests/snapshots/tool__test_all_optional_input__fn_impl.ts.snap @@ -32,9 +32,8 @@ export type SearchLogsOutputResults = { export async function searchLogs( input: SearchLogsInput = {}, ): Promise { - return await callMCPTool({ - serverName: "test_server", - toolName: "search_logs", + return await invokeInternal({ + name: "test_server__search_logs", arguments: input, }); } diff --git a/crates/pctx_codegen/tests/snapshots/tool__test_basic__fn_impl.ts.snap b/crates/pctx_codegen/tests/snapshots/tool__test_basic__fn_impl.ts.snap index a13a63e1..b5903f9c 100644 --- a/crates/pctx_codegen/tests/snapshots/tool__test_basic__fn_impl.ts.snap +++ b/crates/pctx_codegen/tests/snapshots/tool__test_basic__fn_impl.ts.snap @@ -22,9 +22,8 @@ export type GetWeatherOutput = { export async function getWeather( input: GetWeatherInput, ): Promise { - return await callMCPTool({ - serverName: "test_server", - toolName: "get_weather", 
+ return await invokeInternal({ + name: "test_server__get_weather", arguments: input, }); } diff --git a/crates/pctx_codegen/tests/snapshots/tool__test_nested_types__fn_impl.ts.snap b/crates/pctx_codegen/tests/snapshots/tool__test_nested_types__fn_impl.ts.snap index 46e9825c..b9451a76 100644 --- a/crates/pctx_codegen/tests/snapshots/tool__test_nested_types__fn_impl.ts.snap +++ b/crates/pctx_codegen/tests/snapshots/tool__test_nested_types__fn_impl.ts.snap @@ -30,9 +30,8 @@ export type CreateDocumentOutput = { export async function createDocument( input: CreateDocumentInput, ): Promise { - return await callMCPTool({ - serverName: "test_server", - toolName: "create_document", + return await invokeInternal({ + name: "test_server__create_document", arguments: input, }); } diff --git a/crates/pctx_codegen/tests/snapshots/tool__test_no_input__fn_impl.ts.snap b/crates/pctx_codegen/tests/snapshots/tool__test_no_input__fn_impl.ts.snap index d67a6a0a..122636f5 100644 --- a/crates/pctx_codegen/tests/snapshots/tool__test_no_input__fn_impl.ts.snap +++ b/crates/pctx_codegen/tests/snapshots/tool__test_no_input__fn_impl.ts.snap @@ -10,7 +10,5 @@ export type ListFilesOutput = { * List all files in the current workspace */ export async function listFiles(): Promise { - return await invokeCallback({ - id: "test_server.list_files", - }); + return await invokeInternal({ name: "test_server__list_files" }); } diff --git a/crates/pctx_codegen/tests/snapshots/tool__test_no_input_or_output__fn_impl.ts.snap b/crates/pctx_codegen/tests/snapshots/tool__test_no_input_or_output__fn_impl.ts.snap index 9e2f4316..c885b746 100644 --- a/crates/pctx_codegen/tests/snapshots/tool__test_no_input_or_output__fn_impl.ts.snap +++ b/crates/pctx_codegen/tests/snapshots/tool__test_no_input_or_output__fn_impl.ts.snap @@ -6,7 +6,5 @@ expression: impl_code * Health check ping */ export async function ping(): Promise { - return await invokeCallback({ - id: "test_server.ping", - }); + return await invokeInternal({ 
name: "test_server__ping" }); } diff --git a/crates/pctx_codegen/tests/snapshots/tool__test_no_output__fn_impl.ts.snap b/crates/pctx_codegen/tests/snapshots/tool__test_no_output__fn_impl.ts.snap index 5724ce14..71e328d2 100644 --- a/crates/pctx_codegen/tests/snapshots/tool__test_no_output__fn_impl.ts.snap +++ b/crates/pctx_codegen/tests/snapshots/tool__test_no_output__fn_impl.ts.snap @@ -16,8 +16,8 @@ export type SendNotificationInput = { export async function sendNotification( input: SendNotificationInput, ): Promise { - return await invokeCallback({ - id: "test_server.send_notification", + return await invokeInternal({ + name: "test_server__send_notification", arguments: input, }); } diff --git a/crates/pctx_codegen/tests/tool.rs b/crates/pctx_codegen/tests/tool.rs index 8e3e5b93..e73bbcc5 100644 --- a/crates/pctx_codegen/tests/tool.rs +++ b/crates/pctx_codegen/tests/tool.rs @@ -18,24 +18,14 @@ struct ToolFixture { } impl ToolFixture { - fn to_mcp_tool(&self) -> Tool { - Tool::new_mcp( + fn to_tool(&self) -> Tool { + Tool::new( &self.name, self.description.clone(), self.input_schema.clone(), self.output_schema.clone(), ) - .expect("Tool::new_mcp failed") - } - - fn to_callback_tool(&self) -> Tool { - Tool::new_callback( - &self.name, - self.description.clone(), - self.input_schema.clone(), - self.output_schema.clone(), - ) - .expect("Tool::new_callback failed") + .expect("Tool::new failed") } } @@ -46,13 +36,14 @@ fn load_fixture(yml: &str) -> ToolFixture { // --- Tool tests --- macro_rules! 
tool_test { - ($test_name:ident, variant: $variant:ident, $fixture:expr) => { + ($test_name:ident, $fixture:expr) => { #[tokio::test] async fn $test_name() { let fixture = load_fixture($fixture); - let tool = fixture.$variant(); + let tool = fixture.to_tool(); - let impl_code = pctx_codegen::format::format_ts(&tool.fn_impl("test_server")); + let impl_code = + pctx_codegen::format::format_ts(&tool.ts_fn_impl(Some("test_server".into()))); let check_res = type_check(&impl_code).expect("failed typecheck"); assert!( @@ -64,12 +55,12 @@ macro_rules! tool_test { }; } -tool_test!(test_basic, variant: to_mcp_tool, BASIC_TOOL); -tool_test!(test_nested_types, variant: to_mcp_tool, NESTED_TYPES_TOOL); -tool_test!(test_no_output, variant: to_callback_tool, NO_OUTPUT_TOOL); -tool_test!(test_no_input, variant: to_callback_tool, NO_INPUT_TOOL); -tool_test!(test_no_input_or_output, variant: to_callback_tool, NO_INPUT_OR_OUTPUT_TOOL); -tool_test!(test_all_optional_input, variant: to_mcp_tool, ALL_OPTIONAL_INPUT_TOOL); +tool_test!(test_basic, BASIC_TOOL); +tool_test!(test_nested_types, NESTED_TYPES_TOOL); +tool_test!(test_no_output, NO_OUTPUT_TOOL); +tool_test!(test_no_input, NO_INPUT_TOOL); +tool_test!(test_no_input_or_output, NO_INPUT_OR_OUTPUT_TOOL); +tool_test!(test_all_optional_input, ALL_OPTIONAL_INPUT_TOOL); // --- ToolSet tests --- @@ -79,13 +70,13 @@ fn test_toolset_namespace() { let notif = load_fixture(NESTED_TYPES_TOOL); let toolset = ToolSet::new( - "my_tools", + Some("my_tools".into()), "A collection of utility tools", - vec![basic.to_mcp_tool(), notif.to_callback_tool()], + vec![basic.to_tool(), notif.to_tool()], ); insta::assert_snapshot!( "toolset__namespace_interface.ts", - toolset.namespace_interface(true) + toolset.ts_namespace_declaration(true) ); } diff --git a/crates/pctx_config/Cargo.toml b/crates/pctx_config/Cargo.toml index e615c86a..e3109e33 100644 --- a/crates/pctx_config/Cargo.toml +++ b/crates/pctx_config/Cargo.toml @@ -1,6 +1,6 @@ [package] name = 
"pctx_config" -version = "0.1.3" +version = "0.1.4" edition = "2024" license = "MIT" description = "Configuration management for pctx toolkit" diff --git a/crates/pctx_config/src/lib.rs b/crates/pctx_config/src/lib.rs index 08c962a2..2a5b6722 100644 --- a/crates/pctx_config/src/lib.rs +++ b/crates/pctx_config/src/lib.rs @@ -2,7 +2,7 @@ use anyhow::{Context, Result}; use camino::Utf8PathBuf; use serde::{Deserialize, Serialize}; use serde_json::json; -use std::fs; +use std::{fmt::Display, fs}; use tracing::debug; use crate::{logger::LoggerConfig, server::ServerConfig, telemetry::TelemetryConfig}; @@ -29,6 +29,11 @@ pub struct Config { #[serde(skip_serializing_if = "Option::is_none")] pub description: Option, + /// Tool disclosure mode, determines which set of + /// code mode tools will be made available. + #[serde(default)] + pub disclosure: ToolDisclosure, + /// Upstream MCP server configurations #[serde(default)] pub servers: Vec, @@ -139,3 +144,24 @@ impl Config { self.servers.iter_mut().find(|s| s.name == name) } } + +#[derive(Copy, Debug, Clone, Serialize, Deserialize, Default)] +pub enum ToolDisclosure { + /// list_tools -> get_tool_details -> execute_typescript + #[default] + #[serde(rename = "catalog")] + Catalog, + /// execute_bash -> execute_typescript + #[serde(rename = "filesystem")] + #[serde(alias = "fs")] + Filesystem, + /// original tool descriptions -> execute_typescript + #[serde(rename = "sidecar")] + Sidecar, +} +impl Display for ToolDisclosure { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let val = json!(self).to_string().replace("\"", ""); + write!(f, "{}", val) + } +} diff --git a/crates/pctx_executor/Cargo.toml b/crates/pctx_executor/Cargo.toml index aeb4b180..bdaca056 100644 --- a/crates/pctx_executor/Cargo.toml +++ b/crates/pctx_executor/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pctx_executor" -version = "0.1.3" +version = "0.2.0" edition = "2024" rust-version = "1.89" license = "MIT" @@ -17,9 +17,9 @@ 
deno_resolver = { workspace = true } node_resolver = { workspace = true } sys_traits = { workspace = true } pctx_deno_transpiler = { version = "^0.1.1", path = "../pctx_deno_transpiler" } -pctx_code_execution_runtime = { version = "^0.1.3", path = "../pctx_code_execution_runtime" } -pctx_type_check_runtime = { version = "^0.1.2", path = "../pctx_type_check_runtime" } -pctx_config = { version = "^0.1.3", path = "../pctx_config" } +pctx_code_execution_runtime = { version = "^0.2.0", path = "../pctx_code_execution_runtime" } +pctx_registry = { version = "^0.1.0", path = "../pctx_registry" } +pctx_type_check_runtime = { version = "^0.1.3", path = "../pctx_type_check_runtime" } tempfile = "3" regex = "1" thiserror = { workspace = true } diff --git a/crates/pctx_executor/src/lib.rs b/crates/pctx_executor/src/lib.rs index f2d7719d..16321852 100644 --- a/crates/pctx_executor/src/lib.rs +++ b/crates/pctx_executor/src/lib.rs @@ -4,7 +4,7 @@ use deno_core::RuntimeOptions; use deno_core::anyhow; use deno_core::error::CoreError; use futures::lock::Mutex; -use pctx_code_execution_runtime::CallbackRegistry; +use pctx_registry::PctxRegistry; pub use pctx_type_check_runtime::{CheckResult, Diagnostic, is_relevant_error}; use pctx_type_check_runtime::{init_v8_platform, type_check}; use serde::{Deserialize, Serialize}; @@ -28,17 +28,13 @@ pub type Result = std::result::Result; #[derive(Clone, Default)] pub struct ExecuteOptions { - pub allowed_hosts: Option>, - pub servers: Vec, - pub callback_registry: CallbackRegistry, + pub registry: PctxRegistry, } impl std::fmt::Debug for ExecuteOptions { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("ExecuteOptions") - .field("allowed_hosts", &self.allowed_hosts) - .field("servers", &self.servers) - .field("callback_registry", &self.callback_registry.ids()) + .field("registry", &self.registry.ids()) .finish() } } @@ -48,28 +44,12 @@ impl ExecuteOptions { Self::default() } - #[must_use] - pub fn 
with_allowed_hosts(mut self, hosts: Vec) -> Self { - self.allowed_hosts = Some(hosts); - self - } - - #[must_use] - pub fn with_servers(mut self, servers: Vec) -> Self { - self.servers = servers; - self - } - - /// Set the unified local callable registry + /// Set the unified local registry /// - /// This registry contains all local tool callbacks regardless of their source language. - /// Python, Node.js, and Rust callbacks are all wrapped as Rust closures and stored here. + /// This registry contains all local tool callbacks, and mcp server actions #[must_use] - pub fn with_callbacks( - mut self, - registry: pctx_code_execution_runtime::CallbackRegistry, - ) -> Self { - self.callback_registry = registry; + pub fn with_registry(mut self, registry: PctxRegistry) -> Self { + self.registry = registry; self } } @@ -109,29 +89,28 @@ pub enum DenoExecutorError { /// Execute TypeScript code with type checking and runtime execution /// /// This function combines type checking and execution: -/// 1. First runs TypeScript type checking via `check()` -/// 2. If type checking passes, executes code with Deno runtime +/// 1. First runs TypeScript type checking via `run_type_check()` +/// 2. If type checking passes, executes transpiled code with Deno runtime /// 3. Returns unified result with diagnostics and runtime output /// /// # Arguments /// * `code` - The TypeScript code to check and execute -/// * `options` - Execution options (allowed hosts, MCP configs, local tools) +/// * `options` - Execution options (registry with local tools and MCP server actions) /// /// # Returns -/// * `Ok(ExecuteResult)` - Contains type diagnostics, runtime errors, and output +/// * `Ok(ExecuteResult)` - Contains type diagnostics, runtime errors, stdout/stderr, and default export value /// /// # Errors -/// * Returns error only if internal tooling fails (not for type errors or runtime errors) +/// * Returns `Err` only if internal tooling fails (e.g. 
type check infrastructure); type errors +/// and runtime errors are captured in the returned `ExecuteResult` /// /// # Example /// ```rust,no_run /// use pctx_executor::{execute, ExecuteOptions}; /// /// # async fn example() -> Result<(), Box> { -/// let options = ExecuteOptions::new() -/// .with_allowed_hosts(vec!["api.example.com".to_string()]); -/// -/// let result = execute("const x = 1 + 1; export default x;", options).await?; +/// let result = execute("const x = 1 + 1; export default x;", ExecuteOptions::new()).await?; +/// assert_eq!(result.output, Some(serde_json::json!(2))); /// # Ok(()) /// # } /// ``` @@ -316,29 +295,9 @@ async fn execute_code( } }; - // Create MCP registry and populate it with provided configs - let mcp_registry = pctx_code_execution_runtime::MCPRegistry::new(); - - for config in options.servers { - if let Err(e) = mcp_registry.add(config) { - warn!(runtime = "execution", error = %e, "Failed to register MCP server"); - return Ok(InternalExecuteResult { - success: false, - output: None, - error: Some(ExecutionError { - message: format!("MCP registration failed: {e}"), - stack: None, - }), - stdout: String::new(), - stderr: String::new(), - }); - } - } - // Build extensions list let extensions = vec![pctx_code_execution_runtime::pctx_runtime_snapshot::init( - mcp_registry, - options.callback_registry, + options.registry, )]; // Create JsRuntime from `pctx_runtime` snapshot and extension diff --git a/crates/pctx_executor/src/tests/callback_usage.rs b/crates/pctx_executor/src/tests/callback_usage.rs index 26b027ba..0de90443 100644 --- a/crates/pctx_executor/src/tests/callback_usage.rs +++ b/crates/pctx_executor/src/tests/callback_usage.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use pctx_code_execution_runtime::CallbackRegistry; +use pctx_code_execution_runtime::PctxRegistry; use serde::Deserialize; use serde_json::json; @@ -10,10 +10,10 @@ use crate::{ExecuteOptions, execute}; #[serial] #[tokio::test] async fn test_execute_with_callbacks() { - 
let registry = CallbackRegistry::default(); + let registry = PctxRegistry::default(); registry - .add( - "MyMath.add", + .add_callback( + "my_math.add", Arc::new(move |args: Option| { Box::pin(async move { #[derive(Deserialize)] @@ -35,7 +35,7 @@ async fn test_execute_with_callbacks() { let code = r#" async function test() { try { - const val = await invokeCallback({ id: "MyMath.add", arguments: { a: 12, b: 4 } }); + const val = await invokeInternal({ name: "my_math.add", arguments: { a: 12, b: 4 } }); return { error: false, value: val }; } catch (e) { return { error: true, message: e instanceof Error ? e.message : String(e) }; @@ -45,7 +45,7 @@ async function test() { export default await test(); "#; - let result = execute(code, ExecuteOptions::new().with_callbacks(registry)) + let result = execute(code, ExecuteOptions::new().with_registry(registry)) .await .expect("execution should succeed"); @@ -64,10 +64,10 @@ export default await test(); #[serial] #[tokio::test] async fn test_execute_with_async_callbacks() { - let registry = CallbackRegistry::default(); + let registry = PctxRegistry::default(); registry - .add( - "MyAsync.wait", + .add_callback( + "my_async__wait", Arc::new(move |args: Option| { Box::pin(async move { #[derive(Deserialize)] @@ -89,7 +89,7 @@ async fn test_execute_with_async_callbacks() { let code = r#" async function test() { try { - const val = await invokeCallback({ id: "MyAsync.wait", arguments: { ms: 50 } }); + const val = await invokeInternal({ name: "my_async__wait", arguments: { ms: 50 } }); return { error: false, value: val }; } catch (e) { return { error: true, message: e instanceof Error ? 
e.message : String(e) }; @@ -99,7 +99,7 @@ async function test() { export default await test(); "#; - let result = execute(code, ExecuteOptions::new().with_callbacks(registry)) + let result = execute(code, ExecuteOptions::new().with_registry(registry)) .await .expect("execution should succeed"); diff --git a/crates/pctx_executor/src/tests/mcp_client_usage.rs b/crates/pctx_executor/src/tests/mcp_client_usage.rs index 5b70e712..8e7d2546 100644 --- a/crates/pctx_executor/src/tests/mcp_client_usage.rs +++ b/crates/pctx_executor/src/tests/mcp_client_usage.rs @@ -1,41 +1,6 @@ use super::serial; use crate::{ExecuteOptions, execute}; -use pctx_config::server::ServerConfig; use serde_json::json; -use url::Url; - -#[serial] -#[tokio::test] -async fn test_execute_with_mcp_client_duplicate_registration() { - let code = r" -export default true; -"; - - // Attempt to register the same server twice - let mcp_configs = vec![ - ServerConfig::new( - "duplicate-server".to_string(), - Url::parse("http://localhost:3000").unwrap(), - ), - ServerConfig::new( - "duplicate-server".to_string(), - Url::parse("http://localhost:3001").unwrap(), - ), - ]; - - let result = execute(code, ExecuteOptions::new().with_servers(mcp_configs)) - .await - .expect("execution should succeed"); - assert!(!result.success, "Duplicate MCP registration should fail"); - assert!(result.runtime_error.is_some(), "Should have runtime error"); - - let error = result.runtime_error.unwrap(); - assert!( - error.message.contains("already registered") || error.message.contains("duplicate"), - "Error should mention duplicate registration, got: {}", - error.message - ); -} #[serial] #[tokio::test] @@ -44,9 +9,8 @@ async fn test_execute_with_mcp_client_call_tool_nonexistent_server() { async function test() { try { - await callMCPTool({ - serverName: "nonexistent-server", - toolName: "some-tool" + await invokeInternal({ + name: "nonexistent-server___some-tool" }); return { error: false }; } catch (e) { diff --git 
a/crates/pctx_mcp_server/Cargo.toml b/crates/pctx_mcp_server/Cargo.toml index bb5be41b..f27e9fa7 100644 --- a/crates/pctx_mcp_server/Cargo.toml +++ b/crates/pctx_mcp_server/Cargo.toml @@ -9,9 +9,6 @@ repository.workspace = true [dependencies] # Local crates pctx_code_mode = { path = "../pctx_code_mode" } -pctx_code_execution_runtime = { path = "../pctx_code_execution_runtime" } -pctx_config = { path = "../pctx_config" } -pctx_codegen = { path = "../pctx_codegen" } # MCP rmcp = { workspace = true, features = [ diff --git a/crates/pctx_mcp_server/src/server.rs b/crates/pctx_mcp_server/src/server.rs index 75e89a0c..2fce366f 100644 --- a/crates/pctx_mcp_server/src/server.rs +++ b/crates/pctx_mcp_server/src/server.rs @@ -1,6 +1,5 @@ use anyhow::Result; use opentelemetry::{global, trace::TraceContextExt}; -use pctx_config::Config; use rmcp::{ ServiceExt, transport::{ @@ -40,14 +39,22 @@ pub struct PctxMcpServer { host: String, port: u16, banner: bool, + service: PctxMcpService, } impl PctxMcpServer { - pub fn new(host: &str, port: u16, banner: bool) -> Self { + pub fn new( + host: &str, + port: u16, + banner: bool, + cfg: &pctx_code_mode::config::Config, + code_mode: pctx_code_mode::CodeMode, + ) -> Self { Self { host: host.into(), port, banner, + service: PctxMcpService::new(&cfg, code_mode), } } @@ -60,14 +67,13 @@ impl PctxMcpServer { /// # Errors /// /// Errors if there is a failure starting the server on the configured host/port - pub async fn serve(&self, cfg: &Config, code_mode: pctx_code_mode::CodeMode) -> Result<()> { + pub async fn serve(&self) -> Result<()> { let shutdown_signal = async { tokio::signal::ctrl_c() .await .expect("failed graceful shutdown"); }; - self.serve_with_shutdown(cfg, code_mode, shutdown_signal) - .await + self.serve_with_shutdown(shutdown_signal).await } /// Serves MCP server with provided config, and shutdown signal @@ -76,18 +82,13 @@ impl PctxMcpServer { /// # Errors /// /// Errors if there is a failure starting the server on the 
configured host/port - pub async fn serve_with_shutdown( - &self, - cfg: &Config, - code_mode: pctx_code_mode::CodeMode, - shutdown_signal: F, - ) -> Result<()> + pub async fn serve_with_shutdown(&self, shutdown_signal: F) -> Result<()> where F: std::future::Future + Send + 'static, { - self.banner_http(cfg, &code_mode); + self.banner_http(); - let mcp_service = PctxMcpService::new(cfg, code_mode); + let mcp_service = self.service.clone(); let service = StreamableHttpService::new( move || Ok(mcp_service.clone()), @@ -166,36 +167,26 @@ impl PctxMcpServer { /// # Panics /// /// Panics if the ctrl-c handler cannot be installed. - pub async fn serve_stdio( - &self, - cfg: &Config, - code_mode: pctx_code_mode::CodeMode, - ) -> Result<()> { + pub async fn serve_stdio(&self) -> Result<()> { let shutdown_signal = async { tokio::signal::ctrl_c() .await .expect("failed graceful shutdown"); }; - self.serve_stdio_with_shutdown(cfg, code_mode, shutdown_signal) - .await + self.serve_stdio_with_shutdown(shutdown_signal).await } /// # Errors /// /// Returns an error if the stdio server fails to start or if the server /// task returns an error. - pub async fn serve_stdio_with_shutdown( - &self, - cfg: &Config, - code_mode: pctx_code_mode::CodeMode, - shutdown_signal: F, - ) -> Result<()> + pub async fn serve_stdio_with_shutdown(&self, shutdown_signal: F) -> Result<()> where F: std::future::Future + Send + 'static, { - self.banner_stdio(cfg, &code_mode); + self.banner_stdio(); - let mcp_service = PctxMcpService::new(cfg, code_mode); + let mcp_service = self.service.clone(); let mut shutdown_signal = Box::pin(shutdown_signal); let mut serve_task = tokio::spawn(mcp_service.serve(stdio())); let running = tokio::select! 
{ @@ -228,13 +219,7 @@ impl PctxMcpServer { Ok(()) } - fn banner( - &self, - cfg: &pctx_config::Config, - code_mode: &pctx_code_mode::CodeMode, - transport_label: &str, - transport_value: &str, - ) -> Option { + fn banner(&self, transport_label: &str, transport_value: &str) -> Option { if !self.banner { return None; } @@ -252,35 +237,44 @@ impl PctxMcpServer { } let mut builder = Builder::default(); - builder.push_record(["Server Name", &cfg.name]); - builder.push_record(["Server Version", &cfg.version]); + builder.push_record(["Server Name", &self.service.name]); + builder.push_record(["Server Version", &self.service.version]); builder.push_record([transport_label, transport_value]); - builder.push_record([ - "Tools", - &["list_functions", "get_function_details", "execute"].join(", "), - ]); + builder.push_record(["Tool Disclosure", &self.service.disclosure.to_string()]); + + let active_tools = self + .service + .list_filtered_tools() + .tools + .iter() + .map(|t| t.name.to_string()) + .collect::>(); + builder.push_record(["Tools", &active_tools.join(", ")]); + builder.push_record(["Docs", &fmt_dimmed("https://github.com/portofcontext/pctx")]); - if !code_mode.tool_sets().is_empty() { + if !self.service.code_mode.tool_sets().is_empty() { builder.push_record(["", ""]); - let tool_record = |s: &pctx_codegen::ToolSet| { + let tool_record = |s: &pctx_code_mode::codegen::ToolSet| { format!( "{} - {} tool{}", - fmt_cyan(&s.name), + fmt_cyan(s.name.as_deref().unwrap_or_default()), s.tools.len(), if s.tools.len() > 1 { "s" } else { "" } ) }; builder.push_record([ "Upstream MCPs", - &code_mode + &self + .service + .code_mode .tool_sets() .first() .map(tool_record) .unwrap_or_default(), ]); - for s in &code_mode.tool_sets()[1..] { + for s in &self.service.code_mode.tool_sets()[1..] 
{ builder.push_record(["", &tool_record(s)]); } } @@ -320,18 +314,18 @@ impl PctxMcpServer { Some(format!("\n{banner}\n")) } - fn banner_http(&self, cfg: &pctx_config::Config, code_mode: &pctx_code_mode::CodeMode) { + fn banner_http(&self) { let mcp_url = format!("http://{}:{}/mcp", self.host, self.port); - if let Some(banner) = self.banner(cfg, code_mode, "Server URL", &mcp_url) { + if let Some(banner) = self.banner("Server URL", &mcp_url) { println!("{banner}"); // tracing::info doesn't work well with colors / formatting } info!("PCTX listening at {mcp_url}..."); } - fn banner_stdio(&self, cfg: &pctx_config::Config, code_mode: &pctx_code_mode::CodeMode) { - if let Some(banner) = self.banner(cfg, code_mode, "Transport", "stdio") { + fn banner_stdio(&self) { + if let Some(banner) = self.banner("Transport", "stdio") { eprintln!("{banner}"); } @@ -342,17 +336,15 @@ impl PctxMcpServer { #[cfg(test)] mod tests { use super::PctxMcpServer; - use pctx_config::Config; + use pctx_code_mode::config::Config; #[tokio::test] async fn test_serve_stdio_with_immediate_shutdown() { - let server = PctxMcpServer::new("127.0.0.1", 0, false); let cfg = Config::default(); let code_mode = pctx_code_mode::CodeMode::default(); + let server = PctxMcpServer::new("127.0.0.1", 0, false, &cfg, code_mode); - let result = server - .serve_stdio_with_shutdown(&cfg, code_mode, async {}) - .await; + let result = server.serve_stdio_with_shutdown(async {}).await; assert!( result.is_ok(), @@ -360,21 +352,4 @@ mod tests { result.err().unwrap() ); } - - // Note: test_serve_stdio_with_delayed_shutdown removed because it's difficult to test - // stdio transport without actual stdin. The immediate shutdown test above covers - // the basic shutdown mechanism. 
- - #[test] - fn test_server_construction() { - let server = PctxMcpServer::new("127.0.0.1", 8080, true); - assert_eq!(server.host, "127.0.0.1"); - assert_eq!(server.port, 8080); - assert!(server.banner); - - let server = PctxMcpServer::new("0.0.0.0", 3000, false); - assert_eq!(server.host, "0.0.0.0"); - assert_eq!(server.port, 3000); - assert!(!server.banner); - } } diff --git a/crates/pctx_mcp_server/src/service.rs b/crates/pctx_mcp_server/src/service.rs index f4efe1c9..d8d95202 100644 --- a/crates/pctx_mcp_server/src/service.rs +++ b/crates/pctx_mcp_server/src/service.rs @@ -1,12 +1,17 @@ +use std::{collections::HashMap, sync::Arc}; + use pctx_code_mode::{ CodeMode, + config::{Config, ToolDisclosure}, + descriptions, model::{ - ExecuteInput, ExecuteOutput, GetFunctionDetailsInput, GetFunctionDetailsOutput, - ListFunctionsOutput, + ExecuteBashInput, ExecuteInput, ExecuteOutput, GetFunctionDetailsInput, + GetFunctionDetailsOutput, ListFunctionsOutput, }, + registry::{PctxRegistry, RegistryAction}, }; use rmcp::{ - RoleServer, ServerHandler, + RoleServer, ServerHandler, ServiceError, handler::server::{router::tool::ToolRouter, tool::ToolCallContext, wrapper::Parameters}, model::{ CallToolRequestParams, CallToolResult, Content, Implementation, ListToolsResult, @@ -16,7 +21,7 @@ use rmcp::{ tool, tool_router, }; use serde_json::json; -use tracing::{error, info, instrument}; +use tracing::{debug, error, info, instrument}; // Metrics removed - will be added via telemetry support later @@ -24,60 +29,119 @@ type McpResult = Result; #[derive(Clone)] pub(crate) struct PctxMcpService { - name: String, - version: String, - description: Option, - code_mode: CodeMode, - tool_router: ToolRouter, + pub(crate) name: String, + pub(crate) version: String, + pub(crate) description: Option, + pub(crate) code_mode: CodeMode, + pub(crate) disclosure: ToolDisclosure, + pub(crate) tool_router: ToolRouter, } #[tool_router] impl PctxMcpService { - pub(crate) fn new(cfg: 
&pctx_config::Config, code_mode: CodeMode) -> Self { + pub(crate) fn new(cfg: &Config, code_mode: CodeMode) -> Self { Self { name: cfg.name.clone(), version: cfg.version.clone(), description: cfg.description.clone(), code_mode, + disclosure: cfg.disclosure, tool_router: Self::tool_router(), } } - #[tool( - title = "List Functions", - description = "ALWAYS USE THIS TOOL FIRST to list all available functions organized by namespace. + pub(crate) fn list_filtered_tools(&self) -> ListToolsResult { + let original_list_tools = ListToolsResult::with_all_items(self.tool_router.list_all()); + let mut filtered = original_list_tools.clone(); + filtered.tools.clear(); + + if matches!(self.disclosure, ToolDisclosure::Sidecar) { + // add upstream tools to list of tools + for (_, tool_set) in self.code_mode.server_tool_sets() { + filtered.tools.extend(tool_set.tools.iter().map(|t| { + let input_schema: Option = + serde_json::from_value(json!(t.input_schema.clone())).unwrap(); + let output_schema: Option> = + serde_json::from_value(json!(t.output_schema.clone())).unwrap(); + + let mut tool = rmcp::model::Tool::new( + t.id(tool_set.name.as_deref()), + t.description.clone().unwrap_or_default(), + input_schema.unwrap_or_default(), + ); + tool.output_schema = output_schema; + tool + })); + } + } - WORKFLOW: - 1. Start here - Call this tool to see what functions are available - 2. Then call get_function_details() for specific functions you need to understand - 3. 
Finally call execute() to run your TypeScript code + // dynamically add descriptions based on tool disclosure + let overrides = ToolOverride::for_disclosure(self.disclosure); + for mut tool in original_list_tools.tools { + if let Some(o) = overrides.get(&tool.name.to_string()) { + if !o.enabled { + continue; + } + tool.description = Some(o.description.clone().into()); + } + + filtered.tools.push(tool) + } + + filtered + } + + pub(crate) async fn handle_direct_tool_call( + &self, + mut req: CallToolRequestParams, + ) -> McpResult { + let mut registry = PctxRegistry::default(); + self.code_mode + .add_mcp_servers_to_registry(&mut registry) + .map_err(|e| { + rmcp::ErrorData::internal_error( + format!("failed building internal MCP registry: {e}"), + None, + ) + })?; + + if let Some(RegistryAction::Mcp(mcp_tool_id)) = registry.get(&req.name) { + let server = self + .code_mode + .servers() + .iter() + .find(|s| s.name == mcp_tool_id.sever_name) + .ok_or(rmcp::ErrorData::invalid_params("tool not found", None))?; + let client = server.connect().await.map_err(|e| { + rmcp::ErrorData::invalid_request( + format!( + "failed connecting to upstream MCP at `{}`: {e}", + server.display_target() + ), + None, + ) + })?; + req.name = mcp_tool_id.tool_name.into(); + + client.call_tool(req).await.map_err(service_error_to_mcp) + } else { + Err(rmcp::ErrorData::invalid_params("tool not found", None)) + } + } - This returns function signatures without full details.", + #[tool( + title = "List Functions", output_schema = rmcp::handler::server::tool::schema_for_type::() )] async fn list_functions(&self) -> McpResult { let listed = self.code_mode.list_functions(); - let mut res = CallToolResult::success(vec![Content::text(&listed.code)]); - res.structured_content = Some(json!(listed)); + let res = success_with_structure(&listed.code, &listed); Ok(res) } #[tool( title = "Get Function Details", - description = "Get detailed information about specific functions you want to use. 
- - WHEN TO USE: After calling list_functions(), use this to learn about parameter types, return values, and usage for specific functions. - - REQUIRED FORMAT: Functions must be specified as 'namespace.functionName' (e.g., 'Namespace.apiPostSearch') - - This tool is lightweight and only returns details for the functions you request, avoiding unnecessary token usage. - Only request details for functions you actually plan to use in your code. - - NOTE ON RETURN TYPES: - - If a function returns Promise, the MCP server didn't provide an output schema - - The actual value is a parsed object (not a string) - access properties directly - - Don't use JSON.parse() on the results - they're already JavaScript objects", output_schema = rmcp::handler::server::tool::schema_for_type::() )] async fn get_function_details( @@ -85,48 +149,63 @@ impl PctxMcpService { Parameters(input): Parameters, ) -> McpResult { let details = self.code_mode.get_function_details(input); - let mut res = CallToolResult::success(vec![Content::text(&details.code)]); - res.structured_content = Some(json!(details)); + let res = success_with_structure(&details.code, &details); Ok(res) } #[tool( - title = "Execute Code", - description = "Execute TypeScript code that calls namespaced functions. USE THIS LAST after list_functions() and get_function_details(). - - TOKEN USAGE WARNING: This tool could return LARGE responses if your code returns big objects. - To minimize tokens: - - Filter/map/reduce data IN YOUR CODE before returning - - Only return specific fields you need (e.g., return {id: result.id, count: items.length}) - - Use console.log() for intermediate results instead of returning everything - - Avoid returning full API responses - extract just what you need - - REQUIRED CODE STRUCTURE: - async function run() { - // Your code here - // Call namespace.functionName() - MUST include namespace prefix - // Process data here to minimize return size - return onlyWhatYouNeed; // Keep this small! 
- } + title = "Execute Bash", + output_schema = rmcp::handler::server::tool::schema_for_type::() + )] + async fn execute_bash( + &self, + Parameters(input): Parameters, + ) -> McpResult { + // Capture current tracing context to propagate to spawned thread + let current_span = tracing::Span::current(); + + let code_mode = self.code_mode.clone(); + let command = input.command; - IMPORTANT RULES: - - Functions MUST be called as 'Namespace.functionName' (e.g., 'Notion.apiPostSearch') - - Only functions from list_functions() are available - no fetch(), fs, or other Node/Deno APIs - - Variables don't persist between execute() calls - return or log anything you need later - - Add console.log() statements between API calls to track progress if errors occur - - Code runs in an isolated Deno sandbox with restricted network access - - RETURN TYPE NOTE: - - Functions without output schemas show Promise as return type - - The actual runtime value is already a parsed JavaScript object, NOT a JSON string - - Do NOT call JSON.parse() on results - they're already objects - - Access properties directly (e.g., result.data) or inspect with console.log() first - - If you see 'Promise', the structure is unknown - log it to see what's returned - ", + let execution_output = tokio::task::spawn_blocking(move || -> Result<_, anyhow::Error> { + // Enter the captured span context in the new thread + let _guard = current_span.enter(); + + // Create a new current-thread runtime for Deno ops that use deno_unsync + let rt = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .map_err(|e| anyhow::anyhow!("Failed to create runtime: {e}"))?; + + rt.block_on(async { + code_mode + .execute_bash(&command) + .await + .map_err(|e| anyhow::anyhow!("Execution error: {e}")) + }) + }) + .await + .map_err(|e| { + error!("Task join failed: {e}"); + rmcp::ErrorData::internal_error(format!("Task join failed: {e}"), None) + })? 
+ .map_err(|e| { + error!("Sandbox execution error: {e}"); + rmcp::ErrorData::internal_error(format!("Execution failed: {e}"), None) + })?; + + let mut res = CallToolResult::success(vec![Content::text(execution_output.markdown())]); + res.structured_content = Some(json!(execution_output)); + + Ok(res) + } + + #[tool( + title = "Execute Typescript Code", output_schema = rmcp::handler::server::tool::schema_for_type::() )] - async fn execute( + async fn execute_typescript( &self, Parameters(input): Parameters, ) -> McpResult { @@ -135,6 +214,7 @@ impl PctxMcpService { let code_mode = self.code_mode.clone(); let code = input.code; + let style = self.disclosure; let execution_output = tokio::task::spawn_blocking(move || -> Result<_, anyhow::Error> { // Enter the captured span context in the new thread @@ -148,7 +228,7 @@ impl PctxMcpService { rt.block_on(async { code_mode - .execute(&code, None) + .execute_typescript(&code, style, None) .await .map_err(|e| anyhow::anyhow!("Execution error: {e}")) }) @@ -172,23 +252,30 @@ impl PctxMcpService { impl ServerHandler for PctxMcpService { fn get_info(&self) -> ServerInfo { - let default_description = format!( + let available_namespaces = format!( "This server provides tools to explore SDK functions and execute SDK scripts for the following services: {}", self.code_mode .tool_sets() .iter() - .map(|s| s.name.clone()) + .map(|s| s.pascal_namespace()) .collect::>() .join(", ") ); + let workflow = + pctx_code_mode::descriptions::workflow::get_workflow_description(self.disclosure); + ServerInfo::new(ServerCapabilities::builder().enable_tools().build()) .with_protocol_version(ProtocolVersion::V_2024_11_05) .with_server_info( Implementation::new(self.name.clone(), self.version.clone()) .with_title(self.name.clone()), ) - .with_instructions(self.description.clone().unwrap_or(default_description)) + .with_instructions( + self.description + .clone() + .unwrap_or(format!("{available_namespaces}\n{workflow}")), + ) } 
#[instrument(skip_all, fields(mcp.method = "tools/list", mcp.id = %ctx.id))] @@ -198,19 +285,17 @@ impl ServerHandler for PctxMcpService { ctx: RequestContext, ) -> McpResult { let start = std::time::Instant::now(); - let res = ListToolsResult::with_all_items(self.tool_router.list_all()); + let filtered_tools = self.list_filtered_tools(); + let latency = start.elapsed(); info!( - tools.length = res.tools.len(), - tools.next_cursor = res.next_cursor.is_some(), + tools.length = filtered_tools.tools.len(), + tools.next_cursor = filtered_tools.next_cursor.is_some(), latency_ms = latency.as_millis(), "tools/list" ); - // Metrics disabled for now - let _ = latency; - - Ok(res) + Ok(filtered_tools) } #[instrument(skip_all, fields(mcp.method = "tools/call", mcp.id = %ctx.id, mcp.tool.name = %req.name))] @@ -222,8 +307,17 @@ impl ServerHandler for PctxMcpService { let start = std::time::Instant::now(); let tool_name = req.name.clone(); - let tcc = ToolCallContext::new(self, req, ctx); - let res = self.tool_router.call(tcc).await; + let res: Result = + if matches!(self.disclosure, ToolDisclosure::Sidecar) + && tool_name != "execute_typescript" + { + // call tool directly + debug!("Calling tool directly in sidecar style"); + self.handle_direct_tool_call(req).await + } else { + let tcc = ToolCallContext::new(self, req, ctx); + self.tool_router.call(tcc).await + }; let latency = start.elapsed(); let is_error = res @@ -246,3 +340,74 @@ impl ServerHandler for PctxMcpService { Ok(res) } } + +struct ToolOverride { + enabled: bool, + description: String, +} +impl ToolOverride { + fn for_disclosure(disclosure: ToolDisclosure) -> HashMap { + let mut overrides = HashMap::new(); + + // catalog only + overrides.insert( + "list_functions".into(), + Self { + enabled: matches!(disclosure, ToolDisclosure::Catalog), + description: descriptions::tools::LIST_FUNCTIONS.into(), + }, + ); + overrides.insert( + "get_function_details".into(), + Self { + enabled: matches!(disclosure, 
ToolDisclosure::Catalog), + description: descriptions::tools::GET_FUNCTION_DETAILS.into(), + }, + ); + + // fs only + overrides.insert( + "execute_bash".into(), + Self { + enabled: matches!(disclosure, ToolDisclosure::Filesystem), + description: descriptions::tools::EXECUTE_BASH.into(), + }, + ); + + // execute_typescript + overrides.insert( + "execute_typescript".into(), + Self { + enabled: true, + description: descriptions::tools::disclosure_execute_description(disclosure), + }, + ); + + overrides + } +} + +fn success_with_structure(text: &str, structured: V) -> CallToolResult { + let mut res = CallToolResult::success(vec![Content::text(text)]); + res.structured_content = Some(json!(structured)); + + res +} + +fn service_error_to_mcp(e: ServiceError) -> rmcp::ErrorData { + match e { + ServiceError::McpError(mcp_err) => mcp_err, + ServiceError::TransportClosed => rmcp::ErrorData::internal_error("transport closed", None), + ServiceError::TransportSend(err) => rmcp::ErrorData::internal_error(err.to_string(), None), + ServiceError::UnexpectedResponse => { + rmcp::ErrorData::internal_error("unexpected response type", None) + } + ServiceError::Cancelled { reason } => { + rmcp::ErrorData::internal_error(reason.unwrap_or_else(|| "cancelled".to_string()), None) + } + ServiceError::Timeout { timeout } => { + rmcp::ErrorData::internal_error(format!("request timeout after {timeout:?}"), None) + } + _ => rmcp::ErrorData::internal_error(e.to_string(), None), + } +} diff --git a/crates/pctx_registry/Cargo.toml b/crates/pctx_registry/Cargo.toml new file mode 100644 index 00000000..7f43760b --- /dev/null +++ b/crates/pctx_registry/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "pctx_registry" +version = "0.1.0" +edition.workspace = true +rust-version.workspace = true +license.workspace = true +repository.workspace = true +description = "Callback and MCP server registries for pctx" +keywords = ["mcp", "registry", "pctx"] +categories = ["development-tools"] + +[dependencies] 
+pctx_config = { version = "^0.1.4", path = "../pctx_config" } +serde_json = { workspace = true } +tracing = { workspace = true } +thiserror = { workspace = true } +rmcp = { workspace = true } +tokio = { workspace = true } +deno_error = { workspace = true } + +[lints] +workspace = true diff --git a/crates/pctx_registry/src/error.rs b/crates/pctx_registry/src/error.rs new file mode 100644 index 00000000..7bbb9866 --- /dev/null +++ b/crates/pctx_registry/src/error.rs @@ -0,0 +1,44 @@ +use pctx_config::server::McpConnectionError; + +/// Error type for registry operations +#[derive(Debug, thiserror::Error)] +pub enum RegistryError { + /// Configuration error (e.g., duplicate name) + #[error("Registry configuration error: {0}")] + Config(String), + /// Server connection error + #[error("MCP connection error: {0}")] + Connection(String), + /// Tool call error (HTTP, parsing, etc.) + #[error("Tool call error: {0}")] + ToolCall(String), + /// Callback execution error + #[error("Callback execution error: {0}")] + ExecutionError(String), +} + +impl From for RegistryError { + fn from(value: McpConnectionError) -> Self { + Self::Connection(value.to_string()) + } +} + +impl deno_error::JsErrorClass for RegistryError { + fn get_class(&self) -> std::borrow::Cow<'static, str> { + std::borrow::Cow::Borrowed("Error") + } + + fn get_message(&self) -> std::borrow::Cow<'static, str> { + std::borrow::Cow::Owned(self.to_string()) + } + + fn get_additional_properties( + &self, + ) -> Box, deno_error::PropertyValue)>> { + Box::new(std::iter::empty()) + } + + fn get_ref(&self) -> &(dyn std::error::Error + Send + Sync + 'static) { + self + } +} diff --git a/crates/pctx_registry/src/lib.rs b/crates/pctx_registry/src/lib.rs new file mode 100644 index 00000000..ed658948 --- /dev/null +++ b/crates/pctx_registry/src/lib.rs @@ -0,0 +1,5 @@ +mod error; +pub mod registry; + +pub use error::RegistryError; +pub use registry::{CallbackFn, McpToolId, PctxRegistry, RegistryAction}; diff --git 
a/crates/pctx_code_execution_runtime/src/mcp_registry.rs b/crates/pctx_registry/src/mcp_registry.rs similarity index 89% rename from crates/pctx_code_execution_runtime/src/mcp_registry.rs rename to crates/pctx_registry/src/mcp_registry.rs index 0251710d..fb605aa2 100644 --- a/crates/pctx_code_execution_runtime/src/mcp_registry.rs +++ b/crates/pctx_registry/src/mcp_registry.rs @@ -1,4 +1,4 @@ -use crate::error::McpError; +use crate::error::RegistryError; use pctx_config::server::ServerConfig; use rmcp::model::{CallToolRequestParams, JsonObject, RawContent}; use serde_json::json; @@ -26,11 +26,11 @@ impl MCPRegistry { /// # Errors /// /// Panics if the internal lock is poisoned (i.e., a thread panicked while holding the lock) - pub fn add(&self, cfg: ServerConfig) -> Result<(), McpError> { + pub fn add(&self, cfg: ServerConfig) -> Result<(), RegistryError> { let mut configs = self.configs.write().unwrap(); if configs.contains_key(&cfg.name) { - return Err(McpError::Config(format!( + return Err(RegistryError::Config(format!( "MCP Server with name \"{}\" is already registered, you cannot register two MCP servers with the same name", cfg.name ))); @@ -95,15 +95,15 @@ impl Default for MCPRegistry { ret(Display), err )] -pub(crate) async fn call_mcp_tool( +pub async fn call_mcp_tool( registry: &MCPRegistry, server_name: &str, tool_name: &str, args: Option, -) -> Result { +) -> Result { // Get the server config from registry let mcp_cfg = registry.get(server_name).ok_or_else(|| { - McpError::ToolCall(format!( + RegistryError::ToolCall(format!( "MCP Server with name \"{server_name}\" does not exist" )) })?; @@ -116,7 +116,7 @@ pub(crate) async fn call_mcp_tool( error = %err, "Could not connect to MCP: initialization failure" ); - return Err(McpError::Connection(err.to_string())); + return Err(RegistryError::Connection(err.to_string())); } }; let tool_result = client @@ -129,7 +129,7 @@ pub(crate) async fn call_mcp_tool( }) .await .map_err(|e| { - McpError::ToolCall(format!( 
+ RegistryError::ToolCall(format!( "Tool call \"{server_name}.{tool_name}\" failed: {e}" )) })?; @@ -137,7 +137,7 @@ pub(crate) async fn call_mcp_tool( // Check if the tool call resulted in an error if tool_result.is_error.unwrap_or(false) { - return Err(McpError::ToolCall(format!( + return Err(RegistryError::ToolCall(format!( "Tool call \"{server_name}.{tool_name}\" failed" ))); } @@ -151,7 +151,7 @@ pub(crate) async fn call_mcp_tool( serde_json::from_str(&text_content.text) .or_else(|_| Ok(serde_json::Value::String(text_content.text.clone()))) .map_err(|e: serde_json::Error| { - McpError::ToolCall(format!("Failed to parse content: {e}")) + RegistryError::ToolCall(format!("Failed to parse content: {e}")) })? } else { // Return the whole content array as JSON diff --git a/crates/pctx_registry/src/registry.rs b/crates/pctx_registry/src/registry.rs new file mode 100644 index 00000000..ab18b4d7 --- /dev/null +++ b/crates/pctx_registry/src/registry.rs @@ -0,0 +1,266 @@ +use crate::error::RegistryError; +use pctx_config::server::ServerConfig; +use rmcp::model::{CallToolRequestParams, JsonObject, RawContent}; +use serde_json::json; +use std::{ + collections::HashMap, + future::Future, + pin::Pin, + sync::{Arc, RwLock}, +}; +use tracing::{info, instrument, warn}; + +pub type CallbackFn = Arc< + dyn Fn( + Option, + ) -> Pin> + Send>> + + Send + + Sync, +>; + +#[derive(Clone)] +pub struct McpToolId { + pub sever_name: String, + pub tool_name: String, +} +impl McpToolId { + pub fn id(&self) -> String { + format!("{}__{}", &self.sever_name, self.tool_name) + } +} + +#[derive(Clone)] +pub enum RegistryAction { + Mcp(McpToolId), + Callback(CallbackFn), +} + +#[derive(Clone, Default)] +pub struct PctxRegistry { + actions: Arc>>, + servers: Arc>>, +} + +impl PctxRegistry { + /// Returns the ids of this Pctx Registry. 
+ /// + /// # Panics + /// + /// Panics if it fails acquiring the lock + pub fn ids(&self) -> Vec { + self.actions + .read() + .unwrap() + .keys() + .map(String::from) + .collect() + } + + pub fn add_mcp(&self, tool_names: &[String], cfg: ServerConfig) -> Result<(), RegistryError> { + // confirm unique server name + let mut servers = self.servers.write().map_err(|e| { + RegistryError::Config(format!( + "Failed obtaining write lock on MCP server registry: {e}" + )) + })?; + if servers.contains_key(&cfg.name) { + return Err(RegistryError::Config(format!( + "MCP Server with name \"{}\" is already registered, you cannot register two MCP servers with the same name", + cfg.name + ))); + } + + // confirm unique MCP tool ids + let to_add: Vec = tool_names + .into_iter() + .map(|n| McpToolId { + sever_name: cfg.name.clone(), + tool_name: n.clone(), + }) + .collect(); + + let mut actions = self.actions.write().map_err(|e| { + RegistryError::Config(format!( + "Failed obtaining write lock on action registry: {e}" + )) + })?; + let already_exists: Vec = to_add + .iter() + .filter_map(|t| { + if actions.contains_key(&t.id()) { + Some(t.id()) + } else { + None + } + }) + .collect(); + if !already_exists.is_empty() { + return Err(RegistryError::Config(format!( + "Registry action(s) with id(s) {already_exists:?} are already registered, you cannot register two registry actions with the same id", + ))); + } + + // register + servers.insert(cfg.name.clone(), cfg); + actions.extend(to_add.into_iter().map(|t| (t.id(), RegistryAction::Mcp(t)))); + + Ok(()) + } + + pub fn add_callback(&self, id: &str, callback: CallbackFn) -> Result<(), RegistryError> { + let mut actions = self.actions.write().map_err(|e| { + RegistryError::Config(format!( + "Failed obtaining write lock on action registry: {e}" + )) + })?; + + if actions.contains_key(id) { + return Err(RegistryError::Config(format!( + "Registry action with id {id:?} is already registered, you cannot register two registry actions with
the same id", + ))); + } + + actions.insert(id.into(), RegistryAction::Callback(callback)); + + Ok(()) + } + + /// Remove an action from the registry by id + /// + /// # Panics + /// + /// Panics if cannot obtain lock + pub fn remove(&self, id: &str) -> Option { + let mut actions = self.actions.write().unwrap(); + actions.remove(id) + } + + /// Get an action from the registry by id + /// + /// # Panics + /// + /// Panics if the internal lock is poisoned (i.e., a thread panicked while holding the lock) + pub fn get(&self, id: &str) -> Option { + let actions = self.actions.read().unwrap(); + actions.get(id).cloned() + } + + /// Confirms the registry contains a given id + /// + /// # Panics + /// + /// Panics if the internal lock is poisoned (i.e., a thread panicked while holding the lock) + pub fn has(&self, id: &str) -> bool { + let actions = self.actions.read().unwrap(); + actions.contains_key(id) + } + + /// invokes the action with the provided args + /// + /// # Errors + /// + /// This function will return an error if an action by the provided id doesn't exist + /// or if the action itself fails + #[instrument( + name = "invoke_registry_action", + skip_all, + fields(id=id, args = json!(args).to_string()), + ret(Display), + err + )] + pub async fn invoke( + &self, + id: &str, + args: Option, + ) -> Result { + let action = self.get(id).ok_or_else(|| { + RegistryError::ToolCall(format!("Action with id \"{id}\" does not exist")) + })?; + + match action { + RegistryAction::Callback(callback_fn) => { + callback_fn(args.map(|a| json!(a))).await.map_err(|e| { + RegistryError::ExecutionError(format!( + "Failed calling callback with id \"{id}\": {e}", + )) + }) + } + + RegistryAction::Mcp(mcp_id) => { + let server = { + let servers = self.servers.read().map_err(|e| { + RegistryError::Config(format!( + "Failed obtaining read lock on MCP server registry: {e}" + )) + })?; + servers + .get(&mcp_id.sever_name) + .ok_or(RegistryError::ToolCall(format!( + "MCP server with name 
\"{}\" does not exist", + &mcp_id.sever_name + )))? + .clone() + }; + + let client = match server.connect().await { + Ok(client) => client, + Err(err) => { + warn!( + server = %mcp_id.sever_name, + error = %err, + "Could not connect to MCP: initialization failure" + ); + return Err(RegistryError::Connection(err.to_string())); + } + }; + + let tool_result = client + .call_tool({ + let mut params = CallToolRequestParams::new(mcp_id.tool_name.to_string()); + if let Some(args) = args { + params = params.with_arguments(args); + } + params + }) + .await + .map_err(|e| { + RegistryError::ToolCall(format!( + "Tool call \"{}\" failed: {e}", + mcp_id.id() + )) + })?; + let _ = client.cancel().await; + + // Check if the tool call resulted in an error + if tool_result.is_error.unwrap_or(false) { + return Err(RegistryError::ToolCall(format!( + "Tool call \"{}\" failed", + mcp_id.id() + ))); + } + + // Prefer structuredContent if available, otherwise use content array + let has_structured = tool_result.structured_content.is_some(); + let val = if let Some(structured) = tool_result.structured_content { + structured + } else if let Some(RawContent::Text(text_content)) = + tool_result.content.first().map(|a| &**a) + { + // Try to parse as JSON, fallback to string value + serde_json::from_str(&text_content.text) + .or_else(|_| Ok(serde_json::Value::String(text_content.text.clone()))) + .map_err(|e: serde_json::Error| { + RegistryError::ToolCall(format!("Failed to parse content: {e}")) + })? + } else { + // Return the whole content array as JSON + json!(tool_result.content) + }; + + info!(structured_content = has_structured, result =? 
&val, "Tool result"); + + Ok(val) + } + } + } +} diff --git a/crates/pctx_session_server/Cargo.toml b/crates/pctx_session_server/Cargo.toml index f07341e8..4c023522 100644 --- a/crates/pctx_session_server/Cargo.toml +++ b/crates/pctx_session_server/Cargo.toml @@ -13,9 +13,6 @@ path = "src/bin/generate-openapi.rs" [dependencies] # Local crates pctx_code_mode = { path = "../pctx_code_mode" } -pctx_config = { path = "../pctx_config" } -pctx_code_execution_runtime = { path = "../pctx_code_execution_runtime" } -pctx_codegen = { path = "../pctx_codegen" } # Web framework axum = { workspace = true, features = ["macros", "ws"] } @@ -63,6 +60,7 @@ serial_test = "3" axum-test = { version = "18", features = ["ws"] } similar-asserts = { version = "1", features = ["serde"] } test-log = "0.2" +rstest = "0.23" [lints] workspace = true diff --git a/crates/pctx_session_server/src/model.rs b/crates/pctx_session_server/src/model.rs index 30d414d8..e5030c0b 100644 --- a/crates/pctx_session_server/src/model.rs +++ b/crates/pctx_session_server/src/model.rs @@ -1,5 +1,5 @@ use axum::{Json, http::StatusCode, response::IntoResponse}; -use pctx_code_mode::model::ExecuteOutput; +use pctx_code_mode::{config, model::ExecuteOutput}; use serde::{Deserialize, Serialize}; use tracing::{error, warn}; use utoipa::ToSchema; @@ -88,7 +88,7 @@ pub struct RegisterToolsResponse { #[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] pub struct RegisterMcpServersRequest { #[schema(value_type = Vec)] - pub servers: Vec, + pub servers: Vec, } /// Response after registering MCP servers @@ -110,19 +110,6 @@ pub struct CloseSessionResponse { pub success: bool, } -/// Request to execute a bash command -#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] -pub struct ExecuteBashRequest { - pub command: String, -} - -/// Response after executing a bash command -#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] -pub struct ExecuteBashResponse { - #[serde(flatten)] - pub output: ExecuteOutput, -} - 
// ----------- Websocket JRPC Message structs ----------- pub type WsJsonRpcMessage = rmcp::model::JsonRpcMessage; @@ -130,22 +117,25 @@ pub type WsJsonRpcMessage = rmcp::model::JsonRpcMessage, pub name: String, pub args: Option, } #[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ExecuteCodeParams { +pub struct ExecuteTypescriptParams { pub code: String, + #[serde(default)] + pub disclosure: config::ToolDisclosure, } #[derive(Debug, Clone, Serialize, Deserialize)] diff --git a/crates/pctx_session_server/src/routes.rs b/crates/pctx_session_server/src/routes.rs index 2a875601..88b14938 100644 --- a/crates/pctx_session_server/src/routes.rs +++ b/crates/pctx_session_server/src/routes.rs @@ -4,7 +4,8 @@ use axum::{Json, extract::State, http::StatusCode}; use pctx_code_mode::{ CodeMode, model::{ - CallbackConfig, GetFunctionDetailsInput, GetFunctionDetailsOutput, ListFunctionsOutput, + CallbackConfig, ExecuteBashInput, ExecuteOutput, GetFunctionDetailsInput, + GetFunctionDetailsOutput, ListFunctionsOutput, }, }; use tracing::{error, info}; @@ -13,8 +14,8 @@ use uuid::Uuid; use crate::extractors::CodeModeSession; use crate::model::{ ApiError, ApiResult, CloseSessionResponse, CreateSessionResponse, ErrorCode, ErrorData, - ExecuteBashRequest, ExecuteBashResponse, HealthResponse, RegisterMcpServersRequest, - RegisterMcpServersResponse, RegisterToolsRequest, RegisterToolsResponse, + HealthResponse, RegisterMcpServersRequest, RegisterMcpServersResponse, RegisterToolsRequest, + RegisterToolsResponse, }; use crate::state::{AppState, backend::PctxSessionBackend}; @@ -327,9 +328,9 @@ pub(crate) async fn register_servers( params( ("x-code-mode-session" = String, Header, description = "Current code mode session") ), - request_body = ExecuteBashRequest, + request_body = ExecuteBashInput, responses( - (status = 200, description = "Bash command executed successfully", body = ExecuteBashResponse), + (status = 200, description = "Bash command executed successfully", body = 
ExecuteOutput), (status = 404, description = "Session not found", body = ErrorData), (status = 500, description = "Internal server error", body = ErrorData) ) @@ -337,8 +338,8 @@ pub(crate) async fn register_servers( pub(crate) async fn execute_bash( State(state): State>, CodeModeSession(session_id): CodeModeSession, - Json(request): Json, -) -> ApiResult> { + Json(request): Json, +) -> ApiResult> { info!( session_id =? session_id, command =? request.command, @@ -417,7 +418,5 @@ pub(crate) async fn execute_bash( error!("Failed to post_execution hook: {e}"); } - Ok(Json(ExecuteBashResponse { - output: exec_output, - })) + Ok(Json(exec_output)) } diff --git a/crates/pctx_session_server/src/server.rs b/crates/pctx_session_server/src/server.rs index 313417b2..99f5287b 100644 --- a/crates/pctx_session_server/src/server.rs +++ b/crates/pctx_session_server/src/server.rs @@ -17,15 +17,15 @@ use crate::{ AppState, PctxSessionBackend, extractors::HeaderExtractor, model::{ - CloseSessionResponse, CreateSessionResponse, ErrorData, ExecuteBashRequest, - ExecuteBashResponse, HealthResponse, RegisterMcpServersRequest, RegisterMcpServersResponse, - RegisterToolsRequest, RegisterToolsResponse, + CloseSessionResponse, CreateSessionResponse, ErrorData, HealthResponse, + RegisterMcpServersRequest, RegisterMcpServersResponse, RegisterToolsRequest, + RegisterToolsResponse, }, routes, websocket, }; use pctx_code_mode::model::{ - CallbackConfig, FunctionDetails, GetFunctionDetailsInput, GetFunctionDetailsOutput, - ListFunctionsOutput, ListedFunction, + CallbackConfig, ExecuteBashInput, ExecuteOutput, FunctionDetails, GetFunctionDetailsInput, + GetFunctionDetailsOutput, ListFunctionsOutput, ListedFunction, }; #[derive(OpenApi)] @@ -54,8 +54,8 @@ use pctx_code_mode::model::{ GetFunctionDetailsOutput, FunctionDetails, // Execute bash - ExecuteBashRequest, - ExecuteBashResponse, + ExecuteBashInput, + ExecuteOutput, // Tool registration RegisterToolsRequest, CallbackConfig, diff --git 
a/crates/pctx_session_server/src/websocket/handler.rs b/crates/pctx_session_server/src/websocket/handler.rs index 756bfcf3..94b98e75 100644 --- a/crates/pctx_session_server/src/websocket/handler.rs +++ b/crates/pctx_session_server/src/websocket/handler.rs @@ -4,7 +4,7 @@ use crate::{ PctxSessionBackend, extractors::CodeModeSession, model::{ - ExecuteCodeParams, ExecuteToolParams, PctxJsonRpcRequest, PctxJsonRpcResponse, + ExecuteToolParams, ExecuteTypescriptParams, PctxJsonRpcRequest, PctxJsonRpcResponse, WsJsonRpcMessage, }, state::ws_manager::WsSession, @@ -22,8 +22,10 @@ use futures::{ SinkExt, StreamExt, stream::{SplitSink, SplitStream}, }; -use pctx_code_execution_runtime::{CallbackFn, CallbackRegistry}; -use pctx_code_mode::model::ExecuteInput; +use pctx_code_mode::{ + model::ExecuteInput, + registry::{CallbackFn, PctxRegistry}, +}; use rmcp::{ ErrorData, model::{ErrorCode, JsonRpcMessage, RequestId}, @@ -165,24 +167,12 @@ async fn read_messages( } } -/// Common execution handler logic -async fn handle_execution_inner( +/// Handle an `execute_code` (TypeScript) request from the client +async fn handle_execute_code_request( req_id: RequestId, + params: ExecuteTypescriptParams, ws_session: Uuid, state: AppState, - input_code: String, - needs_callbacks: bool, - execute_fn: impl FnOnce( - pctx_code_mode::CodeMode, - Option, - ) -> std::pin::Pin< - Box< - dyn std::future::Future< - Output = Result, - >, - >, - > + Send - + 'static, ) -> Result<(), String> { // Save the WebSocket session for later response let ws_session_lock = state @@ -218,8 +208,8 @@ async fn handle_execution_inner( let execution_id = Uuid::new_v4(); // Setup callbacks if needed - let callback_registry = if needs_callbacks { - let registry = CallbackRegistry::default(); + let callback_registry = if !code_mode.callbacks().is_empty() { + let registry = PctxRegistry::default(); for callback_cfg in code_mode.callbacks() { let ws_session_lock_clone = ws_session_lock.clone(); let cfg = 
callback_cfg.clone(); @@ -244,7 +234,7 @@ async fn handle_execution_inner( }) }); - if let Err(add_err) = registry.add(&callback_cfg.id(), callback) { + if let Err(add_err) = registry.add_callback(&callback_cfg.id(), callback) { let err_res = WsJsonRpcMessage::error( ErrorData { code: ErrorCode::INTERNAL_ERROR, @@ -274,6 +264,7 @@ async fn handle_execution_inner( tokio::spawn(async move { let code_mode_clone = code_mode.clone(); + let code_to_exec = params.code.clone(); let output = tokio::task::spawn_blocking(move || -> Result<_, anyhow::Error> { let _guard = execution_span.enter(); @@ -282,8 +273,12 @@ async fn handle_execution_inner( .build() .map_err(|e| anyhow::anyhow!("Failed to create runtime: {e}"))?; - rt.block_on(execute_fn(code_mode_clone, callback_registry)) - .map_err(|e| anyhow::anyhow!("Execution error: {e}")) + rt.block_on(code_mode_clone.execute_typescript( + &code_to_exec, + params.disclosure, + callback_registry, + )) + .map_err(|e| anyhow::anyhow!("Execution error: {e}")) }) .await; @@ -325,7 +320,9 @@ async fn handle_execution_inner( code_mode_session_id, execution_id, code_mode, - ExecuteInput { code: input_code }, + ExecuteInput { + code: params.code.clone(), + }, execution_res, ) .await @@ -340,27 +337,6 @@ async fn handle_execution_inner( Ok(()) } -/// Handle an `execute_code` (TypeScript) request from the client -async fn handle_execute_code_request( - req_id: RequestId, - params: ExecuteCodeParams, - ws_session: Uuid, - state: AppState, -) -> Result<(), String> { - let code = params.code.clone(); - handle_execution_inner( - req_id, - ws_session, - state, - code.clone(), - true, // needs callbacks - move |code_mode, callback_registry| { - Box::pin(async move { code_mode.execute_typescript(&code, callback_registry).await }) - }, - ) - .await -} - /// Handle a single WebSocket message /// Messages coming from a client, needs to be routed to the correct `WsSession` for handling. 
async fn handle_message( @@ -377,7 +353,7 @@ async fn handle_message( match jrpc_msg { JsonRpcMessage::Request(req) => match req.request { - PctxJsonRpcRequest::ExecuteCode { params } => { + PctxJsonRpcRequest::ExecuteTypescript { params } => { debug!("Executing TypeScript code..."); handle_execute_code_request(req.id, params, ws_session, state.clone()).await } diff --git a/crates/pctx_session_server/tests/executions.rs b/crates/pctx_session_server/tests/executions.rs index 69087ff3..c455e5b8 100644 --- a/crates/pctx_session_server/tests/executions.rs +++ b/crates/pctx_session_server/tests/executions.rs @@ -3,6 +3,7 @@ mod utils; use crate::utils::{callback_tools, connect_websocket, create_test_server_with_session}; use pctx_code_mode::model::CallbackConfig; use pctx_session_server::{CODE_MODE_SESSION_HEADER, model::WsJsonRpcMessage}; +use rstest::rstest; use serde_json::json; use serial_test::serial; use similar_asserts::assert_serde_eq; @@ -124,10 +125,10 @@ async fn test_exec_code_syntax_err() { assert_eq!(response["result"]["success"], false); let stderr = response["result"]["stderr"].as_str().unwrap(); - // Should show line 15 where the error is (after bashFs prepend) + // Should show line 3 where the error is assert!( - stderr.contains("15:19"), - "Should show exact error location (line 15, col 19): {stderr}" + stderr.contains("3:19"), + "Should show exact error location (line 3, col 19): {stderr}" ); // Should show the error message @@ -137,9 +138,39 @@ async fn test_exec_code_syntax_err() { ); } -#[test_log::test(tokio::test)] +const CODE_OVERLOADED_SYNTAX: &str = " + async function run() { + let value = await invoke({ name:\"test_math__add\", arguments: {a: 8, b: 2}}); + console.log(`after add: ${value}`); + value = await invoke({ name:\"test_math__subtract\", arguments: {a: value, b: 5}}); + console.log(`after subtract: ${value}`); + value = await invoke({ name:\"test_math__multiply\", arguments: {a: value, b: 10}}); + console.log(`after multiply: 
${value}`); + value = await invoke({ name:\"test_math__divide\", arguments: {a: value, b: 2}}); + console.log(`after divide: ${value}`); + return value; + }"; + +const CODE_NAMESPACED_SYNTAX: &str = " + async function run() { + let value = await TestMath.add({a: 8, b: 2}); + console.log(`after add: ${value}`); + value = await TestMath.subtract({a: value, b: 5}); + console.log(`after subtract: ${value}`); + value = await TestMath.multiply({a: value, b: 10}); + console.log(`after multiply: ${value}`); + value = await TestMath.divide({a: value, b: 2}); + console.log(`after divide: ${value}`); + return value; + }"; + +#[rstest] +#[case::sidecar(CODE_OVERLOADED_SYNTAX, "sidecar")] +#[case::catalog(CODE_NAMESPACED_SYNTAX, "catalog")] +#[case::filesystem(CODE_NAMESPACED_SYNTAX, "filesystem")] #[serial] -async fn test_exec_callbacks() { +#[tokio::test] +async fn test_exec_callbacks(#[case] code: &str, #[case] disclosure: &str) { let (session_id, server, _) = create_test_server_with_session().await; // register tools @@ -159,18 +190,6 @@ async fn test_exec_callbacks() { .await .into_websocket() .await; - let code = " - async function run() { - let value = await TestMath.add({a: 8, b: 2}); - console.log(`after add: ${value}`); - value = await TestMath.subtract({a: value, b: 5}); - console.log(`after subtract: ${value}`); - value = await TestMath.multiply({a: value, b: 10}); - console.log(`after multiply: ${value}`); - value = await TestMath.divide({a: value, b: 2}); - console.log(`after divide: ${value}`); - return value; - }"; // Send execute_code request via WebSocket ws.send_json(&json!({ @@ -178,7 +197,8 @@ async fn test_exec_callbacks() { "id": "test-4", "method": "execute_code", "params": { - "code": code + "code": code, + "disclosure": disclosure } })) .await; @@ -340,8 +360,8 @@ async fn test_exec_type_error_with_rich_diagnostics() { .await; register_res.assert_status_ok(); - // LLM code with type error - this will have bashFs setup prepended (12 lines) - // The 
error is on line 3 of user code, reported as line 15 in transformed code + // LLM code with type error + // The error is on line 3 of user code let code = r#" async function run() { let value = await TestMath.add({a: "wrong", b: 2}); // Type error: 'a' should be number @@ -371,14 +391,13 @@ async fn test_exec_type_error_with_rich_diagnostics() { assert_eq!(response["result"]["success"], false); // Verify the diagnostic points to the exact error location and has all the information - // Error is at line 15 (where "wrong" is passed), column 45 (the "wrong" string literal) - // Line 15 is after the 12-line bashFs prepend (user code line 3 + 12 = 15) + // Error is at line 3 (where "wrong" is passed), column 45 (the "wrong" string literal) let stderr = response["result"]["stderr"].as_str().unwrap(); // Should show exact location: Line 15, Column 45 assert!( - stderr.contains("Line 15"), - "Should show line 15 where error occurs: {stderr}" + stderr.contains("Line 3"), + "Should show line 3 where error occurs: {stderr}" ); assert!( stderr.contains("Column 45"), diff --git a/crates/pctx_session_server/tests/utils.rs b/crates/pctx_session_server/tests/utils.rs index 9fae6f5f..b0bfffc2 100644 --- a/crates/pctx_session_server/tests/utils.rs +++ b/crates/pctx_session_server/tests/utils.rs @@ -3,8 +3,7 @@ use std::sync::Arc; use axum_test::{TestResponse, TestServer}; -use pctx_code_execution_runtime::CallbackFn; -use pctx_code_mode::{CodeMode, model::CallbackConfig}; +use pctx_code_mode::{CodeMode, model::CallbackConfig, registry::CallbackFn}; use pctx_session_server::{ AppState, LocalBackend, PctxSessionBackend, model::CreateSessionResponse, server::create_router, }; @@ -105,7 +104,7 @@ pub(crate) fn callback_tools() -> Vec<(CallbackConfig, CallbackFn)> { ( CallbackConfig { name: "add".into(), - namespace: "test_math".into(), + namespace: Some("test_math".into()), description: Some("Add two numbers & return result".into()), input_schema: Some(input_schema.clone()), 
output_schema: Some(output_schema.clone()), @@ -123,7 +122,7 @@ pub(crate) fn callback_tools() -> Vec<(CallbackConfig, CallbackFn)> { ( CallbackConfig { name: "subtract".into(), - namespace: "test_math".into(), + namespace: Some("test_math".into()), description: Some("Subtract two numbers & return result".into()), input_schema: Some(input_schema.clone()), output_schema: Some(output_schema.clone()), @@ -141,7 +140,7 @@ pub(crate) fn callback_tools() -> Vec<(CallbackConfig, CallbackFn)> { ( CallbackConfig { name: "multiply".into(), - namespace: "test_math".into(), + namespace: Some("test_math".into()), description: Some("Multiply two numbers & return result".into()), input_schema: Some(input_schema.clone()), output_schema: Some(output_schema.clone()), @@ -159,7 +158,7 @@ pub(crate) fn callback_tools() -> Vec<(CallbackConfig, CallbackFn)> { ( CallbackConfig { name: "divide".into(), - namespace: "test_math".into(), + namespace: Some("test_math".into()), description: Some("Divide two numbers & return result".into()), input_schema: Some(json!({ "type": "object", diff --git a/crates/pctx_type_check_runtime/Cargo.toml b/crates/pctx_type_check_runtime/Cargo.toml index e64c5eca..e3107d06 100644 --- a/crates/pctx_type_check_runtime/Cargo.toml +++ b/crates/pctx_type_check_runtime/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pctx_type_check_runtime" -version = "0.1.2" +version = "0.1.3" edition = "2024" rust-version = "1.89" license = "MIT" diff --git a/descriptions/README.md b/descriptions/README.md new file mode 100644 index 00000000..8545c444 --- /dev/null +++ b/descriptions/README.md @@ -0,0 +1,8 @@ +# Descriptions + +This is a unified description folder used by both the rust and python surfaces of `pctx`. + +## Goals + +- Avoid duplication of tool descriptions & workflows across the various `pctx` surfaces. +- Encourage description versioning for experimenting with updated language. 
diff --git a/pctx-py/src/pctx_client/tool_descriptions/prescriptive/execute_bash.py b/descriptions/tools/execute_bash/v1.txt similarity index 75% rename from pctx-py/src/pctx_client/tool_descriptions/prescriptive/execute_bash.py rename to descriptions/tools/execute_bash/v1.txt index 88ded1c4..02469652 100644 --- a/pctx-py/src/pctx_client/tool_descriptions/prescriptive/execute_bash.py +++ b/descriptions/tools/execute_bash/v1.txt @@ -1,6 +1,4 @@ -"""Prescriptive description for execute_bash tool.""" - -DESCRIPTION = """Execute bash commands to explore the TypeScript SDK filesystem. +Execute bash commands to explore the TypeScript SDK filesystem. FILESYSTEM STRUCTURE: The SDK is organized as an in-memory virtual filesystem at /sdk/ containing: @@ -14,12 +12,14 @@ 3. Search: `grep 'keyword' README.md` to find specific functions 4. Read types: `cat {Namespace}/{functionName}.d.ts` for detailed type information -AVAILABLE COMMANDS: +COMMANDS: - ls, cat, grep, find - Explore the filesystem - All standard bash utilities for text processing +- Networking bash commands like `curl`, `ping`, etc. are unavailable. +Use bash to explore the SDK and write a script. 
CURRENT WORKING DIRECTORY: /sdk/ IMPORTANT: - This is a READ-ONLY virtual filesystem for SDK exploration -- Use this to discover available functions and their signatures""" +- Use this to discover available functions and their signatures \ No newline at end of file diff --git a/pctx-py/src/pctx_client/tool_descriptions/prescriptive/execute_typescript.py b/descriptions/tools/execute_typescript_catalog/v1.txt similarity index 84% rename from pctx-py/src/pctx_client/tool_descriptions/prescriptive/execute_typescript.py rename to descriptions/tools/execute_typescript_catalog/v1.txt index e6d0641f..0877e911 100644 --- a/pctx-py/src/pctx_client/tool_descriptions/prescriptive/execute_typescript.py +++ b/descriptions/tools/execute_typescript_catalog/v1.txt @@ -1,6 +1,4 @@ -"""Prescriptive description for execute_typescript tool.""" - -DESCRIPTION = """Execute TypeScript code with access to registered SDK functions. +Execute TypeScript code with access to registered SDK functions. Your code runs in a TypeScript runtime with registered SDK functions already imported and available. @@ -29,4 +27,4 @@ RETURN TYPE NOTE: - Function results are already parsed JavaScript objects, NOT JSON strings - Do NOT call JSON.parse() on results -- Access properties directly (e.g., result.data)""" +- Access properties directly (e.g., result.data) \ No newline at end of file diff --git a/pctx-py/src/pctx_client/tool_descriptions/terminal/execute_typescript.py b/descriptions/tools/execute_typescript_filesystem/v1.txt similarity index 81% rename from pctx-py/src/pctx_client/tool_descriptions/terminal/execute_typescript.py rename to descriptions/tools/execute_typescript_filesystem/v1.txt index a55467bd..3377b739 100644 --- a/pctx-py/src/pctx_client/tool_descriptions/terminal/execute_typescript.py +++ b/descriptions/tools/execute_typescript_filesystem/v1.txt @@ -1,6 +1,4 @@ -"""Terminal-style description for execute_typescript tool.""" - -DESCRIPTION = """Execute TypeScript code with SDK functions. 
+Execute TypeScript code with SDK functions. Your code runs in a sandbox with access to SDK functions discovered via execute_bash. Functions are called as 'Namespace.functionName()' and return parsed JavaScript objects. @@ -19,4 +17,4 @@ - Variables don't persist between runs - Add console.log() statements between API calls to track progress and/or errors -Keep return values small to minimize token usage.""" +Keep return values small to minimize token usage. \ No newline at end of file diff --git a/descriptions/tools/execute_typescript_sidecar/v1.txt b/descriptions/tools/execute_typescript_sidecar/v1.txt new file mode 100644 index 00000000..13752764 --- /dev/null +++ b/descriptions/tools/execute_typescript_sidecar/v1.txt @@ -0,0 +1,35 @@ +Execute TypeScript code with access to all available tools. + +CODE STRUCTURE: +async function run() { + // Your code here + // Call `await invoke({ name: "tool_name", arguments: {...} })` with proper types + return result; +} + +IMPORTANT RULES: +- ALWAYS make ALL tool calls via typescript unless otherwise explicitly stated in the tool definition. +- You can call any of the available tools using the `invoke` typescript function (does not need to be imported) +- `invoke` takes a single object as an argument with 2 properties: + - `name: string` - the name of the function/tool to be called, exactly as it is written in the function list. + - `arguments: {[key: string]: any}` - the arguments for the function as described by the json schema in the function/tool list. +- `invoke` will either return a native typescript object as defined by the successful return schema (in the function definition). + - if there is no return schema in the function definition then the return type of the function is `unknown`. +- The typescript environment has STRICT typing, the code will only execute if the typecheck passes. For example if a `string` property is not listed as required by the json schema you must handle the case when it is undefined. 
+- You MUST define a `run()` function +- You MUST NOT call or export any functions from the root of the script, `run()` will be called automatically +- ALWAYS batch multiple tool calls into ONE execute typescript call +- Only listed tools are available to call via `invoke` - other common functions/modules like fetch(), fs, or other Node/Deno APIs are unavailable. +- Variables don't persist between executions +- Code runs in an isolated Deno sandbox + +TOKEN USAGE WARNING: +- This tool could return LARGE responses if your code returns big objects +- AVOID adding comments to the code +- Filter/map/reduce data IN YOUR CODE before returning +- Only return the data you actually need to complete the task, AVOID returning whole objects + +RETURN TYPE NOTE: +- Function results are already parsed JavaScript objects, NOT JSON strings +- Do NOT call JSON.parse() on results +- Access properties directly (e.g., result.data) \ No newline at end of file diff --git a/pctx-py/src/pctx_client/tool_descriptions/terminal/get_function_details.py b/descriptions/tools/get_function_details/v1.txt similarity index 54% rename from pctx-py/src/pctx_client/tool_descriptions/terminal/get_function_details.py rename to descriptions/tools/get_function_details/v1.txt index 217568e2..38bb78b9 100644 --- a/pctx-py/src/pctx_client/tool_descriptions/terminal/get_function_details.py +++ b/descriptions/tools/get_function_details/v1.txt @@ -1,9 +1,7 @@ -"""Terminal-style description for get_function_details tool.""" - -DESCRIPTION = """Get detailed TypeScript definitions for specific functions. +Get detailed TypeScript definitions for specific functions. Provide function names in 'namespace.functionName' format (e.g., 'Notion.apiPostSearch'). Returns full type signatures, parameter schemas, and return types. 
Note: Functions showing Promise return type have no schema - the actual runtime -value is a parsed JavaScript object (never a JSON string).""" +value is a parsed JavaScript object (never a JSON string). \ No newline at end of file diff --git a/descriptions/tools/list_functions/v1.txt b/descriptions/tools/list_functions/v1.txt new file mode 100644 index 00000000..a24a47b2 --- /dev/null +++ b/descriptions/tools/list_functions/v1.txt @@ -0,0 +1,3 @@ +List all available SDK functions organized by namespace. + +Returns function signatures showing structure without full type details. \ No newline at end of file diff --git a/descriptions/tools/search_functions/v1.txt b/descriptions/tools/search_functions/v1.txt new file mode 100644 index 00000000..74388826 --- /dev/null +++ b/descriptions/tools/search_functions/v1.txt @@ -0,0 +1,2 @@ +Search for SDK functions by keyword. +Returns matching functions ranked by relevance. \ No newline at end of file diff --git a/descriptions/workflows/catalog/v1.txt b/descriptions/workflows/catalog/v1.txt new file mode 100644 index 00000000..e751126c --- /dev/null +++ b/descriptions/workflows/catalog/v1.txt @@ -0,0 +1,3 @@ +WORKFLOW: + 1. Use the `list_functions` and `get_function_details` tools to discover tool signatures and input/output types. + 2. Write ONE script that calls ALL tools needed for the task and execute that script with `execute_typescript`, no need to import anything, all the namespaces returned by `list_functions` and `get_function_details` will be available globally. \ No newline at end of file diff --git a/descriptions/workflows/common.txt b/descriptions/workflows/common.txt new file mode 100644 index 00000000..e1de1086 --- /dev/null +++ b/descriptions/workflows/common.txt @@ -0,0 +1,7 @@ +General: + - BATCH MULTIPLE TOOL CALLS INTO ONE `execute_typescript` CALL. + - These tools exist to reduce round-trips. 
When a task requires multiple tool calls: + - WRONG: Multiple `execute_typescript` calls, each with one tool + - RIGHT: One `execute_typescript` call with a script that calls all needed tools + - Only `return` and `console.log` data you need, tools could have very large responses. + - IMPORTANT: All tool calls are ASYNC. Use await for each call. \ No newline at end of file diff --git a/descriptions/workflows/filesystem/v1.txt b/descriptions/workflows/filesystem/v1.txt new file mode 100644 index 00000000..1804ec34 --- /dev/null +++ b/descriptions/workflows/filesystem/v1.txt @@ -0,0 +1,3 @@ +WORKFLOW: + 1. Use the `execute_bash` tool to explore the filesystem for available typescript function signatures and their input/output types. + 2. Write ONE script that calls ALL tools needed for the task and execute that script with `execute_typescript`, no need to import anything, all the namespaces returned by `execute_bash` will be available. \ No newline at end of file diff --git a/descriptions/workflows/sidecar/v1.txt b/descriptions/workflows/sidecar/v1.txt new file mode 100644 index 00000000..a41ba572 --- /dev/null +++ b/descriptions/workflows/sidecar/v1.txt @@ -0,0 +1,3 @@ +WORKFLOW: + 1. Review all the available tools. + 2. Write and execute ONE script via `execute_typescript` that calls ALL tools needed for the task, no need to import `invoke`, it is globally available. \ No newline at end of file diff --git a/docs/CLI.md b/docs/CLI.md index 0cf8d137..7ac69f36 100644 --- a/docs/CLI.md +++ b/docs/CLI.md @@ -62,6 +62,7 @@ Starts PCTX server with no pre-configured tools. Use a client library like `pip * `--session-dir ` — Path to session storage directory Default value: `.pctx/sessions` +* `--allowed-origin ` — Allowed CORS origins. Can be specified multiple times. Defaults to localhost only (, , http://[`::1`]). Specify your own origins to override the default (can include or exclude localhost). Origins without explicit ports will match any port. 
Example: --allowed-origin --allowed-origin * `--no-banner` — Don't show the server banner diff --git a/docs/code-mode.md b/docs/code-mode.md index 4bc4b8bd..c6207f74 100644 --- a/docs/code-mode.md +++ b/docs/code-mode.md @@ -70,28 +70,44 @@ Code runs in Deno with strict limits: - Prevent invalid code from running - Clear error messages with line/column -## Three MCP Tools +## MCP Tools & Tool Disclosure -`pctx` exposes three tools that your LLM calls: +`pctx` exposes tools that your LLM calls. The exact set depends on the `disclosure` mode configured for your server. -### 1. `list_functions` +### `catalog` mode (default) -Returns TypeScript namespaces for all connected MCP servers. +Three tools for dynamic discovery and execution. -### 2. `get_function_details` +| Tool | Description | +| ---------------------- | ------------------------------------------------------------------------------------------- | +| `list_functions` | Returns TypeScript namespaces for all connected MCP servers | +| `get_function_details` | Returns full TypeScript signatures with JSDoc for specific functions | +| `execute_typescript` | Runs TypeScript code with type checking, returns `{ success, stdout, output, diagnostics }` | -Returns full TypeScript signatures with JSDoc for specific functions. +``` +list_functions() → get_function_details([...]) → execute_typescript({ code }) +``` -### 3. `execute` +### `filesystem` mode -Runs TypeScript code with type checking, returns `{ success, stdout, output, diagnostics }`. +Two tools for dynamic discovery and execution. The generated TypeScript code is loaded into a virtual filesystem that +LLMs can explore (`grep`, `find`, `cat`, `sed`, etc.) to gain the knowledge to write a script. 
-**Typical flow:** +| Tool | Description | +| -------------------- | ----------------------------------------------------- | +| `execute_bash` | Reads TypeScript tool definitions from the filesystem | +| `execute_typescript` | Runs TypeScript code with type checking | ``` -list_functions() → get_function_details([...]) → execute({ code }) +execute_bash() → execute_typescript({ code }) ``` +### `sidecar` mode + +_Currently only supported in the unified MCP server with `pctx mcp start`_ + +Upstream tool descriptions are surfaced directly as MCP tools with the addition of `execute_typescript`; the agent calls `execute_typescript` to invoke them without a separate discovery step. + ## Namespaces Each MCP server becomes a TypeScript namespace: diff --git a/docs/config.md b/docs/config.md index f5c9adf1..35347dc0 100644 --- a/docs/config.md +++ b/docs/config.md @@ -26,14 +26,33 @@ This creates a basic `pctx.json` and prompts you to add upstream MCP servers. ### Root Fields -| Field | Type | Required | Description | -| ------------- | --------------------- | -------- | ------------------------------------------------------ | -| `name` | `string` | Yes | Name of your MCP server instance | -| `version` | `string` | Yes | Version of your MCP server | -| `description` | `string` | No | Optional description of your MCP server | -| `servers` | `array[ServerConfig]` | Yes | List of upstream MCP server configurations (see below) | -| `logger` | `LoggerConfig` | No | Logger configuration (see below) | -| `telemetry` | `TelemetryConfig` | No | OpenTelemetry configuration (see below) | +| Field | Type | Required | Default | Description | +| ------------- | --------------------- | -------- | ----------- | ------------------------------------------------------ | +| `name` | `string` | Yes | - | Name of your MCP server instance | +| `version` | `string` | Yes | `"0.1.0"` | Version of your MCP server | +| `description` | `string` | No | - | Optional description of your MCP server | +| 
`disclosure` | `ToolDisclosure` | No | `"catalog"` | Tool disclosure mode (see below) | +| `servers` | `array[ServerConfig]` | Yes | - | List of upstream MCP server configurations (see below) | +| `logger` | `LoggerConfig` | No | - | Logger configuration (see below) | +| `telemetry` | `TelemetryConfig` | No | - | OpenTelemetry configuration (see below) | + +### Tool Disclosure + +The `disclosure` field controls which set of code-mode tools are exposed to the AI agent. It determines how the agent discovers and invokes upstream tools. + +| Value | Default | Description | +| -------------- | ------- | ------------------------------------------------------------------------------------------------- | +| `"catalog"` | Yes | Agent uses `list_tools` → `get_tool_details` → `execute_typescript` to discover and call tools | +| `"filesystem"` | No | Agent uses `execute_bash` → `execute_typescript`; tool details are read from the filesystem | +| `"sidecar"` | No | Upstream tool descriptions are surfaced directly; agent calls `execute_typescript` to invoke them | + +**Example:** + +```json +{ + "disclosure": "filesystem" +} +``` ### Server Configuration @@ -49,16 +68,17 @@ Each server in the `servers` array is either an HTTP server or a stdio server. **Stdio server fields:** -| Field | Type | Required | Description | -| --------- | ------------------- | -------- | ------------------------------------------------------------------------------------------------------- | -| `name` | `string` | Yes | Unique identifier used as TypeScript namespace | -| `command` | `string` | Yes | Command to execute the MCP server. Can be a single command or a full command line with arguments | -| `args` | `array[string]` | No | Arguments passed to the command. 
If omitted and `command` contains spaces, it will be shell-parsed | -| `env` | `map[string]string` | No | Environment variables for the process | +| Field | Type | Required | Description | +| --------- | ------------------- | -------- | -------------------------------------------------------------------------------------------------- | +| `name` | `string` | Yes | Unique identifier used as TypeScript namespace | +| `command` | `string` | Yes | Command to execute the MCP server. Can be a single command or a full command line with arguments | +| `args` | `array[string]` | No | Arguments passed to the command. If omitted and `command` contains spaces, it will be shell-parsed | +| `env` | `map[string]string` | No | Environment variables for the process | **Examples (stdio):** With explicit args array: + ```json { "name": "local_tools", @@ -71,6 +91,7 @@ With explicit args array: ``` With command-line string (auto-parsed): + ```json { "name": "memory", @@ -707,6 +728,7 @@ aws sts get-caller-identity When configuring stdio servers, you have two options: **Option 1: Shell-style command string (auto-parsed)** + ```json { "name": "memory", @@ -717,6 +739,7 @@ When configuring stdio servers, you have two options: The command is automatically parsed into executable and arguments using shell-style parsing. **Option 2: Explicit command and args** + ```json { "name": "memory", @@ -726,6 +749,7 @@ The command is automatically parsed into executable and arguments using shell-st ``` Use explicit args when: + - Your command has complex quoting requirements - You want to be explicit about argument boundaries - You're programmatically generating the configuration diff --git a/pctx-py/README.md b/pctx-py/README.md index f2b06a5a..ca3a50fb 100644 --- a/pctx-py/README.md +++ b/pctx-py/README.md @@ -1,5 +1,5 @@
- PCTX Logo + PCTX Logo

Python pctx-client

[![Made by](https://img.shields.io/badge/MADE%20BY-Port%20of%20Context-1e40af.svg?style=for-the-badge&labelColor=0c4a6e)](https://portofcontext.com) @@ -139,8 +139,7 @@ The `Pctx` client provides 3 main code mode functions: 3. **`execute(code)`** - Executes TypeScript code in an isolated Deno sandbox. The code can call any namespaced functions (e.g., `Namespace.functionName()`) discovered via `list_functions()`. Returns the execution result with stdout, stderr, and return value. If the optional dependancy `pctx-client[bm25s]` is installed, pctx will also -provide: -4. **`search_functions(query, top_k)`** - Searches available functions using BM25s vector search to find the most relevant functions for a given query. LLMs are instructed to call this first to discover what functions are available from your registered tools and MCP servers. +provide: 4. **`search_functions(query, top_k)`** - Searches available functions using BM25s vector search to find the most relevant functions for a given query. LLMs are instructed to call this first to discover what functions are available from your registered tools and MCP servers. 
## Defining Tools diff --git a/pctx-py/pyproject.toml b/pctx-py/pyproject.toml index 5c878bea..c2631afb 100644 --- a/pctx-py/pyproject.toml +++ b/pctx-py/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "pctx-client" -version = "0.3.0b1" +version = "0.3.0" description = "Python client for using Code Mode via PCTX" readme = "README.md" authors = [ @@ -18,8 +18,8 @@ dependencies = [ [project.optional-dependencies] langchain = ["langchain>=1.1.2"] crewai = ["crewai>=1.6.1"] -openai = ["openai-agents>=0.6.2"] -pydantic-ai = ["pydantic-ai>=0.0.14"] +openai = ["openai-agents>=0.12.0"] +pydantic-ai = ["pydantic-ai>=1.60.0"] bm25s = ["bm25s[stem]>=0.2.12"] [project.urls] @@ -43,8 +43,8 @@ dev = [ "langchain>=1.1.2", "langchain-openai>=0.3.0", "crewai>=1.6.1", - "openai-agents>=0.6.2", - "pydantic-ai>=0.0.14", + "openai-agents>=0.12.0", + "pydantic-ai>=1.60.0", "bm25s[stem]>=0.2.12", "sphinx", "sphinx-autobuild", diff --git a/pctx-py/src/pctx_client/__init__.py b/pctx-py/src/pctx_client/__init__.py index 32460db0..7eee6845 100644 --- a/pctx-py/src/pctx_client/__init__.py +++ b/pctx-py/src/pctx_client/__init__.py @@ -1,8 +1,7 @@ from ._client import Pctx from ._convert import tool from ._tool import AsyncTool, Tool -from .models import HttpServerConfig, ServerConfig, StdioServerConfig -from .tools import ToolConfig, ToolName +from .models import HttpServerConfig, ServerConfig, StdioServerConfig, ToolName __all__ = [ "Pctx", @@ -12,6 +11,5 @@ "HttpServerConfig", "StdioServerConfig", "ServerConfig", - "ToolConfig", "ToolName", ] diff --git a/pctx-py/src/pctx_client/_client.py b/pctx-py/src/pctx_client/_client.py index f0d8ffcf..b8c8a4b6 100644 --- a/pctx-py/src/pctx_client/_client.py +++ b/pctx-py/src/pctx_client/_client.py @@ -4,6 +4,8 @@ Main client for executing code with both MCP tools and local Python tools. 
""" +import asyncio +import warnings from typing import TYPE_CHECKING from urllib.parse import urlparse @@ -11,8 +13,9 @@ from pydantic import BaseModel from pctx_client._tool import AsyncTool, Tool -from pctx_client._utils import to_snake_case +from pctx_client._utils import HAS_SEARCH, to_snake_case from pctx_client._websocket_client import WebSocketClient +from pctx_client.descriptions import get_tool_description from pctx_client.exceptions import ConnectionError, SessionError from pctx_client.models import ( ExecuteBashInput, @@ -24,9 +27,10 @@ ListFunctionsOutput, ServerConfig, ToolConfig, + ToolDisclosure, + ToolDisclosureName, + ToolName, ) -from pctx_client.tool_descriptions import PRESCRIPTIVE_DESCRIPTIONS -from pctx_client.tools import ModeString, ToolName, get_toolset_from_mode if TYPE_CHECKING: try: @@ -42,10 +46,8 @@ try: from bm25s import BM25, tokenize from Stemmer import Stemmer - - HAS_SEARCH = True except ImportError: - HAS_SEARCH = False + pass class Pctx: @@ -301,7 +303,11 @@ async def get_function_details( return GetFunctionDetailsOutput.model_validate(list_res.json()) - async def execute(self, code: str) -> ExecuteOutput: + async def execute_typescript( + self, + code: str, + disclosure: ToolDisclosure | ToolDisclosureName = ToolDisclosure.CATALOG, + ) -> ExecuteOutput: """ Execute TypeScript code that calls namespaced functions. @@ -340,17 +346,34 @@ async def execute(self, code: str) -> ExecuteOutput: ... return { temperature: result.temp }; ... } ... ''' - ... output = await pctx.execute(code) + ... output = await pctx.execute_typescript(code) ... 
print(output.markdown()) # Formatted results with logs """ + if self._session_id is None: raise SessionError( "No code mode session exists, run Pctx(...).connect() before calling" ) - return await self._ws_client.execute_code( - self._session_id, code, timeout=self._execute_timeout + return await self._ws_client.execute_typescript( + self._session_id, + code, + disclosure=ToolDisclosure(disclosure), + timeout=self._execute_timeout, ) + async def execute( + self, + code: str, + disclosure: ToolDisclosure | ToolDisclosureName = ToolDisclosure.CATALOG, + ) -> ExecuteOutput: + """Deprecated alias for execute_typescript.""" + warnings.warn( + "execute() is deprecated, use execute_typescript() instead", + DeprecationWarning, + stacklevel=2, + ) + return await self.execute_typescript(code, disclosure=disclosure) + async def execute_bash(self, command: str) -> ExecuteOutput: """ Execute a bash command in the virtual filesystem. @@ -397,6 +420,8 @@ async def _register_servers(self, configs: list[ServerConfig]): res = await self._client.post("/register/servers", json={"servers": configs}) res.raise_for_status() + # ========== Utils ========== + def _search_functions_result_to_string( self, functions: list[ListedFunction] ) -> str: @@ -407,22 +432,24 @@ def _search_functions_result_to_string( ] ) + # ========== Frameworks ========== + def langchain_tools( self, - mode: ModeString | ToolConfig = "list_get_execute", + disclosure: ToolDisclosure | ToolDisclosureName = ToolDisclosure.CATALOG, descriptions: dict[ToolName, str] | None = None, ) -> "list[LangchainBaseTool]": """ Expose PCTX tools as LangChain tools Args: - mode: Tool mode configuration. Either: - - "list_get_execute" (default): list_functions, search_functions, - get_function_details, execute - - "fs": execute_bash, execute_typescript - - ToolConfig: Custom tool selection + disclosure: Controls which tools are exposed and how function context is + provided to the model. 
CATALOG (default) exposes list_functions, + get_function_details, and execute_typescript — the agent discovers + and retrieves function signatures before executing. FS exposes + execute_bash and execute_typescript — the agent browses the virtual + filesystem directly. descriptions: Optional custom descriptions to override defaults. - Only used when mode is a string. Requires the 'langchain' extra to be installed: pip install pctx[langchain] @@ -431,17 +458,11 @@ def langchain_tools( ImportError: If langchain is not installed. Examples: - Pre-bundled modes: - >>> tools = pctx.langchain_tools() # default: list_get_execute - >>> tools = pctx.langchain_tools("fs") - - Override descriptions: - >>> tools = pctx.langchain_tools("list_get_execute", descriptions={"execute": "Custom"}) - - Full control: - >>> from pctx_client.tools import ToolConfig - >>> tools = pctx.langchain_tools(ToolConfig(tools=["execute_bash", "list_functions"])) + >>> tools = pctx.langchain_tools() # default: catalog + >>> tools = pctx.langchain_tools(disclosure="filesystem") + >>> tools = pctx.langchain_tools(descriptions={"execute_typescript": "Custom"}) """ + disclosure = ToolDisclosure(disclosure) try: from langchain_core.tools import tool as langchain_tool except ImportError as e: @@ -449,118 +470,76 @@ def langchain_tools( "LangChain is not installed. 
Install it with: pip install pctx[langchain]" ) from e - # Convert mode string to ToolConfig if needed - if isinstance(mode, str): - toolset = get_toolset_from_mode(mode, descriptions) - else: - toolset = mode - - # Helper to get description with fallback - def get_desc(key: str) -> str: - if toolset.descriptions: - return toolset.descriptions.get(key, CODE_MODE_TOOL_DESCRIPTIONS[key]) - return CODE_MODE_TOOL_DESCRIPTIONS[key] - - tools = [] + # build all tools - # Build tools based on toolset configuration using registry - from pctx_client._tool_registry import TOOL_REGISTRY - - for tool_name in toolset.tools: - # Validate tool exists in registry - if tool_name not in TOOL_REGISTRY: - raise ValueError( - f"Unknown tool: {tool_name}. Valid tools: {sorted(TOOL_REGISTRY)}" - ) - - # Skip search_functions if BM25 not installed - if tool_name == "search_functions" and not HAS_SEARCH: - continue + @langchain_tool( + description=get_tool_description("execute_bash", overrides=descriptions) + ) + async def execute_bash(command: str) -> str: + return (await self.execute_bash(command)).markdown() - # Create framework-specific tool - tool = self._create_langchain_tool( - tool_name, get_desc(tool_name), langchain_tool + @langchain_tool( + description=get_tool_description( + "execute_typescript", disclosure=disclosure, overrides=descriptions ) - tools.append(tool) - - return tools - - def _create_langchain_tool( - self, tool_name: ToolName, description: str, langchain_tool - ): - """Factory method to create a LangChain tool for the given tool name""" - if tool_name == "execute_bash": - - @langchain_tool(description=description) - async def execute_bash(command: str) -> str: - return (await self.execute_bash(command)).markdown() - - return execute_bash - - elif tool_name == "execute_typescript": - - @langchain_tool(description=description) - async def execute_typescript(code: str) -> str: - return (await self.execute(code)).markdown() - - return execute_typescript - - elif 
tool_name == "list_functions": - - @langchain_tool(description=description) - async def list_functions() -> str: - return (await self.list_functions()).code - - return list_functions - - elif tool_name == "search_functions": - - @langchain_tool(description=description) - async def search_functions(query: str, k: int = 10) -> str: - functions = await self.search_functions(query, k) - return self._search_functions_result_to_string(functions) - - return search_functions - - elif tool_name == "get_function_details": - - @langchain_tool(description=description) - async def get_function_details(functions: list[str]) -> str: - return ( - await self.get_function_details( - functions, - ) - ).code - - return get_function_details + ) + async def execute_typescript(code: str) -> str: + return ( + await self.execute_typescript(code, disclosure=disclosure) + ).markdown() - elif tool_name == "execute": + @langchain_tool( + description=get_tool_description("list_functions", overrides=descriptions) + ) + async def list_functions() -> str: + return (await self.list_functions()).code - @langchain_tool(description=description) - async def execute(code: str) -> str: - return (await self.execute(code)).markdown() + @langchain_tool( + description=get_tool_description("search_functions", overrides=descriptions) + ) + async def search_functions(query: str, k: int = 10) -> str: + functions = await self.search_functions(query, k) + return self._search_functions_result_to_string(functions) - return execute + @langchain_tool( + description=get_tool_description( + "get_function_details", overrides=descriptions + ) + ) + async def get_function_details(functions: list[str]) -> str: + return ( + await self.get_function_details( + functions, + ) + ).code + + all_tools = [ + execute_bash, + execute_typescript, + list_functions, + search_functions, + get_function_details, + ] - else: - raise ValueError(f"Unsupported LangChain tool: {tool_name}") + # filter according to disclosure + return [t for t 
in all_tools if disclosure.contains_tool(t.name)] def crewai_tools( self, - mode: ModeString | ToolConfig = "list_get_execute", + disclosure: ToolDisclosure | ToolDisclosureName = ToolDisclosure.CATALOG, descriptions: dict[ToolName, str] | None = None, ) -> "list[CrewAiBaseTool]": """ Expose PCTX tools as CrewAI tools Args: - mode: Tool mode configuration. Either: - - "list_get_execute" (default): list_functions, search_functions, - get_function_details, execute - - "fs": execute_bash, execute_typescript - - ToolConfig: Custom tool selection + disclosure: Controls which tools are exposed and how function context is + provided to the model. CATALOG (default) exposes list_functions, + get_function_details, and execute_typescript — the agent discovers + and retrieves function signatures before executing. FS exposes + execute_bash and execute_typescript — the agent browses the virtual + filesystem directly. descriptions: Optional custom descriptions to override defaults. - Only used when mode is a string. Requires the 'crewai' extra to be installed: pip install pctx[crewai] @@ -569,17 +548,11 @@ def crewai_tools( ImportError: If crewai is not installed. Examples: - Pre-bundled modes: - >>> tools = pctx.crewai_tools() # default: list_get_execute - >>> tools = pctx.crewai_tools("fs") - - Override descriptions: - >>> tools = pctx.crewai_tools("list_get_execute", descriptions={"execute": "Custom"}) - - Full control: - >>> from pctx_client.tools import ToolConfig - >>> tools = pctx.crewai_tools(ToolConfig(tools=["execute_bash", "list_functions"])) + >>> tools = pctx.crewai_tools() # default: catalog + >>> tools = pctx.crewai_tools(disclosure="filesystem") + >>> tools = pctx.crewai_tools(descriptions={"execute_typescript": "Custom"}) """ + disclosure = ToolDisclosure(disclosure) try: from crewai.tools import BaseTool as CrewAiBaseTool except ImportError as e: @@ -587,211 +560,115 @@ def crewai_tools( "CrewAI is not installed. 
Install it with: pip install pctx[crewai]" ) from e - # Convert mode string to ToolConfig if needed - if isinstance(mode, str): - toolset = get_toolset_from_mode(mode, descriptions) - else: - toolset = mode - - # Helper to get description with fallback - def get_desc(key: str) -> str: - if toolset.descriptions: - return toolset.descriptions.get(key, CODE_MODE_TOOL_DESCRIPTIONS[key]) - return CODE_MODE_TOOL_DESCRIPTIONS[key] - - tools = [] - import asyncio - # Capture the current event loop for later use from threads try: main_loop = asyncio.get_running_loop() except RuntimeError: main_loop = None - # Build tools based on toolset configuration using registry - from pctx_client._tool_registry import TOOL_REGISTRY - - for tool_name in toolset.tools: - # Validate tool exists in registry - if tool_name not in TOOL_REGISTRY: - raise ValueError( - f"Unknown tool: {tool_name}. Valid tools: {sorted(TOOL_REGISTRY)}" + def run_async(coro, timeout: float = 30.0): + if main_loop is not None: + return asyncio.run_coroutine_threadsafe(coro, main_loop).result( + timeout=timeout ) + else: + return asyncio.run(coro) - # Skip search_functions if BM25 not installed - if tool_name == "search_functions" and not HAS_SEARCH: - continue + # build all tools - # Create framework-specific tool - tool = self._create_crewai_tool( - tool_name, get_desc(tool_name), CrewAiBaseTool, main_loop + class ExecuteBashTool(CrewAiBaseTool): + name: str = "execute_bash" + description: str = get_tool_description( + "execute_bash", overrides=descriptions ) - tools.append(tool) + args_schema: type[BaseModel] = ExecuteBashInput - return tools + def _run(_self, command: str) -> str: + return run_async(self.execute_bash(command)).markdown() - def _create_crewai_tool( - self, tool_name: ToolName, description: str, CrewAiBaseTool, main_loop - ): - """Factory method to create a CrewAI tool for the given tool name""" - import asyncio - - # Capture description in local scope for class attribute access - desc = 
description - - if tool_name == "execute_bash": - - class ExecuteBashTool(CrewAiBaseTool): - name: str = "execute_bash" - description: str = desc - args_schema: type[BaseModel] = ExecuteBashInput - - def _run(_self, command: str) -> str: - if main_loop is not None: - future = asyncio.run_coroutine_threadsafe( - self.execute_bash(command), main_loop - ) - return future.result(timeout=30).markdown() - else: - return asyncio.run(self.execute_bash(command)).markdown() - - return ExecuteBashTool() - - elif tool_name == "execute_typescript": - - class ExecuteTypeScriptTool(CrewAiBaseTool): - name: str = "execute_typescript" - description: str = desc - args_schema: type[BaseModel] = ExecuteInput - - def _run(_self, code: str) -> str: - if main_loop is not None: - future = asyncio.run_coroutine_threadsafe( - self.execute(code), main_loop - ) - return future.result(timeout=self._execute_timeout).markdown() - else: - return asyncio.run(self.execute(code)).markdown() - - return ExecuteTypeScriptTool() - - elif tool_name == "list_functions": - - class ListFunctionsTool(CrewAiBaseTool): - name: str = "list_functions" - description: str = desc - - def _run(_self) -> str: - if main_loop is not None: - future = asyncio.run_coroutine_threadsafe( - self.list_functions(), main_loop - ) - return future.result(timeout=30).code - else: - return asyncio.run(self.list_functions()).code - - return ListFunctionsTool() - - elif tool_name == "search_functions": - - class SearchFunctionsTool(CrewAiBaseTool): - name: str = "search_functions" - description: str = desc - - def _run(_self, query: str, k: int = 10) -> str: - if main_loop is not None: - future = asyncio.run_coroutine_threadsafe( - self.search_functions(query, k), main_loop - ) - return self._search_functions_result_to_string( - future.result(timeout=30) - ) - else: - return self._search_functions_result_to_string( - asyncio.run(self.search_functions(query, k)) - ) - - return SearchFunctionsTool() - - elif tool_name == 
"get_function_details": - - class GetFunctionDetailsTool(CrewAiBaseTool): - name: str = "get_function_details" - description: str = desc - args_schema: type[BaseModel] = GetFunctionDetailsInput - - def _run(_self, functions: list[str]) -> str: - if main_loop is not None: - future = asyncio.run_coroutine_threadsafe( - self.get_function_details(functions=functions), main_loop - ) - return future.result(timeout=30).code - else: - return asyncio.run( - self.get_function_details(functions=functions) - ).code - - return GetFunctionDetailsTool() - - elif tool_name == "execute": - - class ExecuteTool(CrewAiBaseTool): - name: str = "execute" - description: str = desc - args_schema: type[BaseModel] = ExecuteInput - - def _run(_self, code: str) -> str: - if main_loop is not None: - future = asyncio.run_coroutine_threadsafe( - self.execute(code=code), main_loop - ) - return future.result(timeout=self._execute_timeout).markdown() - else: - return asyncio.run(self.execute(code=code)).markdown() - - return ExecuteTool() + class ExecuteTypeScriptTool(CrewAiBaseTool): + name: str = "execute_typescript" + description: str = get_tool_description( + "execute_typescript", disclosure=disclosure, overrides=descriptions + ) + args_schema: type[BaseModel] = ExecuteInput + + def _run(_self, code: str) -> str: + return run_async( + self.execute_typescript(code, disclosure=disclosure), + timeout=self._execute_timeout, + ).markdown() + + class ListFunctionsTool(CrewAiBaseTool): + name: str = "list_functions" + description: str = get_tool_description( + "list_functions", overrides=descriptions + ) - else: - raise ValueError(f"Unsupported CrewAI tool: {tool_name}") + def _run(_self) -> str: + return run_async(self.list_functions()).code + + class SearchFunctionsTool(CrewAiBaseTool): + name: str = "search_functions" + description: str = get_tool_description( + "search_functions", overrides=descriptions + ) + + def _run(_self, query: str, k: int = 10) -> str: + return 
self._search_functions_result_to_string( + run_async(self.search_functions(query, k)) + ) + + class GetFunctionDetailsTool(CrewAiBaseTool): + name: str = "get_function_details" + description: str = get_tool_description( + "get_function_details", overrides=descriptions + ) + args_schema: type[BaseModel] = GetFunctionDetailsInput + + def _run(_self, functions: list[str]) -> str: + return run_async(self.get_function_details(functions=functions)).code + + all_tools = [ + ExecuteBashTool(), + ExecuteTypeScriptTool(), + ListFunctionsTool(), + SearchFunctionsTool(), + GetFunctionDetailsTool(), + ] + + # filter according to disclosure + return [t for t in all_tools if disclosure.contains_tool(t.name)] def openai_agents_tools( self, - mode: ModeString | ToolConfig = "list_get_execute", + disclosure: ToolDisclosure | ToolDisclosureName = ToolDisclosure.CATALOG, descriptions: dict[ToolName, str] | None = None, ) -> "list[FunctionTool]": """ Expose PCTX tools as OpenAI Agents SDK function tools Args: - mode: Tool mode configuration. Either: - - "list_get_execute" (default): list_functions, search_functions, - get_function_details, execute - - "fs": execute_bash, execute_typescript - - ToolConfig: Custom tool selection + disclosure: Controls which tools are exposed and how function context is + provided to the model. CATALOG (default) exposes list_functions, + get_function_details, and execute_typescript — the agent discovers + and retrieves function signatures before executing. FS exposes + execute_bash and execute_typescript — the agent browses the virtual + filesystem directly. descriptions: Optional custom descriptions to override defaults. - Only used when mode is a string. Requires the 'openai' extra to be installed: pip install pctx[openai] - Returns: - List of function tools compatible with OpenAI Agents SDK - Raises: ImportError: If openai is not installed. 
Examples: - Pre-bundled modes: - >>> tools = pctx.openai_agents_tools() # default: list_get_execute - >>> tools = pctx.openai_agents_tools("fs") - - Override descriptions: - >>> tools = pctx.openai_agents_tools("list_get_execute", descriptions={"execute": "Custom"}) - - Full control: - >>> from pctx_client.tools import ToolConfig - >>> tools = pctx.openai_agents_tools(ToolConfig(tools=["execute_bash", "list_functions"])) + >>> tools = pctx.openai_agents_tools() # default: catalog + >>> tools = pctx.openai_agents_tools(disclosure="filesystem") + >>> tools = pctx.openai_agents_tools(descriptions={"execute_typescript": "Custom"}) """ + disclosure = ToolDisclosure(disclosure) try: from agents import function_tool except ImportError as e: @@ -799,136 +676,74 @@ def openai_agents_tools( "OpenAI Agents SDK is not installed. Install it with: pip install pctx[openai]" ) from e - # Convert mode string to ToolConfig if needed - if isinstance(mode, str): - toolset = get_toolset_from_mode(mode, descriptions) - else: - toolset = mode - - # Helper to get description with fallback - def get_desc(key: str) -> str: - if toolset.descriptions: - return toolset.descriptions.get(key, CODE_MODE_TOOL_DESCRIPTIONS[key]) - return CODE_MODE_TOOL_DESCRIPTIONS[key] - - tools = [] - - # Build tools based on toolset configuration using registry - from pctx_client._tool_registry import TOOL_REGISTRY - - for tool_name in toolset.tools: - # Validate tool exists in registry - if tool_name not in TOOL_REGISTRY: - raise ValueError( - f"Unknown tool: {tool_name}. 
Valid tools: {sorted(TOOL_REGISTRY)}" - ) - - # Skip search_functions if BM25 not installed - if tool_name == "search_functions" and not HAS_SEARCH: - continue - - # Create framework-specific tool - tool = self._create_openai_agents_tool( - tool_name, get_desc(tool_name), function_tool - ) - tools.append(tool) - - return tools - - def _create_openai_agents_tool( - self, tool_name: ToolName, description: str, function_tool - ): - """Factory method to create an OpenAI Agents SDK tool for the given tool name""" - if tool_name == "execute_bash": - - async def execute_bash_wrapper(command: str) -> str: - return (await self.execute_bash(command)).markdown() - - execute_bash_wrapper.__doc__ = f"""{description} - -Args: - command: Bash command to execute""" - - return function_tool(name_override="execute_bash")(execute_bash_wrapper) - - elif tool_name == "execute_typescript": + # build all tools - async def execute_typescript_wrapper(code: str) -> str: - return (await self.execute(code)).markdown() + async def execute_bash(command: str) -> str: + return (await self.execute_bash(command)).markdown() - execute_typescript_wrapper.__doc__ = f"""{description} - -Args: - code: TypeScript code to execute""" - - return function_tool(name_override="execute_typescript")( - execute_typescript_wrapper - ) - - elif tool_name == "list_functions": - - async def list_functions_wrapper() -> str: - return (await self.list_functions()).code - - list_functions_wrapper.__doc__ = description - return function_tool(name_override="list_functions")(list_functions_wrapper) - - elif tool_name == "search_functions": - - async def search_functions_wrapper(query: str, k: int = 10) -> str: - functions = await self.search_functions(query, k) - return self._search_functions_result_to_string(functions) - - search_functions_wrapper.__doc__ = description - return function_tool(name_override="search_functions")( - search_functions_wrapper - ) + execute_bash.__doc__ = get_tool_description( + "execute_bash", 
overrides=descriptions + ) - elif tool_name == "get_function_details": + async def execute_typescript(code: str) -> str: + return ( + await self.execute_typescript(code, disclosure=disclosure) + ).markdown() - async def get_function_details_wrapper(functions: list[str]) -> str: - return (await self.get_function_details(functions)).code + execute_typescript.__doc__ = get_tool_description( + "execute_typescript", disclosure=disclosure, overrides=descriptions + ) - get_function_details_wrapper.__doc__ = f"""{description} + async def list_functions() -> str: + return (await self.list_functions()).code -Args: - functions: List of function names in 'namespace.functionName' format""" + list_functions.__doc__ = get_tool_description( + "list_functions", overrides=descriptions + ) - return function_tool(name_override="get_function_details")( - get_function_details_wrapper + async def search_functions(query: str, k: int = 10) -> str: + return self._search_functions_result_to_string( + await self.search_functions(query, k) ) - elif tool_name == "execute": - - async def execute_wrapper(code: str) -> str: - return (await self.execute(code)).markdown() + search_functions.__doc__ = get_tool_description( + "search_functions", overrides=descriptions + ) - execute_wrapper.__doc__ = f"""{description} + async def get_function_details(functions: list[str]) -> str: + return (await self.get_function_details(functions)).code -Args: - code: TypeScript code to execute""" + get_function_details.__doc__ = get_tool_description( + "get_function_details", overrides=descriptions + ) - return function_tool(name_override="execute")(execute_wrapper) + all_tools = [ + function_tool(name_override="execute_bash")(execute_bash), + function_tool(name_override="execute_typescript")(execute_typescript), + function_tool(name_override="list_functions")(list_functions), + function_tool(name_override="search_functions")(search_functions), + 
function_tool(name_override="get_function_details")(get_function_details), + ] - else: - raise ValueError(f"Unsupported OpenAI Agents tool: {tool_name}") + # filter according to disclosure + return [t for t in all_tools if disclosure.contains_tool(t.name)] def pydantic_ai_tools( self, - mode: ModeString | ToolConfig = "list_get_execute", + disclosure: ToolDisclosure | ToolDisclosureName = ToolDisclosure.CATALOG, descriptions: dict[ToolName, str] | None = None, ) -> "list[PydanticAITool]": """ Expose PCTX tools as Pydantic AI tools Args: - mode: Tool mode configuration. Either: - - "list_get_execute" (default): list_functions, search_functions, - get_function_details, execute - - "fs": execute_bash, execute_typescript - - ToolConfig: Custom tool selection + disclosure: Controls which tools are exposed and how function context is + provided to the model. CATALOG (default) exposes list_functions, + get_function_details, and execute_typescript — the agent discovers + and retrieves function signatures before executing. FS exposes + execute_bash and execute_typescript — the agent browses the virtual + filesystem directly. descriptions: Optional custom descriptions to override defaults. - Only used when mode is a string. Requires the 'pydantic-ai' extra to be installed: pip install pctx[pydantic-ai] @@ -937,17 +752,11 @@ def pydantic_ai_tools( ImportError: If pydantic-ai is not installed. 
Examples: - Pre-bundled modes: - >>> tools = pctx.pydantic_ai_tools() # default: list_get_execute - >>> tools = pctx.pydantic_ai_tools("fs") - - Override descriptions: - >>> tools = pctx.pydantic_ai_tools("list_get_execute", descriptions={"execute": "Custom"}) - - Full control: - >>> from pctx_client.tools import ToolConfig - >>> tools = pctx.pydantic_ai_tools(ToolConfig(tools=["execute_bash", "list_functions"])) + >>> tools = pctx.pydantic_ai_tools() # default: catalog + >>> tools = pctx.pydantic_ai_tools(disclosure="filesystem") + >>> tools = pctx.pydantic_ai_tools(descriptions={"execute_typescript": "Custom"}) """ + disclosure = ToolDisclosure(disclosure) try: from pydantic_ai.tools import Tool as PydanticAITool except ImportError as e: @@ -955,118 +764,64 @@ def pydantic_ai_tools( "Pydantic AI is not installed. Install it with: pip install pctx[pydantic-ai]" ) from e - # Convert mode string to ToolConfig if needed - if isinstance(mode, str): - toolset = get_toolset_from_mode(mode, descriptions) - else: - toolset = mode + # build all tools - # Helper to get description with fallback - def get_desc(key: str) -> str: - if toolset.descriptions: - return toolset.descriptions.get(key, CODE_MODE_TOOL_DESCRIPTIONS[key]) - return CODE_MODE_TOOL_DESCRIPTIONS[key] + async def execute_bash(command: str) -> str: + return (await self.execute_bash(command)).markdown() - tools = [] + async def execute_typescript(code: str) -> str: + return ( + await self.execute_typescript(code, disclosure=disclosure) + ).markdown() - # Build tools based on toolset configuration using registry - from pctx_client._tool_registry import TOOL_REGISTRY + async def list_functions() -> str: + return (await self.list_functions()).code - for tool_name in toolset.tools: - # Validate tool exists in registry - if tool_name not in TOOL_REGISTRY: - raise ValueError( - f"Unknown tool: {tool_name}. 
Valid tools: {sorted(TOOL_REGISTRY)}" - ) - - # Skip search_functions if BM25 not installed - if tool_name == "search_functions" and not HAS_SEARCH: - continue - - # Create framework-specific tool - tool = self._create_pydantic_ai_tool( - tool_name, get_desc(tool_name), PydanticAITool + async def search_functions(query: str, k: int = 10) -> str: + return self._search_functions_result_to_string( + await self.search_functions(query, k) ) - tools.append(tool) - - return tools - def _create_pydantic_ai_tool( - self, tool_name: ToolName, description: str, PydanticAITool - ): - """Factory method to create a Pydantic AI tool for the given tool name""" - if tool_name == "execute_bash": - - async def execute_bash_wrapper(command: str) -> str: - return (await self.execute_bash(command)).markdown() + async def get_function_details(functions: list[str]) -> str: + return (await self.get_function_details(functions)).code - return PydanticAITool( - execute_bash_wrapper, + all_tools = [ + PydanticAITool( + execute_bash, name="execute_bash", - description=description, - ) - - elif tool_name == "execute_typescript": - - async def execute_typescript_wrapper(code: str) -> str: - return (await self.execute(code)).markdown() - - return PydanticAITool( - execute_typescript_wrapper, + description=get_tool_description( + "execute_bash", overrides=descriptions + ), + ), + PydanticAITool( + execute_typescript, name="execute_typescript", - description=description, - ) - - elif tool_name == "list_functions": - - async def list_functions_wrapper() -> str: - return (await self.list_functions()).code - - return PydanticAITool( - list_functions_wrapper, + description=get_tool_description( + "execute_typescript", disclosure=disclosure, overrides=descriptions + ), + ), + PydanticAITool( + list_functions, name="list_functions", - description=description, - ) - - elif tool_name == "search_functions": - - async def search_functions_wrapper(query: str, k: int = 10) -> str: - functions = await 
self.search_functions(query, k) - return self._search_functions_result_to_string(functions) - - return PydanticAITool( - search_functions_wrapper, + description=get_tool_description( + "list_functions", overrides=descriptions + ), + ), + PydanticAITool( + search_functions, name="search_functions", - description=description, - ) - - elif tool_name == "get_function_details": - - async def get_function_details_wrapper(functions: list[str]) -> str: - return (await self.get_function_details(functions)).code - - return PydanticAITool( - get_function_details_wrapper, + description=get_tool_description( + "search_functions", overrides=descriptions + ), + ), + PydanticAITool( + get_function_details, name="get_function_details", - description=description, - ) - - elif tool_name == "execute": - - async def execute_wrapper(code: str) -> str: - return (await self.execute(code)).markdown() - - return PydanticAITool( - execute_wrapper, - name="execute", - description=description, - ) - - else: - raise ValueError(f"Unsupported Pydantic AI tool: {tool_name}") - + description=get_tool_description( + "get_function_details", overrides=descriptions + ), + ), + ] -# Import tool descriptions - change this to experiment with different styles -# Options: PRESCRIPTIVE_DESCRIPTIONS, TERMINAL_STYLE_DESCRIPTIONS -# See pctx_client/tool_descriptions/README.md for details -CODE_MODE_TOOL_DESCRIPTIONS = PRESCRIPTIVE_DESCRIPTIONS + # filter according to disclosure + return [t for t in all_tools if disclosure.contains_tool(t.name)] diff --git a/pctx-py/src/pctx_client/_tool_registry.py b/pctx-py/src/pctx_client/_tool_registry.py deleted file mode 100644 index 1aafcedb..00000000 --- a/pctx-py/src/pctx_client/_tool_registry.py +++ /dev/null @@ -1,58 +0,0 @@ -""" -Central registry for PCTX tools. - -This module provides a single source of truth for all available tools and validates -that the registry is complete (i.e., every tool in the ToolName literal is registered). 
- -The registry pattern ensures that: -1. All tools are defined in one place -2. Adding a new tool to ToolName requires adding it here (validated at import time) -3. Framework adapters can loop over tools instead of using if-statement chains -""" - -from typing import get_args - -from pctx_client.tools import ToolName - -# Central registry - set of all valid tool names -TOOL_REGISTRY: set[ToolName] = { - "list_functions", - "search_functions", - "get_function_details", - "execute", - "execute_bash", - "execute_typescript", -} - - -def validate_registry_completeness() -> None: - """ - Validate that TOOL_REGISTRY contains exactly the tools defined in ToolName. - - This ensures that: - - Every tool in the ToolName literal is in the registry (no missing tools) - - Every tool in the registry is in the ToolName literal (no typos/extras) - - Raises: - ValueError: If there are missing or extra tools in the registry - """ - all_tool_names = set(get_args(ToolName)) - registered_names = TOOL_REGISTRY - - missing = all_tool_names - registered_names - if missing: - raise ValueError( - f"Missing registry entries for tools: {missing}. " - f"Add these tools to TOOL_REGISTRY in _tool_registry.py" - ) - - extra = registered_names - all_tool_names - if extra: - raise ValueError( - f"Extra registry entries not in ToolName: {extra}. 
" - f"Remove these from TOOL_REGISTRY or add them to ToolName in tools.py" - ) - - -# Validate at module import time - fail fast if registry is incomplete -validate_registry_completeness() diff --git a/pctx-py/src/pctx_client/_utils.py b/pctx-py/src/pctx_client/_utils.py index 0c0e9fa3..a2be55f7 100644 --- a/pctx-py/src/pctx_client/_utils.py +++ b/pctx-py/src/pctx_client/_utils.py @@ -1,4 +1,7 @@ import re +from importlib.util import find_spec + +HAS_SEARCH = find_spec("bm25s") is not None and find_spec("Stemmer") is not None def to_snake_case(name: str) -> str: diff --git a/pctx-py/src/pctx_client/_websocket_client.py b/pctx-py/src/pctx_client/_websocket_client.py index 12ffa286..88ebdba5 100644 --- a/pctx-py/src/pctx_client/_websocket_client.py +++ b/pctx-py/src/pctx_client/_websocket_client.py @@ -26,6 +26,7 @@ ExecuteToolResponse, ExecuteToolResult, JsonRpcError, + ToolDisclosure, ) from .exceptions import ConnectionError @@ -105,8 +106,12 @@ async def _send(self, message: WebSocketMessage): await self.ws.send(message.model_dump_json()) - async def execute_code( - self, code_mode_session: str, code: str, timeout: float = 30.0 + async def execute_typescript( + self, + code_mode_session: str, + code: str, + disclosure: ToolDisclosure = ToolDisclosure.CATALOG, + timeout: float = 30.0, ) -> ExecuteOutput: """ Execute code via WebSocket instead of REST. 
@@ -135,7 +140,9 @@ async def execute_code( # Send request request = ExecuteCodeRequest( - id=request_id, method="execute_code", params=ExecuteCodeParams(code=code) + id=request_id, + method="execute_typescript", + params=ExecuteCodeParams(code=code, style=disclosure), ) try: diff --git a/pctx-py/src/pctx_client/tool_descriptions/README.md b/pctx-py/src/pctx_client/descriptions/README.md similarity index 96% rename from pctx-py/src/pctx_client/tool_descriptions/README.md rename to pctx-py/src/pctx_client/descriptions/README.md index 79f6df94..ecaca4e6 100644 --- a/pctx-py/src/pctx_client/tool_descriptions/README.md +++ b/pctx-py/src/pctx_client/descriptions/README.md @@ -46,6 +46,7 @@ To add a new description style: 5. Add to `__all__` exports Example: + ``` your_style/ ├── __init__.py @@ -63,7 +64,7 @@ from pctx_client.tool_descriptions import TERMINAL_STYLE_DESCRIPTIONS tools = pctx.langchain_tools(descriptions=TERMINAL_STYLE_DESCRIPTIONS) # Or write your own -custom = {"execute": "Your description", "list_functions": "Another"} +custom = {"execute_typescript": "Your description", "list_functions": "Another"} tools = pctx.langchain_tools(descriptions=custom) # Combine with different modes diff --git a/pctx-py/src/pctx_client/descriptions/__init__.py b/pctx-py/src/pctx_client/descriptions/__init__.py new file mode 100644 index 00000000..9c77cc0a --- /dev/null +++ b/pctx-py/src/pctx_client/descriptions/__init__.py @@ -0,0 +1,58 @@ +"""Tool descriptions loaded from versioned data files. + +Provides `get_description` to retrieve tool description strings by name, disclosure style, and version. 
+""" + +from importlib.resources import files + +from pctx_client.models import ToolDisclosure, ToolDisclosureName, ToolName + + +def get_workflow_description( + disclosure: ToolDisclosure | ToolDisclosureName = ToolDisclosure.CATALOG, + version: int = 1, +) -> str: + disclosure = ToolDisclosure(disclosure) + + common = ( + files("pctx_client.descriptions.data.workflows") / "common.txt" + ).read_text(encoding="utf-8") + specific = ( + files("pctx_client.descriptions.data.workflows") + / disclosure.value + / f"v{version}.txt" + ).read_text(encoding="utf-8") + + return f"{common}\n{specific}" + + +def get_tool_description( + tool_name: ToolName, + disclosure: ToolDisclosure | ToolDisclosureName = ToolDisclosure.CATALOG, + version: int = 1, + overrides: dict[ToolName, str] = None, +) -> str: + """Load a tool description string from the data directory. + + Args: + tool: The tool name. + version: The version. + + Returns: + The description text. + """ + + disclosure = ToolDisclosure(disclosure) + + if overrides is not None and tool_name in overrides: + return overrides[tool_name] + + if tool_name == "execute_typescript": + tool_name = f"{tool_name}_{disclosure.value}" + + return ( + files("pctx_client.descriptions.data.tools") / tool_name / f"v{version}.txt" + ).read_text(encoding="utf-8") + + +__all__ = ["ToolName", "get_tool_description", "get_workflow_description"] diff --git a/pctx-py/src/pctx_client/descriptions/data b/pctx-py/src/pctx_client/descriptions/data new file mode 120000 index 00000000..9cc4386d --- /dev/null +++ b/pctx-py/src/pctx_client/descriptions/data @@ -0,0 +1 @@ +../../../../descriptions \ No newline at end of file diff --git a/pctx-py/src/pctx_client/models.py b/pctx-py/src/pctx_client/models.py index 3bfe04c4..7e977012 100644 --- a/pctx-py/src/pctx_client/models.py +++ b/pctx-py/src/pctx_client/models.py @@ -1,10 +1,50 @@ import json -from enum import IntEnum +from enum import Enum, IntEnum from typing import Any, Literal, TypedDict from 
pydantic import BaseModel from typing_extensions import NotRequired +from pctx_client._utils import HAS_SEARCH + +ToolName = Literal[ + "execute_bash", + "execute_typescript", + "get_function_details", + "list_functions", + "search_functions", +] + + +ToolDisclosureName = Literal["catalog", "filesystem"] + + +class ToolDisclosure(str, Enum): + CATALOG = "catalog" + FS = "filesystem" + # SIDECAR = "sidecar" # <--- not fully supported by session server yet + + def contains_tool(self, tool_name: ToolName) -> bool: + if self == ToolDisclosure.CATALOG: + allowed = { + "get_function_details", + "list_functions", + "execute_typescript", + } + if HAS_SEARCH: + allowed.add("search_functions") + return tool_name in allowed + elif self == ToolDisclosure.FS: + return tool_name in { + "execute_bash", + "execute_typescript", + } + # elif self == DisclosureStyle.SIDECAR: # <--- not fully supported by session server yet + # return tool_name == "execute_typescript" + else: + raise ValueError(f"Unhandled ToolDisclosure variant: {self}") + + # ------------- Tool Callback Config ------------ @@ -161,10 +201,11 @@ class JsonRpcError(JsonRpcBase): class ExecuteCodeParams(BaseModel): code: str + disclosure: ToolDisclosure = ToolDisclosure.CATALOG class ExecuteCodeRequest(JsonRpcBase): - method: Literal["execute_code"] + method: Literal["execute_typescript"] params: ExecuteCodeParams diff --git a/pctx-py/src/pctx_client/tool_descriptions/__init__.py b/pctx-py/src/pctx_client/tool_descriptions/__init__.py deleted file mode 100644 index 20c70086..00000000 --- a/pctx-py/src/pctx_client/tool_descriptions/__init__.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Tool description variants for experimentation with different prompting styles. - -This module builds description dictionaries from individual tool files organized by style. -""" - -from . 
import prescriptive, terminal, workflows - -# Build description dictionaries from individual tool modules -PRESCRIPTIVE_DESCRIPTIONS = { - "list_functions": prescriptive.list_functions.DESCRIPTION, - "search_functions": prescriptive.search_functions.DESCRIPTION, - "get_function_details": prescriptive.get_function_details.DESCRIPTION, - "execute": prescriptive.execute.DESCRIPTION, - "execute_bash": prescriptive.execute_bash.DESCRIPTION, - "execute_typescript": prescriptive.execute_typescript.DESCRIPTION, -} - -TERMINAL_STYLE_DESCRIPTIONS = { - "list_functions": terminal.list_functions.DESCRIPTION, - "search_functions": terminal.search_functions.DESCRIPTION, - "get_function_details": terminal.get_function_details.DESCRIPTION, - "execute": terminal.execute.DESCRIPTION, - "execute_bash": terminal.execute_bash.DESCRIPTION, - "execute_typescript": terminal.execute_typescript.DESCRIPTION, -} - -__all__ = [ - "PRESCRIPTIVE_DESCRIPTIONS", - "TERMINAL_STYLE_DESCRIPTIONS", - "workflows", -] diff --git a/pctx-py/src/pctx_client/tool_descriptions/_base.py b/pctx-py/src/pctx_client/tool_descriptions/_base.py deleted file mode 100644 index b2c0e8c1..00000000 --- a/pctx-py/src/pctx_client/tool_descriptions/_base.py +++ /dev/null @@ -1,10 +0,0 @@ -"""Shared utilities for tool descriptions.""" - -# Detect if search is available -try: - from bm25s import BM25 # noqa: F401 - from Stemmer import Stemmer # noqa: F401 - - HAS_SEARCH = True -except ImportError: - HAS_SEARCH = False diff --git a/pctx-py/src/pctx_client/tool_descriptions/prescriptive/__init__.py b/pctx-py/src/pctx_client/tool_descriptions/prescriptive/__init__.py deleted file mode 100644 index e1cd6dfa..00000000 --- a/pctx-py/src/pctx_client/tool_descriptions/prescriptive/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -"""Prescriptive tool descriptions - organized by individual tool.""" - -from . 
import ( - execute, - execute_bash, - execute_typescript, - get_function_details, - list_functions, - search_functions, -) - -__all__ = [ - "execute", - "execute_bash", - "execute_typescript", - "get_function_details", - "list_functions", - "search_functions", -] diff --git a/pctx-py/src/pctx_client/tool_descriptions/prescriptive/execute.py b/pctx-py/src/pctx_client/tool_descriptions/prescriptive/execute.py deleted file mode 100644 index a4d70803..00000000 --- a/pctx-py/src/pctx_client/tool_descriptions/prescriptive/execute.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Prescriptive description for execute tool.""" - -DESCRIPTION = """Execute TypeScript code that calls namespaced functions. - -TOKEN USAGE WARNING: This tool could return LARGE responses if your code returns big objects. -To minimize tokens: -- Filter/map/reduce data IN YOUR CODE before returning -- Only return specific fields you need (e.g., return {id: result.id, count: items.length}) -- Use console.log() for intermediate results instead of returning everything -- Avoid returning full API responses - extract just what you need - -REQUIRED CODE STRUCTURE: -async function run() { - // Your code here - // Call namespace.functionName() - MUST include namespace prefix - // Process data here to minimize return size - return onlyWhatYouNeed; // Keep this small! 
-} - -IMPORTANT RULES: -- Functions MUST be called as 'Namespace.functionName' (e.g., 'Notion.apiPostSearch') -- Only registered SDK functions are available - no fetch(), fs, or other Node/Deno APIs -- Variables don't persist between execute() calls - return or log anything you need later -- Add console.log() statements between API calls to track progress if errors occur -- Code runs in an isolated Deno sandbox with restricted network access - -RETURN TYPE NOTE: -- Functions without output schemas show Promise as return type -- The actual runtime value is already a parsed JavaScript object, NOT a JSON string -- Do NOT call JSON.parse() on results - they're already objects -- Access properties directly (e.g., result.data) or inspect with console.log() first -- If you see 'Promise', the structure is unknown - log it to see what's returned""" diff --git a/pctx-py/src/pctx_client/tool_descriptions/prescriptive/get_function_details.py b/pctx-py/src/pctx_client/tool_descriptions/prescriptive/get_function_details.py deleted file mode 100644 index 231f6f92..00000000 --- a/pctx-py/src/pctx_client/tool_descriptions/prescriptive/get_function_details.py +++ /dev/null @@ -1,13 +0,0 @@ -"""Prescriptive description for get_function_details tool.""" - -DESCRIPTION = """Get detailed TypeScript type information for specific SDK functions you want to use. - -REQUIRED FORMAT: Functions must be specified as 'namespace.functionName' (e.g., 'Namespace.apiPostSearch') - -This tool is lightweight and only returns details for the functions you request, avoiding unnecessary token usage. -Only request details for functions you actually plan to use in your code. 
- -NOTE ON RETURN TYPES: -- If a function returns Promise, the tool didn't provide an output schema -- The actual value is a parsed object (not a string) - access properties directly -- Don't use JSON.parse() on the results - they're already JavaScript objects""" diff --git a/pctx-py/src/pctx_client/tool_descriptions/prescriptive/list_functions.py b/pctx-py/src/pctx_client/tool_descriptions/prescriptive/list_functions.py deleted file mode 100644 index b1844248..00000000 --- a/pctx-py/src/pctx_client/tool_descriptions/prescriptive/list_functions.py +++ /dev/null @@ -1,5 +0,0 @@ -"""Prescriptive description for list_functions tool.""" - -DESCRIPTION = """List all available SDK functions organized by namespace. - -Returns function signatures showing structure without full type details.""" diff --git a/pctx-py/src/pctx_client/tool_descriptions/prescriptive/search_functions.py b/pctx-py/src/pctx_client/tool_descriptions/prescriptive/search_functions.py deleted file mode 100644 index f87ab620..00000000 --- a/pctx-py/src/pctx_client/tool_descriptions/prescriptive/search_functions.py +++ /dev/null @@ -1,9 +0,0 @@ -"""Prescriptive description for search_functions tool.""" - -DESCRIPTION = """Search for SDK functions by keyword using BM25 full-text search. - -Arguments: - query: The search query string to find relevant functions. - k: The maximum number of top results to return (default: 10). - -Returns a ranked list of matching functions with relevance scores.""" diff --git a/pctx-py/src/pctx_client/tool_descriptions/terminal/__init__.py b/pctx-py/src/pctx_client/tool_descriptions/terminal/__init__.py deleted file mode 100644 index 5d38df53..00000000 --- a/pctx-py/src/pctx_client/tool_descriptions/terminal/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -"""Terminal-style tool descriptions - organized by individual tool.""" - -from . 
import ( - execute, - execute_bash, - execute_typescript, - get_function_details, - list_functions, - search_functions, -) - -__all__ = [ - "execute", - "execute_bash", - "execute_typescript", - "get_function_details", - "list_functions", - "search_functions", -] diff --git a/pctx-py/src/pctx_client/tool_descriptions/terminal/execute.py b/pctx-py/src/pctx_client/tool_descriptions/terminal/execute.py deleted file mode 100644 index ca746d9a..00000000 --- a/pctx-py/src/pctx_client/tool_descriptions/terminal/execute.py +++ /dev/null @@ -1,23 +0,0 @@ -"""Terminal-style description for execute tool.""" - -DESCRIPTION = """Execute TypeScript code in an isolated Deno sandbox. - -Your code runs in a TypeScript runtime with access to registered SDK functions. -Call functions as 'Namespace.functionName()' - they're already imported and available. - -Runtime environment: -- Isolated Deno sandbox with restricted network access -- No Node.js/Deno APIs (fs, fetch, etc.) - only registered SDK functions -- Variables don't persist between executions -- Return values are automatically serialized - -Code structure: -async function run() { - // Your code here - return yourResult; -} - -Performance: -- Large return values consume tokens -- Filter/reduce data in your code before returning. Use console.log() for debugging output -- Don't write any comments in your code""" diff --git a/pctx-py/src/pctx_client/tool_descriptions/terminal/execute_bash.py b/pctx-py/src/pctx_client/tool_descriptions/terminal/execute_bash.py deleted file mode 100644 index 944c350b..00000000 --- a/pctx-py/src/pctx_client/tool_descriptions/terminal/execute_bash.py +++ /dev/null @@ -1,12 +0,0 @@ -"""Terminal-style description for execute_bash tool.""" - -DESCRIPTION = """Execute bash commands in the SDK filesystem. 
- -You're working in an in-memory virtual filesystem mounted at /sdk/ containing: -- README.md - Overview of available functions by namespace -- {Namespace}/ directories - One per namespace, containing .d.ts type definition files - -Current directory: /sdk/ - -Standard bash utilities available: ls, cat, grep, find, etc. -This filesystem is read-only - used for exploring the SDK before writing code.""" diff --git a/pctx-py/src/pctx_client/tool_descriptions/terminal/list_functions.py b/pctx-py/src/pctx_client/tool_descriptions/terminal/list_functions.py deleted file mode 100644 index c80de85d..00000000 --- a/pctx-py/src/pctx_client/tool_descriptions/terminal/list_functions.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Terminal-style description for list_functions tool.""" - -DESCRIPTION = """List available SDK functions. - -Returns an organized view of all functions available in the current session, grouped by namespace. -Function signatures show basic structure without full type details.""" diff --git a/pctx-py/src/pctx_client/tool_descriptions/terminal/search_functions.py b/pctx-py/src/pctx_client/tool_descriptions/terminal/search_functions.py deleted file mode 100644 index 99b66d75..00000000 --- a/pctx-py/src/pctx_client/tool_descriptions/terminal/search_functions.py +++ /dev/null @@ -1,9 +0,0 @@ -"""Terminal-style description for search_functions tool.""" - -DESCRIPTION = """Search for SDK functions by keyword. 
- -Arguments: - query: Search terms to find relevant functions - k: Maximum results to return (default: 10) - -Returns matching functions ranked by relevance.""" diff --git a/pctx-py/src/pctx_client/tool_descriptions/workflows.py b/pctx-py/src/pctx_client/tool_descriptions/workflows.py deleted file mode 100644 index 6313bfa0..00000000 --- a/pctx-py/src/pctx_client/tool_descriptions/workflows.py +++ /dev/null @@ -1,65 +0,0 @@ -"""Workflow system prompts for different tool combinations.""" - -from ..tools import ToolName - - -def get_workflow_prompt(tools: list[ToolName]) -> str | None: - """ - Generate workflow guidance based on available tools. - - Returns system prompt text explaining how to use the tools together, - or None if no workflow guidance is needed (e.g., single tool). - """ - tool_set = set(tools) - - # Discovery + Details + Execute workflow - if {"list_functions", "get_function_details", "execute"}.issubset(tool_set): - return """To use these tools effectively: -1. Start with list_functions to see all available SDK functions organized by namespace -2. Call get_function_details for specific functions you want to use to see their parameters and types -3. Finally, use execute to run your TypeScript code that calls those functions - -This discovery -> details -> execute workflow helps you write correct code on the first try.""" - - # Search + Details + Execute workflow - if {"search_functions", "get_function_details", "execute"}.issubset(tool_set): - return """To use these tools effectively: -1. Use search_functions to find relevant functions by keyword (searches names and descriptions) -2. Call get_function_details for the functions you want to use to see their parameters and types -3. 
Finally, use execute to run your TypeScript code that calls those functions - -This search -> details -> execute workflow helps you quickly find and use the right functions.""" - - # Exploration workflow (bash + typescript) - if {"execute_bash", "execute_typescript"}.issubset(tool_set): - return """To use these tools effectively: -1. Use execute_bash to explore the SDK filesystem: - - `cat README.md` shows all available functions - - `cat {Namespace}/{functionName}.d.ts` shows detailed type information -2. Then use execute_typescript to run your code with those functions - -This exploration -> execution workflow is useful when you need to understand the SDK structure.""" - - # Filesystem mode - if {"read_file", "list_directory"} == tool_set: - return """To use these tools effectively: -1. Use list_directory to explore the directory structure -2. Use read_file to examine specific files - -These tools provide read-only filesystem access to SDK type definitions.""" - - # No specific workflow needed - return None - - -# Pre-defined workflows for common modes -WORKFLOW_PROMPTS = { - "list_get_execute": get_workflow_prompt( - ["list_functions", "get_function_details", "execute"] - ), - "search_get_execute": get_workflow_prompt( - ["search_functions", "get_function_details", "execute"] - ), - "bash_typescript": get_workflow_prompt(["execute_bash", "execute_typescript"]), - "fs": get_workflow_prompt(["read_file", "list_directory"]), -} diff --git a/pctx-py/src/pctx_client/tools.py b/pctx-py/src/pctx_client/tools.py deleted file mode 100644 index 9dbf01e6..00000000 --- a/pctx-py/src/pctx_client/tools.py +++ /dev/null @@ -1,106 +0,0 @@ -"""Tool configuration for pctx client. - -Provides flexible ways to configure which tools are exposed and how they're described. 
-""" - -from dataclasses import dataclass -from typing import Literal - -# All available tool names -ToolName = Literal[ - "list_functions", - "search_functions", - "get_function_details", - "execute", - "execute_bash", - "execute_typescript", -] - - -@dataclass -class ToolConfig: - """Configuration for which tools to expose and their descriptions. - - Examples: - Pre-bundled modes: - >>> tools = pctx.langchain_tools("list_get_execute") - >>> tools = pctx.langchain_tools("fs") - - Override descriptions in a mode: - >>> tools = pctx.langchain_tools( - ... "list_get_execute", - ... descriptions={"execute": "Custom description"} - ... ) - - Full control - mix and match: - >>> from pctx_client.tools import ToolConfig - >>> tools = pctx.langchain_tools( - ... ToolConfig( - ... tools=["execute_bash", "list_functions", "execute"], - ... descriptions={"execute_bash": "Custom bash description"} - ... ) - ... ) - """ - - tools: list[ToolName] - """List of tool names to include""" - - descriptions: dict[ToolName, str] | None = None - """Optional custom descriptions for tools. Only overrides specified tools.""" - - -# Pre-defined mode configurations -def list_get_execute_mode( - descriptions: dict[ToolName, str] | None = None, -) -> ToolConfig: - """Standard mode: list, search (if available), get_details, execute. - - This is the typical workflow: - 1. list_functions - See all available functions - 2. search_functions - Find relevant functions (if search is available) - 3. get_function_details - Get detailed info about specific functions - 4. 
execute - Run TypeScript code calling those functions - """ - # Note: search_functions is included in the list even if BM25 is not installed - # The converter methods will check HAS_SEARCH before creating the actual tool - tools = ["list_functions", "search_functions", "get_function_details", "execute"] - return ToolConfig(tools=tools, descriptions=descriptions) - - -def fs_mode(descriptions: dict[ToolName, str] | None = None) -> ToolConfig: - """Filesystem mode: execute_bash, execute_typescript. - - This mode presents SDK functions as an in-memory filesystem: - 1. execute_bash - Explore the filesystem (ls, cat, grep .d.ts files) - 2. execute_typescript - Run TypeScript code after discovering types - """ - return ToolConfig( - tools=["execute_bash", "execute_typescript"], descriptions=descriptions - ) - - -# Type for mode strings -ModeString = Literal["list_get_execute", "fs"] - - -def get_toolset_from_mode( - mode: ModeString, descriptions: dict[ToolName, str] | None = None -) -> ToolConfig: - """Convert a mode string to a ToolConfig configuration. - - Args: - mode: Mode name ("list_get_execute" or "fs") - descriptions: Optional custom descriptions to override defaults - - Returns: - ToolConfig configuration for the specified mode - - Raises: - ValueError: If mode is not recognized - """ - if mode == "list_get_execute": - return list_get_execute_mode(descriptions) - elif mode == "fs": - return fs_mode(descriptions) - else: - raise ValueError(f"Unknown mode: {mode}. 
Valid modes: 'list_get_execute', 'fs'") diff --git a/pctx-py/tests/scripts/crewai_code_mode.py b/pctx-py/tests/scripts/crewai_code_mode.py index 679c156e..ba8549c9 100644 --- a/pctx-py/tests/scripts/crewai_code_mode.py +++ b/pctx-py/tests/scripts/crewai_code_mode.py @@ -23,14 +23,14 @@ async def run_agent(): await code_mode.connect() llm = LLM( - model="openrouter/deepseek/deepseek-chat", + model="openai/gpt-oss-120b", api_key=os.environ.get("OPENROUTER_API_KEY"), base_url="https://openrouter.ai/api/v1", ) agent = Agent( llm=llm, - tools=code_mode.crewai_tools(), + tools=code_mode.crewai_tools("filesystem"), verbose=True, role="Helpful assistant", goal="answer queries about time and weather with your available tools", diff --git a/pctx-py/tests/scripts/langchain_code_mode.py b/pctx-py/tests/scripts/langchain_code_mode.py index c2984875..8d62f9c1 100644 --- a/pctx-py/tests/scripts/langchain_code_mode.py +++ b/pctx-py/tests/scripts/langchain_code_mode.py @@ -1,9 +1,9 @@ import asyncio import os -import pprint from langchain.agents import create_agent from langchain_openai import ChatOpenAI +from rich import print from pctx_client import Pctx, tool @@ -25,7 +25,7 @@ async def run_agent(): await code_mode.connect() llm = ChatOpenAI( - model="deepseek/deepseek-chat", + model="anthropic/claude-sonnet-4.6", temperature=0, api_key=os.environ.get("OPENROUTER_API_KEY"), base_url="https://openrouter.ai/api/v1", @@ -34,7 +34,7 @@ async def run_agent(): agent = create_agent( llm, tools=code_mode.langchain_tools(), - system_prompt="You are a helpful assistant", + system_prompt="You are a helpful assistant, use tools when you need to access real-time information.", ) result = await agent.ainvoke( @@ -45,7 +45,7 @@ async def run_agent(): } ) - pprint.pprint(result) + print(result) await code_mode.disconnect() diff --git a/pctx-py/tests/scripts/manual_code_mode.py b/pctx-py/tests/scripts/manual_code_mode.py index ed671427..42e676fa 100755 --- 
a/pctx-py/tests/scripts/manual_code_mode.py +++ b/pctx-py/tests/scripts/manual_code_mode.py @@ -1,5 +1,4 @@ import asyncio -import pprint from datetime import datetime from os import getenv @@ -83,8 +82,8 @@ async def main(): return { multval, now }; } """ - output = await p.execute(code) - pprint.pprint(output) + output = await p.execute_typescript(code) + print(output.markdown()) invalid_code = """ async function run() { @@ -93,8 +92,8 @@ async def main(): return addval; } """ - invalid_output = await p.execute(invalid_code) - pprint.pprint(invalid_output) + invalid_output = await p.execute_typescript(invalid_code) + print(invalid_output.markdown()) print(p._session_id) diff --git a/pctx-py/tests/scripts/pydantic_ai_code_mode.py b/pctx-py/tests/scripts/pydantic_ai_code_mode.py index 28b4969e..fe086c4c 100644 --- a/pctx-py/tests/scripts/pydantic_ai_code_mode.py +++ b/pctx-py/tests/scripts/pydantic_ai_code_mode.py @@ -40,11 +40,11 @@ async def run_agent(): await code_mode.connect() # Get PCTX tools in Pydantic AI format - pctx_tools = code_mode.pydantic_ai_tools() + pctx_tools = code_mode.pydantic_ai_tools("filesystem") # Create a Pydantic AI agent with PCTX tools agent = Agent( - "openrouter:deepseek/deepseek-chat", + "openrouter:anthropic/claude-sonnet-4-5", system_prompt="You are a helpful assistant with access to code execution tools.", tools=pctx_tools, ) diff --git a/pctx-py/tests/test_integration.py b/pctx-py/tests/test_integration.py index 2402c54d..914d9b1d 100644 --- a/pctx-py/tests/test_integration.py +++ b/pctx-py/tests/test_integration.py @@ -136,7 +136,7 @@ async def test_execute_simple_code(): } """ - output = await pctx.execute(code) + output = await pctx.execute_typescript(code) # Verify execution succeeded assert output.success, "Execution should succeed" @@ -170,7 +170,7 @@ async def test_execute_with_error(): } """ - output = await pctx.execute(code) + output = await pctx.execute_typescript(code) # When code throws an error, success should be 
False assert not output.success, "Execution should report failure" @@ -198,7 +198,7 @@ async def test_multiple_sequential_executions(): return { execution: 1, value: 100 }; } """ - output1 = await pctx.execute(code1) + output1 = await pctx.execute_typescript(code1) assert output1.success, "First execution should succeed" assert output1.output is not None, "output1 should have output" assert output1.output.get("execution") == 1 @@ -209,7 +209,7 @@ async def test_multiple_sequential_executions(): return { execution: 2, value: 200 }; } """ - output2 = await pctx.execute(code2) + output2 = await pctx.execute_typescript(code2) assert output2.success, "Second execution should succeed" assert output2.output is not None, "output2 should have output" assert output2.output.get("execution") == 2 @@ -258,7 +258,7 @@ async def test_markdown_output_formatting(): } """ - output = await pctx.execute(code) + output = await pctx.execute_typescript(code) markdown = output.markdown() # Verify markdown output contains expected elements @@ -342,7 +342,7 @@ def search_logs( return { sum: result }; } """ - output = await pctx.execute(code) + output = await pctx.execute_typescript(code) assert output.success, "Execution should succeed" assert output.output is not None, "Should have output" @@ -355,7 +355,7 @@ def search_logs( return { greeting: result }; } """ - output2 = await pctx.execute(code2) + output2 = await pctx.execute_typescript(code2) assert output2.success, "Second execution should succeed" assert output2.output is not None, "output2 should have output" @@ -370,7 +370,7 @@ def search_logs( return { greeting: result }; } """ - output3 = await pctx.execute(code3) + output3 = await pctx.execute_typescript(code3) assert output3.success, "Third execution should succeed" assert output3.output is not None, "output3 should have output" @@ -385,7 +385,7 @@ def search_logs( return { timestamp: result }; } """ - output4 = await pctx.execute(code4) + output4 = await 
pctx.execute_typescript(code4) assert output4.success, "Fourth execution should succeed" assert output4.output is not None, "output4 should have output" assert isinstance(output4.output.get("timestamp"), float), ( @@ -401,7 +401,7 @@ def search_logs( return { noInput, empty, filtered }; } """ - output5 = await pctx.execute(code5) + output5 = await pctx.execute_typescript(code5) assert output5.success, ( f"search_logs should succeed. stderr: {output5.stderr}" @@ -461,7 +461,7 @@ async def fetch_data(item_id: int) -> dict: return result; } """ - output = await pctx.execute(code) + output = await pctx.execute_typescript(code) assert output.success, "Execution should succeed" assert output.output is not None, "Should have output" @@ -524,7 +524,7 @@ async def test_http_mcp_server_registration(http_mcp_server): return { difference: result }; } """ - output = await pctx.execute(code) + output = await pctx.execute_typescript(code) assert output.success, f"Execution should succeed. stderr: {output.stderr}" assert output.output is not None, "Should have output" @@ -598,7 +598,7 @@ async def test_stdio_mcp_server_registration(): return { sum: result }; } """ - output = await pctx.execute(code) + output = await pctx.execute_typescript(code) assert output.success, f"Execution should succeed. 
stderr: {output.stderr}" assert output.output is not None, "Should have output" @@ -644,9 +644,10 @@ def greet(name: str) -> str: # Test 2: Read the README output = await pctx.execute_bash("cat README.md") + print(output.stdout) assert output.success, "cat command should succeed" assert "TypeScript SDK" in output.stdout, "README should have header" - assert "**Tools**" in output.stdout, "README should list Tools namespace" + assert "## Tools" in output.stdout, "README should list Tools namespace" assert "addNumbers" in output.stdout, ( "README should list addNumbers function" ) @@ -727,7 +728,7 @@ def multiply(x: int, y: int) -> int: return { product: result }; } """ - output = await pctx.execute(code) + output = await pctx.execute_typescript(code) assert output.success, "TypeScript execution should succeed" assert output.output is not None, "Should have output" assert output.output.get("product") == 42, "Expected product to be 42" @@ -780,7 +781,7 @@ def format_result(value: int, label: str) -> str: return { product, formatted }; } """ - output = await pctx.execute(code) + output = await pctx.execute_typescript(code) assert output.success, "Execution should succeed" assert output.output is not None, "output should have output" diff --git a/pctx-py/tests/test_tool_converters.py b/pctx-py/tests/test_tool_converters.py index 9e556c53..84af5d5d 100644 --- a/pctx-py/tests/test_tool_converters.py +++ b/pctx-py/tests/test_tool_converters.py @@ -14,6 +14,7 @@ from pydantic_ai.tools import Tool as PydanticAITool from pctx_client import Pctx +from pctx_client.models import ToolDisclosure @pytest.fixture @@ -46,12 +47,13 @@ def test_langchain_tools_are_langchain_tools(self, pctx_client): def test_langchain_tool_names(self, pctx_client): """Test that LangChain tools have the correct names""" - tools = pctx_client.langchain_tools() - names = [tool.name for tool in tools] - assert "list_functions" in names - assert "search_functions" in names - assert "get_function_details" in 
names - assert "execute" in names + names = {tool.name for tool in pctx_client.langchain_tools()} + assert names == { + "list_functions", + "search_functions", + "get_function_details", + "execute_typescript", + } def test_langchain_tool_descriptions(self, pctx_client): """Test that LangChain tools have descriptions""" @@ -91,12 +93,13 @@ def test_crewai_tools_are_crewai_basetools(self, pctx_client): def test_crewai_tool_names(self, pctx_client): """Test that CrewAI tools have correct names""" - tools = pctx_client.crewai_tools() - names = [tool.name for tool in tools] - assert "list_functions" in names - assert "search_functions" in names - assert "get_function_details" in names - assert "execute" in names + names = {tool.name for tool in pctx_client.crewai_tools()} + assert names == { + "list_functions", + "search_functions", + "get_function_details", + "execute_typescript", + } def test_crewai_tool_descriptions(self, pctx_client): """Test that CrewAI tools have descriptions""" @@ -129,7 +132,7 @@ def test_crewai_get_function_details_has_schema(self, pctx_client): def test_crewai_execute_has_schema(self, pctx_client): """Test that execute tool has args_schema""" tools = pctx_client.crewai_tools() - execute_tool = next(t for t in tools if t.name == "execute") + execute_tool = next(t for t in tools if t.name == "execute_typescript") assert hasattr(execute_tool, "args_schema") assert execute_tool.args_schema is not None @@ -159,12 +162,13 @@ def test_openai_agents_tools_structure(self, pctx_client): def test_openai_agents_function_names(self, pctx_client): """Test that OpenAI Agents functions have correct names""" - tools = pctx_client.openai_agents_tools() - names = [tool.name for tool in tools] - assert "list_functions" in names - assert "search_functions" in names - assert "get_function_details" in names - assert "execute" in names + names = {tool.name for tool in pctx_client.openai_agents_tools()} + assert names == { + "list_functions", + "search_functions", + 
"get_function_details", + "execute_typescript", + } def test_openai_agents_function_descriptions(self, pctx_client): """Test that OpenAI Agents functions have descriptions""" @@ -207,7 +211,7 @@ def test_openai_agents_get_function_details_schema(self, pctx_client): def test_openai_agents_execute_schema(self, pctx_client): """Test execute has correct schema""" tools = pctx_client.openai_agents_tools() - execute_tool = next(t for t in tools if t.name == "execute") + execute_tool = next(t for t in tools if t.name == "execute_typescript") params = execute_tool.params_json_schema assert "code" in params["properties"] assert params["properties"]["code"]["type"] == "string" @@ -234,12 +238,13 @@ def test_pydantic_ai_tools_are_pydantic_ai_tools(self, pctx_client): def test_pydantic_ai_tool_names(self, pctx_client): """Test that Pydantic AI tools have correct names""" - tools = pctx_client.pydantic_ai_tools() - names = [tool.name for tool in tools] - assert "list_functions" in names - assert "search_functions" in names - assert "get_function_details" in names - assert "execute" in names + names = {tool.name for tool in pctx_client.pydantic_ai_tools()} + assert names == { + "list_functions", + "search_functions", + "get_function_details", + "execute_typescript", + } def test_pydantic_ai_tool_descriptions(self, pctx_client): """Test that Pydantic AI tools have descriptions""" @@ -296,7 +301,7 @@ def test_all_converters_have_same_function_names(self, pctx_client): "list_functions", "search_functions", "get_function_details", - "execute", + "execute_typescript", } # LangChain @@ -324,19 +329,19 @@ class TestFilesystemMode: def test_langchain_fs_mode_returns_two_tools(self, pctx_client): """Test that langchain_tools("fs") returns exactly 2 tools""" - tools = pctx_client.langchain_tools("fs") + tools = pctx_client.langchain_tools(ToolDisclosure.FS) assert isinstance(tools, list) assert len(tools) == 2 def test_langchain_fs_mode_tool_names(self, pctx_client): - """Test that 
langchain_tools("fs") returns execute_bash and execute_typescript""" - tools = pctx_client.langchain_tools("fs") + """Test that langchain_tools(ToolDisclosure.FS) returns execute_bash and execute_typescript""" + tools = pctx_client.langchain_tools(ToolDisclosure.FS) names = {tool.name for tool in tools} assert names == {"execute_bash", "execute_typescript"} def test_langchain_fs_mode_tool_descriptions(self, pctx_client): """Test that fs_mode tools have proper descriptions""" - tools = pctx_client.langchain_tools("fs") + tools = pctx_client.langchain_tools(ToolDisclosure.FS) for tool in tools: assert tool.description assert len(tool.description) > 0 @@ -353,41 +358,41 @@ def test_langchain_fs_mode_tool_descriptions(self, pctx_client): ) def test_crewai_fs_mode_returns_two_tools(self, pctx_client): - """Test that crewai_tools("fs") returns exactly 2 tools""" - tools = pctx_client.crewai_tools("fs") + """Test that crewai_tools(ToolDisclosure.FS) returns exactly 2 tools""" + tools = pctx_client.crewai_tools(ToolDisclosure.FS) assert isinstance(tools, list) assert len(tools) == 2 def test_crewai_fs_mode_tool_names(self, pctx_client): - """Test that crewai_tools("fs") returns execute_bash and execute_typescript""" - tools = pctx_client.crewai_tools("fs") + """Test that crewai_tools(ToolDisclosure.FS) returns execute_bash and execute_typescript""" + tools = pctx_client.crewai_tools(ToolDisclosure.FS) names = {tool.name for tool in tools} assert names == {"execute_bash", "execute_typescript"} def test_crewai_fs_mode_tools_are_basetools(self, pctx_client): """Test that fs_mode CrewAI tools are still BaseTool instances""" - tools = pctx_client.crewai_tools("fs") + tools = pctx_client.crewai_tools(ToolDisclosure.FS) for tool in tools: assert isinstance(tool, CrewAIBaseTool) def test_crewai_fs_mode_tools_have_schemas(self, pctx_client): """Test that fs_mode CrewAI tools have args_schema""" - tools = pctx_client.crewai_tools("fs") + tools = 
pctx_client.crewai_tools(ToolDisclosure.FS) for tool in tools: assert hasattr(tool, "args_schema") assert tool.args_schema is not None def test_openai_agents_fs_mode_returns_two_tools(self, pctx_client): - """Test that openai_agents_tools("fs") returns exactly 2 tools""" - tools = pctx_client.openai_agents_tools("fs") + """Test that openai_agents_tools(ToolDisclosure.FS) returns exactly 2 tools""" + tools = pctx_client.openai_agents_tools(ToolDisclosure.FS) assert isinstance(tools, list) assert len(tools) == 2 def test_openai_agents_fs_mode_tool_names(self, pctx_client): - """Test that openai_agents_tools("fs") returns execute_bash and execute_typescript""" + """Test that openai_agents_tools(ToolDisclosure.FS) returns execute_bash and execute_typescript""" from agents import FunctionTool - tools = pctx_client.openai_agents_tools("fs") + tools = pctx_client.openai_agents_tools(ToolDisclosure.FS) names = {tool.name for tool in tools} assert names == {"execute_bash", "execute_typescript"} # Verify they're still FunctionTool instances @@ -396,7 +401,7 @@ def test_openai_agents_fs_mode_tool_names(self, pctx_client): def test_openai_agents_fs_mode_tool_schemas(self, pctx_client): """Test that fs_mode OpenAI Agents tools have proper schemas""" - tools = pctx_client.openai_agents_tools("fs") + tools = pctx_client.openai_agents_tools(ToolDisclosure.FS) for tool in tools: params = tool.params_json_schema assert params["type"] == "object" @@ -412,20 +417,20 @@ def test_openai_agents_fs_mode_tool_schemas(self, pctx_client): assert "code" in params["required"] def test_pydantic_ai_fs_mode_returns_two_tools(self, pctx_client): - """Test that pydantic_ai_tools("fs") returns exactly 2 tools""" - tools = pctx_client.pydantic_ai_tools("fs") + """Test that pydantic_ai_tools(ToolDisclosure.FS) returns exactly 2 tools""" + tools = pctx_client.pydantic_ai_tools(ToolDisclosure.FS) assert isinstance(tools, list) assert len(tools) == 2 def test_pydantic_ai_fs_mode_tool_names(self, 
pctx_client): - """Test that pydantic_ai_tools("fs") returns execute_bash and execute_typescript""" - tools = pctx_client.pydantic_ai_tools("fs") + """Test that pydantic_ai_tools(ToolDisclosure.FS) returns execute_bash and execute_typescript""" + tools = pctx_client.pydantic_ai_tools(ToolDisclosure.FS) names = {tool.name for tool in tools} assert names == {"execute_bash", "execute_typescript"} def test_pydantic_ai_fs_mode_tools_are_pydantic_ai_tools(self, pctx_client): """Test that fs_mode Pydantic AI tools are still Tool instances""" - tools = pctx_client.pydantic_ai_tools("fs") + tools = pctx_client.pydantic_ai_tools(ToolDisclosure.FS) for tool in tools: assert isinstance(tool, PydanticAITool) @@ -434,19 +439,27 @@ def test_all_converters_fs_mode_consistency(self, pctx_client): expected_names = {"execute_bash", "execute_typescript"} # LangChain - langchain_names = {tool.name for tool in pctx_client.langchain_tools("fs")} + langchain_names = { + tool.name for tool in pctx_client.langchain_tools(ToolDisclosure.FS) + } assert langchain_names == expected_names # CrewAI - crewai_names = {tool.name for tool in pctx_client.crewai_tools("fs")} + crewai_names = { + tool.name for tool in pctx_client.crewai_tools(ToolDisclosure.FS) + } assert crewai_names == expected_names # OpenAI Agents - openai_names = {tool.name for tool in pctx_client.openai_agents_tools("fs")} + openai_names = { + tool.name for tool in pctx_client.openai_agents_tools(ToolDisclosure.FS) + } assert openai_names == expected_names # Pydantic AI - pydantic_names = {tool.name for tool in pctx_client.pydantic_ai_tools("fs")} + pydantic_names = { + tool.name for tool in pctx_client.pydantic_ai_tools(ToolDisclosure.FS) + } assert pydantic_names == expected_names def test_fs_mode_false_returns_standard_tools(self, pctx_client): @@ -457,11 +470,10 @@ def test_fs_mode_false_returns_standard_tools(self, pctx_client): # Should have standard tools assert "list_functions" in names - assert "execute" in names + assert 
"execute_typescript" in names - # Should NOT have fs tools + # Should NOT have fs-only tools assert "execute_bash" not in names - assert "execute_typescript" not in names # ============== Custom Tool Descriptions Tests ============== @@ -475,13 +487,13 @@ def test_langchain_custom_descriptions(self, pctx_client): custom_descriptions = { "list_functions": "Custom list description", "get_function_details": "Custom details description", - "execute": "Custom execute description", + "execute_typescript": "Custom execute description", } tools = pctx_client.langchain_tools(descriptions=custom_descriptions) # Find the execute tool and check its description - execute_tool = next(t for t in tools if t.name == "execute") + execute_tool = next(t for t in tools if t.name == "execute_typescript") assert execute_tool.description == "Custom execute description" def test_crewai_custom_descriptions(self, pctx_client): @@ -489,13 +501,13 @@ def test_crewai_custom_descriptions(self, pctx_client): custom_descriptions = { "list_functions": "Custom list description", "get_function_details": "Custom details description", - "execute": "Custom execute description", + "execute_typescript": "Custom execute description", } tools = pctx_client.crewai_tools(descriptions=custom_descriptions) # Find the execute tool and check its description (CrewAI wraps it with metadata) - execute_tool = next(t for t in tools if t.name == "execute") + execute_tool = next(t for t in tools if t.name == "execute_typescript") assert "Custom execute description" in execute_tool.description def test_openai_agents_custom_descriptions(self, pctx_client): @@ -503,13 +515,13 @@ def test_openai_agents_custom_descriptions(self, pctx_client): custom_descriptions = { "list_functions": "Custom list description", "get_function_details": "Custom details description", - "execute": "Custom execute description", + "execute_typescript": "Custom execute description", } tools = 
pctx_client.openai_agents_tools(descriptions=custom_descriptions) # Find the execute tool and check its description contains custom text - execute_tool = next(t for t in tools if t.name == "execute") + execute_tool = next(t for t in tools if t.name == "execute_typescript") assert "Custom execute description" in execute_tool.description def test_pydantic_ai_custom_descriptions(self, pctx_client): @@ -517,13 +529,13 @@ def test_pydantic_ai_custom_descriptions(self, pctx_client): custom_descriptions = { "list_functions": "Custom list description", "get_function_details": "Custom details description", - "execute": "Custom execute description", + "execute_typescript": "Custom execute description", } tools = pctx_client.pydantic_ai_tools(descriptions=custom_descriptions) # Find the execute tool and check its description - execute_tool = next(t for t in tools if t.name == "execute") + execute_tool = next(t for t in tools if t.name == "execute_typescript") assert execute_tool.description == "Custom execute description" def test_langchain_fs_mode_custom_descriptions(self, pctx_client): @@ -533,7 +545,9 @@ def test_langchain_fs_mode_custom_descriptions(self, pctx_client): "execute_typescript": "Custom typescript description", } - tools = pctx_client.langchain_tools("fs", descriptions=custom_descriptions) + tools = pctx_client.langchain_tools( + ToolDisclosure.FS, descriptions=custom_descriptions + ) # Check both tools have custom descriptions bash_tool = next(t for t in tools if t.name == "execute_bash") @@ -541,276 +555,3 @@ def test_langchain_fs_mode_custom_descriptions(self, pctx_client): assert bash_tool.description == "Custom bash description" assert ts_tool.description == "Custom typescript description" - - def test_terminal_style_descriptions_work(self, pctx_client): - """Test that TERMINAL_STYLE_DESCRIPTIONS can be used""" - from pctx_client.tool_descriptions import TERMINAL_STYLE_DESCRIPTIONS - - tools = 
pctx_client.langchain_tools(descriptions=TERMINAL_STYLE_DESCRIPTIONS) - - # Verify tools were created and have the terminal-style descriptions - execute_tool = next(t for t in tools if t.name == "execute") - # Terminal style is shorter and mentions "Deno sandbox" - assert "Deno sandbox" in execute_tool.description - # Should NOT have prescriptive workflow language - assert "WORKFLOW" not in execute_tool.description - - -# ============== Tool Registry Tests ============== - - -class TestToolRegistry: - """Tests for the central tool registry""" - - def test_registry_contains_all_tools(self): - """Test that TOOL_REGISTRY contains all ToolName values""" - from typing import get_args - - from pctx_client._tool_registry import TOOL_REGISTRY - from pctx_client.tools import ToolName - - # Get all possible tool names from the ToolName literal - all_tool_names = set(get_args(ToolName)) - - # Verify registry contains exactly these tools - assert TOOL_REGISTRY == all_tool_names - - def test_registry_validation_runs_on_import(self): - """Test that registry validation runs automatically on import""" - # This test verifies that the module can be imported without errors - # If the registry were incomplete, an error would be raised on import - from pctx_client import _tool_registry - - # Verify the validation function exists - assert hasattr(_tool_registry, "validate_registry_completeness") - assert callable(_tool_registry.validate_registry_completeness) - - def test_unknown_tool_raises_error(self, pctx_client): - """Test that using an unknown tool name raises a clear error""" - from pctx_client.tools import ToolConfig - - # Create a ToolConfig with a made-up tool name - # We need to bypass type checking by using a dict - invalid_config = ToolConfig(tools=["fake_tool_that_does_not_exist"]) # type: ignore - - # All framework adapters should raise ValueError with helpful message - with pytest.raises(ValueError, match="Unknown tool"): - pctx_client.langchain_tools(invalid_config) - - 
with pytest.raises(ValueError, match="Unknown tool"): - pctx_client.crewai_tools(invalid_config) - - with pytest.raises(ValueError, match="Unknown tool"): - pctx_client.openai_agents_tools(invalid_config) - - with pytest.raises(ValueError, match="Unknown tool"): - pctx_client.pydantic_ai_tools(invalid_config) - - -# ============== Mix and Match ToolConfig Tests ============== - - -class TestToolConfigMixAndMatch: - """Tests for arbitrary ToolConfig combinations with custom tools and descriptions""" - - def test_single_tool_config_langchain(self, pctx_client): - """Test ToolConfig with just one tool""" - from pctx_client.tools import ToolConfig - - config = ToolConfig(tools=["execute"]) - tools = pctx_client.langchain_tools(config) - - assert len(tools) == 1 - assert tools[0].name == "execute" - - def test_single_tool_config_all_frameworks(self, pctx_client): - """Test single tool config works across all frameworks""" - from pctx_client.tools import ToolConfig - - config = ToolConfig(tools=["list_functions"]) - - # Test all frameworks - langchain = pctx_client.langchain_tools(config) - crewai = pctx_client.crewai_tools(config) - openai = pctx_client.openai_agents_tools(config) - pydantic = pctx_client.pydantic_ai_tools(config) - - # All should have exactly 1 tool - assert len(langchain) == 1 - assert len(crewai) == 1 - assert len(openai) == 1 - assert len(pydantic) == 1 - - # All should have the same tool name - assert langchain[0].name == "list_functions" - assert crewai[0].name == "list_functions" - assert openai[0].name == "list_functions" - assert pydantic[0].name == "list_functions" - - def test_custom_combo_bash_list_execute(self, pctx_client): - """Test custom combination: execute_bash + list_functions + execute""" - from pctx_client.tools import ToolConfig - - config = ToolConfig(tools=["execute_bash", "list_functions", "execute"]) - tools = pctx_client.langchain_tools(config) - - assert len(tools) == 3 - names = {tool.name for tool in tools} - assert names 
== {"execute_bash", "list_functions", "execute"} - - def test_custom_combo_with_custom_descriptions(self, pctx_client): - """Test ToolConfig with custom tools AND custom descriptions""" - from pctx_client.tools import ToolConfig - - custom_desc = "My custom bash description" - config = ToolConfig( - tools=["execute_bash", "list_functions"], - descriptions={"execute_bash": custom_desc}, - ) - - # Test LangChain - tools = pctx_client.langchain_tools(config) - assert len(tools) == 2 - bash_tool = next(t for t in tools if t.name == "execute_bash") - assert bash_tool.description == custom_desc - - # Test CrewAI - crewai_tools = pctx_client.crewai_tools(config) - assert len(crewai_tools) == 2 - crewai_bash = next(t for t in crewai_tools if t.name == "execute_bash") - assert custom_desc in crewai_bash.description - - # Test OpenAI Agents - openai_tools = pctx_client.openai_agents_tools(config) - assert len(openai_tools) == 2 - openai_bash = next(t for t in openai_tools if t.name == "execute_bash") - assert custom_desc in openai_bash.description - - # Test Pydantic AI - pydantic_tools = pctx_client.pydantic_ai_tools(config) - assert len(pydantic_tools) == 2 - pydantic_bash = next(t for t in pydantic_tools if t.name == "execute_bash") - assert pydantic_bash.description == custom_desc - - def test_all_six_tools(self, pctx_client): - """Test ToolConfig with all possible tools""" - from typing import get_args - - from pctx_client.tools import ToolConfig, ToolName - - all_tools = list(get_args(ToolName)) - config = ToolConfig(tools=all_tools) - - # Test LangChain (search_functions requires HAS_SEARCH) - tools = pctx_client.langchain_tools(config) - # Should have 4-6 tools depending on HAS_SEARCH - assert ( - len(tools) >= 5 - ) # At minimum: list, get_details, execute, execute_bash, execute_typescript - names = {tool.name for tool in tools} - assert "list_functions" in names - assert "execute" in names - assert "execute_bash" in names - - def 
test_custom_combo_search_execute(self, pctx_client): - """Test search + execute workflow (without list or get_details)""" - from pctx_client.tools import ToolConfig - - config = ToolConfig(tools=["search_functions", "execute"]) - tools = pctx_client.langchain_tools(config) - - # Should have 1-2 tools depending on HAS_SEARCH - assert len(tools) >= 1 - names = {tool.name for tool in tools} - assert "execute" in names - - def test_cross_framework_consistency_custom_config(self, pctx_client): - """Test that custom ToolConfig produces consistent results across frameworks""" - from pctx_client.tools import ToolConfig - - config = ToolConfig( - tools=["execute_bash", "execute_typescript", "list_functions"] - ) - - # Get tools from all frameworks - langchain = pctx_client.langchain_tools(config) - crewai = pctx_client.crewai_tools(config) - openai = pctx_client.openai_agents_tools(config) - pydantic = pctx_client.pydantic_ai_tools(config) - - # All should have same number of tools - assert len(langchain) == len(crewai) == len(openai) == len(pydantic) == 3 - - # All should have same tool names - expected_names = {"execute_bash", "execute_typescript", "list_functions"} - assert {t.name for t in langchain} == expected_names - assert {t.name for t in crewai} == expected_names - assert {t.name for t in openai} == expected_names - assert {t.name for t in pydantic} == expected_names - - def test_custom_config_preserves_tool_order(self, pctx_client): - """Test that ToolConfig preserves the order of tools""" - from pctx_client.tools import ToolConfig - - # Define tools in specific order - config = ToolConfig(tools=["execute", "list_functions", "execute_bash"]) - tools = pctx_client.langchain_tools(config) - - # Tools should be in the order specified - assert tools[0].name == "execute" - assert tools[1].name == "list_functions" - assert tools[2].name == "execute_bash" - - def test_duplicate_tools_in_config(self, pctx_client): - """Test that duplicate tools in config only create one 
instance""" - from pctx_client.tools import ToolConfig - - # Config with duplicate - config = ToolConfig(tools=["execute", "execute", "list_functions"]) # type: ignore - tools = pctx_client.langchain_tools(config) - - # Should create 2 tools (execute appears twice but only created once per iteration) - # Actually this will create execute twice since we loop over the list - # Let's verify the behavior - assert len(tools) == 3 # Will have 2 execute + 1 list_functions - names = [t.name for t in tools] - assert names.count("execute") == 2 - assert names.count("list_functions") == 1 - - def test_empty_tools_list(self, pctx_client): - """Test that empty tools list returns empty list""" - from pctx_client.tools import ToolConfig - - config = ToolConfig(tools=[]) - tools = pctx_client.langchain_tools(config) - - assert len(tools) == 0 - assert tools == [] - - def test_mixed_discovery_and_execution_tools(self, pctx_client): - """Test mixing discovery tools with execution tools""" - from pctx_client.tools import ToolConfig - - # Mix list (discovery) with execute (execution) and bash (filesystem) - config = ToolConfig( - tools=["list_functions", "execute", "execute_bash"], - descriptions={ - "list_functions": "Custom list description", - "execute": "Custom execute description", - }, - ) - - tools = pctx_client.langchain_tools(config) - assert len(tools) == 3 - - # Check custom descriptions were applied - list_tool = next(t for t in tools if t.name == "list_functions") - assert list_tool.description == "Custom list description" - - execute_tool = next(t for t in tools if t.name == "execute") - assert execute_tool.description == "Custom execute description" - - # execute_bash should have default description (not custom) - bash_tool = next(t for t in tools if t.name == "execute_bash") - assert "custom" not in bash_tool.description.lower() - assert len(bash_tool.description) > 0 # Has default description diff --git a/pctx-py/uv.lock b/pctx-py/uv.lock index af331b1e..49a7bf13 
100644 --- a/pctx-py/uv.lock +++ b/pctx-py/uv.lock @@ -8,6 +8,30 @@ resolution-markers = [ "python_full_version < '3.11'", ] +[[package]] +name = "ag-ui-protocol" +version = "0.1.13" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/04/b5/fc0b65b561d00d88811c8a7d98ee735833f81554be244340950e7b65820c/ag_ui_protocol-0.1.13.tar.gz", hash = "sha256:811d7d7dcce4783dec252918f40b717ebfa559399bf6b071c4ba47c0c1e21bcb", size = 5671, upload-time = "2026-02-19T18:40:38.602Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cd/9f/b833c1ab1999da35ebad54841ae85d2c2764c931da9a6f52d8541b6901b2/ag_ui_protocol-0.1.13-py3-none-any.whl", hash = "sha256:1393fa894c1e8416efe184168a50689e760d05b32f4646eebb8ff423dddf8e8f", size = 8053, upload-time = "2026-02-19T18:40:37.27Z" }, +] + +[[package]] +name = "aiofile" +version = "3.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "caio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/67/e2/d7cb819de8df6b5c1968a2756c3cb4122d4fa2b8fc768b53b7c9e5edb646/aiofile-3.9.0.tar.gz", hash = "sha256:e5ad718bb148b265b6df1b3752c4d1d83024b93da9bd599df74b9d9ffcf7919b", size = 17943, upload-time = "2024-10-08T10:39:35.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/50/25/da1f0b4dd970e52bf5a36c204c107e11a0c6d3ed195eba0bfbc664c312b2/aiofile-3.9.0-py3-none-any.whl", hash = "sha256:ce2f6c1571538cbdfa0143b04e16b208ecb0e9cb4148e528af8a640ed51cc8aa", size = 19539, upload-time = "2024-10-08T10:39:32.955Z" }, +] + [[package]] name = "aiohappyeyeballs" version = "2.6.1" @@ -179,7 +203,7 @@ wheels = [ [[package]] name = "anthropic" -version = "0.79.0" +version = "0.84.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -191,9 +215,9 @@ dependencies = [ { name = "sniffio" }, { name = "typing-extensions" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/15/b1/91aea3f8fd180d01d133d931a167a78a3737b3fd39ccef2ae8d6619c24fd/anthropic-0.79.0.tar.gz", hash = "sha256:8707aafb3b1176ed6c13e2b1c9fb3efddce90d17aee5d8b83a86c70dcdcca871", size = 509825, upload-time = "2026-02-07T18:06:18.388Z" } +sdist = { url = "https://files.pythonhosted.org/packages/04/ea/0869d6df9ef83dcf393aeefc12dd81677d091c6ffc86f783e51cf44062f2/anthropic-0.84.0.tar.gz", hash = "sha256:72f5f90e5aebe62dca316cb013629cfa24996b0f5a4593b8c3d712bc03c43c37", size = 539457, upload-time = "2026-02-25T05:22:38.54Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/95/b2/cc0b8e874a18d7da50b0fda8c99e4ac123f23bf47b471827c5f6f3e4a767/anthropic-0.79.0-py3-none-any.whl", hash = "sha256:04cbd473b6bbda4ca2e41dd670fe2f829a911530f01697d0a1e37321eb75f3cf", size = 405918, upload-time = "2026-02-07T18:06:20.246Z" }, + { url = "https://files.pythonhosted.org/packages/64/ca/218fa25002a332c0aa149ba18ffc0543175998b1f65de63f6d106689a345/anthropic-0.84.0-py3-none-any.whl", hash = "sha256:861c4c50f91ca45f942e091d83b60530ad6d4f98733bfe648065364da05d29e7", size = 455156, upload-time = "2026-02-25T05:22:40.468Z" }, ] [[package]] @@ -255,6 +279,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, ] +[[package]] +name = "authlib" +version = "1.6.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/af/98/00d3dd826d46959ad8e32af2dbb2398868fd9fd0683c26e56d0789bd0e68/authlib-1.6.9.tar.gz", hash = "sha256:d8f2421e7e5980cc1ddb4e32d3f5fa659cfaf60d8eaf3281ebed192e4ab74f04", size = 165134, upload-time = "2026-03-02T07:44:01.998Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/53/23/b65f568ed0c22f1efacb744d2db1a33c8068f384b8c9b482b52ebdbc3ef6/authlib-1.6.9-py2.py3-none-any.whl", hash = "sha256:f08b4c14e08f0861dc18a32357b33fbcfd2ea86cfe3fe149484b4d764c4a0ac3", size = 244197, upload-time = "2026-03-02T07:44:00.307Z" }, +] + [[package]] name = "babel" version = "2.18.0" @@ -282,6 +318,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5", size = 12313, upload-time = "2025-07-02T02:27:14.263Z" }, ] +[[package]] +name = "backports-tarfile" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/86/72/cd9b395f25e290e633655a100af28cb253e4393396264a98bd5f5951d50f/backports_tarfile-1.2.0.tar.gz", hash = "sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991", size = 86406, upload-time = "2024-05-28T17:01:54.731Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/fa/123043af240e49752f1c4bd24da5053b6bd00cad78c2be53c0d1e8b975bc/backports.tarfile-1.2.0-py3-none-any.whl", hash = "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34", size = 30181, upload-time = "2024-05-28T17:01:53.112Z" }, +] + [[package]] name = "bcrypt" version = "5.0.0" @@ -352,6 +397,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e4/f8/972c96f5a2b6c4b3deca57009d93e946bbdbe2241dca9806d502f29dd3ee/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:6b8f520b61e8781efee73cba14e3e8c9556ccfb375623f4f97429544734545b4", size = 273375, upload-time = "2025-09-25T19:50:45.43Z" }, ] +[[package]] +name = "beartype" +version = "0.22.9" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/c7/94/1009e248bbfbab11397abca7193bea6626806be9a327d399810d523a07cb/beartype-0.22.9.tar.gz", hash = "sha256:8f82b54aa723a2848a56008d18875f91c1db02c32ef6a62319a002e3e25a975f", size = 1608866, upload-time = "2025-12-13T06:50:30.72Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/71/cc/18245721fa7747065ab478316c7fea7c74777d07f37ae60db2e84f8172e8/beartype-0.22.9-py3-none-any.whl", hash = "sha256:d16c9bbc61ea14637596c5f6fbff2ee99cbe3573e46a716401734ef50c3060c2", size = 1333658, upload-time = "2025-12-13T06:50:28.266Z" }, +] + [[package]] name = "bm25s" version = "0.2.14" @@ -416,6 +470,44 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c5/0d/84a4380f930db0010168e0aa7b7a8fed9ba1835a8fbb1472bc6d0201d529/build-1.4.0-py3-none-any.whl", hash = "sha256:6a07c1b8eb6f2b311b96fcbdbce5dab5fe637ffda0fd83c9cac622e927501596", size = 24141, upload-time = "2026-01-08T16:41:46.453Z" }, ] +[[package]] +name = "cachetools" +version = "7.0.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/af/dd/57fe3fdb6e65b25a5987fd2cdc7e22db0aef508b91634d2e57d22928d41b/cachetools-7.0.5.tar.gz", hash = "sha256:0cd042c24377200c1dcd225f8b7b12b0ca53cc2c961b43757e774ebe190fd990", size = 37367, upload-time = "2026-03-09T20:51:29.451Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/06/f3/39cf3367b8107baa44f861dc802cbf16263c945b62d8265d36034fc07bea/cachetools-7.0.5-py3-none-any.whl", hash = "sha256:46bc8ebefbe485407621d0a4264b23c080cedd913921bad7ac3ed2f26c183114", size = 13918, upload-time = "2026-03-09T20:51:27.33Z" }, +] + +[[package]] +name = "caio" +version = "0.9.25" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/92/88/b8527e1b00c1811db339a1df8bd1ae49d146fcea9d6a5c40e3a80aaeb38d/caio-0.9.25.tar.gz", hash = "sha256:16498e7f81d1d0f5a4c0ad3f2540e65fe25691376e0a5bd367f558067113ed10", size = 26781, 
upload-time = "2025-12-26T15:21:36.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/80/ea4ead0c5d52a9828692e7df20f0eafe8d26e671ce4883a0a146bb91049e/caio-0.9.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ca6c8ecda611478b6016cb94d23fd3eb7124852b985bdec7ecaad9f3116b9619", size = 36836, upload-time = "2025-12-26T15:22:04.662Z" }, + { url = "https://files.pythonhosted.org/packages/17/b9/36715c97c873649d1029001578f901b50250916295e3dddf20c865438865/caio-0.9.25-cp310-cp310-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:db9b5681e4af8176159f0d6598e73b2279bb661e718c7ac23342c550bd78c241", size = 79695, upload-time = "2025-12-26T15:22:18.818Z" }, + { url = "https://files.pythonhosted.org/packages/0b/ab/07080ecb1adb55a02cbd8ec0126aa8e43af343ffabb6a71125b42670e9a1/caio-0.9.25-cp310-cp310-manylinux_2_34_aarch64.whl", hash = "sha256:bf61d7d0c4fd10ffdd98ca47f7e8db4d7408e74649ffaf4bef40b029ada3c21b", size = 79457, upload-time = "2026-03-04T22:08:16.024Z" }, + { url = "https://files.pythonhosted.org/packages/88/95/dd55757bb671eb4c376e006c04e83beb413486821f517792ea603ef216e9/caio-0.9.25-cp310-cp310-manylinux_2_34_x86_64.whl", hash = "sha256:ab52e5b643f8bbd64a0605d9412796cd3464cb8ca88593b13e95a0f0b10508ae", size = 77705, upload-time = "2026-03-04T22:08:17.202Z" }, + { url = "https://files.pythonhosted.org/packages/ec/90/543f556fcfcfa270713eef906b6352ab048e1e557afec12925c991dc93c2/caio-0.9.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d6956d9e4a27021c8bd6c9677f3a59eb1d820cc32d0343cea7961a03b1371965", size = 36839, upload-time = "2025-12-26T15:21:40.267Z" }, + { url = "https://files.pythonhosted.org/packages/51/3b/36f3e8ec38dafe8de4831decd2e44c69303d2a3892d16ceda42afed44e1b/caio-0.9.25-cp311-cp311-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bf84bfa039f25ad91f4f52944452a5f6f405e8afab4d445450978cd6241d1478", size = 
80255, upload-time = "2025-12-26T15:22:20.271Z" }, + { url = "https://files.pythonhosted.org/packages/df/ce/65e64867d928e6aff1b4f0e12dba0ef6d5bf412c240dc1df9d421ac10573/caio-0.9.25-cp311-cp311-manylinux_2_34_aarch64.whl", hash = "sha256:ae3d62587332bce600f861a8de6256b1014d6485cfd25d68c15caf1611dd1f7c", size = 80052, upload-time = "2026-03-04T22:08:20.402Z" }, + { url = "https://files.pythonhosted.org/packages/46/90/e278863c47e14ec58309aa2e38a45882fbe67b4cc29ec9bc8f65852d3e45/caio-0.9.25-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:fc220b8533dcf0f238a6b1a4a937f92024c71e7b10b5a2dfc1c73604a25709bc", size = 78273, upload-time = "2026-03-04T22:08:21.368Z" }, + { url = "https://files.pythonhosted.org/packages/d3/25/79c98ebe12df31548ba4eaf44db11b7cad6b3e7b4203718335620939083c/caio-0.9.25-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fb7ff95af4c31ad3f03179149aab61097a71fd85e05f89b4786de0359dffd044", size = 36983, upload-time = "2025-12-26T15:21:36.075Z" }, + { url = "https://files.pythonhosted.org/packages/a3/2b/21288691f16d479945968a0a4f2856818c1c5be56881d51d4dac9b255d26/caio-0.9.25-cp312-cp312-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:97084e4e30dfa598449d874c4d8e0c8d5ea17d2f752ef5e48e150ff9d240cd64", size = 82012, upload-time = "2025-12-26T15:22:20.983Z" }, + { url = "https://files.pythonhosted.org/packages/03/c4/8a1b580875303500a9c12b9e0af58cb82e47f5bcf888c2457742a138273c/caio-0.9.25-cp312-cp312-manylinux_2_34_aarch64.whl", hash = "sha256:4fa69eba47e0f041b9d4f336e2ad40740681c43e686b18b191b6c5f4c5544bfb", size = 81502, upload-time = "2026-03-04T22:08:22.381Z" }, + { url = "https://files.pythonhosted.org/packages/d1/1c/0fe770b8ffc8362c48134d1592d653a81a3d8748d764bec33864db36319d/caio-0.9.25-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:6bebf6f079f1341d19f7386db9b8b1f07e8cc15ae13bfdaff573371ba0575d69", size = 80200, upload-time = "2026-03-04T22:08:23.382Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/57/5e6ff127e6f62c9f15d989560435c642144aa4210882f9494204bc892305/caio-0.9.25-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d6c2a3411af97762a2b03840c3cec2f7f728921ff8adda53d7ea2315a8563451", size = 36979, upload-time = "2025-12-26T15:21:35.484Z" }, + { url = "https://files.pythonhosted.org/packages/a3/9f/f21af50e72117eb528c422d4276cbac11fb941b1b812b182e0a9c70d19c5/caio-0.9.25-cp313-cp313-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0998210a4d5cd5cb565b32ccfe4e53d67303f868a76f212e002a8554692870e6", size = 81900, upload-time = "2025-12-26T15:22:21.919Z" }, + { url = "https://files.pythonhosted.org/packages/9c/12/c39ae2a4037cb10ad5eb3578eb4d5f8c1a2575c62bba675f3406b7ef0824/caio-0.9.25-cp313-cp313-manylinux_2_34_aarch64.whl", hash = "sha256:1a177d4777141b96f175fe2c37a3d96dec7911ed9ad5f02bac38aaa1c936611f", size = 81523, upload-time = "2026-03-04T22:08:25.187Z" }, + { url = "https://files.pythonhosted.org/packages/22/59/f8f2e950eb4f1a5a3883e198dca514b9d475415cb6cd7b78b9213a0dd45a/caio-0.9.25-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:9ed3cfb28c0e99fec5e208c934e5c157d0866aa9c32aa4dc5e9b6034af6286b7", size = 80243, upload-time = "2026-03-04T22:08:26.449Z" }, + { url = "https://files.pythonhosted.org/packages/69/ca/a08fdc7efdcc24e6a6131a93c85be1f204d41c58f474c42b0670af8c016b/caio-0.9.25-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:fab6078b9348e883c80a5e14b382e6ad6aabbc4429ca034e76e730cf464269db", size = 36978, upload-time = "2025-12-26T15:21:41.055Z" }, + { url = "https://files.pythonhosted.org/packages/5e/6c/d4d24f65e690213c097174d26eda6831f45f4734d9d036d81790a27e7b78/caio-0.9.25-cp314-cp314-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:44a6b58e52d488c75cfaa5ecaa404b2b41cc965e6c417e03251e868ecd5b6d77", size = 81832, upload-time = "2025-12-26T15:22:22.757Z" }, + { url = 
"https://files.pythonhosted.org/packages/87/a4/e534cf7d2d0e8d880e25dd61e8d921ffcfe15bd696734589826f5a2df727/caio-0.9.25-cp314-cp314-manylinux_2_34_aarch64.whl", hash = "sha256:628a630eb7fb22381dd8e3c8ab7f59e854b9c806639811fc3f4310c6bd711d79", size = 81565, upload-time = "2026-03-04T22:08:27.483Z" }, + { url = "https://files.pythonhosted.org/packages/3f/ed/bf81aeac1d290017e5e5ac3e880fd56ee15e50a6d0353986799d1bc5cfd5/caio-0.9.25-cp314-cp314-manylinux_2_34_x86_64.whl", hash = "sha256:0ba16aa605ccb174665357fc729cf500679c2d94d5f1458a6f0d5ca48f2060a7", size = 80071, upload-time = "2026-03-04T22:08:28.751Z" }, + { url = "https://files.pythonhosted.org/packages/86/93/1f76c8d1bafe3b0614e06b2195784a3765bbf7b0a067661af9e2dd47fc33/caio-0.9.25-py3-none-any.whl", hash = "sha256:06c0bb02d6b929119b1cfbe1ca403c768b2013a369e2db46bfa2a5761cf82e40", size = 19087, upload-time = "2025-12-26T15:22:00.221Z" }, +] + [[package]] name = "certifi" version = "2026.1.4" @@ -653,7 +745,7 @@ wheels = [ [[package]] name = "cohere" -version = "5.20.5" +version = "5.20.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "fastavro" }, @@ -665,9 +757,9 @@ dependencies = [ { name = "types-requests" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cf/e9/a977a2f7093912e5bb7065589c913ad6887a5bdb3474886347fca53ed283/cohere-5.20.5.tar.gz", hash = "sha256:9db82263cf3c54a35b9bf44faf39e4b7fc3fc51a32a2634fc3680b99de069f31", size = 184819, upload-time = "2026-02-11T17:47:59.403Z" } +sdist = { url = "https://files.pythonhosted.org/packages/44/0b/96e2b55a0114ed9d69b3154565f54b764e7530735426290b000f467f4c0f/cohere-5.20.7.tar.gz", hash = "sha256:997ed85fabb3a1e4a4c036fdb520382e7bfa670db48eb59a026803b6f7061dbb", size = 184986, upload-time = "2026-02-25T01:22:18.673Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5b/83/70eccefd0608582bf7af6e99c69a56ee14048de741ba338824011f3bcf83/cohere-5.20.5-py3-none-any.whl", hash = 
"sha256:25b2ceae8ea52ed7f4a5f76da854f75536c348767d25ae5f9608eddb6945ee64", size = 323215, upload-time = "2026-02-11T17:47:57.803Z" }, + { url = "https://files.pythonhosted.org/packages/9d/86/dc991a75e3b9c2007b90dbfaf7f36fdb2457c216f799e26ce0474faf0c1f/cohere-5.20.7-py3-none-any.whl", hash = "sha256:043fef2a12c30c07e9b2c1f0b869fd66ffd911f58d1492f87e901c4190a65914", size = 323389, upload-time = "2026-02-25T01:22:16.902Z" }, ] [[package]] @@ -774,6 +866,23 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/bc/58/6b3d24e6b9bc474a2dcdee65dfd1f008867015408a271562e4b690561a4d/cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7", size = 3407605, upload-time = "2026-02-10T19:18:29.233Z" }, ] +[[package]] +name = "cyclopts" +version = "4.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "docstring-parser" }, + { name = "rich" }, + { name = "rich-rst" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/33/7a/3c3623755561c7f283dd769470e99ae36c46810bf3b3f264d69006f6c97a/cyclopts-4.8.0.tar.gz", hash = "sha256:92cc292d18d8be372e58d8bce1aa966d30f819a5fb3fee02bd2ad4a6bb403f29", size = 164066, upload-time = "2026-03-07T19:39:18.122Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/01/6ec7210775ea5e4989a10d89eda6c5ea7ff06caa614231ad533d74fecac8/cyclopts-4.8.0-py3-none-any.whl", hash = "sha256:ef353da05fec36587d4ebce7a6e4b27515d775d184a23bab4b01426f93ddc8d4", size = 201948, upload-time = "2026-03-07T19:39:19.307Z" }, +] + [[package]] name = "decorator" version = "5.2.1" @@ -801,6 +910,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = 
"sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, ] +[[package]] +name = "dnspython" +version = "2.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/8b/57666417c0f90f08bcafa776861060426765fdb422eb10212086fb811d26/dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f", size = 368251, upload-time = "2025-09-07T18:58:00.022Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/5a/18ad964b0086c6e62e2e7500f7edc89e3faa45033c71c1893d34eed2b2de/dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af", size = 331094, upload-time = "2025-09-07T18:57:58.071Z" }, +] + [[package]] name = "docstring-parser" version = "0.17.0" @@ -845,6 +963,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b0/0d/9feae160378a3553fa9a339b0e9c1a048e147a4127210e286ef18b730f03/durationpy-0.10-py3-none-any.whl", hash = "sha256:3b41e1b601234296b4fb368338fdcd3e13e0b4fb5b67345948f4f2bf9868b286", size = 3922, upload-time = "2025-05-17T13:52:36.463Z" }, ] +[[package]] +name = "email-validator" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dnspython" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f5/22/900cb125c76b7aaa450ce02fd727f452243f2e91a61af068b40adba60ea9/email_validator-2.3.0.tar.gz", hash = "sha256:9fc05c37f2f6cf439ff414f8fc46d917929974a82244c20eb10231ba60c54426", size = 51238, upload-time = "2025-08-26T13:09:06.831Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/15/545e2b6cf2e3be84bc1ed85613edd75b8aea69807a71c26f4ca6a9258e82/email_validator-2.3.0-py3-none-any.whl", hash = "sha256:80f13f623413e6b197ae73bb10bf4eb0908faf509ad8362c5edeb0be7fd450b4", size = 35604, upload-time = "2025-08-26T13:09:05.858Z" 
}, +] + [[package]] name = "et-xmlfile" version = "2.0.0" @@ -868,7 +999,7 @@ name = "exceptiongroup" version = "1.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" } wheels = [ @@ -884,20 +1015,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c1/ea/53f2148663b321f21b5a606bd5f191517cf40b7072c0497d3c92c4a13b1e/executing-2.2.1-py2.py3-none-any.whl", hash = "sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017", size = 28317, upload-time = "2025-09-01T09:48:08.5Z" }, ] -[[package]] -name = "fasta2a" -version = "0.2.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "opentelemetry-api" }, - { name = "pydantic" }, - { name = "starlette" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/39/5e/2e65de871869530be7b575443ca7cda695c44026c50d37e20d7024b3d74f/fasta2a-0.2.4.tar.gz", hash = "sha256:7afa3fd3205ce3f299b68b2705d285380328a7a4cbd2d230230e66b4701743e8", size = 11558, upload-time = "2025-05-14T20:58:35.43Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/29/038b490aca2efdf1fc96224e75acbf6beefd577f12d156729f4b9d721650/fasta2a-0.2.4-py3-none-any.whl", hash = "sha256:3d8b71124739030512c6880d8a78d37bd6c9a099b42d3a772f62747ecef829e9", size = 14369, upload-time = "2025-05-14T20:58:21.095Z" }, -] - [[package]] name = "fastavro" version = "1.12.1" @@ -945,6 +1062,38 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/fa/93/b44f67589e4d439913dab6720f7e3507b0fa8b8e56d06f6fc875ced26afb/fastavro-1.12.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:43ded16b3f4a9f1a42f5970c2aa618acb23ea59c4fcaa06680bdf470b255e5a8", size = 3386636, upload-time = "2025-10-10T15:42:18.974Z" }, ] +[[package]] +name = "fastmcp" +version = "3.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "authlib" }, + { name = "cyclopts" }, + { name = "exceptiongroup" }, + { name = "httpx" }, + { name = "jsonref" }, + { name = "jsonschema-path" }, + { name = "mcp" }, + { name = "openapi-pydantic" }, + { name = "opentelemetry-api" }, + { name = "packaging" }, + { name = "platformdirs" }, + { name = "py-key-value-aio", extra = ["filetree", "keyring", "memory"] }, + { name = "pydantic", extra = ["email"] }, + { name = "pyperclip" }, + { name = "python-dotenv" }, + { name = "pyyaml" }, + { name = "rich" }, + { name = "uncalled-for" }, + { name = "uvicorn" }, + { name = "watchfiles" }, + { name = "websockets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0a/70/862026c4589441f86ad3108f05bfb2f781c6b322ad60a982f40b303b47d7/fastmcp-3.1.0.tar.gz", hash = "sha256:e25264794c734b9977502a51466961eeecff92a0c2f3b49c40c070993628d6d0", size = 17347083, upload-time = "2026-03-03T02:43:11.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/07/516f5b20d88932e5a466c2216b628e5358a71b3a9f522215607c3281de05/fastmcp-3.1.0-py3-none-any.whl", hash = "sha256:b1f73b56fd3b0cb2bd9e2a144fc650d5cc31587ed129d996db7710e464ae8010", size = 633749, upload-time = "2026-03-03T02:43:09.06Z" }, +] + [[package]] name = "fastuuid" version = "0.14.0" @@ -1155,6 +1304,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e6/ab/fb21f4c939bb440104cc2b396d3be1d9b7a9fd3c6c2a53d98c45b3d7c954/fsspec-2026.2.0-py3-none-any.whl", hash = "sha256:98de475b5cb3bd66bedd5c4679e87b4fdfe1a3bf4d707b151b3c07e58c9a2437", size = 202505, upload-time = 
"2026-02-05T21:50:51.819Z" }, ] +[[package]] +name = "genai-prices" +version = "0.0.55" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx" }, + { name = "pydantic" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/77/67/de9d9be180db6d80b298c281dff71502095c0776d7cc9286f486f667f61a/genai_prices-0.0.55.tar.gz", hash = "sha256:8692c65d0deefe2ad0680d71841eb12822a35945a6060d2b6adbcbdf4945e1cb", size = 59987, upload-time = "2026-02-26T17:56:41.467Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c4/98/66a06b82a5c840f896490d5ef9c7691776b147589f2e8d2fa66c67a3db9c/genai_prices-0.0.55-py3-none-any.whl", hash = "sha256:ccd795c90c926b3c71066bf5656f14c67fc11fdba6d71e072c7fb4fa311e1b12", size = 62603, upload-time = "2026-02-26T17:56:40.502Z" }, +] + [[package]] name = "google-auth" version = "2.48.0" @@ -1169,6 +1331,32 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/83/1d/d6466de3a5249d35e832a52834115ca9d1d0de6abc22065f049707516d47/google_auth-2.48.0-py3-none-any.whl", hash = "sha256:2e2a537873d449434252a9632c28bfc268b0adb1e53f9fb62afc5333a975903f", size = 236499, upload-time = "2026-01-26T19:22:45.099Z" }, ] +[package.optional-dependencies] +requests = [ + { name = "requests" }, +] + +[[package]] +name = "google-genai" +version = "1.66.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "distro" }, + { name = "google-auth", extra = ["requests"] }, + { name = "httpx" }, + { name = "pydantic" }, + { name = "requests" }, + { name = "sniffio" }, + { name = "tenacity" }, + { name = "typing-extensions" }, + { name = "websockets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9b/ba/0b343b0770d4710ad2979fd9301d7caa56c940174d5361ed4a7cc4979241/google_genai-1.66.0.tar.gz", hash = "sha256:ffc01647b65046bca6387320057aa51db0ad64bcc72c8e3e914062acfa5f7c49", size = 504386, upload-time = "2026-03-04T22:15:28.156Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/d1/dd/403949d922d4e261b08b64aaa132af4e456c3b15c8e2a2d9e6ef693f66e2/google_genai-1.66.0-py3-none-any.whl", hash = "sha256:7f127a39cf695277104ce4091bb26e417c59bb46e952ff3699c3a982d9c474ee", size = 732174, upload-time = "2026-03-04T22:15:26.63Z" }, +] + [[package]] name = "googleapis-common-protos" version = "1.72.0" @@ -1193,6 +1381,14 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/9c/83/3b1d03d36f224edded98e9affd0467630fc09d766c0e56fb1498cbb04a9b/griffe-1.15.0-py3-none-any.whl", hash = "sha256:6f6762661949411031f5fcda9593f586e6ce8340f0ba88921a0f2ef7a81eb9a3", size = 150705, upload-time = "2025-11-10T15:03:13.549Z" }, ] +[[package]] +name = "griffelib" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/51/c936033e16d12b627ea334aaaaf42229c37620d0f15593456ab69ab48161/griffelib-2.0.0-py3-none-any.whl", hash = "sha256:01284878c966508b6d6f1dbff9b6fa607bc062d8261c5c7253cb285b06422a7f", size = 142004, upload-time = "2026-02-09T19:09:40.561Z" }, +] + [[package]] name = "groq" version = "1.0.0" @@ -1554,6 +1750,42 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c", size = 8074, upload-time = "2025-01-17T11:24:33.271Z" }, ] +[[package]] +name = "jaraco-classes" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "more-itertools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/c0/ed4a27bc5571b99e3cff68f8a9fa5b56ff7df1c2251cc715a652ddd26402/jaraco.classes-3.4.0.tar.gz", hash = "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd", size = 11780, upload-time = "2024-03-31T07:27:36.643Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/7f/66/b15ce62552d84bbfcec9a4873ab79d993a1dd4edb922cbfccae192bd5b5f/jaraco.classes-3.4.0-py3-none-any.whl", hash = "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790", size = 6777, upload-time = "2024-03-31T07:27:34.792Z" }, +] + +[[package]] +name = "jaraco-context" +version = "6.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "backports-tarfile", marker = "python_full_version < '3.12'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/27/7b/c3081ff1af947915503121c649f26a778e1a2101fd525f74aef997d75b7e/jaraco_context-6.1.1.tar.gz", hash = "sha256:bc046b2dc94f1e5532bd02402684414575cc11f565d929b6563125deb0a6e581", size = 15832, upload-time = "2026-03-07T15:46:04.63Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/49/c152890d49102b280ecf86ba5f80a8c111c3a155dafa3bd24aeb64fde9e1/jaraco_context-6.1.1-py3-none-any.whl", hash = "sha256:0df6a0287258f3e364072c3e40d5411b20cafa30cb28c4839d24319cecf9f808", size = 7005, upload-time = "2026-03-07T15:46:03.515Z" }, +] + +[[package]] +name = "jaraco-functools" +version = "4.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "more-itertools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0f/27/056e0638a86749374d6f57d0b0db39f29509cce9313cf91bdc0ac4d91084/jaraco_functools-4.4.0.tar.gz", hash = "sha256:da21933b0417b89515562656547a77b4931f98176eb173644c0d35032a33d6bb", size = 19943, upload-time = "2025-12-21T09:29:43.6Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fd/c4/813bb09f0985cb21e959f21f2464169eca882656849adf727ac7bb7e1767/jaraco_functools-4.4.0-py3-none-any.whl", hash = "sha256:9eec1e36f45c818d9bf307c8948eb03b2b56cd44087b3cdc989abca1f20b9176", size = 10481, upload-time = "2025-12-21T09:29:42.27Z" }, +] + [[package]] name = "jedi" version = "0.19.2" @@ -1566,6 +1798,15 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278, upload-time = "2024-11-11T01:41:40.175Z" }, ] +[[package]] +name = "jeepney" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/6f/357efd7602486741aa73ffc0617fb310a29b588ed0fd69c2399acbb85b0c/jeepney-0.9.0.tar.gz", hash = "sha256:cf0e9e845622b81e4a28df94c40345400256ec608d0e55bb8a3feaa9163f5732", size = 106758, upload-time = "2025-02-27T18:51:01.684Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b2/a3/e137168c9c44d18eff0376253da9f1e9234d0239e0ee230d2fee6cea8e55/jeepney-0.9.0-py3-none-any.whl", hash = "sha256:97e5714520c16fc0a45695e5365a2e11b81ea79bba796e26f9f1d178cb182683", size = 49010, upload-time = "2025-02-27T18:51:00.104Z" }, +] + [[package]] name = "jinja2" version = "3.1.6" @@ -1747,6 +1988,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl", hash = "sha256:d489f15263b8d200f8387e64b4c3a75f06629559fb73deb8fdfb525f2dab50ce", size = 90630, upload-time = "2026-01-07T13:41:05.306Z" }, ] +[[package]] +name = "jsonschema-path" +version = "0.4.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pathable" }, + { name = "pyyaml" }, + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/8a/7e6102f2b8bdc6705a9eb5294f8f6f9ccd3a8420e8e8e19671d1dd773251/jsonschema_path-0.4.5.tar.gz", hash = "sha256:c6cd7d577ae290c7defd4f4029e86fdb248ca1bd41a07557795b3c95e5144918", size = 15113, upload-time = "2026-03-03T09:56:46.87Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/04/d5/4e96c44f6c1ea3d812cf5391d81a4f5abaa540abf8d04ecd7f66e0ed11df/jsonschema_path-0.4.5-py3-none-any.whl", hash = "sha256:7d77a2c3f3ec569a40efe5c5f942c44c1af2a6f96fe0866794c9ef5b8f87fd65", size = 19368, upload-time = "2026-03-03T09:56:45.39Z" }, +] + [[package]] name = "jsonschema-specifications" version = "2025.9.1" @@ -1759,6 +2014,24 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, ] +[[package]] +name = "keyring" +version = "25.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata", marker = "python_full_version < '3.12'" }, + { name = "jaraco-classes" }, + { name = "jaraco-context" }, + { name = "jaraco-functools" }, + { name = "jeepney", marker = "sys_platform == 'linux'" }, + { name = "pywin32-ctypes", marker = "sys_platform == 'win32'" }, + { name = "secretstorage", marker = "sys_platform == 'linux'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/43/4b/674af6ef2f97d56f0ab5153bf0bfa28ccb6c3ed4d1babf4305449668807b/keyring-25.7.0.tar.gz", hash = "sha256:fe01bd85eb3f8fb3dd0405defdeac9a5b4f6f0439edbb3149577f244a2e8245b", size = 63516, upload-time = "2025-11-16T16:26:09.482Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/db/e655086b7f3a705df045bf0933bdd9c2f79bb3c97bfef1384598bb79a217/keyring-25.7.0-py3-none-any.whl", hash = "sha256:be4a0b195f149690c166e850609a477c532ddbfbaed96a404d4e43f8d5e2689f", size = 39160, upload-time = "2025-11-16T16:26:08.402Z" }, +] + [[package]] name = "kubernetes" version = "35.0.0" @@ -1925,6 +2198,30 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/96/b5/9ab657ef5cc61bea0054c2a829282c1a2b7528ea4e64ada57b3fe36686a6/litellm-1.81.11-py3-none-any.whl", hash = "sha256:06a66c24742e082ddd2813c87f40f5c12fe7baa73ce1f9457eaf453dc44a0f65", size = 14491673, upload-time = "2026-02-13T01:35:35.745Z" }, ] +[[package]] +name = "logfire" +version = "4.28.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "executing" }, + { name = "opentelemetry-exporter-otlp-proto-http" }, + { name = "opentelemetry-instrumentation" }, + { name = "opentelemetry-sdk" }, + { name = "protobuf" }, + { name = "rich" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/82/83/5e0569754b545cdf381ea0e56ff347944a0be42bfa7036c5c6a520a35ff1/logfire-4.28.0.tar.gz", hash = "sha256:11cf93cd02b58ed1090d5962b3222ac2555cf3c41b4851436689709af8c0e69b", size = 1057109, upload-time = "2026-03-11T16:23:41.335Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/ad/a8407cb281500713122ae3d7ec194b99a73afa89a58c9174239375ffac33/logfire-4.28.0-py3-none-any.whl", hash = "sha256:30f9cd969ccaf4eafcce4fe46b8bcc4e5967a722d64ae8c7f9c47e0905ec2949", size = 302168, upload-time = "2026-03-11T16:23:38.299Z" }, +] + +[package.optional-dependencies] +httpx = [ + { name = "opentelemetry-instrumentation-httpx" }, +] + [[package]] name = "logfire-api" version = "4.23.0" @@ -2243,6 +2540,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6a/fc/0e61d9a4e29c8679356795a40e48f647b4aad58d71bfc969f0f8f56fb912/mmh3-5.2.0-cp314-cp314t-win_arm64.whl", hash = "sha256:e7884931fe5e788163e7b3c511614130c2c59feffdc21112290a194487efb2e9", size = 40455, upload-time = "2025-07-29T07:43:29.563Z" }, ] +[[package]] +name = "more-itertools" +version = "10.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/ea/5d/38b681d3fce7a266dd9ab73c66959406d565b3e85f21d5e66e1181d93721/more_itertools-10.8.0.tar.gz", hash = "sha256:f638ddf8a1a0d134181275fb5d58b086ead7c6a72429ad725c67503f13ba30bd", size = 137431, upload-time = "2025-09-02T15:23:11.018Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/8e/469e5a4a2f5855992e425f3cb33804cc07bf18d48f2db061aec61ce50270/more_itertools-10.8.0-py3-none-any.whl", hash = "sha256:52d4362373dcf7c52546bc4af9a86ee7c4579df9a8dc268be0a2f949d376cc9b", size = 69667, upload-time = "2025-09-02T15:23:09.635Z" }, +] + [[package]] name = "mpmath" version = "1.3.0" @@ -2433,6 +2739,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d3/ac/686789b9145413f1a61878c407210e41bfdb097976864e0913078b24098c/myst_parser-5.0.0-py3-none-any.whl", hash = "sha256:ab31e516024918296e169139072b81592336f2fef55b8986aa31c9f04b5f7211", size = 84533, upload-time = "2026-01-15T09:08:16.788Z" }, ] +[[package]] +name = "nexus-rpc" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/50/95d7bc91f900da5e22662c82d9bf0f72a4b01f2a552708bf2f43807707a1/nexus_rpc-1.2.0.tar.gz", hash = "sha256:b4ddaffa4d3996aaeadf49b80dfcdfbca48fe4cb616defaf3b3c5c2c8fc61890", size = 74142, upload-time = "2025-11-17T19:17:06.798Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/04/eaac430d0e6bf21265ae989427d37e94be5e41dc216879f1fbb6c5339942/nexus_rpc-1.2.0-py3-none-any.whl", hash = "sha256:977876f3af811ad1a09b2961d3d1ac9233bda43ff0febbb0c9906483b9d9f8a3", size = 28166, upload-time = "2025-11-17T19:17:05.64Z" }, +] + [[package]] name = "numpy" version = "2.2.6" @@ -2628,7 +2946,7 @@ wheels = [ [[package]] name = "openai" -version = "2.20.0" +version = "2.26.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -2640,14 +2958,14 @@ dependencies = [ 
{ name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6e/5a/f495777c02625bfa18212b6e3b73f1893094f2bf660976eb4bc6f43a1ca2/openai-2.20.0.tar.gz", hash = "sha256:2654a689208cd0bf1098bb9462e8d722af5cbe961e6bba54e6f19fb843d88db1", size = 642355, upload-time = "2026-02-10T19:02:54.145Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/91/2a06c4e9597c338cac1e5e5a8dd6f29e1836fc229c4c523529dca387fda8/openai-2.26.0.tar.gz", hash = "sha256:b41f37c140ae0034a6e92b0c509376d907f3a66109935fba2c1b471a7c05a8fb", size = 666702, upload-time = "2026-03-05T23:17:35.874Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/a0/cf4297aa51bbc21e83ef0ac018947fa06aea8f2364aad7c96cbf148590e6/openai-2.20.0-py3-none-any.whl", hash = "sha256:38d989c4b1075cd1f76abc68364059d822327cf1a932531d429795f4fc18be99", size = 1098479, upload-time = "2026-02-10T19:02:52.157Z" }, + { url = "https://files.pythonhosted.org/packages/c6/2e/3f73e8ca53718952222cacd0cf7eecc9db439d020f0c1fe7ae717e4e199a/openai-2.26.0-py3-none-any.whl", hash = "sha256:6151bf8f83802f036117f06cc8a57b3a4da60da9926826cc96747888b57f394f", size = 1136409, upload-time = "2026-03-05T23:17:34.072Z" }, ] [[package]] name = "openai-agents" -version = "0.8.4" +version = "0.12.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "griffe" }, @@ -2658,9 +2976,21 @@ dependencies = [ { name = "types-requests" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ed/e0/9fa9eac9baf2816bc63cee28967d35a7ed9dc2f25e9fd2004f48ed6c8820/openai_agents-0.8.4.tar.gz", hash = "sha256:5d4c4861aedd56a82b15c6ddf6c53031a39859a222f08bbd5645d5967efa05e8", size = 2389744, upload-time = "2026-02-11T19:14:30.75Z" } +sdist = { url = "https://files.pythonhosted.org/packages/76/2e/402d3bfd6432c503bab699ece49e6febe38c64ade3365ae4fe31e7b3cba1/openai_agents-0.12.0.tar.gz", hash = 
"sha256:086d5cd16815d40a88231cbfd9dcca594cdf8596c6efd4859dcbafdfb31068ba", size = 2604305, upload-time = "2026-03-12T08:52:42.925Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/55/dc/10df015aebb0797a8367aab65200ac4f5221df20bbae76930f5b6ac8e001/openai_agents-0.8.4-py3-none-any.whl", hash = "sha256:2383c6e8e59ed4146b89d1b6f53e34e55caf94bc14ae3fd704e7aad5021f4ff1", size = 380662, upload-time = "2026-02-11T19:14:28.864Z" }, + { url = "https://files.pythonhosted.org/packages/c1/2c/8f03b5a56329559573e692d6dc2f02c3cbbe4fcd07f9c5d81b3c280e80e7/openai_agents-0.12.0-py3-none-any.whl", hash = "sha256:24f5cc5d6213dfcda42188918ad0a739861aa505f4ef738ee07b69169faf5c09", size = 446876, upload-time = "2026-03-12T08:52:40.779Z" }, +] + +[[package]] +name = "openapi-pydantic" +version = "0.5.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/02/2e/58d83848dd1a79cb92ed8e63f6ba901ca282c5f09d04af9423ec26c56fd7/openapi_pydantic-0.5.1.tar.gz", hash = "sha256:ff6835af6bde7a459fb93eb93bb92b8749b754fc6e51b2f1590a19dc3005ee0d", size = 60892, upload-time = "2025-01-08T19:29:27.083Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/cf/03675d8bd8ecbf4445504d8071adab19f5f993676795708e36402ab38263/openapi_pydantic-0.5.1-py3-none-any.whl", hash = "sha256:a3a09ef4586f5bd760a8df7f43028b60cafb6d9f61de2acba9574766255ab146", size = 96381, upload-time = "2025-01-08T19:29:25.275Z" }, ] [[package]] @@ -2736,6 +3066,37 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/95/f1/b27d3e2e003cd9a3592c43d099d2ed8d0a947c15281bf8463a256db0b46c/opentelemetry_exporter_otlp_proto_http-1.39.1-py3-none-any.whl", hash = "sha256:d9f5207183dd752a412c4cd564ca8875ececba13be6e9c6c370ffb752fd59985", size = 19641, upload-time = "2025-12-11T13:32:22.248Z" }, ] +[[package]] +name = "opentelemetry-instrumentation" +version = "0.60b1" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "packaging" }, + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/41/0f/7e6b713ac117c1f5e4e3300748af699b9902a2e5e34c9cf443dde25a01fa/opentelemetry_instrumentation-0.60b1.tar.gz", hash = "sha256:57ddc7974c6eb35865af0426d1a17132b88b2ed8586897fee187fd5b8944bd6a", size = 31706, upload-time = "2025-12-11T13:36:42.515Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/d2/6788e83c5c86a2690101681aeef27eeb2a6bf22df52d3f263a22cee20915/opentelemetry_instrumentation-0.60b1-py3-none-any.whl", hash = "sha256:04480db952b48fb1ed0073f822f0ee26012b7be7c3eac1a3793122737c78632d", size = 33096, upload-time = "2025-12-11T13:35:33.067Z" }, +] + +[[package]] +name = "opentelemetry-instrumentation-httpx" +version = "0.60b1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-instrumentation" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "opentelemetry-util-http" }, + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/86/08/11208bcfcab4fc2023252c3f322aa397fd9ad948355fea60f5fc98648603/opentelemetry_instrumentation_httpx-0.60b1.tar.gz", hash = "sha256:a506ebaf28c60112cbe70ad4f0338f8603f148938cb7b6794ce1051cd2b270ae", size = 20611, upload-time = "2025-12-11T13:37:01.661Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/59/b98e84eebf745ffc75397eaad4763795bff8a30cbf2373a50ed4e70646c5/opentelemetry_instrumentation_httpx-0.60b1-py3-none-any.whl", hash = "sha256:f37636dd742ad2af83d896ba69601ed28da51fa4e25d1ab62fde89ce413e275b", size = 15701, upload-time = "2025-12-11T13:36:04.56Z" }, +] + [[package]] name = "opentelemetry-proto" version = "1.39.1" @@ -2775,6 +3136,15 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/7a/5e/5958555e09635d09b75de3c4f8b9cae7335ca545d77392ffe7331534c402/opentelemetry_semantic_conventions-0.60b1-py3-none-any.whl", hash = "sha256:9fa8c8b0c110da289809292b0591220d3a7b53c1526a23021e977d68597893fb", size = 219982, upload-time = "2025-12-11T13:32:36.955Z" }, ] +[[package]] +name = "opentelemetry-util-http" +version = "0.60b1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/50/fc/c47bb04a1d8a941a4061307e1eddfa331ed4d0ab13d8a9781e6db256940a/opentelemetry_util_http-0.60b1.tar.gz", hash = "sha256:0d97152ca8c8a41ced7172d29d3622a219317f74ae6bb3027cfbdcf22c3cc0d6", size = 11053, upload-time = "2025-12-11T13:37:25.115Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/16/5c/d3f1733665f7cd582ef0842fb1d2ed0bc1fba10875160593342d22bba375/opentelemetry_util_http-0.60b1-py3-none-any.whl", hash = "sha256:66381ba28550c91bee14dcba8979ace443444af1ed609226634596b4b0faf199", size = 8947, upload-time = "2025-12-11T13:36:37.151Z" }, +] + [[package]] name = "orjson" version = "3.11.7" @@ -2923,11 +3293,11 @@ wheels = [ [[package]] name = "packaging" -version = "26.0" +version = "25.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", 
hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, ] [[package]] @@ -2939,9 +3309,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b6/61/fae042894f4296ec49e3f193aff5d7c18440da9e48102c3315e1bc4519a7/parso-0.8.6-py2.py3-none-any.whl", hash = "sha256:2c549f800b70a5c4952197248825584cb00f033b29c692671d3bf08bf380baff", size = 106894, upload-time = "2026-02-09T15:45:21.391Z" }, ] +[[package]] +name = "pathable" +version = "0.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/55/b748445cb4ea6b125626f15379be7c96d1035d4fa3e8fee362fa92298abf/pathable-0.5.0.tar.gz", hash = "sha256:d81938348a1cacb525e7c75166270644782c0fb9c8cecc16be033e71427e0ef1", size = 16655, upload-time = "2026-02-20T08:47:00.748Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/96/5a770e5c461462575474468e5af931cff9de036e7c2b4fea23c1c58d2cbe/pathable-0.5.0-py3-none-any.whl", hash = "sha256:646e3d09491a6351a0c82632a09c02cdf70a252e73196b36d8a15ba0a114f0a6", size = 16867, upload-time = "2026-02-20T08:46:59.536Z" }, +] + [[package]] name = "pctx-client" -version = "0.3.0b1" +version = "0.3.0" source = { editable = "." 
} dependencies = [ { name = "docstring-parser" }, @@ -3000,9 +3379,9 @@ requires-dist = [ { name = "docstring-parser", specifier = ">=0.17.0" }, { name = "httpx", specifier = ">=0.28.1" }, { name = "langchain", marker = "extra == 'langchain'", specifier = ">=1.1.2" }, - { name = "openai-agents", marker = "extra == 'openai'", specifier = ">=0.6.2" }, + { name = "openai-agents", marker = "extra == 'openai'", specifier = ">=0.12.0" }, { name = "pydantic", specifier = ">=2.7.2" }, - { name = "pydantic-ai", marker = "extra == 'pydantic-ai'", specifier = ">=0.0.14" }, + { name = "pydantic-ai", marker = "extra == 'pydantic-ai'", specifier = ">=1.60.0" }, { name = "websockets", specifier = ">=15.0.1" }, ] provides-extras = ["langchain", "crewai", "openai", "pydantic-ai", "bm25s"] @@ -3017,8 +3396,8 @@ dev = [ { name = "litellm", specifier = ">=1.80.8" }, { name = "mcp", specifier = ">=1.25.0" }, { name = "myst-parser" }, - { name = "openai-agents", specifier = ">=0.6.2" }, - { name = "pydantic-ai", specifier = ">=0.0.14" }, + { name = "openai-agents", specifier = ">=0.12.0" }, + { name = "pydantic-ai", specifier = ">=1.60.0" }, { name = "pytest", specifier = ">=8.0.0" }, { name = "pytest-asyncio", specifier = ">=0.24.0" }, { name = "ruff", specifier = ">=0.14.7" }, @@ -3165,6 +3544,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f2/26/c56ce33ca856e358d27fda9676c055395abddb82c35ac0f593877ed4562e/pillow-12.1.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:cb9bb857b2d057c6dfc72ac5f3b44836924ba15721882ef103cecb40d002d80e", size = 7029880, upload-time = "2026-02-11T04:23:04.783Z" }, ] +[[package]] +name = "platformdirs" +version = "4.9.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/19/56/8d4c30c8a1d07013911a8fdbd8f89440ef9f08d07a1b50ab8ca8be5a20f9/platformdirs-4.9.4.tar.gz", hash = "sha256:1ec356301b7dc906d83f371c8f487070e99d3ccf9e501686456394622a01a934", size = 28737, upload-time = 
"2026-03-05T18:34:13.271Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/63/d7/97f7e3a6abb67d8080dd406fd4df842c2be0efaf712d1c899c32a075027c/platformdirs-4.9.4-py3-none-any.whl", hash = "sha256:68a9a4619a666ea6439f2ff250c12a853cd1cbd5158d258bd824a7df6be2f868", size = 21216, upload-time = "2026-03-05T18:34:12.172Z" }, +] + [[package]] name = "pluggy" version = "1.6.0" @@ -3361,6 +3749,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842, upload-time = "2024-07-21T12:58:20.04Z" }, ] +[[package]] +name = "py-key-value-aio" +version = "0.4.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "beartype" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/04/3c/0397c072a38d4bc580994b42e0c90c5f44f679303489e4376289534735e5/py_key_value_aio-0.4.4.tar.gz", hash = "sha256:e3012e6243ed7cc09bb05457bd4d03b1ba5c2b1ca8700096b3927db79ffbbe55", size = 92300, upload-time = "2026-02-16T21:21:43.245Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/69/f1b537ee70b7def42d63124a539ed3026a11a3ffc3086947a1ca6e861868/py_key_value_aio-0.4.4-py3-none-any.whl", hash = "sha256:18e17564ecae61b987f909fc2cd41ee2012c84b4b1dcb8c055cf8b4bc1bf3f5d", size = 152291, upload-time = "2026-02-16T21:21:44.241Z" }, +] + +[package.optional-dependencies] +filetree = [ + { name = "aiofile" }, + { name = "anyio" }, +] +keyring = [ + { name = "keyring" }, +] +memory = [ + { name = "cachetools" }, +] + [[package]] name = "pyasn1" version = "0.6.2" @@ -3579,40 +3992,46 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = 
"sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, ] +[package.optional-dependencies] +email = [ + { name = "email-validator" }, +] + [[package]] name = "pydantic-ai" -version = "0.2.4" +version = "1.67.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "pydantic-ai-slim", extra = ["a2a", "anthropic", "bedrock", "cli", "cohere", "evals", "groq", "mcp", "mistral", "openai", "vertexai"] }, + { name = "pydantic-ai-slim", extra = ["ag-ui", "anthropic", "bedrock", "cli", "cohere", "evals", "fastmcp", "google", "groq", "huggingface", "logfire", "mcp", "mistral", "openai", "retries", "temporal", "ui", "vertexai", "xai"] }, ] -sdist = { url = "https://files.pythonhosted.org/packages/51/35/8a1b3850a8bfdcc79d57f42cfddcfa781a71a887a51bd67c13b4d48e188f/pydantic_ai-0.2.4.tar.gz", hash = "sha256:f80820f4b84d35c9155cf5bd79881576eca141f7b7a232ed840811a4307ee0ea", size = 36635052, upload-time = "2025-05-14T20:58:37.647Z" } +sdist = { url = "https://files.pythonhosted.org/packages/99/c2/61c8423d4d7c3a7b9c402bdb9f78aea2d6174e8f778019738b35a563fcda/pydantic_ai-1.67.0.tar.gz", hash = "sha256:87e402dbc68b10f2b8494abce110f43c1f56817ebf2bcc4fe47698899e5e8516", size = 12142, upload-time = "2026-03-06T22:40:04.927Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/60/d1/b99f269c21fe6b46ed360db121afddf68614b83df0f5235454dbf8d43251/pydantic_ai-0.2.4-py3-none-any.whl", hash = "sha256:557b75f2dbc016e80364c55932f628d91747bf4173b50fb815d9d8e60dc7e4b4", size = 10003, upload-time = "2025-05-14T20:58:22.538Z" }, + { url = "https://files.pythonhosted.org/packages/01/e3/2e6f8dad1f5f7c20d54dabb8f4b50505268093078a905bfb10b77ba204dc/pydantic_ai-1.67.0-py3-none-any.whl", hash = "sha256:bc1f3264f73658891ef023861944c567a5aee96fa45eeeb4c10286550ec71ade", size = 7227, upload-time = "2026-03-06T22:39:56.895Z" }, ] [[package]] name = "pydantic-ai-slim" -version = "0.2.4" +version = 
"1.67.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "eval-type-backport" }, { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, - { name = "griffe" }, + { name = "genai-prices" }, + { name = "griffelib" }, { name = "httpx" }, { name = "opentelemetry-api" }, { name = "pydantic" }, { name = "pydantic-graph" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/79/29/24f4186fbdade58275117275c5d798e1384a300affec6f0e2189342dacfc/pydantic_ai_slim-0.2.4.tar.gz", hash = "sha256:498ab56e6b00ce2cc28c7c6da536c78a7665ef1345330bfe025e8376529bd296", size = 126900, upload-time = "2025-05-14T20:58:42.931Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/51/c20e9aa4ee5f1d92a77dcb74741113e0d1838d63b65bb04ef16b9ba4b55f/pydantic_ai_slim-1.67.0.tar.gz", hash = "sha256:ce646e96aea775c00305d0c214080c970640cb4adf3bf58aa1296e186103e765", size = 436929, upload-time = "2026-03-06T22:40:07.061Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/01/81/3bdfd6f82c5d887330b6efe950fbbe67a014382907a9fd64f51ffedc925b/pydantic_ai_slim-0.2.4-py3-none-any.whl", hash = "sha256:dfa4e3ebc559c6d6f84556cba8883dea7e5a8715ca97211d49fdc02d121d8300", size = 161883, upload-time = "2025-05-14T20:58:28.432Z" }, + { url = "https://files.pythonhosted.org/packages/2f/bc/401bf11e4615a7adf8c00fb8f62313eb15ae7ee191fa77438cb9a27fa745/pydantic_ai_slim-1.67.0-py3-none-any.whl", hash = "sha256:241290e634298a38ed60c135eca2f9efbfa269811bf1e95aad33b234b6880945", size = 567858, upload-time = "2026-03-06T22:39:59.977Z" }, ] [package.optional-dependencies] -a2a = [ - { name = "fasta2a" }, +ag-ui = [ + { name = "ag-ui-protocol" }, + { name = "starlette" }, ] anthropic = [ { name = "anthropic" }, @@ -3623,6 +4042,7 @@ bedrock = [ cli = [ { name = "argcomplete" }, { name = "prompt-toolkit" }, + { name = "pyperclip" }, { name = "rich" }, ] cohere = [ @@ -3631,9 +4051,21 @@ cohere = [ evals = [ { name = 
"pydantic-evals" }, ] +fastmcp = [ + { name = "fastmcp" }, +] +google = [ + { name = "google-genai" }, +] groq = [ { name = "groq" }, ] +huggingface = [ + { name = "huggingface-hub" }, +] +logfire = [ + { name = "logfire", extra = ["httpx"] }, +] mcp = [ { name = "mcp" }, ] @@ -3642,11 +4074,24 @@ mistral = [ ] openai = [ { name = "openai" }, + { name = "tiktoken" }, +] +retries = [ + { name = "tenacity" }, +] +temporal = [ + { name = "temporalio" }, +] +ui = [ + { name = "starlette" }, ] vertexai = [ { name = "google-auth" }, { name = "requests" }, ] +xai = [ + { name = "xai-sdk" }, +] [[package]] name = "pydantic-core" @@ -3768,25 +4213,24 @@ wheels = [ [[package]] name = "pydantic-evals" -version = "0.2.4" +version = "1.67.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, - { name = "eval-type-backport", marker = "python_full_version < '3.11'" }, { name = "logfire-api" }, { name = "pydantic" }, { name = "pydantic-ai-slim" }, { name = "pyyaml" }, { name = "rich" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ed/83/128a06c2cfe226b836e1858068d068cb6cc19c64a9ee9305452908043b50/pydantic_evals-0.2.4.tar.gz", hash = "sha256:a5b21e6235975a4b3d40d33c7a76234d6435de07bd6709a2296452933f8ba111", size = 40903, upload-time = "2025-05-14T20:58:44.407Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fd/fc/cb4c5ac7144e4506b582f44fe0a97e4b2f97327a3e6d2539b6669326e45f/pydantic_evals-1.67.0.tar.gz", hash = "sha256:62ddcf40665114b8d36ec54c6728afc2f8d861c749e53e78c0ed65d943706b8a", size = 56689, upload-time = "2026-03-06T22:40:08.407Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/54/1e/03d443744a7706ab5ea873047db93ea9f7e1195d6bee6bc0e5939c99adfa/pydantic_evals-0.2.4-py3-none-any.whl", hash = "sha256:623bbaead093477def3d6b4c145a6255d20713f5cf2a2402f5ecf76032fc26fd", size = 49453, upload-time = "2025-05-14T20:58:30.095Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/a4/5cf9caf529a523d8039598988f9c0fce87c59d928abb5d42b7b3347f336e/pydantic_evals-1.67.0-py3-none-any.whl", hash = "sha256:aa1bb0e9c5f87901420a9c60d84bbabc56ee90bf243509a2a60027762414b031", size = 67604, upload-time = "2026-03-06T22:40:01.833Z" }, ] [[package]] name = "pydantic-graph" -version = "0.2.4" +version = "1.67.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx" }, @@ -3794,9 +4238,9 @@ dependencies = [ { name = "pydantic" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f0/4a/792b24dab9c8dcdb60fb370222d2bdc46672335efac839c4db1c6f318881/pydantic_graph-0.2.4.tar.gz", hash = "sha256:e01cec584b7d53186ff58e0c673d8d55244b87783932028e4b0f49a5800a1e8c", size = 21123, upload-time = "2025-05-14T20:58:45.44Z" } +sdist = { url = "https://files.pythonhosted.org/packages/17/f9/a2ce0fb0bba88701b77cc49ca249e6b143bd8b2adcbfcdff7b7b9e3f689d/pydantic_graph-1.67.0.tar.gz", hash = "sha256:d7bb6bb95aa5f3808b10d3700235aa881feebc462736cb8a5b917ffb470da36b", size = 58527, upload-time = "2026-03-06T22:40:09.271Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/6e/a186045b0e56712e707554ce4794e9281130d423193919889733e6093913/pydantic_graph-0.2.4-py3-none-any.whl", hash = "sha256:5462205ac7f735190177ee37fb689dd9b8cdc35386a68212518167ec61705eb5", size = 26527, upload-time = "2025-05-14T20:58:32.358Z" }, + { url = "https://files.pythonhosted.org/packages/22/b8/fc47a5151b274c354c8e15a7a02fe306d0c4d6e050cdf0cc0060f36d1ac2/pydantic_graph-1.67.0-py3-none-any.whl", hash = "sha256:fd3ec24dcf1f93435c6ad8b2968f66d2a20505c488e6319f1d96cfac832322d5", size = 72352, upload-time = "2026-03-06T22:40:03.04Z" }, ] [[package]] @@ -3865,6 +4309,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/65/17/18ad82f070da18ab970928f730fbd44d9b05aafcb52a2ebb6470eaae53f9/pypdfium2-5.4.0-py3-none-win_arm64.whl", hash = 
"sha256:2b78ea216fb92e7709b61c46241ebf2cc0c60cf18ad2fb4633af665d7b4e21e6", size = 2938727, upload-time = "2026-02-08T16:54:06.814Z" }, ] +[[package]] +name = "pyperclip" +version = "1.11.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/52/d87eba7cb129b81563019d1679026e7a112ef76855d6159d24754dbd2a51/pyperclip-1.11.0.tar.gz", hash = "sha256:244035963e4428530d9e3a6101a1ef97209c6825edab1567beac148ccc1db1b6", size = 12185, upload-time = "2025-09-26T14:40:37.245Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/df/80/fc9d01d5ed37ba4c42ca2b55b4339ae6e200b456be3a1aaddf4a9fa99b8c/pyperclip-1.11.0-py3-none-any.whl", hash = "sha256:299403e9ff44581cb9ba2ffeed69c7aa96a008622ad0c46cb575ca75b5b84273", size = 11063, upload-time = "2025-09-26T14:40:36.069Z" }, +] + [[package]] name = "pypika" version = "0.51.1" @@ -4015,6 +4468,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, ] +[[package]] +name = "pywin32-ctypes" +version = "0.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/85/9f/01a1a99704853cb63f253eea009390c88e7131c67e66a0a02099a8c917cb/pywin32-ctypes-0.2.3.tar.gz", hash = "sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755", size = 29471, upload-time = "2024-08-14T10:15:34.626Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/3d/8161f7711c017e01ac9f008dfddd9410dff3674334c233bde66e7ba65bbf/pywin32_ctypes-0.2.3-py3-none-any.whl", hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8", size = 30756, upload-time = "2024-08-14T10:15:33.187Z" }, +] + [[package]] name = "pyyaml" version = "6.0.3" @@ -4268,6 
+4730,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ef/45/615f5babd880b4bd7d405cc0dc348234c5ffb6ed1ea33e152ede08b2072d/rich-14.3.2-py3-none-any.whl", hash = "sha256:08e67c3e90884651da3239ea668222d19bea7b589149d8014a21c633420dbb69", size = 309963, upload-time = "2026-02-01T16:20:46.078Z" }, ] +[[package]] +name = "rich-rst" +version = "1.3.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils", version = "0.21.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "docutils", version = "0.22.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "rich" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bc/6d/a506aaa4a9eaa945ed8ab2b7347859f53593864289853c5d6d62b77246e0/rich_rst-1.3.2.tar.gz", hash = "sha256:a1196fdddf1e364b02ec68a05e8ff8f6914fee10fbca2e6b6735f166bb0da8d4", size = 14936, upload-time = "2025-10-14T16:49:45.332Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/2f/b4530fbf948867702d0a3f27de4a6aab1d156f406d72852ab902c4d04de9/rich_rst-1.3.2-py3-none-any.whl", hash = "sha256:a99b4907cbe118cf9d18b0b44de272efa61f15117c61e39ebdc431baf5df722a", size = 12567, upload-time = "2025-10-14T16:49:42.953Z" }, +] + [[package]] name = "roman-numerals" version = "4.1.0" @@ -4583,6 +5059,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/56/a5/df8f46ef7da168f1bc52cd86e09a9de5c6f19cc1da04454d51b7d4f43408/scipy-1.17.0-cp314-cp314t-win_arm64.whl", hash = "sha256:031121914e295d9791319a1875444d55079885bbae5bdc9c5e0f2ee5f09d34ff", size = 25246266, upload-time = "2026-01-10T21:30:45.923Z" }, ] +[[package]] +name = "secretstorage" +version = "3.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "jeepney" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/1c/03/e834bcd866f2f8a49a85eaff47340affa3bfa391ee9912a952a1faa68c7b/secretstorage-3.5.0.tar.gz", hash = "sha256:f04b8e4689cbce351744d5537bf6b1329c6fc68f91fa666f60a380edddcd11be", size = 19884, upload-time = "2025-11-23T19:02:53.191Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/46/f5af3402b579fd5e11573ce652019a67074317e18c1935cc0b4ba9b35552/secretstorage-3.5.0-py3-none-any.whl", hash = "sha256:0ce65888c0725fcb2c5bc0fdb8e5438eece02c523557ea40ce0703c266248137", size = 15554, upload-time = "2025-11-23T19:02:51.545Z" }, +] + [[package]] name = "shellingham" version = "1.5.4" @@ -4893,6 +5382,26 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353, upload-time = "2025-04-27T18:04:59.103Z" }, ] +[[package]] +name = "temporalio" +version = "1.20.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nexus-rpc" }, + { name = "protobuf" }, + { name = "python-dateutil", marker = "python_full_version < '3.11'" }, + { name = "types-protobuf" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/21/db/7d5118d28b0918888e1ec98f56f659fdb006351e06d95f30f4274962a76f/temporalio-1.20.0.tar.gz", hash = "sha256:5a6a85b7d298b7359bffa30025f7deac83c74ac095a4c6952fbf06c249a2a67c", size = 1850498, upload-time = "2025-11-25T21:25:20.225Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/1b/e69052aa6003eafe595529485d9c62d1382dd5e671108f1bddf544fb6032/temporalio-1.20.0-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:fba70314b4068f8b1994bddfa0e2ad742483f0ae714d2ef52e63013ccfd7042e", size = 12061638, upload-time = "2025-11-25T21:24:57.918Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/3b/3e8c67ed7f23bedfa231c6ac29a7a9c12b89881da7694732270f3ecd6b0c/temporalio-1.20.0-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:ffc5bb6cabc6ae67f0bfba44de6a9c121603134ae18784a2ff3a7f230ad99080", size = 11562603, upload-time = "2025-11-25T21:25:01.721Z" }, + { url = "https://files.pythonhosted.org/packages/6d/be/ed0cc11702210522a79e09703267ebeca06eb45832b873a58de3ca76b9d0/temporalio-1.20.0-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1e80c1e4cdf88fa8277177f563edc91466fe4dc13c0322f26e55c76b6a219e6", size = 11824016, upload-time = "2025-11-25T21:25:06.771Z" }, + { url = "https://files.pythonhosted.org/packages/9d/97/09c5cafabc80139d97338a2bdd8ec22e08817dfd2949ab3e5b73565006eb/temporalio-1.20.0-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba92d909188930860c9d89ca6d7a753bc5a67e4e9eac6cea351477c967355eed", size = 12189521, upload-time = "2025-11-25T21:25:12.091Z" }, + { url = "https://files.pythonhosted.org/packages/11/23/5689c014a76aff3b744b3ee0d80815f63b1362637814f5fbb105244df09b/temporalio-1.20.0-cp310-abi3-win_amd64.whl", hash = "sha256:eacfd571b653e0a0f4aa6593f4d06fc628797898f0900d400e833a1f40cad03a", size = 12745027, upload-time = "2025-11-25T21:25:16.827Z" }, +] + [[package]] name = "tenacity" version = "9.1.4" @@ -5104,6 +5613,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ad/8a/5764b851659345f34787f1b6eb30b9d308bbd6c294825cbe38b6b869c97a/typer_slim-0.23.1-py3-none-any.whl", hash = "sha256:8146d5df1eb89f628191c4c604c8464fa841885d0733c58e6e700ff0228adac5", size = 3397, upload-time = "2026-02-13T10:04:27.132Z" }, ] +[[package]] +name = "types-protobuf" +version = "6.32.1.20260221" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5f/e2/9aa4a3b2469508bd7b4e2ae11cbedaf419222a09a1b94daffcd5efca4023/types_protobuf-6.32.1.20260221.tar.gz", hash = 
"sha256:6d5fb060a616bfb076cbb61b4b3c3969f5fc8bec5810f9a2f7e648ee5cbcbf6e", size = 64408, upload-time = "2026-02-21T03:55:13.916Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/e8/1fd38926f9cf031188fbc5a96694203ea6f24b0e34bd64a225ec6f6291ba/types_protobuf-6.32.1.20260221-py3-none-any.whl", hash = "sha256:da7cdd947975964a93c30bfbcc2c6841ee646b318d3816b033adc2c4eb6448e4", size = 77956, upload-time = "2026-02-21T03:55:12.894Z" }, +] + [[package]] name = "types-requests" version = "2.32.4.20260107" @@ -5137,6 +5655,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, ] +[[package]] +name = "uncalled-for" +version = "0.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/7c/b5b7d8136f872e3f13b0584e576886de0489d7213a12de6bebf29ff6ebfc/uncalled_for-0.2.0.tar.gz", hash = "sha256:b4f8fdbcec328c5a113807d653e041c5094473dd4afa7c34599ace69ccb7e69f", size = 49488, upload-time = "2026-02-27T17:40:58.137Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/7f/4320d9ce3be404e6310b915c3629fe27bf1e2f438a1a7a3cb0396e32e9a9/uncalled_for-0.2.0-py3-none-any.whl", hash = "sha256:2c0bd338faff5f930918f79e7eb9ff48290df2cb05fcc0b40a7f334e55d4d85f", size = 11351, upload-time = "2026-02-27T17:40:56.804Z" }, +] + [[package]] name = "urllib3" version = "2.6.3" @@ -5458,6 +5985,94 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598, upload-time = "2026-01-10T09:23:45.395Z" }, ] +[[package]] +name = "wrapt" +version = "1.17.3" +source 
= { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/8f/aeb76c5b46e273670962298c23e7ddde79916cb74db802131d49a85e4b7d/wrapt-1.17.3.tar.gz", hash = "sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0", size = 55547, upload-time = "2025-08-12T05:53:21.714Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/23/bb82321b86411eb51e5a5db3fb8f8032fd30bd7c2d74bfe936136b2fa1d6/wrapt-1.17.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88bbae4d40d5a46142e70d58bf664a89b6b4befaea7b2ecc14e03cedb8e06c04", size = 53482, upload-time = "2025-08-12T05:51:44.467Z" }, + { url = "https://files.pythonhosted.org/packages/45/69/f3c47642b79485a30a59c63f6d739ed779fb4cc8323205d047d741d55220/wrapt-1.17.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b13af258d6a9ad602d57d889f83b9d5543acd471eee12eb51f5b01f8eb1bc2", size = 38676, upload-time = "2025-08-12T05:51:32.636Z" }, + { url = "https://files.pythonhosted.org/packages/d1/71/e7e7f5670c1eafd9e990438e69d8fb46fa91a50785332e06b560c869454f/wrapt-1.17.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd341868a4b6714a5962c1af0bd44f7c404ef78720c7de4892901e540417111c", size = 38957, upload-time = "2025-08-12T05:51:54.655Z" }, + { url = "https://files.pythonhosted.org/packages/de/17/9f8f86755c191d6779d7ddead1a53c7a8aa18bccb7cea8e7e72dfa6a8a09/wrapt-1.17.3-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f9b2601381be482f70e5d1051a5965c25fb3625455a2bf520b5a077b22afb775", size = 81975, upload-time = "2025-08-12T05:52:30.109Z" }, + { url = "https://files.pythonhosted.org/packages/f2/15/dd576273491f9f43dd09fce517f6c2ce6eb4fe21681726068db0d0467096/wrapt-1.17.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:343e44b2a8e60e06a7e0d29c1671a0d9951f59174f3709962b5143f60a2a98bd", size = 83149, upload-time = "2025-08-12T05:52:09.316Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/c4/5eb4ce0d4814521fee7aa806264bf7a114e748ad05110441cd5b8a5c744b/wrapt-1.17.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:33486899acd2d7d3066156b03465b949da3fd41a5da6e394ec49d271baefcf05", size = 82209, upload-time = "2025-08-12T05:52:10.331Z" }, + { url = "https://files.pythonhosted.org/packages/31/4b/819e9e0eb5c8dc86f60dfc42aa4e2c0d6c3db8732bce93cc752e604bb5f5/wrapt-1.17.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e6f40a8aa5a92f150bdb3e1c44b7e98fb7113955b2e5394122fa5532fec4b418", size = 81551, upload-time = "2025-08-12T05:52:31.137Z" }, + { url = "https://files.pythonhosted.org/packages/f8/83/ed6baf89ba3a56694700139698cf703aac9f0f9eb03dab92f57551bd5385/wrapt-1.17.3-cp310-cp310-win32.whl", hash = "sha256:a36692b8491d30a8c75f1dfee65bef119d6f39ea84ee04d9f9311f83c5ad9390", size = 36464, upload-time = "2025-08-12T05:53:01.204Z" }, + { url = "https://files.pythonhosted.org/packages/2f/90/ee61d36862340ad7e9d15a02529df6b948676b9a5829fd5e16640156627d/wrapt-1.17.3-cp310-cp310-win_amd64.whl", hash = "sha256:afd964fd43b10c12213574db492cb8f73b2f0826c8df07a68288f8f19af2ebe6", size = 38748, upload-time = "2025-08-12T05:53:00.209Z" }, + { url = "https://files.pythonhosted.org/packages/bd/c3/cefe0bd330d389c9983ced15d326f45373f4073c9f4a8c2f99b50bfea329/wrapt-1.17.3-cp310-cp310-win_arm64.whl", hash = "sha256:af338aa93554be859173c39c85243970dc6a289fa907402289eeae7543e1ae18", size = 36810, upload-time = "2025-08-12T05:52:51.906Z" }, + { url = "https://files.pythonhosted.org/packages/52/db/00e2a219213856074a213503fdac0511203dceefff26e1daa15250cc01a0/wrapt-1.17.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:273a736c4645e63ac582c60a56b0acb529ef07f78e08dc6bfadf6a46b19c0da7", size = 53482, upload-time = "2025-08-12T05:51:45.79Z" }, + { url = "https://files.pythonhosted.org/packages/5e/30/ca3c4a5eba478408572096fe9ce36e6e915994dd26a4e9e98b4f729c06d9/wrapt-1.17.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:5531d911795e3f935a9c23eb1c8c03c211661a5060aab167065896bbf62a5f85", size = 38674, upload-time = "2025-08-12T05:51:34.629Z" }, + { url = "https://files.pythonhosted.org/packages/31/25/3e8cc2c46b5329c5957cec959cb76a10718e1a513309c31399a4dad07eb3/wrapt-1.17.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0610b46293c59a3adbae3dee552b648b984176f8562ee0dba099a56cfbe4df1f", size = 38959, upload-time = "2025-08-12T05:51:56.074Z" }, + { url = "https://files.pythonhosted.org/packages/5d/8f/a32a99fc03e4b37e31b57cb9cefc65050ea08147a8ce12f288616b05ef54/wrapt-1.17.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b32888aad8b6e68f83a8fdccbf3165f5469702a7544472bdf41f582970ed3311", size = 82376, upload-time = "2025-08-12T05:52:32.134Z" }, + { url = "https://files.pythonhosted.org/packages/31/57/4930cb8d9d70d59c27ee1332a318c20291749b4fba31f113c2f8ac49a72e/wrapt-1.17.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cccf4f81371f257440c88faed6b74f1053eef90807b77e31ca057b2db74edb1", size = 83604, upload-time = "2025-08-12T05:52:11.663Z" }, + { url = "https://files.pythonhosted.org/packages/a8/f3/1afd48de81d63dd66e01b263a6fbb86e1b5053b419b9b33d13e1f6d0f7d0/wrapt-1.17.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8a210b158a34164de8bb68b0e7780041a903d7b00c87e906fb69928bf7890d5", size = 82782, upload-time = "2025-08-12T05:52:12.626Z" }, + { url = "https://files.pythonhosted.org/packages/1e/d7/4ad5327612173b144998232f98a85bb24b60c352afb73bc48e3e0d2bdc4e/wrapt-1.17.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:79573c24a46ce11aab457b472efd8d125e5a51da2d1d24387666cd85f54c05b2", size = 82076, upload-time = "2025-08-12T05:52:33.168Z" }, + { url = "https://files.pythonhosted.org/packages/bb/59/e0adfc831674a65694f18ea6dc821f9fcb9ec82c2ce7e3d73a88ba2e8718/wrapt-1.17.3-cp311-cp311-win32.whl", hash = 
"sha256:c31eebe420a9a5d2887b13000b043ff6ca27c452a9a22fa71f35f118e8d4bf89", size = 36457, upload-time = "2025-08-12T05:53:03.936Z" }, + { url = "https://files.pythonhosted.org/packages/83/88/16b7231ba49861b6f75fc309b11012ede4d6b0a9c90969d9e0db8d991aeb/wrapt-1.17.3-cp311-cp311-win_amd64.whl", hash = "sha256:0b1831115c97f0663cb77aa27d381237e73ad4f721391a9bfb2fe8bc25fa6e77", size = 38745, upload-time = "2025-08-12T05:53:02.885Z" }, + { url = "https://files.pythonhosted.org/packages/9a/1e/c4d4f3398ec073012c51d1c8d87f715f56765444e1a4b11e5180577b7e6e/wrapt-1.17.3-cp311-cp311-win_arm64.whl", hash = "sha256:5a7b3c1ee8265eb4c8f1b7d29943f195c00673f5ab60c192eba2d4a7eae5f46a", size = 36806, upload-time = "2025-08-12T05:52:53.368Z" }, + { url = "https://files.pythonhosted.org/packages/9f/41/cad1aba93e752f1f9268c77270da3c469883d56e2798e7df6240dcb2287b/wrapt-1.17.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ab232e7fdb44cdfbf55fc3afa31bcdb0d8980b9b95c38b6405df2acb672af0e0", size = 53998, upload-time = "2025-08-12T05:51:47.138Z" }, + { url = "https://files.pythonhosted.org/packages/60/f8/096a7cc13097a1869fe44efe68dace40d2a16ecb853141394047f0780b96/wrapt-1.17.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9baa544e6acc91130e926e8c802a17f3b16fbea0fd441b5a60f5cf2cc5c3deba", size = 39020, upload-time = "2025-08-12T05:51:35.906Z" }, + { url = "https://files.pythonhosted.org/packages/33/df/bdf864b8997aab4febb96a9ae5c124f700a5abd9b5e13d2a3214ec4be705/wrapt-1.17.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6b538e31eca1a7ea4605e44f81a48aa24c4632a277431a6ed3f328835901f4fd", size = 39098, upload-time = "2025-08-12T05:51:57.474Z" }, + { url = "https://files.pythonhosted.org/packages/9f/81/5d931d78d0eb732b95dc3ddaeeb71c8bb572fb01356e9133916cd729ecdd/wrapt-1.17.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:042ec3bb8f319c147b1301f2393bc19dba6e176b7da446853406d041c36c7828", size = 88036, upload-time = 
"2025-08-12T05:52:34.784Z" }, + { url = "https://files.pythonhosted.org/packages/ca/38/2e1785df03b3d72d34fc6252d91d9d12dc27a5c89caef3335a1bbb8908ca/wrapt-1.17.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3af60380ba0b7b5aeb329bc4e402acd25bd877e98b3727b0135cb5c2efdaefe9", size = 88156, upload-time = "2025-08-12T05:52:13.599Z" }, + { url = "https://files.pythonhosted.org/packages/b3/8b/48cdb60fe0603e34e05cffda0b2a4adab81fd43718e11111a4b0100fd7c1/wrapt-1.17.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b02e424deef65c9f7326d8c19220a2c9040c51dc165cddb732f16198c168396", size = 87102, upload-time = "2025-08-12T05:52:14.56Z" }, + { url = "https://files.pythonhosted.org/packages/3c/51/d81abca783b58f40a154f1b2c56db1d2d9e0d04fa2d4224e357529f57a57/wrapt-1.17.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:74afa28374a3c3a11b3b5e5fca0ae03bef8450d6aa3ab3a1e2c30e3a75d023dc", size = 87732, upload-time = "2025-08-12T05:52:36.165Z" }, + { url = "https://files.pythonhosted.org/packages/9e/b1/43b286ca1392a006d5336412d41663eeef1ad57485f3e52c767376ba7e5a/wrapt-1.17.3-cp312-cp312-win32.whl", hash = "sha256:4da9f45279fff3543c371d5ababc57a0384f70be244de7759c85a7f989cb4ebe", size = 36705, upload-time = "2025-08-12T05:53:07.123Z" }, + { url = "https://files.pythonhosted.org/packages/28/de/49493f962bd3c586ab4b88066e967aa2e0703d6ef2c43aa28cb83bf7b507/wrapt-1.17.3-cp312-cp312-win_amd64.whl", hash = "sha256:e71d5c6ebac14875668a1e90baf2ea0ef5b7ac7918355850c0908ae82bcb297c", size = 38877, upload-time = "2025-08-12T05:53:05.436Z" }, + { url = "https://files.pythonhosted.org/packages/f1/48/0f7102fe9cb1e8a5a77f80d4f0956d62d97034bbe88d33e94699f99d181d/wrapt-1.17.3-cp312-cp312-win_arm64.whl", hash = "sha256:604d076c55e2fdd4c1c03d06dc1a31b95130010517b5019db15365ec4a405fc6", size = 36885, upload-time = "2025-08-12T05:52:54.367Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/f6/759ece88472157acb55fc195e5b116e06730f1b651b5b314c66291729193/wrapt-1.17.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a47681378a0439215912ef542c45a783484d4dd82bac412b71e59cf9c0e1cea0", size = 54003, upload-time = "2025-08-12T05:51:48.627Z" }, + { url = "https://files.pythonhosted.org/packages/4f/a9/49940b9dc6d47027dc850c116d79b4155f15c08547d04db0f07121499347/wrapt-1.17.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54a30837587c6ee3cd1a4d1c2ec5d24e77984d44e2f34547e2323ddb4e22eb77", size = 39025, upload-time = "2025-08-12T05:51:37.156Z" }, + { url = "https://files.pythonhosted.org/packages/45/35/6a08de0f2c96dcdd7fe464d7420ddb9a7655a6561150e5fc4da9356aeaab/wrapt-1.17.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:16ecf15d6af39246fe33e507105d67e4b81d8f8d2c6598ff7e3ca1b8a37213f7", size = 39108, upload-time = "2025-08-12T05:51:58.425Z" }, + { url = "https://files.pythonhosted.org/packages/0c/37/6faf15cfa41bf1f3dba80cd3f5ccc6622dfccb660ab26ed79f0178c7497f/wrapt-1.17.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6fd1ad24dc235e4ab88cda009e19bf347aabb975e44fd5c2fb22a3f6e4141277", size = 88072, upload-time = "2025-08-12T05:52:37.53Z" }, + { url = "https://files.pythonhosted.org/packages/78/f2/efe19ada4a38e4e15b6dff39c3e3f3f73f5decf901f66e6f72fe79623a06/wrapt-1.17.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ed61b7c2d49cee3c027372df5809a59d60cf1b6c2f81ee980a091f3afed6a2d", size = 88214, upload-time = "2025-08-12T05:52:15.886Z" }, + { url = "https://files.pythonhosted.org/packages/40/90/ca86701e9de1622b16e09689fc24b76f69b06bb0150990f6f4e8b0eeb576/wrapt-1.17.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:423ed5420ad5f5529db9ce89eac09c8a2f97da18eb1c870237e84c5a5c2d60aa", size = 87105, upload-time = "2025-08-12T05:52:17.914Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/e0/d10bd257c9a3e15cbf5523025252cc14d77468e8ed644aafb2d6f54cb95d/wrapt-1.17.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e01375f275f010fcbf7f643b4279896d04e571889b8a5b3f848423d91bf07050", size = 87766, upload-time = "2025-08-12T05:52:39.243Z" }, + { url = "https://files.pythonhosted.org/packages/e8/cf/7d848740203c7b4b27eb55dbfede11aca974a51c3d894f6cc4b865f42f58/wrapt-1.17.3-cp313-cp313-win32.whl", hash = "sha256:53e5e39ff71b3fc484df8a522c933ea2b7cdd0d5d15ae82e5b23fde87d44cbd8", size = 36711, upload-time = "2025-08-12T05:53:10.074Z" }, + { url = "https://files.pythonhosted.org/packages/57/54/35a84d0a4d23ea675994104e667ceff49227ce473ba6a59ba2c84f250b74/wrapt-1.17.3-cp313-cp313-win_amd64.whl", hash = "sha256:1f0b2f40cf341ee8cc1a97d51ff50dddb9fcc73241b9143ec74b30fc4f44f6cb", size = 38885, upload-time = "2025-08-12T05:53:08.695Z" }, + { url = "https://files.pythonhosted.org/packages/01/77/66e54407c59d7b02a3c4e0af3783168fff8e5d61def52cda8728439d86bc/wrapt-1.17.3-cp313-cp313-win_arm64.whl", hash = "sha256:7425ac3c54430f5fc5e7b6f41d41e704db073309acfc09305816bc6a0b26bb16", size = 36896, upload-time = "2025-08-12T05:52:55.34Z" }, + { url = "https://files.pythonhosted.org/packages/02/a2/cd864b2a14f20d14f4c496fab97802001560f9f41554eef6df201cd7f76c/wrapt-1.17.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cf30f6e3c077c8e6a9a7809c94551203c8843e74ba0c960f4a98cd80d4665d39", size = 54132, upload-time = "2025-08-12T05:51:49.864Z" }, + { url = "https://files.pythonhosted.org/packages/d5/46/d011725b0c89e853dc44cceb738a307cde5d240d023d6d40a82d1b4e1182/wrapt-1.17.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e228514a06843cae89621384cfe3a80418f3c04aadf8a3b14e46a7be704e4235", size = 39091, upload-time = "2025-08-12T05:51:38.935Z" }, + { url = "https://files.pythonhosted.org/packages/2e/9e/3ad852d77c35aae7ddebdbc3b6d35ec8013af7d7dddad0ad911f3d891dae/wrapt-1.17.3-cp314-cp314-macosx_11_0_arm64.whl", hash = 
"sha256:5ea5eb3c0c071862997d6f3e02af1d055f381b1d25b286b9d6644b79db77657c", size = 39172, upload-time = "2025-08-12T05:51:59.365Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f7/c983d2762bcce2326c317c26a6a1e7016f7eb039c27cdf5c4e30f4160f31/wrapt-1.17.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:281262213373b6d5e4bb4353bc36d1ba4084e6d6b5d242863721ef2bf2c2930b", size = 87163, upload-time = "2025-08-12T05:52:40.965Z" }, + { url = "https://files.pythonhosted.org/packages/e4/0f/f673f75d489c7f22d17fe0193e84b41540d962f75fce579cf6873167c29b/wrapt-1.17.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc4a8d2b25efb6681ecacad42fca8859f88092d8732b170de6a5dddd80a1c8fa", size = 87963, upload-time = "2025-08-12T05:52:20.326Z" }, + { url = "https://files.pythonhosted.org/packages/df/61/515ad6caca68995da2fac7a6af97faab8f78ebe3bf4f761e1b77efbc47b5/wrapt-1.17.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:373342dd05b1d07d752cecbec0c41817231f29f3a89aa8b8843f7b95992ed0c7", size = 86945, upload-time = "2025-08-12T05:52:21.581Z" }, + { url = "https://files.pythonhosted.org/packages/d3/bd/4e70162ce398462a467bc09e768bee112f1412e563620adc353de9055d33/wrapt-1.17.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d40770d7c0fd5cbed9d84b2c3f2e156431a12c9a37dc6284060fb4bec0b7ffd4", size = 86857, upload-time = "2025-08-12T05:52:43.043Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b8/da8560695e9284810b8d3df8a19396a6e40e7518059584a1a394a2b35e0a/wrapt-1.17.3-cp314-cp314-win32.whl", hash = "sha256:fbd3c8319de8e1dc79d346929cd71d523622da527cca14e0c1d257e31c2b8b10", size = 37178, upload-time = "2025-08-12T05:53:12.605Z" }, + { url = "https://files.pythonhosted.org/packages/db/c8/b71eeb192c440d67a5a0449aaee2310a1a1e8eca41676046f99ed2487e9f/wrapt-1.17.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1a4120ae5705f673727d3253de3ed0e016f7cd78dc463db1b31e2463e1f3cf6", 
size = 39310, upload-time = "2025-08-12T05:53:11.106Z" }, + { url = "https://files.pythonhosted.org/packages/45/20/2cda20fd4865fa40f86f6c46ed37a2a8356a7a2fde0773269311f2af56c7/wrapt-1.17.3-cp314-cp314-win_arm64.whl", hash = "sha256:507553480670cab08a800b9463bdb881b2edeed77dc677b0a5915e6106e91a58", size = 37266, upload-time = "2025-08-12T05:52:56.531Z" }, + { url = "https://files.pythonhosted.org/packages/77/ed/dd5cf21aec36c80443c6f900449260b80e2a65cf963668eaef3b9accce36/wrapt-1.17.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ed7c635ae45cfbc1a7371f708727bf74690daedc49b4dba310590ca0bd28aa8a", size = 56544, upload-time = "2025-08-12T05:51:51.109Z" }, + { url = "https://files.pythonhosted.org/packages/8d/96/450c651cc753877ad100c7949ab4d2e2ecc4d97157e00fa8f45df682456a/wrapt-1.17.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:249f88ed15503f6492a71f01442abddd73856a0032ae860de6d75ca62eed8067", size = 40283, upload-time = "2025-08-12T05:51:39.912Z" }, + { url = "https://files.pythonhosted.org/packages/d1/86/2fcad95994d9b572db57632acb6f900695a648c3e063f2cd344b3f5c5a37/wrapt-1.17.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5a03a38adec8066d5a37bea22f2ba6bbf39fcdefbe2d91419ab864c3fb515454", size = 40366, upload-time = "2025-08-12T05:52:00.693Z" }, + { url = "https://files.pythonhosted.org/packages/64/0e/f4472f2fdde2d4617975144311f8800ef73677a159be7fe61fa50997d6c0/wrapt-1.17.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5d4478d72eb61c36e5b446e375bbc49ed002430d17cdec3cecb36993398e1a9e", size = 108571, upload-time = "2025-08-12T05:52:44.521Z" }, + { url = "https://files.pythonhosted.org/packages/cc/01/9b85a99996b0a97c8a17484684f206cbb6ba73c1ce6890ac668bcf3838fb/wrapt-1.17.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:223db574bb38637e8230eb14b185565023ab624474df94d2af18f1cdb625216f", size = 113094, upload-time = "2025-08-12T05:52:22.618Z" }, 
+ { url = "https://files.pythonhosted.org/packages/25/02/78926c1efddcc7b3aa0bc3d6b33a822f7d898059f7cd9ace8c8318e559ef/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e405adefb53a435f01efa7ccdec012c016b5a1d3f35459990afc39b6be4d5056", size = 110659, upload-time = "2025-08-12T05:52:24.057Z" }, + { url = "https://files.pythonhosted.org/packages/dc/ee/c414501ad518ac3e6fe184753632fe5e5ecacdcf0effc23f31c1e4f7bfcf/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:88547535b787a6c9ce4086917b6e1d291aa8ed914fdd3a838b3539dc95c12804", size = 106946, upload-time = "2025-08-12T05:52:45.976Z" }, + { url = "https://files.pythonhosted.org/packages/be/44/a1bd64b723d13bb151d6cc91b986146a1952385e0392a78567e12149c7b4/wrapt-1.17.3-cp314-cp314t-win32.whl", hash = "sha256:41b1d2bc74c2cac6f9074df52b2efbef2b30bdfe5f40cb78f8ca22963bc62977", size = 38717, upload-time = "2025-08-12T05:53:15.214Z" }, + { url = "https://files.pythonhosted.org/packages/79/d9/7cfd5a312760ac4dd8bf0184a6ee9e43c33e47f3dadc303032ce012b8fa3/wrapt-1.17.3-cp314-cp314t-win_amd64.whl", hash = "sha256:73d496de46cd2cdbdbcce4ae4bcdb4afb6a11234a1df9c085249d55166b95116", size = 41334, upload-time = "2025-08-12T05:53:14.178Z" }, + { url = "https://files.pythonhosted.org/packages/46/78/10ad9781128ed2f99dbc474f43283b13fea8ba58723e98844367531c18e9/wrapt-1.17.3-cp314-cp314t-win_arm64.whl", hash = "sha256:f38e60678850c42461d4202739f9bf1e3a737c7ad283638251e79cc49effb6b6", size = 38471, upload-time = "2025-08-12T05:52:57.784Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591, upload-time = "2025-08-12T05:53:20.674Z" }, +] + +[[package]] +name = "xai-sdk" +version = "1.8.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "googleapis-common-protos" }, 
+ { name = "grpcio" }, + { name = "opentelemetry-sdk" }, + { name = "packaging" }, + { name = "protobuf" }, + { name = "pydantic" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d3/41/e39d9207c6f4ba0fd98c1f42747c57edd7785389e1b7464afb2edf844501/xai_sdk-1.8.1.tar.gz", hash = "sha256:3f3ff2a98888b3bb2b6d8184c82a56d475d501711e78e5e748073d5a67be0804", size = 391417, upload-time = "2026-03-11T03:04:24.404Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/76/4eba837410a4969c70f961a2c5d6a90761167f61a775525772f64b3f7eb0/xai_sdk-1.8.1-py3-none-any.whl", hash = "sha256:9a503a5716f9402a8639da5b5c806cfbef7cda7809c8c8bd090e26c2a5e32dad", size = 242353, upload-time = "2026-03-11T03:04:22.758Z" }, +] + [[package]] name = "xxhash" version = "3.6.0" diff --git a/scripts/build-python.sh b/scripts/build-python.sh new file mode 100755 index 00000000..6bf2678a --- /dev/null +++ b/scripts/build-python.sh @@ -0,0 +1,28 @@ +#!/bin/bash +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)" +PCTX_PY="$REPO_ROOT/pctx-py" +SYMLINK="$PCTX_PY/src/pctx_client/descriptions/data" +SYMLINK_TARGET="../../../../descriptions" +RESOLVED=false + +cleanup() { + if [ "$RESOLVED" = true ]; then + rm -rf "$SYMLINK" + ln -s "$SYMLINK_TARGET" "$SYMLINK" + fi +} + +trap cleanup EXIT + +if [ -L "$SYMLINK" ]; then + REAL_TARGET="$(cd "$(dirname "$SYMLINK")" && cd "$(readlink "$SYMLINK")" && pwd)" + rm "$SYMLINK" + cp -r "$REAL_TARGET" "$SYMLINK" + RESOLVED=true +fi + +cd "$PCTX_PY" +uv build "$@" diff --git a/scripts/test-mcp-cli.sh b/scripts/test-mcp-cli.sh index 381b66d7..44b0c57a 100755 --- a/scripts/test-mcp-cli.sh +++ b/scripts/test-mcp-cli.sh @@ -114,8 +114,6 @@ response=$(curl -s -X POST http://localhost:8080/mcp \ "method": "tools/list" }') -echo "Response preview: ${response:0:200}..." 
- # Check that we got tools back if echo "$response" | grep -q '"tools"'; then echo -e "${GREEN}✓ Successfully listed tools from MCP server${NC}" @@ -198,7 +196,7 @@ execute_response=$(curl -s -X POST http://localhost:8080/mcp \ \"id\": 4, \"method\": \"tools/call\", \"params\": { - \"name\": \"execute\", + \"name\": \"execute_typescript\", \"arguments\": { \"code\": \"async function run() { const result = await Memory.createEntities({ entities: [{ name: 'test', entityType: 'item', observations: ['test observation'] }] }); return result; }\" }