From 7f9abc5f82c81b93f24d7e02af8bfc1dea4b354e Mon Sep 17 00:00:00 2001 From: Chandra Kiran G Date: Tue, 7 Apr 2026 19:01:38 +0530 Subject: [PATCH] feat: Add Analytics and MCP APIs Co-authored-by: Cursor Agent --- portkey_ai/__init__.py | 48 + portkey_ai/api_resources/__init__.py | 48 + portkey_ai/api_resources/apis/__init__.py | 65 + portkey_ai/api_resources/apis/analytics.py | 1342 +++++++++++++++++ .../api_resources/apis/mcp_integrations.py | 638 ++++++++ portkey_ai/api_resources/apis/mcp_servers.py | 850 +++++++++++ portkey_ai/api_resources/client.py | 16 + .../api_resources/types/analytics_type.py | 66 + portkey_ai/api_resources/types/mcp_type.py | 551 +++++++ portkey_ai/llms/langchain/chat.py | 2 +- tests/test_llm_langchain.py | 4 +- 11 files changed, 3627 insertions(+), 3 deletions(-) create mode 100644 portkey_ai/api_resources/apis/analytics.py create mode 100644 portkey_ai/api_resources/apis/mcp_integrations.py create mode 100644 portkey_ai/api_resources/apis/mcp_servers.py create mode 100644 portkey_ai/api_resources/types/analytics_type.py create mode 100644 portkey_ai/api_resources/types/mcp_type.py diff --git a/portkey_ai/__init__.py b/portkey_ai/__init__.py index 976a2143..8f785a81 100644 --- a/portkey_ai/__init__.py +++ b/portkey_ai/__init__.py @@ -157,6 +157,30 @@ AsyncCalls, InputTokens, AsyncInputTokens, + Analytics, + AsyncAnalytics, + AnalyticsGraphs, + AsyncAnalyticsGraphs, + AnalyticsGroups, + AsyncAnalyticsGroups, + AnalyticsSummary, + AsyncAnalyticsSummary, + McpServers, + AsyncMcpServers, + McpServerCapabilities, + AsyncMcpServerCapabilities, + McpServerUserAccess, + AsyncMcpServerUserAccess, + McpServerMetadata, + AsyncMcpServerMetadata, + McpIntegrations, + AsyncMcpIntegrations, + McpIntegrationWorkspaces, + AsyncMcpIntegrationWorkspaces, + McpIntegrationCapabilities, + AsyncMcpIntegrationCapabilities, + McpIntegrationMetadata, + AsyncMcpIntegrationMetadata, ) from portkey_ai.version import VERSION @@ -333,4 +357,28 @@ "AsyncCalls", 
"InputTokens", "AsyncInputTokens", + "Analytics", + "AsyncAnalytics", + "AnalyticsGraphs", + "AsyncAnalyticsGraphs", + "AnalyticsGroups", + "AsyncAnalyticsGroups", + "AnalyticsSummary", + "AsyncAnalyticsSummary", + "McpServers", + "AsyncMcpServers", + "McpServerCapabilities", + "AsyncMcpServerCapabilities", + "McpServerUserAccess", + "AsyncMcpServerUserAccess", + "McpServerMetadata", + "AsyncMcpServerMetadata", + "McpIntegrations", + "AsyncMcpIntegrations", + "McpIntegrationWorkspaces", + "AsyncMcpIntegrationWorkspaces", + "McpIntegrationCapabilities", + "AsyncMcpIntegrationCapabilities", + "McpIntegrationMetadata", + "AsyncMcpIntegrationMetadata", ] diff --git a/portkey_ai/api_resources/__init__.py b/portkey_ai/api_resources/__init__.py index 080628cc..0cab7a03 100644 --- a/portkey_ai/api_resources/__init__.py +++ b/portkey_ai/api_resources/__init__.py @@ -145,6 +145,30 @@ AsyncCalls, InputTokens, AsyncInputTokens, + Analytics, + AsyncAnalytics, + AnalyticsGraphs, + AsyncAnalyticsGraphs, + AnalyticsGroups, + AsyncAnalyticsGroups, + AnalyticsSummary, + AsyncAnalyticsSummary, + McpServers, + AsyncMcpServers, + McpServerCapabilities, + AsyncMcpServerCapabilities, + McpServerUserAccess, + AsyncMcpServerUserAccess, + McpServerMetadata, + AsyncMcpServerMetadata, + McpIntegrations, + AsyncMcpIntegrations, + McpIntegrationWorkspaces, + AsyncMcpIntegrationWorkspaces, + McpIntegrationCapabilities, + AsyncMcpIntegrationCapabilities, + McpIntegrationMetadata, + AsyncMcpIntegrationMetadata, ) from .utils import ( Modes, @@ -325,4 +349,28 @@ "AsyncCalls", "InputTokens", "AsyncInputTokens", + "Analytics", + "AsyncAnalytics", + "AnalyticsGraphs", + "AsyncAnalyticsGraphs", + "AnalyticsGroups", + "AsyncAnalyticsGroups", + "AnalyticsSummary", + "AsyncAnalyticsSummary", + "McpServers", + "AsyncMcpServers", + "McpServerCapabilities", + "AsyncMcpServerCapabilities", + "McpServerUserAccess", + "AsyncMcpServerUserAccess", + "McpServerMetadata", + "AsyncMcpServerMetadata", + 
"McpIntegrations", + "AsyncMcpIntegrations", + "McpIntegrationWorkspaces", + "AsyncMcpIntegrationWorkspaces", + "McpIntegrationCapabilities", + "AsyncMcpIntegrationCapabilities", + "McpIntegrationMetadata", + "AsyncMcpIntegrationMetadata", ] diff --git a/portkey_ai/api_resources/apis/__init__.py b/portkey_ai/api_resources/apis/__init__.py index 095bcb3c..93fcbc9b 100644 --- a/portkey_ai/api_resources/apis/__init__.py +++ b/portkey_ai/api_resources/apis/__init__.py @@ -184,6 +184,39 @@ AsyncChatKitThreads, ) +from .analytics import ( + Analytics, + AsyncAnalytics, + AnalyticsGraphs, + AsyncAnalyticsGraphs, + AnalyticsGroups, + AsyncAnalyticsGroups, + AnalyticsSummary, + AsyncAnalyticsSummary, +) + +from .mcp_servers import ( + McpServers, + AsyncMcpServers, + McpServerCapabilities, + AsyncMcpServerCapabilities, + McpServerUserAccess, + AsyncMcpServerUserAccess, + McpServerMetadata, + AsyncMcpServerMetadata, +) + +from .mcp_integrations import ( + McpIntegrations, + AsyncMcpIntegrations, + McpIntegrationWorkspaces, + AsyncMcpIntegrationWorkspaces, + McpIntegrationCapabilities, + AsyncMcpIntegrationCapabilities, + McpIntegrationMetadata, + AsyncMcpIntegrationMetadata, +) + __all__ = [ "Completion", "AsyncCompletion", @@ -340,4 +373,36 @@ "AsyncCalls", "InputTokens", "AsyncInputTokens", + "Skills", + "AsyncSkills", + "SkillsContent", + "AsyncSkillsContent", + "SkillsVersions", + "AsyncSkillsVersions", + "SkillsVersionsContent", + "AsyncSkillsVersionsContent", + "Analytics", + "AsyncAnalytics", + "AnalyticsGraphs", + "AsyncAnalyticsGraphs", + "AnalyticsGroups", + "AsyncAnalyticsGroups", + "AnalyticsSummary", + "AsyncAnalyticsSummary", + "McpServers", + "AsyncMcpServers", + "McpServerCapabilities", + "AsyncMcpServerCapabilities", + "McpServerUserAccess", + "AsyncMcpServerUserAccess", + "McpServerMetadata", + "AsyncMcpServerMetadata", + "McpIntegrations", + "AsyncMcpIntegrations", + "McpIntegrationWorkspaces", + "AsyncMcpIntegrationWorkspaces", + 
"McpIntegrationCapabilities", + "AsyncMcpIntegrationCapabilities", + "McpIntegrationMetadata", + "AsyncMcpIntegrationMetadata", ] diff --git a/portkey_ai/api_resources/apis/analytics.py b/portkey_ai/api_resources/apis/analytics.py new file mode 100644 index 00000000..3d6f8ff3 --- /dev/null +++ b/portkey_ai/api_resources/apis/analytics.py @@ -0,0 +1,1342 @@ +from typing import Any, Literal, Union +from portkey_ai._vendor.openai import NOT_GIVEN, NotGiven +from portkey_ai.api_resources.base_client import APIClient, AsyncAPIClient +from urllib.parse import urlencode +from portkey_ai.api_resources.apis.api_resource import APIResource, AsyncAPIResource +from portkey_ai.api_resources.types.analytics_type import ( + AnalyticsGraphResponse, + AnalyticsGroupResponse, + AnalyticsSummaryResponse, +) + +ANALYTICS_API_PATH = "/analytics" + + +class AnalyticsGraphs(APIResource): + """Analytics Graphs API for retrieving time-series analytics data.""" + + def __init__(self, client: APIClient) -> None: + super().__init__(client) + + def _build_query_string( + self, + time_of_generation_min: str, + time_of_generation_max: str, + total_units_min: Union[int, NotGiven] = NOT_GIVEN, + total_units_max: Union[int, NotGiven] = NOT_GIVEN, + cost_min: Union[float, NotGiven] = NOT_GIVEN, + cost_max: Union[float, NotGiven] = NOT_GIVEN, + status_code: Union[str, NotGiven] = NOT_GIVEN, + virtual_keys: Union[str, NotGiven] = NOT_GIVEN, + configs: Union[str, NotGiven] = NOT_GIVEN, + workspace_slug: Union[str, NotGiven] = NOT_GIVEN, + api_key_ids: Union[str, NotGiven] = NOT_GIVEN, + ai_org_model: Union[str, NotGiven] = NOT_GIVEN, + prompt_slug: Union[str, NotGiven] = NOT_GIVEN, + metadata: Union[str, NotGiven] = NOT_GIVEN, + cache_status: Union[str, NotGiven] = NOT_GIVEN, + **kwargs: Any, + ) -> str: + query = { + "time_of_generation_min": time_of_generation_min, + "time_of_generation_max": time_of_generation_max, + "total_units_min": total_units_min, + "total_units_max": total_units_max, + 
"cost_min": cost_min, + "cost_max": cost_max, + "status_code": status_code, + "virtual_keys": virtual_keys, + "configs": configs, + "workspace_slug": workspace_slug, + "api_key_ids": api_key_ids, + "ai_org_model": ai_org_model, + "prompt_slug": prompt_slug, + "metadata": metadata, + "cache_status": cache_status, + **kwargs, + } + filtered_query = {k: v for k, v in query.items() if v is not NOT_GIVEN} + return urlencode(filtered_query) + + def requests( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get requests analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return self._get( + f"{ANALYTICS_API_PATH}/graphs/requests?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def cost( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get cost analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return self._get( + f"{ANALYTICS_API_PATH}/graphs/cost?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def latency( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get latency analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return self._get( + f"{ANALYTICS_API_PATH}/graphs/latency?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + 
stream_cls=None, + headers={}, + ) + + def tokens( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get tokens analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return self._get( + f"{ANALYTICS_API_PATH}/graphs/tokens?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def users( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get users analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return self._get( + f"{ANALYTICS_API_PATH}/graphs/users?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def users_requests( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get users/requests analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return self._get( + f"{ANALYTICS_API_PATH}/graphs/users/requests?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def errors( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get errors analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return self._get( + 
f"{ANALYTICS_API_PATH}/graphs/errors?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def errors_rate( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get errors rate analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return self._get( + f"{ANALYTICS_API_PATH}/graphs/errors/rate?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def errors_stacks( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get errors stacks analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return self._get( + f"{ANALYTICS_API_PATH}/graphs/errors/stacks?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def errors_status_codes( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get errors status codes analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return self._get( + f"{ANALYTICS_API_PATH}/graphs/errors/status-codes?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def requests_rescued( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get rescued 
requests analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return self._get( + f"{ANALYTICS_API_PATH}/graphs/requests/rescued?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def cache_hit_rate( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get cache hit rate analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return self._get( + f"{ANALYTICS_API_PATH}/graphs/cache/hit-rate?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def cache_latency( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get cache latency analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return self._get( + f"{ANALYTICS_API_PATH}/graphs/cache/latency?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def feedbacks( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get feedbacks analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return self._get( + f"{ANALYTICS_API_PATH}/graphs/feedbacks?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + 
stream_cls=None, + headers={}, + ) + + def feedbacks_scores( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get feedback scores analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return self._get( + f"{ANALYTICS_API_PATH}/graphs/feedbacks/scores?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def feedbacks_weighted( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get weighted feedbacks analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return self._get( + f"{ANALYTICS_API_PATH}/graphs/feedbacks/weighted?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def feedbacks_ai_models( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get feedbacks by AI models analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return self._get( + f"{ANALYTICS_API_PATH}/graphs/feedbacks/ai-models?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + +class AnalyticsGroups(APIResource): + """Analytics Groups API for retrieving grouped analytics data.""" + + def __init__(self, client: APIClient) -> None: + super().__init__(client) + + def _build_query_string( + self, + time_of_generation_min: str, + 
time_of_generation_max: str, + columns: Union[str, NotGiven] = NOT_GIVEN, + include_total: Union[bool, NotGiven] = NOT_GIVEN, + current_page: Union[int, NotGiven] = NOT_GIVEN, + page_size: Union[int, NotGiven] = NOT_GIVEN, + order_by: Union[str, NotGiven] = NOT_GIVEN, + order_by_type: Union[Literal["asc", "desc"], NotGiven] = NOT_GIVEN, + virtual_keys: Union[str, NotGiven] = NOT_GIVEN, + configs: Union[str, NotGiven] = NOT_GIVEN, + workspace_slug: Union[str, NotGiven] = NOT_GIVEN, + api_key_ids: Union[str, NotGiven] = NOT_GIVEN, + ai_org_model: Union[str, NotGiven] = NOT_GIVEN, + prompt_slug: Union[str, NotGiven] = NOT_GIVEN, + metadata: Union[str, NotGiven] = NOT_GIVEN, + cache_status: Union[str, NotGiven] = NOT_GIVEN, + **kwargs: Any, + ) -> str: + query = { + "time_of_generation_min": time_of_generation_min, + "time_of_generation_max": time_of_generation_max, + "columns": columns, + "include_total": include_total, + "current_page": current_page, + "page_size": page_size, + "order_by": order_by, + "order_by_type": order_by_type, + "virtual_keys": virtual_keys, + "configs": configs, + "workspace_slug": workspace_slug, + "api_key_ids": api_key_ids, + "ai_org_model": ai_org_model, + "prompt_slug": prompt_slug, + "metadata": metadata, + "cache_status": cache_status, + **kwargs, + } + filtered_query = {k: v for k, v in query.items() if v is not NOT_GIVEN} + return urlencode(filtered_query) + + def users( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGroupResponse: + """Get analytics grouped by users.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return self._get( + f"{ANALYTICS_API_PATH}/groups/users?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGroupResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def ai_models( + self, + *, + time_of_generation_min: str, + 
time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGroupResponse: + """Get analytics grouped by AI models.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return self._get( + f"{ANALYTICS_API_PATH}/groups/ai-models?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGroupResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def workspaces( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGroupResponse: + """Get analytics grouped by workspaces.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return self._get( + f"{ANALYTICS_API_PATH}/groups/workspaces?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGroupResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def metadata( + self, + *, + metadata_key: str, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGroupResponse: + """Get analytics grouped by metadata key.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return self._get( + f"{ANALYTICS_API_PATH}/groups/metadata/{metadata_key}?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGroupResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def group_by( + self, + *, + group_by: Literal[ + "ai_service", + "model", + "status_code", + "api_key", + "config", + "workspace", + "provider", + "prompt", + ], + time_of_generation_min: str, + time_of_generation_max: str, + columns: Union[str, NotGiven] = NOT_GIVEN, + include_total: Union[bool, NotGiven] = NOT_GIVEN, + **kwargs: Any, + ) -> AnalyticsGroupResponse: + """Get analytics grouped by specified 
dimension.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + columns=columns, + include_total=include_total, + **kwargs, + ) + return self._get( + f"{ANALYTICS_API_PATH}/groups/{group_by}?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGroupResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + +class AnalyticsSummary(APIResource): + """Analytics Summary API for retrieving summary analytics data.""" + + def __init__(self, client: APIClient) -> None: + super().__init__(client) + + def cache( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + workspace_slug: Union[str, NotGiven] = NOT_GIVEN, + **kwargs: Any, + ) -> AnalyticsSummaryResponse: + """Get cache summary analytics.""" + query = { + "time_of_generation_min": time_of_generation_min, + "time_of_generation_max": time_of_generation_max, + "workspace_slug": workspace_slug, + **kwargs, + } + filtered_query = {k: v for k, v in query.items() if v is not NOT_GIVEN} + query_string = urlencode(filtered_query) + return self._get( + f"{ANALYTICS_API_PATH}/summary/cache?{query_string}", + params=None, + body=None, + cast_to=AnalyticsSummaryResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + +class Analytics(APIResource): + """Analytics API for retrieving analytics data from Portkey.""" + + graphs: AnalyticsGraphs + groups: AnalyticsGroups + summary: AnalyticsSummary + + def __init__(self, client: APIClient) -> None: + super().__init__(client) + self.graphs = AnalyticsGraphs(client) + self.groups = AnalyticsGroups(client) + self.summary = AnalyticsSummary(client) + + +class AsyncAnalyticsGraphs(AsyncAPIResource): + """Async Analytics Graphs API for retrieving time-series analytics data.""" + + def __init__(self, client: AsyncAPIClient) -> None: + super().__init__(client) + + def _build_query_string( + self, + time_of_generation_min: str, + 
time_of_generation_max: str, + total_units_min: Union[int, NotGiven] = NOT_GIVEN, + total_units_max: Union[int, NotGiven] = NOT_GIVEN, + cost_min: Union[float, NotGiven] = NOT_GIVEN, + cost_max: Union[float, NotGiven] = NOT_GIVEN, + status_code: Union[str, NotGiven] = NOT_GIVEN, + virtual_keys: Union[str, NotGiven] = NOT_GIVEN, + configs: Union[str, NotGiven] = NOT_GIVEN, + workspace_slug: Union[str, NotGiven] = NOT_GIVEN, + api_key_ids: Union[str, NotGiven] = NOT_GIVEN, + ai_org_model: Union[str, NotGiven] = NOT_GIVEN, + prompt_slug: Union[str, NotGiven] = NOT_GIVEN, + metadata: Union[str, NotGiven] = NOT_GIVEN, + cache_status: Union[str, NotGiven] = NOT_GIVEN, + **kwargs: Any, + ) -> str: + query = { + "time_of_generation_min": time_of_generation_min, + "time_of_generation_max": time_of_generation_max, + "total_units_min": total_units_min, + "total_units_max": total_units_max, + "cost_min": cost_min, + "cost_max": cost_max, + "status_code": status_code, + "virtual_keys": virtual_keys, + "configs": configs, + "workspace_slug": workspace_slug, + "api_key_ids": api_key_ids, + "ai_org_model": ai_org_model, + "prompt_slug": prompt_slug, + "metadata": metadata, + "cache_status": cache_status, + **kwargs, + } + filtered_query = {k: v for k, v in query.items() if v is not NOT_GIVEN} + return urlencode(filtered_query) + + async def requests( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get requests analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return await self._get( + f"{ANALYTICS_API_PATH}/graphs/requests?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def cost( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> 
AnalyticsGraphResponse: + """Get cost analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return await self._get( + f"{ANALYTICS_API_PATH}/graphs/cost?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def latency( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get latency analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return await self._get( + f"{ANALYTICS_API_PATH}/graphs/latency?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def tokens( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get tokens analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return await self._get( + f"{ANALYTICS_API_PATH}/graphs/tokens?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def users( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get users analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return await self._get( + f"{ANALYTICS_API_PATH}/graphs/users?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + 
stream_cls=None, + headers={}, + ) + + async def users_requests( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get users/requests analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return await self._get( + f"{ANALYTICS_API_PATH}/graphs/users/requests?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def errors( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get errors analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return await self._get( + f"{ANALYTICS_API_PATH}/graphs/errors?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def errors_rate( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get errors rate analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return await self._get( + f"{ANALYTICS_API_PATH}/graphs/errors/rate?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def errors_stacks( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get errors stacks analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + 
time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return await self._get( + f"{ANALYTICS_API_PATH}/graphs/errors/stacks?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def errors_status_codes( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get errors status codes analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return await self._get( + f"{ANALYTICS_API_PATH}/graphs/errors/status-codes?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def requests_rescued( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get rescued requests analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return await self._get( + f"{ANALYTICS_API_PATH}/graphs/requests/rescued?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def cache_hit_rate( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get cache hit rate analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return await self._get( + f"{ANALYTICS_API_PATH}/graphs/cache/hit-rate?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def 
cache_latency( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get cache latency analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return await self._get( + f"{ANALYTICS_API_PATH}/graphs/cache/latency?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def feedbacks( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get feedbacks analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return await self._get( + f"{ANALYTICS_API_PATH}/graphs/feedbacks?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def feedbacks_scores( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get feedback scores analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return await self._get( + f"{ANALYTICS_API_PATH}/graphs/feedbacks/scores?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def feedbacks_weighted( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get weighted feedbacks analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + 
time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return await self._get( + f"{ANALYTICS_API_PATH}/graphs/feedbacks/weighted?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def feedbacks_ai_models( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGraphResponse: + """Get feedbacks by AI models analytics graph data.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return await self._get( + f"{ANALYTICS_API_PATH}/graphs/feedbacks/ai-models?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGraphResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + +class AsyncAnalyticsGroups(AsyncAPIResource): + """Async Analytics Groups API for retrieving grouped analytics data.""" + + def __init__(self, client: AsyncAPIClient) -> None: + super().__init__(client) + + def _build_query_string( + self, + time_of_generation_min: str, + time_of_generation_max: str, + columns: Union[str, NotGiven] = NOT_GIVEN, + include_total: Union[bool, NotGiven] = NOT_GIVEN, + current_page: Union[int, NotGiven] = NOT_GIVEN, + page_size: Union[int, NotGiven] = NOT_GIVEN, + order_by: Union[str, NotGiven] = NOT_GIVEN, + order_by_type: Union[Literal["asc", "desc"], NotGiven] = NOT_GIVEN, + virtual_keys: Union[str, NotGiven] = NOT_GIVEN, + configs: Union[str, NotGiven] = NOT_GIVEN, + workspace_slug: Union[str, NotGiven] = NOT_GIVEN, + api_key_ids: Union[str, NotGiven] = NOT_GIVEN, + ai_org_model: Union[str, NotGiven] = NOT_GIVEN, + prompt_slug: Union[str, NotGiven] = NOT_GIVEN, + metadata: Union[str, NotGiven] = NOT_GIVEN, + cache_status: Union[str, NotGiven] = NOT_GIVEN, + **kwargs: Any, + ) -> str: + query = { + "time_of_generation_min": time_of_generation_min, + "time_of_generation_max": 
time_of_generation_max, + "columns": columns, + "include_total": include_total, + "current_page": current_page, + "page_size": page_size, + "order_by": order_by, + "order_by_type": order_by_type, + "virtual_keys": virtual_keys, + "configs": configs, + "workspace_slug": workspace_slug, + "api_key_ids": api_key_ids, + "ai_org_model": ai_org_model, + "prompt_slug": prompt_slug, + "metadata": metadata, + "cache_status": cache_status, + **kwargs, + } + filtered_query = {k: v for k, v in query.items() if v is not NOT_GIVEN} + return urlencode(filtered_query) + + async def users( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGroupResponse: + """Get analytics grouped by users.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return await self._get( + f"{ANALYTICS_API_PATH}/groups/users?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGroupResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def ai_models( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGroupResponse: + """Get analytics grouped by AI models.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + return await self._get( + f"{ANALYTICS_API_PATH}/groups/ai-models?{query_string}", + params=None, + body=None, + cast_to=AnalyticsGroupResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def workspaces( + self, + *, + time_of_generation_min: str, + time_of_generation_max: str, + **kwargs: Any, + ) -> AnalyticsGroupResponse: + """Get analytics grouped by workspaces.""" + query_string = self._build_query_string( + time_of_generation_min=time_of_generation_min, + time_of_generation_max=time_of_generation_max, + **kwargs, + ) + 
class AsyncAnalyticsSummary(AsyncAPIResource):
    """Async Analytics Summary API for retrieving summary analytics data."""

    def __init__(self, client: AsyncAPIClient) -> None:
        super().__init__(client)

    async def cache(
        self,
        *,
        time_of_generation_min: str,
        time_of_generation_max: str,
        workspace_slug: Union[str, NotGiven] = NOT_GIVEN,
        **kwargs: Any,
    ) -> AnalyticsSummaryResponse:
        """Fetch cache summary analytics for the given generation-time window.

        Args:
            time_of_generation_min: Lower bound of the time window.
            time_of_generation_max: Upper bound of the time window.
            workspace_slug: Optional workspace filter.
            **kwargs: Additional query parameters forwarded verbatim.
        """
        raw_params = dict(
            time_of_generation_min=time_of_generation_min,
            time_of_generation_max=time_of_generation_max,
            workspace_slug=workspace_slug,
            **kwargs,
        )
        # Drop parameters the caller left unset before serializing the query.
        qs = urlencode(
            {name: value for name, value in raw_params.items() if value is not NOT_GIVEN}
        )
        return await self._get(
            f"{ANALYTICS_API_PATH}/summary/cache?{qs}",
            params=None,
            body=None,
            cast_to=AnalyticsSummaryResponse,
            stream=False,
            stream_cls=None,
            headers={},
        )


class AsyncAnalytics(AsyncAPIResource):
    """Async Analytics API for retrieving analytics data from Portkey."""

    graphs: AsyncAnalyticsGraphs
    groups: AsyncAnalyticsGroups
    summary: AsyncAnalyticsSummary

    def __init__(self, client: AsyncAPIClient) -> None:
        super().__init__(client)
        # One sub-resource per analytics endpoint family.
        self.graphs = AsyncAnalyticsGraphs(client)
        self.groups = AsyncAnalyticsGroups(client)
        self.summary = AsyncAnalyticsSummary(client)
class McpIntegrationWorkspaces(APIResource):
    """MCP Integration Workspaces API."""

    def __init__(self, client: APIClient) -> None:
        super().__init__(client)

    def list(self, *, mcp_integration_id: str) -> McpIntegrationWorkspacesResponse:
        """List workspaces attached to the given MCP integration."""
        endpoint = f"{MCP_INTEGRATIONS_API_PATH}/{mcp_integration_id}/workspaces"
        return self._get(
            endpoint,
            params=None,
            body=None,
            cast_to=McpIntegrationWorkspacesResponse,
            stream=False,
            stream_cls=None,
            headers={},
        )

    def update(
        self,
        *,
        mcp_integration_id: str,
        workspaces: Union[List[Dict[str, Any]], NotGiven] = NOT_GIVEN,
        global_workspace_access: Union[Dict[str, Any], NotGiven] = NOT_GIVEN,
        **kwargs: Any,
    ) -> GenericResponse:
        """Update workspace assignments for the given MCP integration.

        Args:
            mcp_integration_id: Identifier of the integration to update.
            workspaces: Per-workspace access entries.
            global_workspace_access: Default access applied across workspaces.
            **kwargs: Extra body fields forwarded verbatim.
        """
        payload: Dict[str, Any] = {
            "workspaces": workspaces,
            "global_workspace_access": global_workspace_access,
        }
        # Caller-supplied extras win on key collision, as in the original merge.
        payload.update(kwargs)
        return self._put(
            f"{MCP_INTEGRATIONS_API_PATH}/{mcp_integration_id}/workspaces",
            body=payload,
            params=None,
            cast_to=GenericResponse,
            stream=False,
            stream_cls=None,
            headers={},
        )
class McpIntegrationMetadata(APIResource):
    """MCP Integration Metadata API."""

    def __init__(self, client: APIClient) -> None:
        super().__init__(client)

    def retrieve(self, *, mcp_integration_id: str) -> McpIntegrationMetadataResponse:
        """Fetch stored metadata for the given MCP integration."""
        endpoint = f"{MCP_INTEGRATIONS_API_PATH}/{mcp_integration_id}/metadata"
        return self._get(
            endpoint,
            params=None,
            body=None,
            cast_to=McpIntegrationMetadataResponse,
            stream=False,
            stream_cls=None,
            headers={},
        )
cast_to=McpIntegrationCreateResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def list( + self, + *, + organisation_id: Union[str, NotGiven] = NOT_GIVEN, + type: Union[Literal["workspace", "organisation", "all"], NotGiven] = NOT_GIVEN, + workspace_id: Union[str, NotGiven] = NOT_GIVEN, + current_page: Union[int, NotGiven] = NOT_GIVEN, + page_size: Union[int, NotGiven] = NOT_GIVEN, + search: Union[str, NotGiven] = NOT_GIVEN, + **kwargs: Any, + ) -> McpIntegrationListResponse: + """List MCP integrations.""" + query = { + "organisation_id": organisation_id, + "type": type, + "workspace_id": workspace_id, + "current_page": current_page, + "page_size": page_size, + "search": search, + **kwargs, + } + filtered_query = {k: v for k, v in query.items() if v is not NOT_GIVEN} + query_string = urlencode(filtered_query) if filtered_query else "" + url = ( + f"{MCP_INTEGRATIONS_API_PATH}?{query_string}" + if query_string + else MCP_INTEGRATIONS_API_PATH + ) + return self._get( + url, + params=None, + body=None, + cast_to=McpIntegrationListResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def retrieve( + self, + *, + mcp_integration_id: str, + ) -> McpIntegrationRetrieveResponse: + """Retrieve an MCP integration by ID.""" + return self._get( + f"{MCP_INTEGRATIONS_API_PATH}/{mcp_integration_id}", + params=None, + body=None, + cast_to=McpIntegrationRetrieveResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def update( + self, + *, + mcp_integration_id: str, + name: Union[str, NotGiven] = NOT_GIVEN, + description: Union[str, NotGiven] = NOT_GIVEN, + configurations: Union[Dict[str, Any], NotGiven] = NOT_GIVEN, + url: Union[str, NotGiven] = NOT_GIVEN, + auth_type: Union[ + Literal["oauth_auto", "oauth_client_credentials", "headers", "none"], + NotGiven, + ] = NOT_GIVEN, + transport: Union[Literal["http", "sse"], NotGiven] = NOT_GIVEN, + **kwargs: Any, + ) -> McpIntegrationUpdateResponse: + """Update an MCP integration.""" + body = { + 
"name": name, + "description": description, + "configurations": configurations, + "url": url, + "auth_type": auth_type, + "transport": transport, + **kwargs, + } + return self._put( + f"{MCP_INTEGRATIONS_API_PATH}/{mcp_integration_id}", + body=body, + params=None, + cast_to=McpIntegrationUpdateResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def delete( + self, + *, + mcp_integration_id: str, + ) -> McpIntegrationDeleteResponse: + """Delete an MCP integration.""" + return self._delete( + f"{MCP_INTEGRATIONS_API_PATH}/{mcp_integration_id}", + params=None, + body=None, + cast_to=McpIntegrationDeleteResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def sync( + self, + *, + mcp_integration_id: str, + server_info: Union[Dict[str, Any], NotGiven] = NOT_GIVEN, + capabilities: Union[List[Dict[str, Any]], NotGiven] = NOT_GIVEN, + **kwargs: Any, + ) -> McpIntegrationSyncResponse: + """Sync an MCP integration.""" + body = { + "server_info": server_info, + "capabilities": capabilities, + **kwargs, + } + return self._post( + f"{MCP_INTEGRATIONS_API_PATH}/{mcp_integration_id}/sync", + body=body, + params=None, + cast_to=McpIntegrationSyncResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def test( + self, + *, + mcp_integration_id: str, + ) -> McpIntegrationTestResponse: + """Test an MCP integration connection.""" + return self._post( + f"{MCP_INTEGRATIONS_API_PATH}/{mcp_integration_id}/test", + body=None, + params=None, + cast_to=McpIntegrationTestResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + +class AsyncMcpIntegrationWorkspaces(AsyncAPIResource): + """Async MCP Integration Workspaces API.""" + + def __init__(self, client: AsyncAPIClient) -> None: + super().__init__(client) + + async def list( + self, + *, + mcp_integration_id: str, + ) -> McpIntegrationWorkspacesResponse: + """List workspaces for an MCP integration.""" + return await self._get( + 
f"{MCP_INTEGRATIONS_API_PATH}/{mcp_integration_id}/workspaces", + params=None, + body=None, + cast_to=McpIntegrationWorkspacesResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def update( + self, + *, + mcp_integration_id: str, + workspaces: Union[List[Dict[str, Any]], NotGiven] = NOT_GIVEN, + global_workspace_access: Union[Dict[str, Any], NotGiven] = NOT_GIVEN, + **kwargs: Any, + ) -> GenericResponse: + """Update workspaces for an MCP integration.""" + body = { + "workspaces": workspaces, + "global_workspace_access": global_workspace_access, + **kwargs, + } + return await self._put( + f"{MCP_INTEGRATIONS_API_PATH}/{mcp_integration_id}/workspaces", + body=body, + params=None, + cast_to=GenericResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + +class AsyncMcpIntegrationCapabilities(AsyncAPIResource): + """Async MCP Integration Capabilities API.""" + + def __init__(self, client: AsyncAPIClient) -> None: + super().__init__(client) + + async def list( + self, + *, + mcp_integration_id: str, + ) -> McpIntegrationCapabilitiesResponse: + """List capabilities for an MCP integration.""" + return await self._get( + f"{MCP_INTEGRATIONS_API_PATH}/{mcp_integration_id}/capabilities", + params=None, + body=None, + cast_to=McpIntegrationCapabilitiesResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def update( + self, + *, + mcp_integration_id: str, + capabilities: List[Dict[str, Any]], + **kwargs: Any, + ) -> GenericResponse: + """Update capabilities for an MCP integration.""" + body = { + "capabilities": capabilities, + **kwargs, + } + return await self._put( + f"{MCP_INTEGRATIONS_API_PATH}/{mcp_integration_id}/capabilities", + body=body, + params=None, + cast_to=GenericResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + +class AsyncMcpIntegrationMetadata(AsyncAPIResource): + """Async MCP Integration Metadata API.""" + + def __init__(self, client: AsyncAPIClient) -> None: + super().__init__(client) + + 
async def retrieve( + self, + *, + mcp_integration_id: str, + ) -> McpIntegrationMetadataResponse: + """Get metadata for an MCP integration.""" + return await self._get( + f"{MCP_INTEGRATIONS_API_PATH}/{mcp_integration_id}/metadata", + params=None, + body=None, + cast_to=McpIntegrationMetadataResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + +class AsyncMcpIntegrations(AsyncAPIResource): + """Async MCP Integrations API for managing MCP integration configurations.""" + + workspaces: AsyncMcpIntegrationWorkspaces + capabilities: AsyncMcpIntegrationCapabilities + metadata: AsyncMcpIntegrationMetadata + + def __init__(self, client: AsyncAPIClient) -> None: + super().__init__(client) + self.workspaces = AsyncMcpIntegrationWorkspaces(client) + self.capabilities = AsyncMcpIntegrationCapabilities(client) + self.metadata = AsyncMcpIntegrationMetadata(client) + + async def create( + self, + *, + name: str, + url: str, + auth_type: Literal["oauth_auto", "oauth_client_credentials", "headers", "none"], + transport: Literal["http", "sse"], + description: Union[str, NotGiven] = NOT_GIVEN, + workspace_id: Union[str, NotGiven] = NOT_GIVEN, + organisation_id: Union[str, NotGiven] = NOT_GIVEN, + configurations: Union[Dict[str, Any], NotGiven] = NOT_GIVEN, + **kwargs: Any, + ) -> McpIntegrationCreateResponse: + """Create a new MCP integration.""" + body = { + "name": name, + "url": url, + "auth_type": auth_type, + "transport": transport, + "description": description, + "workspace_id": workspace_id, + "organisation_id": organisation_id, + "configurations": configurations, + **kwargs, + } + return await self._post( + MCP_INTEGRATIONS_API_PATH, + body=body, + params=None, + cast_to=McpIntegrationCreateResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def list( + self, + *, + organisation_id: Union[str, NotGiven] = NOT_GIVEN, + type: Union[Literal["workspace", "organisation", "all"], NotGiven] = NOT_GIVEN, + workspace_id: Union[str, NotGiven] = 
NOT_GIVEN, + current_page: Union[int, NotGiven] = NOT_GIVEN, + page_size: Union[int, NotGiven] = NOT_GIVEN, + search: Union[str, NotGiven] = NOT_GIVEN, + **kwargs: Any, + ) -> McpIntegrationListResponse: + """List MCP integrations.""" + query = { + "organisation_id": organisation_id, + "type": type, + "workspace_id": workspace_id, + "current_page": current_page, + "page_size": page_size, + "search": search, + **kwargs, + } + filtered_query = {k: v for k, v in query.items() if v is not NOT_GIVEN} + query_string = urlencode(filtered_query) if filtered_query else "" + url = ( + f"{MCP_INTEGRATIONS_API_PATH}?{query_string}" + if query_string + else MCP_INTEGRATIONS_API_PATH + ) + return await self._get( + url, + params=None, + body=None, + cast_to=McpIntegrationListResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def retrieve( + self, + *, + mcp_integration_id: str, + ) -> McpIntegrationRetrieveResponse: + """Retrieve an MCP integration by ID.""" + return await self._get( + f"{MCP_INTEGRATIONS_API_PATH}/{mcp_integration_id}", + params=None, + body=None, + cast_to=McpIntegrationRetrieveResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def update( + self, + *, + mcp_integration_id: str, + name: Union[str, NotGiven] = NOT_GIVEN, + description: Union[str, NotGiven] = NOT_GIVEN, + configurations: Union[Dict[str, Any], NotGiven] = NOT_GIVEN, + url: Union[str, NotGiven] = NOT_GIVEN, + auth_type: Union[ + Literal["oauth_auto", "oauth_client_credentials", "headers", "none"], + NotGiven, + ] = NOT_GIVEN, + transport: Union[Literal["http", "sse"], NotGiven] = NOT_GIVEN, + **kwargs: Any, + ) -> McpIntegrationUpdateResponse: + """Update an MCP integration.""" + body = { + "name": name, + "description": description, + "configurations": configurations, + "url": url, + "auth_type": auth_type, + "transport": transport, + **kwargs, + } + return await self._put( + f"{MCP_INTEGRATIONS_API_PATH}/{mcp_integration_id}", + body=body, + 
params=None, + cast_to=McpIntegrationUpdateResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def delete( + self, + *, + mcp_integration_id: str, + ) -> McpIntegrationDeleteResponse: + """Delete an MCP integration.""" + return await self._delete( + f"{MCP_INTEGRATIONS_API_PATH}/{mcp_integration_id}", + params=None, + body=None, + cast_to=McpIntegrationDeleteResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def sync( + self, + *, + mcp_integration_id: str, + server_info: Union[Dict[str, Any], NotGiven] = NOT_GIVEN, + capabilities: Union[List[Dict[str, Any]], NotGiven] = NOT_GIVEN, + **kwargs: Any, + ) -> McpIntegrationSyncResponse: + """Sync an MCP integration.""" + body = { + "server_info": server_info, + "capabilities": capabilities, + **kwargs, + } + return await self._post( + f"{MCP_INTEGRATIONS_API_PATH}/{mcp_integration_id}/sync", + body=body, + params=None, + cast_to=McpIntegrationSyncResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def test( + self, + *, + mcp_integration_id: str, + ) -> McpIntegrationTestResponse: + """Test an MCP integration connection.""" + return await self._post( + f"{MCP_INTEGRATIONS_API_PATH}/{mcp_integration_id}/test", + body=None, + params=None, + cast_to=McpIntegrationTestResponse, + stream=False, + stream_cls=None, + headers={}, + ) diff --git a/portkey_ai/api_resources/apis/mcp_servers.py b/portkey_ai/api_resources/apis/mcp_servers.py new file mode 100644 index 00000000..8eb5823b --- /dev/null +++ b/portkey_ai/api_resources/apis/mcp_servers.py @@ -0,0 +1,850 @@ +from typing import Any, Dict, List, Union +from portkey_ai._vendor.openai import NOT_GIVEN, NotGiven +from portkey_ai.api_resources.base_client import APIClient, AsyncAPIClient +from urllib.parse import urlencode +from portkey_ai.api_resources.apis.api_resource import APIResource, AsyncAPIResource +from portkey_ai.api_resources.types.mcp_type import ( + McpServerCreateResponse, + 
McpServerRetrieveResponse, + McpServerListResponse, + McpServerUpdateResponse, + McpServerDeleteResponse, + McpServerTestResponse, + McpServerTokensResponse, + McpServerClientInfoResponse, + McpServerCapabilitiesResponse, + McpServerUserAccessResponse, + McpServerUserAccessCheckResponse, + McpServerMetadataResponse, +) +from portkey_ai.api_resources.utils import GenericResponse + +MCP_SERVERS_API_PATH = "/mcp-servers" + + +class McpServerCapabilities(APIResource): + """MCP Server Capabilities API.""" + + def __init__(self, client: APIClient) -> None: + super().__init__(client) + + def list( + self, + *, + mcp_server_id: str, + ) -> McpServerCapabilitiesResponse: + """List capabilities for an MCP server.""" + return self._get( + f"{MCP_SERVERS_API_PATH}/{mcp_server_id}/capabilities", + params=None, + body=None, + cast_to=McpServerCapabilitiesResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def update( + self, + *, + mcp_server_id: str, + capabilities: List[Dict[str, Any]], + **kwargs: Any, + ) -> GenericResponse: + """Update capabilities for an MCP server.""" + body = { + "capabilities": capabilities, + **kwargs, + } + return self._put( + f"{MCP_SERVERS_API_PATH}/{mcp_server_id}/capabilities", + body=body, + params=None, + cast_to=GenericResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def sync( + self, + *, + mcp_server_id: str, + capabilities: Union[List[Dict[str, Any]], NotGiven] = NOT_GIVEN, + **kwargs: Any, + ) -> GenericResponse: + """Sync capabilities for an MCP server.""" + body = { + "capabilities": capabilities, + **kwargs, + } + return self._post( + f"{MCP_SERVERS_API_PATH}/{mcp_server_id}/capabilities/sync", + body=body, + params=None, + cast_to=GenericResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + +class McpServerUserAccess(APIResource): + """MCP Server User Access API.""" + + def __init__(self, client: APIClient) -> None: + super().__init__(client) + + def check( + self, + *, + mcp_server_id: str, 
class McpServerMetadata(APIResource):
    """MCP Server Metadata API."""

    def __init__(self, client: APIClient) -> None:
        super().__init__(client)

    def retrieve(self, *, mcp_server_id: str) -> McpServerMetadataResponse:
        """Fetch stored metadata for the given MCP server."""
        endpoint = f"{MCP_SERVERS_API_PATH}/{mcp_server_id}/metadata"
        return self._get(
            endpoint,
            params=None,
            body=None,
            cast_to=McpServerMetadataResponse,
            stream=False,
            stream_cls=None,
            headers={},
        )

    def sync(
        self,
        *,
        mcp_server_id: str,
        server_name: Union[str, NotGiven] = NOT_GIVEN,
        server_version: Union[str, NotGiven] = NOT_GIVEN,
        protocol_version: Union[str, NotGiven] = NOT_GIVEN,
        title: Union[str, NotGiven] = NOT_GIVEN,
        description: Union[str, NotGiven] = NOT_GIVEN,
        website_url: Union[str, NotGiven] = NOT_GIVEN,
        icons: Union[Dict[str, Any], NotGiven] = NOT_GIVEN,
        capability_flags: Union[Dict[str, Any], NotGiven] = NOT_GIVEN,
        instructions: Union[str, NotGiven] = NOT_GIVEN,
        **kwargs: Any,
    ) -> GenericResponse:
        """Push server-reported metadata to the MCP server record.

        NOTE(review): unset (NOT_GIVEN) fields are forwarded in the request
        body as-is, relying on the client's serializer to drop them — unlike
        the query-string helpers, which filter explicitly. Confirm this
        matches the base client's behaviour.
        """
        payload: Dict[str, Any] = {
            "server_name": server_name,
            "server_version": server_version,
            "protocol_version": protocol_version,
            "title": title,
            "description": description,
            "website_url": website_url,
            "icons": icons,
            "capability_flags": capability_flags,
            "instructions": instructions,
        }
        # Extras win on key collision, matching the original dict merge order.
        payload.update(kwargs)
        return self._post(
            f"{MCP_SERVERS_API_PATH}/{mcp_server_id}/metadata/sync",
            body=payload,
            params=None,
            cast_to=GenericResponse,
            stream=False,
            stream_cls=None,
            headers={},
        )
search: Union[str, NotGiven] = NOT_GIVEN, + **kwargs: Any, + ) -> McpServerListResponse: + """List MCP servers.""" + query = { + "workspace_id": workspace_id, + "current_page": current_page, + "page_size": page_size, + "id": id, + "search": search, + **kwargs, + } + filtered_query = {k: v for k, v in query.items() if v is not NOT_GIVEN} + query_string = urlencode(filtered_query) if filtered_query else "" + url = ( + f"{MCP_SERVERS_API_PATH}?{query_string}" + if query_string + else MCP_SERVERS_API_PATH + ) + return self._get( + url, + params=None, + body=None, + cast_to=McpServerListResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def retrieve( + self, + *, + mcp_server_id: str, + ) -> McpServerRetrieveResponse: + """Retrieve an MCP server by ID.""" + return self._get( + f"{MCP_SERVERS_API_PATH}/{mcp_server_id}", + params=None, + body=None, + cast_to=McpServerRetrieveResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def update( + self, + *, + mcp_server_id: str, + name: Union[str, NotGiven] = NOT_GIVEN, + description: Union[str, NotGiven] = NOT_GIVEN, + **kwargs: Any, + ) -> McpServerUpdateResponse: + """Update an MCP server.""" + body = { + "name": name, + "description": description, + **kwargs, + } + return self._put( + f"{MCP_SERVERS_API_PATH}/{mcp_server_id}", + body=body, + params=None, + cast_to=McpServerUpdateResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def delete( + self, + *, + mcp_server_id: str, + ) -> McpServerDeleteResponse: + """Delete an MCP server.""" + return self._delete( + f"{MCP_SERVERS_API_PATH}/{mcp_server_id}", + params=None, + body=None, + cast_to=McpServerDeleteResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def test( + self, + *, + mcp_server_id: str, + ) -> McpServerTestResponse: + """Test an MCP server connection.""" + return self._post( + f"{MCP_SERVERS_API_PATH}/{mcp_server_id}/test", + body=None, + params=None, + cast_to=McpServerTestResponse, + stream=False, + 
stream_cls=None, + headers={}, + ) + + def get_tokens( + self, + *, + mcp_server_id: str, + ) -> McpServerTokensResponse: + """Get tokens for an MCP server.""" + return self._get( + f"{MCP_SERVERS_API_PATH}/{mcp_server_id}/tokens", + params=None, + body=None, + cast_to=McpServerTokensResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def update_tokens( + self, + *, + mcp_server_id: str, + access_token: str, + refresh_token: Union[str, NotGiven] = NOT_GIVEN, + **kwargs: Any, + ) -> GenericResponse: + """Update tokens for an MCP server.""" + body = { + "access_token": access_token, + "refresh_token": refresh_token, + **kwargs, + } + return self._put( + f"{MCP_SERVERS_API_PATH}/{mcp_server_id}/tokens", + body=body, + params=None, + cast_to=GenericResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def delete_tokens( + self, + *, + mcp_server_id: str, + ) -> GenericResponse: + """Delete tokens for an MCP server.""" + return self._delete( + f"{MCP_SERVERS_API_PATH}/{mcp_server_id}/tokens", + params=None, + body=None, + cast_to=GenericResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + def get_client_info( + self, + *, + mcp_server_id: str, + ) -> McpServerClientInfoResponse: + """Get client info for an MCP server.""" + return self._get( + f"{MCP_SERVERS_API_PATH}/{mcp_server_id}/client-info", + params=None, + body=None, + cast_to=McpServerClientInfoResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + +class AsyncMcpServerCapabilities(AsyncAPIResource): + """Async MCP Server Capabilities API.""" + + def __init__(self, client: AsyncAPIClient) -> None: + super().__init__(client) + + async def list( + self, + *, + mcp_server_id: str, + ) -> McpServerCapabilitiesResponse: + """List capabilities for an MCP server.""" + return await self._get( + f"{MCP_SERVERS_API_PATH}/{mcp_server_id}/capabilities", + params=None, + body=None, + cast_to=McpServerCapabilitiesResponse, + stream=False, + stream_cls=None, + headers={}, + 
) + + async def update( + self, + *, + mcp_server_id: str, + capabilities: List[Dict[str, Any]], + **kwargs: Any, + ) -> GenericResponse: + """Update capabilities for an MCP server.""" + body = { + "capabilities": capabilities, + **kwargs, + } + return await self._put( + f"{MCP_SERVERS_API_PATH}/{mcp_server_id}/capabilities", + body=body, + params=None, + cast_to=GenericResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def sync( + self, + *, + mcp_server_id: str, + capabilities: Union[List[Dict[str, Any]], NotGiven] = NOT_GIVEN, + **kwargs: Any, + ) -> GenericResponse: + """Sync capabilities for an MCP server.""" + body = { + "capabilities": capabilities, + **kwargs, + } + return await self._post( + f"{MCP_SERVERS_API_PATH}/{mcp_server_id}/capabilities/sync", + body=body, + params=None, + cast_to=GenericResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + +class AsyncMcpServerUserAccess(AsyncAPIResource): + """Async MCP Server User Access API.""" + + def __init__(self, client: AsyncAPIClient) -> None: + super().__init__(client) + + async def check( + self, + *, + mcp_server_id: str, + ) -> McpServerUserAccessCheckResponse: + """Check user access for an MCP server.""" + return await self._get( + f"{MCP_SERVERS_API_PATH}/{mcp_server_id}/user-access/check", + params=None, + body=None, + cast_to=McpServerUserAccessCheckResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def list( + self, + *, + mcp_server_id: str, + ) -> McpServerUserAccessResponse: + """List user access for an MCP server.""" + return await self._get( + f"{MCP_SERVERS_API_PATH}/{mcp_server_id}/user-access", + params=None, + body=None, + cast_to=McpServerUserAccessResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def update( + self, + *, + mcp_server_id: str, + user_access: Union[List[Dict[str, Any]], NotGiven] = NOT_GIVEN, + default_user_access: Union[Dict[str, Any], NotGiven] = NOT_GIVEN, + **kwargs: Any, + ) -> 
GenericResponse: + """Update user access for an MCP server.""" + body = { + "user_access": user_access, + "default_user_access": default_user_access, + **kwargs, + } + return await self._put( + f"{MCP_SERVERS_API_PATH}/{mcp_server_id}/user-access", + body=body, + params=None, + cast_to=GenericResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + +class AsyncMcpServerMetadata(AsyncAPIResource): + """Async MCP Server Metadata API.""" + + def __init__(self, client: AsyncAPIClient) -> None: + super().__init__(client) + + async def retrieve( + self, + *, + mcp_server_id: str, + ) -> McpServerMetadataResponse: + """Get metadata for an MCP server.""" + return await self._get( + f"{MCP_SERVERS_API_PATH}/{mcp_server_id}/metadata", + params=None, + body=None, + cast_to=McpServerMetadataResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + async def sync( + self, + *, + mcp_server_id: str, + server_name: Union[str, NotGiven] = NOT_GIVEN, + server_version: Union[str, NotGiven] = NOT_GIVEN, + protocol_version: Union[str, NotGiven] = NOT_GIVEN, + title: Union[str, NotGiven] = NOT_GIVEN, + description: Union[str, NotGiven] = NOT_GIVEN, + website_url: Union[str, NotGiven] = NOT_GIVEN, + icons: Union[Dict[str, Any], NotGiven] = NOT_GIVEN, + capability_flags: Union[Dict[str, Any], NotGiven] = NOT_GIVEN, + instructions: Union[str, NotGiven] = NOT_GIVEN, + **kwargs: Any, + ) -> GenericResponse: + """Sync metadata for an MCP server.""" + body = { + "server_name": server_name, + "server_version": server_version, + "protocol_version": protocol_version, + "title": title, + "description": description, + "website_url": website_url, + "icons": icons, + "capability_flags": capability_flags, + "instructions": instructions, + **kwargs, + } + return await self._post( + f"{MCP_SERVERS_API_PATH}/{mcp_server_id}/metadata/sync", + body=body, + params=None, + cast_to=GenericResponse, + stream=False, + stream_cls=None, + headers={}, + ) + + +class 
class AsyncMcpServers(AsyncAPIResource):
    """Async MCP Servers API for managing MCP server configurations."""

    capabilities: AsyncMcpServerCapabilities
    user_access: AsyncMcpServerUserAccess
    metadata: AsyncMcpServerMetadata

    def __init__(self, client: AsyncAPIClient) -> None:
        super().__init__(client)
        self.capabilities = AsyncMcpServerCapabilities(client)
        self.user_access = AsyncMcpServerUserAccess(client)
        self.metadata = AsyncMcpServerMetadata(client)

    async def create(
        self,
        *,
        name: str,
        mcp_integration_id: str,
        description: Union[str, NotGiven] = NOT_GIVEN,
        workspace_id: Union[str, NotGiven] = NOT_GIVEN,
        slug: Union[str, NotGiven] = NOT_GIVEN,
        **kwargs: Any,
    ) -> McpServerCreateResponse:
        """Create a new MCP server."""
        payload = {
            "name": name,
            "mcp_integration_id": mcp_integration_id,
            "description": description,
            "workspace_id": workspace_id,
            "slug": slug,
            **kwargs,
        }
        return await self._post(
            MCP_SERVERS_API_PATH,
            body=payload,
            params=None,
            cast_to=McpServerCreateResponse,
            stream=False,
            stream_cls=None,
            headers={},
        )

    async def list(
        self,
        *,
        workspace_id: Union[str, NotGiven] = NOT_GIVEN,
        current_page: Union[int, NotGiven] = NOT_GIVEN,
        page_size: Union[int, NotGiven] = NOT_GIVEN,
        id: Union[str, NotGiven] = NOT_GIVEN,
        search: Union[str, NotGiven] = NOT_GIVEN,
        **kwargs: Any,
    ) -> McpServerListResponse:
        """List MCP servers, ignoring parameters the caller did not supply."""
        raw_query = {
            "workspace_id": workspace_id,
            "current_page": current_page,
            "page_size": page_size,
            "id": id,
            "search": search,
            **kwargs,
        }
        # Only explicitly provided parameters reach the query string.
        query = {k: v for k, v in raw_query.items() if v is not NOT_GIVEN}
        url = MCP_SERVERS_API_PATH
        if query:
            url = f"{url}?{urlencode(query)}"
        return await self._get(
            url,
            params=None,
            body=None,
            cast_to=McpServerListResponse,
            stream=False,
            stream_cls=None,
            headers={},
        )

    async def retrieve(
        self,
        *,
        mcp_server_id: str,
    ) -> McpServerRetrieveResponse:
        """Retrieve an MCP server by ID."""
        url = f"{MCP_SERVERS_API_PATH}/{mcp_server_id}"
        return await self._get(
            url,
            params=None,
            body=None,
            cast_to=McpServerRetrieveResponse,
            stream=False,
            stream_cls=None,
            headers={},
        )

    async def update(
        self,
        *,
        mcp_server_id: str,
        name: Union[str, NotGiven] = NOT_GIVEN,
        description: Union[str, NotGiven] = NOT_GIVEN,
        **kwargs: Any,
    ) -> McpServerUpdateResponse:
        """Update an MCP server's name and/or description."""
        payload = {
            "name": name,
            "description": description,
            **kwargs,
        }
        url = f"{MCP_SERVERS_API_PATH}/{mcp_server_id}"
        return await self._put(
            url,
            body=payload,
            params=None,
            cast_to=McpServerUpdateResponse,
            stream=False,
            stream_cls=None,
            headers={},
        )

    async def delete(
        self,
        *,
        mcp_server_id: str,
    ) -> McpServerDeleteResponse:
        """Delete an MCP server."""
        url = f"{MCP_SERVERS_API_PATH}/{mcp_server_id}"
        return await self._delete(
            url,
            params=None,
            body=None,
            cast_to=McpServerDeleteResponse,
            stream=False,
            stream_cls=None,
            headers={},
        )

    async def test(
        self,
        *,
        mcp_server_id: str,
    ) -> McpServerTestResponse:
        """Test the connection to an MCP server."""
        url = f"{MCP_SERVERS_API_PATH}/{mcp_server_id}/test"
        return await self._post(
            url,
            body=None,
            params=None,
            cast_to=McpServerTestResponse,
            stream=False,
            stream_cls=None,
            headers={},
        )

    async def get_tokens(
        self,
        *,
        mcp_server_id: str,
    ) -> McpServerTokensResponse:
        """Fetch the stored tokens for an MCP server."""
        url = f"{MCP_SERVERS_API_PATH}/{mcp_server_id}/tokens"
        return await self._get(
            url,
            params=None,
            body=None,
            cast_to=McpServerTokensResponse,
            stream=False,
            stream_cls=None,
            headers={},
        )

    async def update_tokens(
        self,
        *,
        mcp_server_id: str,
        access_token: str,
        refresh_token: Union[str, NotGiven] = NOT_GIVEN,
        **kwargs: Any,
    ) -> GenericResponse:
        """Replace the stored tokens for an MCP server."""
        payload = {
            "access_token": access_token,
            "refresh_token": refresh_token,
            **kwargs,
        }
        url = f"{MCP_SERVERS_API_PATH}/{mcp_server_id}/tokens"
        return await self._put(
            url,
            body=payload,
            params=None,
            cast_to=GenericResponse,
            stream=False,
            stream_cls=None,
            headers={},
        )

    async def delete_tokens(
        self,
        *,
        mcp_server_id: str,
    ) -> GenericResponse:
        """Remove the stored tokens for an MCP server."""
        url = f"{MCP_SERVERS_API_PATH}/{mcp_server_id}/tokens"
        return await self._delete(
            url,
            params=None,
            body=None,
            cast_to=GenericResponse,
            stream=False,
            stream_cls=None,
            headers={},
        )

    async def get_client_info(
        self,
        *,
        mcp_server_id: str,
    ) -> McpServerClientInfoResponse:
        """Fetch OAuth client information for an MCP server."""
        url = f"{MCP_SERVERS_API_PATH}/{mcp_server_id}/client-info"
        return await self._get(
            url,
            params=None,
            body=None,
            cast_to=McpServerClientInfoResponse,
            stream=False,
            stream_cls=None,
            headers={},
        )
class _AnalyticsResponseBase(BaseModel, extra="allow"):
    """Shared plumbing for analytics response models.

    Provides response-header access and dict-style attribute access so the
    concrete response classes only have to declare their fields.
    """

    # API object-type discriminator returned by the service.
    object: Optional[str] = None
    # Raw httpx response headers; populated by the API client.
    _headers: Optional[httpx.Headers] = PrivateAttr()

    def get_headers(self) -> Optional[Dict[str, str]]:
        """Return the HTTP response headers as a plain dict (or None)."""
        return parse_headers(self._headers)

    def __str__(self):
        return json.dumps(self.dict(), indent=4)

    def __getitem__(self, key):
        return getattr(self, key, None)

    def get(self, key: str, default: Optional[Any] = None):
        """Dict-style lookup with a default.

        Fixed: the previous ``getattr(self, key, None) or default`` wrongly
        returned ``default`` for falsy attribute values (0, "", False, []).
        """
        value = getattr(self, key, None)
        return default if value is None else value


class AnalyticsGraphResponse(_AnalyticsResponseBase):
    """Response type for analytics graph endpoints (requests, cost, latency, etc.)"""

    data: Optional[List[Dict[str, Any]]] = None


class AnalyticsGroupResponse(_AnalyticsResponseBase):
    """Response type for analytics group endpoints (users, ai-models, workspaces, etc.)"""

    data: Optional[List[Dict[str, Any]]] = None
    total: Optional[int] = None


class AnalyticsSummaryResponse(_AnalyticsResponseBase):
    """Response type for analytics summary endpoints"""

    data: Optional[Dict[str, Any]] = None


class McpCapability(BaseModel, extra="allow"):
    """MCP capability model."""

    name: Optional[str] = None
    type: Optional[str] = None
    enabled: Optional[bool] = None
    description: Optional[str] = None
    # Maps to the wire field "schema" (trailing underscore avoids clashing
    # with BaseModel.schema).
    schema_: Optional[Dict[str, Any]] = None

    class Config:
        # NOTE(review): pydantic v1-style alias config; pydantic v2 rejects a
        # `Config` class combined with class keyword args — confirm the
        # pydantic version this SDK targets.
        fields = {"schema_": "schema"}


class McpServerMetadata(BaseModel, extra="allow"):
    """MCP server metadata model."""

    server_name: Optional[str] = None
    server_version: Optional[str] = None
    protocol_version: Optional[str] = None
    title: Optional[str] = None
    description: Optional[str] = None
    website_url: Optional[str] = None
    icons: Optional[Dict[str, Any]] = None
    capability_flags: Optional[Dict[str, Any]] = None
    instructions: Optional[str] = None
class _McpServerResponse(BaseModel, extra="allow"):
    """Common plumbing for MCP server API response models.

    Provides response-header access and dict-style attribute access so the
    concrete response classes only have to declare their fields.
    """

    # API object-type discriminator returned by the service.
    object: Optional[str] = None
    # Raw httpx response headers; populated by the API client.
    _headers: Optional[httpx.Headers] = PrivateAttr()

    def get_headers(self) -> Optional[Dict[str, str]]:
        """Return the HTTP response headers as a plain dict (or None)."""
        return parse_headers(self._headers)

    def __str__(self):
        return json.dumps(self.dict(), indent=4)

    def __getitem__(self, key):
        return getattr(self, key, None)

    def get(self, key: str, default: Optional[Any] = None):
        """Dict-style lookup with a default.

        Fixed: the previous ``getattr(self, key, None) or default`` wrongly
        returned ``default`` for falsy attribute values (0, "", False, []).
        """
        value = getattr(self, key, None)
        return default if value is None else value


class McpServerCreateResponse(_McpServerResponse):
    """Response type for MCP server creation."""

    id: Optional[str] = None
    name: Optional[str] = None
    slug: Optional[str] = None
    description: Optional[str] = None
    workspace_id: Optional[str] = None
    organisation_id: Optional[str] = None
    mcp_integration_id: Optional[str] = None
    status: Optional[str] = None
    created_at: Optional[str] = None
    updated_at: Optional[str] = None


class McpServerRetrieveResponse(_McpServerResponse):
    """Response type for MCP server retrieval."""

    id: Optional[str] = None
    name: Optional[str] = None
    slug: Optional[str] = None
    description: Optional[str] = None
    workspace_id: Optional[str] = None
    organisation_id: Optional[str] = None
    mcp_integration_id: Optional[str] = None
    status: Optional[str] = None
    created_at: Optional[str] = None
    updated_at: Optional[str] = None


class McpServerListResponse(_McpServerResponse):
    """Response type for MCP server list."""

    data: Optional[List[Dict[str, Any]]] = None
    total: Optional[int] = None


class McpServerUpdateResponse(_McpServerResponse):
    """Response type for MCP server update."""

    id: Optional[str] = None
    name: Optional[str] = None
    slug: Optional[str] = None
    description: Optional[str] = None
    status: Optional[str] = None
    updated_at: Optional[str] = None


class McpServerDeleteResponse(_McpServerResponse):
    """Response type for MCP server deletion."""

    success: Optional[bool] = None
    message: Optional[str] = None


class McpServerTestResponse(_McpServerResponse):
    """Response type for MCP server test."""

    success: Optional[bool] = None
    message: Optional[str] = None
    details: Optional[Dict[str, Any]] = None


class McpServerTokensResponse(_McpServerResponse):
    """Response type for MCP server tokens."""

    access_token: Optional[str] = None
    refresh_token: Optional[str] = None
    expires_at: Optional[str] = None


class McpServerClientInfoResponse(_McpServerResponse):
    """Response type for MCP server client info."""

    client_id: Optional[str] = None
    client_secret: Optional[str] = None


class McpServerCapabilitiesResponse(_McpServerResponse):
    """Response type for MCP server capabilities."""

    data: Optional[List[Dict[str, Any]]] = None


class McpServerUserAccessResponse(_McpServerResponse):
    """Response type for MCP server user access."""

    user_access: Optional[List[Dict[str, Any]]] = None
    default_user_access: Optional[Dict[str, Any]] = None


class McpServerUserAccessCheckResponse(_McpServerResponse):
    """Response type for MCP server user access check."""

    has_access: Optional[bool] = None


class McpServerMetadataResponse(_McpServerResponse):
    """Response type for MCP server metadata."""

    server_name: Optional[str] = None
    server_version: Optional[str] = None
    protocol_version: Optional[str] = None
    title: Optional[str] = None
    description: Optional[str] = None
    website_url: Optional[str] = None
    icons: Optional[Dict[str, Any]] = None
    capability_flags: Optional[Dict[str, Any]] = None
    instructions: Optional[str] = None
class _McpIntegrationResponse(BaseModel, extra="allow"):
    """Common plumbing for MCP integration API response models.

    Provides response-header access and dict-style attribute access so the
    concrete response classes only have to declare their fields.
    """

    # API object-type discriminator returned by the service.
    object: Optional[str] = None
    # Raw httpx response headers; populated by the API client.
    _headers: Optional[httpx.Headers] = PrivateAttr()

    def get_headers(self) -> Optional[Dict[str, str]]:
        """Return the HTTP response headers as a plain dict (or None)."""
        return parse_headers(self._headers)

    def __str__(self):
        return json.dumps(self.dict(), indent=4)

    def __getitem__(self, key):
        return getattr(self, key, None)

    def get(self, key: str, default: Optional[Any] = None):
        """Dict-style lookup with a default.

        Fixed: the previous ``getattr(self, key, None) or default`` wrongly
        returned ``default`` for falsy attribute values (0, "", False, []).
        """
        value = getattr(self, key, None)
        return default if value is None else value


class McpIntegrationCreateResponse(_McpIntegrationResponse):
    """Response type for MCP integration creation."""

    id: Optional[str] = None
    name: Optional[str] = None
    description: Optional[str] = None
    url: Optional[str] = None
    auth_type: Optional[str] = None
    transport: Optional[str] = None
    workspace_id: Optional[str] = None
    organisation_id: Optional[str] = None
    created_at: Optional[str] = None
    updated_at: Optional[str] = None


class McpIntegrationRetrieveResponse(_McpIntegrationResponse):
    """Response type for MCP integration retrieval."""

    id: Optional[str] = None
    name: Optional[str] = None
    description: Optional[str] = None
    url: Optional[str] = None
    auth_type: Optional[str] = None
    transport: Optional[str] = None
    workspace_id: Optional[str] = None
    organisation_id: Optional[str] = None
    configurations: Optional[Dict[str, Any]] = None
    created_at: Optional[str] = None
    updated_at: Optional[str] = None


class McpIntegrationListResponse(_McpIntegrationResponse):
    """Response type for MCP integration list."""

    data: Optional[List[Dict[str, Any]]] = None
    total: Optional[int] = None


class McpIntegrationUpdateResponse(_McpIntegrationResponse):
    """Response type for MCP integration update."""

    id: Optional[str] = None
    name: Optional[str] = None
    description: Optional[str] = None
    url: Optional[str] = None
    auth_type: Optional[str] = None
    transport: Optional[str] = None
    updated_at: Optional[str] = None


class McpIntegrationDeleteResponse(_McpIntegrationResponse):
    """Response type for MCP integration deletion."""

    success: Optional[bool] = None
    message: Optional[str] = None


class McpIntegrationSyncResponse(_McpIntegrationResponse):
    """Response type for MCP integration sync."""

    success: Optional[bool] = None
    message: Optional[str] = None
    details: Optional[Dict[str, Any]] = None


class McpIntegrationTestResponse(_McpIntegrationResponse):
    """Response type for MCP integration test."""

    success: Optional[bool] = None
    message: Optional[str] = None
    details: Optional[Dict[str, Any]] = None


class McpIntegrationWorkspacesResponse(_McpIntegrationResponse):
    """Response type for MCP integration workspaces."""

    workspaces: Optional[List[Dict[str, Any]]] = None
    global_workspace_access: Optional[Dict[str, Any]] = None


class McpIntegrationCapabilitiesResponse(_McpIntegrationResponse):
    """Response type for MCP integration capabilities."""

    data: Optional[List[Dict[str, Any]]] = None


class McpIntegrationMetadataResponse(_McpIntegrationResponse):
    """Response type for MCP integration metadata."""

    server_name: Optional[str] = None
    server_version: Optional[str] = None
    protocol_version: Optional[str] = None
    title: Optional[str] = None
    description: Optional[str] = None
    website_url: Optional[str] = None
    icons: Optional[Dict[str, Any]] = None
    capability_flags: Optional[Dict[str, Any]] = None
    instructions: Optional[str] = None
a/tests/test_llm_langchain.py +++ b/tests/test_llm_langchain.py @@ -7,9 +7,9 @@ from tests.utils import read_json_file from portkey_ai.langchain import LangchainCallbackHandler -from langchain.chat_models import ChatOpenAI +from langchain_community.chat_models import ChatOpenAI from langchain_core.prompts import ChatPromptTemplate -from langchain.chains import LLMChain +from langchain_classic.chains import LLMChain api_key = os.environ.get("PORTKEY_API_KEY")