From 9e974784592e686ec23dd2b420907d8453c03cda Mon Sep 17 00:00:00 2001 From: dabogee Date: Fri, 13 Mar 2026 08:37:02 +0200 Subject: [PATCH 1/8] Fixed integration with OpenAI Codex (v0.114.0) with gpt-5.4 --- ccproxy/plugins/codex/adapter.py | 113 ++++++-- ccproxy/plugins/codex/detection_service.py | 49 +++- ccproxy/plugins/codex/routes.py | 241 +++++++++++++++++- .../codex/integration/test_codex_basic.py | 6 + .../codex/integration/test_codex_websocket.py | 223 ++++++++++++++++ tests/plugins/codex/unit/test_adapter.py | 74 ++++++ tests/unit/plugins/test_codex_detection.py | 4 + 7 files changed, 685 insertions(+), 25 deletions(-) create mode 100644 tests/plugins/codex/integration/test_codex_websocket.py diff --git a/ccproxy/plugins/codex/adapter.py b/ccproxy/plugins/codex/adapter.py index 6faf46ed..5cc835e0 100644 --- a/ccproxy/plugins/codex/adapter.py +++ b/ccproxy/plugins/codex/adapter.py @@ -1,4 +1,5 @@ import contextlib +import copy import json import uuid from typing import Any, cast @@ -63,12 +64,7 @@ async def handle_request( headers = extract_request_headers(request) # Determine client streaming intent from body flag (fallback to False) - wants_stream = False - try: - data = json.loads(body.decode()) if body else {} - wants_stream = bool(data.get("stream", False)) - except Exception: # Malformed/missing JSON -> assume non-streaming - wants_stream = False + wants_stream = self._detect_streaming_intent(body, headers) logger.trace( "codex_adapter_request_intent", wants_stream=wants_stream, @@ -256,16 +252,14 @@ async def get_target_url(self, endpoint: str) -> str: async def prepare_provider_request( self, body: bytes, headers: dict[str, str], endpoint: str ) -> tuple[bytes, dict[str, str]]: - token_value = await self._resolve_access_token() + filtered_headers = await self.prepare_provider_headers(headers) - # Get profile to extract chatgpt_account_id - profile = await self.token_manager.get_profile_quick() - chatgpt_account_id = ( - getattr(profile, "chatgpt_account_id", None) if profile else None - ) + if self._request_body_is_encoded(headers): + return body, filtered_headers # Parse body (format conversion is now handled by format chain) body_data = json.loads(body.decode()) if body else {} + body_data = self._apply_request_template(body_data) # Inject instructions mandatory for being allow to # to used the Codex API endpoint @@ -299,8 +293,20 @@ async def prepare_provider_request( # Remove any prefixed metadata fields that shouldn't be sent to the API body_data = self._remove_metadata_fields(body_data) - # Filter and add headers + return json.dumps(body_data).encode(), filtered_headers + + async def prepare_provider_headers(self, headers: dict[str, str]) -> dict[str, str]: + token_value = await self._resolve_access_token() + + profile = await self.token_manager.get_profile_quick() + chatgpt_account_id = ( + getattr(profile, "chatgpt_account_id", None) if profile else None + ) + filtered_headers = filter_request_headers(headers, preserve_auth=False) + content_encoding = headers.get("content-encoding") + if content_encoding: + filtered_headers["content-encoding"] = content_encoding session_id = filtered_headers.get("session_id") or str(uuid.uuid4()) conversation_id = filtered_headers.get("conversation_id") or str(uuid.uuid4()) @@ -318,10 +324,10 @@ async def prepare_provider_request( filtered_headers.update(base_headers) cli_headers = self._collect_cli_headers() - if cli_headers: - filtered_headers.update(cli_headers) + for key, value in cli_headers.items(): + 
filtered_headers.setdefault(key, value) - return json.dumps(body_data).encode(), filtered_headers + return filtered_headers async def process_provider_response( self, response: httpx.Response, endpoint: str @@ -581,6 +587,71 @@ def _remove_metadata_fields(self, data: dict[str, Any]) -> dict[str, Any]: return cleaned_data + def _apply_request_template(self, data: dict[str, Any]) -> dict[str, Any]: + if not isinstance(data, dict): + return data + + template = self._get_request_template() + if not template: + return self._normalize_input_messages(data) + + merged = copy.deepcopy(data) + + for key in ("include", "parallel_tool_calls", "reasoning", "tool_choice"): + if key not in merged and key in template: + merged[key] = copy.deepcopy(template[key]) + + if not merged.get("tools") and isinstance(template.get("tools"), list): + merged["tools"] = copy.deepcopy(template["tools"]) + + if "prompt_cache_key" not in merged: + prompt_cache_key = template.get("prompt_cache_key") + if isinstance(prompt_cache_key, str) and prompt_cache_key: + merged["prompt_cache_key"] = str(uuid.uuid4()) + + return self._normalize_input_messages(merged) + + def _normalize_input_messages(self, data: dict[str, Any]) -> dict[str, Any]: + input_items = data.get("input") + if not isinstance(input_items, list): + return data + + normalized_items: list[Any] = [] + for item in input_items: + if ( + isinstance(item, dict) + and "type" not in item + and "role" in item + and "content" in item + ): + normalized_item = dict(item) + normalized_item["type"] = "message" + normalized_items.append(normalized_item) + continue + + normalized_items.append(item) + + data["input"] = normalized_items + return data + + def _request_body_is_encoded(self, headers: dict[str, str]) -> bool: + encoding = headers.get("content-encoding", "").strip().lower() + return bool(encoding and encoding != "identity") + + def _detect_streaming_intent( + self, body: bytes, headers: dict[str, str] + ) -> bool: + if self._request_body_is_encoded(headers): + accept = headers.get("accept", "").lower() + return "text/event-stream" in accept + + try: + data = json.loads(body.decode()) if body else {} + return bool(data.get("stream", False)) + except Exception: + accept = headers.get("accept", "").lower() + return "text/event-stream" in accept + def _get_instructions(self) -> str: if not self.detection_service: return "" @@ -601,6 +672,16 @@ def _get_instructions(self) -> str: return "" + def _get_request_template(self) -> dict[str, Any]: + if not self.detection_service: + return {} + + prompts = self.detection_service.get_detected_prompts() + if isinstance(prompts.raw, dict) and prompts.raw: + return prompts.raw + + return {} + def adapt_error(self, error_body: dict[str, Any]) -> dict[str, Any]: """Convert Codex error format to appropriate API error format. 
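
The adapter changes above center on two behaviors: request bodies with a Content-Encoding other than identity are forwarded untouched, and streaming intent falls back to the Accept header whenever the JSON body cannot be inspected. A minimal standalone sketch of that decision logic, mirroring _request_body_is_encoded and _detect_streaming_intent from the diff (the free-function form and the sample zstd payload are illustrative assumptions, not code from the patch):

import json

def detect_streaming_intent(body: bytes, headers: dict[str, str]) -> bool:
    # Compressed bodies (e.g. zstd, gzip) cannot be parsed as JSON here,
    # so the Accept header is the only available streaming signal.
    encoding = headers.get("content-encoding", "").strip().lower()
    if encoding and encoding != "identity":
        return "text/event-stream" in headers.get("accept", "").lower()
    try:
        data = json.loads(body.decode()) if body else {}
        return bool(data.get("stream", False))
    except Exception:
        # Malformed or missing JSON: fall back to the Accept header.
        return "text/event-stream" in headers.get("accept", "").lower()

# A zstd-encoded request that advertises SSE support via Accept
# is treated as streaming without touching the opaque body.
assert detect_streaming_intent(
    b"\x28\xb5\x2f\xfdcompressed-request",
    {"content-encoding": "zstd", "accept": "application/json, text/event-stream"},
)
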
diff --git a/ccproxy/plugins/codex/detection_service.py b/ccproxy/plugins/codex/detection_service.py index 9cb396b0..abec9ba3 100644 --- a/ccproxy/plugins/codex/detection_service.py +++ b/ccproxy/plugins/codex/detection_service.py @@ -43,6 +43,7 @@ class CodexDetectionService: ignores_header: list[str] = [ "host", "content-length", + "content-encoding", "authorization", "x-api-key", "session_id", @@ -133,17 +134,46 @@ def get_detected_headers(self) -> DetectedHeaders: """Return cached headers as structured data.""" data = self.get_cached_data() - if not data: - return DetectedHeaders() - return data.headers + headers = data.headers if data else DetectedHeaders() + + required_headers = { + "accept", + "content-type", + "openai-beta", + "originator", + "version", + } + missing_required = [key for key in required_headers if not headers.get(key)] + if not missing_required: + return headers + + fallback = self._safe_fallback_data() + if fallback is None: + return headers + + merged_headers = fallback.headers.as_dict() + merged_headers.update( + {key: value for key, value in headers.as_dict().items() if value} + ) + return DetectedHeaders(merged_headers) def get_detected_prompts(self) -> DetectedPrompts: """Return cached prompt metadata as structured data.""" data = self.get_cached_data() - if not data: - return DetectedPrompts() - return data.prompts + prompts = data.prompts if data else DetectedPrompts() + if prompts.has_instructions() or prompts.has_system(): + return prompts + + fallback = self._safe_fallback_data() + if fallback is None: + return prompts + + fallback_prompts = fallback.prompts + if fallback_prompts.has_instructions() or fallback_prompts.has_system(): + return fallback_prompts + + return prompts def get_ignored_headers(self) -> list[str]: """Headers that should be ignored when forwarding CLI values.""" @@ -496,6 +526,13 @@ def _get_fallback_data(self) -> CodexCacheData: fallback_data_dict = json.load(f) return CodexCacheData.model_validate(fallback_data_dict) + def _safe_fallback_data(self) -> CodexCacheData | None: + """Best-effort fallback data loader for partial detection caches.""" + try: + return self._get_fallback_data() + except Exception: + return None + def invalidate_cache(self) -> None: """Clear all cached detection data.""" # Clear the async cache for _get_codex_version diff --git a/ccproxy/plugins/codex/routes.py b/ccproxy/plugins/codex/routes.py index 28f59c93..d1f4e02a 100644 --- a/ccproxy/plugins/codex/routes.py +++ b/ccproxy/plugins/codex/routes.py @@ -1,9 +1,15 @@ """Codex plugin routes.""" +import json +from pathlib import Path +from time import time from typing import TYPE_CHECKING, Annotated, Any, cast +from urllib.parse import urlparse +from uuid import uuid4 -from fastapi import APIRouter, Depends, Request +from fastapi import APIRouter, Depends, Request, WebSocket, WebSocketDisconnect from starlette.responses import Response, StreamingResponse +from starlette.websockets import WebSocketState from ccproxy.api.decorators import with_format_chain from ccproxy.api.dependencies import ( @@ -19,7 +25,9 @@ UPSTREAM_ENDPOINT_OPENAI_CHAT_COMPLETIONS, UPSTREAM_ENDPOINT_OPENAI_RESPONSES, ) +from ccproxy.core.plugins import PluginRegistry, ProviderPluginRuntime from ccproxy.streaming import DeferredStreaming +from ccproxy.streaming.sse_parser import SSEStreamParser from .config import CodexSettings @@ -54,6 +62,194 @@ async def _codex_responses_handler( return await handle_codex_request(request, adapter) +def _get_codex_websocket_adapter(websocket: WebSocket) -> 
Any: + if not hasattr(websocket.app.state, "plugin_registry"): + raise RuntimeError("Plugin registry not initialized") + + registry: PluginRegistry = websocket.app.state.plugin_registry + runtime = registry.get_runtime("codex") + + if not runtime or not isinstance(runtime, ProviderPluginRuntime): + raise RuntimeError("Codex plugin not initialized") + + if not runtime.adapter: + raise RuntimeError("Codex adapter not available") + + return runtime.adapter + + +def _prepare_websocket_headers(websocket: WebSocket) -> dict[str, str]: + headers = { + key.lower(): value + for key, value in websocket.headers.items() + if not key.lower().startswith("sec-websocket-") + } + headers["accept"] = "text/event-stream" + return headers + + +def _parse_websocket_request(raw_message: str) -> dict[str, Any]: + payload = json.loads(raw_message) + if not isinstance(payload, dict): + raise ValueError("Expected JSON object payload") + + if payload.get("type") != "response.create": + raise ValueError("Unsupported websocket message type") + + provider_payload = dict(payload) + provider_payload.pop("type", None) + return provider_payload + + +def _make_websocket_terminal_event( + provider_payload: dict[str, Any], + *, + error: dict[str, Any] | None = None, +) -> dict[str, Any]: + response_payload = { + "id": f"resp_ws_{uuid4().hex}", + "object": "response", + "created_at": int(time()), + "status": "failed" if error else "completed", + "model": provider_payload.get("model"), + "output": [], + "parallel_tool_calls": False, + "error": error, + "incomplete_details": None, + } + return {"type": "response.completed", "response": response_payload} + + +def _is_websocket_warmup_request(provider_payload: dict[str, Any]) -> bool: + input_items = provider_payload.get("input") + return isinstance(input_items, list) and len(input_items) == 0 + + +def _serialize_codex_models(config: CodexSettings) -> list[dict[str, Any]]: + models: list[dict[str, Any]] = [] + for card in config.models_endpoint: + model_data = card.model_dump(mode="json") + slug = model_data.get("slug") or model_data.get("id") or model_data.get("root") + if isinstance(slug, str) and slug: + model_data.setdefault("slug", slug) + model_data.setdefault("display_name", slug) + models.append(model_data) + return models + + +def _load_codex_cli_models_cache() -> list[dict[str, Any]]: + cache_file = Path.home() / ".codex" / "models_cache.json" + if not cache_file.exists(): + return [] + + try: + payload = json.loads(cache_file.read_text()) + except Exception: + return [] + + models = payload.get("models") + if not isinstance(models, list): + return [] + + return [model for model in models if isinstance(model, dict)] + + +def _serialize_codex_cli_models(config: CodexSettings) -> list[dict[str, Any]]: + configured_ids = { + card.id for card in config.models_endpoint if isinstance(getattr(card, "id", None), str) + } + configured_ids.update( + { + card.root + for card in config.models_endpoint + if isinstance(getattr(card, "root", None), str) and card.root + } + ) + + cached_models = _load_codex_cli_models_cache() + if cached_models and configured_ids: + matched = [ + model + for model in cached_models + if model.get("slug") in configured_ids or model.get("display_name") in configured_ids + ] + if matched: + return matched + + return _serialize_codex_models(config) + + +async def _stream_websocket_response( + websocket: WebSocket, + adapter: Any, + provider_payload: dict[str, Any], +) -> None: + request_headers = _prepare_websocket_headers(websocket) + 
provider_payload["stream"] = True + provider_payload["store"] = False + provider_headers = await adapter.prepare_provider_headers(request_headers) + target_url = await adapter.get_target_url(UPSTREAM_ENDPOINT_OPENAI_RESPONSES) + parsed_url = urlparse(target_url) + base_url = f"{parsed_url.scheme}://{parsed_url.netloc}" + client = await adapter.http_pool_manager.get_streaming_client(base_url=base_url) + parser = SSEStreamParser() + saw_terminal_event = False + + async with client.stream( + "POST", + target_url, + headers=provider_headers, + content=json.dumps(provider_payload).encode("utf-8"), + ) as upstream_response: + if upstream_response.status_code >= 400: + error_body = await upstream_response.aread() + try: + error_payload = json.loads(error_body.decode("utf-8")) + except Exception: + error_payload = { + "error": { + "type": "server_error", + "message": error_body.decode("utf-8", errors="replace") + or "Upstream Codex request failed", + } + } + await websocket.send_text( + json.dumps( + _make_websocket_terminal_event( + provider_payload, + error=error_payload.get("error", error_payload), + ), + separators=(",", ":"), + ) + ) + return + + async for chunk in upstream_response.aiter_bytes(): + for event in parser.feed(chunk): + if event.get("type") in {"response.completed", "response.failed"}: + saw_terminal_event = True + await websocket.send_text(json.dumps(event, separators=(",", ":"))) + + for event in parser.flush(): + if event.get("type") in {"response.completed", "response.failed"}: + saw_terminal_event = True + await websocket.send_text(json.dumps(event, separators=(",", ":"))) + + if not saw_terminal_event: + await websocket.send_text( + json.dumps( + _make_websocket_terminal_event( + provider_payload, + error={ + "type": "server_error", + "message": "WebSocket stream ended before response.completed", + }, + ), + separators=(",", ":"), + ) + ) + + @router.post("/v1/responses", response_model=None) @with_format_chain( [FORMAT_OPENAI_RESPONSES], endpoint=UPSTREAM_ENDPOINT_OPENAI_RESPONSES @@ -66,6 +262,39 @@ async def codex_responses( return await _codex_responses_handler(request, adapter) +@router.websocket("/v1/responses") +async def codex_responses_websocket(websocket: WebSocket) -> None: + await websocket.accept() + + try: + adapter = _get_codex_websocket_adapter(websocket) + local_response_ids: set[str] = set() + while True: + raw_message = await websocket.receive_text() + provider_payload = _parse_websocket_request(raw_message) + if _is_websocket_warmup_request(provider_payload): + warmup_event = _make_websocket_terminal_event(provider_payload) + response_id = warmup_event.get("response", {}).get("id") + if isinstance(response_id, str) and response_id: + local_response_ids.add(response_id) + await websocket.send_text( + json.dumps(warmup_event, separators=(",", ":")) + ) + continue + previous_response_id = provider_payload.get("previous_response_id") + if isinstance(previous_response_id, str) and previous_response_id in local_response_ids: + provider_payload.pop("previous_response_id", None) + await _stream_websocket_response(websocket, adapter, provider_payload) + except WebSocketDisconnect: + return + except ValueError as exc: + if websocket.client_state == WebSocketState.CONNECTED: + await websocket.close(code=1008, reason=str(exc)) + except Exception as exc: + if websocket.client_state == WebSocketState.CONNECTED: + await websocket.close(code=1011, reason=str(exc)) + + @router.post("/responses", response_model=None, include_in_schema=False) @with_format_chain( 
[FORMAT_OPENAI_RESPONSES], endpoint=UPSTREAM_ENDPOINT_OPENAI_RESPONSES @@ -78,6 +307,11 @@ async def codex_responses_legacy( return await _codex_responses_handler(request, adapter) +@router.websocket("/responses") +async def codex_responses_legacy_websocket(websocket: WebSocket) -> None: + await codex_responses_websocket(websocket) + + @router.post("/v1/chat/completions", response_model=None) @with_format_chain( [FORMAT_OPENAI_CHAT, FORMAT_OPENAI_RESPONSES], @@ -98,8 +332,9 @@ async def list_models( config: CodexConfigDep, ) -> dict[str, Any]: """List available Codex models.""" - models = [card.model_dump(mode="json") for card in config.models_endpoint] - return {"object": "list", "data": models} + openai_models = _serialize_codex_models(config) + codex_models = _serialize_codex_cli_models(config) + return {"object": "list", "data": openai_models, "models": codex_models} @router.post("/v1/messages", response_model=None) diff --git a/tests/plugins/codex/integration/test_codex_basic.py b/tests/plugins/codex/integration/test_codex_basic.py index 876f668b..06da0b73 100644 --- a/tests/plugins/codex/integration/test_codex_basic.py +++ b/tests/plugins/codex/integration/test_codex_basic.py @@ -30,9 +30,15 @@ async def test_models_endpoint_available_when_enabled( data: dict[str, Any] = resp.json() assert data.get("object") == "list" models = data.get("data") + cli_models = data.get("models") assert isinstance(models, list) assert len(models) > 0 + assert isinstance(cli_models, list) + assert len(cli_models) > 0 assert {"id", "object", "created", "owned_by"}.issubset(models[0].keys()) + assert models[0].get("slug") == models[0]["id"] + assert models[0].get("display_name") == models[0]["id"] + assert cli_models[0].get("slug") @pytest.mark.asyncio diff --git a/tests/plugins/codex/integration/test_codex_websocket.py b/tests/plugins/codex/integration/test_codex_websocket.py new file mode 100644 index 00000000..3e375809 --- /dev/null +++ b/tests/plugins/codex/integration/test_codex_websocket.py @@ -0,0 +1,223 @@ +import asyncio +from datetime import UTC, datetime, timedelta +from types import SimpleNamespace +from typing import Any +from unittest.mock import AsyncMock, patch + +import pytest +from fastapi.testclient import TestClient + +from ccproxy.api.app import create_app, initialize_plugins_startup +from ccproxy.api.bootstrap import create_service_container +from ccproxy.config.settings import Settings +from ccproxy.core.logging import setup_logging +from ccproxy.models.detection import DetectedHeaders, DetectedPrompts +from ccproxy.plugins.codex.models import CodexCacheData + + +@pytest.fixture +def codex_ws_client() -> Any: + setup_logging(json_logs=False, log_level_name="ERROR") + settings = Settings( + enable_plugins=True, + plugins={ + "codex": {"enabled": True}, + "oauth_codex": {"enabled": True}, + "duckdb_storage": {"enabled": False}, + "analytics": {"enabled": False}, + "metrics": {"enabled": False}, + }, + enabled_plugins=["codex", "oauth_codex"], + plugins_disable_local_discovery=False, + ) + service_container = create_service_container(settings) + app = create_app(service_container) + + credentials_stub = SimpleNamespace( + access_token="test-codex-access-token", + expires_at=datetime.now(UTC) + timedelta(hours=1), + ) + profile_stub = SimpleNamespace(chatgpt_account_id="test-account-id") + prompts = DetectedPrompts.from_body( + {"instructions": "You are a helpful coding assistant."} + ) + detection_data = CodexCacheData( + codex_version="fallback", + headers=DetectedHeaders({}), + 
prompts=prompts, + body_json=prompts.raw, + method="POST", + url="https://chatgpt.com/backend-codex/responses", + path="/api/backend-codex/responses", + query_params={}, + ) + + async def init_detection_stub(self): # type: ignore[no-untyped-def] + self._cached_data = detection_data + return detection_data + + load_patch = patch( + "ccproxy.plugins.oauth_codex.manager.CodexTokenManager.load_credentials", + new=AsyncMock(return_value=credentials_stub), + ) + profile_patch = patch( + "ccproxy.plugins.oauth_codex.manager.CodexTokenManager.get_profile_quick", + new=AsyncMock(return_value=profile_stub), + ) + detection_patch = patch( + "ccproxy.plugins.codex.detection_service.CodexDetectionService.initialize_detection", + new=init_detection_stub, + ) + + with load_patch, profile_patch, detection_patch: + asyncio.run(initialize_plugins_startup(app, settings)) + with TestClient(app) as client: + yield client + + +@pytest.mark.integration +@pytest.mark.codex +def test_codex_websocket_responses_streaming( + codex_ws_client: TestClient, + mock_external_openai_codex_api_streaming: Any, +) -> None: + request_payload = { + "type": "response.create", + "model": "gpt-5", + "stream": True, + "instructions": "Reply with exactly OK", + "input": [ + { + "type": "message", + "role": "user", + "content": [{"type": "input_text", "text": "Reply with exactly OK"}], + } + ], + } + + with codex_ws_client.websocket_connect( + "/codex/v1/responses", + headers={ + "authorization": "Bearer ignored-client-token", + "chatgpt-account-id": "test-account-id", + "openai-beta": "responses_websockets=2026-02-06", + "originator": "Codex Desktop", + "session_id": "test-session", + "version": "0.114.0", + "x-codex-beta-features": "multi_agent", + "x-codex-turn-metadata": '{"turn_id":"","sandbox":"seatbelt"}', + }, + ) as websocket: + websocket.send_json(request_payload) + + events: list[dict[str, Any]] = [] + while True: + try: + events.append(websocket.receive_json()) + if events[-1].get("type") == "response.completed": + websocket.close() + break + except Exception: + break + + event_types = [event.get("type") for event in events] + assert event_types == [ + "response.created", + "response.output_text.delta", + "response.output_text.delta", + "response.output_text.done", + "response.completed", + ] + assert events[-1]["response"]["output"][0]["content"][0]["text"] == "Hello Codex!" 
+ + +@pytest.mark.integration +@pytest.mark.codex +def test_codex_websocket_returns_terminal_event_on_upstream_error( + codex_ws_client: TestClient, + mock_external_openai_codex_api_error: Any, +) -> None: + request_payload = { + "type": "response.create", + "model": "gpt-5", + "input": [ + { + "type": "message", + "role": "user", + "content": [{"type": "input_text", "text": "Reply with exactly OK"}], + } + ], + } + + with codex_ws_client.websocket_connect("/codex/v1/responses") as websocket: + websocket.send_json(request_payload) + event = websocket.receive_json() + websocket.close() + + assert event["type"] == "response.completed" + assert event["response"]["status"] == "failed" + assert event["response"]["error"]["type"] == "invalid_request_error" + + +@pytest.mark.integration +@pytest.mark.codex +def test_codex_websocket_short_circuits_empty_warmup_request( + codex_ws_client: TestClient, +) -> None: + request_payload = { + "type": "response.create", + "model": "gpt-5", + "input": [], + } + + with codex_ws_client.websocket_connect("/codex/v1/responses") as websocket: + websocket.send_json(request_payload) + event = websocket.receive_json() + websocket.close() + + assert event["type"] == "response.completed" + assert event["response"]["status"] == "completed" + assert event["response"]["output"] == [] + + +@pytest.mark.integration +@pytest.mark.codex +def test_codex_websocket_warmup_then_real_request_same_connection( + codex_ws_client: TestClient, + mock_external_openai_codex_api_streaming: Any, +) -> None: + warmup_payload = { + "type": "response.create", + "model": "gpt-5", + "input": [], + } + request_payload = { + "type": "response.create", + "model": "gpt-5", + "input": [ + { + "type": "message", + "role": "user", + "content": [{"type": "input_text", "text": "Reply with exactly OK"}], + } + ], + } + + with codex_ws_client.websocket_connect("/codex/v1/responses") as websocket: + websocket.send_json(warmup_payload) + warmup_event = websocket.receive_json() + request_payload["previous_response_id"] = warmup_event["response"]["id"] + + websocket.send_json(request_payload) + + events: list[dict[str, Any]] = [] + while True: + event = websocket.receive_json() + events.append(event) + if event.get("type") == "response.completed": + websocket.close() + break + + assert warmup_event["type"] == "response.completed" + assert events[-1]["type"] == "response.completed" + assert events[-1]["response"]["output"][0]["content"][0]["text"] == "Hello Codex!" 
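
The tests above pin down the WebSocket wire protocol introduced in routes.py: each client turn is a single JSON object with "type": "response.create", warmup turns carry an empty input list and are answered locally without an upstream call, and every turn terminates with a response.completed event (synthesized with status "failed" when the upstream request errors). A minimal client loop against a locally running proxy — assuming the third-party websockets package and a listener on 127.0.0.1:8000, neither of which is part of this patch:

import asyncio
import json

import websockets  # assumed client dependency, used here only for illustration

async def ask_codex(prompt: str) -> None:
    # Endpoint path matches the new route; host and port are placeholders.
    async with websockets.connect("ws://127.0.0.1:8000/codex/v1/responses") as ws:
        await ws.send(json.dumps({
            "type": "response.create",
            "model": "gpt-5",
            "input": [{
                "type": "message",
                "role": "user",
                "content": [{"type": "input_text", "text": prompt}],
            }],
        }))
        while True:
            event = json.loads(await ws.recv())
            print(event.get("type"))
            if event.get("type") == "response.completed":
                # Terminal event; the same connection can serve the next turn.
                break

asyncio.run(ask_codex("Reply with exactly OK"))
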
diff --git a/tests/plugins/codex/unit/test_adapter.py b/tests/plugins/codex/unit/test_adapter.py index 40a4c18e..840230ae 100644 --- a/tests/plugins/codex/unit/test_adapter.py +++ b/tests/plugins/codex/unit/test_adapter.py @@ -217,6 +217,80 @@ async def test_prepare_provider_request_sets_stream_true( result_data = json.loads(result_body.decode()) assert result_data["stream"] is True + @pytest.mark.asyncio + async def test_prepare_provider_request_preserves_encoded_body( + self, adapter: CodexAdapter + ) -> None: + """Encoded request bodies should pass through unchanged.""" + body = b"\x28\xb5\x2f\xfdcompressed-request" + headers = { + "content-type": "application/json", + "content-encoding": "zstd", + "accept": "application/json, text/event-stream", + "authorization": "Bearer old-token", + "session_id": "existing-session", + } + + result_body, result_headers = await adapter.prepare_provider_request( + body, headers, "/responses" + ) + + assert result_body == body + assert result_headers["content-encoding"] == "zstd" + assert result_headers["authorization"] == "Bearer test-token" + assert result_headers["session_id"] == "existing-session" + assert "conversation_id" in result_headers + + @pytest.mark.asyncio + async def test_prepare_provider_request_applies_codex_template_defaults( + self, + mock_detection_service: Mock, + mock_auth_manager: Mock, + mock_http_pool_manager: Mock, + ) -> None: + template = { + "instructions": "You are a Python expert.", + "include": ["reasoning.encrypted_content"], + "parallel_tool_calls": True, + "reasoning": {"effort": "medium"}, + "tool_choice": "auto", + "tools": [{"type": "function", "name": "exec_command"}], + "prompt_cache_key": "template-cache-key", + } + prompts = DetectedPrompts.from_body(template) + mock_detection_service.get_detected_prompts = Mock(return_value=prompts) + mock_detection_service.get_system_prompt = Mock( + return_value=prompts.instructions_payload() + ) + + mock_config = Mock() + mock_config.base_url = "https://chat.openai.com/backend-anon" + + adapter = CodexAdapter( + detection_service=mock_detection_service, + config=mock_config, + auth_manager=mock_auth_manager, + http_pool_manager=mock_http_pool_manager, + ) + + body = json.dumps( + { + "model": "gpt-5", + "input": [{"role": "user", "content": [{"type": "input_text"}]}], + } + ).encode() + + result_body, _ = await adapter.prepare_provider_request(body, {}, "/responses") + result_data = json.loads(result_body.decode()) + + assert result_data["include"] == ["reasoning.encrypted_content"] + assert result_data["parallel_tool_calls"] is True + assert result_data["reasoning"] == {"effort": "medium"} + assert result_data["tool_choice"] == "auto" + assert result_data["tools"] == [{"type": "function", "name": "exec_command"}] + assert result_data["prompt_cache_key"] != "template-cache-key" + assert result_data["input"][0]["type"] == "message" + @pytest.mark.asyncio async def test_process_provider_response(self, adapter: CodexAdapter) -> None: """Test response processing and format conversion.""" diff --git a/tests/unit/plugins/test_codex_detection.py b/tests/unit/plugins/test_codex_detection.py index 9c8ec326..f6864c63 100644 --- a/tests/unit/plugins/test_codex_detection.py +++ b/tests/unit/plugins/test_codex_detection.py @@ -48,3 +48,7 @@ async def test_codex_detection_falls_back_when_cli_missing(tmp_path: Path) -> No mock_save.assert_not_called() assert result is expected_fallback assert service.get_cached_data() is expected_fallback + + +def 
test_codex_detection_ignores_content_encoding_header() -> None: + assert "content-encoding" in CodexDetectionService.ignores_header From afee85ca0c0168484aa2b5d14ebcd5d161900685 Mon Sep 17 00:00:00 2001 From: dabogee Date: Fri, 13 Mar 2026 11:19:00 +0200 Subject: [PATCH 2/8] fixed microsoft agent framework integration, tests --- .ccproxy.codex.msaf.toml.example | 64 +++ ccproxy/core/plugins/factories.py | 19 +- ccproxy/llms/formatters/context.py | 24 ++ .../formatters/openai_to_openai/responses.py | 7 +- .../formatters/openai_to_openai/streams.py | 12 +- ccproxy/plugins/codex/adapter.py | 33 +- ccproxy/plugins/codex/config.py | 7 + ccproxy/services/adapters/format_adapter.py | 8 + ccproxy/services/adapters/mock_adapter.py | 55 ++- ccproxy/services/factories.py | 20 + ccproxy/services/mocking/mock_handler.py | 283 +++++++++++-- pyproject.toml | 2 + .../codex/integration/test_msaf_compat.py | 190 +++++++++ .../integration/test_msaf_real_library.py | 295 ++++++++++++++ tests/plugins/codex/unit/test_adapter.py | 127 ++++++ .../unit/core/test_provider_factory_bypass.py | 48 +++ .../unit/llms/test_llms_streaming_settings.py | 32 ++ .../services/mocking/test_mock_handler.py | 53 ++- tests/unit/services/test_mock_adapter.py | 118 +++++- uv.lock | 375 +++++++++++++++++- 20 files changed, 1693 insertions(+), 79 deletions(-) create mode 100644 .ccproxy.codex.msaf.toml.example create mode 100644 tests/plugins/codex/integration/test_msaf_compat.py create mode 100644 tests/plugins/codex/integration/test_msaf_real_library.py create mode 100644 tests/unit/core/test_provider_factory_bypass.py diff --git a/.ccproxy.codex.msaf.toml.example b/.ccproxy.codex.msaf.toml.example new file mode 100644 index 00000000..2e890709 --- /dev/null +++ b/.ccproxy.codex.msaf.toml.example @@ -0,0 +1,64 @@ +# Example ccproxy config for Microsoft Agent Framework clients over Codex. + +enable_plugins = true +enabled_plugins = ["oauth_codex", "codex"] + +[server] +bypass_mode = false + +[llm] +# Keep OpenAI-compatible responses free from ... blocks. +openai_thinking_xml = false + +[plugins.codex] +enabled = true +name = "codex" +base_url = "https://chatgpt.com/backend-api/codex" +requires_auth = true +auth_type = "oauth" +supports_streaming = true +preferred_upstream_mode = "streaming" +buffer_non_streaming = true +enable_format_registry = true + +# Microsoft Agent Framework sends its own instructions/reasoning payloads. +# Do not prepend captured Codex CLI templates to generic OpenAI-compatible calls. 
+inject_detection_payload = false + +supported_input_formats = [ + "openai.responses", + "openai.chat_completions", + "anthropic.messages", +] + +detection_home_mode = "temp" + +[[plugins.codex.models_endpoint]] +id = "gpt-5.4" +object = "model" +created = 1735689600 +owned_by = "openai" +root = "gpt-5.4" +permission = [] + +[plugins.codex.oauth] +base_url = "https://auth.openai.com" +client_id = "app_EMoamEEZ73f0CkXaXp7hrann" +scopes = ["openid", "profile", "email", "offline_access"] + +[plugins.oauth_codex] +enabled = true +base_url = "https://auth.openai.com" +authorize_url = "https://auth.openai.com/oauth/authorize" +token_url = "https://auth.openai.com/oauth/token" +profile_url = "https://api.openai.com/oauth/profile" +client_id = "app_EMoamEEZ73f0CkXaXp7hrann" +redirect_uri = "http://localhost:1455/auth/callback" +callback_port = 1455 +scopes = ["openid", "profile", "email", "offline_access"] +audience = "https://api.openai.com/v1" +user_agent = "Codex-Code/1.0.43" +headers = { User-Agent = "Codex-Code/1.0.43" } +request_timeout = 30 +callback_timeout = 300 +use_pkce = true diff --git a/ccproxy/core/plugins/factories.py b/ccproxy/core/plugins/factories.py index d73f0e5b..ac591535 100644 --- a/ccproxy/core/plugins/factories.py +++ b/ccproxy/core/plugins/factories.py @@ -12,6 +12,7 @@ from fastapi import APIRouter from ccproxy.models.provider import ProviderConfig +from ccproxy.services.adapters.mock_adapter import MockAdapter from ccproxy.services.adapters.base import BaseAdapter from ccproxy.services.adapters.http_adapter import BaseHTTPAdapter from ccproxy.services.interfaces import ( @@ -215,6 +216,23 @@ async def create_adapter(self, context: PluginContext) -> BaseAdapter: Returns: Adapter instance """ + settings = context.get("settings") + service_container = context.get("service_container") + if settings and getattr(settings.server, "bypass_mode", False): + if not service_container: + raise RuntimeError( + f"Cannot initialize plugin '{self.plugin_name}' in bypass mode: " + "service container is required to create mock adapter. " + "This is likely a configuration issue." 
+ ) + logger.warning( + "plugin_bypass_mode_enabled", + plugin=self.plugin_name, + adapter=self.adapter_class.__name__, + category="lifecycle", + ) + return MockAdapter(service_container.get_mock_handler()) + # Extract services from context (one-time extraction) http_pool_manager: HTTPPoolManager | None = cast( "HTTPPoolManager | None", context.get("http_pool_manager") @@ -232,7 +250,6 @@ async def create_adapter(self, context: PluginContext) -> BaseAdapter: config = context.get("config") # Get all adapter dependencies from service container - service_container = context.get("service_container") if not service_container: raise RuntimeError("Service container is required for adapter services") diff --git a/ccproxy/llms/formatters/context.py b/ccproxy/llms/formatters/context.py index 854505c6..5086fcce 100644 --- a/ccproxy/llms/formatters/context.py +++ b/ccproxy/llms/formatters/context.py @@ -11,6 +11,9 @@ "formatter_instructions", default=None ) _TOOLS_VAR: ContextVar[list[Any] | None] = ContextVar("formatter_tools", default=None) +_OPENAI_THINKING_XML_VAR: ContextVar[bool | None] = ContextVar( + "formatter_openai_thinking_xml", default=None +) def register_request(request: Any | None, instructions: str | None = None) -> None: @@ -114,3 +117,24 @@ def get_last_request_tools() -> list[Any] | None: cached = _TOOLS_VAR.get() return list(cached) if cached else None + + +def register_openai_thinking_xml(enabled: bool | None) -> None: + """Cache OpenAI thinking serialization preference for active conversions. + + Args: + enabled: Whether thinking blocks should be serialized with XML wrappers. + ``None`` means downstream conversion logic should use its default. + + Note: + The value is stored in a ``ContextVar``, so concurrent async requests + keep independent preferences without leaking into each other. 
+ """ + + _OPENAI_THINKING_XML_VAR.set(enabled) + + +def get_openai_thinking_xml() -> bool | None: + """Return the OpenAI thinking serialization preference for active conversions.""" + + return _OPENAI_THINKING_XML_VAR.get() diff --git a/ccproxy/llms/formatters/openai_to_openai/responses.py b/ccproxy/llms/formatters/openai_to_openai/responses.py index 7283c52c..d8ccd254 100644 --- a/ccproxy/llms/formatters/openai_to_openai/responses.py +++ b/ccproxy/llms/formatters/openai_to_openai/responses.py @@ -15,6 +15,7 @@ convert_openai_responses_usage_to_completion_usage, merge_thinking_segments, ) +from ccproxy.llms.formatters.context import get_openai_thinking_xml from ccproxy.llms.models import openai as openai_models from ._helpers import ( @@ -333,6 +334,10 @@ def convert__openai_responses_to_openai_chat__response( response: openai_models.ResponseObject, ) -> openai_models.ChatCompletionResponse: """Convert an OpenAI ResponseObject to a ChatCompletionResponse.""" + include_thinking = get_openai_thinking_xml() + if include_thinking is None: + include_thinking = True + text_segments: list[str] = [] added_reasoning: set[tuple[str, str]] = set() tool_calls: list[openai_models.ToolCall] = [] @@ -353,7 +358,7 @@ def convert__openai_responses_to_openai_chat__response( if thinking_text and len(thinking_text) > 30 else thinking_text, ) - if thinking_text: + if include_thinking and thinking_text: key = (signature or "", thinking_text) if key not in added_reasoning: text_segments.append(_wrap_thinking(signature, thinking_text)) diff --git a/ccproxy/llms/formatters/openai_to_openai/streams.py b/ccproxy/llms/formatters/openai_to_openai/streams.py index b41187f9..8a50ee2d 100644 --- a/ccproxy/llms/formatters/openai_to_openai/streams.py +++ b/ccproxy/llms/formatters/openai_to_openai/streams.py @@ -27,16 +27,14 @@ get_last_instructions, get_last_request, get_last_request_tools, + get_openai_thinking_xml, register_request, register_request_tools, ) from ccproxy.llms.models import openai as openai_models from ccproxy.llms.streaming.accumulators import OpenAIAccumulator -from ._helpers import ( - _convert_tools_chat_to_responses, - _get_attr, -) +from ._helpers import _convert_tools_chat_to_responses, _get_attr from .requests import _build_responses_payload_from_chat_request from .responses import ( _collect_reasoning_segments, @@ -61,6 +59,10 @@ def run( async def generator() -> AsyncGenerator[ openai_models.ChatCompletionChunk, None ]: + include_thinking = get_openai_thinking_xml() + if include_thinking is None: + include_thinking = True + model_id = "" role_sent = False @@ -537,7 +539,7 @@ def create_text_chunk( for entry in summary_list: text = _get_attr(entry, "text") signature = _get_attr(entry, "signature") - if isinstance(text, str) and text: + if include_thinking and isinstance(text, str) and text: chunk_text = _wrap_thinking(signature, text) sequence_counter += 1 yield openai_models.ChatCompletionChunk( diff --git a/ccproxy/plugins/codex/adapter.py b/ccproxy/plugins/codex/adapter.py index 5cc835e0..88634982 100644 --- a/ccproxy/plugins/codex/adapter.py +++ b/ccproxy/plugins/codex/adapter.py @@ -259,28 +259,36 @@ async def prepare_provider_request( # Parse body (format conversion is now handled by format chain) body_data = json.loads(body.decode()) if body else {} - body_data = self._apply_request_template(body_data) + if self._should_apply_detection_payload(): + body_data = self._apply_request_template(body_data) + else: + body_data = self._normalize_input_messages(body_data) - # Inject instructions 
mandatory for being allow to - # to used the Codex API endpoint - # Fetch detected instructions from detection service - instructions = self._get_instructions() + detected_instructions = ( + self._get_instructions() if self._should_apply_detection_payload() else "" + ) existing_instructions = body_data.get("instructions") if isinstance(existing_instructions, str) and existing_instructions: - if instructions: - instructions = instructions + "\n" + existing_instructions - else: - instructions = existing_instructions + instructions = ( + detected_instructions + "\n" + existing_instructions + if detected_instructions + else existing_instructions + ) + else: + instructions = detected_instructions - body_data["instructions"] = instructions + if instructions: + body_data["instructions"] = instructions + else: + body_data.pop("instructions", None) # Codex backend requires stream=true, always override body_data["stream"] = True body_data["store"] = False # Remove unsupported keys for Codex - for key in ("max_output_tokens", "max_completion_tokens", "temperature"): + for key in ("max_output_tokens", "max_completion_tokens", "max_tokens", "temperature"): body_data.pop(key, None) list_input = body_data.get("input", []) @@ -652,6 +660,9 @@ def _detect_streaming_intent( accept = headers.get("accept", "").lower() return "text/event-stream" in accept + def _should_apply_detection_payload(self) -> bool: + return bool(getattr(self.config, "inject_detection_payload", True)) + def _get_instructions(self) -> str: if not self.detection_service: return "" diff --git a/ccproxy/plugins/codex/config.py b/ccproxy/plugins/codex/config.py index df2af6c0..6d9d71b5 100644 --- a/ccproxy/plugins/codex/config.py +++ b/ccproxy/plugins/codex/config.py @@ -124,6 +124,13 @@ class CodexSettings(ProviderConfig): enable_format_registry: bool = Field( default=True, description="Whether to enable format adapter registry" ) + inject_detection_payload: bool = Field( + default=True, + description=( + "Whether to inject the captured Codex CLI instructions/template into " + "provider requests. Disable this for generic OpenAI-compatible API usage." 
+ ), + ) # Detection configuration detection_home_mode: Literal["temp", "home"] = Field( diff --git a/ccproxy/services/adapters/format_adapter.py b/ccproxy/services/adapters/format_adapter.py index a0af27e0..9ffdb7e6 100644 --- a/ccproxy/services/adapters/format_adapter.py +++ b/ccproxy/services/adapters/format_adapter.py @@ -6,6 +6,8 @@ from collections.abc import AsyncIterator, Awaitable, Callable from typing import Any, Protocol, runtime_checkable +from ccproxy.llms.formatters.context import register_openai_thinking_xml + FormatDict = dict[str, Any] @@ -63,6 +65,10 @@ def __init__( self._error = error self._stream = stream self.name = name or self.__class__.__name__ + self._openai_thinking_xml: bool | None = None + + def configure_streaming(self, *, openai_thinking_xml: bool | None = None) -> None: + self._openai_thinking_xml = openai_thinking_xml async def convert_request(self, data: FormatDict) -> FormatDict: return await self._run_stage(self._request, data, stage="request") @@ -92,6 +98,7 @@ async def _create_stream_iterator( f"{self.name} does not implement stream conversion" ) + register_openai_thinking_xml(self._openai_thinking_xml) handler = self._stream(stream) handler = await _maybe_await(handler) @@ -121,6 +128,7 @@ async def _run_stage( f"{self.name} does not implement {stage} conversion" ) + register_openai_thinking_xml(self._openai_thinking_xml) result = await _maybe_await(func(data)) if not isinstance(result, dict): raise TypeError( diff --git a/ccproxy/services/adapters/mock_adapter.py b/ccproxy/services/adapters/mock_adapter.py index f335551a..1c838709 100644 --- a/ccproxy/services/adapters/mock_adapter.py +++ b/ccproxy/services/adapters/mock_adapter.py @@ -10,6 +10,11 @@ from starlette.responses import StreamingResponse from ccproxy.core import logging +from ccproxy.core.constants import ( + FORMAT_ANTHROPIC_MESSAGES, + FORMAT_OPENAI_CHAT, + FORMAT_OPENAI_RESPONSES, +) from ccproxy.core.request_context import RequestContext from ccproxy.services.adapters.base import BaseAdapter from ccproxy.services.mocking.mock_handler import MockResponseHandler @@ -25,6 +30,44 @@ class MockAdapter(BaseAdapter): def __init__(self, mock_handler: MockResponseHandler) -> None: self.mock_handler = mock_handler + async def cleanup(self) -> None: + """Release adapter resources.""" + return None + + def _detect_format_from_endpoint(self, endpoint: str | None) -> str | None: + """Map known route patterns to the expected output format.""" + + if not endpoint: + return None + + endpoint_lower = endpoint.lower() + if "chat/completions" in endpoint_lower: + return FORMAT_OPENAI_CHAT + if "responses" in endpoint_lower: + return FORMAT_OPENAI_RESPONSES + return None + + def _resolve_target_format(self, request: Request, endpoint: str) -> str: + """Infer the response format expected by the current route.""" + + ctx = getattr(request.state, "context", None) + format_chain = getattr(ctx, "format_chain", None) + if isinstance(format_chain, list) and format_chain: + first = format_chain[0] + if first in { + FORMAT_OPENAI_CHAT, + FORMAT_OPENAI_RESPONSES, + FORMAT_ANTHROPIC_MESSAGES, + }: + return first + + for candidate in (endpoint, getattr(request.url, "path", None)): + detected_format = self._detect_format_from_endpoint(candidate) + if detected_format: + return detected_format + + return FORMAT_ANTHROPIC_MESSAGES + def _extract_stream_flag(self, body: bytes) -> bool: """Check if request asks for streaming.""" try: @@ -46,6 +89,7 @@ async def handle_request( """Handle request using mock handler.""" body 
= await request.body() message_type = self.mock_handler.extract_message_type(body) + prompt_text = self.mock_handler.extract_prompt_text(body) # Get endpoint from context or request URL endpoint = request.url.path @@ -53,7 +97,7 @@ async def handle_request( ctx = request.state.context endpoint = ctx.metadata.get("endpoint", request.url.path) - is_openai = "openai" in endpoint + target_format = self._resolve_target_format(request, endpoint) model = "unknown" try: body_json = json.loads(body) if body else {} @@ -75,7 +119,7 @@ async def handle_request( if self._extract_stream_flag(body): return await self.mock_handler.generate_streaming_response( - model, is_openai, ctx, message_type + model, target_format, ctx, message_type, prompt_text ) else: ( @@ -83,7 +127,7 @@ async def handle_request( headers, response_body, ) = await self.mock_handler.generate_standard_response( - model, is_openai, ctx, message_type + model, target_format, ctx, message_type, prompt_text ) return Response(content=response_body, status_code=status, headers=headers) @@ -93,7 +137,8 @@ async def handle_streaming( """Handle a streaming request.""" body = await request.body() message_type = self.mock_handler.extract_message_type(body) - is_openai = "openai" in endpoint + prompt_text = self.mock_handler.extract_prompt_text(body) + target_format = self._resolve_target_format(request, endpoint) model = "unknown" try: body_json = json.loads(body) if body else {} @@ -114,5 +159,5 @@ async def handle_streaming( ) return await self.mock_handler.generate_streaming_response( - model, is_openai, ctx, message_type + model, target_format, ctx, message_type, prompt_text ) diff --git a/ccproxy/services/factories.py b/ccproxy/services/factories.py index ddf079b2..be295964 100644 --- a/ccproxy/services/factories.py +++ b/ccproxy/services/factories.py @@ -23,6 +23,7 @@ from ccproxy.services.adapters.format_registry import FormatRegistry from ccproxy.services.adapters.simple_converters import ( convert_anthropic_to_openai_response, + convert_anthropic_to_openai_responses_response, ) from ccproxy.services.auth_registry import AuthManagerRegistry from ccproxy.services.cache import ResponseCache @@ -127,16 +128,25 @@ def create_mock_handler(self) -> MockResponseHandler: response=convert_anthropic_to_openai_response, name="mock_anthropic_to_openai", ) + openai_responses_adapter = DictFormatAdapter( + response=convert_anthropic_to_openai_responses_response, + name="mock_anthropic_to_openai_responses", + ) # Configure streaming settings if needed openai_thinking_xml = getattr( getattr(settings, "llm", object()), "openai_thinking_xml", True ) if hasattr(openai_adapter, "configure_streaming"): openai_adapter.configure_streaming(openai_thinking_xml=openai_thinking_xml) + if hasattr(openai_responses_adapter, "configure_streaming"): + openai_responses_adapter.configure_streaming( + openai_thinking_xml=openai_thinking_xml + ) handler = MockResponseHandler( mock_generator=mock_generator, openai_adapter=openai_adapter, + openai_responses_adapter=openai_responses_adapter, error_rate=0.05, latency_range=(0.5, 2.0), ) @@ -342,6 +352,12 @@ def _register_core_format_adapters( ] # Register each core adapter + openai_thinking_xml = True + if settings is not None: + openai_thinking_xml = getattr( + getattr(settings, "llm", object()), "openai_thinking_xml", True + ) + for spec in core_adapter_specs: adapter = DictFormatAdapter( request=spec["request"], @@ -350,6 +366,10 @@ def _register_core_format_adapters( error=spec["error"], name=spec["name"], ) + if 
hasattr(adapter, "configure_streaming"): + adapter.configure_streaming( + openai_thinking_xml=openai_thinking_xml + ) registry.register( from_format=spec["from_format"], to_format=spec["to_format"], diff --git a/ccproxy/services/mocking/mock_handler.py b/ccproxy/services/mocking/mock_handler.py index 8cdd1afb..26bb6289 100644 --- a/ccproxy/services/mocking/mock_handler.py +++ b/ccproxy/services/mocking/mock_handler.py @@ -4,21 +4,37 @@ import json import random from collections.abc import AsyncGenerator -from typing import Any +from typing import Any, Literal, TypeAlias import structlog from fastapi.responses import StreamingResponse +from ccproxy.core.constants import ( + FORMAT_ANTHROPIC_MESSAGES, + FORMAT_OPENAI_CHAT, + FORMAT_OPENAI_RESPONSES, +) from ccproxy.core.request_context import RequestContext from ccproxy.services.adapters.format_adapter import DictFormatAdapter from ccproxy.services.adapters.simple_converters import ( convert_anthropic_to_openai_response, + convert_anthropic_to_openai_responses_response, ) from ccproxy.testing import RealisticMockResponseGenerator logger = structlog.get_logger(__name__) +PROMPT_EXTRACTION_KEYS = ("instructions", "content", "text", "input", "messages") +MAX_PROMPT_EXTRACTION_DEPTH = 10 + +TargetFormat = Literal[ + FORMAT_ANTHROPIC_MESSAGES, + FORMAT_OPENAI_CHAT, + FORMAT_OPENAI_RESPONSES, +] +PromptValue: TypeAlias = str | list[Any] | dict[str, Any] | int | float | bool | None + class MockResponseHandler: """Handles bypass mode with realistic mock responses.""" @@ -27,6 +43,7 @@ def __init__( self, mock_generator: RealisticMockResponseGenerator, openai_adapter: DictFormatAdapter | None = None, + openai_responses_adapter: DictFormatAdapter | None = None, error_rate: float = 0.05, latency_range: tuple[float, float] = (0.5, 2.0), ) -> None: @@ -37,14 +54,32 @@ def __init__( """ self.mock_generator = mock_generator if openai_adapter is None: - openai_adapter = DictFormatAdapter( - response=convert_anthropic_to_openai_response, - name="mock_anthropic_to_openai", - ) + openai_adapter = self._create_openai_adapter() self.openai_adapter = openai_adapter + if openai_responses_adapter is None: + openai_responses_adapter = self._create_openai_responses_adapter() + self.openai_responses_adapter = openai_responses_adapter self.error_rate = error_rate self.latency_range = latency_range + @staticmethod + def _create_openai_adapter() -> DictFormatAdapter: + """Create the adapter used for Anthropic -> OpenAI chat mocks.""" + + return DictFormatAdapter( + response=convert_anthropic_to_openai_response, + name="mock_anthropic_to_openai", + ) + + @staticmethod + def _create_openai_responses_adapter() -> DictFormatAdapter: + """Create the adapter used for Anthropic -> OpenAI responses mocks.""" + + return DictFormatAdapter( + response=convert_anthropic_to_openai_responses_response, + name="mock_anthropic_to_openai_responses", + ) + def extract_message_type(self, body: bytes | None) -> str: """Analyze request body to determine response type. 
@@ -89,12 +124,179 @@ def should_simulate_error(self) -> bool: """ return random.random() < self.error_rate + def extract_prompt_text(self, body: bytes | None) -> str: + """Extract a plain-text prompt summary from common request shapes.""" + + if not body: + return "" + + try: + data: PromptValue = json.loads(body) + except (json.JSONDecodeError, TypeError): + return "" + + parts: list[str] = [] + + seen: set[int] = set() + + def collect(value: PromptValue, depth: int = 0) -> None: + if depth > MAX_PROMPT_EXTRACTION_DEPTH: + logger.debug( + "prompt_extraction_max_depth_reached", + depth=depth, + max_depth=MAX_PROMPT_EXTRACTION_DEPTH, + ) + return + + if isinstance(value, str): + text = value.strip() + if text: + parts.append(text) + return + + if isinstance(value, list): + value_id = id(value) + if value_id in seen: + return + seen.add(value_id) + for item in value: + collect(item, depth + 1) + return + + if not isinstance(value, dict): + return + + value_id = id(value) + if value_id in seen: + return + seen.add(value_id) + + for key in PROMPT_EXTRACTION_KEYS: + if key not in value: + continue + try: + collect(value[key], depth + 1) + except (KeyError, TypeError, AttributeError) as exc: + logger.debug( + "prompt_extraction_value_skipped", + key=key, + error=str(exc), + ) + + collect(data) + return "\n".join(parts) + + def _make_text_response(self, text: str, model: str | None) -> dict[str, Any]: + """Wrap plain text in the default Anthropic-style mock response envelope.""" + + output_tokens = max(1, len(text.split())) + input_tokens = max(16, min(1200, output_tokens * 4)) + return { + "id": f"msg_{random.randint(1000, 9999)}", + "type": "message", + "role": "assistant", + "content": [{"type": "text", "text": text}], + "model": model or "claude-3-sonnet", + "stop_reason": "end_turn", + "usage": {"input_tokens": input_tokens, "output_tokens": output_tokens}, + } + + def _build_login_form_response( + self, prompt_text: str, model: str | None + ) -> dict[str, Any] | None: + """Generate a deterministic response for the login-form demo workflow.""" + + prompt_lower = prompt_text.lower() + if "логин" not in prompt_lower and "login" not in prompt_lower: + return None + + if "final editor" in prompt_lower or "requirementseditor" in prompt_lower: + text = """# Login Form Requirements + +## Goal +- Provide a secure and low-friction way for an existing user to sign in to the web application. + +## Functional Requirements +- The form must include `email` and `password` fields plus a primary `Sign in` action. +- The form must optionally include a `Remember me` checkbox. +- The form must provide links for `Forgot password?` and `Create account`. + +## Validation Rules +- Email is required and must be validated as a properly formatted email address. +- Password is required and must not be silently trimmed after the user enters it. +- The submit action stays disabled only while the request is in flight, not before entry completion. + +## UX Requirements +- Each field must have a persistent visible label and helpful placeholder or hint text when needed. +- Validation errors must appear inline near the affected field and a summary error must appear for failed submit attempts. +- The form must show a loading state during authentication and prevent duplicate submissions. +- The form must preserve the entered email after an authentication failure. + +## Accessibility Requirements +- The form must be fully operable by keyboard, including tab order, submit, and recovery links. 
+- Inputs, errors, and loading states must be announced correctly to screen readers. +- Error messages must be associated with their fields via ARIA or equivalent semantic attributes. +- Color must not be the only way to communicate validation or authentication failure. + +## Security Requirements +- Authentication failures must return a generic message that does not reveal whether the email exists. +- The backend must enforce rate limiting and temporary throttling for repeated failed attempts. +- Password values must be transmitted only over HTTPS and must never be logged in plaintext. +- Session creation must use secure, HttpOnly cookies or an equivalent secure session mechanism. + +## Error Handling +- Invalid credentials show a generic authentication error. +- Network or server failures show a retry-friendly message without losing the entered email. +- Locked or throttled states show a user-safe message with next-step guidance. + +## Acceptance Criteria +- A valid existing user can sign in successfully from desktop and mobile layouts. +- Inline validation appears for malformed email and empty password before or at submit time. +- Failed authentication does not disclose account existence. +- Keyboard-only and screen-reader users can complete the entire sign-in flow. +- Duplicate clicks during loading do not create duplicate requests. + +## Open Questions +- Is MFA required immediately after password entry for some or all users? +- Should `Remember me` extend the session lifetime or only persist the email field? +- Are SSO providers such as Google or Microsoft in scope for the first release?""" + return self._make_text_response(text, model) + + if "productanalyst" in prompt_lower or "focus on product scope" in prompt_lower: + text = """- The login form serves existing users and should prioritize fast sign-in. +- Required inputs: email and password; optional controls: remember me, forgot password, create account. +- Core acceptance flow: submit valid credentials, create session, redirect to the application home or previous destination. +- Validation must cover required fields, email format, and authentication failure scenarios.""" + return self._make_text_response(text, model) + + if ( + "uxaccessibilityanalyst" in prompt_lower + or "focus on form labels" in prompt_lower + ): + text = """- Use persistent labels, clear focus indicators, inline errors, and a visible loading state. +- Preserve the email field after failed login and keep recovery actions easy to reach. +- Support keyboard navigation, screen-reader announcements for errors, and responsive mobile layout. +- Do not rely only on color to communicate invalid fields or failed authentication.""" + return self._make_text_response(text, model) + + if "securityanalyst" in prompt_lower or "focus on password handling" in prompt_lower: + text = """- Use generic auth failure messages to prevent account enumeration. +- Enforce HTTPS, secure session handling, rate limiting, and temporary lockout or throttling after repeated failures. +- Never log plaintext passwords or return sensitive backend error details to the UI. +- Audit failed login attempts and suspicious patterns for operational monitoring.""" + return self._make_text_response(text, model) + + text = """- The login form should collect email and password, validate input, handle errors clearly, and protect against abuse. 
+- Include accessibility support, secure session behavior, and explicit acceptance criteria for successful and failed sign-in.""" + return self._make_text_response(text, model) + async def generate_standard_response( self, model: str | None, - is_openai_format: bool, + target_format: TargetFormat, ctx: RequestContext, message_type: str = "short", + prompt_text: str = "", ) -> tuple[int, dict[str, str], bytes]: """Generate non-streaming mock response. @@ -109,23 +311,33 @@ async def generate_standard_response( # Check if we should simulate an error if self.should_simulate_error(): - error_response = self._generate_error_response(is_openai_format) + error_response = self._generate_error_response(target_format) return 429, {"content-type": "application/json"}, error_response - # Generate mock response based on type - if message_type == "tool_use": - mock_response = self.mock_generator.generate_tool_use_response(model=model) - elif message_type == "long": - mock_response = self.mock_generator.generate_long_response(model=model) - elif message_type == "medium": - mock_response = self.mock_generator.generate_medium_response(model=model) - else: - mock_response = self.mock_generator.generate_short_response(model=model) + mock_response = self._build_login_form_response(prompt_text, model) + if mock_response is None: + if message_type == "tool_use": + mock_response = self.mock_generator.generate_tool_use_response( + model=model + ) + elif message_type == "long": + mock_response = self.mock_generator.generate_long_response(model=model) + elif message_type == "medium": + mock_response = self.mock_generator.generate_medium_response( + model=model + ) + else: + mock_response = self.mock_generator.generate_short_response( + model=model + ) # Convert to OpenAI format if needed - if is_openai_format and message_type != "tool_use": - # Use dict-based conversion + if target_format == FORMAT_OPENAI_CHAT and message_type != "tool_use": mock_response = await self.openai_adapter.convert_response(mock_response) + elif target_format == FORMAT_OPENAI_RESPONSES: + mock_response = await self.openai_responses_adapter.convert_response( + mock_response + ) # Update context with metrics if ctx: @@ -142,9 +354,10 @@ async def generate_standard_response( async def generate_streaming_response( self, model: str | None, - is_openai_format: bool, + target_format: TargetFormat, ctx: RequestContext, message_type: str = "short", + prompt_text: str = "", ) -> StreamingResponse: """Generate SSE streaming mock response. 
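# A minimal usage sketch, assuming names from the tests later in this patch:
# callers switch from the removed is_openai_format flag to a TargetFormat
# constant and thread the raw request body through extract_message_type /
# extract_prompt_text so the deterministic login-form branch can trigger.
# handler, raw_body, and ctx are assumed to be a MockResponseHandler, the raw
# request body, and a RequestContext; the constant import matches the tests.
from ccproxy.core.constants import FORMAT_OPENAI_CHAT

async def mock_reply(handler, raw_body: bytes, ctx):
    # Returns the same (status, headers, body) tuple generate_standard_response yields.
    return await handler.generate_standard_response(
        model="gpt-5.4",
        target_format=FORMAT_OPENAI_CHAT,  # replaces is_openai_format=True
        ctx=ctx,
        message_type=handler.extract_message_type(raw_body),
        prompt_text=handler.extract_prompt_text(raw_body),
    )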
@@ -155,14 +368,20 @@ async def generate_streaming_response( async def stream_generator() -> AsyncGenerator[bytes, None]: # Generate base response - if message_type == "tool_use": - base_response = self.mock_generator.generate_tool_use_response( - model=model - ) - elif message_type == "long": - base_response = self.mock_generator.generate_long_response(model=model) - else: - base_response = self.mock_generator.generate_short_response(model=model) + base_response = self._build_login_form_response(prompt_text, model) + if base_response is None: + if message_type == "tool_use": + base_response = self.mock_generator.generate_tool_use_response( + model=model + ) + elif message_type == "long": + base_response = self.mock_generator.generate_long_response( + model=model + ) + else: + base_response = self.mock_generator.generate_short_response( + model=model + ) content = base_response.get("content", [{"text": "Mock response"}]) if isinstance(content, list) and content: @@ -175,7 +394,7 @@ async def stream_generator() -> AsyncGenerator[bytes, None]: chunk_size = 3 # Words per chunk # Send initial event - if is_openai_format: + if target_format == FORMAT_OPENAI_CHAT: initial_event = { "id": f"chatcmpl-{ctx.request_id if ctx else 'mock'}", "object": "chat.completion.chunk", @@ -213,7 +432,7 @@ async def stream_generator() -> AsyncGenerator[bytes, None]: await asyncio.sleep(0.05) # Simulate token generation delay - if is_openai_format: + if target_format == FORMAT_OPENAI_CHAT: chunk_event = { "id": f"chatcmpl-{ctx.request_id if ctx else 'mock'}", "object": "chat.completion.chunk", @@ -237,7 +456,7 @@ async def stream_generator() -> AsyncGenerator[bytes, None]: yield f"data: {json.dumps(chunk_event)}\n\n".encode() # Send final event - if is_openai_format: + if target_format == FORMAT_OPENAI_CHAT: final_event = { "id": f"chatcmpl-{ctx.request_id if ctx else 'mock'}", "object": "chat.completion.chunk", @@ -270,9 +489,9 @@ async def stream_generator() -> AsyncGenerator[bytes, None]: }, ) - def _generate_error_response(self, is_openai_format: bool) -> bytes: + def _generate_error_response(self, target_format: TargetFormat) -> bytes: """Generate a mock error response.""" - if is_openai_format: + if target_format in {FORMAT_OPENAI_CHAT, FORMAT_OPENAI_RESPONSES}: error: dict[str, Any] = { "error": { "message": "Rate limit exceeded (mock error)", diff --git a/pyproject.toml b/pyproject.toml index cd7b2190..26be1425 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,6 +64,8 @@ test = [ "pytest-env", "pytest-httpx", "pytest-xdist", + "agent-framework-core>=1.0.0rc4,<2", + "agent-framework-orchestrations>=1.0.0b260311,<2", ] docs = [ diff --git a/tests/plugins/codex/integration/test_msaf_compat.py b/tests/plugins/codex/integration/test_msaf_compat.py new file mode 100644 index 00000000..fa20d47b --- /dev/null +++ b/tests/plugins/codex/integration/test_msaf_compat.py @@ -0,0 +1,190 @@ +from __future__ import annotations + +import json +from collections.abc import AsyncGenerator +from contextlib import AsyncExitStack +from datetime import UTC, datetime, timedelta +from types import SimpleNamespace +from typing import Any +from unittest.mock import AsyncMock, patch + +import pytest +import pytest_asyncio +from httpx import ASGITransport, AsyncClient +from pytest_httpx import HTTPXMock +from tests.helpers.assertions import assert_openai_responses_format + +from ccproxy.api.app import create_app, initialize_plugins_startup, shutdown_plugins +from ccproxy.api.bootstrap import create_service_container +from 
ccproxy.config.settings import Settings +from ccproxy.core.logging import setup_logging +from ccproxy.models.detection import DetectedHeaders, DetectedPrompts +from ccproxy.plugins.codex.models import CodexCacheData + + +pytestmark = pytest.mark.asyncio(loop_scope="module") + +DETECTED_CLI_INSTRUCTIONS = "Detected Codex CLI instructions" +MSAF_CHAT_COMPLETIONS_REQUEST = { + "model": "gpt-5.4", + "messages": [ + { + "role": "system", + "content": "You are part of a requirements workshop for a login form.", + }, + {"role": "user", "content": "Составьте требования для формы логина."}, + ], + "reasoning_effort": "medium", + "max_completion_tokens": 256, + "temperature": 0.1, +} + + +def _build_detection_data() -> CodexCacheData: + prompts = DetectedPrompts.from_body({"instructions": DETECTED_CLI_INSTRUCTIONS}) + return CodexCacheData( + codex_version="fallback", + headers=DetectedHeaders({}), + prompts=prompts, + body_json=prompts.raw, + method="POST", + url="https://chatgpt.com/backend-api/codex/responses", + path="/backend-api/codex/responses", + query_params={}, + ) + + +@pytest_asyncio.fixture +async def codex_msaf_client() -> AsyncGenerator[AsyncClient, None]: + setup_logging(json_logs=False, log_level_name="ERROR") + + settings = Settings( + enable_plugins=True, + plugins_disable_local_discovery=False, + enabled_plugins=["codex", "oauth_codex"], + plugins={ + "codex": { + "enabled": True, + "inject_detection_payload": False, + }, + "oauth_codex": {"enabled": True}, + "duckdb_storage": {"enabled": False}, + "analytics": {"enabled": False}, + "metrics": {"enabled": False}, + }, + llm={"openai_thinking_xml": False}, + ) + service_container = create_service_container(settings) + app = create_app(service_container) + + credentials_stub = SimpleNamespace( + access_token="test-codex-access-token", + expires_at=datetime.now(UTC) + timedelta(hours=1), + ) + profile_stub = SimpleNamespace(chatgpt_account_id="test-account-id") + detection_data = _build_detection_data() + + async def init_detection_stub(self): # type: ignore[no-untyped-def] + self._cached_data = detection_data + return detection_data + + async with AsyncExitStack() as stack: + stack.enter_context( + patch( + "ccproxy.plugins.oauth_codex.manager.CodexTokenManager.load_credentials", + new=AsyncMock(return_value=credentials_stub), + ) + ) + stack.enter_context( + patch( + "ccproxy.plugins.oauth_codex.manager.CodexTokenManager.get_access_token", + new=AsyncMock(return_value="test-codex-access-token"), + ) + ) + stack.enter_context( + patch( + "ccproxy.plugins.oauth_codex.manager.CodexTokenManager.get_access_token_with_refresh", + new=AsyncMock(return_value="test-codex-access-token"), + ) + ) + stack.enter_context( + patch( + "ccproxy.plugins.oauth_codex.manager.CodexTokenManager.get_profile_quick", + new=AsyncMock(return_value=profile_stub), + ) + ) + stack.enter_context( + patch( + "ccproxy.plugins.codex.detection_service.CodexDetectionService.initialize_detection", + new=init_detection_stub, + ) + ) + + await initialize_plugins_startup(app, settings) + transport = ASGITransport(app=app) + client = AsyncClient(transport=transport, base_url="http://test") + try: + yield client + finally: + await client.aclose() + await shutdown_plugins(app) + await service_container.close() + + +@pytest.mark.integration +@pytest.mark.codex +async def test_msaf_chat_completions_request_reaches_codex_without_cli_injection( + codex_msaf_client: AsyncClient, + mock_external_codex_api: HTTPXMock, +) -> None: + response = await codex_msaf_client.post( + 
"/codex/v1/chat/completions", + json=MSAF_CHAT_COMPLETIONS_REQUEST, + ) + + assert response.status_code == 200, response.text + data = response.json() + assert_openai_responses_format(data) + + requests = mock_external_codex_api.get_requests() + assert len(requests) == 1 + + upstream_payload = json.loads(requests[0].read().decode()) + assert ( + upstream_payload["instructions"] + == MSAF_CHAT_COMPLETIONS_REQUEST["messages"][0]["content"] + ) + assert DETECTED_CLI_INSTRUCTIONS not in upstream_payload["instructions"] + assert upstream_payload["reasoning"] == {"effort": "medium", "summary": "auto"} + assert upstream_payload["stream"] is True + assert upstream_payload["store"] is False + assert "max_tokens" not in upstream_payload + assert "max_output_tokens" not in upstream_payload + assert "temperature" not in upstream_payload + assert upstream_payload["input"][0]["type"] == "message" + assert ( + upstream_payload["input"][0]["content"][0]["text"] + == "Составьте требования для формы логина." + ) + + +@pytest.mark.integration +@pytest.mark.codex +async def test_msaf_chat_completions_hides_thinking_xml_when_disabled( + codex_msaf_client: AsyncClient, + mock_external_codex_api: HTTPXMock, +) -> None: + request_payload = { + **MSAF_CHAT_COMPLETIONS_REQUEST, + "reasoning_effort": "high", + } + + response = await codex_msaf_client.post( + "/codex/v1/chat/completions", + json=request_payload, + ) + + assert response.status_code == 200, response.text + data = response.json() + assert_openai_responses_format(data) + assert "" not in json.dumps(data, ensure_ascii=False) diff --git a/tests/plugins/codex/integration/test_msaf_real_library.py b/tests/plugins/codex/integration/test_msaf_real_library.py new file mode 100644 index 00000000..b0762df5 --- /dev/null +++ b/tests/plugins/codex/integration/test_msaf_real_library.py @@ -0,0 +1,295 @@ +from __future__ import annotations + +import json +from collections.abc import AsyncGenerator +from contextlib import AsyncExitStack +from datetime import UTC, datetime, timedelta +from types import SimpleNamespace +from typing import Any +from unittest.mock import AsyncMock, patch + +import httpx +import pytest +import pytest_asyncio +from agent_framework import Message +from agent_framework.openai import OpenAIChatClient +from openai import AsyncOpenAI +from pytest_httpx import HTTPXMock + +from ccproxy.api.app import create_app, initialize_plugins_startup, shutdown_plugins +from ccproxy.api.bootstrap import create_service_container +from ccproxy.config.settings import Settings +from ccproxy.core.logging import setup_logging +from ccproxy.models.detection import DetectedHeaders, DetectedPrompts +from ccproxy.plugins.codex.models import CodexCacheData + + +pytestmark = [ + pytest.mark.asyncio(loop_scope="module"), + pytest.mark.integration, + pytest.mark.codex, +] + +DETECTED_CLI_INSTRUCTIONS = "Detected Codex CLI instructions" +COMMON_INSTRUCTIONS = ( + "You are part of a requirements workshop for a login form. " + "Reply in the same language as the user request. " + "Be concise and practical." 
+) + + +def _build_detection_data() -> CodexCacheData: + prompts = DetectedPrompts.from_body({"instructions": DETECTED_CLI_INSTRUCTIONS}) + return CodexCacheData( + codex_version="fallback", + headers=DetectedHeaders({}), + prompts=prompts, + body_json=prompts.raw, + method="POST", + url="https://chatgpt.com/backend-api/codex/responses", + path="/backend-api/codex/responses", + query_params={}, + ) + + +def _build_codex_response( + *, + response_id: str, + message_id: str, + text: str, + reasoning_text: str, +) -> dict[str, Any]: + return { + "id": response_id, + "object": "response", + "created_at": 1773389433, + "status": "completed", + "model": "gpt-5-2025-08-07", + "output": [ + { + "type": "reasoning", + "id": f"rs_{response_id}", + "status": "completed", + "summary": [{"type": "summary_text", "text": reasoning_text}], + }, + { + "type": "message", + "id": message_id, + "role": "assistant", + "status": "completed", + "content": [{"type": "output_text", "text": text}], + }, + ], + "parallel_tool_calls": False, + "usage": { + "input_tokens": 64, + "output_tokens": 32, + "total_tokens": 96, + "input_tokens_details": {"cached_tokens": 0}, + "output_tokens_details": {"reasoning_tokens": 12}, + }, + } + + +@pytest_asyncio.fixture +async def msaf_codex_client( + httpx_mock: HTTPXMock, +) -> AsyncGenerator[tuple[OpenAIChatClient, list[dict[str, Any]]], None]: + upstream_payloads: list[dict[str, Any]] = [] + response_bodies = [ + _build_codex_response( + response_id="resp_analyst", + message_id="msg_analyst", + text="- Email\n- Password\n- Remember me\n- Inline errors\n- Redirect after success", + reasoning_text="Hidden analyst reasoning", + ), + _build_codex_response( + response_id="resp_editor", + message_id="msg_editor", + text=( + "## Goal\n" + "Определить требования к форме логина.\n\n" + "## Functional Requirements\n" + "- Поля email и пароль.\n" + "- Кнопка входа и remember me.\n\n" + "## Validation Rules\n" + "- Оба поля обязательны.\n" + "- Email валидируется по формату.\n\n" + "## Acceptance Criteria\n" + "- Успешный вход ведет к редиректу." 
+ ), + reasoning_text="Hidden editor reasoning", + ), + ] + + def upstream_callback(request: httpx.Request) -> httpx.Response: + payload = json.loads(request.content.decode() or "{}") + upstream_payloads.append(payload) + index = min(len(upstream_payloads), len(response_bodies)) - 1 + return httpx.Response( + status_code=200, + json=response_bodies[index], + headers={"content-type": "application/json"}, + ) + + httpx_mock.add_callback( + upstream_callback, + url="https://chatgpt.com/backend-api/codex/responses", + is_reusable=True, + ) + + setup_logging(json_logs=False, log_level_name="ERROR") + + settings = Settings( + enable_plugins=True, + plugins_disable_local_discovery=False, + enabled_plugins=["codex", "oauth_codex"], + plugins={ + "codex": {"enabled": True, "inject_detection_payload": False}, + "oauth_codex": {"enabled": True}, + "duckdb_storage": {"enabled": False}, + "analytics": {"enabled": False}, + "metrics": {"enabled": False}, + }, + llm={"openai_thinking_xml": False}, + ) + service_container = create_service_container(settings) + app = create_app(service_container) + + credentials_stub = SimpleNamespace( + access_token="test-codex-access-token", + expires_at=datetime.now(UTC) + timedelta(hours=1), + ) + profile_stub = SimpleNamespace(chatgpt_account_id="test-account-id") + detection_data = _build_detection_data() + + async def init_detection_stub(self): # type: ignore[no-untyped-def] + self._cached_data = detection_data + return detection_data + + http_client: httpx.AsyncClient | None = None + async with AsyncExitStack() as stack: + stack.enter_context( + patch( + "ccproxy.plugins.oauth_codex.manager.CodexTokenManager.load_credentials", + new=AsyncMock(return_value=credentials_stub), + ) + ) + stack.enter_context( + patch( + "ccproxy.plugins.oauth_codex.manager.CodexTokenManager.get_access_token", + new=AsyncMock(return_value="test-codex-access-token"), + ) + ) + stack.enter_context( + patch( + "ccproxy.plugins.oauth_codex.manager.CodexTokenManager.get_access_token_with_refresh", + new=AsyncMock(return_value="test-codex-access-token"), + ) + ) + stack.enter_context( + patch( + "ccproxy.plugins.oauth_codex.manager.CodexTokenManager.get_profile_quick", + new=AsyncMock(return_value=profile_stub), + ) + ) + stack.enter_context( + patch( + "ccproxy.plugins.codex.detection_service.CodexDetectionService.initialize_detection", + new=init_detection_stub, + ) + ) + try: + await initialize_plugins_startup(app, settings) + transport = httpx.ASGITransport(app=app) + http_client = httpx.AsyncClient( + transport=transport, + base_url="http://test", + ) + async_client = AsyncOpenAI( + api_key="ccproxy", + base_url="http://test/codex/v1", + http_client=http_client, + ) + client = OpenAIChatClient( + model_id="gpt-5.4", + async_client=async_client, + ) + yield client, upstream_payloads + finally: + if http_client is not None: + await http_client.aclose() + await shutdown_plugins(app) + await service_container.close() + + +async def test_msaf_real_library_agent_runs_through_codex_proxy( + msaf_codex_client: tuple[OpenAIChatClient, list[dict[str, Any]]], +) -> None: + client, upstream_payloads = msaf_codex_client + response = await client.get_response( + [Message("user", ["Составьте требования для формы логина."])], + options={ + "instructions": ( + f"{COMMON_INSTRUCTIONS} " + "Focus on fields, validations, and success criteria. " + "Output at most 5 bullets." 
+ )
+ },
+ )
+
+ assert len(upstream_payloads) == 1
+ assert all(
+ DETECTED_CLI_INSTRUCTIONS not in payload.get("instructions", "")
+ for payload in upstream_payloads
+ )
+ assert all(payload.get("stream") is True for payload in upstream_payloads)
+ assert all(payload.get("store") is False for payload in upstream_payloads)
+ assert "Detected Codex CLI instructions" not in upstream_payloads[0].get(
+ "instructions", ""
+ )
+ assert "<thinking>" not in response.text
+ assert "Email" in response.text
+ assert "Password" in response.text
+
+
+async def test_msaf_real_library_sequential_agents_keep_clean_messages(
+ msaf_codex_client: tuple[OpenAIChatClient, list[dict[str, Any]]],
+) -> None:
+ client, upstream_payloads = msaf_codex_client
+ analyst_response = await client.get_response(
+ [Message("user", ["Составьте требования для формы логина."])],
+ options={
+ "instructions": (
+ f"{COMMON_INSTRUCTIONS} "
+ "Focus on fields, validations, and success criteria. "
+ "Output at most 5 bullets."
+ )
+ },
+ )
+ editor_response = await client.get_response(
+ [
+ Message("user", ["Составьте требования для формы логина."], author_name="user"),
+ Message(
+ "assistant",
+ [analyst_response.text],
+ author_name="ProductAnalyst",
+ ),
+ ],
+ options={
+ "instructions": (
+ "You are the final editor for login form requirements. "
+ "Reply in the same language as the user request. "
+ "Produce one clean Markdown document with sections "
+ "Goal, Functional Requirements, Validation Rules, Acceptance Criteria."
+ )
+ },
+ )
+
+ assert len(upstream_payloads) == 2
+ assert "Hidden analyst reasoning" not in analyst_response.text
+ assert "Hidden editor reasoning" not in editor_response.text
+ assert "<thinking>" not in analyst_response.text
+ assert "<thinking>" not in editor_response.text
+ assert "## Goal" in editor_response.text
+ assert "## Functional Requirements" in editor_response.text
diff --git a/tests/plugins/codex/unit/test_adapter.py b/tests/plugins/codex/unit/test_adapter.py
index 840230ae..ac0e7caa 100644
--- a/tests/plugins/codex/unit/test_adapter.py
+++ b/tests/plugins/codex/unit/test_adapter.py
@@ -90,6 +90,25 @@ def adapter(
 http_pool_manager=mock_http_pool_manager,
 )
 
+ @pytest.fixture
+ def adapter_with_disabled_detection(
+ self,
+ mock_detection_service: Mock,
+ mock_auth_manager: Mock,
+ mock_http_pool_manager: Mock,
+ ) -> CodexAdapter:
+ """Create CodexAdapter with detection payload injection disabled."""
+ mock_config = Mock()
+ mock_config.base_url = "https://chat.openai.com/backend-anon"
+ mock_config.inject_detection_payload = False
+
+ return CodexAdapter(
+ detection_service=mock_detection_service,
+ config=mock_config,
+ auth_manager=mock_auth_manager,
+ http_pool_manager=mock_http_pool_manager,
+ )
+
 @pytest.mark.asyncio
 async def test_get_target_url(self, adapter: CodexAdapter) -> None:
 """Test target URL generation."""
@@ -217,6 +236,25 @@ async def test_prepare_provider_request_sets_stream_true(
 result_data = json.loads(result_body.decode())
 assert result_data["stream"] is True
 
+ @pytest.mark.asyncio
+ async def test_prepare_provider_request_removes_max_completion_tokens(
+ self, adapter: CodexAdapter
+ ) -> None:
+ body_dict = {
+ "messages": [{"role": "user", "content": "Hello"}],
+ "model": "gpt-4",
+ "max_completion_tokens": 321,
+ }
+ body = json.dumps(body_dict).encode()
+
+ result_body, _ = await adapter.prepare_provider_request(
+ body, {"content-type": "application/json"}, "/responses"
+ )
+
+ result_data = json.loads(result_body.decode())
+ assert "max_output_tokens" not in result_data
+ 
assert "max_completion_tokens" not in result_data + @pytest.mark.asyncio async def test_prepare_provider_request_preserves_encoded_body( self, adapter: CodexAdapter @@ -291,6 +329,95 @@ async def test_prepare_provider_request_applies_codex_template_defaults( assert result_data["prompt_cache_key"] != "template-cache-key" assert result_data["input"][0]["type"] == "message" + @pytest.mark.asyncio + async def test_prepare_provider_request_skips_detection_payload_when_disabled( + self, + adapter_with_disabled_detection: CodexAdapter, + mock_detection_service: Mock, + ) -> None: + """Verify that detection payload is not injected when disabled in config.""" + template = { + "instructions": "You are a Python expert.", + "include": ["reasoning.encrypted_content"], + "parallel_tool_calls": True, + "reasoning": {"effort": "medium"}, + "tool_choice": "auto", + "tools": [{"type": "function", "name": "exec_command"}], + } + prompts = DetectedPrompts.from_body(template) + mock_detection_service.get_detected_prompts = Mock(return_value=prompts) + mock_detection_service.get_system_prompt = Mock( + return_value=prompts.instructions_payload() + ) + + body = json.dumps( + { + "model": "gpt-5", + "instructions": "User supplied instructions", + "input": [{"role": "user", "content": [{"type": "input_text"}]}], + } + ).encode() + + result_body, _ = await adapter_with_disabled_detection.prepare_provider_request( + body, {}, "/responses" + ) + result_data = json.loads(result_body.decode()) + + # When detection is disabled, user instructions are preserved + assert result_data["instructions"] == "User supplied instructions" + # Template fields should not be injected + assert "include" not in result_data + assert "parallel_tool_calls" not in result_data + assert "reasoning" not in result_data + assert "tool_choice" not in result_data + assert "tools" not in result_data + # Input type normalization still occurs + assert result_data["input"][0]["type"] == "message" + + @pytest.mark.asyncio + async def test_prepare_provider_request_keeps_msaf_reasoning_when_detection_disabled( + self, adapter_with_disabled_detection: CodexAdapter + ) -> None: + """Verify that user-supplied reasoning is preserved when detection is disabled. + + This ensures that even with detection disabled, legitimate MSAF reasoning + parameters from the user are not stripped. 
+ """ + body = json.dumps( + { + "model": "gpt-5", + "instructions": "Workshop instructions", + "reasoning": {"effort": "medium", "summary": "auto"}, + "temperature": 0.2, + "max_tokens": 128, + "input": [ + { + "type": "message", + "role": "user", + "content": [ + { + "type": "input_text", + "text": "Draft login form requirements.", + } + ], + } + ], + } + ).encode() + + result_body, _ = await adapter_with_disabled_detection.prepare_provider_request( + body, {}, "/responses" + ) + result_data = json.loads(result_body.decode()) + + assert result_data["instructions"] == "Workshop instructions" + assert result_data["reasoning"] == {"effort": "medium", "summary": "auto"} + assert result_data["stream"] is True + assert result_data["store"] is False + # Provider-specific params are normalized/removed + assert "temperature" not in result_data + assert "max_tokens" not in result_data + @pytest.mark.asyncio async def test_process_provider_response(self, adapter: CodexAdapter) -> None: """Test response processing and format conversion.""" diff --git a/tests/unit/core/test_provider_factory_bypass.py b/tests/unit/core/test_provider_factory_bypass.py new file mode 100644 index 00000000..b0d6f840 --- /dev/null +++ b/tests/unit/core/test_provider_factory_bypass.py @@ -0,0 +1,48 @@ +"""Unit tests for provider factory bypass mode behaviour.""" + +from types import SimpleNamespace +from unittest.mock import MagicMock, patch + +import pytest + +from ccproxy.core.plugins import factories as plugin_factories +from ccproxy.plugins.codex.plugin import CodexFactory +from ccproxy.services.adapters.mock_adapter import MockAdapter + + +@pytest.mark.asyncio +async def test_create_adapter_logs_warning_in_bypass_mode() -> None: + factory = CodexFactory() + mock_handler = MagicMock() + service_container = MagicMock() + service_container.get_mock_handler.return_value = mock_handler + context = { + "settings": SimpleNamespace(server=SimpleNamespace(bypass_mode=True)), + "service_container": service_container, + } + + with patch.object(plugin_factories.logger, "warning") as warning: + adapter = await factory.create_adapter(context) # type: ignore[arg-type] + + assert isinstance(adapter, MockAdapter) + assert adapter.mock_handler is mock_handler + warning.assert_called_once_with( + "plugin_bypass_mode_enabled", + plugin="codex", + adapter="CodexAdapter", + category="lifecycle", + ) + + +@pytest.mark.asyncio +async def test_create_adapter_raises_clear_error_without_service_container() -> None: + factory = CodexFactory() + context = { + "settings": SimpleNamespace(server=SimpleNamespace(bypass_mode=True)), + } + + with pytest.raises( + RuntimeError, + match="Cannot initialize plugin 'codex' in bypass mode", + ): + await factory.create_adapter(context) # type: ignore[arg-type] diff --git a/tests/unit/llms/test_llms_streaming_settings.py b/tests/unit/llms/test_llms_streaming_settings.py index 9ba4961d..224c96ef 100644 --- a/tests/unit/llms/test_llms_streaming_settings.py +++ b/tests/unit/llms/test_llms_streaming_settings.py @@ -1,5 +1,14 @@ +import asyncio + import pytest +from ccproxy.api.bootstrap import create_service_container +from ccproxy.config.settings import Settings +from ccproxy.core.constants import FORMAT_OPENAI_CHAT, FORMAT_OPENAI_RESPONSES +from ccproxy.llms.formatters.context import ( + get_openai_thinking_xml, + register_openai_thinking_xml, +) from ccproxy.llms.streaming.processors import OpenAIStreamProcessor @@ -43,3 +52,26 @@ async def test_llm_openai_thinking_xml_env_disables_thinking_serialization(monke 
delta = c["choices"][0].get("delta") or {} if isinstance(delta, dict) and "content" in delta: assert " None: + settings = Settings(llm={"openai_thinking_xml": False}) + container = create_service_container(settings) + + registry = container.get_format_registry() + adapter = registry.get(FORMAT_OPENAI_RESPONSES, FORMAT_OPENAI_CHAT) + + assert getattr(adapter, "_openai_thinking_xml", None) is False + + +@pytest.mark.asyncio +async def test_openai_thinking_xml_contextvar_is_isolated_per_task() -> None: + async def worker(value: bool) -> bool | None: + register_openai_thinking_xml(value) + await asyncio.sleep(0) + return get_openai_thinking_xml() + + results = await asyncio.gather(worker(True), worker(False)) + + assert results == [True, False] + assert get_openai_thinking_xml() is None diff --git a/tests/unit/services/mocking/test_mock_handler.py b/tests/unit/services/mocking/test_mock_handler.py index a4e13005..e18618eb 100644 --- a/tests/unit/services/mocking/test_mock_handler.py +++ b/tests/unit/services/mocking/test_mock_handler.py @@ -1,9 +1,12 @@ """Tests for the mock response handler.""" import asyncio +import json +from unittest.mock import MagicMock import pytest +from ccproxy.core.constants import FORMAT_ANTHROPIC_MESSAGES, FORMAT_OPENAI_CHAT from ccproxy.core.request_context import RequestContext from ccproxy.services.mocking.mock_handler import MockResponseHandler @@ -36,12 +39,42 @@ def test_extract_message_type(body: bytes, expected: str) -> None: assert handler.extract_message_type(body) == expected +def test_extract_prompt_text_collects_nested_values() -> None: + handler = MockResponseHandler(DummyGenerator()) # type: ignore[arg-type] + body = json.dumps( + { + "instructions": "Top level instructions", + "input": [ + { + "content": [ + {"text": "First prompt"}, + {"text": "Second prompt"}, + ] + } + ], + } + ).encode() + + assert handler.extract_prompt_text(body) == ( + "Top level instructions\nFirst prompt\nSecond prompt" + ) + + +def test_extract_prompt_text_limits_deep_nesting() -> None: + handler = MockResponseHandler(DummyGenerator()) # type: ignore[arg-type] + nested: dict[str, object] = {"text": "too deep"} + for _ in range(12): + nested = {"input": [nested]} + + body = json.dumps(nested).encode() + + assert handler.extract_prompt_text(body) == "" + + @pytest.mark.asyncio async def test_generate_standard_response_success( monkeypatch: pytest.MonkeyPatch, ) -> None: - from unittest.mock import MagicMock - handler = MockResponseHandler(DummyGenerator(), error_rate=0.0) # type: ignore[arg-type] monkeypatch.setattr(handler, "should_simulate_error", lambda: False) monkeypatch.setattr("random.uniform", lambda *args, **kwargs: 0) @@ -54,7 +87,10 @@ async def fast_sleep(_: float) -> None: mock_logger = MagicMock() ctx = RequestContext(request_id="req", start_time=0, logger=mock_logger) # type: ignore[arg-type] status, headers, body = await handler.generate_standard_response( - model="m1", is_openai_format=False, ctx=ctx, message_type="short" + model="m1", + target_format=FORMAT_ANTHROPIC_MESSAGES, + ctx=ctx, + message_type="short", ) assert status == 200 @@ -67,8 +103,6 @@ async def fast_sleep(_: float) -> None: async def test_generate_standard_response_error( monkeypatch: pytest.MonkeyPatch, ) -> None: - from unittest.mock import MagicMock - handler = MockResponseHandler(DummyGenerator(), error_rate=1.0) # type: ignore[arg-type] monkeypatch.setattr(handler, "should_simulate_error", lambda: True) @@ -80,7 +114,10 @@ async def fast_sleep(_: float) -> None: mock_logger = 
MagicMock() mock_ctx = RequestContext(request_id="req", start_time=0, logger=mock_logger) # type: ignore[arg-type] status, headers, body = await handler.generate_standard_response( - model="m1", is_openai_format=True, ctx=mock_ctx, message_type="short" + model="m1", + target_format=FORMAT_OPENAI_CHAT, + ctx=mock_ctx, + message_type="short", ) assert status == 429 @@ -89,14 +126,12 @@ async def fast_sleep(_: float) -> None: @pytest.mark.asyncio async def test_generate_streaming_response(monkeypatch: pytest.MonkeyPatch) -> None: - from unittest.mock import MagicMock - handler = MockResponseHandler(DummyGenerator(), error_rate=0.0) # type: ignore[arg-type] mock_logger = MagicMock() ctx = RequestContext(request_id="req", start_time=0, logger=mock_logger) # type: ignore[arg-type] stream = await handler.generate_streaming_response( - model="m1", is_openai_format=True, ctx=ctx + model="m1", target_format=FORMAT_OPENAI_CHAT, ctx=ctx ) chunks = [] diff --git a/tests/unit/services/test_mock_adapter.py b/tests/unit/services/test_mock_adapter.py index c852fc65..2f749a47 100644 --- a/tests/unit/services/test_mock_adapter.py +++ b/tests/unit/services/test_mock_adapter.py @@ -1,10 +1,16 @@ """Tests for the mock adapter streaming behaviour.""" +import json from types import SimpleNamespace from typing import Any import pytest +from ccproxy.core.constants import ( + FORMAT_ANTHROPIC_MESSAGES, + FORMAT_OPENAI_CHAT, + FORMAT_OPENAI_RESPONSES, +) from ccproxy.services.adapters.mock_adapter import MockAdapter @@ -21,16 +27,32 @@ def extract_message_type(self, body: bytes) -> str: self.calls.append(("extract", (body,))) return "message" + def extract_prompt_text(self, body: bytes) -> str: + self.calls.append(("prompt", (body,))) + return "prompt" + async def generate_standard_response( - self, model: Any, is_openai: Any, ctx: Any, message_type: Any + self, + model: Any, + target_format: Any, + ctx: Any, + message_type: Any, + prompt_text: Any, ) -> tuple[int, dict[str, str], bytes]: - self.calls.append(("standard", (model, is_openai, message_type))) - return 202, {"X-Test": "yes"}, b"standard" + self.calls.append( + ("standard", (model, target_format, message_type, prompt_text)) + ) + return 202, {"X-Test": "yes"}, json.dumps({"format": target_format}).encode() async def generate_streaming_response( - self, model: Any, is_openai: Any, ctx: Any, message_type: Any + self, + model: Any, + target_format: Any, + ctx: Any, + message_type: Any, + prompt_text: Any, ) -> str: - self.calls.append(("stream", (model, is_openai, message_type))) + self.calls.append(("stream", (model, target_format, message_type, prompt_text))) return "stream-object" @@ -43,7 +65,8 @@ def __init__( state_dict: dict[str, Any] = {} if context_endpoint is not None: state_dict["context"] = SimpleNamespace( - metadata={"endpoint": context_endpoint} + metadata={"endpoint": context_endpoint}, + format_chain=[], ) self.state = SimpleNamespace(**state_dict) @@ -57,14 +80,16 @@ async def test_handle_request_returns_standard_response() -> None: adapter = _TestableMockAdapter(handler) request: Any = FakeRequest( - b'{"model": "gpt", "stream": false}', "/openai/v1/messages" + b'{"model": "gpt", "stream": false}', "/codex/v1/responses" ) response = await adapter.handle_request(request) assert response.status_code == 202 assert response.headers["X-Test"] == "yes" + assert json.loads(response.body)["format"] == FORMAT_OPENAI_RESPONSES assert handler.calls[0][0] == "extract" - assert handler.calls[1][0] == "standard" + assert handler.calls[1][0] == "prompt" + 
assert handler.calls[2][0] == "standard" @pytest.mark.asyncio @@ -82,6 +107,83 @@ async def test_handle_request_streaming_path() -> None: assert handler.calls[-1][0] == "stream" +@pytest.mark.asyncio +async def test_handle_request_prefers_context_format_chain() -> None: + handler: Any = StubHandler() + adapter = _TestableMockAdapter(handler) + + request: Any = FakeRequest( + b'{"model": "gpt"}', "/codex/v1/chat/completions", "/v1/chat/completions" + ) + request.state.context.format_chain = [FORMAT_OPENAI_CHAT] + + response = await adapter.handle_request(request) + + assert response.status_code == 202 + assert json.loads(response.body)["format"] == FORMAT_OPENAI_CHAT + + +@pytest.mark.asyncio +async def test_handle_request_falls_back_to_chat_endpoint_detection() -> None: + handler: Any = StubHandler() + adapter = _TestableMockAdapter(handler) + + request: Any = FakeRequest( + b'{"model": "gpt"}', "/codex/v1/chat/completions", "/internal/unknown" + ) + + response = await adapter.handle_request(request) + + assert response.status_code == 202 + assert json.loads(response.body)["format"] == FORMAT_OPENAI_CHAT + + +@pytest.mark.asyncio +async def test_handle_request_falls_back_to_responses_endpoint_detection() -> None: + handler: Any = StubHandler() + adapter = _TestableMockAdapter(handler) + + request: Any = FakeRequest( + b'{"model": "gpt"}', "/codex/v1/responses", "/internal/unknown" + ) + + response = await adapter.handle_request(request) + + assert response.status_code == 202 + assert json.loads(response.body)["format"] == FORMAT_OPENAI_RESPONSES + + +@pytest.mark.asyncio +async def test_handle_request_ignores_unknown_format_chain_and_uses_endpoint() -> None: + handler: Any = StubHandler() + adapter = _TestableMockAdapter(handler) + + request: Any = FakeRequest( + b'{"model": "gpt"}', "/codex/v1/chat/completions", "/codex/v1/chat/completions" + ) + request.state.context.format_chain = ["unsupported.format"] + + response = await adapter.handle_request(request) + + assert response.status_code == 202 + assert json.loads(response.body)["format"] == FORMAT_OPENAI_CHAT + + +@pytest.mark.asyncio +async def test_handle_request_defaults_to_anthropic_for_unknown_endpoint() -> None: + handler: Any = StubHandler() + adapter = _TestableMockAdapter(handler) + + request: Any = FakeRequest( + b'{"model": "claude"}', "/provider/v1/messages", "/provider/v1/messages" + ) + + response = await adapter.handle_request(request) + + assert response.status_code == 202 + assert json.loads(response.body)["format"] == FORMAT_ANTHROPIC_MESSAGES + + @pytest.mark.asyncio async def test_handle_streaming_uses_endpoint_kwarg() -> None: handler: Any = StubHandler() diff --git a/uv.lock b/uv.lock index 975545cf..a23e938d 100644 --- a/uv.lock +++ b/uv.lock @@ -2,6 +2,40 @@ version = 1 revision = 3 requires-python = ">=3.11" +[[package]] +name = "agent-framework-core" +version = "1.0.0rc4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-ai-projects" }, + { name = "azure-identity" }, + { name = "mcp", extra = ["ws"] }, + { name = "openai" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-sdk" }, + { name = "opentelemetry-semantic-conventions-ai" }, + { name = "packaging" }, + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f1/5a/b472f9a57235bb72899151ec5cd3c925825e16018689e0300cb822cf00f8/agent_framework_core-1.0.0rc4.tar.gz", hash = 
"sha256:f394eb95ae877ae854aa7a3e499f76f34b26102808009a66b264ded89c6b6dbd", size = 302446, upload-time = "2026-03-11T23:19:29.198Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/06/d7/89776e7e919e46fd83ae464a416966715f4f40083297d42574e3d45214f6/agent_framework_core-1.0.0rc4-py3-none-any.whl", hash = "sha256:f01a6997be0f5e05853eb6be341dbca692c4e5d6999de5f3e8364296de50635f", size = 348882, upload-time = "2026-03-11T23:19:43.158Z" }, +] + +[[package]] +name = "agent-framework-orchestrations" +version = "1.0.0b260311" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "agent-framework-core" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b3/7f/43aeca0b4d1dc6156539d1723ea3d48599ee10bf660280577593e1441b1b/agent_framework_orchestrations-1.0.0b260311.tar.gz", hash = "sha256:a303a156c066954bbed5b1ac6e7b3dd8049ffe3bbf0c1841f5ab24e97a8f1fd9", size = 55139, upload-time = "2026-03-11T23:19:52.793Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/58/83/ef99c5a45c3d45eeaed1ffcb4f3294fa50f4d19c0f69771693b7d295b0bd/agent_framework_orchestrations-1.0.0b260311-py3-none-any.whl", hash = "sha256:cc7cdebe0abb76208d2c6618d410bf77f0806478dbe25ad1467b27f4f70b8dba", size = 61073, upload-time = "2026-03-11T23:19:38.618Z" }, +] + [[package]] name = "aioconsole" version = "0.8.2" @@ -52,6 +86,67 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, ] +[[package]] +name = "azure-ai-projects" +version = "2.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-core" }, + { name = "azure-identity" }, + { name = "azure-storage-blob" }, + { name = "isodate" }, + { name = "openai" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/86/f9/a15c8a16e35e6d620faebabc6cc4f9e2f4b7f1d962cc6f58931c46947e24/azure_ai_projects-2.0.1.tar.gz", hash = "sha256:c8c64870aa6b89903af69a4ff28b4eff3df9744f14615ea572cae87394946a0c", size = 491774, upload-time = "2026-03-12T19:59:02.712Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/f7/290ca39501c06c6e23b46ba9f7f3dfb05ecc928cde105fed85d6845060dd/azure_ai_projects-2.0.1-py3-none-any.whl", hash = "sha256:dfda540d256e67a52bf81c75418b6bf92b811b96693fe45787e154a888ad2396", size = 236560, upload-time = "2026-03-12T19:59:04.249Z" }, +] + +[[package]] +name = "azure-core" +version = "1.38.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c8/29/9641b73248745774a52c7ce7f965ed1febbdea787ec21caad3ae6891d18a/azure_core-1.38.3.tar.gz", hash = "sha256:a7931fd445cb4af8802c6f39c6a326bbd1e34b115846550a8245fa656ead6f8e", size = 367267, upload-time = "2026-03-12T20:28:21.122Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/3d/ac86083efa45a439d0bbfb7947615227813d368b9e1e93d23fd30de6fec0/azure_core-1.38.3-py3-none-any.whl", hash = "sha256:bf59d29765bf4748ab9edf25f98a30b7ea9797f43e367c06d846a30b29c1f845", size = 218231, upload-time = "2026-03-12T20:28:22.462Z" }, +] + +[[package]] +name = "azure-identity" +version = "1.25.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-core" }, + { name = 
"cryptography" }, + { name = "msal" }, + { name = "msal-extensions" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c5/0e/3a63efb48aa4a5ae2cfca61ee152fbcb668092134d3eb8bfda472dd5c617/azure_identity-1.25.3.tar.gz", hash = "sha256:ab23c0d63015f50b630ef6c6cf395e7262f439ce06e5d07a64e874c724f8d9e6", size = 286304, upload-time = "2026-03-13T01:12:20.892Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/9a/417b3a533e01953a7c618884df2cb05a71e7b68bdbce4fbdb62349d2a2e8/azure_identity-1.25.3-py3-none-any.whl", hash = "sha256:f4d0b956a8146f30333e071374171f3cfa7bdb8073adb8c3814b65567aa7447c", size = 192138, upload-time = "2026-03-13T01:12:22.951Z" }, +] + +[[package]] +name = "azure-storage-blob" +version = "12.28.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-core" }, + { name = "cryptography" }, + { name = "isodate" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/24/072ba8e27b0e2d8fec401e9969b429d4f5fc4c8d4f0f05f4661e11f7234a/azure_storage_blob-12.28.0.tar.gz", hash = "sha256:e7d98ea108258d29aa0efbfd591b2e2075fa1722a2fae8699f0b3c9de11eff41", size = 604225, upload-time = "2026-01-06T23:48:57.282Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d8/3a/6ef2047a072e54e1142718d433d50e9514c999a58f51abfff7902f3a72f8/azure_storage_blob-12.28.0-py3-none-any.whl", hash = "sha256:00fb1db28bf6a7b7ecaa48e3b1d5c83bfadacc5a678b77826081304bd87d6461", size = 431499, upload-time = "2026-01-06T23:48:58.995Z" }, +] + [[package]] name = "babel" version = "2.17.0" @@ -193,6 +288,8 @@ all = [ { name = "textual" }, ] dev = [ + { name = "agent-framework-core" }, + { name = "agent-framework-orchestrations" }, { name = "aioconsole" }, { name = "bandit" }, { name = "claude-agent-sdk" }, @@ -248,6 +345,8 @@ docs = [ { name = "mkdocstrings", extra = ["python"] }, ] test = [ + { name = "agent-framework-core" }, + { name = "agent-framework-orchestrations" }, { name = "mypy" }, { name = "pytest" }, { name = "pytest-asyncio" }, @@ -306,6 +405,8 @@ all = [ { name = "textual", specifier = ">=3.7.1" }, ] dev = [ + { name = "agent-framework-core", specifier = ">=1.0.0rc4,<2" }, + { name = "agent-framework-orchestrations", specifier = ">=1.0.0b260311,<2" }, { name = "aioconsole", specifier = ">=0.8.1" }, { name = "bandit" }, { name = "claude-agent-sdk", specifier = ">=0.1.0" }, @@ -361,6 +462,8 @@ docs = [ { name = "mkdocstrings", extras = ["python"], specifier = ">=0.24.0" }, ] test = [ + { name = "agent-framework-core", specifier = ">=1.0.0rc4,<2" }, + { name = "agent-framework-orchestrations", specifier = ">=1.0.0b260311,<2" }, { name = "mypy" }, { name = "pytest" }, { name = "pytest-asyncio" }, @@ -389,6 +492,8 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, @@ -397,6 +502,11 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, @@ -404,6 +514,11 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = 
"https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, @@ -411,18 +526,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" }, + { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, + { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, ] [[package]] @@ -651,6 +779,7 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/78/19/f748958276519adf6a0c1e79e7b8860b4830dda55ccdf29f2719b5fc499c/cryptography-46.0.4.tar.gz", hash = "sha256:bfd019f60f8abc2ed1b9be4ddc21cfef059c841d86d710bb69909a688cbb8f59", size = 749301, upload-time = "2026-01-28T00:24:37.379Z" } wheels = [ + { url = 
"https://files.pythonhosted.org/packages/8d/99/157aae7949a5f30d51fcb1a9851e8ebd5c74bf99b5285d8bb4b8b9ee641e/cryptography-46.0.4-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:281526e865ed4166009e235afadf3a4c4cba6056f99336a99efba65336fd5485", size = 7173686, upload-time = "2026-01-28T00:23:07.515Z" }, { url = "https://files.pythonhosted.org/packages/87/91/874b8910903159043b5c6a123b7e79c4559ddd1896e38967567942635778/cryptography-46.0.4-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5f14fba5bf6f4390d7ff8f086c566454bff0411f6d8aa7af79c88b6f9267aecc", size = 4275871, upload-time = "2026-01-28T00:23:09.439Z" }, { url = "https://files.pythonhosted.org/packages/c0/35/690e809be77896111f5b195ede56e4b4ed0435b428c2f2b6d35046fbb5e8/cryptography-46.0.4-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:47bcd19517e6389132f76e2d5303ded6cf3f78903da2158a671be8de024f4cd0", size = 4423124, upload-time = "2026-01-28T00:23:11.529Z" }, { url = "https://files.pythonhosted.org/packages/1a/5b/a26407d4f79d61ca4bebaa9213feafdd8806dc69d3d290ce24996d3cfe43/cryptography-46.0.4-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:01df4f50f314fbe7009f54046e908d1754f19d0c6d3070df1e6268c5a4af09fa", size = 4277090, upload-time = "2026-01-28T00:23:13.123Z" }, @@ -662,6 +791,9 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a6/f7/6d43cbaddf6f65b24816e4af187d211f0bc536a29961f69faedc48501d8e/cryptography-46.0.4-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3d425eacbc9aceafd2cb429e42f4e5d5633c6f873f5e567077043ef1b9bbf616", size = 4454641, upload-time = "2026-01-28T00:23:22.866Z" }, { url = "https://files.pythonhosted.org/packages/9e/4f/ebd0473ad656a0ac912a16bd07db0f5d85184924e14fc88feecae2492834/cryptography-46.0.4-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91627ebf691d1ea3976a031b61fb7bac1ccd745afa03602275dda443e11c8de0", size = 4405159, upload-time = "2026-01-28T00:23:25.278Z" }, { url = "https://files.pythonhosted.org/packages/d1/f7/7923886f32dc47e27adeff8246e976d77258fd2aa3efdd1754e4e323bf49/cryptography-46.0.4-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2d08bc22efd73e8854b0b7caff402d735b354862f1145d7be3b9c0f740fef6a0", size = 4666059, upload-time = "2026-01-28T00:23:26.766Z" }, + { url = "https://files.pythonhosted.org/packages/eb/a7/0fca0fd3591dffc297278a61813d7f661a14243dd60f499a7a5b48acb52a/cryptography-46.0.4-cp311-abi3-win32.whl", hash = "sha256:82a62483daf20b8134f6e92898da70d04d0ef9a75829d732ea1018678185f4f5", size = 3026378, upload-time = "2026-01-28T00:23:28.317Z" }, + { url = "https://files.pythonhosted.org/packages/2d/12/652c84b6f9873f0909374864a57b003686c642ea48c84d6c7e2c515e6da5/cryptography-46.0.4-cp311-abi3-win_amd64.whl", hash = "sha256:6225d3ebe26a55dbc8ead5ad1265c0403552a63336499564675b29eb3184c09b", size = 3478614, upload-time = "2026-01-28T00:23:30.275Z" }, + { url = "https://files.pythonhosted.org/packages/b9/27/542b029f293a5cce59349d799d4d8484b3b1654a7b9a0585c266e974a488/cryptography-46.0.4-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:485e2b65d25ec0d901bca7bcae0f53b00133bf3173916d8e421f6fddde103908", size = 7116417, upload-time = "2026-01-28T00:23:31.958Z" }, { url = "https://files.pythonhosted.org/packages/f8/f5/559c25b77f40b6bf828eabaf988efb8b0e17b573545edb503368ca0a2a03/cryptography-46.0.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:078e5f06bd2fa5aea5a324f2a09f914b1484f1d0c2a4d6a8a28c74e72f65f2da", size = 4264508, upload-time = 
"2026-01-28T00:23:34.264Z" }, { url = "https://files.pythonhosted.org/packages/49/a1/551fa162d33074b660dc35c9bc3616fefa21a0e8c1edd27b92559902e408/cryptography-46.0.4-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dce1e4f068f03008da7fa51cc7abc6ddc5e5de3e3d1550334eaf8393982a5829", size = 4409080, upload-time = "2026-01-28T00:23:35.793Z" }, { url = "https://files.pythonhosted.org/packages/b0/6a/4d8d129a755f5d6df1bbee69ea2f35ebfa954fa1847690d1db2e8bca46a5/cryptography-46.0.4-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:2067461c80271f422ee7bdbe79b9b4be54a5162e90345f86a23445a0cf3fd8a2", size = 4270039, upload-time = "2026-01-28T00:23:37.263Z" }, @@ -673,6 +805,9 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/83/17/259409b8349aa10535358807a472c6a695cf84f106022268d31cea2b6c97/cryptography-46.0.4-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:df4a817fa7138dd0c96c8c8c20f04b8aaa1fac3bbf610913dcad8ea82e1bfd3f", size = 4441254, upload-time = "2026-01-28T00:23:48.403Z" }, { url = "https://files.pythonhosted.org/packages/9c/fe/e4a1b0c989b00cee5ffa0764401767e2d1cf59f45530963b894129fd5dce/cryptography-46.0.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b1de0ebf7587f28f9190b9cb526e901bf448c9e6a99655d2b07fff60e8212a82", size = 4396520, upload-time = "2026-01-28T00:23:50.26Z" }, { url = "https://files.pythonhosted.org/packages/b3/81/ba8fd9657d27076eb40d6a2f941b23429a3c3d2f56f5a921d6b936a27bc9/cryptography-46.0.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9b4d17bc7bd7cdd98e3af40b441feaea4c68225e2eb2341026c84511ad246c0c", size = 4651479, upload-time = "2026-01-28T00:23:51.674Z" }, + { url = "https://files.pythonhosted.org/packages/00/03/0de4ed43c71c31e4fe954edd50b9d28d658fef56555eba7641696370a8e2/cryptography-46.0.4-cp314-cp314t-win32.whl", hash = "sha256:c411f16275b0dea722d76544a61d6421e2cc829ad76eec79280dbdc9ddf50061", size = 3001986, upload-time = "2026-01-28T00:23:53.485Z" }, + { url = "https://files.pythonhosted.org/packages/5c/70/81830b59df7682917d7a10f833c4dab2a5574cd664e86d18139f2b421329/cryptography-46.0.4-cp314-cp314t-win_amd64.whl", hash = "sha256:728fedc529efc1439eb6107b677f7f7558adab4553ef8669f0d02d42d7b959a7", size = 3468288, upload-time = "2026-01-28T00:23:55.09Z" }, + { url = "https://files.pythonhosted.org/packages/56/f7/f648fdbb61d0d45902d3f374217451385edc7e7768d1b03ff1d0e5ffc17b/cryptography-46.0.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a9556ba711f7c23f77b151d5798f3ac44a13455cc68db7697a1096e6d0563cab", size = 7169583, upload-time = "2026-01-28T00:23:56.558Z" }, { url = "https://files.pythonhosted.org/packages/d8/cc/8f3224cbb2a928de7298d6ed4790f5ebc48114e02bdc9559196bfb12435d/cryptography-46.0.4-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8bf75b0259e87fa70bddc0b8b4078b76e7fd512fd9afae6c1193bcf440a4dbef", size = 4275419, upload-time = "2026-01-28T00:23:58.364Z" }, { url = "https://files.pythonhosted.org/packages/17/43/4a18faa7a872d00e4264855134ba82d23546c850a70ff209e04ee200e76f/cryptography-46.0.4-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3c268a3490df22270955966ba236d6bc4a8f9b6e4ffddb78aac535f1a5ea471d", size = 4419058, upload-time = "2026-01-28T00:23:59.867Z" }, { url = "https://files.pythonhosted.org/packages/ee/64/6651969409821d791ba12346a124f55e1b76f66a819254ae840a965d4b9c/cryptography-46.0.4-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:812815182f6a0c1d49a37893a303b44eaac827d7f0d582cecfc81b6427f22973", size = 
4278151, upload-time = "2026-01-28T00:24:01.731Z" }, @@ -684,10 +819,14 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/03/c3/c90a2cb358de4ac9309b26acf49b2a100957e1ff5cc1e98e6c4996576710/cryptography-46.0.4-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:6bb5157bf6a350e5b28aee23beb2d84ae6f5be390b2f8ee7ea179cda077e1019", size = 4451216, upload-time = "2026-01-28T00:24:13.975Z" }, { url = "https://files.pythonhosted.org/packages/96/2c/8d7f4171388a10208671e181ca43cdc0e596d8259ebacbbcfbd16de593da/cryptography-46.0.4-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dd5aba870a2c40f87a3af043e0dee7d9eb02d4aff88a797b48f2b43eff8c3ab4", size = 4404299, upload-time = "2026-01-28T00:24:16.169Z" }, { url = "https://files.pythonhosted.org/packages/e9/23/cbb2036e450980f65c6e0a173b73a56ff3bccd8998965dea5cc9ddd424a5/cryptography-46.0.4-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:93d8291da8d71024379ab2cb0b5c57915300155ad42e07f76bea6ad838d7e59b", size = 4664837, upload-time = "2026-01-28T00:24:17.629Z" }, + { url = "https://files.pythonhosted.org/packages/0a/21/f7433d18fe6d5845329cbdc597e30caf983229c7a245bcf54afecc555938/cryptography-46.0.4-cp38-abi3-win32.whl", hash = "sha256:0563655cb3c6d05fb2afe693340bc050c30f9f34e15763361cf08e94749401fc", size = 3009779, upload-time = "2026-01-28T00:24:20.198Z" }, + { url = "https://files.pythonhosted.org/packages/3a/6a/bd2e7caa2facffedf172a45c1a02e551e6d7d4828658c9a245516a598d94/cryptography-46.0.4-cp38-abi3-win_amd64.whl", hash = "sha256:fa0900b9ef9c49728887d1576fd8d9e7e3ea872fa9b25ef9b64888adc434e976", size = 3466633, upload-time = "2026-01-28T00:24:21.851Z" }, + { url = "https://files.pythonhosted.org/packages/59/e0/f9c6c53e1f2a1c2507f00f2faba00f01d2f334b35b0fbfe5286715da2184/cryptography-46.0.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:766330cce7416c92b5e90c3bb71b1b79521760cdcfc3a6a1a182d4c9fab23d2b", size = 3476316, upload-time = "2026-01-28T00:24:24.144Z" }, { url = "https://files.pythonhosted.org/packages/27/7a/f8d2d13227a9a1a9fe9c7442b057efecffa41f1e3c51d8622f26b9edbe8f/cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c236a44acfb610e70f6b3e1c3ca20ff24459659231ef2f8c48e879e2d32b73da", size = 4216693, upload-time = "2026-01-28T00:24:25.758Z" }, { url = "https://files.pythonhosted.org/packages/c5/de/3787054e8f7972658370198753835d9d680f6cd4a39df9f877b57f0dd69c/cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8a15fb869670efa8f83cbffbc8753c1abf236883225aed74cd179b720ac9ec80", size = 4382765, upload-time = "2026-01-28T00:24:27.577Z" }, { url = "https://files.pythonhosted.org/packages/8a/5f/60e0afb019973ba6a0b322e86b3d61edf487a4f5597618a430a2a15f2d22/cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:fdc3daab53b212472f1524d070735b2f0c214239df131903bae1d598016fa822", size = 4216066, upload-time = "2026-01-28T00:24:29.056Z" }, { url = "https://files.pythonhosted.org/packages/81/8e/bf4a0de294f147fee66f879d9bae6f8e8d61515558e3d12785dd90eca0be/cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:44cc0675b27cadb71bdbb96099cca1fa051cd11d2ade09e5cd3a2edb929ed947", size = 4382025, upload-time = "2026-01-28T00:24:30.681Z" }, + { url = "https://files.pythonhosted.org/packages/79/f4/9ceb90cfd6a3847069b0b0b353fd3075dc69b49defc70182d8af0c4ca390/cryptography-46.0.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:be8c01a7d5a55f9a47d1888162b76c8f49d62b234d88f0ff91a9fbebe32ffbc3", size = 3406043, upload-time = 
"2026-01-28T00:24:32.236Z" }, ] [[package]] @@ -705,6 +844,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, ] +[[package]] +name = "distro" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, +] + [[package]] name = "dnspython" version = "2.8.0" @@ -901,7 +1049,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2", size = 272305, upload-time = "2025-08-07T13:15:41.288Z" }, { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246", size = 632472, upload-time = "2025-08-07T13:42:55.044Z" }, { url = "https://files.pythonhosted.org/packages/ae/8f/95d48d7e3d433e6dae5b1682e4292242a53f22df82e6d3dda81b1701a960/greenlet-3.2.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:94abf90142c2a18151632371140b3dba4dee031633fe614cb592dbb6c9e17bc3", size = 644646, upload-time = "2025-08-07T13:45:26.523Z" }, - { url = "https://files.pythonhosted.org/packages/d5/5e/405965351aef8c76b8ef7ad370e5da58d57ef6068df197548b015464001a/greenlet-3.2.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:4d1378601b85e2e5171b99be8d2dc85f594c79967599328f95c1dc1a40f1c633", size = 640519, upload-time = "2025-08-07T13:53:13.928Z" }, { url = "https://files.pythonhosted.org/packages/25/5d/382753b52006ce0218297ec1b628e048c4e64b155379331f25a7316eb749/greenlet-3.2.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0db5594dce18db94f7d1650d7489909b57afde4c580806b8d9203b6e79cdc079", size = 639707, upload-time = "2025-08-07T13:18:27.146Z" }, { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8", size = 587684, upload-time = "2025-08-07T13:18:25.164Z" }, { url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52", size = 1116647, upload-time = "2025-08-07T13:42:38.655Z" }, @@ -912,7 +1059,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd", size = 274079, upload-time = "2025-08-07T13:15:45.033Z" }, { url = "https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb", size = 640997, upload-time = "2025-08-07T13:42:56.234Z" }, { url = "https://files.pythonhosted.org/packages/3b/16/035dcfcc48715ccd345f3a93183267167cdd162ad123cd93067d86f27ce4/greenlet-3.2.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f28588772bb5fb869a8eb331374ec06f24a83a9c25bfa1f38b6993afe9c1e968", size = 655185, upload-time = "2025-08-07T13:45:27.624Z" }, - { url = "https://files.pythonhosted.org/packages/31/da/0386695eef69ffae1ad726881571dfe28b41970173947e7c558d9998de0f/greenlet-3.2.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5c9320971821a7cb77cfab8d956fa8e39cd07ca44b6070db358ceb7f8797c8c9", size = 649926, upload-time = "2025-08-07T13:53:15.251Z" }, { url = "https://files.pythonhosted.org/packages/68/88/69bf19fd4dc19981928ceacbc5fd4bb6bc2215d53199e367832e98d1d8fe/greenlet-3.2.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c60a6d84229b271d44b70fb6e5fa23781abb5d742af7b808ae3f6efd7c9c60f6", size = 651839, upload-time = "2025-08-07T13:18:30.281Z" }, { url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0", size = 607586, upload-time = "2025-08-07T13:18:28.544Z" }, { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0", size = 1123281, upload-time = "2025-08-07T13:42:39.858Z" }, @@ -923,7 +1069,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31", size = 272814, upload-time = "2025-08-07T13:15:50.011Z" }, { url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945", size = 641073, upload-time = "2025-08-07T13:42:57.23Z" }, { url = "https://files.pythonhosted.org/packages/f7/0b/bc13f787394920b23073ca3b6c4a7a21396301ed75a655bcb47196b50e6e/greenlet-3.2.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:710638eb93b1fa52823aa91bf75326f9ecdfd5e0466f00789246a5280f4ba0fc", size = 655191, upload-time = "2025-08-07T13:45:29.752Z" }, - { url = "https://files.pythonhosted.org/packages/f2/d6/6adde57d1345a8d0f14d31e4ab9c23cfe8e2cd39c3baf7674b4b0338d266/greenlet-3.2.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = 
"sha256:c5111ccdc9c88f423426df3fd1811bfc40ed66264d35aa373420a34377efc98a", size = 649516, upload-time = "2025-08-07T13:53:16.314Z" }, { url = "https://files.pythonhosted.org/packages/7f/3b/3a3328a788d4a473889a2d403199932be55b1b0060f4ddd96ee7cdfcad10/greenlet-3.2.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d76383238584e9711e20ebe14db6c88ddcedc1829a9ad31a584389463b5aa504", size = 652169, upload-time = "2025-08-07T13:18:32.861Z" }, { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671", size = 610497, upload-time = "2025-08-07T13:18:31.636Z" }, { url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b", size = 1121662, upload-time = "2025-08-07T13:42:41.117Z" }, @@ -934,7 +1079,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/22/5c/85273fd7cc388285632b0498dbbab97596e04b154933dfe0f3e68156c68c/greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0", size = 273586, upload-time = "2025-08-07T13:16:08.004Z" }, { url = "https://files.pythonhosted.org/packages/d1/75/10aeeaa3da9332c2e761e4c50d4c3556c21113ee3f0afa2cf5769946f7a3/greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f", size = 686346, upload-time = "2025-08-07T13:42:59.944Z" }, { url = "https://files.pythonhosted.org/packages/c0/aa/687d6b12ffb505a4447567d1f3abea23bd20e73a5bed63871178e0831b7a/greenlet-3.2.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c17b6b34111ea72fc5a4e4beec9711d2226285f0386ea83477cbb97c30a3f3a5", size = 699218, upload-time = "2025-08-07T13:45:30.969Z" }, - { url = "https://files.pythonhosted.org/packages/dc/8b/29aae55436521f1d6f8ff4e12fb676f3400de7fcf27fccd1d4d17fd8fecd/greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1", size = 694659, upload-time = "2025-08-07T13:53:17.759Z" }, { url = "https://files.pythonhosted.org/packages/92/2e/ea25914b1ebfde93b6fc4ff46d6864564fba59024e928bdc7de475affc25/greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735", size = 695355, upload-time = "2025-08-07T13:18:34.517Z" }, { url = "https://files.pythonhosted.org/packages/72/60/fc56c62046ec17f6b0d3060564562c64c862948c9d4bc8aa807cf5bd74f4/greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337", size = 657512, upload-time = "2025-08-07T13:18:33.969Z" }, { url = "https://files.pythonhosted.org/packages/23/6e/74407aed965a4ab6ddd93a7ded3180b730d281c77b765788419484cdfeef/greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269", size = 1612508, upload-time = "2025-11-04T12:42:23.427Z" }, @@ -1119,6 +1263,15 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, ] +[[package]] +name = "isodate" +version = "0.7.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/4d/e940025e2ce31a8ce1202635910747e5a87cc3a6a6bb2d00973375014749/isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6", size = 29705, upload-time = "2024-10-08T23:04:11.5Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15", size = 22320, upload-time = "2024-10-08T23:04:09.501Z" }, +] + [[package]] name = "jaraco-classes" version = "3.4.0" @@ -1176,6 +1329,91 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, ] +[[package]] +name = "jiter" +version = "0.13.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/5e/4ec91646aee381d01cdb9974e30882c9cd3b8c5d1079d6b5ff4af522439a/jiter-0.13.0.tar.gz", hash = "sha256:f2839f9c2c7e2dffc1bc5929a510e14ce0a946be9365fd1219e7ef342dae14f4", size = 164847, upload-time = "2026-02-02T12:37:56.441Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/71/29/499f8c9eaa8a16751b1c0e45e6f5f1761d180da873d417996cc7bddc8eef/jiter-0.13.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ea026e70a9a28ebbdddcbcf0f1323128a8db66898a06eaad3a4e62d2f554d096", size = 311157, upload-time = "2026-02-02T12:35:37.758Z" }, + { url = "https://files.pythonhosted.org/packages/50/f6/566364c777d2ab450b92100bea11333c64c38d32caf8dc378b48e5b20c46/jiter-0.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66aa3e663840152d18cc8ff1e4faad3dd181373491b9cfdc6004b92198d67911", size = 319729, upload-time = "2026-02-02T12:35:39.246Z" }, + { url = "https://files.pythonhosted.org/packages/73/dd/560f13ec5e4f116d8ad2658781646cca91b617ae3b8758d4a5076b278f70/jiter-0.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3524798e70655ff19aec58c7d05adb1f074fecff62da857ea9be2b908b6d701", size = 354766, upload-time = "2026-02-02T12:35:40.662Z" }, + { url = "https://files.pythonhosted.org/packages/7c/0d/061faffcfe94608cbc28a0d42a77a74222bdf5055ccdbe5fd2292b94f510/jiter-0.13.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ec7e287d7fbd02cb6e22f9a00dd9c9cd504c40a61f2c61e7e1f9690a82726b4c", size = 362587, upload-time = "2026-02-02T12:35:42.025Z" }, + { url = "https://files.pythonhosted.org/packages/92/c9/c66a7864982fd38a9773ec6e932e0398d1262677b8c60faecd02ffb67bf3/jiter-0.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:47455245307e4debf2ce6c6e65a717550a0244231240dcf3b8f7d64e4c2f22f4", size = 487537, upload-time = "2026-02-02T12:35:43.459Z" }, + { url = 
"https://files.pythonhosted.org/packages/6c/86/84eb4352cd3668f16d1a88929b5888a3fe0418ea8c1dfc2ad4e7bf6e069a/jiter-0.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ee9da221dca6e0429c2704c1b3655fe7b025204a71d4d9b73390c759d776d165", size = 373717, upload-time = "2026-02-02T12:35:44.928Z" }, + { url = "https://files.pythonhosted.org/packages/6e/09/9fe4c159358176f82d4390407a03f506a8659ed13ca3ac93a843402acecf/jiter-0.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24ab43126d5e05f3d53a36a8e11eb2f23304c6c1117844aaaf9a0aa5e40b5018", size = 362683, upload-time = "2026-02-02T12:35:46.636Z" }, + { url = "https://files.pythonhosted.org/packages/c9/5e/85f3ab9caca0c1d0897937d378b4a515cae9e119730563572361ea0c48ae/jiter-0.13.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9da38b4fedde4fb528c740c2564628fbab737166a0e73d6d46cb4bb5463ff411", size = 392345, upload-time = "2026-02-02T12:35:48.088Z" }, + { url = "https://files.pythonhosted.org/packages/12/4c/05b8629ad546191939e6f0c2f17e29f542a398f4a52fb987bc70b6d1eb8b/jiter-0.13.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0b34c519e17658ed88d5047999a93547f8889f3c1824120c26ad6be5f27b6cf5", size = 517775, upload-time = "2026-02-02T12:35:49.482Z" }, + { url = "https://files.pythonhosted.org/packages/4d/88/367ea2eb6bc582c7052e4baf5ddf57ebe5ab924a88e0e09830dfb585c02d/jiter-0.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2a6394e6af690d462310a86b53c47ad75ac8c21dc79f120714ea449979cb1d3", size = 551325, upload-time = "2026-02-02T12:35:51.104Z" }, + { url = "https://files.pythonhosted.org/packages/f3/12/fa377ffb94a2f28c41afaed093e0d70cfe512035d5ecb0cad0ae4792d35e/jiter-0.13.0-cp311-cp311-win32.whl", hash = "sha256:0f0c065695f616a27c920a56ad0d4fc46415ef8b806bf8fc1cacf25002bd24e1", size = 204709, upload-time = "2026-02-02T12:35:52.467Z" }, + { url = "https://files.pythonhosted.org/packages/cb/16/8e8203ce92f844dfcd3d9d6a5a7322c77077248dbb12da52d23193a839cd/jiter-0.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:0733312953b909688ae3c2d58d043aa040f9f1a6a75693defed7bc2cc4bf2654", size = 204560, upload-time = "2026-02-02T12:35:53.925Z" }, + { url = "https://files.pythonhosted.org/packages/44/26/97cc40663deb17b9e13c3a5cf29251788c271b18ee4d262c8f94798b8336/jiter-0.13.0-cp311-cp311-win_arm64.whl", hash = "sha256:5d9b34ad56761b3bf0fbe8f7e55468704107608512350962d3317ffd7a4382d5", size = 189608, upload-time = "2026-02-02T12:35:55.304Z" }, + { url = "https://files.pythonhosted.org/packages/2e/30/7687e4f87086829955013ca12a9233523349767f69653ebc27036313def9/jiter-0.13.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0a2bd69fc1d902e89925fc34d1da51b2128019423d7b339a45d9e99c894e0663", size = 307958, upload-time = "2026-02-02T12:35:57.165Z" }, + { url = "https://files.pythonhosted.org/packages/c3/27/e57f9a783246ed95481e6749cc5002a8a767a73177a83c63ea71f0528b90/jiter-0.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f917a04240ef31898182f76a332f508f2cc4b57d2b4d7ad2dbfebbfe167eb505", size = 318597, upload-time = "2026-02-02T12:35:58.591Z" }, + { url = "https://files.pythonhosted.org/packages/cf/52/e5719a60ac5d4d7c5995461a94ad5ef962a37c8bf5b088390e6fad59b2ff/jiter-0.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1e2b199f446d3e82246b4fd9236d7cb502dc2222b18698ba0d986d2fecc6152", size = 348821, upload-time = "2026-02-02T12:36:00.093Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/db/c1efc32b8ba4c740ab3fc2d037d8753f67685f475e26b9d6536a4322bcdd/jiter-0.13.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04670992b576fa65bd056dbac0c39fe8bd67681c380cb2b48efa885711d9d726", size = 364163, upload-time = "2026-02-02T12:36:01.937Z" }, + { url = "https://files.pythonhosted.org/packages/55/8a/fb75556236047c8806995671a18e4a0ad646ed255276f51a20f32dceaeec/jiter-0.13.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a1aff1fbdb803a376d4d22a8f63f8e7ccbce0b4890c26cc7af9e501ab339ef0", size = 483709, upload-time = "2026-02-02T12:36:03.41Z" }, + { url = "https://files.pythonhosted.org/packages/7e/16/43512e6ee863875693a8e6f6d532e19d650779d6ba9a81593ae40a9088ff/jiter-0.13.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b3fb8c2053acaef8580809ac1d1f7481a0a0bdc012fd7f5d8b18fb696a5a089", size = 370480, upload-time = "2026-02-02T12:36:04.791Z" }, + { url = "https://files.pythonhosted.org/packages/f8/4c/09b93e30e984a187bc8aaa3510e1ec8dcbdcd71ca05d2f56aac0492453aa/jiter-0.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdaba7d87e66f26a2c45d8cbadcbfc4bf7884182317907baf39cfe9775bb4d93", size = 360735, upload-time = "2026-02-02T12:36:06.994Z" }, + { url = "https://files.pythonhosted.org/packages/1a/1b/46c5e349019874ec5dfa508c14c37e29864ea108d376ae26d90bee238cd7/jiter-0.13.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7b88d649135aca526da172e48083da915ec086b54e8e73a425ba50999468cc08", size = 391814, upload-time = "2026-02-02T12:36:08.368Z" }, + { url = "https://files.pythonhosted.org/packages/15/9e/26184760e85baee7162ad37b7912797d2077718476bf91517641c92b3639/jiter-0.13.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e404ea551d35438013c64b4f357b0474c7abf9f781c06d44fcaf7a14c69ff9e2", size = 513990, upload-time = "2026-02-02T12:36:09.993Z" }, + { url = "https://files.pythonhosted.org/packages/e9/34/2c9355247d6debad57a0a15e76ab1566ab799388042743656e566b3b7de1/jiter-0.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1f4748aad1b4a93c8bdd70f604d0f748cdc0e8744c5547798acfa52f10e79228", size = 548021, upload-time = "2026-02-02T12:36:11.376Z" }, + { url = "https://files.pythonhosted.org/packages/ac/4a/9f2c23255d04a834398b9c2e0e665382116911dc4d06b795710503cdad25/jiter-0.13.0-cp312-cp312-win32.whl", hash = "sha256:0bf670e3b1445fc4d31612199f1744f67f889ee1bbae703c4b54dc097e5dd394", size = 203024, upload-time = "2026-02-02T12:36:12.682Z" }, + { url = "https://files.pythonhosted.org/packages/09/ee/f0ae675a957ae5a8f160be3e87acea6b11dc7b89f6b7ab057e77b2d2b13a/jiter-0.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:15db60e121e11fe186c0b15236bd5d18381b9ddacdcf4e659feb96fc6c969c92", size = 205424, upload-time = "2026-02-02T12:36:13.93Z" }, + { url = "https://files.pythonhosted.org/packages/1b/02/ae611edf913d3cbf02c97cdb90374af2082c48d7190d74c1111dde08bcdd/jiter-0.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:41f92313d17989102f3cb5dd533a02787cdb99454d494344b0361355da52fcb9", size = 186818, upload-time = "2026-02-02T12:36:15.308Z" }, + { url = "https://files.pythonhosted.org/packages/91/9c/7ee5a6ff4b9991e1a45263bfc46731634c4a2bde27dfda6c8251df2d958c/jiter-0.13.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1f8a55b848cbabf97d861495cd65f1e5c590246fabca8b48e1747c4dfc8f85bf", size = 306897, upload-time = "2026-02-02T12:36:16.748Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/02/be5b870d1d2be5dd6a91bdfb90f248fbb7dcbd21338f092c6b89817c3dbf/jiter-0.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f556aa591c00f2c45eb1b89f68f52441a016034d18b65da60e2d2875bbbf344a", size = 317507, upload-time = "2026-02-02T12:36:18.351Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/b25d2ec333615f5f284f3a4024f7ce68cfa0604c322c6808b2344c7f5d2b/jiter-0.13.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7e1d61da332ec412350463891923f960c3073cf1aae93b538f0bb4c8cd46efb", size = 350560, upload-time = "2026-02-02T12:36:19.746Z" }, + { url = "https://files.pythonhosted.org/packages/be/ec/74dcb99fef0aca9fbe56b303bf79f6bd839010cb18ad41000bf6cc71eec0/jiter-0.13.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3097d665a27bc96fd9bbf7f86178037db139f319f785e4757ce7ccbf390db6c2", size = 363232, upload-time = "2026-02-02T12:36:21.243Z" }, + { url = "https://files.pythonhosted.org/packages/1b/37/f17375e0bb2f6a812d4dd92d7616e41917f740f3e71343627da9db2824ce/jiter-0.13.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d01ecc3a8cbdb6f25a37bd500510550b64ddf9f7d64a107d92f3ccb25035d0f", size = 483727, upload-time = "2026-02-02T12:36:22.688Z" }, + { url = "https://files.pythonhosted.org/packages/77/d2/a71160a5ae1a1e66c1395b37ef77da67513b0adba73b993a27fbe47eb048/jiter-0.13.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed9bbc30f5d60a3bdf63ae76beb3f9db280d7f195dfcfa61af792d6ce912d159", size = 370799, upload-time = "2026-02-02T12:36:24.106Z" }, + { url = "https://files.pythonhosted.org/packages/01/99/ed5e478ff0eb4e8aa5fd998f9d69603c9fd3f32de3bd16c2b1194f68361c/jiter-0.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98fbafb6e88256f4454de33c1f40203d09fc33ed19162a68b3b257b29ca7f663", size = 359120, upload-time = "2026-02-02T12:36:25.519Z" }, + { url = "https://files.pythonhosted.org/packages/16/be/7ffd08203277a813f732ba897352797fa9493faf8dc7995b31f3d9cb9488/jiter-0.13.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5467696f6b827f1116556cb0db620440380434591e93ecee7fd14d1a491b6daa", size = 390664, upload-time = "2026-02-02T12:36:26.866Z" }, + { url = "https://files.pythonhosted.org/packages/d1/84/e0787856196d6d346264d6dcccb01f741e5f0bd014c1d9a2ebe149caf4f3/jiter-0.13.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:2d08c9475d48b92892583df9da592a0e2ac49bcd41fae1fec4f39ba6cf107820", size = 513543, upload-time = "2026-02-02T12:36:28.217Z" }, + { url = "https://files.pythonhosted.org/packages/65/50/ecbd258181c4313cf79bca6c88fb63207d04d5bf5e4f65174114d072aa55/jiter-0.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:aed40e099404721d7fcaf5b89bd3b4568a4666358bcac7b6b15c09fb6252ab68", size = 547262, upload-time = "2026-02-02T12:36:29.678Z" }, + { url = "https://files.pythonhosted.org/packages/27/da/68f38d12e7111d2016cd198161b36e1f042bd115c169255bcb7ec823a3bf/jiter-0.13.0-cp313-cp313-win32.whl", hash = "sha256:36ebfbcffafb146d0e6ffb3e74d51e03d9c35ce7c625c8066cdbfc7b953bdc72", size = 200630, upload-time = "2026-02-02T12:36:31.808Z" }, + { url = "https://files.pythonhosted.org/packages/25/65/3bd1a972c9a08ecd22eb3b08a95d1941ebe6938aea620c246cf426ae09c2/jiter-0.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:8d76029f077379374cf0dbc78dbe45b38dec4a2eb78b08b5194ce836b2517afc", size = 202602, upload-time = "2026-02-02T12:36:33.679Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/fe/13bd3678a311aa67686bb303654792c48206a112068f8b0b21426eb6851e/jiter-0.13.0-cp313-cp313-win_arm64.whl", hash = "sha256:bb7613e1a427cfcb6ea4544f9ac566b93d5bf67e0d48c787eca673ff9c9dff2b", size = 185939, upload-time = "2026-02-02T12:36:35.065Z" }, + { url = "https://files.pythonhosted.org/packages/49/19/a929ec002ad3228bc97ca01dbb14f7632fffdc84a95ec92ceaf4145688ae/jiter-0.13.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fa476ab5dd49f3bf3a168e05f89358c75a17608dbabb080ef65f96b27c19ab10", size = 316616, upload-time = "2026-02-02T12:36:36.579Z" }, + { url = "https://files.pythonhosted.org/packages/52/56/d19a9a194afa37c1728831e5fb81b7722c3de18a3109e8f282bfc23e587a/jiter-0.13.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade8cb6ff5632a62b7dbd4757d8c5573f7a2e9ae285d6b5b841707d8363205ef", size = 346850, upload-time = "2026-02-02T12:36:38.058Z" }, + { url = "https://files.pythonhosted.org/packages/36/4a/94e831c6bf287754a8a019cb966ed39ff8be6ab78cadecf08df3bb02d505/jiter-0.13.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9950290340acc1adaded363edd94baebcee7dabdfa8bee4790794cd5cfad2af6", size = 358551, upload-time = "2026-02-02T12:36:39.417Z" }, + { url = "https://files.pythonhosted.org/packages/a2/ec/a4c72c822695fa80e55d2b4142b73f0012035d9fcf90eccc56bc060db37c/jiter-0.13.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2b4972c6df33731aac0742b64fd0d18e0a69bc7d6e03108ce7d40c85fd9e3e6d", size = 201950, upload-time = "2026-02-02T12:36:40.791Z" }, + { url = "https://files.pythonhosted.org/packages/b6/00/393553ec27b824fbc29047e9c7cd4a3951d7fbe4a76743f17e44034fa4e4/jiter-0.13.0-cp313-cp313t-win_arm64.whl", hash = "sha256:701a1e77d1e593c1b435315ff625fd071f0998c5f02792038a5ca98899261b7d", size = 185852, upload-time = "2026-02-02T12:36:42.077Z" }, + { url = "https://files.pythonhosted.org/packages/6e/f5/f1997e987211f6f9bd71b8083047b316208b4aca0b529bb5f8c96c89ef3e/jiter-0.13.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:cc5223ab19fe25e2f0bf2643204ad7318896fe3729bf12fde41b77bfc4fafff0", size = 308804, upload-time = "2026-02-02T12:36:43.496Z" }, + { url = "https://files.pythonhosted.org/packages/cd/8f/5482a7677731fd44881f0204981ce2d7175db271f82cba2085dd2212e095/jiter-0.13.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9776ebe51713acf438fd9b4405fcd86893ae5d03487546dae7f34993217f8a91", size = 318787, upload-time = "2026-02-02T12:36:45.071Z" }, + { url = "https://files.pythonhosted.org/packages/f3/b9/7257ac59778f1cd025b26a23c5520a36a424f7f1b068f2442a5b499b7464/jiter-0.13.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:879e768938e7b49b5e90b7e3fecc0dbec01b8cb89595861fb39a8967c5220d09", size = 353880, upload-time = "2026-02-02T12:36:47.365Z" }, + { url = "https://files.pythonhosted.org/packages/c3/87/719eec4a3f0841dad99e3d3604ee4cba36af4419a76f3cb0b8e2e691ad67/jiter-0.13.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:682161a67adea11e3aae9038c06c8b4a9a71023228767477d683f69903ebc607", size = 366702, upload-time = "2026-02-02T12:36:48.871Z" }, + { url = "https://files.pythonhosted.org/packages/d2/65/415f0a75cf6921e43365a1bc227c565cb949caca8b7532776e430cbaa530/jiter-0.13.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a13b68cd1cd8cc9de8f244ebae18ccb3e4067ad205220ef324c39181e23bbf66", size = 486319, upload-time = "2026-02-02T12:36:53.006Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/a2/9e12b48e82c6bbc6081fd81abf915e1443add1b13d8fc586e1d90bb02bb8/jiter-0.13.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87ce0f14c6c08892b610686ae8be350bf368467b6acd5085a5b65441e2bf36d2", size = 372289, upload-time = "2026-02-02T12:36:54.593Z" }, + { url = "https://files.pythonhosted.org/packages/4e/c1/e4693f107a1789a239c759a432e9afc592366f04e901470c2af89cfd28e1/jiter-0.13.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c365005b05505a90d1c47856420980d0237adf82f70c4aff7aebd3c1cc143ad", size = 360165, upload-time = "2026-02-02T12:36:56.112Z" }, + { url = "https://files.pythonhosted.org/packages/17/08/91b9ea976c1c758240614bd88442681a87672eebc3d9a6dde476874e706b/jiter-0.13.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1317fdffd16f5873e46ce27d0e0f7f4f90f0cdf1d86bf6abeaea9f63ca2c401d", size = 389634, upload-time = "2026-02-02T12:36:57.495Z" }, + { url = "https://files.pythonhosted.org/packages/18/23/58325ef99390d6d40427ed6005bf1ad54f2577866594bcf13ce55675f87d/jiter-0.13.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c05b450d37ba0c9e21c77fef1f205f56bcee2330bddca68d344baebfc55ae0df", size = 514933, upload-time = "2026-02-02T12:36:58.909Z" }, + { url = "https://files.pythonhosted.org/packages/5b/25/69f1120c7c395fd276c3996bb8adefa9c6b84c12bb7111e5c6ccdcd8526d/jiter-0.13.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:775e10de3849d0631a97c603f996f518159272db00fdda0a780f81752255ee9d", size = 548842, upload-time = "2026-02-02T12:37:00.433Z" }, + { url = "https://files.pythonhosted.org/packages/18/05/981c9669d86850c5fbb0d9e62bba144787f9fba84546ba43d624ee27ef29/jiter-0.13.0-cp314-cp314-win32.whl", hash = "sha256:632bf7c1d28421c00dd8bbb8a3bac5663e1f57d5cd5ed962bce3c73bf62608e6", size = 202108, upload-time = "2026-02-02T12:37:01.718Z" }, + { url = "https://files.pythonhosted.org/packages/8d/96/cdcf54dd0b0341db7d25413229888a346c7130bd20820530905fdb65727b/jiter-0.13.0-cp314-cp314-win_amd64.whl", hash = "sha256:f22ef501c3f87ede88f23f9b11e608581c14f04db59b6a801f354397ae13739f", size = 204027, upload-time = "2026-02-02T12:37:03.075Z" }, + { url = "https://files.pythonhosted.org/packages/fb/f9/724bcaaab7a3cd727031fe4f6995cb86c4bd344909177c186699c8dec51a/jiter-0.13.0-cp314-cp314-win_arm64.whl", hash = "sha256:07b75fe09a4ee8e0c606200622e571e44943f47254f95e2436c8bdcaceb36d7d", size = 187199, upload-time = "2026-02-02T12:37:04.414Z" }, + { url = "https://files.pythonhosted.org/packages/62/92/1661d8b9fd6a3d7a2d89831db26fe3c1509a287d83ad7838831c7b7a5c7e/jiter-0.13.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:964538479359059a35fb400e769295d4b315ae61e4105396d355a12f7fef09f0", size = 318423, upload-time = "2026-02-02T12:37:05.806Z" }, + { url = "https://files.pythonhosted.org/packages/4f/3b/f77d342a54d4ebcd128e520fc58ec2f5b30a423b0fd26acdfc0c6fef8e26/jiter-0.13.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e104da1db1c0991b3eaed391ccd650ae8d947eab1480c733e5a3fb28d4313e40", size = 351438, upload-time = "2026-02-02T12:37:07.189Z" }, + { url = "https://files.pythonhosted.org/packages/76/b3/ba9a69f0e4209bd3331470c723c2f5509e6f0482e416b612431a5061ed71/jiter-0.13.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e3a5f0cde8ff433b8e88e41aa40131455420fb3649a3c7abdda6145f8cb7202", size = 364774, upload-time = "2026-02-02T12:37:08.579Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/16/6cdb31fa342932602458dbb631bfbd47f601e03d2e4950740e0b2100b570/jiter-0.13.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:57aab48f40be1db920a582b30b116fe2435d184f77f0e4226f546794cedd9cf0", size = 487238, upload-time = "2026-02-02T12:37:10.066Z" }, + { url = "https://files.pythonhosted.org/packages/ed/b1/956cc7abaca8d95c13aa8d6c9b3f3797241c246cd6e792934cc4c8b250d2/jiter-0.13.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7772115877c53f62beeb8fd853cab692dbc04374ef623b30f997959a4c0e7e95", size = 372892, upload-time = "2026-02-02T12:37:11.656Z" }, + { url = "https://files.pythonhosted.org/packages/26/c4/97ecde8b1e74f67b8598c57c6fccf6df86ea7861ed29da84629cdbba76c4/jiter-0.13.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1211427574b17b633cfceba5040de8081e5abf114f7a7602f73d2e16f9fdaa59", size = 360309, upload-time = "2026-02-02T12:37:13.244Z" }, + { url = "https://files.pythonhosted.org/packages/4b/d7/eabe3cf46715854ccc80be2cd78dd4c36aedeb30751dbf85a1d08c14373c/jiter-0.13.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7beae3a3d3b5212d3a55d2961db3c292e02e302feb43fce6a3f7a31b90ea6dfe", size = 389607, upload-time = "2026-02-02T12:37:14.881Z" }, + { url = "https://files.pythonhosted.org/packages/df/2d/03963fc0804e6109b82decfb9974eb92df3797fe7222428cae12f8ccaa0c/jiter-0.13.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:e5562a0f0e90a6223b704163ea28e831bd3a9faa3512a711f031611e6b06c939", size = 514986, upload-time = "2026-02-02T12:37:16.326Z" }, + { url = "https://files.pythonhosted.org/packages/f6/6c/8c83b45eb3eb1c1e18d841fe30b4b5bc5619d781267ca9bc03e005d8fd0a/jiter-0.13.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:6c26a424569a59140fb51160a56df13f438a2b0967365e987889186d5fc2f6f9", size = 548756, upload-time = "2026-02-02T12:37:17.736Z" }, + { url = "https://files.pythonhosted.org/packages/47/66/eea81dfff765ed66c68fd2ed8c96245109e13c896c2a5015c7839c92367e/jiter-0.13.0-cp314-cp314t-win32.whl", hash = "sha256:24dc96eca9f84da4131cdf87a95e6ce36765c3b156fc9ae33280873b1c32d5f6", size = 201196, upload-time = "2026-02-02T12:37:19.101Z" }, + { url = "https://files.pythonhosted.org/packages/ff/32/4ac9c7a76402f8f00d00842a7f6b83b284d0cf7c1e9d4227bc95aa6d17fa/jiter-0.13.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0a8d76c7524087272c8ae913f5d9d608bd839154b62c4322ef65723d2e5bb0b8", size = 204215, upload-time = "2026-02-02T12:37:20.495Z" }, + { url = "https://files.pythonhosted.org/packages/f9/8e/7def204fea9f9be8b3c21a6f2dd6c020cf56c7d5ff753e0e23ed7f9ea57e/jiter-0.13.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2c26cf47e2cad140fa23b6d58d435a7c0161f5c514284802f25e87fddfe11024", size = 187152, upload-time = "2026-02-02T12:37:22.124Z" }, + { url = "https://files.pythonhosted.org/packages/79/b3/3c29819a27178d0e461a8571fb63c6ae38be6dc36b78b3ec2876bbd6a910/jiter-0.13.0-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b1cbfa133241d0e6bdab48dcdc2604e8ba81512f6bbd68ec3e8e1357dd3c316c", size = 307016, upload-time = "2026-02-02T12:37:42.755Z" }, + { url = "https://files.pythonhosted.org/packages/eb/ae/60993e4b07b1ac5ebe46da7aa99fdbb802eb986c38d26e3883ac0125c4e0/jiter-0.13.0-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:db367d8be9fad6e8ebbac4a7578b7af562e506211036cba2c06c3b998603c3d2", size = 305024, upload-time = "2026-02-02T12:37:44.774Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/fa/2227e590e9cf98803db2811f172b2d6460a21539ab73006f251c66f44b14/jiter-0.13.0-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45f6f8efb2f3b0603092401dc2df79fa89ccbc027aaba4174d2d4133ed661434", size = 339337, upload-time = "2026-02-02T12:37:46.668Z" }, + { url = "https://files.pythonhosted.org/packages/2d/92/015173281f7eb96c0ef580c997da8ef50870d4f7f4c9e03c845a1d62ae04/jiter-0.13.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:597245258e6ad085d064780abfb23a284d418d3e61c57362d9449c6c7317ee2d", size = 346395, upload-time = "2026-02-02T12:37:48.09Z" }, + { url = "https://files.pythonhosted.org/packages/80/60/e50fa45dd7e2eae049f0ce964663849e897300433921198aef94b6ffa23a/jiter-0.13.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:3d744a6061afba08dd7ae375dcde870cffb14429b7477e10f67e9e6d68772a0a", size = 305169, upload-time = "2026-02-02T12:37:50.376Z" }, + { url = "https://files.pythonhosted.org/packages/d2/73/a009f41c5eed71c49bec53036c4b33555afcdee70682a18c6f66e396c039/jiter-0.13.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:ff732bd0a0e778f43d5009840f20b935e79087b4dc65bd36f1cd0f9b04b8ff7f", size = 303808, upload-time = "2026-02-02T12:37:52.092Z" }, + { url = "https://files.pythonhosted.org/packages/c4/10/528b439290763bff3d939268085d03382471b442f212dca4ff5f12802d43/jiter-0.13.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab44b178f7981fcaea7e0a5df20e773c663d06ffda0198f1a524e91b2fde7e59", size = 337384, upload-time = "2026-02-02T12:37:53.582Z" }, + { url = "https://files.pythonhosted.org/packages/67/8a/a342b2f0251f3dac4ca17618265d93bf244a2a4d089126e81e4c1056ac50/jiter-0.13.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bb00b6d26db67a05fe3e12c76edc75f32077fb51deed13822dc648fa373bc19", size = 343768, upload-time = "2026-02-02T12:37:55.055Z" }, +] + [[package]] name = "jsbeautifier" version = "1.15.4" @@ -1357,7 +1595,7 @@ wheels = [ [[package]] name = "mcp" -version = "1.17.0" +version = "1.26.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -1366,15 +1604,23 @@ dependencies = [ { name = "jsonschema" }, { name = "pydantic" }, { name = "pydantic-settings" }, + { name = "pyjwt", extra = ["crypto"] }, { name = "python-multipart" }, { name = "pywin32", marker = "sys_platform == 'win32'" }, { name = "sse-starlette" }, { name = "starlette" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5a/79/5724a540df19e192e8606c543cdcf162de8eb435077520cca150f7365ec0/mcp-1.17.0.tar.gz", hash = "sha256:1b57fabf3203240ccc48e39859faf3ae1ccb0b571ff798bbedae800c73c6df90", size = 477951, upload-time = "2025-10-10T12:16:44.519Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/6d/62e76bbb8144d6ed86e202b5edd8a4cb631e7c8130f3f4893c3f90262b10/mcp-1.26.0.tar.gz", hash = "sha256:db6e2ef491eecc1a0d93711a76f28dec2e05999f93afd48795da1c1137142c66", size = 608005, upload-time = "2026-01-24T19:40:32.468Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1c/72/3751feae343a5ad07959df713907b5c3fbaed269d697a14b0c449080cf2e/mcp-1.17.0-py3-none-any.whl", hash = 
"sha256:0660ef275cada7a545af154db3082f176cf1d2681d5e35ae63e014faf0a35d40", size = 167737, upload-time = "2025-10-10T12:16:42.863Z" }, + { url = "https://files.pythonhosted.org/packages/fd/d9/eaa1f80170d2b7c5ba23f3b59f766f3a0bb41155fbc32a69adfa1adaaef9/mcp-1.26.0-py3-none-any.whl", hash = "sha256:904a21c33c25aa98ddbeb47273033c435e595bbacfdb177f4bd87f6dceebe1ca", size = 233615, upload-time = "2026-01-24T19:40:30.652Z" }, +] + +[package.optional-dependencies] +ws = [ + { name = "websockets" }, ] [[package]] @@ -1652,6 +1898,32 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a4/8e/469e5a4a2f5855992e425f3cb33804cc07bf18d48f2db061aec61ce50270/more_itertools-10.8.0-py3-none-any.whl", hash = "sha256:52d4362373dcf7c52546bc4af9a86ee7c4579df9a8dc268be0a2f949d376cc9b", size = 69667, upload-time = "2025-09-02T15:23:09.635Z" }, ] +[[package]] +name = "msal" +version = "1.35.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "pyjwt", extra = ["crypto"] }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3c/aa/5a646093ac218e4a329391d5a31e5092a89db7d2ef1637a90b82cd0b6f94/msal-1.35.1.tar.gz", hash = "sha256:70cac18ab80a053bff86219ba64cfe3da1f307c74b009e2da57ef040eb1b5656", size = 165658, upload-time = "2026-03-04T23:38:51.812Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/96/86/16815fddf056ca998853c6dc525397edf0b43559bb4073a80d2bc7fe8009/msal-1.35.1-py3-none-any.whl", hash = "sha256:8f4e82f34b10c19e326ec69f44dc6b30171f2f7098f3720ea8a9f0c11832caa3", size = 119909, upload-time = "2026-03-04T23:38:50.452Z" }, +] + +[[package]] +name = "msal-extensions" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "msal" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/01/99/5d239b6156eddf761a636bded1118414d161bd6b7b37a9335549ed159396/msal_extensions-1.3.1.tar.gz", hash = "sha256:c5b0fd10f65ef62b5f1d62f4251d51cbcaf003fcedae8c91b040a488614be1a4", size = 23315, upload-time = "2025-03-14T23:51:03.902Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5e/75/bd9b7bb966668920f06b200e84454c8f3566b102183bc55c5473d96cb2b9/msal_extensions-1.3.1-py3-none-any.whl", hash = "sha256:96d3de4d034504e969ac5e85bae8106c8373b5c6568e4c8fa7af2eca9dbe6bca", size = 20583, upload-time = "2025-03-14T23:51:03.016Z" }, +] + [[package]] name = "mypy" version = "1.18.2" @@ -1708,6 +1980,78 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, ] +[[package]] +name = "openai" +version = "2.26.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "distro" }, + { name = "httpx" }, + { name = "jiter" }, + { name = "pydantic" }, + { name = "sniffio" }, + { name = "tqdm" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d7/91/2a06c4e9597c338cac1e5e5a8dd6f29e1836fc229c4c523529dca387fda8/openai-2.26.0.tar.gz", hash = "sha256:b41f37c140ae0034a6e92b0c509376d907f3a66109935fba2c1b471a7c05a8fb", size = 666702, upload-time = "2026-03-05T23:17:35.874Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/2e/3f73e8ca53718952222cacd0cf7eecc9db439d020f0c1fe7ae717e4e199a/openai-2.26.0-py3-none-any.whl", 
hash = "sha256:6151bf8f83802f036117f06cc8a57b3a4da60da9926826cc96747888b57f394f", size = 1136409, upload-time = "2026-03-05T23:17:34.072Z" }, +] + +[[package]] +name = "opentelemetry-api" +version = "1.40.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2c/1d/4049a9e8698361cc1a1aa03a6c59e4fa4c71e0c0f94a30f988a6876a2ae6/opentelemetry_api-1.40.0.tar.gz", hash = "sha256:159be641c0b04d11e9ecd576906462773eb97ae1b657730f0ecf64d32071569f", size = 70851, upload-time = "2026-03-04T14:17:21.555Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/bf/93795954016c522008da367da292adceed71cca6ee1717e1d64c83089099/opentelemetry_api-1.40.0-py3-none-any.whl", hash = "sha256:82dd69331ae74b06f6a874704be0cfaa49a1650e1537d4a813b86ecef7d0ecf9", size = 68676, upload-time = "2026-03-04T14:17:01.24Z" }, +] + +[[package]] +name = "opentelemetry-sdk" +version = "1.40.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/fd/3c3125b20ba18ce2155ba9ea74acb0ae5d25f8cd39cfd37455601b7955cc/opentelemetry_sdk-1.40.0.tar.gz", hash = "sha256:18e9f5ec20d859d268c7cb3c5198c8d105d073714db3de50b593b8c1345a48f2", size = 184252, upload-time = "2026-03-04T14:17:31.87Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/c5/6a852903d8bfac758c6dc6e9a68b015d3c33f2f1be5e9591e0f4b69c7e0a/opentelemetry_sdk-1.40.0-py3-none-any.whl", hash = "sha256:787d2154a71f4b3d81f20524a8ce061b7db667d24e46753f32a7bc48f1c1f3f1", size = 141951, upload-time = "2026-03-04T14:17:17.961Z" }, +] + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.61b0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6d/c0/4ae7973f3c2cfd2b6e321f1675626f0dab0a97027cc7a297474c9c8f3d04/opentelemetry_semantic_conventions-0.61b0.tar.gz", hash = "sha256:072f65473c5d7c6dc0355b27d6c9d1a679d63b6d4b4b16a9773062cb7e31192a", size = 145755, upload-time = "2026-03-04T14:17:32.664Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b2/37/cc6a55e448deaa9b27377d087da8615a3416d8ad523d5960b78dbeadd02a/opentelemetry_semantic_conventions-0.61b0-py3-none-any.whl", hash = "sha256:fa530a96be229795f8cef353739b618148b0fe2b4b3f005e60e262926c4d38e2", size = 231621, upload-time = "2026-03-04T14:17:19.33Z" }, +] + +[[package]] +name = "opentelemetry-semantic-conventions-ai" +version = "0.4.15" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-sdk" }, + { name = "opentelemetry-semantic-conventions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/75/455c15f8360b475dd31101a87eab316420388486f7941bf019cbf4e63d5b/opentelemetry_semantic_conventions_ai-0.4.15.tar.gz", hash = "sha256:12de172d1e11d21c6e82bbf578c7e8a713589a7fda76af9ed785632564a28b81", size = 18595, upload-time = "2026-03-02T15:36:50.254Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/49/819fb212386f77cfd93f81bd916d674f0e735f87c8ac2262ed14e3b852c2/opentelemetry_semantic_conventions_ai-0.4.15-py3-none-any.whl", hash = "sha256:011461f1fba30f27035c49ab3b8344367adc72da0a6c8d3c7428303c6779edc9", size = 5999, 
upload-time = "2026-03-02T15:36:51.44Z" }, +] + [[package]] name = "packaging" version = "25.0" @@ -1932,6 +2276,11 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" }, ] +[package.optional-dependencies] +crypto = [ + { name = "cryptography" }, +] + [[package]] name = "pymdown-extensions" version = "10.16.1" @@ -2751,6 +3100,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/3e/84/1691aae773dccff72c866ad19af7adb12d4fb8b439c8bfb36ffc429c8c27/tox-4.31.0-py3-none-any.whl", hash = "sha256:328f392e6567e46cb0f9b625679456744dde940287dd1b39117627dc4b21d5da", size = 175917, upload-time = "2025-10-09T18:53:51.494Z" }, ] +[[package]] +name = "tqdm" +version = "4.67.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/09/a9/6ba95a270c6f1fbcd8dac228323f2777d886cb206987444e4bce66338dd4/tqdm-4.67.3.tar.gz", hash = "sha256:7d825f03f89244ef73f1d4ce193cb1774a8179fd96f31d7e1dcde62092b960bb", size = 169598, upload-time = "2026-02-03T17:35:53.048Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl", hash = "sha256:ee1e4c0e59148062281c49d80b25b67771a127c85fc9676d3be5f243206826bf", size = 78374, upload-time = "2026-02-03T17:35:50.982Z" }, +] + [[package]] name = "typer" version = "0.19.2" From 44b91e60c17bf3a538e3235697fb2c1483b1a485 Mon Sep 17 00:00:00 2001 From: Caddy Glow Date: Thu, 19 Mar 2026 22:28:00 +0100 Subject: [PATCH 3/8] fix: validate bypass responses streaming events --- ccproxy/services/mocking/mock_handler.py | 52 +++++++-- .../codex/integration/test_codex_basic.py | 109 ++++++++++++++++++ .../services/mocking/test_mock_handler.py | 72 +++++++++++- 3 files changed, 221 insertions(+), 12 deletions(-) diff --git a/ccproxy/services/mocking/mock_handler.py b/ccproxy/services/mocking/mock_handler.py index 26bb6289..1e90538a 100644 --- a/ccproxy/services/mocking/mock_handler.py +++ b/ccproxy/services/mocking/mock_handler.py @@ -4,6 +4,7 @@ import json import random from collections.abc import AsyncGenerator +from time import time from typing import Any, Literal, TypeAlias import structlog @@ -28,11 +29,7 @@ PROMPT_EXTRACTION_KEYS = ("instructions", "content", "text", "input", "messages") MAX_PROMPT_EXTRACTION_DEPTH = 10 -TargetFormat = Literal[ - FORMAT_ANTHROPIC_MESSAGES, - FORMAT_OPENAI_CHAT, - FORMAT_OPENAI_RESPONSES, -] +TargetFormat: TypeAlias = str PromptValue: TypeAlias = str | list[Any] | dict[str, Any] | int | float | bool | None @@ -279,7 +276,10 @@ def _build_login_form_response( - Do not rely only on color to communicate invalid fields or failed authentication.""" return self._make_text_response(text, model) - if "securityanalyst" in prompt_lower or "focus on password handling" in prompt_lower: + if ( + "securityanalyst" in prompt_lower + or "focus on password handling" in prompt_lower + ): text = """- Use generic auth failure messages to prevent account enumeration. - Enforce HTTPS, secure session handling, rate limiting, and temporary lockout or throttling after repeated failures. - Never log plaintext passwords or return sensitive backend error details to the UI. 
@@ -327,9 +327,7 @@ async def generate_standard_response( model=model ) else: - mock_response = self.mock_generator.generate_short_response( - model=model - ) + mock_response = self.mock_generator.generate_short_response(model=model) # Convert to OpenAI format if needed if target_format == FORMAT_OPENAI_CHAT and message_type != "tool_use": @@ -393,6 +391,12 @@ async def stream_generator() -> AsyncGenerator[bytes, None]: words = text_content.split() chunk_size = 3 # Words per chunk + response_id = f"resp_{ctx.request_id if ctx else 'mock'}" + msg_id = f"msg_{ctx.request_id if ctx else 'mock'}" + used_model = model or "claude-3-opus-20240229" + created_at = int(time()) + sequence_number = 0 + # Send initial event if target_format == FORMAT_OPENAI_CHAT: initial_event = { @@ -409,14 +413,21 @@ async def stream_generator() -> AsyncGenerator[bytes, None]: ], } yield f"data: {json.dumps(initial_event)}\n\n".encode() + elif target_format == FORMAT_OPENAI_RESPONSES: + yield f"data: {json.dumps({'type': 'response.created', 'sequence_number': sequence_number, 'response': {'id': response_id, 'object': 'response', 'created_at': created_at, 'status': 'in_progress', 'model': used_model, 'output': [], 'parallel_tool_calls': False}})}\n\n".encode() + sequence_number += 1 + yield f"data: {json.dumps({'type': 'response.output_item.added', 'sequence_number': sequence_number, 'output_index': 0, 'item': {'type': 'message', 'id': msg_id, 'status': 'in_progress', 'role': 'assistant', 'content': []}})}\n\n".encode() + sequence_number += 1 + yield f"data: {json.dumps({'type': 'response.content_part.added', 'sequence_number': sequence_number, 'item_id': msg_id, 'output_index': 0, 'content_index': 0, 'part': {'type': 'output_text', 'text': ''}})}\n\n".encode() + sequence_number += 1 else: initial_event = { "type": "message_start", "message": { - "id": f"msg_{ctx.request_id if ctx else 'mock'}", + "id": msg_id, "type": "message", "role": "assistant", - "model": model or "claude-3-opus-20240229", + "model": used_model, "content": [], "usage": {"input_tokens": 10, "output_tokens": 0}, }, @@ -446,6 +457,16 @@ async def stream_generator() -> AsyncGenerator[bytes, None]: } ], } + elif target_format == FORMAT_OPENAI_RESPONSES: + chunk_event = { + "type": "response.output_text.delta", + "sequence_number": sequence_number, + "item_id": msg_id, + "output_index": 0, + "content_index": 0, + "delta": chunk_text, + } + sequence_number += 1 else: chunk_event = { "type": "content_block_delta", @@ -465,6 +486,15 @@ async def stream_generator() -> AsyncGenerator[bytes, None]: "choices": [{"index": 0, "delta": {}, "finish_reason": "stop"}], } yield f"data: {json.dumps(final_event)}\n\n".encode() + elif target_format == FORMAT_OPENAI_RESPONSES: + output_tokens = len(text_content.split()) + yield f"data: {json.dumps({'type': 'response.output_text.done', 'sequence_number': sequence_number, 'item_id': msg_id, 'output_index': 0, 'content_index': 0, 'text': text_content})}\n\n".encode() + sequence_number += 1 + yield f"data: {json.dumps({'type': 'response.content_part.done', 'sequence_number': sequence_number, 'item_id': msg_id, 'output_index': 0, 'content_index': 0, 'part': {'type': 'output_text', 'text': text_content}})}\n\n".encode() + sequence_number += 1 + yield f"data: {json.dumps({'type': 'response.output_item.done', 'sequence_number': sequence_number, 'output_index': 0, 'item': {'type': 'message', 'id': msg_id, 'status': 'completed', 'role': 'assistant', 'content': [{'type': 'output_text', 'text': text_content}]}})}\n\n".encode() + 
sequence_number += 1 + yield f"data: {json.dumps({'type': 'response.completed', 'sequence_number': sequence_number, 'response': {'id': response_id, 'object': 'response', 'created_at': created_at, 'status': 'completed', 'model': used_model, 'output': [{'type': 'message', 'id': msg_id, 'status': 'completed', 'role': 'assistant', 'content': [{'type': 'output_text', 'text': text_content}]}], 'parallel_tool_calls': False, 'usage': {'input_tokens': 10, 'output_tokens': output_tokens, 'total_tokens': 10 + output_tokens}}})}\n\n".encode() else: final_event = { "type": "message_stop", diff --git a/tests/plugins/codex/integration/test_codex_basic.py b/tests/plugins/codex/integration/test_codex_basic.py index 06da0b73..efb1e1b7 100644 --- a/tests/plugins/codex/integration/test_codex_basic.py +++ b/tests/plugins/codex/integration/test_codex_basic.py @@ -1,3 +1,4 @@ +import json from datetime import UTC, datetime, timedelta from types import SimpleNamespace from typing import Any @@ -5,6 +6,7 @@ import pytest import pytest_asyncio +from pydantic import TypeAdapter from tests.helpers.assertions import ( assert_codex_response_format, assert_openai_responses_format, @@ -14,6 +16,7 @@ STANDARD_OPENAI_REQUEST, ) +from ccproxy.llms.models import openai as openai_models from ccproxy.models.detection import DetectedHeaders, DetectedPrompts from ccproxy.plugins.codex.models import CodexCacheData @@ -112,6 +115,50 @@ async def test_openai_chat_completions_streaming( assert any(chunk.startswith("data: ") for chunk in chunks) +@pytest.mark.asyncio +@pytest.mark.integration +@pytest.mark.codex +async def test_codex_bypass_responses_streaming_emits_valid_openai_response_events( + codex_bypass_client: Any, +) -> None: + resp = await codex_bypass_client.post( + "/codex/v1/responses", + json={ + "model": "gpt-5", + "stream": True, + "input": [ + { + "role": "user", + "content": [ + {"type": "input_text", "text": "Reply with exactly OK"} + ], + } + ], + }, + ) + raw_body = await resp.aread() + + assert resp.status_code == 200, raw_body + assert resp.headers["content-type"].startswith("text/event-stream") + + body = raw_body.decode() + events: list[dict[str, Any]] = [] + validator = TypeAdapter(openai_models.AnyStreamEvent) + for line in body.splitlines(): + if not line.startswith("data: "): + continue + payload = line[6:].strip() + if not payload or payload == "[DONE]": + continue + event = json.loads(payload) + events.append(event) + validator.validate_python(event) + + assert events[0]["type"] == "response.created" + assert events[-1]["type"] == "response.completed" + assert body.strip().endswith("data: [DONE]") + + # Module-scoped client to avoid per-test startup cost # Use module-level async loop for all tests here pytestmark = pytest.mark.asyncio(loop_scope="module") @@ -195,3 +242,65 @@ async def init_detection_stub(self): # type: ignore[no-untyped-def] yield client finally: await client.aclose() + + +@pytest_asyncio.fixture(scope="module", loop_scope="module") +async def codex_bypass_client() -> Any: # type: ignore[misc] + from httpx import ASGITransport, AsyncClient + + from ccproxy.api.app import create_app, initialize_plugins_startup + from ccproxy.api.bootstrap import create_service_container + from ccproxy.config.core import ServerSettings + from ccproxy.config.settings import Settings + from ccproxy.core.logging import setup_logging + + setup_logging(json_logs=False, log_level_name="ERROR") + settings = Settings( + enable_plugins=True, + server=ServerSettings(bypass_mode=True), + plugins={ + "codex": 
{"enabled": True}, + "oauth_codex": {"enabled": True}, + "duckdb_storage": {"enabled": False}, + "analytics": {"enabled": False}, + "metrics": {"enabled": False}, + }, + enabled_plugins=["codex", "oauth_codex"], + plugins_disable_local_discovery=False, + ) + service_container = create_service_container(settings) + app = create_app(service_container) + + prompts = DetectedPrompts.from_body( + {"instructions": "You are a helpful coding assistant."} + ) + detection_data = CodexCacheData( + codex_version="fallback", + headers=DetectedHeaders({}), + prompts=prompts, + body_json=prompts.raw, + method="POST", + url="https://chatgpt.com/backend-codex/responses", + path="/api/backend-codex/responses", + query_params={}, + ) + + async def init_detection_stub(self): # type: ignore[no-untyped-def] + self._cached_data = detection_data + return detection_data + + detection_patch = patch( + "ccproxy.plugins.codex.detection_service.CodexDetectionService.initialize_detection", + new=init_detection_stub, + ) + with detection_patch: + await initialize_plugins_startup(app, settings) + + transport = ASGITransport(app=app) + runtime = app.state.plugin_registry.get_runtime("codex") + assert runtime and runtime.adapter, "Codex plugin failed to initialize" + client = AsyncClient(transport=transport, base_url="http://test") + try: + yield client + finally: + await client.aclose() diff --git a/tests/unit/services/mocking/test_mock_handler.py b/tests/unit/services/mocking/test_mock_handler.py index e18618eb..fc939415 100644 --- a/tests/unit/services/mocking/test_mock_handler.py +++ b/tests/unit/services/mocking/test_mock_handler.py @@ -2,12 +2,19 @@ import asyncio import json +from collections.abc import Sequence from unittest.mock import MagicMock import pytest +from pydantic import TypeAdapter -from ccproxy.core.constants import FORMAT_ANTHROPIC_MESSAGES, FORMAT_OPENAI_CHAT +from ccproxy.core.constants import ( + FORMAT_ANTHROPIC_MESSAGES, + FORMAT_OPENAI_CHAT, + FORMAT_OPENAI_RESPONSES, +) from ccproxy.core.request_context import RequestContext +from ccproxy.llms.models import openai as openai_models from ccproxy.services.mocking.mock_handler import MockResponseHandler @@ -25,6 +32,29 @@ def generate_short_response(self, model=None): return {"content": [{"text": "short"}]} +def _parse_sse_events( + chunks: Sequence[bytes | str | memoryview], +) -> list[dict[str, object]]: + events: list[dict[str, object]] = [] + for chunk in chunks: + if isinstance(chunk, memoryview): + decoded = chunk.tobytes().decode() + elif isinstance(chunk, bytes): + decoded = chunk.decode() + else: + decoded = chunk + for line in decoded.splitlines(): + if not line.startswith("data: "): + continue + payload = line[6:].strip() + if not payload or payload == "[DONE]": + continue + event = json.loads(payload) + if isinstance(event, dict): + events.append(event) + return events + + @pytest.mark.parametrize( "body,expected", [ @@ -139,3 +169,43 @@ async def test_generate_streaming_response(monkeypatch: pytest.MonkeyPatch) -> N chunks.append(chunk) assert any(b"[DONE]" in chunk for chunk in chunks) + + +@pytest.mark.asyncio +async def test_generate_responses_streaming_response_emits_valid_events( + monkeypatch: pytest.MonkeyPatch, +) -> None: + handler = MockResponseHandler(DummyGenerator(), error_rate=0.0) # type: ignore[arg-type] + mock_logger = MagicMock() + ctx = RequestContext(request_id="req", start_time=0, logger=mock_logger) # type: ignore[arg-type] + + async def fast_sleep(_: float) -> None: + return None + + monkeypatch.setattr(asyncio, 
"sleep", fast_sleep) + + stream = await handler.generate_streaming_response( + model="m1", + target_format=FORMAT_OPENAI_RESPONSES, + ctx=ctx, + ) + + chunks = [] + async for chunk in stream.body_iterator: + chunks.append(chunk) + + events = _parse_sse_events(chunks) + validator = TypeAdapter(openai_models.AnyStreamEvent) + + assert [event["type"] for event in events] == [ + "response.created", + "response.output_item.added", + "response.content_part.added", + "response.output_text.delta", + "response.output_text.done", + "response.content_part.done", + "response.output_item.done", + "response.completed", + ] + for event in events: + validator.validate_python(event) From bce5ad593e811f4d8e59a4c43d66f989de116e0f Mon Sep 17 00:00:00 2001 From: Caddy Glow Date: Fri, 20 Mar 2026 08:19:15 +0100 Subject: [PATCH 4/8] fix: resolve mypy, ruff, and test issues across PR #41 - Fix mypy type errors: add type annotations for format_chain list elements, cast WebSocket adapter return, wrap Response.body for json.loads, use LLMSettings instead of raw dicts in tests - Fix ruff SIM105: replace try/except/pass with contextlib.suppress - Fix WebSocket accept/auth ordering: accept before authenticate so close codes work correctly on rejection - Type WebSocket helpers with CodexAdapter instead of Any - Bound local_response_ids with deque(maxlen=256) to prevent unbounded growth on long-lived connections - Make _load_codex_cli_models_cache async with anyio.Path to avoid blocking the event loop with synchronous filesystem I/O - Add structured logging to WebSocket handler lifecycle - Add debug logging to _safe_fallback_data in detection service - Fix _normalize_input_messages to avoid mutating input dict in-place - Remove redundant pass statements after logger.debug calls - Refactor test_msaf_real_library to use plain httpx.AsyncClient, removing agent_framework and openai library dependencies --- ccproxy/core/plugins/factories.py | 2 +- ccproxy/plugins/codex/adapter.py | 16 +- ccproxy/plugins/codex/detection_service.py | 3 + ccproxy/plugins/codex/routes.py | 114 +++++++--- ccproxy/services/adapters/mock_adapter.py | 5 +- ccproxy/services/factories.py | 4 +- ccproxy/services/mocking/mock_handler.py | 3 +- .../codex/integration/test_msaf_compat.py | 4 +- .../integration/test_msaf_real_library.py | 194 +++++++++++------- .../unit/llms/test_llms_streaming_settings.py | 4 +- tests/unit/services/test_mock_adapter.py | 12 +- 11 files changed, 234 insertions(+), 127 deletions(-) diff --git a/ccproxy/core/plugins/factories.py b/ccproxy/core/plugins/factories.py index ac591535..35476d25 100644 --- a/ccproxy/core/plugins/factories.py +++ b/ccproxy/core/plugins/factories.py @@ -12,9 +12,9 @@ from fastapi import APIRouter from ccproxy.models.provider import ProviderConfig -from ccproxy.services.adapters.mock_adapter import MockAdapter from ccproxy.services.adapters.base import BaseAdapter from ccproxy.services.adapters.http_adapter import BaseHTTPAdapter +from ccproxy.services.adapters.mock_adapter import MockAdapter from ccproxy.services.interfaces import ( IMetricsCollector, IRequestTracer, diff --git a/ccproxy/plugins/codex/adapter.py b/ccproxy/plugins/codex/adapter.py index 88634982..a02ab07f 100644 --- a/ccproxy/plugins/codex/adapter.py +++ b/ccproxy/plugins/codex/adapter.py @@ -288,7 +288,12 @@ async def prepare_provider_request( body_data["store"] = False # Remove unsupported keys for Codex - for key in ("max_output_tokens", "max_completion_tokens", "max_tokens", "temperature"): + for key in ( + "max_output_tokens", + 
"max_completion_tokens", + "max_tokens", + "temperature", + ): body_data.pop(key, None) list_input = body_data.get("input", []) @@ -639,16 +644,15 @@ def _normalize_input_messages(self, data: dict[str, Any]) -> dict[str, Any]: normalized_items.append(item) - data["input"] = normalized_items - return data + result = dict(data) + result["input"] = normalized_items + return result def _request_body_is_encoded(self, headers: dict[str, str]) -> bool: encoding = headers.get("content-encoding", "").strip().lower() return bool(encoding and encoding != "identity") - def _detect_streaming_intent( - self, body: bytes, headers: dict[str, str] - ) -> bool: + def _detect_streaming_intent(self, body: bytes, headers: dict[str, str]) -> bool: if self._request_body_is_encoded(headers): accept = headers.get("accept", "").lower() return "text/event-stream" in accept diff --git a/ccproxy/plugins/codex/detection_service.py b/ccproxy/plugins/codex/detection_service.py index abec9ba3..2c646ab1 100644 --- a/ccproxy/plugins/codex/detection_service.py +++ b/ccproxy/plugins/codex/detection_service.py @@ -531,6 +531,9 @@ def _safe_fallback_data(self) -> CodexCacheData | None: try: return self._get_fallback_data() except Exception: + logger.debug( + "safe_fallback_data_load_failed", exc_info=True, category="plugin" + ) return None def invalidate_cache(self) -> None: diff --git a/ccproxy/plugins/codex/routes.py b/ccproxy/plugins/codex/routes.py index d1f4e02a..2b21dc57 100644 --- a/ccproxy/plugins/codex/routes.py +++ b/ccproxy/plugins/codex/routes.py @@ -1,12 +1,15 @@ """Codex plugin routes.""" +import contextlib import json +from collections import deque from pathlib import Path from time import time from typing import TYPE_CHECKING, Annotated, Any, cast from urllib.parse import urlparse from uuid import uuid4 +import anyio from fastapi import APIRouter, Depends, Request, WebSocket, WebSocketDisconnect from starlette.responses import Response, StreamingResponse from starlette.websockets import WebSocketState @@ -17,6 +20,7 @@ get_provider_config_dependency, ) from ccproxy.auth.dependencies import ConditionalAuthDep +from ccproxy.config.settings import Settings from ccproxy.core.constants import ( FORMAT_ANTHROPIC_MESSAGES, FORMAT_OPENAI_CHAT, @@ -25,6 +29,7 @@ UPSTREAM_ENDPOINT_OPENAI_CHAT_COMPLETIONS, UPSTREAM_ENDPOINT_OPENAI_RESPONSES, ) +from ccproxy.core.logging import get_plugin_logger from ccproxy.core.plugins import PluginRegistry, ProviderPluginRuntime from ccproxy.streaming import DeferredStreaming from ccproxy.streaming.sse_parser import SSEStreamParser @@ -33,7 +38,11 @@ if TYPE_CHECKING: - pass + from .adapter import CodexAdapter + +logger = get_plugin_logger() + +_MAX_LOCAL_RESPONSE_IDS = 256 CodexAdapterDep = Annotated[Any, Depends(get_plugin_adapter("codex"))] CodexConfigDep = Annotated[ @@ -62,7 +71,7 @@ async def _codex_responses_handler( return await handle_codex_request(request, adapter) -def _get_codex_websocket_adapter(websocket: WebSocket) -> Any: +def _get_codex_websocket_adapter(websocket: WebSocket) -> "CodexAdapter": if not hasattr(websocket.app.state, "plugin_registry"): raise RuntimeError("Plugin registry not initialized") @@ -75,7 +84,7 @@ def _get_codex_websocket_adapter(websocket: WebSocket) -> Any: if not runtime.adapter: raise RuntimeError("Codex adapter not available") - return runtime.adapter + return cast("CodexAdapter", runtime.adapter) def _prepare_websocket_headers(websocket: WebSocket) -> dict[str, str]: @@ -106,7 +115,7 @@ def _make_websocket_terminal_event( *, error: dict[str, 
Any] | None = None, ) -> dict[str, Any]: - response_payload = { + response_payload: dict[str, Any] = { "id": f"resp_ws_{uuid4().hex}", "object": "response", "created_at": int(time()), @@ -125,6 +134,49 @@ def _is_websocket_warmup_request(provider_payload: dict[str, Any]) -> bool: return isinstance(input_items, list) and len(input_items) == 0 +async def _authenticate_websocket(websocket: WebSocket) -> None: + """Enforce bearer auth on WebSocket connections when auth is configured. + + Mirrors the ConditionalAuthDep logic: if security.auth_token is set, + the client must provide a matching Authorization header. Closes the + connection with 1008 (Policy Violation) on failure. + """ + container = getattr(websocket.app.state, "service_container", None) + settings: Settings | None = None + if container is not None: + with contextlib.suppress(ValueError): + settings = container.get_service(Settings) + if settings is None: + with contextlib.suppress(Exception): + settings = Settings() + + if settings is None or not settings.security.auth_token: + return + + expected = settings.security.auth_token.get_secret_value() + auth_header = websocket.headers.get("authorization", "") + if auth_header.lower().startswith("bearer "): + token = auth_header[7:] + else: + token = "" + + if token != expected: + await websocket.close(code=1008, reason="Authentication required") + raise WebSocketDisconnect(code=1008) + + +async def _sanitize_websocket_payload( + adapter: "CodexAdapter", provider_payload: dict[str, Any], headers: dict[str, str] +) -> tuple[dict[str, Any], dict[str, str]]: + """Run the same request normalization used by HTTP routes on a WS payload.""" + body_bytes = json.dumps(provider_payload).encode("utf-8") + prepared_body, prepared_headers = await adapter.prepare_provider_request( + body_bytes, headers, UPSTREAM_ENDPOINT_OPENAI_RESPONSES + ) + sanitized_payload = json.loads(prepared_body.decode("utf-8")) + return sanitized_payload, prepared_headers + + def _serialize_codex_models(config: CodexSettings) -> list[dict[str, Any]]: models: list[dict[str, Any]] = [] for card in config.models_endpoint: @@ -137,13 +189,14 @@ def _serialize_codex_models(config: CodexSettings) -> list[dict[str, Any]]: return models -def _load_codex_cli_models_cache() -> list[dict[str, Any]]: - cache_file = Path.home() / ".codex" / "models_cache.json" - if not cache_file.exists(): +async def _load_codex_cli_models_cache() -> list[dict[str, Any]]: + cache_path = anyio.Path(Path.home() / ".codex" / "models_cache.json") + if not await cache_path.exists(): return [] try: - payload = json.loads(cache_file.read_text()) + content = await cache_path.read_text() + payload = json.loads(content) except Exception: return [] @@ -154,9 +207,11 @@ def _load_codex_cli_models_cache() -> list[dict[str, Any]]: return [model for model in models if isinstance(model, dict)] -def _serialize_codex_cli_models(config: CodexSettings) -> list[dict[str, Any]]: +async def _serialize_codex_cli_models(config: CodexSettings) -> list[dict[str, Any]]: configured_ids = { - card.id for card in config.models_endpoint if isinstance(getattr(card, "id", None), str) + card.id + for card in config.models_endpoint + if isinstance(getattr(card, "id", None), str) } configured_ids.update( { @@ -166,12 +221,13 @@ def _serialize_codex_cli_models(config: CodexSettings) -> list[dict[str, Any]]: } ) - cached_models = _load_codex_cli_models_cache() + cached_models = await _load_codex_cli_models_cache() if cached_models and configured_ids: matched = [ model for model in 
cached_models - if model.get("slug") in configured_ids or model.get("display_name") in configured_ids + if model.get("slug") in configured_ids + or model.get("display_name") in configured_ids ] if matched: return matched @@ -181,13 +237,13 @@ def _serialize_codex_cli_models(config: CodexSettings) -> list[dict[str, Any]]: async def _stream_websocket_response( websocket: WebSocket, - adapter: Any, + adapter: "CodexAdapter", provider_payload: dict[str, Any], ) -> None: request_headers = _prepare_websocket_headers(websocket) - provider_payload["stream"] = True - provider_payload["store"] = False - provider_headers = await adapter.prepare_provider_headers(request_headers) + provider_payload, provider_headers = await _sanitize_websocket_payload( + adapter, provider_payload, request_headers + ) target_url = await adapter.get_target_url(UPSTREAM_ENDPOINT_OPENAI_RESPONSES) parsed_url = urlparse(target_url) base_url = f"{parsed_url.scheme}://{parsed_url.netloc}" @@ -200,7 +256,7 @@ async def _stream_websocket_response( target_url, headers=provider_headers, content=json.dumps(provider_payload).encode("utf-8"), - ) as upstream_response: + ) as upstream_response: if upstream_response.status_code >= 400: error_body = await upstream_response.aread() try: @@ -265,10 +321,12 @@ async def codex_responses( @router.websocket("/v1/responses") async def codex_responses_websocket(websocket: WebSocket) -> None: await websocket.accept() + await _authenticate_websocket(websocket) try: adapter = _get_codex_websocket_adapter(websocket) - local_response_ids: set[str] = set() + local_response_ids: deque[str] = deque(maxlen=_MAX_LOCAL_RESPONSE_IDS) + logger.debug("websocket_connected", client=str(websocket.client)) while True: raw_message = await websocket.receive_text() provider_payload = _parse_websocket_request(raw_message) @@ -276,23 +334,33 @@ async def codex_responses_websocket(websocket: WebSocket) -> None: warmup_event = _make_websocket_terminal_event(provider_payload) response_id = warmup_event.get("response", {}).get("id") if isinstance(response_id, str) and response_id: - local_response_ids.add(response_id) + local_response_ids.append(response_id) await websocket.send_text( json.dumps(warmup_event, separators=(",", ":")) ) + logger.debug("websocket_warmup_handled", response_id=response_id) continue previous_response_id = provider_payload.get("previous_response_id") - if isinstance(previous_response_id, str) and previous_response_id in local_response_ids: + if ( + isinstance(previous_response_id, str) + and previous_response_id in local_response_ids + ): provider_payload.pop("previous_response_id", None) + logger.debug( + "websocket_streaming_request", model=provider_payload.get("model") + ) await _stream_websocket_response(websocket, adapter, provider_payload) except WebSocketDisconnect: + logger.debug("websocket_disconnected", client=str(websocket.client)) return except ValueError as exc: + logger.warning("websocket_value_error", error=str(exc)) if websocket.client_state == WebSocketState.CONNECTED: await websocket.close(code=1008, reason=str(exc)) - except Exception as exc: + except Exception: + logger.warning("websocket_unexpected_error", exc_info=True) if websocket.client_state == WebSocketState.CONNECTED: - await websocket.close(code=1011, reason=str(exc)) + await websocket.close(code=1011, reason="Internal server error") @router.post("/responses", response_model=None, include_in_schema=False) @@ -333,7 +401,7 @@ async def list_models( ) -> dict[str, Any]: """List available Codex models.""" openai_models 
= _serialize_codex_models(config) - codex_models = _serialize_codex_cli_models(config) + codex_models = await _serialize_codex_cli_models(config) return {"object": "list", "data": openai_models, "models": codex_models} diff --git a/ccproxy/services/adapters/mock_adapter.py b/ccproxy/services/adapters/mock_adapter.py index 1c838709..3a9903c7 100644 --- a/ccproxy/services/adapters/mock_adapter.py +++ b/ccproxy/services/adapters/mock_adapter.py @@ -53,7 +53,7 @@ def _resolve_target_format(self, request: Request, endpoint: str) -> str: ctx = getattr(request.state, "context", None) format_chain = getattr(ctx, "format_chain", None) if isinstance(format_chain, list) and format_chain: - first = format_chain[0] + first: str = format_chain[0] if first in { FORMAT_OPENAI_CHAT, FORMAT_OPENAI_RESPONSES, @@ -80,7 +80,6 @@ def _extract_stream_flag(self, body: bytes) -> bool: pass except Exception as e: logger.debug("stream_flag_extraction_error", error=str(e)) - pass return False async def handle_request( @@ -108,7 +107,6 @@ async def handle_request( pass except Exception as e: logger.debug("stream_flag_extraction_error", error=str(e)) - pass # Create request context ctx = RequestContext( @@ -149,7 +147,6 @@ async def handle_streaming( pass except Exception as e: logger.debug("stream_flag_extraction_error", error=str(e)) - pass # Create request context ctx = RequestContext( diff --git a/ccproxy/services/factories.py b/ccproxy/services/factories.py index be295964..a2923d5e 100644 --- a/ccproxy/services/factories.py +++ b/ccproxy/services/factories.py @@ -367,9 +367,7 @@ def _register_core_format_adapters( name=spec["name"], ) if hasattr(adapter, "configure_streaming"): - adapter.configure_streaming( - openai_thinking_xml=openai_thinking_xml - ) + adapter.configure_streaming(openai_thinking_xml=openai_thinking_xml) registry.register( from_format=spec["from_format"], to_format=spec["to_format"], diff --git a/ccproxy/services/mocking/mock_handler.py b/ccproxy/services/mocking/mock_handler.py index 1e90538a..f788912c 100644 --- a/ccproxy/services/mocking/mock_handler.py +++ b/ccproxy/services/mocking/mock_handler.py @@ -5,13 +5,12 @@ import random from collections.abc import AsyncGenerator from time import time -from typing import Any, Literal, TypeAlias +from typing import Any, TypeAlias import structlog from fastapi.responses import StreamingResponse from ccproxy.core.constants import ( - FORMAT_ANTHROPIC_MESSAGES, FORMAT_OPENAI_CHAT, FORMAT_OPENAI_RESPONSES, ) diff --git a/tests/plugins/codex/integration/test_msaf_compat.py b/tests/plugins/codex/integration/test_msaf_compat.py index fa20d47b..a844aed9 100644 --- a/tests/plugins/codex/integration/test_msaf_compat.py +++ b/tests/plugins/codex/integration/test_msaf_compat.py @@ -25,7 +25,7 @@ pytestmark = pytest.mark.asyncio(loop_scope="module") DETECTED_CLI_INSTRUCTIONS = "Detected Codex CLI instructions" -MSAF_CHAT_COMPLETIONS_REQUEST = { +MSAF_CHAT_COMPLETIONS_REQUEST: dict[str, Any] = { "model": "gpt-5.4", "messages": [ { @@ -72,7 +72,7 @@ async def codex_msaf_client() -> AsyncGenerator[AsyncClient, None]: "analytics": {"enabled": False}, "metrics": {"enabled": False}, }, - llm={"openai_thinking_xml": False}, + llm=Settings.LLMSettings(openai_thinking_xml=False), ) service_container = create_service_container(settings) app = create_app(service_container) diff --git a/tests/plugins/codex/integration/test_msaf_real_library.py b/tests/plugins/codex/integration/test_msaf_real_library.py index b0762df5..14c0f7fd 100644 --- 
a/tests/plugins/codex/integration/test_msaf_real_library.py +++ b/tests/plugins/codex/integration/test_msaf_real_library.py @@ -1,3 +1,10 @@ +"""Tests for MSAF-style sequential agent workflows through the Codex proxy. + +Validates that multi-step agent patterns (analyst -> editor) work correctly +without requiring the agent_framework library, using plain httpx calls to +simulate the same request flow. +""" + from __future__ import annotations import json @@ -11,10 +18,8 @@ import httpx import pytest import pytest_asyncio -from agent_framework import Message -from agent_framework.openai import OpenAIChatClient -from openai import AsyncOpenAI from pytest_httpx import HTTPXMock +from tests.helpers.assertions import assert_openai_responses_format from ccproxy.api.app import create_app, initialize_plugins_startup, shutdown_plugins from ccproxy.api.bootstrap import create_service_container @@ -91,10 +96,21 @@ def _build_codex_response( } +def _extract_message_text(data: dict[str, Any]) -> str: + """Extract assistant message text from an OpenAI chat completions response.""" + choices = data.get("choices", []) + if choices: + message = choices[0].get("message", {}) + content = message.get("content", "") + if isinstance(content, str): + return content + return "" + + @pytest_asyncio.fixture async def msaf_codex_client( httpx_mock: HTTPXMock, -) -> AsyncGenerator[tuple[OpenAIChatClient, list[dict[str, Any]]], None]: +) -> AsyncGenerator[tuple[httpx.AsyncClient, list[dict[str, Any]]], None]: upstream_payloads: list[dict[str, Any]] = [] response_bodies = [ _build_codex_response( @@ -151,7 +167,7 @@ def upstream_callback(request: httpx.Request) -> httpx.Response: "analytics": {"enabled": False}, "metrics": {"enabled": False}, }, - llm={"openai_thinking_xml": False}, + llm=Settings.LLMSettings(openai_thinking_xml=False), ) service_container = create_service_container(settings) app = create_app(service_container) @@ -163,11 +179,10 @@ def upstream_callback(request: httpx.Request) -> httpx.Response: profile_stub = SimpleNamespace(chatgpt_account_id="test-account-id") detection_data = _build_detection_data() - async def init_detection_stub(self): # type: ignore[no-untyped-def] + async def init_detection_stub(self: Any) -> CodexCacheData: self._cached_data = detection_data return detection_data - http_client: httpx.AsyncClient | None = None async with AsyncExitStack() as stack: stack.enter_context( patch( @@ -199,97 +214,120 @@ async def init_detection_stub(self): # type: ignore[no-untyped-def] new=init_detection_stub, ) ) + await initialize_plugins_startup(app, settings) + transport = httpx.ASGITransport(app=app) + client = httpx.AsyncClient(transport=transport, base_url="http://test") try: - await initialize_plugins_startup(app, settings) - transport = httpx.ASGITransport(app=app) - http_client = httpx.AsyncClient( - transport=transport, - base_url="http://test", - ) - async_client = AsyncOpenAI( - api_key="ccproxy", - base_url="http://test/codex/v1", - http_client=http_client, - ) - client = OpenAIChatClient( - model_id="gpt-5.4", - async_client=async_client, - ) yield client, upstream_payloads finally: - if http_client is not None: - await http_client.aclose() + await client.aclose() await shutdown_plugins(app) await service_container.close() -async def test_msaf_real_library_agent_runs_through_codex_proxy( - msaf_codex_client: tuple[OpenAIChatClient, list[dict[str, Any]]], +async def test_msaf_agent_runs_through_codex_proxy( + msaf_codex_client: tuple[httpx.AsyncClient, list[dict[str, Any]]], ) -> 
None:
+    """Single agent-style call verifies no CLI injection, proper flags, no thinking XML."""
     client, upstream_payloads = msaf_codex_client
-    response = await client.get_response(
-        [Message("user", ["Составьте требования для формы логина."])],
-        options={
-            "instructions": (
-                f"{COMMON_INSTRUCTIONS} "
-                "Focus on fields, validations, and success criteria. "
-                "Output at most 5 bullets."
-            )
+    response = await client.post(
+        "/codex/v1/chat/completions",
+        json={
+            "model": "gpt-5.4",
+            "messages": [
+                {
+                    "role": "system",
+                    "content": (
+                        f"{COMMON_INSTRUCTIONS} "
+                        "Focus on fields, validations, and success criteria. "
+                        "Output at most 5 bullets."
+                    ),
+                },
+                {"role": "user", "content": "Составьте требования для формы логина."},
+            ],
+            "reasoning_effort": "medium",
+            "max_completion_tokens": 256,
         },
     )
+    assert response.status_code == 200, response.text
+    data = response.json()
+    assert_openai_responses_format(data)
+
     assert len(upstream_payloads) == 1
-    assert all(
-        DETECTED_CLI_INSTRUCTIONS not in payload.get("instructions", "")
-        for payload in upstream_payloads
-    )
-    assert all(payload.get("stream") is True for payload in upstream_payloads)
-    assert all(payload.get("store") is False for payload in upstream_payloads)
-    assert "Detected Codex CLI instructions" not in upstream_payloads[0].get(
-        "instructions", ""
-    )
-    assert "<thinking>" not in response.text
-    assert "Email" in response.text
-    assert "Password" in response.text
+    payload = upstream_payloads[0]
+    assert DETECTED_CLI_INSTRUCTIONS not in payload.get("instructions", "")
+    assert payload.get("stream") is True
+    assert payload.get("store") is False
+    assert "<thinking>" not in json.dumps(data, ensure_ascii=False)
+
+    text = _extract_message_text(data)
+    assert "Email" in text
+    assert "Password" in text
 
 
-async def test_msaf_real_library_sequential_agents_keep_clean_messages(
-    msaf_codex_client: tuple[OpenAIChatClient, list[dict[str, Any]]],
+async def test_msaf_sequential_agents_keep_clean_messages(
+    msaf_codex_client: tuple[httpx.AsyncClient, list[dict[str, Any]]],
 ) -> None:
+    """Two sequential agent calls (analyst -> editor) keep reasoning hidden and output clean."""
     client, upstream_payloads = msaf_codex_client
-    analyst_response = await client.get_response(
-        [Message("user", ["Составьте требования для формы логина."])],
-        options={
-            "instructions": (
-                f"{COMMON_INSTRUCTIONS} "
-                "Focus on fields, validations, and success criteria. "
-                "Output at most 5 bullets."
-            )
+
+    # Step 1: analyst call
+    analyst_response = await client.post(
+        "/codex/v1/chat/completions",
+        json={
+            "model": "gpt-5.4",
+            "messages": [
+                {
+                    "role": "system",
+                    "content": (
+                        f"{COMMON_INSTRUCTIONS} "
+                        "Focus on fields, validations, and success criteria. "
+                        "Output at most 5 bullets."
+                    ),
+                },
+                {"role": "user", "content": "Составьте требования для формы логина."},
+            ],
+            "reasoning_effort": "medium",
         },
     )
-    editor_response = await client.get_response(
-        [
-            Message("user", ["Составьте требования для формы логина."], author_name="user"),
-            Message(
-                "assistant",
-                [analyst_response.text],
-                author_name="ProductAnalyst",
-            ),
-        ],
-        options={
-            "instructions": (
-                "You are the final editor for login form requirements. "
-                "Reply in the same language as the user request. "
-                "Produce one clean Markdown document with sections "
-                "Goal, Functional Requirements, Validation Rules, Acceptance Criteria."
-            )
+    assert analyst_response.status_code == 200, analyst_response.text
+    analyst_data = analyst_response.json()
+    analyst_text = _extract_message_text(analyst_data)
+
+    # Step 2: editor call, feeding analyst output as context
+    editor_response = await client.post(
+        "/codex/v1/chat/completions",
+        json={
+            "model": "gpt-5.4",
+            "messages": [
+                {
+                    "role": "system",
+                    "content": (
+                        "You are the final editor for login form requirements. "
+                        "Reply in the same language as the user request. "
+                        "Produce one clean Markdown document with sections "
+                        "Goal, Functional Requirements, Validation Rules, Acceptance Criteria."
+                    ),
+                },
+                {"role": "user", "content": "Составьте требования для формы логина."},
+                {
+                    "role": "assistant",
+                    "content": analyst_text,
+                    "name": "ProductAnalyst",
+                },
+            ],
+            "reasoning_effort": "medium",
         },
     )
+    assert editor_response.status_code == 200, editor_response.text
+    editor_data = editor_response.json()
+    editor_text = _extract_message_text(editor_data)
 
     assert len(upstream_payloads) == 2
-    assert "Hidden analyst reasoning" not in analyst_response.text
-    assert "Hidden editor reasoning" not in editor_response.text
-    assert "<thinking>" not in analyst_response.text
-    assert "<thinking>" not in editor_response.text
-    assert "## Goal" in editor_response.text
-    assert "## Functional Requirements" in editor_response.text
+    assert "Hidden analyst reasoning" not in analyst_text
+    assert "Hidden editor reasoning" not in editor_text
+    assert "<thinking>" not in analyst_text
+    assert "<thinking>" not in editor_text
+    assert "## Goal" in editor_text
+    assert "## Functional Requirements" in editor_text
diff --git a/tests/unit/llms/test_llms_streaming_settings.py b/tests/unit/llms/test_llms_streaming_settings.py
index 224c96ef..64f06807 100644
--- a/tests/unit/llms/test_llms_streaming_settings.py
+++ b/tests/unit/llms/test_llms_streaming_settings.py
@@ -55,7 +55,7 @@ async def test_llm_openai_thinking_xml_env_disables_thinking_serialization(monke
 
 
 def test_format_registry_propagates_openai_thinking_xml_setting() -> None:
-    settings = Settings(llm={"openai_thinking_xml": False})
+    settings = Settings(llm=Settings.LLMSettings(openai_thinking_xml=False))
     container = create_service_container(settings)
 
     registry = container.get_format_registry()
@@ -73,5 +73,5 @@ async def worker(value: bool) -> bool | None:
 
     results = await asyncio.gather(worker(True), worker(False))
 
-    assert results == [True, False]
+    assert list(results) == [True, False]
     assert get_openai_thinking_xml() is None
diff --git a/tests/unit/services/test_mock_adapter.py b/tests/unit/services/test_mock_adapter.py
index 2f749a47..594eb8ba 100644
--- a/tests/unit/services/test_mock_adapter.py
+++ b/tests/unit/services/test_mock_adapter.py
@@ -86,7 +86,7 @@ async def test_handle_request_returns_standard_response() -> None:
 
     assert response.status_code == 202
     assert response.headers["X-Test"] == "yes"
-    assert json.loads(response.body)["format"] == FORMAT_OPENAI_RESPONSES
+    assert json.loads(bytes(response.body))["format"] == FORMAT_OPENAI_RESPONSES
     assert handler.calls[0][0] == "extract"
     assert handler.calls[1][0] == "prompt"
     assert handler.calls[2][0] == "standard"
@@ -120,7 +120,7 @@ async def test_handle_request_prefers_context_format_chain() -> None:
     response = await adapter.handle_request(request)
 
     assert response.status_code == 202
-    assert json.loads(response.body)["format"] == FORMAT_OPENAI_CHAT
+    assert json.loads(bytes(response.body))["format"] == FORMAT_OPENAI_CHAT
 
 
 @pytest.mark.asyncio
@@ -135,7 +135,7 @@ async def 
test_handle_request_falls_back_to_chat_endpoint_detection() -> None: response = await adapter.handle_request(request) assert response.status_code == 202 - assert json.loads(response.body)["format"] == FORMAT_OPENAI_CHAT + assert json.loads(bytes(response.body))["format"] == FORMAT_OPENAI_CHAT @pytest.mark.asyncio @@ -150,7 +150,7 @@ async def test_handle_request_falls_back_to_responses_endpoint_detection() -> No response = await adapter.handle_request(request) assert response.status_code == 202 - assert json.loads(response.body)["format"] == FORMAT_OPENAI_RESPONSES + assert json.loads(bytes(response.body))["format"] == FORMAT_OPENAI_RESPONSES @pytest.mark.asyncio @@ -166,7 +166,7 @@ async def test_handle_request_ignores_unknown_format_chain_and_uses_endpoint() - response = await adapter.handle_request(request) assert response.status_code == 202 - assert json.loads(response.body)["format"] == FORMAT_OPENAI_CHAT + assert json.loads(bytes(response.body))["format"] == FORMAT_OPENAI_CHAT @pytest.mark.asyncio @@ -181,7 +181,7 @@ async def test_handle_request_defaults_to_anthropic_for_unknown_endpoint() -> No response = await adapter.handle_request(request) assert response.status_code == 202 - assert json.loads(response.body)["format"] == FORMAT_ANTHROPIC_MESSAGES + assert json.loads(bytes(response.body))["format"] == FORMAT_ANTHROPIC_MESSAGES @pytest.mark.asyncio From b7307aa76e11e113572b7b3dea083e422013d60b Mon Sep 17 00:00:00 2001 From: Caddy Glow Date: Fri, 20 Mar 2026 09:41:46 +0100 Subject: [PATCH 5/8] test: add WebSocket e2e tests and remove agent-framework dependency Add parameterized WebSocket e2e tests following test_endpoint_e2e.py pattern, covering warmup, streaming, error, and multi-message flows for both /codex/v1/responses and /codex/responses endpoints. - Add WS_ENDPOINT_CONFIGURATIONS and request builders to test_data.py - Add WebSocket validation helpers to e2e_validation.py - Add live server tests gated by CCPROXY_BASE_URL env var - Remove agent-framework-core and agent-framework-orchestrations from test dependencies (test_msaf_real_library refactored to plain httpx) --- pyproject.toml | 2 - tests/helpers/e2e_validation.py | 117 ++++++ tests/helpers/test_data.py | 58 +++ tests/integration/test_websocket_e2e.py | 450 ++++++++++++++++++++++++ uv.lock | 325 +---------------- 5 files changed, 629 insertions(+), 323 deletions(-) create mode 100644 tests/integration/test_websocket_e2e.py diff --git a/pyproject.toml b/pyproject.toml index 26be1425..cd7b2190 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,8 +64,6 @@ test = [ "pytest-env", "pytest-httpx", "pytest-xdist", - "agent-framework-core>=1.0.0rc4,<2", - "agent-framework-orchestrations>=1.0.0b260311,<2", ] docs = [ diff --git a/tests/helpers/e2e_validation.py b/tests/helpers/e2e_validation.py index 89ccc6b1..0517a2b2 100644 --- a/tests/helpers/e2e_validation.py +++ b/tests/helpers/e2e_validation.py @@ -281,6 +281,123 @@ def get_validation_model_for_format( return None +# --- WebSocket validation helpers --- + + +def validate_ws_codex_event_sequence( + events: list[dict[str, Any]], +) -> tuple[bool, list[str]]: + """Validate that a Codex WebSocket event sequence is well-formed. 
+ + Checks: + - At least one event received + - Terminal event (response.completed or response.failed) is present + - Terminal event is last + - response.completed carries required fields + """ + errors: list[str] = [] + + if not events: + errors.append("No WebSocket events received") + return False, errors + + terminal_types = {"response.completed", "response.failed"} + event_types = [e.get("type") for e in events] + + has_terminal = any(t in terminal_types for t in event_types) + if not has_terminal: + errors.append(f"No terminal event found; got types: {event_types}") + + last_type = event_types[-1] + if last_type not in terminal_types: + errors.append(f"Last event should be terminal, got: {last_type}") + + terminal_event = events[-1] + response_obj = terminal_event.get("response") + if not isinstance(response_obj, dict): + errors.append("Terminal event missing 'response' object") + else: + for field in ("id", "object", "status"): + if field not in response_obj: + errors.append(f"Terminal response missing field: {field}") + + return len(errors) == 0, errors + + +def validate_ws_codex_streaming_content( + events: list[dict[str, Any]], +) -> tuple[str, list[str]]: + """Extract and validate text content from a Codex WebSocket event stream. + + Returns: + Tuple of (assembled_text, errors) + """ + errors: list[str] = [] + deltas: list[str] = [] + + for event in events: + if event.get("type") == "response.output_text.delta": + delta = event.get("delta") + if isinstance(delta, str): + deltas.append(delta) + else: + errors.append(f"Delta event has non-string delta: {type(delta)}") + + text = "".join(deltas) + + done_events = [e for e in events if e.get("type") == "response.output_text.done"] + if done_events: + done_text = done_events[-1].get("text", "") + if done_text and done_text != text: + errors.append( + f"Assembled deltas ({text!r}) differ from done text ({done_text!r})" + ) + + return text, errors + + +def validate_ws_codex_warmup_response(event: dict[str, Any]) -> tuple[bool, list[str]]: + """Validate a warmup (empty input) response event.""" + errors: list[str] = [] + + if event.get("type") != "response.completed": + errors.append(f"Expected response.completed, got: {event.get('type')}") + + response_obj = event.get("response", {}) + if response_obj.get("status") != "completed": + errors.append(f"Expected status=completed, got: {response_obj.get('status')}") + + if response_obj.get("output") != []: + errors.append( + f"Warmup output should be empty list, got: {response_obj.get('output')}" + ) + + if not isinstance(response_obj.get("id"), str) or not response_obj["id"]: + errors.append("Warmup response missing id") + + return len(errors) == 0, errors + + +def validate_ws_codex_error_response(event: dict[str, Any]) -> tuple[bool, list[str]]: + """Validate an error terminal event from WebSocket.""" + errors: list[str] = [] + + if event.get("type") != "response.completed": + errors.append(f"Expected response.completed, got: {event.get('type')}") + + response_obj = event.get("response", {}) + if response_obj.get("status") != "failed": + errors.append(f"Expected status=failed, got: {response_obj.get('status')}") + + error_obj = response_obj.get("error") + if not isinstance(error_obj, dict): + errors.append("Error response missing 'error' object") + elif "type" not in error_obj: + errors.append("Error object missing 'type' field") + + return len(errors) == 0, errors + + # Format normalization helper def _normalize_format(format_type: str) -> str: alias_map = { diff --git 
a/tests/helpers/test_data.py b/tests/helpers/test_data.py index 6f0c7772..3211ccfd 100644 --- a/tests/helpers/test_data.py +++ b/tests/helpers/test_data.py @@ -286,6 +286,64 @@ def normalize_format(format_type: str) -> str: ] +# WebSocket Endpoint Test Data +WS_ENDPOINT_CONFIGURATIONS = [ + { + "name": "codex_ws_responses_stream", + "endpoint": "/codex/v1/responses", + "model": "gpt-5", + "description": "Codex WebSocket responses streaming", + }, + { + "name": "codex_ws_responses_legacy_stream", + "endpoint": "/codex/responses", + "model": "gpt-5", + "description": "Codex WebSocket responses legacy streaming", + }, +] + + +def create_ws_codex_request( + content: str = "Hello", + model: str = "gpt-5", + **kwargs: Any, +) -> dict[str, Any]: + """Create a Codex WebSocket request payload (response.create envelope).""" + request: dict[str, Any] = { + "type": "response.create", + "model": model, + "input": [ + { + "type": "message", + "role": "user", + "content": [{"type": "input_text", "text": content}], + } + ], + } + request.update(kwargs) + return request + + +def create_ws_codex_warmup_request(model: str = "gpt-5") -> dict[str, Any]: + """Create a Codex WebSocket warmup request (empty input).""" + return { + "type": "response.create", + "model": model, + "input": [], + } + + +# Expected WebSocket event types for Codex streaming +CODEX_WS_STREAMING_EVENT_TYPES = [ + "response.created", + "response.output_text.delta", + "response.output_text.done", + "response.completed", +] + +CODEX_WS_TERMINAL_EVENT_TYPES = {"response.completed", "response.failed"} + + def create_openai_request( content: str = "Hello", model: str = CLAUDE_SONNET_MODEL, diff --git a/tests/integration/test_websocket_e2e.py b/tests/integration/test_websocket_e2e.py new file mode 100644 index 00000000..58713790 --- /dev/null +++ b/tests/integration/test_websocket_e2e.py @@ -0,0 +1,450 @@ +"""End-to-end integration tests for CCProxy WebSocket endpoints. + +Follows the same parameterized pattern as test_endpoint_e2e.py, covering +WebSocket transport for Codex responses (v1 and legacy paths). 
+ +Tests validate: +- WebSocket configuration structure +- Request builder correctness +- Event sequence validation helpers +- Warmup, streaming, error, and multi-message flows +- Live server WebSocket flows (when CCPROXY_BASE_URL is set) +""" + +import asyncio +import json +import os +from collections.abc import Generator +from datetime import UTC, datetime, timedelta +from types import SimpleNamespace +from typing import Any +from unittest.mock import AsyncMock, patch + +import pytest +from fastapi.testclient import TestClient + +from ccproxy.api.app import create_app, initialize_plugins_startup +from ccproxy.api.bootstrap import create_service_container +from ccproxy.config.settings import Settings +from ccproxy.core.logging import setup_logging +from ccproxy.models.detection import DetectedHeaders, DetectedPrompts +from ccproxy.plugins.codex.models import CodexCacheData +from tests.helpers.e2e_validation import ( + validate_ws_codex_error_response, + validate_ws_codex_event_sequence, + validate_ws_codex_streaming_content, + validate_ws_codex_warmup_response, +) +from tests.helpers.test_data import ( + CODEX_WS_TERMINAL_EVENT_TYPES, + WS_ENDPOINT_CONFIGURATIONS, + create_ws_codex_request, + create_ws_codex_warmup_request, +) + + +pytestmark = [pytest.mark.integration, pytest.mark.e2e] + + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + + +def _build_detection_data() -> CodexCacheData: + prompts = DetectedPrompts.from_body( + {"instructions": "You are a helpful coding assistant."} + ) + return CodexCacheData( + codex_version="fallback", + headers=DetectedHeaders({}), + prompts=prompts, + body_json=prompts.raw, + method="POST", + url="https://chatgpt.com/backend-codex/responses", + path="/api/backend-codex/responses", + query_params={}, + ) + + +@pytest.fixture +def codex_ws_app() -> Generator[TestClient, None, None]: + """Create a fully-initialised Codex app wrapped in a sync TestClient. + + Patches OAuth credentials and detection so no real providers are needed. 
+ """ + setup_logging(json_logs=False, log_level_name="ERROR") + settings = Settings( + enable_plugins=True, + plugins={ + "codex": {"enabled": True}, + "oauth_codex": {"enabled": True}, + "duckdb_storage": {"enabled": False}, + "analytics": {"enabled": False}, + "metrics": {"enabled": False}, + }, + enabled_plugins=["codex", "oauth_codex"], + plugins_disable_local_discovery=False, + ) + service_container = create_service_container(settings) + app = create_app(service_container) + + credentials_stub = SimpleNamespace( + access_token="test-codex-access-token", + expires_at=datetime.now(UTC) + timedelta(hours=1), + ) + profile_stub = SimpleNamespace(chatgpt_account_id="test-account-id") + detection_data = _build_detection_data() + + async def init_detection_stub(self: Any) -> CodexCacheData: + self._cached_data = detection_data + return detection_data + + with ( + patch( + "ccproxy.plugins.oauth_codex.manager.CodexTokenManager.load_credentials", + new=AsyncMock(return_value=credentials_stub), + ), + patch( + "ccproxy.plugins.oauth_codex.manager.CodexTokenManager.get_profile_quick", + new=AsyncMock(return_value=profile_stub), + ), + patch( + "ccproxy.plugins.codex.detection_service.CodexDetectionService.initialize_detection", + new=init_detection_stub, + ), + ): + asyncio.run(initialize_plugins_startup(app, settings)) + with TestClient(app) as client: + yield client + + +# --------------------------------------------------------------------------- +# Configuration structure tests (no app needed, mirrors test_endpoint_e2e.py) +# --------------------------------------------------------------------------- + + +@pytest.mark.asyncio +async def test_ws_endpoint_configurations_structure() -> None: + """Verify all WebSocket endpoint configs have required fields.""" + assert len(WS_ENDPOINT_CONFIGURATIONS) > 0 + + for config in WS_ENDPOINT_CONFIGURATIONS: + required_fields = ["name", "endpoint", "model", "description"] + assert all(field in config for field in required_fields), ( + f"Config {config.get('name')} missing fields" + ) + + endpoint = config["endpoint"] + assert isinstance(endpoint, str) + assert endpoint.startswith("/") + assert isinstance(config["model"], str) + assert len(config["model"]) > 0 + + +@pytest.mark.asyncio +@pytest.mark.parametrize("config", WS_ENDPOINT_CONFIGURATIONS) +async def test_ws_request_creation_for_each_endpoint( + config: dict[str, Any], +) -> None: + """Verify request builders produce valid payloads for each config.""" + model = config["model"] + + request_data = create_ws_codex_request( + content="Test WebSocket message", model=model + ) + assert request_data["type"] == "response.create" + assert request_data["model"] == model + assert isinstance(request_data["input"], list) + assert len(request_data["input"]) > 0 + + warmup = create_ws_codex_warmup_request(model=model) + assert warmup["type"] == "response.create" + assert warmup["input"] == [] + + +@pytest.mark.asyncio +async def test_ws_validation_helpers_work() -> None: + """Verify validation helpers detect good and bad event sequences.""" + good_events: list[dict[str, Any]] = [ + {"type": "response.created", "response": {"id": "r1", "object": "response"}}, + {"type": "response.output_text.delta", "delta": "Hello"}, + { + "type": "response.completed", + "response": { + "id": "r1", + "object": "response", + "status": "completed", + "output": [], + }, + }, + ] + is_valid, errors = validate_ws_codex_event_sequence(good_events) + assert is_valid, errors + + text, text_errors = 
validate_ws_codex_streaming_content(good_events) + assert text == "Hello" + assert not text_errors + + # Empty events should fail + is_valid, errors = validate_ws_codex_event_sequence([]) + assert not is_valid + + # Missing terminal event should fail + is_valid, errors = validate_ws_codex_event_sequence([{"type": "response.created"}]) + assert not is_valid + + # Warmup validation + warmup_event = { + "type": "response.completed", + "response": { + "id": "w1", + "object": "response", + "status": "completed", + "output": [], + }, + } + is_valid, errors = validate_ws_codex_warmup_response(warmup_event) + assert is_valid, errors + + # Error validation + error_event = { + "type": "response.completed", + "response": { + "id": "e1", + "object": "response", + "status": "failed", + "error": {"type": "invalid_request_error", "message": "bad"}, + }, + } + is_valid, errors = validate_ws_codex_error_response(error_event) + assert is_valid, errors + + +# --------------------------------------------------------------------------- +# Live WebSocket tests (require codex_ws_app + external API mocks) +# --------------------------------------------------------------------------- + + +@pytest.mark.parametrize("config", WS_ENDPOINT_CONFIGURATIONS, ids=lambda c: c["name"]) +def test_ws_warmup_request( + codex_ws_app: TestClient, + config: dict[str, Any], +) -> None: + """Empty-input warmup should return a completed terminal event immediately.""" + warmup = create_ws_codex_warmup_request(model=config["model"]) + + with codex_ws_app.websocket_connect(config["endpoint"]) as ws: + ws.send_json(warmup) + event = ws.receive_json() + ws.close() + + is_valid, errors = validate_ws_codex_warmup_response(event) + assert is_valid, errors + + +@pytest.mark.parametrize("config", WS_ENDPOINT_CONFIGURATIONS, ids=lambda c: c["name"]) +def test_ws_streaming_response( + codex_ws_app: TestClient, + mock_external_openai_codex_api_streaming: Any, + config: dict[str, Any], +) -> None: + """A real request should stream events ending with a terminal event.""" + request = create_ws_codex_request( + content="Reply with exactly OK", model=config["model"] + ) + + events: list[dict[str, Any]] = [] + with codex_ws_app.websocket_connect(config["endpoint"]) as ws: + ws.send_json(request) + while True: + event = ws.receive_json() + events.append(event) + if event.get("type") in CODEX_WS_TERMINAL_EVENT_TYPES: + ws.close() + break + + is_valid, errors = validate_ws_codex_event_sequence(events) + assert is_valid, errors + + text, text_errors = validate_ws_codex_streaming_content(events) + assert not text_errors, text_errors + assert len(text) > 0, "Expected non-empty streamed text" + + +@pytest.mark.parametrize("config", WS_ENDPOINT_CONFIGURATIONS, ids=lambda c: c["name"]) +def test_ws_upstream_error( + codex_ws_app: TestClient, + mock_external_openai_codex_api_error: Any, + config: dict[str, Any], +) -> None: + """Upstream errors should produce a failed terminal event.""" + request = create_ws_codex_request( + content="Reply with exactly OK", model=config["model"] + ) + + with codex_ws_app.websocket_connect(config["endpoint"]) as ws: + ws.send_json(request) + event = ws.receive_json() + ws.close() + + is_valid, errors = validate_ws_codex_error_response(event) + assert is_valid, errors + + +@pytest.mark.parametrize("config", WS_ENDPOINT_CONFIGURATIONS, ids=lambda c: c["name"]) +def test_ws_warmup_then_real_request( + codex_ws_app: TestClient, + mock_external_openai_codex_api_streaming: Any, + config: dict[str, Any], +) -> None: + """Warmup followed 
by real request on same connection should both succeed."""
+    warmup = create_ws_codex_warmup_request(model=config["model"])
+    request = create_ws_codex_request(
+        content="Reply with exactly OK", model=config["model"]
+    )
+
+    with codex_ws_app.websocket_connect(config["endpoint"]) as ws:
+        # Warmup
+        ws.send_json(warmup)
+        warmup_event = ws.receive_json()
+
+        # Attach previous_response_id from warmup
+        warmup_id = warmup_event.get("response", {}).get("id")
+        request["previous_response_id"] = warmup_id
+
+        # Real request
+        ws.send_json(request)
+        events: list[dict[str, Any]] = []
+        while True:
+            event = ws.receive_json()
+            events.append(event)
+            if event.get("type") in CODEX_WS_TERMINAL_EVENT_TYPES:
+                ws.close()
+                break
+
+    # Validate warmup
+    is_valid, errors = validate_ws_codex_warmup_response(warmup_event)
+    assert is_valid, errors
+
+    # Validate streaming
+    is_valid, errors = validate_ws_codex_event_sequence(events)
+    assert is_valid, errors
+
+    text, text_errors = validate_ws_codex_streaming_content(events)
+    assert not text_errors, text_errors
+    assert len(text) > 0
+
+
+# ---------------------------------------------------------------------------
+# Live server tests (require `make dev` + real credentials)
+#
+# Run with: CCPROXY_BASE_URL=http://127.0.0.1:8000 pytest -m real_api -k websocket
+# ---------------------------------------------------------------------------
+
+_LIVE_BASE_URL = os.environ.get("CCPROXY_BASE_URL", "").rstrip("/")
+_skip_no_live = pytest.mark.skipif(
+    not _LIVE_BASE_URL,
+    reason="CCPROXY_BASE_URL not set; skipping live WebSocket tests",
+)
+
+
+def _ws_url(http_base: str, path: str) -> str:
+    """Convert an http(s) base URL plus path into a ws(s) URL."""
+    return http_base.replace("https://", "wss://").replace("http://", "ws://") + path
+
+
+@_skip_no_live
+@pytest.mark.real_api
+@pytest.mark.parametrize("config", WS_ENDPOINT_CONFIGURATIONS, ids=lambda c: c["name"])
+@pytest.mark.asyncio
+async def test_live_ws_warmup(config: dict[str, Any]) -> None:
+    """Send a warmup to the live server and validate the response."""
+    import websockets
+
+    warmup = create_ws_codex_warmup_request(model=config["model"])
+    url = _ws_url(_LIVE_BASE_URL, config["endpoint"])
+
+    async with websockets.connect(url) as ws:
+        await ws.send(json.dumps(warmup))
+        raw = await asyncio.wait_for(ws.recv(), timeout=10)
+        event = json.loads(raw)
+
+    is_valid, errors = validate_ws_codex_warmup_response(event)
+    assert is_valid, errors
+
+
+@_skip_no_live
+@pytest.mark.real_api
+@pytest.mark.parametrize("config", WS_ENDPOINT_CONFIGURATIONS, ids=lambda c: c["name"])
+@pytest.mark.asyncio
+async def test_live_ws_streaming(config: dict[str, Any]) -> None:
+    """Send a real request to the live server and collect streaming events."""
+    import websockets
+
+    request = create_ws_codex_request(
+        content="Reply with exactly OK", model=config["model"]
+    )
+    url = _ws_url(_LIVE_BASE_URL, config["endpoint"])
+
+    events: list[dict[str, Any]] = []
+    async with websockets.connect(url) as ws:
+        await ws.send(json.dumps(request))
+        while True:
+            raw = await asyncio.wait_for(ws.recv(), timeout=60)
+            event = json.loads(raw)
+            events.append(event)
+            if event.get("type") in CODEX_WS_TERMINAL_EVENT_TYPES:
+                break
+
+    is_valid, errors = validate_ws_codex_event_sequence(events)
+    assert is_valid, errors
+
+    text, text_errors = validate_ws_codex_streaming_content(events)
+    assert not text_errors, text_errors
+    assert len(text) > 0, "Expected non-empty response from live server"
+
+
+@_skip_no_live
+@pytest.mark.real_api
+@pytest.mark.parametrize("config", WS_ENDPOINT_CONFIGURATIONS, ids=lambda c: c["name"]) +@pytest.mark.asyncio +async def test_live_ws_warmup_then_request(config: dict[str, Any]) -> None: + """Warmup followed by real request on a single live WebSocket connection.""" + import websockets + + warmup = create_ws_codex_warmup_request(model=config["model"]) + request = create_ws_codex_request( + content="Reply with exactly OK", model=config["model"] + ) + url = _ws_url(_LIVE_BASE_URL, config["endpoint"]) + + async with websockets.connect(url) as ws: + # Warmup + await ws.send(json.dumps(warmup)) + raw = await asyncio.wait_for(ws.recv(), timeout=10) + warmup_event = json.loads(raw) + + is_valid, errors = validate_ws_codex_warmup_response(warmup_event) + assert is_valid, errors + + # Attach previous_response_id from warmup + warmup_id = warmup_event.get("response", {}).get("id") + request["previous_response_id"] = warmup_id + + # Real request + await ws.send(json.dumps(request)) + events: list[dict[str, Any]] = [] + while True: + raw = await asyncio.wait_for(ws.recv(), timeout=60) + event = json.loads(raw) + events.append(event) + if event.get("type") in CODEX_WS_TERMINAL_EVENT_TYPES: + break + + is_valid, errors = validate_ws_codex_event_sequence(events) + assert is_valid, errors + + text, text_errors = validate_ws_codex_streaming_content(events) + assert not text_errors, text_errors + assert len(text) > 0 diff --git a/uv.lock b/uv.lock index a23e938d..293c10a6 100644 --- a/uv.lock +++ b/uv.lock @@ -2,40 +2,6 @@ version = 1 revision = 3 requires-python = ">=3.11" -[[package]] -name = "agent-framework-core" -version = "1.0.0rc4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "azure-ai-projects" }, - { name = "azure-identity" }, - { name = "mcp", extra = ["ws"] }, - { name = "openai" }, - { name = "opentelemetry-api" }, - { name = "opentelemetry-sdk" }, - { name = "opentelemetry-semantic-conventions-ai" }, - { name = "packaging" }, - { name = "pydantic" }, - { name = "python-dotenv" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f1/5a/b472f9a57235bb72899151ec5cd3c925825e16018689e0300cb822cf00f8/agent_framework_core-1.0.0rc4.tar.gz", hash = "sha256:f394eb95ae877ae854aa7a3e499f76f34b26102808009a66b264ded89c6b6dbd", size = 302446, upload-time = "2026-03-11T23:19:29.198Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/06/d7/89776e7e919e46fd83ae464a416966715f4f40083297d42574e3d45214f6/agent_framework_core-1.0.0rc4-py3-none-any.whl", hash = "sha256:f01a6997be0f5e05853eb6be341dbca692c4e5d6999de5f3e8364296de50635f", size = 348882, upload-time = "2026-03-11T23:19:43.158Z" }, -] - -[[package]] -name = "agent-framework-orchestrations" -version = "1.0.0b260311" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "agent-framework-core" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b3/7f/43aeca0b4d1dc6156539d1723ea3d48599ee10bf660280577593e1441b1b/agent_framework_orchestrations-1.0.0b260311.tar.gz", hash = "sha256:a303a156c066954bbed5b1ac6e7b3dd8049ffe3bbf0c1841f5ab24e97a8f1fd9", size = 55139, upload-time = "2026-03-11T23:19:52.793Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/58/83/ef99c5a45c3d45eeaed1ffcb4f3294fa50f4d19c0f69771693b7d295b0bd/agent_framework_orchestrations-1.0.0b260311-py3-none-any.whl", hash = "sha256:cc7cdebe0abb76208d2c6618d410bf77f0806478dbe25ad1467b27f4f70b8dba", size = 61073, upload-time = "2026-03-11T23:19:38.618Z" }, -] - 
[[package]] name = "aioconsole" version = "0.8.2" @@ -86,67 +52,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, ] -[[package]] -name = "azure-ai-projects" -version = "2.0.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "azure-core" }, - { name = "azure-identity" }, - { name = "azure-storage-blob" }, - { name = "isodate" }, - { name = "openai" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/86/f9/a15c8a16e35e6d620faebabc6cc4f9e2f4b7f1d962cc6f58931c46947e24/azure_ai_projects-2.0.1.tar.gz", hash = "sha256:c8c64870aa6b89903af69a4ff28b4eff3df9744f14615ea572cae87394946a0c", size = 491774, upload-time = "2026-03-12T19:59:02.712Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8d/f7/290ca39501c06c6e23b46ba9f7f3dfb05ecc928cde105fed85d6845060dd/azure_ai_projects-2.0.1-py3-none-any.whl", hash = "sha256:dfda540d256e67a52bf81c75418b6bf92b811b96693fe45787e154a888ad2396", size = 236560, upload-time = "2026-03-12T19:59:04.249Z" }, -] - -[[package]] -name = "azure-core" -version = "1.38.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "requests" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c8/29/9641b73248745774a52c7ce7f965ed1febbdea787ec21caad3ae6891d18a/azure_core-1.38.3.tar.gz", hash = "sha256:a7931fd445cb4af8802c6f39c6a326bbd1e34b115846550a8245fa656ead6f8e", size = 367267, upload-time = "2026-03-12T20:28:21.122Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9a/3d/ac86083efa45a439d0bbfb7947615227813d368b9e1e93d23fd30de6fec0/azure_core-1.38.3-py3-none-any.whl", hash = "sha256:bf59d29765bf4748ab9edf25f98a30b7ea9797f43e367c06d846a30b29c1f845", size = 218231, upload-time = "2026-03-12T20:28:22.462Z" }, -] - -[[package]] -name = "azure-identity" -version = "1.25.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "azure-core" }, - { name = "cryptography" }, - { name = "msal" }, - { name = "msal-extensions" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c5/0e/3a63efb48aa4a5ae2cfca61ee152fbcb668092134d3eb8bfda472dd5c617/azure_identity-1.25.3.tar.gz", hash = "sha256:ab23c0d63015f50b630ef6c6cf395e7262f439ce06e5d07a64e874c724f8d9e6", size = 286304, upload-time = "2026-03-13T01:12:20.892Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/49/9a/417b3a533e01953a7c618884df2cb05a71e7b68bdbce4fbdb62349d2a2e8/azure_identity-1.25.3-py3-none-any.whl", hash = "sha256:f4d0b956a8146f30333e071374171f3cfa7bdb8073adb8c3814b65567aa7447c", size = 192138, upload-time = "2026-03-13T01:12:22.951Z" }, -] - -[[package]] -name = "azure-storage-blob" -version = "12.28.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "azure-core" }, - { name = "cryptography" }, - { name = "isodate" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/71/24/072ba8e27b0e2d8fec401e9969b429d4f5fc4c8d4f0f05f4661e11f7234a/azure_storage_blob-12.28.0.tar.gz", hash = "sha256:e7d98ea108258d29aa0efbfd591b2e2075fa1722a2fae8699f0b3c9de11eff41", size = 604225, upload-time = "2026-01-06T23:48:57.282Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/d8/3a/6ef2047a072e54e1142718d433d50e9514c999a58f51abfff7902f3a72f8/azure_storage_blob-12.28.0-py3-none-any.whl", hash = "sha256:00fb1db28bf6a7b7ecaa48e3b1d5c83bfadacc5a678b77826081304bd87d6461", size = 431499, upload-time = "2026-01-06T23:48:58.995Z" }, -] - [[package]] name = "babel" version = "2.17.0" @@ -288,8 +193,6 @@ all = [ { name = "textual" }, ] dev = [ - { name = "agent-framework-core" }, - { name = "agent-framework-orchestrations" }, { name = "aioconsole" }, { name = "bandit" }, { name = "claude-agent-sdk" }, @@ -345,8 +248,6 @@ docs = [ { name = "mkdocstrings", extra = ["python"] }, ] test = [ - { name = "agent-framework-core" }, - { name = "agent-framework-orchestrations" }, { name = "mypy" }, { name = "pytest" }, { name = "pytest-asyncio" }, @@ -405,8 +306,6 @@ all = [ { name = "textual", specifier = ">=3.7.1" }, ] dev = [ - { name = "agent-framework-core", specifier = ">=1.0.0rc4,<2" }, - { name = "agent-framework-orchestrations", specifier = ">=1.0.0b260311,<2" }, { name = "aioconsole", specifier = ">=0.8.1" }, { name = "bandit" }, { name = "claude-agent-sdk", specifier = ">=0.1.0" }, @@ -462,8 +361,6 @@ docs = [ { name = "mkdocstrings", extras = ["python"], specifier = ">=0.24.0" }, ] test = [ - { name = "agent-framework-core", specifier = ">=1.0.0rc4,<2" }, - { name = "agent-framework-orchestrations", specifier = ">=1.0.0b260311,<2" }, { name = "mypy" }, { name = "pytest" }, { name = "pytest-asyncio" }, @@ -844,15 +741,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, ] -[[package]] -name = "distro" -version = "1.9.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, -] - [[package]] name = "dnspython" version = "2.8.0" @@ -1049,6 +937,7 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2", size = 272305, upload-time = "2025-08-07T13:15:41.288Z" }, { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246", size = 632472, upload-time = "2025-08-07T13:42:55.044Z" }, { url = "https://files.pythonhosted.org/packages/ae/8f/95d48d7e3d433e6dae5b1682e4292242a53f22df82e6d3dda81b1701a960/greenlet-3.2.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:94abf90142c2a18151632371140b3dba4dee031633fe614cb592dbb6c9e17bc3", size = 644646, upload-time = 
"2025-08-07T13:45:26.523Z" }, + { url = "https://files.pythonhosted.org/packages/d5/5e/405965351aef8c76b8ef7ad370e5da58d57ef6068df197548b015464001a/greenlet-3.2.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:4d1378601b85e2e5171b99be8d2dc85f594c79967599328f95c1dc1a40f1c633", size = 640519, upload-time = "2025-08-07T13:53:13.928Z" }, { url = "https://files.pythonhosted.org/packages/25/5d/382753b52006ce0218297ec1b628e048c4e64b155379331f25a7316eb749/greenlet-3.2.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0db5594dce18db94f7d1650d7489909b57afde4c580806b8d9203b6e79cdc079", size = 639707, upload-time = "2025-08-07T13:18:27.146Z" }, { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8", size = 587684, upload-time = "2025-08-07T13:18:25.164Z" }, { url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52", size = 1116647, upload-time = "2025-08-07T13:42:38.655Z" }, @@ -1059,6 +948,7 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd", size = 274079, upload-time = "2025-08-07T13:15:45.033Z" }, { url = "https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb", size = 640997, upload-time = "2025-08-07T13:42:56.234Z" }, { url = "https://files.pythonhosted.org/packages/3b/16/035dcfcc48715ccd345f3a93183267167cdd162ad123cd93067d86f27ce4/greenlet-3.2.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f28588772bb5fb869a8eb331374ec06f24a83a9c25bfa1f38b6993afe9c1e968", size = 655185, upload-time = "2025-08-07T13:45:27.624Z" }, + { url = "https://files.pythonhosted.org/packages/31/da/0386695eef69ffae1ad726881571dfe28b41970173947e7c558d9998de0f/greenlet-3.2.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5c9320971821a7cb77cfab8d956fa8e39cd07ca44b6070db358ceb7f8797c8c9", size = 649926, upload-time = "2025-08-07T13:53:15.251Z" }, { url = "https://files.pythonhosted.org/packages/68/88/69bf19fd4dc19981928ceacbc5fd4bb6bc2215d53199e367832e98d1d8fe/greenlet-3.2.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c60a6d84229b271d44b70fb6e5fa23781abb5d742af7b808ae3f6efd7c9c60f6", size = 651839, upload-time = "2025-08-07T13:18:30.281Z" }, { url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0", size = 607586, upload-time = "2025-08-07T13:18:28.544Z" }, { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0", size = 1123281, upload-time = "2025-08-07T13:42:39.858Z" }, @@ -1069,6 +959,7 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31", size = 272814, upload-time = "2025-08-07T13:15:50.011Z" }, { url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945", size = 641073, upload-time = "2025-08-07T13:42:57.23Z" }, { url = "https://files.pythonhosted.org/packages/f7/0b/bc13f787394920b23073ca3b6c4a7a21396301ed75a655bcb47196b50e6e/greenlet-3.2.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:710638eb93b1fa52823aa91bf75326f9ecdfd5e0466f00789246a5280f4ba0fc", size = 655191, upload-time = "2025-08-07T13:45:29.752Z" }, + { url = "https://files.pythonhosted.org/packages/f2/d6/6adde57d1345a8d0f14d31e4ab9c23cfe8e2cd39c3baf7674b4b0338d266/greenlet-3.2.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c5111ccdc9c88f423426df3fd1811bfc40ed66264d35aa373420a34377efc98a", size = 649516, upload-time = "2025-08-07T13:53:16.314Z" }, { url = "https://files.pythonhosted.org/packages/7f/3b/3a3328a788d4a473889a2d403199932be55b1b0060f4ddd96ee7cdfcad10/greenlet-3.2.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d76383238584e9711e20ebe14db6c88ddcedc1829a9ad31a584389463b5aa504", size = 652169, upload-time = "2025-08-07T13:18:32.861Z" }, { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671", size = 610497, upload-time = "2025-08-07T13:18:31.636Z" }, { url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b", size = 1121662, upload-time = "2025-08-07T13:42:41.117Z" }, @@ -1079,6 +970,7 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/22/5c/85273fd7cc388285632b0498dbbab97596e04b154933dfe0f3e68156c68c/greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0", size = 273586, upload-time = "2025-08-07T13:16:08.004Z" }, { url = "https://files.pythonhosted.org/packages/d1/75/10aeeaa3da9332c2e761e4c50d4c3556c21113ee3f0afa2cf5769946f7a3/greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f", size = 686346, upload-time = "2025-08-07T13:42:59.944Z" }, { url = "https://files.pythonhosted.org/packages/c0/aa/687d6b12ffb505a4447567d1f3abea23bd20e73a5bed63871178e0831b7a/greenlet-3.2.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c17b6b34111ea72fc5a4e4beec9711d2226285f0386ea83477cbb97c30a3f3a5", size = 699218, upload-time = "2025-08-07T13:45:30.969Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/8b/29aae55436521f1d6f8ff4e12fb676f3400de7fcf27fccd1d4d17fd8fecd/greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1", size = 694659, upload-time = "2025-08-07T13:53:17.759Z" }, { url = "https://files.pythonhosted.org/packages/92/2e/ea25914b1ebfde93b6fc4ff46d6864564fba59024e928bdc7de475affc25/greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735", size = 695355, upload-time = "2025-08-07T13:18:34.517Z" }, { url = "https://files.pythonhosted.org/packages/72/60/fc56c62046ec17f6b0d3060564562c64c862948c9d4bc8aa807cf5bd74f4/greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337", size = 657512, upload-time = "2025-08-07T13:18:33.969Z" }, { url = "https://files.pythonhosted.org/packages/23/6e/74407aed965a4ab6ddd93a7ded3180b730d281c77b765788419484cdfeef/greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269", size = 1612508, upload-time = "2025-11-04T12:42:23.427Z" }, @@ -1263,15 +1155,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, ] -[[package]] -name = "isodate" -version = "0.7.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/4d/e940025e2ce31a8ce1202635910747e5a87cc3a6a6bb2d00973375014749/isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6", size = 29705, upload-time = "2024-10-08T23:04:11.5Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15", size = 22320, upload-time = "2024-10-08T23:04:09.501Z" }, -] - [[package]] name = "jaraco-classes" version = "3.4.0" @@ -1329,91 +1212,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, ] -[[package]] -name = "jiter" -version = "0.13.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0d/5e/4ec91646aee381d01cdb9974e30882c9cd3b8c5d1079d6b5ff4af522439a/jiter-0.13.0.tar.gz", hash = "sha256:f2839f9c2c7e2dffc1bc5929a510e14ce0a946be9365fd1219e7ef342dae14f4", size = 164847, upload-time = "2026-02-02T12:37:56.441Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/71/29/499f8c9eaa8a16751b1c0e45e6f5f1761d180da873d417996cc7bddc8eef/jiter-0.13.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ea026e70a9a28ebbdddcbcf0f1323128a8db66898a06eaad3a4e62d2f554d096", size = 311157, upload-time = "2026-02-02T12:35:37.758Z" }, - { url = 
"https://files.pythonhosted.org/packages/50/f6/566364c777d2ab450b92100bea11333c64c38d32caf8dc378b48e5b20c46/jiter-0.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66aa3e663840152d18cc8ff1e4faad3dd181373491b9cfdc6004b92198d67911", size = 319729, upload-time = "2026-02-02T12:35:39.246Z" }, - { url = "https://files.pythonhosted.org/packages/73/dd/560f13ec5e4f116d8ad2658781646cca91b617ae3b8758d4a5076b278f70/jiter-0.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3524798e70655ff19aec58c7d05adb1f074fecff62da857ea9be2b908b6d701", size = 354766, upload-time = "2026-02-02T12:35:40.662Z" }, - { url = "https://files.pythonhosted.org/packages/7c/0d/061faffcfe94608cbc28a0d42a77a74222bdf5055ccdbe5fd2292b94f510/jiter-0.13.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ec7e287d7fbd02cb6e22f9a00dd9c9cd504c40a61f2c61e7e1f9690a82726b4c", size = 362587, upload-time = "2026-02-02T12:35:42.025Z" }, - { url = "https://files.pythonhosted.org/packages/92/c9/c66a7864982fd38a9773ec6e932e0398d1262677b8c60faecd02ffb67bf3/jiter-0.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:47455245307e4debf2ce6c6e65a717550a0244231240dcf3b8f7d64e4c2f22f4", size = 487537, upload-time = "2026-02-02T12:35:43.459Z" }, - { url = "https://files.pythonhosted.org/packages/6c/86/84eb4352cd3668f16d1a88929b5888a3fe0418ea8c1dfc2ad4e7bf6e069a/jiter-0.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ee9da221dca6e0429c2704c1b3655fe7b025204a71d4d9b73390c759d776d165", size = 373717, upload-time = "2026-02-02T12:35:44.928Z" }, - { url = "https://files.pythonhosted.org/packages/6e/09/9fe4c159358176f82d4390407a03f506a8659ed13ca3ac93a843402acecf/jiter-0.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24ab43126d5e05f3d53a36a8e11eb2f23304c6c1117844aaaf9a0aa5e40b5018", size = 362683, upload-time = "2026-02-02T12:35:46.636Z" }, - { url = "https://files.pythonhosted.org/packages/c9/5e/85f3ab9caca0c1d0897937d378b4a515cae9e119730563572361ea0c48ae/jiter-0.13.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9da38b4fedde4fb528c740c2564628fbab737166a0e73d6d46cb4bb5463ff411", size = 392345, upload-time = "2026-02-02T12:35:48.088Z" }, - { url = "https://files.pythonhosted.org/packages/12/4c/05b8629ad546191939e6f0c2f17e29f542a398f4a52fb987bc70b6d1eb8b/jiter-0.13.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0b34c519e17658ed88d5047999a93547f8889f3c1824120c26ad6be5f27b6cf5", size = 517775, upload-time = "2026-02-02T12:35:49.482Z" }, - { url = "https://files.pythonhosted.org/packages/4d/88/367ea2eb6bc582c7052e4baf5ddf57ebe5ab924a88e0e09830dfb585c02d/jiter-0.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2a6394e6af690d462310a86b53c47ad75ac8c21dc79f120714ea449979cb1d3", size = 551325, upload-time = "2026-02-02T12:35:51.104Z" }, - { url = "https://files.pythonhosted.org/packages/f3/12/fa377ffb94a2f28c41afaed093e0d70cfe512035d5ecb0cad0ae4792d35e/jiter-0.13.0-cp311-cp311-win32.whl", hash = "sha256:0f0c065695f616a27c920a56ad0d4fc46415ef8b806bf8fc1cacf25002bd24e1", size = 204709, upload-time = "2026-02-02T12:35:52.467Z" }, - { url = "https://files.pythonhosted.org/packages/cb/16/8e8203ce92f844dfcd3d9d6a5a7322c77077248dbb12da52d23193a839cd/jiter-0.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:0733312953b909688ae3c2d58d043aa040f9f1a6a75693defed7bc2cc4bf2654", size = 204560, upload-time = "2026-02-02T12:35:53.925Z" }, - { url = 
"https://files.pythonhosted.org/packages/44/26/97cc40663deb17b9e13c3a5cf29251788c271b18ee4d262c8f94798b8336/jiter-0.13.0-cp311-cp311-win_arm64.whl", hash = "sha256:5d9b34ad56761b3bf0fbe8f7e55468704107608512350962d3317ffd7a4382d5", size = 189608, upload-time = "2026-02-02T12:35:55.304Z" }, - { url = "https://files.pythonhosted.org/packages/2e/30/7687e4f87086829955013ca12a9233523349767f69653ebc27036313def9/jiter-0.13.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0a2bd69fc1d902e89925fc34d1da51b2128019423d7b339a45d9e99c894e0663", size = 307958, upload-time = "2026-02-02T12:35:57.165Z" }, - { url = "https://files.pythonhosted.org/packages/c3/27/e57f9a783246ed95481e6749cc5002a8a767a73177a83c63ea71f0528b90/jiter-0.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f917a04240ef31898182f76a332f508f2cc4b57d2b4d7ad2dbfebbfe167eb505", size = 318597, upload-time = "2026-02-02T12:35:58.591Z" }, - { url = "https://files.pythonhosted.org/packages/cf/52/e5719a60ac5d4d7c5995461a94ad5ef962a37c8bf5b088390e6fad59b2ff/jiter-0.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1e2b199f446d3e82246b4fd9236d7cb502dc2222b18698ba0d986d2fecc6152", size = 348821, upload-time = "2026-02-02T12:36:00.093Z" }, - { url = "https://files.pythonhosted.org/packages/61/db/c1efc32b8ba4c740ab3fc2d037d8753f67685f475e26b9d6536a4322bcdd/jiter-0.13.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04670992b576fa65bd056dbac0c39fe8bd67681c380cb2b48efa885711d9d726", size = 364163, upload-time = "2026-02-02T12:36:01.937Z" }, - { url = "https://files.pythonhosted.org/packages/55/8a/fb75556236047c8806995671a18e4a0ad646ed255276f51a20f32dceaeec/jiter-0.13.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a1aff1fbdb803a376d4d22a8f63f8e7ccbce0b4890c26cc7af9e501ab339ef0", size = 483709, upload-time = "2026-02-02T12:36:03.41Z" }, - { url = "https://files.pythonhosted.org/packages/7e/16/43512e6ee863875693a8e6f6d532e19d650779d6ba9a81593ae40a9088ff/jiter-0.13.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b3fb8c2053acaef8580809ac1d1f7481a0a0bdc012fd7f5d8b18fb696a5a089", size = 370480, upload-time = "2026-02-02T12:36:04.791Z" }, - { url = "https://files.pythonhosted.org/packages/f8/4c/09b93e30e984a187bc8aaa3510e1ec8dcbdcd71ca05d2f56aac0492453aa/jiter-0.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdaba7d87e66f26a2c45d8cbadcbfc4bf7884182317907baf39cfe9775bb4d93", size = 360735, upload-time = "2026-02-02T12:36:06.994Z" }, - { url = "https://files.pythonhosted.org/packages/1a/1b/46c5e349019874ec5dfa508c14c37e29864ea108d376ae26d90bee238cd7/jiter-0.13.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7b88d649135aca526da172e48083da915ec086b54e8e73a425ba50999468cc08", size = 391814, upload-time = "2026-02-02T12:36:08.368Z" }, - { url = "https://files.pythonhosted.org/packages/15/9e/26184760e85baee7162ad37b7912797d2077718476bf91517641c92b3639/jiter-0.13.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e404ea551d35438013c64b4f357b0474c7abf9f781c06d44fcaf7a14c69ff9e2", size = 513990, upload-time = "2026-02-02T12:36:09.993Z" }, - { url = "https://files.pythonhosted.org/packages/e9/34/2c9355247d6debad57a0a15e76ab1566ab799388042743656e566b3b7de1/jiter-0.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1f4748aad1b4a93c8bdd70f604d0f748cdc0e8744c5547798acfa52f10e79228", size = 548021, upload-time = "2026-02-02T12:36:11.376Z" }, - { url = 
"https://files.pythonhosted.org/packages/ac/4a/9f2c23255d04a834398b9c2e0e665382116911dc4d06b795710503cdad25/jiter-0.13.0-cp312-cp312-win32.whl", hash = "sha256:0bf670e3b1445fc4d31612199f1744f67f889ee1bbae703c4b54dc097e5dd394", size = 203024, upload-time = "2026-02-02T12:36:12.682Z" }, - { url = "https://files.pythonhosted.org/packages/09/ee/f0ae675a957ae5a8f160be3e87acea6b11dc7b89f6b7ab057e77b2d2b13a/jiter-0.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:15db60e121e11fe186c0b15236bd5d18381b9ddacdcf4e659feb96fc6c969c92", size = 205424, upload-time = "2026-02-02T12:36:13.93Z" }, - { url = "https://files.pythonhosted.org/packages/1b/02/ae611edf913d3cbf02c97cdb90374af2082c48d7190d74c1111dde08bcdd/jiter-0.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:41f92313d17989102f3cb5dd533a02787cdb99454d494344b0361355da52fcb9", size = 186818, upload-time = "2026-02-02T12:36:15.308Z" }, - { url = "https://files.pythonhosted.org/packages/91/9c/7ee5a6ff4b9991e1a45263bfc46731634c4a2bde27dfda6c8251df2d958c/jiter-0.13.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1f8a55b848cbabf97d861495cd65f1e5c590246fabca8b48e1747c4dfc8f85bf", size = 306897, upload-time = "2026-02-02T12:36:16.748Z" }, - { url = "https://files.pythonhosted.org/packages/7c/02/be5b870d1d2be5dd6a91bdfb90f248fbb7dcbd21338f092c6b89817c3dbf/jiter-0.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f556aa591c00f2c45eb1b89f68f52441a016034d18b65da60e2d2875bbbf344a", size = 317507, upload-time = "2026-02-02T12:36:18.351Z" }, - { url = "https://files.pythonhosted.org/packages/da/92/b25d2ec333615f5f284f3a4024f7ce68cfa0604c322c6808b2344c7f5d2b/jiter-0.13.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7e1d61da332ec412350463891923f960c3073cf1aae93b538f0bb4c8cd46efb", size = 350560, upload-time = "2026-02-02T12:36:19.746Z" }, - { url = "https://files.pythonhosted.org/packages/be/ec/74dcb99fef0aca9fbe56b303bf79f6bd839010cb18ad41000bf6cc71eec0/jiter-0.13.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3097d665a27bc96fd9bbf7f86178037db139f319f785e4757ce7ccbf390db6c2", size = 363232, upload-time = "2026-02-02T12:36:21.243Z" }, - { url = "https://files.pythonhosted.org/packages/1b/37/f17375e0bb2f6a812d4dd92d7616e41917f740f3e71343627da9db2824ce/jiter-0.13.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d01ecc3a8cbdb6f25a37bd500510550b64ddf9f7d64a107d92f3ccb25035d0f", size = 483727, upload-time = "2026-02-02T12:36:22.688Z" }, - { url = "https://files.pythonhosted.org/packages/77/d2/a71160a5ae1a1e66c1395b37ef77da67513b0adba73b993a27fbe47eb048/jiter-0.13.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed9bbc30f5d60a3bdf63ae76beb3f9db280d7f195dfcfa61af792d6ce912d159", size = 370799, upload-time = "2026-02-02T12:36:24.106Z" }, - { url = "https://files.pythonhosted.org/packages/01/99/ed5e478ff0eb4e8aa5fd998f9d69603c9fd3f32de3bd16c2b1194f68361c/jiter-0.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98fbafb6e88256f4454de33c1f40203d09fc33ed19162a68b3b257b29ca7f663", size = 359120, upload-time = "2026-02-02T12:36:25.519Z" }, - { url = "https://files.pythonhosted.org/packages/16/be/7ffd08203277a813f732ba897352797fa9493faf8dc7995b31f3d9cb9488/jiter-0.13.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5467696f6b827f1116556cb0db620440380434591e93ecee7fd14d1a491b6daa", size = 390664, upload-time = "2026-02-02T12:36:26.866Z" }, - { url = 
"https://files.pythonhosted.org/packages/d1/84/e0787856196d6d346264d6dcccb01f741e5f0bd014c1d9a2ebe149caf4f3/jiter-0.13.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:2d08c9475d48b92892583df9da592a0e2ac49bcd41fae1fec4f39ba6cf107820", size = 513543, upload-time = "2026-02-02T12:36:28.217Z" }, - { url = "https://files.pythonhosted.org/packages/65/50/ecbd258181c4313cf79bca6c88fb63207d04d5bf5e4f65174114d072aa55/jiter-0.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:aed40e099404721d7fcaf5b89bd3b4568a4666358bcac7b6b15c09fb6252ab68", size = 547262, upload-time = "2026-02-02T12:36:29.678Z" }, - { url = "https://files.pythonhosted.org/packages/27/da/68f38d12e7111d2016cd198161b36e1f042bd115c169255bcb7ec823a3bf/jiter-0.13.0-cp313-cp313-win32.whl", hash = "sha256:36ebfbcffafb146d0e6ffb3e74d51e03d9c35ce7c625c8066cdbfc7b953bdc72", size = 200630, upload-time = "2026-02-02T12:36:31.808Z" }, - { url = "https://files.pythonhosted.org/packages/25/65/3bd1a972c9a08ecd22eb3b08a95d1941ebe6938aea620c246cf426ae09c2/jiter-0.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:8d76029f077379374cf0dbc78dbe45b38dec4a2eb78b08b5194ce836b2517afc", size = 202602, upload-time = "2026-02-02T12:36:33.679Z" }, - { url = "https://files.pythonhosted.org/packages/15/fe/13bd3678a311aa67686bb303654792c48206a112068f8b0b21426eb6851e/jiter-0.13.0-cp313-cp313-win_arm64.whl", hash = "sha256:bb7613e1a427cfcb6ea4544f9ac566b93d5bf67e0d48c787eca673ff9c9dff2b", size = 185939, upload-time = "2026-02-02T12:36:35.065Z" }, - { url = "https://files.pythonhosted.org/packages/49/19/a929ec002ad3228bc97ca01dbb14f7632fffdc84a95ec92ceaf4145688ae/jiter-0.13.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fa476ab5dd49f3bf3a168e05f89358c75a17608dbabb080ef65f96b27c19ab10", size = 316616, upload-time = "2026-02-02T12:36:36.579Z" }, - { url = "https://files.pythonhosted.org/packages/52/56/d19a9a194afa37c1728831e5fb81b7722c3de18a3109e8f282bfc23e587a/jiter-0.13.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade8cb6ff5632a62b7dbd4757d8c5573f7a2e9ae285d6b5b841707d8363205ef", size = 346850, upload-time = "2026-02-02T12:36:38.058Z" }, - { url = "https://files.pythonhosted.org/packages/36/4a/94e831c6bf287754a8a019cb966ed39ff8be6ab78cadecf08df3bb02d505/jiter-0.13.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9950290340acc1adaded363edd94baebcee7dabdfa8bee4790794cd5cfad2af6", size = 358551, upload-time = "2026-02-02T12:36:39.417Z" }, - { url = "https://files.pythonhosted.org/packages/a2/ec/a4c72c822695fa80e55d2b4142b73f0012035d9fcf90eccc56bc060db37c/jiter-0.13.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2b4972c6df33731aac0742b64fd0d18e0a69bc7d6e03108ce7d40c85fd9e3e6d", size = 201950, upload-time = "2026-02-02T12:36:40.791Z" }, - { url = "https://files.pythonhosted.org/packages/b6/00/393553ec27b824fbc29047e9c7cd4a3951d7fbe4a76743f17e44034fa4e4/jiter-0.13.0-cp313-cp313t-win_arm64.whl", hash = "sha256:701a1e77d1e593c1b435315ff625fd071f0998c5f02792038a5ca98899261b7d", size = 185852, upload-time = "2026-02-02T12:36:42.077Z" }, - { url = "https://files.pythonhosted.org/packages/6e/f5/f1997e987211f6f9bd71b8083047b316208b4aca0b529bb5f8c96c89ef3e/jiter-0.13.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:cc5223ab19fe25e2f0bf2643204ad7318896fe3729bf12fde41b77bfc4fafff0", size = 308804, upload-time = "2026-02-02T12:36:43.496Z" }, - { url = 
"https://files.pythonhosted.org/packages/cd/8f/5482a7677731fd44881f0204981ce2d7175db271f82cba2085dd2212e095/jiter-0.13.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9776ebe51713acf438fd9b4405fcd86893ae5d03487546dae7f34993217f8a91", size = 318787, upload-time = "2026-02-02T12:36:45.071Z" }, - { url = "https://files.pythonhosted.org/packages/f3/b9/7257ac59778f1cd025b26a23c5520a36a424f7f1b068f2442a5b499b7464/jiter-0.13.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:879e768938e7b49b5e90b7e3fecc0dbec01b8cb89595861fb39a8967c5220d09", size = 353880, upload-time = "2026-02-02T12:36:47.365Z" }, - { url = "https://files.pythonhosted.org/packages/c3/87/719eec4a3f0841dad99e3d3604ee4cba36af4419a76f3cb0b8e2e691ad67/jiter-0.13.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:682161a67adea11e3aae9038c06c8b4a9a71023228767477d683f69903ebc607", size = 366702, upload-time = "2026-02-02T12:36:48.871Z" }, - { url = "https://files.pythonhosted.org/packages/d2/65/415f0a75cf6921e43365a1bc227c565cb949caca8b7532776e430cbaa530/jiter-0.13.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a13b68cd1cd8cc9de8f244ebae18ccb3e4067ad205220ef324c39181e23bbf66", size = 486319, upload-time = "2026-02-02T12:36:53.006Z" }, - { url = "https://files.pythonhosted.org/packages/54/a2/9e12b48e82c6bbc6081fd81abf915e1443add1b13d8fc586e1d90bb02bb8/jiter-0.13.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87ce0f14c6c08892b610686ae8be350bf368467b6acd5085a5b65441e2bf36d2", size = 372289, upload-time = "2026-02-02T12:36:54.593Z" }, - { url = "https://files.pythonhosted.org/packages/4e/c1/e4693f107a1789a239c759a432e9afc592366f04e901470c2af89cfd28e1/jiter-0.13.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c365005b05505a90d1c47856420980d0237adf82f70c4aff7aebd3c1cc143ad", size = 360165, upload-time = "2026-02-02T12:36:56.112Z" }, - { url = "https://files.pythonhosted.org/packages/17/08/91b9ea976c1c758240614bd88442681a87672eebc3d9a6dde476874e706b/jiter-0.13.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1317fdffd16f5873e46ce27d0e0f7f4f90f0cdf1d86bf6abeaea9f63ca2c401d", size = 389634, upload-time = "2026-02-02T12:36:57.495Z" }, - { url = "https://files.pythonhosted.org/packages/18/23/58325ef99390d6d40427ed6005bf1ad54f2577866594bcf13ce55675f87d/jiter-0.13.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c05b450d37ba0c9e21c77fef1f205f56bcee2330bddca68d344baebfc55ae0df", size = 514933, upload-time = "2026-02-02T12:36:58.909Z" }, - { url = "https://files.pythonhosted.org/packages/5b/25/69f1120c7c395fd276c3996bb8adefa9c6b84c12bb7111e5c6ccdcd8526d/jiter-0.13.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:775e10de3849d0631a97c603f996f518159272db00fdda0a780f81752255ee9d", size = 548842, upload-time = "2026-02-02T12:37:00.433Z" }, - { url = "https://files.pythonhosted.org/packages/18/05/981c9669d86850c5fbb0d9e62bba144787f9fba84546ba43d624ee27ef29/jiter-0.13.0-cp314-cp314-win32.whl", hash = "sha256:632bf7c1d28421c00dd8bbb8a3bac5663e1f57d5cd5ed962bce3c73bf62608e6", size = 202108, upload-time = "2026-02-02T12:37:01.718Z" }, - { url = "https://files.pythonhosted.org/packages/8d/96/cdcf54dd0b0341db7d25413229888a346c7130bd20820530905fdb65727b/jiter-0.13.0-cp314-cp314-win_amd64.whl", hash = "sha256:f22ef501c3f87ede88f23f9b11e608581c14f04db59b6a801f354397ae13739f", size = 204027, upload-time = "2026-02-02T12:37:03.075Z" }, - { url = 
"https://files.pythonhosted.org/packages/fb/f9/724bcaaab7a3cd727031fe4f6995cb86c4bd344909177c186699c8dec51a/jiter-0.13.0-cp314-cp314-win_arm64.whl", hash = "sha256:07b75fe09a4ee8e0c606200622e571e44943f47254f95e2436c8bdcaceb36d7d", size = 187199, upload-time = "2026-02-02T12:37:04.414Z" }, - { url = "https://files.pythonhosted.org/packages/62/92/1661d8b9fd6a3d7a2d89831db26fe3c1509a287d83ad7838831c7b7a5c7e/jiter-0.13.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:964538479359059a35fb400e769295d4b315ae61e4105396d355a12f7fef09f0", size = 318423, upload-time = "2026-02-02T12:37:05.806Z" }, - { url = "https://files.pythonhosted.org/packages/4f/3b/f77d342a54d4ebcd128e520fc58ec2f5b30a423b0fd26acdfc0c6fef8e26/jiter-0.13.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e104da1db1c0991b3eaed391ccd650ae8d947eab1480c733e5a3fb28d4313e40", size = 351438, upload-time = "2026-02-02T12:37:07.189Z" }, - { url = "https://files.pythonhosted.org/packages/76/b3/ba9a69f0e4209bd3331470c723c2f5509e6f0482e416b612431a5061ed71/jiter-0.13.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e3a5f0cde8ff433b8e88e41aa40131455420fb3649a3c7abdda6145f8cb7202", size = 364774, upload-time = "2026-02-02T12:37:08.579Z" }, - { url = "https://files.pythonhosted.org/packages/b3/16/6cdb31fa342932602458dbb631bfbd47f601e03d2e4950740e0b2100b570/jiter-0.13.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:57aab48f40be1db920a582b30b116fe2435d184f77f0e4226f546794cedd9cf0", size = 487238, upload-time = "2026-02-02T12:37:10.066Z" }, - { url = "https://files.pythonhosted.org/packages/ed/b1/956cc7abaca8d95c13aa8d6c9b3f3797241c246cd6e792934cc4c8b250d2/jiter-0.13.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7772115877c53f62beeb8fd853cab692dbc04374ef623b30f997959a4c0e7e95", size = 372892, upload-time = "2026-02-02T12:37:11.656Z" }, - { url = "https://files.pythonhosted.org/packages/26/c4/97ecde8b1e74f67b8598c57c6fccf6df86ea7861ed29da84629cdbba76c4/jiter-0.13.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1211427574b17b633cfceba5040de8081e5abf114f7a7602f73d2e16f9fdaa59", size = 360309, upload-time = "2026-02-02T12:37:13.244Z" }, - { url = "https://files.pythonhosted.org/packages/4b/d7/eabe3cf46715854ccc80be2cd78dd4c36aedeb30751dbf85a1d08c14373c/jiter-0.13.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7beae3a3d3b5212d3a55d2961db3c292e02e302feb43fce6a3f7a31b90ea6dfe", size = 389607, upload-time = "2026-02-02T12:37:14.881Z" }, - { url = "https://files.pythonhosted.org/packages/df/2d/03963fc0804e6109b82decfb9974eb92df3797fe7222428cae12f8ccaa0c/jiter-0.13.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:e5562a0f0e90a6223b704163ea28e831bd3a9faa3512a711f031611e6b06c939", size = 514986, upload-time = "2026-02-02T12:37:16.326Z" }, - { url = "https://files.pythonhosted.org/packages/f6/6c/8c83b45eb3eb1c1e18d841fe30b4b5bc5619d781267ca9bc03e005d8fd0a/jiter-0.13.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:6c26a424569a59140fb51160a56df13f438a2b0967365e987889186d5fc2f6f9", size = 548756, upload-time = "2026-02-02T12:37:17.736Z" }, - { url = "https://files.pythonhosted.org/packages/47/66/eea81dfff765ed66c68fd2ed8c96245109e13c896c2a5015c7839c92367e/jiter-0.13.0-cp314-cp314t-win32.whl", hash = "sha256:24dc96eca9f84da4131cdf87a95e6ce36765c3b156fc9ae33280873b1c32d5f6", size = 201196, upload-time = "2026-02-02T12:37:19.101Z" }, - { url = 
"https://files.pythonhosted.org/packages/ff/32/4ac9c7a76402f8f00d00842a7f6b83b284d0cf7c1e9d4227bc95aa6d17fa/jiter-0.13.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0a8d76c7524087272c8ae913f5d9d608bd839154b62c4322ef65723d2e5bb0b8", size = 204215, upload-time = "2026-02-02T12:37:20.495Z" }, - { url = "https://files.pythonhosted.org/packages/f9/8e/7def204fea9f9be8b3c21a6f2dd6c020cf56c7d5ff753e0e23ed7f9ea57e/jiter-0.13.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2c26cf47e2cad140fa23b6d58d435a7c0161f5c514284802f25e87fddfe11024", size = 187152, upload-time = "2026-02-02T12:37:22.124Z" }, - { url = "https://files.pythonhosted.org/packages/79/b3/3c29819a27178d0e461a8571fb63c6ae38be6dc36b78b3ec2876bbd6a910/jiter-0.13.0-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b1cbfa133241d0e6bdab48dcdc2604e8ba81512f6bbd68ec3e8e1357dd3c316c", size = 307016, upload-time = "2026-02-02T12:37:42.755Z" }, - { url = "https://files.pythonhosted.org/packages/eb/ae/60993e4b07b1ac5ebe46da7aa99fdbb802eb986c38d26e3883ac0125c4e0/jiter-0.13.0-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:db367d8be9fad6e8ebbac4a7578b7af562e506211036cba2c06c3b998603c3d2", size = 305024, upload-time = "2026-02-02T12:37:44.774Z" }, - { url = "https://files.pythonhosted.org/packages/77/fa/2227e590e9cf98803db2811f172b2d6460a21539ab73006f251c66f44b14/jiter-0.13.0-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45f6f8efb2f3b0603092401dc2df79fa89ccbc027aaba4174d2d4133ed661434", size = 339337, upload-time = "2026-02-02T12:37:46.668Z" }, - { url = "https://files.pythonhosted.org/packages/2d/92/015173281f7eb96c0ef580c997da8ef50870d4f7f4c9e03c845a1d62ae04/jiter-0.13.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:597245258e6ad085d064780abfb23a284d418d3e61c57362d9449c6c7317ee2d", size = 346395, upload-time = "2026-02-02T12:37:48.09Z" }, - { url = "https://files.pythonhosted.org/packages/80/60/e50fa45dd7e2eae049f0ce964663849e897300433921198aef94b6ffa23a/jiter-0.13.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:3d744a6061afba08dd7ae375dcde870cffb14429b7477e10f67e9e6d68772a0a", size = 305169, upload-time = "2026-02-02T12:37:50.376Z" }, - { url = "https://files.pythonhosted.org/packages/d2/73/a009f41c5eed71c49bec53036c4b33555afcdee70682a18c6f66e396c039/jiter-0.13.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:ff732bd0a0e778f43d5009840f20b935e79087b4dc65bd36f1cd0f9b04b8ff7f", size = 303808, upload-time = "2026-02-02T12:37:52.092Z" }, - { url = "https://files.pythonhosted.org/packages/c4/10/528b439290763bff3d939268085d03382471b442f212dca4ff5f12802d43/jiter-0.13.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab44b178f7981fcaea7e0a5df20e773c663d06ffda0198f1a524e91b2fde7e59", size = 337384, upload-time = "2026-02-02T12:37:53.582Z" }, - { url = "https://files.pythonhosted.org/packages/67/8a/a342b2f0251f3dac4ca17618265d93bf244a2a4d089126e81e4c1056ac50/jiter-0.13.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bb00b6d26db67a05fe3e12c76edc75f32077fb51deed13822dc648fa373bc19", size = 343768, upload-time = "2026-02-02T12:37:55.055Z" }, -] - [[package]] name = "jsbeautifier" version = "1.15.4" @@ -1618,11 +1416,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/fd/d9/eaa1f80170d2b7c5ba23f3b59f766f3a0bb41155fbc32a69adfa1adaaef9/mcp-1.26.0-py3-none-any.whl", hash = "sha256:904a21c33c25aa98ddbeb47273033c435e595bbacfdb177f4bd87f6dceebe1ca", size = 233615, upload-time = "2026-01-24T19:40:30.652Z" }, ] -[package.optional-dependencies] -ws = [ - { name = "websockets" }, -] - [[package]] name = "mdit-py-plugins" version = "0.5.0" @@ -1898,32 +1691,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a4/8e/469e5a4a2f5855992e425f3cb33804cc07bf18d48f2db061aec61ce50270/more_itertools-10.8.0-py3-none-any.whl", hash = "sha256:52d4362373dcf7c52546bc4af9a86ee7c4579df9a8dc268be0a2f949d376cc9b", size = 69667, upload-time = "2025-09-02T15:23:09.635Z" }, ] -[[package]] -name = "msal" -version = "1.35.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cryptography" }, - { name = "pyjwt", extra = ["crypto"] }, - { name = "requests" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/3c/aa/5a646093ac218e4a329391d5a31e5092a89db7d2ef1637a90b82cd0b6f94/msal-1.35.1.tar.gz", hash = "sha256:70cac18ab80a053bff86219ba64cfe3da1f307c74b009e2da57ef040eb1b5656", size = 165658, upload-time = "2026-03-04T23:38:51.812Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/96/86/16815fddf056ca998853c6dc525397edf0b43559bb4073a80d2bc7fe8009/msal-1.35.1-py3-none-any.whl", hash = "sha256:8f4e82f34b10c19e326ec69f44dc6b30171f2f7098f3720ea8a9f0c11832caa3", size = 119909, upload-time = "2026-03-04T23:38:50.452Z" }, -] - -[[package]] -name = "msal-extensions" -version = "1.3.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "msal" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/01/99/5d239b6156eddf761a636bded1118414d161bd6b7b37a9335549ed159396/msal_extensions-1.3.1.tar.gz", hash = "sha256:c5b0fd10f65ef62b5f1d62f4251d51cbcaf003fcedae8c91b040a488614be1a4", size = 23315, upload-time = "2025-03-14T23:51:03.902Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5e/75/bd9b7bb966668920f06b200e84454c8f3566b102183bc55c5473d96cb2b9/msal_extensions-1.3.1-py3-none-any.whl", hash = "sha256:96d3de4d034504e969ac5e85bae8106c8373b5c6568e4c8fa7af2eca9dbe6bca", size = 20583, upload-time = "2025-03-14T23:51:03.016Z" }, -] - [[package]] name = "mypy" version = "1.18.2" @@ -1980,78 +1747,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, ] -[[package]] -name = "openai" -version = "2.26.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "distro" }, - { name = "httpx" }, - { name = "jiter" }, - { name = "pydantic" }, - { name = "sniffio" }, - { name = "tqdm" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/d7/91/2a06c4e9597c338cac1e5e5a8dd6f29e1836fc229c4c523529dca387fda8/openai-2.26.0.tar.gz", hash = "sha256:b41f37c140ae0034a6e92b0c509376d907f3a66109935fba2c1b471a7c05a8fb", size = 666702, upload-time = "2026-03-05T23:17:35.874Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c6/2e/3f73e8ca53718952222cacd0cf7eecc9db439d020f0c1fe7ae717e4e199a/openai-2.26.0-py3-none-any.whl", hash = "sha256:6151bf8f83802f036117f06cc8a57b3a4da60da9926826cc96747888b57f394f", size = 1136409, 
upload-time = "2026-03-05T23:17:34.072Z" }, -] - -[[package]] -name = "opentelemetry-api" -version = "1.40.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "importlib-metadata" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/2c/1d/4049a9e8698361cc1a1aa03a6c59e4fa4c71e0c0f94a30f988a6876a2ae6/opentelemetry_api-1.40.0.tar.gz", hash = "sha256:159be641c0b04d11e9ecd576906462773eb97ae1b657730f0ecf64d32071569f", size = 70851, upload-time = "2026-03-04T14:17:21.555Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/bf/93795954016c522008da367da292adceed71cca6ee1717e1d64c83089099/opentelemetry_api-1.40.0-py3-none-any.whl", hash = "sha256:82dd69331ae74b06f6a874704be0cfaa49a1650e1537d4a813b86ecef7d0ecf9", size = 68676, upload-time = "2026-03-04T14:17:01.24Z" }, -] - -[[package]] -name = "opentelemetry-sdk" -version = "1.40.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "opentelemetry-api" }, - { name = "opentelemetry-semantic-conventions" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/58/fd/3c3125b20ba18ce2155ba9ea74acb0ae5d25f8cd39cfd37455601b7955cc/opentelemetry_sdk-1.40.0.tar.gz", hash = "sha256:18e9f5ec20d859d268c7cb3c5198c8d105d073714db3de50b593b8c1345a48f2", size = 184252, upload-time = "2026-03-04T14:17:31.87Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/c5/6a852903d8bfac758c6dc6e9a68b015d3c33f2f1be5e9591e0f4b69c7e0a/opentelemetry_sdk-1.40.0-py3-none-any.whl", hash = "sha256:787d2154a71f4b3d81f20524a8ce061b7db667d24e46753f32a7bc48f1c1f3f1", size = 141951, upload-time = "2026-03-04T14:17:17.961Z" }, -] - -[[package]] -name = "opentelemetry-semantic-conventions" -version = "0.61b0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "opentelemetry-api" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/6d/c0/4ae7973f3c2cfd2b6e321f1675626f0dab0a97027cc7a297474c9c8f3d04/opentelemetry_semantic_conventions-0.61b0.tar.gz", hash = "sha256:072f65473c5d7c6dc0355b27d6c9d1a679d63b6d4b4b16a9773062cb7e31192a", size = 145755, upload-time = "2026-03-04T14:17:32.664Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b2/37/cc6a55e448deaa9b27377d087da8615a3416d8ad523d5960b78dbeadd02a/opentelemetry_semantic_conventions-0.61b0-py3-none-any.whl", hash = "sha256:fa530a96be229795f8cef353739b618148b0fe2b4b3f005e60e262926c4d38e2", size = 231621, upload-time = "2026-03-04T14:17:19.33Z" }, -] - -[[package]] -name = "opentelemetry-semantic-conventions-ai" -version = "0.4.15" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "opentelemetry-sdk" }, - { name = "opentelemetry-semantic-conventions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e9/75/455c15f8360b475dd31101a87eab316420388486f7941bf019cbf4e63d5b/opentelemetry_semantic_conventions_ai-0.4.15.tar.gz", hash = "sha256:12de172d1e11d21c6e82bbf578c7e8a713589a7fda76af9ed785632564a28b81", size = 18595, upload-time = "2026-03-02T15:36:50.254Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/12/49/819fb212386f77cfd93f81bd916d674f0e735f87c8ac2262ed14e3b852c2/opentelemetry_semantic_conventions_ai-0.4.15-py3-none-any.whl", hash = "sha256:011461f1fba30f27035c49ab3b8344367adc72da0a6c8d3c7428303c6779edc9", size = 5999, upload-time = "2026-03-02T15:36:51.44Z" }, -] - [[package]] name = "packaging" version = "25.0" @@ 
-3100,18 +2795,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/3e/84/1691aae773dccff72c866ad19af7adb12d4fb8b439c8bfb36ffc429c8c27/tox-4.31.0-py3-none-any.whl", hash = "sha256:328f392e6567e46cb0f9b625679456744dde940287dd1b39117627dc4b21d5da", size = 175917, upload-time = "2025-10-09T18:53:51.494Z" }, ] -[[package]] -name = "tqdm" -version = "4.67.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/09/a9/6ba95a270c6f1fbcd8dac228323f2777d886cb206987444e4bce66338dd4/tqdm-4.67.3.tar.gz", hash = "sha256:7d825f03f89244ef73f1d4ce193cb1774a8179fd96f31d7e1dcde62092b960bb", size = 169598, upload-time = "2026-02-03T17:35:53.048Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl", hash = "sha256:ee1e4c0e59148062281c49d80b25b67771a127c85fc9676d3be5f243206826bf", size = 78374, upload-time = "2026-02-03T17:35:50.982Z" }, -] - [[package]] name = "typer" version = "0.19.2" From 786e0fbdb32820f78d5c3ff9265dcbbc5feb2f50 Mon Sep 17 00:00:00 2001 From: jhfnetboy Date: Sun, 15 Mar 2026 21:17:26 +0700 Subject: [PATCH 6/8] fix: support adaptive thinking type and fix beta header handling 1. Add ThinkingConfigAdaptive to the request validation schema to support Claude 4-6+ models that use thinking: {"type": "adaptive"}. 2. Fix anthropic-beta header handling: use minimal required tags (claude-code-20250219, oauth-2025-04-20) and block CLI fallback headers from overwriting them. The fallback data included tags like fine-grained-tool-streaming-2025-05-14 that triggered "long context beta not available" errors for some subscriptions. 
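For illustration only (not part of the diff below), a request body of roughly
this shape is what the extended schema now accepts; the model id is a
placeholder, not a name this patch introduces:

    payload = {
        "model": "claude-opus-4-6",  # placeholder model id
        "max_tokens": 1024,
        "messages": [{"role": "user", "content": "Hello"}],
        # Rejected by the old discriminated union; validates after this patch.
        "thinking": {"type": "adaptive"},
    }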
Co-Authored-By: Claude Opus 4.6 (1M context) --- ccproxy/llms/models/anthropic.py | 9 ++++++++- ccproxy/plugins/claude_api/adapter.py | 9 ++++----- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/ccproxy/llms/models/anthropic.py b/ccproxy/llms/models/anthropic.py index a5b7186e..4544fd84 100644 --- a/ccproxy/llms/models/anthropic.py +++ b/ccproxy/llms/models/anthropic.py @@ -352,8 +352,15 @@ class ThinkingConfigDisabled(ThinkingConfigBase): type: Literal["disabled"] = Field(default="disabled", alias="type") +class ThinkingConfigAdaptive(ThinkingConfigBase): + """Configuration for adaptive thinking (Claude 4-6+).""" + + type: Literal["adaptive"] = Field(default="adaptive", alias="type") + + ThinkingConfig = Annotated[ - ThinkingConfigEnabled | ThinkingConfigDisabled, Field(discriminator="type") + ThinkingConfigEnabled | ThinkingConfigDisabled | ThinkingConfigAdaptive, + Field(discriminator="type"), ] diff --git a/ccproxy/plugins/claude_api/adapter.py b/ccproxy/plugins/claude_api/adapter.py index 94a68b25..b012e1c9 100644 --- a/ccproxy/plugins/claude_api/adapter.py +++ b/ccproxy/plugins/claude_api/adapter.py @@ -85,15 +85,14 @@ async def prepare_provider_request( # Always set Authorization from OAuth-managed access token filtered_headers["authorization"] = f"Bearer {token_value}" - # PATCH: Add Computer Use beta headers for Anthropic API - # These are required for browser automation tools to work + # Minimal beta tags required for OAuth-based Claude Code auth filtered_headers["anthropic-version"] = "2023-06-01" - filtered_headers["anthropic-beta"] = "computer-use-2025-01-24" + filtered_headers["anthropic-beta"] = "claude-code-20250219,oauth-2025-04-20" - # Add CLI headers if available, but never allow overriding auth + # Add CLI headers if available, but never allow overriding auth or beta cli_headers = self._collect_cli_headers() if cli_headers: - blocked_overrides = {"authorization", "x-api-key"} + blocked_overrides = {"authorization", "x-api-key", "anthropic-beta"} for key, value in cli_headers.items(): lk = key.lower() if lk in blocked_overrides: From ee8a3cf8fb07c1072e66c553dd8b7a396b5829e0 Mon Sep 17 00:00:00 2001 From: Caddy Glow Date: Thu, 19 Mar 2026 20:58:47 +0100 Subject: [PATCH 7/8] feat(anthropic): add optional display field to adaptive thinking config Support summarized/omitted display modes for adaptive thinking responses. --- ccproxy/llms/models/anthropic.py | 1 + 1 file changed, 1 insertion(+) diff --git a/ccproxy/llms/models/anthropic.py b/ccproxy/llms/models/anthropic.py index 4544fd84..34da760b 100644 --- a/ccproxy/llms/models/anthropic.py +++ b/ccproxy/llms/models/anthropic.py @@ -356,6 +356,7 @@ class ThinkingConfigAdaptive(ThinkingConfigBase): """Configuration for adaptive thinking (Claude 4-6+).""" type: Literal["adaptive"] = Field(default="adaptive", alias="type") + display: Literal["summarized", "omitted"] | None = None ThinkingConfig = Annotated[ From 50a0b62d4867e5991ab41d6e344ec2020760c6aa Mon Sep 17 00:00:00 2001 From: dabogee Date: Fri, 20 Mar 2026 11:58:38 +0200 Subject: [PATCH 8/8] trigger a build --- .ccproxy.codex.msaf.toml.example | 1 + 1 file changed, 1 insertion(+) diff --git a/.ccproxy.codex.msaf.toml.example b/.ccproxy.codex.msaf.toml.example index 2e890709..40f2cd8f 100644 --- a/.ccproxy.codex.msaf.toml.example +++ b/.ccproxy.codex.msaf.toml.example @@ -21,6 +21,7 @@ preferred_upstream_mode = "streaming" buffer_non_streaming = true enable_format_registry = true + # Microsoft Agent Framework sends its own instructions/reasoning payloads. 
# Do not prepend captured Codex CLI templates to generic OpenAI-compatible calls.
inject_detection_payload = false
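
Note on the schema change in PATCH 6/8 and PATCH 7/8: the sketch below is a
standalone illustration, not code from this series. It shows how a Pydantic
discriminated union like ThinkingConfig dispatches on the "type" field, which
is why a Claude 4-6+ body with thinking: {"type": "adaptive"} now validates
instead of being rejected. Class and field names mirror
ccproxy/llms/models/anthropic.py, but the TypeAdapter harness and the
budget_tokens field on the enabled variant are illustrative assumptions.

    from typing import Annotated, Literal

    from pydantic import BaseModel, Field, TypeAdapter

    class ThinkingConfigEnabled(BaseModel):
        type: Literal["enabled"] = "enabled"
        budget_tokens: int  # assumed required here, for illustration only

    class ThinkingConfigDisabled(BaseModel):
        type: Literal["disabled"] = "disabled"

    class ThinkingConfigAdaptive(BaseModel):
        # Claude 4-6+ adaptive thinking, with the optional display mode
        # added in PATCH 7/8.
        type: Literal["adaptive"] = "adaptive"
        display: Literal["summarized", "omitted"] | None = None

    # Pydantic selects the variant by the "type" discriminator before
    # running field validation on that single model.
    ThinkingConfig = Annotated[
        ThinkingConfigEnabled | ThinkingConfigDisabled | ThinkingConfigAdaptive,
        Field(discriminator="type"),
    ]

    adapter = TypeAdapter(ThinkingConfig)
    cfg = adapter.validate_python({"type": "adaptive", "display": "summarized"})
    print(type(cfg).__name__)  # ThinkingConfigAdaptive

The discriminator also keeps validation errors scoped: a body like
thinking: {"type": "enabled"} with no budget_tokens fails only against
the matched enabled variant, rather than producing overlapping errors
from every member of the union.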