diff --git a/agent/core/model_switcher.py b/agent/core/model_switcher.py index afb8d52c..43a13509 100644 --- a/agent/core/model_switcher.py +++ b/agent/core/model_switcher.py @@ -26,6 +26,7 @@ SUGGESTED_MODELS = [ {"id": "bedrock/us.anthropic.claude-opus-4-7", "label": "Claude Opus 4.7"}, {"id": "bedrock/us.anthropic.claude-opus-4-6-v1", "label": "Claude Opus 4.6"}, + {"id": "openai/gpt-5.5", "label": "GPT-5.5"}, {"id": "MiniMaxAI/MiniMax-M2.7", "label": "MiniMax M2.7"}, {"id": "moonshotai/Kimi-K2.6", "label": "Kimi K2.6"}, {"id": "zai-org/GLM-5.1", "label": "GLM 5.1"}, diff --git a/agent/core/session.py b/agent/core/session.py index 4b6390d8..d6e095ff 100644 --- a/agent/core/session.py +++ b/agent/core/session.py @@ -16,6 +16,9 @@ logger = logging.getLogger(__name__) _DEFAULT_MAX_TOKENS = 200_000 +_LOCAL_MAX_TOKENS: dict[str, int] = { + "openai/gpt-5.5": 1_000_000, +} def _get_max_tokens_safe(model_name: str) -> int: @@ -30,6 +33,9 @@ def _get_max_tokens_safe(model_name: str) -> int: """ from litellm import get_model_info + if model_name in _LOCAL_MAX_TOKENS: + return _LOCAL_MAX_TOKENS[model_name] + candidates = [model_name] stripped = model_name.removeprefix("huggingface/").split(":", 1)[0] if stripped != model_name: diff --git a/backend/routes/agent.py b/backend/routes/agent.py index 7f577995..66785475 100644 --- a/backend/routes/agent.py +++ b/backend/routes/agent.py @@ -53,6 +53,13 @@ "tier": "pro", "recommended": True, }, + { + "id": "openai/gpt-5.5", + "label": "GPT-5.5", + "provider": "openai", + "tier": "pro", + "recommended": True, + }, { "id": "MiniMaxAI/MiniMax-M2.7", "label": "MiniMax M2.7", @@ -692,4 +699,3 @@ async def shutdown_session( raise HTTPException(status_code=404, detail="Session not found or inactive") return {"status": "shutdown_requested", "session_id": session_id} - diff --git a/frontend/src/components/Chat/ChatInput.tsx b/frontend/src/components/Chat/ChatInput.tsx index d9fe5c4d..dfc2bb7d 100644 --- 
a/frontend/src/components/Chat/ChatInput.tsx +++ b/frontend/src/components/Chat/ChatInput.tsx @@ -41,6 +41,14 @@ const MODEL_OPTIONS: ModelOption[] = [ avatarUrl: 'https://huggingface.co/api/avatars/Anthropic', recommended: true, }, + { + id: 'gpt-5.5', + name: 'GPT-5.5', + description: 'OpenAI', + modelPath: 'openai/gpt-5.5', + avatarUrl: 'https://openai.com/favicon.ico', + recommended: true, + }, { id: 'minimax-m2.7', name: 'MiniMax M2.7',