From 0cb3da356d1c3cb268d75ff341964e0414d01d83 Mon Sep 17 00:00:00 2001 From: Farouk Boukil Date: Sat, 7 Feb 2026 17:15:00 +0100 Subject: [PATCH 1/4] add: anthropic llms --- .../json_spec/agentspec_json_spec_26_2_0.json | 114 +++++++++++++++++- pyagentspec/setup.py | 1 + .../src/pyagentspec/_component_registry.py | 2 + .../adapters/langgraph/_agentspecconverter.py | 10 ++ .../adapters/langgraph/_langgraphconverter.py | 20 +++ .../pyagentspec/adapters/langgraph/_types.py | 3 + pyagentspec/src/pyagentspec/llms/__init__.py | 2 + .../src/pyagentspec/llms/anthropicconfig.py | 25 ++++ .../llm/test_anthropic_llm_conversion.py | 68 +++++++++++ 9 files changed, 244 insertions(+), 1 deletion(-) create mode 100644 pyagentspec/src/pyagentspec/llms/anthropicconfig.py create mode 100644 pyagentspec/tests/adapters/langgraph/llm/test_anthropic_llm_conversion.py diff --git a/docs/pyagentspec/source/agentspec/json_spec/agentspec_json_spec_26_2_0.json b/docs/pyagentspec/source/agentspec/json_spec/agentspec_json_spec_26_2_0.json index 0b90737fb..37a9cfd2f 100644 --- a/docs/pyagentspec/source/agentspec/json_spec/agentspec_json_spec_26_2_0.json +++ b/docs/pyagentspec/source/agentspec/json_spec/agentspec_json_spec_26_2_0.json @@ -82,6 +82,16 @@ } ] }, + "AnthropicLlmConfig": { + "anyOf": [ + { + "$ref": "#/$defs/BaseAnthropicLlmConfig" + }, + { + "$ref": "#/$defs/ComponentReference" + } + ] + }, "ApiNode": { "anyOf": [ { @@ -1385,6 +1395,84 @@ ], "x-abstract-component": true }, + "BaseAnthropicLlmConfig": { + "additionalProperties": false, + "description": "Class to configure a connection to an Anthropic Claude model.\n\nRequires to specify the model identity. 
The API key and endpoint are optional\nand may be provided by the runtime environment.", + "properties": { + "id": { + "title": "Id", + "type": "string" + }, + "name": { + "title": "Name", + "type": "string" + }, + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "title": "Description" + }, + "metadata": { + "anyOf": [ + { + "additionalProperties": true, + "type": "object" + }, + { + "type": "null" + } + ], + "title": "Metadata" + }, + "default_generation_parameters": { + "anyOf": [ + { + "type": "null" + }, + { + "$ref": "#/$defs/LlmGenerationConfig" + } + ], + "default": null + }, + "model_id": { + "title": "Model Id", + "type": "string" + }, + "base_url": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "title": "Base Url" + }, + "$referenced_components": { + "$ref": "#/$defs/ReferencedComponents" + }, + "component_type": { + "const": "AnthropicLlmConfig" + } + }, + "required": [ + "model_id", + "name" + ], + "title": "AnthropicLlmConfig", + "type": "object", + "x-abstract-component": false + }, "BaseApiNode": { "additionalProperties": false, "description": "Make an API call.\n\nThis node is intended to be a part of a Flow.\n\n- **Inputs**\n Inferred from the json spec retrieved from API Spec URI, if available and reachable.\n Otherwise, users have to manually specify them.\n- **Outputs**\n Inferred from the json spec retrieved from API Spec URI, if available and reachable.\n Otherwise, users should manually specify them.\n\n If None is given, ``pyagentspec`` infers a generic property of any type named ``response``.\n- **Branches**\n One, the default next.\n\n\nExamples\n--------\n>>> from pyagentspec.flows.nodes import ApiNode\n>>> from pyagentspec.property import Property\n>>> weather_result_property = Property(\n... json_schema={\n... \"title\": \"zurich_weather\",\n... \"type\": \"object\",\n... \"properties\": {\n... \"temperature\": {\n... 
\"type\": \"number\",\n... \"description\": \"Temperature in celsius degrees\",\n... },\n... \"weather\": {\"type\": \"string\"}\n... },\n... }\n... )\n>>> call_current_weather_step = ApiNode(\n... name=\"Weather API call node\",\n... url=\"https://example.com/weather\",\n... http_method = \"GET\",\n... query_params={\n... \"location\": \"zurich\",\n... },\n... outputs=[weather_result_property]\n... )\n>>>\n>>> item_id_property = Property(\n... json_schema={\"title\": \"item_id\", \"type\": \"string\"}\n... )\n>>> order_id_property = Property(\n... json_schema={\"title\": \"order_id\", \"type\": \"string\"}\n... )\n>>> store_id_property = Property(\n... json_schema={\"title\": \"store_id\", \"type\": \"string\"}\n... )\n>>> session_id_property = Property(\n... json_schema={\"title\": \"session_id\", \"type\": \"string\"}\n... )\n>>> create_order_step = ApiNode(\n... name=\"Orders api call node\",\n... url=\"https://example.com/orders/{{ order_id }}\",\n... http_method=\"POST\",\n... # sending an object which will automatically be transformed into JSON\n... data={\n... # define a static body parameter\n... \"topic_id\": 12345,\n... # define a templated body parameter.\n... # The value for {{ item_id }} will be taken from the IO system at runtime\n... \"item_id\": \"{{ item_id }}\",\n... },\n... query_params={\n... # provide one templated query parameter called \"store_id\"\n... # which will take its value from the IO system from key \"store_id\"\n... \"store_id\": \"{{ store_id }}\",\n... },\n... headers={\n... # set header session_id. the value is coming from the IO system\n... \"session_id\": \"{{ session_id }}\",\n... },\n... inputs=[item_id_property, order_id_property, store_id_property, session_id_property],\n... 
)", @@ -2779,6 +2867,9 @@ }, "BaseLlmConfig": { "anyOf": [ + { + "$ref": "#/$defs/AnthropicLlmConfig" + }, { "$ref": "#/$defs/OciGenAiConfig" }, @@ -6259,6 +6350,9 @@ { "$ref": "#/$defs/BaseAgenticComponent" }, + { + "$ref": "#/$defs/BaseAnthropicLlmConfig" + }, { "$ref": "#/$defs/BaseApiNode" }, @@ -6580,6 +6674,21 @@ } } }, + "VersionedAnthropicLlmConfig": { + "anyOf": [ + { + "$ref": "#/$defs/BaseAnthropicLlmConfig" + }, + { + "$ref": "#/$defs/ComponentReference" + } + ], + "properties": { + "agentspec_version": { + "$ref": "#/$defs/AgentSpecVersionEnum" + } + } + }, "VersionedApiNode": { "anyOf": [ { @@ -7504,6 +7613,9 @@ { "$ref": "#/$defs/VersionedAgenticComponent" }, + { + "$ref": "#/$defs/VersionedAnthropicLlmConfig" + }, { "$ref": "#/$defs/VersionedApiNode" }, @@ -7685,4 +7797,4 @@ "$ref": "#/$defs/VersionedVllmConfig" } ] -} +} \ No newline at end of file diff --git a/pyagentspec/setup.py b/pyagentspec/setup.py index 6f2843d21..6786ad716 100644 --- a/pyagentspec/setup.py +++ b/pyagentspec/setup.py @@ -30,6 +30,7 @@ def read(file_name): "langchain>=1.2.0", "langchain-openai>=1.1.7", "langchain-ollama>=1.0.1", + "langchain-anthropic>=1.3.2,<2.0.0", "anyio>=4.10.0,<4.12.0", "langgraph-checkpoint>=3.0.1,<4.0.0", # To mitigate CVE-2025-64439 ] diff --git a/pyagentspec/src/pyagentspec/_component_registry.py b/pyagentspec/src/pyagentspec/_component_registry.py index 1e1315994..64ae2b310 100644 --- a/pyagentspec/src/pyagentspec/_component_registry.py +++ b/pyagentspec/src/pyagentspec/_component_registry.py @@ -43,6 +43,7 @@ ToolNode, ) from pyagentspec.llms import ( + AnthropicLlmConfig, OciGenAiConfig, OllamaConfig, OpenAiCompatibleConfig, @@ -87,6 +88,7 @@ "AgenticComponent": AgenticComponent, "AgentNode": AgentNode, "AgentSpecializationParameters": AgentSpecializationParameters, + "AnthropicLlmConfig": AnthropicLlmConfig, "ApiNode": ApiNode, "BranchingNode": BranchingNode, "CatchExceptionNode": CatchExceptionNode, diff --git 
a/pyagentspec/src/pyagentspec/adapters/langgraph/_agentspecconverter.py b/pyagentspec/src/pyagentspec/adapters/langgraph/_agentspecconverter.py index 358105a24..b4ebfb7dc 100644 --- a/pyagentspec/src/pyagentspec/adapters/langgraph/_agentspecconverter.py +++ b/pyagentspec/src/pyagentspec/adapters/langgraph/_agentspecconverter.py @@ -8,6 +8,8 @@ from types import FunctionType from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Union, cast +from pydantic import SecretStr + from pyagentspec import Property from pyagentspec.adapters.langgraph._agentspec_converter_flow import ( _langgraph_graph_convert_to_agentspec, @@ -20,11 +22,13 @@ StateNodeSpec, StructuredTool, SystemMessage, + langchain_anthropic, langchain_ollama, langchain_openai, ) from pyagentspec.agent import Agent as AgentSpecAgent from pyagentspec.component import Component as AgentSpecComponent +from pyagentspec.llms import AnthropicLlmConfig as AgentSpecAnthropicLlmConfig from pyagentspec.llms import LlmConfig as AgentSpecLlmConfig from pyagentspec.llms import OllamaConfig as AgentSpecOllamaConfig from pyagentspec.llms import OpenAiCompatibleConfig as AgentSpecOpenAiCompatibleConfig @@ -160,6 +164,12 @@ def _basechatmodel_convert_to_agentspec(self, model: BaseChatModel) -> AgentSpec model_id=model.model_name, api_type=api_type, ) + elif isinstance(model, langchain_anthropic.ChatAnthropic): + return AgentSpecAnthropicLlmConfig( + name=model.model, + model_id=model.model, + base_url=model.anthropic_api_url, + ) raise ValueError(f"The LLM instance provided is of an unsupported type `{type(model)}`.") def _langgraph_agent_convert_to_agentspec( diff --git a/pyagentspec/src/pyagentspec/adapters/langgraph/_langgraphconverter.py b/pyagentspec/src/pyagentspec/adapters/langgraph/_langgraphconverter.py index 7ea0e00a4..f2b1281c0 100644 --- a/pyagentspec/src/pyagentspec/adapters/langgraph/_langgraphconverter.py +++ b/pyagentspec/src/pyagentspec/adapters/langgraph/_langgraphconverter.py @@ -72,6 +72,7 
@@ from pyagentspec.flows.nodes import OutputMessageNode as AgentSpecOutputMessageNode from pyagentspec.flows.nodes import StartNode as AgentSpecStartNode from pyagentspec.flows.nodes import ToolNode as AgentSpecToolNode +from pyagentspec.llms.anthropicconfig import AnthropicLlmConfig from pyagentspec.llms.llmconfig import LlmConfig as AgentSpecLlmConfig from pyagentspec.llms.ollamaconfig import OllamaConfig from pyagentspec.llms.openaicompatibleconfig import OpenAIAPIType, OpenAiCompatibleConfig @@ -1240,6 +1241,25 @@ def _llm_convert_to_langgraph( callbacks=callbacks, **generation_config, ) + elif isinstance(llm_config, AnthropicLlmConfig): + from langchain_anthropic import ChatAnthropic + + anthropic_generation_config: dict[str, Any] = { + "temperature": generation_config.get("temperature"), + "max_tokens": generation_config.get("max_completion_tokens"), + "top_p": generation_config.get("top_p"), + } + anthropic_generation_config = { + k: v for k, v in anthropic_generation_config.items() if v is not None + } + anthropic_kwargs: Dict[str, Any] = { + "model": llm_config.model_id, + "callbacks": callbacks, + **anthropic_generation_config, + } + if llm_config.base_url: + anthropic_kwargs["base_url"] = llm_config.base_url + return ChatAnthropic(**anthropic_kwargs) else: raise NotImplementedError( f"Llm model of type {llm_config.__class__.__name__} is not yet supported." diff --git a/pyagentspec/src/pyagentspec/adapters/langgraph/_types.py b/pyagentspec/src/pyagentspec/adapters/langgraph/_types.py index 70826271d..8140e8cb9 100644 --- a/pyagentspec/src/pyagentspec/adapters/langgraph/_types.py +++ b/pyagentspec/src/pyagentspec/adapters/langgraph/_types.py @@ -17,6 +17,7 @@ # Otherwise, importing the module when they are not installed would lead to an import error. 
import langchain.agents as langchain_agents + import langchain_anthropic import langchain_core.messages.content as langchain_core_messages_content import langchain_ollama import langchain_openai @@ -44,6 +45,7 @@ langgraph = LazyLoader("langgraph") langchain_ollama = LazyLoader("langchain_ollama") langchain_openai = LazyLoader("langchain_openai") + langchain_anthropic = LazyLoader("langchain_anthropic") langgraph_graph = LazyLoader("langgraph.graph") langgraph_types = LazyLoader("langgraph.types") langgraph_prebuilt = LazyLoader("langgraph.prebuilt") @@ -134,6 +136,7 @@ class FlowOutputSchema(TypedDict): "langchain_core_messages_content", "langgraph_prebuilt", "langchain_agents", + "langchain_anthropic", "langchain_ollama", "langchain_openai", "LangGraphTool", diff --git a/pyagentspec/src/pyagentspec/llms/__init__.py b/pyagentspec/src/pyagentspec/llms/__init__.py index 4f5469eb4..eb5b6b19c 100644 --- a/pyagentspec/src/pyagentspec/llms/__init__.py +++ b/pyagentspec/src/pyagentspec/llms/__init__.py @@ -6,6 +6,7 @@ """Define LLM configurations abstraction and concrete classes for connecting to vLLM or OCI.""" +from .anthropicconfig import AnthropicLlmConfig from .llmconfig import LlmConfig from .llmgenerationconfig import LlmGenerationConfig from .ocigenaiconfig import OciGenAiConfig @@ -15,6 +16,7 @@ from .vllmconfig import VllmConfig __all__ = [ + "AnthropicLlmConfig", "LlmConfig", "LlmGenerationConfig", "VllmConfig", diff --git a/pyagentspec/src/pyagentspec/llms/anthropicconfig.py b/pyagentspec/src/pyagentspec/llms/anthropicconfig.py new file mode 100644 index 000000000..2364e30d5 --- /dev/null +++ b/pyagentspec/src/pyagentspec/llms/anthropicconfig.py @@ -0,0 +1,25 @@ +# Copyright © 2025 Oracle and/or its affiliates. +# +# This software is under the Apache License 2.0 +# (LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0) or Universal Permissive License +# (UPL) 1.0 (LICENSE-UPL or https://oss.oracle.com/licenses/upl), at your option. 
+ +"""Defines the class for configuring how to connect to Anthropic Claude models.""" + +from pyagentspec.llms.llmconfig import LlmConfig + + +class AnthropicLlmConfig(LlmConfig): + """ + Class to configure a connection to an Anthropic Claude model. + + Requires to specify the model identity. The API key and endpoint are optional + and may be provided by the runtime environment. + """ + + model_id: str + """ID of the model to use.""" + + base_url: str | None = None + """Base URL of the Anthropic API. + If not provided, the default Anthropic API base URL will be used.""" diff --git a/pyagentspec/tests/adapters/langgraph/llm/test_anthropic_llm_conversion.py b/pyagentspec/tests/adapters/langgraph/llm/test_anthropic_llm_conversion.py new file mode 100644 index 000000000..7589aa732 --- /dev/null +++ b/pyagentspec/tests/adapters/langgraph/llm/test_anthropic_llm_conversion.py @@ -0,0 +1,68 @@ +# Copyright © 2025, 2026 Oracle and/or its affiliates. +# +# This software is under the Apache License 2.0 +# (LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0) or Universal Permissive License +# (UPL) 1.0 (LICENSE-UPL or https://oss.oracle.com/licenses/upl), at your option. 
+ +import pytest + + +def test_agentspec_to_langgraph_converts_anthropic_llm_config_to_chat_anthropic() -> None: + pytest.importorskip("langchain_anthropic") + + from langchain_anthropic import ChatAnthropic + from langchain_core.runnables import RunnableConfig + + from pyagentspec.adapters.langgraph._langgraphconverter import AgentSpecToLangGraphConverter + from pyagentspec.llms import AnthropicLlmConfig, LlmGenerationConfig + + model_id: str = "test-anthropic-model" + base_url: str = "https://api.test-anthropic.com" + max_tokens: int = 123 + temperature: float = 0.7 + top_p: float = 0.9 + + agentspec_config = AnthropicLlmConfig( + name="test-name", + model_id=model_id, + base_url=base_url, + default_generation_parameters=LlmGenerationConfig( + max_tokens=max_tokens, + temperature=temperature, + top_p=top_p, + ), + ) + + model = AgentSpecToLangGraphConverter()._llm_convert_to_langgraph( + agentspec_config, RunnableConfig({}) + ) + + assert isinstance(model, ChatAnthropic) + assert model.model == model_id + assert model.anthropic_api_url == base_url + assert model.max_tokens == max_tokens + assert model.temperature == temperature + assert model.top_p == top_p + + +def test_langgraph_to_agentspec_converts_chat_anthropic_to_anthropic_llm_config() -> None: + pytest.importorskip("langchain_anthropic") + + from langchain_anthropic import ChatAnthropic + + from pyagentspec.adapters.langgraph._agentspecconverter import LangGraphToAgentSpecConverter + from pyagentspec.llms import AnthropicLlmConfig + + model_id: str = "test-anthropic-model" + base_url: str = "https://api.test-anthropic.com" + + model = ChatAnthropic( + model=model_id, + base_url=base_url, + ) + + agentspec_config = LangGraphToAgentSpecConverter().convert(model) + assert isinstance(agentspec_config, AnthropicLlmConfig) + assert agentspec_config.name == model_id + assert agentspec_config.model_id == model_id + assert agentspec_config.base_url == base_url From 65c669525dc03ddd035545e2ed9beddc4dbf5038 Mon Sep 
17 00:00:00 2001 From: Farouk Boukil Date: Mon, 9 Feb 2026 22:08:21 +0100 Subject: [PATCH 2/4] fix: address comments --- .../agentspec/json_spec/agentspec_json_spec_26_2_0.json | 2 +- pyagentspec/src/pyagentspec/llms/anthropicconfig.py | 5 ++--- .../langgraph/{llm => llms}/test_anthropic_llm_conversion.py | 0 3 files changed, 3 insertions(+), 4 deletions(-) rename pyagentspec/tests/adapters/langgraph/{llm => llms}/test_anthropic_llm_conversion.py (100%) diff --git a/docs/pyagentspec/source/agentspec/json_spec/agentspec_json_spec_26_2_0.json b/docs/pyagentspec/source/agentspec/json_spec/agentspec_json_spec_26_2_0.json index 37a9cfd2f..c3eba22cf 100644 --- a/docs/pyagentspec/source/agentspec/json_spec/agentspec_json_spec_26_2_0.json +++ b/docs/pyagentspec/source/agentspec/json_spec/agentspec_json_spec_26_2_0.json @@ -1397,7 +1397,7 @@ }, "BaseAnthropicLlmConfig": { "additionalProperties": false, - "description": "Class to configure a connection to an Anthropic Claude model.\n\nRequires to specify the model identity. The API key and endpoint are optional\nand may be provided by the runtime environment.", + "description": "Class to configure a connection to an Anthropic Claude model.\n\nRequires specifying the model identity. The API key and endpoint are provided by the runtime environment.", "properties": { "id": { "title": "Id", diff --git a/pyagentspec/src/pyagentspec/llms/anthropicconfig.py b/pyagentspec/src/pyagentspec/llms/anthropicconfig.py index 2364e30d5..8e5969059 100644 --- a/pyagentspec/src/pyagentspec/llms/anthropicconfig.py +++ b/pyagentspec/src/pyagentspec/llms/anthropicconfig.py @@ -13,12 +13,11 @@ class AnthropicLlmConfig(LlmConfig): """ Class to configure a connection to an Anthropic Claude model. - Requires to specify the model identity. The API key and endpoint are optional - and may be provided by the runtime environment. + Requires specifying the model identity. The API key and endpoint are provided by the runtime environment.
""" model_id: str - """ID of the model to use.""" + """ID of the Anthropic model to use, e.g., claude-haiku-4-5-20251001.""" base_url: str | None = None """Base URL of the Anthropic API. diff --git a/pyagentspec/tests/adapters/langgraph/llm/test_anthropic_llm_conversion.py b/pyagentspec/tests/adapters/langgraph/llms/test_anthropic_llm_conversion.py similarity index 100% rename from pyagentspec/tests/adapters/langgraph/llm/test_anthropic_llm_conversion.py rename to pyagentspec/tests/adapters/langgraph/llms/test_anthropic_llm_conversion.py From 6a0529431b8f53b5497c92b00f33fab72985c928 Mon Sep 17 00:00:00 2001 From: Farouk Boukil Date: Mon, 9 Feb 2026 22:12:58 +0100 Subject: [PATCH 3/4] fix: rem unnecessary ret type --- .../adapters/langgraph/llms/test_anthropic_llm_conversion.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyagentspec/tests/adapters/langgraph/llms/test_anthropic_llm_conversion.py b/pyagentspec/tests/adapters/langgraph/llms/test_anthropic_llm_conversion.py index 7589aa732..9a0bd2671 100644 --- a/pyagentspec/tests/adapters/langgraph/llms/test_anthropic_llm_conversion.py +++ b/pyagentspec/tests/adapters/langgraph/llms/test_anthropic_llm_conversion.py @@ -7,7 +7,7 @@ import pytest -def test_agentspec_to_langgraph_converts_anthropic_llm_config_to_chat_anthropic() -> None: +def test_agentspec_to_langgraph_converts_anthropic_llm_config_to_chat_anthropic(): pytest.importorskip("langchain_anthropic") from langchain_anthropic import ChatAnthropic @@ -45,7 +45,7 @@ def test_agentspec_to_langgraph_converts_anthropic_llm_config_to_chat_anthropic( assert model.top_p == top_p -def test_langgraph_to_agentspec_converts_chat_anthropic_to_anthropic_llm_config() -> None: +def test_langgraph_to_agentspec_converts_chat_anthropic_to_anthropic_llm_config(): pytest.importorskip("langchain_anthropic") from langchain_anthropic import ChatAnthropic From 66e9e1758b9278938c2f711d06e74acb4d5140d9 Mon Sep 17 00:00:00 2001 From: Farouk Boukil Date: Tue, 10 
Feb 2026 22:31:35 +0100 Subject: [PATCH 4/4] fix: address comments --- .../source/_components/llm_config_tabs.rst | 11 ++++++ .../json_spec/agentspec_json_spec_26_2_0.json | 20 +++++++++-- docs/pyagentspec/source/api/llmmodels.rst | 7 ++++ .../howto_llm_from_different_providers.py | 25 ++++++++++++++ .../howto_llm_from_different_providers.rst | 34 +++++++++++++++++++ .../adapters/langgraph/_agentspecconverter.py | 2 +- .../adapters/langgraph/_langgraphconverter.py | 6 ++-- .../src/pyagentspec/llms/anthropicconfig.py | 15 ++++++-- .../llms/test_anthropic_llm_conversion.py | 19 +++++++---- 9 files changed, 124 insertions(+), 15 deletions(-) diff --git a/docs/pyagentspec/source/_components/llm_config_tabs.rst b/docs/pyagentspec/source/_components/llm_config_tabs.rst index d26b9ce04..b3f100d20 100644 --- a/docs/pyagentspec/source/_components/llm_config_tabs.rst +++ b/docs/pyagentspec/source/_components/llm_config_tabs.rst @@ -60,3 +60,14 @@ name="Ollama Config", model_id="model-id", ) + + .. tab:: Anthropic + + .. 
code-block:: python + + from pyagentspec.llms import AnthropicLlmConfig + + llm_config = AnthropicLlmConfig( + name="Anthropic Config", + model_id="claude-haiku-4-5-20251001", + ) diff --git a/docs/pyagentspec/source/agentspec/json_spec/agentspec_json_spec_26_2_0.json b/docs/pyagentspec/source/agentspec/json_spec/agentspec_json_spec_26_2_0.json index c3eba22cf..3a389e1cb 100644 --- a/docs/pyagentspec/source/agentspec/json_spec/agentspec_json_spec_26_2_0.json +++ b/docs/pyagentspec/source/agentspec/json_spec/agentspec_json_spec_26_2_0.json @@ -1446,7 +1446,7 @@ "title": "Model Id", "type": "string" }, - "base_url": { + "url": { "anyOf": [ { "type": "string" @@ -1456,7 +1456,21 @@ } ], "default": null, - "title": "Base Url" + "title": "Url" + }, + "api_key": { + "anyOf": [ + { + "format": "password", + "type": "string", + "writeOnly": true + }, + { + "type": "null" + } + ], + "default": null, + "title": "Api Key" }, "$referenced_components": { "$ref": "#/$defs/ReferencedComponents" @@ -7797,4 +7811,4 @@ "$ref": "#/$defs/VersionedVllmConfig" } ] -} \ No newline at end of file +} diff --git a/docs/pyagentspec/source/api/llmmodels.rst b/docs/pyagentspec/source/api/llmmodels.rst index b4eb69ecd..e6c393982 100644 --- a/docs/pyagentspec/source/api/llmmodels.rst +++ b/docs/pyagentspec/source/api/llmmodels.rst @@ -66,6 +66,13 @@ OpenAI Models .. autoclass:: pyagentspec.llms.openaiconfig.OpenAiConfig :exclude-members: model_post_init, model_config +Anthropic Models +^^^^^^^^^^^^^^^^ + +.. _anthropicllmconfig: +.. 
autoclass:: pyagentspec.llms.anthropicconfig.AnthropicLlmConfig + :exclude-members: model_post_init, model_config + OciGenAi Models ^^^^^^^^^^^^^^^ diff --git a/docs/pyagentspec/source/code_examples/howto_llm_from_different_providers.py b/docs/pyagentspec/source/code_examples/howto_llm_from_different_providers.py index d625c90c3..de060c10c 100644 --- a/docs/pyagentspec/source/code_examples/howto_llm_from_different_providers.py +++ b/docs/pyagentspec/source/code_examples/howto_llm_from_different_providers.py @@ -82,6 +82,20 @@ ) # .. openai-end +# .. anthropic-start +from pyagentspec.llms import AnthropicLlmConfig + +generation_config = LlmGenerationConfig(max_tokens=256, temperature=0.7, top_p=0.95) + +llm = AnthropicLlmConfig( + name="anthropic-claude-haiku", + model_id="claude-haiku-4-5-20251001", + url="https://api.anthropic.com", + api_key="optional_api_key", + default_generation_parameters=generation_config, +) +# .. anthropic-end + # .. ollama-start from pyagentspec.llms import OllamaConfig @@ -148,6 +162,17 @@ default_generation_parameters=generation_config, ) +from pyagentspec.llms import AnthropicLlmConfig + +generation_config = LlmGenerationConfig(max_tokens=256, temperature=0.7, top_p=0.95) + +llm = AnthropicLlmConfig( + name="anthropic-claude-haiku", + model_id="claude-haiku-4-5-20251001", + url="https://api.anthropic.com", + default_generation_parameters=generation_config, +) + from pyagentspec.llms import OllamaConfig generation_config = LlmGenerationConfig(max_tokens=512, temperature=0.9, top_p=0.9) diff --git a/docs/pyagentspec/source/howtoguides/howto_llm_from_different_providers.rst b/docs/pyagentspec/source/howtoguides/howto_llm_from_different_providers.rst index 6d3a17a02..4e545e539 100644 --- a/docs/pyagentspec/source/howtoguides/howto_llm_from_different_providers.rst +++ b/docs/pyagentspec/source/howtoguides/howto_llm_from_different_providers.rst @@ -6,6 +6,7 @@ Agent Spec supports several LLM providers, each one having its own LlmConfig com 
The available LLMs are: - :ref:`OpenAiConfig ` +- :ref:`AnthropicLlmConfig ` - :ref:`OciGenAiConfig ` - :ref:`OpenAiCompatibleConfig ` - :ref:`VllmConfig ` @@ -163,6 +164,39 @@ You can refer to one of those models by using the ``OpenAiConfig`` Component. :start-after: .. openai-start :end-before: .. openai-end +AnthropicLlmConfig +================== + +`Anthropic Models `_ are powered by Anthropic. +You can refer to one of those models by using the ``AnthropicLlmConfig`` component. + +**Parameters** + +.. option:: model_id: str + + ID of the Anthropic model to use. + +.. option:: url: str, null + + URL of the Anthropic API endpoint. + If omitted, the Anthropic API URL from the runtime environment is used. + +.. option:: api_key: SecretStr, null + + An optional API key for authentication to the Anthropic API endpoint. + If omitted, configure credentials in your runtime environment. + +.. option:: default_generation_parameters: dict, null + + Default parameters for text generation with this model. + +**Examples** + +.. literalinclude:: ../code_examples/howto_llm_from_different_providers.py + :language: python + :start-after: .. anthropic-start + :end-before: .. 
anthropic-end + OpenAiCompatibleConfig ====================== diff --git a/pyagentspec/src/pyagentspec/adapters/langgraph/_agentspecconverter.py b/pyagentspec/src/pyagentspec/adapters/langgraph/_agentspecconverter.py index b4ebfb7dc..9a17257c7 100644 --- a/pyagentspec/src/pyagentspec/adapters/langgraph/_agentspecconverter.py +++ b/pyagentspec/src/pyagentspec/adapters/langgraph/_agentspecconverter.py @@ -168,7 +168,7 @@ def _basechatmodel_convert_to_agentspec(self, model: BaseChatModel) -> AgentSpec return AgentSpecAnthropicLlmConfig( name=model.model, model_id=model.model, - base_url=model.anthropic_api_url, + url=model.anthropic_api_url, ) raise ValueError(f"The LLM instance provided is of an unsupported type `{type(model)}`.") diff --git a/pyagentspec/src/pyagentspec/adapters/langgraph/_langgraphconverter.py b/pyagentspec/src/pyagentspec/adapters/langgraph/_langgraphconverter.py index f2b1281c0..e56f5dc81 100644 --- a/pyagentspec/src/pyagentspec/adapters/langgraph/_langgraphconverter.py +++ b/pyagentspec/src/pyagentspec/adapters/langgraph/_langgraphconverter.py @@ -1257,8 +1257,10 @@ def _llm_convert_to_langgraph( "callbacks": callbacks, **anthropic_generation_config, } - if llm_config.base_url: - anthropic_kwargs["base_url"] = llm_config.base_url + if llm_config.url: + anthropic_kwargs["base_url"] = llm_config.url + if llm_config.api_key: + anthropic_kwargs["anthropic_api_key"] = llm_config.api_key.get_secret_value() return ChatAnthropic(**anthropic_kwargs) else: raise NotImplementedError( diff --git a/pyagentspec/src/pyagentspec/llms/anthropicconfig.py b/pyagentspec/src/pyagentspec/llms/anthropicconfig.py index 8e5969059..3287c8676 100644 --- a/pyagentspec/src/pyagentspec/llms/anthropicconfig.py +++ b/pyagentspec/src/pyagentspec/llms/anthropicconfig.py @@ -6,7 +6,12 @@ """Defines the class for configuring how to connect to Anthropic Claude models.""" + +from pydantic import SecretStr + from pyagentspec.llms.llmconfig import LlmConfig +from 
pyagentspec.sensitive_field import SensitiveField + class AnthropicLlmConfig(LlmConfig): @@ -19,6 +24,10 @@ class AnthropicLlmConfig(LlmConfig): model_id: str """ID of the Anthropic model to use, e.g., claude-haiku-4-5-20251001.""" - base_url: str | None = None - """Base URL of the Anthropic API. - If not provided, the default Anthropic API base URL will be used.""" + url: str | None = None + """URL of the Anthropic API. + If not provided, the Anthropic API URL from the runtime environment will be used.""" + + api_key: SensitiveField[SecretStr | None] = None + """An optional API KEY for the remote LLM model. If specified, the value of the api_key will be + excluded and replaced by a reference when exporting the configuration.""" diff --git a/pyagentspec/tests/adapters/langgraph/llms/test_anthropic_llm_conversion.py b/pyagentspec/tests/adapters/langgraph/llms/test_anthropic_llm_conversion.py index 9a0bd2671..3e64a85ed 100644 --- a/pyagentspec/tests/adapters/langgraph/llms/test_anthropic_llm_conversion.py +++ b/pyagentspec/tests/adapters/langgraph/llms/test_anthropic_llm_conversion.py @@ -17,7 +17,8 @@ def test_agentspec_to_langgraph_converts_anthropic_llm_config_to_chat_anthropic( from pyagentspec.llms import AnthropicLlmConfig, LlmGenerationConfig model_id: str = "test-anthropic-model" - base_url: str = "https://api.test-anthropic.com" + url: str = "https://api.test-anthropic.com" + api_key: str = "test-anthropic-api-key" max_tokens: int = 123 temperature: float = 0.7 top_p: float = 0.9 @@ -25,7 +26,8 @@ def test_agentspec_to_langgraph_converts_anthropic_llm_config_to_chat_anthropic( agentspec_config = AnthropicLlmConfig( name="test-name", model_id=model_id, - base_url=base_url, + url=url, + api_key=api_key, default_generation_parameters=LlmGenerationConfig( max_tokens=max_tokens, temperature=temperature, @@ -39,7 +41,9 @@ def test_agentspec_to_langgraph_converts_anthropic_llm_config_to_chat_anthropic( assert isinstance(model, ChatAnthropic) assert model.model == 
model_id - assert model.anthropic_api_url == base_url + assert model.anthropic_api_url == url + assert model.anthropic_api_key is not None + assert model.anthropic_api_key.get_secret_value() == api_key assert model.max_tokens == max_tokens assert model.temperature == temperature assert model.top_p == top_p @@ -54,15 +58,18 @@ def test_langgraph_to_agentspec_converts_chat_anthropic_to_anthropic_llm_config( from pyagentspec.llms import AnthropicLlmConfig model_id: str = "test-anthropic-model" - base_url: str = "https://api.test-anthropic.com" + url: str = "https://api.test-anthropic.com" + api_key: str = "test-anthropic-api-key" model = ChatAnthropic( model=model_id, - base_url=base_url, + base_url=url, + api_key=api_key, ) agentspec_config = LangGraphToAgentSpecConverter().convert(model) assert isinstance(agentspec_config, AnthropicLlmConfig) assert agentspec_config.name == model_id assert agentspec_config.model_id == model_id - assert agentspec_config.base_url == base_url + assert agentspec_config.url == url + assert agentspec_config.api_key is None