Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 11 additions & 0 deletions docs/pyagentspec/source/_components/llm_config_tabs.rst
Original file line number Diff line number Diff line change
Expand Up @@ -60,3 +60,14 @@
name="Ollama Config",
model_id="model-id",
)

.. tab:: Anthropic

.. code-block:: python

from pyagentspec.llms import AnthropicLlmConfig

llm_config = AnthropicLlmConfig(
name="Anthropic Config",
model_id="claude-haiku-4-5-20251001",
)
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,16 @@
}
]
},
"AnthropicLlmConfig": {
"anyOf": [
{
"$ref": "#/$defs/BaseAnthropicLlmConfig"
},
{
"$ref": "#/$defs/ComponentReference"
}
]
},
"ApiNode": {
"anyOf": [
{
Expand Down Expand Up @@ -1385,6 +1395,98 @@
],
"x-abstract-component": true
},
"BaseAnthropicLlmConfig": {
"additionalProperties": false,
"description": "Class to configure a connection to an Anthropic Claude model.\n\nRequires to specify the model identity. The API key and endpoint are provided by the runtime environment.",
"properties": {
"id": {
"title": "Id",
"type": "string"
},
"name": {
"title": "Name",
"type": "string"
},
"description": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null,
"title": "Description"
},
"metadata": {
"anyOf": [
{
"additionalProperties": true,
"type": "object"
},
{
"type": "null"
}
],
"title": "Metadata"
},
"default_generation_parameters": {
"anyOf": [
{
"type": "null"
},
{
"$ref": "#/$defs/LlmGenerationConfig"
}
],
"default": null
},
"model_id": {
"title": "Model Id",
"type": "string"
},
"url": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null,
"title": "Url"
},
"api_key": {
"anyOf": [
{
"format": "password",
"type": "string",
"writeOnly": true
},
{
"type": "null"
}
],
"default": null,
"title": "Api Key"
},
"$referenced_components": {
"$ref": "#/$defs/ReferencedComponents"
},
"component_type": {
"const": "AnthropicLlmConfig"
}
},
"required": [
"model_id",
"name"
],
"title": "AnthropicLlmConfig",
"type": "object",
"x-abstract-component": false
},
"BaseApiNode": {
"additionalProperties": false,
"description": "Make an API call.\n\nThis node is intended to be a part of a Flow.\n\n- **Inputs**\n Inferred from the json spec retrieved from API Spec URI, if available and reachable.\n Otherwise, users have to manually specify them.\n- **Outputs**\n Inferred from the json spec retrieved from API Spec URI, if available and reachable.\n Otherwise, users should manually specify them.\n\n If None is given, ``pyagentspec`` infers a generic property of any type named ``response``.\n- **Branches**\n One, the default next.\n\n\nExamples\n--------\n>>> from pyagentspec.flows.nodes import ApiNode\n>>> from pyagentspec.property import Property\n>>> weather_result_property = Property(\n... json_schema={\n... \"title\": \"zurich_weather\",\n... \"type\": \"object\",\n... \"properties\": {\n... \"temperature\": {\n... \"type\": \"number\",\n... \"description\": \"Temperature in celsius degrees\",\n... },\n... \"weather\": {\"type\": \"string\"}\n... },\n... }\n... )\n>>> call_current_weather_step = ApiNode(\n... name=\"Weather API call node\",\n... url=\"https://example.com/weather\",\n... http_method = \"GET\",\n... query_params={\n... \"location\": \"zurich\",\n... },\n... outputs=[weather_result_property]\n... )\n>>>\n>>> item_id_property = Property(\n... json_schema={\"title\": \"item_id\", \"type\": \"string\"}\n... )\n>>> order_id_property = Property(\n... json_schema={\"title\": \"order_id\", \"type\": \"string\"}\n... )\n>>> store_id_property = Property(\n... json_schema={\"title\": \"store_id\", \"type\": \"string\"}\n... )\n>>> session_id_property = Property(\n... json_schema={\"title\": \"session_id\", \"type\": \"string\"}\n... )\n>>> create_order_step = ApiNode(\n... name=\"Orders api call node\",\n... url=\"https://example.com/orders/{{ order_id }}\",\n... http_method=\"POST\",\n... # sending an object which will automatically be transformed into JSON\n... data={\n... # define a static body parameter\n... \"topic_id\": 12345,\n... 
# define a templated body parameter.\n... # The value for {{ item_id }} will be taken from the IO system at runtime\n... \"item_id\": \"{{ item_id }}\",\n... },\n... query_params={\n... # provide one templated query parameter called \"store_id\"\n... # which will take its value from the IO system from key \"store_id\"\n... \"store_id\": \"{{ store_id }}\",\n... },\n... headers={\n... # set header session_id. the value is coming from the IO system\n... \"session_id\": \"{{ session_id }}\",\n... },\n... inputs=[item_id_property, order_id_property, store_id_property, session_id_property],\n... )",
Expand Down Expand Up @@ -2779,6 +2881,9 @@
},
"BaseLlmConfig": {
"anyOf": [
{
"$ref": "#/$defs/AnthropicLlmConfig"
},
{
"$ref": "#/$defs/OciGenAiConfig"
},
Expand Down Expand Up @@ -6259,6 +6364,9 @@
{
"$ref": "#/$defs/BaseAgenticComponent"
},
{
"$ref": "#/$defs/BaseAnthropicLlmConfig"
},
{
"$ref": "#/$defs/BaseApiNode"
},
Expand Down Expand Up @@ -6580,6 +6688,21 @@
}
}
},
"VersionedAnthropicLlmConfig": {
"anyOf": [
{
"$ref": "#/$defs/BaseAnthropicLlmConfig"
},
{
"$ref": "#/$defs/ComponentReference"
}
],
"properties": {
"agentspec_version": {
"$ref": "#/$defs/AgentSpecVersionEnum"
}
}
},
"VersionedApiNode": {
"anyOf": [
{
Expand Down Expand Up @@ -7504,6 +7627,9 @@
{
"$ref": "#/$defs/VersionedAgenticComponent"
},
{
"$ref": "#/$defs/VersionedAnthropicLlmConfig"
},
{
"$ref": "#/$defs/VersionedApiNode"
},
Expand Down
7 changes: 7 additions & 0 deletions docs/pyagentspec/source/api/llmmodels.rst
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,13 @@ OpenAI Models
.. autoclass:: pyagentspec.llms.openaiconfig.OpenAiConfig
:exclude-members: model_post_init, model_config

Anthropic Models
^^^^^^^^^^^^^^^^

.. _anthropicllmconfig:
.. autoclass:: pyagentspec.llms.anthropicconfig.AnthropicLlmConfig
:exclude-members: model_post_init, model_config

OciGenAi Models
^^^^^^^^^^^^^^^

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,20 @@
)
# .. openai-end

# .. anthropic-start
from pyagentspec.llms import AnthropicLlmConfig

generation_config = LlmGenerationConfig(max_tokens=256, temperature=0.7, top_p=0.95)

llm = AnthropicLlmConfig(
name="anthropic-claude-haiku",
model_id="claude-haiku-4-5-20251001",
url="https://api.anthropic.com",
api_key="optional_api_key",
default_generation_parameters=generation_config,
)
# .. anthropic-end

# .. ollama-start
from pyagentspec.llms import OllamaConfig

Expand Down Expand Up @@ -148,6 +162,17 @@
default_generation_parameters=generation_config,
)

from pyagentspec.llms import AnthropicLlmConfig

generation_config = LlmGenerationConfig(max_tokens=256, temperature=0.7, top_p=0.95)

llm = AnthropicLlmConfig(
name="anthropic-claude-haiku",
model_id="claude-haiku-4-5-20251001",
url="https://api.anthropic.com",
default_generation_parameters=generation_config,
)

from pyagentspec.llms import OllamaConfig

generation_config = LlmGenerationConfig(max_tokens=512, temperature=0.9, top_p=0.9)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ Agent Spec supports several LLM providers, each one having its own LlmConfig com
The available LLMs are:

- :ref:`OpenAiConfig <openaiconfig>`
- :ref:`AnthropicLlmConfig <anthropicllmconfig>`
- :ref:`OciGenAiConfig <ocigenaiconfig>`
- :ref:`OpenAiCompatibleConfig <openaicompatibleconfig>`
- :ref:`VllmConfig <vllmconfig>`
Expand Down Expand Up @@ -163,6 +164,39 @@ You can refer to one of those models by using the ``OpenAiConfig`` Component.
:start-after: .. openai-start
:end-before: .. openai-end

AnthropicLlmConfig
==================

`Anthropic Models <https://docs.anthropic.com/en/docs/about-claude/models>`_ are the Claude family of large language models developed by Anthropic.
You can refer to one of those models by using the ``AnthropicLlmConfig`` component.

**Parameters**

.. option:: model_id: str

ID of the Anthropic model to use.

.. option:: url: str, null

URL of the Anthropic API endpoint.
If omitted, the Anthropic API URL from the runtime environment is used.

.. option:: api_key: SecretStr, null

   An optional API key for authentication against the Anthropic API endpoint.
   If omitted, the credentials configured in your runtime environment are used.

.. option:: default_generation_parameters: LlmGenerationConfig, null

   Default parameters for text generation with this model.

**Examples**

.. literalinclude:: ../code_examples/howto_llm_from_different_providers.py
:language: python
:start-after: .. anthropic-start
:end-before: .. anthropic-end


OpenAiCompatibleConfig
======================
Expand Down
1 change: 1 addition & 0 deletions pyagentspec/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ def read(file_name):
"langchain>=1.2.0",
"langchain-openai>=1.1.7",
"langchain-ollama>=1.0.1",
"langchain-anthropic>=1.3.2,<2.0.0",
"anyio>=4.10.0,<4.12.0",
"langgraph-checkpoint>=3.0.1,<4.0.0", # To mitigate CVE-2025-64439
]
Expand Down
2 changes: 2 additions & 0 deletions pyagentspec/src/pyagentspec/_component_registry.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@
ToolNode,
)
from pyagentspec.llms import (
AnthropicLlmConfig,
OciGenAiConfig,
OllamaConfig,
OpenAiCompatibleConfig,
Expand Down Expand Up @@ -87,6 +88,7 @@
"AgenticComponent": AgenticComponent,
"AgentNode": AgentNode,
"AgentSpecializationParameters": AgentSpecializationParameters,
"AnthropicLlmConfig": AnthropicLlmConfig,
"ApiNode": ApiNode,
"BranchingNode": BranchingNode,
"CatchExceptionNode": CatchExceptionNode,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@
from types import FunctionType
from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Union, cast

from pydantic import SecretStr

from pyagentspec import Property
from pyagentspec.adapters.langgraph._agentspec_converter_flow import (
_langgraph_graph_convert_to_agentspec,
Expand All @@ -20,11 +22,13 @@
StateNodeSpec,
StructuredTool,
SystemMessage,
langchain_anthropic,
langchain_ollama,
langchain_openai,
)
from pyagentspec.agent import Agent as AgentSpecAgent
from pyagentspec.component import Component as AgentSpecComponent
from pyagentspec.llms import AnthropicLlmConfig as AgentSpecAnthropicLlmConfig
from pyagentspec.llms import LlmConfig as AgentSpecLlmConfig
from pyagentspec.llms import OllamaConfig as AgentSpecOllamaConfig
from pyagentspec.llms import OpenAiCompatibleConfig as AgentSpecOpenAiCompatibleConfig
Expand Down Expand Up @@ -160,6 +164,12 @@ def _basechatmodel_convert_to_agentspec(self, model: BaseChatModel) -> AgentSpec
model_id=model.model_name,
api_type=api_type,
)
elif isinstance(model, langchain_anthropic.ChatAnthropic):
return AgentSpecAnthropicLlmConfig(
name=model.model,
model_id=model.model,
url=model.anthropic_api_url,
)
raise ValueError(f"The LLM instance provided is of an unsupported type `{type(model)}`.")

def _langgraph_agent_convert_to_agentspec(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@
from pyagentspec.flows.nodes import OutputMessageNode as AgentSpecOutputMessageNode
from pyagentspec.flows.nodes import StartNode as AgentSpecStartNode
from pyagentspec.flows.nodes import ToolNode as AgentSpecToolNode
from pyagentspec.llms.anthropicconfig import AnthropicLlmConfig
from pyagentspec.llms.llmconfig import LlmConfig as AgentSpecLlmConfig
from pyagentspec.llms.ollamaconfig import OllamaConfig
from pyagentspec.llms.openaicompatibleconfig import OpenAIAPIType, OpenAiCompatibleConfig
Expand Down Expand Up @@ -1240,6 +1241,27 @@ def _llm_convert_to_langgraph(
callbacks=callbacks,
**generation_config,
)
elif isinstance(llm_config, AnthropicLlmConfig):
from langchain_anthropic import ChatAnthropic

anthropic_generation_config: dict[str, Any] = {
"temperature": generation_config.get("temperature"),
"max_tokens": generation_config.get("max_completion_tokens"),
"top_p": generation_config.get("top_p"),
}
anthropic_generation_config = {
k: v for k, v in anthropic_generation_config.items() if v is not None
}
anthropic_kwargs: Dict[str, Any] = {
"model": llm_config.model_id,
"callbacks": callbacks,
**anthropic_generation_config,
}
if llm_config.url:
anthropic_kwargs["base_url"] = llm_config.url
if llm_config.api_key:
anthropic_kwargs["anthropic_api_key"] = llm_config.api_key.get_secret_value()
return ChatAnthropic(**anthropic_kwargs)
else:
raise NotImplementedError(
f"Llm model of type {llm_config.__class__.__name__} is not yet supported."
Expand Down
Loading
Loading