From f1c5e752bf0aac53bd9b414dbeb8801562529bfc Mon Sep 17 00:00:00 2001
From: Xinyue
Date: Mon, 16 Feb 2026 16:15:47 +0100
Subject: [PATCH 1/7] examples: add automatic function calling helper

---
 .../ollama_chat_automatic_function_calling.py | 164 ++++++++++++++++++
 1 file changed, 164 insertions(+)
 create mode 100644 examples/ollama_chat_automatic_function_calling.py

diff --git a/examples/ollama_chat_automatic_function_calling.py b/examples/ollama_chat_automatic_function_calling.py
new file mode 100644
index 00000000..82882381
--- /dev/null
+++ b/examples/ollama_chat_automatic_function_calling.py
@@ -0,0 +1,164 @@
+from ollama import ChatResponse, chat
+
+
+def ollama_automatic_function_calling(
+    client_fn: chat, **kwargs
+) -> tuple[ChatResponse, list[dict]]:
+    """
+    Automatically handles tool/function calls in a chat loop.
+
+    Loops until the model returns no tool calls or max_turns is reached.
+
+    Args:
+        client_fn: Ollama chat client
+        messages: List of message dicts for the conversation
+        max_turns: Maximum number of chat iterations (default: 20)
+        model: Model name to use (required)
+        tools: List of available tool definitions
+        tool_map: Dict mapping tool names to callable functions
+        Any other keyword arguments are forwarded to client_fn.
+
+    Returns:
+        (last_response, messages)
+    """
+    model = kwargs.get("model")
+    if model is None:
+        raise ValueError("model must be specified")
+
+    messages = kwargs.get("messages", [])
+    max_turns = kwargs.get("max_turns", 20)
+    tools = kwargs.get("tools", [])
+    tool_map = kwargs.get("tool_map")
+    last_response = None
+
+    def _to_msg_dict(msg):
+        if hasattr(msg, "model_dump"):
+            return msg.model_dump(exclude_none=True)
+        if isinstance(msg, dict):
+            return msg
+        return {
+            "role": getattr(msg, "role", "assistant"),
+            "content": getattr(msg, "content", ""),
+        }
+
+    def _extract_tool_call_parts(tool_call):
+        function = getattr(tool_call, "function", None)
+        if function is None and isinstance(tool_call, dict):
+            function = tool_call.get("function", {})
+
+        if isinstance(function, dict):
+            name = function.get("name")
+            arguments = function.get("arguments") or {}
+        else:
+            name = getattr(function, "name", None)
+            arguments = getattr(function, "arguments", None) or {}
+
+        return name, arguments
+
+    def _infer_tool_name(t):
+        if t is None:
+            return None
+        if isinstance(t, dict):
+            fn = t.get("function") or {}
+            if isinstance(fn, dict):
+                return fn.get("name")
+            return None
+        return getattr(t, "name", None) or getattr(t, "__name__", None)
+
+    def _resolve_callable(fn):
+        wrapped = getattr(fn, "__wrapped__", None)
+        return wrapped if callable(wrapped) else fn
+
+    if tool_map is None:
+        tool_map = {}
+        for t in tools:
+            name = _infer_tool_name(t)
+            if name:
+                tool_map[name] = t
+
+    if not callable(client_fn):
+        raise ValueError("client_fn must be a callable")
+
+    reserved = {"messages", "max_turns", "model", "tools", "tool_map"}
+    chat_kwargs = {k: v for k, v in kwargs.items() if k not in reserved}
+
+    for _ in range(max_turns):
+        response = client_fn(model=model, tools=tools, messages=messages, **chat_kwargs)
+        last_response = response
+        assistant_msg = response.message
+        messages.append(_to_msg_dict(assistant_msg))
+
+        tool_calls = getattr(assistant_msg, "tool_calls", None) or []
+        if not tool_calls:
+            return last_response, messages
+
+        for tool_call in tool_calls:
+            tool_name, tool_args = _extract_tool_call_parts(tool_call)
+            tool_fn = tool_map.get(tool_name)
+
+            if not callable(tool_fn):
+                messages.append(
+                    {
+                        "role": "tool",
+                        "content": f"Tool '{tool_name}' not found.",
+                        "tool_name": tool_name or "unknown_tool",
"tool_name": tool_name or "unknown_tool", + } + ) + continue + + if isinstance(tool_args, str): + try: + import json + + tool_args = json.loads(tool_args) + except Exception: + tool_args = {} + + try: + result = _resolve_callable(tool_fn)(**(tool_args or {})) + content = str(result) + except Exception as e: + content = f"Tool '{tool_name}' execution failed: {e}" + + messages.append( + { + "role": "tool", + "content": content, + "tool_name": tool_name, + } + ) + + # Safety exit: do not raise, just return the last response. + return last_response, messages + + +def add(a: float, b: float) -> float: + """Apply addition""" + return a + b + + +def sub(a: float, b: float) -> float: + """Apply subtraction""" + return a - b + + +def mul(a: float, b: float) -> float: + """Apply multiplication""" + return a * b + + +def div(a: float, b: float) -> float: + """Apply division""" + return a / b + + +if __name__ == "__main__": + from ollama import chat + + response, messages = ollama_automatic_function_calling( + chat, + model="glm-5:cloud", + think="low", + tools=[add, sub, mul, div], + messages=[{"role": "user", "content": "这个数学题:4+7*9/6是多少?"}], + ) From fe3cff40ac83139cc21efb030c9c453343d64165 Mon Sep 17 00:00:00 2001 From: Xinyue Date: Mon, 16 Feb 2026 16:17:59 +0100 Subject: [PATCH 2/7] examples: fix typo in function docstring parameter description --- examples/ollama_chat_automatic_function_calling.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/ollama_chat_automatic_function_calling.py b/examples/ollama_chat_automatic_function_calling.py index 82882381..4d146502 100644 --- a/examples/ollama_chat_automatic_function_calling.py +++ b/examples/ollama_chat_automatic_function_calling.py @@ -10,7 +10,7 @@ def ollama_automatic_function_calling( max_turns is reached. Args: - client_fn: Ollama chat client + client_fn: Ollama some client messages: List of message dicts for the conversation max_turns: Maximum number of chat iterations (default: 20) model: Model name to use (default: "glm-5:cloud") From a3396e310a4065c986ab43d3dcaec76ce0137f80 Mon Sep 17 00:00:00 2001 From: Xinyue Date: Mon, 16 Feb 2026 17:04:52 +0100 Subject: [PATCH 3/7] examples: rename function and add options parameter support - Rename `ollama_automatic_function_calling` to `ollama_chat_automatic_function_calling` - Add `options` parameter with default None - Pass `options` to client function call - Update docstring to clarify client_fn parameter - Remove duplicate import statement --- examples/ollama_chat_automatic_function_calling.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/examples/ollama_chat_automatic_function_calling.py b/examples/ollama_chat_automatic_function_calling.py index 4d146502..3fbd8d97 100644 --- a/examples/ollama_chat_automatic_function_calling.py +++ b/examples/ollama_chat_automatic_function_calling.py @@ -1,8 +1,10 @@ from ollama import ChatResponse, chat +from ollama import ChatResponse, chat + -def ollama_automatic_function_calling( - client_fn: chat, **kwargs +def ollama_chat_automatic_function_calling( + client_fn: chat, options: dict = None, **kwargs ) -> tuple[ChatResponse, list[dict]]: """ Automatically handles tool/function calls in a chat loop. @@ -10,7 +12,7 @@ def ollama_automatic_function_calling( max_turns is reached. 
     Args:
-        client_fn: Ollama some client
+        client_fn: Ollama chat client
         messages: List of message dicts for the conversation
         max_turns: Maximum number of chat iterations (default: 20)
         model: Model name to use (required)
@@ -83,7 +85,9 @@ def _resolve_callable(fn):
     chat_kwargs = {k: v for k, v in kwargs.items() if k not in reserved}
 
     for _ in range(max_turns):
-        response = client_fn(model=model, tools=tools, messages=messages, **chat_kwargs)
+        response = client_fn(
+            model=model, tools=tools, messages=messages, options=options, **chat_kwargs
+        )
         last_response = response
         assistant_msg = response.message
         messages.append(_to_msg_dict(assistant_msg))

From e6c821033031902e8dbd388c911f8405db779dfe Mon Sep 17 00:00:00 2001
From: Xinyue
Date: Mon, 16 Feb 2026 17:05:06 +0100
Subject: [PATCH 4/7] examples: reorder parameters and add options
 configuration in example

- Move `think` parameter after `messages` for better parameter ordering
- Add `options` dict with temperature, top_p, and top_k settings
- Add comment showing think parameter accepts "low", True, or False
---
 examples/ollama_chat_automatic_function_calling.py | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/examples/ollama_chat_automatic_function_calling.py b/examples/ollama_chat_automatic_function_calling.py
index 3fbd8d97..504bac47 100644
--- a/examples/ollama_chat_automatic_function_calling.py
+++ b/examples/ollama_chat_automatic_function_calling.py
@@ -159,10 +159,15 @@ def div(a: float, b: float) -> float:
 if __name__ == "__main__":
     from ollama import chat
 
-    response, messages = ollama_automatic_function_calling(
+    response, messages = ollama_chat_automatic_function_calling(
         chat,
         model="glm-5:cloud",
-        think="low",
         tools=[add, sub, mul, div],
         messages=[{"role": "user", "content": "Math problem: what is 4+7*9/6?"}],
+        think="low",  # or True, False
+        options={
+            "temperature": 1,
+            "top_p": 1.0,
+            "top_k": 60,
+        },
     )

From ddb9fcd3e3d9bea024f9db090a05aaadce93ef3c Mon Sep 17 00:00:00 2001
From: Xinyue
Date: Mon, 16 Feb 2026 17:20:25 +0100
Subject: [PATCH 5/7] examples: remove duplicate import and refactor message
 initialization

- Remove duplicate `from ollama import ChatResponse, chat` import
- Initialize `messages` as empty list instead of getting from kwargs
- Remove `reserved` set and `chat_kwargs` filtering logic
- Add `think` parameter to client function call
- Reformat client_fn call with explicit parameters on separate lines
---
 .../ollama_chat_automatic_function_calling.py | 15 +++++++--------
 1 file changed, 7 insertions(+), 8 deletions(-)

diff --git a/examples/ollama_chat_automatic_function_calling.py b/examples/ollama_chat_automatic_function_calling.py
index 504bac47..cc8e0c7e 100644
--- a/examples/ollama_chat_automatic_function_calling.py
+++ b/examples/ollama_chat_automatic_function_calling.py
@@ -1,7 +1,5 @@
 from ollama import ChatResponse, chat
-from ollama import ChatResponse, chat
-
 
 
 def ollama_chat_automatic_function_calling(
     client_fn: chat, options: dict = None, **kwargs
@@ -27,10 +25,9 @@ def ollama_chat_automatic_function_calling(
     if model is None:
         raise ValueError("model must be specified")
 
-    messages = kwargs.get("messages", [])
     max_turns = kwargs.get("max_turns", 20)
-    tools = kwargs.get("tools", [])
     tool_map = kwargs.get("tool_map")
+    messages = list()
     last_response = None
 
     def _to_msg_dict(msg):
@@ -81,12 +78,14 @@ def _resolve_callable(fn):
     if not callable(client_fn):
         raise ValueError("client_fn must be a callable")
 
-    reserved = {"messages", "max_turns", "model", "tools", "tool_map"}
-    chat_kwargs = {k: v for k, v in kwargs.items() if k not in reserved}
-
     for _ in range(max_turns):
         response = client_fn(
-            model=model, tools=tools, messages=messages, options=options, **chat_kwargs
+            model=model,
+            tools=tools,
+            messages=messages,
+            options=options,
+            think=think,
+            **chat_kwargs,
         )
         last_response = response
         assistant_msg = response.message

From c03fd14bcdf861bca9186ff46c688df586df81b5 Mon Sep 17 00:00:00 2001
From: Xinyue
Date: Mon, 16 Feb 2026 17:20:37 +0100
Subject: [PATCH 6/7] examples: refactor function calling helper to accept
 messages parameter

- Add `messages` as explicit parameter instead of initializing empty list
- Extract `tools` from kwargs for conditional tool_map initialization
- Only initialize tool_map when both tool_map is None and tools exist
- Add reserved parameters set and filter chat_kwargs
- Remove hardcoded `think` parameter from client call
- Add null check for tool_map before accessing in tool execution
---
 .../ollama_chat_automatic_function_calling.py | 15 ++++++++++-----
 1 file changed, 10 insertions(+), 5 deletions(-)

diff --git a/examples/ollama_chat_automatic_function_calling.py b/examples/ollama_chat_automatic_function_calling.py
index cc8e0c7e..942b2494 100644
--- a/examples/ollama_chat_automatic_function_calling.py
+++ b/examples/ollama_chat_automatic_function_calling.py
@@ -2,7 +2,10 @@
 
 
 def ollama_chat_automatic_function_calling(
-    client_fn: chat, options: dict = None, **kwargs
+    client_fn: chat,
+    messages: list[dict],
+    options: dict = None,
+    **kwargs,
 ) -> tuple[ChatResponse, list[dict]]:
     """
     Automatically handles tool/function calls in a chat loop.
@@ -26,8 +29,8 @@ def ollama_chat_automatic_function_calling(
         raise ValueError("model must be specified")
 
     max_turns = kwargs.get("max_turns", 20)
+    tools = kwargs.get("tools")
     tool_map = kwargs.get("tool_map")
-    messages = list()
     last_response = None
 
     def _to_msg_dict(msg):
@@ -68,7 +71,7 @@ def _resolve_callable(fn):
         wrapped = getattr(fn, "__wrapped__", None)
         return wrapped if callable(wrapped) else fn
 
-    if tool_map is None:
+    if tool_map is None and tools:
         tool_map = {}
         for t in tools:
             name = _infer_tool_name(t)
@@ -78,13 +81,15 @@ def _resolve_callable(fn):
     if not callable(client_fn):
         raise ValueError("client_fn must be a callable")
 
+    reserved = {"model", "tools", "messages", "options", "max_turns", "tool_map"}
+    chat_kwargs = {k: v for k, v in kwargs.items() if k not in reserved}
+
     for _ in range(max_turns):
         response = client_fn(
             model=model,
             tools=tools,
             messages=messages,
             options=options,
-            think=think,
             **chat_kwargs,
         )
         last_response = response
@@ -97,7 +102,7 @@ def _resolve_callable(fn):
 
         for tool_call in tool_calls:
             tool_name, tool_args = _extract_tool_call_parts(tool_call)
-            tool_fn = tool_map.get(tool_name)
+            tool_fn = tool_map.get(tool_name) if tool_map else None
 
             if not callable(tool_fn):
                 messages.append(

From 983c51ea082597f22b468eb1c9e7807ca9561b0a Mon Sep 17 00:00:00 2001
From: Xinyue
Date: Tue, 17 Feb 2026 12:19:10 +0100
Subject: [PATCH 7/7] examples: rename function to remove redundant 'chat'
 from its name

- Rename `ollama_chat_automatic_function_calling` to
  `ollama_automatic_function_calling`
- Update function call in main block to use new name
---
 examples/ollama_chat_automatic_function_calling.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/examples/ollama_chat_automatic_function_calling.py b/examples/ollama_chat_automatic_function_calling.py
index 942b2494..d367dba1 100644
--- a/examples/ollama_chat_automatic_function_calling.py
+++ b/examples/ollama_chat_automatic_function_calling.py
@@ -1,7 +1,7 @@
 from ollama import ChatResponse, chat
 
 
-def ollama_chat_automatic_function_calling(
+def ollama_automatic_function_calling(
     client_fn: chat,
     messages: list[dict],
     options: dict = None,
@@ -163,7 +163,7 @@ def div(a: float, b: float) -> float:
 if __name__ == "__main__":
     from ollama import chat
 
-    response, messages = ollama_chat_automatic_function_calling(
+    response, messages = ollama_automatic_function_calling(
         chat,
         model="glm-5:cloud",
         tools=[add, sub, mul, div],
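
Usage note: once the series is applied, the example exposes
ollama_automatic_function_calling(client_fn, messages, options=None, **kwargs),
with model, tools, max_turns, and tool_map read from kwargs and every other
keyword forwarded to the chat call. The sketch below is a minimal, hypothetical
driver for the finished helper and is not part of the series: the model name
"qwen3" is a stand-in for any model with tool-calling support, and the import
assumes the script is run from the examples/ directory next to
ollama_chat_automatic_function_calling.py.

    from ollama import chat

    # Hypothetical import path: the helper and calculator tools live in the
    # example module added by this series.
    from ollama_chat_automatic_function_calling import (
        add,
        div,
        mul,
        ollama_automatic_function_calling,
        sub,
    )

    # The helper keeps calling `chat`, executing each requested calculator
    # tool and appending the result as a "tool" message, until the model
    # answers in plain text or max_turns is exhausted.
    response, messages = ollama_automatic_function_calling(
        chat,
        messages=[{"role": "user", "content": "What is 4 + 7 * 9 / 6?"}],
        model="qwen3",  # stand-in: any tool-capable model
        tools=[add, sub, mul, div],
        max_turns=10,
    )

    print(response.message.content)  # final answer from the model
    for m in messages:  # full transcript, including tool results
        print(m.get("role"), "->", m.get("content", ""))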