Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -184,3 +184,7 @@ demoapp.py

# Vendored external repositories (ignored)
vendor/

# JavaScript bundles (generated by webpack)
assets/js/bundle.js
assets/js/bundle.js.LICENSE.txt
468 changes: 259 additions & 209 deletions README.md

Large diffs are not rendered by default.

12 changes: 12 additions & 0 deletions aistarterkit/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -188,6 +188,18 @@
OPENAI_API_VERSION = os.getenv("OPENAI_API_VERSION", "2024-10-21")
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")

# LiteLLM proxy settings: chat requests are routed through an OpenAI-compatible
# endpoint (defaults to the public OpenAI API when no proxy is configured).
LITELLM_BASE_URL = os.getenv("LITELLM_BASE_URL", "https://api.openai.com/v1")
# NOTE(review): the "test-litellm-service-key" fallback is a placeholder so
# local dev/tests can boot without credentials — confirm production
# deployments always set LITELLM_SERVICE_KEY or OPENAI_API_KEY.
LITELLM_SERVICE_KEY = (
    os.getenv("LITELLM_SERVICE_KEY") or OPENAI_API_KEY or "test-litellm-service-key"
)
LITELLM_DEFAULT_MODEL = os.getenv("LITELLM_DEFAULT_MODEL", "gpt-5")

# LITELLM_MODEL_LIST is a comma-separated allow-list of model names.  The
# default model is always appended when missing, so the list is guaranteed
# non-empty — the previous `or [LITELLM_DEFAULT_MODEL]` fallback was dead code.
_litellm_model_env = os.getenv("LITELLM_MODEL_LIST")
if _litellm_model_env:
    _litellm_models = [value.strip() for value in _litellm_model_env.split(",") if value.strip()]
else:
    _litellm_models = []
if LITELLM_DEFAULT_MODEL not in _litellm_models:
    _litellm_models.append(LITELLM_DEFAULT_MODEL)
LITELLM_MODEL_LIST = _litellm_models

# Azure OpenAI Settings
AZURE_OPENAI_API_KEY = os.getenv("AZURE_OPENAI_API_KEY")
AZURE_OPENAI_ENDPOINT = os.getenv("AZURE_OPENAI_ENDPOINT")
Expand Down
2 changes: 0 additions & 2 deletions assets/js/bundle.js

This file was deleted.

8 changes: 0 additions & 8 deletions assets/js/bundle.js.LICENSE.txt

This file was deleted.

261 changes: 140 additions & 121 deletions chat/ai/agent.py
Original file line number Diff line number Diff line change
@@ -1,121 +1,140 @@
"""This module contains the Agent class.

The Agent class is responsible for interacting with the user.
Typical usage example:

agent = Agent()
response = agent.chat("Tell me a joke")
"""

import os
import json
from openai import OpenAI, AzureOpenAI
import re
from django.conf import settings
from ..models import Message

# Initialize the OpenAI client.
# Module-level singleton shared by every Agent instance in this module.
if settings.OPENAI_API_TYPE == "azure":
    # NOTE(review): no api_key is passed here, so AzureOpenAI falls back to
    # its own credential lookup (environment variables) — confirm
    # AZURE_OPENAI_API_KEY is exported and not only held in Django settings.
    client = AzureOpenAI(
        api_version=settings.OPENAI_API_VERSION,
        azure_endpoint=settings.AZURE_OPENAI_ENDPOINT,
    )
else:
    # Plain OpenAI client; credentials come from the OPENAI_API_KEY env var.
    client = OpenAI()


class Agent:
    """Conversational agent backed by the module-level OpenAI client.

    Attributes:
        thread: Optional thread object; when present, conversation turns are
            loaded from and persisted to its Message rows.
        history: List of {"role", "content"} dicts for previous turns.
        prompt: System prompt used for every chat exchange.
    """

    def __init__(self, prompt="You are a helpful assistant.", thread=None) -> None:
        self.thread = thread
        self.history = self._build_history()
        self.prompt = prompt

    def chat(self, message):
        """Send *message* to the model and return the assistant's reply.

        Args:
            message: The user's input string.

        Returns:
            The assistant's reply text.  Both the user message and the reply
            are appended to the in-memory history (and persisted when a
            thread is attached).
        """
        reply = self._get_ai_reply(message, system_message=self.prompt.strip())
        self._update_history("user", message)
        self._update_history("assistant", reply)
        return reply

    def _build_history(self):
        """Load prior turns from the attached thread, oldest first.

        Returns:
            A list of {"role", "content"} dicts; empty when no thread is set.
        """
        if self.thread is None:
            return []
        stored = Message.objects.filter(thread=self.thread).order_by("timestamp")
        return [{"role": m.role, "content": m.content} for m in stored]

    def _get_ai_reply(
        self, message, model="gpt-35-turbo-16k", system_message=None, temperature=0
    ):
        """Call the chat-completions API and return the stripped reply text.

        Args:
            message: The user's input string.
            model: Model/deployment name to request.
            system_message: Optional system prompt.
            temperature: Sampling temperature for the completion.

        Returns:
            The reply content with surrounding whitespace removed.
        """
        payload = self._prepare_messages(message, system_message)
        completion = client.chat.completions.create(
            model=model, messages=payload, temperature=temperature
        )
        return completion.choices[0].message.content.strip()

    def _prepare_messages(self, message, system_message):
        """Assemble the message list: system prompt, history, then new input.

        Args:
            message: The user's input string, or None to omit it.
            system_message: System prompt string, or None to omit it.

        Returns:
            A list of {"role", "content"} dicts for the API call.
        """
        prepared = []
        if system_message is not None:
            prepared.append({"role": "system", "content": system_message})
        prepared += self.history
        if message is not None:
            prepared.append({"role": "user", "content": message})
        return prepared

    def _update_history(self, role, content):
        """Record one turn in memory and, when a thread exists, in the DB.

        Args:
            role: Sender role ("user" or "assistant").
            content: The message text.
        """
        self.history.append({"role": role, "content": content})
        if self.thread is None:
            return
        Message.objects.create(
            thread=self.thread, user=self.thread.user, content=content, role=role
        )
"""This module contains the Agent class.

The Agent class is responsible for interacting with the user.
Typical usage example:

agent = Agent()
response = agent.chat("Tell me a joke")
"""

import os
import json
from openai import OpenAI
import re
from django.conf import settings
from ..models import Message

# Shared OpenAI-compatible client pointed at the configured LiteLLM base URL
# (the public OpenAI API by default).  Created once at import time and reused
# by every Agent instance.
client = OpenAI(
    base_url=settings.LITELLM_BASE_URL,
    api_key=settings.LITELLM_SERVICE_KEY,
)


class Agent:
    """Conversational agent that sends chat completions through LiteLLM.

    Attributes:
        thread: Optional thread model instance; when set, history is loaded
            from and persisted to its Message rows, and its ``model`` and
            ``temperature`` fields drive the completion request.
        history: A list of previous interactions with the user, as
            {"role", "content"} dicts.
        prompt: A string used as the system prompt for the chat.
    """

    def __init__(self, prompt="You are a helpful assistant.", thread=None) -> None:
        self.thread = thread
        self.history = self._build_history()
        self.prompt = prompt

    def chat(self, message):
        """Interacts with the user and returns the assistant's response.

        Args:
            message: A string containing the user's input.

        Returns:
            A string containing the assistant's response.  The user message
            and reply are both appended to history (and persisted when a
            thread is attached).
        """
        ai_reply = self._get_ai_reply(message, system_message=self.prompt.strip())
        self._update_history("user", message)
        self._update_history("assistant", ai_reply)

        return ai_reply

    def _build_history(self):
        """Builds the history from the thread messages.

        Returns:
            A list of previous interactions with the user, oldest first;
            empty when no thread is attached.
        """
        history = []
        if self.thread is not None:  # Ensure that thread is not None
            messages = Message.objects.filter(thread=self.thread).order_by("timestamp")
            for message in messages:
                history.append({"role": message.role, "content": message.content})
        return history

    def _get_ai_reply(
        self,
        message,
        model=None,
        system_message=None,
        temperature=None,
    ):
        """Gets a response from the AI model.

        Args:
            message: A string containing the user's input.
            model: Optional model-name override; when None, the thread's
                model (if on the allow-list) or the configured default is used.
            system_message: A string containing a system message.
            temperature: Optional sampling-temperature override restoring the
                pre-LiteLLM signature; when None, the thread's temperature
                (default 1) is used.

        Returns:
            A string containing the AI's response.
        """
        messages = self._prepare_messages(message, system_message)
        target_model = self._resolve_model(model)
        if temperature is None:
            temperature = self._resolve_temperature()
        completion = client.chat.completions.create(
            model=target_model,
            messages=messages,
            temperature=temperature,
        )
        return completion.choices[0].message.content.strip()

    def _resolve_model(self, override):
        """Picks the model: explicit override > allowed thread model > default.

        Args:
            override: Model name passed by the caller, or a falsy value.

        Returns:
            The model name to send to the API.
        """
        if override:
            return override
        allowed_models = getattr(settings, "LITELLM_MODEL_LIST", None)
        if not allowed_models:
            allowed_models = [settings.LITELLM_DEFAULT_MODEL]
        thread_model = getattr(self.thread, "model", None)
        # The thread's model is honored only when it is on the allow-list;
        # anything else silently falls back to the configured default.
        if thread_model and thread_model in allowed_models:
            return thread_model
        return settings.LITELLM_DEFAULT_MODEL

    def _resolve_temperature(self):
        """Returns the thread's temperature, defaulting to 1 when absent."""
        thread_temperature = getattr(self.thread, "temperature", None)
        if thread_temperature is None:
            return 1
        return thread_temperature

    def _prepare_messages(self, message, system_message):
        """Prepares the messages for the AI model.

        Args:
            message: A string containing the user's input, or None to omit.
            system_message: A string containing a system message, or None.

        Returns:
            A list of {"role", "content"} dicts for the AI model.
        """
        messages = []
        if system_message is not None:
            messages.append({"role": "system", "content": system_message})
        messages.extend(self.history)
        if message is not None:
            messages.append({"role": "user", "content": message})
        return messages

    def _update_history(self, role, content):
        """Updates the history of interactions with the user.

        Args:
            role: A string indicating the role of the message sender.
            content: A string containing the message content.
        """
        self.history.append({"role": role, "content": content})
        # Create and save a Message instance
        if self.thread is not None:  # Ensure that thread is not None
            Message.objects.create(
                thread=self.thread, user=self.thread.user, content=content, role=role
            )
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
# Generated by Django 4.2.7 on 2025-11-18 23:08

from django.db import migrations, models


class Migration(migrations.Migration):
    # Aligns the Thread model with the LiteLLM settings: restricts the
    # `model` field's choices/default to "gpt-5" and sets the `temperature`
    # default to 1 (matching Agent._resolve_temperature's fallback).

    dependencies = [
        ("chat", "0004_alter_thread_model"),
    ]

    operations = [
        migrations.AlterField(
            model_name="thread",
            name="model",
            field=models.CharField(
                choices=[("gpt-5", "gpt-5")], default="gpt-5", max_length=100
            ),
        ),
        migrations.AlterField(
            model_name="thread",
            name="temperature",
            field=models.FloatField(default=1),
        ),
    ]
Loading