From a34ccb65d5006ef18070dbf85f2707615c92497d Mon Sep 17 00:00:00 2001 From: wesleyhstratusadv Date: Sat, 21 Feb 2026 12:35:07 -0700 Subject: [PATCH 1/9] action for loading bots and listing bots --- dandy/cli/actions/bot/__init__.py | 0 dandy/cli/actions/bot/action.py | 76 +++++++++++++++++++ dandy/cli/actions/constants.py | 2 + .../intelligence/decoders/files_decoder.py | 18 +---- 4 files changed, 79 insertions(+), 17 deletions(-) create mode 100644 dandy/cli/actions/bot/__init__.py create mode 100644 dandy/cli/actions/bot/action.py diff --git a/dandy/cli/actions/bot/__init__.py b/dandy/cli/actions/bot/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/dandy/cli/actions/bot/action.py b/dandy/cli/actions/bot/action.py new file mode 100644 index 00000000..93bcfc26 --- /dev/null +++ b/dandy/cli/actions/bot/action.py @@ -0,0 +1,76 @@ +from time import sleep + +import importlib +import inspect +import sys +from pathlib import Path + +from dandy.bot.bot import Bot +from dandy.cli.actions.action import BaseAction +from dandy.cli.session import session +from dandy.cli.tui.tui import tui + + +class BotAction(BaseAction): + name = 'Bot' + description = 'Bots at your service!' + calls = ('bot',) + + def help(self): + # Simple help output (printed if called with no subcommand) + return "Usage: /bot run [optional inline prompt]\n" \ + "If no prompt, enter multi-line (end with /end). Other subcommands: list" + + def run(self, user_input: str) -> str: + parts = user_input.split() + if not parts: + return self.help() + + subcmd = parts[0].lower() + + if subcmd == 'run': + if len(parts) < 2: + return "Error: Missing bot name. Usage: /bot run " + + module_name = parts[1] + bots_dir = Path(session.project_base_path) / '.dandy' / 'bots' + + if not bots_dir.exists(): + return f"Error: Bots directory not found at {bots_dir}. Create it first." 
+ + # Add bots dir to sys.path for import + sys.path.insert(0, str(bots_dir)) + + try: + module = importlib.import_module(module_name) + except ImportError as e: + return f"Error: Could not import bot module '{module_name}.py' from {bots_dir}: {e}" + + # Find the first Bot subclass in the module + bot_class = None + for name, obj in inspect.getmembers(module): + if inspect.isclass(obj) and issubclass(obj, Bot) and obj != Bot: + bot_class = obj + break + + if not bot_class: + return f"Error: No Bot subclass found in {module_name}.py" + + bot_class().process() + + return 'Bot ran successfully!' + + elif subcmd == 'list': + # Optional: List available bots + bots_dir = Path(session.project_base_path) / '.dandy' / 'bots' + if not bots_dir.exists(): + return "No bots directory found." + bots = [f.stem.replace('_bot', 'Bot').title() for f in bots_dir.glob('*.py') if f.stem != '__init__'] + return "Available bots:\n" + '\n'.join(bots) if bots else "No bots found." + + else: + return f"Unknown subcommand '{subcmd}'. Try 'run' or 'list'." 
+ + def render(self): + # Placeholder; not used in current CLI, but required + pass \ No newline at end of file diff --git a/dandy/cli/actions/constants.py b/dandy/cli/actions/constants.py index ddb259ad..acdf2e0f 100644 --- a/dandy/cli/actions/constants.py +++ b/dandy/cli/actions/constants.py @@ -1,8 +1,10 @@ +from dandy.cli.actions.bot.action import BotAction from dandy.cli.actions.build.action import BuildAction from dandy.cli.actions.explain.action import ExplainAction from dandy.cli.actions.quit.action import QuitAction ACTIONS = ( + BotAction, BuildAction, ExplainAction, QuitAction, diff --git a/dandy/cli/actions/explain/intelligence/decoders/files_decoder.py b/dandy/cli/actions/explain/intelligence/decoders/files_decoder.py index 46b28891..f3a80e79 100644 --- a/dandy/cli/actions/explain/intelligence/decoders/files_decoder.py +++ b/dandy/cli/actions/explain/intelligence/decoders/files_decoder.py @@ -14,26 +14,10 @@ def process(self, prompt: Prompt | str): keys_values={ **{file_path: file_path for file_path in get_directory_listing( - dir_path=Path(session.project_base_path / 'dandy'), + dir_path=Path(session.project_base_path), max_depth=None, file_extensions=['py', 'md'], ) }, - **{ - file_path: file_path - for file_path in get_directory_listing( - dir_path=Path(session.project_base_path / 'docs'), - max_depth=None, - file_extensions=['py', 'md'], - ) - }, - **{ - file_path: file_path - for file_path in get_directory_listing( - dir_path=Path(session.project_base_path / 'tests'), - max_depth=None, - file_extensions=['py', 'md'], - ) - } } ) From c826d8ce37064056219550c20c542a0928baf183 Mon Sep 17 00:00:00 2001 From: Nathan Johnson Date: Fri, 27 Feb 2026 21:51:16 -0700 Subject: [PATCH 2/9] refactoring cli bot --- .github/copilot-instructions.md | 1 + dandy/cli/actions/action.py | 4 +- dandy/cli/actions/bot/action.py | 109 +++++++++++------- dandy/cli/intelligence/bots/markdown_bot.py | 14 --- .../cli/intelligence/bots/source_code_bot.py | 11 ++ 
.../intelligence/intel/source_code_intel.py | 9 ++ dandy/cli/session.py | 12 +- 7 files changed, 98 insertions(+), 62 deletions(-) delete mode 100644 dandy/cli/intelligence/bots/markdown_bot.py create mode 100644 dandy/cli/intelligence/bots/source_code_bot.py create mode 100644 dandy/cli/intelligence/intel/source_code_intel.py diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 88bb8abd..c97bf9c8 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -24,6 +24,7 @@ Dandy is a Python Artificial Intelligence Framework that simplifies the developm - **Imports**: Follow this order: standard library → third-party → local (dandy.*) - **Type hints**: Always use type hints for function signatures - **Imports**: Use absolute imports (e.g., `from dandy.bot.bot import Bot`) +- **Verbose**: Always use verbose names (e.g., `sub_command` not `subcmd`) ### Naming Conventions diff --git a/dandy/cli/actions/action.py b/dandy/cli/actions/action.py index dcc2245c..939c6a02 100644 --- a/dandy/cli/actions/action.py +++ b/dandy/cli/actions/action.py @@ -8,10 +8,10 @@ class BaseAction(ABC): description: str calls: tuple[str, ...] 
- def __post_init__(self): + def __init_subclass__(cls, **kwargs): check_attrs = ['name', 'description', 'calls'] for attr in check_attrs: - if not hasattr(self, attr): + if not hasattr(cls, attr): message = f'Command `{attr}` is required' raise ValueError(message) diff --git a/dandy/cli/actions/bot/action.py b/dandy/cli/actions/bot/action.py index 93bcfc26..0033da6d 100644 --- a/dandy/cli/actions/bot/action.py +++ b/dandy/cli/actions/bot/action.py @@ -1,14 +1,13 @@ -from time import sleep - import importlib import inspect import sys from pathlib import Path +from typing import Callable from dandy.bot.bot import Bot from dandy.cli.actions.action import BaseAction from dandy.cli.session import session -from dandy.cli.tui.tui import tui +from dandy.file.utils import make_directory class BotAction(BaseAction): @@ -16,61 +15,87 @@ class BotAction(BaseAction): description = 'Bots at your service!' calls = ('bot',) + def __init__(self): + self.bots_path = Path(session.project_dandy_path, 'bots') + + make_directory(self.bots_path) + + self.sub_commands_methods: dict[str, Callable] = { + 'build': self.build_bot, + 'list': self.list_bots, + 'help': self.help, + 'run': self.run_bot, + } + + def build_bot(self, user_input: str) -> str: + return f'Building "{user_input}"...' + def help(self): - # Simple help output (printed if called with no subcommand) - return "Usage: /bot run [optional inline prompt]\n" \ - "If no prompt, enter multi-line (end with /end). Other subcommands: list" + return f"""Usage: /bot run [optional inline prompt] + If no prompt, enter multi-line (end with /end). + Other subcommands: {self.sub_commands_methods.keys()} + """ + + def list_bots(self, user_input: str) -> str: + assert user_input + return "Available bots:\n" + '\n'.join(self.bot_files) if self.bot_files else "No bots found." 
def run(self, user_input: str) -> str: parts = user_input.split() - if not parts: - return self.help() - subcmd = parts[0].lower() + sub_command = parts[0].lower() if len(parts) > 0 else None - if subcmd == 'run': - if len(parts) < 2: - return "Error: Missing bot name. Usage: /bot run " + if sub_command in self.sub_commands_methods: + return self.sub_commands_methods[sub_command]( + user_input=user_input + ) - module_name = parts[1] - bots_dir = Path(session.project_base_path) / '.dandy' / 'bots' + else: + return self.help() - if not bots_dir.exists(): - return f"Error: Bots directory not found at {bots_dir}. Create it first." + def run_bot(self, user_input: str) -> str: + parts = user_input.split() - # Add bots dir to sys.path for import - sys.path.insert(0, str(bots_dir)) + if len(parts) < 2: + return "Error: Missing bot name. Usage: /bot run " - try: - module = importlib.import_module(module_name) - except ImportError as e: - return f"Error: Could not import bot module '{module_name}.py' from {bots_dir}: {e}" + module_name = parts[1] - # Find the first Bot subclass in the module - bot_class = None - for name, obj in inspect.getmembers(module): - if inspect.isclass(obj) and issubclass(obj, Bot) and obj != Bot: - bot_class = obj - break + # Add bots dir to sys.path for import + sys.path.insert(0, str(self.bots_path)) - if not bot_class: - return f"Error: No Bot subclass found in {module_name}.py" + try: + module = importlib.import_module(module_name) + except ImportError as e: + return f"Error: Could not import bot module '{module_name}.py' from {self.bots_path}: {e}" - bot_class().process() + # Find the first Bot subclass in the module + bot_class: type | None = None - return 'Bot ran successfully!' 
+ for name, obj in inspect.getmembers(module): + if inspect.isclass(obj) and issubclass(obj, Bot) and obj != Bot: + bot_class: type = obj + break - elif subcmd == 'list': - # Optional: List available bots - bots_dir = Path(session.project_base_path) / '.dandy' / 'bots' - if not bots_dir.exists(): - return "No bots directory found." - bots = [f.stem.replace('_bot', 'Bot').title() for f in bots_dir.glob('*.py') if f.stem != '__init__'] - return "Available bots:\n" + '\n'.join(bots) if bots else "No bots found." + if not bot_class: + return f"Error: No Bot subclass found in {module_name}.py" - else: - return f"Unknown subcommand '{subcmd}'. Try 'run' or 'list'." + try: + bot_class().process() + + return f'{bot_class.__name__} ran successfully!' + except Exception as e: + message = f"Bot failed with Error: {e}" + return message def render(self): # Placeholder; not used in current CLI, but required - pass \ No newline at end of file + pass + + @property + def bot_files(self) -> list[str] | None: + return [ + file.stem.replace('_bot', 'Bot').title() + for file in self.bots_path.glob('*.py') + if file.stem != '__init__' + ] diff --git a/dandy/cli/intelligence/bots/markdown_bot.py b/dandy/cli/intelligence/bots/markdown_bot.py deleted file mode 100644 index e88a45e4..00000000 --- a/dandy/cli/intelligence/bots/markdown_bot.py +++ /dev/null @@ -1,14 +0,0 @@ -from pathlib import Path - -from dandy import Bot - - -class MarkdownBot(Bot): - def __init__( - self, - markdown_file_path: str | Path - ): - with open(markdown_file_path, 'r') as f: - self.llm_system_override_prompt = f.read() - - super().__init__() diff --git a/dandy/cli/intelligence/bots/source_code_bot.py b/dandy/cli/intelligence/bots/source_code_bot.py new file mode 100644 index 00000000..64516ae0 --- /dev/null +++ b/dandy/cli/intelligence/bots/source_code_bot.py @@ -0,0 +1,11 @@ +from pathlib import Path + +from dandy import Bot +from dandy.cli.intelligence.intel.source_code_intel import SourceCodeIntel + + +class 
SourceCodeBot(Bot): + intel_class = SourceCodeIntel + + def process(self, user_input: str) -> None: + pass diff --git a/dandy/cli/intelligence/intel/source_code_intel.py b/dandy/cli/intelligence/intel/source_code_intel.py new file mode 100644 index 00000000..8e6f3834 --- /dev/null +++ b/dandy/cli/intelligence/intel/source_code_intel.py @@ -0,0 +1,9 @@ +from typing import Literal + +from dandy import BaseIntel + + +class SourceCodeIntel(BaseIntel): + language: Literal['python'] + extension: Literal['py'] + code: str \ No newline at end of file diff --git a/dandy/cli/session.py b/dandy/cli/session.py index 73c24f87..c0f1157a 100644 --- a/dandy/cli/session.py +++ b/dandy/cli/session.py @@ -8,27 +8,31 @@ class DandyCliSession(BaseIntel): project_base_path: Path | None = None - cli_working_directory: Path | None = None + project_dandy_path: Path | None = None + project_dandy_cli_path: Path | None = None is_loaded: bool = False def post_init(self, project_base_path: Path) -> None: self.project_base_path = project_base_path - self.cli_working_directory = Path( + self.project_dandy_path = Path( self.project_base_path, settings.DANDY_DIRECTORY, + ) + self.project_dandy_cli_path = Path( + self.project_dandy_path, CLI_WORKING_DIRECTORY, ) @property def session_file_path(self) -> Path: - return Path(self.cli_working_directory, 'session.json') + return Path(self.project_dandy_cli_path, 'session.json') def load(self): if file_exists(self.session_file_path): loaded_session = DandyCliSession.create_from_file(self.session_file_path) loaded_session.project_base_path = self.project_base_path - loaded_session.cli_working_directory = self.cli_working_directory + loaded_session.project_dandy_cli_path = self.project_dandy_cli_path self.__dict__.update(loaded_session.__dict__) From 28056274a4e0685e309fbb1fe5fe94bd87c55cb7 Mon Sep 17 00:00:00 2001 From: Nathan Johnson Date: Sat, 28 Feb 2026 21:07:12 -0700 Subject: [PATCH 3/9] chore: upgrade CLI, refactor messaging, and introduce new 
tooling --- dandy/cli/actions/bot/action.py | 40 +++++++++++++- dandy/cli/actions/{build => code}/__init__.py | 0 dandy/cli/actions/{build => code}/action.py | 14 ++--- dandy/cli/actions/constants.py | 4 +- dandy/cli/cli.py | 29 +++++----- .../cli/intelligence/bots/source_code_bot.py | 34 ++++++++++-- .../intelligence/intel/source_code_intel.py | 11 +++- dandy/cli/main.py | 27 ++++++++-- dandy/cli/tui/ascii.py | 6 +-- dandy/cli/tui/printer.py | 6 +-- dandy/conf.py | 10 ++-- dandy/constants.py | 2 +- dandy/llm/connector.py | 12 ++--- dandy/llm/prompt/prompt.py | 16 ++++-- dandy/llm/prompt/snippet.py | 22 +++++--- dandy/llm/request/message.py | 2 +- dandy/tool/__init__.py | 0 dandy/tool/git/__init__.py | 0 dandy/tool/git/tool.py | 53 +++++++++++++++++++ dandy/tool/tool.py | 15 ++++++ docs/changelog/v2_changelog.md | 10 +++- ruff.toml | 10 ++-- tests/llm/request/test_messages.py | 14 ++--- tests/llm/request/test_request.py | 2 +- tests/llm/test_llm_reset.py | 4 +- tests/llm/test_service.py | 2 +- 26 files changed, 269 insertions(+), 76 deletions(-) rename dandy/cli/actions/{build => code}/__init__.py (100%) rename dandy/cli/actions/{build => code}/action.py (59%) create mode 100644 dandy/tool/__init__.py create mode 100644 dandy/tool/git/__init__.py create mode 100644 dandy/tool/git/tool.py create mode 100644 dandy/tool/tool.py diff --git a/dandy/cli/actions/bot/action.py b/dandy/cli/actions/bot/action.py index 0033da6d..74e89a14 100644 --- a/dandy/cli/actions/bot/action.py +++ b/dandy/cli/actions/bot/action.py @@ -4,9 +4,12 @@ from pathlib import Path from typing import Callable +from dandy import Prompt from dandy.bot.bot import Bot from dandy.cli.actions.action import BaseAction +from dandy.cli.intelligence.bots.source_code_bot import SourceCodeBot from dandy.cli.session import session +from dandy.cli.tui.tui import tui from dandy.file.utils import make_directory @@ -28,7 +31,42 @@ def __init__(self): } def build_bot(self, user_input: str) -> str: - return 
f'Building "{user_input}"...' + parts = user_input.split() + + if len(parts) < 2: + bot_description = tui.get_user_input(question='Please describe the bot you want to build') + else: + bot_description = " ".join(parts[2:]) + + start_time = tui.printer.start_task('Building', 'create a new bot') + + code_reference_prompt = ( + Prompt() + .module_source('dandy.bot.bot') + .lb() + .module_source('dandy.llm.service') + .lb() + .module_source('dandy.file.service') + .lb() + .module_source('dandy.http.service') + .lb() + .module_source('dandy.intel.service') + .lb() + .sub_heading('Tutorials') + .lb() + .file(Path(session.project_base_path, 'docs', 'tutorials', 'bots.md')) + ) + + source_code_intel = SourceCodeBot().process( + user_input=bot_description, + code_reference_prompt=code_reference_prompt + ) + + source_code_intel.write_to_directory(self.bots_path) + + tui.printer.end_task(start_time) + + return f'Bot created at "{Path(self.bots_path, source_code_intel.recommended_file_name)}"' def help(self): return f"""Usage: /bot run [optional inline prompt] diff --git a/dandy/cli/actions/build/__init__.py b/dandy/cli/actions/code/__init__.py similarity index 100% rename from dandy/cli/actions/build/__init__.py rename to dandy/cli/actions/code/__init__.py diff --git a/dandy/cli/actions/build/action.py b/dandy/cli/actions/code/action.py similarity index 59% rename from dandy/cli/actions/build/action.py rename to dandy/cli/actions/code/action.py index e6603aad..0af41ee7 100644 --- a/dandy/cli/actions/build/action.py +++ b/dandy/cli/actions/code/action.py @@ -4,25 +4,25 @@ from dandy.cli.tui.tui import tui -class BuildAction(BaseAction): - name = 'Build' - description = 'Build something inside your project!' - calls = ('b', 'build') +class CodeAction(BaseAction): + name = 'Code' + description = 'Code something inside your project!' 
+ calls = ('c', 'Code') def help(self): print('Chat help') def run(self, user_input: str) -> str: if not user_input: - user_input = tui.get_user_input(question='What would you like to build?') + user_input = tui.get_user_input(question='What would you like to code?') - start_time = tui.printer.start_task('Building', 'some sleepy time') + start_time = tui.printer.start_task('Coding', 'some sleepy time') sleep(1.0) tui.printer.end_task(start_time) - return f'Building {user_input}...' + return f'Coding {user_input}...' def render(self): print('hello') diff --git a/dandy/cli/actions/constants.py b/dandy/cli/actions/constants.py index acdf2e0f..fd744e80 100644 --- a/dandy/cli/actions/constants.py +++ b/dandy/cli/actions/constants.py @@ -1,11 +1,11 @@ from dandy.cli.actions.bot.action import BotAction -from dandy.cli.actions.build.action import BuildAction +from dandy.cli.actions.code.action import CodeAction from dandy.cli.actions.explain.action import ExplainAction from dandy.cli.actions.quit.action import QuitAction ACTIONS = ( BotAction, - BuildAction, + CodeAction, ExplainAction, QuitAction, ) \ No newline at end of file diff --git a/dandy/cli/cli.py b/dandy/cli/cli.py index f9d8919c..f555cdab 100644 --- a/dandy/cli/cli.py +++ b/dandy/cli/cli.py @@ -15,6 +15,21 @@ def __init__(self): ) ) + def process_user_input(self, user_input: str): + user_input_words = user_input.split(' ') + + if user_input_words[0][0] == '/': + self.action_manager.call( + action_key=user_input_words[0][1:], + user_input=' '.join(user_input_words[1:]), + ) + + else: + self.action_manager.call( + action_key='help', + user_input=' '.join(user_input_words), + ) + def run(self): tui.printer.welcome() @@ -22,19 +37,7 @@ def run(self): user_input = self.newest_user_input if user_input is not None: - user_input_words = user_input.split(' ') - - if user_input_words[0][0] == '/': - self.action_manager.call( - action_key=user_input_words[0][1:], - user_input=' '.join(user_input_words[1:]), - ) - - else: 
- self.action_manager.call( - action_key='help', - user_input=' '.join(user_input_words), - ) + self.process_user_input(user_input) user_input = tui.get_user_input() diff --git a/dandy/cli/intelligence/bots/source_code_bot.py b/dandy/cli/intelligence/bots/source_code_bot.py index 64516ae0..954aa9d4 100644 --- a/dandy/cli/intelligence/bots/source_code_bot.py +++ b/dandy/cli/intelligence/bots/source_code_bot.py @@ -1,11 +1,39 @@ from pathlib import Path -from dandy import Bot +from dandy import Bot, Prompt, recorder_to_html_file from dandy.cli.intelligence.intel.source_code_intel import SourceCodeIntel class SourceCodeBot(Bot): + role = 'Senior Developer' + task = 'Read the instructions and write the source code for the user.' + guidelines = Prompt().list([ + 'You\'re only creating one file so focus on completeness.', + 'The file name should not contain a path and should be post fixed with `_bot`.', + ]) intel_class = SourceCodeIntel - def process(self, user_input: str) -> None: - pass + @recorder_to_html_file('source_code_bot') + def process( + self, + user_input: str, + code_reference_prompt: Prompt, + ) -> SourceCodeIntel: + self.llm.messages.add_message( + role='user', + text=( + Prompt() + .text('Below is the code I want you to reference while writing the source code for my next request.') + .prompt(code_reference_prompt) + .to_str() + ) + ) + + self.llm.messages.add_message( + role='system', + text='I have read through the provided code and will use it as a reference.' 
+ ) + + return self.llm.prompt_to_intel( + prompt=user_input, + ) diff --git a/dandy/cli/intelligence/intel/source_code_intel.py b/dandy/cli/intelligence/intel/source_code_intel.py index 8e6f3834..67da6f4c 100644 --- a/dandy/cli/intelligence/intel/source_code_intel.py +++ b/dandy/cli/intelligence/intel/source_code_intel.py @@ -1,9 +1,18 @@ +from pathlib import Path from typing import Literal from dandy import BaseIntel +from dandy.file.utils import write_to_file class SourceCodeIntel(BaseIntel): + recommended_file_name: str language: Literal['python'] extension: Literal['py'] - code: str \ No newline at end of file + code: str + + def write_to_directory(self, dir_path: Path | str): + write_to_file( + file_path=Path(dir_path) / self.recommended_file_name, + content=self.code, + ) diff --git a/dandy/cli/main.py b/dandy/cli/main.py index 2042afa8..7bf5f7dc 100644 --- a/dandy/cli/main.py +++ b/dandy/cli/main.py @@ -2,6 +2,7 @@ from pathlib import Path import dotenv +from blessed import Terminal CWD_PATH = Path.cwd() @@ -15,7 +16,8 @@ for env_file_name in env_file_names: env_file_path = Path(CWD_PATH, env_file_name) if env_file_path.exists(): - print(f'Loading environment variables from "{env_file_path}"') + env_term = Terminal() + print(env_term.blue(f'\nLoading environment variables from "{env_file_path}"')) dotenv.load_dotenv(env_file_path) sys.path.append(str(CWD_PATH)) @@ -39,12 +41,31 @@ def main(): if not session.is_loaded: session.save() - from dandy.cli.cli import DandyCli cli = DandyCli() - cli.run() + if len(sys.argv) > 1: + user_input = ' '.join(sys.argv[1:]) + + if user_input[0] == '-': + user_input = '/' + user_input[1:] + + if user_input[0] != '/': + user_input = '/' + user_input + + arg_term = Terminal() + + print(arg_term.bold_blue(f'\nDandy')) + + cli.process_user_input( + user_input=user_input + ) + + else: + cli.run() + + print('') if __name__ == '__main__': diff --git a/dandy/cli/tui/ascii.py b/dandy/cli/tui/ascii.py index 5f887aaa..9ae1222b 100644 
--- a/dandy/cli/tui/ascii.py +++ b/dandy/cli/tui/ascii.py @@ -1,10 +1,8 @@ -DANDY_ANSII = """ - ███████████ ████████ ███████ ████ ████ ████████ ████ ████ +DANDY_ANSII = """ ███████████ ████████ ███████ ████ ████ ████████ ████ ████ ▒█▒▒▒▒▒▒▒▒██ ▒▒██▒▒▒▒██ ██▒▒▒▒▒██ ▒▒████ ▒▒██ ▒▒██▒▒▒▒██ ▒▒██ ▒▒██ ▒█ ▒██ ▒██ ▒▒██ ▒██ ▒██ ▒██▒██ ▒██ ▒██ ▒▒██ ▒▒██ ██ ▒█ ▒██ ▒██ ▒██ ▒█████████ ▒██▒▒██▒██ ▒██ ▒██ ▒▒████ ▒███████████ ▒██ ▒██ ▒██▒▒▒▒▒██ ▒██ ▒▒████ ▒██ ▒██ ▒▒██ ▒█ ▒██ ▒██ ██ ▒██ ▒██ ▒██ ▒▒███ ▒██ ██ ▒██ █████████████████ ████████ ████ ████ ████ ▒▒███ ████████ ████ -▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ ▒▒▒▒▒▒▒▒ ▒▒▒▒ ▒▒▒▒ ▒▒▒▒ ▒▒▒ ▒▒▒▒▒▒▒▒ ▒▒▒▒ -""" +▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ ▒▒▒▒▒▒▒▒ ▒▒▒▒ ▒▒▒▒ ▒▒▒▒ ▒▒▒ ▒▒▒▒▒▒▒▒ ▒▒▒▒""" diff --git a/dandy/cli/tui/printer.py b/dandy/cli/tui/printer.py index 355ac533..4b2d1109 100644 --- a/dandy/cli/tui/printer.py +++ b/dandy/cli/tui/printer.py @@ -27,7 +27,7 @@ def purple_divider(self): print(self.term.bold_purple('─' * self.term.width), flush=True) def divider(self): - print('─' * self.term.width, flush=True) + print(self.term.bold_grey('─' * self.term.width), flush=True) def green_divider(self): print(self.term.bold_green('─' * self.term.width), flush=True) @@ -36,7 +36,7 @@ def red_divider(self): print(self.term.bold_red('─' * self.term.width), flush=True) def welcome(self): - print(self.term.bold_blue(f'\n{DANDY_ANSII}')) + print(self.term.bold_blue(f'\n{DANDY_ANSII}\n')) self.blue_divider() print(self.term.bold_blue('Version : ') + constants.__VERSION__) print(self.term.bold_blue('Model : ') + LlmConfig('DEFAULT').model) @@ -56,7 +56,7 @@ def completed_action(self, start_time: float, action: BaseAction): def start_task(self, action_name: str, task: str) -> float: self.indented_event( - text=f'{self.term.orange}{action_name}{self.term.normal} "{task}" ... ', + text=f'{self.term.bold_orange}{action_name}{self.term.normal} "{task}" ... 
', indent=1, end='', ) diff --git a/dandy/conf.py b/dandy/conf.py index ba5364c5..baa417a1 100644 --- a/dandy/conf.py +++ b/dandy/conf.py @@ -1,4 +1,5 @@ import importlib +import os from dandy.core.exceptions import DandyCriticalError from dandy.core.utils import get_settings_module_name @@ -36,9 +37,12 @@ def load_user_settings(self): if self._settings_module_name is not None: try: - self._user_settings = importlib.import_module( - self._settings_module_name - ) + if self._user_settings is not ...: + self._user_settings = importlib.reload(self._user_settings) + else: + self._user_settings = importlib.import_module( + self._settings_module_name + ) self._has_loaded_user_settings = True diff --git a/dandy/constants.py b/dandy/constants.py index 0730a7b8..d82e8989 100644 --- a/dandy/constants.py +++ b/dandy/constants.py @@ -1,4 +1,4 @@ -__VERSION__ = '2.0.0' +__VERSION__ = '2.1.0' # Cache diff --git a/dandy/llm/connector.py b/dandy/llm/connector.py index 7ada10b3..6e4da363 100644 --- a/dandy/llm/connector.py +++ b/dandy/llm/connector.py @@ -48,7 +48,7 @@ def has_retry_attempts_available(self) -> bool: return self.prompt_retry_attempt < self.llm_config.options.prompt_retry_count def _prepend_system_message(self): - self.request_body.messages.create_message( + self.request_body.messages.add_message( role='system', text=self.system_prompt_str, prepend=True, @@ -92,13 +92,13 @@ def prompt_to_intel( ) if prompt is not None: - self.request_body.messages.create_message( + self.request_body.messages.add_message( role='user', text=Prompt(prompt).to_str(), ) if audio_urls or audio_file_paths or audio_base64_strings: - self.request_body.messages.create_message( + self.request_body.messages.add_message( role='user', audio_urls=audio_urls, audio_file_paths=audio_file_paths, @@ -106,7 +106,7 @@ def prompt_to_intel( ) if image_urls or image_file_paths or image_base64_strings: - self.request_body.messages.create_message( + self.request_body.messages.add_message( role='user', 
image_urls=image_urls, image_file_paths=image_file_paths, @@ -157,7 +157,7 @@ def _request_to_intel( intel=intel_object, ) - self.request_body.messages.create_message( + self.request_body.messages.add_message( role='assistant', text=self.response_str ) @@ -189,7 +189,7 @@ def retry_request_to_intel( remaining_attempts=self.llm_config.options.prompt_retry_count - self.prompt_retry_attempt, ) - self.request_body.messages.create_message( + self.request_body.messages.add_message( role='user', text=Prompt(retry_user_prompt).to_str() ) diff --git a/dandy/llm/prompt/prompt.py b/dandy/llm/prompt/prompt.py index b3211512..25de3e3f 100644 --- a/dandy/llm/prompt/prompt.py +++ b/dandy/llm/prompt/prompt.py @@ -91,7 +91,7 @@ def file( file_path=file_path, encoding=encoding, triple_backtick=triple_backtick, - triple_backtick_label=triple_backtick_label + triple_backtick_inner_label=triple_backtick_label ) ) @@ -140,12 +140,18 @@ def intel_schema( return self - def module_source(self, module_name: str, triple_backtick: bool = True) -> Self: + def module_source( + self, + module_name: str, + triple_backtick: bool = True, + language: str = 'python', + ) -> Self: self.snippets.append( snippet.ModuleSourceSnippet( module_name=module_name, triple_backtick=triple_backtick, - triple_backtick_label=module_name, + triple_backtick_inner_label=language, + triple_backtick_outer_label=f'module: {module_name}' ) ) @@ -156,7 +162,7 @@ def object_source(self, object_module_name: str, triple_backtick: bool = True) - snippet.ObjectSourceSnippet( object_module_name=object_module_name, triple_backtick=triple_backtick, - triple_backtick_label=object_module_name, + triple_backtick_inner_label=object_module_name, ) ) @@ -214,7 +220,7 @@ def text( text=text, label=label, triple_backtick=triple_backtick, - triple_backtick_label=triple_backtick_label, + triple_backtick_inner_label=triple_backtick_label, ) ) diff --git a/dandy/llm/prompt/snippet.py b/dandy/llm/prompt/snippet.py index 4c113154..84f9486e 
100644 --- a/dandy/llm/prompt/snippet.py +++ b/dandy/llm/prompt/snippet.py @@ -25,19 +25,29 @@ @dataclass(kw_only=True) class BaseSnippet(ABC): triple_backtick: bool = False - triple_backtick_label: str | None = None + triple_backtick_inner_label: str | None = None + triple_backtick_outer_label: str | None = None def __str__(self): return self.to_str() def to_str(self): + snippet_str = '' + if self.triple_backtick: - if self.triple_backtick_label: - return f'``` {self.triple_backtick_label}\n{self._to_str()}```\n' - else: - return f'```\n{self._to_str()}```\n' + if self.triple_backtick_outer_label: + snippet_str += f'**{self.triple_backtick_outer_label}**\n' + + snippet_str += '```' + + if self.triple_backtick_inner_label: + snippet_str += f'{self.triple_backtick_inner_label}\n' + + snippet_str += f'{self._to_str()}```\n' + else: + snippet_str += self._to_str() - return self._to_str() + return snippet_str @abstractmethod def _to_str(self) -> str: diff --git a/dandy/llm/request/message.py b/dandy/llm/request/message.py index 8c66dc89..0291e703 100644 --- a/dandy/llm/request/message.py +++ b/dandy/llm/request/message.py @@ -136,7 +136,7 @@ def estimated_token_count(self) -> int: def has_system_message(self) -> bool: return len(self.messages) > 0 and self.messages[0].role == 'system' - def create_message( + def add_message( self, role: RoleLiteralStr, text: str | None = None, diff --git a/dandy/tool/__init__.py b/dandy/tool/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/dandy/tool/git/__init__.py b/dandy/tool/git/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/dandy/tool/git/tool.py b/dandy/tool/git/tool.py new file mode 100644 index 00000000..b79de842 --- /dev/null +++ b/dandy/tool/git/tool.py @@ -0,0 +1,53 @@ +import subprocess +from pathlib import Path + +from dandy.tool.tool import BaseTool + + +class GitTool(BaseTool): + def setup(self) -> bool: + try: + subprocess.run( + ['git', '--version'], + capture_output=True, 
+ text=True, + check=True + ) + + return True + + except (subprocess.CalledProcessError, FileNotFoundError): + return False + + @staticmethod + def diff_file(self, file_path: Path | str) -> dict: + file_path_str = str(file_path) + + try: + result = subprocess.run( + ['git', 'diff', file_path_str], + capture_output=True, + text=True, + check=True + ) + + return { + 'file_path': file_path_str, + 'diff': result.stdout, + 'has_changes': bool(result.stdout.strip()), + 'error': None + } + + except subprocess.CalledProcessError as e: + return { + 'error': f'Git command failed: {e.stderr}', + 'file_path': file_path_str, + 'diff': None + } + + except Exception as e: + return { + 'error': f'Unexpected error: {str(e)}', + 'file_path': file_path_str, + 'diff': None + } diff --git a/dandy/tool/tool.py b/dandy/tool/tool.py new file mode 100644 index 00000000..865a629d --- /dev/null +++ b/dandy/tool/tool.py @@ -0,0 +1,15 @@ +from abc import ABC + + +class BaseTool(ABC): + def __init__(self) -> None: + if not self.setup(): + print(f"Failed to setup {self.__class__.__name__}") + + self.__post_init__() + + def __post_init__(self) -> None: + pass + + def setup(self) -> None: + raise NotImplementedError \ No newline at end of file diff --git a/docs/changelog/v2_changelog.md b/docs/changelog/v2_changelog.md index ec077f6d..11facbf2 100644 --- a/docs/changelog/v2_changelog.md +++ b/docs/changelog/v2_changelog.md @@ -1,6 +1,14 @@ # Changelog -## v2.0.0.alpha +## v2.1.0 + +### Features + +- CLI !!! + - Use `dandy` to access the new command line interface. 
+ - + +## v2.0.0 ### Major Release diff --git a/ruff.toml b/ruff.toml index 23d92a0c..4fe680be 100644 --- a/ruff.toml +++ b/ruff.toml @@ -12,11 +12,11 @@ ignore = [ "ANN002", "ANN003", "ANN101", - "ANN201", - "ANN202", - "ANN204", - "ANN205", - "ANN206", +# "ANN201", +# "ANN202", +# "ANN204", +# "ANN205", +# "ANN206", "ANN401", "BLE001", "C901", diff --git a/tests/llm/request/test_messages.py b/tests/llm/request/test_messages.py index 4f8ebb2c..467ba4de 100644 --- a/tests/llm/request/test_messages.py +++ b/tests/llm/request/test_messages.py @@ -7,13 +7,13 @@ class TestMessages(TestCase): def test_message_history(self): message_history = MessageHistory() - message_history.create_message(role='user', text='I was 91 years old a few days ago') - message_history.create_message(role='system', text='When is your birthday?') - message_history.create_message(role='user', text='It is my birthday today!') - message_history.create_message(role='system', text='How old are you?') - message_history.create_message(role='user', text='I just turned 92') - message_history.create_message(role='system', text='Wow! That is so old.') - message_history.create_message(role='user', text='That is ok I am feeling great') + message_history.add_message(role='user', text='I was 91 years old a few days ago') + message_history.add_message(role='system', text='When is your birthday?') + message_history.add_message(role='user', text='It is my birthday today!') + message_history.add_message(role='system', text='How old are you?') + message_history.add_message(role='user', text='I just turned 92') + message_history.add_message(role='system', text='Wow! 
That is so old.') + message_history.add_message(role='user', text='That is ok I am feeling great') class BirthdayIntel(BaseIntel): past_age: int diff --git a/tests/llm/request/test_request.py b/tests/llm/request/test_request.py index a0b428fa..50a7fca3 100644 --- a/tests/llm/request/test_request.py +++ b/tests/llm/request/test_request.py @@ -16,7 +16,7 @@ def test_config_request_body(self): test_image_bytes = base64.b64encode(img.read()) test_image_string = test_image_bytes.decode('utf-8') - request_body.messages.create_message( + request_body.messages.add_message( role='system', text='You are a helpful assistant.', image_base64_strings=[ diff --git a/tests/llm/test_llm_reset.py b/tests/llm/test_llm_reset.py index fb270004..a0767f02 100644 --- a/tests/llm/test_llm_reset.py +++ b/tests/llm/test_llm_reset.py @@ -9,7 +9,7 @@ def test_llm_service_reset(self): self.assertEqual(len(bot.llm.messages), 0) - bot.llm.messages.create_message(role='user', text='Hello!') + bot.llm.messages.add_message(role='user', text='Hello!') self.assertEqual(len(bot.llm.messages), 1) bot.llm.reset() @@ -18,7 +18,7 @@ def test_llm_service_reset(self): def test_llm_reset_messages_alias_behavior(self): bot = Bot() - bot.llm.messages.create_message(role='user', text='A') + bot.llm.messages.add_message(role='user', text='A') self.assertEqual(len(bot.llm.messages), 1) bot.llm.reset_messages() diff --git a/tests/llm/test_service.py b/tests/llm/test_service.py index 5090f98a..61ea43c4 100644 --- a/tests/llm/test_service.py +++ b/tests/llm/test_service.py @@ -55,7 +55,7 @@ def test_prompt_to_intel_with_no_prompt_argument(self): def test_prompt_to_intel_with_message_and_no_prompt_argument(self): bot = Bot() - bot.llm.messages.create_message( + bot.llm.messages.add_message( role='user', text='Hello!' 
) From c2bff197a24d74f187985ba7defd70a459a6c11d Mon Sep 17 00:00:00 2001 From: Nathan Johnson Date: Sun, 1 Mar 2026 11:27:00 -0700 Subject: [PATCH 4/9] diligence --- dandy/cli/actions/code/action.py | 4 ++-- dandy/core/service/mixin.py | 10 +++++----- dandy/core/service/service.py | 8 ++++---- dandy/llm/diligence/__init__.py | 0 dandy/llm/diligence/diligence.py | 6 ++++++ dandy/llm/mixin.py | 9 +++++++-- 6 files changed, 24 insertions(+), 13 deletions(-) create mode 100644 dandy/llm/diligence/__init__.py create mode 100644 dandy/llm/diligence/diligence.py diff --git a/dandy/cli/actions/code/action.py b/dandy/cli/actions/code/action.py index 0af41ee7..c5f242c9 100644 --- a/dandy/cli/actions/code/action.py +++ b/dandy/cli/actions/code/action.py @@ -9,7 +9,7 @@ class CodeAction(BaseAction): description = 'Code something inside your project!' calls = ('c', 'Code') - def help(self): + def help(self) -> None: print('Chat help') def run(self, user_input: str) -> str: @@ -24,5 +24,5 @@ def run(self, user_input: str) -> str: return f'Coding {user_input}...' 
- def render(self): + def render(self) -> None: print('hello') diff --git a/dandy/core/service/mixin.py b/dandy/core/service/mixin.py index e668b1f8..b37dfdee 100644 --- a/dandy/core/service/mixin.py +++ b/dandy/core/service/mixin.py @@ -9,11 +9,11 @@ class BaseServiceMixin(ABC): _required_attrs: ClassVar[tuple[str, ...]] = () - def __init__(self, **kwargs): + @abstractmethod + def __init__(self, **kwargs) -> None: """Required for super() call chain""" - pass - def __init_subclass__(cls): + def __init_subclass__(cls) -> None: super().__init_subclass__() for attr in cls._required_attrs: if getattr(cls, attr) is None: @@ -30,6 +30,6 @@ def _get_service_instance(self, service_class: type[T]) -> T: return getattr(self, service_instance_attr) @abstractmethod - def reset(self): + def reset(self) -> None: """Cannot use NotImplementedError do to call chain""" - pass + diff --git a/dandy/core/service/service.py b/dandy/core/service/service.py index a3e67405..2f73c7cc 100644 --- a/dandy/core/service/service.py +++ b/dandy/core/service/service.py @@ -9,13 +9,13 @@ class BaseService(ABC, Generic[T_co]): - def __init__(self, obj: T_co): + def __init__(self, obj: T_co) -> None: self.recorder_event_id = generate_recorder_event_id() self.obj = obj self.__post_init__() - def __post_init__(self): + def __post_init__(self) -> None: pass @property @@ -23,5 +23,5 @@ def obj_class(self) -> type[T_co]: return self.obj.__class__ @abstractmethod - def reset(self): - raise NotImplementedError \ No newline at end of file + def reset(self) -> None: + raise NotImplementedError diff --git a/dandy/llm/diligence/__init__.py b/dandy/llm/diligence/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/dandy/llm/diligence/diligence.py b/dandy/llm/diligence/diligence.py new file mode 100644 index 00000000..2ab33682 --- /dev/null +++ b/dandy/llm/diligence/diligence.py @@ -0,0 +1,6 @@ +class Diligence: + def __init__(self, level: float) -> None: + if 2.0 > level > 0.0: + message = 
f'Diligence level must be between 0.0 and 2.0 not {level}' + raise ValueError(message) + diff --git a/dandy/llm/mixin.py b/dandy/llm/mixin.py index c4846050..059b3198 100644 --- a/dandy/llm/mixin.py +++ b/dandy/llm/mixin.py @@ -8,6 +8,7 @@ class LlmServiceMixin(BaseServiceMixin): + diligence: float = 1.0 llm_config: str = 'DEFAULT' intel_class: type[BaseIntel] = DefaultIntel role: Prompt | str = 'Assistant' @@ -25,12 +26,16 @@ class LlmServiceMixin(BaseServiceMixin): def __init__( self, + diligence: float | None = None, llm_config: str | None = None, llm_temperature: float | None = None, **kwargs, - ): + ) -> None: super().__init__(**kwargs) + if isinstance(diligence, float): + self.diligence = diligence + if isinstance(llm_config, str): self.llm_config = llm_config @@ -44,6 +49,6 @@ def get_llm_config(self) -> LlmConfig: def llm(self) -> LlmService: return self._get_service_instance(LlmService) - def reset(self): + def reset(self) -> None: super().reset() self.llm.reset() From 5eb6b2c7782d0a8d6c178c2e469ae27a5018b1af Mon Sep 17 00:00:00 2001 From: Nathan Johnson Date: Sun, 1 Mar 2026 20:39:48 -0700 Subject: [PATCH 5/9] feat: introduce LLM diligence framework with pre/post handlers and stop word removal --- dandy/cache/memory/cache.py | 4 +-- dandy/cache/memory/decorators.py | 4 +-- dandy/cache/sqlite/cache.py | 5 ++- dandy/cache/sqlite/decorators.py | 4 +-- dandy/cli/actions/bot/action.py | 2 +- dandy/cli/actions/code/action.py | 2 +- .../cli/actions/code/intelligence/__init__.py | 0 .../code/intelligence/bots/__init__.py | 0 .../code/intelligence/bots/coding_bot.py | 5 +++ dandy/llm/connector.py | 22 +++++++++++- dandy/llm/diligence/diligence.py | 31 +++++++++++++--- dandy/llm/diligence/handler.py | 36 +++++++++++++++++++ dandy/llm/diligence/second_pass/__init__.py | 0 dandy/llm/diligence/second_pass/diligence.py | 19 ++++++++++ .../diligence/stop_word_removal/__init__.py | 20 +++++++++++ .../diligence/stop_word_removal/constants.py | 0 
.../diligence/stop_word_removal/diligence.py | 19 ++++++++++ dandy/llm/service.py | 1 + dandy/tool/git/tool.py | 2 +- ruff.toml | 5 --- tests/llm/diligence/__init__.py | 0 tests/llm/diligence/intelligence/__init__.py | 0 tests/llm/diligence/intelligence/bot.py | 7 ++++ tests/llm/diligence/test_diligence.py | 17 +++++++++ 24 files changed, 183 insertions(+), 22 deletions(-) create mode 100644 dandy/cli/actions/code/intelligence/__init__.py create mode 100644 dandy/cli/actions/code/intelligence/bots/__init__.py create mode 100644 dandy/cli/actions/code/intelligence/bots/coding_bot.py create mode 100644 dandy/llm/diligence/handler.py create mode 100644 dandy/llm/diligence/second_pass/__init__.py create mode 100644 dandy/llm/diligence/second_pass/diligence.py create mode 100644 dandy/llm/diligence/stop_word_removal/__init__.py create mode 100644 dandy/llm/diligence/stop_word_removal/constants.py create mode 100644 dandy/llm/diligence/stop_word_removal/diligence.py create mode 100644 tests/llm/diligence/__init__.py create mode 100644 tests/llm/diligence/intelligence/__init__.py create mode 100644 tests/llm/diligence/intelligence/bot.py create mode 100644 tests/llm/diligence/test_diligence.py diff --git a/dandy/cache/memory/cache.py b/dandy/cache/memory/cache.py index cad48b57..9cc9affa 100644 --- a/dandy/cache/memory/cache.py +++ b/dandy/cache/memory/cache.py @@ -1,7 +1,7 @@ from typing import OrderedDict, Any -import dandy.constants from dandy.cache.cache import BaseCache +from dandy.constants import CACHE_DEFAULT_NAME _memory_cache = {} @@ -32,7 +32,7 @@ def clean(self): self._cache.popitem(last=False) @classmethod - def clear(cls, cache_name: str = dandy.constants.CACHE_DEFAULT_NAME): + def clear(cls, cache_name: str = CACHE_DEFAULT_NAME): if cache_name in _memory_cache: _memory_cache[cache_name].clear() diff --git a/dandy/cache/memory/decorators.py b/dandy/cache/memory/decorators.py index 52b5e17a..46d1054a 100644 --- a/dandy/cache/memory/decorators.py +++ 
b/dandy/cache/memory/decorators.py @@ -1,14 +1,14 @@ from functools import wraps from typing import Callable -import dandy.constants from dandy.cache.decorators import cache_decorator_function from dandy.cache.memory.cache import MemoryCache from dandy.conf import settings +from dandy.constants import CACHE_DEFAULT_NAME def cache_to_memory( - cache_name: str = dandy.constants.CACHE_DEFAULT_NAME, + cache_name: str = CACHE_DEFAULT_NAME, limit: int | None = None, ) -> Callable: if limit is None: diff --git a/dandy/cache/sqlite/cache.py b/dandy/cache/sqlite/cache.py index 1abc5577..866870a6 100644 --- a/dandy/cache/sqlite/cache.py +++ b/dandy/cache/sqlite/cache.py @@ -2,10 +2,9 @@ import sqlite3 from typing import Any -import dandy.constants from dandy.cache.cache import BaseCache from dandy.cache.sqlite.connection import SqliteConnection -from dandy.constants import SQLITE_CACHE_TABLE_NAME, SQLITE_CACHE_DB_NAME +from dandy.constants import SQLITE_CACHE_TABLE_NAME, SQLITE_CACHE_DB_NAME, CACHE_DEFAULT_NAME class SqliteCache(BaseCache): @@ -108,7 +107,7 @@ def clean(self): connection.commit() @classmethod - def clear(cls, cache_name: str = dandy.constants.CACHE_DEFAULT_NAME): + def clear(cls, cache_name: str = CACHE_DEFAULT_NAME): if cls._table_exists(): with SqliteConnection(SQLITE_CACHE_DB_NAME) as connection: cursor = connection.cursor() diff --git a/dandy/cache/sqlite/decorators.py b/dandy/cache/sqlite/decorators.py index ac5cde2d..b903585a 100644 --- a/dandy/cache/sqlite/decorators.py +++ b/dandy/cache/sqlite/decorators.py @@ -1,14 +1,14 @@ from functools import wraps from typing import Callable -import dandy.constants from dandy.cache.decorators import cache_decorator_function from dandy.cache.sqlite.cache import SqliteCache from dandy.conf import settings +from dandy.constants import CACHE_DEFAULT_NAME def cache_to_sqlite( - cache_name: str = dandy.constants.CACHE_DEFAULT_NAME, + cache_name: str = CACHE_DEFAULT_NAME, limit: int | None = None ) -> Callable: if 
limit is None: diff --git a/dandy/cli/actions/bot/action.py b/dandy/cli/actions/bot/action.py index 74e89a14..c4c6f6fb 100644 --- a/dandy/cli/actions/bot/action.py +++ b/dandy/cli/actions/bot/action.py @@ -16,7 +16,7 @@ class BotAction(BaseAction): name = 'Bot' description = 'Bots at your service!' - calls = ('bot',) + calls = ('b', 'bot') def __init__(self): self.bots_path = Path(session.project_dandy_path, 'bots') diff --git a/dandy/cli/actions/code/action.py b/dandy/cli/actions/code/action.py index c5f242c9..38f2d394 100644 --- a/dandy/cli/actions/code/action.py +++ b/dandy/cli/actions/code/action.py @@ -7,7 +7,7 @@ class CodeAction(BaseAction): name = 'Code' description = 'Code something inside your project!' - calls = ('c', 'Code') + calls = ('c', 'code') def help(self) -> None: print('Chat help') diff --git a/dandy/cli/actions/code/intelligence/__init__.py b/dandy/cli/actions/code/intelligence/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/dandy/cli/actions/code/intelligence/bots/__init__.py b/dandy/cli/actions/code/intelligence/bots/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/dandy/cli/actions/code/intelligence/bots/coding_bot.py b/dandy/cli/actions/code/intelligence/bots/coding_bot.py new file mode 100644 index 00000000..2abbdc14 --- /dev/null +++ b/dandy/cli/actions/code/intelligence/bots/coding_bot.py @@ -0,0 +1,5 @@ +from dandy import Bot + + +class CodingBot(Bot): + pass \ No newline at end of file diff --git a/dandy/llm/connector.py b/dandy/llm/connector.py index 6e4da363..b16d742a 100644 --- a/dandy/llm/connector.py +++ b/dandy/llm/connector.py @@ -8,6 +8,8 @@ from dandy.intel.factory import IntelFactory from dandy.intel.typing import IntelType from dandy.llm.config import LlmConfig +from dandy.llm.diligence.handler import BaseDiligenceHandler, PreDiligenceHandler, \ + PostDiligenceHandler from dandy.llm.exceptions import LlmCriticalError, LlmRecoverableError from dandy.llm.intelligence.prompts import 
service_system_validation_error_prompt from dandy.llm.prompt.prompt import Prompt @@ -28,6 +30,7 @@ def __init__( llm_config: LlmConfig, intel_class: type[IntelType] | None, system_prompt: Prompt | str, + diligence: float = 1.0, ): self.recorder_event_id = recorder_event_id @@ -43,10 +46,20 @@ def __init__( self.system_prompt_str = str(system_prompt) + self.diligence = diligence + @property def has_retry_attempts_available(self) -> bool: return self.prompt_retry_attempt < self.llm_config.options.prompt_retry_count + def _apply_diligence(self, diligence_handler_class: type[BaseDiligenceHandler]) -> None: + if self.diligence != 1.0: + diligence_handler_class( + level=self.diligence, + ).apply( + llm_connector=self + ) + def _prepend_system_message(self): self.request_body.messages.add_message( role='system', @@ -117,7 +130,13 @@ def prompt_to_intel( message = 'You cannot prompt the LlmService without at least one system and one user message.' raise LlmCriticalError(message) - return self._request_to_intel() + self._apply_diligence(PreDiligenceHandler) + + response_intel_object = self._request_to_intel() + + self._apply_diligence(PostDiligenceHandler) + + return response_intel_object def _reset_prompt_retry_attempt(self): self.prompt_retry_attempt = 0 @@ -220,3 +239,4 @@ def _set_intel( def _update_request_body(self): for key, value in self.llm_config.options.model_dump(exclude_none=True).items(): setattr(self.request_body, key, value) + diff --git a/dandy/llm/diligence/diligence.py b/dandy/llm/diligence/diligence.py index 2ab33682..3ca3bf71 100644 --- a/dandy/llm/diligence/diligence.py +++ b/dandy/llm/diligence/diligence.py @@ -1,6 +1,29 @@ -class Diligence: - def __init__(self, level: float) -> None: - if 2.0 > level > 0.0: - message = f'Diligence level must be between 0.0 and 2.0 not {level}' +from __future__ import annotations + +from abc import ABC, abstractmethod +from typing import Callable, TYPE_CHECKING + +if TYPE_CHECKING: + from dandy.llm.connector import 
LlmConnector + + +class BaseDiligence(ABC): + trigger_level: float + trigger_operator: Callable[[float, float], bool] + + def __init_subclass__(cls, **kwargs): + if cls.trigger_level == 1.0 or cls.trigger_level > 2.0 or cls.trigger_level < 0.0: + message = f'`{cls.__name__}` should have a trigger level between 0.0 and 2.0 and not 1.0 as it\'s used as the default.' raise ValueError(message) + @classmethod + def is_triggered(cls, level: float) -> bool: + if cls.trigger_operator(level, cls.trigger_level): + return True + + return False + + @classmethod + @abstractmethod + def apply(cls, llm_connector: LlmConnector) -> None: + raise NotImplementedError diff --git a/dandy/llm/diligence/handler.py b/dandy/llm/diligence/handler.py new file mode 100644 index 00000000..acac5506 --- /dev/null +++ b/dandy/llm/diligence/handler.py @@ -0,0 +1,36 @@ +from __future__ import annotations + +from abc import ABC +from typing import TYPE_CHECKING + +from dandy.llm.diligence.diligence import BaseDiligence +from dandy.llm.diligence.second_pass.diligence import SecondPassRemovalDiligence +from dandy.llm.diligence.stop_word_removal.diligence import StopWordRemovalDiligence + +if TYPE_CHECKING: + from dandy.llm.connector import LlmConnector + +class BaseDiligenceHandler(ABC): + diligence_classes: tuple[type[BaseDiligence]] + + def __init__(self, level: float) -> None: + self.level = level + + + def apply(self, llm_connector: LlmConnector) -> None: + for diligence_class in self.diligence_classes: + if diligence_class.is_triggered(self.level): + diligence_class.apply(llm_connector=llm_connector) + +class PreDiligenceHandler(BaseDiligenceHandler): + diligence_classes = ( + StopWordRemovalDiligence, + ) + + +class PostDiligenceHandler(BaseDiligenceHandler): + diligence_classes = ( + SecondPassRemovalDiligence, + ) + + diff --git a/dandy/llm/diligence/second_pass/__init__.py b/dandy/llm/diligence/second_pass/__init__.py new file mode 100644 index 00000000..e69de29b diff --git 
a/dandy/llm/diligence/second_pass/diligence.py b/dandy/llm/diligence/second_pass/diligence.py new file mode 100644 index 00000000..c8a43b03 --- /dev/null +++ b/dandy/llm/diligence/second_pass/diligence.py @@ -0,0 +1,19 @@ +from __future__ import annotations + +import operator +from typing import TYPE_CHECKING + +from dandy.llm.diligence.diligence import BaseDiligence + +if TYPE_CHECKING: + from dandy.llm.connector import LlmConnector + + +class SecondPassRemovalDiligence(BaseDiligence): + trigger_level = 2.0 + trigger_operator = operator.ge + + @classmethod + def apply(cls, llm_connector: LlmConnector) -> None: + print('Second pass removal diligence') + diff --git a/dandy/llm/diligence/stop_word_removal/__init__.py b/dandy/llm/diligence/stop_word_removal/__init__.py new file mode 100644 index 00000000..7b52df5f --- /dev/null +++ b/dandy/llm/diligence/stop_word_removal/__init__.py @@ -0,0 +1,20 @@ +STOP_WORDS = ( + "i", "me", "my", "myself", "we", "our", "ours", "ourselves", + "you", "your", "yours", "yourself", "yourselves", + "he", "him", "his", "himself", + "she", "her", "hers", "herself", + "it", "its", "itself", + "they", "them", "their", "theirs", "themselves", + "what", "which", "who", "whom", + "this", "that", "these", "those", + "am", "is", "are", "was", "were", "be", "been", "being", + "have", "has", "had", "having", + "do", "does", "did", "doing", + "will", "would", "shall", "should", "can", "could", "may", "might", "must", + "if", "then", "else", "when", "where", "why", "how", + "all", "each", "few", "more", "most", "other", "some", "such", + "no", "nor", "not", "only", "own", "same", "so", "than", "too", "very", + "can't", "won't", "wouldn't", "didn't", "doesn't", "don't", "hadn't", "hasn't", "haven't", "isn't", "mightn't", "mustn't", "needn't", "shan't", "shouldn't", "weren't", "won't", "wouldn't", + "cant", "wont", "wouldnt", "didnt", "doesnt", "dont", "hadnt", "hasnt", "havent", "isnt", "mightnt", "mustnt", "neednt", "shant", "shouldnt", "werent", 
"wont", "wouldnt", + "a", "an", "the" +) \ No newline at end of file diff --git a/dandy/llm/diligence/stop_word_removal/constants.py b/dandy/llm/diligence/stop_word_removal/constants.py new file mode 100644 index 00000000..e69de29b diff --git a/dandy/llm/diligence/stop_word_removal/diligence.py b/dandy/llm/diligence/stop_word_removal/diligence.py new file mode 100644 index 00000000..d1ed9c82 --- /dev/null +++ b/dandy/llm/diligence/stop_word_removal/diligence.py @@ -0,0 +1,19 @@ +from __future__ import annotations + +import operator +from typing import TYPE_CHECKING + +from dandy.llm.diligence.diligence import BaseDiligence + +if TYPE_CHECKING: + from dandy.llm.connector import LlmConnector + + +class StopWordRemovalDiligence(BaseDiligence): + trigger_level = 0.0 + trigger_operator = operator.le + + @classmethod + def apply(cls, llm_connector: LlmConnector) -> None: + print('Stop word removal diligence') + diff --git a/dandy/llm/service.py b/dandy/llm/service.py index 6cb52975..f1c6124c 100644 --- a/dandy/llm/service.py +++ b/dandy/llm/service.py @@ -26,6 +26,7 @@ class LlmService( def __post_init__(self): self._llm_connector: LlmConnector = LlmConnector( recorder_event_id=self.recorder_event_id, + diligence=self.obj.diligence, system_prompt=service_system_prompt( role=self.obj.role, task=self.obj.task, diff --git a/dandy/tool/git/tool.py b/dandy/tool/git/tool.py index b79de842..abc58920 100644 --- a/dandy/tool/git/tool.py +++ b/dandy/tool/git/tool.py @@ -20,7 +20,7 @@ def setup(self) -> bool: return False @staticmethod - def diff_file(self, file_path: Path | str) -> dict: + def diff_file(file_path: Path | str) -> dict: file_path_str = str(file_path) try: diff --git a/ruff.toml b/ruff.toml index 4fe680be..34dcf03d 100644 --- a/ruff.toml +++ b/ruff.toml @@ -12,11 +12,6 @@ ignore = [ "ANN002", "ANN003", "ANN101", -# "ANN201", -# "ANN202", -# "ANN204", -# "ANN205", -# "ANN206", "ANN401", "BLE001", "C901", diff --git a/tests/llm/diligence/__init__.py 
b/tests/llm/diligence/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/llm/diligence/intelligence/__init__.py b/tests/llm/diligence/intelligence/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/llm/diligence/intelligence/bot.py b/tests/llm/diligence/intelligence/bot.py new file mode 100644 index 00000000..2d17e7b4 --- /dev/null +++ b/tests/llm/diligence/intelligence/bot.py @@ -0,0 +1,7 @@ +from dandy import Bot + + +class CelestialObserverBot(Bot): + role = 'Celestial Observer' + task = 'You will be given a description of a celestial object and your task is to identify it.' + guidelines = 'Provide only the name of the object as a single word.' diff --git a/tests/llm/diligence/test_diligence.py b/tests/llm/diligence/test_diligence.py new file mode 100644 index 00000000..0236deb4 --- /dev/null +++ b/tests/llm/diligence/test_diligence.py @@ -0,0 +1,17 @@ + +from unittest import TestCase, mock + +from tests.llm.diligence.intelligence.bot import CelestialObserverBot + + +class TestDiligence(TestCase): + def test_default_diligence(self): + + for int_level in range(21): + test_bot = CelestialObserverBot(diligence=int_level * 0.1) + + celestial_intel = test_bot.process('What is the large round object that is circling the plant earth, is generally only visible at night and affects the tides?') + + self.assertEqual(celestial_intel.text.lower(), 'moon') + + From c6bdb479a5cb7e7158cfaa48d29b3f8ba5806df9 Mon Sep 17 00:00:00 2001 From: Nathan Johnson Date: Tue, 3 Mar 2026 14:10:51 -0700 Subject: [PATCH 6/9] refactoring --- dandy/recorder/decorators.py | 22 +++--- ruff.toml | 138 ++++++++++++++++++----------------- 2 files changed, 82 insertions(+), 78 deletions(-) diff --git a/dandy/recorder/decorators.py b/dandy/recorder/decorators.py index b94d37a4..929ab9e6 100644 --- a/dandy/recorder/decorators.py +++ b/dandy/recorder/decorators.py @@ -12,10 +12,10 @@ def _recorder_to_file_decorator_function( recording_name: str | None, 
renderer: str, path: Path | str, -): +) -> Callable: if recording_name is None: recording_name = ( - str(func.__qualname__) + str(func.__qualname__) # ty:ignore[unresolved-attribute] .replace('.', '_') .replace('<', '') .replace('>', '') @@ -33,13 +33,13 @@ def _recorder_to_file_decorator_function( Recorder.delete_recording(recording_name) -def recorder_to_html_file(recording_name: str | None = None, path: Path | str | None = None): +def recorder_to_html_file(recording_name: str | None = None, path: Path | str | None = None) -> Callable: if path is None: path = Recorder.get_default_recording_path() - def decorator(func: Callable): + def decorator(func: Callable) -> Callable: @wraps(func) - def wrapper(*args, **kwargs): + def wrapper(*args, **kwargs) -> Callable: return _recorder_to_file_decorator_function(func, args, kwargs, recording_name, 'html', path) return wrapper @@ -47,13 +47,13 @@ def wrapper(*args, **kwargs): return decorator -def recorder_to_json_file(recording_name: str | None = None, path: Path | str | None = None): +def recorder_to_json_file(recording_name: str | None = None, path: Path | str | None = None) -> Callable: if path is None: path = Recorder.get_default_recording_path() - def decorator(func: Callable): + def decorator(func: Callable) -> Callable: @wraps(func) - def wrapper(*args, **kwargs): + def wrapper(*args, **kwargs) -> Callable: return _recorder_to_file_decorator_function(func, args, kwargs, recording_name, 'json', path) return wrapper @@ -61,13 +61,13 @@ def wrapper(*args, **kwargs): return decorator -def recorder_to_markdown_file(recording_name: str | None = None, path: Path | str | None = None): +def recorder_to_markdown_file(recording_name: str | None = None, path: Path | str | None = None) -> Callable: if path is None: path = Recorder.get_default_recording_path() - def decorator(func: Callable): + def decorator(func: Callable) -> Callable: @wraps(func) - def wrapper(*args, **kwargs): + def wrapper(*args, **kwargs) -> Callable: 
return _recorder_to_file_decorator_function(func, args, kwargs, recording_name, 'markdown', path) return wrapper diff --git a/ruff.toml b/ruff.toml index 34dcf03d..98754582 100644 --- a/ruff.toml +++ b/ruff.toml @@ -1,9 +1,60 @@ -target-version = "py311" -line-length = 88 indent-width = 4 +line-length = 100 +target-version = "py311" -[format] -quote-style = "single" +exclude = [ + "*/migrations/*", + "/site", + ".DS_Store", + ".eggs", + ".git", + ".hg", + ".hypothesis", + ".idea", + ".ipynb_checkpoints", + ".mypy_cache", + ".nox", + ".pybuilder", + ".pytest_cache", + ".pyre", + ".pytype", + ".ropeproject", + ".scrapy", + ".svn", + ".tox", + ".venv", + ".venv*", + ".venv_*", + ".webassets-cache", + "__pycache__", + "__pypackages__", + "build", + "cover", + "CVS", + "develop-eggs", + "dist", + "docs/_build", + "downloads", + "eggs", + "htmlcov", + "instance", + "lib", + "lib64", + "migrations", + "node_modules", + "parts", + "profile_default", + "sdist", + "share/python-wheels", + "target", + "tmp/cache", + "var", + "venv", + "venv*", + "venv_*", + "venv.bak", + "wheels", +] [lint] select = ["ALL"] @@ -11,7 +62,6 @@ ignore = [ "A003", "ANN002", "ANN003", - "ANN101", "ANN401", "BLE001", "C901", @@ -25,7 +75,9 @@ ignore = [ "D106", "D107", "D202", + "D203", "D205", + "D211", "D212", "D213", "D400", @@ -34,90 +86,42 @@ ignore = [ "D407", "D415", "D417", - "E501", "ERA001", "FBT001", "FBT002", "G004", "I001", "INP001", - "PLC0206", "PLR0913", "PLR0915", "PLR2004", "PLW2901", - "PT009", "PT019", - "PT027", "PTH123", - "RET504", - "RUF015", - "Q000", + "RUF012", "Q000", + "Q003", + "S101", "S301", "S311", "S603", "SLF001", "T201", - "UP006", "UP015", - "UP035" + "UP035", ] fixable = ["ALL"] unfixable = [] -exclude = [ - ".DS_Store", - ".eggs", - ".git", - ".hg", - ".hypothesis", - ".idea", - ".ipynb_checkpoints", - ".mypy_cache", - ".nox", - ".pybuilder", - ".pytest_cache", - ".pyre", - ".pytype", - ".ropeproject", - ".scrapy", - ".svn", - ".tox", - ".venv", - ".venv*", - 
".venv_*", - ".webassets-cache", - "CVS", - "__pycache__", - "__pypackages__", - "build", - "cover", - "develop-eggs", - "dist", - "docs/_build", - "downloads", - "eggs", - "htmlcov", - "instance", - "lib", - "lib64", - "node_modules", - "parts", - "profile_default", - "sdist", - "share/python-wheels", - "target", - "tmp/cache", - "var", - "venv", - "venv*", - "venv_*", - "venv.bak", - "/site", - "wheels" -] per-file-ignores = {} dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" -[mccabe] -max-complexity = 10 +[format] +quote-style = "single" +indent-style = "space" +skip-magic-trailing-comma = true + +[lint.isort] +known-first-party = ["system", "app"] + +[lint.mccabe] +max-complexity = 6 \ No newline at end of file From ae12a3ef755a37712dce3911580c72705d6efed5 Mon Sep 17 00:00:00 2001 From: Nathan Johnson Date: Tue, 3 Mar 2026 16:38:26 -0700 Subject: [PATCH 7/9] refactoring --- dandy/bot/bot.py | 8 +-- dandy/llm/diligence/diligence.py | 15 ++--- dandy/llm/diligence/recorder.py | 18 ++++++ dandy/llm/diligence/second_pass/diligence.py | 6 +- .../diligence/stop_word_removal/diligence.py | 6 +- dandy/llm/recorder.py | 4 +- dandy/recorder/events.py | 31 +++++++--- pyproject.toml | 1 + ruff.toml | 2 +- tests/llm/diligence/test_diligence.py | 15 +++-- uv.lock | 62 ++++++++++++------- 11 files changed, 116 insertions(+), 52 deletions(-) create mode 100644 dandy/llm/diligence/recorder.py diff --git a/dandy/bot/bot.py b/dandy/bot/bot.py index 16d295d7..240ded66 100644 --- a/dandy/bot/bot.py +++ b/dandy/bot/bot.py @@ -22,7 +22,7 @@ def __init__( llm_config: str | None = None, llm_temperature: float | None = None, **kwargs, - ): + ) -> None: super().__init__( llm_config=llm_config, llm_temperature=llm_temperature, @@ -37,7 +37,7 @@ def __init__( self.__post_init__() - def __init_subclass__(cls): + def __init_subclass__(cls) -> None: super().__init_subclass__() if ABC not in cls.__bases__: @@ -60,7 +60,7 @@ def __getattribute__(self: Self, name: str) -> Any: # 
noqa: N807 cls.__getattribute__ = __getattribute__ - def __post_init__(self): # noqa: B027 + def __post_init__(self) -> None: # noqa: B027 pass @classmethod @@ -84,5 +84,5 @@ def process( def process_to_future(self, *args, **kwargs) -> AsyncFuture: return process_to_future(self.process, *args, **kwargs) - def reset(self): + def reset(self) -> None: super().reset() diff --git a/dandy/llm/diligence/diligence.py b/dandy/llm/diligence/diligence.py index 3ca3bf71..7a12e48c 100644 --- a/dandy/llm/diligence/diligence.py +++ b/dandy/llm/diligence/diligence.py @@ -11,17 +11,18 @@ class BaseDiligence(ABC): trigger_level: float trigger_operator: Callable[[float, float], bool] - def __init_subclass__(cls, **kwargs): - if cls.trigger_level == 1.0 or cls.trigger_level > 2.0 or cls.trigger_level < 0.0: - message = f'`{cls.__name__}` should have a trigger level between 0.0 and 2.0 and not 1.0 as it\'s used as the default.' + def __init_subclass__(cls, **kwargs) -> None: + if ( + cls.trigger_level == 1.0 + or cls.trigger_level > 2.0 + or cls.trigger_level < 0.0 + ): + message = f"`{cls.__name__}` should have a trigger level between 0.0 and 2.0 and not 1.0 as it's used as the default." 
raise ValueError(message) @classmethod def is_triggered(cls, level: float) -> bool: - if cls.trigger_operator(level, cls.trigger_level): - return True - - return False + return cls.trigger_operator(level, cls.trigger_level) @classmethod @abstractmethod diff --git a/dandy/llm/diligence/recorder.py b/dandy/llm/diligence/recorder.py new file mode 100644 index 00000000..e33cd973 --- /dev/null +++ b/dandy/llm/diligence/recorder.py @@ -0,0 +1,18 @@ +from dandy import Recorder +from dandy.recorder.events import Event, EventType, EventAttribute + + +def recorder_add_llm_diligence_event( + event_id: str, + diligence_name: str, + event_attributes: list[EventAttribute], +) -> None: + Recorder.add_event( + Event( + id=event_id, + object_name='LLM Diligence', + callable_name=diligence_name, + type=EventType.OTHER, + attributes=event_attributes, + ) + ) diff --git a/dandy/llm/diligence/second_pass/diligence.py b/dandy/llm/diligence/second_pass/diligence.py index c8a43b03..f94ba49c 100644 --- a/dandy/llm/diligence/second_pass/diligence.py +++ b/dandy/llm/diligence/second_pass/diligence.py @@ -1,7 +1,7 @@ from __future__ import annotations import operator -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Callable from dandy.llm.diligence.diligence import BaseDiligence @@ -10,8 +10,8 @@ class SecondPassRemovalDiligence(BaseDiligence): - trigger_level = 2.0 - trigger_operator = operator.ge + trigger_level: float = 2.0 + trigger_operator: Callable[[float, float], bool] = operator.ge @classmethod def apply(cls, llm_connector: LlmConnector) -> None: diff --git a/dandy/llm/diligence/stop_word_removal/diligence.py b/dandy/llm/diligence/stop_word_removal/diligence.py index d1ed9c82..36912985 100644 --- a/dandy/llm/diligence/stop_word_removal/diligence.py +++ b/dandy/llm/diligence/stop_word_removal/diligence.py @@ -1,7 +1,7 @@ from __future__ import annotations import operator -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Callable from 
dandy.llm.diligence.diligence import BaseDiligence @@ -10,8 +10,8 @@ class StopWordRemovalDiligence(BaseDiligence): - trigger_level = 0.0 - trigger_operator = operator.le + trigger_level: float = 0.0 + trigger_operator: Callable[[float, float], bool] = operator.le @classmethod def apply(cls, llm_connector: LlmConnector) -> None: diff --git a/dandy/llm/recorder.py b/dandy/llm/recorder.py index 72453174..49eb6a6f 100644 --- a/dandy/llm/recorder.py +++ b/dandy/llm/recorder.py @@ -91,11 +91,11 @@ def recorder_add_llm_request_event( for message in request_body.messages: for message_content in message.content: - llm_request_event.add_attribute(EventAttribute( + llm_request_event.add_attribute( key=message.role, value=str(message_content.text), is_card=True, - )) + ) Recorder.add_event(llm_request_event) diff --git a/dandy/recorder/events.py b/dandy/recorder/events.py index 08af3007..7dd9e8e9 100644 --- a/dandy/recorder/events.py +++ b/dandy/recorder/events.py @@ -33,22 +33,39 @@ class Event(BaseModel): object_name: str callable_name: str type: EventType - attributes: list[EventAttribute] | None = Field(default_factory=list) + attributes: list[EventAttribute] = Field(default_factory=list) start_time: float = Field(default_factory=perf_counter) token_usage: int = 0 run_time_seconds: float = 0.0 complete_run_time_seconds: float = 0.0 - def model_post_init(self, __context: Any, /): + def model_post_init(self, __context: Any, /) -> None: if settings.DEBUG: logging.debug(str(self)) - def calculate_run_time(self, pre_event: Self): + def calculate_run_time(self, pre_event: Self) -> None: self.run_time_seconds = self.start_time - pre_event.start_time - self.complete_run_time_seconds = pre_event.complete_run_time_seconds + self.run_time_seconds - - def add_attribute(self, event_attribute: EventAttribute) -> Self: - self.attributes.append(event_attribute) + self.complete_run_time_seconds = ( + pre_event.complete_run_time_seconds + self.run_time_seconds + ) + + def add_attribute( + 
self, + key: str, + value: Any, + is_dropdown: bool = False, + is_card: bool = False, + is_base64_image: bool = False, + ) -> Self: + self.attributes.append( + EventAttribute( + key=key, + value=value, + is_dropdown=is_dropdown, + is_card=is_card, + is_base64_image=is_base64_image, + ) + ) return self diff --git a/pyproject.toml b/pyproject.toml index 1f0187c0..ab36b8af 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,6 +32,7 @@ dependencies = [ development = [ "build", "Faker", + "pytest", "ruff", "setuptools", "twine", diff --git a/ruff.toml b/ruff.toml index 98754582..d84fd40a 100644 --- a/ruff.toml +++ b/ruff.toml @@ -1,5 +1,5 @@ indent-width = 4 -line-length = 100 +line-length = 80 target-version = "py311" exclude = [ diff --git a/tests/llm/diligence/test_diligence.py b/tests/llm/diligence/test_diligence.py index 0236deb4..a6ecd4ef 100644 --- a/tests/llm/diligence/test_diligence.py +++ b/tests/llm/diligence/test_diligence.py @@ -1,17 +1,24 @@ - from unittest import TestCase, mock +from dandy import Recorder from tests.llm.diligence.intelligence.bot import CelestialObserverBot class TestDiligence(TestCase): - def test_default_diligence(self): + def test_default_diligence(self) -> None: for int_level in range(21): test_bot = CelestialObserverBot(diligence=int_level * 0.1) - celestial_intel = test_bot.process('What is the large round object that is circling the plant earth, is generally only visible at night and affects the tides?') + Recorder.start_recording(f'diligence_{int_level}') + + celestial_intel = test_bot.process( + 'What is the large round object that is circling the plant earth, is generally only visible at night and affects the tides?' 
+ ) - self.assertEqual(celestial_intel.text.lower(), 'moon') + assert celestial_intel.text.lower() == 'moon' + Recorder.stop_recording(f'diligence_{int_level}') + if int_level in {0, 21}: + Recorder.to_html_file(f'diligence_{int_level}') diff --git a/uv.lock b/uv.lock index 6ebc8a97..948464d1 100644 --- a/uv.lock +++ b/uv.lock @@ -110,41 +110,31 @@ name = "cffi" version = "2.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "pycparser", marker = "implementation_name != 'PyPy'" }, + { name = "pycparser", marker = "implementation_name != 'PyPy' and sys_platform != 'emscripten' and sys_platform != 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, - { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, - { url = 
"https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, { url = 
"https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, - { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, - { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, { url = 
"https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, - { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, - { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, { url = 
"https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, - { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, - { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, { url = 
"https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, - { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, - { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, { url = 
"https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, @@ -249,40 +239,34 @@ name = "cryptography" version = "46.0.4" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, + { name = "cffi", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'emscripten' and sys_platform != 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/78/19/f748958276519adf6a0c1e79e7b8860b4830dda55ccdf29f2719b5fc499c/cryptography-46.0.4.tar.gz", hash = "sha256:bfd019f60f8abc2ed1b9be4ddc21cfef059c841d86d710bb69909a688cbb8f59", size = 749301, upload-time = "2026-01-28T00:24:37.379Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/87/91/874b8910903159043b5c6a123b7e79c4559ddd1896e38967567942635778/cryptography-46.0.4-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5f14fba5bf6f4390d7ff8f086c566454bff0411f6d8aa7af79c88b6f9267aecc", size = 4275871, upload-time = "2026-01-28T00:23:09.439Z" }, { url = "https://files.pythonhosted.org/packages/c0/35/690e809be77896111f5b195ede56e4b4ed0435b428c2f2b6d35046fbb5e8/cryptography-46.0.4-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:47bcd19517e6389132f76e2d5303ded6cf3f78903da2158a671be8de024f4cd0", size = 4423124, upload-time = "2026-01-28T00:23:11.529Z" }, { url = "https://files.pythonhosted.org/packages/1a/5b/a26407d4f79d61ca4bebaa9213feafdd8806dc69d3d290ce24996d3cfe43/cryptography-46.0.4-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:01df4f50f314fbe7009f54046e908d1754f19d0c6d3070df1e6268c5a4af09fa", size = 4277090, upload-time = "2026-01-28T00:23:13.123Z" }, - { url = 
"https://files.pythonhosted.org/packages/0c/d8/4bb7aec442a9049827aa34cee1aa83803e528fa55da9a9d45d01d1bb933e/cryptography-46.0.4-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5aa3e463596b0087b3da0dbe2b2487e9fc261d25da85754e30e3b40637d61f81", size = 4947652, upload-time = "2026-01-28T00:23:14.554Z" }, { url = "https://files.pythonhosted.org/packages/2b/08/f83e2e0814248b844265802d081f2fac2f1cbe6cd258e72ba14ff006823a/cryptography-46.0.4-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0a9ad24359fee86f131836a9ac3bffc9329e956624a2d379b613f8f8abaf5255", size = 4455157, upload-time = "2026-01-28T00:23:16.443Z" }, { url = "https://files.pythonhosted.org/packages/0a/05/19d849cf4096448779d2dcc9bb27d097457dac36f7273ffa875a93b5884c/cryptography-46.0.4-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:dc1272e25ef673efe72f2096e92ae39dea1a1a450dd44918b15351f72c5a168e", size = 3981078, upload-time = "2026-01-28T00:23:17.838Z" }, { url = "https://files.pythonhosted.org/packages/e6/89/f7bac81d66ba7cde867a743ea5b37537b32b5c633c473002b26a226f703f/cryptography-46.0.4-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:de0f5f4ec8711ebc555f54735d4c673fc34b65c44283895f1a08c2b49d2fd99c", size = 4276213, upload-time = "2026-01-28T00:23:19.257Z" }, - { url = "https://files.pythonhosted.org/packages/da/9f/7133e41f24edd827020ad21b068736e792bc68eecf66d93c924ad4719fb3/cryptography-46.0.4-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:eeeb2e33d8dbcccc34d64651f00a98cb41b2dc69cef866771a5717e6734dfa32", size = 4912190, upload-time = "2026-01-28T00:23:21.244Z" }, { url = "https://files.pythonhosted.org/packages/a6/f7/6d43cbaddf6f65b24816e4af187d211f0bc536a29961f69faedc48501d8e/cryptography-46.0.4-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3d425eacbc9aceafd2cb429e42f4e5d5633c6f873f5e567077043ef1b9bbf616", size = 4454641, upload-time = "2026-01-28T00:23:22.866Z" }, { url = 
"https://files.pythonhosted.org/packages/9e/4f/ebd0473ad656a0ac912a16bd07db0f5d85184924e14fc88feecae2492834/cryptography-46.0.4-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91627ebf691d1ea3976a031b61fb7bac1ccd745afa03602275dda443e11c8de0", size = 4405159, upload-time = "2026-01-28T00:23:25.278Z" }, { url = "https://files.pythonhosted.org/packages/d1/f7/7923886f32dc47e27adeff8246e976d77258fd2aa3efdd1754e4e323bf49/cryptography-46.0.4-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2d08bc22efd73e8854b0b7caff402d735b354862f1145d7be3b9c0f740fef6a0", size = 4666059, upload-time = "2026-01-28T00:23:26.766Z" }, { url = "https://files.pythonhosted.org/packages/f8/f5/559c25b77f40b6bf828eabaf988efb8b0e17b573545edb503368ca0a2a03/cryptography-46.0.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:078e5f06bd2fa5aea5a324f2a09f914b1484f1d0c2a4d6a8a28c74e72f65f2da", size = 4264508, upload-time = "2026-01-28T00:23:34.264Z" }, { url = "https://files.pythonhosted.org/packages/49/a1/551fa162d33074b660dc35c9bc3616fefa21a0e8c1edd27b92559902e408/cryptography-46.0.4-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dce1e4f068f03008da7fa51cc7abc6ddc5e5de3e3d1550334eaf8393982a5829", size = 4409080, upload-time = "2026-01-28T00:23:35.793Z" }, { url = "https://files.pythonhosted.org/packages/b0/6a/4d8d129a755f5d6df1bbee69ea2f35ebfa954fa1847690d1db2e8bca46a5/cryptography-46.0.4-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:2067461c80271f422ee7bdbe79b9b4be54a5162e90345f86a23445a0cf3fd8a2", size = 4270039, upload-time = "2026-01-28T00:23:37.263Z" }, - { url = "https://files.pythonhosted.org/packages/4c/f5/ed3fcddd0a5e39321e595e144615399e47e7c153a1fb8c4862aec3151ff9/cryptography-46.0.4-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:c92010b58a51196a5f41c3795190203ac52edfd5dc3ff99149b4659eba9d2085", size = 4926748, upload-time = "2026-01-28T00:23:38.884Z" }, { url = 
"https://files.pythonhosted.org/packages/43/ae/9f03d5f0c0c00e85ecb34f06d3b79599f20630e4db91b8a6e56e8f83d410/cryptography-46.0.4-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:829c2b12bbc5428ab02d6b7f7e9bbfd53e33efd6672d21341f2177470171ad8b", size = 4442307, upload-time = "2026-01-28T00:23:40.56Z" }, { url = "https://files.pythonhosted.org/packages/8b/22/e0f9f2dae8040695103369cf2283ef9ac8abe4d51f68710bec2afd232609/cryptography-46.0.4-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:62217ba44bf81b30abaeda1488686a04a702a261e26f87db51ff61d9d3510abd", size = 3959253, upload-time = "2026-01-28T00:23:42.827Z" }, { url = "https://files.pythonhosted.org/packages/01/5b/6a43fcccc51dae4d101ac7d378a8724d1ba3de628a24e11bf2f4f43cba4d/cryptography-46.0.4-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:9c2da296c8d3415b93e6053f5a728649a87a48ce084a9aaf51d6e46c87c7f2d2", size = 4269372, upload-time = "2026-01-28T00:23:44.655Z" }, - { url = "https://files.pythonhosted.org/packages/17/b7/0f6b8c1dd0779df2b526e78978ff00462355e31c0a6f6cff8a3e99889c90/cryptography-46.0.4-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:9b34d8ba84454641a6bf4d6762d15847ecbd85c1316c0a7984e6e4e9f748ec2e", size = 4891908, upload-time = "2026-01-28T00:23:46.48Z" }, { url = "https://files.pythonhosted.org/packages/83/17/259409b8349aa10535358807a472c6a695cf84f106022268d31cea2b6c97/cryptography-46.0.4-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:df4a817fa7138dd0c96c8c8c20f04b8aaa1fac3bbf610913dcad8ea82e1bfd3f", size = 4441254, upload-time = "2026-01-28T00:23:48.403Z" }, { url = "https://files.pythonhosted.org/packages/9c/fe/e4a1b0c989b00cee5ffa0764401767e2d1cf59f45530963b894129fd5dce/cryptography-46.0.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b1de0ebf7587f28f9190b9cb526e901bf448c9e6a99655d2b07fff60e8212a82", size = 4396520, upload-time = "2026-01-28T00:23:50.26Z" }, { url = 
"https://files.pythonhosted.org/packages/b3/81/ba8fd9657d27076eb40d6a2f941b23429a3c3d2f56f5a921d6b936a27bc9/cryptography-46.0.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9b4d17bc7bd7cdd98e3af40b441feaea4c68225e2eb2341026c84511ad246c0c", size = 4651479, upload-time = "2026-01-28T00:23:51.674Z" }, { url = "https://files.pythonhosted.org/packages/d8/cc/8f3224cbb2a928de7298d6ed4790f5ebc48114e02bdc9559196bfb12435d/cryptography-46.0.4-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8bf75b0259e87fa70bddc0b8b4078b76e7fd512fd9afae6c1193bcf440a4dbef", size = 4275419, upload-time = "2026-01-28T00:23:58.364Z" }, { url = "https://files.pythonhosted.org/packages/17/43/4a18faa7a872d00e4264855134ba82d23546c850a70ff209e04ee200e76f/cryptography-46.0.4-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3c268a3490df22270955966ba236d6bc4a8f9b6e4ffddb78aac535f1a5ea471d", size = 4419058, upload-time = "2026-01-28T00:23:59.867Z" }, { url = "https://files.pythonhosted.org/packages/ee/64/6651969409821d791ba12346a124f55e1b76f66a819254ae840a965d4b9c/cryptography-46.0.4-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:812815182f6a0c1d49a37893a303b44eaac827d7f0d582cecfc81b6427f22973", size = 4278151, upload-time = "2026-01-28T00:24:01.731Z" }, - { url = "https://files.pythonhosted.org/packages/20/0b/a7fce65ee08c3c02f7a8310cc090a732344066b990ac63a9dfd0a655d321/cryptography-46.0.4-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:a90e43e3ef65e6dcf969dfe3bb40cbf5aef0d523dff95bfa24256be172a845f4", size = 4939441, upload-time = "2026-01-28T00:24:03.175Z" }, { url = "https://files.pythonhosted.org/packages/db/a7/20c5701e2cd3e1dfd7a19d2290c522a5f435dd30957d431dcb531d0f1413/cryptography-46.0.4-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a05177ff6296644ef2876fce50518dffb5bcdf903c85250974fc8bc85d54c0af", size = 4451617, upload-time = "2026-01-28T00:24:05.403Z" }, { url = 
"https://files.pythonhosted.org/packages/00/dc/3e16030ea9aa47b63af6524c354933b4fb0e352257c792c4deeb0edae367/cryptography-46.0.4-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:daa392191f626d50f1b136c9b4cf08af69ca8279d110ea24f5c2700054d2e263", size = 3977774, upload-time = "2026-01-28T00:24:06.851Z" }, { url = "https://files.pythonhosted.org/packages/42/c8/ad93f14118252717b465880368721c963975ac4b941b7ef88f3c56bf2897/cryptography-46.0.4-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e07ea39c5b048e085f15923511d8121e4a9dc45cee4e3b970ca4f0d338f23095", size = 4277008, upload-time = "2026-01-28T00:24:08.926Z" }, - { url = "https://files.pythonhosted.org/packages/00/cf/89c99698151c00a4631fbfcfcf459d308213ac29e321b0ff44ceeeac82f1/cryptography-46.0.4-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:d5a45ddc256f492ce42a4e35879c5e5528c09cd9ad12420828c972951d8e016b", size = 4903339, upload-time = "2026-01-28T00:24:12.009Z" }, { url = "https://files.pythonhosted.org/packages/03/c3/c90a2cb358de4ac9309b26acf49b2a100957e1ff5cc1e98e6c4996576710/cryptography-46.0.4-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:6bb5157bf6a350e5b28aee23beb2d84ae6f5be390b2f8ee7ea179cda077e1019", size = 4451216, upload-time = "2026-01-28T00:24:13.975Z" }, { url = "https://files.pythonhosted.org/packages/96/2c/8d7f4171388a10208671e181ca43cdc0e596d8259ebacbbcfbd16de593da/cryptography-46.0.4-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dd5aba870a2c40f87a3af043e0dee7d9eb02d4aff88a797b48f2b43eff8c3ab4", size = 4404299, upload-time = "2026-01-28T00:24:16.169Z" }, { url = "https://files.pythonhosted.org/packages/e9/23/cbb2036e450980f65c6e0a173b73a56ff3bccd8998965dea5cc9ddd424a5/cryptography-46.0.4-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:93d8291da8d71024379ab2cb0b5c57915300155ad42e07f76bea6ad838d7e59b", size = 4664837, upload-time = "2026-01-28T00:24:17.629Z" }, @@ -306,6 +290,7 @@ dependencies = [ development = [ { name = "build" }, { name = "faker" }, + { name = 
"pytest" }, { name = "ruff" }, { name = "setuptools" }, { name = "twine" }, @@ -341,6 +326,7 @@ requires-dist = [ { name = "mkdocstrings-python", marker = "extra == 'documentation'" }, { name = "openpyxl", marker = "extra == 'documentation'" }, { name = "pydantic", specifier = "==2.12.5" }, + { name = "pytest", marker = "extra == 'development'" }, { name = "python-dotenv", specifier = "==1.2.1" }, { name = "requests", specifier = "==2.32.5" }, { name = "ruff", marker = "extra == 'development'" }, @@ -451,13 +437,22 @@ name = "importlib-metadata" version = "8.7.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "zipp" }, + { name = "zipp", marker = "python_full_version < '3.14'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/f3/49/3b30cad09e7771a4982d9975a8cbf64f00d4a1ececb53297f1d9a7be1b10/importlib_metadata-8.7.1.tar.gz", hash = "sha256:49fef1ae6440c182052f407c8d34a68f72efc36db9ca90dc0113398f2fdde8bb", size = 57107, upload-time = "2025-12-21T10:00:19.278Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865, upload-time = "2025-12-21T10:00:18.329Z" }, ] +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = 
"2025-10-18T21:55:41.639Z" }, +] + [[package]] name = "jaraco-classes" version = "3.4.0" @@ -1082,6 +1077,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = "2025-12-05T13:52:56.823Z" }, ] +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + [[package]] name = "pycparser" version = "3.0" @@ -1246,6 +1250,22 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/bd/24/12818598c362d7f300f18e74db45963dbcb85150324092410c8b49405e42/pyproject_hooks-1.2.0-py3-none-any.whl", hash = "sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913", size = 10216, upload-time = "2024-09-29T09:24:11.978Z" }, ] +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = 
"sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -1436,8 +1456,8 @@ name = "secretstorage" version = "3.5.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "cryptography" }, - { name = "jeepney" }, + { name = "cryptography", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" }, + { name = "jeepney", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/1c/03/e834bcd866f2f8a49a85eaff47340affa3bfa391ee9912a952a1faa68c7b/secretstorage-3.5.0.tar.gz", hash = "sha256:f04b8e4689cbce351744d5537bf6b1329c6fc68f91fa666f60a380edddcd11be", size = 19884, upload-time = "2025-11-23T19:02:53.191Z" } wheels = [ From 0f9ca410c6e7eaf1fa53414931dfe8e05a5bc866 Mon Sep 17 00:00:00 2001 From: Nathan Johnson Date: Tue, 3 Mar 2026 21:16:40 -0700 Subject: [PATCH 8/9] diligence basic implementation working. 
--- dandy/llm/connector.py | 134 ++++++++---------- dandy/llm/diligence/diligence.py | 1 + dandy/llm/diligence/handler.py | 22 ++- dandy/llm/diligence/second_pass/diligence.py | 6 +- .../diligence/stop_word_removal/__init__.py | 20 --- .../diligence/stop_word_removal/constants.py | 126 ++++++++++++++++ .../diligence/stop_word_removal/diligence.py | 30 +++- dandy/llm/diligence/vowel_removal/__init__.py | 0 .../llm/diligence/vowel_removal/constants.py | 7 + .../llm/diligence/vowel_removal/diligence.py | 44 ++++++ dandy/llm/request/message.py | 6 +- ruff.toml | 2 +- tests/llm/diligence/test_diligence.py | 2 +- 13 files changed, 292 insertions(+), 108 deletions(-) create mode 100644 dandy/llm/diligence/vowel_removal/__init__.py create mode 100644 dandy/llm/diligence/vowel_removal/constants.py create mode 100644 dandy/llm/diligence/vowel_removal/diligence.py diff --git a/dandy/llm/connector.py b/dandy/llm/connector.py index b16d742a..572bae04 100644 --- a/dandy/llm/connector.py +++ b/dandy/llm/connector.py @@ -8,8 +8,11 @@ from dandy.intel.factory import IntelFactory from dandy.intel.typing import IntelType from dandy.llm.config import LlmConfig -from dandy.llm.diligence.handler import BaseDiligenceHandler, PreDiligenceHandler, \ - PostDiligenceHandler +from dandy.llm.diligence.handler import ( + BaseDiligenceHandler, + PreDiligenceHandler, + PostDiligenceHandler, +) from dandy.llm.exceptions import LlmCriticalError, LlmRecoverableError from dandy.llm.intelligence.prompts import service_system_validation_error_prompt from dandy.llm.prompt.prompt import Prompt @@ -25,12 +28,12 @@ class LlmConnector(BaseConnector): def __init__( - self, - recorder_event_id: str, - llm_config: LlmConfig, - intel_class: type[IntelType] | None, - system_prompt: Prompt | str, - diligence: float = 1.0, + self, + recorder_event_id: str, + llm_config: LlmConfig, + intel_class: type[IntelType] | None, + system_prompt: Prompt | str, + diligence: float = 1.0, ): self.recorder_event_id = 
recorder_event_id @@ -52,45 +55,46 @@ def __init__( def has_retry_attempts_available(self) -> bool: return self.prompt_retry_attempt < self.llm_config.options.prompt_retry_count - def _apply_diligence(self, diligence_handler_class: type[BaseDiligenceHandler]) -> None: - if self.diligence != 1.0: - diligence_handler_class( - level=self.diligence, - ).apply( - llm_connector=self - ) + @property + def _changed_diligence(self) -> bool: + return self.diligence != 1.0 + + def _http_request_to_response_str(self) -> None: + http_connector = HttpConnector() + + self.llm_config.http_request_intel.json_data = self.request_body.model_dump() + + self.response_str = http_connector.request_to_response( + request_intel=self.llm_config.http_request_intel + ).json_data['choices'][0]['message']['content'] def _prepend_system_message(self): self.request_body.messages.add_message( - role='system', - text=self.system_prompt_str, - prepend=True, + role='system', text=self.system_prompt_str, prepend=True ) def prompt_to_intel( - self, - prompt: Prompt | str | None = None, - intel_class: type[IntelType] | None = None, - intel_object: IntelType | None = None, - audio_urls: list[str] | None = None, - audio_file_paths: list[str | Path] | None = None, - audio_base64_strings: list[str] | None = None, - image_urls: list[str] | None = None, - image_file_paths: list[str | Path] | None = None, - image_base64_strings: list[str] | None = None, - include_fields: IncEx | None = None, - exclude_fields: IncEx | None = None, - message_history: MessageHistory | None = None, - replace_message_history: bool = False, + self, + prompt: Prompt | str | None = None, + intel_class: type[IntelType] | None = None, + intel_object: IntelType | None = None, + audio_urls: list[str] | None = None, + audio_file_paths: list[str | Path] | None = None, + audio_base64_strings: list[str] | None = None, + image_urls: list[str] | None = None, + image_file_paths: list[str | Path] | None = None, + image_base64_strings: list[str] | 
None = None, + include_fields: IncEx | None = None, + exclude_fields: IncEx | None = None, + message_history: MessageHistory | None = None, + replace_message_history: bool = False, ) -> IntelType: self._update_request_body() self._set_intel(intel_class=intel_class, intel_object=intel_object) self.request_body.json_schema = IntelFactory.intel_to_json_inc_ex_schema( - intel=self.intel, - include=include_fields, - exclude=exclude_fields + intel=self.intel, include=include_fields, exclude=exclude_fields ) if not self.request_body.messages.has_system_message: @@ -100,22 +104,17 @@ def prompt_to_intel( if replace_message_history: self.request_body.messages = message_history else: - self.request_body.messages.extend( - message_history.messages - ) + self.request_body.messages.extend(message_history.messages) if prompt is not None: - self.request_body.messages.add_message( - role='user', - text=Prompt(prompt).to_str(), - ) + self.request_body.messages.add_message(role='user', text=Prompt(prompt).to_str()) if audio_urls or audio_file_paths or audio_base64_strings: self.request_body.messages.add_message( role='user', audio_urls=audio_urls, audio_file_paths=audio_file_paths, - audio_base64_strings=audio_base64_strings + audio_base64_strings=audio_base64_strings, ) if image_urls or image_file_paths or image_base64_strings: @@ -127,14 +126,21 @@ def prompt_to_intel( ) if len(self.request_body.messages) <= 1: - message = 'You cannot prompt the LlmService without at least one system and one user message.' + message = ( + 'You cannot prompt the LlmService without at least one system and one user message.' 
+ ) raise LlmCriticalError(message) - self._apply_diligence(PreDiligenceHandler) + if self._changed_diligence: + PreDiligenceHandler(level=self.diligence).apply(llm_connector=self) response_intel_object = self._request_to_intel() - self._apply_diligence(PostDiligenceHandler) + if self._changed_diligence: + PostDiligenceHandler(level=self.diligence).apply(llm_connector=self) + + if PostDiligenceHandler(level=self.diligence).requires_new_llm_request: + response_intel_object = self._request_to_intel() return response_intel_object @@ -145,20 +151,10 @@ def reset(self): self.llm_config.reset() self.request_body = self.llm_config.generate_request_body() - def _request_to_intel( - self, - ) -> IntelType: - recorder_add_llm_request_event( - self.request_body, self.recorder_event_id - ) - - http_connector = HttpConnector() - - self.llm_config.http_request_intel.json_data = self.request_body.model_dump() + def _request_to_intel(self) -> IntelType: + recorder_add_llm_request_event(self.request_body, self.recorder_event_id) - self.response_str = http_connector.request_to_response( - request_intel=self.llm_config.http_request_intel - ).json_data['choices'][0]['message']['content'] + self._http_request_to_response_str() recorder_add_llm_response_event( message_content=self.response_str, event_id=self.recorder_event_id @@ -176,10 +172,7 @@ def _request_to_intel( intel=intel_object, ) - self.request_body.messages.add_message( - role='assistant', - text=self.response_str - ) + self.request_body.messages.add_message(role='assistant', text=self.response_str) return intel_object @@ -195,9 +188,7 @@ def _request_to_intel( ) def retry_request_to_intel( - self, - retry_event_description: str, - retry_user_prompt: Prompt | str, + self, retry_event_description: str, retry_user_prompt: Prompt | str ) -> IntelType: if self.has_retry_attempts_available: self.prompt_retry_attempt += 1 @@ -205,12 +196,12 @@ def retry_request_to_intel( recorder_add_llm_retry_event( retry_event_description, 
self.recorder_event_id, - remaining_attempts=self.llm_config.options.prompt_retry_count - self.prompt_retry_attempt, + remaining_attempts=self.llm_config.options.prompt_retry_count + - self.prompt_retry_attempt, ) self.request_body.messages.add_message( - role='user', - text=Prompt(retry_user_prompt).to_str() + role='user', text=Prompt(retry_user_prompt).to_str() ) return self._request_to_intel() @@ -219,9 +210,7 @@ def retry_request_to_intel( raise LlmRecoverableError(message) def _set_intel( - self, - intel_class: type[IntelType] | None = None, - intel_object: IntelType | None = None, + self, intel_class: type[IntelType] | None = None, intel_object: IntelType | None = None ): if intel_class and intel_object: message = 'Cannot specify both intel_class and intel_object.' @@ -239,4 +228,3 @@ def _set_intel( def _update_request_body(self): for key, value in self.llm_config.options.model_dump(exclude_none=True).items(): setattr(self.request_body, key, value) - diff --git a/dandy/llm/diligence/diligence.py b/dandy/llm/diligence/diligence.py index 7a12e48c..0caa0390 100644 --- a/dandy/llm/diligence/diligence.py +++ b/dandy/llm/diligence/diligence.py @@ -10,6 +10,7 @@ class BaseDiligence(ABC): trigger_level: float trigger_operator: Callable[[float, float], bool] + requires_new_llm_request: bool = False def __init_subclass__(cls, **kwargs) -> None: if ( diff --git a/dandy/llm/diligence/handler.py b/dandy/llm/diligence/handler.py index acac5506..f4b980cb 100644 --- a/dandy/llm/diligence/handler.py +++ b/dandy/llm/diligence/handler.py @@ -3,12 +3,14 @@ from abc import ABC from typing import TYPE_CHECKING -from dandy.llm.diligence.diligence import BaseDiligence from dandy.llm.diligence.second_pass.diligence import SecondPassRemovalDiligence from dandy.llm.diligence.stop_word_removal.diligence import StopWordRemovalDiligence +from dandy.llm.diligence.vowel_removal.diligence import VowelRemovalDiligence if TYPE_CHECKING: from dandy.llm.connector import LlmConnector + from 
dandy.llm.diligence.diligence import BaseDiligence + class BaseDiligenceHandler(ABC): diligence_classes: tuple[type[BaseDiligence]] @@ -16,21 +18,27 @@ class BaseDiligenceHandler(ABC): def __init__(self, level: float) -> None: self.level = level - def apply(self, llm_connector: LlmConnector) -> None: for diligence_class in self.diligence_classes: if diligence_class.is_triggered(self.level): diligence_class.apply(llm_connector=llm_connector) + @property + def requires_new_llm_request(self) -> bool: + for diligence_class in self.diligence_classes: + if diligence_class.is_triggered(self.level) and diligence_class.requires_new_llm_request: + return True + + return False + + + class PreDiligenceHandler(BaseDiligenceHandler): diligence_classes = ( StopWordRemovalDiligence, + VowelRemovalDiligence, ) class PostDiligenceHandler(BaseDiligenceHandler): - diligence_classes = ( - SecondPassRemovalDiligence, - ) - - + diligence_classes = (SecondPassRemovalDiligence,) diff --git a/dandy/llm/diligence/second_pass/diligence.py b/dandy/llm/diligence/second_pass/diligence.py index f94ba49c..5d63487b 100644 --- a/dandy/llm/diligence/second_pass/diligence.py +++ b/dandy/llm/diligence/second_pass/diligence.py @@ -12,8 +12,12 @@ class SecondPassRemovalDiligence(BaseDiligence): trigger_level: float = 2.0 trigger_operator: Callable[[float, float], bool] = operator.ge + requires_new_llm_request: bool = True @classmethod def apply(cls, llm_connector: LlmConnector) -> None: - print('Second pass removal diligence') + llm_connector.request_body.messages.add_message( + role='user', + text='Review our conversation and make sure you answered my request to the best of your ability.' 
+ ) diff --git a/dandy/llm/diligence/stop_word_removal/__init__.py b/dandy/llm/diligence/stop_word_removal/__init__.py index 7b52df5f..e69de29b 100644 --- a/dandy/llm/diligence/stop_word_removal/__init__.py +++ b/dandy/llm/diligence/stop_word_removal/__init__.py @@ -1,20 +0,0 @@ -STOP_WORDS = ( - "i", "me", "my", "myself", "we", "our", "ours", "ourselves", - "you", "your", "yours", "yourself", "yourselves", - "he", "him", "his", "himself", - "she", "her", "hers", "herself", - "it", "its", "itself", - "they", "them", "their", "theirs", "themselves", - "what", "which", "who", "whom", - "this", "that", "these", "those", - "am", "is", "are", "was", "were", "be", "been", "being", - "have", "has", "had", "having", - "do", "does", "did", "doing", - "will", "would", "shall", "should", "can", "could", "may", "might", "must", - "if", "then", "else", "when", "where", "why", "how", - "all", "each", "few", "more", "most", "other", "some", "such", - "no", "nor", "not", "only", "own", "same", "so", "than", "too", "very", - "can't", "won't", "wouldn't", "didn't", "doesn't", "don't", "hadn't", "hasn't", "haven't", "isn't", "mightn't", "mustn't", "needn't", "shan't", "shouldn't", "weren't", "won't", "wouldn't", - "cant", "wont", "wouldnt", "didnt", "doesnt", "dont", "hadnt", "hasnt", "havent", "isnt", "mightnt", "mustnt", "neednt", "shant", "shouldnt", "werent", "wont", "wouldnt", - "a", "an", "the" -) \ No newline at end of file diff --git a/dandy/llm/diligence/stop_word_removal/constants.py b/dandy/llm/diligence/stop_word_removal/constants.py index e69de29b..d60a7a19 100644 --- a/dandy/llm/diligence/stop_word_removal/constants.py +++ b/dandy/llm/diligence/stop_word_removal/constants.py @@ -0,0 +1,126 @@ +STOP_WORDS = ( + "can't", + "didn't", + "doesn't", + "don't", + "hadn't", + "hasn't", + "haven't", + "isn't", + "mightn't", + "mustn't", + "needn't", + "shan't", + "shouldn't", + "weren't", + "won't", + "won't", + "wouldn't", + "wouldn't", + 'a', + 'all', + 'am', + 'an', + 'are', 
+ 'be', + 'been', + 'being', + 'can', + 'cant', + 'could', + 'did', + 'didnt', + 'do', + 'does', + 'doesnt', + 'doing', + 'dont', + 'each', + 'else', + 'few', + 'had', + 'hadnt', + 'has', + 'hasnt', + 'have', + 'havent', + 'having', + 'he', + 'her', + 'hers', + 'herself', + 'him', + 'himself', + 'his', + 'how', + 'i', + 'if', + 'is', + 'isnt', + 'it', + 'its', + 'itself', + 'may', + 'me', + 'might', + 'mightnt', + 'more', + 'most', + 'must', + 'mustnt', + 'my', + 'myself', + 'neednt', + 'no', + 'nor', + 'not', + 'only', + 'other', + 'our', + 'ours', + 'ourselves', + 'own', + 'same', + 'shall', + 'shant', + 'she', + 'should', + 'shouldnt', + 'so', + 'some', + 'such', + 'than', + 'that', + 'the', + 'their', + 'theirs', + 'them', + 'themselves', + 'then', + 'these', + 'they', + 'this', + 'those', + 'too', + 'very', + 'was', + 'we', + 'were', + 'werent', + 'what', + 'when', + 'where', + 'which', + 'who', + 'whom', + 'why', + 'will', + 'wont', + 'would', + 'wouldnt', + 'you', + 'your', + 'yours', + 'yourself', + 'yourselves', +) diff --git a/dandy/llm/diligence/stop_word_removal/diligence.py b/dandy/llm/diligence/stop_word_removal/diligence.py index 36912985..8430a918 100644 --- a/dandy/llm/diligence/stop_word_removal/diligence.py +++ b/dandy/llm/diligence/stop_word_removal/diligence.py @@ -1,4 +1,6 @@ from __future__ import annotations +from dandy.llm.diligence.stop_word_removal.constants import STOP_WORDS +import re import operator from typing import TYPE_CHECKING, Callable @@ -10,10 +12,34 @@ class StopWordRemovalDiligence(BaseDiligence): - trigger_level: float = 0.0 + trigger_level: float = 0.2 trigger_operator: Callable[[float, float], bool] = operator.le + requires_new_llm_request: bool = False @classmethod def apply(cls, llm_connector: LlmConnector) -> None: - print('Stop word removal diligence') + for i in range(len(llm_connector.request_body.messages)): + if isinstance(llm_connector.request_body.messages[i], list): + for k in 
range(len(llm_connector.request_body.messages[i])): + for j in range(len(llm_connector.request_body.messages[i][k].content)): + stripped_text = cls.remove_stop_words(llm_connector.request_body.messages[i][k].content[j].text) + llm_connector.request_body.messages[i][k].content[j].text = stripped_text + else: + for j in range(len(llm_connector.request_body.messages[i].content)): + stripped_text = cls.remove_stop_words(llm_connector.request_body.messages[i].content[j].text) + llm_connector.request_body.messages[i].content[j].text = stripped_text + llm_connector.request_body.messages.add_message( + role='system', + text='Assume Stop Words', + prepend=True, + ) + + @staticmethod + def remove_stop_words(text: str) -> str: + stop_words = [re.escape(word) for word in STOP_WORDS] + pattern = r'\b(' + '|'.join(stop_words) + r')\b' + + cleaned_text = re.sub(pattern, '', text.lower()) + + return re.sub(r'\s+', ' ', cleaned_text).strip() diff --git a/dandy/llm/diligence/vowel_removal/__init__.py b/dandy/llm/diligence/vowel_removal/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/dandy/llm/diligence/vowel_removal/constants.py b/dandy/llm/diligence/vowel_removal/constants.py new file mode 100644 index 00000000..f9de93dc --- /dev/null +++ b/dandy/llm/diligence/vowel_removal/constants.py @@ -0,0 +1,7 @@ +VOWELS = ( + 'a', + 'e', + 'i', + 'o', + 'u', +) diff --git a/dandy/llm/diligence/vowel_removal/diligence.py b/dandy/llm/diligence/vowel_removal/diligence.py new file mode 100644 index 00000000..442c370f --- /dev/null +++ b/dandy/llm/diligence/vowel_removal/diligence.py @@ -0,0 +1,44 @@ +from __future__ import annotations +from dandy.llm.diligence.vowel_removal.constants import VOWELS +from dandy.llm.diligence.stop_word_removal.constants import STOP_WORDS +import re + +import operator +from typing import TYPE_CHECKING, Callable + +from dandy.llm.diligence.diligence import BaseDiligence + +if TYPE_CHECKING: + from dandy.llm.connector import LlmConnector + + 
+class VowelRemovalDiligence(BaseDiligence): + trigger_level: float = 0.0 + trigger_operator: Callable[[float, float], bool] = operator.le + + @classmethod + def apply(cls, llm_connector: LlmConnector) -> None: + for i in range(len(llm_connector.request_body.messages)): + if isinstance(llm_connector.request_body.messages[i], list): + for k in range(len(llm_connector.request_body.messages[i])): + for j in range(len(llm_connector.request_body.messages[i][k].content)): + stripped_text = cls.remove_vowels(llm_connector.request_body.messages[i][k].content[j].text) + llm_connector.request_body.messages[i][k].content[j].text = stripped_text + else: + for j in range(len(llm_connector.request_body.messages[i].content)): + stripped_text = cls.remove_vowels(llm_connector.request_body.messages[i].content[j].text) + llm_connector.request_body.messages[i].content[j].text = stripped_text + + llm_connector.request_body.messages.add_message( + role='system', + text='Assume Vowels', + prepend=True, + ) + + @staticmethod + def remove_vowels(text: str) -> str: + remove_str = ''.join(VOWELS) + + translation_table = str.maketrans('', '', remove_str) + + return text.lower().translate(translation_table) diff --git a/dandy/llm/request/message.py b/dandy/llm/request/message.py index 0291e703..ddbfeedf 100644 --- a/dandy/llm/request/message.py +++ b/dandy/llm/request/message.py @@ -114,7 +114,7 @@ def add_content_from_input_audio_base64_string(self, input_audio_base64_string: class MessageHistory(BaseModel): - messages: List[Message] = Field(default_factory=list) + messages: list[Message] = Field(default_factory=list) def __len__(self) -> int: return len(self.messages) @@ -122,10 +122,10 @@ def __len__(self) -> int: def __getitem__(self, index: int) -> list[Message] | Message: return self.messages[index] - def __iter__(self) -> Iterator[Message]: + def __iter__(self) -> Iterator[list[Message] | Message]: yield from self.messages - def __setitem__(self, index: int, message: Message): + def 
__setitem__(self, index: int, message: Message) -> None: self.messages[index] = message @property diff --git a/ruff.toml b/ruff.toml index d84fd40a..98754582 100644 --- a/ruff.toml +++ b/ruff.toml @@ -1,5 +1,5 @@ indent-width = 4 -line-length = 80 +line-length = 100 target-version = "py311" exclude = [ diff --git a/tests/llm/diligence/test_diligence.py b/tests/llm/diligence/test_diligence.py index a6ecd4ef..38df4ecc 100644 --- a/tests/llm/diligence/test_diligence.py +++ b/tests/llm/diligence/test_diligence.py @@ -20,5 +20,5 @@ def test_default_diligence(self) -> None: Recorder.stop_recording(f'diligence_{int_level}') - if int_level in {0, 21}: + if int_level in {0, 2, 20}: Recorder.to_html_file(f'diligence_{int_level}') From 258bcb41b71a1b278e72e68ac3f5d657f390d18a Mon Sep 17 00:00:00 2001 From: Nathan Johnson Date: Wed, 4 Mar 2026 08:00:04 -0700 Subject: [PATCH 9/9] refactor: improve typing, reorganize CLI processing phrases, and update configuration --- dandy/bot/bot.py | 2 +- dandy/cache/memory/cache.py | 10 +- dandy/cache/tools.py | 3 +- dandy/cli/actions/action.py | 8 +- dandy/cli/actions/bot/action.py | 41 +- dandy/cli/actions/code/action.py | 3 - dandy/cli/actions/explain/action.py | 7 +- dandy/cli/actions/help/action.py | 4 +- dandy/cli/actions/quit/action.py | 8 +- .../cli/intelligence/bots/source_code_bot.py | 28 +- .../intelligence/intel/source_code_intel.py | 7 +- dandy/cli/main.py | 14 +- .../{constants.py => processing_phrases.py} | 405 +++++++++++------- dandy/cli/session.py | 24 +- dandy/cli/tui/printer.py | 2 +- dandy/http/intelligence/intel.py | 8 +- dandy/http/mixin.py | 2 +- docs/changelog/v2_changelog.md | 13 +- pyproject.toml | 4 + ty.toml | 2 + 20 files changed, 355 insertions(+), 240 deletions(-) rename dandy/cli/{constants.py => processing_phrases.py} (56%) create mode 100644 ty.toml diff --git a/dandy/bot/bot.py b/dandy/bot/bot.py index 240ded66..cc0e9219 100644 --- a/dandy/bot/bot.py +++ b/dandy/bot/bot.py @@ -52,7 +52,7 @@ def 
__getattribute__(self: Self, name: str) -> Any: # noqa: N807 and not hasattr(attr, '_wrapped') ): wrapped = record_process_wrapper(self, attr) - wrapped._wrapped = True + setattr(wrapped, '_wrapped', True) return wrapped diff --git a/dandy/cache/memory/cache.py b/dandy/cache/memory/cache.py index 9cc9affa..d628a5e4 100644 --- a/dandy/cache/memory/cache.py +++ b/dandy/cache/memory/cache.py @@ -23,23 +23,23 @@ def __len__(self) -> int: def get(self, key: str) -> Any | None: return self._cache.get(key) - def set(self, key: str, value: Any): + def set(self, key: str, value: Any) -> None: self._cache[key] = value self.clean() - def clean(self): + def clean(self) -> None: if len(self._cache) > self.limit: self._cache.popitem(last=False) @classmethod - def clear(cls, cache_name: str = CACHE_DEFAULT_NAME): + def clear(cls, cache_name: str = CACHE_DEFAULT_NAME) -> None: if cache_name in _memory_cache: _memory_cache[cache_name].clear() @classmethod - def clear_all(cls): + def clear_all(cls) -> None: _memory_cache.clear() @classmethod - def destroy_all(cls): + def destroy_all(cls) -> None: cls.clear_all() diff --git a/dandy/cache/tools.py b/dandy/cache/tools.py index 3a76976d..13058d33 100644 --- a/dandy/cache/tools.py +++ b/dandy/cache/tools.py @@ -26,11 +26,10 @@ def generate_cache_key(func: object, *args, **kwargs) -> str: hashable_kwargs, ) - hash_key = hashlib.shake_128( + return hashlib.shake_128( str(hashable_tuple).encode() ).hexdigest(16) - return hash_key def convert_to_hashable_str(obj: Any, hash_layer: int = 1) -> str: diff --git a/dandy/cli/actions/action.py b/dandy/cli/actions/action.py index 939c6a02..49be6a25 100644 --- a/dandy/cli/actions/action.py +++ b/dandy/cli/actions/action.py @@ -8,7 +8,7 @@ class BaseAction(ABC): description: str calls: tuple[str, ...] 
- def __init_subclass__(cls, **kwargs): + def __init_subclass__(cls, **kwargs) -> None: check_attrs = ['name', 'description', 'calls'] for attr in check_attrs: if not hasattr(cls, attr): @@ -16,7 +16,7 @@ def __init_subclass__(cls, **kwargs): raise ValueError(message) @abstractmethod - def help(self): + def help(self) -> None: raise NotImplementedError @classmethod @@ -32,7 +32,3 @@ def name_gerund(cls) -> str: @abstractmethod def run(self, user_input: str) -> str: raise NotImplementedError - - @abstractmethod - def render(self): - raise NotImplementedError diff --git a/dandy/cli/actions/bot/action.py b/dandy/cli/actions/bot/action.py index c4c6f6fb..40ee344f 100644 --- a/dandy/cli/actions/bot/action.py +++ b/dandy/cli/actions/bot/action.py @@ -18,7 +18,7 @@ class BotAction(BaseAction): description = 'Bots at your service!' calls = ('b', 'bot') - def __init__(self): + def __init__(self) -> None: self.bots_path = Path(session.project_dandy_path, 'bots') make_directory(self.bots_path) @@ -30,6 +30,13 @@ def __init__(self): 'run': self.run_bot, } + @property + def help_string(self) -> str: + return f"""Usage: /bot run [optional inline prompt] + If no prompt, enter multi-line (end with /end). 
+ Other subcommands: {self.sub_commands_methods.keys()} + """ + def build_bot(self, user_input: str) -> str: parts = user_input.split() @@ -55,6 +62,8 @@ def build_bot(self, user_input: str) -> str: .sub_heading('Tutorials') .lb() .file(Path(session.project_base_path, 'docs', 'tutorials', 'bots.md')) + .lb() + .text('The file name for this code should be postfixed with `_bot` example `task_reviewer_bot.py`') ) source_code_intel = SourceCodeBot().process( @@ -66,13 +75,10 @@ def build_bot(self, user_input: str) -> str: tui.printer.end_task(start_time) - return f'Bot created at "{Path(self.bots_path, source_code_intel.recommended_file_name)}"' + return f'Bot created at "{Path(self.bots_path, source_code_intel.file_name_with_extension)}"' - def help(self): - return f"""Usage: /bot run [optional inline prompt] - If no prompt, enter multi-line (end with /end). - Other subcommands: {self.sub_commands_methods.keys()} - """ + def help(self) -> None: + print(self.help_string) def list_bots(self, user_input: str) -> str: assert user_input @@ -88,8 +94,7 @@ def run(self, user_input: str) -> str: user_input=user_input ) - else: - return self.help() + return self.help_string def run_bot(self, user_input: str) -> str: parts = user_input.split() @@ -110,25 +115,25 @@ def run_bot(self, user_input: str) -> str: # Find the first Bot subclass in the module bot_class: type | None = None - for name, obj in inspect.getmembers(module): + for _, obj in inspect.getmembers(module): if inspect.isclass(obj) and issubclass(obj, Bot) and obj != Bot: bot_class: type = obj break - if not bot_class: + if bot_class is None: return f"Error: No Bot subclass found in {module_name}.py" + output = '' + try: - bot_class().process() + if issubclass(bot_class, Bot): + bot_class().process() - return f'{bot_class.__name__} ran successfully!' + output = f'{bot_class.__name__} ran successfully!' 
except Exception as e: - message = f"Bot failed with Error: {e}" - return message + output = f"Bot failed with Error: {e}" - def render(self): - # Placeholder; not used in current CLI, but required - pass + return output @property def bot_files(self) -> list[str] | None: diff --git a/dandy/cli/actions/code/action.py b/dandy/cli/actions/code/action.py index 38f2d394..0c6a563a 100644 --- a/dandy/cli/actions/code/action.py +++ b/dandy/cli/actions/code/action.py @@ -23,6 +23,3 @@ def run(self, user_input: str) -> str: tui.printer.end_task(start_time) return f'Coding {user_input}...' - - def render(self) -> None: - print('hello') diff --git a/dandy/cli/actions/explain/action.py b/dandy/cli/actions/explain/action.py index 089f1d6d..d9bcfd85 100644 --- a/dandy/cli/actions/explain/action.py +++ b/dandy/cli/actions/explain/action.py @@ -7,13 +7,10 @@ class ExplainAction(BaseAction): description = 'This will explain what the current project does.' calls = ('e', 'explain') - def help(self): + def help(self) -> None: print('Chat help') - def run(self, user_input: str): + def run(self, user_input: str) -> str: return explain_project_workflow( user_input=user_input, ) - - def render(self): - print('hello') diff --git a/dandy/cli/actions/help/action.py b/dandy/cli/actions/help/action.py index d82d7e36..6247af2e 100644 --- a/dandy/cli/actions/help/action.py +++ b/dandy/cli/actions/help/action.py @@ -10,7 +10,7 @@ class HelpAction(BaseAction): description = 'Get help on how to use the Dandy command line interface.' 
calls = ('h', 'help') - def help(self): + def help(self) -> None: print('help of all sorts') def run(self, user_input: str) -> str: @@ -23,5 +23,3 @@ def run(self, user_input: str) -> str: return default_intel.response - def render(self): - print('Helping...') \ No newline at end of file diff --git a/dandy/cli/actions/quit/action.py b/dandy/cli/actions/quit/action.py index 22b77cb9..92aa0cfa 100644 --- a/dandy/cli/actions/quit/action.py +++ b/dandy/cli/actions/quit/action.py @@ -8,11 +8,9 @@ class QuitAction(BaseAction): description = 'Quit the application.' calls = ('q', 'quit') - def help(self): + def help(self) -> None: print('Quit help') - def run(self, user_input: str): + def run(self, user_input: str) -> str: + assert user_input sys.exit(0) - - def render(self): - print('Quitting...') \ No newline at end of file diff --git a/dandy/cli/intelligence/bots/source_code_bot.py b/dandy/cli/intelligence/bots/source_code_bot.py index 954aa9d4..e1115c43 100644 --- a/dandy/cli/intelligence/bots/source_code_bot.py +++ b/dandy/cli/intelligence/bots/source_code_bot.py @@ -7,33 +7,31 @@ class SourceCodeBot(Bot): role = 'Senior Developer' task = 'Read the instructions and write the source code for the user.' 
- guidelines = Prompt().list([ - 'You\'re only creating one file so focus on completeness.', - 'The file name should not contain a path and should be post fixed with `_bot`.', - ]) + guidelines = Prompt().list( + [ + 'You\'re only creating one file so focus on completeness.', + 'The file name should not contain a path and must include the extensions' + ] + ) intel_class = SourceCodeIntel @recorder_to_html_file('source_code_bot') - def process( - self, - user_input: str, - code_reference_prompt: Prompt, - ) -> SourceCodeIntel: + def process(self, user_input: str, code_reference_prompt: Prompt) -> SourceCodeIntel: self.llm.messages.add_message( role='user', text=( Prompt() - .text('Below is the code I want you to reference while writing the source code for my next request.') + .text( + 'Below is the code I want you to reference while writing the source code for my next request.' + ) .prompt(code_reference_prompt) .to_str() - ) + ), ) self.llm.messages.add_message( role='system', - text='I have read through the provided code and will use it as a reference.' 
+ text='I have read through the provided code and will use it as a reference.', ) - return self.llm.prompt_to_intel( - prompt=user_input, - ) + return self.llm.prompt_to_intel(prompt=user_input) diff --git a/dandy/cli/intelligence/intel/source_code_intel.py b/dandy/cli/intelligence/intel/source_code_intel.py index 67da6f4c..a8075756 100644 --- a/dandy/cli/intelligence/intel/source_code_intel.py +++ b/dandy/cli/intelligence/intel/source_code_intel.py @@ -6,13 +6,12 @@ class SourceCodeIntel(BaseIntel): - recommended_file_name: str + file_name_with_extension: str language: Literal['python'] - extension: Literal['py'] code: str - def write_to_directory(self, dir_path: Path | str): + def write_to_directory(self, dir_path: Path | str) -> None: write_to_file( - file_path=Path(dir_path) / self.recommended_file_name, + file_path=Path(dir_path) / self.file_name_with_extension, content=self.code, ) diff --git a/dandy/cli/main.py b/dandy/cli/main.py index 7bf5f7dc..361c1f02 100644 --- a/dandy/cli/main.py +++ b/dandy/cli/main.py @@ -22,16 +22,16 @@ sys.path.append(str(CWD_PATH)) -from dandy.cli.session import session -from dandy.cli.utils import check_or_create_settings +from dandy.cli.session import session # noqa: E402 +from dandy.cli.utils import check_or_create_settings # noqa: E402 -def main(): +def main() -> None: sys.path.append(str(CWD_PATH)) check_or_create_settings(CWD_PATH) - from dandy.conf import settings + from dandy.conf import settings # noqa: PLC0415 settings.reload_from_os() @@ -41,7 +41,7 @@ def main(): if not session.is_loaded: session.save() - from dandy.cli.cli import DandyCli + from dandy.cli.cli import DandyCli # noqa: PLC0415 cli = DandyCli() @@ -56,7 +56,7 @@ def main(): arg_term = Terminal() - print(arg_term.bold_blue(f'\nDandy')) + print(arg_term.bold_blue('\nDandy')) cli.process_user_input( user_input=user_input @@ -65,7 +65,7 @@ def main(): else: cli.run() - print('') + print() if __name__ == '__main__': diff --git a/dandy/cli/constants.py 
b/dandy/cli/processing_phrases.py similarity index 56% rename from dandy/cli/constants.py rename to dandy/cli/processing_phrases.py index 6f6f4f84..3f1d7fac 100644 --- a/dandy/cli/constants.py +++ b/dandy/cli/processing_phrases.py @@ -1,173 +1,278 @@ PROCESSING_PHRASES = ( - 'Twirling Monocle', - 'Fluffing Cravat', - 'Sipping Earl Grey with Pinky Extended', - 'Tapping Cane Rhythmically', - 'Adjusting Suspenders with a Flick', - 'Dusting Lapel with a Silk Brush', - 'Folding Pocket Square Precisely', - 'Polishing Cufflinks with a Velvet Cloth', - 'Checking Waistcoat Buttons', - 'Stroking Silk Top Hat', - 'Twirling Mustache with Wax', - 'Unfurling a Pocket Square', - 'Measuring Cufflinks with Calipers', 'Adjusting Glove Seams', - 'Inspecting Lorgnette for Smudges', - 'Dabbing Colognes with a Q-Tip', - 'Calibrating a Pocket Compass', - 'Straightening a Spats Buckle', - 'Polishing a Brass Knuckle Duster', - 'Counting Monogram Stitches', - 'Tuning a Pocket Watch', - 'Folding a Handkerchief into a Swan', - 'Adjusting a Top Hat Band', - 'Polishing Spectacles with a Monogrammed Cloth', - 'Twirling a Walking Stick', - 'Sipping Brandy from a Crystal Glass', - 'Examining a Fob Chain with a Magnifier', + 'Adjusting Suspenders with a Flick', 'Adjusting a Bow Tie with a Miniature Ruler', - 'Dusting a Tailcoat with a Feathers Duster', - 'Measuring Cuff Length with Calipers', - 'Polishing a Silver Cigarette Case', - 'Fluffing a Silk Scarf', - 'Checking Time on a Skeleton Watch', 'Adjusting a Cravat Pin', - 'Polishing a Patent Leather Shoe', - 'Twirling a Foil Epee', - 'Scenting a Letter with Lavender', + 'Adjusting a Cravat with a Warp-Field Measuring Tape', + 'Adjusting a Diamond-Encrusted Cufflink with Chroniton Tweezers', + 'Adjusting a Diamond-Encrusted Cufflink', + 'Adjusting a Diamond-Encrusted Spats Strap with Temporal Tweezers', + 'Adjusting a Diamond-Encrusted Spats Strap', + 'Adjusting a Diamond-Studded Cravat Pin with Laser Guidance', + 'Adjusting a Diamond-Studded 
Cravat Pin', + 'Adjusting a Diamond-Studded Cravat with Nanobot Tweezers', + 'Adjusting a Diamond-Studded Cufflink with Tweezers', + 'Adjusting a Diamond-Studded Spats Strap with a Neural Interface', + 'Adjusting a Monocle Chain with Quantum-Tuned Tweezers', + 'Adjusting a Monocle Chain with a Chroniton Micro-Adjuster', + 'Adjusting a Monocle Chain with a Micro-Adjuster', + 'Adjusting a Monocle Chain with a Temporal Micro-Adjuster', + 'Adjusting a Monocle Chain', + 'Adjusting a Monocle with a Diamond-Encrusted Wrench', 'Adjusting a Pince-Nez', - 'Folding a Newspaper with Precision', - 'Measuring Cufflink Diameter with Calipers', - 'Polishing a Silver Flask', + 'Adjusting a Silk-Top Hat Band', + 'Adjusting a Silk-Top Hat with a Graviton Brush', + 'Adjusting a Silk-Top Hat with a Monogrammed Brush', + 'Adjusting a Silk-Top Hat with a Monogrammed Nanobrush', + 'Adjusting a Silted Waistcoat with a Laser-Guided Tailor\'s Chalk', + 'Adjusting a Spats Buckle with Graviton Tweezers', + 'Adjusting a Spats Buckle with Neural-Link Precision', 'Adjusting a Spats Strap', - 'Dabbing Wrist with Colognes', - 'Tuning a Pocket Watch with a Tiny Screwdriver', - 'Straightening a Diamond Stickpin', - 'Polishing a Silver Cane Tip', - 'Fluffing a Silk Handkerchief', - 'Counting Threads in a Linen Handkerchief', - 'Adjusting a Watch Chain', - 'Polishing a Monogrammed Cufflink', - 'Sipping Tea through a Mustache Guard', - 'Twirling a Pocket Watch Chain', - 'Folding a Letter into a Crane', + 'Adjusting a Top Hat Band with a Holographic Level', + 'Adjusting a Top Hat Band with a Singularity Wrench', + 'Adjusting a Top Hat Band', + 'Adjusting a Top Hat with a Diamond-Encrusted Feather', + 'Adjusting a Top Hat with a Diamond-Studded Brush', + 'Adjusting a Top Hat with a Feather-Trimmed Brush', 'Adjusting a Top Hat with a Feathers Duster', - 'Polishing a Silver Cigarette Holder', - 'Measuring the Perfect Cravat Angle', - 'Dusting a Lapel with a Diamond-Encrusted Brush', + 'Adjusting a Top Hat 
with a Quantum Feather Duster', + 'Adjusting a Top Hat with a Reality-Warping Feather', + 'Adjusting a Top Hat with a Silk-Trimmed Brush', + 'Adjusting a Top Hat with a Singularity-Encrusted Brush', + 'Adjusting a Watch Chain', + 'Calibrating a Monocle with a Quantum Focus Ring', + 'Calibrating a Pocket Compass', + 'Calibrating a Scent Diffuser with Exotic Matter', + 'Calibrating a Scent Diffuser with Temporal Harmonics', + 'Calibrating a Scent Diffuser with a Tachyon Spanner', 'Calibrating a Scent Diffuser', - 'Tuning a Pocket Watch with a Loupe', - 'Adjusting a Diamond-Encrusted Cufflink', - 'Polishing a Gold-Plated Pocket Knife', - 'Folding a Silk Scarf into an Origami Swan', - 'Sipping Sherry from a Crystal Decanter', - 'Measuring Glove Fit with Calipers', - 'Adjusting a Monocle Chain', - 'Polishing a Silver-Plated Snuff Box', - 'Twirling a Silk Handkerchief', - 'Dabbing a Lapel with Scented Water', - 'Folding a Pocket Square into a Rose', - 'Adjusting a Top Hat with a Diamond-Studded Brush', - 'Polishing a Platinum Cufflink', - 'Counting the Stitches in a Monogram', - 'Sipping Absinthe through a Sugar Cube', - 'Measuring a Cravat with a Tailor’s Tape', - 'Polishing a Gold-Encrusted Pocket Watch', - 'Adjusting a Silk-Top Hat Band', - 'Fluffing a Velvet Cufflink', - 'Tuning a Pocket Watch with a Microscope', - 'Dusting a Tailcoat with a Diamond-Tipped Brush', - 'Folding a Handkerchief into a Butterfly', - 'Adjusting a Diamond-Studded Cravat Pin', - 'Polishing a Platinum-Plated Cane', - 'Sipping Champagne from a Crystal Flute', - 'Measuring Cufflink Depth with Laser Calipers', - 'Adjusting a Monocle with a Diamond-Encrusted Wrench', - 'Polishing a Ruby-Encrusted Pocket Square', - 'Folding a Newspaper into a Miniature Eiffel Tower', + 'Checking Time on a Skeleton Watch', + 'Checking Waistcoat Buttons', + 'Counting Monogram Stitches with a Multiverse Scanner', + 'Counting Monogram Stitches with a Quantum Microscope', + 'Counting Monogram Stitches', + 'Counting 
Stitches in a Monogram with a Singularity Scanner', + 'Counting Stitches in a Monogram with a Tachyon Scanner', + 'Counting Threads in a Handkerchief via Holographic Projector', + 'Counting Threads in a Linen Handkerchief', + 'Counting Threads in a Silk Cravat via DNA-Scanning Loupe', + 'Counting Weave Dimensions in a 4D Silk Cravat', + 'Counting Weaves in a Silk Cravat with a Tachyon Microscope', + 'Counting Weaves in a Spun-Gold Handkerchief via Hologram', 'Counting the Dots on a Silk Tie', - 'Sipping Brandy from a Gold-Plated Snifter', - 'Tuning a Pocket Watch with a Sonic Screwdriver', - 'Adjusting a Top Hat with a Feather-Trimmed Brush', - 'Polishing a Diamond-Encrusted Shoe Horn', - 'Folding a Handkerchief into a Peacock', - 'Measuring Glove Seam Tension with a Tensiometer', - 'Dabbing a Cufflink with Scented Oil', - 'Adjusting a Silk-Top Hat with a Monogrammed Brush', - 'Polishing a Gold-Plated Cufflink', - 'Fluffing a Linen Handkerchief with a Feather', + 'Counting the Stitches in a Monogram with a DNA Scanner', + 'Counting the Stitches in a Monogram with a Magnifier', + 'Counting the Stitches in a Monogram with a Singularity Scanner', + 'Counting the Stitches in a Monogram', + 'Counting the Threads in a Silk Cravat with a Quantum Microscope', 'Counting the Threads in a Silk Cravat', - 'Sipping Whiskey from a Crystal Tumbler', - 'Tuning a Pocket Watch with a Quantum Calibrator', - 'Adjusting a Diamond-Encrusted Spats Strap', - 'Polishing a Platinum-Plated Monocle', - 'Folding a Pocket Square into a Falcon', - 'Measuring Cufflink Symmetry with a Laser Level', + 'Counting the Weave Patterns in a Linen Handkerchief', + 'Dabbing Colognes with a Q-Tip', + 'Dabbing Cufflink with Quantum Scent Nanoparticles', + 'Dabbing Cufflink with Scented Dark Matter Nanoparticles', + 'Dabbing Cufflinks with Antimatter Scent Oil', + 'Dabbing Lapel with Essence of Collapsed Stars', + 'Dabbing Lapel with Essence of Distant Galaxies (Revised)', + 'Dabbing Lapel with Essence of 
Distant Supernovae', + 'Dabbing Wrist with Colognes', + 'Dabbing a Cufflink with Scented Dark Matter', + 'Dabbing a Cufflink with Scented Nanoparticles', + 'Dabbing a Cufflink with Scented Oil', + 'Dabbing a Lapel with Essence of Distant Galaxies', + 'Dabbing a Lapel with Rare Orchid Essence', + 'Dabbing a Lapel with Scented Water', + 'Dusting Lapel with a Silk Brush', + 'Dusting a Lapel with a Diamond-Encrusted Brush', + 'Dusting a Tailcoat with Anti-Gravity Feathers', + 'Dusting a Tailcoat with Photon-Enhanced Diamond Duster', 'Dusting a Tailcoat with a Diamond-Encrusted Duster', - 'Adjusting a Top Hat with a Silk-Trimmed Brush', - 'Polishing a Gold-Plated Cane Tip', + 'Dusting a Tailcoat with a Diamond-Tipped Brush', + 'Dusting a Tailcoat with a Feathers Duster', + 'Dusting a Tailcoat with a Photon-Enhanced Brush', + 'Dusting a Tailcoat with a Quantum Duster', + 'Dusting a Tailcoat with a Singularity-Enhanced Brush', + 'Dusting a Top Hat with Nebula-Infused Feather Duster', + 'Dusting a Top Hat with Nebula-Infused Microfibers', + 'Examining a Fob Chain with a Magnifier', + 'Filing a Monocle with Diamond-Encrusted Nanofiles', + 'Fluffing Cravat', + 'Fluffing a Linen Handkerchief with Dark Matter Puffs', + 'Fluffing a Linen Handkerchief with Quantum Fluff', + 'Fluffing a Linen Handkerchief with a Feather', + 'Fluffing a Silk Cravat with Anti-Gravity Puffs', + 'Fluffing a Silk Handkerchief with a Diamond-Tipped Brush', + 'Fluffing a Silk Handkerchief', + 'Fluffing a Silk Scarf with Singularity Fluff', + 'Fluffing a Silk Scarf with Warp-Field Micro-Airbrush', 'Fluffing a Silk Scarf with a Feather Duster', - 'Counting the Stitches in a Monogram with a Magnifier', - 'Sipping Gin from a Crystal Goblet', - 'Tuning a Pocket Watch with a Holographic Interface', - 'Adjusting a Diamond-Studded Cufflink with Tweezers', - 'Polishing a Platinum-Plated Cigarette Case', + 'Fluffing a Silk Scarf with a Warp-Field Feathers Duster', + 'Fluffing a Silk Scarf', + 'Fluffing a Velvet 
Cufflink via Quantum Resonance', + 'Fluffing a Velvet Cufflink via Warp-Field Resonance', + 'Fluffing a Velvet Cufflink with Warp-Field Feathers', + 'Fluffing a Velvet Cufflink with a Micro-Airbrush', + 'Fluffing a Velvet Cufflink', + 'Folding Pocket Square Precisely', + 'Folding a Handkerchief into a 4D Hypercube', + 'Folding a Handkerchief into a Butterfly', + 'Folding a Handkerchief into a Fractal Peacock', + 'Folding a Handkerchief into a Multiverse Swan', + 'Folding a Handkerchief into a Peacock', + 'Folding a Handkerchief into a Quantum Butterfly', + 'Folding a Handkerchief into a Singularity Spiral', 'Folding a Handkerchief into a Swan (Again)', - 'Measuring Glove Fit with Nanometer Precision', - 'Dabbing a Lapel with Rare Orchid Essence', - 'Adjusting a Monocle Chain with a Micro-Adjuster', - 'Polishing a Ruby-Encrusted Cane', - 'Sipping Rum from a Silver Goblet', + 'Folding a Handkerchief into a Swan', + 'Folding a Handkerchief into a Tesseract Swan', + 'Folding a Letter into a Crane', + 'Folding a Linen Cravat into a Perfect Origami Lotus', + 'Folding a Newspaper into a 5D Hypercube', + 'Folding a Newspaper into a Klein Bottle', + 'Folding a Newspaper into a Miniature Eiffel Tower', 'Folding a Newspaper into a Perfect Cube', - 'Counting the Weave Patterns in a Linen Handkerchief', - 'Tuning a Pocket Watch with a Chroniton Emitter', - 'Adjusting a Top Hat with a Diamond-Encrusted Feather', - 'Polishing a Gold-Plated Pocket Knife with a Microfiber Cloth', - 'Fluffing a Silk Handkerchief with a Diamond-Tipped Brush', - 'Measuring Cravat Symmetry with a Digital Protractor', - 'Dusting a Tailcoat with a Quantum Duster', - 'Adjusting a Diamond-Studded Cravat Pin with Laser Guidance', - 'Polishing a Platinum-Encrusted Monocle with Nano-Solution', + 'Folding a Newspaper into a Time-Dilation Tower', + 'Folding a Newspaper with Precision', + 'Folding a Pocket Square into a Chroniton Rose', + 'Folding a Pocket Square into a Falcon', + 'Folding a Pocket Square into a 
Fractal Phoenix', + 'Folding a Pocket Square into a Fractal Rose', + 'Folding a Pocket Square into a Möbius Strip', 'Folding a Pocket Square into a Phoenix', - 'Sipping Port from a Crystal Decanter', + 'Folding a Pocket Square into a Quantum Origami Falcon', + 'Folding a Pocket Square into a Rose', + 'Folding a Silk Scarf into an Origami Swan', + 'Infusing a Handkerchief with Nebula-Scented Nanoparticles', + 'Inspecting Lorgnette for Smudges', + 'Inspecting a Cane Tip with a Tachyon Microscope', + 'Inspecting a Cufflink with a Quantum Magnifier', + 'Inspecting a Silver Vinaigrette for Proper Aromatherapy Calibration', + 'Measuring Cravat Angle with a Holographic Protractor', + 'Measuring Cravat Angle with a Singularity Protractor', + 'Measuring Cravat Symmetry with a Digital Protractor', + 'Measuring Cravat Symmetry with a Graviton Ruler', + 'Measuring Cravat Symmetry with a Non-Euclidean Protractor', + 'Measuring Cuff Length with Calipers', + 'Measuring Cufflink Asymmetry with a Chroniton Ruler', + 'Measuring Cufflink Depth with Laser Calipers', + 'Measuring Cufflink Depth with a Quantum Ruler', 'Measuring Cufflink Depth with a Subatomic Ruler', - 'Dabbing a Cufflink with Scented Nanoparticles', - 'Adjusting a Top Hat Band with a Holographic Level', + 'Measuring Cufflink Depth with a Tachyon Ruler', + 'Measuring Cufflink Diameter with Calipers', + 'Measuring Cufflink Symmetry with a Laser Level', + 'Measuring Cufflink Symmetry with a Tachyon Ruler', + 'Measuring Cufflinks with Calipers', + 'Measuring Glove Fit with Calipers', + 'Measuring Glove Fit with Nanometer Precision', + 'Measuring Glove Fit with a Multidimensional Caliper', + 'Measuring Glove Fit with a Singularity Caliper', + 'Measuring Glove Gauntlet Tension with a Gravitational Sensor', + 'Measuring Glove Seam Tension with a Graviton Sensor', + 'Measuring Glove Seam Tension with a Tachyon Sensor', + 'Measuring Glove Seam Tension with a Tensiometer', + 'Measuring a Cravat with a Tailor\'s Tape', + 
'Measuring the Perfect Cravat Angle', + 'Polishing Cufflinks with a Velvet Cloth', + 'Polishing Spectacles with a Monogrammed Cloth', + 'Polishing a Brass Knuckle Duster', + 'Polishing a Diamond-Encrusted Cane with Void Energy', + 'Polishing a Diamond-Encrusted Cigarette Case with Neutronium', + 'Polishing a Diamond-Encrusted Cufflink with Neutronium Polish', + 'Polishing a Diamond-Encrusted Shoe Horn with Exotic Matter', + 'Polishing a Diamond-Encrusted Shoe Horn', 'Polishing a Diamond-Encrusted Snuff Box with Liquid Diamond', - 'Fluffing a Velvet Cufflink with a Micro-Airbrush', - 'Counting the Stitches in a Monogram with a DNA Scanner', - 'Sipping Absinthe through a Diamond-Encrusted Sugar Cube', - 'Tuning a Pocket Watch with a Temporal Calibrator', - 'Adjusting a Silk-Top Hat with a Monogrammed Nanobrush', + 'Polishing a Diamond-Encrusted Snuff Box with Void Energy', + 'Polishing a Gold-Encrusted Pocket Watch', + 'Polishing a Gold-Plated Cane Tip with Liquid Platinum', + 'Polishing a Gold-Plated Cane Tip', 'Polishing a Gold-Plated Cane with Liquid Platinum', - 'Folding a Handkerchief into a Quantum Butterfly', - 'Measuring Glove Seam Tension with a Graviton Sensor', - 'Dusting a Tailcoat with a Photon-Enhanced Brush', - 'Adjusting a Diamond-Studded Spats Strap with a Neural Interface', - 'Polishing a Platinum-Plated Monocle with Antimatter Polish', - 'Folding a Pocket Square into a Fractal Rose', - 'Sipping Ambrosia from a Celestial Chalice', - 'Measuring Cufflink Symmetry with a Tachyon Ruler', - 'Dabbing a Cufflink with Scented Dark Matter', - 'Adjusting a Top Hat with a Singularity-Encrusted Brush', 'Polishing a Gold-Plated Cigarette Case with Neutronium', - 'Fluffing a Silk Scarf with a Warp-Field Feathers Duster', - 'Counting the Threads in a Silk Cravat with a Quantum Microscope', - 'Sipping Starlight from a Nebula Goblet', - 'Tuning a Pocket Watch with a Reality-Adjustment Dial', - 'Adjusting a Diamond-Encrusted Cufflink with Chroniton Tweezers', + 
'Polishing a Gold-Plated Cufflink', + 'Polishing a Gold-Plated Monocle with Chroniton Polish', + 'Polishing a Gold-Plated Pocket Knife with a Microfiber Cloth', + 'Polishing a Gold-Plated Pocket Knife', + 'Polishing a Monocle Lens with Quantum-Enhanced Microfiber', + 'Polishing a Monogrammed Cufflink', + 'Polishing a Patent Leather Shoe', + 'Polishing a Platinum Cane Tip with Liquid Time', + 'Polishing a Platinum Cufflink', + 'Polishing a Platinum Monocle with Antimatter Polish', + 'Polishing a Platinum-Encrusted Cane with Exotic Matter', + 'Polishing a Platinum-Encrusted Monocle with Nano-Solution', 'Polishing a Platinum-Plated Cane Tip with Exotic Matter', - 'Folding a Handkerchief into a Tesseract Swan', - 'Measuring Glove Fit with a Multidimensional Caliper', - 'Dabbing a Lapel with Essence of Distant Galaxies', - 'Adjusting a Monocle Chain with a Temporal Micro-Adjuster', + 'Polishing a Platinum-Plated Cane with Neutronium Polish', + 'Polishing a Platinum-Plated Cane', + 'Polishing a Platinum-Plated Cigarette Case', + 'Polishing a Platinum-Plated Monocle with Antimatter Polish', + 'Polishing a Platinum-Plated Monocle', + 'Polishing a Ruby-Crusted Cane Tip with Liquid Time', + 'Polishing a Ruby-Encrusted Cane', + 'Polishing a Ruby-Encrusted Monocle with Void Polish', 'Polishing a Ruby-Encrusted Pocket Square with Nebula Dust', - 'Folding a Newspaper into a Klein Bottle', - 'Counting the Stitches in a Monogram with a Singularity Scanner', + 'Polishing a Silver Cane Tip', + 'Polishing a Silver Cigarette Case', + 'Polishing a Silver Cigarette Holder', + 'Polishing a Silver Flask', + 'Polishing a Silver-Plated Snuff Box', + 'Polishing a Top Hat Band with Liquid Stardust', + 'Scenting a Letter with Lavender', + 'Sipping Absinthe through a Diamond-Encrusted Sugar Cube', + 'Sipping Absinthe through a Sugar Cube', + 'Sipping Ambrosia from a Celestial Chalice', + 'Sipping Ambrosia from a Celestial Nebula Goblet', + 'Sipping Ambrosia from a Chroniton-Infused Vessel', + 
'Sipping Ambrosia from a Singularity Vessel',
+    'Sipping Black Hole Brew from an Event Horizon Chalice',
+    'Sipping Brandy from a Crystal Glass',
+    'Sipping Brandy from a Gold-Plated Snifter',
+    'Sipping Champagne from a Crystal Flute',
+    'Sipping Earl Grey with Pinky Extended',
+    'Sipping Elixir from a Chroniton-Infused Goblet',
+    'Sipping Eternity from a Black Hole Decanter',
+    'Sipping Eternity from a Timeless Chroniton Decanter',
     'Sipping Eternity from a Timeless Vessel',
+    'Sipping Eternity through a Chroniton Filter',
+    'Sipping Gilded Absinthe from a Singularity-Infused Glass',
+    'Sipping Gin from a Crystal Goblet',
+    'Sipping Liquid Starlight from a Cosmic Chalice',
+    'Sipping Liquid Time from a Tachyon Chalice',
+    'Sipping Port from a Crystal Decanter',
+    'Sipping Rum from a Silver Goblet',
+    'Sipping Sherry from a Crystal Decanter',
+    'Sipping Starlight Brew from an Event Horizon Chalice',
+    'Sipping Starlight from a Nebula Goblet',
+    'Sipping Starlight through a Diamond-Encrusted Filter',
+    'Sipping Tea through a Mustache Guard',
+    'Sipping Whiskey from a Crystal Tumbler',
+    'Straightening a Diamond Stickpin',
+    'Straightening a Spats Buckle',
+    'Stroking Silk Top Hat',
+    'Stroking a Mutton Chop Whisker with a Diamond-Encrusted Comb',
+    'Tapping Cane Rhythmically',
+    'Tuning a Pocket Compass via Multiverse Alignment',
+    'Tuning a Pocket Compass with a Chroniton Emitter',
+    'Tuning a Pocket Compass with a Multiverse Dial',
+    'Tuning a Pocket Watch via Black Hole Calibration',
+    'Tuning a Pocket Watch via Multiverse Calibration Tool',
+    'Tuning a Pocket Watch via Quantum Entanglement',
+    'Tuning a Pocket Watch via Temporal Harmonic Key',
+    'Tuning a Pocket Watch via Temporal Harmonic Resonator',
+    'Tuning a Pocket Watch with a Chroniton Emitter',
+    'Tuning a Pocket Watch with a Holographic Interface',
+    'Tuning a Pocket Watch with a Loupe',
+    'Tuning a Pocket Watch with a Microscope',
     'Tuning a Pocket Watch with a Multiverse Calibration Tool',
-    'Adjusting 
a Top Hat with a Reality-Warping Feather', - 'Polishing a Diamond-Encrusted Cane with Void Energy', - 'Fluffing a Linen Handkerchief with Quantum Fluff', -) \ No newline at end of file + 'Tuning a Pocket Watch with a Multiverse Tuning Fork', + 'Tuning a Pocket Watch with a Quantum Calibrator', + 'Tuning a Pocket Watch with a Reality-Adjustment Dial', + 'Tuning a Pocket Watch with a Singularity Wrench', + 'Tuning a Pocket Watch with a Sonic Screwdriver', + 'Tuning a Pocket Watch with a Temporal Calibrator', + 'Tuning a Pocket Watch with a Tiny Screwdriver', + 'Tuning a Pocket Watch', + 'Twirling Monocle', + 'Twirling Mustache with Wax', + 'Twirling a Foil Epee', + 'Twirling a Pocket Watch Chain', + 'Twirling a Silk Handkerchief', + 'Twirling a Walking Stick', + 'Unfurling a Pocket Square', +) diff --git a/dandy/cli/session.py b/dandy/cli/session.py index c0f1157a..ba849de6 100644 --- a/dandy/cli/session.py +++ b/dandy/cli/session.py @@ -7,40 +7,46 @@ class DandyCliSession(BaseIntel): - project_base_path: Path | None = None - project_dandy_path: Path | None = None - project_dandy_cli_path: Path | None = None + project_base_path: Path is_loaded: bool = False def post_init(self, project_base_path: Path) -> None: self.project_base_path = project_base_path - self.project_dandy_path = Path( + + @property + def project_dandy_path(self) -> Path: + return Path( self.project_base_path, settings.DANDY_DIRECTORY, ) - self.project_dandy_cli_path = Path( + + @property + def project_dandy_cli_path(self) -> Path: + return Path( self.project_dandy_path, CLI_WORKING_DIRECTORY, ) + @property def session_file_path(self) -> Path: return Path(self.project_dandy_cli_path, 'session.json') - def load(self): + def load(self) -> None: if file_exists(self.session_file_path): loaded_session = DandyCliSession.create_from_file(self.session_file_path) loaded_session.project_base_path = self.project_base_path - loaded_session.project_dandy_cli_path = self.project_dandy_cli_path 
self.__dict__.update(loaded_session.__dict__) self.is_loaded = True - def save(self): + def save(self) -> None: self.save_to_file(self.session_file_path) self.is_loaded = True -session = DandyCliSession() +session = DandyCliSession( + project_base_path=Path.cwd() +) diff --git a/dandy/cli/tui/printer.py b/dandy/cli/tui/printer.py index 4b2d1109..3cac9808 100644 --- a/dandy/cli/tui/printer.py +++ b/dandy/cli/tui/printer.py @@ -5,7 +5,7 @@ from dandy import constants from dandy.cli.actions.action import BaseAction -from dandy.cli.constants import PROCESSING_PHRASES +from dandy.cli.processing_phrases import PROCESSING_PHRASES from dandy.cli.session import session from dandy.cli.tui.ascii import DANDY_ANSII from dandy.cli.tui.tools import wrap_text_with_indentation diff --git a/dandy/http/intelligence/intel.py b/dandy/http/intelligence/intel.py index 0087291c..f61aae51 100644 --- a/dandy/http/intelligence/intel.py +++ b/dandy/http/intelligence/intel.py @@ -21,7 +21,7 @@ def from_requests_response(cls, requests_response: requests.Response) -> Self: except ValueError: json_data = {} - return HttpResponseIntel( + return cls( status_code=requests_response.status_code, reason=requests_response.reason, text=requests_response.text, @@ -29,7 +29,7 @@ def from_requests_response(cls, requests_response: requests.Response) -> Self: ) @property - def json_str(self) -> str: + def json_str(self) -> str | None: return self.text @@ -48,7 +48,7 @@ class HttpRequestIntel(BaseIntel): def json_str(self) -> str: return json.dumps(self.json_data) - def model_post_init(self, __context: Any, /): + def model_post_init(self, __context: Any, /) -> None: self.generate_headers() def as_requests_request(self) -> requests.Request: @@ -83,7 +83,7 @@ def to_http_response_intel(self) -> HttpResponseIntel: return HttpResponseIntel.from_requests_response(response) - def generate_headers(self): + def generate_headers(self) -> None: if self.bearer_token is not None: if self.headers is None: self.headers = 
{}
diff --git a/dandy/http/mixin.py b/dandy/http/mixin.py
index 6e131bf9..9ebc4ea6 100644
--- a/dandy/http/mixin.py
+++ b/dandy/http/mixin.py
@@ -7,6 +7,6 @@ class HttpServiceMixin(BaseServiceMixin):
     def http(self) -> HttpService:
         return self._get_service_instance(HttpService)
 
-    def reset(self):
+    def reset(self) -> None:
         super().reset()
         self.http.reset()
diff --git a/docs/changelog/v2_changelog.md b/docs/changelog/v2_changelog.md
index 11facbf2..cc188db1 100644
--- a/docs/changelog/v2_changelog.md
+++ b/docs/changelog/v2_changelog.md
@@ -6,7 +6,18 @@
 
 - CLI !!!
     - Use `dandy` to access the new command line interface.
-    - 
+    - All commands have a basic help provided with autocomplete.
+    - The default / no command will answer questions about the CLI.
+- Diligence System (Experimental)
+    - The `Bot` class has a new attribute called `diligence` which is defaulted to `1.0`
+    - You can adjust `diligence` between `0.0` (almost no effort) and `2.0` (maximum effort)
+    - Adjusting the diligence level allows you to control processing with any LLM.
+    - This feature is experimental and works with any LLM model.
+
+### Fixes
+
+ - Updated the `dandy.conf.settings` to be much more flexible with different use cases and systems.
+ - Lots of refactoring, typing and code cleaning to new `ruff` and `ty` configuration.
 
 ## v2.0.0
 
diff --git a/pyproject.toml b/pyproject.toml
index ab36b8af..ebc80b1c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -90,6 +90,10 @@ version = { attr = "dandy.constants.__VERSION__" }
 
 [tool.ruff]
 extend = "ruff.toml"
+
+[tool.ty]
+extend = "ty.toml"
+
 [tool.codespell]
 # Ref: https://github.com/codespell-project/codespell#using-a-config-file
 skip = '.git*,*.svg,*.css,.cache,.npm'
diff --git a/ty.toml b/ty.toml
new file mode 100644
index 00000000..5dff2b25
--- /dev/null
+++ b/ty.toml
@@ -0,0 +1,2 @@
+[rules]
+invalid-argument-type="warn"
\ No newline at end of file