diff --git a/.gitignore b/.gitignore index 0d4ca451..b4a259a5 100644 --- a/.gitignore +++ b/.gitignore @@ -14,7 +14,7 @@ dist/ downloads/ eggs/ .eggs/ -lib/ +/lib/ lib64/ parts/ sdist/ diff --git a/.vscode/launch.json b/.vscode/launch.json index 3ddf5d21..7c5ddf42 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -21,5 +21,21 @@ "envFile": "${workspaceFolder}/.env.local", "console": "integratedTerminal" }, + { + "name": "Debug Chat API", + "type": "debugpy", + "request": "launch", + "preLaunchTask": "Create .env.tmp file", + "postDebugTask": "Delete .env.tmp file", + "module": "uvicorn", + "args": ["src.chat.api.app:app","--reload","--port","8000"], + "envFile": "${workspaceFolder}/.env.tmp", + "env": { + "ASKUI_WORKSPACES__LOG__FORMAT": "logfmt", + }, + "justMyCode": false, + "python": "${workspaceFolder}/.venv/bin/python", + "pythonArgs": ["-Xfrozen_modules=off"] + } ] } diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 00000000..3818fe82 --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,17 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "Create .env.tmp file", + "type": "shell", + "command": "op run --env-file=.env --no-masking -- printenv > ${workspaceFolder}/.env.tmp", + "problemMatcher": [] + }, + { + "label": "Delete .env.tmp file", + "type": "shell", + "command": "rm -f ${workspaceFolder}/.env.tmp", + "problemMatcher": [] + } + ] +} diff --git a/README.md b/README.md index 65418b68..123d82da 100644 --- a/README.md +++ b/README.md @@ -765,38 +765,64 @@ By default, we record usage data to detect and fix bugs inside the package and i If you would like to disable the recording of usage data, set the `ASKUI__VA__TELEMETRY__ENABLED` environment variable to `False`. 
-## Experimental Features +## Experimental -### Chat +### AskUI Chat -The chat is a streamlit app that allows you to give an agent instructions and observe via messages by the agent given as a response the reasoning and actions taken by the action in return. The actions can be replayed afterwards. +AskUI Chat is a web application that allows interacting with an AskUI Vision Agent similar to how it can be +done with `VisionAgent.act()` but in a more interactive manner that involves less code. Aside from +telling the AskUI Vision Agent what to do, the user can also demonstrate what to do (currently, only +clicking is supported). -Instead of giving an agent instructions about what to do, it also allows demonstrating/simulating certain actions (left clicking, typing, mouse moving) directly and replay those actions. +**⚠️ Warning:** AskUI Chat is currently in an experimental stage and has several limitations (see below). -Configure the chat by setting the following environment variables: +#### Configuration -- `ANTHROPIC_API_KEY` -- `ASKUI_WORKSPACE_ID` -- `ASKUI_TOKEN` +To use the chat, configure the following environment variables: -Start the chat with: +- `ASKUI_TOKEN`: the AskUI Vision Agent behind the chat currently uses the AskUI API +- `ASKUI_WORKSPACE_ID`: the AskUI Vision Agent behind the chat currently uses the AskUI API +- `ASKUI__CHAT_API__DATA_DIR` (optional, defaults to `$(pwd)/chat`): Currently, the AskUI chat stores its data in a directory locally. You can change the default directory by setting this environment variable. + +#### Installation ```bash -pdm run chat +pdm install # is going to install the dependencies of the api +pdm run chat:ui:install # is going to install the dependencies of the ui ``` -This should open the streamlit app in your default browser. 
- -Currently, the chat is in the experimental stage and has a lot of issues, for example: - -- You cannot cancel/stop the agent while it is running except by terminating/killing streamlit or hitting "stop" within the streamlit app. -- There is no option to retry a failed prompt. -- A chat/thread cannot be deleted. -- The agent often responds with that it cannot solve an issue because it does not see what to do. -- No option to edit message in a thread or start a new thread from an existing thread by editing a message within it or retrying a certain message. -- You have to rerun all actions taken by the agent even though they may have not lead to the goal and are, in fact, redundant. -- Often the agent does not get which window has focus --> You can tell it in the prompt and tell it to click on the window to interact with first to make sure it has focus. -- Often the agent goes into crazy loops. -- Display id cannot be configured but instead display 1 is taken by default. -- Connection issues with controller, e.g., if there are multiple chat sessions open at the same time, or if the agent was killed/terminated --> Restart controller, restart streamlit app, make sure it is the only controller and streamlit app session running. -- The chat sessions are identified by timestamp of when they were created. +You may need to give permissions on the first run of the Chat UI to demonstrate actions (aka record clicks). + +#### Usage + +```bash +pdm run chat:api # is going to start the api at port 8000 +pdm run chat:ui # is going to start the ui at port 3000 +``` + +You can use the chat to record a workflow and redo it later. For that, just tell the agent to redo all previous steps. + +- *Not efficient enough?* If some of the steps can be omitted in the rerun, you can just delete them or tell the agent to skip unnecessary steps. 
+- *Agent not doing what you want it to?* + - The agent may get confused with the coordinates of clicks demonstrated by the user as it seems to use other coordinates. To avoid this, just tell the agent that the coordinates may have changed in the meantime and that it should take screenshots along the way to determine where to click. + - It may also be helpful to tell the agent to first explain its understanding of the user's goal after having demonstrated some actions or before trying to get it to redo what has been done so that the agent can focus on the overarching goal instead of being reliant on specific actions. + +#### Limitations + +- A lot of errors are not handled properly and we allow the user to do a lot of actions that can lead to errors instead of properly guiding the user. +- The chat currently only allows rerunning actions through `VisionAgent.act()` which can be expensive, slow and is not necessarily the most reliable way to do it. +- A lot of quirks in UI and API. +- Currently, api and ui need to be run in dev mode. +- When demonstrating actions, the corresponding screenshot may not reflect the correct state of the screen before the action. In this case, cancel demonstrating, delete messages and try again. +- Currently, we only allow a maximum of 100 messages per conversation. +- When demonstrating actions, actions may be recorded that you did not want to record, e.g., stopping the demonstration. Just delete these messages afterwards. +- The agent is going to fail if there are no messages in the conversation, there is no tool use result message following the tool use message somewhere in the conversation, a message is too long etc. + Just adding or deleting the message in this case should fix the issue. +- You should not switch the conversation while waiting for an agent's answers or demonstrating actions. 
+ + + +#### Architecture + +- The chat api/backend is a [FastAPI](https://fastapi.tiangolo.com/) application that provides a REST API similar to [OpenAI's Assistants API](https://platform.openai.com/docs/assistants/overview). +- The chat ui/frontend is a [Next.js](https://nextjs.org/) application that provides a web interface to the chat api. diff --git a/pdm.lock b/pdm.lock index 7ac45d7b..e70dcd9b 100644 --- a/pdm.lock +++ b/pdm.lock @@ -5,29 +5,11 @@ groups = ["default", "chat", "mcp", "pynput", "test"] strategy = ["inherit_metadata"] lock_version = "4.5.0" -content_hash = "sha256:21d28e90c53b9d7f3439e469fe2125b83225d66dd3f6d182bedd1ae774544485" +content_hash = "sha256:1265b0e0daca5f17ed1ec690f9b11c9c0265ce255976a32163348594e60cce92" [[metadata.targets]] requires_python = ">=3.10" -[[package]] -name = "altair" -version = "5.5.0" -requires_python = ">=3.9" -summary = "Vega-Altair: A declarative statistical visualization library for Python." -groups = ["chat"] -dependencies = [ - "jinja2", - "jsonschema>=3.0", - "narwhals>=1.14.2", - "packaging", - "typing-extensions>=4.10.0; python_version < \"3.14\"", -] -files = [ - {file = "altair-5.5.0-py3-none-any.whl", hash = "sha256:91a310b926508d560fe0148d02a194f38b824122641ef528113d029fcd129f8c"}, - {file = "altair-5.5.0.tar.gz", hash = "sha256:d960ebe6178c56de3855a68c47b516be38640b73fb3b5111c2a9ca90546dd73d"}, -] - [[package]] name = "annotated-types" version = "0.7.0" @@ -79,17 +61,6 @@ files = [ {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}, ] -[[package]] -name = "attrs" -version = "25.3.0" -requires_python = ">=3.8" -summary = "Classes Without Boilerplate" -groups = ["chat"] -files = [ - {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, - {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, -] - [[package]] name = 
"backoff" version = "2.2.1" @@ -101,34 +72,12 @@ files = [ {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, ] -[[package]] -name = "blinker" -version = "1.9.0" -requires_python = ">=3.9" -summary = "Fast, simple object-to-object and broadcast signaling" -groups = ["chat"] -files = [ - {file = "blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc"}, - {file = "blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf"}, -] - -[[package]] -name = "cachetools" -version = "5.5.2" -requires_python = ">=3.7" -summary = "Extensible memoizing collections and decorators" -groups = ["chat"] -files = [ - {file = "cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a"}, - {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, -] - [[package]] name = "certifi" version = "2025.1.31" requires_python = ">=3.6" summary = "Python package for providing Mozilla's CA Bundle." -groups = ["default", "chat", "mcp"] +groups = ["default", "mcp"] files = [ {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, @@ -139,7 +88,7 @@ name = "charset-normalizer" version = "3.4.1" requires_python = ">=3.7" summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-groups = ["default", "chat"] +groups = ["default"] files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, @@ -436,35 +385,6 @@ files = [ {file = "fsspec-2025.3.2.tar.gz", hash = "sha256:e52c77ef398680bbd6a98c0e628fbc469491282981209907bbc8aea76a04fdc6"}, ] -[[package]] -name = "gitdb" -version = "4.0.12" -requires_python = ">=3.7" -summary = "Git Object Database" -groups = ["chat"] -dependencies = [ - "smmap<6,>=3.0.1", -] -files = [ - {file = "gitdb-4.0.12-py3-none-any.whl", hash = "sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf"}, - {file = "gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571"}, -] - -[[package]] -name = "gitpython" -version = "3.1.44" -requires_python = ">=3.7" -summary = "GitPython is a Python library used to interact with Git repositories" -groups = ["chat"] -dependencies = [ - "gitdb<5,>=4.0.1", - "typing-extensions>=3.7.4.3; python_version < \"3.8\"", -] -files = [ - {file = "GitPython-3.1.44-py3-none-any.whl", hash = "sha256:9e0e10cda9bed1ee64bc9a6de50e7e38a9c9943241cd7f585f6df3ed28011110"}, - {file = "gitpython-3.1.44.tar.gz", hash = "sha256:c87e30b26253bf5418b01b0660f818967f3c503193838337fe5e573331249269"}, -] - [[package]] name = "gradio-client" version = "1.8.0" @@ -707,7 +627,7 @@ name = "jinja2" version = "3.1.6" requires_python = ">=3.7" summary = "A very fast and expressive template engine." 
-groups = ["default", "chat"] +groups = ["default"] dependencies = [ "MarkupSafe>=2.0", ] @@ -777,39 +697,6 @@ files = [ {file = "jiter-0.9.0.tar.gz", hash = "sha256:aadba0964deb424daa24492abc3d229c60c4a31bfee205aedbf1acc7639d7893"}, ] -[[package]] -name = "jsonschema" -version = "4.23.0" -requires_python = ">=3.8" -summary = "An implementation of JSON Schema validation for Python" -groups = ["chat"] -dependencies = [ - "attrs>=22.2.0", - "importlib-resources>=1.4.0; python_version < \"3.9\"", - "jsonschema-specifications>=2023.03.6", - "pkgutil-resolve-name>=1.3.10; python_version < \"3.9\"", - "referencing>=0.28.4", - "rpds-py>=0.7.1", -] -files = [ - {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, - {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, -] - -[[package]] -name = "jsonschema-specifications" -version = "2024.10.1" -requires_python = ">=3.9" -summary = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -groups = ["chat"] -dependencies = [ - "referencing>=0.31.0", -] -files = [ - {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, - {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, -] - [[package]] name = "markdown-it-py" version = "3.0.0" @@ -829,7 +716,7 @@ name = "markupsafe" version = "3.0.2" requires_python = ">=3.9" summary = "Safely add untrusted strings to HTML/XML markup." 
-groups = ["default", "chat"] +groups = ["default"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -998,84 +885,9 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] -[[package]] -name = "narwhals" -version = "1.33.0" -requires_python = ">=3.8" -summary = "Extremely lightweight compatibility layer between dataframe libraries" -groups = ["chat"] -files = [ - {file = "narwhals-1.33.0-py3-none-any.whl", hash = "sha256:f653319112fd121a1f1c18a40cf70dada773cdacfd53e62c2aa0afae43c17129"}, - {file = "narwhals-1.33.0.tar.gz", hash = "sha256:6233d2457debf4b5fe4a1da54530c6fe2d84326f4a8e3bca35bbbff580a347cb"}, -] - -[[package]] -name = "numpy" -version = "2.2.4" -requires_python = ">=3.10" -summary = "Fundamental package for array computing in Python" -groups = ["chat"] -files = [ - {file = "numpy-2.2.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8146f3550d627252269ac42ae660281d673eb6f8b32f113538e0cc2a9aed42b9"}, - {file = "numpy-2.2.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e642d86b8f956098b564a45e6f6ce68a22c2c97a04f5acd3f221f57b8cb850ae"}, - {file = "numpy-2.2.4-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:a84eda42bd12edc36eb5b53bbcc9b406820d3353f1994b6cfe453a33ff101775"}, - {file = "numpy-2.2.4-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:4ba5054787e89c59c593a4169830ab362ac2bee8a969249dc56e5d7d20ff8df9"}, - {file = "numpy-2.2.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7716e4a9b7af82c06a2543c53ca476fa0b57e4d760481273e09da04b74ee6ee2"}, - {file = "numpy-2.2.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:adf8c1d66f432ce577d0197dceaac2ac00c0759f573f28516246351c58a85020"}, - {file = "numpy-2.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:218f061d2faa73621fa23d6359442b0fc658d5b9a70801373625d958259eaca3"}, - {file = "numpy-2.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:df2f57871a96bbc1b69733cd4c51dc33bea66146b8c63cacbfed73eec0883017"}, - {file = "numpy-2.2.4-cp310-cp310-win32.whl", hash = "sha256:a0258ad1f44f138b791327961caedffbf9612bfa504ab9597157806faa95194a"}, - {file = "numpy-2.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:0d54974f9cf14acf49c60f0f7f4084b6579d24d439453d5fc5805d46a165b542"}, - {file = "numpy-2.2.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e9e0a277bb2eb5d8a7407e14688b85fd8ad628ee4e0c7930415687b6564207a4"}, - {file = "numpy-2.2.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9eeea959168ea555e556b8188da5fa7831e21d91ce031e95ce23747b7609f8a4"}, - {file = "numpy-2.2.4-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:bd3ad3b0a40e713fc68f99ecfd07124195333f1e689387c180813f0e94309d6f"}, - {file = "numpy-2.2.4-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:cf28633d64294969c019c6df4ff37f5698e8326db68cc2b66576a51fad634880"}, - {file = "numpy-2.2.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fa8fa7697ad1646b5c93de1719965844e004fcad23c91228aca1cf0800044a1"}, - {file = "numpy-2.2.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4162988a360a29af158aeb4a2f4f09ffed6a969c9776f8f3bdee9b06a8ab7e5"}, - {file = "numpy-2.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:892c10d6a73e0f14935c31229e03325a7b3093fafd6ce0af704be7f894d95687"}, - {file = "numpy-2.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db1f1c22173ac1c58db249ae48aa7ead29f534b9a948bc56828337aa84a32ed6"}, - {file = "numpy-2.2.4-cp311-cp311-win32.whl", hash = "sha256:ea2bb7e2ae9e37d96835b3576a4fa4b3a97592fbea8ef7c3587078b0068b8f09"}, - {file = 
"numpy-2.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:f7de08cbe5551911886d1ab60de58448c6df0f67d9feb7d1fb21e9875ef95e91"}, - {file = "numpy-2.2.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a7b9084668aa0f64e64bd00d27ba5146ef1c3a8835f3bd912e7a9e01326804c4"}, - {file = "numpy-2.2.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dbe512c511956b893d2dacd007d955a3f03d555ae05cfa3ff1c1ff6df8851854"}, - {file = "numpy-2.2.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:bb649f8b207ab07caebba230d851b579a3c8711a851d29efe15008e31bb4de24"}, - {file = "numpy-2.2.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:f34dc300df798742b3d06515aa2a0aee20941c13579d7a2f2e10af01ae4901ee"}, - {file = "numpy-2.2.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3f7ac96b16955634e223b579a3e5798df59007ca43e8d451a0e6a50f6bfdfba"}, - {file = "numpy-2.2.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f92084defa704deadd4e0a5ab1dc52d8ac9e8a8ef617f3fbb853e79b0ea3592"}, - {file = "numpy-2.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4e84a6283b36632e2a5b56e121961f6542ab886bc9e12f8f9818b3c266bfbb"}, - {file = "numpy-2.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:11c43995255eb4127115956495f43e9343736edb7fcdb0d973defd9de14cd84f"}, - {file = "numpy-2.2.4-cp312-cp312-win32.whl", hash = "sha256:65ef3468b53269eb5fdb3a5c09508c032b793da03251d5f8722b1194f1790c00"}, - {file = "numpy-2.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:2aad3c17ed2ff455b8eaafe06bcdae0062a1db77cb99f4b9cbb5f4ecb13c5146"}, - {file = "numpy-2.2.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cf4e5c6a278d620dee9ddeb487dc6a860f9b199eadeecc567f777daace1e9e7"}, - {file = "numpy-2.2.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1974afec0b479e50438fc3648974268f972e2d908ddb6d7fb634598cdb8260a0"}, - {file = "numpy-2.2.4-cp313-cp313-macosx_14_0_arm64.whl", hash = 
"sha256:79bd5f0a02aa16808fcbc79a9a376a147cc1045f7dfe44c6e7d53fa8b8a79392"}, - {file = "numpy-2.2.4-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:3387dd7232804b341165cedcb90694565a6015433ee076c6754775e85d86f1fc"}, - {file = "numpy-2.2.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f527d8fdb0286fd2fd97a2a96c6be17ba4232da346931d967a0630050dfd298"}, - {file = "numpy-2.2.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bce43e386c16898b91e162e5baaad90c4b06f9dcbe36282490032cec98dc8ae7"}, - {file = "numpy-2.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:31504f970f563d99f71a3512d0c01a645b692b12a63630d6aafa0939e52361e6"}, - {file = "numpy-2.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:81413336ef121a6ba746892fad881a83351ee3e1e4011f52e97fba79233611fd"}, - {file = "numpy-2.2.4-cp313-cp313-win32.whl", hash = "sha256:f486038e44caa08dbd97275a9a35a283a8f1d2f0ee60ac260a1790e76660833c"}, - {file = "numpy-2.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:207a2b8441cc8b6a2a78c9ddc64d00d20c303d79fba08c577752f080c4007ee3"}, - {file = "numpy-2.2.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8120575cb4882318c791f839a4fd66161a6fa46f3f0a5e613071aae35b5dd8f8"}, - {file = "numpy-2.2.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a761ba0fa886a7bb33c6c8f6f20213735cb19642c580a931c625ee377ee8bd39"}, - {file = "numpy-2.2.4-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:ac0280f1ba4a4bfff363a99a6aceed4f8e123f8a9b234c89140f5e894e452ecd"}, - {file = "numpy-2.2.4-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:879cf3a9a2b53a4672a168c21375166171bc3932b7e21f622201811c43cdd3b0"}, - {file = "numpy-2.2.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f05d4198c1bacc9124018109c5fba2f3201dbe7ab6e92ff100494f236209c960"}, - {file = "numpy-2.2.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e2f085ce2e813a50dfd0e01fbfc0c12bbe5d2063d99f8b29da30e544fb6483b8"}, - {file = "numpy-2.2.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:92bda934a791c01d6d9d8e038363c50918ef7c40601552a58ac84c9613a665bc"}, - {file = "numpy-2.2.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ee4d528022f4c5ff67332469e10efe06a267e32f4067dc76bb7e2cddf3cd25ff"}, - {file = "numpy-2.2.4-cp313-cp313t-win32.whl", hash = "sha256:05c076d531e9998e7e694c36e8b349969c56eadd2cdcd07242958489d79a7286"}, - {file = "numpy-2.2.4-cp313-cp313t-win_amd64.whl", hash = "sha256:188dcbca89834cc2e14eb2f106c96d6d46f200fe0200310fc29089657379c58d"}, - {file = "numpy-2.2.4-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7051ee569db5fbac144335e0f3b9c2337e0c8d5c9fee015f259a5bd70772b7e8"}, - {file = "numpy-2.2.4-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:ab2939cd5bec30a7430cbdb2287b63151b77cf9624de0532d629c9a1c59b1d5c"}, - {file = "numpy-2.2.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0f35b19894a9e08639fd60a1ec1978cb7f5f7f1eace62f38dd36be8aecdef4d"}, - {file = "numpy-2.2.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b4adfbbc64014976d2f91084915ca4e626fbf2057fb81af209c1a6d776d23e3d"}, - {file = "numpy-2.2.4.tar.gz", hash = "sha256:9ba03692a45d3eef66559efe1d1096c4b9b75c0986b5dff5530c378fb8331d4f"}, -] - [[package]] name = "openai" -version = "1.70.0" +version = "1.85.0" requires_python = ">=3.8" summary = "The official Python library for the openai API" groups = ["default"] @@ -1090,8 +902,8 @@ dependencies = [ "typing-extensions<5,>=4.11", ] files = [ - {file = "openai-1.70.0-py3-none-any.whl", hash = "sha256:f6438d053fd8b2e05fd6bef70871e832d9bbdf55e119d0ac5b92726f1ae6f614"}, - {file = "openai-1.70.0.tar.gz", hash = "sha256:e52a8d54c3efeb08cf58539b5b21a5abef25368b5432965e4de88cdf4e091b2b"}, + {file = "openai-1.85.0-py3-none-any.whl", hash = "sha256:7dc3e839cb8bb8747979a90c63ad4cb25a8e0cbec17b53eec009532c9965cecf"}, + 
{file = "openai-1.85.0.tar.gz", hash = "sha256:6ba76e4ebc5725f71f2f6126c7cb5169ca8de60dd5aa61f350f9448ad162c913"}, ] [[package]] @@ -1099,70 +911,18 @@ name = "packaging" version = "24.2" requires_python = ">=3.8" summary = "Core utilities for Python packages" -groups = ["default", "chat", "test"] +groups = ["default", "test"] files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] -[[package]] -name = "pandas" -version = "2.2.3" -requires_python = ">=3.9" -summary = "Powerful data structures for data analysis, time series, and statistics" -groups = ["chat"] -dependencies = [ - "numpy>=1.22.4; python_version < \"3.11\"", - "numpy>=1.23.2; python_version == \"3.11\"", - "numpy>=1.26.0; python_version >= \"3.12\"", - "python-dateutil>=2.8.2", - "pytz>=2020.1", - "tzdata>=2022.7", -] -files = [ - {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, - {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, - {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, - {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, - {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, - {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, - {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = 
"sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, - {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, - {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, - {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, - {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, - {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, - {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, - {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, - {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, - {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, - {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, - {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, - {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, - {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, - {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, - {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, - {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, - {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, - {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, - {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, - {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, - {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, - {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, - {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, - {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, - {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, - {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, - {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, - {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, -] - [[package]] name = "pillow" version = "11.1.0" requires_python = ">=3.9" summary = "Python Imaging Library (Fork)" -groups = ["default", "chat"] +groups = ["default"] files = [ {file = "pillow-11.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:e1abe69aca89514737465752b4bcaf8016de61b3be1397a8fc260ba33321b3a8"}, {file = "pillow-11.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c640e5a06869c75994624551f45e5506e4256562ead981cce820d5ab39ae2192"}, @@ -1242,7 +1002,7 @@ name = "protobuf" version = "5.29.4" requires_python = ">=3.8" summary = "" -groups = ["default", "chat"] +groups = ["default"] files = [ {file = "protobuf-5.29.4-cp310-abi3-win32.whl", hash = "sha256:13eb236f8eb9ec34e63fc8b1d6efd2777d062fa6aaa68268fb67cf77f6839ad7"}, {file = "protobuf-5.29.4-cp310-abi3-win_amd64.whl", hash = "sha256:bcefcdf3976233f8a502d265eb65ea740c989bacc6c30a58290ed0e519eb4b8d"}, @@ -1266,50 +1026,6 @@ files = [ {file = "py_machineid-0.7.0-py3-none-any.whl", hash = "sha256:3dacc322b0511383d79f1e817a2710b19bcfb820a4c7cea34aaa329775fef468"}, ] -[[package]] -name = "pyarrow" -version = "19.0.1" -requires_python = ">=3.9" -summary = "Python library for Apache Arrow" -groups = ["chat"] -files = [ - {file = "pyarrow-19.0.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:fc28912a2dc924dddc2087679cc8b7263accc71b9ff025a1362b004711661a69"}, - {file = "pyarrow-19.0.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:fca15aabbe9b8355800d923cc2e82c8ef514af321e18b437c3d782aa884eaeec"}, - {file = "pyarrow-19.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ad76aef7f5f7e4a757fddcdcf010a8290958f09e3470ea458c80d26f4316ae89"}, - {file = "pyarrow-19.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d03c9d6f2a3dffbd62671ca070f13fc527bb1867b4ec2b98c7eeed381d4f389a"}, - {file = "pyarrow-19.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:65cf9feebab489b19cdfcfe4aa82f62147218558d8d3f0fc1e9dea0ab8e7905a"}, - {file = "pyarrow-19.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:41f9706fbe505e0abc10e84bf3a906a1338905cbbcf1177b71486b03e6ea6608"}, - {file = "pyarrow-19.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c6cb2335a411b713fdf1e82a752162f72d4a7b5dbc588e32aa18383318b05866"}, - {file = "pyarrow-19.0.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:cc55d71898ea30dc95900297d191377caba257612f384207fe9f8293b5850f90"}, - {file = "pyarrow-19.0.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:7a544ec12de66769612b2d6988c36adc96fb9767ecc8ee0a4d270b10b1c51e00"}, - {file = "pyarrow-19.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0148bb4fc158bfbc3d6dfe5001d93ebeed253793fff4435167f6ce1dc4bddeae"}, - {file = "pyarrow-19.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f24faab6ed18f216a37870d8c5623f9c044566d75ec586ef884e13a02a9d62c5"}, - {file = "pyarrow-19.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:4982f8e2b7afd6dae8608d70ba5bd91699077323f812a0448d8b7abdff6cb5d3"}, - {file = "pyarrow-19.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:49a3aecb62c1be1d822f8bf629226d4a96418228a42f5b40835c1f10d42e4db6"}, - {file = "pyarrow-19.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:008a4009efdb4ea3d2e18f05cd31f9d43c388aad29c636112c2966605ba33466"}, - {file = "pyarrow-19.0.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:80b2ad2b193e7d19e81008a96e313fbd53157945c7be9ac65f44f8937a55427b"}, - {file = "pyarrow-19.0.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = 
"sha256:ee8dec072569f43835932a3b10c55973593abc00936c202707a4ad06af7cb294"}, - {file = "pyarrow-19.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d5d1ec7ec5324b98887bdc006f4d2ce534e10e60f7ad995e7875ffa0ff9cb14"}, - {file = "pyarrow-19.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ad4c0eb4e2a9aeb990af6c09e6fa0b195c8c0e7b272ecc8d4d2b6574809d34"}, - {file = "pyarrow-19.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:d383591f3dcbe545f6cc62daaef9c7cdfe0dff0fb9e1c8121101cabe9098cfa6"}, - {file = "pyarrow-19.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b4c4156a625f1e35d6c0b2132635a237708944eb41df5fbe7d50f20d20c17832"}, - {file = "pyarrow-19.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:5bd1618ae5e5476b7654c7b55a6364ae87686d4724538c24185bbb2952679960"}, - {file = "pyarrow-19.0.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:e45274b20e524ae5c39d7fc1ca2aa923aab494776d2d4b316b49ec7572ca324c"}, - {file = "pyarrow-19.0.1-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:d9dedeaf19097a143ed6da37f04f4051aba353c95ef507764d344229b2b740ae"}, - {file = "pyarrow-19.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ebfb5171bb5f4a52319344ebbbecc731af3f021e49318c74f33d520d31ae0c4"}, - {file = "pyarrow-19.0.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a21d39fbdb948857f67eacb5bbaaf36802de044ec36fbef7a1c8f0dd3a4ab2"}, - {file = "pyarrow-19.0.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:99bc1bec6d234359743b01e70d4310d0ab240c3d6b0da7e2a93663b0158616f6"}, - {file = "pyarrow-19.0.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1b93ef2c93e77c442c979b0d596af45e4665d8b96da598db145b0fec014b9136"}, - {file = "pyarrow-19.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:d9d46e06846a41ba906ab25302cf0fd522f81aa2a85a71021826f34639ad31ef"}, - {file = "pyarrow-19.0.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = 
"sha256:c0fe3dbbf054a00d1f162fda94ce236a899ca01123a798c561ba307ca38af5f0"}, - {file = "pyarrow-19.0.1-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:96606c3ba57944d128e8a8399da4812f56c7f61de8c647e3470b417f795d0ef9"}, - {file = "pyarrow-19.0.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f04d49a6b64cf24719c080b3c2029a3a5b16417fd5fd7c4041f94233af732f3"}, - {file = "pyarrow-19.0.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a9137cf7e1640dce4c190551ee69d478f7121b5c6f323553b319cac936395f6"}, - {file = "pyarrow-19.0.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:7c1bca1897c28013db5e4c83944a2ab53231f541b9e0c3f4791206d0c0de389a"}, - {file = "pyarrow-19.0.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:58d9397b2e273ef76264b45531e9d552d8ec8a6688b7390b5be44c02a37aade8"}, - {file = "pyarrow-19.0.1.tar.gz", hash = "sha256:3bf266b485df66a400f282ac0b6d1b500b9d2ae73314a153dbe97d6d5cc8a99e"}, -] - [[package]] name = "pydantic" version = "2.11.2" @@ -1432,21 +1148,6 @@ files = [ {file = "pydantic_settings-2.9.1.tar.gz", hash = "sha256:c509bf79d27563add44e8446233359004ed85066cd096d8b510f715e6ef5d268"}, ] -[[package]] -name = "pydeck" -version = "0.9.1" -requires_python = ">=3.8" -summary = "Widget for deck.gl maps" -groups = ["chat"] -dependencies = [ - "jinja2>=2.10.1", - "numpy>=1.16.4", -] -files = [ - {file = "pydeck-0.9.1-py2.py3-none-any.whl", hash = "sha256:b3f75ba0d273fc917094fa61224f3f6076ca8752b93d46faf3bcfd9f9d59b038"}, - {file = "pydeck-0.9.1.tar.gz", hash = "sha256:f74475ae637951d63f2ee58326757f8d4f9cd9f2a457cf42950715003e2cb605"}, -] - [[package]] name = "pygments" version = "2.19.1" @@ -1676,7 +1377,7 @@ name = "python-dateutil" version = "2.9.0.post0" requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" summary = "Extensions to the standard Python datetime module" -groups = ["default", "chat"] +groups = ["default"] dependencies = [ "six>=1.5", ] @@ -1721,16 +1422,6 @@ files = 
[ {file = "python_xlib-0.33-py2.py3-none-any.whl", hash = "sha256:c3534038d42e0df2f1392a1b30a15a4ff5fdc2b86cfa94f072bf11b10a164398"}, ] -[[package]] -name = "pytz" -version = "2025.2" -summary = "World timezone definitions, modern and historical" -groups = ["chat"] -files = [ - {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, - {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, -] - [[package]] name = "pyyaml" version = "6.0.2" @@ -1777,28 +1468,12 @@ files = [ {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] -[[package]] -name = "referencing" -version = "0.36.2" -requires_python = ">=3.9" -summary = "JSON Referencing + Python" -groups = ["chat"] -dependencies = [ - "attrs>=22.2.0", - "rpds-py>=0.7.0", - "typing-extensions>=4.4.0; python_version < \"3.13\"", -] -files = [ - {file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"}, - {file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"}, -] - [[package]] name = "requests" version = "2.32.3" requires_python = ">=3.8" summary = "Python HTTP for Humans." 
-groups = ["default", "chat"] +groups = ["default"] dependencies = [ "certifi>=2017.4.17", "charset-normalizer<4,>=2", @@ -1826,104 +1501,6 @@ files = [ {file = "rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725"}, ] -[[package]] -name = "rpds-py" -version = "0.24.0" -requires_python = ">=3.9" -summary = "Python bindings to Rust's persistent data structures (rpds)" -groups = ["chat"] -files = [ - {file = "rpds_py-0.24.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:006f4342fe729a368c6df36578d7a348c7c716be1da0a1a0f86e3021f8e98724"}, - {file = "rpds_py-0.24.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2d53747da70a4e4b17f559569d5f9506420966083a31c5fbd84e764461c4444b"}, - {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8acd55bd5b071156bae57b555f5d33697998752673b9de554dd82f5b5352727"}, - {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7e80d375134ddb04231a53800503752093dbb65dad8dabacce2c84cccc78e964"}, - {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60748789e028d2a46fc1c70750454f83c6bdd0d05db50f5ae83e2db500b34da5"}, - {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e1daf5bf6c2be39654beae83ee6b9a12347cb5aced9a29eecf12a2d25fff664"}, - {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b221c2457d92a1fb3c97bee9095c874144d196f47c038462ae6e4a14436f7bc"}, - {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:66420986c9afff67ef0c5d1e4cdc2d0e5262f53ad11e4f90e5e22448df485bf0"}, - {file = "rpds_py-0.24.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:43dba99f00f1d37b2a0265a259592d05fcc8e7c19d140fe51c6e6f16faabeb1f"}, - {file = "rpds_py-0.24.0-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:a88c0d17d039333a41d9bf4616bd062f0bd7aa0edeb6cafe00a2fc2a804e944f"}, - {file = "rpds_py-0.24.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc31e13ce212e14a539d430428cd365e74f8b2d534f8bc22dd4c9c55b277b875"}, - {file = "rpds_py-0.24.0-cp310-cp310-win32.whl", hash = "sha256:fc2c1e1b00f88317d9de6b2c2b39b012ebbfe35fe5e7bef980fd2a91f6100a07"}, - {file = "rpds_py-0.24.0-cp310-cp310-win_amd64.whl", hash = "sha256:c0145295ca415668420ad142ee42189f78d27af806fcf1f32a18e51d47dd2052"}, - {file = "rpds_py-0.24.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:2d3ee4615df36ab8eb16c2507b11e764dcc11fd350bbf4da16d09cda11fcedef"}, - {file = "rpds_py-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e13ae74a8a3a0c2f22f450f773e35f893484fcfacb00bb4344a7e0f4f48e1f97"}, - {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf86f72d705fc2ef776bb7dd9e5fbba79d7e1f3e258bf9377f8204ad0fc1c51e"}, - {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c43583ea8517ed2e780a345dd9960896afc1327e8cf3ac8239c167530397440d"}, - {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4cd031e63bc5f05bdcda120646a0d32f6d729486d0067f09d79c8db5368f4586"}, - {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34d90ad8c045df9a4259c47d2e16a3f21fdb396665c94520dbfe8766e62187a4"}, - {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e838bf2bb0b91ee67bf2b889a1a841e5ecac06dd7a2b1ef4e6151e2ce155c7ae"}, - {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04ecf5c1ff4d589987b4d9882872f80ba13da7d42427234fce8f22efb43133bc"}, - {file = "rpds_py-0.24.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:630d3d8ea77eabd6cbcd2ea712e1c5cecb5b558d39547ac988351195db433f6c"}, - {file = 
"rpds_py-0.24.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ebcb786b9ff30b994d5969213a8430cbb984cdd7ea9fd6df06663194bd3c450c"}, - {file = "rpds_py-0.24.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:174e46569968ddbbeb8a806d9922f17cd2b524aa753b468f35b97ff9c19cb718"}, - {file = "rpds_py-0.24.0-cp311-cp311-win32.whl", hash = "sha256:5ef877fa3bbfb40b388a5ae1cb00636a624690dcb9a29a65267054c9ea86d88a"}, - {file = "rpds_py-0.24.0-cp311-cp311-win_amd64.whl", hash = "sha256:e274f62cbd274359eff63e5c7e7274c913e8e09620f6a57aae66744b3df046d6"}, - {file = "rpds_py-0.24.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:d8551e733626afec514b5d15befabea0dd70a343a9f23322860c4f16a9430205"}, - {file = "rpds_py-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0e374c0ce0ca82e5b67cd61fb964077d40ec177dd2c4eda67dba130de09085c7"}, - {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d69d003296df4840bd445a5d15fa5b6ff6ac40496f956a221c4d1f6f7b4bc4d9"}, - {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8212ff58ac6dfde49946bea57474a386cca3f7706fc72c25b772b9ca4af6b79e"}, - {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:528927e63a70b4d5f3f5ccc1fa988a35456eb5d15f804d276709c33fc2f19bda"}, - {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a824d2c7a703ba6daaca848f9c3d5cb93af0505be505de70e7e66829affd676e"}, - {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44d51febb7a114293ffd56c6cf4736cb31cd68c0fddd6aa303ed09ea5a48e029"}, - {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3fab5f4a2c64a8fb64fc13b3d139848817a64d467dd6ed60dcdd6b479e7febc9"}, - {file = "rpds_py-0.24.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:9be4f99bee42ac107870c61dfdb294d912bf81c3c6d45538aad7aecab468b6b7"}, - {file = "rpds_py-0.24.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:564c96b6076a98215af52f55efa90d8419cc2ef45d99e314fddefe816bc24f91"}, - {file = "rpds_py-0.24.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:75a810b7664c17f24bf2ffd7f92416c00ec84b49bb68e6a0d93e542406336b56"}, - {file = "rpds_py-0.24.0-cp312-cp312-win32.whl", hash = "sha256:f6016bd950be4dcd047b7475fdf55fb1e1f59fc7403f387be0e8123e4a576d30"}, - {file = "rpds_py-0.24.0-cp312-cp312-win_amd64.whl", hash = "sha256:998c01b8e71cf051c28f5d6f1187abbdf5cf45fc0efce5da6c06447cba997034"}, - {file = "rpds_py-0.24.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:3d2d8e4508e15fc05b31285c4b00ddf2e0eb94259c2dc896771966a163122a0c"}, - {file = "rpds_py-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0f00c16e089282ad68a3820fd0c831c35d3194b7cdc31d6e469511d9bffc535c"}, - {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951cc481c0c395c4a08639a469d53b7d4afa252529a085418b82a6b43c45c240"}, - {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9ca89938dff18828a328af41ffdf3902405a19f4131c88e22e776a8e228c5a8"}, - {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed0ef550042a8dbcd657dfb284a8ee00f0ba269d3f2286b0493b15a5694f9fe8"}, - {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b2356688e5d958c4d5cb964af865bea84db29971d3e563fb78e46e20fe1848b"}, - {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78884d155fd15d9f64f5d6124b486f3d3f7fd7cd71a78e9670a0f6f6ca06fb2d"}, - {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6a4a535013aeeef13c5532f802708cecae8d66c282babb5cd916379b72110cf7"}, - {file = 
"rpds_py-0.24.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:84e0566f15cf4d769dade9b366b7b87c959be472c92dffb70462dd0844d7cbad"}, - {file = "rpds_py-0.24.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:823e74ab6fbaa028ec89615ff6acb409e90ff45580c45920d4dfdddb069f2120"}, - {file = "rpds_py-0.24.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c61a2cb0085c8783906b2f8b1f16a7e65777823c7f4d0a6aaffe26dc0d358dd9"}, - {file = "rpds_py-0.24.0-cp313-cp313-win32.whl", hash = "sha256:60d9b630c8025b9458a9d114e3af579a2c54bd32df601c4581bd054e85258143"}, - {file = "rpds_py-0.24.0-cp313-cp313-win_amd64.whl", hash = "sha256:6eea559077d29486c68218178ea946263b87f1c41ae7f996b1f30a983c476a5a"}, - {file = "rpds_py-0.24.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:d09dc82af2d3c17e7dd17120b202a79b578d79f2b5424bda209d9966efeed114"}, - {file = "rpds_py-0.24.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5fc13b44de6419d1e7a7e592a4885b323fbc2f46e1f22151e3a8ed3b8b920405"}, - {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c347a20d79cedc0a7bd51c4d4b7dbc613ca4e65a756b5c3e57ec84bd43505b47"}, - {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:20f2712bd1cc26a3cc16c5a1bfee9ed1abc33d4cdf1aabd297fe0eb724df4272"}, - {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aad911555286884be1e427ef0dc0ba3929e6821cbeca2194b13dc415a462c7fd"}, - {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0aeb3329c1721c43c58cae274d7d2ca85c1690d89485d9c63a006cb79a85771a"}, - {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a0f156e9509cee987283abd2296ec816225145a13ed0391df8f71bf1d789e2d"}, - {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:aa6800adc8204ce898c8a424303969b7aa6a5e4ad2789c13f8648739830323b7"}, - {file = "rpds_py-0.24.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a18fc371e900a21d7392517c6f60fe859e802547309e94313cd8181ad9db004d"}, - {file = "rpds_py-0.24.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9168764133fd919f8dcca2ead66de0105f4ef5659cbb4fa044f7014bed9a1797"}, - {file = "rpds_py-0.24.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5f6e3cec44ba05ee5cbdebe92d052f69b63ae792e7d05f1020ac5e964394080c"}, - {file = "rpds_py-0.24.0-cp313-cp313t-win32.whl", hash = "sha256:8ebc7e65ca4b111d928b669713865f021b7773350eeac4a31d3e70144297baba"}, - {file = "rpds_py-0.24.0-cp313-cp313t-win_amd64.whl", hash = "sha256:675269d407a257b8c00a6b58205b72eec8231656506c56fd429d924ca00bb350"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:619ca56a5468f933d940e1bf431c6f4e13bef8e688698b067ae68eb4f9b30e3a"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:4b28e5122829181de1898c2c97f81c0b3246d49f585f22743a1246420bb8d399"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e5ab32cf9eb3647450bc74eb201b27c185d3857276162c101c0f8c6374e098"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:208b3a70a98cf3710e97cabdc308a51cd4f28aa6e7bb11de3d56cd8b74bab98d"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbc4362e06f950c62cad3d4abf1191021b2ffaf0b31ac230fbf0526453eee75e"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ebea2821cdb5f9fef44933617be76185b80150632736f3d76e54829ab4a3b4d1"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4df06c35465ef4d81799999bba810c68d29972bf1c31db61bfdb81dd9d5bb"}, - {file = 
"rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d3aa13bdf38630da298f2e0d77aca967b200b8cc1473ea05248f6c5e9c9bdb44"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:041f00419e1da7a03c46042453598479f45be3d787eb837af382bfc169c0db33"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:d8754d872a5dfc3c5bf9c0e059e8107451364a30d9fd50f1f1a85c4fb9481164"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:896c41007931217a343eff197c34513c154267636c8056fb409eafd494c3dcdc"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:92558d37d872e808944c3c96d0423b8604879a3d1c86fdad508d7ed91ea547d5"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f9e0057a509e096e47c87f753136c9b10d7a91842d8042c2ee6866899a717c0d"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d6e109a454412ab82979c5b1b3aee0604eca4bbf9a02693bb9df027af2bfa91a"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc1c892b1ec1f8cbd5da8de287577b455e388d9c328ad592eabbdcb6fc93bee5"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9c39438c55983d48f4bb3487734d040e22dad200dab22c41e331cee145e7a50d"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d7e8ce990ae17dda686f7e82fd41a055c668e13ddcf058e7fb5e9da20b57793"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9ea7f4174d2e4194289cb0c4e172d83e79a6404297ff95f2875cf9ac9bced8ba"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb2954155bb8f63bb19d56d80e5e5320b61d71084617ed89efedb861a684baea"}, - {file = 
"rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04f2b712a2206e13800a8136b07aaedc23af3facab84918e7aa89e4be0260032"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:eda5c1e2a715a4cbbca2d6d304988460942551e4e5e3b7457b50943cd741626d"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:9abc80fe8c1f87218db116016de575a7998ab1629078c90840e8d11ab423ee25"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6a727fd083009bc83eb83d6950f0c32b3c94c8b80a9b667c87f4bd1274ca30ba"}, - {file = "rpds_py-0.24.0.tar.gz", hash = "sha256:772cc1b2cd963e7e17e6cc55fe0371fb9c704d63e44cacec7b9b7f523b78919e"}, -] - [[package]] name = "ruff" version = "0.11.3" @@ -1995,23 +1572,12 @@ name = "six" version = "1.17.0" requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" summary = "Python 2 and 3 compatibility utilities" -groups = ["default", "chat", "pynput"] +groups = ["default", "pynput"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] -[[package]] -name = "smmap" -version = "5.0.2" -requires_python = ">=3.7" -summary = "A pure Python implementation of a sliding window memory map manager" -groups = ["chat"] -files = [ - {file = "smmap-5.0.2-py3-none-any.whl", hash = "sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e"}, - {file = "smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5"}, -] - [[package]] name = "sniffio" version = "1.3.1" @@ -2053,59 +1619,17 @@ files = [ {file = "starlette-0.46.2.tar.gz", hash = "sha256:7f7361f34eed179294600af672f565727419830b54b7b084efe44bb82d2fccd5"}, ] -[[package]] -name = "streamlit" -version = "1.44.1" -requires_python = "!=3.9.7,>=3.9" -summary 
= "A faster way to build and share data apps" -groups = ["chat"] -dependencies = [ - "altair<6,>=4.0", - "blinker<2,>=1.0.0", - "cachetools<6,>=4.0", - "click<9,>=7.0", - "gitpython!=3.1.19,<4,>=3.0.7", - "numpy<3,>=1.23", - "packaging<25,>=20", - "pandas<3,>=1.4.0", - "pillow<12,>=7.1.0", - "protobuf<6,>=3.20", - "pyarrow>=7.0", - "pydeck<1,>=0.8.0b4", - "requests<3,>=2.27", - "tenacity<10,>=8.1.0", - "toml<2,>=0.10.1", - "tornado<7,>=6.0.3", - "typing-extensions<5,>=4.4.0", - "watchdog<7,>=2.1.5; platform_system != \"Darwin\"", -] -files = [ - {file = "streamlit-1.44.1-py3-none-any.whl", hash = "sha256:9fe355f58b11f4eb71e74f115ce1f38c4c9eaff2733e6bcffb510ac1298a5990"}, - {file = "streamlit-1.44.1.tar.gz", hash = "sha256:c6914ed6d5b76870b461510476806db370f36425ae0e6654d227c988288198d3"}, -] - [[package]] name = "tenacity" version = "9.1.2" requires_python = ">=3.9" summary = "Retry code until it succeeds" -groups = ["default", "chat"] +groups = ["default"] files = [ {file = "tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138"}, {file = "tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb"}, ] -[[package]] -name = "toml" -version = "0.10.2" -requires_python = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -summary = "Python Library for Tom's Obvious, Minimal Language" -groups = ["chat"] -files = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] - [[package]] name = "tomli" version = "2.2.1" @@ -2148,26 +1672,6 @@ files = [ {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] -[[package]] -name = "tornado" -version = "6.4.2" -requires_python = ">=3.8" -summary = "Tornado is a Python web framework and 
asynchronous networking library, originally developed at FriendFeed." -groups = ["chat"] -files = [ - {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1"}, - {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803"}, - {file = "tornado-6.4.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a017d239bd1bb0919f72af256a970624241f070496635784d9bf0db640d3fec"}, - {file = "tornado-6.4.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c36e62ce8f63409301537222faffcef7dfc5284f27eec227389f2ad11b09d946"}, - {file = "tornado-6.4.2-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca9eb02196e789c9cb5c3c7c0f04fb447dc2adffd95265b2c7223a8a615ccbf"}, - {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:304463bd0772442ff4d0f5149c6f1c2135a1fae045adf070821c6cdc76980634"}, - {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:c82c46813ba483a385ab2a99caeaedf92585a1f90defb5693351fa7e4ea0bf73"}, - {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:932d195ca9015956fa502c6b56af9eb06106140d844a335590c1ec7f5277d10c"}, - {file = "tornado-6.4.2-cp38-abi3-win32.whl", hash = "sha256:2876cef82e6c5978fde1e0d5b1f919d756968d5b4282418f3146b79b58556482"}, - {file = "tornado-6.4.2-cp38-abi3-win_amd64.whl", hash = "sha256:908b71bf3ff37d81073356a5fadcc660eb10c1476ee6e2725588626ce7e5ca38"}, - {file = "tornado-6.4.2.tar.gz", hash = "sha256:92bad5b4746e9879fd7bf1eb21dce4e3fc5128d71601f80005afa39237ad620b"}, -] - [[package]] name = "tqdm" version = "4.67.1" @@ -2293,23 +1797,12 @@ files = [ {file = "typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"}, ] 
-[[package]] -name = "tzdata" -version = "2025.2" -requires_python = ">=2" -summary = "Provider of IANA time zone data" -groups = ["chat"] -files = [ - {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, - {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, -] - [[package]] name = "urllib3" version = "2.3.0" requires_python = ">=3.9" summary = "HTTP library with thread-safe connection pooling, file post, and more." -groups = ["default", "chat", "test"] +groups = ["default", "test"] files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, @@ -2331,41 +1824,6 @@ files = [ {file = "uvicorn-0.34.3.tar.gz", hash = "sha256:35919a9a979d7a59334b6b10e05d77c1d0d574c50e0fc98b8b1a0f165708b55a"}, ] -[[package]] -name = "watchdog" -version = "6.0.0" -requires_python = ">=3.9" -summary = "Filesystem events monitoring" -groups = ["chat"] -marker = "platform_system != \"Darwin\"" -files = [ - {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1cdb490583ebd691c012b3d6dae011000fe42edb7a82ece80965b42abd61f26"}, - {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc64ab3bdb6a04d69d4023b29422170b74681784ffb9463ed4870cf2f3e66112"}, - {file = "watchdog-6.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c897ac1b55c5a1461e16dae288d22bb2e412ba9807df8397a635d88f671d36c3"}, - {file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c"}, - {file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2"}, - {file = 
"watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c"}, - {file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948"}, - {file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860"}, - {file = "watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0"}, - {file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c"}, - {file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134"}, - {file = "watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b"}, - {file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c7ac31a19f4545dd92fc25d200694098f42c9a8e391bc00bdd362c5736dbf881"}, - {file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9513f27a1a582d9808cf21a07dae516f0fab1cf2d7683a742c498b93eedabb11"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = 
"sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2"}, - {file = "watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a"}, - {file = "watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680"}, - {file = "watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f"}, - {file = "watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282"}, -] - [[package]] name = "websockets" version = "15.0.1" diff --git a/pyproject.toml b/pyproject.toml index 0be5b9c3..1ab18148 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,13 +53,13 @@ lint = "ruff check src tests" "lint:fix" = "ruff check --fix src tests" typecheck = "mypy" "typecheck:all" = "mypy src tests" -chat = "streamlit run src/askui/chat/__main__.py" -"chat:api" = "uvicorn src.askui.chat.api.app:app --reload --port 8000" +"chat:api" = "uvicorn src.chat.api.app:app --reload --port 8000" +"chat:ui:install" = {shell = "cd src/chat/ui && npm ci"} +"chat:ui" = {shell = "cd src/chat/ui && npm run dev"} mcp = "mcp dev src/askui/mcp/__init__.py" [dependency-groups] chat = [ - "streamlit>=1.42.0", "fastapi>=0.115.12", "uvicorn>=0.34.3", ] diff --git a/src/askui/agent.py b/src/askui/agent.py index e1568778..1dbd33fc 100644 --- a/src/askui/agent.py +++ b/src/askui/agent.py @@ -690,7 +690,7 @@ def cli( logger.debug("VisionAgent received instruction to execute '%s' on cli", command) self.tools.os.run_command(command) - @telemetry.record_call(flush=True) + @telemetry.record_call() def close(self) 
-> None: self.tools.os.disconnect() self._reporter.generate() diff --git a/src/askui/chat/__main__.py b/src/askui/chat/__main__.py deleted file mode 100644 index 25d47365..00000000 --- a/src/askui/chat/__main__.py +++ /dev/null @@ -1,382 +0,0 @@ -import io -import json -import time -from pathlib import Path - -import httpx -import streamlit as st -from PIL import Image - -from askui.chat.api.messages.service import Message, MessageService -from askui.chat.api.runs.service import RunService -from askui.chat.api.threads.service import ThreadService - -# from askui.chat.click_recorder import ClickRecorder -from askui.models.shared.computer_agent_message_param import ( - Base64ImageSourceParam, - MessageParam, - UrlImageSourceParam, -) -from askui.utils.image_utils import base64_to_image - -st.set_page_config( - page_title="Vision Agent Chat", - page_icon="💬", -) - - -BASE_DIR = Path("./chat") - - -@st.cache_resource -def get_thread_service() -> ThreadService: - return ThreadService(BASE_DIR) - - -@st.cache_resource -def get_message_service() -> MessageService: - return MessageService(BASE_DIR) - - -@st.cache_resource -def get_run_service() -> RunService: - return RunService(BASE_DIR) - - -thread_service = get_thread_service() -message_service = get_message_service() -run_service = get_run_service() - -# click_recorder = ClickRecorder() - - -def get_image( - source: Base64ImageSourceParam | UrlImageSourceParam, -) -> Image.Image: - match source.type: - case "base64": - data = source.data - if isinstance(data, str): - return base64_to_image(data) - error_msg = f"Image source data type not supported: {type(data)}" - raise NotImplementedError(error_msg) - case "url": - response = httpx.get(source.url) - return Image.open(io.BytesIO(response.content)) - - -def write_message( # noqa: C901 - message: Message, -) -> None: - # Create a container for the message and delete button - col1, col2 = st.columns([0.95, 0.05]) - - with col1: - with st.chat_message(message.role): - 
st.markdown(f"*{message.created_at.isoformat()}* - **{message.role}**\n\n") - if isinstance(message.content, str): - st.markdown(message.content) - else: - for block in message.content: - match block.type: - case "image": - st.image(get_image(block.source)) - case "text": - st.markdown(block.text) - case "tool_result": - st.markdown(f"Tool use id: {block.tool_use_id}") - st.markdown(f"Erroneous: {block.is_error}") - content = block.content - if isinstance(content, str): - st.markdown(content) - else: - for nested_block in content: - match nested_block.type: - case "image": - st.image(get_image(nested_block.source)) - case "text": - st.markdown(nested_block.text) - case _: - st.markdown( - json.dumps(block.model_dump(mode="json"), indent=2) - ) - - # Add delete button in the second column if message_id is provided - with col2: - if st.button("🗑️", key=f"delete_{message.id}"): - message_service.delete(st.session_state.thread_id, message.id) - st.rerun() - - -# def paint_crosshair( -# image: Image.Image, -# coordinates: tuple[int, int], -# size: int | None = None, -# color: str = "red", -# width: int = 4, -# ) -> Image.Image: -# """ -# Paints a crosshair at the given coordinates on the image. - -# :param image: A PIL Image object. -# :param coordinates: A tuple (x, y) representing the coordinates of the point. -# :param size: Optional length of each line in the crosshair. Defaults to min(width,height)/20 -# :param color: The color of the crosshair. -# :param width: The width of the crosshair. -# :return: A new image with the crosshair. 
-# """ -# if size is None: -# size = ( -# min(image.width, image.height) // 20 -# ) # Makes crosshair ~5% of smallest image dimension - -# image_copy = image.copy() -# draw = ImageDraw.Draw(image_copy) -# x, y = coordinates -# # Draw horizontal and vertical lines -# draw.line((x - size, y, x + size, y), fill=color, width=width) -# draw.line((x, y - size, x, y + size), fill=color, width=width) -# return image_copy - - -# prompt = """The following image is a screenshot with a red crosshair on top of an element that the user wants to interact with. Give me a description that uniquely describes the element as concise as possible across all elements on the screen that the user most likely wants to interact with. Examples: - -# - "Submit button" -# - "Cell within the table about European countries in the third row and 6th column (area in km^2) in the right-hand browser window" -# - "Avatar in the top right hand corner of the browser in focus that looks like a woman" -# """ - - -# def rerun() -> None: -# st.markdown("### Re-running...") -# with VisionAgent( -# log_level=logging.DEBUG, -# tools=tools, -# ) as agent: -# screenshot: Image.Image | None = None -# for message in messages_service.list_(st.session_state.thread_id).data: -# try: -# if ( -# message.role == MessageRole.ASSISTANT -# or message.role == MessageRole.USER -# ): -# content = message.content[0] -# if content.text == "screenshot()": -# screenshot = ( -# get_image(content.image_paths[0]) -# if content.image_paths -# else None -# ) -# continue -# if content.text: -# if match := re.match( -# r"mouse\((\d+),\s*(\d+)\)", cast("str", content.text) -# ): -# if not screenshot: -# error_msg = "Screenshot is required to paint crosshair" -# raise ValueError(error_msg) # noqa: TRY301 -# x, y = map(int, match.groups()) -# screenshot_with_crosshair = paint_crosshair( -# screenshot, (x, y) -# ) -# element_description = agent.get( -# query=prompt, -# image=screenshot_with_crosshair, -# 
model=ModelName.ANTHROPIC__CLAUDE__3_5__SONNET__20241022, -# ) -# messages_service.create( -# thread_id=st.session_state.thread_id, -# role=message.role.value, -# content=f"Move mouse to {element_description}", -# image=screenshot_with_crosshair, -# ) -# agent.mouse_move( -# locator=element_description.replace('"', ""), -# model=ModelName.ANTHROPIC__CLAUDE__3_5__SONNET__20241022, -# ) -# else: -# messages_service.create( -# thread_id=st.session_state.thread_id, -# role=message.role.value, -# content=content.text, -# image=None, -# ) -# func_call = f"agent.tools.os.{content.text}" -# eval(func_call) -# except json.JSONDecodeError: -# continue -# except AttributeError: -# st.write(str(InvalidFunctionError(cast("str", content.text)))) -# except Exception as e: # noqa: BLE001 - We want to catch all other exceptions here -# st.write(str(FunctionExecutionError(cast("str", content.text), e))) - - -if st.sidebar.button("New Chat"): - thread = thread_service.create() - st.session_state.thread_id = thread.id - st.rerun() - -available_threads = thread_service.list_().data -thread_id = st.session_state.get("thread_id", None) - -if not thread_id and not available_threads: - thread = thread_service.create() - thread_id = thread.id - st.session_state.thread_id = thread_id - st.rerun() - -index_of_thread = 0 -if thread_id: - for index, thread in enumerate(available_threads): - if thread.id == thread_id: - index_of_thread = index - break - -# Create columns for thread selection and delete buttons -thread_cols = st.sidebar.columns([0.8, 0.2]) -with thread_cols[0]: - thread_id = st.radio( - "Threads", - [t.id for t in available_threads], - index=index_of_thread, - ) - -# Add delete buttons for each thread -for t in available_threads: - with thread_cols[1]: - if st.button("🗑️", key=f"delete_thread_{t.id}"): - if t.id == thread_id: - # If deleting current thread, switch to first available thread - remaining_threads = [th for th in available_threads if th.id != t.id] - if 
remaining_threads: - st.session_state.thread_id = remaining_threads[0].id - else: - # Create new thread if no threads left - new_thread = thread_service.create() - st.session_state.thread_id = new_thread.id - thread_service.delete(t.id) - st.rerun() - -if thread_id != st.session_state.get("thread_id"): - st.session_state.thread_id = thread_id - st.rerun() - - -st.title(f"Vision Agent Chat - {thread_id}") - -# Display chat history -messages = message_service.list_(thread_id).data -for message in messages: - write_message(message) - -last_message = messages[-1] if messages else None - -# if value_to_type := st.chat_input("Simulate Typing for User (Demonstration)"): -# reporter.add_message( -# role="user", -# content=f'type("{value_to_type}", 50)', -# ) -# st.rerun() - -# if st.button("Simulate left click"): -# reporter.add_message( -# role="User (Demonstration)", -# content='click("left", 1)', -# ) -# st.rerun() - -# # Chat input -# if st.button( -# "Demonstrate where to move mouse" -# ): # only single step, only click supported for now, independent of click always registered as click -# image, coordinates = click_recorder.record() -# reporter.add_message( -# role="User (Demonstration)", -# content="screenshot()", -# image=image, -# ) -# reporter.add_message( -# role="User (Demonstration)", -# content=f"mouse_move({coordinates[0]}, {coordinates[1]})", -# image=draw_point_on_image(image, coordinates[0], coordinates[1]), -# ) -# st.rerun() - -# if st.session_state.get("input_event_listening"): -# while input_event := tools.os.poll_event(): -# image = tools.os.screenshot(report=False) -# if input_event.pressed: -# reporter.add_message( -# role="User (Demonstration)", -# content=f"mouse_move({input_event.x}, {input_event.y})", -# image=draw_point_on_image(image, input_event.x, input_event.y), -# ) -# reporter.add_message( -# role="User (Demonstration)", -# content=f'click("{input_event.button}")', -# ) -# if st.button("Refresh"): -# st.rerun() -# if st.button("Stop 
listening to input events"): -# tools.os.stop_listening() -# st.session_state["input_event_listening"] = False -# st.rerun() -# else: -# if st.button("Listen to input events"): -# tools.os.start_listening() -# st.session_state["input_event_listening"] = True -# st.rerun() - -if act_prompt := st.chat_input("Ask AI"): - if act_prompt != "Continue": - last_message = message_service.create( - thread_id=thread_id, - message=MessageParam( - role="user", - content=act_prompt, - ), - ) - write_message(last_message) - run = run_service.create(thread_id, stream=False) - time.sleep(1) - while run := run_service.retrieve(run.id): - new_messages = message_service.list_( - thread_id, after=last_message.id if last_message else None - ).data - for message in new_messages: - write_message(message) - last_message = new_messages[-1] if new_messages else last_message - if run.status not in {"queued", "running", "in_progress"}: - break - time.sleep(1) - - -if act_prompt := st.chat_input("Ask AI (streaming)"): - if act_prompt != "Continue": - last_message = message_service.create( - thread_id=thread_id, - message=MessageParam( - role="user", - content=act_prompt, - ), - ) - write_message(last_message) - - # Use the streaming API - event_stream = run_service.create(thread_id, stream=True) - import asyncio - - async def handle_stream() -> None: - last_msg_id = last_message.id if last_message else None - async for event in event_stream: - if event.event == "message.created": - msg = event.data - if msg and (not last_msg_id or msg.id > last_msg_id): - write_message(msg) - last_msg_id = msg.id - - # Run the async handler in Streamlit (sync context) - asyncio.run(handle_stream()) - -# if st.button("Rerun"): -# rerun() diff --git a/src/askui/chat/api/app.py b/src/askui/chat/api/app.py deleted file mode 100644 index 48b4eafe..00000000 --- a/src/askui/chat/api/app.py +++ /dev/null @@ -1,27 +0,0 @@ -from fastapi import APIRouter, FastAPI -from fastapi.middleware.cors import CORSMiddleware - -from 
askui.chat.api.messages.router import router as messages_router -from askui.chat.api.runs.router import router as runs_router -from askui.chat.api.threads.router import router as threads_router - -app = FastAPI( - title="AskUI Chat API", - version="0.1.0", -) - -# Add CORS middleware -app.add_middleware( - CORSMiddleware, - allow_origins=["*"], - allow_credentials=True, - allow_methods=["*"], - allow_headers=["*"], -) - -# Include routers -v1_router = APIRouter(prefix="/v1") -v1_router.include_router(threads_router) -v1_router.include_router(messages_router) -v1_router.include_router(runs_router) -app.include_router(v1_router) diff --git a/src/askui/chat/api/messages/dependencies.py b/src/askui/chat/api/messages/dependencies.py deleted file mode 100644 index 86899d14..00000000 --- a/src/askui/chat/api/messages/dependencies.py +++ /dev/null @@ -1,13 +0,0 @@ -from fastapi import Depends - -from askui.chat.api.dependencies import SettingsDep -from askui.chat.api.messages.service import MessageService -from askui.chat.api.settings import Settings - - -def get_message_service(settings: Settings = SettingsDep) -> MessageService: - """Get MessageService instance.""" - return MessageService(settings.data_dir) - - -MessageServiceDep = Depends(get_message_service) diff --git a/src/askui/chat/api/messages/service.py b/src/askui/chat/api/messages/service.py deleted file mode 100644 index 8af02967..00000000 --- a/src/askui/chat/api/messages/service.py +++ /dev/null @@ -1,172 +0,0 @@ -from datetime import datetime, timezone -from pathlib import Path -from typing import Literal - -from pydantic import AwareDatetime, BaseModel, Field - -from askui.chat.api.models import Event -from askui.chat.api.utils import generate_time_ordered_id -from askui.models.shared.computer_agent_message_param import MessageParam - - -class Message(MessageParam): - """A message in a thread.""" - - id: str = Field(default_factory=lambda: generate_time_ordered_id("msg")) - thread_id: str - created_at: 
AwareDatetime = Field( - default_factory=lambda: datetime.now(tz=timezone.utc) - ) - object: str = "message" - - -class MessageEvent(Event): - data: Message - event: Literal["message.created"] - - -class MessageListResponse(BaseModel): - """Response model for listing messages.""" - - object: str = "list" - data: list[Message] - first_id: str | None = None - last_id: str | None = None - has_more: bool = False - - -class MessageService: - """Service for managing messages within threads.""" - - def __init__(self, base_dir: Path) -> None: - """Initialize message service. - - Args: - base_dir: Base directory to store message data - """ - self._base_dir = base_dir - self._threads_dir = base_dir / "threads" - - def list_( - self, thread_id: str, limit: int | None = None, after: str | None = None - ) -> MessageListResponse: - """List all messages in a thread. - - Args: - thread_id: ID of thread to list messages from - limit: Optional maximum number of messages to return - after: Optional message ID after which messages are returned - - Returns: - MessageListResponse containing messages sorted by creation date - - Raises: - FileNotFoundError: If thread doesn't exist - """ - thread_file = self._threads_dir / f"{thread_id}.jsonl" - if not thread_file.exists(): - error_msg = f"Thread {thread_id} not found" - raise FileNotFoundError(error_msg) - - messages: list[Message] = [] - with thread_file.open("r") as f: - for line in f: - msg = Message.model_validate_json(line) - messages.append(msg) - - # Sort by creation date - messages = sorted(messages, key=lambda m: m.created_at) - if after: - messages = [m for m in messages if m.id > after] - - # Apply limit if specified - if limit is not None: - messages = messages[:limit] - - return MessageListResponse( - data=messages, - first_id=messages[0].id if messages else None, - last_id=messages[-1].id if messages else None, - has_more=len(messages) > (limit or len(messages)), - ) - - def create( - self, - thread_id: str, - message: 
MessageParam, - ) -> Message: - """Create a new message in a thread. - - Args: - thread_id: ID of thread to create message in - role: Role of message sender - content: Message content - - Returns: - Created message object - - Raises: - FileNotFoundError: If thread doesn't exist - """ - thread_file = self._threads_dir / f"{thread_id}.jsonl" - if not thread_file.exists(): - error_msg = f"Thread {thread_id} not found" - raise FileNotFoundError(error_msg) - message = Message.model_construct( - thread_id=thread_id, - role=message.role, - content=message.content, - ) - with thread_file.open("a") as f: - f.write(message.model_dump_json()) - f.write("\n") - return message - - def retrieve(self, thread_id: str, message_id: str) -> Message: - """Retrieve a specific message from a thread. - - Args: - thread_id: ID of thread containing message - message_id: ID of message to retrieve - - Returns: - Message object - - Raises: - FileNotFoundError: If thread or message doesn't exist - """ - messages = self.list_(thread_id).data - for msg in messages: - if msg.id == message_id: - return msg - error_msg = f"Message {message_id} not found in thread {thread_id}" - raise FileNotFoundError(error_msg) - - def delete(self, thread_id: str, message_id: str) -> None: - """Delete a message from a thread. 
- - Args: - thread_id: ID of thread containing message - message_id: ID of message to delete - - Raises: - FileNotFoundError: If thread or message doesn't exist - """ - thread_file = self._threads_dir / f"{thread_id}.jsonl" - if not thread_file.exists(): - error_msg = f"Thread {thread_id} not found" - raise FileNotFoundError(error_msg) - - # Read all messages - messages: list[Message] = [] - with thread_file.open("r") as f: - for line in f: - msg = Message.model_validate_json(line) - if msg.id != message_id: - messages.append(msg) - - # Write back all messages except the deleted one - with thread_file.open("w") as f: - for msg in messages: - f.write(msg.model_dump_json()) - f.write("\n") diff --git a/src/askui/chat/api/runs/service.py b/src/askui/chat/api/runs/service.py deleted file mode 100644 index 4dff82a9..00000000 --- a/src/askui/chat/api/runs/service.py +++ /dev/null @@ -1,287 +0,0 @@ -import asyncio -import queue -import threading -from collections.abc import AsyncGenerator -from datetime import datetime, timedelta, timezone -from pathlib import Path -from typing import Literal, Sequence, cast, overload - -from pydantic import AwareDatetime, BaseModel, Field, computed_field - -from askui.agent import VisionAgent -from askui.chat.api.messages.service import MessageEvent, MessageService -from askui.chat.api.models import Event -from askui.chat.api.utils import generate_time_ordered_id -from askui.models.shared.computer_agent_cb_param import OnMessageCbParam -from askui.models.shared.computer_agent_message_param import MessageParam - -RunStatus = Literal[ - "queued", - "in_progress", - "completed", - "cancelling", - "cancelled", - "failed", - "expired", -] - - -class RunError(BaseModel): - message: str - code: Literal["server_error"] - - -class Run(BaseModel): - id: str = Field(default_factory=lambda: generate_time_ordered_id("run")) - thread_id: str - created_at: AwareDatetime = Field( - default_factory=lambda: datetime.now(tz=timezone.utc) - ) - started_at: 
AwareDatetime | None = None - completed_at: AwareDatetime | None = None - tried_cancelling_at: AwareDatetime | None = None - cancelled_at: AwareDatetime | None = None - expires_at: AwareDatetime = Field( - default_factory=lambda: datetime.now(tz=timezone.utc) + timedelta(minutes=10) - ) - failed_at: AwareDatetime | None = None - last_error: RunError | None = None - object: Literal["run"] = "run" - - @computed_field - @property - def status(self) -> RunStatus: - if self.cancelled_at: - return "cancelled" - if self.failed_at: - return "failed" - if self.completed_at: - return "completed" - if self.expires_at and self.expires_at < datetime.now(tz=timezone.utc): - return "expired" - if self.tried_cancelling_at: - return "cancelling" - if self.started_at: - return "in_progress" - return "queued" - - -class RunListResponse(BaseModel): - object: Literal["list"] = "list" - data: Sequence[Run] - first_id: str | None = None - last_id: str | None = None - has_more: bool = False - - -class RunEvent(Event): - data: Run - event: Literal[ - "run.created", - "run.started", - "run.completed", - "run.failed", - "run.cancelled", - "run.expired", - ] - - -class Runner: - def __init__(self, run: Run, base_dir: Path) -> None: - self._run = run - self._base_dir = base_dir - self._runs_dir = base_dir / "runs" - self._msg_service = MessageService(self._base_dir) - - def run(self, event_queue: queue.Queue[RunEvent | MessageEvent | None]) -> None: - self._mark_started() - event_queue.put( - RunEvent( - data=self._run, - event="run.started", - ) - ) - messages: list[MessageParam] = [ - cast("MessageParam", msg) - for msg in self._msg_service.list_(self._run.thread_id).data - ] - - def on_message( - on_message_cb_param: OnMessageCbParam, - ) -> MessageParam | None: - message = self._msg_service.create( - thread_id=self._run.thread_id, - message=on_message_cb_param.message, - ) - event_queue.put( - MessageEvent( - data=message, - event="message.created", - ) - ) - updated_run = 
self._retrieve_run() - if updated_run.status == "cancelling": - updated_run.cancelled_at = datetime.now(tz=timezone.utc) - self._update_run_file(updated_run) - event_queue.put( - RunEvent( - data=updated_run, - event="run.cancelled", - ) - ) - return None - if updated_run.status == "expired": - event_queue.put( - RunEvent( - data=updated_run, - event="run.expired", - ) - ) - return None - return on_message_cb_param.message - - try: - with VisionAgent() as agent: - agent.act(messages, on_message=on_message) - updated_run = self._retrieve_run() - if updated_run.status == "in_progress": - updated_run.completed_at = datetime.now(tz=timezone.utc) - self._update_run_file(updated_run) - event_queue.put( - RunEvent( - data=updated_run, - event="run.completed", - ) - ) - except Exception as e: # noqa: BLE001 - updated_run = self._retrieve_run() - updated_run.failed_at = datetime.now(tz=timezone.utc) - updated_run.last_error = RunError(message=str(e), code="server_error") - self._update_run_file(updated_run) - event_queue.put( - RunEvent( - data=updated_run, - event="run.failed", - ) - ) - finally: - event_queue.put(None) - - def _mark_started(self) -> None: - self._run.started_at = datetime.now(tz=timezone.utc) - self._update_run_file(self._run) - - def _should_abort(self, run: Run) -> bool: - return run.status in ("cancelled", "cancelling", "expired") - - def _update_run_file(self, run: Run) -> None: - run_file = self._runs_dir / f"{run.thread_id}__{run.id}.json" - with run_file.open("w") as f: - f.write(run.model_dump_json()) - - def _retrieve_run(self) -> Run: - run_file = self._runs_dir / f"{self._run.thread_id}__{self._run.id}.json" - with run_file.open("r") as f: - return Run.model_validate_json(f.read()) - - -class RunService: - """ - Service for managing runs. Handles creation, retrieval, listing, and cancellation of runs. 
- """ - - def __init__(self, base_dir: Path) -> None: - self._base_dir = base_dir - self._runs_dir = base_dir / "runs" - - def _run_path(self, thread_id: str, run_id: str) -> Path: - return self._runs_dir / f"{thread_id}__{run_id}.json" - - def _create_run(self, thread_id: str) -> Run: - run = Run(thread_id=thread_id) - self._runs_dir.mkdir(parents=True, exist_ok=True) - self._update_run_file(run) - return run - - @overload - def create(self, thread_id: str, stream: Literal[False]) -> Run: ... - - @overload - def create( - self, thread_id: str, stream: Literal[True] - ) -> AsyncGenerator[RunEvent | MessageEvent, None]: ... - - @overload - def create( - self, thread_id: str, stream: bool - ) -> Run | AsyncGenerator[RunEvent | MessageEvent, None]: ... - - def create( - self, thread_id: str, stream: bool - ) -> Run | AsyncGenerator[RunEvent | MessageEvent, None]: - run = self._create_run(thread_id) - event_queue: queue.Queue[RunEvent | MessageEvent | None] = queue.Queue() - runner = Runner(run, self._base_dir) - thread = threading.Thread(target=runner.run, args=(event_queue,)) - thread.start() - if stream: - - async def event_stream() -> AsyncGenerator[RunEvent | MessageEvent, None]: - yield RunEvent( - data=run, - event="run.created", - ) - loop = asyncio.get_event_loop() - while True: - event = await loop.run_in_executor(None, event_queue.get) - if event is None: - break - yield event - - return event_stream() - return run - - def _update_run_file(self, run: Run) -> None: - run_file = self._run_path(run.thread_id, run.id) - with run_file.open("w") as f: - f.write(run.model_dump_json()) - - def retrieve(self, run_id: str) -> Run: - # Find the file by run_id - for f in self._runs_dir.glob(f"*__{run_id}.json"): - with f.open("r") as file: - return Run.model_validate_json(file.read()) - error_msg = f"Run {run_id} not found" - raise FileNotFoundError(error_msg) - - def list_(self, thread_id: str | None = None) -> RunListResponse: - if not self._runs_dir.exists(): - 
return RunListResponse(data=[]) - if thread_id: - run_files = list(self._runs_dir.glob(f"{thread_id}__*.json")) - else: - run_files = list(self._runs_dir.glob("*__*.json")) - runs: list[Run] = [] - for f in run_files: - with f.open("r") as file: - runs.append(Run.model_validate_json(file.read())) - runs = sorted(runs, key=lambda r: r.created_at, reverse=True) - return RunListResponse( - data=runs, - first_id=runs[0].id if runs else None, - last_id=runs[-1].id if runs else None, - has_more=False, - ) - - def cancel(self, run_id: str) -> Run: - run = self.retrieve(run_id) - if run.status in ("cancelled", "cancelling", "completed", "failed", "expired"): - return run - run.tried_cancelling_at = datetime.now(tz=timezone.utc) - for f in self._runs_dir.glob(f"*__{run_id}.json"): - with f.open("w") as file: - file.write(run.model_dump_json()) - return run - # Find the file by run_id - error_msg = f"Run {run_id} not found" - raise FileNotFoundError(error_msg) diff --git a/src/askui/chat/api/threads/dependencies.py b/src/askui/chat/api/threads/dependencies.py deleted file mode 100644 index 3d607559..00000000 --- a/src/askui/chat/api/threads/dependencies.py +++ /dev/null @@ -1,13 +0,0 @@ -from fastapi import Depends - -from askui.chat.api.dependencies import SettingsDep -from askui.chat.api.settings import Settings -from askui.chat.api.threads.service import ThreadService - - -def get_thread_service(settings: Settings = SettingsDep) -> ThreadService: - """Get ThreadService instance.""" - return ThreadService(settings.data_dir) - - -ThreadServiceDep = Depends(get_thread_service) diff --git a/src/askui/chat/api/threads/service.py b/src/askui/chat/api/threads/service.py deleted file mode 100644 index 5e4e7b4d..00000000 --- a/src/askui/chat/api/threads/service.py +++ /dev/null @@ -1,132 +0,0 @@ -from datetime import datetime, timezone -from pathlib import Path -from typing import Sequence - -from pydantic import AwareDatetime, BaseModel, Field - -from askui.chat.api.utils import 
generate_time_ordered_id - - -class Thread(BaseModel): - """A chat thread/session.""" - - id: str = Field(default_factory=lambda: generate_time_ordered_id("thread")) - created_at: AwareDatetime = Field( - default_factory=lambda: datetime.now(tz=timezone.utc) - ) - object: str = "thread" - - -class ThreadListResponse(BaseModel): - """Response model for listing threads.""" - - object: str = "list" - data: Sequence[Thread] - first_id: str | None = None - last_id: str | None = None - has_more: bool = False - - -class ThreadService: - """Service for managing chat threads/sessions.""" - - def __init__(self, base_dir: Path) -> None: - """Initialize thread service. - - Args: - base_dir: Base directory to store thread data - """ - self._base_dir = base_dir - self._threads_dir = base_dir / "threads" - - def list_(self, limit: int | None = None) -> ThreadListResponse: - """List all available threads. - - Args: - limit: Optional maximum number of threads to return - - Returns: - ThreadListResponse containing threads sorted by creation date (newest first) - """ - if not self._threads_dir.exists(): - return ThreadListResponse(data=[]) - - thread_files = list(self._threads_dir.glob("*.jsonl")) - threads: list[Thread] = [] - for f in thread_files: - thread_id = f.stem - created_at = datetime.fromtimestamp(f.stat().st_ctime, tz=timezone.utc) - threads.append( - Thread( - id=thread_id, - created_at=created_at, - ) - ) - - # Sort by creation date, newest first - threads = sorted(threads, key=lambda t: t.created_at, reverse=True) - - # Apply limit if specified - if limit is not None: - threads = threads[:limit] - - return ThreadListResponse( - data=threads, - first_id=threads[0].id if threads else None, - last_id=threads[-1].id if threads else None, - has_more=len(thread_files) > (limit or len(thread_files)), - ) - - def create(self) -> Thread: - """Create a new thread. 
- - Returns: - Created thread object - """ - thread = Thread() - thread_file = self._threads_dir / f"{thread.id}.jsonl" - self._threads_dir.mkdir(parents=True, exist_ok=True) - thread_file.touch() - return thread - - def retrieve(self, thread_id: str) -> Thread: - """Retrieve a thread by ID. - - Args: - thread_id: ID of thread to retrieve - - Returns: - Thread object - - Raises: - FileNotFoundError: If thread doesn't exist - """ - thread_file = self._threads_dir / f"{thread_id}.jsonl" - if not thread_file.exists(): - error_msg = f"Thread {thread_id} not found" - raise FileNotFoundError(error_msg) - - created_at = datetime.fromtimestamp( - thread_file.stat().st_ctime, tz=timezone.utc - ) - return Thread( - id=thread_id, - created_at=created_at, - ) - - def delete(self, thread_id: str) -> None: - """Delete a thread and all its associated files. - - Args: - thread_id: ID of thread to delete - - Raises: - FileNotFoundError: If thread doesn't exist - """ - thread_file = self._threads_dir / f"{thread_id}.jsonl" - if not thread_file.exists(): - error_msg = f"Thread {thread_id} not found" - raise FileNotFoundError(error_msg) - - # Delete thread file - thread_file.unlink() diff --git a/src/askui/chat/click_recorder.py b/src/askui/chat/click_recorder.py deleted file mode 100644 index 17b64b82..00000000 --- a/src/askui/chat/click_recorder.py +++ /dev/null @@ -1,103 +0,0 @@ -import json -import os -import subprocess -import sys -import tempfile -from datetime import datetime -from pathlib import Path -from typing import List, Tuple - -from PIL import Image -from pydantic import UUID4, BaseModel, ConfigDict -from pydantic.alias_generators import to_camel - -from askui.chat.exceptions import AnnotationError - -Coordinate = Tuple[int, int] - - -class Rectangle(BaseModel): - xmin: int - ymin: int - xmax: int - ymax: int - - @property - def center(self) -> Coordinate: - x = (self.xmin + self.xmax) // 2 - y = (self.ymin + self.ymax) // 2 - return (x, y) - - -class 
Annotation(BaseModel): - id: UUID4 - rectangle: Rectangle - - -class Size(BaseModel): - width: int - height: int - - -class AskUIImage(BaseModel): - size: Size - - -class AnnoationContainer(BaseModel): - model_config = ConfigDict( - alias_generator=lambda field_name: to_camel(field_name), - ) - - version: int - id: UUID4 - creation_date_time: datetime - image: AskUIImage - annotations: List[Annotation] - - -class AskUiSnippingTool: - def __init__(self) -> None: - self.process = None - - def __find_remote_device_controller(self) -> str: - if sys.platform == "darwin": - return f"{os.environ['ASKUI_INSTALLATION_DIRECTORY']}/DependencyCache/AskUIRemoteDeviceSnippingTool-0.2.0/AskuiRemoteDeviceSnippingTool" - error_msg = "Snipping tool not supported on this platform, yet, as the path was unknown at the time of writing" - raise NotImplementedError(error_msg) - - def __start_process(self, binary_path: str, output_directory: str) -> None: - self.process = subprocess.check_output( - (binary_path, "-Annotate", "-OneShot", "-OutDirectory", output_directory) - ) - - def annotate(self) -> Tuple[Image.Image, AnnoationContainer]: - with tempfile.TemporaryDirectory() as tempdir: - tempdir_path = Path(tempdir) - self.__start_process(self.__find_remote_device_controller(), tempdir) - - json_files = list(tempdir_path.glob("*.json")) - png_files = list(tempdir_path.glob("*.png")) - - if len(json_files) != 1 or len(png_files) != 1: - raise AnnotationError - json_file = json_files[0] - annotation = None - with Path.open(json_file) as json_data: - annotation = AnnoationContainer(**json.load(json_data)) - - return Image.open(png_files[0]).copy(), annotation - - -class ClickRecorder: - def __init__(self) -> None: - self.snipping_tool = AskUiSnippingTool() - - def record(self) -> Tuple[Image.Image, Coordinate]: - image, annotation_container = self.snipping_tool.annotate() - assert ( - annotation_container.annotations is not None - and len(annotation_container.annotations) == 1 - ) - 
annotation = annotation_container.annotations[0] - center = annotation.rectangle.center - return image, center diff --git a/src/askui/chat/exceptions.py b/src/askui/chat/exceptions.py deleted file mode 100644 index 3c70c17d..00000000 --- a/src/askui/chat/exceptions.py +++ /dev/null @@ -1,47 +0,0 @@ -"""Exceptions for the chat module.""" - - -class ChatError(Exception): - """Base exception for chat-related errors.""" - - def __init__(self, message: str): - self.message = message - super().__init__(self.message) - - -class InvalidFunctionError(ChatError): - """Exception raised when an invalid function is called.""" - - def __init__(self, function_name: str): - super().__init__(f"Invalid function: {function_name}") - - -class FunctionExecutionError(ChatError): - """Exception raised when a function execution fails.""" - - def __init__(self, function_name: str, error: Exception): - super().__init__(f"Error executing {function_name}: {str(error)}") - self.original_error = error - - -class AnnotationError(ChatError): - """Exception raised when annotation is not done or invalid.""" - - def __init__(self, message: str = "No annotation Done!"): - super().__init__(message) - - -class ActionTimeoutError(ChatError): - """Exception raised when an action times out.""" - - def __init__(self, message: str = "Action not yet done"): - super().__init__(message) - - -__all__ = [ - "ChatError", - "InvalidFunctionError", - "FunctionExecutionError", - "AnnotationError", - "ActionTimeoutError", -] diff --git a/src/askui/models/anthropic/settings.py b/src/askui/models/anthropic/settings.py index e804495d..3b9f9de6 100644 --- a/src/askui/models/anthropic/settings.py +++ b/src/askui/models/anthropic/settings.py @@ -8,13 +8,14 @@ class AnthropicSettings(BaseSettings): api_key: SecretStr = Field( + default=..., min_length=1, validation_alias="ANTHROPIC_API_KEY", ) class ClaudeSettingsBase(BaseModel): - anthropic: AnthropicSettings = Field(default_factory=AnthropicSettings) + anthropic: 
AnthropicSettings = Field(default_factory=lambda: AnthropicSettings()) class ClaudeSettings(ClaudeSettingsBase): diff --git a/src/askui/models/shared/computer_agent.py b/src/askui/models/shared/computer_agent.py index 0c4a74be..eda81819 100644 --- a/src/askui/models/shared/computer_agent.py +++ b/src/askui/models/shared/computer_agent.py @@ -232,8 +232,13 @@ def _step( ) -> None: """Execute a single step in the conversation. + If the last message is an assistant's message and does not contain tool use + blocks, this method is going to return immediately, as there is nothing to act + upon. + Args: messages (list[MessageParam]): The message history. + Contains at least one message. model_choice (str): The model to use for message creation. on_message (OnMessageCb | None, optional): Callback on new messages @@ -246,16 +251,21 @@ def _step( self._settings.only_n_most_recent_images, self._settings.image_truncation_threshold, ) - response_message = self._create_message(messages, model_choice) - message_by_assistant = self._call_on_message( - on_message, response_message, messages - ) - if message_by_assistant is None: - return - message_by_assistant_dict = message_by_assistant.model_dump(mode="json") - logger.debug(message_by_assistant_dict) - messages.append(message_by_assistant) - self._reporter.add_message(self.__class__.__name__, message_by_assistant_dict) + if messages[-1].role == "user": + response_message = self._create_message(messages, model_choice) + message_by_assistant = self._call_on_message( + on_message, response_message, messages + ) + if message_by_assistant is None: + return + message_by_assistant_dict = message_by_assistant.model_dump(mode="json") + logger.debug(message_by_assistant_dict) + messages.append(message_by_assistant) + self._reporter.add_message( + self.__class__.__name__, message_by_assistant_dict + ) + else: + message_by_assistant = messages[-1] if tool_result_message := self._use_tools(message_by_assistant): if tool_result_message := 
self._call_on_message( on_message, tool_result_message, messages diff --git a/src/askui/tools/pynput/pynput_agent_os.py b/src/askui/tools/pynput/pynput_agent_os.py index bccded71..1cd022e2 100644 --- a/src/askui/tools/pynput/pynput_agent_os.py +++ b/src/askui/tools/pynput/pynput_agent_os.py @@ -17,7 +17,7 @@ from typing_extensions import override from askui.logger import logger -from askui.reporting import Reporter +from askui.reporting import CompositeReporter, Reporter from askui.tools.agent_os import AgentOs, InputEvent, ModifierKey, PcKey from askui.utils.image_utils import draw_point_on_image @@ -131,14 +131,14 @@ class PynputAgentOs(AgentOs): def __init__( self, - reporter: Reporter, + reporter: Reporter | None = None, display: int = 1, ) -> None: self._mouse = MouseController() self._keyboard = KeyboardController() self._sct = mss() self._display = display - self._reporter = reporter + self._reporter = reporter or CompositeReporter() self._mouse_listener: MouseListener | None = None self._input_event_queue: queue.Queue[InputEvent] = queue.Queue() diff --git a/src/askui/chat/__init__.py b/src/chat/__init__.py similarity index 100% rename from src/askui/chat/__init__.py rename to src/chat/__init__.py diff --git a/src/askui/chat/api/__init__.py b/src/chat/api/__init__.py similarity index 100% rename from src/askui/chat/api/__init__.py rename to src/chat/api/__init__.py diff --git a/src/chat/api/app.py b/src/chat/api/app.py new file mode 100644 index 00000000..2c9aa8e5 --- /dev/null +++ b/src/chat/api/app.py @@ -0,0 +1,43 @@ +from contextlib import asynccontextmanager +from typing import AsyncGenerator + +from fastapi import APIRouter, FastAPI +from fastapi.middleware.cors import CORSMiddleware + +from chat.api.assistants.dependencies import get_assistant_service +from chat.api.assistants.router import router as assistants_router +from chat.api.dependencies import get_settings +from chat.api.messages.router import router as messages_router +from 
chat.api.runs.router import router as runs_router +from chat.api.threads.router import router as threads_router + + +@asynccontextmanager +async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]: # noqa: ARG001 + assistant_service = get_assistant_service(settings=get_settings()) + assistant_service.seed() + yield + + +app = FastAPI( + title="AskUI Chat API", + version="0.1.0", + lifespan=lifespan, +) + +# Add CORS middleware +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +# Include routers +v1_router = APIRouter(prefix="/v1") +v1_router.include_router(assistants_router) +v1_router.include_router(threads_router) +v1_router.include_router(messages_router) +v1_router.include_router(runs_router) +app.include_router(v1_router) diff --git a/src/askui/chat/api/messages/__init__.py b/src/chat/api/assistants/__init__.py similarity index 100% rename from src/askui/chat/api/messages/__init__.py rename to src/chat/api/assistants/__init__.py diff --git a/src/chat/api/assistants/dependencies.py b/src/chat/api/assistants/dependencies.py new file mode 100644 index 00000000..014f21a5 --- /dev/null +++ b/src/chat/api/assistants/dependencies.py @@ -0,0 +1,13 @@ +from fastapi import Depends + +from chat.api.assistants.service import AssistantService +from chat.api.dependencies import SettingsDep +from chat.api.settings import Settings + + +def get_assistant_service(settings: Settings = SettingsDep) -> AssistantService: + """Get AssistantService instance.""" + return AssistantService(settings.data_dir) + + +AssistantServiceDep = Depends(get_assistant_service) diff --git a/src/chat/api/assistants/models.py b/src/chat/api/assistants/models.py new file mode 100644 index 00000000..fcfb9d6d --- /dev/null +++ b/src/chat/api/assistants/models.py @@ -0,0 +1,20 @@ +from datetime import datetime, timezone +from typing import Literal + +from pydantic import BaseModel, Field + +from chat.api.models 
import UnixDatetime +from chat.api.utils import generate_time_ordered_id + + +class Assistant(BaseModel): + """An assistant that can be used in a thread.""" + + id: str = Field(default_factory=lambda: generate_time_ordered_id("asst")) + created_at: UnixDatetime = Field( + default_factory=lambda: datetime.now(tz=timezone.utc) + ) + name: str | None = None + description: str | None = None + object: Literal["assistant"] = "assistant" + avatar: str | None = Field(default=None, description="URL of the avatar image") diff --git a/src/chat/api/assistants/router.py b/src/chat/api/assistants/router.py new file mode 100644 index 00000000..37140e46 --- /dev/null +++ b/src/chat/api/assistants/router.py @@ -0,0 +1,66 @@ +from fastapi import APIRouter, HTTPException + +# from fastapi import status +from chat.api.assistants.dependencies import AssistantServiceDep +from chat.api.assistants.models import Assistant +from chat.api.assistants.service import ( + AssistantService, # AssistantModifyRequest, CreateAssistantRequest, +) +from chat.api.models import ListQuery, ListQueryDep, ListResponse + +router = APIRouter(prefix="/assistants", tags=["assistants"]) + + +@router.get("") +def list_assistants( + query: ListQuery = ListQueryDep, + assistant_service: AssistantService = AssistantServiceDep, +) -> ListResponse[Assistant]: + """List all assistants.""" + return assistant_service.list_(query=query) + + +# @router.post("", status_code=status.HTTP_201_CREATED) +# def create_assistant( +# request: CreateAssistantRequest, +# assistant_service: AssistantService = AssistantServiceDep, +# ) -> Assistant: +# """Create a new assistant.""" +# return assistant_service.create(request) + + +@router.get("/{assistant_id}") +def retrieve_assistant( + assistant_id: str, + assistant_service: AssistantService = AssistantServiceDep, +) -> Assistant: + """Get an assistant by ID.""" + try: + return assistant_service.retrieve(assistant_id) + except FileNotFoundError as e: + raise 
HTTPException(status_code=404, detail=str(e)) from e + + +# @router.post("/{assistant_id}") +# def modify_assistant( +# assistant_id: str, +# request: AssistantModifyRequest, +# assistant_service: AssistantService = AssistantServiceDep, +# ) -> Assistant: +# """Update an assistant.""" +# try: +# return assistant_service.modify(assistant_id, request) +# except FileNotFoundError as e: +# raise HTTPException(status_code=404, detail=str(e)) from e + + +# @router.delete("/{assistant_id}", status_code=status.HTTP_204_NO_CONTENT) +# def delete_assistant( +# assistant_id: str, +# assistant_service: AssistantService = AssistantServiceDep, +# ) -> None: +# """Delete an assistant.""" +# try: +# assistant_service.delete(assistant_id) +# except FileNotFoundError as e: +# raise HTTPException(status_code=404, detail=str(e)) from e diff --git a/src/chat/api/assistants/seeds.py b/src/chat/api/assistants/seeds.py new file mode 100644 index 00000000..81e44ed7 --- /dev/null +++ b/src/chat/api/assistants/seeds.py @@ -0,0 +1,13 @@ +from chat.api.assistants.models import Assistant + +ASKUI_VISION_AGENT = Assistant( + id="asst_ge3tiojsga3dgnruge3di2u5ov36shedkcslxnmca", + name="AskUI Vision Agent", + avatar="data:image/svg+xml;base64,PHN2ZyAgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIgogIHdpZHRoPSIyNCIKICBoZWlnaHQ9IjI0IgogIHZpZXdCb3g9IjAgMCAyNCAyNCIKICBmaWxsPSJub25lIgogIHN0cm9rZT0iIzAwMCIgc3R5bGU9ImJhY2tncm91bmQtY29sb3I6ICNmZmY7IGJvcmRlci1yYWRpdXM6IDJweCIKICBzdHJva2Utd2lkdGg9IjIiCiAgc3Ryb2tlLWxpbmVjYXA9InJvdW5kIgogIHN0cm9rZS1saW5lam9pbj0icm91bmQiCj4KICA8cGF0aCBkPSJNMTIgOFY0SDgiIC8+CiAgPHJlY3Qgd2lkdGg9IjE2IiBoZWlnaHQ9IjEyIiB4PSI0IiB5PSI4IiByeD0iMiIgLz4KICA8cGF0aCBkPSJNMiAxNGgyIiAvPgogIDxwYXRoIGQ9Ik0yMCAxNGgyIiAvPgogIDxwYXRoIGQ9Ik0xNSAxM3YyIiAvPgogIDxwYXRoIGQ9Ik05IDEzdjIiIC8+Cjwvc3ZnPgo=", +) + +HUMAN_DEMONSTRATION_AGENT = Assistant( + id="asst_ge3tiojsga3dgnruge3di2u5ov36shedkcslxnmcb", + name="Human DemonstrationAgent", + 
avatar="data:image/svg+xml;base64,PHN2ZyAgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIgogIHdpZHRoPSIyNCIKICBoZWlnaHQ9IjI0IgogIHZpZXdCb3g9IjAgMCAyNCAyNCIKICBmaWxsPSJub25lIgogIHN0cm9rZT0iIzAwMCIgc3R5bGU9ImJhY2tncm91bmQtY29sb3I6ICNmZmY7IGJvcmRlci1yYWRpdXM6IDJweCIKICBzdHJva2Utd2lkdGg9IjIiCiAgc3Ryb2tlLWxpbmVjYXA9InJvdW5kIgogIHN0cm9rZS1saW5lam9pbj0icm91bmQiCj4KICA8cGF0aCBkPSJNMTkgMjF2LTJhNCA0IDAgMCAwLTQtNEg5YTQgNCAwIDAgMC00IDR2MiIgLz4KICA8Y2lyY2xlIGN4PSIxMiIgY3k9IjciIHI9IjQiIC8+Cjwvc3ZnPgo=", +) diff --git a/src/chat/api/assistants/service.py b/src/chat/api/assistants/service.py new file mode 100644 index 00000000..369698c8 --- /dev/null +++ b/src/chat/api/assistants/service.py @@ -0,0 +1,166 @@ +from pathlib import Path + +from pydantic import BaseModel, Field + +from chat.api.assistants.models import Assistant +from chat.api.assistants.seeds import ASKUI_VISION_AGENT, HUMAN_DEMONSTRATION_AGENT +from chat.api.models import DO_NOT_PATCH, DoNotPatch, ListQuery, ListResponse + + +class CreateAssistantRequest(BaseModel): + """Request model for creating an assistant.""" + + name: str | None = None + description: str | None = None + avatar: str | None = Field(default=None, description="URL of the avatar image") + + +class AssistantModifyRequest(BaseModel): + """Request model for updating an assistant.""" + + name: str | None | DoNotPatch = DO_NOT_PATCH + description: str | None | DoNotPatch = DO_NOT_PATCH + avatar: str | None | DoNotPatch = Field( + default=DO_NOT_PATCH, description="URL of the avatar image" + ) + + +class AssistantService: + """Service for managing assistants.""" + + def __init__(self, base_dir: Path) -> None: + """Initialize assistant service. + + Args: + base_dir: Base directory to store assistant data + """ + self._base_dir = base_dir + self._assistants_dir = base_dir / "assistants" + + def list_(self, query: ListQuery) -> ListResponse[Assistant]: + """List all available assistants. 
+ + Args: + query (ListQuery): Query parameters for listing assistants + + Returns: + ListResponse[Assistant]: ListResponse containing assistants sorted by + creation date + """ + if not self._assistants_dir.exists(): + return ListResponse(data=[]) + + assistant_files = list(self._assistants_dir.glob("*.json")) + assistants: list[Assistant] = [] + for f in assistant_files: + with f.open("r") as file: + assistants.append(Assistant.model_validate_json(file.read())) + + # Sort by creation date + assistants = sorted( + assistants, key=lambda a: a.created_at, reverse=(query.order == "desc") + ) + + # Apply before/after filters + if query.after: + assistants = [a for a in assistants if a.id > query.after] + if query.before: + assistants = [a for a in assistants if a.id < query.before] + + # Apply limit + assistants = assistants[: query.limit] + + return ListResponse( + data=assistants, + first_id=assistants[0].id if assistants else None, + last_id=assistants[-1].id if assistants else None, + has_more=len(assistant_files) > query.limit, + ) + + def retrieve(self, assistant_id: str) -> Assistant: + """Retrieve an assistant by ID. + + Args: + assistant_id: ID of assistant to retrieve + + Returns: + Assistant object + + Raises: + FileNotFoundError: If assistant doesn't exist + """ + assistant_file = self._assistants_dir / f"{assistant_id}.json" + if not assistant_file.exists(): + error_msg = f"Assistant {assistant_id} not found" + raise FileNotFoundError(error_msg) + + with assistant_file.open("r") as f: + return Assistant.model_validate_json(f.read()) + + def create(self, request: CreateAssistantRequest) -> Assistant: + """Create a new assistant. 
+ + Args: + request: Assistant creation request + + Returns: + Created assistant object + """ + assistant = Assistant( + name=request.name, + description=request.description, + ) + self._save(assistant) + return assistant + + def _save(self, assistant: Assistant) -> None: + """Save an assistant to the file system.""" + self._assistants_dir.mkdir(parents=True, exist_ok=True) + assistant_file = self._assistants_dir / f"{assistant.id}.json" + with assistant_file.open("w") as f: + f.write(assistant.model_dump_json()) + + def modify(self, assistant_id: str, request: AssistantModifyRequest) -> Assistant: + """Update an existing assistant. + + Args: + assistant_id: ID of assistant to modify + request: Assistant modify request + + Returns: + Updated assistant object + + Raises: + FileNotFoundError: If assistant doesn't exist + """ + assistant = self.retrieve(assistant_id) + if not isinstance(request.name, DoNotPatch): + assistant.name = request.name + if not isinstance(request.description, DoNotPatch): + assistant.description = request.description + if not isinstance(request.avatar, DoNotPatch): + assistant.avatar = request.avatar + assistant_file = self._assistants_dir / f"{assistant_id}.json" + with assistant_file.open("w") as f: + f.write(assistant.model_dump_json()) + return assistant + + def delete(self, assistant_id: str) -> None: + """Delete an assistant. 
+ + Args: + assistant_id: ID of assistant to delete + + Raises: + FileNotFoundError: If assistant doesn't exist + """ + assistant_file = self._assistants_dir / f"{assistant_id}.json" + if not assistant_file.exists(): + error_msg = f"Assistant {assistant_id} not found" + raise FileNotFoundError(error_msg) + assistant_file.unlink() + + def seed(self) -> None: + """Seed the assistant service with default assistants.""" + self._save(ASKUI_VISION_AGENT) + self._save(HUMAN_DEMONSTRATION_AGENT) diff --git a/src/askui/chat/api/dependencies.py b/src/chat/api/dependencies.py similarity index 78% rename from src/askui/chat/api/dependencies.py rename to src/chat/api/dependencies.py index a9c78c2f..ef35a3d7 100644 --- a/src/askui/chat/api/dependencies.py +++ b/src/chat/api/dependencies.py @@ -1,6 +1,6 @@ from fastapi import Depends -from askui.chat.api.settings import Settings +from chat.api.settings import Settings def get_settings() -> Settings: diff --git a/src/askui/chat/api/runs/__init__.py b/src/chat/api/messages/__init__.py similarity index 100% rename from src/askui/chat/api/runs/__init__.py rename to src/chat/api/messages/__init__.py diff --git a/src/chat/api/messages/dependencies.py b/src/chat/api/messages/dependencies.py new file mode 100644 index 00000000..59ec54d2 --- /dev/null +++ b/src/chat/api/messages/dependencies.py @@ -0,0 +1,15 @@ +from fastapi import Depends + +from chat.api.dependencies import SettingsDep +from chat.api.messages.service import MessageService +from chat.api.settings import Settings + + +def get_message_service( + settings: Settings = SettingsDep, +) -> MessageService: + """Get MessagePersistedService instance.""" + return MessageService(settings.data_dir) + + +MessageServiceDep = Depends(get_message_service) diff --git a/src/askui/chat/api/messages/router.py b/src/chat/api/messages/router.py similarity index 53% rename from src/askui/chat/api/messages/router.py rename to src/chat/api/messages/router.py index 114a1c03..f35e8e36 100644 --- 
a/src/askui/chat/api/messages/router.py +++ b/src/chat/api/messages/router.py @@ -1,58 +1,66 @@ from fastapi import APIRouter, HTTPException, status -from askui.chat.api.messages.dependencies import MessageServiceDep -from askui.chat.api.messages.service import Message, MessageListResponse, MessageService -from askui.models.shared.computer_agent_message_param import MessageParam +from chat.api.messages.dependencies import MessageServiceDep +from chat.api.messages.service import Message, MessageCreateRequest, MessageService +from chat.api.models import ListQuery, ListQueryDep, ListResponse, MessageId, ThreadId router = APIRouter(prefix="/threads/{thread_id}/messages", tags=["messages"]) @router.get("") def list_messages( - thread_id: str, - limit: int | None = None, + thread_id: ThreadId, + query: ListQuery = ListQueryDep, message_service: MessageService = MessageServiceDep, -) -> MessageListResponse: +) -> ListResponse[Message]: """List all messages in a thread.""" try: - return message_service.list_(thread_id, limit=limit) + messages = message_service.list_(thread_id, query=query) + return ListResponse( + data=messages, + first_id=messages[0].id if messages else None, + last_id=messages[-1].id if messages else None, + has_more=len(messages) > query.limit, + ) except FileNotFoundError as e: raise HTTPException(status_code=404, detail=str(e)) from e @router.post("", status_code=status.HTTP_201_CREATED) async def create_message( - thread_id: str, - message: MessageParam, + thread_id: ThreadId, + request: MessageCreateRequest, message_service: MessageService = MessageServiceDep, ) -> Message: """Create a new message in a thread.""" try: - return message_service.create( - thread_id=thread_id, - message=message, - ) + return message_service.create(thread_id=thread_id, request=request) except FileNotFoundError as e: raise HTTPException(status_code=404, detail=str(e)) from e @router.get("/{message_id}") def retrieve_message( - thread_id: str, - message_id: str, + 
thread_id: ThreadId, + message_id: MessageId, message_service: MessageService = MessageServiceDep, ) -> Message: """Get a specific message from a thread.""" try: - return message_service.retrieve(thread_id, message_id) + messages = message_service.list_(thread_id=thread_id, query=ListQuery(limit=1)) + for msg in messages: + if msg.id == message_id: + return msg + error_msg = f"Message {message_id} not found in thread {thread_id}" + raise HTTPException(status_code=404, detail=error_msg) except FileNotFoundError as e: raise HTTPException(status_code=404, detail=str(e)) from e @router.delete("/{message_id}", status_code=status.HTTP_204_NO_CONTENT) def delete_message( - thread_id: str, - message_id: str, + thread_id: ThreadId, + message_id: MessageId, message_service: MessageService = MessageServiceDep, ) -> None: """Delete a message from a thread.""" diff --git a/src/chat/api/messages/service.py b/src/chat/api/messages/service.py new file mode 100644 index 00000000..e6d2b2a3 --- /dev/null +++ b/src/chat/api/messages/service.py @@ -0,0 +1,112 @@ +from datetime import datetime, timezone +from pathlib import Path +from typing import Literal + +from pydantic import Field + +from askui.models.shared.computer_agent_message_param import MessageParam +from chat.api.models import ( + MAX_MESSAGES_PER_THREAD, + AssistantId, + ListQuery, + MessageId, + RunId, + ThreadId, + UnixDatetime, +) +from chat.api.utils import generate_time_ordered_id + + +class MessageBase(MessageParam): + assistant_id: AssistantId | None = None + object: Literal["thread.message"] = "thread.message" + role: Literal["user", "assistant"] + run_id: RunId | None = None + + +class Message(MessageBase): + id: MessageId = Field(default_factory=lambda: generate_time_ordered_id("msg")) + thread_id: ThreadId + created_at: UnixDatetime = Field( + default_factory=lambda: datetime.now(tz=timezone.utc) + ) + + +class MessageCreateRequest(MessageBase): + pass + + +class MessageService: + def __init__(self, base_dir: 
Path) -> None: + """Initialize message service. + + Args: + base_dir: Base directory to store message data + """ + self._base_dir = base_dir + self._threads_dir = base_dir / "threads" + + def create(self, thread_id: ThreadId, request: MessageCreateRequest) -> Message: + messages = self.list_( + thread_id, ListQuery(limit=MAX_MESSAGES_PER_THREAD, order="asc") + ) + new_message = Message( + **request.model_dump(), + thread_id=thread_id, + ) + self.save(thread_id, messages + [new_message]) + return new_message + + def delete(self, thread_id: ThreadId, message_id: MessageId) -> None: + messages = self.list_( + thread_id, ListQuery(limit=MAX_MESSAGES_PER_THREAD, order="asc") + ) + filtered_messages = [m for m in messages if m.id != message_id] + if len(filtered_messages) == len(messages): + error_msg = f"Message {message_id} not found in thread {thread_id}" + raise ValueError(error_msg) + self.save(thread_id, filtered_messages) + + def list_(self, thread_id: ThreadId, query: ListQuery) -> list[Message]: + thread_file = self._threads_dir / f"{thread_id}.jsonl" + if not thread_file.exists(): + error_msg = f"Thread {thread_id} not found" + raise FileNotFoundError(error_msg) + + messages: list[Message] = [] + with thread_file.open("r") as f: + for line in f: + msg = Message.model_validate_json(line) + messages.append(msg) + + # Sort by creation date + messages = sorted( + messages, key=lambda m: m.created_at, reverse=(query.order == "desc") + ) + + # Apply before/after filters + if query.after: + messages = [m for m in messages if m.id > query.after] + if query.before: + messages = [m for m in messages if m.id < query.before] + + # Apply limit + return messages[: query.limit] + + def _get_thread_path(self, thread_id: ThreadId) -> Path: + thread_path = self._threads_dir / f"{thread_id}.jsonl" + if not thread_path.exists(): + error_msg = f"Thread {thread_id} not found" + raise FileNotFoundError(error_msg) + return thread_path + + def save(self, thread_id: ThreadId, messages: 
list[Message]) -> None: + if len(messages) > MAX_MESSAGES_PER_THREAD: + error_msg = f"Thread {thread_id} has too many messages" + raise ValueError(error_msg) + messages = sorted(messages, key=lambda m: m.created_at) + thread_path = self._get_thread_path(thread_id) + with thread_path.open("w") as f: + for msg in messages: + f.write(msg.model_dump_json()) + f.write("\n") diff --git a/src/chat/api/models.py b/src/chat/api/models.py new file mode 100644 index 00000000..3d4793ff --- /dev/null +++ b/src/chat/api/models.py @@ -0,0 +1,54 @@ +from dataclasses import dataclass +from typing import Annotated, Generic, Literal, Sequence + +from fastapi import Depends, Query +from pydantic import AwareDatetime, BaseModel, PlainSerializer +from typing_extensions import TypeVar + +UnixDatetime = Annotated[ + AwareDatetime, + PlainSerializer( + lambda v: int(v.timestamp()), + return_type=int, + ), +] + + +AssistantId = str +FileId = str +MessageId = str +RunId = str +ThreadId = str + + +ListOrder = Literal["asc", "desc"] +MAX_MESSAGES_PER_THREAD = 100 + + +@dataclass(kw_only=True) +class ListQuery: + limit: Annotated[int, Query(ge=1, le=MAX_MESSAGES_PER_THREAD)] = 20 + after: Annotated[str | None, Query()] = None + before: Annotated[str | None, Query()] = None + order: Annotated[ListOrder, Query()] = "desc" + + +ListQueryDep = Depends(ListQuery) + + +ObjectType = TypeVar("ObjectType", bound=BaseModel) + + +class ListResponse(BaseModel, Generic[ObjectType]): + object: Literal["list"] = "list" + data: Sequence[ObjectType] + first_id: str | None = None + last_id: str | None = None + has_more: bool = False + + +class DoNotPatch(BaseModel): + pass + + +DO_NOT_PATCH = DoNotPatch() diff --git a/src/askui/chat/api/threads/__init__.py b/src/chat/api/runs/__init__.py similarity index 100% rename from src/askui/chat/api/threads/__init__.py rename to src/chat/api/runs/__init__.py diff --git a/src/askui/chat/api/runs/dependencies.py b/src/chat/api/runs/dependencies.py similarity index 72% 
rename from src/askui/chat/api/runs/dependencies.py rename to src/chat/api/runs/dependencies.py index 772c2545..440f07d4 100644 --- a/src/askui/chat/api/runs/dependencies.py +++ b/src/chat/api/runs/dependencies.py @@ -1,7 +1,7 @@ from fastapi import Depends -from askui.chat.api.dependencies import SettingsDep -from askui.chat.api.settings import Settings +from chat.api.dependencies import SettingsDep +from chat.api.settings import Settings from .service import RunService diff --git a/src/chat/api/runs/models.py b/src/chat/api/runs/models.py new file mode 100644 index 00000000..a2a45190 --- /dev/null +++ b/src/chat/api/runs/models.py @@ -0,0 +1,58 @@ +from datetime import datetime, timedelta, timezone +from typing import Literal + +from pydantic import BaseModel, Field, computed_field + +from chat.api.models import AssistantId, RunId, ThreadId, UnixDatetime +from chat.api.utils import generate_time_ordered_id + +RunStatus = Literal[ + "queued", + "in_progress", + "completed", + "cancelling", + "cancelled", + "failed", + "expired", +] + + +class RunError(BaseModel): + message: str + code: Literal["server_error", "rate_limit_exceeded", "invalid_prompt"] + + +class Run(BaseModel): + assistant_id: AssistantId + cancelled_at: UnixDatetime | None = None + completed_at: UnixDatetime | None = None + created_at: UnixDatetime = Field( + default_factory=lambda: datetime.now(tz=timezone.utc) + ) + expires_at: UnixDatetime = Field( + default_factory=lambda: datetime.now(tz=timezone.utc) + timedelta(minutes=10) + ) + failed_at: UnixDatetime | None = None + id: RunId = Field(default_factory=lambda: generate_time_ordered_id("run")) + last_error: RunError | None = None + object: Literal["thread.run"] = "thread.run" + started_at: UnixDatetime | None = None + thread_id: ThreadId + tried_cancelling_at: UnixDatetime | None = None + + @computed_field # type: ignore[prop-decorator] + @property + def status(self) -> RunStatus: + if self.cancelled_at: + return "cancelled" + if 
self.failed_at: + return "failed" + if self.completed_at: + return "completed" + if self.expires_at and self.expires_at < datetime.now(tz=timezone.utc): + return "expired" + if self.tried_cancelling_at: + return "cancelling" + if self.started_at: + return "in_progress" + return "queued" diff --git a/src/askui/chat/api/runs/router.py b/src/chat/api/runs/router.py similarity index 69% rename from src/askui/chat/api/runs/router.py rename to src/chat/api/runs/router.py index c3ee94b7..c6def410 100644 --- a/src/askui/chat/api/runs/router.py +++ b/src/chat/api/runs/router.py @@ -5,15 +5,15 @@ from fastapi.responses import JSONResponse, StreamingResponse from pydantic import BaseModel -if TYPE_CHECKING: - from askui.chat.api.messages.service import MessageEvent +from chat.api.models import ListQuery, ListQueryDep, ListResponse, RunId, ThreadId +from chat.api.runs.service import CreateRunRequest from .dependencies import RunServiceDep -from .service import Run, RunEvent, RunListResponse, RunService - +from .models import Run +from .service import RunService -class CreateRunRequest(BaseModel): - stream: bool = False +if TYPE_CHECKING: + from .runner.events import Events router = APIRouter(prefix="/threads/{thread_id}/runs", tags=["runs"]) @@ -21,7 +21,7 @@ class CreateRunRequest(BaseModel): @router.post("") def create_run( - thread_id: Annotated[str, Path(...)], + thread_id: Annotated[ThreadId, Path(...)], request: Annotated[CreateRunRequest, Body(...)], run_service: RunService = RunServiceDep, ) -> Response: @@ -29,15 +29,21 @@ def create_run( Create a new run for a given thread. 
""" stream = request.stream - run_or_async_generator = run_service.create(thread_id, stream) + run_or_async_generator = run_service.create(thread_id, stream, request) if stream: async_generator = cast( - "AsyncGenerator[RunEvent | MessageEvent, None]", run_or_async_generator + "AsyncGenerator[Events, None]", + run_or_async_generator, ) async def sse_event_stream() -> AsyncGenerator[str, None]: async for event in async_generator: - yield f"event: {event.event}\ndata: {event.model_dump_json()}\n\n" + data = ( + event.data.model_dump_json() + if isinstance(event.data, BaseModel) + else event.data + ) + yield f"event: {event.event}\ndata: {data}\n\n" return StreamingResponse( status_code=status.HTTP_201_CREATED, @@ -50,7 +56,7 @@ async def sse_event_stream() -> AsyncGenerator[str, None]: @router.get("/{run_id}") def retrieve_run( - run_id: Annotated[str, Path(...)], + run_id: Annotated[RunId, Path(...)], run_service: RunService = RunServiceDep, ) -> Run: """ @@ -64,18 +70,19 @@ def retrieve_run( @router.get("") def list_runs( - thread_id: Annotated[str, Path(...)], + thread_id: Annotated[ThreadId, Path(...)], + query: ListQuery = ListQueryDep, run_service: RunService = RunServiceDep, -) -> RunListResponse: +) -> ListResponse[Run]: """ List runs, optionally filtered by thread. 
""" - return run_service.list_(thread_id) + return run_service.list_(thread_id, query=query) @router.post("/{run_id}/cancel") def cancel_run( - run_id: Annotated[str, Path(...)], + run_id: Annotated[RunId, Path(...)], run_service: RunService = RunServiceDep, ) -> Run: """ diff --git a/src/chat/api/runs/runner/__init__.py b/src/chat/api/runs/runner/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/chat/api/runs/runner/events/__init__.py b/src/chat/api/runs/runner/events/__init__.py new file mode 100644 index 00000000..89dabf59 --- /dev/null +++ b/src/chat/api/runs/runner/events/__init__.py @@ -0,0 +1,15 @@ +from chat.api.runs.runner.events.done_events import DoneEvent +from chat.api.runs.runner.events.error_events import ErrorEvent +from chat.api.runs.runner.events.event_base import EventBase +from chat.api.runs.runner.events.events import Events +from chat.api.runs.runner.events.message_events import MessageEvent +from chat.api.runs.runner.events.run_events import RunEvent + +__all__ = [ + "DoneEvent", + "ErrorEvent", + "EventBase", + "Events", + "MessageEvent", + "RunEvent", +] diff --git a/src/chat/api/runs/runner/events/done_events.py b/src/chat/api/runs/runner/events/done_events.py new file mode 100644 index 00000000..88b86a82 --- /dev/null +++ b/src/chat/api/runs/runner/events/done_events.py @@ -0,0 +1,8 @@ +from typing import Literal + +from chat.api.runs.runner.events.event_base import EventBase + + +class DoneEvent(EventBase): + event: Literal["done"] = "done" + data: Literal["[DONE]"] = "[DONE]" diff --git a/src/chat/api/runs/runner/events/error_events.py b/src/chat/api/runs/runner/events/error_events.py new file mode 100644 index 00000000..efb536be --- /dev/null +++ b/src/chat/api/runs/runner/events/error_events.py @@ -0,0 +1,18 @@ +from typing import Literal + +from pydantic import BaseModel + +from chat.api.runs.runner.events.event_base import EventBase + + +class ErrorEventDataError(BaseModel): + message: str + + +class 
ErrorEventData(BaseModel): + error: ErrorEventDataError + + +class ErrorEvent(EventBase): + event: Literal["error"] = "error" + data: ErrorEventData diff --git a/src/askui/chat/api/models.py b/src/chat/api/runs/runner/events/event_base.py similarity index 78% rename from src/askui/chat/api/models.py rename to src/chat/api/runs/runner/events/event_base.py index 81d23f14..60250480 100644 --- a/src/askui/chat/api/models.py +++ b/src/chat/api/runs/runner/events/event_base.py @@ -3,5 +3,5 @@ from pydantic import BaseModel -class Event(BaseModel): +class EventBase(BaseModel): object: Literal["event"] = "event" diff --git a/src/chat/api/runs/runner/events/events.py b/src/chat/api/runs/runner/events/events.py new file mode 100644 index 00000000..b11e2b12 --- /dev/null +++ b/src/chat/api/runs/runner/events/events.py @@ -0,0 +1,6 @@ +from chat.api.runs.runner.events.done_events import DoneEvent +from chat.api.runs.runner.events.error_events import ErrorEvent +from chat.api.runs.runner.events.message_events import MessageEvent +from chat.api.runs.runner.events.run_events import RunEvent + +Events = DoneEvent | ErrorEvent | MessageEvent | RunEvent diff --git a/src/chat/api/runs/runner/events/message_events.py b/src/chat/api/runs/runner/events/message_events.py new file mode 100644 index 00000000..3a1b2d88 --- /dev/null +++ b/src/chat/api/runs/runner/events/message_events.py @@ -0,0 +1,9 @@ +from typing import Literal + +from chat.api.messages.service import Message +from chat.api.runs.runner.events.event_base import EventBase + + +class MessageEvent(EventBase): + data: Message + event: Literal["thread.message.created"] diff --git a/src/chat/api/runs/runner/events/run_events.py b/src/chat/api/runs/runner/events/run_events.py new file mode 100644 index 00000000..033ef15f --- /dev/null +++ b/src/chat/api/runs/runner/events/run_events.py @@ -0,0 +1,18 @@ +from typing import Literal + +from chat.api.runs.models import Run +from chat.api.runs.runner.events.event_base import 
EventBase + + +class RunEvent(EventBase): + data: Run + event: Literal[ + "thread.run.created", + "thread.run.queued", + "thread.run.in_progress", + "thread.run.completed", + "thread.run.failed", + "thread.run.cancelling", + "thread.run.cancelled", + "thread.run.expired", + ] diff --git a/src/chat/api/runs/runner/runner.py b/src/chat/api/runs/runner/runner.py new file mode 100644 index 00000000..f300eb1c --- /dev/null +++ b/src/chat/api/runs/runner/runner.py @@ -0,0 +1,266 @@ +import logging +import queue +import time +from datetime import datetime, timezone +from pathlib import Path +from typing import TYPE_CHECKING + +from askui.agent import VisionAgent +from askui.models.shared.computer_agent_cb_param import OnMessageCbParam +from askui.models.shared.computer_agent_message_param import ( + Base64ImageSourceParam, + ImageBlockParam, + MessageParam, + TextBlockParam, +) +from askui.tools.pynput.pynput_agent_os import PynputAgentOs +from askui.utils.image_utils import ImageSource +from chat.api.messages.service import MessageCreateRequest, MessageService +from chat.api.models import MAX_MESSAGES_PER_THREAD, ListQuery +from chat.api.runs.models import Run, RunError +from chat.api.runs.runner.events.done_events import DoneEvent +from chat.api.runs.runner.events.error_events import ( + ErrorEvent, + ErrorEventData, + ErrorEventDataError, +) +from chat.api.runs.runner.events.events import Events +from chat.api.runs.runner.events.message_events import MessageEvent +from chat.api.runs.runner.events.run_events import RunEvent + +if TYPE_CHECKING: + from askui.tools.agent_os import InputEvent + +logger = logging.getLogger(__name__) + + +ASKUI_VISION_AGENT_ID = "asst_ge3tiojsga3dgnruge3di2u5ov36shedkcslxnmca" +HUMAN_AGENT_ID = "asst_ge3tiojsga3dgnruge3di2u5ov36shedkcslxnmcb" + + +class Runner: + def __init__(self, run: Run, base_dir: Path) -> None: + self._run = run + self._base_dir = base_dir + self._runs_dir = base_dir / "runs" + self._msg_service = 
MessageService(self._base_dir) + self._agent_os = PynputAgentOs() + + def _run_human_agent(self, event_queue: queue.Queue[Events]) -> None: + message = self._msg_service.create( + thread_id=self._run.thread_id, + request=MessageCreateRequest( + role="user", + content=[ + TextBlockParam( + type="text", + text="Let me take over and show you what I want you to do...", + ), + ], + run_id=self._run.id, + ), + ) + event_queue.put( + MessageEvent( + data=message, + event="thread.message.created", + ) + ) + self._agent_os.start_listening() + screenshot = self._agent_os.screenshot() + time.sleep(0.1) + recorded_events: list[InputEvent] = [] + while True: + updated_run = self._retrieve_run() + if self._should_abort(updated_run): + break + while event := self._agent_os.poll_event(): + if self._should_abort(updated_run): + break + if not event.pressed: + recorded_events.append(event) + button = ( + f'the "{event.button}" mouse button' + if event.button != "unknown" + else "a mouse button" + ) + message = self._msg_service.create( + thread_id=self._run.thread_id, + request=MessageCreateRequest( + role="user", + content=[ + ImageBlockParam( + type="image", + source=Base64ImageSourceParam( + data=ImageSource(screenshot).to_base64(), + media_type="image/png", + ), + ), + TextBlockParam( + type="text", + text=( + f"I moved the mouse to x={event.x}, " + f"y={event.y} and clicked {button}." + ), + ), + ], + run_id=self._run.id, + ), + ) + event_queue.put( + MessageEvent( + data=message, + event="thread.message.created", + ) + ) + screenshot = self._agent_os.screenshot() + time.sleep(0.1) + self._agent_os.stop_listening() + if len(recorded_events) == 0: + text = "Nevermind, I didn't do anything." 
+ message = self._msg_service.create( + thread_id=self._run.thread_id, + request=MessageCreateRequest( + role="user", + content=[ + TextBlockParam( + type="text", + text=text, + ) + ], + run_id=self._run.id, + ), + ) + event_queue.put( + MessageEvent( + data=message, + event="thread.message.created", + ) + ) + + def _run_askui_vision_agent(self, event_queue: queue.Queue[Events]) -> None: + messages: list[MessageParam] = [ + MessageParam( + role=msg.role, + content=msg.content, + ) + for msg in self._msg_service.list_( + thread_id=self._run.thread_id, + query=ListQuery(limit=MAX_MESSAGES_PER_THREAD, order="asc"), + ) + ] + + def on_message( + on_message_cb_param: OnMessageCbParam, + ) -> MessageParam | None: + message = self._msg_service.create( + thread_id=self._run.thread_id, + request=MessageCreateRequest( + assistant_id=self._run.assistant_id + if on_message_cb_param.message.role == "assistant" + else None, + role=on_message_cb_param.message.role, + content=on_message_cb_param.message.content, + run_id=self._run.id, + ), + ) + event_queue.put( + MessageEvent( + data=message, + event="thread.message.created", + ) + ) + updated_run = self._retrieve_run() + if self._should_abort(updated_run): + return None + return on_message_cb_param.message + + with VisionAgent() as agent: + agent.act( + messages, + on_message=on_message, + ) + + def run( + self, + event_queue: queue.Queue[Events], + ) -> None: + self._mark_run_as_started() + event_queue.put( + RunEvent( + data=self._run, + event="thread.run.in_progress", + ) + ) + try: + if self._run.assistant_id == HUMAN_AGENT_ID: + self._run_human_agent(event_queue) + elif self._run.assistant_id == ASKUI_VISION_AGENT_ID: + self._run_askui_vision_agent(event_queue) + updated_run = self._retrieve_run() + if updated_run.status == "in_progress": + updated_run.completed_at = datetime.now(tz=timezone.utc) + self._update_run_file(updated_run) + event_queue.put( + RunEvent( + data=updated_run, + event="thread.run.completed", + ) + ) + 
if updated_run.status == "cancelling": + event_queue.put( + RunEvent( + data=updated_run, + event="thread.run.cancelling", + ) + ) + updated_run.cancelled_at = datetime.now(tz=timezone.utc) + self._update_run_file(updated_run) + event_queue.put( + RunEvent( + data=updated_run, + event="thread.run.cancelled", + ) + ) + if updated_run.status == "expired": + event_queue.put( + RunEvent( + data=updated_run, + event="thread.run.expired", + ) + ) + event_queue.put(DoneEvent()) + except Exception as e: # noqa: BLE001 + logger.exception("Exception in runner") + updated_run = self._retrieve_run() + updated_run.failed_at = datetime.now(tz=timezone.utc) + updated_run.last_error = RunError(message=str(e), code="server_error") + self._update_run_file(updated_run) + event_queue.put( + RunEvent( + data=updated_run, + event="thread.run.failed", + ) + ) + event_queue.put( + ErrorEvent( + data=ErrorEventData(error=ErrorEventDataError(message=str(e))) + ) + ) + + def _mark_run_as_started(self) -> None: + self._run.started_at = datetime.now(tz=timezone.utc) + self._update_run_file(self._run) + + def _should_abort(self, run: Run) -> bool: + return run.status in ("cancelled", "cancelling", "expired") + + def _update_run_file(self, run: Run) -> None: + run_file = self._runs_dir / f"{run.thread_id}__{run.id}.json" + with run_file.open("w") as f: + f.write(run.model_dump_json()) + + def _retrieve_run(self) -> Run: + run_file = self._runs_dir / f"{self._run.thread_id}__{self._run.id}.json" + with run_file.open("r") as f: + return Run.model_validate_json(f.read()) diff --git a/src/chat/api/runs/service.py b/src/chat/api/runs/service.py new file mode 100644 index 00000000..a3469896 --- /dev/null +++ b/src/chat/api/runs/service.py @@ -0,0 +1,159 @@ +import asyncio +import queue +import threading +from collections.abc import AsyncGenerator +from datetime import datetime, timezone +from pathlib import Path +from typing import Literal, overload + +from pydantic import BaseModel + +from 
chat.api.models import AssistantId, ListQuery, ListResponse, RunId, ThreadId +from chat.api.runs.models import Run +from chat.api.runs.runner.events import Events +from chat.api.runs.runner.events.done_events import DoneEvent +from chat.api.runs.runner.events.error_events import ErrorEvent +from chat.api.runs.runner.events.run_events import RunEvent +from chat.api.runs.runner.runner import Runner + + +class CreateRunRequest(BaseModel): + assistant_id: AssistantId + stream: bool = True + + +class RunService: + """ + Service for managing runs. Handles creation, retrieval, listing, and + cancellation of runs. + """ + + def __init__(self, base_dir: Path) -> None: + self._base_dir = base_dir + self._runs_dir = base_dir / "runs" + + def _run_path(self, thread_id: ThreadId, run_id: RunId) -> Path: + return self._runs_dir / f"{thread_id}__{run_id}.json" + + def _create_run(self, thread_id: ThreadId, request: CreateRunRequest) -> Run: + run = Run(thread_id=thread_id, assistant_id=request.assistant_id) + self._runs_dir.mkdir(parents=True, exist_ok=True) + self._update_run_file(run) + return run + + @overload + def create( + self, thread_id: ThreadId, stream: Literal[False], request: CreateRunRequest + ) -> Run: ... + + @overload + def create( + self, thread_id: ThreadId, stream: Literal[True], request: CreateRunRequest + ) -> AsyncGenerator[Events, None]: ... + + @overload + def create( + self, thread_id: ThreadId, stream: bool, request: CreateRunRequest + ) -> Run | AsyncGenerator[Events, None]: ... 
+ + def create( + self, thread_id: ThreadId, stream: bool, request: CreateRunRequest + ) -> Run | AsyncGenerator[Events, None]: + run = self._create_run(thread_id, request) + event_queue: queue.Queue[Events] = queue.Queue() + runner = Runner(run, self._base_dir) + thread = threading.Thread(target=runner.run, args=(event_queue,), daemon=True) + thread.start() + if stream: + + async def event_stream() -> AsyncGenerator[Events, None]: + yield RunEvent( + # run already in progress instead of queued which is + # different from OpenAI + data=run, + event="thread.run.created", + ) + yield RunEvent( + # run already in progress instead of queued which is + # different from OpenAI + data=run, + event="thread.run.queued", + ) + loop = asyncio.get_event_loop() + while True: + event = await loop.run_in_executor(None, event_queue.get) + yield event + if isinstance(event, DoneEvent) or isinstance(event, ErrorEvent): + break + + return event_stream() + return run + + def _update_run_file(self, run: Run) -> None: + run_file = self._run_path(run.thread_id, run.id) + with run_file.open("w") as f: + f.write(run.model_dump_json()) + + def retrieve(self, run_id: RunId) -> Run: + # Find the file by run_id + for f in self._runs_dir.glob(f"*__{run_id}.json"): + with f.open("r") as file: + return Run.model_validate_json(file.read()) + error_msg = f"Run {run_id} not found" + raise FileNotFoundError(error_msg) + + def list_(self, thread_id: ThreadId, query: ListQuery) -> ListResponse[Run]: + """List runs, optionally filtered by thread. 
+ + Args: + thread_id (ThreadId): ID of thread to filter runs by + query (ListQuery): Query parameters for listing runs + + Returns: + ListResponse[Run]: ListResponse containing runs sorted by creation date + """ + if not self._runs_dir.exists(): + return ListResponse(data=[]) + + run_files = list(self._runs_dir.glob(f"{thread_id}__*.json")) + + runs: list[Run] = [] + for f in run_files: + with f.open("r") as file: + runs.append(Run.model_validate_json(file.read())) + + # Sort by creation date + runs = sorted( + runs, + key=lambda r: r.created_at, + reverse=(query.order == "desc"), + ) + + # Apply before/after filters + if query.after: + runs = [r for r in runs if r.id > query.after] + if query.before: + runs = [r for r in runs if r.id < query.before] + + # Apply limit + runs = runs[: query.limit] + + return ListResponse( + data=runs, + first_id=runs[0].id if runs else None, + last_id=runs[-1].id if runs else None, + has_more=len(run_files) > query.limit, + ) + + def cancel(self, run_id: RunId) -> Run: + run = self.retrieve(run_id) + if run.status in ("cancelled", "cancelling", "completed", "failed", "expired"): + return run + run.tried_cancelling_at = datetime.now(tz=timezone.utc) + for f in self._runs_dir.glob(f"*__{run_id}.json"): + with f.open("w") as file: + file.write(run.model_dump_json()) + return run + # Find the file by run_id + error_msg = f"Run {run_id} not found" + raise FileNotFoundError(error_msg) diff --git a/src/askui/chat/api/settings.py b/src/chat/api/settings.py similarity index 100% rename from src/askui/chat/api/settings.py rename to src/chat/api/settings.py diff --git a/src/chat/api/threads/__init__.py b/src/chat/api/threads/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/chat/api/threads/dependencies.py b/src/chat/api/threads/dependencies.py new file mode 100644 index 00000000..926fdf9e --- /dev/null +++ b/src/chat/api/threads/dependencies.py @@ -0,0 +1,21 @@ +from fastapi import Depends + +from 
chat.api.dependencies import SettingsDep +from chat.api.messages.dependencies import MessageServiceDep +from chat.api.messages.service import MessageService +from chat.api.settings import Settings +from chat.api.threads.service import ThreadService + + +def get_thread_service( + settings: Settings = SettingsDep, + message_service: MessageService = MessageServiceDep, +) -> ThreadService: + """Get ThreadService instance.""" + return ThreadService( + base_dir=settings.data_dir, + message_service=message_service, + ) + + +ThreadServiceDep = Depends(get_thread_service) diff --git a/src/askui/chat/api/threads/router.py b/src/chat/api/threads/router.py similarity index 58% rename from src/askui/chat/api/threads/router.py rename to src/chat/api/threads/router.py index f899863e..e018440c 100644 --- a/src/askui/chat/api/threads/router.py +++ b/src/chat/api/threads/router.py @@ -1,31 +1,38 @@ from fastapi import APIRouter, HTTPException, status -from askui.chat.api.threads.dependencies import ThreadServiceDep -from askui.chat.api.threads.service import Thread, ThreadListResponse, ThreadService +from chat.api.models import ListQuery, ListQueryDep, ListResponse, ThreadId +from chat.api.threads.dependencies import ThreadServiceDep +from chat.api.threads.service import ( + Thread, + ThreadCreateRequest, + ThreadModifyRequest, + ThreadService, +) router = APIRouter(prefix="/threads", tags=["threads"]) @router.get("") def list_threads( - limit: int | None = None, + query: ListQuery = ListQueryDep, thread_service: ThreadService = ThreadServiceDep, -) -> ThreadListResponse: +) -> ListResponse[Thread]: """List all threads.""" - return thread_service.list_(limit=limit) + return thread_service.list_(query=query) @router.post("", status_code=status.HTTP_201_CREATED) def create_thread( + request: ThreadCreateRequest, thread_service: ThreadService = ThreadServiceDep, ) -> Thread: """Create a new thread.""" - return thread_service.create() + return thread_service.create(request=request) 
@router.get("/{thread_id}") def retrieve_thread( - thread_id: str, + thread_id: ThreadId, thread_service: ThreadService = ThreadServiceDep, ) -> Thread: """Get a thread by ID.""" @@ -37,7 +44,7 @@ def retrieve_thread( @router.delete("/{thread_id}", status_code=status.HTTP_204_NO_CONTENT) def delete_thread( - thread_id: str, + thread_id: ThreadId, thread_service: ThreadService = ThreadServiceDep, ) -> None: """Delete a thread.""" @@ -45,3 +52,13 @@ def delete_thread( thread_service.delete(thread_id) except FileNotFoundError as e: raise HTTPException(status_code=404, detail=str(e)) from e + + +@router.post("/{thread_id}") +def modify_thread( + thread_id: ThreadId, + request: ThreadModifyRequest, + thread_service: ThreadService = ThreadServiceDep, +) -> Thread: + """Modify a thread.""" + return thread_service.modify(thread_id, request) diff --git a/src/chat/api/threads/service.py b/src/chat/api/threads/service.py new file mode 100644 index 00000000..1ce88aef --- /dev/null +++ b/src/chat/api/threads/service.py @@ -0,0 +1,157 @@ +from datetime import datetime, timezone +from pathlib import Path +from typing import Literal + +from pydantic import BaseModel, Field + +from chat.api.messages.service import MessageCreateRequest, MessageService +from chat.api.models import DoNotPatch, ListQuery, ListResponse, ThreadId, UnixDatetime +from chat.api.utils import generate_time_ordered_id + + +class Thread(BaseModel): + """A chat thread/session.""" + + id: ThreadId = Field(default_factory=lambda: generate_time_ordered_id("thread")) + created_at: UnixDatetime = Field( + default_factory=lambda: datetime.now(tz=timezone.utc) + ) + name: str | None = None + object: Literal["thread"] = "thread" + + +class ThreadCreateRequest(BaseModel): + name: str | None = None + messages: list[MessageCreateRequest] | None = None + + +class ThreadModifyRequest(BaseModel): + name: str | None | DoNotPatch = DoNotPatch() + + +class ThreadService: + """Service for managing chat threads/sessions.""" + + 
def __init__( + self, + base_dir: Path, + message_service: MessageService, + ) -> None: + """Initialize thread service. + + Args: + base_dir: Base directory to store thread data + """ + self._base_dir = base_dir + self._threads_dir = base_dir / "threads" + self._message_service = message_service + + def create(self, request: ThreadCreateRequest) -> Thread: + """Create a new thread. + + Returns: + Created thread object + """ + thread = Thread(name=request.name) + self._threads_dir.mkdir(parents=True, exist_ok=True) + thread_file = self._threads_dir / f"{thread.id}.json" + thread_file.write_text(thread.model_dump_json()) + thread_messages_file = self._threads_dir / f"{thread.id}.jsonl" + thread_messages_file.touch() + if request.messages: + for message in request.messages: + self._message_service.create( + thread_id=thread.id, + request=message, + ) + return thread + + def list_(self, query: ListQuery) -> ListResponse[Thread]: + """List all available threads. + + Args: + query (ListQuery): Query parameters for listing threads + + Returns: + ListResponse[Thread]: ListResponse containing threads sorted by creation + date + """ + if not self._threads_dir.exists(): + return ListResponse(data=[]) + + thread_files = list(self._threads_dir.glob("*.json")) + threads: list[Thread] = [] + for f in thread_files: + thread = Thread.model_validate_json(f.read_text()) + threads.append(thread) + + # Sort by creation date + threads = sorted( + threads, key=lambda t: t.created_at, reverse=(query.order == "desc") + ) + + # Apply before/after filters + if query.after: + threads = [t for t in threads if t.id > query.after] + if query.before: + threads = [t for t in threads if t.id < query.before] + + # Apply limit + threads = threads[: query.limit] + + return ListResponse( + data=threads, + first_id=threads[0].id if threads else None, + last_id=threads[-1].id if threads else None, + has_more=len(thread_files) > query.limit, + ) + + def retrieve(self, thread_id: ThreadId) -> Thread: + 
"""Retrieve a thread by ID. + + Args: + thread_id: ID of thread to retrieve + + Returns: + Thread object + + Raises: + FileNotFoundError: If thread doesn't exist + """ + thread_file = self._threads_dir / f"{thread_id}.json" + if not thread_file.exists(): + error_msg = f"Thread {thread_id} not found" + raise FileNotFoundError(error_msg) + return Thread.model_validate_json(thread_file.read_text()) + + def delete(self, thread_id: ThreadId) -> None: + """Delete a thread and all its associated files. + + Args: + thread_id (ThreadId): ID of thread to delete + + Raises: + FileNotFoundError: If thread doesn't exist + """ + thread_file = self._threads_dir / f"{thread_id}.json" + if not thread_file.exists(): + error_msg = f"Thread {thread_id} not found" + raise FileNotFoundError(error_msg) + thread_messages_file = self._threads_dir / f"{thread_id}.jsonl" + if thread_messages_file.exists(): + thread_messages_file.unlink() + thread_file.unlink() + + def modify(self, thread_id: ThreadId, request: ThreadModifyRequest) -> Thread: + """Modify a thread. + + Args: + thread_id (ThreadId): ID of thread to modify + request (ThreadModifyRequest): Request containing the new name + """ + thread = self.retrieve(thread_id) + if not isinstance(request.name, DoNotPatch): + thread.name = request.name + thread_file = self._threads_dir / f"{thread_id}.json" + thread_file.write_text(thread.model_dump_json()) + return thread diff --git a/src/askui/chat/api/utils.py b/src/chat/api/utils.py similarity index 100% rename from src/askui/chat/api/utils.py rename to src/chat/api/utils.py diff --git a/src/chat/ui/.gitignore b/src/chat/ui/.gitignore new file mode 100644 index 00000000..5ab2f9bd --- /dev/null +++ b/src/chat/ui/.gitignore @@ -0,0 +1,33 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 
+ +# dependencies +/node_modules +/.pnp +.pnp.js +.yarn/install-state.gz + +# testing +/coverage + +# next.js +/.next/ +/out/ + +# production +/build + +# misc +.DS_Store +*.pem + +# debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# vercel +.vercel + +# typescript +*.tsbuildinfo +next-env.d.ts diff --git a/src/chat/ui/app/globals.css b/src/chat/ui/app/globals.css new file mode 100644 index 00000000..20b1c1db --- /dev/null +++ b/src/chat/ui/app/globals.css @@ -0,0 +1,82 @@ +@tailwind base; +@tailwind components; +@tailwind utilities; + +:root { + --foreground-rgb: 0, 0, 0; + --background-start-rgb: 214, 219, 220; + --background-end-rgb: 255, 255, 255; +} + +@media (prefers-color-scheme: dark) { + :root { + --foreground-rgb: 255, 255, 255; + --background-start-rgb: 0, 0, 0; + --background-end-rgb: 0, 0, 0; + } +} + +@layer base { + :root { + --background: 0 0% 100%; + --foreground: 0 0% 3.9%; + --card: 0 0% 100%; + --card-foreground: 0 0% 3.9%; + --popover: 0 0% 100%; + --popover-foreground: 0 0% 3.9%; + --primary: 0 0% 9%; + --primary-foreground: 0 0% 98%; + --secondary: 0 0% 96.1%; + --secondary-foreground: 0 0% 9%; + --muted: 0 0% 96.1%; + --muted-foreground: 0 0% 45.1%; + --accent: 0 0% 96.1%; + --accent-foreground: 0 0% 9%; + --destructive: 0 84.2% 60.2%; + --destructive-foreground: 0 0% 98%; + --border: 0 0% 89.8%; + --input: 0 0% 89.8%; + --ring: 0 0% 3.9%; + --chart-1: 12 76% 61%; + --chart-2: 173 58% 39%; + --chart-3: 197 37% 24%; + --chart-4: 43 74% 66%; + --chart-5: 27 87% 67%; + --radius: 0.5rem; + } + .dark { + --background: 0 0% 3.9%; + --foreground: 0 0% 98%; + --card: 0 0% 3.9%; + --card-foreground: 0 0% 98%; + --popover: 0 0% 3.9%; + --popover-foreground: 0 0% 98%; + --primary: 0 0% 98%; + --primary-foreground: 0 0% 9%; + --secondary: 0 0% 14.9%; + --secondary-foreground: 0 0% 98%; + --muted: 0 0% 14.9%; + --muted-foreground: 0 0% 63.9%; + --accent: 0 0% 14.9%; + --accent-foreground: 0 0% 98%; + --destructive: 0 62.8% 30.6%; + 
--destructive-foreground: 0 0% 98%; + --border: 0 0% 14.9%; + --input: 0 0% 14.9%; + --ring: 0 0% 83.1%; + --chart-1: 220 70% 50%; + --chart-2: 160 60% 45%; + --chart-3: 30 80% 55%; + --chart-4: 280 65% 60%; + --chart-5: 340 75% 55%; + } +} + +@layer base { + * { + @apply border-border; + } + body { + @apply bg-background text-foreground; + } +} diff --git a/src/chat/ui/app/layout.tsx b/src/chat/ui/app/layout.tsx new file mode 100644 index 00000000..4ec63bdc --- /dev/null +++ b/src/chat/ui/app/layout.tsx @@ -0,0 +1,31 @@ +import "./globals.css"; +import type { Metadata } from "next"; +import { Inter } from "next/font/google"; +import { ThemeProvider } from "next-themes"; + +const inter = Inter({ subsets: ["latin"] }); + +export const metadata: Metadata = { + title: "AskUI Chat", +}; + +export default function RootLayout({ + children, +}: { + children: React.ReactNode; +}) { + return ( + + + + {children} + + + + ); +} diff --git a/src/chat/ui/app/page.tsx b/src/chat/ui/app/page.tsx new file mode 100644 index 00000000..2f8a007a --- /dev/null +++ b/src/chat/ui/app/page.tsx @@ -0,0 +1,33 @@ +"use client"; + +import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; +import { Toaster } from "sonner"; +import { Sidebar } from "@/components/sidebar/sidebar"; +import { ChatContainer } from "@/components/chat/chat-container"; + +const queryClient = new QueryClient({ + defaultOptions: { + queries: { + staleTime: 1000 * 60 * 5, // 5 minutes + retry: 1, + }, + }, +}); + +function ChatApp() { + return ( +
+ + +
+ ); +} + +export default function Home() { + return ( + + + + + ); +} diff --git a/src/chat/ui/components.json b/src/chat/ui/components.json new file mode 100644 index 00000000..c5974621 --- /dev/null +++ b/src/chat/ui/components.json @@ -0,0 +1,20 @@ +{ + "$schema": "https://ui.shadcn.com/schema.json", + "style": "default", + "rsc": true, + "tsx": true, + "tailwind": { + "config": "tailwind.config.ts", + "css": "app/globals.css", + "baseColor": "neutral", + "cssVariables": true, + "prefix": "" + }, + "aliases": { + "components": "@/components", + "utils": "@/lib/utils", + "ui": "@/components/ui", + "lib": "@/lib", + "hooks": "@/hooks" + } +} diff --git a/src/chat/ui/components/chat/chat-container.tsx b/src/chat/ui/components/chat/chat-container.tsx new file mode 100644 index 00000000..221d5460 --- /dev/null +++ b/src/chat/ui/components/chat/chat-container.tsx @@ -0,0 +1,23 @@ +"use client"; + +import { useChatStore } from "@/lib/store"; +import { EmptyState } from "./empty-state"; +import { ChatHeader } from "./chat-header"; +import { MessageList } from "./message-list"; +import { ChatInput } from "./chat-input"; + +export function ChatContainer() { + const { selectedThread } = useChatStore(); + + if (!selectedThread) { + return ; + } + + return ( +
+ + + +
+ ); +} diff --git a/src/chat/ui/components/chat/chat-header.tsx b/src/chat/ui/components/chat/chat-header.tsx new file mode 100644 index 00000000..16da6302 --- /dev/null +++ b/src/chat/ui/components/chat/chat-header.tsx @@ -0,0 +1,118 @@ +"use client"; + +import { Bot, Zap } from "lucide-react"; +import { useQuery } from "@tanstack/react-query"; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, +} from "@/components/ui/select"; +import { Avatar, AvatarFallback, AvatarImage } from "@/components/ui/avatar"; +import { Badge } from "@/components/ui/badge"; +import { Skeleton } from "@/components/ui/skeleton"; +import { useChatStore } from "@/lib/store"; +import { apiClient } from "@/lib/api"; +import { HUMAN_DEMONSTRATION_AGENT_ID } from "@/lib/constants"; + +export function ChatHeader() { + const { selectedAssistant, setSelectedAssistant, currentRun } = + useChatStore(); + + const { data: assistantsListResponse, isLoading } = useQuery({ + queryKey: ["assistants"], + queryFn: () => + apiClient.listAssistants().then((response) => { + return { + ...response, + data: response.data.filter( + (a) => a.id !== HUMAN_DEMONSTRATION_AGENT_ID + ), + }; + }), + }); + + const handleAssistantChange = (assistantId: string) => { + const assistant = assistantsListResponse?.data.find( + (a) => a.id === assistantId + ); + if (assistant) { + setSelectedAssistant(assistant); + } + }; + + if (isLoading) { + return ( +
+
+ + +
+ +
+ ); + } + + return ( +
+
+ +
+ + {currentRun && ( + + + {currentRun.status === "in_progress" + ? "Thinking..." + : currentRun.status} + + )} +
+ ); +} diff --git a/src/chat/ui/components/chat/chat-input.tsx b/src/chat/ui/components/chat/chat-input.tsx new file mode 100644 index 00000000..32ef0ce6 --- /dev/null +++ b/src/chat/ui/components/chat/chat-input.tsx @@ -0,0 +1,520 @@ +"use client"; + +import { useState, useRef, useCallback } from "react"; +import { + Send, + Plus, + X, + Paperclip, + Square, + MousePointerClick, +} from "lucide-react"; +import { motion, AnimatePresence } from "framer-motion"; +import { useMutation, useQueryClient } from "@tanstack/react-query"; +import { toast } from "sonner"; +import { Button } from "@/components/ui/button"; +import { Textarea } from "@/components/ui/textarea"; +import { + Tooltip, + TooltipContent, + TooltipProvider, + TooltipTrigger, +} from "@/components/ui/tooltip"; +import { useChatStore } from "@/lib/store"; +import { apiClient } from "@/lib/api"; +import { Event } from "@/lib/types"; +import { HUMAN_DEMONSTRATION_AGENT_ID } from "@/lib/constants"; + +interface AttachedFile { + id: string; + file: File; + preview: string; + type: "image"; +} + +let buffer = ""; + +const SseSplitterStream = (): TransformStream => + new TransformStream({ + start() {}, + transform(chunk, controller) { + buffer += chunk; + const parts = buffer.split("\n\n"); + buffer = parts.pop()!; // Keep the last partial event in buffer + + for (const part of parts) { + controller.enqueue(part); + } + }, + flush(controller) {}, + }); + +function parseSseMessage(message: string): Event { + const lines = message.split("\n"); + let type = "message"; + const dataLines: string[] = []; + + for (const line of lines) { + if (line.startsWith("event:")) { + type = line.slice(6).trim(); + } else if (line.startsWith("data:")) { + dataLines.push(line.slice(5).trim()); + } + } + + if (dataLines.length === 0) { + throw new Error("No data field in SSE message"); + } + + const rawData = dataLines.join("\n"); + + try { + switch (type) { + case "thread.run.created": + case "thread.run.queued": + case 
"thread.run.in_progress": + case "thread.run.completed": + case "thread.run.cancelling": + case "thread.run.cancelled": + case "thread.run.failed": + case "thread.run.expired": + return { type, data: JSON.parse(rawData) }; + case "thread.message.created": + return { type, data: JSON.parse(rawData) }; + case "error": + return { type, data: JSON.parse(rawData) }; + case "done": + return { type, data: "[DONE]" }; + default: + throw new Error(`Unknown event type: ${type}`); + } + } catch (e) { + throw new Error( + `Failed to parse SSE data of event "${type}": ${ + e instanceof Error ? e.message : String(e) + }: ${rawData}` + ); + } +} + +export function ChatInput() { + const [message, setMessage] = useState(""); + const [attachedFiles, setAttachedFiles] = useState([]); + const [isDragOver, setIsDragOver] = useState(false); + const [runningAction, setRunningAction] = useState<"send" | "demo" | null>( + null + ); + const textareaRef = useRef(null); + const fileInputRef = useRef(null); + const queryClient = useQueryClient(); + + const { + selectedThread, + selectedAssistant, + currentRun, + setCurrentRun, + appendMessage, + clearMessages, + } = useChatStore(); + + const createMessageMutation = useMutation({ + mutationFn: async (data: { content: any; role: "user" }) => { + if (!selectedThread) throw new Error("No thread selected"); + return apiClient.createMessage(selectedThread.id, data); + }, + onSuccess: () => { + queryClient.invalidateQueries({ + queryKey: ["messages", selectedThread?.id], + }); + }, + onError: (error) => { + toast.error(`Failed to send message: ${error}`); + }, + }); + + const createRunMutation = useMutation({ + mutationFn: async (assistantId: string) => { + if (!selectedThread || !assistantId) { + throw new Error("Thread and assistant required"); + } + + clearMessages(); + const response = await fetch( + `${ + process.env.NEXT_PUBLIC_API_URL || "http://localhost:8000" + }/v1/threads/${selectedThread.id}/runs`, + { + method: "POST", + headers: { + 
"Content-Type": "application/json", + }, + body: JSON.stringify({ + assistant_id: assistantId, + stream: true, + }), + } + ); + + if (!response.ok) { + throw new Error(`API Error: ${response.status} ${response.statusText}`); + } + + if (!response.body) { + throw new Error("No response body"); + } + + const reader = response.body + .pipeThrough(new TextDecoderStream()) + .pipeThrough(SseSplitterStream()) + .getReader(); + + while (true) { + const { done, value } = await reader.read(); + if (done) break; + const event: Event = parseSseMessage(value); + switch (event.type) { + case "thread.run.created": + case "thread.run.queued": + case "thread.run.in_progress": + case "thread.run.completed": + case "thread.run.cancelling": + case "thread.run.cancelled": + case "thread.run.failed": + setCurrentRun(event.data); + break; + case "thread.run.expired": + setCurrentRun(event.data); + throw new Error("Run expired"); + case "thread.message.created": + appendMessage(event.data); + break; + case "error": + throw new Error(event.data.error.message); + case "done": + setCurrentRun(null); + break; + } + } + }, + onSuccess: () => { + queryClient.invalidateQueries({ + queryKey: ["messages", selectedThread?.id], + }); + setCurrentRun(null); + setRunningAction(null); + }, + onError: (error) => { + toast.error(`Run failed: ${error.message}`); + queryClient.invalidateQueries({ + queryKey: ["messages", selectedThread?.id], + }); + setCurrentRun(null); + setRunningAction(null); + }, + }); + + const handleFileSelect = (files: FileList | null) => { + if (!files) return; + + Array.from(files).forEach((file) => { + if (file.type.startsWith("image/")) { + const reader = new FileReader(); + reader.onload = (e) => { + const newFile: AttachedFile = { + id: Math.random().toString(36).substr(2, 9), + file, + preview: e.target?.result as string, + type: "image", + }; + setAttachedFiles((prev) => [...prev, newFile]); + }; + reader.readAsDataURL(file); + } else { + toast.error("Only image files are 
supported"); + } + }); + }; + + const handleSubmit = async (e: React.FormEvent) => { + e.preventDefault(); + + if (!selectedThread || !selectedAssistant) { + toast.error("Please select a thread and assistant"); + return; + } + + if (message.trim() || attachedFiles.length > 0) { + const content: any[] = []; + + if (message.trim()) { + content.push({ + type: "text", + text: message.trim(), + }); + } + + attachedFiles.forEach((file) => { + const base64Data = file.preview.split(",")[1]; + content.push({ + type: "image", + source: { + type: "base64", + media_type: file.file.type, + data: base64Data, + }, + }); + }); + + await createMessageMutation.mutateAsync({ + content: + content.length === 1 && content[0].type === "text" + ? content[0].text + : content, + role: "user", + }); + + setMessage(""); + setAttachedFiles([]); + } + + if (!selectedAssistant.id) { + toast.warning( + "Select an assistant and hit the send button again if you want to receive an answer" + ); + return; + } + + setRunningAction("send"); + await createRunMutation.mutateAsync(selectedAssistant.id); + }; + + const handleCancel = () => { + if (currentRun) { + // Cancel the run + apiClient + .cancelRun(currentRun.thread_id, currentRun.id) + .then(() => { + toast.success("Send request to cancel run"); + }) + .catch(() => { + toast.error("Failed to send request to cancel run"); + }); + } + }; + + const handleDemo = async () => { + setRunningAction("demo"); + await createRunMutation.mutateAsync(HUMAN_DEMONSTRATION_AGENT_ID); + }; + + const removeFile = (fileId: string) => { + setAttachedFiles((prev) => prev.filter((f) => f.id !== fileId)); + }; + + const handleDragOver = useCallback((e: React.DragEvent) => { + e.preventDefault(); + setIsDragOver(true); + }, []); + + const handleDragLeave = useCallback((e: React.DragEvent) => { + e.preventDefault(); + setIsDragOver(false); + }, []); + + const handleDrop = useCallback((e: React.DragEvent) => { + e.preventDefault(); + setIsDragOver(false); + 
handleFileSelect(e.dataTransfer.files); + }, []); + + const isLoading = + createMessageMutation.isPending || createRunMutation.isPending; + + return ( + +
+
+ {/* File Attachments */} + + {attachedFiles.length > 0 && ( + + {attachedFiles.map((file) => ( +
+ {file.file.name} + +
+ ))} +
+ )} +
+ + {/* Input Area */} +
+