diff --git a/chatstream/__init__.py b/chatstream/__init__.py
index 90aa33c..3e3310c 100644
--- a/chatstream/__init__.py
+++ b/chatstream/__init__.py
@@ -29,7 +29,6 @@
     cast,
 )
 
-import shiny.experimental as x
 import tiktoken
 from htmltools import HTMLDependency
 from shiny import Inputs, Outputs, Session, module, reactive, render, ui
@@ -44,7 +43,9 @@
 if "pyodide" in sys.modules:
     from . import openai_pyodide as openai
 else:
-    import openai
+    from openai import AsyncOpenAI
+
+
 
 if sys.version_info < (3, 10):
     from typing_extensions import ParamSpec, TypeGuard
@@ -255,17 +256,19 @@ async def finalize_streaming_result():
             current_batch = self.streaming_chat_messages_batch()
 
             for message in current_batch:
-                if "content" in message["choices"][0]["delta"]:
+                if message.choices[0].delta.content is not None:
                     self.streaming_chat_string_pieces.set(
-                        self.streaming_chat_string_pieces()
-                        + (message["choices"][0]["delta"]["content"],)
+                        (self.streaming_chat_string_pieces())
+                        + (message.choices[0].delta.content,)  # trailing comma makes this a one-element tuple
                     )
 
-                finish_reason = message["choices"][0]["finish_reason"]
+                finish_reason = message.choices[0].finish_reason
+
                 if finish_reason in ["stop", "length"]:
                     # If we got here, we know that streaming_chat_string is not None.
                     current_message_str = "".join(self.streaming_chat_string_pieces())
+
                     if finish_reason == "length":
                         current_message_str += " [Reached token limit; Type 'continue' to continue answer.]"
 
 
@@ -352,16 +355,18 @@ async def perform_query():
             # a separate task so that the data can come in without need to await it in
             # this Task (which would block other computation to happen, like running
             # reactive stuff).
+            aclient = AsyncOpenAI(api_key=self.api_key())
+
             messages: StreamResult[ChatCompletionStreaming] = stream_to_reactive(
-                openai.ChatCompletion.acreate(  # pyright: ignore[reportUnknownMemberType, reportGeneralTypeIssues]
-                    model=self.model(),
-                    api_key=self.api_key(),
-                    messages=outgoing_messages_normalized,
-                    stream=True,
-                    temperature=self.temperature(),
-                    **extra_kwargs,
-                ),
-                throttle=self.throttle(),
+                aclient.chat.completions.create(
+                    model=self.model(),
+                    messages=outgoing_messages_normalized,
+                    stream=True,
+                    temperature=self.temperature(),
+                    max_tokens=100,
+                    **extra_kwargs,
+                ),
+                throttle=self.throttle(),
             )
 
             # Set this to a non-empty tuple (with a blank string), to indicate that
@@ -418,7 +423,7 @@ def query_ui():
             return ui.div()
 
         return ui.div(
-            x.ui.input_text_area(
+            ui.input_text_area(
                 "query",
                 None,
                 # value="2+2",
diff --git a/examples/basic/app.py b/examples/basic/app.py
index 97c6f56..63cef57 100644
--- a/examples/basic/app.py
+++ b/examples/basic/app.py
@@ -8,7 +8,7 @@
 
 
 def server(input: Inputs, output: Outputs, session: Session):
-    chatstream.chat_server("mychat")
+    chatstream.chat_server("mychat", debug=True)
 
 
 app = App(app_ui, server)
diff --git a/examples/controls/app.py b/examples/controls/app.py
index 80784af..7ed0e41 100644
--- a/examples/controls/app.py
+++ b/examples/controls/app.py
@@ -20,10 +20,10 @@
 });
 """
 
-app_ui = x.ui.page_fillable(
+app_ui = ui.page_fillable(
     ui.head_content(ui.tags.title("Shiny ChatGPT")),
-    x.ui.layout_sidebar(
-        x.ui.sidebar(
+    ui.layout_sidebar(
+        ui.sidebar(
             ui.h4("Shiny ChatGPT"),
             ui.hr(),
             ui.input_select(
diff --git a/examples/doc_query/app.py b/examples/doc_query/app.py
index f636e53..1fd8d4d 100644
--- a/examples/doc_query/app.py
+++ b/examples/doc_query/app.py
@@ -46,10 +46,10 @@
 });
 """
 
-app_ui = x.ui.page_fillable(
+app_ui = ui.page_fillable(
     ui.head_content(ui.tags.title("Shiny Document Query")),
-    x.ui.layout_sidebar(
-        x.ui.sidebar(
+    ui.layout_sidebar(
+        ui.sidebar(
             ui.h4("Shiny Document Query"),
             ui.hr(),
             ui.input_file("file", "Drag to upload text or PDF files", multiple=True),
diff --git a/examples/doc_query/requirements.txt b/examples/doc_query/requirements.txt
index 85c0d0f..6ab8e12 100644
--- a/examples/doc_query/requirements.txt
+++ b/examples/doc_query/requirements.txt
@@ -3,4 +3,4 @@ pypdf
 langchain
 tiktoken
 shiny
-chatstream@git+https://github.com/wch/chatstream.git
+chatstream@git+https://github.com/dar4datascience/chatstream.git
diff --git a/examples/dual/app.py b/examples/dual/app.py
index c2c2c4f..2ede3c4 100644
--- a/examples/dual/app.py
+++ b/examples/dual/app.py
@@ -15,10 +15,10 @@
 });
 """
 
-app_ui = x.ui.page_fillable(
+app_ui = ui.page_fillable(
     ui.head_content(ui.tags.title("Shiny ChatGPT")),
-    x.ui.layout_sidebar(
-        x.ui.sidebar(
+    ui.layout_sidebar(
+        ui.sidebar(
             ui.h4("Shiny ChatGPT"),
             ui.hr(),
             ui.input_select(
@@ -65,12 +65,12 @@
             ),
             position="right",
         ),
-        x.ui.layout_column_wrap(
+        ui.layout_column_wrap(
             1 / 2,
-            x.ui.card(
+            ui.card(
                 chatstream.chat_ui("chat1"),
             ),
-            x.ui.card(
+            ui.card(
                 chatstream.chat_ui("chat2"),
             ),
         ),
diff --git a/examples/recipes_card/app.py b/examples/recipes_card/app.py
index a10add8..555b26c 100644
--- a/examples/recipes_card/app.py
+++ b/examples/recipes_card/app.py
@@ -166,7 +166,7 @@ def answer_to_recipe_card(answer: str) -> ui.TagChild:
 def recipe_card(recipe: Recipe) -> ui.TagChild:
     title = None
     if "title" in recipe:
-        title = x.ui.card_header(
+        title = ui.card_header(
             {"class": "bg-dark fw-bold fs-3"},
             recipe["title"],
         )
@@ -256,11 +256,11 @@ def recipe_card(recipe: Recipe) -> ui.TagChild:
         ui.a({"href": recipe["source"], "target": "_blank"}, recipe["source"]),
     )
 
-    return x.ui.card(
+    return ui.card(
         title,
         tags,
         summary,
-        x.ui.layout_column_wrap(
+        ui.layout_column_wrap(
             None,
             ui.div(ingredients),
             ui.div(directions),