From 10797fd0f5741d8dfb35c91f8c74b8470e6a2f60 Mon Sep 17 00:00:00 2001
From: Carson
Date: Tue, 17 Dec 2024 11:17:26 -0600
Subject: [PATCH] feat(Chat): The .append_message() method now automatically
 streams in generators

---
 shiny/ui/_chat.py | 14 +++++++++++---
 1 file changed, 11 insertions(+), 3 deletions(-)

diff --git a/shiny/ui/_chat.py b/shiny/ui/_chat.py
index b72995d94..b1cec61c7 100644
--- a/shiny/ui/_chat.py
+++ b/shiny/ui/_chat.py
@@ -515,13 +515,21 @@ async def append_message(self, message: Any) -> None:
             The message to append. A variety of message formats are supported
             including a string, a dictionary with `content` and `role` keys, or a
             relevant chat completion object from platforms like OpenAI, Anthropic,
             Ollama, and others.
+            When the message is a generator or async generator, it is automatically
+            treated as a stream of message chunks (i.e., uses
+            `.append_message_stream()`)

         Note
         ----
-        Use `.append_message_stream()` instead of this method when `stream=True` (or
-        similar) is specified in model's completion method.
+        Although this method tries its best to handle various message formats, it's
+        not always possible to handle every message format. If you encounter an error
+        or no response when appending a message, try extracting the message content
+        as a string and passing it to this method.
         """
-        await self._append_message(message)
+        if inspect.isasyncgen(message) or inspect.isgenerator(message):
+            await self.append_message_stream(message)
+        else:
+            await self._append_message(message)

     async def _append_message(
         self, message: Any, *, chunk: ChunkOption = False, stream_id: str | None = None
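
Below is a minimal usage sketch (not part of the patch) of the behavior this change enables
in a Shiny Express app. The app scaffolding, the callback name, and the `chunks()` helper are
illustrative assumptions; only the final `append_message()` call exercises the change, which
now routes generators to `.append_message_stream()` automatically.

    from shiny.express import ui

    chat = ui.Chat(id="chat")
    chat.ui()


    @chat.on_user_submit
    async def _():
        user = chat.user_input()

        # A plain (sync) generator of message chunks; an async generator
        # is handled the same way.
        def chunks():
            for word in f"You said: {user}".split():
                yield word + " "

        # With this patch, passing a generator here streams it chunk by
        # chunk, equivalent to calling .append_message_stream() directly.
        await chat.append_message(chunks())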