diff --git a/docs/docs/examples/palmai-bot.md b/docs/docs/examples/palmai-bot.md new file mode 100644 index 00000000..b9ce631e --- /dev/null +++ b/docs/docs/examples/palmai-bot.md @@ -0,0 +1,55 @@ +--- +sidebar_position: 2 +--- + +# Palm AI bot + +This bot makes an API call to the PalmAI API and processes the user input. By default it uses chat-bison, which is optimised for chat purposes. + +```py +import os +from textbase import bot, Message +from textbase.models import PalmAI +from typing import List + +# Load your PalmAI API key +PalmAI.api_key = "" +# or from environment variable +PalmAI.api_key = os.getenv("PALMAI_API_KEY") + +# System prompt for the PalmAI chat model +SYSTEM_PROMPT = """You are chatting with an AI. There are no specific prefixes for responses, so you can ask or talk about anything you like. +You will respond in a natural, conversational manner. Feel free to start the conversation with any question or topic, and let's have a pleasant chat! +""" + +@bot() +def on_message(message_history: List[Message], state: dict = None): + + bot_response = PalmAI.generate( + system_prompt=SYSTEM_PROMPT, + message_history=message_history + ) + + response = { + "data": { + "messages": [ + { + "data_type": "STRING", + "value": bot_response + } + ], + "state": state + }, + "errors": [ + { + "message": "" + } + ] + } + + return { + "status_code": 200, + "response": response + } + +``` \ No newline at end of file diff --git a/examples/palmai-bot.py b/examples/palmai-bot.py new file mode 100644 index 00000000..f58a65cc --- /dev/null +++ b/examples/palmai-bot.py @@ -0,0 +1,44 @@ +import os +from textbase import bot, Message +from textbase.models import PalmAI +from typing import List + +# Load your PalmAI API key +PalmAI.api_key = "" +# or from environment variable +PalmAI.api_key = os.getenv("PALMAI_API_KEY") + +# System prompt for the PalmAI chat model +SYSTEM_PROMPT = """You are chatting with an AI. There are no specific prefixes for responses, so you can ask or talk about anything you like. 
+You will respond in a natural, conversational manner. Feel free to start the conversation with any question or topic, and let's have a pleasant chat! +""" + +@bot() +def on_message(message_history: List[Message], state: dict = None): + + bot_response = PalmAI.generate( + system_prompt=SYSTEM_PROMPT, + message_history=message_history + ) + + response = { + "data": { + "messages": [ + { + "data_type": "STRING", + "value": bot_response + } + ], + "state": state + }, + "errors": [ + { + "message": "" + } + ] + } + + return { + "status_code": 200, + "response": response + } diff --git a/pyproject.toml b/pyproject.toml index fd2dd53a..8738793e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,6 +15,7 @@ tabulate = "^0.9.0" functions-framework = "^3.4.0" yaspin = "^3.0.0" pydantic = "^2.3.0" +google-generativeai = "^0.1.0" [build-system] requires = ["poetry-core"] diff --git a/textbase/models.py b/textbase/models.py index 814ed533..7df81bc4 100644 --- a/textbase/models.py +++ b/textbase/models.py @@ -6,6 +6,7 @@ import traceback from textbase import Message +import google.generativeai as palm # Return list of values of content. def get_contents(message: Message, data_type: str): @@ -143,4 +144,40 @@ def generate( data = json.loads(response.text) # parse the JSON data into a dictionary message = data['message'] - return message \ No newline at end of file + return message + +class PalmAI: + api_key = None + response = None + client = None + + @classmethod + def generate( + cls, + system_prompt: str, + message_history: list[Message], + model="models/chat-bison-001", + examples: typing.List[typing.Tuple] = None, + temperature = 0.7, + ): + assert cls.api_key is not None, "PalmAI API key is not set." 
+ if cls.client is None: + palm.configure(api_key=cls.api_key) + cls.client = palm + + most_recent_message = get_contents(message_history[-1], "STRING")[0]["content"] + + if cls.response is None: + cls.response = cls.client.chat( + context = system_prompt, + model = model, + temperature=temperature, + messages=most_recent_message + ) + + else: + cls.response = cls.response.reply( + message=most_recent_message + ) + + return cls.response.last \ No newline at end of file