diff --git a/docs/docs/examples/cohere-chat-bot.md b/docs/docs/examples/cohere-chat-bot.md new file mode 100644 index 00000000..954ae39c --- /dev/null +++ b/docs/docs/examples/cohere-chat-bot.md @@ -0,0 +1,52 @@ +--- +sidebar_position: 1 +--- + +# Cohere Chat bot + +This chat bot makes an API call to Cohere and processes the user input and also maintains the chat history of user and system. It uses the command model by default. More information is added in the Cohere class itself. + +```py +from textbase import bot, Message +from textbase.models import Cohere +from typing import List + +# Load your Cohere API key +Cohere.api_key = "" + +# Prompt for `command` (cohere's model) +MESSAGE = "Is messi better than ronaldo or ronaldo better than messi?" + +@bot() +def on_message(message_history: List[Message], state: dict = None): + + # Generate `command` (cohere's model) response + bot_response = Cohere.chat( + system_prompt=MESSAGE, + user_name="user", + message_history=message_history, # Assuming history is the list of user messages + model="command", + ) + + response = { + "data": { + "messages": [ + { + "data_type": "STRING", + "value": bot_response + } + ], + "state": state + }, + "errors": [ + { + "message": "" + } + ] + } + + return { + "status_code": 200, + "response": response + } +``` \ No newline at end of file diff --git a/docs/docs/examples/cohere-prompt-bot.md b/docs/docs/examples/cohere-prompt-bot.md new file mode 100644 index 00000000..af7c49dc --- /dev/null +++ b/docs/docs/examples/cohere-prompt-bot.md @@ -0,0 +1,53 @@ +--- +sidebar_position: 2 +--- + +# Cohere Prompt bot + +This Prompt bot makes an API call to Cohere and processes the user input and responds accordingly. It uses the command model by default. More information is added in the Cohere class itself.
+ +```py +from textbase import bot, Message +from textbase.models import Cohere +from typing import List + +# Load your Cohere API key +Cohere.api_key = "" + +# Prompt for `command` (cohere's model) +# you add your sample prompts here +SYSTEM_PROMPT = """How much developer can love python language? +""" + +@bot() +def on_message(message_history: List[Message], state: dict = None): + + # Generate `command` (cohere's model) response + bot_response = Cohere.generate( + system_prompt=SYSTEM_PROMPT, + message_history=message_history, # Assuming history is the list of user messages + model="command", + ) + + response = { + "data": { + "messages": [ + { + "data_type": "STRING", + "value": bot_response + } + ], + "state": state + }, + "errors": [ + { + "message": "" + } + ] + } + + return { + "status_code": 200, + "response": response + } +``` \ No newline at end of file diff --git a/docs/docs/examples/cohere-summarize-bot.md b/docs/docs/examples/cohere-summarize-bot.md new file mode 100644 index 00000000..c9840e7a --- /dev/null +++ b/docs/docs/examples/cohere-summarize-bot.md @@ -0,0 +1,73 @@ +--- +sidebar_position: 3 +--- + +# Cohere Summary bot + +This bot makes an API call to Cohere and processes the long user input and returns a short and crisp summary of the given paragraph in different forms. It uses the command model by default. More information is added in the Cohere class itself. + +```py +from textbase import bot, Message +from textbase.models import Cohere +from typing import List + +# Load your Cohere API key +Cohere.api_key = "" + +# Prompt for `command` (cohere's model) +# you add your sample prompts here +SYSTEM_PROMPT = """"Ice cream is a sweetened frozen food typically eaten as a snack or dessert. + It may be made from milk or cream and is flavoured with a sweetener, + either sugar or an alternative, and a spice, such as cocoa or vanilla, + or with fruit such as strawberries or peaches. + It can also be made by whisking a flavored cream base and liquid nitrogen together.
+ Food coloring is sometimes added, in addition to stabilizers. + The mixture is cooled below the freezing point of water and stirred to incorporate air spaces + and to prevent detectable ice crystals from forming. The result is a smooth, + semi-solid foam that is solid at very low temperatures (below 2 °C or 35 °F). + It becomes more malleable as its temperature increases. + The meaning of the name ice cream varies from one country to another. + In some countries, such as the United States, ice cream applies only to a specific variety, + and most governments regulate the commercial use of the various terms according to the + relative quantities of the main ingredients, notably the amount of cream. + Products that do not meet the criteria to be called ice cream are sometimes labelled + frozen dairy dessert instead. In other countries, such as Italy and Argentina, + one word is used for all variants. Analogues made from dairy alternatives, + such as goat's or sheep's milk, or milk substitutes + (e.g., soy, cashew, coconut, almond milk or tofu), are available for those who are + lactose intolerant, allergic to dairy protein or vegan. 
+""" + +@bot() +def on_message(message_history: List[Message], state: dict = None): + + # Generate `command` (cohere's model) response + bot_response = Cohere.summarize( + text_input=SYSTEM_PROMPT, + message_history=message_history, # Assuming history is the list of user messages + model="command", + length="long" + ) + + response = { + "data": { + "messages": [ + { + "data_type": "STRING", + "value": bot_response + } + ], + "state": state + }, + "errors": [ + { + "message": "" + } + ] + } + + return { + "status_code": 200, + "response": response + } +``` \ No newline at end of file diff --git a/docs/docs/examples/huggingface-bot.md b/docs/docs/examples/huggingface-bot.md index e0342afe..8fd205a0 100644 --- a/docs/docs/examples/huggingface-bot.md +++ b/docs/docs/examples/huggingface-bot.md @@ -1,5 +1,5 @@ --- -sidebar_position: 3 +sidebar_position: 4 --- # HuggingFace bot diff --git a/docs/docs/examples/mimicking-bot.md b/docs/docs/examples/mimicking-bot.md index 3a49adda..48a439b5 100644 --- a/docs/docs/examples/mimicking-bot.md +++ b/docs/docs/examples/mimicking-bot.md @@ -1,5 +1,5 @@ --- -sidebar_position: 1 +sidebar_position: 5 --- # Mimicking bot diff --git a/docs/docs/examples/openai-bot.md b/docs/docs/examples/openai-bot.md index a7df7b7c..144d229a 100644 --- a/docs/docs/examples/openai-bot.md +++ b/docs/docs/examples/openai-bot.md @@ -1,5 +1,5 @@ --- -sidebar_position: 2 +sidebar_position: 6 --- # Open AI bot diff --git a/examples/cohere-chat-bot/main.py b/examples/cohere-chat-bot/main.py new file mode 100644 index 00000000..10fc8fce --- /dev/null +++ b/examples/cohere-chat-bot/main.py @@ -0,0 +1,42 @@ +from textbase import bot, Message +from textbase.models import Cohere +from typing import List + +# Load your Cohere API key +Cohere.api_key = "" + +# Prompt for `command` (cohere's model) +MESSAGE = "Hey there, How are you!" 
+ +@bot() +def on_message(message_history: List[Message], state: dict = None): + + # Generate `command` (cohere's model) response + bot_response = Cohere.chat( + system_prompt=MESSAGE, + user_name="user", + message_history=message_history, # Assuming history is the list of user messages + model="command", + ) + + response = { + "data": { + "messages": [ + { + "data_type": "STRING", + "value": bot_response + } + ], + "state": state + }, + "errors": [ + { + "message": "" + } + ] + } + + return { + "status_code": 200, + "response": response + } \ No newline at end of file diff --git a/examples/cohere-prompt-bot/main.py b/examples/cohere-prompt-bot/main.py new file mode 100644 index 00000000..538d7764 --- /dev/null +++ b/examples/cohere-prompt-bot/main.py @@ -0,0 +1,43 @@ +from textbase import bot, Message +from textbase.models import Cohere +from typing import List + +# Load your Cohere API key +Cohere.api_key = "" + +# Prompt for `command` (cohere's model) +# you add your sample prompts here +SYSTEM_PROMPT = """how python interpreter works! 
+""" + +@bot() +def on_message(message_history: List[Message], state: dict = None): + + # Generate `command` (cohere's model) response + bot_response = Cohere.generate( + system_prompt=SYSTEM_PROMPT, + message_history=message_history, # Assuming history is the list of user messages + model="command", + ) + + response = { + "data": { + "messages": [ + { + "data_type": "STRING", + "value": bot_response + } + ], + "state": state + }, + "errors": [ + { + "message": "" + } + ] + } + + return { + "status_code": 200, + "response": response + } \ No newline at end of file diff --git a/examples/cohere-summarize-bot/main.py b/examples/cohere-summarize-bot/main.py new file mode 100644 index 00000000..b1ef0163 --- /dev/null +++ b/examples/cohere-summarize-bot/main.py @@ -0,0 +1,62 @@ +from textbase import bot, Message +from textbase.models import Cohere +from typing import List + +# Load your Cohere API key +Cohere.api_key = "" + +# Prompt for `command` (cohere's model) +# you add your sample prompts here +SYSTEM_PROMPT = """"Ice cream is a sweetened frozen food typically eaten as a snack or dessert. + It may be made from milk or cream and is flavoured with a sweetener, + either sugar or an alternative, and a spice, such as cocoa or vanilla, + or with fruit such as strawberries or peaches. + It can also be made by whisking a flavored cream base and liquid nitrogen together. + Food coloring is sometimes added, in addition to stabilizers. + The mixture is cooled below the freezing point of water and stirred to incorporate air spaces + and to prevent detectable ice crystals from forming. The result is a smooth, + semi-solid foam that is solid at very low temperatures (below 2 °C or 35 °F). + It becomes more malleable as its temperature increases. + The meaning of the name ice cream varies from one country to another. 
+ In some countries, such as the United States, ice cream applies only to a specific variety, + and most governments regulate the commercial use of the various terms according to the + relative quantities of the main ingredients, notably the amount of cream. + Products that do not meet the criteria to be called ice cream are sometimes labelled + frozen dairy dessert instead. In other countries, such as Italy and Argentina, + one word is used for all variants. Analogues made from dairy alternatives, + such as goat's or sheep's milk, or milk substitutes + (e.g., soy, cashew, coconut, almond milk or tofu), are available for those who are + lactose intolerant, allergic to dairy protein or vegan. +""" + +@bot() +def on_message(message_history: List[Message], state: dict = None): + + # Generate `command` (cohere's model) response + bot_response = Cohere.summarize( + text_input=SYSTEM_PROMPT, + message_history=message_history, # Assuming history is the list of user messages + model="command", + ) + + response = { + "data": { + "messages": [ + { + "data_type": "STRING", + "value": bot_response + } + ], + "state": state + }, + "errors": [ + { + "message": "" + } + ] + } + + return { + "status_code": 200, + "response": response + } \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 25a024e7..870fba69 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,6 +15,7 @@ tabulate = "^0.9.0" functions-framework = "^3.4.0" yaspin = "^3.0.0" pydantic = "^2.3.0" +cohere = "^4.21" google-generativeai = "^0.1.0" rich = "^13.5.2" diff --git a/textbase/models.py b/textbase/models.py index 33ff1692..c1d049b0 100644 --- a/textbase/models.py +++ b/textbase/models.py @@ -5,18 +5,27 @@ import time import typing import traceback +import cohere from textbase import Message # Return list of values of content. 
-def get_contents(message: Message, data_type: str): +def get_contents(message: Message, data_type: str, client: str = None): return [ + { + "user_name": message["role"], + "message": content["value"] + } + if content["data_type"] == data_type and client == "cohere" + else { "role": message["role"], "content": content["value"] } - for content in message["content"] if content["data_type"] == data_type + if content["data_type"] == data_type else + f"Content data type is not {data_type}" + for content in message["content"] ] # Returns content if it's non empty. @@ -145,7 +154,195 @@ def generate( message = data['message'] return message + + +class Cohere: + api_key = None + + @classmethod + def generate( + cls, + system_prompt: str, + message_history: list[Message], + model: str = "command", + max_tokens=3500, + temperature=0.5, + ): + """ + Generate text based on a system prompt and optional message history using the Cohere API. + + Args: + cls (object): The class object containing the Cohere API key. + system_prompt (str): The system prompt or message to generate text from. + message_history (list[Message]): A list of messages to consider in the generation process. + model (str, optional): The model to use for text generation. Defaults to "command". + max_tokens (int, optional): The maximum number of tokens in the generated text. Defaults to 3500. + temperature (float, optional): The temperature parameter for generating the text. + Higher values (e.g., 1.0) make the output more random, while lower values (e.g., 0.1) make it more deterministic. Defaults to 0.5. + + Returns: + str: The generated text produced by the Cohere API based on the input prompt and history. + + Raises: + AssertionError: Raised when the Cohere API key is not set. + + Example: + To generate text based on a system prompt and message history using the Cohere API: + ``` + api_key = "your_api_key_here" + system_prompt = "Generate a creative story about space exploration."
+ message_history = [] # You can add previous messages to the history + result = generate(api_key, system_prompt, message_history) + print(result) + ``` + """ + assert cls.api_key is not None, "Cohere API key is not set." + filtered_messages = [] + + for message in message_history: + #list of all the contents inside a single message + contents = get_contents(message, "STRING") + if contents: + filtered_messages.extend(contents) + cohere_client = cohere.Client(cls.api_key) + response = cohere_client.generate( + prompt=system_prompt, + model=model, + temperature=temperature, + max_tokens=max_tokens, + ) + return response[0] + + @classmethod + def chat( + cls, + system_prompt: str, + user_name:str, + message_history: list[Message], + model: str = "command", + max_tokens=1500, + temperature=0.5, + ): + + """ + Engage in a chat conversation using the Cohere API. + Args: + cls (object): The class object containing the Cohere API key. + system_prompt (str): The initial system prompt or message to start the conversation. + user_name (str): The user's name or identifier in the conversation. + message_history (list[Message]): A list of messages representing the chat history. + model (str, optional): The model to use for the chat conversation. Defaults to "command". + max_tokens (int, optional): The maximum number of tokens in the generated response. Defaults to 1500. + temperature (float, optional): The temperature parameter for generating responses. + Higher values (e.g., 1.0) make the output more random, while lower values (e.g., 0.1) make it more deterministic. Defaults to 0.5. + + Returns: + str: The text of the response generated by the Cohere API as part of the chat conversation. + + Raises: + AssertionError: Raised when the Cohere API key is not set. + + Example: + To have a chat conversation using the Cohere API: + ``` + api_key = "your_api_key_here" + system_prompt = "System: Welcome to the chatbot. How can I assist you today?" 
+ user_name = "User123" + message_history = [] # You can add previous messages to the history + result = chat(api_key, system_prompt, user_name, message_history) + print(result) + ``` + """ + assert cls.api_key is not None, "Cohere API key is not set." + filtered_messages = [] + + for message in message_history: + #list of all the contents inside a single message + contents = get_contents(message, "STRING","cohere") + if contents: + filtered_messages.extend(contents) + + # initializing cohere client + cohere_client = cohere.Client(cls.api_key) + + response = cohere_client.chat( + message=system_prompt, + user_name=user_name, + model=model, + chat_history=[ + { + "user_name": "system", + "message": system_prompt + }, + *map(dict, filtered_messages), + ], + temperature=temperature, + max_tokens=max_tokens, + ) + + return response.text + + @classmethod + def summarize( + cls, + text_input: str, + message_history: list[Message], + model: str = "command", + temperature=0.3, + length:str = "medium", + format:str = "paragraph", + ): + """ + Summarize a text_input using the Cohere API based on a text input. + + Args: + cls (object): The class object containing the Cohere API key. + text_input (str): The input text to be summarized. + message_history (list[Message]): A list of messages to consider in the summarization process. + model (str, optional): The model to use for summarization. Defaults to "command". + temperature (float, optional): The temperature parameter for generating the summary. + Higher values (e.g., 1.0) make the output more random, while lower values (e.g., 0.1) make it more deterministic. Defaults to 0.3. + length (str, optional): The desired length of the summary. + Options include "short," "medium," and "long." Defaults to "medium". + format (str, optional): The desired format of the summary, e.g., "paragraph" or other formats supported by Cohere. Defaults to "paragraph". + + Returns: + str: The summarized text generated by the Cohere API. 
+ + Raises: + AssertionError: Raised when the Cohere API key is not set. + + Example: + To summarize a text with a text input using the default parameters: + ``` + api_key = "your_api_key_here" + result = summarize(api_key, "This is the input text.", message_history) + print(result) + ``` + """ + assert cls.api_key is not None, "Cohere API key is not set." + filtered_messages = [] + + for message in message_history: + #list of all the contents inside a single message + contents = get_contents(message, "STRING") + if contents: + filtered_messages.extend(contents) + + # initializing cohere client + cohere_client = cohere.Client(cls.api_key) + + response =cohere_client.summarize( + text=text_input, + length=length, + format=format, + model=model, + temperature=temperature, + ) + + return response.summary + class PalmAI: api_key = None @@ -170,4 +367,4 @@ def generate( response = palm.chat(messages=filtered_messages) print(response) - return response.last \ No newline at end of file + return response.last