Merge pull request #116 from mraniki/dev
Add prompt conversion to string
mraniki authored Sep 26, 2023
2 parents 0419aa3 + 477f545 commit ca3a4d1
Showing 2 changed files with 16 additions and 18 deletions.
myllm/default_settings.toml (2 changes: 1 addition & 1 deletion)

@@ -39,7 +39,7 @@ llm_commands = """
 ➰ /aimode\n
 🧽 /clearai\n
 """
-
+llm_prefix = "🐻"
 llm_ai_mode = false
 
 # template prompt context
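The new llm_prefix value is read at runtime and prepended to every model reply (see the myllm/main.py change below). A minimal sketch of loading it, assuming the settings object is Dynaconf-backed, as the settings.llm_model access style suggests; the file path and variable names here are illustrative only:

# Hedged sketch: assumes a Dynaconf-backed settings object; this is not
# necessarily how the project actually wires its configuration.
from dynaconf import Dynaconf

settings = Dynaconf(settings_files=["myllm/default_settings.toml"])

print(settings.llm_prefix)   # "🐻", the default added by this commit
print(settings.llm_ai_mode)  # False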
myllm/main.py (32 changes: 15 additions & 17 deletions)

@@ -76,30 +76,28 @@ async def get_myllm_info(self):
             f"ℹ️ MyLLM v{__version__}\n {settings.llm_model}\n{settings.llm_provider}"
         )
 
-
     async def chat(self, prompt):
         """
         Asynchronously chats with the user.

         Args:
             prompt (str): The prompt message from the user.

         Returns:
-            str: The response from the conversation model.
+            str: The response from the conversation model.
         """
-        self.conversation.add_message("user", prompt)
-        logger.debug("conversation {}", self.conversation.get_messages())
-        response = await self.provider.create_async(
-            model=settings.llm_model,
-            messages=self.conversation.get_messages(),
-        )
-
-        self.conversation.add_message("ai", response)
-        sleep(settings.lag)
-        if response:
-            logger.debug("response received {}", response)
-            return response
-        else:
-            logger.debug("No response from the model")
+        try:
+            self.conversation.add_message("user", prompt)
+            response = await self.provider.create_async(
+                model=settings.llm_model,
+                messages=self.conversation.get_messages(),
+            )
+            sleep(settings.lag)
+            self.conversation.add_message("ai", response)
+            return f"{settings.llm_prefix} {response}"
+        except Exception as error:
+            logger.error("No response from the model {}", error)
+            return "No response from the model"
 
     async def clear_chat_history(self):
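For context, a hypothetical caller of the reworked chat method; the import path and the class name MyLLM are assumptions based on the module layout, not something this diff confirms:

# Hypothetical usage sketch; MyLLM and its import path are assumed.
import asyncio

from myllm import MyLLM


async def main():
    llm = MyLLM()
    reply = await llm.chat("hello")
    # After this commit, a successful call returns the prefixed string,
    # e.g. "🐻 <model output>", and a provider failure returns the
    # fallback string "No response from the model" instead of None.
    print(reply)


asyncio.run(main())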
