Merge pull request #133 from mraniki/dev
✅ Unit Test
mraniki authored Oct 2, 2023
2 parents 9854c1c + fbe5713 commit 3ac2f31
Showing 6 changed files with 11 additions and 10 deletions.
2 changes: 1 addition & 1 deletion .requirements/requirements.txt
@@ -18,7 +18,7 @@ fastapi==0.103.2 ; python_version >= "3.10" and python_version < "4.0"
 flask-cors==4.0.0 ; python_version >= "3.10" and python_version < "4.0"
 flask==3.0.0 ; python_version >= "3.10" and python_version < "4.0"
 frozenlist==1.4.0 ; python_version >= "3.10" and python_version < "4.0"
-g4f==0.1.4.2 ; python_version >= "3.10" and python_version < "4.0"
+g4f==0.1.4.4 ; python_version >= "3.10" and python_version < "4.0"
 h11==0.14.0 ; python_version >= "3.10" and python_version < "4.0"
 httpcore==0.18.0 ; python_version >= "3.10" and python_version < "4.0"
 httpx==0.25.0 ; python_version >= "3.10" and python_version < "4.0"
4 changes: 2 additions & 2 deletions README.md
@@ -29,10 +29,10 @@ Interact with LLM in simple way.<br>
 <pre>
 <code>
 talky = MyLLM()
-logger.info(await talky.talk(
+logger.info(await talky.chat(
 prompt="tell me who is president of the united states?"))
 # The current President of the United States is Joe Biden.
-logger.info(await talky.talk(prompt="let's start a conversation"))
+logger.info(await talky.chat(prompt="let's start a conversation"))
 # keep the chat history
 </code>
 </pre>
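
The renamed method can be exercised end to end with a sketch along these lines (assumptions: MyLLM is importable from the myllm package root, and print stands in for the README's logger):

import asyncio

from myllm import MyLLM  # assumed import path for this sketch


async def main():
    talky = MyLLM()
    # The first call answers the prompt; later calls reuse the stored history.
    print(await talky.chat(prompt="tell me who is president of the united states?"))
    print(await talky.chat(prompt="let's start a conversation"))


asyncio.run(main())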
3 changes: 2 additions & 1 deletion myllm/default_settings.toml
@@ -20,7 +20,8 @@ VALUE = "On default"
 myllm_enabled = true
 
 # LLM Model to use
-llm_model = "gpt-3.5-turbo"
+# llm_model = "gpt-3.5-turbo"
+llm_model= "gpt_4"
 
 # LLM Provider
 # Refer to https://github.com/xtekky/gpt4free
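
Keeping the old default as a comment documents the previous value while gpt_4 takes effect. A minimal sketch of how the key could be read, assuming a standalone dynaconf loader rather than the project's own settings wiring:

from dynaconf import Dynaconf

# Hypothetical standalone loader for illustration; the project builds its own
# settings object from this file.
settings = Dynaconf(settings_files=["myllm/default_settings.toml"])
print(settings.llm_model)  # "gpt_4" after this change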
8 changes: 3 additions & 5 deletions myllm/main.py
@@ -49,7 +49,7 @@ def __init__(self):
         provider_module = importlib.import_module(provider_module_name)
         provider_class = getattr(provider_module, provider_module_name.split(".")[-1])
         self.provider = provider_class()
-        self.llm_model = settings.llm_model
+        self.model = settings.llm_model
         self.lag = settings.lag
         self.conversation = Conversation()

@@ -60,9 +60,7 @@ async def get_myllm_info(self):
         Returns:
             str: A string containing the MyLLM version, model, and provider.
         """
-        return (
-            f"ℹ️ MyLLM v{__version__}\n {self.llm_model}\n{self.provider}"
-        )
+        return f"ℹ️ MyLLM v{__version__}\n {self.model}\n{self.provider}"
 
     async def chat(self, prompt):
         """
@@ -77,7 +75,7 @@ async def chat(self, prompt):
         try:
             self.conversation.add_message("user", prompt)
             response = await self.provider.create_async(
-                model=self.llm_model,
+                model=self.model,
                 messages=self.conversation.get_messages(),
             )
             sleep(self.lag)
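
The rename from self.llm_model to self.model is attribute-only; the provider contract stays the same. A toy stand-in that satisfies the awaited create_async(model=..., messages=...) call, shown purely for illustration (not a real g4f provider):

class EchoProvider:
    """Toy provider mirroring the interface MyLLM.chat() awaits."""

    async def create_async(self, model, messages):
        # Echo the newest user message back, tagged with the model name.
        return f"[{model}] you said: {messages[-1]['content']}"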
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -31,7 +31,7 @@ dynaconf = ">=3.2.0"
 loguru = ">=0.6.0"
 httpx = ">=0.24.1"
 js2py = "^0.74"
-g4f = "0.1.4.2"
+g4f = "0.1.4.4"
 curl-cffi ="0.5.7"
 PyExecJS2="1.6.1"

2 changes: 2 additions & 0 deletions tests/test_unit.py
@@ -57,4 +57,6 @@ async def test_switch_continous_mode(talky):
 @pytest.mark.asyncio
 async def test_chat(talky):
     result = await talky.chat("tell me a story")
+    print(talky.provider)
+    print(talky.model)
     assert result is not None
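
The talky fixture is defined earlier in the test module and falls outside this diff; a plausible shape for it, stated as an assumption:

import pytest

from myllm import MyLLM  # assumed import path


@pytest.fixture
def talky():
    # Hypothetical reconstruction of the fixture used by test_chat.
    return MyLLM()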
