From 476c5cdcfe2712950221c74b1dae7f50b83430f8 Mon Sep 17 00:00:00 2001
From: monofuel
Date: Sat, 27 Jul 2024 13:50:50 -0400
Subject: [PATCH] switch tests to llama 3.1

---
 tests/test_llama_leap.nim   | 13 ++++++++-----
 tests/test_ollama_tools.nim | 27 +++++++++++++++++++++++++++
 2 files changed, 35 insertions(+), 5 deletions(-)
 create mode 100644 tests/test_ollama_tools.nim

diff --git a/tests/test_llama_leap.nim b/tests/test_llama_leap.nim
index eb8db04..bc9a765 100644
--- a/tests/test_llama_leap.nim
+++ b/tests/test_llama_leap.nim
@@ -1,11 +1,13 @@
 ## llama_leap API tests
 ## Ensure that ollama is running!
 
-import llama_leap, jsony, std/[unittest, json, options, strutils]
+import
+  std/[unittest, json, options, strutils],
+  llama_leap, jsony
 
 const
-  TestModel = "llama2"
-  TestModelfileName = "test-pirate-llama2"
+  TestModel = "llama3.1:8b"
+  TestModelfileName = "test-pirate-llama3.1"
 
 suite "llama_leap":
   var ollama: OllamaAPI
@@ -33,7 +35,7 @@ suite "llama_leap":
 
 
   suite "generate":
-    test "load llama2":
+    test "load llama3.1":
       ollama.loadModel(TestModel)
 
     test "simple /api/generate":
@@ -118,10 +120,11 @@ suite "llama_leap":
       )
       let resp = ollama.chat(req)
       echo "> " & resp.message.content.strip()
+
   suite "create":
     test "create specifying modelfile":
       let modelfile = """
-FROM llama2
+FROM llama3.1:8b
 PARAMETER temperature 0
 PARAMETER num_ctx 4096
 
diff --git a/tests/test_ollama_tools.nim b/tests/test_ollama_tools.nim
new file mode 100644
index 0000000..cf8db5f
--- /dev/null
+++ b/tests/test_ollama_tools.nim
@@ -0,0 +1,27 @@
+## ollama tools test
+## Ensure that ollama is running!
+
+import
+  std/[unittest],
+  llama_leap
+
+# Must use a tools compatible model!
+const
+  TestModel = "llama3.1:8b"
+
+suite "ollama tools":
+  var ollama: OllamaAPI
+
+  setup:
+    ollama = newOllamaAPI()
+  teardown:
+    ollama.close()
+
+  suite "version":
+    test "version":
+      echo "> " & ollama.getVersion()
+  suite "pull":
+    test "pull model":
+      ollama.pullModel(TestModel)
+
+  # TODO do the thing
\ No newline at end of file
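
Note on the "TODO do the thing" placeholder in the new test file: below is a minimal sketch of what a tool-calling test could look like. The ChatReq/ChatMessage/Tool/ToolFunction names and the tools field are assumptions made for illustration and are not confirmed by this patch; only OllamaAPI, newOllamaAPI(), close(), pullModel(), chat(), and resp.message.content appear in the diff above.

# Sketch only: a possible tool-calling test for the TODO placeholder.
# The ChatReq/ChatMessage/Tool/ToolFunction wrappers and the `tools` field
# are hypothetical and mirror Ollama's /api/chat tool payload; adjust to the
# actual llama_leap types before using.

import
  std/[unittest, json, strutils],
  llama_leap

const TestModel = "llama3.1:8b"   # must be a tools-capable model

suite "ollama tools (sketch)":
  var ollama: OllamaAPI

  setup:
    ollama = newOllamaAPI()
  teardown:
    ollama.close()

  test "chat with a weather tool":
    # Hypothetical request shape: one user message plus a single function tool.
    let req = ChatReq(
      model: TestModel,
      messages: @[
        ChatMessage(role: "user", content: "What is the weather in Paris?")
      ],
      tools: @[
        Tool(
          `type`: "function",
          function: ToolFunction(
            name: "get_current_weather",    # hypothetical tool name
            description: "Look up the current weather for a city",
            parameters: %*{
              "type": "object",
              "properties": {"city": {"type": "string"}},
              "required": ["city"]
            }
          )
        )
      ]
    )
    let resp = ollama.chat(req)
    # A tools-capable model is expected to answer with a tool call; inspect
    # resp.message for the call details (field names depend on the library).
    echo "> " & resp.message.content.strip()

As in the pirate-modelfile test above, pinning temperature to 0 in the request options would make any assertion on the returned tool call reproducible across runs.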