Skip to content

Commit

Permalink
add list
Browse files Browse the repository at this point in the history
  • Loading branch information
monofuel committed Jan 13, 2024
1 parent 6288a8b commit 4184dee
Show file tree
Hide file tree
Showing 2 changed files with 54 additions and 8 deletions.
38 changes: 35 additions & 3 deletions src/llama_leap.nim
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,20 @@ type
prompt_eval_duration*: int
eval_count*: int
eval_duration*: int
type
  ModelDetails* = ref object
    ## Model format/quantization metadata returned by ollama's /api/tags.
    ## All fields are exported: `details*` is exported on OllamaModel, so
    ## consumers of listModels need access to the fields inside it too.
    format*: string              # model container format, e.g. "gguf" — per ollama API, TODO confirm
    family*: string
    families*: Option[seq[string]]
    parameter_size*: string      # e.g. "7B"
    quantization_level*: string  # e.g. "Q4_0"

  OllamaModel* = ref object
    ## One locally-installed model as reported by /api/tags.
    name*: string
    modified_at*: string
    size*: int                   # presumably size on disk in bytes — verify against ollama docs
    digest*: string
    details*: ModelDetails

  ListResp* = ref object
    ## Response envelope for GET /api/tags (see listModels).
    models*: seq[OllamaModel]

proc renameHook*(v: var GenerateReq, fieldName: var string) =
## `template` is a special keyword in nim, so we need to rename it during serialization
Expand Down Expand Up @@ -98,13 +112,24 @@ proc close*(api: OllamaAPI) =
api.curlPool.close()


proc loadModel*(api: OllamaAPI, model: string): JsonNode {.discardable.} =
  ## Pre-loads `model` into ollama's memory.
  ## Posting to /api/generate with a model but no prompt instructs ollama to
  ## load the model without generating anything; ollama's raw JSON reply is
  ## returned, and may be discarded by callers that only want the side effect.
  var headers: curly.HttpHeaders
  headers["Content-Type"] = "application/json"
  let
    url = api.baseUrl / "generate"
    payload = %*{"model": model}
    resp = api.curlPool.post(url, headers, toJson(payload), api.curlTimeout)
  if resp.code != 200:
    raise newException(CatchableError, &"ollama failed to load model: {resp.code} {resp.body}")
  result = fromJson(resp.body)

proc generate*(api: OllamaAPI, req: GenerateReq): GenerateResp =
  ## Typed interface for /api/generate.
  ## Streaming is forced off because this client reads the reply as a single
  ## JSON document. NOTE: this mutates the caller's `req` (a ref object) by
  ## setting `stream`.
  ## Raises CatchableError on any non-200 response.
  # Fix: removed leftover debug `echo toJson(req)` — libraries must not dump
  # request payloads (prompts, system messages) to stdout.
  let url = api.baseUrl / "generate"
  var headers: curly.HttpHeaders
  headers["Content-Type"] = "application/json"
  req.stream = option(false)
  let resp = api.curlPool.post(url, headers, toJson(req), api.curlTimeout)
  if resp.code != 200:
    raise newException(CatchableError, &"ollama generate failed: {resp.code} {resp.body}")
  result = fromJson(resp.body, GenerateResp)
Expand All @@ -119,7 +144,7 @@ proc generate*(api: OllamaAPI, model: string, prompt: string): string =
proc generate*(api: OllamaAPI, req: JsonNode): JsonNode =
## direct json interface for /api/generate
## only use if there are specific new features you need or know what you are doing
let url = api.baseUrl / "/generate"
let url = api.baseUrl / "generate"
var headers: curly.HttpHeaders
headers["Content-Type"] = "application/json"
req["stream"] = newJBool(false)
Expand All @@ -128,3 +153,10 @@ proc generate*(api: OllamaAPI, req: JsonNode): JsonNode =
if resp.code != 200:
raise newException(CatchableError, &"ollama generate failed: {resp.code} {resp.body}")
result = fromJson(resp.body)

proc listModels*(api: OllamaAPI): ListResp =
  ## Typed interface for GET /api/tags: returns the models that are
  ## available locally in the ollama instance.
  ## Raises CatchableError on any non-200 response.
  let
    url = api.baseUrl / "tags"
    resp = api.curlPool.get(url, timeout = api.curlTimeout)
  if resp.code != 200:
    raise newException(CatchableError, &"ollama list tags failed: {resp.code} {resp.body}")
  result = fromJson(resp.body, ListResp)
24 changes: 19 additions & 5 deletions tests/test_llama_leap.nim
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@

import llama_leap, std/[unittest, json, options, strutils]

const TestModel = "llama2"

suite "llama_leap":
var ollama: OllamaAPI

Expand All @@ -11,13 +13,25 @@ suite "llama_leap":
teardown:
ollama.close()

suite "list":
  test "list model tags":
    ## Smoke test: fetch /api/tags and print the installed model names
    ## on one space-separated line.
    let listing = ollama.listModels()
    var names: seq[string]
    for m in listing.models:
      names.add(m.name)
    echo "> " & names.join(" ")

suite "generate":

test "load llama2":
ollama.loadModel(TestModel)

test "simple /api/generate":
echo "> " & ollama.generate("llama2", "How are you today?")
echo "> " & ollama.generate(TestModel, "How are you today?")

test "typed /api/generate":
let req = GenerateReq(
model: "llama2",
model: TestModel,
prompt: "How are you today?",
options: option(ModelParameters(
temperature: option(0.0f),
Expand All @@ -30,7 +44,7 @@ suite "llama_leap":

test "json /api/generate":
let req = %*{
"model": "llama2",
"model": TestModel,
"prompt": "How are you today?",
"system": "Please talk like a ninja. You are Sneaky the llama.",
"options": {
Expand All @@ -42,7 +56,7 @@ suite "llama_leap":

test "context":
let req = GenerateReq(
model: "llama2",
model: TestModel,
prompt: "How are you today?",
system: option("Please talk like a pirate. You are Longbeard the llama."),
options: option(ModelParameters(
Expand All @@ -54,7 +68,7 @@ suite "llama_leap":
echo "1> " & resp.response.strip()

let req2 = GenerateReq(
model: "llama2",
model: TestModel,
prompt: "How are you today?",
context: option(resp.context),
options: option(ModelParameters(
Expand Down

0 comments on commit 4184dee

Please sign in to comment.