Commit

show model
monofuel committed Jan 13, 2024
1 parent 22804cd commit 5f354b8
Showing 3 changed files with 51 additions and 11 deletions.
1 change: 0 additions & 1 deletion examples/example.nim

This file was deleted.

43 changes: 38 additions & 5 deletions src/llama_leap.nim
@@ -92,31 +92,44 @@ type
details*: ModelDetails
ListResp* = ref object
models*: seq[OllamaModel]
ShowModel* = ref object
modelfile*: string
parameters*: string
template_str*: string
details*: ModelDetails
EmbeddingReq* = ref object
model*: string
prompt*: string
options*: Option[ModelParameters] # bag of model parameters
EmbeddingResp* = ref object
embedding*: seq[float64]

proc renameHook*(v: var ChatReq, fieldName: var string) =
proc renameHook(v: var ChatReq, fieldName: var string) =
## `template` is a special keyword in nim, so we need to rename it during serialization
if fieldName == "template":
fieldName = "template_str"
proc dumpHook(v: var ChatReq, fieldName: var string) =
if fieldName == "template_str":
fieldName = "template"

proc renameHook(v: var GenerateReq, fieldName: var string) =
## `template` is a special keyword in nim, so we need to rename it during serialization
if fieldName == "template":
fieldName = "template_str"
proc dumpHook*(v: var ChatReq, fieldName: var string) =
proc dumpHook(v: var GenerateReq, fieldName: var string) =
if fieldName == "template_str":
fieldName = "template"

proc renameHook*(v: var GenerateReq, fieldName: var string) =
proc renameHook(v: var ShowModel, fieldName: var string) =
## `template` is a special keyword in nim, so we need to rename it during serialization
if fieldName == "template":
fieldName = "template_str"
proc dumpHook*(v: var GenerateReq, fieldName: var string) =
proc dumpHook(v: var ShowModel, fieldName: var string) =
if fieldName == "template_str":
fieldName = "template"


proc dumpHook*(s: var string, v: object) =
proc dumpHook(s: var string, v: object) =
## jsony `hack` to skip optional fields that are nil
s.add '{'
var i = 0
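
The renameHook/dumpHook changes above exist because `template` is a reserved word in Nim, so the JSON key "template" has to land in a field called `template_str`. A minimal standalone sketch of the parse-side rename (using a hypothetical `Doc` type, not part of llama_leap):

import jsony

type Doc = ref object
  template_str: string

# jsony calls renameHook while parsing object keys, so the JSON key
# "template" can be routed into the Nim-legal field name `template_str`.
proc renameHook(v: var Doc, fieldName: var string) =
  if fieldName == "template":
    fieldName = "template_str"

let d = """{"template": "{{ .Prompt }}"}""".fromJson(Doc)
echo d.template_str  # prints: {{ .Prompt }}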
@@ -172,6 +185,7 @@ proc generate*(api: OllamaAPI, req: GenerateReq): GenerateResp =
var headers: curly.HttpHeaders
headers["Content-Type"] = "application/json"
req.stream = option(false)
echo toJson(req)
let resp = api.curlPool.post(url, headers, toJson(req), api.curlTimeout)
if resp.code != 200:
raise newException(CatchableError, &"ollama generate failed: {resp.code} {resp.body}")
@@ -267,6 +281,19 @@ proc listModels*(api: OllamaAPI): ListResp =
raise newException(CatchableError, &"ollama list tags failed: {resp.code} {resp.body}")
result = fromJson(resp.body, ListResp)

proc showModel*(api: OllamaAPI, name: string): ShowModel =
## get details for a specific model
let url = api.baseUrl / "show"
let req = %*{"name": name}

var headers: curly.HttpHeaders
headers["Content-Type"] = "application/json"

let resp = api.curlPool.post(url, headers, toJson(req), api.curlTimeout)
if resp.code != 200:
raise newException(CatchableError, &"ollama show failed: {resp.code} {resp.body}")
result = fromJson(resp.body, ShowModel)

proc pullModel*(api: OllamaAPI, name: string) =
## Ask the ollama server to pull a model
let url = api.baseUrl / "pull"
@@ -317,3 +344,9 @@ proc generateEmbeddings*(
result = fromJson(resp.body, EmbeddingResp)
else:
result = fromJson(resp.body, EmbeddingResp)

# TODO: HEAD /api/blobs/:digest
# TODO: POST /api/blobs/:digest
# TODO: POST /api/copy
# TODO: DELETE /api/delete
# TODO: POST /api/push
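
For reference, a usage sketch of the new showModel call added above. It assumes a local ollama server; newOllamaAPI() and the "llama2" model name are illustrative assumptions, not taken from this diff:

import llama_leap

let ollama = newOllamaAPI()
let info = ollama.showModel("llama2")

echo info.parameters    # parameter settings reported by /api/show
echo info.template_str  # the prompt template, sent as "template" in the JSON
echo info.modelfile     # the full Modelfile text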
18 changes: 13 additions & 5 deletions tests/test_llama_leap.nim
@@ -1,9 +1,11 @@
## llama_leap API tests
## Ensure that ollama is running!

import llama_leap, std/[unittest, json, options, strutils]
import llama_leap, jsony, std/[unittest, json, options, strutils]

const TestModel = "llama2"
const
TestModel = "llama2"
TestModelfileName = "test-pirate-llama2"

suite "llama_leap":
var ollama: OllamaAPI
@@ -113,7 +115,6 @@ suite "llama_leap":
let resp = ollama.chat(req)
echo "> " & resp.message.content.strip()
suite "create":
let testModelName = "test-pirate-llama2"
test "create specifying modelfile":
let modelfile = """
FROM llama2
@@ -122,9 +123,16 @@
PARAMETER num_ctx 4096
SYSTEM Please talk like a pirate. You are Longbeard the llama.
"""
ollama.createModel(testModelName, modelfile)
ollama.createModel(TestModelfileName, modelfile)
test "use our created modelfile":
echo "> " & ollama.generate(testModelName, "How are you today?")
echo "> " & ollama.generate(TestModelfileName, "How are you today?")

suite "show":
test "show model":
let resp = ollama.showModel(TestModelfileName)
echo "> " & toJson(resp)
# validate that renameHook() is working properly
assert resp.template_str != ""

suite "embeddings":
test "generate embeddings":
