Commit 4ad3efc: tests

monofuel committed Jan 13, 2024
1 parent f19e803
Showing 3 changed files with 64 additions and 23 deletions.
README.md (5 additions, 11 deletions)
@@ -21,23 +21,17 @@ echo ollama.generate("llama2", "How are you today?")
 # simple interface
 echo ollama.generate("llama2", "How are you today?")
 
-# nim typed interface
+# structured interface
 let req = GenerateReq(
   model: "llama2",
   prompt: "How are you today?",
-  system: option("Please talk like a pirate. You are longbeard the llama.")
+  options: option(ModelParameters(
+    temperature: option(0.0f),
+  )),
+  system: option("Please talk like a pirate. You are Longbeard the llama.")
 )
 let resp = ollama.generate(req)
 echo "> " & resp.response
-
-# direct json interface
-let req = %*{
-  "model": "llama2",
-  "prompt": "How are you today?",
-  "system": "Please talk like a pirate. You are longbeard the llama."
-}
-let resp = ollama.generate(req)
-echo "> " & resp["response"].getStr
 ```
 
 # Testing
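The README snippet assumes an `ollama` client is already in scope. A minimal setup sketch — `newOllamaAPI()` is assumed here to be the client constructor, defaulting to a local Ollama endpoint:

```nim
import llama_leap, std/options

# Assumed constructor; connects to the default local Ollama server.
let ollama = newOllamaAPI()
```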
src/llama_leap.nim (22 additions, 8 deletions)
@@ -2,6 +2,7 @@ import curly, jsony, std/[strutils, json, options, strformat, os]
 
 ## ollama API Interface
 ## https://github.com/jmorganca/ollama/blob/main/docs/api.md
+## https://github.com/jmorganca/ollama/blob/main/api/types.go
 
 ## model parameters: https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values
 
@@ -10,17 +11,30 @@ type
     curlPool: CurlPool
     baseUrl: string
     curlTimeout: float32
+  ModelParameters* = ref object
+    mirostat*: Option[int]
+    mirostat_eta*: Option[float32]
+    mirostat_tau*: Option[float32]
+    num_ctx*: Option[int]
+    num_gqa*: Option[int]
+    num_gpu*: Option[int]
+    num_thread*: Option[int]
+    repeat_last_n*: Option[int]
+    repeat_penalty*: Option[float32]
+    temperature*: Option[float32]
+    seed*: Option[int]
+    # TODO rest
   GenerateReq* = ref object
     model*: string
     prompt*: string
-    images*: Option[seq[string]]   # list of base64 encoded images
-    format*: Option[string]        # optional format=json for a structured response
-    options*: Option[JsonNode]     # bag of model parameters
-    system*: Option[string]        # override modelfile system prompt
-    template_str*: Option[string]  # override modelfile template
-    context*: Option[seq[int]]     # conversation encoding from a previous response
-    stream: Option[bool]           # stream=false to get a single response
-    raw*: Option[bool]             # use raw=true if you are specifying a fully templated prompt
+    images*: Option[seq[string]]      # list of base64 encoded images
+    format*: Option[string]           # optional format=json for a structured response
+    options*: Option[ModelParameters] # bag of model parameters
+    system*: Option[string]           # override modelfile system prompt
+    template_str*: Option[string]     # override modelfile template
+    context*: Option[seq[int]]        # conversation encoding from a previous response
+    stream: Option[bool]              # stream=false to get a single response
+    raw*: Option[bool]                # use raw=true if you are specifying a fully templated prompt
   GenerateResp* = ref object
     model*: string
     created_at*: string
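Since every `ModelParameters` field is an `Option`, parameters the caller never sets can stay out of the request body entirely, leaving the server's defaults in effect. This is not necessarily how llama_leap itself serializes requests; the following is a minimal sketch of the technique using jsony's `dumpHook`, with a plain object and only two fields for brevity:

```nim
import std/options
import jsony

type Params = object
  temperature: Option[float32]
  seed: Option[int]

# Emit only the fields that were actually set, so the server
# falls back to its own defaults for everything else.
proc dumpHook(s: var string, v: Params) =
  var first = true
  s.add '{'
  for name, field in v.fieldPairs:
    if field.isSome:
      if not first: s.add ','
      first = false
      s.add name.toJson & ":" & field.get.toJson
  s.add '}'

echo Params(temperature: option(0.0f)).toJson()
# prints {"temperature":0.0} with seed omitted
```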
tests/test_llama_leap.nim (37 additions, 4 deletions)
@@ -1,7 +1,7 @@
 ## llama_leap API tests
 ## Ensure that ollama is running!
 
-import llama_leap, std/[unittest, json, options]
+import llama_leap, std/[unittest, json, options, strutils]
 
 suite "llama_leap":
   var ollama: OllamaAPI
@@ -19,15 +19,48 @@ suite "llama_leap":
     let req = GenerateReq(
       model: "llama2",
       prompt: "How are you today?",
+      options: option(ModelParameters(
+        temperature: option(0.0f),
+        seed: option(42)
+      )),
+      system: option("Please talk like a pirate. You are Longbeard the llama.")
     )
     let resp = ollama.generate(req)
-    echo "> " & resp.response
+    echo "> " & resp.response.strip()
 
   test "json /api/generate":
     let req = %*{
       "model": "llama2",
       "prompt": "How are you today?",
-      "system": "Please talk like a ninja. You are Sneaky the llama."
+      "system": "Please talk like a ninja. You are Sneaky the llama.",
+      "options": {
+        "temperature": 0.0
+      }
     }
     let resp = ollama.generate(req)
-    echo "> " & resp["response"].getStr
+    echo "> " & resp["response"].getStr.strip()
+
+  test "context":
+    let req = GenerateReq(
+      model: "llama2",
+      prompt: "How are you today?",
+      system: option("Please talk like a pirate. You are Longbeard the llama."),
+      options: option(ModelParameters(
+        temperature: option(0.0f),
+        seed: option(42)
+      )),
+    )
+    let resp = ollama.generate(req)
+    echo "1> " & resp.response.strip()
+
+    let req2 = GenerateReq(
+      model: "llama2",
+      prompt: "How are you today?",
+      context: option(resp.context),
+      options: option(ModelParameters(
+        temperature: option(0.0f),
+        seed: option(42)
+      )),
+    )
+    let resp2 = ollama.generate(req2)
+    echo "2> " & resp2.response.strip()
