forked from awaescher/OllamaSharp
-
Notifications
You must be signed in to change notification settings - Fork 0
/
CompletionConsole.cs
59 lines (47 loc) · 1.59 KB
/
CompletionConsole.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
using OllamaSharp;
using OllamaSharp.Models;
using OllamaSharp.Streamer;
using Spectre.Console;
using System.IO;
/// <summary>
/// Interactive console demo that sends single-turn completion prompts to a
/// user-selected Ollama model and prints the responses.
/// </summary>
public class CompletionConsole : OllamaConsole
{
    /// <summary>
    /// Initializes a new instance bound to the given Ollama API client.
    /// </summary>
    /// <param name="ollama">The API client used to talk to the Ollama server.</param>
    public CompletionConsole(IOllamaApiClient ollama)
        : base(ollama)
    {
    }

    /// <summary>
    /// Runs the interactive loop: the user picks a model, then each line of input
    /// is sent as a completion prompt. Typing "exit" (or submitting empty input)
    /// leaves the loop; "/show" prints the selected model's parameters instead.
    /// </summary>
    public override async Task Run()
    {
        // Fixed typo in the banner text: was "Prmopt demo".
        AnsiConsole.Write(new Rule("Prompt demo").LeftJustified());
        AnsiConsole.WriteLine();

        Ollama.SelectedModel = await SelectModel("Select a model you want to invoke:");

        if (!string.IsNullOrEmpty(Ollama.SelectedModel))
        {
            AnsiConsole.MarkupLine($"You are talking to [blue]{Ollama.SelectedModel}[/] now.");
            AnsiConsole.MarkupLine("[gray]Type \"[red]exit[/]\" to leave.[/]");

            string message;

            do
            {
                AnsiConsole.WriteLine();
                message = ReadInput();

                // string.Equals is null-safe; message.Equals(...) would throw
                // if ReadInput() ever returned null.
                if (string.Equals(message, "exit", StringComparison.OrdinalIgnoreCase))
                    break;

                if (string.Equals(message, "/show", StringComparison.OrdinalIgnoreCase))
                {
                    var response = await Ollama.ShowModelInformation(Ollama.SelectedModel, CancellationToken.None);
                    AnsiConsole.MarkupInterpolated($"[gray]Parameters:\n{response.Parameters}[/]");
                }
                else
                {
                    // Single-shot completion: no conversation context is passed,
                    // so each prompt is independent of previous ones.
                    var response = await Ollama.GetCompletion(message, null);
                    AnsiConsole.MarkupInterpolated($"[cyan]{response.Response ?? ""}[/]");

                    if (response.Metadata != null)
                    {
                        // EvalDuration is presumably in nanoseconds (hence / 1e9);
                        // NOTE(review): yields Infinity/NaN if EvalDuration is 0 — benign for display.
                        var tokensPerSecond = response.Metadata.EvalCount / (response.Metadata.EvalDuration / 1e9);
                        AnsiConsole.MarkupInterpolated($"[gray]TPS: {tokensPerSecond}[/]");
                    }

                    AnsiConsole.WriteLine();
                }
            } while (!string.IsNullOrEmpty(message));
        }
    }
}