Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merge JD's docs #147

Merged
merged 20 commits into from
Dec 9, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
20 commits
Select commit Hold shift + click to select a range
21073a3
docs: Updated XML comments to contain types for most methods. Updated…
JerrettDavis Nov 13, 2024
ce5c604
refactor: made several classes internal and added documentation.
JerrettDavis Nov 13, 2024
5fa1722
docs: pushing static files for documentation.
JerrettDavis Nov 13, 2024
eff93a2
ci: moved docs to its own workflow
JerrettDavis Nov 13, 2024
737a12a
Revert "docs: pushing static files for documentation."
JerrettDavis Nov 13, 2024
7f1d966
docs: adding in some base doc files to populate.
JerrettDavis Nov 14, 2024
849a507
tests: Added functional tests project
JerrettDavis Nov 14, 2024
ed7dd4f
ci: Working on fixing tests due to swap to internal classes.
JerrettDavis Nov 14, 2024
a5ef8e3
ci: corrected friend public key.
JerrettDavis Nov 14, 2024
ced91b5
ci: excluding functional tests from github ci.
JerrettDavis Nov 14, 2024
252180d
docs: update documentation stubs and styling
JerrettDavis Nov 16, 2024
d84fc58
docs: fixed pathing for documentation generation
JerrettDavis Nov 16, 2024
51a7945
docs: changing toc path to direct html link
JerrettDavis Nov 16, 2024
7c32501
fix: updated documentation typos and set PullModel test to ignored in…
JerrettDavis Nov 21, 2024
c99224b
Use the default Ollama icon
awaescher Nov 25, 2024
a884131
Remove the need for an icon
awaescher Nov 25, 2024
c1fb75c
Update docs
awaescher Nov 25, 2024
f9db5a3
Format whitespace
awaescher Nov 25, 2024
7b7915e
Move functional tests into the tests project
awaescher Dec 9, 2024
c0b08b5
Format tests
awaescher Dec 9, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@ on:
branches: [ "main", "master" ]
paths: [ 'src/**' ]

jobs:

jobs:
build:

runs-on: ubuntu-latest
Expand Down Expand Up @@ -45,7 +45,7 @@ jobs:
run: dotnet build --no-restore --configuration=Release /p:Version=${{steps.gitversion.outputs.semVer}}

- name: Test
run: dotnet test --no-build --configuration=Release --verbosity normal
run: dotnet test --no-build --configuration=Release --verbosity normal --filter 'FullyQualifiedName!~FunctionalTests'

- name: pack nuget packages
run: dotnet pack --output nupkgs --configuration=Release --no-restore --no-build /p:PackageVersion=${{steps.gitversion.outputs.semVer}}
Expand All @@ -59,4 +59,4 @@ jobs:

- name: upload nuget package
if: github.event_name != 'pull_request'
run: dotnet nuget push nupkgs/OllamaSharp*.nupkg -k ${{ secrets.NUGET_API_KEY }} -s https://api.nuget.org/v3/index.json
run: dotnet nuget push nupkgs/OllamaSharp*.nupkg -k ${{ secrets.NUGET_API_KEY }} -s https://api.nuget.org/v3/index.json
40 changes: 40 additions & 0 deletions .github/workflows/docs.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
name: docfx Build and Deploy

on:
push:
branches: [ "main" ]
workflow_dispatch:

permissions:
actions: read
pages: write
id-token: write

concurrency:
group: "pages"
cancel-in-progress: false

jobs:
publish-docs:
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Dotnet Setup
uses: actions/setup-dotnet@v3
with:
dotnet-version: 8.x

- run: dotnet tool update -g docfx
- run: docfx ./docfx.json

- name: Upload artifact
uses: actions/upload-pages-artifact@v3
with:
path: './_site'
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@v4
6 changes: 5 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -396,4 +396,8 @@ FodyWeavers.xsd

# JetBrains Rider
*.sln.iml
/.idea
/.idea

# DocFX
_site/
api/
2 changes: 1 addition & 1 deletion demo/Demos/ToolConsole.cs
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ public override async Task Run()
}
}

private static IEnumerable<Tool> GetTools() => [new WeatherTool(), new NewsTool()];
private static Tool[] GetTools() => [new WeatherTool(), new NewsTool()];

private sealed class WeatherTool : Tool
{
Expand Down
9 changes: 9 additions & 0 deletions doc-template/public/main.css
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
.flex-row {
display: flex;
flex-direction: row;
}

.flex-column {
display: flex;
flex-direction: column;
}
10 changes: 10 additions & 0 deletions doc-template/public/main.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
export default {
defaultTheme: 'dark',
iconLinks: [
{
icon: 'github',
href: 'https://github.com/awaescher/OllamaSharp',
title: 'GitHub'
}
]
}
52 changes: 52 additions & 0 deletions docfx.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
{
"metadata": [
{
"src": [
{
"src": "./src",
"files": [
"**/*.csproj"
]
}
],
"dest": "api"
}
],
"build": {
"content": [
{
"files": [
"**/*.{md,yml}"
],
"exclude": [
"_site/**"
]
}
],
"markdownEngineProperties": {
"markdigExtensions": [
"CustomContainers"
]
},
"resource": [
{
"files": [
"images/**"
]
}
],
"output": "_site",
"template": [
"default",
"modern",
"doc-template"
],
"globalMetadata": {
"_appName": "OllamaSharp",
"_appTitle": "OllamaSharp",
"_appLogoPath": "images/0.png",
"_enableSearch": true,
"pdf": true
}
}
}
64 changes: 64 additions & 0 deletions docs/getting-started.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
# Getting Started

[OllamaSharp](https://github.com/awaescher/OllamaSharp) provides .NET bindings for the Ollama API, simplifying interactions with Ollama both locally and remotely. It provides asynchronous streaming, progress reporting and convenience classes and functions to simplify common use cases.

Getting started with OllamaSharp only requires a running Ollama server and a supported version of [.NET](https://dotnet.microsoft.com/en-us/download).

## Prerequisites

- [Ollama](https://ollama.com/)
- [.NET](https://dotnet.microsoft.com/en-us/download)

## Pulling a model

To use Ollama, you will need to specify a large language model to talk with. You can download a model from the [Ollama model hub](https://ollama.com/models). Below is a code snippet illustrating how to connect to an Ollama server and pull a model from there:

```csharp
using OllamaSharp;

// if you are running Ollama locally on the default port:
var uri = new Uri("http://localhost:11434");
var ollama = new OllamaApiClient(uri);

// pull the model, and print the status of the pull operation.
await foreach (var status in ollama.PullModelAsync("llama3.2-vision"))
Console.WriteLine($"{status.Percent}% {status.Status}");

Console.WriteLine("Model pulled successfully.");
```

This should result in an output like this:

```
100% pulling manifest
100% pulling 11f274007f09
100% pulling ece5e659647a
100% pulling 715415638c9c
100% pulling 0b4284c1f870
100% pulling fefc914e46e6
100% pulling fbd313562bb7
100% verifying sha256 digest
100% writing manifest
100% success
Model pulled successfully.
```

## Talking to a model

After obtaining a model, you can begin interacting with Ollama. The following code snippet demonstrates how to connect to an Ollama server, load a model, and initiate a conversation:

```csharp
using OllamaSharp;

var uri = new Uri("http://localhost:11434");
var model = "llama3.2-vision";

var ollama = new OllamaApiClient(uri, model);

var request = "Write a deep, beautiful song for me about AI and the future.";

await foreach (var stream in ollama.GenerateAsync(request))
Console.Write(stream.Response);
```

The model's answer should be streamed directly into your Console window.
5 changes: 5 additions & 0 deletions docs/introduction.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# Introduction

[Ollama](https://ollama.com/) is a [Go](https://go.dev/)-based, open-source server for interacting with local large language models using Georgi Gerganov's [llama.cpp](https://github.com/ggerganov/llama.cpp) library. Ollama provides first-class support for various models, including [llama3.2](https://ollama.com/library/llama3.2), [phi3.5](https://ollama.com/library/phi3.5), [mistral](https://ollama.com/library/mistral), and many more. It provides support for pulling, running, creating, pushing, and interacting with models.

[OllamaSharp](https://github.com/awaescher/OllamaSharp) provides .NET bindings for the Ollama API, simplifying interactions with Ollama both locally and remotely. It provides asynchronous streaming, progress reporting and convenience classes and functions to simplify common use cases.
4 changes: 4 additions & 0 deletions docs/toc.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
- name: Introduction
href: introduction.md
- name: Getting Started
href: getting-started.md
Binary file added images/0.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added images/[email protected]
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added images/logo.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added images/[email protected]
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
53 changes: 53 additions & 0 deletions index.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
---
_layout: landing
---

::::flex-row

:::col

![Ollama Logo](images/[email protected]) ➕ ![.NET Logo](images/[email protected])

# Build AI-powered applications with Ollama and .NET 🦙

OllamaSharp provides .NET bindings for the [Ollama API](https://github.com/jmorganca/ollama/blob/main/docs/api.md), simplifying interactions with Ollama both locally and remotely.

Provides support for interacting with Ollama directly, or through the [Microsoft.Extensions.AI](https://devblogs.microsoft.com/dotnet/introducing-microsoft-extensions-ai-preview/)
and [Microsoft Semantic Kernel](https://github.com/microsoft/semantic-kernel/pull/7362) libraries.
:::
:::col

### Add OllamaSharp to your project
```bash
dotnet add package OllamaSharp
```

### Start talking to Ollama
```csharp
using OllamaSharp;

var uri = new Uri("http://localhost:11434");
var ollama = new OllamaApiClient(uri, "llama3.2");

// messages including their roles and tool calls will automatically
// be tracked within the chat object and are accessible via the Messages property
var chat = new Chat(ollama);

Console.WriteLine("You're now talking with Ollama. Hit Ctrl+C to exit.");

while (true)
{
Console.Write("You: ");
var input = Console.ReadLine();

Console.Write("Assistant: ");
	await foreach (var stream in chat.SendAsync(input))
Console.Write(stream);

Console.WriteLine("");
}
```

:::

::::
11 changes: 7 additions & 4 deletions src/AsyncEnumerableExtensions/ChatResponseStreamAppender.cs
Original file line number Diff line number Diff line change
Expand Up @@ -4,15 +4,16 @@
namespace OllamaSharp.AsyncEnumerableExtensions;

/// <summary>
/// Appender to stream IAsyncEnumerable(ChatResponseStream) to build up one single ChatDoneResponseStream object
/// Appender to stream <see cref="System.Collections.Generic.IAsyncEnumerable{ChatResponseStream}"/> to
/// build up one single <see cref="ChatDoneResponseStream"/> object
/// </summary>
public class ChatResponseStreamAppender : IAppender<ChatResponseStream?, ChatDoneResponseStream?>
internal class ChatResponseStreamAppender : IAppender<ChatResponseStream?, ChatDoneResponseStream?>
{
private readonly MessageBuilder _messageBuilder = new();
private ChatDoneResponseStream? _lastItem;

/// <summary>
/// Appends a given ChatResponseStream item to build a single return object
/// Appends a given <see cref="ChatResponseStream"/> item to build a single return object
/// </summary>
/// <param name="item">The item to append</param>
public void Append(ChatResponseStream? item)
Expand All @@ -24,8 +25,10 @@ public void Append(ChatResponseStream? item)
}

/// <summary>
/// Builds up one single ChatDoneResponseStream object from the previously streamed ChatResponseStream items
/// Builds up one single <see cref="ChatDoneResponseStream"/> object from the
/// previously streamed <see cref="ChatResponseStream"/> items
/// </summary>
/// <returns>The completed consolidated <see cref="ChatDoneResponseStream"/> object</returns>
public ChatDoneResponseStream? Complete()
{
if (_lastItem is null)
Expand Down
11 changes: 7 additions & 4 deletions src/AsyncEnumerableExtensions/GenerateResponseStreamAppender.cs
Original file line number Diff line number Diff line change
Expand Up @@ -5,15 +5,16 @@
namespace OllamaSharp.AsyncEnumerableExtensions;

/// <summary>
/// Appender to stream IAsyncEnumerable(GenerateResponseStream) to build up one single GenerateDoneResponseStream object
/// Appender to stream <see cref="System.Collections.Generic.IAsyncEnumerable{GenerateResponseStream}"/>
/// to build up one single <see cref="GenerateDoneResponseStream"/> object
/// </summary>
public class GenerateResponseStreamAppender : IAppender<GenerateResponseStream?, GenerateDoneResponseStream?>
internal class GenerateResponseStreamAppender : IAppender<GenerateResponseStream?, GenerateDoneResponseStream?>
{
private readonly StringBuilder _builder = new();
private GenerateDoneResponseStream? _lastItem;

/// <summary>
/// Appends a given GenerateResponseStream item to build a single return object
/// Appends a given <see cref="GenerateResponseStream"/> item to build a single return object
/// </summary>
/// <param name="item">The item to append</param>
public void Append(GenerateResponseStream? item)
Expand All @@ -25,8 +26,10 @@ public void Append(GenerateResponseStream? item)
}

/// <summary>
/// Builds up one single GenerateDoneResponseStream object from the previously streamed GenerateResponseStream items
/// Builds up one single <see cref="GenerateDoneResponseStream"/> object
/// from the previously streamed <see cref="GenerateResponseStream"/> items
/// </summary>
/// <returns>The completed, consolidated <see cref="GenerateDoneResponseStream"/> object</returns>
public GenerateDoneResponseStream? Complete()
{
if (_lastItem is null)
Expand Down
2 changes: 1 addition & 1 deletion src/AsyncEnumerableExtensions/IAppender.cs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ namespace OllamaSharp.AsyncEnumerableExtensions;
/// </summary>
/// <typeparam name="Tin">The type of the items of the IAsyncEnumerable</typeparam>
/// <typeparam name="Tout">The return type after the IAsyncEnumerable was streamed to the end</typeparam>
public interface IAppender<in Tin, out Tout>
internal interface IAppender<in Tin, out Tout>
{
/// <summary>
/// Appends an item to build up the return value
Expand Down
4 changes: 2 additions & 2 deletions src/AsyncEnumerableExtensions/IAsyncEnumerableExtensions.cs
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ public static partial class IAsyncEnumerableExtensions
/// </summary>
/// <param name="stream">The IAsyncEnumerable to stream</param>
/// <param name="itemCallback">An optional callback to additionally process every single item from the IAsyncEnumerable</param>
/// <returns>A single response stream appened from every IAsyncEnumerable item</returns>
/// <returns>A single response stream appended from every IAsyncEnumerable item</returns>
public static Task<string> StreamToEndAsync(this IAsyncEnumerable<string> stream, Action<string>? itemCallback = null)
=> stream.StreamToEndAsync(new StringAppender(), itemCallback);

Expand Down Expand Up @@ -48,7 +48,7 @@ public static Task<string> StreamToEndAsync(this IAsyncEnumerable<string> stream
/// <param name="appender">The appender instance used to build up one single response value</param>
/// <param name="itemCallback">An optional callback to additionally process every single item from the IAsyncEnumerable</param>
/// <returns>A single ChatDoneResponseStream built up from every single IAsyncEnumerable item</returns>
public static async Task<Tout> StreamToEndAsync<Tin, Tout>(this IAsyncEnumerable<Tin> stream, IAppender<Tin, Tout> appender, Action<Tin>? itemCallback = null)
internal static async Task<Tout> StreamToEndAsync<Tin, Tout>(this IAsyncEnumerable<Tin> stream, IAppender<Tin, Tout> appender, Action<Tin>? itemCallback = null)
{
await foreach (var item in stream.ConfigureAwait(false))
{
Expand Down
Loading
Loading