Skip to content

Commit

Permalink
Merge pull request #147 from awaescher/docs
Browse files Browse the repository at this point in the history
Merge JD's docs
  • Loading branch information
awaescher authored Dec 9, 2024
2 parents 23f0d32 + c0b08b5 commit c6c6f98
Show file tree
Hide file tree
Showing 48 changed files with 1,600 additions and 852 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@ on:
branches: [ "main", "master" ]
paths: [ 'src/**' ]

jobs:

jobs:
build:

runs-on: ubuntu-latest
Expand Down Expand Up @@ -45,7 +45,7 @@ jobs:
run: dotnet build --no-restore --configuration=Release /p:Version=${{steps.gitversion.outputs.semVer}}

- name: Test
run: dotnet test --no-build --configuration=Release --verbosity normal
run: dotnet test --no-build --configuration=Release --verbosity normal --filter 'FullyQualifiedName!~FunctionalTests'

- name: pack nuget packages
run: dotnet pack --output nupkgs --configuration=Release --no-restore --no-build /p:PackageVersion=${{steps.gitversion.outputs.semVer}}
Expand All @@ -59,4 +59,4 @@ jobs:

- name: upload nuget package
if: github.event_name != 'pull_request'
run: dotnet nuget push nupkgs/OllamaSharp*.nupkg -k ${{ secrets.NUGET_API_KEY }} -s https://api.nuget.org/v3/index.json
run: dotnet nuget push nupkgs/OllamaSharp*.nupkg -k ${{ secrets.NUGET_API_KEY }} -s https://api.nuget.org/v3/index.json
40 changes: 40 additions & 0 deletions .github/workflows/docs.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
name: docfx Build and Deploy

on:
push:
branches: [ "main" ]
workflow_dispatch:

permissions:
actions: read
pages: write
id-token: write

concurrency:
group: "pages"
cancel-in-progress: false

jobs:
publish-docs:
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Dotnet Setup
uses: actions/setup-dotnet@v3
with:
dotnet-version: 8.x

- run: dotnet tool update -g docfx
- run: docfx ./docfx.json

- name: Upload artifact
uses: actions/upload-pages-artifact@v3
with:
path: './_site'
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@v4
6 changes: 5 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -396,4 +396,8 @@ FodyWeavers.xsd

# JetBrains Rider
*.sln.iml
/.idea
/.idea

# DocFX
_site/
api/
2 changes: 1 addition & 1 deletion demo/Demos/ToolConsole.cs
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ public override async Task Run()
}
}

private static IEnumerable<Tool> GetTools() => [new WeatherTool(), new NewsTool()];
private static Tool[] GetTools() => [new WeatherTool(), new NewsTool()];

private sealed class WeatherTool : Tool
{
Expand Down
9 changes: 9 additions & 0 deletions doc-template/public/main.css
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
.flex-row {
display: flex;
flex-direction: row;
}

.flex-column {
display: flex;
flex-direction: column;
}
10 changes: 10 additions & 0 deletions doc-template/public/main.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
export default {
defaultTheme: 'dark',
iconLinks: [
{
icon: 'github',
href: 'https://github.com/awaescher/OllamaSharp',
title: 'GitHub'
}
]
}
52 changes: 52 additions & 0 deletions docfx.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
{
"metadata": [
{
"src": [
{
"src": "./src",
"files": [
"**/*.csproj"
]
}
],
"dest": "api"
}
],
"build": {
"content": [
{
"files": [
"**/*.{md,yml}"
],
"exclude": [
"_site/**"
]
}
],
"markdownEngineProperties": {
"markdigExtensions": [
"CustomContainers"
]
},
"resource": [
{
"files": [
"images/**"
]
}
],
"output": "_site",
"template": [
"default",
"modern",
"doc-template"
],
"globalMetadata": {
"_appName": "OllamaSharp",
"_appTitle": "OllamaSharp",
"_appLogoPath": "images/0.png",
"_enableSearch": true,
"pdf": true
}
}
}
64 changes: 64 additions & 0 deletions docs/getting-started.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
# Getting Started

[OllamaSharp](https://github.com/awaescher/OllamaSharp) provides .NET bindings for the Ollama API, simplifying interactions with Ollama both locally and remotely. It provides asynchronous streaming, progress reporting and convenience classes and functions to simplify common use cases.

Getting started with OllamaSharp only requires a running Ollama server and a supported version of [.NET](https://dotnet.microsoft.com/en-us/download).

## Prerequisites

- [Ollama](https://ollama.com/)
- [.NET](https://dotnet.microsoft.com/en-us/download)

## Pulling a model

To use Ollama, you will need to specify a large language model to talk with. You can download a model from the [Ollama model hub](https://ollama.com/models). Below is a code snippet illustrating how to connect to an Ollama server and pull a model from there:

```csharp
using OllamaSharp;

// if you are running Ollama locally on the default port:
var uri = new Uri("http://localhost:11434");
var ollama = new OllamaApiClient(uri);

// pull the model, and print the status of the pull operation.
await foreach (var status in ollama.PullModelAsync("llama3.2-vision"))
Console.WriteLine($"{status.Percent}% {status.Status}");

Console.WriteLine("Model pulled successfully.");
```

This should result in an output like this:

```
100% pulling manifest
100% pulling 11f274007f09
100% pulling ece5e659647a
100% pulling 715415638c9c
100% pulling 0b4284c1f870
100% pulling fefc914e46e6
100% pulling fbd313562bb7
100% verifying sha256 digest
100% writing manifest
100% success
Model pulled successfully.
```

## Talking to a model

After obtaining a model, you can begin interacting with Ollama. The following code snippet demonstrates how to connect to an Ollama server, load a model, and initiate a conversation:

```csharp
using OllamaSharp;

var uri = new Uri("http://localhost:11434");
var model = "llama3.2-vision";

var ollama = new OllamaApiClient(uri, model);

var request = "Write a deep, beautiful song for me about AI and the future.";

await foreach (var stream in ollama.GenerateAsync(request))
Console.Write(stream.Response);
```

The model's answer should be streamed directly into your Console window.
5 changes: 5 additions & 0 deletions docs/introduction.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# Introduction

[Ollama](https://ollama.com/) is a [Go](https://go.dev/)-based, open-source server for interacting with local large language models using Georgi Gerganov's [llama.cpp](https://github.com/ggerganov/llama.cpp) library. Ollama provides first-class support for various models, including [llama3.2](https://ollama.com/library/llama3.2), [phi3.5](https://ollama.com/library/phi3.5), [mistral](https://ollama.com/library/mistral), and many more. It provides support for pulling, running, creating, pushing, and interacting with models.

[OllamaSharp](https://github.com/awaescher/OllamaSharp) provides .NET bindings for the Ollama API, simplifying interactions with Ollama both locally and remotely. It provides asynchronous streaming, progress reporting and convenience classes and functions to simplify common use cases.
4 changes: 4 additions & 0 deletions docs/toc.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
- name: Introduction
href: introduction.md
- name: Getting Started
href: getting-started.md
Binary file added images/0.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added images/0@2x.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added images/logo.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added images/logo@2x.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
53 changes: 53 additions & 0 deletions index.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
---
_layout: landing
---

::::flex-row

:::col

![Ollama Logo](images/[email protected])![.NET Logo](images/[email protected])

# Build AI-powered applications with Ollama and .NET 🦙

OllamaSharp provides .NET bindings for the [Ollama API](https://github.com/jmorganca/ollama/blob/main/docs/api.md), simplifying interactions with Ollama both locally and remotely.

Provides support for interacting with Ollama directly, or through the [Microsoft.Extensions.AI](https://devblogs.microsoft.com/dotnet/introducing-microsoft-extensions-ai-preview/)
and [Microsoft Semantic Kernel](https://github.com/microsoft/semantic-kernel/pull/7362) libraries.
:::
:::col

### Add OllamaSharp to your project
```bash
dotnet add package OllamaSharp
```

### Start talking to Ollama
```csharp
using OllamaSharp;

var uri = new Uri("http://localhost:11434");
var ollama = new OllamaApiClient(uri, "llama3.2");

// messages including their roles and tool calls will automatically
// be tracked within the chat object and are accessible via the Messages property
var chat = new Chat(ollama);

Console.WriteLine("You're now talking with Ollama. Hit Ctrl+C to exit.");

while (true)
{
Console.Write("You: ");
var input = Console.ReadLine();

Console.Write("Assistant: ");
	await foreach (var stream in chat.SendAsync(input))
Console.Write(stream);

Console.WriteLine("");
}
```

:::

::::
11 changes: 7 additions & 4 deletions src/AsyncEnumerableExtensions/ChatResponseStreamAppender.cs
Original file line number Diff line number Diff line change
Expand Up @@ -4,15 +4,16 @@
namespace OllamaSharp.AsyncEnumerableExtensions;

/// <summary>
/// Appender to stream IAsyncEnumerable(ChatResponseStream) to build up one single ChatDoneResponseStream object
/// Appender to stream <see cref="System.Collections.Generic.IAsyncEnumerable{ChatResponseStream}"/> to
/// build up one single <see cref="ChatDoneResponseStream"/> object
/// </summary>
public class ChatResponseStreamAppender : IAppender<ChatResponseStream?, ChatDoneResponseStream?>
internal class ChatResponseStreamAppender : IAppender<ChatResponseStream?, ChatDoneResponseStream?>
{
private readonly MessageBuilder _messageBuilder = new();
private ChatDoneResponseStream? _lastItem;

/// <summary>
/// Appends a given ChatResponseStream item to build a single return object
/// Appends a given <see cref="ChatResponseStream"/> item to build a single return object
/// </summary>
/// <param name="item">The item to append</param>
public void Append(ChatResponseStream? item)
Expand All @@ -24,8 +25,10 @@ public void Append(ChatResponseStream? item)
}

/// <summary>
/// Builds up one single ChatDoneResponseStream object from the previously streamed ChatResponseStream items
/// Builds up one single <see cref="ChatDoneResponseStream"/> object from the
/// previously streamed <see cref="ChatResponseStream"/> items
/// </summary>
/// <returns>The completed consolidated <see cref="ChatDoneResponseStream"/> object</returns>
public ChatDoneResponseStream? Complete()
{
if (_lastItem is null)
Expand Down
11 changes: 7 additions & 4 deletions src/AsyncEnumerableExtensions/GenerateResponseStreamAppender.cs
Original file line number Diff line number Diff line change
Expand Up @@ -5,15 +5,16 @@
namespace OllamaSharp.AsyncEnumerableExtensions;

/// <summary>
/// Appender to stream IAsyncEnumerable(GenerateResponseStream) to build up one single GenerateDoneResponseStream object
/// Appender to stream <see cref="System.Collections.Generic.IAsyncEnumerable{GenerateResponseStream}"/>
/// to build up one single <see cref="GenerateDoneResponseStream"/> object
/// </summary>
public class GenerateResponseStreamAppender : IAppender<GenerateResponseStream?, GenerateDoneResponseStream?>
internal class GenerateResponseStreamAppender : IAppender<GenerateResponseStream?, GenerateDoneResponseStream?>
{
private readonly StringBuilder _builder = new();
private GenerateDoneResponseStream? _lastItem;

/// <summary>
/// Appends a given GenerateResponseStream item to build a single return object
/// Appends a given <see cref="GenerateResponseStream"/> item to build a single return object
/// </summary>
/// <param name="item">The item to append</param>
public void Append(GenerateResponseStream? item)
Expand All @@ -25,8 +26,10 @@ public void Append(GenerateResponseStream? item)
}

/// <summary>
/// Builds up one single GenerateDoneResponseStream object from the previously streamed GenerateResponseStream items
/// Builds up one single <see cref="GenerateDoneResponseStream"/> object
/// from the previously streamed <see cref="GenerateResponseStream"/> items
/// </summary>
/// <returns>The completed, consolidated <see cref="GenerateDoneResponseStream"/> object</returns>
public GenerateDoneResponseStream? Complete()
{
if (_lastItem is null)
Expand Down
2 changes: 1 addition & 1 deletion src/AsyncEnumerableExtensions/IAppender.cs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ namespace OllamaSharp.AsyncEnumerableExtensions;
/// </summary>
/// <typeparam name="Tin">The type of the items of the IAsyncEnumerable</typeparam>
/// <typeparam name="Tout">The return type after the IAsyncEnumerable was streamed to the end</typeparam>
public interface IAppender<in Tin, out Tout>
internal interface IAppender<in Tin, out Tout>
{
/// <summary>
/// Appends an item to build up the return value
Expand Down
4 changes: 2 additions & 2 deletions src/AsyncEnumerableExtensions/IAsyncEnumerableExtensions.cs
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ public static partial class IAsyncEnumerableExtensions
/// </summary>
/// <param name="stream">The IAsyncEnumerable to stream</param>
/// <param name="itemCallback">An optional callback to additionally process every single item from the IAsyncEnumerable</param>
/// <returns>A single response stream appened from every IAsyncEnumerable item</returns>
/// <returns>A single response stream appended from every IAsyncEnumerable item</returns>
public static Task<string> StreamToEndAsync(this IAsyncEnumerable<string> stream, Action<string>? itemCallback = null)
=> stream.StreamToEndAsync(new StringAppender(), itemCallback);

Expand Down Expand Up @@ -48,7 +48,7 @@ public static Task<string> StreamToEndAsync(this IAsyncEnumerable<string> stream
/// <param name="appender">The appender instance used to build up one single response value</param>
/// <param name="itemCallback">An optional callback to additionally process every single item from the IAsyncEnumerable</param>
/// <returns>A single ChatDoneResponseStream built up from every single IAsyncEnumerable item</returns>
public static async Task<Tout> StreamToEndAsync<Tin, Tout>(this IAsyncEnumerable<Tin> stream, IAppender<Tin, Tout> appender, Action<Tin>? itemCallback = null)
internal static async Task<Tout> StreamToEndAsync<Tin, Tout>(this IAsyncEnumerable<Tin> stream, IAppender<Tin, Tout> appender, Action<Tin>? itemCallback = null)
{
await foreach (var item in stream.ConfigureAwait(false))
{
Expand Down
Loading

0 comments on commit c6c6f98

Please sign in to comment.