The log levels defined on the llama.cpp and LlamaSharp sides were no longer aligned (issue #995)
LoicDagnas committed Nov 26, 2024
1 parent 5bce923 commit 8983bbf
Showing 2 changed files with 89 additions and 12 deletions.
63 changes: 63 additions & 0 deletions LLama.Unittest/LLamaContextWithCustomLoggerTests.cs
@@ -0,0 +1,63 @@
+using LLama.Common;
+using LLama.Native;
+using Microsoft.Extensions.Logging;
+
+namespace LLama.Unittest
+{
+    public sealed class LLamaContextWithCustomLoggerTests
+        : IDisposable
+    {
+        private sealed class CustomLogger : ILogger
+        {
+            public IDisposable? BeginScope<TState>(TState state) where TState : notnull => default;
+
+            public void Log<TState>(
+                LogLevel logLevel,
+                EventId eventId,
+                TState state,
+                Exception? exception,
+                Func<TState, Exception, string> formatter)
+            {
+            }
+
+            public bool IsEnabled(LogLevel logLevel) => true;
+        }
+
+        private readonly LLamaWeights _weights;
+        private readonly LLamaContext _context;
+
+        public LLamaContextWithCustomLoggerTests()
+        {
+            var @params = new ModelParams(Constants.GenerativeModelPath)
+            {
+                ContextSize = 128,
+                GpuLayerCount = Constants.CIGpuLayerCount,
+            };
+
+            // This unit test used to fail when loading the weights with such a naive logger set.
+            //
+            // See https://github.com/SciSharp/LLamaSharp/issues/995
+            //
+            // So the unit test here doesn't check that the logger is actually used
+            // but at least that setting one doesn't crash the weights load.
+            NativeLogConfig.llama_log_set(new CustomLogger());
+
+            _weights = LLamaWeights.LoadFromFile(@params);
+            _context = _weights.CreateContext(@params);
+        }
+
+        public void Dispose()
+        {
+            _weights.Dispose();
+            _context.Dispose();
+        }
+
+        [Fact]
+        public void CheckProperties()
+        {
+            Assert.Equal(128u, _context.ContextSize);
+            Assert.Equal(2048, _context.EmbeddingSize);
+            Assert.Equal(128256, _context.VocabCount);
+        }
+    }
+}
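For context (not part of this commit): a minimal sketch of how an application might route llama.cpp's native log output through Microsoft.Extensions.Logging before loading any weights, mirroring the test above. It assumes the Microsoft.Extensions.Logging.Console package is referenced and uses a hypothetical "model.gguf" path; NativeLogConfig.llama_log_set, ModelParams and LLamaWeights are the same APIs used in the test.

using LLama;
using LLama.Common;
using LLama.Native;
using Microsoft.Extensions.Logging;

// Build a standard console logger (requires the Microsoft.Extensions.Logging.Console package).
using var loggerFactory = LoggerFactory.Create(builder => builder.AddConsole());
var logger = loggerFactory.CreateLogger("llama.cpp");

// Register the logger before loading weights, as in the unit test above.
// Before this commit, loading weights with a logger set could fail because
// the managed LLamaLogLevel values no longer matched the native ones (issue #995).
NativeLogConfig.llama_log_set(logger);

// "model.gguf" is a placeholder path for illustration only.
var parameters = new ModelParams("model.gguf") { ContextSize = 128 };
using var weights = LLamaWeights.LoadFromFile(parameters);
using var context = weights.CreateContext(parameters);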
38 changes: 26 additions & 12 deletions LLama/Native/LLamaLogLevel.cs
@@ -4,29 +4,41 @@
 namespace LLama.Native
 {
     /// <summary>
-    /// Severity level of a log message
+    /// Severity level of a log message. This enum should always be aligned with
+    /// the one defined on llama.cpp side at
+    /// https://github.com/ggerganov/llama.cpp/blob/0eb4e12beebabae46d37b78742f4c5d4dbe52dc1/ggml/include/ggml.h#L559
     /// </summary>
     public enum LLamaLogLevel
     {
         /// <summary>
-        /// Logs that highlight when the current flow of execution is stopped due to a failure.
+        /// Logs are never written.
         /// </summary>
-        Error = 2,
+        None = 0,
 
         /// <summary>
-        /// Logs that highlight an abnormal or unexpected event in the application flow, but do not otherwise cause the application execution to stop.
+        /// Logs that are used for interactive investigation during development.
         /// </summary>
-        Warning = 3,
+        Debug = 1,
 
         /// <summary>
         /// Logs that track the general flow of the application.
         /// </summary>
-        Info = 4,
+        Info = 2,
 
         /// <summary>
-        /// Logs that are used for interactive investigation during development.
+        /// Logs that highlight an abnormal or unexpected event in the application flow, but do not otherwise cause the application execution to stop.
         /// </summary>
-        Debug = 5,
+        Warning = 3,
 
+        /// <summary>
+        /// Logs that highlight when the current flow of execution is stopped due to a failure.
+        /// </summary>
+        Error = 4,
+
+        /// <summary>
+        /// Continue log level is equivalent to None in the way it is used in llama.cpp.
+        /// </summary>
+        Continue = 5,
     }
 
     internal static class LLamaLogLevelExtensions
@@ -35,12 +47,14 @@ public static LogLevel ToLogLevel(this LLamaLogLevel llama)
         {
             return (llama) switch
             {
-                LLamaLogLevel.Error => LogLevel.Error,
-                LLamaLogLevel.Warning => LogLevel.Warning,
-                LLamaLogLevel.Info => LogLevel.Information,
+                LLamaLogLevel.None => LogLevel.None,
                 LLamaLogLevel.Debug => LogLevel.Debug,
+                LLamaLogLevel.Info => LogLevel.Information,
+                LLamaLogLevel.Warning => LogLevel.Warning,
+                LLamaLogLevel.Error => LogLevel.Error,
+                LLamaLogLevel.Continue => LogLevel.None,
                 _ => throw new ArgumentOutOfRangeException(nameof(llama), llama, null)
             };
         }
     }
 }
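For reference (not part of this commit): the new numbering mirrors the ggml_log_level enum in ggml.h at the commit linked in the summary, which, as far as I can tell, defines NONE = 0, DEBUG = 1, INFO = 2, WARN = 3, ERROR = 4 and CONT = 5. A hedged sketch of a sanity-check test that would pin the managed enum to those values; the GGML_LOG_LEVEL_* names in the comments are assumptions taken from that header and should be re-checked against it:

using LLama.Native;
using Xunit;

public class LLamaLogLevelAlignmentTests
{
    // Pins the managed enum to the numeric values used by llama.cpp/ggml,
    // so a future renumbering on either side breaks a test instead of the logging.
    [Fact]
    public void ValuesMatchGgmlLogLevel()
    {
        Assert.Equal(0, (int)LLamaLogLevel.None);     // GGML_LOG_LEVEL_NONE
        Assert.Equal(1, (int)LLamaLogLevel.Debug);    // GGML_LOG_LEVEL_DEBUG
        Assert.Equal(2, (int)LLamaLogLevel.Info);     // GGML_LOG_LEVEL_INFO
        Assert.Equal(3, (int)LLamaLogLevel.Warning);  // GGML_LOG_LEVEL_WARN
        Assert.Equal(4, (int)LLamaLogLevel.Error);    // GGML_LOG_LEVEL_ERROR
        Assert.Equal(5, (int)LLamaLogLevel.Continue); // GGML_LOG_LEVEL_CONT
    }
}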
