diff --git a/LLama.Examples/Program.cs b/LLama.Examples/Program.cs
index f8c7ba608..b24ef406b 100644
--- a/LLama.Examples/Program.cs
+++ b/LLama.Examples/Program.cs
@@ -16,11 +16,20 @@ __ __ ____ __
""");
-// Configure native library to use
+// Configure native library to use. This must be done before any other llama.cpp methods are called!
NativeLibraryConfig
.Instance
- .WithCuda()
- .WithLogs(LLamaLogLevel.Info);
+ .WithCuda();
+
+// Configure logging. Change this to `true` to see log messages from llama.cpp
+var showLLamaCppLogs = false;
+NativeLibraryConfig
+ .Instance
+ .WithLogCallback((level, message) =>
+ {
+ if (showLLamaCppLogs)
+ Console.WriteLine($"[llama {level}]: {message.TrimEnd('\n')}");
+ });
// Calling this method forces loading to occur now.
NativeApi.llama_empty_call();
diff --git a/LLama/GlobalSuppressions.cs b/LLama/GlobalSuppressions.cs
index 2053bc259..4122be700 100644
--- a/LLama/GlobalSuppressions.cs
+++ b/LLama/GlobalSuppressions.cs
@@ -8,3 +8,5 @@
[assembly: SuppressMessage("Interoperability", "CA1401:P/Invokes should not be visible", Justification = "LLamaSharp intentionally exports the native llama.cpp API")]
[assembly: SuppressMessage("Style", "IDE0070:Use 'System.HashCode'", Justification = "Not compatible with netstandard2.0")]
+
+[assembly: SuppressMessage("Interoperability", "SYSLIB1054:Use 'LibraryImportAttribute' instead of 'DllImportAttribute' to generate P/Invoke marshalling code at compile time", Justification = "Not compatible with netstandard2.0")]
diff --git a/LLama/Native/LLamaLogLevel.cs b/LLama/Native/LLamaLogLevel.cs
index 39e4545ab..07aca59ed 100644
--- a/LLama/Native/LLamaLogLevel.cs
+++ b/LLama/Native/LLamaLogLevel.cs
@@ -1,8 +1,11 @@
-namespace LLama.Native
-{
- /// <summary>
- /// Severity level of a log message
- /// </summary>
+using System;
+using Microsoft.Extensions.Logging;
+
+namespace LLama.Native
+{
+ /// <summary>
+ /// Severity level of a log message
+ /// </summary>
public enum LLamaLogLevel
{
///
@@ -25,4 +28,19 @@ public enum LLamaLogLevel
///
Debug = 5,
}
+
+ internal static class LLamaLogLevelExtensions
+ {
+ public static LogLevel ToLogLevel(this LLamaLogLevel llama)
+ {
+ return (llama) switch
+ {
+ LLamaLogLevel.Error => LogLevel.Error,
+ LLamaLogLevel.Warning => LogLevel.Warning,
+ LLamaLogLevel.Info => LogLevel.Information,
+ LLamaLogLevel.Debug => LogLevel.Debug,
+ _ => throw new ArgumentOutOfRangeException(nameof(llama), llama, null)
+ };
+ }
+ }
}
diff --git a/LLama/Native/NativeApi.Load.cs b/LLama/Native/NativeApi.Load.cs
index b5b3a5308..4b4beea2e 100644
--- a/LLama/Native/NativeApi.Load.cs
+++ b/LLama/Native/NativeApi.Load.cs
@@ -17,6 +17,9 @@ static NativeApi()
// which llama.dll is used.
SetDllImportResolver();
+ // Set flag to indicate that this point has been passed. No native library config can be done after this point.
+ NativeLibraryConfig.LibraryHasLoaded = true;
+
// Immediately make a call which requires loading the llama DLL. This method call
// can't fail unless the DLL hasn't been loaded.
try
@@ -34,6 +37,10 @@ static NativeApi()
"to specify it at the very beginning of your code. For more informations about compilation, please refer to LLamaSharp repo on github.\n");
}
+ // Now that the "loaded" flag is set configure logging in llama.cpp
+ if (NativeLibraryConfig.Instance.LogCallback != null)
+ NativeLogConfig.llama_log_set(NativeLibraryConfig.Instance.LogCallback);
+
// Init llama.cpp backend
llama_backend_init();
}
@@ -80,47 +87,10 @@ private static void SetDllImportResolver()
private static void Log(string message, LLamaLogLevel level)
{
- if (!enableLogging)
- return;
-
- if ((int)level > (int)logLevel)
- return;
+ if (!message.EndsWith("\n"))
+ message += "\n";
- var fg = Console.ForegroundColor;
- var bg = Console.BackgroundColor;
- try
- {
- ConsoleColor color;
- string levelPrefix;
- if (level == LLamaLogLevel.Debug)
- {
- color = ConsoleColor.Cyan;
- levelPrefix = "[Debug]";
- }
- else if (level == LLamaLogLevel.Info)
- {
- color = ConsoleColor.Green;
- levelPrefix = "[Info]";
- }
- else if (level == LLamaLogLevel.Error)
- {
- color = ConsoleColor.Red;
- levelPrefix = "[Error]";
- }
- else
- {
- color = ConsoleColor.Yellow;
- levelPrefix = "[UNK]";
- }
-
- Console.ForegroundColor = color;
- Console.WriteLine($"{loggingPrefix} {levelPrefix} {message}");
- }
- finally
- {
- Console.ForegroundColor = fg;
- Console.BackgroundColor = bg;
- }
+ NativeLibraryConfig.Instance.LogCallback?.Invoke(level, message);
}
#region CUDA version
@@ -362,8 +332,6 @@ private static IntPtr TryLoadLibraries(LibraryName lib)
{
#if NET6_0_OR_GREATER
var configuration = NativeLibraryConfig.CheckAndGatherDescription(lib);
- enableLogging = configuration.Logging;
- logLevel = configuration.LogLevel;
// Set the flag to ensure the NativeLibraryConfig can no longer be modified
NativeLibraryConfig.LibraryHasLoaded = true;
@@ -455,8 +423,5 @@ string TryFindPath(string filename)
internal const string libraryName = "llama";
internal const string llavaLibraryName = "llava_shared";
private const string cudaVersionFile = "version.json";
- private const string loggingPrefix = "[LLamaSharp Native]";
- private static bool enableLogging = false;
- private static LLamaLogLevel logLevel = LLamaLogLevel.Info;
}
}
diff --git a/LLama/Native/NativeApi.cs b/LLama/Native/NativeApi.cs
index d46d48a20..41c1809e0 100644
--- a/LLama/Native/NativeApi.cs
+++ b/LLama/Native/NativeApi.cs
@@ -5,13 +5,6 @@
namespace LLama.Native
{
- /// <summary>
- /// Callback from llama.cpp with log messages
- /// </summary>
- /// <param name="level"></param>
- /// <param name="message"></param>
- public delegate void LLamaLogCallback(LLamaLogLevel level, string message);
-
///
/// Direct translation of the llama.cpp API
///
@@ -364,8 +357,11 @@ public static int llama_token_to_piece(SafeLlamaModelHandle model, LLamaToken ll
/// Register a callback to receive llama log messages
///
///
- [DllImport(libraryName, CallingConvention = CallingConvention.Cdecl)]
- public static extern void llama_log_set(LLamaLogCallback logCallback);
+ [Obsolete("Use `NativeLogConfig.llama_log_set` instead")]
+ public static void llama_log_set(NativeLogConfig.LLamaLogCallback logCallback)
+ {
+ NativeLogConfig.llama_log_set(logCallback);
+ }
///
/// Clear the KV cache
diff --git a/LLama/Native/NativeLibraryConfig.cs b/LLama/Native/NativeLibraryConfig.cs
index c08749ba9..ef7cd7c19 100644
--- a/LLama/Native/NativeLibraryConfig.cs
+++ b/LLama/Native/NativeLibraryConfig.cs
@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Linq;
+using Microsoft.Extensions.Logging;
namespace LLama.Native
{
@@ -9,18 +10,8 @@ namespace LLama.Native
/// Allows configuration of the native llama.cpp libraries to load and use.
/// All configuration must be done before using **any** other LLamaSharp methods!
///
- public sealed class NativeLibraryConfig
+ public sealed partial class NativeLibraryConfig
{
- /// <summary>
- /// Get the config instance
- /// </summary>
- public static NativeLibraryConfig Instance { get; } = new();
-
- /// <summary>
- /// Check if the native library has already been loaded. Configuration cannot be modified if this is true.
- /// </summary>
- public static bool LibraryHasLoaded { get; internal set; } = false;
-
private string? _libraryPath;
private string? _libraryPathLLava;
@@ -28,20 +19,12 @@ public sealed class NativeLibraryConfig
private AvxLevel _avxLevel;
private bool _allowFallback = true;
private bool _skipCheck = false;
- private bool _logging = false;
- private LLamaLogLevel _logLevel = LLamaLogLevel.Info;
/// <summary>
/// search directory -> priority level, 0 is the lowest.
/// </summary>
private readonly List<string> _searchDirectories = new List<string>();
- private static void ThrowIfLoaded()
- {
- if (LibraryHasLoaded)
- throw new InvalidOperationException("NativeLibraryConfig must be configured before using **any** other LLamaSharp methods!");
- }
-
#region configurators
///
/// Load a specified native library as backend for LLamaSharp.
@@ -117,35 +100,6 @@ public NativeLibraryConfig SkipCheck(bool enable = true)
return this;
}
- /// <summary>
- /// Whether to output the logs to console when loading the native library with your configuration.
- /// </summary>
- /// <param name="enable"></param>
- /// <returns></returns>
- /// <exception cref="InvalidOperationException">Thrown if `LibraryHasLoaded` is true.</exception>
- public NativeLibraryConfig WithLogs(bool enable)
- {
- ThrowIfLoaded();
-
- _logging = enable;
- return this;
- }
-
- /// <summary>
- /// Enable console logging with the specified log logLevel.
- /// </summary>
- /// <param name="logLevel"></param>
- /// <returns></returns>
- /// <exception cref="InvalidOperationException">Thrown if `LibraryHasLoaded` is true.</exception>
- public NativeLibraryConfig WithLogs(LLamaLogLevel logLevel = LLamaLogLevel.Info)
- {
- ThrowIfLoaded();
-
- _logging = true;
- _logLevel = logLevel;
- return this;
- }
-
///
/// Add self-defined search directories. Note that the file stucture of the added
/// directories must be the same as the default directory. Besides, the directory
@@ -196,8 +150,6 @@ internal static Description CheckAndGatherDescription(LibraryName library)
Instance._avxLevel,
Instance._allowFallback,
Instance._skipCheck,
- Instance._logging,
- Instance._logLevel,
Instance._searchDirectories.Concat(new[] { "./" }).ToArray()
);
}
@@ -279,7 +231,7 @@ public enum AvxLevel
Avx512,
}
- internal record Description(string? Path, LibraryName Library, bool UseCuda, AvxLevel AvxLevel, bool AllowFallback, bool SkipCheck, bool Logging, LLamaLogLevel LogLevel, string[] SearchDirectories)
+ internal record Description(string? Path, LibraryName Library, bool UseCuda, AvxLevel AvxLevel, bool AllowFallback, bool SkipCheck, string[] SearchDirectories)
{
public override string ToString()
{
@@ -301,14 +253,61 @@ public override string ToString()
$"- PreferredAvxLevel: {avxLevelString}\n" +
$"- AllowFallback: {AllowFallback}\n" +
$"- SkipCheck: {SkipCheck}\n" +
- $"- Logging: {Logging}\n" +
- $"- LogLevel: {LogLevel}\n" +
$"- SearchDirectories and Priorities: {searchDirectoriesString}";
}
}
}
#endif
+ public sealed partial class NativeLibraryConfig
+ {
+ /// <summary>
+ /// Get the config instance
+ /// </summary>
+ public static NativeLibraryConfig Instance { get; } = new();
+
+ /// <summary>
+ /// Check if the native library has already been loaded. Configuration cannot be modified if this is true.
+ /// </summary>
+ public static bool LibraryHasLoaded { get; internal set; }
+
+ internal NativeLogConfig.LLamaLogCallback? LogCallback;
+
+ private static void ThrowIfLoaded()
+ {
+ if (LibraryHasLoaded)
+ throw new InvalidOperationException("NativeLibraryConfig must be configured before using **any** other LLamaSharp methods!");
+ }
+
+ /// <summary>
+ /// Set the log callback that will be used for all llama.cpp log messages
+ /// </summary>
+ /// <param name="callback"></param>
+ /// <returns></returns>
+ public NativeLibraryConfig WithLogCallback(NativeLogConfig.LLamaLogCallback? callback)
+ {
+ ThrowIfLoaded();
+
+ LogCallback = callback;
+ return this;
+ }
+
+ /// <summary>
+ /// Set the log callback that will be used for all llama.cpp log messages
+ /// </summary>
+ /// <param name="logger"></param>
+ /// <returns></returns>
+ public NativeLibraryConfig WithLogCallback(ILogger? logger)
+ {
+ ThrowIfLoaded();
+
+ // Redirect to llama_log_set. This will wrap the logger in a delegate and bind that as the log callback instead.
+ NativeLogConfig.llama_log_set(logger);
+
+ return this;
+ }
+ }
+
internal enum LibraryName
{
Llama,
diff --git a/LLama/Native/NativeLogConfig.cs b/LLama/Native/NativeLogConfig.cs
new file mode 100644
index 000000000..ebcd23d47
--- /dev/null
+++ b/LLama/Native/NativeLogConfig.cs
@@ -0,0 +1,93 @@
+using System.Runtime.InteropServices;
+using System.Text;
+using System.Threading;
+using Microsoft.Extensions.Logging;
+
+namespace LLama.Native;
+
+/// <summary>
+/// Configure llama.cpp logging
+/// </summary>
+public static class NativeLogConfig
+{
+ /// <summary>
+ /// Callback from llama.cpp with log messages
+ /// </summary>
+ /// <param name="level"></param>
+ /// <param name="message"></param>
+ public delegate void LLamaLogCallback(LLamaLogLevel level, string message);
+
+ /// <summary>
+ /// Register a callback to receive llama log messages
+ /// </summary>
+ /// <param name="logCallback"></param>
+ [DllImport(NativeApi.libraryName, CallingConvention = CallingConvention.Cdecl, EntryPoint = "llama_log_set")]
+ private static extern void native_llama_log_set(LLamaLogCallback? logCallback);
+
+ /// <summary>
+ /// A GC handle for the current log callback to ensure the callback is not collected
+ /// </summary>
+ private static GCHandle? _currentLogCallbackHandle;
+
+ /// <summary>
+ /// Register a callback to receive llama log messages
+ /// </summary>
+ /// <param name="logCallback"></param>
+#pragma warning disable IDE1006 // Naming Styles (name imitates llama.cpp)
+ public static void llama_log_set(LLamaLogCallback? logCallback)
+#pragma warning restore IDE1006 // Naming Styles
+ {
+ if (NativeLibraryConfig.LibraryHasLoaded)
+ {
+ // The library is loaded, just pass the callback directly to llama.cpp
+ native_llama_log_set(logCallback);
+
+ // Save a GC handle, to ensure the callback is not collected
+ _currentLogCallbackHandle?.Free();
+ _currentLogCallbackHandle = null;
+ if (logCallback != null)
+ _currentLogCallbackHandle = GCHandle.Alloc(logCallback);
+ }
+ else
+ {
+ // We can't set the log method yet since that would cause the llama.dll to load.
+ // Instead configure it to be set when the native library loading is done
+ NativeLibraryConfig.Instance.WithLogCallback(logCallback);
+ }
+ }
+
+ /// <summary>
+ /// Register a callback to receive llama log messages
+ /// </summary>
+ /// <param name="logger"></param>
+#pragma warning disable IDE1006 // Naming Styles (name imitates llama.cpp)
+ public static void llama_log_set(ILogger? logger)
+#pragma warning restore IDE1006 // Naming Styles
+ {
+ // Clear the logger
+ if (logger == null)
+ {
+ llama_log_set((LLamaLogCallback?)null);
+ return;
+ }
+
+ var builderThread = new ThreadLocal<StringBuilder>(() => new StringBuilder());
+
+ // Bind a function that combines messages until a newline is encountered, then logs it all as one message
+ llama_log_set((level, message) =>
+ {
+ var builder = builderThread.Value!;
+
+ builder.Append(message);
+
+ if (!message.EndsWith("\n"))
+ return;
+
+ // Remove the newline from the end
+ builder.Remove(builder.Length - 1, 1);
+
+ logger.Log(level.ToLogLevel(), "{message}", builder.ToString());
+ builder.Clear();
+ });
+ }
+}
\ No newline at end of file