diff --git a/Directory.Packages.props b/Directory.Packages.props
index 453827321..c3f6e19ec 100644
--- a/Directory.Packages.props
+++ b/Directory.Packages.props
@@ -3,24 +3,16 @@
     <ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>
-
-
+
-
+
-
-
-
-
-
-
-
-
+
@@ -33,34 +25,41 @@
-
-
+
-
-
-
-
+
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
+
+
+
+
@@ -81,11 +80,11 @@
       <PrivateAssets>all</PrivateAssets>
       <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
-
+
       <PrivateAssets>all</PrivateAssets>
       <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
-
+
       <PrivateAssets>all</PrivateAssets>
       <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
diff --git a/KernelMemory.sln b/KernelMemory.sln
index 035e879df..20c453af1 100644
--- a/KernelMemory.sln
+++ b/KernelMemory.sln
@@ -273,8 +273,6 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Elasticsearch", "extensions
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Elasticsearch.FunctionalTests", "extensions\Elasticsearch\Elasticsearch.FunctionalTests\Elasticsearch.FunctionalTests.csproj", "{C5E6B28C-F54D-423D-954D-A9EAEFB89732}"
EndProject
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Discord", "extensions\Discord\Discord\Discord.csproj", "{43877864-6AE8-4B03-BEDA-6B6FA8BB1D8B}"
-EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "301-discord-test-application", "examples\301-discord-test-application\301-discord-test-application.csproj", "{FAE4C6B8-38B2-43E7-8881-99693C9CEDC6}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "applications", "applications", "{DBEA0A6B-474A-4E8C-BCC8-D5D43C063A54}"
@@ -555,10 +553,6 @@ Global
{C5E6B28C-F54D-423D-954D-A9EAEFB89732}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{C5E6B28C-F54D-423D-954D-A9EAEFB89732}.Debug|Any CPU.Build.0 = Debug|Any CPU
{C5E6B28C-F54D-423D-954D-A9EAEFB89732}.Release|Any CPU.ActiveCfg = Release|Any CPU
- {43877864-6AE8-4B03-BEDA-6B6FA8BB1D8B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
- {43877864-6AE8-4B03-BEDA-6B6FA8BB1D8B}.Debug|Any CPU.Build.0 = Debug|Any CPU
- {43877864-6AE8-4B03-BEDA-6B6FA8BB1D8B}.Release|Any CPU.ActiveCfg = Release|Any CPU
- {43877864-6AE8-4B03-BEDA-6B6FA8BB1D8B}.Release|Any CPU.Build.0 = Release|Any CPU
{FAE4C6B8-38B2-43E7-8881-99693C9CEDC6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{FAE4C6B8-38B2-43E7-8881-99693C9CEDC6}.Debug|Any CPU.Build.0 = Debug|Any CPU
{FAE4C6B8-38B2-43E7-8881-99693C9CEDC6}.Release|Any CPU.ActiveCfg = Release|Any CPU
@@ -697,7 +691,6 @@ Global
{B9BE1099-F78F-4A5F-A897-BF2C75E19C57} = {155DA079-E267-49AF-973A-D1D44681970F}
{2E10420F-BF96-411C-8FE0-F6268F2EEB67} = {155DA079-E267-49AF-973A-D1D44681970F}
{C5E6B28C-F54D-423D-954D-A9EAEFB89732} = {3C17F42B-CFC8-4900-8CFB-88936311E919}
- {43877864-6AE8-4B03-BEDA-6B6FA8BB1D8B} = {155DA079-E267-49AF-973A-D1D44681970F}
{FAE4C6B8-38B2-43E7-8881-99693C9CEDC6} = {0A43C65C-6007-4BB4-B3FE-8D439FC91841}
{432AC1B4-8275-4284-9A44-44988A6F0C24} = {DBEA0A6B-474A-4E8C-BCC8-D5D43C063A54}
{A0C81A29-715F-463E-A243-7E45DB8AE53F} = {155DA079-E267-49AF-973A-D1D44681970F}
diff --git a/examples/204-dotnet-ASP.NET-MVC-integration/204-dotnet-ASP.NET-MVC-integration.csproj b/examples/204-dotnet-ASP.NET-MVC-integration/204-dotnet-ASP.NET-MVC-integration.csproj
index 03e3e2435..c27671236 100644
--- a/examples/204-dotnet-ASP.NET-MVC-integration/204-dotnet-ASP.NET-MVC-integration.csproj
+++ b/examples/204-dotnet-ASP.NET-MVC-integration/204-dotnet-ASP.NET-MVC-integration.csproj
@@ -2,11 +2,14 @@
     <TargetFramework>net8.0</TargetFramework>
+    <RollForward>LatestMajor</RollForward>
+    <LangVersion>12</LangVersion>
     <Nullable>enable</Nullable>
+    <ImplicitUsings>enable</ImplicitUsings>
-
+
\ No newline at end of file
diff --git a/examples/204-dotnet-ASP.NET-MVC-integration/Directory.Build.props b/examples/204-dotnet-ASP.NET-MVC-integration/Directory.Build.props
new file mode 100644
index 000000000..c1df2220d
--- /dev/null
+++ b/examples/204-dotnet-ASP.NET-MVC-integration/Directory.Build.props
@@ -0,0 +1,2 @@
+<Project>
+</Project>
\ No newline at end of file
diff --git a/examples/204-dotnet-ASP.NET-MVC-integration/Directory.Packages.props b/examples/204-dotnet-ASP.NET-MVC-integration/Directory.Packages.props
new file mode 100644
index 000000000..c1df2220d
--- /dev/null
+++ b/examples/204-dotnet-ASP.NET-MVC-integration/Directory.Packages.props
@@ -0,0 +1,2 @@
+<Project>
+</Project>
\ No newline at end of file
diff --git a/examples/301-discord-test-application/301-discord-test-application.csproj b/examples/301-discord-test-application/301-discord-test-application.csproj
index 2e7660039..4871148be 100644
--- a/examples/301-discord-test-application/301-discord-test-application.csproj
+++ b/examples/301-discord-test-application/301-discord-test-application.csproj
@@ -3,16 +3,16 @@
     <OutputType>Exe</OutputType>
     <TargetFramework>net8.0</TargetFramework>
+    <RollForward>LatestMajor</RollForward>
+    <LangVersion>12</LangVersion>
     <Nullable>enable</Nullable>
+    <ImplicitUsings>enable</ImplicitUsings>
-
-
-
-
-
-
+
+
+
diff --git a/examples/301-discord-test-application/Directory.Build.props b/examples/301-discord-test-application/Directory.Build.props
new file mode 100644
index 000000000..c1df2220d
--- /dev/null
+++ b/examples/301-discord-test-application/Directory.Build.props
@@ -0,0 +1,2 @@
+<Project>
+</Project>
\ No newline at end of file
diff --git a/examples/301-discord-test-application/Directory.Packages.props b/examples/301-discord-test-application/Directory.Packages.props
new file mode 100644
index 000000000..c1df2220d
--- /dev/null
+++ b/examples/301-discord-test-application/Directory.Packages.props
@@ -0,0 +1,2 @@
+<Project>
+</Project>
\ No newline at end of file
diff --git a/extensions/Discord/Discord/DiscordConnector.cs b/examples/301-discord-test-application/DiscordConnector/DiscordConnector.cs
similarity index 97%
rename from extensions/Discord/Discord/DiscordConnector.cs
rename to examples/301-discord-test-application/DiscordConnector/DiscordConnector.cs
index 9c3da87ff..edad30c71 100644
--- a/extensions/Discord/Discord/DiscordConnector.cs
+++ b/examples/301-discord-test-application/DiscordConnector/DiscordConnector.cs
@@ -1,17 +1,10 @@
// Copyright (c) Microsoft. All rights reserved.
-using System;
-using System.Collections.Generic;
using System.Globalization;
-using System.IO;
using System.Text;
using System.Text.Json;
-using System.Threading;
-using System.Threading.Tasks;
using Discord;
using Discord.WebSocket;
-using Microsoft.Extensions.Hosting;
-using Microsoft.Extensions.Logging;
using Microsoft.KernelMemory.Diagnostics;
namespace Microsoft.KernelMemory.Sources.DiscordBot;
diff --git a/extensions/Discord/Discord/DiscordConnectorConfig.cs b/examples/301-discord-test-application/DiscordConnector/DiscordConnectorConfig.cs
similarity index 95%
rename from extensions/Discord/Discord/DiscordConnectorConfig.cs
rename to examples/301-discord-test-application/DiscordConnector/DiscordConnectorConfig.cs
index 65200f83b..9ed2b8ba6 100644
--- a/extensions/Discord/Discord/DiscordConnectorConfig.cs
+++ b/examples/301-discord-test-application/DiscordConnector/DiscordConnectorConfig.cs
@@ -1,7 +1,5 @@
// Copyright (c) Microsoft. All rights reserved.
-using System.Collections.Generic;
-
namespace Microsoft.KernelMemory.Sources.DiscordBot;
///
diff --git a/extensions/Discord/Discord/DiscordMessage.cs b/examples/301-discord-test-application/DiscordConnector/DiscordMessage.cs
similarity index 99%
rename from extensions/Discord/Discord/DiscordMessage.cs
rename to examples/301-discord-test-application/DiscordConnector/DiscordMessage.cs
index 0120caa17..f80249235 100644
--- a/extensions/Discord/Discord/DiscordMessage.cs
+++ b/examples/301-discord-test-application/DiscordConnector/DiscordMessage.cs
@@ -1,6 +1,5 @@
// Copyright (c) Microsoft. All rights reserved.
-using System;
using System.Text.Json.Serialization;
namespace Microsoft.KernelMemory.Sources.DiscordBot;
diff --git a/extensions/Discord/Discord/Discord.csproj b/extensions/Discord/Discord/Discord.csproj
deleted file mode 100644
index d05f4e3a9..000000000
--- a/extensions/Discord/Discord/Discord.csproj
+++ /dev/null
@@ -1,28 +0,0 @@
-
-
-
-    <TargetFramework>net8.0</TargetFramework>
-    <RollForward>LatestMajor</RollForward>
-    <AssemblyName>Microsoft.KernelMemory.Sources.DiscordBot</AssemblyName>
-    <RootNamespace>Microsoft.KernelMemory.Sources.DiscordBot</RootNamespace>
-    <NoWarn>$(NoWarn);CS8002;CA1303;</NoWarn>
-
-
-
-
-
-
-
-
-
-
-
-
-    <IsPackable>false</IsPackable>
-    <PackageId>Microsoft.KernelMemory.Sources.Discord</PackageId>
-    <Product>Discord connector for Kernel Memory</Product>
-    <Description>Discord connector for Kernel Memory</Description>
-    <PackageTags>Discord, Kernel Memory, AI, Artificial Intelligence, ETL</PackageTags>
-
-
-
diff --git a/extensions/LlamaSharp/LlamaSharp/LlamaSharp.csproj b/extensions/LlamaSharp/LlamaSharp/LlamaSharp.csproj
index 3172a4fad..7ad407db2 100644
--- a/extensions/LlamaSharp/LlamaSharp/LlamaSharp.csproj
+++ b/extensions/LlamaSharp/LlamaSharp/LlamaSharp.csproj
@@ -10,7 +10,7 @@
-
+
diff --git a/extensions/LlamaSharp/LlamaSharp/LlamaSharpConfig.cs b/extensions/LlamaSharp/LlamaSharp/LlamaSharpConfig.cs
index 2a42b37eb..c87b7b290 100644
--- a/extensions/LlamaSharp/LlamaSharp/LlamaSharpConfig.cs
+++ b/extensions/LlamaSharp/LlamaSharp/LlamaSharpConfig.cs
@@ -23,8 +23,6 @@ public class LlamaSharpConfig
    /// </summary>
public int? GpuLayerCount { get; set; }
- public uint? Seed { get; set; } = 1337;
-
    /// <summary>
    /// Verify that the current state is valid.
    /// </summary>
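
For context, a minimal usage sketch of the trimmed-down config after the Seed removal. Only the members visible in this diff (ModelPath, MaxTokenTotal, GpuLayerCount, Validate) are referenced; the model path is a placeholder and using directives are omitted since the config's namespace isn't shown here.

```csharp
// Sketch only: Seed is gone, and GpuLayerCount can now be left null because the
// generator falls back to 20 GPU layers (see LlamaSharpTextGenerator below).
var config = new LlamaSharpConfig
{
    ModelPath = "models/llama.gguf", // placeholder path
    MaxTokenTotal = 4096,
    GpuLayerCount = 20               // optional
};
config.Validate();
```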
diff --git a/extensions/LlamaSharp/LlamaSharp/LlamaSharpTextGenerator.cs b/extensions/LlamaSharp/LlamaSharp/LlamaSharpTextGenerator.cs
index fcb4fa3d3..e5cd2cc5e 100644
--- a/extensions/LlamaSharp/LlamaSharp/LlamaSharpTextGenerator.cs
+++ b/extensions/LlamaSharp/LlamaSharp/LlamaSharpTextGenerator.cs
@@ -8,9 +8,9 @@
using LLama;
using LLama.Abstractions;
using LLama.Common;
+using LLama.Native;
using LLama.Sampling;
using Microsoft.Extensions.Logging;
-using Microsoft.KernelMemory.AI.OpenAI;
using Microsoft.KernelMemory.Diagnostics;
namespace Microsoft.KernelMemory.AI.LlamaSharp;
@@ -43,36 +43,27 @@ public LlamaSharpTextGenerator(
config.Validate();
this.MaxTokenTotal = (int)config.MaxTokenTotal;
- if (textTokenizer == null)
- {
- this._log.LogWarning(
- "Tokenizer not specified, will use {0}. The token count might be incorrect, causing unexpected errors",
- nameof(GPT4oTokenizer));
- textTokenizer = new GPT4oTokenizer();
- }
-
- this._textTokenizer = textTokenizer;
-
var parameters = new ModelParams(config.ModelPath)
{
- ContextSize = config.MaxTokenTotal
+ ContextSize = config.MaxTokenTotal,
+ GpuLayerCount = config.GpuLayerCount ?? 20,
};
- if (config.GpuLayerCount.HasValue)
- {
- parameters.GpuLayerCount = config.GpuLayerCount.Value;
- }
-
- if (config.Seed.HasValue)
- {
- parameters.Seed = config.Seed.Value;
- }
-
var modelFilename = config.ModelPath.Split('/').Last().Split('\\').Last();
this._log.LogDebug("Loading LLama model: {1}", modelFilename);
this._model = LLamaWeights.LoadFromFile(parameters);
this._context = this._model.CreateContext(parameters);
this._log.LogDebug("LLama model loaded");
+
+ if (textTokenizer == null)
+ {
+ this._log.LogWarning(
+ "Tokenizer not specified, will use {0}. The token count might be incorrect, causing unexpected errors",
+ nameof(DefaultGPTTokenizer));
+ textTokenizer = new DefaultGPTTokenizer();
+ }
+
+ this._textTokenizer = textTokenizer;
}
///
@@ -81,13 +72,7 @@ public LlamaSharpTextGenerator(
///
public int CountTokens(string text)
{
- int? value = this._textTokenizer?.CountTokens(text);
- if (!value.HasValue)
- {
- value = this._context.Tokenize(text, addBos: false, special: false).Length;
- }
-
- return value.Value;
+ return this._textTokenizer.CountTokens(text);
}
///
@@ -104,19 +89,18 @@ public IAsyncEnumerable<string> GenerateTextAsync(
{
var executor = new InteractiveExecutor(this._context);
- var samplingPipeline = new DefaultSamplingPipeline();
- samplingPipeline.Temperature = (float)options.Temperature;
- samplingPipeline.TopP = (float)options.NucleusSampling;
- samplingPipeline.AlphaPresence = (float)options.PresencePenalty;
- samplingPipeline.AlphaFrequency = (float)options.FrequencyPenalty;
+ var logitBias = options.TokenSelectionBiases.Count > 0
+        ? options.TokenSelectionBiases.ToDictionary(pair => (LLamaToken)pair.Key, pair => pair.Value)
+        : new Dictionary<LLamaToken, float>();
- if (options.TokenSelectionBiases is { Count: > 0 })
+ var samplingPipeline = new DefaultSamplingPipeline()
{
- foreach (var (token, bias) in options.TokenSelectionBiases)
- {
- samplingPipeline.LogitBias!.Add(token, bias);
- }
- }
+ Temperature = (float)options.Temperature,
+ TopP = (float)options.NucleusSampling,
+ AlphaPresence = (float)options.PresencePenalty,
+ AlphaFrequency = (float)options.FrequencyPenalty,
+ LogitBias = logitBias,
+ };
IInferenceParams settings = new InferenceParams
{
@@ -126,26 +110,15 @@ public IAsyncEnumerable<string> GenerateTextAsync(
SamplingPipeline = samplingPipeline
};
+ this._log.LogTrace("Generating text, temperature {0}, max tokens {1}",
+ samplingPipeline.Temperature, settings.MaxTokens);
return executor.InferAsync(prompt, settings, cancellationToken);
}
///
public void Dispose()
{
- this.Dispose(true);
- GC.SuppressFinalize(this);
- }
-
- private void Dispose(bool disposing)
- {
- if (!disposing) { return; }
-
- this._context.Dispose();
this._model.Dispose();
- }
-
- ~LlamaSharpTextGenerator()
- {
- this.Dispose(false);
+ this._context.Dispose();
}
}
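
A small caller-side sketch of the reshuffled constructor path: passing a null tokenizer now triggers the DefaultGPTTokenizer fallback after the model is loaded. This assumes the constructor parameters beyond config and textTokenizer (e.g. a logger factory) are optional, since they are not visible in this hunk; the model path is a placeholder.

```csharp
// Sketch only: relies on the fallback added above; DefaultGPTTokenizer counts may
// not match the GGUF model's own vocabulary, as the warning in the constructor notes.
var config = new LlamaSharpConfig { ModelPath = "models/llama.gguf", MaxTokenTotal = 4096 };
var generator = new LlamaSharpTextGenerator(config, textTokenizer: null);

int n = generator.CountTokens("The quick brown fox");  // served by the fallback tokenizer
Console.WriteLine($"{n} tokens, context window {generator.MaxTokenTotal}");
```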
diff --git a/extensions/Ollama/Ollama/OllamaTextEmbeddingGenerator.cs b/extensions/Ollama/Ollama/OllamaTextEmbeddingGenerator.cs
index ccb708f9e..7e4a5ae9b 100644
--- a/extensions/Ollama/Ollama/OllamaTextEmbeddingGenerator.cs
+++ b/extensions/Ollama/Ollama/OllamaTextEmbeddingGenerator.cs
@@ -139,8 +139,8 @@ public async Task<Embedding[]> GenerateEmbeddingBatchAsync(
}
};
- EmbedResponse response = await this._client.Embed(request, cancellationToken).ConfigureAwait(false);
- Embedding[] result = response.Embeddings.Select(Embedding.FromDoubles).ToArray();
+ EmbedResponse response = await this._client.EmbedAsync(request, cancellationToken).ConfigureAwait(false);
+ Embedding[] result = response.Embeddings.Select(x => new Embedding(x)).ToArray();
this._log.LogTrace("Embeddings batch ready, size {0} texts", result.Length);
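
For reference, the renamed batch-embedding call in isolation. Only EmbedAsync and the `new Embedding(x)` wrapping come from this change; the client construction, namespaces, and the EmbedRequest property names (Model, Input) are assumptions about the OllamaSharp API, and the endpoint and model name are placeholders.

```csharp
using OllamaSharp;

// Assumed client and request shape; EmbedAsync + new Embedding(x) mirror the hunk above.
var client = new OllamaApiClient("http://localhost:11434");
var request = new EmbedRequest
{
    Model = "nomic-embed-text",                               // placeholder model name
    Input = new List<string> { "first text", "second text" }
};
EmbedResponse response = await client.EmbedAsync(request, CancellationToken.None);
Embedding[] embeddings = response.Embeddings.Select(x => new Embedding(x)).ToArray();
```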
diff --git a/extensions/Ollama/Ollama/OllamaTextGenerator.cs b/extensions/Ollama/Ollama/OllamaTextGenerator.cs
index e213a5c95..2bf0cd445 100644
--- a/extensions/Ollama/Ollama/OllamaTextGenerator.cs
+++ b/extensions/Ollama/Ollama/OllamaTextGenerator.cs
@@ -142,7 +142,7 @@ public async IAsyncEnumerable<string> GenerateTextAsync(
// }
var chat = new Chat(this._client);
-        IAsyncEnumerable<string?> stream = chat.Send(prompt, cancellationToken);
+        IAsyncEnumerable<string?> stream = chat.SendAsync(prompt, cancellationToken);
await foreach (string? token in stream)
{
if (token != null) { yield return token; }
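
Likewise for the chat path, a sketch of consuming the renamed streaming call outside the generator. `new Chat(client)` and `SendAsync(prompt, cancellationToken)` appear in the hunk above; the OllamaApiClient construction and SelectedModel assignment are assumptions about the client library, with placeholder endpoint and model.

```csharp
using OllamaSharp;

// Assumed client setup; endpoint and model are placeholders.
var client = new OllamaApiClient("http://localhost:11434");
client.SelectedModel = "phi3";

var chat = new Chat(client);
await foreach (string? token in chat.SendAsync("Write a haiku about memory", CancellationToken.None))
{
    if (token != null) { Console.Write(token); }
}
```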