@NickStrupat · Last active July 8, 2024
C# chat bot powered by Llama 3 through Ollama. The gist has two parts: the project file (.csproj) and a top-level program that defines and drives a small ChatBot class.
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net8.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <WarningsAsErrors>Nullable</WarningsAsErrors>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="OllamaSharp" Version="2.0.6" />
  </ItemGroup>
</Project>
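
Before starting the program, the model must already be present in the local Ollama instance; the program throws if it is not (see CreateChatAsync below). A typical setup, assuming Ollama is installed and listening on its default port 11434:

    ollama pull llama3:latest   # download the model the program expects
    dotnet run                  # build and start the chat loop; type a prompt and press Enter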
using OllamaSharp;

// Configure the chat bot: Ollama endpoint, model, and a prefix that sets the persona.
// The prefix is prepended to every prompt; responses are streamed to the console as they arrive.
var chatBot = new ChatBot(
    ollamaServiceUri: new("http://localhost:11434"),
    modelName: "llama3:latest",
    promptPrefix: """
        Please respond to the following prompt as if you were a Canadian mortgage
        broker, without saying you are a Canadian mortgage broker, and in the context
        of helping the prompter acquire a mortgage or mortgage related product:
        """,
    responseChunkAction: Console.Write
);

// Read prompts from standard input until an empty line (or end of input) is entered.
while (await Console.In.ReadLineAsync() is { Length: > 0 } prompt)
    await chatBot.AskAsync(prompt);

public sealed class ChatBot(Uri ollamaServiceUri, String modelName, String promptPrefix, Action<String> responseChunkAction)
{
    // Prepend the persona prefix to the user's prompt and send it to the chat session.
    public async Task AskAsync(String prompt) => await (await chat).Send($"{promptPrefix}\n\n{prompt}");

    // Creation of the chat session starts when the ChatBot is constructed; AskAsync awaits it before sending.
    private readonly Task<Chat> chat = CreateChatAsync(ollamaServiceUri, modelName, responseChunkAction);

    private static async Task<Chat> CreateChatAsync(Uri ollamaServiceUri, String modelName, Action<String> responseChunkAction)
    {
        var client = new OllamaApiClient(ollamaServiceUri, modelName);

        // Fail fast if the requested model has not been pulled into the local Ollama instance.
        if (!(await client.ListLocalModels()).Select(x => x.Name).Contains(modelName))
            throw new InvalidOperationException($"Model '{modelName}' not found locally. Please pull it first using the 'ollama pull' command in your terminal.");

        // Forward each streamed chunk to the caller, and emit a newline once the response is complete.
        return client.Chat(stream =>
        {
            if (stream is null)
                return;
            if (stream.Message.Content is {} body)
                responseChunkAction(body);
            if (stream.Done)
                responseChunkAction("\n");
        });
    }
}
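
The ChatBot class is not tied to the mortgage persona; the model name and prompt prefix are ordinary constructor arguments. A minimal reuse sketch (the prefix and question below are illustrative, not part of the gist):

var assistant = new ChatBot(
    ollamaServiceUri: new("http://localhost:11434"),
    modelName: "llama3:latest",
    promptPrefix: "Answer the following question in one short paragraph:",
    responseChunkAction: Console.Write);
await assistant.AskAsync("How does mortgage amortization work?"); // illustrative question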