// Copyright (c) 2024
// Author : Bruno Capuano
// Change Log :
// - Sample Text Generation Service for Ollama models
//
// The MIT License (MIT)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using OllamaSharp;
using OllamaSharp.Models.Chat;

namespace sk_ollamacsharp
{
    /// <summary>
    /// Sample Semantic Kernel text generation service backed by an Ollama
    /// server reached through <see cref="OllamaApiClient"/>.
    /// </summary>
    public class OllamaTextGenerationService : Microsoft.SemanticKernel.TextGeneration.ITextGenerationService
    {
        // Public property for the model url endpoint, e.g. "http://localhost:11434".
        public string ModelUrl { get; set; }

        // Name of the Ollama model used for completions, e.g. "llama3".
        public string ModelName { get; set; }

        /// <summary>
        /// Service attributes. This sample exposes none, so an empty
        /// dictionary is returned instead of throwing
        /// <see cref="NotImplementedException"/> — generic SK pipelines may
        /// read this property and should not crash doing so.
        /// </summary>
        public IReadOnlyDictionary<string, object?> Attributes { get; } = new Dictionary<string, object?>();

        /// <summary>
        /// Streaming text generation is not supported by this sample.
        /// </summary>
        /// <exception cref="NotImplementedException">Always thrown.</exception>
        public IAsyncEnumerable<StreamingTextContent> GetStreamingTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default)
        {
            throw new NotImplementedException();
        }

        /// <summary>
        /// Sends <paramref name="prompt"/> to the configured Ollama model and
        /// returns the completion as a single-element list.
        /// </summary>
        /// <param name="prompt">The prompt to complete.</param>
        /// <param name="executionSettings">Ignored by this sample.</param>
        /// <param name="kernel">Ignored by this sample.</param>
        /// <param name="cancellationToken">Token used to cancel the request.</param>
        /// <returns>A read-only list containing one <see cref="TextContent"/> with the model response.</returns>
        public async Task<IReadOnlyList<TextContent>> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default)
        {
            var ollama = new OllamaApiClient(ModelUrl, ModelName);

            // FIX: forward the caller-supplied cancellation token; the
            // original passed CancellationToken.None, making the request
            // uncancellable. ConfigureAwait(false): no sync context needed
            // in library code. The null argument is the (absent)
            // conversation context for a fresh, stateless completion.
            var completionResponse = await ollama.GetCompletion(prompt, null, cancellationToken).ConfigureAwait(false);

            TextContent stc = new TextContent(completionResponse.Response);
            return new List<TextContent> { stc };
        }
    }
}