diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/JsonContext.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/JsonContext.cs
deleted file mode 100644
index 6de0144c7cf..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/JsonContext.cs
+++ /dev/null
@@ -1,24 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System.Text.Json.Serialization;
-
-namespace Microsoft.Extensions.AI;
-
-[JsonSourceGenerationOptions(PropertyNamingPolicy = JsonKnownNamingPolicy.SnakeCaseLower, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull)]
-[JsonSerializable(typeof(OllamaChatRequest))]
-[JsonSerializable(typeof(OllamaChatRequestMessage))]
-[JsonSerializable(typeof(OllamaChatResponse))]
-[JsonSerializable(typeof(OllamaChatResponseMessage))]
-[JsonSerializable(typeof(OllamaFunctionCallContent))]
-[JsonSerializable(typeof(OllamaFunctionResultContent))]
-[JsonSerializable(typeof(OllamaFunctionTool))]
-[JsonSerializable(typeof(OllamaFunctionToolCall))]
-[JsonSerializable(typeof(OllamaFunctionToolParameter))]
-[JsonSerializable(typeof(OllamaFunctionToolParameters))]
-[JsonSerializable(typeof(OllamaRequestOptions))]
-[JsonSerializable(typeof(OllamaTool))]
-[JsonSerializable(typeof(OllamaToolCall))]
-[JsonSerializable(typeof(OllamaEmbeddingRequest))]
-[JsonSerializable(typeof(OllamaEmbeddingResponse))]
-internal sealed partial class JsonContext : JsonSerializerContext;
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/Microsoft.Extensions.AI.Ollama.csproj b/src/Libraries/Microsoft.Extensions.AI.Ollama/Microsoft.Extensions.AI.Ollama.csproj
deleted file mode 100644
index 96734131c83..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/Microsoft.Extensions.AI.Ollama.csproj
+++ /dev/null
@@ -1,44 +0,0 @@
-
-
-
- Microsoft.Extensions.AI
- Implementation of generative AI abstractions for Ollama. This package is deprecated, and the OllamaSharp package is recommended.
- AI
-
-
-
- preview
- false
- 78
- 0
-
-
-
- $(TargetFrameworks);netstandard2.0
- $(NoWarn);CA2227;SA1316;S1121;EA0002
- true
- true
-
-
-
- true
- true
- true
- true
- true
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/Microsoft.Extensions.AI.Ollama.json b/src/Libraries/Microsoft.Extensions.AI.Ollama/Microsoft.Extensions.AI.Ollama.json
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs
deleted file mode 100644
index 42f75af495e..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs
+++ /dev/null
@@ -1,505 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System;
-using System.Collections.Generic;
-using System.Globalization;
-using System.IO;
-using System.Linq;
-using System.Net.Http;
-using System.Net.Http.Json;
-using System.Runtime.CompilerServices;
-using System.Text.Json;
-using System.Threading;
-using System.Threading.Tasks;
-using Microsoft.Shared.Diagnostics;
-
-#pragma warning disable EA0011 // Consider removing unnecessary conditional access operator (?)
-#pragma warning disable SA1204 // Static elements should appear before instance elements
-#pragma warning disable S3358 // Ternary operators should not be nested
-
-namespace Microsoft.Extensions.AI;
-
-/// <summary>Represents an <see cref="IChatClient"/> for Ollama.</summary>
-public sealed class OllamaChatClient : IChatClient
-{
- private static readonly JsonElement _schemalessJsonResponseFormatValue = JsonDocument.Parse("\"json\"").RootElement;
-
- private static readonly AIJsonSchemaTransformCache _schemaTransformCache = new(new()
- {
- ConvertBooleanSchemas = true,
- });
-
- /// Metadata about the client.
- private readonly ChatClientMetadata _metadata;
-
- /// The api/chat endpoint URI.
- private readonly Uri _apiChatEndpoint;
-
- /// <summary>The <see cref="HttpClient"/> to use for sending requests.</summary>
- private readonly HttpClient _httpClient;
-
- /// <summary>The <see cref="JsonSerializerOptions"/> to use for any serialization activities related to tool call arguments and results.</summary>
- private JsonSerializerOptions _toolCallJsonSerializerOptions = AIJsonUtilities.DefaultOptions;
-
- /// <summary>Initializes a new instance of the <see cref="OllamaChatClient"/> class.</summary>
- /// <param name="endpoint">The endpoint URI where Ollama is hosted.</param>
- /// <param name="modelId">
- /// The ID of the model to use. This ID can also be overridden per request via <see cref="ChatOptions.ModelId"/>.
- /// Either this parameter or <see cref="ChatOptions.ModelId"/> must provide a valid model ID.
- /// </param>
- /// <param name="httpClient">An <see cref="HttpClient"/> instance to use for HTTP operations.</param>
- public OllamaChatClient(string endpoint, string? modelId = null, HttpClient? httpClient = null)
- : this(new Uri(Throw.IfNull(endpoint)), modelId, httpClient)
- {
- }
-
- /// <summary>Initializes a new instance of the <see cref="OllamaChatClient"/> class.</summary>
- /// <param name="endpoint">The endpoint URI where Ollama is hosted.</param>
- /// <param name="modelId">
- /// The ID of the model to use. This ID can also be overridden per request via <see cref="ChatOptions.ModelId"/>.
- /// Either this parameter or <see cref="ChatOptions.ModelId"/> must provide a valid model ID.
- /// </param>
- /// <param name="httpClient">An <see cref="HttpClient"/> instance to use for HTTP operations.</param>
- /// <exception cref="ArgumentNullException"><paramref name="endpoint"/> is <see langword="null"/>.</exception>
- /// <exception cref="ArgumentException"><paramref name="modelId"/> is empty or composed entirely of whitespace.</exception>
- public OllamaChatClient(Uri endpoint, string? modelId = null, HttpClient? httpClient = null)
- {
- _ = Throw.IfNull(endpoint);
- if (modelId is not null)
- {
- _ = Throw.IfNullOrWhitespace(modelId);
- }
-
- _apiChatEndpoint = new Uri(endpoint, "api/chat");
- _httpClient = httpClient ?? OllamaUtilities.SharedClient;
-
- _metadata = new ChatClientMetadata("ollama", endpoint, modelId);
- }
-
- /// <summary>Gets or sets the <see cref="JsonSerializerOptions"/> to use for any serialization activities related to tool call arguments and results.</summary>
- public JsonSerializerOptions ToolCallJsonSerializerOptions
- {
- get => _toolCallJsonSerializerOptions;
- set => _toolCallJsonSerializerOptions = Throw.IfNull(value);
- }
-
- /// <inheritdoc />
- public async Task<ChatResponse> GetResponseAsync(
- IEnumerable<ChatMessage> messages, ChatOptions? options = null, CancellationToken cancellationToken = default)
- {
- _ = Throw.IfNull(messages);
-
- using var httpResponse = await _httpClient.PostAsJsonAsync(
- _apiChatEndpoint,
- ToOllamaChatRequest(messages, options, stream: false),
- JsonContext.Default.OllamaChatRequest,
- cancellationToken).ConfigureAwait(false);
-
- if (!httpResponse.IsSuccessStatusCode)
- {
- await OllamaUtilities.ThrowUnsuccessfulOllamaResponseAsync(httpResponse, cancellationToken).ConfigureAwait(false);
- }
-
- var response = (await httpResponse.Content.ReadFromJsonAsync(
- JsonContext.Default.OllamaChatResponse,
- cancellationToken).ConfigureAwait(false))!;
-
- if (!string.IsNullOrEmpty(response.Error))
- {
- throw new InvalidOperationException($"Ollama error: {response.Error}");
- }
-
- var responseId = Guid.NewGuid().ToString("N");
-
- return new(FromOllamaMessage(response.Message!, responseId))
- {
- CreatedAt = DateTimeOffset.TryParse(response.CreatedAt, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTimeOffset createdAt) ? createdAt : null,
- FinishReason = ToFinishReason(response),
- ModelId = response.Model ?? options?.ModelId ?? _metadata.DefaultModelId,
- ResponseId = responseId,
- Usage = ParseOllamaChatResponseUsage(response),
- };
- }
-
- /// <inheritdoc />
- public async IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(
- IEnumerable<ChatMessage> messages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
- {
- _ = Throw.IfNull(messages);
-
- using HttpRequestMessage request = new(HttpMethod.Post, _apiChatEndpoint)
- {
- Content = JsonContent.Create(ToOllamaChatRequest(messages, options, stream: true), JsonContext.Default.OllamaChatRequest)
- };
- using var httpResponse = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
-
- if (!httpResponse.IsSuccessStatusCode)
- {
- await OllamaUtilities.ThrowUnsuccessfulOllamaResponseAsync(httpResponse, cancellationToken).ConfigureAwait(false);
- }
-
- // Ollama doesn't set a response ID on streamed chunks, so we need to generate one.
- var responseId = Guid.NewGuid().ToString("N");
-
- using var httpResponseStream = await httpResponse.Content
-#if NET
- .ReadAsStreamAsync(cancellationToken)
-#else
- .ReadAsStreamAsync()
-#endif
- .ConfigureAwait(false);
-
- using var streamReader = new StreamReader(httpResponseStream);
-#if NET
- while ((await streamReader.ReadLineAsync(cancellationToken).ConfigureAwait(false)) is { } line)
-#else
- while ((await streamReader.ReadLineAsync().ConfigureAwait(false)) is { } line)
-#endif
- {
- var chunk = JsonSerializer.Deserialize(line, JsonContext.Default.OllamaChatResponse);
- if (chunk is null)
- {
- continue;
- }
-
- string? modelId = chunk.Model ?? _metadata.DefaultModelId;
-
- ChatResponseUpdate update = new()
- {
- CreatedAt = DateTimeOffset.TryParse(chunk.CreatedAt, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTimeOffset createdAt) ? createdAt : null,
- FinishReason = ToFinishReason(chunk),
- ModelId = modelId,
- ResponseId = responseId,
- MessageId = responseId, // There is no per-message ID, but there's only one message per response, so use the response ID
- Role = chunk.Message?.Role is not null ? new ChatRole(chunk.Message.Role) : null,
- };
-
- if (chunk.Message is { } message)
- {
- if (message.ToolCalls is { Length: > 0 })
- {
- foreach (var toolCall in message.ToolCalls)
- {
- if (toolCall.Function is { } function)
- {
- update.Contents.Add(ToFunctionCallContent(function));
- }
- }
- }
-
- // Equivalent rule to the nonstreaming case
- if (message.Content?.Length > 0 || update.Contents.Count == 0)
- {
- update.Contents.Insert(0, new TextContent(message.Content));
- }
- }
-
- if (ParseOllamaChatResponseUsage(chunk) is { } usage)
- {
- update.Contents.Add(new UsageContent(usage));
- }
-
- yield return update;
- }
- }
-
- /// <inheritdoc />
- object? IChatClient.GetService(Type serviceType, object? serviceKey)
- {
- _ = Throw.IfNull(serviceType);
-
- return
- serviceKey is not null ? null :
- serviceType == typeof(ChatClientMetadata) ? _metadata :
- serviceType.IsInstanceOfType(this) ? this :
- null;
- }
-
- /// <inheritdoc />
- public void Dispose()
- {
- if (_httpClient != OllamaUtilities.SharedClient)
- {
- _httpClient.Dispose();
- }
- }
-
- private static UsageDetails? ParseOllamaChatResponseUsage(OllamaChatResponse response)
- {
- AdditionalPropertiesDictionary<long>? additionalCounts = null;
- OllamaUtilities.TransferNanosecondsTime(response, static r => r.LoadDuration, "load_duration", ref additionalCounts);
- OllamaUtilities.TransferNanosecondsTime(response, static r => r.TotalDuration, "total_duration", ref additionalCounts);
- OllamaUtilities.TransferNanosecondsTime(response, static r => r.PromptEvalDuration, "prompt_eval_duration", ref additionalCounts);
- OllamaUtilities.TransferNanosecondsTime(response, static r => r.EvalDuration, "eval_duration", ref additionalCounts);
-
- if (additionalCounts is not null || response.PromptEvalCount is not null || response.EvalCount is not null)
- {
- return new()
- {
- InputTokenCount = response.PromptEvalCount,
- OutputTokenCount = response.EvalCount,
- TotalTokenCount = response.PromptEvalCount.GetValueOrDefault() + response.EvalCount.GetValueOrDefault(),
- AdditionalCounts = additionalCounts,
- };
- }
-
- return null;
- }
-
- private static ChatFinishReason? ToFinishReason(OllamaChatResponse response) =>
- response.DoneReason switch
- {
- null => null,
- "length" => ChatFinishReason.Length,
- "stop" => ChatFinishReason.Stop,
- _ => new ChatFinishReason(response.DoneReason),
- };
-
- private static ChatMessage FromOllamaMessage(OllamaChatResponseMessage message, string responseId)
- {
- List<AIContent> contents = [];
-
- // Add any tool calls.
- if (message.ToolCalls is { Length: > 0 })
- {
- foreach (var toolCall in message.ToolCalls)
- {
- if (toolCall.Function is { } function)
- {
- contents.Add(ToFunctionCallContent(function));
- }
- }
- }
-
- // Ollama frequently sends back empty content with tool calls. Rather than always adding an empty
- // content, we only add the content if either it's not empty or there weren't any tool calls.
- if (message.Content?.Length > 0 || contents.Count == 0)
- {
- contents.Insert(0, new TextContent(message.Content));
- }
-
- // Ollama doesn't have per-message IDs, so use the response ID in the same way we do when streaming
- return new ChatMessage(new(message.Role), contents) { MessageId = responseId };
- }
-
- private static FunctionCallContent ToFunctionCallContent(OllamaFunctionToolCall function)
- {
-#if NET
- var id = System.Security.Cryptography.RandomNumberGenerator.GetHexString(8);
-#else
- var id = Guid.NewGuid().ToString().Substring(0, 8);
-#endif
- return new FunctionCallContent(id, function.Name, function.Arguments);
- }
-
- private static JsonElement? ToOllamaChatResponseFormat(ChatResponseFormat? format)
- {
- if (format is ChatResponseFormatJson jsonFormat)
- {
- return _schemaTransformCache.GetOrCreateTransformedSchema(jsonFormat) ?? _schemalessJsonResponseFormatValue;
- }
- else
- {
- return null;
- }
- }
-
- private OllamaChatRequest ToOllamaChatRequest(IEnumerable<ChatMessage> messages, ChatOptions? options, bool stream)
- {
- var requestMessages = messages.SelectMany(ToOllamaChatRequestMessages).ToList();
- if (options?.Instructions is string instructions)
- {
- requestMessages.Insert(0, new OllamaChatRequestMessage
- {
- Role = ChatRole.System.Value,
- Content = instructions,
- });
- }
-
- OllamaChatRequest request = new()
- {
- Format = ToOllamaChatResponseFormat(options?.ResponseFormat),
- Messages = requestMessages,
- Model = options?.ModelId ?? _metadata.DefaultModelId ?? string.Empty,
- Stream = stream,
- Tools = options?.ToolMode is not NoneChatToolMode && options?.Tools is { Count: > 0 } tools ? tools.OfType<AIFunction>().Select(ToOllamaTool) : null,
- };
-
- if (options is not null)
- {
- TransferMetadataValue<bool>(nameof(OllamaRequestOptions.embedding_only), (options, value) => options.embedding_only = value);
- TransferMetadataValue<bool>(nameof(OllamaRequestOptions.f16_kv), (options, value) => options.f16_kv = value);
- TransferMetadataValue<bool>(nameof(OllamaRequestOptions.logits_all), (options, value) => options.logits_all = value);
- TransferMetadataValue<bool>(nameof(OllamaRequestOptions.low_vram), (options, value) => options.low_vram = value);
- TransferMetadataValue<int>(nameof(OllamaRequestOptions.main_gpu), (options, value) => options.main_gpu = value);
- TransferMetadataValue<float>(nameof(OllamaRequestOptions.min_p), (options, value) => options.min_p = value);
- TransferMetadataValue<int>(nameof(OllamaRequestOptions.mirostat), (options, value) => options.mirostat = value);
- TransferMetadataValue<float>(nameof(OllamaRequestOptions.mirostat_eta), (options, value) => options.mirostat_eta = value);
- TransferMetadataValue<float>(nameof(OllamaRequestOptions.mirostat_tau), (options, value) => options.mirostat_tau = value);
- TransferMetadataValue<int>(nameof(OllamaRequestOptions.num_batch), (options, value) => options.num_batch = value);
- TransferMetadataValue<int>(nameof(OllamaRequestOptions.num_ctx), (options, value) => options.num_ctx = value);
- TransferMetadataValue<int>(nameof(OllamaRequestOptions.num_gpu), (options, value) => options.num_gpu = value);
- TransferMetadataValue<int>(nameof(OllamaRequestOptions.num_keep), (options, value) => options.num_keep = value);
- TransferMetadataValue<int>(nameof(OllamaRequestOptions.num_thread), (options, value) => options.num_thread = value);
- TransferMetadataValue<bool>(nameof(OllamaRequestOptions.numa), (options, value) => options.numa = value);
- TransferMetadataValue<bool>(nameof(OllamaRequestOptions.penalize_newline), (options, value) => options.penalize_newline = value);
- TransferMetadataValue<int>(nameof(OllamaRequestOptions.repeat_last_n), (options, value) => options.repeat_last_n = value);
- TransferMetadataValue<float>(nameof(OllamaRequestOptions.repeat_penalty), (options, value) => options.repeat_penalty = value);
- TransferMetadataValue<float>(nameof(OllamaRequestOptions.tfs_z), (options, value) => options.tfs_z = value);
- TransferMetadataValue<float>(nameof(OllamaRequestOptions.typical_p), (options, value) => options.typical_p = value);
- TransferMetadataValue<bool>(nameof(OllamaRequestOptions.use_mmap), (options, value) => options.use_mmap = value);
- TransferMetadataValue<bool>(nameof(OllamaRequestOptions.use_mlock), (options, value) => options.use_mlock = value);
- TransferMetadataValue<bool>(nameof(OllamaRequestOptions.vocab_only), (options, value) => options.vocab_only = value);
-
- if (options.FrequencyPenalty is float frequencyPenalty)
- {
- (request.Options ??= new()).frequency_penalty = frequencyPenalty;
- }
-
- if (options.MaxOutputTokens is int maxOutputTokens)
- {
- (request.Options ??= new()).num_predict = maxOutputTokens;
- }
-
- if (options.PresencePenalty is float presencePenalty)
- {
- (request.Options ??= new()).presence_penalty = presencePenalty;
- }
-
- if (options.StopSequences is { Count: > 0 })
- {
- (request.Options ??= new()).stop = [.. options.StopSequences];
- }
-
- if (options.Temperature is float temperature)
- {
- (request.Options ??= new()).temperature = temperature;
- }
-
- if (options.TopP is float topP)
- {
- (request.Options ??= new()).top_p = topP;
- }
-
- if (options.TopK is int topK)
- {
- (request.Options ??= new()).top_k = topK;
- }
-
- if (options.Seed is long seed)
- {
- (request.Options ??= new()).seed = seed;
- }
- }
-
- return request;
-
- void TransferMetadataValue<T>(string propertyName, Action<OllamaRequestOptions, T> setOption)
- {
- if (options.AdditionalProperties?.TryGetValue(propertyName, out T? t) is true)
- {
- request.Options ??= new();
- setOption(request.Options, t);
- }
- }
- }
-
- private IEnumerable<OllamaChatRequestMessage> ToOllamaChatRequestMessages(ChatMessage content)
- {
- // In general, we return a single request message for each understood content item.
- // However, various image models expect both text and images in the same request message.
- // To handle that, attach images to a previous text message if one exists.
-
- OllamaChatRequestMessage? currentTextMessage = null;
- foreach (var item in content.Contents)
- {
- if (item is DataContent dataContent && dataContent.HasTopLevelMediaType("image"))
- {
- IList<string> images = currentTextMessage?.Images ?? [];
- images.Add(dataContent.Base64Data.ToString());
-
- if (currentTextMessage is not null)
- {
- currentTextMessage.Images = images;
- }
- else
- {
- yield return new OllamaChatRequestMessage
- {
- Role = content.Role.Value,
- Images = images,
- };
- }
- }
- else
- {
- if (currentTextMessage is not null)
- {
- yield return currentTextMessage;
- currentTextMessage = null;
- }
-
- switch (item)
- {
- case TextContent textContent:
- currentTextMessage = new OllamaChatRequestMessage
- {
- Role = content.Role.Value,
- Content = textContent.Text,
- };
- break;
-
- case FunctionCallContent fcc:
- {
- yield return new OllamaChatRequestMessage
- {
- Role = "assistant",
- Content = JsonSerializer.Serialize(new OllamaFunctionCallContent
- {
- CallId = fcc.CallId,
- Name = fcc.Name,
- Arguments = JsonSerializer.SerializeToElement(fcc.Arguments, ToolCallJsonSerializerOptions.GetTypeInfo(typeof(IDictionary<string, object?>))),
- }, JsonContext.Default.OllamaFunctionCallContent)
- };
- break;
- }
-
- case FunctionResultContent frc:
- {
- JsonElement jsonResult = JsonSerializer.SerializeToElement(frc.Result, ToolCallJsonSerializerOptions.GetTypeInfo(typeof(object)));
- yield return new OllamaChatRequestMessage
- {
- Role = "tool",
- Content = JsonSerializer.Serialize(new OllamaFunctionResultContent
- {
- CallId = frc.CallId,
- Result = jsonResult,
- }, JsonContext.Default.OllamaFunctionResultContent)
- };
- break;
- }
- }
- }
- }
-
- if (currentTextMessage is not null)
- {
- yield return currentTextMessage;
- }
- }
-
- private static OllamaTool ToOllamaTool(AIFunction function)
- {
- return new()
- {
- Type = "function",
- Function = new OllamaFunctionTool
- {
- Name = function.Name,
- Description = function.Description,
- Parameters = JsonSerializer.Deserialize(_schemaTransformCache.GetOrCreateTransformedSchema(function), JsonContext.Default.OllamaFunctionToolParameters)!,
- }
- };
- }
-}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatRequest.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatRequest.cs
deleted file mode 100644
index 7cdadb91666..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatRequest.cs
+++ /dev/null
@@ -1,17 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System.Collections.Generic;
-using System.Text.Json;
-
-namespace Microsoft.Extensions.AI;
-
-internal sealed class OllamaChatRequest
-{
- public required string Model { get; set; }
- public required IList<OllamaChatRequestMessage> Messages { get; set; }
- public JsonElement? Format { get; set; }
- public bool Stream { get; set; }
- public IEnumerable<OllamaTool>? Tools { get; set; }
- public OllamaRequestOptions? Options { get; set; }
-}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatRequestMessage.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatRequestMessage.cs
deleted file mode 100644
index 5a377b1eb34..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatRequestMessage.cs
+++ /dev/null
@@ -1,13 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System.Collections.Generic;
-
-namespace Microsoft.Extensions.AI;
-
-internal sealed class OllamaChatRequestMessage
-{
- public required string Role { get; set; }
- public string? Content { get; set; }
- public IList<string>? Images { get; set; }
-}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatResponse.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatResponse.cs
deleted file mode 100644
index 8c39f9ab598..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatResponse.cs
+++ /dev/null
@@ -1,20 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-namespace Microsoft.Extensions.AI;
-
-internal sealed class OllamaChatResponse
-{
- public string? Model { get; set; }
- public string? CreatedAt { get; set; }
- public long? TotalDuration { get; set; }
- public long? LoadDuration { get; set; }
- public string? DoneReason { get; set; }
- public int? PromptEvalCount { get; set; }
- public long? PromptEvalDuration { get; set; }
- public int? EvalCount { get; set; }
- public long? EvalDuration { get; set; }
- public OllamaChatResponseMessage? Message { get; set; }
- public bool Done { get; set; }
- public string? Error { get; set; }
-}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatResponseMessage.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatResponseMessage.cs
deleted file mode 100644
index bf73c08d793..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatResponseMessage.cs
+++ /dev/null
@@ -1,11 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-namespace Microsoft.Extensions.AI;
-
-internal sealed class OllamaChatResponseMessage
-{
- public required string Role { get; set; }
- public required string Content { get; set; }
- public OllamaToolCall[]? ToolCalls { get; set; }
-}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaEmbeddingGenerator.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaEmbeddingGenerator.cs
deleted file mode 100644
index 0b0d4d3b344..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaEmbeddingGenerator.cs
+++ /dev/null
@@ -1,165 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Net.Http;
-using System.Net.Http.Json;
-using System.Threading;
-using System.Threading.Tasks;
-using Microsoft.Shared.Diagnostics;
-
-#pragma warning disable S3358 // Ternary operators should not be nested
-
-namespace Microsoft.Extensions.AI;
-
-/// <summary>Represents an <see cref="IEmbeddingGenerator{TInput, TEmbedding}"/> for Ollama.</summary>
-public sealed class OllamaEmbeddingGenerator : IEmbeddingGenerator<string, Embedding<float>>
-{
- /// Metadata about the embedding generator.
- private readonly EmbeddingGeneratorMetadata _metadata;
-
- /// The api/embeddings endpoint URI.
- private readonly Uri _apiEmbeddingsEndpoint;
-
- /// <summary>The <see cref="HttpClient"/> to use for sending requests.</summary>
- private readonly HttpClient _httpClient;
-
- /// <summary>Initializes a new instance of the <see cref="OllamaEmbeddingGenerator"/> class.</summary>
- /// <param name="endpoint">The endpoint URI where Ollama is hosted.</param>
- /// <param name="modelId">
- /// The ID of the model to use. This ID can also be overridden per request via <see cref="EmbeddingGenerationOptions.ModelId"/>.
- /// Either this parameter or <see cref="EmbeddingGenerationOptions.ModelId"/> must provide a valid model ID.
- /// </param>
- /// <param name="httpClient">An <see cref="HttpClient"/> instance to use for HTTP operations.</param>
- public OllamaEmbeddingGenerator(string endpoint, string? modelId = null, HttpClient? httpClient = null)
- : this(new Uri(Throw.IfNull(endpoint)), modelId, httpClient)
- {
- }
-
- /// <summary>Initializes a new instance of the <see cref="OllamaEmbeddingGenerator"/> class.</summary>
- /// <param name="endpoint">The endpoint URI where Ollama is hosted.</param>
- /// <param name="modelId">
- /// The ID of the model to use. This ID can also be overridden per request via <see cref="EmbeddingGenerationOptions.ModelId"/>.
- /// Either this parameter or <see cref="EmbeddingGenerationOptions.ModelId"/> must provide a valid model ID.
- /// </param>
- /// <param name="httpClient">An <see cref="HttpClient"/> instance to use for HTTP operations.</param>
- /// <exception cref="ArgumentNullException"><paramref name="endpoint"/> is <see langword="null"/>.</exception>
- /// <exception cref="ArgumentException"><paramref name="modelId"/> is empty or composed entirely of whitespace.</exception>
- public OllamaEmbeddingGenerator(Uri endpoint, string? modelId = null, HttpClient? httpClient = null)
- {
- _ = Throw.IfNull(endpoint);
- if (modelId is not null)
- {
- _ = Throw.IfNullOrWhitespace(modelId);
- }
-
- _apiEmbeddingsEndpoint = new Uri(endpoint, "api/embed");
- _httpClient = httpClient ?? OllamaUtilities.SharedClient;
- _metadata = new("ollama", endpoint, modelId);
- }
-
- /// <inheritdoc />
- object? IEmbeddingGenerator.GetService(Type serviceType, object? serviceKey)
- {
- _ = Throw.IfNull(serviceType);
-
- return
- serviceKey is not null ? null :
- serviceType == typeof(EmbeddingGeneratorMetadata) ? _metadata :
- serviceType.IsInstanceOfType(this) ? this :
- null;
- }
-
- /// <inheritdoc />
- public void Dispose()
- {
- if (_httpClient != OllamaUtilities.SharedClient)
- {
- _httpClient.Dispose();
- }
- }
-
- /// <inheritdoc />
- public async Task<GeneratedEmbeddings<Embedding<float>>> GenerateAsync(
- IEnumerable<string> values, EmbeddingGenerationOptions? options = null, CancellationToken cancellationToken = default)
- {
- _ = Throw.IfNull(values);
-
- // Create request.
- string[] inputs = values.ToArray();
- string? requestModel = options?.ModelId ?? _metadata.DefaultModelId;
- var request = new OllamaEmbeddingRequest
- {
- Model = requestModel ?? string.Empty,
- Input = inputs,
- };
-
- if (options?.AdditionalProperties is { } requestProps)
- {
- if (requestProps.TryGetValue("keep_alive", out long keepAlive))
- {
- request.KeepAlive = keepAlive;
- }
-
- if (requestProps.TryGetValue("truncate", out bool truncate))
- {
- request.Truncate = truncate;
- }
- }
-
- // Send request and get response.
- var httpResponse = await _httpClient.PostAsJsonAsync(
- _apiEmbeddingsEndpoint,
- request,
- JsonContext.Default.OllamaEmbeddingRequest,
- cancellationToken).ConfigureAwait(false);
-
- if (!httpResponse.IsSuccessStatusCode)
- {
- await OllamaUtilities.ThrowUnsuccessfulOllamaResponseAsync(httpResponse, cancellationToken).ConfigureAwait(false);
- }
-
- var response = (await httpResponse.Content.ReadFromJsonAsync(
- JsonContext.Default.OllamaEmbeddingResponse,
- cancellationToken).ConfigureAwait(false))!;
-
- // Validate response.
- if (!string.IsNullOrEmpty(response.Error))
- {
- throw new InvalidOperationException($"Ollama error: {response.Error}");
- }
-
- if (response.Embeddings is null || response.Embeddings.Length != inputs.Length)
- {
- throw new InvalidOperationException($"Ollama generated {response.Embeddings?.Length ?? 0} embeddings but {inputs.Length} were expected.");
- }
-
- // Convert response into result objects.
- AdditionalPropertiesDictionary<long>? additionalCounts = null;
- OllamaUtilities.TransferNanosecondsTime(response, r => r.TotalDuration, "total_duration", ref additionalCounts);
- OllamaUtilities.TransferNanosecondsTime(response, r => r.LoadDuration, "load_duration", ref additionalCounts);
-
- UsageDetails? usage = null;
- if (additionalCounts is not null || response.PromptEvalCount is not null)
- {
- usage = new()
- {
- InputTokenCount = response.PromptEvalCount,
- TotalTokenCount = response.PromptEvalCount,
- AdditionalCounts = additionalCounts,
- };
- }
-
- return new(response.Embeddings.Select(e =>
- new Embedding<float>(e)
- {
- CreatedAt = DateTimeOffset.UtcNow,
- ModelId = response.Model ?? requestModel,
- }))
- {
- Usage = usage,
- };
- }
-}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaEmbeddingRequest.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaEmbeddingRequest.cs
deleted file mode 100644
index 07e3530b8ed..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaEmbeddingRequest.cs
+++ /dev/null
@@ -1,13 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-namespace Microsoft.Extensions.AI;
-
-internal sealed class OllamaEmbeddingRequest
-{
- public required string Model { get; set; }
- public required string[] Input { get; set; }
- public OllamaRequestOptions? Options { get; set; }
- public bool? Truncate { get; set; }
- public long? KeepAlive { get; set; }
-}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaEmbeddingResponse.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaEmbeddingResponse.cs
deleted file mode 100644
index c4fd2cde87c..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaEmbeddingResponse.cs
+++ /dev/null
@@ -1,21 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System.Text.Json.Serialization;
-
-namespace Microsoft.Extensions.AI;
-
-internal sealed class OllamaEmbeddingResponse
-{
- [JsonPropertyName("model")]
- public string? Model { get; set; }
- [JsonPropertyName("embeddings")]
- public float[][]? Embeddings { get; set; }
- [JsonPropertyName("total_duration")]
- public long? TotalDuration { get; set; }
- [JsonPropertyName("load_duration")]
- public long? LoadDuration { get; set; }
- [JsonPropertyName("prompt_eval_count")]
- public int? PromptEvalCount { get; set; }
- public string? Error { get; set; }
-}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaFunctionCallContent.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaFunctionCallContent.cs
deleted file mode 100644
index f518413586a..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaFunctionCallContent.cs
+++ /dev/null
@@ -1,13 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System.Text.Json;
-
-namespace Microsoft.Extensions.AI;
-
-internal sealed class OllamaFunctionCallContent
-{
- public string? CallId { get; set; }
- public string? Name { get; set; }
- public JsonElement Arguments { get; set; }
-}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaFunctionResultContent.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaFunctionResultContent.cs
deleted file mode 100644
index ba3eab607b8..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaFunctionResultContent.cs
+++ /dev/null
@@ -1,12 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System.Text.Json;
-
-namespace Microsoft.Extensions.AI;
-
-internal sealed class OllamaFunctionResultContent
-{
- public string? CallId { get; set; }
- public JsonElement Result { get; set; }
-}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaFunctionTool.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaFunctionTool.cs
deleted file mode 100644
index 880e37bec2a..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaFunctionTool.cs
+++ /dev/null
@@ -1,11 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-namespace Microsoft.Extensions.AI;
-
-internal sealed class OllamaFunctionTool
-{
- public required string Name { get; set; }
- public required string Description { get; set; }
- public required OllamaFunctionToolParameters Parameters { get; set; }
-}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaFunctionToolCall.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaFunctionToolCall.cs
deleted file mode 100644
index c94d41bd3f3..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaFunctionToolCall.cs
+++ /dev/null
@@ -1,12 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System.Collections.Generic;
-
-namespace Microsoft.Extensions.AI;
-
-internal sealed class OllamaFunctionToolCall
-{
- public required string Name { get; set; }
- public IDictionary<string, object?>? Arguments { get; set; }
-}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaFunctionToolParameter.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaFunctionToolParameter.cs
deleted file mode 100644
index 77ba2a5561c..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaFunctionToolParameter.cs
+++ /dev/null
@@ -1,13 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System.Collections.Generic;
-
-namespace Microsoft.Extensions.AI;
-
-internal sealed class OllamaFunctionToolParameter
-{
- public string? Type { get; set; }
- public string? Description { get; set; }
- public IEnumerable<string>? Enum { get; set; }
-}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaFunctionToolParameters.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaFunctionToolParameters.cs
deleted file mode 100644
index 9fa7d0d2adc..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaFunctionToolParameters.cs
+++ /dev/null
@@ -1,14 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System.Collections.Generic;
-using System.Text.Json;
-
-namespace Microsoft.Extensions.AI;
-
-internal sealed class OllamaFunctionToolParameters
-{
- public string Type { get; set; } = "object";
- public required IDictionary<string, JsonElement> Properties { get; set; }
- public IList<string>? Required { get; set; }
-}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaRequestOptions.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaRequestOptions.cs
deleted file mode 100644
index cc8b548c1a1..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaRequestOptions.cs
+++ /dev/null
@@ -1,41 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-namespace Microsoft.Extensions.AI;
-
-#pragma warning disable IDE1006 // Naming Styles
-
-internal sealed class OllamaRequestOptions
-{
- public bool? embedding_only { get; set; }
- public bool? f16_kv { get; set; }
- public float? frequency_penalty { get; set; }
- public bool? logits_all { get; set; }
- public bool? low_vram { get; set; }
- public int? main_gpu { get; set; }
- public float? min_p { get; set; }
- public int? mirostat { get; set; }
- public float? mirostat_eta { get; set; }
- public float? mirostat_tau { get; set; }
- public int? num_batch { get; set; }
- public int? num_ctx { get; set; }
- public int? num_gpu { get; set; }
- public int? num_keep { get; set; }
- public int? num_predict { get; set; }
- public int? num_thread { get; set; }
- public bool? numa { get; set; }
- public bool? penalize_newline { get; set; }
- public float? presence_penalty { get; set; }
- public int? repeat_last_n { get; set; }
- public float? repeat_penalty { get; set; }
- public long? seed { get; set; }
- public string[]? stop { get; set; }
- public float? temperature { get; set; }
- public float? tfs_z { get; set; }
- public int? top_k { get; set; }
- public float? top_p { get; set; }
- public float? typical_p { get; set; }
- public bool? use_mlock { get; set; }
- public bool? use_mmap { get; set; }
- public bool? vocab_only { get; set; }
-}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaTool.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaTool.cs
deleted file mode 100644
index 457793dc476..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaTool.cs
+++ /dev/null
@@ -1,10 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-namespace Microsoft.Extensions.AI;
-
-internal sealed class OllamaTool
-{
- public required string Type { get; set; }
- public required OllamaFunctionTool Function { get; set; }
-}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaToolCall.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaToolCall.cs
deleted file mode 100644
index a00d0e0e290..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaToolCall.cs
+++ /dev/null
@@ -1,9 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-namespace Microsoft.Extensions.AI;
-
-internal sealed class OllamaToolCall
-{
- public OllamaFunctionToolCall? Function { get; set; }
-}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaUtilities.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaUtilities.cs
deleted file mode 100644
index ea2625bd50e..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaUtilities.cs
+++ /dev/null
@@ -1,72 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System;
-using System.Diagnostics;
-using System.Diagnostics.CodeAnalysis;
-using System.Net.Http;
-using System.Text.Json;
-using System.Threading;
-using System.Threading.Tasks;
-
-namespace Microsoft.Extensions.AI;
-
-internal static class OllamaUtilities
-{
- /// <summary>Gets a singleton <see cref="HttpClient"/> used when no other instance is supplied.</summary>
- public static HttpClient SharedClient { get; } = new()
- {
- // Expected use is localhost access for non-production use. Typical production use should supply
- // an HttpClient configured with whatever more robust resilience policy / handlers are appropriate.
- Timeout = Timeout.InfiniteTimeSpan,
- };
-
- public static void TransferNanosecondsTime<TResponse>(TResponse response, Func<TResponse, long?> getNanoseconds, string key, ref AdditionalPropertiesDictionary<long>? metadata)
- {
- if (getNanoseconds(response) is long duration)
- {
- try
- {
- (metadata ??= [])[key] = duration;
- }
- catch (OverflowException)
- {
- // Ignore options that don't convert
- }
- }
- }
-
- [DoesNotReturn]
- public static async ValueTask ThrowUnsuccessfulOllamaResponseAsync(HttpResponseMessage response, CancellationToken cancellationToken)
- {
- Debug.Assert(!response.IsSuccessStatusCode, "must only be invoked for unsuccessful responses.");
-
- // Read the entire response content into a string.
- string errorContent =
-#if NET
- await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
-#else
- await response.Content.ReadAsStringAsync().ConfigureAwait(false);
-#endif
-
- // The response content *could* be JSON formatted, try to extract the error field.
-
-#pragma warning disable CA1031 // Do not catch general exception types
- try
- {
- using JsonDocument document = JsonDocument.Parse(errorContent);
- if (document.RootElement.TryGetProperty("error", out JsonElement errorElement) &&
- errorElement.ValueKind is JsonValueKind.String)
- {
- errorContent = errorElement.GetString()!;
- }
- }
- catch
- {
- // Ignore JSON parsing errors.
- }
-#pragma warning restore CA1031 // Do not catch general exception types
-
- throw new InvalidOperationException($"Ollama error: {errorContent}");
- }
-}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/README.md b/src/Libraries/Microsoft.Extensions.AI.Ollama/README.md
deleted file mode 100644
index 3c85cd17087..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/README.md
+++ /dev/null
@@ -1,11 +0,0 @@
-# Microsoft.Extensions.AI.Ollama
-
-This package is deprecated and the [OllamaSharp](https://www.nuget.org/packages/ollamasharp) package is recommended. `OllamaSharp` provides .NET bindings for the [Ollama API](https://github.com/jmorganca/ollama/blob/main/docs/api.md), simplifying interactions with Ollama both locally and remotely.
-
-No further updates, features, or fixes are planned for the `Microsoft.Extensions.AI.Ollama` package.
-
-## Related packages
-
-* [OllamaSharp](https://www.nuget.org/packages/OllamaSharp)
-* [Microsoft.Extensions.AI](https://www.nuget.org/packages/Microsoft.Extensions.AI)
-* [Microsoft.Extensions.AI.Abstractions](https://www.nuget.org/packages/Microsoft.Extensions.AI.Abstractions)
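
Since this PR removes `OllamaChatClient` and `OllamaEmbeddingGenerator`, callers need the replacement the README points to. A minimal migration sketch, assuming OllamaSharp's `OllamaApiClient` (which implements `Microsoft.Extensions.AI.IChatClient`) and a local Ollama endpoint at `http://localhost:11434` serving the `llama3.1` model; adjust names to your deployment:

```csharp
// Migration sketch only (not part of this diff): OllamaSharp's OllamaApiClient
// implements IChatClient, so it can slot into the same Microsoft.Extensions.AI
// pipelines that previously used the removed OllamaChatClient.
using System;
using Microsoft.Extensions.AI;
using OllamaSharp;

// Assumed endpoint and default model.
IChatClient client = new OllamaApiClient(new Uri("http://localhost:11434"), "llama3.1");

// Same IChatClient call pattern as before.
ChatResponse response = await client.GetResponseAsync("hello");
Console.WriteLine(response.Text);
```
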
diff --git a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/Microsoft.Extensions.AI.Ollama.Tests.csproj b/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/Microsoft.Extensions.AI.Ollama.Tests.csproj
deleted file mode 100644
index 5db789e3b6b..00000000000
--- a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/Microsoft.Extensions.AI.Ollama.Tests.csproj
+++ /dev/null
@@ -1,22 +0,0 @@
-
-
- Microsoft.Extensions.AI
- Unit tests for Microsoft.Extensions.AI.Ollama
-
-
-
- true
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientIntegrationTests.cs b/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientIntegrationTests.cs
deleted file mode 100644
index 83e84e49f5b..00000000000
--- a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientIntegrationTests.cs
+++ /dev/null
@@ -1,115 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System;
-using System.Collections.Generic;
-using System.ComponentModel;
-using System.Threading;
-using System.Threading.Tasks;
-using Microsoft.TestUtilities;
-using Xunit;
-
-namespace Microsoft.Extensions.AI;
-
-public class OllamaChatClientIntegrationTests : ChatClientIntegrationTests
-{
- protected override IChatClient? CreateChatClient() =>
- IntegrationTestHelpers.GetOllamaUri() is Uri endpoint ?
- new OllamaChatClient(endpoint, "llama3.1") :
- null;
-
- public override Task FunctionInvocation_RequireAny() =>
- throw new SkipTestException("Ollama does not currently support requiring function invocation.");
-
- public override Task FunctionInvocation_RequireSpecific() =>
- throw new SkipTestException("Ollama does not currently support requiring function invocation.");
-
- protected override string? GetModel_MultiModal_DescribeImage() => "llava";
-
- [ConditionalFact]
- public async Task PromptBasedFunctionCalling_NoArgs()
- {
- SkipIfNotEnabled();
-
- using var chatClient = CreateChatClient()!
- .AsBuilder()
- .UseFunctionInvocation()
- .UsePromptBasedFunctionCalling()
- .Use(innerClient => new AssertNoToolsDefinedChatClient(innerClient))
- .Build();
-
- var secretNumber = 42;
- var response = await chatClient.GetResponseAsync("What is the current secret number? Answer with digits only.", new ChatOptions
- {
- ModelId = "llama3:8b",
- Tools = [AIFunctionFactory.Create(() => secretNumber, "GetSecretNumber")],
- Temperature = 0,
- Seed = 0,
- });
-
- Assert.Contains(secretNumber.ToString(), response.Text);
- }
-
- [ConditionalFact]
- public async Task PromptBasedFunctionCalling_WithArgs()
- {
- SkipIfNotEnabled();
-
- using var chatClient = CreateChatClient()!
- .AsBuilder()
- .UseFunctionInvocation()
- .UsePromptBasedFunctionCalling()
- .Use(innerClient => new AssertNoToolsDefinedChatClient(innerClient))
- .Build();
-
- var stockPriceTool = AIFunctionFactory.Create([Description("Returns the stock price for a given ticker symbol")] (
- [Description("The ticker symbol")] string symbol,
- [Description("The currency code such as USD or JPY")] string currency) =>
- {
- Assert.Equal("MSFT", symbol);
- Assert.Equal("GBP", currency);
- return 999;
- }, "GetStockPrice");
-
- var didCallIrrelevantTool = false;
- var irrelevantTool = AIFunctionFactory.Create(() => { didCallIrrelevantTool = true; return 123; }, "GetSecretNumber");
-
- var response = await chatClient.GetResponseAsync("What's the stock price for Microsoft in British pounds?", new ChatOptions
- {
- Tools = [stockPriceTool, irrelevantTool],
- Temperature = 0,
- Seed = 0,
- });
-
- Assert.Contains("999", response.Text);
- Assert.False(didCallIrrelevantTool);
- }
-
- [ConditionalFact]
- public async Task InvalidModelParameter_ThrowsInvalidOperationException()
- {
- SkipIfNotEnabled();
-
- var endpoint = IntegrationTestHelpers.GetOllamaUri();
- Assert.NotNull(endpoint);
-
- using var chatClient = new OllamaChatClient(endpoint, modelId: "inexistent-model");
-
- InvalidOperationException ex;
- ex = await Assert.ThrowsAsync<InvalidOperationException>(() => chatClient.GetResponseAsync("Hello, world!"));
- Assert.Contains("inexistent-model", ex.Message);
-
- ex = await Assert.ThrowsAsync<InvalidOperationException>(() => chatClient.GetStreamingResponseAsync("Hello, world!").ToChatResponseAsync());
- Assert.Contains("inexistent-model", ex.Message);
- }
-
- private sealed class AssertNoToolsDefinedChatClient(IChatClient innerClient) : DelegatingChatClient(innerClient)
- {
- public override Task<ChatResponse> GetResponseAsync(
- IEnumerable messages, ChatOptions? options = null, CancellationToken cancellationToken = default)
- {
- Assert.Null(options?.Tools);
- return base.GetResponseAsync(messages, options, cancellationToken);
- }
- }
-}
diff --git a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientTests.cs
deleted file mode 100644
index 2f716d2fe7d..00000000000
--- a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientTests.cs
+++ /dev/null
@@ -1,487 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System;
-using System.Collections.Generic;
-using System.ComponentModel;
-using System.Linq;
-using System.Net.Http;
-using System.Text.Json;
-using System.Text.RegularExpressions;
-using System.Threading;
-using System.Threading.Tasks;
-using Microsoft.Extensions.Caching.Distributed;
-using Microsoft.Extensions.Caching.Memory;
-using Xunit;
-
-#pragma warning disable S103 // Lines should not be too long
-
-namespace Microsoft.Extensions.AI;
-
-public class OllamaChatClientTests
-{
- [Fact]
- public void Ctor_InvalidArgs_Throws()
- {
- Assert.Throws("endpoint", () => new OllamaChatClient((Uri)null!));
- Assert.Throws("modelId", () => new OllamaChatClient("http://localhost", " "));
- }
-
- [Fact]
- public void ToolCallJsonSerializerOptions_HasExpectedValue()
- {
- using OllamaChatClient client = new("http://localhost", "model");
-
- Assert.Same(client.ToolCallJsonSerializerOptions, AIJsonUtilities.DefaultOptions);
- Assert.Throws("value", () => client.ToolCallJsonSerializerOptions = null!);
-
- JsonSerializerOptions options = new();
- client.ToolCallJsonSerializerOptions = options;
- Assert.Same(options, client.ToolCallJsonSerializerOptions);
- }
-
- [Fact]
- public void GetService_SuccessfullyReturnsUnderlyingClient()
- {
- using OllamaChatClient client = new("http://localhost");
-
- Assert.Same(client, client.GetService<OllamaChatClient>());
- Assert.Same(client, client.GetService<IChatClient>());
-
- using IChatClient pipeline = client
- .AsBuilder()
- .UseFunctionInvocation()
- .UseOpenTelemetry()
- .UseDistributedCache(new MemoryDistributedCache(Options.Options.Create(new MemoryDistributedCacheOptions())))
- .Build();
-
- Assert.NotNull(pipeline.GetService<FunctionInvokingChatClient>());
- Assert.NotNull(pipeline.GetService<DistributedCachingChatClient>());
- Assert.NotNull(pipeline.GetService<CachingChatClient>());
- Assert.NotNull(pipeline.GetService<OpenTelemetryChatClient>());
-
- Assert.Same(client, pipeline.GetService<OllamaChatClient>());
- Assert.IsType<FunctionInvokingChatClient>(pipeline.GetService<IChatClient>());
- }
-
- [Fact]
- public void Ctor_ProducesExpectedMetadata()
- {
- Uri endpoint = new("http://localhost/some/endpoint");
- string model = "amazingModel";
-
- using IChatClient chatClient = new OllamaChatClient(endpoint, model);
- var metadata = chatClient.GetService<ChatClientMetadata>();
- Assert.NotNull(metadata);
- Assert.Equal("ollama", metadata.ProviderName);
- Assert.Equal(endpoint, metadata.ProviderUri);
- Assert.Equal(model, metadata.DefaultModelId);
- }
-
- [Fact]
- public async Task BasicRequestResponse_NonStreaming()
- {
- const string Input = """
- {
- "model":"llama3.1",
- "messages":[{"role":"user","content":"hello"}],
- "stream":false,
- "options":{"num_predict":10,"temperature":0.5}
- }
- """;
-
- const string Output = """
- {
- "model": "llama3.1",
- "created_at": "2024-10-01T15:46:10.5248793Z",
- "message": {
- "role": "assistant",
- "content": "Hello! How are you today? Is there something"
- },
- "done_reason": "length",
- "done": true,
- "total_duration": 22186844400,
- "load_duration": 17947219100,
- "prompt_eval_count": 11,
- "prompt_eval_duration": 1953805000,
- "eval_count": 10,
- "eval_duration": 2277274000
- }
- """;
-
- using VerbatimHttpHandler handler = new(Input, Output);
- using HttpClient httpClient = new(handler);
- using OllamaChatClient client = new("http://localhost:11434", "llama3.1", httpClient);
- var response = await client.GetResponseAsync("hello", new()
- {
- MaxOutputTokens = 10,
- Temperature = 0.5f,
- });
- Assert.NotNull(response);
-
- Assert.Equal("Hello! How are you today? Is there something", response.Text);
- Assert.Single(response.Messages.Single().Contents);
- Assert.Equal(ChatRole.Assistant, response.Messages.Single().Role);
- Assert.Equal("llama3.1", response.ModelId);
- Assert.Equal(DateTimeOffset.Parse("2024-10-01T15:46:10.5248793Z"), response.CreatedAt);
- Assert.Equal(ChatFinishReason.Length, response.FinishReason);
- Assert.NotNull(response.Usage);
- Assert.Equal(11, response.Usage.InputTokenCount);
- Assert.Equal(10, response.Usage.OutputTokenCount);
- Assert.Equal(21, response.Usage.TotalTokenCount);
- }
-
- [Fact]
- public async Task BasicRequestResponse_Streaming()
- {
- const string Input = """
- {
- "model":"llama3.1",
- "messages":[{"role":"user","content":"hello"}],
- "stream":true,
- "options":{"num_predict":20,"temperature":0.5}
- }
- """;
-
- const string Output = """
- {"model":"llama3.1","created_at":"2024-10-01T16:15:20.4965315Z","message":{"role":"assistant","content":"Hello"},"done":false}
- {"model":"llama3.1","created_at":"2024-10-01T16:15:20.763058Z","message":{"role":"assistant","content":"!"},"done":false}
- {"model":"llama3.1","created_at":"2024-10-01T16:15:20.9751134Z","message":{"role":"assistant","content":" How"},"done":false}
- {"model":"llama3.1","created_at":"2024-10-01T16:15:21.1788125Z","message":{"role":"assistant","content":" are"},"done":false}
- {"model":"llama3.1","created_at":"2024-10-01T16:15:21.3883171Z","message":{"role":"assistant","content":" you"},"done":false}
- {"model":"llama3.1","created_at":"2024-10-01T16:15:21.5912498Z","message":{"role":"assistant","content":" today"},"done":false}
- {"model":"llama3.1","created_at":"2024-10-01T16:15:21.7968039Z","message":{"role":"assistant","content":"?"},"done":false}
- {"model":"llama3.1","created_at":"2024-10-01T16:15:22.0034152Z","message":{"role":"assistant","content":" Is"},"done":false}
- {"model":"llama3.1","created_at":"2024-10-01T16:15:22.1931196Z","message":{"role":"assistant","content":" there"},"done":false}
- {"model":"llama3.1","created_at":"2024-10-01T16:15:22.3827484Z","message":{"role":"assistant","content":" something"},"done":false}
- {"model":"llama3.1","created_at":"2024-10-01T16:15:22.5659027Z","message":{"role":"assistant","content":" I"},"done":false}
- {"model":"llama3.1","created_at":"2024-10-01T16:15:22.7488871Z","message":{"role":"assistant","content":" can"},"done":false}
- {"model":"llama3.1","created_at":"2024-10-01T16:15:22.9339881Z","message":{"role":"assistant","content":" help"},"done":false}
- {"model":"llama3.1","created_at":"2024-10-01T16:15:23.1201564Z","message":{"role":"assistant","content":" you"},"done":false}
- {"model":"llama3.1","created_at":"2024-10-01T16:15:23.303447Z","message":{"role":"assistant","content":" with"},"done":false}
- {"model":"llama3.1","created_at":"2024-10-01T16:15:23.4964909Z","message":{"role":"assistant","content":" or"},"done":false}
- {"model":"llama3.1","created_at":"2024-10-01T16:15:23.6837816Z","message":{"role":"assistant","content":" would"},"done":false}
- {"model":"llama3.1","created_at":"2024-10-01T16:15:23.8723142Z","message":{"role":"assistant","content":" you"},"done":false}
- {"model":"llama3.1","created_at":"2024-10-01T16:15:24.064613Z","message":{"role":"assistant","content":" like"},"done":false}
- {"model":"llama3.1","created_at":"2024-10-01T16:15:24.2504498Z","message":{"role":"assistant","content":" to"},"done":false}
- {"model":"llama3.1","created_at":"2024-10-01T16:15:24.2514508Z","message":{"role":"assistant","content":""},"done_reason":"length", "done":true,"total_duration":11912402900,"load_duration":6824559200,"prompt_eval_count":11,"prompt_eval_duration":1329601000,"eval_count":20,"eval_duration":3754262000}
- """;
-
- using VerbatimHttpHandler handler = new(Input, Output);
- using HttpClient httpClient = new(handler);
- using IChatClient client = new OllamaChatClient("http://localhost:11434", "llama3.1", httpClient);
-
- List<ChatResponseUpdate> updates = [];
- var streamingResponse = client.GetStreamingResponseAsync("hello", new()
- {
- MaxOutputTokens = 20,
- Temperature = 0.5f,
- });
- await foreach (var update in streamingResponse)
- {
- updates.Add(update);
- }
-
- Assert.Equal(21, updates.Count);
-
- DateTimeOffset[] createdAts = Regex.Matches(Output, @"2024.*?Z").Cast<Match>().Select(m => DateTimeOffset.Parse(m.Value)).ToArray();
-
- for (int i = 0; i < updates.Count; i++)
- {
- Assert.NotNull(updates[i].ResponseId);
- Assert.NotNull(updates[i].MessageId);
- Assert.Equal(i < updates.Count - 1 ? 1 : 2, updates[i].Contents.Count);
- Assert.Equal(ChatRole.Assistant, updates[i].Role);
- Assert.Equal("llama3.1", updates[i].ModelId);
- Assert.Equal(createdAts[i], updates[i].CreatedAt);
- Assert.Equal(i < updates.Count - 1 ? null : ChatFinishReason.Length, updates[i].FinishReason);
- }
-
- Assert.Equal("Hello! How are you today? Is there something I can help you with or would you like to", string.Concat(updates.Select(u => u.Text)));
- Assert.Equal(2, updates[updates.Count - 1].Contents.Count);
- Assert.IsType<TextContent>(updates[updates.Count - 1].Contents[0]);
- UsageContent usage = Assert.IsType<UsageContent>(updates[updates.Count - 1].Contents[1]);
- Assert.Equal(11, usage.Details.InputTokenCount);
- Assert.Equal(20, usage.Details.OutputTokenCount);
- Assert.Equal(31, usage.Details.TotalTokenCount);
-
- var chatResponse = await streamingResponse.ToChatResponseAsync();
- Assert.Single(Assert.Single(chatResponse.Messages).Contents);
- Assert.Equal("Hello! How are you today? Is there something I can help you with or would you like to", chatResponse.Text);
- }
-
- [Fact]
- public async Task MultipleMessages_NonStreaming()
- {
- const string Input = """
- {
- "model": "llama3.1",
- "messages": [
- {
- "role": "user",
- "content": "hello!"
- },
- {
- "role": "assistant",
- "content": "hi, how are you?"
- },
- {
- "role": "user",
- "content": "i\u0027m good. how are you?"
- }
- ],
- "stream": false,
- "options": {
- "frequency_penalty": 0.75,
- "presence_penalty": 0.5,
- "seed": 42,
- "stop": ["great"],
- "temperature": 0.25
- }
- }
- """;
-
- const string Output = """
- {
- "model": "llama3.1",
- "created_at": "2024-10-01T17:18:46.308987Z",
- "message": {
- "role": "assistant",
- "content": "I'm just a computer program, so I don't have feelings or emotions like humans do, but I'm functioning properly and ready to help with any questions or tasks you may have! How about we chat about something in particular or just shoot the breeze? Your choice!"
- },
- "done_reason": "stop",
- "done": true,
- "total_duration": 23229369000,
- "load_duration": 7724086300,
- "prompt_eval_count": 36,
- "prompt_eval_duration": 4245660000,
- "eval_count": 55,
- "eval_duration": 11256470000
- }
- """;
-
- using VerbatimHttpHandler handler = new(Input, Output);
- using HttpClient httpClient = new(handler);
- using IChatClient client = new OllamaChatClient("http://localhost:11434", httpClient: httpClient);
-
- List<ChatMessage> messages =
- [
- new(ChatRole.User, "hello!"),
- new(ChatRole.Assistant, "hi, how are you?"),
- new(ChatRole.User, "i'm good. how are you?"),
- ];
-
- var response = await client.GetResponseAsync(messages, new()
- {
- ModelId = "llama3.1",
- Temperature = 0.25f,
- FrequencyPenalty = 0.75f,
- PresencePenalty = 0.5f,
- StopSequences = ["great"],
- Seed = 42,
- });
- Assert.NotNull(response);
-
- Assert.Equal(
- VerbatimHttpHandler.RemoveWhiteSpace("""
- I'm just a computer program, so I don't have feelings or emotions like humans do,
- but I'm functioning properly and ready to help with any questions or tasks you may have!
- How about we chat about something in particular or just shoot the breeze ? Your choice!
- """),
- VerbatimHttpHandler.RemoveWhiteSpace(response.Text));
- Assert.Single(response.Messages.Single().Contents);
- Assert.Equal(ChatRole.Assistant, response.Messages.Single().Role);
- Assert.Equal("llama3.1", response.ModelId);
- Assert.Equal(DateTimeOffset.Parse("2024-10-01T17:18:46.308987Z"), response.CreatedAt);
- Assert.Equal(ChatFinishReason.Stop, response.FinishReason);
- Assert.NotNull(response.Usage);
- Assert.Equal(36, response.Usage.InputTokenCount);
- Assert.Equal(55, response.Usage.OutputTokenCount);
- Assert.Equal(91, response.Usage.TotalTokenCount);
- }
-
- [Fact]
- public async Task FunctionCallContent_NonStreaming()
- {
- const string Input = """
- {
- "model": "llama3.1",
- "messages": [
- {
- "role": "user",
- "content": "How old is Alice?"
- }
- ],
- "stream": false,
- "tools": [
- {
- "type": "function",
- "function": {
- "name": "GetPersonAge",
- "description": "Gets the age of the specified person.",
- "parameters": {
- "type": "object",
- "properties": {
- "personName": {
- "description": "The person whose age is being requested",
- "type": "string"
- }
- },
- "required": ["personName"]
- }
- }
- }
- ]
- }
- """;
-
- const string Output = """
- {
- "model": "llama3.1",
- "created_at": "2024-10-01T18:48:30.2669578Z",
- "message": {
- "role": "assistant",
- "content": "",
- "tool_calls": [
- {
- "function": {
- "name": "GetPersonAge",
- "arguments": {
- "personName": "Alice"
- }
- }
- }
- ]
- },
- "done_reason": "stop",
- "done": true,
- "total_duration": 27351311300,
- "load_duration": 8041538400,
- "prompt_eval_count": 170,
- "prompt_eval_duration": 16078776000,
- "eval_count": 19,
- "eval_duration": 3227962000
- }
- """;
-
- using VerbatimHttpHandler handler = new(Input, Output);
- using HttpClient httpClient = new(handler) { Timeout = Timeout.InfiniteTimeSpan };
- using IChatClient client = new OllamaChatClient("http://localhost:11434", "llama3.1", httpClient)
- {
- ToolCallJsonSerializerOptions = TestJsonSerializerContext.Default.Options,
- };
-
- var response = await client.GetResponseAsync("How old is Alice?", new()
- {
- Tools = [AIFunctionFactory.Create(([Description("The person whose age is being requested")] string personName) => 42, "GetPersonAge", "Gets the age of the specified person.")],
- });
- Assert.NotNull(response);
-
- Assert.Empty(response.Text);
- Assert.Equal("llama3.1", response.ModelId);
- Assert.Equal(ChatRole.Assistant, response.Messages.Single().Role);
- Assert.Equal(DateTimeOffset.Parse("2024-10-01T18:48:30.2669578Z"), response.CreatedAt);
- Assert.Equal(ChatFinishReason.Stop, response.FinishReason);
- Assert.NotNull(response.Usage);
- Assert.Equal(170, response.Usage.InputTokenCount);
- Assert.Equal(19, response.Usage.OutputTokenCount);
- Assert.Equal(189, response.Usage.TotalTokenCount);
-
- Assert.Single(response.Messages.Single().Contents);
- FunctionCallContent fcc = Assert.IsType<FunctionCallContent>(response.Messages.Single().Contents[0]);
- Assert.Equal("GetPersonAge", fcc.Name);
- AssertExtensions.EqualFunctionCallParameters(new Dictionary<string, object?> { ["personName"] = "Alice" }, fcc.Arguments);
- }
-
- [Fact]
- public async Task FunctionResultContent_NonStreaming()
- {
- const string Input = """
- {
- "model": "llama3.1",
- "messages": [
- {
- "role": "user",
- "content": "How old is Alice?"
- },
- {
- "role": "assistant",
- "content": "{\u0022call_id\u0022:\u0022abcd1234\u0022,\u0022name\u0022:\u0022GetPersonAge\u0022,\u0022arguments\u0022:{\u0022personName\u0022:\u0022Alice\u0022}}"
- },
- {
- "role": "tool",
- "content": "{\u0022call_id\u0022:\u0022abcd1234\u0022,\u0022result\u0022:42}"
- }
- ],
- "stream": false,
- "tools": [
- {
- "type": "function",
- "function": {
- "name": "GetPersonAge",
- "description": "Gets the age of the specified person.",
- "parameters": {
- "type": "object",
- "properties": {
- "personName": {
- "description": "The person whose age is being requested",
- "type": "string"
- }
- },
- "required": ["personName"]
- }
- }
- }
- ]
- }
- """;
-
- const string Output = """
- {
- "model": "llama3.1",
- "created_at": "2024-10-01T20:57:20.157266Z",
- "message": {
- "role": "assistant",
- "content": "Alice is 42 years old."
- },
- "done_reason": "stop",
- "done": true,
- "total_duration": 20320666000,
- "load_duration": 8159642600,
- "prompt_eval_count": 106,
- "prompt_eval_duration": 10846727000,
- "eval_count": 8,
- "eval_duration": 1307842000
- }
- """;
-
- using VerbatimHttpHandler handler = new(Input, Output);
- using HttpClient httpClient = new(handler) { Timeout = Timeout.InfiniteTimeSpan };
- using IChatClient client = new OllamaChatClient("http://localhost:11434", "llama3.1", httpClient)
- {
- ToolCallJsonSerializerOptions = TestJsonSerializerContext.Default.Options,
- };
-
- var response = await client.GetResponseAsync(
- [
- new(ChatRole.User, "How old is Alice?"),
- new(ChatRole.Assistant, [new FunctionCallContent("abcd1234", "GetPersonAge", new Dictionary<string, object?> { ["personName"] = "Alice" })]),
- new(ChatRole.Tool, [new FunctionResultContent("abcd1234", 42)]),
- ],
- new()
- {
- Tools = [AIFunctionFactory.Create(([Description("The person whose age is being requested")] string personName) => 42, "GetPersonAge", "Gets the age of the specified person.")],
- });
- Assert.NotNull(response);
-
- Assert.Equal("Alice is 42 years old.", response.Text);
- Assert.Equal("llama3.1", response.ModelId);
- Assert.Equal(ChatRole.Assistant, response.Messages.Single().Role);
- Assert.Equal(DateTimeOffset.Parse("2024-10-01T20:57:20.157266Z"), response.CreatedAt);
- Assert.Equal(ChatFinishReason.Stop, response.FinishReason);
- Assert.NotNull(response.Usage);
- Assert.Equal(106, response.Usage.InputTokenCount);
- Assert.Equal(8, response.Usage.OutputTokenCount);
- Assert.Equal(114, response.Usage.TotalTokenCount);
- }
-}
diff --git a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaEmbeddingGeneratorIntegrationTests.cs b/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaEmbeddingGeneratorIntegrationTests.cs
deleted file mode 100644
index 493c0bf0333..00000000000
--- a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaEmbeddingGeneratorIntegrationTests.cs
+++ /dev/null
@@ -1,32 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System;
-using System.Threading.Tasks;
-using Microsoft.TestUtilities;
-using Xunit;
-
-namespace Microsoft.Extensions.AI;
-
-public class OllamaEmbeddingGeneratorIntegrationTests : EmbeddingGeneratorIntegrationTests
-{
- protected override IEmbeddingGenerator<string, Embedding<float>>? CreateEmbeddingGenerator() =>
- IntegrationTestHelpers.GetOllamaUri() is Uri endpoint ?
- new OllamaEmbeddingGenerator(endpoint, "all-minilm") :
- null;
-
- [ConditionalFact]
- public async Task InvalidModelParameter_ThrowsInvalidOperationException()
- {
- SkipIfNotEnabled();
-
- var endpoint = IntegrationTestHelpers.GetOllamaUri();
- Assert.NotNull(endpoint);
-
- using var generator = new OllamaEmbeddingGenerator(endpoint, modelId: "inexistent-model");
-
- InvalidOperationException ex;
- ex = await Assert.ThrowsAsync<InvalidOperationException>(() => generator.GenerateAsync(["Hello, world!"]));
- Assert.Contains("inexistent-model", ex.Message);
- }
-}
diff --git a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaEmbeddingGeneratorTests.cs b/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaEmbeddingGeneratorTests.cs
deleted file mode 100644
index be18138de84..00000000000
--- a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaEmbeddingGeneratorTests.cs
+++ /dev/null
@@ -1,102 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System;
-using System.Net.Http;
-using System.Threading.Tasks;
-using Microsoft.Extensions.Caching.Distributed;
-using Microsoft.Extensions.Caching.Memory;
-using Xunit;
-
-#pragma warning disable S103 // Lines should not be too long
-
-namespace Microsoft.Extensions.AI;
-
-public class OllamaEmbeddingGeneratorTests
-{
- [Fact]
- public void Ctor_InvalidArgs_Throws()
- {
- Assert.Throws("endpoint", () => new OllamaEmbeddingGenerator((string)null!));
- Assert.Throws("modelId", () => new OllamaEmbeddingGenerator(new Uri("http://localhost"), " "));
- }
-
- [Fact]
- public void GetService_SuccessfullyReturnsUnderlyingClient()
- {
- using OllamaEmbeddingGenerator generator = new("http://localhost");
-
- Assert.Same(generator, generator.GetService<OllamaEmbeddingGenerator>());
- Assert.Same(generator, generator.GetService<IEmbeddingGenerator<string, Embedding<float>>>());
-
- using IEmbeddingGenerator<string, Embedding<float>> pipeline = generator
- .AsBuilder()
- .UseOpenTelemetry()
- .UseDistributedCache(new MemoryDistributedCache(Options.Options.Create(new MemoryDistributedCacheOptions())))
- .Build();
-
- Assert.NotNull(pipeline.GetService<DistributedCachingEmbeddingGenerator<string, Embedding<float>>>());
- Assert.NotNull(pipeline.GetService<CachingEmbeddingGenerator<string, Embedding<float>>>());
- Assert.NotNull(pipeline.GetService<OpenTelemetryEmbeddingGenerator<string, Embedding<float>>>());
-
- Assert.Same(generator, pipeline.GetService<OllamaEmbeddingGenerator>());
- Assert.IsType<OpenTelemetryEmbeddingGenerator<string, Embedding<float>>>(pipeline.GetService<IEmbeddingGenerator<string, Embedding<float>>>());
- }
-
- [Fact]
- public void AsIEmbeddingGenerator_ProducesExpectedMetadata()
- {
- Uri endpoint = new("http://localhost/some/endpoint");
- string model = "amazingModel";
-
- using IEmbeddingGenerator<string, Embedding<float>> generator = new OllamaEmbeddingGenerator(endpoint, model);
- var metadata = generator.GetService<EmbeddingGeneratorMetadata>();
- Assert.Equal("ollama", metadata?.ProviderName);
- Assert.Equal(endpoint, metadata?.ProviderUri);
- Assert.Equal(model, metadata?.DefaultModelId);
- }
-
- [Fact]
- public async Task GetEmbeddingsAsync_ExpectedRequestResponse()
- {
- const string Input = """
- {"model":"all-minilm","input":["hello, world!","red, white, blue"]}
- """;
-
- const string Output = """
- {
- "model":"all-minilm",
- "embeddings":[
- [-0.038159743,0.032830726,-0.005602915,0.014363416,-0.04031945,-0.11662117,0.031710647,0.0019634133,-0.042558126,0.02925818,0.04254404,0.032178584,0.029820565,0.010947956,-0.05383333,-0.05031401,-0.023460664,0.010746779,-0.13776828,0.003972192,0.029283607,0.06673441,-0.015434976,0.048401773,-0.088160664,-0.012700827,0.04134059,0.0408592,-0.050058633,-0.058048956,0.048720006,0.068883754,0.0588242,0.008813041,-0.016036017,0.08514798,-0.07813561,-0.07740018,0.020856613,0.016228318,0.032506905,-0.053466275,-0.06220645,-0.024293836,0.0073994277,0.02410873,0.006477103,0.051144805,0.072868116,0.03460658,-0.0547553,-0.05937917,-0.007205277,0.020145971,0.035794333,0.005588114,0.010732389,-0.052755248,0.01006711,-0.008716047,-0.062840104,0.038445882,-0.013913384,0.07341423,0.09004691,-0.07995187,-0.016410379,0.044806693,-0.06886798,-0.03302609,-0.015488586,0.0112944925,0.03645402,0.06637969,-0.054364193,0.008732196,0.012049053,-0.038111813,0.006928739,0.05113517,0.07739711,-0.12295967,0.016389083,0.049567502,0.03162499,-0.039604694,0.0016613991,0.009564599,-0.03268798,-0.033994347,-0.13328508,0.0072719813,-0.010261588,0.038570367,-0.093384996,-0.041716397,0.069951184,-0.02632818,-0.149702,0.13445856,0.037486482,0.052814852,0.045044158,0.018727085,0.05445453,0.01727433,-0.032474063,0.046129994,-0.046679277,-0.03058037,-0.0181755,-0.048695795,0.033057086,-0.0038555008,0.050006237,-0.05828653,-0.010029618,0.01062073,-0.040105496,-0.0015263702,0.060846698,-0.04557025,0.049251337,0.026121102,0.019804202,-0.0016694543,0.059516467,-6.525171e-33,0.06351319,0.0030810465,0.028928237,0.17336167,0.0029677018,0.027755935,-0.09513812,-0.031182382,0.026697554,-0.0107956175,0.023849761,0.02378595,-0.03121345,0.049473017,-0.02506533,0.101713106,-0.079133175,-0.0032418896,0.04290832,0.094838716,-0.06652884,0.0062877694,0.02221229,0.0700068,-0.007469806,-0.0017550732,0.027011596,-0.075321496,0.114022695,0.0085597,-0.023766534,-0.04693697,0.014437173,0.01987886,-0.0046902793,0.0013660098,-0.034307938,-0.054156985,-0.09417741,-0.028919358,-0.018871028,0.04574328,0.047602862,-0.0031305805,-0.033291575,-0.0135114025,0.051019657,0.031115327,0.015239397,0.05413997,-0.085031144,0.013366392,-0.04757861,0.07102588,-0.013105953,-0.0023799809,0.050322797,-0.041649505,-0.014187793,0.0324716,0.005401626,0.091307014,0.0044665188,-0.018263677,-0.015284639,-0.04634121,0.038754962,0.014709013,0.052040145,0.0017918312,-0.014979437,0.027103048,0.03117813,0.023749126,-0.004567645,0.03617759,0.06680814,-0.001835277,0.021281,-0.057563916,0.019137124,0.031450257,-0.018432263,-0.040860977,0.10391725,0.011970765,-0.014854915,-0.10521159,-0.012288272,-0.00041675335,-0.09510029,0.058300544,0.042590536,-0.025064372,-0.09454636,4.0064686e-33,0.13224861,0.0053342036,-0.033114634,-0.09096768,-0.031561732,-0.03395822,-0.07202013,0.12591493,-0.08332582,0.052816514,0.001065021,0.022002738,0.1040207,0.013038866,0.04092958,0.018689224,0.1142518,0.024801003,0.014596161,0.006195551,-0.011214642,-0.035760444,-0.037979998,0.011274433,-0.051305123,0.007884909,0.06734877,0.0033462204,-0.09284879,0.037033774,-0.022331867,0.039951596,-0.030730229,-0.011403805,-0.014458028,0.024968812,-0.097553216,-0.03536226,-0.037567392,-0.010149212,-0.06387594,0.025570663,0.02060328,0.037549157,-0.104355134,-0.02837097,-0.052078977,0.0128349,-0.05123587,-0.029060647,-0.09632806,-0.042301137,0.067175224,-0.030890828,-0.010358077,0.027408795,-0.028092034,0.010337195,0.04303845,0.022324203,0.00797792,0.056084383,0.040727936,0.092925824,0.01653155,-0.053750493,0.00046004262,0.05
0728552,0.04253214,-0.029197674,0.00926312,-0.010662153,-0.037244495,0.002277273,-0.030296732,0.07459592,0.002572513,-0.017561244,0.0028881067,0.03841156,0.007247727,0.045637112,0.039992437,0.014227117,-0.014297474,0.05854321,0.03632371,0.05527864,-0.02007574,-0.08043163,-0.030238612,-0.014929122,0.022335418,0.011954643,-0.06906099,-1.8807288e-8,-0.07850291,0.046684187,-0.023935271,0.063510746,0.024001691,0.0014455577,-0.09078209,-0.066868275,-0.0801402,0.005480386,0.053663295,0.10483363,-0.066864185,0.015531167,0.06711155,0.07081655,-0.031996343,0.020819444,-0.021926524,-0.0073062326,-0.010652819,0.0041180425,0.033138428,-0.0789938,0.03876969,-0.075220205,-0.015715994,0.0059789424,0.005140016,-0.06150612,0.041992374,0.09544083,-0.043187104,0.014401576,-0.10615426,-0.027936764,0.011047429,0.069572434,0.06690283,-0.074798405,-0.07852024,0.04276141,-0.034642085,-0.106051244,-0.03581038,0.051521253,0.06865896,-0.04999753,0.0154549,-0.06452052,-0.07598782,0.02603005,0.074413665,-0.012398757,0.13330704,0.07475513,0.051348723,0.02098748,-0.02679416,0.08896129,0.039944872,-0.041040305,0.031930625,0.018114654],
- [0.007228383,-0.021804843,-0.07494023,-0.021707121,-0.021184582,0.09326986,0.10764054,-0.01918113,0.007439991,0.01367952,-0.034187328,-0.044076536,0.016042138,0.007507193,-0.016432272,0.025345335,0.010598066,-0.03832474,-0.14418823,-0.033625234,0.013156937,-0.0048872638,-0.08534306,-0.00003228713,-0.08900276,-0.00008128615,0.010332802,0.053303026,-0.050233904,-0.0879366,-0.064243905,-0.017168961,0.1284308,-0.015268303,-0.049664143,-0.07491954,0.021887481,0.015997978,-0.07967111,0.08744341,-0.039261423,-0.09904984,0.02936398,0.042995434,0.057036504,0.09063012,0.0000012311281,0.06120768,-0.050825767,-0.014443322,0.02879051,-0.002343813,-0.10176559,0.104563184,0.031316753,0.08251861,-0.041213628,-0.0217945,0.0649965,-0.011131547,0.018417398,-0.014460508,-0.05108664,0.11330918,0.01863208,0.006442521,-0.039408617,-0.03609412,-0.009156692,-0.0031261789,-0.010928502,-0.021108521,0.037411734,0.012443921,0.018142054,-0.0362644,0.058286663,-0.02733258,-0.052172586,-0.08320095,-0.07089281,-0.0970049,-0.048587535,0.055343032,0.048351917,0.06892102,-0.039993215,0.06344781,-0.084417015,0.003692423,-0.059397053,0.08186814,0.0029228176,-0.010551637,-0.058019258,0.092128515,0.06862907,-0.06558893,0.021121018,0.079212844,0.09616225,0.0045106052,0.039712362,-0.053576704,0.035097837,-0.04251009,-0.013761404,0.011582285,0.02387105,0.009042205,0.054141942,-0.051263757,-0.07984356,-0.020198742,-0.051623948,-0.0013434993,-0.05825417,-0.0026240738,0.0050159167,-0.06320204,0.07872169,-0.04051374,0.04671058,-0.05804034,-0.07103668,-0.07507343,0.015222599,-3.0948323e-33,0.0076309564,-0.06283016,0.024291662,0.12532257,0.013917241,0.04869009,-0.037988827,-0.035241846,-0.041410565,-0.033772282,0.018835608,0.081035286,-0.049912665,0.044602085,0.030495265,-0.009206943,0.027668765,0.011651487,-0.10254086,0.054472663,-0.06514106,0.12192646,0.048823033,-0.015688669,0.010323047,-0.02821445,-0.030832449,-0.035029083,-0.010604268,0.0014445938,0.08670387,0.01997448,0.0101131955,0.036524937,-0.033489946,-0.026745271,-0.04709222,0.015197909,0.018787097,-0.009976326,-0.0016434817,-0.024719588,-0.09179337,0.09343157,0.029579962,-0.015174558,0.071250066,0.010549244,0.010716396,0.05435638,-0.06391847,-0.031383075,0.007916095,0.012391228,-0.012053197,-0.017409964,0.013742709,0.0594159,-0.033767693,0.04505938,-0.0017214329,0.12797962,0.03223919,-0.054756388,0.025249248,-0.02273578,-0.04701282,-0.018718086,0.009820931,-0.06267794,-0.012644738,0.0068301614,0.093209736,-0.027372226,-0.09436381,0.003861504,0.054960024,-0.058553983,-0.042971537,-0.008994571,-0.08225824,-0.013560626,-0.01880568,0.0995795,-0.040887516,-0.0036491079,-0.010253542,-0.031025425,-0.006957114,-0.038943008,-0.090270124,-0.031345647,0.029613726,-0.099465184,-0.07469079,7.844707e-34,0.024241973,0.03597121,-0.049776066,0.05084303,0.006059542,-0.020719761,0.019962702,0.092246406,0.069408394,0.062306542,0.013837189,0.054749023,0.05090263,0.04100415,-0.02573441,0.09535842,0.036858294,0.059478357,0.0070162765,0.038462427,-0.053635903,0.05912332,-0.037887845,-0.0012995935,-0.068758026,0.0671618,0.029407106,-0.061569903,-0.07481879,-0.01849014,0.014240046,-0.08064838,0.028351007,0.08456427,0.016858438,0.02053254,0.06171099,-0.028964644,-0.047633287,0.08802184,0.0017116248,0.019451816,0.03419083,0.07152118,-0.027244413,-0.04888475,-0.10314279,0.07628554,-0.045991484,-0.023299307,-0.021448445,0.04111079,-0.036342163,-0.010670482,0.01950527,-0.0648448,-0.033299454,0.05782628,0.030278979,0.079154804,-0.03679649,0.031728156,-0.034912236,0.08817754,0.059208114,-0.02319613,-0.02704
5371,-0.018559752,-0.051946763,-0.010635224,0.048839167,-0.043925915,-0.028300019,-0.0039419765,0.044211324,-0.067469835,-0.027534118,0.005051618,-0.034172326,0.080007285,-0.01931061,-0.005759926,0.08765162,0.08372951,-0.093784876,0.011837292,0.019019455,0.047941882,0.05504541,-0.12475821,0.012822803,0.12833545,0.08005919,0.019278418,-0.025834465,-1.9763878e-8,0.05211108,0.024891146,-0.0015623684,0.0040500895,0.015101377,-0.0031462535,0.014759316,-0.041329216,-0.029255627,0.048599463,0.062482737,0.018376771,-0.066601776,0.014752581,0.07968402,-0.015090815,-0.12100162,-0.0014005995,0.0134423375,-0.0065814927,-0.01188529,-0.01107086,-0.059613306,0.030120188,0.0418596,-0.009260598,0.028435009,0.024893047,0.031339604,0.09501834,0.027570697,0.0636991,-0.056108754,-0.0329521,-0.114633024,-0.00981398,-0.060992315,0.027551433,0.0069592255,-0.059862003,0.0008075791,0.001507554,-0.028574942,-0.011227367,0.0056030746,-0.041190825,-0.09364463,-0.04459479,-0.055058934,-0.029972456,-0.028642913,-0.015199684,0.007875299,-0.034083385,0.02143902,-0.017395096,0.027429376,0.013198211,0.005065835,0.037760753,0.08974973,0.07598824,0.0050444477,0.014734193]
- ],
- "total_duration":375551700,
- "load_duration":354411900,
- "prompt_eval_count":9
- }
- """;
-
- using VerbatimHttpHandler handler = new(Input, Output);
- using HttpClient httpClient = new(handler);
- using IEmbeddingGenerator<string, Embedding<float>> generator = new OllamaEmbeddingGenerator("http://localhost:11434", "all-minilm", httpClient);
-
- var response = await generator.GenerateAsync([
- "hello, world!",
- "red, white, blue",
- ]);
- Assert.NotNull(response);
- Assert.Equal(2, response.Count);
-
- Assert.NotNull(response.Usage);
- Assert.Equal(9, response.Usage.InputTokenCount);
- Assert.Equal(9, response.Usage.TotalTokenCount);
-
- foreach (Embedding<float> e in response)
- {
- Assert.Equal("all-minilm", e.ModelId);
- Assert.NotNull(e.CreatedAt);
- Assert.Equal(384, e.Vector.Length);
- Assert.Contains(e.Vector.ToArray(), f => !f.Equals(0));
- }
- }
-}
diff --git a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/TestJsonSerializerContext.cs b/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/TestJsonSerializerContext.cs
deleted file mode 100644
index 49560a9c451..00000000000
--- a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/TestJsonSerializerContext.cs
+++ /dev/null
@@ -1,12 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System.Collections.Generic;
-using System.Text.Json.Serialization;
-
-namespace Microsoft.Extensions.AI;
-
-[JsonSerializable(typeof(string))]
-[JsonSerializable(typeof(int))]
- [JsonSerializable(typeof(IDictionary<string, object?>))]
-internal sealed partial class TestJsonSerializerContext : JsonSerializerContext;
diff --git a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/IntegrationTestHelpers.cs b/test/Libraries/Microsoft.Extensions.AI.OllamaSharp.Integration.Tests/IntegrationTestHelpers.cs
similarity index 100%
rename from test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/IntegrationTestHelpers.cs
rename to test/Libraries/Microsoft.Extensions.AI.OllamaSharp.Integration.Tests/IntegrationTestHelpers.cs
diff --git a/test/Libraries/Microsoft.Extensions.AI.OllamaSharp.Integration.Tests/Microsoft.Extensions.AI.OllamaSharp.Integration.Tests.csproj b/test/Libraries/Microsoft.Extensions.AI.OllamaSharp.Integration.Tests/Microsoft.Extensions.AI.OllamaSharp.Integration.Tests.csproj
index 14ca7e244d1..d977c035279 100644
--- a/test/Libraries/Microsoft.Extensions.AI.OllamaSharp.Integration.Tests/Microsoft.Extensions.AI.OllamaSharp.Integration.Tests.csproj
+++ b/test/Libraries/Microsoft.Extensions.AI.OllamaSharp.Integration.Tests/Microsoft.Extensions.AI.OllamaSharp.Integration.Tests.csproj
@@ -9,7 +9,7 @@
-
+
diff --git a/test/Libraries/Microsoft.Extensions.AI.OllamaSharp.Integration.Tests/OllamaSharpChatClientIntegrationTests.cs b/test/Libraries/Microsoft.Extensions.AI.OllamaSharp.Integration.Tests/OllamaSharpChatClientIntegrationTests.cs
index 921e2d3b5f9..28d3e21fd65 100644
--- a/test/Libraries/Microsoft.Extensions.AI.OllamaSharp.Integration.Tests/OllamaSharpChatClientIntegrationTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.OllamaSharp.Integration.Tests/OllamaSharpChatClientIntegrationTests.cs
@@ -2,14 +2,115 @@
// The .NET Foundation licenses this file to you under the MIT license.
using System;
+using System.Collections.Generic;
+using System.ComponentModel;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.TestUtilities;
using OllamaSharp;
+using Xunit;
namespace Microsoft.Extensions.AI;
-public class OllamaSharpChatClientIntegrationTests : OllamaChatClientIntegrationTests
+public class OllamaSharpChatClientIntegrationTests : ChatClientIntegrationTests
{
protected override IChatClient? CreateChatClient() =>
IntegrationTestHelpers.GetOllamaUri() is Uri endpoint ?
new OllamaApiClient(endpoint, "llama3.2") :
null;
+
+ public override Task FunctionInvocation_RequireAny() =>
+ throw new SkipTestException("Ollama does not currently support requiring function invocation.");
+
+ public override Task FunctionInvocation_RequireSpecific() =>
+ throw new SkipTestException("Ollama does not currently support requiring function invocation.");
+
+ protected override string? GetModel_MultiModal_DescribeImage() => "llava";
+
+ [ConditionalFact]
+ public async Task PromptBasedFunctionCalling_NoArgs()
+ {
+ SkipIfNotEnabled();
+
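+ // Build a pipeline that turns tool definitions into prompt text; the innermost AssertNoToolsDefinedChatClient confirms no native tool list is forwarded to OllamaSharp.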
+ using var chatClient = CreateChatClient()!
+ .AsBuilder()
+ .UseFunctionInvocation()
+ .UsePromptBasedFunctionCalling()
+ .Use(innerClient => new AssertNoToolsDefinedChatClient(innerClient))
+ .Build();
+
+ var secretNumber = 42;
+ var response = await chatClient.GetResponseAsync("What is the current secret number? Answer with digits only.", new ChatOptions
+ {
+ ModelId = "llama3:8b",
+ Tools = [AIFunctionFactory.Create(() => secretNumber, "GetSecretNumber")],
+ Temperature = 0,
+ Seed = 0,
+ });
+
+ Assert.Contains(secretNumber.ToString(), response.Text);
+ }
+
+ [ConditionalFact]
+ public async Task PromptBasedFunctionCalling_WithArgs()
+ {
+ SkipIfNotEnabled();
+
+ using var chatClient = CreateChatClient()!
+ .AsBuilder()
+ .UseFunctionInvocation()
+ .UsePromptBasedFunctionCalling()
+ .Use(innerClient => new AssertNoToolsDefinedChatClient(innerClient))
+ .Build();
+
+ var stockPriceTool = AIFunctionFactory.Create([Description("Returns the stock price for a given ticker symbol")] (
+ [Description("The ticker symbol")] string symbol,
+ [Description("The currency code such as USD or JPY")] string currency) =>
+ {
+ Assert.Equal("MSFT", symbol);
+ Assert.Equal("GBP", currency);
+ return 999;
+ }, "GetStockPrice");
+
+ var didCallIrrelevantTool = false;
+ var irrelevantTool = AIFunctionFactory.Create(() => { didCallIrrelevantTool = true; return 123; }, "GetSecretNumber");
+
+ var response = await chatClient.GetResponseAsync("What's the stock price for Microsoft in British pounds?", new ChatOptions
+ {
+ Tools = [stockPriceTool, irrelevantTool],
+ Temperature = 0,
+ Seed = 0,
+ });
+
+ Assert.Contains("999", response.Text);
+ Assert.False(didCallIrrelevantTool);
+ }
+
+ [ConditionalFact]
+ public async Task InvalidModelParameter_ThrowsInvalidOperationException()
+ {
+ SkipIfNotEnabled();
+
+ var endpoint = IntegrationTestHelpers.GetOllamaUri();
+ Assert.NotNull(endpoint);
+
+ using var chatClient = new OllamaApiClient(endpoint, defaultModel: "inexistent-model");
+
+ InvalidOperationException ex;
+ ex = await Assert.ThrowsAsync<InvalidOperationException>(() => chatClient.GetResponseAsync("Hello, world!"));
+ Assert.Contains("inexistent-model", ex.Message);
+
+ ex = await Assert.ThrowsAsync<InvalidOperationException>(() => chatClient.GetStreamingResponseAsync("Hello, world!").ToChatResponseAsync());
+ Assert.Contains("inexistent-model", ex.Message);
+ }
+
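+ // Helper for the prompt-based function calling tests above: asserts that the ChatOptions reaching the underlying client carry no tool definitions.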
+ private sealed class AssertNoToolsDefinedChatClient(IChatClient innerClient) : DelegatingChatClient(innerClient)
+ {
+ public override Task<ChatResponse> GetResponseAsync(
+ IEnumerable<ChatMessage> messages, ChatOptions? options = null, CancellationToken cancellationToken = default)
+ {
+ Assert.Null(options?.Tools);
+ return base.GetResponseAsync(messages, options, cancellationToken);
+ }
+ }
}
diff --git a/test/Libraries/Microsoft.Extensions.AI.OllamaSharp.Integration.Tests/OllamaSharpEmbeddingGeneratorIntegrationTests.cs b/test/Libraries/Microsoft.Extensions.AI.OllamaSharp.Integration.Tests/OllamaSharpEmbeddingGeneratorIntegrationTests.cs
index 1826855f459..f7775143c36 100644
--- a/test/Libraries/Microsoft.Extensions.AI.OllamaSharp.Integration.Tests/OllamaSharpEmbeddingGeneratorIntegrationTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.OllamaSharp.Integration.Tests/OllamaSharpEmbeddingGeneratorIntegrationTests.cs
@@ -2,14 +2,35 @@
// The .NET Foundation licenses this file to you under the MIT license.
using System;
+using System.Threading.Tasks;
+using Microsoft.TestUtilities;
using OllamaSharp;
+using Xunit;
namespace Microsoft.Extensions.AI;
-public class OllamaSharpEmbeddingGeneratorIntegrationTests : OllamaEmbeddingGeneratorIntegrationTests
+public class OllamaSharpEmbeddingGeneratorIntegrationTests : EmbeddingGeneratorIntegrationTests
{
protected override IEmbeddingGenerator<string, Embedding<float>>? CreateEmbeddingGenerator() =>
IntegrationTestHelpers.GetOllamaUri() is Uri endpoint ?
new OllamaApiClient(endpoint, "all-minilm") :
null;
+
+ [ConditionalFact]
+ public async Task InvalidModelParameter_ThrowsInvalidOperationException()
+ {
+ SkipIfNotEnabled();
+
+ var endpoint = IntegrationTestHelpers.GetOllamaUri();
+ Assert.NotNull(endpoint);
+
+ using var client = new OllamaApiClient(endpoint, defaultModel: "inexistent-model");
+
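+ // The unknown model name should surface from the server as an InvalidOperationException whose message names the model.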
+ InvalidOperationException ex;
+ ex = await Assert.ThrowsAsync<InvalidOperationException>(() => client.EmbedAsync(new OllamaSharp.Models.EmbedRequest
+ {
+ Input = ["Hello, world!"],
+ }));
+ Assert.Contains("inexistent-model", ex.Message);
+ }
}