Skip to content

Commit ccd86d9

Browse files
authored
Lower M.E.AI.Ollama STJ dependency back to 8 (#5554)
1 parent aa63ac7 commit ccd86d9

File tree

2 files changed: +8 additions, −7 deletions

src/Libraries/Microsoft.Extensions.AI.Ollama/Microsoft.Extensions.AI.Ollama.csproj

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,8 +4,6 @@
44
<RootNamespace>Microsoft.Extensions.AI</RootNamespace>
55
<Description>Implementation of generative AI abstractions for Ollama.</Description>
66
<Workstream>AI</Workstream>
7-
<!-- This package needs to stay referencing .NET 9 versions of its dependencies -->
8-
<ForceLatestDotnetVersions>true</ForceLatestDotnetVersions>
97
</PropertyGroup>
108

119
<PropertyGroup>

src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs

Lines changed: 8 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
using System;
55
using System.Collections.Generic;
66
using System.Globalization;
7+
using System.IO;
78
using System.Linq;
89
using System.Net.Http;
910
using System.Net.Http.Json;
@@ -114,12 +115,14 @@ public async IAsyncEnumerable<StreamingChatCompletionUpdate> CompleteStreamingAs
114115
#endif
115116
.ConfigureAwait(false);
116117

117-
await foreach (OllamaChatResponse? chunk in JsonSerializer.DeserializeAsyncEnumerable(
118-
httpResponseStream,
119-
JsonContext.Default.OllamaChatResponse,
120-
topLevelValues: true,
121-
cancellationToken).ConfigureAwait(false))
118+
using var streamReader = new StreamReader(httpResponseStream);
119+
#if NET
120+
while ((await streamReader.ReadLineAsync(cancellationToken).ConfigureAwait(false)) is { } line)
121+
#else
122+
while ((await streamReader.ReadLineAsync().ConfigureAwait(false)) is { } line)
123+
#endif
122124
{
125+
var chunk = JsonSerializer.Deserialize(line, JsonContext.Default.OllamaChatResponse);
123126
if (chunk is null)
124127
{
125128
continue;

0 commit comments

Comments (0)