-
-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathOpenAIChatClient.cs
More file actions
60 lines (49 loc) · 2.58 KB
/
OpenAIChatClient.cs
File metadata and controls
60 lines (49 loc) · 2.58 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
using System.ClientModel;
using System.ClientModel.Primitives;
using System.Collections.Concurrent;
using Microsoft.Extensions.AI;
using OpenAI;
namespace Devlooped.Extensions.AI.OpenAI;
/// <summary>
/// An <see cref="IChatClient"/> implementation for OpenAI that supports per-request model selection.
/// </summary>
public class OpenAIChatClient : IChatClient
{
    // Per-model client cache so repeated requests against the same model reuse one instance.
    readonly ConcurrentDictionary<string, IChatClient> clients = new();
    readonly string modelId;
    readonly ClientPipeline pipeline;
    readonly OpenAIClientOptions? options;
    readonly ChatClientMetadata? metadata;

    /// <summary>
    /// Initializes the client with the specified API key, model ID, and optional OpenAI client options.
    /// </summary>
    /// <param name="apiKey">The OpenAI API key used to authenticate requests.</param>
    /// <param name="modelId">The default model ID used when a request's <see cref="ChatOptions.ModelId"/> is not set.</param>
    /// <param name="options">Optional options applied to the underlying <see cref="OpenAIClient"/>.</param>
    public OpenAIChatClient(string apiKey, string modelId, OpenAIClientOptions? options = default)
    {
        ArgumentException.ThrowIfNullOrEmpty(apiKey);
        ArgumentException.ThrowIfNullOrEmpty(modelId);

        this.modelId = modelId;
        this.options = options;

        // NOTE: by caching the pipeline, we speed up creation of new chat clients per model,
        // since the pipeline will be the same for all of them.
        var client = new OpenAIClient(new ApiKeyCredential(apiKey), options);
        metadata = client.GetChatClient(modelId)
            .AsIChatClient()
            .GetService(typeof(ChatClientMetadata)) as ChatClientMetadata;
        pipeline = client.Pipeline;
    }

    /// <inheritdoc/>
    public Task<ChatResponse> GetResponseAsync(IEnumerable<ChatMessage> messages, ChatOptions? options = null, CancellationToken cancellation = default)
        => GetChatClient(options?.ModelId ?? modelId).GetResponseAsync(messages, options.SetResponseOptions(), cancellation);

    /// <inheritdoc/>
    public IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(IEnumerable<ChatMessage> messages, ChatOptions? options = null, CancellationToken cancellation = default)
        => GetChatClient(options?.ModelId ?? modelId).GetStreamingResponseAsync(messages, options.SetResponseOptions(), cancellation);

    // Gets (or lazily creates) the cached client for the given model. The factory uses its
    // 'model' argument (the dictionary key) rather than closing over the outer parameter,
    // so the mapping stays correct even if this method is refactored.
    IChatClient GetChatClient(string modelId) => clients.GetOrAdd(modelId, model
        => new PipelineClient(pipeline, options).GetOpenAIResponseClient(model).AsIChatClient());

    /// <inheritdoc/>
    void IDisposable.Dispose()
    {
        // Dispose every cached per-model client; IChatClient derives from IDisposable,
        // so leaving these undisposed would leak whatever resources they hold.
        foreach (var client in clients.Values)
            client.Dispose();

        clients.Clear();
        GC.SuppressFinalize(this);
    }

    /// <inheritdoc />
    public object? GetService(Type serviceType, object? serviceKey = null) => serviceType switch
    {
        // This client exposes no keyed services (Microsoft.Extensions.AI convention).
        _ when serviceKey is not null => null,
        Type t when t == typeof(ChatClientMetadata) => metadata,
        // Allow callers to unwrap the concrete client itself, per the GetService convention.
        Type t when t.IsInstanceOfType(this) => this,
        _ => null
    };

    // Allows creating the base OpenAIClient with a pre-created pipeline.
    class PipelineClient(ClientPipeline pipeline, OpenAIClientOptions? options) : OpenAIClient(pipeline, options) { }
}