-
Notifications
You must be signed in to change notification settings - Fork 3.2k
/
AzureOpenAITextEmbeddingGenerationService.cs
115 lines (101 loc) · 6.2 KB
/
AzureOpenAITextEmbeddingGenerationService.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
// Copyright (c) Microsoft. All rights reserved.
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using Azure.AI.OpenAI;
using Azure.Core;
using Microsoft.Extensions.Logging;
using Microsoft.SemanticKernel.Embeddings;
using Microsoft.SemanticKernel.Services;
namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
/// <summary>
/// Generates text embeddings by calling an Azure OpenAI embedding deployment.
/// </summary>
[Experimental("SKEXP0010")]
public sealed class AzureOpenAITextEmbeddingGenerationService : ITextEmbeddingGenerationService
{
    /// <summary>Wrapper around the Azure OpenAI client that performs the actual embedding requests.</summary>
    private readonly AzureClientCore _client;

    /// <summary>Optional output dimension count forwarded on every embedding request; null uses the model default.</summary>
    private readonly int? _dimensions;

    /// <summary>
    /// Initializes a new instance of the <see cref="AzureOpenAITextEmbeddingGenerationService"/> class using API-key authentication.
    /// </summary>
    /// <param name="deploymentName">Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource</param>
    /// <param name="endpoint">Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart</param>
    /// <param name="apiKey">Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart</param>
    /// <param name="modelId">Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource</param>
    /// <param name="httpClient">Custom <see cref="HttpClient"/> for HTTP requests.</param>
    /// <param name="loggerFactory">The <see cref="ILoggerFactory"/> to use for logging. If null, no logging will be performed.</param>
    /// <param name="dimensions">The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models.</param>
    /// <param name="apiVersion">Optional Azure OpenAI API version, see available here <see cref="AzureOpenAIClientOptions.ServiceVersion"/></param>
    public AzureOpenAITextEmbeddingGenerationService(
        string deploymentName,
        string endpoint,
        string apiKey,
        string? modelId = null,
        HttpClient? httpClient = null,
        ILoggerFactory? loggerFactory = null,
        int? dimensions = null,
        string? apiVersion = null)
    {
        this._dimensions = dimensions;
        this._client = new AzureClientCore(deploymentName, endpoint, apiKey, httpClient, CreateLogger(loggerFactory), apiVersion);
        this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId);
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="AzureOpenAITextEmbeddingGenerationService"/> class using token-credential authentication.
    /// </summary>
    /// <param name="deploymentName">Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource</param>
    /// <param name="endpoint">Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart</param>
    /// <param name="credential">Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc.</param>
    /// <param name="modelId">Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource</param>
    /// <param name="httpClient">Custom <see cref="HttpClient"/> for HTTP requests.</param>
    /// <param name="loggerFactory">The <see cref="ILoggerFactory"/> to use for logging. If null, no logging will be performed.</param>
    /// <param name="dimensions">The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models.</param>
    /// <param name="apiVersion">Optional Azure OpenAI API version, see available here <see cref="AzureOpenAIClientOptions.ServiceVersion"/></param>
    public AzureOpenAITextEmbeddingGenerationService(
        string deploymentName,
        string endpoint,
        TokenCredential credential,
        string? modelId = null,
        HttpClient? httpClient = null,
        ILoggerFactory? loggerFactory = null,
        int? dimensions = null,
        string? apiVersion = null)
    {
        this._dimensions = dimensions;
        this._client = new AzureClientCore(deploymentName, endpoint, credential, httpClient, CreateLogger(loggerFactory), apiVersion);
        this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId);
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="AzureOpenAITextEmbeddingGenerationService"/> class from a pre-configured client.
    /// </summary>
    /// <param name="deploymentName">Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource</param>
    /// <param name="azureOpenAIClient">Custom <see cref="AzureOpenAIClient"/> for HTTP requests.</param>
    /// <param name="modelId">Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource</param>
    /// <param name="loggerFactory">The <see cref="ILoggerFactory"/> to use for logging. If null, no logging will be performed.</param>
    /// <param name="dimensions">The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models.</param>
    public AzureOpenAITextEmbeddingGenerationService(
        string deploymentName,
        AzureOpenAIClient azureOpenAIClient,
        string? modelId = null,
        ILoggerFactory? loggerFactory = null,
        int? dimensions = null)
    {
        // No apiVersion parameter here: the supplied client is already fully configured by the caller.
        this._dimensions = dimensions;
        this._client = new AzureClientCore(deploymentName, azureOpenAIClient, CreateLogger(loggerFactory));
        this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId);
    }

    /// <inheritdoc/>
    public IReadOnlyDictionary<string, object?> Attributes => this._client.Attributes;

    /// <inheritdoc/>
    public Task<IList<ReadOnlyMemory<float>>> GenerateEmbeddingsAsync(
        IList<string> data,
        Kernel? kernel = null,
        CancellationToken cancellationToken = default)
        => this._client.GetEmbeddingsAsync(this._client.DeploymentName, data, kernel, this._dimensions, cancellationToken);

    /// <summary>Creates a logger scoped to this service type, or null when no factory was supplied.</summary>
    private static ILogger? CreateLogger(ILoggerFactory? loggerFactory)
        => loggerFactory?.CreateLogger(typeof(AzureOpenAITextEmbeddingGenerationService));
}