* Adding in the same throttling checks that we have in OpenAI, since it's the same underlying resources that are oversubscribed
* Updating so our code compiles under the latest alpha from OpenAI
* Stop examples from running
This commit is contained in:
Richard Park 2024-10-02 16:13:56 -07:00 коммит произвёл GitHub
Родитель 9582d420c4
Коммит 910af976fc
Не найден ключ, соответствующий данной подписи
Идентификатор ключа GPG: B5690EEEBB952194
17 изменённых файлов: 88 добавлений и 106 удалений

Просмотреть файл

@ -52,6 +52,10 @@
"Name": "ai/azopenaiassistants",
"CoverageGoal": 0.00
},
{
"Name": "ai/azopenaiextensions",
"CoverageGoal": 0.00
},
{
"Name": "aztemplate",
"CoverageGoal": 0.50

Просмотреть файл

@ -17,4 +17,4 @@ If you're already using the `azopenai` package, but would like to switch to usin
["Azure OpenAI On Your Data"](https://learn.microsoft.com/azure/ai-services/openai/concepts/use-your-data) allows you to use external data sources, such as Azure AI Search, in combination with Azure OpenAI. This package provides a helper function to make it easy to include `DataSources` using `openai-go`:
For a full example see [example_azure_on_your_data_test.go](./example_azure_on_your_data_test.go).
For a full example see [example_azure_on_your_data_test.go](https://aka.ms/azsdk/go/azopenaiextensions/pkg#example-package-UsingAzureOnYourData).

Просмотреть файл

@ -31,7 +31,7 @@ See [Key concepts][openai_key_concepts] in the product documentation for more de
# Examples
Examples for scenarios specific to Azure can be found on [pkg.go.dev](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/ai/azopenaiextensions#pkg-examples) or in the example*_test.go files in our GitHub repo for [azopenaiextensions](https://github.com/Azure/azure-sdk-for-go/blob/main/sdk/ai/azopenaiextensions).
Examples for scenarios specific to Azure can be found on [pkg.go.dev](https://aka.ms/azsdk/go/azopenaiextensions/pkg#pkg-examples) or in the example*_test.go files in our GitHub repo for [azopenaiextensions](https://github.com/Azure/azure-sdk-for-go/blob/main/sdk/ai/azopenaiextensions).
For examples on using the openai-go client, see the examples in the [openai-go](https://github.com/openai/openai-go/tree/main/examples) repository.
@ -60,5 +60,5 @@ comments.
[openai_key_concepts]: https://learn.microsoft.com/azure/cognitive-services/openai/overview#key-concepts
[openai_on_your_data]: https://learn.microsoft.com/azure/ai-services/openai/concepts/use-your-data
[openai_rest_docs]: https://learn.microsoft.com/azure/cognitive-services/openai/reference
[pkggodev]: https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/ai/azopenaiextensions
[pkggodev]: https://aka.ms/azsdk/go/azopenaiextensions/pkg
[repo]: https://github.com/Azure/azure-sdk-for-go/tree/main/sdk/ai/azopenaiextensions

Просмотреть файл

@ -33,8 +33,12 @@ extends:
ServiceDirectory: "ai/azopenaiextensions"
RunLiveTests: true
UsePipelineProxy: false
ExcludeGoNMinus2: true
CloudConfig:
Public:
ServiceConnection: azure-sdk-tests
SubscriptionConfigurationFilePaths:
- eng/common/TestResources/sub-config/AzurePublicMsft.json
SubscriptionConfigurations:
- $(sub-config-azure-cloud-test-resources)
- $(sub-config-openai-test-resources) # TestSecrets-openai

Просмотреть файл

@ -15,7 +15,6 @@ import (
"github.com/Azure/azure-sdk-for-go/sdk/internal/recording"
"github.com/openai/openai-go"
"github.com/openai/openai-go/option"
"github.com/stretchr/testify/require"
)
@ -28,43 +27,27 @@ func TestClient_GetAudioTranscription(t *testing.T) {
model := azureOpenAI.Whisper.Model
// We're experiencing load issues on some of our shared test resources so we'll just spot check.
t.Run(fmt.Sprintf("%s (%s)", openai.AudioTranscriptionNewParamsResponseFormatText, "m4a"), func(t *testing.T) {
// TODO: BUG: I think. I'm not quite sure how to request any format other than JSON because the bare formats
// cause a deserialization error in the Stainless client.
//
// transcriptResp, err := client.Audio.Transcriptions.New(context.Background(), openai.AudioTranscriptionNewParams{
// Model: openai.F(openai.AudioTranscriptionNewParamsModel(model)),
// File: openai.F(getFile(t, "testdata/sampledata_audiofiles_myVoiceIsMyPassportVerifyMe01.m4a")),
// ResponseFormat: openai.F(openai.AudioTranscriptionNewParamsResponseFormatText),
// Language: openai.String("en"),
// Temperature: openai.Float(0.0),
// })
// require.Empty(t, transcriptResp)
// require.EqualError(t, err, "expected destination type of 'string' or '[]byte' for responses with content-type that is not 'application/json'")
var text *string
t.Run(fmt.Sprintf("%s (%s)", openai.AudioResponseFormatText, "m4a"), func(t *testing.T) {
transcriptResp, err := client.Audio.Transcriptions.New(context.Background(), openai.AudioTranscriptionNewParams{
Model: openai.F(openai.AudioModel(model)),
File: openai.F(getFile(t, "testdata/sampledata_audiofiles_myVoiceIsMyPassportVerifyMe01.m4a")),
ResponseFormat: openai.F(openai.AudioTranscriptionNewParamsResponseFormatText),
Language: openai.String("en"),
Temperature: openai.Float(0.0),
}, option.WithResponseBodyInto(&text))
require.Empty(t, transcriptResp)
require.NoError(t, err)
require.NotEmpty(t, *text)
})
t.Run(fmt.Sprintf("%s (%s)", openai.AudioTranscriptionNewParamsResponseFormatJSON, "mp3"), func(t *testing.T) {
transcriptResp, err := client.Audio.Transcriptions.New(context.Background(), openai.AudioTranscriptionNewParams{
Model: openai.F(openai.AudioModel(model)),
File: openai.F(getFile(t, "testdata/sampledata_audiofiles_myVoiceIsMyPassportVerifyMe01.mp3")),
ResponseFormat: openai.F(openai.AudioTranscriptionNewParamsResponseFormatVerboseJSON),
ResponseFormat: openai.F(openai.AudioResponseFormatText),
Language: openai.String("en"),
Temperature: openai.Float(0.0),
})
customRequireNoError(t, err, true)
require.Empty(t, transcriptResp)
require.EqualError(t, err, "expected destination type of 'string' or '[]byte' for responses with content-type that is not 'application/json'")
})
t.Run(fmt.Sprintf("%s (%s)", openai.AudioResponseFormatJSON, "mp3"), func(t *testing.T) {
transcriptResp, err := client.Audio.Transcriptions.New(context.Background(), openai.AudioTranscriptionNewParams{
Model: openai.F(openai.AudioModel(model)),
File: openai.F(getFile(t, "testdata/sampledata_audiofiles_myVoiceIsMyPassportVerifyMe01.mp3")),
ResponseFormat: openai.F(openai.AudioResponseFormatJSON),
Language: openai.String("en"),
Temperature: openai.Float(0.0),
})
customRequireNoError(t, err)
t.Logf("Transcription: %s", transcriptResp.Text)
require.NotEmpty(t, transcriptResp)
})
@ -79,13 +62,12 @@ func TestClient_GetAudioTranslation(t *testing.T) {
model := azureOpenAI.Whisper.Model
resp, err := client.Audio.Translations.New(context.Background(), openai.AudioTranslationNewParams{
Model: openai.F(openai.AudioModel(model)),
File: openai.F(getFile(t, "testdata/sampledata_audiofiles_myVoiceIsMyPassportVerifyMe01.m4a")),
// TODO: no specific enumeration for Translations format?
ResponseFormat: openai.F(string(openai.AudioTranscriptionNewParamsResponseFormatVerboseJSON)),
Model: openai.F(openai.AudioModel(model)),
File: openai.F(getFile(t, "testdata/sampledata_audiofiles_myVoiceIsMyPassportVerifyMe01.m4a")),
ResponseFormat: openai.F(openai.AudioResponseFormatVerboseJSON),
Temperature: openai.Float(0.0),
})
customRequireNoError(t, err, true)
customRequireNoError(t, err)
t.Logf("Translation: %s", resp.Text)
require.NotEmpty(t, resp.Text)
@ -141,7 +123,7 @@ func TestClient_GetAudioSpeech(t *testing.T) {
transcriptResp, err := transcriptClient.Audio.Transcriptions.New(context.Background(), openai.AudioTranscriptionNewParams{
Model: openai.F(openai.AudioModel(azureOpenAI.Whisper.Model)),
File: openai.F[io.Reader](tempFile),
ResponseFormat: openai.F(openai.AudioTranscriptionNewParamsResponseFormatVerboseJSON),
ResponseFormat: openai.F(openai.AudioResponseFormatVerboseJSON),
Language: openai.String("en"),
Temperature: openai.Float(0.0),
})

Просмотреть файл

@ -31,7 +31,7 @@ func TestChatCompletions_extensions_bringYourOwnData(t *testing.T) {
resp, err := client.Chat.Completions.New(context.Background(), inputParams,
azopenaiextensions.WithDataSources(&azureOpenAI.Cognitive))
require.NoError(t, err)
customRequireNoError(t, err)
require.NotEmpty(t, resp)
msg := azopenaiextensions.ChatCompletionMessage(resp.Choices[0].Message)

Просмотреть файл

@ -12,7 +12,6 @@ import (
"time"
"github.com/Azure/azure-sdk-for-go/sdk/ai/azopenaiextensions"
"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
"github.com/openai/openai-go"
"github.com/stretchr/testify/require"
)
@ -68,22 +67,6 @@ func TestClient_GetChatCompletions(t *testing.T) {
PromptTokens: resp.Usage.PromptTokens,
TotalTokens: resp.Usage.TotalTokens,
})
if checkRAI {
promptFilterResults, err := azopenaiextensions.ChatCompletion(*resp).PromptFilterResults()
require.NoError(t, err)
require.Equal(t, []azopenaiextensions.ContentFilterResultsForPrompt{
{
PromptIndex: to.Ptr[int32](0),
ContentFilterResults: safeContentFilterResultDetailsForPrompt,
},
}, promptFilterResults)
choiceContentFilter, err := azopenaiextensions.ChatCompletionChoice(resp.Choices[0]).ContentFilterResults()
require.NoError(t, err)
require.Equal(t, safeContentFilter, choiceContentFilter)
}
}
t.Run("AzureOpenAI", func(t *testing.T) {
@ -196,15 +179,11 @@ func TestClient_GetChatCompletionsStream(t *testing.T) {
azureChunk := azopenaiextensions.ChatCompletionChunk(chunk)
promptResults, err := azureChunk.PromptFilterResults()
// NOTE: prompt filter results are non-deterministic as they're based on their own criteria, which
// can change over time. We'll check that we can safely attempt to deserialize it.
_, err := azureChunk.PromptFilterResults()
require.NoError(t, err)
if promptResults != nil {
require.Equal(t, []azopenaiextensions.ContentFilterResultsForPrompt{
{PromptIndex: to.Ptr[int32](0), ContentFilterResults: safeContentFilterResultDetailsForPrompt},
}, promptResults)
}
if len(chunk.Choices) == 0 {
// you can get empty entries that contain just metadata (ie, prompt annotations)
continue
@ -249,7 +228,7 @@ func TestClient_GetChatCompletions_Vision(t *testing.T) {
})
// vision is a bit of an oversubscribed Azure resource. Allow 429, but mark the test as skipped.
customRequireNoError(t, err, true)
customRequireNoError(t, err)
require.NotEmpty(t, resp.Choices[0].Message.Content)
t.Logf("Content: %s", resp.Choices[0].Message.Content)

Просмотреть файл

@ -21,7 +21,7 @@ func TestClient_GetEmbeddings_InvalidModel(t *testing.T) {
client := newStainlessTestClient(t, azureOpenAI.Embeddings.Endpoint)
_, err := client.Embeddings.New(context.Background(), openai.EmbeddingNewParams{
Model: openai.F(openai.EmbeddingNewParamsModel("thisdoesntexist")),
Model: openai.F(openai.EmbeddingModel("thisdoesntexist")),
})
var openaiErr *openai.Error
@ -35,7 +35,7 @@ func TestClient_GetEmbeddings(t *testing.T) {
resp, err := client.Embeddings.New(context.Background(), openai.EmbeddingNewParams{
Input: openai.F[openai.EmbeddingNewParamsInputUnion](openai.EmbeddingNewParamsInputArrayOfStrings([]string{"\"Your text string goes here\""})),
Model: openai.F(openai.EmbeddingNewParamsModel(azureOpenAI.Embeddings.Model)),
Model: openai.F(openai.EmbeddingModel(azureOpenAI.Embeddings.Model)),
})
require.NoError(t, err)
require.NotEmpty(t, resp.Data[0].Embedding)
@ -48,7 +48,7 @@ func TestClient_GetEmbeddings_embeddingsFormat(t *testing.T) {
arg := openai.EmbeddingNewParams{
Input: openai.F[openai.EmbeddingNewParamsInputUnion](openai.EmbeddingNewParamsInputArrayOfStrings([]string{"hello"})),
EncodingFormat: openai.F(openai.EmbeddingNewParamsEncodingFormatBase64),
Model: openai.F(openai.EmbeddingNewParamsModel(epm.Model)),
Model: openai.F(openai.EmbeddingModel(epm.Model)),
}
if dimension > 0 {
@ -70,7 +70,7 @@ func TestClient_GetEmbeddings_embeddingsFormat(t *testing.T) {
arg2 := openai.EmbeddingNewParams{
Input: openai.F[openai.EmbeddingNewParamsInputUnion](openai.EmbeddingNewParamsInputArrayOfStrings([]string{"hello"})),
Model: openai.F(openai.EmbeddingNewParamsModel(epm.Model)),
Model: openai.F(openai.EmbeddingModel(epm.Model)),
}
if dimension > 0 {

Просмотреть файл

@ -71,7 +71,7 @@ func TestClient_GetChatCompletions_AzureOpenAI_ContentFilter_WithResponse(t *tes
Temperature: openai.Float(0.0),
Model: openai.F(openai.ChatModel(azureOpenAI.ChatCompletionsRAI.Model)),
})
customRequireNoError(t, err, true)
customRequireNoError(t, err)
contentFilterResults, err := azopenaiextensions.ChatCompletionChoice(resp.Choices[0]).ContentFilterResults()
require.NoError(t, err)

Просмотреть файл

@ -9,6 +9,7 @@ import (
"fmt"
"net/http"
"os"
"strings"
"testing"
"github.com/Azure/azure-sdk-for-go/sdk/ai/azopenaiextensions"
@ -57,7 +58,9 @@ func getEnvVariable(varName string) string {
val := os.Getenv(varName)
if val == "" {
panic(fmt.Sprintf("Missing required environment variable %s", varName))
if recording.GetRecordMode() != recording.PlaybackMode {
panic(fmt.Sprintf("Missing required environment variable %s", varName))
}
}
return val
@ -185,6 +188,11 @@ var azureOpenAI = func() testVars {
}()
func newStainlessTestClient(t *testing.T, ep endpoint) *openai.Client {
if recording.GetRecordMode() == recording.PlaybackMode {
t.Skip("Skipping tests in playback mode")
return nil
}
tokenCredential, err := credential.New(nil)
require.NoError(t, err)
@ -195,6 +203,11 @@ func newStainlessTestClient(t *testing.T, ep endpoint) *openai.Client {
}
func newStainlessChatCompletionService(t *testing.T, ep endpoint) *openai.ChatCompletionService {
if recording.GetRecordMode() == recording.PlaybackMode {
t.Skip("Skipping tests in playback mode")
return nil
}
tokenCredential, err := credential.New(nil)
require.NoError(t, err)
@ -211,22 +224,27 @@ func skipNowIfThrottled(t *testing.T, err error) {
// customRequireNoError checks the error but allows throttling errors to account for resources that are
// constrained.
func customRequireNoError(t *testing.T, err error, throttlingAllowed bool) {
func customRequireNoError(t *testing.T, err error) {
if err == nil {
return
}
if throttlingAllowed {
var respErr *openai.Error
if respErr := (*openai.Error)(nil); errors.As(err, &respErr) && respErr.StatusCode == http.StatusTooManyRequests {
t.Skip("Skipping test because of throttling (http.StatusTooManyRequests)")
return
}
if errors.Is(err, context.DeadlineExceeded) {
t.Skip("Skipping test because of throttling (DeadlineExceeded)")
return
}
switch {
case errors.As(err, &respErr) && respErr.StatusCode == http.StatusTooManyRequests:
t.Skip("Skipping test because of throttling (http.StatusTooManyRequests)")
return
// If you're using OYD, then the response error (from Azure OpenAI) will be a 400, but the underlying text will mention
// that it's 429'd.
// "code": 400,
// "message": "Server responded with status 429. Error message: {'error': {'code': '429', 'message': 'Rate limit is exceeded. Try again in 1 seconds.'}}"
case errors.As(err, &respErr) && respErr.StatusCode == http.StatusBadRequest && strings.Contains(err.Error(), "Rate limit is exceeded"):
t.Skip("Skipping test because of throttling in OYD resource")
return
case errors.Is(err, context.DeadlineExceeded):
t.Skip("Skipping test because of throttling (DeadlineExceeded)")
return
}
require.NoError(t, err)

Просмотреть файл

@ -36,7 +36,7 @@ func TestImageGeneration_AzureOpenAI(t *testing.T) {
ResponseFormat: openai.F(openai.ImageGenerateParamsResponseFormatURL),
Model: openai.F(openai.ImageModel(azureOpenAI.DallE.Model)),
})
customRequireNoError(t, err, true)
customRequireNoError(t, err)
if recording.GetRecordMode() == recording.LiveMode {
headResp, err := http.DefaultClient.Head(resp.Data[0].URL)

Просмотреть файл

@ -21,6 +21,7 @@ type ContentFilterError struct {
ContentFilterResultDetailsForPrompt
}
// Error implements the error interface for type ContentFilterError.
func (c *ContentFilterError) Error() string {
return c.OpenAIError.Error()
}

Просмотреть файл

@ -1,3 +1,6 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
package azopenaiextensions_test
import (
@ -110,11 +113,7 @@ func Example_usingAzureOnYourData() {
fmt.Fprintf(os.Stderr, "Content: %s\n", azureChatCompletionMsg.Content)
}
fmt.Printf("Example complete\n")
// Output:
// Example complete
//
fmt.Fprintf(os.Stderr, "Example complete\n")
}
func Example_usingEnhancements() {
@ -203,9 +202,5 @@ func Example_usingEnhancements() {
fmt.Fprintf(os.Stderr, "Content: %s\n", azureChatCompletionMsg.Content)
}
fmt.Printf("Example complete\n")
// Output:
// Example complete
//
fmt.Fprintf(os.Stderr, "Example complete\n")
}

Просмотреть файл

@ -2,8 +2,6 @@ module github.com/Azure/azure-sdk-for-go/sdk/ai/azopenaiextensions
go 1.21
toolchain go1.21.5
require (
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.14.0
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0
@ -12,7 +10,7 @@ require (
require (
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.7.0
github.com/openai/openai-go v0.1.0-alpha.16
github.com/openai/openai-go v0.1.0-alpha.23
)
require (
@ -23,7 +21,7 @@ require (
github.com/kylelemons/godebug v1.1.0 // indirect
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/tidwall/gjson v1.14.4 // indirect
github.com/tidwall/gjson v1.17.3 // indirect
github.com/tidwall/match v1.1.1 // indirect
github.com/tidwall/pretty v1.2.1 // indirect
github.com/tidwall/sjson v1.2.5 // indirect

Просмотреть файл

@ -18,8 +18,8 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/openai/openai-go v0.1.0-alpha.16 h1:4YXiVRN1yUjTQHtmGShMJioDA+gxW2PNYRbqbInAYG4=
github.com/openai/openai-go v0.1.0-alpha.16/go.mod h1:3SdE6BffOX9HPEQv8IL/fi3LYZ5TUpRYaqGQZbyk11A=
github.com/openai/openai-go v0.1.0-alpha.23 h1:KDDR/z8jTrVgQd2Xpa55+Pzn4JWUYLmo67oQ/gj+vFE=
github.com/openai/openai-go v0.1.0-alpha.23/go.mod h1:3SdE6BffOX9HPEQv8IL/fi3LYZ5TUpRYaqGQZbyk11A=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
@ -29,8 +29,8 @@ github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/gjson v1.14.4 h1:uo0p8EbA09J7RQaflQ1aBRffTR7xedD2bcIVSYxLnkM=
github.com/tidwall/gjson v1.14.4/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/gjson v1.17.3 h1:bwWLZU7icoKRG+C+0PNwIKC6FCJO/Q3p2pZvuP0jN94=
github.com/tidwall/gjson v1.17.3/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=

Просмотреть файл

@ -1,3 +1,6 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
package azopenaiextensions
import (

Просмотреть файл

@ -1,11 +1,9 @@
//go:build go1.18
// +build go1.18
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
package azopenaiextensions
const (
//nolint // required for CI, but not used since this package doesn't have a client.
version = "v0.1.0"
)