
[azopenai] Enabling live testing (Azure#21276)
Adds a ci.yml with live testing enabled, plus re-recordings, since we moved from `cognitiveservices` to `ai`.
richardpark-msft authored and chlowell committed Jul 31, 2023
1 parent 27cf957 commit 2eea6ec
Showing 6 changed files with 47 additions and 10 deletions.
4 changes: 2 additions & 2 deletions sdk/ai/azopenai/assets.json
@@ -1,6 +1,6 @@
{
"AssetsRepo": "Azure/azure-sdk-assets",
"AssetsRepoPrefixPath": "go",
"TagPrefix": "go/cognitiveservices/azopenai",
"Tag": "go/cognitiveservices/azopenai_8fdad86997"
"TagPrefix": "go/ai/azopenai",
"Tag": "go/ai/azopenai_2bf13bba09"
}
29 changes: 29 additions & 0 deletions sdk/ai/azopenai/ci.yml
@@ -26,3 +26,32 @@ stages:
  - template: /eng/pipelines/templates/jobs/archetype-sdk-client.yml
    parameters:
      ServiceDirectory: "ai/azopenai"
      RunLiveTests: true
      EnvVars:
        AZURE_TEST_RUN_LIVE: 'true' # use when utilizing the New-TestResources Script

        # these come from our keyvault (TestSecrets-openai)
        AZURE_CLIENT_ID: $(openai-client-id)
        AZURE_CLIENT_SECRET: $(openai-client-secret)
        AZURE_TENANT_ID: $(openai-tenant-id)

        # Azure OpenAI
        AOAI_ENDPOINT: $(AOAI-ENDPOINT)
        AOAI_API_KEY: $(AOAI-API-KEY)
        AOAI_CHAT_COMPLETIONS_MODEL_DEPLOYMENT: $(AOAI-CHAT-COMPLETIONS-MODEL-DEPLOYMENT)
        AOAI_COMPLETIONS_MODEL_DEPLOYMENT: $(AOAI-COMPLETIONS-MODEL-DEPLOYMENT)
        AOAI_EMBEDDINGS_MODEL_DEPLOYMENT: $(AOAI-EMBEDDINGS-MODEL-DEPLOYMENT)

        # Azure OpenAI "Canary"
        AOAI_COMPLETIONS_MODEL_DEPLOYMENT_CANARY: $(AOAI-COMPLETIONS-MODEL-DEPLOYMENT-CANARY)
        AOAI_API_KEY_CANARY: $(AOAI-API-KEY-CANARY)
        AOAI_EMBEDDINGS_MODEL_DEPLOYMENT_CANARY: $(AOAI-EMBEDDINGS-MODEL-DEPLOYMENT-CANARY)
        AOAI_CHAT_COMPLETIONS_MODEL_DEPLOYMENT_CANARY: $(AOAI-CHAT-COMPLETIONS-MODEL-DEPLOYMENT-CANARY)
        AOAI_ENDPOINT_CANARY: $(AOAI-ENDPOINT-CANARY)

        # OpenAI
        OPENAI_API_KEY: $(OPENAI-API-KEY)
        OPENAI_ENDPOINT: $(OPENAI-ENDPOINT)
        OPENAI_EMBEDDINGS_MODEL: $(OPENAI-EMBEDDINGS-MODEL)
        OPENAI_CHAT_COMPLETIONS_MODEL: $(OPENAI-CHAT-COMPLETIONS-MODEL)
        OPENAI_COMPLETIONS_MODEL: $(OPENAI-COMPLETIONS-MODEL)
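
The EnvVars block above maps the hyphenated pipeline/keyvault variable names to the underscore-style environment variables the Go tests read. Below is a minimal sketch of how a test might consume them, skipping when a live-test variable isn't set; the helper and test names are hypothetical and not part of the actual suite.

package azopenai_test

import (
	"os"
	"testing"
)

// mustGetenv skips the test when a required live-test variable is missing, so
// playback or partially configured runs don't fail outright. Hypothetical
// helper for illustration only.
func mustGetenv(t *testing.T, name string) string {
	t.Helper()

	v := os.Getenv(name)
	if v == "" {
		t.Skipf("skipping: %s is not set", name)
	}
	return v
}

func TestLiveConfigExample(t *testing.T) {
	endpoint := mustGetenv(t, "AOAI_ENDPOINT")
	apiKey := mustGetenv(t, "AOAI_API_KEY")
	deployment := mustGetenv(t, "AOAI_CHAT_COMPLETIONS_MODEL_DEPLOYMENT")

	// A real test would construct a client from these values.
	_, _, _ = endpoint, apiKey, deployment
}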
7 changes: 7 additions & 0 deletions sdk/ai/azopenai/client_completions_test.go
@@ -67,6 +67,13 @@ func testGetCompletions(t *testing.T, client *azopenai.Client, isAzure bool) {
},
}

if isAzure {
	want.Choices[0].ContentFilterResults = (*azopenai.ChoiceContentFilterResults)(safeContentFilter)
	want.PromptAnnotations = []azopenai.PromptFilterResult{
		{PromptIndex: to.Ptr[int32](0), ContentFilterResults: (*azopenai.PromptFilterResultContentFilterResults)(safeContentFilter)},
	}
}

want.ID = resp.Completions.ID
want.Created = resp.Completions.Created

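The Azure-only branch above compares against a shared safeContentFilter value that is defined elsewhere in the test suite (not shown in this diff). Below is a hypothetical sketch of what such a value could look like, assuming ContentFilterResults/ContentFilterResult types with per-category Filtered and Severity fields; the exact type and field names are assumptions, not taken from this commit.

package azopenai_test

import (
	"github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai"
	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
)

// Hypothetical sketch only: an "everything came back safe and unfiltered"
// expectation that the Azure-specific assertions above could be cast from.
var safeContentFilter = &azopenai.ContentFilterResults{
	Hate:     &azopenai.ContentFilterResult{Filtered: to.Ptr(false), Severity: to.Ptr(azopenai.ContentFilterSeveritySafe)},
	SelfHarm: &azopenai.ContentFilterResult{Filtered: to.Ptr(false), Severity: to.Ptr(azopenai.ContentFilterSeveritySafe)},
	Sexual:   &azopenai.ContentFilterResult{Filtered: to.Ptr(false), Severity: to.Ptr(azopenai.ContentFilterSeveritySafe)},
	Violence: &azopenai.ContentFilterResult{Filtered: to.Ptr(false), Severity: to.Ptr(azopenai.ContentFilterSeveritySafe)},
}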
6 changes: 2 additions & 4 deletions sdk/ai/azopenai/client_embeddings_test.go
@@ -90,10 +90,8 @@ func testGetEmbeddings(t *testing.T, client *azopenai.Client, modelOrDeploymentI
t.Errorf("Client.GetEmbeddings() error = %v, wantErr %v", err, tt.wantErr)
return
}
-if len(got.Embeddings.Data[0].Embedding) != 4096 {
-	t.Errorf("Client.GetEmbeddings() len(Data) want 4096, got %d", len(got.Embeddings.Data))
-	return
-}
+
+require.NotEmpty(t, got.Embeddings.Data[0].Embedding)
})
}
}
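
The removed assertion hard-coded a 4096-dimension vector, which matched the old text-similarity-curie-001 recordings; the re-recorded tests use text-embedding-ada-002 (1536 dimensions, see the client_shared_test.go change below), so the test now only requires a non-empty embedding. Below is a sketch of a stricter, model-aware alternative; the dimension table reflects commonly documented model sizes and is illustrative only, not part of this change.

package azopenai_test

import "testing"

// Illustrative only: expected vector sizes for a couple of well-known models.
var embeddingDims = map[string]int{
	"text-embedding-ada-002":    1536,
	"text-similarity-curie-001": 4096,
}

// requireEmbeddingLen checks the dimension when the model is known and falls
// back to a non-empty check (the behavior the test now uses) otherwise.
func requireEmbeddingLen(t *testing.T, model string, embedding []float32) {
	t.Helper()

	want, ok := embeddingDims[model]
	if !ok {
		if len(embedding) == 0 {
			t.Fatalf("expected a non-empty embedding for model %q", model)
		}
		return
	}
	if got := len(embedding); got != want {
		t.Fatalf("model %q: expected %d dimensions, got %d", model, want, got)
	}
}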
5 changes: 2 additions & 3 deletions sdk/ai/azopenai/client_shared_test.go
@@ -106,12 +106,11 @@ func initEnvVars() {
azureOpenAI.ChatCompletions = "gpt-4-0613"
openAI.ChatCompletions = "gpt-4-0613"

openAI.Embeddings = "text-similarity-curie-001"
openAI.Embeddings = "text-embedding-ada-002"
azureOpenAI.Embeddings = "embedding"
} else {
if err := godotenv.Load(); err != nil {
fmt.Printf("Failed to load .env file: %s\n", err)
-os.Exit(1)
}

azureOpenAI = newTestVars("AOAI", false)
@@ -124,7 +123,7 @@ func newRecordingTransporter(t *testing.T) policy.Transporter {
transport, err := recording.NewRecordingHTTPClient(t, nil)
require.NoError(t, err)

-err = recording.Start(t, "sdk/cognitiveservices/azopenai/testdata", nil)
+err = recording.Start(t, "sdk/ai/azopenai/testdata", nil)
require.NoError(t, err)

if recording.GetRecordMode() != recording.PlaybackMode {
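The transporter returned by newRecordingTransporter is what lets the same tests run live or against the recordings under the new sdk/ai/azopenai/testdata path. Below is a minimal sketch of plugging such a transport into a client; the constructor and credential names are assumptions about the SDK surface, not code from this commit.

package azopenai_test

import (
	"testing"

	"github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai"
	"github.com/Azure/azure-sdk-for-go/sdk/azcore"
	"github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
	"github.com/stretchr/testify/require"
)

// newTestClient wires a (recording or live) transport into the client options.
// NewClientWithKeyCredential and NewKeyCredential are assumed names here.
func newTestClient(t *testing.T, endpoint, apiKey string, transport policy.Transporter) *azopenai.Client {
	t.Helper()

	client, err := azopenai.NewClientWithKeyCredential(endpoint,
		azcore.NewKeyCredential(apiKey),
		&azopenai.ClientOptions{
			ClientOptions: azcore.ClientOptions{Transport: transport},
		})
	require.NoError(t, err)
	return client
}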
6 changes: 5 additions & 1 deletion sdk/ai/azopenai/custom_client_test.go
@@ -144,7 +144,11 @@ func testGetCompletionsStream(t *testing.T, client *azopenai.Client, tv testVars
const want = "\n\nAzure OpenAI is a platform from Microsoft that provides access to OpenAI's artificial intelligence (AI) technologies. It enables developers to build, train, and deploy AI models in the cloud. Azure OpenAI provides access to OpenAI's powerful AI technologies, such as GPT-3, which can be used to create natural language processing (NLP) applications, computer vision models, and reinforcement learning models."

require.Equal(t, want, got)
-require.Equal(t, 86, eventCount)
+
+// there's no strict requirement of how the response is streamed so just
+// choosing something that's reasonable but will be lower than typical usage
+// (which is usually somewhere around the 80s).
+require.GreaterOrEqual(t, eventCount, 50)
}

func TestClient_GetCompletions_Error(t *testing.T) {
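eventCount above is the number of server-sent chunks the streaming completions call produced, which varies from run to run, hence the lower bound instead of the old exact 86. Below is a hypothetical sketch of how such a count is typically accumulated, assuming the streaming response exposes a Read()-until-io.EOF event reader; the field and method names are assumptions, not this test's actual code.

package azopenai_test

import (
	"context"
	"errors"
	"io"
	"testing"

	"github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai"
	"github.com/stretchr/testify/require"
)

// countStreamedEvents drains a streaming completions response and reports how
// many chunks the service sent. CompletionsStream and Read are assumed names.
func countStreamedEvents(t *testing.T, client *azopenai.Client, opts azopenai.CompletionsOptions) int {
	t.Helper()

	resp, err := client.GetCompletionsStream(context.Background(), opts, nil)
	require.NoError(t, err)

	eventCount := 0
	for {
		_, err := resp.CompletionsStream.Read()
		if errors.Is(err, io.EOF) {
			break
		}
		require.NoError(t, err)
		eventCount++
	}
	return eventCount
}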
