embeddings_test.go
package openai_test

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"net/http"
	"testing"

	. "github.com/sashabaranov/go-openai"
	"github.com/sashabaranov/go-openai/internal/test/checks"
)
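
// TestEmbedding confirms that each embedding request variant (EmbeddingRequest,
// EmbeddingRequestStrings, EmbeddingRequestTokens) marshals to JSON with the
// expected "model" field for every supported embedding model.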
func TestEmbedding(t *testing.T) {
	embeddedModels := []EmbeddingModel{
		AdaSimilarity,
		BabbageSimilarity,
		CurieSimilarity,
		DavinciSimilarity,
		AdaSearchDocument,
		AdaSearchQuery,
		BabbageSearchDocument,
		BabbageSearchQuery,
		CurieSearchDocument,
		CurieSearchQuery,
		DavinciSearchDocument,
		DavinciSearchQuery,
		AdaCodeSearchCode,
		AdaCodeSearchText,
		BabbageCodeSearchCode,
		BabbageCodeSearchText,
	}
	for _, model := range embeddedModels {
		// test embedding request with strings (simple embedding request)
		embeddingReq := EmbeddingRequest{
			Input: []string{
				"The food was delicious and the waiter",
				"Other examples of embedding request",
			},
			Model: model,
		}
		// marshal embeddingReq to JSON and confirm that the model field matches
		// the model under test
		marshaled, err := json.Marshal(embeddingReq)
		checks.NoError(t, err, "Could not marshal embedding request")
		if !bytes.Contains(marshaled, []byte(`"model":"`+model.String()+`"`)) {
			t.Fatalf("Expected embedding request to contain model field")
		}

		// test embedding request with strings
		embeddingReqStrings := EmbeddingRequestStrings{
			Input: []string{
				"The food was delicious and the waiter",
				"Other examples of embedding request",
			},
			Model: model,
		}
		marshaled, err = json.Marshal(embeddingReqStrings)
		checks.NoError(t, err, "Could not marshal embedding request")
		if !bytes.Contains(marshaled, []byte(`"model":"`+model.String()+`"`)) {
			t.Fatalf("Expected embedding request to contain model field")
		}

		// test embedding request with tokens
		embeddingReqTokens := EmbeddingRequestTokens{
			Input: [][]int{
				{464, 2057, 373, 12625, 290, 262, 46612},
				{6395, 6096, 286, 11525, 12083, 2581},
			},
			Model: model,
		}
		marshaled, err = json.Marshal(embeddingReqTokens)
		checks.NoError(t, err, "Could not marshal embedding request")
		if !bytes.Contains(marshaled, []byte(`"model":"`+model.String()+`"`)) {
			t.Fatalf("Expected embedding request to contain model field")
		}
	}
}
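
// TestEmbeddingModel checks that EmbeddingModel.UnmarshalText maps a known
// model name to its constant and an unrecognized name to Unknown, returning
// no error in either case.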
func TestEmbeddingModel(t *testing.T) {
	var em EmbeddingModel
	err := em.UnmarshalText([]byte("text-similarity-ada-001"))
	checks.NoError(t, err, "Could not unmarshal embedding model")

	if em != AdaSimilarity {
		t.Errorf("Model is not equal to AdaSimilarity")
	}

	err = em.UnmarshalText([]byte("some-non-existent-model"))
	checks.NoError(t, err, "Could not unmarshal embedding model")
	if em != Unknown {
		t.Errorf("Model is not equal to Unknown")
	}
}
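
// TestEmbeddingEndpoint exercises client.CreateEmbeddings against a stubbed
// /v1/embeddings handler for each of the three request types.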
func TestEmbeddingEndpoint(t *testing.T) {
	client, server, teardown := setupOpenAITestServer()
	defer teardown()
	server.RegisterHandler(
		"/v1/embeddings",
		func(w http.ResponseWriter, r *http.Request) {
			resBytes, _ := json.Marshal(EmbeddingResponse{})
			fmt.Fprintln(w, string(resBytes))
		},
	)
	// test create embeddings with strings (simple embedding request)
	_, err := client.CreateEmbeddings(context.Background(), EmbeddingRequest{})
	checks.NoError(t, err, "CreateEmbeddings error")

	// test create embeddings with strings
	_, err = client.CreateEmbeddings(context.Background(), EmbeddingRequestStrings{})
	checks.NoError(t, err, "CreateEmbeddings strings error")

	// test create embeddings with tokens
	_, err = client.CreateEmbeddings(context.Background(), EmbeddingRequestTokens{})
	checks.NoError(t, err, "CreateEmbeddings tokens error")
}