From be8d0cc5de19d79d0eb4acecad751ddf0831e4f0 Mon Sep 17 00:00:00 2001 From: CoolProgramme <1227782152@qq.com> Date: Wed, 5 Jul 2023 20:02:34 +0800 Subject: [PATCH 1/6] imitate api --- .env.example | 2 + api/chatgpt/typings.go | 13 +- api/imitate/api.go | 300 ++++++++++++++++++++++++++++++++++++++++ api/imitate/convert.go | 14 ++ api/imitate/request.go | 53 +++++++ api/imitate/response.go | 144 +++++++++++++++++++ go.mod | 1 + go.sum | 1 + main.go | 14 ++ 9 files changed, 541 insertions(+), 1 deletion(-) create mode 100644 api/imitate/api.go create mode 100644 api/imitate/convert.go create mode 100644 api/imitate/request.go create mode 100644 api/imitate/response.go diff --git a/.env.example b/.env.example index 185d6004d..679fc4ca5 100644 --- a/.env.example +++ b/.env.example @@ -3,3 +3,5 @@ GO_CHATGPT_API_PORT=8080 # Network proxy server address GO_CHATGPT_API_PROXY=socks5://ip:port GO_CHATGPT_API_PANDORA=1 +# Imitate access_token +IMITATE_ACCESS_TOKEN= diff --git a/api/chatgpt/typings.go b/api/chatgpt/typings.go index 9e3597462..18bb9b589 100644 --- a/api/chatgpt/typings.go +++ b/api/chatgpt/typings.go @@ -1,7 +1,10 @@ package chatgpt //goland:noinspection GoSnakeCaseUsage -import tls_client "github.com/bogdanfinn/tls-client" +import ( + tls_client "github.com/bogdanfinn/tls-client" + "github.com/google/uuid" +) type UserLogin struct { client tls_client.HttpClient @@ -20,6 +23,14 @@ type CreateConversationRequest struct { AutoContinue bool `json:"auto_continue"` } +func (c *CreateConversationRequest) AddMessage(role string, content string) { + c.Messages = append(c.Messages, Message{ + ID: uuid.New().String(), + Author: Author{Role: role}, + Content: Content{ContentType: "text", Parts: []string{content}}, + }) +} + type Message struct { Author Author `json:"author"` Content Content `json:"content"` diff --git a/api/imitate/api.go b/api/imitate/api.go new file mode 100644 index 000000000..7e19b4da9 --- /dev/null +++ b/api/imitate/api.go @@ -0,0 +1,300 @@ +package imitate + +import ( + "bufio" + "bytes" + "encoding/json" + "fmt" + http "github.com/bogdanfinn/fhttp" + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/linweiyuan/funcaptcha" + "github.com/linweiyuan/go-chatgpt-api/api" + "github.com/linweiyuan/go-chatgpt-api/api/chatgpt" + "io" + "os" + "strings" +) + +var ( + arkoseTokenUrl string + puid string + bx string +) + +//goland:noinspection SpellCheckingInspection +func init() { + arkoseTokenUrl = os.Getenv("GO_CHATGPT_API_ARKOSE_TOKEN_URL") + puid = os.Getenv("GO_CHATGPT_API_PUID") + bx = os.Getenv("GO_CHATGPT_API_BX") +} + +func CreateChatCompletions(c *gin.Context) { + var originalRequest APIRequest + err := c.BindJSON(&originalRequest) + if err != nil { + c.JSON(400, gin.H{"error": gin.H{ + "message": "Request must be proper JSON", + "type": "invalid_request_error", + "param": nil, + "code": err.Error(), + }}) + return + } + + // 从配置文件里获取(先跑起来再说o(*≧▽≦)ツ) + // TODO more + token := os.Getenv("IMITATE_ACCESS_TOKEN") + + // 将聊天请求转换为ChatGPT请求。 + translatedRequest := convertAPIRequest(originalRequest) + + response, done := sendConversationRequest(c, translatedRequest, token) + if done { + c.JSON(500, gin.H{ + "error": "error sending request", + }) + return + } + + defer func(Body io.ReadCloser) { + err := Body.Close() + if err != nil { + return + } + }(response.Body) + + if HandleRequestError(c, response) { + return + } + + var fullResponse string + + for i := 3; i > 0; i-- { + var continueInfo *ContinueInfo + var responsePart string + responsePart, continueInfo = 
Handler(c, response, originalRequest.Stream) + fullResponse += responsePart + if continueInfo == nil { + break + } + println("Continuing conversation") + translatedRequest.Messages = nil + translatedRequest.Action = "continue" + translatedRequest.ConversationID = &continueInfo.ConversationID + translatedRequest.ParentMessageID = continueInfo.ParentID + response, done = sendConversationRequest(c, translatedRequest, token) + + if done { + c.JSON(500, gin.H{ + "error": "error sending request", + }) + return + } + + // 以下修复代码来自ChatGPT + // 在循环内部创建一个局部作用域,并将资源的引用传递给匿名函数,保证资源将在每次迭代结束时被正确释放 + func() { + defer func(Body io.ReadCloser) { + err := Body.Close() + if err != nil { + return + } + }(response.Body) + }() + + if HandleRequestError(c, response) { + return + } + } + + if !originalRequest.Stream { + c.JSON(200, newChatCompletion(fullResponse)) + } else { + c.String(200, "data: [DONE]\n\n") + } +} + +func convertAPIRequest(apiRequest APIRequest) chatgpt.CreateConversationRequest { + + chatgptRequest := NewChatGPTRequest() + + if strings.HasPrefix(apiRequest.Model, "gpt-3.5") { + chatgptRequest.Model = "text-davinci-002-render-sha" + } + + if strings.HasPrefix(apiRequest.Model, "gpt-4") { + arkoseToken, err := GetOpenAIToken() + if err == nil { + chatgptRequest.ArkoseToken = arkoseToken + } else { + fmt.Println("Error getting Arkose token: ", err) + } + chatgptRequest.Model = apiRequest.Model + } + + if apiRequest.PluginIDs != nil { + chatgptRequest.PluginIDs = apiRequest.PluginIDs + chatgptRequest.Model = "gpt-4-plugins" + } + + for _, apiMessage := range apiRequest.Messages { + if apiMessage.Role == "system" { + apiMessage.Role = "critic" + } + chatgptRequest.AddMessage(apiMessage.Role, apiMessage.Content) + } + + return chatgptRequest +} + +func GetOpenAIToken() (string, error) { + var arkoseToken string + var err error + if arkoseTokenUrl == "" { + if bx == "" { + arkoseToken, err = funcaptcha.GetOpenAIToken() + } else { + arkoseToken, err = funcaptcha.GetOpenAITokenWithBx(bx) + } + if err != nil { + return "", err + } + } else { + req, _ := http.NewRequest(http.MethodGet, arkoseTokenUrl, nil) + resp, err := api.Client.Do(req) + if err != nil || resp.StatusCode != http.StatusOK { + return "", err + } + responseMap := make(map[string]interface{}) + err = json.NewDecoder(resp.Body).Decode(&responseMap) + if err != nil { + return "", err + } + arkoseToken = responseMap["token"].(string) + } + return arkoseToken, err +} + +func NewChatGPTRequest() chatgpt.CreateConversationRequest { + enableHistory := os.Getenv("ENABLE_HISTORY") == "" + return chatgpt.CreateConversationRequest{ + Action: "next", + ParentMessageID: uuid.NewString(), + Model: "text-davinci-002-render-sha", + HistoryAndTrainingDisabled: !enableHistory, + } +} + +//goland:noinspection GoUnhandledErrorResult +func sendConversationRequest(c *gin.Context, request chatgpt.CreateConversationRequest, accessToken string) (*http.Response, bool) { + jsonBytes, _ := json.Marshal(request) + req, _ := http.NewRequest(http.MethodPost, api.ChatGPTApiUrlPrefix+"/backend-api/conversation", bytes.NewBuffer(jsonBytes)) + req.Header.Set("User-Agent", api.UserAgent) + req.Header.Set("Authorization", accessToken) + req.Header.Set("Accept", "text/event-stream") + if puid != "" { + //goland:noinspection SpellCheckingInspection + req.Header.Set("Cookie", "_puid="+puid) + } + resp, err := api.Client.Do(req) + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, api.ReturnMessage(err.Error())) + return nil, true + } + + if resp.StatusCode 
!= http.StatusOK { + responseMap := make(map[string]interface{}) + json.NewDecoder(resp.Body).Decode(&responseMap) + c.AbortWithStatusJSON(resp.StatusCode, responseMap) + return nil, true + } + + return resp, false +} + +func Handler(c *gin.Context, response *http.Response, stream bool) (string, *ContinueInfo) { + maxTokens := false + + // Create a bufio.Reader from the response body + reader := bufio.NewReader(response.Body) + + // Read the response byte by byte until a newline character is encountered + if stream { + // Response content type is text/event-stream + c.Header("Content-Type", "text/event-stream") + } else { + // Response content type is application/json + c.Header("Content-Type", "application/json") + } + var finishReason string + var previousText StringStruct + var originalResponse ChatGPTResponse + var isRole = true + for { + line, err := reader.ReadString('\n') + if err != nil { + if err == io.EOF { + break + } + return "", nil + } + if len(line) < 6 { + continue + } + // Remove "data: " from the beginning of the line + line = line[6:] + // Check if line starts with [DONE] + if !strings.HasPrefix(line, "[DONE]") { + // Parse the line as JSON + + err = json.Unmarshal([]byte(line), &originalResponse) + if err != nil { + continue + } + if originalResponse.Error != nil { + c.JSON(500, gin.H{"error": originalResponse.Error}) + return "", nil + } + if originalResponse.Message.Author.Role != "assistant" || originalResponse.Message.Content.Parts == nil { + continue + } + if originalResponse.Message.Metadata.MessageType != "next" && originalResponse.Message.Metadata.MessageType != "continue" || originalResponse.Message.EndTurn != nil { + continue + } + responseString := ConvertToString(&originalResponse, &previousText, isRole) + isRole = false + if stream { + _, err = c.Writer.WriteString(responseString) + if err != nil { + return "", nil + } + } + // Flush the response writer buffer to ensure that the client receives each line as it's written + c.Writer.Flush() + + if originalResponse.Message.Metadata.FinishDetails != nil { + if originalResponse.Message.Metadata.FinishDetails.Type == "max_tokens" { + maxTokens = true + } + finishReason = originalResponse.Message.Metadata.FinishDetails.Type + } + + } else { + if stream { + finalLine := StopChunk(finishReason) + _, err := c.Writer.WriteString("data: " + finalLine.String() + "\n\n") + if err != nil { + return "", nil + } + } + } + } + if !maxTokens { + return previousText.Text, nil + } + return previousText.Text, &ContinueInfo{ + ConversationID: originalResponse.ConversationID, + ParentID: originalResponse.Message.ID, + } +} diff --git a/api/imitate/convert.go b/api/imitate/convert.go new file mode 100644 index 000000000..ff74088d7 --- /dev/null +++ b/api/imitate/convert.go @@ -0,0 +1,14 @@ +package imitate + +import ( + "strings" +) + +func ConvertToString(chatgptResponse *ChatGPTResponse, previousText *StringStruct, role bool) string { + translatedResponse := NewChatCompletionChunk(strings.ReplaceAll(chatgptResponse.Message.Content.Parts[0], *&previousText.Text, "")) + if role { + translatedResponse.Choices[0].Delta.Role = chatgptResponse.Message.Author.Role + } + previousText.Text = chatgptResponse.Message.Content.Parts[0] + return "data: " + translatedResponse.String() + "\n\n" +} diff --git a/api/imitate/request.go b/api/imitate/request.go new file mode 100644 index 000000000..9c3542aa2 --- /dev/null +++ b/api/imitate/request.go @@ -0,0 +1,53 @@ +package imitate + +import ( + "encoding/json" + http "github.com/bogdanfinn/fhttp" 
+ "github.com/gin-gonic/gin" + "io" +) + +type ContinueInfo struct { + ConversationID string `json:"conversation_id"` + ParentID string `json:"parent_id"` +} + +type APIRequest struct { + Messages []ApiMessage `json:"messages"` + Stream bool `json:"stream"` + Model string `json:"model"` + PluginIDs []string `json:"plugin_ids"` +} + +type ApiMessage struct { + Role string `json:"role"` + Content string `json:"content"` +} + +func HandleRequestError(c *gin.Context, response *http.Response) bool { + if response.StatusCode != 200 { + // Try read response body as JSON + var errorResponse map[string]interface{} + err := json.NewDecoder(response.Body).Decode(&errorResponse) + if err != nil { + // Read response body + body, _ := io.ReadAll(response.Body) + c.JSON(500, gin.H{"error": gin.H{ + "message": "Unknown error", + "type": "internal_server_error", + "param": nil, + "code": "500", + "details": string(body), + }}) + return true + } + c.JSON(response.StatusCode, gin.H{"error": gin.H{ + "message": errorResponse["detail"], + "type": response.Status, + "param": nil, + "code": "error", + }}) + return true + } + return false +} diff --git a/api/imitate/response.go b/api/imitate/response.go new file mode 100644 index 000000000..10b29d360 --- /dev/null +++ b/api/imitate/response.go @@ -0,0 +1,144 @@ +package imitate + +import ( + "encoding/json" + "github.com/linweiyuan/go-chatgpt-api/api/chatgpt" +) + +type ChatCompletionChunk struct { + ID string `json:"id"` + Object string `json:"object"` + Created int64 `json:"created"` + Model string `json:"model"` + Choices []Choices `json:"choices"` +} + +func (chunk *ChatCompletionChunk) String() string { + resp, _ := json.Marshal(chunk) + return string(resp) +} + +type Choices struct { + Delta Delta `json:"delta"` + Index int `json:"index"` + FinishReason interface{} `json:"finish_reason"` +} + +type Delta struct { + Content string `json:"content,omitempty"` + Role string `json:"role,omitempty"` +} + +func NewChatCompletionChunk(text string) ChatCompletionChunk { + return ChatCompletionChunk{ + ID: "chatcmpl-QXlha2FBbmROaXhpZUFyZUF3ZXNvbWUK", + Object: "chat.completion.chunk", + Created: 0, + Model: "gpt-3.5-turbo-0301", + Choices: []Choices{ + { + Index: 0, + Delta: Delta{ + Content: text, + }, + FinishReason: nil, + }, + }, + } +} + +func StopChunk(reason string) ChatCompletionChunk { + return ChatCompletionChunk{ + ID: "chatcmpl-QXlha2FBbmROaXhpZUFyZUF3ZXNvbWUK", + Object: "chat.completion.chunk", + Created: 0, + Model: "gpt-3.5-turbo-0301", + Choices: []Choices{ + { + Index: 0, + FinishReason: reason, + }, + }, + } +} + +type ChatCompletion struct { + ID string `json:"id"` + Object string `json:"object"` + Created int64 `json:"created"` + Model string `json:"model"` + Usage usage `json:"usage"` + Choices []Choice `json:"choices"` +} +type Msg struct { + Role string `json:"role"` + Content string `json:"content"` +} +type Choice struct { + Index int `json:"index"` + Message Msg `json:"message"` + FinishReason interface{} `json:"finish_reason"` +} +type usage struct { + PromptTokens int `json:"prompt_tokens"` + CompletionTokens int `json:"completion_tokens"` + TotalTokens int `json:"total_tokens"` +} + +type ChatGPTResponse struct { + Message Message `json:"message"` + ConversationID string `json:"conversation_id"` + Error interface{} `json:"error"` +} + +type Message struct { + ID string `json:"id"` + Author chatgpt.Author `json:"author"` + CreateTime float64 `json:"create_time"` + UpdateTime interface{} `json:"update_time"` + Content chatgpt.Content 
`json:"content"` + EndTurn interface{} `json:"end_turn"` + Weight float64 `json:"weight"` + Metadata Metadata `json:"metadata"` + Recipient string `json:"recipient"` +} + +type Metadata struct { + Timestamp string `json:"timestamp_"` + MessageType string `json:"message_type"` + FinishDetails *FinishDetails `json:"finish_details"` + ModelSlug string `json:"model_slug"` + Recipient string `json:"recipient"` +} + +type FinishDetails struct { + Type string `json:"type"` + Stop string `json:"stop"` +} + +type StringStruct struct { + Text string `json:"text"` +} + +func newChatCompletion(fullTest string) ChatCompletion { + return ChatCompletion{ + ID: "chatcmpl-QXlha2FBbmROaXhpZUFyZUF3ZXNvbWUK", + Object: "chat.completion", + Created: int64(0), + Model: "gpt-3.5-turbo-0301", + Usage: usage{ + PromptTokens: 0, + CompletionTokens: 0, + TotalTokens: 0, + }, + Choices: []Choice{ + { + Message: Msg{ + Content: fullTest, + Role: "assistant", + }, + Index: 0, + }, + }, + } +} diff --git a/go.mod b/go.mod index e8f36c315..5514ee401 100644 --- a/go.mod +++ b/go.mod @@ -10,6 +10,7 @@ require ( github.com/joho/godotenv v1.5.1 github.com/linweiyuan/funcaptcha v0.0.0-20230704212036-45a7d90e69e5 github.com/sirupsen/logrus v1.9.0 + github.com/google/uuid v1.3.0 ) require ( diff --git a/go.sum b/go.sum index 102b3a192..1d685d791 100644 --- a/go.sum +++ b/go.sum @@ -38,6 +38,7 @@ github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaS github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= diff --git a/main.go b/main.go index bb3a9e255..c13dc0a53 100644 --- a/main.go +++ b/main.go @@ -1,6 +1,7 @@ package main import ( + "github.com/linweiyuan/go-chatgpt-api/api/imitate" "log" "os" "strings" @@ -30,6 +31,7 @@ func main() { setupChatGPTAPIs(router) setupPlatformAPIs(router) setupPandoraAPIs(router) + setupImitateAPIs(router) router.NoRoute(api.Proxy) router.GET("/", func(c *gin.Context) { @@ -83,3 +85,15 @@ func setupPandoraAPIs(router *gin.Engine) { }) } } + +func setupImitateAPIs(router *gin.Engine) { + imitateGroup := router.Group("/imitate") + { + imitateGroup.POST("/login", chatgpt.Login) + + apiGroup := imitateGroup.Group("/v1") + { + apiGroup.POST("/chat/completions", imitate.CreateChatCompletions) + } + } +} From 72376be8c541c9d2f07d4599161740b113cfba1c Mon Sep 17 00:00:00 2001 From: CoolProgramme <1227782152@qq.com> Date: Wed, 5 Jul 2023 20:16:48 +0800 Subject: [PATCH 2/6] bug fix: imitate api mod deletion --- go.sum | 1 + 1 file changed, 1 insertion(+) diff --git a/go.sum b/go.sum index 1d685d791..a3e0cae5f 100644 --- a/go.sum +++ b/go.sum @@ -38,6 +38,7 @@ github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaS github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/uuid v1.3.0 
h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= From 2dd74e6a2abf841496ef90f14d4b6fa2a9b3f4ab Mon Sep 17 00:00:00 2001 From: CoolProgramme <1227782152@qq.com> Date: Wed, 5 Jul 2023 20:24:43 +0800 Subject: [PATCH 3/6] bug fix: imitate api mod deletion v2 --- go.mod | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/go.mod b/go.mod index 5514ee401..62e81d24b 100644 --- a/go.mod +++ b/go.mod @@ -7,10 +7,10 @@ require ( github.com/bogdanfinn/fhttp v0.5.23 github.com/bogdanfinn/tls-client v1.4.0 github.com/gin-gonic/gin v1.9.1 + github.com/google/uuid v1.3.0 github.com/joho/godotenv v1.5.1 github.com/linweiyuan/funcaptcha v0.0.0-20230704212036-45a7d90e69e5 github.com/sirupsen/logrus v1.9.0 - github.com/google/uuid v1.3.0 ) require ( From 7c8279762ce39fb2fdc03988048066cf02217319 Mon Sep 17 00:00:00 2001 From: CoolProgramme <1227782152@qq.com> Date: Tue, 25 Jul 2023 17:25:52 +0800 Subject: [PATCH 4/6] simulate api return result optimization --- api/imitate/api.go | 37 +++++++++++++++++++++++++++++-------- api/imitate/convert.go | 5 +++-- api/imitate/response.go | 23 ++++++++++++----------- 3 files changed, 44 insertions(+), 21 deletions(-) diff --git a/api/imitate/api.go b/api/imitate/api.go index 1d7aa9787..42cb785a9 100644 --- a/api/imitate/api.go +++ b/api/imitate/api.go @@ -3,10 +3,12 @@ package imitate import ( "bufio" "bytes" + "encoding/base64" "encoding/json" "fmt" "io" "os" + "regexp" "strings" "github.com/gin-gonic/gin" @@ -23,12 +25,18 @@ import ( var ( arkoseTokenUrl string bx string + reg *regexp.Regexp ) //goland:noinspection SpellCheckingInspection func init() { arkoseTokenUrl = os.Getenv("ARKOSE_TOKEN_URL") bx = os.Getenv("BX") + var err error + reg, err = regexp.Compile("[^a-zA-Z0-9]+") + if err != nil { + panic(fmt.Sprintf("Error compiling regex: %v", err)) + } } func CreateChatCompletions(c *gin.Context) { @@ -55,7 +63,7 @@ func CreateChatCompletions(c *gin.Context) { } // 将聊天请求转换为ChatGPT请求。 - translatedRequest := convertAPIRequest(originalRequest) + translatedRequest, model := convertAPIRequest(originalRequest) response, done := sendConversationRequest(c, translatedRequest, token) if done { @@ -78,11 +86,13 @@ func CreateChatCompletions(c *gin.Context) { var fullResponse string + id := generateId() + for i := 3; i > 0; i-- { var continueInfo *ContinueInfo var responsePart string var continueSignal string - responsePart, continueInfo = Handler(c, response, originalRequest.Stream) + responsePart, continueInfo = Handler(c, response, originalRequest.Stream, id, model) fullResponse += responsePart continueSignal = os.Getenv("CONTINUE_SIGNAL") if continueInfo == nil || continueSignal == "" { @@ -119,16 +129,26 @@ func CreateChatCompletions(c *gin.Context) { } if !originalRequest.Stream { - c.JSON(200, newChatCompletion(fullResponse, translatedRequest.Model)) + c.JSON(200, newChatCompletion(fullResponse, model, id)) } else { c.String(200, "data: [DONE]\n\n") } } +func generateId() string { + id := uuid.NewString() + id = strings.ReplaceAll(id, "-", "") + id = base64.StdEncoding.EncodeToString([]byte(id)) + id = reg.ReplaceAllString(id, "") + return "chatcmpl-" + id +} + //goland:noinspection SpellCheckingInspection -func convertAPIRequest(apiRequest APIRequest) chatgpt.CreateConversationRequest { +func 
convertAPIRequest(apiRequest APIRequest) (chatgpt.CreateConversationRequest, string) { chatgptRequest := NewChatGPTRequest() + var model = "gpt-3.5-turbo-0613" + if strings.HasPrefix(apiRequest.Model, "gpt-3.5") { chatgptRequest.Model = "text-davinci-002-render-sha" } @@ -141,6 +161,7 @@ func convertAPIRequest(apiRequest APIRequest) chatgpt.CreateConversationRequest fmt.Println("Error getting Arkose token: ", err) } chatgptRequest.Model = apiRequest.Model + model = "gpt-4-0613" } if apiRequest.PluginIDs != nil { @@ -155,7 +176,7 @@ func convertAPIRequest(apiRequest APIRequest) chatgpt.CreateConversationRequest chatgptRequest.AddMessage(apiMessage.Role, apiMessage.Content) } - return chatgptRequest + return chatgptRequest, model } func GetOpenAIToken() (string, error) { @@ -229,7 +250,7 @@ func sendConversationRequest(c *gin.Context, request chatgpt.CreateConversationR } //goland:noinspection SpellCheckingInspection -func Handler(c *gin.Context, response *http.Response, stream bool) (string, *ContinueInfo) { +func Handler(c *gin.Context, response *http.Response, stream bool, id string, model string) (string, *ContinueInfo) { maxTokens := false // Create a bufio.Reader from the response body @@ -278,7 +299,7 @@ func Handler(c *gin.Context, response *http.Response, stream bool) (string, *Con if originalResponse.Message.Metadata.MessageType != "next" && originalResponse.Message.Metadata.MessageType != "continue" || originalResponse.Message.EndTurn != nil { continue } - responseString := ConvertToString(&originalResponse, &previousText, isRole) + responseString := ConvertToString(&originalResponse, &previousText, isRole, id, model) isRole = false if stream { _, err = c.Writer.WriteString(responseString) @@ -298,7 +319,7 @@ func Handler(c *gin.Context, response *http.Response, stream bool) (string, *Con } else { if stream { - finalLine := StopChunk(finishReason) + finalLine := StopChunk(finishReason, id, model) _, err := c.Writer.WriteString("data: " + finalLine.String() + "\n\n") if err != nil { return "", nil diff --git a/api/imitate/convert.go b/api/imitate/convert.go index 8f7989674..63df90a3e 100644 --- a/api/imitate/convert.go +++ b/api/imitate/convert.go @@ -5,8 +5,9 @@ import ( ) //goland:noinspection SpellCheckingInspection -func ConvertToString(chatgptResponse *ChatGPTResponse, previousText *StringStruct, role bool) string { - translatedResponse := NewChatCompletionChunk(strings.ReplaceAll(chatgptResponse.Message.Content.Parts[0], *&previousText.Text, "")) +func ConvertToString(chatgptResponse *ChatGPTResponse, previousText *StringStruct, role bool, id string, model string) string { + text := strings.ReplaceAll(chatgptResponse.Message.Content.Parts[0], *&previousText.Text, "") + translatedResponse := NewChatCompletionChunk(text, id, model) if role { translatedResponse.Choices[0].Delta.Role = chatgptResponse.Message.Author.Role } diff --git a/api/imitate/response.go b/api/imitate/response.go index c081469a3..54722aed0 100644 --- a/api/imitate/response.go +++ b/api/imitate/response.go @@ -2,6 +2,7 @@ package imitate import ( "encoding/json" + "time" "github.com/linweiyuan/go-chatgpt-api/api/chatgpt" ) @@ -31,12 +32,12 @@ type Delta struct { } //goland:noinspection SpellCheckingInspection -func NewChatCompletionChunk(text string) ChatCompletionChunk { +func NewChatCompletionChunk(text string, id string, model string) ChatCompletionChunk { return ChatCompletionChunk{ - ID: "chatcmpl-QXlha2FBbmROaXhpZUFyZUF3ZXNvbWUK", + ID: id, Object: "chat.completion.chunk", - Created: 0, - Model: 
"gpt-3.5-turbo-0301", + Created: time.Now().Unix(), + Model: model, Choices: []Choices{ { Index: 0, @@ -50,12 +51,12 @@ func NewChatCompletionChunk(text string) ChatCompletionChunk { } //goland:noinspection SpellCheckingInspection -func StopChunk(reason string) ChatCompletionChunk { +func StopChunk(reason string, id string, model string) ChatCompletionChunk { return ChatCompletionChunk{ - ID: "chatcmpl-QXlha2FBbmROaXhpZUFyZUF3ZXNvbWUK", + ID: id, Object: "chat.completion.chunk", - Created: 0, - Model: "gpt-3.5-turbo-0301", + Created: time.Now().Unix(), + Model: model, Choices: []Choices{ { Index: 0, @@ -124,11 +125,11 @@ type StringStruct struct { } //goland:noinspection SpellCheckingInspection -func newChatCompletion(fullTest, model string) ChatCompletion { +func newChatCompletion(fullTest, model string, id string) ChatCompletion { return ChatCompletion{ - ID: "chatcmpl-QXlha2FBbmROaXhpZUFyZUF3ZXNvbWUK", + ID: id, Object: "chat.completion", - Created: int64(0), + Created: time.Now().Unix(), Model: model, Usage: usage{ PromptTokens: 0, From bd484ae6ac974d2621911b4dc18b79a357bc0040 Mon Sep 17 00:00:00 2001 From: CoolProgramme <1227782152@qq.com> Date: Tue, 12 Sep 2023 10:55:24 +0800 Subject: [PATCH 5/6] =?UTF-8?q?=E4=BF=AE=E5=A4=8D=20issues=20#247=20,imita?= =?UTF-8?q?te=E6=8A=A5=E9=94=99=E8=BF=94=E5=9B=9E=E4=B8=8D=E6=98=AF?= =?UTF-8?q?=E5=90=88=E6=B3=95json=E7=9A=84=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- api/imitate/api.go | 6 ------ 1 file changed, 6 deletions(-) diff --git a/api/imitate/api.go b/api/imitate/api.go index b0b4cf223..c4ceb01e5 100644 --- a/api/imitate/api.go +++ b/api/imitate/api.go @@ -67,9 +67,6 @@ func CreateChatCompletions(c *gin.Context) { response, done := sendConversationRequest(c, translatedRequest, token) if done { - c.JSON(500, gin.H{ - "error": "error sending request", - }) return } @@ -106,9 +103,6 @@ func CreateChatCompletions(c *gin.Context) { response, done = sendConversationRequest(c, translatedRequest, token) if done { - c.JSON(500, gin.H{ - "error": "error sending request", - }) return } From e0766064a25f4f2545c802047c42c9f6682d19c9 Mon Sep 17 00:00:00 2001 From: CoolProgramme <1227782152@qq.com> Date: Thu, 14 Sep 2023 12:08:31 +0800 Subject: [PATCH 6/6] =?UTF-8?q?=E4=BC=98=E5=8C=96=E6=A8=A1=E6=8B=9Fapi?= =?UTF-8?q?=E7=9A=84=E8=BF=94=E5=9B=9E=EF=BC=8C=E4=BD=BF=E5=85=B6=E6=9B=B4?= =?UTF-8?q?=E5=8A=A0=E6=8E=A5=E8=BF=91=E5=AE=98=E6=96=B9api=E8=BF=94?= =?UTF-8?q?=E5=9B=9E?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- api/imitate/api.go | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/api/imitate/api.go b/api/imitate/api.go index c4ceb01e5..cee93e20b 100644 --- a/api/imitate/api.go +++ b/api/imitate/api.go @@ -293,6 +293,9 @@ func Handler(c *gin.Context, response *http.Response, stream bool, id string, mo if originalResponse.Message.Metadata.MessageType != "next" && originalResponse.Message.Metadata.MessageType != "continue" || originalResponse.Message.EndTurn != nil { continue } + if (len(originalResponse.Message.Content.Parts) == 0 || originalResponse.Message.Content.Parts[0] == "") && !isRole { + continue + } responseString := ConvertToString(&originalResponse, &previousText, isRole, id, model) isRole = false if stream { @@ -313,6 +316,9 @@ func Handler(c *gin.Context, response *http.Response, stream bool, id string, mo } else { if stream { + if finishReason == "" { + finishReason = "stop" + } finalLine := 
StopChunk(finishReason, id, model) _, err := c.Writer.WriteString("data: " + finalLine.String() + "\n\n") if err != nil {
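
For context on how the endpoint introduced by this series is meant to be called: main.go registers POST /imitate/v1/chat/completions, the handler binds an OpenAI-style APIRequest (model, stream, messages, plugin_ids), and the upstream access token is read server-side from IMITATE_ACCESS_TOKEN rather than taken from the caller. The following is a minimal client sketch, not part of the patches themselves; it assumes the service is running locally on the default GO_CHATGPT_API_PORT=8080 with IMITATE_ACCESS_TOKEN configured, and the host, port, and model name are illustrative.

// Illustrative client for the imitate endpoint added in this patch series.
// Assumes go-chatgpt-api is running locally on the default port 8080 and
// that IMITATE_ACCESS_TOKEN is set in the server's environment.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// OpenAI-style request body matching imitate.APIRequest
	// (messages, stream, model, plugin_ids).
	body, _ := json.Marshal(map[string]interface{}{
		"model":  "gpt-3.5-turbo",
		"stream": false,
		"messages": []map[string]string{
			{"role": "user", "content": "Hello"},
		},
	})

	// Route registered in main.go: POST /imitate/v1/chat/completions.
	resp, err := http.Post("http://127.0.0.1:8080/imitate/v1/chat/completions",
		"application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Non-stream requests return an OpenAI-style chat.completion object;
	// with "stream": true the handler emits chat.completion.chunk events
	// terminated by "data: [DONE]".
	out, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status)
	fmt.Println(string(out))
}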