Skip to content

Commit

Permalink
Merge pull request #26 from basenana/fix/log
Browse files Browse the repository at this point in the history
update: debug log
  • Loading branch information
zwwhdls authored Nov 20, 2023
2 parents b7d54e3 + 3ebfb21 commit 89234e9
Show file tree
Hide file tree
Showing 5 changed files with 8 additions and 42 deletions.
4 changes: 4 additions & 0 deletions pkg/llm/client/glm-6b/client.go
Original file line number Diff line number Diff line change
Expand Up @@ -27,15 +27,18 @@ import (

"github.com/basenana/friday/pkg/llm"
"github.com/basenana/friday/pkg/llm/prompts"
"github.com/basenana/friday/pkg/utils/logger"
)

// GLM is an llm.LLM implementation that talks to a glm-6b model over
// HTTP (see NewGLM, which returns it as an llm.LLM).
type GLM struct {
	log     logger.Logger // logger scoped to "glm"; used for request/response debug output
	baseUri string        // base URI of the glm-6b HTTP service
}

// NewGLM returns an llm.LLM backed by the glm-6b service reachable at uri.
func NewGLM(uri string) llm.LLM {
	client := &GLM{
		log:     logger.NewLogger("glm"),
		baseUri: uri,
	}
	return client
}

Expand All @@ -62,6 +65,7 @@ func (o *GLM) request(path string, method string, body io.Reader) ([]byte, error
if resp.StatusCode != 200 {
return nil, fmt.Errorf("fail to call glm-6b, status code error: %d", resp.StatusCode)
}
o.log.Debugf("openai response: %s", respBody)
return respBody, nil
}

Expand Down
15 changes: 1 addition & 14 deletions pkg/llm/client/openai/v1/chat.go
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,6 @@ package v1
import (
"context"
"encoding/json"
"strings"
"time"

"github.com/basenana/friday/pkg/llm/prompts"
)
Expand All @@ -41,17 +39,7 @@ type ChatChoice struct {
}

// Chat renders prompt with parameters and sends it to the OpenAI
// chat-completion endpoint via the internal o.chat helper, returning the
// model's answer strings.
//
// The displayed block carried leftover pre-refactor retry logic followed by
// an unreachable second return; rate-limit backoff appears to be handled in
// the shared request loop now (it sleeps 30s and retries on a bad status),
// so Chat is a plain delegation. NOTE(review): confirm the request layer
// covers the rate_limit_exceeded case the old inline retry matched on.
func (o *OpenAIV1) Chat(ctx context.Context, prompt prompts.PromptTemplate, parameters map[string]string) ([]string, error) {
	return o.chat(ctx, prompt, parameters)
}

func (o *OpenAIV1) chat(ctx context.Context, prompt prompts.PromptTemplate, parameters map[string]string) ([]string, error) {
Expand All @@ -62,7 +50,6 @@ func (o *OpenAIV1) chat(ctx context.Context, prompt prompts.PromptTemplate, para
if err != nil {
return nil, err
}
o.log.Debugf("final prompt: %s", p)

data := map[string]interface{}{
"model": model,
Expand Down
3 changes: 1 addition & 2 deletions pkg/llm/client/openai/v1/client.go
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,6 @@ func (o *OpenAIV1) request(ctx context.Context, path string, method string, data
req.Header.Set("Content-Type", "application/json; charset=utf-8")
req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", o.key))

o.log.Debugf("request: %s", uri)
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
Expand All @@ -108,7 +107,7 @@ func (o *OpenAIV1) request(ctx context.Context, path string, method string, data
time.Sleep(time.Second * 30)
continue
}
//o.log.Debugf("response: %s", respBody)
o.log.Debugf("openai response: %s", respBody)
return respBody, nil
}
}
Expand Down
14 changes: 1 addition & 13 deletions pkg/llm/client/openai/v1/completion.go
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,6 @@ package v1
import (
"context"
"encoding/json"
"strings"
"time"

"github.com/basenana/friday/pkg/llm/prompts"
)
Expand All @@ -42,17 +40,7 @@ type Choice struct {
}

// Completion renders prompt with parameters and sends it to the OpenAI
// completion endpoint via the internal o.completion helper, returning the
// generated text choices.
//
// The displayed block carried leftover pre-refactor retry logic followed by
// an unreachable second return; retries on rate limiting appear to live in
// the shared HTTP request loop now, so Completion is a plain delegation.
func (o *OpenAIV1) Completion(ctx context.Context, prompt prompts.PromptTemplate, parameters map[string]string) ([]string, error) {
	return o.completion(ctx, prompt, parameters)
}

func (o *OpenAIV1) completion(ctx context.Context, prompt prompts.PromptTemplate, parameters map[string]string) ([]string, error) {
Expand Down
14 changes: 1 addition & 13 deletions pkg/llm/client/openai/v1/embedding.go
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,6 @@ package v1
import (
"context"
"encoding/json"
"strings"
"time"
)

type EmbeddingResult struct {
Expand All @@ -46,17 +44,7 @@ type Usage struct {
}

// Embedding requests an embedding vector for doc from the OpenAI embedding
// endpoint via the internal o.embedding helper.
//
// The displayed block carried leftover pre-refactor retry logic followed by
// an unreachable second return; retries on rate limiting appear to live in
// the shared HTTP request loop now, so Embedding is a plain delegation.
func (o *OpenAIV1) Embedding(ctx context.Context, doc string) (*EmbeddingResult, error) {
	return o.embedding(ctx, doc)
}

func (o *OpenAIV1) embedding(ctx context.Context, doc string) (*EmbeddingResult, error) {
Expand Down

0 comments on commit 89234e9

Please sign in to comment.