Skip to content

Commit

Permalink
feat: OpenAI explicit value for MaxToken and Temp
Browse files Browse the repository at this point in the history
When k8sgpt talks to a vLLM backend, the default MaxTokens is 16,
which is far too small.
Since most models (e.g. Llama 1) support at least 2048 tokens, use
2048 as a safe explicit value.

Signed-off-by: Peter Pan <Peter.Pan@daocloud.io>
  • Loading branch information
panpan0000 committed Sep 15, 2023
1 parent 0325724 commit ed138fb
Showing 1 changed file with 14 additions and 0 deletions.
14 changes: 14 additions & 0 deletions pkg/ai/openai.go
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,15 @@ type OpenAIClient struct {
model string
}

// Default sampling parameters sent with every chat-completion request.
// Explicit values are set because some OpenAI-compatible backends
// (e.g. vLLM) default max_tokens to 16, which truncates responses;
// 2048 is chosen as a safe bound supported by most models
// (see commit message). The remaining values mirror the OpenAI API
// defaults so behavior is unchanged for the official backend.
const (
// OpenAI completion parameters
maxToken = 2048 // upper bound on generated tokens per completion; avoids vLLM's tiny default of 16
temperature = 0.7 // sampling temperature; moderate randomness
presencePenalty = 0.0 // no penalty for introducing already-mentioned topics (API default)
frequencyPenalty = 0.0 // no penalty on repeated tokens (API default)
topP = 1.0 // nucleus sampling mass; 1.0 considers the full distribution (API default)
)

func (c *OpenAIClient) Configure(config IAIConfig, language string) error {
token := config.GetPassword()
defaultConfig := openai.DefaultConfig(token)
Expand Down Expand Up @@ -66,6 +75,11 @@ func (c *OpenAIClient) GetCompletion(ctx context.Context, prompt string, promptT
Content: fmt.Sprintf(promptTmpl, c.language, prompt),
},
},
MaxTokens: maxToken,
Temperature: temperature,
PresencePenalty: presencePenalty,
FrequencyPenalty: frequencyPenalty,
TopP: topP,
})
if err != nil {
return "", err
Expand Down

0 comments on commit ed138fb

Please sign in to comment.