Merge pull request #16 from ekkinox/feat/config-model
Configurable model
ekkinox authored Apr 18, 2023
2 parents 9fd7ee6 + 2454c37 · commit 7987203
Showing 6 changed files with 35 additions and 3 deletions.
CHANGELOG.md (6 additions, 0 deletions)

```diff
@@ -2,6 +2,12 @@

 All notable changes to this project will be documented in this file.

+## 0.4.0
+
+### Added
+
+- Configuration for OpenAI API model (default gpt-3.5-turbo)
+
 ## 0.3.0

 ### Added
```
ai/engine.go (2 additions, 2 deletions)

```diff
@@ -127,7 +127,7 @@ func (e *Engine) ExecCompletion(input string) (*EngineExecOutput, error) {
 	resp, err := e.client.CreateChatCompletion(
 		ctx,
 		openai.ChatCompletionRequest{
-			Model:     openai.GPT3Dot5Turbo,
+			Model:     e.config.GetAiConfig().GetModel(),
 			MaxTokens: e.config.GetAiConfig().GetMaxTokens(),
 			Messages:  e.prepareCompletionMessages(),
 		},
@@ -169,7 +169,7 @@ func (e *Engine) ChatStreamCompletion(input string) error {
 	e.appendUserMessage(input)

 	req := openai.ChatCompletionRequest{
-		Model:     openai.GPT3Dot5Turbo,
+		Model:     e.config.GetAiConfig().GetModel(),
 		MaxTokens: e.config.GetAiConfig().GetMaxTokens(),
 		Messages:  e.prepareCompletionMessages(),
 		Stream:    true,
```
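For reference, here is a minimal standalone sketch of the resulting pattern: the model name flows from configuration into the request instead of being hard-coded. The environment-variable lookup is illustrative only; yo itself resolves the value from `~/.config/yo.json` via viper:

```go
package main

import (
	"context"
	"fmt"
	"os"

	openai "github.com/sashabaranov/go-openai"
)

func main() {
	// Illustrative stand-in for yo's config lookup: read the model name
	// from the environment and fall back to the same default the PR uses.
	model := os.Getenv("OPENAI_MODEL")
	if model == "" {
		model = openai.GPT3Dot5Turbo
	}

	client := openai.NewClient(os.Getenv("OPENAI_KEY"))
	resp, err := client.CreateChatCompletion(
		context.Background(),
		openai.ChatCompletionRequest{
			Model:     model, // configurable, no longer hard-coded
			MaxTokens: 1000,
			Messages: []openai.ChatCompletionMessage{
				{Role: openai.ChatMessageRoleUser, Content: "Say hello"},
			},
		},
	)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	fmt.Println(resp.Choices[0].Message.Content)
}
```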
config/ai.go (6 additions, 0 deletions)

```diff
@@ -2,13 +2,15 @@ package config

 const (
 	openai_key         = "OPENAI_KEY"
+	openai_model       = "OPENAI_MODEL"
 	openai_proxy       = "OPENAI_PROXY"
 	openai_temperature = "OPENAI_TEMPERATURE"
 	openai_max_tokens  = "OPENAI_MAX_TOKENS"
 )

 type AiConfig struct {
 	key         string
+	model       string
 	proxy       string
 	temperature float64
 	maxTokens   int
@@ -18,6 +20,10 @@ func (c AiConfig) GetKey() string {
 	return c.key
 }

+func (c AiConfig) GetModel() string {
+	return c.model
+}
+
 func (c AiConfig) GetProxy() string {
 	return c.proxy
 }
```
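One subtlety of the plain getter: if the `OPENAI_MODEL` entry is ever empty, `GetModel()` returns an empty string and the API request would likely be rejected. The PR avoids this by writing the default into the config at setup time (see `config/config.go` below); a hypothetical defensive alternative, assuming the `openai` import in this package, would be a fallback inside the getter itself:

```go
// Hypothetical variant, not part of this PR: fall back to the library
// default when no model has been configured.
func (c AiConfig) GetModel() string {
	if c.model == "" {
		return openai.GPT3Dot5Turbo
	}
	return c.model
}
```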
config/config.go (4 additions, 0 deletions)

```diff
@@ -4,6 +4,8 @@ import (
 	"fmt"
 	"strings"

+	"github.com/sashabaranov/go-openai"
+
 	"github.com/ekkinox/yo/system"
 	"github.com/spf13/viper"
 )
@@ -39,6 +41,7 @@ func NewConfig() (*Config, error) {
 	return &Config{
 		ai: AiConfig{
 			key:         viper.GetString(openai_key),
+			model:       viper.GetString(openai_model),
 			proxy:       viper.GetString(openai_proxy),
 			temperature: viper.GetFloat64(openai_temperature),
 			maxTokens:   viper.GetInt(openai_max_tokens),
@@ -55,6 +58,7 @@ func WriteConfig(key string, write bool) (*Config, error) {
 	system := system.Analyse()
 	// ai defaults
 	viper.Set(openai_key, key)
+	viper.Set(openai_model, openai.GPT3Dot5Turbo)
 	viper.SetDefault(openai_proxy, "")
 	viper.SetDefault(openai_temperature, 0.2)
 	viper.SetDefault(openai_max_tokens, 1000)
```
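Note the split between `viper.Set` and `viper.SetDefault` here: `Set` is viper's highest-precedence source and overrides values read from the config file, while `SetDefault` only applies when no other source provides the key. A standalone sketch of that precedence (not part of the PR):

```go
package main

import (
	"fmt"

	"github.com/spf13/viper"
)

func main() {
	// SetDefault is the lowest-precedence source: used only if nothing
	// else (file, env, explicit Set) provides the key.
	viper.SetDefault("openai_model", "gpt-3.5-turbo")
	fmt.Println(viper.GetString("openai_model")) // gpt-3.5-turbo

	// Set is the highest-precedence source: it wins over file values.
	viper.Set("openai_model", "gpt-4")
	fmt.Println(viper.GetString("openai_model")) // gpt-4
}
```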
config/config_test.go (6 additions, 0 deletions)

```diff
@@ -5,6 +5,8 @@ import (
 	"strings"
 	"testing"

+	"github.com/sashabaranov/go-openai"
+
 	"github.com/ekkinox/yo/system"

 	"github.com/spf13/viper"
@@ -24,6 +26,7 @@ func setupViper(t *testing.T) {
 	viper.SetConfigName(strings.ToLower(system.GetApplicationName()))
 	viper.AddConfigPath("/tmp/")
 	viper.Set(openai_key, "test_key")
+	viper.Set(openai_model, openai.GPT3Dot5Turbo)
 	viper.Set(openai_proxy, "test_proxy")
 	viper.Set(openai_temperature, 0.2)
 	viper.Set(openai_max_tokens, 2000)
@@ -46,6 +49,7 @@ func testNewConfig(t *testing.T) {
 	require.NoError(t, err)

 	assert.Equal(t, "test_key", cfg.GetAiConfig().GetKey())
+	assert.Equal(t, openai.GPT3Dot5Turbo, cfg.GetAiConfig().GetModel())
 	assert.Equal(t, "test_proxy", cfg.GetAiConfig().GetProxy())
 	assert.Equal(t, 0.2, cfg.GetAiConfig().GetTemperature())
 	assert.Equal(t, 2000, cfg.GetAiConfig().GetMaxTokens())
@@ -63,6 +67,7 @@ func testWriteConfig(t *testing.T) {
 	require.NoError(t, err)

 	assert.Equal(t, "new_test_key", cfg.GetAiConfig().GetKey())
+	assert.Equal(t, openai.GPT3Dot5Turbo, cfg.GetAiConfig().GetModel())
 	assert.Equal(t, "test_proxy", cfg.GetAiConfig().GetProxy())
 	assert.Equal(t, 0.2, cfg.GetAiConfig().GetTemperature())
 	assert.Equal(t, 2000, cfg.GetAiConfig().GetMaxTokens())
@@ -72,6 +77,7 @@
 	assert.NotNil(t, cfg.GetSystemConfig())

 	assert.Equal(t, "new_test_key", viper.GetString(openai_key))
+	assert.Equal(t, openai.GPT3Dot5Turbo, viper.GetString(openai_model))
 	assert.Equal(t, "test_proxy", viper.GetString(openai_proxy))
 	assert.Equal(t, 0.2, viper.GetFloat64(openai_temperature))
 	assert.Equal(t, 2000, viper.GetInt(openai_max_tokens))
```
docs/_pages/getting-started.md (11 additions, 1 deletion)

````diff
@@ -38,6 +38,7 @@ It will then generate your configuration in the file `~/.config/yo.json`, with t
 ```json
 {
 	"openai_key": "sk-xxxxxxxxx",    // OpenAI API key (mandatory)
+	"openai_model": "gpt-3.5-turbo", // OpenAI API model (default gpt-3.5-turbo)
 	"openai_proxy": "",              // OpenAI API proxy (default disabled)
 	"openai_temperature": 0.2,       // OpenAI API temperature (default 0.2)
 	"openai_max_tokens": 1000,       // OpenAI API max tokens (default 1000)
@@ -52,7 +53,16 @@ Note that in `REPL` mode, you can press `ctrl+s` at any time to edit settings:

 ## Fine tuning

-In the `~/.config/yo.json` config file, you can use the `user_preferences` entry to express any preferences in natural language:
+### Model
+
+In the `~/.config/yo.json` config file, you can use the `openai_model` entry to configure the AI model you want to use.
+By default, the model `gpt-3.5-turbo` is used.
+
+You can find the list of [supported models here](https://platform.openai.com/docs/models/overview).
+
+### Preferences
+
+In the `~/.config/yo.json` config file, you can also use the `user_preferences` entry to express any preferences in natural language:

 ```json
 {
````
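For example, to point yo at a different chat model (a hypothetical override; your OpenAI account must have access to the model you name), you could change the entry in `~/.config/yo.json`:

```json
{
  "openai_model": "gpt-4"
}
```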
