diff --git a/README.md b/README.md
index 5b676f478b..847d44c6ee 100644
--- a/README.md
+++ b/README.md
@@ -14,6 +14,8 @@
 It has SRE experience codified into its analyzers and helps to pull out the most relevant information to enrich it with AI.
 
+_Out-of-the-box integration with OpenAI, Azure, Cohere, Amazon Bedrock, and local models._
+
 K8sGPT - K8sGPT gives Kubernetes Superpowers to everyone | Product Hunt
@@ -359,6 +361,45 @@ k8sgpt analyze --explain --backend cohere
+
+Amazon Bedrock provider
+
+
+#### Prerequisites
+Bedrock API access is needed.
+
+
+
+As illustrated below, you will need to enable model access in the [AWS Console](https://eu-central-1.console.aws.amazon.com/bedrock/home?region=eu-central-1#/modelaccess).
+
+In addition to this, you will need to set the following environment variables:
+
+
+```
+- AWS_ACCESS_KEY
+- AWS_SECRET_ACCESS_KEY
+- AWS_DEFAULT_REGION
+```
+
+
+```
+k8sgpt auth add --backend amazonbedrock --model anthropic.claude-v2
+```
+
+TODO: The CLI will currently still prompt for an API key; you can enter any value here.
+
+#### Usage
+
+```
+k8sgpt analyze -e -b amazonbedrock
+
+0 argocd/argocd-application-controller(argocd-application-controller)
+- Error: StatefulSet uses the service argocd/argocd-application-controller which does not exist.
+
+ You're right, I don't have enough context to determine if a StatefulSet is correctly configured to use a non-existent service. A StatefulSet manages Pods with persistent storage, and the Pods are created from the same spec. The service name referenced in the StatefulSet configuration would need to match an existing Kubernetes service for the Pods to connect to. Without more details on the specific StatefulSet and environment, I can't confirm whether the configuration is valid or not.
+```
+
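+This change also adds an optional `--providerRegion` (`-r`) flag to `k8sgpt auth add` (see `cmd/auth/add.go`). A sketch of pinning the Bedrock region explicitly; unsupported or empty values fall back to `us-east-1` via `GetRegionOrDefault`:
+
+```
+k8sgpt auth add --backend amazonbedrock --model anthropic.claude-v2 --providerRegion us-east-1
+```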
+
 Setting a new default AI provider
@@ -376,6 +417,8 @@ Active:
 Unused:
 > localai
 > noopai
+> amazonbedrock
+> cohere
 ```
diff --git a/cmd/auth/add.go b/cmd/auth/add.go
index fcae197152..89ca4da73f 100644
--- a/cmd/auth/add.go
+++ b/cmd/auth/add.go
@@ -41,6 +41,7 @@ var addCmd = &cobra.Command{
             _ = cmd.MarkFlagRequired("engine")
             _ = cmd.MarkFlagRequired("baseurl")
         }
+
     },
     Run: func(cmd *cobra.Command, args []string) {
 
@@ -103,12 +104,13 @@ var addCmd = &cobra.Command{
 
         // create new provider object
         newProvider := ai.AIProvider{
-            Name:        backend,
-            Model:       model,
-            Password:    password,
-            BaseURL:     baseURL,
-            Engine:      engine,
-            Temperature: temperature,
+            Name:           backend,
+            Model:          model,
+            Password:       password,
+            BaseURL:        baseURL,
+            Engine:         engine,
+            Temperature:    temperature,
+            ProviderRegion: providerRegion,
         }
 
         if providerIndex == -1 {
@@ -140,4 +142,6 @@ func init() {
     addCmd.Flags().Float32VarP(&temperature, "temperature", "t", 0.7, "The sampling temperature, value ranges between 0 ( output be more deterministic) and 1 (more random)")
     // add flag for azure open ai engine/deployment name
     addCmd.Flags().StringVarP(&engine, "engine", "e", "", "Azure AI deployment name")
+    // add flag for the amazonbedrock region name
+    addCmd.Flags().StringVarP(&providerRegion, "providerRegion", "r", "", "Provider region name")
 }
diff --git a/cmd/auth/auth.go b/cmd/auth/auth.go
index b64f54f5c6..b66f027a0a 100644
--- a/cmd/auth/auth.go
+++ b/cmd/auth/auth.go
@@ -19,12 +19,13 @@ import (
 )
 
 var (
-    backend     string
-    password    string
-    baseURL     string
-    model       string
-    engine      string
-    temperature float32
+    backend        string
+    password       string
+    baseURL        string
+    model          string
+    engine         string
+    temperature    float32
+    providerRegion string
 )
 
 var configAI ai.AIConfiguration
diff --git a/images/bedrock.png b/images/bedrock.png
new file mode 100644
index 0000000000..501a01701f
Binary files /dev/null and b/images/bedrock.png differ
diff --git a/pkg/ai/amazonbedrock.go b/pkg/ai/amazonbedrock.go
new file mode 100644
index 0000000000..732d4e7bb4
--- /dev/null
+++ b/pkg/ai/amazonbedrock.go
@@ -0,0 +1,196 @@
+package ai
+
+import (
+    "context"
+    "encoding/base64"
+    "encoding/json"
+    "fmt"
+    "strings"
+
+    "github.com/fatih/color"
+
+    "github.com/k8sgpt-ai/k8sgpt/pkg/cache"
+    "github.com/k8sgpt-ai/k8sgpt/pkg/util"
+
+    "github.com/aws/aws-sdk-go/aws"
+    "github.com/aws/aws-sdk-go/aws/session"
+    "github.com/aws/aws-sdk-go/service/bedrockruntime"
+)
+
+// AmazonBedRockClient represents the client for interacting with the Amazon Bedrock service.
+type AmazonBedRockClient struct {
+    client      *bedrockruntime.BedrockRuntime
+    language    string
+    model       string
+    temperature float32
+}
+
+// InvokeModelResponseBody represents the response body structure from the model invocation.
+type InvokeModelResponseBody struct {
+    Completion string `json:"completion"`
+    StopReason string `json:"stop_reason"`
+}
+
+// Amazon Bedrock supported regions: US East (N. Virginia), US West (Oregon), Asia Pacific (Singapore), Asia Pacific (Tokyo), Europe (Frankfurt)
+// https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-bedrock.html#bedrock-regions
+const BEDROCK_DEFAULT_REGION = "us-east-1" // defaults to the us-east-1 region
+
+const (
+    US_East_1      = "us-east-1"
+    US_West_2      = "us-west-2"
+    AP_Southeast_1 = "ap-southeast-1"
+    AP_Northeast_1 = "ap-northeast-1"
+    EU_Central_1   = "eu-central-1"
+)
+
+var BEDROCK_SUPPORTED_REGIONS = []string{
+    US_East_1,
+    US_West_2,
+    AP_Southeast_1,
+    AP_Northeast_1,
+    EU_Central_1,
+}
+
+const (
+    ModelAnthropicClaudeV2        = "anthropic.claude-v2"
+    ModelAnthropicClaudeV1        = "anthropic.claude-v1"
+    ModelAnthropicClaudeInstantV1 = "anthropic.claude-instant-v1"
+)
+
+var BEDROCK_MODELS = []string{
+    ModelAnthropicClaudeV2,
+    ModelAnthropicClaudeV1,
+    ModelAnthropicClaudeInstantV1,
+}
+
+// GetModelOrDefault returns the configured model if it is supported, otherwise the default Bedrock model.
+func GetModelOrDefault(model string) string {
+
+    // Check if the provided model is in the list
+    for _, m := range BEDROCK_MODELS {
+        if m == model {
+            return model // Return the provided model
+        }
+    }
+
+    // Return the default model if the provided model is not in the list
+    return BEDROCK_MODELS[0]
+}
+
+// GetRegionOrDefault returns the configured region if it is supported, otherwise the default Bedrock region.
+func GetRegionOrDefault(region string) string {
+
+    // Check if the provided region is in the list
+    for _, m := range BEDROCK_SUPPORTED_REGIONS {
+        if m == region {
+            return region // Return the provided region
+        }
+    }
+
+    // Return the default region if the provided region is not in the list
+    return BEDROCK_DEFAULT_REGION
+}
+
+// Configure configures the AmazonBedRockClient with the provided configuration and language.
+func (a *AmazonBedRockClient) Configure(config IAIConfig, language string) error {
+
+    // Resolve the Bedrock region and create a new AWS session
+    providerRegion := GetRegionOrDefault(config.GetProviderRegion())
+
+    sess, err := session.NewSession(&aws.Config{
+        Region: aws.String(providerRegion),
+    })
+
+    if err != nil {
+        return err
+    }
+
+    // Create a new BedrockRuntime client
+    a.client = bedrockruntime.New(sess)
+    a.language = language
+    a.model = GetModelOrDefault(config.GetModel())
+    a.temperature = config.GetTemperature()
+
+    return nil
+}
+
+// GetCompletion sends a request to the model for generating completion based on the provided prompt.
+func (a *AmazonBedRockClient) GetCompletion(ctx context.Context, prompt string, promptTmpl string) (string, error) {
+
+    // Prepare the input data for the model invocation
+    request := map[string]interface{}{
+        "prompt":               fmt.Sprintf("\n\nHuman: %s \n\nAssistant:", prompt),
+        "max_tokens_to_sample": 1024,
+        "temperature":          a.temperature,
+        "top_p":                0.9,
+    }
+
+    body, err := json.Marshal(request)
+    if err != nil {
+        return "", err
+    }
+
+    // Build the parameters for the model invocation
+    params := &bedrockruntime.InvokeModelInput{
+        Body:        body,
+        ModelId:     aws.String(a.model),
+        ContentType: aws.String("application/json"),
+        Accept:      aws.String("application/json"),
+    }
+    // Invoke the model
+    resp, err := a.client.InvokeModelWithContext(ctx, params)
+
+    if err != nil {
+        return "", err
+    }
+    // Parse the response body
+    output := &InvokeModelResponseBody{}
+    err = json.Unmarshal(resp.Body, output)
+    if err != nil {
+        return "", err
+    }
+    return output.Completion, nil
+}
+
+// Parse generates a completion for the provided prompt using the Amazon Bedrock model.
+func (a *AmazonBedRockClient) Parse(ctx context.Context, prompt []string, cache cache.ICache, promptTmpl string) (string, error) {
+    inputKey := strings.Join(prompt, " ")
+    // Check for cached data
+    cacheKey := util.GetCacheKey(a.GetName(), a.language, inputKey)
+
+    if !cache.IsCacheDisabled() && cache.Exists(cacheKey) {
+        response, err := cache.Load(cacheKey)
+        if err != nil {
+            return "", err
+        }
+
+        if response != "" {
+            output, err := base64.StdEncoding.DecodeString(response)
+            if err != nil {
+                color.Red("error decoding cached data: %v", err)
+                return "", nil
+            }
+            return string(output), nil
+        }
+    }
+
+    response, err := a.GetCompletion(ctx, inputKey, promptTmpl)
+
+    if err != nil {
+        return "", err
+    }
+
+    err = cache.Store(cacheKey, base64.StdEncoding.EncodeToString([]byte(response)))
+
+    if err != nil {
+        color.Red("error storing value to cache: %v", err)
+        return "", nil
+    }
+
+    return response, nil
+}
+
+// GetName returns the name of the AmazonBedRockClient.
+func (a *AmazonBedRockClient) GetName() string {
+    return "amazonbedrock"
+}
diff --git a/pkg/ai/iai.go b/pkg/ai/iai.go
index b8172d161b..430e04ca4e 100644
--- a/pkg/ai/iai.go
+++ b/pkg/ai/iai.go
@@ -26,6 +26,7 @@ var (
         &LocalAIClient{},
         &NoOpAIClient{},
         &CohereClient{},
+        &AmazonBedRockClient{},
     }
     Backends = []string{
         "openai",
@@ -33,6 +34,7 @@ var (
         "azureopenai",
         "noopai",
         "cohere",
+        "amazonbedrock",
     }
 )
 
@@ -49,6 +51,7 @@ type IAIConfig interface {
     GetBaseURL() string
     GetEngine() string
     GetTemperature() float32
+    GetProviderRegion() string
 }
 
 func NewClient(provider string) IAI {
@@ -67,12 +70,13 @@ type AIConfiguration struct {
 }
 
 type AIProvider struct {
-    Name        string  `mapstructure:"name"`
-    Model       string  `mapstructure:"model"`
-    Password    string  `mapstructure:"password" yaml:"password,omitempty"`
-    BaseURL     string  `mapstructure:"baseurl" yaml:"baseurl,omitempty"`
-    Engine      string  `mapstructure:"engine" yaml:"engine,omitempty"`
-    Temperature float32 `mapstructure:"temperature" yaml:"temperature,omitempty"`
+    Name           string  `mapstructure:"name"`
+    Model          string  `mapstructure:"model"`
+    Password       string  `mapstructure:"password" yaml:"password,omitempty"`
+    BaseURL        string  `mapstructure:"baseurl" yaml:"baseurl,omitempty"`
+    Engine         string  `mapstructure:"engine" yaml:"engine,omitempty"`
+    Temperature    float32 `mapstructure:"temperature" yaml:"temperature,omitempty"`
+    ProviderRegion string  `mapstructure:"providerregion" yaml:"providerregion,omitempty"`
 }
 
 func (p *AIProvider) GetBaseURL() string {
@@ -94,6 +98,10 @@ func (p *AIProvider) GetTemperature() float32 {
     return p.Temperature
 }
 
+func (p *AIProvider) GetProviderRegion() string {
+    return p.ProviderRegion
+}
+
 func NeedPassword(backend string) bool {
     return backend != "localai"
 }
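
For reviewers, a minimal sketch of how the new backend can be exercised programmatically, using only the methods added in `pkg/ai/amazonbedrock.go` and the `AIProvider` fields from `pkg/ai/iai.go`. It assumes AWS credentials are available via the standard environment variables (`AWS_ACCESS_KEY`, `AWS_SECRET_ACCESS_KEY`, `AWS_DEFAULT_REGION`); the `main` wrapper and the prompt text are illustrative only and are not part of this change:

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/k8sgpt-ai/k8sgpt/pkg/ai"
)

func main() {
	// Mirrors what `k8sgpt auth add --backend amazonbedrock` persists: only Model,
	// Temperature, and ProviderRegion matter for Bedrock, since authentication
	// comes from the AWS SDK credential chain rather than a stored API key.
	provider := ai.AIProvider{
		Name:           "amazonbedrock",
		Model:          "anthropic.claude-v2",
		Temperature:    0.7,
		ProviderRegion: "us-east-1",
	}

	client := &ai.AmazonBedRockClient{}
	if err := client.Configure(&provider, "english"); err != nil {
		log.Fatal(err)
	}

	// GetCompletion wraps the prompt in the Anthropic "Human:/Assistant:" format
	// and invokes the model via bedrockruntime.InvokeModelWithContext.
	out, err := client.GetCompletion(context.Background(), "Why is my StatefulSet failing?", "")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(out)
}
```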