diff --git a/README.md b/README.md
index 9c49da9ea2..0111379044 100644
--- a/README.md
+++ b/README.md
@@ -431,6 +431,8 @@ _Adding a remote cache_
 * AWS S3
   * _As a prerequisite `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` are required as environmental variables._
   * Configuration, ``` k8sgpt cache add s3 --region --bucket ```
+  * Minio Configuration with HTTP endpoint ``` k8sgpt cache add s3 --bucket --endpoint ```
+  * Minio Configuration with HTTPs endpoint, skipping TLS verification ``` k8sgpt cache add s3 --bucket --endpoint --insecure```
   * K8sGPT will create the bucket if it does not exist
 * Azure Storage
   * We support a number of [techniques](https://learn.microsoft.com/en-us/azure/developer/go/azure-sdk-authentication?tabs=bash#2-authenticate-with-azure) to authenticate against Azure
diff --git a/cmd/cache/add.go b/cmd/cache/add.go
index 3a71192703..926379fcf8 100644
--- a/cmd/cache/add.go
+++ b/cmd/cache/add.go
@@ -30,6 +30,8 @@ var (
 	storageAccount string
 	containerName  string
 	projectId      string
+	endpoint       string
+	insecure       bool
 )
 
 // addCmd represents the add command
@@ -48,7 +50,7 @@ var addCmd = &cobra.Command{
 		}
 		fmt.Println(color.YellowString("Adding remote based cache"))
 		cacheType := args[0]
-		remoteCache, err := cache.NewCacheProvider(cacheType, bucketname, region, storageAccount, containerName, projectId)
+		remoteCache, err := cache.NewCacheProvider(cacheType, bucketName, region, endpoint, storageAccount, containerName, projectId, insecure)
 		if err != nil {
 			color.Red("Error: %v", err)
 			os.Exit(1)
@@ -63,9 +65,10 @@ var addCmd = &cobra.Command{
 
 func init() {
 	CacheCmd.AddCommand(addCmd)
-	addCmd.Flags().StringVarP(&region, "region", "r", "", "The region to use for the AWS S3 or GCS cache")
-	addCmd.Flags().StringVarP(&bucketname, "bucket", "b", "", "The name of the AWS S3 bucket to use for the cache")
-	addCmd.MarkFlagsRequiredTogether("region", "bucket")
+	addCmd.Flags().StringVarP(&region, "region", "r", "us-east-1", "The region to use for the AWS S3 or GCS cache")
+	addCmd.Flags().StringVarP(&endpoint, "endpoint", "e", "", "The S3 or minio endpoint")
+	addCmd.Flags().BoolVarP(&insecure, "insecure", "i", false, "Skip TLS verification for S3/Minio custom endpoint")
+	addCmd.Flags().StringVarP(&bucketName, "bucket", "b", "", "The name of the AWS S3 bucket to use for the cache")
 	addCmd.Flags().StringVarP(&projectId, "projectid", "p", "", "The GCP project ID")
 	addCmd.Flags().StringVarP(&storageAccount, "storageacc", "s", "", "The Azure storage account name of the container")
 	addCmd.Flags().StringVarP(&containerName, "container", "c", "", "The Azure container name to use for the cache")
diff --git a/cmd/cache/cache.go b/cmd/cache/cache.go
index 27042aa6fa..39d531dda2 100644
--- a/cmd/cache/cache.go
+++ b/cmd/cache/cache.go
@@ -18,10 +18,6 @@ import (
 	"github.com/spf13/cobra"
 )
 
-var (
-	bucketname string
-)
-
 // cacheCmd represents the cache command
 var CacheCmd = &cobra.Command{
 	Use:   "cache",
diff --git a/cmd/serve/serve.go b/cmd/serve/serve.go
index bf26461499..ab8fb748fb 100644
--- a/cmd/serve/serve.go
+++ b/cmd/serve/serve.go
@@ -79,13 +79,13 @@ var ServeCmd = &cobra.Command{
 		envIsSet := backend != "" || password != "" || model != ""
 		if envIsSet {
 			aiProvider = &ai.AIProvider{
-				Name:     backend,
-				Password: password,
-				Model:    model,
-				BaseURL:  baseURL,
-				Engine:   engine,
+				Name:          backend,
+				Password:      password,
+				Model:         model,
+				BaseURL:       baseURL,
+				Engine:        engine,
 				ProxyEndpoint: proxyEndpoint,
-				Temperature: temperature(),
+				Temperature:   temperature(),
 			}
 
 			configAI.Providers = append(configAI.Providers, *aiProvider)
diff --git a/go.mod b/go.mod
index dbb79f78e3..e17ac48c5d 100644
--- a/go.mod
+++ b/go.mod
@@ -26,7 +26,7 @@ require github.com/adrg/xdg v0.4.0
 require (
 	buf.build/gen/go/k8sgpt-ai/k8sgpt/grpc-ecosystem/gateway/v2 v2.19.1-20240213144542-6e830f3fdf19.1
 	buf.build/gen/go/k8sgpt-ai/k8sgpt/grpc/go v1.3.0-20240213144542-6e830f3fdf19.2
-	buf.build/gen/go/k8sgpt-ai/k8sgpt/protocolbuffers/go v1.32.0-20240213144542-6e830f3fdf19.1
+	buf.build/gen/go/k8sgpt-ai/k8sgpt/protocolbuffers/go v1.33.0-20240406062209-1cc152efbf5c.1
 	cloud.google.com/go/storage v1.40.0
 	cloud.google.com/go/vertexai v0.7.1
 	github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1
diff --git a/go.sum b/go.sum
index adc9c86695..78f7d385f4 100644
--- a/go.sum
+++ b/go.sum
@@ -17,6 +17,8 @@ buf.build/gen/go/k8sgpt-ai/k8sgpt/grpc/go v1.3.0-20240213144542-6e830f3fdf19.2/g
 buf.build/gen/go/k8sgpt-ai/k8sgpt/protocolbuffers/go v1.28.1-20240213144542-6e830f3fdf19.4/go.mod h1:WyRj8OIsAABLNsAELw73BT16v7vvJdEVv771fxX9pJI=
 buf.build/gen/go/k8sgpt-ai/k8sgpt/protocolbuffers/go v1.32.0-20240213144542-6e830f3fdf19.1 h1:YJ13kOhQHoOe4eMd3CqFFeTyQvVBRmeBGLqH/QLuOjQ=
 buf.build/gen/go/k8sgpt-ai/k8sgpt/protocolbuffers/go v1.32.0-20240213144542-6e830f3fdf19.1/go.mod h1:4QGFkgjJ3Wm1EBhQ6tOkaKihV4bFF6DvhTk1r9ZhFOE=
+buf.build/gen/go/k8sgpt-ai/k8sgpt/protocolbuffers/go v1.33.0-20240406062209-1cc152efbf5c.1 h1:rx7Res/Ji345EbuTWps4sxH2JQHmvEyoe/5wLFZW8nA=
+buf.build/gen/go/k8sgpt-ai/k8sgpt/protocolbuffers/go v1.33.0-20240406062209-1cc152efbf5c.1/go.mod h1:BQLbAK4GBQ4xEyMX/G1KEt+4vsa6EiOPD/Rb3VswwI0=
 cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
 cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
 cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
diff --git a/pkg/ai/azureopenai.go b/pkg/ai/azureopenai.go
index 34cd2a46c8..ffeda18d3d 100644
--- a/pkg/ai/azureopenai.go
+++ b/pkg/ai/azureopenai.go
@@ -34,7 +34,7 @@ func (c *AzureAIClient) Configure(config IAIConfig) error {
 		return azureModelMapping[model]
 	}
-	
+
 	if proxyEndpoint != "" {
 		proxyUrl, err := url.Parse(proxyEndpoint)
 		if err != nil {
diff --git a/pkg/ai/openai.go b/pkg/ai/openai.go
index b0e38e86a4..d047722a1b 100644
--- a/pkg/ai/openai.go
+++ b/pkg/ai/openai.go
@@ -63,7 +63,7 @@ func (c *OpenAIClient) Configure(config IAIConfig) error {
 			Transport: transport,
 		}
 	}
-	
+
 	client := openai.NewClientWithConfig(defaultConfig)
 	if client == nil {
 		return errors.New("error creating OpenAI client")
diff --git a/pkg/cache/cache.go b/pkg/cache/cache.go
index 90006ca76e..00b2f712c2 100644
--- a/pkg/cache/cache.go
+++ b/pkg/cache/cache.go
@@ -47,7 +47,7 @@ func ParseCacheConfiguration() (CacheProvider, error) {
 	return cacheInfo, nil
 }
 
-func NewCacheProvider(cacheType, bucketname, region, storageAccount, containerName, projectId string) (CacheProvider, error) {
+func NewCacheProvider(cacheType, bucketname, region, endpoint, storageAccount, containerName, projectId string, insecure bool) (CacheProvider, error) {
 	cProvider := CacheProvider{}
 
 	switch {
@@ -61,6 +61,8 @@ func NewCacheProvider(cacheType, bucketname, region, storageAccount, containerNa
 	case cacheType == "s3":
 		cProvider.S3.BucketName = bucketname
 		cProvider.S3.Region = region
+		cProvider.S3.Endpoint = endpoint
+		cProvider.S3.InsecureSkipVerify = insecure
 	default:
 		return CacheProvider{}, status.Error(codes.Internal, fmt.Sprintf("%s is not a valid option", cacheType))
 	}
diff --git a/pkg/cache/s3_based.go b/pkg/cache/s3_based.go
index 91c998d64a..f3a8b5645c 100644
--- a/pkg/cache/s3_based.go
+++ b/pkg/cache/s3_based.go
@@ -2,7 +2,9 @@ package cache
 
 import (
 	"bytes"
+	"crypto/tls"
 	"log"
+	"net/http"
 
 	"github.com/aws/aws-sdk-go/aws"
 	"github.com/aws/aws-sdk-go/aws/session"
@@ -17,17 +19,16 @@ type S3Cache struct {
 }
 
 type S3CacheConfiguration struct {
-	Region     string `mapstructure:"region" yaml:"region,omitempty"`
-	BucketName string `mapstructure:"bucketname" yaml:"bucketname,omitempty"`
+	Region             string `mapstructure:"region" yaml:"region,omitempty"`
+	BucketName         string `mapstructure:"bucketname" yaml:"bucketname,omitempty"`
+	Endpoint           string `mapstructure:"endpoint" yaml:"endpoint,omitempty"`
+	InsecureSkipVerify bool   `mapstructure:"insecure" yaml:"insecure,omitempty"`
 }
 
 func (s *S3Cache) Configure(cacheInfo CacheProvider) error {
 	if cacheInfo.S3.BucketName == "" {
 		log.Fatal("Bucket name not configured")
 	}
-	if cacheInfo.S3.Region == "" {
-		log.Fatal("Region not configured")
-	}
 	s.bucketName = cacheInfo.S3.BucketName
 
 	sess := session.Must(session.NewSessionWithOptions(session.Options{
@@ -36,6 +37,15 @@ func (s *S3Cache) Configure(cacheInfo CacheProvider) error {
 			Region: aws.String(cacheInfo.S3.Region),
 		},
 	}))
+	if cacheInfo.S3.Endpoint != "" {
+		sess.Config.Endpoint = &cacheInfo.S3.Endpoint
+		sess.Config.S3ForcePathStyle = aws.Bool(true)
+		transport := &http.Transport{
+			TLSClientConfig: &tls.Config{InsecureSkipVerify: cacheInfo.S3.InsecureSkipVerify},
+		}
+		customClient := &http.Client{Transport: transport}
+		sess.Config.HTTPClient = customClient
+	}
 
 	s3Client := s3.New(sess)
 
diff --git a/pkg/server/config.go b/pkg/server/config.go
index 3425878d5e..47942cf8bb 100644
--- a/pkg/server/config.go
+++ b/pkg/server/config.go
@@ -9,6 +9,16 @@ import (
 	"google.golang.org/grpc/status"
 )
 
+const (
+	notUsedBucket        = ""
+	notUsedRegion        = ""
+	notUsedEndpoint      = ""
+	notUsedStorageAcc    = ""
+	notUsedContainerName = ""
+	notUsedProjectId     = ""
+	notUsedInsecure      = false
+)
+
 func (h *handler) AddConfig(ctx context.Context, i *schemav1.AddConfigRequest) (
 	*schemav1.AddConfigResponse,
 	error,
 ) {
@@ -23,11 +33,11 @@ func (h *handler) AddConfig(ctx context.Context, i *schemav1.AddConfigRequest) (
 
 	switch i.Cache.GetCacheType().(type) {
 	case *schemav1.Cache_AzureCache:
-		remoteCache, err = cache.NewCacheProvider("azure", "", "", i.Cache.GetAzureCache().StorageAccount, i.Cache.GetAzureCache().ContainerName, "")
+		remoteCache, err = cache.NewCacheProvider("azure", notUsedBucket, notUsedRegion, notUsedEndpoint, i.Cache.GetAzureCache().StorageAccount, i.Cache.GetAzureCache().ContainerName, notUsedProjectId, notUsedInsecure)
 	case *schemav1.Cache_S3Cache:
-		remoteCache, err = cache.NewCacheProvider("s3", i.Cache.GetS3Cache().BucketName, i.Cache.GetS3Cache().Region, "", "", "")
+		remoteCache, err = cache.NewCacheProvider("s3", i.Cache.GetS3Cache().BucketName, i.Cache.GetS3Cache().Region, i.Cache.GetS3Cache().Endpoint, notUsedStorageAcc, notUsedContainerName, notUsedProjectId, i.Cache.GetS3Cache().Insecure)
 	case *schemav1.Cache_GcsCache:
-		remoteCache, err = cache.NewCacheProvider("gcs", i.Cache.GetGcsCache().BucketName, i.Cache.GetGcsCache().Region, "", "", i.Cache.GetGcsCache().GetProjectId())
+		remoteCache, err = cache.NewCacheProvider("gcs", i.Cache.GetGcsCache().BucketName, i.Cache.GetGcsCache().Region, notUsedEndpoint, notUsedStorageAcc, notUsedContainerName, i.Cache.GetGcsCache().GetProjectId(), notUsedInsecure)
 	default:
 		return resp, status.Error(codes.InvalidArgument, "Invalid cache configuration")
 	}
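
For orientation, the session wiring this patch adds to `pkg/cache/s3_based.go` can be exercised on its own. The sketch below is an illustration under assumptions, not part of the patch: the MinIO endpoint and region are hypothetical placeholders, and credentials are assumed to come from the `AWS_ACCESS_KEY_ID`/`AWS_SECRET_ACCESS_KEY` environment variables mentioned in the README. It builds the same kind of aws-sdk-go session the patch constructs (custom endpoint, path-style addressing, optional skip of TLS verification) and lists buckets to verify connectivity.

```go
package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"net/http"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3"
)

func main() {
	// Hypothetical MinIO endpoint; replace with your own. Credentials are read
	// from AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY in the environment.
	endpoint := "https://minio.example.local:9000"
	insecure := true // mirrors the --insecure flag: skip TLS certificate verification

	sess := session.Must(session.NewSessionWithOptions(session.Options{
		Config: aws.Config{
			Region:           aws.String("us-east-1"), // same default the patch gives --region
			Endpoint:         aws.String(endpoint),    // custom S3-compatible endpoint
			S3ForcePathStyle: aws.Bool(true),          // MinIO expects path-style addressing
			HTTPClient: &http.Client{
				Transport: &http.Transport{
					TLSClientConfig: &tls.Config{InsecureSkipVerify: insecure},
				},
			},
		},
	}))

	// List buckets to confirm the endpoint and credentials work.
	out, err := s3.New(sess).ListBuckets(&s3.ListBucketsInput{})
	if err != nil {
		log.Fatalf("list buckets: %v", err)
	}
	for _, b := range out.Buckets {
		fmt.Println(*b.Name)
	}
}
```

With the patch applied, the equivalent CLI invocation would be along the lines of `k8sgpt cache add s3 --bucket <name> --endpoint <minio url> --insecure` (placeholders, not literal values); since `--region` now defaults to `us-east-1`, the previous requirement to pass `--region` together with `--bucket` is dropped.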