Add permitPool support to S3 #2466

Merged · 4 commits · Mar 26, 2017
Changes from all commits
31 changes: 29 additions & 2 deletions physical/s3.go
@@ -6,6 +6,7 @@ import (
"io"
"os"
"sort"
"strconv"
"strings"
"time"

@@ -16,6 +17,7 @@ import (
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/hashicorp/errwrap"
"github.com/hashicorp/vault/helper/awsutil"
)

@@ -25,6 +27,7 @@ type S3Backend struct {
bucket string
client *s3.S3
logger log.Logger
permitPool *PermitPool
}

// newS3Backend constructs a S3 backend using a pre-existing
@@ -85,10 +88,23 @@ func newS3Backend(conf map[string]string, logger log.Logger) (Backend, error) {
return nil, fmt.Errorf("unable to access bucket '%s': %v", bucket, err)
}

maxParStr, ok := conf["max_parallel"]
var maxParInt int
if ok {
maxParInt, err = strconv.Atoi(maxParStr)
if err != nil {
return nil, errwrap.Wrapf("failed parsing max_parallel parameter: {{err}}", err)
}
if logger.IsDebug() {
logger.Debug("s3: max_parallel set", "max_parallel", maxParInt)
}
}

s := &S3Backend{
client: s3conn,
bucket: bucket,
logger: logger,
permitPool: NewPermitPool(maxParInt),
}
return s, nil
}
@@ -97,6 +113,9 @@ func newS3Backend(conf map[string]string, logger log.Logger) (Backend, error) {
func (s *S3Backend) Put(entry *Entry) error {
defer metrics.MeasureSince([]string{"s3", "put"}, time.Now())

s.permitPool.Acquire()
defer s.permitPool.Release()

_, err := s.client.PutObject(&s3.PutObjectInput{
Bucket: aws.String(s.bucket),
Key: aws.String(entry.Key),
@@ -114,6 +133,9 @@ func (s *S3Backend) Put(entry *Entry) error {
func (s *S3Backend) Get(key string) (*Entry, error) {
defer metrics.MeasureSince([]string{"s3", "get"}, time.Now())

s.permitPool.Acquire()
defer s.permitPool.Release()

resp, err := s.client.GetObject(&s3.GetObjectInput{
Bucket: aws.String(s.bucket),
Key: aws.String(key),
@@ -122,9 +144,8 @@ func (s *S3Backend) Get(key string) (*Entry, error) {
// Return nil on 404s, error on anything else
if awsErr.StatusCode() == 404 {
return nil, nil
-} else {
-return nil, err
}
+return nil, err
}
if err != nil {
return nil, err
@@ -151,6 +172,9 @@ func (s *S3Backend) Get(key string) (*Entry, error) {
func (s *S3Backend) Delete(key string) error {
defer metrics.MeasureSince([]string{"s3", "delete"}, time.Now())

s.permitPool.Acquire()
defer s.permitPool.Release()

_, err := s.client.DeleteObject(&s3.DeleteObjectInput{
Bucket: aws.String(s.bucket),
Key: aws.String(key),
@@ -168,6 +192,9 @@ func (s *S3Backend) Delete(key string) error {
func (s *S3Backend) List(prefix string) ([]string, error) {
defer metrics.MeasureSince([]string{"s3", "list"}, time.Now())

s.permitPool.Acquire()
defer s.permitPool.Release()

params := &s3.ListObjectsV2Input{
Bucket: aws.String(s.bucket),
Prefix: aws.String(prefix),
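The change to each of `Put`, `Get`, `Delete`, and `List` follows the same pattern: `Acquire()` a permit before the AWS call and `defer Release()` so the permit is returned on every exit path. `NewPermitPool(maxParInt)` therefore acts as a counting semaphore that caps how many S3 requests are in flight at once. A minimal sketch of that idea, assuming a buffered-channel implementation and a fallback of 128 permits when `max_parallel` is unset (Vault's actual `physical.PermitPool` may differ):

```go
package main

import (
	"fmt"
	"sync"
)

// permitPool is an illustrative counting semaphore with the same
// Acquire/Release surface used in the diff above. It is a sketch,
// not Vault's physical.PermitPool implementation.
type permitPool struct {
	sem chan struct{}
}

func newPermitPool(permits int) *permitPool {
	if permits < 1 {
		permits = 128 // assumed fallback, matching the documented max_parallel default
	}
	return &permitPool{sem: make(chan struct{}, permits)}
}

func (p *permitPool) Acquire() { p.sem <- struct{}{} } // blocks while the pool is exhausted
func (p *permitPool) Release() { <-p.sem }             // frees a slot for the next caller

func main() {
	pool := newPermitPool(2) // at most two "requests" run concurrently

	var wg sync.WaitGroup
	for i := 0; i < 5; i++ {
		wg.Add(1)
		go func(id int) {
			defer wg.Done()
			pool.Acquire()
			defer pool.Release()
			fmt.Println("request", id, "holding a permit") // stands in for an S3 API call
		}(i)
	}
	wg.Wait()
}
```

Deferring `Release` immediately after `Acquire`, as the diff does, keeps the permit accounting correct even when the S3 call returns an error.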
2 changes: 1 addition & 1 deletion website/source/docs/configuration/storage/azure.html.md
@@ -43,7 +43,7 @@ The current implementation is limited to a maximum of 4 megabytes per blob.
- `container` `(string: <required>)` – Specifies the Azure Storage Blob
container name.

- `max_parallel` `(int: 128)` – Specifies The maximum number of concurrent
- `max_parallel` `(string: "128")` – Specifies the maximum number of concurrent
requests to Azure.

## `azure` Examples
2 changes: 1 addition & 1 deletion website/source/docs/configuration/storage/consul.html.md
@@ -69,7 +69,7 @@ at Consul's service discovery layer.
- `disable_registration` `(bool: false)` – Specifies whether Vault should
register itself with Consul.

- `max_parallel` `(int: 128)` – Specifies the maximum number of concurrent
- `max_parallel` `(string: "128")` – Specifies the maximum number of concurrent
requests to Consul.

- `path` `(string: "vault/")` – Specifies the path in Consul's key-value store
2 changes: 1 addition & 1 deletion website/source/docs/configuration/storage/dynamodb.html.md
@@ -43,7 +43,7 @@ see the [official AWS DynamoDB documentation][dynamodb-rw-capacity].
to run Vault in high availability mode. This can also be provided via the
environment variable `DYNAMODB_HA_ENABLED`.

- `max_parallel` `(int: 128)` – Specifies the maximum number of concurrent
- `max_parallel` `(string: "128")` – Specifies the maximum number of concurrent
requests.

- `region` `(string: "us-east-1")` – Specifies the AWS region. This can also be
website/source/docs/configuration/storage/gcs.html.md
@@ -39,7 +39,7 @@ storage "gcs" {
in [JSON format][gcs-private-key]. This can also be provided via the
environment variable `GOOGLE_APPLICATION_CREDENTIALS`.

- `max_parallel` `(int: 128)` – Specifies the maximum number of concurrent
- `max_parallel` `(string: "128")` – Specifies the maximum number of concurrent
requests.

## `gcs` Examples
3 changes: 3 additions & 0 deletions website/source/docs/configuration/storage/s3.html.md
@@ -55,6 +55,9 @@ cause Vault to attempt to retrieve credentials from the AWS metadata service.
- `session_token` `(string: "")` – Specifies the AWS session token. This can
also be provided via the environment variable `AWS_SESSION_TOKEN`.

- `max_parallel` `(string: "128")` – Specifies the maximum number of concurrent
requests to S3.

## `s3` Examples

### Default Example
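The documentation entries in this PR describe `max_parallel` as `(string: "128")` rather than `(int: 128)` because backend options arrive as a `map[string]string` (see the `conf map[string]string` parameter of `newS3Backend`), so the value is converted with `strconv.Atoi`. A minimal sketch of that parsing, using a hypothetical `parseMaxParallel` helper and an example value of `"512"`:

```go
package main

import (
	"fmt"
	"log"
	"strconv"
)

// parseMaxParallel mirrors the validation added in newS3Backend: the option
// arrives as a string and is converted to an int. The helper name and the
// sample value in main are illustrative, not part of the PR.
func parseMaxParallel(conf map[string]string) (int, error) {
	maxParStr, ok := conf["max_parallel"]
	if !ok {
		return 0, nil // unset; the permit pool falls back to its default
	}
	maxParInt, err := strconv.Atoi(maxParStr)
	if err != nil {
		return 0, fmt.Errorf("failed parsing max_parallel parameter: %v", err)
	}
	return maxParInt, nil
}

func main() {
	conf := map[string]string{"max_parallel": "512"} // e.g. from a storage "s3" stanza
	n, err := parseMaxParallel(conf)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("max_parallel =", n) // prints: max_parallel = 512
}
```

A non-numeric value fails at backend construction with the wrapped "failed parsing max_parallel parameter" error rather than being silently ignored.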
2 changes: 1 addition & 1 deletion website/source/docs/configuration/storage/swift.html.md
@@ -40,7 +40,7 @@ storage "swift" {
container. This can also be provided via the environment variable
`OS_CONTAINER`.

- `max_parallel` `(int: 128)` – The maximum number of concurrent requests.
- `max_parallel` `(string: "128")` – The maximum number of concurrent requests.

- `password` `(string: <required>)` – Specifies the OpenStack password. This can
also be provided via the environment variable `OS_PASSWORD`.