Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: Migrate Terrakube API to AWS sdk v2 #1451

Merged
merged 2 commits into from
Oct 23, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 5 additions & 5 deletions api/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
<mockserver-spring-test-listener.version>5.15.0</mockserver-spring-test-listener.version>
<snakeyaml.version>2.3</snakeyaml.version>
<quartz.version>2.5.0-rc1</quartz.version>
<aws-sdk.version>1.12.776</aws-sdk.version>
<aws-sdk.version>2.28.27</aws-sdk.version>
<gcp-libraries-bom.version>26.49.0</gcp-libraries-bom.version>
<jjwt.version>0.11.5</jjwt.version>
<jedis.version>5.1.5</jedis.version>
Expand Down Expand Up @@ -249,8 +249,8 @@
<artifactId>azure-storage-blob</artifactId>
</dependency>
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-s3</artifactId>
<groupId>software.amazon.awssdk</groupId>
<artifactId>s3</artifactId>
</dependency>
<dependency>
<groupId>com.google.cloud</groupId>
Expand Down Expand Up @@ -301,8 +301,8 @@
<scope>import</scope>
</dependency>
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-bom</artifactId>
<groupId>software.amazon.awssdk</groupId>
<artifactId>bom</artifactId>
<version>${aws-sdk.version}</version>
<type>pom</type>
<scope>import</scope>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,4 +18,5 @@ public class AwsStorageTypeProperties {
    // Name of the S3 bucket where Terrakube state/output/content objects are stored.
    private String bucketName;
    // AWS region of the bucket (e.g. "us-east-1"); parsed with Region.of(...) in the configuration.
    private String region;
    // Optional custom S3 endpoint (e.g. MinIO); empty when targeting AWS proper.
    private String endpoint;
    // When true, credentials come from the default provider chain (IAM role) instead of access/secret keys.
    private boolean enableRoleAuthentication;
}
Original file line number Diff line number Diff line change
@@ -1,13 +1,18 @@
package org.terrakube.api.plugin.storage.aws;

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.*;
import lombok.Builder;
import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.codec.binary.StringUtils;
import org.apache.sshd.common.util.io.IoUtils;
import org.terrakube.api.plugin.storage.StorageTypeService;
import software.amazon.awssdk.core.ResponseBytes;
import software.amazon.awssdk.core.sync.RequestBody;
import software.amazon.awssdk.core.sync.ResponseTransformer;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.*;

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
Expand All @@ -29,76 +34,65 @@ public class AwsStorageTypeServiceImpl implements StorageTypeService {
private static final String TERRAFORM_TAR_GZ = "content/%s/terraformContent.tar.gz";

@NonNull
private AmazonS3 s3client;
private S3Client s3client;

@NonNull
private String bucketName;

@Override
public byte[] getStepOutput(String organizationId, String jobId, String stepId) {
private byte[] downloadObjectFromBucket(String bucketName, String objectKey) {
byte[] data;
try {
log.info("Searching: tfoutput/{}/{}/{}.tfoutput", organizationId, jobId, stepId);
S3Object s3object = s3client.getObject(bucketName, String.format(BUCKET_LOCATION_OUTPUT, organizationId, jobId, stepId));
S3ObjectInputStream inputStream = s3object.getObjectContent();
data = inputStream.getDelegateStream().readAllBytes();
log.info("Bucket: {} Searching: {}", bucketName, objectKey);

GetObjectRequest objectRequest = GetObjectRequest.builder()
.key(objectKey)
.bucket(bucketName)
.build();
ResponseBytes<GetObjectResponse> objectBytes = s3client.getObject(objectRequest,
ResponseTransformer.toBytes());
data = objectBytes.asByteArray();
} catch (Exception e) {
log.error(S3_ERROR_LOG, e.getMessage());
log.debug(S3_ERROR_LOG, e.getMessage());
data = new byte[0];
}
return data;
}

private void uploadStringToBucket(String bucketName, String blobKey, String data){
PutObjectRequest putObjectRequest = PutObjectRequest.builder()
.bucket(bucketName)
.key(blobKey)
.build();

s3client.putObject(putObjectRequest, RequestBody.fromString(data));
log.info("Upload Object {} completed", blobKey);
}

@Override
public byte[] getStepOutput(String organizationId, String jobId, String stepId) {
return downloadObjectFromBucket(bucketName, String.format(BUCKET_LOCATION_OUTPUT, organizationId, jobId, stepId));
}

@Override
public byte[] getTerraformPlan(String organizationId, String workspaceId, String jobId, String stepId) {
byte[] data;
try {
log.info("Searching: tfstate/{}/{}/{}/{}/terraformLibrary.tfPlan", organizationId, workspaceId, jobId, stepId);
S3Object s3object = s3client.getObject(bucketName, String.format(BUCKET_STATE_LOCATION, organizationId, workspaceId, jobId, stepId));
S3ObjectInputStream inputStream = s3object.getObjectContent();
data = inputStream.getDelegateStream().readAllBytes();
} catch (Exception e) {
log.error(S3_ERROR_LOG, e.getMessage());
data = new byte[0];
}
return data;
return downloadObjectFromBucket(bucketName, String.format(BUCKET_STATE_LOCATION, organizationId, workspaceId, jobId, stepId));
}

@Override
public byte[] getTerraformStateJson(String organizationId, String workspaceId, String stateFileName) {
byte[] data;
try {
log.info("Searching: tfstate/{}/{}/state/{}.json", organizationId, workspaceId, stateFileName);
S3Object s3object = s3client.getObject(bucketName, String.format(BUCKET_STATE_JSON, organizationId, workspaceId, stateFileName));
S3ObjectInputStream inputStream = s3object.getObjectContent();
data = inputStream.getDelegateStream().readAllBytes();
} catch (Exception e) {
log.error(S3_ERROR_LOG, e.getMessage());
data = new byte[0];
}
return data;
return downloadObjectFromBucket(bucketName, String.format(BUCKET_STATE_JSON, organizationId, workspaceId, stateFileName));
}

@Override
public void uploadTerraformStateJson(String organizationId, String workspaceId, String stateJson, String stateJsonHistoryId) {
String blobKey = String.format("tfstate/%s/%s/state/%s.json", organizationId, workspaceId, stateJsonHistoryId);
log.info("terraformJsonStateFile: {}", blobKey);
s3client.putObject(bucketName, blobKey, stateJson);
uploadStringToBucket(bucketName, blobKey, stateJson);
}

@Override
public byte[] getCurrentTerraformState(String organizationId, String workspaceId) {
byte[] data;
try {
log.info("Searching: tfstate/{}/{}/terraform.tfstate", organizationId, workspaceId);
S3Object s3object = s3client.getObject(bucketName, String.format("tfstate/%s/%s/terraform.tfstate", organizationId, workspaceId));
S3ObjectInputStream inputStream = s3object.getObjectContent();
data = inputStream.getDelegateStream().readAllBytes();
} catch (Exception e) {
log.error(S3_ERROR_LOG, e.getMessage());
data = new byte[0];
}
return data;
return downloadObjectFromBucket(bucketName, String.format("tfstate/%s/%s/terraform.tfstate", organizationId, workspaceId));
}

@Override
Expand All @@ -107,34 +101,27 @@ public void uploadState(String organizationId, String workspaceId, String terraf
String rawBlobKey = String.format("tfstate/%s/%s/state/%s.raw.json", organizationId, workspaceId, historyId);
log.info("terraformStateFile: {}", blobKey);
log.info("terraformRawStateFile: {}", rawBlobKey);
s3client.putObject(bucketName, blobKey, terraformState);
s3client.putObject(bucketName, rawBlobKey, terraformState);
uploadStringToBucket(bucketName, blobKey, terraformState);
uploadStringToBucket(bucketName, rawBlobKey, terraformState);
}

@Override
public String saveContext(int jobId, String jobContext) {
String blobKey = String.format(CONTEXT_JSON, jobId);
log.info("context file: {}", String.format(CONTEXT_JSON, jobId));

log.info("context file to bucket: {}", String.format(CONTEXT_JSON, jobId));
byte[] bytes = StringUtils.getBytesUtf8(jobContext);
String utf8EncodedString = StringUtils.newStringUtf8(bytes);

s3client.putObject(bucketName, blobKey, utf8EncodedString);

uploadStringToBucket(bucketName, blobKey, utf8EncodedString);
return jobContext;
}

@Override
public String getContext(int jobId) {
String data;
try {
log.info("Searching: /tfoutput/context/{}/context.json", jobId);
S3Object s3object = s3client.getObject(bucketName, String.format(CONTEXT_JSON, jobId));
S3ObjectInputStream inputStream = s3object.getObjectContent();
data = new String(inputStream.getDelegateStream().readAllBytes(), StandardCharsets.UTF_8);
;
} catch (Exception e) {
log.error(S3_ERROR_LOG, e.getMessage());
byte[] bytes = downloadObjectFromBucket(bucketName, String.format(CONTEXT_JSON, jobId));
if (bytes != null && bytes.length > 0) {
data = new String(bytes, StandardCharsets.UTF_8);
} else {
data = "{}";
}
return data;
Expand All @@ -145,26 +132,28 @@ public void createContentFile(String contentId, InputStream inputStream) {
String blobKey = String.format(TERRAFORM_TAR_GZ, contentId);
log.info("context file: {}", String.format(TERRAFORM_TAR_GZ, contentId));


ObjectMetadata objectMetadata = new ObjectMetadata();
objectMetadata.setContentType("application/gzip");
s3client.putObject(bucketName, blobKey, inputStream, objectMetadata);
try {
PutObjectRequest putObjectRequest = PutObjectRequest.builder()
.bucket(bucketName)
.key(blobKey)
.contentType("application/gzip")
.build();

s3client.putObject(putObjectRequest, RequestBody.fromInputStream(inputStream, IoUtils.toByteArray(inputStream).length));
} catch (IOException e) {
log.error(e.getMessage());
}

}

@Override
public byte[] getContentFile(String contentId) {
byte[] data;
try {
log.info("Searching: content/{}/terraformContent.tar.gz", contentId);
S3Object s3object = s3client.getObject(bucketName, String.format(TERRAFORM_TAR_GZ, contentId));
S3ObjectInputStream inputStream = s3object.getObjectContent();
data = inputStream.getDelegateStream().readAllBytes();
} catch (Exception e) {
log.error(S3_ERROR_LOG, e.getMessage());
data = "".getBytes(Charset.defaultCharset());
byte[] bytes = downloadObjectFromBucket(bucketName, String.format(TERRAFORM_TAR_GZ, contentId));
if (bytes != null && bytes.length > 0) {
return bytes;
} else {
return "".getBytes(Charset.defaultCharset());
}
return data;
}

@Override
Expand All @@ -175,7 +164,7 @@ public void deleteModuleStorage(String organizationName, String moduleName, Stri

@Override
public void deleteWorkspaceOutputData(String organizationId, List<Integer> jobList) {
for (Integer jobId: jobList){
for (Integer jobId : jobList) {
String workspaceOutputFolder = String.format("tfoutput/%s/%s/", organizationId, jobId);
deleteFolderFromBucket(workspaceOutputFolder);
}
Expand All @@ -188,15 +177,16 @@ public void deleteWorkspaceStateData(String organizationId, String workspaceId)
}

private void deleteFolderFromBucket(String prefix) {
ObjectListing objectList = s3client.listObjects(bucketName, prefix);
List<S3ObjectSummary> objectSummeryList = objectList.getObjectSummaries();
String[] keysList = new String[objectSummeryList.size()];
int count = 0;
for (S3ObjectSummary summary : objectSummeryList) {
keysList[count++] = summary.getKey();
log.warn("File {} will be deleted.",summary.getKey());
ListObjectsV2Request listObjectsV2Request = ListObjectsV2Request.builder()
.bucket(bucketName)
.prefix(prefix)
.build();
ListObjectsV2Response listObjectsV2Response = s3client.listObjectsV2(listObjectsV2Request);
List<S3Object> contents = listObjectsV2Response.contents();

for (S3Object content : contents) {
log.warn("Deleting: {}",content.key());
s3client.deleteObject(DeleteObjectRequest.builder().bucket(bucketName).key(content.key()).build());
}
DeleteObjectsRequest deleteObjectsRequest = new DeleteObjectsRequest(bucketName).withKeys(keysList);
s3client.deleteObjects(deleteObjectsRequest);
}
}
Original file line number Diff line number Diff line change
@@ -1,13 +1,5 @@
package org.terrakube.api.plugin.storage.configuration;

import com.amazonaws.ClientConfiguration;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.azure.storage.blob.BlobServiceClient;
import com.azure.storage.blob.BlobServiceClientBuilder;
import com.google.auth.Credentials;
Expand All @@ -29,9 +21,19 @@
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.terrakube.api.plugin.streaming.StreamingService;
import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider;
import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
import software.amazon.awssdk.endpoints.Endpoint;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.endpoints.S3EndpointParams;
import software.amazon.awssdk.services.s3.endpoints.S3EndpointProvider;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.net.URI;
import java.util.concurrent.CompletableFuture;

@Configuration
@EnableConfigurationProperties({
Expand Down Expand Up @@ -61,30 +63,38 @@ public StorageTypeService terraformOutput(StreamingService streamingService, Sto
.build();
break;
case AWS:
AWSCredentials credentials = new BasicAWSCredentials(
awsStorageTypeProperties.getAccessKey(),
awsStorageTypeProperties.getSecretKey()
);

AmazonS3 s3client = null;
if (!awsStorageTypeProperties.getEndpoint().equals("")) {
ClientConfiguration clientConfiguration = new ClientConfiguration();
clientConfiguration.setSignerOverride("AWSS3V4SignerType");

s3client = AmazonS3ClientBuilder
.standard()
.withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(awsStorageTypeProperties.getEndpoint(), awsStorageTypeProperties.getRegion()))
.withCredentials(new AWSStaticCredentialsProvider(credentials))
.withClientConfiguration(clientConfiguration)
.withPathStyleAccessEnabled(true)
.build();
}else
s3client = AmazonS3ClientBuilder
.standard()
.withCredentials(new AWSStaticCredentialsProvider(credentials))
.withRegion(Regions.fromName(awsStorageTypeProperties.getRegion()))
S3Client s3client;
if (awsStorageTypeProperties.getEndpoint() != "") {
log.info("Creating AWS SDK with custom endpoint and custom credentials");
s3client = S3Client.builder()
.region(Region.AWS_GLOBAL)
.credentialsProvider(StaticCredentialsProvider.create(getAwsBasicCredentials(awsStorageTypeProperties)))
.endpointProvider(new S3EndpointProvider() {
@Override
public CompletableFuture<Endpoint> resolveEndpoint(S3EndpointParams endpointParams) {
return CompletableFuture.completedFuture(Endpoint.builder()
.url(URI.create(awsStorageTypeProperties.getEndpoint() + "/" + endpointParams.bucket()))
.build());
}
})
.build();

} else {
if (awsStorageTypeProperties.isEnableRoleAuthentication()) {
log.info("Creating AWS SDK with default credentials");
s3client = S3Client.builder()
.region(Region.of(awsStorageTypeProperties.getRegion()))
.credentialsProvider(DefaultCredentialsProvider.create())
.build();
} else {
log.info("Creating AWS SDK with custom credentials");
s3client = S3Client.builder()
.region(Region.of(awsStorageTypeProperties.getRegion()))
.credentialsProvider(StaticCredentialsProvider.create(getAwsBasicCredentials(awsStorageTypeProperties)))
.build();
}
}

storageTypeService = AwsStorageTypeServiceImpl.builder()
.s3client(s3client)
.bucketName(awsStorageTypeProperties.getBucketName())
Expand Down Expand Up @@ -119,4 +129,9 @@ public StorageTypeService terraformOutput(StreamingService streamingService, Sto
}
return storageTypeService;
}

private AwsBasicCredentials getAwsBasicCredentials(AwsStorageTypeProperties awsStorageTypeProperties) {
AwsBasicCredentials awsCreds = AwsBasicCredentials.create(awsStorageTypeProperties.getAccessKey(), awsStorageTypeProperties.getSecretKey());
return awsCreds;
}
}
1 change: 1 addition & 0 deletions api/src/main/resources/application-demo.properties
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,7 @@ org.terrakube.storage.aws.secretKey=${AwsStorageSecretKey}
org.terrakube.storage.aws.bucketName=${AwsStorageBucketName}
org.terrakube.storage.aws.region=${AwsStorageRegion}
org.terrakube.storage.aws.endpoint=${AwsEndpoint}
org.terrakube.storage.aws.enableRoleAuthentication=${AwsEnableRoleAuth:false}

###############
# GCP Storage #
Expand Down
Loading
Loading