Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 4 additions & 12 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -69,17 +69,6 @@
<version>1.11</version>
</dependency>
<!-- The AWS Java SDK Dependencies -->
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-s3</artifactId>
<version>${amazon.sdk.version}</version>
</dependency>
<!-- Remove v1 CloudFormation if present -->
<!-- <dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-cloudformation</artifactId>
<version>${amazon.sdk.version}</version>
</dependency> -->
<dependency>
<groupId>software.amazon.awssdk</groupId>
<artifactId>imagebuilder</artifactId>
Expand Down Expand Up @@ -242,7 +231,10 @@
<dependency>
<groupId>software.amazon.awssdk</groupId>
<artifactId>cloudformation</artifactId>
<version>${amazon.sdk.v2.version}</version>
</dependency>
<dependency>
<groupId>software.amazon.awssdk</groupId>
<artifactId>s3</artifactId>
</dependency>
</dependencies>
<build>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,11 +22,9 @@
import org.sagebionetworks.template.config.Configuration;
import org.sagebionetworks.template.repo.beanstalk.SourceBundle;

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.google.inject.Inject;

import software.amazon.awssdk.core.sync.RequestBody;
import software.amazon.awssdk.services.cloudformation.CloudFormationClient;
import software.amazon.awssdk.services.cloudformation.model.CloudFormationException;
import software.amazon.awssdk.services.cloudformation.model.CreateStackRequest;
Expand All @@ -44,6 +42,9 @@
import software.amazon.awssdk.services.cloudformation.model.UpdateStackRequest;
import software.amazon.awssdk.services.cloudformation.model.UpdateStackResponse;
import software.amazon.awssdk.services.cloudformation.model.Stack;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.DeleteObjectRequest;
import software.amazon.awssdk.services.s3.model.PutObjectRequest;

/**
* Basic implementation CloudFormationClient
Expand All @@ -59,15 +60,16 @@ public class CloudFormationClientWrapperImpl implements CloudFormationClientWrap
public static final String NO_UPDATES_ARE_TO_BE_PERFORMED = "No updates are to be performed";

private final CloudFormationClient cloudFormationClient;
private final AmazonS3 s3Client;
private final S3Client s3Client;
private final Configuration configuration;
private final Logger logger;
private final ThreadProvider threadProvider;
private final Map<String, WaitConditionHandler> waitConditionHandlerMap;

@Inject
public CloudFormationClientWrapperImpl(CloudFormationClient cloudFormationClient, AmazonS3 s3Client,
public CloudFormationClientWrapperImpl(CloudFormationClient cloudFormationClient, S3Client s3Client,
Configuration configuration, LoggerFactory loggerFactory, ThreadProvider threadProvider, Set<WaitConditionHandler> waitConditionHandlers) {

super();
this.cloudFormationClient = cloudFormationClient;
this.s3Client = s3Client;
Expand Down Expand Up @@ -207,7 +209,7 @@ public Optional<Stack> describeStack(String stackName) throws CloudFormationExce
}

/**
* Save the given template to to S3.
* Save the given template to S3.
*
* @param template
* @return
Expand All @@ -218,9 +220,8 @@ SourceBundle saveTemplateToS3(String stackName, String template) {
String key = "templates/" + stackName + "-" + UUID.randomUUID() + ".json";
byte[] bytes = template.getBytes("UTF-8");
ByteArrayInputStream input = new ByteArrayInputStream(bytes);
ObjectMetadata metadata = new ObjectMetadata();
metadata.setContentLength(bytes.length);
s3Client.putObject(new PutObjectRequest(bucket, key, input, metadata));
PutObjectRequest request = PutObjectRequest.builder().bucket(bucket).key(key).contentLength((long)bytes.length).contentType("application/json").build();
s3Client.putObject(request, RequestBody.fromBytes(bytes));
return new SourceBundle(bucket, key);
} catch (IOException e) {
throw new RuntimeException(e);
Expand All @@ -243,7 +244,8 @@ String createS3Url(SourceBundle bundle) {
* @param bundle
*/
void deleteTemplate(SourceBundle bundle) {
// NOTE(review): the next line is the v1 SDK (AmazonS3) overload taking bucket/key directly;
// it appears here interleaved with its v2 replacement — confirm the v1 call was removed in the final merge.
s3Client.deleteObject(bundle.getBucket(), bundle.getKey());
// v2 SDK style: build an explicit DeleteObjectRequest for the template object identified by the bundle.
DeleteObjectRequest delObjRequest = DeleteObjectRequest.builder().bucket(bundle.getBucket()).key(bundle.getKey()).build();
s3Client.deleteObject(delObjRequest);
}

public boolean isStartedInUpdateRollbackComplete(String stackName) {
Expand Down
28 changes: 12 additions & 16 deletions src/main/java/org/sagebionetworks/template/TemplateGuiceModule.java
Original file line number Diff line number Diff line change
Expand Up @@ -98,8 +98,6 @@
import org.sagebionetworks.template.s3.S3BucketBuilderImpl;
import org.sagebionetworks.template.s3.S3Config;
import org.sagebionetworks.template.s3.S3ConfigValidator;
import org.sagebionetworks.template.s3.S3TransferManagerFactory;
import org.sagebionetworks.template.s3.S3TransferManagerFactoryImpl;
import org.sagebionetworks.template.utils.ArtifactDownload;
import org.sagebionetworks.template.utils.ArtifactDownloadImpl;
import org.sagebionetworks.template.vpc.SubnetTemplateBuilder;
Expand All @@ -112,9 +110,6 @@
import org.sagebionetworks.war.WarAppenderImpl;

import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.google.inject.Provides;
import com.google.inject.multibindings.Multibinder;
import com.google.inject.name.Named;
Expand All @@ -133,6 +128,8 @@
import software.amazon.awssdk.services.kms.KmsClient;
import software.amazon.awssdk.services.lambda.LambdaClient;
import software.amazon.awssdk.services.opensearchserverless.OpenSearchServerlessClient;
import software.amazon.awssdk.services.s3.S3AsyncClient;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.secretsmanager.SecretsManagerClient;
import software.amazon.awssdk.services.ses.SesClient;
import software.amazon.awssdk.services.sts.StsClient;
Expand Down Expand Up @@ -217,11 +214,15 @@ public CloudFormationClient provideAmazonCloudFormationClient() {
}

@Provides
public AmazonS3 provideAmazonS3Client() {
AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard();
builder.withCredentials(new DefaultAWSCredentialsProviderChain());
builder.withRegion(Regions.US_EAST_1);
return builder.build();
public S3Client provideAmazonS3Client() {
S3Client client = S3Client.builder().region(Region.US_EAST_1).build();
return client;
}

@Provides
public S3AsyncClient provideAmazonS3AsyncClient() {
// Asynchronous v2 SDK S3 client, pinned to us-east-1 like the synchronous S3Client provider.
// Credentials come from the SDK's default provider chain.
return S3AsyncClient.builder().region(Region.US_EAST_1).build();
}

@Provides
Expand Down Expand Up @@ -327,19 +328,14 @@ public S3Config s3ConfigProvider() throws IOException {
}

@Provides
public LoadBalancerAlarmsConfig loadBalanacerConfigProvider() throws IOException {
public LoadBalancerAlarmsConfig loadBalancerConfigProvider() throws IOException {
return new LoadBalancerAlarmsConfigValidator(loadFromJsonFile(LOAD_BALANCER_ALARM_CONFIG_FILE, LoadBalancerAlarmsConfig.class)).validate();
}

@Provides
public SynapseAdminClient synapseAdminClient(SynapseAdminClientFactory factory) {
// Delegate construction to the injected factory.
SynapseAdminClient adminClient = factory.getInstance();
return adminClient;
}

@Provides
// NOTE(review): this provider depends on the v1 AmazonS3 client and S3TransferManagerFactoryImpl,
// whose imports are removed elsewhere in this change — presumably this method is being deleted
// as part of the v1 -> v2 SDK migration; confirm no remaining injection points require it.
public S3TransferManagerFactory provideS3TransferManagerFactory(AmazonS3 s3Client) {
return new S3TransferManagerFactoryImpl(s3Client);
}

@Provides
public DataWarehouseConfig dataWarehouseConfigProvider() throws IOException {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
package org.sagebionetworks.template.datawarehouse;

import com.amazonaws.internal.ReleasableInputStream;
import com.amazonaws.services.s3.AmazonS3;
import com.google.inject.Inject;
import org.apache.logging.log4j.Logger;
import org.apache.velocity.Template;
Expand All @@ -17,7 +16,10 @@
import org.sagebionetworks.template.repo.VelocityExceptionThrower;
import org.sagebionetworks.template.utils.ArtifactDownload;
import org.sagebionetworks.util.ValidateArgument;
import software.amazon.awssdk.core.sync.RequestBody;
import software.amazon.awssdk.services.cloudformation.model.Capability;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.PutObjectRequest;

import java.io.File;
import java.io.IOException;
Expand Down Expand Up @@ -55,12 +57,12 @@ public class DataWarehouseBuilderImpl implements DataWarehouseBuilder {
private StackTagsProvider tagsProvider;
private DataWarehouseConfig dataWarehouseConfig;
private ArtifactDownload downloader;
private AmazonS3 s3Client;
private S3Client s3Client;

@Inject
public DataWarehouseBuilderImpl(CloudFormationClientWrapper cloudFormationClientWrapper, VelocityEngine velocityEngine,
Configuration config, LoggerFactory loggerFactory,
StackTagsProvider tagsProvider, DataWarehouseConfig dataWarehouseConfig, ArtifactDownload downloader, AmazonS3 s3Client) {
StackTagsProvider tagsProvider, DataWarehouseConfig dataWarehouseConfig, ArtifactDownload downloader, S3Client s3Client) {
this.cloudFormationClientWrapper = cloudFormationClientWrapper;
this.velocityEngine = velocityEngine;
this.config = config;
Expand Down Expand Up @@ -140,11 +142,13 @@ String copyArtifactFromGithub(String bucket) {
ZipEntry entry = null;
while ((entry = zipInputStream.getNextEntry()) != null) {
if (!entry.isDirectory() && entry.getName().contains(scriptPath)) {
String scriptFile = entry.getName();
byte[] fileContent = zipInputStream.readAllBytes();
String scriptFile = entry.getName();
String s3Key = s3ScriptsPath + scriptFile.replace(scriptPath, "");
logger.info("Uploading " + scriptFile + " to " + s3Key);
// Uses a stream with close disabled so that the s3 sdk does not close it for us
s3Client.putObject(bucket, s3Key, ReleasableInputStream.wrap(zipInputStream).disableClose(), null);
PutObjectRequest putObjectRequest = PutObjectRequest.builder().bucket(bucket).key(s3Key).build();
RequestBody requestBody = RequestBody.fromBytes(fileContent);
s3Client.putObject(putObjectRequest, requestBody);
}
}
} catch (IOException e) {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,9 +1,6 @@
package org.sagebionetworks.template.datawarehouse.backfill;

import com.amazonaws.internal.ReleasableInputStream;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.ListObjectsV2Request;
import com.amazonaws.services.s3.model.ListObjectsV2Result;
import com.google.common.collect.ImmutableMap;
import com.google.inject.Inject;
import org.apache.logging.log4j.Logger;
Expand All @@ -20,6 +17,7 @@
import org.sagebionetworks.template.repo.VelocityExceptionThrower;
import org.sagebionetworks.template.utils.ArtifactDownload;
import org.sagebionetworks.util.ValidateArgument;
import software.amazon.awssdk.core.sync.RequestBody;
import software.amazon.awssdk.services.athena.AthenaClient;
import software.amazon.awssdk.services.athena.model.Datum;
import software.amazon.awssdk.services.athena.model.GetQueryExecutionRequest;
Expand All @@ -40,6 +38,11 @@
import software.amazon.awssdk.services.glue.model.PartitionInput;
import software.amazon.awssdk.services.glue.model.StartJobRunRequest;
import software.amazon.awssdk.services.glue.model.StorageDescriptor;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.CommonPrefix;
import software.amazon.awssdk.services.s3.model.ListObjectsV2Request;
import software.amazon.awssdk.services.s3.model.ListObjectsV2Response;
import software.amazon.awssdk.services.s3.model.PutObjectRequest;

import java.io.File;
import java.io.IOException;
Expand Down Expand Up @@ -88,7 +91,7 @@ public class BackfillDataWarehouseBuilderImpl implements BackfillDataWarehouseBu
private Configuration config;
private Logger logger;
private VelocityEngine velocityEngine;
private AmazonS3 s3Client;
private S3Client s3Client;
private CloudFormationClientWrapper cloudFormationClientWrapper;
private StackTagsProvider tagsProvider;
private GlueClient awsGlue;
Expand All @@ -98,7 +101,7 @@ public class BackfillDataWarehouseBuilderImpl implements BackfillDataWarehouseBu
public BackfillDataWarehouseBuilderImpl(CloudFormationClientWrapper cloudFormationClientWrapper, VelocityEngine velocityEngine,
Configuration config, LoggerFactory loggerFactory,
StackTagsProvider tagsProvider, ArtifactDownload downloader,
AmazonS3 s3Client, GlueClient awsGlue, AthenaClient athena) {
S3Client s3Client, GlueClient awsGlue, AthenaClient athena) {
this.cloudFormationClientWrapper = cloudFormationClientWrapper;
this.velocityEngine = velocityEngine;
this.config = config;
Expand Down Expand Up @@ -197,33 +200,45 @@ String copyArtifactFromGithub(String bucket) {
String s3Key = s3ScriptsPath + scriptFile.replace(scriptPath, "");
logger.info("Uploading " + scriptFile + " to " + s3Key);
// Uses a stream with close disabled so that the s3 sdk does not close it for us
s3Client.putObject(bucket, s3Key, ReleasableInputStream.wrap(zipInputStream).disableClose(), null);
logger.info("Uploading {} to {}", scriptFile, s3Key);
PutObjectRequest putObjectRequest = PutObjectRequest.builder()
.bucket(bucket)
.key(s3Key)
.build();
try (ReleasableInputStream releasableInputStream = ReleasableInputStream.wrap(zipInputStream).disableClose()) {
RequestBody requestBody = RequestBody.fromInputStream(releasableInputStream, entry.getSize());
s3Client.putObject(putObjectRequest, requestBody);
}
}
}
} catch (IOException e) {
throw new RuntimeException(e);
logger.error("Error processing the zip file from GitHub URL: {}", githubUrl, e);
throw new RuntimeException("Failed to process artifact from GitHub", e);
} finally {
zipFile.delete();
if (!zipFile.delete()) {
logger.warn("Failed to delete temporary zip file: {}", zipFile.getAbsolutePath());
}
}
return s3ScriptsPath;
}

private void createGluePartitionForOldData(String prefix, String bucketName, String databaseName) {
ListObjectsV2Request listObjectsV2Request = new ListObjectsV2Request().withPrefix(prefix).withBucketName(bucketName).withDelimiter("/");
ListObjectsV2Result s3ObjectResult = s3Client.listObjectsV2(listObjectsV2Request);
if (s3ObjectResult == null || s3ObjectResult.getCommonPrefixes().size() == 0) {
ListObjectsV2Request listObjectsV2Request = ListObjectsV2Request.builder().prefix(prefix).bucket(bucketName).delimiter("/").build();
ListObjectsV2Response s3ObjectResponse = s3Client.listObjectsV2(listObjectsV2Request);
if (s3ObjectResponse == null || s3ObjectResponse.commonPrefixes().isEmpty()) {
getBatchPartitionParametersAndCreateGluePartition(prefix, databaseName, bucketName);
return;
}
for (String newPath : s3ObjectResult.getCommonPrefixes()) {
for (CommonPrefix newPath : s3ObjectResponse.commonPrefixes()) {
if (checkToIterate(prefix, newPath)) {
createGluePartitionForOldData(newPath, bucketName, databaseName);
createGluePartitionForOldData(newPath.prefix(), bucketName, databaseName);
}
}
}
private boolean checkToIterate(String prefix, String newPath) {
if (prefix.length() == 0 && newPath.startsWith("000000")) return true;
return newPath.contains(BULK_FILE_DOWNLOAD_FOLDER_NAME) || newPath.contains(FILE_DOWNLOAD_FOLDER_NAME);

/**
 * Decide whether the recursive partition scan should descend into the given common prefix.
 * Descends when scanning from the bucket root into a "000000"-prefixed path, or when the
 * prefix belongs to one of the download folders of interest.
 */
private boolean checkToIterate(String prefix, CommonPrefix newPath) {
String candidate = newPath.prefix();
if (prefix.isEmpty() && candidate.startsWith("000000")) {
return true;
}
return candidate.contains(BULK_FILE_DOWNLOAD_FOLDER_NAME)
|| candidate.contains(FILE_DOWNLOAD_FOLDER_NAME);
}

private void getBatchPartitionParametersAndCreateGluePartition(String prefix, String databaseName, String bucketName) {
Expand Down
Loading