From 50b2c80a1ad730b414ffda5637f8b693247d01de Mon Sep 17 00:00:00 2001
From: lucio
Date: Mon, 12 Aug 2024 14:10:20 +0200
Subject: [PATCH] use TransferManager for files larger than 100 MB to enable
 multipart upload

---
 .../storage/backend/impl/S3Backend.java | 17 ++++++++++++++++-
 1 file changed, 16 insertions(+), 1 deletion(-)

diff --git a/src/main/java/org/gcube/data/access/storagehub/storage/backend/impl/S3Backend.java b/src/main/java/org/gcube/data/access/storagehub/storage/backend/impl/S3Backend.java
index 7d6b753..82c5e4c 100644
--- a/src/main/java/org/gcube/data/access/storagehub/storage/backend/impl/S3Backend.java
+++ b/src/main/java/org/gcube/data/access/storagehub/storage/backend/impl/S3Backend.java
@@ -1,5 +1,6 @@
 package org.gcube.data.access.storagehub.storage.backend.impl;
 
+import java.io.File;
 import java.io.InputStream;
 import java.util.HashMap;
 import java.util.Map;
@@ -22,6 +23,9 @@ import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration;
 import com.amazonaws.services.s3.AmazonS3;
 import com.amazonaws.services.s3.AmazonS3ClientBuilder;
 import com.amazonaws.services.s3.model.ObjectMetadata;
+import com.amazonaws.services.s3.transfer.TransferManager;
+import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
+import com.amazonaws.services.s3.transfer.Upload;
 import com.amazonaws.util.IOUtils;
 
 
@@ -146,8 +150,19 @@ public class S3Backend extends StorageBackend{
 
 		log.info("uploading file {} with id {} in bucket {} ",name, storageId, bucketName);
 
-		client.putObject(bucketName, storageId, stream, objMeta);
+		if (size ==null || size<100000000)
+			client.putObject(bucketName, storageId, stream, objMeta);
+		else {
+			TransferManager tm = TransferManagerBuilder.standard()
+					.withS3Client(client)
+					.build();
+
+			Upload upload = tm.upload(bucketName, storageId, stream, objMeta);
+
+			// Optionally, wait for the upload to finish before continuing.
+			upload.waitForCompletion();
+		}
 		long fileSize;
 		if (size != null && size>0)
 			fileSize = size;
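
For context, the snippet below is a minimal, self-contained sketch of the upload path this patch introduces, assuming the AWS SDK for Java v1 (aws-java-sdk-s3). The class and method names (LargeObjectUploader, uploadObject) and the try/finally that shuts the TransferManager down are illustrative additions, not part of S3Backend; only the 100,000,000-byte threshold and the putObject/TransferManager split come from the patch itself.

    import java.io.InputStream;

    import com.amazonaws.services.s3.AmazonS3;
    import com.amazonaws.services.s3.model.ObjectMetadata;
    import com.amazonaws.services.s3.transfer.TransferManager;
    import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
    import com.amazonaws.services.s3.transfer.Upload;

    public class LargeObjectUploader {

        // Threshold taken from the patch: objects of 100,000,000 bytes or more
        // are routed through TransferManager, which performs a multipart upload.
        private static final long MULTIPART_THRESHOLD = 100_000_000L;

        private final AmazonS3 client;

        public LargeObjectUploader(AmazonS3 client) {
            this.client = client;
        }

        public void uploadObject(String bucketName, String storageId,
                                 InputStream stream, Long size) throws InterruptedException {
            ObjectMetadata objMeta = new ObjectMetadata();
            if (size != null)
                // Declaring the length up front lets the SDK stream the content
                // instead of buffering it to discover the size.
                objMeta.setContentLength(size);

            if (size == null || size < MULTIPART_THRESHOLD) {
                // Small (or unknown-size) objects: a single PUT is enough.
                client.putObject(bucketName, storageId, stream, objMeta);
            } else {
                TransferManager tm = TransferManagerBuilder.standard()
                        .withS3Client(client)
                        .build();
                try {
                    // TransferManager splits the object into parts and uploads
                    // them on its own thread pool.
                    Upload upload = tm.upload(bucketName, storageId, stream, objMeta);
                    // Block until the upload has finished (or failed).
                    upload.waitForCompletion();
                } finally {
                    // Release TransferManager's threads; 'false' keeps the shared
                    // AmazonS3 client usable for later requests.
                    tm.shutdownNow(false);
                }
            }
        }
    }

A possible alternative, not used in the patch: TransferManagerBuilder also exposes withMultipartUploadThreshold(...), so a single long-lived TransferManager could handle every upload and switch to multipart on its own once the configured size is exceeded.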