Use TransferManager for files greater than 100 MB to enable multipart

upload
This commit is contained in:
lucio 2024-08-12 14:10:20 +02:00
parent 3584bdaf29
commit 50b2c80a1a
1 changed file with 16 additions and 1 deletion

View File

@ -1,5 +1,6 @@
package org.gcube.data.access.storagehub.storage.backend.impl;
import java.io.File;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
@ -22,6 +23,9 @@ import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
import com.amazonaws.services.s3.transfer.Upload;
import com.amazonaws.util.IOUtils;
@ -146,8 +150,19 @@ public class S3Backend extends StorageBackend{
log.info("uploading file {} with id {} in bucket {} ",name, storageId, bucketName);
client.putObject(bucketName, storageId, stream, objMeta);
if (size ==null || size<100000000)
client.putObject(bucketName, storageId, stream, objMeta);
else {
TransferManager tm = TransferManagerBuilder.standard()
.withS3Client(client)
.build();
Upload upload = tm.upload(bucketName, storageId, stream, objMeta);
// Optionally, wait for the upload to finish before continuing.
upload.waitForCompletion();
}
long fileSize;
if (size != null && size>0)
fileSize = size;