Compare commits

...

18 Commits

Author SHA1 Message Date
Roberto Cirillo 2e4e709ff8 restore source to 1.8 2022-09-19 10:30:29 +02:00
Roberto Cirillo 89524aac9c set source jdk to 11 2022-09-19 10:28:15 +02:00
Roberto Cirillo c252d1c5ae removing jvm source tag 2022-09-19 10:26:36 +02:00
Roberto Cirillo ae6740cfbb set java version 2022-09-19 10:22:43 +02:00
Roberto Cirillo ac95563480 update to version 2.9.2 2022-09-07 17:25:49 +02:00
Roberto Cirillo 904bf0bc17 restored close method 2022-09-07 17:10:08 +02:00
Roberto Cirillo 83bc9fbc2e update to release version 2.9.1 2022-06-28 14:15:48 +02:00
Roberto Cirillo bbaf866f43 update CHANGELOG 2022-06-28 14:12:42 +02:00
Roberto Cirillo 7d2be48d31 update to 2.9.1-SNAPSHOT version in order to have a fixed bom as latest in the range. see #23578 2022-06-28 14:07:50 +02:00
Roberto Cirillo 588a71db5e Revert "Update 'pom.xml'" (this reverts commit 7194811366) 2022-06-28 14:04:30 +02:00
Roberto Cirillo db61504f66 Update 'CHANGELOG.md' (removed SNAPSHOT) 2021-10-08 09:31:37 +02:00
Roberto Cirillo e60fcbfdac Update 'pom.xml' (removed SNAPSHOT) 2021-10-08 09:31:18 +02:00
Roberto Cirillo e6ca5d25b4 Update 'pom.xml' (moved to snapshot) 2021-10-08 09:26:25 +02:00
Roberto Cirillo 50dcb2f2bd Update 'CHANGELOG.md' (moved to snapshot) 2021-10-08 09:26:03 +02:00
Roberto Cirillo c387a38fdf removed unused imports; deleted main test 2021-10-07 15:43:06 +02:00
Roberto Cirillo 15a4909d7c bug fix 22164; clean code. 2021-10-07 15:11:55 +02:00
Roberto Cirillo 5a644f79a0 removed SNAPSHOT from version 2021-10-07 10:22:19 +02:00
Roberto Cirillo 505346fac3 moved from 2.13.1 to 3.0.0-SNAPSHOT version 2021-10-07 09:38:06 +02:00
45 changed files with 114 additions and 194 deletions

View File

@ -1,15 +1,15 @@
# Changelog for storage-manager-core
## [v2.13.1] 2021-09-10
* fix #21980
* update gcube-bom version
* add close operation on IClient interface
* add check on transport layer instance: if the memory type is not the same, a new transportLayer is instantiated
* move memoryType var from super class TransportManager
* convert the return type used for metadata collections from BasicDBObject to DBObject
* One pool for every operation: static Operation class; no mongo close operation
* upgrade mongo-java-driver to 3.12.0
* add input parameter to getSize method in order to be compatible with the needs of the s3 client
## [v2.9.3-SNAPSHOT] 2022-09-19
* set java to 1.8
## [v2.9.2] 2022-09-07
* restored close() method to IClient
* add slf4j-simple dependency with test scope
* update gcube-bom to 2.0.2
## [v2.9.1] 2022-06-28
* update to version 2.9.1 in order to have a fixed bom in the latest version of the range
## [v2.9.0] 2019-10-19
* SSL enabled

pom.xml (24 lines changed)
View File

@ -8,10 +8,12 @@
</parent>
<groupId>org.gcube.contentmanagement</groupId>
<artifactId>storage-manager-core</artifactId>
<version>2.13.1</version>
<!-- <properties> -->
<!-- <distroDirectory>${project.basedir}/distro</distroDirectory> -->
<!-- </properties> -->
<version>2.9.3-SNAPSHOT</version>
<properties>
<distroDirectory>${project.basedir}/distro</distroDirectory>
<maven.compiler.target>1.8</maven.compiler.target>
<maven.compiler.source>1.8</maven.compiler.source>
</properties>
<scm>
<connection>scm:git:https://code-repo.d4science.org/gCubeSystem/${project.artifactId}.git</connection>
<developerConnection>scm:git:https://code-repo.d4science.org/gCubeSystem/${project.artifactId}.git</developerConnection>
@ -23,7 +25,7 @@
<dependency>
<groupId>org.gcube.distribution</groupId>
<artifactId>gcube-bom</artifactId>
<version>2.0.1</version>
<version>2.0.2</version>
<type>pom</type>
<scope>import</scope>
</dependency>
@ -53,11 +55,11 @@
<artifactId>commons-codec</artifactId>
<version>1.8</version>
</dependency>
<!-- <dependency> -->
<!-- <groupId>org.slf4j</groupId> -->
<!-- <artifactId>slf4j-simple</artifactId> -->
<!-- <version>1.7.32</version> -->
<!-- <scope>test</scope> -->
<!-- </dependency> -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<version>1.7.32</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View File

@ -209,6 +209,11 @@ public RemoteResourceComplexInfo getMetaFile();
*/
public void forceClose();
/**
* close the connections to the backend storage system. Method restored for backward compatibility
*/
public void close();
public RemoteResource getUrl(boolean forceCreation);
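A minimal usage sketch for the restored method, following the ServiceEngine construction and fluent call chain shown in the SimpleTest2 class removed further down in this compare. The hosts and credentials are placeholders, and wrapping the work in try/finally is only a suggested pattern, not something the interface mandates.

import org.gcube.contentmanagement.blobstorage.service.IClient;
import org.gcube.contentmanagement.blobstorage.service.impl.ServiceEngine;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;

public class CloseUsageSketch {
    public static void main(String[] args) throws RemoteBackendException {
        // placeholder hosts and credentials, argument order as in the removed SimpleTest2
        String[] server = new String[]{"mongo1.example.org", "mongo2.example.org"};
        IClient client = new ServiceEngine(server, "user", "group", "private", "owner");
        try {
            // download a remote file to a local path
            client.get().LFile("/tmp/local.jpg").RFile("/img/remote.jpg");
        } finally {
            // restored method: releases the connections to the backend storage system
            client.close();
        }
    }
}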

View File

@ -8,7 +8,6 @@ import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.DirectoryBucket;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.DirectoryEntity;
import org.gcube.contentmanagement.blobstorage.service.operation.OperationManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;

View File

@ -1,6 +1,5 @@
package org.gcube.contentmanagement.blobstorage.service.impl;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@ -18,7 +17,6 @@ import org.gcube.contentmanagement.blobstorage.service.directoryOperation.Bucket
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.Encrypter;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.Encrypter.EncryptionException;
import org.gcube.contentmanagement.blobstorage.service.operation.*;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants;
import org.slf4j.Logger;
@ -931,6 +929,26 @@ public class ServiceEngine implements IClient {
}
}
@Override
public void close(){
currentOperation="close";
file.setOwner(owner);
getMyFile().setRemoteResource(REMOTE_RESOURCE.PATH);
setMyFile(file);
service.setResource(getMyFile());
service.setTypeOperation("forceclose");
try {
if(((file.getInputStream() != null) || (file.getOutputStream()!=null)) || ((file.getLocalPath() != null) || (file.getRemotePath() != null)))
service.startOperation(file,file.getRemotePath(), owner, primaryBackend, Costants.DEFAULT_CHUNK_OPTION, getContext(), isReplaceOption());
else{
logger.error("parameters incompatible ");
}
} catch (Throwable t) {
logger.error("get()", t.getCause());
throw new RemoteBackendException(" Error in "+currentOperation+" operation ", t.getCause());
}
}
public String getServiceClass() {

View File

@ -4,7 +4,6 @@ import java.net.UnknownHostException;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.slf4j.Logger;
@ -38,8 +37,6 @@ public abstract class Copy extends Operation{
}
public String doIt(MyFile myFile) throws RemoteBackendException{
// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
TransportManager tm=getTransport(myFile);
String id=null;
try {

View File

@ -5,7 +5,6 @@ import java.util.List;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.slf4j.Logger;

View File

@ -4,7 +4,6 @@ import org.bson.types.ObjectId;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.slf4j.Logger;

View File

@ -4,7 +4,6 @@ import java.io.OutputStream;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.gcube.contentmanagement.blobstorage.transport.backend.operation.DownloadOperator;
import org.slf4j.Logger;
@ -40,8 +39,6 @@ public class DownloadAndLock extends Operation {
//TODO add field for file lock
get(download,myFile, true);
} catch (Exception e) {
// TransportManagerFactory tmf=new TransportManagerFactory(getServer(), getUser(), getPassword());
// TransportManager tm=tmf.getTransport(getBackendType(), myFile.getGcubeMemoryType(), getDbNames(), myFile.getWriteConcern(), myFile.getReadPreference());
TransportManager tm=getTransport(myFile);
tm.close();
throw new RemoteBackendException(" Error in downloadAndLock operation ", e.getCause());

View File

@ -7,7 +7,6 @@ import org.bson.types.ObjectId;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.slf4j.Logger;
@ -31,8 +30,6 @@ public abstract class DuplicateFile extends Operation {
}
public String doIt(MyFile myFile) throws RemoteBackendException{
// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
TransportManager tm=getTransport(myFile);
String id=null;
try {

View File

@ -8,7 +8,6 @@ import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendEx
import org.bson.types.ObjectId;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -31,8 +30,6 @@ public class Exist extends Operation{
}
public String doIt(MyFile myFile) throws RemoteBackendException{
// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
TransportManager tm=getTransport(myFile);
boolean isPresent=false;
try {

View File

@ -23,12 +23,7 @@ public class FileWriter extends Thread{
final Logger logger=LoggerFactory.getLogger(FileWriter.class);
private Monitor monitor;
private int id;
// private MyFile myFile;
// private byte[] encode;
// private int offset;
// private static int len=0;
private OutputStream out;
// private String path;
private byte[] full;

View File

@ -2,7 +2,6 @@ package org.gcube.contentmanagement.blobstorage.service.operation;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -13,15 +12,12 @@ public class ForceClose extends Operation{
* Logger for this class
*/
final Logger logger=LoggerFactory.getLogger(GetSize.class);
// public String file_separator = ServiceEngine.FILE_SEPARATOR;//System.getProperty("file.separator");
public ForceClose(String[] server, String user, String pwd, String bucket, Monitor monitor, boolean isChunk, String backendType, String[] dbs) {
super(server, user, pwd, bucket, monitor, isChunk, backendType, dbs);
}
public String doIt(MyFile myFile) throws RemoteBackendException{
// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
TransportManager tm=getTransport(myFile);
try {
tm.forceClose();

View File

@ -3,9 +3,7 @@ package org.gcube.contentmanagement.blobstorage.service.operation;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.DirectoryBucket;
import org.gcube.contentmanagement.blobstorage.service.impl.ServiceEngine;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants;
import org.slf4j.Logger;
@ -23,8 +21,6 @@ public class GetFolderCount extends Operation {
}
public String doIt(MyFile myFile) throws RemoteBackendException{
// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
TransportManager tm=getTransport(myFile);
long dim=0;
try {

View File

@ -4,7 +4,6 @@ import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.DirectoryBucket;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants;
import org.slf4j.Logger;
@ -22,8 +21,6 @@ public class GetFolderSize extends Operation {
}
public String doIt(MyFile myFile) throws RemoteBackendException{
// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
TransportManager tm=getTransport(myFile);
long dim=0;
try {

View File

@ -11,6 +11,12 @@ import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants;
/**
* this class is replaced by getHttpsUrl
* @author roberto
*
*/
@Deprecated
public class GetHttpUrl extends Operation {
@ -46,7 +52,8 @@ public class GetHttpUrl extends Operation {
String urlBase="smp://"+resolverHost+Costants.URL_SEPARATOR;
String urlParam="";
try {
String id=getId(myFile.getAbsoluteRemotePath(), myFile.isForceCreation(), myFile.getGcubeMemoryType(), myFile.getWriteConcern(), myFile.getReadPreference());
// String id=getId(myFile.getAbsoluteRemotePath(), myFile.isForceCreation(), myFile.getGcubeMemoryType(), myFile.getWriteConcern(), myFile.getReadPreference());
String id=getId(myFile);
String phrase=myFile.getPassPhrase();
// urlParam =new StringEncrypter("DES", phrase).encrypt(id);
urlParam = new Encrypter("DES", phrase).encrypt(id);
@ -71,7 +78,7 @@ public class GetHttpUrl extends Operation {
return httpUrl.toString();
}
@Deprecated
private String getId(String path, boolean forceCreation, MemoryType memoryType, String writeConcern, String readPreference){
String id=null;
TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
@ -87,6 +94,21 @@ public class GetHttpUrl extends Operation {
return id;
}
private String getId(MyFile myFile){
String id=null;
TransportManager tm=getTransport(myFile);
try {
id = tm.getId(bucket, myFile.isForceCreation());
} catch (Exception e) {
tm.close();
throw new RemoteBackendException(" Error in GetUrl operation. Problem to discover remote file:"+bucket+" "+ e.getMessage(), e.getCause()); }
if (logger.isDebugEnabled()) {
logger.debug(" PATH " + bucket);
}
return id;
}
private URL translate(URL url) throws IOException {
logger.debug("translating: "+url);
String urlString=url.toString();

View File

@ -49,6 +49,7 @@ public class GetHttpsUrl extends Operation {
String urlParam="";
try {
String id=getId(myFile.getAbsoluteRemotePath(), myFile.isForceCreation(), myFile.getGcubeMemoryType(), myFile.getWriteConcern(), myFile.getReadPreference());
// String id=getId(myFile);
String phrase=myFile.getPassPhrase();
// urlParam =new StringEncrypter("DES", phrase).encrypt(id);
urlParam = new Encrypter("DES", phrase).encrypt(id);
@ -73,6 +74,21 @@ public class GetHttpsUrl extends Operation {
return httpsUrl.toString();
}
private String getId(MyFile myFile){
String id=null;
TransportManager tm=getTransport(myFile);
try {
id = tm.getId(bucket, myFile.isForceCreation());
} catch (Exception e) {
tm.close();
throw new RemoteBackendException(" Error in GetUrl operation. Problem to discover remote file:"+bucket+" "+ e.getMessage(), e.getCause()); }
if (logger.isDebugEnabled()) {
logger.debug(" PATH " + bucket);
}
return id;
}
@Deprecated
private String getId(String path, boolean forceCreation, MemoryType memoryType, String writeConcern, String readPreference){
String id=null;
TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);

View File

@ -4,7 +4,6 @@ import org.bson.types.ObjectId;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -31,8 +30,6 @@ public class GetMetaFile extends Operation{
*
*/
public MyFile doIt(MyFile myFile) throws RemoteBackendException{
// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
TransportManager tm=getTransport(myFile);
long dim=0;
String id=null;

View File

@ -4,7 +4,6 @@ import org.bson.types.ObjectId;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -21,8 +20,6 @@ public class GetMetaInfo extends Operation {
}
public String doIt(MyFile myFile) throws RemoteBackendException{
// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
TransportManager tm=getTransport(myFile);
String value=null;
try {

View File

@ -3,7 +3,6 @@ package org.gcube.contentmanagement.blobstorage.service.operation;
import org.bson.types.ObjectId;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -21,8 +20,6 @@ public class GetRemotePath extends Operation{
}
public String doIt(MyFile myFile) throws RemoteBackendException{
// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
TransportManager tm=getTransport(myFile);
String path=null;
try {

View File

@ -4,7 +4,6 @@ import org.bson.types.ObjectId;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -28,8 +27,6 @@ public class GetSize extends Operation{
}
public String doIt(MyFile myFile) throws RemoteBackendException{
// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
TransportManager tm=getTransport(myFile);
long dim=0;
try {

View File

@ -4,7 +4,6 @@ import java.io.OutputStream;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -36,8 +35,6 @@ public class GetTTL extends Operation {
TransportManager tm=null;
try {
// add a field for the file lock
// TransportManagerFactory tmf=new TransportManagerFactory(server, user, password);
// tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
tm=getTransport(myFile);
currentTTL=tm.getTTL(bucket);
} catch (Exception e) {

View File

@ -9,8 +9,12 @@ import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants;
/**
* this class is replaced by getHttpsUrl
* @author roberto
*
*/
@Deprecated
public class GetUrl extends Operation{
// private OutputStream os;
@ -40,11 +44,10 @@ public class GetUrl extends Operation{
String urlBase="smp://"+resolverHost+Costants.URL_SEPARATOR;
String urlParam="";
try {
String id=getId(myFile.getAbsoluteRemotePath(), myFile.isForceCreation(), myFile.getGcubeMemoryType(), myFile.getWriteConcern(), myFile.getReadPreference());
// String id=getId(myFile.getAbsoluteRemotePath(), myFile.isForceCreation(), myFile.getGcubeMemoryType(), myFile.getWriteConcern(), myFile.getReadPreference());
String id=getId(myFile);
String phrase=myFile.getPassPhrase();
// urlParam =new StringEncrypter("DES", phrase).encrypt(id);
urlParam = new Encrypter("DES", phrase).encrypt(id);
// String urlEncoded=URLEncoder.encode(urlParam, "UTF-8");
} catch (EncryptionException e) {
throw new RemoteBackendException(" Error in getUrl operation problem to encrypt the string", e.getCause());
}
@ -56,6 +59,7 @@ public class GetUrl extends Operation{
return url;
}
@Deprecated
private String getId(String path, boolean forceCreation, MemoryType memoryType, String writeConcern, String readPreference){
String id=null;
TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
@ -71,4 +75,18 @@ public class GetUrl extends Operation{
return id;
}
private String getId(MyFile myFile){
String id=null;
TransportManager tm=getTransport(myFile);
try {
id = tm.getId(bucket, myFile.isForceCreation());
} catch (Exception e) {
tm.close();
throw new RemoteBackendException(" Error in GetUrl operation. Problem to discover remote file:"+bucket+" "+ e.getMessage(), e.getCause()); }
if (logger.isDebugEnabled()) {
logger.debug(" PATH " + bucket);
}
return id;
}
}
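For orientation, the three URL operations touched above (GetUrl, GetHttpUrl, GetHttpsUrl) share the same flow: look up the object id through the transport, DES-encrypt it with the resource pass phrase via Encrypter, and append the encrypted parameter to the resolver base URL. A rough, self-contained sketch of that flow using the standard javax.crypto API; the host, id and pass phrase are placeholders, and the real Encrypter class may use a different key derivation and output encoding.

import java.util.Base64;
import javax.crypto.Cipher;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.DESKeySpec;

public class UrlAssemblySketch {
    public static void main(String[] args) throws Exception {
        // hypothetical stand-ins for resolverHost, the looked-up id and myFile.getPassPhrase()
        String resolverHost = "data.example.org";
        String id = "61a0c3f2aa11bb22cc33dd44";
        String phrase = "a-sample-pass-phrase";

        // DES encryption of the id, roughly what Encrypter("DES", phrase).encrypt(id) does
        SecretKey key = SecretKeyFactory.getInstance("DES")
                .generateSecret(new DESKeySpec(phrase.getBytes("UTF-8")));
        Cipher cipher = Cipher.getInstance("DES");
        cipher.init(Cipher.ENCRYPT_MODE, key);
        String urlParam = Base64.getUrlEncoder()
                .encodeToString(cipher.doFinal(id.getBytes("UTF-8")));

        // GetUrl returns an smp:// link, GetHttpsUrl the https:// equivalent;
        // "/" stands in for Costants.URL_SEPARATOR
        System.out.println("smp://" + resolverHost + "/" + urlParam);
    }
}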

View File

@ -4,7 +4,6 @@ import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.DirectoryBucket;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants;
import org.slf4j.Logger;
@ -20,8 +19,6 @@ public class GetUserTotalItems extends Operation {
}
public String doIt(MyFile myFile) throws RemoteBackendException{
// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
TransportManager tm=getTransport(myFile);
String dim=null;
logger.info("check user total items for user: "+getOwner()+ " user is "+user);

View File

@ -4,7 +4,6 @@ import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.DirectoryBucket;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants;
import org.slf4j.Logger;
@ -13,15 +12,12 @@ import org.slf4j.LoggerFactory;
public class GetUserTotalVolume extends Operation {
final Logger logger=LoggerFactory.getLogger(GetUserTotalVolume.class);
// public String file_separator = ServiceEngine.FILE_SEPARATOR;//System.getProperty("file.separator");
public GetUserTotalVolume(String[] server, String user, String pwd, String bucket, Monitor monitor, boolean isChunk, String backendType, String[] dbs) {
super(server, user, pwd, bucket, monitor, isChunk, backendType, dbs);
}
public String doIt(MyFile myFile) throws RemoteBackendException{
// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
TransportManager tm=getTransport(myFile);
String dim=null;
logger.info("check user total volume for user: "+getOwner()+ " user is "+user);

View File

@ -5,7 +5,6 @@ import java.net.UnknownHostException;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.slf4j.Logger;
@ -37,8 +36,6 @@ public abstract class Link extends Operation{
}
public String doIt(MyFile myFile) throws RemoteBackendException{
// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
TransportManager tm=getTransport(myFile);
String id=null;
try {

View File

@ -44,8 +44,6 @@ public abstract class Lock extends Operation {
Download download = new DownloadOperator(getServer(), getUser(), getPassword(), getBucket(), getMonitor(), isChunk(), getBackendType(), getDbNames());
unlockKey=get(download, myFile, true);
} catch (Exception e) {
// TransportManagerFactory tmf=new TransportManagerFactory(getServer(), getUser(), getPassword());
// TransportManager tm=tmf.getTransport(getBackendType(), myFile.getGcubeMemoryType(), getDbNames(), myFile.getWriteConcern(), myFile.getReadPreference());
TransportManager tm=getTransport(myFile);
tm.close();
throw new RemoteBackendException(" Error in lock operation ", e.getCause());

View File

@ -1,14 +1,11 @@
package org.gcube.contentmanagement.blobstorage.service.operation;
import java.io.OutputStream;
import java.net.UnknownHostException;
import org.gcube.contentmanagement.blobstorage.resource.MemoryType;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.DirectoryBucket;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.slf4j.Logger;
@ -40,12 +37,9 @@ public abstract class Move extends Operation{
}
public String doIt(MyFile myFile) throws RemoteBackendException{
// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
TransportManager tm=getTransport(myFile);
String id=null;
try {
// id=tm.move(myFile, sourcePath, destinationPath);
id=tm.move(this);
} catch (UnknownHostException e) {
tm.close();

View File

@ -7,7 +7,6 @@ import org.gcube.contentmanagement.blobstorage.resource.MemoryType;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.slf4j.Logger;
@ -39,8 +38,6 @@ public abstract class MoveDir extends Operation{
}
public String doIt(MyFile myFile) throws RemoteBackendException{
// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
TransportManager tm=getTransport(myFile);
List<String>ids=null;
try {

View File

@ -3,7 +3,6 @@ package org.gcube.contentmanagement.blobstorage.service.operation;
import org.bson.types.ObjectId;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding;
import org.gcube.contentmanagement.blobstorage.service.impl.ServiceEngine;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
@ -15,7 +14,6 @@ import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Objects;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
@ -161,8 +159,6 @@ public abstract class Operation {
}else{
if(logger.isDebugEnabled())
logger.debug("NO THREAD POOL USED");
// TransportManagerFactory tmf=new TransportManagerFactory(server, user, password);
// TransportManager tm=tmf.getTransport(backendType, resource.getGcubeMemoryType(), dbNames, resource.getWriteConcern(), resource.getReadPreference());
TransportManager tm=getTransport(resource);
String objectId=tm.uploadManager(upload, resource, bucket, bucket+"_1", replaceOption);
return objectId;
@ -180,9 +176,6 @@ public abstract class Operation {
logger.debug("get(String) - start");
}
String unlocKey=null;
// TransportManagerFactory tmf=null;
// tmf=new TransportManagerFactory(server, user, password);
// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
TransportManager tm=getTransport(myFile);
long start=System.currentTimeMillis();
String path=myFile.getLocalPath();
@ -382,10 +375,8 @@ public abstract class Operation {
}
protected TransportManager getTransport(MyFile myFile) {
// if(Objects.isNull(transport)) {
TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
transport=tmf.getTransport(transport, backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
// }
TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
transport=tmf.getTransport(transport, backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
return transport;
}
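The getTransport helper above hands the already opened transport back to TransportManagerFactory so it can be reused across operations; per the CHANGELOG entry in this compare, a new transport layer is instantiated only when the memory type changes. A minimal, self-contained sketch of that reuse-or-replace decision; the types below are illustrative stand-ins, not the real storage-manager-core classes.

public class TransportReuseSketch {

    enum MemoryType { PERSISTENT, VOLATILE }

    static class Transport {
        final MemoryType memoryType;
        Transport(MemoryType memoryType) { this.memoryType = memoryType; }
    }

    // return the existing transport when the memory type matches, otherwise build a new one
    static Transport getTransport(Transport current, MemoryType requested) {
        if (current != null && current.memoryType == requested) {
            return current;                  // reuse the already opened connection pool
        }
        return new Transport(requested);     // memory type changed: open a new transport
    }

    public static void main(String[] args) {
        Transport t1 = getTransport(null, MemoryType.PERSISTENT);
        Transport t2 = getTransport(t1, MemoryType.PERSISTENT);   // same instance reused
        Transport t3 = getTransport(t2, MemoryType.VOLATILE);     // new instance created
        System.out.println((t1 == t2) + " " + (t2 == t3));        // prints: true false
    }
}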

View File

@ -3,7 +3,6 @@ package org.gcube.contentmanagement.blobstorage.service.operation;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants;
import org.slf4j.Logger;
@ -25,8 +24,6 @@ public class Remove extends Operation{
}
public String doIt(MyFile myFile) throws RemoteBackendException{
// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
TransportManager tm=getTransport(myFile);
removeBucket(tm, bucket, myFile);
if (logger.isDebugEnabled()) {

View File

@ -5,7 +5,6 @@ import java.io.OutputStream;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -30,8 +29,6 @@ public class RenewTTL extends Operation {
@Override
public String doIt(MyFile myFile) throws RemoteBackendException {
// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
TransportManager tm=getTransport(myFile);
long ttl=-1;
try {

View File

@ -4,7 +4,6 @@ import org.bson.types.ObjectId;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -21,8 +20,6 @@ public class SetMetaInfo extends Operation {
}
public String doIt(MyFile myFile) throws RemoteBackendException{
// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
TransportManager tm=getTransport(myFile);
try {
tm.setFileProperty(bucket, myFile.getGenericPropertyField(), myFile.getGenericPropertyValue());

View File

@ -5,11 +5,9 @@ package org.gcube.contentmanagement.blobstorage.service.operation;
import java.net.UnknownHostException;
import org.bson.types.ObjectId;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.slf4j.Logger;
@ -35,21 +33,6 @@ public abstract class SoftCopy extends Operation {
}
public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) {
// if(remotePath != null){
// boolean isId=ObjectId.isValid(remotePath);
// setResource(file);
// if(!isId){
//// String[] dirs= remotePath.split(file_separator);
// if(logger.isDebugEnabled())
// logger.debug("remotePath: "+remotePath);
// String buck=null;
// buck = new BucketCoding().bucketFileCoding(remotePath, rootArea);
// return bucket=buck;
// }else{
// return bucket=remotePath;
// }
// }return bucket=null;//else throw new RemoteBackendException("argument cannot be null");
this.sourcePath=file.getLocalPath();
this.destinationPath=remotePath;
sourcePath = new BucketCoding().bucketFileCoding(file.getLocalPath(), rootArea);
@ -60,8 +43,6 @@ public abstract class SoftCopy extends Operation {
}
public String doIt(MyFile myFile) throws RemoteBackendException{
// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
TransportManager tm=getTransport(myFile);
String id=null;
try {
@ -84,20 +65,6 @@ public abstract class SoftCopy extends Operation {
destinationPath = new BucketCoding().bucketFileCoding(resource.getRemotePath(), rootArea);
setResource(resource);
return bucket=destinationPath;
// if(remotePath != null){
// boolean isId=ObjectId.isValid(remotePath);
// setResource(resource);
// if(!isId){
//// String[] dirs= remotePath.split(file_separator);
// if(logger.isDebugEnabled())
// logger.debug("remotePath: "+remotePath);
// String buck=null;
// buck = new BucketCoding().bucketFileCoding(remotePath, rootArea);
// return bucket=buck;
// }else{
// return bucket=remotePath;
// }
// }return bucket=null;//else throw new RemoteBackendException("argument cannot be null");
}
public abstract String execute(MongoIOManager mongoPrimaryInstance, MyFile resource, String sourcePath, String destinationPath) throws UnknownHostException;

View File

@ -5,7 +5,6 @@ import java.io.OutputStream;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.gcube.contentmanagement.blobstorage.transport.backend.operation.UploadOperator;
@ -43,8 +42,6 @@ public abstract class Unlock extends Operation {
// insert a parameter for the lock
objectId=put(upload, myFile, isChunk(), false, false, true);
} catch (Exception e) {
// TransportManagerFactory tmf=new TransportManagerFactory(server, user, password);
// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
TransportManager tm=getTransport(myFile);
tm.close();
throw new RemoteBackendException(" Error in unlock operation ", e.getCause());

View File

@ -7,7 +7,6 @@ import java.io.OutputStream;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory;
import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants;
@ -49,8 +48,6 @@ public abstract class Upload extends Operation {
try {
objectId=put(this, myFile, isChunk(), false, replaceOption, false);
} catch (Throwable e) {
// TransportManagerFactory tmf=new TransportManagerFactory(server, user, password);
// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
e.printStackTrace();
TransportManager tm=getTransport(myFile);
tm.close();

View File

@ -1,28 +0,0 @@
package org.gcube.contentmanagement.blobstorage.test;
import java.util.List;
import org.gcube.contentmanagement.blobstorage.service.IClient;
import org.gcube.contentmanagement.blobstorage.service.impl.ServiceEngine;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.gcube.contentmanagement.blobstorage.resource.StorageObject;
public class SimpleTest2 {
public static void main(String[] args) throws RemoteBackendException{
String[] server=new String[]{"146.48.123.73","146.48.123.74" };
IClient client=new ServiceEngine(server, "rcirillo", "cnr", "private", "rcirillo");
// String localFile="/home/rcirillo/FilePerTest/CostaRica.jpg";
String remoteFile="/img/shared9.jpg";
String newFile="/home/rcirillo/FilePerTest/repl4.jpg";
client.get().LFile(newFile).RFile(remoteFile);
List<StorageObject> list=client.showDir().RDir("/img/");
for(StorageObject obj : list){
System.out.println("obj found: "+obj.getName());
}
String uri=client.getUrl().RFile(remoteFile);
System.out.println(" uri file: "+uri);
}
}
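The commit "removed unused imports; deleted main test" drops the main-based SimpleTest2 shown above without adding a replacement. A hedged sketch of how the same calls could be kept as a JUnit 4 test instead; the hosts, credentials and paths are placeholders, and JUnit on the test classpath is an assumption, since this compare only adds slf4j-simple with test scope.

package org.gcube.contentmanagement.blobstorage.test;

import java.util.List;

import org.gcube.contentmanagement.blobstorage.resource.StorageObject;
import org.gcube.contentmanagement.blobstorage.service.IClient;
import org.gcube.contentmanagement.blobstorage.service.impl.ServiceEngine;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.junit.Test;

public class SimpleClientIT {

    // placeholder backend hosts; point these at a reachable test environment
    private final String[] server = new String[]{"mongo1.example.org", "mongo2.example.org"};

    @Test
    public void downloadListAndResolveUrl() throws RemoteBackendException {
        IClient client = new ServiceEngine(server, "testuser", "testgroup", "private", "testowner");
        try {
            // download a remote file to a local path, as SimpleTest2 did
            client.get().LFile("/tmp/repl4.jpg").RFile("/img/shared9.jpg");

            // list the remote directory
            List<StorageObject> list = client.showDir().RDir("/img/");
            for (StorageObject obj : list) {
                System.out.println("obj found: " + obj.getName());
            }

            // resolve a URL for the remote file
            String uri = client.getUrl().RFile("/img/shared9.jpg");
            System.out.println(" uri file: " + uri);
        } finally {
            // release backend connections through the restored close() method
            client.close();
        }
    }
}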

View File

@ -16,8 +16,6 @@ import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition;
import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.OPERATION;
import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.REMOTE_RESOURCE;
import org.gcube.contentmanagement.blobstorage.service.impl.ServiceEngine;
import org.gcube.contentmanagement.blobstorage.service.operation.Operation;
import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants;
import org.gcube.contentmanagement.blobstorage.transport.backend.util.DateUtils;
import org.gcube.contentmanagement.blobstorage.transport.backend.util.MongoInputStream;
@ -119,7 +117,7 @@ public class MongoIOManager {
logger.error("Problem to open the DB connection for gridfs file ");
throw new RemoteBackendException("Problem to open the DB connection: "+ e.getMessage());
}
logger.info("new mongo connection pool opened");
logger.info("mongo connection ready");
}
return db;

View File

@ -16,7 +16,6 @@ import java.util.Objects;
import org.gcube.contentmanagement.blobstorage.resource.MemoryType;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition;
import org.gcube.contentmanagement.blobstorage.service.impl.ServiceEngine;
import org.gcube.contentmanagement.blobstorage.service.operation.*;
import org.gcube.contentmanagement.blobstorage.transport.TransportManager;
import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager;

View File

@ -10,7 +10,6 @@ import java.util.List;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.OPERATION;
import org.gcube.contentmanagement.blobstorage.service.impl.ServiceEngine;
import org.gcube.contentmanagement.blobstorage.service.operation.CopyDir;
import org.gcube.contentmanagement.blobstorage.service.operation.Monitor;
import org.gcube.contentmanagement.blobstorage.transport.backend.MongoOperationManager;

View File

@ -12,7 +12,6 @@ import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.OPER
import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.REMOTE_RESOURCE;
import org.gcube.contentmanagement.blobstorage.service.operation.Link;
import org.gcube.contentmanagement.blobstorage.service.operation.Monitor;
import org.gcube.contentmanagement.blobstorage.service.operation.Operation;
import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager;
import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants;
import org.slf4j.Logger;

View File

@ -8,7 +8,6 @@ import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition;
import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.OPERATION;
import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.REMOTE_RESOURCE;
import org.gcube.contentmanagement.blobstorage.service.operation.Download;
import org.gcube.contentmanagement.blobstorage.service.operation.Lock;
import org.gcube.contentmanagement.blobstorage.service.operation.Monitor;
import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager;

View File

@ -11,7 +11,6 @@ import org.bson.types.ObjectId;
import org.gcube.contentmanagement.blobstorage.resource.MemoryType;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.OPERATION;
import org.gcube.contentmanagement.blobstorage.service.impl.ServiceEngine;
import org.gcube.contentmanagement.blobstorage.service.operation.Monitor;
import org.gcube.contentmanagement.blobstorage.service.operation.MoveDir;
import org.gcube.contentmanagement.blobstorage.transport.backend.MongoOperationManager;

View File

@ -9,7 +9,6 @@ import java.net.UnknownHostException;
import org.gcube.contentmanagement.blobstorage.resource.MemoryType;
import org.gcube.contentmanagement.blobstorage.resource.MyFile;
import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.OPERATION;
import org.gcube.contentmanagement.blobstorage.service.impl.ServiceEngine;
import org.gcube.contentmanagement.blobstorage.service.operation.Monitor;
import org.gcube.contentmanagement.blobstorage.service.operation.Move;
import org.gcube.contentmanagement.blobstorage.transport.backend.MongoOperationManager;

View File

@ -11,7 +11,6 @@ import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.OPER
import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.REMOTE_RESOURCE;
import org.gcube.contentmanagement.blobstorage.service.operation.Monitor;
import org.gcube.contentmanagement.blobstorage.service.operation.Unlock;
import org.gcube.contentmanagement.blobstorage.service.operation.Upload;
import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;