diff --git a/.classpath b/.classpath
index 5e8a55f..c67d98a 100644
--- a/.classpath
+++ b/.classpath
@@ -6,6 +6,11 @@
+
+
+
+
+
@@ -21,6 +26,7 @@
+
diff --git a/.gitignore b/.gitignore
index b83d222..d00ef39 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,4 @@
 /target/
+/.classpath
+/*.project
+/.settings
diff --git a/.project b/.project
index cd1394b..de28e07 100644
--- a/.project
+++ b/.project
@@ -5,6 +5,11 @@
+
+		org.eclipse.wst.common.project.facet.core.builder
+
+
+
 		org.eclipse.jdt.core.javabuilder
@@ -15,9 +20,17 @@
+
+		org.eclipse.wst.validation.validationbuilder
+
+
+
+	org.eclipse.jem.workbench.JavaEMFNature
+	org.eclipse.wst.common.modulecore.ModuleCoreNature
 	org.eclipse.jdt.core.javanature
 	org.eclipse.m2e.core.maven2Nature
+	org.eclipse.wst.common.project.facet.core.nature
diff --git a/.settings/org.eclipse.wst.common.component b/.settings/org.eclipse.wst.common.component
new file mode 100644
index 0000000..a17d9d6
--- /dev/null
+++ b/.settings/org.eclipse.wst.common.component
@@ -0,0 +1,6 @@
+
+
+
+
+
+
diff --git a/.settings/org.eclipse.wst.common.project.facet.core.xml b/.settings/org.eclipse.wst.common.project.facet.core.xml
new file mode 100644
index 0000000..fb95c45
--- /dev/null
+++ b/.settings/org.eclipse.wst.common.project.facet.core.xml
@@ -0,0 +1,5 @@
+
+
+
+
+
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 352def5..345f916 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,25 +3,18 @@
 ## [v4.0.0-SNAPSHOT]
 * compatible with s3 storage backend
-## [v2.13.0-SNAPSHOT]
+## [v3.0.0] 2021-09-10
+ * fix #22164
+ * fix #21980
+ * update gcube-bom version
 * add close operation on IClient interface
-
-## [v2.12.1-SNAPSHOT]
 * add check on transport layer instance: if the memory type is not the same, a new transportLayer is instatiated
 * move memoryType var from super class TransportManager
 * convert BasicDBObject to DBObject the return type used for metadata collections
-
-## [v2.12.0-SNAPSHOT]
 * One pool for every operation: static Operation class; no mongo close operation
-
-## [v2.10.0-SNAPSHOT]
 * upgrade mongo-java-driver to 3.12.0
-
-## [v2.11.0-SNAPSHOT]
- * upgrade mongo-java-driver to 3.12.0
-
-## [v2.10.0-SNAPSHOT]
 * added input parameter to getSize method in order to be compatible with the needed of s3 client
+ * moved from version 2.13.1 to 3.0.0-SNAPSHOT
 ## [v2.9.0] 2019-10-19
 * SSL enabled
diff --git a/pom.xml b/pom.xml
index c60f517..79d11ed 100644
--- a/pom.xml
+++ b/pom.xml
@@ -23,7 +23,7 @@
 				org.gcube.distribution
 				gcube-bom
-				1.4.0
+				2.0.1
 				pom
 				import
diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/resource/RequestObject.java b/src/main/java/org/gcube/contentmanagement/blobstorage/resource/MyFile.java
similarity index 96%
rename from src/main/java/org/gcube/contentmanagement/blobstorage/resource/RequestObject.java
rename to src/main/java/org/gcube/contentmanagement/blobstorage/resource/MyFile.java
index f1bd800..e1a7dc9 100644
--- a/src/main/java/org/gcube/contentmanagement/blobstorage/resource/RequestObject.java
+++ b/src/main/java/org/gcube/contentmanagement/blobstorage/resource/MyFile.java
@@ -19,7 +19,7 @@ import org.slf4j.LoggerFactory;
  * @author Roberto Cirillo (ISTI - CNR)
  *
  */
-public class RequestObject {
+public class MyFile {
 	// file name
 	private String name;
@@ -95,10 +95,10 @@ public class RequestObject {
-	final Logger logger = LoggerFactory.getLogger(RequestObject.class);
+	final Logger logger = LoggerFactory.getLogger(MyFile.class);
-	public RequestObject(boolean lock){
+	public MyFile(boolean lock){
 		setLock(lock);
 	}
@@ -108,7 +108,7 @@ public class RequestObject {
 	 * @param name name of the file
 	 * @param pathClient local path of the file
 	 */
-	public RequestObject(String author, String name, String pathClient, MemoryType memoryType){
+	public MyFile(String author, String name, String pathClient, MemoryType memoryType){
 		this.setOwner(author);
 		this.setName(name);
 		this.setLocalPath(pathClient);
@@ -123,7 +123,7 @@ public class RequestObject {
 	 * @param pathServer remote path of the file
 	 */
-	public RequestObject(String author, String name, String pathClient, String pathServer, MemoryType memoryType){
+	public MyFile(String author, String name, String pathClient, String pathServer, MemoryType memoryType){
 		this.setOwner(author);
 		this.setName(name);
 		this.setLocalPath(pathClient);
@@ -131,7 +131,7 @@ public class RequestObject {
 		setGcubeMemoryType(memoryType);
 	}
-	public RequestObject(MemoryType memoryType) {
+	public MyFile(MemoryType memoryType) {
 		setGcubeMemoryType(memoryType);
 	}
@@ -140,7 +140,7 @@ public class RequestObject {
 	 * build a new object with only the name setted
 	 * @param name file name
 	 */
-	public RequestObject(String name, MemoryType memoryType){
+	public MyFile(String name, MemoryType memoryType){
 		setName(name);
 		setGcubeMemoryType(memoryType);
 	}
@@ -245,8 +245,8 @@ public class RequestObject {
 	 * returns a copy of the current resource
 	 * @return the file copy
 	 */
-	public RequestObject copyProperties(){
-		RequestObject dest=new RequestObject(getGcubeMemoryType());
+	public MyFile copyProperties(){
+		MyFile dest=new MyFile(getGcubeMemoryType());
 		dest.setOwner(getOwner());
 		dest.setLocalDir(this.getLocalDir());
 		dest.setRemoteDir(this.getRemoteDir());
diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/resource/OperationDefinition.java b/src/main/java/org/gcube/contentmanagement/blobstorage/resource/OperationDefinition.java
index d714d0f..a5e4cf4 100644
--- a/src/main/java/org/gcube/contentmanagement/blobstorage/resource/OperationDefinition.java
+++ b/src/main/java/org/gcube/contentmanagement/blobstorage/resource/OperationDefinition.java
@@ -16,7 +16,7 @@ package org.gcube.contentmanagement.blobstorage.resource;
  *
  * It means that the client would be upload a file that have an absolute local path defined in pathClient field,
  * on the remote location identifies by pathServer field of the resource MyFile
- * @see org.gcube.contentmanagement.blobstorage.resource.RequestObject
+ * @see org.gcube.contentmanagement.blobstorage.resource.MyFile
  *
  * @author Roberto Cirillo (ISTI-CNR)
  *
diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/IClient.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/IClient.java
index b005070..a2061a1 100644
--- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/IClient.java
+++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/IClient.java
@@ -1,6 +1,7 @@
 package org.gcube.contentmanagement.blobstorage.service;
+import org.gcube.contentmanagement.blobstorage.resource.MemoryType;
 import org.gcube.contentmanagement.blobstorage.service.impl.AmbiguousResource;
 import org.gcube.contentmanagement.blobstorage.service.impl.LocalResource;
 import org.gcube.contentmanagement.blobstorage.service.impl.RemoteResource;
@@ -264,4 +265,6 @@
 public abstract RemoteResourceBoolean exist();
 public abstract RemoteResourceBoolean exist(String backendType);
+public MemoryType getGcubeMemoryType();
+
 }
\ No newline at end of file
diff --git
a/src/main/java/org/gcube/contentmanagement/blobstorage/service/directoryOperation/DirectoryBucket.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/directoryOperation/DirectoryBucket.java index 14a957f..02f3cef 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/directoryOperation/DirectoryBucket.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/directoryOperation/DirectoryBucket.java @@ -5,7 +5,7 @@ import java.net.UnknownHostException; import java.util.Iterator; import java.util.Map; import java.util.Set; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.resource.StorageObject; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; @@ -92,7 +92,7 @@ public class DirectoryBucket { * @param bucket remote file to remove */ @Deprecated - public void removeKeysOnDirBucket(RequestObject resource, String bucket, String rootArea, String backendType, String[] dbNames){ + public void removeKeysOnDirBucket(MyFile resource, String bucket, String rootArea, String backendType, String[] dbNames){ if(logger.isDebugEnabled()) logger.debug("CHECK REMOVE: "+bucket); String[] bucketList=null; @@ -121,7 +121,7 @@ public class DirectoryBucket { * remove a remote directory and all the files that the remote directory contains * @param bucket */ - public String removeDirBucket(RequestObject resource, String bucket, String rootArea, String backendType, String[] dbNames){ + public String removeDirBucket(MyFile resource, String bucket, String rootArea, String backendType, String[] dbNames){ if(logger.isDebugEnabled()) logger.debug("CHECK REMOVE: "+bucket); String[] bucketList=null; @@ -211,7 +211,7 @@ public class DirectoryBucket { * @param bucketCoded bucketName coded * @param tm a client for the cluster */ - public String searchInBucket(RequestObject resource, String name, String bucketCoded, + public String searchInBucket(MyFile resource, String name, String bucketCoded, TransportManager tm, String rootArea) { Map dirs=null; try{ diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/directoryOperation/DirectoryEntity.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/directoryOperation/DirectoryEntity.java index 2508483..bab7704 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/directoryOperation/DirectoryEntity.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/directoryOperation/DirectoryEntity.java @@ -1,6 +1,6 @@ package org.gcube.contentmanagement.blobstorage.service.directoryOperation; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; /** @@ -38,7 +38,7 @@ public class DirectoryEntity { setAuthor(author); } - public DirectoryEntity(String dir, String author, RequestObject file){ + public DirectoryEntity(String dir, String author, MyFile file){ setDirectory(dir); setAuthor(author); } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/AmbiguousResource.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/AmbiguousResource.java index eda5a1d..9f21fd4 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/AmbiguousResource.java +++ 
b/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/AmbiguousResource.java @@ -1,6 +1,6 @@ package org.gcube.contentmanagement.blobstorage.service.impl; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.LOCAL_RESOURCE; /** * This class is used from methods that can have both a RemoteResource or a LocalResource @@ -10,7 +10,7 @@ import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.LOCA */ public class AmbiguousResource extends RemoteResource { - public AmbiguousResource(RequestObject file, ServiceEngine engine) { + public AmbiguousResource(MyFile file, ServiceEngine engine) { super(file, engine); } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/LocalResource.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/LocalResource.java index 9c02ed5..c37e740 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/LocalResource.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/LocalResource.java @@ -2,7 +2,7 @@ package org.gcube.contentmanagement.blobstorage.service.impl; import java.io.InputStream; import java.io.OutputStream; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.LOCAL_RESOURCE; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.REMOTE_RESOURCE; @@ -17,7 +17,7 @@ import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.REMO public class LocalResource extends Resource{ - public LocalResource(RequestObject file, ServiceEngine engine) { + public LocalResource(MyFile file, ServiceEngine engine) { super(file, engine); } @@ -46,7 +46,7 @@ public class LocalResource extends Resource{ if(getMyFile() != null){ getMyFile().setInputStream(is); }else{ - setMyFile(new RequestObject(engine.getGcubeMemoryType())); + setMyFile(new MyFile(engine.getGcubeMemoryType())); getMyFile().setInputStream(is); } getMyFile().setLocalResource(LOCAL_RESOURCE.INPUT_STREAM); @@ -62,7 +62,7 @@ public class LocalResource extends Resource{ if(getMyFile() != null){ getMyFile().setOutputStream(os); }else{ - setMyFile(new RequestObject(engine.getGcubeMemoryType())); + setMyFile(new MyFile(engine.getGcubeMemoryType())); getMyFile().setOutputStream(os); } getMyFile().setLocalResource(LOCAL_RESOURCE.OUTPUT_STREAM); diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResource.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResource.java index 4eec85d..092a85b 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResource.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResource.java @@ -4,11 +4,10 @@ import java.net.UnknownHostException; import java.util.Collections; import java.util.List; import java.util.Map; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.DirectoryBucket; import 
org.gcube.contentmanagement.blobstorage.service.directoryOperation.DirectoryEntity; -import org.gcube.contentmanagement.blobstorage.service.operation.OperationManager; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; @@ -29,7 +28,7 @@ public class RemoteResource extends Resource{ TransportManager tm; - public RemoteResource(RequestObject file, ServiceEngine engine) { + public RemoteResource(MyFile file, ServiceEngine engine) { super(file, engine); logger.info("file gCube parameter costructor: "+file.getGcubeAccessType()+" "+file.getGcubeScope()); } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResourceBoolean.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResourceBoolean.java index 1155ce3..1daf672 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResourceBoolean.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResourceBoolean.java @@ -3,7 +3,7 @@ */ package org.gcube.contentmanagement.blobstorage.service.impl; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.REMOTE_RESOURCE; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; @@ -17,7 +17,7 @@ public class RemoteResourceBoolean extends Resource{ * @param file * @param engine */ - public RemoteResourceBoolean(RequestObject file, ServiceEngine engine) { + public RemoteResourceBoolean(MyFile file, ServiceEngine engine) { super(file, engine); logger.info("file gCube parameter costructor: "+file.getGcubeAccessType()+" "+file.getGcubeScope()); } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResourceComplexInfo.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResourceComplexInfo.java index bf5bdbf..3bc370b 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResourceComplexInfo.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResourceComplexInfo.java @@ -1,6 +1,6 @@ package org.gcube.contentmanagement.blobstorage.service.impl; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.REMOTE_RESOURCE; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; @@ -12,7 +12,7 @@ import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendEx public class RemoteResourceComplexInfo extends Resource{ - public RemoteResourceComplexInfo(RequestObject file, ServiceEngine engine) { + public RemoteResourceComplexInfo(MyFile file, ServiceEngine engine) { super(file, engine); } @@ -23,7 +23,7 @@ public class RemoteResourceComplexInfo extends Resource{ * @throws RemoteBackendException if there are runtime exception from the remote backend */ - public RequestObject RFile(String path) throws RemoteBackendException{ + public MyFile RFile(String path) throws RemoteBackendException{ setMyFile(setGenericProperties(engine.getContext(), engine.owner, path, "remote")); getMyFile().setRemotePath(path); 
getMyFile().setRemoteResource(REMOTE_RESOURCE.PATH); diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResourceDestination.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResourceDestination.java index 999a061..3ae2470 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResourceDestination.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResourceDestination.java @@ -1,7 +1,7 @@ package org.gcube.contentmanagement.blobstorage.service.impl; import org.bson.types.ObjectId; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.REMOTE_RESOURCE; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; @@ -12,7 +12,7 @@ import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendEx */ public class RemoteResourceDestination extends Resource{ - public RemoteResourceDestination(RequestObject file, ServiceEngine engine) { + public RemoteResourceDestination(MyFile file, ServiceEngine engine) { super(file, engine); } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResourceFolderInfo.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResourceFolderInfo.java index 4783385..d151d2e 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResourceFolderInfo.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResourceFolderInfo.java @@ -1,6 +1,6 @@ package org.gcube.contentmanagement.blobstorage.service.impl; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.REMOTE_RESOURCE; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; @@ -18,7 +18,7 @@ public class RemoteResourceFolderInfo extends Resource { private String gcubeAccessType; private String gcubeMemoryType; - public RemoteResourceFolderInfo(RequestObject file, ServiceEngine engine) { + public RemoteResourceFolderInfo(MyFile file, ServiceEngine engine) { super(file, engine); } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResourceInfo.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResourceInfo.java index 6e5df68..6297d2b 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResourceInfo.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResourceInfo.java @@ -1,6 +1,6 @@ package org.gcube.contentmanagement.blobstorage.service.impl; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.REMOTE_RESOURCE; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; @@ -14,7 +14,7 @@ import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendEx public class RemoteResourceInfo extends Resource{ - public RemoteResourceInfo(RequestObject file, ServiceEngine engine) { + public RemoteResourceInfo(MyFile file, ServiceEngine engine) { super(file, engine); } diff --git 
a/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResourceSource.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResourceSource.java index b17ea16..345f419 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResourceSource.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/RemoteResourceSource.java @@ -1,7 +1,7 @@ package org.gcube.contentmanagement.blobstorage.service.impl; import org.bson.types.ObjectId; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.LOCAL_RESOURCE; /** @@ -11,7 +11,7 @@ import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.LOCA */ public class RemoteResourceSource extends Resource { - public RemoteResourceSource(RequestObject file, ServiceEngine engine) { + public RemoteResourceSource(MyFile file, ServiceEngine engine) { super(file, engine); } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/Resource.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/Resource.java index 51cbbfb..e48c901 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/Resource.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/Resource.java @@ -1,7 +1,7 @@ package org.gcube.contentmanagement.blobstorage.service.impl; import org.bson.types.ObjectId; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants; @@ -19,11 +19,11 @@ public class Resource { final Logger logger = LoggerFactory.getLogger(ServiceEngine.class); protected static final String BACKEND_STRING_SEPARATOR="%"; - protected RequestObject file; + protected MyFile file; protected ServiceEngine engine; - public Resource(RequestObject file, ServiceEngine engine){ + public Resource(MyFile file, ServiceEngine engine){ setMyFile(file); setEngine(engine); } @@ -36,11 +36,11 @@ public class Resource { this.engine = engine; } - protected RequestObject getMyFile(){ + protected MyFile getMyFile(){ return file; } - protected void setMyFile(RequestObject f){ + protected void setMyFile(MyFile f){ if (f!=null) file=f; else @@ -56,11 +56,11 @@ public class Resource { * @param type remote or local * @return the current resource */ - protected RequestObject setGenericProperties(String context, String owner, String path, String type) { + protected MyFile setGenericProperties(String context, String owner, String path, String type) { if((path != null) && (path.length()>0)){ if(ObjectId.isValid(path)){ if(file==null) - file= new RequestObject(path, engine.getGcubeMemoryType()); + file= new MyFile(path, engine.getGcubeMemoryType()); String id = file.getId(); if((id != null) && (!id.isEmpty())) file.setId2(path); @@ -75,7 +75,7 @@ public class Resource { logger.debug("path(String) - name: " + name); } if(file == null){ - file= new RequestObject(name, engine.getGcubeMemoryType()); + file= new MyFile(name, engine.getGcubeMemoryType()); }else{ file.setName(name); } @@ -107,7 +107,7 @@ public class Resource { return file; } - 
protected Object getRemoteObject(RequestObject file, String[] backend, String[] vltBackend)throws RemoteBackendException { + protected Object getRemoteObject(MyFile file, String[] backend, String[] vltBackend)throws RemoteBackendException { Object obj=null; try{ obj=retrieveRemoteObject(file, backend); @@ -122,7 +122,7 @@ public class Resource { return obj; } - protected Object retrieveRemoteObject(RequestObject file, String[] backend) throws RemoteBackendException { + protected Object retrieveRemoteObject(MyFile file, String[] backend) throws RemoteBackendException { Object obj=null; if(((file.getInputStream() != null) || (file.getOutputStream()!=null)) || ((file.getLocalPath() != null) || (file.getRemotePath() != null))) obj=engine.service.startOperation(file,file.getRemotePath(), file.getOwner(), backend, Costants.DEFAULT_CHUNK_OPTION, file.getRootPath(), file.isReplace(), engine.getRegion(), engine.getToken()); diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/ServiceEngine.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/ServiceEngine.java index 907851f..0f259e0 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/ServiceEngine.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/impl/ServiceEngine.java @@ -1,6 +1,5 @@ package org.gcube.contentmanagement.blobstorage.service.impl; -import java.io.UnsupportedEncodingException; import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -9,7 +8,7 @@ import java.util.Set; import org.gcube.contentmanagement.blobstorage.resource.AccessType; import org.gcube.contentmanagement.blobstorage.resource.MemoryType; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.OPERATION; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.REMOTE_RESOURCE; import org.gcube.contentmanagement.blobstorage.resource.StorageObject; @@ -18,7 +17,6 @@ import org.gcube.contentmanagement.blobstorage.service.directoryOperation.Bucket import org.gcube.contentmanagement.blobstorage.service.directoryOperation.Encrypter; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.Encrypter.EncryptionException; import org.gcube.contentmanagement.blobstorage.service.operation.*; -import org.gcube.contentmanagement.blobstorage.transport.TransportManager; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants; import org.slf4j.Logger; @@ -45,7 +43,7 @@ public class ServiceEngine implements IClient { public String[] primaryBackend; public String[] volatileBackend; protected OperationManager service; - protected RequestObject file; + protected MyFile file; protected String bucket; protected String bucketID; protected String author; @@ -414,7 +412,7 @@ public class ServiceEngine implements IClient { @Override public RemoteResource remove(String backendType){ backendType=setBackendType(backendType); - file=new RequestObject(getGcubeMemoryType()); + file=new MyFile(getGcubeMemoryType()); file.setGcubeAccessType(this.getGcubeAccessType()); file.setGcubeScope(this.getGcubeScope()); file.setOwnerGcube(this.getOwnerGcube()); @@ -428,11 +426,11 @@ public class ServiceEngine implements IClient { } - public RequestObject getMyFile() { + public MyFile getMyFile() { 
return file; } - public void setMyFile(RequestObject myFile) { + public void setMyFile(MyFile myFile) { this.file = myFile; } @@ -459,7 +457,7 @@ public class ServiceEngine implements IClient { @Override public RemoteResource showDir(String backendType){ backendType=setBackendType(backendType); - file=new RequestObject(this.getGcubeMemoryType()); + file=new MyFile(this.getGcubeMemoryType()); file.setGcubeAccessType(this.getGcubeAccessType()); file.setGcubeScope(this.getGcubeScope()); file.setOwnerGcube(this.getOwnerGcube()); @@ -497,7 +495,7 @@ public class ServiceEngine implements IClient { @Override public RemoteResource removeDir(String backendType){ backendType=setBackendType(backendType); - file=new RequestObject(this.getGcubeMemoryType()); + file=new MyFile(this.getGcubeMemoryType()); file.setGcubeAccessType(this.getGcubeAccessType()); file.setGcubeScope(this.getGcubeScope()); file.setOwnerGcube(this.getOwnerGcube()); @@ -529,7 +527,7 @@ public class ServiceEngine implements IClient { @Override public RemoteResource getUrl(String backendType, boolean forceCreation){ backendType=setBackendType(backendType); - file=new RequestObject(this.getGcubeMemoryType()); + file=new MyFile(this.getGcubeMemoryType()); file.setGcubeAccessType(this.getGcubeAccessType()); file.setGcubeScope(this.getGcubeScope()); file.setOwnerGcube(this.getOwnerGcube()); @@ -564,7 +562,7 @@ public class ServiceEngine implements IClient { @Override public RemoteResource getHttpUrl(String backendType, boolean forceCreation){ backendType=setBackendType(backendType); - file=new RequestObject(this.getGcubeMemoryType()); + file=new MyFile(this.getGcubeMemoryType()); file.setGcubeAccessType(this.getGcubeAccessType()); file.setGcubeScope(this.getGcubeScope()); file.setOwnerGcube(this.getOwnerGcube()); @@ -601,7 +599,7 @@ public class ServiceEngine implements IClient { @Override public RemoteResource getHttpsUrl(String backendType, boolean forceCreation){ backendType=setBackendType(backendType); - file=new RequestObject(this.getGcubeMemoryType()); + file=new MyFile(this.getGcubeMemoryType()); file.setGcubeAccessType(this.getGcubeAccessType()); file.setGcubeScope(this.getGcubeScope()); file.setOwnerGcube(this.getOwnerGcube()); @@ -674,7 +672,7 @@ public class ServiceEngine implements IClient { public AmbiguousResource lock(String backendType) { backendType=setBackendType(backendType); - file = new RequestObject(true); + file = new MyFile(true); setCurrentOperation("lock"); this.service=new OperationManager(primaryBackend, user, password, getCurrentOperation(), file, backendType, getDbNames(), getToken()); file=setOperationInfo(file, OPERATION.LOCK); @@ -690,7 +688,7 @@ public class ServiceEngine implements IClient { @Override public AmbiguousResource unlock(String key, String backendType) { backendType=setBackendType(backendType); - file=new RequestObject(this.getGcubeMemoryType()); + file=new MyFile(this.getGcubeMemoryType()); file.setLockedKey(key); // put(true); setCurrentOperation("unlock"); @@ -708,7 +706,7 @@ public class ServiceEngine implements IClient { @Override public RemoteResourceInfo getTTL(String backendType) { backendType=setBackendType(backendType); - file=new RequestObject(this.getGcubeMemoryType()); + file=new MyFile(this.getGcubeMemoryType()); // put(true); setCurrentOperation("getTTL"); file=setOperationInfo(file, OPERATION.GET_TTL); @@ -726,7 +724,7 @@ public class ServiceEngine implements IClient { @Override public RemoteResource getMetaInfo(String field, String backendType) { 
backendType=setBackendType(backendType); - file=new RequestObject(this.getGcubeMemoryType()); + file=new MyFile(this.getGcubeMemoryType()); file.setGenericPropertyField(field); setCurrentOperation("getMetaInfo"); file=setOperationInfo(file, OPERATION.GET_META_INFO); @@ -742,7 +740,7 @@ public class ServiceEngine implements IClient { @Override public RemoteResource setMetaInfo(String field, String value, String backendType) { backendType=setBackendType(backendType); - file=new RequestObject(this.getGcubeMemoryType()); + file=new MyFile(this.getGcubeMemoryType()); file.setGenericPropertyField(field); file.setGenericPropertyValue(value); setCurrentOperation("setMetaInfo"); @@ -759,7 +757,7 @@ public class ServiceEngine implements IClient { @Override public RemoteResourceInfo renewTTL(String key, String backendType) { backendType=setBackendType(backendType); - file=new RequestObject(this.getGcubeMemoryType()); + file=new MyFile(this.getGcubeMemoryType()); file.setLockedKey(key); // put(true); setCurrentOperation("renewTTL"); @@ -1001,9 +999,9 @@ public class ServiceEngine implements IClient { // this.gcubeMemoryType = gcubeMemoryType; } - private RequestObject setOperationInfo(RequestObject file, OPERATION op) { + private MyFile setOperationInfo(MyFile file, OPERATION op) { if(file==null) - file=new RequestObject(this.getGcubeMemoryType()); + file=new MyFile(this.getGcubeMemoryType()); file.setOperation(op); if(getWriteConcern() != null) file.setWriteConcern(getWriteConcern()); @@ -1016,9 +1014,9 @@ public class ServiceEngine implements IClient { return file; } - private RequestObject setMimeType(RequestObject file, String mime) { + private MyFile setMimeType(MyFile file, String mime) { if(file==null) - file=new RequestObject(this.getGcubeMemoryType()); + file=new MyFile(this.getGcubeMemoryType()); file.setMimeType(mime); return file; } @@ -1097,7 +1095,7 @@ public class ServiceEngine implements IClient { public RemoteResource getRemotePath(){ backendType=setBackendType(backendType); - file=new RequestObject(this.getGcubeMemoryType()); + file=new MyFile(this.getGcubeMemoryType()); // put(true); setCurrentOperation("getRemotePath"); file=setOperationInfo(file, OPERATION.GET_REMOTE_PATH); diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/ChunkConsumer.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/ChunkConsumer.java index c9d68ef..151a675 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/ChunkConsumer.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/ChunkConsumer.java @@ -1,6 +1,6 @@ package org.gcube.contentmanagement.blobstorage.service.operation; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.operation.UploadOperator; @@ -29,7 +29,7 @@ public class ChunkConsumer implements Runnable { boolean isChunk=false; String[] dbNames; public static ThreadLocal client=new ThreadLocal(); - public static ThreadLocal resource=new ThreadLocal(); + public static ThreadLocal resource=new ThreadLocal(); private boolean replaceOpt; Thread producer; @@ -37,7 +37,7 @@ public class ChunkConsumer implements Runnable { if (logger.isDebugEnabled()) { logger.debug("run() - 
start"); } - RequestObject request = null; + MyFile request = null; synchronized (ChunkConsumer.class) { request=monitor.getRequest(); resource.set(request); @@ -49,7 +49,7 @@ public class ChunkConsumer implements Runnable { } } - private void connection(RequestObject richiesta) { + private void connection(MyFile richiesta) { if (logger.isDebugEnabled()) { logger.debug("connection(MyFile) - start"); } @@ -108,7 +108,7 @@ public class ChunkConsumer implements Runnable { return server; } - private void putInTerrastore(RequestObject myFile) { + private void putInTerrastore(MyFile myFile) { if (logger.isDebugEnabled()) { logger.debug("putInTerrastore(MyFile) - start"); } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/ChunkProducer.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/ChunkProducer.java index dc6df35..10c4d99 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/ChunkProducer.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/ChunkProducer.java @@ -9,7 +9,7 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import org.apache.commons.io.IOUtils; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -22,7 +22,7 @@ import org.slf4j.LoggerFactory; */ public class ChunkProducer implements Runnable{ - RequestObject resource; + MyFile resource; long dimensionChunk; int totChunks; int nThreads; @@ -31,7 +31,7 @@ public class ChunkProducer implements Runnable{ String bucketName; final Logger logger=LoggerFactory.getLogger(ChunkProducer.class); - public ChunkProducer(Monitor monitor, RequestObject resource, long dimensionChunk, int totChunks, + public ChunkProducer(Monitor monitor, MyFile resource, long dimensionChunk, int totChunks, int nThreads, String bucket, ChunkConsumer consumer ) throws FileNotFoundException{ this.resource=resource; this.dimensionChunk=dimensionChunk; @@ -77,7 +77,7 @@ public class ChunkProducer implements Runnable{ //---- creo i task e li invio al thread-pool ---- String key= getBucketName()+i; resource.setKey(key); - RequestObject copy=resource.copyProperties(); + MyFile copy=resource.copyProperties(); copy.setContent(chunk); if(logger.isDebugEnabled()){ logger.debug("request in queue: "+key); diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Copy.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Copy.java index 6f4026f..dd4ac91 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Copy.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Copy.java @@ -1,10 +1,9 @@ package org.gcube.contentmanagement.blobstorage.service.operation; import java.net.UnknownHostException; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; -import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager; 
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.slf4j.Logger; @@ -19,12 +18,12 @@ public abstract class Copy extends Operation{ final Logger logger=LoggerFactory.getLogger(Copy.class); protected String sourcePath; protected String destinationPath; - protected RequestObject resource; + protected MyFile resource; public Copy(String[] server, String user, String pwd, String bucket, Monitor monitor, boolean isChunk, String backendType, String[] dbs) { super(server, user, pwd, bucket, monitor, isChunk, backendType, dbs); } - public String initOperation(RequestObject file, String remotePath, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { this.sourcePath=file.getLocalPath(); this.destinationPath=remotePath; @@ -37,9 +36,7 @@ public abstract class Copy extends Operation{ } - public String doIt(RequestObject myFile) throws RemoteBackendException{ -// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password); -// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference()); + public String doIt(MyFile myFile) throws RemoteBackendException{ TransportManager tm=getTransport(myFile); String id=null; try { @@ -55,7 +52,7 @@ public abstract class Copy extends Operation{ @Override - public String initOperation(RequestObject resource, String remotePath, + public String initOperation(MyFile resource, String remotePath, String author, String[] server, String rootArea) { // For terrastore, the name of bucket is formed: path_____fileName_____author this.sourcePath=resource.getLocalPath(); @@ -70,12 +67,12 @@ public abstract class Copy extends Operation{ // public abstract String execute(MongoIO mongoPrimaryInstance) throws UnknownHostException; - public abstract String execute(MongoIOManager mongoPrimaryInstance, RequestObject resource, String sourcePath, String destinationPath) throws UnknownHostException; - public RequestObject getResource() { + public abstract String execute(MongoIOManager mongoPrimaryInstance, MyFile resource, String sourcePath, String destinationPath) throws UnknownHostException; + public MyFile getResource() { return resource; } - public void setResource(RequestObject resource) { + public void setResource(MyFile resource) { this.resource = resource; } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/CopyDir.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/CopyDir.java index 487a5ad..9f414a7 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/CopyDir.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/CopyDir.java @@ -2,10 +2,9 @@ package org.gcube.contentmanagement.blobstorage.service.operation; import java.net.UnknownHostException; import java.util.List; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; -import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import 
org.slf4j.Logger; @@ -23,13 +22,13 @@ public abstract class CopyDir extends Operation{ final Logger logger=LoggerFactory.getLogger(Download.class); private String sourcePath; private String destinationPath; - private RequestObject resource; + private MyFile resource; public CopyDir(String[] server, String user, String pwd, String bucket, Monitor monitor, boolean isChunk, String backendType, String[] dbs) { super(server, user, pwd, bucket, monitor, isChunk, backendType, dbs); } - public String initOperation(RequestObject file, String remotePath, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { this.sourcePath=file.getLocalPath(); this.destinationPath=remotePath; @@ -40,7 +39,7 @@ public abstract class CopyDir extends Operation{ } - public String doIt(RequestObject myFile) throws RemoteBackendException{ + public String doIt(MyFile myFile) throws RemoteBackendException{ TransportManager tm = getTransport(myFile); List ids=null; try { @@ -58,7 +57,7 @@ public abstract class CopyDir extends Operation{ @Override - public String initOperation(RequestObject resource, String remotePath, + public String initOperation(MyFile resource, String remotePath, String author, String[] server, String rootArea) { // DirectoryBucket dirBuc=new DirectoryBucket(server, user, password, remotePath, author); // For terrastore, the name of bucket is formed: path_____fileName_____author @@ -71,7 +70,7 @@ public abstract class CopyDir extends Operation{ return bucket=destinationPath; } - public abstract List execute(MongoIOManager mongoPrimaryInstance, RequestObject resource, String sourcePath, String destinationPath) throws UnknownHostException; + public abstract List execute(MongoIOManager mongoPrimaryInstance, MyFile resource, String sourcePath, String destinationPath) throws UnknownHostException; public String getSourcePath() { return sourcePath; @@ -89,11 +88,11 @@ public abstract class CopyDir extends Operation{ this.destinationPath = destinationPath; } - public RequestObject getResource() { + public MyFile getResource() { return resource; } - public void setResource(RequestObject resource) { + public void setResource(MyFile resource) { this.resource = resource; } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Download.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Download.java index 8a15072..e4b50b9 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Download.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Download.java @@ -1,10 +1,9 @@ package org.gcube.contentmanagement.blobstorage.service.operation; import org.bson.types.ObjectId; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; -import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.slf4j.Logger; @@ -27,13 +26,13 @@ public abstract class Download extends Operation{ protected String localPath; protected String remotePath; protected OutputStream os; - protected RequestObject resource; + 
protected MyFile resource; public Download(String[] server, String user, String pwd, String bucket, Monitor monitor, boolean isChunk, String backendType, String[] dbs) { super(server, user, pwd, bucket, monitor, isChunk, backendType, dbs); } - public String initOperation(RequestObject file, String remotePath, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { this.localPath=file.getLocalPath(); this.remotePath=remotePath; @@ -41,7 +40,7 @@ public abstract class Download extends Operation{ return getRemoteIdentifier(remotePath, rootArea); } - public String doIt(RequestObject myFile) throws RemoteBackendException{ + public String doIt(MyFile myFile) throws RemoteBackendException{ String id=null; if (logger.isDebugEnabled()) { logger.debug(" DOWNLOAD " + myFile.getRemotePath() @@ -64,7 +63,7 @@ public abstract class Download extends Operation{ @Override - public String initOperation(RequestObject resource, String remotePath, + public String initOperation(MyFile resource, String remotePath, String author, String[] server, String rootArea) { // DirectoryBucket dirBuc=new DirectoryBucket(server, getUser(), getPassword(), remotePath, author); // For terrastore, the name of bucket is formed: path_____fileName_____author @@ -77,11 +76,11 @@ public abstract class Download extends Operation{ public abstract ObjectId execute(MongoIOManager mongoPrimaryInstance, MongoIOManager mongoSecondaryInstance) throws IOException; - public RequestObject getResource() { + public MyFile getResource() { return resource; } - public void setResource(RequestObject resource) { + public void setResource(MyFile resource) { this.resource = resource; } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/DownloadAndLock.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/DownloadAndLock.java new file mode 100644 index 0000000..57f8aaf --- /dev/null +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/DownloadAndLock.java @@ -0,0 +1,65 @@ +package org.gcube.contentmanagement.blobstorage.service.operation; + +import java.io.OutputStream; + +import org.gcube.contentmanagement.blobstorage.resource.MyFile; +import org.gcube.contentmanagement.blobstorage.transport.TransportManager; +import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; +import org.gcube.contentmanagement.blobstorage.transport.backend.operation.DownloadOperator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class DownloadAndLock extends Operation { + + final Logger logger=LoggerFactory.getLogger(Download.class); + private String localPath; + private String remotePath; + private OutputStream os; +/** + * @deprecated + * @param server + * @param bucket + * @param monitor + * @param isChunk + * + */ + public DownloadAndLock(String[] server, String user, String pwd, String bucket, Monitor monitor, boolean isChunk, String backendType, String[] dbs) { + // TODO Auto-generated constructor stub + super(server, user, pwd, bucket, monitor, isChunk, backendType, dbs); + } + + @Override + public String doIt(MyFile myFile) throws RemoteBackendException { + if (logger.isDebugEnabled()) { + logger.debug(" DOWNLOAD " + myFile.getRemotePath() + + " in bucket: " + getBucket()); + } + Download download = new DownloadOperator(getServer(), getUser(), getPassword(), getBucket(), getMonitor(), isChunk(), getBackendType(), getDbNames()); + try { + //TODO add field 
for file lock + get(download,myFile, true); + } catch (Exception e) { + TransportManager tm=getTransport(myFile); + tm.close(); + throw new RemoteBackendException(" Error in downloadAndLock operation ", e.getCause()); + } + return null; + } + + @Override + public String initOperation(MyFile file, String RemotePath, + String author, String[] server, String rootArea, + boolean replaceOption) { + this.localPath=file.getLocalPath(); + this.remotePath=remotePath; + return getRemoteIdentifier(remotePath, rootArea); + } + + @Override + public String initOperation(MyFile resource, String RemotePath, + String author, String[] server, String rootArea) { + // TODO Auto-generated method stub + return null; + } + +} diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/DuplicateFile.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/DuplicateFile.java index f627d94..24a1eae 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/DuplicateFile.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/DuplicateFile.java @@ -4,10 +4,9 @@ package org.gcube.contentmanagement.blobstorage.service.operation; import org.bson.types.ObjectId; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; -import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.slf4j.Logger; @@ -24,15 +23,13 @@ public abstract class DuplicateFile extends Operation { */ final Logger logger=LoggerFactory.getLogger(DuplicateFile.class); protected String sourcePath; - protected RequestObject resource; + protected MyFile resource; public DuplicateFile(String[] server, String user, String pwd, String bucket, Monitor monitor, boolean isChunk, String backendType, String[] dbs) { super(server, user, pwd, bucket, monitor, isChunk, backendType, dbs); } - public String doIt(RequestObject myFile) throws RemoteBackendException{ -// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password); -// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference()); + public String doIt(MyFile myFile) throws RemoteBackendException{ TransportManager tm=getTransport(myFile); String id=null; try { @@ -48,7 +45,7 @@ public abstract class DuplicateFile extends Operation { } @Override - public String initOperation(RequestObject file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { if(remotePath != null){ boolean isId=ObjectId.isValid(remotePath); setResource(file); @@ -67,7 +64,7 @@ public abstract class DuplicateFile extends Operation { @Override - public String initOperation(RequestObject resource, String RemotePath, + public String initOperation(MyFile resource, String RemotePath, String author, String[] server, String rootArea) { throw new IllegalArgumentException("Input/Output stream is not compatible with getSize operation"); } @@ 
-82,11 +79,11 @@ public abstract class DuplicateFile extends Operation { this.sourcePath = sourcePath; } - public RequestObject getResource() { + public MyFile getResource() { return resource; } - public void setResource(RequestObject resource) { + public void setResource(MyFile resource) { this.resource = resource; } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Exist.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Exist.java index 24dc7d4..6afc851 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Exist.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Exist.java @@ -3,12 +3,11 @@ */ package org.gcube.contentmanagement.blobstorage.service.operation; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.bson.types.ObjectId; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; -import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -30,9 +29,7 @@ public class Exist extends Operation{ super(server, user, pwd, bucket, monitor, isChunk, backendType, dbs); } - public String doIt(RequestObject myFile) throws RemoteBackendException{ -// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password); -// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference()); + public String doIt(MyFile myFile) throws RemoteBackendException{ TransportManager tm=getTransport(myFile); boolean isPresent=false; try { @@ -47,7 +44,7 @@ public class Exist extends Operation{ } @Override - public String initOperation(RequestObject file, String remotePath, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { // String[] dirs= remotePath.split(file_separator); if(logger.isDebugEnabled()) @@ -64,7 +61,7 @@ public class Exist extends Operation{ @Override - public String initOperation(RequestObject resource, String RemotePath, + public String initOperation(MyFile resource, String RemotePath, String author, String[] server, String rootArea) { throw new IllegalArgumentException("Input/Output stream is not compatible with Exist operation"); } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/FileWriter.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/FileWriter.java index 4a6c61b..2d5ad8b 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/FileWriter.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/FileWriter.java @@ -2,7 +2,7 @@ package org.gcube.contentmanagement.blobstorage.service.operation; //import org.apache.log4j.Logger; //import org.gcube.common.core.utils.logging.GCUBELog; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; @@ -23,12 +23,7 @@ public class FileWriter extends Thread{ final Logger 
logger=LoggerFactory.getLogger(FileWriter.class); private Monitor monitor; private int id; -// private MyFile myFile; -// private byte[] encode; -// private int offset; -// private static int len=0; private OutputStream out; -// private String path; private byte[] full; @@ -36,7 +31,7 @@ public class FileWriter extends Thread{ if (logger.isDebugEnabled()) { logger.debug("run() - start"); } - RequestObject request = monitor.getRequest(); + MyFile request = monitor.getRequest(); synchronized (FileWriter.class) { if(logger.isDebugEnabled()){ logger.debug("recover request: "+request.getKey()+" length: "+request.getContent().length); diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/ForceClose.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/ForceClose.java index 38df94e..0fe9fc3 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/ForceClose.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/ForceClose.java @@ -1,8 +1,7 @@ package org.gcube.contentmanagement.blobstorage.service.operation; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; -import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -13,15 +12,12 @@ public class ForceClose extends Operation{ * Logger for this class */ final Logger logger=LoggerFactory.getLogger(GetSize.class); -// public String file_separator = ServiceEngine.FILE_SEPARATOR;//System.getProperty("file.separator"); public ForceClose(String[] server, String user, String pwd, String bucket, Monitor monitor, boolean isChunk, String backendType, String[] dbs) { super(server, user, pwd, bucket, monitor, isChunk, backendType, dbs); } - public String doIt(RequestObject myFile) throws RemoteBackendException{ -// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password); -// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference()); + public String doIt(MyFile myFile) throws RemoteBackendException{ TransportManager tm=getTransport(myFile); try { tm.forceClose(); @@ -34,14 +30,14 @@ public class ForceClose extends Operation{ } @Override - public String initOperation(RequestObject file, String remotePath, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { return null; } @Override - public String initOperation(RequestObject resource, String RemotePath, + public String initOperation(MyFile resource, String RemotePath, String author, String[] server, String rootArea) { return null; } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetFolderCount.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetFolderCount.java index b7ad759..5761809 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetFolderCount.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetFolderCount.java @@ -1,11 +1,9 @@ package org.gcube.contentmanagement.blobstorage.service.operation; -import 
org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.DirectoryBucket; -import org.gcube.contentmanagement.blobstorage.service.impl.ServiceEngine; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; -import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants; import org.slf4j.Logger; @@ -22,9 +20,7 @@ public class GetFolderCount extends Operation { super(server, user, pwd, bucket, monitor, isChunk, backendType,dbs); } - public String doIt(RequestObject myFile) throws RemoteBackendException{ -// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password); -// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference()); + public String doIt(MyFile myFile) throws RemoteBackendException{ TransportManager tm=getTransport(myFile); long dim=0; try { @@ -39,7 +35,7 @@ public class GetFolderCount extends Operation { } @Override - public String initOperation(RequestObject file, String remotePath, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { if(logger.isDebugEnabled()) logger.debug("remotePath: "+remotePath); @@ -57,7 +53,7 @@ public class GetFolderCount extends Operation { @Override - public String initOperation(RequestObject resource, String RemotePath, + public String initOperation(MyFile resource, String RemotePath, String author, String[] server, String rootArea) { throw new IllegalArgumentException("Input/Output stream is not compatible with getSize operation"); } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetFolderLastUpdate.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetFolderLastUpdate.java index 709952f..24ab51d 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetFolderLastUpdate.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetFolderLastUpdate.java @@ -1,6 +1,6 @@ package org.gcube.contentmanagement.blobstorage.service.operation; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.DirectoryBucket; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; @@ -19,12 +19,12 @@ public class GetFolderLastUpdate extends Operation { super(server, user, pwd, bucket, monitor, isChunk, backendType, dbs); } - public String doIt(RequestObject myFile) throws RemoteBackendException{ + public String doIt(MyFile myFile) throws RemoteBackendException{ return null; } @Override - public String initOperation(RequestObject file, String remotePath, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { // String[] dirs= remotePath.split(file_separator); 
if(logger.isDebugEnabled()) @@ -43,7 +43,7 @@ public class GetFolderLastUpdate extends Operation { @Override - public String initOperation(RequestObject resource, String RemotePath, + public String initOperation(MyFile resource, String RemotePath, String author, String[] server, String rootArea) { throw new IllegalArgumentException("Input/Output stream is not compatible with getSize operation"); } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetFolderSize.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetFolderSize.java index ed2baea..1f17681 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetFolderSize.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetFolderSize.java @@ -1,10 +1,9 @@ package org.gcube.contentmanagement.blobstorage.service.operation; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.DirectoryBucket; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; -import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants; import org.slf4j.Logger; @@ -21,9 +20,7 @@ public class GetFolderSize extends Operation { super(server, user, pwd, bucket, monitor, isChunk, backendType, dbs); } - public String doIt(RequestObject myFile) throws RemoteBackendException{ -// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password); -// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference()); + public String doIt(MyFile myFile) throws RemoteBackendException{ TransportManager tm=getTransport(myFile); long dim=0; try { @@ -38,7 +35,7 @@ public class GetFolderSize extends Operation { } @Override - public String initOperation(RequestObject file, String remotePath, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { // String[] dirs= remotePath.split(file_separator); if(logger.isDebugEnabled()) @@ -58,7 +55,7 @@ public class GetFolderSize extends Operation { @Override - public String initOperation(RequestObject resource, String RemotePath, + public String initOperation(MyFile resource, String RemotePath, String author, String[] server, String rootArea) { throw new IllegalArgumentException("Input/Output stream is not compatible with getSize operation"); } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetHttpUrl.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetHttpUrl.java index b95dda1..9f7c3a5 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetHttpUrl.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetHttpUrl.java @@ -4,13 +4,19 @@ import java.io.IOException; import java.net.URL; import org.apache.commons.codec.binary.Base64; import org.gcube.contentmanagement.blobstorage.resource.MemoryType; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import 
org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.Encrypter; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.Encrypter.EncryptionException; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants; + +/** + * this class is replaced by getHttpsUrl + * @author roberto + * + */ @Deprecated public class GetHttpUrl extends Operation { @@ -27,13 +33,13 @@ public class GetHttpUrl extends Operation { } @Override - public String initOperation(RequestObject file, String remotePath, String author, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { return getRemoteIdentifier(remotePath, rootArea); } @Override - public String initOperation(RequestObject resource, String RemotePath, + public String initOperation(MyFile resource, String RemotePath, String author, String[] server, String rootArea) { // TODO Auto-generated method stub return null; @@ -41,12 +47,13 @@ public class GetHttpUrl extends Operation { @Override - public Object doIt(RequestObject myFile) throws RemoteBackendException { + public Object doIt(MyFile myFile) throws RemoteBackendException { String resolverHost=myFile.getResolverHOst(); String urlBase="smp://"+resolverHost+Costants.URL_SEPARATOR; String urlParam=""; try { - String id=getId(myFile.getAbsoluteRemotePath(), myFile.isForceCreation(), myFile.getGcubeMemoryType(), myFile.getWriteConcern(), myFile.getReadPreference()); +// String id=getId(myFile.getAbsoluteRemotePath(), myFile.isForceCreation(), myFile.getGcubeMemoryType(), myFile.getWriteConcern(), myFile.getReadPreference()); + String id=getId(myFile); String phrase=myFile.getPassPhrase(); // urlParam =new StringEncrypter("DES", phrase).encrypt(id); urlParam = new Encrypter("DES", phrase).encrypt(id); @@ -71,7 +78,7 @@ public class GetHttpUrl extends Operation { return httpUrl.toString(); } - + @Deprecated private String getId(String path, boolean forceCreation, MemoryType memoryType, String writeConcern, String readPreference){ String id=null; TransportManagerFactory tmf= new TransportManagerFactory(server, user, password, getRegion(), getToken()); @@ -87,6 +94,21 @@ public class GetHttpUrl extends Operation { return id; } + private String getId(MyFile myFile){ + String id=null; + TransportManager tm=getTransport(myFile); + try { + id = tm.getId(bucket, myFile.isForceCreation()); + } catch (Exception e) { + tm.close(); + throw new RemoteBackendException(" Error in GetUrl operation. 
Problem to discover remote file:"+bucket+" "+ e.getMessage(), e.getCause()); } + if (logger.isDebugEnabled()) { + logger.debug(" PATH " + bucket); + } + return id; + } + + private URL translate(URL url) throws IOException { logger.debug("translating: "+url); String urlString=url.toString(); diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetHttpsUrl.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetHttpsUrl.java index d1be6fb..a7794f4 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetHttpsUrl.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetHttpsUrl.java @@ -5,7 +5,7 @@ import java.net.URL; import org.apache.commons.codec.binary.Base64; import org.gcube.contentmanagement.blobstorage.resource.MemoryType; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.Encrypter; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.Encrypter.EncryptionException; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; @@ -29,13 +29,13 @@ public class GetHttpsUrl extends Operation { } @Override - public String initOperation(RequestObject file, String remotePath, String author, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { return getRemoteIdentifier(remotePath, rootArea); } @Override - public String initOperation(RequestObject resource, String RemotePath, + public String initOperation(MyFile resource, String RemotePath, String author, String[] server, String rootArea) { // TODO Auto-generated method stub return null; @@ -43,12 +43,13 @@ public class GetHttpsUrl extends Operation { @Override - public Object doIt(RequestObject myFile) throws RemoteBackendException { + public Object doIt(MyFile myFile) throws RemoteBackendException { String resolverHost=myFile.getResolverHOst(); String urlBase="smp://"+resolverHost+Costants.URL_SEPARATOR; String urlParam=""; try { String id=getId(myFile.getAbsoluteRemotePath(), myFile.isForceCreation(), myFile.getGcubeMemoryType(), myFile.getWriteConcern(), myFile.getReadPreference()); +// String id=getId(myFile); String phrase=myFile.getPassPhrase(); // urlParam =new StringEncrypter("DES", phrase).encrypt(id); urlParam = new Encrypter("DES", phrase).encrypt(id); @@ -73,6 +74,21 @@ public class GetHttpsUrl extends Operation { return httpsUrl.toString(); } + private String getId(MyFile myFile){ + String id=null; + TransportManager tm=getTransport(myFile); + try { + id = tm.getId(bucket, myFile.isForceCreation()); + } catch (Exception e) { + tm.close(); + throw new RemoteBackendException(" Error in GetUrl operation. 
Problem to discover remote file:"+bucket+" "+ e.getMessage(), e.getCause()); } + if (logger.isDebugEnabled()) { + logger.debug(" PATH " + bucket); + } + return id; + } + + @Deprecated private String getId(String path, boolean forceCreation, MemoryType memoryType, String writeConcern, String readPreference){ String id=null; TransportManagerFactory tmf= new TransportManagerFactory(server, user, password, getRegion(), getToken()); diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetMetaFile.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetMetaFile.java index c603920..3e420fd 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetMetaFile.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetMetaFile.java @@ -1,10 +1,9 @@ package org.gcube.contentmanagement.blobstorage.service.operation; import org.bson.types.ObjectId; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; -import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -30,9 +29,7 @@ public class GetMetaFile extends Operation{ * size. * */ - public RequestObject doIt(RequestObject myFile) throws RemoteBackendException{ -// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password); -// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference()); + public MyFile doIt(MyFile myFile) throws RemoteBackendException{ TransportManager tm=getTransport(myFile); long dim=0; String id=null; @@ -61,7 +58,7 @@ public class GetMetaFile extends Operation{ } @Override - public String initOperation(RequestObject file, String remotePath, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { // String[] dirs= remotePath.split(file_separator); if(logger.isDebugEnabled()) @@ -78,7 +75,7 @@ public class GetMetaFile extends Operation{ @Override - public String initOperation(RequestObject resource, String RemotePath, + public String initOperation(MyFile resource, String RemotePath, String author, String[] server, String rootArea) { throw new IllegalArgumentException("Input/Output stream is not compatible with getSize operation"); } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetMetaInfo.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetMetaInfo.java index 7ebc4c1..8624de0 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetMetaInfo.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetMetaInfo.java @@ -1,10 +1,9 @@ package org.gcube.contentmanagement.blobstorage.service.operation; import org.bson.types.ObjectId; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; import 
org.gcube.contentmanagement.blobstorage.transport.TransportManager; -import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -20,9 +19,7 @@ public class GetMetaInfo extends Operation { super(server, user, pwd, bucket, monitor, isChunk, backendType, dbs); } - public String doIt(RequestObject myFile) throws RemoteBackendException{ -// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password); -// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference()); + public String doIt(MyFile myFile) throws RemoteBackendException{ TransportManager tm=getTransport(myFile); String value=null; try { @@ -38,7 +35,7 @@ public class GetMetaInfo extends Operation { } @Override - public String initOperation(RequestObject file, String remotePath, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { // String[] dirs= remotePath.split(file_separator); if(logger.isDebugEnabled()) @@ -55,7 +52,7 @@ public class GetMetaInfo extends Operation { @Override - public String initOperation(RequestObject resource, String RemotePath, + public String initOperation(MyFile resource, String RemotePath, String author, String[] server, String rootArea) { throw new IllegalArgumentException("method not compatible with getMetaInfo operation"); } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetRemotePath.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetRemotePath.java index 5d1ad2a..4bd182c 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetRemotePath.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetRemotePath.java @@ -1,9 +1,8 @@ package org.gcube.contentmanagement.blobstorage.service.operation; import org.bson.types.ObjectId; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; -import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -20,9 +19,7 @@ public class GetRemotePath extends Operation{ super(server, user, pwd, bucket, monitor, isChunk, backendType,dbs); } - public String doIt(RequestObject myFile) throws RemoteBackendException{ -// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password); -// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference()); + public String doIt(MyFile myFile) throws RemoteBackendException{ TransportManager tm=getTransport(myFile); String path=null; try { @@ -45,7 +42,7 @@ public class GetRemotePath extends Operation{ } @Override - public String initOperation(RequestObject file, String remotePath, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { rootPath=file.getRootPath(); logger.trace("rootArea is "+file.getRootPath()+ " absoluteremotepath is "+file.getAbsoluteRemotePath()); 
@@ -61,7 +58,7 @@ public class GetRemotePath extends Operation{ @Override - public String initOperation(RequestObject resource, String RemotePath, + public String initOperation(MyFile resource, String RemotePath, String author, String[] server, String rootArea) { throw new IllegalArgumentException("Input/Output stream is not compatible with getSize operation"); } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetSize.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetSize.java index 55d03b7..0209952 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetSize.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetSize.java @@ -1,10 +1,9 @@ package org.gcube.contentmanagement.blobstorage.service.operation; import org.bson.types.ObjectId; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; -import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -27,9 +26,7 @@ public class GetSize extends Operation{ super(server, user, pwd, bucket, monitor, isChunk, backendType, dbs); } - public String doIt(RequestObject myFile) throws RemoteBackendException{ -// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password); -// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference()); + public String doIt(MyFile myFile) throws RemoteBackendException{ TransportManager tm=getTransport(myFile); long dim=0; try { @@ -44,7 +41,7 @@ public class GetSize extends Operation{ } @Override - public String initOperation(RequestObject file, String remotePath, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { if(logger.isDebugEnabled()) logger.debug("remotePath: "+remotePath); @@ -60,7 +57,7 @@ public class GetSize extends Operation{ @Override - public String initOperation(RequestObject resource, String RemotePath, + public String initOperation(MyFile resource, String RemotePath, String author, String[] server, String rootArea) { throw new IllegalArgumentException("Input/Output stream is not compatible with getSize operation"); } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetTTL.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetTTL.java index 5ff8d49..e448ac5 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetTTL.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetTTL.java @@ -2,9 +2,8 @@ package org.gcube.contentmanagement.blobstorage.service.operation; import java.io.OutputStream; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; -import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import 
org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -27,7 +26,7 @@ public class GetTTL extends Operation { } @Override - public String doIt(RequestObject myFile) throws RemoteBackendException { + public String doIt(MyFile myFile) throws RemoteBackendException { if (logger.isDebugEnabled()) { logger.debug(" DOWNLOAD " + myFile.getRemotePath() + " in bucket: " + bucket); @@ -36,8 +35,6 @@ public class GetTTL extends Operation { TransportManager tm=null; try { // add a field for the file lock -// TransportManagerFactory tmf=new TransportManagerFactory(server, user, password); -// tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference()); tm=getTransport(myFile); currentTTL=tm.getTTL(bucket); } catch (Exception e) { @@ -48,7 +45,7 @@ public class GetTTL extends Operation { } @Override - public String initOperation(RequestObject file, String remotePath, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { this.localPath=file.getLocalPath(); @@ -60,7 +57,7 @@ public class GetTTL extends Operation { @Override - public String initOperation(RequestObject resource, String RemotePath, + public String initOperation(MyFile resource, String RemotePath, String author, String[] server, String rootArea) { // TODO Auto-generated method stub return null; diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetUrl.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetUrl.java index 9f869c2..d98bd1a 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetUrl.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetUrl.java @@ -1,7 +1,7 @@ package org.gcube.contentmanagement.blobstorage.service.operation; import org.gcube.contentmanagement.blobstorage.resource.MemoryType; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.Encrypter; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.Encrypter.EncryptionException; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; @@ -9,8 +9,12 @@ import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants; - - +/** + * this class is replaced by getHttpsUrl + * @author roberto + * + */ +@Deprecated public class GetUrl extends Operation{ // private OutputStream os; @@ -21,13 +25,13 @@ public class GetUrl extends Operation{ } @Override - public String initOperation(RequestObject file, String remotePath, String author, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { return getRemoteIdentifier(remotePath, rootArea); } @Override - public String initOperation(RequestObject resource, String RemotePath, + public String initOperation(MyFile resource, String RemotePath, String author, String[] server, String rootArea) { // TODO Auto-generated method stub return null; @@ -35,16 +39,15 @@ public class GetUrl extends Operation{ @Override - public Object 
doIt(RequestObject myFile) throws RemoteBackendException { + public Object doIt(MyFile myFile) throws RemoteBackendException { String resolverHost=myFile.getResolverHOst(); String urlBase="smp://"+resolverHost+Costants.URL_SEPARATOR; String urlParam=""; try { - String id=getId(myFile.getAbsoluteRemotePath(), myFile.isForceCreation(), myFile.getGcubeMemoryType(), myFile.getWriteConcern(), myFile.getReadPreference()); +// String id=getId(myFile.getAbsoluteRemotePath(), myFile.isForceCreation(), myFile.getGcubeMemoryType(), myFile.getWriteConcern(), myFile.getReadPreference()); + String id=getId(myFile); String phrase=myFile.getPassPhrase(); -// urlParam =new StringEncrypter("DES", phrase).encrypt(id); urlParam = new Encrypter("DES", phrase).encrypt(id); -// String urlEncoded=URLEncoder.encode(urlParam, "UTF-8"); } catch (EncryptionException e) { throw new RemoteBackendException(" Error in getUrl operation problem to encrypt the string", e.getCause()); } @@ -56,6 +59,7 @@ public class GetUrl extends Operation{ return url; } + @Deprecated private String getId(String path, boolean forceCreation, MemoryType memoryType, String writeConcern, String readPreference){ String id=null; TransportManagerFactory tmf= new TransportManagerFactory(server, user, password, getRegion(), getToken()); @@ -71,4 +75,18 @@ public class GetUrl extends Operation{ return id; } + private String getId(MyFile myFile){ + String id=null; + TransportManager tm=getTransport(myFile); + try { + id = tm.getId(bucket, myFile.isForceCreation()); + } catch (Exception e) { + tm.close(); + throw new RemoteBackendException(" Error in GetUrl operation. Problem to discover remote file:"+bucket+" "+ e.getMessage(), e.getCause()); } + if (logger.isDebugEnabled()) { + logger.debug(" PATH " + bucket); + } + return id; + } + } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetUserTotalItems.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetUserTotalItems.java index b89c084..181f845 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetUserTotalItems.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetUserTotalItems.java @@ -1,10 +1,9 @@ package org.gcube.contentmanagement.blobstorage.service.operation; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.DirectoryBucket; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; -import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants; import org.slf4j.Logger; @@ -19,9 +18,7 @@ public class GetUserTotalItems extends Operation { super(server, user, pwd, bucket, monitor, isChunk, backendType, dbs); } - public String doIt(RequestObject myFile) throws RemoteBackendException{ -// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password); -// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference()); + public String doIt(MyFile myFile) throws RemoteBackendException{ TransportManager tm=getTransport(myFile); 
String dim=null; logger.info("check user total items for user: "+getOwner()+ " user is "+user); @@ -38,7 +35,7 @@ public class GetUserTotalItems extends Operation { } @Override - public String initOperation(RequestObject file, String remotePath, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { setOwner(author); if((remotePath != null) && (remotePath.length() > 0)){ @@ -64,7 +61,7 @@ public class GetUserTotalItems extends Operation { @Override - public String initOperation(RequestObject resource, String RemotePath, + public String initOperation(MyFile resource, String RemotePath, String author, String[] server, String rootArea) { throw new IllegalArgumentException("Input/Output stream is not compatible with getSize operation"); } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetUserTotalVolume.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetUserTotalVolume.java index c0e367a..52ddb74 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetUserTotalVolume.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/GetUserTotalVolume.java @@ -1,10 +1,9 @@ package org.gcube.contentmanagement.blobstorage.service.operation; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.DirectoryBucket; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; -import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants; import org.slf4j.Logger; @@ -13,15 +12,12 @@ import org.slf4j.LoggerFactory; public class GetUserTotalVolume extends Operation { final Logger logger=LoggerFactory.getLogger(GetUserTotalVolume.class); -// public String file_separator = ServiceEngine.FILE_SEPARATOR;//System.getProperty("file.separator"); public GetUserTotalVolume(String[] server, String user, String pwd, String bucket, Monitor monitor, boolean isChunk, String backendType, String[] dbs) { super(server, user, pwd, bucket, monitor, isChunk, backendType, dbs); } - public String doIt(RequestObject myFile) throws RemoteBackendException{ -// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password); -// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference()); + public String doIt(MyFile myFile) throws RemoteBackendException{ TransportManager tm=getTransport(myFile); String dim=null; logger.info("check user total volume for user: "+getOwner()+ " user is "+user); @@ -38,7 +34,7 @@ public class GetUserTotalVolume extends Operation { } @Override - public String initOperation(RequestObject file, String remotePath, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { setOwner(author); if(remotePath!= null && remotePath.length()>0){ @@ -63,7 +59,7 @@ public class GetUserTotalVolume extends Operation { @Override - public String initOperation(RequestObject resource, String RemotePath, + public 
String initOperation(MyFile resource, String RemotePath, String author, String[] server, String rootArea) { throw new IllegalArgumentException("Input/Output stream is not compatible with getSize operation"); } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Link.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Link.java index c71b35e..a28da4d 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Link.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Link.java @@ -2,10 +2,9 @@ package org.gcube.contentmanagement.blobstorage.service.operation; import java.net.UnknownHostException; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; -import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.slf4j.Logger; @@ -20,12 +19,12 @@ public abstract class Link extends Operation{ final Logger logger=LoggerFactory.getLogger(Download.class); private String sourcePath; private String destinationPath; - private RequestObject resource; + private MyFile resource; public Link(String[] server, String user, String pwd, String bucket, Monitor monitor, boolean isChunk, String backendType, String[] dbs) { super(server, user, pwd, bucket, monitor, isChunk, backendType, dbs); } - public String initOperation(RequestObject file, String remotePath, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { this.sourcePath=file.getLocalPath(); this.destinationPath=remotePath; @@ -36,9 +35,7 @@ public abstract class Link extends Operation{ } - public String doIt(RequestObject myFile) throws RemoteBackendException{ -// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password); -// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference()); + public String doIt(MyFile myFile) throws RemoteBackendException{ TransportManager tm=getTransport(myFile); String id=null; try { @@ -53,7 +50,7 @@ public abstract class Link extends Operation{ @Override - public String initOperation(RequestObject resource, String remotePath, + public String initOperation(MyFile resource, String remotePath, String author, String[] server, String rootArea) { // For terrastore, the name of bucket is formed: path_____fileName_____author // String bucketName=new BucketCoding().bucketFileCoding(remotePath, rootArea); @@ -65,7 +62,7 @@ public abstract class Link extends Operation{ return bucket=destinationPath; } - public abstract String execute(MongoIOManager mongoPrimaryInstance, MongoIOManager mongoSecondaryInstance, RequestObject resource, String sourcePath, String destinationPath) throws UnknownHostException; + public abstract String execute(MongoIOManager mongoPrimaryInstance, MongoIOManager mongoSecondaryInstance, MyFile resource, String sourcePath, String destinationPath) throws UnknownHostException; public String getSourcePath() { return sourcePath; @@ -83,11 +80,11 @@ public abstract class Link 
extends Operation{ this.destinationPath = destinationPath; } - public RequestObject getResource() { + public MyFile getResource() { return resource; } - public void setResource(RequestObject resource) { + public void setResource(MyFile resource) { this.resource = resource; } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Lock.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Lock.java index 283d659..01ac820 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Lock.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Lock.java @@ -2,7 +2,7 @@ package org.gcube.contentmanagement.blobstorage.service.operation; import java.io.OutputStream; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; @@ -24,7 +24,7 @@ public abstract class Lock extends Operation { protected String localPath; protected String remotePath; protected OutputStream os; - protected RequestObject resource; + protected MyFile resource; protected Download download; public Lock(String[] server, String user, String pwd, String bucket, Monitor monitor, boolean isChunk, String backendType, String[] dbs) { @@ -33,7 +33,7 @@ public abstract class Lock extends Operation { } @Override - public String doIt(RequestObject myFile) throws RemoteBackendException { + public String doIt(MyFile myFile) throws RemoteBackendException { if (logger.isDebugEnabled()) { logger.debug(" DOWNLOAD " + myFile.getRemotePath() + " in bucket: " + getBucket()); @@ -44,8 +44,6 @@ public abstract class Lock extends Operation { Download download = new DownloadOperator(getServer(), getUser(), getPassword(), getBucket(), getMonitor(), isChunk(), getBackendType(), getDbNames()); unlockKey=get(download, myFile, true); } catch (Exception e) { -// TransportManagerFactory tmf=new TransportManagerFactory(getServer(), getUser(), getPassword()); -// TransportManager tm=tmf.getTransport(getBackendType(), myFile.getGcubeMemoryType(), getDbNames(), myFile.getWriteConcern(), myFile.getReadPreference()); TransportManager tm=getTransport(myFile); tm.close(); throw new RemoteBackendException(" Error in lock operation ", e.getCause()); @@ -54,7 +52,7 @@ public abstract class Lock extends Operation { } @Override - public String initOperation(RequestObject file, String remotePath, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { String bucketName=null; @@ -73,13 +71,13 @@ public abstract class Lock extends Operation { } @Override - public String initOperation(RequestObject resource, String RemotePath, + public String initOperation(MyFile resource, String RemotePath, String author, String[] server, String rootArea) { // TODO Auto-generated method stub return null; } - public abstract String execute(MongoIOManager mongoPrimaryInstance, MongoIOManager mongoSecondaryInstance, RequestObject resource, String serverLocation) throws Exception; + public abstract String execute(MongoIOManager mongoPrimaryInstance, MongoIOManager mongoSecondaryInstance, MyFile resource, String serverLocation) throws Exception; public String getLocalPath() { return localPath; 
@@ -105,11 +103,11 @@ public abstract class Lock extends Operation { this.os = os; } - public RequestObject getResource() { + public MyFile getResource() { return resource; } - public void setResource(RequestObject resource) { + public void setResource(MyFile resource) { this.resource = resource; } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Monitor.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Monitor.java index bb5b204..a86b3d2 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Monitor.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Monitor.java @@ -2,7 +2,7 @@ package org.gcube.contentmanagement.blobstorage.service.operation; //import org.apache.log4j.Logger; //import org.gcube.common.core.utils.logging.GCUBELog; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -22,9 +22,9 @@ public class Monitor { // private static final GCUBELog logger = new GCUBELog(Monitor.class); final Logger logger=LoggerFactory.getLogger(Monitor.class); // request queue - private Vector<RequestObject> requestQueue = new Vector<RequestObject>(); + private Vector<MyFile> requestQueue = new Vector<MyFile>(); // fetch the first request in the queue - public synchronized RequestObject getRequest(){ + public synchronized MyFile getRequest(){ if (logger.isDebugEnabled()) { logger.debug("getRequest() - start"); } @@ -36,7 +36,7 @@ public class Monitor { logger.error("getRequest()", e); } } - RequestObject myFile=requestQueue.remove(0); + MyFile myFile=requestQueue.remove(0); notifyAll(); if (logger.isDebugEnabled()) { logger.debug("getRequest() - end"); @@ -44,7 +44,7 @@ public class Monitor { return myFile; } - public synchronized RequestObject getRequest(ChunkProducer producer){ + public synchronized MyFile getRequest(ChunkProducer producer){ if (logger.isDebugEnabled()) { logger.debug("getRequest(ChunkProducer) - start"); } @@ -56,7 +56,7 @@ public class Monitor { logger.error("getRequest(ChunkProducer)", e); } } - RequestObject myFile=requestQueue.remove(0); + MyFile myFile=requestQueue.remove(0); notifyAll(); if (logger.isDebugEnabled()) { logger.debug("getRequest(ChunkProducer) - end"); @@ -65,7 +65,7 @@ public class Monitor { } // Enqueue a new request - public synchronized void putRequest(RequestObject richiesta){ + public synchronized void putRequest(MyFile richiesta){ if (logger.isDebugEnabled()) { logger.debug("putRequest(MyFile) - start"); logger.debug("request in queue, queue size: "+requestQueue.size()); diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Move.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Move.java index 5b10cba..ce96b54 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Move.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Move.java @@ -1,14 +1,11 @@ package org.gcube.contentmanagement.blobstorage.service.operation; -import java.io.OutputStream; import java.net.UnknownHostException; import org.gcube.contentmanagement.blobstorage.resource.MemoryType; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import 
org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; -import org.gcube.contentmanagement.blobstorage.service.directoryOperation.DirectoryBucket; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; -import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.slf4j.Logger; @@ -23,12 +20,12 @@ public abstract class Move extends Operation{ final Logger logger=LoggerFactory.getLogger(Download.class); protected String sourcePath; protected String destinationPath; - protected RequestObject resource; + protected MyFile resource; public Move(String[] server, String user, String pwd, String bucket, Monitor monitor, boolean isChunk, String backendType, String[] dbs) { super(server, user, pwd, bucket, monitor, isChunk, backendType, dbs); } - public String initOperation(RequestObject file, String remotePath, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { this.sourcePath=file.getLocalPath(); this.destinationPath=remotePath; @@ -39,13 +36,10 @@ public abstract class Move extends Operation{ } - public String doIt(RequestObject myFile) throws RemoteBackendException{ -// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password); -// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference()); + public String doIt(MyFile myFile) throws RemoteBackendException{ TransportManager tm=getTransport(myFile); String id=null; try { -// id=tm.move(myFile, sourcePath, destinationPath); id=tm.move(this); } catch (UnknownHostException e) { tm.close(); @@ -57,7 +51,7 @@ public abstract class Move extends Operation{ @Override - public String initOperation(RequestObject resource, String remotePath, + public String initOperation(MyFile resource, String remotePath, String author, String[] server, String rootArea) { this.sourcePath=resource.getLocalPath(); this.destinationPath=resource.getRemotePath(); @@ -67,7 +61,7 @@ public abstract class Move extends Operation{ return bucket=destinationPath; } - public abstract String execute(MongoIOManager mongoPrimaryInstance, MemoryType memoryType, RequestObject resource, String sourcePath, String destinationPath) throws UnknownHostException; + public abstract String execute(MongoIOManager mongoPrimaryInstance, MemoryType memoryType, MyFile resource, String sourcePath, String destinationPath) throws UnknownHostException; public String getSourcePath() { return sourcePath; @@ -85,11 +79,11 @@ public abstract class Move extends Operation{ this.destinationPath = destinationPath; } - public RequestObject getResource() { + public MyFile getResource() { return resource; } - public void setResource(RequestObject resource) { + public void setResource(MyFile resource) { this.resource = resource; } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/MoveDir.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/MoveDir.java index ad20f91..d0115fa 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/MoveDir.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/MoveDir.java @@ -4,10 +4,9 @@ import java.net.UnknownHostException; import 
java.util.List; import org.gcube.contentmanagement.blobstorage.resource.MemoryType; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; -import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.slf4j.Logger; @@ -21,13 +20,13 @@ public abstract class MoveDir extends Operation{ final Logger logger=LoggerFactory.getLogger(Download.class); private String sourcePath; private String destinationPath; - private RequestObject resource; + private MyFile resource; // private OutputStream os; public MoveDir(String[] server, String user, String pwd, String bucket, Monitor monitor, boolean isChunk, String backendType, String[] dbs) { super(server, user, pwd, bucket, monitor, isChunk, backendType, dbs); } - public String initOperation(RequestObject file, String remotePath, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { this.sourcePath=file.getLocalPath(); this.destinationPath=remotePath; @@ -38,9 +37,7 @@ public abstract class MoveDir extends Operation{ } - public String doIt(RequestObject myFile) throws RemoteBackendException{ -// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password); -// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference()); + public String doIt(MyFile myFile) throws RemoteBackendException{ TransportManager tm=getTransport(myFile); Listids=null; try { @@ -55,7 +52,7 @@ public abstract class MoveDir extends Operation{ @Override - public String initOperation(RequestObject resource, String remotePath, + public String initOperation(MyFile resource, String remotePath, String author, String[] server, String rootArea) { this.sourcePath=resource.getLocalPath(); this.destinationPath=resource.getRemotePath(); @@ -65,7 +62,7 @@ public abstract class MoveDir extends Operation{ return bucket=destinationPath; } - public abstract List execute(MongoIOManager mongoPrimaryInstance, RequestObject resource, String sourcePath, String destinationPath, MemoryType memoryType) throws UnknownHostException; + public abstract List execute(MongoIOManager mongoPrimaryInstance, MyFile resource, String sourcePath, String destinationPath, MemoryType memoryType) throws UnknownHostException; public String getSourcePath() { return sourcePath; @@ -83,11 +80,11 @@ public abstract class MoveDir extends Operation{ this.destinationPath = destinationPath; } - public RequestObject getResource() { + public MyFile getResource() { return resource; } - public void setResource(RequestObject resource) { + public void setResource(MyFile resource) { this.resource = resource; } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Operation.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Operation.java index fb111e6..651fdef 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Operation.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Operation.java @@ -1,9 +1,8 @@ 
package org.gcube.contentmanagement.blobstorage.service.operation; import org.bson.types.ObjectId; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; -import org.gcube.contentmanagement.blobstorage.service.impl.ServiceEngine; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; @@ -15,7 +14,6 @@ import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; -import java.util.Objects; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; @@ -101,7 +99,7 @@ public abstract class Operation { * @return a String that identifies a file * @throws Exception */ - public String put(Upload upload, RequestObject resource, boolean isChunk, boolean isBase64, boolean replaceOption, boolean isLock) throws Exception{ + public String put(Upload upload, MyFile resource, boolean isChunk, boolean isBase64, boolean replaceOption, boolean isLock) throws Exception{ if (logger.isDebugEnabled()) { logger.debug("put(MyFile, boolean, boolean) - start"); } @@ -163,8 +161,6 @@ public abstract class Operation { }else{ if(logger.isDebugEnabled()) logger.debug("NO THREAD POOL USED"); -// TransportManagerFactory tmf=new TransportManagerFactory(server, user, password); -// TransportManager tm=tmf.getTransport(backendType, resource.getGcubeMemoryType(), dbNames, resource.getWriteConcern(), resource.getReadPreference()); TransportManager tm=getTransport(resource); String objectId=tm.uploadManager(upload, resource, bucket, bucket+"_1", replaceOption); return objectId; @@ -177,7 +173,7 @@ public abstract class Operation { * @throws IOException * @throws InterruptedException */ - public String get(Download download, RequestObject myFile, boolean isLock) throws IOException, InterruptedException, Exception { + public String get(Download download, MyFile myFile, boolean isLock) throws IOException, InterruptedException, Exception { if (logger.isDebugEnabled()) { logger.debug("get(String) - start"); } @@ -189,7 +185,7 @@ public abstract class Operation { startPThreadChunk(download, myFile, tm, path); }else{ - unlocKey=tm.downloadManager(download, myFile, bucket, RequestObject.class); + unlocKey=tm.downloadManager(download, myFile, bucket, MyFile.class); } if((path!=null) && (new File(path).length()>0)){ @@ -209,12 +205,12 @@ public abstract class Operation { * @throws InterruptedException * @throws IOException */ - protected void startPThreadChunk(Download download,RequestObject myFile, TransportManager tm, + protected void startPThreadChunk(Download download,MyFile myFile, TransportManager tm, String path) throws FileNotFoundException, InterruptedException, IOException { ExecutorService executor = Executors.newFixedThreadPool (2); int j=0; - RequestObject value=null; + MyFile value=null; if(logger.isInfoEnabled()) logger.info("localPath: "+path+" bucket: "+bucket); @@ -228,7 +224,7 @@ public abstract class Operation { logger.debug("get(String) -"); } try{ - value=(RequestObject) tm.get(download); + value=(MyFile) tm.get(download); }catch(Exception e){ if (logger.isDebugEnabled()) { logger.debug("get(String) - \n Trovate " + (j) + " key"); @@ 
-282,7 +278,7 @@ public abstract class Operation { * @return a generic object that contains operation results * @throws IllegalAccessException */ - public abstract Object doIt(RequestObject myFile) throws RemoteBackendException; + public abstract Object doIt(MyFile myFile) throws RemoteBackendException; /** * init a operation @@ -294,7 +290,7 @@ public abstract class Operation { * @param replaceOption if true the file will be replaced * @return a string that identifies the operation */ - public abstract String initOperation(RequestObject file, String remoteIdentifier, String author, String[] server, String rootArea, boolean replaceOption); + public abstract String initOperation(MyFile file, String remoteIdentifier, String author, String[] server, String rootArea, boolean replaceOption); /** @@ -306,7 +302,7 @@ public abstract class Operation { * @param rootArea remote root path * @return a string that identifies the operation */ - public abstract String initOperation(RequestObject resource, String remoteIdentifier, String author, String[] server, String rootArea); + public abstract String initOperation(MyFile resource, String remoteIdentifier, String author, String[] server, String rootArea); public String getOwner() { return owner; diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/OperationManager.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/OperationManager.java index 4acabe6..3ff37ea 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/OperationManager.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/OperationManager.java @@ -1,6 +1,6 @@ package org.gcube.contentmanagement.blobstorage.service.operation; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -21,7 +21,7 @@ public class OperationManager { private String[] server; // private int dimension; private String operation; - private RequestObject resource; + private MyFile resource; private boolean isChunk; private String bucketName; private String fileDest; @@ -33,7 +33,7 @@ public class OperationManager { - public OperationManager(String[] server, String user, String password, String operation, RequestObject myFile, String backendType, String[] dbs, String token){ + public OperationManager(String[] server, String user, String password, String operation, MyFile myFile, String backendType, String[] dbs, String token){ this.setServer(server); this.setUser(user); this.setPassword(password); @@ -124,11 +124,11 @@ public class OperationManager { this.operation = operation; } - public RequestObject getResource() { + public MyFile getResource() { return resource; } - public void setResource(RequestObject resource) { + public void setResource(MyFile resource) { this.resource = resource; } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Remove.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Remove.java index 566291c..37ff6cc 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Remove.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Remove.java @@ -1,9 +1,8 @@ package org.gcube.contentmanagement.blobstorage.service.operation; -import 
org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; -import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants; import org.slf4j.Logger; @@ -24,9 +23,7 @@ public class Remove extends Operation{ super(server,user,pwd, bucket, monitor, isChunk, backendType, dbs); } - public String doIt(RequestObject myFile) throws RemoteBackendException{ -// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password); -// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference()); + public String doIt(MyFile myFile) throws RemoteBackendException{ TransportManager tm=getTransport(myFile); removeBucket(tm, bucket, myFile); if (logger.isDebugEnabled()) { @@ -36,7 +33,7 @@ public class Remove extends Operation{ } @Override - public String initOperation(RequestObject file, String remotePath, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { String[] dirs= remotePath.split(Costants.FILE_SEPARATOR); if(logger.isDebugEnabled()) @@ -64,7 +61,7 @@ public class Remove extends Operation{ * @param bucketName indicates the remote directory to remove * @throws RemoteBackendException */ - public void removeBucket(TransportManager tm, String bucketName, RequestObject resource) throws RemoteBackendException { + public void removeBucket(TransportManager tm, String bucketName, MyFile resource) throws RemoteBackendException { if(logger.isDebugEnabled()) logger.debug("removing file bucket: "+bucketName); try { @@ -77,7 +74,7 @@ public class Remove extends Operation{ } @Override - public String initOperation(RequestObject resource, String RemotePath, + public String initOperation(MyFile resource, String RemotePath, String author, String[] server, String rootArea) { throw new IllegalArgumentException("Input/Output stream is not compatible with remove operation"); } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/RenewTTL.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/RenewTTL.java index 1f1924c..ad0327e 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/RenewTTL.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/RenewTTL.java @@ -2,10 +2,9 @@ package org.gcube.contentmanagement.blobstorage.service.operation; import java.io.OutputStream; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; -import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -29,9 +28,7 @@ public class RenewTTL extends Operation { } @Override - public String doIt(RequestObject myFile) throws 
RemoteBackendException { -// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password); -// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference()); + public String doIt(MyFile myFile) throws RemoteBackendException { TransportManager tm=getTransport(myFile); long ttl=-1; try { @@ -45,7 +42,7 @@ public class RenewTTL extends Operation { } @Override - public String initOperation(RequestObject file, String remotePath, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { this.localPath=file.getLocalPath(); @@ -56,7 +53,7 @@ public class RenewTTL extends Operation { } @Override - public String initOperation(RequestObject resource, String RemotePath, + public String initOperation(MyFile resource, String RemotePath, String author, String[] server, String rootArea) { // TODO Auto-generated method stub return null; diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/SetMetaInfo.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/SetMetaInfo.java index c784b96..f084018 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/SetMetaInfo.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/SetMetaInfo.java @@ -1,10 +1,9 @@ package org.gcube.contentmanagement.blobstorage.service.operation; import org.bson.types.ObjectId; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; -import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -20,9 +19,7 @@ public class SetMetaInfo extends Operation { super(server, user, pwd, bucket, monitor, isChunk, backendType, dbs); } - public String doIt(RequestObject myFile) throws RemoteBackendException{ -// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password); -// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference()); + public String doIt(MyFile myFile) throws RemoteBackendException{ TransportManager tm=getTransport(myFile); try { tm.setFileProperty(bucket, myFile.getGenericPropertyField(), myFile.getGenericPropertyValue()); @@ -38,7 +35,7 @@ public class SetMetaInfo extends Operation { } @Override - public String initOperation(RequestObject file, String remotePath, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { if(logger.isDebugEnabled()) logger.debug("remotePath: "+remotePath); @@ -54,7 +51,7 @@ public class SetMetaInfo extends Operation { @Override - public String initOperation(RequestObject resource, String RemotePath, + public String initOperation(MyFile resource, String RemotePath, String author, String[] server, String rootArea) { throw new IllegalArgumentException("Input/Output stream is not compatible with getSize operation"); } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/SoftCopy.java 
b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/SoftCopy.java index 906fde5..79ed912 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/SoftCopy.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/SoftCopy.java @@ -5,11 +5,9 @@ package org.gcube.contentmanagement.blobstorage.service.operation; import java.net.UnknownHostException; -import org.bson.types.ObjectId; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; -import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.slf4j.Logger; @@ -27,29 +25,14 @@ public abstract class SoftCopy extends Operation { final Logger logger=LoggerFactory.getLogger(SoftCopy.class); private String sourcePath; private String destinationPath; - private RequestObject resource; + private MyFile resource; public SoftCopy(String[] server, String user, String pwd, String bucket, Monitor monitor, boolean isChunk, String backendType, String[] dbs) { super(server, user, pwd, bucket, monitor, isChunk, backendType, dbs); } - public String initOperation(RequestObject file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { -// if(remotePath != null){ -// boolean isId=ObjectId.isValid(remotePath); -// setResource(file); -// if(!isId){ -//// String[] dirs= remotePath.split(file_separator); -// if(logger.isDebugEnabled()) -// logger.debug("remotePath: "+remotePath); -// String buck=null; -// buck = new BucketCoding().bucketFileCoding(remotePath, rootArea); -// return bucket=buck; -// }else{ -// return bucket=remotePath; -// } -// }return bucket=null;//else throw new RemoteBackendException("argument cannot be null"); - + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { this.sourcePath=file.getLocalPath(); this.destinationPath=remotePath; sourcePath = new BucketCoding().bucketFileCoding(file.getLocalPath(), rootArea); @@ -59,9 +42,7 @@ public abstract class SoftCopy extends Operation { } - public String doIt(RequestObject myFile) throws RemoteBackendException{ -// TransportManagerFactory tmf= new TransportManagerFactory(server, user, password); -// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference()); + public String doIt(MyFile myFile) throws RemoteBackendException{ TransportManager tm=getTransport(myFile); String id=null; try { @@ -76,7 +57,7 @@ public abstract class SoftCopy extends Operation { @Override - public String initOperation(RequestObject resource, String remotePath, String author, String[] server, String rootArea) { + public String initOperation(MyFile resource, String remotePath, String author, String[] server, String rootArea) { // For terrastore, the name of bucket is formed: path_____fileName_____author this.sourcePath=resource.getLocalPath(); this.destinationPath=resource.getRemotePath(); @@ -84,23 +65,9 @@ public abstract class SoftCopy extends Operation { destinationPath = new 
BucketCoding().bucketFileCoding(resource.getRemotePath(), rootArea); setResource(resource); return bucket=destinationPath; -// if(remotePath != null){ -// boolean isId=ObjectId.isValid(remotePath); -// setResource(resource); -// if(!isId){ -//// String[] dirs= remotePath.split(file_separator); -// if(logger.isDebugEnabled()) -// logger.debug("remotePath: "+remotePath); -// String buck=null; -// buck = new BucketCoding().bucketFileCoding(remotePath, rootArea); -// return bucket=buck; -// }else{ -// return bucket=remotePath; -// } -// }return bucket=null;//else throw new RemoteBackendException("argument cannot be null"); } - public abstract String execute(MongoIOManager mongoPrimaryInstance, RequestObject resource, String sourcePath, String destinationPath) throws UnknownHostException; + public abstract String execute(MongoIOManager mongoPrimaryInstance, MyFile resource, String sourcePath, String destinationPath) throws UnknownHostException; public String getSourcePath() { return sourcePath; @@ -118,11 +85,11 @@ public abstract class SoftCopy extends Operation { this.destinationPath = destinationPath; } - public RequestObject getResource() { + public MyFile getResource() { return resource; } - public void setResource(RequestObject resource) { + public void setResource(MyFile resource) { this.resource = resource; } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Unlock.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Unlock.java index 4a9a8e5..3de213e 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Unlock.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Unlock.java @@ -2,10 +2,9 @@ package org.gcube.contentmanagement.blobstorage.service.operation; import java.io.OutputStream; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; -import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.gcube.contentmanagement.blobstorage.transport.backend.operation.UploadOperator; @@ -23,7 +22,7 @@ public abstract class Unlock extends Operation { protected String localPath; protected String remotePath; protected OutputStream os; - protected RequestObject resource; + protected MyFile resource; protected Upload upload; public Unlock(String[] server, String user, String pwd, String bucket, Monitor monitor, boolean isChunk, String backendType, String[] dbs) { @@ -32,7 +31,7 @@ public abstract class Unlock extends Operation { } @Override - public String doIt(RequestObject myFile) throws RemoteBackendException { + public String doIt(MyFile myFile) throws RemoteBackendException { if (logger.isDebugEnabled()) { logger.debug(" UPLOAD " + myFile.getLocalPath() + " author: " + myFile.getOwner()); @@ -43,8 +42,6 @@ public abstract class Unlock extends Operation { //inserire parametro per il lock objectId=put(upload, myFile, isChunk(), false, false, true); } catch (Exception e) { -// TransportManagerFactory tmf=new TransportManagerFactory(server, user, password); -// TransportManager tm=tmf.getTransport(backendType, 
myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference()); TransportManager tm=getTransport(myFile); tm.close(); throw new RemoteBackendException(" Error in unlock operation ", e.getCause()); @@ -54,7 +51,7 @@ public abstract class Unlock extends Operation { } @Override - public String initOperation(RequestObject file, String remotePath, + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { String bucketName=null; @@ -71,13 +68,13 @@ public abstract class Unlock extends Operation { } @Override - public String initOperation(RequestObject resource, String RemotePath, + public String initOperation(MyFile resource, String RemotePath, String author, String[] server, String rootArea) { // TODO Auto-generated method stub return null; } - public abstract String execute(MongoIOManager mongoPrimaryInstance, MongoIOManager mongoSecondaryInstance, RequestObject resource, String bucket, String key4unlock) throws Exception; + public abstract String execute(MongoIOManager mongoPrimaryInstance, MongoIOManager mongoSecondaryInstance, MyFile resource, String bucket, String key4unlock) throws Exception; public String getLocalPath() { return localPath; @@ -103,11 +100,11 @@ public abstract class Unlock extends Operation { this.os = os; } - public RequestObject getResource() { + public MyFile getResource() { return resource; } - public void setResource(RequestObject resource) { + public void setResource(MyFile resource) { this.resource = resource; } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Upload.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Upload.java index 3bb4d27..a7aa4c6 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Upload.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/Upload.java @@ -4,10 +4,9 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; -import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager; import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants; @@ -32,7 +31,7 @@ public abstract class Upload extends Operation { protected String localPath; protected String remotePath; protected OutputStream os; - protected RequestObject resource; + protected MyFile resource; public Upload(String[] server, String user, String pwd, String bucket, Monitor monitor, boolean isChunk, String bck, String[] dbs) { super(server, user, pwd, bucket, monitor, isChunk, bck, dbs); @@ -40,7 +39,7 @@ public abstract class Upload extends Operation { - public String doIt(RequestObject myFile) throws RemoteBackendException{ + public String doIt(MyFile myFile) throws RemoteBackendException{ if (logger.isDebugEnabled()) { logger.debug(" UPLOAD " + myFile.getLocalPath() + " author: " + myFile.getOwner()); @@ -49,8 +48,6 @@ public abstract class Upload extends Operation { try { objectId=put(this, myFile, isChunk(), false, 
replaceOption, false); } catch (Throwable e) { -// TransportManagerFactory tmf=new TransportManagerFactory(server, user, password); -// TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference()); e.printStackTrace(); TransportManager tm=getTransport(myFile); tm.close(); @@ -64,7 +61,7 @@ public abstract class Upload extends Operation { @Override - public String initOperation(RequestObject file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { + public String initOperation(MyFile file, String remotePath, String author, String[] server, String rootArea, boolean replaceOption) { // set replace option this.replaceOption=replaceOption; setResource(file); @@ -81,7 +78,7 @@ public abstract class Upload extends Operation { @Override - public String initOperation(RequestObject resource, String remotePath, + public String initOperation(MyFile resource, String remotePath, String author, String[] server, String rootArea) { // the name of bucket is formed: path_____fileName_____author String bucketName=new BucketCoding().bucketFileCoding(remotePath, rootArea); @@ -91,7 +88,7 @@ public abstract class Upload extends Operation { } - public abstract String execute(MongoIOManager mongoPrimaryInstance, MongoIOManager mongoSecondaryInstance, RequestObject resource, String bucket, boolean replace) throws IOException; + public abstract String execute(MongoIOManager mongoPrimaryInstance, MongoIOManager mongoSecondaryInstance, MyFile resource, String bucket, boolean replace) throws IOException; public InputStream getIs() { return is; @@ -153,13 +150,13 @@ public abstract class Upload extends Operation { - public RequestObject getResource() { + public MyFile getResource() { return resource; } - public void setResource(RequestObject resource) { + public void setResource(MyFile resource) { this.resource = resource; } diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/UploadAndUnlock.java b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/UploadAndUnlock.java new file mode 100644 index 0000000..4132f88 --- /dev/null +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/service/operation/UploadAndUnlock.java @@ -0,0 +1,63 @@ +package org.gcube.contentmanagement.blobstorage.service.operation; + +import org.gcube.contentmanagement.blobstorage.resource.MyFile; +import org.gcube.contentmanagement.blobstorage.service.directoryOperation.BucketCoding; +import org.gcube.contentmanagement.blobstorage.transport.TransportManager; +import org.gcube.contentmanagement.blobstorage.transport.TransportManagerFactory; +import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; +import org.gcube.contentmanagement.blobstorage.transport.backend.operation.UploadOperator; +/** + * @deprecated + * @author Roberto Cirillo (ISTI - CNR) + * + */ +public class UploadAndUnlock extends Operation { + +// private String keyUnlock; + + public UploadAndUnlock(String[] server, String user, String pwd, String bucket, Monitor monitor, boolean isChunk, String backendType, String[] dbs) { + // TODO Auto-generated constructor stub + super(server,user, pwd, bucket, monitor, isChunk, backendType, dbs); + } + + @Override + public String doIt(MyFile myFile) throws RemoteBackendException { + if (logger.isDebugEnabled()) { + logger.debug(" UPLOAD " + myFile.getLocalPath() + + " author: " + myFile.getOwner()); + } + Upload upload= new 
UploadOperator(getServer(), getUser(), getPassword(), getBucket(), getMonitor(), isChunk(), getBackendType(), getDbNames()); + String objectId=null; + try { + // insert parameter for the lock + objectId=put(upload, myFile, isChunk(), false, false, true); + } catch (Exception e) { + TransportManagerFactory tmf=new TransportManagerFactory(server, user, password); + TransportManager tm=tmf.getTransport(transport, backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference()); + tm.close(); + throw new RemoteBackendException(" Error in uploadAndUnlock operation ", e); + } + return objectId; + + } + + @Override + public String initOperation(MyFile file, String remotePath, + String author, String[] server, String rootArea, + boolean replaceOption) { + // set replace option +// this.replaceOption=replaceOption; + // the name of bucket is formed: path_____fileName_____author + String bucketName=new BucketCoding().bucketFileCoding(remotePath, rootArea); + return bucket=bucketName; + + } + + @Override + public String initOperation(MyFile resource, String RemotePath, + String author, String[] server, String rootArea) { + // TODO Auto-generated method stub + return null; + } + +} diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/test/SimpleTest2.java b/src/main/java/org/gcube/contentmanagement/blobstorage/test/SimpleTest2.java deleted file mode 100644 index f085464..0000000 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/test/SimpleTest2.java +++ /dev/null @@ -1,28 +0,0 @@ -package org.gcube.contentmanagement.blobstorage.test; - -import java.util.List; -import org.gcube.contentmanagement.blobstorage.service.IClient; -import org.gcube.contentmanagement.blobstorage.service.impl.ServiceEngine; -import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException; -import org.gcube.contentmanagement.blobstorage.resource.StorageObject; - - - -public class SimpleTest2 { - - public static void main(String[] args) throws RemoteBackendException{ - String[] server=new String[]{"146.48.123.73","146.48.123.74" }; - - IClient client=new ServiceEngine(server, "rcirillo", "cnr", "private", "rcirillo"); -// String localFile="/home/rcirillo/FilePerTest/CostaRica.jpg"; - String remoteFile="/img/shared9.jpg"; - String newFile="/home/rcirillo/FilePerTest/repl4.jpg"; - client.get().LFile(newFile).RFile(remoteFile); - List list=client.showDir().RDir("/img/"); - for(StorageObject obj : list){ - System.out.println("obj found: "+obj.getName()); - } - String uri=client.getUrl().RFile(remoteFile); - System.out.println(" uri file: "+uri); - } -} diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/TransportManager.java b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/TransportManager.java index 96d2263..0385bec 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/TransportManager.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/TransportManager.java @@ -7,7 +7,7 @@ import java.util.List; import java.util.Map; import org.gcube.contentmanagement.blobstorage.resource.MemoryType; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.resource.StorageObject; import org.gcube.contentmanagement.blobstorage.service.operation.*; import org.gcube.contentmanagement.blobstorage.transport.backend.operation.LockOperator; @@ -49,7 +49,7 @@
public abstract class TransportManager { * @return the key of remote resource * @throws IOException if there are IO problems */ - public String downloadManager(Download download, RequestObject myFile, String key, Class type) throws Exception{ + public String downloadManager(Download download, MyFile myFile, String key, Class type) throws Exception{ String key4lock=null; if(myFile.isLock()){ download.setResource(myFile); @@ -76,7 +76,7 @@ public abstract class TransportManager { */ public String uploadManager(Upload upload, Object resource, String bucket, String key, boolean replace) throws Exception{ String id=null; - RequestObject file=(RequestObject)resource; + MyFile file=(MyFile)resource; if((file.getLockedKey()!=null) && (!file.getLockedKey().isEmpty())){ Unlock unlock= new UnlockOperator(upload.getServer(), upload.getUser(), upload.getPassword(), upload.getBucket(), upload.getMonitor(), upload.isChunk(), upload.getBackendType(), upload.getDbNames()); unlock.setResource(file); @@ -133,14 +133,14 @@ public abstract class TransportManager { * @return map that contains the object in the direcotry * @throws UnknownHostException */ - public abstract Map getValues(RequestObject resource, String bucket, Class< ? extends Object> type); + public abstract Map getValues(MyFile resource, String bucket, Class< ? extends Object> type); /** * delete a remote file * @param bucket identifies the remote file * @throws UnknownHostException */ - public abstract void removeRemoteFile(String bucket, RequestObject resource) throws UnknownHostException; + public abstract void removeRemoteFile(String bucket, MyFile resource) throws UnknownHostException; /** * delete a remote directory @@ -150,7 +150,7 @@ public abstract class TransportManager { * @throws UnknownHostException * */ - public abstract void removeDir(String remoteDir, RequestObject myFile) throws UnknownHostException; + public abstract void removeDir(String remoteDir, MyFile myFile) throws UnknownHostException; /** @@ -160,7 +160,7 @@ public abstract class TransportManager { * @return the size of the remote file * @throws UnknownHostException */ - public abstract long getSize(String bucket, RequestObject myFile); + public abstract long getSize(String bucket, MyFile myFile); /** * lock a remote file @@ -210,7 +210,7 @@ public abstract class TransportManager { * @throws UnknownHostException * @throws IllegalAccessException */ - public abstract long renewTTL(RequestObject resource) throws UnknownHostException, IllegalAccessException; + public abstract long renewTTL(MyFile resource) throws UnknownHostException, IllegalAccessException; /** diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/MongoIOManager.java b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/MongoIOManager.java index 86a8f32..944bdc8 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/MongoIOManager.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/MongoIOManager.java @@ -12,12 +12,10 @@ import java.util.List; import java.util.Set; import org.bson.types.ObjectId; import org.gcube.contentmanagement.blobstorage.resource.MemoryType; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.OPERATION; import 
org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.REMOTE_RESOURCE; -import org.gcube.contentmanagement.blobstorage.service.impl.ServiceEngine; -import org.gcube.contentmanagement.blobstorage.service.operation.Operation; import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants; import org.gcube.contentmanagement.blobstorage.transport.backend.util.DateUtils; import org.gcube.contentmanagement.blobstorage.transport.backend.util.MongoInputStream; @@ -119,13 +117,13 @@ public class MongoIOManager { logger.error("Problem to open the DB connection for gridfs file "); throw new RemoteBackendException("Problem to open the DB connection: "+ e.getMessage()); } - logger.info("new mongo connection pool opened"); + logger.info("mongo connection ready"); } return db; } //PATCHED METHODS - protected ObjectId getRemoteObject(GridFS gfs, RequestObject resource, GridFSDBFile f) throws IOException, IllegalAccessError { + protected ObjectId getRemoteObject(GridFS gfs, MyFile resource, GridFSDBFile f) throws IOException, IllegalAccessError { ObjectId id; id=(ObjectId)f.getId(); String lock=(String)f.get("lock"); @@ -141,7 +139,7 @@ public class MongoIOManager { return id; } - public ObjectId getRemoteObject(RequestObject resource, GridFSDBFile f) throws IOException, IllegalAccessError { + public ObjectId getRemoteObject(MyFile resource, GridFSDBFile f) throws IOException, IllegalAccessError { ObjectId id; id=(ObjectId)f.getId(); String lock=(String)f.get("lock"); @@ -187,7 +185,7 @@ public class MongoIOManager { * @param isLock indicates if the file must be locked * @throws IOException */ - private void download(GridFS gfs, RequestObject resource, GridFSDBFile f, boolean isLock) throws IOException { + private void download(GridFS gfs, MyFile resource, GridFSDBFile f, boolean isLock) throws IOException { OperationDefinition op=resource.getOperationDefinition(); logger.info("MongoClient download method: "+op.toString()); // if contains the field link it means that is a link hence I follow ne or more links @@ -218,7 +216,7 @@ public class MongoIOManager { * @param isLock indicates if the file must be locked * @throws IOException */ - private void download( RequestObject resource, GridFSDBFile f, boolean isLock) throws IOException { + private void download( MyFile resource, GridFSDBFile f, boolean isLock) throws IOException { OperationDefinition op=resource.getOperationDefinition(); logger.info("MongoClient download method: "+op.toString()); // if contains the field link it means that is a link hence I follow ne or more links @@ -242,7 +240,7 @@ public class MongoIOManager { } } - public void updateCommonFields(DBObject f, RequestObject resource, OPERATION op) { + public void updateCommonFields(DBObject f, MyFile resource, OPERATION op) { f.put("lastAccess", DateUtils.now("dd MM yyyy 'at' hh:mm:ss z")); String owner=resource.getOwner(); f.put("lastUser", owner); @@ -289,7 +287,7 @@ public class MongoIOManager { close(); throw new IllegalAccessError("The file is locked"); }else{ - oldId=checkAndRemove(fold, (RequestObject)resource); + oldId=checkAndRemove(fold, (MyFile)resource); } } // }else if(oldir == null){ @@ -301,7 +299,7 @@ public class MongoIOManager { } - public ObjectId checkAndRemove(GridFSDBFile f, RequestObject resource){ + public ObjectId checkAndRemove(GridFSDBFile f, MyFile resource){ String idToRemove=f.getId().toString(); logger.info("check and remove object with id "+idToRemove+" and path: "+f.get("filename")); ObjectId idFile=null; @@ -310,6 +308,7 @@ 
public class MongoIOManager { updateCommonFields(f, resource, OPERATION.REMOVE); // check if the file is linked if((f!=null) && (f.containsField(Costants.COUNT_IDENTIFIER)) && (f.get(Costants.COUNT_IDENTIFIER) != null)){ + logger.debug("RemovingObject: the following object "+idToRemove+" contains a COUNT field"); // this field is only added for reporting tool: storage-manager-trigger String filename=(String)f.get("filename"); f.put("onScope", filename); @@ -322,6 +321,7 @@ public class MongoIOManager { // check if the file is a link }else if((f.containsField(Costants.LINK_IDENTIFIER)) && (f.get(Costants.LINK_IDENTIFIER) != null )){ while((f!=null) && (f.containsField(Costants.LINK_IDENTIFIER)) && (f.get(Costants.LINK_IDENTIFIER) != null )){ + logger.debug("RemovingObject: the following object "+idToRemove+" contains a LINK field"); // remove f and decrement linkCount field on linked object String id=(String)f.get(Costants.LINK_IDENTIFIER); GridFSDBFile fLink=findGFSCollectionObject(new ObjectId(id)); @@ -408,12 +408,12 @@ public class MongoIOManager { //maybe this close is not needed // clean(); try{ - if(((RequestObject)resource).getInputStream()!= null){ + if(((MyFile)resource).getInputStream()!= null){ //upload with client inputStream f2 = writeByInputStream(resource, bucket, name, dir,idFile); f2.save(); - }else if(((((RequestObject)resource).getType() != null) && (((RequestObject)resource).getType().equals("output")))){ + }else if(((((MyFile)resource).getType() != null) && (((MyFile)resource).getType().equals("output")))){ // upload with outputstream f2 = writeByOutputStream(resource, bucket, name, dir, idFile); }else{ @@ -429,7 +429,7 @@ public class MongoIOManager { logger.debug("ObjectId: "+id); // if it is an outputstream don't close - if(!((((RequestObject)resource).getType() != null) && (((RequestObject)resource).getType().equals("output")))){ + if(!((((MyFile)resource).getType() != null) && (((MyFile)resource).getType().equals("output")))){ close(); } }catch(IOException e1){ @@ -454,9 +454,9 @@ public class MongoIOManager { throws IOException { GridFSInputFile f2; if(!(memoryType== MemoryType.VOLATILE)) - f2 = createGFSFileObject(new File(((RequestObject)resource).getLocalPath()), ((RequestObject)resource).getWriteConcern(), ((RequestObject)resource).getReadPreference()); + f2 = createGFSFileObject(new File(((MyFile)resource).getLocalPath()), ((MyFile)resource).getWriteConcern(), ((MyFile)resource).getReadPreference()); else - f2 = createGFSFileObject(new File(((RequestObject)resource).getLocalPath())); + f2 = createGFSFileObject(new File(((MyFile)resource).getLocalPath())); fillInputFile(resource, bucket, name, dir, f2, idFile); saveGFSFileObject(f2); return f2; @@ -466,11 +466,11 @@ public class MongoIOManager { String bucket, String name, String dir, ObjectId idFile) throws IOException { GridFSInputFile f2; if(!(memoryType== MemoryType.VOLATILE)) - f2 = createGFSFileObject(((RequestObject)resource).getName(), ((RequestObject)resource).getWriteConcern(), ((RequestObject)resource).getReadPreference()); + f2 = createGFSFileObject(((MyFile)resource).getName(), ((MyFile)resource).getWriteConcern(), ((MyFile)resource).getReadPreference()); else - f2 = createGFSFileObject(((RequestObject)resource).getName()); + f2 = createGFSFileObject(((MyFile)resource).getName()); fillInputFile(resource, bucket, name, dir, f2, idFile); - ((RequestObject)resource).setOutputStream(new MongoOutputStream(mongo, f2.getOutputStream())); + ((MyFile)resource).setOutputStream(new MongoOutputStream(mongo, 
f2.getOutputStream())); return f2; } @@ -479,13 +479,13 @@ public class MongoIOManager { throws IOException { GridFSInputFile f2; if(!(memoryType== MemoryType.VOLATILE)) - f2 = createGFSFileObject(((RequestObject)resource).getInputStream(), ((RequestObject)resource).getWriteConcern(),((RequestObject)resource).getReadPreference()); + f2 = createGFSFileObject(((MyFile)resource).getInputStream(), ((MyFile)resource).getWriteConcern(),((MyFile)resource).getReadPreference()); else - f2 = createGFSFileObject(((RequestObject)resource).getInputStream()); + f2 = createGFSFileObject(((MyFile)resource).getInputStream()); fillInputFile(resource, bucket, name, dir, f2, idFile); saveGFSFileObject(f2); - ((RequestObject)resource).getInputStream().close(); - ((RequestObject)resource).setInputStream(null); + ((MyFile)resource).getInputStream().close(); + ((MyFile)resource).setInputStream(null); return f2; } @@ -503,14 +503,14 @@ public class MongoIOManager { f2.put("name", name); if(dir!=null) f2.put("dir", dir); - if(((RequestObject)resource).getOwner() !=null) - f2.put("owner", ((RequestObject)resource).getOwner()); - String mime= ((RequestObject)resource).getMimeType(); + if(((MyFile)resource).getOwner() !=null) + f2.put("owner", ((MyFile)resource).getOwner()); + String mime= ((MyFile)resource).getMimeType(); if( mime !=null){ f2.put("mimetype", mime); } f2.put("creationTime", DateUtils.now("dd MM yyyy 'at' hh:mm:ss z")); - updateCommonFields(f2, (RequestObject)resource, null); + updateCommonFields(f2, (MyFile)resource, null); } @@ -521,7 +521,7 @@ public class MongoIOManager { * @param query * @throws UnknownHostException */ - protected void removeObject(GridFS gfs, BasicDBObject query, RequestObject resource){ + protected void removeObject(GridFS gfs, BasicDBObject query, MyFile resource){ List list = gfs.find(query); for(Iterator it=list.iterator(); it.hasNext();){ GridFSDBFile f=(GridFSDBFile)it.next(); @@ -535,19 +535,19 @@ public class MongoIOManager { } - public void setGenericProperties(RequestObject resource, String destination, + public void setGenericProperties(MyFile resource, String destination, String dir, GridFSInputFile destinationFile, String name) { updateCommonFields(destinationFile, resource, null); destinationFile.put("filename", destination); destinationFile.put("type", "file"); destinationFile.put("name", name); destinationFile.put("dir", dir); - destinationFile.put("owner", ((RequestObject)resource).getOwner()); - destinationFile.put("mimetype", ((RequestObject)resource).getMimeType()); + destinationFile.put("owner", ((MyFile)resource).getOwner()); + destinationFile.put("mimetype", ((MyFile)resource).getMimeType()); destinationFile.put("creationTime", DateUtils.now("dd MM yyyy 'at' hh:mm:ss z")); } - public DBObject setGenericMoveProperties(RequestObject resource, String filename, String dir, + public DBObject setGenericMoveProperties(MyFile resource, String filename, String dir, String name, DBObject sourcePathMetaCollection) { sourcePathMetaCollection.put("filename", filename); sourcePathMetaCollection.put("type", "file"); @@ -669,6 +669,10 @@ public class MongoIOManager { f=null; } } + if (f==null) { + logger.warn("The objectID is not present. 
Going to abort the current operation"); + throw new RemoteBackendException("Object id "+serverLocation+" not found."); + } // if the remote identifier is not a specified as ID, try to check if it is a valid remote path // in this case the remote identifier is a valid objectID but it indicates a path }else if ((remoteResourceIdentifier != null) && (!(remoteResourceIdentifier.equals(REMOTE_RESOURCE.ID))) && (f==null)){ @@ -835,7 +839,7 @@ public class MongoIOManager { * @param isLock * @return */ - protected String readByInputStream(RequestObject resource, GridFSDBFile f, boolean isLock, int count) { + protected String readByInputStream(MyFile resource, GridFSDBFile f, boolean isLock, int count) { String key=null; resource.setInputStream(new MongoInputStream(mongo, f.getInputStream())); return key; @@ -848,7 +852,7 @@ public class MongoIOManager { * @return * @throws IOException */ - protected String readByOutputStream(RequestObject resource, GridFSDBFile f, boolean isLock, int count) + protected String readByOutputStream(MyFile resource, GridFSDBFile f, boolean isLock, int count) throws IOException { String key=null; f.writeTo(resource.getOutputStream()); @@ -866,7 +870,7 @@ public class MongoIOManager { * @return * @throws IOException */ - protected String readByPath(RequestObject resource, GridFSDBFile f, boolean isLock, int count) + protected String readByPath(MyFile resource, GridFSDBFile f, boolean isLock, int count) throws IOException { String key=null; try{ @@ -1067,11 +1071,11 @@ public class MongoIOManager { public void close() { // if(mongo!=null) // mongo.close(); - logger.debug(" try to close backend but the close operation is not implemented"); + logger.debug(" cleaning mongo objects"); // logger.info("Mongo has been closed"); // mongo=null; -// gfs=null; -// db=null; + gfs=null; + db=null; } @@ -1079,6 +1083,8 @@ public class MongoIOManager { if(mongo!=null) mongo.close(); logger.info("Mongo pool closed"); + close(); + mongo=null; } public void removeGFSFile(GridFSDBFile f, ObjectId idF){ diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/MongoOperationManager.java b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/MongoOperationManager.java index c7c6f22..30504c1 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/MongoOperationManager.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/MongoOperationManager.java @@ -11,11 +11,11 @@ import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Objects; import org.gcube.contentmanagement.blobstorage.resource.MemoryType; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition; -import org.gcube.contentmanagement.blobstorage.service.impl.ServiceEngine; import org.gcube.contentmanagement.blobstorage.service.operation.*; import org.gcube.contentmanagement.blobstorage.transport.TransportManager; import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager; @@ -134,8 +134,10 @@ public class MongoOperationManager extends TransportManager{ } public void forceClose() { - mongoPrimaryInstance.forceClose(); - mongoSecondaryInstance.forceClose(); + if(Objects.nonNull(mongoPrimaryInstance)) + mongoPrimaryInstance.forceClose(); + if(Objects.nonNull(mongoSecondaryInstance)) + 
mongoSecondaryInstance.forceClose(); } /** @@ -149,7 +151,7 @@ public class MongoOperationManager extends TransportManager{ } @Override - public Map getValues(RequestObject resource, String bucket, Class type){ + public Map getValues(MyFile resource, String bucket, Class type){ Map map=null; try{ OperationDefinition op=resource.getOperationDefinition(); @@ -222,7 +224,7 @@ public class MongoOperationManager extends TransportManager{ @Override - public void removeRemoteFile(String bucket, RequestObject resource) throws UnknownHostException{ + public void removeRemoteFile(String bucket, MyFile resource) throws UnknownHostException{ logger.info("Check file: "+bucket+ " for removing operation"); GridFSDBFile f=mongoPrimaryInstance.retrieveRemoteDescriptor(bucket, null, true); if(f!=null){ @@ -245,7 +247,7 @@ public class MongoOperationManager extends TransportManager{ @Override - public void removeDir(String remoteDir, RequestObject resource){ + public void removeDir(String remoteDir, MyFile resource){ ArrayList dirs=new ArrayList(); dirs.add(remoteDir); // patch for incompatibility v 1-2 @@ -298,7 +300,7 @@ public class MongoOperationManager extends TransportManager{ } @Override - public long getSize(String remotePath, RequestObject file){ + public long getSize(String remotePath, MyFile file){ long length=-1; if(logger.isDebugEnabled()) logger.debug("MongoDB - get Size for pathServer: "+remotePath); @@ -344,9 +346,9 @@ public class MongoOperationManager extends TransportManager{ } @Override - public long renewTTL(RequestObject resource) throws UnknownHostException, IllegalAccessException{ + public long renewTTL(MyFile resource) throws UnknownHostException, IllegalAccessException{ long ttl=-1; - RequestObject file=(RequestObject)resource; + MyFile file=(MyFile)resource; REMOTE_RESOURCE remoteResourceIdentifier=file.getOperation().getRemoteResource(); String key=file.getLockedKey(); String remotePath=file.getRemotePath(); diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/CopyDirOperator.java b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/CopyDirOperator.java index 3bf7279..15fa62b 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/CopyDirOperator.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/CopyDirOperator.java @@ -8,9 +8,8 @@ import java.net.UnknownHostException; import java.util.ArrayList; import java.util.List; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.OPERATION; -import org.gcube.contentmanagement.blobstorage.service.impl.ServiceEngine; import org.gcube.contentmanagement.blobstorage.service.operation.CopyDir; import org.gcube.contentmanagement.blobstorage.service.operation.Monitor; import org.gcube.contentmanagement.blobstorage.transport.backend.MongoOperationManager; @@ -52,7 +51,7 @@ public class CopyDirOperator extends CopyDir { * @see org.gcube.contentmanagement.blobstorage.service.operation.CopyDir#execute(org.gcube.contentmanagement.blobstorage.transport.backend.MongoIO, org.gcube.contentmanagement.blobstorage.resource.MyFile, java.lang.String, java.lang.String) */ @Override - public List execute(MongoIOManager mongoPrimaryInstance, RequestObject resource, String sourcePath, String destinationPath) + public List execute(MongoIOManager 
mongoPrimaryInstance, MyFile resource, String sourcePath, String destinationPath) throws UnknownHostException { String source=sourcePath; source = appendFileSeparator(source); diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/CopyOperator.java b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/CopyOperator.java index 6d9ed03..69b5d3f 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/CopyOperator.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/CopyOperator.java @@ -7,7 +7,7 @@ import java.io.InputStream; import java.net.UnknownHostException; import org.bson.types.ObjectId; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.REMOTE_RESOURCE; import org.gcube.contentmanagement.blobstorage.service.operation.Copy; @@ -47,13 +47,13 @@ public class CopyOperator extends Copy { */ @Override // public String execute(MongoIO mongoPrimaryInstance) throws UnknownHostException { - public String execute(MongoIOManager mongoPrimaryInstance, RequestObject resource, String sourcePath, String destinationPath) throws UnknownHostException { + public String execute(MongoIOManager mongoPrimaryInstance, MyFile resource, String sourcePath, String destinationPath) throws UnknownHostException { String source=sourcePath; String destination=destinationPath; - String dir=((RequestObject)resource).getRemoteDir(); - String originalDir=((RequestObject)resource).getLocalDir(); + String dir=((MyFile)resource).getRemoteDir(); + String originalDir=((MyFile)resource).getLocalDir(); logger.debug("from directory: "+originalDir+ "to directory: "+dir); - String name=((RequestObject)resource).getName(); + String name=((MyFile)resource).getName(); REMOTE_RESOURCE remoteResourceIdentifier=resource.getOperation().getRemoteResource(); ObjectId destinationId=null; logger.debug("copy operation on Mongo backend, parameters: source path: "+source+" destination path: "+destination); @@ -96,11 +96,11 @@ public class CopyOperator extends Copy { public String safePut(MongoIOManager mongoPrimaryInstance, Object resource, String bucket, String key, boolean replace) throws UnknownHostException{ - OperationDefinition op=((RequestObject)resource).getOperationDefinition(); - REMOTE_RESOURCE remoteResourceIdentifier=((RequestObject)resource).getOperation().getRemoteResource(); + OperationDefinition op=((MyFile)resource).getOperationDefinition(); + REMOTE_RESOURCE remoteResourceIdentifier=((MyFile)resource).getOperation().getRemoteResource(); logger.info("MongoClient put method: "+op.toString()); - String dir=((RequestObject)resource).getRemoteDir(); - String name=((RequestObject)resource).getName(); + String dir=((MyFile)resource).getRemoteDir(); + String name=((MyFile)resource).getName(); ObjectId id=null; ObjectId oldId=null; // id of the remote file if present diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/DuplicateOperator.java b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/DuplicateOperator.java index af717a2..463a5fb 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/DuplicateOperator.java +++ 
b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/DuplicateOperator.java @@ -5,7 +5,7 @@ package org.gcube.contentmanagement.blobstorage.transport.backend.operation; import java.io.IOException; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.REMOTE_RESOURCE; import org.gcube.contentmanagement.blobstorage.service.operation.DuplicateFile; import org.gcube.contentmanagement.blobstorage.service.operation.Monitor; @@ -46,8 +46,8 @@ public class DuplicateOperator extends DuplicateFile { */ @Override public String execute(MongoIOManager mongoPrimaryInstance){ - String destination=((RequestObject)getResource()).getRemotePath()+Costants.DUPLICATE_SUFFIX; - String dir=((RequestObject)getResource()).getRemoteDir(); + String destination=((MyFile)getResource()).getRemotePath()+Costants.DUPLICATE_SUFFIX; + String dir=((MyFile)getResource()).getRemoteDir(); // String name=((MyFile)getResource()).getName(); if((getBucket() != null) && (!getBucket().isEmpty())){ REMOTE_RESOURCE remoteResourceIdentifier=resource.getOperation().getRemoteResource(); diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/LinkOperator.java b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/LinkOperator.java index 4639fd9..06e5cb0 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/LinkOperator.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/LinkOperator.java @@ -7,12 +7,11 @@ import java.net.UnknownHostException; import org.bson.types.ObjectId; import org.gcube.contentmanagement.blobstorage.resource.MemoryType; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.OPERATION; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.REMOTE_RESOURCE; import org.gcube.contentmanagement.blobstorage.service.operation.Link; import org.gcube.contentmanagement.blobstorage.service.operation.Monitor; -import org.gcube.contentmanagement.blobstorage.service.operation.Operation; import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager; import org.gcube.contentmanagement.blobstorage.transport.backend.util.Costants; import org.slf4j.Logger; @@ -49,7 +48,7 @@ public class LinkOperator extends Link { * @see org.gcube.contentmanagement.blobstorage.service.operation.Link#execute(org.gcube.contentmanagement.blobstorage.transport.backend.MongoIO, org.gcube.contentmanagement.blobstorage.transport.backend.MongoIO, org.gcube.contentmanagement.blobstorage.resource.MyFile, java.lang.String) */ @Override - public String execute(MongoIOManager mongoPrimaryInstance, MongoIOManager mongoSecondaryInstance, RequestObject resource, String sourcePath, String destinationPath) throws UnknownHostException { + public String execute(MongoIOManager mongoPrimaryInstance, MongoIOManager mongoSecondaryInstance, MyFile resource, String sourcePath, String destinationPath) throws UnknownHostException { boolean replace=true; String source=sourcePath; String destination=destinationPath; @@ -82,7 +81,7 @@ public class LinkOperator extends Link { String oldir=(String)fold.get("dir"); if(logger.isDebugEnabled()) 
logger.debug("old dir found "+oldir); - if((oldir.equalsIgnoreCase(((RequestObject)resource).getRemoteDir()))){ + if((oldir.equalsIgnoreCase(((MyFile)resource).getRemoteDir()))){ ObjectId oldId=(ObjectId) fold.getId(); if(!replace){ return oldId.toString(); diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/LockOperator.java b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/LockOperator.java index d31310a..7a5d82d 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/LockOperator.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/LockOperator.java @@ -4,11 +4,10 @@ package org.gcube.contentmanagement.blobstorage.transport.backend.operation; import java.io.FileNotFoundException; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.OPERATION; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.REMOTE_RESOURCE; -import org.gcube.contentmanagement.blobstorage.service.operation.Download; import org.gcube.contentmanagement.blobstorage.service.operation.Lock; import org.gcube.contentmanagement.blobstorage.service.operation.Monitor; import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager; @@ -46,7 +45,7 @@ public class LockOperator extends Lock { * @see org.gcube.contentmanagement.blobstorage.service.operation.Lock#execute(org.gcube.contentmanagement.blobstorage.transport.backend.MongoIO, org.gcube.contentmanagement.blobstorage.transport.backend.MongoIO) */ @Override - public String execute(MongoIOManager mongoPrimaryInstance, MongoIOManager mongoSecondaryInstance, RequestObject resource, String serverLocation) throws Exception { + public String execute(MongoIOManager mongoPrimaryInstance, MongoIOManager mongoSecondaryInstance, MyFile resource, String serverLocation) throws Exception { OperationDefinition op=resource.getOperationDefinition(); REMOTE_RESOURCE remoteResourceIdentifier=resource.getOperation().getRemoteResource(); // if((resource.getLocalPath()!= null) && (!resource.getLocalPath().isEmpty())){ diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/MoveDirOperator.java b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/MoveDirOperator.java index b16f715..ddb78ff 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/MoveDirOperator.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/MoveDirOperator.java @@ -9,9 +9,8 @@ import java.util.List; import org.bson.types.ObjectId; import org.gcube.contentmanagement.blobstorage.resource.MemoryType; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.OPERATION; -import org.gcube.contentmanagement.blobstorage.service.impl.ServiceEngine; import org.gcube.contentmanagement.blobstorage.service.operation.Monitor; import org.gcube.contentmanagement.blobstorage.service.operation.MoveDir; import org.gcube.contentmanagement.blobstorage.transport.backend.MongoOperationManager; @@ -53,7 +52,7 @@ public class 
MoveDirOperator extends MoveDir { * @see org.gcube.contentmanagement.blobstorage.service.operation.MoveDir#execute(org.gcube.contentmanagement.blobstorage.transport.backend.MongoIO, org.gcube.contentmanagement.blobstorage.resource.MyFile, java.lang.String, java.lang.String) */ @Override - public List execute(MongoIOManager mongoPrimaryInstance, RequestObject resource, String sourcePath, + public List execute(MongoIOManager mongoPrimaryInstance, MyFile resource, String sourcePath, String destinationPath, MemoryType memoryType) throws UnknownHostException { String source=sourcePath; source = appendFileSeparator(source); diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/MoveOperator.java b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/MoveOperator.java index 22f908f..7469146 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/MoveOperator.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/MoveOperator.java @@ -7,9 +7,8 @@ import java.net.InetAddress; import java.net.UnknownHostException; import org.gcube.contentmanagement.blobstorage.resource.MemoryType; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.OPERATION; -import org.gcube.contentmanagement.blobstorage.service.impl.ServiceEngine; import org.gcube.contentmanagement.blobstorage.service.operation.Monitor; import org.gcube.contentmanagement.blobstorage.service.operation.Move; import org.gcube.contentmanagement.blobstorage.transport.backend.MongoOperationManager; @@ -54,12 +53,12 @@ public class MoveOperator extends Move { */ @Override // public String execute(MongoIO mongoPrimaryInstance, MemoryType memoryType) throws UnknownHostException { - public String execute(MongoIOManager mongoPrimaryInstance, MemoryType memoryType, RequestObject resource, String sourcePath, String destinationPath) throws UnknownHostException { + public String execute(MongoIOManager mongoPrimaryInstance, MemoryType memoryType, MyFile resource, String sourcePath, String destinationPath) throws UnknownHostException { String source=sourcePath; String destination=destinationPath; resource.setLocalPath(sourcePath); - String dir=((RequestObject)resource).getRemoteDir(); - String name=((RequestObject)resource).getName(); + String dir=((MyFile)resource).getRemoteDir(); + String name=((MyFile)resource).getName(); String destinationId=null; String sourceId=null; logger.info("move operation on Mongo backend, parameters: source path: "+source+" destination path: "+destination); @@ -112,7 +111,7 @@ public class MoveOperator extends Move { // update fields mongoPrimaryInstance.buildDirTree(mongoPrimaryInstance.getMetaDataCollection(mongoPrimaryInstance.getConnectionDB( MongoOperationManager.getPrimaryCollectionName(), true)), dir); sourcePathMetaCollection= new BasicDBObject(); - sourcePathMetaCollection.put("$set", new BasicDBObject().append("dir", dir).append("filename", destinationPath).append("name", name).append("owner", ((RequestObject)resource).getOwner())); + sourcePathMetaCollection.put("$set", new BasicDBObject().append("dir", dir).append("filename", destinationPath).append("name", name).append("owner", ((MyFile)resource).getOwner())); logger.info("new object merged "); mongoPrimaryInstance.printObject(sourcePathMetaCollection); //applies the 
update @@ -175,7 +174,7 @@ public class MoveOperator extends Move { } - private DBObject setCommonFields(DBObject sourcePathMetaCollection, RequestObject resource, OPERATION op) { + private DBObject setCommonFields(DBObject sourcePathMetaCollection, MyFile resource, OPERATION op) { String owner=resource.getOwner(); if(op == null){ op=resource.getOperationDefinition().getOperation(); diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/SoftCopyOperator.java b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/SoftCopyOperator.java index 222f21b..888de1b 100644 --- a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/SoftCopyOperator.java +++ b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/SoftCopyOperator.java @@ -5,15 +5,15 @@ package org.gcube.contentmanagement.blobstorage.transport.backend.operation; import java.io.InputStream; import java.net.UnknownHostException; +import java.util.Objects; import org.bson.types.ObjectId; import org.gcube.contentmanagement.blobstorage.resource.MemoryType; -import org.gcube.contentmanagement.blobstorage.resource.RequestObject; +import org.gcube.contentmanagement.blobstorage.resource.MyFile; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.LOCAL_RESOURCE; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.OPERATION; import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.REMOTE_RESOURCE; import org.gcube.contentmanagement.blobstorage.service.operation.Monitor; -import org.gcube.contentmanagement.blobstorage.service.operation.Operation; import org.gcube.contentmanagement.blobstorage.service.operation.SoftCopy; import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager; import org.gcube.contentmanagement.blobstorage.transport.backend.MongoOperationManager; @@ -26,6 +26,7 @@ import org.slf4j.LoggerFactory; import com.mongodb.BasicDBObject; import com.mongodb.DBCollection; import com.mongodb.DBObject; +import com.mongodb.DuplicateKeyException; import com.mongodb.gridfs.GridFSDBFile; /** @@ -37,7 +38,7 @@ public class SoftCopyOperator extends SoftCopy { Logger logger=LoggerFactory.getLogger(SoftCopyOperator.class); private MemoryType memoryType; private MongoIOManager mongoPrimaryInstance; - private RequestObject resource; + private MyFile resource; /** * @param server * @param user @@ -55,7 +56,7 @@ public class SoftCopyOperator extends SoftCopy { } @Override - public String execute(MongoIOManager mongoPrimaryInstance, RequestObject resource, String sourcePath, String destinationPath) + public String execute(MongoIOManager mongoPrimaryInstance, MyFile resource, String sourcePath, String destinationPath) throws UnknownHostException { REMOTE_RESOURCE remoteResourceIdentifier=resource.getOperation().getRemoteResource(); LOCAL_RESOURCE localResourceIdentifier=resource.getOperation().getLocalResource(); @@ -70,9 +71,9 @@ public class SoftCopyOperator extends SoftCopy { else destination=destinationPath; if(resource!=null){ - String dir=((RequestObject)resource).getRemoteDir(); - String name=((RequestObject)resource).getName(); - setMemoryType(((RequestObject)resource).getGcubeMemoryType()); + String dir=((MyFile)resource).getRemoteDir(); + String name=((MyFile)resource).getName(); + setMemoryType(((MyFile)resource).getGcubeMemoryType()); } setMongoPrimaryInstance(mongoPrimaryInstance); ObjectId mapId=null; @@ -85,6 +86,7 @@ public class 
SoftCopyOperator extends SoftCopy { // if it contains a link field, then I'm going to retrieve the related payload sourceObject = mongoPrimaryInstance.retrieveLinkPayload(sourceObject); ObjectId sourceId=(ObjectId)sourceObject.getId(); + logger.debug("source id is "+sourceId); InputStream is= sourceObject.getInputStream(); resource.setInputStream(is); GridFSDBFile dest = null; @@ -103,11 +105,18 @@ public class SoftCopyOperator extends SoftCopy { ObjectId removedId=null; // if the destination location is not empty if (dest != null){ + String destId=dest.getId().toString(); + logger.debug("destination id is "+destId); + // in this case the source and dest are the same object + if(sourceId.toString().equals(destId)) { + logger.info("source and destination are pointing to the same object. The copy operation will have no effects"); + return destId; + } // remove the destination file. The third parameter to true replace the file otherwise the remote id is returned if(resource.isReplace()){ removedId = mongoPrimaryInstance.removeFile(resource, null, resource.isReplace(), null, dest); }else{ - return dest.getId().toString(); + return destId; } } // get metacollection instance @@ -117,7 +126,7 @@ public class SoftCopyOperator extends SoftCopy { ObjectId md5Id=getDuplicatesMap(md5); // check if the source object is already a map if(isMap(sourceObject)){ - logger.debug("the sourceObject with the following id: "+mapId+" is already a map"); + logger.debug("the sourceObject with the following id: "+sourceId+" is already a map"); mapId=sourceId; // then it's needed to add only the destObject to the map //first: create link object to destination place @@ -153,7 +162,7 @@ public class SoftCopyOperator extends SoftCopy { return destObject.getId().toString(); } - private String fillGenericDestinationFields(RequestObject resource, ObjectId souceId) { + private String fillGenericDestinationFields(MyFile resource, ObjectId souceId) { String destination; destination=resource.getRootPath()+souceId; resource.setName(souceId.toString()); @@ -173,17 +182,17 @@ public class SoftCopyOperator extends SoftCopy { */ private ObjectId createNewDuplicatesMap(DBCollection metaCollectionInstance, Object resource, GridFSDBFile sourceObject, String bucket, ObjectId sourceId) throws UnknownHostException { ObjectId id = null; - String dir= ((RequestObject)resource).getRemoteDir(); + String dir= ((MyFile)resource).getRemoteDir(); // create new dir (is it really needed in case of map object?) 
 		if((dir !=null && !dir.isEmpty()) && (bucket !=null && !bucket.isEmpty())){
 			getMongoPrimaryInstance().buildDirTree(getMongoPrimaryInstance().getMetaDataCollection(null), dir);
 		}
 		// create new map object
-		id= createNewObjectMap(metaCollectionInstance, (RequestObject)resource, sourceObject, sourceId);
+		id= createNewObjectMap(metaCollectionInstance, (MyFile)resource, sourceObject, sourceId);
 		return id;
 	}
 
-	private ObjectId createNewObjectMap(DBCollection metaCollectionInstance, RequestObject resource, GridFSDBFile source, ObjectId sourceId) throws UnknownHostException {
+	private ObjectId createNewObjectMap(DBCollection metaCollectionInstance, MyFile resource, GridFSDBFile source, ObjectId sourceId) throws UnknownHostException {
 		String md5=source.getMD5();
 		// set type of object
 		DBObject document=new BasicDBObject("type", "map");
@@ -198,7 +207,7 @@ public class SoftCopyOperator extends SoftCopy {
 		return id;
 	}
 
-	private DBObject createNewLinkObject(RequestObject resource, GridFSDBFile sourceObject, String destination, DBCollection metaCollectionInstance, String md5, ObjectId mapId, ObjectId newId){
+	private DBObject createNewLinkObject(MyFile resource, GridFSDBFile sourceObject, String destination, DBCollection metaCollectionInstance, String md5, ObjectId mapId, ObjectId newId){
 		DBObject document=new BasicDBObject("type", "file");
 		document.put("filename", destination);
 		document.put("name", resource.getName());
@@ -208,25 +217,37 @@ public class SoftCopyOperator extends SoftCopy {
 		ObjectId id=null;
 		if(newId == null){
 			id=new ObjectId();
-			logger.debug("generated id for new object link"+id);
+			logger.debug("generated id for new object link "+id);
 		}else{
 			id=newId;
-			logger.debug("restored id for new object link"+id);
+			logger.debug("restored id for new object link "+id);
 		}
 		document.put("_id", id);
 		return fillCommonfields(document, resource, sourceObject, metaCollectionInstance, md5);
 	}
 
-	private DBObject fillCommonfields(DBObject document, RequestObject resource, GridFSDBFile sourceObject, DBCollection metaCollectionInstance, String md5) {
-		document.put("mimetype", ((RequestObject)resource).getMimeType());
+	private DBObject fillCommonfields(DBObject document, MyFile resource, GridFSDBFile sourceObject, DBCollection metaCollectionInstance, String md5) {
+		document.put("mimetype", ((MyFile)resource).getMimeType());
 		document.put("creationTime", DateUtils.now("dd MM yyyy 'at' hh:mm:ss z"));
 		document.put("md5", md5);
 		document.put("length", sourceObject.getLength());
 		// set chunkSize inherited from original object
 		document.put("chunkSize", sourceObject.getChunkSize());
-		metaCollectionInstance.insert(document);
-		metaCollectionInstance.save(document);
+		try {
+			metaCollectionInstance.insert(document);
+			metaCollectionInstance.save(document);
+		}catch (DuplicateKeyException e) {
+			logger.warn("key already present or not completely removed. Wait a few seconds and retry");
+			try {
+				Thread.sleep(2000);
+			} catch (InterruptedException e1) {
+				// interrupted while waiting before the retry; log and continue
+				e1.printStackTrace();
+			}
+			metaCollectionInstance.insert(document);
+			metaCollectionInstance.save(document);
+		}
 		return document;
 	}
@@ -253,11 +274,21 @@ public class SoftCopyOperator extends SoftCopy {
 		searchQuery.put("_id" ,mapId);
 		DBObject mapObject=mongoPrimaryInstance.findCollectionObject(metaCollectionInstance, searchQuery);
 //		BasicDBObject updateObject= new BasicDBObject().append("$inc",new BasicDBObject().append("count", 1));;
-		int count=(int)mapObject.get("count");
-		count++;
-		mapObject.put("count", count);
-//		metaCollectionInstance.update(mapObject, updateObject);
-		metaCollectionInstance.save(mapObject);
+		if(!Objects.isNull(mapObject)) {
+			Object counting=mapObject.get("count");
+			if(Objects.nonNull(counting)) {
+				int count=(int)counting;
+				count++;
+				mapObject.put("count", count);
+			}else {
+				mapObject.put("count", 1);
+			}
+//			metaCollectionInstance.update(mapObject, updateObject);
+			metaCollectionInstance.save(mapObject);
+		}else {
+			logger.error("no object found associated to the following id: "+mapId);
+		}
+
 	}
 
 	private ObjectId getDuplicatesMap(String md5){
@@ -271,8 +302,11 @@ public class SoftCopyOperator extends SoftCopy {
 	 */
 	private boolean isMap(GridFSDBFile sourceObject) {
 		String type=sourceObject.get("type").toString();
-		if(type.equals("map"))
+		logger.debug("object type: "+type);
+		if(type.equals("map")) {
+			logger.debug("sourceFile is a map: "+sourceObject.toString());
 			return true;
+		}
 		return false;
 	}
@@ -305,11 +339,11 @@ public class SoftCopyOperator extends SoftCopy {
 		this.mongoPrimaryInstance = mongoPrimaryInstance;
 	}
 
-	public RequestObject getResource() {
+	public MyFile getResource() {
 		return resource;
 	}
 
-	public void setResource(RequestObject resource) {
+	public void setResource(MyFile resource) {
 		this.resource = resource;
 	}
diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/UnlockOperator.java b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/UnlockOperator.java
index 1176475..803bfe7 100644
--- a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/UnlockOperator.java
+++ b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/UnlockOperator.java
@@ -5,13 +5,12 @@ package org.gcube.contentmanagement.blobstorage.transport.backend.operation;
 
 import java.io.FileNotFoundException;
 
-import org.gcube.contentmanagement.blobstorage.resource.RequestObject;
+import org.gcube.contentmanagement.blobstorage.resource.MyFile;
 import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition;
 import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.OPERATION;
 import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.REMOTE_RESOURCE;
 import org.gcube.contentmanagement.blobstorage.service.operation.Monitor;
 import org.gcube.contentmanagement.blobstorage.service.operation.Unlock;
-import org.gcube.contentmanagement.blobstorage.service.operation.Upload;
 import org.gcube.contentmanagement.blobstorage.transport.backend.MongoIOManager;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -46,7 +45,7 @@ public class UnlockOperator extends Unlock {
 	 * @see org.gcube.contentmanagement.blobstorage.service.operation.Unlock#execute(org.gcube.contentmanagement.blobstorage.transport.backend.MongoIO, org.gcube.contentmanagement.blobstorage.transport.backend.MongoIO)
 	 */
 	@Override
-	public String execute(MongoIOManager mongoPrimaryInstance, MongoIOManager mongoSecondaryInstance, RequestObject resource, String bucket, String key4unlock) throws Exception {
+	public String execute(MongoIOManager mongoPrimaryInstance, MongoIOManager mongoSecondaryInstance, MyFile resource, String bucket, String key4unlock) throws Exception {
 		String id=null;
 		OperationDefinition op=resource.getOperationDefinition();
 		REMOTE_RESOURCE remoteResourceIdentifier=resource.getOperation().getRemoteResource();
@@ -59,8 +58,8 @@ public class UnlockOperator extends Unlock {
 //			mongoPrimaryInstance.close();
 //			resource.setOperation(op);
 //		}
-		String dir=((RequestObject)resource).getRemoteDir();
-		String name=((RequestObject)resource).getName();
+		String dir=((MyFile)resource).getRemoteDir();
+		String name=((MyFile)resource).getName();
 		String path=getBucket();
 		if(logger.isDebugEnabled())
 			logger.debug("DIR: "+dir+" name: "+name+" fullPath "+path+" bucket: "+bucket);
@@ -69,7 +68,7 @@ public class UnlockOperator extends Unlock {
 			String oldir=(String)f.get("dir");
 			if(logger.isDebugEnabled())
 				logger.debug("old dir found "+oldir);
-			if((oldir.equalsIgnoreCase(((RequestObject)resource).getRemoteDir())) || ((RequestObject)resource).getRemoteDir()==null){
+			if((oldir.equalsIgnoreCase(((MyFile)resource).getRemoteDir())) || ((MyFile)resource).getRemoteDir()==null){
 				String lock=(String)f.get("lock");
 				//check if the od file is locked
 				if((lock !=null) && (!lock.isEmpty())){
@@ -77,14 +76,14 @@ public class UnlockOperator extends Unlock {
 					if(lck.equalsIgnoreCase(key4unlock)){
 						f.put("lock", null);
 						f.put("timestamp", null);
-						mongoPrimaryInstance.updateCommonFields((GridFSFile)f, (RequestObject)resource, OPERATION.UNLOCK);
+						mongoPrimaryInstance.updateCommonFields((GridFSFile)f, (MyFile)resource, OPERATION.UNLOCK);
 						f.save();
 					}else{
 						mongoPrimaryInstance.close();
 						throw new IllegalAccessError("bad key for unlock");
 					}
 				}else{
-					mongoPrimaryInstance.updateCommonFields((GridFSFile)f, (RequestObject)resource, OPERATION.UNLOCK);
+					mongoPrimaryInstance.updateCommonFields((GridFSFile)f, (MyFile)resource, OPERATION.UNLOCK);
 					f.save();
 				}
 			}else{
diff --git a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/UploadOperator.java b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/UploadOperator.java
index 9983a79..324fce7 100644
--- a/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/UploadOperator.java
+++ b/src/main/java/org/gcube/contentmanagement/blobstorage/transport/backend/operation/UploadOperator.java
@@ -5,7 +5,7 @@ package org.gcube.contentmanagement.blobstorage.transport.backend.operation;
 import java.io.IOException;
 
 import org.bson.types.ObjectId;
-import org.gcube.contentmanagement.blobstorage.resource.RequestObject;
+import org.gcube.contentmanagement.blobstorage.resource.MyFile;
 import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition;
 import org.gcube.contentmanagement.blobstorage.resource.OperationDefinition.REMOTE_RESOURCE;
 import org.gcube.contentmanagement.blobstorage.service.operation.Monitor;
@@ -44,12 +44,12 @@ public class UploadOperator extends Upload {
 	 * @see org.gcube.contentmanagement.blobstorage.service.operation.Upload#execute(org.gcube.contentmanagement.blobstorage.transport.backend.MongoIO, org.gcube.contentmanagement.blobstorage.transport.backend.MongoIO)
 	 */
 	@Override
-	public String execute(MongoIOManager mongoPrimaryInstance, MongoIOManager mongoSecondaryInstance, RequestObject resource, String bucket, boolean replace) throws IOException {
-		OperationDefinition op=((RequestObject)resource).getOperationDefinition();
-		REMOTE_RESOURCE remoteResourceIdentifier=((RequestObject)resource).getOperation().getRemoteResource();
+	public String execute(MongoIOManager mongoPrimaryInstance, MongoIOManager mongoSecondaryInstance, MyFile resource, String bucket, boolean replace) throws IOException {
+		OperationDefinition op=((MyFile)resource).getOperationDefinition();
+		REMOTE_RESOURCE remoteResourceIdentifier=((MyFile)resource).getOperation().getRemoteResource();
 		logger.info("MongoClient put method: "+op.toString());
-		String dir=((RequestObject)resource).getRemoteDir();
-		String name=((RequestObject)resource).getName();
+		String dir=((MyFile)resource).getRemoteDir();
+		String name=((MyFile)resource).getName();
 		Object id=null;
 		ObjectId oldId=null;
 		// id of the remote file if present
@@ -79,11 +79,11 @@ public class UploadOperator extends Upload {
 	}
 
 	public String executeSafeMode(MongoIOManager mongoPrimaryInstance, MongoIOManager mongoSecondaryInstance) throws IOException {
-		OperationDefinition op=((RequestObject)resource).getOperationDefinition();
-		REMOTE_RESOURCE remoteResourceIdentifier=((RequestObject)resource).getOperation().getRemoteResource();
+		OperationDefinition op=((MyFile)resource).getOperationDefinition();
+		REMOTE_RESOURCE remoteResourceIdentifier=((MyFile)resource).getOperation().getRemoteResource();
 		logger.info("MongoClient put method: "+op.toString());
-		String dir=((RequestObject)resource).getRemoteDir();
-		String name=((RequestObject)resource).getName();
+		String dir=((MyFile)resource).getRemoteDir();
+		String name=((MyFile)resource).getName();
 		ObjectId id=null;
 		ObjectId oldId=null;
 		// id of the remote file if present
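
Note on the SoftCopyOperator hunks above: a soft copy now returns immediately when source and destination resolve to the same GridFS object, the duplicates-map counter is only incremented when the map document and its count field actually exist, and the metadata insert is retried once after a com.mongodb.DuplicateKeyException. The following is a minimal illustrative sketch of that insert-retry pattern only, not part of the patch; the class and method names (MetadataInsertRetrySketch, insertWithRetry) are hypothetical, and it assumes the legacy mongo-java-driver 3.x API (DBCollection.insert/save) already used by this module.

    import com.mongodb.DBCollection;
    import com.mongodb.DBObject;
    import com.mongodb.DuplicateKeyException;

    public class MetadataInsertRetrySketch {

        // Insert the metadata document; if a document with the same _id is still
        // indexed (e.g. a concurrent remove has not completed yet), wait two
        // seconds and retry once, mirroring the behaviour added to fillCommonfields.
        static DBObject insertWithRetry(DBCollection metaCollection, DBObject document) {
            try {
                metaCollection.insert(document);
                metaCollection.save(document);
            } catch (DuplicateKeyException e) {
                try {
                    Thread.sleep(2000);
                } catch (InterruptedException ie) {
                    Thread.currentThread().interrupt(); // preserve the interrupt flag
                }
                metaCollection.insert(document);
                metaCollection.save(document);
            }
            return document;
        }
    }

The patch itself keeps the printStackTrace handling for the interrupted sleep; restoring the interrupt flag, as in the sketch, is a common alternative.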