Resolved merge conflict

Roberto Cirillo 2021-05-14 16:44:47 +02:00
commit 6aabf6729d
21 changed files with 152 additions and 92 deletions

View File

@@ -6,23 +6,14 @@
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="src" output="target/test-classes" path="src/test/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
<attribute name="test" value="true"/>
</attributes>
</classpathentry>
<classpathentry excluding="**" kind="src" output="target/test-classes" path="src/test/resources">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.7">
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>

.gitignore (vendored, new file)
View File

@@ -0,0 +1 @@
/target/

View File

@@ -1,12 +1,13 @@
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.8
org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
org.eclipse.jdt.core.compiler.compliance=1.7
org.eclipse.jdt.core.compiler.compliance=1.8
org.eclipse.jdt.core.compiler.debug.lineNumber=generate
org.eclipse.jdt.core.compiler.debug.localVariable=generate
org.eclipse.jdt.core.compiler.debug.sourceFile=generate
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
org.eclipse.jdt.core.compiler.source=1.7
org.eclipse.jdt.core.compiler.release=disabled
org.eclipse.jdt.core.compiler.source=1.8

View File

@@ -1,5 +1,6 @@
# Changelog for storage-manager-core
<<<<<<< HEAD
## [v3.1.0-SNAPSHOT]
* upgrade mongo-java-client to version 3.12.0
* removed close method for mongo client. Now the connection pool is managed by java driver
@@ -12,6 +13,18 @@
## [v3.0.0-SNAPSHOT]
* added token and region parameters in order to be compliant with s3 object storage
* refactoring code
=======
## [v2.12.1-SNAPSHOT]
* add check on transport layer instance: if the memory type is not the same, a new transportLayer is instantiated
* move memoryType var from super class TransportManager
* convert BasicDBObject to DBObject the return type used for metadata collections
## [v2.12.0-SNAPSHOT]
* One pool for every operation: static Operation class; no mongo close operation
## [v2.10.0-SNAPSHOT]
* upgrade mongo-java-driver to 3.12.0
>>>>>>> bug21078
## [v2.10.0-SNAPSHOT]
* added input parameter to getSize method in order to be compatible with the needs of the s3 client

pom.xml
View File

@@ -8,10 +8,17 @@
</parent>
<groupId>org.gcube.contentmanagement</groupId>
<artifactId>storage-manager-core</artifactId>
<<<<<<< HEAD
<version>2.9.0-SNAPSHOT</version>
<properties>
<distroDirectory>${project.basedir}/distro</distroDirectory>
</properties>
=======
<version>2.12.1-SNAPSHOT</version>
<!-- <properties> -->
<!-- <distroDirectory>${project.basedir}/distro</distroDirectory> -->
<!-- </properties> -->
>>>>>>> bug21078
<scm>
<connection>scm:git:https://code-repo.d4science.org/gCubeSystem/${project.artifactId}.git</connection>
<developerConnection>scm:git:https://code-repo.d4science.org/gCubeSystem/${project.artifactId}.git</developerConnection>
@@ -54,4 +61,56 @@
<version>1.8</version>
</dependency>
</dependencies>
<<<<<<< HEAD
=======
<!-- <build> -->
<!-- <plugins> -->
<!-- <plugin> -->
<!-- <groupId>org.apache.maven.plugins</groupId> -->
<!-- <artifactId>maven-resources-plugin</artifactId> -->
<!-- <version>2.5</version> -->
<!-- <executions> -->
<!-- <execution> -->
<!-- <id>copy-profile</id> -->
<!-- <phase>install</phase> -->
<!-- <goals> -->
<!-- <goal>copy-resources</goal> -->
<!-- </goals> -->
<!-- <configuration> -->
<!-- <outputDirectory>target</outputDirectory> -->
<!-- <resources> -->
<!-- <resource> -->
<!-- <directory>${distroDirectory}</directory> -->
<!-- <filtering>true</filtering> -->
<!-- <includes> -->
<!-- <include>profile.xml</include> -->
<!-- </includes> -->
<!-- </resource> -->
<!-- </resources> -->
<!-- </configuration> -->
<!-- </execution> -->
<!-- </executions> -->
<!-- </plugin> -->
<!-- <plugin> -->
<!-- <groupId>org.apache.maven.plugins</groupId> -->
<!-- <artifactId>maven-assembly-plugin</artifactId> -->
<!-- -->
<!-- <configuration> -->
<!-- <descriptors> -->
<!-- <descriptor>${distroDirectory}/descriptor.xml</descriptor> -->
<!-- </descriptors> -->
<!-- </configuration> -->
<!-- <executions> -->
<!-- <execution> -->
<!-- <id>servicearchive</id> -->
<!-- <phase>install</phase> -->
<!-- <goals> -->
<!-- <goal>single</goal> -->
<!-- </goals> -->
<!-- </execution> -->
<!-- </executions> -->
<!-- </plugin> -->
<!-- </plugins> -->
<!-- </build> -->
>>>>>>> bug21078
</project>

View File

@@ -29,6 +29,7 @@ public class DirectoryBucket {
String path;
String[] server;
String user, password;
TransportManager tm;
public DirectoryBucket(String[] server, String user, String password, String path, String author){
if(logger.isDebugEnabled())
logger.debug("DirectoryBucket PATH: "+path);
@@ -91,7 +92,7 @@ public class DirectoryBucket {
String[] bucketList=null;
bucketList=retrieveBucketsName(path, rootArea);
TransportManagerFactory tmf=new TransportManagerFactory(server, user, password);
TransportManager tm=tmf.getTransport(backendType, resource.getGcubeMemoryType(), dbNames, resource.getWriteConcern(), resource.getReadPreference(), resource.getToken(), resource.getRegion());
tm=tmf.getTransport(tm, backendType, resource.getGcubeMemoryType(), dbNames, resource.getWriteConcern(), resource.getReadPreference());
// TerrastoreClient client=new TerrastoreClient( new OrderedHostManager(Arrays.asList(server)), new HTTPConnectionFactory());
for(int i=0;i<bucketList.length;i++){
if(logger.isDebugEnabled())
@@ -124,7 +125,7 @@
logger.debug("bucketDir Coded: "+bucketDirCoded);
bucketList=retrieveBucketsName(bucket, rootArea);
TransportManagerFactory tmf=new TransportManagerFactory(server, user, password);
TransportManager tm=tmf.getTransport(backendType, resource.getGcubeMemoryType(), dbNames, resource.getWriteConcern(),resource.getReadPreference(), resource.getToken(), resource.getRegion());
tm=tmf.getTransport(tm, backendType, resource.getGcubeMemoryType(), dbNames, resource.getWriteConcern(),resource.getReadPreference());
for(int i=0;i<bucketList.length;i++){
if(logger.isDebugEnabled())
logger.debug("REMOVE: check "+bucketList[i]+" bucketDirCoded: "+bucketDirCoded );

View File

@@ -27,7 +27,7 @@ import org.gcube.contentmanagement.blobstorage.resource.StorageObject;
*/
public class RemoteResource extends Resource{
TransportManager tm;
public RemoteResource(RequestObject file, ServiceEngine engine) {
super(file, engine);
@@ -112,7 +112,7 @@ public class RemoteResource extends Resource{
if(engine.getCurrentOperation().equalsIgnoreCase("showdir")){
dir = new BucketCoding().bucketDirCoding(dir, engine.getContext());
TransportManagerFactory tmf= new TransportManagerFactory(engine.primaryBackend, engine.getBackendUser(), engine.getBackendPassword());
TransportManager tm=tmf.getTransport(engine.getBackendType(), engine.getGcubeMemoryType(), engine.getDbNames(), engine.getWriteConcern(), engine.getReadConcern(), engine.getToken(), engine.getRegion());
tm=tmf.getTransport(tm, engine.getBackendType(), engine.getGcubeMemoryType(), engine.getDbNames(), engine.getWriteConcern(), engine.getReadConcern());
Map<String, StorageObject> mapDirs=null;
try {
mapDirs = tm.getValues(getMyFile(), dir, DirectoryEntity.class);
@@ -133,7 +133,7 @@
dirBuc.removeDirBucket(getMyFile(), dir, engine.getContext(), engine.getBackendType(), engine.getDbNames());
else{
TransportManagerFactory tmf=new TransportManagerFactory(engine.primaryBackend, engine.getBackendUser(), engine.getBackendPassword());
TransportManager tm=tmf.getTransport(Costants.CLIENT_TYPE, engine.getGcubeMemoryType(), engine.getDbNames(), engine.getWriteConcern(), engine.getReadConcern(), engine.getToken(), engine.getRegion());
tm=tmf.getTransport(tm, Costants.CLIENT_TYPE, engine.getGcubeMemoryType(), engine.getDbNames(), engine.getWriteConcern(), engine.getReadConcern());
dir=new BucketCoding().bucketFileCoding(dir, engine.getContext());
try {
tm.removeDir(dir, getMyFile());

View File

@@ -117,7 +117,7 @@ public class ChunkConsumer implements Runnable {
synchronized(ChunkConsumer.class){
String [] randomServer=randomizeServer(server);
TransportManagerFactory tmf=new TransportManagerFactory(randomServer, null, null);
client.set(tmf.getTransport(Costants.CLIENT_TYPE, null, null, myFile.getWriteConcern(), myFile.getReadPreference(), myFile.getToken(), myFile.getRegion()));
client.set(tmf.getTransport(null, Costants.CLIENT_TYPE, null, null, myFile.getWriteConcern(), myFile.getReadPreference()));
}
if(logger.isDebugEnabled()){
logger.debug("waiting time for upload: "

View File

@@ -74,10 +74,8 @@ public class GetHttpUrl extends Operation {
private String getId(String path, boolean forceCreation, MemoryType memoryType, String writeConcern, String readPreference){
String id=null;
if(tm ==null){
TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
tm=tmf.getTransport(backendType, memoryType, dbNames, writeConcern, readPreference, null, null);
}
TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
tm=tmf.getTransport(tm, backendType, memoryType, dbNames, writeConcern, readPreference);
try {
id = tm.getId(bucket, forceCreation);
} catch (Exception e) {

View File

@@ -75,10 +75,8 @@ public class GetHttpsUrl extends Operation {
private String getId(String path, boolean forceCreation, MemoryType memoryType, String writeConcern, String readPreference){
String id=null;
if(tm ==null){
TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
tm=tmf.getTransport(backendType, memoryType, dbNames, writeConcern, readPreference, null, null);
}
TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
tm=tmf.getTransport(tm, backendType, memoryType, dbNames, writeConcern, readPreference);
try {
id = tm.getId(bucket, forceCreation);
} catch (Exception e) {

View File

@@ -58,10 +58,8 @@ public class GetUrl extends Operation{
private String getId(String path, boolean forceCreation, MemoryType memoryType, String writeConcern, String readPreference){
String id=null;
if(tm ==null){
TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
tm=tmf.getTransport(backendType, memoryType, dbNames, writeConcern, readPreference, null, null);
}
TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
tm=tmf.getTransport(tm, backendType, memoryType, dbNames, writeConcern, readPreference);
try {
id = tm.getId(bucket, forceCreation);
} catch (Exception e) {

View File

@@ -381,11 +381,11 @@ public abstract class Operation {
this.user = user;
}
protected TransportManager getTransport(RequestObject myFile) {
if(Objects.isNull(transport)) {
protected TransportManager getTransport(MyFile myFile) {
// if(Objects.isNull(transport)) {
TransportManagerFactory tmf= new TransportManagerFactory(server, user, password);
transport=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference(), myFile.getToken(), myFile.getRegion());
}
transport=tmf.getTransport(transport, backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
// }
return transport;
}

View File

@@ -29,7 +29,8 @@ public class SetMetaInfo extends Operation {
} catch (Exception e) {
tm.close();
e.printStackTrace();
throw new RemoteBackendException(" Error in SetMetaInfo operation ", e.getCause()); }
logger.error("Problem setting file property", e);
throw new RemoteBackendException(" Error in SetMetaInfo operation ", e); }
if (logger.isDebugEnabled()) {
logger.debug(" PATH " + bucket);
}
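This hunk and the UploadAndUnlock one below share the same correction: the old code rethrew `e.getCause()`, which is `null` whenever the caught exception carries no nested cause, so the original stack trace was lost. A minimal, self-contained sketch of the difference (generic exception types used for illustration, not from this repository):

```java
// Sketch: rethrowing the exception itself vs. its (possibly null) cause.
public class CauseExample {
    public static void main(String[] args) {
        try {
            // Many backend/driver exceptions are thrown without a nested cause.
            throw new IllegalStateException("backend write failed");
        } catch (Exception e) {
            System.out.println("cause: " + e.getCause()); // prints "cause: null"
            // Old pattern: wrapping e.getCause() wraps null and drops the trace.
            // New pattern: wrap e itself and keep the full stack trace.
            RuntimeException wrapped = new RuntimeException("Error in SetMetaInfo operation", e);
            System.out.println("kept: " + wrapped.getCause().getMessage());
        }
    }
}
```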

View File

@@ -33,9 +33,10 @@ public class UploadAndUnlock extends Operation {
objectId=put(upload, myFile, isChunk(), false, false, true);
} catch (Exception e) {
TransportManagerFactory tmf=new TransportManagerFactory(server, user, password);
TransportManager tm=tmf.getTransport(backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference(), myFile.getToken(), myFile.getRegion());
TransportManager tm=tmf.getTransport(transport, backendType, myFile.getGcubeMemoryType(), dbNames, myFile.getWriteConcern(), myFile.getReadPreference());
tm.close();
throw new RemoteBackendException(" Error in uploadAndUnlock operation ", e.getCause()); }
throw new RemoteBackendException(" Error in uploadAndUnlock operation ", e);
}
return objectId;
}

View File

@@ -23,7 +23,7 @@ import com.mongodb.MongoException;
public abstract class TransportManager {
protected MemoryType memoryType;
/**
* This method specifies the type of the backend for dynamic loading
* For mongoDB, default backend, the name is MongoDB

View File

@@ -27,9 +27,11 @@ public class TransportManagerFactory {
// private static final Logger logger = Logger.getLogger(OperationFactory.class);
final Logger logger = LoggerFactory.getLogger(TransportManagerFactory.class);
// TerrastoreClient client;
String[] server;
String user;
String password;
private String[] server;
private String user;
private String password;
private MemoryType memoryType;
private String dbNames;
TransportManager transport;
public TransportManagerFactory(String server[], String user, String password){
@@ -38,12 +40,20 @@
this.password=password;
}
public TransportManager getTransport(String backendType, MemoryType memoryType, String[] dbNames, String writeConcern, String readConcern, String token, String region){
public TransportManager getTransport(TransportManager tm, String backendType, MemoryType memoryType, String[] dbNames, String writeConcern, String readConcern){
if (logger.isDebugEnabled()) {
logger.debug("getOperation(String) - start");
}
if(Objects.isNull(transport))
return load(backendType, memoryType, dbNames, writeConcern, readConcern, token, region);
if(logger.isDebugEnabled() && (!Objects.isNull(transport)))
logger.debug("transportLayer with "+transport.memoryType+" already instatiated. New memoryType request is "+memoryType);
// if we don't have any transport layer instantiated yet, or the existing one is instantiated on another memory type (persistent, volatile),
// then a new transport layer is needed
if(Objects.isNull(transport) || (!transport.memoryType.equals(memoryType))) {
logger.info("new transport layer instantiated for "+memoryType+" memory");
return load(backendType, memoryType, dbNames, writeConcern, readConcern);
}else {
logger.debug("new transport layer not instantiated.");
}
return transport;
}
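A condensed model of the reuse rule introduced in this hunk, with stand-in types (only the memory-type field is modeled; the real `load(...)` call, backend wiring, and logging are elided):

```java
import java.util.Objects;

// Model of the check added to getTransport(): reuse the cached transport
// unless none exists yet or its memory type differs from the request.
class TransportFactoryModel {
    private TransportModel transport; // cached instance, possibly null

    TransportModel getTransport(String memoryType) {
        if (Objects.isNull(transport) || !transport.memoryType.equals(memoryType)) {
            // a new transport layer is needed (the real code delegates to load(...))
            transport = new TransportModel(memoryType);
        }
        return transport; // same memory type: reuse the existing layer
    }
}

class TransportModel {
    final String memoryType; // e.g. "PERSISTENT" or "VOLATILE"
    TransportModel(String memoryType) { this.memoryType = memoryType; }
}
```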

View File

@@ -547,10 +547,13 @@ public class MongoIOManager {
destinationFile.put("creationTime", DateUtils.now("dd MM yyyy 'at' hh:mm:ss z"));
}
public BasicDBObject setGenericMoveProperties(RequestObject resource, String filename, String dir,
String name, BasicDBObject f) {
f.append("filename", filename).append("type", "file").append("name", name).append("dir", dir);
return f;
public DBObject setGenericMoveProperties(MyFile resource, String filename, String dir,
String name, DBObject sourcePathMetaCollection) {
sourcePathMetaCollection.put("filename", filename);
sourcePathMetaCollection.put("type", "file");
sourcePathMetaCollection.put("name", name);
sourcePathMetaCollection.put("dir", dir);
return sourcePathMetaCollection;
}
@@ -778,13 +781,10 @@
return list;
}
public BasicDBObject findMetaCollectionObject(String source) throws UnknownHostException {
//set to null in order to perform a query as BasicDBObject and not GridFSObject
db=null;
mongo=null;
public DBObject findMetaCollectionObject(String source) throws UnknownHostException {
DBCollection fileCollection=getConnectionDB(dbName, false).getCollection(Costants.DEFAULT_META_COLLECTION);
BasicDBObject query = new BasicDBObject();
BasicDBObject obj=null;
DBObject obj=null;
query.put( "filename" ,source);
DBCursor cursor=fileCollection.find(query);
if(cursor != null && !cursor.hasNext()){
@@ -793,7 +793,7 @@
cursor=fileCollection.find(query);
}
if(cursor.hasNext()){
obj=(BasicDBObject) cursor.next();
obj=(DBObject) cursor.next();
String path=(String)obj.get("filename");
logger.debug("path found "+path);
}
@@ -1067,6 +1067,7 @@
public void close() {
// if(mongo!=null)
// mongo.close();
logger.debug(" try to close backend but the close operation is not implemented");
// logger.info("Mongo has been closed");
// mongo=null;
// gfs=null;

View File

@@ -26,6 +26,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.mongodb.MongoException;
import com.mongodb.gridfs.GridFS;
import com.mongodb.gridfs.GridFSDBFile;
@@ -54,9 +55,11 @@ public class MongoOperationManager extends TransportManager{
@Override
public void initBackend(String[] server, String user, String pass, MemoryType memoryType , String[] dbNames, String writeConcern, String readConcern, String token, String region) {
public void initBackend(String[] server, String user, String pass, MemoryType memoryType , String[] dbNames, String writeConcern, String readConcern) {
logger.debug("init storage backend with "+memoryType+" memory");
try {
this.memoryType=memoryType;
super.memoryType=memoryType;
MongoOperationManager.dbNames=dbNames;
logger.debug("check mongo configuration");
if (dbNames!=null){
@@ -456,7 +459,7 @@
*/
private void updateMetaObject(String remoteIdentifier, String propertyField, String propertyValue)
throws UnknownHostException {
BasicDBObject remoteMetaCollectionObject;
DBObject remoteMetaCollectionObject;
logger.debug("find object...");
remoteMetaCollectionObject = mongoPrimaryInstance.findMetaCollectionObject(remoteIdentifier);
if(remoteMetaCollectionObject!=null){

View File

@@ -65,7 +65,7 @@ public class MoveOperator extends Move {
logger.info("move operation on Mongo backend, parameters: source path: "+source+" destination path: "+destination);
logger.debug("MOVE OPERATION operation defined: "+resource.getOperationDefinition().getOperation());
if((source != null) && (!source.isEmpty()) && (destination != null) && (!destination.isEmpty())){
BasicDBObject sourcePathMetaCollection = mongoPrimaryInstance.findMetaCollectionObject(source);
DBObject sourcePathMetaCollection = mongoPrimaryInstance.findMetaCollectionObject(source);
//check if the file exist in the destination path, if it exist then it will be deleted
if(sourcePathMetaCollection != null){
sourceId=sourcePathMetaCollection.get("_id").toString();
@@ -175,7 +175,7 @@
}
private BasicDBObject setCommonFields(BasicDBObject f, RequestObject resource, OPERATION op) {
private DBObject setCommonFields(DBObject sourcePathMetaCollection, MyFile resource, OPERATION op) {
String owner=resource.getOwner();
if(op == null){
op=resource.getOperationDefinition().getOperation();
@@ -188,14 +188,23 @@
String address=null;
try {
address=InetAddress.getLocalHost().getCanonicalHostName().toString();
f.put("callerIP", address);
sourcePathMetaCollection.put("callerIP", address);
} catch (UnknownHostException e) { }
if(from == null)
f.append("lastAccess", DateUtils.now("dd MM yyyy 'at' hh:mm:ss z")).append("lastUser", owner).append("lastOperation", op.toString()).append("callerIP", address);
else
f.append("lastAccess", DateUtils.now("dd MM yyyy 'at' hh:mm:ss z")).append("lastUser", owner).append("lastOperation", op.toString()).append("callerIP", address).append("from", from);
return f;
if(from == null) {
sourcePathMetaCollection.put("lastAccess", DateUtils.now("dd MM yyyy 'at' hh:mm:ss z"));
sourcePathMetaCollection.put("lastUser", owner);
sourcePathMetaCollection.put("lastOperation", op.toString());
sourcePathMetaCollection.put("callerIP", address);
}else {
sourcePathMetaCollection.put("lastAccess", DateUtils.now("dd MM yyyy 'at' hh:mm:ss z"));
sourcePathMetaCollection.put("lastUser", owner);
sourcePathMetaCollection.put("lastOperation", op.toString());
sourcePathMetaCollection.put("callerIP", address);
sourcePathMetaCollection.put("from", from);
}
return sourcePathMetaCollection;
}
}
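The switch from `BasicDBObject` to the `DBObject` interface explains the reshaped method bodies above: in the mongo-java-driver, the fluent `append(...)` is declared on `BasicDBObject` only, while the `DBObject` interface exposes `put(...)`, so chained appends become individual puts. A small sketch, assuming mongo-java-driver 3.x on the classpath:

```java
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

// Sketch: why DBObject references cannot use the fluent append() chain.
public class DBObjectSketch {
    public static void main(String[] args) {
        // append() is declared on BasicDBObject and returns BasicDBObject,
        // so chaining works on the concrete type...
        BasicDBObject concrete = new BasicDBObject()
                .append("filename", "report.pdf")
                .append("type", "file");

        // ...but through the interface only put() (and get/removeField) is visible.
        DBObject meta = new BasicDBObject();
        meta.put("filename", "report.pdf");
        meta.put("type", "file");
        // meta.append(...) would not compile: append() is not part of DBObject.

        System.out.println(concrete);
        System.out.println(meta);
    }
}
```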

View File

@@ -67,7 +67,7 @@ public class MongoOutputStream extends ProxyOutputStream {
e.printStackTrace();
}
// mongo.close();
// setClosed(true);
setClosed(true);
}
}

View File

@@ -1,25 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<Resource xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<ID />
<Type>Service</Type>
<Profile>
<Description>${description}</Description>
<Class>ContentManagement</Class>
<Name>storage-manager-core</Name>
<Version>1.0.0</Version>
<Packages>
<Software>
<Name>storage-manager-core</Name>
<Version>2.9.0-SNAPSHOT</Version>
<MavenCoordinates>
<groupId>org.gcube.contentmanagement</groupId>
<artifactId>storage-manager-core</artifactId>
<version>2.9.0-SNAPSHOT</version>
</MavenCoordinates>
<Files>
<File>storage-manager-core-2.9.0-SNAPSHOT.jar</File>
</Files>
</Software>
</Packages>
</Profile>
</Resource>