Refactored abstract mongo manager

Fabio Sinibaldi 2022-03-11 16:42:33 +01:00
parent b827ba2f13
commit db82bc0a53
9 changed files with 133 additions and 160 deletions

View File

@@ -46,25 +46,27 @@ public class ConcessioniMongoManager extends MongoManager{
 	public ConcessioniMongoManager() throws ConfigurationException {
 		super();
+		init(collectionName);
 	}

 	private static final String collectionName="legacyConcessioni";
 	//private static final String DB_NAME="gna_dev";

-	private MongoDatabase db=null;
-
-	@Override
-	@Synchronized
-	protected MongoDatabase getDatabase() {
-		if(db==null) {
-			String toUseDB=super.client.getConnection().getDatabase();
-			log.info("Connecting to DB {} ",toUseDB);
-
-			// TODO MAP OF DATABASES?
-			db=client.getTheClient().getDatabase(toUseDB);
-		}
-		return db;
-	}
+//
+//	private MongoDatabase db=null;
+//
+//	@Override
+//	@Synchronized
+//	protected MongoDatabase getDatabase() {
+//		if(db==null) {
+//			String toUseDB=super.client.getConnection().getDatabase();
+//			log.info("Connecting to DB {} ",toUseDB);
+//
+//			// TODO MAP OF DATABASES?
+//			db=client.getTheClient().getDatabase(toUseDB);
+//		}
+//		return db;
+//	}

 	protected static Document asDocument (Concessione c) throws JsonProcessingException {
 		Document toReturn=Document.parse(Serialization.write(c));

@@ -92,14 +94,14 @@ public class ConcessioniMongoManager extends MongoManager{
 		log.trace("Going to register {} ",toRegister);
 		toRegister=onUpdate(toRegister);
 		log.trace("Concessione with defaults is {}",toRegister);
-		ObjectId id=insert(asDocument(toRegister), collectionName);
+		ObjectId id=insertDoc(asDocument(toRegister));
 		log.trace("Obtained id {}",id);
-		Concessione toReturn=asConcessione(getById(id,collectionName));
+		Concessione toReturn=asConcessione(getDocById(id));
 		toReturn.setMongo_id(asString(id));
-		toReturn = asConcessione(replace(asDocument(toReturn),id,collectionName));
+		toReturn = asConcessione(replaceDoc(asDocument(toReturn),id));
 		log.debug("Registered {} ",toReturn);
 		return toReturn;
 	}

@@ -107,22 +109,13 @@ public class ConcessioniMongoManager extends MongoManager{
 	public Concessione replace(Concessione toRegister) throws IOException {
 		log.trace("Replacing {} ",toRegister);
 		toRegister=onUpdate(toRegister);
-		return asConcessione(replace(asDocument(toRegister),new ObjectId(toRegister.getMongo_id()),collectionName));
+		return asConcessione(replaceDoc(asDocument(toRegister),new ObjectId(toRegister.getMongo_id())));
 	}

-	/*	public Concessione update(String id,String json) throws IOException {
-		log.trace("Updating id {} with {} ",id,json);
-		Concessione toReturn=asConcessione(update(asId(id),asDoc(json),collectionName));
-		log.debug("Refreshing defaults..");
-		toReturn.setDefaults();
-		return asConcessione(replace(asDocument(toReturn),collectionName));
-	}
-	*/

 	public Iterable<Concessione> list(){
 		LinkedBlockingQueue queue=new LinkedBlockingQueue<Concessione>();
-		iterate(null,null, collectionName).forEach(
+		iterateDoc(null,null).forEach(
 			new Consumer<Document>() {
 				@Override
 				public void accept(Document d) {

@@ -140,7 +133,7 @@ public class ConcessioniMongoManager extends MongoManager{
 	public Iterable<Concessione> search(Document filter){
 		log.info("Searching concessione for filter {} ",filter);
 		LinkedBlockingQueue queue=new LinkedBlockingQueue<Concessione>();
-		iterate(filter,null,collectionName).forEach(
+		iterateDoc(filter,null).forEach(
 			(Consumer<? super Document>) (Document d)->{try{
 				queue.put(asConcessione(d));
 			}catch(Throwable t){log.warn("Unable to translate "+d);}});

@@ -151,7 +144,7 @@ public class ConcessioniMongoManager extends MongoManager{
 	public Iterable<Document> query(QueryRequest queryRequest){
 		log.info("Searching concessione for filter {} ",queryRequest);
 		LinkedBlockingQueue queue=new LinkedBlockingQueue<Concessione>();
-		query(queryRequest,collectionName).forEach(
+		queryDoc(queryRequest).forEach(
 			(Consumer<? super Document>) (Document d)->{try{
 				queue.put(d);
 			}catch(Throwable t){log.warn("Unable to translate "+d);}});

@@ -161,7 +154,7 @@ public class ConcessioniMongoManager extends MongoManager{
 	public Concessione getById(String id)throws IOException {
 		log.debug("Loading by ID "+id);
-		return asConcessione(getById(asId(id),collectionName));
+		return asConcessione(getDocById(asId(id)));
 	}

 	public void deleteById(String id,boolean force) throws DeletionException {

@@ -180,12 +173,12 @@ public class ConcessioniMongoManager extends MongoManager{
 			if (!concessione.getReport().getStatus().equals(ValidationStatus.PASSED)&&!force)
 				throw new DeletionException("Unable to clean "+concessione.getMongo_id());
-			delete(asId(id), collectionName);
+			deleteDoc(asId(id));
 		}catch(DeletionException e) {
 			//storing updated - partially deleted
 			log.error("Error while trying to delete",e);
 			concessione=onUpdate(concessione);
-			replace(asDocument(concessione),new ObjectId(concessione.getMongo_id()), collectionName);
+			replaceDoc(asDocument(concessione),new ObjectId(concessione.getMongo_id()));
 			throw e;
 		}
 	}catch(Throwable t){

@@ -198,21 +191,21 @@ public class ConcessioniMongoManager extends MongoManager{
 	public Concessione unpublish(String id) throws DeletionException {
 		try{
-			Concessione toReturn=asConcessione(getById(asId(id),collectionName));
+			Concessione toReturn=asConcessione(getDocById(asId(id)));
 			removeFromIndex(toReturn);
 			log.debug("Removed from centroids "+toReturn.getMongo_id());
 			toReturn = unpublish(toReturn);
 			log.debug("Concessione after unpublishing is "+toReturn);
 			toReturn = onUpdate(toReturn);
-			return asConcessione(replace(asDocument(toReturn),new ObjectId(toReturn.getMongo_id()),collectionName));
+			return asConcessione(replaceDoc(asDocument(toReturn),new ObjectId(toReturn.getMongo_id())));
 		}catch(Throwable t){
 			throw new DeletionException("Unable to unpublish "+id,t);
 		}
 	}

 	public Concessione publish(String id) throws IOException{
-		Concessione toReturn=asConcessione(getById(asId(id),collectionName));
+		Concessione toReturn=asConcessione(getDocById(asId(id)));
 		toReturn=onUpdate(toReturn);
 		toReturn.validate();

@@ -224,7 +217,7 @@ public class ConcessioniMongoManager extends MongoManager{
 		toReturn=index(toReturn);
 		// replace(asDocument(toReturn),collectionName);
-		return asConcessione(replace(asDocument(toReturn),new ObjectId(toReturn.getMongo_id()),collectionName));
+		return asConcessione(replaceDoc(asDocument(toReturn),new ObjectId(toReturn.getMongo_id())));
 	}

@@ -247,7 +240,7 @@ public class ConcessioniMongoManager extends MongoManager{
 			toClearContent.getActualContent().clear();
 			c=onUpdate(c);
-			return asConcessione(replace(asDocument(c),new ObjectId(c.getMongo_id()),collectionName));
+			return asConcessione(replaceDoc(asDocument(c),new ObjectId(c.getMongo_id())));
 		}catch(Exception e) {
 			throw new Exception("Unable to unregister files.",e);

@@ -277,7 +270,7 @@ public class ConcessioniMongoManager extends MongoManager{
 			store(section,files,ws,storage,baseFolder);
 			c=onUpdate(c);
-			return asConcessione(replace(asDocument(c),new ObjectId(c.getMongo_id()),collectionName));
+			return asConcessione(replaceDoc(asDocument(c),new ObjectId(c.getMongo_id())));
 		}catch(Exception e) {
 			throw new Exception("Unable to save file.",e);
 		}
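Note: the file above swaps the collection-name-per-call helpers (insert/getById/replace) for collection-bound ones (insertDoc/getDocById/replaceDoc). Below is a minimal sketch, not part of the commit, of the same register round-trip written against the plain MongoDB Java sync driver; the connection URI is a placeholder, "gna_dev" only appears commented out in the diff, and the "mongo_id"/"title" fields are illustrative.

import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.FindOneAndReplaceOptions;
import com.mongodb.client.model.ReturnDocument;
import org.bson.Document;
import org.bson.types.ObjectId;
import static com.mongodb.client.model.Filters.eq;

public class RegisterRoundTripSketch {
    public static void main(String[] args) {
        MongoCollection<Document> coll = MongoClients.create("mongodb://localhost:27017") // placeholder URI
                .getDatabase("gna_dev")              // placeholder DB name
                .getCollection("legacyConcessioni"); // collection name from the diff

        // insertDoc: make sure _id is set before inserting, so the caller gets the id back
        Document toRegister = new Document("title", "sample concessione");
        ObjectId id = new ObjectId();
        toRegister.append("_id", id);
        coll.insertOne(toRegister);

        // getDocById: read the stored document back
        Document stored = coll.find(eq("_id", id)).first();

        // the manager then writes the id into the payload (setMongo_id) ...
        stored.append("mongo_id", id.toHexString());

        // ... and replaceDoc persists it, returning the updated document
        Document updated = coll.findOneAndReplace(eq("_id", id), stored,
                new FindOneAndReplaceOptions().returnDocument(ReturnDocument.AFTER));
        System.out.println(updated.toJson());
    }
}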

View File

@@ -15,6 +15,8 @@ import org.gcube.application.cms.implementations.ImplementationProvider;
 import org.gcube.application.geoportal.service.model.internal.db.Mongo;
 import org.gcube.application.geoportal.common.model.rest.ConfigurationException;
+import javax.print.Doc;
 import static com.mongodb.client.model.Filters.eq;
 import static com.mongodb.client.model.Sorts.ascending;
 import static com.mongodb.client.model.Sorts.descending;

@@ -24,7 +26,9 @@ public abstract class MongoManager {
 	protected Mongo client=null;
+	protected MongoCollection<Document> collection=null;
 	protected static final String ID="_id";

 	protected static final ObjectId asId(String id) {return new ObjectId(id);}

@@ -35,72 +39,62 @@ public abstract class MongoManager {
 	public MongoManager() throws ConfigurationException {
 		client=ImplementationProvider.get().getEngineByManagedClass(Mongo.class);
-		log.info("Got Mongo Client at "+client.getConnection());
-		log.debug("Mongo client is "+client);
+		log.debug("Cached client is {} ",client);
 	}

-	protected abstract MongoDatabase getDatabase();
+	protected void init(String collectionName){
+		String toUseDB=client.getConnection().getDatabase();
+		log.info("Opening collection {} : {} ",toUseDB);
+		collection=client.getTheClient().getDatabase(toUseDB).getCollection(collectionName);
+	}
+
+	// TODO check if existing DB
+	MongoCollection getCollection(){
+		return collection;
+	}

 	protected abstract String mongoIDFieldName();

 	//*********** PROJECTS
 	// NB BsonId
-	protected ObjectId insert(Document proj, String collectionName) {
-		MongoDatabase database=getDatabase();
-		MongoCollection<Document> collection = database.getCollection(collectionName);
+	protected ObjectId insertDoc(Document proj) {
+		MongoCollection<Document> collection = getCollection();
 		// Check if _id is present
 		ObjectId id=proj.getObjectId(mongoIDFieldName());
 		if(id==null) {
 			proj.append(mongoIDFieldName(), new ObjectId());
 			id=proj.getObjectId(mongoIDFieldName());
 		}
 		collection.insertOne(Document.parse(proj.toJson()));
 		return id;
 	}

-	public void delete(ObjectId id, String collectionName) {
-		MongoDatabase database=getDatabase();
-		MongoCollection<Document> collection = database.getCollection(collectionName);
+	public void deleteDoc(ObjectId id) {
+		MongoCollection<Document> collection = getCollection();
 		collection.deleteOne(eq(mongoIDFieldName(),id));
 	}

-	public Document getById(ObjectId id,String collectionName) {
-		MongoDatabase database=getDatabase();
-		MongoCollection<Document> coll=database.getCollection(collectionName);
+	public Document getDocById(ObjectId id) {
+		MongoCollection<Document> coll=getCollection();
 		return coll.find(new Document(mongoIDFieldName(),id)).first();
 	}

-	public FindIterable<Document> iterate(Document filter, Document projection, String collectionName) {
-		log.debug("Iterate over {} ",collectionName);
-		MongoDatabase database=getDatabase();
-		MongoCollection<Document> coll=database.getCollection(collectionName);
+	public FindIterable<Document> iterateDoc(Document filter, Document projection) {
+		MongoCollection<Document> coll=getCollection();
 		if(filter == null) filter=new Document();
 		log.debug("Applying Filter "+filter.toJson());
 		if(projection != null ) {
 			log.debug("Applying projection "+projection.toJson());
 			return coll.find(filter).projection(projection);
 		}else return coll.find(filter);
 	}

-	public FindIterable<Document> query(QueryRequest request, String collectionName){
-		FindIterable<Document> toReturn=iterate(request.getFilter(), request.getProjection(),collectionName);
+	public FindIterable<Document> queryDoc(QueryRequest request){
+		FindIterable<Document> toReturn=iterateDoc(request.getFilter(), request.getProjection());
 		if(request.getOrdering()!=null){
 			if(request.getOrdering().getDirection().equals(QueryRequest.OrderedRequest.Direction.ASCENDING))

@@ -117,34 +111,29 @@ public abstract class MongoManager {
 		return toReturn;
 	}

-	public <T> FindIterable<T> iterateForClass(Document filter,String collectionName,Class<T> clazz) {
-		MongoDatabase database=getDatabase();
-		MongoCollection<Document> coll=database.getCollection(collectionName);
-		if(filter==null)
-			return coll.find(clazz);
-		else
-			return coll.find(filter,clazz);
-	}
+//
+//	public <T> FindIterable<T> iterateForClass(Document filter,Class<T> clazz) {
+//		MongoCollection<Document> coll=getCollection();
+//		if(filter==null)
+//			return coll.find(clazz);
+//		else
+//			return coll.find(filter,clazz);
+//	}
+//

-	public Document replace(Document toUpdate,ObjectId id, String collectionName) {
-		MongoDatabase database=getDatabase();
-		MongoCollection<Document> coll=database.getCollection(collectionName);
+	public Document replaceDoc(Document toUpdate,ObjectId id) {
+		MongoCollection<Document> coll=getCollection();
 		return coll.findOneAndReplace(
 			eq(mongoIDFieldName(),id), toUpdate,new FindOneAndReplaceOptions().returnDocument(ReturnDocument.AFTER));
 	}

-	public Document update(ObjectId id, Document updateSet, String collectionName) {
-		MongoDatabase database=getDatabase();
-		MongoCollection<Document> coll=database.getCollection(collectionName);
+	public Document updateDoc(ObjectId id, Document updateSet) {
+		MongoCollection<Document> coll=getCollection();
 		return coll.findOneAndUpdate(
 			eq(mongoIDFieldName(),id),
 			updateSet,
 			new FindOneAndUpdateOptions().returnDocument(ReturnDocument.AFTER));
 	}

-	//********** PROFILES
 }
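Note: this is the core of the refactor. MongoManager no longer exposes getDatabase() plus a collection name on every call; subclasses call init(collectionName) once and the *Doc helpers work on the cached MongoCollection. Below is a minimal standalone sketch of that shape, not the project's code; class names outside the diff, the connection URI, and the database name are placeholders.

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import org.bson.Document;
import org.bson.types.ObjectId;
import static com.mongodb.client.model.Filters.eq;

abstract class CollectionBoundManager {
    private final MongoClient client = MongoClients.create("mongodb://localhost:27017"); // placeholder URI
    private MongoCollection<Document> collection; // cached once, reused by every helper

    protected void init(String collectionName) { // mirrors MongoManager#init in the diff
        collection = client.getDatabase("gna_dev").getCollection(collectionName); // placeholder DB
    }

    protected ObjectId insertDoc(Document doc) {
        // ensure _id exists before inserting, so the caller gets it back
        if (doc.getObjectId("_id") == null) doc.append("_id", new ObjectId());
        collection.insertOne(doc);
        return doc.getObjectId("_id");
    }

    protected Document getDocById(ObjectId id) {
        return collection.find(eq("_id", id)).first();
    }
}

// A subclass only declares its collection, exactly like ConcessioniMongoManager now does:
class LegacyConcessioniManager extends CollectionBoundManager {
    LegacyConcessioniManager() { init("legacyConcessioni"); }
}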

View File

@@ -67,7 +67,7 @@ public class ProfiledMongoManager extends MongoManager implements MongoManagerI<
 	@Getter
 	UseCaseDescriptor useCaseDescriptor;

-	MongoDatabase db=null;

 	@Override
 	protected String mongoIDFieldName() {

@@ -85,11 +85,13 @@ public class ProfiledMongoManager extends MongoManager implements MongoManagerI<
 		// Connect to DB
-		String toUseDB=super.client.getConnection().getDatabase();
-		log.info("Connecting to DB {} ",toUseDB);
-
-		db=client.getTheClient().getDatabase(toUseDB);
+		init(getToUseCollectionName());
+	}
+
+	private String getToUseCollectionName(){
+		//TODO collection name in UCD
+		return useCaseDescriptor.getId();
 	}

 	@Getter(lazy = true)

@@ -118,16 +120,6 @@ public class ProfiledMongoManager extends MongoManager implements MongoManagerI<
 		}
 	}

-	private String getCollectionName(){
-		// TODO UseCaseDescriptor can directly specify, use ID only as default
-		return useCaseDescriptor.getId();
-	}
-
-	@Override
-	public MongoDatabase getDatabase(){
-		return db;
-	}

 	@Override
 	public Project registerNew(Document toRegisterDoc) throws IOException, StepException, EventException {

@@ -165,7 +157,7 @@ public class ProfiledMongoManager extends MongoManager implements MongoManagerI<
 		log.debug("Going to register {} ",toRegister);
 		// Insert object
-		ObjectId id =insert(asDocumentWithId(toRegister),getCollectionName());
+		ObjectId id =insertDoc(asDocumentWithId(toRegister));
 		log.info("Obtained id {} ",id);
 		return getByID(id.toHexString());

@@ -179,7 +171,7 @@ public class ProfiledMongoManager extends MongoManager implements MongoManagerI<
 		toUpdate.getLifecycleInformation().cleanState();
 		toUpdate=onUpdate(toUpdate);
-		Project toReturn =convert(replace(asDocumentWithId(toUpdate),new ObjectId(id),getCollectionName()), Project.class);
+		Project toReturn =convert(replaceDoc(asDocumentWithId(toUpdate),new ObjectId(id)), Project.class);
 		log.debug("Updated Project is {}",toReturn);
 		return toReturn;
 	}

@@ -224,7 +216,7 @@ public class ProfiledMongoManager extends MongoManager implements MongoManagerI<
 	@Override
 	public Project getByID(String id) throws WebApplicationException{
-		Document doc=super.getById(asId(id),getCollectionName());
+		Document doc=getDocById(asId(id));
 		if(doc==null) throw new WebApplicationException("No document with ID "+id);
 		return convert(doc, Project.class);
 	}

@@ -233,7 +225,7 @@ public class ProfiledMongoManager extends MongoManager implements MongoManagerI<
 	public Iterable<Document> query(QueryRequest queryRequest) {
 		log.info("Querying {} ",queryRequest);
 		LinkedBlockingQueue queue=new LinkedBlockingQueue<Project>();
-		query(queryRequest,getCollectionName()).forEach(
+		queryDoc(queryRequest).forEach(
 			(Consumer<? super Document>) (Document d)->{try{
 				queue.put(d);
 			}catch(Throwable t){log.warn("Unable to translate "+d);}});

@@ -245,7 +237,7 @@ public class ProfiledMongoManager extends MongoManager implements MongoManagerI<
 	public Iterable<Project> filter(QueryRequest queryRequest) {
 		log.info("Searching concessione for filter {} ",queryRequest);
 		LinkedBlockingQueue queue=new LinkedBlockingQueue<Project>();
-		query(queryRequest,getCollectionName()).forEach(
+		queryDoc(queryRequest).forEach(
 			(Consumer<? super Document>) (Document d)->{try{
 				queue.put(d);
 			}catch(Throwable t){log.warn("Unable to translate "+d);}});

@@ -273,7 +265,7 @@ public class ProfiledMongoManager extends MongoManager implements MongoManagerI<
 				step,document.getLifecycleInformation().getLastOperationStatus());
 			log.debug("LifecycleInformation is {} ",document.getLifecycleInformation());
 			if(log.isTraceEnabled())log.trace("Document is {} ",Serialization.write(document));
-			return convert(replace(asDocumentWithId(document),new ObjectId(id),getCollectionName()), Project.class);
+			return convert(replaceDoc(asDocumentWithId(document),new ObjectId(id)), Project.class);
 		}
 	}

@@ -366,7 +358,7 @@ public class ProfiledMongoManager extends MongoManager implements MongoManagerI<
 		doc=onUpdate(doc);
-		return convert(replace(asDocumentWithId(doc),new ObjectId(id),getCollectionName()), Project.class);
+		return convert(replaceDoc(asDocumentWithId(doc),new ObjectId(id)), Project.class);
 	}

 	@Override

@@ -388,7 +380,7 @@ public class ProfiledMongoManager extends MongoManager implements MongoManagerI<
 		log.debug("Removing FS from document [ID : ] by path {}",id,path);
 		wrapper.setElement(path,null);
 		doc=onUpdate(doc);
-		return convert(replace(asDocumentWithId(doc),new ObjectId(id),getCollectionName()), Project.class);
+		return convert(replaceDoc(asDocumentWithId(doc),new ObjectId(id)), Project.class);
 	}

 	@Override

@@ -405,9 +397,9 @@ public class ProfiledMongoManager extends MongoManager implements MongoManagerI<
 		// Add Mongo Info
 		Archive mongoArchive = new Archive("DOCUMENT-STORE-COLLECTION");
-		MongoCollection coll=getDatabase().getCollection(getCollectionName());
+		MongoCollection coll=getCollection();
 		mongoArchive.put("count",coll.count());
-		mongoArchive.put("collection_name",getCollectionName());
+		mongoArchive.put("collection_name",getToUseCollectionName());
 		archives.add(mongoArchive);
 		// Set WS Info

View File

@@ -2,6 +2,7 @@ package org.gcube.application.geoportal.service.engine.mongo;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.mongodb.client.FindIterable;
+import com.mongodb.client.MongoCollection;
 import com.mongodb.client.MongoDatabase;
 import com.mongodb.client.model.FindOneAndReplaceOptions;
 import com.mongodb.client.model.ReturnDocument;

@@ -30,19 +31,8 @@ import java.util.function.Consumer;
 public class UCDMongoManager extends MongoManager implements UCDManagerI{

-	MongoDatabase db=null;
-
 	public UCDMongoManager() throws ConfigurationException {
-		String toUseDB=super.client.getConnection().getDatabase();
-		log.info("Connecting to DB {} ",toUseDB);
-		db=client.getTheClient().getDatabase(toUseDB);
-	}
-
-	@Override
-	public MongoDatabase getDatabase(){
-		return db;
+		init("UCD_"+ContextUtils.getCurrentScope().replaceAll("/","_"));
 	}

 	@Override

@@ -50,11 +40,6 @@ public class UCDMongoManager extends MongoManager implements UCDManagerI{
 		return UseCaseDescriptor.MONGO_ID;
 	}

-	private String getCollectionName(){
-		return "profiles_"+ContextUtils.getCurrentScope();
-	}

 	public UseCaseDescriptor insert(UseCaseDescriptor desc) throws RegistrationException {
 		try {
 			if (desc.getMongoId() != null)

@@ -62,9 +47,9 @@ public class UCDMongoManager extends MongoManager implements UCDManagerI{
 			//TODO validate
-			ObjectId id = super.insert(Serialization.asDocument(desc), mongoCollectionName());
+			ObjectId id = insertDoc(Serialization.asDocument(desc));
 			desc.setMongoId(id);
-			return Serialization.convert(super.getById(id, mongoCollectionName()), UseCaseDescriptor.class);
+			return Serialization.convert(getDocById(id), UseCaseDescriptor.class);
 		}catch(JsonProcessingException e){
 			log.error("Unexpected serialization exception ",e);
 			throw new WebApplicationException("Unexpected exception ",e);

@@ -80,7 +65,7 @@ public class UCDMongoManager extends MongoManager implements UCDManagerI{
 		// MONGO ID SHOULD MATCH IF PROVIDED
 		filter.put(UseCaseDescriptor.MONGO_ID,desc.getMongoId());
 		try {
-			UseCaseDescriptor toReturn = Serialization.convert(getDatabase().getCollection(mongoCollectionName()).findOneAndReplace(
+			UseCaseDescriptor toReturn = Serialization.convert(getCollection().findOneAndReplace(
 				filter, Serialization.asDocument(desc),
 				new FindOneAndReplaceOptions().returnDocument(ReturnDocument.BEFORE)), UseCaseDescriptor.class);
 			log.trace("Matching is {} ", toReturn);

@@ -103,24 +88,17 @@ public class UCDMongoManager extends MongoManager implements UCDManagerI{
 		}
 	}

-	private String mongoCollectionName(){
-		return "_UCD"+ ContextUtils.getCurrentScope().replaceAll("/","_");
-	}

 	@Override
 	public Iterable<UseCaseDescriptor> query(QueryRequest queryRequest) {
 		log.info("Searching UCD for {} ",queryRequest);
 		LinkedBlockingQueue queue=new LinkedBlockingQueue<UseCaseDescriptor>();
-		query(queryRequest,getCollectionName()).forEach(
+		FindIterable<Document> it=queryDoc(queryRequest);
+		it.forEach(
 			(Consumer<? super Document>) d ->{try{
 				queue.put(Serialization.convert(d,UseCaseDescriptor.class));
-			}catch(Throwable t){log.warn("Unable to translate "+d);}});
+			}catch(Throwable t){log.warn("Unable to translate "+d,t);}});
 		log.info("Returned {} elements ",queue.size());
 		return queue;

@@ -128,7 +106,7 @@ public class UCDMongoManager extends MongoManager implements UCDManagerI{
 	@Override
 	public void deleteById(String id, boolean force) throws ConfigurationException {
-		delete(getById(id).getMongoId(),mongoCollectionName());
+		deleteDoc(getById(id).getMongoId());
 	}
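Note: UCDMongoManager now derives its collection name from the current scope instead of overriding getDatabase(). A small sketch of that naming rule follows; the scope string is taken from the JSON file at the end of this commit, ContextUtils is replaced by a plain string, and the class name is hypothetical.

public class ScopeCollectionNameSketch {
    // mirrors the constructor above: "UCD_" + scope with '/' replaced by '_'
    static String ucdCollectionName(String scope) {
        return "UCD_" + scope.replaceAll("/", "_");
    }

    public static void main(String[] args) {
        // "/gcube/devsec/devVRE" -> "UCD__gcube_devsec_devVRE" (the leading '/' also becomes '_')
        System.out.println(ucdCollectionName("/gcube/devsec/devVRE"));
    }
}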

View File

@@ -17,6 +17,7 @@ import javax.ws.rs.core.MediaType;
 import java.util.Iterator;
 import java.util.concurrent.atomic.AtomicLong;

+import static junit.framework.TestCase.assertTrue;
 import static org.junit.Assume.assumeTrue;

 @Slf4j

@@ -51,6 +52,6 @@ public class UCDTests extends BasicServiceTestUnit{
 		QueryRequest request=new QueryRequest();
 		AtomicLong l = new AtomicLong();
 		query(request).forEachRemaining(u->l.incrementAndGet());
-		assumeTrue(l.get()>0);
+		assertTrue(l.get()>0);
 	}
 }

View File

@@ -1,4 +1,4 @@
-package org.gcube.application.geoportal.service;
+package org.gcube.application.geoportal.service.engine.mongo;

 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.mongodb.Block;

@@ -25,15 +25,7 @@ public class MongoTests {
 	public static class MongoTester extends MongoManager {

 		public MongoTester() throws ConfigurationException {
+			init("legacyConcessioni");
 		}

-		public MongoCollection<Document> getCollection(){return getDatabase().getCollection("legacyConcessioni");}
-
-		@Override
-		protected MongoDatabase getDatabase() {
-			return client.getTheClient().getDatabase("gna_dev");
-		}

 		@Override

View File

@@ -18,6 +18,7 @@ import org.gcube.application.geoportal.common.utils.tests.GCubeTest;
 import org.gcube.application.geoportal.service.BasicServiceTestUnit;
 import org.junit.Assert;
 import org.junit.Before;
+import org.junit.BeforeClass;
 import org.junit.Test;

 import javax.ws.rs.client.Entity;

@@ -40,9 +41,10 @@ public abstract class AbstractProfiledDocumentsTests extends BasicServiceTestUni
 	}

 	protected abstract WebTarget baseTarget();

 	@Test
 	public void getAll() {

@@ -66,9 +68,10 @@ public abstract class AbstractProfiledDocumentsTests extends BasicServiceTestUni
 	}

 	@Test
-	public void getConfiguration() {
+	public void getConfiguration() throws Exception {
 		assumeTrue(GCubeTest.isTestInfrastructureEnabled());
-		System.out.println(baseTarget().path(InterfaceConstants.Methods.CONFIGURATION_PATH).request(MediaType.APPLICATION_JSON).get(Configuration.class));
+		System.out.println(check(
+			baseTarget().path(InterfaceConstants.Methods.CONFIGURATION_PATH).request(MediaType.APPLICATION_JSON).get(),Configuration.class));
 	}

 	// Queries

View File

@@ -2,6 +2,7 @@ package org.gcube.application.geoportal.service.profiledDocuments;
 import org.bson.Document;
 import org.gcube.application.cms.serialization.Serialization;
+import org.gcube.application.cms.tests.TokenSetter;
 import org.gcube.application.geoportal.common.model.JSONPathWrapper;
 import org.gcube.application.geoportal.common.model.document.Project;
 import org.gcube.application.geoportal.common.model.document.filesets.RegisteredFileSet;

@@ -9,8 +10,10 @@ import org.gcube.application.geoportal.common.model.useCaseDescriptor.Field;
 import org.gcube.application.geoportal.common.model.rest.RegisterFileSetRequest;
 import org.gcube.application.geoportal.common.rest.InterfaceConstants;
 import org.gcube.application.geoportal.common.utils.StorageUtils;
+import org.gcube.application.geoportal.common.utils.tests.GCubeTest;
 import org.gcube.application.geoportal.service.BasicServiceTestUnit;
 import org.junit.Assert;
+import org.junit.BeforeClass;
 import org.junit.Test;

 import javax.ws.rs.client.Entity;

@@ -18,6 +21,7 @@ import javax.ws.rs.client.WebTarget;
 import javax.ws.rs.core.MediaType;

 import static org.junit.Assert.assertTrue;
+import static org.junit.Assume.assumeTrue;

 public class DummyProjectTest extends AbstractProfiledDocumentsTests{

@@ -28,6 +32,9 @@ public class DummyProjectTest extends AbstractProfiledDocumentsTests{
 	}

 	@Test
 	public void registerNew() throws Exception {

View File

@@ -0,0 +1,18 @@
{
"profile_id": "profiledConcessioni",
"context": "/gcube/devsec/devVRE",
"last_updated_time": "2022-03-11T15:53:41.099",
"indexes": null,
"archives":
[
{
"_type": "DOCUMENT-STORE-COLLECTION",
"count": 41,
"collection_name": "profiledConcessioni"
},
{
"_type": "W-STORAGE",
"folder_id": "2a688014-cf98-464d-aa7c-38f1667f5542"
}
]
}