Refactored Implementation Provider

Fabio Sinibaldi 2022-02-18 18:11:34 +01:00
parent 2a41352748
commit be0c16aa7e
11 changed files with 41 additions and 19 deletions
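Across the hunks below, the engine-specific accessors on ImplementationProvider (getSHubProvider(), getStorageProvider(), getMongoClientProvider(), getProfiles(), getPluginManager()) give way to a single class-keyed lookup, getEngineByClass(Class&lt;T&gt;), with setEngine(..., Class&lt;T&gt;) as the registration counterpart used in the tests. The ImplementationProvider itself is not part of this diff; the sketch below only illustrates the class-keyed registry idea that the two visible signatures suggest, and it deliberately glosses over how registered providers (for example the MongoClientProvider in MongoTests) are resolved to the objects they manage. Everything apart from the getEngineByClass/setEngine names is an assumption.

// Hypothetical sketch of a class-keyed engine registry, not the project's actual ImplementationProvider.
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class EngineRegistrySketch {

    // Engines (or their providers) keyed by the class callers look them up with.
    private final Map<Class<?>, Object> engines = new ConcurrentHashMap<>();

    // Registration side, e.g. setEngine(someEngine, StorageUtils.class).
    public void setEngine(Object engine, Class<?> key) {
        engines.put(key, engine);
    }

    // Lookup side, e.g. getEngineByClass(StorageUtils.class) in the hunks below.
    @SuppressWarnings("unchecked")
    public <T> T getEngineByClass(Class<T> key) {
        return (T) engines.get(key);
    }
}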

View File

@@ -53,7 +53,7 @@ public class WorkspaceManager {
  public WorkspaceManager() throws ConfigurationException, StorageHubException {
- sgClient= ImplementationProvider.get().getSHubProvider().getObject();
+ sgClient= ImplementationProvider.get().getEngineByClass(StorageHubClient.class);
  appBase=getApplicationBaseFolder(sgClient);
  }

View File

@@ -258,7 +258,7 @@ public class ConcessioniMongoManager extends MongoManager{
  log.info("Persisting {} files for path {} in concessione ",files.size(),destinationPath,id);
  try{
  Concessione c = getById(id);
- StorageUtils storage=ImplementationProvider.get().getStorageProvider().getObject();
+ StorageUtils storage=ImplementationProvider.get().getEngineByClass(StorageUtils.class);
  WorkspaceManager ws=new WorkspaceManager();
  //Check Init Base folder
  FolderContainer baseFolder=null;

View File

@@ -34,7 +34,7 @@ public abstract class MongoManager {
  protected static final Document asDoc(String json) {return Document.parse(json);}
  public MongoManager() throws ConfigurationException {
- client=ImplementationProvider.get().getMongoClientProvider().getObject();
+ client=ImplementationProvider.get().getEngineByClass(Mongo.class);
  log.info("Got Mongo Client at "+client.getConnection());
  log.debug("Mongo client is "+client);

View File

@@ -38,6 +38,8 @@ import org.gcube.application.geoportal.common.utils.StorageUtils;
  import org.gcube.application.cms.implementations.ImplementationProvider;
  import org.gcube.application.geoportal.service.engine.WorkspaceManager;
  import org.gcube.application.geoportal.common.model.rest.ConfigurationException;
+ import org.gcube.application.geoportal.service.engine.providers.PluginManager;
+ import org.gcube.application.geoportal.service.engine.providers.ProfileMapCache;
  import org.gcube.application.geoportal.service.model.internal.faults.DeletionException;
  import org.gcube.application.cms.serialization.Serialization;
  import org.gcube.application.geoportal.service.utils.UserUtils;
@@ -70,7 +72,7 @@ public class ProfiledMongoManager extends MongoManager implements MongoManagerI<
  // Check Profile ID
  log.info("Loading profile ID {} ",profileId);
  if(profileId==null) throw new InvalidParameterException("Profile ID cannot be null");
- Map<String,Profile> profiles=ImplementationProvider.get().getProfiles().getObject();
+ Map<String,Profile> profiles=ImplementationProvider.get().getEngineByClass(ProfileMapCache.ProfileMap.class);
  if(!profiles.containsKey(profileId)) {
  log.debug("Asked profile {} not found. Available ones are {} ",profileId,profiles.keySet());
  throw new WebApplicationException("Profile " + profileId + " not registered", Response.Status.NOT_FOUND);
@@ -102,7 +104,8 @@ public class ProfiledMongoManager extends MongoManager implements MongoManagerI<
  // Loading Lifecycle Manager
  log.debug("Looking for handler {} ",lcHandlerDeclaration);
- toReturn=(LifecycleManager) ImplementationProvider.get().getPluginManager().getObject().get(lcHandlerDeclaration.getId());
+ toReturn=(LifecycleManager) ImplementationProvider.get().
+ getEngineByClass(PluginManager.PluginMap.class).get(lcHandlerDeclaration.getId());
  if(toReturn==null) throw new ConfigurationException("Unable to find Lifecycle Manager Plugin. ID "+lcHandlerDeclaration.getId());
  return toReturn;
@@ -296,7 +299,7 @@ public class ProfiledMongoManager extends MongoManager implements MongoManagerI<
  doc.getLifecycleInformation().setLastOperationStatus(LifecycleInformation.Status.OK);
  WorkspaceManager ws=new WorkspaceManager();
- StorageUtils storage=ImplementationProvider.get().getStorageProvider().getObject();
+ StorageUtils storage=ImplementationProvider.get().getEngineByClass(StorageUtils.class);
  log.debug("Checking field {} definition in {}",request.getFieldDefinitionPath(),profile.getId());

View File

@@ -120,7 +120,9 @@ public class PluginManager extends AbstractScopedMap<PluginManager.PluginMap> im
  @Override
  public Plugin getById(String pluginID) throws ConfigurationException {
- return getObject().get(pluginID);
+ Plugin toReturn = getObject().get(pluginID);
+ if(toReturn == null ) throw new ConfigurationException("Plugin "+pluginID+" not found ");
+ return toReturn;
  }
  private static final void logReport(Plugin p, InitializationReport report){
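With this change getById fails fast on unknown plugin IDs instead of returning null. A caller-side sketch (the pluginManager variable, the plugin ID and the logging are hypothetical; only getById and ConfigurationException appear in the diff):

// Hypothetical caller of the new fail-fast getById.
try {
    Plugin plugin = pluginManager.getById("some-lifecycle-plugin");
    // ... use the plugin ...
} catch (ConfigurationException e) {
    // previously this path had to null-check the returned Plugin instead
    log.warn("Plugin not available: " + e.getMessage());
}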

View File

@@ -19,9 +19,7 @@ import java.util.Map;
  public class ProfileMapCache extends AbstractScopedMap<ProfileMapCache.ProfileMap> {
- @AllArgsConstructor
- public static class ProfileMap {
- @Getter
- Map<String,Profile> profileMap=new HashMap<>();
+ public static class ProfileMap extends HashMap<String,Profile>{
  }
  public ProfileMapCache() {
@@ -44,7 +42,7 @@ public class ProfileMapCache extends AbstractScopedMa
  // Load from resources
- HashMap<String,Profile> toReturn=new HashMap<>();
+ ProfileMap toReturn=new ProfileMap();
  try {
  Profile p=Serialization.read(Files.
@@ -58,7 +56,7 @@ public class ProfileMapCache extends AbstractScopedMa
  } catch (IOException e) {
  e.printStackTrace();
  }
- return new ProfileMap(toReturn);
+ return toReturn;
  }
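Since ProfileMap now is a HashMap&lt;String,Profile&gt; rather than a Lombok wrapper holding one, callers can use the cached engine directly as a Map, which is exactly what the ProfiledMongoManager hunk above does. A minimal consuming-side sketch (variable names are illustrative):

// Resolve the cached profile map and use plain Map access, no getProfileMap() unwrapping.
ProfileMapCache.ProfileMap profiles =
        ImplementationProvider.get().getEngineByClass(ProfileMapCache.ProfileMap.class);
Profile profile = profiles.get(profileId);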

View File

@@ -20,6 +20,9 @@ import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
  public class ISUtils implements ISInterface {
+ public DatabaseConnection queryForDB(String platform, String flag) throws ConfigurationException {
+ return ISUtils.performQueryForDB(platform,flag);
+ }
@@ -122,6 +125,11 @@ public class ISUtils implements ISInterface {
  throw new RuntimeException("Unable to decrypt : "+toDecrypt,e);
  }
  }
+ @Override
+ public DatabaseConnection queryForDatabase(String platform, String flag) throws ConfigurationException {
+ return queryForDB(platform,flag);
+ }
  //
  //
  // private static String getgCubeBaseEndpoint(String category,String name) {

View File

@@ -151,6 +151,19 @@ public class DummyProfiledDocumentTest extends AbstractProfiledDocumentsTests{
  }
  req.setStepID("APPROVE DRAFT");
  doc=step(doc.get_id(),req);
+ if(doc.getLifecycleInformation().getErrorMessages()!=null) {
+ System.out.println("ERROR MESSAGES");
+ doc.getLifecycleInformation().getErrorMessages().forEach(s -> System.out.println(s));
+ }
+ if(doc.getLifecycleInformation().getWarningMessages()!=null) {
+ System.out.println("WARNING MESSAGES");
+ doc.getLifecycleInformation().getWarningMessages().forEach(s -> System.out.println(s));
+ }
+ assertTrue(doc.getLifecycleInformation().getLastOperationStatus().equals(LifecycleInformation.Status.OK));
  }
  }
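The two dump-if-not-null blocks added to this test differ only in which message list they print; if more tests need the same diagnostics, a small helper along these lines (hypothetical, not part of this commit) would remove the duplication:

// Hypothetical test helper; LifecycleInformation and its getters are the ones used above.
private static void printLifecycleMessages(LifecycleInformation info) {
    if (info.getErrorMessages() != null) {
        System.out.println("ERROR MESSAGES");
        info.getErrorMessages().forEach(System.out::println);
    }
    if (info.getWarningMessages() != null) {
        System.out.println("WARNING MESSAGES");
        info.getWarningMessages().forEach(System.out::println);
    }
}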

View File

@@ -35,13 +35,13 @@ public class MongoTests {
  @BeforeClass
  public static final void init() {
- ImplementationProvider.get().setMongoClientProvider(new MongoClientProvider() {
+ ImplementationProvider.get().setEngine(new MongoClientProvider() {
  @Override
  public Mongo getObject() throws ConfigurationException {
  TokenSetter.set("/gcube/devsec/devVRE");
  return super.getObject();
  }
- });
+ },Mongo.class);
  }
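This is the registration side of the refactoring: instead of a dedicated setMongoClientProvider, the test registers its provider under the class the production code uses as the lookup key (Mongo.class, matching the MongoManager hunk above). A hedged sketch of how a test might then confirm the override is picked up (the assertion itself is illustrative, not from this commit):

// Illustrative only: resolve through the same class key the test registered.
Mongo client = ImplementationProvider.get().getEngineByClass(Mongo.class);
Assert.assertNotNull("expected a Mongo engine registered for this scope", client);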

View File

@@ -12,7 +12,7 @@ public class StorageTests {
  public static void main(String[] args) throws ConfigurationException, IOException {
  TokenSetter.set("/gcube/devsec/devVRE");
- StorageClientProvider storage= ImplementationProvider.get().getStorageProvider();
+ StorageClientProvider storage= ImplementationProvider.get().getEngineByClass(StorageClientProvider.class);
  String id="614de23b647cef06aecdfb28";
  System.out.println("FROM PROVIDER " + storage.getObject().getURL(id));

View File

@@ -109,7 +109,7 @@ public class Caches extends BasicServiceTestUnit {
  AtomicLong executed = new AtomicLong(0);
  AtomicLong launched = new AtomicLong(0);
- final StorageUtils storage=ImplementationProvider.get().getStorageProvider().getObject();
+ final StorageUtils storage=ImplementationProvider.get().getEngineByClass(StorageUtils.class);
  String id =storage.putOntoStorage(new File(TestConcessioniModel.getBaseFolder(),"relazione.pdf"))[0].getId();
@@ -121,10 +121,8 @@
  public void run() {
  try {
  try {Thread.sleep(1000);} catch (InterruptedException i) {}
- System.out.println(ImplementationProvider.get().getStorageProvider().getObject().getURL(id));
+ System.out.println(storage.getURL(id));
  // storage.getURL(id);
- } catch (ConfigurationException e) {
- e.printStackTrace();
  } catch (MongoWaitQueueFullException e) {
  log.info("Too many connections... ");
  }finally{