Fabio Sinibaldi 2019-03-27 17:03:51 +00:00
parent 9705974338
commit c1b2e7d353
20 changed files with 413 additions and 125 deletions

View File

@ -80,6 +80,23 @@
<artifactId>commons-dbcp2</artifactId>
<version>2.0.1</version>
</dependency>
<dependency>
<groupId>postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>9.1-901.jdbc4</version>
</dependency>
<!-- STORAGE -->
<dependency>
<groupId>org.gcube.contentmanagement</groupId>
<artifactId>storage-manager-core</artifactId>
<version>[2.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.contentmanagement</groupId>
<artifactId>storage-manager-wrapper</artifactId>
<version>[2.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
</dependency>
<!-- test -->
@ -110,7 +127,7 @@
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<version>1.3.170</version>
<version>1.4.198</version>
<scope>test</scope>
</dependency>

View File

@ -5,15 +5,21 @@ import javax.ws.rs.ApplicationPath;
import org.gcube.data.publishing.gCatFeeder.service.engine.CatalogueControllersManager;
import org.gcube.data.publishing.gCatFeeder.service.engine.CollectorsManager;
import org.gcube.data.publishing.gCatFeeder.service.engine.ConnectionManager;
import org.gcube.data.publishing.gCatFeeder.service.engine.ExecutionManager;
import org.gcube.data.publishing.gCatFeeder.service.engine.FeederEngine;
import org.gcube.data.publishing.gCatFeeder.service.engine.Infrastructure;
import org.gcube.data.publishing.gCatFeeder.service.engine.LocalConfiguration;
import org.gcube.data.publishing.gCatFeeder.service.engine.PersistenceManager;
import org.gcube.data.publishing.gCatFeeder.service.engine.Storage;
import org.gcube.data.publishing.gCatFeeder.service.engine.impl.CatalogueControllersManagerImpl;
import org.gcube.data.publishing.gCatFeeder.service.engine.impl.CollectorsManagerImpl;
import org.gcube.data.publishing.gCatFeeder.service.engine.impl.ExecutionManagerImpl;
import org.gcube.data.publishing.gCatFeeder.service.engine.impl.FeederEngineImpl;
import org.gcube.data.publishing.gCatFeeder.service.engine.impl.InfrastructureUtilsImpl;
import org.gcube.data.publishing.gCatFeeder.service.engine.impl.LocalConfigurationImpl;
import org.gcube.data.publishing.gCatFeeder.service.engine.impl.persistence.ConnectionManagerImpl;
import org.gcube.data.publishing.gCatFeeder.service.engine.impl.persistence.PersistenceManagerImpl;
import org.gcube.data.publishing.gCatFeeder.service.engine.impl.persistence.StorageImpl;
import org.gcube.data.publishing.gCatFeeder.service.rest.Capabilities;
import org.gcube.data.publishing.gCatFeeder.service.rest.Executions;
@ -35,6 +41,9 @@ public class GCatFeeder extends ResourceConfig{
bind(ExecutionManagerImpl.class).to(ExecutionManager.class).in(Singleton.class);
bind(InfrastructureUtilsImpl.class).to(Infrastructure.class);
bind(StorageImpl.class).to(Storage.class);
bind(PersistenceManagerImpl.class).to(PersistenceManager.class);
bind(ConnectionManagerImpl.class).to(ConnectionManager.class).in(Singleton.class);
bind(LocalConfigurationImpl.class).to(LocalConfiguration.class).in(Singleton.class);
}
};
register(binder);
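
Note: the binder now exposes the whole persistence stack (PersistenceManager, ConnectionManager, LocalConfiguration) next to the contracts that were already registered. A rough sketch of how one such binding is consumed, assuming only the interfaces visible in this commit (the consumer class itself is hypothetical):

import javax.inject.Inject;

import org.gcube.data.publishing.gCatFeeder.service.engine.Storage;
import org.gcube.data.publishing.gCatFeeder.service.model.fault.InternalError;
import org.gcube.data.publishing.gCatFeeder.service.model.reports.ExecutionReport;

// Hypothetical consumer: HK2 resolves Storage to the StorageImpl bound above
// (or to StorageMockup under the test binder shown later in this commit).
public class ReportPublisherSketch {

    @Inject
    private Storage storage;

    public String publish(ExecutionReport report) throws InternalError {
        // storeReport(...) uploads the serialized report and returns its URL
        return storage.storeReport(report);
    }
}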

View File

@ -3,6 +3,8 @@ package org.gcube.data.publishing.gCatFeeder.service.engine;
import java.sql.Connection;
import java.sql.SQLException;
import org.gcube.data.publishing.gCatFeeder.service.model.fault.InternalError;
public interface ConnectionManager {

View File

@ -1,6 +1,5 @@
package org.gcube.data.publishing.gCatFeeder.service.engine;
import org.gcube.data.publishing.gCatFeeder.service.engine.impl.ExecutionManagerConfiguration;
import org.gcube.data.publishing.gCatFeeder.service.model.ExecutionDescriptor;
public interface ExecutionManager {
@ -11,5 +10,4 @@ public interface ExecutionManager {
public void load();
public void init(ExecutionManagerConfiguration config);
}

View File

@ -1,8 +0,0 @@
package org.gcube.data.publishing.gCatFeeder.service.engine.impl;
public class ExecutionManagerConfiguration {
private int threadPoolSize;
}

View File

@ -11,6 +11,7 @@ import org.gcube.data.publishing.gCatFeeder.service.engine.CatalogueControllersM
import org.gcube.data.publishing.gCatFeeder.service.engine.CollectorsManager;
import org.gcube.data.publishing.gCatFeeder.service.engine.ExecutionManager;
import org.gcube.data.publishing.gCatFeeder.service.engine.Infrastructure;
import org.gcube.data.publishing.gCatFeeder.service.engine.LocalConfiguration;
import org.gcube.data.publishing.gCatFeeder.service.engine.PersistenceManager;
import org.gcube.data.publishing.gCatFeeder.service.engine.Storage;
import org.gcube.data.publishing.gCatFeeder.service.model.ExecutionDescriptor;
@ -23,14 +24,18 @@ public class ExecutionManagerImpl implements ExecutionManager {
private ThreadPoolExecutor executor=null;
private static final Logger log= LoggerFactory.getLogger(ExecutionManagerImpl.class);
private boolean defaultConfiguration=true;
@Inject
private PersistenceManager persistence;
@Inject
private CollectorsManager collectors;
@Inject
private CatalogueControllersManager catalogues;
@Inject
private Infrastructure infrastructure;
@Inject
private Storage storage;
@Inject
private LocalConfiguration config;
@PostConstruct
@ -39,30 +44,30 @@ public class ExecutionManagerImpl implements ExecutionManager {
new LinkedBlockingQueue<Runnable>());
}
@Inject
public void setPersistence(PersistenceManager p) {
this.persistence=p;
}
@Inject
public void setCollectorPluginManager(CollectorsManager c) {
this.collectors=c;
}
@Inject
public void setCataloguesPluginManager(CatalogueControllersManager c) {
this.catalogues=c;
}
@Inject
public void setInfastructureInterface(Infrastructure infra) {
this.infrastructure=infra;
}
@Inject
public void setStorage(Storage storage) {
this.storage = storage;
}
// @Inject
// public void setPersistence(PersistenceManager p) {
// this.persistence=p;
// }
//
// @Inject
// public void setCollectorPluginManager(CollectorsManager c) {
// this.collectors=c;
// }
//
// @Inject
// public void setCataloguesPluginManager(CatalogueControllersManager c) {
// this.catalogues=c;
// }
//
//
// @Inject
// public void setInfastructureInterface(Infrastructure infra) {
// this.infrastructure=infra;
// }
// @Inject
// public void setStorage(Storage storage) {
// this.storage = storage;
// }
@Override
public synchronized void submit(ExecutionDescriptor desc) {
@ -97,13 +102,6 @@ public class ExecutionManagerImpl implements ExecutionManager {
throw new RuntimeException("NOT YET IMPLEMENTED");
}
@Override
public synchronized void init(ExecutionManagerConfiguration config) {
// NEED TO BE IDEMPOTENT
if(executor==null||defaultConfiguration) {
throw new RuntimeException("NOT YET IMPLEMENTED");
// executor=new ThreadPoolExecutor(corePoolSize, maximumPoolSize, keepAliveTime, unit, new ConcurrentLinkedQueue<ExecutionTask>());
}
}
}
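
Note: setter injection is replaced by field-level @Inject and the ExecutionManagerConfiguration-based init is dropped; the @PostConstruct hook builds the default executor over a LinkedBlockingQueue. A minimal sketch of that kind of initialization, with a placeholder pool size that is not taken from this commit:

import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

// Sketch only: a fixed-size default pool backed by an unbounded
// LinkedBlockingQueue, as in the @PostConstruct hook above. The pool size is
// an illustrative placeholder, not a value from this commit.
public class DefaultExecutorSketch {

    static ThreadPoolExecutor defaultExecutor() {
        int poolSize = 5; // placeholder
        return new ThreadPoolExecutor(
                poolSize, poolSize,
                0L, TimeUnit.MILLISECONDS,
                new LinkedBlockingQueue<Runnable>());
    }
}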

View File

@ -0,0 +1,30 @@
package org.gcube.data.publishing.gCatFeeder.service.engine.impl;
import java.io.IOException;
import java.util.Properties;
import javax.annotation.PostConstruct;
import org.gcube.data.publishing.gCatFeeder.service.engine.LocalConfiguration;
public class LocalConfigurationImpl implements LocalConfiguration {
private Properties props;
@PostConstruct
public void load() {
props=new Properties();
try{
props.load(LocalConfigurationImpl.class.getResourceAsStream("/gcat-feeder-config.properties"));
}catch(IOException e) {throw new RuntimeException(e);}
}
@Override
public String getProperty(String propertyName) {
if(props.isEmpty()) throw new RuntimeException("No properties loaded");
return props.getProperty(propertyName);
}
}
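
Note: LocalConfigurationImpl reads /gcat-feeder-config.properties once in @PostConstruct and then serves lookups by key. A hedged usage sketch; the consumer class and the integer parsing are illustrative, while the keys come from the properties file added elsewhere in this commit:

import javax.inject.Inject;

import org.gcube.data.publishing.gCatFeeder.service.engine.LocalConfiguration;

// Illustrative consumer of the LocalConfiguration binding. The class and
// method names are hypothetical; the property keys are the ones defined in
// gcat-feeder-config.properties.
public class PoolSettingsSketch {

    @Inject
    private LocalConfiguration config;

    public int maxTotalConnections() {
        return Integer.parseInt(config.getProperty("db.pools.max_total"));
    }

    public int maxIdleConnections() {
        return Integer.parseInt(config.getProperty("db.pools.max_idle"));
    }
}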

View File

@ -15,6 +15,7 @@ import org.apache.commons.dbcp2.PoolingDataSource;
import org.apache.commons.pool2.ObjectPool;
import org.apache.commons.pool2.impl.GenericObjectPool;
import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
import org.gcube.data.publishing.gCatFeeder.service.model.fault.InternalError;
import org.gcube.data.publishing.gCatFeeder.service.engine.ConnectionManager;
import org.gcube.data.publishing.gCatFeeder.service.engine.Infrastructure;
import org.gcube.data.publishing.gCatFeeder.service.engine.LocalConfiguration;
@ -75,7 +76,7 @@ public class ConnectionManagerImpl implements ConnectionManager {
}
private synchronized DataSource getDataSource() throws InternalError {
private synchronized DataSource getDataSource() throws InternalError, SQLException {
DatabaseConnectionDescriptor dbDescriptor=getDB();
if(!datasources.containsKey(dbDescriptor.getUrl())) {
@ -84,7 +85,7 @@ public class ConnectionManagerImpl implements ConnectionManager {
return datasources.get(dbDescriptor.getUrl());
}
private DataSource setupDataSource(DatabaseConnectionDescriptor db) {
private DataSource setupDataSource(DatabaseConnectionDescriptor db) throws SQLException, InternalError {
log.trace("Setting up data source for {} ",db);
@ -113,6 +114,21 @@ public class ConnectionManagerImpl implements ConnectionManager {
PoolingDataSource<PoolableConnection> dataSource =
new PoolingDataSource<>(connectionPool);
log.trace("Initializing schema...");
Connection conn=null;
try{
conn=dataSource.getConnection();
conn.createStatement().executeUpdate(Queries.getInitDB(db.getFlavor()));
conn.commit();
}catch(SQLException e) {
throw new InternalError("Unable to Init database "+db,e);
}finally {
if(conn!=null) conn.close();
}
return dataSource;
}
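
Note: the new initialization block creates the schema through a raw connection and closes it by hand. An equivalent sketch using try-with-resources (not what the commit does) would also close the Statement; it assumes the same package as ConnectionManagerImpl so that Queries and DatabaseConnectionDescriptor are visible:

import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;

import javax.sql.DataSource;

import org.gcube.data.publishing.gCatFeeder.service.model.fault.InternalError;

// Sketch only: the same schema-initialization step written with
// try-with-resources, closing both the Connection and the Statement.
public class SchemaInitSketch {

    static void initSchema(DataSource dataSource, DatabaseConnectionDescriptor db)
            throws InternalError {
        try (Connection conn = dataSource.getConnection();
             Statement stmt = conn.createStatement()) {
            stmt.executeUpdate(Queries.getInitDB(db.getFlavor()));
            conn.commit();
        } catch (SQLException e) {
            throw new InternalError("Unable to Init database " + db, e);
        }
    }
}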

View File

@ -33,6 +33,8 @@ public class DBField {
fields.put(REPORT_URL, new DBField(Types.VARCHAR,REPORT_URL));
fields.put(START, new DBField(Types.TIMESTAMP,START));
fields.put(END, new DBField(Types.TIMESTAMP,END));
fields.put(COLLECTORS, new DBField(Types.VARCHAR,COLLECTORS));
fields.put(CONTROLLERS, new DBField(Types.VARCHAR,CONTROLLERS));
}
}

View File

@ -6,10 +6,10 @@ import java.util.Map.Entry;
public class DBQueryDescriptor {
private Map<DBField,Object> condition;
private Map<DBField,Object> condition=new HashMap<DBField,Object>();
public DBQueryDescriptor() {
condition=new HashMap<DBField,Object>();
// TODO Auto-generated constructor stub
}
public Map<DBField, Object> getCondition() {
@ -22,14 +22,14 @@ public class DBQueryDescriptor {
this.condition = condition;
}
public DBQueryDescriptor(DBField field, Object value) {
this();
public DBQueryDescriptor(DBField field, Object value) {
add(field,value);
}
public String toString() {
StringBuilder builder=new StringBuilder();
if(condition.isEmpty())return "EMPTY";
for(Entry<DBField,Object> entry : condition.entrySet()) {
builder.append(String.format("%1$s = %2$s AND ", entry.getKey().getFieldName(),entry.getValue()));
}

View File

@ -2,15 +2,21 @@ package org.gcube.data.publishing.gCatFeeder.service.engine.impl.persistence;
public class DatabaseConnectionDescriptor {
public static enum Flavor{
POSTGRES,MYSQL
}
private String username;
private String url;
private String password;
private Flavor flavor=Flavor.POSTGRES;
@Override
public String toString() {
return "DatabaseConnectionDescriptor [username=" + username + ", url=" + url + "]";
return "DatabaseConnectionDescriptor [username=" + username + ", url=" + url + ", password=" + password
+ ", flavor=" + flavor + "]";
}
public DatabaseConnectionDescriptor(String username, String url, String password) {
@ -20,6 +26,11 @@ public class DatabaseConnectionDescriptor {
this.password = password;
}
public DatabaseConnectionDescriptor(String username, String url, String password, Flavor flavor) {
this(username,url,password);
this.flavor = flavor;
}
public String getUsername() {
return username;
}
@ -32,6 +43,8 @@ public class DatabaseConnectionDescriptor {
return password;
}
public Flavor getFlavor() {
return flavor;
}
}

View File

@ -48,8 +48,8 @@ public class PersistenceManagerImpl implements PersistenceManager {
}else {
log.debug("Inserting request ..");
// PREPARE REQUEST
PreparedStatement psInsert=Queries.INSERT_NEW.prepare(conn, Statement.RETURN_GENERATED_KEYS);
psInsert=Queries.INSERT_NEW.fill(psInsert, queryDescriptor);
psInsert.executeUpdate();
ResultSet rsId=psInsert.getGeneratedKeys();
rsId.next();
@ -69,7 +69,7 @@ public class PersistenceManagerImpl implements PersistenceManager {
throw new PersistenceError(t);
}finally {
try {
conn.close();
if(conn!=null)conn.close();
} catch (SQLException e) {
throw new PersistenceError(e);
}
@ -94,7 +94,7 @@ public class PersistenceManagerImpl implements PersistenceManager {
throw new PersistenceError(t);
}finally {
try {
conn.close();
if(conn!=null)conn.close();
} catch (SQLException e) {
throw new PersistenceError(e);
}
@ -120,7 +120,7 @@ public class PersistenceManagerImpl implements PersistenceManager {
throw new PersistenceError(t);
}finally {
try {
conn.close();
if(conn!=null)conn.close();
} catch (SQLException e) {
throw new PersistenceError(e);
}
@ -143,7 +143,7 @@ public class PersistenceManagerImpl implements PersistenceManager {
throw new PersistenceError(t);
}finally {
try {
conn.close();
if(conn!=null)conn.close();
} catch (SQLException e) {
throw new PersistenceError(e);
}
@ -166,7 +166,7 @@ public class PersistenceManagerImpl implements PersistenceManager {
throw new PersistenceError(t);
}finally {
try {
conn.close();
if(conn!=null)conn.close();
} catch (SQLException e) {
throw new PersistenceError(e);
}
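
Note: every finally block now repeats the same null-guarded close. A small helper along these lines (hypothetical, not part of the commit; the PersistenceError package is assumed to mirror the other fault classes) would remove the duplication:

import java.sql.Connection;
import java.sql.SQLException;

// Package of PersistenceError assumed to match InternalError; adjust if it
// actually lives elsewhere.
import org.gcube.data.publishing.gCatFeeder.service.model.fault.PersistenceError;

// Hypothetical helper factoring out the null-guarded close used in the
// finally blocks above.
public class ConnectionCloserSketch {

    static void closeIfOpen(Connection conn) throws PersistenceError {
        if (conn == null) return;
        try {
            conn.close();
        } catch (SQLException e) {
            throw new PersistenceError(e);
        }
    }
}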

View File

@ -21,6 +21,7 @@ import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.gcube.data.publishing.gCatFeeder.service.engine.impl.persistence.DatabaseConnectionDescriptor.Flavor;
import org.gcube.data.publishing.gCatFeeder.service.model.ExecutionDescriptor;
import org.gcube.data.publishing.gCatFeeder.service.model.ExecutionRequest;
import org.gcube.data.publishing.gCatFeeder.service.model.ExecutionStatus;
@ -28,6 +29,22 @@ import org.gcube.data.publishing.gCatFeeder.service.model.fault.InvalidRequest;
public class Queries {
public static final String getInitDB(DatabaseConnectionDescriptor.Flavor flavor) {
return "CREATE TABLE IF NOT EXISTS "+TABLE+" ("
+ID+" "+(flavor.equals(Flavor.POSTGRES)?"BIGSERIAL":"bigint auto_increment") +" NOT NULL,"
+CALLER_TOKEN+" VARCHAR NOT NULL,"
+CALLER_ID+" VARCHAR NOT NULL,"
+CALLER_CONTEXT+" VARCHAR NOT NULL,"
+STATUS+" VARCHAR(40) NOT NULL,"
+REPORT_URL+" VARCHAR,"
+COLLECTORS+" text,"
+CONTROLLERS+" text,"
+START+" timestamp with time zone,"
+END+" timestamp with time zone,"
+"primary key ("+ID+"))";
}
public static final Query GET_BY_ID=new Query("Select * from "+TABLE+" where "+ID+" = ?",
new DBField[] {fields.get(ID)});
@ -41,12 +58,15 @@ public class Queries {
fields.get(ID)});
public static final Query ACQUIRE= new Query("UPDATE "+TABLE+" SET "
+STATUS+"="+ExecutionStatus.RUNNING+" WHERE "+ID+"=? AND "+STATUS+"="+ExecutionStatus.PENDING,
+STATUS+"='"+ExecutionStatus.RUNNING+"' WHERE "+ID+"=? AND "+STATUS+"='"+ExecutionStatus.PENDING+"'",
new DBField[] {fields.get(ID)});
public static final Query GET_ALL= new Query("SELECT * FROM "+TABLE+" ORDER BY "+END+" DESC",
new DBField[] {});
/*
* SAME CONTEXT,
* status is RUNNING OR PENDING
@ -55,7 +75,7 @@ public class Queries {
*/
public static final Query GET_SIMILAR=new Query ("SELECT * FROM "+TABLE+" WHERE "
+CALLER_CONTEXT+"=? AND "+COLLECTORS+"=? AND "+CONTROLLERS+"=? AND "
/* STATUS */ +"("+STATUS+"="+ExecutionStatus.RUNNING+" OR "+STATUS+"="+ExecutionStatus.PENDING+")",
/* STATUS */ +"("+STATUS+"='"+ExecutionStatus.RUNNING+"' OR "+STATUS+"='"+ExecutionStatus.PENDING+"')",
new DBField[] {fields.get(CALLER_CONTEXT),
fields.get(COLLECTORS),
fields.get(CONTROLLERS)});
@ -65,13 +85,11 @@ public class Queries {
+CALLER_TOKEN+","
+CALLER_ID+","
+CALLER_CONTEXT+","
+STATUS+","
+REPORT_URL+","
+STATUS+","
+COLLECTORS+","
+CONTROLLERS+","
+START+") VALUES (?,?,?,?,?,?,?,?)",
+CONTROLLERS+") VALUES (?,?,?,'"+ExecutionStatus.PENDING+"',?,?)",
new DBField[] {fields.get(CALLER_TOKEN),fields.get(CALLER_ID),fields.get(CALLER_CONTEXT),
fields.get(STATUS),fields.get(REPORT_URL),fields.get(COLLECTORS),fields.get(CONTROLLERS),fields.get(START)});
fields.get(COLLECTORS),fields.get(CONTROLLERS)});
public static final ExecutionDescriptor translateRow(ResultSet row) throws SQLException {
ExecutionDescriptor toReturn=new ExecutionDescriptor();
@ -86,7 +104,9 @@ public class Queries {
toReturn.setId(row.getLong(ID));
toReturn.setReportUrl(row.getString(REPORT_URL));
toReturn.setStartTime(row.getTimestamp(START).toInstant());
Timestamp startTime=row.getTimestamp(START);
if(startTime!=null)
toReturn.setStartTime(startTime.toInstant());
toReturn.setStatus(ExecutionStatus.valueOf(row.getString(STATUS)));
return toReturn;
}
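
Note: ACQUIRE, GET_SIMILAR and INSERT_NEW now quote the ExecutionStatus literals, fixing the previously unquoted enum values. Since the statuses come from a closed enum this is safe; a fully parameterized variant (a sketch, not what the commit does) would bind them instead:

// Sketch of a parameterized ACQUIRE, assumed to live inside Queries so that
// TABLE, ID and STATUS are visible; needs java.sql.Connection and
// java.sql.PreparedStatement imports. Status values are bound instead of
// being concatenated into the SQL text.
static boolean acquire(Connection conn, long executionId) throws SQLException {
    String sql = "UPDATE " + TABLE + " SET " + STATUS + "=? "
            + "WHERE " + ID + "=? AND " + STATUS + "=?";
    try (PreparedStatement ps = conn.prepareStatement(sql)) {
        ps.setString(1, ExecutionStatus.RUNNING.toString());
        ps.setLong(2, executionId);
        ps.setString(3, ExecutionStatus.PENDING.toString());
        return ps.executeUpdate() == 1; // true if the row moved PENDING -> RUNNING
    }
}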

View File

@ -1,14 +1,75 @@
package org.gcube.data.publishing.gCatFeeder.service.engine.impl.persistence;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.UUID;
import javax.inject.Inject;
import org.gcube.contentmanagement.blobstorage.service.IClient;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.gcube.contentmanager.storageclient.wrapper.AccessType;
import org.gcube.contentmanager.storageclient.wrapper.MemoryType;
import org.gcube.contentmanager.storageclient.wrapper.StorageClient;
import org.gcube.data.publishing.gCatFeeder.service.engine.Infrastructure;
import org.gcube.data.publishing.gCatFeeder.service.engine.Storage;
import org.gcube.data.publishing.gCatFeeder.service.model.fault.InternalError;
import org.gcube.data.publishing.gCatFeeder.service.model.reports.ExecutionReport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
public class StorageImpl implements Storage{
private static final Logger log= LoggerFactory.getLogger(StorageImpl.class);
protected static ObjectMapper mapper=new ObjectMapper();
@Inject
private Infrastructure infra;
private final IClient getClient(){
return new StorageClient("data-publishing", "gcat-feeder", infra.getClientID(infra.getCurrentToken()), AccessType.SHARED, MemoryType.PERSISTENT).getClient();
}
//return Id
private final String putOntoStorage(File source) throws RemoteBackendException, FileNotFoundException{
IClient client=getClient();
log.debug("Uploading local file "+source.getAbsolutePath());
String id=client.put(true).LFile(new FileInputStream(source)).RFile(UUID.randomUUID().toString());
log.debug("File uploaded. ID : "+id);
String toReturn= client.getHttpUrl().RFile(id);
log.debug("Created URL : "+toReturn);
return toReturn;
}
protected File asFile(ExecutionReport report) throws InternalError {
try {
File f=File.createTempFile("report", ".json");
String serialized=mapper.writeValueAsString(report);
Files.write(Paths.get(f.getAbsolutePath()), serialized.getBytes());
return f;
} catch (IOException e) {
throw new InternalError("Unable to rite report : ",e);
}
}
@Override
public String storeReport(ExecutionReport report) throws InternalError {
throw new RuntimeException("Implment THIS");
public String storeReport(ExecutionReport report) throws InternalError {
try {
return putOntoStorage(asFile(report));
} catch (RemoteBackendException | FileNotFoundException e) {
throw new InternalError("Unable to store report ",e);
}
}
}
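
Note: storeReport serializes the report to a temporary JSON file, uploads it through the gCube StorageClient and returns the resulting HTTP URL. A sketch of the same upload step that also closes the stream and removes the temporary file (the wrapper method is illustrative; the IClient calls are the ones already used above, and it assumes it sits inside StorageImpl so getClient() and log are available):

// Sketch only: same upload as putOntoStorage, plus stream cleanup and
// temp-file removal.
private String putOntoStorageAndClean(File source) throws RemoteBackendException, IOException {
    IClient client = getClient();
    log.debug("Uploading local file " + source.getAbsolutePath());
    try (FileInputStream in = new FileInputStream(source)) {
        String id = client.put(true).LFile(in).RFile(UUID.randomUUID().toString());
        return client.getHttpUrl().RFile(id);
    } finally {
        source.delete(); // report already uploaded (or failed); drop the local copy
    }
}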

View File

@ -1,14 +1,15 @@
package org.gcube.data.publishing.gCatFeeder.service.model;
import java.time.Instant;
import java.util.HashSet;
import java.util.Set;
public class ExecutionDescriptor {
private Long id;
private Set<String> collectors;
private Set<String> catalogues;
private Set<String> collectors=new HashSet<>();
private Set<String> catalogues=new HashSet<>();
private String callerEncryptedToken;

View File

@ -0,0 +1,6 @@
db.pools.max_idle=5
db.pools.max_total=50
db.pools.min_total=3
mapping-db.ep.name=Feeder_DB
mapping-db.ep.category=Database

View File

@ -2,57 +2,115 @@ package org.gcube.data.publishing.gCatFeeder.service;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Properties;
import javax.inject.Singleton;
import javax.ws.rs.core.Application;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data.publishing.gCatFeeder.service.engine.CatalogueControllersManager;
import org.gcube.data.publishing.gCatFeeder.service.engine.CollectorsManager;
import org.gcube.data.publishing.gCatFeeder.service.engine.ConnectionManager;
import org.gcube.data.publishing.gCatFeeder.service.engine.ExecutionManager;
import org.gcube.data.publishing.gCatFeeder.service.engine.FeederEngine;
import org.gcube.data.publishing.gCatFeeder.service.engine.Infrastructure;
import org.gcube.data.publishing.gCatFeeder.service.engine.LocalConfiguration;
import org.gcube.data.publishing.gCatFeeder.service.engine.PersistenceManager;
import org.gcube.data.publishing.gCatFeeder.service.engine.Storage;
import org.gcube.data.publishing.gCatFeeder.service.engine.impl.CatalogueControllersManagerImpl;
import org.gcube.data.publishing.gCatFeeder.service.engine.impl.CollectorsManagerImpl;
import org.gcube.data.publishing.gCatFeeder.service.engine.impl.ExecutionManagerImpl;
import org.gcube.data.publishing.gCatFeeder.service.engine.impl.FeederEngineImpl;
import org.gcube.data.publishing.gCatFeeder.service.engine.impl.InfrastructureUtilsImpl;
import org.gcube.data.publishing.gCatFeeder.service.engine.impl.LocalConfigurationImpl;
import org.gcube.data.publishing.gCatFeeder.service.engine.impl.persistence.ConnectionManagerImpl;
import org.gcube.data.publishing.gCatFeeder.service.engine.impl.persistence.PersistenceManagerImpl;
import org.gcube.data.publishing.gCatFeeder.service.engine.impl.persistence.StorageImpl;
import org.gcube.data.publishing.gCatFeeder.service.mockups.InfrastructureMockup;
import org.gcube.data.publishing.gCatFeeder.service.mockups.PersistenceManagerMock;
import org.gcube.data.publishing.gCatFeeder.service.mockups.StorageMockup;
import org.glassfish.hk2.utilities.binding.AbstractBinder;
import org.glassfish.jersey.test.JerseyTest;
import org.junit.Before;
import org.junit.BeforeClass;
public class BaseTest extends JerseyTest{
@Before
public void init() throws IOException, SQLException{
}
@Override
protected Application configure() {
AbstractBinder binder = new AbstractBinder() {
@Override
protected void configure() {
bind(FeederEngineImpl.class).to(FeederEngine.class);
bind(CatalogueControllersManagerImpl.class).to(CatalogueControllersManager.class).in(Singleton.class);
bind(CollectorsManagerImpl.class).to(CollectorsManager.class).in(Singleton.class);
bind(ExecutionManagerImpl.class).to(ExecutionManager.class).in(Singleton.class);
bind(PersistenceManagerMock.class).to(PersistenceManager.class).in(Singleton.class);
bind(InfrastructureMockup.class).to(Infrastructure.class);
bind(StorageMockup.class).to(Storage.class);
}
};
return new GCatFeeder(binder);
private static String testContext=null;
@BeforeClass
public static void checkEnvironment() {
testContext=System.getProperty("testContext");
System.out.println("TEST CONTEXT = "+testContext);
}
@Before
public void init() throws IOException, SQLException{
setTestInfrastructure();
}
@Override
protected Application configure() {
if(testContext!=null) {
System.out.println("TEST INFRASTRUCTURE IS "+testContext);
AbstractBinder binder = new AbstractBinder() {
@Override
protected void configure() {
bind(FeederEngineImpl.class).to(FeederEngine.class);
bind(CatalogueControllersManagerImpl.class).to(CatalogueControllersManager.class).in(Singleton.class);
bind(CollectorsManagerImpl.class).to(CollectorsManager.class).in(Singleton.class);
bind(ExecutionManagerImpl.class).to(ExecutionManager.class).in(Singleton.class);
bind(StorageImpl.class).to(Storage.class);
bind(PersistenceManagerImpl.class).to(PersistenceManager.class);
bind(ConnectionManagerImpl.class).to(ConnectionManager.class).in(Singleton.class);
bind(LocalConfigurationImpl.class).to(LocalConfiguration.class).in(Singleton.class);
//Mockup
bind(InfrastructureMockup.class).to(Infrastructure.class);
}
};
return new GCatFeeder(binder);
}else {
System.out.println("NO TEST INFRASTRUCTURE AVAILABLE");
AbstractBinder binder = new AbstractBinder() {
@Override
protected void configure() {
bind(FeederEngineImpl.class).to(FeederEngine.class);
bind(CatalogueControllersManagerImpl.class).to(CatalogueControllersManager.class).in(Singleton.class);
bind(CollectorsManagerImpl.class).to(CollectorsManager.class).in(Singleton.class);
bind(ExecutionManagerImpl.class).to(ExecutionManager.class).in(Singleton.class);
bind(PersistenceManagerImpl.class).to(PersistenceManager.class);
bind(ConnectionManagerImpl.class).to(ConnectionManager.class).in(Singleton.class);
bind(LocalConfigurationImpl.class).to(LocalConfiguration.class).in(Singleton.class);
// Mockups
bind(InfrastructureMockup.class).to(Infrastructure.class);
bind(StorageMockup.class).to(Storage.class);
}
};
return new GCatFeeder(binder);
}
}
public static void setTestInfrastructure() {
if(isTestInfrastructureEnabled()) {
Properties props=new Properties();
try{
props.load(BaseTest.class.getResourceAsStream("/tokens.properties"));
}catch(IOException e) {throw new RuntimeException(e);}
if(!props.containsKey(testContext)) throw new RuntimeException("No token found for scope : "+testContext);
SecurityTokenProvider.instance.set(props.getProperty(testContext));
ScopeProvider.instance.set(testContext);
}
}
public static boolean isTestInfrastructureEnabled() {
return testContext!=null;
}
}

View File

@ -5,6 +5,7 @@ import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.Response;
import org.gcube.data.publishing.gCatFeeder.service.model.ExecutionDescriptor;
import org.junit.Assert;
import org.junit.Test;
public class ExecutionsTest extends BaseTest {
@ -17,8 +18,11 @@ public class ExecutionsTest extends BaseTest {
System.out.println(target.getUri());
Response resp=target.request().get();
if(resp.getStatus()!=200) {
System.err.println(resp.readEntity(String.class));
throw new RuntimeException("GetAll error should never happen");
}
System.out.println(resp.getStatus() + " : "+ resp.readEntity(String.class));
if(resp.getStatus()!=200) throw new RuntimeException("GetAll error should never happen");
}
@Test
@ -33,8 +37,12 @@ public class ExecutionsTest extends BaseTest {
ExecutionDescriptor desc=resp.readEntity(ExecutionDescriptor.class);
Long id=desc.getId();
waitForSuccess(id);
}
private void waitForSuccess(Long executionId) {
WebTarget pollTarget=
target(ServiceConstants.Executions.PATH).path(id+"");
target(ServiceConstants.Executions.PATH).path(executionId+"");
boolean end=false;
do {
@ -57,6 +65,24 @@ public class ExecutionsTest extends BaseTest {
}while(!end);
}
@Test
public void checkSimilar() {
WebTarget target=
target(ServiceConstants.Executions.PATH);
System.out.println(target.getUri());
Response resp=target.request().post(Entity.json(""));
// System.out.println(resp.getStatus() + " : "+ resp.readEntity(String.class));
ExecutionDescriptor desc=resp.readEntity(ExecutionDescriptor.class);
Long id=desc.getId();
resp=target.request().post(Entity.json(""));
Assert.assertEquals(id, resp.readEntity(ExecutionDescriptor.class).getId());
waitForSuccess(id);
}
@Test
public void wrongSubmission() {
WebTarget target=

View File

@ -1,42 +1,97 @@
package org.gcube.data.publishing.gCatFeeder.service.mockups;
import org.gcube.data.publishing.gCatFeeder.service.engine.Infrastructure;
import java.sql.SQLException;
public class InfrastructureMockup implements Infrastructure {
import org.gcube.data.publishing.gCatFeeder.service.BaseTest;
import org.gcube.data.publishing.gCatFeeder.service.engine.impl.InfrastructureUtilsImpl;
import org.gcube.data.publishing.gCatFeeder.service.engine.impl.persistence.DatabaseConnectionDescriptor;
import org.gcube.data.publishing.gCatFeeder.service.engine.impl.persistence.DatabaseConnectionDescriptor.Flavor;
public class InfrastructureMockup extends InfrastructureUtilsImpl {
static {
try {
org.h2.tools.Server.createTcpServer().start();
} catch (SQLException e) {
throw new RuntimeException("Unable to init in memory DB.",e);
}
}
@Override
public String getCurrentToken() {
if(BaseTest.isTestInfrastructureEnabled()) {
BaseTest.setTestInfrastructure();
return super.getCurrentToken();
}
return "FAKE_TOKEN";
}
@Override
public String getCurrentContext() {
if(BaseTest.isTestInfrastructureEnabled()) {
BaseTest.setTestInfrastructure();
return super.getCurrentContext();
}
return "FAKE_CONTEXT";
}
@Override
public String getCurrentContextName() {
if(BaseTest.isTestInfrastructureEnabled()) {
BaseTest.setTestInfrastructure();
return super.getCurrentContextName();
}
return "FAKE";
}
@Override
public String getClientID(String token){
if(BaseTest.isTestInfrastructureEnabled()) {
BaseTest.setTestInfrastructure();
return super.getClientID(token);
}
return "FAKE_ID";
}
@Override
public void setToken(String token) {
if(BaseTest.isTestInfrastructureEnabled()) {
BaseTest.setTestInfrastructure();
super.setToken(token);
}
}
@Override
public String decrypt(String toDecrypt) {
return toDecrypt;
if(BaseTest.isTestInfrastructureEnabled()) {
BaseTest.setTestInfrastructure();
return super.decrypt(toDecrypt);
}
else return toDecrypt;
}
@Override
public String encrypt(String toEncrypt) {
if(BaseTest.isTestInfrastructureEnabled()) {
BaseTest.setTestInfrastructure();
return super.encrypt(toEncrypt);
}
return toEncrypt;
}
@Override
public DatabaseConnectionDescriptor queryForDatabase(String category, String name) throws InternalError {
if(BaseTest.isTestInfrastructureEnabled()) {
BaseTest.setTestInfrastructure();
return super.queryForDatabase(category, name);
}else {
String url="jdbc:h2:mem:test;DB_CLOSE_DELAY=-1";
// String url="jdbc:h2:tcp://localhost:9092//data;DB_CLOSE_DELAY=-1";
return new DatabaseConnectionDescriptor(null, url, null,Flavor.MYSQL);
}
}
}

View File

@ -1,33 +1,17 @@
package org.gcube.data.publishing.gCatFeeder.service.mockups;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.gcube.data.publishing.gCatFeeder.service.engine.Storage;
import org.gcube.data.publishing.gCatFeeder.service.engine.impl.persistence.StorageImpl;
import org.gcube.data.publishing.gCatFeeder.service.model.fault.InternalError;
import org.gcube.data.publishing.gCatFeeder.service.model.reports.ExecutionReport;
import com.fasterxml.jackson.databind.ObjectMapper;
public class StorageMockup extends StorageImpl {
public class StorageMockup implements Storage {
private static ObjectMapper mapper=new ObjectMapper();
@Override
public String storeReport(ExecutionReport report) throws InternalError {
try {
File f=File.createTempFile("report", ".json");
String serialized=mapper.writeValueAsString(report);
Files.write(Paths.get(f.getAbsolutePath()), serialized.getBytes());
return f.getAbsolutePath();
} catch (IOException e) {
throw new InternalError("Unable to rite report : ",e);
}
return asFile(report).getAbsolutePath();
}
}