Refactoring Infrastructure Tests

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/private/luca.frosini/infrastructure-tests@151477 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
Luca Frosini 2017-07-31 13:39:45 +00:00
parent 4d791f6fd9
commit 4603c4d080
21 changed files with 82 additions and 417 deletions

View File

@ -19,9 +19,9 @@ import org.gcube.accounting.datamodel.aggregation.AggregatedServiceUsageRecord;
import org.gcube.accounting.datamodel.aggregation.AggregatedStorageUsageRecord;
import org.gcube.accounting.datamodel.usagerecords.ServiceUsageRecord;
import org.gcube.accounting.datamodel.usagerecords.StorageUsageRecord;
import org.gcube.accounting.datamodel.usagerecords.TestUsageRecord;
import org.gcube.documentstore.records.AggregatedRecord;
import org.gcube.testutility.ScopedTest;
import org.gcube.testutility.TestUsageRecord;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

View File

@ -4,7 +4,7 @@ import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.concurrent.TimeUnit;
import org.gcube.accounting.aggregator.madeaggregation.AggregationType;
import org.gcube.accounting.aggregator.aggregation.AggregationType;
import org.gcube.accounting.analytics.persistence.AccountingPersistenceBackendQueryConfiguration;
import org.gcube.testutility.ScopedTest;
import org.junit.Test;
@ -28,7 +28,7 @@ public class RemoveOldRecords extends ScopedTest {
if (scope!=null){
generateKey.add(scope);
}
for (String value: key.split(",")){
for (String value: key.split("/")){
if (!value.toString().isEmpty())
generateKey.add(Integer.parseInt(value));
}
@ -43,10 +43,17 @@ public class RemoveOldRecords extends ScopedTest {
AccountingPersistenceQueryCouchBase accountingPersistenceQueryCouchBase = new AccountingPersistenceQueryCouchBase();
accountingPersistenceQueryCouchBase.prepareConnection(configuration);
SimpleDateFormat format = new SimpleDateFormat(AggregationType.HOURLY.getDateformat());
SimpleDateFormat format = new SimpleDateFormat(AggregationType.DAILY.getDateFormatPattern());
Calendar start = Calendar.getInstance();
start.set(1970, 1, 1);
start.set(Calendar.YEAR, 1970);
start.set(Calendar.MONTH, Calendar.JANUARY);
start.set(Calendar.DAY_OF_MONTH, 1);
start.set(Calendar.HOUR_OF_DAY, 0);
start.set(Calendar.MINUTE, 0);
start.set(Calendar.SECOND, 0);
start.set(Calendar.MILLISECOND, 0);
Calendar end = Calendar.getInstance();
end.add(Calendar.DAY_OF_MONTH, -60);
@ -56,7 +63,7 @@ public class RemoveOldRecords extends ScopedTest {
JsonArray startKey = generateKey(null, startKeyString);
JsonArray endKey = generateKey(null, endKeyString);
String designDocId = "noContext";
String designDocId = "ServiceUsageRecordAggregated";
String viewName = "all";

View File

@ -16,7 +16,6 @@ import org.gcube.documentstore.exception.InvalidValueException;
import org.gcube.documentstore.exception.NotAggregatableRecordsExceptions;
import org.gcube.documentstore.persistence.PersistenceBackendFactory;
import org.gcube.testutility.ScopedTest;
import org.gcube.testutility.TestUsageRecord;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@ -63,7 +62,7 @@ public class AccountingTest extends ScopedTest{
@Test
public void accountingStorageUsageRecordStressTest() throws InvalidValueException, NotAggregatableRecordsExceptions {
for(int i=0; i<1000; i++){
StorageUsageRecord sur = org.gcube.testutility.TestUsageRecord.createTestStorageUsageRecord();
StorageUsageRecord sur = org.gcube.accounting.datamodel.usagerecords.TestUsageRecord.createTestStorageUsageRecord();
sur.setScope(TestUsageRecord.TEST_SCOPE);
accountingPersistence.account(sur);
}

View File

@ -7,7 +7,6 @@ import java.util.Set;
import org.gcube.documentstore.exception.InvalidValueException;
import org.gcube.testutility.ScopedTest;
import org.gcube.testutility.TestUsageRecord;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;

View File

@ -7,7 +7,6 @@ import java.util.Set;
import org.gcube.documentstore.exception.InvalidValueException;
import org.gcube.testutility.ScopedTest;
import org.gcube.testutility.TestUsageRecord;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;

View File

@ -7,7 +7,6 @@ import java.util.Set;
import org.gcube.documentstore.exception.InvalidValueException;
import org.gcube.testutility.ScopedTest;
import org.gcube.testutility.TestUsageRecord;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;

View File

@ -11,7 +11,6 @@ import org.gcube.accounting.datamodel.basetypes.AbstractTaskUsageRecord;
import org.gcube.documentstore.exception.InvalidValueException;
import org.gcube.documentstore.records.Record;
import org.gcube.testutility.ScopedTest;
import org.gcube.testutility.TestUsageRecord;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;

View File

@ -1,7 +1,7 @@
/**
*
*/
package org.gcube.testutility;
package org.gcube.accounting.datamodel.usagerecords;
import java.io.Serializable;
import java.net.URI;

View File

@ -7,6 +7,7 @@ import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import org.gcube.accounting.aggregator.plugin.AccountingAggregatorPluginDeclaration;
import org.gcube.testutility.ScopedTest;
import org.gcube.vremanagement.executor.api.types.LaunchParameter;
import org.gcube.vremanagement.executor.api.types.Scheduling;
@ -27,182 +28,52 @@ public class AggregatorAccountingPluginSmartExecutorSchedulerTest extends Scoped
@Before
public void before() throws Exception{
//ScopeProvider.instance.reset(); // Comment this to run the test. this line has been added to avoid unwanted launch
//SecurityTokenProvider.instance.set(TestUtility.TOKEN);
//ScopeProvider.instance.set("/gcube/devNext");
proxy = ExecutorPlugin.getExecutorProxy("Accouting-Aggregator-Plugin").build();
proxy = ExecutorPlugin.getExecutorProxy(AccountingAggregatorPluginDeclaration.NAME).build();
Assert.assertNotNull(proxy);
}
public UUID scheduleTest(Scheduling scheduling) throws Exception {
private Map<String, Object> getInputs() throws Exception {
Map<String, Object> inputs = new HashMap<String, Object>();
logger.debug("Inputs : {}", inputs);
inputs.put("type","DAILY");
//period to be processed
inputs.put("interval",3);
//change to time
inputs.put("startTime", 14);
//specify bucket
inputs.put("bucket","accounting_service");
//current scope
inputs.put("currentScope",false);
//specify user for save to workspace
inputs.put("user","alessandro.pieve");
//specify a recovery 0 default recovery and aggregate, 1 only aggregate, 2 only recovery
inputs.put("recovery",0);
//optional if present interval is not considered and elaborate a specificy step
//e.g if type is daily and set input.put("intervalStep",10), this plugin elaborate a 10 hour
// inputs.put("intervalStep",24);
//optional if exist and true no backup, but start elaborate immediately
// inputs.put("backup",false);
// inputs.put("typePersisted",1);
LaunchParameter parameter = new LaunchParameter("Accouting-Aggregator-Plugin", inputs);
parameter.setScheduling(scheduling);
try {
String uuidString = proxy.launch(parameter);
return inputs;
}
private UUID launch(Scheduling scheduling) throws Exception {
Map<String, Object> inputs = new HashMap<String, Object>();
LaunchParameter launchParameter = new LaunchParameter(AccountingAggregatorPluginDeclaration.NAME, inputs);
launchParameter.setScheduling(scheduling);
try {
String uuidString = proxy.launch(launchParameter);
logger.debug("Launched with UUID : {}", uuidString);
return UUID.fromString(uuidString);
} catch(Exception e){
logger.error("Error launching sheduled task", e);
}catch (Exception e) {
logger.error("Error while launching {}", e);
throw e;
}
}
@Test
public void launch() {
Map<String, Object> inputs = new HashMap<String, Object>();
LaunchParameter launchParameter = new LaunchParameter("Test", inputs);
try {
proxy.launch(launchParameter);
} catch (Exception e) {
logger.error("Error launching sheduled task", e);
//throw e;
}
}
@Test
public void LaunchTest() throws Exception {
Map<String, Object> inputs = new HashMap<String, Object>();
logger.debug("Inputs : {}", inputs);
//inputs.put("type","DAILY");
inputs.put("type","MONTHLY");
//period to be processed
inputs.put("interval",1);
//change to time
//novembre 6
//ottobre 7
//settembre 8
//agosto 9
//luglio 10
//giugno e' 11
inputs.put("startTime",7);
//inputs.put("startTime",173);
inputs.put("intervalStep",4);
//specify bucket
inputs.put("bucket","accounting_service");
//current scope
inputs.put("currentScope",false);
//specify user for save to workspace
inputs.put("user","alessandro.pieve");
//specify a recovery 0 default recovery and aggregate, 1 only aggregate, 2 only recovery
inputs.put("recovery",0);
//optional if present interval is not considered and elaborate a specificy step
//e.g if type is daily and set input.put("intervalStep",10), this plugin elaborate a 10 hour
//optional if exist and true no backup, but start elaborate immediately
//inputs.put("backup",false);
//inputs.put("typePersisted",1);
LaunchParameter parameter = new LaunchParameter("Accouting-Aggregator-Plugin", inputs);
try {
String uuidString = proxy.launch(parameter);
logger.debug("Launched with UUID : {}", uuidString);
} catch(Exception e){
logger.error("Error launching sheduled task", e);
throw e;
}
}
@Test
public void LaunchTestAutomatic() throws Exception {
Map<String, Object> inputs = new HashMap<String, Object>();
logger.debug("Inputs : {}", inputs);
inputs.put("type","DAILY");
//period to be processed
inputs.put("interval",1);
//change to time
//load a file for start time
inputs.put("pathFile","/home/gcube/SmartGears/startTime");
//specify bucket
inputs.put("bucket","accounting_service");
inputs.put("endScriptTime","18:30");
//current scope
inputs.put("currentScope",false);
//specify user for save to workspace
inputs.put("user","alessandro.pieve");
//specify a recovery 0 default recovery and aggregate, 1 only aggregate, 2 only recovery
inputs.put("recovery",0);
LaunchParameter parameter = new LaunchParameter("Accouting-Aggregator-Plugin", inputs);
try {
String uuidString = proxy.launch(parameter);
logger.debug("Launched with UUID : {}", uuidString);
} catch(Exception e){
logger.error("Error launching sheduled task", e);
throw e;
}
}
@Test
public void cronExpPreviousMustBeTerminated() throws Exception {
CronExpression cronExpression = new CronExpression("0 0 2 * * ?"); // every day at 2:00
Scheduling scheduling = new Scheduling(cronExpression, true);
scheduling.setGlobal(true);
UUID uuid = scheduleTest(scheduling);
logger.debug("Launched with UUID : {}", uuid);
UUID uuid = launch(scheduling);
}
@Test
public void unSchedule() throws Exception {
//proxy.unSchedule("", true);
}
@Test
public void stop() throws Exception {
proxy.stop("Accouting-Aggregator-Plugin");
proxy.stop(AccountingAggregatorPluginDeclaration.NAME);
}
}

View File

@ -1,4 +1,4 @@
package org.gcube.test;
package org.gcube.discovery;
import java.security.Key;
import java.util.HashMap;
@ -14,7 +14,7 @@ import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.gcube.resources.discovery.icclient.ICFactory;
public class DatabaseDiscovery {
public class ServiceEndpointDiscovery {
protected String serviceEndpointCategory;
protected String serviceEndpointName;
@ -30,11 +30,11 @@ public class DatabaseDiscovery {
this.propertyMap = new HashMap<String, Property>();
}
public DatabaseDiscovery() throws Exception {
public ServiceEndpointDiscovery() throws Exception {
this.propertyMap = new HashMap<String, Property>();
}
public DatabaseDiscovery(String serviceEndpointCategory, String serviceEndpointName, String entryName) throws Exception {
public ServiceEndpointDiscovery(String serviceEndpointCategory, String serviceEndpointName, String entryName) throws Exception {
this.propertyMap = new HashMap<String, Property>();
this.serviceEndpointCategory = serviceEndpointCategory;
this.serviceEndpointName = serviceEndpointName;

View File

@ -0,0 +1,33 @@
/**
*
*/
package org.gcube.discovery;
import org.gcube.testutility.ScopedTest;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR)
*
*/
/**
 * Test that discovers a ServiceEndpoint resource via {@link ServiceEndpointDiscovery}
 * and logs the credentials/URL it resolves. Runs under the scope configured by ScopedTest.
 */
public class ServiceEndpointInfo extends ScopedTest {
private static final Logger logger = LoggerFactory.getLogger(ServiceEndpointInfo.class);
// ServiceEndpoint <Category> to query in the Information System.
public static final String SERVICE_ENDPOINT_CATEGORY = "Database";
// ServiceEndpoint <Name> to query.
public static final String SERVICE_ENDPOINT_NAME = "TwitterMonitorDatabase";
// AccessPoint entry name inside the ServiceEndpoint ("postgress" is the registered entry name, typo included).
public static final String ENTRY_NAME = "postgress";
// Smoke test only: performs the discovery and logs the resolved values; no assertions.
@Test
public void testTwitterMonitorDatabaseDiscovery() throws Exception{
ServiceEndpointDiscovery databaseDiscovery = new ServiceEndpointDiscovery(SERVICE_ENDPOINT_CATEGORY, SERVICE_ENDPOINT_NAME, ENTRY_NAME);
logger.info("{}", databaseDiscovery.getUsername());
// NOTE(review): this logs a plaintext password — acceptable only in throwaway test runs; confirm.
logger.info("{}", databaseDiscovery.getPassword());
logger.info("{}", databaseDiscovery.getURL());
}
}

View File

@ -6,11 +6,11 @@ package org.gcube.documentstore.persistence;
import java.util.concurrent.TimeUnit;
import org.gcube.accounting.datamodel.UsageRecord;
import org.gcube.accounting.datamodel.usagerecords.TestUsageRecord;
import org.gcube.documentstore.exception.InvalidValueException;
import org.gcube.testutility.ScopedTest;
import org.gcube.testutility.StressTestUtility;
import org.gcube.testutility.TestOperation;
import org.gcube.testutility.TestUsageRecord;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -6,11 +6,11 @@ package org.gcube.documentstore.persistence;
import java.util.Calendar;
import org.gcube.accounting.datamodel.UsageRecord;
import org.gcube.accounting.datamodel.usagerecords.TestUsageRecord;
import org.gcube.accounting.persistence.AccountingPersistenceFactory;
import org.gcube.testutility.ScopedTest;
import org.gcube.testutility.StressTestUtility;
import org.gcube.testutility.TestOperation;
import org.gcube.testutility.TestUsageRecord;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;

View File

@ -3,16 +3,18 @@
*/
package org.gcube.documentstore.persistence;
import org.gcube.accounting.datamodel.usagerecords.TestUsageRecord;
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
import org.gcube.documentstore.records.DSMapper;
import org.gcube.documentstore.records.Record;
import org.gcube.testutility.ScopedTest;
import org.gcube.testutility.TestUsageRecord;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.couchbase.client.deps.com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.JsonNode;
/**
* @author Luca Frosini (ISTI - CNR)
@ -44,7 +46,7 @@ public class PersistenceCouchBaseTest extends ScopedTest {
@Test
public void testJsonNodeUsageRecordConversions() throws Exception {
Record record = TestUsageRecord.createTestServiceUsageRecord();
logger.debug("UsageRecord : {}", record.toString());
logger.debug("UsageRecord : {}", DSMapper.marshal(record));
JsonNode node = PersistenceCouchBase.usageRecordToJsonNode(record);
logger.debug("Node : {}", node.toString());
Record r = PersistenceCouchBase.jsonNodeToUsageRecord(node);

View File

@ -10,10 +10,10 @@ import java.io.IOException;
import java.io.PrintWriter;
import java.util.Calendar;
import org.gcube.accounting.datamodel.usagerecords.TestUsageRecord;
import org.gcube.accounting.persistence.AccountingPersistence;
import org.gcube.accounting.persistence.AccountingPersistenceFactory;
import org.gcube.testutility.ScopedTest;
import org.gcube.testutility.TestUsageRecord;
import org.junit.Test;
/**

View File

@ -1,5 +0,0 @@
package org.gcube.documentstore.records;
// Empty placeholder test class for DSMapper; no test cases were ever added
// (the enclosing hunk "@ -1,5 +0,0" shows it is being deleted by this commit).
public class DSMapperTest {
}

View File

@ -25,57 +25,6 @@ public class RecordUtilityTest {
private static Logger logger = LoggerFactory.getLogger(RecordUtilityTest.class);
// Regression test for importing a legacy (pre-JSON) record serialization:
// a "{key=value, ...}" string is parsed by RecordUtility.getRecord and must
// come back as an AggregatedStorageUsageRecord with every field preserved.
@Test
public void testImportOldRecord() throws Exception {
// Legacy record line in the old toString()-like format (key=value pairs, comma separated).
String line = "{"
+ "resourceScope=/gcube/devsec, "
+ "scope=/gcube/devsec, "
+ "operationCount=1, "
+ "usageRecordType=StorageUsageRecord, "
+ "endTime=1448447153009, "
+ "consumerId=CSV, "
+ "startTime=1448447153009, "
+ "id=c7bab219-4024-4019-a8ad-ff5f342b439b, "
+ "dataVolume=68, "
+ "dataType=STORAGE, "
+ "resourceOwner=CSV, "
+ "operationResult=SUCCESS, "
+ "resourceURI=testprotocol://objectURI, "
+ "operationType=CREATE, "
+ "aggregated=true, "
+ "creationTime=1448447153096, "
+ "providerURI=data.d4science.org}";
logger.debug(line);
Record record = RecordUtility.getRecord(line);
// "aggregated=true" in the input must drive deserialization to the aggregated subtype.
Assert.assertTrue(record instanceof AggregatedStorageUsageRecord);
AggregatedStorageUsageRecord aggregatedStorageUsageRecord = (AggregatedStorageUsageRecord) record;
// Field-by-field round-trip checks against the literal values embedded in `line`.
Assert.assertTrue(aggregatedStorageUsageRecord.getResourceScope().compareTo("/gcube/devsec")==0);
Assert.assertTrue(aggregatedStorageUsageRecord.getScope().compareTo("/gcube/devsec")==0);
Assert.assertTrue(aggregatedStorageUsageRecord.getOperationCount()==1);
//
Assert.assertTrue(aggregatedStorageUsageRecord.getRecordType().compareTo("StorageUsageRecord")==0);
Assert.assertTrue(aggregatedStorageUsageRecord.getEndTime().getTimeInMillis()==(new Long("1448447153009")));
Assert.assertTrue(aggregatedStorageUsageRecord.getConsumerId().compareTo("CSV")==0);
Assert.assertTrue(aggregatedStorageUsageRecord.getStartTime().getTimeInMillis()==(new Long("1448447153009")));
Assert.assertTrue(aggregatedStorageUsageRecord.getId().compareTo("c7bab219-4024-4019-a8ad-ff5f342b439b")==0);
Assert.assertTrue(aggregatedStorageUsageRecord.getDataVolume()==68);
Assert.assertTrue(aggregatedStorageUsageRecord.getDataType()==DataType.STORAGE);
Assert.assertTrue(aggregatedStorageUsageRecord.getResourceOwner().compareTo("CSV")==0);
Assert.assertTrue(aggregatedStorageUsageRecord.getOperationResult()==OperationResult.SUCCESS);
Assert.assertTrue(aggregatedStorageUsageRecord.getResourceURI().compareTo(new URI("testprotocol://objectURI"))==0);
Assert.assertTrue(aggregatedStorageUsageRecord.getOperationType()==OperationType.CREATE);
Assert.assertTrue((Boolean) aggregatedStorageUsageRecord.getResourceProperty(AggregatedStorageUsageRecord.AGGREGATED));
Assert.assertTrue(aggregatedStorageUsageRecord.getCreationTime().getTimeInMillis()==(new Long("1448447153096")));
Assert.assertTrue(aggregatedStorageUsageRecord.getProviderURI().compareTo(new URI("data.d4science.org"))==0);
logger.debug("{}", aggregatedStorageUsageRecord);
}
@Test
public void test() {
Assert.assertTrue(Record.class.isAssignableFrom(ServiceUsageRecord.class));

View File

@ -9,13 +9,13 @@ import java.util.Map;
import org.gcube.accounting.datamodel.UsageRecord;
import org.gcube.accounting.datamodel.usagerecords.ServiceUsageRecord;
import org.gcube.accounting.datamodel.usagerecords.TestUsageRecord;
import org.gcube.documentstore.persistence.PersistenceExecutor;
import org.gcube.documentstore.records.AggregatedRecord;
import org.gcube.documentstore.records.Record;
import org.gcube.testutility.ScopedTest;
import org.gcube.testutility.StressTestUtility;
import org.gcube.testutility.TestOperation;
import org.gcube.testutility.TestUsageRecord;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;

View File

@ -1,21 +0,0 @@
package org.gcube.informationsystem.resource_checker.utils;
import java.util.Map;
import org.gcube.vremanagement.executor.plugin.PluginStateEvolution;
import org.gcube.vremanagement.executor.plugin.PluginStateNotification;
/**
 * Stub {@link PluginStateNotification} implementation; both the constructor and
 * the state-change callback were auto-generated and never filled in
 * (the enclosing hunk "@ -1,21 +0,0" shows this commit deletes the class).
 */
public class SendNotification extends PluginStateNotification{
public SendNotification(Map<String, String> inputs) {
super(inputs);
// TODO Auto-generated constructor stub
}
// No-op: notifications about plugin state evolution are silently discarded.
@Override
public void pluginStateEvolution(PluginStateEvolution pluginStateEvolution, Exception exception) throws Exception {
// TODO Auto-generated method stub
}
}

View File

@ -1,116 +0,0 @@
/**
*
*/
package org.gcube.test;
import java.io.IOException;
import java.io.StringWriter;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import org.gcube.common.resources.gcore.GenericResource;
import org.gcube.common.resources.gcore.Resource;
import org.gcube.common.resources.gcore.Resources;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.informationsystem.publisher.RegistryPublisherFactory;
import org.gcube.informationsystem.publisher.ScopedPublisher;
import org.gcube.informationsystem.publisher.exception.RegistryNotFoundException;
import org.gcube.informationsystem.resourceregistry.api.exceptions.ExceptionMapper;
import org.gcube.informationsystem.resourceregistry.api.exceptions.ResourceRegistryException;
import org.gcube.informationsystem.resourceregistry.api.exceptions.entity.EntityAlreadyPresentException;
import org.gcube.testutility.ScopedTest;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR)
*
*/
/**
 * Scratchpad test class mixing unrelated experiments: classpath probing,
 * IS resource publication, exception (un)marshalling and ServiceEndpoint
 * discovery. Deleted by this commit (hunk "@ -1,116 +0,0").
 */
public class TempTest extends ScopedTest {
private static final Logger logger = LoggerFactory.getLogger(TempTest.class);
// Checks whether a given provider class is on the classpath; logs either way, never fails.
@Test
public void test(){
try {
Class.forName("com.duke.MyLocaleServiceProvider");
logger.debug("OK");
} catch (ClassNotFoundException e) {
logger.error("Not found", e);
}
}
// Marshals the resource and publishes it to every scope in `scopes`;
// rethrows RegistryNotFoundException so callers can react to a missing registry.
private static void publishScopedResource(Resource resource, List<String> scopes) throws RegistryNotFoundException, Exception {
StringWriter stringWriter = new StringWriter();
Resources.marshal(resource, stringWriter);
ScopedPublisher scopedPublisher = RegistryPublisherFactory.scopedPublisher();
try {
logger.debug("Trying to publish to {}:\n{}", scopes, stringWriter);
scopedPublisher.create(resource, scopes);
} catch (RegistryNotFoundException e) {
logger.error("The resource was not published", e);
throw e;
}
}
// Builds a throwaway GenericResource and publishes it in the current scope ("/gcube").
@Test
public void anotherTest(){
ScopeProvider.instance.set("/gcube");
try {
GenericResource a = new GenericResource();
List<String> b = new LinkedList<>();
String scope = ScopeProvider.instance.get();
b.add(scope);
a.newProfile().name(UUID.randomUUID().toString()).type("FHN-nodes").description("FHN node");
a.profile().newBody("<name>Nunzio</name>");
try {
publishScopedResource(a,b);
} catch (Exception e) {
// TODO Auto-generated catch block
// NOTE(review): printStackTrace swallows the failure — prefer logger.error + rethrow.
e.printStackTrace();
}
logger.debug("OK");
} catch (Exception e) {
logger.error("Not found", e);
}
}
// Round-trips a ResourceRegistryException through ExceptionMapper marshal/unmarshal; no assertions.
@Test
public void exceptionSerilizationTest() throws IOException{
//ResourceRegistryException rre = new ResourceRegistryException("Test");
ResourceRegistryException rre = new EntityAlreadyPresentException("Aux");
String jsonString = ExceptionMapper.marshal(rre);
logger.debug(jsonString);
rre = ExceptionMapper.unmarshal(ResourceRegistryException.class, jsonString);
// NOTE(review): message lacks a "{}" placeholder, so `rre` is never printed by SLF4J.
logger.debug("Unmarshalled exception is ", rre);
}
// ServiceEndpoint discovery coordinates (duplicated in ServiceEndpointInfo elsewhere in this commit).
public static final String SERVICE_ENDPOINT_CATEGORY = "Database";
public static final String SERVICE_ENDPOINT_NAME = "TwitterMonitorDatabase";
public static final String ENTRY_NAME = "postgress";
// Discovers the TwitterMonitor database endpoint and logs its credentials; smoke test only.
@Test
public void testTwitterMonitorDatabaseDiscovery() throws Exception{
// NOTE(review): DatabaseDiscovery is renamed to ServiceEndpointDiscovery by this same commit;
// this reference only compiles against the pre-rename class.
DatabaseDiscovery databaseDiscovery = new DatabaseDiscovery(SERVICE_ENDPOINT_CATEGORY, SERVICE_ENDPOINT_NAME, ENTRY_NAME);
logger.info("{}", databaseDiscovery.getUsername());
logger.info("{}", databaseDiscovery.getPassword());
logger.info("{}", databaseDiscovery.getURL());
}
}

View File

@ -1,50 +0,0 @@
/**
*
*/
package org.gcube.vremanagement.executor.client;
import java.util.List;
import org.gcube.testutility.ScopedTest;
//import org.gcube.dataanalysis.executor.plugin.GenericWorkerPluginDeclaration;
import org.gcube.vremanagement.executor.client.plugins.ExecutorPlugin;
import org.gcube.vremanagement.executor.client.plugins.query.SmartExecutorPluginQuery;
import org.gcube.vremanagement.executor.client.plugins.query.filter.ListEndpointDiscoveryFilter;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR)
*
*/
/**
 * Test that runs a SmartExecutor plugin discovery query and logs the endpoints found.
 * Generic-worker specific filtering is commented out, so the query is unfiltered.
 * Deleted by this commit (hunk "@ -1,50 +0,0").
 */
public class SmartGenericWorkerDiscoveryQuery extends ScopedTest {
private static Logger logger = LoggerFactory.getLogger(SmartGenericWorkerDiscoveryQuery.class);
// Smoke test: discovers SmartExecutor endpoints without plugin-name or ServiceEndpoint filters; no assertions.
@Test
public void testGenericWorkerDiscoveryQuery() throws Exception {
//GenericWorkerPluginDeclaration gwpd = new GenericWorkerPluginDeclaration();
ExecutorPlugin executorPlugin = new ExecutorPlugin();
SmartExecutorPluginQuery query = new SmartExecutorPluginQuery(executorPlugin);
/*
add key_value filter here
* Tuple<String, String>[] tuples = new Tuple[n];
*
* runQuery.addConditions(pluginName, tuples);
*/
//query.addConditions(gwpd.getName());
/* Used to add extra filter to ServiceEndpoint discovery */
// Null filter means every matching ServiceEndpoint is accepted.
query.setServiceEndpointQueryFilter(null);
List<String> nodes = query.discoverEndpoints(new ListEndpointDiscoveryFilter());
logger.debug("Found the following nodes: {}", nodes);
}
}