Added old tests of Alessandro Pieve

This commit is contained in:
Luca Frosini 2019-07-25 15:44:20 +02:00
parent c4bfafe1b8
commit 6ef174cab1
25 changed files with 2290 additions and 0 deletions

View File

@ -0,0 +1,5 @@
package org.gcube.pieve;
public class Test {
}

View File

@ -0,0 +1,51 @@
/**
*
*/
package org.gcube.pieve.accounting.datamodel.records.aggregation;
import java.util.ArrayList;
import java.util.List;
import org.gcube.accounting.datamodel.usagerecords.JobUsageRecord;
import org.gcube.accounting.datamodel.usagerecords.PortletUsageRecord;
import org.gcube.accounting.datamodel.usagerecords.ServiceUsageRecord;
import org.gcube.accounting.datamodel.usagerecords.StorageUsageRecord;
import org.gcube.documentstore.records.AggregatedRecord;
import org.gcube.documentstore.records.Record;
import org.gcube.documentstore.records.RecordUtility;
import org.gcube.pieve.accounting.datamodel.usagerecords.TaskUsageRecord;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR) http://www.lucafrosini.com/
*
*/
public class RecordUtilityTest {
private static Logger logger = LoggerFactory.getLogger(RecordUtilityTest.class);
@Test
public void recordUtilityTest() {
List<Class<? extends Record>> recordClasses = new ArrayList<>();
recordClasses.add(ServiceUsageRecord.class);
recordClasses.add(StorageUsageRecord.class);
recordClasses.add(JobUsageRecord.class);
recordClasses.add(TaskUsageRecord.class);
recordClasses.add(PortletUsageRecord.class);
for(Class<? extends Record> recordClass : recordClasses){
try {
@SuppressWarnings("rawtypes")
Class<? extends AggregatedRecord> aggregatedClass = RecordUtility.getAggregatedRecordClass(recordClass.getSimpleName());
logger.error("Aggregated Record Class for {} is {}",
recordClass.getSimpleName(),
aggregatedClass.getName());
} catch (ClassNotFoundException e) {
logger.error("Error getting Aggregated Record Class for {}",
recordClass.getSimpleName(), e);
}
}
}
}

View File

@ -0,0 +1,98 @@
/**
*
*/
package org.gcube.pieve.accounting.datamodel.usagerecords;
import java.util.concurrent.TimeUnit;
import org.gcube.accounting.datamodel.usagerecords.JobUsageRecord;
import org.gcube.accounting.datamodel.usagerecords.PortletUsageRecord;
import org.gcube.accounting.datamodel.usagerecords.ServiceUsageRecord;
import org.gcube.accounting.datamodel.usagerecords.StorageUsageRecord;
import org.gcube.accounting.persistence.AccountingPersistence;
import org.gcube.accounting.persistence.AccountingPersistenceFactory;
import org.gcube.documentstore.exception.InvalidValueException;
import org.gcube.documentstore.exception.NotAggregatableRecordsExceptions;
import org.gcube.pieve.accounting.datamodel.usagerecords.TaskUsageRecord;
import org.gcube.pieve.testutility.TestUsageRecord;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR) http://www.lucafrosini.com/
*
*/
public class AccountingTest {
private static Logger logger = LoggerFactory.getLogger(AccountingTest.class);
private AccountingPersistence accountingPersistence;
@Before
public void before(){
accountingPersistence = AccountingPersistenceFactory.getPersistence();
}
@After
public void after(){
try {
accountingPersistence.flush(1000, TimeUnit.MILLISECONDS);
} catch (Exception e) {
logger.error("Error flushing Buffered Records", e);
}
}
@Test
public void accountingServiceUsageRecordStressTest() throws InvalidValueException, NotAggregatableRecordsExceptions {
for(int i=0; i<2; i++){
ServiceUsageRecord sur = TestUsageRecord.createTestServiceUsageRecord();
sur.setScope(TestUsageRecord.TEST_SCOPE);
accountingPersistence.account(sur);
}
}
@Test
public void accountingStorageUsageRecordStressTest() throws InvalidValueException, NotAggregatableRecordsExceptions {
for(int i=0; i<1000; i++){
StorageUsageRecord sur = org.gcube.pieve.testutility.TestUsageRecord.createTestStorageUsageRecord();
sur.setScope(TestUsageRecord.TEST_SCOPE);
accountingPersistence.account(sur);
}
}
@Test
public void accountingJobUsageRecordStressTest() throws InvalidValueException, NotAggregatableRecordsExceptions {
for(int i=0; i<1000; i++){
JobUsageRecord jur = TestUsageRecord.createTestJobUsageRecord();
jur.setScope(TestUsageRecord.TEST_SCOPE);
accountingPersistence.account(jur);
}
}
@Test
public void accountingPortletUsageRecordStressTest() throws InvalidValueException, NotAggregatableRecordsExceptions {
for(int i=0; i<1000; i++){
PortletUsageRecord pur = TestUsageRecord.createTestPortletUsageRecord();
pur.setScope(TestUsageRecord.TEST_SCOPE);
accountingPersistence.account(pur);
}
}
@Test
public void accountingTaskUsageRecordStressTest() throws InvalidValueException, NotAggregatableRecordsExceptions {
for(int i=0; i<1000; i++){
TaskUsageRecord tur = TestUsageRecord.createTestTaskUsageRecord();
tur.setScope(TestUsageRecord.TEST_SCOPE);
accountingPersistence.account(tur);
}
}
}

View File

@ -0,0 +1,158 @@
/**
*
*/
package org.gcube.pieve.accounting.persistence;
import java.io.StringWriter;
import java.net.URL;
import org.gcube.accounting.persistence.AccountingPersistenceConfiguration;
import org.gcube.common.resources.gcore.Resources;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.Profile;
import org.gcube.common.resources.gcore.ServiceEndpoint.Property;
import org.gcube.common.resources.gcore.ServiceEndpoint.Runtime;
import org.gcube.common.resources.gcore.common.Platform;
import org.gcube.common.resources.gcore.utils.Group;
import org.gcube.documentstore.persistence.PersistenceBackend;
import org.gcube.pieve.testutility.ScopedTest;
import org.gcube.pieve.testutility.TestUtility;
import org.junit.Assert;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR) http://www.lucafrosini.com/
*
*/
public class AccountingPersistenceConfigurationTest extends ScopedTest {
private static final Logger logger = LoggerFactory.getLogger(AccountingPersistenceConfigurationTest.class);
private static final String PROFILE_DESCRIPTION = "This ServiceEndpoint contains the parameter to connect to DB to persist log accounting";
private static final String HOSTED_ON = "pc-frosini.isti.cnr.it";
private static final String ENDPOINT = "http://localhost:5984";
private static final String READY = "READY";
private static final String PLATFORM_NAME = "Platform Name";
private static final String TEST_VERSION = "1.0.0";
private static final short[] VERSION_SLICES = new short[]{1,6,0,0};
private static final String DESCRIPTION = "Persistence Configuration Test";
private static final String FAKE_USERNAME = "fakeusername";
private static final String FAKE_PASSWORD = "fakepassword";
private static final String DB_NAME_PROPERTY_KEY = "dbName";
private static final String DB_NAME_PROPERTY_VALUE = "accounting";
private abstract class AccountingPersistenceFakeDB extends PersistenceBackend {
}
/**
* Create the Service Endpoint using information related to discovered
* available plugins and their own discovered capabilities
* @return the created {@link ServiceEndpoint}
*/
protected static ServiceEndpoint createServiceEndpoint(){
logger.debug("Getting Available Plugins and their own supported capabilities");
logger.debug("Creating ServiceEndpoint to publish on IS available plugins and their own supported capabilities");
ServiceEndpoint serviceEndpoint = new ServiceEndpoint();
Profile profile = serviceEndpoint.newProfile();
profile.category(AccountingPersistenceConfiguration.SERVICE_ENDPOINT_CATEGORY);
profile.name(AccountingPersistenceConfiguration.SERVICE_ENDPOINT_NAME);
profile.version(TEST_VERSION);
profile.description(PROFILE_DESCRIPTION);
Platform platform = profile.newPlatform();
platform.name(PLATFORM_NAME);
platform.version(VERSION_SLICES[0]);
platform.minorVersion(VERSION_SLICES[1]);
platform.buildVersion(VERSION_SLICES[2]);
platform.revisionVersion(VERSION_SLICES[3]);
Runtime runtime = profile.newRuntime();
runtime.hostedOn(HOSTED_ON);
runtime.status(READY);
Group<AccessPoint> accessPoints = profile.accessPoints();
AccessPoint accessPointElement = new AccessPoint();
accessPoints.add(accessPointElement);
accessPointElement.description(DESCRIPTION);
accessPointElement.credentials(FAKE_USERNAME, FAKE_PASSWORD);
accessPointElement.address(ENDPOINT);
accessPointElement.name(AccountingPersistenceFakeDB.class.getSimpleName());
Group<Property> properties = accessPointElement.properties();
Property dbName = new Property();
dbName.nameAndValue(DB_NAME_PROPERTY_KEY, DB_NAME_PROPERTY_VALUE);
dbName.encrypted(false);
properties.add(dbName);
StringWriter stringWriter = new StringWriter();
Resources.marshal(serviceEndpoint, stringWriter);
logger.debug("The created ServiceEndpoint profile is\n{}", stringWriter.toString());
return serviceEndpoint;
}
public void testPersistenceConfigurationFromIS() throws Exception{
boolean createResource = true;
ServiceEndpoint serviceEndpoint = null;
if(createResource){
serviceEndpoint = createServiceEndpoint();
TestUtility.publishResource(serviceEndpoint);
}
Thread.sleep(5000); // Waiting 5 sec
try {
AccountingPersistenceConfiguration persitenceConfiguration = new AccountingPersistenceConfiguration(AccountingPersistenceFakeDB.class);
if(createResource){
String uri = persitenceConfiguration.getProperty(AccountingPersistenceConfiguration.URL_PROPERTY_KEY);
Assert.assertTrue(uri.compareTo(new URL(ENDPOINT).toString())==0);
String username = persitenceConfiguration.getProperty(AccountingPersistenceConfiguration.USERNAME_PROPERTY_KEY);
Assert.assertTrue(username.compareTo(FAKE_USERNAME)==0);
String password = persitenceConfiguration.getProperty(AccountingPersistenceConfiguration.PASSWORD_PROPERTY_KEY);
Assert.assertTrue(password.compareTo(FAKE_PASSWORD)==0);
String dbName = persitenceConfiguration.getProperty(DB_NAME_PROPERTY_KEY);
Assert.assertTrue(dbName.compareTo(DB_NAME_PROPERTY_VALUE)==0);
}
} finally {
if(createResource){
TestUtility.unPublishResource(serviceEndpoint);
}
}
}
public void getUsernamePasswordForScopes() throws Exception{
logger.debug("START ======================================================");
try {
AccountingPersistenceConfiguration persitenceConfiguration = new AccountingPersistenceConfiguration(AccountingPersistenceFakeDB.class);
String uri = persitenceConfiguration.getProperty(AccountingPersistenceConfiguration.URL_PROPERTY_KEY);
String username = persitenceConfiguration.getProperty(AccountingPersistenceConfiguration.USERNAME_PROPERTY_KEY);
String password = persitenceConfiguration.getProperty(AccountingPersistenceConfiguration.PASSWORD_PROPERTY_KEY);
logger.debug("{} - {} - {} - {}", TestUtility.getScope(), uri, username, password);
}catch(IndexOutOfBoundsException e){
logger.debug("No AccountingPersistenceConfiguration : \n {} {} \n\n", e.getClass().getName(), e.getMessage());
} catch(Exception e){
logger.error("Error getting AccountingPersistenceConfiguration", e);
throw e;
} finally {
logger.debug(" END ======================================================");
}
}
}

View File

@ -0,0 +1,125 @@
/**
*
*/
package org.gcube.pieve.accounting.persistence;
import java.io.StringWriter;
import java.util.Calendar;
import java.util.concurrent.TimeUnit;
import org.gcube.accounting.persistence.AccountingPersistenceConfiguration;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.resources.gcore.Resource;
import org.gcube.common.resources.gcore.Resources;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.documentstore.persistence.PersistenceBackend;
import org.gcube.documentstore.persistence.PersistenceBackendFactory;
//import org.gcube.documentstore.persistence.PersistenceCouchBase;
import org.gcube.informationsystem.publisher.RegistryPublisher;
import org.gcube.informationsystem.publisher.RegistryPublisherFactory;
import org.gcube.pieve.documentstore.persistence.PersistenceCouchBase;
import org.gcube.pieve.testutility.ScopedTest;
import org.gcube.pieve.testutility.TestUtility;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR) http://www.lucafrosini.com/
*
*/
public class PersistenceCouchBaseTest extends ScopedTest {
private static final Logger logger = LoggerFactory.getLogger(PersistenceCouchBaseTest.class);
public static final long timeout = 5000;
public static final TimeUnit timeUnit = TimeUnit.MILLISECONDS;
public static PersistenceBackend getPersistence(){
PersistenceBackendFactory.setFallbackLocation(null);
return PersistenceBackendFactory.getPersistenceBackend(TestUtility.getScope());
}
@Test
public void persistenceIsCouchBase() {
PersistenceBackend persistence = getPersistence();
Assert.assertTrue(persistence instanceof PersistenceCouchBase);
}
private static void publishResource(Resource resource) throws Exception {
StringWriter stringWriter = new StringWriter();
Resources.marshal(resource, stringWriter);
RegistryPublisher registryPublisher = RegistryPublisherFactory.create();
try {
logger.debug("Trying to publish to {}:\n{}", TestUtility.getScope(), stringWriter);
registryPublisher.create(resource);
} catch (Exception e) {
logger.error("The resource was not published", e);
throw e;
}
}
private static void unPublishResource(Resource resource) throws Exception {
//StringWriter stringWriter = new StringWriter();
//Resources.marshal(resource, stringWriter);
RegistryPublisher registryPublisher = RegistryPublisherFactory.create();
String id = resource.id();
logger.debug("Trying to remove {} with ID {} from {}",
resource.getClass().getSimpleName(), id,
TestUtility.getScope());
registryPublisher.remove(resource);
logger.debug("{} with ID {} removed successfully", resource.getClass().getSimpleName(), id);
}
public void testScopeRecheck() throws Exception {
ServiceEndpoint serviceEndpoint = null;
try {
AccountingPersistenceConfiguration persitenceConfiguration = new AccountingPersistenceConfiguration(PersistenceCouchBase.class);
serviceEndpoint = persitenceConfiguration.getServiceEndpoint(
AccountingPersistenceConfiguration.SERVICE_ENDPOINT_CATEGORY,
AccountingPersistenceConfiguration.SERVICE_ENDPOINT_NAME,
PersistenceCouchBase.class);
unPublishResource(serviceEndpoint);
}catch(IndexOutOfBoundsException e){
SecurityTokenProvider.instance.set(TestUtility.PARENT_TOKEN);
AccountingPersistenceConfiguration persitenceConfiguration = new AccountingPersistenceConfiguration(PersistenceCouchBase.class);
serviceEndpoint = persitenceConfiguration.getServiceEndpoint(
AccountingPersistenceConfiguration.SERVICE_ENDPOINT_CATEGORY, AccountingPersistenceConfiguration.SERVICE_ENDPOINT_NAME,
PersistenceCouchBase.class);
SecurityTokenProvider.instance.set(TestUtility.TOKEN);
}
long startTime = Calendar.getInstance().getTimeInMillis();
long endTime = startTime;
while(endTime <= (startTime + 10*1000)){ // 10 sec
endTime = Calendar.getInstance().getTimeInMillis();
}
logger.debug("Going to check First Time");
PersistenceBackend first = PersistenceBackendFactory.getPersistenceBackend(TestUtility.getScope());
logger.debug("First {} : {}", PersistenceBackend.class.getSimpleName(), first);
publishResource(serviceEndpoint);
startTime = Calendar.getInstance().getTimeInMillis();
endTime = startTime;
while(endTime <= (startTime + (PersistenceBackendFactory.FALLBACK_RETRY_TIME + 100))){
endTime = Calendar.getInstance().getTimeInMillis();
}
logger.debug("Going to check Second Time");
PersistenceBackend second = PersistenceBackendFactory.getPersistenceBackend(TestUtility.getScope());
logger.debug("Second {} : {}", PersistenceBackend.class.getSimpleName(), second);
Assert.assertNotEquals(first, second);
}
}

View File

@ -0,0 +1,99 @@
/**
*
*/
package org.gcube.pieve.accounting.verify.plugin;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import org.gcube.pieve.testutility.ScopedTest;
import org.gcube.vremanagement.executor.api.types.LaunchParameter;
import org.gcube.vremanagement.executor.api.types.Scheduling;
import org.gcube.vremanagement.executor.client.plugins.ExecutorPlugin;
import org.gcube.vremanagement.executor.client.proxies.SmartExecutorProxy;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AccountingVerifyPluginSmartExecutorSchedulerTest extends ScopedTest {
private static Logger logger = LoggerFactory.getLogger(AccountingVerifyPluginSmartExecutorSchedulerTest.class);
private SmartExecutorProxy proxy;
@Before
public void before() throws Exception{
super.before();
//ScopeProvider.instance.reset(); // Comment this to run the test. this line has been added to avoid unwanted launch
//SecurityTokenProvider.instance.set(TestUtility.TOKEN);
//ScopeProvider.instance.set("/gcube/devNext");
proxy = ExecutorPlugin.getExecutorProxy("Accouting-Verify-Insert-Couchbase-Plugin").build();
Assert.assertNotNull(proxy);
}
public UUID scheduleTest(Scheduling scheduling) throws Exception {
Map<String, Object> inputs = new HashMap<String, Object>();
logger.debug("Inputs : {}", inputs);
inputs.put("interval",4);
LaunchParameter parameter = new LaunchParameter("Accouting-Verify-Insert-Couchbase-Plugin", inputs);
parameter.setScheduling(scheduling);
try {
String uuidString = proxy.launch(parameter);
return UUID.fromString(uuidString);
} catch(Exception e){
logger.error("Error launching sheduled task", e);
throw e;
}
}
@Test
public void LaunchTest() throws Exception {
Map<String, Object> inputs = new HashMap<String, Object>();
logger.debug("Inputs : {}", inputs);
inputs.put("interval",4);
//specify a recovery 0 default recovery and aggregate, 1 only aggregate, 2 only recovery
LaunchParameter parameter = new LaunchParameter("Accouting-Verify-Insert-Couchbase-Plugin", inputs);
//parameter.setScheduling(scheduling);
try {
String uuidString = proxy.launch(parameter);
logger.debug("Launched with UUID : {}", uuidString);
} catch(Exception e){
logger.error("Error launching sheduled task", e);
throw e;
}
}
@Test
public void cronExpPreviousMustBeTerminated() throws Exception {
//CronExpression cronExpression = new CronExpression("0 10 10 * * ?"); // every day at 10:10
CronExpression cronExpression = new CronExpression("0 0 0/3 * * ?");
Scheduling scheduling = new Scheduling(cronExpression, true);
scheduling.setGlobal(true);
UUID uuid = scheduleTest(scheduling);
logger.debug("Launched with UUID : {}", uuid);
}
@Test
public void unSchedule() throws Exception {
proxy.unSchedule(null, true);
}
@Test
public void stop() throws Exception {
proxy.stop("Accouting-Aggregator-Plugin");
}
}

View File

@ -0,0 +1,128 @@
/**
*
*/
package org.gcube.pieve.accountinginsert.plugin;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import org.gcube.pieve.testutility.ScopedTest;
import org.gcube.vremanagement.executor.api.types.LaunchParameter;
import org.gcube.vremanagement.executor.api.types.Scheduling;
import org.gcube.vremanagement.executor.client.plugins.ExecutorPlugin;
import org.gcube.vremanagement.executor.client.proxies.SmartExecutorProxy;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AccountingInsertPluginSmartExecutorSchedulerTest extends ScopedTest {
private static Logger logger = LoggerFactory.getLogger(AccountingInsertPluginSmartExecutorSchedulerTest.class);
private SmartExecutorProxy proxy;
@Before
public void before() throws Exception{
super.before();
proxy = ExecutorPlugin.getExecutorProxy("Accounting-Insert-Storage").build();
Assert.assertNotNull(proxy);
}
public UUID scheduleTest(Scheduling scheduling) throws Exception {
Map<String, Object> inputs = new HashMap<String, Object>();
logger.debug("Inputs : {}", inputs);
inputs.put("dataServiceClass","content-management");
inputs.put("dataServiceName","storage-manager");
inputs.put("dataServiceId","identifier");
inputs.put("uri","MongoDb");
inputs.put("dataType","STORAGE");
LaunchParameter parameter = new LaunchParameter("Accounting-Insert-Storage", inputs);
parameter.setScheduling(scheduling);
try {
String uuidString = proxy.launch(parameter);
return UUID.fromString(uuidString);
} catch(Exception e){
logger.error("Error launching sheduled task", e);
throw e;
}
}
@Test
public void launch() {
Map<String, Object> inputs = new HashMap<String, Object>();
LaunchParameter launchParameter = new LaunchParameter("Test", inputs);
try {
proxy.launch(launchParameter);
} catch (Exception e) {
logger.error("Error launching sheduled task", e);
//throw e;
}
}
@Test
public void LaunchTest() throws Exception {
Map<String, Object> inputs = new HashMap<String, Object>();
/*optional*/
inputs.put("dataServiceClass","content-management");
inputs.put("dataServiceName","storage-manager");
inputs.put("dataServiceId","identifier");
inputs.put("uri","MongoDb");
inputs.put("dataType","STORAGE");
LaunchParameter parameter = new LaunchParameter("Accounting-Insert-Storage", inputs);
//parameter.setScheduling(scheduling);
try {
String uuidString = proxy.launch(parameter);
logger.debug("Launched with UUID : {}", uuidString);
} catch(Exception e){
logger.error("Error launching sheduled task", e);
throw e;
}
}
@Test
public void cronExpPreviousMustBeTerminated() throws Exception {
CronExpression cronExpression = new CronExpression("0 0 6 * * ?"); // every day at 2:00
//Launched with UUID : df3ca103-dc0a-477d-b7fe-1445c6984e6d
Scheduling scheduling = new Scheduling(cronExpression, true);
scheduling.setGlobal(true);
UUID uuid = scheduleTest(scheduling);
logger.debug("Launched with UUID : {}", uuid);
}
@Test
public void unSchedule() throws Exception {
//proxy.unSchedule("fde48bd2-097d-4196-8a81-b9304534c76b", true);
//
}
@Test
public void stop() throws Exception {
proxy.stop("Accouting-Aggregator-Plugin");
}
}

View File

@ -0,0 +1,128 @@
/**
*
*/
package org.gcube.pieve.accountinginsertRstudio.plugin;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import org.gcube.pieve.testutility.ScopedTest;
import org.gcube.vremanagement.executor.api.types.LaunchParameter;
import org.gcube.vremanagement.executor.api.types.Scheduling;
import org.gcube.vremanagement.executor.client.plugins.ExecutorPlugin;
import org.gcube.vremanagement.executor.client.proxies.SmartExecutorProxy;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AccountingInsertRstudioPluginSmartExecutorSchedulerTest extends ScopedTest {
private static Logger logger = LoggerFactory.getLogger(AccountingInsertRstudioPluginSmartExecutorSchedulerTest.class);
private SmartExecutorProxy proxy;
@Before
public void before() throws Exception{
super.before();
//ScopeProvider.instance.reset(); // Comment this to run the test. this line has been added to avoid unwanted launch
//SecurityTokenProvider.instance.set(TestUtility.TOKEN);
//ScopeProvider.instance.set("/gcube/devNext");
proxy = ExecutorPlugin.getExecutorProxy("Accounting-Insert-RStudio").build();
Assert.assertNotNull(proxy);
}
public UUID scheduleTest(Scheduling scheduling) throws Exception {
Map<String, Object> inputs = new HashMap<String, Object>();
logger.debug("Inputs : {}", inputs);
inputs.put("dataServiceClass","content-management");
inputs.put("dataServiceName","storage-manager");
inputs.put("uri","Rstudio");
inputs.put("dataType","STORAGE");
inputs.put("unitVolume","Kilobyte");
inputs.put("pathFile","/srv/d4science/home_disk_space");
LaunchParameter parameter = new LaunchParameter("Accounting-Insert-RStudio", inputs);
parameter.setScheduling(scheduling);
try {
String uuidString = proxy.launch(parameter);
return UUID.fromString(uuidString);
} catch(Exception e){
logger.error("Error launching sheduled task", e);
throw e;
}
}
@Test
public void launch() {
Map<String, Object> inputs = new HashMap<String, Object>();
LaunchParameter launchParameter = new LaunchParameter("Test", inputs);
try {
proxy.launch(launchParameter);
} catch (Exception e) {
logger.error("Error launching sheduled task", e);
//throw e;
}
}
@Test
public void LaunchTest() throws Exception {
Map<String, Object> inputs = new HashMap<String, Object>();
inputs.put("dataServiceClass","content-management");
inputs.put("dataServiceName","storage-manager");
inputs.put("uri","Rstudio");
inputs.put("dataType","STORAGE");
inputs.put("unitVolume","Kilobyte");
inputs.put("pathFile","/srv/d4science/home_disk_space");
LaunchParameter parameter = new LaunchParameter("Accounting-Insert-RStudio", inputs);
//parameter.setScheduling(scheduling);
try {
String uuidString = proxy.launch(parameter);
logger.debug("Launched with UUID : {}", uuidString);
} catch(Exception e){
logger.error("Error launching sheduled task", e);
throw e;
}
}
@Test
public void cronExpPreviousMustBeTerminated() throws Exception {
CronExpression cronExpression = new CronExpression("0 0 16 * * ?"); // every day at 2:00
//Launched with UUID : df3ca103-dc0a-477d-b7fe-1445c6984e6d
Scheduling scheduling = new Scheduling(cronExpression, true);
scheduling.setGlobal(true);
UUID uuid = scheduleTest(scheduling);
logger.debug("Launched with UUID : {}", uuid);
}
@Test
public void unSchedule() throws Exception {
//56e33a1f-ab24-4d7b-a7b7-cac29147136e active on prod at 18 o clock
//active for each day on 23 cf5f3f75-4e1e-4d8c-ac87-6bfd20ec7330
//active for each day on 23 0f84c6f9-3f43-40c6-8a8d-00cf2a3b311d
//proxy.unSchedule("542ddb03-d8d7-4913-8700-2acfa74c7485", true);
//
}
@Test
public void stop() throws Exception {
proxy.stop("Accouting-Insert-RStudio");
}
}

View File

@ -0,0 +1,209 @@
/**
*
*/
package org.gcube.pieve.aggregator.plugin;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import org.gcube.pieve.testutility.ScopedTest;
import org.gcube.vremanagement.executor.api.types.LaunchParameter;
import org.gcube.vremanagement.executor.api.types.Scheduling;
import org.gcube.vremanagement.executor.client.plugins.ExecutorPlugin;
import org.gcube.vremanagement.executor.client.proxies.SmartExecutorProxy;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AggregatorAccountingPluginSmartExecutorSchedulerTest extends ScopedTest {
private static Logger logger = LoggerFactory.getLogger(AggregatorAccountingPluginSmartExecutorSchedulerTest.class);
private SmartExecutorProxy proxy;
@Before
public void before() throws Exception{
super.before();
//ScopeProvider.instance.reset(); // Comment this to run the test. this line has been added to avoid unwanted launch
//SecurityTokenProvider.instance.set(TestUtility.TOKEN);
//ScopeProvider.instance.set("/gcube/devNext");
proxy = ExecutorPlugin.getExecutorProxy("Accouting-Aggregator-Plugin").build();
Assert.assertNotNull(proxy);
}
public UUID scheduleTest(Scheduling scheduling) throws Exception {
Map<String, Object> inputs = new HashMap<String, Object>();
logger.debug("Inputs : {}", inputs);
inputs.put("type","DAILY");
//period to be processed
inputs.put("interval",3);
//change to time
inputs.put("startTime", 14);
//specify bucket
inputs.put("bucket","accounting_service");
//current scope
inputs.put("currentScope",false);
//specify user for save to workspace
inputs.put("user","alessandro.pieve");
//specify a recovery 0 default recovery and aggregate, 1 only aggregate, 2 only recovery
inputs.put("recovery",0);
//optional if present interval is not considered and elaborate a specificy step
//e.g if type is daily and set input.put("intervalStep",10), this plugin elaborate a 10 hour
// inputs.put("intervalStep",24);
//optional if exist and true no backup, but start elaborate immediately
// inputs.put("backup",false);
// inputs.put("typePersisted",1);
LaunchParameter parameter = new LaunchParameter("Accouting-Aggregator-Plugin", inputs);
parameter.setScheduling(scheduling);
try {
String uuidString = proxy.launch(parameter);
return UUID.fromString(uuidString);
} catch(Exception e){
logger.error("Error launching sheduled task", e);
throw e;
}
}
@Test
public void launch() {
Map<String, Object> inputs = new HashMap<String, Object>();
LaunchParameter launchParameter = new LaunchParameter("Test", inputs);
try {
proxy.launch(launchParameter);
} catch (Exception e) {
logger.error("Error launching sheduled task", e);
//throw e;
}
}
@Test
public void LaunchTest() throws Exception {
Map<String, Object> inputs = new HashMap<String, Object>();
logger.debug("Inputs : {}", inputs);
//inputs.put("type","DAILY");
inputs.put("type","MONTHLY");
//period to be processed
inputs.put("interval",1);
//change to time
//novembre 6
//ottobre 7
//settembre 8
//agosto 9
//luglio 10
//giugno e' 11
inputs.put("startTime",7);
//inputs.put("startTime",173);
inputs.put("intervalStep",4);
//specify bucket
inputs.put("bucket","accounting_service");
//current scope
inputs.put("currentScope",false);
//specify user for save to workspace
inputs.put("user","alessandro.pieve");
//specify a recovery 0 default recovery and aggregate, 1 only aggregate, 2 only recovery
inputs.put("recovery",0);
//optional if present interval is not considered and elaborate a specificy step
//e.g if type is daily and set input.put("intervalStep",10), this plugin elaborate a 10 hour
//optional if exist and true no backup, but start elaborate immediately
//inputs.put("backup",false);
//inputs.put("typePersisted",1);
LaunchParameter parameter = new LaunchParameter("Accouting-Aggregator-Plugin", inputs);
try {
String uuidString = proxy.launch(parameter);
logger.debug("Launched with UUID : {}", uuidString);
} catch(Exception e){
logger.error("Error launching sheduled task", e);
throw e;
}
}
@Test
public void LaunchTestAutomatic() throws Exception {
Map<String, Object> inputs = new HashMap<String, Object>();
logger.debug("Inputs : {}", inputs);
inputs.put("type","DAILY");
//period to be processed
inputs.put("interval",1);
//change to time
//load a file for start time
inputs.put("pathFile","/home/gcube/SmartGears/startTime");
//specify bucket
inputs.put("bucket","accounting_service");
inputs.put("endScriptTime","18:30");
//current scope
inputs.put("currentScope",false);
//specify user for save to workspace
inputs.put("user","alessandro.pieve");
//specify a recovery 0 default recovery and aggregate, 1 only aggregate, 2 only recovery
inputs.put("recovery",0);
LaunchParameter parameter = new LaunchParameter("Accouting-Aggregator-Plugin", inputs);
try {
String uuidString = proxy.launch(parameter);
logger.debug("Launched with UUID : {}", uuidString);
} catch(Exception e){
logger.error("Error launching sheduled task", e);
throw e;
}
}
@Test
public void cronExpPreviousMustBeTerminated() throws Exception {
CronExpression cronExpression = new CronExpression("0 0 2 * * ?"); // every day at 2:00
Scheduling scheduling = new Scheduling(cronExpression, true);
scheduling.setGlobal(true);
UUID uuid = scheduleTest(scheduling);
logger.debug("Launched with UUID : {}", uuid);
}
@Test
public void unSchedule() throws Exception {
//proxy.unSchedule("", true);
}
@Test
public void stop() throws Exception {
proxy.stop("Accouting-Aggregator-Plugin");
}
}

View File

@ -0,0 +1,97 @@
/**
*
*/
package org.gcube.pieve.aggregator.plugin;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.vremanagement.executor.api.types.LaunchParameter;
import org.gcube.vremanagement.executor.api.types.Scheduling;
import org.gcube.vremanagement.executor.client.plugins.ExecutorPlugin;
import org.gcube.vremanagement.executor.client.proxies.SmartExecutorProxy;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Manual/integration tests that schedule the "Accouting-Aggregator-Plugin" on a
 * SmartExecutor instance in the /gcube/devNext development infrastructure.
 * NOTE(review): relies on a hard-coded authorization token committed in source;
 * these tests are meant to be run manually against a live infrastructure and
 * will not work once the token expires.
 */
public class AggregatorAccountingPluginSmartExecutorSchedulerTestDev {

	private static Logger logger = LoggerFactory.getLogger(AggregatorAccountingPluginSmartExecutorSchedulerTestDev.class);

	// Proxy to the remote SmartExecutor service hosting the plugin, built in before()
	private SmartExecutorProxy proxy;

	@Before
	public void before() throws Exception{
		SecurityTokenProvider.instance.set("36501a0d-a205-4bf1-87ad-4c7185faa0d6-98187548");
		//FOR DEBUG
		String scopeDebug="/gcube/devNext";
		ScopeProvider.instance.set(scopeDebug);
		proxy = ExecutorPlugin.getExecutorProxy("Accouting-Aggregator-Plugin").build();
		Assert.assertNotNull(proxy);
	}

	/**
	 * Launches the plugin with the given scheduling and a fixed input map.
	 * The map keys/values are the plugin's expected configuration — do not rename.
	 * @param scheduling the scheduling to attach to the launch
	 * @return the UUID of the launched task
	 * @throws Exception if the remote launch fails (rethrown after logging)
	 */
	public UUID scheduleTest(Scheduling scheduling) throws Exception {
		Map<String, Object> inputs = new HashMap<String, Object>();
		logger.debug("Inputs : {}", inputs);
		inputs.put("type","DAILY");
		//period to be processed
		inputs.put("interval",3);
		//change to time
		inputs.put("startTime", 14);
		//specify bucket
		inputs.put("bucket","accounting_service");
		//current scope
		inputs.put("currentScope",false);
		//specify user for save to workspace
		inputs.put("user","alessandro.pieve");
		//specify a recovery 0 default recovery and aggregate, 1 only aggregate, 2 only recovery
		inputs.put("recovery",0);
		LaunchParameter parameter = new LaunchParameter("Accouting-Aggregator-Plugin", inputs);
		parameter.setScheduling(scheduling);
		try {
			String uuidString = proxy.launch(parameter);
			return UUID.fromString(uuidString);
		} catch(Exception e){
			logger.error("Error launching sheduled task", e);
			throw e;
		}
	}

	@Test
	public void cronExpPreviousMustBeTerminated() throws Exception {
		CronExpression cronExpression = new CronExpression("0 0 2 * * ?"); // every day at 2:00
		Scheduling scheduling = new Scheduling(cronExpression, true);
		scheduling.setGlobal(true);
		UUID uuid = scheduleTest(scheduling);
		logger.debug("Launched with UUID : {}", uuid);
	}

	@Test
	public void unSchedule() throws Exception {
		// Unschedules a previously launched task by its UUID
		proxy.unSchedule("d588bb3f-a4c0-496c-b2ce-7c27059b44b7", true); //ore 2
	}

	@Test
	public void stop() throws Exception {
		proxy.stop("Accouting-Aggregator-Plugin");
	}
}

View File

@ -0,0 +1,158 @@
/**
*
*/
package org.gcube.pieve.aggregator.plugin;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.vremanagement.executor.api.types.LaunchParameter;
import org.gcube.vremanagement.executor.api.types.Scheduling;
import org.gcube.vremanagement.executor.client.plugins.ExecutorPlugin;
import org.gcube.vremanagement.executor.client.proxies.SmartExecutorProxy;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Manual/integration tests that schedule the "Accouting-Aggregator-Plugin" on
 * the production infrastructure (/d4science.research-infrastructures.eu).
 * NOTE(review): relies on a hard-coded authorization token committed in source;
 * meant to be run manually against a live infrastructure.
 */
public class AggregatorAccountingPluginSmartExecutorSchedulerTestProduction {

	private static Logger logger = LoggerFactory.getLogger(AggregatorAccountingPluginSmartExecutorSchedulerTestProduction.class);

	// Proxy to the remote SmartExecutor service hosting the plugin, built in before()
	private SmartExecutorProxy proxy;

	@Before
	public void before() throws Exception{
		String token="73cc40ab-dfe9-41c6-afa5-abd75de32d3c-843339462";
		SecurityTokenProvider.instance.set(token);
		ScopeProvider.instance.set("/d4science.research-infrastructures.eu");
		proxy = ExecutorPlugin.getExecutorProxy("Accouting-Aggregator-Plugin").build();
		Assert.assertNotNull(proxy);
	}

	/**
	 * Launches the plugin with the given scheduling and a fixed input map.
	 * The map keys/values are the plugin's expected configuration — do not rename.
	 * @param scheduling the scheduling to attach to the launch
	 * @return the UUID of the launched task
	 * @throws Exception if the remote launch fails (rethrown after logging)
	 */
	public UUID scheduleTest(Scheduling scheduling) throws Exception {
		Map<String, Object> inputs = new HashMap<String, Object>();
		logger.debug("Inputs : {}", inputs);
		inputs.put("type","DAILY");
		//period to be processed
		inputs.put("interval",1);
		//load a file for start time
		inputs.put("pathFile","/home/gcube/SmartGears/startTime");
		//specify bucket (fix: the original put this key twice with the same value)
		inputs.put("bucket","accounting_service");
		inputs.put("endScriptTime","18:30");
		//current scope
		inputs.put("currentScope",false);
		//specify user for save to workspace
		inputs.put("user","alessandro.pieve");
		//specify a recovery 0 default recovery and aggregate, 1 only aggregate, 2 only recovery
		inputs.put("recovery",0);
		LaunchParameter parameter = new LaunchParameter("Accouting-Aggregator-Plugin", inputs);
		parameter.setScheduling(scheduling);
		try {
			String uuidString = proxy.launch(parameter);
			return UUID.fromString(uuidString);
		} catch(Exception e){
			logger.error("Error launching sheduled task", e);
			throw e;
		}
	}

	/**
	 * Launches a one-shot (non scheduled) MONTHLY aggregation run.
	 */
	@Test
	public void LaunchTest() throws Exception {
		Map<String, Object> inputs = new HashMap<String, Object>();
		logger.debug("Inputs : {}", inputs);
		//inputs.put("type","DAILY");
		inputs.put("type","MONTHLY");
		//period to be processed
		inputs.put("interval",1);
		//change to time
		// startTime offset by month (from the original Italian notes):
		// November=6, October=7, September=8, August=9, July=10, June=11
		inputs.put("startTime",8);
		//inputs.put("startTime",173);
		inputs.put("intervalStep",4);
		//specify bucket
		inputs.put("bucket","accounting_service");
		//current scope
		inputs.put("currentScope",false);
		//specify user for save to workspace
		inputs.put("user","alessandro.pieve");
		//specify a recovery 0 default recovery and aggregate, 1 only aggregate, 2 only recovery
		inputs.put("recovery",0);
		//optional if present interval is not considered and elaborate a specificy step
		//e.g if type is daily and set input.put("intervalStep",10), this plugin elaborate a 10 hour
		//optional if exist and true no backup, but start elaborate immediately
		//inputs.put("backup",false);
		//inputs.put("typePersisted",1);
		LaunchParameter parameter = new LaunchParameter("Accouting-Aggregator-Plugin", inputs);
		try {
			String uuidString = proxy.launch(parameter);
			logger.debug("Launched with UUID : {}", uuidString);
		} catch(Exception e){
			logger.error("Error launching sheduled task", e);
			throw e;
		}
	}

	@Test
	public void cronExpPreviousMustBeTerminated() throws Exception {
		CronExpression cronExpression = new CronExpression("0 0 7 * * ?"); // every day at 7:00 (comment fixed; it previously said 2:00)
		Scheduling scheduling = new Scheduling(cronExpression, true);
		scheduling.setGlobal(true);
		UUID uuid = scheduleTest(scheduling);
		logger.debug("Launched with UUID : {}", uuid);
	}

	@Test
	public void unSchedule() throws Exception {
		proxy.unSchedule("41909530-0142-4073-96e1-11c5d688994a", true); //ore 7
	}

	@Test
	public void stop() throws Exception {
		// NOTE(review): the sibling test classes pass the plugin name to stop(),
		// while this passes a UUID — verify which identifier stop() expects.
		proxy.stop("96e9d82c-7fa8-4dda-a4fd-85696bb0575a");
	}
}

View File

@ -0,0 +1,30 @@
/**
*
*/
package org.gcube.pieve.documentstore.persistence;
import java.util.concurrent.TimeUnit;
import org.gcube.documentstore.persistence.PersistenceBackend;
import org.gcube.documentstore.persistence.PersistenceBackendFactory;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR) http://www.lucafrosini.com/
*
*/
/**
 * Exercises PersistenceBackend resolution for a null scope and the three
 * available flush entry points (factory-wide, per-scope, per-instance).
 */
public class PersistenceBackendFactoryTest {

	private static final Logger logger = LoggerFactory.getLogger(PersistenceBackendFactoryTest.class);

	@Test
	public void parsingTest() throws Exception {
		// Resolve the backend for the null scope and log what we got back
		PersistenceBackend backend = PersistenceBackendFactory.getPersistenceBackend(null);
		logger.debug("{}", backend);
		// Flush through every entry point with the same short timeout
		long flushTimeout = 100;
		PersistenceBackendFactory.flushAll(flushTimeout, TimeUnit.MILLISECONDS);
		PersistenceBackendFactory.flush(null, flushTimeout, TimeUnit.MILLISECONDS);
		backend.flush(flushTimeout, TimeUnit.MILLISECONDS);
	}
}

View File

@ -0,0 +1,85 @@
/**
*
*/
package org.gcube.pieve.documentstore.persistence;
import java.util.concurrent.TimeUnit;
import org.gcube.accounting.datamodel.UsageRecord;
import org.gcube.documentstore.exception.InvalidValueException;
import org.gcube.documentstore.persistence.FallbackPersistenceBackend;
import org.gcube.documentstore.persistence.PersistenceBackend;
import org.gcube.documentstore.persistence.PersistenceBackendFactory;
import org.gcube.pieve.testutility.ScopedTest;
import org.gcube.pieve.testutility.StressTestUtility;
import org.gcube.pieve.testutility.TestOperation;
import org.gcube.pieve.testutility.TestUsageRecord;
import org.gcube.pieve.testutility.TestUtility;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR) http://www.lucafrosini.com/
*
*/
/**
 * Tests the fallback-file re-parsing performed by PersistenceBackendMonitor.
 * NOTE(review): PersistenceBackendMonitor is referenced without an import; the
 * original class lived in the same package as the monitor — verify the import
 * after the package move to org.gcube.pieve.*.
 */
public class PersistenceBackendMonitorTest extends ScopedTest {

	private static final Logger logger = LoggerFactory.getLogger(PersistenceBackendMonitorTest.class);

	// Timeout/unit used when flushing buffered records
	public static final long timeout = 5000;
	public static final TimeUnit timeUnit = TimeUnit.MILLISECONDS;

	@Test
	public void parsingTest() throws Exception {
		PersistenceBackendFactory.setFallbackLocation(null);
		final PersistenceBackend persistence = PersistenceBackendFactory.getPersistenceBackend(TestUtility.getScope());
		// Account a round-robin mix of service/storage/job records (keyed on i % 3)
		StressTestUtility.stressTest(new TestOperation() {
			@Override
			public void operate(int i) {
				UsageRecord usageRecord = null;
				switch (i%3) {
					case 0:
						usageRecord = TestUsageRecord.createTestServiceUsageRecord();
						break;
					case 1:
						usageRecord = TestUsageRecord.createTestStorageUsageRecord();
						break;
					case 2:
						usageRecord = TestUsageRecord.createTestJobUsageRecord();
						break;
				}
				try {
					persistence.account(usageRecord);
				} catch (InvalidValueException e) {
					throw new RuntimeException(e);
				}
			}
		});
		logger.debug(" START -----------------------------------------------");
		logger.debug("Flushing the buffered records");
		persistence.flush(timeout, timeUnit);
		logger.debug(" END -----------------------------------------------");
		PersistenceBackend persistenceBackend = PersistenceBackendFactory.getPersistenceBackend(TestUtility.getScope());
		// NOTE(review): casts the resolved backend to FallbackPersistenceBackend and
		// installs it as its own fallback — this only works when the resolved backend
		// IS the fallback implementation (ClassCastException otherwise); confirm intent.
		persistenceBackend.setFallback((FallbackPersistenceBackend) persistenceBackend);
		PersistenceBackendMonitor temporalDataPersistenceBackendMonitor = new PersistenceBackendMonitor(persistenceBackend);
		// run() invoked directly (synchronously), not on a scheduler thread
		temporalDataPersistenceBackendMonitor.run();
	}

	@Test
	public void singleParsingTest() throws Exception {
		PersistenceBackendFactory.setFallbackLocation(null);
		PersistenceBackend persistenceBackend = PersistenceBackendFactory.getPersistenceBackend(TestUtility.getScope());
		//persistenceBackend.setFallback((FallbackPersistenceBackend) persistenceBackend);
		PersistenceBackendMonitor temporalDataPersistenceBackendMonitor = new PersistenceBackendMonitor(persistenceBackend);
		temporalDataPersistenceBackendMonitor.run();
	}
}

View File

@ -0,0 +1,117 @@
/**
*
*/
package org.gcube.pieve.documentstore.persistence;
import java.util.Calendar;
import java.util.concurrent.TimeUnit;
import org.gcube.accounting.datamodel.UsageRecord;
import org.gcube.accounting.persistence.AccountingPersistenceFactory;
import org.gcube.documentstore.persistence.FallbackPersistenceBackend;
import org.gcube.documentstore.persistence.PersistenceBackend;
import org.gcube.documentstore.persistence.PersistenceBackendFactory;
import org.gcube.pieve.testutility.ScopedTest;
import org.gcube.pieve.testutility.StressTestUtility;
import org.gcube.pieve.testutility.TestOperation;
import org.gcube.pieve.testutility.TestUsageRecord;
import org.gcube.pieve.testutility.TestUtility;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR) http://www.lucafrosini.com/
*
*/
public class PersistenceBackendTest extends ScopedTest {
private static Logger logger = LoggerFactory.getLogger(PersistenceBackendTest.class);
public static final long timeout = 5000;
public static final TimeUnit timeUnit = TimeUnit.MILLISECONDS;
public static PersistenceBackend getPersistence(){
AccountingPersistenceFactory.initAccountingPackages();
PersistenceBackendFactory.setFallbackLocation(null);
return PersistenceBackendFactory.getPersistenceBackend(TestUtility.getScope());
}
@Test
public void singleTestNoScope() throws Exception {
PersistenceBackendFactory.setFallbackLocation(null);
final PersistenceBackend persistence = PersistenceBackendFactory.getPersistenceBackend(null);
Assert.assertTrue(persistence instanceof FallbackPersistenceBackend);
StressTestUtility.stressTest(new TestOperation() {
@Override
public void operate(int i) {
UsageRecord usageRecord = TestUsageRecord.createTestServiceUsageRecord();
persistence.accountValidateAggregate(usageRecord, true, false);
}
}, 1);
persistence.flush(timeout, timeUnit);
}
@Test
public void singleTest() throws Exception {
final PersistenceBackend persistence = getPersistence();
StressTestUtility.stressTest(new TestOperation() {
@Override
public void operate(int i) {
UsageRecord usageRecord = TestUsageRecord.createTestServiceUsageRecord();
persistence.accountValidateAggregate(usageRecord, true, false);
}
}, 1);
persistence.flush(timeout, timeUnit);
}
@Test
public void stressTestNoAggregation() throws Exception {
final PersistenceBackend persistence = getPersistence();
StressTestUtility.stressTest(new TestOperation() {
@Override
public void operate(int i) {
UsageRecord usageRecord = TestUsageRecord.createTestServiceUsageRecord();
persistence.accountValidateAggregate(usageRecord, true, false);
}
});
}
@Test
public void stressTestWithAggregation() throws Exception {
final PersistenceBackend persistence = getPersistence();
StressTestUtility.stressTest(new TestOperation() {
@Override
public void operate(int i) throws Exception {
UsageRecord usageRecord = TestUsageRecord.createTestServiceUsageRecord();
persistence.account(usageRecord);
}
});
persistence.flush(timeout, timeUnit);
}
@Test
public void testScopeRecheck() throws Exception {
logger.debug("Going to check First Time");
PersistenceBackend first = getPersistence();
logger.debug("First {} : {}", PersistenceBackend.class.getSimpleName(), first);
long startTime = Calendar.getInstance().getTimeInMillis();
long endTime = startTime;
while(endTime <= (startTime + (PersistenceBackendFactory.FALLBACK_RETRY_TIME + 2100))){
endTime = Calendar.getInstance().getTimeInMillis();
}
logger.debug("Going to check Second Time");
PersistenceBackend second = getPersistence();
logger.debug("Second {} : {}", PersistenceBackend.class.getSimpleName(), second);
}
}

View File

@ -0,0 +1,63 @@
/**
*
*/
package org.gcube.pieve.documentstore.persistence;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Calendar;
import org.gcube.accounting.persistence.AccountingPersistence;
import org.gcube.accounting.persistence.AccountingPersistenceFactory;
import org.gcube.pieve.testutility.ScopedTest;
import org.gcube.pieve.testutility.TestUsageRecord;
import org.junit.Test;
/**
* @author Luca Frosini (ISTI - CNR) http://www.lucafrosini.com/
*
*/
/**
 * Exercises the rename-then-append pattern used by the fallback persistence:
 * a file is renamed to an ".ELABORATION.&lt;timestamp&gt;" twin while new lines keep
 * being appended to a fresh file created under the original name.
 */
public class RenameFileTest extends ScopedTest {

	private final static String ELABORATION_FILE_SUFFIX = ".ELABORATION";

	/**
	 * Appends a line to the file, serializing concurrent writers on the File object.
	 * @param file the file to append to (created if missing)
	 * @param line the line to append
	 * @throws Exception if writing fails
	 */
	public void printLine(File file, String line) throws Exception {
		synchronized (file) {
			// try-with-resources closes the writer chain; the original additionally
			// caught IOException only to rethrow it unchanged, which was removed.
			try(FileWriter fw = new FileWriter(file, true);
				BufferedWriter bw = new BufferedWriter(fw);
				PrintWriter out = new PrintWriter(bw)){
				out.println(line);
				out.flush();
			}
		}
	}

	@Test
	public void renameFileTest() throws Exception{
		File first = new File("./test.txt");
		Long timestamp = Calendar.getInstance().getTimeInMillis();
		File elaborationFile = new File(first.getAbsolutePath() + ELABORATION_FILE_SUFFIX + "." + timestamp.toString());
		// NOTE(review): renameTo returns false on failure (e.g. on the first run,
		// when ./test.txt does not exist yet); the result is deliberately ignored
		// here because printLine recreates the file either way.
		first.renameTo(elaborationFile);
		printLine(first, "-FIRST-");
		// Interleave writes to both files to prove they are independent after the rename
		for(int i=0; i<100; i++){
			printLine(elaborationFile, "-ELABORATION-" + i);
			printLine(first, "-FIRST MOVED-"+i);
		}
	}

	@Test
	public void testPersistenceBackendMonitor() throws Exception{
		AccountingPersistenceFactory.setFallbackLocation(".");
		AccountingPersistence accountingPersistence = AccountingPersistenceFactory.getPersistence();
		accountingPersistence.account(TestUsageRecord.createTestStorageUsageRecord());
	}
}

View File

@ -0,0 +1,99 @@
/**
*
*/
package org.gcube.pieve.documentstore.records;
import java.net.URI;
import org.gcube.accounting.datamodel.UsageRecord.OperationResult;
import org.gcube.accounting.datamodel.aggregation.AggregatedServiceUsageRecord;
import org.gcube.accounting.datamodel.aggregation.AggregatedStorageUsageRecord;
import org.gcube.accounting.datamodel.basetypes.AbstractStorageUsageRecord.DataType;
import org.gcube.accounting.datamodel.basetypes.AbstractStorageUsageRecord.OperationType;
import org.gcube.accounting.datamodel.usagerecords.ServiceUsageRecord;
import org.gcube.documentstore.records.AggregatedRecord;
import org.gcube.documentstore.records.Record;
import org.gcube.documentstore.records.RecordUtility;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR) http://www.lucafrosini.com/
*
*/
/**
 * Tests RecordUtility parsing of legacy serialized records and the
 * Record/AggregatedRecord class hierarchy discovery.
 */
public class RecordUtilityTest {

	private static Logger logger = LoggerFactory.getLogger(RecordUtilityTest.class);

	/**
	 * Parses a legacy (pre-rename) serialized StorageUsageRecord and checks that
	 * every field survives the round trip as an AggregatedStorageUsageRecord.
	 */
	@Test
	public void testImportOldRecord() throws Exception {
		String line = "{"
			+ "resourceScope=/gcube/devsec, "
			+ "scope=/gcube/devsec, "
			+ "operationCount=1, "
			+ "usageRecordType=StorageUsageRecord, "
			+ "endTime=1448447153009, "
			+ "consumerId=CSV, "
			+ "startTime=1448447153009, "
			+ "id=c7bab219-4024-4019-a8ad-ff5f342b439b, "
			+ "dataVolume=68, "
			+ "dataType=STORAGE, "
			+ "resourceOwner=CSV, "
			+ "operationResult=SUCCESS, "
			+ "resourceURI=testprotocol://objectURI, "
			+ "operationType=CREATE, "
			+ "aggregated=true, "
			+ "creationTime=1448447153096, "
			+ "providerURI=data.d4science.org}";
		logger.debug(line);
		Record record = RecordUtility.getRecord(line);
		Assert.assertTrue(record instanceof AggregatedStorageUsageRecord);
		AggregatedStorageUsageRecord aggregatedStorageUsageRecord = (AggregatedStorageUsageRecord) record;
		Assert.assertTrue(aggregatedStorageUsageRecord.getResourceScope().compareTo("/gcube/devsec")==0);
		Assert.assertTrue(aggregatedStorageUsageRecord.getScope().compareTo("/gcube/devsec")==0);
		Assert.assertTrue(aggregatedStorageUsageRecord.getOperationCount()==1);
		//
		Assert.assertTrue(aggregatedStorageUsageRecord.getRecordType().compareTo("StorageUsageRecord")==0);
		// long literals replace the deprecated `new Long("...")` boxing of the original
		Assert.assertTrue(aggregatedStorageUsageRecord.getEndTime().getTimeInMillis()==1448447153009L);
		Assert.assertTrue(aggregatedStorageUsageRecord.getConsumerId().compareTo("CSV")==0);
		Assert.assertTrue(aggregatedStorageUsageRecord.getStartTime().getTimeInMillis()==1448447153009L);
		Assert.assertTrue(aggregatedStorageUsageRecord.getId().compareTo("c7bab219-4024-4019-a8ad-ff5f342b439b")==0);
		Assert.assertTrue(aggregatedStorageUsageRecord.getDataVolume()==68);
		Assert.assertTrue(aggregatedStorageUsageRecord.getDataType()==DataType.STORAGE);
		Assert.assertTrue(aggregatedStorageUsageRecord.getResourceOwner().compareTo("CSV")==0);
		Assert.assertTrue(aggregatedStorageUsageRecord.getOperationResult()==OperationResult.SUCCESS);
		Assert.assertTrue(aggregatedStorageUsageRecord.getResourceURI().compareTo(new URI("testprotocol://objectURI"))==0);
		Assert.assertTrue(aggregatedStorageUsageRecord.getOperationType()==OperationType.CREATE);
		Assert.assertTrue((Boolean) aggregatedStorageUsageRecord.getResourceProperty(AggregatedStorageUsageRecord.AGGREGATED));
		Assert.assertTrue(aggregatedStorageUsageRecord.getCreationTime().getTimeInMillis()==1448447153096L);
		Assert.assertTrue(aggregatedStorageUsageRecord.getProviderURI().compareTo(new URI("data.d4science.org"))==0);
		logger.debug("{}", aggregatedStorageUsageRecord);
	}

	/**
	 * Sanity checks on the Record / AggregatedRecord assignability relations.
	 */
	@Test
	public void test() {
		Assert.assertTrue(Record.class.isAssignableFrom(ServiceUsageRecord.class));
		Assert.assertFalse(AggregatedRecord.class.isAssignableFrom(ServiceUsageRecord.class));
		Assert.assertTrue(Record.class.isAssignableFrom(AggregatedServiceUsageRecord.class));
		Assert.assertTrue(AggregatedRecord.class.isAssignableFrom(AggregatedServiceUsageRecord.class));
	}

	/**
	 * Exercises package-based record discovery and logs what was found.
	 */
	@Test
	public void testNewReflection() {
		RecordUtility.addRecordPackage(ServiceUsageRecord.class.getPackage());
		logger.trace("{}", RecordUtility.recordClassesFound);
		RecordUtility.addRecordPackage(AggregatedServiceUsageRecord.class.getPackage());
		logger.trace("{}", RecordUtility.aggregatedRecordClassesFound);
		logger.trace("{}", RecordUtility.recordAggregationMapping);
	}
}

View File

@ -0,0 +1,36 @@
/**
*
*/
package org.gcube.pieve.test;
import org.gcube.accounting.persistence.AccountingPersistence;
import org.gcube.accounting.persistence.AccountingPersistenceFactory;
import org.gcube.pieve.testutility.ScopedTest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR) http://www.lucafrosini.com/
*
*/
/**
 * Accounting smoke test.
 * NOTE(review): the @Before/@Test annotations are commented out below, so this
 * class is effectively disabled — nothing here runs under JUnit as committed.
 */
public class AccountingTest extends ScopedTest {

	private static final Logger logger = LoggerFactory.getLogger(AccountingTest.class);

	// Persistence handle initialized in before()
	protected AccountingPersistence accountingPersistence;

	//@Before
	public void before() throws Exception {
		super.before();
		AccountingPersistenceFactory.setFallbackLocation("src/test/resources");
		accountingPersistence = AccountingPersistenceFactory.getPersistence();
	}

	//@Test
	public void testAccounting() throws Exception {
		logger.trace("Let See");
		// Sleeps for 3 minutes — presumably to let background flushing run; confirm
		Thread.sleep(1000*60*3);
		logger.trace("Finished");
	}
}

View File

@ -0,0 +1,81 @@
/**
*
*/
package org.gcube.pieve.test;
import java.io.StringWriter;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import org.gcube.common.resources.gcore.GenericResource;
import org.gcube.common.resources.gcore.Resource;
import org.gcube.common.resources.gcore.Resources;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.informationsystem.publisher.RegistryPublisherFactory;
import org.gcube.informationsystem.publisher.ScopedPublisher;
import org.gcube.informationsystem.publisher.exception.RegistryNotFoundException;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR) http://www.lucafrosini.com/
*
*/
/**
 * Miscellaneous exploratory tests: a classpath lookup and the publication of a
 * GenericResource to the IS registry in the /gcube scope.
 */
public class TempTest {

	private static final Logger logger = LoggerFactory.getLogger(TempTest.class);

	@Test
	public void test(){
		// Checks whether the provider class is available on the classpath
		try {
			Class.forName("com.duke.MyLocaleServiceProvider");
			logger.debug("OK");
		} catch (ClassNotFoundException e) {
			logger.error("Not found", e);
		}
	}

	/**
	 * Publishes the given resource in all the provided scopes.
	 * @param resource the resource to publish
	 * @param scopes the scopes to publish it in
	 * @throws RegistryNotFoundException if no registry is available (rethrown after logging)
	 */
	private static void publishScopedResource(Resource resource, List<String> scopes) throws RegistryNotFoundException, Exception {
		StringWriter stringWriter = new StringWriter();
		Resources.marshal(resource, stringWriter);
		ScopedPublisher scopedPublisher = RegistryPublisherFactory.scopedPublisher();
		try {
			logger.debug("Trying to publish to {}:\n{}", scopes, stringWriter);
			scopedPublisher.create(resource, scopes);
		} catch (RegistryNotFoundException e) {
			logger.error("The resource was not published", e);
			throw e;
		}
	}

	@Test
	public void anotherTest(){
		ScopeProvider.instance.set("/gcube");
		try {
			GenericResource a = new GenericResource();
			List<String> b = new LinkedList<>();
			String scope = ScopeProvider.instance.get();
			b.add(scope);
			a.newProfile().name(UUID.randomUUID().toString()).type("FHN-nodes").description("FHN node");
			a.profile().newBody("<name>Nunzio</name>");
			try {
				publishScopedResource(a,b);
			} catch (Exception e) {
				// Fix: log through SLF4J instead of the original auto-generated
				// e.printStackTrace() + "TODO" block
				logger.error("Error publishing resource", e);
			}
			logger.debug("OK");
		} catch (Exception e) {
			logger.error("Not found", e);
		}
	}
}

View File

@ -0,0 +1,23 @@
package org.gcube.pieve.testutility;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
/**
 * Small file-reading helper for tests.
 */
public abstract class IOUtility {

	/**
	 * Reads the whole file, normalizing every line terminator to the platform
	 * line separator (a trailing separator is always appended after the last line).
	 * NOTE(review): FileReader uses the platform default charset — confirm callers
	 * only read ASCII/platform-encoded fixtures.
	 * @param filePath path of the file to read
	 * @return the file content
	 * @throws IOException if the file cannot be read
	 */
	public static String readFile(String filePath) throws IOException {
		StringBuilder stringBuilder = new StringBuilder();
		String ls = System.getProperty("line.separator");
		// Fix: try-with-resources closes the reader even when readLine() throws;
		// the original leaked the reader on any I/O error.
		try (BufferedReader reader = new BufferedReader(new FileReader(filePath))) {
			String line;
			while ((line = reader.readLine()) != null) {
				stringBuilder.append(line);
				stringBuilder.append(ls);
			}
		}
		return stringBuilder.toString();
	}
}

View File

@ -0,0 +1,40 @@
/**
*
*/
package org.gcube.pieve.testutility;
//import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import org.junit.After;
import org.junit.Before;
/**
* @author Luca Frosini (ISTI - CNR) http://www.lucafrosini.com/
*
*/
/**
 * Base class for tests that need an authorization token and scope installed
 * before each test and cleared afterwards.
 * NOTE(review): credentials are hard-coded in source; they should be
 * externalized (system property / config file) and never committed.
 */
public class ScopedTest {

	@Before
	public void before() throws Exception{
		// Alternative environments, kept for manual switching:
		// SecurityTokenProvider.instance.set("36501a0d-a205-4bf1-87ad-4c7185faa0d6-98187548");
		// ScopeProvider.instance.set("/gcube/devNext");
		// SecurityTokenProvider.instance.set("3acdde42-6883-4564-b3ba-69f6486f6fe0-98187548");
		// ScopeProvider.instance.set("/gcube");
		/* DA SCOMMENTARE E LANCIARE UNA VOLTA COMPLETATO GCUBE APPS*/
		// (Italian: "to uncomment and run once GCube Apps is completed")
		SecurityTokenProvider.instance.set("73cc40ab-dfe9-41c6-afa5-abd75de32d3c-843339462");
		ScopeProvider.instance.set("/d4science.research-infrastructures.eu");
	}

	@After
	public void after() throws Exception{
		// Always reset the providers so token/scope state does not leak between tests
		SecurityTokenProvider.instance.reset();
		ScopeProvider.instance.reset();
	}
}

View File

@ -0,0 +1,42 @@
/**
*
*/
package org.gcube.pieve.testutility;
import java.util.Calendar;
import java.util.GregorianCalendar;
import org.gcube.testutility.StressTestUtility;
import org.gcube.testutility.TestOperation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR) http://www.lucafrosini.com/
*
*/
/**
 * Helper that runs a TestOperation many times and logs total/average duration.
 */
public class StressTestUtility {

	private static final Logger logger = LoggerFactory.getLogger(StressTestUtility.class);

	// Number of iterations used by the single-argument overload
	public final static int DEFAULT_NUMBER_OF_RECORDS = 3000;

	/**
	 * Runs the operation {@link #DEFAULT_NUMBER_OF_RECORDS} times.
	 * @throws Exception the first exception thrown by the operation, aborting the run
	 */
	public static void stressTest(TestOperation operation) throws Exception {
		stressTest(operation, DEFAULT_NUMBER_OF_RECORDS);
	}

	/**
	 * Runs the operation {@code runs} times, passing the iteration index, and
	 * logs the total and per-iteration average duration in milliseconds.
	 * @throws Exception the first exception thrown by the operation, aborting the run
	 */
	public static void stressTest(TestOperation operation, int runs) throws Exception {
		// Fix: plain millisecond timestamps replace the original GregorianCalendar
		// objects, which were allocated only to call getTimeInMillis()
		long startMillis = System.currentTimeMillis();
		for(int i=0; i< runs; i++){
			operation.operate(i);
		}
		long stopMillis = System.currentTimeMillis();
		double duration = stopMillis - startMillis;
		double average = duration / runs;
		// Fix: parameterized logging instead of string concatenation
		logger.debug("Duration (in millisec) : {}", duration);
		logger.debug("Average (in millisec) : {}", average);
	}
}

View File

@ -0,0 +1,15 @@
/**
*
*/
package org.gcube.pieve.testutility;
/**
* @author Luca Frosini (ISTI - CNR) http://www.lucafrosini.com/
*
*/
/**
 * A single operation executed repeatedly by StressTestUtility.
 * Annotated as a functional interface so implementations can be lambdas.
 */
@FunctionalInterface
public interface TestOperation {

	/**
	 * Performs the i-th operation of a stress run.
	 * @param i the zero-based iteration index
	 * @throws Exception if the operation fails; aborts the stress run
	 */
	public void operate(int i) throws Exception;
}

View File

@ -0,0 +1,240 @@
/**
*
*/
package org.gcube.pieve.testutility;
import java.io.Serializable;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Calendar;
import java.util.HashMap;
import java.util.UUID;
import org.gcube.accounting.datamodel.UsageRecord.OperationResult;
import org.gcube.accounting.datamodel.basetypes.AbstractStorageUsageRecord;
import org.gcube.accounting.datamodel.usagerecords.JobUsageRecord;
import org.gcube.accounting.datamodel.usagerecords.PortletUsageRecord;
import org.gcube.accounting.datamodel.usagerecords.ServiceUsageRecord;
import org.gcube.accounting.datamodel.usagerecords.StorageUsageRecord;
import org.gcube.documentstore.exception.InvalidValueException;
import org.gcube.pieve.accounting.datamodel.usagerecords.TaskUsageRecord;
import org.gcube.testutility.TestUsageRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR) http://www.lucafrosini.com/
*
*/
public class TestUsageRecord {
private static final Logger logger = LoggerFactory.getLogger(TestUsageRecord.class);
// --- Values shared by all test records -----------------------------------
public final static String TEST_CONSUMER_ID = "name.surname";
public final static String TEST_SCOPE = "/infrastructure/vo";
public final static OperationResult TEST_OPERATION_RESULT = OperationResult.SUCCESS;
// --- ServiceUsageRecord values -------------------------------------------
public final static String TEST_SERVICE_CLASS = "TestServiceClass";
public final static String TEST_SERVICE_NAME = "TestServiceName";
public final static String TEST_CALLED_METHOD = "TestCalledMethod";
public final static String TEST_CALLER_HOST = "remotehost";
public final static String TEST_HOST = "localhost";
// --- Generic property names/values (also reused as nested task parameters)
public final static String TEST_PROPERTY_NAME = "TestPropertyName";
public final static String TEST_PROPERTY_VALUE = "TestPropertyValue";
// --- JobUsageRecord values -----------------------------------------------
public final static String TEST_JOB_ID = UUID.randomUUID().toString();
public final static String TEST_JOB_NAME = "TestJobName";
public final static int TEST_VMS_USED = 2;
public final static String TEST_JOB_QUALIFIER = "TestJobQualifier";
// Half of the simulated job/task duration window (10 minutes, in milliseconds)
public final static long HALF_DURATION = 10 * 60 * 1000; // 10 min
// --- TaskUsageRecord values ----------------------------------------------
public final static String TEST_TASK_ID = UUID.randomUUID().toString();
public final static String TEST_NESTED_MAP = "TestNestedMap";
// --- PortletUsageRecord values -------------------------------------------
public final static String TEST_PORTLET_ID = "TestPortlet";
public final static String TEST_PORTLET_OPERATION_ID = "TestPortletOperationID";
public final static String TEST_PORTLET_MESSAGE = "TestPortletMessage";
// Random duration bounds consumed by generateRandomLong
private final static long MIN_DURATION = 60; // millisec
private final static long MAX_DURATION = 1000; // millisec
/**
 * Generate a random long in the inclusive range [min, max].
 * This function is internally used to set random durations and data volumes.
 * @param min lower bound (inclusive)
 * @param max upper bound (inclusive)
 * @return the generated random long
 */
public static long generateRandomLong(long min, long max){
	// Fix: cast to long, not int — the original (int) cast truncated the offset
	// and could overflow for ranges wider than Integer.MAX_VALUE
	return min + (long)(Math.random() * ((max - min) + 1));
}
/**
 * Builds a fully populated test {@link ServiceUsageRecord}; the scope is set
 * automatically by the record itself.
 * @return the created ServiceUsageRecord
 */
public static ServiceUsageRecord createTestServiceUsageRecord() {
	ServiceUsageRecord serviceUsageRecord = new ServiceUsageRecord();
	try {
		serviceUsageRecord.setConsumerId(TEST_CONSUMER_ID);
		serviceUsageRecord.setOperationResult(TEST_OPERATION_RESULT);
		serviceUsageRecord.setCallerHost(TEST_CALLER_HOST);
		serviceUsageRecord.setHost(TEST_HOST);
		serviceUsageRecord.setServiceClass(TEST_SERVICE_CLASS);
		serviceUsageRecord.setServiceName(TEST_SERVICE_NAME);
		serviceUsageRecord.setCalledMethod(TEST_CALLED_METHOD);
		// Random duration keeps repeated records from being identical
		serviceUsageRecord.setDuration(generateRandomLong(MIN_DURATION, MAX_DURATION));
	} catch (InvalidValueException e) {
		logger.error(" ------ You SHOULD NOT SEE THIS MESSAGE. Error Creating a test Usage Record", e);
		throw new RuntimeException(e);
	}
	return serviceUsageRecord;
}
// --- StorageUsageRecord values -------------------------------------------
public final static String TEST_RESOUCE_OWNER = "resource.owner";
public final static String TEST_RESOUCE_SCOPE = TEST_SCOPE;
public final static String TEST_RESOURCE_URI = "testprotocol://objectURI";
public final static String TEST_PROVIDER_URI = "testprotocol://providerURI";
// Random data-volume bounds consumed by createTestStorageUsageRecord
private final static long MIN_DATA_VOLUME = 1024;
private final static long MAX_DATA_VOLUME = 10240;
/**
 * Builds a fully populated test {@link StorageUsageRecord}; the scope is set
 * automatically by the record itself.
 * @return the created StorageUsageRecord
 */
public static StorageUsageRecord createTestStorageUsageRecord() {
	StorageUsageRecord storageUsageRecord = new StorageUsageRecord();
	try {
		storageUsageRecord.setConsumerId(TEST_CONSUMER_ID);
		storageUsageRecord.setOperationResult(TEST_OPERATION_RESULT);
		storageUsageRecord.setResourceOwner(TEST_RESOUCE_OWNER);
		storageUsageRecord.setResourceScope(TEST_RESOUCE_SCOPE);
		storageUsageRecord.setResourceURI(new URI(TEST_RESOURCE_URI));
		storageUsageRecord.setProviderURI(new URI(TEST_PROVIDER_URI));
		storageUsageRecord.setOperationType(AbstractStorageUsageRecord.OperationType.READ);
		storageUsageRecord.setDataType(AbstractStorageUsageRecord.DataType.STORAGE);
		// Random volume keeps repeated records from being identical
		storageUsageRecord.setDataVolume(generateRandomLong(MIN_DATA_VOLUME, MAX_DATA_VOLUME));
		storageUsageRecord.setQualifier("image/png");
	} catch (InvalidValueException | URISyntaxException e) {
		logger.error(" ------ You SHOULD NOT SEE THIS MESSAGE. Error Creating a test Usage Record", e);
		throw new RuntimeException(e);
	}
	return storageUsageRecord;
}
/**
 * Builds a fully populated test {@link JobUsageRecord} whose start/end times
 * bracket "now" by HALF_DURATION on each side.
 * @return the created JobUsageRecord
 */
public static JobUsageRecord createTestJobUsageRecord() {
	JobUsageRecord usageRecord = new JobUsageRecord();
	try {
		usageRecord.setConsumerId(TEST_CONSUMER_ID);
		usageRecord.setOperationResult(TEST_OPERATION_RESULT);
		usageRecord.setJobId(TEST_JOB_ID);
		usageRecord.setJobName(TEST_JOB_NAME);
		usageRecord.setJobQualifier(TEST_JOB_QUALIFIER);
		// Center a window of 2 * HALF_DURATION around the current time
		Calendar startTime = Calendar.getInstance();
		Calendar endTime = Calendar.getInstance();
		endTime.setTimeInMillis(startTime.getTimeInMillis() + HALF_DURATION);
		startTime.setTimeInMillis(startTime.getTimeInMillis() - HALF_DURATION);
		usageRecord.setJobStartTime(startTime);
		usageRecord.setJobEndTime(endTime);
	} catch (InvalidValueException e) {
		logger.error(" ------ You SHOULD NOT SEE THIS MESSAGE. Error Creating a test Usage Record", e);
		// Consistency fix: rethrow like createTestServiceUsageRecord and
		// createTestStorageUsageRecord do, instead of silently returning a
		// half-initialized record on this should-never-happen path.
		throw new RuntimeException(e);
	}
	return usageRecord;
}
/**
 * Builds a fully populated test {@link TaskUsageRecord}, including a nested
 * input-parameter map to exercise serialization of non-flat values.
 * @return the created TaskUsageRecord
 */
public static TaskUsageRecord createTestTaskUsageRecord() {
	TaskUsageRecord usageRecord = new TaskUsageRecord();
	try {
		usageRecord.setConsumerId(TEST_CONSUMER_ID);
		usageRecord.setOperationResult(TEST_OPERATION_RESULT);
		usageRecord.setTaskId(TEST_TASK_ID);
		// NOTE(review): this second call overwrites the task id just set above with
		// TEST_JOB_ID — one of the two lines was presumably meant to set a different
		// field (e.g. a job reference); confirm intent before changing.
		usageRecord.setTaskId(TEST_JOB_ID);
		usageRecord.setHost(TEST_HOST);
		usageRecord.setRefHostingNodeId(UUID.randomUUID().toString());
		// Center a window of 2 * HALF_DURATION around the current time
		Calendar startTime = Calendar.getInstance();
		Calendar endTime = Calendar.getInstance();
		endTime.setTimeInMillis(startTime.getTimeInMillis() + HALF_DURATION);
		startTime.setTimeInMillis(startTime.getTimeInMillis() - HALF_DURATION);
		usageRecord.setTaskStartTime(startTime);
		usageRecord.setTaskEndTime(endTime);
		// Input parameters include a nested map under TEST_NESTED_MAP
		HashMap<String, Serializable> inputParameters = new HashMap<>();
		inputParameters.put(TEST_PROPERTY_NAME, TEST_PROPERTY_VALUE);
		inputParameters.put(TEST_PROPERTY_VALUE, TEST_PROPERTY_NAME);
		HashMap<String, Serializable> parameter = new HashMap<>();
		parameter.put(TEST_PROPERTY_NAME, TEST_PROPERTY_VALUE);
		parameter.put(TEST_PROPERTY_VALUE, TEST_PROPERTY_NAME);
		inputParameters.put(TEST_NESTED_MAP, parameter);
		usageRecord.setInputParameters(inputParameters);
	} catch (InvalidValueException e) {
		logger.error(" ------ You SHOULD NOT SEE THIS MESSAGE. Error Creating a test Usage Record", e);
	}
	return usageRecord;
}
/**
 * Create a valid {@link PortletUsageRecord} for test purposes, populated
 * with the TEST_* constants.
 * @return the created {@link PortletUsageRecord}
 * @throws RuntimeException if any setter rejects a value (should never
 *         happen with the constant test data)
 */
public static PortletUsageRecord createTestPortletUsageRecord() {
	PortletUsageRecord usageRecord = new PortletUsageRecord();
	try {
		usageRecord.setConsumerId(TEST_CONSUMER_ID);
		usageRecord.setOperationResult(TEST_OPERATION_RESULT);
		// Removed dead code: two Calendar instances (startTime/endTime) were
		// computed here but never assigned to the record.
		usageRecord.setPortletId(TEST_PORTLET_ID);
		usageRecord.setOperationId(TEST_PORTLET_OPERATION_ID);
		usageRecord.setMessage(TEST_PORTLET_MESSAGE);
	} catch (InvalidValueException e) {
		logger.error(" ------ You SHOULD NOT SEE THIS MESSAGE. Error Creating a test Usage Record", e);
		// Fail fast instead of returning a partially-initialized record,
		// consistent with createTestStorageUsageRecord()
		throw new RuntimeException(e);
	}
	return usageRecord;
}
}

View File

@ -0,0 +1,87 @@
/**
*
*/
package org.gcube.pieve.testutility;
import java.io.StringWriter;
import org.gcube.common.authorization.library.AuthorizationEntry;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.authorization.client.Constants;
import org.gcube.common.resources.gcore.Resource;
import org.gcube.common.resources.gcore.Resources;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.informationsystem.publisher.RegistryPublisher;
import org.gcube.informationsystem.publisher.RegistryPublisherFactory;
import org.gcube.informationsystem.publisher.exception.RegistryNotFoundException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Shared helpers for the test suite: scope resolution and resource
 * (un)publication on the Information System.
 *
 * @author Luca Frosini (ISTI - CNR) http://www.lucafrosini.com/
 *
 */
public class TestUtility {

	/** Security token used to run the tests. */
	public static final String TOKEN = "36501a0d-a205-4bf1-87ad-4c7185faa0d6-98187548";

	public static final String PARENT_TOKEN = "";

	/**
	 * Logger
	 */
	private static Logger logger = LoggerFactory.getLogger(TestUtility.class);

	/**
	 * Resolve the current scope from the authorization service using the
	 * current security token; if the token cannot be resolved, fall back to
	 * {@link ScopeProvider}.
	 * @return the current scope
	 */
	public static String getScope() {
		String currentToken = SecurityTokenProvider.instance.get();
		AuthorizationEntry entry;
		try {
			entry = Constants.authorizationService().get(currentToken);
		} catch (Exception e) {
			// Token resolution failed: use the scope provider instead
			return ScopeProvider.instance.get();
		}
		return entry.getContext();
	}

	/**
	 * Publish the provided resource in the current scope.
	 * @param resource the resource to be published
	 * @throws Exception if the publication fails (e.g. the Registry is not
	 *         found), in which case the resource has not been published
	 */
	public static void publishResource(Resource resource) throws Exception {
		StringWriter marshalled = new StringWriter();
		Resources.marshal(resource, marshalled);
		RegistryPublisher publisher = RegistryPublisherFactory.create();
		try {
			logger.debug("Trying to publish to {}:\n{}", getScope(), marshalled);
			publisher.create(resource);
		} catch (Exception e) {
			logger.error("The resource was not published", e);
			throw e;
		}
	}

	/**
	 * Remove the resource from the IS in the current scope.
	 * @param resource the resource to be unpublished
	 * @throws Exception if the removal fails (e.g. the Registry is not
	 *         found), in which case the resource is still published
	 */
	public static void unPublishResource(Resource resource) throws Exception {
		RegistryPublisher publisher = RegistryPublisherFactory.create();
		String resourceId = resource.id();
		logger.debug("Trying to remove {} with ID {} from {}", resource.getClass().getSimpleName(), resourceId, getScope());
		publisher.remove(resource);
		logger.debug("{} with ID {} removed successfully", resource.getClass().getSimpleName(), resourceId);
	}

}

View File

@ -0,0 +1,76 @@
/**
*
*/
package org.gcube.pieve.vremanagement.executor;
import org.gcube.pieve.testutility.ScopedTest;
/**
 * @author Luca Frosini (ISTI - CNR) http://www.lucafrosini.com/
 *
 */
public class GCoreEndPointCreationTest extends ScopedTest {
	//private static Logger logger = LoggerFactory.getLogger(GCoreEndPointCreationTest.class);
	// NOTE(review): the entire test body below is commented out, so this class
	// currently contains no executable tests. The dead code should either be
	// restored (re-adding the GCoreEndpoint/PluginManager imports it needs)
	// or deleted — commented-out code should not be kept in the repository.
	/* protected static GCoreEndpoint createGCoreEndpoint(){
	logger.debug("Getting Available Plugins and their own supported capabilities");
	PluginManager pluginManager = PluginManager.getInstance();
	//ContextProvider.get().application();
	GCoreEndpoint gCoreEndpoint = new GCoreEndpoint();
	Profile profile = gCoreEndpoint.profile();
	profile.serviceId("serviceid");
	profile.ghnId("nodeid");
	profile.serviceClass("serviceClass");
	profile.serviceName("serviceName");
	profile.version("version");
	profile.description("description");
	profile.endpoints().add().nameAndAddress("name",URI.create("http://acme.org"));
	profile.serviceClass(ContextProvider.get().configuration().serviceClass());
	profile.serviceName(ContextProvider.get().configuration().name());
	profile.version(ContextProvider.get().configuration().version());
	profile.description(ContextProvider.get().configuration().description());
	DeploymentData deploymentData = profile.newDeploymentData();
	deploymentData.activationTime(Calendar.getInstance());
	Map<String, PluginDeclaration> availablePlugins = pluginManager.getAvailablePlugins();
	for(String pluginName : availablePlugins.keySet()){
	PluginDeclaration pluginDeclaration = availablePlugins.get(pluginName);
	deploymentData.plugins().add().service("",pluginName, pluginDeclaration.getVersion()).pluginPackage("").version(pluginDeclaration.getVersion());
	Map<String, String> pluginCapabilities = pluginDeclaration.getSupportedCapabilities();
	for(String capabilityName : pluginCapabilities.keySet()){
	Parameter parameter = new Parameter();
	parameter.nameAndValues(capabilityName, pluginCapabilities.get(capabilityName));
	}
	}
	return gCoreEndpoint;
	}
	@Test
	public void logCreatedGCoreEndpoint() throws Exception {
	GCoreEndpoint gCoreEndpoint = createGCoreEndpoint();
	StringWriter stringWriter = new StringWriter();
	Resources.marshal(gCoreEndpoint, stringWriter);
	logger.debug("Created {} for scope {}:\n{}",
	GCoreEndpoint.class.getSimpleName(),
	TestUtility.getScopeFromToken(), stringWriter);
	}*/
}