diff --git a/pom.xml b/pom.xml
index d3e52f5..2a11b0f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -97,6 +97,12 @@
-->
+
+ org.gcube.information-system
+ context-creation
+ [1.0.0-SNAPSHOT,2.0.0-SNAPSHOT)
+
+
org.gcube.accounting
@@ -167,6 +173,14 @@
+
+ org.gcube.information-system
+ resource-registry
+ [1.0.0-SNAPSHOT,2.0.0-SNAPSHOT)
+ test
+
+
+
org.gcube.core
common-scope
diff --git a/src/test/java/org/gcube/accounting/aggregator/plugin/AggregatorAccountingPluginSmartExecutorSchedulerTest.java b/src/test/java/org/gcube/accounting/aggregator/plugin/AggregatorAccountingPluginSmartExecutorSchedulerTest.java
index 634fbbd..37e5012 100644
--- a/src/test/java/org/gcube/accounting/aggregator/plugin/AggregatorAccountingPluginSmartExecutorSchedulerTest.java
+++ b/src/test/java/org/gcube/accounting/aggregator/plugin/AggregatorAccountingPluginSmartExecutorSchedulerTest.java
@@ -11,6 +11,7 @@ import org.gcube.accounting.aggregator.aggregation.AggregationType;
import org.gcube.accounting.aggregator.plugin.AccountingAggregatorPlugin.ElaborationType;
import org.gcube.accounting.aggregator.utility.Utility;
import org.gcube.accounting.datamodel.usagerecords.ServiceUsageRecord;
+import org.gcube.accounting.datamodel.usagerecords.StorageUsageRecord;
import org.gcube.testutility.ScopedTest;
import org.gcube.vremanagement.executor.api.types.LaunchParameter;
import org.gcube.vremanagement.executor.api.types.Scheduling;
@@ -31,47 +32,12 @@ public class AggregatorAccountingPluginSmartExecutorSchedulerTest extends Scoped
@Before
public void before() throws Exception {
+ setContext(ROOT);
proxy = ExecutorPlugin.getExecutorProxy(AccountingAggregatorPluginDeclaration.NAME).build();
Assert.assertNotNull(proxy);
}
-
- private Map getRecoveryInputs() throws Exception {
- Map inputs = new HashMap();
- inputs.put(AccountingAggregatorPlugin.ELABORATION_TYPE_INPUT_PARAMETER, ElaborationType.RECOVERY.name());
-
- inputs.put(AccountingAggregatorPlugin.PERSIST_START_TIME_INPUT_PARAMETER, Utility.getPersistTimeParameter(4, 0));
- inputs.put(AccountingAggregatorPlugin.PERSIST_END_TIME_INPUT_PARAMETER, Utility.getPersistTimeParameter(20, 0));
-
- return inputs;
- }
-
- private Map getAggregateInputs() throws Exception {
- Map inputs = new HashMap();
- inputs.put(AccountingAggregatorPlugin.AGGREGATION_TYPE_INPUT_PARAMETER, AggregationType.DAILY.name());
-
- inputs.put(AccountingAggregatorPlugin.ELABORATION_TYPE_INPUT_PARAMETER, ElaborationType.AGGREGATE.name());
-
- inputs.put(AccountingAggregatorPlugin.PERSIST_START_TIME_INPUT_PARAMETER, Utility.getPersistTimeParameter(3, 0));
- inputs.put(AccountingAggregatorPlugin.PERSIST_END_TIME_INPUT_PARAMETER, Utility.getPersistTimeParameter(19, 00));
-
- inputs.put(AccountingAggregatorPlugin.RECORD_TYPE_INPUT_PARAMETER,
- ServiceUsageRecord.class.newInstance().getRecordType());
-
- // Start Aggregation Date
- Calendar aggregationStartCalendar = Utility.getAggregationStartCalendar(2017, Calendar.APRIL,
- 17);
- String aggregationStartDate = AccountingAggregatorPlugin.AGGREGATION_START_DATE_DATE_FORMAT
- .format(aggregationStartCalendar.getTime());
- logger.trace("{} : {}", AccountingAggregatorPlugin.AGGREGATION_START_DATE_INPUT_PARAMETER,
- aggregationStartDate);
- inputs.put(AccountingAggregatorPlugin.AGGREGATION_START_DATE_INPUT_PARAMETER, aggregationStartDate);
-
- inputs.put(AccountingAggregatorPlugin.RESTART_FROM_LAST_AGGREGATION_DATE_INPUT_PARAMETER, false);
-
- return inputs;
- }
-
+
private void launch(Scheduling scheduling, Map inputs) throws Exception {
LaunchParameter launchParameter = new LaunchParameter(AccountingAggregatorPluginDeclaration.NAME, inputs);
@@ -85,6 +51,35 @@ public class AggregatorAccountingPluginSmartExecutorSchedulerTest extends Scoped
throw e;
}
}
+
+ private Map getAggregateInputs() throws Exception {
+ Map inputs = new HashMap();
+ inputs.put(AccountingAggregatorPlugin.AGGREGATION_TYPE_INPUT_PARAMETER, AggregationType.DAILY.name());
+
+ inputs.put(AccountingAggregatorPlugin.ELABORATION_TYPE_INPUT_PARAMETER, ElaborationType.AGGREGATE.name());
+
+ inputs.put(AccountingAggregatorPlugin.PERSIST_START_TIME_INPUT_PARAMETER, Utility.getPersistTimeParameter(3, 0));
+ inputs.put(AccountingAggregatorPlugin.PERSIST_END_TIME_INPUT_PARAMETER, Utility.getPersistTimeParameter(19, 0));
+
+ inputs.put(AccountingAggregatorPlugin.RECORD_TYPE_INPUT_PARAMETER,
+ ServiceUsageRecord.class.newInstance().getRecordType());
+
+ Calendar aggregationStartCalendar = Utility.getAggregationStartCalendar(2017, Calendar.SEPTEMBER, 22);
+ String aggregationStartDate = AccountingAggregatorPlugin.AGGREGATION_START_DATE_DATE_FORMAT.format(aggregationStartCalendar.getTime());
+ logger.trace("{} : {}", AccountingAggregatorPlugin.AGGREGATION_START_DATE_INPUT_PARAMETER, aggregationStartDate);
+ inputs.put(AccountingAggregatorPlugin.AGGREGATION_START_DATE_INPUT_PARAMETER, aggregationStartDate);
+
+ /*
+ Calendar aggregationEndCalendar = Utility.getAggregationStartCalendar(2017, Calendar.SEPTEMBER, 23);
+ String aggregationEndDate = AccountingAggregatorPlugin.AGGREGATION_START_DATE_DATE_FORMAT.format(aggregationEndCalendar.getTime());
+ logger.trace("{} : {}", AccountingAggregatorPlugin.AGGREGATION_START_DATE_INPUT_PARAMETER, aggregationEndDate);
+ inputs.put(AccountingAggregatorPlugin.AGGREGATION_END_DATE_INPUT_PARAMETER, aggregationEndDate);
+ */
+
+ inputs.put(AccountingAggregatorPlugin.RESTART_FROM_LAST_AGGREGATION_DATE_INPUT_PARAMETER, false);
+
+ return inputs;
+ }
@Test
public void aggregate() throws Exception {
@@ -98,17 +93,84 @@ public class AggregatorAccountingPluginSmartExecutorSchedulerTest extends Scoped
launch(null, inputs);
}
+
+
+
+ private Map getMonthlyAggregateInputs() throws Exception {
+ Map inputs = new HashMap();
+ inputs.put(AccountingAggregatorPlugin.AGGREGATION_TYPE_INPUT_PARAMETER, AggregationType.MONTHLY.name());
+
+ inputs.put(AccountingAggregatorPlugin.ELABORATION_TYPE_INPUT_PARAMETER, ElaborationType.AGGREGATE.name());
+
+ inputs.put(AccountingAggregatorPlugin.PERSIST_START_TIME_INPUT_PARAMETER, Utility.getPersistTimeParameter(3, 0));
+ inputs.put(AccountingAggregatorPlugin.PERSIST_END_TIME_INPUT_PARAMETER, Utility.getPersistTimeParameter(19, 0));
+
+ inputs.put(AccountingAggregatorPlugin.RECORD_TYPE_INPUT_PARAMETER,
+ StorageUsageRecord.class.newInstance().getRecordType());
+
+ Calendar aggregationStartCalendar = Utility.getAggregationStartCalendar(2016, Calendar.JANUARY, 1);
+ String aggregationStartDate = AccountingAggregatorPlugin.AGGREGATION_START_DATE_DATE_FORMAT.format(aggregationStartCalendar.getTime());
+ logger.trace("{} : {}", AccountingAggregatorPlugin.AGGREGATION_START_DATE_INPUT_PARAMETER, aggregationStartDate);
+ inputs.put(AccountingAggregatorPlugin.AGGREGATION_START_DATE_INPUT_PARAMETER, aggregationStartDate);
+ /*
+ Calendar aggregationEndCalendar = Utility.getAggregationStartCalendar(2017, Calendar.JUNE, 30);
+ String aggregationEndDate = AccountingAggregatorPlugin.AGGREGATION_START_DATE_DATE_FORMAT.format(aggregationEndCalendar.getTime());
+ logger.trace("{} : {}", AccountingAggregatorPlugin.AGGREGATION_START_DATE_INPUT_PARAMETER, aggregationEndDate);
+ inputs.put(AccountingAggregatorPlugin.AGGREGATION_END_DATE_INPUT_PARAMETER, aggregationEndDate);
+ */
+
+ inputs.put(AccountingAggregatorPlugin.RESTART_FROM_LAST_AGGREGATION_DATE_INPUT_PARAMETER, true);
+
+ return inputs;
+ }
+
+ @Test
+ public void aggregateMonthly() throws Exception {
+ CronExpression cronExpression = new CronExpression("0 0/5 * 1/1 * ? *");
+ Scheduling scheduling = new Scheduling(cronExpression, true);
+ scheduling.setGlobal(false);
+
+ Map inputs = getMonthlyAggregateInputs();
+ launch(scheduling, inputs);
+ }
+
+
+ /* ----------------------------------------------------------------------------------- */
+
+
+ private Map getRecoveryInputs() throws Exception {
+ Map inputs = new HashMap();
+ inputs.put(AccountingAggregatorPlugin.ELABORATION_TYPE_INPUT_PARAMETER, ElaborationType.RECOVERY.name());
+
+ inputs.put(AccountingAggregatorPlugin.PERSIST_START_TIME_INPUT_PARAMETER, Utility.getPersistTimeParameter(3, 0));
+ inputs.put(AccountingAggregatorPlugin.PERSIST_END_TIME_INPUT_PARAMETER, Utility.getPersistTimeParameter(19, 0));
+
+
+ Calendar aggregationStartCalendar = Utility.getAggregationStartCalendar(2017, Calendar.AUGUST, 1);
+ String aggregationStartDate = AccountingAggregatorPlugin.AGGREGATION_START_DATE_DATE_FORMAT.format(aggregationStartCalendar.getTime());
+ logger.trace("{} : {}", AccountingAggregatorPlugin.AGGREGATION_START_DATE_INPUT_PARAMETER, aggregationStartDate);
+ inputs.put(AccountingAggregatorPlugin.AGGREGATION_START_DATE_INPUT_PARAMETER, aggregationStartDate);
+
+ /*
+ Calendar aggregationEndCalendar = Utility.getAggregationStartCalendar(2017, Calendar.JUNE, 22);
+ String aggregationEndDate = AccountingAggregatorPlugin.AGGREGATION_START_DATE_DATE_FORMAT.format(aggregationEndCalendar.getTime());
+ logger.trace("{} : {}", AccountingAggregatorPlugin.AGGREGATION_START_DATE_INPUT_PARAMETER, aggregationEndDate);
+ inputs.put(AccountingAggregatorPlugin.AGGREGATION_END_DATE_INPUT_PARAMETER, aggregationEndDate);
+ */
+
+ return inputs;
+ }
@Test
public void recovery() throws Exception {
-// Every Day at 8:00
+// Every 5 minutes, from 03:00 to 19:59, every day (matches the cron expression below)
- CronExpression cronExpression = new CronExpression("0 0/15 4-19 1/1 * ? *");
+ CronExpression cronExpression = new CronExpression("0 0/5 3-19 1/1 * ? *");
Scheduling scheduling = new Scheduling(cronExpression, true);
scheduling.setGlobal(false);
Map inputs = getRecoveryInputs();
- launch(null, inputs);
+ launch(scheduling, inputs);
}
diff --git a/src/test/java/org/gcube/accounting/analytics/persistence/couchbase/RemoveOldRecords.java b/src/test/java/org/gcube/accounting/analytics/persistence/couchbase/RemoveOldRecords.java
index f77835e..d79382a 100644
--- a/src/test/java/org/gcube/accounting/analytics/persistence/couchbase/RemoveOldRecords.java
+++ b/src/test/java/org/gcube/accounting/analytics/persistence/couchbase/RemoveOldRecords.java
@@ -81,7 +81,7 @@ public class RemoveOldRecords extends ScopedTest {
List buckets = new ArrayList();
- buckets.add(accountingPersistenceQueryCouchBase.connectionMa.get("ServiceUsageRecord"));
+ buckets.add(accountingPersistenceQueryCouchBase.connectionMap.get("ServiceUsageRecord"));
for(Bucket bucket : buckets){
diff --git a/src/test/java/org/gcube/accounting/persistence/PersistenceCouchBaseTest.java b/src/test/java/org/gcube/accounting/persistence/PersistenceCouchBaseTest.java
index ea24054..ef832a1 100644
--- a/src/test/java/org/gcube/accounting/persistence/PersistenceCouchBaseTest.java
+++ b/src/test/java/org/gcube/accounting/persistence/PersistenceCouchBaseTest.java
@@ -106,4 +106,5 @@ public class PersistenceCouchBaseTest extends ScopedTest {
Assert.assertNotEquals(first, second);
}
+
}
diff --git a/src/test/java/org/gcube/documentstore/persistence/PersistenceCouchBaseTest.java b/src/test/java/org/gcube/documentstore/persistence/PersistenceCouchBaseTest.java
index b3b7967..120681b 100644
--- a/src/test/java/org/gcube/documentstore/persistence/PersistenceCouchBaseTest.java
+++ b/src/test/java/org/gcube/documentstore/persistence/PersistenceCouchBaseTest.java
@@ -4,6 +4,7 @@
package org.gcube.documentstore.persistence;
import org.gcube.accounting.datamodel.usagerecords.TestUsageRecord;
+import org.gcube.accounting.persistence.AccountingPersistenceFactory;
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
import org.gcube.documentstore.records.DSMapper;
import org.gcube.documentstore.records.Record;
@@ -35,11 +36,15 @@ public class PersistenceCouchBaseTest extends ScopedTest {
@Test
public void persistenceIsCouchBaseForcingImmediateRediscovery() throws ObjectNotFound, Exception {
+
+ AccountingPersistenceFactory.initAccountingPackages();
+
PersistenceBackendFactory.setFallbackLocation(null);
String context = ScopedTest.getCurrentContext();
PersistenceBackendFactory.forceImmediateRediscovery(context);
PersistenceBackend persistenceBackend = PersistenceBackendFactory.getPersistenceBackend(context);
Assert.assertTrue(persistenceBackend instanceof PersistenceCouchBase);
+
}
diff --git a/src/test/java/org/gcube/informationsystem/exporter/ISExporterPluginSmartExecutorSchedulerTest.java b/src/test/java/org/gcube/informationsystem/exporter/ISExporterPluginSmartExecutorSchedulerTest.java
index 8b4af55..6a5dcbf 100644
--- a/src/test/java/org/gcube/informationsystem/exporter/ISExporterPluginSmartExecutorSchedulerTest.java
+++ b/src/test/java/org/gcube/informationsystem/exporter/ISExporterPluginSmartExecutorSchedulerTest.java
@@ -3,6 +3,7 @@
*/
package org.gcube.informationsystem.exporter;
+import java.io.File;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
@@ -19,6 +20,9 @@ import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
/**
* @author Luca Frosini (ISTI - CNR)
*/
@@ -49,6 +53,52 @@ public class ISExporterPluginSmartExecutorSchedulerTest extends ScopedTest {
}
+ @Test
+ public void production() throws Exception {
+ File src = new File("src");
+ File test = new File(src, "test");
+ File resources = new File(test, "resources");
+
+ File tokenFile = new File(resources, "production-tokens-is-exporter.json");
+
+ ObjectMapper objectMapper = new ObjectMapper();
+ JsonNode jsonNode = objectMapper.readTree(tokenFile);
+
+ Map contextSecondMinutes = new HashMap<>();
+ contextSecondMinutes.put("/d4science.research-infrastructures.eu", "0 0");
+ contextSecondMinutes.put("/d4science.research-infrastructures.eu/ParthenosVO", "30 7");
+ contextSecondMinutes.put("/d4science.research-infrastructures.eu/ParthenosVO/RubRIcA", "0 15");
+ contextSecondMinutes.put("/d4science.research-infrastructures.eu/ParthenosVO/PARTHENOS_Registry", "30 22");
+ contextSecondMinutes.put("/d4science.research-infrastructures.eu/gCubeApps", "0 30");
+ contextSecondMinutes.put("/d4science.research-infrastructures.eu/gCubeApps/Parthenos", "30 37");
+ contextSecondMinutes.put("/d4science.research-infrastructures.eu/D4Research", "0 45");
+ contextSecondMinutes.put("/d4science.research-infrastructures.eu/D4Research/NERLiX", "30 52");
+
+ for(String context : contextSecondMinutes.keySet()){
+ logger.info("\n\n\n-------------------------------------------------------------------------");
+
+ String token = jsonNode.get(context).asText();
+
+ ScopedTest.setContext(token);
+
+ CronExpression cronExpression = new CronExpression(contextSecondMinutes.get(context) + " 0/1 * * ?"); // every hour, at the second/minute offset given by contextSecondMinutes.get(context)
+ Scheduling scheduling = new Scheduling(cronExpression, true);
+ scheduling.setGlobal(true);
+
+ logger.debug("{} : {} : {}", context, token, cronExpression.getCronExpression());
+
+
+
+ //UUID uuid = scheduleTest(scheduling);
+ //logger.debug("Launched with UUID : {}", uuid);
+
+ logger.info("\n\n\n");
+ }
+
+
+ }
+
+
@Test
public void cronExpPreviousMustBeTerminated() throws Exception {
diff --git a/src/test/java/org/gcube/resourceregistry/ContextCreator.java b/src/test/java/org/gcube/resourceregistry/ContextCreator.java
new file mode 100644
index 0000000..d15edae
--- /dev/null
+++ b/src/test/java/org/gcube/resourceregistry/ContextCreator.java
@@ -0,0 +1,53 @@
+package org.gcube.resourceregistry;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+
+import org.gcube.common.scope.impl.ScopeBean;
+import org.gcube.context.ContextElaborator;
+import org.gcube.informationsystem.impl.utils.ISMapper;
+import org.gcube.informationsystem.model.entity.Context;
+import org.gcube.informationsystem.resourceregistry.context.ContextManagement;
+import org.gcube.informationsystem.resourceregistry.context.ContextManagementImpl;
+
+/**
+ * @author Luca Frosini (ISTI - CNR)
+ */
+public class ContextCreator extends ContextElaborator {
+
+ protected Map contexts;
+
+ protected ContextManagement contextManagement;
+
+ public ContextCreator() {
+ super();
+ contexts = new HashMap<>();
+ contextManagement = new ContextManagementImpl();
+ }
+
+ @Override
+ protected void elaborateContext(ScopeBean scopeBean) throws Exception {
+ UUID parentUUID = null;
+ switch(scopeBean.type()) {
+ case INFRASTRUCTURE:
+ parentUUID = null;
+ break;
+
+ default:
+ parentUUID = getParentUUID(scopeBean);
+ break;
+ }
+
+
+ String created = contextManagement.create(parentUUID, scopeBean.name());
+ Context context = ISMapper.unmarshal(Context.class, created);
+ contexts.put(scopeBean.toString(), context.getHeader().getUUID());
+ }
+
+ private UUID getParentUUID(ScopeBean scopeBean) {
+ String parent = scopeBean.toString().replace("/" + scopeBean.name(), "");
+ return contexts.get(parent);
+ }
+
+}
diff --git a/src/test/java/org/gcube/resourceregistry/ContextCreatorTest.java b/src/test/java/org/gcube/resourceregistry/ContextCreatorTest.java
new file mode 100644
index 0000000..7bd26ea
--- /dev/null
+++ b/src/test/java/org/gcube/resourceregistry/ContextCreatorTest.java
@@ -0,0 +1,22 @@
+package org.gcube.resourceregistry;
+
+import java.io.File;
+
+import org.junit.Test;
+
+public class ContextCreatorTest extends org.gcube.ScopedTest {
+
+ @Test
+ public void createAll() throws Exception {
+ File src = new File("src");
+ File test = new File(src, "test");
+ File resources = new File(test, "resources");
+
+ File voFile = new File(resources, "scopedata.xml");
+
+ ContextCreator contextCreator = new ContextCreator();
+ contextCreator.all(voFile);
+
+ }
+
+}
diff --git a/src/test/java/org/gcube/testutility/ScopedTest.java b/src/test/java/org/gcube/testutility/ScopedTest.java
index 5b4501a..44e4d59 100644
--- a/src/test/java/org/gcube/testutility/ScopedTest.java
+++ b/src/test/java/org/gcube/testutility/ScopedTest.java
@@ -42,6 +42,9 @@ public class ScopedTest {
public static final String GCUBE_VARNAME = "GCUBE";
public static final String GCUBE;
+ public static final String ROOT_VARNAME = "ROOT";
+ public static final String ROOT;
+
public static final String DEFAULT_TEST_SCOPE;
public static final String ALTERNATIVE_TEST_SCOPE;
@@ -64,9 +67,11 @@ public class ScopedTest {
GCUBE_DEVSEC_DEVVRE = properties.getProperty(GCUBE_DEVSEC_DEVVRE_VARNAME);
GCUBE = properties.getProperty(GCUBE_VARNAME);
-
+
+ ROOT = properties.getProperty(ROOT_VARNAME);
+
DEFAULT_TEST_SCOPE = GCUBE_DEVNEXT;
- ALTERNATIVE_TEST_SCOPE = GCUBE_DEVSEC_DEVVRE;
+ ALTERNATIVE_TEST_SCOPE = GCUBE_DEVSEC;
}
public static String getCurrentContext() throws Exception{