Code redesign

Refs #11756: Refactor DataHarvesterPlugin to support scheduled execution from smart-executor

Task-Url: https://support.d4science.org/issues/11756

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/accounting/accounting-dashboard-harvester-se-plugin@167791 82a268e6-3cf1-43bd-a215-b396298e98cf
Luca Frosini 2018-05-28 13:22:59 +00:00
parent 3c6b05d00b
commit 9f40118fa7
11 changed files with 68 additions and 49 deletions

File: AccountingDataHarvesterPlugin.java

@@ -120,8 +120,8 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
 			end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
-			logger.debug("Harvesting from {} to {}", DateUtils.LAUNCH_DATE_FORMAT.format(start),
-					DateUtils.LAUNCH_DATE_FORMAT.format(end));
+			logger.debug("Harvesting from {} to {}", DateUtils.format(start),
+					DateUtils.format(end));
 			getConfigParameters();

File: BasicHarvester.java

@@ -9,6 +9,7 @@ import org.gcube.common.authorization.library.AuthorizationEntry;
 import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
 import org.gcube.common.scope.api.ScopeProvider;
 import org.gcube.dataharvest.datamodel.HarvestedData;
+import org.gcube.dataharvest.utils.DateUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -16,12 +17,14 @@ public abstract class BasicHarvester {
 	private static Logger logger = LoggerFactory.getLogger(BasicHarvester.class);
-	public Date startDate;
-	public Date endDate;
+	public Date start;
+	public Date end;
 	public BasicHarvester(Date start, Date end) throws ParseException {
-		startDate = start;
-		endDate = end;
+		this.start = start;
+		this.end = end;
+		logger.debug("Creating {} for the period {} {} ", this.getClass().getSimpleName(), DateUtils.format(start), DateUtils.format(end));
 	}
 	public static String getCurrentContext(String token) throws Exception {
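Not part of this commit, but worth noting while these fields are being renamed: start and end remain public, and java.util.Date is itself mutable, so a harvester's period can be altered from outside after construction. Under scheduled execution that shared state is riskier. A defensive variant, purely a sketch and not what the commit does, could look like:

    // Hypothetical hardening sketch (not in this commit): immutable period exposure.
    public abstract class BasicHarvester {
        protected final Date start;
        protected final Date end;

        public BasicHarvester(Date start, Date end) {
            // defensive copies keep later mutations of the caller's Date objects out
            this.start = new Date(start.getTime());
            this.end = new Date(end.getTime());
        }

        public Date getStart() {
            return new Date(start.getTime());
        }

        public Date getEnd() {
            return new Date(end.getTime());
        }
    }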

File: MethodInvocationHarvester.java

@@ -44,7 +44,7 @@ public class MethodInvocationHarvester extends BasicHarvester {
 		List<HarvestedData> data = new ArrayList<>();
 		AccountingPersistenceQuery accountingPersistenceQuery = AccountingPersistenceQueryFactory.getInstance();
-		TemporalConstraint temporalConstraint = new TemporalConstraint(startDate.getTime(), endDate.getTime(),
+		TemporalConstraint temporalConstraint = new TemporalConstraint(start.getTime(), end.getTime(),
 				AggregationMode.MONTHLY);
 		List<Filter> filters = new ArrayList<>();
@@ -65,10 +65,10 @@ public class MethodInvocationHarvester extends BasicHarvester {
 			for(Filter filter : result.keySet()) {
 				SortedMap<Calendar,Info> infoMap = result.get(filter);
-				Calendar calendar = DateUtils.dateToCalendar(startDate);
+				Calendar calendar = DateUtils.dateToCalendar(start);
 				Info info = infoMap.get(calendar);
-				logger.debug("{} : {}", DateUtils.LAUNCH_DATE_FORMAT.format(calendar.getTime()), info);
+				logger.debug("{} : {}", DateUtils.format(calendar), info);
 				JSONObject jsonObject = info.getValue();
 				long numberOfInvocation = jsonObject.getLong(AggregatedUsageRecord.OPERATION_COUNT);

File: SocialInteractionsHarvester.java

@@ -83,7 +83,7 @@ public class SocialInteractionsHarvester extends SocialNetworkingHarvester {
 			JSONObject item = res.getJSONObject(i);
 			long time = item.getLong("time");
-			if(startDate.getTime() <= time && time <= endDate.getTime()) {
+			if(start.getTime() <= time && time <= end.getTime()) {
 				posts++;
 				replies += item.getInt("comments_no");
 				likes += item.getInt("likes_no");

File: DataMethodDownloadHarvester.java

@@ -76,7 +76,7 @@ public class DataMethodDownloadHarvester extends SoBigDataHarvester {
 		logger.debug("Getting item by Path: "+path);
 		JCRWorkspaceItem item = (JCRWorkspaceItem) ws.getItemByPath(path);
 		//
-		logger.info("Analyzing " + defaultContext + " in the period [" + startDate.toString() + " to " + endDate.toString() +"] starting from root: "+item.getName());
+		logger.info("Analyzing " + defaultContext + " in the period [" + start.toString() + " to " + end.toString() +"] starting from root: "+item.getName());
 		HarvestedData defaultHarvesteData = new HarvestedData(HarvestedDataKey.DATA_METHOD_DOWNLOAD, defaultContext, count);
@@ -87,7 +87,7 @@ public class DataMethodDownloadHarvester extends SoBigDataHarvester {
 			//Getting statistics for folder
 			if(children.isFolder()){
 				logger.info("Getting statistics for folder: "+children.getName());
-				getStats(children, startDate, endDate);
+				getStats(children, start, end);
 				String normalizedName = children.getName().replaceAll("[^A-Za-z0-9]","");
 				String scope = mapWsFolderNameToVRE.get(normalizedName);
@@ -110,7 +110,7 @@ public class DataMethodDownloadHarvester extends SoBigDataHarvester {
 		//ADDING DEFAULT ACCOUNTING
 		data.add(defaultHarvesteData);
-		logger.info("In the period [from {} to {} ] returning workspace accouting data {}", DateUtils.LAUNCH_DATE_FORMAT.format(startDate), DateUtils.LAUNCH_DATE_FORMAT.format(endDate), data);
+		logger.info("In the period [from {} to {} ] returning workspace accouting data {}", DateUtils.format(start), DateUtils.format(end), data);
 		return data;

File: ResourceCatalogueHarvester.java

@@ -71,9 +71,9 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
 			List<String> solrParameters = new ArrayList<String>(1);
 			solrParameters.add("extras_systemtype:\""+systemType+"\"");
 			//EXECUTING THE QUERY IN THE PERIOD
-			String queryResult = executeQueryFor(solrParameters, startDate, endDate, "groups");
+			String queryResult = executeQueryFor(solrParameters, start, end, "groups");
 			HarvestedDataKey insertDBKey = HarvestedDataKey.valueOf(mapSystemTypeToDBEntry.get(systemType));
-			logger.info("Creating statistics for type: "+systemType+ " using db key "+insertDBKey);
+			logger.info("Creating statistics for type {} using db key {}", systemType, insertDBKey);
 			data.addAll(buildListOfHarvestedData(queryResult, insertDBKey));
 		}
@@ -97,7 +97,7 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
 		if(status != 0) {
 			String err = "Query Deliverable in error: status " + status;
 			logger.error(err);
-			throw new Exception(err, null);
+			throw new Exception(err);
 		}
 		JSONObject response = jsonObject.getJSONObject("response");
@@ -130,7 +130,6 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
 				counter.put(catalogueGroupName, currentCount+1);
 			else{
 				logger.warn("No mapping found for Catalogue-Group Name {} from VREs. Accounting it in the catalogue context {}", catalogueGroupName, catalogueContext);
-				//counter.put(catalogueContext, counter.get(catalogueContext)+1);
 				catalogueContextCount++;
 			}
@@ -159,7 +158,7 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
 			data.add(new HarvestedData(harvestKey, mapCatalogueGroupToVRE.get(key), counter.get(key)));
 		}
-		logger.info("For {} in the period [from {} to {}] returning accouting data :", harvestKey, DateUtils.LAUNCH_DATE_FORMAT.format(startDate), DateUtils.LAUNCH_DATE_FORMAT.format(endDate), data);
+		logger.info("For {} in the period [from {} to {}] returning accouting data :", harvestKey, DateUtils.format(start), DateUtils.format(end), data);
 		return data;
@@ -201,9 +200,9 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
 		query += "q=" + UrlEncoderUtil.encodeQuery(q) + "&wt=json&indent=true&rows="+ROWS;
 		query += flValue!=null && !flValue.isEmpty()?"&fl="+UrlEncoderUtil.encodeQuery(flValue):"";
-		logger.debug("\nPerforming query: "+query);
+		logger.debug("\nPerforming query {}", query);
 		String jsonResult = Utils.getJson(query);
-		logger.trace("Response is: "+jsonResult);
+		logger.trace("Response is {}", jsonResult);
 		return jsonResult;
 	}
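Several hunks in this file switch from string concatenation to SLF4J parameterized logging. The difference is when the message is built: with concatenation the argument string is assembled on every call, while with {} placeholders formatting is deferred until the level is actually enabled, which matters for the potentially large jsonResult at TRACE. An illustrative contrast (logger as in the class above):

    // Concatenation assembles the message even when TRACE is disabled:
    logger.trace("Response is: " + jsonResult);
    // Parameterized logging defers formatting until the level is enabled:
    logger.trace("Response is {}", jsonResult);

One caveat the commit carries over: the "For {} in the period [from {} to {}] ..." message has three placeholders but four arguments, so SLF4J silently drops the trailing data argument and the harvested data is never printed.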

File: SoBigDataHarvester.java

@@ -50,8 +50,6 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 	//Added by Francesco
 	private DataCatalogueFactory catalogueFactory;
-	public static final String SO_BIG_DATA_CONTEXT = "/d4science.research-infrastructures.eu/SoBigData";
 	protected SortedSet<String> contexts;
 	/**
@@ -70,11 +68,11 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 		String currentContext = Utils.getCurrentContext();
-		// Truncating the context to the last / (the last is retained for filtering issues)
+		// Truncating the context to the last / (the last / is retained for filtering issues)
 		String baseContext = currentContext.substring(0, currentContext.lastIndexOf("/")+1);
 		this.contexts = getValidContexts(contexts, baseContext);
-		logger.trace("Valid contexts are {}", contexts);
+		logger.trace("Valid contexts are {}", this.contexts);
 		initMappingMaps();
@@ -92,7 +90,6 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 		mapSystemTypeToDBEntry = new HashMap<String,String>();
 		for(String key : keys) {
-			//System.out.println(key + " : " + properties.getProperty(key));
 			try {
 				HarvestedDataKey valueEnum = HarvestedDataKey.valueOf(key);
 				mapSystemTypeToDBEntry.put(properties.getProperty(key), valueEnum.name());
@@ -101,7 +98,7 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 			}
 		}
-		logger.info("Built from properties the mapping 'SystemType' to 'DB entry' : " + mapSystemTypeToDBEntry);
+		logger.info("Built from properties the mapping 'SystemType' to 'DB entry' {}", mapSystemTypeToDBEntry);
 		String currentContext = Utils.getCurrentContext();
@@ -109,7 +106,7 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 		List<String> groups = loadGroupsFromCKAN(currentContext);
 		//NORMALIZING THE GROUP NAME TO MATCH WITH VRE NAME
 		Map<String,String> mapNormalizedGroups = normalizeGroups(groups);
-		logger.debug("Map of Normalized Groups is: " + mapNormalizedGroups);
+		logger.debug("Map of Normalized Groups is {} ", mapNormalizedGroups);
 		//CREATING MAPPING BETWEEN (CATALOGUE GROUP NAME TO VRE NAME)
 		mapCatalogueGroupToVRE = new HashMap<String,String>();
@@ -117,12 +114,10 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 		mapWsFolderNameToVRE = new HashMap<String,String>();
 		Set<String> normalizedGroups = mapNormalizedGroups.keySet();
 		for(String context : contexts) {
-			//logger.trace("Context is: " + context);
 			String loweredVREName = context.substring(context.lastIndexOf("/") + 1, context.length()).toLowerCase();
 			try {
-				//logger.trace("vreName lowered is: " + loweredVREName);
 				if(normalizedGroups.contains(loweredVREName)) {
-					logger.debug("Normalized Groups matching the lowered VRE name: " + loweredVREName);
+					logger.debug("Normalized Groups matching the lowered VRE name {}", loweredVREName);
 					// Creating the map with couple (catalogue group name, scope)
 					mapCatalogueGroupToVRE.put(mapNormalizedGroups.get(loweredVREName), context);
 				}
@@ -133,8 +128,8 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 			}
 		}
-		logger.info("Map of Catalogue Groups To VRE is: " + mapCatalogueGroupToVRE);
-		logger.info("Map of (lowered) Ws Folder Name To VRE is: " + mapWsFolderNameToVRE);
+		logger.info("Map of Catalogue Groups To VRE is {} ", mapCatalogueGroupToVRE);
+		logger.info("Map of (lowered) Ws Folder Name To VRE is {}", mapWsFolderNameToVRE);
 	}
@@ -176,7 +171,7 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 				groups.add(ckanGroup.getName());
 			}
 		} catch(Exception e) {
-			logger.error("Error occurred on getting CKAN groups for scope: " + scope + " and CKAN URL: " + ckanURL, e);
+			logger.error("Error occurred on getting CKAN groups for scope {} and CKAN URL {}", scope, ckanURL, e);
 		}
 		return groups;
@@ -218,7 +213,7 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 	public SortedSet<String> getValidContexts(Set<String> contexts, String base) {
 		SortedSet<String> filteredContext = new TreeSet<>();
 		for(String context : contexts) {
-			if(context.startsWith(SO_BIG_DATA_CONTEXT)) {
+			if(context.startsWith(base)) {
 				filteredContext.add(context);
 			}
 		}
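The key behavioral change in this file: the hard-coded SO_BIG_DATA_CONTEXT constant is dropped and getValidContexts now filters by the base prefix computed from the current context, with the trailing "/" retained so the enclosing root scope itself does not match. A minimal illustration of the new predicate, with hypothetical scope strings:

    // Hypothetical scopes illustrating the startsWith(base) filter:
    String base = "/d4science.research-infrastructures.eu/SoBigData/";
    String root = "/d4science.research-infrastructures.eu/SoBigData";        // root scope, no trailing "/"
    String vre = "/d4science.research-infrastructures.eu/SoBigData/SomeVRE"; // a child VRE (invented name)

    root.startsWith(base); // false: the root is filtered out
    vre.startsWith(base);  // true: child VREs under the base survive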

File: TagMeMethodInvocationHarvester.java

@@ -46,7 +46,7 @@ public class TagMeMethodInvocationHarvester extends BasicHarvester {
 		List<HarvestedData> data = new ArrayList<>();
 		AccountingPersistenceQuery accountingPersistenceQuery = AccountingPersistenceQueryFactory.getInstance();
-		TemporalConstraint temporalConstraint = new TemporalConstraint(startDate.getTime(), endDate.getTime(),
+		TemporalConstraint temporalConstraint = new TemporalConstraint(start.getTime(), end.getTime(),
 				AggregationMode.MONTHLY);
 		List<Filter> filters = new ArrayList<>();
@@ -64,10 +64,10 @@ public class TagMeMethodInvocationHarvester extends BasicHarvester {
 			for(Filter filter : result.keySet()) {
 				SortedMap<Calendar,Info> infoMap = result.get(filter);
-				Calendar calendar = DateUtils.dateToCalendar(startDate);
+				Calendar calendar = DateUtils.dateToCalendar(start);
 				Info info = infoMap.get(calendar);
-				logger.debug("{} : {}", DateUtils.LAUNCH_DATE_FORMAT.format(calendar.getTime()), info);
+				logger.debug("{} : {}", DateUtils.format(calendar), info);
 				JSONObject jsonObject = info.getValue();
 				long numberOfInvocation = jsonObject.getLong(AggregatedUsageRecord.OPERATION_COUNT);

File: DateUtils.java

@@ -106,6 +106,15 @@ public class DateUtils {
 		return calendar;
 	}
+	public static String format(Date date) {
+		return DateUtils.LAUNCH_DATE_FORMAT.format(date);
+	}
+
+	public static String format(Calendar calendar) {
+		return format(calendar.getTime());
+	}
+
 	public static String dateToStringWithTZ(Date date) {
 		DateFormat formatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
 		return formatter.format(date) + "Z";
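A side note on the new helpers: if LAUNCH_DATE_FORMAT is a shared java.text.SimpleDateFormat (its declaration is outside this diff), it is not thread-safe, and the smart-executor scheduling that motivates this commit can run harvesters concurrently. A conventional guard, sketched under that assumption (the date pattern here is invented for illustration):

    // Sketch only; assumes LAUNCH_DATE_FORMAT is a SimpleDateFormat. Pattern is illustrative.
    private static final ThreadLocal<SimpleDateFormat> LAUNCH_DATE_FORMAT_TL =
            ThreadLocal.withInitial(() -> new SimpleDateFormat("yyyy-MM-dd HH:mm"));

    public static String format(Date date) {
        return LAUNCH_DATE_FORMAT_TL.get().format(date); // each thread gets its own formatter
    }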

File: AccountingDataHarvesterPluginTest.java

@@ -13,7 +13,6 @@ import org.gcube.dataharvest.datamodel.HarvestedData;
 import org.gcube.dataharvest.harvester.MethodInvocationHarvester;
 import org.gcube.dataharvest.harvester.sobigdata.DataMethodDownloadHarvester;
 import org.gcube.dataharvest.harvester.sobigdata.ResourceCatalogueHarvester;
-import org.gcube.dataharvest.harvester.sobigdata.SoBigDataHarvester;
 import org.gcube.dataharvest.harvester.sobigdata.TagMeMethodInvocationHarvester;
 import org.gcube.dataharvest.utils.ContextTest;
 import org.gcube.dataharvest.utils.DateUtils;
@@ -27,6 +26,23 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
 	private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterPluginTest.class);
+	public static final String SO_BIG_DATA_CONTEXT = "/d4science.research-infrastructures.eu/SoBigData";
+
+	public static SortedSet<String> getContexts() throws Exception {
+		SortedSet<String> contexts = new TreeSet<>();
+		LinkedHashMap<String,ScopeBean> map = ContextManager.readContexts();
+		for(String scope : map.keySet()) {
+			try {
+				String context = map.get(scope).toString();
+				contexts.add(context);
+			} catch(Exception e) {
+				throw e;
+			}
+		}
+		return contexts;
+	}
+
 	@Test
 	public void test() {
 		try {
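The new getContexts() helper converts each ScopeBean from ContextManager.readContexts() to its string form; note that the try/catch around the conversion only rethrows, so it currently has no effect. An equivalent compact form (a sketch, not what the commit adds):

    // Sketch: same behavior as the committed helper, without the rethrowing catch.
    public static SortedSet<String> getContexts() throws Exception {
        SortedSet<String> contexts = new TreeSet<>();
        for(ScopeBean scopeBean : ContextManager.readContexts().values()) {
            contexts.add(scopeBean.toString()); // assumes ScopeBean.toString() yields the full scope path
        }
        return contexts;
    }

Also note the tests below previously built their context sets from the map keys; whether the keys and ScopeBean.toString() agree is not shown in this diff.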
@@ -114,15 +130,13 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
 			Date start = DateUtils.getPreviousPeriod(measureType).getTime();
 			Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
-			LinkedHashMap<String,ScopeBean> map = ContextManager.readContexts();
-			SortedSet<String> contexts = new TreeSet<>(map.keySet());
+			SortedSet<String> contexts = getContexts();
 			AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(null);
 			accountingDataHarvesterPlugin.getConfigParameters();
 			ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end, contexts);
-			SortedSet<String> validContexts = resourceCatalogueHarvester.getValidContexts(contexts, SoBigDataHarvester.SO_BIG_DATA_CONTEXT);
+			SortedSet<String> validContexts = resourceCatalogueHarvester.getValidContexts(contexts, SO_BIG_DATA_CONTEXT + "/");
 			logger.info("Valid Contexts {}", validContexts);
 		} catch(Exception e) {
@@ -139,14 +153,14 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
 			MeasureType measureType = MeasureType.MONTHLY;
-			Date start = DateUtils.getStartCalendar(2018, 04, 01).getTime();
+			//Date start = DateUtils.getStartCalendar(2018, Calendar.APRIL, 1).getTime();
+			Date start = DateUtils.getPreviousPeriod(measureType).getTime();
 			Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
 			AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(null);
 			accountingDataHarvesterPlugin.getConfigParameters();
-			LinkedHashMap<String,ScopeBean> map = ContextManager.readContexts();
-			SortedSet<String> contexts = new TreeSet<>(map.keySet());
+			SortedSet<String> contexts = getContexts();
 			ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end, contexts);
 			List<HarvestedData> data = resourceCatalogueHarvester.getData();
@@ -167,14 +181,13 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
 			MeasureType measureType = MeasureType.MONTHLY;
-			Date start = DateUtils.getStartCalendar(2018, 04, 01).getTime();
+			Date start = DateUtils.getPreviousPeriod(measureType).getTime();
 			Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
 			AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(null);
 			accountingDataHarvesterPlugin.getConfigParameters();
-			LinkedHashMap<String,ScopeBean> map = ContextManager.readContexts();
-			SortedSet<String> contexts = new TreeSet<>(map.keySet());
+			SortedSet<String> contexts = getContexts();
 			DataMethodDownloadHarvester resourceCatalogueHarvester = new DataMethodDownloadHarvester(start, end, contexts);
 			List<HarvestedData> data = resourceCatalogueHarvester.getData();

File: logback configuration (XML)

@@ -9,7 +9,7 @@
 	</appender>
-	<logger name="org.gcube" level="INFO" />
+	<logger name="org.gcube" level="WARN" />
 	<logger name="org.gcube.dataharvest" level="TRACE" />
 	<root level="WARN">
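In logback the most specific logger wins, so org.gcube.dataharvest stays at TRACE while everything else under org.gcube drops from INFO to WARN: harvester logging remains verbose and the surrounding gCube stack gets quieter.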