diff --git a/src/main/java/org/gcube/dataharvest/AccountingDataHarvesterPlugin.java b/src/main/java/org/gcube/dataharvest/AccountingDataHarvesterPlugin.java
index f176a25..b2ad9e0 100644
--- a/src/main/java/org/gcube/dataharvest/AccountingDataHarvesterPlugin.java
+++ b/src/main/java/org/gcube/dataharvest/AccountingDataHarvesterPlugin.java
@@ -40,9 +40,7 @@ public class AccountingDataHarvesterPlugin extends Plugin
                 harvested = resourceCatalogueHarvester.getData();
                 data.addAll(harvested);
             } catch(Exception e) {
@@ -177,7 +170,7 @@ public class AccountingDataHarvesterPlugin extends Plugin
                 harvested = dataMethodDownloadHarvester.getData();
                 data.addAll(harvested);
             } catch(Exception e) {
diff --git a/src/main/java/org/gcube/dataharvest/harvester/sobigdata/DataMethodDownloadHarvester.java b/src/main/java/org/gcube/dataharvest/harvester/sobigdata/DataMethodDownloadHarvester.java
index d5b21d4..00341d5 100644
--- a/src/main/java/org/gcube/dataharvest/harvester/sobigdata/DataMethodDownloadHarvester.java
+++ b/src/main/java/org/gcube/dataharvest/harvester/sobigdata/DataMethodDownloadHarvester.java
@@ -46,8 +46,8 @@ public class DataMethodDownloadHarvester extends SoBigDataHarvester {
      * @param contexts the contexts
      * @throws ParseException the parse exception
      */
-    public DataMethodDownloadHarvester(Date start, Date end, String catalogueContext, SortedSet contexts) throws ParseException {
-        super(start, end, catalogueContext, contexts);
+    public DataMethodDownloadHarvester(Date start, Date end, SortedSet contexts) throws Exception {
+        super(start, end, contexts);
     }
 
     /* (non-Javadoc)
@@ -110,10 +110,7 @@ public class DataMethodDownloadHarvester extends SoBigDataHarvester {
         //ADDING DEFAULT ACCOUNTING
         data.add(defaultHarvesteData);
 
-        logger.info("In the period [from "+startDate+" to "+endDate+ "] returning workspace accouting data:");
-        for (HarvestedData harvestedData : data) {
-            logger.info(harvestedData.toString());
-        }
+        logger.info("In the period [from {} to {}] returning workspace accounting data {}", DateUtils.LAUNCH_DATE_FORMAT.format(startDate), DateUtils.LAUNCH_DATE_FORMAT.format(endDate), data);
 
         return data;
diff --git a/src/main/java/org/gcube/dataharvest/harvester/sobigdata/ResourceCatalogueHarvester.java b/src/main/java/org/gcube/dataharvest/harvester/sobigdata/ResourceCatalogueHarvester.java
index 3fafdc9..d143f5d 100644
--- a/src/main/java/org/gcube/dataharvest/harvester/sobigdata/ResourceCatalogueHarvester.java
+++ b/src/main/java/org/gcube/dataharvest/harvester/sobigdata/ResourceCatalogueHarvester.java
@@ -45,13 +45,8 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
      * @param contexts the contexts. They are the VREs
      * @throws Exception the exception
      */
-    public ResourceCatalogueHarvester(Date start, Date end, String catalogueContext, SortedSet contexts) throws Exception {
-        super(start, end, catalogueContext, contexts);
-
-        if(catalogueContext==null || catalogueContext.isEmpty())
-            throw new Exception("The catalogue context is null or empty. Pass a valid scope");
Pass a valid scope"); - - logger.debug("Catalogue context is: "+catalogueContext); + public ResourceCatalogueHarvester(Date start, Date end, SortedSet contexts) throws Exception { + super(start, end, contexts); } /** @@ -65,9 +60,6 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester { return "https://ckan-solr-d4s.d4science.org/solr/sobigdata"; } - /* (non-Javadoc) - * @see org.gcube.dataharvest.harvester.BasicHarvester#getData() - */ @Override public List getData() throws Exception { @@ -115,10 +107,12 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester { for (String groupName : mapCatalogueGroupToVRE.keySet()) { counter.put(groupName, 0); } - + + String catalogueContext = Utils.getCurrentContext(); + //Counter for default context of accounting int catalogueContextCount = 0; - logger.debug("For "+harvestKey+" has found "+numFound+" doc/s"); + logger.debug("For {} has found {} doc/s", harvestKey, numFound); if(numFound > 0) { JSONArray docs = response.getJSONArray("docs"); @@ -129,13 +123,13 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester { Iterator git = groups.iterator(); while(git.hasNext()) { String catalogueGroupName = (String) git.next(); - logger.debug("GroupName found: "+catalogueGroupName); + logger.debug("GroupName found {}", catalogueGroupName); //counterByGroup(groupItem); Integer currentCount = counter.get(catalogueGroupName); if(currentCount!=null) counter.put(catalogueGroupName, currentCount+1); else{ - logger.warn("No mapping found for Catalogue-Group Name: "+catalogueGroupName+" from VREs. Accounting it in the catalogue context: "+catalogueContext); + logger.warn("No mapping found for Catalogue-Group Name {} from VREs. Accounting it in the catalogue context {}", catalogueGroupName, catalogueContext); //counter.put(catalogueContext, counter.get(catalogueContext)+1); catalogueContextCount++; } @@ -153,17 +147,19 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester { } List data = new ArrayList(); - logger.trace("The context: "+catalogueContext + " has count: "+catalogueContextCount); - data.add(new HarvestedData(harvestKey,catalogueContext, catalogueContextCount)); + + String context = Utils.getCurrentContext(); + + logger.trace("The context {} has count ", context, catalogueContextCount); + + data.add(new HarvestedData(harvestKey, context, catalogueContextCount)); + for (String key : counter.keySet()) { - logger.trace("The group: "+key + " has count: "+counter.get(key)); + logger.trace("The group {} has count {}", key, counter.get(key)); data.add(new HarvestedData(harvestKey, mapCatalogueGroupToVRE.get(key), counter.get(key))); } - logger.info("For "+harvestKey+ " in the period [from "+startDate+" to "+endDate+ "] returning accouting data:"); - for (HarvestedData harvestedData : data) { - logger.info(harvestedData.toString()); - } + logger.info("For {} in the period [from {} to {}] returning accouting data :", harvestKey, DateUtils.LAUNCH_DATE_FORMAT.format(startDate), DateUtils.LAUNCH_DATE_FORMAT.format(endDate), data); return data; @@ -212,24 +208,4 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester { return jsonResult; } - /** - * Gets the catalogue context. - * - * @return the catalogueContext - */ - public String getCatalogueContext() { - - return catalogueContext; - } - - /** - * Sets the catalogue context. 
-     *
-     * @param catalogueContext the catalogueContext to set
-     */
-    public void setCatalogueContext(String catalogueContext) {
-
-        this.catalogueContext = catalogueContext;
-    }
-
 }
diff --git a/src/main/java/org/gcube/dataharvest/harvester/sobigdata/SoBigDataHarvester.java b/src/main/java/org/gcube/dataharvest/harvester/sobigdata/SoBigDataHarvester.java
index 3be43de..462827e 100644
--- a/src/main/java/org/gcube/dataharvest/harvester/sobigdata/SoBigDataHarvester.java
+++ b/src/main/java/org/gcube/dataharvest/harvester/sobigdata/SoBigDataHarvester.java
@@ -9,19 +9,21 @@ import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
 import java.util.SortedSet;
+import java.util.TreeSet;
 
 import org.apache.commons.lang.Validate;
+import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
 import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogueFactory;
 import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogueImpl;
 import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
 import org.gcube.dataharvest.datamodel.HarvestedDataKey;
 import org.gcube.dataharvest.harvester.BasicHarvester;
+import org.gcube.dataharvest.utils.Utils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import eu.trentorise.opendata.jackan.model.CkanGroup;
 
-
 /**
  * The Class SoBigDataHarvester.
  *
@@ -30,41 +32,28 @@ import eu.trentorise.opendata.jackan.model.CkanGroup;
  * May 24, 2018
  */
 public abstract class SoBigDataHarvester extends BasicHarvester {
-
+
     private static Logger logger = LoggerFactory.getLogger(SoBigDataHarvester.class);
-
-
+
     //Added by Francesco
     private static final String GROUP_LABEL = "group";
-
+
     //Added by Francesco
-    protected HashMap mapSystemTypeToDBEntry;
-
+    protected HashMap mapSystemTypeToDBEntry;
+
     //Added by Francesco
-    protected HashMap mapCatalogueGroupToVRE;
-
+    protected HashMap mapCatalogueGroupToVRE;
+
     //Added by Francesco
-    protected HashMap mapWsFolderNameToVRE;
-
-    //Added by Francesco
-    protected String catalogueContext;
-
+    protected HashMap mapWsFolderNameToVRE;
+
     //Added by Francesco
     private DataCatalogueFactory catalogueFactory;
-
-
-//    public static String SECONDARY_TYPE_FORMAT = "$resource/Profile/SecondaryType/text() eq '%1s'";
-//    public static String NAME_FORMAT = "$resource/Profile/Name/text() eq '%1s'";
-//
-//    public static String SECONDARY_TYPE = "ExcludingVREs";
-//    public static String NAME = "AccountingHarvesters";
-
-
-
+
     public static final String SO_BIG_DATA_CONTEXT = "/d4science.research-infrastructures.eu/SoBigData";
-
+
     protected SortedSet contexts;
-
+
     /**
      * Instantiates a new so big data harvester.
     *
@@ -74,74 +63,81 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
      * @param vreScopes the contexts
      * @throws ParseException the parse exception
      */
-    public SoBigDataHarvester(Date start, Date end, String catalogueContext, SortedSet vreScopes) throws ParseException {
+    public SoBigDataHarvester(Date start, Date end, SortedSet contexts) throws Exception {
         super(start, end);
-        this.catalogueContext = catalogueContext;
+
         this.catalogueFactory = DataCatalogueFactory.getFactory();
-        this.contexts = vreScopes;
+
+        String currentContext = Utils.getCurrentContext();
+
+        // Truncating the context to the last / (the last is retained for filtering issues)
+        String baseContext = currentContext.substring(0, currentContext.lastIndexOf("/")+1);
+
+        this.contexts = getValidContexts(contexts, baseContext);
+        logger.trace("Valid contexts are {}", contexts);
+
         initMappingMaps();
-
-
-//        this.excludedContexts = getExcludedContexts();
-//        // Adding trailing slash to SO_BIG_DATA_CONTEXT to avoid to get VO
-//        this.contexts = getSoBigDataContexts(contexts, SO_BIG_DATA_CONTEXT + "/");
-//        logger.trace("Valid contexts are {}", contexts);
+
+
     }
-
+
     /**
      * Inits the mapping maps.
+     * @throws Exception
+     * @throws ObjectNotFound
      */
-    protected void initMappingMaps(){
+    protected void initMappingMaps() throws ObjectNotFound, Exception {
         Properties properties = AccountingDataHarvesterPlugin.getProperties().get();
-        Set keys = properties.stringPropertyNames();
-
-        mapSystemTypeToDBEntry = new HashMap();
-        for (String key : keys) {
-            //System.out.println(key + " : " + properties.getProperty(key));
-            try{
-                HarvestedDataKey valueEnum = HarvestedDataKey.valueOf(key);
-                mapSystemTypeToDBEntry.put(properties.getProperty(key), valueEnum.name());
-            }catch(Exception e){
-                //silent
-            }
-        }
-
-        logger.info("Built from properties the mapping 'SystemType' to 'DB entry' : "+mapSystemTypeToDBEntry);
-
+        Set keys = properties.stringPropertyNames();
+
+        mapSystemTypeToDBEntry = new HashMap();
+        for(String key : keys) {
+            //System.out.println(key + " : " + properties.getProperty(key));
+            try {
+                HarvestedDataKey valueEnum = HarvestedDataKey.valueOf(key);
+                mapSystemTypeToDBEntry.put(properties.getProperty(key), valueEnum.name());
+            } catch(Exception e) {
+                //silent
+            }
+        }
+
+        logger.info("Built from properties the mapping 'SystemType' to 'DB entry' : " + mapSystemTypeToDBEntry);
+
+        String currentContext = Utils.getCurrentContext();
+
         //GET CATALOGUE'S GROUPS
-        List groups = loadGroupsFromCKAN(catalogueContext);
+        List groups = loadGroupsFromCKAN(currentContext);
         //NORMALIZING THE GROUP NAME TO MATCH WITH VRE NAME
         Map mapNormalizedGroups = normalizeGroups(groups);
-        logger.debug("Map of Normalized Groups is: "+mapNormalizedGroups);
+        logger.debug("Map of Normalized Groups is: " + mapNormalizedGroups);
 
         //CREATING MAPPING BETWEEN (CATALOGUE GROUP NAME TO VRE NAME)
-        mapCatalogueGroupToVRE = new HashMap();
+        mapCatalogueGroupToVRE = new HashMap();
         //CREATING MAPPING BETWEEN (WS FOLDER NAME TO VRE NAME)
-        mapWsFolderNameToVRE = new HashMap();
+        mapWsFolderNameToVRE = new HashMap();
         Set normalizedGroups = mapNormalizedGroups.keySet();
-        for (String context : contexts) {
+        for(String context : contexts) {
             //logger.trace("Context is: " + context);
             String loweredVREName = context.substring(context.lastIndexOf("/") + 1, context.length()).toLowerCase();
             try {
                 //logger.trace("vreName lowered is: " + loweredVREName);
-                if (normalizedGroups.contains(loweredVREName)) {
-                    logger.debug("Normalized Groups matching the lowered VRE name: "+loweredVREName);
+                if(normalizedGroups.contains(loweredVREName)) {
+                    logger.debug("Normalized Groups matching the lowered VRE name: " + loweredVREName);
                     // Creating the map with couple (catalogue group name, scope)
                     mapCatalogueGroupToVRE.put(mapNormalizedGroups.get(loweredVREName), context);
                 }
-
+
                 mapWsFolderNameToVRE.put(loweredVREName, context);
-            }
-            catch (Exception e) {
+            } catch(Exception e) {
                 // silent
             }
         }
-
-        logger.info("Map of Catalogue Groups To VRE is: "+mapCatalogueGroupToVRE);
-        logger.info("Map of (lowered) Ws Folder Name To VRE is: "+mapWsFolderNameToVRE);
-
+
+        logger.info("Map of Catalogue Groups To VRE is: " + mapCatalogueGroupToVRE);
+        logger.info("Map of (lowered) Ws Folder Name To VRE is: " + mapWsFolderNameToVRE);
+
     }
-
+
     /**
      * Normalize groups.
      *
@@ -151,25 +147,24 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
      */
     private Map normalizeGroups(List groups) {
         Map listNGroups = new HashMap(groups.size());
-        for (String group : groups) {
+        for(String group : groups) {
             String normalizedGroup = group;
-            if(normalizedGroup.endsWith(GROUP_LABEL)){
-                normalizedGroup = normalizedGroup.substring(0, normalizedGroup.length()-GROUP_LABEL.length());
+            if(normalizedGroup.endsWith(GROUP_LABEL)) {
+                normalizedGroup = normalizedGroup.substring(0, normalizedGroup.length() - GROUP_LABEL.length());
             }
-            normalizedGroup = normalizedGroup.replaceAll("-","");
+            normalizedGroup = normalizedGroup.replaceAll("-", "");
             listNGroups.put(normalizedGroup.toLowerCase(), group);
         }
         return listNGroups;
     }
-
-
+
     /**
      * Load groups from ckan.
      *
      * @param scope the scope
     * @return the list
     */
-    private List loadGroupsFromCKAN(String scope){
+    private List loadGroupsFromCKAN(String scope) {
         List groups = new ArrayList();
         String ckanURL = "";
         try {
@@ -177,185 +172,57 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
             ckanURL = utils.getCatalogueUrl();
             List theGroups = utils.getGroups();
             Validate.notNull(theGroups, "The list of Groups is null");
-            for (CkanGroup ckanGroup : theGroups) {
+            for(CkanGroup ckanGroup : theGroups) {
                 groups.add(ckanGroup.getName());
             }
+        } catch(Exception e) {
+            logger.error("Error occurred on getting CKAN groups for scope: " + scope + " and CKAN URL: " + ckanURL, e);
         }
-        catch (Exception e) {
-            logger.error("Error occurred on getting CKAN groups for scope: "+scope+" and CKAN URL: "+ckanURL,e);
-        }
-
+
         return groups;
     }
-
+
     /**
      * Gets the map catalogue group to vre.
      *
      * @return the map catalogue group to vre
     */
-    public HashMap getMapCatalogueGroupToVRE() {
-
+    public HashMap getMapCatalogueGroupToVRE() {
+
         return mapCatalogueGroupToVRE;
     }
-
-
+
     /**
     * @return the mapSystemTypeToDBEntry
     */
-    public HashMap getMapSystemTypeToDBEntry() {
-
+    public HashMap getMapSystemTypeToDBEntry() {
+
         return mapSystemTypeToDBEntry;
     }
-
-
+
     /**
     * @return the mapWsFolderNameToVRE
    */
-    public HashMap getMapWsFolderNameToVRE() {
-
+    public HashMap getMapWsFolderNameToVRE() {
+
         return mapWsFolderNameToVRE;
     }
-
-
-
-//    /**
-//     * Gets the filtered contexts.
-//     *
-//     * @return the filtered contexts
-//     */
-//    public SortedSet getFilteredContexts() {
-//        return contexts;
-//    }
-
-//    /**
-//     * Gets the filtering generic resource.
-//     *
-//     * @return the filtering generic resource
-//     */
-//    protected SimpleQuery getFilteringGenericResource() {
-//        return ICFactory.queryFor(GenericResource.class)
-//                .addCondition(String.format(SECONDARY_TYPE_FORMAT, SECONDARY_TYPE))
-//                .addCondition(String.format(NAME_FORMAT, NAME));
-//    }
-
-//    /**
-//     * Gets the generic resource.
-//     *
-//     * @return the generic resource
-//     */
-//    protected GenericResource getGenericResource() {
-//        SimpleQuery simpleQuery = getFilteringGenericResource();
-//        List res = ICFactory.clientFor(GenericResource.class).submit(simpleQuery);
-//        if(res.size()==0) {
-//            // At time of writing it should be an error but it can change in the future
-//            logger.info("No {} for filtering contexts.", GenericResource.class.getSimpleName());
-//            return null;
-//        }
-//        return res.get(0);
-//    }
-
-//    /**
-//     * Gets the excluded contexts.
-//     *
-//     * @return the excluded contexts
-//     */
-//    public SortedSet getExcludedContexts() {
-//        SortedSet excludedContexts = new TreeSet<>();
-//
-//        GenericResource genericResource = getGenericResource();
-//        if(genericResource==null) {
-//            return excludedContexts;
-//        }
-//
-//        Element body = genericResource.profile().body();
-//
-//        /*
-//         * The following code parse an XML formatted as this
-//         *
-//         *
-//         *
-//         * /d4science.research-infrastructures.eu/SoBigData/ResourceCatalogue
-//         * /d4science.research-infrastructures.eu/SoBigData/TagMe
-//         *
-//         *
-//         *
-//         */
-//
-//        NodeList nodeList = body.getElementsByTagName(this.getClass().getSimpleName());
-//        if(nodeList.getLength()==0) {
-//            // At time of writing it should be an error but it can change in the future
-//            logger.info("The body of the {} does not contains any information to filter contexts.", GenericResource.class.getSimpleName());
-//        }
-//
-//        Element classNameElement = null;
-//        for(int c=0; c getSoBigDataContexts(Set contexts, String base) {
-//        SortedSet filteredContext = new TreeSet<>();
-//        for(String context : contexts) {
-//            if(context.startsWith(SO_BIG_DATA_CONTEXT)) {
-//                if(!filterContext(context)) {
-//                    filteredContext.add(context);
-//                }
-//            }
-//        }
-//        return filteredContext;
-//    }
-
-
+
+    /**
+     * Gets the so big data contexts.
+     *
+     * @param contexts the contexts
+     * @param base the base
+     * @return the so big data contexts
+     */
+    public SortedSet getValidContexts(Set contexts, String base) {
+        SortedSet filteredContext = new TreeSet<>();
+        for(String context : contexts) {
+            if(context.startsWith(SO_BIG_DATA_CONTEXT)) {
+                filteredContext.add(context);
+            }
+        }
+        return filteredContext;
+    }
+
 }
diff --git a/src/test/java/org/gcube/dataharvest/AccountingDataHarvesterPluginTest.java b/src/test/java/org/gcube/dataharvest/AccountingDataHarvesterPluginTest.java
index 518fcd3..c1bdf82 100644
--- a/src/test/java/org/gcube/dataharvest/AccountingDataHarvesterPluginTest.java
+++ b/src/test/java/org/gcube/dataharvest/AccountingDataHarvesterPluginTest.java
@@ -5,7 +5,6 @@ import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Properties;
 import java.util.SortedSet;
 import java.util.TreeSet;
 
@@ -14,6 +13,7 @@ import org.gcube.dataharvest.datamodel.HarvestedData;
 import org.gcube.dataharvest.harvester.MethodInvocationHarvester;
 import org.gcube.dataharvest.harvester.sobigdata.DataMethodDownloadHarvester;
 import org.gcube.dataharvest.harvester.sobigdata.ResourceCatalogueHarvester;
+import org.gcube.dataharvest.harvester.sobigdata.SoBigDataHarvester;
 import org.gcube.dataharvest.harvester.sobigdata.TagMeMethodInvocationHarvester;
 import org.gcube.dataharvest.utils.ContextTest;
 import org.gcube.dataharvest.utils.DateUtils;
@@ -27,7 +27,7 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
 
     private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterPluginTest.class);
 
-    //@Test
+    @Test
     public void test() {
         try {
@@ -61,7 +61,7 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
         }
     }
 
-    //@Test
+    @Test
     public void testMethodInvocation() {
         try {
@@ -82,7 +82,7 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
         }
     }
 
-    //@Test
+    @Test
     public void testTagMeMethodInvocation() {
         try {
@@ -104,7 +104,7 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
 
     }
 
-    //@Test
+    @Test
     public void testFilteringGenericResource() {
         try {
             org.gcube.dataharvest.utils.Utils.setContext(RESOURCE_CATALOGUE);
@@ -121,18 +121,9 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
             AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(null);
             accountingDataHarvesterPlugin.getConfigParameters();
 
-            //Added by Francesco
-            Properties properties = AccountingDataHarvesterPlugin.getProperties().get();
-            String catalogueContext = (String) properties.get(AccountingDataHarvesterPlugin.RESOURCE_CATALOGUE_CONTEXT);
-            logger.debug("Read from properties "+AccountingDataHarvesterPlugin.RESOURCE_CATALOGUE_CONTEXT+" value: "+catalogueContext);
-            //end
-
-            ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end, catalogueContext, contexts);
-//            SortedSet excludedContexts = resourceCatalogueHarvester.getExcludedContexts();
-//            logger.info("Excluded contexts {}", excludedContexts);
-//            SortedSet validContexts = resourceCatalogueHarvester.getFilteredContexts();
-//
-//            logger.info("Valid Contexts {}", validContexts);
+            ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end, contexts);
+            SortedSet validContexts = resourceCatalogueHarvester.getValidContexts(contexts, SoBigDataHarvester.SO_BIG_DATA_CONTEXT);
+            logger.info("Valid Contexts {}", validContexts);
 
         } catch(Exception e) {
             logger.error("", e);
@@ -145,39 +136,22 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
         try {
             org.gcube.dataharvest.utils.Utils.setContext(RESOURCE_CATALOGUE);
+
             MeasureType measureType = MeasureType.MONTHLY;
 
-//            Date start = DateUtils.getPreviousPeriod(measureType).getTime();
-//            Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
-//            Date start = DateUtils.getStartCalendar(2016, 12, 01).getTime();
-            Date start = DateUtils.getPreviousPeriod(measureType).getTime();
+            Date start = DateUtils.getStartCalendar(2018, 04, 01).getTime();
             Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
 
             AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(null);
             accountingDataHarvesterPlugin.getConfigParameters();
 
-            //Added by Francesco
-            Properties properties = AccountingDataHarvesterPlugin.getProperties().get();
-            String catalogueContext = (String) properties.get(AccountingDataHarvesterPlugin.RESOURCE_CATALOGUE_CONTEXT);
-            logger.debug("Read from properties "+AccountingDataHarvesterPlugin.RESOURCE_CATALOGUE_CONTEXT+" value: "+catalogueContext);
-            //end
-
-            //TODO @LUCA FROSINI MUST PASS SoBigData VREs
-            TreeSet contexts = new TreeSet();
-            contexts.add("/d4science.research-infrastructures.eu/SoBigData/ResourceCatalogue");
-            contexts.add("/d4science.research-infrastructures.eu/SoBigData/TagMe");
-            contexts.add("/d4science.research-infrastructures.eu/SoBigData/WellBeingAndEconomy");
-            contexts.add("/d4science.research-infrastructures.eu/SoBigData/CityOfCitizens");
-            contexts.add("/d4science.research-infrastructures.eu/SoBigData/SocietalDebates");
-            contexts.add("/d4science.research-infrastructures.eu/SoBigData/SportsDataScience");
-            contexts.add("/d4science.research-infrastructures.eu/SoBigData/SMAPH");
-
-            ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end, catalogueContext, contexts);
+            LinkedHashMap map = ContextManager.readContexts();
+            SortedSet contexts = new TreeSet<>(map.keySet());
+
+            ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end, contexts);
 
             List data = resourceCatalogueHarvester.getData();
 
-            for (HarvestedData harvestedData : data) {
-                System.out.println(harvestedData.toString());
-            }
+            logger.debug("{}", data);
 
         } catch(Exception e) {
             logger.error("", e);
@@ -193,37 +167,20 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
 
             MeasureType measureType = MeasureType.MONTHLY;
 
-            Date start = DateUtils.getStartCalendar(2016, 12, 01).getTime();
-            // Date start = DateUtils.getPreviousPeriod(measureType).getTime();
-//            Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
-            Date end = DateUtils.getEndDateFromStartDate(measureType, start, 18);
+            Date start = DateUtils.getStartCalendar(2018, 04, 01).getTime();
+            Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
 
             AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(null);
             accountingDataHarvesterPlugin.getConfigParameters();
 
-            //Added by Francesco
-            Properties properties = AccountingDataHarvesterPlugin.getProperties().get();
-            String catalogueContext = (String) properties.get(AccountingDataHarvesterPlugin.RESOURCE_CATALOGUE_CONTEXT);
-            logger.debug("Read from properties "+AccountingDataHarvesterPlugin.RESOURCE_CATALOGUE_CONTEXT+" value: "+catalogueContext);
-            //end
-
-            //TODO @LUCA FROSINI MUST PASS SoBigData VREs
-            TreeSet contexts = new TreeSet();
-            contexts.add("/d4science.research-infrastructures.eu/SoBigData/ResourceCatalogue");
-            contexts.add("/d4science.research-infrastructures.eu/SoBigData/TagMe");
-            contexts.add("/d4science.research-infrastructures.eu/SoBigData/WellBeingAndEconomy");
-            contexts.add("/d4science.research-infrastructures.eu/SoBigData/CityOfCitizens");
-            contexts.add("/d4science.research-infrastructures.eu/SoBigData/SocietalDebates");
-            contexts.add("/d4science.research-infrastructures.eu/SoBigData/SportsDataScience");
-            contexts.add("/d4science.research-infrastructures.eu/SoBigData/SMAPH");
-
-            DataMethodDownloadHarvester resourceCatalogueHarvester = new DataMethodDownloadHarvester(start, end, catalogueContext, contexts);
+            LinkedHashMap map = ContextManager.readContexts();
+            SortedSet contexts = new TreeSet<>(map.keySet());
+
+            DataMethodDownloadHarvester resourceCatalogueHarvester = new DataMethodDownloadHarvester(start, end, contexts);
 
             List data = resourceCatalogueHarvester.getData();
 
-//            for (HarvestedData harvestedData : data) {
-//                System.out.println(harvestedData.toString());
-//            }
-
+            logger.debug("{}", data);
+
         } catch(Exception e) {
             logger.error("", e);
         }
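Usage sketch. A minimal, non-authoritative example of driving the refactored harvesters, assembled from the tests above; RESOURCE_CATALOGUE, ContextManager.readContexts() and the date helpers are assumed to behave as they do in AccountingDataHarvesterPluginTest. The key point of the patch: the harvesters no longer take a catalogueContext parameter, they derive it from the current context (Utils.getCurrentContext()) and keep only the contexts under SoBigDataHarvester.SO_BIG_DATA_CONTEXT via getValidContexts.

    // Set the context first: it now drives both the CKAN group lookup and the
    // accounting of unmapped groups (it replaces the old catalogueContext argument).
    org.gcube.dataharvest.utils.Utils.setContext(RESOURCE_CATALOGUE);

    MeasureType measureType = MeasureType.MONTHLY;
    Date start = DateUtils.getPreviousPeriod(measureType).getTime();
    Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);

    // Pass all known contexts; the SoBigDataHarvester constructor filters them
    // down to the SoBigData VREs, so no hand-maintained VRE list is needed.
    SortedSet contexts = new TreeSet<>(ContextManager.readContexts().keySet());

    ResourceCatalogueHarvester harvester = new ResourceCatalogueHarvester(start, end, contexts);
    List data = harvester.getData(); // HarvestedData entries, one per matched catalogue group/VRE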