Code redesign

Refs #11756: Refactor DataHarvesterPlugin to support scheduled execution from smart-executor
Task-Url: https://support.d4science.org/issues/11756
git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/accounting/accounting-dashboard-harvester-se-plugin@167788 82a268e6-3cf1-43bd-a215-b396298e98cf

This commit is contained in:
parent 19e396d374
commit 3c6b05d00b
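At a glance, the refactoring drops the explicit catalogueContext constructor argument from the SoBigData harvesters so that a scheduled smart-executor run can resolve the context on its own. A minimal sketch of the call-site change, assuming the same class names as in the diff below; the snippet itself is illustrative and not part of the repository:

import java.util.Date;
import java.util.SortedSet;
import java.util.TreeSet;

public class HarvesterCallSiteSketch {
	public static void main(String[] args) throws Exception {
		Date start = new Date();
		Date end = new Date();
		SortedSet<String> contexts = new TreeSet<>();
		contexts.add("/d4science.research-infrastructures.eu/SoBigData/TagMe");

		// Before: the catalogue context was read from a RESOURCE_CATALOGUE_CONTEXT property
		// and passed explicitly to every harvester.
		// ResourceCatalogueHarvester harvester =
		//     new ResourceCatalogueHarvester(start, end, catalogueContext, contexts);

		// After: the harvester derives the context at runtime (Utils.getCurrentContext()),
		// so the scheduled task only supplies the accounting period and the candidate contexts.
		// ResourceCatalogueHarvester harvester =
		//     new ResourceCatalogueHarvester(start, end, contexts);
	}
}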
@@ -40,9 +40,7 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
 public static final String RERUN_INPUT_PARAMETER = "reRun";
 public static final String DRY_RUN_INPUT_PARAMETER = "dryRun";
 
-public static final String RESOURCE_CATALOGUE_CONTEXT = "RESOURCE_CATALOGUE_CONTEXT";
-
-//public static final String RESOURCE_CATALOGUE_CONTEXT = "/d4science.research-infrastructures.eu/SoBigData/ResourceCatalogue";
+public static final String SO_BIG_DATA_CATALOGUE_CONTEXT = "/d4science.research-infrastructures.eu/SoBigData/ResourceCatalogue";
 public static final String TAGME_CONTEXT = "/d4science.research-infrastructures.eu/SoBigData/TagMe";
 
 protected Date start;
@@ -158,17 +156,12 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
 logger.error("Error harvesting Context Users for {}", context, e);
 }
 
-//Added by Francesco
-Properties properties = AccountingDataHarvesterPlugin.getProperties().get();
-String catalogueContext = (String) properties.get(RESOURCE_CATALOGUE_CONTEXT);
-logger.debug("Read from properties "+RESOURCE_CATALOGUE_CONTEXT+" value: "+catalogueContext);
-//end
 
-if(context.startsWith(catalogueContext)) {
+if(context.startsWith(SO_BIG_DATA_CATALOGUE_CONTEXT)) {
 
 try {
 // Collecting info on Resource Catalogue (Dataset, Application, Deliverables, Methods)
-ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end, catalogueContext, contexts);
+ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end, contexts);
 List<HarvestedData> harvested = resourceCatalogueHarvester.getData();
 data.addAll(harvested);
 } catch(Exception e) {
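The property lookup is gone: whether a context triggers the catalogue harvesters is now decided against the hard-coded SO_BIG_DATA_CATALOGUE_CONTEXT constant. A small, self-contained sketch of just that check (the class name is invented for the example):

public class CatalogueContextCheckSketch {

	public static final String SO_BIG_DATA_CATALOGUE_CONTEXT =
			"/d4science.research-infrastructures.eu/SoBigData/ResourceCatalogue";

	// Same prefix test the plugin now performs instead of reading
	// RESOURCE_CATALOGUE_CONTEXT from the properties file.
	public static boolean isCatalogueContext(String context) {
		return context.startsWith(SO_BIG_DATA_CATALOGUE_CONTEXT);
	}

	public static void main(String[] args) {
		System.out.println(isCatalogueContext("/d4science.research-infrastructures.eu/SoBigData/ResourceCatalogue")); // true
		System.out.println(isCatalogueContext("/d4science.research-infrastructures.eu/SoBigData/TagMe")); // false
	}
}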
@@ -46,8 +46,8 @@ public class DataMethodDownloadHarvester extends SoBigDataHarvester {
 * @param contexts the contexts
 * @throws ParseException the parse exception
 */
-public DataMethodDownloadHarvester(Date start, Date end, String catalogueContext, SortedSet<String> contexts) throws ParseException {
-super(start, end, catalogueContext, contexts);
+public DataMethodDownloadHarvester(Date start, Date end, SortedSet<String> contexts) throws Exception {
+super(start, end, contexts);
 }
 
 /* (non-Javadoc)
@@ -110,10 +110,7 @@ public class DataMethodDownloadHarvester extends SoBigDataHarvester {
 //ADDING DEFAULT ACCOUNTING
 data.add(defaultHarvesteData);
 
-logger.info("In the period [from "+startDate+" to "+endDate+ "] returning workspace accouting data:");
-for (HarvestedData harvestedData : data) {
-logger.info(harvestedData.toString());
-}
+logger.info("In the period [from {} to {} ] returning workspace accouting data {}", DateUtils.LAUNCH_DATE_FORMAT.format(startDate), DateUtils.LAUNCH_DATE_FORMAT.format(endDate), data);
 
 return data;
 
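Several of these hunks replace string-concatenated log messages with SLF4J parameterized messages. A minimal, self-contained illustration of the difference, assuming only slf4j-api on the classpath (the date format below is made up; the project uses its own DateUtils.LAUNCH_DATE_FORMAT):

import java.text.SimpleDateFormat;
import java.util.Date;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ParameterizedLoggingSketch {

	private static final Logger logger = LoggerFactory.getLogger(ParameterizedLoggingSketch.class);
	private static final SimpleDateFormat FORMAT = new SimpleDateFormat("yyyy-MM-dd");

	public static void main(String[] args) {
		Date startDate = new Date();
		Date endDate = new Date();

		// Old style: the final message string is concatenated even if INFO is disabled.
		logger.info("In the period [from " + FORMAT.format(startDate) + " to " + FORMAT.format(endDate) + "] returning data:");

		// New style: the final message is assembled only when the level is enabled
		// (argument expressions are still evaluated).
		logger.info("In the period [from {} to {}] returning data", FORMAT.format(startDate), FORMAT.format(endDate));
	}
}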
@@ -45,13 +45,8 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
 * @param contexts the contexts. They are the VREs
 * @throws Exception the exception
 */
-public ResourceCatalogueHarvester(Date start, Date end, String catalogueContext, SortedSet<String> contexts) throws Exception {
-super(start, end, catalogueContext, contexts);
-
-if(catalogueContext==null || catalogueContext.isEmpty())
-throw new Exception("The catalogue context is null or empty. Pass a valid scope");
-
-logger.debug("Catalogue context is: "+catalogueContext);
+public ResourceCatalogueHarvester(Date start, Date end, SortedSet<String> contexts) throws Exception {
+super(start, end, contexts);
 }
 
 /**
@@ -65,9 +60,6 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
 return "https://ckan-solr-d4s.d4science.org/solr/sobigdata";
 }
 
-/* (non-Javadoc)
- * @see org.gcube.dataharvest.harvester.BasicHarvester#getData()
- */
 @Override
 public List<HarvestedData> getData() throws Exception {
 
@@ -116,9 +108,11 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
 counter.put(groupName, 0);
 }
 
+String catalogueContext = Utils.getCurrentContext();
+
 //Counter for default context of accounting
 int catalogueContextCount = 0;
-logger.debug("For "+harvestKey+" has found "+numFound+" doc/s");
+logger.debug("For {} has found {} doc/s", harvestKey, numFound);
 if(numFound > 0) {
 
 JSONArray docs = response.getJSONArray("docs");
@@ -129,13 +123,13 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
 Iterator<Object> git = groups.iterator();
 while(git.hasNext()) {
 String catalogueGroupName = (String) git.next();
-logger.debug("GroupName found: "+catalogueGroupName);
+logger.debug("GroupName found {}", catalogueGroupName);
 //counterByGroup(groupItem);
 Integer currentCount = counter.get(catalogueGroupName);
 if(currentCount!=null)
 counter.put(catalogueGroupName, currentCount+1);
 else{
-logger.warn("No mapping found for Catalogue-Group Name: "+catalogueGroupName+" from VREs. Accounting it in the catalogue context: "+catalogueContext);
+logger.warn("No mapping found for Catalogue-Group Name {} from VREs. Accounting it in the catalogue context {}", catalogueGroupName, catalogueContext);
 //counter.put(catalogueContext, counter.get(catalogueContext)+1);
 catalogueContextCount++;
 }
@@ -153,17 +147,19 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
 }
 
 List<HarvestedData> data = new ArrayList<HarvestedData>();
-logger.trace("The context: "+catalogueContext + " has count: "+catalogueContextCount);
-data.add(new HarvestedData(harvestKey,catalogueContext, catalogueContextCount));
+String context = Utils.getCurrentContext();
+
+logger.trace("The context {} has count ", context, catalogueContextCount);
+
+data.add(new HarvestedData(harvestKey, context, catalogueContextCount));
 
 for (String key : counter.keySet()) {
-logger.trace("The group: "+key + " has count: "+counter.get(key));
+logger.trace("The group {} has count {}", key, counter.get(key));
 data.add(new HarvestedData(harvestKey, mapCatalogueGroupToVRE.get(key), counter.get(key)));
 }
 
-logger.info("For "+harvestKey+ " in the period [from "+startDate+" to "+endDate+ "] returning accouting data:");
-for (HarvestedData harvestedData : data) {
-logger.info(harvestedData.toString());
-}
+logger.info("For {} in the period [from {} to {}] returning accouting data :", harvestKey, DateUtils.LAUNCH_DATE_FORMAT.format(startDate), DateUtils.LAUNCH_DATE_FORMAT.format(endDate), data);
 
 return data;
 
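getData() now resolves the running context itself and accounts every catalogue group without a VRE mapping to that context, while mapped groups are accounted to their VREs. A simplified, self-contained sketch of that assembly step (the inner Harvested class and the sample values stand in for HarvestedData and for real counters):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class GroupAccountingSketch {

	// Stand-in for the project's HarvestedData bean.
	static class Harvested {
		final String key;
		final String context;
		final int count;

		Harvested(String key, String context, int count) {
			this.key = key;
			this.context = context;
			this.count = count;
		}

		@Override
		public String toString() {
			return key + " -> " + context + " = " + count;
		}
	}

	public static void main(String[] args) {
		String harvestKey = "CATALOGUE_ACCESSES"; // hypothetical harvest key
		String currentContext = "/d4science.research-infrastructures.eu/SoBigData/ResourceCatalogue";

		Map<String, Integer> counter = new HashMap<>(); // documents counted per catalogue group
		Map<String, String> mapCatalogueGroupToVRE = new HashMap<>();
		mapCatalogueGroupToVRE.put("tagme-group", "/d4science.research-infrastructures.eu/SoBigData/TagMe"); // example mapping
		counter.put("tagme-group", 3);
		int unmappedCount = 2; // documents whose group has no VRE mapping

		List<Harvested> data = new ArrayList<>();
		// Unmapped documents are accounted to the context the harvester runs in.
		data.add(new Harvested(harvestKey, currentContext, unmappedCount));
		// Mapped groups are accounted to their VREs.
		for (Map.Entry<String, Integer> entry : counter.entrySet()) {
			data.add(new Harvested(harvestKey, mapCatalogueGroupToVRE.get(entry.getKey()), entry.getValue()));
		}
		data.forEach(System.out::println);
	}
}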
@@ -212,24 +208,4 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
 return jsonResult;
 }
 
-/**
- * Gets the catalogue context.
- *
- * @return the catalogueContext
- */
-public String getCatalogueContext() {
-
-return catalogueContext;
-}
-
-/**
- * Sets the catalogue context.
- *
- * @param catalogueContext the catalogueContext to set
- */
-public void setCatalogueContext(String catalogueContext) {
-
-this.catalogueContext = catalogueContext;
-}
-
 }
@@ -9,19 +9,21 @@ import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
 import java.util.SortedSet;
+import java.util.TreeSet;
 
 import org.apache.commons.lang.Validate;
+import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
 import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogueFactory;
 import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogueImpl;
 import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
 import org.gcube.dataharvest.datamodel.HarvestedDataKey;
 import org.gcube.dataharvest.harvester.BasicHarvester;
+import org.gcube.dataharvest.utils.Utils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import eu.trentorise.opendata.jackan.model.CkanGroup;
 
 
 /**
 * The Class SoBigDataHarvester.
 *
@@ -33,34 +35,21 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 
 private static Logger logger = LoggerFactory.getLogger(SoBigDataHarvester.class);
 
 
 //Added by Francesco
 private static final String GROUP_LABEL = "group";
 
 //Added by Francesco
-protected HashMap<String, String> mapSystemTypeToDBEntry;
+protected HashMap<String,String> mapSystemTypeToDBEntry;
 
 //Added by Francesco
-protected HashMap<String, String> mapCatalogueGroupToVRE;
+protected HashMap<String,String> mapCatalogueGroupToVRE;
 
 //Added by Francesco
-protected HashMap<String, String> mapWsFolderNameToVRE;
+protected HashMap<String,String> mapWsFolderNameToVRE;
 
-//Added by Francesco
-protected String catalogueContext;
-
 //Added by Francesco
 private DataCatalogueFactory catalogueFactory;
 
 
-// public static String SECONDARY_TYPE_FORMAT = "$resource/Profile/SecondaryType/text() eq '%1s'";
-// public static String NAME_FORMAT = "$resource/Profile/Name/text() eq '%1s'";
-//
-// public static String SECONDARY_TYPE = "ExcludingVREs";
-// public static String NAME = "AccountingHarvesters";
 
 
 
 public static final String SO_BIG_DATA_CONTEXT = "/d4science.research-infrastructures.eu/SoBigData";
 
 protected SortedSet<String> contexts;
@@ -74,71 +63,78 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 * @param vreScopes the contexts
 * @throws ParseException the parse exception
 */
-public SoBigDataHarvester(Date start, Date end, String catalogueContext, SortedSet<String> vreScopes) throws ParseException {
+public SoBigDataHarvester(Date start, Date end, SortedSet<String> contexts) throws Exception {
 super(start, end);
-this.catalogueContext = catalogueContext;
 this.catalogueFactory = DataCatalogueFactory.getFactory();
-this.contexts = vreScopes;
+String currentContext = Utils.getCurrentContext();
 
+// Truncating the context to the last / (the last is retained for filtering issues)
+String baseContext = currentContext.substring(0, currentContext.lastIndexOf("/")+1);
 
+this.contexts = getValidContexts(contexts, baseContext);
+logger.trace("Valid contexts are {}", contexts);
 
 initMappingMaps();
 
 
-// this.excludedContexts = getExcludedContexts();
-// // Adding trailing slash to SO_BIG_DATA_CONTEXT to avoid to get VO
-// this.contexts = getSoBigDataContexts(contexts, SO_BIG_DATA_CONTEXT + "/");
-// logger.trace("Valid contexts are {}", contexts);
 }
 
 /**
 * Inits the mapping maps.
+* @throws Exception
+* @throws ObjectNotFound
 */
-protected void initMappingMaps(){
+protected void initMappingMaps() throws ObjectNotFound, Exception {
 Properties properties = AccountingDataHarvesterPlugin.getProperties().get();
 Set<String> keys = properties.stringPropertyNames();
 
-mapSystemTypeToDBEntry = new HashMap<String, String>();
+mapSystemTypeToDBEntry = new HashMap<String,String>();
-for (String key : keys) {
+for(String key : keys) {
 //System.out.println(key + " : " + properties.getProperty(key));
-try{
+try {
 HarvestedDataKey valueEnum = HarvestedDataKey.valueOf(key);
 mapSystemTypeToDBEntry.put(properties.getProperty(key), valueEnum.name());
-}catch(Exception e){
+} catch(Exception e) {
 //silent
 }
 }
 
-logger.info("Built from properties the mapping 'SystemType' to 'DB entry' : "+mapSystemTypeToDBEntry);
+logger.info("Built from properties the mapping 'SystemType' to 'DB entry' : " + mapSystemTypeToDBEntry);
 
+String currentContext = Utils.getCurrentContext();
 
 //GET CATALOGUE'S GROUPS
-List<String> groups = loadGroupsFromCKAN(catalogueContext);
+List<String> groups = loadGroupsFromCKAN(currentContext);
 //NORMALIZING THE GROUP NAME TO MATCH WITH VRE NAME
 Map<String,String> mapNormalizedGroups = normalizeGroups(groups);
-logger.debug("Map of Normalized Groups is: "+mapNormalizedGroups);
+logger.debug("Map of Normalized Groups is: " + mapNormalizedGroups);
 
 //CREATING MAPPING BETWEEN (CATALOGUE GROUP NAME TO VRE NAME)
-mapCatalogueGroupToVRE = new HashMap<String, String>();
+mapCatalogueGroupToVRE = new HashMap<String,String>();
 //CREATING MAPPING BETWEEN (WS FOLDER NAME TO VRE NAME)
-mapWsFolderNameToVRE = new HashMap<String, String>();
+mapWsFolderNameToVRE = new HashMap<String,String>();
 Set<String> normalizedGroups = mapNormalizedGroups.keySet();
-for (String context : contexts) {
+for(String context : contexts) {
 //logger.trace("Context is: " + context);
 String loweredVREName = context.substring(context.lastIndexOf("/") + 1, context.length()).toLowerCase();
 try {
 //logger.trace("vreName lowered is: " + loweredVREName);
-if (normalizedGroups.contains(loweredVREName)) {
+if(normalizedGroups.contains(loweredVREName)) {
-logger.debug("Normalized Groups matching the lowered VRE name: "+loweredVREName);
+logger.debug("Normalized Groups matching the lowered VRE name: " + loweredVREName);
 // Creating the map with couple (catalogue group name, scope)
 mapCatalogueGroupToVRE.put(mapNormalizedGroups.get(loweredVREName), context);
 }
 
 mapWsFolderNameToVRE.put(loweredVREName, context);
-}
-catch (Exception e) {
+} catch(Exception e) {
 // silent
 }
 }
 
-logger.info("Map of Catalogue Groups To VRE is: "+mapCatalogueGroupToVRE);
+logger.info("Map of Catalogue Groups To VRE is: " + mapCatalogueGroupToVRE);
-logger.info("Map of (lowered) Ws Folder Name To VRE is: "+mapWsFolderNameToVRE);
+logger.info("Map of (lowered) Ws Folder Name To VRE is: " + mapWsFolderNameToVRE);
 
 }
 
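The constructor no longer receives the catalogue context; it derives a base context from the scope it runs in and then keeps only the SoBigData contexts. A self-contained sketch of just the string handling, assuming example scope values (the second context below is invented for illustration):

import java.util.SortedSet;
import java.util.TreeSet;

public class ContextFilterSketch {

	public static final String SO_BIG_DATA_CONTEXT = "/d4science.research-infrastructures.eu/SoBigData";

	public static void main(String[] args) {
		String currentContext = "/d4science.research-infrastructures.eu/SoBigData/ResourceCatalogue";

		// Truncating the context to the last / (the last is retained for filtering issues)
		String baseContext = currentContext.substring(0, currentContext.lastIndexOf("/") + 1);
		System.out.println(baseContext); // /d4science.research-infrastructures.eu/SoBigData/

		SortedSet<String> contexts = new TreeSet<>();
		contexts.add("/d4science.research-infrastructures.eu/SoBigData/TagMe");
		contexts.add("/d4science.research-infrastructures.eu/AnotherVO/AnotherVRE"); // invented, should be filtered out

		// Same prefix filter that getValidContexts(...) applies.
		SortedSet<String> valid = new TreeSet<>();
		for (String context : contexts) {
			if (context.startsWith(SO_BIG_DATA_CONTEXT)) {
				valid.add(context);
			}
		}
		System.out.println(valid); // only the SoBigData VRE remains
	}
}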
@@ -151,25 +147,24 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 */
 private Map<String,String> normalizeGroups(List<String> groups) {
 Map<String,String> listNGroups = new HashMap<String,String>(groups.size());
-for (String group : groups) {
+for(String group : groups) {
 String normalizedGroup = group;
-if(normalizedGroup.endsWith(GROUP_LABEL)){
+if(normalizedGroup.endsWith(GROUP_LABEL)) {
-normalizedGroup = normalizedGroup.substring(0, normalizedGroup.length()-GROUP_LABEL.length());
+normalizedGroup = normalizedGroup.substring(0, normalizedGroup.length() - GROUP_LABEL.length());
 }
-normalizedGroup = normalizedGroup.replaceAll("-","");
+normalizedGroup = normalizedGroup.replaceAll("-", "");
 listNGroups.put(normalizedGroup.toLowerCase(), group);
 }
 return listNGroups;
 }
 
 
 /**
 * Load groups from ckan.
 *
 * @param scope the scope
 * @return the list
 */
-private List<String> loadGroupsFromCKAN(String scope){
+private List<String> loadGroupsFromCKAN(String scope) {
 List<String> groups = new ArrayList<String>();
 String ckanURL = "";
 try {
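normalizeGroups(...) turns CKAN group names into keys that can be matched against lowered VRE names: a trailing "group" label is stripped, dashes are removed, and the lowered result maps back to the original group name. A self-contained sketch of that normalization with made-up group names:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class GroupNormalizationSketch {

	private static final String GROUP_LABEL = "group";

	// Mirrors the normalization performed by SoBigDataHarvester.normalizeGroups(...):
	// normalized name (lowercase, no dashes, no trailing "group") -> original group name.
	static Map<String, String> normalizeGroups(List<String> groups) {
		Map<String, String> normalized = new HashMap<>(groups.size());
		for (String group : groups) {
			String normalizedGroup = group;
			if (normalizedGroup.endsWith(GROUP_LABEL)) {
				normalizedGroup = normalizedGroup.substring(0, normalizedGroup.length() - GROUP_LABEL.length());
			}
			normalizedGroup = normalizedGroup.replaceAll("-", "");
			normalized.put(normalizedGroup.toLowerCase(), group);
		}
		return normalized;
	}

	public static void main(String[] args) {
		// Group names below are examples, not real catalogue groups.
		System.out.println(normalizeGroups(Arrays.asList("TagMe-group", "City-Of-Citizens-group")));
		// tagme=TagMe-group, cityofcitizens=City-Of-Citizens-group (map order may vary)
	}
}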
@@ -177,12 +172,11 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 ckanURL = utils.getCatalogueUrl();
 List<CkanGroup> theGroups = utils.getGroups();
 Validate.notNull(theGroups, "The list of Groups is null");
-for (CkanGroup ckanGroup : theGroups) {
+for(CkanGroup ckanGroup : theGroups) {
 groups.add(ckanGroup.getName());
 }
-}
-catch (Exception e) {
-logger.error("Error occurred on getting CKAN groups for scope: "+scope+" and CKAN URL: "+ckanURL,e);
+} catch(Exception e) {
+logger.error("Error occurred on getting CKAN groups for scope: " + scope + " and CKAN URL: " + ckanURL, e);
 }
 
 return groups;
@@ -193,169 +187,42 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 *
 * @return the map catalogue group to vre
 */
-public HashMap<String, String> getMapCatalogueGroupToVRE() {
+public HashMap<String,String> getMapCatalogueGroupToVRE() {
 
 return mapCatalogueGroupToVRE;
 }
 
 
 /**
 * @return the mapSystemTypeToDBEntry
 */
-public HashMap<String, String> getMapSystemTypeToDBEntry() {
+public HashMap<String,String> getMapSystemTypeToDBEntry() {
 
 return mapSystemTypeToDBEntry;
 }
 
 
 /**
 * @return the mapWsFolderNameToVRE
 */
-public HashMap<String, String> getMapWsFolderNameToVRE() {
+public HashMap<String,String> getMapWsFolderNameToVRE() {
 
 return mapWsFolderNameToVRE;
 }
 
-// /**
-// * Gets the filtered contexts.
-// *
-// * @return the filtered contexts
-// */
-// public SortedSet<String> getFilteredContexts() {
-// return contexts;
-// }
+/**
+ * Gets the so big data contexts.
+ *
+ * @param contexts the contexts
+ * @param base the base
+ * @return the so big data contexts
+ */
+public SortedSet<String> getValidContexts(Set<String> contexts, String base) {
+SortedSet<String> filteredContext = new TreeSet<>();
+for(String context : contexts) {
+if(context.startsWith(SO_BIG_DATA_CONTEXT)) {
+filteredContext.add(context);
+}
+}
+return filteredContext;
+}
 
-// /**
-// * Gets the filtering generic resource.
-// *
-// * @return the filtering generic resource
-// */
-// protected SimpleQuery getFilteringGenericResource() {
-// return ICFactory.queryFor(GenericResource.class)
-// .addCondition(String.format(SECONDARY_TYPE_FORMAT, SECONDARY_TYPE))
-// .addCondition(String.format(NAME_FORMAT, NAME));
-// }
-
-// /**
-// * Gets the generic resource.
-// *
-// * @return the generic resource
-// */
-// protected GenericResource getGenericResource() {
-// SimpleQuery simpleQuery = getFilteringGenericResource();
-// List<GenericResource> res = ICFactory.clientFor(GenericResource.class).submit(simpleQuery);
-// if(res.size()==0) {
-// // At time of writing it should be an error but it can change in the future
-// logger.info("No {} for filtering contexts.", GenericResource.class.getSimpleName());
-// return null;
-// }
-// return res.get(0);
-// }
-
-// /**
-// * Gets the excluded contexts.
-// *
-// * @return the excluded contexts
-// */
-// public SortedSet<String> getExcludedContexts() {
-// SortedSet<String> excludedContexts = new TreeSet<>();
-//
-// GenericResource genericResource = getGenericResource();
-// if(genericResource==null) {
-// return excludedContexts;
-// }
-//
-// Element body = genericResource.profile().body();
-//
-// /*
-// * The following code parse an XML formatted as this
-// *
-// * <ResourceCatalogueHarvester>
-// * <vres>
-// * <vre>/d4science.research-infrastructures.eu/SoBigData/ResourceCatalogue</vre>
-// * <vre>/d4science.research-infrastructures.eu/SoBigData/TagMe</vre>
-// * </vres>
-// * </ResourceCatalogueHarvester>
-// *
-// */
-//
-// NodeList nodeList = body.getElementsByTagName(this.getClass().getSimpleName());
-// if(nodeList.getLength()==0) {
-// // At time of writing it should be an error but it can change in the future
-// logger.info("The body of the {} does not contains any information to filter contexts.", GenericResource.class.getSimpleName());
-// }
-//
-// Element classNameElement = null;
-// for(int c=0; c<nodeList.getLength(); c++) {
-// Node node = nodeList.item(c);
-// if(node.getNodeType() == Node.ELEMENT_NODE) {
-// classNameElement = (Element) node;
-// }
-// }
-//
-// Element vresElement = null;
-// NodeList vresNodeList = classNameElement.getElementsByTagName("vres");
-// for(int c=0; c<vresNodeList.getLength(); c++) {
-// Node node = vresNodeList.item(c);
-// if(node.getNodeType() == Node.ELEMENT_NODE) {
-// vresElement = (Element) node;
-// }
-// }
-//
-// NodeList vreNodeList = vresElement.getElementsByTagName("vre");
-// for(int c=0; c<vreNodeList.getLength(); c++) {
-// Node node = vreNodeList.item(c);
-// if(node.getNodeType() == Node.ELEMENT_NODE) {
-// Element vreElement = (Element) node;
-// NodeList nl = vreElement.getChildNodes();
-// for(int i=0; i<nl.getLength(); i++) {
-// Node n = nl.item(i);
-// if(n.getNodeType() == Node.TEXT_NODE) {
-// excludedContexts.add(n.getNodeValue());
-// }
-// }
-//
-// }
-// }
-//
-// return excludedContexts;
-//
-// }
-//
-// /**
-// * Filter context.
-// *
-// * @param context the context
-// * @return true, if successful
-// */
-// protected boolean filterContext(String context) {
-// if(excludedContexts.contains(context)) {
-// return true;
-// }
-// return false;
-// }
 
-// /**
-// * Gets the so big data contexts.
-// *
-// * @param contexts the contexts
-// * @param base the base
-// * @return the so big data contexts
-// */
-// protected SortedSet<String> getSoBigDataContexts(Set<String> contexts, String base) {
-// SortedSet<String> filteredContext = new TreeSet<>();
-// for(String context : contexts) {
-// if(context.startsWith(SO_BIG_DATA_CONTEXT)) {
-// if(!filterContext(context)) {
-// filteredContext.add(context);
-// }
-// }
-// }
-// return filteredContext;
-// }
 
 
 }
@@ -5,7 +5,6 @@ import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Properties;
 import java.util.SortedSet;
 import java.util.TreeSet;
 
@@ -14,6 +13,7 @@ import org.gcube.dataharvest.datamodel.HarvestedData;
 import org.gcube.dataharvest.harvester.MethodInvocationHarvester;
 import org.gcube.dataharvest.harvester.sobigdata.DataMethodDownloadHarvester;
 import org.gcube.dataharvest.harvester.sobigdata.ResourceCatalogueHarvester;
+import org.gcube.dataharvest.harvester.sobigdata.SoBigDataHarvester;
 import org.gcube.dataharvest.harvester.sobigdata.TagMeMethodInvocationHarvester;
 import org.gcube.dataharvest.utils.ContextTest;
 import org.gcube.dataharvest.utils.DateUtils;
@@ -27,7 +27,7 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
 
 private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterPluginTest.class);
 
-//@Test
+@Test
 public void test() {
 try {
 
@@ -61,7 +61,7 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
 }
 }
 
-//@Test
+@Test
 public void testMethodInvocation() {
 try {
 
@@ -82,7 +82,7 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
 }
 }
 
-//@Test
+@Test
 public void testTagMeMethodInvocation() {
 try {
 
@@ -104,7 +104,7 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
 }
 
 
-//@Test
+@Test
 public void testFilteringGenericResource() {
 try {
 org.gcube.dataharvest.utils.Utils.setContext(RESOURCE_CATALOGUE);
@@ -121,18 +121,9 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
 AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(null);
 accountingDataHarvesterPlugin.getConfigParameters();
 
-//Added by Francesco
-Properties properties = AccountingDataHarvesterPlugin.getProperties().get();
-String catalogueContext = (String) properties.get(AccountingDataHarvesterPlugin.RESOURCE_CATALOGUE_CONTEXT);
-logger.debug("Read from properties "+AccountingDataHarvesterPlugin.RESOURCE_CATALOGUE_CONTEXT+" value: "+catalogueContext);
-//end
-
-ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end, catalogueContext, contexts);
-// SortedSet<String> excludedContexts = resourceCatalogueHarvester.getExcludedContexts();
-// logger.info("Excluded contexts {}", excludedContexts);
-// SortedSet<String> validContexts = resourceCatalogueHarvester.getFilteredContexts();
-//
-// logger.info("Valid Contexts {}", validContexts);
+ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end, contexts);
+SortedSet<String> validContexts = resourceCatalogueHarvester.getValidContexts(contexts, SoBigDataHarvester.SO_BIG_DATA_CONTEXT);
+logger.info("Valid Contexts {}", validContexts);
 
 } catch(Exception e) {
 logger.error("", e);
@@ -145,39 +136,22 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
 try {
 
 org.gcube.dataharvest.utils.Utils.setContext(RESOURCE_CATALOGUE);
 
 MeasureType measureType = MeasureType.MONTHLY;
 
-// Date start = DateUtils.getPreviousPeriod(measureType).getTime();
-// Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
-// Date start = DateUtils.getStartCalendar(2016, 12, 01).getTime();
-Date start = DateUtils.getPreviousPeriod(measureType).getTime();
+Date start = DateUtils.getStartCalendar(2018, 04, 01).getTime();
 Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
 
 AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(null);
 accountingDataHarvesterPlugin.getConfigParameters();
 
-//Added by Francesco
-Properties properties = AccountingDataHarvesterPlugin.getProperties().get();
-String catalogueContext = (String) properties.get(AccountingDataHarvesterPlugin.RESOURCE_CATALOGUE_CONTEXT);
-logger.debug("Read from properties "+AccountingDataHarvesterPlugin.RESOURCE_CATALOGUE_CONTEXT+" value: "+catalogueContext);
-//end
+LinkedHashMap<String,ScopeBean> map = ContextManager.readContexts();
+SortedSet<String> contexts = new TreeSet<>(map.keySet());
 
-//TODO @LUCA FROSINI MUST PASS SoBigData VREs
-TreeSet<String> contexts = new TreeSet<String>();
-contexts.add("/d4science.research-infrastructures.eu/SoBigData/ResourceCatalogue");
-contexts.add("/d4science.research-infrastructures.eu/SoBigData/TagMe");
-contexts.add("/d4science.research-infrastructures.eu/SoBigData/WellBeingAndEconomy");
-contexts.add("/d4science.research-infrastructures.eu/SoBigData/CityOfCitizens");
-contexts.add("/d4science.research-infrastructures.eu/SoBigData/SocietalDebates");
-contexts.add("/d4science.research-infrastructures.eu/SoBigData/SportsDataScience");
-contexts.add("/d4science.research-infrastructures.eu/SoBigData/SMAPH");
-
-ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end, catalogueContext, contexts);
+ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end, contexts);
 List<HarvestedData> data = resourceCatalogueHarvester.getData();
 
-for (HarvestedData harvestedData : data) {
-System.out.println(harvestedData.toString());
-}
+logger.debug("{}", data);
 
 } catch(Exception e) {
 logger.error("", e);
@@ -193,36 +167,19 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
 
 MeasureType measureType = MeasureType.MONTHLY;
 
-Date start = DateUtils.getStartCalendar(2016, 12, 01).getTime();
-// Date start = DateUtils.getPreviousPeriod(measureType).getTime();
-// Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
-Date end = DateUtils.getEndDateFromStartDate(measureType, start, 18);
+Date start = DateUtils.getStartCalendar(2018, 04, 01).getTime();
+Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
 
 AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(null);
 accountingDataHarvesterPlugin.getConfigParameters();
 
-//Added by Francesco
-Properties properties = AccountingDataHarvesterPlugin.getProperties().get();
-String catalogueContext = (String) properties.get(AccountingDataHarvesterPlugin.RESOURCE_CATALOGUE_CONTEXT);
-logger.debug("Read from properties "+AccountingDataHarvesterPlugin.RESOURCE_CATALOGUE_CONTEXT+" value: "+catalogueContext);
-//end
+LinkedHashMap<String,ScopeBean> map = ContextManager.readContexts();
+SortedSet<String> contexts = new TreeSet<>(map.keySet());
 
-//TODO @LUCA FROSINI MUST PASS SoBigData VREs
-TreeSet<String> contexts = new TreeSet<String>();
-contexts.add("/d4science.research-infrastructures.eu/SoBigData/ResourceCatalogue");
-contexts.add("/d4science.research-infrastructures.eu/SoBigData/TagMe");
-contexts.add("/d4science.research-infrastructures.eu/SoBigData/WellBeingAndEconomy");
-contexts.add("/d4science.research-infrastructures.eu/SoBigData/CityOfCitizens");
-contexts.add("/d4science.research-infrastructures.eu/SoBigData/SocietalDebates");
-contexts.add("/d4science.research-infrastructures.eu/SoBigData/SportsDataScience");
-contexts.add("/d4science.research-infrastructures.eu/SoBigData/SMAPH");
-
-DataMethodDownloadHarvester resourceCatalogueHarvester = new DataMethodDownloadHarvester(start, end, catalogueContext, contexts);
+DataMethodDownloadHarvester resourceCatalogueHarvester = new DataMethodDownloadHarvester(start, end, contexts);
 List<HarvestedData> data = resourceCatalogueHarvester.getData();
 
-// for (HarvestedData harvestedData : data) {
-// System.out.println(harvestedData.toString());
-// }
+logger.debug("{}", data);
 
 } catch(Exception e) {
 logger.error("", e);
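The updated tests no longer hard-code the SoBigData VRE list; they read every known context and pass the whole set to the harvester, which filters it itself. A sketch of that pattern with a stand-in for ContextManager.readContexts() (the map content below is invented):

import java.util.LinkedHashMap;
import java.util.SortedSet;
import java.util.TreeSet;

public class TestContextsSketch {

	public static void main(String[] args) {
		// Stand-in for: LinkedHashMap<String,ScopeBean> map = ContextManager.readContexts();
		LinkedHashMap<String, Object> map = new LinkedHashMap<>();
		map.put("/d4science.research-infrastructures.eu/SoBigData/TagMe", new Object());
		map.put("/d4science.research-infrastructures.eu/SoBigData/ResourceCatalogue", new Object());

		// Same construction used in the refactored tests: all context names, sorted.
		SortedSet<String> contexts = new TreeSet<>(map.keySet());
		System.out.println(contexts);

		// The harvester constructors then receive the full set, e.g.
		// new ResourceCatalogueHarvester(start, end, contexts);
	}
}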