Updated by Francesco:

- New Resource Catalogue Harvester created.

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/accounting/accounting-dashboard-harvester-se-plugin@167720 82a268e6-3cf1-43bd-a215-b396298e98cf
Francesco Mangiacrapa 2018-05-24 10:49:53 +00:00
parent deb7890ea1
commit 09a8f2d2dc
12 changed files with 822 additions and 399 deletions

File: AccountingDataHarvesterPlugin.java

@@ -30,40 +30,42 @@ import org.slf4j.LoggerFactory;
  * @author Luca Frosini (ISTI - CNR)
  */
 public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDeclaration> {
 
 	private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterPlugin.class);
 
 	private static final String PROPERTY_FILENAME = "config.properties";
 
 	public static final String START_DATE_INPUT_PARAMETER = "startDate";
 	public static final String MEASURE_TYPE_INPUT_PARAMETER = "measureType";
 	public static final String RERUN_INPUT_PARAMETER = "reRun";
 	public static final String DRY_RUN_INPUT_PARAMETER = "dryRun";
 
-	public static final String RESOURCE_CATALOGUE_CONTEXT = "/d4science.research-infrastructures.eu/SoBigData/ResourceCatalogue";
+	public static final String RESOURCE_CATALOGUE_CONTEXT = "RESOURCE_CATALOGUE_CONTEXT";
+	//public static final String RESOURCE_CATALOGUE_CONTEXT = "/d4science.research-infrastructures.eu/SoBigData/ResourceCatalogue";
 
 	public static final String TAGME_CONTEXT = "/d4science.research-infrastructures.eu/SoBigData/TagMe";
 
 	protected Date start;
 	protected Date end;
 
 	public AccountingDataHarvesterPlugin(DataHarvestPluginDeclaration pluginDeclaration) {
 		super(pluginDeclaration);
 	}
 
 	private static final InheritableThreadLocal<Properties> properties = new InheritableThreadLocal<Properties>() {
 
 		@Override
 		protected Properties initialValue() {
 			return new Properties();
 		}
 
 	};
 
 	public static InheritableThreadLocal<Properties> getProperties() {
 		return properties;
 	}
 
-	private void getConfigParameters() throws IOException {
+	public void getConfigParameters() throws IOException {
 		try {
 			Properties properties = new Properties();
 			InputStream input = AccountingDataHarvesterPlugin.class.getClassLoader()
@@ -75,24 +77,24 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
 					"Unable to load {} file containing configuration properties. AccountingDataHarvesterPlugin will use defaults",
 					PROPERTY_FILENAME);
 		}
 	}
 
 	/** {@inheritDoc} */
 	@Override
 	public void launch(Map<String,Object> inputs) throws Exception {
 		logger.debug("{} is starting", this.getClass().getSimpleName());
 
 		if(inputs == null || inputs.isEmpty()) {
 			throw new IllegalArgumentException("The can only be launched providing valid input parameters");
 		}
 
 		if(!inputs.containsKey(MEASURE_TYPE_INPUT_PARAMETER)) {
 			throw new IllegalArgumentException("Please set required parameter '" + MEASURE_TYPE_INPUT_PARAMETER + "'");
 		}
 
 		MeasureType measureType = MeasureType.valueOf((String) inputs.get(MEASURE_TYPE_INPUT_PARAMETER));
 
 		boolean reRun = true;
 		if(inputs.containsKey(RERUN_INPUT_PARAMETER)) {
 			try {
@@ -101,7 +103,7 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
 				throw new IllegalArgumentException("'" + RERUN_INPUT_PARAMETER + "' must be a boolean");
 			}
 		}
 
 		boolean dryRun = true;
 		if(inputs.containsKey(DRY_RUN_INPUT_PARAMETER)) {
 			try {
@@ -110,34 +112,34 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
 				throw new IllegalArgumentException("'" + DRY_RUN_INPUT_PARAMETER + "' must be a boolean");
 			}
 		}
 
 		if(inputs.containsKey(START_DATE_INPUT_PARAMETER)) {
 			String startDateString = (String) inputs.get(START_DATE_INPUT_PARAMETER);
 			start = DateUtils.UTC_DATE_FORMAT.parse(startDateString + " " + DateUtils.UTC);
 		} else {
 			start = DateUtils.getPreviousPeriod(measureType).getTime();
 		}
 
 		end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
 
 		logger.debug("Harvesting from {} to {}", DateUtils.LAUNCH_DATE_FORMAT.format(start),
 				DateUtils.LAUNCH_DATE_FORMAT.format(end));
 
 		getConfigParameters();
 
 		ContextAuthorization contextAuthorization = new ContextAuthorization();
 		DatabaseManager dbaseManager = new DatabaseManager();
 		SortedSet<String> contexts = contextAuthorization.getContexts();
 
 		ArrayList<HarvestedData> data = new ArrayList<HarvestedData>();
 
 		for(String context : contexts) {
 			// Setting the token for the context
 			Utils.setContext(contextAuthorization.getTokenForContext(context));
 
 			try {
 				// Collecting info on social (posts, replies and likes)
 				SocialInteractionsHarvester socialHarvester = new SocialInteractionsHarvester(start, end);
@@ -146,7 +148,7 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
 			} catch(Exception e) {
 				logger.error("Error harvesting Social Interactions for {}", context, e);
 			}
 
 			try {
 				// Collecting info on VRE users
 				VREUsersHarvester vreUsersHarvester = new VREUsersHarvester(start, end);
@@ -155,29 +157,35 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
 			} catch(Exception e) {
 				logger.error("Error harvesting Context Users for {}", context, e);
 			}
 
-			if(context.startsWith(RESOURCE_CATALOGUE_CONTEXT)) {
+			//Added by Francesco
+			Properties properties = AccountingDataHarvesterPlugin.getProperties().get();
+			String catalogueContext = (String) properties.get(RESOURCE_CATALOGUE_CONTEXT);
+			logger.debug("Read from properties "+RESOURCE_CATALOGUE_CONTEXT+" value: "+catalogueContext);
+			//end
+			if(context.startsWith(catalogueContext)) {
 				try {
 					// Collecting info on Resource Catalogue (Dataset, Application, Deliverables, Methods)
-					ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end,contexts);
+					ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end, catalogueContext, contexts);
 					List<HarvestedData> harvested = resourceCatalogueHarvester.getData();
 					data.addAll(harvested);
 				} catch(Exception e) {
 					logger.error("Error harvesting Resource Catalogue Information for {}", context, e);
 				}
 
 				try {
 					// Collecting info on Data/Method download
-					DataMethodDownloadHarvester dataMethodDownloadHarvester = new DataMethodDownloadHarvester(start, end,contexts);;
+					DataMethodDownloadHarvester dataMethodDownloadHarvester = new DataMethodDownloadHarvester(start, end,contexts);
 					List<HarvestedData> harvested = dataMethodDownloadHarvester.getData();
 					data.addAll(harvested);
 				} catch(Exception e) {
 					logger.error("Error harvesting Data Method Download for {}", context, e);
 				}
 			}
 
 			if(context.startsWith(TAGME_CONTEXT)) {
 				try {
 					// Collecting info on method invocation
@@ -197,19 +205,19 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
 					logger.error("Error harvesting Method Invocations for {}", context, e);
 				}
 			}
 		}
 
 		if(!dryRun) {
 			dbaseManager.insertMonthlyData(start, end, data, reRun);
 		}
 	}
 
 	/** {@inheritDoc} */
 	@Override
 	protected void onStop() throws Exception {
 		logger.debug("{} is stopping", this.getClass().getSimpleName());
 	}
 
 }
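Note: with this change the catalogue scope is no longer hard-coded. getConfigParameters() loads config.properties from the classpath, the plugin looks the scope up under the key RESOURCE_CATALOGUE_CONTEXT, and ResourceCatalogueHarvester (further below) additionally inverts HarvestedDataKey-named properties into its 'SystemType' to 'DB entry' map. A minimal sketch of such a file, assuming the previously hard-coded scope and the "SoBigData.eu: <Type>" system types used by the old queries (illustrative values, not the shipped configuration):

    # Scope whose catalogue is harvested (previously a compile-time constant)
    RESOURCE_CATALOGUE_CONTEXT=/d4science.research-infrastructures.eu/SoBigData/ResourceCatalogue

    # HarvestedDataKey name -> extras_systemtype value; read back via HarvestedDataKey.valueOf(key)
    NEW_CATALOGUE_DELIVERABLES=SoBigData.eu: Deliverable
    NEW_CATALOGUE_METHODS=SoBigData.eu: Method
    NEW_CATALOGUE_DATASETS=SoBigData.eu: Dataset
    NEW_CATALOGUE_APPLICATIONS=SoBigData.eu: Application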

File: DataHarvestPluginDeclaration.java

@@ -3,7 +3,6 @@ package org.gcube.dataharvest;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
 import org.gcube.vremanagement.executor.plugin.Plugin;
 import org.gcube.vremanagement.executor.plugin.PluginDeclaration;
 import org.slf4j.Logger;

File: HarvestedData.java

@@ -1,90 +1,180 @@
-/*
- *
- */
 package org.gcube.dataharvest.datamodel;
 
-import java.util.Date;
 import java.io.Serializable;
+import java.util.Date;
 
 /**
+ * The Class HarvestedData.
+ *
  * @author Eric Perrone (ISTI - CNR)
- * @author Luca Frosini (ISTI - CNR)
+ * @author Luca Frosini
+ * @author Francesco Mangiacrapa (ISTI - CNR)
  */
 public class HarvestedData implements Serializable {
 
 	/**
 	 * Generated Serial Version UID
 	 */
 	private static final long serialVersionUID = 3699669951917080213L;
 
-	public static int ACCESSESS = 1;
-	public static int USERS = 2;
-	public static int DATA_METHOD_DOWNLOAD = 3;
-	public static int NEW_CATALOGUE_METHODS = 4;
-	public static int NEW_CATALOGUE_DATASETS = 5;
-	public static int NEW_CATALOGUE_DELIVERABLES = 6;
-	public static int NEW_CATALOGUE_APPLICATIONS = 7;
-	public static int SOCIAL_POSTS = 8;
-	public static int SOCIAL_REPLIES = 9;
-	public static int SOCIAL_LIKES = 10;
-	public static int METHOD_INVOCATIONS = 11;
-	public static int VISUAL_TOOLS = 12;
+//	public static int ACCESSESS = 1;
+//	public static int USERS = 2;
+//	public static int DATA_METHOD_DOWNLOAD = 3;
+//	public static int NEW_CATALOGUE_METHODS = 4;
+//	public static int NEW_CATALOGUE_DATASETS = 5;
+//	public static int NEW_CATALOGUE_DELIVERABLES = 6;
+//	public static int NEW_CATALOGUE_APPLICATIONS = 7;
+//	public static int SOCIAL_POSTS = 8;
+//	public static int SOCIAL_REPLIES = 9;
+//	public static int SOCIAL_LIKES = 10;
+//	public static int METHOD_INVOCATIONS = 11;
+//	public static int VISUAL_TOOLS = 12;
 
 	private int dataType;
 	private String context;
 	private long measure;
 	private Date day;
+	private HarvestedDataKey harvestedDataKey;
 
+	/**
+	 * Instantiates a new harvested data.
+	 */
 	public HarvestedData() {
 	}
 
-	public HarvestedData(int dataType, String context, long measure, Date day) {
-		super();
-		this.dataType = dataType;
+	/**
+	 * Instantiates a new harvested data.
+	 *
+	 * @param key the key
+	 */
+	private HarvestedData(HarvestedDataKey key){
+		this.harvestedDataKey = key;
+		setDataType(harvestedDataKey.getValue());
+	}
+
+	/**
+	 * Instantiates a new harvested data.
+	 *
+	 * @param key the key
+	 * @param context the context
+	 * @param measure the measure
+	 * @param day the day
+	 */
+	public HarvestedData(HarvestedDataKey key, String context, long measure, Date day) {
+		this(key);
 		this.context = context;
 		this.measure = measure;
 		this.day = day;
 	}
 
-	public HarvestedData(int dataType, String context, long measure) {
-		this.dataType = dataType;
+	/**
+	 * Instantiates a new harvested data.
+	 *
+	 * @param key the key
+	 * @param context the context
+	 * @param measure the measure
+	 */
+	public HarvestedData(HarvestedDataKey key, String context, long measure) {
+		this(key);
 		this.context = context;
 		this.measure = measure;
 	}
 
-	public void setDataType(int dataType) {
+	/**
+	 * Sets the data type.
+	 *
+	 * @param dataType the new data type
+	 */
+	private void setDataType(int dataType) {
 		this.dataType = dataType;
 	}
 
+	/**
+	 * Sets the context.
+	 *
+	 * @param context the new context
+	 */
 	public void setContext(String context) {
 		this.context = context;
 	}
 
+	/**
+	 * Sets the measure.
+	 *
+	 * @param measure the new measure
+	 */
 	public void setMeasure(long measure) {
 		this.measure = measure;
 	}
 
+	/**
+	 * Sets the day.
+	 *
+	 * @param day the new day
+	 */
 	public void setDay(Date day) {
 		this.day = day;
 	}
 
+	/**
+	 * Gets the data type.
+	 *
+	 * @return the data type
+	 */
 	public int getDataType() {
 		return dataType;
 	}
 
+	/**
+	 * Gets the context.
+	 *
+	 * @return the context
+	 */
 	public String getContext() {
 		return context;
 	}
 
+	/**
+	 * Gets the measure.
+	 *
+	 * @return the measure
+	 */
 	public long getMeasure() {
 		return measure;
 	}
 
+	/**
+	 * Gets the day.
+	 *
+	 * @return the day
+	 */
 	public Date getDay() {
 		return day;
 	}
 
+	/* (non-Javadoc)
+	 * @see java.lang.Object#toString()
+	 */
 	@Override
 	public String toString() {
-		return "Harvest [context=" + context + ", dataType=" + dataType + ", measure=" + measure + "]";
+		StringBuilder builder = new StringBuilder();
+		builder.append("HarvestedData [dataType=");
+		builder.append(dataType);
+		builder.append(", context=");
+		builder.append(context);
+		builder.append(", measure=");
+		builder.append(measure);
+		builder.append(", day=");
+		builder.append(day);
+		builder.append(", harvestedDataKey=");
+		builder.append(harvestedDataKey);
+		builder.append("]");
+		return builder.toString();
 	}
 
 }

File: HarvestedDataKey.java (new)

@@ -0,0 +1,42 @@
+/**
+ *
+ */
+package org.gcube.dataharvest.datamodel;
+
+/**
+ *
+ * @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
+ * May 22, 2018
+ */
+public enum HarvestedDataKey {
+
+	ACCESSESS(1),
+	USERS(2),
+	DATA_METHOD_DOWNLOAD(3),
+	NEW_CATALOGUE_METHODS(4),
+	NEW_CATALOGUE_DATASETS(5),
+	NEW_CATALOGUE_DELIVERABLES(6),
+	NEW_CATALOGUE_APPLICATIONS(7),
+	SOCIAL_POSTS(8),
+	SOCIAL_REPLIES(9),
+	SOCIAL_LIKES(10),
+	METHOD_INVOCATIONS(11),
+	VISUAL_TOOLS(12);
+
+	private int value;
+
+	HarvestedDataKey(int value){
+		this.value = value;
+	}
+
+	/**
+	 * @return the value
+	 */
+	public int getValue() {
+		return value;
+	}
+}
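The enum mirrors the retired integer constants of HarvestedData one-to-one, so the numeric dataType persisted to the database stays stable while callers switch to type-safe keys. A small usage sketch (the class and sample values here are illustrative, not part of the commit):

    import org.gcube.dataharvest.datamodel.HarvestedData;
    import org.gcube.dataharvest.datamodel.HarvestedDataKey;

    public class HarvestedDataKeyExample {
        public static void main(String[] args) {
            // config.properties keys are enum names; valueOf() maps them back
            HarvestedDataKey key = HarvestedDataKey.valueOf("NEW_CATALOGUE_DATASETS");
            System.out.println(key.getValue()); // 5, same code as the old int constant

            // HarvestedData now derives its numeric dataType from the enum internally
            HarvestedData record = new HarvestedData(key, "/d4science.research-infrastructures.eu/SoBigData", 42);
            System.out.println(record); // toString() reports dataType=5 plus the enum name
        }
    }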

File: MethodInvocationHarvester.java

@@ -16,6 +16,7 @@ import org.gcube.accounting.analytics.persistence.AccountingPersistenceQueryFact
 import org.gcube.accounting.datamodel.AggregatedUsageRecord;
 import org.gcube.accounting.datamodel.aggregation.AggregatedJobUsageRecord;
 import org.gcube.dataharvest.datamodel.HarvestedData;
+import org.gcube.dataharvest.datamodel.HarvestedDataKey;
 import org.gcube.dataharvest.utils.DateUtils;
 import org.gcube.dataharvest.utils.Utils;
 import org.json.JSONObject;
@@ -27,65 +28,65 @@ import org.slf4j.LoggerFactory;
  * @author Luca Frosini (ISTI - CNR)
  */
 public class MethodInvocationHarvester extends BasicHarvester {
 
 	private static Logger logger = LoggerFactory.getLogger(MethodInvocationHarvester.class);
 
 	public static final String DATAMINER_SERVICE_NAME = "DataMiner";
 
 	public MethodInvocationHarvester(Date start, Date end) throws ParseException {
 		super(start, end);
 	}
 
 	@Override
 	public List<HarvestedData> getData() throws Exception {
 		try {
 			List<HarvestedData> data = new ArrayList<>();
 
 			AccountingPersistenceQuery accountingPersistenceQuery = AccountingPersistenceQueryFactory.getInstance();
 			TemporalConstraint temporalConstraint = new TemporalConstraint(startDate.getTime(), endDate.getTime(),
 					AggregationMode.MONTHLY);
 
 			List<Filter> filters = new ArrayList<>();
 			//filters.add(new Filter(ServiceUsageRecord.SERVICE_NAME, DATAMINER_SERVICE_NAME));
 
 			String context = Utils.getCurrentContext();
 			List<String> contexts = new ArrayList<>();
 			contexts.add(context);
 
 			//SortedMap<Filter,SortedMap<Calendar,Info>> result = accountingPersistenceQuery.getContextTimeSeries(
 			//		AggregatedServiceUsageRecord.class, temporalConstraint, filters, contexts, true);
 
 			SortedMap<Filter,SortedMap<Calendar,Info>> result = accountingPersistenceQuery.getContextTimeSeries(
 					AggregatedJobUsageRecord.class, temporalConstraint, filters, contexts, true);
 
 			if(result != null) {
 				for(Filter filter : result.keySet()) {
 					SortedMap<Calendar,Info> infoMap = result.get(filter);
 					Calendar calendar = DateUtils.dateToCalendar(startDate);
 					Info info = infoMap.get(calendar);
 					logger.debug("{} : {}", DateUtils.LAUNCH_DATE_FORMAT.format(calendar.getTime()), info);
 					JSONObject jsonObject = info.getValue();
 					long numberOfInvocation = jsonObject.getLong(AggregatedUsageRecord.OPERATION_COUNT);
-					HarvestedData harvestedData = new HarvestedData(HarvestedData.METHOD_INVOCATIONS, context,
+					HarvestedData harvestedData = new HarvestedData(HarvestedDataKey.METHOD_INVOCATIONS, context,
 							numberOfInvocation);
 					data.add(harvestedData);
 				}
 			} else {
 				logger.error("No data found.");
 			}
 
 			return data;
 		} catch(Exception e) {
 			throw e;
 		}
 	}
 
 }

File: SocialInteractionsHarvester.java

@@ -7,6 +7,7 @@ import java.util.Date;
 import java.util.List;
 
 import org.gcube.dataharvest.datamodel.HarvestedData;
+import org.gcube.dataharvest.datamodel.HarvestedDataKey;
 import org.gcube.dataharvest.utils.Utils;
 import org.json.JSONArray;
 import org.json.JSONObject;
@@ -18,78 +19,78 @@ import org.slf4j.LoggerFactory;
  * @author Luca Frosini (ISTI - CNR)
  */
 public class SocialInteractionsHarvester extends SocialNetworkingHarvester {
 
 	private static Logger logger = LoggerFactory.getLogger(SocialInteractionsHarvester.class);
 
 	private int likes;
 	private int replies;
 	private int posts;
 
 	public static final String PATH = "/2/posts/get-posts-vre?gcube-token=";
 
 	public SocialInteractionsHarvester(Date start, Date end) throws ParseException {
 		super(start, end);
 	}
 
 	@Override
 	public List<HarvestedData> getData() throws Exception {
 
 		String context = Utils.getCurrentContext();
 
 		try {
 			ArrayList<HarvestedData> data = new ArrayList<HarvestedData>();
 
 			getJson();
 
-			HarvestedData likesH = new HarvestedData(HarvestedData.SOCIAL_LIKES, context, likes);
+			HarvestedData likesH = new HarvestedData(HarvestedDataKey.SOCIAL_LIKES, context, likes);
 			logger.debug("{}", likesH);
 			data.add(likesH);
 
-			HarvestedData postsH = new HarvestedData(HarvestedData.SOCIAL_POSTS, context, posts);
+			HarvestedData postsH = new HarvestedData(HarvestedDataKey.SOCIAL_POSTS, context, posts);
 			logger.debug("{}", postsH);
 			data.add(postsH);
 
-			HarvestedData socialReplies = new HarvestedData(HarvestedData.SOCIAL_REPLIES, context, replies);
+			HarvestedData socialReplies = new HarvestedData(HarvestedDataKey.SOCIAL_REPLIES, context, replies);
 			logger.debug("{}", socialReplies);
 			data.add(socialReplies);
 
 			return data;
 
 		} catch(Exception e) {
 			logger.error("Error Harvesting Social Interactions for context {}", context, e);
 			throw e;
 		}
 	}
 
 	private void getJson() throws Exception {
 
 		JSONObject jsonObject = getJSONObject(PATH);
 		Boolean success = (Boolean) jsonObject.get("success");
 
 		if(success == false) {
 			throw new IOException("Erro while getting posts");
 		}
 
 		JSONArray res = jsonObject.getJSONArray("result");
 		int len = res.length();
 
 		likes = replies = posts = 0;
 
 		for(int i = 0; i < len; i++) {
 			JSONObject item = res.getJSONObject(i);
 			long time = item.getLong("time");
 
-			if((startDate.getTime() <= time) && (time <= endDate.getTime())) {
+			if(startDate.getTime() <= time && time <= endDate.getTime()) {
 				posts++;
 				replies += item.getInt("comments_no");
 				likes += item.getInt("likes_no");
 			}
 		}
 	}
 
 }

File: VREUsersHarvester.java

@@ -7,6 +7,7 @@ import java.util.Date;
 import java.util.List;
 
 import org.gcube.dataharvest.datamodel.HarvestedData;
+import org.gcube.dataharvest.datamodel.HarvestedDataKey;
 import org.gcube.dataharvest.utils.Utils;
 import org.json.JSONObject;
 import org.slf4j.Logger;
@@ -17,22 +18,22 @@ import org.slf4j.LoggerFactory;
  * @author Luca Frosini (ISTI - CNR)
  */
 public class VREUsersHarvester extends SocialNetworkingHarvester {
 
 	private static Logger logger = LoggerFactory.getLogger(VREUsersHarvester.class);
 
 	public static final String PATH = "/2/users/get-all-usernames?gcube-token=";
 
 	public VREUsersHarvester(Date start, Date end) throws ParseException {
 		super(start, end);
 	}
 
 	@Override
 	public List<HarvestedData> getData() throws Exception {
 		try {
 			String context = Utils.getCurrentContext();
 			int measure = get();
-			HarvestedData harvest = new HarvestedData(HarvestedData.USERS, context, measure);
+			HarvestedData harvest = new HarvestedData(HarvestedDataKey.USERS, context, measure);
 			logger.debug(harvest.toString());
 			ArrayList<HarvestedData> data = new ArrayList<HarvestedData>();
 			data.add(harvest);
@@ -41,19 +42,19 @@ public class VREUsersHarvester extends SocialNetworkingHarvester {
 			throw e;
 		}
 	}
 
 	private int get() throws Exception {
 		JSONObject jsonObject = getJSONObject(PATH);
 		int userNumber = 0;
 
 		Boolean success = (Boolean) jsonObject.get("success");
 		if(success == false) {
 			throw new IOException("Erro while getting VRE Users");
 		}
 
 		userNumber = jsonObject.getJSONArray("result").length();
 		return userNumber;
 	}
 
 }

File: DataMethodDownloadHarvester.java

@@ -18,6 +18,7 @@ import org.gcube.common.homelibrary.jcr.repository.JCRRepository;
 import org.gcube.common.homelibrary.jcr.workspace.JCRWorkspace;
 import org.gcube.common.homelibrary.jcr.workspace.JCRWorkspaceItem;
 import org.gcube.dataharvest.datamodel.HarvestedData;
+import org.gcube.dataharvest.datamodel.HarvestedDataKey;
 import org.gcube.dataharvest.utils.DateUtils;
 import org.gcube.dataharvest.utils.Utils;
 import org.slf4j.Logger;
@@ -28,52 +29,52 @@ import org.slf4j.LoggerFactory;
  * @author Luca Frosini (ISTI - CNR)
  */
 public class DataMethodDownloadHarvester extends SoBigDataHarvester {
 
 	private static Logger logger = LoggerFactory.getLogger(DataMethodDownloadHarvester.class);
 
 	private int count = 0;
 
 	public DataMethodDownloadHarvester(Date start, Date end, SortedSet<String> contexts) throws ParseException {
 		super(start, end, contexts);
 	}
 
 	@Override
 	public List<HarvestedData> getData() throws Exception {
 
 		String context = Utils.getCurrentContext();
 
 		try {
 			ArrayList<HarvestedData> data = new ArrayList<HarvestedData>();
 			count = 0;
 
 			HomeManager manager = HomeLibrary.getHomeManagerFactory().getHomeManager();
 			String user = getVREName(context) + "-Manager";
 			@SuppressWarnings("deprecation")
 			Home home = manager.getHome(user);
 
 			JCRWorkspace ws = (JCRWorkspace) home.getWorkspace();
 
 			JCRWorkspaceItem item = (JCRWorkspaceItem) ws
 					.getItemByPath("/Workspace/MySpecialFolders/" + getVREName(context));
 
 			logger.debug("Analyzing " + context + " from " + startDate.toString() + " to " + endDate.toString());
 
 			logger.error("Before getStats()");
 			getStats(item, startDate, endDate);
 			logger.error("After getStats()");
 
-			HarvestedData harvest = new HarvestedData(HarvestedData.DATA_METHOD_DOWNLOAD, context, count);
+			HarvestedData harvest = new HarvestedData(HarvestedDataKey.DATA_METHOD_DOWNLOAD, context, count);
 			data.add(harvest);
 			logger.debug(harvest.toString());
 
 			return data;
 
 		} catch(Exception e) {
 			throw e;
 		}
 	}
 
 	private void getStats(WorkspaceItem root, Date start, Date end) throws InternalErrorException {
 		List<? extends WorkspaceItem> children;
 		if(root.isFolder()) {
@@ -82,10 +83,10 @@ public class DataMethodDownloadHarvester extends SoBigDataHarvester {
 				getStats(child, start, end);
 		} else {
 			try {
 				List<AccountingEntry> accounting = root.getAccounting();
 				for(AccountingEntry entry : accounting) {
 					switch(entry.getEntryType()) {
 						case CREATE:
 						case UPDATE:
@@ -95,13 +96,13 @@ public class DataMethodDownloadHarvester extends SoBigDataHarvester {
 									&& calendar.before(DateUtils.dateToCalendar(end))) {
 								count++;
 							}
 							break;
 						default:
 							break;
 					}
 				}
 			} catch(Exception e) {
 				logger.error("DataMethodDownloadHarvester: " + e.getLocalizedMessage());
@@ -109,10 +110,10 @@ public class DataMethodDownloadHarvester extends SoBigDataHarvester {
 			}
 		}
 	}
 
 	private static String getVREName(String vre) {
 		Validate.notNull(vre, "scope must be not null");
 		String newName;
 		if(vre.startsWith(JCRRepository.PATH_SEPARATOR))
 			newName = vre.replace(JCRRepository.PATH_SEPARATOR, "-").substring(1);
@@ -120,5 +121,5 @@ public class DataMethodDownloadHarvester extends SoBigDataHarvester {
 			newName = vre.replace(JCRRepository.PATH_SEPARATOR, "-");
 		return newName;
 	}
 }
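For orientation, the harvester's workspace lookup hinges on a scope-to-name convention: the leading separator is dropped and the remaining separators become '-'. A self-contained sketch of that convention, assuming JCRRepository.PATH_SEPARATOR is "/" (the scope value is just an example from this commit):

    public class VRENameExample {
        private static final String PATH_SEPARATOR = "/"; // assumed value of JCRRepository.PATH_SEPARATOR

        static String getVREName(String vre) {
            return vre.startsWith(PATH_SEPARATOR)
                    ? vre.replace(PATH_SEPARATOR, "-").substring(1)
                    : vre.replace(PATH_SEPARATOR, "-");
        }

        public static void main(String[] args) {
            String name = getVREName("/d4science.research-infrastructures.eu/SoBigData/TagMe");
            System.out.println(name);                                  // d4science.research-infrastructures.eu-SoBigData-TagMe
            System.out.println(name + "-Manager");                     // Home Library user queried by the harvester
            System.out.println("/Workspace/MySpecialFolders/" + name); // item path walked by getStats()
        }
    }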

File: ResourceCatalogueHarvester.java

@@ -1,13 +1,21 @@
 package org.gcube.dataharvest.harvester.sobigdata;
 
-import java.text.ParseException;
 import java.util.ArrayList;
 import java.util.Date;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
 import java.util.SortedSet;
 
+import org.apache.commons.lang.Validate;
+import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogueFactory;
+import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogueImpl;
+import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
 import org.gcube.dataharvest.datamodel.HarvestedData;
+import org.gcube.dataharvest.datamodel.HarvestedDataKey;
 import org.gcube.dataharvest.utils.DateUtils;
 import org.gcube.dataharvest.utils.Utils;
 import org.gcube.portlets.user.urlshortener.UrlEncoderUtil;
@@ -17,80 +25,147 @@ import org.json.JSONObject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import eu.trentorise.opendata.jackan.model.CkanGroup;
+
 /**
+ * The Class ResourceCatalogueHarvester.
+ *
  * @author Eric Perrone (ISTI - CNR)
  * @author Luca Frosini (ISTI - CNR)
+ * @author Francesco Mangiacrapa(ISTI - CNR)
  */
 public class ResourceCatalogueHarvester extends SoBigDataHarvester {
 
+	/**
+	 *
+	 */
+	private static final String GROUP_LABEL = "group";
+
+	private static final String AND = " AND ";
+
+	public static int ROWS = 500;
+
 	private static Logger logger = LoggerFactory.getLogger(ResourceCatalogueHarvester.class);
 
-	private int cityOfCitizensCounter = 0;
-	private int migrationStudiesCounter = 0;
-	private int societalDebatesCounter = 0;
-	private int wellBeingAndEconomyCounter = 0;
-
 	protected String solrBaseUrl;
 
-	public ResourceCatalogueHarvester(Date start, Date end, SortedSet<String> contexts) throws ParseException {
+	private DataCatalogueFactory factory;
+
+	private HashMap<String, String> mapTypeToDBEntry;
+
+	private HashMap<String, String> mapCatalogueGroupToVRE;
+
+	private String catalogueContext;
+
+	/**
+	 * Instantiates a new resource catalogue harvester.
+	 *
+	 * @param start the start
+	 * @param end the end
+	 * @param catalogueContext the catalogue context
+	 * @param contexts the contexts
+	 * @throws Exception the exception
+	 */
+	public ResourceCatalogueHarvester(Date start, Date end, String catalogueContext, SortedSet<String> contexts) throws Exception {
 		super(start, end, contexts);
+
+		factory = DataCatalogueFactory.getFactory();
+		this.catalogueContext = catalogueContext;
+
+		if(catalogueContext==null || catalogueContext.isEmpty())
+			throw new Exception("The catalogue context is null or empty. Pass a valid scope");
+
+		logger.debug("Catalogue context is: "+catalogueContext);
+
+		Properties properties = AccountingDataHarvesterPlugin.getProperties().get();
+		Set<String> keys = properties.stringPropertyNames();
+
+		mapTypeToDBEntry = new HashMap<String, String>();
+		for (String key : keys) {
+			//System.out.println(key + " : " + properties.getProperty(key));
+			try{
+				HarvestedDataKey valueEnum = HarvestedDataKey.valueOf(key);
+				mapTypeToDBEntry.put(properties.getProperty(key), valueEnum.name());
+			}catch(Exception e){
+				//silent
+			}
+		}
+
+		logger.info("Built from properties the mapping 'SystemType' to 'DB entry' : "+mapTypeToDBEntry);
+
+		//GET CATALOGUE'S GROUPS
+		List<String> groups = getGroups(catalogueContext);
+
+		//NORMALIZING THE GROUP NAME TO MATCH WITH VRE NAME
+		Map<String,String> mapNormalizedGroups = normalizeGroups(groups);
+		logger.debug("Map of Normalized Groups is: "+mapNormalizedGroups);
+
+		//CREATING MAPPING BETWEEN (CATALOGUE GROUP NAME TO VRE NAME)
+		mapCatalogueGroupToVRE = new HashMap<String, String>();
+		Set<String> normalizedGroups = mapNormalizedGroups.keySet();
+		for (String context : contexts) {
+			//logger.trace("Context is: " + context);
+			String loweredVREName =context.substring(context.lastIndexOf("/") + 1, context.length()).toLowerCase();
+			//logger.trace("vreName lowered is: " + loweredVREName);
+			try {
+				if (normalizedGroups.contains(loweredVREName)) {
+					logger.debug("Normalized Groups matching the lowered VRE name: "+loweredVREName);
+					// Creating the map with couple (catalogue group name,
+					// scope)
+					mapCatalogueGroupToVRE.put(mapNormalizedGroups.get(loweredVREName), context);
+				}
+			}
+			catch (Exception e) {
+				// silent
+			}
+		}
+
+		logger.info("Map of Catalogue Groups To VRE is: "+mapCatalogueGroupToVRE);
 	}
 
+	/**
+	 * Gets the solr base url.
+	 *
+	 * @return the solr base url
+	 */
 	protected String getSolrBaseUrl() {
 		return "https://ckan-solr-d4s.d4science.org/solr/sobigdata";
 	}
 
+	/* (non-Javadoc)
+	 * @see org.gcube.dataharvest.harvester.BasicHarvester#getData()
+	 */
 	@Override
 	public List<HarvestedData> getData() throws Exception {
 
-		ArrayList<HarvestedData> data = new ArrayList<HarvestedData>();
-		List<HarvestedData> dataDeliverable = getDataDeliverable();
-		for(HarvestedData harvest : dataDeliverable) {
-			data.add(harvest);
-		}
-		List<HarvestedData> dataMethod = getDataMethod();
-		for(HarvestedData harvest : dataMethod) {
-			data.add(harvest);
-		}
-		List<HarvestedData> dataDataset = getDataDataset();
-		for(HarvestedData harvest : dataDataset) {
-			data.add(harvest);
-		}
-		List<HarvestedData> dataApplication = getDataApplication();
-		for(HarvestedData harvest : dataApplication) {
-			data.add(harvest);
-		}
+		List<HarvestedData> data = new ArrayList<HarvestedData>();
+
+		//FOR EACH SYSTEM_TYPE
+		for (String systemType : mapTypeToDBEntry.keySet()) {
+
+			List<String> solrParameters = new ArrayList<String>(1);
+			solrParameters.add("extras_systemtype:\""+systemType+"\"");
+			//EXECUTING THE QUERY IN THE PERIOD
+			String queryResult = executeQueryFor(solrParameters, startDate, endDate, "groups");
+			HarvestedDataKey insertDBKey = HarvestedDataKey.valueOf(mapTypeToDBEntry.get(systemType));
+			logger.info("Creating statistics for type: "+systemType+ " using db key "+insertDBKey);
+			data.addAll(buildListOfHarvestedData(queryResult, insertDBKey));
+		}
+
 		return data;
 	}
 
-	public List<HarvestedData> getDataDeliverable() throws Exception {
-		String json = executeQuery("Deliverable");
-		return buildList(json, HarvestedData.NEW_CATALOGUE_DELIVERABLES);
-	}
-
-	public List<HarvestedData> getDataMethod() throws Exception {
-		String json = executeQuery("Method");
-		return buildList(json, HarvestedData.NEW_CATALOGUE_METHODS);
-	}
-
-	public List<HarvestedData> getDataDataset() throws Exception {
-		String json = executeQuery("Dataset");
-		return buildList(json, HarvestedData.NEW_CATALOGUE_DATASETS);
-	}
-
-	public List<HarvestedData> getDataApplication() throws Exception {
-		String json = executeQuery("Application");
-		return buildList(json, HarvestedData.NEW_CATALOGUE_APPLICATIONS);
-	}
-
-	private List<HarvestedData> buildList(String json, int dataType) throws Exception {
-		ArrayList<HarvestedData> data = new ArrayList<HarvestedData>();
+	/**
+	 * Builds the list of harvested data.
+	 *
+	 * @param json the json
+	 * @param harvestKey the harvest key
+	 * @return the list
+	 * @throws Exception the exception
+	 */
+	private List<HarvestedData> buildListOfHarvestedData(String json, HarvestedDataKey harvestKey) throws Exception {
 
 		JSONObject jsonObject = new JSONObject(json);
 		JSONObject responseHeader = jsonObject.getJSONObject("responseHeader");
 		int status = responseHeader.getInt("status");
 		if(status != 0) {
@@ -98,16 +173,21 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
 			logger.error(err);
 			throw new Exception(err, null);
 		}
 
 		JSONObject response = jsonObject.getJSONObject("response");
 		int numFound = response.getInt("numFound");
 
-		HarvestedData h = new HarvestedData(dataType,
-				"/d4science.research-infrastructures.eu/SoBigData/ResourceCatalogue", numFound);
-		logger.debug(h.toString());
-		data.add(h);
+		Map<String, Integer> counter = new HashMap<String, Integer>(mapCatalogueGroupToVRE.size()+1);
+		for (String groupName : mapCatalogueGroupToVRE.keySet()) {
+			counter.put(groupName, 0);
+		}
+
+		//Counter for default context of accounting
+		int catalogueContextCount = 0;
+
+		logger.debug("For "+harvestKey+" has found "+numFound+" doc/s");
 		if(numFound > 0) {
 
 			JSONArray docs = response.getJSONArray("docs");
 			for(Object item : docs) {
 				JSONObject doc = (JSONObject) item;
@@ -115,70 +195,211 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
 					JSONArray groups = doc.getJSONArray("groups");
 					Iterator<Object> git = groups.iterator();
 					while(git.hasNext()) {
-						String groupItem = (String) git.next();
-						counterByGroup(groupItem);
+						String catalogueGroupName = (String) git.next();
+						logger.debug("GroupName found: "+catalogueGroupName);
+						//counterByGroup(groupItem);
+						Integer currentCount = counter.get(catalogueGroupName);
+						if(currentCount!=null)
+							counter.put(catalogueGroupName, currentCount+1);
+						else{
+							logger.warn("No mapping found for Catalogue-Group Name: "+catalogueGroupName+" from VREs. Accounting it in the catalogue context: "+catalogueContext);
+							//counter.put(catalogueContext, counter.get(catalogueContext)+1);
+							catalogueContextCount++;
+						}
+						break; //Accounting the item only in the first group found
 					}
 				} catch(JSONException x) {
-					logger.debug("Document without groups");
+					logger.debug("Document without groups, accounting it in the catalogue context");
+					catalogueContextCount++;
+				} catch (Exception e) {
+					logger.error("Skipping parsing error", e);
 				}
 			}
-
-			h = new HarvestedData(dataType, "/d4science.research-infrastructures.eu/SoBigData/CityOfCitizens",
-					cityOfCitizensCounter);
-			logger.debug(h.toString());
-			data.add(h);
-
-			h = new HarvestedData(dataType, "/Migration Studies", migrationStudiesCounter);
-			logger.debug(h.toString());
-			data.add(h);
-
-			h = new HarvestedData(dataType, "/d4science.research-infrastructures.eu/SoBigData/SocietalDebates",
-					societalDebatesCounter);
-			logger.debug(h.toString());
-			data.add(h);
-
-			h = new HarvestedData(dataType, "/d4science.research-infrastructures.eu/SoBigData/WellBeingAndEconomy",
-					wellBeingAndEconomyCounter);
-			logger.debug(h.toString());
-			data.add(h);
 		}
 
+		List<HarvestedData> data = new ArrayList<HarvestedData>();
+
+		logger.trace("The context: "+catalogueContext + " has count: "+catalogueContextCount);
+		data.add(new HarvestedData(harvestKey,catalogueContext, catalogueContextCount));
+
+		for (String key : counter.keySet()) {
+			logger.trace("The group: "+key + " has count: "+counter.get(key));
+			data.add(new HarvestedData(harvestKey, mapCatalogueGroupToVRE.get(key), counter.get(key)));
+		}
+
+		logger.info("For "+harvestKey+ " in the period [from "+startDate+" to "+endDate+ "] returning accouting data:");
+		for (HarvestedData harvestedData : data) {
+			logger.info(harvestedData.toString());
+		}
+
 		return data;
 	}
 
-	private String executeQuery(String fqSubString) throws Exception {
-
-		String query = getSolrBaseUrl() + "select?";
-
-		String q = UrlEncoderUtil.encodeQuery("metadata_created:[" + DateUtils.dateToStringWithTZ(startDate) + " TO "
-				+ DateUtils.dateToStringWithTZ(endDate) + "]");
-		query += "q=" + q;
-		String fq = UrlEncoderUtil.encodeQuery("extras_systemtype:\"SoBigData.eu: " + fqSubString + "\"");
-		query += "&fq=" + fq + "&wt=json&indent=true";
-		logger.debug(query);
-
-		String json = Utils.getJson(query);
-		// logger.debug(json);
-
-		return json;
-	}
-
-	private void counterByGroup(String groupName) {
-		cityOfCitizensCounter = migrationStudiesCounter = societalDebatesCounter = wellBeingAndEconomyCounter = 0;
-		switch(groupName) {
-			case "city-of-citizens-group":
-				cityOfCitizensCounter++;
-				break;
-			case "migration-studies":
-				migrationStudiesCounter++;
-				break;
-			case "societal-debates-group":
-				societalDebatesCounter++;
-				break;
-			case "well-being-and-economy-group":
-				wellBeingAndEconomyCounter++;
-				break;
-		}
-	}
+	/**
+	 * Gets the groups.
+	 *
+	 * @param scope the scope
+	 * @return the groups
+	 */
+	private List<String> getGroups(String scope){
+		List<String> groups = new ArrayList<String>();
+		String ckanURL = "";
+		try {
+			DataCatalogueImpl utils = factory.getUtilsPerScope(scope);
+			ckanURL = utils.getCatalogueUrl();
+			List<CkanGroup> theGroups = utils.getGroups();
+			Validate.notNull(theGroups, "The list of Groups is null");
+			for (CkanGroup ckanGroup : theGroups) {
+				groups.add(ckanGroup.getName());
+			}
+		}
+		catch (Exception e) {
+			logger.error("Error occurred on getting CKAN groups for scope: "+scope+" and CKAN URL: "+ckanURL,e);
+		}
+
+		return groups;
+	}
+
+//	/**
+//	 * Execute query.
+//	 *
+//	 * @param fqSubString the fq sub string
+//	 * @return the string
+//	 * @throws Exception the exception
+//	 */
+//	//TODO THIS METHOD MUST BE OPTIMIZED USING HttpSolrClient
+//	//We are not considering the rows (the number of documents returned from Solr by default)
+//	private String executeQuery(String fqSubString) throws Exception {
+//
+//		String query = getSolrBaseUrl().endsWith("/")? getSolrBaseUrl():getSolrBaseUrl()+"/";
+//		query+="select?";
+//
+//		String q = UrlEncoderUtil.encodeQuery("metadata_created:[" + DateUtils.dateToStringWithTZ(startDate) + " TO "
+//				+ DateUtils.dateToStringWithTZ(endDate) + "]");
+//		query += "q=" + q;
+//		String fq = UrlEncoderUtil.encodeQuery("extras_systemtype:\"SoBigData.eu: " + fqSubString + "\"");
+//		query += "&fq=" + fq + "&wt=json&indent=true&rows="+ROWS;
+//		logger.debug("Performing query: "+query);
+//
+//		String json = Utils.getJson(query);
+//		logger.trace("Response is: "+json);
+//
+//		return json;
+//	}
+
+	/**
+	 * Execute query.
+	 *
+	 * @param solrParameters the solr parameters
+	 * @param startDate the start date
+	 * @param endDate the end date
+	 * @param flValue the fl value
+	 * @return the string
+	 * @throws Exception the exception
+	 */
+	//TODO THIS METHOD MUST BE OPTIMIZED USING HttpSolrClient
+	//We are not considering the rows (the number of documents returned from Solr by default)
+	public String executeQueryFor(List<String> solrParameters, Date startDate, Date endDate, String flValue) throws Exception {
+
+		String query = getSolrBaseUrl().endsWith("/")? getSolrBaseUrl():getSolrBaseUrl()+"/";
+		query+="select?";
+
+		String q="";
+
+		//ADDING START AND END DATE IF ARE VALIDS
+		if(startDate!=null && endDate!=null){
+			q+= "metadata_created:[" + DateUtils.dateToStringWithTZ(startDate) + " TO " + DateUtils.dateToStringWithTZ(endDate) + "]";
+		}
+
+		//ADDING PARAMETERS
+		if(solrParameters!=null && solrParameters.size()>0){
+			q+= q.isEmpty()?"":AND;
+			for (int i = 0; i < solrParameters.size()-1; i++) {
+				q+= solrParameters.get(i)+AND;
+			}
+			q+= solrParameters.get(solrParameters.size()-1);
+		}
+
+		query += "q=" + UrlEncoderUtil.encodeQuery(q) + "&wt=json&indent=true&rows="+ROWS;
+		query += flValue!=null && !flValue.isEmpty()?"&fl="+UrlEncoderUtil.encodeQuery(flValue):"";
+		logger.debug("\nPerforming query: "+query);
+
+		String jsonResult = Utils.getJson(query);
+		logger.trace("Response is: "+jsonResult);
+
+		return jsonResult;
+	}
+
+	/**
+	 * Gets the catalogue context.
+	 *
+	 * @return the catalogueContext
+	 */
+	public String getCatalogueContext() {
+		return catalogueContext;
+	}
+
+	/**
+	 * Sets the catalogue context.
+	 *
+	 * @param catalogueContext the catalogueContext to set
+	 */
+	public void setCatalogueContext(String catalogueContext) {
+		this.catalogueContext = catalogueContext;
+	}
+
+	/**
+	 * Normalize groups.
+	 *
+	 * @param groups the groups
+	 * @return the map with couple (normalized group name, group name)
+	 */
+	private Map<String,String> normalizeGroups(List<String> groups) {
+		Map<String,String> listNGroups = new HashMap<String,String>(groups.size());
+		for (String group : groups) {
+			String normalizedGroup = group;
+			if(normalizedGroup.endsWith(GROUP_LABEL)){
+				normalizedGroup = normalizedGroup.substring(0, normalizedGroup.length()-GROUP_LABEL.length());
+			}
+			normalizedGroup = normalizedGroup.replaceAll("-","");
+			listNGroups.put(normalizedGroup.toLowerCase(), group);
+		}
+		return listNGroups;
+	}
+
+//	/**
+//	 * Gets the data francesco.
+//	 *
+//	 * @return the data francesco
+//	 * @throws Exception the exception
+//	 */
+//	public List<HarvestedData> getDataFrancesco() throws Exception {
+//
+//		List<HarvestedData> data = new ArrayList<HarvestedData>();
+//
+//		//FOR EACH SYSTEM_TYPE
+//		for (String systemType : mapTypeToDBEntry.keySet()) {
+//
+//			List<String> solrParameters = new ArrayList<String>(1);
+//			solrParameters.add("extras_systemtype:\""+systemType+"\"");
+//			//EXECUTING THE QUERY IN THE PERIOD
+//			String queryResult = executeQueryFor(solrParameters, startDate, endDate, "groups");
+//			HarvestedDataKey insertDBKey = HarvestedDataKey.valueOf(mapTypeToDBEntry.get(systemType));
+//			logger.info("Creating statistics for type: "+systemType+ " using db key "+insertDBKey);
+//			data.addAll(buildListOfHarvestedData(queryResult, insertDBKey));
+//		}
+//
+//		return data;
+//	}
 }
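For reference, executeQueryFor() above assembles a plain Solr select URL from the base URL, the metadata_created date range, the extras_systemtype parameter, rows=500 (ROWS) and the requested fl field. A sketch of one such request before URL-encoding, assuming the "SoBigData.eu: Dataset" system type and an illustrative monthly window (the actual date strings come from DateUtils.dateToStringWithTZ):

    https://ckan-solr-d4s.d4science.org/solr/sobigdata/select
        ?q=metadata_created:[2018-04-01T00:00:00Z TO 2018-05-01T00:00:00Z] AND extras_systemtype:"SoBigData.eu: Dataset"
        &wt=json&indent=true&rows=500&fl=groups

buildListOfHarvestedData() then reads numFound and each doc's groups array, credits a document to the VRE mapped from its first known group, and falls back to the catalogue context for unknown groups or documents without groups.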

File: TagMeMethodInvocationHarvester.java

@@ -17,6 +17,7 @@ import org.gcube.accounting.datamodel.AggregatedUsageRecord;
 import org.gcube.accounting.datamodel.aggregation.AggregatedServiceUsageRecord;
 import org.gcube.accounting.datamodel.usagerecords.ServiceUsageRecord;
 import org.gcube.dataharvest.datamodel.HarvestedData;
+import org.gcube.dataharvest.datamodel.HarvestedDataKey;
 import org.gcube.dataharvest.harvester.BasicHarvester;
 import org.gcube.dataharvest.utils.DateUtils;
 import org.gcube.dataharvest.utils.Utils;
@@ -29,63 +30,63 @@ import org.slf4j.LoggerFactory;
  * @author Luca Frosini (ISTI - CNR)
  */
 public class TagMeMethodInvocationHarvester extends BasicHarvester {
 
 	private static Logger logger = LoggerFactory.getLogger(TagMeMethodInvocationHarvester.class);
 
 	public static final String TAG_METHOD = "tag";
 
 	public TagMeMethodInvocationHarvester(Date start, Date end) throws ParseException {
 		super(start, end);
 	}
 
 	@Override
 	public List<HarvestedData> getData() throws Exception {
 		try {
 			List<HarvestedData> data = new ArrayList<>();
 
 			AccountingPersistenceQuery accountingPersistenceQuery = AccountingPersistenceQueryFactory.getInstance();
 			TemporalConstraint temporalConstraint = new TemporalConstraint(startDate.getTime(), endDate.getTime(),
 					AggregationMode.MONTHLY);
 
 			List<Filter> filters = new ArrayList<>();
 			filters.add(new Filter(ServiceUsageRecord.CALLED_METHOD, TAG_METHOD));
 
 			String context = Utils.getCurrentContext();
 			List<String> contexts = new ArrayList<>();
 			contexts.add(context);
 
 			SortedMap<Filter,SortedMap<Calendar,Info>> result = accountingPersistenceQuery.getContextTimeSeries(
 					AggregatedServiceUsageRecord.class, temporalConstraint, filters, contexts, true);
 
 			if(result != null) {
 				for(Filter filter : result.keySet()) {
 					SortedMap<Calendar,Info> infoMap = result.get(filter);
 					Calendar calendar = DateUtils.dateToCalendar(startDate);
 					Info info = infoMap.get(calendar);
 					logger.debug("{} : {}", DateUtils.LAUNCH_DATE_FORMAT.format(calendar.getTime()), info);
 					JSONObject jsonObject = info.getValue();
 					long numberOfInvocation = jsonObject.getLong(AggregatedUsageRecord.OPERATION_COUNT);
-					HarvestedData harvestedData = new HarvestedData(HarvestedData.METHOD_INVOCATIONS, context,
+					HarvestedData harvestedData = new HarvestedData(HarvestedDataKey.METHOD_INVOCATIONS, context,
 							numberOfInvocation);
 					data.add(harvestedData);
 				}
 			} else {
 				logger.error("No data found.");
 			}
 
 			return data;
 		} catch(Exception e) {
 			throw e;
 		}
 	}
 
 }

File: ContextAuthorization.java

@ -2,7 +2,6 @@ package org.gcube.dataharvest.utils;
import static org.gcube.common.authorization.client.Constants.authorizationService; import static org.gcube.common.authorization.client.Constants.authorizationService;
import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
import java.util.Map; import java.util.Map;
@@ -22,25 +21,25 @@ import org.slf4j.Logger;

 * @author Luca Frosini (ISTI - CNR)
 */
public class ContextAuthorization {

	private static Logger logger = LoggerFactory.getLogger(ContextAuthorization.class);

	public static final String USERNAME = "USERNAME";
	public static final String DEFAULT_USERNAME = "luca.frosini";

	public static final String SERVICE_NAME = "SERVICE_NAME";
	public static final String DEFAULT_SERVICE_NAME = "accounting-harvester";

	/**
	 * Contains Context full name as key and Token as Value
	 */
	protected Map<String,String> contextToToken;

	/**
	 * Contains Token as key and Context full name as Value
	 */
	protected Map<String,String> tokenToContext;

	/**
	 * Contains Properties used to generate tokens
	 */
@@ -49,39 +48,40 @@ public class ContextAuthorization {
		this.tokenToContext = new HashMap<>();
		retrieveContextsAndTokens();
	}

	protected void retrieveContextsAndTokens() throws Exception {
		String initialToken = SecurityTokenProvider.instance.get();
		try {
			Properties properties = AccountingDataHarvesterPlugin.getProperties().get();
			LinkedHashMap<String,ScopeBean> map = ContextManager.readContexts();
			for(String scope : map.keySet()) {
				try {
					String context = map.get(scope).toString();
					logger.info("Going to generate Token for Context {}", context);
-					UserInfo userInfo = new UserInfo(properties.getProperty(USERNAME, DEFAULT_USERNAME),
-							new ArrayList<>());
+					// UserInfo userInfo = new UserInfo(properties.getProperty(USERNAME, DEFAULT_USERNAME),
+					//		new ArrayList<>());
+					UserInfo userInfo = null;
					String userToken = authorizationService().generateUserToken(userInfo, context);
					SecurityTokenProvider.instance.set(userToken);
					String generatedToken = authorizationService()
							.generateExternalServiceToken(properties.getProperty(SERVICE_NAME, DEFAULT_SERVICE_NAME));
					logger.trace("Token for Context {} is {}", context, generatedToken);
					contextToToken.put(context, generatedToken);
					tokenToContext.put(generatedToken, context);
				} catch(Exception e) {
					logger.error("Error while elaborating {}", scope, e);
					throw e;
				} finally {
					SecurityTokenProvider.instance.reset();
				}
			}
		} catch(Exception ex) {
			throw ex;
@@ -89,17 +89,17 @@ public class ContextAuthorization {
			SecurityTokenProvider.instance.set(initialToken);
		}
	}

	public String getTokenForContext(String contextFullName) {
		return contextToToken.get(contextFullName);
	}

	public String getContextFromToken(String token) {
		return tokenToContext.get(token);
	}

	public SortedSet<String> getContexts() {
		return new TreeSet<String>(contextToToken.keySet());
	}
}
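For orientation, a minimal sketch of how ContextAuthorization is meant to be driven (an illustration, assuming the calling thread already carries a valid token so that ContextManager.readContexts() can resolve the infrastructure scopes):

	ContextAuthorization contextAuthorization = new ContextAuthorization();
	for(String context : contextAuthorization.getContexts()) {
		String token = contextAuthorization.getTokenForContext(context);
		SecurityTokenProvider.instance.set(token);
		try {
			// query accounting, harvest data, etc. within this context
		} finally {
			SecurityTokenProvider.instance.reset();
		}
	}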

View File

@@ -5,6 +5,7 @@ import java.util.HashMap;

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
+import java.util.Properties;
import java.util.SortedSet;
import java.util.TreeSet;
@@ -22,133 +23,190 @@ import org.slf4j.Logger;

import org.slf4j.LoggerFactory;

public class AccountingDataHarvesterPluginTest extends ContextTest {

	private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterPluginTest.class);
-	@Test
+	//@Test
	public void test() {
		try {
			org.gcube.dataharvest.utils.Utils.setContext(ROOT);
			DataHarvestPluginDeclaration dataHarvestPluginDeclaration = new DataHarvestPluginDeclaration();
			AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(
					dataHarvestPluginDeclaration);
			Map<String,Object> inputs = new HashMap<>();
			MeasureType measureType = MeasureType.MONTHLY;
			inputs.put(AccountingDataHarvesterPlugin.MEASURE_TYPE_INPUT_PARAMETER, measureType.name());
			inputs.put(AccountingDataHarvesterPlugin.RERUN_INPUT_PARAMETER, true);
			inputs.put(AccountingDataHarvesterPlugin.DRY_RUN_INPUT_PARAMETER, true);
			/*
			Calendar from = DateUtils.getStartCalendar(2018, Calendar.APRIL, 1);
			String fromDate = DateUtils.LAUNCH_DATE_FORMAT.format(from.getTime());
			logger.trace("{} is {}", AccountingDataHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
			inputs.put(AccountingDataHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
			*/
			accountingDataHarvesterPlugin.launch(inputs);
			logger.info("End.");
		} catch(Exception e) {
			logger.error("", e);
		}
	}
-	@Test
+	//@Test
	public void testMethodInvocation() {
		try {
			org.gcube.dataharvest.utils.Utils.setContext(StockAssessment);
			MeasureType measureType = MeasureType.MONTHLY;
			Date start = DateUtils.getPreviousPeriod(measureType).getTime();
			Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
			MethodInvocationHarvester methodInvocationHarvester = new MethodInvocationHarvester(start, end);
			List<HarvestedData> harvestedData = methodInvocationHarvester.getData();
			logger.debug("{}", harvestedData);
		} catch(Exception e) {
			logger.error("", e);
		}
	}
-	@Test
+	//@Test
	public void testTagMeMethodInvocation() {
		try {
			org.gcube.dataharvest.utils.Utils.setContext(TAGME);
			MeasureType measureType = MeasureType.MONTHLY;
			Date start = DateUtils.getPreviousPeriod(measureType).getTime();
			Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
			TagMeMethodInvocationHarvester methodInvocationHarvester = new TagMeMethodInvocationHarvester(start, end);
			List<HarvestedData> harvestedData = methodInvocationHarvester.getData();
			logger.debug("{}", harvestedData);
		} catch(Exception e) {
			logger.error("", e);
		}
	}
-	@Test
-	public void testResourceCatalogueHarvester() {
-		try {
-			org.gcube.dataharvest.utils.Utils.setContext(RESOURCE_CATALOGUE);
-			MeasureType measureType = MeasureType.MONTHLY;
-			Date start = DateUtils.getPreviousPeriod(measureType).getTime();
-			Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
-			LinkedHashMap<String,ScopeBean> map = ContextManager.readContexts();
-			SortedSet<String> contexts = new TreeSet<>(map.keySet());
-			ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end, contexts);
-			List<HarvestedData> harvestedData = resourceCatalogueHarvester.getData();
-			logger.debug("{}", harvestedData);
-		} catch(Exception e) {
-			logger.error("", e);
-		}
-	}

-	@Test
+	//@Test
	public void testFilteringGenericResource() {
		try {
			org.gcube.dataharvest.utils.Utils.setContext(RESOURCE_CATALOGUE);
			MeasureType measureType = MeasureType.MONTHLY;
			Date start = DateUtils.getPreviousPeriod(measureType).getTime();
			Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
			LinkedHashMap<String,ScopeBean> map = ContextManager.readContexts();
			SortedSet<String> contexts = new TreeSet<>(map.keySet());
-			ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end, contexts);
+			AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(null);
+			accountingDataHarvesterPlugin.getConfigParameters();
+			// Added by Francesco
+			Properties properties = AccountingDataHarvesterPlugin.getProperties().get();
+			String catalogueContext = (String) properties.get(AccountingDataHarvesterPlugin.RESOURCE_CATALOGUE_CONTEXT);
+			logger.debug("Read from properties " + AccountingDataHarvesterPlugin.RESOURCE_CATALOGUE_CONTEXT + " value: " + catalogueContext);
+			// end
+			ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end, catalogueContext, contexts);
			SortedSet<String> excludedContexts = resourceCatalogueHarvester.getExcludedContexts();
			logger.info("Excluded contexts {}", excludedContexts);
			SortedSet<String> validContexts = resourceCatalogueHarvester.getFilteredContexts();
			logger.info("Valid Contexts {}", validContexts);
		} catch(Exception e) {
			logger.error("", e);
		}
	}
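With this change the catalogue context is resolved from config.properties instead of a hard-coded constant. The properties file itself is not part of this diff; a plausible entry, reusing the SoBigData path from the old constant, would be:

	RESOURCE_CATALOGUE_CONTEXT=/d4science.research-infrastructures.eu/SoBigData/ResourceCatalogue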
+	@Test
+	public void testResourceCatalogueHarvester() {
+		try {
+			// Properties properties = AccountingDataHarvesterPlugin.getProperties().get();
+			// //Enumeration<String> enums = (Enumeration<String>) properties.propertyNames();
+			// //System.out.println("enums: " + enums.hasMoreElements());
+			// Set<String> keys = properties.stringPropertyNames();
+			//
+			// Map<String, String> typeToDB = new HashMap<String, String>();
+			// for (String key : keys) {
+			//	System.out.println(key + " : " + properties.getProperty(key));
+			//	try{
+			//		HarvestedDataKey valueEnum = HarvestedDataKey.valueOf(key);
+			//		typeToDB.put(properties.getProperty(key), valueEnum.name());
+			//	}catch(Exception e){
+			//		//silent
+			//	}
+			// }
+			//
+			// System.out.println(typeToDB);
+			org.gcube.dataharvest.utils.Utils.setContext(RESOURCE_CATALOGUE);
+			MeasureType measureType = MeasureType.MONTHLY;
+			// Date start = DateUtils.getPreviousPeriod(measureType).getTime();
+			// Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
+			//Date start = DateUtils.getPreviousPeriod(measureType).getTime();
+			Date start = DateUtils.getStartCalendar(2016, 12, 01).getTime();
+			Date end = DateUtils.getEndDateFromStartDate(measureType, start, 18);
+			LinkedHashMap<String,ScopeBean> map = ContextManager.readContexts();
+			SortedSet<String> contexts = new TreeSet<>(map.keySet());
+			for (String context : contexts) {
+				System.out.println("context: " + context);
+			}
+			AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(null);
+			accountingDataHarvesterPlugin.getConfigParameters();
+			// Added by Francesco
+			Properties properties = AccountingDataHarvesterPlugin.getProperties().get();
+			String catalogueContext = (String) properties.get(AccountingDataHarvesterPlugin.RESOURCE_CATALOGUE_CONTEXT);
+			logger.debug("Read from properties " + AccountingDataHarvesterPlugin.RESOURCE_CATALOGUE_CONTEXT + " value: " + catalogueContext);
+			// end
+			contexts = new TreeSet<String>();
+			contexts.add("/d4science.research-infrastructures.eu/SoBigData/ResourceCatalogue");
+			contexts.add("/d4science.research-infrastructures.eu/SoBigData/TagMe");
+			contexts.add("/d4science.research-infrastructures.eu/SoBigData/WellBeingAndEconomy");
+			contexts.add("/d4science.research-infrastructures.eu/SoBigData/CityOfCitizens");
+			contexts.add("/d4science.research-infrastructures.eu/SoBigData/SocietalDebates");
+			contexts.add("/d4science.research-infrastructures.eu/SoBigData/SportsDataScience");
+			contexts.add("/d4science.research-infrastructures.eu/SoBigData/SMAPH");
+			ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end, catalogueContext, contexts);
+			List<HarvestedData> data = resourceCatalogueHarvester.getData();
+			for (HarvestedData harvestedData : data) {
+				System.out.println(harvestedData.toString());
+			}
		} catch(Exception e) {
			logger.error("", e);
		}
	}
}
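For callers, the visible API change is the extra catalogueContext argument in the ResourceCatalogueHarvester constructor. A migration sketch (assuming start, end and contexts are built as in the tests above):

	// Before this commit:
	// new ResourceCatalogueHarvester(start, end, contexts);
	// After this commit (catalogueContext read from config.properties):
	Properties properties = AccountingDataHarvesterPlugin.getProperties().get();
	String catalogueContext = (String) properties.get(AccountingDataHarvesterPlugin.RESOURCE_CATALOGUE_CONTEXT);
	ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end, catalogueContext, contexts);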