Code redesign
Refs #11756: Refactor DataHarvesterPlugin to support scheduled execution from smart-executor

Task-Url: https://support.d4science.org/issues/11756
git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/accounting/accounting-dashboard-harvester-se-plugin@167791 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in: parent 3c6b05d00b · commit 9f40118fa7
```diff
@@ -120,8 +120,8 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
 		end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
 
-		logger.debug("Harvesting from {} to {}", DateUtils.LAUNCH_DATE_FORMAT.format(start),
-				DateUtils.LAUNCH_DATE_FORMAT.format(end));
+		logger.debug("Harvesting from {} to {}", DateUtils.format(start),
+				DateUtils.format(end));
 
 		getConfigParameters();
 
```
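Several hunks in this commit make the same change: SLF4J string concatenation replaced by `{}` placeholders. A minimal standalone comparison of the two styles (class name and values are illustrative):

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingStyleExample {

	private static final Logger logger = LoggerFactory.getLogger(LoggingStyleExample.class);

	public static void main(String[] args) {
		String from = "2018-04-01";
		String to = "2018-05-01";

		// Concatenation builds the message string even when DEBUG is disabled.
		logger.debug("Harvesting from: " + from + " to: " + to);

		// Placeholders defer message formatting until the level check passes.
		logger.debug("Harvesting from {} to {}", from, to);
	}
}
```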
```diff
@@ -9,6 +9,7 @@ import org.gcube.common.authorization.library.AuthorizationEntry;
 import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
 import org.gcube.common.scope.api.ScopeProvider;
 import org.gcube.dataharvest.datamodel.HarvestedData;
+import org.gcube.dataharvest.utils.DateUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
```
```diff
@@ -16,12 +17,14 @@ public abstract class BasicHarvester {
 
 	private static Logger logger = LoggerFactory.getLogger(BasicHarvester.class);
 
-	public Date startDate;
-	public Date endDate;
+	public Date start;
+	public Date end;
 
 	public BasicHarvester(Date start, Date end) throws ParseException {
-		startDate = start;
-		endDate = end;
+		this.start = start;
+		this.end = end;
+
+		logger.debug("Creating {} for the period {} {} ", this.getClass().getSimpleName(), DateUtils.format(start), DateUtils.format(end));
 	}
 
 	public static String getCurrentContext(String token) throws Exception {
```
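The constructor above defines the contract every harvester inherits: the period is fixed once at construction and read back from the public start/end fields. A hypothetical subclass, compiling only against the project classes visible in this diff (whether getData() is declared abstract in BasicHarvester is not shown here):

```java
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.gcube.dataharvest.datamodel.HarvestedData;

public class NoOpHarvester extends BasicHarvester {

	public NoOpHarvester(Date start, Date end) throws ParseException {
		super(start, end);
	}

	public List<HarvestedData> getData() {
		// A real harvester would query its source between start and end.
		return new ArrayList<>();
	}
}
```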
```diff
@@ -44,7 +44,7 @@ public class MethodInvocationHarvester extends BasicHarvester {
 		List<HarvestedData> data = new ArrayList<>();
 
 		AccountingPersistenceQuery accountingPersistenceQuery = AccountingPersistenceQueryFactory.getInstance();
-		TemporalConstraint temporalConstraint = new TemporalConstraint(startDate.getTime(), endDate.getTime(),
+		TemporalConstraint temporalConstraint = new TemporalConstraint(start.getTime(), end.getTime(),
 				AggregationMode.MONTHLY);
 
 		List<Filter> filters = new ArrayList<>();
```

```diff
@@ -65,10 +65,10 @@ public class MethodInvocationHarvester extends BasicHarvester {
 		for(Filter filter : result.keySet()) {
 			SortedMap<Calendar,Info> infoMap = result.get(filter);
 
-			Calendar calendar = DateUtils.dateToCalendar(startDate);
+			Calendar calendar = DateUtils.dateToCalendar(start);
 
 			Info info = infoMap.get(calendar);
-			logger.debug("{} : {}", DateUtils.LAUNCH_DATE_FORMAT.format(calendar.getTime()), info);
+			logger.debug("{} : {}", DateUtils.format(calendar), info);
 
 			JSONObject jsonObject = info.getValue();
 			long numberOfInvocation = jsonObject.getLong(AggregatedUsageRecord.OPERATION_COUNT);
```
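The harvester feeds the inherited pair straight into the accounting query as epoch milliseconds. A standard-library sketch of how a monthly window maps to that pair; the TemporalConstraint construction itself is the one shown in the hunk above, and getPreviousPeriod/getEndDateFromStartDate are the project's own helpers:

```java
import java.util.Calendar;
import java.util.Date;

public class PeriodBounds {

	public static void main(String[] args) {
		// Start of a month and start of the next month, mirroring what
		// getPreviousPeriod/getEndDateFromStartDate produce for MONTHLY.
		Calendar calendar = Calendar.getInstance();
		calendar.set(2018, Calendar.APRIL, 1, 0, 0, 0);
		calendar.set(Calendar.MILLISECOND, 0);
		Date start = calendar.getTime();

		calendar.add(Calendar.MONTH, 1);
		Date end = calendar.getTime();

		// The values handed to:
		// new TemporalConstraint(start.getTime(), end.getTime(), AggregationMode.MONTHLY)
		System.out.println(start.getTime() + " .. " + end.getTime());
	}
}
```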
```diff
@@ -83,7 +83,7 @@ public class SocialInteractionsHarvester extends SocialNetworkingHarvester {
 			JSONObject item = res.getJSONObject(i);
 			long time = item.getLong("time");
 
-			if(startDate.getTime() <= time && time <= endDate.getTime()) {
+			if(start.getTime() <= time && time <= end.getTime()) {
 				posts++;
 				replies += item.getInt("comments_no");
 				likes += item.getInt("likes_no");
```
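The filter keeps a post when its timestamp lies in the closed interval [start, end]. The same predicate, extracted for clarity (class and method names are illustrative):

```java
import java.util.Date;

public class TimeWindow {

	// Closed-interval check, as in SocialInteractionsHarvester.
	public static boolean inPeriod(Date start, Date end, long time) {
		return start.getTime() <= time && time <= end.getTime();
	}
}
```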
```diff
@@ -76,7 +76,7 @@ public class DataMethodDownloadHarvester extends SoBigDataHarvester {
 		logger.debug("Getting item by Path: "+path);
 		JCRWorkspaceItem item = (JCRWorkspaceItem) ws.getItemByPath(path);
 		//
-		logger.info("Analyzing " + defaultContext + " in the period [" + startDate.toString() + " to " + endDate.toString() +"] starting from root: "+item.getName());
+		logger.info("Analyzing " + defaultContext + " in the period [" + start.toString() + " to " + end.toString() +"] starting from root: "+item.getName());
 
 		HarvestedData defaultHarvesteData = new HarvestedData(HarvestedDataKey.DATA_METHOD_DOWNLOAD, defaultContext, count);
 
```

```diff
@@ -87,7 +87,7 @@ public class DataMethodDownloadHarvester extends SoBigDataHarvester {
 			//Getting statistics for folder
 			if(children.isFolder()){
 				logger.info("Getting statistics for folder: "+children.getName());
-				getStats(children, startDate, endDate);
+				getStats(children, start, end);
 
 				String normalizedName = children.getName().replaceAll("[^A-Za-z0-9]","");
 				String scope = mapWsFolderNameToVRE.get(normalizedName);
```

```diff
@@ -110,7 +110,7 @@ public class DataMethodDownloadHarvester extends SoBigDataHarvester {
 		//ADDING DEFAULT ACCOUNTING
 		data.add(defaultHarvesteData);
 
-		logger.info("In the period [from {} to {} ] returning workspace accouting data {}", DateUtils.LAUNCH_DATE_FORMAT.format(startDate), DateUtils.LAUNCH_DATE_FORMAT.format(endDate), data);
+		logger.info("In the period [from {} to {} ] returning workspace accouting data {}", DateUtils.format(start), DateUtils.format(end), data);
 
 		return data;
 
```
```diff
@@ -71,9 +71,9 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
 			List<String> solrParameters = new ArrayList<String>(1);
 			solrParameters.add("extras_systemtype:\""+systemType+"\"");
 			//EXECUTING THE QUERY IN THE PERIOD
-			String queryResult = executeQueryFor(solrParameters, startDate, endDate, "groups");
+			String queryResult = executeQueryFor(solrParameters, start, end, "groups");
 			HarvestedDataKey insertDBKey = HarvestedDataKey.valueOf(mapSystemTypeToDBEntry.get(systemType));
-			logger.info("Creating statistics for type: "+systemType+ " using db key "+insertDBKey);
+			logger.info("Creating statistics for type {} using db key {}", systemType, insertDBKey);
 			data.addAll(buildListOfHarvestedData(queryResult, insertDBKey));
 		}
```

```diff
@@ -97,7 +97,7 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
 		if(status != 0) {
 			String err = "Query Deliverable in error: status " + status;
 			logger.error(err);
-			throw new Exception(err, null);
+			throw new Exception(err);
 		}
 
 		JSONObject response = jsonObject.getJSONObject("response");
```
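The status value checked above presumably comes from Solr's standard responseHeader. A sketch of that extraction with org.json, assuming Solr's usual response layout (the surrounding parsing code is not part of this diff):

```java
import org.json.JSONObject;

public class SolrStatusCheck {

	public static JSONObject parseSolrResult(String jsonResult) throws Exception {
		JSONObject jsonObject = new JSONObject(jsonResult);

		// Solr reports 0 in responseHeader.status on success.
		int status = jsonObject.getJSONObject("responseHeader").getInt("status");
		if (status != 0) {
			String err = "Query Deliverable in error: status " + status;
			throw new Exception(err);
		}

		// The matching documents live under "response", as read in the hunk above.
		return jsonObject.getJSONObject("response");
	}
}
```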
```diff
@@ -130,7 +130,6 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
 				counter.put(catalogueGroupName, currentCount+1);
 			else{
 				logger.warn("No mapping found for Catalogue-Group Name {} from VREs. Accounting it in the catalogue context {}", catalogueGroupName, catalogueContext);
-				//counter.put(catalogueContext, counter.get(catalogueContext)+1);
 				catalogueContextCount++;
 			}
 
```

```diff
@@ -159,7 +158,7 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
 			data.add(new HarvestedData(harvestKey, mapCatalogueGroupToVRE.get(key), counter.get(key)));
 		}
 
-		logger.info("For {} in the period [from {} to {}] returning accouting data :", harvestKey, DateUtils.LAUNCH_DATE_FORMAT.format(startDate), DateUtils.LAUNCH_DATE_FORMAT.format(endDate), data);
+		logger.info("For {} in the period [from {} to {}] returning accouting data :", harvestKey, DateUtils.format(start), DateUtils.format(end), data);
 
 		return data;
 
```
```diff
@@ -201,9 +200,9 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
 
 		query += "q=" + UrlEncoderUtil.encodeQuery(q) + "&wt=json&indent=true&rows="+ROWS;
 		query += flValue!=null && !flValue.isEmpty()?"&fl="+UrlEncoderUtil.encodeQuery(flValue):"";
-		logger.debug("\nPerforming query: "+query);
+		logger.debug("\nPerforming query {}", query);
 		String jsonResult = Utils.getJson(query);
-		logger.trace("Response is: "+jsonResult);
+		logger.trace("Response is {}", jsonResult);
 
 		return jsonResult;
 	}
```
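The query string is assembled by plain concatenation with the gCube UrlEncoderUtil helper. An equivalent sketch using java.net.URLEncoder in its place; the base URL and the ROWS value are illustrative, not taken from this diff:

```java
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

public class SolrQueryBuilder {

	private static final int ROWS = 500; // illustrative; the real constant is not shown here

	public static String buildQuery(String baseUrl, String q, String flValue)
			throws UnsupportedEncodingException {
		// q is always encoded; fl is appended only when present.
		String query = baseUrl + "?q=" + URLEncoder.encode(q, "UTF-8")
				+ "&wt=json&indent=true&rows=" + ROWS;
		if (flValue != null && !flValue.isEmpty()) {
			query += "&fl=" + URLEncoder.encode(flValue, "UTF-8");
		}
		return query;
	}

	public static void main(String[] args) throws UnsupportedEncodingException {
		System.out.println(buildQuery("http://solr.example.org/select",
				"extras_systemtype:\"method\"", "groups"));
	}
}
```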
```diff
@@ -50,8 +50,6 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 	//Added by Francesco
 	private DataCatalogueFactory catalogueFactory;
 
-	public static final String SO_BIG_DATA_CONTEXT = "/d4science.research-infrastructures.eu/SoBigData";
-
 	protected SortedSet<String> contexts;
 
 	/**
```
```diff
@@ -70,11 +68,11 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 
 		String currentContext = Utils.getCurrentContext();
 
-		// Truncating the context to the last / (the last is retained for filtering issues)
+		// Truncating the context to the last / (the last / is retained for filtering issues)
 		String baseContext = currentContext.substring(0, currentContext.lastIndexOf("/")+1);
 
 		this.contexts = getValidContexts(contexts, baseContext);
-		logger.trace("Valid contexts are {}", contexts);
+		logger.trace("Valid contexts are {}", this.contexts);
 
 		initMappingMaps();
```
```diff
@@ -92,7 +90,6 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 
 		mapSystemTypeToDBEntry = new HashMap<String,String>();
 		for(String key : keys) {
-			//System.out.println(key + " : " + properties.getProperty(key));
 			try {
 				HarvestedDataKey valueEnum = HarvestedDataKey.valueOf(key);
 				mapSystemTypeToDBEntry.put(properties.getProperty(key), valueEnum.name());
```

```diff
@@ -101,7 +98,7 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 			}
 		}
 
-		logger.info("Built from properties the mapping 'SystemType' to 'DB entry' : " + mapSystemTypeToDBEntry);
+		logger.info("Built from properties the mapping 'SystemType' to 'DB entry' {}", mapSystemTypeToDBEntry);
 
 		String currentContext = Utils.getCurrentContext();
 
```
```diff
@@ -109,7 +106,7 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 		List<String> groups = loadGroupsFromCKAN(currentContext);
 		//NORMALIZING THE GROUP NAME TO MATCH WITH VRE NAME
 		Map<String,String> mapNormalizedGroups = normalizeGroups(groups);
-		logger.debug("Map of Normalized Groups is: " + mapNormalizedGroups);
+		logger.debug("Map of Normalized Groups is {} ", mapNormalizedGroups);
 
 		//CREATING MAPPING BETWEEN (CATALOGUE GROUP NAME TO VRE NAME)
 		mapCatalogueGroupToVRE = new HashMap<String,String>();
```
```diff
@@ -117,12 +114,10 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 		mapWsFolderNameToVRE = new HashMap<String,String>();
 		Set<String> normalizedGroups = mapNormalizedGroups.keySet();
 		for(String context : contexts) {
-			//logger.trace("Context is: " + context);
 			String loweredVREName = context.substring(context.lastIndexOf("/") + 1, context.length()).toLowerCase();
 			try {
-				//logger.trace("vreName lowered is: " + loweredVREName);
 				if(normalizedGroups.contains(loweredVREName)) {
-					logger.debug("Normalized Groups matching the lowered VRE name: " + loweredVREName);
+					logger.debug("Normalized Groups matching the lowered VRE name {}", loweredVREName);
 					// Creating the map with couple (catalogue group name, scope)
 					mapCatalogueGroupToVRE.put(mapNormalizedGroups.get(loweredVREName), context);
 				}
```
```diff
@@ -133,8 +128,8 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 			}
 		}
 
-		logger.info("Map of Catalogue Groups To VRE is: " + mapCatalogueGroupToVRE);
-		logger.info("Map of (lowered) Ws Folder Name To VRE is: " + mapWsFolderNameToVRE);
+		logger.info("Map of Catalogue Groups To VRE is {} ", mapCatalogueGroupToVRE);
+		logger.info("Map of (lowered) Ws Folder Name To VRE is {}", mapWsFolderNameToVRE);
 
 	}
 
```
```diff
@@ -176,7 +171,7 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 				groups.add(ckanGroup.getName());
 			}
 		} catch(Exception e) {
-			logger.error("Error occurred on getting CKAN groups for scope: " + scope + " and CKAN URL: " + ckanURL, e);
+			logger.error("Error occurred on getting CKAN groups for scope {} and CKAN URL {}", scope, ckanURL, e);
 		}
 
 		return groups;
```
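Worth noting for this hunk: SLF4J treats a trailing Throwable specially, so even though only two `{}` placeholders are declared, passing e as the last argument still logs the full stack trace. A minimal demonstration (values illustrative):

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ThrowableLastArgExample {

	private static final Logger logger = LoggerFactory.getLogger(ThrowableLastArgExample.class);

	public static void main(String[] args) {
		String scope = "/gcube/devsec";
		String ckanURL = "https://ckan.example.org";
		Exception e = new IllegalStateException("boom");

		// The trailing Throwable is not consumed by a placeholder;
		// SLF4J logs it as the exception, stack trace included.
		logger.error("Error occurred on getting CKAN groups for scope {} and CKAN URL {}", scope, ckanURL, e);
	}
}
```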
```diff
@@ -218,7 +213,7 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 	public SortedSet<String> getValidContexts(Set<String> contexts, String base) {
 		SortedSet<String> filteredContext = new TreeSet<>();
 		for(String context : contexts) {
-			if(context.startsWith(SO_BIG_DATA_CONTEXT)) {
+			if(context.startsWith(base)) {
 				filteredContext.add(context);
 			}
 		}
```
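With the hard-coded SO_BIG_DATA_CONTEXT check gone, any prefix can be passed as base; the test further down therefore appends a trailing / to avoid matching sibling contexts that merely share the prefix. A standalone copy of the filtering logic (example context names are illustrative):

```java
import java.util.SortedSet;
import java.util.TreeSet;

public class ValidContextsExample {

	// Same prefix filter as getValidContexts after this commit.
	public static SortedSet<String> getValidContexts(Iterable<String> contexts, String base) {
		SortedSet<String> filteredContext = new TreeSet<>();
		for (String context : contexts) {
			if (context.startsWith(base)) {
				filteredContext.add(context);
			}
		}
		return filteredContext;
	}

	public static void main(String[] args) {
		SortedSet<String> contexts = new TreeSet<>();
		contexts.add("/d4science.research-infrastructures.eu/SoBigData/SoBigDataLab");
		contexts.add("/d4science.research-infrastructures.eu/gCubeApps/SomeVRE");

		// The trailing slash keeps out siblings that merely start with the same name.
		String base = "/d4science.research-infrastructures.eu/SoBigData/";
		System.out.println(getValidContexts(contexts, base));
	}
}
```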
```diff
@@ -46,7 +46,7 @@ public class TagMeMethodInvocationHarvester extends BasicHarvester {
 		List<HarvestedData> data = new ArrayList<>();
 
 		AccountingPersistenceQuery accountingPersistenceQuery = AccountingPersistenceQueryFactory.getInstance();
-		TemporalConstraint temporalConstraint = new TemporalConstraint(startDate.getTime(), endDate.getTime(),
+		TemporalConstraint temporalConstraint = new TemporalConstraint(start.getTime(), end.getTime(),
 				AggregationMode.MONTHLY);
 
 		List<Filter> filters = new ArrayList<>();
```

```diff
@@ -64,10 +64,10 @@ public class TagMeMethodInvocationHarvester extends BasicHarvester {
 		for(Filter filter : result.keySet()) {
 			SortedMap<Calendar,Info> infoMap = result.get(filter);
 
-			Calendar calendar = DateUtils.dateToCalendar(startDate);
+			Calendar calendar = DateUtils.dateToCalendar(start);
 
 			Info info = infoMap.get(calendar);
-			logger.debug("{} : {}", DateUtils.LAUNCH_DATE_FORMAT.format(calendar.getTime()), info);
+			logger.debug("{} : {}", DateUtils.format(calendar), info);
 
 			JSONObject jsonObject = info.getValue();
 			long numberOfInvocation = jsonObject.getLong(AggregatedUsageRecord.OPERATION_COUNT);
```
```diff
@@ -106,6 +106,15 @@ public class DateUtils {
 		return calendar;
 	}
 
+	public static String format(Date date) {
+		return DateUtils.LAUNCH_DATE_FORMAT.format(date);
+	}
+
+	public static String format(Calendar calendar) {
+		return format(calendar.getTime());
+	}
+
+
 	public static String dateToStringWithTZ(Date date) {
 		DateFormat formatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
 		return formatter.format(date) + "Z";
```
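These two overloads centralize LAUNCH_DATE_FORMAT so callers no longer unwrap Calendar themselves, which is what lets every LAUNCH_DATE_FORMAT.format(...) call site above shrink. A standalone equivalent; the actual LAUNCH_DATE_FORMAT pattern is not visible in this diff, so a plausible one is assumed:

```java
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;

public class DateFormatHelper {

	// Assumed pattern; the real LAUNCH_DATE_FORMAT definition is not part of this diff.
	private static final DateFormat LAUNCH_DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd");

	public static String format(Date date) {
		return LAUNCH_DATE_FORMAT.format(date);
	}

	// The Calendar overload simply delegates, exactly as in the new DateUtils.
	public static String format(Calendar calendar) {
		return format(calendar.getTime());
	}

	public static void main(String[] args) {
		System.out.println(format(new Date()));
		System.out.println(format(Calendar.getInstance()));
	}
}
```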
```diff
@@ -13,7 +13,6 @@ import org.gcube.dataharvest.datamodel.HarvestedData;
 import org.gcube.dataharvest.harvester.MethodInvocationHarvester;
 import org.gcube.dataharvest.harvester.sobigdata.DataMethodDownloadHarvester;
 import org.gcube.dataharvest.harvester.sobigdata.ResourceCatalogueHarvester;
-import org.gcube.dataharvest.harvester.sobigdata.SoBigDataHarvester;
 import org.gcube.dataharvest.harvester.sobigdata.TagMeMethodInvocationHarvester;
 import org.gcube.dataharvest.utils.ContextTest;
 import org.gcube.dataharvest.utils.DateUtils;
```
```diff
@@ -27,6 +26,23 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
 
 	private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterPluginTest.class);
 
+	public static final String SO_BIG_DATA_CONTEXT = "/d4science.research-infrastructures.eu/SoBigData";
+
+	public static SortedSet<String> getContexts() throws Exception{
+		SortedSet<String> contexts = new TreeSet<>();
+		LinkedHashMap<String,ScopeBean> map = ContextManager.readContexts();
+		for(String scope : map.keySet()) {
+			try {
+				String context = map.get(scope).toString();
+				contexts.add(context);
+			}catch (Exception e) {
+				throw e;
+			}
+		}
+		return contexts;
+	}
+
+
 	@Test
 	public void test() {
 		try {
```
```diff
@@ -114,15 +130,13 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
 			Date start = DateUtils.getPreviousPeriod(measureType).getTime();
 			Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
 
-			LinkedHashMap<String,ScopeBean> map = ContextManager.readContexts();
-			SortedSet<String> contexts = new TreeSet<>(map.keySet());
+			SortedSet<String> contexts = getContexts();
 
 			AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(null);
 			accountingDataHarvesterPlugin.getConfigParameters();
 
 			ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end, contexts);
-			SortedSet<String> validContexts = resourceCatalogueHarvester.getValidContexts(contexts, SoBigDataHarvester.SO_BIG_DATA_CONTEXT);
+			SortedSet<String> validContexts = resourceCatalogueHarvester.getValidContexts(contexts, SO_BIG_DATA_CONTEXT + "/");
 			logger.info("Valid Contexts {}", validContexts);
 
 		} catch(Exception e) {
```
```diff
@@ -139,14 +153,14 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
 
 			MeasureType measureType = MeasureType.MONTHLY;
 
-			Date start = DateUtils.getStartCalendar(2018, 04, 01).getTime();
+			//Date start = DateUtils.getStartCalendar(2018, Calendar.APRIL, 1).getTime();
+			Date start = DateUtils.getPreviousPeriod(measureType).getTime();
 			Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
 
 			AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(null);
 			accountingDataHarvesterPlugin.getConfigParameters();
 
-			LinkedHashMap<String,ScopeBean> map = ContextManager.readContexts();
-			SortedSet<String> contexts = new TreeSet<>(map.keySet());
+			SortedSet<String> contexts = getContexts();
 
 			ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end, contexts);
 			List<HarvestedData> data = resourceCatalogueHarvester.getData();
```
```diff
@@ -167,14 +181,13 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
 
 			MeasureType measureType = MeasureType.MONTHLY;
 
-			Date start = DateUtils.getStartCalendar(2018, 04, 01).getTime();
+			Date start = DateUtils.getPreviousPeriod(measureType).getTime();
 			Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
 
 			AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(null);
 			accountingDataHarvesterPlugin.getConfigParameters();
 
-			LinkedHashMap<String,ScopeBean> map = ContextManager.readContexts();
-			SortedSet<String> contexts = new TreeSet<>(map.keySet());
+			SortedSet<String> contexts = getContexts();
 
 			DataMethodDownloadHarvester resourceCatalogueHarvester = new DataMethodDownloadHarvester(start, end, contexts);
 			List<HarvestedData> data = resourceCatalogueHarvester.getData();
```
```diff
@@ -9,7 +9,7 @@
 	</appender>
 
-	<logger name="org.gcube" level="INFO" />
+	<logger name="org.gcube" level="WARN" />
 	<logger name="org.gcube.dataharvest" level="TRACE" />
 
 	<root level="WARN">
```