Code redesign

Refs #11756: Refactor DataHarvesterPlugin to support scheduled execution from smart-executor

Task-Url: https://support.d4science.org/issues/11756

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/accounting/accounting-dashboard-harvester-se-plugin@167798 82a268e6-3cf1-43bd-a215-b396298e98cf
Author: Luca Frosini
Date:   2018-05-28 14:53:00 +00:00
Commit: 67ae816424 (parent 0d0aaf1b21)

3 changed files with 62 additions and 61 deletions

DataMethodDownloadHarvester.java

@@ -32,11 +32,11 @@ import org.slf4j.LoggerFactory;
  * @author Francesco Mangiacrapa (ISTI - CNR)
  */
 public class DataMethodDownloadHarvester extends SoBigDataHarvester {
 
     private static Logger logger = LoggerFactory.getLogger(DataMethodDownloadHarvester.class);
 
     private int count = 0;
 
     /**
      * Instantiates a new data method download harvester.
      *
@@ -49,78 +49,82 @@ public class DataMethodDownloadHarvester extends SoBigDataHarvester {
     public DataMethodDownloadHarvester(Date start, Date end, SortedSet<String> contexts) throws Exception {
         super(start, end, contexts);
     }
 
     /* (non-Javadoc)
      * @see org.gcube.dataharvest.harvester.BasicHarvester#getData()
      */
     @Override
     public List<HarvestedData> getData() throws Exception {
 
         String defaultContext = Utils.getCurrentContext();
-        logger.debug("The context is: "+defaultContext);
+        logger.debug("The context is {}", defaultContext);
 
         try {
             String vreName = getVRENameToHL(defaultContext);
-            logger.debug("Getting VRE Name to HL from context/scope returns: "+vreName);
+            logger.debug("Getting VRE Name to HL from context/scope returns {} ", vreName);
 
             String user = vreName + "-Manager";
-            logger.debug("Using user '"+user+"' to getHome from HL");
+            logger.debug("Using user '{}' to getHome from HL", user);
 
-            //ISTANCING HL AND GETTING HOME FOR VRE MANAGER
+            //Getting HL instance and home for VRE MANAGER
             HomeManager manager = HomeLibrary.getHomeManagerFactory().getHomeManager();
             @SuppressWarnings("deprecation")
             Home home = manager.getHome(user);
             JCRWorkspace ws = (JCRWorkspace) home.getWorkspace();
-            //
             String path = "/Workspace/MySpecialFolders/" + vreName;
-            logger.debug("Getting item by Path: "+path);
+            logger.debug("Getting item by Path {}", path);
             JCRWorkspaceItem item = (JCRWorkspaceItem) ws.getItemByPath(path);
-            //
-            logger.info("Analyzing " + defaultContext + " in the period [" + start.toString() + " to " + end.toString() +"] starting from root: "+item.getName());
+            logger.debug("Analyzing {} in the period [{} to {}] starting from root {}", defaultContext,
+                    DateUtils.format(start), DateUtils.format(end), item.getName());
 
-            HarvestedData defaultHarvesteData = new HarvestedData(HarvestedDataKey.DATA_METHOD_DOWNLOAD, defaultContext, count);
+            HarvestedData defaultHarvesteData = new HarvestedData(HarvestedDataKey.DATA_METHOD_DOWNLOAD, defaultContext,
+                    count);
 
             List<HarvestedData> data = new ArrayList<HarvestedData>();
 
-            for (WorkspaceItem children: item.getChildren()) {
+            for(WorkspaceItem children : item.getChildren()) {
                 count = 0; //resettings the counter
                 HarvestedData harvestedData;
                 //Getting statistics for folder
-                if(children.isFolder()){
-                    logger.info("Getting statistics for folder: "+children.getName());
+                if(children.isFolder()) {
+                    logger.debug("Getting statistics for folder {}", children.getName());
                     getStats(children, start, end);
 
-                    String normalizedName = children.getName().replaceAll("[^A-Za-z0-9]","");
+                    String normalizedName = children.getName().replaceAll("[^A-Za-z0-9]", "");
                     String scope = mapWsFolderNameToVRE.get(normalizedName);
-                    //Checking if it is a VRE name to right accouning...
-                    if(scope!=null && !scope.isEmpty()){
-                        logger.info("Found scope '" + scope + "' matching with normalized VRE name: "+normalizedName);
+                    //Checking if it is a VRE name to right accounting...
+                    if(scope != null && !scope.isEmpty()) {
+                        logger.debug("Found context '{}' matching with normalized VRE name {} ", scope, normalizedName);
                         harvestedData = new HarvestedData(HarvestedDataKey.DATA_METHOD_DOWNLOAD, scope, count);
                         data.add(harvestedData);
-                        logger.info("Added data: "+harvestedData);
-                    }else{
-                        logger.info("No scope found matching the folder name: "+normalizedName +", accounting its stats in the default context: "+defaultContext);
+                        logger.debug("Added data {}", harvestedData);
+                    } else {
+                        logger.debug(
+                                "No scope found matching the folder name {}, accounting its stats in the default context {}",
+                                normalizedName, defaultContext);
                         //INCREASING THE DEFAULT CONTEXT COUNTER...
-                        defaultHarvesteData.setMeasure(defaultHarvesteData.getMeasure()+count);
-                        logger.info("Increased default context stats: "+defaultHarvesteData);
+                        defaultHarvesteData.setMeasure(defaultHarvesteData.getMeasure() + count);
+                        logger.trace("Increased default context stats {}", defaultHarvesteData);
                     }
                 }
             }
 
             //ADDING DEFAULT ACCOUNTING
             data.add(defaultHarvesteData);
 
-            logger.info("In the period [from {} to {} ] returning workspace accouting data {}", DateUtils.format(start), DateUtils.format(end), data);
+            logger.debug("In the period [from {} to {} ] returning workspace accouting data {}", DateUtils.format(start),
+                    DateUtils.format(end), data);
 
             return data;
 
         } catch(Exception e) {
             throw e;
         }
     }
 
     /**
      * Gets the stats.
      *
@@ -138,34 +142,33 @@ public class DataMethodDownloadHarvester extends SoBigDataHarvester {
                 getStats(child, start, end);
             } else {
                 try {
                     List<AccountingEntry> accounting = baseItem.getAccounting();
                     for(AccountingEntry entry : accounting) {
                         switch(entry.getEntryType()) {
                             case CREATE:
                             case UPDATE:
                             case READ:
                                 Calendar calendar = entry.getDate();
-                                if(calendar.after(DateUtils.dateToCalendar(start)) && calendar.before(DateUtils.dateToCalendar(end))) {
+                                if(calendar.after(DateUtils.dateToCalendar(start))
+                                        && calendar.before(DateUtils.dateToCalendar(end))) {
                                     count++;
                                 }
                                 break;
                             default:
                                 break;
                         }
                     }
                 } catch(Exception e) {
-                    logger.error("DataMethodDownloadHarvester: " + e.getLocalizedMessage());
-                    throw new InternalErrorException(e.getLocalizedMessage());
+                    throw new InternalErrorException(e);
                 }
             }
         }
 
     /**
      * Gets the VRE name to HL.
      *
@@ -174,7 +177,7 @@ public class DataMethodDownloadHarvester extends SoBigDataHarvester {
      */
     private static String getVRENameToHL(String vre) {
         Validate.notNull(vre, "scope must be not null");
 
         String newName;
         if(vre.startsWith(JCRRepository.PATH_SEPARATOR))
             newName = vre.replace(JCRRepository.PATH_SEPARATOR, "-").substring(1);
@@ -182,5 +185,5 @@ public class DataMethodDownloadHarvester extends SoBigDataHarvester {
             newName = vre.replace(JCRRepository.PATH_SEPARATOR, "-");
 
         return newName;
     }
 
 }
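Most of the edits in DataMethodDownloadHarvester.java swap string concatenation in log statements for SLF4J parameterized messages and lower the log levels. A minimal standalone sketch of the pattern (class name and sample value are illustrative, not taken from the commit):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoggingStyleExample {

        private static final Logger logger = LoggerFactory.getLogger(LoggingStyleExample.class);

        public static void main(String[] args) {
            String context = "/some/scope"; // hypothetical value

            // Old style: the message string is concatenated even when DEBUG is disabled.
            logger.debug("The context is: " + context);

            // Style adopted by this commit: the {} placeholder is resolved only
            // when DEBUG is actually enabled, so disabled levels cost almost nothing.
            logger.debug("The context is {}", context);
        }
    }

The info-to-debug and error-to-warn downgrades elsewhere in the commit follow the same idea: routine per-folder or per-document messages are presumably meant to stay out of the default log of a scheduled run.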

ResourceCatalogueHarvester.java

@@ -73,7 +73,7 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
             //EXECUTING THE QUERY IN THE PERIOD
             String queryResult = executeQueryFor(solrParameters, start, end, "groups");
             HarvestedDataKey insertDBKey = HarvestedDataKey.valueOf(mapSystemTypeToDBEntry.get(systemType));
-            logger.info("Creating statistics for type {} using db key {}", systemType, insertDBKey);
+            logger.debug("Creating statistics for type {} using db key {}", systemType, insertDBKey);
             data.addAll(buildListOfHarvestedData(queryResult, insertDBKey));
         }
 
@@ -95,9 +95,7 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
         JSONObject responseHeader = jsonObject.getJSONObject("responseHeader");
         int status = responseHeader.getInt("status");
         if(status != 0) {
-            String err = "Query Deliverable in error: status " + status;
-            logger.error(err);
-            throw new Exception(err);
+            throw new Exception("Query Deliverable in error: status " + status);
         }
 
         JSONObject response = jsonObject.getJSONObject("response");
 
@@ -139,7 +137,7 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
                 logger.debug("Document without groups, accounting it in the catalogue context");
                 catalogueContextCount++;
             } catch (Exception e) {
-                logger.error("Skipping parsing error", e);
+                logger.warn("Skipping parsing error", e);
             }
         }
 
@@ -158,7 +156,7 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
             data.add(new HarvestedData(harvestKey, mapCatalogueGroupToVRE.get(key), counter.get(key)));
         }
 
-        logger.info("For {} in the period [from {} to {}] returning accouting data :", harvestKey, DateUtils.format(start), DateUtils.format(end), data);
+        logger.debug("For {} in the period [from {} to {}] returning accouting data :", harvestKey, DateUtils.format(start), DateUtils.format(end), data);
 
         return data;
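The simplified error handling above relies on the usual Solr JSON envelope returned by executeQueryFor(...). A self-contained sketch of the same status check, using a made-up response string (the field layout beyond responseHeader/status/response is an assumption):

    import org.json.JSONObject;

    public class SolrStatusCheckExample {

        public static void main(String[] args) throws Exception {
            // Hypothetical, trimmed-down Solr reply; the real one comes from executeQueryFor(...)
            String queryResult = "{\"responseHeader\":{\"status\":0},\"response\":{\"numFound\":2,\"docs\":[]}}";

            JSONObject jsonObject = new JSONObject(queryResult);
            int status = jsonObject.getJSONObject("responseHeader").getInt("status");
            if(status != 0) {
                // Mirrors the harvester: a non-zero Solr status now simply aborts with an exception
                throw new Exception("Query Deliverable in error: status " + status);
            }

            JSONObject response = jsonObject.getJSONObject("response");
            System.out.println("Documents found: " + response.getInt("numFound"));
        }
    }

Dropping the logger.error(...) before the throw avoids reporting the same failure twice once the caller handles the exception.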

AccountingDataHarvesterPluginTest.java

@@ -43,7 +43,7 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
     }
 
-    // @Test
+    @Test
     public void launch() {
         try {
 
@@ -59,7 +59,7 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
             inputs.put(AccountingDataHarvesterPlugin.MEASURE_TYPE_INPUT_PARAMETER, measureType.name());
             inputs.put(AccountingDataHarvesterPlugin.RERUN_INPUT_PARAMETER, true);
-            inputs.put(AccountingDataHarvesterPlugin.DRY_RUN_INPUT_PARAMETER, true);
+            inputs.put(AccountingDataHarvesterPlugin.DRY_RUN_INPUT_PARAMETER, false);
 
             /*
             Calendar from = DateUtils.getStartCalendar(2018, Calendar.APRIL, 1);
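The test now runs launch() as a real JUnit test and switches the dry run off, which matches the commit's goal of driving the plugin from a smart-executor schedule through a plain input map. The sketch below only reuses the three parameter names visible in the diff; the stand-in constant values, the sample measure type, and the comments on what the flags mean are assumptions, not taken from the plugin:

    import java.util.HashMap;
    import java.util.Map;

    public class HarvesterLaunchInputsExample {

        // Stand-ins for the AccountingDataHarvesterPlugin constants referenced in the test;
        // the real constant values are not shown in this commit.
        static final String MEASURE_TYPE_INPUT_PARAMETER = "measureType";
        static final String RERUN_INPUT_PARAMETER = "reRun";
        static final String DRY_RUN_INPUT_PARAMETER = "dryRun";

        public static void main(String[] args) {
            Map<String, Object> inputs = new HashMap<>();
            inputs.put(MEASURE_TYPE_INPUT_PARAMETER, "MONTHLY"); // the test passes measureType.name(); value here is a guess
            inputs.put(RERUN_INPUT_PARAMETER, true);             // re-harvest the period even if already accounted (assumed meaning)
            inputs.put(DRY_RUN_INPUT_PARAMETER, false);          // persist the harvested records instead of only computing them (assumed meaning)
            System.out.println(inputs);
        }
    }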