ref 21031: Add support for Jupyter

Updated Jupyter Accesses Harvester
This commit is contained in:
Giancarlo Panichi 2021-03-26 13:24:45 +01:00
parent 13481c35a5
commit 38ec08e0a3
5 changed files with 290 additions and 224 deletions

View File

@ -21,6 +21,7 @@ import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.common.scope.impl.ScopeBean.Type; import org.gcube.common.scope.impl.ScopeBean.Type;
import org.gcube.dataharvest.harvester.CatalogueAccessesHarvester; import org.gcube.dataharvest.harvester.CatalogueAccessesHarvester;
import org.gcube.dataharvest.harvester.CoreServicesAccessesHarvester; import org.gcube.dataharvest.harvester.CoreServicesAccessesHarvester;
import org.gcube.dataharvest.harvester.JupyterAccessesHarvester;
import org.gcube.dataharvest.harvester.MethodInvocationHarvester; import org.gcube.dataharvest.harvester.MethodInvocationHarvester;
import org.gcube.dataharvest.harvester.SocialInteractionsHarvester; import org.gcube.dataharvest.harvester.SocialInteractionsHarvester;
import org.gcube.dataharvest.harvester.VREAccessesHarvester; import org.gcube.dataharvest.harvester.VREAccessesHarvester;
@ -41,99 +42,98 @@ import org.slf4j.LoggerFactory;
* @author Luca Frosini (ISTI - CNR) * @author Luca Frosini (ISTI - CNR)
*/ */
public class AccountingDashboardHarvesterPlugin extends Plugin { public class AccountingDashboardHarvesterPlugin extends Plugin {
private static Logger logger = LoggerFactory.getLogger(AccountingDashboardHarvesterPlugin.class); private static Logger logger = LoggerFactory.getLogger(AccountingDashboardHarvesterPlugin.class);
private static final String PROPERTY_FILENAME = "config.properties"; private static final String PROPERTY_FILENAME = "config.properties";
public static final String START_DATE_INPUT_PARAMETER = "startDate"; public static final String START_DATE_INPUT_PARAMETER = "startDate";
public static final String MEASURE_TYPE_INPUT_PARAMETER = "measureType"; public static final String MEASURE_TYPE_INPUT_PARAMETER = "measureType";
public static final String RERUN_INPUT_PARAMETER = "reRun"; public static final String RERUN_INPUT_PARAMETER = "reRun";
public static final String GET_VRE_USERS_INPUT_PARAMETER = "getVREUsers"; public static final String GET_VRE_USERS_INPUT_PARAMETER = "getVREUsers";
public static final String DRY_RUN_INPUT_PARAMETER = "dryRun"; public static final String DRY_RUN_INPUT_PARAMETER = "dryRun";
/** /**
	 * Allows partial harvesting of data of the current period. This means that
	 * in MONTHLY aggregation type the current month is harvested instead of the
	 * previous month, which is done when the month is completed. This allows the
	 * portlet to display monthly data in the current month even if the data is
	 * partial (up to the current day).
*/ */
public static final String PARTIAL_HARVESTING = "partialHarvesting"; public static final String PARTIAL_HARVESTING = "partialHarvesting";
public static final String SO_BIG_DATA_VO = "/d4science.research-infrastructures.eu/SoBigData"; public static final String SO_BIG_DATA_VO = "/d4science.research-infrastructures.eu/SoBigData";
public static final String SO_BIG_DATA_EU_VRE = "/d4science.research-infrastructures.eu/gCubeApps/SoBigData.eu"; public static final String SO_BIG_DATA_EU_VRE = "/d4science.research-infrastructures.eu/gCubeApps/SoBigData.eu";
public static final String SO_BIG_DATA_IT_VRE = "/d4science.research-infrastructures.eu/gCubeApps/SoBigData.it"; public static final String SO_BIG_DATA_IT_VRE = "/d4science.research-infrastructures.eu/gCubeApps/SoBigData.it";
public static final String SO_BIG_DATA_CATALOGUE_CONTEXT = "/d4science.research-infrastructures.eu/SoBigData/ResourceCatalogue"; public static final String SO_BIG_DATA_CATALOGUE_CONTEXT = "/d4science.research-infrastructures.eu/SoBigData/ResourceCatalogue";
public static final String TAGME_CONTEXT = "/d4science.research-infrastructures.eu/SoBigData/TagMe"; public static final String TAGME_CONTEXT = "/d4science.research-infrastructures.eu/SoBigData/TagMe";
public static final String TO_BE_SET = "TO BE SET"; public static final String TO_BE_SET = "TO BE SET";
protected Date start; protected Date start;
protected Date end; protected Date end;
public AccountingDashboardHarvesterPlugin() { public AccountingDashboardHarvesterPlugin() {
super(); super();
} }
private static final InheritableThreadLocal<Properties> properties = new InheritableThreadLocal<Properties>() { private static final InheritableThreadLocal<Properties> properties = new InheritableThreadLocal<Properties>() {
@Override @Override
protected Properties initialValue() { protected Properties initialValue() {
return new Properties(); return new Properties();
} }
}; };
public static InheritableThreadLocal<Properties> getProperties() { public static InheritableThreadLocal<Properties> getProperties() {
return properties; return properties;
} }
public static Dimension getDimension(String key) { public static Dimension getDimension(String key) {
Dimension dimension = dimensions.get().get(key); Dimension dimension = dimensions.get().get(key);
if(dimension == null) { if (dimension == null) {
dimension = new Dimension(key, key, null, key); dimension = new Dimension(key, key, null, key);
} }
return dimension; return dimension;
} }
protected static final InheritableThreadLocal<Map<String, Dimension>> dimensions = new InheritableThreadLocal<Map<String, Dimension>>() { protected static final InheritableThreadLocal<Map<String, Dimension>> dimensions = new InheritableThreadLocal<Map<String, Dimension>>() {
@Override @Override
protected Map<String, Dimension> initialValue() { protected Map<String, Dimension> initialValue() {
return new HashMap<>(); return new HashMap<>();
} }
}; };
public static ScopeDescriptor getScopeDescriptor(String context) { public static ScopeDescriptor getScopeDescriptor(String context) {
return scopeDescriptors.get().get(context); return scopeDescriptors.get().get(context);
} }
protected static final InheritableThreadLocal<Map<String, ScopeDescriptor>> scopeDescriptors = new InheritableThreadLocal<Map<String, ScopeDescriptor>>() { protected static final InheritableThreadLocal<Map<String, ScopeDescriptor>> scopeDescriptors = new InheritableThreadLocal<Map<String, ScopeDescriptor>>() {
@Override @Override
protected Map<String, ScopeDescriptor> initialValue() { protected Map<String, ScopeDescriptor> initialValue() {
return new HashMap<>(); return new HashMap<>();
} }
}; };
public static ScopeDescriptor getScopeDescriptor() { public static ScopeDescriptor getScopeDescriptor() {
return scopeDescriptor.get(); return scopeDescriptor.get();
} }
public static final InheritableThreadLocal<ScopeDescriptor> scopeDescriptor = new InheritableThreadLocal<ScopeDescriptor>() { public static final InheritableThreadLocal<ScopeDescriptor> scopeDescriptor = new InheritableThreadLocal<ScopeDescriptor>() {
@Override @Override
protected ScopeDescriptor initialValue() { protected ScopeDescriptor initialValue() {
return new ScopeDescriptor("",""); return new ScopeDescriptor("", "");
} }
}; };
public Properties getConfigParameters() throws IOException { public Properties getConfigParameters() throws IOException {
Properties properties = new Properties(); Properties properties = new Properties();
try { try {
@ -141,272 +141,318 @@ public class AccountingDashboardHarvesterPlugin extends Plugin {
.getResourceAsStream(PROPERTY_FILENAME); .getResourceAsStream(PROPERTY_FILENAME);
properties.load(input); properties.load(input);
return properties; return properties;
} catch(Exception e) { } catch (Exception e) {
logger.warn( logger.warn(
"Unable to load {} file containing configuration properties. AccountingDataHarvesterPlugin will use defaults", "Unable to load {} file containing configuration properties. AccountingDataHarvesterPlugin will use defaults",
PROPERTY_FILENAME); PROPERTY_FILENAME);
} }
return properties; return properties;
} }
/** {@inheritDoc} */ /** {@inheritDoc} */
@Override @Override
public void launch(Map<String,Object> inputs) throws Exception { public void launch(Map<String, Object> inputs) throws Exception {
logger.debug("{} is starting", this.getClass().getSimpleName()); logger.debug("{} is starting", this.getClass().getSimpleName());
if(inputs == null || inputs.isEmpty()) { if (inputs == null || inputs.isEmpty()) {
throw new IllegalArgumentException("The can only be launched providing valid input parameters"); throw new IllegalArgumentException("The can only be launched providing valid input parameters");
} }
if(!inputs.containsKey(MEASURE_TYPE_INPUT_PARAMETER)) { if (!inputs.containsKey(MEASURE_TYPE_INPUT_PARAMETER)) {
throw new IllegalArgumentException("Please set required parameter '" + MEASURE_TYPE_INPUT_PARAMETER + "'"); throw new IllegalArgumentException("Please set required parameter '" + MEASURE_TYPE_INPUT_PARAMETER + "'");
} }
AggregationType aggregationType = AggregationType.valueOf((String) inputs.get(MEASURE_TYPE_INPUT_PARAMETER)); AggregationType aggregationType = AggregationType.valueOf((String) inputs.get(MEASURE_TYPE_INPUT_PARAMETER));
boolean reRun = true; boolean reRun = true;
if(inputs.containsKey(RERUN_INPUT_PARAMETER)) { if (inputs.containsKey(RERUN_INPUT_PARAMETER)) {
try { try {
reRun = (boolean) inputs.get(RERUN_INPUT_PARAMETER); reRun = (boolean) inputs.get(RERUN_INPUT_PARAMETER);
} catch(Exception e) { } catch (Exception e) {
throw new IllegalArgumentException("'" + RERUN_INPUT_PARAMETER + "' must be a boolean"); throw new IllegalArgumentException("'" + RERUN_INPUT_PARAMETER + "' must be a boolean");
} }
} }
boolean getVREUsers = true; boolean getVREUsers = true;
if(inputs.containsKey(GET_VRE_USERS_INPUT_PARAMETER)) { if (inputs.containsKey(GET_VRE_USERS_INPUT_PARAMETER)) {
try { try {
reRun = (boolean) inputs.get(GET_VRE_USERS_INPUT_PARAMETER); reRun = (boolean) inputs.get(GET_VRE_USERS_INPUT_PARAMETER);
} catch(Exception e) { } catch (Exception e) {
throw new IllegalArgumentException("'" + GET_VRE_USERS_INPUT_PARAMETER + "' must be a boolean"); throw new IllegalArgumentException("'" + GET_VRE_USERS_INPUT_PARAMETER + "' must be a boolean");
} }
} }
boolean dryRun = true; boolean dryRun = true;
if(inputs.containsKey(DRY_RUN_INPUT_PARAMETER)) { if (inputs.containsKey(DRY_RUN_INPUT_PARAMETER)) {
try { try {
dryRun = (boolean) inputs.get(DRY_RUN_INPUT_PARAMETER); dryRun = (boolean) inputs.get(DRY_RUN_INPUT_PARAMETER);
} catch(Exception e) { } catch (Exception e) {
throw new IllegalArgumentException("'" + DRY_RUN_INPUT_PARAMETER + "' must be a boolean"); throw new IllegalArgumentException("'" + DRY_RUN_INPUT_PARAMETER + "' must be a boolean");
} }
} }
boolean partialHarvesting = false; boolean partialHarvesting = false;
if(inputs.containsKey(PARTIAL_HARVESTING)) { if (inputs.containsKey(PARTIAL_HARVESTING)) {
partialHarvesting = (boolean) inputs.get(PARTIAL_HARVESTING); partialHarvesting = (boolean) inputs.get(PARTIAL_HARVESTING);
} }
if(inputs.containsKey(START_DATE_INPUT_PARAMETER)) { if (inputs.containsKey(START_DATE_INPUT_PARAMETER)) {
String startDateString = (String) inputs.get(START_DATE_INPUT_PARAMETER); String startDateString = (String) inputs.get(START_DATE_INPUT_PARAMETER);
start = DateUtils.UTC_DATE_FORMAT.parse(startDateString + " " + DateUtils.UTC); start = DateUtils.UTC_DATE_FORMAT.parse(startDateString + " " + DateUtils.UTC);
} else { } else {
start = DateUtils.getPreviousPeriod(aggregationType, partialHarvesting).getTime(); start = DateUtils.getPreviousPeriod(aggregationType, partialHarvesting).getTime();
} }
end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1, partialHarvesting); end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1, partialHarvesting);
logger.debug("Harvesting from {} to {} (ReRun:{} - GetVREUsers:{} - DryRun:{})", logger.debug("Harvesting from {} to {} (ReRun:{} - GetVREUsers:{} - DryRun:{})", DateUtils.format(start),
DateUtils.format(start), DateUtils.format(end), reRun, getVREUsers, dryRun); DateUtils.format(end), reRun, getVREUsers, dryRun);
Properties properties = getConfigParameters(); Properties properties = getConfigParameters();
getProperties().set(properties); getProperties().set(properties);
ContextAuthorization contextAuthorization = new ContextAuthorization(); ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts(); SortedSet<String> contexts = contextAuthorization.getContexts();
String root = contexts.first(); String root = contexts.first();
Utils.setContext(contextAuthorization.getTokenForContext(root)); Utils.setContext(contextAuthorization.getTokenForContext(root));
AccountingDao dao = AccountingDao.get(); AccountingDao dao = AccountingDao.get();
Set<ScopeDescriptor> scopeDescriptorSet = dao.getContexts(); Set<ScopeDescriptor> scopeDescriptorSet = dao.getContexts();
Map<String,ScopeDescriptor> scopeDescriptorMap = new HashMap<>(); Map<String, ScopeDescriptor> scopeDescriptorMap = new HashMap<>();
for(ScopeDescriptor scopeDescriptor : scopeDescriptorSet) { for (ScopeDescriptor scopeDescriptor : scopeDescriptorSet) {
scopeDescriptorMap.put(scopeDescriptor.getId(), scopeDescriptor); scopeDescriptorMap.put(scopeDescriptor.getId(), scopeDescriptor);
} }
scopeDescriptors.set(scopeDescriptorMap); scopeDescriptors.set(scopeDescriptorMap);
Set<Dimension> dimensionSet = dao.getDimensions(); Set<Dimension> dimensionSet = dao.getDimensions();
Map<String,Dimension> dimensionMap = new HashMap<>(); Map<String, Dimension> dimensionMap = new HashMap<>();
for(Dimension dimension : dimensionSet) { for (Dimension dimension : dimensionSet) {
dimensionMap.put(dimension.getId(), dimension); dimensionMap.put(dimension.getId(), dimension);
} }
dimensions.set(dimensionMap); dimensions.set(dimensionMap);
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>(); ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
String initialToken = SecurityTokenProvider.instance.get(); String initialToken = SecurityTokenProvider.instance.get();
VREAccessesHarvester vreAccessesHarvester = null; VREAccessesHarvester vreAccessesHarvester = null;
JupyterAccessesHarvester jupyterAccessesHarvester = null;
for(String context : contexts) {
for (String context : contexts) {
// Setting the token for the context // Setting the token for the context
Utils.setContext(contextAuthorization.getTokenForContext(context)); Utils.setContext(contextAuthorization.getTokenForContext(context));
ScopeBean scopeBean = new ScopeBean(context); ScopeBean scopeBean = new ScopeBean(context);
ScopeDescriptor actualScopeDescriptor = scopeDescriptorMap.get(context); ScopeDescriptor actualScopeDescriptor = scopeDescriptorMap.get(context);
if(actualScopeDescriptor==null) { if (actualScopeDescriptor == null) {
actualScopeDescriptor = new ScopeDescriptor(scopeBean.name(), context); actualScopeDescriptor = new ScopeDescriptor(scopeBean.name(), context);
} }
scopeDescriptor.set(actualScopeDescriptor); scopeDescriptor.set(actualScopeDescriptor);
if(scopeBean.is(Type.INFRASTRUCTURE)) { if (scopeBean.is(Type.INFRASTRUCTURE)) {
try { try {
CatalogueAccessesHarvester catalogueHarvester = new CatalogueAccessesHarvester(start, end); CatalogueAccessesHarvester catalogueHarvester = new CatalogueAccessesHarvester(start, end);
List<AccountingRecord> harvested = catalogueHarvester.getAccountingRecords(); List<AccountingRecord> harvested = catalogueHarvester.getAccountingRecords();
accountingRecords.addAll(harvested); accountingRecords.addAll(harvested);
CoreServicesAccessesHarvester coreServicesHarvester = new CoreServicesAccessesHarvester(start, end); CoreServicesAccessesHarvester coreServicesHarvester = new CoreServicesAccessesHarvester(start, end);
List<AccountingRecord> records = coreServicesHarvester.getAccountingRecords(); List<AccountingRecord> records = coreServicesHarvester.getAccountingRecords();
accountingRecords.addAll(records); accountingRecords.addAll(records);
}catch (Exception e) { } catch (Exception e) {
logger.error("Error harvesting {} for {}", CatalogueAccessesHarvester.class.getSimpleName(), context, e); logger.error("Error harvesting {} for {}", CatalogueAccessesHarvester.class.getSimpleName(),
context, e);
} }
} }
if (vreAccessesHarvester == null) {
if(vreAccessesHarvester == null) {
if (scopeBean.is(Type.INFRASTRUCTURE)) {
if(scopeBean.is(Type.INFRASTRUCTURE)) {
vreAccessesHarvester = new VREAccessesHarvester(start, end); vreAccessesHarvester = new VREAccessesHarvester(start, end);
} else { } else {
// This code should be never used because the scopes are sorted by fullname // This code should be never used because the scopes are
// sorted by fullname
ScopeBean parent = scopeBean.enclosingScope(); ScopeBean parent = scopeBean.enclosingScope();
while(!parent.is(Type.INFRASTRUCTURE)) { while (!parent.is(Type.INFRASTRUCTURE)) {
parent = scopeBean.enclosingScope(); parent = scopeBean.enclosingScope();
} }
// Setting back token for the context // Setting back token for the context
Utils.setContext(contextAuthorization.getTokenForContext(parent.toString())); Utils.setContext(contextAuthorization.getTokenForContext(parent.toString()));
vreAccessesHarvester = new VREAccessesHarvester(start, end); vreAccessesHarvester = new VREAccessesHarvester(start, end);
// Setting back token for the context // Setting back token for the context
Utils.setContext(contextAuthorization.getTokenForContext(context)); Utils.setContext(contextAuthorization.getTokenForContext(context));
} }
} }
if (jupyterAccessesHarvester == null) {
if((context.startsWith(SO_BIG_DATA_VO) || context.startsWith(SO_BIG_DATA_EU_VRE)
if (scopeBean.is(Type.INFRASTRUCTURE)) {
jupyterAccessesHarvester = new JupyterAccessesHarvester(start, end);
} else {
// This code should be never used because the scopes are
// sorted by fullname
ScopeBean parent = scopeBean.enclosingScope();
while (!parent.is(Type.INFRASTRUCTURE)) {
parent = scopeBean.enclosingScope();
}
// Setting back token for the context
Utils.setContext(contextAuthorization.getTokenForContext(parent.toString()));
jupyterAccessesHarvester = new JupyterAccessesHarvester(start, end);
// Setting back token for the context
Utils.setContext(contextAuthorization.getTokenForContext(context));
}
}
if ((context.startsWith(SO_BIG_DATA_VO) || context.startsWith(SO_BIG_DATA_EU_VRE)
|| context.startsWith(SO_BIG_DATA_IT_VRE)) || context.startsWith(SO_BIG_DATA_IT_VRE))
&& start.before(DateUtils.getStartCalendar(2018, Calendar.APRIL, 1).getTime())) { && start.before(DateUtils.getStartCalendar(2018, Calendar.APRIL, 1).getTime())) {
logger.info("Not Harvesting for {} from {} to {}", context, DateUtils.format(start), logger.info("Not Harvesting for {} from {} to {}", context, DateUtils.format(start),
DateUtils.format(end)); DateUtils.format(end));
} else { } else {
try { try {
// Collecting Google Analytics Data for VREs Accesses // Collecting Google Analytics Data for VREs Accesses
logger.info("Going to harvest VRE Accesses for {}", context); logger.info("Going to harvest VRE Accesses for {}", context);
List<AccountingRecord> harvested = vreAccessesHarvester.getAccountingRecords(); List<AccountingRecord> harvested = vreAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested); accountingRecords.addAll(harvested);
/* /*
List<HarvestedData> harvested = vreAccessesHarvester.getData(); * List<HarvestedData> harvested =
data.addAll(harvested); * vreAccessesHarvester.getData(); data.addAll(harvested);
*/ */
} catch(Exception e) { } catch (Exception e) {
logger.error("Error harvesting VRE Accesses for {}", context, e); logger.error("Error harvesting VRE Accesses for {}", context, e);
} }
try {
// Collecting Google Analytics Data for Jupyters Accesses
logger.info("Going to harvest Jupyter Accesses for {}", context);
List<AccountingRecord> harvested = jupyterAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
/*
* List<HarvestedData> harvested =
* jupyterAccessesHarvester.getData();
* data.addAll(harvested);
*/
} catch (Exception e) {
logger.error("Error harvesting VRE Accesses for {}", context, e);
}
try { try {
// Collecting info on social (posts, replies and likes) // Collecting info on social (posts, replies and likes)
logger.info("Going to harvest Social Interactions for {}", context); logger.info("Going to harvest Social Interactions for {}", context);
SocialInteractionsHarvester socialHarvester = new SocialInteractionsHarvester(start, end); SocialInteractionsHarvester socialHarvester = new SocialInteractionsHarvester(start, end);
List<AccountingRecord> harvested = socialHarvester.getAccountingRecords(); List<AccountingRecord> harvested = socialHarvester.getAccountingRecords();
accountingRecords.addAll(harvested); accountingRecords.addAll(harvested);
/* /*
List<HarvestedData> harvested = socialHarvester.getData(); * List<HarvestedData> harvested =
data.addAll(harvested); * socialHarvester.getData(); data.addAll(harvested);
*/ */
} catch(Exception e) { } catch (Exception e) {
logger.error("Error harvesting Social Interactions for {}", context, e); logger.error("Error harvesting Social Interactions for {}", context, e);
} }
try { try {
// Collecting info on VRE users // Collecting info on VRE users
if(getVREUsers) { if (getVREUsers) {
// Harvesting Users only for VREs (not for VO and ROOT which is the sum of the children contexts) // Harvesting Users only for VREs (not for VO and ROOT
// which is the sum of the children contexts)
// The VREUsers can be only Harvested for the last month // The VREUsers can be only Harvested for the last month
if(scopeBean.is(Type.VRE) && start.equals(DateUtils.getPreviousPeriod(aggregationType, partialHarvesting).getTime())) { if (scopeBean.is(Type.VRE) && start
.equals(DateUtils.getPreviousPeriod(aggregationType, partialHarvesting).getTime())) {
logger.info("Going to harvest Context Users for {}", context); logger.info("Going to harvest Context Users for {}", context);
VREUsersHarvester vreUsersHarvester = new VREUsersHarvester(start, end); VREUsersHarvester vreUsersHarvester = new VREUsersHarvester(start, end);
List<AccountingRecord> harvested = vreUsersHarvester.getAccountingRecords(); List<AccountingRecord> harvested = vreUsersHarvester.getAccountingRecords();
accountingRecords.addAll(harvested); accountingRecords.addAll(harvested);
/* /*
List<HarvestedData> harvested = vreUsersHarvester.getData(); * List<HarvestedData> harvested =
data.addAll(harvested); * vreUsersHarvester.getData();
*/ * data.addAll(harvested);
*/
} }
} }
} catch(Exception e) { } catch (Exception e) {
logger.error("Error harvesting Context Users for {}", context, e); logger.error("Error harvesting Context Users for {}", context, e);
} }
if(context.startsWith(SO_BIG_DATA_CATALOGUE_CONTEXT)) { if (context.startsWith(SO_BIG_DATA_CATALOGUE_CONTEXT)) {
try { try {
// Collecting info on Resource Catalogue (Dataset, Application, Deliverables, Methods) // Collecting info on Resource Catalogue (Dataset,
// Application, Deliverables, Methods)
logger.info("Going to harvest Resource Catalogue Information for {}", context); logger.info("Going to harvest Resource Catalogue Information for {}", context);
ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end, ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start,
contexts); end, contexts);
List<AccountingRecord> harvested = resourceCatalogueHarvester.getAccountingRecords(); List<AccountingRecord> harvested = resourceCatalogueHarvester.getAccountingRecords();
accountingRecords.addAll(harvested); accountingRecords.addAll(harvested);
/* /*
List<HarvestedData> harvested = resourceCatalogueHarvester.getData(); * List<HarvestedData> harvested =
data.addAll(harvested); * resourceCatalogueHarvester.getData();
*/ * data.addAll(harvested);
*/
} catch(Exception e) {
} catch (Exception e) {
logger.error("Error harvesting Resource Catalogue Information for {}", context, e); logger.error("Error harvesting Resource Catalogue Information for {}", context, e);
} }
try { try {
// Collecting info on Data/Method download // Collecting info on Data/Method download
logger.info("Going to harvest Data Method Download for {}", context); logger.info("Going to harvest Data Method Download for {}", context);
DataMethodDownloadHarvester dataMethodDownloadHarvester = new DataMethodDownloadHarvester(start, DataMethodDownloadHarvester dataMethodDownloadHarvester = new DataMethodDownloadHarvester(start,
end, contexts); end, contexts);
List<AccountingRecord> harvested = dataMethodDownloadHarvester.getAccountingRecords(); List<AccountingRecord> harvested = dataMethodDownloadHarvester.getAccountingRecords();
accountingRecords.addAll(harvested); accountingRecords.addAll(harvested);
} catch(Exception e) { } catch (Exception e) {
logger.error("Error harvesting Data Method Download for {}", context, e); logger.error("Error harvesting Data Method Download for {}", context, e);
} }
} }
if(context.startsWith(TAGME_CONTEXT)) { if (context.startsWith(TAGME_CONTEXT)) {
try { try {
// Collecting info on method invocation // Collecting info on method invocation
logger.info("Going to harvest Method Invocations for {}", context); logger.info("Going to harvest Method Invocations for {}", context);
TagMeMethodInvocationHarvester tagMeMethodInvocationHarvester = new TagMeMethodInvocationHarvester( TagMeMethodInvocationHarvester tagMeMethodInvocationHarvester = new TagMeMethodInvocationHarvester(
start, end); start, end);
List<AccountingRecord> harvested = tagMeMethodInvocationHarvester.getAccountingRecords(); List<AccountingRecord> harvested = tagMeMethodInvocationHarvester.getAccountingRecords();
accountingRecords.addAll(harvested); accountingRecords.addAll(harvested);
/* /*
List<HarvestedData> harvested = tagMeMethodInvocationHarvester.getData(); * List<HarvestedData> harvested =
data.addAll(harvested); * tagMeMethodInvocationHarvester.getData();
*/ * data.addAll(harvested);
*/
} catch(Exception e) {
} catch (Exception e) {
logger.error("Error harvesting Method Invocations for {}", context, e); logger.error("Error harvesting Method Invocations for {}", context, e);
} }
} else { } else {
@ -414,38 +460,39 @@ public class AccountingDashboardHarvesterPlugin extends Plugin {
// Collecting info on method invocation // Collecting info on method invocation
logger.info("Going to harvest Method Invocations for {}", context); logger.info("Going to harvest Method Invocations for {}", context);
MethodInvocationHarvester methodInvocationHarvester = new MethodInvocationHarvester(start, end); MethodInvocationHarvester methodInvocationHarvester = new MethodInvocationHarvester(start, end);
List<AccountingRecord> harvested = methodInvocationHarvester.getAccountingRecords(); List<AccountingRecord> harvested = methodInvocationHarvester.getAccountingRecords();
accountingRecords.addAll(harvested); accountingRecords.addAll(harvested);
/* /*
List<HarvestedData> harvested = methodInvocationHarvester.getData(); * List<HarvestedData> harvested =
data.addAll(harvested); * methodInvocationHarvester.getData();
*/ * data.addAll(harvested);
} catch(Exception e) { */
} catch (Exception e) {
logger.error("Error harvesting Method Invocations for {}", context, e); logger.error("Error harvesting Method Invocations for {}", context, e);
} }
} }
} }
} }
Utils.setContext(initialToken); Utils.setContext(initialToken);
logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end), accountingRecords); logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end),
if(!dryRun) { accountingRecords);
if (!dryRun) {
dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1])); dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
//dbaseManager.insertMonthlyData(start, end, data, reRun); // dbaseManager.insertMonthlyData(start, end, data, reRun);
}else { } else {
logger.debug("Harvested measures are {}", accountingRecords); logger.debug("Harvested measures are {}", accountingRecords);
} }
} }
/** {@inheritDoc} */ /** {@inheritDoc} */
@Override @Override
protected void onStop() throws Exception { protected void onStop() throws Exception {
logger.debug("{} is stopping", this.getClass().getSimpleName()); logger.debug("{} is stopping", this.getClass().getSimpleName());
} }
} }

View File

@ -80,6 +80,7 @@ public class JupyterAccessesHarvester extends BasicHarvester {
public JupyterAccessesHarvester(Date start, Date end) throws Exception { public JupyterAccessesHarvester(Date start, Date end) throws Exception {
super(start, end); super(start, end);
logger.debug("JupyerAccessHArvester: {}, {}",start,end);
vreAccesses = getAllAccesses(start, end); vreAccesses = getAllAccesses(start, end);
} }
@ -94,9 +95,10 @@ public class JupyterAccessesHarvester extends BasicHarvester {
ScopeBean scopeBean = new ScopeBean(context); ScopeBean scopeBean = new ScopeBean(context);
String lowerCasedContext = scopeBean.name().toLowerCase(); String lowerCasedContext = scopeBean.name().toLowerCase();
logger.debug("JupyerAccessHArvester lowerCasedContext: {}",lowerCasedContext);
for (VREAccessesReportRow row : vreAccesses) { for (VREAccessesReportRow row : vreAccesses) {
String pagePath = row.getPagePath().toLowerCase(); String pagePath = row.getPagePath().toLowerCase();
//logger.debug("JupyerAccessHArvester pagePath: {}",lowerCasedContext);
if (pagePath != null && !pagePath.isEmpty()) { if (pagePath != null && !pagePath.isEmpty()) {
if (pagePath.contains(lowerCasedContext)) { if (pagePath.contains(lowerCasedContext)) {
if (pagePath.contains("jupyter") || pagePath.contains("jupiter")) { if (pagePath.contains("jupyter") || pagePath.contains("jupiter")) {
@ -111,7 +113,7 @@ public class JupyterAccessesHarvester extends BasicHarvester {
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(); ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor();
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant,
getDimension(HarvestedDataKey.ACCESSES), (long) measure); getDimension(HarvestedDataKey.JUPYTER_ACCESSES), (long) measure);
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure()); logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
accountingRecords.add(ar); accountingRecords.add(ar);

View File

@ -494,56 +494,7 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
} }
} }
// @Test
public void testJupyterccessesHarvester() throws Exception {
try {
// AccountingDao dao = getAccountingDao();
List<Date> starts = new ArrayList<>();
starts.add(DateUtils.getStartCalendar(2018, Calendar.SEPTEMBER, 1).getTime());
starts.add(DateUtils.getStartCalendar(2018, Calendar.OCTOBER, 1).getTime());
starts.add(DateUtils.getStartCalendar(2018, Calendar.NOVEMBER, 1).getTime());
starts.add(DateUtils.getStartCalendar(2018, Calendar.DECEMBER, 1).getTime());
starts.add(DateUtils.getStartCalendar(2019, Calendar.JANUARY, 1).getTime());
starts.add(DateUtils.getStartCalendar(2019, Calendar.FEBRUARY, 1).getTime());
starts.add(DateUtils.getStartCalendar(2019, Calendar.MARCH, 1).getTime());
AggregationType measureType = AggregationType.MONTHLY;
String[] contextFullNames = new String[] { "/d4science.research-infrastructures.eu/D4OS/Blue-CloudLab" };
List<AccountingRecord> accountingRecords = new ArrayList<>();
for (Date start : starts) {
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1, false);
ContextTest.setContextByName(ROOT);
JupyterAccessesHarvester vreAccessesHarvester = new JupyterAccessesHarvester(start, end);
for (String contextFullname : contextFullNames) {
setContextByNameAndScopeDescriptor(contextFullname);
List<AccountingRecord> harvested = vreAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
logger.debug("{} - {}", contextFullname, accountingRecords);
}
}
logger.debug("{}", accountingRecords);
ContextTest.setContextByName(ROOT);
// dao.insertRecords(accountingRecords.toArray(new
// AccountingRecord[1]));
} catch (Exception e) {
logger.error("", e);
throw e;
}
}
// @Test // @Test
public void testSocialInteraction() { public void testSocialInteraction() {
try { try {

View File

@ -0,0 +1,65 @@
package org.gcube.dataharvest.jupyter;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.dataharvest.harvester.JupyterAccessesHarvester;
import org.gcube.dataharvest.utils.AggregationType;
import org.gcube.dataharvest.utils.ContextTest;
import org.gcube.dataharvest.utils.DateUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Integration test for {@link JupyterAccessesHarvester}: harvests Jupyter
 * accesses for the Blue-CloudLab VRE over the first three months of 2021 and
 * logs the collected records. DB insertion is intentionally commented out.
 */
public class AccountingDataHarvesterJupyterTest extends ContextTest {

	private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterJupyterTest.class);

	public static final String ROOT = "/d4science.research-infrastructures.eu";
	private static final String SCOPE = "/d4science.research-infrastructures.eu/D4OS/Blue-CloudLab";

	// Fixed method-name typo (was "testJupyterccessesHarvester")
	@Test
	public void testJupyterAccessesHarvester() throws Exception {
		try {
			// AccountingDao dao = getAccountingDao();

			List<Date> starts = new ArrayList<>();
			starts.add(DateUtils.getStartCalendar(2021, Calendar.JANUARY, 1).getTime());
			starts.add(DateUtils.getStartCalendar(2021, Calendar.FEBRUARY, 1).getTime());
			starts.add(DateUtils.getStartCalendar(2021, Calendar.MARCH, 1).getTime());

			AggregationType measureType = AggregationType.MONTHLY;

			List<AccountingRecord> accountingRecords = new ArrayList<>();
			for (Date start : starts) {
				Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1, false);
				// The harvester must be created in the root scope
				ContextTest.setContextByName(ROOT);
				JupyterAccessesHarvester jupyterAccessesHarvester = new JupyterAccessesHarvester(start, end);
				// Records are then collected in the target VRE scope
				ContextTest.setContextByName(SCOPE);
				List<AccountingRecord> harvested = jupyterAccessesHarvester.getAccountingRecords();
				accountingRecords.addAll(harvested);
				logger.debug("{} - {}", SCOPE, accountingRecords);
			}
			logger.debug("{}", accountingRecords);
			ContextTest.setContextByName(ROOT);
			// dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
		} catch (Throwable e) {
			// BUG FIX: was logger.error(e.getLocalizedMessage(), e) — using the
			// exception message as the SLF4J format string misrenders any '{}'
			// it contains and loses the message when it is null.
			logger.error("Jupyter accesses harvesting test failed", e);
			throw e;
		}
	}

}

View File

@ -1,3 +1,4 @@
/*.gcubekey /*.gcubekey
/*.key /*.key
/*.properties /*.properties
/howto.txt