Removed unneeded harvester and the key it harvested

Feature/26508
Luca Frosini 2 years ago
parent 1fe73af6bc
commit dbc270a523

@@ -76,16 +76,16 @@
<artifactId>google-http-client-gson</artifactId>
<version>1.21.0</version>
</dependency>
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>storagehub-client-library</artifactId>
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- <dependency> -->
<!-- <groupId>org.gcube.common</groupId> -->
<!-- <artifactId>storagehub-client-library</artifactId> -->
<!-- <exclusions> -->
<!-- <exclusion> -->
<!-- <groupId>com.fasterxml.jackson.core</groupId> -->
<!-- <artifactId>jackson-core</artifactId> -->
<!-- </exclusion> -->
<!-- </exclusions> -->
<!-- </dependency> -->
<dependency>
<groupId>org.gcube.data-catalogue</groupId>
<artifactId>gcat-client</artifactId>

@@ -28,7 +28,6 @@ import org.gcube.dataharvest.harvester.RStudioAccessesHarvester;
import org.gcube.dataharvest.harvester.SocialInteractionsHarvester;
import org.gcube.dataharvest.harvester.VREAccessesHarvester;
import org.gcube.dataharvest.harvester.VREUsersHarvester;
import org.gcube.dataharvest.harvester.sobigdata.DataMethodDownloadHarvester;
import org.gcube.dataharvest.harvester.sobigdata.ResourceCatalogueHarvester;
import org.gcube.dataharvest.harvester.sobigdata.TagMeMethodInvocationHarvester;
import org.gcube.dataharvest.utils.AggregationType;
@@ -453,18 +452,18 @@ public class AccountingDashboardHarvesterPlugin extends Plugin {
logger.error("Error harvesting Resource Catalogue Information for {}", context, e);
}
try {
// Collecting info on Data/Method download
logger.info("Going to harvest Data Method Download for {}", context);
DataMethodDownloadHarvester dataMethodDownloadHarvester = new DataMethodDownloadHarvester(start,
end, contexts);
List<AccountingRecord> harvested = dataMethodDownloadHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
} catch (Exception e) {
logger.error("Error harvesting Data Method Download for {}", context, e);
}
// try {
// // Collecting info on Data/Method download
// logger.info("Going to harvest Data Method Download for {}", context);
// DataMethodDownloadHarvester dataMethodDownloadHarvester = new DataMethodDownloadHarvester(start,
// end, contexts);
//
// List<AccountingRecord> harvested = dataMethodDownloadHarvester.getAccountingRecords();
// accountingRecords.addAll(harvested);
//
// } catch (Exception e) {
// logger.error("Error harvesting Data Method Download for {}", context, e);
// }
}

@@ -5,7 +5,6 @@ package org.gcube.dataharvest.datamodel;
/**
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
* @author M. Assante, ISTI-CNR
*/
@@ -24,16 +23,11 @@ public enum HarvestedDataKey {
CATALOGUE_RESOURCE_ACCESSES("Item Resource"),
ACCESSES("VRE Accesses"),
USERS("VRE Users"),
DATA_METHOD_DOWNLOAD("Data/Method download"),
// NEW_CATALOGUE_METHODS("New Catalogue Methods"),
// NEW_CATALOGUE_DATASETS("New Catalogue Datasets"),
// NEW_CATALOGUE_DELIVERABLES("New Catalogue Deliverables"),
// NEW_CATALOGUE_APPLICATIONS("New Catalogue Applications"),
SOCIAL_POSTS("VRE Social Interations Posts"),
SOCIAL_REPLIES("VRE Social Interations Replies"),
SOCIAL_LIKES("VRE Social Interations Likes"),
METHOD_INVOCATIONS("VRE Methods Invocation"),
VISUAL_TOOLS("VRE Visual Tools");
METHOD_INVOCATIONS("VRE Methods Invocation");
private String key;

@@ -1,196 +0,0 @@
package org.gcube.dataharvest.harvester.sobigdata;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.SortedSet;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.common.storagehub.client.dsl.ContainerType;
import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.gcube.common.storagehub.client.dsl.ItemContainer;
import org.gcube.common.storagehub.client.dsl.ListResolverTyped;
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
import org.gcube.common.storagehub.model.items.FolderItem;
import org.gcube.common.storagehub.model.items.Item;
import org.gcube.common.storagehub.model.items.nodes.Accounting;
import org.gcube.common.storagehub.model.items.nodes.accounting.AccountEntry;
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.utils.DateUtils;
import org.gcube.dataharvest.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The Class DataMethodDownloadHarvester.
*
* @author Eric Perrone (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR)
* @author Francesco Mangiacrapa (ISTI - CNR)
*/
public class DataMethodDownloadHarvester extends SoBigDataHarvester {
private static Logger logger = LoggerFactory.getLogger(DataMethodDownloadHarvester.class);
private int count = 0;
/**
* Instantiates a new data method download harvester.
*
* @param start the start
* @param end the end
* @param contexts the contexts
* @throws ParseException the parse exception
*/
public DataMethodDownloadHarvester(Date start, Date end, SortedSet<String> contexts) throws Exception {
super(start, end, contexts);
}
/* (non-Javadoc)
* @see org.gcube.dataharvest.harvester.BasicHarvester#getData()
*/
@Override
public List<AccountingRecord> getAccountingRecords() throws Exception {
String defaultContext = Utils.getCurrentContext();
logger.debug("The context is {}", defaultContext);
try {
/*
String vreName = getVRENameToHL(defaultContext);
logger.debug("Getting VRE Name to HL from context/scope returns {} ", vreName);
String user = vreName + "-Manager";
logger.debug("Using user '{}' to getHome from HL", user);
//Getting HL instance and home for VRE MANAGER
HomeManager manager = HomeLibrary.getHomeManagerFactory().getHomeManager();
@SuppressWarnings("deprecation")
Home home = manager.getHome(user);
JCRWorkspace ws = (JCRWorkspace) home.getWorkspace();
String path = "/Workspace/MySpecialFolders/" + vreName;
logger.debug("Getting item by Path {}", path);
JCRWorkspaceItem item = (JCRWorkspaceItem) ws.getItemByPath(path);
*/
StorageHubClient storageHubClient = new StorageHubClient();
FolderContainer vreFolderContainer = storageHubClient.openVREFolder();
FolderItem vreFolderItem = vreFolderContainer.get();
logger.debug("Analyzing {} in the period [{} to {}] starting from root {}", defaultContext,
DateUtils.format(start), DateUtils.format(end), vreFolderItem.getName());
ScopeDescriptor defaultScopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor();
AccountingRecord defaultHarvesteData = new AccountingRecord(defaultScopeDescriptor, instant, getDimension(HarvestedDataKey.DATA_METHOD_DOWNLOAD), (long) count);
logger.debug("{} : {}", defaultHarvesteData.getDimension().getId(), defaultHarvesteData.getMeasure());
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
ListResolverTyped listResolverTyped = vreFolderContainer.list();
List<ItemContainer<? extends Item>> containers = listResolverTyped.includeHidden().getContainers();
for(ItemContainer<? extends Item> itemContainer : containers) {
count = 0; //resettings the counter
//HarvestedData harvestedData;
//Getting statistics for folder
if(itemContainer.getType() == ContainerType.FOLDER) {
Item item = itemContainer.get();
logger.debug("Getting statistics for folder {}", item.getName());
getStats(itemContainer, start, end);
String normalizedName = item.getName().replaceAll("[^A-Za-z0-9]", "");
String context = mapWsFolderNameToVRE.get(normalizedName);
//Checking if it is a VRE name to right accounting...
if(context != null && !context.isEmpty()) {
logger.debug("Found context '{}' matching with normalized VRE name {} ", context, normalizedName);
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.DATA_METHOD_DOWNLOAD), (long) count);
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
accountingRecords.add(ar);
} else {
logger.debug(
"No scope found matching the folder name {}, accounting its stats in the default context {}",
normalizedName, defaultContext);
//INCREASING THE DEFAULT CONTEXT COUNTER...
defaultHarvesteData.setMeasure(defaultHarvesteData.getMeasure() + count);
logger.trace("Increased default context stats {}", defaultHarvesteData);
}
}
}
//ADDING DEFAULT ACCOUNTING
accountingRecords.add(defaultHarvesteData);
logger.debug("In the period [from {} to {} ] returning workspace accouting data {}", DateUtils.format(start),
DateUtils.format(end), accountingRecords);
return accountingRecords;
} catch(Exception e) {
throw e;
}
}
/**
* Gets the stats.
*
* @param baseItem the base item
* @param start the start
* @param end the end
* @return the stats
* @throws InternalErrorException the internal error exception
*/
private void getStats(ItemContainer<? extends Item> itemContainer, Date start, Date end) throws Exception {
if(itemContainer.getType() == ContainerType.FOLDER) {
ListResolverTyped listResolverTyped = ((FolderContainer)itemContainer).list();
List<ItemContainer<? extends Item>> containers = listResolverTyped.includeHidden().getContainers();
for(ItemContainer<? extends Item> itemCont : containers) {
getStats(itemCont , start, end);
}
} else {
try {
Accounting accounting = itemContainer.get().getAccounting();
for(AccountEntry entry : accounting.getEntries()) {
switch(entry.getType()) {
case CREATE:
case UPDATE:
case READ:
Calendar calendar = entry.getDate();
if(calendar.after(DateUtils.dateToCalendar(start))
&& calendar.before(DateUtils.dateToCalendar(end))) {
count++;
}
break;
default:
break;
}
}
} catch(Exception e) {
throw e;
}
}
}
}

@@ -28,7 +28,6 @@ import org.gcube.dataharvest.harvester.MethodInvocationHarvester;
import org.gcube.dataharvest.harvester.SocialInteractionsHarvester;
import org.gcube.dataharvest.harvester.VREAccessesHarvester;
import org.gcube.dataharvest.harvester.VREUsersHarvester;
import org.gcube.dataharvest.harvester.sobigdata.DataMethodDownloadHarvester;
import org.gcube.dataharvest.harvester.sobigdata.ResourceCatalogueHarvester;
import org.gcube.dataharvest.harvester.sobigdata.TagMeMethodInvocationHarvester;
import org.gcube.dataharvest.utils.AggregationType;
@@ -884,54 +883,6 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
}
}
@Ignore
// @Test
public void testDataMethodDownloadHarvester() {
try {
// Utils.setContext(RESOURCE_CATALOGUE);
ContextTest.setContextByName(ROOT);
AggregationType measureType = AggregationType.MONTHLY;
// Date start = DateUtils.getStartCalendar(2015, Calendar.FEBRUARY,
// 1).getTime();
// Date end = DateUtils.getStartCalendar(2019, Calendar.FEBRUARY,
// 1).getTime();
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
AccountingDashboardHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDashboardHarvesterPlugin();
accountingDataHarvesterPlugin.getConfigParameters();
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts();
for (String context : contexts) {
ScopeBean scopeBean = new ScopeBean(context);
logger.debug("FullName {} - Name {}", scopeBean.toString(), scopeBean.name());
if (context.startsWith(AccountingDashboardHarvesterPlugin.SO_BIG_DATA_VO)) {
if (scopeBean.is(Type.VRE)) {
if (context.startsWith(TAGME_VRE)) {
continue;
}
ContextTest.set(contextAuthorization.getSecretForContext(context));
DataMethodDownloadHarvester dataMethodDownloadHarvester = new DataMethodDownloadHarvester(start,
end, contexts);
List<AccountingRecord> data = dataMethodDownloadHarvester.getAccountingRecords();
logger.debug("{}", data);
}
}
}
} catch (Exception e) {
logger.error("", e);
}
}
public static final String E_LEARNING_AREA_VRE = "/d4science.research-infrastructures.eu/SoBigData/E-Learning_Area";
@Ignore
