Fixes #11760: Refactor accounting-dashboard-harvester-se-plugin to support the new DB schema

Task-Url: https://support.d4science.org/issues/11760

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/accounting/accounting-dashboard-harvester-se-plugin@171294 82a268e6-3cf1-43bd-a215-b396298e98cf
Luca Frosini 2018-09-04 09:13:16 +00:00
parent 456f8b31b7
commit 99b12cfe07
21 changed files with 474 additions and 1059 deletions
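At its core, the refactoring removes the plugin's home-grown PostgreSQL layer (Dao, DatabaseManager, HarvestedData) and persists AccountingRecord objects through the accounting-summary-access API instead. A minimal sketch of the new flow, using only types visible in this diff (the context path and the measure value are made up for illustration):

import java.time.Instant;

import org.gcube.accounting.accounting.summary.access.AccountingDao;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.internal.Dimension;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;

public class NewSchemaSketch {
	public static void main(String[] args) throws Exception {
		AccountingDao dao = AccountingDao.get();
		// Contexts and measure types are first-class entities in the new schema.
		// The context path below is hypothetical.
		ScopeDescriptor scope = new ScopeDescriptor("MyVRE", "/d4science.research-infrastructures.eu/gCubeApps/MyVRE");
		// Same shape the plugin uses as a fallback: new Dimension(key, key, null, key).
		Dimension dimension = new Dimension("VRE Accesses", "VRE Accesses", null, "VRE Accesses");
		// One harvested measure becomes one record bound to a scope,
		// an instant within the harvested month, and a dimension.
		AccountingRecord record = new AccountingRecord(scope, Instant.now(), dimension, 42L);
		dao.insertRecords(new AccountingRecord[] {record});
	}
}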

pom.xml

@@ -127,44 +127,52 @@
<groupId>org.gcube.information-system</groupId>
<artifactId>gcube-resources</artifactId>
</dependency>
<dependency>
<!-- dependency>
<groupId>org.gcube.portlets.user</groupId>
<artifactId>gcube-url-shortener</artifactId>
<version>[1.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
<scope>compile</scope>
</dependency>
</dependency-->
<dependency>
<groupId>org.gcube.accounting</groupId>
<artifactId>accounting-analytics</artifactId>
<version>[2.0.0-SNAPSHOT,3.0.0-SNAPSHOT)</version>
</dependency>
<!-- -->
<dependency>
<groupId>org.gcube.accounting</groupId>
<artifactId>accounting-analytics-persistence-couchbase</artifactId>
<version>[1.0.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.accounting</groupId>
<artifactId>accounting-lib</artifactId>
<scope>provided</scope>
</dependency>
<!-- -->
<dependency>
<groupId>org.gcube.accounting</groupId>
<artifactId>accounting-summary-access</artifactId>
<version>[1.0.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
</dependency>
<!-- dependency>
<groupId>org.ancoron.postgresql</groupId>
<artifactId>org.postgresql</artifactId>
<version>9.1.901.jdbc4.1-rc9</version>
<scope>compile</scope>
</dependency>
</dependency -->
<dependency>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>20171018</version>
<scope>compile</scope>
</dependency>
<!-- Test Dependencies -->
<dependency>
<groupId>junit</groupId>

src/main/java/org/gcube/dataharvest/AccountingDataHarvesterPlugin.java

@@ -5,16 +5,20 @@ import java.io.InputStream;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.SortedSet;
import org.gcube.accounting.accounting.summary.access.AccountingDao;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.internal.Dimension;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.common.scope.impl.ScopeBean.Type;
import org.gcube.dataharvest.dao.DatabaseManager;
import org.gcube.dataharvest.datamodel.HarvestedData;
import org.gcube.dataharvest.harvester.MethodInvocationHarvester;
import org.gcube.dataharvest.harvester.SocialInteractionsHarvester;
import org.gcube.dataharvest.harvester.VREAccessesHarvester;
@@ -22,9 +26,9 @@ import org.gcube.dataharvest.harvester.VREUsersHarvester;
import org.gcube.dataharvest.harvester.sobigdata.DataMethodDownloadHarvester;
import org.gcube.dataharvest.harvester.sobigdata.ResourceCatalogueHarvester;
import org.gcube.dataharvest.harvester.sobigdata.TagMeMethodInvocationHarvester;
import org.gcube.dataharvest.utils.AggregationType;
import org.gcube.dataharvest.utils.ContextAuthorization;
import org.gcube.dataharvest.utils.DateUtils;
import org.gcube.dataharvest.utils.AggregationType;
import org.gcube.dataharvest.utils.Utils;
import org.gcube.vremanagement.executor.plugin.Plugin;
import org.slf4j.Logger;
@@ -53,6 +57,8 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
public static final String TAGME_CONTEXT = "/d4science.research-infrastructures.eu/SoBigData/TagMe";
public static final String TO_BE_SET = "TO BE SET";
protected Date start;
protected Date end;
@@ -69,10 +75,57 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
};
public static InheritableThreadLocal<Properties> getProperties() {
return properties;
}
public static Dimension getDimension(String key) {
Dimension dimension = dimensions.get().get(key);
if(dimension == null) {
dimension = new Dimension(key, key, null, key);
}
return dimension;
}
private static final InheritableThreadLocal<Map<String, Dimension>> dimensions = new InheritableThreadLocal<Map<String, Dimension>>() {
@Override
protected Map<String, Dimension> initialValue() {
return new HashMap<>();
}
};
public static ScopeDescriptor getScopeDescriptor(String context) {
return scopeDescriptors.get().get(context);
}
private static final InheritableThreadLocal<Map<String, ScopeDescriptor>> scopeDescriptors = new InheritableThreadLocal<Map<String, ScopeDescriptor>>() {
@Override
protected Map<String, ScopeDescriptor> initialValue() {
return new HashMap<>();
}
};
public static ScopeDescriptor getScopeDescriptor() {
return scopeDescriptor.get();
}
private static final InheritableThreadLocal<ScopeDescriptor> scopeDescriptor = new InheritableThreadLocal<ScopeDescriptor>() {
@Override
protected ScopeDescriptor initialValue() {
return new ScopeDescriptor("","");
}
};
public Properties getConfigParameters() throws IOException {
Properties properties = new Properties();
try {
@@ -147,11 +200,30 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
ContextAuthorization contextAuthorization = new ContextAuthorization();
DatabaseManager dbaseManager = new DatabaseManager();
// DatabaseManager dbaseManager = new DatabaseManager();
AccountingDao dao = AccountingDao.get();
Set<ScopeDescriptor> scopeDescriptorSet = dao.getContexts();
Map<String,ScopeDescriptor> scopeDescriptorMap = new HashMap<>();
for(ScopeDescriptor scopeDescriptor : scopeDescriptorSet) {
scopeDescriptorMap.put(scopeDescriptor.getId(), scopeDescriptor);
}
scopeDescriptors.set(scopeDescriptorMap);
Set<Dimension> dimensionSet = dao.getDimensions();
Map<String,Dimension> dimensionMap = new HashMap<>();
for(Dimension dimension : dimensionSet) {
dimensionMap.put(dimension.getId(), dimension);
}
dimensions.set(dimensionMap);
SortedSet<String> contexts = contextAuthorization.getContexts();
ArrayList<HarvestedData> data = new ArrayList<HarvestedData>();
// ArrayList<HarvestedData> data = new ArrayList<HarvestedData>();
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
String initialToken = SecurityTokenProvider.instance.get();
@@ -163,6 +235,14 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
ScopeBean scopeBean = new ScopeBean(context);
ScopeDescriptor actualScopeDescriptor = scopeDescriptorMap.get(context);
if(actualScopeDescriptor==null) {
actualScopeDescriptor = new ScopeDescriptor(scopeBean.name(), context);
}
scopeDescriptor.set(actualScopeDescriptor);
if(vreAccessesHarvester == null) {
if(scopeBean.is(Type.INFRASTRUCTURE)) {
@@ -197,8 +277,14 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
try {
// Collecting Google Analytics Data for VREs Accesses
logger.info("Going to harvest VRE Accesses for {}", context);
List<AccountingRecord> harvested = vreAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
/*
List<HarvestedData> harvested = vreAccessesHarvester.getData();
data.addAll(harvested);
*/
} catch(Exception e) {
logger.error("Error harvesting VRE Accesses for {}", context, e);
}
@@ -207,8 +293,14 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
// Collecting info on social (posts, replies and likes)
logger.info("Going to harvest Social Interactions for {}", context);
SocialInteractionsHarvester socialHarvester = new SocialInteractionsHarvester(start, end);
List<AccountingRecord> harvested = socialHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
/*
List<HarvestedData> harvested = socialHarvester.getData();
data.addAll(harvested);
*/
} catch(Exception e) {
logger.error("Error harvesting Social Interactions for {}", context, e);
}
@@ -221,8 +313,14 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
if(scopeBean.is(Type.VRE) && start.equals(DateUtils.getPreviousPeriod(aggregationType).getTime())) {
logger.info("Going to harvest Context Users for {}", context);
VREUsersHarvester vreUsersHarvester = new VREUsersHarvester(start, end);
List<AccountingRecord> harvested = vreUsersHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
/*
List<HarvestedData> harvested = vreUsersHarvester.getData();
data.addAll(harvested);
*/
}
}
} catch(Exception e) {
@@ -236,8 +334,15 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
logger.info("Going to harvest Resource Catalogue Information for {}", context);
ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end,
contexts);
List<AccountingRecord> harvested = resourceCatalogueHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
/*
List<HarvestedData> harvested = resourceCatalogueHarvester.getData();
data.addAll(harvested);
*/
} catch(Exception e) {
logger.error("Error harvesting Resource Catalogue Information for {}", context, e);
}
@@ -247,8 +352,15 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
logger.info("Going to harvest Data Method Download for {}", context);
DataMethodDownloadHarvester dataMethodDownloadHarvester = new DataMethodDownloadHarvester(start,
end, contexts);
List<AccountingRecord> harvested = dataMethodDownloadHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
/*
List<HarvestedData> harvested = dataMethodDownloadHarvester.getData();
data.addAll(harvested);
*/
} catch(Exception e) {
logger.error("Error harvesting Data Method Download for {}", context, e);
}
@@ -261,8 +373,15 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
logger.info("Going to harvest Method Invocations for {}", context);
TagMeMethodInvocationHarvester tagMeMethodInvocationHarvester = new TagMeMethodInvocationHarvester(
start, end);
List<AccountingRecord> harvested = tagMeMethodInvocationHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
/*
List<HarvestedData> harvested = tagMeMethodInvocationHarvester.getData();
data.addAll(harvested);
*/
} catch(Exception e) {
logger.error("Error harvesting Method Invocations for {}", context, e);
}
@@ -271,8 +390,15 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
// Collecting info on method invocation
logger.info("Going to harvest Method Invocations for {}", context);
MethodInvocationHarvester methodInvocationHarvester = new MethodInvocationHarvester(start, end);
List<AccountingRecord> harvested = methodInvocationHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
/*
List<HarvestedData> harvested = methodInvocationHarvester.getData();
data.addAll(harvested);
*/
} catch(Exception e) {
logger.error("Error harvesting Method Invocations for {}", context, e);
}
@@ -282,9 +408,12 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
Utils.setContext(initialToken);
logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end), data);
logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end), accountingRecords);
if(!dryRun) {
dbaseManager.insertMonthlyData(start, end, data, reRun);
dao.insertRecords(accountingRecords.toArray(new AccountingRecord[accountingRecords.size()]));
//dbaseManager.insertMonthlyData(start, end, data, reRun);
}else {
logger.debug("Harvested measures are {}", accountingRecords);
}
}
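The InheritableThreadLocal caches introduced above are the bridge between this main loop and the individual harvesters: the loop stores the current ScopeDescriptor before invoking each harvester, and the harvester reads it back together with the Dimension matching its measure key. A hedged sketch of that consumer side (a fragment, assuming the imports already shown in this file's diff; the key string is one of the HarvestedDataKey labels defined later in this commit):

// Sketch: how a harvester consumes the caches populated by the main loop above.
private static AccountingRecord recordFor(String key, java.time.Instant instant, long measure) {
	ScopeDescriptor scope = AccountingDataHarvesterPlugin.getScopeDescriptor(); // set per context by the loop
	Dimension dimension = AccountingDataHarvesterPlugin.getDimension(key);      // falls back to new Dimension(key, key, null, key)
	return new AccountingRecord(scope, instant, dimension, measure);
}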

src/main/java/org/gcube/dataharvest/dao/Dao.java

@@ -1,386 +0,0 @@
package org.gcube.dataharvest.dao;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.dataharvest.datamodel.HarvestedData;
import org.gcube.dataharvest.utils.DateUtils;
import org.postgresql.util.PSQLException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Eric Perrone (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR)
*/
public class Dao {
private Connection conn = null;
private static Logger logger = LoggerFactory.getLogger(Dao.class);
/**
* Class constructor. This method must be called before any other class
* method.
*
* @throws DaoException
*/
public void init() throws DaoException {
try {
Class.forName("org.postgresql.Driver");
} catch(ClassNotFoundException ex) {
logger.error(ex.getLocalizedMessage());
throw new DaoException(ex.getLocalizedMessage(), ex.getCause());
}
}
/**
* Connect a database
*
* @param url
* @param user
* @param password
* @throws DaoException
*/
public void connect(String url, String user, String password) throws DaoException {
try {
conn = DriverManager.getConnection(url, user, password);
logger.debug("Connected to: " + url);
} catch(SQLException ex) {
logger.error(ex.getLocalizedMessage());
throw new DaoException(ex.getLocalizedMessage(), ex.getCause());
}
}
/**
* Release a database connection
*
* @throws DaoException
*/
public void disconnect() throws DaoException {
try {
if(conn != null) {
conn.close();
conn = null;
logger.debug("Disconnecting from database");
}
} catch(SQLException ex) {
logger.error(ex.getLocalizedMessage());
throw new DaoException(ex.getLocalizedMessage(), ex.getCause());
}
}
/**
* Check the connection with the database.
*
* @return true|false
*/
public boolean isConnected() {
return (!(conn == null));
}
/**
* Getter for the database connection
*
* @return Connection class
*/
public Connection getConnection() {
return conn;
}
/**
* Read on the database all active VRES
*
* @return
*/
public String[] getActiveVres() {
String query = "select dname from context where dname is not null and dismissed is null";
logger.debug(query);
Statement s = null;
ResultSet rs = null;
ArrayList<String> list = new ArrayList<>();
try {
s = conn.createStatement();
rs = s.executeQuery(query);
while(rs.next()) {
list.add(rs.getString("dname"));
}
return list.toArray(new String[list.size()]);
} catch(Exception ex) {
logger.error(ex.getLocalizedMessage());
return null;
} finally {
if(rs != null) {
try {
rs.close();
} catch(SQLException ex) {
// do nothing
}
}
if(s != null) {
try {
s.close();
} catch(SQLException ex) {
// do nothing
}
}
}
}
/**
* This method insert/update data in the monthly_measure database table.
*
* @param data
* @param from
* @param to
* @throws DaoException
*/
public void insertMonthlyMeasure(List<HarvestedData> data, Date from, Date to, boolean doUpdate)
throws DaoException {
// first of all: check if data of the same type are already in the
// database.
// In this case data will be updated.
Calendar cFrom = DateUtils.dateToCalendar(from);
Calendar cTo = DateUtils.dateToCalendar(to);
int monthFrom = cFrom.get(Calendar.MONTH);
int yearFrom = cFrom.get(Calendar.YEAR);
int monthTo = cTo.get(Calendar.MONTH);
int yearTo = cTo.get(Calendar.YEAR);
if((monthFrom != monthTo) || (yearFrom != yearTo)) {
String err = "Invalid time period. The time period MUST refer one month.";
logger.error(err);
throw new DaoException(err, null);
}
if(data == null || data.size() <= 0) {
String err = "No data passed in input. Aborting operation.";
logger.error(err);
throw new DaoException(err, null);
}
monthFrom++; // because january = 0...
try {
for(HarvestedData harvestedData : data) {
int contextId = getOrInsertContextId(harvestedData.getContext());
String query = "select id from monthly_measure where measure_type_id=" + harvestedData.getDataType()
+ " and context_id=" + contextId + " and month=" + monthFrom + " and year=" + yearFrom;
logger.debug(query);
Statement sel = conn.createStatement();
ResultSet rs = sel.executeQuery(query);
if(rs.next()) {
if(doUpdate) {
// record found: update it
Statement s = conn.createStatement();
int id = rs.getInt("id");
String update = "update monthly_measure set measure=" + harvestedData.getMeasure() + " where id="
+ id;
logger.debug(update);
s.execute(update);
s.close();
} else {
logger.warn("Skipped " + harvestedData.getContext());
}
} else {
// record not found: insert new record
Statement s = conn.createStatement();
String insert = "insert into monthly_measure (year, month, measure, measure_type_id, context_id, day) values (";
insert += yearFrom + "," + monthFrom + "," + harvestedData.getMeasure() + "," + harvestedData.getDataType()
+ ",";
insert += "(select id from context where dname='" + harvestedData.getContext() + "'),";
insert += "'" + yearFrom + "-" + monthFrom + "-01'";
insert += ")";
logger.debug(insert);
s.execute(insert);
s.close();
}
rs.close();
sel.close();
}
} catch(PSQLException x) {
// handled exception: try to iterate...
logger.error(x.getLocalizedMessage());
} catch(Exception x) {
// not handled exception: stop
logger.error(x.getLocalizedMessage());
throw new DaoException(x.getClass().getName() + "::" + x.getLocalizedMessage(), x);
}
}
public enum contextType {
INFRASTRUCTURE, VO, VRE
}
public int getOrInsertContextId(String contextFullName) throws SQLException {
String query = "select id from context where dname='" + contextFullName + "'";
logger.debug(query);
Statement sel = conn.createStatement();
ResultSet rs = sel.executeQuery(query);
if(rs.next()) {
// context found
int id = rs.getInt("id");
logger.debug("Context {} has id {}", contextFullName, id);
return id;
} else {
// context not found: insert new record
int parentId = 0; // The id of D4Science infrastructure as aggregator
int lastIndexOfSlash = contextFullName.lastIndexOf("/");
if(lastIndexOfSlash!=0) {
String parentContextFullName = contextFullName.substring(0, contextFullName.lastIndexOf("/"));
parentId = getOrInsertContextId(parentContextFullName);
}
// It is a new context and we don't know which context type is. Using 0 for ROOT, 1 for VO, 2 for VRE
int contextTypeId = contextType.INFRASTRUCTURE.ordinal();
ScopeBean scopeBean = new ScopeBean(contextFullName);
switch(scopeBean.type()) {
case INFRASTRUCTURE:
contextTypeId = contextType.INFRASTRUCTURE.ordinal();
break;
case VO:
contextTypeId = contextType.VO.ordinal();
break;
case VRE:
contextTypeId = contextType.VRE.ordinal();
break;
default:
break;
}
Statement s = conn.createStatement();
String insert = "insert into context (name, context_id, context_type_id, dname) values ('";
insert += contextFullName + "'," + parentId + "," + contextTypeId + ", '" + contextFullName;
insert += "')";
logger.debug(insert);
s.execute(insert);
s.close();
}
rs.close();
sel.close();
return getOrInsertContextId(contextFullName);
}
public ArrayList<Integer> getSubTree(Integer rootId) throws DaoException {
String queryBase = "select id from context where context_id in (%%)";
ArrayList<Integer> subTree = new ArrayList<>();
ArrayList<Integer> temp = new ArrayList<>();
Statement s = null;
ResultSet rs = null;
temp.add(rootId);
subTree.add(rootId);
boolean again = true;
for(int i = 0; (i < 10) && (again); i++) {
try {
String listId = "";
for(Integer id : temp) {
listId += "," + id;
}
listId = listId.substring(1);
String query = queryBase.replace("%%", listId);
s = conn.createStatement();
rs = s.executeQuery(query);
if(rs.next()) {
temp = new ArrayList<>();
Integer dbId = rs.getInt("id");
subTree.add(dbId);
temp.add(dbId);
while(rs.next()) {
dbId = rs.getInt("id");
subTree.add(dbId);
temp.add(dbId);
}
} else {
again = false;
}
rs.close();
s.close();
} catch(Exception x) {
logger.error(x.getLocalizedMessage());
throw new DaoException(x.getClass().getName() + "::" + x.getLocalizedMessage(), x);
}
}
return subTree;
}
public void createSocialReport(int contextId, int orderBy) throws DaoException {
Statement rep = null;
ResultSet rs = null;
try {
String report = "insert into report (context_id, name, orderby, chart_type, x_text, y_text) values ";
report += "(" + contextId + ", \'VRE Social\', " + orderBy
+ ", \'column\', \'Period\', \'Social interaction\')";
logger.debug(report);
rep = conn.createStatement();
rep.execute(report);
String query = "select id from report where name=\'VRE Social\' order by id desc";
rs = rep.executeQuery(query);
if(rs.next()) {
int id = rs.getInt("id");
String reportItem = "insert into report_item (report_id, type_id_1, type_id_1_name) ";
reportItem += "values (" + id + ", 8, \'mt\'), (" + id + ", 9, \'mt\'), (" + id + ", 10, \'mt\')";
rep.execute(reportItem);
} else
throw new DaoException("No report id.", null);
} catch(SQLException ex) {
logger.error(ex.getLocalizedMessage());
throw new DaoException(ex.getLocalizedMessage(), ex.getCause());
} finally {
try {
if(rs != null) {
rs.close();
}
if(rep != null) {
rep.close();
}
} catch(SQLException ex) {
logger.error(ex.getLocalizedMessage());
}
}
}
/**
* Dummy tester
*
* @Deprecated
* @param originator
* @throws DaoException
*
*/
public void dummyTest(String originator) throws DaoException {
String insert = "insert into dummy (now, originator) values(current_timestamp, '" + originator + "')";
logger.debug(insert);
Statement s = null;
try {
s = conn.createStatement();
s.execute(insert);
} catch(SQLException ex) {
logger.error(ex.getLocalizedMessage());
throw new DaoException(ex.getLocalizedMessage(), ex.getCause());
} finally {
if(s != null)
try {
s.close();
} catch(SQLException ex) {
logger.error(ex.getLocalizedMessage());
}
}
}
}

src/main/java/org/gcube/dataharvest/dao/DaoException.java

@@ -1,26 +0,0 @@
package org.gcube.dataharvest.dao;
/**
* @author Eric Perrone (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR)
*/
public class DaoException extends Exception {
/**
* Generated Serial Version UID
*/
private static final long serialVersionUID = -6302570066137502483L;
public DaoException() {
super();
}
public DaoException(String message) {
super(message);
}
public DaoException(String message, Throwable throwable) {
super(message, throwable);
}
}

src/main/java/org/gcube/dataharvest/dao/DatabaseConnectionData.java

@@ -1,53 +0,0 @@
package org.gcube.dataharvest.dao;
/**
* @author Eric Perrone (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR)
*/
public class DatabaseConnectionData {
protected String uri;
protected String user;
protected String password;
public DatabaseConnectionData() {
this.uri = null;
this.user = null;
this.password = null;
}
public DatabaseConnectionData(String uri, String user, String password) {
this.uri = uri;
this.user = user;
this.password = password;
}
public String getURI() {
return uri;
}
public void setURI(String uri) {
this.uri = uri;
}
public String getUser() {
return user;
}
public void setUser(String user) {
this.user = user;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
@Override
public String toString() {
return this.getClass().getSimpleName() + " [uri =" + uri + ", user=" + user + ", password=" + password + "]";
}
}

src/main/java/org/gcube/dataharvest/dao/DatabaseManager.java

@@ -1,44 +0,0 @@
package org.gcube.dataharvest.dao;
import java.util.Date;
import java.util.List;
import org.gcube.dataharvest.datamodel.HarvestedData;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Eric Perrone (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR)
*/
public class DatabaseManager {
private static Logger logger = LoggerFactory.getLogger(DatabaseManager.class);
public void insertMonthlyData(Date from, Date to, List<HarvestedData> data, boolean updateFlag) {
Dao dao = null;
try {
dao = dbConnect();
dao.insertMonthlyMeasure(data, from, to, updateFlag);
} catch(Exception e) {
logger.error("", e);
} finally {
if(dao != null) {
try {
dao.disconnect();
} catch(DaoException e) {
logger.error("", e);
}
}
}
}
public Dao dbConnect() throws DaoException {
DatabaseParameterRetriever dde = new DatabaseParameterRetriever();
DatabaseConnectionData dcd = dde.retrieveDatabaseInfo();
Dao dao = new Dao();
dao.init();
dao.connect(dcd.getURI(), dcd.getUser(), dcd.getPassword());
return dao;
}
}

src/main/java/org/gcube/dataharvest/dao/DatabaseParameterRetriever.java

@@ -1,108 +0,0 @@
package org.gcube.dataharvest.dao;
import java.security.Key;
import java.util.List;
import java.util.Properties;
import org.gcube.common.encryption.StringEncrypter;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.common.resources.gcore.utils.Group;
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.gcube.resources.discovery.icclient.ICFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR)
*/
public class DatabaseParameterRetriever {
private static Logger logger = LoggerFactory.getLogger(DatabaseParameterRetriever.class);
public static final String LOCAL_DB = "LOCAL_DB";
public static final String SERVICE_ENDPOINT_CATEGORY = "Database";
public static final String SERVICE_ENDPOINT_NAME = "AccountingDashboard";
public static final String DB_URI = "DB_URI";
public static final String DB_USERNAME = "DB_USERNAME";
public static final String DB_PASSWORD = "DB_PASSWORD";
public DatabaseParameterRetriever() {
}
private static String decrypt(String encrypted, Key... key) throws Exception {
return StringEncrypter.getEncrypter().decrypt(encrypted);
}
protected void checkParameter(String parameter, String parameterName, boolean localDB) throws DaoException {
if(parameter == null || parameter.isEmpty()) {
throw new DaoException("DB " + parameterName + " cannot be null nor empty. Please check your "
+ (localDB ? "local configuration." : "ServiceEndpoint"));
}
}
public DatabaseConnectionData retrieveDatabaseInfo() throws DaoException {
Properties properties = AccountingDataHarvesterPlugin.getProperties().get();
boolean localDB = Boolean.parseBoolean(properties.getProperty(LOCAL_DB, "true"));
String uri = "";
String username = "";
String password = "";
if(localDB) {
logger.debug("Using configuration from local config");
uri = properties.getProperty(DB_URI);
username = properties.getProperty(DB_USERNAME);
password = properties.getProperty(DB_PASSWORD);
} else {
try {
String className = this.getClass().getSimpleName();
SimpleQuery query = ICFactory.queryFor(ServiceEndpoint.class);
query.addCondition(
String.format("$resource/Profile/Category/text() eq '%s'", SERVICE_ENDPOINT_CATEGORY));
query.addCondition(String.format("$resource/Profile/Name/text() eq '%s'", SERVICE_ENDPOINT_NAME));
query.addCondition(String.format("$resource/Profile/AccessPoint/Interface/Endpoint/@EntryName eq '%s'",
className));
DiscoveryClient<ServiceEndpoint> client = ICFactory.clientFor(ServiceEndpoint.class);
List<ServiceEndpoint> serviceEndpoints = client.submit(query);
if(serviceEndpoints.size()==0) {
throw new DaoException("No endpoints found to get database connection.");
}
if(serviceEndpoints.size() > 1) {
throw new DaoException("More than one endpoint found to get database connection. Not sure which one use.");
}
Group<AccessPoint> accessPoints = serviceEndpoints.get(0).profile().accessPoints();
for(AccessPoint accessPoint : accessPoints) {
if(accessPoint.name().compareTo(className) == 0) {
uri = accessPoint.address();
username = accessPoint.username();
String encryptedPassword = accessPoint.password();
password = decrypt(encryptedPassword);
}
}
} catch(Exception e) {
throw new DaoException("Error retrieving database connection", e);
}
}
checkParameter(uri, "URI", localDB);
checkParameter(username, "Username", localDB);
checkParameter(password, "Password", localDB);
return new DatabaseConnectionData(uri, username, password);
}
}

src/main/java/org/gcube/dataharvest/datamodel/HarvestedData.java

@@ -1,180 +0,0 @@
/*
*
*/
package org.gcube.dataharvest.datamodel;
import java.io.Serializable;
import java.util.Date;
/**
* The Class HarvestedData.
*
* @author Eric Perrone (ISTI - CNR)
* @author Luca Frosini
* @author Francesco Mangiacrapa (ISTI - CNR)
*/
public class HarvestedData implements Serializable {
/**
* Generated Serial Version UID
*/
private static final long serialVersionUID = 3699669951917080213L;
// public static int ACCESSESS = 1;
// public static int USERS = 2;
// public static int DATA_METHOD_DOWNLOAD = 3;
// public static int NEW_CATALOGUE_METHODS = 4;
// public static int NEW_CATALOGUE_DATASETS = 5;
// public static int NEW_CATALOGUE_DELIVERABLES = 6;
// public static int NEW_CATALOGUE_APPLICATIONS = 7;
// public static int SOCIAL_POSTS = 8;
// public static int SOCIAL_REPLIES = 9;
// public static int SOCIAL_LIKES = 10;
// public static int METHOD_INVOCATIONS = 11;
// public static int VISUAL_TOOLS = 12;
private int dataType;
private String context;
private long measure;
private Date day;
private HarvestedDataKey harvestedDataKey;
/**
* Instantiates a new harvested data.
*/
public HarvestedData() {
}
/**
* Instantiates a new harvested data.
*
* @param key the key
*/
private HarvestedData(HarvestedDataKey key){
this.harvestedDataKey = key;
setDataType(harvestedDataKey.getValue());
}
/**
* Instantiates a new harvested data.
*
* @param key the key
* @param context the context
* @param measure the measure
* @param day the day
*/
public HarvestedData(HarvestedDataKey key, String context, long measure, Date day) {
this(key);
this.context = context;
this.measure = measure;
this.day = day;
}
/**
* Instantiates a new harvested data.
*
* @param key the key
* @param context the context
* @param measure the measure
*/
public HarvestedData(HarvestedDataKey key, String context, long measure) {
this(key);
this.context = context;
this.measure = measure;
}
/**
* Sets the data type.
*
* @param dataType the new data type
*/
private void setDataType(int dataType) {
this.dataType = dataType;
}
/**
* Sets the context.
*
* @param context the new context
*/
public void setContext(String context) {
this.context = context;
}
/**
* Sets the measure.
*
* @param measure the new measure
*/
public void setMeasure(long measure) {
this.measure = measure;
}
/**
* Sets the day.
*
* @param day the new day
*/
public void setDay(Date day) {
this.day = day;
}
/**
* Gets the data type.
*
* @return the data type
*/
public int getDataType() {
return dataType;
}
/**
* Gets the context.
*
* @return the context
*/
public String getContext() {
return context;
}
/**
* Gets the measure.
*
* @return the measure
*/
public long getMeasure() {
return measure;
}
/**
* Gets the day.
*
* @return the day
*/
public Date getDay() {
return day;
}
/* (non-Javadoc)
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("HarvestedData [dataType=");
builder.append(dataType);
builder.append(", context=");
builder.append(context);
builder.append(", measure=");
builder.append(measure);
builder.append(", day=");
builder.append(day);
builder.append(", harvestedDataKey=");
builder.append(harvestedDataKey);
builder.append("]");
return builder.toString();
}
}

src/main/java/org/gcube/dataharvest/datamodel/HarvestedDataKey.java

@@ -11,32 +11,30 @@ package org.gcube.dataharvest.datamodel;
*/
public enum HarvestedDataKey {
ACCESSES(1),
USERS(2),
DATA_METHOD_DOWNLOAD(3),
NEW_CATALOGUE_METHODS(4),
NEW_CATALOGUE_DATASETS(5),
NEW_CATALOGUE_DELIVERABLES(6),
NEW_CATALOGUE_APPLICATIONS(7),
SOCIAL_POSTS(8),
SOCIAL_REPLIES(9),
SOCIAL_LIKES(10),
METHOD_INVOCATIONS(11),
VISUAL_TOOLS(12);
ACCESSES("VRE Accesses"),
USERS("VRE Users"),
DATA_METHOD_DOWNLOAD("Data/Method download"),
NEW_CATALOGUE_METHODS("New Catalogue Methods"),
NEW_CATALOGUE_DATASETS("New Catalogue Datasets"),
NEW_CATALOGUE_DELIVERABLES("New Catalogue Deliverables"),
NEW_CATALOGUE_APPLICATIONS("New Catalogue Applications"),
SOCIAL_POSTS("VRE Social Interations Posts"),
SOCIAL_REPLIES("VRE Social Interations Replies"),
SOCIAL_LIKES("VRE Social Interations Likes"),
METHOD_INVOCATIONS("VRE Methods Invocation"),
VISUAL_TOOLS("VRE Visual Tools");
private int value;
private String key;
HarvestedDataKey(int value){
this.value = value;
HarvestedDataKey(String key){
this.key = key;
}
/**
* @return the value
* @return the key
*/
public int getValue() {
return value;
public String getKey() {
return key;
}
}
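Each constant now carries a human-readable label in place of the old monthly_measure numeric id, and the label doubles as the Dimension key looked up through the plugin's cache. A small sketch of that lookup path (getDimension falls back to a synthetic Dimension when the key is not yet in the cache):

String key = HarvestedDataKey.METHOD_INVOCATIONS.getKey(); // "VRE Methods Invocation"
Dimension dimension = AccountingDataHarvesterPlugin.getDimension(key);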

src/main/java/org/gcube/dataharvest/harvester/BasicHarvester.java

@@ -1,14 +1,19 @@
package org.gcube.dataharvest.harvester;
import java.text.ParseException;
import java.time.Instant;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import org.gcube.accounting.accounting.summary.access.model.internal.Dimension;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.common.authorization.client.Constants;
import org.gcube.common.authorization.library.AuthorizationEntry;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataharvest.datamodel.HarvestedData;
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.utils.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -17,13 +22,18 @@ public abstract class BasicHarvester {
private static Logger logger = LoggerFactory.getLogger(BasicHarvester.class);
public Date start;
public Date end;
protected final Date start;
protected final Date end;
protected final Instant instant;
public BasicHarvester(Date start, Date end) throws ParseException {
this.start = start;
this.end = end;
Calendar toSetOnDB = DateUtils.dateToCalendar(start);
toSetOnDB.add(Calendar.DAY_OF_MONTH, 15);
instant = toSetOnDB.toInstant();
logger.debug("Creating {} for the period {} {} ", this.getClass().getSimpleName(), DateUtils.format(start), DateUtils.format(end));
}
@@ -44,6 +54,12 @@ public abstract class BasicHarvester {
return getCurrentContext(token);
}
public abstract List<HarvestedData> getData() throws Exception;
public abstract List<AccountingRecord> getAccountingRecords() throws Exception;
public Dimension getDimension(HarvestedDataKey harvestedDataKey) {
return AccountingDataHarvesterPlugin.getDimension(harvestedDataKey.getKey());
}
}
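With getData() replaced by getAccountingRecords(), a concrete harvester now only assembles AccountingRecord instances. A minimal, hypothetical subclass sketch, assuming the plugin has already set the thread-local ScopeDescriptor for the current context:

import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.internal.Dimension;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.harvester.BasicHarvester;

public class ExampleCountHarvester extends BasicHarvester { // hypothetical, for illustration only

	public ExampleCountHarvester(Date start, Date end) throws ParseException {
		super(start, end);
	}

	@Override
	public List<AccountingRecord> getAccountingRecords() throws Exception {
		ScopeDescriptor scopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor();
		Dimension dimension = getDimension(HarvestedDataKey.USERS);
		List<AccountingRecord> accountingRecords = new ArrayList<>();
		// 'instant' is the inherited field set in the constructor (start + 15 days).
		accountingRecords.add(new AccountingRecord(scopeDescriptor, instant, dimension, 10L));
		return accountingRecords;
	}
}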

src/main/java/org/gcube/dataharvest/harvester/MethodInvocationHarvester.java

@@ -1,12 +1,14 @@
package org.gcube.dataharvest.harvester;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.SortedMap;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.internal.Dimension;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.accounting.analytics.Filter;
import org.gcube.accounting.analytics.Info;
import org.gcube.accounting.analytics.TemporalConstraint;
@@ -17,7 +19,7 @@ import org.gcube.accounting.datamodel.AggregatedUsageRecord;
import org.gcube.accounting.datamodel.aggregation.AggregatedJobUsageRecord;
import org.gcube.accounting.datamodel.aggregation.AggregatedServiceUsageRecord;
import org.gcube.accounting.datamodel.usagerecords.ServiceUsageRecord;
import org.gcube.dataharvest.datamodel.HarvestedData;
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.utils.DateUtils;
import org.gcube.dataharvest.utils.Utils;
@@ -35,16 +37,16 @@ public class MethodInvocationHarvester extends BasicHarvester {
public static final String DATAMINER_SERVICE_NAME = "DataMiner";
public MethodInvocationHarvester(Date start, Date end) throws ParseException {
public MethodInvocationHarvester(Date start, Date end) throws Exception {
super(start, end);
}
@Override
public List<HarvestedData> getData() throws Exception {
public List<AccountingRecord> getAccountingRecords() throws Exception {
try {
List<HarvestedData> data = new ArrayList<>();
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
AccountingPersistenceQuery accountingPersistenceQuery = AccountingPersistenceQueryFactory.getInstance();
TemporalConstraint temporalConstraint = new TemporalConstraint(start.getTime(), end.getTime(),
AggregationMode.MONTHLY);
@@ -70,6 +72,9 @@ public class MethodInvocationHarvester extends BasicHarvester {
AggregatedServiceUsageRecord.class, temporalConstraint, filters, contexts, true);
}
ScopeDescriptor scopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor();
Dimension dimension = getDimension(HarvestedDataKey.METHOD_INVOCATIONS);
if(result != null) {
for(Filter filter : result.keySet()) {
SortedMap<Calendar,Info> infoMap = result.get(filter);
@@ -82,16 +87,16 @@ public class MethodInvocationHarvester extends BasicHarvester {
JSONObject jsonObject = info.getValue();
long numberOfInvocation = jsonObject.getLong(AggregatedUsageRecord.OPERATION_COUNT);
HarvestedData harvestedData = new HarvestedData(HarvestedDataKey.METHOD_INVOCATIONS, context,
numberOfInvocation);
data.add(harvestedData);
AccountingRecord accountingRecord = new AccountingRecord(scopeDescriptor, instant, dimension, numberOfInvocation);
accountingRecords.add(accountingRecord);
}
} else {
logger.error("No data found.");
}
return data;
return accountingRecords;
} catch(Exception e) {
throw e;

src/main/java/org/gcube/dataharvest/harvester/SocialInteractionsHarvester.java

@@ -1,12 +1,13 @@
package org.gcube.dataharvest.harvester;
import java.io.IOException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.gcube.dataharvest.datamodel.HarvestedData;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.utils.Utils;
import org.json.JSONArray;
@@ -28,34 +29,36 @@ public class SocialInteractionsHarvester extends SocialNetworkingHarvester {
public static final String PATH = "/2/posts/get-posts-vre?gcube-token=";
public SocialInteractionsHarvester(Date start, Date end) throws ParseException {
public SocialInteractionsHarvester(Date start, Date end) throws Exception {
super(start, end);
}
@Override
public List<HarvestedData> getData() throws Exception {
public List<AccountingRecord> getAccountingRecords() throws Exception {
String context = Utils.getCurrentContext();
try {
ArrayList<HarvestedData> data = new ArrayList<HarvestedData>();
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
getJson();
HarvestedData likesH = new HarvestedData(HarvestedDataKey.SOCIAL_LIKES, context, likes);
logger.debug("{}", likesH);
data.add(likesH);
HarvestedData postsH = new HarvestedData(HarvestedDataKey.SOCIAL_POSTS, context, posts);
logger.debug("{}", postsH);
data.add(postsH);
HarvestedData socialReplies = new HarvestedData(HarvestedDataKey.SOCIAL_REPLIES, context, replies);
logger.debug("{}", socialReplies);
data.add(socialReplies);
return data;
ScopeDescriptor scopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor();
AccountingRecord likesAR = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.SOCIAL_LIKES), (long) likes);
logger.debug("{} : {}", likesAR.getDimension().getId(), likesAR.getMeasure());
accountingRecords.add(likesAR);
AccountingRecord postsAR = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.SOCIAL_POSTS), (long) posts);
logger.debug("{} : {}", postsAR.getDimension().getId(), postsAR.getMeasure());
accountingRecords.add(postsAR);
AccountingRecord repliesAR = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.SOCIAL_REPLIES), (long) replies);
logger.debug("{} : {}", repliesAR.getDimension().getId(), repliesAR.getMeasure());
accountingRecords.add(repliesAR);
return accountingRecords;
} catch(Exception e) {
logger.error("Error Harvesting Social Interactions for context {}", context, e);
throw e;
@@ -92,5 +95,6 @@ public class SocialInteractionsHarvester extends SocialNetworkingHarvester {
}
}
}

src/main/java/org/gcube/dataharvest/harvester/SocialNetworkingHarvester.java

@@ -1,6 +1,5 @@
package org.gcube.dataharvest.harvester;
import java.text.ParseException;
import java.util.Date;
import java.util.List;
@@ -12,9 +11,9 @@ import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.gcube.resources.discovery.icclient.ICFactory;
import org.json.JSONObject;
public abstract class SocialNetworkingHarvester extends BasicHarvester{
public abstract class SocialNetworkingHarvester extends BasicHarvester {
public SocialNetworkingHarvester(Date start, Date end) throws ParseException {
public SocialNetworkingHarvester(Date start, Date end) throws Exception {
super(start, end);
}

src/main/java/org/gcube/dataharvest/harvester/VREAccessesHarvester.java

@@ -24,6 +24,8 @@ import java.util.Date;
import java.util.HashMap;
import java.util.List;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.common.encryption.StringEncrypter;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
@@ -31,8 +33,8 @@ import org.gcube.common.resources.gcore.ServiceEndpoint.Property;
import org.gcube.common.resources.gcore.utils.Group;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
import org.gcube.dataharvest.datamodel.AnalyticsReportCredentials;
import org.gcube.dataharvest.datamodel.HarvestedData;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.datamodel.VREAccessesReportRow;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
@@ -54,7 +56,6 @@ import com.google.api.services.analyticsreporting.v4.AnalyticsReporting;
import com.google.api.services.analyticsreporting.v4.AnalyticsReportingScopes;
import com.google.api.services.analyticsreporting.v4.model.DateRange;
import com.google.api.services.analyticsreporting.v4.model.DateRangeValues;
import com.google.api.services.analyticsreporting.v4.model.Dimension;
import com.google.api.services.analyticsreporting.v4.model.GetReportsRequest;
import com.google.api.services.analyticsreporting.v4.model.GetReportsResponse;
import com.google.api.services.analyticsreporting.v4.model.Metric;
@@ -83,10 +84,12 @@ public class VREAccessesHarvester extends BasicHarvester {
}
@Override
public List<HarvestedData> getData() throws Exception {
public List<AccountingRecord> getAccountingRecords() throws Exception {
try {
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
ArrayList<HarvestedData> data = new ArrayList<HarvestedData>();
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
int measure = 0;
ScopeBean scopeBean = new ScopeBean(context);
@@ -106,10 +109,14 @@ public class VREAccessesHarvester extends BasicHarvester {
}
}
HarvestedData harvest = new HarvestedData(HarvestedDataKey.ACCESSES, context, measure);
logger.debug(harvest.toString());
data.add(harvest);
return data;
ScopeDescriptor scopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor();
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.ACCESSES), (long) measure);
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
accountingRecords.add(ar);
return accountingRecords;
} catch(Exception e) {
throw e;
}
@@ -172,7 +179,7 @@ public class VREAccessesHarvester extends BasicHarvester {
// Create the Metrics object.
Metric sessions = new Metric().setExpression("ga:pageviews").setAlias("pages");
Dimension pageTitle = new Dimension().setName("ga:pagePath");
com.google.api.services.analyticsreporting.v4.model.Dimension pageTitle = new com.google.api.services.analyticsreporting.v4.model.Dimension().setName("ga:pagePath");
for(String view : viewIDs) {
logger.info("Getting data from Google Analytics for viewid: " + view);

src/main/java/org/gcube/dataharvest/harvester/VREUsersHarvester.java

@@ -1,14 +1,14 @@
package org.gcube.dataharvest.harvester;
import java.io.IOException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.gcube.dataharvest.datamodel.HarvestedData;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.utils.Utils;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -24,20 +24,26 @@ public class VREUsersHarvester extends SocialNetworkingHarvester {
public static final String PATH = "/2/users/get-all-usernames?gcube-token=";
public VREUsersHarvester(Date start, Date end) throws ParseException {
public VREUsersHarvester(Date start, Date end) throws Exception {
super(start, end);
}
@Override
public List<HarvestedData> getData() throws Exception {
public List<AccountingRecord> getAccountingRecords() throws Exception {
try {
String context = Utils.getCurrentContext();
// String context = Utils.getCurrentContext();
int measure = get();
HarvestedData harvest = new HarvestedData(HarvestedDataKey.USERS, context, measure);
logger.debug(harvest.toString());
ArrayList<HarvestedData> data = new ArrayList<HarvestedData>();
data.add(harvest);
return data;
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
ScopeDescriptor scopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor();
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.USERS), (long) measure);
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
accountingRecords.add(ar);
return accountingRecords;
} catch(Exception e) {
throw e;
}

src/main/java/org/gcube/dataharvest/harvester/sobigdata/DataMethodDownloadHarvester.java

@@ -8,6 +8,8 @@ import java.util.List;
import java.util.SortedSet;
import org.apache.commons.lang.Validate;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.common.homelibrary.home.Home;
import org.gcube.common.homelibrary.home.HomeLibrary;
import org.gcube.common.homelibrary.home.HomeManager;
@@ -17,7 +19,7 @@ import org.gcube.common.homelibrary.home.workspace.accounting.AccountingEntry;
import org.gcube.common.homelibrary.jcr.repository.JCRRepository;
import org.gcube.common.homelibrary.jcr.workspace.JCRWorkspace;
import org.gcube.common.homelibrary.jcr.workspace.JCRWorkspaceItem;
import org.gcube.dataharvest.datamodel.HarvestedData;
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.utils.DateUtils;
import org.gcube.dataharvest.utils.Utils;
@@ -54,7 +56,7 @@ public class DataMethodDownloadHarvester extends SoBigDataHarvester {
* @see org.gcube.dataharvest.harvester.BasicHarvester#getData()
*/
@Override
public List<HarvestedData> getData() throws Exception {
public List<AccountingRecord> getAccountingRecords() throws Exception {
String defaultContext = Utils.getCurrentContext();
logger.debug("The context is {}", defaultContext);
@@ -79,26 +81,36 @@ public class DataMethodDownloadHarvester extends SoBigDataHarvester {
logger.debug("Analyzing {} in the period [{} to {}] starting from root {}", defaultContext,
DateUtils.format(start), DateUtils.format(end), item.getName());
HarvestedData defaultHarvesteData = new HarvestedData(HarvestedDataKey.DATA_METHOD_DOWNLOAD, defaultContext,
count);
List<HarvestedData> data = new ArrayList<HarvestedData>();
ScopeDescriptor defaultScopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor();
AccountingRecord defaultHarvesteData = new AccountingRecord(defaultScopeDescriptor, instant, getDimension(HarvestedDataKey.DATA_METHOD_DOWNLOAD), (long) count);
logger.debug("{} : {}", defaultHarvesteData.getDimension().getId(), defaultHarvesteData.getMeasure());
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
for(WorkspaceItem children : item.getChildren()) {
count = 0; //resettings the counter
HarvestedData harvestedData;
//HarvestedData harvestedData;
//Getting statistics for folder
if(children.isFolder()) {
logger.debug("Getting statistics for folder {}", children.getName());
getStats(children, start, end);
String normalizedName = children.getName().replaceAll("[^A-Za-z0-9]", "");
String scope = mapWsFolderNameToVRE.get(normalizedName);
String context = mapWsFolderNameToVRE.get(normalizedName);
//Checking if it is a VRE name to right accounting...
if(scope != null && !scope.isEmpty()) {
logger.debug("Found context '{}' matching with normalized VRE name {} ", scope, normalizedName);
harvestedData = new HarvestedData(HarvestedDataKey.DATA_METHOD_DOWNLOAD, scope, count);
data.add(harvestedData);
logger.debug("Added data {}", harvestedData);
if(context != null && !context.isEmpty()) {
logger.debug("Found context '{}' matching with normalized VRE name {} ", context, normalizedName);
ScopeDescriptor scopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor(context);
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.DATA_METHOD_DOWNLOAD), (long) count);
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
accountingRecords.add(ar);
} else {
logger.debug(
"No scope found matching the folder name {}, accounting its stats in the default context {}",
@@ -112,12 +124,13 @@ public class DataMethodDownloadHarvester extends SoBigDataHarvester {
}
//ADDING DEFAULT ACCOUNTING
data.add(defaultHarvesteData);
accountingRecords.add(defaultHarvesteData);
logger.debug("In the period [from {} to {} ] returning workspace accouting data {}", DateUtils.format(start),
DateUtils.format(end), data);
DateUtils.format(end), accountingRecords);
return data;
return accountingRecords;
} catch(Exception e) {
throw e;

src/main/java/org/gcube/dataharvest/harvester/sobigdata/ResourceCatalogueHarvester.java

@@ -8,7 +8,10 @@ import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import org.gcube.dataharvest.datamodel.HarvestedData;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.internal.Dimension;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.utils.DateUtils;
import org.gcube.dataharvest.utils.Utils;
@@ -27,15 +30,15 @@ import org.slf4j.LoggerFactory;
* @author Francesco Mangiacrapa(ISTI - CNR)
*/
public class ResourceCatalogueHarvester extends SoBigDataHarvester {
private static final String AND = " AND ";
public static int ROWS = 500;
private static Logger logger = LoggerFactory.getLogger(ResourceCatalogueHarvester.class);
protected String solrBaseUrl;
/**
* Instantiates a new resource catalogue harvester.
*
@@ -48,39 +51,39 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
public ResourceCatalogueHarvester(Date start, Date end, SortedSet<String> contexts) throws Exception {
super(start, end, contexts);
}
/**
* Gets the solr base url.
*
* @return the solr base url
*/
//TODO @LUCA FROSINI
protected String getSolrBaseUrl() {
return "https://ckan-solr-d4s.d4science.org/solr/sobigdata";
}
@Override
public List<HarvestedData> getData() throws Exception {
List<HarvestedData> data = new ArrayList<HarvestedData>();
public List<AccountingRecord> getAccountingRecords() throws Exception {
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
//FOR EACH SYSTEM_TYPE
for (String systemType : mapSystemTypeToDBEntry.keySet()) {
for(String systemType : mapSystemTypeToDBEntry.keySet()) {
List<String> solrParameters = new ArrayList<String>(1);
solrParameters.add("extras_systemtype:\""+systemType+"\"");
solrParameters.add("extras_systemtype:\"" + systemType + "\"");
//EXECUTING THE QUERY IN THE PERIOD
String queryResult = executeQueryFor(solrParameters, start, end, "groups");
HarvestedDataKey insertDBKey = HarvestedDataKey.valueOf(mapSystemTypeToDBEntry.get(systemType));
logger.debug("Creating statistics for type {} using db key {}", systemType, insertDBKey);
data.addAll(buildListOfHarvestedData(queryResult, insertDBKey));
accountingRecords.addAll(buildListOfHarvestedData(queryResult, insertDBKey));
}
return data;
return accountingRecords;
}
/**
* Builds the list of harvested data.
*
@@ -89,20 +92,20 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
* @return the list
* @throws Exception the exception
*/
private List<HarvestedData> buildListOfHarvestedData(String json, HarvestedDataKey harvestKey) throws Exception {
private List<AccountingRecord> buildListOfHarvestedData(String json, HarvestedDataKey harvestKey) throws Exception {
JSONObject jsonObject = new JSONObject(json);
JSONObject responseHeader = jsonObject.getJSONObject("responseHeader");
int status = responseHeader.getInt("status");
if(status != 0) {
throw new Exception("Query Deliverable in error: status " + status);
}
JSONObject response = jsonObject.getJSONObject("response");
int numFound = response.getInt("numFound");
Map<String, Integer> counter = new HashMap<String, Integer>(mapCatalogueGroupToVRE.size()+1);
for (String groupName : mapCatalogueGroupToVRE.keySet()) {
Map<String,Integer> counter = new HashMap<String,Integer>(mapCatalogueGroupToVRE.size() + 1);
for(String groupName : mapCatalogueGroupToVRE.keySet()) {
counter.put(groupName, 0);
}
@@ -112,7 +115,7 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
int catalogueContextCount = 0;
logger.debug("For {} has found {} doc/s", harvestKey, numFound);
if(numFound > 0) {
JSONArray docs = response.getJSONArray("docs");
for(Object item : docs) {
JSONObject doc = (JSONObject) item;
@@ -124,44 +127,50 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
logger.debug("GroupName found {}", catalogueGroupName);
//counterByGroup(groupItem);
Integer currentCount = counter.get(catalogueGroupName);
if(currentCount!=null)
counter.put(catalogueGroupName, currentCount+1);
else{
logger.warn("No mapping found for Catalogue-Group Name {} from VREs. Accounting it in the catalogue context {}", catalogueGroupName, catalogueContext);
if(currentCount != null)
counter.put(catalogueGroupName, currentCount + 1);
else {
logger.warn(
"No mapping found for Catalogue-Group Name {} from VREs. Accounting it in the catalogue context {}",
catalogueGroupName, catalogueContext);
catalogueContextCount++;
}
break; //Accounting the item only in the first group found
}
} catch(JSONException x) {
logger.debug("Document without groups, accounting it in the catalogue context");
catalogueContextCount++;
} catch (Exception e) {
} catch(Exception e) {
logger.warn("Skipping parsing error", e);
}
}
}
List<HarvestedData> data = new ArrayList<HarvestedData>();
String context = Utils.getCurrentContext();
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
logger.trace("The context {} has count ", context, catalogueContextCount);
logger.trace("The context {} has count ", catalogueContext, catalogueContextCount);
data.add(new HarvestedData(harvestKey, context, catalogueContextCount));
ScopeDescriptor catalogueScopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor(catalogueContext);
Dimension dimension = getDimension(harvestKey);
for (String key : counter.keySet()) {
AccountingRecord ar = new AccountingRecord(catalogueScopeDescriptor, instant, dimension, (long) catalogueContextCount);
accountingRecords.add(ar);
for(String key : counter.keySet()) {
logger.trace("The group {} has count {}", key, counter.get(key));
data.add(new HarvestedData(harvestKey, mapCatalogueGroupToVRE.get(key), counter.get(key)));
ScopeDescriptor sd = AccountingDataHarvesterPlugin.getScopeDescriptor(mapCatalogueGroupToVRE.get(key));
AccountingRecord accountingRecord = new AccountingRecord(sd, instant, dimension, (long) counter.get(key));
accountingRecords.add(accountingRecord);
}
logger.debug("For {} in the period [from {} to {}] returning accouting data :", harvestKey, DateUtils.format(start), DateUtils.format(end), data);
return data;
logger.debug("For {} in the period [from {} to {}] returning accouting data :", harvestKey,
DateUtils.format(start), DateUtils.format(end), accountingRecords);
return accountingRecords;
}
/**
* Execute query.
*
@@ -174,35 +183,37 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
*/
//TODO THIS METHOD MUST BE OPTIMIZED USING HttpSolrClient
//We are not considering the rows (the number of documents returned from Solr by default)
public String executeQueryFor(List<String> solrParameters, Date startDate, Date endDate, String flValue) throws Exception {
String query = getSolrBaseUrl().endsWith("/")? getSolrBaseUrl():getSolrBaseUrl()+"/";
query+="select?";
String q="";
public String executeQueryFor(List<String> solrParameters, Date startDate, Date endDate, String flValue)
throws Exception {
String query = getSolrBaseUrl().endsWith("/") ? getSolrBaseUrl() : getSolrBaseUrl() + "/";
query += "select?";
String q = "";
//ADDING START AND END DATE IF ARE VALIDS
if(startDate!=null && endDate!=null){
q+= "metadata_created:[" + DateUtils.dateToStringWithTZ(startDate) + " TO " + DateUtils.dateToStringWithTZ(endDate) + "]";
if(startDate != null && endDate != null) {
q += "metadata_created:[" + DateUtils.dateToStringWithTZ(startDate) + " TO "
+ DateUtils.dateToStringWithTZ(endDate) + "]";
}
//ADDING PARAMETERS
if(solrParameters!=null && solrParameters.size()>0){
q+= q.isEmpty()?"":AND;
for (int i = 0; i < solrParameters.size()-1; i++) {
q+= solrParameters.get(i)+AND;
if(solrParameters != null && solrParameters.size() > 0) {
q += q.isEmpty() ? "" : AND;
for(int i = 0; i < solrParameters.size() - 1; i++) {
q += solrParameters.get(i) + AND;
}
q+= solrParameters.get(solrParameters.size()-1);
q += solrParameters.get(solrParameters.size() - 1);
}
query += "q=" + UrlEncoderUtil.encodeQuery(q) + "&wt=json&indent=true&rows="+ROWS;
query += flValue!=null && !flValue.isEmpty()?"&fl="+UrlEncoderUtil.encodeQuery(flValue):"";
query += "q=" + UrlEncoderUtil.encodeQuery(q) + "&wt=json&indent=true&rows=" + ROWS;
query += flValue != null && !flValue.isEmpty() ? "&fl=" + UrlEncoderUtil.encodeQuery(flValue) : "";
logger.debug("\nPerforming query {}", query);
String jsonResult = Utils.getJson(query);
logger.trace("Response is {}", jsonResult);
return jsonResult;
}
}
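For reference, a sketch of the query URL executeQueryFor assembles. The base URL, the AND separator value, the ROWS constant and the parameter values below are assumptions for illustration, and java.net.URLEncoder stands in for UrlEncoderUtil:

import java.net.URLEncoder;
import java.util.Arrays;
import java.util.List;

public class SolrQuerySketch {
    public static void main(String[] args) throws Exception {
        // Stand-ins for the class fields/constants (hypothetical values)
        String solrBaseUrl = "https://solr.example.org/solr/catalogue/";
        String AND = " AND ";
        int ROWS = 10000000;

        List<String> solrParameters = Arrays.asList("extras_systemtype:\"SoBigData.eu\"");
        String dateRange = "metadata_created:[2018-04-01T00:00:00Z TO 2018-05-01T00:00:00Z]";

        // Same shape as executeQueryFor: date range, then AND-joined parameters
        String q = dateRange;
        for (String p : solrParameters) {
            q += AND + p;
        }
        String query = solrBaseUrl + "select?q=" + URLEncoder.encode(q, "UTF-8")
                + "&wt=json&indent=true&rows=" + ROWS
                + "&fl=" + URLEncoder.encode("groups", "UTF-8");
        System.out.println(query);
    }
}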
@ -1,12 +1,14 @@
package org.gcube.dataharvest.harvester.sobigdata;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.SortedMap;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.internal.Dimension;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.accounting.analytics.Filter;
import org.gcube.accounting.analytics.Info;
import org.gcube.accounting.analytics.TemporalConstraint;
@ -16,7 +18,7 @@ import org.gcube.accounting.analytics.persistence.AccountingPersistenceQueryFact
import org.gcube.accounting.datamodel.AggregatedUsageRecord;
import org.gcube.accounting.datamodel.aggregation.AggregatedServiceUsageRecord;
import org.gcube.accounting.datamodel.usagerecords.ServiceUsageRecord;
import org.gcube.dataharvest.datamodel.HarvestedData;
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.harvester.BasicHarvester;
import org.gcube.dataharvest.utils.DateUtils;
@ -35,16 +37,16 @@ public class TagMeMethodInvocationHarvester extends BasicHarvester {
public static final String TAG_METHOD = "tag";
public TagMeMethodInvocationHarvester(Date start, Date end) throws ParseException {
public TagMeMethodInvocationHarvester(Date start, Date end) throws Exception {
super(start, end);
}
@Override
public List<HarvestedData> getData() throws Exception {
public List<AccountingRecord> getAccountingRecords() throws Exception {
try {
List<HarvestedData> data = new ArrayList<>();
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
AccountingPersistenceQuery accountingPersistenceQuery = AccountingPersistenceQueryFactory.getInstance();
TemporalConstraint temporalConstraint = new TemporalConstraint(start.getTime(), end.getTime(),
AggregationMode.MONTHLY);
@ -60,6 +62,9 @@ public class TagMeMethodInvocationHarvester extends BasicHarvester {
SortedMap<Filter,SortedMap<Calendar,Info>> result = accountingPersistenceQuery.getContextTimeSeries(
AggregatedServiceUsageRecord.class, temporalConstraint, filters, contexts, true);
ScopeDescriptor scopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor();
Dimension dimension = getDimension(HarvestedDataKey.METHOD_INVOCATIONS);
if(result != null) {
for(Filter filter : result.keySet()) {
SortedMap<Calendar,Info> infoMap = result.get(filter);
@ -72,17 +77,17 @@ public class TagMeMethodInvocationHarvester extends BasicHarvester {
JSONObject jsonObject = info.getValue();
long numberOfInvocation = jsonObject.getLong(AggregatedUsageRecord.OPERATION_COUNT);
HarvestedData harvestedData = new HarvestedData(HarvestedDataKey.METHOD_INVOCATIONS, context,
numberOfInvocation);
data.add(harvestedData);
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, dimension, numberOfInvocation);
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
accountingRecords.add(ar);
}
} else {
logger.error("No data found.");
}
return data;
return accountingRecords;
} catch(Exception e) {
throw e;
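The refactored getAccountingRecords() folds the per-context time series returned by getContextTimeSeries(...) into one AccountingRecord per (context, month) pair. A standalone sketch of that fold, with plain maps standing in for the gCube Filter/Info types (all names hypothetical):

import java.util.ArrayList;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.SortedMap;
import java.util.TreeMap;

public class TimeSeriesFoldSketch {
    public static void main(String[] args) {
        // context -> (month -> operation count); stands in for the gCube
        // SortedMap<Filter, SortedMap<Calendar, Info>> returned by getContextTimeSeries(...)
        SortedMap<String, SortedMap<Calendar, Long>> result = new TreeMap<String, SortedMap<Calendar, Long>>();
        SortedMap<Calendar, Long> series = new TreeMap<Calendar, Long>();
        series.put(new GregorianCalendar(2018, Calendar.APRIL, 1), 12345L);
        result.put("/SoBigData/TagMe", series);

        // One "accounting record" (here just a log line) per (context, month) pair
        List<String> records = new ArrayList<String>();
        for (String context : result.keySet()) {
            for (Calendar month : result.get(context).keySet()) {
                long numberOfInvocation = result.get(context).get(month);
                records.add(context + " " + month.getTime() + " -> " + numberOfInvocation);
            }
        }
        for (String record : records) {
            System.out.println(record);
        }
    }
}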
@ -99,14 +99,14 @@ public class DateUtils {
return aggregationEndDate.getTime();
}
/* OLD functions of Eric Perrone (ISTI - CNR) */
public static Calendar dateToCalendar(Date date) {
Calendar calendar = DateUtils.getUTCCalendarInstance();
calendar.setTime(date);
return calendar;
}
/* OLD functions of Eric Perrone (ISTI - CNR) */
public static String format(Date date) {
return DateUtils.LAUNCH_DATE_FORMAT.format(date);
}
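A short usage fragment for the two helpers above (not a full test class; it assumes, as the surrounding code suggests, that getUTCCalendarInstance() returns a UTC calendar and LAUNCH_DATE_FORMAT is the plugin's display format):

Date now = new Date();
Calendar cal = DateUtils.dateToCalendar(now); // UTC calendar positioned at 'now'
String label = DateUtils.format(now);         // rendered with LAUNCH_DATE_FORMAT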
@ -8,14 +8,16 @@ import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.TimeUnit;
import org.gcube.accounting.accounting.summary.access.AccountingDao;
import org.gcube.accounting.accounting.summary.access.model.internal.Dimension;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.common.scope.impl.ScopeBean.Type;
import org.gcube.dataharvest.dao.DatabaseManager;
import org.gcube.dataharvest.datamodel.HarvestedData;
import org.gcube.dataharvest.harvester.MethodInvocationHarvester;
import org.gcube.dataharvest.harvester.SocialInteractionsHarvester;
import org.gcube.dataharvest.harvester.VREAccessesHarvester;
@ -53,7 +55,29 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
return contexts;
}
// @Test
@Test
public void getDimensions() {
try {
Utils.setContext(ROOT);
AccountingDao dao = AccountingDao.get();
Set<Dimension> dimensionSet = dao.getDimensions();
for(Dimension d : dimensionSet) {
logger.debug("{} - {} - {} - {}", d.getId(), d.getGroup(), d.getAggregatedMeasure(), d.getLabel());
}
logger.info("End.");
} catch(Exception e) {
logger.error("", e);
}
}
@Test
public void launch() {
try {
@ -71,7 +95,7 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
inputs.put(AccountingDataHarvesterPlugin.MEASURE_TYPE_INPUT_PARAMETER, aggregationType.name());
inputs.put(AccountingDataHarvesterPlugin.GET_VRE_USERS_INPUT_PARAMETER, true);
inputs.put(AccountingDataHarvesterPlugin.RERUN_INPUT_PARAMETER, true);
inputs.put(AccountingDataHarvesterPlugin.DRY_RUN_INPUT_PARAMETER, false);
inputs.put(AccountingDataHarvesterPlugin.DRY_RUN_INPUT_PARAMETER, true);
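// Dry-run now defaults to true in the test: records are harvested and logged
// but (presumably) not persisted, so a test launch cannot pollute the accounting DB.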
/*
Calendar from = DateUtils.getStartCalendar(2016, Calendar.SEPTEMBER, 1);
@ -134,7 +158,8 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
try {
Utils.setContext(ROOT);
DatabaseManager dbaseManager = new DatabaseManager();
//DatabaseManager dbaseManager = new DatabaseManager();
AccountingDao dao = AccountingDao.get();
DataHarvestPluginDeclaration dataHarvestPluginDeclaration = new DataHarvestPluginDeclaration();
@ -158,7 +183,8 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
logger.debug("Harvesting from {} to {}", DateUtils.format(start), DateUtils.format(end));
ArrayList<HarvestedData> data = new ArrayList<HarvestedData>();
// ArrayList<HarvestedData> data = new ArrayList<HarvestedData>();
ArrayList<AccountingRecord> accountingRecords = new ArrayList<>();
VREAccessesHarvester vreAccessesHarvester = null;
@ -196,17 +222,24 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
logger.info("Not Harvesting VREs Accesses for {} from {} to {}", context, DateUtils.format(start), DateUtils.format(end));
} else {
// Collecting Google Analytics Data for VREs Accesses
List<HarvestedData> harvested = vreAccessesHarvester.getData();
List<AccountingRecord> harvested = vreAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
/*
List<AccountingRecord> harvested = vreAccessesHarvester.getData();
data.addAll(harvested);
*/
}
} catch(Exception e) {
logger.error("Error harvesting Social Interactions for {}", context, e);
}
}
logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end), data);
// logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end), data);
logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end), accountingRecords);
Utils.setContext(ROOT);
dbaseManager.insertMonthlyData(start, end, data, true);
dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
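// Note: toArray(new AccountingRecord[1]) leaves a single null element when the
// list is empty; toArray(new AccountingRecord[0]) is the safer, idiomatic form.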
// dbaseManager.insertMonthlyData(start, end, data, true);
Thread.sleep(TimeUnit.SECONDS.toMillis(10));
@ -295,7 +328,8 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
VREAccessesHarvester vreAccessesHarvester = null;
ArrayList<HarvestedData> data = new ArrayList<HarvestedData>();
// ArrayList<HarvestedData> data = new ArrayList<HarvestedData>();
ArrayList<AccountingRecord> accountingRecords = new ArrayList<>();
for(String context : contexts) {
// Setting the token for the context
@ -331,15 +365,21 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
logger.info("Not Harvesting VREs Accesses for {} from {} to {}", context, DateUtils.format(start), DateUtils.format(end));
} else {
// Collecting Google Analytics Data for VREs Accesses
List<AccountingRecord> harvested = vreAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
/*
List<HarvestedData> harvested = vreAccessesHarvester.getData();
data.addAll(harvested);
*/
}
} catch(Exception e) {
logger.error("Error harvesting Social Interactions for {}", context, e);
}
}
logger.debug("{}", data);
logger.debug("{}", accountingRecords);
} catch(Exception e) {
logger.error("", e);
@ -351,7 +391,8 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
try {
Utils.setContext(ROOT);
DatabaseManager dbaseManager = new DatabaseManager();
//DatabaseManager dbaseManager = new DatabaseManager();
AccountingDao dao = AccountingDao.get();
DataHarvestPluginDeclaration dataHarvestPluginDeclaration = new DataHarvestPluginDeclaration();
@ -378,8 +419,8 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
logger.debug("Harvesting Social Interaction from {} to {}", DateUtils.format(start), DateUtils.format(end));
ArrayList<HarvestedData> data = new ArrayList<HarvestedData>();
// ArrayList<HarvestedData> data = new ArrayList<HarvestedData>();
ArrayList<AccountingRecord> accountingRecords = new ArrayList<>();
for(String context : contexts) {
// Setting the token for the context
@ -388,16 +429,23 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
// Collecting info on social (posts, replies and likes)
logger.info("Going to harvest Social Interactions for {}", context);
SocialInteractionsHarvester socialHarvester = new SocialInteractionsHarvester(start, end);
List<AccountingRecord> harvested = socialHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
/*
List<HarvestedData> harvested = socialHarvester.getData();
data.addAll(harvested);
*/
} catch(Exception e) {
logger.error("Error harvesting Social Interactions for {}", context, e);
}
}
logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end), data);
//logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end), data);
logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end), accountingRecords);
Utils.setContext(ROOT);
dbaseManager.insertMonthlyData(start, end, data, true);
dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
// dbaseManager.insertMonthlyData(start, end, data, true);
@ -423,7 +471,11 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
MethodInvocationHarvester methodInvocationHarvester = new MethodInvocationHarvester(start, end);
List<HarvestedData> harvestedData = methodInvocationHarvester.getData();
List<AccountingRecord> harvestedData = methodInvocationHarvester.getAccountingRecords();
/*
List<HarvestedData> harvested = methodInvocationHarvester.getData();
*/
logger.debug("{}", harvestedData);
@ -444,7 +496,10 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
TagMeMethodInvocationHarvester methodInvocationHarvester = new TagMeMethodInvocationHarvester(start, end);
List<HarvestedData> harvestedData = methodInvocationHarvester.getData();
List<AccountingRecord> harvestedData = methodInvocationHarvester.getAccountingRecords();
/*
List<HarvestedData> harvested = methodInvocationHarvester.getData();
*/
logger.debug("{}", harvestedData);
@ -475,7 +530,11 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
VREUsersHarvester vreUsersHarvester = new VREUsersHarvester(start, end);
List<AccountingRecord> harvested = vreUsersHarvester.getAccountingRecords();
/*
List<HarvestedData> harvested = vreUsersHarvester.getData();
*/
logger.info("Harvested Data from {} to {} : {}", DateUtils.format(start), DateUtils.format(end), harvested);
org.gcube.dataharvest.utils.Utils.setContext(ROOT);
@ -534,8 +593,11 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
SortedSet<String> contexts = getContexts();
ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end, contexts);
List<AccountingRecord> data = resourceCatalogueHarvester.getAccountingRecords();
/*
List<HarvestedData> data = resourceCatalogueHarvester.getData();
*/
logger.debug("{}", data);
} catch(Exception e) {
@ -563,9 +625,12 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
SortedSet<String> contexts = getContexts();
DataMethodDownloadHarvester resourceCatalogueHarvester = new DataMethodDownloadHarvester(start, end, contexts);
List<HarvestedData> data = resourceCatalogueHarvester.getData();
DataMethodDownloadHarvester dataMethodDownloadHarvester = new DataMethodDownloadHarvester(start, end, contexts);
List<AccountingRecord> data = dataMethodDownloadHarvester.getAccountingRecords();
/*
List<HarvestedData> data = dataMethodDownloadHarvester.getData();
*/
logger.debug("{}", data);
} catch(Exception e) {
@ -1,54 +0,0 @@
package org.gcube.dataharvest.dao;
import java.util.LinkedHashMap;
import java.util.Properties;
import java.util.SortedSet;
import java.util.TreeSet;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
import org.gcube.dataharvest.DataHarvestPluginDeclaration;
import org.gcube.dataharvest.utils.ContextTest;
import org.gcube.resourcemanagement.support.server.managers.context.ContextManager;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DaoTests extends ContextTest {
private static Logger logger = LoggerFactory.getLogger(DaoTests.class);
public static SortedSet<String> getContexts() throws Exception{
SortedSet<String> contexts = new TreeSet<>();
LinkedHashMap<String,ScopeBean> map = ContextManager.readContexts();
for(String scope : map.keySet()) {
try {
String context = map.get(scope).toString();
contexts.add(context);
}catch (Exception e) {
throw e;
}
}
return contexts;
}
@Test
public void testInsertMissingContext() throws Exception {
AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(new DataHarvestPluginDeclaration());
Properties properties = accountingDataHarvesterPlugin.getConfigParameters();
AccountingDataHarvesterPlugin.getProperties().set(properties);
DatabaseManager dbaseManager = new DatabaseManager();
Dao dao = dbaseManager.dbConnect();
String[] contexts = new String[]{"/d4science.research-infrastructures.eu", "/d4science.research-infrastructures.eu/gCubeApps/rScience", "/d4science.research-infrastructures.eu/gCubeApps"};
for(String contextFullname : contexts) {
int id = dao.getOrInsertContextId(contextFullname);
logger.debug("{} is is {}", contextFullname, id);
}
}
}