Refs #11756: Refactor DataHarvesterPlugin to support scheduled execution from smart-executor

Task-Url: https://support.d4science.org/issues/11756

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/accounting/accounting-dashboard-harvester-se-plugin@167516 82a268e6-3cf1-43bd-a215-b396298e98cf
Luca Frosini 2018-05-16 13:08:25 +00:00
parent 06fddf107f
commit 1753289cd5
7 changed files with 225 additions and 40 deletions
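After the refactoring the plugin is driven entirely by the smart-executor inputs map (from, to, test). A minimal launch sketch, mirroring the AccountingDataHarvesterPluginTest added in this commit (the dates are illustrative; when test is true the harvested data is not persisted):

    // Sketch only: mirrors the test added in this commit, not a scheduler integration.
    Calendar from = AccountingDataHarvesterPluginTest.getAggregationStartCalendar(2018, Calendar.APRIL, 1);
    Calendar to = AccountingDataHarvesterPluginTest.getAggregationStartCalendar(2018, Calendar.APRIL, 30);

    Map<String, Object> inputs = new HashMap<>();
    inputs.put(AccountingDataHarvesterPlugin.PARAMETER_FROM, from.getTime()); // java.util.Date, start of the harvested period
    inputs.put(AccountingDataHarvesterPlugin.PARAMETER_TO, to.getTime());     // java.util.Date, end of the harvested period
    inputs.put(AccountingDataHarvesterPlugin.TEST, true);                     // Boolean: when true, nothing is written to the DB

    AccountingDataHarvesterPlugin plugin =
            new AccountingDataHarvesterPlugin(new DataHarvestPluginDeclaration());
    plugin.launch(inputs);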

pom.xml

@@ -118,6 +118,19 @@
<version>20171018</version>
<scope>compile</scope>
</dependency>
<!-- Test Dependencies -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.11</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.0.13</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>

src/main/java/org/gcube/dataharvest/AccountingDataHarvesterPlugin.java

@@ -1,30 +1,29 @@
package org.gcube.dataharvest;
import java.util.Date;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.gcube.dataharvest.DataHarvestPluginDeclaration;
import org.gcube.vremanagement.executor.exception.InputsNullException;
import org.gcube.vremanagement.executor.exception.InvalidInputsException;
import org.gcube.vremanagement.executor.plugin.Plugin;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.gcube.dataharvest.dao.*;
import org.gcube.dataharvest.dao.DatabaseManager;
import org.gcube.dataharvest.datamodel.Harvest;
import org.gcube.dataharvest.harvester.DataMethodDownloadHarvester;
import org.gcube.dataharvest.harvester.MethodInvocationHarvester;
import org.gcube.dataharvest.harvester.ResourceCatalogueHarvester;
import org.gcube.dataharvest.harvester.SocialHarvester;
import org.gcube.dataharvest.harvester.VreUsersHarvester;
import org.gcube.vremanagement.executor.plugin.Plugin;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDeclaration> {
private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterPlugin.class);
public static final String PARAMETER_FROM = "from";
public static final String PARAMETER_TO = "to";
public static final String TEST = "test";
private boolean testMode = false, updateFlag = false;
private Date dateFrom, dateTo;
@@ -62,7 +61,7 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
}
dateTo = (Date)to;
testMode = (inputs.get(TEST) == null) ? false : true;
testMode = (inputs.get(TEST) == null) ? false : (boolean) inputs.get(TEST);
logger.debug("TEST mode is " + testMode);
setTimePeriod();
@@ -72,7 +71,9 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
// collecting info on VRE users
VreUsersHarvester vreUsersHarvester = new VreUsersHarvester(dateFrom, dateTo);
List<Harvest> users = vreUsersHarvester.getData();
dbaseManager.insertMonthlyData((Date) dateFrom, (Date) dateTo, users, updateFlag);
if(!testMode) {
dbaseManager.insertMonthlyData((Date) dateFrom, (Date) dateTo, users, updateFlag);
}
} catch (Exception x) {
logger.error(x.getLocalizedMessage());
}
@@ -81,7 +82,9 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
// collecting info on Res. Catalogue (Dataset, Application, Deliverables, Methods)
ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(dateFrom, dateTo);
List<Harvest> res = resourceCatalogueHarvester.getData();
dbaseManager.insertMonthlyData((Date) dateFrom, (Date) dateTo, res, updateFlag);
if(!testMode) {
dbaseManager.insertMonthlyData((Date) dateFrom, (Date) dateTo, res, updateFlag);
}
} catch (Exception x) {
logger.error(x.getLocalizedMessage());
}
@@ -90,7 +93,9 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
// collecting info on Data/Method download
DataMethodDownloadHarvester dataMethodDownloadHarvester = new DataMethodDownloadHarvester(dateFrom, dateTo);
List<Harvest> res = dataMethodDownloadHarvester.getData();
dbaseManager.insertMonthlyData((Date) dateFrom, (Date) dateTo, res, updateFlag);
if(!testMode) {
dbaseManager.insertMonthlyData((Date) dateFrom, (Date) dateTo, res, updateFlag);
}
} catch (Exception x) {
logger.error(x.getLocalizedMessage());
}
@@ -99,7 +104,9 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
// collecting info on social (posts, replies and likes)
SocialHarvester socialHarvester = new SocialHarvester(dateFrom, dateTo);
List<Harvest> res = socialHarvester.getData();
dbaseManager.insertMonthlyData((Date) dateFrom, (Date) dateTo, res, updateFlag);
if(!testMode) {
dbaseManager.insertMonthlyData((Date) dateFrom, (Date) dateTo, res, updateFlag);
}
} catch (Exception x) {
logger.error(x.getLocalizedMessage());
}
@@ -109,7 +116,9 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
// collecting info on method invocation
MethodInvocationHarvester methodInvocationHarvester = new MethodInvocationHarvester(dateFrom, dateTo);
List<Harvest> res = methodInvocationHarvester.getData();
//insertMonthlyData((Date) dateFrom, (Date) dateTo, res);
if(!testMode) {
dbaseManager.insertMonthlyData((Date) dateFrom, (Date) dateTo, res, updateFlag);
}
} catch (Exception x) {
logger.error(x.getLocalizedMessage());
}


@@ -3,30 +3,25 @@ package org.gcube.dataharvest.harvester;
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.io.IOException;
import java.io.InputStream;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Properties;
import org.gcube.dataharvest.dao.Dao;
import org.gcube.dataharvest.dao.DatabaseManager;
import org.gcube.dataharvest.datamodel.Harvest;
import org.gcube.common.authorization.client.Constants;
import org.gcube.common.authorization.library.AuthorizationEntry;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.encryption.StringEncrypter;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.Property;
import org.gcube.common.resources.gcore.utils.Group;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataharvest.dao.DatabaseManager;
import org.gcube.dataharvest.datamodel.Harvest;
import org.gcube.informationsystem.publisher.RegistryPublisher;
import org.gcube.informationsystem.publisher.RegistryPublisherFactory;
import org.gcube.resources.discovery.client.api.DiscoveryClient;

src/main/java/org/gcube/dataharvest/harvester/VreUsersHarvester.java

@@ -1,33 +1,18 @@
package org.gcube.dataharvest.harvester;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.Properties;
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.gcube.dataharvest.datamodel.Harvest;
import org.gcube.dataharvest.utils.Utils;
import org.json.*;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class VreUsersHarvester extends BasicHarvester {
// private final String CATEGORY_NAME = "Accounting";

src/test/java/org/gcube/dataharvest/AccountingDataHarvesterPluginTest.java

@@ -0,0 +1,80 @@
package org.gcube.dataharvest;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Map;
import java.util.TimeZone;
import org.gcube.utils.ScopedTest;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AccountingDataHarvesterPluginTest extends ScopedTest {
private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterPluginTest.class);
public static TimeZone UTC_TIMEZONE = TimeZone.getTimeZone("UTC");
public static final String DATETIME_PATTERN = "yyyy-MM-dd HH:mm:ss.SSS Z";
public static final DateFormat DEFAULT_DATE_FORMAT;
static {
DEFAULT_DATE_FORMAT = getUTCDateFormat(DATETIME_PATTERN);
}
public static DateFormat getUTCDateFormat(String pattern){
DateFormat dateFormat = new SimpleDateFormat(pattern);
dateFormat.setTimeZone(UTC_TIMEZONE);
return dateFormat;
}
public static Calendar getUTCCalendarInstance(){
return Calendar.getInstance(UTC_TIMEZONE);
}
public static Calendar getAggregationStartCalendar(int year, int month, int day){
Calendar aggregationStartCalendar = getUTCCalendarInstance();
aggregationStartCalendar.set(Calendar.YEAR, year);
aggregationStartCalendar.set(Calendar.MONTH, month);
aggregationStartCalendar.set(Calendar.DAY_OF_MONTH, day);
aggregationStartCalendar.set(Calendar.HOUR_OF_DAY, 0);
aggregationStartCalendar.set(Calendar.MINUTE, 0);
aggregationStartCalendar.set(Calendar.SECOND, 0);
aggregationStartCalendar.set(Calendar.MILLISECOND, 0);
logger.debug("{}", DEFAULT_DATE_FORMAT.format(aggregationStartCalendar.getTime()));
return aggregationStartCalendar;
}
@Test
public void test() {
try {
ScopedTest.setContext(ScopedTest.ROOT);
DataHarvestPluginDeclaration dataHarvestPluginDeclaration = new DataHarvestPluginDeclaration();
AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(dataHarvestPluginDeclaration);
Calendar from = getAggregationStartCalendar(2018, Calendar.APRIL, 1);
Calendar to = getAggregationStartCalendar(2018, Calendar.APRIL, 30);
Map<String,Object> map = new HashMap<>();
map.put(AccountingDataHarvesterPlugin.PARAMETER_FROM, from.getTime());
map.put(AccountingDataHarvesterPlugin.PARAMETER_TO, to.getTime());
map.put(AccountingDataHarvesterPlugin.TEST, false);
accountingDataHarvesterPlugin.launch(map);
logger.info("End.");
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}

src/test/java/org/gcube/utils/ScopedTest.java

@@ -0,0 +1,103 @@
/**
*
*/
package org.gcube.utils;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import org.gcube.common.authorization.client.Constants;
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
import org.gcube.common.authorization.library.AuthorizationEntry;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR)
*
*/
public class ScopedTest {
private static final Logger logger = LoggerFactory.getLogger(ScopedTest.class);
protected static final String PROPERTIES_FILENAME = "token.properties";
private static final String GCUBE_DEVNEXT_VARNAME = "GCUBE_DEVNEXT";
public static final String GCUBE_DEVNEXT;
private static final String GCUBE_DEVNEXT_NEXTNEXT_VARNAME = "GCUBE_DEVNEXT_NEXTNEXT";
public static final String GCUBE_DEVNEXT_NEXTNEXT;
public static final String GCUBE_DEVSEC_VARNAME = "GCUBE_DEVSEC";
public static final String GCUBE_DEVSEC;
public static final String GCUBE_DEVSEC_DEVVRE_VARNAME = "GCUBE_DEVSEC_DEVVRE";
public static final String GCUBE_DEVSEC_DEVVRE;
public static final String GCUBE_VARNAME = "GCUBE";
public static final String GCUBE;
public static final String DEFAULT_TEST_SCOPE;
public static final String ROOT_VARNAME = "ROOT_ERIC";
public static final String ROOT;
public static final String TAGME_VARNAME = "TAGME_ERIC";
public static final String TAGME;
static {
Properties properties = new Properties();
InputStream input = ScopedTest.class.getClassLoader().getResourceAsStream(PROPERTIES_FILENAME);
try {
// load the properties file
properties.load(input);
} catch (IOException e) {
throw new RuntimeException(e);
}
GCUBE = properties.getProperty(GCUBE_VARNAME);
GCUBE_DEVNEXT = properties.getProperty(GCUBE_DEVNEXT_VARNAME);
GCUBE_DEVNEXT_NEXTNEXT = properties.getProperty(GCUBE_DEVNEXT_NEXTNEXT_VARNAME);
GCUBE_DEVSEC = properties.getProperty(GCUBE_DEVSEC_VARNAME);
GCUBE_DEVSEC_DEVVRE = properties.getProperty(GCUBE_DEVSEC_DEVVRE_VARNAME);
ROOT = properties.getProperty(ROOT_VARNAME);
TAGME = properties.getProperty(TAGME_VARNAME);
DEFAULT_TEST_SCOPE = GCUBE_DEVSEC;
}
public static String getCurrentScope(String token) throws ObjectNotFound, Exception{
AuthorizationEntry authorizationEntry = Constants.authorizationService().get(token);
String context = authorizationEntry.getContext();
logger.info("Context of token {} is {}", token, context);
return context;
}
public static void setContext(String token) throws ObjectNotFound, Exception{
SecurityTokenProvider.instance.set(token);
ScopeProvider.instance.set(getCurrentScope(token));
}
@BeforeClass
public static void beforeClass() throws Exception{
setContext(DEFAULT_TEST_SCOPE);
}
@AfterClass
public static void afterClass() throws Exception{
SecurityTokenProvider.instance.reset();
ScopeProvider.instance.reset();
}
}
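The ScopedTest base class above loads context tokens from a token.properties resource on the test classpath. A minimal sketch of that file, assuming the property names read in the static block (the values are placeholders, not real tokens):

    # token.properties (test classpath) - placeholder values, not real tokens
    GCUBE=<root-context-token>
    GCUBE_DEVNEXT=<devNext-token>
    GCUBE_DEVNEXT_NEXTNEXT=<devNext-NextNext-token>
    GCUBE_DEVSEC=<devsec-token>
    GCUBE_DEVSEC_DEVVRE=<devsec-devVRE-token>
    ROOT_ERIC=<root-token>
    TAGME_ERIC=<tagme-token>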


@@ -10,7 +10,7 @@
<logger name="org.gcube" level="INFO" />
<!-- logger name="org.gcube.accounting.aggregator" level="TRACE" / -->
<logger name="org.gcube.dataharvest" level="TRACE" />
<root level="WARN">
<appender-ref ref="STDOUT" />