This commit is contained in:
Lucio Lelii 2017-09-14 15:59:10 +00:00
parent 3adf9d263b
commit 3c04ac2495
3 changed files with 52 additions and 7 deletions

View File

@ -1,6 +1,9 @@
<ReleaseNotes>
<Changeset component="${groupId}.${artifactId}.1-5-1" date="2017-09-14">
<Change>added accounting on algorithm execution</Change>
</Changeset>
<Changeset component="${groupId}.${artifactId}.1-5-0" date="2017-07-31">
<Change>service interface classes movedo to wps project</Change>
<Change>service interface classes moved to wps project</Change>
</Changeset>
<Changeset component="${groupId}.${artifactId}.1-1-0" date="2016-10-03">
<Change>First Release</Change>

View File

@ -9,7 +9,7 @@
</parent>
<groupId>org.gcube.dataanalysis</groupId>
<artifactId>dataminer</artifactId>
<version>1.5.0-SNAPSHOT</version>
<version>1.5.1-SNAPSHOT</version>
<name>dataminer</name>
<description>An e-Infrastructure service providing state-of-the art DataMining algorithms and ecological modelling approaches under the Web Processing Service (WPS) standard.</description>
<scm>
@ -140,6 +140,13 @@
<artifactId>xercesImpl</artifactId>
<version>2.11.0</version>
</dependency>
<dependency>
<groupId>org.gcube.accounting</groupId>
<artifactId>accounting-lib</artifactId>
<version>[3.0.0-SNAPSHOT,4.0.0-SNAPSHOT)</version>
<scope>provided</scope>
</dependency>
</dependencies>

View File

@ -4,12 +4,17 @@ import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.UUID;
import org.gcube.accounting.datamodel.UsageRecord.OperationResult;
import org.gcube.accounting.datamodel.usagerecords.JobUsageRecord;
import org.gcube.accounting.persistence.AccountingPersistence;
import org.gcube.accounting.persistence.AccountingPersistenceFactory;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
@ -36,6 +41,7 @@ import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.utils.Observabl
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.utils.Observer;
import org.hibernate.SessionFactory;
import org.n52.wps.algorithm.annotation.Execute;
import org.n52.wps.commons.WPSConfig;
import org.n52.wps.server.AbstractAnnotatedAlgorithm;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -45,7 +51,7 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
/**
* Deploying procedure: 1 - modify configuration files 2 - modify resource file: resources/templates/setup.cfg 3 - generate classes with ClassGenerator 4 - add new classes in the wps_config.xml on the wps web app config folder 5 - produce the Jar file of this project 6 - copy the jar file in the lib folder of the wps web app change the server parameters in the wps_config.xml file
*/
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractEcologicalEngineMapper.class);
private Observer observer = null;
@ -235,7 +241,7 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
}
float previousStatus = -3;
String host = WPSConfig.getInstance().getWPSConfig().getServer().getHostname();
public void updateStatus(float status) {
if (agent != null) {
if (status != previousStatus) {
@ -295,7 +301,11 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
public void run() throws Exception {
if (observer!=null)
observer.isStarted(this);
long startTimeLong = System.currentTimeMillis();
OperationResult operationResult = null;
String algorithm = "";
List<String> generatedInputTables = null;
List<String> generatedOutputTables = null;
@ -308,6 +318,8 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
} else
LOGGER.info("Wps External ID not set");
InputsManager inputsManager = null;
ConfigurationManager configManager = new ConfigurationManager(); // initializes parameters from file
manageUserToken();
try {
// wait for server resources to be available
@ -315,8 +327,7 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
time("WPS Algorithm objects Initialization: Session " + computationSession);
// set the configuration environment for this algorithm
ConfigurationManager configManager = new ConfigurationManager(); // initializes parameters from file
manageUserToken();
configManager.configAlgorithmEnvironment(inputs);
configManager.setComputationId(computationSession);
config = configManager.getConfig();
@ -430,7 +441,9 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
throw new Exception("Computation cancelled");
}
LOGGER.debug("All done");
operationResult = OperationResult.SUCCESS;
} catch (Exception e) {
operationResult = OperationResult.FAILED;
LOGGER.error("Error execution Algorithm {}",algorithm,e);
int exitstatus = -2;
if (isCancelled())
@ -459,7 +472,29 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
cleanResources();
if (observer!=null) observer.isFinished(this);
LOGGER.debug("All done - Computation Finished");
accountAlgorithmExecution(configManager, startTimeLong, System.currentTimeMillis(), operationResult);
}
}
/**
 * Sends a usage record for a finished algorithm execution to the gCube
 * accounting framework. Best-effort: any failure is logged and swallowed so
 * that accounting problems can never affect the computation outcome.
 *
 * @param confManager configuration manager holding the executing user's identity
 * @param start       execution start time, epoch milliseconds
 * @param end         execution end time, epoch milliseconds
 * @param result      SUCCESS or FAILED outcome of the run
 */
private void accountAlgorithmExecution(ConfigurationManager confManager, long start, long end, OperationResult result) {
	try {
		JobUsageRecord jobUsageRecord = new JobUsageRecord();
		jobUsageRecord.setJobName(this.getAlgorithmClass().getSimpleName());
		jobUsageRecord.setConsumerId(confManager.getUsername());
		jobUsageRecord.setDuration(end - start);
		jobUsageRecord.setOperationResult(result);
		jobUsageRecord.setServiceName("DataMiner");
		jobUsageRecord.setServiceClass("WPS");
		jobUsageRecord.setHost(WPSConfig.getInstance().getWPSConfig().getServer().getHostname());
		// NOTE(review): the original code built a Calendar from `start` but never
		// attached it to the record (dead locals, removed here). If JobUsageRecord
		// exposes a start-time setter, it was probably meant to be called with that
		// value — confirm against the accounting-lib API.
		AccountingPersistence accountingPersistence = AccountingPersistenceFactory.getPersistence();
		accountingPersistence.account(jobUsageRecord);
	} catch (Exception e) {
		// Accounting must never break the service: log and continue.
		LOGGER.error("error accounting algorithm execution", e);
	}
}