2016-04-01 13:09:40 +02:00
|
|
|
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping;
|
|
|
|
|
2016-05-24 12:17:21 +02:00
|
|
|
import java.io.BufferedReader;
|
2016-04-01 13:09:40 +02:00
|
|
|
import java.io.File;
|
2016-05-24 12:17:21 +02:00
|
|
|
import java.io.InputStreamReader;
|
2018-01-08 12:53:50 +01:00
|
|
|
import java.nio.file.Files;
|
|
|
|
import java.nio.file.Path;
|
|
|
|
import java.nio.file.Paths;
|
2016-04-01 13:09:40 +02:00
|
|
|
import java.util.ArrayList;
|
|
|
|
import java.util.Date;
|
|
|
|
import java.util.HashMap;
|
|
|
|
import java.util.LinkedHashMap;
|
|
|
|
import java.util.List;
|
|
|
|
import java.util.UUID;
|
|
|
|
|
2017-09-14 17:59:10 +02:00
|
|
|
import org.gcube.accounting.datamodel.UsageRecord.OperationResult;
|
|
|
|
import org.gcube.accounting.datamodel.usagerecords.JobUsageRecord;
|
|
|
|
import org.gcube.accounting.persistence.AccountingPersistence;
|
|
|
|
import org.gcube.accounting.persistence.AccountingPersistenceFactory;
|
2016-04-01 13:09:40 +02:00
|
|
|
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
|
|
|
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
|
|
|
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
|
|
|
|
import org.gcube.dataanalysis.ecoengine.processing.factories.ClusterersFactory;
|
|
|
|
import org.gcube.dataanalysis.ecoengine.processing.factories.EvaluatorsFactory;
|
|
|
|
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
|
|
|
|
import org.gcube.dataanalysis.ecoengine.processing.factories.ModelersFactory;
|
|
|
|
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
|
|
|
|
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
|
|
|
|
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
|
|
|
|
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.infrastructure.DatabaseInfo;
|
|
|
|
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.infrastructure.InfrastructureDialoguer;
|
|
|
|
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.infrastructure.TableCoherenceChecker;
|
|
|
|
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.IClusterer;
|
|
|
|
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.IEvaluator;
|
|
|
|
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.IGenerator;
|
|
|
|
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.IModeller;
|
|
|
|
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.ITransducer;
|
|
|
|
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.dataspace.ComputationData;
|
|
|
|
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.dataspace.DataspaceManager;
|
|
|
|
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.dataspace.StoredData;
|
2017-07-13 11:14:22 +02:00
|
|
|
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.utils.Cancellable;
|
|
|
|
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.utils.Observable;
|
|
|
|
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.utils.Observer;
|
2016-04-01 13:09:40 +02:00
|
|
|
import org.hibernate.SessionFactory;
|
|
|
|
import org.n52.wps.algorithm.annotation.Execute;
|
2017-09-14 17:59:10 +02:00
|
|
|
import org.n52.wps.commons.WPSConfig;
|
2016-04-01 13:09:40 +02:00
|
|
|
import org.n52.wps.server.AbstractAnnotatedAlgorithm;
|
2017-05-05 10:08:44 +02:00
|
|
|
import org.slf4j.Logger;
|
2016-04-01 13:09:40 +02:00
|
|
|
import org.slf4j.LoggerFactory;
|
|
|
|
|
2021-05-25 11:14:58 +02:00
|
|
|
public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm implements Observable, Cancellable {
|
2016-04-01 13:09:40 +02:00
|
|
|
|
|
|
|
/**
|
2021-05-25 11:14:58 +02:00
|
|
|
* Deploying procedure: 1 - modify configuration files 2 - modify resource
|
|
|
|
* file: resources/templates/setup.cfg 3 - generate classes with
|
|
|
|
* ClassGenerator 4 - add new classes in the wps_config.xml on the wps web
|
|
|
|
* app config folder 5 - produce the Jar file of this project 6 - copy the
|
|
|
|
* jar file in the lib folder of the wps web app change the server
|
|
|
|
* parameters in the wps_config.xml file
|
2016-04-01 13:09:40 +02:00
|
|
|
*/
|
2017-09-19 17:19:27 +02:00
|
|
|
|
2017-05-05 10:08:44 +02:00
|
|
|
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractEcologicalEngineMapper.class);

// Observer notified on computation start/finish; may remain null when nobody registered.
private Observer observer = null;

// True once the computation has been cancelled — presumably set via a Cancellable callback; confirm against the setter (not visible in this chunk).
private boolean cancelled = false;

// Provides gCube credentials (scope, user name, token) for the current request.
private TokenManager tokenm = null;

// Optional container-injected environment configuration (may stay null).
private EnvironmentVariableManager env = null;

// inputs and outputs
public LinkedHashMap<String, Object> inputs = new LinkedHashMap<String, Object>();
public LinkedHashMap<String, Object> outputs = new LinkedHashMap<String, Object>();
// Step label -> elapsed time, filled by time() and dumped by displayTimes() (both defined elsewhere in this class).
public LinkedHashMap<String, Long> times = new LinkedHashMap<String, Long>();
// Human-readable start/end timestamps (dd/MM/yyyy HH:mm:ss), set in run().
public String startTime;
public String endTime;
// Per-scope cache of the central database coordinates; access it only through the synchronized static accessors below.
public static HashMap<String, DatabaseInfo> databaseParametersMemoryCache = new HashMap<String, DatabaseInfo>();
// Session id -> "username:scope" for all currently running computations; access it only through the synchronized static accessors below.
public static HashMap<String, String> runningcomputations = new HashMap<String, String>();
// Ecological Engine agent executing the current algorithm (resolved in run()).
ComputationalAgent agent;
// External id assigned by the WPS container; when set it replaces the random session UUID.
public String wpsExternalID = null;
// Provenance record of the computation currently in progress.
ComputationData currentComputation;
|
|
|
|
|
|
|
|
public void setWpsExternalID(String wpsExternalID) {
|
|
|
|
this.wpsExternalID = wpsExternalID;
|
|
|
|
}
|
|
|
|
|
|
|
|
public static synchronized void addComputation(String session, String user) {
|
|
|
|
runningcomputations.put(session, user);
|
|
|
|
}
|
|
|
|
|
|
|
|
public static synchronized void removeComputation(String session) {
|
|
|
|
runningcomputations.remove(session);
|
|
|
|
}
|
|
|
|
|
|
|
|
public static synchronized int getRuningComputations() {
|
|
|
|
return runningcomputations.size();
|
|
|
|
}
|
|
|
|
|
|
|
|
public static synchronized String displayRunningComputations() {
|
|
|
|
return runningcomputations.toString();
|
|
|
|
}
|
|
|
|
|
|
|
|
public void waitForResources() throws Exception {
|
|
|
|
while (getRuningComputations() > ConfigurationManager.getMaxComputations()) {
|
2016-05-24 12:17:21 +02:00
|
|
|
Thread.sleep(20000);
|
2017-05-05 10:08:44 +02:00
|
|
|
LOGGER.debug("Waiting for resources to be available: " + displayRunningComputations());
|
2016-04-01 13:09:40 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
}
|
2017-07-13 11:14:22 +02:00
|
|
|
|
2016-04-01 13:09:40 +02:00
|
|
|
// inner objects
// Configuration shared with the Ecological Engine for the current computation (built in run()).
public AlgorithmConfiguration config;
// Gateway towards the e-Infrastructure information system (instantiated in run() for the current scope).
public InfrastructureDialoguer infrastructureDialoguer;
|
|
|
|
|
|
|
|
public static synchronized DatabaseInfo getDatabaseInfo(String scope) {
|
|
|
|
return databaseParametersMemoryCache.get(scope);
|
|
|
|
}
|
|
|
|
|
|
|
|
public static synchronized void addDatabaseInfo(String scope, DatabaseInfo info) {
|
|
|
|
databaseParametersMemoryCache.put(scope, info);
|
|
|
|
}
|
2017-07-13 11:14:22 +02:00
|
|
|
|
2016-04-01 13:09:40 +02:00
|
|
|
public ComputationalAgent getComputationalAgent(String algorithmName) throws Exception {
|
2017-05-05 10:08:44 +02:00
|
|
|
LOGGER.debug("Searching for Agents.. " + algorithmName);
|
2016-04-01 13:09:40 +02:00
|
|
|
List<ComputationalAgent> agents = new ArrayList<ComputationalAgent>();
|
|
|
|
|
|
|
|
if (this instanceof ITransducer)
|
|
|
|
agents = TransducerersFactory.getTransducerers(config);
|
|
|
|
else if (this instanceof IClusterer)
|
|
|
|
agents = ClusterersFactory.getClusterers(config);
|
|
|
|
else if (this instanceof IEvaluator)
|
|
|
|
agents = EvaluatorsFactory.getEvaluators(config);
|
|
|
|
else if (this instanceof IGenerator)
|
|
|
|
agents = GeneratorsFactory.getGenerators(config);
|
|
|
|
else if (this instanceof IModeller)
|
|
|
|
agents = ModelersFactory.getModelers(config);
|
|
|
|
|
|
|
|
if (agents != null && agents.size() > 0 && agents.get(0) != null) {
|
2017-05-05 10:08:44 +02:00
|
|
|
LOGGER.debug("Found " + agents.size() + " Agents for " + algorithmName);
|
2016-04-01 13:09:40 +02:00
|
|
|
ComputationalAgent agent = agents.get(0);
|
|
|
|
agent.setConfiguration(config);
|
|
|
|
return agent;
|
|
|
|
} else
|
|
|
|
return null;
|
|
|
|
}
|
|
|
|
|
|
|
|
public List<StatisticalType> getInputParameters(String algorithmName) throws Exception {
|
2017-05-05 10:08:44 +02:00
|
|
|
LOGGER.debug("Searching for Agents Inputs.. " + algorithmName);
|
2016-04-01 13:09:40 +02:00
|
|
|
List<StatisticalType> parameters = new ArrayList<StatisticalType>();
|
|
|
|
|
|
|
|
if (this instanceof ITransducer)
|
|
|
|
parameters = TransducerersFactory.getTransducerParameters(config, algorithmName);
|
|
|
|
else if (this instanceof IClusterer)
|
2017-09-19 17:19:27 +02:00
|
|
|
parameters = ClusterersFactory.getClustererParameters(config.getConfigPath(), algorithmName, config);
|
2016-04-01 13:09:40 +02:00
|
|
|
else if (this instanceof IEvaluator)
|
2017-09-19 17:19:27 +02:00
|
|
|
parameters = EvaluatorsFactory.getEvaluatorParameters(config.getConfigPath(), algorithmName, config);
|
2016-04-01 13:09:40 +02:00
|
|
|
else if (this instanceof IGenerator)
|
2017-09-19 17:19:27 +02:00
|
|
|
parameters = GeneratorsFactory.getAlgorithmParameters(config.getConfigPath(), algorithmName, config);
|
2016-04-01 13:09:40 +02:00
|
|
|
else if (this instanceof IModeller)
|
2017-09-19 17:19:27 +02:00
|
|
|
parameters = ModelersFactory.getModelParameters(config.getConfigPath(), algorithmName, config);
|
2016-04-01 13:09:40 +02:00
|
|
|
|
|
|
|
if (parameters != null) {
|
2017-05-05 10:08:44 +02:00
|
|
|
LOGGER.debug("Found " + parameters.size() + " Parameters for " + algorithmName);
|
2016-04-01 13:09:40 +02:00
|
|
|
return parameters;
|
|
|
|
} else
|
|
|
|
return null;
|
|
|
|
}
|
|
|
|
|
|
|
|
public StatisticalType getOutput(String algorithmName) throws Exception {
|
2017-05-05 10:08:44 +02:00
|
|
|
LOGGER.debug("Searching for Agents Inputs.. " + algorithmName);
|
2016-04-01 13:09:40 +02:00
|
|
|
StatisticalType output = null;
|
|
|
|
|
|
|
|
if (this instanceof ITransducer)
|
|
|
|
output = TransducerersFactory.getTransducerOutput(config, algorithmName);
|
|
|
|
else if (this instanceof IClusterer)
|
2017-09-19 17:19:27 +02:00
|
|
|
output = ClusterersFactory.getClustererOutput(config.getConfigPath(), algorithmName, config);
|
2016-04-01 13:09:40 +02:00
|
|
|
else if (this instanceof IEvaluator)
|
2017-09-19 17:19:27 +02:00
|
|
|
output = EvaluatorsFactory.getEvaluatorOutput(config.getConfigPath(), algorithmName, config);
|
2016-04-01 13:09:40 +02:00
|
|
|
else if (this instanceof IGenerator)
|
2017-09-19 17:19:27 +02:00
|
|
|
output = GeneratorsFactory.getAlgorithmOutput(config.getConfigPath(), algorithmName, config);
|
2016-04-01 13:09:40 +02:00
|
|
|
else if (this instanceof IModeller)
|
2017-09-19 17:19:27 +02:00
|
|
|
output = ModelersFactory.getModelOutput(config.getConfigPath(), algorithmName, config);
|
2016-04-01 13:09:40 +02:00
|
|
|
|
|
|
|
if (output != null) {
|
2017-05-05 10:08:44 +02:00
|
|
|
LOGGER.debug("Found " + output + " for " + algorithmName);
|
2016-04-01 13:09:40 +02:00
|
|
|
return output;
|
|
|
|
}
|
|
|
|
return output;
|
|
|
|
}
|
|
|
|
|
|
|
|
public void deleteTemporaryTables(List<String> generatedInputTables) throws Exception {
|
|
|
|
|
|
|
|
if (generatedInputTables != null && generatedInputTables.size() > 0) {
|
|
|
|
SessionFactory dbConnection = null;
|
|
|
|
try {
|
|
|
|
dbConnection = DatabaseUtils.initDBSession(config);
|
|
|
|
|
|
|
|
for (String table : generatedInputTables) {
|
|
|
|
if (table != null) {
|
|
|
|
if (TableCoherenceChecker.isSystemTable(table))
|
|
|
|
continue;
|
2017-05-05 10:08:44 +02:00
|
|
|
LOGGER.debug("Dropping Temporary Table: " + table);
|
2016-04-01 13:09:40 +02:00
|
|
|
try {
|
|
|
|
DatabaseFactory.executeSQLUpdate("drop table " + table, dbConnection);
|
|
|
|
} catch (Exception e) {
|
2017-05-05 10:08:44 +02:00
|
|
|
LOGGER.debug("Could not drop Temporary Table: " + table);
|
2016-04-01 13:09:40 +02:00
|
|
|
}
|
|
|
|
} else
|
2017-05-05 10:08:44 +02:00
|
|
|
LOGGER.debug("Could not drop Temporary Table: " + table + " table is null");
|
2016-04-01 13:09:40 +02:00
|
|
|
}
|
|
|
|
} catch (Exception e) {
|
2021-05-25 11:14:58 +02:00
|
|
|
LOGGER.error("error deleting temporary table", e);
|
2016-04-01 13:09:40 +02:00
|
|
|
} finally {
|
|
|
|
DatabaseUtils.closeDBConnection(dbConnection);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
public static void deleteGeneratedFiles(List<File> generatedFiles) throws Exception {
|
2016-05-24 12:17:21 +02:00
|
|
|
System.gc();
|
2016-04-01 13:09:40 +02:00
|
|
|
if (generatedFiles != null) {
|
|
|
|
for (File file : generatedFiles) {
|
|
|
|
if (file.exists()) {
|
2017-05-05 10:08:44 +02:00
|
|
|
LOGGER.debug("Deleting File " + file.getAbsolutePath());
|
2016-09-23 10:39:21 +02:00
|
|
|
try {
|
2017-05-05 10:08:44 +02:00
|
|
|
LOGGER.debug("Deleting File Check " + file.delete());
|
2016-09-23 10:39:21 +02:00
|
|
|
} catch (Exception e) {
|
|
|
|
}
|
2016-04-01 13:09:40 +02:00
|
|
|
} else
|
2017-05-05 10:08:44 +02:00
|
|
|
LOGGER.debug("Deleting File - File does not exist " + file.getAbsolutePath());
|
2016-04-01 13:09:40 +02:00
|
|
|
}
|
|
|
|
}
|
2016-05-24 12:17:21 +02:00
|
|
|
System.gc();
|
2016-04-01 13:09:40 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
public void manageUserToken() {
|
|
|
|
String scope = null;
|
|
|
|
String username = null;
|
2016-09-23 10:39:21 +02:00
|
|
|
String token = null;
|
2016-04-01 13:09:40 +02:00
|
|
|
// DONE get scope and username from SmartGears
|
|
|
|
// get scope from SmartGears
|
2017-09-19 17:19:27 +02:00
|
|
|
tokenm = new TokenManager();
|
2016-04-01 13:09:40 +02:00
|
|
|
tokenm.getCredentials();
|
|
|
|
scope = tokenm.getScope();
|
|
|
|
username = tokenm.getUserName();
|
2016-09-23 10:39:21 +02:00
|
|
|
token = tokenm.getToken();
|
2016-04-01 13:09:40 +02:00
|
|
|
// set parameters
|
|
|
|
inputs.put(ConfigurationManager.scopeParameter, scope);
|
|
|
|
inputs.put(ConfigurationManager.usernameParameter, username);
|
2016-09-23 10:39:21 +02:00
|
|
|
inputs.put(ConfigurationManager.tokenParameter, token);
|
2016-04-01 13:09:40 +02:00
|
|
|
}
|
|
|
|
|
2016-05-24 12:17:21 +02:00
|
|
|
// Last status pushed to the WPS container; -3 is a sentinel so the first real update always differs.
float previousStatus = -3;
// Host name of this WPS server, read once from the WPS configuration.
String host = WPSConfig.getInstance().getWPSConfig().getServer().getHostname();
|
2021-05-25 11:14:58 +02:00
|
|
|
|
2019-04-17 17:36:38 +02:00
|
|
|
public void updateStatus(float status, boolean canWrite) {
|
2016-04-01 13:09:40 +02:00
|
|
|
if (agent != null) {
|
2017-07-13 11:14:22 +02:00
|
|
|
if (status != previousStatus) {
|
2021-05-25 11:14:58 +02:00
|
|
|
LOGGER.debug("STATUS update to: {} ", status);
|
2017-09-19 17:19:27 +02:00
|
|
|
previousStatus = status;
|
2017-07-13 11:14:22 +02:00
|
|
|
super.update(new Integer((int) status));
|
|
|
|
try {
|
2021-05-25 11:14:58 +02:00
|
|
|
if (canWrite)
|
|
|
|
updateComputationOnWS(status, null);
|
2017-07-13 11:14:22 +02:00
|
|
|
} catch (Exception e) {
|
|
|
|
LOGGER.warn("error updating compution on WS");
|
2016-09-23 10:39:21 +02:00
|
|
|
}
|
2016-04-01 13:09:40 +02:00
|
|
|
}
|
|
|
|
}
|
2017-07-13 11:14:22 +02:00
|
|
|
|
2016-04-01 13:09:40 +02:00
|
|
|
}
|
2016-09-23 10:39:21 +02:00
|
|
|
|
2018-10-12 15:53:36 +02:00
|
|
|
public void setEnvironmentVariableManager(EnvironmentVariableManager env) {
|
2019-03-27 18:34:46 +01:00
|
|
|
this.env = env;
|
2018-10-12 15:53:36 +02:00
|
|
|
}
|
2021-05-25 11:14:58 +02:00
|
|
|
|
2016-04-01 13:09:40 +02:00
|
|
|
public void updateComputationOnWS(float status, String exception) {
|
2016-05-11 13:35:14 +02:00
|
|
|
updateComputationOnWS(status, exception, null, null);
|
|
|
|
}
|
2016-09-23 10:39:21 +02:00
|
|
|
|
2021-05-25 11:14:58 +02:00
|
|
|
class RunDataspaceManager implements Runnable {
|
2016-09-27 15:44:14 +02:00
|
|
|
List<StoredData> inputData;
|
|
|
|
List<File> generatedData;
|
2021-05-25 11:14:58 +02:00
|
|
|
|
|
|
|
public RunDataspaceManager(List<StoredData> inputData, List<File> generatedData) {
|
|
|
|
this.inputData = inputData;
|
|
|
|
this.generatedData = generatedData;
|
2016-09-27 15:44:14 +02:00
|
|
|
}
|
2017-06-27 15:22:46 +02:00
|
|
|
|
2016-09-27 15:44:14 +02:00
|
|
|
public void run() {
|
2016-05-11 13:35:14 +02:00
|
|
|
DataspaceManager manager = new DataspaceManager(config, currentComputation, inputData, null, generatedData);
|
2016-04-01 13:09:40 +02:00
|
|
|
try {
|
2017-05-05 10:08:44 +02:00
|
|
|
LOGGER.debug("Dataspace->Status updater->Writing computational info on the WS asyncronously");
|
2016-04-01 13:09:40 +02:00
|
|
|
manager.writeRunningComputationData();
|
|
|
|
} catch (Exception ez) {
|
2021-05-25 11:14:58 +02:00
|
|
|
LOGGER.error("Dataspace->Status updater->Impossible to write computation information on the Workspace",
|
|
|
|
ez);
|
2016-04-01 13:09:40 +02:00
|
|
|
}
|
|
|
|
}
|
2016-09-27 15:44:14 +02:00
|
|
|
};
|
|
|
|
|
2021-05-25 11:14:58 +02:00
|
|
|
public void updateComputationOnWS(float status, String exception, List<StoredData> inputData,
|
|
|
|
List<File> generatedData) {
|
2016-09-27 15:44:14 +02:00
|
|
|
if (currentComputation != null) {
|
|
|
|
currentComputation.setStatus("" + status);
|
|
|
|
if (exception != null && exception.length() > 0)
|
|
|
|
currentComputation.setException(exception);
|
2021-05-25 11:14:58 +02:00
|
|
|
LOGGER.debug("RunDataspaceManager: [inputData=" + inputData + ", generatedData=" + generatedData + "]");
|
|
|
|
RunDataspaceManager rundm = new RunDataspaceManager(inputData, generatedData);
|
2016-09-27 15:44:14 +02:00
|
|
|
rundm.run();
|
2017-06-27 15:22:46 +02:00
|
|
|
|
2016-09-27 15:44:14 +02:00
|
|
|
/*
|
2021-05-25 11:14:58 +02:00
|
|
|
* Thread t = new Thread(rundm); t.start();
|
2017-06-27 15:22:46 +02:00
|
|
|
*/
|
2016-09-27 15:44:14 +02:00
|
|
|
}
|
2016-04-01 13:09:40 +02:00
|
|
|
}
|
2016-09-23 10:39:21 +02:00
|
|
|
|
2016-04-01 13:09:40 +02:00
|
|
|
@Execute
/**
 * WPS entry point: orchestrates a full DataMiner computation — environment
 * configuration, input translation, algorithm execution, output merging,
 * provenance publication and resource cleanup.
 *
 * Flow: notify observer -> build session id -> configure scope/credentials ->
 * create a lock file -> (try) configure + run the Ecological Engine agent and
 * publish outputs -> (catch) record the failure on the workspace -> (finally)
 * account the execution, drop temporary tables and release resources.
 *
 * @throws Exception when the algorithm fails or the computation is cancelled
 */
public void run() throws Exception {
	if (observer != null)
		observer.isStarted(this);

	LOGGER.info("classloader context in this thread is {}", Thread.currentThread().getContextClassLoader());

	// Wall-clock bounds used for accounting in the finally block.
	long startTimeLong = System.currentTimeMillis();

	OperationResult operationResult = null;

	String algorithm = "";
	List<String> generatedInputTables = null;
	List<String> generatedOutputTables = null;
	List<File> generatedFiles = null;
	// String date = new
	// java.text.SimpleDateFormat("dd_MM_yyyy_HH:mm:ss").format(System.currentTimeMillis());
	// Session id: algorithm class name + external WPS id when available, random UUID otherwise.
	String computationSession = this.getAlgorithmClass().getSimpleName() + "_ID_" + UUID.randomUUID().toString();
	if (wpsExternalID != null) {
		LOGGER.info("Using wps External ID " + wpsExternalID);
		computationSession = this.getAlgorithmClass().getSimpleName() + "_ID_" + wpsExternalID;
	} else
		LOGGER.info("Wps External ID not set");
	InputsManager inputsManager = null;
	ConfigurationManager configManager = new ConfigurationManager(this.env); // initializes
	// parameters
	// from
	// web.xml
	manageUserToken();

	boolean canWriteOnShub = checkWriteAuthorization(tokenm.getUserName());

	// Per-computation lock file under the system temp dir — presumably used by an
	// external watchdog to detect running computations; confirm with the deployment docs.
	Path dir = Paths.get(System.getProperty("java.io.tmpdir"), "dmlocks");
	if (!Files.exists(dir))
		dir = Files.createDirectory(dir);
	Path lockFile = Files.createTempFile(dir, "dm", ".lck");
	LOGGER.info("lock file created {}", lockFile.toUri().toURL());
	try {
		// wait for server resources to be available
		startTime = new java.text.SimpleDateFormat("dd/MM/yyyy HH:mm:ss").format(System.currentTimeMillis());
		time("WPS Algorithm objects Initialization: Session " + computationSession);

		// set the configuration environment for this algorithm
		configManager.configAlgorithmEnvironment(inputs);
		configManager.setComputationId(computationSession);
		config = configManager.getConfig();
		LOGGER.info("Configured algorithm with session " + computationSession);
		time("Configuration");
		waitForResources();
		LOGGER.info("Running algorithm with session " + computationSession);
		time("Waiting time for resources to be free");
		// add the computation to the global list of computations
		addComputation(computationSession, configManager.getUsername() + ":" + configManager.getScope());

		String scope = configManager.getScope();
		String username = configManager.getUsername();

		LOGGER.info("1 - Algorithm environment initialized in scope " + scope + " with user name " + username
				+ " and session " + computationSession);
		LOGGER.info("Max allowed computations " + ConfigurationManager.getMaxComputations() + " using storage "
				+ ConfigurationManager.useStorage());
		// init the infrastructure dialoguer
		LOGGER.info("2 - Initializing connection to the e-Infrastructure");
		infrastructureDialoguer = new InfrastructureDialoguer(scope);
		time("Connection to the e-Infrastructure initialized");
		// set the database parameters
		LOGGER.info("3 - Initializing connection to the e-Infrastructure central database for computations");
		DatabaseInfo supportDatabaseInfo = getDatabaseInfo(scope);
		// Cache miss: resolve the central database via the information system and remember it per scope.
		if (supportDatabaseInfo == null) {
			supportDatabaseInfo = infrastructureDialoguer.getDatabaseInfo("StatisticalManagerDataBase");
			addDatabaseInfo(scope, supportDatabaseInfo);
		} else
			LOGGER.info("Using cached database information: " + supportDatabaseInfo);
		LOGGER.info("Retrieved Central Database: " + supportDatabaseInfo);
		inputsManager = new InputsManager(inputs, config, computationSession);
		inputsManager.configSupportDatabaseParameters(supportDatabaseInfo);
		time("Central database information retrieval");
		// retrieve the algorithm to execute
		LOGGER.info("4 - Retrieving WPS algorithm name");
		algorithm = this.getAlgorithmClass().getSimpleName();
		LOGGER.debug("Selected Algorithm: " + algorithm);
		config.setAgent(algorithm);
		config.setModel(algorithm);
		time("Ecological Engine Algorithm selection");
		// adding service parameters to the configuration
		LOGGER.info("5 - Adding Service parameters to the configuration");
		List<StatisticalType> dataminerInputParameters = getInputParameters(algorithm);
		LOGGER.debug("Dataminer Algo Default InputParameters: " + dataminerInputParameters);
		inputsManager.addInputServiceParameters(dataminerInputParameters, infrastructureDialoguer);
		time("Service parameters added to the algorithm");
		// merging wps with ecological engine parameters - modifies the
		// config
		LOGGER.info("6 - Translating WPS Inputs into Ecological Engine Inputs");
		LOGGER.debug("Operator class is " + this.getClass().getCanonicalName());
		// build computation Data
		currentComputation = new ComputationData(config.getTaskID(), config.getAgent(), "", "", startTime, "-", "0",
				config.getTaskID(), configManager.getUsername(), config.getGcubeScope(),
				this.getClass().getCanonicalName());
		inputsManager.mergeWpsAndEcologicalInputs(supportDatabaseInfo, dataminerInputParameters);
		generatedInputTables = inputsManager.getGeneratedTables();
		generatedFiles = inputsManager.getGeneratedInputFiles();
		time("Setup and download of input parameters with tables creation");
		// retrieve the computational agent given the configuration
		LOGGER.info("7 - Retrieving Ecological Engine algorithm");
		agent = getComputationalAgent(algorithm);
		currentComputation.setOperatorDescription(agent.getDescription());
		currentComputation.setInfrastructure(agent.getInfrastructure().name());
		LOGGER.debug("Found Ecological Engine Algorithm: " + agent);
		time("Algorithm initialization");
		// take the a priori declared wps output
		LOGGER.info("8 - Retrieving the a priori output of the algorithm");
		StatisticalType prioroutput = null;
		try {
			prioroutput = getOutput(algorithm);
		} catch (Exception e) {
			// Not fatal: some algorithms declare no a-priori output.
			LOGGER.info("Warning: No a priori output for algorithm " + algorithm);
		}
		time("A priori output retrieval");
		// run the computation
		LOGGER.info("9 - Running the computation and updater");

		LOGGER.info("Initializing the WPS status of the computation");
		updateStatus(0, canWriteOnShub);
		LOGGER.info("Initializing the computation");
		agent.init();
		LOGGER.info("Updating status");
		runStatusUpdater(canWriteOnShub);
		LOGGER.info("Running the computation");
		agent.compute();
		LOGGER.info("The computation has finished. Retrieving output");
		time("Execution time");
		// get the a posteriori output
		LOGGER.info("10 - Retrieving the a posteriori output of the algorithm");
		StatisticalType postoutput = agent.getOutput();
		LOGGER.debug("Computation Output: " + postoutput);
		time("Output retrieval");
		// merge the posterior and prior outputs
		LOGGER.info("11 - Merging the a priori and a posteriori output");
		OutputsManager outputmanager = new OutputsManager(config, computationSession);
		outputs = outputmanager.createOutput(prioroutput, postoutput);
		// in the case of storage usage, delete all local files
		generatedOutputTables = outputmanager.getGeneratedTables();
		if (ConfigurationManager.useStorage()) {
			generatedFiles.addAll(outputmanager.getGeneratedFiles());
			time("Output preparation for WPS document (using storage)");
		} else
			time("Output preparation for WPS document (no storage manager)");

		outputmanager.shutdown();

		LOGGER.debug("12 - Final Computation Output");
		LOGGER.debug("Outputs: " + outputs);

		endTime = new java.text.SimpleDateFormat("dd/MM/yyyy HH:mm:ss").format(System.currentTimeMillis());
		if (!isCancelled()) {
			LOGGER.debug("Save Computation Data");
			if (canWriteOnShub)
				saveComputationOnWS(inputsManager.getProvenanceData(), outputmanager.getProvenanceData(), agent,
						generatedFiles);
		} else {
			// Cancelled mid-run: surface it to the catch block below.
			LOGGER.debug("Computation interrupted - no update");
			throw new Exception("Computation cancelled");
		}
		LOGGER.debug("All done");
		operationResult = OperationResult.SUCCESS;
	} catch (Exception e) {
		operationResult = OperationResult.FAILED;
		LOGGER.error("Error execution Algorithm {}", algorithm, e);
		// Exit status -1 means user cancellation, -2 a genuine failure.
		int exitstatus = -2;
		if (isCancelled())
			exitstatus = -1;

		// NOTE(review): the "else if (canWriteOnShub)" below repeats the inner
		// condition, so it is unreachable (dangling else binds to the inner if).
		// The intent was probably "if (inputsManager != null && canWriteOnShub)
		// ... else if (canWriteOnShub) ..." — confirm before changing.
		if (inputsManager != null)
			if (canWriteOnShub)
				updateComputationOnWS(exitstatus, e.getMessage(), inputsManager.getProvenanceData(),
						generatedFiles);
			else if (canWriteOnShub)
				updateComputationOnWS(exitstatus, e.getMessage());
		if (isCancelled())
			throw new Exception("Computation cancelled");
		else
			throw e;
	} finally {
		LOGGER.debug("accounting algorithm");
		// An exception thrown before either assignment leaves the result null; account it as FAILED.
		if(operationResult==null){
			operationResult=OperationResult.FAILED;
		}
		accountAlgorithmExecution(startTimeLong, System.currentTimeMillis(), operationResult);
		LOGGER.debug("Deleting Input Tables");
		deleteTemporaryTables(generatedInputTables);
		LOGGER.debug("Deleting Output Tables");
		deleteTemporaryTables(generatedOutputTables);
		// LOGGER.debug("Deleting Files");
		// deleteGeneratedFiles(generatedFiles);
		// remove this computation from the list
		removeComputation(computationSession);
		// cleanResources();
		time("Cleaning of resources");
		displayTimes();
		cleanResources();
		if (observer != null)
			observer.isFinished(this);
		LOGGER.debug("All done - Computation Finished");
		Files.deleteIfExists(lockFile);
	}

}
|
2016-09-23 10:39:21 +02:00
|
|
|
|
2019-03-27 18:34:46 +01:00
|
|
|
private boolean checkWriteAuthorization(String username) {
|
2021-05-25 11:14:58 +02:00
|
|
|
if (env != null && env.getShubUsersExcluded() != null) {
|
2019-04-17 17:36:38 +02:00
|
|
|
if (env.getShubUsersExcluded().isEmpty()) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
if (env.getShubUsersExcluded().contains(username)) {
|
|
|
|
return false;
|
|
|
|
}
|
2019-03-27 18:34:46 +01:00
|
|
|
}
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2017-09-19 17:19:27 +02:00
|
|
|
private void accountAlgorithmExecution(long start, long end, OperationResult result) {
|
2021-05-25 11:14:58 +02:00
|
|
|
try {
|
2017-09-19 17:19:27 +02:00
|
|
|
JobUsageRecord jobUsageRecord = new JobUsageRecord();
|
|
|
|
jobUsageRecord.setJobName(this.getAlgorithmClass().getSimpleName());
|
|
|
|
jobUsageRecord.setConsumerId(tokenm.getUserName());
|
2021-05-25 11:14:58 +02:00
|
|
|
jobUsageRecord.setDuration(end - start);
|
2017-09-19 17:19:27 +02:00
|
|
|
jobUsageRecord.setOperationResult(result);
|
|
|
|
jobUsageRecord.setServiceName("DataMiner");
|
|
|
|
jobUsageRecord.setServiceClass("WPS");
|
|
|
|
jobUsageRecord.setHost(WPSConfig.getInstance().getWPSConfig().getServer().getHostname());
|
|
|
|
jobUsageRecord.setCallerQualifier(tokenm.getTokenQualifier());
|
2021-05-25 11:14:58 +02:00
|
|
|
|
|
|
|
AccountingPersistence accountingPersistence = AccountingPersistenceFactory.getPersistence();
|
2017-09-19 17:19:27 +02:00
|
|
|
accountingPersistence.account(jobUsageRecord);
|
2021-05-25 11:14:58 +02:00
|
|
|
} catch (Throwable e) {
|
|
|
|
LOGGER.error("error accounting algorithm execution", e);
|
2016-04-01 13:09:40 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
public class StatusUpdater implements Runnable {
|
2021-05-25 11:14:58 +02:00
|
|
|
|
2019-04-17 17:36:38 +02:00
|
|
|
private boolean canWrite = true;
|
2021-05-25 11:14:58 +02:00
|
|
|
|
2019-04-17 17:36:38 +02:00
|
|
|
public StatusUpdater(boolean canWrite) {
|
|
|
|
this.canWrite = canWrite;
|
|
|
|
}
|
2021-05-25 11:14:58 +02:00
|
|
|
|
2016-04-01 13:09:40 +02:00
|
|
|
@Override
|
|
|
|
public void run() {
|
2017-07-13 11:14:22 +02:00
|
|
|
while (agent != null && !isCancelled() && agent.getStatus() < 100) {
|
2016-04-01 13:09:40 +02:00
|
|
|
try {
|
2019-04-17 17:36:38 +02:00
|
|
|
updateStatus(agent.getStatus(), canWrite);
|
2016-04-01 13:09:40 +02:00
|
|
|
Thread.sleep(10000);
|
2021-05-25 11:14:58 +02:00
|
|
|
} catch (InterruptedException e) {
|
|
|
|
}
|
2016-04-01 13:09:40 +02:00
|
|
|
}
|
2017-05-05 10:08:44 +02:00
|
|
|
LOGGER.info("Status updater terminated");
|
2016-04-01 13:09:40 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-04-17 17:36:38 +02:00
|
|
|
private void runStatusUpdater(boolean canWrite) {
|
|
|
|
StatusUpdater updater = new StatusUpdater(canWrite);
|
2016-04-01 13:09:40 +02:00
|
|
|
|
|
|
|
Thread t = new Thread(updater);
|
|
|
|
t.start();
|
2017-05-05 10:08:44 +02:00
|
|
|
LOGGER.debug("Provenance manager running");
|
2016-04-01 13:09:40 +02:00
|
|
|
}
|
|
|
|
|
2021-05-25 11:14:58 +02:00
|
|
|
private void saveComputationOnWS(List<StoredData> inputData, List<StoredData> outputData, ComputationalAgent agent,
|
|
|
|
List<File> generatedFiles) {
|
2018-11-19 10:03:18 +01:00
|
|
|
LOGGER.debug("Save Computation On WS");
|
2021-05-25 11:14:58 +02:00
|
|
|
LOGGER.debug("InputData: " + inputData);
|
|
|
|
LOGGER.debug("OutputData: " + outputData);
|
|
|
|
LOGGER.debug("Agent: " + agent);
|
|
|
|
LOGGER.debug("Generated files: " + generatedFiles);
|
2017-05-05 10:08:44 +02:00
|
|
|
LOGGER.debug("Provenance manager started for operator " + this.getClass().getCanonicalName());
|
2021-05-25 11:14:58 +02:00
|
|
|
|
|
|
|
ComputationData computation = new ComputationData(config.getTaskID(), config.getAgent(), agent.getDescription(),
|
|
|
|
agent.getInfrastructure().name(), startTime, endTime, "100", config.getTaskID(),
|
|
|
|
config.getParam(ConfigurationManager.serviceUserNameParameterVariable), config.getGcubeScope(),
|
|
|
|
this.getClass().getCanonicalName());
|
2016-04-01 13:09:40 +02:00
|
|
|
// post on WS
|
|
|
|
DataspaceManager manager = new DataspaceManager(config, computation, inputData, outputData, generatedFiles);
|
2017-06-27 15:22:46 +02:00
|
|
|
|
2016-04-01 13:09:40 +02:00
|
|
|
Thread t = new Thread(manager);
|
|
|
|
t.start();
|
2017-05-05 10:08:44 +02:00
|
|
|
LOGGER.debug("Provenance manager running");
|
2016-04-01 13:09:40 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
private void time(String label) {
|
|
|
|
times.put(label, System.currentTimeMillis());
|
|
|
|
}
|
|
|
|
|
|
|
|
private void displayTimes() {
|
2017-05-05 10:08:44 +02:00
|
|
|
LOGGER.debug("Times Summary:");
|
|
|
|
LOGGER.debug("Label;Elapsed(ms);Time");
|
2016-04-01 13:09:40 +02:00
|
|
|
long prevtime = 0;
|
|
|
|
long inittime = 0;
|
|
|
|
for (String label : times.keySet()) {
|
|
|
|
long currentTime = times.get(label);
|
|
|
|
if (prevtime == 0) {
|
|
|
|
prevtime = currentTime;
|
|
|
|
inittime = currentTime;
|
|
|
|
}
|
2017-05-05 10:08:44 +02:00
|
|
|
LOGGER.debug(label + ";" + (currentTime - prevtime) + ";" + new Date(currentTime));
|
2016-04-01 13:09:40 +02:00
|
|
|
prevtime = currentTime;
|
|
|
|
}
|
2017-05-05 10:08:44 +02:00
|
|
|
LOGGER.debug("Total Elapsed;" + (prevtime - inittime) + ";" + new Date(prevtime));
|
2016-04-01 13:09:40 +02:00
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
	/**
	 * Releases computation resources: drops references to the timing map and
	 * the agent, truncates leaked-but-deleted open files held by the tomcat
	 * process, and finally suggests a GC run.
	 */
	private void cleanResources() {
		times = null;
		agent = null;
		// manage open files and garbage
		LOGGER.debug("Managing open files");
		// String checkOpenFiles = "ls -l /proc/*/fd/* 2|grep \"wps/ecocfg\"";
		try {
			// for each tomcat file descriptor pointing at a deleted file,
			// print "<size> <ls -l columns 6..12>" (size first, then the
			// descriptor path and its "-> target (deleted)" tail)
			String checkOpenFiles = "for i in `ls -l /proc/*/fd/* 2>/dev/null | grep delete | grep tomcat | awk '{print $9}'`; do du -hL $i | awk '{print $1}' | tr '\n' ' '; ls -l $i | awk '{print $6\" \"$7\" \"$8\" \"$9\" \"$10\" \"$11\" \"$12}'; done";
			List<String> openFiles = command(checkOpenFiles, "./");
			LOGGER.debug("Open Files " + openFiles);

			if (openFiles != null) {
				for (String openFile : openFiles) {
					// only consider readable entries still marked deleted
					if (!openFile.contains("cannot access") && openFile.contains("(deleted)")) {
						// first token is the human-readable size from du
						String size = openFile.substring(0, openFile.indexOf(" ")).trim();
						// despite the name, "pid" is the /proc/<pid>/fd/<n>
						// descriptor path, cut before the "->" link arrow
						String pid = openFile.substring(openFile.indexOf("/proc/"), openFile.indexOf("->"));
						pid = pid.trim();
						if (!size.equals("0")) {
							LOGGER.debug("Killing " + pid + " with size " + size);
							// ":> path" truncates the descriptor, freeing the
							// disk space held by the deleted file
							command(":>" + pid, "./");
						}
					}
				}
			}

		} catch (Exception e) {
			// best effort: parsing assumes the exact ls/du output layout above
			LOGGER.debug("Could not kill files " + e.getLocalizedMessage());
		}

		System.gc();
	}
|
|
|
|
|
2016-09-23 10:39:21 +02:00
|
|
|
public static List<String> command(final String cmdline, final String directory) {
|
|
|
|
try {
|
2021-05-25 11:14:58 +02:00
|
|
|
Process process = new ProcessBuilder(new String[] { "bash", "-c", cmdline }).redirectErrorStream(true)
|
|
|
|
.directory(new File(directory)).start();
|
2016-09-23 10:39:21 +02:00
|
|
|
|
|
|
|
List<String> output = new ArrayList<String>();
|
|
|
|
BufferedReader br = new BufferedReader(new InputStreamReader(process.getInputStream()));
|
|
|
|
String line = null;
|
|
|
|
while ((line = br.readLine()) != null)
|
|
|
|
output.add(line);
|
|
|
|
|
|
|
|
// There should really be a timeout here.
|
|
|
|
if (0 != process.waitFor())
|
|
|
|
return null;
|
|
|
|
|
|
|
|
return output;
|
|
|
|
|
|
|
|
} catch (Exception e) {
|
|
|
|
return null;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-07-13 11:14:22 +02:00
|
|
|
@Override
|
|
|
|
public void setObserver(Observer o) {
|
2021-05-25 11:14:58 +02:00
|
|
|
LOGGER.debug("setting observer in {} ", wpsExternalID);
|
|
|
|
this.observer = o;
|
2017-07-13 11:14:22 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
@Override
|
|
|
|
public synchronized boolean cancel() {
|
2021-05-25 11:14:58 +02:00
|
|
|
if (!cancelled) {
|
|
|
|
LOGGER.debug("COMPUTATION INTERRUPTED! ({})", wpsExternalID);
|
|
|
|
try {
|
|
|
|
if (agent != null) {
|
2017-07-13 11:14:22 +02:00
|
|
|
agent.shutdown();
|
|
|
|
agent = null;
|
|
|
|
}
|
|
|
|
|
|
|
|
super.update(new Integer((int) -1));
|
|
|
|
try {
|
|
|
|
updateComputationOnWS(-1, null);
|
|
|
|
} catch (Exception e) {
|
|
|
|
|
|
|
|
}
|
|
|
|
System.gc();
|
|
|
|
cancelled = true;
|
2021-05-25 11:14:58 +02:00
|
|
|
} catch (Exception e) {
|
|
|
|
LOGGER.warn("error cancelling computation with id {}", wpsExternalID);
|
2017-07-13 11:14:22 +02:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
} else {
|
2021-05-25 11:14:58 +02:00
|
|
|
LOGGER.debug("COMPUTATION ALREADY INTERRUPT! ({})", wpsExternalID);
|
2017-07-13 11:14:22 +02:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
return true;
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
@Override
|
|
|
|
public boolean isCancelled() {
|
|
|
|
return cancelled;
|
|
|
|
}
|
|
|
|
|
2016-04-01 13:09:40 +02:00
|
|
|
}
|