diff --git a/src/main/java/org/gcube/data/analysis/wps/CancelComputation.java b/src/main/java/org/gcube/data/analysis/wps/CancelComputation.java
index b97c2f7..adb626e 100644
--- a/src/main/java/org/gcube/data/analysis/wps/CancelComputation.java
+++ b/src/main/java/org/gcube/data/analysis/wps/CancelComputation.java
@@ -1,6 +1,5 @@
 package org.gcube.data.analysis.wps;
 
-import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
@@ -14,9 +13,8 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.commons.lang.StringUtils;
+import org.gcube.data.analysis.wps.processes.Processes;
 import org.n52.wps.commons.XMLUtil;
-import org.n52.wps.server.database.DatabaseFactory;
-import org.n52.wps.server.database.IDatabase;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -48,16 +46,16 @@ public class CancelComputation extends HttpServlet {
 		// id of result to retrieve.
 		String id = request.getParameter("id");
-		LOGGER.debug("CANCEL COMPUTATION -> RETRIEVING ID " + id);
+		LOGGER.debug("CANCEL COMPUTATION -> RETRIEVING ID {}", id);
 		if (StringUtils.isEmpty(id)) {
 			errorResponse("id parameter missing", response);
 		} else {
-			LOGGER.debug("CANCEL COMPUTATION -> ID RETRIEVED " + id);
+			LOGGER.debug("CANCEL COMPUTATION -> ID {} RETRIEVED ", id);
 			if (!isIDValid(id)) {
 				errorResponse("id parameter not valid", response);
 			}
-			LOGGER.debug("CANCEL COMPUTATION -> ID IS VALID " + id);
-			IDatabase db = DatabaseFactory.getDatabase();
+			LOGGER.debug("CANCEL COMPUTATION -> ID {} IS VALID ",id);
+			/*IDatabase db = DatabaseFactory.getDatabase();
 			long len = db.getContentLengthForStoreResponse(id);
 			LOGGER.debug("CANCEL COMPUTATION -> INITIAL ID RESPONSE LENGTH " + len);
@@ -78,8 +76,14 @@ public class CancelComputation extends HttpServlet {
 			} catch (Exception e) {
 				LOGGER.error("error in do get",e);
 			} finally {
-			}
+			}*/
+			if (Processes.getRunningProcesses().containsKey(id)){
+				boolean cancelled = Processes.getRunningProcesses().get(id).cancel();
+				if(!cancelled)
+					LOGGER.debug("CANCEL COMPUTATION -> process with id {} already cancelled ", id);
+			} else LOGGER.debug("CANCEL COMPUTATION -> no running process with id {} (probably it already finished) ", id);
 		}
+
 	}
 
 	protected void errorResponse(String error, HttpServletResponse response) throws IOException {
diff --git a/src/main/java/org/gcube/data/analysis/wps/ExecuteRequest.java b/src/main/java/org/gcube/data/analysis/wps/ExecuteRequest.java
index 105aa98..571b562 100644
--- a/src/main/java/org/gcube/data/analysis/wps/ExecuteRequest.java
+++ b/src/main/java/org/gcube/data/analysis/wps/ExecuteRequest.java
@@ -598,6 +598,7 @@ public class ExecuteRequest extends Request implements IObserver {
 		}
 		if (algorithm instanceof AbstractEcologicalEngineMapper) {
 			((AbstractEcologicalEngineMapper) algorithm).setWpsExternalID(wpsid);
+			((AbstractEcologicalEngineMapper) algorithm).setObserver(GCubeObserver.getObserver());
 		}
 
 		if (algorithm instanceof AbstractTransactionalAlgorithm) {
diff --git a/src/main/java/org/gcube/data/analysis/wps/GCubeObserver.java b/src/main/java/org/gcube/data/analysis/wps/GCubeObserver.java
new file mode 100644
index 0000000..63d543a
--- /dev/null
+++ b/src/main/java/org/gcube/data/analysis/wps/GCubeObserver.java
@@ -0,0 +1,38 @@
+package org.gcube.data.analysis.wps;
+
+import org.gcube.data.analysis.wps.processes.Processes;
+import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;
+import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.utils.Observable;
+import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.utils.Observer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class GCubeObserver implements Observer{
+
+	private static final Logger LOGGER = LoggerFactory.getLogger(GCubeObserver.class);
+
+	public static GCubeObserver getObserver(){
+		return instance;
+	}
+
+	private static GCubeObserver instance = new GCubeObserver();
+
+	private GCubeObserver(){}
+
+	@Override
+	public void isFinished(Observable o) {
+		AbstractEcologicalEngineMapper algorithm = (AbstractEcologicalEngineMapper)o;
+		LOGGER.debug("computation with id {} finished",algorithm.wpsExternalID);
+		Processes.getRunningProcesses().remove(algorithm.wpsExternalID);
+	}
+
+	@Override
+	public void isStarted(Observable o) {
+		AbstractEcologicalEngineMapper algorithm = (AbstractEcologicalEngineMapper)o;
+		LOGGER.debug("computation with id {} started",algorithm.wpsExternalID);
+		Processes.getRunningProcesses().put(algorithm.wpsExternalID,algorithm);
+	}
+
+
+
+}
diff --git a/src/main/java/org/gcube/data/analysis/wps/processes/Processes.java b/src/main/java/org/gcube/data/analysis/wps/processes/Processes.java
index 78f1a5c..fd2f0b0 100644
--- a/src/main/java/org/gcube/data/analysis/wps/processes/Processes.java
+++ b/src/main/java/org/gcube/data/analysis/wps/processes/Processes.java
@@ -3,22 +3,15 @@ package org.gcube.data.analysis.wps.processes;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.WeakHashMap;
 
 import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.utils.Cancellable;
 
 public class Processes {
 
-	private Map runningProcesses = Collections.synchronizedMap(new HashMap());
-
-	private Map cancelledProcesses = Collections.synchronizedMap(new WeakHashMap());
+	private static Map runningProcesses = Collections.synchronizedMap(new HashMap());
 
-	public Map getRunningProcesses() {
+	public static Map getRunningProcesses() {
 		return runningProcesses;
 	}
 
-	public Map getCancelledProcesses() {
-		return cancelledProcesses;
-	}
-
 }
diff --git a/src/main/webapp/WEB-INF/README b/src/main/webapp/WEB-INF/README
index 61fbb2e..cbf1700 100644
--- a/src/main/webapp/WEB-INF/README
+++ b/src/main/webapp/WEB-INF/README
@@ -25,7 +25,7 @@ no. 654119), SoBigData (grant no. 654024);
 
 Version
 --------------------------------------------------
-1.0.0-SNAPSHOT (${maven.build.timestamp})
+1.0.0-SNAPSHOT (2017-07-12)
 
 Please see the file named "changelog.xml" in this directory for the release notes.
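
Note (illustration only, not part of the patch): the sketch below is a minimal, self-contained Java example of the cancellation flow these changes introduce. GCubeObserver registers each running computation under its WPS id on isStarted(), removes it on isFinished(), and CancelComputation cancels by id through the shared map in Processes. CancellableJob, RUNNING and CancellationFlowSketch are hypothetical stand-ins; the real code uses the gCube Cancellable contract and Processes.getRunningProcesses(), and the boolean cancel() signature is assumed from the servlet code above.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class CancellationFlowSketch {

    /** Hypothetical stand-in for the gCube Cancellable contract (assumed to expose boolean cancel()). */
    interface CancellableJob {
        boolean cancel();
    }

    /** Stand-in for Processes.getRunningProcesses(): a synchronized map of running computations keyed by WPS id. */
    static final Map<String, CancellableJob> RUNNING =
            Collections.synchronizedMap(new HashMap<String, CancellableJob>());

    public static void main(String[] args) {
        String wpsId = "wps-computation-123";

        // isStarted(Observable): the observer registers the computation under its WPS id.
        RUNNING.put(wpsId, new CancellableJob() {
            private volatile boolean cancelled = false;

            @Override
            public boolean cancel() {
                boolean firstCall = !cancelled; // false if it was already cancelled
                cancelled = true;
                return firstCall;
            }
        });

        // CancelComputation servlet: cancel by id if the computation is still running.
        if (RUNNING.containsKey(wpsId)) {
            boolean cancelled = RUNNING.get(wpsId).cancel();
            System.out.println("cancelled now: " + cancelled);
        } else {
            System.out.println("no running process with id " + wpsId + " (probably already finished)");
        }

        // isFinished(Observable): the observer removes the entry once the computation ends.
        RUNNING.remove(wpsId);
    }
}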