ref 20971: ShortLink - Check for obsolete short urls
https://support.d4science.org/issues/20971 Fixed ShortLink urls.
parent 2d982938ce
commit dbb87e55f7
@@ -51,7 +51,12 @@ import org.slf4j.LoggerFactory;
 public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm implements Observable, Cancellable {
 
 /**
- * Deploying procedure: 1 - modify configuration files 2 - modify resource file: resources/templates/setup.cfg 3 - generate classes with ClassGenerator 4 - add new classes in the wps_config.xml on the wps web app config folder 5 - produce the Jar file of this project 6 - copy the jar file in the lib folder of the wps web app change the server parameters in the wps_config.xml file
+ * Deploying procedure: 1 - modify configuration files 2 - modify resource
+ * file: resources/templates/setup.cfg 3 - generate classes with
+ * ClassGenerator 4 - add new classes in the wps_config.xml on the wps web
+ * app config folder 5 - produce the Jar file of this project 6 - copy the
+ * jar file in the lib folder of the wps web app change the server
+ * parameters in the wps_config.xml file
 */
 
 private static final Logger LOGGER = LoggerFactory.getLogger(AbstractEcologicalEngineMapper.class);
@@ -248,6 +253,7 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm implements Observable, Cancellable {
 
 float previousStatus = -3;
 String host = WPSConfig.getInstance().getWPSConfig().getServer().getHostname();
 
 public void updateStatus(float status, boolean canWrite) {
 if (agent != null) {
 if (status != previousStatus) {
@@ -255,7 +261,8 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm implements Observable, Cancellable {
 previousStatus = status;
 super.update(new Integer((int) status));
 try {
-if (canWrite) updateComputationOnWS(status, null);
+if (canWrite)
+    updateComputationOnWS(status, null);
 } catch (Exception e) {
 LOGGER.warn("error updating compution on WS");
 }
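The change above only re-wraps the single-line canWrite guard; behaviour is unchanged. For readers new to this class, the surrounding method implements a simple throttling pattern: a progress value is forwarded to the WPS framework and mirrored onto the Workspace only when it differs from the last value sent. A minimal, self-contained sketch of that idea (class and field names here are illustrative, not the mapper's own):

    // Illustrative sketch of the throttled status-update pattern; assumes SLF4J on the classpath.
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class ThrottledStatusNotifier {
        private static final Logger LOGGER = LoggerFactory.getLogger(ThrottledStatusNotifier.class);
        private float previousStatus = -3; // sentinel: nothing sent yet

        void updateStatus(float status, boolean canWrite) {
            if (status == previousStatus)
                return; // identical values are not re-sent
            previousStatus = status;
            LOGGER.debug("progress: {}", status);
            if (canWrite) {
                // the real mapper calls updateComputationOnWS(status, null) here
                // and only logs a warning if the Workspace update fails
            }
        }
    }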
@@ -275,6 +282,7 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm implements Observable, Cancellable {
 class RunDataspaceManager implements Runnable {
 List<StoredData> inputData;
 List<File> generatedData;
 
 public RunDataspaceManager(List<StoredData> inputData, List<File> generatedData) {
 this.inputData = inputData;
 this.generatedData = generatedData;
@@ -286,12 +294,14 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm implements Observable, Cancellable {
 LOGGER.debug("Dataspace->Status updater->Writing computational info on the WS asyncronously");
 manager.writeRunningComputationData();
 } catch (Exception ez) {
-LOGGER.error("Dataspace->Status updater->Impossible to write computation information on the Workspace",ez);
+LOGGER.error("Dataspace->Status updater->Impossible to write computation information on the Workspace",
+    ez);
 }
 }
 };
 
-public void updateComputationOnWS(float status, String exception, List<StoredData> inputData, List<File> generatedData) {
+public void updateComputationOnWS(float status, String exception, List<StoredData> inputData,
+    List<File> generatedData) {
 if (currentComputation != null) {
 currentComputation.setStatus("" + status);
 if (exception != null && exception.length() > 0)
@@ -301,14 +311,11 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm implements Observable, Cancellable {
 rundm.run();
 
 /*
-Thread t = new Thread(rundm);
-t.start();
+ * Thread t = new Thread(rundm); t.start();
 */
 }
 }
 
-
-
 @Execute
 public void run() throws Exception {
 if (observer != null)
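Besides the comment reflow, this hunk documents a design choice: the dataspace write is executed inline on the calling thread through rundm.run(), while the commented-out lines keep the asynchronous alternative visible. A short sketch of the two options, using the RunDataspaceManager constructor shown earlier in this diff (inputData and generatedData stand for the lists the caller has already built):

    // Synchronous: the computation waits for the Workspace write to complete (current behaviour).
    RunDataspaceManager rundm = new RunDataspaceManager(inputData, generatedData);
    rundm.run();

    // Asynchronous alternative, preserved only as a comment in the source:
    // new Thread(rundm).start();

Running it inline blocks the computation until the Workspace write finishes; the threaded variant would return immediately instead.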
@@ -324,7 +331,8 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm implements Observable, Cancellable {
 List<String> generatedInputTables = null;
 List<String> generatedOutputTables = null;
 List<File> generatedFiles = null;
-//String date = new java.text.SimpleDateFormat("dd_MM_yyyy_HH:mm:ss").format(System.currentTimeMillis());
+// String date = new
+// java.text.SimpleDateFormat("dd_MM_yyyy_HH:mm:ss").format(System.currentTimeMillis());
 String computationSession = this.getAlgorithmClass().getSimpleName() + "_ID_" + UUID.randomUUID().toString();
 if (wpsExternalID != null) {
 LOGGER.info("Using wps External ID " + wpsExternalID);
@@ -332,7 +340,10 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm implements Observable, Cancellable {
 } else
 LOGGER.info("Wps External ID not set");
 InputsManager inputsManager = null;
-ConfigurationManager configManager = new ConfigurationManager(this.env); // initializes parameters from web.xml
+ConfigurationManager configManager = new ConfigurationManager(this.env); // initializes
+    // parameters
+    // from
+    // web.xml
 manageUserToken();
 
 boolean canWriteOnShub = checkWriteAuthorization(tokenm.getUserName());
@@ -364,8 +375,10 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm implements Observable, Cancellable {
 String scope = configManager.getScope();
 String username = configManager.getUsername();
 
-LOGGER.info("1 - Algorithm environment initialized in scope " + scope + " with user name " + username + " and session " + computationSession);
-LOGGER.info("Max allowed computations " + ConfigurationManager.getMaxComputations() + " using storage " + ConfigurationManager.useStorage());
+LOGGER.info("1 - Algorithm environment initialized in scope " + scope + " with user name " + username
+    + " and session " + computationSession);
+LOGGER.info("Max allowed computations " + ConfigurationManager.getMaxComputations() + " using storage "
+    + ConfigurationManager.useStorage());
 // init the infrastructure dialoguer
 LOGGER.info("2 - Initializing connection to the e-Infrastructure");
 infrastructureDialoguer = new InfrastructureDialoguer(scope);
@@ -400,7 +413,9 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm implements Observable, Cancellable {
 LOGGER.info("6 - Translating WPS Inputs into Ecological Engine Inputs");
 LOGGER.debug("Operator class is " + this.getClass().getCanonicalName());
 // build computation Data
-currentComputation = new ComputationData(config.getTaskID(), config.getAgent(), "", "", startTime, "-", "0", config.getTaskID(), configManager.getUsername(), config.getGcubeScope(), this.getClass().getCanonicalName());
+currentComputation = new ComputationData(config.getTaskID(), config.getAgent(), "", "", startTime, "-", "0",
+    config.getTaskID(), configManager.getUsername(), config.getGcubeScope(),
+    this.getClass().getCanonicalName());
 inputsManager.mergeWpsAndEcologicalInputs(supportDatabaseInfo, dataminerInputParameters);
 generatedInputTables = inputsManager.getGeneratedTables();
 generatedFiles = inputsManager.getGeneratedInputFiles();
@@ -459,7 +474,9 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm implements Observable, Cancellable {
 endTime = new java.text.SimpleDateFormat("dd/MM/yyyy HH:mm:ss").format(System.currentTimeMillis());
 if (!isCancelled()) {
 LOGGER.debug("Save Computation Data");
-if (canWriteOnShub) saveComputationOnWS(inputsManager.getProvenanceData(), outputmanager.getProvenanceData(), agent, generatedFiles);
+if (canWriteOnShub)
+    saveComputationOnWS(inputsManager.getProvenanceData(), outputmanager.getProvenanceData(), agent,
+        generatedFiles);
 } else {
 LOGGER.debug("Computation interrupted - no update");
 throw new Exception("Computation cancelled");
@@ -474,15 +491,20 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm implements Observable, Cancellable {
 exitstatus = -1;
 
 if (inputsManager != null)
-if (canWriteOnShub) updateComputationOnWS(exitstatus, e.getMessage(), inputsManager.getProvenanceData(), generatedFiles);
-else
-if (canWriteOnShub) updateComputationOnWS(exitstatus, e.getMessage());
+if (canWriteOnShub)
+    updateComputationOnWS(exitstatus, e.getMessage(), inputsManager.getProvenanceData(),
+        generatedFiles);
+else if (canWriteOnShub)
+    updateComputationOnWS(exitstatus, e.getMessage());
 if (isCancelled())
 throw new Exception("Computation cancelled");
 else
 throw e;
 } finally {
 LOGGER.debug("accounting algorithm");
+if(operationResult==null){
+    operationResult=OperationResult.FAILED;
+}
 accountAlgorithmExecution(startTimeLong, System.currentTimeMillis(), operationResult);
 LOGGER.debug("Deleting Input Tables");
 deleteTemporaryTables(generatedInputTables);
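Besides the line wrapping, this hunk carries the one functional change visible in this part of the diff: before the execution is accounted, a null operationResult is coerced to OperationResult.FAILED, so a run that ended without an explicit outcome is never accounted with a missing result. A reduced sketch of the finally-block logic (OperationResult is assumed to expose at least a FAILED constant):

    // Sketch of the defaulting step added in the finally block above.
    if (operationResult == null) {
        operationResult = OperationResult.FAILED; // an unknown outcome is accounted as a failure
    }
    accountAlgorithmExecution(startTimeLong, System.currentTimeMillis(), operationResult);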
@@ -496,7 +518,8 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm implements Observable, Cancellable {
 time("Cleaning of resources");
 displayTimes();
 cleanResources();
-if (observer!=null) observer.isFinished(this);
+if (observer != null)
+    observer.isFinished(this);
 LOGGER.debug("All done - Computation Finished");
 Files.deleteIfExists(lockFile);
 }
@@ -527,8 +550,7 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm implements Observable, Cancellable {
 jobUsageRecord.setHost(WPSConfig.getInstance().getWPSConfig().getServer().getHostname());
 jobUsageRecord.setCallerQualifier(tokenm.getTokenQualifier());
 
-AccountingPersistence accountingPersistence =
-AccountingPersistenceFactory.getPersistence();
+AccountingPersistence accountingPersistence = AccountingPersistenceFactory.getPersistence();
 accountingPersistence.account(jobUsageRecord);
 } catch (Throwable e) {
 LOGGER.error("error accounting algorithm execution", e);
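Here the accountingPersistence assignment is simply re-joined onto one line. Put together, the calls visible in this hunk form the accounting step sketched below; only methods that actually appear in the diff are used, and the JobUsageRecord no-argument constructor is an assumption:

    // Hedged recap of the accounting flow, assembled only from calls visible in this diff.
    JobUsageRecord jobUsageRecord = new JobUsageRecord(); // assumed constructor
    jobUsageRecord.setHost(WPSConfig.getInstance().getWPSConfig().getServer().getHostname());
    jobUsageRecord.setCallerQualifier(tokenm.getTokenQualifier());
    try {
        AccountingPersistence accountingPersistence = AccountingPersistenceFactory.getPersistence();
        accountingPersistence.account(jobUsageRecord);
    } catch (Throwable e) {
        LOGGER.error("error accounting algorithm execution", e);
    }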
@@ -550,7 +572,8 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm implements Observable, Cancellable {
 try {
 updateStatus(agent.getStatus(), canWrite);
 Thread.sleep(10000);
-} catch (InterruptedException e) {}
+} catch (InterruptedException e) {
+}
 }
 LOGGER.info("Status updater terminated");
 }
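The empty InterruptedException handler is merely expanded onto two lines; the surrounding code is the status updater, which polls the agent roughly every ten seconds and pushes its progress through updateStatus. A compact sketch of that polling pattern, with stopPolling standing in for whatever termination condition the real loop uses (the flag name is illustrative):

    // Illustrative polling loop: push the agent's progress every ~10 s until asked to stop.
    while (!stopPolling) {
        try {
            updateStatus(agent.getStatus(), canWrite);
            Thread.sleep(10000); // 10 s between status pushes
        } catch (InterruptedException e) {
            // deliberately ignored, matching the original empty catch; the loop condition decides when to exit
        }
    }
    LOGGER.info("Status updater terminated");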
@@ -564,7 +587,8 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm implements Observable, Cancellable {
 LOGGER.debug("Provenance manager running");
 }
 
-private void saveComputationOnWS(List<StoredData> inputData, List<StoredData> outputData, ComputationalAgent agent, List<File> generatedFiles) {
+private void saveComputationOnWS(List<StoredData> inputData, List<StoredData> outputData, ComputationalAgent agent,
+    List<File> generatedFiles) {
 LOGGER.debug("Save Computation On WS");
 LOGGER.debug("InputData: " + inputData);
 LOGGER.debug("OutputData: " + outputData);
@@ -572,7 +596,10 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm implements Observable, Cancellable {
 LOGGER.debug("Generated files: " + generatedFiles);
 LOGGER.debug("Provenance manager started for operator " + this.getClass().getCanonicalName());
 
-ComputationData computation = new ComputationData(config.getTaskID(), config.getAgent(), agent.getDescription(), agent.getInfrastructure().name(), startTime, endTime, "100", config.getTaskID(), config.getParam(ConfigurationManager.serviceUserNameParameterVariable), config.getGcubeScope(), this.getClass().getCanonicalName());
+ComputationData computation = new ComputationData(config.getTaskID(), config.getAgent(), agent.getDescription(),
+    agent.getInfrastructure().name(), startTime, endTime, "100", config.getTaskID(),
+    config.getParam(ConfigurationManager.serviceUserNameParameterVariable), config.getGcubeScope(),
+    this.getClass().getCanonicalName());
 // post on WS
 DataspaceManager manager = new DataspaceManager(config, computation, inputData, outputData, generatedFiles);
 
@@ -637,7 +664,8 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm implements Observable, Cancellable {
 
 public static List<String> command(final String cmdline, final String directory) {
 try {
-Process process = new ProcessBuilder(new String[] { "bash", "-c", cmdline }).redirectErrorStream(true).directory(new File(directory)).start();
+Process process = new ProcessBuilder(new String[] { "bash", "-c", cmdline }).redirectErrorStream(true)
+    .directory(new File(directory)).start();
 
 List<String> output = new ArrayList<String>();
 BufferedReader br = new BufferedReader(new InputStreamReader(process.getInputStream()));
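The page truncates the diff here, immediately after the reader is created, so the rest of command() is not shown. A typical way to finish this ProcessBuilder pattern, offered purely as an illustrative sketch and not as the repository's actual code, is to drain the merged output stream line by line and hand the lines back to the caller (the enclosing try/catch, cut off above, is assumed to handle the checked IOException):

    // Illustrative continuation of the pattern above; not the file's real code.
    String line;
    while ((line = br.readLine()) != null) {
        output.add(line); // stdout and stderr arrive together because of redirectErrorStream(true)
    }
    br.close();
    return output;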