ref 20971: ShortLink - Check for obsolete short urls

https://support.d4science.org/issues/20971

Fixed ShortLink urls.
This commit is contained in:
Giancarlo Panichi 2021-05-25 11:14:58 +02:00
parent 2d982938ce
commit dbb87e55f7
1 changed file with 98 additions and 70 deletions

View File

@ -48,10 +48,15 @@ import org.n52.wps.server.AbstractAnnotatedAlgorithm;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm implements Observable, Cancellable{ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm implements Observable, Cancellable {
/** /**
* Deploying procedure: 1 - modify configuration files 2 - modify resource file: resources/templates/setup.cfg 3 - generate classes with ClassGenerator 4 - add new classes in the wps_config.xml on the wps web app config folder 5 - produce the Jar file of this project 6 - copy the jar file in the lib folder of the wps web app change the server parameters in the wps_config.xml file * Deploying procedure: 1 - modify configuration files 2 - modify resource
* file: resources/templates/setup.cfg 3 - generate classes with
* ClassGenerator 4 - add new classes in the wps_config.xml on the wps web
* app config folder 5 - produce the Jar file of this project 6 - copy the
* jar file in the lib folder of the wps web app change the server
* parameters in the wps_config.xml file
*/ */
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractEcologicalEngineMapper.class); private static final Logger LOGGER = LoggerFactory.getLogger(AbstractEcologicalEngineMapper.class);
@ -205,7 +210,7 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
LOGGER.debug("Could not drop Temporary Table: " + table + " table is null"); LOGGER.debug("Could not drop Temporary Table: " + table + " table is null");
} }
} catch (Exception e) { } catch (Exception e) {
LOGGER.error("error deleting temporary table",e); LOGGER.error("error deleting temporary table", e);
} finally { } finally {
DatabaseUtils.closeDBConnection(dbConnection); DatabaseUtils.closeDBConnection(dbConnection);
} }
@ -248,14 +253,16 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
float previousStatus = -3; float previousStatus = -3;
String host = WPSConfig.getInstance().getWPSConfig().getServer().getHostname(); String host = WPSConfig.getInstance().getWPSConfig().getServer().getHostname();
public void updateStatus(float status, boolean canWrite) { public void updateStatus(float status, boolean canWrite) {
if (agent != null) { if (agent != null) {
if (status != previousStatus) { if (status != previousStatus) {
LOGGER.debug("STATUS update to: {} ", status ); LOGGER.debug("STATUS update to: {} ", status);
previousStatus = status; previousStatus = status;
super.update(new Integer((int) status)); super.update(new Integer((int) status));
try { try {
if (canWrite) updateComputationOnWS(status, null); if (canWrite)
updateComputationOnWS(status, null);
} catch (Exception e) { } catch (Exception e) {
LOGGER.warn("error updating compution on WS"); LOGGER.warn("error updating compution on WS");
} }
@ -272,12 +279,13 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
updateComputationOnWS(status, exception, null, null); updateComputationOnWS(status, exception, null, null);
} }
class RunDataspaceManager implements Runnable{ class RunDataspaceManager implements Runnable {
List<StoredData> inputData; List<StoredData> inputData;
List<File> generatedData; List<File> generatedData;
public RunDataspaceManager(List<StoredData> inputData, List<File> generatedData){
this.inputData=inputData; public RunDataspaceManager(List<StoredData> inputData, List<File> generatedData) {
this.generatedData=generatedData; this.inputData = inputData;
this.generatedData = generatedData;
} }
public void run() { public void run() {
@ -286,35 +294,34 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
LOGGER.debug("Dataspace->Status updater->Writing computational info on the WS asyncronously"); LOGGER.debug("Dataspace->Status updater->Writing computational info on the WS asyncronously");
manager.writeRunningComputationData(); manager.writeRunningComputationData();
} catch (Exception ez) { } catch (Exception ez) {
LOGGER.error("Dataspace->Status updater->Impossible to write computation information on the Workspace",ez); LOGGER.error("Dataspace->Status updater->Impossible to write computation information on the Workspace",
ez);
} }
} }
}; };
public void updateComputationOnWS(float status, String exception, List<StoredData> inputData, List<File> generatedData) { public void updateComputationOnWS(float status, String exception, List<StoredData> inputData,
List<File> generatedData) {
if (currentComputation != null) { if (currentComputation != null) {
currentComputation.setStatus("" + status); currentComputation.setStatus("" + status);
if (exception != null && exception.length() > 0) if (exception != null && exception.length() > 0)
currentComputation.setException(exception); currentComputation.setException(exception);
LOGGER.debug("RunDataspaceManager: [inputData="+inputData+", generatedData="+generatedData+"]"); LOGGER.debug("RunDataspaceManager: [inputData=" + inputData + ", generatedData=" + generatedData + "]");
RunDataspaceManager rundm = new RunDataspaceManager(inputData,generatedData); RunDataspaceManager rundm = new RunDataspaceManager(inputData, generatedData);
rundm.run(); rundm.run();
/* /*
Thread t = new Thread(rundm); * Thread t = new Thread(rundm); t.start();
t.start();
*/ */
} }
} }
@Execute @Execute
public void run() throws Exception { public void run() throws Exception {
if (observer!=null) if (observer != null)
observer.isStarted(this); observer.isStarted(this);
LOGGER.info("classloader context in this thread is {}",Thread.currentThread().getContextClassLoader()); LOGGER.info("classloader context in this thread is {}", Thread.currentThread().getContextClassLoader());
long startTimeLong = System.currentTimeMillis(); long startTimeLong = System.currentTimeMillis();
@ -324,7 +331,8 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
List<String> generatedInputTables = null; List<String> generatedInputTables = null;
List<String> generatedOutputTables = null; List<String> generatedOutputTables = null;
List<File> generatedFiles = null; List<File> generatedFiles = null;
//String date = new java.text.SimpleDateFormat("dd_MM_yyyy_HH:mm:ss").format(System.currentTimeMillis()); // String date = new
// java.text.SimpleDateFormat("dd_MM_yyyy_HH:mm:ss").format(System.currentTimeMillis());
String computationSession = this.getAlgorithmClass().getSimpleName() + "_ID_" + UUID.randomUUID().toString(); String computationSession = this.getAlgorithmClass().getSimpleName() + "_ID_" + UUID.randomUUID().toString();
if (wpsExternalID != null) { if (wpsExternalID != null) {
LOGGER.info("Using wps External ID " + wpsExternalID); LOGGER.info("Using wps External ID " + wpsExternalID);
@ -332,7 +340,10 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
} else } else
LOGGER.info("Wps External ID not set"); LOGGER.info("Wps External ID not set");
InputsManager inputsManager = null; InputsManager inputsManager = null;
ConfigurationManager configManager = new ConfigurationManager(this.env); // initializes parameters from web.xml ConfigurationManager configManager = new ConfigurationManager(this.env); // initializes
// parameters
// from
// web.xml
manageUserToken(); manageUserToken();
boolean canWriteOnShub = checkWriteAuthorization(tokenm.getUserName()); boolean canWriteOnShub = checkWriteAuthorization(tokenm.getUserName());
@ -341,7 +352,7 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
if (!Files.exists(dir)) if (!Files.exists(dir))
dir = Files.createDirectory(dir); dir = Files.createDirectory(dir);
Path lockFile = Files.createTempFile(dir, "dm", ".lck"); Path lockFile = Files.createTempFile(dir, "dm", ".lck");
LOGGER.info("lock file created {}",lockFile.toUri().toURL()); LOGGER.info("lock file created {}", lockFile.toUri().toURL());
try { try {
// wait for server resources to be available // wait for server resources to be available
@ -364,8 +375,10 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
String scope = configManager.getScope(); String scope = configManager.getScope();
String username = configManager.getUsername(); String username = configManager.getUsername();
LOGGER.info("1 - Algorithm environment initialized in scope " + scope + " with user name " + username + " and session " + computationSession); LOGGER.info("1 - Algorithm environment initialized in scope " + scope + " with user name " + username
LOGGER.info("Max allowed computations " + ConfigurationManager.getMaxComputations() + " using storage " + ConfigurationManager.useStorage()); + " and session " + computationSession);
LOGGER.info("Max allowed computations " + ConfigurationManager.getMaxComputations() + " using storage "
+ ConfigurationManager.useStorage());
// init the infrastructure dialoguer // init the infrastructure dialoguer
LOGGER.info("2 - Initializing connection to the e-Infrastructure"); LOGGER.info("2 - Initializing connection to the e-Infrastructure");
infrastructureDialoguer = new InfrastructureDialoguer(scope); infrastructureDialoguer = new InfrastructureDialoguer(scope);
@ -392,7 +405,7 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
// adding service parameters to the configuration // adding service parameters to the configuration
LOGGER.info("5 - Adding Service parameters to the configuration"); LOGGER.info("5 - Adding Service parameters to the configuration");
List<StatisticalType> dataminerInputParameters = getInputParameters(algorithm); List<StatisticalType> dataminerInputParameters = getInputParameters(algorithm);
LOGGER.debug("Dataminer Algo Default InputParameters: "+dataminerInputParameters); LOGGER.debug("Dataminer Algo Default InputParameters: " + dataminerInputParameters);
inputsManager.addInputServiceParameters(dataminerInputParameters, infrastructureDialoguer); inputsManager.addInputServiceParameters(dataminerInputParameters, infrastructureDialoguer);
time("Service parameters added to the algorithm"); time("Service parameters added to the algorithm");
// merging wps with ecological engine parameters - modifies the // merging wps with ecological engine parameters - modifies the
@ -400,7 +413,9 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
LOGGER.info("6 - Translating WPS Inputs into Ecological Engine Inputs"); LOGGER.info("6 - Translating WPS Inputs into Ecological Engine Inputs");
LOGGER.debug("Operator class is " + this.getClass().getCanonicalName()); LOGGER.debug("Operator class is " + this.getClass().getCanonicalName());
// build computation Data // build computation Data
currentComputation = new ComputationData(config.getTaskID(), config.getAgent(), "", "", startTime, "-", "0", config.getTaskID(), configManager.getUsername(), config.getGcubeScope(), this.getClass().getCanonicalName()); currentComputation = new ComputationData(config.getTaskID(), config.getAgent(), "", "", startTime, "-", "0",
config.getTaskID(), configManager.getUsername(), config.getGcubeScope(),
this.getClass().getCanonicalName());
inputsManager.mergeWpsAndEcologicalInputs(supportDatabaseInfo, dataminerInputParameters); inputsManager.mergeWpsAndEcologicalInputs(supportDatabaseInfo, dataminerInputParameters);
generatedInputTables = inputsManager.getGeneratedTables(); generatedInputTables = inputsManager.getGeneratedTables();
generatedFiles = inputsManager.getGeneratedInputFiles(); generatedFiles = inputsManager.getGeneratedInputFiles();
@ -454,12 +469,14 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
outputmanager.shutdown(); outputmanager.shutdown();
LOGGER.debug("12 - Final Computation Output"); LOGGER.debug("12 - Final Computation Output");
LOGGER.debug("Outputs: "+ outputs); LOGGER.debug("Outputs: " + outputs);
endTime = new java.text.SimpleDateFormat("dd/MM/yyyy HH:mm:ss").format(System.currentTimeMillis()); endTime = new java.text.SimpleDateFormat("dd/MM/yyyy HH:mm:ss").format(System.currentTimeMillis());
if (!isCancelled()) { if (!isCancelled()) {
LOGGER.debug("Save Computation Data"); LOGGER.debug("Save Computation Data");
if (canWriteOnShub) saveComputationOnWS(inputsManager.getProvenanceData(), outputmanager.getProvenanceData(), agent, generatedFiles); if (canWriteOnShub)
saveComputationOnWS(inputsManager.getProvenanceData(), outputmanager.getProvenanceData(), agent,
generatedFiles);
} else { } else {
LOGGER.debug("Computation interrupted - no update"); LOGGER.debug("Computation interrupted - no update");
throw new Exception("Computation cancelled"); throw new Exception("Computation cancelled");
@ -468,21 +485,26 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
operationResult = OperationResult.SUCCESS; operationResult = OperationResult.SUCCESS;
} catch (Exception e) { } catch (Exception e) {
operationResult = OperationResult.FAILED; operationResult = OperationResult.FAILED;
LOGGER.error("Error execution Algorithm {}",algorithm,e); LOGGER.error("Error execution Algorithm {}", algorithm, e);
int exitstatus = -2; int exitstatus = -2;
if (isCancelled()) if (isCancelled())
exitstatus = -1; exitstatus = -1;
if (inputsManager != null) if (inputsManager != null)
if (canWriteOnShub) updateComputationOnWS(exitstatus, e.getMessage(), inputsManager.getProvenanceData(), generatedFiles); if (canWriteOnShub)
else updateComputationOnWS(exitstatus, e.getMessage(), inputsManager.getProvenanceData(),
if (canWriteOnShub) updateComputationOnWS(exitstatus, e.getMessage()); generatedFiles);
else if (canWriteOnShub)
updateComputationOnWS(exitstatus, e.getMessage());
if (isCancelled()) if (isCancelled())
throw new Exception("Computation cancelled"); throw new Exception("Computation cancelled");
else else
throw e; throw e;
} finally { } finally {
LOGGER.debug("accounting algorithm"); LOGGER.debug("accounting algorithm");
if(operationResult==null){
operationResult=OperationResult.FAILED;
}
accountAlgorithmExecution(startTimeLong, System.currentTimeMillis(), operationResult); accountAlgorithmExecution(startTimeLong, System.currentTimeMillis(), operationResult);
LOGGER.debug("Deleting Input Tables"); LOGGER.debug("Deleting Input Tables");
deleteTemporaryTables(generatedInputTables); deleteTemporaryTables(generatedInputTables);
@ -496,7 +518,8 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
time("Cleaning of resources"); time("Cleaning of resources");
displayTimes(); displayTimes();
cleanResources(); cleanResources();
if (observer!=null) observer.isFinished(this); if (observer != null)
observer.isFinished(this);
LOGGER.debug("All done - Computation Finished"); LOGGER.debug("All done - Computation Finished");
Files.deleteIfExists(lockFile); Files.deleteIfExists(lockFile);
} }
@ -504,7 +527,7 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
} }
private boolean checkWriteAuthorization(String username) { private boolean checkWriteAuthorization(String username) {
if (env!=null && env.getShubUsersExcluded()!=null) { if (env != null && env.getShubUsersExcluded() != null) {
if (env.getShubUsersExcluded().isEmpty()) { if (env.getShubUsersExcluded().isEmpty()) {
return false; return false;
} }
@ -516,22 +539,21 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
} }
private void accountAlgorithmExecution(long start, long end, OperationResult result) { private void accountAlgorithmExecution(long start, long end, OperationResult result) {
try{ try {
JobUsageRecord jobUsageRecord = new JobUsageRecord(); JobUsageRecord jobUsageRecord = new JobUsageRecord();
jobUsageRecord.setJobName(this.getAlgorithmClass().getSimpleName()); jobUsageRecord.setJobName(this.getAlgorithmClass().getSimpleName());
jobUsageRecord.setConsumerId(tokenm.getUserName()); jobUsageRecord.setConsumerId(tokenm.getUserName());
jobUsageRecord.setDuration(end-start); jobUsageRecord.setDuration(end - start);
jobUsageRecord.setOperationResult(result); jobUsageRecord.setOperationResult(result);
jobUsageRecord.setServiceName("DataMiner"); jobUsageRecord.setServiceName("DataMiner");
jobUsageRecord.setServiceClass("WPS"); jobUsageRecord.setServiceClass("WPS");
jobUsageRecord.setHost(WPSConfig.getInstance().getWPSConfig().getServer().getHostname()); jobUsageRecord.setHost(WPSConfig.getInstance().getWPSConfig().getServer().getHostname());
jobUsageRecord.setCallerQualifier(tokenm.getTokenQualifier()); jobUsageRecord.setCallerQualifier(tokenm.getTokenQualifier());
AccountingPersistence accountingPersistence = AccountingPersistence accountingPersistence = AccountingPersistenceFactory.getPersistence();
AccountingPersistenceFactory.getPersistence();
accountingPersistence.account(jobUsageRecord); accountingPersistence.account(jobUsageRecord);
}catch(Throwable e){ } catch (Throwable e) {
LOGGER.error("error accounting algorithm execution",e); LOGGER.error("error accounting algorithm execution", e);
} }
} }
@ -550,7 +572,8 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
try { try {
updateStatus(agent.getStatus(), canWrite); updateStatus(agent.getStatus(), canWrite);
Thread.sleep(10000); Thread.sleep(10000);
} catch (InterruptedException e) {} } catch (InterruptedException e) {
}
} }
LOGGER.info("Status updater terminated"); LOGGER.info("Status updater terminated");
} }
@ -564,15 +587,19 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
LOGGER.debug("Provenance manager running"); LOGGER.debug("Provenance manager running");
} }
private void saveComputationOnWS(List<StoredData> inputData, List<StoredData> outputData, ComputationalAgent agent, List<File> generatedFiles) { private void saveComputationOnWS(List<StoredData> inputData, List<StoredData> outputData, ComputationalAgent agent,
List<File> generatedFiles) {
LOGGER.debug("Save Computation On WS"); LOGGER.debug("Save Computation On WS");
LOGGER.debug("InputData: "+inputData); LOGGER.debug("InputData: " + inputData);
LOGGER.debug("OutputData: "+outputData); LOGGER.debug("OutputData: " + outputData);
LOGGER.debug("Agent: "+agent); LOGGER.debug("Agent: " + agent);
LOGGER.debug("Generated files: "+generatedFiles); LOGGER.debug("Generated files: " + generatedFiles);
LOGGER.debug("Provenance manager started for operator " + this.getClass().getCanonicalName()); LOGGER.debug("Provenance manager started for operator " + this.getClass().getCanonicalName());
ComputationData computation = new ComputationData(config.getTaskID(), config.getAgent(), agent.getDescription(), agent.getInfrastructure().name(), startTime, endTime, "100", config.getTaskID(), config.getParam(ConfigurationManager.serviceUserNameParameterVariable), config.getGcubeScope(), this.getClass().getCanonicalName()); ComputationData computation = new ComputationData(config.getTaskID(), config.getAgent(), agent.getDescription(),
agent.getInfrastructure().name(), startTime, endTime, "100", config.getTaskID(),
config.getParam(ConfigurationManager.serviceUserNameParameterVariable), config.getGcubeScope(),
this.getClass().getCanonicalName());
// post on WS // post on WS
DataspaceManager manager = new DataspaceManager(config, computation, inputData, outputData, generatedFiles); DataspaceManager manager = new DataspaceManager(config, computation, inputData, outputData, generatedFiles);
@ -637,7 +664,8 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
public static List<String> command(final String cmdline, final String directory) { public static List<String> command(final String cmdline, final String directory) {
try { try {
Process process = new ProcessBuilder(new String[] { "bash", "-c", cmdline }).redirectErrorStream(true).directory(new File(directory)).start(); Process process = new ProcessBuilder(new String[] { "bash", "-c", cmdline }).redirectErrorStream(true)
.directory(new File(directory)).start();
List<String> output = new ArrayList<String>(); List<String> output = new ArrayList<String>();
BufferedReader br = new BufferedReader(new InputStreamReader(process.getInputStream())); BufferedReader br = new BufferedReader(new InputStreamReader(process.getInputStream()));
@ -658,16 +686,16 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
@Override @Override
public void setObserver(Observer o) { public void setObserver(Observer o) {
LOGGER.debug("setting observer in {} ",wpsExternalID); LOGGER.debug("setting observer in {} ", wpsExternalID);
this.observer = o; this.observer = o;
} }
@Override @Override
public synchronized boolean cancel() { public synchronized boolean cancel() {
if (!cancelled){ if (!cancelled) {
LOGGER.debug("COMPUTATION INTERRUPTED! ({})",wpsExternalID); LOGGER.debug("COMPUTATION INTERRUPTED! ({})", wpsExternalID);
try{ try {
if (agent!=null){ if (agent != null) {
agent.shutdown(); agent.shutdown();
agent = null; agent = null;
} }
@ -680,12 +708,12 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
} }
System.gc(); System.gc();
cancelled = true; cancelled = true;
}catch(Exception e){ } catch (Exception e) {
LOGGER.warn("error cancelling computation with id {}",wpsExternalID); LOGGER.warn("error cancelling computation with id {}", wpsExternalID);
return false; return false;
} }
} else { } else {
LOGGER.debug("COMPUTATION ALREADY INTERRUPT! ({})",wpsExternalID); LOGGER.debug("COMPUTATION ALREADY INTERRUPT! ({})", wpsExternalID);
return false; return false;
} }
return true; return true;