Lucio Lelii 2018-01-08 11:53:50 +00:00
parent ebaf785934
commit 768ea5f212
10 changed files with 98 additions and 37 deletions

View File

@ -1,4 +1,8 @@
<ReleaseNotes>
<Changeset component="${groupId}.${artifactId}.1-5-2" date="2017-12-13">
<Change>added the right extension to the output file</Change>
<Change>lock file created on execution</Change>
</Changeset>
<Changeset component="${groupId}.${artifactId}.1-5-1" date="2017-09-14">
<Change>added accounting on algorithm execution</Change>
</Changeset>

View File

@ -9,7 +9,7 @@
</parent>
<groupId>org.gcube.dataanalysis</groupId>
<artifactId>dataminer</artifactId>
- <version>1.5.1-SNAPSHOT</version>
+ <version>1.5.2-SNAPSHOT</version>
<name>dataminer</name>
<description>An e-Infrastructure service providing state-of-the-art DataMining algorithms and ecological modelling approaches under the Web Processing Service (WPS) standard.</description>
<scm>

View File

@ -1,10 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.algorithms;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
@Retention(RetentionPolicy.RUNTIME)
public @interface AlgorithmClassification {
String value() default "Others";
}

View File

@ -74,6 +74,23 @@ public class InfrastructureDialoguer {
LOGGER.debug("Found {} algorithms",resources.size());
return resources;
}
public List<String> getPrivateAlgorithmsInScope(String user) throws Exception{
	LOGGER.debug("Searching for Algorithms in scope {} with classloader type {}",scope,Thread.currentThread().getContextClassLoader().getClass().getSimpleName());
	SimpleQuery query = queryFor(GenericResource.class);
	query.addCondition("$resource/Profile/SecondaryType/string() eq 'StatisticalManagerAlgorithmPrivate' ");
	query.addCondition(String.format("contains($resource/Profile/Body/privateusers/string(),'%s')",user));
	query.setResult("$resource/Profile/Name/text()");
	DiscoveryClient<String> client = client();
	List<String> resources = client.submit(query);
	if (resources==null || resources.size()==0){
		throw new Exception("No resource named StatisticalManagerAlgorithmPrivate available in scope "+scope);
	}
	LOGGER.debug("Found {} algorithms",resources.size());
	return resources;
}
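
For context, a minimal caller sketch for the new method. The InfrastructureDialoguer constructor argument and the user name are assumptions (neither appears in this diff); this is an illustration, not the service's actual calling code.

import java.util.List;

class PrivateAlgorithmsSketch {
	// Hypothetical caller of getPrivateAlgorithmsInScope; constructor argument is assumed.
	static List<String> privateAlgorithmsFor(String user) throws Exception {
		InfrastructureDialoguer dialoguer = new InfrastructureDialoguer("/gcube/devsec/devVRE");
		// throws when no StatisticalManagerAlgorithmPrivate resource in the scope lists the user
		return dialoguer.getPrivateAlgorithmsInScope(user);
	}
}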

View File

@ -3,6 +3,9 @@ package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
@ -323,6 +326,11 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
InputsManager inputsManager = null;
ConfigurationManager configManager = new ConfigurationManager(); // initializes parameters from file
manageUserToken();
Path dir = Paths.get(System.getProperty("java.io.tmpdir"), "dmlocks");
if (!Files.exists(dir))
	dir = Files.createDirectory(dir);
Path lockFile = Files.createTempFile(dir, "dm", ".lck");
LOGGER.info("lock file created {}",lockFile.toUri().toURL());
try {
// wait for server resources to be available
@ -477,7 +485,7 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
cleanResources();
if (observer!=null) observer.isFinished(this);
LOGGER.debug("All done - Computation Finished");
Files.deleteIfExists(lockFile);
}
}
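
The locking change boils down to a create-before/delete-after pattern around the computation. A condensed sketch of that pattern follows, assuming the Files.deleteIfExists call above sits in the method's cleanup path (the full body of the mapper method is elided from this diff):

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

class LockFileSketch {
	// Condensed sketch of the lock-file lifecycle introduced by this commit; the try/finally
	// placement of the delete is assumed from the hunk at line ~485.
	static void runWithLock(Runnable computation) throws Exception {
		Path dir = Paths.get(System.getProperty("java.io.tmpdir"), "dmlocks");
		if (!Files.exists(dir))
			dir = Files.createDirectory(dir); // note: exists/create is not atomic; concurrent first runs could race
		Path lockFile = Files.createTempFile(dir, "dm", ".lck");
		try {
			computation.run(); // the actual algorithm execution
		} finally {
			Files.deleteIfExists(lockFile); // remove the marker even when the computation fails
		}
	}
}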

View File

@ -72,10 +72,9 @@ public class StatisticalTypeToWPSType {
return;
templates = new Properties();
+ try(InputStream is = this.getClass().getClassLoader().getResourceAsStream("templates/classtemplate.properties"))
+ {
- InputStream is = this.getClass().getClassLoader().getResourceAsStream("templates/classtemplate.properties");
templates.load(is);
+ }
- is.close();
}
public StatisticalTypeToWPSType() throws Exception {
@ -179,6 +178,7 @@ public class StatisticalTypeToWPSType {
logger.debug("The file exists! " + originalfile);
filename = ((File) ptype.getContent()).getName();
String filenameDest = System.currentTimeMillis() + "_" + filename;
logger.debug("file destination for output is "+filenameDest);
String destinationfile = new File(webpersistence, filenameDest).getAbsolutePath();
logger.debug("Copying file into a temporary file: " + destinationfile);

View File

@ -6,9 +6,11 @@ import java.io.FileInputStream;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.gcube.common.homelibrary.home.Home;
@ -113,16 +115,12 @@ public class DataspaceManager implements Runnable {
return uploadData(data, wsFolder, true);
}
public String uploadData(StoredData data, WorkspaceFolder wsFolder, boolean changename) throws Exception {
LOGGER.debug("Dataspace->Analysing " + data.name);
LOGGER.debug("Dataspace->Analysing " + data);
// String filenameonwsString = WorkspaceUtil.getUniqueName(data.name, wsFolder);
String filenameonwsString = data.name ;
if (changename){
filenameonwsString = data.name + "_[" + data.computationId + "]";// ("_"+UUID.randomUUID()).replace("-", "");
if (data.type.equals("text/csv"))
filenameonwsString+=".csv";
else if (data.type.equals("image/png"))
filenameonwsString+=".png";
}
if (changename)
filenameonwsString = data.name + "_[" + data.computationId + "]"+getExtension(data.payload, data.type);// ("_"+UUID.randomUUID()).replace("-", "");
InputStream in = null;
String url = "";
try {
@ -381,6 +379,7 @@ public class DataspaceManager implements Runnable {
return payload;
}
public void writeProvenance(ComputationData computation, List<StoredData> inputData, List<StoredData> outputData) throws Exception {
LOGGER.debug("Dataspace->connecting to Workspace");
HomeManagerFactory factory = HomeLibrary.getHomeManagerFactory();
@ -469,6 +468,7 @@ public class DataspaceManager implements Runnable {
}
public void deleteRunningComputationData() throws Exception {
LOGGER.debug("Dataspace->deleting computation item");
LOGGER.debug("Dataspace->connecting to Workspace");
HomeManagerFactory factory = HomeLibrary.getHomeManagerFactory();
@ -503,4 +503,35 @@ public class DataspaceManager implements Runnable {
LOGGER.debug("Dataspace->finished removing computation data - success "+!ws.exists(computation.id,computationsFolderWs.getId()));
}
public static String getExtension(String payload, String type){
	String extension = "";
	if (type.toLowerCase().equals("text/plain")){}
	else if (payload.toLowerCase().startsWith("http")){
		try {
			URL obj= new URL(payload);
			URLConnection conn = obj.openConnection();
			// get all headers
			Map<String, List<String>> map = conn.getHeaderFields();
			for (Map.Entry<String, List<String>> entry : map.entrySet()) {
				String value = entry.getValue().toString();
				if (value.toLowerCase().contains("filename=")){
					System.out.println("DataSpace->Searching in http header: found "+value);
					extension = value.substring(value.lastIndexOf("."),value.lastIndexOf("\""));
				}
			}
			conn.getInputStream().close();
		} catch (Exception e) {
			System.out.println("DataSpace->Error in the payload http link "+e.getMessage());
		}
	}
	else {
		File paylFile = new File(payload);
		if (paylFile.exists()){
			String paylname = paylFile.getName();
			extension = paylname.substring(paylname.lastIndexOf("."));
		}
	}
	return extension;
}
}
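
A few illustrative calls to the new helper, documenting the three payload shapes it distinguishes; the file names and header value below are invented for the example.

// Illustrative behaviour of DataspaceManager.getExtension (inputs are made up):
// 1. text/plain payloads get no extension:
//      getExtension("any payload", "text/plain")                     -> ""
// 2. HTTP payloads are probed for a response header containing filename=...;
//    the substring from the last '.' up to the closing quote is used,
//    e.g. a header value of [attachment; filename="result.csv"]      -> ".csv"
//    (and "" when no such header is present)
// 3. anything else is treated as a local path and the file-name suffix
//    is returned when the file exists:
String ext = DataspaceManager.getExtension("/tmp/output.png", "image/png");  // ".png" if /tmp/output.png exists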

View File

@ -25,4 +25,15 @@ public class StoredData {
String vre;
String type;
String payload;
@Override
public String toString() {
	return "StoredData [name=" + name + ", description=" + description
			+ ", id=" + id + ", provenance=" + provenance
			+ ", creationDate=" + creationDate + ", operator=" + operator
			+ ", computationId=" + computationId + ", vre=" + vre
			+ ", type=" + type + ", payload=" + payload + "]";
}
}

View File

@ -22,7 +22,7 @@ public class AlgorithmTest {
@Test
public void executeAlgorithmsFromFile() throws Exception{
String env = "proto";
String env = "dev";
Properties prop = new Properties();
prop.load(AlgorithmTest.class.getResourceAsStream("/test_params.properties"));

View File

@ -173,7 +173,7 @@
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.WEB_APP_PUBLISHER&DataInputs=ZipFile={PROTOCOL}://goo.gl/dYQ089;
#ECOPATH_WITH_ECOSIM
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ECOPATH_WITH_ECOSIM
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ECOPATH_WITH_ECOSIM&DataInputs=Model File=http://data.d4science.org/eHFkNmhoSUwxMVpmcElhcUlmQUpWaWRGSjQzNkFXNElHbWJQNStIS0N6Yz0;Config File=http://data.d4science.org/ZGFWaGc4NjUrQmRmcElhcUlmQUpWbTNVQjhqdUV3OWdHbWJQNStIS0N6Yz0;
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ECOPATH_WITH_ECOSIM&DataInputs=Model File={PROTOCOL}://data.d4science.org/eHFkNmhoSUwxMVpmcElhcUlmQUpWaWRGSjQzNkFXNElHbWJQNStIS0N6Yz0;Config File={PROTOCOL}://data.d4science.org/ZGFWaGc4NjUrQmRmcElhcUlmQUpWbTNVQjhqdUV3OWdHbWJQNStIS0N6Yz0;
#OCCURRENCES_MERGER
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.OCCURRENCES_MERGER
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.OCCURRENCES_MERGER&DataInputs=final_Table_Name=MergedOcc;leftTableName={PROTOCOL}://data.d4science.org/QkNIYVp1dm81bnhmcElhcUlmQUpWbzlqZWlDbXpmaytHbWJQNStIS0N6Yz0;rightTableName={PROTOCOL}://data.d4science.org/QkNIYVp1dm81bnhmcElhcUlmQUpWbzlqZWlDbXpmaytHbWJQNStIS0N6Yz0;longitudeColumn=decimalLongitude;latitudeColumn=decimalLatitude;recordedByColumn=recordedBy;scientificNameColumn=scientificName;eventDateColumn=eventDate;lastModificationColumn=modified;spatialTolerance=0.5;confidence=80;
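
The request templates above use {PROTOCOL}, {HOST} and {TOKEN} placeholders. A minimal, hypothetical sketch of how a test could substitute them and issue the call; only the placeholder names come from the file, everything else is assumed.

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

class WpsRequestSketch {
	// Hypothetical placeholder substitution for the template lines above.
	static int execute(String template, String protocol, String host, String token) throws IOException {
		String url = template.replace("{PROTOCOL}", protocol)
		                     .replace("{HOST}", host)
		                     .replace("{TOKEN}", token);
		HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
		conn.setRequestMethod("GET");
		int code = conn.getResponseCode(); // a 200 response is expected for a well-formed request
		conn.disconnect();
		return code;
	}
}

In practice, DataInputs values that contain spaces (e.g. Model File=...) may additionally need URL encoding before the request is sent.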