Gianpaolo Coro 2018-01-17 17:44:40 +00:00
parent 4e764a19e5
commit 13fbb283eb
70 changed files with 2504 additions and 0 deletions

31
.classpath Normal file

@ -0,0 +1,31 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" output="target/classes" path="src/main/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="src" output="target/test-classes" path="src/test/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="output" path="target/classes"/>
</classpath>

23
.project Normal file

@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>DataminerAlgorithmsInstaller1.2</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.m2e.core.maven2Builder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.m2e.core.maven2Nature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
</projectDescription>


@ -0,0 +1,5 @@
eclipse.preferences.version=1
encoding//src/main/java=UTF-8
encoding//src/main/resources=UTF-8
encoding//src/test/java=UTF-8
encoding/<project>=UTF-8


@ -0,0 +1,2 @@
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning


@ -0,0 +1,3 @@
#Wed Apr 27 12:48:22 CEST 2016
eclipse.preferences.version=1
org.eclipse.ltk.core.refactoring.enable.project.refactoring.history=false


@ -0,0 +1,5 @@
#Tue Apr 26 20:08:03 CEST 2016
activeProfiles=
eclipse.preferences.version=1
resolveWorkspaceProjects=true
version=1

1
distro/LICENSE Normal file

@ -0,0 +1 @@
${gcube.license}

69
distro/README Normal file

@ -0,0 +1,69 @@
The gCube System - ${name}
--------------------------------------------------

${description}

${gcube.description}

${gcube.funding}


Version
--------------------------------------------------

${version} (${buildDate})

Please see the file named "changelog.xml" in this directory for the release notes.


Authors
--------------------------------------------------

* Gianpaolo Coro (gianpaolo.coro-AT-isti.cnr.it),
  Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" CNR, Pisa IT


Maintainers
-----------

* Gianpaolo Coro (gianpaolo.coro-AT-isti.cnr.it),
  Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" CNR, Pisa IT


Download information
--------------------------------------------------

Source code is available from SVN:
${scm.url}

Binaries can be downloaded from the gCube website:
${gcube.website}


Installation
--------------------------------------------------

Installation documentation is available on-line in the gCube Wiki:
https://wiki.gcube-system.org/gcube


Documentation
--------------------------------------------------

Documentation is available on-line in the gCube Wiki:
https://wiki.gcube-system.org/gcube/Ecological_Modeling


Support
--------------------------------------------------

Bugs and support requests can be reported in the gCube issue tracking tool:
${gcube.issueTracking}


Licensing
--------------------------------------------------

This software is licensed under the terms you may find in the file named "LICENSE" in this directory.

7
distro/addAlgorithm.sh Normal file

@ -0,0 +1,7 @@
#!/bin/sh
INFRA_ENV=$( egrep ^INFRA_REFERENCE= /usr/local/bin/algorithms-updater | cut -d = -f 2 )/software
if [ ! -z "$9" ] ; then
INFRA_ENV=$9
fi
echo $# arguments to $0: $*
java -cp ../tomcat/webapps/wps/WEB-INF/lib/*:../tomcat/lib/*:./*:../wps_algorithms/algorithms/$INFRA_ENV org.gcube.dataanalysis.wps.mapper.DataMinerUpdater -a$1 -l../wps_algorithms/algorithms/$INFRA_ENV -t$2 -i$3 -c../tomcat/webapps/wps/ecocfg/ -s$4 -e$5 -k$6 -u$7 -d$8
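
The wrapper above only maps its positional arguments onto the flags parsed by DataMinerUpdater.main. A minimal sketch of the equivalent direct call, reusing the example values documented in the DataMinerUpdater comment below; the paths mirror those hard-coded in the wrapper and should be treated as assumptions for a local run:

import org.gcube.dataanalysis.wps.mapper.DataMinerUpdater;

public class AddAlgorithmExample {
	public static void main(String[] ignored) throws Exception {
		String[] args = {
			"-aNETCDF_SUPPORT_JAVA_13",                                     // $1: algorithm name
			"-l../wps_algorithms/algorithms/dev/software/",                 // library path ($9 overrides the default INFRA_REFERENCE)
			"-tBLACK_BOX-[gianpaolo.coro,giancarlo.panichi]",               // $2: category, optionally followed by -[private,users]
			"-iorg.gcube.dataanalysis.executor.rscripts.NetCDFSupportJava", // $3: implementation class
			"-c../tomcat/webapps/wps/ecocfg/",                              // config path used by the wrapper
			"-s/gcube/devNext/NextNext",                                    // $4: scope
			"-etransducerers",                                              // $5: algorithm type
			"-kN",                                                          // $6: N = do not skip the WPS class generation
			"-uhttp://data-d.d4science.org/ZGZSUHlQODRZY3pBQVZxNlVOK0xzUVZKc0dZczBKaEpHbWJQNStIS0N6Yz0", // $7: jar URL
			"-dNetCDF Support Java"                                         // $8: description (must be the last flag)
		};
		DataMinerUpdater.main(args);
	}
}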

2
distro/algorithmTemplate Normal file

@ -0,0 +1,2 @@
<category>TRANSDUCERS</category>
<inputs><input><name>Empty</name><description>Empty</description><defaultValue /><type>string</type></input></inputs>

30
distro/assembly.xml Normal file

@ -0,0 +1,30 @@
<assembly
xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
<id>algorithm-installer-bundle</id>
<formats>
<format>tar.gz</format>
</formats>
<baseDirectory>algorithmInstaller</baseDirectory>
<fileSets>
<fileSet>
<outputDirectory>.</outputDirectory>
<directory>${distroDirectory}</directory>
<useDefaultExcludes>true</useDefaultExcludes>
<includes>
<include>addAlgorithm.sh</include>
<include>algorithmTemplate</include>
</includes>
<fileMode>755</fileMode>
<filtered>true</filtered>
</fileSet>
<fileSet>
<directory>target</directory>
<outputDirectory>.</outputDirectory>
<includes>
<include>dataminer-algorithms-importer*.jar</include>
</includes>
</fileSet>
</fileSets>
</assembly>

5
distro/changelog.xml Normal file

@ -0,0 +1,5 @@
<ReleaseNotes>
<Changeset component="${groupId}.${artifactId}.1-1-3" date="2017-09-03">
<Change>First Release</Change>
</Changeset>
</ReleaseNotes>

32
distro/descriptor.xml Normal file

@ -0,0 +1,32 @@
<assembly
xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
<id>servicearchive</id>
<formats>
<format>tar.gz</format>
</formats>
<baseDirectory>/</baseDirectory>
<fileSets>
<fileSet>
<directory>${distroDirectory}</directory>
<outputDirectory>/</outputDirectory>
<useDefaultExcludes>true</useDefaultExcludes>
<includes>
<include>README</include>
<include>LICENSE</include>
<include>changelog.xml</include>
<include>profile.xml</include>
</includes>
<fileMode>755</fileMode>
<filtered>true</filtered>
</fileSet>
</fileSets>
<files>
<file>
<source>target/${build.finalName}.${project.packaging}</source>
<outputDirectory>/${artifactId}</outputDirectory>
</file>
</files>
</assembly>

30
distro/profile.xml Normal file

@ -0,0 +1,30 @@
<?xml version="1.0" encoding="UTF-8"?>
<Resource>
<ID></ID>
<Type>Service</Type>
<Profile>
<Description>${project.description}</Description>
<Class>DataAnalysis</Class>
<Name>${project.name}</Name>
<Version>1.0.0</Version>
<Packages>
<Software>
<Name>${project.name}</Name>
<Description>${project.description}</Description>
<Version>${version}</Version>
<MavenCoordinates>
<groupId>${project.groupId}</groupId>
<artifactId>${project.artifactId}</artifactId>
<version>${project.version}</version>
</MavenCoordinates>
<Type>Service</Type>
<Files>
<File>${project.build.finalName}.${project.packaging}</File>
</Files>
</Software>
</Packages>
</Profile>
</Resource>

Binary file not shown.

119
pom.xml Normal file

@ -0,0 +1,119 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>maven-parent</artifactId>
<groupId>org.gcube.tools</groupId>
<version>1.0.0</version>
<relativePath />
</parent>
<groupId>org.gcube.dataanalysis</groupId>
<artifactId>dataminer-algorithms-importer</artifactId>
<version>1.2.0-SNAPSHOT</version>
<name>dataminer-algorithms-importer</name>
<description>Algorithms for the dataminer service</description>
<scm>
<url>https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/DataminerAlgorithms</url>
</scm>
<developers>
<developer>
<name>Gianpaolo Coro</name>
<email>gianpaolo.coro@isti.cnr.it</email>
<organization>CNR Pisa, Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo"</organization>
<roles>
<role>architect</role>
<role>developer</role>
</roles>
</developer>
</developers>
<properties>
<webappDirectory>${project.build.directory}/${project.build.finalName}</webappDirectory>
<distroDirectory>distro</distroDirectory>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
</properties>
<dependencies>
<dependency>
<groupId>org.gcube.dataanalysis</groupId>
<artifactId>dataminer</artifactId>
<version>[1.0.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<artifactId>gis-interface</artifactId>
<groupId>org.gcube.spatial.data</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.gcube.resources</groupId>
<artifactId>registry-publisher</artifactId>
<version>[1.1.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.resources</groupId>
<artifactId>common-gcore-resources</artifactId>
<version>[1.1.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.3</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.18.1</version>
<configuration>
<skipTests>true</skipTests>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<descriptors>
<descriptor>${distroDirectory}/assembly.xml</descriptor>
</descriptors>
<finalName>dataminer-algorithms-importer-${project.version}</finalName>
<appendAssemblyId>false</appendAssemblyId>
</configuration>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
<repositories>
<repository>
<id>n52-releases</id>
<name>52n Releases</name>
<url>http://52north.org/maven/repo/releases</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
</repositories>
</project>

File diff suppressed because one or more lines are too long


@ -0,0 +1,183 @@
package org.gcube.dataanalysis.wps.mapper;
import java.io.File;
import java.util.HashMap;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.GenericAlgorithm;
import org.gcube.dataanalysis.ecoengine.interfaces.Model;
import org.gcube.dataanalysis.ecoengine.processing.factories.ProcessorsFactory;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.StatisticalTypeToWPSType;
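/**
 * Generates the WPS wrapper class for a single DataMiner algorithm: the given implementation is
 * instantiated, its inputs, description and output are read through the ecological-engine
 * interfaces (GenericAlgorithm, Model or ComputationalAgent), and the corresponding Java source
 * file is written under the mappedclasses sub-package matching the algorithm category.
 */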
public class ClassGenerator {
private String configPath = "./cfg/";
private String generationPath = "./src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/";
private StatisticalTypeToWPSType converter;
private String packageString = null;
private String javaFileName = null;
public ClassGenerator(String algorithmName, String implementation, String generationPath, String configPath) throws Exception {
this.generationPath = generationPath;
this.configPath = configPath;
converter = new StatisticalTypeToWPSType();
this.javaFileName = generateEcologicalEngineClasses(algorithmName, implementation);
}
public String getPackageString() {
return packageString;
}
public String getJavaFileName() {
return javaFileName;
}
private String generateEcologicalEngineClasses(String algorithmName, String implementation) throws Exception {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath(configPath);
config.setAlgorithmClassLoader(Thread.currentThread().getContextClassLoader());
// set scope etc..
HashMap<String, List<String>> algorithms = ProcessorsFactory.getAllFeatures(config);
for (String algorithmSet : algorithms.keySet()) {
List<String> parametersList = algorithms.get(algorithmSet);
System.out.println(algorithmSet + ":" + parametersList.toString());
for (String algorithm : parametersList) {
if (!algorithm.equals(algorithmName))
continue;
// got an algorithm
System.out.println("Algorithm: " + algorithm);
String description = ""; // get this information
StringBuffer classWriter = new StringBuffer();
List<StatisticalType> inputs = null;
StatisticalType outputs = null;
// build class preamble
config.setAgent(algorithm);
config.setModel(algorithm);
config.setAlgorithmClassLoader(Thread.currentThread().getContextClassLoader());
String interfaceString = "";
Object impl = Class.forName(implementation).newInstance();
if ((impl instanceof GenericAlgorithm))
{
GenericAlgorithm ga = (GenericAlgorithm)impl;
inputs = ga.getInputParameters();
description = ga.getDescription();
try
{
outputs = ga.getOutput();
}
catch (Exception e)
{
outputs = null;
}
}
else if (impl instanceof Model){
Model ca = (Model)impl;
inputs = ca.getInputParameters();
description = ca.getDescription();
try
{
outputs = ca.getOutput();
}
catch (Exception e)
{
outputs = null;
}
}
else if ((impl instanceof ComputationalAgent))
{
ComputationalAgent ca = (ComputationalAgent)impl;
inputs = ca.getInputParameters();
description = ca.getDescription();
try
{
outputs = ca.getOutput();
}
catch (Exception e)
{
outputs = null;
}
}
else
{
throw new Exception("invalid algorithm class "+impl.getClass());
}
try{
if (algorithmSet.equals("DISTRIBUTIONS")) {
packageString = "generators";
interfaceString = "IGenerator";
} else if (algorithmSet.equals("TRANSDUCERS")) {
packageString = "transducerers";
interfaceString = "ITransducer";
} else if (algorithmSet.equals("MODELS")) {
packageString = "modellers";
interfaceString = "IModeller";
} else if (algorithmSet.equals("CLUSTERERS")) {
packageString = "clusterers";
interfaceString = "IClusterer";
} else if (algorithmSet.equals("TEMPORAL_ANALYSIS")) {
} else if (algorithmSet.equals("EVALUATORS")) {
packageString = "evaluators";
interfaceString = "IEvaluator";
}
}catch(Exception e){
System.out.println("Error in retrieving output:");
e.printStackTrace();
}
classWriter.append(((String) StatisticalTypeToWPSType.templates.get("package")).replace("#PACKAGE#", packageString) + "\n" + ((String) StatisticalTypeToWPSType.templates.get("import")) + "\n");
System.out.println("Class preamble: \n" + classWriter.toString());
// build class description
String classdescription = (String) StatisticalTypeToWPSType.templates.get("description");
//modification of 20/07/15
classdescription = classdescription.replace("#TITLE#", algorithm).replace("#ABSTRACT#", description).replace("#CLASSNAME#", algorithm).replace("#PACKAGE#", packageString);
System.out.println("Class description : \n" + classdescription);
String classdefinition = (String) StatisticalTypeToWPSType.templates.get("class_definition");
classdefinition = classdefinition.replace("#CLASSNAME#", algorithm).replace("#INTERFACE#", interfaceString);
System.out.println("Class definition: \n" + classdefinition);
classWriter.append(classdescription + "\n");
classWriter.append(classdefinition + "\n");
// attach scope input deprecated!
// classWriter.append((String) StatisticalTypeToWPSType.templates.get("scopeInput") + "\n");
// classWriter.append((String) StatisticalTypeToWPSType.templates.get("usernameInput") + "\n");
for (StatisticalType input : inputs) {
System.out.println(input);
String wpsInput = converter.convert2WPSType(input, true, config);
if (wpsInput != null) {
classWriter.append(wpsInput + "\n");
System.out.println("Input:\n" + wpsInput);
}
}
if (outputs != null) {
System.out.println("Alg. Output:\n" + outputs);
String wpsOutput = converter.convert2WPSType(outputs, false, config);
classWriter.append(wpsOutput + "\n");
System.out.println("Output:\n" + wpsOutput);
}
else
System.out.println("Output is empty!");
// add potential outputs
classWriter.append((String) StatisticalTypeToWPSType.templates.getProperty("optionalOutput") + "\n");
classWriter.append((String) StatisticalTypeToWPSType.templates.get("class_closure"));
System.out.println("Class:\n" + classWriter.toString());
System.out.println("Saving...");
File dirs = new File(generationPath + packageString);
if (!dirs.exists()) dirs.mkdirs();
FileTools.saveString(generationPath + packageString+"/"+algorithm + ".java", classWriter.toString(), true, "UTF-8");
return generationPath + packageString+"/"+algorithm + ".java";
}
}
return null;
}
}
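
A minimal usage sketch mirroring the call made by DataMinerUpdater; the algorithm name, implementation class and paths below are example values and must exist in the local configuration:

import org.gcube.dataanalysis.wps.mapper.ClassGenerator;

public class ClassGeneratorExample {
	public static void main(String[] args) throws Exception {
		// Generate the WPS wrapper source for one algorithm and print where it was written.
		ClassGenerator generator = new ClassGenerator(
				"NETCDF_SUPPORT_JAVA_13",                                     // algorithm name as listed in the properties files
				"org.gcube.dataanalysis.executor.rscripts.NetCDFSupportJava", // implementation class
				"./org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/", // generation path
				"./cfg/");                                                    // ecological-engine configuration directory
		System.out.println("Generated " + generator.getJavaFileName()
				+ " (package " + generator.getPackageString() + ")");
	}
}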


@ -0,0 +1,555 @@
package org.gcube.dataanalysis.wps.mapper;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.net.URLConnection;
import java.util.Arrays;
import java.util.Base64;
import java.util.List;
import java.util.UUID;
import java.util.jar.Attributes;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;
import java.util.jar.Manifest;
import javax.tools.Diagnostic;
import javax.tools.Diagnostic.Kind;
import javax.tools.DiagnosticCollector;
import javax.tools.JavaCompiler;
import javax.tools.JavaFileObject;
import javax.tools.StandardJavaFileManager;
import javax.tools.ToolProvider;
import org.gcube.common.resources.gcore.GenericResource;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools;
import org.gcube.informationsystem.publisher.RegistryPublisher;
import org.gcube.informationsystem.publisher.RegistryPublisherFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.ls.DOMImplementationLS;
import org.w3c.dom.ls.LSSerializer;
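/**
 * Command-line installer for a DataMiner algorithm: it optionally downloads the algorithm jar and
 * adds it to the classpath, registers the algorithm in the <type>.properties and
 * userperspective.properties configuration files, optionally generates and compiles the WPS
 * wrapper class (via ClassGenerator) into <algorithm>_interface.jar, and finally publishes the
 * algorithm as a GenericResource on the Information System for the given scope.
 */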
public class DataMinerUpdater {
// Example of call with modifications for private users
// ./addAlgorithm.sh NETCDF_SUPPORT_JAVA_13
// BLACK_BOX-[gianpaolo.coro,giancarlo.panichi]
// org.gcube.dataanalysis.executor.rscripts.NetCDFSupportJava
// /gcube/devNext/NextNext transducerers N
// http://data-d.d4science.org/ZGZSUHlQODRZY3pBQVZxNlVOK0xzUVZKc0dZczBKaEpHbWJQNStIS0N6Yz0
// "NetCDF Support Java" dev/software/
public static void main(String args[]) throws Exception {
String algorithmName = null;
String implementation = null;
String category = null;
String configPath = null;
String libPath = null;
String scope = null;
String description = null;
String atype = null;
String skipJava = null;
String url = null;
String privateusers = null;
int i = 0;
for (String arg : args) {
if (arg.startsWith("-c")) {
configPath = arg.substring(2);
}
if (arg.startsWith("-i")) {
implementation = arg.substring(2);
}
if (arg.startsWith("-a")) {
algorithmName = arg.substring(2);
}
if (arg.startsWith("-t")) {
category = arg.substring(2);
int idxprivusers = category.indexOf("-[");
if (idxprivusers > 1) {
privateusers = category.substring(idxprivusers + 2, category.length() - 1);
System.out.println("Found private Users " + privateusers);
}
}
if (arg.startsWith("-l")) {
libPath = arg.substring(2);
}
if (arg.startsWith("-s")) {
scope = arg.substring(2);
}
if (arg.startsWith("-e")) {
atype = arg.substring(2);
}
if (arg.startsWith("-k")) {
skipJava = arg.substring(2);
if (skipJava.equals("Y"))
skipJava = null;
}
if (arg.startsWith("-u")) {
url = arg.substring(2);
}
if (arg.startsWith("-d")) {
description = arg.substring(2);
for (int j = i + 1; j < args.length; j++) {
description = description + " " + args[j];
}
System.out.println("DESCRIPTION " + description);
break;
}
i++;
}
if (libPath == null || category == null || implementation == null || configPath == null || algorithmName == null || scope == null) {
System.out.println("Wrong parameters");
System.out.println("Usage example: ");
System.exit(0);
}
Update(algorithmName, implementation, category, configPath, libPath, scope, description, atype, skipJava, url, privateusers);
}
public static void UpdateFiles(String configPath, String atype, String algorithmName, String implementation, String category) throws Exception {
File transducers = new File(configPath, atype + ".properties");
File userpersp = new File(configPath, "userperspective.properties");
// UPDATING TRANSDUCERERS FILES
System.out.println("*******Modifying transducerer file");
BufferedReader br = new BufferedReader(new FileReader(transducers));
String entry = algorithmName + "=" + implementation;
String line = br.readLine();
boolean found = false;
boolean endline = false;
String lastline = "";
StringBuffer sbb = new StringBuffer();
while (line != null) {
lastline = line;
if (line.trim().equalsIgnoreCase(entry)) {
found = true;
break;
}
sbb.append(line.trim() + System.lineSeparator());
line = br.readLine();
}
br.close();
if (!found) {
FileWriter fw = new FileWriter(transducers, false);
sbb.append(entry + System.lineSeparator());
fw.write(sbb.toString());
fw.close();
System.out.println("*******Modified transducerer file");
} else
System.out.println("*******Transducerer file was not modified, since it already contains the algorithm");
// UPDATING USER PERSPECTIVE FILE
System.out.println("*******Modifying user perspective file");
br = new BufferedReader(new FileReader(userpersp));
line = br.readLine();
found = false;
StringBuffer sb = new StringBuffer();
boolean foundCategory = false;
while (line != null) {
String cat = line.substring(0, line.indexOf("="));
if (cat.equalsIgnoreCase(category)) {
foundCategory = true;
String arguments = line.substring(line.indexOf("=") + 1).trim();
String argums[] = arguments.split(",");
List valid = Arrays.asList(argums);
// if (!line.contains(algorithmName)){
if (!valid.contains(algorithmName)) {
if (line.substring(line.indexOf("=") + 1).trim().length() == 0)
line = line + algorithmName;
else
line = line + "," + algorithmName;
} else
found = true;
}
if (line.trim().length() > 0)
sb.append(line + System.lineSeparator());
line = br.readLine();
}
br.close();
if (!foundCategory) {
sb.append(category + "=" + algorithmName + System.lineSeparator());
FileWriter fw = new FileWriter(userpersp, false);
fw.write(sb.toString());
fw.close();
System.out.println("*******Modified user perspective file");
} else {
if (!found) {
FileWriter fw = new FileWriter(userpersp, false);
fw.write(sb.toString());
fw.close();
System.out.println("*******Modified user perspective file");
} else
System.out.println("*******Perspective file was not modified, since it already contains the algorithm");
}
if (atype.equals("models"))
atype = "modellers";
if (atype.equals("nodealgorithms"))
atype = "generators";
if (atype.equals("algorithms"))
atype = "generators";
}
public static void Update(String algorithmName, String implementation, String category, String configPath, String applicationlibs, String scope, String description, String atype, String skipJava, String url, String privateusers) throws Exception {
BufferedReader br;
String line;
if (atype == null || atype.trim().length() == 0)
atype = "transducerers";
System.out.println("*****Parameters");
System.out.println("*****algorithmName:" + algorithmName);
System.out.println("*****implementation:" + implementation);
System.out.println("*****category:" + category);
System.out.println("*****configPath:" + configPath);
System.out.println("*****applicationlibs:" + applicationlibs);
System.out.println("*****scope:" + scope);
System.out.println("*****description:" + description);
System.out.println("*****atype:" + atype);
System.out.println("*****skipJava:" + ((skipJava == null) ? true : false));
System.out.println("*****url:" + url);
System.out.println("*******1 - Downloading file");
if (url != null && url.length() > 1 && skipJava != null) {
File jarfile = new File(applicationlibs, algorithmName + ".jar");
System.out.println("*******Downloading to " + jarfile.getAbsolutePath());
downloadFromUrl(url, jarfile.getAbsolutePath());
System.out.println("*******Download OK - check " + jarfile.exists());
System.out.println("*******Updating classpath");
// load the jar into the classpath
URLClassLoader sysloader = (URLClassLoader) Thread.currentThread().getContextClassLoader();
Class sysclass = URLClassLoader.class;
Class[] parameters = new Class[] { URL.class };
try {
Method method = sysclass.getDeclaredMethod("addURL", parameters);
method.setAccessible(true);
method.invoke(sysloader, new Object[] { jarfile.toURI().toURL() });
} catch (Throwable t) {
t.printStackTrace();
throw new IOException("Error, could not add URL to system classloader");
}// end try catch
} else
System.out.println("*******1 - Nothing to download");
System.out.println("*******2 - Updating files");
UpdateFiles(configPath, atype, algorithmName, implementation, category);
if (atype.equals("nodealgorithms")) {
System.out.println("*******2 - Updating also algorithms file");
UpdateFiles(configPath, "algorithms", algorithmName, implementation, category);
}
System.out.println("*******2 - Files updated!");
// GENERATING CLASS
if (skipJava != null) {
System.out.println("*******3 - Generating classes");
String generationPath = "./org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/";
ClassGenerator classGenerator = new ClassGenerator(algorithmName, implementation, generationPath, configPath);
System.out.println("*******3 - Classes generated! " + classGenerator.getJavaFileName());
// PREPARING JAR FILE
File dataminerjar = new File(applicationlibs, algorithmName + "_interface.jar");
dataminerjar.delete();
// COMPILING JAR
System.out.println("*******4 - Compiling the Jar");
compileJava(classGenerator.getJavaFileName());
System.out.println("*******->creating jar");
createJar(dataminerjar, classGenerator.getPackageString(), algorithmName);
// command(classes);
// command(createjar);
System.out.println("*******Size " + dataminerjar.length());
System.out.println("*******4 - Jar compiled!! " + dataminerjar);
} else {
System.out.println("*******3 - Generating classes skipped");
System.out.println("*******4 - Compiling the Jar skipped");
}
// INDEX ON THE IS
System.out.println("*******5 - Indexing on the IS");
System.out.println("Indexing on IS in scope " + scope);
indexOnIS(algorithmName, description, scope, privateusers);
System.out.println("*******5 - Finished indexing on the IS");
System.out.println("*******All done!");
System.out.println("*******Table entry:");
System.out.println("| " + algorithmName + " | " + "system" + " | " + category + "| Prod | <notextile>./addAlgorithm.sh " + algorithmName + " " + category + " " + implementation + " " + scope + " " + atype + " " + (skipJava != null ? "N" : "Y") + " " + ((url != null && url.length() > 1) ? url : "k") + " \"" + description + "\"" + " </notextile> | none |");
}
private static void compileJava(String javaFileName) throws Exception {
JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
StandardJavaFileManager fileManager = compiler.getStandardFileManager(null, null, null);
Iterable<? extends JavaFileObject> compilationUnits1 = fileManager.getJavaFileObjectsFromFiles(Arrays.asList(new File(javaFileName)));
DiagnosticCollector<JavaFileObject> diagnostics = new DiagnosticCollector<JavaFileObject>();
boolean success = compiler.getTask(null, fileManager, diagnostics, null, null, compilationUnits1).call();
for (Diagnostic<? extends JavaFileObject> diagnostic : diagnostics.getDiagnostics())
if (diagnostic.getKind() == Kind.ERROR)
System.out.format("Error on line %d in %s%n", diagnostic.getLineNumber(), diagnostic.getSource().toUri());
if (!success)
throw new Exception("error compiling generated class");
}
private static void createJar(File dataminerjar, String packageString, String algorithmName) throws Exception {
Manifest manifest = new Manifest();
manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0");
try (JarOutputStream target = new JarOutputStream(new FileOutputStream(dataminerjar), manifest)) {
target.putNextEntry(new JarEntry("org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/" + packageString + "/"));
String source = "org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/" + packageString + "/" + algorithmName + ".class";
File fileSource = new File(source);
JarEntry entry = new JarEntry(source);
target.putNextEntry(entry);
try (BufferedInputStream in = new BufferedInputStream(new FileInputStream(fileSource));) {
byte[] buffer = new byte[1024];
while (true) {
int count = in.read(buffer);
if (count == -1)
break;
target.write(buffer, 0, count);
}
}
target.closeEntry();
target.close();
}
}
public static String ExecuteGetLineOld(String cmd) {
Process process = null;
String lastline = "";
StringBuffer sb = new StringBuffer();
try {
System.out.println("ExecuteScript-> OSCommand-> Executing Control ->" + cmd);
process = Runtime.getRuntime().exec(cmd);
BufferedReader br = new BufferedReader(new InputStreamReader(process.getInputStream()));
String line = br.readLine();
System.out.println("ExecuteScript-> OSCommand-> line->" + line);
while (line != null) {
try {
lastline = line;
System.out.println("ExecuteScript-> OSCommand-> line->" + line);
line = br.readLine();
if (line != null)
sb.append(line + System.lineSeparator());
} catch (EOFException e) {
System.out.println("ExecuteScript-> OSCommand -> Process Finished with EOF");
break;
} catch (Exception e) {
line = "ERROR";
break;
}
}
System.out.println("ExecuteScript-> OSCommand -> Process Finished");
} catch (Throwable e) {
System.out.println("ExecuteScript-> OSCommand-> error ");
e.printStackTrace();
lastline = "ERROR";
}
process.destroy();
System.out.println("ExecuteScript-> OSCommand-> Process destroyed ");
return sb.toString();
}
public static void indexOnIS(String algorithmName, String algorithmDescription, String scope, String privateusers) throws Exception {
System.out.println("setting scope to " + scope);
String secondaryTypePublic = "StatisticalManagerAlgorithm";
String secondaryTypePrivate = "StatisticalManagerAlgorithmPrivate";
String secondaryType = secondaryTypePublic;
if (privateusers != null)
secondaryType = secondaryTypePrivate;
InfrastructureDialoguer id = new InfrastructureDialoguer(scope);
if (privateusers != null) {
System.out.println("Transforming Algorithm " + algorithmName + " into private algorithm");
ScopeProvider.instance.set(scope);
System.out.println("Deleting previous algorithm " + algorithmName + " from private algorithms");
id.deleteAlgorithmInScope(algorithmName, secondaryTypePrivate);
System.out.println("Deleting previous algorithm " + algorithmName + " from public algorithms");
id.deleteAlgorithmInScope(algorithmName, secondaryTypePublic);
} else {
ScopeProvider.instance.set(scope);
List<String> algorithms = id.getAlgorithmsInScope(secondaryType);
System.out.println("Deleting previous algorithm " + algorithmName + " from private algorithms");
id.deleteAlgorithmInScope(algorithmName, secondaryTypePrivate);
boolean found = false;
for (String alg : algorithms) {
System.out.println("Algorithm in scope " + alg);
if (alg.equals(algorithmName)) {
System.out.println("Found Match! ");
found = true;
break;
}
}
if (found) {
return;
}
}
String xml = FileTools.loadString("algorithmTemplate", "UTF-8");
xml = xml.replace("#UUID#", UUID.randomUUID().toString());
xml = xml.replace("#SCOPE#", scope);
xml = xml.replace("#NAME#", algorithmName);
xml = xml.replace("#DESCRIPTION#", algorithmDescription);
// patch to add private users property - GP
if (privateusers != null) {
ScopeProvider.instance.set(scope);
/* encryption using d4science */
/*
* InputStream privateusersstream = new
* ByteArrayInputStream(privateusers
* .getBytes(StandardCharsets.UTF_8.name())); ByteArrayOutputStream
* baos = new ByteArrayOutputStream(); new
* EncryptionUtil().encrypt(privateusersstream, baos); String
* privateusersencr = new String( baos.toByteArray());
*/
String privateuserencr = privateusers;//encrypt(privateusers);
xml = xml.replace("</inputs>", "</inputs><privateusers>" + privateuserencr + "</privateusers>");
}
xml = xml.trim();
System.out.println("XML:" + xml);
ScopeProvider.instance.set(scope);
GenericResource toPublish = new GenericResource();
Document document = toPublish.newProfile().description(algorithmDescription.replace("\"", "")).name(algorithmName).type(secondaryType).newBody().getOwnerDocument();
toPublish.profile().newBody(xml);
Node n = toPublish.profile().body();
DOMImplementationLS lsImpl = (DOMImplementationLS) n.getOwnerDocument().getImplementation().getFeature("LS", "3.0");
LSSerializer serializer = lsImpl.createLSSerializer();
serializer.getDomConfig().setParameter("xml-declaration", false); // by default it's true, so set it to false to get a String without the xml-declaration
String str = serializer.writeToString(n);
System.out.println("STRING:" + str);
System.out.println(toPublish.profile().body());
RegistryPublisher rp = RegistryPublisherFactory.create();
toPublish = rp.create(toPublish);
System.out.println("PUBLISHED");
}
public static String encrypt(String text) {
return new String(Base64.getEncoder().encode(xor(text.getBytes())));
}
public static String decrypt(String hash) {
try {
return new String(xor(Base64.getDecoder().decode(hash.getBytes())), "UTF-8");
} catch (java.io.UnsupportedEncodingException ex) {
throw new IllegalStateException(ex);
}
}
private static byte[] xor(final byte[] input) {
final byte[] output = new byte[input.length];
final byte[] secret = "dminstall".getBytes();
int spos = 0;
for (int pos = 0; pos < input.length; ++pos) {
output[pos] = (byte) (input[pos] ^ secret[spos]);
spos += 1;
if (spos >= secret.length) {
spos = 0;
}
}
return output;
}
static void downloadFromUrl(String urlString, String localFilename) throws IOException {
InputStream is = null;
FileOutputStream fos = null;
System.out.println("Downloading :" + urlString);
URL url = new URL(urlString);
try {
URLConnection urlConn = url.openConnection();// connect
is = urlConn.getInputStream(); // get connection inputstream
fos = new FileOutputStream(localFilename); // open output stream to the local file
byte[] buffer = new byte[4096]; // declare 4KB buffer
int len;
// while we have available data, continue downloading and storing to the local file
while ((len = is.read(buffer)) > 0) {
fos.write(buffer, 0, len);
}
} finally {
try {
if (is != null) {
is.close();
}
} finally {
if (fos != null) {
fos.close();
}
}
}
}
}
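
The properties-file update step can also be exercised in isolation. A minimal sketch, assuming a local ./cfg/ directory that already contains transducerers.properties and userperspective.properties; the algorithm and category names are hypothetical:

import org.gcube.dataanalysis.wps.mapper.DataMinerUpdater;

public class UpdateFilesExample {
	public static void main(String[] args) throws Exception {
		// Appends "MY_ALGORITHM=org.example.MyAlgorithm" to ./cfg/transducerers.properties (if missing)
		// and adds MY_ALGORITHM to the MY_CATEGORY line of ./cfg/userperspective.properties.
		DataMinerUpdater.UpdateFiles("./cfg/", "transducerers", "MY_ALGORITHM",
				"org.example.MyAlgorithm", "MY_CATEGORY");
	}
}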


@ -0,0 +1,114 @@
package org.gcube.dataanalysis.wps.mapper;
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.util.ArrayList;
import java.util.List;
import org.gcube.common.encryption.StringEncrypter;
import org.gcube.common.resources.gcore.GenericResource;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.infrastructure.DatabaseInfo;
import org.gcube.informationsystem.publisher.RegistryPublisher;
import org.gcube.informationsystem.publisher.RegistryPublisherFactory;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
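/**
 * Helper for dialoguing with the Information System in a given scope: it resolves database
 * ServiceEndpoints (decrypting the stored password) and lists or removes the GenericResources
 * that describe published algorithms.
 */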
public class InfrastructureDialoguer {
public String scope;
public InfrastructureDialoguer(String scope){
this.scope = scope;
}
public DatabaseInfo getDatabaseInfo(String resourceName) throws Exception{
DatabaseInfo dbi = new DatabaseInfo();
AnalysisLogger.getLogger().debug("Searching for Database "+resourceName+" in scope "+scope);
SimpleQuery query = queryFor(ServiceEndpoint.class);
// query.addCondition("$resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'StatisticalManagerDataBase' ");
// query.addCondition("$resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq '"+resourceName+"' ");
query.addCondition("$resource/Profile/Name eq '"+resourceName+"' ");
DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
List<ServiceEndpoint> resources = client.submit(query);
if (resources==null || resources.size()==0){
throw new Exception("No resource named "+resourceName+" available in scope "+scope);
}
else{
AccessPoint ap = resources.get(0).profile().accessPoints().iterator().next();
dbi.url = ap.address();
dbi.username = ap.username();
dbi.password = StringEncrypter.getEncrypter().decrypt(ap.password().trim());
for (ServiceEndpoint.Property property:ap.properties()){
if (property.name().equalsIgnoreCase("driver"))
dbi.driver = property.value();
}
AnalysisLogger.getLogger().debug("Found Database : "+dbi);
}
if (dbi.url == null)
throw new Exception("No database URL for resource "+resourceName+" available in scope "+scope);
return dbi;
}
public void deleteAlgorithmInScope(String algorithmName, String secondaryType) throws Exception{
System.out.println("Searching for Algorithms in scope "+scope);
SimpleQuery query = queryFor(GenericResource.class);
query.addCondition("$resource/Profile/SecondaryType eq '"+secondaryType+"' ");
DiscoveryClient<GenericResource> client = clientFor(GenericResource.class);
List<GenericResource> resources = client.submit(query);
if (resources==null || resources.size()==0){
System.out.println("No resource named "+secondaryType+"/"+algorithmName+" available in scope "+scope);
return;
}
System.out.println("Found "+resources.size()+" resources");
for (GenericResource resource: resources){
if (resource.profile().name().equals(algorithmName)){
System.out.println("Removing Algorithm "+algorithmName);
RegistryPublisher rp = RegistryPublisherFactory.create();
rp.remove(resource);
}
}
}
public List<String> getAlgorithmsInScope(String secondaryType) throws Exception{
AnalysisLogger.getLogger().debug("Searching for Algorithms in scope "+scope);
SimpleQuery query = queryFor(GenericResource.class);
query.addCondition("$resource/Profile/SecondaryType eq '"+secondaryType+"' ");
DiscoveryClient<GenericResource> client = clientFor(GenericResource.class);
List<GenericResource> resources = client.submit(query);
if (resources==null || resources.size()==0){
System.out.println("WARNING: No resource named StatisticalManagerAlgorithm available in scope "+scope);
return new ArrayList<String>();
}
List<String> resourcesNames = new ArrayList<String>();
AnalysisLogger.getLogger().debug("Found "+resources.size()+" resources");
for (GenericResource resource: resources){
resourcesNames.add(resource.profile().name());
}
return resourcesNames;
}
public static void main(String[] args) throws Exception{
AnalysisLogger.setLogger("cfg/"
+ AlgorithmConfiguration.defaultLoggerFile);
InfrastructureDialoguer dialoguer = new InfrastructureDialoguer("/gcube/devsec/devVRE");
// dialoguer.getDatabaseInfo("StatisticalManagerDataBase");
dialoguer.getDatabaseInfo("FishBase");
}
}
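
A minimal sketch that lists the algorithms published in a scope; the scope below is the example scope used in the main method above and is an assumption for the environment at hand:

import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.wps.mapper.InfrastructureDialoguer;

public class ListAlgorithmsExample {
	public static void main(String[] args) throws Exception {
		String scope = "/gcube/devsec/devVRE";
		ScopeProvider.instance.set(scope);
		InfrastructureDialoguer dialoguer = new InfrastructureDialoguer(scope);
		// Print the names of the GenericResources with secondary type StatisticalManagerAlgorithm.
		for (String name : dialoguer.getAlgorithmsInScope("StatisticalManagerAlgorithm"))
			System.out.println(name);
	}
}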


@ -0,0 +1,48 @@
package org.gcube.dataanalysis.wps.mapper;
import java.io.File;
import java.io.FileReader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
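/**
 * Developer utility: reads an algorithm properties file and userperspective.properties from a
 * local checkout and prints one addAlgorithm.sh command line per algorithm, using the category
 * under which the algorithm is listed (or OTHER when it is not listed).
 */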
public class WpsxmlGenerator {
public static void main (String[] args) throws Exception{
// String transducerers = "C:/Users/coro/Desktop/WorkFolder/Workspace/EcologicalEngineConfiguration/cfg/transducerers.properties";
// String transducerers = "C:/Users/coro/Desktop/WorkFolder/Workspace/EcologicalEngineConfiguration/cfg/models.properties";
String transducerers = "C:/Users/coro/Desktop/WorkFolder/Workspace/EcologicalEngineConfiguration/cfg/nodealgorithms.properties";
String userp = "C:/Users/coro/Desktop/WorkFolder/Workspace/EcologicalEngineConfiguration/cfg/userperspective.properties";
String atype = "nodealgorithms";
Properties props = new Properties();
props.load(new FileReader(new File(transducerers)));
Properties persp= new Properties();
persp.load(new FileReader(new File(userp)));
Collection<String> unsorted = (Collection)props.keySet();
List<String> list = new ArrayList<String>(unsorted);
java.util.Collections.sort(list);
for (Object algorithm:list)
{
String classname = (String) props.getProperty((String)algorithm);
String found = "OTHER";
for (Object category:persp.keySet()){
String algorithms = persp.getProperty((String)category);
if (algorithms.contains((String)algorithm)){
found = (String)category;
break;
}
}
String addAlgorithm = "./addAlgorithm.sh "+((String)algorithm).trim()+" " +found+" "+classname+" /gcube/devsec "+atype+" Y "+"a test algorithm for the alg publisher";
System.out.println(addAlgorithm);
//System.out.println("<Property name=\"Algorithm\" active=\"true\">org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers."+algorithm+"</Property>");
}
}
}


@ -0,0 +1,343 @@
package org.gcube.dataanalysis.wps.remote;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.params.HttpConnectionParams;
import org.apache.http.params.HttpParams;
import org.apache.http.util.EntityUtils;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
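/**
 * Utility for checking a DataMiner/WPS deployment: it scans the mappedclasses packages (from the
 * classpath or from the deployed jars), fetches WPS pages over HTTP (optionally with a
 * gcube-token header) and reports DescribeProcess responses that contain exceptions.
 */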
public class GetCapabilitiesChecker {
private static final char DOT = '.';
private static final char SLASH = '/';
private static final String CLASS_SUFFIX = ".class";
private static final String BAD_PACKAGE_ERROR = "Unable to get resources from path '%s'. Are you sure the package '%s' exists?";
public static List<Class<?>> getClassesInSamePackageFromJar(String packageName) throws Exception {
String scannedPath = packageName.replace(".", "/");
URL scannedUrl = Thread.currentThread().getContextClassLoader().getResource(scannedPath);
String jarPath = scannedUrl.getFile();
AnalysisLogger.getLogger().debug("Jar Path complete: " + jarPath);
jarPath = jarPath.substring(jarPath.indexOf("file:/") + 6, jarPath.lastIndexOf("!"));
if (jarPath.startsWith("home"))
jarPath = "/" + jarPath;
AnalysisLogger.getLogger().debug("Jar Path: " + jarPath);
JarFile jarFile = null;
List<Class<?>> result = new ArrayList<Class<?>>();
String pathTojars = new File(jarPath).getParent();
File[] jars = new File(pathTojars).listFiles();
try {
for (File jar : jars) {
// File otherjar = new File(new File(jarPath).getParent(),"dataminer-algorithms.jar");
if (jar.getName().equals("dataminer-algorithms.jar") || jar.getName().endsWith("_interface.jar")) {
//File otherjar = new File(new File(jarPath).getParent(), "dataminer-algorithms.jar");
File otherjar = jar;
if (otherjar.exists())
jarPath = otherjar.getAbsolutePath();
AnalysisLogger.getLogger().debug("Alternative Jar Path: " + jarPath);
jarFile = new JarFile(jarPath);
Enumeration<JarEntry> en = jarFile.entries();
while (en.hasMoreElements()) {
JarEntry entry = en.nextElement();
String entryName = entry.getName();
packageName = packageName.replace('.', '/');
if (entryName != null && entryName.endsWith(".class") && entryName.startsWith(packageName)) {
try {
Class entryClass = Class.forName(entryName.substring(0, entryName.length() - 6).replace('/', '.'));
if (entryClass != null) {
result.add(entryClass);
}
} catch (Throwable e) {
// do nothing, just continue processing classes
}
}
}// while
}// if jar known
}
return result;
} catch (Exception e) {
throw e;
} finally {
try {
if (jarFile != null) {
jarFile.close();
}
} catch (Exception e) {
}
}
}
public static List<Class<?>> find(String scannedPackage) {
String scannedPath = scannedPackage.replace(DOT, SLASH);
URL scannedUrl = Thread.currentThread().getContextClassLoader().getResource(scannedPath);
if (scannedUrl == null) {
throw new IllegalArgumentException(String.format(BAD_PACKAGE_ERROR, scannedPath, scannedPackage));
}
File scannedDir = new File(scannedUrl.getFile());
System.out.println("scannedDir:" + scannedDir);
System.out.println("scannedUrl:" + scannedUrl);
System.out.println("scannedUrl List:" + scannedDir.listFiles());
List<Class<?>> classes = new ArrayList<Class<?>>();
for (File file : scannedDir.listFiles()) {
classes.addAll(find(file, scannedPackage));
}
return classes;
}
private static List<Class<?>> find(File file, String scannedPackage) {
List<Class<?>> classes = new ArrayList<Class<?>>();
String resource = scannedPackage + DOT + file.getName();
if (file.isDirectory()) {
for (File child : file.listFiles()) {
classes.addAll(find(child, resource));
}
} else if (resource.endsWith(CLASS_SUFFIX)) {
int endIndex = resource.length() - CLASS_SUFFIX.length();
String className = resource.substring(0, endIndex);
try {
if (!(className.contains("IClusterer") || className.contains("IEvaluator") || className.contains("IGenerator") || className.contains("IModeller") || className.contains("ITransducer")))
classes.add(Class.forName(className));
} catch (ClassNotFoundException ignore) {
}
}
return classes;
}
public static String readPage(URL url) throws Exception {
DefaultHttpClient httpClient = new DefaultHttpClient();
HttpParams params = httpClient.getParams();
HttpConnectionParams.setConnectionTimeout(params, 7 * 60000);
HttpConnectionParams.setSoTimeout(params, 7 * 60000);
HttpConnectionParams.setStaleCheckingEnabled(params, false);
HttpConnectionParams.setSoKeepalive(params, false);
HttpGet request = new HttpGet(url.toURI());
HttpResponse response = httpClient.execute(request);
System.out.println("URL executed!");
Reader reader = null;
try {
reader = new InputStreamReader(response.getEntity().getContent());
System.out.println("Read input stream!");
StringBuffer sb = new StringBuffer();
{
int read;
char[] cbuf = new char[1024];
while ((read = reader.read(cbuf)) != -1)
sb.append(cbuf, 0, read);
}
EntityUtils.consume(response.getEntity());
httpClient.getConnectionManager().shutdown();
return sb.toString();
} finally {
if (reader != null) {
try {
reader.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
public static Charset getConnectionCharset(URLConnection connection) {
String contentType = null;
try {
contentType = connection.getContentType();
} catch (Exception e) {
// specified charset is not found,
// skip it to return the default one
return Charset.defaultCharset();
}
if (contentType != null && contentType.length() > 0) {
contentType = contentType.toLowerCase();
String charsetName = extractCharsetName(contentType);
if (charsetName != null && charsetName.length() > 0) {
try {
return Charset.forName(charsetName);
} catch (Exception e) {
// specified charset is not found,
// skip it to return the default one
}
}
}
// return the default charset
return Charset.defaultCharset();
}
/**
* Extract the charset name from the content type string. The content type string is received from the Content-Type header.
*
* @param contentType
* the content type string, must be not null.
* @return the found charset name or null if not found.
*/
private static String extractCharsetName(String contentType) {
// split into media types
final String[] mediaTypes = contentType.split(":");
if (mediaTypes.length > 0) {
// use only the first one, and split it on parameters
final String[] params = mediaTypes[0].split(";");
// find the charset parameter and return its value
for (String each : params) {
each = each.trim();
if (each.startsWith("charset=")) {
// return the charset name
return each.substring(8).trim();
}
}
}
return null;
}
private static String RUNTIME_RESOURCE_NAME = "ReportsStoreGateway";
private static String CATEGORY_NAME = "Service";
public static String readPageNoHttpClient(URL url) throws Exception {
URLConnection conn = url.openConnection();
// pretend to be a browser (make the request from Java look more browser-like)
conn.addRequestProperty("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.95 Safari/537.11");
conn.setDoOutput(true);
conn.setAllowUserInteraction(true);
conn.setConnectTimeout(25 * 60000);
conn.setReadTimeout(25 * 60000);
Charset charset = getConnectionCharset(conn);
BufferedReader dis = new BufferedReader(new InputStreamReader(conn.getInputStream(), charset));
String inputLine;
StringBuffer pageBuffer = new StringBuffer();
// Read the whole response, line by line
while ((inputLine = dis.readLine()) != null) {
pageBuffer.append(inputLine + "\r\n");
}
String page = pageBuffer.toString();
System.out.println(page);
conn.getInputStream().close();
return page;
}
public static String readPageHTTPHeader(URL url, String token) throws Exception {
URLConnection conn = url.openConnection();
// pretend to be a browser (make the request from Java look more browser-like)
conn.addRequestProperty("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.95 Safari/537.11");
conn.setDoOutput(true);
conn.setAllowUserInteraction(true);
conn.setConnectTimeout(25 * 60000);
conn.setReadTimeout(25 * 60000);
conn.setRequestProperty("gcube-token", token);
Charset charset = getConnectionCharset(conn);
BufferedReader dis = new BufferedReader(new InputStreamReader(conn.getInputStream(), charset));
String inputLine;
StringBuffer pageBuffer = new StringBuffer();
// Read the whole response, line by line
while ((inputLine = dis.readLine()) != null) {
pageBuffer.append(inputLine + "\r\n");
}
String page = pageBuffer.toString();
System.out.println(page);
conn.getInputStream().close();
return page;
}
// build config.xml
public static void main(String[] args) throws Exception {
String packageS = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses";
List<Class<?>> classes = GetCapabilitiesChecker.find(packageS);
System.out.println(classes + "\n");
for (Class<?> classfind : classes) {
System.out.println("<Property name=\"Algorithm\" active=\"true\">" + classfind.getName() + "</Property>");
}
// System.exit(0);
System.out.println("\n");
System.out.println(classes.size() + " algorithms");
}
public static void main1(String[] args) throws Exception {
String packageS = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses";
List<Class<?>> classes = GetCapabilitiesChecker.find(packageS);
System.out.println(classes + "\n");
for (Class<?> classfind : classes) {
System.out.println("<Property name=\"Algorithm\" active=\"true\">" + classfind.getName() + "</Property>");
}
// System.exit(0);
System.out.println("\n");
for (Class<?> classfind : classes) {
System.out.println("http://localhost:8080/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&Identifier=" + classfind.getName() + "\n");
}
System.out.println("\n");
System.out.println("Checking errors in Processes descriptions");
int counter = 0;
for (Class<?> classfind : classes) {
String httplink = "http://statistical-manager-new.d4science.org:8080/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&Identifier=" + classfind.getName();
if (!httplink.contains("IClusterer") && !httplink.contains("IEvaluator") && !httplink.contains("IGenerator") && !httplink.contains("IModeller") && !httplink.contains("ITransducer")) {
String pageCheck = readPage(new URL(httplink));
counter++;
if (pageCheck.contains("ows:ExceptionText") || pageCheck.contains("Exception")) {
System.out.println("Reading Link: " + httplink);
System.out.println("ERROR:\n" + pageCheck);
}
}
}
System.out.println("Checked " + counter + " algorithms");
}
}
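
A minimal sketch that checks a single process description through the token-based reader; the host, the algorithm identifier and the token are placeholders:

import java.net.URL;
import org.gcube.dataanalysis.wps.remote.GetCapabilitiesChecker;

public class DescribeProcessCheckExample {
	public static void main(String[] args) throws Exception {
		String link = "http://localhost:8080/wps/WebProcessingService?Request=DescribeProcess"
				+ "&Service=WPS&Version=1.0.0"
				+ "&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.NETCDF_SUPPORT_JAVA_13";
		String page = GetCapabilitiesChecker.readPageHTTPHeader(new URL(link), "YOUR-GCUBE-TOKEN");
		System.out.println(page.contains("Exception") ? "ERROR:\n" + page : "OK");
	}
}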


@ -0,0 +1,184 @@
package org.gcube.dataanalysis.wps.remote;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.net.URL;
import java.net.URLEncoder;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import org.junit.Test;
import org.slf4j.LoggerFactory;
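/**
 * JUnit regression suite for deployed DataMiner instances: each test reads a text file listing,
 * for every algorithm, a DescribeProcess URL and an Execute URL, invokes both over HTTP and stops
 * at the first response containing a WPS exception, printing per-algorithm execution times.
 */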
public class RegressionTests {
public static boolean checkHttpPage(String httplink, String page) {
if (page.contains("ows:ExceptionText") || page.contains("Exception")) {
System.out.println("Reading Link: " + httplink);
System.out.println("ERROR:\n" + page);
return false;
}
return true;
}
public static long getDateDiff(Date date1, Date date2, TimeUnit timeUnit) {
long diffInMillies = date2.getTime() - date1.getTime();
return timeUnit.convert(diffInMillies, TimeUnit.MILLISECONDS);
}
public static String prepareURL(String executionURL) throws Exception {
String firstPart = executionURL.substring(0, executionURL.indexOf("DataInputs=") + 11);
System.out.println("Execution URL:" + firstPart);
String secondPart = URLEncoder.encode(executionURL.substring(executionURL.indexOf("DataInputs=") + 11), "UTF-8");
System.out.println("Parameters: " + secondPart);
executionURL = firstPart + secondPart;
return executionURL;
}
public static void callHttps(String httpURLFile) throws Exception {
//ch.qos.logback.classic.Logger root = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger(ch.qos.logback.classic.Logger.ROOT_LOGGER_NAME);
//root.setLevel(ch.qos.logback.classic.Level.OFF);
BufferedReader br = new BufferedReader(new FileReader(new File(httpURLFile)));
String line = br.readLine();
int counter = 0;
Date d0 = new Date(System.currentTimeMillis());
LinkedHashMap<String, String> executionTimes = new LinkedHashMap<String, String>();
while (line != null) {
boolean check = true;
if (line.contains(" - ") && !line.startsWith("#")) {
Date d00 = new Date(System.currentTimeMillis());
String algorithmName = line.substring(line.indexOf("-") + 1).trim();
String detailsURL = br.readLine();
if (!detailsURL.startsWith("http://"))
detailsURL = "http://" + detailsURL;
System.out.println("************************************************************");
System.out.println("TESTING ALGORITHM : " + algorithmName);
System.out.println("************************************************************");
String executionURL = br.readLine();
String firstPart = executionURL.substring(0, executionURL.indexOf("DataInputs=") + 11);
System.out.println("Execution URL:" + firstPart);
String secondPart = URLEncoder.encode(executionURL.substring(executionURL.indexOf("DataInputs=") + 11), "UTF-8");
System.out.println("Parameters: " + secondPart);
executionURL = firstPart + secondPart;
System.out.println("CHECKING DEFINITION " + algorithmName + " : " + detailsURL);
String pageCheck = GetCapabilitiesChecker.readPageNoHttpClient(new URL(detailsURL));
check = checkHttpPage(detailsURL, pageCheck);
System.out.println("DEFINITION CHECK " + check);
if (!check)
break;
System.out.println("EXECUTING " + algorithmName + " : " + executionURL);
executionURL = executionURL.replace(".nc", "_test" + UUID.randomUUID() + ".nc");
pageCheck = GetCapabilitiesChecker.readPageNoHttpClient(new URL(executionURL));
System.out.println("EXECUTION RESULT " + pageCheck);
check = checkHttpPage(executionURL, pageCheck);
System.out.println("EXECUTION CHECK " + check);
if (!check)
break;
Date d11 = new Date(System.currentTimeMillis());
System.out.println("EXECUTION TIME " + algorithmName + " : " + getDateDiff(d00, d11, TimeUnit.MILLISECONDS) + " s");
executionTimes.put(algorithmName, "" + getDateDiff(d00, d11, TimeUnit.MILLISECONDS));
System.out.println("-------------------------------------------------------------\n");
counter++;
}
if (!check) {
System.out.println("EXECUTION FAILURE! - BREAK -");
break;
}
line = br.readLine();
}
Date d1 = new Date(System.currentTimeMillis());
System.out.println("CHECKED " + counter + " PAGES in " + getDateDiff(d0, d1, TimeUnit.MINUTES) + " minutes " + " (" + getDateDiff(d0, d1, TimeUnit.SECONDS) + " s)");
System.out.println("EXECUTION TIMES SUMMARY:");
for (String key : executionTimes.keySet()) {
String time = executionTimes.get(key);
System.out.println(key + "," + time + " ms");
}
br.close();
}
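// Input-file format assumed by callHttps, inferred from the parsing above (names below are only an
// illustration): lines starting with '#' are skipped; each algorithm entry spans three consecutive lines:
//   1 - SOME_ALGORITHM_NAME
//   <DescribeProcess/details URL>
//   <Execute URL ending with &DataInputs=<parameters>>
// "http://" is prepended to the details URL when missing, and any ".nc" occurrence in the Execute URL is
// made unique per run before the request is issued.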
@Test
public void testDevNext() throws Exception {
String algorithmsfile = "tests/Test-dataminer-devNext.txt";
callHttps(algorithmsfile);
}
@Test
public void testPreprod() throws Exception {
String algorithmsfile = "tests/Test-dataminer-pre.txt";
callHttps(algorithmsfile);
}
@Test
public void testDevVRE() throws Exception {
String algorithmsfile = "tests/Test-dataminer-dev1.txt";
callHttps(algorithmsfile);
}
@Test
public void testProd1() throws Exception {
String algorithmsfile = "tests/Test-dataminer-prod1.txt";
callHttps(algorithmsfile);
}
@Test
public void testProd2() throws Exception {
String algorithmsfile = "tests/Test-dataminer-prod2.txt";
callHttps(algorithmsfile);
}
@Test
public void testProd3() throws Exception {
String algorithmsfile = "tests/Test-dataminer-prod3.txt";
callHttps(algorithmsfile);
}
@Test
public void testProd4() throws Exception {
String algorithmsfile = "tests/Test-dataminer-prod4.txt";
callHttps(algorithmsfile);
}
@Test
public void testProd5() throws Exception {
String algorithmsfile = "tests/Test-dataminer-prod5.txt";
callHttps(algorithmsfile);
}
@Test
public void testProd6() throws Exception {
String algorithmsfile = "tests/Test-dataminer-prod6.txt";
callHttps(algorithmsfile);
}
@Test
public void testProdGeneralProxy() throws Exception {
String algorithmsfile = "tests/Test-dataminer-proxy-general.txt";
callHttps(algorithmsfile);
}
@Test
public void testProdBigDataProxy() throws Exception {
String algorithmsfile = "tests/Test-dataminer-proxy-bigdata.txt";
callHttps(algorithmsfile);
}
@Test
public void testEGI() throws Exception {
String algorithmsfile = "tests/Test-dataminer-EGI.txt";
callHttps(algorithmsfile);
}
}

View File

@ -0,0 +1,53 @@
package org.gcube.dataanalysis.wps.remote;
public class RemoteAligner extends RemoteInstaller{
/*
public static void align(String dataminer,String password) throws Exception{
long t0 = System.currentTimeMillis();
String libdir = "cd ./tomcat/webapps/wps/WEB-INF/lib/";
String getAlgorithms = "wget -r -l1 -e robots=off --no-parent http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/data-analysis/DataMinerConfiguration/algorithms/";
String moveAlgorithms = "mv svn.research-infrastructures.eu/public/d4science/gcube/trunk/data-analysis/DataMinerConfiguration/algorithms/* ~/tomcat/webapps/wps/WEB-INF/lib/";
String rmAlgorithms = "rm -r svn.research-infrastructures.eu/";
String libcfg = "cd ../../ecocfg/";
String getconfig = "wget -r -l1 -e robots=off --no-parent http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/data-analysis/DataMinerConfiguration/cfg/";
String moveConfig = "mv svn.research-infrastructures.eu/public/d4science/gcube/trunk/data-analysis/DataMinerConfiguration/cfg/* ~/tomcat/webapps/wps/ecocfg/";
String rmConfig = "rm -r svn.research-infrastructures.eu/";
String configDir = "cd ../config/";
String changewpsconfig = "sed -Ei 's/localhost/"+dataminer+"/g' wps_config.xml";
String commands [] = {
sshConnection+dataminer,"2",
"y","0",
password,"0",
"ls -l","0",
"./stopContainer.sh","3",
libdir,"0",
getAlgorithms,"30",
moveAlgorithms,"1",
rmAlgorithms,"1",
libcfg,"0",
getconfig,"5",
moveConfig,"1",
rmConfig,"1",
configDir,"0",
changewpsconfig,"1",
"cd /home/gcube/","0",
"./startContainer.sh","60"
};
cmd2(commands);
System.out.println("Elapsed Time: "+(System.currentTimeMillis()-t0));
}
*/
}

View File

@ -0,0 +1,343 @@
package org.gcube.dataanalysis.wps.remote;
import java.io.File;
import java.io.FileWriter;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools;
public class RemoteInstaller {
static String sshConnection = "plink -ssh -i privatekeyss2.ppk gcube@";
static String print(InputStream std,OutputStream out,InputStream err) throws Exception{
int value = 0;
StringBuffer sb = new StringBuffer();
if (std.available () > 0) {
System.out.println ("STD:");
value = std.read ();
System.out.print ((char) value);
sb.append(""+(char) value);
while (std.available () > 0) {
value = std.read ();
System.out.print ((char) value);
sb.append(""+(char) value);
}
}
if (err.available () > 0) {
System.out.println ("ERR:");
value = err.read ();
System.out.print ((char) value);
sb.append(""+(char) value);
while (err.available () > 0) {
value = err.read ();
System.out.print ((char) value);
sb.append(""+(char) value);
}
}
System.out.println();
return sb.toString();
}
static void cmd1(String[] commands) throws Exception {
Runtime r = Runtime.getRuntime ();
Process p = r.exec (commands[0]);
InputStream std = p.getInputStream ();
OutputStream out = p.getOutputStream ();
InputStream err = p.getErrorStream ();
Thread.sleep (1000);
print(std,out,err);
int commandTries = 1;
for (int i=2;i<commands.length;i=i+2){
String command = commands[i];
System.out.println("Executing "+command);
out.write ((command+"\n").getBytes ());
out.flush ();
Thread.sleep (1000);
String value = print(std,out,err);
System.out.println("N. LINES: ************************ "+value.length());
int k = 1;
int steps = Integer.parseInt(commands[i+1]);
StringBuffer lastline = new StringBuffer();
lastline.append(value);
while (k<steps){
Thread.sleep(1000);
System.out.println("flushing...."+k);
value = print(std,out,err);
System.out.println("length...."+value.length());
k++;
if (value.length()>0){
k=1;
lastline.append(value);
}
}
if (command.contains("./addAlgorithm")){
//if (!lastline.toString().contains("All done!")){
if (lastline.toString().contains("Exception:")){
if (commandTries<2){
commandTries++;
i = i-2; //retry the command
}
else{
System.err.println("Error at installing the algorithm!!!");
System.err.println("last line "+lastline);
System.exit(-1);
}
}
}
}
p.destroy ();
System.out.println("Ready!");
}
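// Convention of the commands[] array shared by cmd1 and cmd2, as read from the loops above: commands[0]
// is the process to launch (here the plink SSH connection string), and from index 2 onward the entries
// come in pairs of (shell command, number of one-second polling steps to wait for the output to settle);
// commands[1] is not consumed by either method. Minimal sketch (host and password are placeholders):
//   String[] commands = { sshConnection + "some-dataminer-host", "2", "y", "0", "password", "0", "ls -l", "0" };
//   cmd1(commands);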
static void cmd2(String[] commands) throws Exception {
Runtime r = Runtime.getRuntime ();
Process p = r.exec (commands[0]);
InputStream std = p.getInputStream ();
OutputStream out = p.getOutputStream ();
InputStream err = p.getErrorStream ();
Thread.sleep (1000);
print(std,out,err);
int commandTries = 1;
for (int i=2;i<commands.length;i=i+2){
String command = commands[i];
System.out.println("Executing "+command);
out.write ((command+"\n").getBytes ());
out.flush ();
Thread.sleep (1000);
String value = print(std,out,err);
System.out.println("N. LINES: ************************ "+value.length());
int k = 1;
int steps = Integer.parseInt(commands[i+1]);
StringBuffer lastline = new StringBuffer();
lastline.append(value);
while (k<steps){
Thread.sleep(1000);
System.out.println("flushing...."+k);
value = print(std,out,err);
System.out.println("length...."+value.length());
k++;
if (value.length()>0){
k=1;
lastline.append(value);
System.out.println("lastline: "+value.substring(0, Math.min(200,value.length())));
if (value.startsWith("gcube@dataminer")){
System.out.println("Prompt READY!");
break;
}
}
}
if (command.contains("./addAlgorithm")){
//if (!lastline.toString().contains("All done!")){
if (lastline.toString().contains("Exception:")){
if (commandTries<2){
commandTries++;
i = i-2; //retry the command
}
else{
System.err.println("Error at installing the algorithm!!!");
System.err.println("last line "+lastline);
System.exit(-1);
}
}
}
}
p.destroy ();
System.out.println("Ready!");
}
public static void startRobot(String dataminer,String password,String scope, boolean skipinstallerdownload) throws Exception{
String filepath = "DataMinerAlgorithms.txt";
startRobot(dataminer, password, scope, filepath, skipinstallerdownload);
}
public static void startRobot(String dataminer,String password,String scope, String filepath, boolean skipinstallerdownload) throws Exception{
long t0 = System.currentTimeMillis();
String installStrings = FileTools.loadString(filepath,"UTF-8");
//String[] install = installStrings.split("\n");
String[] install = installStrings.split("\n");
System.out.println("Algorithms to install "+install.length);
ArrayList<String> installArray = new ArrayList<String>(Arrays.asList(install));
String rmlogging = "rm ./tomcat/webapps/wps/WEB-INF/lib/log4j-over-slf4j-1.7.5.jar";
String rmlib1 = "rm ./tomcat/webapps/wps/WEB-INF/lib/STEP1VPAICCATBFTERetros-1.0.0.jar";
// String rmlib1 = "rm ./tomcat/webapps/wps/WEB-INF/lib/ECOPATH*";
String rmlib2 = "rm ./tomcat/webapps/wps/WEB-INF/lib/TunaAtlasDataAccess-1.0.0.jar";
String rmlib3 = "rm ./tomcat/webapps/wps/WEB-INF/lib/dataminer-algorithms.jar";
String rmInstaller = "rm algorithmInstaller.zip";
String rmInstallerFolder = "rm -r ./algorithmInstaller";
String chmod = "chmod 777 tomcat/webapps/wps/config/*";
String rmSMState = "rm -r SmartGears/state/";
//String commands [] = {sshConnection+dataminer,"0",password,"0","ls -l","0",rmlogging,"0",chmod,"0","cd algorithmInstaller","0",install,"5",install,"5","cd ..","0",rmSMState,"0","./stopContainer.sh","3","./startContainer.sh","30"};
String forecommands [] = null;
if (!skipinstallerdownload){
String getInstaller = "wget --no-check-certificate https://svn.research-infrastructures.eu/public/d4science/gcube/trunk/data-analysis/DataminerAlgorithmsInstaller/package/algorithmInstaller.zip";
String unzipInstaller = "unzip algorithmInstaller.zip";
String choice= "N";
String mod= "chmod 777 -R algorithmInstaller/*";
String iforecommands [] = {sshConnection+dataminer,"2","y","0",password,"0","ls -l","0",rmlogging,"0",rmlib1,"0",rmlib2,"0",rmlib3,"0",rmInstaller,"0",rmInstallerFolder,"0",
chmod,"0",getInstaller,"2",unzipInstaller,"2",choice,"0",mod,"0","./stopContainer.sh","3",
"cd algorithmInstaller","0"};
forecommands = iforecommands;
}
else{
String iforecommands [] = {sshConnection+dataminer,"2","y","0",password,"0","ls -l","0",rmlogging,"0",rmlib1,"0",rmlib2,"0",rmlib3,"0",rmInstaller,"0",
chmod,"0","./stopContainer.sh","10","cd algorithmInstaller","0"};
forecommands = iforecommands;
}
String postcommands [] = {"cd ..","0","./startContainer.sh","30"};
ArrayList<String> commandsArray = new ArrayList<String>(Arrays.asList(forecommands));
String [] installers = installStrings.split("\n");
StringBuffer sb = new StringBuffer();
int max = 10;
int i =0;
for (String installer: installers){
int limit = 1000;
if (installer.length()>limit)
installer = installer.substring(0, limit)+"...\"";
sb.append(installer+"\n");
if (i == max)
{
i = 0;
String commands = sb.toString();
commandsArray.add(commands);
commandsArray.add("10");
sb = new StringBuffer();
}
else
i++;
}
if (sb.toString().length()>0)
{
commandsArray.add(sb.toString());
commandsArray.add("10");
}
/*
for (String installer:installArray) {
installer = installer.trim().replace("/gcube/devsec", scope);
if (installer.length()>0){
commandsArray.add(installer);
commandsArray.add("3");
}
}
*/
commandsArray.addAll(new ArrayList<String>(Arrays.asList(postcommands)));
String[] commands = new String[commandsArray.size()];
commands = commandsArray.toArray(commands);
cmd1(commands);
System.out.println("Elapsed Time: "+(System.currentTimeMillis()-t0));
}
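// Hypothetical usage sketch (host and password are placeholders, not values from this repository):
// dump the per-environment algorithm list and replay it on a target dataminer host.
//   String file = dumpInstallerFile(Environment.DEV);   // writes DevInstaller.txt
//   startRobot("some-dataminer-host", "password", "/gcube/devsec", file, false);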
public enum Environment {
PROD,
DEV,
PROTO,
PRE
}
public static String dumpInstallerFile(Environment env){
String url = "";
String file = "";
switch(env){
case PROD:
url = "http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/data-analysis/DataMinerConfiguration/algorithms/prod/algorithms";
file ="ProdInstaller.txt";
break;
case DEV:
url = "http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/data-analysis/DataMinerConfiguration/algorithms/dev/algorithms";
file ="DevInstaller.txt";
break;
case PROTO:
url = "http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/data-analysis/DataMinerConfiguration/algorithms/proto/algorithms";
file ="ProtoInstaller.txt";
break;
case PRE:
url = "http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/data-analysis/DataMinerConfiguration/algorithms/preprod/algorithms";
file ="PreInstaller.txt";
break;
}
String answer = HttpRequest.sendGetRequest(url, "");
List<String> installationStringsList = new ArrayList<String>();
String answerbuffer = answer;
String install = "";
while (answerbuffer.length()>0){
for (int i=0;i<7;i++){
int pipe = answerbuffer.indexOf("|");
String token = answerbuffer.substring(0,pipe);
install+=token+"|";
answerbuffer = answerbuffer.substring(pipe+1);
}
install = install.trim();
if (!install.startsWith("|"))
install = "|"+install;
installationStringsList.add(install);
install = "";
}
StringBuffer sb = new StringBuffer();
for (String installer:installationStringsList){
if (installer.contains("deprecated"))
continue;
String [] tablerow = installer.split("\\|");
String row = tablerow[5];
if (row.contains("<notextile>")){
row = row .replace("<notextile>","").replace("</notextile>", "");
row = row.trim();
sb.append(row+"\n");
}
}
try {
FileWriter fw = new FileWriter(new File(file),false) ;
fw.write(sb.toString());
fw.close();
return file;
}catch(Exception e){
e.printStackTrace();
return null;
}
}
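// Row format assumed by the parser above (an illustration, not an excerpt from the real wiki page): the
// HTTP answer is consumed seven pipe-delimited tokens at a time (one table row per group); rows containing
// "deprecated" are skipped, and the installation command is taken from the cell wrapped in <notextile>
// tags (tablerow[5] above), e.g.
//   | AlgorithmName | ... | ... | ... | <notextile>./addAlgorithm.sh ...</notextile> | ... |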
}

View File

@ -0,0 +1,5 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.util;
public class Foo {
}

View File

@ -0,0 +1,19 @@
package=package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.#PACKAGE#;
import=import java.io.File;\nimport java.net.URL;\nimport org.n52.wps.algorithm.annotation.MethodOrder;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.util.LinkedHashMap;\nimport java.io.StringWriter;\nimport org.apache.commons.io.IOUtils;\nimport org.apache.xmlbeans.XmlObject;\nimport org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;\nimport org.n52.wps.algorithm.annotation.*;\nimport org.n52.wps.io.data.*;\nimport org.n52.wps.io.data.binding.complex.*;\nimport org.n52.wps.io.data.binding.literal.*;\nimport org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
description=@Algorithm(statusSupported=true, title="#TITLE#", abstrakt="#ABSTRACT#", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.#PACKAGE#.#CLASSNAME#", version = "1.1.0")
class_definition=public class #CLASSNAME# extends AbstractEcologicalEngineMapper implements #INTERFACE#{
class_closure=@Execute public void run() throws Exception { super.run(); } }
stringInput=@MethodOrder(value=#ORDER_VALUE#)\n@LiteralDataInput(abstrakt="#ABSTRACT#", defaultValue="#DEFAULT#", title="#TITLE#", identifier = "#ID#", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void set#IDMETHOD#(String data) {inputs.put(\"#ID#\",data);}
enumeratedInput=@MethodOrder(value=#ORDER_VALUE#)\n@LiteralDataInput(abstrakt="#ABSTRACT#", allowedValues= {#ALLOWED#}, defaultValue="#DEFAULT#", title="#TITLE#", identifier = "#ID#", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void set#IDMETHOD#(String data) {inputs.put(\"#ID#\",data);}
doubleInput=@MethodOrder(value=#ORDER_VALUE#)\n@LiteralDataInput(abstrakt="#ABSTRACT#", defaultValue="#DEFAULT#", title="#TITLE#", identifier = "#ID#", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void set#IDMETHOD#(Double data) {inputs.put(\"#ID#\",""+data);}
integerInput=@MethodOrder(value=#ORDER_VALUE#)\n@LiteralDataInput(abstrakt="#ABSTRACT#", defaultValue="#DEFAULT#", title="#TITLE#", identifier = "#ID#", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void set#IDMETHOD#(Integer data) {inputs.put(\"#ID#\",""+data);}
booleanInput=@MethodOrder(value=#ORDER_VALUE#)\n@LiteralDataInput(abstrakt="#ABSTRACT#", defaultValue="#DEFAULT#", allowedValues= {"true","false"}, title="#TITLE#", identifier = "#ID#", maxOccurs=1, minOccurs=1,binding = LiteralBooleanBinding.class) public void set#IDMETHOD#(Boolean data) {inputs.put(\"#ID#\",""+data);}
csvFileInput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataInput(abstrakt="#ABSTRACT#", title="#TITLE#", maxOccurs=1, minOccurs=1, identifier = "#ID#", binding = GenericFileDataBinding.class) public void set#IDMETHOD#(GenericFileData file) {inputs.put(\"#ID#\",file);}
gislinkInput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataInput(abstrakt="#ABSTRACT#", title="#TITLE#", maxOccurs=1, minOccurs=1, identifier = "#ID#", binding = GisLinkDataInputBinding.class) public void set#IDMETHOD#(GenericFileData file) {inputs.put(\"#ID#\",file);}
d4scienceFileInput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataInput(abstrakt="#ABSTRACT#", title="#TITLE#", maxOccurs=1, minOccurs=1, identifier = "#ID#", binding = D4ScienceDataInputBinding.class) public void set#IDMETHOD#(GenericFileData file) {inputs.put(\"#ID#\",file);}
d4scienceFileOutput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataOutput(abstrakt="#ABSTRACT#", title="#TITLE#", identifier = "#ID#", binding = D4ScienceFileDataBinding.class) public GenericFileData get#IDMETHOD#() {URL url=null;try {url = new URL((String) outputs.get("#ID#")); return new GenericFileData(url.openStream(),"application/d4science");} catch (Exception e) {e.printStackTrace();return null;}}
pngFileOutput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataOutput(abstrakt="#ABSTRACT#", title="#TITLE#", identifier = "#ID#", binding = PngFileDataBinding.class) public GenericFileData get#IDMETHOD#() {URL url=null;try {url = new URL((String) outputs.get("#ID#")); return new GenericFileData(url.openStream(),"image/png");} catch (Exception e) {e.printStackTrace();return null;}}
csvFileOutput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataOutput(abstrakt="#ABSTRACT#", title="#TITLE#", identifier = "#ID#", binding = CsvFileDataBinding.class) public GenericFileData get#IDMETHOD#() {URL url=null;try {url = new URL((String) outputs.get("#ID#")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
gisLinkOutput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataOutput(abstrakt="#ABSTRACT#", title="#TITLE#", identifier = "#ID#", binding = GisLinkDataBinding.class) public GenericFileData get#IDMETHOD#() {URL url=null;try {url = new URL((String) outputs.get("#ID#")); return new GenericFileData(url.openStream(),"application/geotiff");} catch (Exception e) {e.printStackTrace();return null;}}
stringOutput=@MethodOrder(value=#ORDER_VALUE#)\n@LiteralDataOutput(abstrakt="#ABSTRACT#", title="#TITLE#", identifier = "#ID#", binding = LiteralStringBinding.class) public String get#IDMETHOD#() {return (String) outputs.get("#ID#");}
optionalOutput=@MethodOrder()\n@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)\n public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
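# Illustrative expansion (not part of the template set; all values below are hypothetical): with
# #PACKAGE#=test, #CLASSNAME#=DummyAlgo, #TITLE#=Dummy and #ABSTRACT#=A test algorithm, the description,
# class_definition and class_closure templates above would expand to roughly (#INTERFACE# left as a placeholder):
#   @Algorithm(statusSupported=true, title="Dummy", abstrakt="A test algorithm", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.test.DummyAlgo", version = "1.1.0")
#   public class DummyAlgo extends AbstractEcologicalEngineMapper implements #INTERFACE# {
#   @Execute public void run() throws Exception { super.run(); } }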

View File

@ -0,0 +1,19 @@
package=package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.#PACKAGE#;
import=import java.io.File;\nimport java.net.URL;\nimport org.n52.wps.algorithm.annotation.MethodOrder;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.util.LinkedHashMap;\nimport java.io.StringWriter;\nimport org.apache.commons.io.IOUtils;\nimport org.apache.xmlbeans.XmlObject;\nimport org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;\nimport org.n52.wps.algorithm.annotation.*;\nimport org.n52.wps.io.data.*;\nimport org.n52.wps.io.data.binding.complex.*;\nimport org.n52.wps.io.data.binding.literal.*;\nimport org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
description=@Algorithm(statusSupported=true, title="#TITLE#", abstrakt="#ABSTRACT#", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.#PACKAGE#.#CLASSNAME#", version = "1.1.0")
class_definition=public class #CLASSNAME# extends AbstractEcologicalEngineMapper implements #INTERFACE#{
class_closure=@Execute public void run() throws Exception { super.run(); } }
stringInput=@MethodOrder(value=#ORDER_VALUE#)\n@LiteralDataInput(abstrakt="#ABSTRACT#", defaultValue="#DEFAULT#", title="#TITLE#", identifier = "#ID#", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void set#IDMETHOD#(String data) {inputs.put(\"#ID#\",data);}
enumeratedInput=@MethodOrder(value=#ORDER_VALUE#)\n@LiteralDataInput(abstrakt="#ABSTRACT#", allowedValues= {#ALLOWED#}, defaultValue="#DEFAULT#", title="#TITLE#", identifier = "#ID#", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void set#IDMETHOD#(String data) {inputs.put(\"#ID#\",data);}
doubleInput=@MethodOrder(value=#ORDER_VALUE#)\n@LiteralDataInput(abstrakt="#ABSTRACT#", defaultValue="#DEFAULT#", title="#TITLE#", identifier = "#ID#", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void set#IDMETHOD#(Double data) {inputs.put(\"#ID#\",""+data);}
integerInput=@MethodOrder(value=#ORDER_VALUE#)\n@LiteralDataInput(abstrakt="#ABSTRACT#", defaultValue="#DEFAULT#", title="#TITLE#", identifier = "#ID#", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void set#IDMETHOD#(Integer data) {inputs.put(\"#ID#\",""+data);}
booleanInput=@MethodOrder(value=#ORDER_VALUE#)\n@LiteralDataInput(abstrakt="#ABSTRACT#", defaultValue="#DEFAULT#", allowedValues= {"true","false"}, title="#TITLE#", identifier = "#ID#", maxOccurs=1, minOccurs=1,binding = LiteralBooleanBinding.class) public void set#IDMETHOD#(Boolean data) {inputs.put(\"#ID#\",""+data);}
csvFileInput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataInput(abstrakt="#ABSTRACT#", title="#TITLE#", maxOccurs=1, minOccurs=1, identifier = "#ID#", binding = GenericFileDataBinding.class) public void set#IDMETHOD#(GenericFileData file) {inputs.put(\"#ID#\",file);}
gislinkInput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataInput(abstrakt="#ABSTRACT#", title="#TITLE#", maxOccurs=1, minOccurs=1, identifier = "#ID#", binding = GisLinkDataInputBinding.class) public void set#IDMETHOD#(GenericFileData file) {inputs.put(\"#ID#\",file);}
d4scienceFileInput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataInput(abstrakt="#ABSTRACT#", title="#TITLE#", maxOccurs=1, minOccurs=1, identifier = "#ID#", binding = D4ScienceDataInputBinding.class) public void set#IDMETHOD#(GenericFileData file) {inputs.put(\"#ID#\",file);}
d4scienceFileOutput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataOutput(abstrakt="#ABSTRACT#", title="#TITLE#", identifier = "#ID#", binding = D4ScienceFileDataBinding.class) public GenericFileData get#IDMETHOD#() {URL url=null;try {url = new URL((String) outputs.get("#ID#")); return new GenericFileData(url.openStream(),"application/d4science");} catch (Exception e) {e.printStackTrace();return null;}}
pngFileOutput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataOutput(abstrakt="#ABSTRACT#", title="#TITLE#", identifier = "#ID#", binding = PngFileDataBinding.class) public GenericFileData get#IDMETHOD#() {URL url=null;try {url = new URL((String) outputs.get("#ID#")); return new GenericFileData(url.openStream(),"image/png");} catch (Exception e) {e.printStackTrace();return null;}}
csvFileOutput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataOutput(abstrakt="#ABSTRACT#", title="#TITLE#", identifier = "#ID#", binding = CsvFileDataBinding.class) public GenericFileData get#IDMETHOD#() {URL url=null;try {url = new URL((String) outputs.get("#ID#")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
gisLinkOutput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataOutput(abstrakt="#ABSTRACT#", title="#TITLE#", identifier = "#ID#", binding = GisLinkDataBinding.class) public GenericFileData get#IDMETHOD#() {URL url=null;try {url = new URL((String) outputs.get("#ID#")); return new GenericFileData(url.openStream(),"application/geotiff");} catch (Exception e) {e.printStackTrace();return null;}}
stringOutput=@MethodOrder(value=#ORDER_VALUE#)\n@LiteralDataOutput(abstrakt="#ABSTRACT#", title="#TITLE#", identifier = "#ID#", binding = LiteralStringBinding.class) public String get#IDMETHOD#() {return (String) outputs.get("#ID#");}
optionalOutput=@MethodOrder()\n@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)\n public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}