git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngine@56820 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
parent 9aba3dea04
commit 645c869f0d

@@ -1,3 +1,2 @@
LOCAL_WITH_DATABASE=org.gcube.dataanalysis.ecoengine.processing.LocalSplitGenerator
SIMPLE_LOCAL=org.gcube.dataanalysis.ecoengine.processing.LocalSimpleSplitGenerator
REMOTE_RAINYCLOUD=org.gcube.dataanalysis.ecoengine.processing.RainyCloudGenerator

@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="WINDOWS-1252" standalone="no"?>
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<jardesc>
    <jar path="StatisticalLibSupportLibraries/lib/EcologicalEngine/ecologicalDataMining.jar"/>
    <jar path="EcologicalEngineExecutor/AQUAMAPS_SUITABLE/ecologicalDataMining.jar"/>
    <options buildIfNeeded="true" compress="true" descriptionLocation="/EcologicalEngine/ecologicalEngine.jardesc" exportErrors="true" exportWarnings="true" includeDirectoryEntries="false" overwrite="true" saveDescription="true" storeRefactorings="false" useSourceFolders="false"/>
    <storedRefactorings deprecationInfo="true" structuralOnly="false"/>
    <selectedProjects/>

@@ -11,6 +11,6 @@
    </sealing>
    </manifest>
    <selectedElements exportClassFiles="true" exportJavaFiles="false" exportOutputFolder="false">
        <javaElement handleIdentifier="=EcologicalEngine1.2/src"/>
        <javaElement handleIdentifier="=EcologicalEngine/src\/main\/java"/>
    </selectedElements>
</jardesc>

@@ -0,0 +1,122 @@
package org.gcube.dataanalysis.ecoengine.clustering;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.Clusterer;
import org.gcube.dataanalysis.ecoengine.utils.Transformations;

import com.rapidminer.example.ExampleSet;
import com.rapidminer.operator.IOContainer;
import com.rapidminer.operator.IOObject;
import com.rapidminer.operator.clustering.Cluster;
import com.rapidminer.operator.clustering.ClusterModel;
import com.rapidminer.tools.OperatorService;

public class DBScan implements Clusterer{

    AlgorithmConfiguration config;
    String epsilon;
    String minPoints;
    ExampleSet points;
    ArrayList<ArrayList<String>> rows;

    public static void main(String[] args) {
        // TODO Auto-generated method stub

    }

    @Override
    public ALG_PROPS[] getSupportedAlgorithms() {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public INFRASTRUCTURE getInfrastructure() {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public void init() throws Exception {
        if (config!=null)
            config.initRapidMiner();
    }

    @Override
    public void setConfiguration(AlgorithmConfiguration config) {
        // TODO Auto-generated method stub

    }

    @Override
    public void shutdown() {
        // TODO Auto-generated method stub

    }

    protected void getSamples(double[][] sampleVectors) throws Exception{

        points = Transformations.matrix2ExampleSet(sampleVectors);

    }

    @Override
    public void cluster() throws Exception {
        rows = new ArrayList<ArrayList<String>>();
        com.rapidminer.operator.clustering.clusterer.DBScan clusterer = (com.rapidminer.operator.clustering.clusterer.DBScan) OperatorService.createOperator("DBScanClustering");
        clusterer.setParameter("local_random_seed", "-1");
        clusterer.setParameter("epsilon", epsilon);
        clusterer.setParameter("min_points", minPoints);
        clusterer.setParameter("add_cluster_attribute", "true");
        clusterer.setParameter("keep_example_set", "true");

        IOContainer innerInput = new IOContainer(points);
        IOContainer output = clusterer.apply(innerInput);

        IOObject[] outputvector = output.getIOObjects();

        ClusterModel innermodel = (ClusterModel) outputvector[1];
        for (Cluster c : innermodel.getClusters()){
            c.getClusterId();
        }

    }

    @Override
    public float getStatus() {
        // TODO Auto-generated method stub
        return 0;
    }

    @Override
    public StatisticalType getOutput() {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public List<StatisticalType> getInputParameters() {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public String getDescription() {
        // TODO Auto-generated method stub
        return null;
    }

}
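Note: in this first version cluster() runs the RapidMiner DBScan operator but only iterates the resulting ClusterModel, discarding the cluster identifiers, so the rows field is never filled. A minimal sketch of how the loop could collect the assignments is given below; Cluster.getExampleIds() and the two-column row layout are assumptions for illustration, not part of the committed code.

    // Hypothetical completion of the loop in cluster(): collect <sample id, cluster id> pairs into rows.
    for (Cluster c : innermodel.getClusters()) {
        int clusterId = c.getClusterId();
        for (Object exampleId : c.getExampleIds()) {   // assumed RapidMiner accessor
            ArrayList<String> row = new ArrayList<String>();
            row.add(String.valueOf(exampleId));        // identifier of the clustered sample
            row.add(String.valueOf(clusterId));        // identifier of the assigned cluster
            rows.add(row);
        }
    }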
@@ -65,6 +65,7 @@ public class AlgorithmConfiguration extends LexicalEngineConfiguration implement
    private String cachePath;
    private String persistencePath;
    private String distributionTable;
    private String tableSpace;
    private Boolean createTable = false;
    private Boolean useDB = true;

@@ -202,6 +203,14 @@ public class AlgorithmConfiguration extends LexicalEngineConfiguration implement
    public void setEndpoints(List<String> endpoints) {
        this.endpoints = endpoints;
    }

    public String getTableSpace() {
        return tableSpace;
    }

    public void setTableSpace(String tableSpace) {
        this.tableSpace = tableSpace;
    }
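The hunks above add the tableSpace, createTable and useDB settings to AlgorithmConfiguration, together with accessors for the tablespace. A minimal usage sketch follows; only setTableSpace comes from this diff, while setCreateTable and setUseDB are assumed bean-style setters for the new Boolean fields and the sample values are illustrative.

    AlgorithmConfiguration config = new AlgorithmConfiguration();
    config.setTableSpace("public");    // accessor introduced by this change; the value is an example
    // config.setCreateTable(true);    // assumed setter for the new createTable flag
    // config.setUseDB(true);          // assumed setter for the new useDB flag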
@@ -0,0 +1,32 @@
package org.gcube.dataanalysis.ecoengine.datatypes;

import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;

public class DatabaseType extends StatisticalType{

    public DatabaseType(DatabaseParameters databaseParameter, String name, String description, String defaultValue, boolean optional) {
        super(name, description, defaultValue, optional);
        this.databaseParameter=databaseParameter;
    }

    public DatabaseType(DatabaseParameters databaseParameter, String name, String description, String defaultValue) {
        super(name, description, defaultValue);
        this.databaseParameter=databaseParameter;
    }

    public DatabaseType(DatabaseParameters databaseParameter, String name, String description) {
        super(name, description);
        this.databaseParameter=databaseParameter;
    }

    protected DatabaseParameters databaseParameter;

    public DatabaseParameters getDatabaseParameter() {
        return databaseParameter;
    }

    public void setDatabaseParameter(DatabaseParameters databaseParameters) {
        this.databaseParameter = databaseParameters;
    }

}

@@ -0,0 +1,41 @@
package org.gcube.dataanalysis.ecoengine.datatypes;

import java.util.List;

import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;

public class InputTable extends StatisticalType{

    protected List<TableTemplates> templateNames;

    public InputTable(List<TableTemplates> templateName, String name, String description, String defaultValue, boolean optional) {
        super(name, description, defaultValue, optional);
        this.templateNames=templateName;
    }

    public InputTable(List<TableTemplates> templateName,String name, String description, String defaultValue) {
        super(name, description, defaultValue);
        this.templateNames=templateName;
    }

    public InputTable(List<TableTemplates> templateName,String name, String description) {
        super(name, description);
        this.templateNames=templateName;
    }

    public List<TableTemplates> getTemplateNames() {
        return templateNames;
    }

    public void setTemplateNames(List<TableTemplates> templateName) {
        this.templateNames = templateName;
    }

    public String getTableName(){
        return super.name;
    }

}

@@ -0,0 +1,33 @@
package org.gcube.dataanalysis.ecoengine.datatypes;

import java.util.List;
import java.util.UUID;

import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;

public class OutputTable extends InputTable{

    public OutputTable(List<TableTemplates> templateName, String name, String tableName, String description, String defaultValue, boolean optional) {
        super(templateName, name, description, defaultValue, optional);
        this.tableName=tableName;
    }

    public OutputTable(List<TableTemplates> templateName, String name, String tableName, String description, String defaultValue) {
        super(templateName, name, description, defaultValue);
        this.tableName=tableName;
    }

    public OutputTable(List<TableTemplates> templateName, String name, String tableName, String description) {
        super(templateName, name, description);
        this.tableName=tableName;
    }

    protected String tableName;

    public String getTableName(){
        return tableName;
    }

}

@@ -0,0 +1,50 @@
package org.gcube.dataanalysis.ecoengine.datatypes;

import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;

public class PrimitiveType extends StatisticalType{

    public PrimitiveType(String className, Object content, PrimitiveTypes type, String name, String description, String defaultValue, boolean optional) {
        super(name, description, defaultValue, optional);
        this.className=className;
        this.content=content;
        this.type=type;
    }

    public PrimitiveType(String className, Object content, PrimitiveTypes type,String name, String description, String defaultValue) {
        super(name, description, defaultValue);
        this.className=className;
        this.content=content;
        this.type=type;
    }

    public PrimitiveType(String className, Object content, PrimitiveTypes type,String name, String description) {
        super(name, description);
        this.className=className;
        this.content=content;
        this.type=type;
    }

    protected String className;

    public String getClassName() {
        return className;
    }

    public void setClassName(String className) {
        this.className = className;
    }

    protected Object content;

    public Object getContent() {
        return content;
    }

    public void setContent(Object content) {
        this.content = content;
    }

    protected PrimitiveTypes type;

    public PrimitiveTypes getType() {
        return type;
    }

    public void setType(PrimitiveTypes type) {
        this.type = type;
    }

}

@@ -0,0 +1,34 @@
package org.gcube.dataanalysis.ecoengine.datatypes;

import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;

public class ServiceType extends StatisticalType{

    public ServiceType(ServiceParameters serviceParameter,String name, String description, String defaultValue, boolean optional) {
        super(name, description, defaultValue, optional);
        this.serviceParameter = serviceParameter;
    }

    public ServiceType(ServiceParameters serviceParameter,String name, String description, String defaultValue) {
        super(name, description, defaultValue);
        this.serviceParameter = serviceParameter;
    }

    public ServiceType(ServiceParameters serviceParameter,String name, String description) {
        super(name, description);
        this.serviceParameter = serviceParameter;
    }

    protected ServiceParameters serviceParameter;

    public ServiceParameters getServiceParameter() {
        return serviceParameter;
    }

    public void setServiceParameter(ServiceParameters serviceParameter) {
        this.serviceParameter = serviceParameter;
    }

}

@@ -0,0 +1,62 @@
package org.gcube.dataanalysis.ecoengine.datatypes;

public class StatisticalType {

    protected String defaultValue;
    protected String description;
    protected String name;

    protected boolean optional;

    public StatisticalType(String name, String description,String defaultValue, boolean optional){
        this.name=name;
        this.description=description;
        this.defaultValue=defaultValue;
        this.optional=optional;
    }

    public StatisticalType(String name, String description,String defaultValue){
        this.name=name;
        this.description=description;
        this.defaultValue=defaultValue;
        this.optional=true;
    }

    public StatisticalType(String name, String description){
        this.name=name;
        this.description=description;
        this.defaultValue="";
        this.optional=true;
    }

    public String getDefaultValue() {
        return defaultValue;
    }

    public void setDefaultValue(String defaultValue) {
        this.defaultValue = defaultValue;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public boolean isOptional() {
        return optional;
    }

    public void setOptional(boolean optional) {
        this.optional = optional;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String toString(){
        return name+":"+description+":"+defaultValue+":"+optional;
    }
}
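StatisticalType is the common base of the parameter descriptors introduced by this commit (PrimitiveType, DatabaseType, ServiceType, InputTable, OutputTable). A minimal sketch of how a client could inspect the list returned by the new getInputParameters() methods is shown below; the agent variable and the printing logic are illustrative, not part of the commit.

    List<StatisticalType> inputs = agent.getInputParameters(); // 'agent' is any algorithm exposing the new signature
    for (StatisticalType st : inputs) {
        String kind = "generic";
        if (st instanceof DatabaseType)
            kind = "database:" + ((DatabaseType) st).getDatabaseParameter();
        else if (st instanceof InputTable)
            kind = "table:" + ((InputTable) st).getTemplateNames();
        else if (st instanceof PrimitiveType)
            kind = "primitive:" + ((PrimitiveType) st).getType();
        System.out.println(st.getName() + " [" + kind + "] default=" + st.getDefaultValue() + " optional=" + st.isOptional());
    }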
@@ -0,0 +1,11 @@
package org.gcube.dataanalysis.ecoengine.datatypes.enumtypes;

public enum DatabaseParameters {

    DATABASEUSERNAME,
    DATABASEPASSWORD,
    DATABASEURL,
    DATABASEDRIVER,
    DATABASEDIALECT

}

@@ -0,0 +1,12 @@
package org.gcube.dataanalysis.ecoengine.datatypes.enumtypes;

public enum PrimitiveTypes {
    STRING,
    NUMBER,
    CONSTANT,
    RANDOM,
    FILE,
    MAP,
    BOOLEAN

}

@@ -0,0 +1,11 @@
package org.gcube.dataanalysis.ecoengine.datatypes.enumtypes;

public enum ServiceParameters {

    EPR_LIST,
    INFRA,
    SERVICE,
    USERNAME,
    RANDOMSTRING

}

@@ -0,0 +1,12 @@
package org.gcube.dataanalysis.ecoengine.datatypes.enumtypes;

public enum TableTemplates {

    HSPEN,
    HCAF,
    HSPEC,
    OCCURRENCE,
    MINMAXLAT,
    TRAININGSET,
    TESTSET
}

@@ -3,16 +3,22 @@ package org.gcube.dataanalysis.ecoengine.evaluation;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.gcube.dataanalysis.ecoengine.utils.Operations;
import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
import org.hibernate.SessionFactory;

public class DiscrepancyAnalysis extends DataAnalysis {

@@ -42,41 +48,42 @@ public class DiscrepancyAnalysis extends DataAnalysis {
    private HashMap<String, String> output;

    @Override
    public HashMap<String, VarCouple> getInputParameters() {

        HashMap<String, VarCouple> parameters = new HashMap<String, VarCouple>();
        parameters.put("FirstTable", new VarCouple(VARTYPE.STRING, "hspec1"));
        parameters.put("SecondTable", new VarCouple(VARTYPE.STRING, "hspec2"));
        parameters.put("FirstTableCsquareColumn", new VarCouple(VARTYPE.STRING, "csquare"));
        parameters.put("SecondTableCsquareColumn", new VarCouple(VARTYPE.STRING, "csquarecode"));
        parameters.put("FirstTableProbabilityColumn", new VarCouple(VARTYPE.STRING, "csquare"));
        parameters.put("SecondTableProbabilityColumn", new VarCouple(VARTYPE.STRING, "csquarecode"));
        parameters.put("ComparisonThreshold", new VarCouple(VARTYPE.STRING, "0.1"));
        parameters.put("MaxSamples", new VarCouple(VARTYPE.STRING, "10000"));
        parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, ""));
        parameters.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, ""));
        parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, ""));
        parameters.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, ""));

    public List<StatisticalType> getInputParameters() {
        List<StatisticalType> parameters = new ArrayList<StatisticalType>();
        List<TableTemplates> templates = new ArrayList<TableTemplates>();
        templates.add(TableTemplates.HSPEC);
        templates.add(TableTemplates.TRAININGSET);
        templates.add(TableTemplates.TESTSET);
        InputTable p1 = new InputTable(templates,"FirstTable","First Table");
        InputTable p2 = new InputTable(templates,"SecondTable","Second Table");
        PrimitiveType p3 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "FirstTableCsquareColumn","the csquares column name in the first table ","csquarecode");
        PrimitiveType p4 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "SecondTableCsquareColumn","the csquares column name in the second table","csquarecode");
        PrimitiveType p5 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "FirstTableProbabilityColumn","the probability column in the first table","probability");
        PrimitiveType p6 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.NUMBER, "ComparisonThreshold","the comparison threshold","0.1");
        PrimitiveType p7 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.NUMBER, "MaxSamples","the comparison threshold","10000");
        DatabaseType p8 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
        DatabaseType p9 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
        DatabaseType p10 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver");
        DatabaseType p11 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
        DatabaseType p12 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect");

        parameters.add(p1);
        parameters.add(p2);
        parameters.add(p3);
        parameters.add(p4);
        parameters.add(p5);
        parameters.add(p6);
        parameters.add(p7);
        parameters.add(p8);
        parameters.add(p9);
        parameters.add(p10);
        parameters.add(p11);
        parameters.add(p12);

        return parameters;
    }

    @Override
    public List<String> getOutputParameters() {

        List<String> outputs = new ArrayList<String>();

        outputs.add("MEAN");
        outputs.add("VARIANCE");
        outputs.add("NUMBER_OF_ERRORS");
        outputs.add("NUMBER_OF_COMPARISONS");
        outputs.add("ACCURACY");
        outputs.add("MAXIMUM_ERROR");
        outputs.add("MAXIMUM_ERROR_POINT");

        return outputs;
    }

    @Override
    public void init(AlgorithmConfiguration config) throws Exception {

@@ -200,12 +207,8 @@ public class DiscrepancyAnalysis extends DataAnalysis {
    }

    @Override
    public VARTYPE getContentType() {
        return VARTYPE.MAP;
    }

    @Override
    public Object getContent() {
        return output;
    public StatisticalType getOutput() {
        PrimitiveType p = new PrimitiveType(Map.class.getName(), output, PrimitiveTypes.MAP, "ErrorsAnalysis","Analysis of the discrepancies");
        return p;
    }
}
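With this change DiscrepancyAnalysis no longer exposes getContentType()/getContent(): the result map is wrapped into a PrimitiveType of type MAP. A minimal sketch of how a caller could read it back is shown below; how the analysis is configured and executed before getOutput() is assumed, since it is outside this hunk.

    DiscrepancyAnalysis analysis = new DiscrepancyAnalysis();
    analysis.init(config);                                   // assumed: an AlgorithmConfiguration prepared elsewhere
    // ... run the comparison as before ...
    PrimitiveType result = (PrimitiveType) analysis.getOutput();
    HashMap<String, String> values = (HashMap<String, String>) result.getContent();
    System.out.println("MEAN=" + values.get("MEAN") + " ACCURACY=" + values.get("ACCURACY"));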
@@ -4,13 +4,19 @@ import java.util.ArrayList;
import java.util.HashMap;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
|
||||
import org.hibernate.SessionFactory;
|
||||
|
||||
import com.rapidminer.example.Attribute;
|
||||
|
@ -38,28 +44,48 @@ public class DistributionQualityAnalysis extends DataAnalysis {
|
|||
double bestThreshold = 0.5d;
|
||||
private HashMap<String, String> output;
|
||||
|
||||
public HashMap<String, VarCouple> getInputParameters() {
|
||||
|
||||
HashMap<String, VarCouple> parameters = new HashMap<String, VarCouple>();
|
||||
|
||||
parameters.put("PositiveCasesTable", new VarCouple(VARTYPE.STRING, ""));
|
||||
parameters.put("NegativeCasesTable", new VarCouple(VARTYPE.STRING, ""));
|
||||
|
||||
parameters.put("PositiveCasesTableKeyColumn", new VarCouple(VARTYPE.STRING, "csquarecode"));
|
||||
parameters.put("NegativeCasesTableKeyColumn", new VarCouple(VARTYPE.STRING, "csquarecode"));
|
||||
|
||||
parameters.put("DistributionTable", new VarCouple(VARTYPE.STRING, "csquare"));
|
||||
parameters.put("DistributionTableKeyColumn", new VarCouple(VARTYPE.STRING, "csquarecode"));
|
||||
parameters.put("DistributionTableProbabilityColumn", new VarCouple(VARTYPE.STRING, "csquarecode"));
|
||||
|
||||
parameters.put("PositiveThreshold", new VarCouple(VARTYPE.STRING, "0.8"));
|
||||
parameters.put("NegativeThreshold", new VarCouple(VARTYPE.STRING, "0.3"));
|
||||
|
||||
parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, ""));
|
||||
parameters.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, ""));
|
||||
parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, ""));
|
||||
parameters.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, ""));
|
||||
public List<StatisticalType> getInputParameters() {
|
||||
List<StatisticalType> parameters = new ArrayList<StatisticalType>();
|
||||
List<TableTemplates> templates = new ArrayList<TableTemplates>();
|
||||
templates.add(TableTemplates.HSPEC);
|
||||
templates.add(TableTemplates.TRAININGSET);
|
||||
templates.add(TableTemplates.TESTSET);
|
||||
|
||||
List<TableTemplates> templatesOccurrences = new ArrayList<TableTemplates>();
|
||||
templatesOccurrences.add(TableTemplates.OCCURRENCE);
|
||||
templatesOccurrences.add(TableTemplates.TRAININGSET);
|
||||
templatesOccurrences.add(TableTemplates.TESTSET);
|
||||
|
||||
InputTable p1 = new InputTable(templatesOccurrences,"PositiveCasesTable","A Table containing positive cases");
|
||||
InputTable p2 = new InputTable(templatesOccurrences,"NegativeCasesTable","A Table containing negative cases");
|
||||
PrimitiveType p3 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "PositiveCasesTableKeyColumn","Positive Cases Table Key Column","csquarecode");
|
||||
PrimitiveType p4 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "NegativeCasesTableKeyColumn","Negative Cases Table Key Column","csquarecode");
|
||||
InputTable p5 = new InputTable(templates,"DistributionTable","A probability distribution table");
|
||||
PrimitiveType p6 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "DistributionTableKeyColumn","Distribution Table Key Column","csquarecode");
|
||||
PrimitiveType p7 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "DistributionTableProbabilityColumn","Distribution Table Probability Column","csquarecode");
|
||||
PrimitiveType p8 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "PositiveThreshold","Positive acceptance threshold","0.8");
|
||||
PrimitiveType p9 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "NegativeThreshold","Negative acceptance threshold","0.3");
|
||||
DatabaseType p10 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
|
||||
DatabaseType p11 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
|
||||
DatabaseType p12 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver");
|
||||
DatabaseType p13 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
|
||||
DatabaseType p14 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect");
|
||||
|
||||
parameters.add(p1);
|
||||
parameters.add(p2);
|
||||
parameters.add(p3);
|
||||
parameters.add(p4);
|
||||
parameters.add(p5);
|
||||
parameters.add(p6);
|
||||
parameters.add(p7);
|
||||
parameters.add(p8);
|
||||
parameters.add(p9);
|
||||
parameters.add(p10);
|
||||
parameters.add(p11);
|
||||
parameters.add(p12);
|
||||
parameters.add(p13);
|
||||
parameters.add(p14);
|
||||
|
||||
return parameters;
|
||||
}
|
||||
|
||||
|
@ -324,13 +350,9 @@ public class DistributionQualityAnalysis extends DataAnalysis {
|
|||
|
||||
|
||||
@Override
|
||||
public VARTYPE getContentType() {
|
||||
return VARTYPE.MAP;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getContent() {
|
||||
return output;
|
||||
public StatisticalType getOutput() {
|
||||
PrimitiveType p = new PrimitiveType(Map.class.getName(), output, PrimitiveTypes.MAP, "AnalysisResult","Analysis of the probability distribution quality");
|
||||
return p;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -3,17 +3,23 @@ package org.gcube.dataanalysis.ecoengine.evaluation;
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.MathFunctions;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis;
|
||||
import org.gcube.dataanalysis.ecoengine.models.cores.pca.PrincipalComponentAnalysis;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.Operations;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.Transformations;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
|
||||
import org.hibernate.SessionFactory;
|
||||
|
||||
public class HabitatRepresentativeness extends DataAnalysis {
|
||||
|
@ -31,24 +37,39 @@ public class HabitatRepresentativeness extends DataAnalysis {
|
|||
private float innerstatus;
|
||||
private int maxTests = 2;
|
||||
|
||||
public HashMap<String, VarCouple> getInputParameters() {
|
||||
public List<StatisticalType> getInputParameters() {
|
||||
List<StatisticalType> parameters = new ArrayList<StatisticalType>();
|
||||
List<TableTemplates> templates = new ArrayList<TableTemplates>();
|
||||
templates.add(TableTemplates.HCAF);
|
||||
templates.add(TableTemplates.TRAININGSET);
|
||||
templates.add(TableTemplates.TESTSET);
|
||||
|
||||
HashMap<String, VarCouple> parameters = new HashMap<String, VarCouple>();
|
||||
|
||||
parameters.put("ProjectingAreaTable", new VarCouple(VARTYPE.STRING, ""));
|
||||
parameters.put("ProjectingAreaFeaturesOptionalCondition", new VarCouple(VARTYPE.STRING, "oceanarea>0"));
|
||||
|
||||
parameters.put("PositiveCasesTable", new VarCouple(VARTYPE.STRING, ""));
|
||||
parameters.put("NegativeCasesTable", new VarCouple(VARTYPE.STRING, ""));
|
||||
|
||||
parameters.put("FeaturesColumns", new VarCouple(VARTYPE.STRING, ""));
|
||||
parameters.put("PositiveFeaturesColumns", new VarCouple(VARTYPE.STRING, ""));
|
||||
|
||||
parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, ""));
|
||||
parameters.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, ""));
|
||||
parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, ""));
|
||||
parameters.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, ""));
|
||||
List<TableTemplates> templatesOccurrences = new ArrayList<TableTemplates>();
|
||||
templatesOccurrences.add(TableTemplates.OCCURRENCE);
|
||||
templatesOccurrences.add(TableTemplates.TRAININGSET);
|
||||
templatesOccurrences.add(TableTemplates.TESTSET);
|
||||
|
||||
InputTable p1 = new InputTable(templates,"ProjectingAreaTable","A Table containing projecting area information");
|
||||
PrimitiveType p2 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "ProjectingAreaFeaturesOptionalCondition","optional filter for taking area rows","oceanarea>0",true);
|
||||
InputTable p3 = new InputTable(templatesOccurrences,"PositiveCasesTable","A Table containing positive cases");
|
||||
InputTable p4 = new InputTable(templatesOccurrences,"NegativeCasesTable","A Table containing negative cases");
|
||||
PrimitiveType p5 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "FeaturesColumns","fetures columns names separated by comma","depthmean,depthmax,depthmin, sstanmean,sbtanmean,salinitymean,salinitybmean, primprodmean,iceconann,landdist,oceanarea");
|
||||
DatabaseType p6 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
|
||||
DatabaseType p7 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
|
||||
DatabaseType p8 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver");
|
||||
DatabaseType p9 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
|
||||
DatabaseType p10 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect");
|
||||
|
||||
parameters.add(p1);
|
||||
parameters.add(p2);
|
||||
parameters.add(p3);
|
||||
parameters.add(p4);
|
||||
parameters.add(p5);
|
||||
parameters.add(p6);
|
||||
parameters.add(p7);
|
||||
parameters.add(p8);
|
||||
parameters.add(p9);
|
||||
parameters.add(p10);
|
||||
return parameters;
|
||||
}
|
||||
|
||||
|
@ -315,13 +336,9 @@ public class HabitatRepresentativeness extends DataAnalysis {
|
|||
}
|
||||
|
||||
@Override
|
||||
public VARTYPE getContentType() {
|
||||
return VARTYPE.MAP;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getContent() {
|
||||
return output;
|
||||
public StatisticalType getOutput() {
|
||||
PrimitiveType p = new PrimitiveType(Map.class.getName(), output, PrimitiveTypes.MAP, "AnalysisResult","Habitat Representativeness Score");
|
||||
return p;
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -4,11 +4,14 @@ import java.awt.Image;
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
|
||||
import org.gcube.dataanalysis.ecoengine.models.cores.aquamaps.Hspen;
|
||||
import org.gcube.dataanalysis.ecoengine.processing.factories.EvaluatorsFactory;
|
||||
|
@ -32,10 +35,10 @@ public class BioClimateAnalysis {
|
|||
private static String countNumberOfSpeciesPerRange = "select count(*) from %1$s where %4$s>%2$s and %4$s<=%3$s ";
|
||||
private static String countNumberOfSpecies = "select count(*) from %1$s where depthmin<%2$s and depthmin>=%3$s and depthmax<%4$s and depthmax>=%5$s";
|
||||
private static String countProbabilityPerArea = "select count(*) from %1$s as a join hcaf_s as b on b.%2$s = %3$s and a.probability > #THRESHOLD# and a.csquarecode=b.csquarecode";
|
||||
|
||||
|
||||
public static String salinityDefaultRange = "salinitymin>27.44 and salinitymin<=36.57";
|
||||
public static String salinityMinFeature = "salinitymin";
|
||||
|
||||
|
||||
private static enum FIELD {
|
||||
iceconann, sstanmean, salinitymean
|
||||
};
|
||||
|
@ -90,7 +93,7 @@ public class BioClimateAnalysis {
|
|||
bioClimate.produceGraphs2D();
|
||||
}
|
||||
|
||||
private static String[] SERIES = { "High Probability Cells Trend (>%1$s)", "Number of Changing Cells", "Reducing Ice Concentration Trend", "Average Discrepancy Between Distributions", "Average Trends", "Ice Concentration", "Sea Surface Temperature", "Salinity" };
|
||||
private static String[] SERIES = { "High Probability Cells Trend (>%1$s)", "Number of Changing Cells", "Reducing Ice Concentration Trend", "High Probability Cells Trend Derivative", "Average Trends", "Ice Concentration", "Sea Surface Temperature", "Salinity" };
|
||||
|
||||
public void produceGraphs2D() throws Exception {
|
||||
DefaultCategoryDataset testpoints = new DefaultCategoryDataset();
|
||||
|
@ -107,18 +110,18 @@ public class BioClimateAnalysis {
|
|||
lineg9.render(testpoints);
|
||||
}
|
||||
|
||||
private HashMap<String,Image> producedImages;
|
||||
private HashMap<String, Image> producedImages;
|
||||
|
||||
public HashMap<String,Image> getProducedImages() {
|
||||
public HashMap<String, Image> getProducedImages() {
|
||||
return producedImages;
|
||||
}
|
||||
|
||||
private void produceGraphs(String[] csquareTableNames, String[] hspecTableNames, float threshold) throws Exception {
|
||||
|
||||
|
||||
csquareTableNames = checkTableNames(csquareTableNames);
|
||||
hspecTableNames = checkTableNames(hspecTableNames);
|
||||
|
||||
producedImages = new HashMap<String,Image> ();
|
||||
|
||||
producedImages = new HashMap<String, Image>();
|
||||
int numberOfTrends = highProbabilityCells.length;
|
||||
|
||||
// create the datasets...
|
||||
|
@ -149,14 +152,14 @@ public class BioClimateAnalysis {
|
|||
discrepancies[0] = min;
|
||||
|
||||
if (liveRender) {
|
||||
BioClimateGraph lineg1 = new BioClimateGraph(String.format(SERIES[0],threshold), Operations.getMax(highProbabilityCells), Operations.getMin(highProbabilityCells));
|
||||
BioClimateGraph lineg1 = new BioClimateGraph(String.format(SERIES[0], threshold), Operations.getMax(highProbabilityCells), Operations.getMin(highProbabilityCells));
|
||||
BioClimateGraph lineg4 = new BioClimateGraph(SERIES[3], Operations.getMax(discrepancies), min);
|
||||
lineg4.render(discrepanciesTrend);
|
||||
lineg1.render(probabilityTrend);
|
||||
}
|
||||
|
||||
producedImages.put("Probability_Trend",BioClimateGraph.renderStaticImgObject(width, height, probabilityTrend, String.format(SERIES[0],threshold), Operations.getMax(highProbabilityCells), Operations.getMin(highProbabilityCells)));
|
||||
producedImages.put("Probability_Discrepancies_Trend",BioClimateGraph.renderStaticImgObject(width, height, discrepanciesTrend, SERIES[3], Operations.getMax(discrepancies), min));
|
||||
producedImages.put("Probability_Trend", BioClimateGraph.renderStaticImgObject(width, height, probabilityTrend, String.format(SERIES[0], threshold), Operations.getMax(highProbabilityCells), Operations.getMin(highProbabilityCells)));
|
||||
producedImages.put("Probability_Discrepancies_Trend", BioClimateGraph.renderStaticImgObject(width, height, discrepanciesTrend, SERIES[3], Operations.getMax(discrepancies), min));
|
||||
|
||||
}
|
||||
if (doHcafAn) {
|
||||
|
@ -170,9 +173,9 @@ public class BioClimateAnalysis {
|
|||
lineg8.render(avgSalinityD);
|
||||
}
|
||||
|
||||
producedImages.put("Average_Ice_Concentration",BioClimateGraph.renderStaticImgObject(width, height, avgIceD, SERIES[5], Operations.getMax(avgIce), Operations.getMin(avgIce)));
|
||||
producedImages.put("Average_SST",BioClimateGraph.renderStaticImgObject(width, height, avgSSTD, SERIES[6], Operations.getMax(avgSST), Operations.getMin(avgSST)));
|
||||
producedImages.put("Average_Salinity",BioClimateGraph.renderStaticImgObject(width, height, avgSalinityD, SERIES[7], Operations.getMax(avgSalinity), Operations.getMin(avgSalinity)));
|
||||
producedImages.put("Average_Ice_Concentration", BioClimateGraph.renderStaticImgObject(width, height, avgIceD, SERIES[5], Operations.getMax(avgIce), Operations.getMin(avgIce)));
|
||||
producedImages.put("Average_SST", BioClimateGraph.renderStaticImgObject(width, height, avgSSTD, SERIES[6], Operations.getMax(avgSST), Operations.getMin(avgSST)));
|
||||
producedImages.put("Average_Salinity", BioClimateGraph.renderStaticImgObject(width, height, avgSalinityD, SERIES[7], Operations.getMax(avgSalinity), Operations.getMin(avgSalinity)));
|
||||
|
||||
}
|
||||
|
||||
|
@ -181,37 +184,37 @@ public class BioClimateAnalysis {
|
|||
}
|
||||
|
||||
public void hcafEvolutionAnalysis(String[] hcafTable, String[] hcafTableNames) throws Exception {
|
||||
globalEvolutionAnalysis(hcafTable, null, hcafTableNames, null, null, null,0f);
|
||||
globalEvolutionAnalysis(hcafTable, null, hcafTableNames, null, null, null, 0f);
|
||||
}
|
||||
|
||||
public void hspecEvolutionAnalysis(String[] hspecTables, String[] hspecTableNames, String probabilityColumn, String csquareColumn,float threshold) throws Exception {
|
||||
public void hspecEvolutionAnalysis(String[] hspecTables, String[] hspecTableNames, String probabilityColumn, String csquareColumn, float threshold) throws Exception {
|
||||
globalEvolutionAnalysis(null, hspecTables, null, hspecTableNames, probabilityColumn, csquareColumn, threshold);
|
||||
}
|
||||
|
||||
private String[] checkTableNames(String [] tablesNames){
|
||||
private String[] checkTableNames(String[] tablesNames) {
|
||||
ArrayList<String> newtables = new ArrayList<String>();
|
||||
if ((tablesNames==null) || (tablesNames.length==0))
|
||||
if ((tablesNames == null) || (tablesNames.length == 0))
|
||||
return tablesNames;
|
||||
for (String table:tablesNames){
|
||||
int i=1;
|
||||
for (String table : tablesNames) {
|
||||
int i = 1;
|
||||
String originalTable = table;
|
||||
while (newtables.contains(table)){
|
||||
table = originalTable+"_"+i;
|
||||
while (newtables.contains(table)) {
|
||||
table = originalTable + "_" + i;
|
||||
i++;
|
||||
}
|
||||
newtables.add(table);
|
||||
}
|
||||
String [] tables = new String[tablesNames.length];
|
||||
for (int j=0;j<tablesNames.length;j++){
|
||||
String[] tables = new String[tablesNames.length];
|
||||
for (int j = 0; j < tablesNames.length; j++) {
|
||||
tables[j] = newtables.get(j);
|
||||
}
|
||||
return tables;
|
||||
}
|
||||
|
||||
|
||||
public void produceCharts(HashMap<String, HashMap<String, double[]>> GeoMap, String[] tablesNames) {
|
||||
// produce a char for each feature
|
||||
tablesNames = checkTableNames(tablesNames);
|
||||
producedImages = new HashMap<String,Image> ();
|
||||
producedImages = new HashMap<String, Image>();
|
||||
for (String featurename : GeoMap.keySet()) {
|
||||
DefaultCategoryDataset chart = new DefaultCategoryDataset();
|
||||
HashMap<String, double[]> timeseries = GeoMap.get(featurename);
|
||||
|
@ -231,13 +234,13 @@ public class BioClimateAnalysis {
|
|||
BioClimateGraph lineg1 = new BioClimateGraph(featurename, absmax, absmin);
|
||||
lineg1.render(chart);
|
||||
}
|
||||
producedImages.put(featurename.replace(" ", "_"),BioClimateGraph.renderStaticImgObject(width, height, chart, featurename, absmax, absmin));
|
||||
producedImages.put(featurename.replace(" ", "_"), BioClimateGraph.renderStaticImgObject(width, height, chart, featurename, absmax, absmin));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a chart for hspens in time according to a certain interval in the parameter
|
||||
* e.g. : a chart for several salinity intervals
|
||||
* Generates a chart for hspens in time according to a certain interval in the parameter e.g. : a chart for several salinity intervals
|
||||
*
|
||||
* @param hspenTables
|
||||
* @param hspenTableNames
|
||||
* @param parameterName
|
||||
|
@ -291,9 +294,9 @@ public class BioClimateAnalysis {
|
|||
// for each cluster build up a chart
|
||||
for (int j = 1; j < pClusters.length; j++) {
|
||||
|
||||
double prevpmax = MathFunctions.roundDecimal(Double.parseDouble("" + (Object) paramrange.get(pClusters[j - 1])),2);
|
||||
pmax = MathFunctions.roundDecimal(Double.parseDouble("" + (Object) paramrange.get(pClusters[j])),2);
|
||||
|
||||
double prevpmax = MathFunctions.roundDecimal(Double.parseDouble("" + (Object) paramrange.get(pClusters[j - 1])), 2);
|
||||
pmax = MathFunctions.roundDecimal(Double.parseDouble("" + (Object) paramrange.get(pClusters[j])), 2);
|
||||
|
||||
if (prevpmax != pmax) {
|
||||
// take the number of elements for this range
|
||||
String countSpeciesQuery = String.format(countNumberOfSpeciesPerRange, hspenTables[i], prevpmax, pmax, parameterName);
|
||||
|
@ -340,11 +343,13 @@ public class BioClimateAnalysis {
|
|||
|
||||
/**
|
||||
* Generates a chart for hspec probability > thr in each Fao Area and LME
|
||||
*
|
||||
* @param hspecTables
|
||||
* @param hspecTablesNames
|
||||
* @throws Exception
|
||||
*/
|
||||
public void speciesGeographicEvolutionAnalysis(String[] hspecTables, String[] hspecTablesNames, float threshold) throws Exception {
|
||||
|
||||
public void speciesGeographicEvolutionAnalysis2(String[] hspecTables, String[] hspecTablesNames, float threshold) throws Exception {
|
||||
try {
|
||||
referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config);
|
||||
AnalysisLogger.getLogger().debug("ReferenceDB initialized");
|
||||
|
@ -356,24 +361,23 @@ public class BioClimateAnalysis {
|
|||
|
||||
// a map for each feature. each sub map contains a trend for faoaream, lme etc.
|
||||
HashMap<String, HashMap<String, double[]>> GeoMap = new HashMap<String, HashMap<String, double[]>>();
|
||||
|
||||
|
||||
float statusstep = 80f / (float) numbOfTables;
|
||||
// for each table
|
||||
for (int i = 0; i < numbOfTables; i++) {
|
||||
// for each criterion to apply: fao area, lme etc.
|
||||
for (int j = 0; j < criteriaNames.length; j++) {
|
||||
List<Object> listCriterion = DatabaseFactory.executeSQLQuery(DatabaseUtils.getDinstictElements("hcaf_s", selectionCriteria[j],criteriaFilters[j]),referencedbConnection);
|
||||
for (Object code: listCriterion){
|
||||
String code$ = ""+code;
|
||||
String query = String.format(countProbabilityPerArea,hspecTables[i],selectionCriteria[j],code$);
|
||||
query = query.replace("#THRESHOLD#", ""+threshold);
|
||||
AnalysisLogger.getLogger().trace("Executing query for counting probabilities: "+query);
|
||||
List<Object> listCriterion = DatabaseFactory.executeSQLQuery(DatabaseUtils.getDinstictElements("hcaf_s", selectionCriteria[j], criteriaFilters[j]), referencedbConnection);
|
||||
for (Object code : listCriterion) {
|
||||
String code$ = "" + code;
|
||||
String query = String.format(countProbabilityPerArea, hspecTables[i], selectionCriteria[j], code$);
|
||||
query = query.replace("#THRESHOLD#", "" + threshold);
|
||||
AnalysisLogger.getLogger().trace("Executing query for counting probabilities: " + query);
|
||||
List<Object> counts = DatabaseFactory.executeSQLQuery(query, referencedbConnection);
|
||||
AnalysisLogger.getLogger().trace("Query Executed");
|
||||
int countPerArea = (counts==null)?0:Integer.parseInt(""+counts.get(0));
|
||||
|
||||
String chartName = "Hspec (prob>0.8) for " + criteriaNames[j] + "_" + code$;
|
||||
// put the code and the value in the timeseries associated to the feature name
|
||||
int countPerArea = (counts == null) ? 0 : Integer.parseInt("" + counts.get(0));
|
||||
|
||||
String chartName = "Hspec (prob>0.8) for " + criteriaNames[j] + "_" + code$; // put the code and the value in the timeseries associated to the feature name
|
||||
HashMap<String, double[]> submap = GeoMap.get(chartName);
|
||||
if (submap == null) {
|
||||
submap = new HashMap<String, double[]>();
|
||||
|
@ -386,9 +390,9 @@ public class BioClimateAnalysis {
|
|||
elements = new double[numbOfTables];
|
||||
submap.put(timeseries, elements);
|
||||
}
|
||||
|
||||
|
||||
elements[i] = countPerArea;
|
||||
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -406,13 +410,97 @@ public class BioClimateAnalysis {
|
|||
referencedbConnection.close();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a geographic trend for each hspec feature: ice con, salinity, sst in each fao area
|
||||
* @param hcafTable
|
||||
* @param hcafTableNames
|
||||
* @throws Exception
|
||||
*/
|
||||
|
||||
public void speciesGeographicEvolutionAnalysis(String[] hspecTables, String[] hspecTablesNames, float threshold) throws Exception {
|
||||
try {
|
||||
referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config);
|
||||
AnalysisLogger.getLogger().debug("ReferenceDB initialized");
|
||||
status = 0f;
|
||||
|
||||
int numbOfTables = (hspecTables != null) ? hspecTables.length : 0;
|
||||
|
||||
if (numbOfTables > 0) {
|
||||
|
||||
// a map for each feature. each sub map contains a trend for faoaream, lme etc.
|
||||
HashMap<String, HashMap<String, double[]>> GeoMap = new HashMap<String, HashMap<String, double[]>>();
|
||||
|
||||
float statusstep = 80f / (float) numbOfTables;
|
||||
// for each table
|
||||
for (int i = 0; i < numbOfTables; i++) {
|
||||
String tmpanalysisTable = "tmpanalysis" + ("" + UUID.randomUUID()).replace("-", "").replace("_", "");
|
||||
try {
|
||||
DatabaseFactory.executeSQLUpdate("drop table " + tmpanalysisTable, referencedbConnection);
|
||||
} catch (Exception ee) {
|
||||
AnalysisLogger.getLogger().trace("table " + tmpanalysisTable + " does not exist");
|
||||
}
|
||||
String preparationQuery = "create table " + tmpanalysisTable + " as select a.faoaream, lme,count(*) from %1$s as a where a.probability > #THRESHOLD# group by faoaream,lme;";
|
||||
preparationQuery = String.format(preparationQuery, hspecTables[i]);
|
||||
preparationQuery = preparationQuery.replace("#THRESHOLD#", "" + threshold);
|
||||
AnalysisLogger.getLogger().trace("Executing query for counting probabilities: " + preparationQuery);
|
||||
|
||||
DatabaseFactory.executeSQLUpdate(preparationQuery, referencedbConnection);
|
||||
|
||||
AnalysisLogger.getLogger().trace("Query Executed");
|
||||
|
||||
// for each criterion to apply: fao area, lme etc.
|
||||
for (int j = 0; j < criteriaNames.length; j++) {
|
||||
String criteriaQuery = String.format("select %1$s,sum(count) from " + tmpanalysisTable + " %2$s group by %1$s;", selectionCriteria[j], criteriaFilters[j]);
|
||||
AnalysisLogger.getLogger().trace("Executing query for counting probabilities: " + criteriaQuery);
|
||||
List<Object> codeSums = DatabaseFactory.executeSQLQuery(criteriaQuery, referencedbConnection);
|
||||
for (Object codeSum : codeSums) {
|
||||
String code$ = "" + ((Object[]) codeSum)[0];
|
||||
int countPerArea = (((Object[]) codeSum)[1] == null) ? 0 : Integer.parseInt("" + ((Object[]) codeSum)[1]);
|
||||
AnalysisLogger.getLogger().trace("Analyzing " + selectionCriteria[j] + " with code " + code$ + " count " + countPerArea);
|
||||
|
||||
String chartName = "Hspec (prob>0.8) for " + criteriaNames[j] + "_" + code$;
|
||||
// put the code and the value in the timeseries associated to the feature name
|
||||
HashMap<String, double[]> submap = GeoMap.get(chartName);
|
||||
if (submap == null) {
|
||||
submap = new HashMap<String, double[]>();
|
||||
GeoMap.put(chartName, submap);
|
||||
}
|
||||
String timeseries = "number of occupied cells";
|
||||
double[] elements = submap.get(timeseries);
|
||||
if (elements == null) {
|
||||
elements = new double[numbOfTables];
|
||||
submap.put(timeseries, elements);
|
||||
}
|
||||
|
||||
elements[i] = countPerArea;
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
try {
|
||||
DatabaseFactory.executeSQLUpdate("drop table " + tmpanalysisTable, referencedbConnection);
|
||||
} catch (Exception ee) {
|
||||
ee.printStackTrace();
|
||||
AnalysisLogger.getLogger().trace("table " + tmpanalysisTable + " does not exist");
|
||||
}
|
||||
|
||||
status = status + statusstep;
|
||||
}
|
||||
|
||||
status = 80f;
|
||||
produceCharts(GeoMap, hspecTablesNames);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
throw e;
|
||||
} finally {
|
||||
status = 100f;
|
||||
referencedbConnection.close();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a geographic trend for each hspec feature: ice con, salinity, sst in each fao area
|
||||
*
|
||||
* @param hcafTable
|
||||
* @param hcafTableNames
|
||||
* @throws Exception
|
||||
*/
|
||||
public void geographicEvolutionAnalysis(String[] hcafTable, String[] hcafTableNames) throws Exception {
|
||||
try {
|
||||
referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config);
|
||||
|
@ -482,8 +570,8 @@ public class BioClimateAnalysis {
|
|||
}
|
||||
|
||||
/**
|
||||
* Generates a chart for each hspec feature
|
||||
* Generates a chart for hspec prob > thr and performs a discrepancy analysis on hspec
|
||||
* Generates a chart for each hspec feature Generates a chart for hspec prob > thr and performs a discrepancy analysis on hspec
|
||||
*
|
||||
* @param hcafTable
|
||||
* @param hspecTables
|
||||
* @param hcafTablesNames
|
||||
|
@ -492,7 +580,7 @@ public class BioClimateAnalysis {
|
|||
* @param csquareColumn
|
||||
* @throws Exception
|
||||
*/
|
||||
public void globalEvolutionAnalysis(String[] hcafTable, String[] hspecTables, String[] hcafTablesNames, String[] hspecTableNames, String probabilityColumn, String csquareColumn,float threshold) throws Exception {
|
||||
public void globalEvolutionAnalysis(String[] hcafTable, String[] hspecTables, String[] hcafTablesNames, String[] hspecTableNames, String probabilityColumn, String csquareColumn, float threshold) throws Exception {
|
||||
try {
|
||||
referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config);
|
||||
AnalysisLogger.getLogger().debug("ReferenceDB initialized");
|
||||
|
@ -517,7 +605,10 @@ public class BioClimateAnalysis {
|
|||
avgSalinity = new double[numbOfPoints];
|
||||
|
||||
float statusstep = 80f / (float) numbOfPoints;
|
||||
|
||||
// create temp table puppa as select count(*), probability > 0.8 as aboveThreshold, sum(probability) as partialprobability from hspec_2050_suitable group by probability >0.8;
|
||||
// select count(*), probability > 0.8 as aboveThreshold, sum(probability) as partialprobability from hspec_2050_suitable group by probability >0.8;
|
||||
// select sum(count) as count, 3 as x from puppa union select count,2 as x from puppa where abovethreshold = true union select sum(partialprobability) as count, 1 as x from puppa order by x desc;
|
||||
|
||||
for (int i = 0; i < numbOfPoints; i++) {
|
||||
if (doHspecAn)
|
||||
highProbabilityCells[i] = calcHighProbabilityCells(hspecTables[i], threshold);
|
||||
|
@ -534,7 +625,8 @@ public class BioClimateAnalysis {
|
|||
discrepancies[i] = 1.0;
|
||||
} else {
|
||||
// OLD CALCULATION discrepancies[i] = MathFunctions.roundDecimal(calcDiscrepancy(configPath, temporaryDirectory, hspecTables[i], hspecTables[i - 1], probabilityColumn, csquareColumn, 0.1f), 5);
|
||||
discrepancies[i] = MathFunctions.roundDecimal(calcOverDiscrepancy(configPath, temporaryDirectory, hspecTables[i], hspecTables[i - 1], probabilityColumn, csquareColumn, 0.1f), 5);
|
||||
// discrepancies[i] = MathFunctions.roundDecimal(calcOverDiscrepancy(configPath, temporaryDirectory, hspecTables[i], hspecTables[i - 1], probabilityColumn, csquareColumn, 0.1f), 5);
|
||||
discrepancies[i] = highProbabilityCells[i]-highProbabilityCells[i-1];
|
||||
}
|
||||
AnalysisLogger.getLogger().trace("(" + hspecTables[i] + "): DISCREPANCY " + discrepancies[i] + " HIGH PROB CELLS " + highProbabilityCells[i]);
|
||||
}
|
||||
|
@ -544,7 +636,7 @@ public class BioClimateAnalysis {
|
|||
status = status + statusstep;
|
||||
}
|
||||
status = 80f;
|
||||
produceGraphs(hcafTablesNames, hspecTableNames,threshold);
|
||||
produceGraphs(hcafTablesNames, hspecTableNames, threshold);
|
||||
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
|
@ -653,7 +745,9 @@ public class BioClimateAnalysis {
|
|||
config.setParam("MaxSamples", "" + 30000);
|
||||
|
||||
eval = EvaluatorsFactory.getEvaluators(config).get(0);
|
||||
HashMap<String, String> out = eval.process(config);
|
||||
PrimitiveType output = (PrimitiveType) eval.process(config);
|
||||
|
||||
HashMap<String, String> out = (HashMap<String, String>)output.getContent();
|
||||
|
||||
Double d = Double.parseDouble(out.get("MEAN"));
|
||||
return d;
|
@@ -212,6 +212,7 @@ public class InterpolateTables {
    String filename = temporaryDirectory + initialFile + "_" + (yearCals) + "_" + function.name() + "_" + i + System.currentTimeMillis() + ".csv";
    FileTools.saveString(filename, completeFile.toString(), true, "UTF-8");
    producedfiles[i] = new File(filename);
    System.out.println("PRODUCED FILE TO COPY "+producedfiles[i]);
    }
    }

@@ -267,13 +268,18 @@
    public static void main(String[] args) throws Exception {

        String configPath = "./cfg/";
        String persistencePath = "c:/tmp/";
        String persistencePath = "/win/";
        /*
        String databaseUrl = "jdbc:postgresql://localhost/testdb";
        String databaseUser = "gcube";
        String databasePassword = "d4science2";
        */
        String databaseUrl = "jdbc:postgresql://dbtest.research-infrastructures.eu/aquamapsorgupdated";
        String databaseUser = "utente";
        String databasePassword = "d4science";
        InterpolateTables interp = new InterpolateTables(configPath, persistencePath, databaseUrl, databaseUser, databasePassword);

        interp.interpolate("hcaf_d", "hcaf_d_2050", 5, INTERPOLATIONFUNCTIONS.LINEAR, 2012, 2050);
        interp.interpolate("hcaf_d", "hcaf_d_2050", 7, INTERPOLATIONFUNCTIONS.LINEAR, 2012, 2050);

    }
@@ -0,0 +1,31 @@
package org.gcube.dataanalysis.ecoengine.interfaces;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
public interface Clusterer {
public ALG_PROPS[] getSupportedAlgorithms();
public INFRASTRUCTURE getInfrastructure();
public void init() throws Exception;
public void setConfiguration(AlgorithmConfiguration config);
public void shutdown();
public float getStatus();
public String getDescription();
public List<StatisticalType> getInputParameters();
public StatisticalType getOutput();
public void cluster() throws Exception;
}
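The new Clusterer contract above does not prescribe a lifecycle, so the following sketch only infers one from the method names: configure, initialize, run, then release resources. The driver class is hypothetical; only the interface methods come from this commit.

import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.Clusterer;

// Hypothetical driver: the call order is inferred from the interface, not mandated by it.
public class ClustererDriverSketch {

	public static StatisticalType run(Clusterer clusterer, AlgorithmConfiguration config) throws Exception {
		clusterer.setConfiguration(config);  // hand over the algorithm configuration
		clusterer.init();                    // let the implementation set up its resources
		try {
			clusterer.cluster();             // run the clustering itself
		} finally {
			clusterer.shutdown();            // always release resources
		}
		return clusterer.getOutput();        // result exposed as a StatisticalType
	}
}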
@@ -1,15 +1,14 @@
package org.gcube.dataanalysis.ecoengine.interfaces;
import java.util.HashMap;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
public interface ComputationalAgent {
//set the input parameters for this generator
public HashMap<String, VarCouple> getInputParameters();
public List<StatisticalType> getInputParameters();
public String getResourceLoad();
@@ -20,10 +19,8 @@ public interface ComputationalAgent {
//gets the weight of the generator: according to this the generator will be placed in the execution order
public INFRASTRUCTURE getInfrastructure();
// gets the type of the content inside the generator: String, File, HashMap.
public VARTYPE getContentType();
// gets the content of the model: e.g. Table indications etc.
public Object getContent();
public StatisticalType getOutput();
}
@@ -2,11 +2,14 @@ package org.gcube.dataanalysis.ecoengine.interfaces;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
/**
* Implements a mono-thread data analysis process
@@ -21,15 +24,11 @@ public abstract class DataAnalysis implements Evaluator{
protected float status;
/**
* establishes imput parameters for this algorithm along with their type
* establishes input parameters for this algorithm along with their type
*/
public abstract HashMap<String, VarCouple> getInputParameters();
public abstract List<StatisticalType> getInputParameters();
/**
* lists the output parameters names
* @return
*/
public abstract List<String> getOutputParameters();
/**
* Executed the core of the algorithm
@@ -58,7 +57,7 @@ public abstract class DataAnalysis implements Evaluator{
* @return
* @throws Exception
*/
public HashMap<String, String> process(AlgorithmConfiguration config) throws Exception{
public StatisticalType process(AlgorithmConfiguration config) throws Exception{
status = 0;
HashMap<String, String> out = new HashMap<String, String>();
try{
@@ -72,7 +71,7 @@ public abstract class DataAnalysis implements Evaluator{
finally{
status = 100;
}
return out;
return new PrimitiveType(Map.class.getName(), out, PrimitiveTypes.MAP, "Analysis","Analysis Results");
}
/**
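The DataAnalysis hunk above no longer returns the raw result map: it wraps it in a MAP-typed PrimitiveType. The sketch below isolates just that wrapping step, reusing the same constructor call that appears in the hunk; the helper class and the example "MEAN" key are illustrative only.

import java.util.HashMap;
import java.util.Map;

import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;

// Sketch of the wrapping step introduced above: a plain result map becomes a MAP-typed PrimitiveType.
public class AnalysisResultSketch {

	public static StatisticalType wrap(HashMap<String, String> out) {
		// Same constructor call as in the hunk: content class name, the content itself,
		// the primitive kind, a short name and a human-readable description.
		return new PrimitiveType(Map.class.getName(), out, PrimitiveTypes.MAP, "Analysis", "Analysis Results");
	}
}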
@@ -3,12 +3,13 @@ package org.gcube.dataanalysis.ecoengine.interfaces;
import java.util.HashMap;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
public interface Evaluator extends ComputationalAgent{
public HashMap<String, String> process(AlgorithmConfiguration config) throws Exception;
public StatisticalType process(AlgorithmConfiguration config) throws Exception;
public abstract void init(AlgorithmConfiguration config) throws Exception;
@@ -20,4 +20,5 @@ public interface Generator extends ComputationalAgent{
public void generate() throws Exception;
public SpatialProbabilityDistribution getAlgorithm();
}
@@ -1,11 +1,10 @@
package org.gcube.dataanalysis.ecoengine.interfaces;
import java.util.HashMap;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
public interface Model {
@@ -20,7 +19,7 @@ public interface Model {
public String getDescription();
//set the input parameters for this generator
public HashMap<String, VarCouple> getInputParameters();
public List<StatisticalType> getInputParameters();
public float getVersion();
@@ -34,17 +33,11 @@ public interface Model {
public float getStatus();
public String getInputType();
public String getOutputType();
public void postprocess(AlgorithmConfiguration Input, Model previousModel);
public void train(AlgorithmConfiguration Input, Model previousModel);
public void stop();
public VARTYPE getContentType();
public Object getContent();
public StatisticalType getOutput();
}
@@ -1,10 +1,9 @@
package org.gcube.dataanalysis.ecoengine.interfaces;
import java.util.HashMap;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
public interface SpatialProbabilityDistribution {
@@ -18,10 +17,8 @@ public interface SpatialProbabilityDistribution {
public String getDescription();
//set the input parameters for this generator
public HashMap<String, VarCouple> getInputParameters();
public List<StatisticalType> getInputParameters();
public VARTYPE getContentType();
public Object getContent();
public StatisticalType getOutput();
}
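The interfaces above all migrate parameter declarations from HashMap<String, VarCouple> to List<StatisticalType>. A minimal sketch of a declaration in the new style follows; it reuses only constructor calls that appear later in this commit (PrimitiveType and ServiceType), while the containing class and the chosen parameter names and defaults are illustrative.

import java.util.ArrayList;
import java.util.List;

import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;

// Illustrative parameter list in the new style; names and default values are examples only.
public class ParameterListSketch {

	public static List<StatisticalType> exampleParameters() {
		List<StatisticalType> parameters = new ArrayList<StatisticalType>();
		// A free-text parameter with a default value (constructor form used elsewhere in this commit).
		parameters.add(new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING,
				"SpeciesName", "Species Code of the fish the NN will correspond to", "Fis-10407"));
		// A value supplied by the surrounding service rather than typed in by the user.
		parameters.add(new ServiceType(ServiceParameters.USERNAME, "UserName", "LDAP username"));
		return parameters;
	}
}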
@ -1,34 +1,80 @@
|
|||
package org.gcube.dataanalysis.ecoengine.interfaces;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileWriter;
|
||||
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
|
||||
public interface SpatialProbabilityDistributionNode extends SpatialProbabilityDistribution {
|
||||
public abstract class SpatialProbabilityDistributionNode implements SpatialProbabilityDistribution {
|
||||
|
||||
//initialization of ta single node
|
||||
public void initSingleNode(AlgorithmConfiguration config);
|
||||
public abstract void initSingleNode(AlgorithmConfiguration config);
|
||||
|
||||
//get the internal processing status for the single step calculation
|
||||
public float getInternalStatus();
|
||||
public abstract float getInternalStatus();
|
||||
|
||||
//execute a single node
|
||||
public int executeNode(int cellStarIndex, int numberOfCellsToProcess, int speciesStartIndex, int numberOfSpeciesToProcess, String sandboxFolder, String logfileNameToProduce);
|
||||
public abstract int executeNode(int cellStarIndex, int numberOfCellsToProcess, int speciesStartIndex, int numberOfSpeciesToProcess, boolean duplicate, String sandboxFolder, String nodeConfigurationFileObject, String logfileNameToProduce);
|
||||
|
||||
// An initialization phase in which the inputs are initialized
|
||||
public void setup(AlgorithmConfiguration config) throws Exception;
|
||||
public abstract void setup(AlgorithmConfiguration config) throws Exception;
|
||||
|
||||
//get overall number of species to process
|
||||
public int getNumberOfSpecies();
|
||||
public abstract int getNumberOfSpecies();
|
||||
|
||||
//get overall number of geographical information to process
|
||||
public int getNumberOfGeoInfo();
|
||||
public abstract int getNumberOfGeoInfo();
|
||||
|
||||
//get overall number of processed species
|
||||
public int getNumberOfProcessedSpecies();
|
||||
public abstract int getNumberOfProcessedSpecies();
|
||||
|
||||
//stop the sexecution of the node
|
||||
public void stop();
|
||||
public abstract void stop();
|
||||
|
||||
//prostprocess after the whole calculation : reduce operation
|
||||
public void postProcess();
|
||||
public abstract void postProcess(boolean manageDuplicates, boolean manageFault);
|
||||
|
||||
public static void main(String[] args) throws Exception{
|
||||
try{
|
||||
System.out.println("Generic Node: Process Started ");
|
||||
try {
|
||||
for (int i = 0; i < args.length; i++) {
|
||||
System.out.println("Generic Node: RECEIVED INPUT " + args[i]);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
}
|
||||
|
||||
System.out.println("Generic Node: checking arguments from "+args[0]);
|
||||
String[] rargs = args[0].split("_");
|
||||
|
||||
int order = Integer.parseInt(rargs[0]);
|
||||
System.out.println("Generic Node: order: " + order);
|
||||
int chunksize = Integer.parseInt(rargs[1]);
|
||||
System.out.println("Generic Node: chunk: " + chunksize);
|
||||
int speciesOrder = Integer.parseInt(rargs[2]);
|
||||
System.out.println("Generic Node: species: " + speciesOrder);
|
||||
int speciesChunksize = Integer.parseInt(rargs[3]);
|
||||
System.out.println("Generic Node: species chunk size: " + speciesChunksize);
|
||||
String path = rargs[4];
|
||||
System.out.println("Generic Node: path: " + path);
|
||||
String algorithmClass = rargs[5];
|
||||
System.out.println("Generic Node: algorithmClass: " + algorithmClass);
|
||||
Boolean duplicate = Boolean.parseBoolean(rargs[6]);
|
||||
System.out.println("Generic Node: duplicate message: " + duplicate);
|
||||
String nodeConfiguration = rargs[7];
|
||||
System.out.println("Generic Node: config: " + nodeConfiguration);
|
||||
String logfile = args[1];
|
||||
System.out.println("Generic Node: logfile: " + logfile);
|
||||
|
||||
System.out.println("Generic Node: executing class");
|
||||
|
||||
SpatialProbabilityDistributionNode node = (SpatialProbabilityDistributionNode) Class.forName(algorithmClass).newInstance();
|
||||
|
||||
node.executeNode(order, chunksize, speciesOrder, speciesChunksize, duplicate, path, nodeConfiguration, logfile);
|
||||
}catch(Exception e){
|
||||
System.out.println("ERROR "+e.getMessage());
|
||||
System.out.println(e);
|
||||
}
|
||||
}
|
||||
|
||||
}
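The generic-node main(...) above reads args[0] as an underscore-separated field list and args[1] as a log file name. The composing side is not part of this commit, so the following sketch merely inverts the parsing logic shown above; the class is hypothetical, and it assumes the sandbox path and configuration file name contain no underscores.

// Inferred from the parsing logic in main(...) above; the real submitter is not shown in this commit.
public class NodeArgumentSketch {

	public static String[] buildArguments(int cellStartIndex, int numberOfCells, int speciesStartIndex,
			int numberOfSpecies, String sandboxFolder, String algorithmClass, boolean duplicate,
			String nodeConfigurationFile, String logfile) {
		// args[0]: underscore-separated fields, in the exact order main(...) reads them back.
		String packed = cellStartIndex + "_" + numberOfCells + "_" + speciesStartIndex + "_" + numberOfSpecies
				+ "_" + sandboxFolder + "_" + algorithmClass + "_" + duplicate + "_" + nodeConfigurationFile;
		// args[1]: the log file the node should write to.
		return new String[] { packed, logfile };
	}
}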
@ -1,14 +1,13 @@
|
|||
package org.gcube.dataanalysis.ecoengine.modeling;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.Model;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
|
||||
|
||||
public class SimpleModeler implements Modeler{
|
||||
private Model innermodel;
|
||||
|
@ -64,7 +63,7 @@ public class SimpleModeler implements Modeler{
|
|||
}
|
||||
|
||||
@Override
|
||||
public HashMap<String, VarCouple> getInputParameters() {
|
||||
public List<StatisticalType> getInputParameters() {
|
||||
return innermodel.getInputParameters();
|
||||
}
|
||||
|
||||
|
@ -73,12 +72,8 @@ public class SimpleModeler implements Modeler{
|
|||
return INFRASTRUCTURE.LOCAL;
|
||||
}
|
||||
|
||||
public VARTYPE getContentType() {
|
||||
return innermodel.getContentType();
|
||||
}
|
||||
|
||||
public Object getContent() {
|
||||
return innermodel.getContent();
|
||||
public StatisticalType getOutput() {
|
||||
return innermodel.getOutput();
|
||||
}
|
||||
|
||||
}
@ -4,17 +4,26 @@ import java.io.File;
|
|||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.ObjectOutputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.Model;
|
||||
import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.Neural_Network;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
|
||||
import org.hibernate.SessionFactory;
|
||||
|
||||
public class ModelAquamapsNN implements Model {
|
||||
|
@ -36,18 +45,34 @@ public class ModelAquamapsNN implements Model {
|
|||
}
|
||||
|
||||
@Override
|
||||
public HashMap<String, VarCouple> getInputParameters() {
|
||||
HashMap<String, VarCouple> parameters = new HashMap<String, VarCouple>();
|
||||
parameters.put("AbsenceDataTable", new VarCouple(VARTYPE.STRING, "absence_data"));
|
||||
parameters.put("PresenceDataTable", new VarCouple(VARTYPE.STRING, "presence_data"));
|
||||
parameters.put("SpeciesName", new VarCouple(VARTYPE.STRING, ""));
|
||||
parameters.put("LayersNeurons", new VarCouple(VARTYPE.STRING, "100,2"));
|
||||
public List<StatisticalType> getInputParameters() {
|
||||
List<StatisticalType> parameters = new ArrayList<StatisticalType>();
|
||||
List<TableTemplates> templatesOccurrences = new ArrayList<TableTemplates>();
|
||||
templatesOccurrences.add(TableTemplates.OCCURRENCE);
|
||||
|
||||
InputTable p1 = new InputTable(templatesOccurrences,"AbsenceDataTable","A Table containing absence points");
|
||||
InputTable p2 = new InputTable(templatesOccurrences,"PresenceDataTable","A Table containing positive occurrences");
|
||||
PrimitiveType p3 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "SpeciesName","Species Code of the fish the NN will correspond to","Fis-10407");
|
||||
PrimitiveType p4 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "LayersNeurons","a list of neurons number for each inner layer separated by comma","100,2");
|
||||
|
||||
parameters.put("UserName", new VarCouple(VARTYPE.SERVICE, ""));
|
||||
parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, ""));
|
||||
parameters.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, ""));
|
||||
parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, ""));
|
||||
parameters.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, ""));
|
||||
DatabaseType p5 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
|
||||
DatabaseType p6 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
|
||||
DatabaseType p7 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver");
|
||||
DatabaseType p8 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
|
||||
DatabaseType p9 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect");
|
||||
|
||||
ServiceType p10 = new ServiceType(ServiceParameters.USERNAME, "UserName","LDAP username");
|
||||
|
||||
parameters.add(p1);
|
||||
parameters.add(p2);
|
||||
parameters.add(p3);
|
||||
parameters.add(p4);
|
||||
parameters.add(p5);
|
||||
parameters.add(p6);
|
||||
parameters.add(p7);
|
||||
parameters.add(p8);
|
||||
parameters.add(p9);
|
||||
parameters.add(p10);
|
||||
return parameters;
|
||||
}
|
||||
|
||||
|
@ -119,15 +144,7 @@ public class ModelAquamapsNN implements Model {
|
|||
return status;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getInputType() {
|
||||
return AlgorithmConfiguration.class.getName();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getOutputType() {
|
||||
return File.class.getName();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void postprocess(AlgorithmConfiguration Input, Model previousModel) {
|
||||
|
@ -182,15 +199,14 @@ public class ModelAquamapsNN implements Model {
|
|||
status = 100f;
|
||||
}
|
||||
|
||||
public VARTYPE getContentType() {
|
||||
return VARTYPE.FILE;
|
||||
|
||||
|
||||
@Override
|
||||
public StatisticalType getOutput() {
|
||||
PrimitiveType p = new PrimitiveType(File.class.getName(), new File(fileName), PrimitiveTypes.FILE, "NeuralNetwork","Trained Neural Network");
|
||||
return p;
|
||||
}
|
||||
|
||||
public Object getContent() {
|
||||
return new File(fileName);
|
||||
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void stop() {
@ -4,19 +4,25 @@ import java.io.File;
|
|||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.ObjectOutputStream;
|
||||
import java.util.HashMap;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.Model;
|
||||
import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.Neural_Network;
|
||||
import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.neurosolutions.NeuralNet;
|
||||
import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.neurosolutions.Pattern;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
|
||||
import org.hibernate.SessionFactory;
|
||||
|
||||
public class ModelAquamapsNNNS implements Model {
|
||||
|
@ -38,20 +44,36 @@ public class ModelAquamapsNNNS implements Model {
|
|||
}
|
||||
|
||||
@Override
|
||||
public HashMap<String, VarCouple> getInputParameters() {
|
||||
HashMap<String, VarCouple> parameters = new HashMap<String, VarCouple>();
|
||||
parameters.put("AbsenceDataTable", new VarCouple(VARTYPE.STRING, "absence_data"));
|
||||
parameters.put("PresenceDataTable", new VarCouple(VARTYPE.STRING, "presence_data"));
|
||||
parameters.put("SpeciesName", new VarCouple(VARTYPE.STRING, ""));
|
||||
parameters.put("UserName", new VarCouple(VARTYPE.STRING, ""));
|
||||
public List<StatisticalType> getInputParameters() {
|
||||
List<StatisticalType> parameters = new ArrayList<StatisticalType>();
|
||||
List<TableTemplates> templatesOccurrences = new ArrayList<TableTemplates>();
|
||||
templatesOccurrences.add(TableTemplates.OCCURRENCE);
|
||||
|
||||
InputTable p1 = new InputTable(templatesOccurrences,"AbsenceDataTable","A Table containing absence points");
|
||||
InputTable p2 = new InputTable(templatesOccurrences,"PresenceDataTable","A Table containing positive occurrences");
|
||||
PrimitiveType p3 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "SpeciesName","Species Code of the fish the NN will correspond to","Fis-10407");
|
||||
PrimitiveType p4 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "LayersNeurons","a list of neurons number for each inner layer separated by comma","100,2");
|
||||
|
||||
parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, ""));
|
||||
parameters.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, ""));
|
||||
parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, ""));
|
||||
parameters.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, ""));
|
||||
DatabaseType p5 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
|
||||
DatabaseType p6 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
|
||||
DatabaseType p7 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver");
|
||||
DatabaseType p8 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
|
||||
DatabaseType p9 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect");
|
||||
|
||||
ServiceType p10 = new ServiceType(ServiceParameters.USERNAME, "UserName","LDAP username");
|
||||
|
||||
parameters.add(p1);
|
||||
parameters.add(p2);
|
||||
parameters.add(p3);
|
||||
parameters.add(p4);
|
||||
parameters.add(p5);
|
||||
parameters.add(p6);
|
||||
parameters.add(p7);
|
||||
parameters.add(p8);
|
||||
parameters.add(p9);
|
||||
parameters.add(p10);
|
||||
return parameters;
|
||||
}
|
||||
|
||||
@Override
|
||||
public float getVersion() {
|
||||
return 0;
|
||||
|
@ -109,15 +131,7 @@ public class ModelAquamapsNNNS implements Model {
|
|||
return status;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getInputType() {
|
||||
return AlgorithmConfiguration.class.getName();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getOutputType() {
|
||||
return String.class.getName();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void postprocess(AlgorithmConfiguration Input, Model previousModel) {
|
||||
|
@ -187,14 +201,12 @@ public class ModelAquamapsNNNS implements Model {
|
|||
status = 100f;
|
||||
}
|
||||
|
||||
public VARTYPE getContentType() {
|
||||
return VARTYPE.FILE;
|
||||
}
|
||||
|
||||
public Object getContent() {
|
||||
return fileName;
|
||||
|
||||
@Override
|
||||
public StatisticalType getOutput() {
|
||||
PrimitiveType p = new PrimitiveType(File.class.getName(), new File(fileName), PrimitiveTypes.FILE, "NeuralNetwork","Trained Neural Network");
|
||||
return p;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void stop() {
@ -13,12 +13,20 @@ import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
|
|||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
|
||||
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.Model;
|
||||
import org.gcube.dataanalysis.ecoengine.models.cores.aquamaps.AquamapsEnvelopeAlgorithm;
|
||||
import org.gcube.dataanalysis.ecoengine.models.cores.aquamaps.EnvelopeSet;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
|
||||
import org.hibernate.SessionFactory;
|
||||
|
||||
public class ModelHSPEN implements Model {
|
||||
|
@ -57,6 +65,7 @@ public class ModelHSPEN implements Model {
|
|||
private long lastTime;
|
||||
AlgorithmConfiguration outconfig;
|
||||
private String outputTable;
|
||||
private String outputTableLabel;
|
||||
|
||||
@Override
|
||||
public float getVersion() {
|
||||
|
@ -90,6 +99,7 @@ public class ModelHSPEN implements Model {
|
|||
}
|
||||
|
||||
outputTable = outconfig.getParam("OuputEnvelopeTable");
|
||||
outputTableLabel = outconfig.getParam("OuputEnvelopeTableLabel");
|
||||
// initialize queries
|
||||
dynamicAlterQuery = alterQuery.replace("%HSPEN%", outconfig.getParam("OuputEnvelopeTable"));
|
||||
dynamicDropTable = dropHspenTable.replace("%HSPEN%", outconfig.getParam("OuputEnvelopeTable"));
|
||||
|
@ -335,14 +345,16 @@ public class ModelHSPEN implements Model {
|
|||
// take ending time
|
||||
}
|
||||
|
||||
public VARTYPE getContentType() {
|
||||
return VARTYPE.HSPEN;
|
||||
}
|
||||
|
||||
public Object getContent() {
|
||||
return outputTable;
|
||||
@Override
|
||||
public StatisticalType getOutput() {
|
||||
List<TableTemplates> templateHspen = new ArrayList<TableTemplates>();
|
||||
templateHspen.add(TableTemplates.HSPEN);
|
||||
OutputTable p = new OutputTable(templateHspen,outputTableLabel,outputTable,"Output hspen table");
|
||||
return p;
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
public void setVersion(float version) {
|
||||
this.version = version;
|
||||
|
@ -404,16 +416,7 @@ public class ModelHSPEN implements Model {
|
|||
return status;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getInputType() {
|
||||
return AlgorithmConfiguration.class.getName();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getOutputType() {
|
||||
return String.class.getName();
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public ALG_PROPS[] getProperties() {
|
||||
ALG_PROPS[] props = {ALG_PROPS.SPECIES_ENVELOPES};
|
||||
|
@ -426,17 +429,39 @@ public class ModelHSPEN implements Model {
|
|||
}
|
||||
|
||||
@Override
|
||||
public HashMap<String, VarCouple> getInputParameters() {
|
||||
HashMap<String, VarCouple> parameters = new HashMap<String,VarCouple>();
|
||||
parameters.put("EnvelopeTable", new VarCouple(VARTYPE.STRING,"hspen"));
|
||||
parameters.put("CsquarecodesTable", new VarCouple(VARTYPE.STRING,"hcaf_d"));
|
||||
parameters.put("OccurrenceCellsTable", new VarCouple(VARTYPE.STRING,"occurrencecells"));
|
||||
parameters.put("CreateTable", new VarCouple(VARTYPE.STRING,"true"));
|
||||
parameters.put("OuputEnvelopeTable", new VarCouple(VARTYPE.RANDOM,"hspen_"));
|
||||
parameters.put("DatabaseUserName",new VarCouple(VARTYPE.DATABASEUSERNAME,""));
|
||||
parameters.put("DatabasePassword",new VarCouple(VARTYPE.DATABASEPASSWORD,""));
|
||||
parameters.put("DatabaseURL",new VarCouple(VARTYPE.DATABASEURL,""));
|
||||
parameters.put("DatabaseDriver",new VarCouple(VARTYPE.DATABASEDRIVER,""));
|
||||
public List<StatisticalType> getInputParameters() {
|
||||
List<StatisticalType> parameters = new ArrayList<StatisticalType>();
|
||||
List<TableTemplates> templatesOccurrences = new ArrayList<TableTemplates>();
|
||||
templatesOccurrences.add(TableTemplates.OCCURRENCE);
|
||||
List<TableTemplates> templateHspen = new ArrayList<TableTemplates>();
|
||||
templateHspen.add(TableTemplates.HSPEN);
|
||||
List<TableTemplates> templateHcaf = new ArrayList<TableTemplates>();
|
||||
templateHcaf.add(TableTemplates.HCAF);
|
||||
|
||||
InputTable p1 = new InputTable(templateHspen,"EnvelopeTable","The previous hspen table for regeneration","hspen");
|
||||
InputTable p2 = new InputTable(templateHcaf,"CsquarecodesTable","HCaf Table","hcaf_d");
|
||||
InputTable p3 = new InputTable(templatesOccurrences,"OccurrenceCellsTable","Ocurrence Cells Table","occurrencecells");
|
||||
PrimitiveType p4 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.CONSTANT, "CreateTable","Create New Table for each computation","true");
|
||||
PrimitiveType p5 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "OuputEnvelopeTableLabel","Table name for the new hspen","hspen_1");
|
||||
ServiceType p11 = new ServiceType(ServiceParameters.RANDOMSTRING, "OuputEnvelopeTable","Table name for the new hspen","hspen_");
|
||||
DatabaseType p6 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
|
||||
DatabaseType p7 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
|
||||
DatabaseType p8 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver");
|
||||
DatabaseType p9 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
|
||||
DatabaseType p10 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect");
|
||||
|
||||
parameters.add(p1);
|
||||
parameters.add(p2);
|
||||
parameters.add(p3);
|
||||
parameters.add(p4);
|
||||
parameters.add(p5);
|
||||
parameters.add(p6);
|
||||
parameters.add(p7);
|
||||
parameters.add(p8);
|
||||
parameters.add(p9);
|
||||
parameters.add(p10);
|
||||
parameters.add(p11);
|
||||
|
||||
return parameters;
|
||||
}
@ -1,6 +1,5 @@
|
|||
package org.gcube.dataanalysis.ecoengine.processing;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Properties;
|
||||
|
@ -17,10 +16,10 @@ import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
|||
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
|
||||
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
|
||||
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistribution;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
|
||||
|
||||
public class LocalSimpleSplitGenerator implements Generator {
|
||||
|
||||
|
@ -328,18 +327,21 @@ public class LocalSimpleSplitGenerator implements Generator {
|
|||
}
|
||||
|
||||
@Override
|
||||
public HashMap<String, VarCouple> getInputParameters() {
|
||||
public List<StatisticalType> getInputParameters() {
|
||||
return distributionModel.getInputParameters();
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
public VARTYPE getContentType() {
|
||||
return distributionModel.getContentType();
|
||||
public StatisticalType getOutput() {
|
||||
|
||||
return distributionModel.getOutput();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getContent() {
|
||||
return distributionModel.getContent();
|
||||
public SpatialProbabilityDistribution getAlgorithm() {
|
||||
return distributionModel;
|
||||
}
@ -1,6 +1,5 @@
|
|||
package org.gcube.dataanalysis.ecoengine.processing;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Properties;
|
||||
import java.util.Queue;
|
||||
|
@ -19,11 +18,11 @@ import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
|||
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
|
||||
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
|
||||
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistribution;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionTable;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
|
||||
import org.hibernate.SessionFactory;
|
||||
|
||||
public class LocalSplitGenerator implements Generator {
|
||||
|
@ -497,18 +496,22 @@ public class LocalSplitGenerator implements Generator {
|
|||
}
|
||||
|
||||
@Override
|
||||
public HashMap<String, VarCouple> getInputParameters() {
|
||||
public List<StatisticalType> getInputParameters() {
|
||||
return distributionModel.getInputParameters();
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
public VARTYPE getContentType() {
|
||||
return distributionModel.getContentType();
|
||||
public StatisticalType getOutput() {
|
||||
|
||||
return distributionModel.getOutput();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getContent() {
|
||||
return config.getParam("DistributionTable");
|
||||
public SpatialProbabilityDistribution getAlgorithm() {
|
||||
return distributionModel;
|
||||
}
|
||||
|
||||
|
||||
}
@ -1,6 +1,7 @@
|
|||
package org.gcube.dataanalysis.ecoengine.processing;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
|
@ -12,11 +13,11 @@ import org.gcube.dataanalysis.ecoengine.connectors.RemoteHspecInputObject;
|
|||
import org.gcube.dataanalysis.ecoengine.connectors.RemoteHspecOutputObject;
|
||||
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
|
||||
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistribution;
|
||||
|
||||
public class RainyCloudGenerator implements Generator {
|
||||
//deprecated
|
||||
public class RainyCloudGenerator {
|
||||
|
||||
AlgorithmConfiguration config;
|
||||
private boolean interruptProcessing;
|
||||
|
@ -31,7 +32,6 @@ public class RainyCloudGenerator implements Generator {
|
|||
public RainyCloudGenerator() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public float getStatus() {
|
||||
RemoteHspecOutputObject oo = remoteGenerationManager.retrieveCompleteStatus();
|
||||
|
||||
|
@ -45,7 +45,6 @@ public class RainyCloudGenerator implements Generator {
|
|||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void init() {
|
||||
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
|
||||
interruptProcessing = false;
|
||||
|
@ -89,17 +88,16 @@ public class RainyCloudGenerator implements Generator {
|
|||
remoteGenerationManager = new RemoteGenerationManager(config.getParam("RemoteCalculator"));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
|
||||
public void setConfiguration(AlgorithmConfiguration config) {
|
||||
this.config = config;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void shutdown() {
|
||||
interruptProcessing = true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getResourceLoad() {
|
||||
String returnString = "[]";
|
||||
|
||||
|
@ -115,7 +113,6 @@ public class RainyCloudGenerator implements Generator {
|
|||
return returnString;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getResources() {
|
||||
Resources res = new Resources();
|
||||
try {
|
||||
|
@ -129,7 +126,6 @@ public class RainyCloudGenerator implements Generator {
|
|||
return "[]";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getLoad() {
|
||||
RemoteHspecOutputObject rhoo = remoteGenerationManager.retrieveCompleteStatus();
|
||||
String returnString = "[]";
|
||||
|
@ -140,7 +136,7 @@ public class RainyCloudGenerator implements Generator {
|
|||
return returnString;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
public void generate() throws Exception {
|
||||
|
||||
try {
|
||||
|
@ -163,18 +159,18 @@ public class RainyCloudGenerator implements Generator {
|
|||
AnalysisLogger.getLogger().trace("REMOTE PROCESSING ENDED");
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
public ALG_PROPS[] getSupportedAlgorithms() {
|
||||
ALG_PROPS[] p = { ALG_PROPS.SPECIES_VS_CSQUARE_REMOTE_FROM_DATABASE };
|
||||
return p;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
public INFRASTRUCTURE getInfrastructure() {
|
||||
return INFRASTRUCTURE.D4SCIENCE;
|
||||
}
|
||||
|
||||
@Override
|
||||
/*
|
||||
public HashMap<String, VarCouple> getInputParameters() {
|
||||
HashMap<String, VarCouple> parameters = new HashMap<String, VarCouple>();
|
||||
parameters.put("RemoteEnvironment", new VarCouple(VARTYPE.INFRA,""));
|
||||
|
@ -191,14 +187,14 @@ public class RainyCloudGenerator implements Generator {
|
|||
return parameters;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
public VARTYPE getContentType() {
|
||||
return VARTYPE.HSPEC;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
public Object getContent() {
|
||||
return config.getParam("DistributionTable");
|
||||
}
|
||||
|
||||
*/
|
||||
}
@@ -1,17 +1,11 @@
package org.gcube.dataanalysis.ecoengine.processing.factories;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
import org.gcube.dataanalysis.ecoengine.interfaces.Model;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
public class EvaluatorsFactory {
@@ -26,8 +20,8 @@ public class EvaluatorsFactory {
return evaluators;
}
public static HashMap<String,VarCouple> getEvaluatorParameters(String configPath, String algorithmName) throws Exception{
HashMap<String,VarCouple> inputs = ProcessorsFactory.getParameters(configPath + AlgorithmConfiguration.evaluatorsFile, algorithmName);
public static List<StatisticalType> getEvaluatorParameters(String configPath, String algorithmName) throws Exception{
List<StatisticalType> inputs = ProcessorsFactory.getParameters(configPath + AlgorithmConfiguration.evaluatorsFile, algorithmName);
return inputs;
}
@@ -1,7 +1,6 @@
package org.gcube.dataanalysis.ecoengine.processing.factories;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;
import java.util.ServiceLoader;
@@ -9,10 +8,10 @@ import java.util.ServiceLoader;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistribution;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
public class GeneratorsFactory {
@@ -69,8 +68,8 @@ public class GeneratorsFactory {
public static HashMap<String,VarCouple> getAlgorithmParameters(String configPath, String algorithmName) throws Exception{
HashMap<String,VarCouple> inputs = ProcessorsFactory.getParameters(configPath + AlgorithmConfiguration.algorithmsFile, algorithmName);
public static List<StatisticalType> getAlgorithmParameters(String configPath, String algorithmName) throws Exception{
List<StatisticalType> inputs = ProcessorsFactory.getParameters(configPath + AlgorithmConfiguration.algorithmsFile, algorithmName);
return inputs;
}
@@ -1,16 +1,15 @@
package org.gcube.dataanalysis.ecoengine.processing.factories;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.Model;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
public class ModelersFactory {
@@ -31,8 +30,8 @@ public class ModelersFactory {
}
public static HashMap<String,VarCouple> getModelParameters(String configPath, String algorithmName) throws Exception{
HashMap<String,VarCouple> inputs = ProcessorsFactory.getParameters(configPath + AlgorithmConfiguration.modelsFile, algorithmName);
public static List<StatisticalType> getModelParameters(String configPath, String algorithmName) throws Exception{
List<StatisticalType> inputs = ProcessorsFactory.getParameters(configPath + AlgorithmConfiguration.modelsFile, algorithmName);
return inputs;
}
@@ -7,17 +7,17 @@ import java.util.Properties;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.Model;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistribution;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
public class ProcessorsFactory {
public static HashMap<String, VarCouple> getDefaultDatabaseConfiguration(String cfgPath) {
public static List<StatisticalType> getDefaultDatabaseConfiguration(String cfgPath) {
String databasecfgfile = cfgPath + AlgorithmConfiguration.defaultConnectionFile;
try {
return DatabaseFactory.getDefaultDatabaseConfiguration(databasecfgfile);
@@ -37,7 +37,7 @@ public class ProcessorsFactory {
return algs;
}
public static HashMap<String, VarCouple> getParameters(String file, String algorithmName) throws Exception {
public static List<StatisticalType> getParameters(String file, String algorithmName) throws Exception {
Properties p = AlgorithmConfiguration.getProperties(file);
String algorithmclass = p.getProperty(algorithmName);
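With the factories above now returning List<StatisticalType>, enumerating the declared inputs of an algorithm is a plain list traversal. A minimal sketch follows; the factory signature is the one introduced in this commit, while "./cfg/" and the algorithm name are placeholders, and the loop relies only on toString() since no specific StatisticalType accessors are shown here.

import java.util.List;

import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;

// Sketch: print the declared inputs of one distribution algorithm.
public class ListAlgorithmParameters {

	public static void main(String[] args) throws Exception {
		// Placeholder config path and algorithm name; substitute the real ones.
		List<StatisticalType> inputs = GeneratorsFactory.getAlgorithmParameters("./cfg/", "AQUAMAPS_SUITABLE");
		for (StatisticalType input : inputs) {
			System.out.println(input); // whatever representation StatisticalType provides
		}
	}
}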
@ -3,13 +3,16 @@ package org.gcube.dataanalysis.ecoengine.spatialdistributions;
|
|||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
|
||||
import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.Neural_Network;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
|
||||
import org.hibernate.SessionFactory;
|
||||
|
||||
public class AquamapsNN extends AquamapsNative{
|
||||
|
@ -27,24 +30,15 @@ public class AquamapsNN extends AquamapsNative{
|
|||
}
|
||||
|
||||
@Override
|
||||
public HashMap<String, VarCouple> getInputParameters() {
|
||||
HashMap<String, VarCouple> parameters = new HashMap<String, VarCouple>();
|
||||
public List<StatisticalType> getInputParameters() {
|
||||
List<StatisticalType> parameters = super.getInputParameters();
|
||||
|
||||
parameters.put("EnvelopeTable", new VarCouple(VARTYPE.STRING,"hspen"));
|
||||
parameters.put("PreprocessedTable", new VarCouple(VARTYPE.STRING,"maxminlat_hspen"));
|
||||
|
||||
parameters.put("SpeciesTable", new VarCouple(VARTYPE.STRING, "hspen"));
|
||||
parameters.put("CsquarecodesTable", new VarCouple(VARTYPE.STRING, "hcaf_d"));
|
||||
parameters.put("DistributionTable", new VarCouple(VARTYPE.RANDOM, "hspec_nn_"));
|
||||
parameters.put("SpeciesName", new VarCouple(VARTYPE.STRING, ""));
|
||||
parameters.put("CreateTable", new VarCouple(VARTYPE.STRING,"true"));
|
||||
parameters.put("UserName", new VarCouple(VARTYPE.STRING, ""));
|
||||
PrimitiveType p1 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "SpeciesName","Name of the Species for which the distribution has to be produced","Fis-10407");
|
||||
ServiceType p2 = new ServiceType(ServiceParameters.USERNAME, "UserName","LDAP username");
|
||||
|
||||
parameters.add(p1);
|
||||
parameters.add(p2);
|
||||
|
||||
parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, ""));
|
||||
parameters.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, ""));
|
||||
parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, ""));
|
||||
parameters.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, ""));
|
||||
|
||||
return parameters;
|
||||
}
|
||||
|
||||
|
|
|
@ -3,13 +3,16 @@ package org.gcube.dataanalysis.ecoengine.spatialdistributions;
|
|||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
|
||||
import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.neurosolutions.NeuralNet;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
|
||||
import org.hibernate.SessionFactory;
|
||||
|
||||
public class AquamapsNNNS extends AquamapsNative{
|
||||
|
@ -27,24 +30,15 @@ public class AquamapsNNNS extends AquamapsNative{
|
|||
}
|
||||
|
||||
@Override
|
||||
public HashMap<String, VarCouple> getInputParameters() {
|
||||
HashMap<String, VarCouple> parameters = new HashMap<String, VarCouple>();
|
||||
public List<StatisticalType> getInputParameters() {
|
||||
List<StatisticalType> parameters = super.getInputParameters();
|
||||
|
||||
parameters.put("EnvelopeTable", new VarCouple(VARTYPE.STRING,"hspen"));
|
||||
parameters.put("PreprocessedTable", new VarCouple(VARTYPE.STRING,"maxminlat_hspen"));
|
||||
|
||||
parameters.put("SpeciesTable", new VarCouple(VARTYPE.STRING, "hspen"));
|
||||
parameters.put("CsquarecodesTable", new VarCouple(VARTYPE.STRING, "hcaf_d"));
|
||||
parameters.put("DistributionTable", new VarCouple(VARTYPE.RANDOM, "hspec_nn_"));
|
||||
parameters.put("SpeciesName", new VarCouple(VARTYPE.STRING, ""));
|
||||
parameters.put("CreateTable", new VarCouple(VARTYPE.STRING,"true"));
|
||||
parameters.put("UserName", new VarCouple(VARTYPE.STRING, ""));
|
||||
PrimitiveType p1 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "SpeciesName","Name of the Species for which the distribution has to be produced","Fis-10407");
|
||||
ServiceType p2 = new ServiceType(ServiceParameters.USERNAME, "UserName","LDAP username");
|
||||
|
||||
parameters.add(p1);
|
||||
parameters.add(p2);
|
||||
|
||||
parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, ""));
|
||||
parameters.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, ""));
|
||||
parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, ""));
|
||||
parameters.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, ""));
|
||||
|
||||
return parameters;
|
||||
}
|
||||
|
||||
|
|
|
@ -3,13 +3,16 @@ package org.gcube.dataanalysis.ecoengine.spatialdistributions;
|
|||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
|
||||
import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.Neural_Network;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
|
||||
import org.hibernate.SessionFactory;
|
||||
|
||||
public class AquamapsNNSuitable extends AquamapsSuitable{
|
||||
|
@ -27,24 +30,15 @@ public class AquamapsNNSuitable extends AquamapsSuitable{
|
|||
}
|
||||
|
||||
@Override
|
||||
public HashMap<String, VarCouple> getInputParameters() {
|
||||
HashMap<String, VarCouple> parameters = new HashMap<String, VarCouple>();
|
||||
public List<StatisticalType> getInputParameters() {
|
||||
List<StatisticalType> parameters = super.getInputParameters();
|
||||
|
||||
parameters.put("EnvelopeTable", new VarCouple(VARTYPE.STRING,"hspen"));
|
||||
parameters.put("PreprocessedTable", new VarCouple(VARTYPE.STRING,"maxminlat_hspen"));
|
||||
|
||||
parameters.put("SpeciesTable", new VarCouple(VARTYPE.STRING, "hspen"));
|
||||
parameters.put("CsquarecodesTable", new VarCouple(VARTYPE.STRING, "hcaf_d"));
|
||||
parameters.put("DistributionTable", new VarCouple(VARTYPE.RANDOM, "hspec_nn_"));
|
||||
parameters.put("SpeciesName", new VarCouple(VARTYPE.STRING, ""));
|
||||
parameters.put("CreateTable", new VarCouple(VARTYPE.STRING,"true"));
|
||||
parameters.put("UserName", new VarCouple(VARTYPE.STRING, ""));
|
||||
PrimitiveType p1 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "SpeciesName","Name of the Species for which the distribution has to be produced","Fis-10407");
|
||||
ServiceType p2 = new ServiceType(ServiceParameters.USERNAME, "UserName","LDAP username");
|
||||
|
||||
parameters.add(p1);
|
||||
parameters.add(p2);
|
||||
|
||||
parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, ""));
|
||||
parameters.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, ""));
|
||||
parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, ""));
|
||||
parameters.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, ""));
|
||||
|
||||
return parameters;
|
||||
}
|
||||
|
||||
|
|
|
@@ -8,10 +8,18 @@ import java.util.Queue;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionTable;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
import org.hibernate.SessionFactory;

public class AquamapsSuitable implements SpatialProbabilityDistributionTable{

@@ -20,6 +28,7 @@ public class AquamapsSuitable implements SpatialProbabilityDistributionTable{
    String csquareCodeQuery = "select csquarecode,depthmean,depthmax,depthmin, sstanmean,sbtanmean,salinitymean,salinitybmean, primprodmean,iceconann,landdist,oceanarea,centerlat,centerlong,faoaream,eezall,lme from %1$s d where oceanarea>0";
    String createTableStatement = "CREATE TABLE %1$s ( speciesid character varying, csquarecode character varying, probability real, boundboxyn smallint, faoareayn smallint, faoaream integer, eezall character varying, lme integer) WITH (OIDS=FALSE ); CREATE INDEX CONCURRENTLY %1$s_idx ON %1$s USING btree (speciesid, csquarecode, faoaream, eezall, lme);";
    String destinationTable;
    String destinationTableLabel;
    String metainfo ="boundboxyn, faoareayn, faoaream, eezall, lme";
    String selectAllSpeciesObservationQuery = "SELECT speciesid,maxclat,minclat from %1$s;";
    String hspenMinMaxLat = "maxminlat_hspen";

@@ -36,6 +45,8 @@ public class AquamapsSuitable implements SpatialProbabilityDistributionTable{
        csquareCodeQuery = String.format(csquareCodeQuery, config.getParam("CsquarecodesTable"));
        createTableStatement = String.format(createTableStatement,config.getParam("DistributionTable"));
        destinationTable = config.getParam("DistributionTable");
        destinationTableLabel = config.getParam("DistributionTableLabel");

        core = new AquamapsAlgorithmCore();

        if ((config.getParam("PreprocessedTable")!=null)&&(config.getParam("PreprocessedTable").length()>0))

@@ -204,33 +215,54 @@ public class AquamapsSuitable implements SpatialProbabilityDistributionTable{
    }

    @Override
    public HashMap<String, VarCouple> getInputParameters() {
        HashMap<String, VarCouple> parameters = new HashMap<String,VarCouple>();
        parameters.put("EnvelopeTable", new VarCouple(VARTYPE.HSPEN,"hspen"));
        parameters.put("CsquarecodesTable", new VarCouple(VARTYPE.HCAF,"hcaf_d"));
        parameters.put("DistributionTable", new VarCouple(VARTYPE.RANDOM,"hspec_"));
        parameters.put("PreprocessedTable", new VarCouple(VARTYPE.MINMAXLAT,"maxminlat_hspen"));
        parameters.put("CreateTable", new VarCouple(VARTYPE.STRING,"true"));
        parameters.put("DatabaseUserName",new VarCouple(VARTYPE.DATABASEUSERNAME,""));
        parameters.put("DatabasePassword",new VarCouple(VARTYPE.DATABASEPASSWORD,""));
        parameters.put("DatabaseURL",new VarCouple(VARTYPE.DATABASEURL,""));
        parameters.put("DatabaseDriver",new VarCouple(VARTYPE.DATABASEDRIVER,""));
    public List<StatisticalType> getInputParameters() {
        List<StatisticalType> parameters = new ArrayList<StatisticalType>();
        List<TableTemplates> templatesMinmax = new ArrayList<TableTemplates>();
        templatesMinmax.add(TableTemplates.MINMAXLAT);
        List<TableTemplates> templateHspen = new ArrayList<TableTemplates>();
        templateHspen.add(TableTemplates.HSPEN);
        List<TableTemplates> templateHcaf = new ArrayList<TableTemplates>();
        templateHcaf.add(TableTemplates.HCAF);

        InputTable p1 = new InputTable(templateHspen,"EnvelopeTable","The previous hspen table for regeneration","hspen");
        InputTable p2 = new InputTable(templateHcaf,"CsquarecodesTable","HCaf Table","hcaf_d");
        ServiceType p3 = new ServiceType(ServiceParameters.RANDOMSTRING, "DistributionTable","Table name of the distribution","hspec_");
        PrimitiveType p4 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "DistributionTableLabel","Name of the HSPEC probability distribution","hspec");
        InputTable p5 = new InputTable(templateHcaf,"PreprocessedTable","Minimum maximum latitudes table for species","maxminlat_hspen");
        PrimitiveType p6 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.CONSTANT, "CreateTable","Create New Table for each computation","true");

        DatabaseType p7 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
        DatabaseType p8 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
        DatabaseType p9 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver");
        DatabaseType p10 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
        DatabaseType p11 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect");

        parameters.add(p1);
        parameters.add(p2);
        parameters.add(p3);
        parameters.add(p4);
        parameters.add(p5);
        parameters.add(p6);
        parameters.add(p7);
        parameters.add(p8);
        parameters.add(p9);
        parameters.add(p10);
        parameters.add(p11);

        return parameters;
    }

    @Override
    public VARTYPE getContentType() {
        return VARTYPE.HSPEC;
    }

    @Override
    public Object getContent() {
        return destinationTable;
    public StatisticalType getOutput() {
        List<TableTemplates> templateHspec = new ArrayList<TableTemplates>();
        templateHspec.add(TableTemplates.HSPEC);
        OutputTable p = new OutputTable(templateHspec,destinationTableLabel,destinationTable,"Output hspec table");
        return p;
    }

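The hunks above replace the HashMap/VarCouple parameter map of AquamapsSuitable with typed StatisticalType declarations and collapse getContentType()/getContent() into a single getOutput(). A minimal sketch of how a client can list the declared inputs under the new contract; it only reuses the GeneratorsFactory call exercised by TestsMetaInfo further down, and the class name and package below are hypothetical:

package org.gcube.dataanalysis.ecoengine.test;

import java.util.List;

import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;

public class PrintAlgorithmInputs {
    public static void main(String[] args) throws Exception {
        // resolve the AQUAMAPS_SUITABLE generator from the cfg directory and read its declared inputs
        List<StatisticalType> inputs = GeneratorsFactory.getAlgorithmParameters("./cfg/", "AQUAMAPS_SUITABLE");
        for (StatisticalType input : inputs)
            System.out.println("expected input: " + input); // relies on toString(), as TestsMetaInfo does
    }
}
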
@@ -5,7 +5,6 @@ import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;

@@ -13,9 +12,10 @@ import java.util.UUID;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;
import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;

public class DummyAlgorithm implements SpatialProbabilityDistributionGeneric{

@@ -188,23 +188,19 @@ public class DummyAlgorithm implements SpatialProbabilityDistributionGeneric{

    @Override
    public String getDescription() {
        // TODO Auto-generated method stub
        return "a testing algorithm for statistical service work performances - calculates a random probability distribution and stores on a file";
    }

    @Override
    public List<StatisticalType> getInputParameters() {
        return null;
    }

    @Override
    public HashMap<String, VarCouple> getInputParameters() {
        return null;
    public StatisticalType getOutput() {
        PrimitiveType p = new PrimitiveType(File.class.getName(), new File(filename), PrimitiveTypes.FILE, "DummyDistribution","Dummy Distribution File");
        return p;
    }

    @Override
    public VARTYPE getContentType() {
        return VARTYPE.FILE;
    }

    @Override
    public Object getContent() {
        return new File(filename);
    }

}

@@ -5,7 +5,6 @@ import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;

@@ -13,9 +12,10 @@ import java.util.UUID;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;
import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;

public class TestAlgorithm implements SpatialProbabilityDistributionGeneric{

@@ -136,22 +136,18 @@ public class TestAlgorithm implements SpatialProbabilityDistributionGeneric{

    @Override
    public String getDescription() {
        return "A performance test algorithm for the Statistical Manager - generates a constant probability distribution";
    }

    @Override
    public List<StatisticalType> getInputParameters() {
        return null;
    }

    @Override
    public HashMap<String, VarCouple> getInputParameters() {
        return null;
    }

    @Override
    public VARTYPE getContentType() {
        return VARTYPE.FILE;
    }

    @Override
    public Object getContent() {
        return new File(filename);
    public StatisticalType getOutput() {
        PrimitiveType p = new PrimitiveType(File.class.getName(), new File(filename), PrimitiveTypes.FILE, "TestDistribution","Test Distribution File");
        return p;
    }

}

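In both DummyAlgorithm and TestAlgorithm the getContentType()/getContent() pair is replaced by getOutput(), which wraps the produced file in a PrimitiveType. A minimal sketch of how a caller can unwrap that result; the helper class is hypothetical and uses only getOutput() and PrimitiveType.getContent(), both visible in this change (the evaluator test hunks further down call getContent() the same way):

package org.gcube.dataanalysis.ecoengine.test;

import java.io.File;

import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;

public class OutputUnwrapExample {
    // return the file produced by a generic distribution algorithm, or null if the output is not file-typed
    public static File distributionFile(SpatialProbabilityDistributionGeneric algorithm) {
        StatisticalType result = algorithm.getOutput();
        if (result instanceof PrimitiveType)
            return (File) ((PrimitiveType) result).getContent();
        return null;
    }
}
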
@@ -5,13 +5,13 @@ import java.util.List;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.processing.factories.EvaluatorsFactory;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
import org.gcube.dataanalysis.ecoengine.processing.factories.ModelersFactory;
import org.gcube.dataanalysis.ecoengine.processing.factories.ProcessorsFactory;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;

public class TestsMetaInfo {
    /**

@@ -22,7 +22,7 @@ public class TestsMetaInfo {
    public static void main(String[] args) throws Exception {

        System.out.println("***TEST 1 - Get Algorithm Information***");
        HashMap<String, VarCouple> map = GeneratorsFactory.getAlgorithmParameters("./cfg/","DUMMY");
        List<StatisticalType> map = GeneratorsFactory.getAlgorithmParameters("./cfg/","DUMMY");
        System.out.println("input for DUMMY algorithm: "+map);

        map = GeneratorsFactory.getAlgorithmParameters("./cfg/","AQUAMAPS_SUITABLE");

@@ -17,13 +17,15 @@ public class RegressionComplexGeneration {
    public static void main(String[] args) throws Exception {

        System.out.println("TEST 1");
        /*
        List<Generator> generators = GeneratorsFactory.getGenerators(testConfigRemote());
        generators.get(0).init();
        // generate(generators.get(0));
        generators = null;
        */

        System.out.println("TEST 2");
        generators = GeneratorsFactory.getGenerators(testConfigLocal());
        List<Generator> generators = GeneratorsFactory.getGenerators(testConfigLocal());
        generators.get(0).init();
        generate(generators.get(0));
        generators = null;

@@ -5,6 +5,7 @@ import java.util.List;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.evaluation.DiscrepancyAnalysis;
import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
import org.gcube.dataanalysis.ecoengine.processing.factories.EvaluatorsFactory;

@@ -73,8 +74,8 @@ public static void main(String[] args) throws Exception {

    public void run() {
        try {

            HashMap<String, String> out = dg.process(config);
            PrimitiveType output = (PrimitiveType) dg.process(config);
            HashMap<String, String> out = (HashMap<String, String>)output.getContent();
            DiscrepancyAnalysis.visualizeResults(out);

        } catch (Exception e) {

@@ -0,0 +1,61 @@
package org.gcube.dataanalysis.ecoengine.test.regressions;

import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.BioClimateAnalysis;

public class TestHSPECBioClimateAnalysisDev {

    public static void main(String args[]) throws Exception{
        /*
        String dburl = "jdbc:postgresql://node49.p.d4science.research-infrastructures.eu/aquamaps";
        String dbUser = "gcube";
        String dbPassword = "bilico1980";
        */

        String dburl = "jdbc:postgresql://dbtest.research-infrastructures.eu/aquamapsorgupdated";
        String dbUser = "utente";
        String dbPassword = "d4science";

        BioClimateAnalysis bioClimate=new BioClimateAnalysis("./cfg/","./",dburl, dbUser, dbPassword, true);

        /*
        final String [] hspecTables = {
            "hspec2012_07_05_21_47_13_772",
            "hspec2012_07_05_21_47_13_801",
            "hspec2012_07_05_21_47_13_819",
            "hspec2012_07_05_21_47_13_842",
            "hspec2012_07_05_21_47_13_860",
            "hspec2012_07_05_21_47_13_888",
            "hspec2012_07_05_21_47_13_903",
            "hspec2012_07_05_21_47_13_917"
        };
        */
        final String [] hspecTables = {
            "hspec2012_03_12_12_13_14_610",
            "hspec2012_03_12_15_07_50_820",
            "hspec2012_03_12_18_07_21_503",
            "hspec2012_03_12_23_59_57_744",
            "hspec2012_03_13_02_50_59_399",
            "hspec2012_03_13_10_22_31_865"
        };

        final String [] hspecTableNames = {
            "T1",
            "T2",
            "T3",
            "T4",
            "T5",
            "T6",
            "T7",
            "T8"
        };

        // bioClimate.globalEvolutionAnalysis(null, hspecTables, null, hspecTableNames, "probability", "csquare", 0.8f);
        bioClimate.speciesGeographicEvolutionAnalysis(hspecTables, hspecTableNames,0.8f);

    }

}

@@ -0,0 +1,57 @@
package org.gcube.dataanalysis.ecoengine.test.regressions;

import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.BioClimateAnalysis;

public class TestHSPECBioClimateAnalysisProd {

    public static void main(String args[]) throws Exception{

        String dburl = "jdbc:postgresql://node49.p.d4science.research-infrastructures.eu/aquamaps";
        String dbUser = "gcube";
        String dbPassword = "bilico1980";

        /*
        String dburl = "jdbc:postgresql://dbtest.research-infrastructures.eu/aquamapsorgupdated";
        String dbUser = "utente";
        String dbPassword = "d4science";
        */

        BioClimateAnalysis bioClimate=new BioClimateAnalysis("./cfg/","./",dburl, dbUser, dbPassword, true);

        final String [] hspecTables = {
            "hspec2012_07_02_17_14_10_063",
            "hspec2012_07_05_21_47_13_772",
            "hspec2012_07_05_21_47_13_801"
            // "hspec2012_07_05_21_47_13_819",
            // "hspec2012_07_05_21_47_13_842",
            // "hspec2012_07_05_21_47_13_860",
            // "hspec2012_07_05_21_47_13_888",
            // "hspec2012_07_05_21_47_13_903",
            // "hspec2012_07_05_21_47_13_917",
            // "hspec2012_07_06_13_05_11_775"
        };

        final String [] hspecTableNames = {
            "HSPEC 2015 Suitable Parabolic ",
            "HSPEC 2018 Suitable Parabolic",
            "HSPEC 2021 Suitable Parabolic",
            "HSPEC 2024 Suitable Parabolic",
            "HSPEC 2027 Suitable Parabolic",
            "HSPEC 2030 Suitable Parabolic",
            "HSPEC 2033 Suitable Parabolic",
            "HSPEC 2036 Suitable Parabolic",
            "HSPEC 2039 Suitable Parabolic",
            "HSPEC 2042 Suitable Parabolic",
        };

        bioClimate.globalEvolutionAnalysis(null, hspecTables, null, hspecTableNames, "probability", "csquare", 0.8f);
        // bioClimate.speciesGeographicEvolutionAnalysis(hspecTables, hspecTableNames,0.8f);

    }

}

@@ -4,6 +4,7 @@ import java.util.HashMap;
import java.util.List;

import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.evaluation.DiscrepancyAnalysis;
import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
import org.gcube.dataanalysis.ecoengine.processing.factories.EvaluatorsFactory;

@@ -16,8 +17,9 @@ public class EvaluatorT implements Runnable{

    public void run() {
        try {

            HashMap<String, String> out = dg.process(config);

            PrimitiveType output = (PrimitiveType) dg.process(config);
            HashMap<String, String> out = (HashMap<String, String>)output.getContent();
            DiscrepancyAnalysis.visualizeResults(out);

        } catch (Exception e) {

@@ -7,7 +7,7 @@ import java.io.FileInputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

@@ -18,6 +18,9 @@ import org.dom4j.Node;
import org.dom4j.io.SAXReader;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.SessionFactory;

@@ -35,9 +38,9 @@ public class DatabaseFactory{
    }

    public static HashMap<String,VarCouple> getDefaultDatabaseConfiguration(String configurationFile) throws Exception {
    public static List<StatisticalType> getDefaultDatabaseConfiguration(String configurationFile) throws Exception {

        HashMap<String,VarCouple> defaultconfig = new HashMap<String, VarCouple>();
        List<StatisticalType> defaultconfig = new ArrayList<StatisticalType>();

        // take the configuration file
        File fl = new File(configurationFile);

@@ -53,15 +56,15 @@ public class DatabaseFactory{
            Node currentnode = nodesIterator.next();
            String element = currentnode.valueOf("@name");
            if (element.equals("connection.driver_class"))
                defaultconfig.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, currentnode.getText()));
                defaultconfig.add(new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver",currentnode.getText()));
            if (element.equals("connection.url"))
                defaultconfig.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, currentnode.getText()));
                defaultconfig.add(new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url",currentnode.getText()));
            if (element.equals("connection.username"))
                defaultconfig.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, currentnode.getText()));
                defaultconfig.add(new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name",currentnode.getText()));
            if (element.equals("connection.password"))
                defaultconfig.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, currentnode.getText()));
                defaultconfig.add(new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password",currentnode.getText()));
            if (element.equals("dialect"))
                defaultconfig.put("DatabaseDialect", new VarCouple(VARTYPE.DATABASEDIALECT, currentnode.getText()));
                defaultconfig.add(new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect",currentnode.getText()));
        }

        return defaultconfig;

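getDefaultDatabaseConfiguration() now returns a List<StatisticalType> of DatabaseType entries read from the hibernate-style connection file instead of a HashMap of VarCouple values. A minimal usage sketch; the class name and the configuration file path below are hypothetical placeholders:

package org.gcube.dataanalysis.ecoengine.test;

import java.util.List;

import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;

public class PrintDefaultDatabaseConfiguration {
    public static void main(String[] args) throws Exception {
        // the path below stands in for the connection configuration file parsed by the factory
        List<StatisticalType> defaults = DatabaseFactory.getDefaultDatabaseConfiguration("./cfg/DestinationDBHibernate.cfg.xml");
        for (StatisticalType parameter : defaults)
            System.out.println(parameter);
    }
}
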
@@ -19,20 +19,7 @@ public class TrainingSetsGenerator {
    float threshold = 0.1f;
    String configPath = "./cfg/";

    public HashMap<String, VarCouple> getInputParameters() {

        HashMap<String, VarCouple> parameters = new HashMap<String, VarCouple>();

        parameters.put("casesTable", new VarCouple(VARTYPE.STRING, ""));
        parameters.put("columnKeyName", new VarCouple(VARTYPE.STRING, ""));

        parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, ""));
        parameters.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, ""));
        parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, ""));
        parameters.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, ""));

        return parameters;
    }

    private int calculateNumberOfPoints(String table) {

@@ -1,22 +0,0 @@
package org.gcube.dataanalysis.ecoengine.utils;

public enum VARTYPE {
    STRING,
    EPR_LIST,
    INFRA,
    SERVICE,
    DATABASEUSERNAME,
    DATABASEPASSWORD,
    DATABASEURL,
    DATABASEDRIVER,
    DATABASEDIALECT,
    CONSTANT,
    RANDOM,
    HSPEN,
    HCAF,
    HSPEC,
    OCCURRENCE,
    MINMAXLAT,
    FILE,
    MAP
}

@@ -1,26 +0,0 @@
package org.gcube.dataanalysis.ecoengine.utils;

public class VarCouple {

    VARTYPE first;
    private String second;

    public VarCouple(VARTYPE first, String second){
        this.first = first;
        this.second = second;
    }

    public String getSecond(){
        return second;
    }

    public String getFirst(){
        return first.toString();
    }

    public String toString(){
        return "("+first+","+second+")";
    }

}
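With VARTYPE and VarCouple removed, every declaration that used them now goes through a StatisticalType subclass. An illustrative mapping, assuming nothing beyond the constructors already shown in the hunks above (the class below is hypothetical):

package org.gcube.dataanalysis.ecoengine.test;

import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;

public class VarCoupleMigrationExample {
    // Removed style (no longer compiles after this commit):
    //   parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, ""));
    // New style, exactly as AquamapsSuitable.getInputParameters() declares it above:
    public static StatisticalType databaseUserParameter() {
        return new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
    }
}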