git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngine@58772 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
parent
da08da01f1
commit
00f8dbdb88
|
@ -6,11 +6,11 @@ import java.util.List;
|
|||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
|
||||
|
@ -387,35 +387,19 @@ public class DBScan implements Clusterer{
|
|||
List<StatisticalType> parameters = new ArrayList<StatisticalType>();
|
||||
List<TableTemplates> templateOccs = new ArrayList<TableTemplates>();
|
||||
templateOccs.add(TableTemplates.GENERIC);
|
||||
|
||||
InputTable p1 = new InputTable(templateOccs,"OccurrencePointsTable","Occurrence Points Table","occurrences");
|
||||
// PrimitiveType p2 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "FeaturesColumnNames","Column Names for the features comma separated","x,y");
|
||||
|
||||
PrimitiveTypesList p2 = new PrimitiveTypesList(PrimitiveTypes.STRING, "FeaturesColumnNames","Column Names for the features",false);
|
||||
|
||||
ColumnTypesList p2 = new ColumnTypesList ("OccurrencePointsTable","FeaturesColumnNames", "Column Names for the features", false);
|
||||
ServiceType p3 = new ServiceType(ServiceParameters.RANDOMSTRING, "OccurrencePointsClusterTable","Table name of the distribution","occCluster_");
|
||||
PrimitiveType p4 = new PrimitiveType(Integer.class.getName(), null, PrimitiveTypes.NUMBER, "epsilon","DBScan epsilon parameter","10");
|
||||
PrimitiveType p5 = new PrimitiveType(Integer.class.getName(), null, PrimitiveTypes.NUMBER, "min_points","DBScan minimum points parameter (identifies outliers)","1");
|
||||
|
||||
|
||||
DatabaseType p6 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
|
||||
DatabaseType p7 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
|
||||
DatabaseType p8 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver");
|
||||
DatabaseType p9 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
|
||||
DatabaseType p10 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect");
|
||||
DatabaseType p11 = new DatabaseType(DatabaseParameters.DATABASETABLESPACE, "DatabaseTableSpace", "db dialect");
|
||||
|
||||
parameters.add(p1);
|
||||
parameters.add(p2);
|
||||
parameters.add(p3);
|
||||
parameters.add(p4);
|
||||
parameters.add(p5);
|
||||
parameters.add(p6);
|
||||
parameters.add(p7);
|
||||
parameters.add(p8);
|
||||
parameters.add(p9);
|
||||
parameters.add(p10);
|
||||
parameters.add(p11);
|
||||
|
||||
DatabaseType.addDefaultDBPars(parameters);
|
||||
|
||||
return parameters;
|
||||
}
|
||||
|
@ -436,7 +420,10 @@ public class DBScan implements Clusterer{
|
|||
|
||||
@Override
|
||||
public String getResources() {
|
||||
if ((status>0)&&(status<100))
|
||||
return ResourceFactory.getResources(100f);
|
||||
else
|
||||
return ResourceFactory.getResources(0f);
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -5,10 +5,10 @@ import java.util.List;
|
|||
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
|
||||
|
@ -114,17 +114,9 @@ public class KMeans extends DBScan{
|
|||
List<TableTemplates> templateOccs = new ArrayList<TableTemplates>();
|
||||
templateOccs.add(TableTemplates.GENERIC);
|
||||
InputTable p1 = new InputTable(templateOccs,"OccurrencePointsTable","Occurrence Points Table","occurrences");
|
||||
PrimitiveTypesList p2 = new PrimitiveTypesList(PrimitiveTypes.STRING, "FeaturesColumnNames","Column Names for the features",false);
|
||||
ColumnTypesList p2 = new ColumnTypesList ("OccurrencePointsTable","FeaturesColumnNames", "Column Names for the features", false);
|
||||
|
||||
ServiceType p3 = new ServiceType(ServiceParameters.RANDOMSTRING, "OccurrencePointsClusterTable","Table name of the distribution","occCluster_");
|
||||
|
||||
DatabaseType p6 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
|
||||
DatabaseType p7 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
|
||||
DatabaseType p8 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver");
|
||||
DatabaseType p9 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
|
||||
DatabaseType p10 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect");
|
||||
DatabaseType p11 = new DatabaseType(DatabaseParameters.DATABASETABLESPACE, "DatabaseTableSpace", "db dialect");
|
||||
|
||||
PrimitiveType p4 = new PrimitiveType(Integer.class.getName(), null, PrimitiveTypes.NUMBER, "k","Expected Number of Clusters","3");
|
||||
PrimitiveType p5 = new PrimitiveType(Integer.class.getName(), null, PrimitiveTypes.NUMBER, "max_runs","Max runs of the clustering procedure","10");
|
||||
PrimitiveType p12 = new PrimitiveType(Integer.class.getName(), null, PrimitiveTypes.NUMBER, "max_optimization_steps","Max number of internal optimization steps","5");
|
||||
|
@ -135,15 +127,10 @@ public class KMeans extends DBScan{
|
|||
parameters.add(p3);
|
||||
parameters.add(p4);
|
||||
parameters.add(p5);
|
||||
parameters.add(p6);
|
||||
parameters.add(p7);
|
||||
parameters.add(p8);
|
||||
parameters.add(p9);
|
||||
parameters.add(p10);
|
||||
parameters.add(p11);
|
||||
parameters.add(p12);
|
||||
parameters.add(p13);
|
||||
|
||||
DatabaseType.addDefaultDBPars(parameters);
|
||||
return parameters;
|
||||
}
|
||||
|
||||
|
|
|
@ -8,10 +8,10 @@ import java.util.List;
|
|||
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
|
||||
|
@ -120,7 +120,7 @@ public class XMeansWrapper extends DBScan {
|
|||
templateOccs.add(TableTemplates.GENERIC);
|
||||
|
||||
InputTable p1 = new InputTable(templateOccs, "OccurrencePointsTable", "Occurrence Points Table", "occurrences");
|
||||
PrimitiveTypesList p2 = new PrimitiveTypesList(PrimitiveTypes.STRING, "FeaturesColumnNames", "Column Names for the features", false);
|
||||
ColumnTypesList p2 = new ColumnTypesList ("OccurrencePointsTable","FeaturesColumnNames", "Column Names for the features", false);
|
||||
ServiceType p3 = new ServiceType(ServiceParameters.RANDOMSTRING, "OccurrencePointsClusterTable", "Table name of the distribution", "occCluster_");
|
||||
|
||||
PrimitiveType p4 = new PrimitiveType(Integer.class.getName(), null, PrimitiveTypes.NUMBER, "maxIterations", "XMeans max number of overall iterations of the clustering learning", "10");
|
||||
|
@ -128,27 +128,15 @@ public class XMeansWrapper extends DBScan {
|
|||
PrimitiveType p12 = new PrimitiveType(Integer.class.getName(), null, PrimitiveTypes.NUMBER, "maxClusters", "Maximum number of clusters to produce", "50");
|
||||
PrimitiveType p13 = new PrimitiveType(Integer.class.getName(), null, PrimitiveTypes.NUMBER, "min_points", "Number of points which define an outlier set", "2");
|
||||
|
||||
DatabaseType p6 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
|
||||
DatabaseType p7 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
|
||||
DatabaseType p8 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver");
|
||||
DatabaseType p9 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
|
||||
DatabaseType p10 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect");
|
||||
DatabaseType p11 = new DatabaseType(DatabaseParameters.DATABASETABLESPACE, "DatabaseTableSpace", "db dialect");
|
||||
|
||||
parameters.add(p1);
|
||||
parameters.add(p2);
|
||||
parameters.add(p3);
|
||||
parameters.add(p4);
|
||||
parameters.add(p5);
|
||||
parameters.add(p6);
|
||||
parameters.add(p7);
|
||||
parameters.add(p8);
|
||||
parameters.add(p9);
|
||||
parameters.add(p10);
|
||||
parameters.add(p11);
|
||||
parameters.add(p12);
|
||||
parameters.add(p13);
|
||||
|
||||
DatabaseType.addDefaultDBPars(parameters);
|
||||
return parameters;
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,24 @@
|
|||
package org.gcube.dataanalysis.ecoengine.datatypes;
|
||||
|
||||
|
||||
//name of columns
|
||||
public class ColumnType extends StatisticalType{
|
||||
|
||||
private String tableName;
|
||||
|
||||
public ColumnType(String tableName, String name, String description, String defaultValue, boolean optional) {
|
||||
super(name, description, defaultValue, optional);
|
||||
this.tableName=tableName;
|
||||
}
|
||||
|
||||
public String getTableName() {
|
||||
return tableName;
|
||||
}
|
||||
|
||||
public void setTableName(String tableName) {
|
||||
this.tableName = tableName;
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
|
@ -0,0 +1,27 @@
|
|||
package org.gcube.dataanalysis.ecoengine.datatypes;

import java.util.ArrayList;
import java.util.List;

/**
 * Statistical parameter type representing a list of column names taken from a
 * single input table (identified by the table parameter name).
 */
public class ColumnTypesList extends StatisticalType {

	// name of the input-table parameter the columns belong to
	String tableName;
	// columns accumulated via add(); initialized eagerly so add() cannot NPE
	// (the original left this field null, making the first add() throw)
	protected List<ColumnType> list = new ArrayList<ColumnType>();

	/**
	 * @param tableName   name of the input-table parameter the columns refer to
	 * @param name        parameter name
	 * @param description human-readable description
	 * @param optional    whether the parameter may be omitted
	 */
	public ColumnTypesList(String tableName, String name, String description, boolean optional) {
		super(name, description, optional);
		this.tableName = tableName;
	}

	/** Adds a column descriptor to the list. */
	public void add(ColumnType st) {
		list.add(st);
	}

	/** @return the columns added so far (never null) */
	public List<ColumnType> getList() {
		return list;
	}

	/** @return the name of the input-table parameter the columns belong to */
	public String getTableName() {
		return tableName;
	}

	/**
	 * @return same as {@link #getTableName()}
	 * @deprecated misspelled accessor kept for backward compatibility;
	 *             use {@link #getTableName()} instead
	 */
	@Deprecated
	public String getTabelName() {
		return getTableName();
	}

}
|
|
@ -1,5 +1,7 @@
|
|||
package org.gcube.dataanalysis.ecoengine.datatypes;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
|
||||
|
||||
public class DatabaseType extends StatisticalType{
|
||||
|
@ -29,4 +31,20 @@ public class DatabaseType extends StatisticalType{
|
|||
this.databaseParameter = databaseParameters;
|
||||
}
|
||||
|
||||
public static void addDefaultDBPars(List<StatisticalType> parameters){
|
||||
DatabaseType p1 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
|
||||
DatabaseType p2 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
|
||||
DatabaseType p3 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver");
|
||||
DatabaseType p4 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
|
||||
DatabaseType p5 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect");
|
||||
DatabaseType p6 = new DatabaseType(DatabaseParameters.DATABASETABLESPACE, "DatabaseTableSpace", "db dialect");
|
||||
|
||||
parameters.add(p1);
|
||||
parameters.add(p2);
|
||||
parameters.add(p3);
|
||||
parameters.add(p4);
|
||||
parameters.add(p5);
|
||||
parameters.add(p6);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -5,10 +5,11 @@ public enum TableTemplates {
|
|||
HSPEN,
|
||||
HCAF,
|
||||
HSPEC,
|
||||
OCCURRENCE,
|
||||
OCCURRENCE_AQUAMAPS,
|
||||
MINMAXLAT,
|
||||
TRAININGSET,
|
||||
TESTSET,
|
||||
GENERIC,
|
||||
CLUSTER
|
||||
CLUSTER,
|
||||
OCCURRENCE_SPECIES
|
||||
}
|
||||
|
|
|
@ -3,15 +3,14 @@ package org.gcube.dataanalysis.ecoengine.evaluation;
|
|||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis;
|
||||
|
@ -54,17 +53,12 @@ public class DiscrepancyAnalysis extends DataAnalysis {
|
|||
templates.add(TableTemplates.TESTSET);
|
||||
InputTable p1 = new InputTable(templates,"FirstTable","First Table");
|
||||
InputTable p2 = new InputTable(templates,"SecondTable","Second Table");
|
||||
PrimitiveType p3 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "FirstTableCsquareColumn","the csquares column name in the first table ","csquarecode");
|
||||
PrimitiveType p4 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "SecondTableCsquareColumn","the csquares column name in the second table","csquarecode");
|
||||
PrimitiveType p5 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "FirstTableProbabilityColumn","the probability column in the first table","probability");
|
||||
PrimitiveType p13 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "SecondTableProbabilityColumn","the probability column in the second table","probability");
|
||||
ColumnType p3 = new ColumnType("FirstTable", "FirstTableCsquareColumn", "the csquares column name in the first table", "csquarecode", false);
|
||||
ColumnType p4 = new ColumnType("SecondTable", "SecondTableCsquareColumn", "the csquares column name in the second table", "csquarecode", false);
|
||||
ColumnType p5 = new ColumnType("FirstTable", "FirstTableProbabilityColumn", "the probability column in the first table", "probability", false);
|
||||
ColumnType p13 = new ColumnType("SecondTable", "SecondTableProbabilityColumn", "the probability column in the second table", "probability", false);
|
||||
PrimitiveType p6 = new PrimitiveType(Float.class.getName(), null, PrimitiveTypes.NUMBER, "ComparisonThreshold","the comparison threshold","0.1");
|
||||
PrimitiveType p7 = new PrimitiveType(Integer.class.getName(), null, PrimitiveTypes.NUMBER, "MaxSamples","the comparison threshold","10000");
|
||||
DatabaseType p8 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
|
||||
DatabaseType p9 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
|
||||
DatabaseType p10 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver");
|
||||
DatabaseType p11 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
|
||||
DatabaseType p12 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect");
|
||||
|
||||
parameters.add(p1);
|
||||
parameters.add(p2);
|
||||
|
@ -74,12 +68,8 @@ public class DiscrepancyAnalysis extends DataAnalysis {
|
|||
parameters.add(p13);
|
||||
parameters.add(p6);
|
||||
parameters.add(p7);
|
||||
parameters.add(p8);
|
||||
parameters.add(p9);
|
||||
parameters.add(p10);
|
||||
parameters.add(p11);
|
||||
parameters.add(p12);
|
||||
|
||||
DatabaseType.addDefaultDBPars(parameters);
|
||||
return parameters;
|
||||
}
|
||||
|
||||
|
|
|
@ -8,6 +8,7 @@ import java.util.Map;
|
|||
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
|
@ -50,24 +51,21 @@ public class DistributionQualityAnalysis extends DataAnalysis {
|
|||
templates.add(TableTemplates.TESTSET);
|
||||
|
||||
List<TableTemplates> templatesOccurrences = new ArrayList<TableTemplates>();
|
||||
templatesOccurrences.add(TableTemplates.OCCURRENCE);
|
||||
templatesOccurrences.add(TableTemplates.OCCURRENCE_AQUAMAPS);
|
||||
templatesOccurrences.add(TableTemplates.TRAININGSET);
|
||||
templatesOccurrences.add(TableTemplates.TESTSET);
|
||||
|
||||
InputTable p1 = new InputTable(templatesOccurrences,"PositiveCasesTable","A Table containing positive cases");
|
||||
InputTable p2 = new InputTable(templatesOccurrences,"NegativeCasesTable","A Table containing negative cases");
|
||||
PrimitiveType p3 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "PositiveCasesTableKeyColumn","Positive Cases Table Key Column","csquarecode");
|
||||
PrimitiveType p4 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "NegativeCasesTableKeyColumn","Negative Cases Table Key Column","csquarecode");
|
||||
InputTable p5 = new InputTable(templates,"DistributionTable","A probability distribution table");
|
||||
PrimitiveType p6 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "DistributionTableKeyColumn","Distribution Table Key Column","csquarecode");
|
||||
PrimitiveType p7 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "DistributionTableProbabilityColumn","Distribution Table Probability Column","csquarecode");
|
||||
|
||||
ColumnType p3 = new ColumnType("PositiveCasesTable", "PositiveCasesTableKeyColumn", "Positive Cases Table Key Column", "csquarecode", false);
|
||||
ColumnType p4 = new ColumnType("NegativeCasesTable", "NegativeCasesTableKeyColumn", "Negative Cases Table Key Column", "csquarecode", false);
|
||||
ColumnType p6 = new ColumnType("DistributionTable", "DistributionTableKeyColumn", "Distribution Table Key Column", "csquarecode", false);
|
||||
ColumnType p7 = new ColumnType("DistributionTable", "DistributionTableProbabilityColumn", "Distribution Table Probability Column", "probability", false);
|
||||
|
||||
PrimitiveType p8 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "PositiveThreshold","Positive acceptance threshold","0.8");
|
||||
PrimitiveType p9 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "NegativeThreshold","Negative acceptance threshold","0.3");
|
||||
DatabaseType p10 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
|
||||
DatabaseType p11 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
|
||||
DatabaseType p12 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver");
|
||||
DatabaseType p13 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
|
||||
DatabaseType p14 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect");
|
||||
|
||||
parameters.add(p1);
|
||||
parameters.add(p2);
|
||||
|
@ -78,11 +76,8 @@ public class DistributionQualityAnalysis extends DataAnalysis {
|
|||
parameters.add(p7);
|
||||
parameters.add(p8);
|
||||
parameters.add(p9);
|
||||
parameters.add(p10);
|
||||
parameters.add(p11);
|
||||
parameters.add(p12);
|
||||
parameters.add(p13);
|
||||
parameters.add(p14);
|
||||
|
||||
DatabaseType.addDefaultDBPars(parameters);
|
||||
|
||||
return parameters;
|
||||
}
|
||||
|
|
|
@ -8,6 +8,7 @@ import java.util.Map;
|
|||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.MathFunctions;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
|
@ -20,7 +21,6 @@ import org.gcube.dataanalysis.ecoengine.models.cores.pca.PrincipalComponentAnaly
|
|||
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.Operations;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.Transformations;
|
||||
import org.hibernate.SessionFactory;
|
||||
|
||||
public class HabitatRepresentativeness extends DataAnalysis {
|
||||
|
||||
|
@ -45,7 +45,7 @@ public class HabitatRepresentativeness extends DataAnalysis {
|
|||
templates.add(TableTemplates.TESTSET);
|
||||
|
||||
List<TableTemplates> templatesOccurrences = new ArrayList<TableTemplates>();
|
||||
templatesOccurrences.add(TableTemplates.OCCURRENCE);
|
||||
templatesOccurrences.add(TableTemplates.OCCURRENCE_AQUAMAPS);
|
||||
templatesOccurrences.add(TableTemplates.TRAININGSET);
|
||||
templatesOccurrences.add(TableTemplates.TESTSET);
|
||||
|
||||
|
@ -53,23 +53,17 @@ public class HabitatRepresentativeness extends DataAnalysis {
|
|||
PrimitiveType p2 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "ProjectingAreaFeaturesOptionalCondition","optional filter for taking area rows","oceanarea>0",true);
|
||||
InputTable p3 = new InputTable(templatesOccurrences,"PositiveCasesTable","A Table containing positive cases");
|
||||
InputTable p4 = new InputTable(templatesOccurrences,"NegativeCasesTable","A Table containing negative cases");
|
||||
PrimitiveType p5 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "FeaturesColumns","fetures columns names separated by comma","depthmean,depthmax,depthmin, sstanmean,sbtanmean,salinitymean,salinitybmean, primprodmean,iceconann,landdist,oceanarea");
|
||||
DatabaseType p6 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
|
||||
DatabaseType p7 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
|
||||
DatabaseType p8 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver");
|
||||
DatabaseType p9 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
|
||||
DatabaseType p10 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect");
|
||||
|
||||
// PrimitiveType p5 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "FeaturesColumns","fetures columns names separated by comma","depthmean,depthmax,depthmin, sstanmean,sbtanmean,salinitymean,salinitybmean, primprodmean,iceconann,landdist,oceanarea");
|
||||
ColumnTypesList p5 = new ColumnTypesList ("PositiveCasesTable","FeaturesColumns", "Features columns", false);
|
||||
|
||||
parameters.add(p1);
|
||||
parameters.add(p2);
|
||||
parameters.add(p3);
|
||||
parameters.add(p4);
|
||||
parameters.add(p5);
|
||||
parameters.add(p6);
|
||||
parameters.add(p7);
|
||||
parameters.add(p8);
|
||||
parameters.add(p9);
|
||||
parameters.add(p10);
|
||||
|
||||
DatabaseType.addDefaultDBPars(parameters);
|
||||
return parameters;
|
||||
}
|
||||
|
||||
|
@ -203,7 +197,7 @@ public class HabitatRepresentativeness extends DataAnalysis {
|
|||
status = 0;
|
||||
String projectingAreaTable = config.getParam("ProjectingAreaTable");
|
||||
String projectingAreaFeaturesOptionalCondition = config.getParam("ProjectingAreaFeaturesOptionalCondition");
|
||||
String FeaturesColumns = config.getParam("FeaturesColumns");
|
||||
String FeaturesColumns = config.getParam("FeaturesColumns").replace(AlgorithmConfiguration.getListSeparator(), ",");
|
||||
String positiveCasesTable = config.getParam("PositiveCasesTable");
|
||||
String negativeCasesTable = config.getParam("NegativeCasesTable");
|
||||
connection = AlgorithmConfiguration.getConnectionFromConfig(config);
|
||||
|
|
|
@ -15,6 +15,7 @@ import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
|||
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
|
||||
|
@ -48,12 +49,12 @@ public class ModelAquamapsNN implements Model {
|
|||
public List<StatisticalType> getInputParameters() {
|
||||
List<StatisticalType> parameters = new ArrayList<StatisticalType>();
|
||||
List<TableTemplates> templatesOccurrences = new ArrayList<TableTemplates>();
|
||||
templatesOccurrences.add(TableTemplates.OCCURRENCE);
|
||||
templatesOccurrences.add(TableTemplates.OCCURRENCE_AQUAMAPS);
|
||||
|
||||
InputTable p1 = new InputTable(templatesOccurrences,"AbsenceDataTable","A Table containing absence points");
|
||||
InputTable p2 = new InputTable(templatesOccurrences,"PresenceDataTable","A Table containing positive occurrences");
|
||||
PrimitiveType p3 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "SpeciesName","Species Code of the fish the NN will correspond to","Fis-10407");
|
||||
PrimitiveType p4 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "LayersNeurons","a list of neurons number for each inner layer separated by comma","100,2");
|
||||
PrimitiveTypesList p4 = new PrimitiveTypesList(PrimitiveTypes.NUMBER,"LayersNeurons","a list of neurons number for each inner layer separated by comma",false);
|
||||
|
||||
DatabaseType p5 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
|
||||
DatabaseType p6 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
|
||||
|
@ -118,7 +119,7 @@ public class ModelAquamapsNN implements Model {
|
|||
String layersNeurons$ = Input.getParam("LayersNeurons");
|
||||
if ((layersNeurons$!=null)&&(layersNeurons$.length()>0))
|
||||
{
|
||||
String [] split = layersNeurons$.split(",");
|
||||
String [] split = layersNeurons$.split(AlgorithmConfiguration.getListSeparator());
|
||||
layersNeurons = new int[split.length];
|
||||
for (int i = 0;i<split.length;i++){
|
||||
layersNeurons[i] = Integer.parseInt(split[i]);
|
||||
|
|
|
@ -47,7 +47,7 @@ public class ModelAquamapsNNNS implements Model {
|
|||
public List<StatisticalType> getInputParameters() {
|
||||
List<StatisticalType> parameters = new ArrayList<StatisticalType>();
|
||||
List<TableTemplates> templatesOccurrences = new ArrayList<TableTemplates>();
|
||||
templatesOccurrences.add(TableTemplates.OCCURRENCE);
|
||||
templatesOccurrences.add(TableTemplates.OCCURRENCE_AQUAMAPS);
|
||||
|
||||
InputTable p1 = new InputTable(templatesOccurrences,"AbsenceDataTable","A Table containing absence points");
|
||||
InputTable p2 = new InputTable(templatesOccurrences,"PresenceDataTable","A Table containing positive occurrences");
|
||||
|
|
|
@ -432,7 +432,7 @@ public class ModelHSPEN implements Model {
|
|||
public List<StatisticalType> getInputParameters() {
|
||||
List<StatisticalType> parameters = new ArrayList<StatisticalType>();
|
||||
List<TableTemplates> templatesOccurrences = new ArrayList<TableTemplates>();
|
||||
templatesOccurrences.add(TableTemplates.OCCURRENCE);
|
||||
templatesOccurrences.add(TableTemplates.OCCURRENCE_AQUAMAPS);
|
||||
List<TableTemplates> templateHspen = new ArrayList<TableTemplates>();
|
||||
templateHspen.add(TableTemplates.HSPEN);
|
||||
List<TableTemplates> templateHcaf = new ArrayList<TableTemplates>();
|
||||
|
|
|
@ -251,7 +251,7 @@ public class AquamapsSuitable implements SpatialProbabilityDistributionTable{
|
|||
public List<StatisticalType> getInputParameters() {
|
||||
List<StatisticalType> parameters = new ArrayList<StatisticalType>();
|
||||
List<TableTemplates> templatesOccurrence = new ArrayList<TableTemplates>();
|
||||
templatesOccurrence.add(TableTemplates.OCCURRENCE);
|
||||
templatesOccurrence.add(TableTemplates.OCCURRENCE_AQUAMAPS);
|
||||
List<TableTemplates> templateHspen = new ArrayList<TableTemplates>();
|
||||
templateHspen.add(TableTemplates.HSPEN);
|
||||
List<TableTemplates> templateHcaf = new ArrayList<TableTemplates>();
|
||||
|
@ -264,26 +264,14 @@ public class AquamapsSuitable implements SpatialProbabilityDistributionTable{
|
|||
InputTable p5 = new InputTable(templatesOccurrence,"OccurrencePointsTable","The Occurrence points table for calculating the bounding box","occurrencecells");
|
||||
PrimitiveType p6 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.CONSTANT, "CreateTable","Create New Table for each computation","true");
|
||||
|
||||
DatabaseType p7 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
|
||||
DatabaseType p8 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
|
||||
DatabaseType p9 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver");
|
||||
DatabaseType p10 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
|
||||
DatabaseType p11 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect");
|
||||
DatabaseType p12 = new DatabaseType(DatabaseParameters.DATABASETABLESPACE, "DatabaseTableSpace", "db tablespace");
|
||||
|
||||
parameters.add(p1);
|
||||
parameters.add(p2);
|
||||
parameters.add(p3);
|
||||
parameters.add(p4);
|
||||
parameters.add(p5);
|
||||
parameters.add(p6);
|
||||
parameters.add(p7);
|
||||
parameters.add(p8);
|
||||
parameters.add(p9);
|
||||
parameters.add(p10);
|
||||
parameters.add(p11);
|
||||
parameters.add(p12);
|
||||
|
||||
DatabaseType.addDefaultDBPars(parameters);
|
||||
return parameters;
|
||||
}
|
||||
|
||||
|
|
|
@ -12,8 +12,9 @@ public class RegressionTestTransducers {
|
|||
public static void main(String[] args) throws Exception {
|
||||
|
||||
System.out.println("TEST 1");
|
||||
List<Transducerer> trans = null;
|
||||
|
||||
List<Transducerer> trans = TransducerersFactory.getTransducerers(testConfigLocal());
|
||||
trans = TransducerersFactory.getTransducerers(testConfigLocal());
|
||||
trans.get(0).init();
|
||||
Regressor.process(trans.get(0));
|
||||
trans = null;
|
||||
|
@ -32,6 +33,11 @@ public static void main(String[] args) throws Exception {
|
|||
trans.get(0).init();
|
||||
Regressor.process(trans.get(0));
|
||||
trans = null;
|
||||
|
||||
trans = TransducerersFactory.getTransducerers(testConfigLocal5());
|
||||
trans.get(0).init();
|
||||
Regressor.process(trans.get(0));
|
||||
trans = null;
|
||||
}
|
||||
|
||||
|
||||
|
@ -82,4 +88,25 @@ public static void main(String[] args) throws Exception {
|
|||
return config;
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration testConfigLocal5() {
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
config.setAgent("OCCURRENCES_MERGER");
|
||||
|
||||
config.setParam("longitudeColumn", "decimallongitude");
|
||||
config.setParam("latitudeColumn", "decimallatitude");
|
||||
config.setParam("recordedByColumn", "recordedby");
|
||||
config.setParam("scientificNameColumn", "scientificname");
|
||||
config.setParam("eventDateColumn", "eventdate");
|
||||
config.setParam("lastModificationColumn", "modified");
|
||||
config.setParam("rightTableName", "whitesharkoccurrences2");
|
||||
config.setParam("leftTableName", "whitesharkoccurrences1");
|
||||
config.setParam("mergedTableName", "whitesharkoccurrencesmerged");
|
||||
config.setParam("spatialTolerance", "0.5");
|
||||
config.setParam("confidence", "0.8");
|
||||
|
||||
|
||||
return config;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
package org.gcube.dataanalysis.ecoengine.test.regression;
|
||||
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
|
||||
|
||||
|
@ -24,8 +23,9 @@ public class Regressor {
|
|||
Thread.sleep(1000);
|
||||
}
|
||||
} else
|
||||
AnalysisLogger.getLogger().trace("Generator Algorithm Not Supported");
|
||||
System.out.println("Generator Algorithm Not Supported" );
|
||||
|
||||
System.out.println("-|"+agent.getStatus());
|
||||
}
|
||||
|
||||
public class ThreadCalculator implements Runnable {
|
||||
|
|
|
@ -35,28 +35,15 @@ public class BioClimateHCAFTransducer extends BioClimateHSPECTransducer{
|
|||
@Override
|
||||
public List<StatisticalType> getInputParameters() {
|
||||
List<StatisticalType> parameters = new ArrayList<StatisticalType>();
|
||||
|
||||
DatabaseType p1 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
|
||||
DatabaseType p2 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
|
||||
DatabaseType p3 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver");
|
||||
DatabaseType p4 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
|
||||
DatabaseType p5 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect");
|
||||
DatabaseType p6 = new DatabaseType(DatabaseParameters.DATABASETABLESPACE, "DatabaseTableSpace", "db dialect");
|
||||
|
||||
List<TableTemplates> templateHspec = new ArrayList<TableTemplates>();
|
||||
templateHspec.add(TableTemplates.HCAF);
|
||||
TablesList p7 = new TablesList(templateHspec, "HCAF_TABLE_LIST", "List of HCAF tables to analyze", false);
|
||||
PrimitiveTypesList p8 = new PrimitiveTypesList(PrimitiveTypes.STRING, "HCAF_TABLE_NAMES", "List of HCAF table names to be used as labels", false);
|
||||
|
||||
parameters.add(p1);
|
||||
parameters.add(p2);
|
||||
parameters.add(p3);
|
||||
parameters.add(p4);
|
||||
parameters.add(p5);
|
||||
parameters.add(p6);
|
||||
parameters.add(p7);
|
||||
parameters.add(p8);
|
||||
|
||||
DatabaseType.addDefaultDBPars(parameters);
|
||||
return parameters;
|
||||
}
|
||||
|
||||
|
|
|
@ -72,13 +72,6 @@ public class BioClimateHSPECTransducer implements Transducerer{
|
|||
public List<StatisticalType> getInputParameters() {
|
||||
List<StatisticalType> parameters = new ArrayList<StatisticalType>();
|
||||
|
||||
DatabaseType p1 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
|
||||
DatabaseType p2 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
|
||||
DatabaseType p3 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver");
|
||||
DatabaseType p4 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
|
||||
DatabaseType p5 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect");
|
||||
DatabaseType p6 = new DatabaseType(DatabaseParameters.DATABASETABLESPACE, "DatabaseTableSpace", "db dialect");
|
||||
|
||||
List<TableTemplates> templateHspec = new ArrayList<TableTemplates>();
|
||||
templateHspec.add(TableTemplates.HSPEC);
|
||||
TablesList p7 = new TablesList(templateHspec, "HSPEC_TABLE_LIST", "List of HSPEC tables to analyze", false);
|
||||
|
@ -86,16 +79,10 @@ public class BioClimateHSPECTransducer implements Transducerer{
|
|||
|
||||
PrimitiveType p9 = new PrimitiveType(Double.class.getName(), null, PrimitiveTypes.NUMBER, "Threshold", "A threshold of probability over which the abundancy per species will be calculated","0.5");
|
||||
|
||||
parameters.add(p1);
|
||||
parameters.add(p2);
|
||||
parameters.add(p3);
|
||||
parameters.add(p4);
|
||||
parameters.add(p5);
|
||||
parameters.add(p6);
|
||||
parameters.add(p7);
|
||||
parameters.add(p8);
|
||||
parameters.add(p9);
|
||||
|
||||
DatabaseType.addDefaultDBPars(parameters);
|
||||
return parameters;
|
||||
}
|
||||
|
||||
|
|
|
@ -35,28 +35,14 @@ public class BioClimateHSPENTransducer extends BioClimateHSPECTransducer{
|
|||
@Override
|
||||
public List<StatisticalType> getInputParameters() {
|
||||
List<StatisticalType> parameters = new ArrayList<StatisticalType>();
|
||||
|
||||
DatabaseType p1 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
|
||||
DatabaseType p2 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
|
||||
DatabaseType p3 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver");
|
||||
DatabaseType p4 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
|
||||
DatabaseType p5 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect");
|
||||
DatabaseType p6 = new DatabaseType(DatabaseParameters.DATABASETABLESPACE, "DatabaseTableSpace", "db dialect");
|
||||
|
||||
List<TableTemplates> templateHspec = new ArrayList<TableTemplates>();
|
||||
templateHspec.add(TableTemplates.HSPEN);
|
||||
TablesList p7 = new TablesList(templateHspec, "HSPEN_TABLE_LIST", "List of HSPEN tables containing the species for which the salinity will be analyzed", false);
|
||||
PrimitiveTypesList p8 = new PrimitiveTypesList(PrimitiveTypes.STRING, "HSPEN_TABLE_NAMES", "List of HSPEN table names to be used as labels", false);
|
||||
|
||||
parameters.add(p1);
|
||||
parameters.add(p2);
|
||||
parameters.add(p3);
|
||||
parameters.add(p4);
|
||||
parameters.add(p5);
|
||||
parameters.add(p6);
|
||||
parameters.add(p7);
|
||||
parameters.add(p8);
|
||||
|
||||
DatabaseType.addDefaultDBPars(parameters);
|
||||
return parameters;
|
||||
}
|
||||
|
||||
|
|
|
@ -1,8 +1,6 @@
|
|||
package org.gcube.dataanalysis.ecoengine.transducers;
|
||||
|
||||
import java.awt.Image;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
|
@ -10,10 +8,9 @@ import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
|
|||
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.TablesList;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.TablesList;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
|
||||
import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.InterpolateTables;
|
||||
|
@ -67,14 +64,6 @@ public class InterpolationTransducer implements Transducerer{
|
|||
@Override
|
||||
public List<StatisticalType> getInputParameters() {
|
||||
List<StatisticalType> parameters = new ArrayList<StatisticalType>();
|
||||
|
||||
DatabaseType p1 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
|
||||
DatabaseType p2 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
|
||||
DatabaseType p3 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver");
|
||||
DatabaseType p4 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
|
||||
DatabaseType p5 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect");
|
||||
DatabaseType p6 = new DatabaseType(DatabaseParameters.DATABASETABLESPACE, "DatabaseTableSpace", "db dialect");
|
||||
|
||||
List<TableTemplates> templates = new ArrayList<TableTemplates>();
|
||||
templates.add(TableTemplates.HCAF);
|
||||
|
||||
|
@ -86,12 +75,6 @@ public class InterpolationTransducer implements Transducerer{
|
|||
|
||||
PrimitiveType p12 = new PrimitiveType(Enum.class.getName(), InterpolateTables.INTERPOLATIONFUNCTIONS.values(), PrimitiveTypes.ENUMERATED, "InterpolationFunction", "The interpolation Function to use",""+InterpolateTables.INTERPOLATIONFUNCTIONS.LINEAR);
|
||||
|
||||
parameters.add(p1);
|
||||
parameters.add(p2);
|
||||
parameters.add(p3);
|
||||
parameters.add(p4);
|
||||
parameters.add(p5);
|
||||
parameters.add(p6);
|
||||
parameters.add(p7);
|
||||
parameters.add(p8);
|
||||
parameters.add(p9);
|
||||
|
@ -99,7 +82,7 @@ public class InterpolationTransducer implements Transducerer{
|
|||
parameters.add(p11);
|
||||
parameters.add(p12);
|
||||
|
||||
|
||||
DatabaseType.addDefaultDBPars(parameters);
|
||||
return parameters;
|
||||
}
|
||||
|
||||
|
|
|
@ -1,12 +1,10 @@
|
|||
package org.gcube.dataanalysis.ecoengine.transducers;
|
||||
|
||||
import java.text.ParseException;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Calendar;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
|
||||
import org.gcube.contentmanagement.graphtools.utils.DateGuesser;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
|
@ -14,10 +12,20 @@ import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory;
|
|||
import org.gcube.contentmanagement.lexicalmatcher.utils.DistanceCalculator;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
|
||||
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory;
|
||||
import org.hibernate.SessionFactory;
|
||||
|
||||
public class OccurrencePointsMerger implements Transducerer {
|
||||
|
@ -54,6 +62,7 @@ public class OccurrencePointsMerger implements Transducerer{
|
|||
protected List<OccurrenceRecord> objectstodelete;
|
||||
protected List<Object> columnsNames;
|
||||
protected SessionFactory dbconnection;
|
||||
protected float status;
|
||||
|
||||
protected class OccurrenceRecord {
|
||||
|
||||
|
@ -68,6 +77,7 @@ public class OccurrencePointsMerger implements Transducerer{
|
|||
|
||||
// Map<String,String> metadata;
|
||||
public List<String> otherValues;
|
||||
|
||||
public OccurrenceRecord() {
|
||||
otherValues = new ArrayList<String>();
|
||||
}
|
||||
|
@ -79,9 +89,10 @@ public class OccurrencePointsMerger implements Transducerer{
|
|||
SimpleDateFormat formatter = new SimpleDateFormat("MM/dd/yy KK:mm:ss a");
|
||||
String formattedDate = formatter.format(new Date(date.getTimeInMillis()));
|
||||
return formattedDate;
|
||||
|
||||
}
|
||||
|
||||
boolean displaydateconvert = true;
|
||||
|
||||
public OccurrenceRecord row2OccurrenceRecord(Object[] row) {
|
||||
OccurrenceRecord record = new OccurrenceRecord();
|
||||
int index = 0;
|
||||
|
@ -91,47 +102,29 @@ public class OccurrencePointsMerger implements Transducerer{
|
|||
String value$ = "" + row[index];
|
||||
if (name$.equalsIgnoreCase(lonFld)) {
|
||||
record.x = Float.parseFloat(value$);
|
||||
}
|
||||
else if (name$.equalsIgnoreCase(latFld)){
|
||||
} else if (name$.equalsIgnoreCase(latFld)) {
|
||||
record.y = Float.parseFloat(value$);
|
||||
}
|
||||
else if (name$.equalsIgnoreCase(recordedByFld)){
|
||||
} else if (name$.equalsIgnoreCase(recordedByFld)) {
|
||||
record.recordedby = value$;
|
||||
}
|
||||
else if (name$.equalsIgnoreCase(scientificNameFld)){
|
||||
} else if (name$.equalsIgnoreCase(scientificNameFld)) {
|
||||
record.scientificName = value$;
|
||||
}
|
||||
else if (name$.equalsIgnoreCase(eventDatFld)){
|
||||
} else if (name$.equalsIgnoreCase(eventDatFld)) {
|
||||
if ((value$ == null) || (value$.length() == 0)) {
|
||||
record.eventdate = null;
|
||||
}
|
||||
else{
|
||||
} else {
|
||||
/*
|
||||
SimpleDateFormat formatter = new SimpleDateFormat("MM/dd/yy KK:mm a",Locale.UK);
|
||||
try {
|
||||
Date d = (Date) formatter.parse(value$);
|
||||
Calendar cal = Calendar.getInstance();
|
||||
cal.setTime(d);
|
||||
System.out.println("From "+value$+"->"+(cal.get(Calendar.MONTH)+1)+" "+cal.get(Calendar.DAY_OF_MONTH)+" "+cal.get(Calendar.YEAR)+" "+cal.get(Calendar.HOUR)+" "+cal.get(Calendar.MINUTE));
|
||||
// System.out.println("->"+cal.toString());
|
||||
} catch (ParseException e) {
|
||||
// TODO Auto-generated catch block
|
||||
e.printStackTrace();
|
||||
}
|
||||
* SimpleDateFormat formatter = new SimpleDateFormat("MM/dd/yy KK:mm a",Locale.UK); try { Date d = (Date) formatter.parse(value$); Calendar cal = Calendar.getInstance(); cal.setTime(d); System.out.println("From "+value$+"->"+(cal.get(Calendar.MONTH)+1)+" "+cal.get(Calendar.DAY_OF_MONTH)+" "+cal.get(Calendar.YEAR)+" "+cal.get(Calendar.HOUR)+" "+cal.get(Calendar.MINUTE)); // System.out.println("->"+cal.toString()); } catch (ParseException e) { // TODO Auto-generated catch block e.printStackTrace(); }
|
||||
*/
|
||||
record.eventdate = DateGuesser.convertDate(value$);
|
||||
if (displaydateconvert)
|
||||
{ AnalysisLogger.getLogger().trace("From "+value$+"->"+convert2conventionalFormat(record.eventdate)+" pattern "+DateGuesser.getPattern(value$));
|
||||
if (displaydateconvert) {
|
||||
AnalysisLogger.getLogger().trace("From " + value$ + "->" + convert2conventionalFormat(record.eventdate) + " pattern " + DateGuesser.getPattern(value$));
|
||||
displaydateconvert = false;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
else if (name$.equalsIgnoreCase(modifDatFld)){
|
||||
} else if (name$.equalsIgnoreCase(modifDatFld)) {
|
||||
record.modifdate = DateGuesser.convertDate(value$);
|
||||
}
|
||||
else
|
||||
} else
|
||||
record.otherValues.add(value$);
|
||||
|
||||
index++;
|
||||
|
@ -151,31 +144,25 @@ public class OccurrencePointsMerger implements Transducerer{
|
|||
String value$ = "''";
|
||||
if (name$.equalsIgnoreCase(lonFld)) {
|
||||
value$ = "'" + record.x + "'";
|
||||
}
|
||||
else if (name$.equalsIgnoreCase(latFld)){
|
||||
} else if (name$.equalsIgnoreCase(latFld)) {
|
||||
value$ = "'" + record.y + "'";
|
||||
}
|
||||
else if (name$.equalsIgnoreCase(recordedByFld)){
|
||||
} else if (name$.equalsIgnoreCase(recordedByFld)) {
|
||||
if (record.recordedby != null)
|
||||
value$ = "'" + record.recordedby + "'";
|
||||
}
|
||||
else if (name$.equalsIgnoreCase(scientificNameFld)){
|
||||
} else if (name$.equalsIgnoreCase(scientificNameFld)) {
|
||||
if (record.scientificName != null)
|
||||
value$ = "'" + record.scientificName + "'";
|
||||
}
|
||||
else if (name$.equalsIgnoreCase(eventDatFld)){
|
||||
} else if (name$.equalsIgnoreCase(eventDatFld)) {
|
||||
if (record.eventdate != null) {
|
||||
value$ = "'" + convert2conventionalFormat(record.eventdate) + "'";
|
||||
// value$="'"+record.eventdate.getTimeInMillis()+"'";
|
||||
}
|
||||
}
|
||||
else if (name$.equalsIgnoreCase(modifDatFld)){
|
||||
} else if (name$.equalsIgnoreCase(modifDatFld)) {
|
||||
if (record.modifdate != null) {
|
||||
value$ = "'" + convert2conventionalFormat(record.modifdate) + "'";
|
||||
// value$="'"+record.modifdate.getTimeInMillis()+"'";
|
||||
}
|
||||
}
|
||||
else{
|
||||
} else {
|
||||
if (record.otherValues != null) {
|
||||
value$ = "'" + record.otherValues.get(k) + "'";
|
||||
k++;
|
||||
|
@ -192,7 +179,6 @@ public class OccurrencePointsMerger implements Transducerer{
|
|||
return buffer.toString();
|
||||
}
|
||||
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
config.setNumberOfResources(1);
|
||||
|
@ -210,6 +196,8 @@ public class OccurrencePointsMerger implements Transducerer{
|
|||
config.setParam(spatialTolerance, "0.5");
|
||||
config.setParam(confidence, "0.8");
|
||||
|
||||
List<TableTemplates> templatesOccurrence = new ArrayList<TableTemplates>();
|
||||
templatesOccurrence.add(TableTemplates.OCCURRENCE_AQUAMAPS);
|
||||
OccurrencePointsMerger occm = new OccurrencePointsMerger();
|
||||
occm.setConfiguration(config);
|
||||
occm.init();
|
||||
|
@ -218,45 +206,76 @@ public class OccurrencePointsMerger implements Transducerer{
|
|||
|
||||
@Override
|
||||
public List<StatisticalType> getInputParameters() {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
List<TableTemplates> templatesOccurrence = new ArrayList<TableTemplates>();
|
||||
templatesOccurrence.add(TableTemplates.OCCURRENCE_SPECIES);
|
||||
// occurrence points tables
|
||||
InputTable p1 = new InputTable(templatesOccurrence, leftTableNameF, "The First table containing the occurrence points", "");
|
||||
InputTable p2 = new InputTable(templatesOccurrence, rightTableNameF, "The Second table containing the occurrence points", "");
|
||||
|
||||
// string parameters
|
||||
ColumnType p3 = new ColumnType(leftTableNameF, longitudeColumn, "column with longitude values", "decimallongitude", false);
|
||||
ColumnType p4 = new ColumnType(leftTableNameF, latitudeColumn, "column with latitude values", "decimallatitude", false);
|
||||
ColumnType p5 = new ColumnType(leftTableNameF, recordedByColumn, "column with RecordedBy values", "recordedby", false);
|
||||
ColumnType p6 = new ColumnType(leftTableNameF, scientificNameColumn, "column with Scientific Names", "scientificname", false);
|
||||
ColumnType p7 = new ColumnType(leftTableNameF, eventDateColumn, "column with EventDate values", "eventdate", false);
|
||||
ColumnType p8 = new ColumnType(leftTableNameF, lastModificationColumn, "column with Modified values", "modified", false);
|
||||
ServiceType p9 = new ServiceType(ServiceParameters.RANDOMSTRING, mergedTableNameF, "Name of the final produced", "mergedoccurrences_");
|
||||
PrimitiveType p10 = new PrimitiveType(Float.class.getName(), null, PrimitiveTypes.NUMBER, spatialTolerance, "The tolerance in degree for assessing that two points could be the same", "0.5");
|
||||
PrimitiveType p11 = new PrimitiveType(Float.class.getName(), null, PrimitiveTypes.NUMBER, confidence, "The overall acceptance similarity threshold over which two points are the same", "0.8");
|
||||
|
||||
List<StatisticalType> inputs = new ArrayList<StatisticalType>();
|
||||
inputs.add(p1);
|
||||
inputs.add(p2);
|
||||
inputs.add(p3);
|
||||
inputs.add(p4);
|
||||
inputs.add(p5);
|
||||
inputs.add(p6);
|
||||
inputs.add(p7);
|
||||
inputs.add(p8);
|
||||
inputs.add(p9);
|
||||
inputs.add(p10);
|
||||
inputs.add(p11);
|
||||
|
||||
DatabaseType.addDefaultDBPars(inputs);
|
||||
return inputs;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public String getResourceLoad() {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public String getResources() {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
if ((status > 0) && (status < 100))
|
||||
return ResourceFactory.getResources(100f);
|
||||
else
|
||||
return ResourceFactory.getResources(0f);
|
||||
}
|
||||
|
||||
ResourceFactory resourceManager;
|
||||
|
||||
@Override
|
||||
public String getResourceLoad() {
|
||||
if (resourceManager == null)
|
||||
resourceManager = new ResourceFactory();
|
||||
return resourceManager.getResourceLoad(1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public float getStatus() {
|
||||
// TODO Auto-generated method stub
|
||||
return 0;
|
||||
return status;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public INFRASTRUCTURE getInfrastructure() {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
return INFRASTRUCTURE.LOCAL;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public StatisticalType getOutput() {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
List<TableTemplates> templatesOccurrence = new ArrayList<TableTemplates>();
|
||||
templatesOccurrence.add(TableTemplates.OCCURRENCE_SPECIES);
|
||||
// occurrence points tables
|
||||
OutputTable p = new OutputTable(templatesOccurrence, mergedTableName, mergedTableName, "The output table containing the merged points");
|
||||
|
||||
return p;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void init() throws Exception {
|
||||
|
@ -276,26 +295,22 @@ public class OccurrencePointsMerger implements Transducerer{
|
|||
|
||||
objectstoinsert = new ArrayList<OccurrencePointsMerger.OccurrenceRecord>();
|
||||
objectstodelete = new ArrayList<OccurrencePointsMerger.OccurrenceRecord>();
|
||||
status = 0;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void setConfiguration(AlgorithmConfiguration config) {
|
||||
this.config = config;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void shutdown() {
|
||||
// TODO Auto-generated method stub
|
||||
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
return "An algorithm for merging two sets of occurrence points of species coming from the Species Discovery Facility of D4Science";
|
||||
}
|
||||
|
||||
protected float probabilityStrings(String first, String second) {
|
||||
|
@ -330,7 +345,20 @@ public class OccurrencePointsMerger implements Transducerer{
|
|||
}
|
||||
|
||||
protected void manageHighProbability(float probability, OccurrenceRecord leftOcc, OccurrenceRecord rightOcc) {
|
||||
//if over the threshold then don't add
|
||||
// insert the most recent:
|
||||
// if it is the left then leave it as is
|
||||
// otherwise put the left in the deletion list and the right in the insertion list
|
||||
|
||||
if (
|
||||
((leftOcc.modifdate!=null)&&(rightOcc.modifdate!=null)&&leftOcc.modifdate.before(rightOcc.modifdate))
|
||||
||
|
||||
(leftOcc.modifdate==null)&&(rightOcc.modifdate!=null)
|
||||
)
|
||||
{
|
||||
|
||||
objectstodelete.add(leftOcc);
|
||||
objectstoinsert.add(rightOcc);
|
||||
}
|
||||
}
|
||||
|
||||
protected void manageLowProbability(float probability, OccurrenceRecord leftOcc, OccurrenceRecord rightOcc) {
|
||||
|
@ -339,9 +367,38 @@ public class OccurrencePointsMerger implements Transducerer{
|
|||
}
|
||||
|
||||
protected void persist() {
|
||||
StringBuffer buffer = new StringBuffer();
|
||||
int toins = objectstoinsert.size();
|
||||
|
||||
// DELETE ELEMENTS IN THE DELETION LIST
|
||||
int todel = objectstodelete.size();
|
||||
int counter = 0;
|
||||
StringBuffer buffer = new StringBuffer();
|
||||
AnalysisLogger.getLogger().debug("Deleting " + todel + " objects");
|
||||
if (todel > 0) {
|
||||
for (OccurrenceRecord record : objectstodelete) {
|
||||
String rec = recordedByFld + "='" + record.recordedby.replace("'","")+"'";
|
||||
String sci = scientificNameFld + "='" + record.scientificName.replace("'","")+"'";
|
||||
String x = lonFld + "='" + record.x+"'";
|
||||
String y = latFld + "='" + record.y+"'";
|
||||
|
||||
buffer.append("(");
|
||||
buffer.append(rec + " AND " + sci + " AND " + x + " AND " + y);
|
||||
buffer.append(")");
|
||||
if (counter < todel - 1)
|
||||
buffer.append(" OR ");
|
||||
|
||||
counter++;
|
||||
}
|
||||
|
||||
String updateQ = DatabaseUtils.deleteFromBuffer(mergedTableName, buffer);
|
||||
// System.out.println("Update:\n"+updateQ);
|
||||
DatabaseFactory.executeSQLUpdate(updateQ, dbconnection);
|
||||
AnalysisLogger.getLogger().debug("Objects deleted");
|
||||
}
|
||||
|
||||
buffer = new StringBuffer();
|
||||
int toins = objectstoinsert.size();
|
||||
AnalysisLogger.getLogger().debug("Inserting " + toins + " objects");
|
||||
counter = 0;
|
||||
if (toins > 0) {
|
||||
for (OccurrenceRecord record : objectstoinsert) {
|
||||
buffer.append("(");
|
||||
|
@ -356,7 +413,9 @@ public class OccurrencePointsMerger implements Transducerer{
|
|||
String updateQ = DatabaseUtils.insertFromBuffer(mergedTableName, columns.toString(), buffer);
|
||||
// System.out.println("Update:\n"+updateQ);
|
||||
DatabaseFactory.executeSQLUpdate(updateQ, dbconnection);
|
||||
AnalysisLogger.getLogger().debug("Objects inserted");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -392,8 +451,10 @@ public class OccurrencePointsMerger implements Transducerer{
|
|||
// for each element in dx
|
||||
List<OccurrenceRecord> leftRecords = new ArrayList<OccurrencePointsMerger.OccurrenceRecord>();
|
||||
AnalysisLogger.getLogger().trace("Processing " + leftTableName + " vs " + rightTableName);
|
||||
status = 10;
|
||||
int rightCounter = 0;
|
||||
int similaritiesCounter = 0;
|
||||
int allrightrows = rightRows.size();
|
||||
for (Object rRow : rightRows) {
|
||||
// transform into an occurrence object
|
||||
OccurrenceRecord rightOcc = row2OccurrenceRecord((Object[]) rRow);
|
||||
|
@ -412,8 +473,7 @@ public class OccurrencePointsMerger implements Transducerer{
|
|||
leftRecords.add(leftOcc);
|
||||
leftrecordsSize++;
|
||||
// System.out.println("ADDED "+k+"-th elements size: "+leftRecords.size());
|
||||
}
|
||||
else
|
||||
} else
|
||||
leftOcc = leftRecords.get(k);
|
||||
|
||||
// evaluate P(dx,sx)
|
||||
|
@ -423,8 +483,7 @@ public class OccurrencePointsMerger implements Transducerer{
|
|||
bestleftOcc = leftOcc;
|
||||
found = true;
|
||||
similaritiesCounter++;
|
||||
AnalysisLogger.getLogger().trace("Found a similarity with P="+p+" between ("+"\""+leftOcc.scientificName+"\""+","+leftOcc.x+"\""+","+"\""+leftOcc.y+"\""+","+"\""+leftOcc.recordedby+"\""+","+"\""+ convert2conventionalFormat(leftOcc.eventdate)+"\""+") VS "+
|
||||
"("+"\""+rightOcc.scientificName+"\""+","+"\""+rightOcc.x+"\""+","+"\""+rightOcc.y+"\""+","+"\""+rightOcc.recordedby+"\""+","+"\""+ convert2conventionalFormat(rightOcc.eventdate)+"\""+")");
|
||||
AnalysisLogger.getLogger().trace("Found a similarity with P=" + p + " between (" + "\"" + leftOcc.scientificName + "\"" + "," + leftOcc.x + "\"" + "," + "\"" + leftOcc.y + "\"" + "," + "\"" + leftOcc.recordedby + "\"" + "," + "\"" + convert2conventionalFormat(leftOcc.eventdate) + "\"" + ") VS " + "(" + "\"" + rightOcc.scientificName + "\"" + "," + "\"" + rightOcc.x + "\"" + "," + "\"" + rightOcc.y + "\"" + "," + "\"" + rightOcc.recordedby + "\"" + "," + "\"" + convert2conventionalFormat(rightOcc.eventdate) + "\"" + ")");
|
||||
break;
|
||||
}
|
||||
k++;
|
||||
|
@ -434,19 +493,21 @@ public class OccurrencePointsMerger implements Transducerer{
|
|||
manageHighProbability(p, bestleftOcc, rightOcc);
|
||||
else
|
||||
manageLowProbability(p, bestleftOcc, rightOcc);
|
||||
|
||||
status = Math.min(90, 10f + (80 * ((float) rightCounter) / ((float) allrightrows)));
|
||||
}
|
||||
|
||||
AnalysisLogger.getLogger().trace("Found " + similaritiesCounter + " similarities on " + rightCounter + " elements");
|
||||
|
||||
status = 90;
|
||||
// transform the complete list into a table
|
||||
persist();
|
||||
// close DB connection
|
||||
} catch (Exception e) {
|
||||
throw e;
|
||||
}
|
||||
finally{
|
||||
} finally {
|
||||
if (dbconnection != null)
|
||||
dbconnection.close();
|
||||
status = 100;
|
||||
AnalysisLogger.getLogger().trace("Occ Points Processing Finished and db closed");
|
||||
}
|
||||
}
|
||||
|
|
|
@ -158,6 +158,10 @@ public class DatabaseUtils {
|
|||
return "insert into "+table+" ("+columnsNames+") values "+values;
|
||||
}
|
||||
|
||||
public static String deleteFromBuffer(String table, StringBuffer couples) {
|
||||
|
||||
return "delete from "+table+" where "+couples;
|
||||
}
|
||||
|
||||
public static String copyFileToTableStatement (String file, String table){
|
||||
return "COPY "+table+" FROM '"+file+"' DELIMITERS ';' WITH NULL AS 'null string'";
|
||||
|
|
Loading…
Reference in New Issue