git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngine@71681 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in: parent b30615b560, commit 03f7f99d2b
|
@ -1,6 +1,7 @@
|
|||
package org.gcube.dataanalysis.ecoengine.datatypes;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
|
||||
|
@ -49,8 +50,8 @@ public class PrimitiveType extends StatisticalType{
|
|||
this.type = type;
|
||||
}
|
||||
|
||||
public static HashMap<String,StatisticalType> stringMap2StatisticalMap(HashMap<String,String> stringmap){
|
||||
HashMap<String,StatisticalType> map = new HashMap<String, StatisticalType>();
|
||||
public static LinkedHashMap<String,StatisticalType> stringMap2StatisticalMap(HashMap<String,String> stringmap){
|
||||
LinkedHashMap<String,StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
|
||||
for (String key:stringmap.keySet()){
|
||||
String value = stringmap.get(key);
|
||||
PrimitiveType string = new PrimitiveType(String.class.getName(), value, PrimitiveTypes.STRING, key,key);
|
||||
|
|
|
@ -0,0 +1,42 @@
|
|||
package org.gcube.dataanalysis.ecoengine.evaluation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
|
||||
|
||||
public class ClimateImpactAnalysis extends DiscrepancyAnalysis {
|
||||
|
||||
protected String SpeciesListTable = "ReferenceHSPEN";
|
||||
protected String LeftHSPEC = "LeftHSPEC";
|
||||
protected String RightHSPEC = "RightHSPEC";
|
||||
|
||||
@Override
|
||||
public List<StatisticalType> getInputParameters() {
|
||||
|
||||
List<TableTemplates> templatesHspen = new ArrayList<TableTemplates>();
|
||||
templatesHspen.add(TableTemplates.HSPEN);
|
||||
|
||||
List<TableTemplates> templatesHspec = new ArrayList<TableTemplates>();
|
||||
templatesHspec.add(TableTemplates.HSPEC);
|
||||
|
||||
InputTable hspec1 = new InputTable(templatesHspec,LeftHSPEC,"Left table containing a hspec distribution","hspen");
|
||||
InputTable hspec2 = new InputTable(templatesHspec,RightHSPEC,"Right table containing a hspec distribution","hspen");
|
||||
|
||||
InputTable hspen = new InputTable(templatesHspen,SpeciesListTable,"Species List Table taken from envelopes","hspen");
|
||||
|
||||
List<StatisticalType> stlist = new ArrayList<StatisticalType>();
|
||||
stlist.add(hspec1);
|
||||
stlist.add(hspec2);
|
||||
stlist.add(hspen);
|
||||
|
||||
DatabaseType.addDefaultDBPars(stlist);
|
||||
|
||||
return stlist;
|
||||
}
|
||||
|
||||
|
||||
}
|
|
@ -2,6 +2,7 @@ package org.gcube.dataanalysis.ecoengine.evaluation;
|
|||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
|
||||
|
@ -41,7 +42,7 @@ public class DiscrepancyAnalysis extends DataAnalysis {
|
|||
int numberofvectors;
|
||||
float maxerror;
|
||||
String maxdiscrepancyPoint;
|
||||
private HashMap<String, String> output;
|
||||
private LinkedHashMap<String, String> output;
|
||||
|
||||
@Override
|
||||
public List<StatisticalType> getInputParameters() {
|
||||
|
@ -74,7 +75,7 @@ public class DiscrepancyAnalysis extends DataAnalysis {
|
|||
|
||||
|
||||
@Override
|
||||
public HashMap<String, String> analyze() throws Exception {
|
||||
public LinkedHashMap<String, String> analyze() throws Exception {
|
||||
|
||||
String FirstTableCsquareColumn = config.getParam("FirstTableCsquareColumn");
|
||||
String SecondTableCsquareColumn = config.getParam("SecondTableCsquareColumn");
|
||||
|
@ -97,7 +98,7 @@ public class DiscrepancyAnalysis extends DataAnalysis {
|
|||
|
||||
|
||||
if (FirstTable.equals(SecondTable)){
|
||||
output = new HashMap<String, String>();
|
||||
output = new LinkedHashMap<String, String>();
|
||||
output.put("MEAN", "0.0");
|
||||
output.put("VARIANCE", "0.0");
|
||||
output.put("NUMBER_OF_ERRORS", "0");
|
||||
|
@ -129,7 +130,7 @@ public class DiscrepancyAnalysis extends DataAnalysis {
|
|||
analyzeCompareList(takePoints);
|
||||
calcDiscrepancy();
|
||||
|
||||
output = new HashMap<String, String>();
|
||||
output = new LinkedHashMap<String, String>();
|
||||
output.put("MEAN", "" + MathFunctions.roundDecimal(mean,2));
|
||||
output.put("VARIANCE", "" + MathFunctions.roundDecimal(variance,2));
|
||||
output.put("NUMBER_OF_ERRORS", "" + numberoferrors);
|
||||
|
|
|
@ -2,6 +2,7 @@ package org.gcube.dataanalysis.ecoengine.evaluation;
|
|||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
@ -14,7 +15,6 @@ import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
|
|||
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis;
|
||||
|
@ -42,7 +42,7 @@ public class DistributionQualityAnalysis extends DataAnalysis {
|
|||
float acceptanceThreshold = 0.8f;
|
||||
float rejectionThreshold = 0.3f;
|
||||
double bestThreshold = 0.5d;
|
||||
private HashMap<String, String> output;
|
||||
private LinkedHashMap<String, String> output;
|
||||
|
||||
public List<StatisticalType> getInputParameters() {
|
||||
List<StatisticalType> parameters = new ArrayList<StatisticalType>();
|
||||
|
@ -138,7 +138,7 @@ public class DistributionQualityAnalysis extends DataAnalysis {
|
|||
return points;
|
||||
}
|
||||
|
||||
public HashMap<String, String> analyze() throws Exception {
|
||||
public LinkedHashMap<String, String> analyze() throws Exception {
|
||||
|
||||
try {
|
||||
acceptanceThreshold = Float.parseFloat(config.getParam("PositiveThreshold"));
|
||||
|
@ -185,7 +185,7 @@ public class DistributionQualityAnalysis extends DataAnalysis {
|
|||
double omissionrate = calculateOmissionRate(truePositives, falseNegatives);
|
||||
double specificity = calculateSpecificity(trueNegatives, falsePositives);
|
||||
|
||||
output = new HashMap<String, String>();
|
||||
output = new LinkedHashMap<String, String>();
|
||||
output.put("TRUE_POSITIVES", "" + truePositives);
|
||||
output.put("TRUE_NEGATIVES", "" + trueNegatives);
|
||||
output.put("FALSE_POSITIVES", "" + falsePositives);
|
||||
|
|
|
@ -2,6 +2,7 @@ package org.gcube.dataanalysis.ecoengine.evaluation;
|
|||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
|
@ -13,7 +14,6 @@ import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
|
|||
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis;
|
||||
|
@ -30,7 +30,7 @@ public class HabitatRepresentativeness extends DataAnalysis {
|
|||
|
||||
String configPath = "./cfg/";
|
||||
|
||||
private HashMap<String, String> output;
|
||||
private LinkedHashMap<String, String> output;
|
||||
private static int minimumNumberToTake = 10000;
|
||||
private float status;
|
||||
private int currentIterationStep;
|
||||
|
@ -191,7 +191,7 @@ public class HabitatRepresentativeness extends DataAnalysis {
|
|||
private double currentHRSScore;
|
||||
private double [] currentHRSVector;
|
||||
|
||||
public HashMap<String, String> analyze() throws Exception {
|
||||
public LinkedHashMap<String, String> analyze() throws Exception {
|
||||
|
||||
try {
|
||||
status = 0;
|
||||
|
@ -220,7 +220,7 @@ public class HabitatRepresentativeness extends DataAnalysis {
|
|||
status=Math.min(status+100f/maxTests,99f);
|
||||
}
|
||||
|
||||
output = new HashMap<String, String>();
|
||||
output = new LinkedHashMap<String, String>();
|
||||
output.put("HRS_VECTOR", "" + Transformations.vector2String(meanHRSVector));
|
||||
output.put("HRS", "" + org.gcube.contentmanagement.graphtools.utils.MathFunctions.roundDecimal(meanHRS,2));
|
||||
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
package org.gcube.dataanalysis.ecoengine.interfaces;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
|
@ -26,7 +27,7 @@ public abstract class DataAnalysis implements Evaluator{
|
|||
protected AlgorithmConfiguration config;
|
||||
protected SessionFactory connection;
|
||||
|
||||
public abstract HashMap<String, String> analyze() throws Exception;
|
||||
public abstract LinkedHashMap<String, String> analyze() throws Exception;
|
||||
|
||||
/**
|
||||
* Processing skeleton : init-analyze-end
|
||||
|
@ -34,10 +35,10 @@ public abstract class DataAnalysis implements Evaluator{
|
|||
* @return
|
||||
* @throws Exception
|
||||
*/
|
||||
HashMap<String, String> out;
|
||||
LinkedHashMap<String, String> out;
|
||||
public void compute() throws Exception{
|
||||
status = 0;
|
||||
out = new HashMap<String, String>();
|
||||
out = new LinkedHashMap<String, String>();
|
||||
try{
|
||||
out = analyze();
|
||||
|
||||
|
|
Loading…
Reference in New Issue