Gianpaolo Coro 2012-11-29 11:07:43 +00:00
parent a5adb46057
commit 3587a68077
18 changed files with 121 additions and 27 deletions

View File

@@ -8,5 +8,6 @@ public enum ALG_PROPS {
SPECIES_VS_CSQUARE,
PHENOMENON_VS_GEOINFO,
SPECIES_ENVELOPES,
SPECIES_MODEL
SPECIES_MODEL,
PHENOMENON_VS_PARALLEL_PHENOMENON
}

View File

@@ -1,5 +1,7 @@
package org.gcube.dataanalysis.ecoengine.datatypes;
import java.util.HashMap;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
@@ -47,4 +49,14 @@ public class PrimitiveType extends StatisticalType{
this.type = type;
}
public static HashMap<String,StatisticalType> stringMap2StatisticalMap(HashMap<String,String> stringmap){
HashMap<String,StatisticalType> map = new HashMap<String, StatisticalType>();
for (String key:stringmap.keySet()){
String value = stringmap.get(key);
PrimitiveType string = new PrimitiveType(String.class.getName(), value, PrimitiveTypes.STRING, key,key);
map.put(key,string);
}
return map;
}
}
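
The new stringMap2StatisticalMap helper wraps every entry of a plain string map into a STRING PrimitiveType, so analysis results can be exposed as a map of StatisticalType objects. A minimal usage sketch (the map contents are illustrative, not part of the commit):

HashMap<String, String> raw = new HashMap<String, String>();
raw.put("ACCURACY", "0.92");
raw.put("KAPPA", "0.85");
// each value becomes a STRING PrimitiveType whose name and description are its key
HashMap<String, StatisticalType> wrapped = PrimitiveType.stringMap2StatisticalMap(raw);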

View File

@@ -327,7 +327,7 @@ public class DistributionQualityAnalysis extends DataAnalysis {
@Override
public StatisticalType getOutput() {
PrimitiveType p = new PrimitiveType(Map.class.getName(), output, PrimitiveTypes.MAP, "AnalysisResult","Analysis of the probability distribution quality");
PrimitiveType p = new PrimitiveType(Map.class.getName(), PrimitiveType.stringMap2StatisticalMap(output), PrimitiveTypes.MAP, "AnalysisResult","Analysis of the probability distribution quality");
return p;
}

View File

@@ -316,7 +316,7 @@ public class HabitatRepresentativeness extends DataAnalysis {
@Override
public StatisticalType getOutput() {
PrimitiveType p = new PrimitiveType(Map.class.getName(), output, PrimitiveTypes.MAP, "AnalysisResult","Habitat Representativeness Score");
PrimitiveType p = new PrimitiveType(Map.class.getName(), PrimitiveType.stringMap2StatisticalMap(output), PrimitiveTypes.MAP, "AnalysisResult","Habitat Representativeness Score");
return p;
}

View File

@@ -0,0 +1,77 @@
package org.gcube.dataanalysis.ecoengine.interfaces;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
public abstract class ActorNode implements GenericAlgorithm{
// initialization of the single node
public abstract void initSingleNode(AlgorithmConfiguration config);
// get the internal processing status for the single step calculation
public abstract float getInternalStatus();
// execute a single node
public abstract int executeNode(int cellStartIndex, int numberOfRightElementsToProcess, int leftStartIndex, int numberOfLeftElementsToProcess, boolean duplicate, String sandboxFolder, String nodeConfigurationFileObject, String logfileNameToProduce);
// setup phase in which the inputs are initialized
public abstract void setup(AlgorithmConfiguration config) throws Exception;
// get the overall number of right elements (e.g. species) to process
public abstract int getNumberOfRightElements();
// get the overall number of left elements (e.g. geographical cells) to process
public abstract int getNumberOfLeftElements();
// get the overall number of elements processed so far
public abstract int getNumberOfProcessedElements();
// stop the execution of the node
public abstract void stop();
// post-process after the whole calculation: the reduce operation
public abstract void postProcess(boolean manageDuplicates, boolean manageFault);
public static void main(String[] args) throws Exception {
try {
System.out.println("Generic Node: Process Started ");
try {
for (int i = 0; i < args.length; i++) {
System.out.println("Generic Node: RECEIVED INPUT " + args[i]);
}
} catch (Exception e) {
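// nothing to recover: failing to echo the raw arguments must not stop the node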
}
System.out.println("Generic Node: checking arguments from " + args[0]);
String[] rargs = args[0].split("_");
int order = Integer.parseInt(rargs[0]);
System.out.println("Generic Node: order: " + order);
int chunksize = Integer.parseInt(rargs[1]);
System.out.println("Generic Node: chunk: " + chunksize);
int speciesOrder = Integer.parseInt(rargs[2]);
System.out.println("Generic Node: species: " + speciesOrder);
int speciesChunksize = Integer.parseInt(rargs[3]);
System.out.println("Generic Node: species chunk size: " + speciesChunksize);
String path = rargs[4];
System.out.println("Generic Node: path: " + path);
String algorithmClass = rargs[5];
System.out.println("Generic Node: algorithmClass: " + algorithmClass);
Boolean duplicate = Boolean.parseBoolean(rargs[6]);
System.out.println("Generic Node: duplicate message: " + duplicate);
String nodeConfiguration = rargs[7];
System.out.println("Generic Node: config: " + nodeConfiguration);
String logfile = args[1];
System.out.println("Generic Node: logfile: " + logfile);
System.out.println("Generic Node: executing class");
SpatialProbabilityDistributionNode node = (SpatialProbabilityDistributionNode) Class.forName(algorithmClass).newInstance();
node.executeNode(order, chunksize, speciesOrder, speciesChunksize, duplicate, path, nodeConfiguration, logfile);
} catch (Exception e) {
System.out.println("ERROR " + e.getMessage());
System.out.println(e);
}
}
}
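
main expects two arguments: args[0] packs eight underscore-separated fields (order, chunk size, species order, species chunk size, sandbox path, algorithm class name, duplicate flag, node configuration file) and args[1] names the log file to produce. A hypothetical invocation (every value is illustrative); note that split("_") assumes no field itself contains an underscore:

java org.gcube.dataanalysis.ecoengine.interfaces.ActorNode 0_100_0_50_/tmp/sandbox_org.example.MyNode_true_node.cfg node0.log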

View File

@@ -54,7 +54,7 @@ public abstract class DataAnalysis implements Evaluator{
@Override
public StatisticalType getOutput() {
PrimitiveType p = new PrimitiveType(Map.class.getName(), out, PrimitiveTypes.MAP, "AnalysisResult","Analysis Values");
PrimitiveType p = new PrimitiveType(Map.class.getName(), PrimitiveType.stringMap2StatisticalMap(out), PrimitiveTypes.MAP, "AnalysisResult","Analysis Values");
return p;
}
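
With this change all three Evaluator outputs (DistributionQualityAnalysis, HabitatRepresentativeness and this DataAnalysis base class) return a MAP whose values are StatisticalType objects rather than bare strings. A hypothetical consumer-side read, assuming PrimitiveType exposes its payload through a getContent() accessor (not shown in this diff):

PrimitiveType result = (PrimitiveType) analysis.getOutput();
// the MAP payload is the converted HashMap<String, StatisticalType>
HashMap<String, StatisticalType> values = (HashMap<String, StatisticalType>) result.getContent();
for (String key : values.keySet()) {
    PrimitiveType entry = (PrimitiveType) values.get(key);
    System.out.println(key + " = " + entry.getContent());
}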

View File

@@ -8,7 +8,7 @@ public interface Generator extends ComputationalAgent{
public ALG_PROPS[] getSupportedAlgorithms();
public SpatialProbabilityDistribution getAlgorithm();
public GenericAlgorithm getAlgorithm();
public String getLoad();

View File

@@ -5,7 +5,7 @@ import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
public interface SpatialProbabilityDistribution {
public interface GenericAlgorithm {
//defines the properties of this algorithm
public ALG_PROPS[] getProperties();

View File

@@ -6,7 +6,7 @@ import java.util.Map;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
//implements a SpatialProbabilityDistribution where data are taken from a Database
public interface SpatialProbabilityDistributionGeneric extends SpatialProbabilityDistribution{
public interface SpatialProbabilityDistributionGeneric extends GenericAlgorithm{
//initialization of the distribution model
public void init(AlgorithmConfiguration config) throws Exception;

View File

@@ -5,9 +5,9 @@ import java.io.FileWriter;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
public abstract class SpatialProbabilityDistributionNode implements SpatialProbabilityDistribution {
public abstract class SpatialProbabilityDistributionNode implements GenericAlgorithm {
//initialization of ta single node
//initialization of the single node
public abstract void initSingleNode(AlgorithmConfiguration config);
//get the internal processing status for the single step calculation

View File

@@ -7,7 +7,7 @@ import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.hibernate.SessionFactory;
//implements a SpatialProbabilityDistribution where data are taken from a Database
public interface SpatialProbabilityDistributionTable extends SpatialProbabilityDistribution{
public interface SpatialProbabilityDistributionTable extends GenericAlgorithm{
//define the properties of this algorithm
public ALG_PROPS[] getProperties();

View File

@@ -19,7 +19,7 @@ import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistribution;
import org.gcube.dataanalysis.ecoengine.interfaces.GenericAlgorithm;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;
public class LocalSimpleSplitGenerator implements Generator {
@@ -352,7 +352,7 @@ public class LocalSimpleSplitGenerator implements Generator {
}
@Override
public SpatialProbabilityDistribution getAlgorithm() {
public GenericAlgorithm getAlgorithm() {
return distributionModel;
}

View File

@@ -21,7 +21,7 @@ import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistribution;
import org.gcube.dataanalysis.ecoengine.interfaces.GenericAlgorithm;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionTable;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.hibernate.SessionFactory;
@@ -512,7 +512,7 @@ public class LocalSplitGenerator implements Generator {
}
@Override
public SpatialProbabilityDistribution getAlgorithm() {
public GenericAlgorithm getAlgorithm() {
return distributionModel;
}

View File

@@ -14,7 +14,7 @@ import org.gcube.dataanalysis.ecoengine.connectors.RemoteHspecOutputObject;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistribution;
import org.gcube.dataanalysis.ecoengine.interfaces.GenericAlgorithm;
//deprecated
public class RainyCloudGenerator {

View File

@@ -10,7 +10,7 @@ import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistribution;
import org.gcube.dataanalysis.ecoengine.interfaces.GenericAlgorithm;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;
public class GeneratorsFactory {
@@ -102,7 +102,7 @@ public class GeneratorsFactory {
}
else
{
SpatialProbabilityDistribution sp = (SpatialProbabilityDistribution) algclass;
GenericAlgorithm sp = (GenericAlgorithm) algclass;
//take alg's properties
ALG_PROPS[] algp = sp.getProperties();
//take all generators
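
The cast to GenericAlgorithm lets the factory match any algorithm, not only spatial probability distributions, against the available generators. A plausible sketch of the matching step that follows (the generators list and the loop are a reconstruction based on the interfaces above, not the commit's code):

GenericAlgorithm sp = (GenericAlgorithm) algclass;
ALG_PROPS[] algp = sp.getProperties();
for (Generator generator : generators) {
    for (ALG_PROPS supported : generator.getSupportedAlgorithms()) {
        for (ALG_PROPS declared : algp) {
            if (supported == declared) {
                // this generator can host the algorithm: keep it as a candidate
            }
        }
    }
}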

View File

@@ -13,7 +13,7 @@ import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.Model;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistribution;
import org.gcube.dataanalysis.ecoengine.interfaces.GenericAlgorithm;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
@@ -63,7 +63,7 @@ public class ProcessorsFactory {
return ((Transducerer) algclass).getDescription();
}
else
return ((SpatialProbabilityDistribution) algclass).getDescription();
return ((GenericAlgorithm) algclass).getDescription();
}
@@ -90,7 +90,7 @@ public class ProcessorsFactory {
return ((Transducerer) algclass).getInputParameters();
}
else
return ((SpatialProbabilityDistribution) algclass).getInputParameters();
return ((GenericAlgorithm) algclass).getInputParameters();
}

View File

@@ -57,7 +57,7 @@ public static void main(String[] args) throws Exception {
*/
// List<Evaluator> trans = null;
// trans = EvaluatorsFactory.getEvaluators(testConfigLocal12());
List<Transducerer> trans = TransducerersFactory.getTransducerers(testConfigLocal5c());
List<Transducerer> trans = TransducerersFactory.getTransducerers(testConfigLocal5b());
trans.get(0).init();
Regressor.process(trans.get(0));
@@ -174,12 +174,16 @@ public static void main(String[] args) throws Exception {
config.setParam("rightTableName", "occurrence_species_id1e8f7b48_b99a_48a3_8b52_89976fd79cd4");
config.setParam("leftTableName", "occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
*/
config.setParam("rightTableName", "occurrence_species_id7a77d613_c21d_495d_8a04_b9534cf5e159");
config.setParam("leftTableName", "occurrence_species_idbb2931ef_af2c_495a_ad5f_4ef81ad16159");
//"processedoccurrences_id_e7b77fc2_f1cf_4a46_b7b7_898b663b65dd" OBIS
//"processedoccurrences_id_bd3fdae3_a64e_4215_8eb3_c1bd95981dd2" GBIF
config.setParam("finalTableName", "occurrencessubtracted2");
config.setParam("spatialTolerance", "0.000001");
config.setParam("confidence", "90");
config.setParam("leftTableName", "processedoccurrences_id_e7b77fc2_f1cf_4a46_b7b7_898b663b65dd");
config.setParam("rightTableName", "processedoccurrences_id_bd3fdae3_a64e_4215_8eb3_c1bd95981dd2");
config.setParam("finalTableName", "occurrencessubtractedarticle3");
config.setParam("spatialTolerance", "0.01");
config.setParam("confidence", "0");
config.setParam("DatabaseUserName","utente");
config.setParam("DatabasePassword","d4science");

View File

@@ -336,7 +336,7 @@ public class OccurrencePointsMerger implements Transducerer {
float probability = 0;
float distance = (float) Math.sqrt(Math.abs(left.x - right.x) + Math.abs(left.y - right.y));
if (distance > spatialToleranceValue)
probability = 0;
probability = -1;
else {
float pSpecies = probabilityStrings(right.scientificName, left.scientificName);
float pRecordedBy = probabilityStrings(right.recordedby, left.recordedby);
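
Returning -1 rather than 0 for points farther apart than spatialTolerance lets callers distinguish "not comparable" from a computed zero probability. A hypothetical caller-side check (the method and variable names are illustrative):

float p = computeMatchProbability(left, right);
if (p < 0) {
    // beyond spatialTolerance: the pair was never compared, skip it
} else if (p >= confidenceValue) {
    // sufficiently similar: merge (or subtract) the two occurrence records
}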