Gianpaolo Coro 2014-03-03 14:20:11 +00:00
parent 54e72c7859
commit 704ff1bd71
3 changed files with 256 additions and 3 deletions

View File: MapsComparator.java

@@ -56,7 +56,7 @@ public class MapsComparator extends DataAnalysis {
@Override
public String getDescription() {
return "An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. Supported maps can only be in WFS, Opendap or ASC formats. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold.";
return "An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold.";
}
public double BBxLL = -180;
@@ -127,6 +127,7 @@ public class MapsComparator extends DataAnalysis {
String rastertable1 = raster1.getTablename();
AnalysisLogger.getLogger().debug("MapsComparator: Map 1 was dumped in table: " + rastertable1);
status = 40;
+ intersector = new XYExtractor(config);
AnalysisLogger.getLogger().debug("MapsComparator: ****Rasterizing map 2****");
double[][] slice2 = intersector.extractXYGrid(layerT2, time2, BBxLL, BBxUR, BByLL, BByUR, z, resolution, resolution);
AnalysisLogger.getLogger().debug("MapsComparator: Dumping map 2");
@@ -144,8 +145,8 @@ public class MapsComparator extends DataAnalysis {
config.setParam("SecondTable", rastertable2);
config.setParam("FirstTableCsquareColumn", RasterTable.csquareColumn);
config.setParam("SecondTableCsquareColumn", RasterTable.csquareColumn);
config.setParam("FirstTableProbabilityColumn", RasterTable.probabilityColumn);
config.setParam("SecondTableProbabilityColumn", RasterTable.probabilityColumn);
config.setParam("FirstTableProbabilityColumn", RasterTable.valuesColumn);
config.setParam("SecondTableProbabilityColumn", RasterTable.valuesColumn);
config.setParam("ComparisonThreshold", "" + valuesthreshold);
AnalysisLogger.getLogger().debug("MapsComparator: Analyzing discrepancy between maps: " + rastertable1 + " and " + rastertable2);
DiscrepancyAnalysis da = new DiscrepancyAnalysis();
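For reference, the hunks above now point DiscrepancyAnalysis at the generic value column of the rasterized maps (RasterTable.valuesColumn instead of RasterTable.probabilityColumn) and re-create the XYExtractor before rasterizing the second map. The comparator and the new XYExtraction transducer below share the same extract-and-rasterize pipeline; the following minimal sketch uses only calls visible in this commit, with a placeholder layer UUID, bounding box, resolution and table name, and assumes config is an AlgorithmConfiguration whose scope and database parameters are already set:

XYExtractor extractor = new XYExtractor(config);
// layer UUID, time index, x/y bounding box, z, x/y resolution (all placeholders)
double[][] grid = extractor.extractXYGrid("layer-uuid", 0, -180, 180, -90, 90, 0, 0.5, 0.5);
RasterTable raster = new RasterTable(-180, 180, -90, 90, 0, 0.5, 0.5, grid, config);
raster.setTablename("rstr_example"); // placeholder table name
raster.deleteTable();                // drop a previous run of the same table, if any
raster.dumpGeoTable();               // persist the grid; values go to the column named by RasterTable.valuesColumn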

View File: XYExtraction.java (new file)

@@ -0,0 +1,206 @@
package org.gcube.dataanalysis.geo.algorithms;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory;
import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;
import org.gcube.dataanalysis.geo.matrixmodel.XYExtractor;
public class XYExtraction implements Transducerer{
static String layerName = "Layer";
static String t = "TimeIndex";
static String z = "Z";
static String yLL = "BBox_LowerLeftLat";
static String xLL = "BBox_LowerLeftLong";
static String yUR = "BBox_UpperRightLat";
static String xUR = "BBox_UpperRightLong";
static String xRes = "XResolution";
static String yRes = "YResolution";
static String tableName = "OutputTableName";
static String tableLabel = "OutputTableLabel";
AlgorithmConfiguration config;
float status;
public String layerNameValue;
public int time;
public double zValue;
public double xResValue;
public double yResValue;
public String tableNameValue;
public String tableLabelValue;
public double BBxLL = -180;
public double BBxUR = 180;
public double BByLL = -90;
public double BByUR = 90;
public List<StatisticalType> inputs = new ArrayList<StatisticalType>();
public LinkedHashMap<String, String> outputParameters = new LinkedHashMap<String, String>();
@Override
public void init() throws Exception {
AnalysisLogger.getLogger().debug("XY Extraction Initialization");
}
@Override
public String getDescription() {
return "An algorithm to extract associated to an environmental feature repository (e.g. NETCDF, ASC files, Tables etc. ). A grid of points is specified by the user and values are associated to the points from the environmental repository. " + "It accepts as two geospatial repositoried (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) " + "and the specification about time and space. Produces a table containing the values associated to the selected bounding box.";
}
@Override
public List<StatisticalType> getInputParameters() {
IOHelper.addStringInput(inputs, layerName, "Layer Title or UUID: The title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer", "");
IOHelper.addDoubleInput(inputs, yLL, "Lower Left Latitude of the Bounding Box", "-90");
IOHelper.addDoubleInput(inputs, xLL, "Lower Left Longitude of the Bounding Box", "-180");
IOHelper.addDoubleInput(inputs, yUR, "Upper Right Latitude of the Bounding Box", "90");
IOHelper.addDoubleInput(inputs, xUR, "Upper Right Longitude of the Bounding Box", "180");
IOHelper.addRandomStringInput(inputs, tableName, "The database name of the table to produce", "extr_");
IOHelper.addStringInput(inputs, tableLabel, "The human-readable name of the table to produce", "extr_");
IOHelper.addDoubleInput(inputs, z, "Value of Z. Default is 0, which means the processing is applied at surface level", "0");
IOHelper.addIntegerInput(inputs, t, "Time Index. The default is the first", "0");
IOHelper.addDoubleInput(inputs, xRes, "Projection resolution on the X axis", "0.5");
IOHelper.addDoubleInput(inputs, yRes, "Projection resolution on the Y axis", "0.5");
DatabaseType.addDefaultDBPars(inputs);
return inputs;
}
protected void getParameters() {
layerNameValue = IOHelper.getInputParameter(config, layerName);
AnalysisLogger.getLogger().debug("Extraction: Layer " + layerNameValue);
String z$ = IOHelper.getInputParameter(config, z);
String time$ = IOHelper.getInputParameter(config, t);
time = ((time$ != null) && (time$.trim().length() > 0)) ? Integer.parseInt(time$) : 0;
if (time < 0)
time = 0;
AnalysisLogger.getLogger().debug("Extraction: Time " + time);
zValue = 0;
if ((z$ != null) && (z$.trim().length() > 0))
try {
zValue = Double.parseDouble(z$);
} catch (Exception ee) {
// a malformed Z value is ignored and the default 0 (surface level) is kept
}
AnalysisLogger.getLogger().debug("Extraction: Z " + zValue);
BByLL = Double.parseDouble(IOHelper.getInputParameter(config, yLL));
BBxLL = Double.parseDouble(IOHelper.getInputParameter(config, xLL));
BByUR = Double.parseDouble(IOHelper.getInputParameter(config, yUR));
BBxUR = Double.parseDouble(IOHelper.getInputParameter(config, xUR));
AnalysisLogger.getLogger().debug("Extraction: yLL " + BByLL);
AnalysisLogger.getLogger().debug("Extraction: xLL " + BBxLL);
AnalysisLogger.getLogger().debug("Extraction: yUR " + BByUR);
AnalysisLogger.getLogger().debug("Extraction: xUR " + BBxUR);
yResValue = Double.parseDouble(IOHelper.getInputParameter(config, yRes));
AnalysisLogger.getLogger().debug("Extraction: yRes " + yResValue);
xResValue = Double.parseDouble(IOHelper.getInputParameter(config, xRes));
AnalysisLogger.getLogger().debug("Extraction: xRes " + xResValue);
tableNameValue = IOHelper.getInputParameter(config, tableName);
tableLabelValue = IOHelper.getInputParameter(config, tableLabel);
AnalysisLogger.getLogger().debug("Extraction: tableName " + tableNameValue);
AnalysisLogger.getLogger().debug("Extraction: tableLabel " + tableLabelValue);
String scope = config.getGcubeScope();
AnalysisLogger.getLogger().debug("Extraction: Externally set scope " + scope);
if (scope == null) {
scope = ScopeProvider.instance.get();
config.setGcubeScope(scope);
}
}
@Override
public void compute() throws Exception {
try{
status = 0;
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
getParameters();
AnalysisLogger.getLogger().debug("Extractor: MatrixExtractor initialized");
long t0 = System.currentTimeMillis();
XYExtractor extractor = new XYExtractor(config);
double[][] matrix = extractor.extractXYGrid(layerNameValue,time,BBxLL,BBxUR,BByLL,BByUR,zValue,xResValue,yResValue);
System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - t0));
AnalysisLogger.getLogger().debug("Extractor: Matrix Extracted");
AnalysisLogger.getLogger().debug("Extractor: ****Rasterizing grid into table****");
status = 30;
RasterTable raster = new RasterTable(BBxLL, BBxUR, BByLL, BByUR, zValue, xResValue, yResValue, matrix, config);
raster.setTablename(tableNameValue);
raster.deleteTable();
raster.dumpGeoTable();
AnalysisLogger.getLogger().debug("Extractor: Map was dumped in table: " + tableNameValue);
status = 80;
AnalysisLogger.getLogger().debug("Extractor: Elapsed: Whole operation completed in " + ((double) (System.currentTimeMillis() - t0) / 1000d) + "s");
} catch (Exception e) {
e.printStackTrace();
AnalysisLogger.getLogger().debug("Extractor: ERROR!: " + e.getLocalizedMessage());
} finally {
status = 100;
}
}
@Override
public void shutdown() {
AnalysisLogger.getLogger().debug("Shutdown");
}
@Override
public StatisticalType getOutput() {
List<TableTemplates> templateHspec = new ArrayList<TableTemplates>();
templateHspec.add(TableTemplates.GENERIC);
OutputTable p = new OutputTable(templateHspec,tableLabelValue,tableNameValue,"Output table");
return p;
}
@Override
public float getStatus() {
return status;
}
@Override
public INFRASTRUCTURE getInfrastructure() {
return INFRASTRUCTURE.LOCAL;
}
@Override
public void setConfiguration(AlgorithmConfiguration config) {
this.config=config;
}
protected ResourceFactory resourceManager;
public String getResourceLoad() {
if (resourceManager == null)
resourceManager = new ResourceFactory();
return resourceManager.getResourceLoad(1);
}
public String getResources() {
return ResourceFactory.getResources(100f);
}
}
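The new XYExtraction transducer registers its inputs under the names declared at the top of the class ("Layer", "TimeIndex", "Z", the four BBox_* bounds, "XResolution", "YResolution", "OutputTableName", "OutputTableLabel"). A minimal driver sketch follows, using the same imports as the class above; the scope, UUID and values are placeholders, the setConfigPath call is an assumed AlgorithmConfiguration setter, and the database parameters added by DatabaseType.addDefaultDBPars must be filled in as well (their keys are omitted here):

AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");                    // assumed setter; placeholder path to the configuration files
config.setGcubeScope("/gcube/devsec");             // placeholder scope
config.setParam("Layer", "geonetwork-layer-uuid"); // placeholder UUID
config.setParam("BBox_LowerLeftLat", "-60");
config.setParam("BBox_LowerLeftLong", "-50");
config.setParam("BBox_UpperRightLat", "60");
config.setParam("BBox_UpperRightLong", "50");
config.setParam("Z", "0");
config.setParam("TimeIndex", "0");
config.setParam("XResolution", "0.5");
config.setParam("YResolution", "0.5");
config.setParam("OutputTableName", "extr_test");
config.setParam("OutputTableLabel", "extr_test");
// ...plus the database connection parameters added by DatabaseType.addDefaultDBPars

XYExtraction extraction = new XYExtraction();
extraction.setConfiguration(config);
extraction.init();
extraction.compute();                              // extracts the grid and dumps it into extr_test
OutputTable result = (OutputTable) extraction.getOutput();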

View File: XYExtractionTable.java (new file)

@@ -0,0 +1,46 @@
package org.gcube.dataanalysis.geo.algorithms;
import java.util.ArrayList;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
public class XYExtractionTable extends XYExtraction{
@Override
public List<StatisticalType> getInputParameters() {
List<StatisticalType> inputs = new ArrayList<StatisticalType>();
List<TableTemplates> template= new ArrayList<TableTemplates>();
template.add(TableTemplates.GENERIC);
InputTable table = new InputTable(template,TableMatrixRepresentation.tableNameParameter,"A geospatial table containing at least x,y information","");
ColumnType columnx = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.xDimensionColumnParameter, "The column containing x (longitude) information", "x", false);
ColumnType columny = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.yDimensionColumnParameter, "The column containing y (latitude) information", "y", false);
ColumnType columnz = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.zDimensionColumnParameter, "The column containing z (altitude or depth) information", "z", true);
ColumnType columnt = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.timeDimensionColumnParameter, "The column containing time", "datetime", true);
ColumnType columnvalue = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.valueDimensionColumnParameter, "The column containing values", "value", false);
inputs.add(table);
inputs.add(columnx);
inputs.add(columny);
inputs.add(columnz);
inputs.add(columnt);
inputs.add(columnvalue);
IOHelper.addStringInput(inputs, TableMatrixRepresentation.filterParameter, "A filter on one of the columns (e.g. speed<2)","");
List<StatisticalType> previnputs = super.getInputParameters();
// drop the first inherited parameter (the GeoNetwork Layer): the source here is a table
previnputs.remove(0);
inputs.addAll(previnputs);
return inputs;
}
}
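XYExtractionTable reuses the whole XYExtraction machinery but replaces the GeoNetwork Layer input with a geospatial table described through the TableMatrixRepresentation parameters (table name, x/y/z/time/value columns and an optional row filter). The following configuration sketch assumes the XYExtractor connector resolves the table source from these parameters when no layer is given; table and column names are placeholders:

AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setParam(TableMatrixRepresentation.tableNameParameter, "occurrence_points"); // placeholder table
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
config.setParam(TableMatrixRepresentation.zDimensionColumnParameter, "z");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "datetime");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "value");
config.setParam(TableMatrixRepresentation.filterParameter, "speed<2"); // optional filter, as in the parameter description above
// ...plus the bounding box, resolution, output table and database parameters shown for XYExtraction

XYExtractionTable extraction = new XYExtractionTable();
extraction.setConfiguration(config);
extraction.init();
extraction.compute();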