Lucio Lelii 2017-05-24 15:35:50 +00:00
parent 947c607030
commit 03efa271dc
70 changed files with 206 additions and 3798 deletions

.classpath

@@ -1,8 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" output="target/classes" path="src/main/java"/>
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER"/>
<classpathentry kind="output" path="target/classes"/>
</classpath>
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" output="target/classes" path="src/main/java"/>
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources"/>
<classpathentry kind="src" path="src/test/java"/>
<classpathentry kind="src" path="src/test/resources"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER"/>
<classpathentry kind="output" path="target/classes"/>
</classpath>

pom.xml

@@ -130,6 +130,18 @@
<artifactId>maxent-princeton</artifactId>
<version>3.3.3</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.0.13</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.11</version>
<scope>test</scope>
</dependency>
</dependencies>
<repositories>
<repository>
@@ -161,15 +173,6 @@
<build>
<plugins>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.1</version>
<configuration>
<source>1.6</source>
<target>1.6</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
@@ -184,7 +187,6 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>2.2</version>
<configuration>
<descriptors>
<descriptor>${distroDirectory}/descriptor.xml</descriptor>

PointsMapsCreator.java

@@ -3,7 +3,6 @@ package org.gcube.dataanalysis.geo.algorithms;
import java.util.ArrayList;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
@@ -58,30 +57,4 @@ public class PointsMapsCreator extends MapsCreator {
}
public static void main(String[] args) throws Exception {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./cfg/");
config.setGcubeScope("/gcube/devsec");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam(dburlParameterName, "jdbc:postgresql://geoserver-test.d4science-ii.research-infrastructures.eu/timeseriesgisdb");
config.setParam(dbuserParameterName, "postgres");
config.setParam(dbpasswordParameterName, "d4science2");
config.setParam(inputTableParameter, "occurrence_species_id_fb60ce42_1704_43f9_91a3_2df7d82b1b96");
config.setParam(xParameter, "decimallongitude");
config.setParam(yParameter, "decimallatitude");
config.setParam(infoParameter, "scientificname");
config.setParam("ServiceUserName", "gianpaolo.coro");
config.setParam(layerNameParameter, "Generic Species");
MapsCreator maps = new PointsMapsCreator();
maps.setConfiguration(config);
maps.init();
maps.compute();
}
}

RasterDataPublisher.java

@@ -12,7 +12,6 @@ import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.InterpolateTables;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalInfraAlgorithm;
import org.gcube.dataanalysis.executor.scripts.OSCommand;
import org.gcube.dataanalysis.geo.utils.GeospatialDataPublicationLevel;
@@ -103,7 +102,7 @@ public class RasterDataPublisher extends StandardLocalInfraAlgorithm{
String [] topicsListArr = new String[listTopics.size()];
topics = listTopics.toArray(topicsListArr);
boolean result = ThreddsPublisher.publishOnThredds(scope, username, newf.getAbsolutePath(), layerTitle, layerName, abstractField, topics, resolution,isprivate);
boolean result = ThreddsPublisher.publishOnThredds(username, newf.getAbsolutePath(), layerTitle, layerName, abstractField, topics, resolution,isprivate);
if (result) {
addOutputString("Created map name", layerTitle);

NetCDFDataExplorer.java

@@ -6,9 +6,10 @@ import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.utils.VectorOperations;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ucar.ma2.Array;
import ucar.ma2.ArrayByte;
@@ -40,7 +41,9 @@ public class NetCDFDataExplorer {
// http://thredds.research-infrastructures.eu:8080/thredds/catalog/public/netcdf/catalog.xml
public static String timePrefix = "time:";
private static final Logger LOGGER = LoggerFactory.getLogger(NetCDFDataExplorer.class);
public NetCDFDataExplorer(String openDapLink, String layer) {
calcZRange(openDapLink, layer);
}
@@ -49,19 +52,17 @@ public class NetCDFDataExplorer {
try {
List<Double> values = new ArrayList<Double>();
if (isGridDataset(openDapLink)) {
AnalysisLogger.getLogger().debug("Managing Grid File");
LOGGER.debug("Managing Grid File");
return manageGridDataset(layer, openDapLink, time, triplets);
}
/*
* else if (isPointDataset(openDapLink)) { AnalysisLogger.getLogger().debug("Managing Points File"); }
* else if (isPointDataset(openDapLink)) { LOGGER.debug("Managing Points File"); }
*/
else
AnalysisLogger.getLogger().debug("Warning: the NETCDF file is of an unknown type");
LOGGER.debug("Warning: the NETCDF file is of an unknown type");
return values;
} catch (Exception e) {
AnalysisLogger.getLogger().debug("ERROR: " + e.getMessage());
AnalysisLogger.getLogger().debug(e);
// e.printStackTrace();
LOGGER.error("ERROR",e);
return null;
}
}
@@ -76,8 +77,8 @@ public class NetCDFDataExplorer {
List<GridDatatype> gridTypes = gds.getGrids();
for (GridDatatype gdt : gridTypes) {
// AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getFullName());
AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getName());
// LOGGER.debug("Inside File - layer name: " + gdt.getFullName());
LOGGER.debug("Inside File - layer name: " + gdt.getName());
if (layer.equalsIgnoreCase(gdt.getName())) {
CoordinateAxis zAxis = gdt.getCoordinateSystem().getVerticalAxis();
minZ = zAxis.getMinValue();
@@ -87,9 +88,8 @@ public class NetCDFDataExplorer {
}
}
} catch (Exception e) {
e.printStackTrace();
AnalysisLogger.getLogger().debug("NetCDF Explorer Error:" + e.getLocalizedMessage());
AnalysisLogger.getLogger().debug(e);
LOGGER.error("NetCDF Explorer Error",e);
}
}
@@ -103,10 +103,10 @@ public class NetCDFDataExplorer {
List<GridDatatype> gridTypes = gds.getGrids();
for (GridDatatype gdt : gridTypes) {
AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getName() + " layer to find " + layer);
LOGGER.debug("Inside File - layer name: " + gdt.getName() + " layer to find " + layer);
// if the layer is an HTTP link then take the first innser layer
if (layer.equalsIgnoreCase(gdt.getName()) || layer.toLowerCase().startsWith("http:")) {
AnalysisLogger.getLogger().debug("Found layer " + layer + " inside file");
LOGGER.debug("Found layer " + layer + " inside file");
GridDatatype grid = gds.findGridDatatype(gdt.getName());
CoordinateAxis zAxis = gdt.getCoordinateSystem().getVerticalAxis();
CoordinateAxis xAxis = gdt.getCoordinateSystem().getXHorizAxis();
@@ -114,7 +114,7 @@ public class NetCDFDataExplorer {
double resolutionZ = 0;
try {
resolutionZ = Math.abs((double) (zAxis.getMaxValue() - zAxis.getMinValue()) / (double) zAxis.getShape()[0]);
AnalysisLogger.getLogger().debug("Zmin:" + zAxis.getMinValue() + " Zmax:" + zAxis.getMaxValue());
LOGGER.debug("Zmin:" + zAxis.getMinValue() + " Zmax:" + zAxis.getMaxValue());
} catch (Exception e) {
}
@@ -122,7 +122,7 @@ public class NetCDFDataExplorer {
int tsize = triplets.size();
long t01 = System.currentTimeMillis();
LatLonRect llr = null;
AnalysisLogger.getLogger().debug("Extracting subset...");
LOGGER.debug("Extracting subset...");
GridDatatype gdtsub = grid.makeSubset(new Range(time, time), null, llr, 1, 1, 1);
Array data = gdtsub.readVolumeData(time); // note order is t, z, y, x
int[] shapeD = data.getShape();
@@ -145,11 +145,11 @@ public class NetCDFDataExplorer {
double resolutionX = Math.abs((double) (xAxis.getMaxValue() - xAxis.getMinValue()) / (double) xD);
double resolutionY = Math.abs((double) (yAxis.getMaxValue() - yAxis.getMinValue()) / (double) yD);
AnalysisLogger.getLogger().debug("Shape: Z:" + zD + " X:" + xD + " Y:" + yD);
LOGGER.debug("Shape: Z:" + zD + " X:" + xD + " Y:" + yD);
AnalysisLogger.getLogger().debug("Layer Information Retrieval ELAPSED Time: " + (System.currentTimeMillis() - t01));
LOGGER.debug("Layer Information Retrieval ELAPSED Time: " + (System.currentTimeMillis() - t01));
int rank = data.getRank();
AnalysisLogger.getLogger().debug("Rank of the layer: " + rank);
LOGGER.debug("Rank of the layer: " + rank);
ArrayFloat.D3 data3Float = null;
ArrayDouble.D3 data3Double = null;
@@ -201,15 +201,15 @@ public class NetCDFDataExplorer {
// if (((xmax == 360) && (xmin == 0)) || ((xmax == 359.5) && (xmin == 0.5))) {
// if ((xmin>=0) || (ymin == -77.0104751586914 && ymax==89.94786834716797)) {
AnalysisLogger.getLogger().debug("X dimension: " + xD + " Xmin:" + xmin + " Xmax:" + xmax + " Xmaxidx:" + xmaxidx+" XRes: "+resolutionX);
AnalysisLogger.getLogger().debug("Y dimension: " + yD + " Ymin:" + ymin + " Ymax:" + ymax + " Ymaxidx:" + ymaxidx+" YRes: "+resolutionY);
LOGGER.debug("X dimension: " + xD + " Xmin:" + xmin + " Xmax:" + xmax + " Xmaxidx:" + xmaxidx+" XRes: "+resolutionX);
LOGGER.debug("Y dimension: " + yD + " Ymin:" + ymin + " Ymax:" + ymax + " Ymaxidx:" + ymaxidx+" YRes: "+resolutionY);
if ((xmin >= 0)) {
xmax = 180;
xmin = -180;
is0_360 = true;
}
AnalysisLogger.getLogger().debug("Assigning "+tsize+" grid elements to the NetCDF values");
LOGGER.debug("Assigning "+tsize+" grid elements to the NetCDF values");
for (int i = 0; i < tsize; i++) {
int zint = 0;
int xint = 0;
@@ -250,9 +250,9 @@ public class NetCDFDataExplorer {
x = x - inverseOrigin.getLongitude();
y = y - inverseOrigin.getLatitude();
if (i==0)
AnalysisLogger.getLogger().debug("bb: " + idxbb[0] +","+idxbb[1]+" origin: "+idxo[0]+","+idxo[1]+" middle "+xD/2+","+yD/2+" shift "+(idxo[0]-(xD/2))+" inverse shift on origin "+inverseOrigin);
LOGGER.debug("bb: " + idxbb[0] +","+idxbb[1]+" origin: "+idxo[0]+","+idxo[1]+" middle "+xD/2+","+yD/2+" shift "+(idxo[0]-(xD/2))+" inverse shift on origin "+inverseOrigin);
}catch(Exception e){
AnalysisLogger.getLogger().debug("Error getting x,y corrections "+e.getLocalizedMessage());
LOGGER.debug("Error getting x,y corrections "+e.getLocalizedMessage());
e.printStackTrace();
}
@@ -304,7 +304,7 @@ public class NetCDFDataExplorer {
else if (data2Long != null)
val = Double.valueOf(data2Long.get(yint, xint));
/*AnalysisLogger.getLogger().debug("Choice "+ (data3Float!=null)+","+
/*LOGGER.debug("Choice "+ (data3Float!=null)+","+
(data3Int!=null)+","+
(data3Double!=null)+","+
(data3Long!=null)+","+
@@ -313,9 +313,9 @@ public class NetCDFDataExplorer {
(data2Double!=null)+","+
(data2Long!=null));
*/
// AnalysisLogger.getLogger().debug("Assigning "+val+" to "+x+","+y+" ["+xint+","+yint+"]");
// AnalysisLogger.getLogger().debug("checking "+data2Float.get(yint, xint)+" vs ");
// try{AnalysisLogger.getLogger().debug("checking2 "+data2Float.get(xint,yint));}catch(Exception e){}
// LOGGER.debug("Assigning "+val+" to "+x+","+y+" ["+xint+","+yint+"]");
// LOGGER.debug("checking "+data2Float.get(yint, xint)+" vs ");
// try{LOGGER.debug("checking2 "+data2Float.get(xint,yint));}catch(Exception e){}
values.add(val);
@@ -334,16 +334,16 @@ public class NetCDFDataExplorer {
}
public static GridDatatype getGrid(String layer, String netcdffile) throws Exception{
AnalysisLogger.getLogger().debug("Opening File : " + netcdffile);
AnalysisLogger.getLogger().debug("Searching for layer: " + layer);
LOGGER.debug("Opening File : " + netcdffile);
LOGGER.debug("Searching for layer: " + layer);
GridDataset gds = ucar.nc2.dt.grid.GridDataset.open(netcdffile);
List<GridDatatype> gridTypes = gds.getGrids();
StringBuffer sb = new StringBuffer();
for (GridDatatype gdt : gridTypes) {
AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getName());
LOGGER.debug("Inside File - layer name: " + gdt.getName());
sb.append(gdt.getName()+" ");
if (layer.equals(gdt.getName())) {
AnalysisLogger.getLogger().debug("Found layer " + layer + " inside file");
LOGGER.debug("Found layer " + layer + " inside file");
GridDatatype grid = gds.findGridDatatype(gdt.getName());
return grid;
}
@@ -358,9 +358,9 @@ public class NetCDFDataExplorer {
GridDataset gds = ucar.nc2.dt.grid.GridDataset.open(filename);
List<GridDatatype> gridTypes = gds.getGrids();
for (GridDatatype gdt : gridTypes) {
AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getName());
LOGGER.debug("Inside File - layer name: " + gdt.getName());
if (layer.equalsIgnoreCase(gdt.getName())) {
AnalysisLogger.getLogger().debug("Found layer " + layer + " inside file");
LOGGER.debug("Found layer " + layer + " inside file");
GridDatatype grid = gds.findGridDatatype(gdt.getName());
GridCoordSystem gcs = grid.getCoordinateSystem();
long timeSteps = 0;
@@ -382,7 +382,7 @@ public class NetCDFDataExplorer {
zint = Math.abs((int) Math.round((z - zAxis.getMinValue()) / resolutionZ));
}
AnalysisLogger.getLogger().debug("Z index to take: " + zint);
LOGGER.debug("Z index to take: " + zint);
int[] xy = gcs.findXYindexFromLatLon(x, y, null);
for (int j = 0; j < timeSteps; j++) {
@@ -396,7 +396,7 @@ public class NetCDFDataExplorer {
valuesMap.put(timePrefix + date, Double.parseDouble("" + val));
}
} catch (Exception e) {
AnalysisLogger.getLogger().debug("Error in getting grid values in (" + x + "," + y + "," + z + "= with zint: " + zint + " resolution: " + resolutionZ + " and shape: " + zAxis.getShape()[0]);
LOGGER.debug("Error in getting grid values in (" + x + "," + y + "," + z + "= with zint: " + zint + " resolution: " + resolutionZ + " and shape: " + zAxis.getShape()[0]);
}
}
break;
@@ -417,7 +417,7 @@ public class NetCDFDataExplorer {
}
}
} catch (Exception ee) {
AnalysisLogger.getLogger().debug("NetCDFDataExplorer-> WARNING: Error in getting value: " + ee.getLocalizedMessage());
LOGGER.debug("NetCDFDataExplorer-> WARNING: Error in getting value: " + ee.getLocalizedMessage());
}
return val;
}
@@ -444,33 +444,33 @@ public class NetCDFDataExplorer {
else
break;
tolerance = tolerance + 0.25f;
AnalysisLogger.getLogger().debug("NetCDFDataExplorer-> -> tolerance = " + tolerance);
LOGGER.debug("NetCDFDataExplorer-> -> tolerance = " + tolerance);
}
if (iter != null) {
try {
while (iter.hasNext()) {
ucar.nc2.ft.PointFeature pf = iter.next();
AnalysisLogger.getLogger().debug("NetCDFDataExplorer-> -> EarthLoc: " + pf.getLocation());
AnalysisLogger.getLogger().debug("NetCDFDataExplorer-> -> EarthTime: " + pf.getObservationTime());
LOGGER.debug("NetCDFDataExplorer-> -> EarthLoc: " + pf.getLocation());
LOGGER.debug("NetCDFDataExplorer-> -> EarthTime: " + pf.getObservationTime());
StructureData sd = pf.getData();
List<Member> mems = sd.getMembers();
for (Member m : mems) {
String unit = m.getUnitsString();
if ((unit != null) && (unit.length() > 0)) {
AnalysisLogger.getLogger().debug("NetCDFDataExplorer-> -> description: " + m.getDescription());
AnalysisLogger.getLogger().debug("NetCDFDataExplorer-> -> data param: " + m.getDataParam());
AnalysisLogger.getLogger().debug("NetCDFDataExplorer-> -> name: " + m.getName());
AnalysisLogger.getLogger().debug("NetCDFDataExplorer-> -> unit: " + m.getUnitsString());
AnalysisLogger.getLogger().debug("NetCDFDataExplorer-> -> type: " + m.getDataType());
LOGGER.debug("NetCDFDataExplorer-> -> description: " + m.getDescription());
LOGGER.debug("NetCDFDataExplorer-> -> data param: " + m.getDataParam());
LOGGER.debug("NetCDFDataExplorer-> -> name: " + m.getName());
LOGGER.debug("NetCDFDataExplorer-> -> unit: " + m.getUnitsString());
LOGGER.debug("NetCDFDataExplorer-> -> type: " + m.getDataType());
Array arr = sd.getArray(m.getName());
AnalysisLogger.getLogger().debug("NetCDFDataExplorer-> -> is Time: " + m.getDataType());
LOGGER.debug("NetCDFDataExplorer-> -> is Time: " + m.getDataType());
Double val = takeFirstDouble(arr);
AnalysisLogger.getLogger().debug("NetCDFDataExplorer-> -> extracted value: " + val);
LOGGER.debug("NetCDFDataExplorer-> -> extracted value: " + val);
}
}
AnalysisLogger.getLogger().debug("NetCDFDataExplorer-> -> EarthTime: ");
LOGGER.debug("NetCDFDataExplorer-> -> EarthTime: ");
}
} finally {
iter.finish();
@@ -484,12 +484,12 @@ public class NetCDFDataExplorer {
// A GridDatatype is like a specialized Variable that explicitly handles X,Y,Z,T dimensions
public static boolean isGridDataset(String filename) {
try {
AnalysisLogger.getLogger().debug("Analyzing file " + filename);
LOGGER.debug("Analyzing file " + filename);
Formatter errlog = new Formatter();
FeatureDataset fdataset = FeatureDatasetFactoryManager.open(FeatureType.GRID, filename, null, errlog);
if (fdataset == null) {
// System.out.printf("GRID Parse failed --> %s\n", errlog);
AnalysisLogger.getLogger().debug("NetCDFDataExplorer-> NOT GRID");
LOGGER.debug("NetCDFDataExplorer-> NOT GRID");
return false;
} else
return true;
@@ -505,7 +505,7 @@ public class NetCDFDataExplorer {
Formatter errlog = new Formatter();
FeatureDataset fdataset = FeatureDatasetFactoryManager.open(FeatureType.POINT, filename, null, errlog);
if (fdataset == null) {
AnalysisLogger.getLogger().debug("NetCDFDataExplorer-> NOT POINT");
LOGGER.debug("NetCDFDataExplorer-> NOT POINT");
return false;
} else
return true;
@@ -524,7 +524,7 @@ public class NetCDFDataExplorer {
if (fdataset == null) {
// System.out.printf(fts[i]+": Parse failed --> %s\n",errlog);
} else {
AnalysisLogger.getLogger().debug("NetCDFDataExplorer-> " + fts[i] + " OK!");
LOGGER.debug("NetCDFDataExplorer-> " + fts[i] + " OK!");
isdataset = true;
}
}

GenericLayerMetadata.java

@@ -1,6 +1,5 @@
package org.gcube.dataanalysis.geo.meta;
import it.geosolutions.geonetwork.GNClient;
import it.geosolutions.geonetwork.util.GNInsertConfiguration;
import java.io.File;
@@ -20,7 +19,6 @@ import java.util.UUID;
import javax.xml.bind.JAXBException;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.spatial.data.geonetwork.GeoNetwork;
import org.gcube.spatial.data.geonetwork.GeoNetworkAdministration;
import org.gcube.spatial.data.geonetwork.LoginLevel;
@@ -68,9 +66,13 @@ import org.opengis.metadata.spatial.GeometricObjectType;
import org.opengis.metadata.spatial.SpatialRepresentationType;
import org.opengis.metadata.spatial.TopologyLevel;
import org.opengis.util.InternationalString;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class GenericLayerMetadata {
private static final Logger LOGGER = LoggerFactory.getLogger(GenericLayerMetadata.class);
private String geonetworkUrl = "http://geoserver-dev2.d4science-ii.research-infrastructures.eu/geonetwork/";
private String geoserverUrl = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geonetwork/";
private String geonetworkUser = "admin";
@@ -368,7 +370,7 @@ public class GenericLayerMetadata {
ArrayList<DefaultUsage> usages = new ArrayList<DefaultUsage>(Arrays.asList(usage));
//build categories by guessing on the filename
List<TopicCategory> categories = guessTopicCategory(categoryTypes);
AnalysisLogger.getLogger().debug("Guessed Topics: "+categories);
LOGGER.debug("Guessed Topics: "+categories);
// Spatial Rapresentation Info
DefaultGeometricObjects geoObjs = new DefaultGeometricObjects();
geoObjs.setGeometricObjectType(GeometricObjectType.COMPLEX);
@@ -562,7 +564,7 @@ public class GenericLayerMetadata {
ArrayList<DefaultUsage> usages = new ArrayList<DefaultUsage>(Arrays.asList(usage));
//build categories by guessing on the filename
List<TopicCategory> categories = guessTopicCategory(categoryTypes);
AnalysisLogger.getLogger().debug("Guessed Topics: "+categories);
LOGGER.debug("Guessed Topics: "+categories);
// Spatial Rapresentation Info
DefaultGeometricObjects geoObjs = new DefaultGeometricObjects();
geoObjs.setGeometricObjectType(GeometricObjectType.COMPLEX);
@@ -599,7 +601,7 @@ public class GenericLayerMetadata {
String estdescription = getDescription(url);
String estProtocol = getProtocol(url);
String estName = getName(title, abstractField, estProtocol, url);
AnalysisLogger.getLogger().debug("URL:" + url + "\nDescription:" + estdescription + "\nName:" + estName + "\nProtocol:" + estProtocol);
LOGGER.debug("URL:" + url + "\nDescription:" + estdescription + "\nName:" + estName + "\nProtocol:" + estProtocol);
onlineres.setDescription(new DefaultInternationalString(estdescription));
onlineres.setName(estName);
onlineres.setProtocol(estProtocol);
@@ -668,14 +670,15 @@ public class GenericLayerMetadata {
// Metadata Obj:
DefaultMetadata meta = new DefaultMetadata(party, sourceGenerationDate, ident);
meta.setFileIdentifier(UUID.randomUUID().toString());
String id = UUID.randomUUID().toString();
meta.setFileIdentifier(id);
meta.getSpatialRepresentationInfo().add(spatial);
meta.setDistributionInfo(distribution);
meta.getMetadataConstraints().add(constraints);
meta.getDataQualityInfo().add(processQuality);
meta.setLanguage(Locale.ENGLISH);
// AnalysisLogger.getLogger().debug(meta);
LOGGER.debug("id of generated metadata is {}",id);
GeoNetworkAdministration readerScope = GeoNetwork.get();
@@ -686,11 +689,15 @@ public class GenericLayerMetadata {
GNInsertConfiguration configuration = readerScope.getCurrentUserConfiguration("datasets", "_none_");
if (isprivate)
readerScope.insertMetadata(configuration, meta);
else
readerScope.insertAndPromoteMetadata(configuration, meta);
//old command
/*
GNClient client = new GNClient(geonetworkUrl);
@@ -783,7 +790,7 @@ public class GenericLayerMetadata {
ArrayList<DefaultUsage> usages = new ArrayList<DefaultUsage>(Arrays.asList(usage));
//build categories by guessing on the filename
List<TopicCategory> categories = guessTopicCategory(categoryTypes);
AnalysisLogger.getLogger().debug("Guessed Topics: "+categories);
LOGGER.debug("Guessed Topics: "+categories);
// Spatial Rapresentation Info
DefaultGeometricObjects geoObjs = new DefaultGeometricObjects();
geoObjs.setGeometricObjectType(GeometricObjectType.COMPLEX);
@@ -930,7 +937,7 @@ public class GenericLayerMetadata {
if (abstractTitle.matches(".+: ")) {
String variablename = title.substring(title.indexOf(":"));
AnalysisLogger.getLogger().debug("detected variable " + variablename);
LOGGER.debug("detected variable " + variablename);
return variablename;
} else {
if (url.contains("catalog.xml")) {
@@ -967,7 +974,7 @@ public class GenericLayerMetadata {
int column = -1;
if ((column = layerName.indexOf(":")) > 0)
layerName = layerName.substring(column + 1);
AnalysisLogger.getLogger().debug("Layer Name: " + layerName);
LOGGER.debug("Layer Name: " + layerName);
return layerName;
} else
return title;

RetrieveOGCLinks.java

@@ -1,87 +0,0 @@
package org.gcube.dataanalysis.geo.test.infra;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.infrastructure.GeoNetworkInspector;
import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
import org.opengis.metadata.Metadata;
public class RetrieveOGCLinks {
static String scope = "/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab";
public static void main(String[] args) throws Exception{
AlgorithmConfiguration config = new AlgorithmConfiguration();
AnalysisLogger.setLogger("./cfg/ALog.properties");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "gcube");
config.setParam("DatabasePassword", "d4science2");
config.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
config.setGcubeScope("/gcube/devsec/devVRE");
MatrixExtractor extractor = new MatrixExtractor(config);
extractor.getConnector("http://geoserver.d4science-ii.research-infrastructures.eu/geoserver/ows?service=WFS&version=1.0.0&request=GetFeature&typeName=lluidiamaculata20121218223748535cet&format=json&maxfeatures=1", 0.5);
}
public static void main1(String[] args) throws Exception{
AnalysisLogger.setLogger("./cfg/ALog.properties");
GeoNetworkInspector gnInspector = new GeoNetworkInspector();
gnInspector.setScope(scope);
BufferedReader br = new BufferedReader(new FileReader(new File("AquamapsSpecies.txt")));
String line = br.readLine();
BufferedWriter bw = new BufferedWriter(new FileWriter(new File("AquamapsSpeciesLinks.csv")));
System.out.println("speciesname,WMS,WFS,algorithm,abstract");
bw.write("speciesname,WMS,WFS,algorithm,abstract\n");
while (line!=null){
String speciesname = line;
List<Metadata> metadts = gnInspector.getAllGNInfobyTitle(speciesname, "1");
for (Metadata meta:metadts){
String abstractS = ""+meta.getIdentificationInfo().iterator().next().getAbstract();
String WFS = gnInspector.getWFSLink(meta);
String WMS = gnInspector.getWFSLink(meta);
String algoritm = "OTHER";
if (abstractS.contains("AquaMaps NativeRange2050 algorithm")){
algoritm = "NATIVE 2050";
}
else if (abstractS.contains("AquaMaps SuitableRange algorithm")){
algoritm = "SUITABLE";
}
else if (abstractS.contains("AquaMaps SuitableRange2050 algorithm")){
algoritm = "SUITABLE 2050";
}
else if (abstractS.contains("AquaMaps NativeRange algorithm")){
algoritm = "NATIVE";
}
String outstring = speciesname+",\""+WMS+"\",\""+WFS+"\","+algoritm+",\""+abstractS.replace("\"", "")+"\"";
// System.out.println(abstractS);
System.out.println(outstring);
bw.write(outstring+"\n");
//System.out.println("WFS = "+gnInspector.getWFSLink(meta));
//System.out.println("ABSTRACT = "+meta.getIdentificationInfo().iterator().next().getAbstract());
}
line = br.readLine();
}
bw.close();
br.close();
}
}

TestESRIGRIDExtraction.java

@@ -1,69 +0,0 @@
package org.gcube.dataanalysis.geo.test.infra;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
public class TestESRIGRIDExtraction {
// static AlgorithmConfiguration[] configs = { testXYExtractionNetCDF(),testXYExtractionAquaMaps(),testXYExtractionTable(),testXYExtractionTable2(),testDirectExtraction()};
// static AlgorithmConfiguration[] configs = { testXYExtractionTable2()};
// static AlgorithmConfiguration[] configs = { testDirectExtraction()};
// static AlgorithmConfiguration[] configs = { testXYExtractionAquaMaps()};
// static AlgorithmConfiguration[] configs = { testXYExtractionGeotermia()};
// static AlgorithmConfiguration[] configs = { testXYExtractionFAO()};
static AlgorithmConfiguration[] configs = { testXYExtractionNetCDF()};
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
for (int i = 0; i < configs.length; i++) {
AnalysisLogger.getLogger().debug("Executing: "+configs[i].getAgent());
List<ComputationalAgent> trans = null;
trans = TransducerersFactory.getTransducerers(configs[i]);
trans.get(0).init();
Regressor.process(trans.get(0));
StatisticalType st = trans.get(0).getOutput();
AnalysisLogger.getLogger().debug("ST:" + st);
trans = null;
}
}
private static AlgorithmConfiguration testXYExtractionNetCDF() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("ESRI_GRID_EXTRACTION");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
// config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setGcubeScope("/gcube/devsec/devVRE");
config.setParam("Layer","fc9ac2f4-a2bd-43d1-a361-ac67c5ceac31");
config.setParam("Z","0");
config.setParam("TimeIndex","0");
config.setParam("BBox_LowerLeftLat","-60");
config.setParam("BBox_LowerLeftLong","-50");
config.setParam("BBox_UpperRightLat","60");
config.setParam("BBox_UpperRightLong","50");
config.setParam("XResolution","1");
config.setParam("YResolution","1");
return config;
}
}

TestEstimateFishingActivity.java

@@ -1,57 +0,0 @@
package org.gcube.dataanalysis.geo.test.infra;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
public class TestEstimateFishingActivity {
static AlgorithmConfiguration[] configs = { testGridConversion()};
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
for (int i = 0; i < configs.length; i++) {
AnalysisLogger.getLogger().debug("Executing: "+configs[i].getAgent());
List<ComputationalAgent> trans = null;
trans = TransducerersFactory.getTransducerers(configs[i]);
trans.get(0).init();
Regressor.process(trans.get(0));
StatisticalType st = trans.get(0).getOutput();
AnalysisLogger.getLogger().debug("ST:" + st);
trans = null;
}
}
private static AlgorithmConfiguration testGridConversion() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("ESTIMATE_FISHING_ACTIVITY");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","utente");
config.setParam("DatabasePassword","d4science");
config.setParam("DatabaseURL","jdbc:postgresql://statistical-manager.d.d4science.org/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/gcube/devsec/devVRE");
config.setParam("InputTable", "generic_idb7822ad1_f66c_444c_9c07_2698b824bab3");
config.setParam("VesselsIDColumn", "vessel_id");
config.setParam("VesselsSpeedsColumn", "speed");
config.setParam("VesselsTimestampsColumn", "datetime");
config.setParam("VesselsLatitudesColumn", "y");
config.setParam("VesselsLongitudesColumn", "x");
return config;
}
}

TestEstimateMonthlyFishingEffort.java

@@ -1,58 +0,0 @@
package org.gcube.dataanalysis.geo.test.infra;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
public class TestEstimateMonthlyFishingEffort {
static AlgorithmConfiguration[] configs = { testGridConversion()};
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
for (int i = 0; i < configs.length; i++) {
AnalysisLogger.getLogger().debug("Executing: "+configs[i].getAgent());
List<ComputationalAgent> trans = null;
trans = TransducerersFactory.getTransducerers(configs[i]);
trans.get(0).init();
Regressor.process(trans.get(0));
StatisticalType st = trans.get(0).getOutput();
AnalysisLogger.getLogger().debug("ST:" + st);
trans = null;
}
}
private static AlgorithmConfiguration testGridConversion() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("ESTIMATE_MONTHLY_FISHING_EFFORT");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","utente");
config.setParam("DatabasePassword","d4science");
config.setParam("DatabaseURL","jdbc:postgresql://statistical-manager.d.d4science.org/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/gcube/devsec/devVRE");
config.setParam("InputTable", "code_0b7c500bfaca49f2a0ab4fd3c1647074");
config.setParam("VesselsIDColumn", "vessel_id");
config.setParam("VesselsTimestampsColumn", "datetime");
config.setParam("VesselsLatitudesColumn", "y");
config.setParam("VesselsLongitudesColumn", "x");
config.setParam("VesselsActivityHoursColumn", "activity_hours");
config.setParam("VesselsActivityClassificationColumn", "activity_class_speed");
return config;
}
}

TestExtraction.java

@@ -1,249 +0,0 @@
package org.gcube.dataanalysis.geo.test.infra;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
public class TestExtraction {
// static AlgorithmConfiguration[] configs = { testXYExtractionNetCDF(),testXYExtractionAquaMaps(),testXYExtractionTable(),testXYExtractionTable2(),testDirectExtraction()};
// static AlgorithmConfiguration[] configs = { testXYExtractionTable2()};
// static AlgorithmConfiguration[] configs = { testDirectExtraction()};
// static AlgorithmConfiguration[] configs = { testXYExtractionAquaMaps()};
// static AlgorithmConfiguration[] configs = { testXYExtractionGeotermia()};
// static AlgorithmConfiguration[] configs = { testXYExtractionFAO()};
static AlgorithmConfiguration[] configs = { testXYExtractionNetCDF()};
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
for (int i = 0; i < configs.length; i++) {
AnalysisLogger.getLogger().debug("Executing: "+configs[i].getAgent());
List<ComputationalAgent> trans = null;
trans = TransducerersFactory.getTransducerers(configs[i]);
trans.get(0).init();
Regressor.process(trans.get(0));
StatisticalType st = trans.get(0).getOutput();
AnalysisLogger.getLogger().debug("ST:" + st);
trans = null;
}
}
private static AlgorithmConfiguration testXYExtractionProd() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("XYEXTRACTOR");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setParam("Layer","0aac424b-5f5b-4fa6-97d6-4b4deee62b97");
config.setParam("Z","0");
config.setParam("TimeIndex","0");
config.setParam("BBox_LowerLeftLat","-60");
config.setParam("BBox_LowerLeftLong","-50");
config.setParam("BBox_UpperRightLat","60");
config.setParam("BBox_UpperRightLong","50");
config.setParam("XResolution","0.5");
config.setParam("YResolution","0.5");
config.setParam("OutputTableName","testextractionprod");
config.setParam("OutputTableLabel","testextractionprod");
return config;
}
private static AlgorithmConfiguration testXYExtractionGeotermia() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("XYEXTRACTOR");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setParam("Layer","http://repoigg.services.iit.cnr.it:8080/geoserver/IGG/ows?service=WFS&version=1.0.0&request=GetFeature&typeName=IGG:area_temp_1000&maxFeatures=50");
config.setParam("Z","-1000");
config.setParam("TimeIndex","0");
config.setParam("BBox_LowerLeftLat","34.46");
config.setParam("BBox_LowerLeftLong","5.85");
config.setParam("BBox_UpperRightLat","49");
config.setParam("BBox_UpperRightLong","21.41");
config.setParam("XResolution","0.01");
config.setParam("YResolution","0.01");
config.setParam("OutputTableName","testextractiongeotermia");
config.setParam("OutputTableLabel","testextractiongeotermia");
return config;
}
private static AlgorithmConfiguration testXYExtractionFAO() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("XYEXTRACTOR");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/gcube/devsec/devVRE");
config.setParam("Layer","20c06241-f00f-4cb0-82a3-4e5ec97a0d0a");
config.setParam("Z","0");
config.setParam("TimeIndex","0");
config.setParam("BBox_LowerLeftLat","-90");
config.setParam("BBox_LowerLeftLong","-180");
config.setParam("BBox_UpperRightLat","90");
config.setParam("BBox_UpperRightLong","180");
config.setParam("XResolution","0.2");
config.setParam("YResolution","0.2");
config.setParam("OutputTableName","testextractionfao");
config.setParam("OutputTableLabel","testextractionfao");
return config;
}
private static AlgorithmConfiguration testXYExtractionNetCDF() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("XYEXTRACTOR");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setParam("Layer","c565e32c-c5b3-4964-b44f-06dc620563e9");
config.setParam("Z","0");
config.setParam("TimeIndex","0");
config.setParam("BBox_LowerLeftLat","-60");
config.setParam("BBox_LowerLeftLong","-50");
config.setParam("BBox_UpperRightLat","60");
config.setParam("BBox_UpperRightLong","50");
config.setParam("XResolution","0.5");
config.setParam("YResolution","0.5");
config.setParam("OutputTableName","testextraction2");
config.setParam("OutputTableLabel","testextraction2");
return config;
}
private static AlgorithmConfiguration testDirectExtraction() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("XYEXTRACTOR");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setParam("Layer","https://dl.dropboxusercontent.com/u/12809149/geoserver-GetCoverage.image.asc");
config.setParam("Z","0");
config.setParam("TimeIndex","0");
config.setParam("BBox_LowerLeftLat","-60");
config.setParam("BBox_LowerLeftLong","-50");
config.setParam("BBox_UpperRightLat","60");
config.setParam("BBox_UpperRightLong","50");
config.setParam("XResolution","0.5");
config.setParam("YResolution","0.5");
config.setParam("OutputTableName","testextractiondirect");
config.setParam("OutputTableLabel","testextractiondirect");
return config;
}
private static AlgorithmConfiguration testXYExtractionAquaMaps() {
AlgorithmConfiguration config = testXYExtractionNetCDF();
config.setParam("Layer","04e61cb8-3c32-47fe-823c-80ac3d417a0b");
config.setParam("OutputTableName","testextractionaquamaps");
return config;
}
private static AlgorithmConfiguration testXYExtractionTable() {
AlgorithmConfiguration config = testXYExtractionNetCDF();
config.setAgent("XYEXTRACTOR_TABLE");
config.setParam("OutputTableName","testextractiontable");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
/*
config.setParam(TableMatrixRepresentation.tableNameParameter, "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "datetime");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "speed");
config.setParam(TableMatrixRepresentation.filterParameter, "speed<2");
*/
config.setParam(TableMatrixRepresentation.tableNameParameter, "occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "decimallongitude");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "decimallatitude");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, " ");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "decimallatitude");
config.setParam(TableMatrixRepresentation.filterParameter, " ");
return config;
}
private static AlgorithmConfiguration testXYExtractionTable2() {
AlgorithmConfiguration config = testXYExtractionNetCDF();
config.setAgent("XYEXTRACTOR_TABLE");
config.setParam("OutputTableName","testextractiontable2");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
/*
config.setParam(TableMatrixRepresentation.tableNameParameter, "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "datetime");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "speed");
config.setParam(TableMatrixRepresentation.filterParameter, "speed<2");
*/
config.setParam(TableMatrixRepresentation.tableNameParameter, "occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "decimallongitude");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "decimallatitude");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "modified");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "decimallatitude");
config.setParam(TableMatrixRepresentation.filterParameter, " ");
config.setParam("Z","0");
config.setParam("TimeIndex","1");
return config;
}
}

TestGridCWPConversion.java

@@ -1,54 +0,0 @@
package org.gcube.dataanalysis.geo.test.infra;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
public class TestGridCWPConversion {
static AlgorithmConfiguration[] configs = { testGridConversion()};
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
for (int i = 0; i < configs.length; i++) {
AnalysisLogger.getLogger().debug("Executing: "+configs[i].getAgent());
List<ComputationalAgent> trans = null;
trans = TransducerersFactory.getTransducerers(configs[i]);
trans.get(0).init();
Regressor.process(trans.get(0));
StatisticalType st = trans.get(0).getOutput();
AnalysisLogger.getLogger().debug("ST:" + st);
trans = null;
}
}
private static AlgorithmConfiguration testGridConversion() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("GRID_CWP_TO_COORDINATES");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","utente");
config.setParam("DatabasePassword","d4science");
config.setParam("DatabaseURL","jdbc:postgresql://statistical-manager.d.d4science.org/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/gcube/devsec/devVRE");
config.setParam("InputTable", "timeseries_idc9450f74_7791_4077_a0a7_9cba4da0c93f");
config.setParam("ColumnWithCodes", "grid");
config.setParam("OutputTableName", "cwp_t2");
return config;
}
}

TestMaxEnt.java

@@ -1,92 +0,0 @@
package org.gcube.dataanalysis.geo.test.infra;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
public class TestMaxEnt {
static AlgorithmConfiguration[] configs = { testMaxentTemperature()};
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
for (int i = 0; i < configs.length; i++) {
AnalysisLogger.getLogger().debug("Executing: "+configs[i].getAgent());
List<ComputationalAgent> trans = null;
trans = TransducerersFactory.getTransducerers(configs[i]);
trans.get(0).init();
Regressor.process(trans.get(0));
StatisticalType st = trans.get(0).getOutput();
AnalysisLogger.getLogger().debug("ST:" + st);
trans = null;
}
}
private static AlgorithmConfiguration testMaxentTemperature() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("MAX_ENT_NICHE_MODELLING");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/gcube/devsec/devVRE");
config.setParam("OutputTableName","maxenttest");
config.setParam("OutputTableLabel","maxenttest");
config.setParam("SpeciesName","testsspecies");
config.setParam("OccurrencesTable","occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
config.setParam("LongitudeColumn","decimallongitude");
config.setParam("LatitudeColumn","decimallatitude");
config.setParam("ScientificNameColumn","scientificname");
String sep=AlgorithmConfiguration.getListSeparator();
// config.setParam("Layers","dfd1bad2-ab00-42ac-8bb2-46a17162f509"+sep+"23646f93-23a8-4be4-974e-aee6bebe1707");
//config.setParam("Layers","94ea5767-ae76-41dc-be87-f9a0bdc96419");//temperature 99-09 2D
// config.setParam("Layers","23646f93-23a8-4be4-974e-aee6bebe1707");//ph
// config.setParam("Layers","fc9ac2f4-a2bd-43d1-a361-ac67c5ceac31");//temperature
config.setParam("Layers","http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver/ows?service=wfs&version=1.0.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&TYPENAME=aquamaps:worldborders");
config.setParam("MaxIterations","10000");
config.setParam("DefaultPrevalence","1");
config.setParam("Z","0");
config.setParam("TimeIndex","0");
config.setParam("BBox_LowerLeftLong","-180");
config.setParam("BBox_UpperRightLong","180");
config.setParam("BBox_LowerLeftLat","-90");
config.setParam("BBox_UpperRightLat","90");
/*
config.setParam("BBox_LowerLeftLong","-60");
config.setParam("BBox_UpperRightLong","60");
config.setParam("BBox_LowerLeftLat","-10");
config.setParam("BBox_UpperRightLat","10");
*/
// config.setParam("XResolution","0.5");
// config.setParam("YResolution","0.5");
config.setParam("XResolution","1");
config.setParam("YResolution","1");
return config;
}
}

TestOccurrenceEnrichment.java

@@ -1,149 +0,0 @@
package org.gcube.dataanalysis.geo.test.infra;
import java.util.ArrayList;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
public class TestOccurrenceEnrichment {
// static AlgorithmConfiguration[] configs = { testOccEnrichment(), testOccEnrichmentWFS()};
static AlgorithmConfiguration[] configs = { testOccEnrichmentWFSFAO()};
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
for (int i = 0; i < configs.length; i++) {
AnalysisLogger.getLogger().debug("Executing: "+configs[i].getAgent());
List<ComputationalAgent> trans = null;
trans = TransducerersFactory.getTransducerers(configs[i]);
trans.get(0).init();
Regressor.process(trans.get(0));
StatisticalType st = trans.get(0).getOutput();
AnalysisLogger.getLogger().debug("ST:" + st);
trans = null;
}
}
private static AlgorithmConfiguration testOccEnrichment() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("OCCURRENCE_ENRICHMENT");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setParam("OccurrenceTable","occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
config.setParam("LongitudeColumn","decimallongitude");
config.setParam("LatitudeColumn","decimallatitude");
config.setParam("ScientificNameColumn","scientificname");
config.setParam("TimeColumn","eventdate");
config.setParam("OptionalFilter","");
config.setParam("Resolution","0.5");
config.setParam("OutputTableDBName","testenrichment");
config.setParam("OutputTableName","testenrichment");
String sep=AlgorithmConfiguration.getListSeparator();
// config.setParam("Layers","http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/global-reanalysis-phys-001-004-b-ref-fr-mjm95-gridt__ENVIRONMENT_OCEANS_ELEVATION_1366210702774.nc"+sep+"4d597da9-dbfa-4a65-9de6-9bbff69eac19"+sep+"2c2304d1-681a-4f3a-8409-e8cdb5ed447f");
//ASC file
config.setParam("Layers","http://goo.gl/s6fOfS");
config.setParam("Layers","https://dl.dropboxusercontent.com/u/12809149/test1.tiff");
// config.setParam("Layers","https://dl.dropboxusercontent.com/u/12809149/geoserver-GetCoverage.image.asc");
//NETCDF http - cannot work
// config.setParam("Layers","http://goo.gl/qXtqiY");
// config.setParam("Layers","https://dl.dropboxusercontent.com/u/12809149/geoserver-GetCoverage.image.tiff");
// config.setParam("Layers","https://dl.dropboxusercontent.com/u/12809149/TrueMarble.tif");
// config.setParam("Layers","http://goo.gl/l4tEmd");
// config.setParam("Layers","8f5d883f-95bf-4b7c-8252-aaf0b2e6fd81"+sep+"4d597da9-dbfa-4a65-9de6-9bbff69eac19"+sep+"2c2304d1-681a-4f3a-8409-e8cdb5ed447f");
config.setParam("FeaturesNames","temperature"+sep+"chlorophyll"+sep+"ph");
// config.setParam("Layers","4d597da9-dbfa-4a65-9de6-9bbff69eac19"+sep+"2c2304d1-681a-4f3a-8409-e8cdb5ed447f");
// config.setParam("FeaturesNames","chlorophyll"+sep+"ph");
return config;
}
private static AlgorithmConfiguration testOccEnrichmentWFSFAO() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("OCCURRENCE_ENRICHMENT");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
config.setGcubeScope("/gcube/devsec/devVRE");
config.setParam("OccurrenceTable","occurrence_carch");
config.setParam("LongitudeColumn","decimallongitude");
config.setParam("LatitudeColumn","decimallatitude");
config.setParam("ScientificNameColumn","scientificname");
config.setParam("TimeColumn","eventdate");
config.setParam("OptionalFilter","");
config.setParam("Resolution","0.5");
config.setParam("OutputTableDBName","testenrichmentwpsfao");
config.setParam("OutputTableName","testenrichmentwpsfao");
String sep=AlgorithmConfiguration.getListSeparator();
//WFS: carcharodon
config.setParam("Layers"," fao-species-map-wsh");
return config;
}
private static AlgorithmConfiguration testOccEnrichmentWFS() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("OCCURRENCE_ENRICHMENT");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
config.setGcubeScope("/gcube/devsec/devVRE");
config.setParam("OccurrenceTable","occurrence_carch");
config.setParam("LongitudeColumn","decimallongitude");
config.setParam("LatitudeColumn","decimallatitude");
config.setParam("ScientificNameColumn","scientificname");
config.setParam("TimeColumn","eventdate");
config.setParam("OptionalFilter","");
config.setParam("Resolution","0.5");
config.setParam("OutputTableDBName","testenrichmentwps");
config.setParam("OutputTableName","testenrichmentwps");
String sep=AlgorithmConfiguration.getListSeparator();
//WFS: carcharodon
config.setParam("Layers","b8a17d86-c62f-4e73-b5c9-bdb3366015c9");
return config;
}
}

TestSHAPEFILEPUBLISHER.java

@@ -1,62 +0,0 @@
package org.gcube.dataanalysis.geo.test.infra;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
public class TestSHAPEFILEPUBLISHER {
static AlgorithmConfiguration[] configs = { testSFImporter()};
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
for (int i = 0; i < configs.length; i++) {
AnalysisLogger.getLogger().debug("Executing: "+configs[i].getAgent());
List<ComputationalAgent> trans = null;
trans = TransducerersFactory.getTransducerers(configs[i]);
trans.get(0).init();
Regressor.process(trans.get(0));
StatisticalType st = trans.get(0).getOutput();
AnalysisLogger.getLogger().debug("ST:" + st);
trans = null;
}
}
private static AlgorithmConfiguration testSFImporter() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("SHAPEFILE_PUBLISHER");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DBUser","postgres");
config.setParam("DBPassword","d4science2");
config.setParam("DBUrl","jdbc:postgresql://geoserver-test.d4science-ii.research-infrastructures.eu:5432/timeseriesgisdb");
config.setParam("driver","org.postgresql.Driver");
// config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setGcubeScope("/gcube/devsec/devVRE");
config.setParam("MapTitle","Test local shapefile");
config.setParam("MapAbstract","A local test");
config.setParam("ShapeFileZip","shapefiletest.zip");
config.setParam("ShapeFileName","shapefile2.shp");
config.setParam("Topics","test|shapefile");
config.setParam("PublicationLevel","PUBLIC");
config.setParam("ServiceUserName","gianpaolo.coro");
return config;
}
}

View File

@@ -1,211 +0,0 @@
package org.gcube.dataanalysis.geo.test.infra;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.ecoengine.transducers.TimeSeriesAnalysis;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
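/**
 * Manual test harness for the TIMEEXTRACTION transducer. Each configuration method below
 * targets a different source (a temperature layer, an AquaMaps layer, a NetCDF wind layer,
 * or a statistical-manager table); the configs array selects which ones are executed.
 */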
public class TestTimeExtraction {
// static AlgorithmConfiguration[] configs = { testTimeExtractionTable(), testTimeExtractionNetCDF(),testTimeExtractionTable2()};
// static AlgorithmConfiguration[] configs = { testTimeExtractionNetCDF()};
// static AlgorithmConfiguration[] configs = { testTExtractionAquamaps()};
static AlgorithmConfiguration[] configs = { testTExtractionTemperature()};
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
TimeSeriesAnalysis.display=true;
for (int i = 0; i < configs.length; i++) {
AnalysisLogger.getLogger().debug("Executing: "+configs[i].getAgent());
List<ComputationalAgent> trans = null;
trans = TransducerersFactory.getTransducerers(configs[i]);
trans.get(0).init();
Regressor.process(trans.get(0));
StatisticalType st = trans.get(0).getOutput();
AnalysisLogger.getLogger().debug("ST:" + st);
trans = null;
}
}
private static AlgorithmConfiguration testTExtractionTemperature() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIMEEXTRACTION");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/gcube/devsec/devVRE");
config.setParam("Layer","b875403f-1e63-44e1-a1c0-2296d3e147a6");
config.setParam("OutputTableName","testtextractiontemp2");
config.setParam("OutputTableLabel","testtextractiontemp2");
config.setParam("X","0");
config.setParam("Y","0");
config.setParam("Resolution","0.5");
config.setParam("Z","0");
config.setParam("SamplingFreq","-1");
// config.setParam("MinFrequency","-1");
// config.setParam("MaxFrequency","-1");
// config.setParam("FFTSamples","100");
return config;
}
private static AlgorithmConfiguration testTExtractionAquamaps() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIMEEXTRACTION");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/gcube/devsec/devVRE");
config.setParam("Layer","40198411-9ceb-420f-8f39-a7e1b8128d6b");
config.setParam("OutputTableName","testtextractionaquamaps");
config.setParam("OutputTableLabel","testtextractionaquamaps");
config.setParam("X","121");
config.setParam("Y","-4");
config.setParam("Resolution","0.5");
config.setParam("Z","0");
config.setParam("SamplingFreq","-1");
config.setParam("MinFrequency","-1");
config.setParam("MaxFrequency","-1");
config.setParam("FrequencyError","-1");
return config;
}
private static AlgorithmConfiguration testTimeExtractionNetCDF() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIMEEXTRACTION");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
// config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setGcubeScope("/gcube/devsec/devVRE");
// config.setParam("Layer","c565e32c-c5b3-4964-b44f-06dc620563e9");
// config.setParam("Layer","dffa504b-dbc8-4553-896e-002549f8f5d3");
//wind
config.setParam("Layer","21715b2e-28de-4646-acce-d4f16b59d6d0");
config.setParam("OutputTableName","testtimeextraction");
config.setParam("OutputTableLabel","testtimeextraction");
config.setParam("Z","0");
config.setParam("X","0");
config.setParam("Y","0");
config.setParam("Resolution","0.5");
config.setParam("SamplingFreq","-1");
config.setParam("MinFrequency","-1");
config.setParam("MaxFrequency","-1");
config.setParam("FrequencyError","-1");
return config;
}
private static AlgorithmConfiguration testTimeExtractionTable() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIMEEXTRACTION");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setParam("Layer","c565e32c-c5b3-4964-b44f-06dc620563e9");
config.setParam("OutputTableName","testtimeextraction2");
config.setParam("OutputTableLabel","testtimeextraction2");
config.setParam("Z","0");
config.setParam("X","-47.97");
config.setParam("Y","43.42");
config.setParam("Resolution","0.5");
config.setParam("SamplingFreq","-1");
config.setParam("MinFrequency","-1");
config.setParam("MaxFrequency","-1");
config.setParam("FrequencyError","-1");
config.setParam(TableMatrixRepresentation.tableNameParameter, "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "datetime");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "speed");
config.setParam(TableMatrixRepresentation.filterParameter, "speed<2");
return config;
}
private static AlgorithmConfiguration testTimeExtractionTable2() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIMEEXTRACTION");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setParam("OutputTableName","testtimeextraction2");
config.setParam("OutputTableLabel","testtimeextraction2");
config.setParam("Z","0");
config.setParam("X","18.61669921875");
config.setParam("Y","-34.1833000183105");
config.setParam("Resolution","10");
config.setParam("SamplingFreq","-1");
config.setParam("MinFrequency","-1");
config.setParam("MaxFrequency","-1");
config.setParam("FrequencyError","-1");
config.setParam(TableMatrixRepresentation.tableNameParameter, "occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "decimallongitude");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "decimallatitude");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "eventdate");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "decimallongitude");
config.setParam(TableMatrixRepresentation.filterParameter, " ");
return config;
}
}

View File

@@ -1,190 +0,0 @@
package org.gcube.dataanalysis.geo.test.infra;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
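/**
 * Manual test harness for the ZEXTRACTION transducer, which extracts values along the z
 * axis of a layer at a fixed (X, Y) point; only the AquaMaps configuration is enabled in
 * the configs array, the NetCDF and table-based variants are kept for reference.
 */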
public class TestZExtraction {
// static AlgorithmConfiguration[] configs = { testTimeExtractionTable(), testZExtractionLongNetCDF(),testZExtractionNetCDF(),testTimeExtractionTable2()};
static AlgorithmConfiguration[] configs = { testZExtractionAquamaps()};
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
for (int i = 0; i < configs.length; i++) {
AnalysisLogger.getLogger().debug("Executing: "+configs[i].getAgent());
List<ComputationalAgent> trans = null;
trans = TransducerersFactory.getTransducerers(configs[i]);
trans.get(0).init();
Regressor.process(trans.get(0));
StatisticalType st = trans.get(0).getOutput();
AnalysisLogger.getLogger().debug("ST:" + st);
trans = null;
}
}
private static AlgorithmConfiguration testZExtractionAquamaps() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("ZEXTRACTION");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/gcube/devsec/devVRE");
config.setParam("Layer","40198411-9ceb-420f-8f39-a7e1b8128d6b");
config.setParam("OutputTableName","testzextractionaquamaps");
config.setParam("OutputTableLabel","testzextractionaquamaps");
config.setParam("TimeIndex","0");
config.setParam("X","121");
config.setParam("Y","-4");
config.setParam("Resolution","0.5");
return config;
}
private static AlgorithmConfiguration testZExtractionLongNetCDF() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("ZEXTRACTION");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setParam("Layer","6411b110-7572-457a-a662-a16e4ff09e4e");
// config.setParam("Layer","dffa504b-dbc8-4553-896e-002549f8f5d3");
config.setParam("OutputTableName","testzextractionlong");
config.setParam("OutputTableLabel","testzextractionlong");
config.setParam("TimeIndex","0");
config.setParam("X","0");
config.setParam("Y","0");
config.setParam("Resolution","0.5");
return config;
}
private static AlgorithmConfiguration testZExtractionNetCDF() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("ZEXTRACTION");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/gcube/devsec");
config.setParam("Layer","7f90e153-0c5c-4d45-a498-a6374593e68d");
config.setParam("OutputTableName","testzextractionstandard");
config.setParam("OutputTableLabel","testzextractionstandard");
config.setParam("TimeIndex","0");
config.setParam("X","0");
config.setParam("Y","0");
config.setParam("Resolution","100");
return config;
}
private static AlgorithmConfiguration testTimeExtractionTable() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIMEEXTRACTION");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setParam("Layer","c565e32c-c5b3-4964-b44f-06dc620563e9");
config.setParam("OutputTableName","testtimeextraction");
config.setParam("OutputTableLabel","testtimeextraction");
config.setParam("Z","0");
config.setParam("X","-47.97");
config.setParam("Y","43.42");
config.setParam("Resolution","0.5");
config.setParam("SamplingFreq","-1");
config.setParam("MinFrequency","-1");
config.setParam("MaxFrequency","-1");
config.setParam("FrequencyError","-1");
config.setParam(TableMatrixRepresentation.tableNameParameter, "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "datetime");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "speed");
config.setParam(TableMatrixRepresentation.filterParameter, "speed<2");
return config;
}
private static AlgorithmConfiguration testTimeExtractionTable2() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIMEEXTRACTION");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setParam("OutputTableName","testtimeextraction2");
config.setParam("OutputTableLabel","testtimeextraction2");
config.setParam("Z","0");
config.setParam("X","18.61669921875");
config.setParam("Y","-34.1833000183105");
config.setParam("Resolution","10");
config.setParam("SamplingFreq","-1");
config.setParam("MinFrequency","-1");
config.setParam("MaxFrequency","-1");
config.setParam("FrequencyError","-1");
config.setParam(TableMatrixRepresentation.tableNameParameter, "occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "decimallongitude");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "decimallatitude");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "eventdate");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "decimallongitude");
config.setParam(TableMatrixRepresentation.filterParameter, " ");
return config;
}
}

View File

@@ -1,119 +0,0 @@
package org.gcube.dataanalysis.geo.test.maps;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.algorithms.PointsMapsCreator;
import org.gcube.dataanalysis.geo.algorithms.PolygonMapsCreator;
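/**
 * Manual test for map publication: builds a PointsMapsCreator configuration that reads a
 * point table from the statistical-manager database and publishes the resulting layer to
 * the geoserver-test PostGIS store; the PolygonMapsCreator variant is left commented out.
 */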
public class TestMapCreation {
static String cfg = "./cfg/";
public static void main1(String[] args) throws Exception{
AnalysisLogger.setLogger(cfg+AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath(cfg);
// config.setGcubeScope("/gcube/devsec/statVRE");
config.setGcubeScope("/gcube/devsec/devVRE");
config.setPersistencePath("./");
config.setParam("MapName","Test Polygonal Map Ph 6");
/*
config.setParam("InputTable","occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
config.setParam("xDimension","decimallongitude");
config.setParam("yDimension","decimallatitude");
config.setParam("Info","recordedby") ;
config.setParam("Resolution","0.5");
*/
config.setParam("InputTable","generic_idbc699da3_a4d5_40fb_80ff_666dbf1316d5");
config.setParam("xDimension","x");
config.setParam("yDimension","y");
config.setParam("Info","fvalue") ;
config.setParam("DatabaseUserName","utente");
config.setParam("DatabasePassword","d4science");
config.setParam("DatabaseURL","jdbc:postgresql://statistical-manager.d.d4science.org/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setParam("Z","0");
config.setParam("user", "postgres");
config.setParam("password", "d4science2");
config.setParam("STOREURL","jdbc:postgresql://geoserver-test.d4science-ii.research-infrastructures.eu/timeseriesgisdb");
config.setParam("driver", "org.postgresql.Driver");
config.setParam("dialect", "org.hibernatespatial.postgis.PostgisDialect");
/*
PolygonMapsCreator mc = new PolygonMapsCreator();
*/
PointsMapsCreator mc = new PointsMapsCreator();
mc.setConfiguration(config);
mc.init();
mc.compute();
}
public static void main(String[] args) throws Exception{
AnalysisLogger.setLogger(cfg+AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath(cfg);
// config.setGcubeScope("/gcube/devsec/statVRE");
config.setGcubeScope("/gcube/devsec/devVRE");
ScopeProvider.instance.set("/gcube/devsec/devVRE");
config.setPersistencePath("./");
config.setParam("MapName","Test Polygonal Map Ph 10");
config.setParam("PublicationLevel","PRIVATE");
/*
config.setParam("InputTable","occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
config.setParam("xDimension","decimallongitude");
config.setParam("yDimension","decimallatitude");
config.setParam("Info","recordedby") ;
config.setParam("Resolution","0.5");
*/
/*
config.setParam("InputTable","testextraction2");
config.setParam("xDimension","x");
config.setParam("yDimension","y");
config.setParam("Info","fvalue") ;
*/
config.setParam("InputTable","code_85e5d927f7094a3ca677a53f4433fed4");
config.setParam("xDimension","longitude");
config.setParam("yDimension","latitude");
config.setParam("Info","longitude") ;
config.setParam("DatabaseUserName","utente");
config.setParam("DatabasePassword","d4science");
config.setParam("DatabaseURL","jdbc:postgresql://statistical-manager.d.d4science.org/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
/*
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
*/
config.setParam("Z","0");
config.setParam("user", "postgres");
config.setParam("password", "d4science2");
config.setParam("STOREURL","jdbc:postgresql://geoserver-test.d4science-ii.research-infrastructures.eu/timeseriesgisdb");
config.setParam("driver", "org.postgresql.Driver");
config.setParam("dialect", "org.hibernatespatial.postgis.PostgisDialect");
/*
PolygonMapsCreator mc = new PolygonMapsCreator();
*/
PointsMapsCreator mc = new PointsMapsCreator();
mc.setConfiguration(config);
mc.init();
mc.compute();
}
}

View File

@@ -1,66 +0,0 @@
package org.gcube.dataanalysis.geo.test.maps;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.algorithms.MapsComparator;
import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;
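/**
 * Manual test for the MapsComparator algorithm. Both Layer_1 and Layer_2 point to the same
 * WFS request (apparently a Latimeria chalumnae AquaMaps layer); the many commented-out
 * titles and UUIDs are alternative inputs used in earlier runs.
 */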
public class TestMapsComparison {
static String cfg = "./cfg/";
public static void main(String[] args) throws Exception{
// String layertitle = "Ice velocity u from [12-15-02 01:00] to [12-15-09 01:00] (2D) {Native grid ORCA025.L75 monthly average: Data extracted from dataset http://atoll-mercator.vlandata.cls.fr:44080/thredds/dodsC/global-reanalysis-phys-001-004-b-ref-fr-mjm95-icemod}";
// String layertitle2 = "Ice velocity v from [12-15-02 01:00] to [12-15-09 01:00] (2D) {Native grid ORCA025.L75 monthly average: Data extracted from dataset http://atoll-mercator.vlandata.cls.fr:44080/thredds/dodsC/global-reanalysis-phys-001-004-b-ref-fr-mjm95-icemod}";
// String layertitle = "Number of Observations in [07-01-01 01:00] (3D) {World Ocean Atlas 09: Apparent Oxygen Utilization - annual: dods://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/apparent_oxygen_utilization_annual_1deg_ENVIRONMENT_BIOTA_.nc}";
// String layertitle2 = "Seasonal or Monthly Climatology minus Annual Climatology in [07-01-01 01:00] (3D) {World Ocean Atlas 09: Apparent Oxygen Utilization - annual: dods://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/apparent_oxygen_utilization_annual_1deg_ENVIRONMENT_BIOTA_.nc}";
// String layertitle2 = "Number of Mean Values within Radius of Influence in [07-01-01 01:00] (3D) {World Ocean Atlas 09: Apparent Oxygen Utilization - annual: dods://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/apparent_oxygen_utilization_annual_1deg_ENVIRONMENT_BIOTA_.nc}";
// String layertitle = "Ice velocity u from [12-15-02 01:00] to [12-15-09 01:00] (2D) {Native grid ORCA025.L75 monthly average: Data extracted from dataset http://atoll-mercator.vlandata.cls.fr:44080/thredds/dodsC/global-reanalysis-phys-001-004-b-ref-fr-mjm95-icemod}";
// String layertitle2 = "Ice velocity v from [12-15-02 01:00] to [12-15-09 01:00] (2D) {Native grid ORCA025.L75 monthly average: Data extracted from dataset http://atoll-mercator.vlandata.cls.fr:44080/thredds/dodsC/global-reanalysis-phys-001-004-b-ref-fr-mjm95-icemod}";
//String layertitle = "wind stress from [05-01-07 14:00] to [04-01-12 14:00] (2D) {Monthly ASCAT global wind field: Data extracted from dataset http://tds0.ifremer.fr/thredds/dodsC/CERSAT-GLO-CLIM_WIND_L4-OBS_FULL_TIME_SERIE}";
//String layertitle2 = "wind speed from [05-01-07 14:00] to [04-01-12 14:00] (2D) {Monthly ASCAT global wind field: Data extracted from dataset http://tds0.ifremer.fr/thredds/dodsC/CERSAT-GLO-CLIM_WIND_L4-OBS_FULL_TIME_SERIE}";
//String layertitle = "Objectively Analyzed Climatology from [02-16-01 01:00] to [11-16-01 01:00] (3D) {World Ocean Atlas 09: Sea Water Salinity - seasonal: dods://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/salinity_seasonal_1deg_ENVIRONMENT_OCEANS_.nc}";
//String layertitle2 = "Objectively Analyzed Climatology from [01-16-01 01:00] to [12-16-01 01:00] (3D) {World Ocean Atlas 09: Apparent Oxygen Utilization - monthly: dods://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/apparent_oxygen_utilization_monthly_1deg_ENVIRONMENT_BIOTA_.nc}";
// String layertitle = "FAO aquatic species distribution map of Istiophorus platypterus";
// String layertitle2 = "FAO aquatic species distribution map of Teuthowenia megalops";
//{MEAN=1.0, VARIANCE=0.0, NUMBER_OF_ERRORS=38596, NUMBER_OF_COMPARISONS=260281, ACCURACY=85.17, MAXIMUM_ERROR=1.0, MAXIMUM_ERROR_POINT=3207:219:1, TREND=CONTRACTION, Resolution=0.5}
//String layertitle = "Sarda orientalis";
//String layertitle2 = "FAO aquatic species distribution map of Sarda chiliensis";
// String layertitle2 = "4e5c1bbf-f5ce-4b66-a67c-14d7d9920aa0";
// String layertitle = "38b2eb74-1c07-4569-8a81-36ac2f973146";
//String layertitle = "http://geoserver.d4science-ii.research-infrastructures.eu/geoserver/ows?service=WFS&version=1.0.0&request=GetFeature&typeName=lluidiamaculata20121218223748535cet&format=json&maxfeatures=1";
//String layertitle2 = "http://geoserver.d4science-ii.research-infrastructures.eu/geoserver/ows?service=WFS&version=1.0.0&request=GetFeature&typeName=lluidiamaculata20121218223748535cet&format=json&maxfeatures=1";
String layertitle = "http://geoserver-dev4.d4science.org/geoserver/aquamaps/ows?service=wfs&version=1.1.0&REQUEST=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&TYPENAME=aquamaps:llatimeriachalumnae20130717140243002cest&format=json&maxfeatures=1";
String layertitle2 = "http://geoserver-dev4.d4science.org/geoserver/aquamaps/ows?service=wfs&version=1.1.0&REQUEST=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&TYPENAME=aquamaps:llatimeriachalumnae20130717140243002cest&format=json&maxfeatures=1";
AnalysisLogger.setLogger(cfg+AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath(cfg);
config.setGcubeScope("/gcube/devsec");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setParam("Layer_1",layertitle);
config.setParam("Layer_2",layertitle2);
config.setParam("ValuesComparisonThreshold","0.1");
config.setParam("Z","0");
MapsComparator mc = new MapsComparator();
mc.setConfiguration(config);
mc.init();
mc.compute();
mc.getOutput();
}
}

View File

@@ -1,46 +0,0 @@
package org.gcube.dataanalysis.geo.test.maps;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
import org.gcube.dataanalysis.ecoengine.processing.factories.EvaluatorsFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
public class TestMapsComparisonASCFiles {
/**
* Example of running the MAPS_COMPARISON evaluator on a single machine: it compares two
* Bio-Oracle chlorophyll a concentration layers (mean vs. maximum) and reports the
* comparison statistics.
*
*/
public static void main(String[] args) throws Exception {
List<ComputationalAgent> evaluators = EvaluatorsFactory.getEvaluators(testConfig1());
evaluators.get(0).init();
Regressor.process(evaluators.get(0));
evaluators = null;
}
private static AlgorithmConfiguration testConfig1() {
AlgorithmConfiguration config = Regressor.getConfig();
config.setNumberOfResources(1);
config.setConfigPath("./cfg");
config.setPersistencePath("./");
config.setAgent("MAPS_COMPARISON");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setParam("Layer_1","Bio-Oracle Chlorophyll A Concentration (Mean)");
config.setParam("Layer_2","Bio-Oracle Chlorophyll A Concentration (Max)");
config.setParam("ValuesComparisonThreshold",""+0.1);
config.setParam("Z","0");
config.setGcubeScope("/gcube");
return config;
}
}

View File

@@ -1,50 +0,0 @@
package org.gcube.dataanalysis.geo.test.maps;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.algorithms.MapsComparator;
import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;
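/**
 * Compares the FAO distribution map of Eleutheronema tetradactylum with the layer titled
 * "Eleutheronema tetradactylum" (presumably the AquaMaps map of the same species) using
 * the MapsComparator; the block comment records the statistics of a previous run.
 */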
public class TestMapsComparisonAquaMapsvsFAO {
static String cfg = "./cfg/";
public static void main(String[] args) throws Exception{
String layertitle = "FAO aquatic species distribution map of Eleutheronema tetradactylum";
String layertitle2 = "Eleutheronema tetradactylum";
/*
* {MEAN=0.81, VARIANCE=0.02, NUMBER_OF_ERRORS=6691, NUMBER_OF_COMPARISONS=259200,
* ACCURACY=97.42,
* MAXIMUM_ERROR=1.0, MAXIMUM_ERROR_POINT=3005:363:1,
* COHENS_KAPPA=0.218,
* COHENS_KAPPA_CLASSIFICATION_LANDIS_KOCH=Fair,
* COHENS_KAPPA_CLASSIFICATION_FLEISS=Marginal,
* TREND=EXPANSION,
* Resolution=0.5}
*/
AnalysisLogger.setLogger(cfg+AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath(cfg);
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setParam("Layer_1",layertitle);
config.setParam("Layer_2",layertitle2);
config.setParam("ValuesComparisonThreshold","0.5");
config.setParam("KThreshold","0.5");
config.setParam("Z","0");
config.setGcubeScope("/gcube/devsec/devVRE");
MapsComparator mc = new MapsComparator();
mc.setConfiguration(config);
mc.init();
mc.compute();
mc.getOutput();
}
}

View File

@@ -1,39 +0,0 @@
package org.gcube.dataanalysis.geo.test.maps;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.algorithms.MapsComparator;
import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;
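/**
 * Compares two layers identified only by UUID (Architeuthis-related maps, judging by the
 * class name) with the MapsComparator, using the statistical-manager test database.
 */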
public class TestMapsComparisonArchiteuthis {
static String cfg = "./cfg/";
public static void main(String[] args) throws Exception{
String layertitle = "96575f38-583c-4b6c-926f-e804156c7baa";
String layertitle2 = "f9a831c0-143f-4792-a5d9-896d72db73f6";
AnalysisLogger.setLogger(cfg+AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath(cfg);
config.setPersistencePath("./");
config.setParam("DatabaseUserName","utente");
config.setParam("DatabasePassword","d4science");
config.setParam("DatabaseURL","jdbc:postgresql://statistical-manager.d.d4science.org/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setParam("Layer_1",layertitle);
config.setParam("Layer_2",layertitle2);
config.setParam("ValuesComparisonThreshold","0.5");
config.setParam("KThreshold","0.5");
config.setParam("Z","0");
config.setGcubeScope("/gcube/devsec/devVRE");
MapsComparator mc = new MapsComparator();
mc.setConfiguration(config);
mc.init();
mc.compute();
mc.getOutput();
}
}

View File

@@ -1,36 +0,0 @@
package org.gcube.dataanalysis.geo.test.maps;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.algorithms.MapsComparator;
import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;
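/**
 * Compares the FAO distribution maps of Leptomelanosoma indicum and Eleutheronema
 * tetradactylum with the MapsComparator; the commented line records the statistics of a
 * previous run.
 */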
public class TestMapsComparisonExampleTCOM {
static String cfg = "./cfg/";
public static void main(String[] args) throws Exception{
String layertitle2 = "FAO aquatic species distribution map of Eleutheronema tetradactylum";
String layertitle = "FAO aquatic species distribution map of Leptomelanosoma indicum";
//{MEAN=1.0, VARIANCE=0.0, NUMBER_OF_ERRORS=1823, NUMBER_OF_COMPARISONS=260281, ACCURACY=99.3, MAXIMUM_ERROR=1.0, MAXIMUM_ERROR_POINT=1008:390:1, TREND=EXPANSION, Resolution=0.5}
AnalysisLogger.setLogger(cfg+AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath(cfg);
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setParam("Layer_1",layertitle);
config.setParam("Layer_2",layertitle2);
config.setParam("Z","0");
config.setGcubeScope(null);
MapsComparator mc = new MapsComparator();
mc.setConfiguration(config);
mc.init();
mc.compute();
}
}

View File

@@ -1,50 +0,0 @@
package org.gcube.dataanalysis.geo.test.maps;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.algorithms.MapsComparator;
import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;
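/**
 * Runs the MapsComparator in the EGIP scope with Layer_1 and Layer_2 set to the same UUID
 * (a comparison of a layer against itself); alternative layer titles and UUIDs from
 * earlier experiments are kept as comments.
 */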
public class TestMapsComparisonGeothermal {
static String cfg = "./cfg/";
public static void main(String[] args) throws Exception{
// String layertitle = "Temperature in [12-15-09 01:00] (3D) {Native grid ORCA025.L75 monthly average: Data extracted from dataset http://atoll-mercator.vlandata.cls.fr:44080/thredds/dodsC/global-reanalysis-phys-001-004-b-ref-fr-mjm95-gridt}";
// String layertitle = "Standard Deviation from Statistical Mean from [01-16-01 01:00] to [12-16-01 01:00] (3D) {World Ocean Atlas 09: Sea Water Temperature - monthly: dods://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/temperature_monthly_1deg_ENVIRONMENT_OCEANS_.nc}";
// String layertitle = "86a7ac79-866a-49c6-b5d5-602fc2d87ddd";
String layertitle = "821b1753-a52c-45ff-9a39-14af88833a0f";
// String layertitle2 = "Statistical Mean in [07-01-01 01:00] (3D) {World Ocean Atlas 09: Sea Water Temperature - annual: dods://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/temperature_annual_1deg_ENVIRONMENT_OCEANS_.nc}";
// String layertitle2 = "0e03d0fa-9c44-4a0c-a7e3-9f6d48710d00";
// String layertitle2 = layertitle;
String layertitle2 = "821b1753-a52c-45ff-9a39-14af88833a0f";
// {MEAN=224.49, VARIANCE=10337.11, NUMBER_OF_ERRORS=47054, NUMBER_OF_COMPARISONS=65522, ACCURACY=28.19, MAXIMUM_ERROR=303.6, MAXIMUM_ERROR_POINT=5006:104, Resolution=0.9972222222222222}
// layertitle = "3fb7fd88-33d4-492d-b241-4e61299c44bb";
// layertitle2 = "3fb7fd88-33d4-492d-b241-4e61299c44bb";
AnalysisLogger.setLogger(cfg+AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath(cfg);
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setParam("Layer_1",layertitle);
config.setParam("Layer_2",layertitle2);
config.setParam("ValuesComparisonThreshold",""+0.1);
config.setParam("Z","0");
config.setGcubeScope(null);
// ScopeProvider.instance.set("/gcube/devNext/NextNext");
ScopeProvider.instance.set("/d4science.research-infrastructures.eu/gCubeApps/EGIP");
// ScopeProvider.instance.set("/gcube/devsec");
MapsComparator mc = new MapsComparator();
mc.setConfiguration(config);
mc.init();
mc.compute();
mc.getOutput();
}
}

View File

@@ -1,41 +0,0 @@
package org.gcube.dataanalysis.geo.test.maps;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.algorithms.MapsComparator;
import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;
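/**
 * Compares a Mercator ORCA025 temperature layer with the World Ocean Atlas 09 annual
 * statistical mean of sea water temperature using the MapsComparator; the commented line
 * records the statistics of a previous run.
 */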
public class TestMapsComparisonTemperature {
static String cfg = "./cfg/";
public static void main(String[] args) throws Exception{
// String layertitle = "MyDistributionMap";
// String layertitle = "Mass Concentration of Chlorophyll in Sea Water in [03-30-13 01:00] (3D) {Mercator Ocean BIOMER1V1R1: Data extracted from dataset http://atoll-mercator.vlandata.cls.fr:44080/thredds/dodsC/global-analysis-bio-001-008-a}";
String layertitle = "Temperature in [12-15-09 01:00] (3D) {Native grid ORCA025.L75 monthly average: Data extracted from dataset http://atoll-mercator.vlandata.cls.fr:44080/thredds/dodsC/global-reanalysis-phys-001-004-b-ref-fr-mjm95-gridt}";
String layertitle2 = "Statistical Mean in [07-01-01 01:00] (3D) {World Ocean Atlas 09: Sea Water Temperature - annual: dods://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/temperature_annual_1deg_ENVIRONMENT_OCEANS_.nc}";
// String layertitle2 = "Objectively Analyzed Climatology in [07-01-01 01:00] (3D) {World Ocean Atlas 09: Sea Water Temperature - annual: dods://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/temperature_annual_1deg_ENVIRONMENT_OCEANS_.nc}";
// String layertitle2 = "FAO AQUATIC SPECIES DISTRIBUTION MAP OF MEGALASPIS CORDYLA";
// {MEAN=224.49, VARIANCE=10337.11, NUMBER_OF_ERRORS=47054, NUMBER_OF_COMPARISONS=65522, ACCURACY=28.19, MAXIMUM_ERROR=303.6, MAXIMUM_ERROR_POINT=5006:104, Resolution=0.9972222222222222}
AnalysisLogger.setLogger(cfg+AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath(cfg);
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setParam("Layer_1",layertitle);
config.setParam("Layer_2",layertitle2);
config.setParam("ValuesComparisonThreshold",""+0.1);
config.setParam("Z","0");
config.setGcubeScope(null);
MapsComparator mc = new MapsComparator();
mc.setConfiguration(config);
mc.init();
mc.compute();
mc.getOutput();
}
}

View File

@@ -1,45 +0,0 @@
package org.gcube.dataanalysis.geo.test.maps;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.algorithms.MapsComparator;
import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;
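/**
 * Variant of the temperature comparison that addresses the two layers by UUID instead of
 * by title; the commented-out titles indicate the layers the UUIDs were meant to resolve to.
 */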
public class TestMapsComparisonTemperatureUUID {
static String cfg = "./cfg/";
public static void main(String[] args) throws Exception{
// String layertitle = "Temperature in [12-15-09 01:00] (3D) {Native grid ORCA025.L75 monthly average: Data extracted from dataset http://atoll-mercator.vlandata.cls.fr:44080/thredds/dodsC/global-reanalysis-phys-001-004-b-ref-fr-mjm95-gridt}";
// String layertitle = "Standard Deviation from Statistical Mean from [01-16-01 01:00] to [12-16-01 01:00] (3D) {World Ocean Atlas 09: Sea Water Temperature - monthly: dods://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/temperature_monthly_1deg_ENVIRONMENT_OCEANS_.nc}";
// String layertitle = "86a7ac79-866a-49c6-b5d5-602fc2d87ddd";
String layertitle = "35412bde-aa6e-49f6-b9ad-e99458882f92";
// String layertitle2 = "Statistical Mean in [07-01-01 01:00] (3D) {World Ocean Atlas 09: Sea Water Temperature - annual: dods://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/temperature_annual_1deg_ENVIRONMENT_OCEANS_.nc}";
// String layertitle2 = "0e03d0fa-9c44-4a0c-a7e3-9f6d48710d00";
// String layertitle2 = layertitle;
String layertitle2 = "625ba45b-c19f-4a9d-b3f7-be13e8f73101";
// {MEAN=224.49, VARIANCE=10337.11, NUMBER_OF_ERRORS=47054, NUMBER_OF_COMPARISONS=65522, ACCURACY=28.19, MAXIMUM_ERROR=303.6, MAXIMUM_ERROR_POINT=5006:104, Resolution=0.9972222222222222}
AnalysisLogger.setLogger(cfg+AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath(cfg);
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setParam("Layer_1",layertitle);
config.setParam("Layer_2",layertitle2);
config.setParam("ValuesComparisonThreshold",""+0.1);
config.setParam("Z","0");
config.setGcubeScope(null);
MapsComparator mc = new MapsComparator();
mc.setConfiguration(config);
mc.init();
mc.compute();
mc.getOutput();
}
}

View File

@@ -1,35 +0,0 @@
package org.gcube.dataanalysis.geo.test.maps;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.algorithms.MapsComparator;
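/**
 * Compares the World Ocean Atlas 2005 temperature layer with the World Ocean Atlas 09
 * annual statistical mean; the commented line records a previous run in which the two maps
 * matched exactly (ACCURACY=100.0, TREND=STATIONARY).
 */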
public class TestMapsComparisonTemperatureWOA {
static String cfg = "./cfg/";
public static void main(String[] args) throws Exception{
String layertitle = "Temperature in [07-01-01 13:00] (3D) {World Ocean Atlas 2005: Tcl version: 8.4.13, NAP version: 6.2.2}";
String layertitle2 = "Statistical Mean in [07-01-01 01:00] (3D) {World Ocean Atlas 09: Sea Water Temperature - annual: dods://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/temperature_annual_1deg_ENVIRONMENT_OCEANS_.nc}";
//{MEAN=0.0, VARIANCE=0.0, NUMBER_OF_ERRORS=0, NUMBER_OF_COMPARISONS=65522, ACCURACY=100.0, MAXIMUM_ERROR=0.0, MAXIMUM_ERROR_POINT=null, TREND=STATIONARY, Resolution=0.9972222222222222}
AnalysisLogger.setLogger(cfg+AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath(cfg);
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setParam("Layer_1",layertitle);
config.setParam("Layer_2",layertitle2);
config.setParam("ValuesComparisonThreshold",""+0.1);
config.setParam("Z","0");
config.setGcubeScope("/gcube/devsec/devVRE");
MapsComparator mc = new MapsComparator();
mc.setConfiguration(config);
mc.init();
mc.compute();
mc.getOutput();
}
}

View File

@@ -1,65 +0,0 @@
package org.gcube.dataanalysis.geo.test.maps;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.algorithms.MapsComparator;
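/**
 * Compares the same Mercator temperature layer against itself at successive time indices
 * 12 steps apart (0, 12, ..., 120); the block comment inside compare() records the
 * statistics of previous runs for each pair of indices.
 */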
public class TestMapsComparisoninTime {
static String cfg = "./cfg/";
public static void main(String[] args) throws Exception{
int[] timeIdx = {0,12,24,36,48,60,72,84,96,108,120};
for (int i=1;i<timeIdx.length;i++){
compare(timeIdx[i-1], timeIdx[i]);
}
}
public static void compare(int t1, int t2) throws Exception{
String layertitle = "Temperature from [12-15-99 01:00] to [12-15-09 01:00] (2D) {Native grid ORCA025.L75 monthly average: Data extracted from dataset http://atoll-mercator.vlandata.cls.fr:44080/thredds/dodsC/global-reanalysis-phys-001-004-b-ref-fr-mjm95-gridt}";
//1vs0: {MEAN=6.23, VARIANCE=30.58, NUMBER_OF_ERRORS=39650, NUMBER_OF_COMPARISONS=522242, ACCURACY=92.41, MAXIMUM_ERROR=45.35, MAXIMUM_ERROR_POINT=3215:143, Resolution=0.3525954946131244}
/*
**********(12->0) {MEAN=0.53, VARIANCE=0.28, NUMBER_OF_ERRORS=36075, NUMBER_OF_COMPARISONS=522242, ACCURACY=93.09, MAXIMUM_ERROR=6.0, MAXIMUM_ERROR_POINT=7309:456, TREND=EXPANSION, Resolution=0.3525954946131244} ELAPSED: 1370363639187
**********(24->12) {MEAN=0.56, VARIANCE=0.3, NUMBER_OF_ERRORS=36053, NUMBER_OF_COMPARISONS=522242, ACCURACY=93.1, MAXIMUM_ERROR=6.95, MAXIMUM_ERROR_POINT=1313:143, TREND=EXPANSION, Resolution=0.3525954946131244} ELAPSED: 1370363722843 Japan
**********(36->24) {MEAN=0.62, VARIANCE=0.33, NUMBER_OF_ERRORS=35744, NUMBER_OF_COMPARISONS=522242, ACCURACY=93.16, MAXIMUM_ERROR=6.87, MAXIMUM_ERROR_POINT=1314:465, TREND=EXPANSION, Resolution=0.3525954946131244} ELAPSED: 1370363798387
**********(48->36) {MEAN=0.49, VARIANCE=0.22, NUMBER_OF_ERRORS=35664, NUMBER_OF_COMPARISONS=522242, ACCURACY=93.17, MAXIMUM_ERROR=7.54, MAXIMUM_ERROR_POINT=7307:456, TREND=CONTRACTION, Resolution=0.3525954946131244} ELAPSED: 1370363875063 North Carolina
**********(60->48) {MEAN=0.46, VARIANCE=0.23, NUMBER_OF_ERRORS=36133, NUMBER_OF_COMPARISONS=522242, ACCURACY=93.08, MAXIMUM_ERROR=5.42, MAXIMUM_ERROR_POINT=7307:456, TREND=EXPANSION, Resolution=0.3525954946131244} ELAPSED: 1370363953390
**********(72->60) {MEAN=0.56, VARIANCE=0.31, NUMBER_OF_ERRORS=35970, NUMBER_OF_COMPARISONS=522242, ACCURACY=93.11, MAXIMUM_ERROR=7.01, MAXIMUM_ERROR_POINT=7307:236, TREND=EXPANSION, Resolution=0.3525954946131244} ELAPSED: 1370364033154
**********(84->72) {MEAN=0.57, VARIANCE=0.31, NUMBER_OF_ERRORS=36148, NUMBER_OF_COMPARISONS=522242, ACCURACY=93.08, MAXIMUM_ERROR=7.02, MAXIMUM_ERROR_POINT=7307:247, TREND=CONTRACTION, Resolution=0.3525954946131244} ELAPSED: 1370364110444
**********(96->84) {MEAN=0.59, VARIANCE=0.32, NUMBER_OF_ERRORS=35873, NUMBER_OF_COMPARISONS=522242, ACCURACY=93.13, MAXIMUM_ERROR=5.23, MAXIMUM_ERROR_POINT=7306:249, TREND=CONTRACTION, Resolution=0.3525954946131244} ELAPSED: 1370364190900
**********(108->96) {MEAN=0.53, VARIANCE=0.27, NUMBER_OF_ERRORS=35789, NUMBER_OF_COMPARISONS=522242, ACCURACY=93.15, MAXIMUM_ERROR=4.96, MAXIMUM_ERROR_POINT=7306:249, TREND=EXPANSION, Resolution=0.3525954946131244} ELAPSED: 1370364272133
**********(120->108) {MEAN=0.62, VARIANCE=0.37, NUMBER_OF_ERRORS=36194, NUMBER_OF_COMPARISONS=522242, ACCURACY=93.07, MAXIMUM_ERROR=5.51, MAXIMUM_ERROR_POINT=1316:352, TREND=EXPANSION, Resolution=0.3525954946131244} pacific ocean
*/
long t0=0;
AnalysisLogger.setLogger(cfg+AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath(cfg);
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setParam("Layer_1",layertitle);
config.setParam("Layer_2",layertitle);
config.setParam("TimeIndex_1",""+t1);
config.setParam("TimeIndex_2",""+t2);
config.setParam("ValuesComparisonThreshold","0.01");
config.setParam("Z","0");
config.setGcubeScope(null);
MapsComparator mc = new MapsComparator();
mc.setConfiguration(config);
mc.init();
mc.compute();
mc.getOutput();
System.out.println("*********("+t2+"->"+t1+") "+mc.outputParameters +" ELAPSED: "+(System.currentTimeMillis()-t0));
}
}

View File

@@ -1,218 +0,0 @@
package org.gcube.dataanalysis.geo.test.projections;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.contentmanagement.lexicalmatcher.utils.DistanceCalculator;
import org.gcube.dataanalysis.ecoengine.utils.Transformations;
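/**
 * Utility for geo-referencing country names in CSV files: it loads country centroids from
 * countriescentroids.txt and world capitals from country-capitals.csv, then assigns each
 * country a coordinate by exact lookup or, failing that, by fuzzy name matching through
 * DistanceCalculator. main() processes LargeTS.csv and writes LargeTsGeo.csv; main1 and
 * main2 are alternative entry points used in earlier experiments.
 */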
public class GeolocateCountry {
static String faoreport = "FAO data.csv";
// static String faoreport = "C:\\Users\\coro\\Desktop\\allCountries.txt";
public static void main1(String[] args) throws Exception {
BufferedReader fr = new BufferedReader(new FileReader(new File(faoreport)));
String line = fr.readLine();
long counter = 0;
while (line != null) {
// System.out.println(line);
String[] split = line.split("\t");
String country = split[17];
String x = split[5];
String y = split[4];
if (country.contains("Russia"))
break;
// else
// System.out.println("Country:"+country+" "+x+","+y);
counter++;
if (counter % 500000 == 0)
System.out.println("Country:" + country + " " + x + "," + y);
line = fr.readLine();
}
fr.close();
}
public static void main2(String[] args) throws Exception {
BufferedReader fr = new BufferedReader(new FileReader(new File(faoreport)));
String line = fr.readLine();
parseCentroidsFile();
parseWorldCapitalsFile();
line = fr.readLine();
HashMap<String, String> yetDone = new HashMap<String, String>();
while (line != null) {
List<String> p = Transformations.parseCVSString(line, ",");
String country = p.get(1);
// TO DO rebuild the original CSV file
String suggestion = yetDone.get(country);
if (suggestion == null) {
suggestion = getCentroid(country, capitals, 0.6);
if (suggestion.length() == 0)
suggestion = getCentroid(country, centroids, 0.3);
yetDone.put(country, suggestion);
}
System.out.println(line + "," + suggestion);
line = fr.readLine();
}
fr.close();
}
public static void main(String[] args) throws Exception {
String file = "LargeTS.csv";
System.out.println("Processing");
List<String> countries = GeolocateCountry.geoLocateCountries(1, file);
System.out.println("Dumping");
BufferedWriter bw = new BufferedWriter(new FileWriter(new File("LargeTsGeo.csv")));
for (String country:countries){
bw.write(country+"\n");
}
bw.close();
System.out.println("Done");
}
public static List<String> geoLocateCountries(int idxCountryColumn, String file) throws Exception {
BufferedReader fr = new BufferedReader(new FileReader(new File(file)));
String line = fr.readLine();
parseCentroidsFile();
parseWorldCapitalsFile();
line = fr.readLine();
List<String> yetDone = new ArrayList<String>();
while (line != null) {
List<String> p = Transformations.parseCVSString(line, ",");
String country = p.get(idxCountryColumn);
String suggestion = null;
suggestion = getCentroid(country, capitals, 0.6);
if (suggestion.length() == 0)
suggestion = getCentroid(country, centroids, 0.3);
if (suggestion==null || suggestion.length()==0)
suggestion = ",,,,";
String outstring = country + "," + suggestion;
yetDone.add(outstring);
// System.out.println(outstring);
line = fr.readLine();
}
fr.close();
return yetDone;
}
public static Map<String, String> geoLocateCountriesWithNoDuplicates(int idxCountryColumn, String file) throws Exception {
BufferedReader fr = new BufferedReader(new FileReader(new File(file)));
String line = fr.readLine();
parseCentroidsFile();
parseWorldCapitalsFile();
line = fr.readLine();
LinkedHashMap<String, String> yetDone = new LinkedHashMap<String, String>();
while (line != null) {
List<String> p = Transformations.parseCVSString(line, ",");
String country = p.get(idxCountryColumn);
String suggestion = yetDone.get(country);
if (suggestion == null) {
suggestion = getCentroid(country, capitals, 0.6);
if (suggestion.length() == 0)
suggestion = getCentroid(country, centroids, 0.3);
yetDone.put(country, suggestion);
}
System.out.println(line + "," + suggestion);
line = fr.readLine();
}
fr.close();
return yetDone;
}
static HashMap<String, String> centroids = new HashMap<String, String>();
static HashMap<String, String> capitals = new HashMap<String, String>();
public static void parseCentroidsFile() throws Exception {
BufferedReader fr = new BufferedReader(new FileReader(new File("countriescentroids.txt")));
String line = fr.readLine();
while (line != null) {
String[] elems = line.split(",");
String x = elems[0];
String y = elems[1];
String cntry_name = elems[2];
centroids.put(cntry_name, x + "," + y);
line = fr.readLine();
}
fr.close();
}
public static void parseWorldCapitalsFile() throws Exception {
BufferedReader fr = new BufferedReader(new FileReader(new File("country-capitals.csv")));
String line = fr.readLine();
while (line != null) {
String[] elems = line.split(",");
String x = elems[3];
String y = elems[2];
String cntry_name = elems[0];
capitals.put(cntry_name, x + "," + y);
line = fr.readLine();
}
fr.close();
}
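/**
 * Returns "name,x,y,score" for the given country: an exact key match scores 1; otherwise
 * the keys are fuzzy-matched with DistanceCalculator and the best-scoring match above the
 * threshold is returned, or an empty string if none qualifies.
 */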
public static String getCentroid(String country, HashMap<String, String> centroids, double threshold) {
String c = centroids.get(country);
List<String> sb = new ArrayList<String>();
List<Double> scores = new ArrayList<Double>();
DistanceCalculator dc = new DistanceCalculator();
if (c == null) {
for (String key : centroids.keySet()) {
if (key.length() > 0) {
/*
* if (key.contains(country) || country.contains(key)) { if (sb.length() > 0) sb.append("/");
*
* sb.append(key + "," + centroids.get(key) + "("+0.8+")"+" "); } else {
*/
double score = dc.CD(false, country, key, true, false);
if (score > threshold) {
int i = 0;
for (Double cscore : scores) {
if (cscore < score)
break;
i++;
}
sb.add(i, key + "," + centroids.get(key) + "," + MathFunctions.roundDecimal(score, 2));
scores.add(i, score);
}
// }
}
}
if (sb.size() > 0)
return sb.get(0).toString();
else
return "";
} else
return country + "," + c + "," + 1;
}
}

View File

@@ -1,143 +0,0 @@
package org.gcube.dataanalysis.geo.test.projections;
import java.io.File;
import java.io.FileWriter;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.connectors.asc.AscRasterWriter;
import org.gcube.dataanalysis.geo.connectors.table.Table;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
import org.gcube.dataanalysis.geo.utils.MapUtils;
import org.gcube.dataanalysis.geo.utils.VectorOperations;
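/**
 * Produces an ESRI ASC raster and an ASCII map preview for a vector layer (the
 * aquamaps:worldborders WFS layer by default); the static fields select the layer, the
 * attribute to rasterize, the resolution, the bounding box and the database table used as
 * intermediate storage.
 */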
public class ProduceASCFile {
static String layer = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver/ows?service=wfs&version=1.0.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&TYPENAME=aquamaps:worldborders";
// static String layer = "ed8f77bd-2423-4036-b34d-2f1cb5fcaffc";
// static String layer = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver/ows?service=wfs&version=1.0.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&TYPENAME=aquamaps:eezall";
// static String layer = "http://geo.vliz.be/geoserver/MarineRegions/ows?service=wfs&version=1.0.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&typename=MarineRegions:eez";
// static String layer = "aeabfdb5-9ddb-495e-b628-5b7d2cf1d8a2";
static String field = "f_cat";
// static String field = "eez_id";
// static String field = "f_eezall";
// static String field = "f_eez_id";
// static String field = "f_zone";
static double res = 0.3;
static String table = "testextraction4";
static String scope = "/gcube/devsec/devVRE";
static String databaseUser = "gcube";
static String databasePwd = "d4science2";
static String databaseURL = "jdbc:postgresql://localhost/testdb";
static String databaseDriver = "org.postgresql.Driver";
static double xll = -180;
static double yll=-90;
static double xur=180;
static double yur=90;
static String outASCIIMAP = "producedmap.txt";
static String outASCFile = "produced.asc";
private static AlgorithmConfiguration XYExtractionConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("XYEXTRACTOR");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", databaseUser);
config.setParam("DatabasePassword", databasePwd);
config.setParam("DatabaseURL", databaseURL);
config.setParam("DatabaseDriver", databaseDriver);
config.setGcubeScope(scope);
config.setParam("Layer", layer);
config.setParam("Z", "0");
config.setParam("TimeIndex", "0");
config.setParam("BBox_LowerLeftLat", ""+yll);
config.setParam("BBox_LowerLeftLong", ""+xll);
config.setParam("BBox_UpperRightLat", ""+yur);
config.setParam("BBox_UpperRightLong", ""+xur);
config.setParam("XResolution", ""+res);
config.setParam("YResolution", ""+res);
config.setParam("OutputTableName", table);
config.setParam("OutputTableLabel", table);
return config;
}
private static AlgorithmConfiguration TableExtractionConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", databaseUser);
config.setParam("DatabasePassword", databasePwd);
config.setParam("DatabaseURL", databaseURL);
config.setParam("DatabaseDriver", databaseDriver);
config.setGcubeScope(scope);
config.setParam("BBox_LowerLeftLat", ""+yll);
config.setParam("BBox_LowerLeftLong", ""+xll);
config.setParam("BBox_UpperRightLat", ""+xur);
config.setParam("BBox_UpperRightLong", ""+yur);
config.setParam("XResolution", ""+res);
config.setParam("YResolution", ""+res);
config.setParam("OutputTableName", table);
config.setParam("OutputTableLabel", table);
config.setParam(TableMatrixRepresentation.tableNameParameter, table);
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "approx_x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "approx_y");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "time");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, field);
config.setParam(TableMatrixRepresentation.filterParameter, " ");
return config;
}
public static void main(String[] args) throws Exception{
// produce(XYExtractionConfig());
AnalysisLogger.setLogger("./cfg/"+AlgorithmConfiguration.defaultLoggerFile);
List<Tuple<Double>> tuples = VectorOperations.generateCoordinateTripletsInBoundingBox(xll,xur,yll,yur, 0, res, res);
Table connector = new Table(TableExtractionConfig(), res);
List<Double> values = connector.getFeaturesInTimeInstantAndArea(null, null, 0, tuples, xll,xur,yll,yur);
double[][] matrix = VectorOperations.vectorToMatix(values, xll,xur,yll,yur,res, res);
System.out.println(MapUtils.globalASCIIMap(matrix));
FileWriter fw = new FileWriter(new File(outASCIIMAP));
fw.write(MapUtils.globalASCIIMap(matrix));
fw.close();
AscRasterWriter writer = new AscRasterWriter();
writer.writeRasterInvertYAxis(outASCFile, matrix, xll,yll, res, "-9999");
}
public static void produce(AlgorithmConfiguration config) throws Exception {
System.out.println("TEST 1");
AnalysisLogger.getLogger().debug("Executing: "+config.getAgent());
List<ComputationalAgent> trans = null;
trans = TransducerersFactory.getTransducerers(config);
trans.get(0).init();
Regressor.process(trans.get(0));
StatisticalType st = trans.get(0).getOutput();
AnalysisLogger.getLogger().debug("ST:" + st);
trans = null;
}
}

View File

@ -1,48 +0,0 @@
package org.gcube.dataanalysis.geo.test.projections;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.geo.connectors.wfs.FeaturedPolygon;
import org.gcube.dataanalysis.geo.connectors.wfs.WFSDataExplorer;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.Point;
import com.vividsolutions.jts.geom.Polygon;
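// Unions the WFS world-borders polygons by country name and prints the centroid (longitude, latitude, name) of each merged geometry.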
public class ProduceCentroids {
static String layer = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver/ows?service=wfs&version=1.0.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&TYPENAME=aquamaps:worldborders";
static String layername = "aquamaps:worldborders";
public static void main(String[] args) throws Exception{
List<FeaturedPolygon> featuresInTime = new ArrayList<FeaturedPolygon>();
AnalysisLogger.getLogger().debug("taking WFS features from layer: "+layer);
featuresInTime = WFSDataExplorer.getFeatures(layer, layername, -180, -90, 180, 90);
HashMap<String, Point> centroidsmap = new HashMap<String, Point>();
HashMap<String, Geometry> polymap = new HashMap<String, Geometry>();
for (FeaturedPolygon fpoly:featuresInTime){
// Point centroid = fpoly.p.getCentroid();
Geometry prevPoly = polymap.get(fpoly.features.get("cntry_name"));
if (prevPoly!=null){
prevPoly = prevPoly.union(fpoly.p);
}
else
prevPoly = fpoly.p;
// if ((""+fpoly.features).contains("United States"))
// System.out.println("centroid:"+fpoly.p.getCentroid()+" now "+prevPoly.getCentroid());
polymap.put(fpoly.features.get("cntry_name"),prevPoly);
}
for (String key:polymap.keySet()){
Point centroid = polymap.get(key).getCentroid();
System.out.println(centroid.getX()+","+centroid.getY()+","+key);
}
}
}

View File

@ -1,114 +0,0 @@
package org.gcube.dataanalysis.geo.test.projections;
import java.io.File;
import java.io.FileWriter;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
import org.gcube.dataanalysis.geo.matrixmodel.XYExtractor;
import org.gcube.dataanalysis.geo.utils.MapUtils;
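// Extracts an XY grid from a database table (several alternative table configurations are provided by the slice* methods) and dumps it as a global ASCII map to maps.txt.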
public class TestExtractionXYMatrixFromTable {
public static void sliceTableAquaMaps(AlgorithmConfiguration config) throws Exception {
// latimeria chalumnae
config.setParam(TableMatrixRepresentation.tableNameParameter, "testextractionaquamaps");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "approx_x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "approx_y");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "time");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "f_probability");
config.setParam(TableMatrixRepresentation.filterParameter, "");
}
public static void sliceTablePhImported(AlgorithmConfiguration config) throws Exception {
// ph
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.org/testdb");
config.setParam(TableMatrixRepresentation.tableNameParameter, "generic_idbc699da3_a4d5_40fb_80ff_666dbf1316d5");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "time");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "fvalue");
config.setParam(TableMatrixRepresentation.filterParameter, "");
}
public static void sliceTablePh(AlgorithmConfiguration config) throws Exception {
// ph
config.setParam(TableMatrixRepresentation.tableNameParameter, "testextraction");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "time");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "fvalue");
config.setParam(TableMatrixRepresentation.filterParameter, "");
}
public static void sliceMapCreated(AlgorithmConfiguration config) throws Exception {
config.setParam(TableMatrixRepresentation.tableNameParameter, "testextraction2");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "time");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "fvalue");
config.setParam(TableMatrixRepresentation.filterParameter, "");
}
public static void sliceMapCreated2(AlgorithmConfiguration config) throws Exception {
config.setParam(TableMatrixRepresentation.tableNameParameter, "testextraction2");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "approx_x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "approx_y");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "time");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "f_temp");
config.setParam(TableMatrixRepresentation.filterParameter, "");
}
public static void sliceMaxEnt(AlgorithmConfiguration config) throws Exception {
config.setParam(TableMatrixRepresentation.tableNameParameter, "rstrf31af9ff13de42e583327e4ca51c38ef");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "time");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "fvalue");
config.setParam(TableMatrixRepresentation.filterParameter, "");
}
public static void sliceTableMapServer(AlgorithmConfiguration config) throws Exception {
config.setParam(TableMatrixRepresentation.tableNameParameter, "testextraction3");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "approx_x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "approx_y");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "time");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "f_depth");
config.setParam(TableMatrixRepresentation.filterParameter, "");
}
public static void main(String[] args) throws Exception {
AnalysisLogger.setLogger("./cfg/" + AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "gcube");
config.setParam("DatabasePassword", "d4science2");
config.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
config.setGcubeScope("/gcube/devsec/devVRE");
sliceTableMapServer(config);
double resolution = 1;
FileWriter fw = new FileWriter(new File("maps.txt"));
XYExtractor extractor = new XYExtractor(config);
double[][] matrix = extractor.extractXYGrid(null, 0, -180, 180, -90, 90, 0, resolution, resolution);
String map = MapUtils.globalASCIIMap(matrix);
fw.write(map);
fw.close();
System.out.println("DONE!");
}
}

View File

@ -1,60 +0,0 @@
package org.gcube.dataanalysis.geo.test.projections;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.algorithms.PointsMapsCreator;
import org.gcube.dataanalysis.geo.algorithms.PolygonMapsCreator;
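// Creates a points GIS map from the testextraction2 table through PointsMapsCreator (a PolygonMapsCreator variant is left commented out).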
public class TestMapCreation {
static String cfg = "./cfg/";
public static void main(String[] args) throws Exception{
AnalysisLogger.setLogger(cfg+AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath(cfg);
// config.setGcubeScope("/gcube/devsec/statVRE");
config.setGcubeScope("/gcube/devsec/devVRE");
config.setPersistencePath("./");
config.setParam("MapName","Test Polygonal Map Ph 8");
/*
config.setParam("InputTable","occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
config.setParam("xDimension","decimallongitude");
config.setParam("yDimension","decimallatitude");
config.setParam("Info","recordedby") ;
config.setParam("Resolution","0.5");
*/
config.setParam("InputTable","testextraction2");
config.setParam("xDimension","x");
config.setParam("yDimension","y");
config.setParam("Info","fvalue") ;
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setParam("Z","0");
config.setParam("user", "postgres");
config.setParam("password", "d4science2");
config.setParam("STOREURL","jdbc:postgresql://geoserver-test.d4science-ii.research-infrastructures.eu/timeseriesgisdb");
config.setParam("driver", "org.postgresql.Driver");
config.setParam("dialect", "org.hibernatespatial.postgis.PostgisDialect");
/*
PolygonMapsCreator mc = new PolygonMapsCreator();
*/
PointsMapsCreator mc = new PointsMapsCreator();
mc.setConfiguration(config);
mc.init();
mc.compute();
}
}

View File

@ -1,316 +0,0 @@
package org.gcube.dataanalysis.geo.test.projections;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
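// Runs the XYEXTRACTOR transducer against one of several preconfigured sources (NetCDF, AquaMaps, tables, WFS, ASC, geothermal); the active configuration is selected through the 'configs' array.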
public class TestXYExtractionAlgorithm {
// static AlgorithmConfiguration[] configs = { testXYExtractionNetCDF(),testXYExtractionAquaMaps(),testXYExtractionTable(),testXYExtractionTable2(),testDirectExtraction()};
// static AlgorithmConfiguration[] configs = { testXYExtractionTable2()};
// static AlgorithmConfiguration[] configs = { testDirectExtraction()};
// static AlgorithmConfiguration[] configs = { testXYExtractionAquaMaps()};
// static AlgorithmConfiguration[] configs = { testXYExtractionGeotermia()};
// static AlgorithmConfiguration[] configs = { testXYExtractionFAO()};
// static AlgorithmConfiguration[] configs = { testXYExtractionNetCDF()};
// static AlgorithmConfiguration[] configs = { testXYExtractionWFS11()};
static AlgorithmConfiguration[] configs = { testXYExtractionWFSDirect()};
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
for (int i = 0; i < configs.length; i++) {
AnalysisLogger.getLogger().debug("Executing: "+configs[i].getAgent());
List<ComputationalAgent> trans = null;
trans = TransducerersFactory.getTransducerers(configs[i]);
trans.get(0).init();
Regressor.process(trans.get(0));
StatisticalType st = trans.get(0).getOutput();
AnalysisLogger.getLogger().debug("ST:" + st);
trans = null;
}
}
private static AlgorithmConfiguration testXYExtractionProd() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("XYEXTRACTOR");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setParam("Layer","0aac424b-5f5b-4fa6-97d6-4b4deee62b97");
config.setParam("Z","0");
config.setParam("TimeIndex","0");
config.setParam("BBox_LowerLeftLat","-60");
config.setParam("BBox_LowerLeftLong","-50");
config.setParam("BBox_UpperRightLat","60");
config.setParam("BBox_UpperRightLong","50");
config.setParam("XResolution","0.5");
config.setParam("YResolution","0.5");
config.setParam("OutputTableName","testextractionprod");
config.setParam("OutputTableLabel","testextractionprod");
return config;
}
private static AlgorithmConfiguration testXYExtractionGeotermia() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("XYEXTRACTOR");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setParam("Layer","http://repoigg.services.iit.cnr.it:8080/geoserver/IGG/ows?service=WFS&version=1.0.0&request=GetFeature&typeName=IGG:area_temp_1000&maxFeatures=50");
config.setParam("Z","-1000");
config.setParam("TimeIndex","0");
config.setParam("BBox_LowerLeftLat","34.46");
config.setParam("BBox_LowerLeftLong","5.85");
config.setParam("BBox_UpperRightLat","49");
config.setParam("BBox_UpperRightLong","21.41");
config.setParam("XResolution","0.01");
config.setParam("YResolution","0.01");
config.setParam("OutputTableName","testextractiongeotermia");
config.setParam("OutputTableLabel","testextractiongeotermia");
return config;
}
private static AlgorithmConfiguration testXYExtractionFAO() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("XYEXTRACTOR");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/gcube/devsec/devVRE");
config.setParam("Layer","20c06241-f00f-4cb0-82a3-4e5ec97a0d0a");
config.setParam("Z","0");
config.setParam("TimeIndex","0");
config.setParam("BBox_LowerLeftLat","-90");
config.setParam("BBox_LowerLeftLong","-180");
config.setParam("BBox_UpperRightLat","90");
config.setParam("BBox_UpperRightLong","180");
config.setParam("XResolution","0.2");
config.setParam("YResolution","0.2");
config.setParam("OutputTableName","testextractionfao");
config.setParam("OutputTableLabel","testextractionfao");
return config;
}
private static AlgorithmConfiguration testXYExtractionNetCDF() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("XYEXTRACTOR");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setParam("Layer","c565e32c-c5b3-4964-b44f-06dc620563e9");
config.setParam("Z","0");
config.setParam("TimeIndex","0");
config.setParam("BBox_LowerLeftLat","-60");
config.setParam("BBox_LowerLeftLong","-50");
config.setParam("BBox_UpperRightLat","60");
config.setParam("BBox_UpperRightLong","50");
config.setParam("XResolution","0.5");
config.setParam("YResolution","0.5");
config.setParam("OutputTableName","testextraction2");
config.setParam("OutputTableLabel","testextraction2");
return config;
}
private static AlgorithmConfiguration testXYExtractionWFS11() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("XYEXTRACTOR");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/EGIP");
config.setParam("Layer","3f16f87a-68db-49ca-bfc7-affcd83ae274");
config.setParam("Z","0");
config.setParam("TimeIndex","0");
config.setParam("BBox_LowerLeftLat","-60");
config.setParam("BBox_LowerLeftLong","-70");
config.setParam("BBox_UpperRightLat","60");
config.setParam("BBox_UpperRightLong","70");
config.setParam("XResolution","0.5");
config.setParam("YResolution","0.5");
config.setParam("OutputTableName","testextraction2");
config.setParam("OutputTableLabel","testextraction2");
return config;
}
private static AlgorithmConfiguration testXYExtractionWFSDirect() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("XYEXTRACTOR");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/EGIP");
//config.setParam("Layer","http://repoigg.services.iit.cnr.it/geoserver/IGG/ows?service=WFS&version=1.1.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&TYPENAME=IGG:HeatFlowUnit");
// config.setParam("Layer","http://egip.brgm-rec.fr/wxs/?service=WFS&version=1.1.0&request=GetFeature&typeName=TemperatureUnit&srsName=EPSG:4326");
config.setParam("Layer","http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver/ows?service=wfs&version=1.0.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&TYPENAME=aquamaps:worldborders");
// https://issue.imarine.research-infrastructures.eu/raw-attachment/ticket/3082/gifgeomap.gif
config.setParam("Z","0");
config.setParam("TimeIndex","0");
config.setParam("BBox_LowerLeftLat","-90");
config.setParam("BBox_LowerLeftLong","-180");
config.setParam("BBox_UpperRightLat","90");
config.setParam("BBox_UpperRightLong","180");
// config.setParam("XResolution","0.3");
// config.setParam("YResolution","0.3");
config.setParam("XResolution","0.5");
config.setParam("YResolution","0.5");
config.setParam("OutputTableName","testextraction4");
config.setParam("OutputTableLabel","testextraction4");
return config;
}
private static AlgorithmConfiguration testDirectExtraction() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("XYEXTRACTOR");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setParam("Layer","https://dl.dropboxusercontent.com/u/12809149/geoserver-GetCoverage.image.asc");
config.setParam("Z","0");
config.setParam("TimeIndex","0");
config.setParam("BBox_LowerLeftLat","-60");
config.setParam("BBox_LowerLeftLong","-50");
config.setParam("BBox_UpperRightLat","60");
config.setParam("BBox_UpperRightLong","50");
config.setParam("XResolution","0.5");
config.setParam("YResolution","0.5");
config.setParam("OutputTableName","testextractiondirect");
config.setParam("OutputTableLabel","testextractiondirect");
return config;
}
private static AlgorithmConfiguration testXYExtractionAquaMaps() {
AlgorithmConfiguration config = testXYExtractionNetCDF();
config.setParam("Layer","04e61cb8-3c32-47fe-823c-80ac3d417a0b");
config.setParam("OutputTableName","testextractionaquamaps");
return config;
}
private static AlgorithmConfiguration testXYExtractionTable() {
AlgorithmConfiguration config = testXYExtractionNetCDF();
config.setAgent("XYEXTRACTOR_TABLE");
config.setParam("OutputTableName","testextractiontable");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
/*
config.setParam(TableMatrixRepresentation.tableNameParameter, "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "datetime");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "speed");
config.setParam(TableMatrixRepresentation.filterParameter, "speed<2");
*/
config.setParam(TableMatrixRepresentation.tableNameParameter, "occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "decimallongitude");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "decimallatitude");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, " ");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "decimallatitude");
config.setParam(TableMatrixRepresentation.filterParameter, " ");
return config;
}
private static AlgorithmConfiguration testXYExtractionTable2() {
AlgorithmConfiguration config = testXYExtractionNetCDF();
config.setAgent("XYEXTRACTOR_TABLE");
config.setParam("OutputTableName","testextractiontable2");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
/*
config.setParam(TableMatrixRepresentation.tableNameParameter, "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "datetime");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "speed");
config.setParam(TableMatrixRepresentation.filterParameter, "speed<2");
*/
config.setParam(TableMatrixRepresentation.tableNameParameter, "occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "decimallongitude");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "decimallatitude");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "modified");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "decimallatitude");
config.setParam(TableMatrixRepresentation.filterParameter, " ");
config.setParam("Z","0");
config.setParam("TimeIndex","1");
return config;
}
}

View File

@ -1,142 +0,0 @@
package org.gcube.dataanalysis.geo.test.projections;
import java.io.File;
import java.io.FileWriter;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.connectors.asc.AscDataExplorer;
import org.gcube.dataanalysis.geo.connectors.asc.AscRasterWriter;
import org.gcube.dataanalysis.geo.connectors.geotiff.GeoTiff;
import org.gcube.dataanalysis.geo.connectors.netcdf.NetCDF;
import org.gcube.dataanalysis.geo.connectors.table.Table;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
import org.gcube.dataanalysis.geo.connectors.wcs.WCS;
import org.gcube.dataanalysis.geo.connectors.wfs.WFS;
import org.gcube.dataanalysis.geo.utils.MapUtils;
import org.gcube.dataanalysis.geo.utils.VectorOperations;
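// Exercises the individual connectors (NetCDF, ASC, GeoTiff, WCS, WFS, Table) over a set of layer URLs, dumping each result as a global ASCII map and as an ASC raster.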
public class TestXYExtractionConnectors {
static String[] urlToTest3 = {
// "http://geoserver3.d4science.research-infrastructures.eu/geoserver"
// "http://geoserver2.d4science.research-infrastructures.eu/geoserver"
"http://www.fao.org/figis/geoserver/species/ows" };
static String[] layernamesTest3 = {
// "lsoleasolea20121217184934494cet"
// "lcarcharodoncarcharias20121217173706733cet"
// "lxiphiasgladius20130410182141778cest"
// "SPECIES_DIST_BIB"
"SPECIES_DIST_SWO" };
static String[] urlToTest1 = { "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/global-reanalysis-phys-001-004-b-ref-fr-mjm95-gridv_OCEANS_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_1366211498692.nc", };
static String[] layernamesTest1 = { "vomecrty" };
static String[] urlToTest2 = { "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/WOA2005TemperatureAnnual_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_.nc" };
static String[] layernamesTest2 = { "t00an1" };
static String[] urlToTest5 = { "./maxent3719990c-7998-4859-9dca-4b0a792f9d2f/layer1.asc" };
static String[] layernamesTest5 = { "layer1" };
static String[] urlToTest6 = { "table" };
static String[] layernamesTest6 = { "table" };
static String[] urlToTest = { "tableeez" };
static String[] layernamesTest = { "tableeez" };
static String[] urlToTest_ = { "https://dl.dropboxusercontent.com/u/12809149/layer1.asc", "http://thredds.research-infrastructures.eu/thredds/fileServer/public/netcdf/ph.asc", "http://thredds.research-infrastructures.eu/thredds/fileServer/public/netcdf/calcite.asc",
"https://dl.dropboxusercontent.com/u/12809149/wind1.tif",
"http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/WOA2005TemperatureAnnual_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_.nc", "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/salinity_annual_1deg_ENVIRONMENT_OCEANS_.nc", "http://thredds.d4science.org/thredds/fileServer/public/netcdf/global-reanalysis-phys-001-004-b-ref-fr-mjm95-icemod_ENVIRONMENT_OCEANS_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_1366211441189.nc", "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/CERSAT-GLO-CLIM_WIND_L4-OBS_FULL_TIME_SERIE_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_1366217956317.nc", "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/phosphate_seasonal_5deg_ENVIRONMENT_BIOTA_.nc", "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/global-analysis-bio-001-008-_a_BIOTA_ENVIRONMENT_1366217546908.nc", "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/dissolved_oxygen_annual_1deg_ENVIRONMENT_BIOTA_.nc", "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/global-reanalysis-phys-001-004-b-ref-fr-mjm95-gridv_OCEANS_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_1366211498692.nc", "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/nitrate_seasonal_5deg_ENVIRONMENT_BIOTA_.nc", "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/global-analysis-bio-001-008-a_BIOTA_ENVIRONMENT_1366217608283.nc", "http://thredds.research-infrastructures.eu/thredds/fileServer/public/netcdf/cloudmean.asc",
"http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver/wcs/wcs?service=wcs&version=1.0.0&request=GetCoverage&coverage=aquamaps:WorldClimBio2&CRS=EPSG:4326&bbox=-180,0,180,90&width=1&height=1&format=geotiff&RESPONSE_CRS=EPSG:4326",
"http://geoserver2.d4science.research-infrastructures.eu/geoserver"
};
static String[] layernamesTest_ = { "layer1", "ph", "calcite", "wind", "t00an1", "s_sd", "iicevelu", "wind_speed", "p_mn", "CHL", "o_mn", "vomecrty", "n_mn", "PHYC", "cloud", "aquamaps:WorldClimBio2", "lxiphiasgladius20130410182141778cest" };
public static void main(String[] args) throws Exception {
AnalysisLogger.setLogger("./cfg/" + AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "gcube");
config.setParam("DatabasePassword", "d4science2");
config.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
config.setGcubeScope("/gcube/devsec/devVRE");
FileWriter fw = new FileWriter(new File("mapsconnectors.txt"));
for (int t = 0; t < urlToTest.length; t++) {
String layerURL = urlToTest[t];
String layerName = layernamesTest[t];
AnalysisLogger.getLogger().debug("Processing Layer: " + layerURL);
List<Double> values = null;
double res = 0.5d;
List<Tuple<Double>> tuples = VectorOperations.generateCoordinateTripletsInBoundingBox(-180, 180, -90, 90, 0, res, res);
if (layerURL.endsWith(".nc")) {
NetCDF netcdf = new NetCDF(layerURL, layerName);
values = netcdf.getFeaturesInTimeInstantAndArea(layerURL, layerName, 0, tuples, -180, 180, -90, 90);
} else if (layerURL.endsWith(".asc")) {
AscDataExplorer asc = new AscDataExplorer(layerURL);
values = asc.retrieveDataFromAsc(tuples, 0);
} else if (layerURL.endsWith("tif")) {
GeoTiff geotiff = new GeoTiff(config);
values = geotiff.getFeaturesInTimeInstantAndArea(layerURL, layerName, 0, tuples, -180, 180, -90, 90);
} else if (layerURL.contains("wcs")) {
WCS wcs = new WCS(config, layerURL);
values = wcs.getFeaturesInTimeInstantAndArea(layerURL, layerName, 0, tuples, -180, 180, -90, 90);
} else if (layerURL.contains("geoserver")) {
WFS wfs = new WFS();
values = wfs.getFeaturesInTimeInstantAndArea(layerURL, layerName, 0, tuples, -180, 180, -90, 90);
} else if (layerURL.equals("table")) {
config.setParam(TableMatrixRepresentation.tableNameParameter, "testextraction4");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "approx_x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "approx_y");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "time");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "f_cat");
config.setParam(TableMatrixRepresentation.filterParameter, " ");
Table connector = new Table(config, res);
values = connector.getFeaturesInTimeInstantAndArea(null, null, 0, tuples, -180, 180, -90, 90);
} else if (layerURL.contains("tableeez")) {
config.setParam("DatabaseUserName", "postgres");
config.setParam("DatabasePassword", "d4science2");
config.setParam("DatabaseURL", "jdbc:postgresql://geoserver-dev.d4science-ii.research-infrastructures.eu/aquamapsdb");
config.setParam(TableMatrixRepresentation.tableNameParameter, "\"WorldEEZv72012HR\"");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "longitude");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "latitude");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "eez_id");
config.setParam(TableMatrixRepresentation.filterParameter, " ");
Table connector = new Table(config, res);
values = connector.getFeaturesInTimeInstantAndArea(null, null, 0, tuples, -180, 180, -90, 90);
}
double[][] matrix = VectorOperations.vectorToMatix(values, -180, 180, -90, 90, res, res);
// System.out.println(MapUtils.globalASCIIMap(values,step,step));
System.out.println(MapUtils.globalASCIIMap(matrix));
fw.write(MapUtils.globalASCIIMap(matrix));
AscRasterWriter writer = new AscRasterWriter();
writer.writeRasterInvertYAxis("testraster.asc", matrix, -180, -90, res, "-9999");
}
fw.close();
}
}

View File

@ -1,75 +0,0 @@
package org.gcube.dataanalysis.geo.test.regression;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
import org.gcube.dataanalysis.geo.matrixmodel.TimeSeriesExtractor;
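// Regression test for GeoTiff layers: extracts a time series at point (0,0) and displays it; takeSignal() re-analyses a signal saved in signal.txt with the PeriodicityDetector.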
public class RegressionGeoTiff {
static String cfg = "./cfg/";
public static void main(String[] args) throws Exception{
String layertitle = "WorldClimBioGeoTiffTest2";
long t0 = System.currentTimeMillis();
AnalysisLogger.setLogger(cfg+AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setConfigPath(cfg);
TimeSeriesExtractor intersector = new TimeSeriesExtractor(config);
double signal[] = intersector.extractT(layertitle, 0d, 0d,0d,0.5);
SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
System.out.println("ELAPSED TIME: "+(System.currentTimeMillis()-t0));
}
public static void main1(String[] args) throws Exception{
takeSignal();
}
public static void takeSignal() throws Exception{
BufferedReader br = new BufferedReader(new FileReader(new File("signal.txt")));
String line = br.readLine();
double[] signal = null;
while (line!=null){
String [] el = line.split(",");
signal=new double[el.length];
int i=0;
for (String e:el){
signal[i]=Double.parseDouble(e);
i++;
}
line = null;
}
br.close();
// SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
PeriodicityDetector pd = new PeriodicityDetector();
// signal = pd.produceNoisySignal(2000, 1, 0.1f, 0f);
//float freq=1;//signal.length;
// double F = pd.detectFrequency(signal, (int)freq, 0, freq, 1f,true);
double F = pd.detectFrequency(signal,true);
System.out.println("Detected Frequency:"+F+" indecision ["+pd.lowermeanF+" , "+pd.uppermeanF+"]");
System.out.println("Detected Period:"+pd.meanPeriod+" indecision ["+pd.lowermeanPeriod+" , "+pd.uppermeanPeriod+"]");
System.out.println("Detected Periodicity Strength:"+pd.periodicityStrength);
}
}

View File

@ -1,33 +0,0 @@
package org.gcube.dataanalysis.geo.test.regression;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
import org.gcube.dataanalysis.geo.matrixmodel.TimeSeriesExtractor;
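// Regression test for low-periodicity detection: extracts a time series at (0,0) from a catalogue layer and reports detected frequency, period and periodicity strength.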
public class RegressionLowPeriodicity {
static String cfg = "./cfg/";
public static void main(String[] args) throws Exception{
String layertitle = "c565e32c-c5b3-4964-b44f-06dc620563e9";
long t0 = System.currentTimeMillis();
AnalysisLogger.setLogger(cfg+AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setConfigPath(cfg);
TimeSeriesExtractor intersector = new TimeSeriesExtractor(config);
double signal[] = intersector.extractT(layertitle, 0d, 0d,0d,0.5);
System.out.println("ELAPSED TIME: "+(System.currentTimeMillis()-t0));
System.out.println("Signal: "+signal.length);
PeriodicityDetector pd = new PeriodicityDetector();
double F = pd.detectFrequency(signal,true);
System.out.println("Detected Frequency:"+F+" indecision ["+pd.lowermeanF+" , "+pd.uppermeanF+"]");
System.out.println("Detected Period:"+pd.meanPeriod+" indecision ["+pd.lowermeanPeriod+" , "+pd.uppermeanPeriod+"]");
System.out.println("Detected Periodicity Strength:"+pd.periodicityStrength+":"+pd.getPeriodicityStregthInterpretation());
}
}

View File

@ -1,70 +0,0 @@
package org.gcube.dataanalysis.geo.test.regression;
import java.util.ArrayList;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
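// Regression test for the OCCURRENCE_ENRICHMENT transducer: enriches an occurrence table with values from temperature, chlorophyll and ph layers at 0.5 degree resolution.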
public class RegressionOccurrenceEnrichment {
static AlgorithmConfiguration[] configs = { testOccEnrichment()};
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
for (int i = 0; i < configs.length; i++) {
AnalysisLogger.getLogger().debug("Executing: "+configs[i].getAgent());
List<ComputationalAgent> trans = null;
trans = TransducerersFactory.getTransducerers(configs[i]);
trans.get(0).init();
Regressor.process(trans.get(0));
StatisticalType st = trans.get(0).getOutput();
AnalysisLogger.getLogger().debug("ST:" + st);
trans = null;
}
}
private static AlgorithmConfiguration testOccEnrichment() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("OCCURRENCE_ENRICHMENT");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setParam("OccurrenceTable","occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
config.setParam("LongitudeColumn","decimallongitude");
config.setParam("LatitudeColumn","decimallatitude");
config.setParam("ScientificNameColumn","scientificname");
config.setParam("TimeColumn","eventdate");
config.setParam("OptionalFilter","");
config.setParam("Resolution","0.5");
config.setParam("OutputTableDBName","testenrichment");
config.setParam("OutputTableName","testenrichment");
String sep=AlgorithmConfiguration.getListSeparator();
config.setParam("Layers","8f5d883f-95bf-4b7c-8252-aaf0b2e6fd81"+sep+"4d597da9-dbfa-4a65-9de6-9bbff69eac19"+sep+"2c2304d1-681a-4f3a-8409-e8cdb5ed447f");
config.setParam("FeaturesNames","temperature"+sep+"chlorophyll"+sep+"ph");
// config.setParam("Layers","4d597da9-dbfa-4a65-9de6-9bbff69eac19"+sep+"2c2304d1-681a-4f3a-8409-e8cdb5ed447f");
// config.setParam("FeaturesNames","chlorophyll"+sep+"ph");
return config;
}
}

View File

@ -1,40 +0,0 @@
package org.gcube.dataanalysis.geo.test.regression;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
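// Regression test: reads a comma-separated signal from signalPeriodic.txt and reports detected frequency, period and periodicity strength.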
public class RegressionPeriodicity {
static String cfg = "./cfg/";
public static void main(String[] args) throws Exception{
takeSignal();
}
public static void takeSignal() throws Exception{
BufferedReader br = new BufferedReader(new FileReader(new File("signalPeriodic.txt")));
String line = br.readLine();
double[] signal = null;
while (line!=null){
String [] el = line.split(",");
signal=new double[el.length];
int i=0;
for (String e:el){
signal[i]=Double.parseDouble(e);
i++;
}
line = null;
}
br.close();
PeriodicityDetector pd = new PeriodicityDetector();
double F = pd.detectFrequency(signal,true);
System.out.println("Detected Frequency:"+F+" indecision ["+pd.lowermeanF+" , "+pd.uppermeanF+"]");
System.out.println("Detected Period:"+pd.meanPeriod+" indecision ["+pd.lowermeanPeriod+" , "+pd.uppermeanPeriod+"]");
System.out.println("Detected Periodicity Strength:"+pd.periodicityStrength+":"+pd.getPeriodicityStregthInterpretation());
}
}

View File

@ -1,25 +0,0 @@
package org.gcube.dataanalysis.geo.test.regression;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
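// Regression test: generates a synthetic noisy periodic signal and checks that the PeriodicityDetector recovers its frequency, period and strength.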
public class RegressionPeriodicityAutomatic {
static String cfg = "./cfg/";
public static void main(String[] args) throws Exception{
takeSignal();
}
public static void takeSignal() throws Exception{
PeriodicityDetector pd = new PeriodicityDetector();
double[] signal = pd.produceNoisySignal(2000, 1, 0.1f, 0f);
double F = pd.detectFrequency(signal,true);
System.out.println("Detected Frequency:"+F+" indecision ["+pd.lowermeanF+" , "+pd.uppermeanF+"]");
System.out.println("Detected Period:"+pd.meanPeriod+" indecision ["+pd.lowermeanPeriod+" , "+pd.uppermeanPeriod+"]");
System.out.println("Detected Periodicity Strength:"+pd.periodicityStrength+":"+pd.getPeriodicityStregthInterpretation());
}
}

View File

@ -1,39 +0,0 @@
package org.gcube.dataanalysis.geo.test.regression;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
import org.gcube.dataanalysis.geo.matrixmodel.PointsExtractor;
import org.gcube.dataanalysis.geo.matrixmodel.TimeSeriesExtractor;
import com.vividsolutions.jts.geom.util.PointExtracter;
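// Regression test: extracts the value of a single point (x=0, y=0, z=0, t=0) from a catalogue layer via PointsExtractor.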
public class RegressionPointExtraction {
static String cfg = "./cfg/";
public static void main(String[] args) throws Exception{
String layertitle = "afd54b39-30f7-403a-815c-4f91c6c74c26";
long t0 = System.currentTimeMillis();
AnalysisLogger.setLogger(cfg+AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setConfigPath(cfg);
PointsExtractor pe = new PointsExtractor(config);
double value = pe.extractXYZT(layertitle, 0,0,0,0, 0);
System.out.println("Point value: "+value);
System.out.println("ELAPSED TIME: "+(System.currentTimeMillis()-t0));
}
}

View File

@ -1,51 +0,0 @@
package org.gcube.dataanalysis.geo.test.regression;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
import org.gcube.dataanalysis.geo.matrixmodel.TimeSeriesExtractor;
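// Regression test: extracts a time series at a fixed position from a vessel-tracks table (filtered on speed<2) and runs periodicity detection on it.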
public class RegressionSignalFromTable {
static String cfg = "./cfg/";
public static void main(String[] args) throws Exception{
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam(TableMatrixRepresentation.tableNameParameter, "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "datetime");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "speed");
config.setParam(TableMatrixRepresentation.filterParameter, "speed<2");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setConfigPath(cfg);
TimeSeriesExtractor intersector = new TimeSeriesExtractor(config);
long t0 = System.currentTimeMillis();
double signal[] = intersector.extractT("table", -47.97,43.42, 0d, 0.5);
System.out.println("ELAPSED TIME: "+(System.currentTimeMillis()-t0));
System.out.println("Signal: "+signal.length);
PeriodicityDetector pd = new PeriodicityDetector();
double F = pd.detectFrequency(signal,true);
System.out.println("Detected Frequency:"+F+" indecision ["+pd.lowermeanF+" , "+pd.uppermeanF+"]");
System.out.println("Detected Period:"+pd.meanPeriod+" indecision ["+pd.lowermeanPeriod+" , "+pd.uppermeanPeriod+"]");
System.out.println("Detected Periodicity Strength:"+pd.periodicityStrength);
}
}

View File

@ -1,88 +0,0 @@
package org.gcube.dataanalysis.geo.test.regression;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
import org.gcube.dataanalysis.ecoengine.processing.factories.EvaluatorsFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
public class RegressionTestMapsComparison {
/**
* example of parallel processing on a single machine the procedure will generate a new table for a distribution on suitable species
*
*/
public static void main(String[] args) throws Exception {
List<ComputationalAgent> evaluators = EvaluatorsFactory.getEvaluators(testConfig1());
evaluators.get(0).init();
Regressor.process(evaluators.get(0));
evaluators = null;
}
private static AlgorithmConfiguration testConfig1() {
AlgorithmConfiguration config = Regressor.getConfig();
config.setNumberOfResources(1);
config.setConfigPath("./cfg");
config.setPersistencePath("./");
config.setAgent("MAPS_COMPARISON");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
// config.setParam("Layer_1","86a7ac79-866a-49c6-b5d5-602fc2d87ddd");
// config.setParam("Layer_2","86a7ac79-866a-49c6-b5d5-602fc2d87ddd");
//World seas : IHO vs Marine regions
// config.setParam("Layer_2","70a6d757-e607-46f7-b643-e21749f45a42");
// config.setParam("Layer_1","a2a8c130-124f-45b5-973f-c9358028a2a6");
//FAO vs FAO:
// config.setParam("Layer_1","b040894b-c5db-47fc-ba9c-d4fafcdcf620"); //goblin shark
// config.setParam("Layer_2","c9a31223-cc00-4acd-bc5b-a0c76a7f79c7"); //humbolt squid
//FAO vs AquaMaps
// config.setParam("Layer_1","b040894b-c5db-47fc-ba9c-d4fafcdcf620");
// config.setParam("Layer_2","c9a31223-cc00-4acd-bc5b-a0c76a7f79c7");
//NetCDF vs NETCDF WOA
// config.setParam("Layer_1","e0dbbcc0-8364-4087-8bcb-c7d95b2f55c8"); //statistical mean oxygen
// config.setParam("Layer_2","49f5a5a1-80ff-4a00-8c84-dac29bda1a23");//statistical mean phosphate
//Eleutheronema tetradactylum
config.setParam("Layer_1","fao-species-map-fot");
config.setParam("Layer_2","c492f5d3-1cfc-44e3-b8d2-8530fec3e7e7");
//NetCDF vs NetCDF MyOcean
// config.setParam("Layer_1","e0dbbcc0-8364-4087-8bcb-c7d95b2f55c8"); //statistical mean oxygen
// config.setParam("Layer_2","fc9ac2f4-a2bd-43d1-a361-ac67c5ceac31");//temperature
//NetCDF vs IHO
// config.setParam("Layer_1","70a6d757-e607-46f7-b643-e21749f45a42");//IHO
// config.setParam("Layer_2","fc9ac2f4-a2bd-43d1-a361-ac67c5ceac31");//temperature
//NetCDF vs NetCDF MyOcean only
// config.setParam("Layer_1","fc9ac2f4-a2bd-43d1-a361-ac67c5ceac31"); //statistical mean oxygen
// config.setParam("Layer_2","fc9ac2f4-a2bd-43d1-a361-ac67c5ceac31");//temperature
//NetCDF vs NetCDF Envri
// config.setParam("Layer_1","Etna Volcano SAR Analysis 1"); //
// config.setParam("Layer_2","Etna Volcano SAR Analysis 7");//
config.setParam("ValuesComparisonThreshold",""+0.1);
config.setParam("Z","0");
config.setGcubeScope("/gcube");
// config.setGcubeScope(null);
// config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps");
return config;
}
}

View File

@ -1,75 +0,0 @@
package org.gcube.dataanalysis.geo.test.regression;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
import org.gcube.dataanalysis.geo.matrixmodel.TimeSeriesExtractor;
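// Regression test for WCS layers: extracts a time series at (0,0) and displays it; takeSignal() re-analyses a signal saved in signal.txt.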
public class RegressionWCS {
static String cfg = "./cfg/";
public static void main(String[] args) throws Exception{
String layertitle = "WorldClimBioWCS2";
long t0 = System.currentTimeMillis();
AnalysisLogger.setLogger(cfg+AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setConfigPath(cfg);
TimeSeriesExtractor intersector = new TimeSeriesExtractor(config);
double signal[] = intersector.extractT(layertitle, 0d, 0d,0d,0.5);
SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
System.out.println("ELAPSED TIME: "+(System.currentTimeMillis()-t0));
}
public static void main1(String[] args) throws Exception{
takeSignal();
}
public static void takeSignal() throws Exception{
BufferedReader br = new BufferedReader(new FileReader(new File("signal.txt")));
String line = br.readLine();
double[] signal = null;
while (line!=null){
String [] el = line.split(",");
signal=new double[el.length];
int i=0;
for (String e:el){
signal[i]=Double.parseDouble(e);
i++;
}
line = null;
}
br.close();
// SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
PeriodicityDetector pd = new PeriodicityDetector();
// signal = pd.produceNoisySignal(2000, 1, 0.1f, 0f);
//float freq=1;//signal.length;
// double F = pd.detectFrequency(signal, (int)freq, 0, freq, 1f,true);
double F = pd.detectFrequency(signal,true);
System.out.println("Detected Frequency:"+F+" indecision ["+pd.lowermeanF+" , "+pd.uppermeanF+"]");
System.out.println("Detected Period:"+pd.meanPeriod+" indecision ["+pd.lowermeanPeriod+" , "+pd.uppermeanPeriod+"]");
System.out.println("Detected Periodicity Strength:"+pd.periodicityStrength);
}
}

View File

@ -1,52 +0,0 @@
package org.gcube.dataanalysis.geo.test.regression;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
import org.gcube.dataanalysis.geo.matrixmodel.ZExtractor;
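// Regression test: extracts the water-column (Z) profile at a fixed point from the same layer exposed as GeoTiff and as WCS, to compare the two connectors.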
public class RegressionWaterColumnGeoTiff {
public static void main(String[] args) throws Exception{
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
String layertitle = "WorldClimBioGeoTiffTest2";
// layertitle = "WorldClimBioWCS2";
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
ZExtractor extractor = new ZExtractor(config);
long t0 = System.currentTimeMillis();
double watercolumn[] = extractor.extractZ(layertitle, 18.620429d,20.836419d,0, 0);
System.out.println("ELAPSED TIME: "+(System.currentTimeMillis()-t0));
System.out.println("Signal: "+watercolumn.length);
System.out.println("Signal first element: "+watercolumn[0]);
SignalProcessing.displaySignalWithGenericTime(watercolumn, 0, 1, "signal");
layertitle = "WorldClimBioWCS2";
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
extractor = new ZExtractor(config);
t0 = System.currentTimeMillis();
watercolumn = extractor.extractZ(layertitle, 18.620429d,20.836419d,0, 0);
System.out.println("ELAPSED TIME: "+(System.currentTimeMillis()-t0));
System.out.println("Signal: "+watercolumn.length);
System.out.println("Signal first element: "+watercolumn[0]);
SignalProcessing.displaySignalWithGenericTime(watercolumn, 0, 1, "signal");
}
}

View File

@ -1,45 +0,0 @@
package org.gcube.dataanalysis.geo.test.regression;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
import org.gcube.dataanalysis.geo.matrixmodel.ZExtractor;
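// Regression test: extracts a Z profile at a fixed position from a vessel-tracks table (the z dimension is mapped onto the x column, filter speed<2).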
public class RegressionWaterColumnTable {
static String cfg = "./cfg/";
public static void main(String[] args) throws Exception{
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam(TableMatrixRepresentation.tableNameParameter, "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
config.setParam(TableMatrixRepresentation.zDimensionColumnParameter, "x");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "datetime");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "speed");
config.setParam(TableMatrixRepresentation.filterParameter, "speed<2");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setConfigPath(cfg);
ZExtractor extractor = new ZExtractor(config);
long t0 = System.currentTimeMillis();
double watercolumn[] = extractor.extractZ("table", -47.97,43.42, 0, 0.5);
System.out.println("ELAPSED TIME: "+(System.currentTimeMillis()-t0));
System.out.println("Signal: "+watercolumn.length);
SignalProcessing.displaySignalWithGenericTime(watercolumn, 0, 1, "signal");
}
}

View File

@ -1,32 +0,0 @@
package org.gcube.dataanalysis.geo.test.regression;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
import org.gcube.dataanalysis.geo.matrixmodel.ZExtractor;
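// Regression test: extracts a water-column (Z) profile at (0,0) from a layer referenced by catalogue id (by its name, a temperature layer).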
public class RegressionWaterColumnTemperature {
public static void main(String[] args) throws Exception{
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
String layertitle = "6411b110-7572-457a-a662-a16e4ff09e4e";
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
ZExtractor extractor = new ZExtractor(config);
long t0 = System.currentTimeMillis();
double watercolumn[] = extractor.extractZ(layertitle, 0,0, 0, 100);
System.out.println("ELAPSED TIME: "+(System.currentTimeMillis()-t0));
System.out.println("Signal: "+watercolumn.length);
SignalProcessing.displaySignalWithGenericTime(watercolumn, 0, 1, "signal");
}
}

View File

@ -1,71 +0,0 @@
package org.gcube.dataanalysis.geo.test.regression;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
import org.gcube.dataanalysis.geo.matrixmodel.XYExtractor;
public class RegressionXYSlice {
static String cfg = "./cfg/";
static String layertitle = "120313e1-c0cb-4b3c-9779-ed651c490cdb";
static AlgorithmConfiguration config = new AlgorithmConfiguration();
public static void main(String[] args) throws Exception {
config.setConfigPath(cfg);
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
AnalysisLogger.setLogger(cfg + AlgorithmConfiguration.defaultLoggerFile);
config.setPersistencePath("./");
// sliceWFS();
// sliceNetCDF();
sliceASC();
// sliceTable();
}
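// each slice* method below only switches the data source (WFS, NetCDF, ASC raster or table)
// and then delegates to execute() for the actual grid extraction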
public static void sliceWFS() throws Exception{
AnalysisLogger.getLogger().debug("WFS");
// latimeria chalumnae
layertitle = "120313e1-c0cb-4b3c-9779-ed651c490cdb";
execute();
}
public static void sliceNetCDF() throws Exception{
AnalysisLogger.getLogger().debug("NetCDF");
// Chlorophyll
layertitle = "c565e32c-c5b3-4964-b44f-06dc620563e9";
execute();
}
public static void sliceASC() throws Exception{
AnalysisLogger.getLogger().debug("ASC");
//
layertitle = "2c2304d1-681a-4f3a-8409-e8cdb5ed447f";
execute();
}
public static void sliceTable() throws Exception{
AnalysisLogger.getLogger().debug("Table");
// latimeria chalumnae
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam(TableMatrixRepresentation.tableNameParameter, "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
// config.setParam(TableMatrixRepresentation.zDimensionColumnParameter, "x");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "datetime");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "speed");
config.setParam(TableMatrixRepresentation.filterParameter, "speed<2");
execute();
}
public static void execute() throws Exception{
long t0 = System.currentTimeMillis();
XYExtractor intersector = new XYExtractor(config);
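// extract a global grid (lon -180..180, lat -90..90) for time index 0 at depth 0 with a
// 0.5 x 0.5 degree cell size (parameter order assumed from the values used here)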
intersector.extractXYGrid(layertitle, 0, -180, 180, -90, 90, 0, 0.5, 0.5);
System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - t0));
}
}

View File

@@ -1,104 +1,121 @@
package org.gcube.dataanalysis.geo.utils;
import java.io.File;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.common.resources.gcore.GCoreEndpoint;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data.transfer.library.TransferResult;
import org.gcube.dataanalysis.executor.util.DataTransferer;
import org.gcube.dataanalysis.executor.util.InfraRetrieval;
import org.gcube.dataanalysis.geo.connectors.netcdf.NetCDFDataExplorer;
import org.gcube.dataanalysis.geo.infrastructure.GeoNetworkInspector;
import org.gcube.dataanalysis.geo.meta.GenericLayerMetadata;
import org.gcube.dataanalysis.geo.meta.OGCFormatter;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.gcube.resources.discovery.icclient.ICFactory;
import org.opengis.metadata.identification.TopicCategory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ucar.nc2.dt.GridDatatype;
public class ThreddsPublisher {
public static void main (String[] args) throws Exception{
// String scope = "/d4science.research-infrastructures.eu/gCubeApps";
String scope = "/gcube/devsec";
String username = "gianpaolo.coro";
//String fileAbsolutePath = "C:/Users/coro/Dropbox/Public/wind1.tif";
String fileAbsolutePath = "C:/Users/coro/Downloads/adux_pres_portale_test.nc";
String layerTitle = "architeuthis dux distribution file - test";
String layerName = "adux_pres_2";
String abstractField = "abstract architeuthis dux distribution file - test";
String[] topics = {"adux","D4Science"};
double resolution = -1;
AnalysisLogger.setLogger("./cfg/"+AlgorithmConfiguration.defaultLoggerFile);
publishOnThredds(scope, username, fileAbsolutePath, layerTitle, layerName, abstractField, topics, resolution,false);
}
private static final String threddsServiceName = "Thredds";
private static final String threddsServiceClass = "SDI";
public static boolean publishOnThredds(String scope,String username, String fileAbsolutePath, String layerTitle, String layerName, String abstractField, String[] topics, double resolution, boolean isprivate) throws Exception{
private static final String dataTransferName = "data-transfer-service";
private static final String dataTransferClass = "DataTransfer";
private static final String dataTransferEndpoint = "org.gcube.data.transfer.service.DTService";
private static final String threddsPersistenceID = "thredds";
private static final String threddsRemoteFolder = "/public/netcdf";
private static final String threddsFileServerPath = "/thredds/fileServer/public/netcdf/";
private static final String threddsCatalogPath = "/thredds/catalog/public/netcdf/catalog.xml";
private static final String netCDFExtension = ".nc";
private static final Logger log = LoggerFactory.getLogger(ThreddsPublisher.class);
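// the constants above name the Thredds GCoreEndpoint (SDI/Thredds), the data-transfer
// service expected on the same node, and the remote folder/URL paths used for publication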
public static boolean publishOnThredds(String username, String fileAbsolutePath, String layerTitle, String layerName, String abstractField, String[] topics, double resolution, boolean isprivate) throws Exception{
//TODO manage faults
String remoteFolder = "/data/content/thredds/public/netcdf/";
List<String> threddsAddress = InfraRetrieval.retrieveServiceAddress("Gis", "THREDDS", scope, "Geoserver");
if (threddsAddress.size()==0)
threddsAddress = InfraRetrieval.retrieveServiceAddress("Gis", "Thredds", scope, "Geoserver");
DiscoveryClient<String> threddsClient = ICFactory.client();
SimpleQuery threddsQuery = ICFactory.queryFor(GCoreEndpoint.class);
threddsQuery.addCondition(String.format("$resource/Profile/ServiceName eq '%s'",threddsServiceName));
threddsQuery.addCondition(String.format("$resource/Profile/ServiceClass eq '%s'",threddsServiceClass));
threddsQuery.setResult("$resource/Profile/GHN/@UniqueID/string()");
List<String> threddsAddress = threddsClient.submit(threddsQuery);
if (threddsAddress.size()==0)
throw new Exception("Thredds resource is not available in scope "+scope);
throw new Exception("Thredds Endpoint not found in scope "+ScopeProvider.instance.get());
String threddServiceAddress = threddsAddress.get(0);
threddServiceAddress = threddServiceAddress.substring(threddServiceAddress.indexOf("http://")+7);
threddServiceAddress = threddServiceAddress.substring(0,threddServiceAddress.indexOf("/"));
String threddsWhnId = threddsAddress.get(0);
AnalysisLogger.getLogger().debug("Found "+threddsAddress.size()+" thredds services");
AnalysisLogger.getLogger().debug("THREDDS: "+threddServiceAddress);
List<String> dataTransferAddress = InfraRetrieval.retrieveService("agent-service", scope);
DiscoveryClient<String> dataTransferClient = ICFactory.client();
SimpleQuery dataTransferQuery = ICFactory.queryFor(GCoreEndpoint.class);
dataTransferQuery.addCondition(String.format("$resource/Profile/ServiceName/string() eq '%s'",dataTransferName));
dataTransferQuery.addCondition(String.format("$resource/Profile/ServiceClass/string() eq '%s'",dataTransferClass));
dataTransferQuery.addCondition(String.format("$resource/Profile/GHN/@UniqueID/string() eq '%s'",threddsWhnId));
dataTransferQuery.setResult(String.format("$resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint[@EntryName/string() eq \"%s\"]/string()",dataTransferEndpoint));
List<String> dataTransferAddress = dataTransferClient.submit(dataTransferQuery);
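// the query above restricts the data-transfer endpoint to the GHN (UniqueID) hosting
// Thredds, so the file is transferred onto the Thredds node itself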
if (dataTransferAddress.size()==0)
throw new Exception("Data Transfer services are not available in scope "+scope);
throw new Exception("Data Transfer services is not available in scope "+ScopeProvider.instance.get());
AnalysisLogger.getLogger().debug("Found "+dataTransferAddress.size()+" transfer services");
String threddsDTService = threddServiceAddress;
int threddsDTPort = 9090;
boolean found = false;
for (String datatransferservice:dataTransferAddress){
AnalysisLogger.getLogger().debug("Transfer service found");
datatransferservice = datatransferservice.substring(datatransferservice.indexOf("http://")+7);
String servicehost = datatransferservice.substring(0,datatransferservice.indexOf(":"));
String serviceport = datatransferservice.substring(datatransferservice.indexOf(":")+1,datatransferservice.indexOf("/"));
AnalysisLogger.getLogger().debug("Transfer service: "+servicehost+":"+serviceport);
if (threddServiceAddress.equals(servicehost)){
threddsDTPort = Integer.parseInt(serviceport);
found = true;
break;
}
}
String threddsDTService = dataTransferAddress.get(0);
log.debug("data transfer found is {}",threddsDTService);
Pattern pattern = Pattern.compile("(https?)://([^:/]*)(:(\\d{2,5}))?.*");
Matcher matcher = pattern.matcher(threddsDTService);
if (!matcher.find())
throw new Exception("wrong address found "+threddsDTService);
String dataTransferProtocol = matcher.group(1);
String dataTransferHost = matcher.group(2);
String portAsString = matcher.group(4);
Integer dataTransferPort = portAsString==null?null:Integer.parseInt(portAsString);
StringBuilder threddsBaseURL = new StringBuilder(dataTransferProtocol).append("://").append(dataTransferHost);
if (portAsString!=null)
threddsBaseURL.append(":").append(portAsString);
if (!found)
throw new Exception("Thredds data transfer has not been found in the same scope of the catalog: "+scope);
boolean gridded=true;
if (fileAbsolutePath.endsWith(".nc")){
AnalysisLogger.getLogger().debug("checking NetCDF file coherence"+fileAbsolutePath);
log.debug("checking NetCDF file coherence {}",fileAbsolutePath);
//let's publish also if the netCDF is not gridded
try{
NetCDFDataExplorer.getGrid(layerName, fileAbsolutePath);
}catch(Exception e){
gridded=false;
AnalysisLogger.getLogger().debug("NetCDF is not gridded"+fileAbsolutePath);
log.debug("NetCDF is not gridded {}",fileAbsolutePath);
}
}
AnalysisLogger.getLogger().debug("Transferring via DT to "+threddServiceAddress);
DataTransferer.transferFileToService(scope, username, threddsDTService, threddsDTPort, fileAbsolutePath, remoteFolder);
log.debug("Transferring via DT to {} with parameters {} {} {} {} ",threddsDTService, dataTransferHost, dataTransferPort, fileAbsolutePath, threddsRemoteFolder);
TransferResult transferResult = DataTransferer.transferFileToService(ScopeProvider.instance.get(), username, dataTransferHost, dataTransferPort, fileAbsolutePath, threddsRemoteFolder, threddsPersistenceID);
AnalysisLogger.getLogger().debug("Adding metadata on GeoNetwork");
String realFileName = transferResult.getRemotePath().substring(transferResult.getRemotePath().lastIndexOf("/")+1);
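// the remote side may store the file under a different name, so the metadata below uses
// the name taken from the transfer result rather than the local file name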
if (fileAbsolutePath.endsWith(".nc") && gridded)
publishNetCDFMeta(scope, layerTitle, abstractField, new File(fileAbsolutePath).getName(),layerName,threddServiceAddress,username,topics,isprivate);
log.debug("Adding metadata on GeoNetwork, real file name on threadds is {} ",realFileName);
if (fileAbsolutePath.endsWith(netCDFExtension) && gridded)
publishNetCDFMeta(ScopeProvider.instance.get(), layerTitle, abstractField, realFileName,layerName,threddsBaseURL.toString(),username,topics,isprivate);
else{
if (resolution==-1 && gridded)
throw new Exception ("Specify valid resolution parameter for non-NetCDF raster datasets");
publishOtherFileMeta(scope, layerTitle, resolution, abstractField, new File(fileAbsolutePath).getName(), threddServiceAddress,username,topics,isprivate);
publishOtherFileMeta(ScopeProvider.instance.get(), layerTitle, resolution, abstractField, realFileName, threddsBaseURL.toString(),username,topics,isprivate);
}
AnalysisLogger.getLogger().debug("Finished");
log.debug("Finished");
return true;
}
@@ -116,7 +133,7 @@ public class ThreddsPublisher {
else
geonetworkGroup = gninspector.getGeonetworkPublicGroup();
AnalysisLogger.getLogger().debug("GeoNetwork Info: "+geonetworkURL+" "+geonetworkUser+" "+geonetworkGroup);
log.debug("GeoNetwork Info: "+geonetworkURL+" "+geonetworkUser+" "+geonetworkGroup);
metadataInserter.setGeonetworkUrl(geonetworkURL);
metadataInserter.setGeonetworkPwd(geonetworkPassword);
@@ -132,20 +149,20 @@ public class ThreddsPublisher {
metadataInserter.setResolution(resolution);
AnalysisLogger.getLogger().debug("Res:"+resolution);
log.debug("Res:"+resolution);
String [] urls = {"http://"+threddsURL+"/thredds/fileServer/public/netcdf/"+filename};
String [] urls = {threddsURL+threddsFileServerPath+filename};
String [] protocols = {"HTTP"};
AnalysisLogger.getLogger().debug("Publishing in group: "+metadataInserter.getGeonetworkGroup());
AnalysisLogger.getLogger().debug("Inserting custom metadata ");
log.debug("Publishing in group: "+metadataInserter.getGeonetworkGroup());
log.debug("Inserting custom metadata ");
metadataInserter.customMetaDataInsert(urls,protocols,isprivate);
}
private static void publishNetCDFMeta(String scope, String layerTitle,String abstractField, String filename, String netCDFLayerName, String threddsURL, String username, String [] topics, boolean isprivate) throws Exception{
AnalysisLogger.getLogger().debug("Getting GeoNetwork Info");
private static void publishNetCDFMeta(String scope, String layerTitle,String abstractField, String filename, String netCDFLayerName, String threddsURL, String username, String [] topics, boolean isprivate) throws Exception{
log.debug("Getting GeoNetwork Info");
GenericLayerMetadata metadataInserter = new GenericLayerMetadata();
GeoNetworkInspector gninspector =new GeoNetworkInspector();
@@ -159,7 +176,7 @@ public class ThreddsPublisher {
else
geonetworkGroup = gninspector.getGeonetworkPublicGroup();
AnalysisLogger.getLogger().debug("GeoNetwork Info: "+geonetworkURL+" "+geonetworkUser+" "+geonetworkGroup);
log.debug("GeoNetwork Info: "+geonetworkURL+" "+geonetworkUser+" "+geonetworkGroup);
metadataInserter.setGeonetworkUrl(geonetworkURL);
metadataInserter.setGeonetworkPwd(geonetworkPassword);
@@ -172,9 +189,9 @@ public class ThreddsPublisher {
metadataInserter.setAbstractField(abstractField+" Hosted on the D4Science Thredds Catalog: "+threddsURL);
metadataInserter.setCustomTopics(topics);
metadataInserter.setAuthor(username);
String Threddscatalog = "http://"+threddsURL+"/thredds/catalog/public/netcdf/catalog.xml";
String url = OGCFormatter.getOpenDapURL(Threddscatalog, filename);
AnalysisLogger.getLogger().debug("OpenDAP URL: "+url);
String threddscatalog = threddsURL+threddsCatalogPath;
String url = OGCFormatter.getOpenDapURL(threddscatalog, filename);
log.debug("OpenDAP URL: {} ",url);
GridDatatype gdt = NetCDFDataExplorer.getGrid(netCDFLayerName, url);
@@ -187,15 +204,16 @@ public class ThreddsPublisher {
metadataInserter.setResolution(resolutionY);
AnalysisLogger.getLogger().debug("minX: "+minX+" minY: "+minY+" maxX:"+maxX+" maxY:"+maxY+" Res:"+resolutionY);
log.debug("minX: "+minX+" minY: "+minY+" maxX:"+maxX+" maxY:"+maxY+" Res:"+resolutionY);
String wms = OGCFormatter.getWmsNetCDFUrl(url, netCDFLayerName, OGCFormatter.buildBoundingBox(minX, minY, maxX, maxY)).replace("width=676", "width=640").replace("height=330", "height=480");
AnalysisLogger.getLogger().debug("WMS URL: "+wms);
log.debug("WMS URL: {}",wms);
String wcs = OGCFormatter.getWcsNetCDFUrl(url, netCDFLayerName, OGCFormatter.buildBoundingBox(minX, minY, maxX, maxY)).replace("width=676", "width=640").replace("height=330", "height=480");
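// the WMS and WCS URLs above rescale the default 676x330 GetMap size to 640x480 before
// being registered among the distribution URLs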
AnalysisLogger.getLogger().debug("WCS URL: "+wcs);
AnalysisLogger.getLogger().debug("HTTP URL: "+"http://"+threddsURL+"/thredds/fileServer/public/netcdf/"+filename);
String [] urls = {"http://"+threddsURL+"/thredds/fileServer/public/netcdf/"+filename,wms,wcs,url};
log.debug("WCS URL: {}",wcs);
String fileServerUrl = threddsURL+threddsFileServerPath+filename;
log.debug("HTTP URL: {} ",fileServerUrl);
String [] urls = {fileServerUrl,wms,wcs,url};
String [] protocols = {"HTTP","WMS","WCS","OPeNDAP"};
@@ -203,8 +221,8 @@ public class ThreddsPublisher {
metadataInserter.setYLeftLow(minY);
metadataInserter.setXRightUpper(maxX);
metadataInserter.setYRightUpper(maxY);
AnalysisLogger.getLogger().debug("Publishing in group: "+metadataInserter.getGeonetworkGroup());
AnalysisLogger.getLogger().debug("Inserting metadata ");
log.debug("Publishing in group: {} ",metadataInserter.getGeonetworkGroup());
log.debug("Inserting metadata ");
metadataInserter.customMetaDataInsert(urls,protocols,isprivate);
}

View File

@@ -8,9 +8,6 @@ import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
import org.gcube.dataanalysis.geo.matrixmodel.ZExtractor;
public class TestRasterPublisher {
@@ -43,7 +40,8 @@ public class TestRasterPublisher {
List<ComputationalAgent> trans = null;
trans = TransducerersFactory.getTransducerers(config);
trans.get(0).init();
Regressor.process(trans.get(0));
//TODO: check the next line
//Regressor.process(trans.get(0));
StatisticalType st = trans.get(0).getOutput();
AnalysisLogger.getLogger().debug("ST:" + st);
trans = null;

View File

@@ -0,0 +1,16 @@
<configuration>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{0}: %msg%n</pattern>
</encoder>
</appender>
<logger name="org.gcube" level="TRACE" />
<root level="WARN">
<appender-ref ref="STDOUT" />
</root>
</configuration>