Gianpaolo Coro 2015-09-23 09:53:42 +00:00
parent 9503d112c3
commit b6d36373a7
4 changed files with 416 additions and 0 deletions

View File

@ -0,0 +1,135 @@
package org.gcube.dataanalysis.geo.algorithms;
import java.io.File;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Collections;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.gcube.dataanalysis.geo.utils.ThreddsPublisher;
import scala.actors.threadpool.Arrays;
public class RasterDataPublisher extends StandardLocalExternalAlgorithm {

    // Names of the input parameters exposed through the algorithm configuration.
    private static String layerTitleParam = "DatasetTitle";
    private static String layerAbstractParam = "DatasetAbstract";
    private static String layerInnerNameParam = "InnerLayerName";
    private static String FileParam = "RasterFile";
    private static String TopicsParam = "Topics";
    private static String ResolutionParam = "SpatialResolution";
    private static String FileNameInfraParam = "FileNameOnInfra";

    @Override
    public String getDescription() {
        // FIX: corrected grammar of the user-facing description ("as a maps or datasets")
        return "This algorithm publishes a raster file as a map or dataset in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants.";
    }

    @Override
    public void init() throws Exception {
        // no initialization required for this algorithm
    }

    /**
     * Validates the user-supplied parameters, renames the uploaded raster file to
     * the name requested for the infrastructure, and delegates the publication to
     * {@link ThreddsPublisher#publishOnThredds}.
     *
     * @throws Exception when a mandatory parameter is missing or invalid, when the
     *                   file cannot be renamed, or when the publication fails
     */
    @Override
    protected void process() throws Exception {
        status = 10;
        String scope = config.getGcubeScope();
        String username = config.getParam("ServiceUserName");
        String fileAbsolutePath = config.getParam(FileParam);
        String fileName = config.getParam(FileNameInfraParam);
        String layerTitle = config.getParam(layerTitleParam);
        String layerName = config.getParam(layerInnerNameParam);
        String abstractField = config.getParam(layerAbstractParam);
        String[] topics = config.getParam(TopicsParam).split(AlgorithmConfiguration.listSeparator);
        AnalysisLogger.getLogger().debug("scope: " + scope);
        AnalysisLogger.getLogger().debug("username: " + username);
        AnalysisLogger.getLogger().debug("fileAbsolutePath: " + fileAbsolutePath);
        AnalysisLogger.getLogger().debug("layerTitle: " + layerTitle);
        AnalysisLogger.getLogger().debug("layerName: " + layerName);
        AnalysisLogger.getLogger().debug("abstractField: " + abstractField);
        // FIX: log the array contents rather than its object reference
        AnalysisLogger.getLogger().debug("topics: " + Arrays.toString(topics));
        AnalysisLogger.getLogger().debug("filename: " + fileName);
        if (scope == null || username == null)
            throw new Exception("Service parameters are not set - please contact the Administrators");
        if (fileAbsolutePath == null || fileAbsolutePath.trim().length() == 0)
            throw new Exception("No file has been provided to the process");
        if (layerTitle == null || layerTitle.trim().length() == 0)
            throw new Exception("Please provide a valid dataset title");
        if (abstractField == null || abstractField.trim().length() == 0)
            throw new Exception("Please provide a valid abstract for the dataset");
        if (topics == null || topics.length == 0 || topics[0].length() == 0)
            throw new Exception("Please provide at least a valid topic for the dataset");
        // FIX: validate the file name before dereferencing it (the original code
        // called fileName.endsWith(...) with no null/empty guard)
        if (fileName == null || fileName.trim().length() == 0)
            throw new Exception("Please provide a valid file name for the e-Infrastructure");
        double resolution = Double.parseDouble(config.getParam(ResolutionParam));
        AnalysisLogger.getLogger().debug("resolution: " + resolution);
        // FIX: trim the inputs BEFORE they are used to build and rename the target
        // file; the original trimmed them only afterwards, so a name with stray
        // whitespace was renamed untrimmed but reported trimmed.
        fileName = fileName.trim();
        layerTitle = layerTitle.trim();
        layerName = layerName.trim();
        abstractField = abstractField.trim();
        if (!(fileName.endsWith(".nc") || fileName.endsWith(".tiff") || fileName.endsWith(".geotiff") || fileName.endsWith(".asc")))
            throw new Exception("Wrong file name: allowed files extensions are .nc, .tiff, .geotiff, .asc");
        File f = new File(fileAbsolutePath);
        File newf = new File(f.getParent(), fileName);
        AnalysisLogger.getLogger().debug("renaming: " + fileAbsolutePath + " to " + newf.getAbsolutePath());
        // NOTE: File.renameTo is platform-dependent and simply returns false on
        // failure; the explicit check below converts that into a clear error.
        boolean renamed = f.renameTo(newf);
        if (!renamed)
            throw new Exception("Impossible to use " + fileName + " as file name");
        // Augment the user topics with provenance topics (owner, infrastructure, scope).
        ArrayList<String> listTopics = new ArrayList<String>();
        listTopics.addAll(Arrays.asList(topics));
        listTopics.add(username);
        listTopics.add("D4Science");
        listTopics.add(scope);
        String[] topicsListArr = new String[listTopics.size()];
        topics = listTopics.toArray(topicsListArr);
        boolean result = ThreddsPublisher.publishOnThredds(scope, username, newf.getAbsolutePath(), layerTitle, layerName, abstractField, topics, resolution);
        if (result) {
            addOutputString("Created map name", layerTitle);
            addOutputString("Map abstract", abstractField);
            addOutputString("Map Topics", Arrays.toString(topics));
            addOutputString("Inner layer name", layerName);
            addOutputString("File name created in the e-Infrastructure", fileName);
            addOutputString("Map creator", username);
        }
        status = 100;
    }

    /**
     * Declares the algorithm inputs shown to the user: title, abstract, inner layer
     * name, target file name, the raster file itself, topics, resolution, and the
     * service-injected user name.
     */
    @Override
    protected void setInputParameters() {
        try {
            addStringInput(layerTitleParam, "Title of the geospatial dataset to be shown on GeoExplorer", "Generic Raster Layer");
            addStringInput(layerAbstractParam, "Abstract defining the content, the references and usage policies", "Abstract");
            addStringInput(layerInnerNameParam, "Name of the inner layer or band to be published as a Map (ignored for non-NetCDF files)", "band_1");
            addStringInput(FileNameInfraParam, "Name of the file that will be created in the infrastructures", "test.nc");
            inputs.add(new PrimitiveType(File.class.getName(), null, PrimitiveTypes.FILE, FileParam, "Raster dataset to process"));
            inputs.add(new PrimitiveTypesList(String.class.getName(), PrimitiveTypes.STRING, TopicsParam, "Topics to be attached to the published dataset. E.g. Biodiversity, D4Science, Environment, Weather", false));
            addDoubleInput(ResolutionParam, "The resolution of the layer. For NetCDF file this is automatically estimated by data (leave -1)", "-1d");
            inputs.add(new ServiceType(ServiceParameters.USERNAME, "ServiceUserName", "The final user Name"));
        } catch (Throwable e) {
            e.printStackTrace();
        }
    }

    @Override
    public void shutdown() {
        AnalysisLogger.getLogger().debug("RasterFilePublisher - shutdown");
    }
}

View File

@ -5,6 +5,7 @@ import java.util.Formatter;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.utils.VectorOperations;
@ -310,6 +311,25 @@ public class NetCDFDataExplorer {
return det > 0;
}
/**
 * Opens a NetCDF file and returns the {@link GridDatatype} whose name matches
 * {@code layer}.
 *
 * @param layer      the exact grid (layer) name to look up
 * @param netcdffile path or OPeNDAP URL of the NetCDF file
 * @return the matching grid; the backing dataset is intentionally left open,
 *         since the returned grid reads from it
 * @throws Exception if no grid with the given name exists; the message lists
 *                   the available grid names
 */
public static GridDatatype getGrid(String layer, String netcdffile) throws Exception{
    AnalysisLogger.getLogger().debug("Opening File : " + netcdffile);
    AnalysisLogger.getLogger().debug("Searching for layer: " + layer);
    GridDataset gds = ucar.nc2.dt.grid.GridDataset.open(netcdffile);
    List<GridDatatype> gridTypes = gds.getGrids();
    // collect the available names so the failure message can suggest alternatives
    StringBuffer sb = new StringBuffer();
    for (GridDatatype gdt : gridTypes) {
        AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getName());
        sb.append(gdt.getName() + " ");
        if (layer.equals(gdt.getName())) {
            AnalysisLogger.getLogger().debug("Found layer " + layer + " inside file");
            GridDatatype grid = gds.findGridDatatype(gdt.getName());
            return grid;
        }
    }
    // FIX: release the dataset handle on the no-match path; the original code
    // threw without closing, leaking the underlying file/connection.
    try {
        gds.close();
    } catch (Throwable e) {
        AnalysisLogger.getLogger().debug("getGrid: could not close the dataset: " + e.getMessage());
    }
    throw new java.lang.Exception("No layer with name "+layer+" is available in the NetCDF file. Possible values are "+sb.toString());
}
// A GridDatatype is like a specialized Variable that explicitly handles X,Y,Z,T dimensions
public static LinkedHashMap<String, Double> manageGridDataset(String layer, String filename, double x, double y, double z) throws Exception {
LinkedHashMap<String, Double> valuesMap = new LinkedHashMap<String, Double>();
@ -524,4 +544,24 @@ public class NetCDFDataExplorer {
CoordinateAxis yAxis = gcs.getYHorizAxis();
return adjY(yAxis.getMaxValue());
}
/**
 * Estimates the Y (latitudinal) resolution of a layer in a NetCDF file:
 * the Y extent of the grid divided by the number of cells along the Y axis,
 * rounded to 4 decimal places.
 *
 * @param layer the grid (layer) name inside the file
 * @param file  path or OPeNDAP URL of the NetCDF file
 * @return the absolute per-cell Y resolution, rounded to 4 decimals
 * @throws Exception if the layer is not present in the file
 */
public static double getResolution(String layer, String file) throws Exception{
    GridDatatype grid = getGrid(layer, file);
    double yMin = NetCDFDataExplorer.getMinY(grid.getCoordinateSystem());
    double yMax = NetCDFDataExplorer.getMaxY(grid.getCoordinateSystem());
    // Take a single time slice and read its data volume; the volume shape
    // (order t, z, y, x) gives the number of cells along Y.
    GridDatatype slice = grid.makeSubset(new Range(0, 0), null, null, 1, 1, 1);
    Array volume = slice.readVolumeData(0);
    int[] shape = volume.getShape();
    // with a vertical (z) dimension present Y is the second axis, otherwise the first
    int yCells = (shape.length > 2) ? shape[1] : shape[0];
    double yResolution = Math.abs((double) (yMax - yMin) / (double) yCells);
    return MathFunctions.roundDecimal(yResolution, 4);
}
}

View File

@ -0,0 +1,49 @@
package org.gcube.dataanalysis.geo.test;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
import org.gcube.dataanalysis.geo.matrixmodel.ZExtractor;
public class TestRasterPublisher {

    static String cfg = "./cfg/";

    /**
     * Manual test entry point: runs the RASTER_DATA_PUBLISHER transducer locally
     * with a fixed configuration (devsec scope, developer-local sample NetCDF file).
     */
    public static void main(String[] args) throws Exception {
        AlgorithmConfiguration configuration = new AlgorithmConfiguration();
        configuration.setConfigPath("./cfg/");
        configuration.setPersistencePath("./");
        configuration.setAgent("RASTER_DATA_PUBLISHER");
        configuration.setGcubeScope("/gcube/devsec");
        configuration.setParam("ServiceUserName", "gianpaolo.coro");
        configuration.setParam("DatasetTitle", "test raster dataset production");
        configuration.setParam("DatasetAbstract", "test raster dataset production abstract");
        configuration.setParam("InnerLayerName", "adux_pres_1");
        configuration.setParam("RasterFile", "C:/Users/coro/Downloads/adux_pres_portale_test.nc");
        configuration.setParam("Topics", "adux" + AlgorithmConfiguration.listSeparator + "gianpaolo");
        configuration.setParam("SpatialResolution", "-1");
        AnalysisLogger.setLogger(configuration.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
        AnalysisLogger.getLogger().debug("Executing: " + configuration.getAgent());
        // build the transducer, run it through the regression harness, dump its output
        List<ComputationalAgent> transducers = TransducerersFactory.getTransducerers(configuration);
        ComputationalAgent agent = transducers.get(0);
        agent.init();
        Regressor.process(agent);
        StatisticalType output = agent.getOutput();
        AnalysisLogger.getLogger().debug("ST:" + output);
        transducers = null;
    }
}

View File

@ -0,0 +1,192 @@
package org.gcube.dataanalysis.geo.utils;
import java.io.File;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.executor.util.DataTransferer;
import org.gcube.dataanalysis.executor.util.InfraRetrieval;
import org.gcube.dataanalysis.geo.connectors.netcdf.NetCDFDataExplorer;
import org.gcube.dataanalysis.geo.infrastructure.GeoNetworkInspector;
import org.gcube.dataanalysis.geo.meta.GenericLayerMetadata;
import org.gcube.dataanalysis.geo.meta.OGCFormatter;
import org.opengis.metadata.identification.TopicCategory;
import ucar.nc2.dt.GridDatatype;
/**
 * Publishes a raster file on a THREDDS catalog discovered in the given gCube
 * scope: transfers the file via the Data Transfer service co-located with the
 * THREDDS host, then registers metadata (with WMS/WCS/OPeNDAP links for NetCDF
 * files, plain HTTP link otherwise) on GeoNetwork.
 */
public class ThreddsPublisher {
// Manual test entry point with developer-local paths and a devsec scope;
// not used by the production flow.
public static void main (String[] args) throws Exception{
// String scope = "/d4science.research-infrastructures.eu/gCubeApps";
String scope = "/gcube/devsec";
String username = "gianpaolo.coro";
//String fileAbsolutePath = "C:/Users/coro/Dropbox/Public/wind1.tif";
String fileAbsolutePath = "C:/Users/coro/Downloads/adux_pres_portale_test.nc";
String layerTitle = "architeuthis dux distribution file - test";
String layerName = "adux_pres_2";
String abstractField = "abstract architeuthis dux distribution file - test";
String[] topics = {"adux","D4Science"};
double resolution = -1;
AnalysisLogger.setLogger("./cfg/"+AlgorithmConfiguration.defaultLoggerFile);
publishOnThredds(scope, username, fileAbsolutePath, layerTitle, layerName, abstractField, topics, resolution);
}
/**
 * Publishes the file on THREDDS and registers its metadata on GeoNetwork.
 *
 * @param scope            gCube scope in which services are discovered
 * @param username         author recorded in the metadata
 * @param fileAbsolutePath local path of the raster file to transfer
 * @param layerTitle       metadata title
 * @param layerName        inner NetCDF grid name (checked for .nc files; ignored otherwise)
 * @param abstractField    metadata abstract
 * @param topics           keywords attached to the metadata
 * @param resolution       spatial resolution; mandatory (!= -1) for non-NetCDF files,
 *                         computed from the data for NetCDF files
 * @return true on success (failures are reported by throwing)
 * @throws Exception if the THREDDS or Data Transfer service cannot be found in
 *                   the scope, if the declared NetCDF layer is absent from the
 *                   file, or if resolution is missing for a non-NetCDF file
 */
public static boolean publishOnThredds(String scope,String username, String fileAbsolutePath, String layerTitle, String layerName, String abstractField, String[] topics, double resolution) throws Exception{
//TODO manage faults
// Fixed folder served by THREDDS on the remote host; the DT service writes here.
String remoteFolder = "/data/content/thredds/public/netcdf/";
// Look the THREDDS endpoint up in the IS; the resource name casing varies,
// so retry with "Thredds" when "THREDDS" yields nothing.
List<String> threddsAddress = InfraRetrieval.retrieveServiceAddress("Gis", "THREDDS", scope, "Geoserver");
if (threddsAddress.size()==0)
threddsAddress = InfraRetrieval.retrieveServiceAddress("Gis", "Thredds", scope, "Geoserver");
if (threddsAddress.size()==0)
throw new Exception("Thredds resource is not available in scope "+scope);
String threddServiceAddress = threddsAddress.get(0);
// NOTE(review): the two substring calls below assume the address contains
// "http://" and a path slash after the host — an https endpoint or a bare
// host would break this arithmetic; confirm the IS always returns http URLs.
threddServiceAddress = threddServiceAddress.substring(threddServiceAddress.indexOf("http://")+7);
threddServiceAddress = threddServiceAddress.substring(0,threddServiceAddress.indexOf("/"));
AnalysisLogger.getLogger().debug("Found "+threddsAddress.size()+" thredds services");
AnalysisLogger.getLogger().debug("THREDDS: "+threddServiceAddress);
// Find the Data Transfer agent running on the SAME host as THREDDS,
// so the file lands on the machine that serves the catalog.
List<String> dataTransferAddress = InfraRetrieval.retrieveService("agent-service", scope);
if (dataTransferAddress.size()==0)
throw new Exception("Data Transfer services are not available in scope "+scope);
AnalysisLogger.getLogger().debug("Found "+dataTransferAddress.size()+" transfer services");
String threddsDTService = threddServiceAddress;
// default DT port, overwritten when the matching service advertises one
int threddsDTPort = 9090;
boolean found = false;
for (String datatransferservice:dataTransferAddress){
AnalysisLogger.getLogger().debug("Transfer service found");
// same "http://host:port/..." parsing assumption as above
datatransferservice = datatransferservice.substring(datatransferservice.indexOf("http://")+7);
String servicehost = datatransferservice.substring(0,datatransferservice.indexOf(":"));
String serviceport = datatransferservice.substring(datatransferservice.indexOf(":")+1,datatransferservice.indexOf("/"));
AnalysisLogger.getLogger().debug("Transfer service: "+servicehost+":"+serviceport);
if (threddServiceAddress.equals(servicehost)){
threddsDTPort = Integer.parseInt(serviceport);
found = true;
break;
}
}
if (!found)
throw new Exception("Thredds data transfer has not been found in the same scope of the catalog: "+scope);
// For NetCDF input, fail fast (before the transfer) if the declared layer
// is not actually inside the file.
if (fileAbsolutePath.endsWith(".nc")){
AnalysisLogger.getLogger().debug("checking NetCDF file coherence"+fileAbsolutePath);
NetCDFDataExplorer.getGrid(layerName, fileAbsolutePath);
}
AnalysisLogger.getLogger().debug("Transferring via DT to "+threddServiceAddress);
DataTransferer.transferFileToService(scope, username, threddsDTService, threddsDTPort, fileAbsolutePath, remoteFolder);
AnalysisLogger.getLogger().debug("Adding metadata on GeoNetwork");
if (fileAbsolutePath.endsWith(".nc"))
publishNetCDFMeta(scope, layerTitle, abstractField, new File(fileAbsolutePath).getName(),layerName,threddServiceAddress,username,topics);
else{
// non-NetCDF files cannot have their resolution estimated from the data
if (resolution==-1)
throw new Exception ("Specify valid resolution parameter for non-NetCDF raster datasets");
publishOtherFileMeta(scope, layerTitle, resolution, abstractField, new File(fileAbsolutePath).getName(), threddsServiceURLOrNoop(threddServiceAddress),username,topics);
}
AnalysisLogger.getLogger().debug("Finished");
return true;
}
// Registers GeoNetwork metadata for a non-NetCDF raster: a single HTTP
// download link pointing at the THREDDS file server.
private static void publishOtherFileMeta(String scope, String layerTitle, double resolution, String abstractField, String filename, String threddsURL, String username, String [] topics) throws Exception{
GenericLayerMetadata metadataInserter = new GenericLayerMetadata();
// GeoNetwork coordinates (URL/credentials) are resolved from the scope
GeoNetworkInspector gninspector =new GeoNetworkInspector();
gninspector.setScope(scope);
String geonetworkURL = gninspector.getGeonetworkURLFromScope();
String geonetworkUser = gninspector.getGeonetworkUserFromScope();
String geonetworkPassword = gninspector.getGeonetworkPasswordFromScope();
AnalysisLogger.getLogger().debug("GeoNetwork Info: "+geonetworkURL+" "+geonetworkUser);
metadataInserter.setGeonetworkUrl(geonetworkURL);
metadataInserter.setGeonetworkPwd(geonetworkPassword);
metadataInserter.setGeonetworkUser(geonetworkUser);
metadataInserter.setTitle(layerTitle);
metadataInserter.setCategoryTypes("_"+TopicCategory.ENVIRONMENT.name()+"_");
metadataInserter.setAbstractField(abstractField+" Hosted on the D4Science Thredds Catalog: "+threddsURL);
metadataInserter.setCustomTopics(topics);
metadataInserter.setAuthor(username);
metadataInserter.setResolution(resolution);
AnalysisLogger.getLogger().debug("Res:"+resolution);
String [] urls = {"http://"+threddsURL+"/thredds/fileServer/public/netcdf/"+filename};
String [] protocols = {"HTTP"};
metadataInserter.customMetaDataInsert(urls,protocols);
}
// Registers GeoNetwork metadata for a NetCDF file: bounding box and resolution
// are read from the published dataset via OPeNDAP, and HTTP/WMS/WCS/OPeNDAP
// access links are attached.
private static void publishNetCDFMeta(String scope, String layerTitle,String abstractField, String filename, String netCDFLayerName, String threddsURL, String username, String [] topics) throws Exception{
AnalysisLogger.getLogger().debug("Getting GeoNetwork Info");
GenericLayerMetadata metadataInserter = new GenericLayerMetadata();
GeoNetworkInspector gninspector =new GeoNetworkInspector();
gninspector.setScope(scope);
String geonetworkURL = gninspector.getGeonetworkURLFromScope();
String geonetworkUser = gninspector.getGeonetworkUserFromScope();
String geonetworkPassword = gninspector.getGeonetworkPasswordFromScope();
AnalysisLogger.getLogger().debug("GeoNetwork Info: "+geonetworkURL+" "+geonetworkUser);
metadataInserter.setGeonetworkUrl(geonetworkURL);
metadataInserter.setGeonetworkPwd(geonetworkPassword);
metadataInserter.setGeonetworkUser(geonetworkUser);
metadataInserter.setTitle(layerTitle);
metadataInserter.setCategoryTypes("_"+TopicCategory.ENVIRONMENT.name()+"_");
metadataInserter.setAbstractField(abstractField+" Hosted on the D4Science Thredds Catalog: "+threddsURL);
metadataInserter.setCustomTopics(topics);
metadataInserter.setAuthor(username);
// Read the grid back through OPeNDAP from the THREDDS catalog just populated
String Threddscatalog = "http://"+threddsURL+"/thredds/catalog/public/netcdf/catalog.xml";
String url = OGCFormatter.getOpenDapURL(Threddscatalog, filename);
AnalysisLogger.getLogger().debug("OpenDAP URL: "+url);
GridDatatype gdt = NetCDFDataExplorer.getGrid(netCDFLayerName, url);
double minX = NetCDFDataExplorer.getMinX(gdt.getCoordinateSystem());
double maxX = NetCDFDataExplorer.getMaxX(gdt.getCoordinateSystem());
double minY = NetCDFDataExplorer.getMinY(gdt.getCoordinateSystem());
double maxY = NetCDFDataExplorer.getMaxY(gdt.getCoordinateSystem());
double resolutionY = NetCDFDataExplorer.getResolution(netCDFLayerName,url);
metadataInserter.setResolution(resolutionY);
AnalysisLogger.getLogger().debug("minX: "+minX+" minY: "+minY+" maxX:"+maxX+" maxY:"+maxY+" Res:"+resolutionY);
// NOTE(review): the width/height replacements rewrite the default preview
// size produced by OGCFormatter (676x330 -> 640x480); confirm those defaults
// if OGCFormatter changes.
String wms = OGCFormatter.getWmsNetCDFUrl(url, netCDFLayerName, OGCFormatter.buildBoundingBox(minX, minY, maxX, maxY)).replace("width=676", "width=640").replace("height=330", "height=480");
AnalysisLogger.getLogger().debug("WMS URL: "+wms);
String wcs = OGCFormatter.getWcsNetCDFUrl(url, netCDFLayerName, OGCFormatter.buildBoundingBox(minX, minY, maxX, maxY)).replace("width=676", "width=640").replace("height=330", "height=480");
AnalysisLogger.getLogger().debug("WCS URL: "+wcs);
AnalysisLogger.getLogger().debug("HTTP URL: "+"http://"+threddsURL+"/thredds/fileServer/public/netcdf/"+filename);
String [] urls = {"http://"+threddsURL+"/thredds/fileServer/public/netcdf/"+filename,wms,wcs,url};
String [] protocols = {"HTTP","WMS","WCS","OPeNDAP"};
metadataInserter.setXLeftLow(minX);
metadataInserter.setYLeftLow(minY);
metadataInserter.setXRightUpper(maxX);
metadataInserter.setYRightUpper(maxY);
AnalysisLogger.getLogger().debug("Inserting metadata ");
metadataInserter.customMetaDataInsert(urls,protocols);
}
}