git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngineGeoSpatialExtension@129111 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
parent 63c5343544
commit 6407ad45f3
@@ -8,10 +8,13 @@ import java.util.UUID;
 import org.gcube.common.scope.api.ScopeProvider;
 import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
 import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
 import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
 import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
 import org.gcube.dataanalysis.geo.meta.PolyMapMetadata;
+import org.gcube.dataanalysis.geo.utils.GeospatialDataPublicationLevel;
 import org.gcube.spatial.data.gis.model.report.PublishResponse;
 import org.hibernate.SessionFactory;

@@ -34,6 +37,7 @@ public abstract class MapsCreator extends StandardLocalExternalAlgorithm {
 static String infoParameter = "Info";
 static String resolutionParameter = "Resolution";
 static String layerNameParameter = "MapName";
+static String publicationLevel= "PublicationLevel";
 static int maxNPoints = 259000;
 SessionFactory gisdbconnection = null;
 SessionFactory smdbconnection = null;
@@ -77,6 +81,13 @@ public abstract class MapsCreator extends StandardLocalExternalAlgorithm {
 if (scope == null)
 scope = ScopeProvider.instance.get();
 log("Using scope:"+scope);
+
+String publicationLevelValue = getInputParameter(publicationLevel);
+log("Publication Level:"+publicationLevelValue);
+boolean isprivate = false;
+if (GeospatialDataPublicationLevel.valueOf(publicationLevelValue)==GeospatialDataPublicationLevel.PRIVATE)
+isprivate=true;
+
 //initialize Gis DB parameters
 String databaseJdbc = getInputParameter(dburlParameterName);
 String databaseUser = getInputParameter(dbuserParameterName);
@@ -202,7 +213,7 @@ public abstract class MapsCreator extends StandardLocalExternalAlgorithm {
 username = usernameP;

 String layerName = config.getParam(layerNameParameter);
-PublishResponse response = PolyMapMetadata.publishTable(scope, gisTableName, resolution, username, layerName, defaultStyle, workspace, datastore, purpose, credits, keyword);
+PublishResponse response = PolyMapMetadata.publishTable(scope, gisTableName, resolution, username, layerName, defaultStyle, workspace, datastore, purpose, credits, keyword,isprivate);
 status = 80;
 //analyzing response
 if (response == null) {
@@ -7,7 +7,10 @@ import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
 import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
 import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
 import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
+import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
 import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
+import org.gcube.dataanalysis.geo.utils.GeospatialDataPublicationLevel;

 public class PointsMapsCreator extends MapsCreator {

@@ -31,6 +34,9 @@ public class PointsMapsCreator extends MapsCreator {
 @Override
 protected void setInputParameters() {
 try {
+PrimitiveType e = new PrimitiveType(Enum.class.getName(), GeospatialDataPublicationLevel.values(), PrimitiveTypes.ENUMERATED, publicationLevel, "The visibility level of the produced map",""+GeospatialDataPublicationLevel.PRIVATE);
+inputs.add(e);
+
 List<TableTemplates> templates = new ArrayList<TableTemplates>();
 addRemoteDatabaseInput(databaseParameterName, dburlParameterName, dbuserParameterName, dbpasswordParameterName, "driver", "dialect");
 templates.add(TableTemplates.GENERIC);
@@ -7,7 +7,10 @@ import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
 import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
 import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
 import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
+import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
 import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
+import org.gcube.dataanalysis.geo.utils.GeospatialDataPublicationLevel;

 public class PolygonMapsCreator extends MapsCreator {

@@ -32,6 +35,9 @@ return "A transducer algorithm to produce a GIS map of filled polygons associate
 @Override
 protected void setInputParameters() {
 try {
+PrimitiveType e = new PrimitiveType(Enum.class.getName(), GeospatialDataPublicationLevel.values(), PrimitiveTypes.ENUMERATED, publicationLevel, "The visibility level of the produced map",""+GeospatialDataPublicationLevel.PRIVATE);
+inputs.add(e);
+
 List<TableTemplates> templates = new ArrayList<TableTemplates>();
 addRemoteDatabaseInput(databaseParameterName, dburlParameterName, dbuserParameterName, dbpasswordParameterName, "driver", "dialect");
 templates.add(TableTemplates.GENERIC);
@@ -4,6 +4,7 @@ import java.io.File;
 import java.util.ArrayList;
 import java.util.Arrays;

+import org.apache.commons.io.FileDeleteStrategy;
 import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
 import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
@@ -11,7 +12,10 @@ import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
 import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
 import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
 import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
+import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.InterpolateTables;
 import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
+import org.gcube.dataanalysis.executor.scripts.OSCommand;
+import org.gcube.dataanalysis.geo.utils.GeospatialDataPublicationLevel;
 import org.gcube.dataanalysis.geo.utils.ThreddsPublisher;

 public class RasterDataPublisher extends StandardLocalExternalAlgorithm{
@@ -44,6 +48,11 @@ public class RasterDataPublisher extends StandardLocalExternalAlgorithm{
 String abstractField = config.getParam(layerAbstractParam);
 String[] topics = config.getParam(TopicsParam).split(AlgorithmConfiguration.listSeparator);

+String publicationLevelValue = getInputParameter("PublicationLevel");
+log("Publication Level:"+publicationLevelValue);
+boolean isprivate = false;
+if (GeospatialDataPublicationLevel.valueOf(publicationLevelValue)==GeospatialDataPublicationLevel.PRIVATE)
+isprivate=true;

 AnalysisLogger.getLogger().debug("scope: "+scope);
 AnalysisLogger.getLogger().debug("username: "+username);
@@ -53,7 +62,7 @@ public class RasterDataPublisher extends StandardLocalExternalAlgorithm{
 AnalysisLogger.getLogger().debug("abstractField: "+abstractField);
 AnalysisLogger.getLogger().debug("topics: "+topics);
 AnalysisLogger.getLogger().debug("filename: "+fileName);
+AnalysisLogger.getLogger().debug("is private: "+isprivate);

 if (scope==null || username==null)
 throw new Exception ("Service parameters are not set - please contact the Administrators");
@@ -94,7 +103,7 @@ public class RasterDataPublisher extends StandardLocalExternalAlgorithm{
 String [] topicsListArr = new String[listTopics.size()];
 topics = listTopics.toArray(topicsListArr);

-boolean result = ThreddsPublisher.publishOnThredds(scope, username, newf.getAbsolutePath(), layerTitle, layerName, abstractField, topics, resolution);
+boolean result = ThreddsPublisher.publishOnThredds(scope, username, newf.getAbsolutePath(), layerTitle, layerName, abstractField, topics, resolution,isprivate);

 if (result) {
 addOutputString("Created map name", layerTitle);
@@ -105,13 +114,22 @@ public class RasterDataPublisher extends StandardLocalExternalAlgorithm{
 addOutputString("Map creator", username);
 }
 AnalysisLogger.getLogger().debug("Deleting raster file "+newf.getAbsolutePath());
-newf.delete();
+
+try{OSCommand.ExecuteGetLine("rm "+newf.getAbsolutePath(), null);}catch(Exception e){}
+FileDeleteStrategy.FORCE.deleteQuietly(newf);
+System.gc();
+// newf.delete();
 status = 100;
 }


 @Override
 protected void setInputParameters() {
 try {
+PrimitiveType e = new PrimitiveType(Enum.class.getName(), GeospatialDataPublicationLevel.values(), PrimitiveTypes.ENUMERATED, "PublicationLevel", "The visibility level of the produced map",""+GeospatialDataPublicationLevel.PRIVATE);
+inputs.add(e);
 addStringInput(layerTitleParam, "Title of the geospatial dataset to be shown on GeoExplorer", "Generic Raster Layer");
 addStringInput(layerAbstractParam, "Abstract defining the content, the references and usage policies", "Abstract");
 addStringInput(layerInnerNameParam, "Name of the inner layer or band to be published as a Map (ignored for non-NetCDF files)", "band_1");
@@ -7,7 +7,10 @@ import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
 import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
 import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
 import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
+import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
 import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
+import org.gcube.dataanalysis.geo.utils.GeospatialDataPublicationLevel;

 public class SpeciesDistributionsMapsCreatorFromCsquares extends MapsCreator {

@@ -31,6 +34,9 @@ public class SpeciesDistributionsMapsCreatorFromCsquares extends MapsCreator {
 @Override
 protected void setInputParameters() {
 try {
+PrimitiveType e = new PrimitiveType(Enum.class.getName(), GeospatialDataPublicationLevel.values(), PrimitiveTypes.ENUMERATED, publicationLevel, "The visibility level of the produced map",""+GeospatialDataPublicationLevel.PRIVATE);
+inputs.add(e);
+
 List<TableTemplates> templates = new ArrayList<TableTemplates>();
 addRemoteDatabaseInput(databaseParameterName, dburlParameterName, dbuserParameterName, dbpasswordParameterName, "driver", "dialect");
 templates.add(TableTemplates.GENERIC);
@@ -7,7 +7,10 @@ import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
 import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
 import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
 import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
+import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
 import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
+import org.gcube.dataanalysis.geo.utils.GeospatialDataPublicationLevel;

 public class SpeciesDistributionsMapsCreatorFromPoints extends MapsCreator {

@@ -31,6 +34,9 @@ public class SpeciesDistributionsMapsCreatorFromPoints extends MapsCreator {
 @Override
 protected void setInputParameters() {
 try {
+PrimitiveType e = new PrimitiveType(Enum.class.getName(), GeospatialDataPublicationLevel.values(), PrimitiveTypes.ENUMERATED, publicationLevel, "The visibility level of the produced map",""+GeospatialDataPublicationLevel.PRIVATE);
+inputs.add(e);
+
 List<TableTemplates> templates = new ArrayList<TableTemplates>();
 addRemoteDatabaseInput(databaseParameterName, dburlParameterName, dbuserParameterName, dbpasswordParameterName, "driver", "dialect");
 templates.add(TableTemplates.GENERIC);
@@ -1,5 +1,6 @@
 package org.gcube.dataanalysis.geo.batch;

+import org.gcube.common.scope.api.ScopeProvider;
 import org.gcube.dataanalysis.geo.meta.GenericLayerMetadata;
 import org.gcube.dataanalysis.geo.meta.NetCDFMetadata;
 import org.opengis.metadata.identification.TopicCategory;
@@ -15,11 +16,12 @@ public class BaseLayerAddonMetadataInsertDev {
 //static String geoserverurl = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver";

 static String user = "admin";
-static String password = "admin";
+static String password = "Geey6ohz";
 static String workspace= "aquamaps";
 // static String workspace= "timeseriesgisdb";

 public static void main(String[] args) throws Exception{
+ScopeProvider.instance.set("/gcube/devsec");
 GenericLayerMetadata metadataInserter = new GenericLayerMetadata();
 metadataInserter.setGeonetworkUrl(geonetworkurl);
 metadataInserter.setGeonetworkUser(user);
@@ -34,7 +36,7 @@ public class BaseLayerAddonMetadataInsertDev {

 terrestrialecoregions(metadataInserter);
 metadataInserter.insertMetaData();
+/*
 nafo(metadataInserter);
 metadataInserter.insertMetaData();
 tdwg(metadataInserter);
@@ -43,7 +45,7 @@ public class BaseLayerAddonMetadataInsertDev {
 metadataInserter.insertMetaData();
 continental(metadataInserter);
 metadataInserter.insertMetaData();
+*/

 }

@@ -18,8 +18,8 @@ public class MetadataInsertDevExample {

 static String geonetworkurl = "http://geoserver-dev2.d4science-ii.research-infrastructures.eu/geonetwork/";
 //
-static String user = "...";
-static String password = "..";
+static String user = "admin";
+static String password = "Geey6ohz";

 static String threddsbaseURL= "thredds-d-d4s.d4science.org";
 static String threddsURL = "http://"+threddsbaseURL+"/thredds/catalog/public/netcdf/catalog.xml";
@@ -17,6 +17,7 @@ import org.gcube.spatial.data.geonetwork.GeoNetworkReader;
 import org.gcube.spatial.data.geonetwork.LoginLevel;
 import org.gcube.spatial.data.geonetwork.configuration.Configuration;
 import org.gcube.spatial.data.geonetwork.configuration.ConfigurationManager;
+import org.gcube.spatial.data.geonetwork.model.Account;
 import org.geotoolkit.metadata.iso.identification.DefaultDataIdentification;
 import org.opengis.metadata.Metadata;
 import org.opengis.metadata.citation.OnlineResource;
@@ -311,36 +312,6 @@ public class GeoNetworkInspector {
 AnalysisLogger.getLogger().debug("Features Manager: configuring GeoNetwork");
 if (scope != null)
 ScopeProvider.instance.set(scope);
-else {
-AnalysisLogger.getLogger().debug("Features Manager: Using manual configuration of GeoNetwork");
-class CustomConfiguration implements Configuration {
-@Override
-public String getGeoNetworkEndpoint() {
-return geonetworkUrl;
-}
-
-@Override
-public Map<LoginLevel, String> getGeoNetworkPasswords() {
-HashMap<LoginLevel, String> map = new HashMap<LoginLevel, String>();
-map.put(LoginLevel.DEFAULT, geonetworkPwd);
-return map;
-}
-
-@Override
-public Map<LoginLevel, String> getGeoNetworkUsers() {
-HashMap<LoginLevel, String> map = new HashMap<LoginLevel, String>();
-map.put(LoginLevel.DEFAULT, geonetworkUser);
-return map;
-}
-
-@Override
-public int getScopeGroup() {
-return 0;
-}
-}
-
-ConfigurationManager.setConfiguration(CustomConfiguration.class);
-}

 AnalysisLogger.getLogger().debug("Initializing GeoNetwork");

@@ -358,19 +329,29 @@ public class GeoNetworkInspector {

 public String getGeonetworkUserFromScope() throws Exception {
 GeoNetworkReader gn = initGeoNetworkReader();
-return gn.getConfiguration().getGeoNetworkUsers().get(LoginLevel.DEFAULT);
+return gn.getConfiguration().getScopeConfiguration().getAccounts().get(Account.Type.SCOPE).getUser();
 }

+public String getGeonetworkPrivateGroup() throws Exception {
+GeoNetworkReader gn = initGeoNetworkReader();
+return ""+gn.getConfiguration().getScopeConfiguration().getPrivateGroup();
+}
+
+public String getGeonetworkPublicGroup() throws Exception {
+GeoNetworkReader gn = initGeoNetworkReader();
+return ""+gn.getConfiguration().getScopeConfiguration().getPublicGroup();
+}
+
 public String getGeonetworkPasswordFromScope() throws Exception {
 GeoNetworkReader gn = initGeoNetworkReader();
-return gn.getConfiguration().getGeoNetworkPasswords().get(LoginLevel.DEFAULT);
+return gn.getConfiguration().getScopeConfiguration().getAccounts().get(Account.Type.SCOPE).getPassword();
 }

 private Metadata getGNInfobyTitle(String info) throws Exception {

 GeoNetworkReader gn = initGeoNetworkReader();
 // Form query object
-gn.login(LoginLevel.DEFAULT);
+gn.login(LoginLevel.ADMIN);
 GNSearchRequest req = new GNSearchRequest();
 req.addParam(GNSearchRequest.Param.title, info);
 // req.addConfig(GNSearchRequest.Config.similarity, "1");
@@ -393,7 +374,7 @@ public class GeoNetworkInspector {

 GeoNetworkReader gn = initGeoNetworkReader();
 // Form query object
-gn.login(LoginLevel.DEFAULT);
+gn.login(LoginLevel.ADMIN);
 GNSearchRequest req = new GNSearchRequest();
 req.addParam(GNSearchRequest.Param.title, info);
 req.addConfig(GNSearchRequest.Config.similarity, tolerance);
@@ -417,7 +398,7 @@ public class GeoNetworkInspector {

 GeoNetworkReader gn = initGeoNetworkReader();
 // Form query object
-gn.login(LoginLevel.DEFAULT);
+gn.login(LoginLevel.ADMIN);
 GNSearchRequest req = new GNSearchRequest();
 req.addParam(GNSearchRequest.Param.any, info);
 req.addConfig(GNSearchRequest.Config.similarity, tolerance);
@@ -441,7 +422,7 @@ public class GeoNetworkInspector {

 GeoNetworkReader gn = initGeoNetworkReader();
 // Form query object
-gn.login(LoginLevel.DEFAULT);
+gn.login(LoginLevel.ADMIN);
 GNSearchRequest req = new GNSearchRequest();
 req.addParam(GNSearchRequest.Param.title, info);
 req.addConfig(GNSearchRequest.Config.similarity, tolerance);
@@ -476,7 +457,7 @@ public class GeoNetworkInspector {
 GeoNetworkReader gn = initGeoNetworkReader();
 AnalysisLogger.getLogger().debug("Initialized GeoNetwork!");
 // Form query object
-gn.login(LoginLevel.DEFAULT);
+gn.login(LoginLevel.ADMIN);
 Metadata meta = gn.getById(UUID);
 AnalysisLogger.getLogger().debug("Layer with UUID: " + UUID + " successfully Retrieved!");

@@ -583,14 +564,22 @@ public class GeoNetworkInspector {
 return layerTitle2.replaceAll(punct, " ").replaceAll("( )+", " ");
 }

-public static void main1(String args[]) throws Exception {
+public static void main(String args[]) throws Exception {
 // String title = "temperature (04091217ruc.nc)";
 // String title = "Bathymetry";
 // String title = "FAO aquatic species distribution map of Melanogrammus aeglefinus";
 // String title = "geopotential height from [12/09/2004 19:00] to [12/09/2004 22:00] (04091217_ruc.nc)";
 String title = "geopotential height";
 GeoNetworkInspector fm = new GeoNetworkInspector();
-Metadata meta = fm.getGNInfobyTitle(title);
+// Metadata meta = fm.getGNInfobyTitle(title);
+// fm.setScope("/gcube/devsec/devVRE");
+// fm.setScope("/gcube/devsec/devVRE");
+fm.setScope("/gcube/devsec");
+// Metadata meta = fm.getGNInfobyUUID("ffd86c4b-e624-493b-b279-2cd20d6b267f");
+// Metadata meta = fm.getGNInfobyUUID("0815e357-ebd7-4c02-8dc8-f945eceb870c"); //public
+// Metadata meta = fm.getGNInfobyUUID("d57e7f27-9763-4216-a72f-48289f35779f"); //private
+Metadata meta = fm.getGNInfobyUUID("c286077b-53e9-4389-aaf8-85fb3cb480a2"); //public
+
 System.out.println("is file? " + fm.isNetCDFFile(meta));
 System.out.println("opendap: " + fm.getOpenDapLink(meta));
 System.out.println("wcs:" + fm.getWCSLink(meta));
@@ -598,7 +587,7 @@ public class GeoNetworkInspector {
 System.out.println("thredds:" + fm.getThreddsLink(meta));
 }

-public static void main(String args[]) throws Exception {
+public static void main2(String args[]) throws Exception {
 System.out.println(treatTitleForGN("sea/land/lake/ice field composite mask from"));
 }
 }
@@ -72,6 +72,8 @@ public class GenericLayerMetadata {
 private String geoserverUrl = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geonetwork/";
 private String geonetworkUser = "admin";
 private String geonetworkPwd = "admin";
+private String geonetworkGroup = "3";
+
 private String title = "temperature 04091217ruc.nc";
 private String layerName = "T";
 private String usageField = "Environmental enrichment";
@@ -278,6 +280,14 @@ public class GenericLayerMetadata {
 this.geoserverUrl = geoserverUrl;
 }

+public String getGeonetworkGroup() {
+return geonetworkGroup;
+}
+
+public void setGeonetworkGroup(String geonetworkGroup) {
+this.geonetworkGroup = geonetworkGroup;
+}
+
 static File meta2File(Metadata meta) throws IOException, JAXBException {
 File temp = File.createTempFile("meta", ".xml");
 FileWriter writer = new FileWriter(temp);
@@ -458,7 +468,7 @@ public class GenericLayerMetadata {
 GNClient client = new GNClient(geonetworkUrl);
 client.login(geonetworkUser, geonetworkPwd);
 File tmetafile = meta2File(meta);
-client.insertMetadata(new GNInsertConfiguration("3", "datasets", "_none_", true), tmetafile);
+client.insertMetadata(new GNInsertConfiguration(geonetworkGroup, "datasets", "_none_", true), tmetafile);
 tmetafile.delete();

 }
@@ -635,7 +645,8 @@ public class GenericLayerMetadata {
 GNClient client = new GNClient(geonetworkUrl);
 client.login(geonetworkUser, geonetworkPwd);
 File tmetafile = meta2File(meta);
-client.insertMetadata(new GNInsertConfiguration("3", "datasets", "_none_", true), tmetafile);
+client.insertMetadata(new GNInsertConfiguration(geonetworkGroup, "datasets", "_none_", true), tmetafile);
 tmetafile.delete();

 }
@@ -1,6 +1,5 @@
 package org.gcube.dataanalysis.geo.meta;

-import it.geosolutions.geonetwork.util.GNInsertConfiguration;
 import it.geosolutions.geoserver.rest.encoder.GSLayerEncoder;
 import it.geosolutions.geoserver.rest.encoder.feature.GSFeatureTypeEncoder;

@@ -24,7 +23,7 @@ public class PolyMapMetadata {

 private static final String crs = "GEOGCS[\"WGS 84\", DATUM[\"World Geodetic System 1984\", SPHEROID[\"WGS 84\", 6378137.0, 298.257223563, AUTHORITY[\"EPSG\",\"7030\"]]," + "AUTHORITY[\"EPSG\",\"6326\"]], PRIMEM[\"Greenwich\", 0.0, AUTHORITY[\"EPSG\",\"8901\"]], UNIT[\"degree\", 0.017453292519943295]," + "AXIS[\"Geodetic longitude\", EAST], AXIS[\"Geodetic latitude\", NORTH], AUTHORITY[\"EPSG\",\"4326\"]]";

-public static PublishResponse publishTable(String scope, String tableName, double resolution, String username, String layerName, String defaultStyle, String workspace, String datastore, String purpose, String credits, String keyword) throws Exception {
+public static PublishResponse publishTable(String scope, String tableName, double resolution, String username, String layerName, String defaultStyle, String workspace, String datastore, String purpose, String credits, String keyword, boolean isprivate) throws Exception {

 GSFeatureTypeEncoder fte = new GSFeatureTypeEncoder();
 fte.setEnabled(true);
@@ -40,7 +39,12 @@ public class PolyMapMetadata {
 GISInterface gis = GISInterface.get();
 Configuration gnConfig = gis.getGeoNetworkReader().getConfiguration();
 AnalysisLogger.getLogger().debug("Using the following GNetwork:" + gnConfig.getGeoNetworkEndpoint());
-PublishResponse resp = gis.publishDBTable(workspace, datastore, fte, le, meta.getMetadata(), new GNInsertConfiguration(gnConfig.getScopeGroup() + "", "datasets", "_none_", true), LoginLevel.DEFAULT);
+LoginLevel level = LoginLevel.SCOPE;
+if (isprivate)
+level = LoginLevel.PRIVATE;
+
+//PublishResponse resp = gis.publishDBTable(workspace, datastore, fte, le, meta.getMetadata(), new GNInsertConfiguration(gnConfig.getScopeGroup() + "", "datasets", "_none_", true), LoginLevel.DEFAULT);
+PublishResponse resp = gis.publishDBTable(workspace, datastore, fte, le, meta.getMetadata(), "datasets","_none_", level);
 AnalysisLogger.getLogger().debug(resp);
 AnalysisLogger.getLogger().debug("ID:" + resp.getReturnedMetaId());
 AnalysisLogger.getLogger().debug("Result:" + resp.getMetaOperationResult());
@@ -2,6 +2,7 @@ package org.gcube.dataanalysis.geo.test;

 import java.util.List;

+import org.gcube.common.scope.api.ScopeProvider;
 import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
 import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
@@ -17,22 +18,25 @@ public class TestRasterPublisher {

 public static void main(String[] args) throws Exception {
 AlgorithmConfiguration config = new AlgorithmConfiguration();
+ScopeProvider.instance.set("/gcube/devsec");
+
 config.setConfigPath("./cfg/");
 config.setPersistencePath("./");

 config.setAgent("RASTER_DATA_PUBLISHER");

 config.setGcubeScope("/gcube/devsec");

 config.setParam("ServiceUserName","gianpaolo.coro");
+config.setParam("PublicationLevel","PRIVATE");
 config.setParam("DatasetTitle", "test raster dataset production");
 config.setParam("DatasetAbstract", "test raster dataset production abstract");
 config.setParam("InnerLayerName", "adux_pres_1");
 config.setParam("RasterFile", "C:/Users/coro/Downloads/adux_pres_portale_test.nc");
 config.setParam("Topics", "adux"+AlgorithmConfiguration.listSeparator+"gianpaolo");
 config.setParam("SpatialResolution", "-1");
+config.setParam("FileNameOnInfra", "adux_pres_portale_test_10.nc");

 AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);

 AnalysisLogger.getLogger().debug("Executing: " + config.getAgent());
@@ -23,6 +23,6 @@ public class TestStandardLayerMetadataInsert {
 metadataInserter.setXRightUpper(180);
 metadataInserter.setYRightUpper(85.5);

-metadataInserter.insertMetaData();
+//metadataInserter.insertMetaData("");
 }
 }
@@ -1,5 +1,6 @@
 package org.gcube.dataanalysis.geo.test.maps;

+import org.gcube.common.scope.api.ScopeProvider;
 import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
 import org.gcube.dataanalysis.geo.algorithms.PointsMapsCreator;
@@ -63,9 +64,12 @@ public static void main(String[] args) throws Exception{
 config.setConfigPath(cfg);
 // config.setGcubeScope("/gcube/devsec/statVRE");
 config.setGcubeScope("/gcube/devsec/devVRE");
+ScopeProvider.instance.set("/gcube/devsec/devVRE");
+
 config.setPersistencePath("./");

-config.setParam("MapName","Test Polygonal Map Ph 7");
+config.setParam("MapName","Test Polygonal Map Ph 10");
+config.setParam("PublicationLevel","PRIVATE");
 /*
 config.setParam("InputTable","occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
 config.setParam("xDimension","decimallongitude");
@@ -73,18 +77,28 @@ public static void main(String[] args) throws Exception{
 config.setParam("Info","recordedby") ;
 config.setParam("Resolution","0.5");
 */
+/*
 config.setParam("InputTable","testextraction2");
 config.setParam("xDimension","x");
 config.setParam("yDimension","y");
 config.setParam("Info","fvalue") ;
+*/
+
+config.setParam("InputTable","code_85e5d927f7094a3ca677a53f4433fed4");
+config.setParam("xDimension","longitude");
+config.setParam("yDimension","latitude");
+config.setParam("Info","longitude") ;
+
+config.setParam("DatabaseUserName","utente");
+config.setParam("DatabasePassword","d4science");
+config.setParam("DatabaseURL","jdbc:postgresql://statistical-manager.d.d4science.org/testdb");
+config.setParam("DatabaseDriver","org.postgresql.Driver");
+/*
 config.setParam("DatabaseUserName","gcube");
 config.setParam("DatabasePassword","d4science2");
 config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
 config.setParam("DatabaseDriver","org.postgresql.Driver");
+*/
 config.setParam("Z","0");

 config.setParam("user", "postgres");
@@ -0,0 +1,6 @@
+package org.gcube.dataanalysis.geo.utils;
+
+public enum GeospatialDataPublicationLevel {
+PUBLIC,
+PRIVATE
+}
@@ -30,10 +30,10 @@ public class ThreddsPublisher {
 String[] topics = {"adux","D4Science"};
 double resolution = -1;
 AnalysisLogger.setLogger("./cfg/"+AlgorithmConfiguration.defaultLoggerFile);
-publishOnThredds(scope, username, fileAbsolutePath, layerTitle, layerName, abstractField, topics, resolution);
+publishOnThredds(scope, username, fileAbsolutePath, layerTitle, layerName, abstractField, topics, resolution,false);
 }

-public static boolean publishOnThredds(String scope,String username, String fileAbsolutePath, String layerTitle, String layerName, String abstractField, String[] topics, double resolution) throws Exception{
+public static boolean publishOnThredds(String scope,String username, String fileAbsolutePath, String layerTitle, String layerName, String abstractField, String[] topics, double resolution, boolean isprivate) throws Exception{
 //TODO manage faults
 String remoteFolder = "/data/content/thredds/public/netcdf/";
 List<String> threddsAddress = InfraRetrieval.retrieveServiceAddress("Gis", "THREDDS", scope, "Geoserver");
@@ -90,11 +90,11 @@ public class ThreddsPublisher {
 AnalysisLogger.getLogger().debug("Adding metadata on GeoNetwork");

 if (fileAbsolutePath.endsWith(".nc") && gridded)
-publishNetCDFMeta(scope, layerTitle, abstractField, new File(fileAbsolutePath).getName(),layerName,threddServiceAddress,username,topics);
+publishNetCDFMeta(scope, layerTitle, abstractField, new File(fileAbsolutePath).getName(),layerName,threddServiceAddress,username,topics,isprivate);
 else{
 if (resolution==-1 && gridded)
 throw new Exception ("Specify valid resolution parameter for non-NetCDF raster datasets");
-publishOtherFileMeta(scope, layerTitle, resolution, abstractField, new File(fileAbsolutePath).getName(), threddServiceAddress,username,topics);
+publishOtherFileMeta(scope, layerTitle, resolution, abstractField, new File(fileAbsolutePath).getName(), threddServiceAddress,username,topics,isprivate);
 }


@@ -102,7 +102,7 @@ public class ThreddsPublisher {
 return true;
 }

-private static void publishOtherFileMeta(String scope, String layerTitle, double resolution, String abstractField, String filename, String threddsURL, String username, String [] topics) throws Exception{
+private static void publishOtherFileMeta(String scope, String layerTitle, double resolution, String abstractField, String filename, String threddsURL, String username, String [] topics, boolean isprivate) throws Exception{
 GenericLayerMetadata metadataInserter = new GenericLayerMetadata();

 GeoNetworkInspector gninspector =new GeoNetworkInspector();
@@ -110,12 +110,18 @@ public class ThreddsPublisher {
 String geonetworkURL = gninspector.getGeonetworkURLFromScope();
 String geonetworkUser = gninspector.getGeonetworkUserFromScope();
 String geonetworkPassword = gninspector.getGeonetworkPasswordFromScope();
-AnalysisLogger.getLogger().debug("GeoNetwork Info: "+geonetworkURL+" "+geonetworkUser);
+String geonetworkGroup = "";
+if (isprivate)
+geonetworkGroup = gninspector.getGeonetworkPrivateGroup();
+else
+geonetworkGroup = gninspector.getGeonetworkPublicGroup();
+
+AnalysisLogger.getLogger().debug("GeoNetwork Info: "+geonetworkURL+" "+geonetworkUser+" "+geonetworkGroup);

 metadataInserter.setGeonetworkUrl(geonetworkURL);
 metadataInserter.setGeonetworkPwd(geonetworkPassword);
 metadataInserter.setGeonetworkUser(geonetworkUser);
+metadataInserter.setGeonetworkGroup(geonetworkGroup);

 metadataInserter.setTitle(layerTitle);
 metadataInserter.setCategoryTypes("_"+TopicCategory.ENVIRONMENT.name()+"_");
@@ -132,11 +138,13 @@ public class ThreddsPublisher {

 String [] protocols = {"HTTP"};

+AnalysisLogger.getLogger().debug("Publishing in group: "+metadataInserter.getGeonetworkGroup());
+AnalysisLogger.getLogger().debug("Inserting custom metadata ");
 metadataInserter.customMetaDataInsert(urls,protocols);
 }


-private static void publishNetCDFMeta(String scope, String layerTitle,String abstractField, String filename, String netCDFLayerName, String threddsURL, String username, String [] topics) throws Exception{
+private static void publishNetCDFMeta(String scope, String layerTitle,String abstractField, String filename, String netCDFLayerName, String threddsURL, String username, String [] topics, boolean isprivate) throws Exception{
 AnalysisLogger.getLogger().debug("Getting GeoNetwork Info");

 GenericLayerMetadata metadataInserter = new GenericLayerMetadata();
@@ -145,12 +153,18 @@ public class ThreddsPublisher {
 String geonetworkURL = gninspector.getGeonetworkURLFromScope();
 String geonetworkUser = gninspector.getGeonetworkUserFromScope();
 String geonetworkPassword = gninspector.getGeonetworkPasswordFromScope();
+String geonetworkGroup = "";
+if (isprivate)
+geonetworkGroup = gninspector.getGeonetworkPrivateGroup();
+else
+geonetworkGroup = gninspector.getGeonetworkPublicGroup();
+
-AnalysisLogger.getLogger().debug("GeoNetwork Info: "+geonetworkURL+" "+geonetworkUser);
+AnalysisLogger.getLogger().debug("GeoNetwork Info: "+geonetworkURL+" "+geonetworkUser+" "+geonetworkGroup);

 metadataInserter.setGeonetworkUrl(geonetworkURL);
 metadataInserter.setGeonetworkPwd(geonetworkPassword);
 metadataInserter.setGeonetworkUser(geonetworkUser);
+metadataInserter.setGeonetworkGroup(geonetworkGroup);

 metadataInserter.setTitle(layerTitle);
 metadataInserter.setCategoryTypes("_"+TopicCategory.ENVIRONMENT.name()+"_");
@@ -189,7 +203,7 @@ public class ThreddsPublisher {
 metadataInserter.setYLeftLow(minY);
 metadataInserter.setXRightUpper(maxX);
 metadataInserter.setYRightUpper(maxY);
+AnalysisLogger.getLogger().debug("Publishing in group: "+metadataInserter.getGeonetworkGroup());
 AnalysisLogger.getLogger().debug("Inserting metadata ");
 metadataInserter.customMetaDataInsert(urls,protocols);
 }

@@ -1,5 +1,6 @@
 package org.gcube.dataanalysis.geo.vti;

+import java.io.File;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
@@ -140,8 +141,10 @@ public class EstimateFishingActivity extends GridCWP2Coordinates{
 "\""+config.getParam(VesselsLongitudesColumn)+"\""+" as fhv04,"+
 "\""+config.getParam(VesselsLatitudesColumn)+"\""+" as fhv05 "+
 " from "+table+
-" where "+"\""+config.getParam(VesselsIDColumn)+"\""+" ="+vesselID+" order by CAST("+"\""+config.getParam(VesselsTimestampsColumn)+"\""+" as timestamp)";
+" where "+"CAST(\""+config.getParam(VesselsIDColumn)+"\" as integer)"+" ="+vesselID+" order by CAST("+"\""+config.getParam(VesselsTimestampsColumn)+"\""+" as timestamp)";
+
+

 AnalysisLogger.getLogger().debug("EstimateFishingActivity: EstimateFishingActivity: Select trajectory: "+selectTrajectory);

 List<Object> vesselTrajectory = DatabaseFactory.executeSQLQuery(selectTrajectory, connection);
@@ -180,7 +183,9 @@ public class EstimateFishingActivity extends GridCWP2Coordinates{

 short [] bathymetry = null;
 try{
-Bathymetry bathymetryprocessor = new Bathymetry(config.getConfigPath()+"gebco_08.nc");
+String bathpath=new File(config.getConfigPath(),"gebco_08.nc").getAbsolutePath();
+AnalysisLogger.getLogger().debug("EstimateFishingActivity: searching bathymetry in "+bathpath);
+Bathymetry bathymetryprocessor = new Bathymetry(bathpath);
 bathymetry = bathymetryprocessor.compute(coordinates);
 }catch(Exception e){
 AnalysisLogger.getLogger().debug("EstimateFishingActivity: Error - Bathymetry resource not available for the service "+e.getLocalizedMessage());