Lucio Lelii 2017-05-25 14:50:11 +00:00
parent 03efa271dc
commit 98faecfa4c
3 changed files with 22 additions and 33 deletions

View File

@@ -8,14 +8,13 @@ import java.util.UUID;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalInfraAlgorithm;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.gcube.dataanalysis.geo.meta.PolyMapMetadata;
import org.gcube.dataanalysis.geo.utils.GeospatialDataPublicationLevel;
import org.gcube.spatial.data.gis.model.report.PublishResponse;
import org.gcube.spatial.data.gis.model.report.Report.OperationState;
import org.hibernate.SessionFactory;
import scala.collection.Iterator;
@@ -216,15 +215,15 @@ public abstract class MapsCreator extends StandardLocalInfraAlgorithm {
PublishResponse response = PolyMapMetadata.publishTable(scope, gisTableName, resolution, username, layerName, defaultStyle, workspace, datastore, purpose, credits, keyword,isprivate);
status = 80;
//analyzing response
if (response == null) {
log("Error in generating map - dropping gis table");
if (response.getMetaOperationResult() != OperationState.COMPLETE && response.getDataOperationResult()!= OperationState.COMPLETE) {
log("Error in generating map - dropping gis table - error on data are "+response.getDataOperationMessages()+" erorre on metadata are "+response.getMetaOperationMessages());
try {
DatabaseFactory.executeSQLUpdate(DatabaseUtils.dropTableStatement(gisTableName), gisdbconnection);
log("gis table dropped");
} catch (Exception e) {
log("Impossible to drop table:" + e.getLocalizedMessage());
}
throw new Exception("Impossible to publish on GeoNetwork or GeoServer: " + gisTableName);
throw new Exception("Impossible to publish on GeoNetwork or GeoServer table: " + gisTableName+" (error on data are "+response.getDataOperationMessages()+" erorre on metadata are "+response.getMetaOperationMessages()+" )");
} else {
//writing output
addOutputString("GIS map title", layerName);

View File

@@ -1,8 +1,6 @@
package org.gcube.dataanalysis.geo.algorithms;
import java.io.File;
import java.sql.Connection;
import java.sql.DriverManager;
import java.util.List;
import java.util.UUID;
@@ -18,6 +16,7 @@ import org.gcube.dataanalysis.geo.meta.PolyMapMetadata;
import org.gcube.dataanalysis.geo.utils.GdalConverter;
import org.gcube.dataanalysis.geo.utils.GeospatialDataPublicationLevel;
import org.gcube.spatial.data.gis.model.report.PublishResponse;
import org.gcube.spatial.data.gis.model.report.Report.OperationState;
public class ShapefilePublisher extends StandardLocalInfraAlgorithm{
@@ -149,14 +148,13 @@ public class ShapefilePublisher extends StandardLocalInfraAlgorithm{
try{
response = PolyMapMetadata.publishTable(scope, gisRandomTableName, resolution, username, layerName, defaultStyle, workspace, datastore, purpose, credits, keywords, isprivate);
}catch(Exception e){
e.printStackTrace();
log ("ShapefilePublisher->Error during table publication: "+e.getLocalizedMessage());
throw new Exception("Error during the publication of the shapefile on the SDI");
throw new Exception("Error during the publication of the shapefile on the SDI",e);
}
log("ShapefilePublisher->Finished publishing the table");
if (response == null) {
if (response.getMetaOperationResult() != OperationState.COMPLETE && response.getDataOperationResult()!= OperationState.COMPLETE){
log("ShapefilePublisher->Error in generating map");
throw new Exception("Impossible to publish on GeoNetwork or GeoServer this table: " + gisRandomTableName+" possibly it is already present!");
} else {

View File

@@ -1,23 +1,14 @@
package org.gcube.dataanalysis.geo.meta;
import it.geosolutions.geonetwork.util.GNPriv;
import it.geosolutions.geonetwork.util.GNPrivConfiguration;
import it.geosolutions.geoserver.rest.encoder.GSLayerEncoder;
import it.geosolutions.geoserver.rest.encoder.feature.GSFeatureTypeEncoder;
import java.util.Date;
import java.util.EnumSet;
import java.util.Set;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.spatial.data.geonetwork.GeoNetwork;
import org.gcube.spatial.data.geonetwork.GeoNetworkPublisher;
import org.gcube.spatial.data.geonetwork.LoginLevel;
import org.gcube.spatial.data.geonetwork.configuration.Configuration;
import org.gcube.spatial.data.geonetwork.iso.GcubeISOMetadata;
import org.gcube.spatial.data.geonetwork.iso.Thesaurus;
import org.gcube.spatial.data.geonetwork.model.ScopeConfiguration;
import org.gcube.spatial.data.gis.GISInterface;
import org.gcube.spatial.data.gis.model.report.PublishResponse;
import org.geotoolkit.metadata.iso.extent.DefaultExtent;
@@ -25,11 +16,15 @@ import org.opengis.metadata.citation.PresentationForm;
import org.opengis.metadata.identification.TopicCategory;
import org.opengis.metadata.spatial.GeometricObjectType;
import org.opengis.metadata.spatial.TopologyLevel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class PolyMapMetadata {
private static final String crs = "GEOGCS[\"WGS 84\", DATUM[\"World Geodetic System 1984\", SPHEROID[\"WGS 84\", 6378137.0, 298.257223563, AUTHORITY[\"EPSG\",\"7030\"]]," + "AUTHORITY[\"EPSG\",\"6326\"]], PRIMEM[\"Greenwich\", 0.0, AUTHORITY[\"EPSG\",\"8901\"]], UNIT[\"degree\", 0.017453292519943295]," + "AXIS[\"Geodetic longitude\", EAST], AXIS[\"Geodetic latitude\", NORTH], AUTHORITY[\"EPSG\",\"4326\"]]";
private static Logger LOGGER = LoggerFactory.getLogger(PolyMapMetadata.class);
public static PublishResponse publishTable(String scope, String tableName, double resolution, String username, String layerName, String defaultStyle, String workspace, String datastore, String purpose, String credits, String keyword, boolean isprivate) throws Exception {
GSFeatureTypeEncoder fte = new GSFeatureTypeEncoder();
@@ -40,12 +35,12 @@ public class PolyMapMetadata {
GSLayerEncoder le = new GSLayerEncoder();
le.setDefaultStyle(defaultStyle);
le.setEnabled(true);
AnalysisLogger.getLogger().debug("Filling Metadata");
LOGGER.debug("Filling Metadata");
GcubeISOMetadata meta = fillMeta(resolution, username, layerName, scope, tableName, purpose, credits, keyword);
AnalysisLogger.getLogger().debug("Getting GIS from scope " + scope);
LOGGER.debug("Getting GIS from scope " + scope);
GISInterface gis = GISInterface.get();
Configuration gnConfig = gis.getGeoNetworkReader().getConfiguration();
AnalysisLogger.getLogger().debug("Using the following GNetwork:" + gnConfig.getGeoNetworkEndpoint());
LOGGER.debug("Using the following GNetwork:" + gnConfig.getGeoNetworkEndpoint());
LoginLevel level = LoginLevel.SCOPE;
if (isprivate)
level = LoginLevel.PRIVATE;
@@ -54,13 +49,10 @@ public class PolyMapMetadata {
PublishResponse resp = gis.publishDBTable(workspace, datastore, fte, le, meta.getMetadata(), "datasets", "_none_", level,!isprivate);
AnalysisLogger.getLogger().debug(resp);
AnalysisLogger.getLogger().debug("ID:" + resp.getReturnedMetaId());
AnalysisLogger.getLogger().debug("Result:" + resp.getMetaOperationResult());
if (resp.getReturnedMetaId() == 0)
return null;
else
return resp;
LOGGER.debug("response is {} ",resp);
LOGGER.debug("ID {} ",resp.getReturnedMetaId());
LOGGER.debug("Result {}",resp.getMetaOperationResult());
return resp;
}
private static GcubeISOMetadata fillMeta(double resolution, String username, String title, String scope, String tableName, String purpose, String credits, String keyword) throws Exception {
@@ -70,12 +62,12 @@ public class PolyMapMetadata {
scope = ScopeProvider.instance.get();
*/
AnalysisLogger.getLogger().debug("Setting scope for filling Meta");
LOGGER.debug("Setting scope for filling Meta");
// ScopeProvider.instance.set(scope);
AnalysisLogger.getLogger().debug("Fulfilling metadata");
LOGGER.debug("Fulfilling metadata");
GcubeISOMetadata meta = new GcubeISOMetadata();
AnalysisLogger.getLogger().debug("Fulfilling metadata Begin");
LOGGER.debug("Fulfilling metadata Begin");
meta.setAbstractField("This metadata has been automatically generated from the Statistical Manager on the basis of a distribution of points and according the resolution of " + resolution + " degrees.");
meta.setCreationDate(new Date(System.currentTimeMillis()));
meta.setExtent((DefaultExtent) DefaultExtent.WORLD);
@@ -100,7 +92,7 @@ public class PolyMapMetadata {
meta.addKeyword(keyword, generalThesaurus);
meta.addKeyword(tableName, generalThesaurus);
meta.addTopicCategory(TopicCategory.BIOTA);
AnalysisLogger.getLogger().debug("Fulfilling done");
LOGGER.debug("Fulfilling done");
return meta;
}