Shapefile publisher

Test batch processing for Public layers

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngineGeoSpatialExtension@130888 82a268e6-3cf1-43bd-a215-b396298e98cf

parent 06424347a4
commit 4f37b11790

@@ -0,0 +1,204 @@
package org.gcube.dataanalysis.geo.algorithms;

import java.io.File;
import java.sql.Connection;
import java.sql.DriverManager;
import java.util.List;
import java.util.UUID;

import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.gcube.dataanalysis.ecoengine.utils.ZipTools;
import org.gcube.dataanalysis.geo.meta.PolyMapMetadata;
import org.gcube.dataanalysis.geo.utils.GdalConverter;
import org.gcube.dataanalysis.geo.utils.GeospatialDataPublicationLevel;
import org.gcube.spatial.data.gis.model.report.PublishResponse;

public class ShapefilePublisher extends StandardLocalExternalAlgorithm {

    private static String layerTitleParam = "MapTitle";
    private static String layerAbstractParam = "MapAbstract";
    private static String FileParam = "ShapeFileZip";
    private static String ShapeFileParam = "ShapeFileName";
    private static String TopicsParam = "Topics";
    private static String DBUserParam = "DBUser";
    private static String DBPasswordParam = "DBPassword";
    private static String DBUrlParameter = "DBUrl";
    static String publicationLevel = "PublicationLevel";

    @Override
    public void init() throws Exception {
        log("ShapefilePublisher->initialised");
    }

    @Override
    public String getDescription() {
        String description = "An algorithm to publish shapefiles under WMS and WFS standards in the e-Infrastructure. The produced WMS, WFS links are reported as output of this process. The map will be available in the VRE for consultation.";
        return description;
    }

    //static String shapeImporting = "shp2pgsql -g the_geom -d shapefile2.shp | PGPASSWORD=d4science2 psql -h geoserver-test.d4science-ii.research-infrastructures.eu -p 5432 -U postgres timeseriesgisdb";
    //static String shapeImporting = "shp2pgsql -g the_geom -d %1$s | PGPASSWORD=%2$s psql -h %3$s -p 5432 -U %4$s %5$s";
    static String shapeImporting = "shp2pgsql -s 4326 -g the_geom -d %1$s public.%6$s | PGPASSWORD=%2$s psql -h %3$s -p 5432 -U %4$s %5$s";

    @Override
    protected void process() throws Exception {
        status = 10;
        //collect information
        String databaseJdbc = getInputParameter(DBUrlParameter);
        String databaseUser = getInputParameter(DBUserParam);
        String databasePwd = getInputParameter(DBPasswordParam);

        //get the shapefile and extract the zip file
        String zipFile = getInputParameter(FileParam);
        String shapeFileName = getInputParameter(ShapeFileParam);
        String layerName = getInputParameter(layerTitleParam);
        String layerAbstract = getInputParameter(layerAbstractParam);
        String topics = getInputParameter(TopicsParam);
        String username = getInputParameter("ServiceUserName");
        String publicationLevelValue = getInputParameter(publicationLevel);

        log("ShapefilePublisher->" + databasePwd);
        log("ShapefilePublisher->Parameters:");
        log("ShapefilePublisher->zipFile:" + zipFile);
        log("ShapefilePublisher->shapeFileName:" + shapeFileName);
        log("ShapefilePublisher->layerName:" + layerName);
        log("ShapefilePublisher->layerAbstract:" + layerAbstract);
        log("ShapefilePublisher->topics:" + topics);
        log("ShapefilePublisher->databaseJdbc:" + databaseJdbc);
        log("ShapefilePublisher->databaseUser:" + databaseUser);
        log("ShapefilePublisher->service user name:" + username);

        if (topics == null || topics.trim().length() == 0)
            throw new Exception("Error topics missing!");

        File tempFolder = new File(config.getConfigPath(), "" + UUID.randomUUID());
        log("ShapefilePublisher->Creating temp folder " + tempFolder);

        boolean created = tempFolder.mkdir();
        log("ShapefilePublisher->Temp Folder creation check " + created);

        log("ShapefilePublisher->unzipping file :" + zipFile + " in folder " + tempFolder.getAbsolutePath());

        ZipTools.unZip(zipFile, tempFolder.getAbsolutePath());
        // parse a string like this:
        // jdbc:postgresql://geoserver-test.d4science-ii.research-infrastructures.eu:5432/timeseriesgisdb
        String databaseName = databaseJdbc.substring(databaseJdbc.lastIndexOf("/") + 1);
        String databaseAddress = databaseJdbc.substring(databaseJdbc.indexOf("//") + 2, databaseJdbc.lastIndexOf(":"));

        log("ShapefilePublisher->Parsed database Info:" + databaseName + " [" + databaseAddress + "]");

        String shapefile = new File(tempFolder, shapeFileName).getAbsolutePath();
        log("ShapefilePublisher->Shape file to search for:" + shapefile);

        String gisTableName = shapeFileName;
        String gisRandomTableName = ("shp_" + UUID.randomUUID()).replace("-", "");
        try {
            gisTableName = shapeFileName.substring(0, shapeFileName.lastIndexOf("."));
        } catch (Exception e) {
            throw new Exception("Error - Wrong file name " + shapeFileName);
        }

        log("ShapefilePublisher->Original table name :" + gisTableName);
        log("ShapefilePublisher->Table name to produce :" + gisRandomTableName);

        String shapeImportCommand = String.format(shapeImporting, shapefile, databasePwd, databaseAddress, databaseUser, databaseName, gisRandomTableName);

        log("ShapefilePublisher->Shape file command:" + shapeImportCommand);

        //run the importer
        status = 50;
        List<String> commandsExecuted = GdalConverter.command(shapeImportCommand, tempFolder.getAbsolutePath());
        String commandExecuted = commandsExecuted.toString();

        log("ShapefilePublisher->Command executed output:" + commandExecuted);

        //check for the import to be success
        if (!commandExecuted.contains("COMMIT")) {
            throw new Exception("An error occurred when importing the file " + commandExecuted);
        }

        log("ShapefilePublisher->Publishing the table " + gisRandomTableName);

        String scope = config.getGcubeScope();

        double resolution = 0;
        String datastore = "timeseriesws";
        String defaultStyle = "polygon";
        String workspace = "aquamaps";
        String purpose = "To Publish Geometric Layers for user-provided Vector Maps";
        String credits = "Generated via the DataMiner Service";
        String keywords = topics.replace(AlgorithmConfiguration.listSeparator, ",");

        boolean isprivate = false;
        if (GeospatialDataPublicationLevel.valueOf(publicationLevelValue) == GeospatialDataPublicationLevel.PRIVATE)
            isprivate = true;
        PublishResponse response = null;
        try {
            response = PolyMapMetadata.publishTable(scope, gisRandomTableName, resolution, username, layerName, defaultStyle, workspace, datastore, purpose, credits, keywords, isprivate);
        } catch (Exception e) {
            e.printStackTrace();
            log("ShapefilePublisher->Error during table publication: " + e.getLocalizedMessage());
            throw new Exception("Error during the publication of the shapefile on the SDI");
        }

        log("ShapefilePublisher->Finished publishing the table");

        if (response == null) {
            log("ShapefilePublisher->Error in generating map");
            throw new Exception("Impossible to publish on GeoNetwork or GeoServer this table: " + gisRandomTableName + " possibly it is already present!");
        } else {
            //writing output
            addOutputString("GIS map title", layerName);
            addOutputString("GIS map UUID", "" + response.getPublishedMetadata().getFileIdentifier());
            addOutputString("GIS Table ", gisRandomTableName);
            addOutputString("Generated by ", username);
            addOutputString("Resolution", "" + resolution);
            addOutputString("Style", "" + defaultStyle);
            addOutputString("Keyword", "" + topics);
        }

        log("ShapefilePublisher->Output produced:" + outputParameters);

        log("ShapefilePublisher->All done!");
        status = 100;
    }

    @Override
    protected void setInputParameters() {
        try {
            PrimitiveType e = new PrimitiveType(Enum.class.getName(), GeospatialDataPublicationLevel.values(), PrimitiveTypes.ENUMERATED, publicationLevel, "The visibility level of the produced map", "" + GeospatialDataPublicationLevel.PRIVATE);
            inputs.add(e);
            addStringInput(layerTitleParam, "Title of the geospatial dataset to be shown on GeoExplorer", "Generic Vector Layer");
            addStringInput(ShapeFileParam, "Name of the shape file inside the zip", "shapefile.shp");
            addStringInput(layerAbstractParam, "Abstract defining the content, the references and usage policies", "Abstract");
            inputs.add(new PrimitiveType(File.class.getName(), null, PrimitiveTypes.FILE, FileParam, "Shapefile zip file to process"));
            inputs.add(new PrimitiveTypesList(String.class.getName(), PrimitiveTypes.STRING, TopicsParam, "Topics to be attached to the published dataset. E.g. Biodiversity, D4Science, Environment, Weather", false));
            inputs.add(new ServiceType(ServiceParameters.USERNAME, "ServiceUserName", "The final user Name"));

            addRemoteDatabaseInput("UsersGisTablesDB", DBUrlParameter, DBUserParam, DBPasswordParam, "driver", "dialect");

        } catch (Throwable e) {
            e.printStackTrace();
        }
    }

    @Override
    public void shutdown() {

    }

}
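For illustration only (not part of the commit): a minimal, self-contained sketch of how the shapeImporting template above is expanded by String.format in process(), and how the JDBC URL is parsed into host and database name. The concrete values are taken from the commented example command and JDBC URL in the class; the temp path and random table name are hypothetical.

public class ShapeImportCommandSketch {
    public static void main(String[] args) {
        // Hypothetical unzipped shapefile path and random table name; other values come from the comments in ShapefilePublisher.
        String shapefile = "/tmp/shapefiletest/shapefile2.shp";
        String databaseJdbc = "jdbc:postgresql://geoserver-test.d4science-ii.research-infrastructures.eu:5432/timeseriesgisdb";
        // Same parsing as ShapefilePublisher.process():
        String databaseName = databaseJdbc.substring(databaseJdbc.lastIndexOf("/") + 1);                                 // "timeseriesgisdb"
        String databaseAddress = databaseJdbc.substring(databaseJdbc.indexOf("//") + 2, databaseJdbc.lastIndexOf(":"));  // the host name
        String cmd = String.format(
            "shp2pgsql -s 4326 -g the_geom -d %1$s public.%6$s | PGPASSWORD=%2$s psql -h %3$s -p 5432 -U %4$s %5$s",
            shapefile, "d4science2", databaseAddress, "postgres", databaseName, "shp_0123456789abcdef");
        // Prints the shp2pgsql | psql pipeline that GdalConverter.command() then runs through bash.
        System.out.println(cmd);
    }
}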
@@ -0,0 +1,147 @@
package org.gcube.dataanalysis.geo.batch;

import it.geosolutions.geonetwork.GNClient;
import it.geosolutions.geonetwork.util.GNInsertConfiguration;
import it.geosolutions.geonetwork.util.GNSearchRequest;
import it.geosolutions.geonetwork.util.GNSearchResponse;

import java.io.File;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.geo.meta.GenericLayerMetadata;
import org.gcube.spatial.data.geonetwork.GeoNetwork;
import org.gcube.spatial.data.geonetwork.GeoNetworkAdministration;
import org.gcube.spatial.data.geonetwork.LoginLevel;
import org.gcube.spatial.data.geonetwork.configuration.Configuration;
import org.gcube.spatial.data.geonetwork.configuration.DefaultConfiguration;
import org.gcube.spatial.data.geonetwork.model.Account;
import org.gcube.spatial.data.geonetwork.model.ScopeConfiguration;
import org.gcube.spatial.data.geonetwork.model.faults.MissingConfigurationException;
import org.gcube.spatial.data.geonetwork.utils.UserUtils;
import org.opengis.metadata.Metadata;
import org.opengis.metadata.identification.Identification;
import org.opengis.metadata.identification.TopicCategory;

public class CheckLayerRetrieval {

    static String geonetworkurl = "http://geonetwork.d4science.org/geonetwork/";
    //static String geonetworkurl = "http://geoserver-dev2.d4science-ii.research-infrastructures.eu/geonetwork/";

    //static String geoserverurl = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver";

    static String geonetworkUser = "admin";
    //static String geonetworkPwd = "Geey6ohz";
    static String geonetworkPwd = "kee9GeeK";

    public static void main(String[] args) throws Exception {
        String scope = "/d4science.research-infrastructures.eu/gCubeApps";
        String title = "oscar";
        ScopeProvider.instance.set(scope);

        GeoNetworkAdministration reader = GeoNetwork.get();
        reader.login(LoginLevel.SCOPE);

        //Configure search request
        GNSearchRequest req = new GNSearchRequest();
        req.addParam(GNSearchRequest.Param.any, title);
        req.addConfig(GNSearchRequest.Config.similarity, "1");
        GNSearchResponse resp = reader.query(req);
        System.out.println("Found N layers: " + resp.getCount());
        Long id = 0L;
        Metadata meta = null;
        DefaultConfiguration geonetworkCfg = (DefaultConfiguration) reader.getConfiguration();
        Integer scopePublicGroup = geonetworkCfg.getScopeConfiguration().getPublicGroup();
        System.out.println("scopePublicGroup " + scopePublicGroup);
        for (GNSearchResponse.GNMetadata metadata : resp) {
            id = metadata.getId();
            System.out.println("ID " + id);
            System.out.println("Name " + metadata.getInfo().getName());
            meta = reader.getById(id);
            Identification idf = meta.getIdentificationInfo().iterator().next();
            String otitle = idf.getCitation().getTitle().toString();
            System.out.println("Title " + otitle);
        }
    }

    public static void main2(String[] args) throws Exception {
        //String startScope = "/d4science.research-infrastructures.eu/gCubeApps/ScalableDataMining";
        String targetScope = "/d4science.research-infrastructures.eu/gCubeApps";

        // String targetScope = "/gcube/devsec";
        //String title ="Ocean Surface Zonal Currents (u) in 1992 from OSCAR Third Degree Sea Surface Velocity [oscar_vel1992_180.nc]";
        String title = "oscar";
        checkLayerInScope(title, targetScope, targetScope);
    }

    public static void checkLayerInScope(String title, String startScope, String targetScope) throws Exception {

        ScopeProvider.instance.set(startScope);

        GeoNetworkAdministration reader = GeoNetwork.get();
        reader.login(LoginLevel.SCOPE);
        DefaultConfiguration geonetworkCfg = (DefaultConfiguration) reader.getConfiguration();
        Integer scopePublicGroup = geonetworkCfg.getScopeConfiguration().getPublicGroup();
        Map<Account.Type, Account> accounts = geonetworkCfg.getScopeConfiguration().getAccounts();
        Account account = accounts.get(Account.Type.SCOPE);
        String geonetworkUser = account.getUser();
        String geonetworkPassword = account.getPassword();
        System.out.println("GeoNetwork user " + geonetworkUser);
        System.out.println("GeoNetwork password " + geonetworkPassword);
        System.out.println("GeoNetwork scope Public Group " + scopePublicGroup);

        //getScopeConfiguration().getPublicGroup();

        //Configure search request
        GNSearchRequest req = new GNSearchRequest();
        req.addParam(GNSearchRequest.Param.any, title);
        req.addConfig(GNSearchRequest.Config.similarity, "1");
        GNSearchResponse resp = reader.query(req);

        //Iterate through results and access found metadata
        Long id = 0L;
        Metadata meta = null;
        System.out.println("Found N layers: " + resp.getCount());
        for (GNSearchResponse.GNMetadata metadata : resp) {
            id = metadata.getId();
            System.out.println("ID " + id);
            System.out.println("Name " + metadata.getInfo().getName());
            meta = reader.getById(id);
            Identification idf = meta.getIdentificationInfo().iterator().next();
            String otitle = idf.getCitation().getTitle().toString();
            System.out.println("Title " + otitle);
            if (!otitle.toLowerCase().contains(title.toLowerCase())) {
                System.out.println("Invalid layer");
                continue;
            }

            //look for target configuration
            ScopeConfiguration targetConfiguration = null;
            targetScope = targetScope.substring(targetScope.lastIndexOf("/") + 1);
            System.out.println("target scope " + targetScope);
            for (ScopeConfiguration configuration : reader.getConfiguration().getExistingConfigurations())
                if (configuration.getAssignedScope().equals(targetScope)) targetConfiguration = configuration;

            if (targetConfiguration == null)
                throw new MissingConfigurationException("Scope " + targetScope + " has no configuration");

            int targetUserId = UserUtils.getByName(reader.getUsers(), targetConfiguration.getAccounts().get(Account.Type.SCOPE).getUser()).getId();
            int targetGroup = targetConfiguration.getDefaultGroup();

            System.out.println("INFO: ID " + id + " targetUserId " + targetUserId + " targetGroup " + targetGroup);
            System.out.println("Done with " + otitle);
            Thread.sleep(2000);
            //break;
        }

        System.out.println("All done");
    }

}
@@ -0,0 +1,253 @@
package org.gcube.dataanalysis.geo.batch;

import it.geosolutions.geonetwork.GNClient;
import it.geosolutions.geonetwork.util.GNInsertConfiguration;
import it.geosolutions.geonetwork.util.GNSearchRequest;
import it.geosolutions.geonetwork.util.GNSearchResponse;

import java.io.File;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.geo.meta.GenericLayerMetadata;
import org.gcube.spatial.data.geonetwork.GeoNetwork;
import org.gcube.spatial.data.geonetwork.GeoNetworkAdministration;
import org.gcube.spatial.data.geonetwork.LoginLevel;
import org.gcube.spatial.data.geonetwork.configuration.DefaultConfiguration;
import org.gcube.spatial.data.geonetwork.model.Account;
import org.gcube.spatial.data.geonetwork.model.ScopeConfiguration;
import org.gcube.spatial.data.geonetwork.model.faults.MissingConfigurationException;
import org.gcube.spatial.data.geonetwork.utils.UserUtils;
import org.opengis.metadata.Metadata;
import org.opengis.metadata.identification.Identification;
import org.opengis.metadata.identification.TopicCategory;

public class OpenLayerToVREs {

    static String geonetworkurl = "http://geonetwork.d4science.org/geonetwork/";
    // static String geonetworkurl = "http://geoserver-dev2.d4science-ii.research-infrastructures.eu/geonetwork/";

    // static String geoserverurl = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver";

    static String geonetworkUser = "admin";
    // static String geonetworkPwd = "Geey6ohz";
    static String geonetworkPwd = "kee9GeeK";

    public static void main(String[] args) throws Exception {
        // String startScope = "/d4science.research-infrastructures.eu/gCubeApps/ScalableDataMining";
        String targetScope = "/d4science.research-infrastructures.eu/gCubeApps";
        // String targetScope = "/gcube/devsec";
        // String title ="Ocean Surface Zonal Currents (u) in 1992 from OSCAR Third Degree Sea Surface Velocity [oscar_vel1992_180.nc]";
        String title = "Oscar";
        // String title = "Ocean Surface Meridional Currents (v) in 1992 from OSCAR Third Degree Sea Surface Velocity [oscar_vel1992_180.nc]";
        // String searchtitle ="Oscar";
        //changeScope3(searchtitle,title, targetScope, targetScope);
        changeScope2(title, targetScope, targetScope);
    }

    public static void changeScope3(String searchTitle, String title, String startScope, String targetScope) throws Exception {

        ScopeProvider.instance.set("/gcube/devsec");
        GeoNetworkAdministration reader = GeoNetwork.get();
        reader.login(LoginLevel.ADMIN);

        // Configure search request
        GNSearchRequest req = new GNSearchRequest();
        req.addParam(GNSearchRequest.Param.any, searchTitle);
        req.addConfig(GNSearchRequest.Config.similarity, "1");
        GNSearchResponse resp = reader.query(req);

        // Iterate through results and access found metadata
        Long id = 0L;
        Metadata meta = null;
        int N = resp.getCount();
        System.out.println("Found N layers: " + N);
        int i = 1;
        for (GNSearchResponse.GNMetadata metadata : resp) {
            System.out.println("Layer " + i + " of " + N);
            i++;
            id = metadata.getId();
            System.out.println("ID " + id);
            System.out.println("Name " + metadata.getInfo().getName());
            meta = reader.getById(id);
            Identification idf = meta.getIdentificationInfo().iterator().next();
            String otitle = idf.getCitation().getTitle().toString();
            System.out.println("Title " + otitle);
            if (!otitle.toLowerCase().contains(title.toLowerCase())) {
                System.out.println("Invalid layer");
                continue;
            }
            System.out.println("Inserting meta in prod " + otitle);
            GNClient client = new GNClient(geonetworkurl);
            client.login(geonetworkUser, geonetworkPwd);

            File tmetafile = GenericLayerMetadata.meta2File(meta);

            int scopePublicGroup = 23;
            client.insertMetadata(new GNInsertConfiguration("" + scopePublicGroup, "datasets", "_none_", true), tmetafile);

            tmetafile.delete();
            System.out.println("Done with " + otitle);
            Thread.sleep(2000);

        }

        System.out.println("All done");
    }

    public static void changeScope2(String title, String startScope, String targetScope) throws Exception {

        ScopeProvider.instance.set(startScope);

        GeoNetworkAdministration reader = GeoNetwork.get();
        reader.login(LoginLevel.SCOPE);
        DefaultConfiguration geonetworkCfg = (DefaultConfiguration) reader.getConfiguration();

        Map<Account.Type, Account> accounts = geonetworkCfg.getScopeConfiguration().getAccounts();
        Account account = accounts.get(Account.Type.SCOPE);

        String geonetworkScopeUser = account.getUser();
        String geonetworkScopePassword = account.getPassword();
        Integer scopePublicGroup = geonetworkCfg.getScopeConfiguration().getPublicGroup();

        System.out.println("GeoNetwork user " + geonetworkScopeUser);
        System.out.println("GeoNetwork password " + geonetworkScopePassword);
        System.out.println("GeoNetwork scope Public Group " + scopePublicGroup);

        System.out.println("GeoNetwork Admin user " + geonetworkCfg.getAdminAccount().getUser());
        System.out.println("GeoNetwork Admin password " + geonetworkCfg.getAdminAccount().getPassword());
        System.out.println("GeoNetwork scope Public Group " + scopePublicGroup);

        reader = GeoNetwork.get();
        reader.login(LoginLevel.ADMIN);

        // Configure search request
        GNSearchRequest req = new GNSearchRequest();
        req.addParam(GNSearchRequest.Param.any, title);
        req.addConfig(GNSearchRequest.Config.similarity, "1");
        GNSearchResponse resp = reader.query(req);

        // Iterate through results and access found metadata
        Long id = 0L;
        Metadata meta = null;
        int N = resp.getCount();
        System.out.println("Found N layers: " + N);
        int i = 1;
        for (GNSearchResponse.GNMetadata metadata : resp) {
            System.out.println("Layer " + i + " of " + N);
            i++;
            id = metadata.getId();
            System.out.println("ID " + id);
            System.out.println("Name " + metadata.getInfo().getName());
            meta = reader.getById(id);
            Identification idf = meta.getIdentificationInfo().iterator().next();
            String otitle = idf.getCitation().getTitle().toString();
            System.out.println("Title " + otitle);
            if (!otitle.toLowerCase().contains(title.toLowerCase())) {
                System.out.println("Invalid layer");
                continue;
            }

            System.out.println("Publishing " + id);
            // look for target configuration
            ScopeConfiguration targetConfiguration = null;
            targetScope = targetScope.substring(targetScope.lastIndexOf("/") + 1);
            System.out.println("target scope " + targetScope);
            for (ScopeConfiguration configuration : reader.getConfiguration().getExistingConfigurations())
                if (configuration.getAssignedScope().equals(targetScope))
                    targetConfiguration = configuration;

            if (targetConfiguration == null)
                throw new MissingConfigurationException("Scope " + targetScope + " has no configuration");

            int targetUserId = UserUtils.getByName(reader.getUsers(), targetConfiguration.getAccounts().get(Account.Type.SCOPE).getUser()).getId();
            int targetGroup = targetConfiguration.getDefaultGroup();

            System.out.println("ID " + id + " targetUserId " + targetUserId + " targetGroup " + targetGroup + " Public group " + scopePublicGroup);
            // reader.assignOwnership(ids,(int) targetUserId, (int)targetGroup);

            System.out.println("deleting meta " + id);

            GNClient client = new GNClient(geonetworkurl);
            client.login(geonetworkUser, geonetworkPwd);

            client.deleteMetadata(id);

            System.out.println("inserting meta");
            client = new GNClient(geonetworkurl);
            client.login(geonetworkScopeUser, geonetworkScopePassword);
            File tmetafile = GenericLayerMetadata.meta2File(meta);

            try {
                GeoNetworkAdministration readerScope = GeoNetwork.get();
                readerScope.login(LoginLevel.SCOPE);
                GNInsertConfiguration configuration = readerScope.getCurrentUserConfiguration("datasets", "_none_");
                readerScope.insertMetadata(configuration, meta);
            } catch (Exception e) {
                e.printStackTrace();
                System.out.println("Using standard client");
                long metaid = client.insertMetadata(new GNInsertConfiguration("" + scopePublicGroup, "datasets", "_none_", true), tmetafile);
                System.out.println("Generated Metaid " + metaid);
            }

            tmetafile.delete();
            System.out.println("Done with " + otitle);
            Thread.sleep(2000);

        }

        System.out.println("All done");
    }

    public static void changeScope(String title, String startScope, String targetScope) throws Exception {

        ScopeProvider.instance.set(startScope);
        GeoNetworkAdministration reader = GeoNetwork.get();
        reader.login(LoginLevel.ADMIN);

        // Configure search request
        GNSearchRequest req = new GNSearchRequest();
        req.addParam(GNSearchRequest.Param.any, title);
        req.addConfig(GNSearchRequest.Config.similarity, "1");
        GNSearchResponse resp = reader.query(req);

        // Iterate through results and access found metadata
        Long id = 0L;
        for (GNSearchResponse.GNMetadata metadata : resp) {
            id = metadata.getId();
            System.out.println("ID " + id);
            System.out.println("Name " + metadata.getInfo().getName());
            Metadata meta = reader.getById(id);
            Identification idf = meta.getIdentificationInfo().iterator().next();
            String otitle = idf.getCitation().getTitle().toString();
            System.out.println("Title " + otitle);
            break;
        }

        System.out.println("Publishing " + id);
        // look for target configuration
        ScopeConfiguration targetConfiguration = null;
        targetScope = "gCubeApps";

        for (ScopeConfiguration configuration : reader.getConfiguration().getExistingConfigurations())
            if (configuration.getAssignedScope().equals(targetScope))
                targetConfiguration = configuration;

        if (targetConfiguration == null)
            throw new MissingConfigurationException("Scope " + targetScope + " has no configuration");

        int targetUserId = UserUtils.getByName(reader.getUsers(), targetConfiguration.getAccounts().get(Account.Type.SCOPE).getUser()).getId();
        int targetGroup = targetConfiguration.getDefaultGroup();
        System.out.println("ID " + id + " targetUserId " + targetUserId + " targetGroup " + targetGroup);
        List<Long> ids = Collections.singletonList((long) id);
        reader.assignOwnership(ids, (int) targetUserId, (int) targetGroup);

        System.out.println("Assigned " + id);
    }

}
@@ -0,0 +1,155 @@
package org.gcube.dataanalysis.geo.batch;

import it.geosolutions.geonetwork.GNClient;
import it.geosolutions.geonetwork.util.GNInsertConfiguration;
import it.geosolutions.geonetwork.util.GNSearchRequest;
import it.geosolutions.geonetwork.util.GNSearchResponse;

import java.io.File;
import java.util.Collections;
import java.util.List;

import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.geo.meta.GenericLayerMetadata;
import org.gcube.spatial.data.geonetwork.GeoNetwork;
import org.gcube.spatial.data.geonetwork.GeoNetworkAdministration;
import org.gcube.spatial.data.geonetwork.LoginLevel;
import org.gcube.spatial.data.geonetwork.model.Account;
import org.gcube.spatial.data.geonetwork.model.ScopeConfiguration;
import org.gcube.spatial.data.geonetwork.model.faults.MissingConfigurationException;
import org.gcube.spatial.data.geonetwork.utils.UserUtils;
import org.opengis.metadata.Metadata;
import org.opengis.metadata.identification.Identification;
import org.opengis.metadata.identification.TopicCategory;

public class OpenLayerToVREs_Dev_ {

    //static String geonetworkurl = "http://geonetwork.d4science.org/geonetwork/";
    static String geonetworkurl = "http://geoserver-dev2.d4science-ii.research-infrastructures.eu/geonetwork/";

    //static String geoserverurl = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver";

    static String geonetworkUser = "admin";
    static String geonetworkPwd = "Geey6ohz";

    public static void main(String[] args) throws Exception {
        //String startScope = "/d4science.research-infrastructures.eu/gCubeApps/ScalableDataMining";
        //String targetScope = "/d4science.research-infrastructures.eu/gCubeApps";
        String targetScope = "/gcube/devsec";
        //String title ="Ocean Surface Zonal Currents (u) in 1992 from OSCAR Third Degree Sea Surface Velocity [oscar_vel1992_180.nc]";
        String title = "Oscar";
        changeScope2(title, targetScope, targetScope);
    }

    public static void changeScope2(String title, String startScope, String targetScope) throws Exception {

        ScopeProvider.instance.set(startScope);
        GeoNetworkAdministration reader = GeoNetwork.get();
        reader.login(LoginLevel.ADMIN);

        //Configure search request
        GNSearchRequest req = new GNSearchRequest();
        req.addParam(GNSearchRequest.Param.any, title);
        req.addConfig(GNSearchRequest.Config.similarity, "1");
        GNSearchResponse resp = reader.query(req);

        //Iterate through results and access found metadata
        Long id = 0L;
        Metadata meta = null;
        for (GNSearchResponse.GNMetadata metadata : resp) {
            id = metadata.getId();
            System.out.println("ID " + id);
            System.out.println("Name " + metadata.getInfo().getName());
            meta = reader.getById(id);
            Identification idf = meta.getIdentificationInfo().iterator().next();
            String otitle = idf.getCitation().getTitle().toString();
            System.out.println("Title " + otitle);
            break;
        }

        System.out.println("Publishing " + id);
        //look for target configuration
        ScopeConfiguration targetConfiguration = null;
        targetScope = targetScope.substring(targetScope.lastIndexOf("/") + 1);
        System.out.println("target scope " + targetScope);
        for (ScopeConfiguration configuration : reader.getConfiguration().getExistingConfigurations())
            if (configuration.getAssignedScope().equals(targetScope)) targetConfiguration = configuration;

        if (targetConfiguration == null)
            throw new MissingConfigurationException("Scope " + targetScope + " has no configuration");

        int targetUserId = UserUtils.getByName(reader.getUsers(), targetConfiguration.getAccounts().get(Account.Type.SCOPE).getUser()).getId();
        int targetGroup = targetConfiguration.getDefaultGroup();

        System.out.println("ID " + id + " targetUserId " + targetUserId + " targetGroup " + targetGroup);
        List<Long> ids = Collections.singletonList((long) id);
        //reader.assignOwnership(ids,(int) targetUserId, (int)targetGroup);

        GNClient client = new GNClient(geonetworkurl);
        client.login(geonetworkUser, geonetworkPwd);
        System.out.println("deleting meta " + id);

        client.deleteMetadata(id);

        System.out.println("inserting meta");

        File tmetafile = GenericLayerMetadata.meta2File(meta);
        client.insertMetadata(new GNInsertConfiguration("" + targetGroup, "datasets", "_none_", true), tmetafile);
        tmetafile.delete();

        System.out.println("All done");
    }

    public static void changeScope(String title, String startScope, String targetScope) throws Exception {

        ScopeProvider.instance.set(startScope);
        GeoNetworkAdministration reader = GeoNetwork.get();
        reader.login(LoginLevel.ADMIN);

        //Configure search request
        GNSearchRequest req = new GNSearchRequest();
        req.addParam(GNSearchRequest.Param.any, title);
        req.addConfig(GNSearchRequest.Config.similarity, "1");
        GNSearchResponse resp = reader.query(req);

        //Iterate through results and access found metadata
        Long id = 0L;
        for (GNSearchResponse.GNMetadata metadata : resp) {
            id = metadata.getId();
            System.out.println("ID " + id);
            System.out.println("Name " + metadata.getInfo().getName());
            Metadata meta = reader.getById(id);
            Identification idf = meta.getIdentificationInfo().iterator().next();
            String otitle = idf.getCitation().getTitle().toString();
            System.out.println("Title " + otitle);
            break;
        }

        System.out.println("Publishing " + id);
        //look for target configuration
        ScopeConfiguration targetConfiguration = null;
        targetScope = "gCubeApps";

        for (ScopeConfiguration configuration : reader.getConfiguration().getExistingConfigurations())
            if (configuration.getAssignedScope().equals(targetScope)) targetConfiguration = configuration;

        if (targetConfiguration == null)
            throw new MissingConfigurationException("Scope " + targetScope + " has no configuration");

        int targetUserId = UserUtils.getByName(reader.getUsers(), targetConfiguration.getAccounts().get(Account.Type.SCOPE).getUser()).getId();
        int targetGroup = targetConfiguration.getDefaultGroup();
        System.out.println("ID " + id + " targetUserId " + targetUserId + " targetGroup " + targetGroup);
        List<Long> ids = Collections.singletonList((long) id);
        reader.assignOwnership(ids, (int) targetUserId, (int) targetGroup);

        System.out.println("Assigned " + id);
    }

}
@@ -0,0 +1,93 @@
package org.gcube.dataanalysis.geo.batch;

import java.text.SimpleDateFormat;
import java.util.Date;

import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.geo.meta.GenericLayerMetadata;
import org.gcube.dataanalysis.geo.meta.NetCDFMetadata;
import org.gcube.spatial.data.geonetwork.GeoNetwork;
import org.gcube.spatial.data.geonetwork.GeoNetworkAdministration;
import org.gcube.spatial.data.geonetwork.LoginLevel;
import org.gcube.spatial.data.geonetwork.model.Account;
import org.gcube.spatial.data.geonetwork.model.ScopeConfiguration;
import org.gcube.spatial.data.geonetwork.model.faults.MissingConfigurationException;
import org.gcube.spatial.data.geonetwork.utils.UserUtils;
import org.opengis.metadata.identification.TopicCategory;

public class OscarMetadataInsertDev {

    //static String geonetworkurl = "http://geonetwork.d4science.org/geonetwork/";
    static String geonetworkurl = "http://geoserver-dev2.d4science-ii.research-infrastructures.eu/geonetwork/";

    static String user = "admin";
    static String password = "Geey6ohz";

    public static void main(String[] args) throws Exception {
        GenericLayerMetadata metadataInserter = new GenericLayerMetadata();
        metadataInserter.setGeonetworkUrl(geonetworkurl);
        metadataInserter.setGeonetworkUser(user);
        metadataInserter.setGeonetworkPwd(password);
        //String scope ="/d4science.research-infrastructures.eu/gCubeApps";
        String scope = "/gcube/devsec";
        ScopeProvider.instance.set(scope);
        String targetScope = scope.substring(scope.lastIndexOf("/") + 1);
        System.out.println("target scope " + targetScope);
        GeoNetworkAdministration reader = GeoNetwork.get();
        reader.login(LoginLevel.ADMIN);
        ScopeConfiguration targetConfiguration = null;
        for (ScopeConfiguration configuration : reader.getConfiguration().getExistingConfigurations()) {
            if (configuration.getAssignedScope().equals(targetScope)) {
                targetConfiguration = configuration;
            }
        }

        if (targetConfiguration == null)
            throw new MissingConfigurationException("Scope " + targetScope + " has no configuration");

        int targetUserId = UserUtils.getByName(reader.getUsers(), targetConfiguration.getAccounts().get(Account.Type.SCOPE).getUser()).getId();
        int targetGroup = targetConfiguration.getDefaultGroup();

        System.out.println("Target group:" + targetGroup);
        metadataInserter.setGeonetworkGroup("" + targetGroup);

        metadataInserter.setResolution(0.3326);
        metadataInserter.setXLeftLow(-180);
        metadataInserter.setYLeftLow(-80);
        metadataInserter.setXRightUpper(180);
        metadataInserter.setYRightUpper(80);

        Oscar(metadataInserter);
        metadataInserter.insertMetaData();

    }

    private static void Oscar(GenericLayerMetadata metadataInserter) throws Exception {

        metadataInserter.setTitle("Ocean Surface Zonal and Meridional currents between 1992 and 2015 from Ocean Surface Current Analyses Real-time (OSCAR-NASA)");

        metadataInserter.setCategoryTypes("_" + TopicCategory.ENVIRONMENT.name() + "_" + "_" + TopicCategory.CLIMATOLOGY_METEOROLOGY_ATMOSPHERE.name() + "_");

        metadataInserter.setAbstractField("Ocean Surface Zonal and Meridional currents between 1992 and 2015 from OSCAR - Ocean Surface Current Analyses Real-time (NASA) - https://podaac.jpl.nasa.gov/dataset/OSCAR_L4_OC_third-deg");

        metadataInserter.setCustomTopics("Ocean Surface Currents", "OSCAR", "NASA", "LAS", "Ocean Surface Zonal Currents", "Ocean Surface Meridional Currents");

        metadataInserter.setAuthor("D4Science");

        SimpleDateFormat formatter = new SimpleDateFormat("yyyy");

        Date datestart = formatter.parse("1992");
        Date dateend = formatter.parse("2015");

        metadataInserter.setStartDate(datestart);
        metadataInserter.setEndDate(dateend);

        String[] urls = { "http://thredds-d-d4s.d4science.org/thredds/fileServer/public/netcdf/oscar-1992-2015.nc" };
        String[] protocols = { "HTTP" };

        metadataInserter.customMetaDataInsert(urls, protocols);
    }

}
@@ -0,0 +1,93 @@
package org.gcube.dataanalysis.geo.batch;

import java.text.SimpleDateFormat;
import java.util.Date;

import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.geo.meta.GenericLayerMetadata;
import org.gcube.dataanalysis.geo.meta.NetCDFMetadata;
import org.gcube.spatial.data.geonetwork.GeoNetwork;
import org.gcube.spatial.data.geonetwork.GeoNetworkAdministration;
import org.gcube.spatial.data.geonetwork.LoginLevel;
import org.gcube.spatial.data.geonetwork.model.Account;
import org.gcube.spatial.data.geonetwork.model.ScopeConfiguration;
import org.gcube.spatial.data.geonetwork.model.faults.MissingConfigurationException;
import org.gcube.spatial.data.geonetwork.utils.UserUtils;
import org.opengis.metadata.identification.TopicCategory;

public class OscarMetadataInsertProd {

    static String geonetworkurl = "http://geonetwork.d4science.org/geonetwork/";
    //static String geonetworkurl = "http://geoserver-dev2.d4science-ii.research-infrastructures.eu/geonetwork/";

    static String user = "admin";
    static String password = "kee9GeeK";

    public static void main(String[] args) throws Exception {
        GenericLayerMetadata metadataInserter = new GenericLayerMetadata();
        metadataInserter.setGeonetworkUrl(geonetworkurl);
        metadataInserter.setGeonetworkUser(user);
        metadataInserter.setGeonetworkPwd(password);
        String scope = "/d4science.research-infrastructures.eu/gCubeApps";
        // String scope ="/gcube/devsec";
        ScopeProvider.instance.set(scope);
        String targetScope = scope.substring(scope.lastIndexOf("/") + 1);
        System.out.println("target scope " + targetScope);
        GeoNetworkAdministration reader = GeoNetwork.get();
        reader.login(LoginLevel.ADMIN);
        ScopeConfiguration targetConfiguration = null;
        for (ScopeConfiguration configuration : reader.getConfiguration().getExistingConfigurations()) {
            if (configuration.getAssignedScope().equals(targetScope)) {
                targetConfiguration = configuration;
            }
        }

        if (targetConfiguration == null)
            throw new MissingConfigurationException("Scope " + targetScope + " has no configuration");

        int targetUserId = UserUtils.getByName(reader.getUsers(), targetConfiguration.getAccounts().get(Account.Type.SCOPE).getUser()).getId();
        int targetGroup = targetConfiguration.getDefaultGroup();

        System.out.println("Target group:" + targetGroup);
        metadataInserter.setGeonetworkGroup("" + targetGroup);

        metadataInserter.setResolution(0.3326);
        metadataInserter.setXLeftLow(-180);
        metadataInserter.setYLeftLow(-80);
        metadataInserter.setXRightUpper(180);
        metadataInserter.setYRightUpper(80);

        Oscar(metadataInserter);
        metadataInserter.insertMetaData();

    }

    private static void Oscar(GenericLayerMetadata metadataInserter) throws Exception {

        metadataInserter.setTitle("Ocean Surface Zonal and Meridional currents between 1992 and 2015 from Ocean Surface Current Analyses Real-time (OSCAR-NASA)");

        metadataInserter.setCategoryTypes("_" + TopicCategory.ENVIRONMENT.name() + "_" + "_" + TopicCategory.CLIMATOLOGY_METEOROLOGY_ATMOSPHERE.name() + "_");

        metadataInserter.setAbstractField("Ocean Surface Zonal and Meridional currents between 1992 and 2015 from OSCAR - Ocean Surface Current Analyses Real-time (NASA) - https://podaac.jpl.nasa.gov/dataset/OSCAR_L4_OC_third-deg");

        metadataInserter.setCustomTopics("Ocean Surface Currents", "OSCAR", "NASA", "LAS", "Ocean Surface Zonal Currents", "Ocean Surface Meridional Currents");

        metadataInserter.setAuthor("D4Science");

        SimpleDateFormat formatter = new SimpleDateFormat("yyyy");

        Date datestart = formatter.parse("1992");
        Date dateend = formatter.parse("2015");

        metadataInserter.setStartDate(datestart);
        metadataInserter.setEndDate(dateend);

        String[] urls = { "http://thredds.d4science.org/thredds/fileServer/public/netcdf/oscar_vel/oscar_vel1999_2015.nc" };
        String[] protocols = { "HTTP" };

        metadataInserter.customMetaDataInsert(urls, protocols);
    }

}
@@ -288,7 +288,7 @@ public class GenericLayerMetadata {
        this.geonetworkGroup = geonetworkGroup;
    }

    static File meta2File(Metadata meta) throws IOException, JAXBException {
    public static File meta2File(Metadata meta) throws IOException, JAXBException {
        File temp = File.createTempFile("meta", ".xml");
        FileWriter writer = new FileWriter(temp);
        writer.write(XML.marshal(meta));
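For context, the visibility change above (meta2File made public) is what lets the new batch classes serialize GeoNetwork metadata to a temporary XML file before re-inserting it. A minimal sketch of that usage, mirroring OpenLayerToVREs; the endpoint, credentials and group id are placeholders, not values from the commit:

import it.geosolutions.geonetwork.GNClient;
import it.geosolutions.geonetwork.util.GNInsertConfiguration;
import java.io.File;
import org.gcube.dataanalysis.geo.meta.GenericLayerMetadata;
import org.opengis.metadata.Metadata;

public class Meta2FileUsageSketch {
    // Republish an ISO metadata record under a given GeoNetwork group (placeholders for endpoint/credentials/group).
    public static void republish(Metadata meta, String geonetworkUrl, String user, String pwd, int group) throws Exception {
        GNClient client = new GNClient(geonetworkUrl);
        client.login(user, pwd);
        File tmetafile = GenericLayerMetadata.meta2File(meta);   // serialize the metadata to a temporary XML file
        client.insertMetadata(new GNInsertConfiguration("" + group, "datasets", "_none_", true), tmetafile);
        tmetafile.delete();                                      // clean up, as the batch classes do
    }
}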
@@ -1,16 +1,23 @@
package org.gcube.dataanalysis.geo.meta;

import it.geosolutions.geonetwork.util.GNPriv;
import it.geosolutions.geonetwork.util.GNPrivConfiguration;
import it.geosolutions.geoserver.rest.encoder.GSLayerEncoder;
import it.geosolutions.geoserver.rest.encoder.feature.GSFeatureTypeEncoder;

import java.util.Date;
import java.util.EnumSet;
import java.util.Set;

import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.spatial.data.geonetwork.GeoNetwork;
import org.gcube.spatial.data.geonetwork.GeoNetworkPublisher;
import org.gcube.spatial.data.geonetwork.LoginLevel;
import org.gcube.spatial.data.geonetwork.configuration.Configuration;
import org.gcube.spatial.data.geonetwork.iso.GcubeISOMetadata;
import org.gcube.spatial.data.geonetwork.iso.Thesaurus;
import org.gcube.spatial.data.geonetwork.model.ScopeConfiguration;
import org.gcube.spatial.data.gis.GISInterface;
import org.gcube.spatial.data.gis.model.report.PublishResponse;
import org.geotoolkit.metadata.iso.extent.DefaultExtent;

@@ -20,11 +27,11 @@ import org.opengis.metadata.spatial.GeometricObjectType;
import org.opengis.metadata.spatial.TopologyLevel;

public class PolyMapMetadata {

    private static final String crs = "GEOGCS[\"WGS 84\", DATUM[\"World Geodetic System 1984\", SPHEROID[\"WGS 84\", 6378137.0, 298.257223563, AUTHORITY[\"EPSG\",\"7030\"]]," + "AUTHORITY[\"EPSG\",\"6326\"]], PRIMEM[\"Greenwich\", 0.0, AUTHORITY[\"EPSG\",\"8901\"]], UNIT[\"degree\", 0.017453292519943295]," + "AXIS[\"Geodetic longitude\", EAST], AXIS[\"Geodetic latitude\", NORTH], AUTHORITY[\"EPSG\",\"4326\"]]";

    public static PublishResponse publishTable(String scope, String tableName, double resolution, String username, String layerName, String defaultStyle, String workspace, String datastore, String purpose, String credits, String keyword, boolean isprivate) throws Exception {

        GSFeatureTypeEncoder fte = new GSFeatureTypeEncoder();
        fte.setEnabled(true);
        fte.setLatLonBoundingBox(-180.0, -90.0, 180.0, 90.0, crs);

@@ -34,39 +41,39 @@
        le.setDefaultStyle(defaultStyle);
        le.setEnabled(true);
        AnalysisLogger.getLogger().debug("Filling Metadata");
        GcubeISOMetadata meta = fillMeta(resolution, username, layerName, scope, tableName,purpose, credits, keyword);
        AnalysisLogger.getLogger().debug("Getting GIS from scope "+scope);
        GcubeISOMetadata meta = fillMeta(resolution, username, layerName, scope, tableName, purpose, credits, keyword);
        AnalysisLogger.getLogger().debug("Getting GIS from scope " + scope);
        GISInterface gis = GISInterface.get();
        Configuration gnConfig = gis.getGeoNetworkReader().getConfiguration();
        AnalysisLogger.getLogger().debug("Using the following GNetwork:" + gnConfig.getGeoNetworkEndpoint());
        LoginLevel level = LoginLevel.SCOPE;
        if (isprivate)
            level = LoginLevel.PRIVATE;

        // PublishResponse resp = gis.publishDBTable(workspace, datastore, fte, le, meta.getMetadata(), new GNInsertConfiguration(gnConfig.getScopeGroup() + "", "datasets", "_none_", true), LoginLevel.DEFAULT);
        PublishResponse resp = gis.publishDBTable(workspace, datastore, fte, le, meta.getMetadata(), "datasets", "_none_", level);

        //PublishResponse resp = gis.publishDBTable(workspace, datastore, fte, le, meta.getMetadata(), new GNInsertConfiguration(gnConfig.getScopeGroup() + "", "datasets", "_none_", true), LoginLevel.DEFAULT);
        PublishResponse resp = gis.publishDBTable(workspace, datastore, fte, le, meta.getMetadata(), "datasets","_none_", level);
        AnalysisLogger.getLogger().debug(resp);
        AnalysisLogger.getLogger().debug("ID:" + resp.getReturnedMetaId());
        AnalysisLogger.getLogger().debug("Result:" + resp.getMetaOperationResult());
        if (resp.getReturnedMetaId()==0)
        if (resp.getReturnedMetaId() == 0)
            return null;
        else
            return resp;
    }

    private static GcubeISOMetadata fillMeta(double resolution, String username, String title, String scope, String tableName ,String purpose, String credits, String keyword) throws Exception {
    private static GcubeISOMetadata fillMeta(double resolution, String username, String title, String scope, String tableName, String purpose, String credits, String keyword) throws Exception {

        if (scope == null)
            scope = ScopeProvider.instance.get();

        AnalysisLogger.getLogger().debug("Setting scope for filling Meta");
        ScopeProvider.instance.set(scope);

        AnalysisLogger.getLogger().debug("Fulfilling metadata");
        GcubeISOMetadata meta = new GcubeISOMetadata();
        AnalysisLogger.getLogger().debug("Fulfilling metadata Begin");
        meta.setAbstractField("This metadata has been automatically generated from the Statistical Manager on the basis of a distribution of points and according the resolution of "+resolution+" degrees.");
        meta.setAbstractField("This metadata has been automatically generated from the Statistical Manager on the basis of a distribution of points and according the resolution of " + resolution + " degrees.");
        meta.setCreationDate(new Date(System.currentTimeMillis()));
        meta.setExtent((DefaultExtent) DefaultExtent.WORLD);
        meta.setGeometricObjectType(GeometricObjectType.SURFACE);

@@ -81,7 +88,6 @@ public class PolyMapMetadata {
        meta.setTopologyLevel(TopologyLevel.GEOMETRY_ONLY);
        meta.setUser(username);

        meta.addGraphicOverview("http://www.d4science.org/D4ScienceOrg-Social-theme/images/custom/D4ScienceInfrastructure.png");
        meta.addCredits(credits);
        Thesaurus generalThesaurus = meta.getConfig().getThesauri().get("General");

@@ -89,10 +95,10 @@
        meta.addKeyword(username, generalThesaurus);
        meta.addKeyword("StatisticalManager", generalThesaurus);
        meta.addKeyword(keyword, generalThesaurus);
        meta.addKeyword(tableName,generalThesaurus);
        meta.addKeyword(tableName, generalThesaurus);
        meta.addTopicCategory(TopicCategory.BIOTA);
        AnalysisLogger.getLogger().debug("Fulfilling done");
        return meta;
    }

}
@@ -0,0 +1,62 @@
package org.gcube.dataanalysis.geo.test.infra;

import java.util.List;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;

public class TestSHAPEFILEPUBLISHER {

    static AlgorithmConfiguration[] configs = { testSFImporter() };

    public static void main(String[] args) throws Exception {

        System.out.println("TEST 1");

        for (int i = 0; i < configs.length; i++) {
            AnalysisLogger.getLogger().debug("Executing: " + configs[i].getAgent());
            List<ComputationalAgent> trans = null;
            trans = TransducerersFactory.getTransducerers(configs[i]);
            trans.get(0).init();
            Regressor.process(trans.get(0));
            StatisticalType st = trans.get(0).getOutput();
            AnalysisLogger.getLogger().debug("ST:" + st);
            trans = null;
        }
    }

    private static AlgorithmConfiguration testSFImporter() {

        AlgorithmConfiguration config = new AlgorithmConfiguration();

        config.setAgent("SHAPEFILE_PUBLISHER");
        config.setConfigPath("./cfg/");
        config.setPersistencePath("./");
        config.setParam("DBUser", "postgres");
        config.setParam("DBPassword", "d4science2");
        config.setParam("DBUrl", "jdbc:postgresql://geoserver-test.d4science-ii.research-infrastructures.eu:5432/timeseriesgisdb");
        config.setParam("driver", "org.postgresql.Driver");
        // config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
        config.setGcubeScope("/gcube/devsec/devVRE");

        config.setParam("MapTitle", "Test local shapefile");
        config.setParam("MapAbstract", "A local test");
        config.setParam("ShapeFileZip", "shapefiletest.zip");
        config.setParam("ShapeFileName", "shapefile2.shp");
        config.setParam("Topics", "test|shapefile");
        config.setParam("PublicationLevel", "PUBLIC");
        config.setParam("ServiceUserName", "gianpaolo.coro");

        return config;
    }

}
@@ -13,6 +13,8 @@ import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.List;

public class GdalConverter {

@@ -76,6 +78,33 @@
        return ascTiffFile;
    }

    public static List<String> command(final String cmdline, final String directory) {
        try {
            Process process =
                new ProcessBuilder(new String[] { "bash", "-c", cmdline })
                    .redirectErrorStream(true)
                    .directory(new File(directory))
                    .start();

            List<String> output = new ArrayList<String>();
            BufferedReader br = new BufferedReader(new InputStreamReader(process.getInputStream()));
            String line = null;
            while ((line = br.readLine()) != null)
                output.add(line);

            //There should really be a timeout here.
            if (0 != process.waitFor())
                return null;

            return output;

        } catch (Exception e) {
            return null;
        }
    }

    public static String ExecuteGetLine(String cmd) {

        Process process = null;
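The comment in command() above notes that a timeout is missing. A minimal sketch of one way to bound the wait, assuming Java 8+ (Process.waitFor(long, TimeUnit)); this is an illustrative variant, not part of the commit:

import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;

public class CommandWithTimeoutSketch {
    // Variant of GdalConverter.command() that bounds the wait; returns null on error or timeout, like the original.
    public static List<String> command(String cmdline, String directory, long timeoutMinutes) {
        try {
            Process process = new ProcessBuilder("bash", "-c", cmdline)
                    .redirectErrorStream(true)
                    .directory(new File(directory))
                    .start();

            // Collect the merged stdout/stderr, as the original does.
            List<String> output = new ArrayList<String>();
            BufferedReader br = new BufferedReader(new InputStreamReader(process.getInputStream()));
            String line;
            while ((line = br.readLine()) != null)
                output.add(line);

            // Bounded wait instead of the unbounded waitFor() used above (Java 8+).
            if (!process.waitFor(timeoutMinutes, TimeUnit.MINUTES)) {
                process.destroy();   // timed out: kill the child process
                return null;
            }
            return process.exitValue() == 0 ? output : null;
        } catch (Exception e) {
            return null;
        }
    }
}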