git-svn-id: http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/spatial-data/gis-interface@144532 82a268e6-3cf1-43bd-a215-b396298e98cf
parent 0ed720a8ac
commit 04ebcbb4c9
@@ -26,4 +26,9 @@
<Changeset component="gis-interface.2-3-0" date="2016-09-09">
	<Change>gCube 4 SDI policies integration</Change>
</Changeset>
+<Changeset component="gis-interface.2-4-0" date="2017-02-09">
+	<Change>Cached information</Change>
+	<Change>Workspace methods</Change>
+	<Change>Datastore methods</Change>
+</Changeset>
</ReleaseNotes>
pom.xml
@@ -8,7 +8,7 @@
	</parent>
	<groupId>org.gcube.spatial.data</groupId>
	<artifactId>gis-interface</artifactId>
-	<version>2.3.1-SNAPSHOT</version>
+	<version>2.4.0-SNAPSHOT</version>
	<name>gis-interface</name>

	<properties>
Configuration.java
@@ -0,0 +1,60 @@
package org.gcube.spatial.data.gis;

import java.io.IOException;
import java.util.HashMap;
import java.util.Properties;

import lombok.extern.slf4j.Slf4j;

@Slf4j
public class Configuration {

	private static final Long DEFAULT_TTL=60000l;

	private static Configuration singleton=null;

	public static synchronized Configuration get() throws IOException{
		if(singleton==null) singleton=new Configuration();
		return singleton;
	}

	public static final String GEOSERVER_HOSTED_LAYERS_TTL="geoserver.cache.hostedLayers.TTL";
	public static final String GEOSERVER_STYLES_TTL="geoserver.cache.styles.TTL";
	public static final String GEOSERVER_WORKSPACE_TTL="geoserver.cache.workspaces.TTL";
	public static final String GEOSERVER_DATASTORE_TTL="geoserver.cache.datastores.TTL";
	public static final String IS_CACHE_TTL="is.cache.TTL";
	public static final String IS_SERVICE_PROFILE_CATEGORY="is.serviceProfile.category";
	public static final String IS_SERVICE_PROFILE_PLATFORM_NAME="is.serviceProfile.platform.name";
	public static final String IS_ACCESS_POLICY="is.accessPolicy";

	HashMap<String,String> properties;
	Properties props;

	private Configuration() throws IOException {
		props=new Properties();
		try {
			props.load(Configuration.class.getClassLoader().getResourceAsStream("gis-interface.properties"));
		} catch (IOException e) {
			log.warn("****************** Unable to load properties file ****************** ",e);
			throw e;
		}
	}

	public String getProperty(String propertyName){
		return props.getProperty(propertyName);
	}

	public static Long getTTL(String TTLParameter) {
		try{
			return Long.parseLong(Configuration.get().getProperty(TTLParameter));
		}catch(Exception e){
			log.warn("Unable to get TTL "+TTLParameter,e);
			return DEFAULT_TTL;
		}
	}
}
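getTTL() falls back to DEFAULT_TTL (60 s) whenever a key is missing or unparsable, so callers never handle parse failures themselves. A minimal usage sketch, assuming gis-interface.properties (added later in this commit) is on the classpath; the variable names are illustrative only:

	// Styles-cache TTL: 60000 ms in the shipped properties file
	Long stylesTTL = Configuration.getTTL(Configuration.GEOSERVER_STYLES_TTL);
	// IS access policy: "MOSTUNLOAD" in the shipped defaults
	String policy = Configuration.get().getProperty(Configuration.IS_ACCESS_POLICY);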
GISInterface.java
@@ -3,8 +3,9 @@ package org.gcube.spatial.data.gis;
import java.io.File;
import java.io.FileNotFoundException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.NoSuchElementException;
@@ -18,8 +19,11 @@ import org.gcube.spatial.data.geonetwork.LoginLevel;
import org.gcube.spatial.data.geonetwork.configuration.XMLAdapter;
import org.gcube.spatial.data.geonetwork.iso.BoundingBox;
import org.gcube.spatial.data.geonetwork.utils.ScopeUtils;
import org.gcube.spatial.data.gis.is.AbstractGeoServerDescriptor;
import org.gcube.spatial.data.gis.is.GeoServerDescriptor;
import org.gcube.spatial.data.gis.is.InfrastructureCrawler;
import org.gcube.spatial.data.gis.is.cache.ExplicitCache;
import org.gcube.spatial.data.gis.is.cache.GeoServerCache;
import org.gcube.spatial.data.gis.is.cache.ISGeoServerCache;
import org.gcube.spatial.data.gis.meta.MetadataEnricher;
import org.gcube.spatial.data.gis.model.report.DeleteReport;
import org.gcube.spatial.data.gis.model.report.PublishResponse;
@@ -27,25 +31,25 @@ import org.gcube.spatial.data.gis.model.report.Report;
import org.gcube.spatial.data.gis.model.report.Report.OperationState;
import org.opengis.metadata.Metadata;
import org.opengis.metadata.citation.DateType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;

import it.geosolutions.geonetwork.util.GNInsertConfiguration;
import it.geosolutions.geoserver.rest.GeoServerRESTManager;
import it.geosolutions.geoserver.rest.GeoServerRESTPublisher;
import it.geosolutions.geoserver.rest.GeoServerRESTReader;
import it.geosolutions.geoserver.rest.encoder.GSAbstractStoreEncoder;
import it.geosolutions.geoserver.rest.encoder.GSLayerEncoder;
import it.geosolutions.geoserver.rest.encoder.GSResourceEncoder.ProjectionPolicy;
import it.geosolutions.geoserver.rest.encoder.feature.GSFeatureTypeEncoder;
+import lombok.extern.slf4j.Slf4j;

-public class GISInterface {
+@Slf4j
+public class GISInterface{

-	final static Logger logger= LoggerFactory.getLogger(GISInterface.class);
-
-	private static final long MAX_GEOSERVER_CACHE_TIME=2*60*1000;

-	public static GISInterface get() throws Exception{
-		return new GISInterface();
+	public static GISInterface get(AbstractGeoServerDescriptor... descriptors) throws Exception{
+		if(descriptors!=null&&descriptors.length>0)
+			return new GISInterface(Arrays.asList(descriptors));
+		else return new GISInterface();
	}
@@ -57,8 +61,14 @@ public class GISInterface {

	private GISInterface() throws Exception{
+		theActualCache=new ISGeoServerCache();
	}

+	private <T extends AbstractGeoServerDescriptor> GISInterface(Collection<T> collection){
+		theActualCache=new ExplicitCache<T>(collection);
+	}

	public void setToRegisterXMLAdapters(List<XMLAdapter> toRegisterXMLAdapters) {
		this.toRegisterXMLAdapters = toRegisterXMLAdapters;
	}
@@ -73,37 +83,25 @@ public class GISInterface {
		return getGN();
	}

-	public GeoServerRESTReader getGeoServerReader(ResearchMethod method,boolean forceRefresh) throws Exception{
-		GeoServerDescriptor desc=getGeoServerSet(forceRefresh).last();
-		return getGeoServerReader(desc);
-	}
-
-	public GeoServerRESTReader getGeoServerReader(GeoServerDescriptor desc)throws Exception{
-		return getGeoServerManager(desc.getUrl(), desc.getUser(), desc.getPassword()).getReader();
-	}
-
-	public GeoServerRESTReader getGeoServerReader(String url,String user,String password) throws IllegalArgumentException, MalformedURLException{
-		return getGeoServerManager(url, user, password).getReader();
-	}
-
-	public GeoServerRESTReader getGeoServerReader(String url) throws MalformedURLException{
-		return new GeoServerRESTReader(url);
-	}
-
-	public GeoServerDescriptor getCurrentGeoServerDescriptor(){
-		return getGeoServerSet(false).last();
-	}
-
-	public SortedSet<GeoServerDescriptor> getGeoServerDescriptorSet(boolean forceRefresh){
-		return getGeoServerSet(forceRefresh);
-	}
-
-	//******************* Create logic
-
-	public Report.OperationState createWorkspace(String workspace){
-		return null;
-	}
-
+	/**
+	 * Publishes toPublishFile [GeoTIFF] in the default GeoServer descriptor
+	 *
+	 * @param workspace
+	 * @param storeName
+	 * @param coverageName
+	 * @param toPublishFile
+	 * @param srs
+	 * @param policy
+	 * @param defaultStyle
+	 * @param bbox
+	 * @param geoNetworkMeta
+	 * @param gnCategory
+	 * @param gnStylesheet
+	 * @param level
+	 * @param promoteMetadataPublishing
+	 * @return
+	 */
	public PublishResponse addGeoTIFF(String workspace, String storeName, String coverageName,
			File toPublishFile,String srs,
			ProjectionPolicy policy,String defaultStyle, double[] bbox,
@@ -120,6 +118,20 @@ public class GISInterface {
		}
	}

+	/**
+	 * Publishes the specified fte in the default GeoServer
+	 *
+	 * @param workspace
+	 * @param storeName
+	 * @param fte
+	 * @param layerEncoder
+	 * @param geoNetworkMeta
+	 * @param gnCategory
+	 * @param gnStylesheet
+	 * @param level
+	 * @param promoteMetadataPublishing
+	 * @return
+	 */
	public PublishResponse publishDBTable(String workspace, String storeName, GSFeatureTypeEncoder fte,GSLayerEncoder layerEncoder,
			Metadata geoNetworkMeta, String gnCategory,String gnStylesheet,LoginLevel level, boolean promoteMetadataPublishing){
		try{
@@ -135,10 +147,24 @@ public class GISInterface {
	}

	/**
	 * @see it.geosolutions.geoserver.rest.GeoServerRESTPublisher#publishGeoTIFF(String, String, String, File, String, ProjectionPolicy, String, double[])
+	 * @deprecated use addGeoTIFF(String workspace, String storeName, String coverageName,File toPublishFile,String srs,
+	 *	ProjectionPolicy policy,String defaultStyle, double[] bbox,
+	 *	Metadata geoNetworkMeta, String gnCategory,String gnStylesheet, LoginLevel level, boolean promoteMetadataPublishing)
	 *
	 * @param workspace
	 * @param storeName
	 * @param coverageName
	 * @param toPublishFile
	 * @param srs
	 * @param policy
	 * @param defaultStyle
	 * @param bbox
	 * @param geoNetworkMeta
	 * @param config
	 * @param level
	 * @param promoteMetadataPublishing
	 * @return
	 */
+	@Deprecated
	public PublishResponse addGeoTIFF(String workspace, String storeName, String coverageName,
			File toPublishFile,String srs,
@@ -147,14 +173,16 @@ public class GISInterface {
		boolean publishResult = false;
		PublishResponse toReturn=new PublishResponse(geoNetworkMeta);
		GeoServerRESTPublisher publisher=null;
-		GeoServerDescriptor desc=getGeoServerSet(false).last();
-		logger.debug("Using "+desc);
+		AbstractGeoServerDescriptor desc=getCache().getDefaultDescriptor();
+		log.debug("Using "+desc);
		try{
-			publisher=getGeoServerManager(desc.getUrl(), desc.getUser(), desc.getPassword()).getPublisher();
+			publisher=desc.getPublisher();
			// Publishing the file to geoserver depends on file type
			publishResult=publisher.publishGeoTIFF(workspace, storeName, coverageName, toPublishFile, srs, policy, defaultStyle, bbox);

			if(publishResult){
				// Data publish ok
+				desc.onChangedLayers();
				toReturn.setDataOperationResult(Report.OperationState.COMPLETE);

				MetadataEnricher enricher=new MetadataEnricher(geoNetworkMeta, true);
@@ -166,7 +194,7 @@ public class GISInterface {
				try{
					distributionUris.add(URIUtils.getGisLinkByUUID(enricher.getMetadataIdentifier()));
				}catch(Exception e){
-					logger.warn("Unable to get Gis Link ",e);
+					log.warn("Unable to get Gis Link ",e);
					toReturn.setMetaOperationResult(OperationState.WARN);
					toReturn.getMetaOperationMessages().add("Unable to generate GIS link, cause : "+e.getMessage());
				}
@@ -191,21 +219,21 @@ public class GISInterface {
		} catch (IllegalArgumentException e) {
			if(publisher==null){
				toReturn.getDataOperationMessages().add("Unable to instantiate GeoServerRESTPublisher, cause :"+e.getMessage());
-				logger.debug("Unable to instantiate GeoServerRESTPublisher",e);
+				log.debug("Unable to instantiate GeoServerRESTPublisher",e);
			}else {
				toReturn.getDataOperationMessages().add("Unable to publish data, cause :"+e.getMessage());
-				logger.debug("Unable to publish data",e);
+				log.debug("Unable to publish data",e);
			}
		} catch (MalformedURLException e) {
			toReturn.getDataOperationMessages().add("Unable to instantiate GeoServerRESTPublisher, cause :"+e.getMessage());
-			logger.debug("Unable to instantiate GeoServerRESTPublisher",e);
+			log.debug("Unable to instantiate GeoServerRESTPublisher",e);
		} catch (FileNotFoundException e) {
			toReturn.getDataOperationMessages().add("Unable to publish data, cause :"+e.getMessage());
-			logger.debug("Unable to publish data",e);
+			log.debug("Unable to publish data",e);
		} catch (Exception e) {
			// exceptions raised by publishing metadata, need to clean up
			toReturn.getMetaOperationMessages().add("Unable to publish metadata, cause :"+e.getMessage());
-			logger.debug("Unable to publish metadata",e);
+			log.debug("Unable to publish metadata",e);
			DeleteReport delRep=deleteStore(workspace,storeName,null,desc);
			if(!delRep.getDataOperationResult().equals(OperationState.COMPLETE)){
				toReturn.setDataOperationResult(OperationState.WARN);
@@ -216,25 +244,40 @@ public class GISInterface {
		return toReturn;
	}

+	/**
+	 * @deprecated use publishDBTable(String workspace, String storeName, GSFeatureTypeEncoder fte,GSLayerEncoder layerEncoder,
+	 *	Metadata geoNetworkMeta, String gnCategory,String gnStylesheet,LoginLevel level, boolean promoteMetadataPublishing)
+	 *
+	 * @param workspace
+	 * @param storeName
+	 * @param fte
+	 * @param layerEncoder
+	 * @param geoNetworkMeta
+	 * @param config
+	 * @param level
+	 * @param promoteMetadataPublishing
+	 * @return
+	 */
+	@Deprecated
	public PublishResponse publishDBTable(String workspace, String storeName, GSFeatureTypeEncoder fte,GSLayerEncoder layerEncoder,Metadata geoNetworkMeta, GNInsertConfiguration config,LoginLevel level,boolean promoteMetadataPublishing){
		boolean publishResult = false;
		PublishResponse toReturn=new PublishResponse(geoNetworkMeta);
		GeoServerRESTPublisher publisher=null;
-		GeoServerDescriptor desc=getGeoServerSet(false).last();
-		logger.debug("Publish db table : "+storeName+" under ws : "+workspace+", using geoserver "+desc);
-		logger.debug("Using "+desc);
+		AbstractGeoServerDescriptor desc=getCache().getDefaultDescriptor();
+		log.debug("Publish db table : "+storeName+" under ws : "+workspace+", using geoserver "+desc);
+		log.debug("Using "+desc);
		try{
-			GeoServerRESTManager mng=getGeoServerManager(desc.getUrl(), desc.getUser(), desc.getPassword());
-			publisher=mng.getPublisher();
+			publisher=desc.getPublisher();
			// Publishing the file to geoserver depends on file type
			publishResult=publisher.publishDBLayer(workspace, storeName, fte, layerEncoder);

			if(publishResult){
+				desc.onChangedLayers();
				toReturn.setDataOperationResult(Report.OperationState.COMPLETE);

-				logger.debug("Published data, enriching meta..");
+				log.debug("Published data, enriching meta..");
				MetadataEnricher enricher=new MetadataEnricher(geoNetworkMeta, true);

				ArrayList<String> distributionUris=new ArrayList<String>();
@@ -244,7 +287,7 @@ public class GISInterface {
				try{
					distributionUris.add(URIUtils.getGisLinkByUUID(enricher.getMetadataIdentifier()));
				}catch(Exception e){
-					logger.warn("Unable to get Gis Link ",e);
+					log.warn("Unable to get Gis Link ",e);
					toReturn.setMetaOperationResult(OperationState.WARN);
					toReturn.getMetaOperationMessages().add("Unable to generate GIS link, cause : "+e.getMessage());
				}
@@ -274,21 +317,21 @@ public class GISInterface {
		} catch (IllegalArgumentException e) {
			if(publisher==null){
				toReturn.getDataOperationMessages().add("Unable to instantiate GeoServerRESTPublisher, cause :"+e.getMessage());
-				logger.debug("Unable to instantiate GeoServerRESTPublisher",e);
+				log.debug("Unable to instantiate GeoServerRESTPublisher",e);
			}else {
				toReturn.getDataOperationMessages().add("Unable to publish data, cause :"+e.getMessage());
-				logger.debug("Unable to publish data",e);
+				log.debug("Unable to publish data",e);
			}
		} catch (MalformedURLException e) {
			toReturn.getDataOperationMessages().add("Unable to instantiate GeoServerRESTPublisher, cause :"+e.getMessage());
-			logger.debug("Unable to instantiate GeoServerRESTPublisher",e);
+			log.debug("Unable to instantiate GeoServerRESTPublisher",e);
		} catch (FileNotFoundException e) {
			toReturn.getDataOperationMessages().add("Unable to publish data, cause :"+e.getMessage());
-			logger.debug("Unable to publish data",e);
+			log.debug("Unable to publish data",e);
		} catch (Exception e) {
			// exceptions raised by publishing metadata, need to clean up
			toReturn.getMetaOperationMessages().add("Unable to publish metadata, cause :"+e.getMessage());
-			logger.debug("Unable to publish metadata",e);
+			log.debug("Unable to publish metadata",e);
			DeleteReport delRep=deleteLayer(workspace,fte.getName(),null,desc,level);
			if(!delRep.getDataOperationResult().equals(OperationState.COMPLETE)){
				toReturn.setDataOperationResult(OperationState.WARN);
@@ -299,19 +342,26 @@ public class GISInterface {
		return toReturn;
	}

+	/**
+	 * Creates the declared style in the default GeoServer descriptor
+	 *
+	 * @param sldBody
+	 * @param styleName
+	 * @return
+	 */
	public PublishResponse publishStyle(String sldBody,String styleName){
		boolean publishResult = false;
		PublishResponse toReturn=new PublishResponse();
		GeoServerRESTPublisher publisher=null;
-		GeoServerDescriptor desc=getGeoServerSet(false).last();
-		logger.debug("Using "+desc);
+		AbstractGeoServerDescriptor desc=getCache().getDefaultDescriptor();
+		log.debug("Using "+desc);
		try{
-			publisher=getGeoServerManager(desc.getUrl(), desc.getUser(), desc.getPassword()).getPublisher();
+			publisher=desc.getPublisher();
			// Publishing the file to geoserver depends on file type
			publishResult=publisher.publishStyle(sldBody, styleName);

			if(publishResult){
+				desc.onChangedStyles();
				toReturn.setDataOperationResult(Report.OperationState.COMPLETE);
			}else toReturn.getDataOperationMessages().add("Publish operation returned false, unable to publish data");
		}catch(NoSuchElementException e){
@@ -319,18 +369,18 @@ public class GISInterface {
		} catch (IllegalArgumentException e) {
			if(publisher==null){
				toReturn.getDataOperationMessages().add("Unable to instantiate GeoServerRESTPublisher, cause :"+e.getMessage());
-				logger.debug("Unable to instantiate GeoServerRESTPublisher",e);
+				log.debug("Unable to instantiate GeoServerRESTPublisher",e);
			}else {
				toReturn.getDataOperationMessages().add("Unable to publish data, cause :"+e.getMessage());
-				logger.debug("Unable to publish data",e);
+				log.debug("Unable to publish data",e);
			}
		} catch (MalformedURLException e) {
			toReturn.getDataOperationMessages().add("Unable to instantiate GeoServerRESTPublisher, cause :"+e.getMessage());
-			logger.debug("Unable to instantiate GeoServerRESTPublisher",e);
+			log.debug("Unable to instantiate GeoServerRESTPublisher",e);
		} catch (Exception e) {
			// exceptions raised by publishing metadata, need to clean up
			toReturn.getMetaOperationMessages().add("Unable to publish metadata, cause :"+e.getMessage());
-			logger.debug("Unable to publish metadata",e);
+			log.debug("Unable to publish metadata",e);
			DeleteReport delRep=deleteStyle(styleName,desc);
			if(!delRep.getDataOperationResult().equals(OperationState.COMPLETE)){
				toReturn.setDataOperationResult(OperationState.WARN);
@@ -343,16 +393,22 @@ public class GISInterface {

	// ********************* DELETE Logic
	/**
	 * @see it.geosolutions.geoserver.rest.GeoServerRESTPublisher#removeDatastore(String, String, boolean)
+	 * Deletes the specified datastore from the GeoServer instance described in desc
	 *
	 * @param workspace
	 * @param storeName
	 * @param metadataUUID
	 * @param desc
	 * @return
	 */
-	public DeleteReport deleteStore(String workspace,String storeName,Long metadataUUID,GeoServerDescriptor desc){
+	public DeleteReport deleteStore(String workspace,String storeName,Long metadataUUID,AbstractGeoServerDescriptor desc){
		DeleteReport toReturn=new DeleteReport();
		GeoServerRESTPublisher publisher=null;
		try{
-			publisher=getGeoServerManager(desc.getUrl(), desc.getUser(), desc.getPassword()).getPublisher();
+			publisher=desc.getPublisher();
			boolean removed=publisher.removeDatastore(workspace, storeName,true);
			if(removed){
+				desc.onChangedDataStores();
				toReturn.setDataOperationResult(Report.OperationState.COMPLETE);
				if(metadataUUID!=null){
					getGN().deleteMetadata(metadataUUID);
@@ -375,13 +431,24 @@ public class GISInterface {
		return toReturn;
	}

-	public DeleteReport deleteLayer(String workspace,String layerName, Long metadataUUID,GeoServerDescriptor desc,LoginLevel gnLoginLevel){
+	/**
+	 * Deletes the specified layer from the GeoServer instance described by desc.
+	 *
+	 * @param workspace
+	 * @param layerName
+	 * @param metadataUUID
+	 * @param desc
+	 * @param gnLoginLevel
+	 * @return
+	 */
+	public DeleteReport deleteLayer(String workspace,String layerName, Long metadataUUID,AbstractGeoServerDescriptor desc,LoginLevel gnLoginLevel){
		DeleteReport toReturn=new DeleteReport();
		GeoServerRESTPublisher publisher=null;
		try{
-			publisher=getGeoServerManager(desc.getUrl(), desc.getUser(), desc.getPassword()).getPublisher();
+			publisher=desc.getPublisher();
			boolean removed=publisher.removeLayer(workspace, layerName);
			if(removed){
+				desc.onChangedLayers();
				toReturn.setDataOperationResult(Report.OperationState.COMPLETE);
				if(metadataUUID!=null){
					GeoNetworkPublisher gnPub=getGN();
@@ -406,13 +473,21 @@ public class GISInterface {
		return toReturn;
	}

-	public DeleteReport deleteStyle(String styleName,GeoServerDescriptor desc){
+	/**
+	 * Deletes a specific style from the GeoServer described by desc.
+	 *
+	 * @param styleName
+	 * @param desc
+	 * @return
+	 */
+	public DeleteReport deleteStyle(String styleName,AbstractGeoServerDescriptor desc){
		DeleteReport toReturn=new DeleteReport();
		GeoServerRESTPublisher publisher=null;
		try{
-			publisher=getGeoServerManager(desc.getUrl(), desc.getUser(), desc.getPassword()).getPublisher();
+			publisher=desc.getPublisher();
			boolean removed=publisher.removeStyle(styleName, true);
			if(removed){
+				desc.onChangedStyles();
				toReturn.setDataOperationResult(Report.OperationState.COMPLETE);
			}else toReturn.getDataOperationMessages().add("Remove data operation returned false, unable to delete Store");
		}catch(NoSuchElementException e){
@@ -429,20 +504,98 @@ public class GISInterface {
		return toReturn;
	}

-	//************ PRIVATE
+	//************* DATASTORES / WS

+	/**
+	 * Creates the specified workspace in all GeoServer instances of the current GeoServer pool
+	 *
+	 * @param workspace
+	 */
+	public void createWorkspace(String workspace){
+		log.info("Create workspace {} in geoservers",workspace);
+		if(workspace==null || workspace.length()<1) throw new RuntimeException("Invalid workspace name : "+workspace);
+		for(AbstractGeoServerDescriptor gs:getCurrentCacheElements(false)){
+			try{
+				createWorkspace(workspace,gs);
+			}catch(MalformedURLException e){
+				log.warn("Wrong URL in descriptor {} ",gs.getUrl(),e);
+			}catch(IllegalArgumentException e){
+				log.warn("Unable to operate service in {} ",gs.getUrl(),e);
+			}catch(Exception e){
+				log.warn("Unable to check/create ws {} in {} ",workspace,gs.getUrl(),e);
+			}
+		}
+	}

+	/**
+	 * Creates the specified datastore under the mentioned workspace in all GeoServer instances of the current GeoServer pool.
+	 *
+	 * @param workspace
+	 * @param datastore
+	 */
+	public void createDataStore(String workspace,GSAbstractStoreEncoder datastore){
+		log.info("Create datastore {}, ws {} in geoservers",datastore,workspace);
+		if(workspace==null || workspace.length()<1) throw new RuntimeException("Invalid workspace name : "+workspace);
+		if(datastore==null) throw new RuntimeException("Invalid datastore "+datastore);
+		for(AbstractGeoServerDescriptor gs:getCurrentCacheElements(false)){
+			try{
+				createDataStore(workspace,datastore,gs);
+			}catch(MalformedURLException e){
+				log.warn("Wrong URL in descriptor {} ",gs.getUrl(),e);
+			}catch(IllegalArgumentException e){
+				log.warn("Unable to operate service in {} ",gs.getUrl(),e);
+			}catch(Exception e){
+				log.warn("Unable to check/create datastore under ws {} in {} ",workspace,gs.getUrl(),e);
+			}
+		}
+	}

+	private static void createWorkspace(String workspace,AbstractGeoServerDescriptor gs) throws MalformedURLException, IllegalArgumentException,Exception{
+		if(gs==null) throw new IllegalArgumentException("GeoServer Descriptor is "+gs);
+		log.info("Creating ws {} in {} ",workspace,gs.getUrl());
+		if(workspace==null || workspace.length()<1) throw new RuntimeException("Invalid workspace name : "+workspace);
+		if(gs.getWorkspaces().contains(workspace))
+			log.debug("Workspace {} already existing in {} ",workspace,gs.getUrl());
+		else{
+			boolean result=gs.getPublisher().createWorkspace(workspace);
+			gs.onChangedWorkspaces();
+			if(!gs.getWorkspaces().contains(workspace)) throw new Exception("Workspace is not created. Create operation returned "+result);
+		}
+	}

+	private static void createDataStore(String workspace,GSAbstractStoreEncoder datastore,AbstractGeoServerDescriptor gs)throws MalformedURLException, IllegalArgumentException,Exception{
+		if(gs==null) throw new IllegalArgumentException("GeoServer Descriptor is "+gs);
+		log.info("Create datastore {}, ws {} in {} ",datastore,workspace,gs.getUrl());
+		if(workspace==null || workspace.length()<1) throw new RuntimeException("Invalid workspace name : "+workspace);
+		createWorkspace(workspace,gs);
+		if(gs.getDatastores(workspace).contains(datastore.getName()))
+			log.debug("Datastore {}:{} already existing in {}",workspace,datastore.getName(),gs.getUrl());
+		else{
+			boolean result=gs.getDataStoreManager().create(workspace, datastore);
+			gs.onChangedDataStores();
+			if(!gs.getDatastores(workspace).contains(datastore.getName())) throw new Exception("Datastore not created. Create operation returned "+result);
+		}
+	}

+	//************ CACHE Management
+
+	private GeoServerCache theActualCache;
+
+	private GeoServerCache getCache(){
+		return theActualCache;
+	}

	//************

	private GeoNetworkPublisher geoNetwork=null;
-	private ConcurrentSkipListSet<GeoServerDescriptor> geoservers=new ConcurrentSkipListSet<GeoServerDescriptor>();
-	private long lastAccessedTime=0l;
-
-	private synchronized SortedSet<GeoServerDescriptor> getGeoServerSet(boolean forceRefresh){
-		if(forceRefresh||geoservers.size()==0||System.currentTimeMillis()-lastAccessedTime>MAX_GEOSERVER_CACHE_TIME){
-			geoservers.clear();
-			geoservers.addAll(InfrastructureCrawler.queryforGeoServer());
-			lastAccessedTime=System.currentTimeMillis();
-		}
-		return geoservers;
-	}

	private synchronized GeoNetworkPublisher getGN() throws Exception{
		if(geoNetwork==null) {
@@ -454,12 +607,86 @@ public class GISInterface {
		return geoNetwork;
	}

-	private GeoServerRESTManager getGeoServerManager(String url,String user,String password) throws IllegalArgumentException, MalformedURLException{
-		return new GeoServerRESTManager(new URL(url), user, password);
-	}
+	/**
+	 * Returns the current GeoServer from the GeoServer pool. Selection is made according to the configuration file.
+	 *
+	 * @return
+	 */
+	public AbstractGeoServerDescriptor getCurrentGeoServer(){
+		return getCache().getDefaultDescriptor();
+	}
+
+	/**
+	 * Returns the current GeoServer descriptors from the GeoServer pool.
+	 *
+	 * @param forceUpdate Set true to force re-initialization
+	 * @return
+	 */
+	public SortedSet<AbstractGeoServerDescriptor> getCurrentCacheElements(Boolean forceUpdate){
+		return getCache().getDescriptorSet(forceUpdate);
+	}
+
+	/**
+	 * Returns a GeoServer descriptor selected according to the specified ResearchMethod.
+	 *
+	 * @param forceUpdate Set true to force re-initialization
+	 * @return
+	 */
+	public AbstractGeoServerDescriptor getGeoServerByMethod(ResearchMethod method, Boolean forceUpdate){
+		return getCache().getDescriptor(forceUpdate, method);
+	}
+
+	//************************ DEPRECATED OBSOLETE METHODS
+
+	@Deprecated
+	public GeoServerRESTReader getGeoServerReader(ResearchMethod method,boolean forceRefresh) throws Exception{
+		log.warn("*************** ACCESS TO DEPRECATED METHOD GeoServerRESTReader getGeoServerReader(ResearchMethod method,boolean forceRefresh). Please update your code.");
+		return getCache().getDescriptor(forceRefresh, method).getReader();
+	}
+
+	@Deprecated
+	public GeoServerRESTReader getGeoServerReader(GeoServerDescriptor desc)throws Exception{
+		log.warn("*************** ACCESS TO DEPRECATED METHOD GeoServerRESTReader getGeoServerReader(GeoServerDescriptor desc). Please update your code.");
+		return desc.getReader();
+	}
+
+	@Deprecated
+	public GeoServerRESTReader getGeoServerReader(String url,String user,String password) throws IllegalArgumentException, MalformedURLException{
+		log.warn("*************** ACCESS TO DEPRECATED METHOD GeoServerRESTReader getGeoServerReader(String url,String user,String password). Please update your code.");
+		return new GeoServerDescriptor(url,user,password,0l).getReader();
+	}
+
+	@Deprecated
+	public GeoServerRESTReader getGeoServerReader(String url) throws MalformedURLException{
+		log.warn("*************** ACCESS TO DEPRECATED METHOD GeoServerRESTReader getGeoServerReader(String url). Please update your code.");
+		return new GeoServerRESTReader(url);
+	}
+
+	@Deprecated
+	public GeoServerDescriptor getCurrentGeoServerDescriptor(){
+		log.warn("*************** ACCESS TO DEPRECATED METHOD GeoServerDescriptor getCurrentGeoServerDescriptor(). Please update your code.");
+		return translate(getCache().getDefaultDescriptor());
+	}
+
+	@Deprecated
+	public SortedSet<GeoServerDescriptor> getGeoServerDescriptorSet(boolean forceRefresh){
+		log.warn("*************** ACCESS TO DEPRECATED METHOD SortedSet<GeoServerDescriptor> getGeoServerDescriptorSet(boolean forceRefresh). Please update your code.");
+		ConcurrentSkipListSet<GeoServerDescriptor> toReturn=new ConcurrentSkipListSet<GeoServerDescriptor>();
+		for(Object desc: getCache().getDescriptorSet(forceRefresh)){
+			toReturn.add(translate((AbstractGeoServerDescriptor) desc));
+		}
+		return toReturn;
+	}
+
+	@Deprecated
+	private GeoServerDescriptor translate(AbstractGeoServerDescriptor desc){
+		long count=0l;
+		try{
+			count=desc.getHostedLayersCount();
+		}catch(Exception e){
+			log.warn("Unable to get layer count from desc {} ",desc,e);
+		}
+		return new GeoServerDescriptor(desc.getUrl(),desc.getUser(),desc.getPassword(),count);
+	}
}
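The reworked entry points replace the old pick-the-last-of-a-sorted-set logic with cache-backed descriptor selection. A usage sketch of the new API; the endpoint and credentials are placeholders and a real run needs a reachable GeoServer:

	// Explicit pool: bypasses IS discovery by passing descriptors directly.
	AbstractGeoServerDescriptor gs =
			new CachedGeoServerDescriptor("http://geoserver.example.org/geoserver", "admin", "pwd");
	GISInterface gis = GISInterface.get(gs);

	// Created in every instance of the current pool (skipped where it already exists).
	gis.createWorkspace("myWorkspace");

	// Selection honours is.accessPolicy (MOSTUNLOAD unless overridden).
	AbstractGeoServerDescriptor selected = gis.getCurrentGeoServer();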
AbstractGeoServerDescriptor.java
@@ -0,0 +1,113 @@
package org.gcube.spatial.data.gis.is;

import java.net.MalformedURLException;
import java.net.URL;
import java.util.Set;

import it.geosolutions.geoserver.rest.GeoServerRESTManager;
import it.geosolutions.geoserver.rest.GeoServerRESTPublisher;
import it.geosolutions.geoserver.rest.GeoServerRESTReader;
import it.geosolutions.geoserver.rest.manager.GeoServerRESTStoreManager;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;

@Getter
@Slf4j
public abstract class AbstractGeoServerDescriptor implements Comparable<AbstractGeoServerDescriptor>{

	private String url;
	private String user;
	private String password;

	public AbstractGeoServerDescriptor(String url, String user, String password) {
		super();
		this.url = url;
		this.user = user;
		this.password = password;
	}

	@Override
	public int compareTo(AbstractGeoServerDescriptor o) {
		Long localCount=0l;
		Long otherCount=0l;
		try {
			localCount=getHostedLayersCount();
			otherCount=o.getHostedLayersCount();
		} catch (MalformedURLException e) {
			log.warn("Unable to evaluate count. This could lead to unbalanced layer amounts between instances",e);
		}
		return localCount.compareTo(otherCount);
	}

	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		result = prime * result + ((url == null) ? 0 : url.hashCode());
		return result;
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		AbstractGeoServerDescriptor other = (AbstractGeoServerDescriptor) obj;
		if (url == null) {
			if (other.url != null)
				return false;
		} else if (!url.equals(other.url))
			return false;
		return true;
	}

	public abstract Long getHostedLayersCount() throws MalformedURLException;

	public abstract Set<String> getWorkspaces() throws MalformedURLException;

	public abstract Set<String> getStyles() throws MalformedURLException;

	public abstract Set<String> getDatastores(String workspace) throws MalformedURLException;

	public GeoServerRESTReader getReader() throws MalformedURLException{
		return getManager().getReader();
	}

	public GeoServerRESTStoreManager getDataStoreManager() throws IllegalArgumentException, MalformedURLException{
		return getManager().getStoreManager();
	}

	public GeoServerRESTPublisher getPublisher() throws IllegalArgumentException, MalformedURLException{
		return getManager().getPublisher();
	}

	protected GeoServerRESTManager getManager() throws IllegalArgumentException, MalformedURLException{
		return new GeoServerRESTManager(new URL(url), user, password);
	}

	public void onChangedStyles(){}
	public void onChangedWorkspaces(){}
	public void onChangedDataStores(){}
	public void onChangedLayers(){}

	@Override
	public String toString() {
		long layersCount=0l;
		try{
			layersCount=getHostedLayersCount();
		}catch(Exception e){
			log.warn("Unable to get layer count on {} ",url,e);
		}

		return "AbstractGeoServerDescriptor [url=" + url + ", user=" + user + ", password=" + password
				+ ", layerCount=" + layersCount + "]";
	}
}
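AbstractGeoServerDescriptor is a template: subclasses decide where workspace, style, datastore and layer information comes from, while REST plumbing (getReader/getPublisher/getManager), equality by URL and ordering by hosted-layer count stay in the base. A minimal hypothetical subclass, shown only to illustrate the contract (the class and its fixed values are invented):

	import java.util.Collections;
	import java.util.Set;

	// Hypothetical descriptor serving fixed values, e.g. for tests.
	public class StaticGeoServerDescriptor extends AbstractGeoServerDescriptor {

		public StaticGeoServerDescriptor(String url, String user, String password) {
			super(url, user, password);
		}

		@Override public Long getHostedLayersCount() { return 0l; }
		@Override public Set<String> getWorkspaces() { return Collections.emptySet(); }
		@Override public Set<String> getStyles() { return Collections.emptySet(); }
		@Override public Set<String> getDatastores(String workspace) { return Collections.emptySet(); }
	}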
CachedGeoServerDescriptor.java
@@ -0,0 +1,122 @@
package org.gcube.spatial.data.gis.is;

import java.net.MalformedURLException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Set;

import org.gcube.spatial.data.gis.Configuration;

import lombok.extern.slf4j.Slf4j;

@Slf4j
public class CachedGeoServerDescriptor extends LiveGeoServerDescriptor {

	private long lastDatastoreUpdate=0l;
	private long lastWorkspaceUpdate=0l;
	private long lastStylesUpdate=0l;
	private long lastLayerCountUpdate=0l;

	private HashMap<String,HashSet<String>> dataStores=null;
	private HashSet<String> workspaces=null;
	private HashSet<String> styles;
	private Long hostedLayerCount=0l;

	public CachedGeoServerDescriptor(String url, String user, String password) {
		super(url, user, password);
	}

	@Override
	public synchronized Set<String> getDatastores(String workspace) throws MalformedURLException {
		if(dataStores==null || (System.currentTimeMillis()-lastDatastoreUpdate>Configuration.getTTL(Configuration.GEOSERVER_DATASTORE_TTL))){
			log.trace("Loading datastores for {} ",getUrl());
			HashMap<String,HashSet<String>> toSet=new HashMap<>();
			for(String ws: getWorkspaces()){
				HashSet<String> currentWsDatastores=new HashSet<>(super.getDatastores(ws));
				log.debug("Found {} ds in {} ws ",currentWsDatastores.size(),ws);
				toSet.put(ws, currentWsDatastores);
			}
			dataStores=toSet;
			lastDatastoreUpdate=System.currentTimeMillis();
		}
		return dataStores.get(workspace);
	}

	@Override
	public synchronized Long getHostedLayersCount() throws MalformedURLException {
		if(System.currentTimeMillis()-lastLayerCountUpdate>Configuration.getTTL(Configuration.GEOSERVER_HOSTED_LAYERS_TTL)){
			log.trace("Loading layer count for {} ",getUrl());
			hostedLayerCount=super.getHostedLayersCount();
			log.debug("Found {} layers ",hostedLayerCount);
			lastLayerCountUpdate=System.currentTimeMillis();
		}
		return hostedLayerCount;
	}

	@Override
	public synchronized Set<String> getStyles() throws MalformedURLException {
		if(styles==null||(System.currentTimeMillis()-lastStylesUpdate>Configuration.getTTL(Configuration.GEOSERVER_STYLES_TTL))){
			log.trace("Loading styles for {} ",getUrl());
			styles=new HashSet<>(super.getStyles());
			log.debug("Found {} styles ",styles.size());
			lastStylesUpdate=System.currentTimeMillis();
		}
		return styles;
	}

	@Override
	public synchronized Set<String> getWorkspaces() throws MalformedURLException {
		if(workspaces==null||(System.currentTimeMillis()-lastWorkspaceUpdate>Configuration.getTTL(Configuration.GEOSERVER_WORKSPACE_TTL))){
			log.trace("Loading workspaces for {} ",getUrl());
			workspaces=new HashSet<String>(super.getWorkspaces());
			log.debug("Found {} workspaces",workspaces.size());
			lastWorkspaceUpdate=System.currentTimeMillis();
		}
		return workspaces;
	}

	public void invalidateWorkspacesCache(){
		lastWorkspaceUpdate=0l;
	}

	public void invalidateDatastoresCache(){
		lastDatastoreUpdate=0l;
	}

	public void invalidateStylesCache(){
		lastStylesUpdate=0l;
	}

	public void invalidateHostedLayersCountCache(){
		lastLayerCountUpdate=0l;
	}

	@Override
	public void onChangedDataStores() {
		invalidateDatastoresCache();
	}
	@Override
	public void onChangedLayers() {
		invalidateHostedLayersCountCache();
	}
	@Override
	public void onChangedStyles() {
		invalidateStylesCache();
	}
	@Override
	public void onChangedWorkspaces() {
		invalidateWorkspacesCache();
		invalidateDatastoresCache();
	}

}
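Each read is cached per descriptor and refreshed only once its TTL expires; the onChanged* callbacks reset the matching timestamp so the next read goes back to GeoServer. A sketch of the intended flow (endpoint and credentials are placeholders; the getters may throw MalformedURLException):

	CachedGeoServerDescriptor gs =
			new CachedGeoServerDescriptor("http://geoserver.example.org/geoserver", "admin", "pwd");

	gs.getWorkspaces();        // REST call, result cached
	gs.getWorkspaces();        // served from cache until geoserver.cache.workspaces.TTL elapses

	gs.onChangedWorkspaces();  // e.g. fired by GISInterface after a successful createWorkspace(...)
	gs.getWorkspaces();        // cache invalidated: hits REST again (the datastores cache is reset too)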
GeoServerDescriptor.java
@@ -1,144 +1,55 @@
package org.gcube.spatial.data.gis.is;

-public class GeoServerDescriptor implements Comparable<GeoServerDescriptor>{
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Set;
+
+public class GeoServerDescriptor extends AbstractGeoServerDescriptor {

-	private String url;
-	private String user;
-	private String password;
	private Long hostedLayersCount;

+	private HashMap<String,Set<String>> datastores=new HashMap<>();
+	private HashSet<String> workspaces=new HashSet<>();
+	private HashSet<String> styles=new HashSet<>();

-	public GeoServerDescriptor(String url, String user, String password,
-			Long hostedLayersCount) {
-		super();
-		this.url = url;
-		this.user = user;
-		this.password = password;
-		this.hostedLayersCount = hostedLayersCount;
+	public GeoServerDescriptor(String url, String user, String password, Long hostedLayersCount) {
+		super(url, user, password);
+		this.hostedLayersCount=hostedLayersCount;
	}

-	/* (non-Javadoc)
-	 * @see java.lang.Comparable#compareTo(java.lang.Object)
-	 */
-	@Override
-	public int compareTo(GeoServerDescriptor o) {
-		return hostedLayersCount.compareTo(o.hostedLayersCount);
-	}
-
-	/**
-	 * @return the url
-	 */
-	public String getUrl() {
-		return url;
-	}
-
-	/**
-	 * @param url the url to set
-	 */
-	public void setUrl(String url) {
-		this.url = url;
-	}
-
-	/**
-	 * @return the user
-	 */
-	public String getUser() {
-		return user;
-	}
-
-	/**
-	 * @param user the user to set
-	 */
-	public void setUser(String user) {
-		this.user = user;
-	}
-
-	/**
-	 * @return the password
-	 */
-	public String getPassword() {
-		return password;
-	}
-
-	/**
-	 * @param password the password to set
-	 */
-	public void setPassword(String password) {
-		this.password = password;
-	}

	/**
	 * @return the hostedLayersCount
	 */
	public Long getHostedLayersCount() {
		return hostedLayersCount;
	}

-	/**
-	 * @param hostedLayersCount the hostedLayersCount to set
-	 */
+	@Override
+	public Set<String> getDatastores(String workspace) {
+		return datastores.get(workspace);
+	}
+
+	@Override
+	public Set<String> getStyles() {
+		return styles;
+	}
+
+	@Override
+	public Set<String> getWorkspaces() {
+		return workspaces;
+	}
+
+	public void setDatastores(HashMap<String, Set<String>> datastores) {
+		this.datastores = datastores;
+	}

	public void setHostedLayersCount(Long hostedLayersCount) {
		this.hostedLayersCount = hostedLayersCount;
	}

-	/* (non-Javadoc)
-	 * @see java.lang.Object#hashCode()
-	 */
-	@Override
-	public int hashCode() {
-		final int prime = 31;
-		int result = 1;
-		result = prime * result + ((url == null) ? 0 : url.hashCode());
-		return result;
-	}
+	public void setStyles(HashSet<String> styles) {
+		this.styles = styles;
+	}

-	/* (non-Javadoc)
-	 * @see java.lang.Object#equals(java.lang.Object)
-	 */
-	@Override
-	public boolean equals(Object obj) {
-		if (this == obj)
-			return true;
-		if (obj == null)
-			return false;
-		if (getClass() != obj.getClass())
-			return false;
-		GeoServerDescriptor other = (GeoServerDescriptor) obj;
-		if (url == null) {
-			if (other.url != null)
-				return false;
-		} else if (!url.equals(other.url))
-			return false;
-		return true;
-	}
+	public void setWorkspaces(HashSet<String> workspaces) {
+		this.workspaces = workspaces;
+	}

-	/* (non-Javadoc)
-	 * @see java.lang.Object#toString()
-	 */
-	@Override
-	public String toString() {
-		StringBuilder builder = new StringBuilder();
-		builder.append("GeoServerDescriptor [url=");
-		builder.append(url);
-		builder.append(", user=");
-		builder.append(user);
-		builder.append(", password=");
-		builder.append(password);
-		builder.append(", hostedLayersCount=");
-		builder.append(hostedLayersCount);
-		builder.append("]");
-		return builder.toString();
-	}
}
InfrastructureCrawler.java (deleted)
@@ -1,46 +0,0 @@
package org.gcube.spatial.data.gis.is;

import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;

import java.util.ArrayList;
import java.util.List;

import org.gcube.common.encryption.StringEncrypter;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class InfrastructureCrawler {

	final static Logger logger= LoggerFactory.getLogger(InfrastructureCrawler.class);

	public static List<GeoServerDescriptor> queryforGeoServer(){
		List<GeoServerDescriptor> toReturn=new ArrayList<GeoServerDescriptor>();
		SimpleQuery query = queryFor(ServiceEndpoint.class);

		query.addCondition("$resource/Profile/Category/text() eq 'Gis'")
			.addCondition("$resource/Profile/Platform/Name/text() eq 'GeoServer'")
			.setResult("$resource/Profile/AccessPoint");

		DiscoveryClient<AccessPoint> client = clientFor(AccessPoint.class);

		List<AccessPoint> accesspoints = client.submit(query);
		// if(accesspoints.size()==0) throw new Exception("No Resource found under current scope "+ScopeProvider.instance.get());
		for (AccessPoint point : accesspoints) {
			try{
				toReturn.add(new GeoServerDescriptor(point.address(),point.username(),StringEncrypter.getEncrypter().decrypt(point.password()),0l));
			}catch(Exception e){
				logger.warn("Unable to decrypt password for "+point.username()+" in access point "+point.address()+", access to modify methods may fail");
			}
			// url=point.address();
			// user=point.username();
			// pwd=point.password();
		}
		return toReturn;
	}

}
LiveGeoServerDescriptor.java
@@ -0,0 +1,34 @@
package org.gcube.spatial.data.gis.is;

import java.net.MalformedURLException;
import java.util.HashSet;
import java.util.Set;

public class LiveGeoServerDescriptor extends AbstractGeoServerDescriptor {

	public LiveGeoServerDescriptor(String url, String user, String password) {
		super(url, user, password);
	}

	@Override
	public Set<String> getDatastores(String workspace) throws MalformedURLException {
		return new HashSet<String>(getReader().getDatastores(workspace).getNames());
	}

	@Override
	public Long getHostedLayersCount() throws MalformedURLException {
		return new Long(getReader().getLayers().size());
	}

	@Override
	public Set<String> getStyles() throws MalformedURLException {
		return new HashSet<String>(getReader().getStyles().getNames());
	}

	@Override
	public Set<String> getWorkspaces() throws MalformedURLException {
		return new HashSet<String>(getReader().getWorkspaceNames());
	}

}
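LiveGeoServerDescriptor answers every call with a fresh REST round-trip, while CachedGeoServerDescriptor above layers the TTL cache on top of it, so choosing between them is a freshness/latency trade-off. A hypothetical comparison (endpoints and credentials are placeholders):

	AbstractGeoServerDescriptor live =
			new LiveGeoServerDescriptor("http://geoserver.example.org/geoserver", "admin", "pwd");
	live.getStyles();   // one REST call per invocation, always current

	AbstractGeoServerDescriptor cached =
			new CachedGeoServerDescriptor("http://geoserver.example.org/geoserver", "admin", "pwd");
	cached.getStyles(); // at most one REST call per geoserver.cache.styles.TTL window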
ExplicitCache.java
@@ -0,0 +1,22 @@
package org.gcube.spatial.data.gis.is.cache;

import java.util.Collection;
import java.util.SortedSet;
import java.util.concurrent.ConcurrentSkipListSet;

import org.gcube.spatial.data.gis.is.AbstractGeoServerDescriptor;

public class ExplicitCache<T extends AbstractGeoServerDescriptor> extends GeoServerCache<T> {

	SortedSet<T> localCache=null;

	public ExplicitCache(Collection<T> toUseDescriptors) {
		localCache=new ConcurrentSkipListSet<T>(toUseDescriptors);
	}

	@Override
	protected SortedSet<T> getTheCache(Boolean forceUpdate) {
		return localCache;
	}
}
GeoServerCache.java
@@ -0,0 +1,85 @@
package org.gcube.spatial.data.gis.is.cache;

import java.util.SortedSet;
import java.util.concurrent.ThreadLocalRandom;

import org.gcube.spatial.data.geonetwork.utils.ScopeUtils;
import org.gcube.spatial.data.gis.Configuration;
import org.gcube.spatial.data.gis.ResearchMethod;
import org.gcube.spatial.data.gis.is.AbstractGeoServerDescriptor;

import lombok.extern.slf4j.Slf4j;

@Slf4j
public abstract class GeoServerCache<T extends AbstractGeoServerDescriptor> {

	private static final ResearchMethod DEFAULT_RESEARCH_METHOD=ResearchMethod.MOSTUNLOAD;

	protected GeoServerCache() {
	}

	public SortedSet<T> getDescriptorSet(Boolean forceUpdate) {
		return getTheCache(forceUpdate);
	}

	public T getDefaultDescriptor() {
		return getDefaultDescriptor(false);
	}

	public T getDefaultDescriptor(Boolean forceUpdate) {
		return getDescriptor(forceUpdate,getDefaultMethod());
	}

	public T getDescriptor(Boolean forceUpdate, ResearchMethod method) {
		SortedSet<T> cache=getTheCache(forceUpdate);
		log.debug("Access to {} instance in {} ",method,ScopeUtils.getCurrentScope());
		switch(method){
			case MOSTUNLOAD :
				return cache.first();

			case RANDOM : {
				int size=cache.size();
				int randomIndex= ThreadLocalRandom.current().nextInt(0, size);
				log.debug("Accessing {} out of {} descriptors ",randomIndex,size);
				return (T) cache.toArray()[randomIndex];
			}
			default : throw new RuntimeException("Unrecognized method "+method);
		}
	}

	protected ResearchMethod getDefaultMethod(){
		try{
			return ResearchMethod.valueOf(Configuration.get().getProperty(Configuration.IS_ACCESS_POLICY));
		}catch(Throwable t){
			log.warn("Unable to read research method. Using default {}. Cause : ",DEFAULT_RESEARCH_METHOD,t);
			return DEFAULT_RESEARCH_METHOD;
		}
	}

	protected abstract SortedSet<T> getTheCache(Boolean forceUpdate);

	// private synchronized ConcurrentSkipListSet<CachedGeoServerDescriptor> getTheCache(Boolean forceUpdate){
	// 	if(forceUpdate || theCache==null || System.currentTimeMillis()-lastUpdate>Configuration.getTTL(Configuration.IS_CACHE_TTL)){
	// 		try{
	// 			log.debug("Going to retrieve information from IS..");
	// 			List<CachedGeoServerDescriptor> retrieved=queryforGeoServer();
	// 			theCache=new ConcurrentSkipListSet<>(retrieved);
	// 			log.trace("Retrieved {} instances in {}",theCache.size(),ScopeUtils.getCurrentScope());
	// 			lastUpdate=System.currentTimeMillis();
	// 		}catch(IOException e){
	// 			log.error("Unable to query IS ",e);
	// 		}
	// 	}
	// 	return theCache;
	// }

}
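Selection relies on the natural ordering defined by AbstractGeoServerDescriptor.compareTo, so MOSTUNLOAD reduces to first() on the sorted set, i.e. the instance hosting the fewest layers. An illustrative fragment (descriptors and layer counts are fake):

	SortedSet<AbstractGeoServerDescriptor> pool =
			new ConcurrentSkipListSet<AbstractGeoServerDescriptor>();
	pool.add(new GeoServerDescriptor("http://gs1.example.org/geoserver", "admin", "pwd", 120l));
	pool.add(new GeoServerDescriptor("http://gs2.example.org/geoserver", "admin", "pwd", 3l));
	pool.first(); // gs2: 3 hosted layers vs 120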
ISGeoServerCache.java
@@ -0,0 +1,89 @@
package org.gcube.spatial.data.gis.is.cache;

import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.SortedSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListSet;

import org.gcube.common.encryption.StringEncrypter;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.gcube.spatial.data.geonetwork.utils.ScopeUtils;
import org.gcube.spatial.data.gis.Configuration;
import org.gcube.spatial.data.gis.is.CachedGeoServerDescriptor;

import lombok.extern.slf4j.Slf4j;

@Slf4j
public class ISGeoServerCache extends GeoServerCache<CachedGeoServerDescriptor> {

	private static final ConcurrentHashMap<String,ConcurrentSkipListSet<CachedGeoServerDescriptor>> scopedMap=new ConcurrentHashMap<String,ConcurrentSkipListSet<CachedGeoServerDescriptor>>();

	private static final ConcurrentHashMap<String,Long> scopeLastUpdate=new ConcurrentHashMap<String,Long>();

	@Override
	protected synchronized SortedSet<CachedGeoServerDescriptor> getTheCache(Boolean forceUpdate) {
		String scope=ScopeUtils.getCurrentScope();
		if(forceUpdate ||
				! scopedMap.containsKey(scope) ||
				System.currentTimeMillis()-getLastUpdate()>Configuration.getTTL(Configuration.IS_CACHE_TTL)){
			try{
				log.debug("Going to retrieve information from IS. Scope is {} ",scope);
				List<CachedGeoServerDescriptor> retrieved=queryforGeoServer();
				scopedMap.put(scope, new ConcurrentSkipListSet<CachedGeoServerDescriptor>(retrieved));
				log.trace("Retrieved {} instances in {}",retrieved.size(),scope);
				setUpdated();
			}catch(IOException e){
				log.error("Unable to query IS ",e);
			}
		}
		return scopedMap.get(scope);
	}

	private static synchronized Long getLastUpdate(){
		String scope=ScopeUtils.getCurrentScope();
		log.debug("Accessing lastUpdate in scope {} ",scope);
		return scopeLastUpdate.containsKey(scope)?scopeLastUpdate.get(scope):0l;
	}

	private static synchronized void setUpdated(){
		String scope=ScopeUtils.getCurrentScope();
		log.debug("Setting update for scope {} ",scope);
		scopeLastUpdate.put(scope, System.currentTimeMillis());
	}

	private static List<CachedGeoServerDescriptor> queryforGeoServer() throws IOException{
		List<CachedGeoServerDescriptor> toReturn=new ArrayList<CachedGeoServerDescriptor>();
		SimpleQuery query = queryFor(ServiceEndpoint.class);

		String category=Configuration.get().getProperty(Configuration.IS_SERVICE_PROFILE_CATEGORY);
		String name=Configuration.get().getProperty(Configuration.IS_SERVICE_PROFILE_PLATFORM_NAME);

		log.debug("Querying IS for service profiles category {} , name {} ",category,name);

		query.addCondition("$resource/Profile/Category/text() eq '"+category+"'")
			.addCondition("$resource/Profile/Platform/Name/text() eq '"+name+"'")
			.setResult("$resource/Profile/AccessPoint");

		DiscoveryClient<AccessPoint> client = clientFor(AccessPoint.class);

		List<AccessPoint> accesspoints = client.submit(query);

		for (AccessPoint point : accesspoints) {
			try{
				toReturn.add(new CachedGeoServerDescriptor(point.address(),point.username(),StringEncrypter.getEncrypter().decrypt(point.password())));
			}catch(Exception e){
				log.warn("Skipping Geoserver at {}",point.address(),e);
			}
		}
		return toReturn;
	}
}
Utils.java
@@ -0,0 +1,5 @@
package org.gcube.spatial.data.gis.utils;

public class Utils {

}
@ -0,0 +1,12 @@
|
|||
#Cache configuration
#TTL in ms
geoserver.cache.hostedLayers.TTL=60000
geoserver.cache.styles.TTL=60000
geoserver.cache.workspaces.TTL=600000
geoserver.cache.datastores.TTL=600000
is.cache.TTL=600000
is.serviceProfile.category=Gis
is.serviceProfile.platform.name=GeoServer

#Available methods are MOSTUNLOAD,RANDOM
is.accessPolicy=MOSTUNLOAD
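A minimal sketch of how a client can read the TTL keys above through the Configuration helper introduced by this commit, assuming gis-interface.properties is on the classpath; the class name TtlLookup is invented for illustration.

import org.gcube.spatial.data.gis.Configuration;

// Illustrative only: resolving one of the TTL keys declared above.
public class TtlLookup {
	public static void main(String[] args){
		// getTTL falls back to the library default when the key is missing or not numeric.
		Long ttl=Configuration.getTTL(Configuration.IS_CACHE_TTL);
		System.out.println("IS cache entries expire after "+ttl+" ms");
	}
}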
@ -1,12 +1,146 @@
package org.gcube.spatial.data.gis;

import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.ConcurrentSkipListSet;

import org.gcube.spatial.data.geonetwork.utils.ScopeUtils;
import org.gcube.spatial.data.gis.is.AbstractGeoServerDescriptor;
import org.junit.Test;

public class Environment {

	@Test
	public void test() throws FileNotFoundException {
		ConcurrentSkipListMap<String, String> errors=new ConcurrentSkipListMap<String,String>();
		ConcurrentSkipListSet<String> scopes=new ConcurrentSkipListSet<String>(getScopes());

		PrintWriter out = new PrintWriter("report.txt");
		for(String scope:scopes){
			TokenSetter.set(scope);
			try{
				GISInterface gis=GISInterface.get();
				out.println(printInfo(gis));
			}catch(Throwable t){
				// System.err.println(t);
				errors.put(scope, t.toString());
			}
		}

		out.println("Problematic scopes: ");
		for(Entry<String,String> err:errors.entrySet())
			out.println(err.getKey() +" --> "+err.getValue());

		out.flush();
		out.close();
	}

	private String printInfo(GISInterface gis)throws Exception{
		StringBuilder builder=new StringBuilder("*********************************");
		builder.append(ScopeUtils.getCurrentScope()+"\n");
		for(AbstractGeoServerDescriptor desc: gis.getCurrentCacheElements(false)){
			builder.append(desc+"\n");
			builder.append("Styles : "+desc.getStyles()+" \n");
			for(String ws:desc.getWorkspaces())
				builder.append("Datastores in "+ws+" : "+desc.getDatastores(ws)+" \n");
		}
		builder.append("Selected : "+gis.getCurrentGeoServer());
		return builder.toString();
	}

	private static ArrayList<String> getScopes(){
		ArrayList<String> scopes=new ArrayList<String>();

		//*************************** PRODUCTION
		scopes.add("/d4science.research-infrastructures.eu");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/InfraScience");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/ICES_TCRE");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/TabularDataLab");
		scopes.add("/d4science.research-infrastructures.eu/FARM/AquaMaps");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/PGFA-UFMT");
		scopes.add("/d4science.research-infrastructures.eu/FARM");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/EuBrazilOpenBio");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/EcologicalModelling");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/BlueBRIDGE-PSC");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/ENVRIPlus");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/ENVRI");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/BOBLME_HilsaAWG");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/ScalableDataMining");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/DESCRAMBLE");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/FAO_TunaAtlas");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/StocksAndFisheriesKB");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/BlueCommons");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/ICES_TCSSM");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/BlueBRIDGE-EAB");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/ARIADNE");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/ProtectedAreaImpactMaps");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/OpenIt");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/AquacultureAtlasGeneration");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/Parthenos");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/IGDI");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/EGIEngage");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/RStudioLab");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/TimeSeries");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/RPrototypingLab");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/TCom");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/ICCAT_BFT-E");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/SoBigData.it");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/BlueBridgeProject");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/BlueUptake");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/gCube");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/KnowledgeBridging");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/EFG");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/StockAssessment");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/iSearch");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/ICOS_ETC");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/VesselActivitiesAnalyzer");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/BiOnym");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/SoBigData.eu");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/PerformanceEvaluationInAquaculture");
		scopes.add("/d4science.research-infrastructures.eu/gCubeApps/StrategicInvestmentAnalysis");

		//******************** DEVELOPMENT

		scopes.add("/gcube");
		scopes.add("/gcube/devsec");
		scopes.add("/gcube/devsec/BasicVRETest");
		scopes.add("/gcube/devsec/GSTProcessingTest");
		scopes.add("/gcube/devsec/StaTabTest");
		scopes.add("/gcube/devsec/USTORE_VRE");
		scopes.add("/gcube/devsec/TestTue10May_1822");
		scopes.add("/gcube/devsec/OpenAireDevVRE");
		scopes.add("/gcube/devsec/StaTabTest");
		scopes.add("/gcube/devsec/TabProcessing");
		scopes.add("/gcube/devsec/devVRE");
		scopes.add("/gcube/devsec/TestFri26Feb2016");
		scopes.add("/gcube/devsec/USTORE_VRE");
		scopes.add("/gcube/devsec/RMinerDev");
		scopes.add("/gcube/devsec/TabProcessing");
		scopes.add("/gcube/devsec/devVRE");
		scopes.add("/gcube/devsec/BlueVRE");
		scopes.add("/gcube/devsec/TestFri26Feb2016");
		scopes.add("/gcube/devsec/LucioVRE");

		scopes.add("/gcube/preprod");
		scopes.add("/gcube/preprod/Dorne");
		scopes.add("/gcube/preprod/preVRE");

		scopes.add("/gcube/devNext");
		scopes.add("/gcube/devNext/NextNext");

		return scopes;
	}
}
@ -1,9 +1,42 @@
package org.gcube.spatial.data.gis;

import java.net.MalformedURLException;

import org.gcube.spatial.data.gis.is.AbstractGeoServerDescriptor;

import it.geosolutions.geoserver.rest.encoder.GSAbstractStoreEncoder;
import it.geosolutions.geoserver.rest.encoder.datastore.GSPostGISDatastoreEncoder;

public class PublishStore {

	public static void main (String[] args) throws Exception{
		TokenSetter.set("/gcube/devsec");

		String workspace="My another workspace";

		GISInterface gis= GISInterface.get();
		printWorkspaces(gis);

		GISInterface.get().createWorkspace(workspace);
		//Second call should be skipped where the workspace already exists
		GISInterface.get().createWorkspace(workspace);

		printWorkspaces(gis);

		GSPostGISDatastoreEncoder datastore=new GSPostGISDatastoreEncoder("My datastore");
		// datastore.set
		// Utils parameters to simplify caller's life will be provided

		// gis.createDataStore(workspace, datastore);
	}

	public static void printWorkspaces(GISInterface gis) throws MalformedURLException{
		for(AbstractGeoServerDescriptor gs: gis.getCurrentCacheElements(false)){
			System.out.println(gs.getWorkspaces());
		}
	}
}
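The encoder above is left deliberately unconfigured in this test. Below is a hedged sketch of how it would typically be filled, assuming the standard setters of geoserver-manager's GSPostGISDatastoreEncoder; all connection values are placeholders, not values from this commit.

import it.geosolutions.geoserver.rest.encoder.datastore.GSPostGISDatastoreEncoder;

// Illustrative only: typical PostGIS connection parameters for the encoder.
public class StoreEncoderExample {
	public static void main(String[] args){
		GSPostGISDatastoreEncoder datastore=new GSPostGISDatastoreEncoder("My datastore");
		datastore.setHost("postgis.example.org");   // placeholder host
		datastore.setPort(5432);                    // default PostgreSQL port
		datastore.setDatabase("spatialdata");       // placeholder database name
		datastore.setSchema("public");
		datastore.setUser("gis");                   // placeholder credentials
		datastore.setPassword("secret");
		// Once filled, the encoder would be handed to gis.createDataStore(workspace, datastore),
		// as hinted by the commented-out call in the test above.
	}
}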
@ -55,7 +55,7 @@ public class PublishTable {

		GISInterface gis=GISInterface.get();
		System.out.println(gis.getCurrentGeoServer());
		PublishResponse resp=gis.publishDBTable(workspace, datastore, fte, le, meta.getMetadata(), "datasets", "_none_", LoginLevel.DEFAULT,false);
		System.out.println(resp);
	}
@ -1,51 +0,0 @@
package org.gcube.spatial.data.gis;

import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;

import java.util.ArrayList;
import java.util.List;

import org.gcube.common.encryption.StringEncrypter;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.gcube.spatial.data.gis.is.GeoServerDescriptor;

public class Query {

	/**
	 * @param args
	 */
	public static void main(String[] args) {
		TokenSetter.set("/gcube/devsec");
		System.out.println(queryforGeoServer());
	}

	public static List<GeoServerDescriptor> queryforGeoServer(){
		List<GeoServerDescriptor> toReturn=new ArrayList<GeoServerDescriptor>();
		SimpleQuery query = queryFor(ServiceEndpoint.class);

		query.addCondition("$resource/Profile/Category/text() eq 'Gis'")
			.addCondition("$resource/Profile/Platform/Name/text() eq 'GeoServer'")
			.setResult("$resource/Profile/AccessPoint");

		DiscoveryClient<AccessPoint> client = clientFor(AccessPoint.class);

		List<AccessPoint> accesspoints = client.submit(query);
		// if(accesspoints.size()==0) throw new Exception("No Resource found under current scope "+ScopeProvider.instance.get());
		for (AccessPoint point : accesspoints) {
			try{
				toReturn.add(new GeoServerDescriptor(point.address(),point.username(),StringEncrypter.getEncrypter().decrypt(point.password()),0l));
			}catch(Exception e){
				System.err.println("Unable to decrypt password for "+point.username()+" in access point "+point.address()+", access to modify methods may fail");
			}
			// url=point.address();
			// user=point.username();
			// pwd=point.password();
		}
		return toReturn;
	}

}
@ -21,8 +21,11 @@ public class TokenSetter {

	public static void set(String scope){
		try{
			if(!props.containsKey(scope)) throw new RuntimeException("No token found for scope : "+scope);
			SecurityTokenProvider.instance.set(props.getProperty(scope));
		}catch(Throwable e){
			// token lookup failures are swallowed: the scope is still set below
		}
		ScopeProvider.instance.set(scope);
	}
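The containsKey/getProperty lookup above implies a flat scope-to-token properties mapping. A hypothetical sketch of loading such a file follows; the file name tokens.properties and its content are invented for illustration.

import java.io.FileReader;
import java.util.Properties;

// Illustrative only: the flat scope-to-token mapping that TokenSetter's lookup implies.
public class TokenFileExample {
	public static void main(String[] args) throws Exception{
		Properties props=new Properties();
		// each line maps a scope to its token, e.g.  /gcube/devsec=<token>
		props.load(new FileReader("tokens.properties"));
		System.out.println(props.getProperty("/gcube/devsec"));
	}
}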
@ -1,7 +1,18 @@
#log4j.rootLogger=DEBUG, stdout

#CONSOLE
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.Threshold=INFO
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%t] %-5p %c %d{dd MMM yyyy ;HH:mm:ss.SSS} - %m%n

#CONSOLE LOCAL
#service-specific logger with dedicated appender
log4j.logger.org.gcube.spatial.data.gis=TRACE, AQS
log4j.appender.AQS=org.apache.log4j.RollingFileAppender
log4j.appender.AQS.file=report.fulllog
log4j.appender.AQS.MaxFileSize=10000KB
log4j.appender.AQS.MaxBackupIndex=40
log4j.appender.AQS.layout=org.apache.log4j.PatternLayout
log4j.appender.AQS.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} [%t,%M:%L] %m%n
log4j.appender.AQS.threshold=TRACE