diff --git a/distro/changelog.xml b/distro/changelog.xml
index 26fc43e..a900aa0 100644
--- a/distro/changelog.xml
+++ b/distro/changelog.xml
@@ -26,4 +26,9 @@
gCube 4 SDI policies integration
+
+ Cached information
+ Workspace methods
+ Datastore methods
+
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index 9afa2d9..e766304 100644
--- a/pom.xml
+++ b/pom.xml
@@ -8,7 +8,7 @@
org.gcube.spatial.data
gis-interface
- 2.3.1-SNAPSHOT
+ 2.4.0-SNAPSHOT
gis-interface
diff --git a/src/main/java/org/gcube/spatial/data/gis/Configuration.java b/src/main/java/org/gcube/spatial/data/gis/Configuration.java
new file mode 100644
index 0000000..c13f5d1
--- /dev/null
+++ b/src/main/java/org/gcube/spatial/data/gis/Configuration.java
@@ -0,0 +1,60 @@
+package org.gcube.spatial.data.gis;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Properties;
+
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public class Configuration {
+
+ private static final Long DEFAULT_TTL=60000L;
+
+ private static Configuration singleton=null;
+
+
+ public static synchronized Configuration get() throws IOException{
+ if(singleton==null) singleton=new Configuration();
+ return singleton;
+ }
+
+
+ public static final String GEOSERVER_HOSTED_LAYERS_TTL="geoserver.cache.hostedLayers.TTL";
+ public static final String GEOSERVER_STYLES_TTL="geoserver.cache.styles.TTL";
+ public static final String GEOSERVER_WORKSPACE_TTL="geoserver.cache.workspaces.TTL";
+ public static final String GEOSERVER_DATASTORE_TTL="geoserver.cache.datastores.TTL";
+ public static final String IS_CACHE_TTL="is.cache.TTL";
+ public static final String IS_SERVICE_PROFILE_CATEGORY="is.serviceProfile.category";
+ public static final String IS_SERVICE_PROFILE_PLATFORM_NAME="is.serviceProfile.platform.name";
+ public static final String IS_ACCESS_POLICY="is.accessPolicy";
+
+
+
+ HashMap properties;
+ Properties props;
+
+ private Configuration() throws IOException {
+ props=new Properties();
+ try {
+ props.load(Configuration.class.getClassLoader().getResourceAsStream("gis-interface.properties"));
+ } catch (IOException e) {
+ log.warn("****************** Unable to load properties file ****************** ",e);
+ throw e;
+ }
+ }
+
+
+ public String getProperty(String propertyName){
+ return props.getProperty(propertyName);
+ }
+
+ public static Long getTTL(String TTLParameter) {
+ try{
+ return Long.parseLong(Configuration.get().getProperty(TTLParameter));
+ }catch(Exception e){
+ log.warn("Unable to get TTL "+TTLParameter,e);
+ return DEFAULT_TTL;
+ }
+ }
+}
diff --git a/src/main/java/org/gcube/spatial/data/gis/GISInterface.java b/src/main/java/org/gcube/spatial/data/gis/GISInterface.java
index a3f009a..ee98ac3 100644
--- a/src/main/java/org/gcube/spatial/data/gis/GISInterface.java
+++ b/src/main/java/org/gcube/spatial/data/gis/GISInterface.java
@@ -3,8 +3,9 @@ package org.gcube.spatial.data.gis;
import java.io.File;
import java.io.FileNotFoundException;
import java.net.MalformedURLException;
-import java.net.URL;
import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.NoSuchElementException;
@@ -18,8 +19,11 @@ import org.gcube.spatial.data.geonetwork.LoginLevel;
import org.gcube.spatial.data.geonetwork.configuration.XMLAdapter;
import org.gcube.spatial.data.geonetwork.iso.BoundingBox;
import org.gcube.spatial.data.geonetwork.utils.ScopeUtils;
+import org.gcube.spatial.data.gis.is.AbstractGeoServerDescriptor;
import org.gcube.spatial.data.gis.is.GeoServerDescriptor;
-import org.gcube.spatial.data.gis.is.InfrastructureCrawler;
+import org.gcube.spatial.data.gis.is.cache.ExplicitCache;
+import org.gcube.spatial.data.gis.is.cache.GeoServerCache;
+import org.gcube.spatial.data.gis.is.cache.ISGeoServerCache;
import org.gcube.spatial.data.gis.meta.MetadataEnricher;
import org.gcube.spatial.data.gis.model.report.DeleteReport;
import org.gcube.spatial.data.gis.model.report.PublishResponse;
@@ -27,83 +31,77 @@ import org.gcube.spatial.data.gis.model.report.Report;
import org.gcube.spatial.data.gis.model.report.Report.OperationState;
import org.opengis.metadata.Metadata;
import org.opengis.metadata.citation.DateType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import it.geosolutions.geonetwork.util.GNInsertConfiguration;
-import it.geosolutions.geoserver.rest.GeoServerRESTManager;
import it.geosolutions.geoserver.rest.GeoServerRESTPublisher;
import it.geosolutions.geoserver.rest.GeoServerRESTReader;
+import it.geosolutions.geoserver.rest.encoder.GSAbstractStoreEncoder;
import it.geosolutions.geoserver.rest.encoder.GSLayerEncoder;
import it.geosolutions.geoserver.rest.encoder.GSResourceEncoder.ProjectionPolicy;
import it.geosolutions.geoserver.rest.encoder.feature.GSFeatureTypeEncoder;
+import lombok.extern.slf4j.Slf4j;
-public class GISInterface {
+@Slf4j
+public class GISInterface{
- final static Logger logger= LoggerFactory.getLogger(GISInterface.class);
-
- private static final long MAX_GEOSERVER_CACHE_TIME=2*60*1000;
-
- public static GISInterface get() throws Exception{
- return new GISInterface();
+
+
+ public static GISInterface get(AbstractGeoServerDescriptor... descriptors) throws Exception{
+ if(descriptors!=null&&descriptors.length>0)
+ return new GISInterface(Arrays.asList(descriptors));
+ else return new GISInterface();
}
-
-
-
+
+
+
//************ INSTANCE
-
+
private List toRegisterXMLAdapters=null;
-
-
+
+
private GISInterface() throws Exception{
-
+ theActualCache=new ISGeoServerCache();
}
+
+ private GISInterface(Collection collection){
+ theActualCache=new ExplicitCache(collection);
+ }
+
+
public void setToRegisterXMLAdapters(List toRegisterXMLAdapters) {
this.toRegisterXMLAdapters = toRegisterXMLAdapters;
}
-
+
//*******************READER getter METHODS
-
+
public GeoNetworkReader getGeoNetworkReader() throws Exception{
return getGN();
}
-
+
public GeoNetworkPublisher getGeoNewtorkPublisher()throws Exception{
return getGN();
}
-
- public GeoServerRESTReader getGeoServerReader(ResearchMethod method,boolean forceRefresh) throws Exception{
- GeoServerDescriptor desc=getGeoServerSet(forceRefresh).last();
- return getGeoServerReader(desc);
- }
-
- public GeoServerRESTReader getGeoServerReader(GeoServerDescriptor desc)throws Exception{
- return getGeoServerManager(desc.getUrl(), desc.getUser(), desc.getPassword()).getReader();
- }
-
- public GeoServerRESTReader getGeoServerReader(String url,String user,String password) throws IllegalArgumentException, MalformedURLException{
- return getGeoServerManager(url, user, password).getReader();
- }
-
- public GeoServerRESTReader getGeoServerReader(String url) throws MalformedURLException{
- return new GeoServerRESTReader(url);
- }
-
- public GeoServerDescriptor getCurrentGeoServerDescriptor(){
- return getGeoServerSet(false).last();
- }
-
- public SortedSet getGeoServerDescriptorSet(boolean forceRefresh){
- return getGeoServerSet(forceRefresh);
- }
-
- //******************* Create logic
-
- public Report.OperationState createWorkspace(String workspace){
- return null;
- }
-
+
+
+ /**
+ * Publishes toPublishFile [GeoTIFF] in the default GeoServer descriptor
+ *
+ * @param workspace
+ * @param storeName
+ * @param coverageName
+ * @param toPublishFile
+ * @param srs
+ * @param policy
+ * @param defaultStyle
+ * @param bbox
+ * @param geoNetworkMeta
+ * @param gnCategory
+ * @param gnStylesheet
+ * @param level
+ * @param promoteMetadataPublishing
+ * @return
+ */
public PublishResponse addGeoTIFF(String workspace, String storeName, String coverageName,
File toPublishFile,String srs,
ProjectionPolicy policy,String defaultStyle, double[] bbox,
@@ -112,33 +110,61 @@ public class GISInterface {
GeoNetworkPublisher gn=getGN();
gn.login(level);
GNInsertConfiguration config=gn.getCurrentUserConfiguration(gnCategory, gnStylesheet);
- return addGeoTIFF(workspace, storeName, coverageName, toPublishFile, srs, policy, defaultStyle, bbox, geoNetworkMeta, config, level,promoteMetadataPublishing);
+ return addGeoTIFF(workspace, storeName, coverageName, toPublishFile, srs, policy, defaultStyle, bbox, geoNetworkMeta, config, level,promoteMetadataPublishing);
}catch(Exception e){
PublishResponse response=new PublishResponse(geoNetworkMeta);
response.getMetaOperationMessages().add("Unable to get GN Configuration , cause :"+e.getMessage());
return response;
}
}
-
+
+ /**
+ * Publishes the specified fte in the default GeoServer
+ *
+ * @param workspace
+ * @param storeName
+ * @param fte
+ * @param layerEncoder
+ * @param geoNetworkMeta
+ * @param gnCategory
+ * @param gnStylesheet
+ * @param level
+ * @param promoteMetadataPublishing
+ * @return
+ */
public PublishResponse publishDBTable(String workspace, String storeName, GSFeatureTypeEncoder fte,GSLayerEncoder layerEncoder,
Metadata geoNetworkMeta, String gnCategory,String gnStylesheet,LoginLevel level, boolean promoteMetadataPublishing){
try{
GeoNetworkPublisher gn=getGN();
gn.login(level);
GNInsertConfiguration config=gn.getCurrentUserConfiguration(gnCategory, gnStylesheet);
- return publishDBTable(workspace, storeName, fte, layerEncoder, geoNetworkMeta, config, level,promoteMetadataPublishing);
+ return publishDBTable(workspace, storeName, fte, layerEncoder, geoNetworkMeta, config, level,promoteMetadataPublishing);
}catch(Exception e){
PublishResponse response=new PublishResponse(geoNetworkMeta);
response.getMetaOperationMessages().add("Unable to get GN Configuration , cause :"+e.getMessage());
return response;
}
}
-
+
/**
- * @see it.geosolutions.geoserver.rest.GeoServerRESTPublisher#publishGeoTIFF(String, String, String, File, String, ProjectionPolicy, String, double[])
- *
+ * @deprecated use addGeoTIFF(String workspace, String storeName, String coverageName,File toPublishFile,String srs,
+ * ProjectionPolicy policy,String defaultStyle, double[] bbox,
+ * Metadata geoNetworkMeta, String gnCategory,String gnStylesheet, LoginLevel level, boolean promoteMetadataPublishing)
+ *
+ * @param workspace
+ * @param storeName
+ * @param coverageName
+ * @param toPublishFile
+ * @param srs
+ * @param policy
+ * @param defaultStyle
+ * @param bbox
+ * @param geoNetworkMeta
+ * @param config
+ * @param level
+ * @param promoteMetadataPublishing
+ * @return
*/
-
@Deprecated
public PublishResponse addGeoTIFF(String workspace, String storeName, String coverageName,
File toPublishFile,String srs,
@@ -147,18 +173,20 @@ public class GISInterface {
boolean publishResult = false;
PublishResponse toReturn=new PublishResponse(geoNetworkMeta);
GeoServerRESTPublisher publisher=null;
- GeoServerDescriptor desc=getGeoServerSet(false).last();
- logger.debug("Using "+desc);
+ AbstractGeoServerDescriptor desc=getCache().getDefaultDescriptor();
+ log.debug("Using "+desc);
try{
- publisher=getGeoServerManager(desc.getUrl(), desc.getUser(), desc.getPassword()).getPublisher();
+ publisher=desc.getPublisher();
// Publishing the file to geoserver depends on file type
publishResult=publisher.publishGeoTIFF(workspace, storeName, coverageName, toPublishFile, srs, policy, defaultStyle, bbox);
-
+
if(publishResult){
+ // Data publish ok
+ desc.onChangedLayers();
toReturn.setDataOperationResult(Report.OperationState.COMPLETE);
-
+
MetadataEnricher enricher=new MetadataEnricher(geoNetworkMeta, true);
-
+
ArrayList distributionUris=new ArrayList();
distributionUris.add(URIUtils.getWmsUrl(desc.getUrl(), coverageName, defaultStyle, new BoundingBox(bbox[0],bbox[1],bbox[2],bbox[3])));
distributionUris.add(URIUtils.getWfsUrl(desc.getUrl(), coverageName));
@@ -166,11 +194,11 @@ public class GISInterface {
try{
distributionUris.add(URIUtils.getGisLinkByUUID(enricher.getMetadataIdentifier()));
}catch(Exception e){
- logger.warn("Unabel to get Gis Link ",e);
+ log.warn("Unable to get Gis Link ",e);
toReturn.setMetaOperationResult(OperationState.WARN);
toReturn.getMetaOperationMessages().add("Unable to generate GIS link, cause : "+e.getMessage());
}
-
+
enricher.addDate(new Date(System.currentTimeMillis()), DateType.CREATION);
enricher.addPreview(distributionUris.get(0));
enricher.setdistributionURIs(distributionUris,coverageName);
@@ -178,11 +206,11 @@ public class GISInterface {
if(enricher.getMessages().size()>0)toReturn.setMetaOperationResult(OperationState.WARN);
GeoNetworkPublisher pub=getGN();
getGN().login(level);
-
+
Metadata enriched=enricher.getEnriched();
toReturn.setPublishedMetadata(enriched);
long returnedId=promoteMetadataPublishing?pub.insertAndPromoteMetadata(config, enriched):pub.insertMetadata(config,enriched);
-
+
toReturn.setReturnedMetaId(returnedId);
toReturn.setMetaOperationResult(OperationState.COMPLETE);
}else toReturn.getDataOperationMessages().add("Publish operation returned false, unable to publish data");
@@ -191,21 +219,21 @@ public class GISInterface {
} catch (IllegalArgumentException e) {
if(publisher==null){
toReturn.getDataOperationMessages().add("Unable to instatiate GeoServerRESTPublisher, cause :"+e.getMessage());
- logger.debug("Unable to instatiate GeoServerRESTPublisher",e);
+ log.debug("Unable to instatiate GeoServerRESTPublisher",e);
}else {
toReturn.getDataOperationMessages().add("Unable to publish data, cause :"+e.getMessage());
- logger.debug("Unable to publish data",e);
+ log.debug("Unable to publish data",e);
}
} catch (MalformedURLException e) {
toReturn.getDataOperationMessages().add("Unable to instatiate GeoServerRESTPublisher, cause :"+e.getMessage());
- logger.debug("Unable to instatiate GeoServerRESTPublisher",e);
+ log.debug("Unable to instatiate GeoServerRESTPublisher",e);
} catch (FileNotFoundException e) {
toReturn.getDataOperationMessages().add("Unable to publish data, cause :"+e.getMessage());
- logger.debug("Unable to publish data",e);
+ log.debug("Unable to publish data",e);
} catch (Exception e) {
// exceptions raised by publishing metadata, need to clean up
toReturn.getMetaOperationMessages().add("Unable to publish metadata, cause :"+e.getMessage());
- logger.debug("Unable to publish metadata",e);
+ log.debug("Unable to publish metadata",e);
DeleteReport delRep=deleteStore(workspace,storeName,null,desc);
if(!delRep.getDataOperationResult().equals(OperationState.COMPLETE)){
toReturn.setDataOperationResult(OperationState.WARN);
@@ -216,27 +244,42 @@ public class GISInterface {
return toReturn;
}
+
+ /**
+ * @deprecated use publishDBTable(String workspace, String storeName, GSFeatureTypeEncoder fte,GSLayerEncoder layerEncoder,
+ * Metadata geoNetworkMeta, String gnCategory,String gnStylesheet,LoginLevel level, boolean promoteMetadataPublishing)
+ *
+ * @param workspace
+ * @param storeName
+ * @param fte
+ * @param layerEncoder
+ * @param geoNetworkMeta
+ * @param config
+ * @param level
+ * @param promoteMetadataPublishing
+ * @return
+ */
@Deprecated
public PublishResponse publishDBTable(String workspace, String storeName, GSFeatureTypeEncoder fte,GSLayerEncoder layerEncoder,Metadata geoNetworkMeta, GNInsertConfiguration config,LoginLevel level,boolean promoteMetadataPublishing){
boolean publishResult = false;
PublishResponse toReturn=new PublishResponse(geoNetworkMeta);
GeoServerRESTPublisher publisher=null;
- GeoServerDescriptor desc=getGeoServerSet(false).last();
- logger.debug("Publish db table : "+storeName+" under ws : "+workspace+", using geoserver "+desc);
- logger.debug("Using "+desc);
+ AbstractGeoServerDescriptor desc=getCache().getDefaultDescriptor();
+ log.debug("Publish db table : "+storeName+" under ws : "+workspace+", using geoserver "+desc);
+ log.debug("Using "+desc);
try{
- GeoServerRESTManager mng=getGeoServerManager(desc.getUrl(), desc.getUser(), desc.getPassword());
- publisher=mng.getPublisher();
+ publisher=desc.getPublisher();
// Publishing the file to geoserver depends on file type
publishResult=publisher.publishDBLayer(workspace, storeName, fte, layerEncoder);
-
+
if(publishResult){
+ desc.onChangedLayers();
toReturn.setDataOperationResult(Report.OperationState.COMPLETE);
-
-
- logger.debug("Published data, enriching meta..");
+
+
+ log.debug("Published data, enriching meta..");
MetadataEnricher enricher=new MetadataEnricher(geoNetworkMeta, true);
-
+
ArrayList distributionUris=new ArrayList();
distributionUris.add(URIUtils.getWmsUrl(desc.getUrl(), fte.getName(), URIUtils.getStyleFromGSLayerEncoder(layerEncoder), BoundingBox.WORLD_EXTENT));
distributionUris.add(URIUtils.getWfsUrl(desc.getUrl(), fte.getName()));
@@ -244,20 +287,20 @@ public class GISInterface {
try{
distributionUris.add(URIUtils.getGisLinkByUUID(enricher.getMetadataIdentifier()));
}catch(Exception e){
- logger.warn("Unabel to get Gis Link ",e);
+ log.warn("Unable to get Gis Link ",e);
toReturn.setMetaOperationResult(OperationState.WARN);
toReturn.getMetaOperationMessages().add("Unable to generate GIS link, cause : "+e.getMessage());
}
-
-
+
+
enricher.addDate(new Date(System.currentTimeMillis()), DateType.CREATION);
enricher.addPreview(distributionUris.get(0));
enricher.setdistributionURIs(distributionUris,fte.getName());
-
+
toReturn.getMetaOperationMessages().addAll(enricher.getMessages());
if(enricher.getMessages().size()>0)toReturn.setMetaOperationResult(OperationState.WARN);
-
-
+
+
GeoNetworkPublisher pub=getGN();
getGN().login(level);
Metadata enriched=enricher.getEnriched();
@@ -267,28 +310,28 @@ public class GISInterface {
toReturn.setMetaOperationResult(OperationState.COMPLETE);
}else {
toReturn.getDataOperationMessages().add("Publish operation returned false, unable to publish data");
-
+
}
}catch(NoSuchElementException e){
toReturn.getDataOperationMessages().add("No GeoServer Found under scope "+ScopeUtils.getCurrentScope());
} catch (IllegalArgumentException e) {
if(publisher==null){
toReturn.getDataOperationMessages().add("Unable to instatiate GeoServerRESTPublisher, cause :"+e.getMessage());
- logger.debug("Unable to instatiate GeoServerRESTPublisher",e);
+ log.debug("Unable to instatiate GeoServerRESTPublisher",e);
}else {
toReturn.getDataOperationMessages().add("Unable to publish data, cause :"+e.getMessage());
- logger.debug("Unable to publish data",e);
+ log.debug("Unable to publish data",e);
}
} catch (MalformedURLException e) {
toReturn.getDataOperationMessages().add("Unable to instatiate GeoServerRESTPublisher, cause :"+e.getMessage());
- logger.debug("Unable to instatiate GeoServerRESTPublisher",e);
+ log.debug("Unable to instatiate GeoServerRESTPublisher",e);
} catch (FileNotFoundException e) {
toReturn.getDataOperationMessages().add("Unable to publish data, cause :"+e.getMessage());
- logger.debug("Unable to publish data",e);
+ log.debug("Unable to publish data",e);
} catch (Exception e) {
// exceptions raised by publishing metadata, need to clean up
toReturn.getMetaOperationMessages().add("Unable to publish metadata, cause :"+e.getMessage());
- logger.debug("Unable to publish metadata",e);
+ log.debug("Unable to publish metadata",e);
DeleteReport delRep=deleteLayer(workspace,fte.getName(),null,desc,level);
if(!delRep.getDataOperationResult().equals(OperationState.COMPLETE)){
toReturn.setDataOperationResult(OperationState.WARN);
@@ -298,20 +341,27 @@ public class GISInterface {
}
return toReturn;
}
-
-
+
+ /**
+ * Creates the declared style in the default GeoServer descriptor
+ *
+ * @param sldBody
+ * @param styleName
+ * @return
+ */
public PublishResponse publishStyle(String sldBody,String styleName){
boolean publishResult = false;
PublishResponse toReturn=new PublishResponse();
GeoServerRESTPublisher publisher=null;
- GeoServerDescriptor desc=getGeoServerSet(false).last();
- logger.debug("Using "+desc);
+ AbstractGeoServerDescriptor desc=getCache().getDefaultDescriptor();
+ log.debug("Using "+desc);
try{
- publisher=getGeoServerManager(desc.getUrl(), desc.getUser(), desc.getPassword()).getPublisher();
+ publisher=desc.getPublisher();
// Publishing the file to geoserver depends on file type
publishResult=publisher.publishStyle(sldBody, styleName);
-
+
if(publishResult){
+ desc.onChangedStyles();
toReturn.setDataOperationResult(Report.OperationState.COMPLETE);
}else toReturn.getDataOperationMessages().add("Publish operation returned false, unable to publish data");
}catch(NoSuchElementException e){
@@ -319,18 +369,18 @@ public class GISInterface {
} catch (IllegalArgumentException e) {
if(publisher==null){
toReturn.getDataOperationMessages().add("Unable to instatiate GeoServerRESTPublisher, cause :"+e.getMessage());
- logger.debug("Unable to instatiate GeoServerRESTPublisher",e);
+ log.debug("Unable to instatiate GeoServerRESTPublisher",e);
}else {
toReturn.getDataOperationMessages().add("Unable to publish data, cause :"+e.getMessage());
- logger.debug("Unable to publish data",e);
+ log.debug("Unable to publish data",e);
}
} catch (MalformedURLException e) {
toReturn.getDataOperationMessages().add("Unable to instatiate GeoServerRESTPublisher, cause :"+e.getMessage());
- logger.debug("Unable to instatiate GeoServerRESTPublisher",e);
+ log.debug("Unable to instatiate GeoServerRESTPublisher",e);
} catch (Exception e) {
// exceptions raised by publishing metadata, need to clean up
toReturn.getMetaOperationMessages().add("Unable to publish metadata, cause :"+e.getMessage());
- logger.debug("Unable to publish metadata",e);
+ log.debug("Unable to publish metadata",e);
DeleteReport delRep=deleteStyle(styleName,desc);
if(!delRep.getDataOperationResult().equals(OperationState.COMPLETE)){
toReturn.setDataOperationResult(OperationState.WARN);
@@ -340,19 +390,25 @@ public class GISInterface {
}
return toReturn;
}
-
+
// ********************* DELETE Logic
/**
- * @see it.geosolutions.geoserver.rest.GeoServerRESTPublisher#removeDatastore(String, String, boolean)
+ * Deletes the specified datastore from the GeoServer instance described in desc
*
+ * @param workspace
+ * @param storeName
+ * @param metadataUUID
+ * @param desc
+ * @return
*/
- public DeleteReport deleteStore(String workspace,String storeName,Long metadataUUID,GeoServerDescriptor desc){
+ public DeleteReport deleteStore(String workspace,String storeName,Long metadataUUID,AbstractGeoServerDescriptor desc){
DeleteReport toReturn=new DeleteReport();
GeoServerRESTPublisher publisher=null;
try{
- publisher=getGeoServerManager(desc.getUrl(), desc.getUser(), desc.getPassword()).getPublisher();
+ publisher=desc.getPublisher();
boolean removed=publisher.removeDatastore(workspace, storeName,true);
- if(removed){
+ if(removed){
+ desc.onChangedDataStores();
toReturn.setDataOperationResult(Report.OperationState.COMPLETE);
if(metadataUUID!=null){
getGN().deleteMetadata(metadataUUID);
@@ -374,14 +430,25 @@ public class GISInterface {
}
return toReturn;
}
-
- public DeleteReport deleteLayer(String workspace,String layerName, Long metadataUUID,GeoServerDescriptor desc,LoginLevel gnLoginLevel){
+
+ /**
+ * Deletes the specified layer from the GeoServer instance described by desc.
+ *
+ * @param workspace
+ * @param layerName
+ * @param metadataUUID
+ * @param desc
+ * @param gnLoginLevel
+ * @return
+ */
+ public DeleteReport deleteLayer(String workspace,String layerName, Long metadataUUID,AbstractGeoServerDescriptor desc,LoginLevel gnLoginLevel){
DeleteReport toReturn=new DeleteReport();
GeoServerRESTPublisher publisher=null;
try{
- publisher=getGeoServerManager(desc.getUrl(), desc.getUser(), desc.getPassword()).getPublisher();
+ publisher=desc.getPublisher();
boolean removed=publisher.removeLayer(workspace, layerName);
- if(removed){
+ if(removed){
+ desc.onChangedLayers();
toReturn.setDataOperationResult(Report.OperationState.COMPLETE);
if(metadataUUID!=null){
GeoNetworkPublisher gnPub=getGN();
@@ -405,14 +472,22 @@ public class GISInterface {
}
return toReturn;
}
-
- public DeleteReport deleteStyle(String styleName,GeoServerDescriptor desc){
+
+ /**
+ * Deletes a specific style from the GeoServer described by dec.
+ *
+ * @param styleName
+ * @param desc
+ * @return
+ */
+ public DeleteReport deleteStyle(String styleName,AbstractGeoServerDescriptor desc){
DeleteReport toReturn=new DeleteReport();
GeoServerRESTPublisher publisher=null;
try{
- publisher=getGeoServerManager(desc.getUrl(), desc.getUser(), desc.getPassword()).getPublisher();
+ publisher=desc.getPublisher();
boolean removed=publisher.removeStyle(styleName, true);
- if(removed){
+ if(removed){
+ desc.onChangedStyles();
toReturn.setDataOperationResult(Report.OperationState.COMPLETE);
}else toReturn.getDataOperationMessages().add("Remove data operation returned false, unable to delete Store");
}catch(NoSuchElementException e){
@@ -428,22 +503,100 @@ public class GISInterface {
}
return toReturn;
}
-
- //************ PRIVATE
-
- private GeoNetworkPublisher geoNetwork=null;
- private ConcurrentSkipListSet geoservers=new ConcurrentSkipListSet();
- private long lastAccessedTime=0l;
-
- private synchronized SortedSet getGeoServerSet(boolean forceRefresh){
- if(forceRefresh||geoservers.size()==0||System.currentTimeMillis()-lastAccessedTime>MAX_GEOSERVER_CACHE_TIME){
- geoservers.clear();
- geoservers.addAll(InfrastructureCrawler.queryforGeoServer());
- lastAccessedTime=System.currentTimeMillis();
+
+ //************* DATASTORES / WS
+
+
+
+ /**
+ * Creates the specified workspace in all GeoServer instances of the current GeoServer pool
+ *
+ * @param workspace
+ */
+ public void createWorkspace(String workspace){
+ log.info("Create workspace {} in geoservers",workspace);
+ if(workspace==null || workspace.length()<1) throw new RuntimeException("Invalid workspace name : "+workspace);
+ for(AbstractGeoServerDescriptor gs:getCurrentCacheElements(false)){
+ try{
+ createWorkspace(workspace,gs);
+ }catch(MalformedURLException e){
+ log.warn("Wrong URL in descriptor {} ",gs.getUrl(),e);
+ }catch(IllegalArgumentException e){
+ log.warn("Unable to operate service in {} ",gs.getUrl(),e);
+ }
+ catch(Exception e){
+ log.warn("Unable to check/create ws {} in {} ",workspace,gs.getUrl(),e);
+ }
}
- return geoservers;
}
-
+
+
+ /**
+ * Creates the specified datastore under the mentioned workspace in all GeoServer instances of the current GeoServer pool.
+ *
+ * @param workspace
+ * @param datastore
+ */
+ public void createDataStore(String workspace,GSAbstractStoreEncoder datastore){
+ log.info("Create datastore {}, ws {} in geoservers",datastore,workspace);
+ if(workspace==null || workspace.length()<1) throw new RuntimeException("Invalid workspace name : "+workspace);
+ if(datastore==null) throw new RuntimeException("Invalid datastore "+datastore);
+ for(AbstractGeoServerDescriptor gs:getCurrentCacheElements(false)){
+ try{
+ createDataStore(workspace,datastore,gs);
+ }catch(MalformedURLException e){
+ log.warn("Wrong URL in descriptor {} ",gs.getUrl(),e);
+ }catch(IllegalArgumentException e){
+ log.warn("Unable to operate service in {} ",gs.getUrl(),e);
+ }
+ catch(Exception e){
+ log.warn("Unable to check/create datastore {} in ws {} on {} ",datastore,workspace,gs.getUrl(),e);
+ }
+ }
+ }
+
+ private static void createWorkspace(String workspace,AbstractGeoServerDescriptor gs) throws MalformedURLException, IllegalArgumentException,Exception{
+ if(gs==null) throw new IllegalArgumentException("GeoServer Descriptor is "+gs);
+ log.info("Creating ws {} in {} ",workspace,gs.getUrl());
+ if(workspace==null || workspace.length()<1) throw new RuntimeException("Invalid workspace name : "+workspace);
+ if(gs.getWorkspaces().contains(workspace))
+ log.debug("Workspace {} already existing in {} ",workspace,gs.getUrl());
+ else{
+ boolean result =gs.getPublisher().createWorkspace(workspace);
+ gs.onChangedWorkspaces();
+ if(!gs.getWorkspaces().contains(workspace)) throw new Exception("Workspace is not created. Create operation returned "+result);
+ }
+ }
+
+
+
+ private static void createDataStore(String workspace,GSAbstractStoreEncoder datastore,AbstractGeoServerDescriptor gs)throws MalformedURLException, IllegalArgumentException,Exception{
+ if(gs==null) throw new IllegalArgumentException("GeoServer Descriptor is "+gs);
+ log.info("Create datastore {}, ws {} in {} ",datastore,workspace,gs.getUrl());
+ if(workspace==null || workspace.length()<1) throw new RuntimeException("Invalid workspace name : "+workspace);
+ createWorkspace(workspace,gs);
+ if(gs.getDatastores(workspace).contains(datastore.getName()))
+ log.debug("Datastore {}:{} already existing in {}",workspace,datastore.getName(),gs.getUrl());
+ else{
+ boolean result =gs.getDataStoreManager().create(workspace, datastore);
+ gs.onChangedDataStores();
+ if(!gs.getDatastores(workspace).contains(datastore.getName())) throw new Exception("Datastore not created. Create operation returned "+result);
+ }
+ }
+ //************ CACHE Management
+
+ private GeoServerCache theActualCache;
+
+
+ private GeoServerCache getCache(){
+ return theActualCache;
+ }
+
+ //************
+
+ private GeoNetworkPublisher geoNetwork=null;
+
+
private synchronized GeoNetworkPublisher getGN() throws Exception{
if(geoNetwork==null) {
geoNetwork=GeoNetwork.get();
@@ -453,13 +606,87 @@ public class GISInterface {
}
return geoNetwork;
}
-
- private GeoServerRESTManager getGeoServerManager(String url,String user,String password) throws IllegalArgumentException, MalformedURLException{
- return new GeoServerRESTManager(new URL(url), user, password);
+
+ /**
+ * Returns the current GeoServer from the GeoServer pool. Selection is made according to Configuration file.
+ *
+ * @return
+ */
+ public AbstractGeoServerDescriptor getCurrentGeoServer(){
+ return getCache().getDefaultDescriptor();
+ }
+
+ /**
+ * Returns the current GeoServer descriptors from the GeoServer pool.
+ *
+ * @param forceUpdate Set true to force re-initialization
+ * @return
+ */
+ public SortedSet getCurrentCacheElements(Boolean forceUpdate){
+ return getCache().getDescriptorSet(forceUpdate);
}
+ /**
+ * Returns a GeoServer descriptor according to specified ResearchMethod method.
+ *
+ * @param forceUpdate Set true to force re-initialization
+ * @return
+ */
+ public AbstractGeoServerDescriptor getGeoServerByMethod(ResearchMethod method, Boolean forceUpdate){
+ return getCache().getDescriptor(forceUpdate, method);
+ }
-
-
-
+ //************************ DEPRECATED OBSOLETE METHODS
+
+ @Deprecated
+ public GeoServerRESTReader getGeoServerReader(ResearchMethod method,boolean forceRefresh) throws Exception{
+ log.warn("*************** ACCESS TO DEPRECATED METHOD GeoServerRESTReader getGeoServerReader(ResearchMethod method,boolean forceRefresh). Please update your code.");
+ return getCache().getDescriptor(forceRefresh, method).getReader();
+ }
+
+ @Deprecated
+ public GeoServerRESTReader getGeoServerReader(GeoServerDescriptor desc)throws Exception{
+ log.warn("*************** ACCESS TO DEPRECATED METHOD GeoServerRESTReader getGeoServerReader(GeoServerDescriptor desc). Please update your code.");
+ return desc.getReader();
+ }
+
+ @Deprecated
+ public GeoServerRESTReader getGeoServerReader(String url,String user,String password) throws IllegalArgumentException, MalformedURLException{
+ log.warn("*************** ACCESS TO DEPRECATED METHOD GeoServerRESTReader getGeoServerReader(String url,String user,String password). Please update your code.");
+ return new GeoServerDescriptor(url,user,password,0l).getReader();
+ }
+
+ @Deprecated
+ public GeoServerRESTReader getGeoServerReader(String url) throws MalformedURLException{
+ log.warn("*************** ACCESS TO DEPRECATED METHOD GeoServerRESTReader getGeoServerReader(String url). Please update your code.");
+ return new GeoServerRESTReader(url);
+ }
+
+ @Deprecated
+ public GeoServerDescriptor getCurrentGeoServerDescriptor(){
+ log.warn("*************** ACCESS TO DEPRECATED METHOD GeoServerDescriptor getCurrentGeoServerDescriptor(). Please update your code.");
+ return translate(getCache().getDefaultDescriptor());
+
+ }
+
+ @Deprecated
+ public SortedSet getGeoServerDescriptorSet(boolean forceRefresh){
+ log.warn("*************** ACCESS TO DEPRECATED METHOD SortedSet getGeoServerDescriptorSet(boolean forceRefresh). Please update your code.");
+ ConcurrentSkipListSet toReturn=new ConcurrentSkipListSet();
+ for(Object desc: getCache().getDescriptorSet(forceRefresh)){
+ toReturn.add(translate((AbstractGeoServerDescriptor) desc));
+ }
+ return toReturn;
+ }
+
+ @Deprecated
+ private GeoServerDescriptor translate(AbstractGeoServerDescriptor desc){
+ long count=0l;
+ try{
+ count=desc.getHostedLayersCount();
+ }catch(Exception e){
+ log.warn("Unable to get layer count from desc {} ",desc,e);
+ }
+ return new GeoServerDescriptor (desc.getUrl(),desc.getUser(),desc.getPassword(),count);
+ }
}
diff --git a/src/main/java/org/gcube/spatial/data/gis/is/AbstractGeoServerDescriptor.java b/src/main/java/org/gcube/spatial/data/gis/is/AbstractGeoServerDescriptor.java
new file mode 100644
index 0000000..210c613
--- /dev/null
+++ b/src/main/java/org/gcube/spatial/data/gis/is/AbstractGeoServerDescriptor.java
@@ -0,0 +1,113 @@
+package org.gcube.spatial.data.gis.is;
+
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.Set;
+
+import it.geosolutions.geoserver.rest.GeoServerRESTManager;
+import it.geosolutions.geoserver.rest.GeoServerRESTPublisher;
+import it.geosolutions.geoserver.rest.GeoServerRESTReader;
+import it.geosolutions.geoserver.rest.manager.GeoServerRESTStoreManager;
+import lombok.Getter;
+import lombok.extern.slf4j.Slf4j;
+
+@Getter
+@Slf4j
+public abstract class AbstractGeoServerDescriptor implements Comparable{
+
+
+ private String url;
+ private String user;
+ private String password;
+
+ public AbstractGeoServerDescriptor(String url, String user, String password) {
+ super();
+ this.url = url;
+ this.user = user;
+ this.password = password;
+ }
+
+ @Override
+ public int compareTo(AbstractGeoServerDescriptor o) {
+ Long localCount=0l;
+ Long otherCount=0l;
+ try {
+ localCount=getHostedLayersCount();
+ otherCount=o.getHostedLayersCount();
+ } catch (MalformedURLException e) {
+ log.warn("Unable to evaluate count. This could lead to unbalanced layer amounts between instances",e);
+ }
+ return localCount.compareTo(otherCount);
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + ((url == null) ? 0 : url.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj)
+ return true;
+ if (obj == null)
+ return false;
+ if (getClass() != obj.getClass())
+ return false;
+ AbstractGeoServerDescriptor other = (AbstractGeoServerDescriptor) obj;
+ if (url == null) {
+ if (other.url != null)
+ return false;
+ } else if (!url.equals(other.url))
+ return false;
+ return true;
+ }
+
+
+ public abstract Long getHostedLayersCount() throws MalformedURLException;
+
+ public abstract Set getWorkspaces() throws MalformedURLException;
+
+ public abstract Set getStyles() throws MalformedURLException;
+
+ public abstract Set getDatastores(String workspace) throws MalformedURLException;
+
+
+ public GeoServerRESTReader getReader() throws MalformedURLException{
+ return getManager().getReader();
+ }
+
+ public GeoServerRESTStoreManager getDataStoreManager() throws IllegalArgumentException, MalformedURLException{
+ return getManager().getStoreManager();
+ }
+
+ public GeoServerRESTPublisher getPublisher() throws IllegalArgumentException, MalformedURLException{
+ return getManager().getPublisher();
+ }
+
+ protected GeoServerRESTManager getManager() throws IllegalArgumentException, MalformedURLException{
+ return new GeoServerRESTManager(new URL(url), user, password);
+ }
+
+
+
+ public void onChangedStyles(){}
+ public void onChangedWorkspaces(){}
+ public void onChangedDataStores(){}
+ public void onChangedLayers(){}
+
+ @Override
+ public String toString() {
+ long layersCount=0l;
+ try{
+ layersCount=getHostedLayersCount();
+ }catch(Exception e){
+ log.warn("Unable to get layer count on {} ",url,e);
+ }
+
+ return "AbstractGeoServerDescriptor [url=" + url + ", user=" + user + ", password=" + password
+ + ", layerCount=" + layersCount + "]";
+ }
+}
diff --git a/src/main/java/org/gcube/spatial/data/gis/is/CachedGeoServerDescriptor.java b/src/main/java/org/gcube/spatial/data/gis/is/CachedGeoServerDescriptor.java
new file mode 100644
index 0000000..54a11ba
--- /dev/null
+++ b/src/main/java/org/gcube/spatial/data/gis/is/CachedGeoServerDescriptor.java
@@ -0,0 +1,122 @@
+package org.gcube.spatial.data.gis.is;
+
+import java.net.MalformedURLException;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.gcube.spatial.data.gis.Configuration;
+
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public class CachedGeoServerDescriptor extends LiveGeoServerDescriptor {
+
+
+
+
+ private long lastDatastoreUpdate=0l;
+ private long lastWorkspaceUpdate=0l;
+ private long lastStylesUpdate=0l;
+ private long lastLayerCountUpdate=0l;
+
+ private HashMap> dataStores=null;
+ private HashSet workspaces=null;
+ private HashSet styles;
+ private Long hostedLayerCount=0l;
+
+
+ public CachedGeoServerDescriptor(String url, String user, String password) {
+ super(url, user, password);
+ // TODO Auto-generated constructor stub
+ }
+
+
+ @Override
+ public synchronized Set getDatastores(String workspace) throws MalformedURLException {
+ if(dataStores==null || (System.currentTimeMillis()-lastDatastoreUpdate>Configuration.getTTL(Configuration.GEOSERVER_DATASTORE_TTL))){
+ log.trace("Loading datastores for {} ",getUrl());
+ HashMap> toSet=new HashMap<>();
+ for(String ws: getWorkspaces()){
+ HashSet currentWsDatastores=new HashSet<>(super.getDatastores(ws));
+ log.debug("Found {} ds in {} ws ",currentWsDatastores.size(),ws);
+ toSet.put(ws, currentWsDatastores);
+ }
+ dataStores=toSet;
+ lastDatastoreUpdate=System.currentTimeMillis();
+ }
+ return dataStores.get(workspace);
+ }
+
+ @Override
+ public synchronized Long getHostedLayersCount() throws MalformedURLException {
+ if(System.currentTimeMillis()-lastLayerCountUpdate>Configuration.getTTL(Configuration.GEOSERVER_HOSTED_LAYERS_TTL)){
+ log.trace("Loading layer count for {} ",getUrl());
+ hostedLayerCount=super.getHostedLayersCount();
+ log.debug("Found {} layers ",hostedLayerCount);
+ lastLayerCountUpdate=System.currentTimeMillis();
+ }
+ return hostedLayerCount;
+ }
+
+
+ @Override
+ public synchronized Set getStyles() throws MalformedURLException {
+ if(styles==null||(System.currentTimeMillis()-lastStylesUpdate>Configuration.getTTL(Configuration.GEOSERVER_STYLES_TTL))){
+ log.trace("Loading styles for {} ",getUrl());
+ styles=new HashSet<>(super.getStyles());
+ log.debug("Found {} styles ",styles.size());
+ lastStylesUpdate=System.currentTimeMillis();
+ }
+ return styles;
+ }
+
+
+ @Override
+ public synchronized Set getWorkspaces() throws MalformedURLException {
+ if(workspaces==null||(System.currentTimeMillis()-lastWorkspaceUpdate>Configuration.getTTL(Configuration.GEOSERVER_WORKSPACE_TTL))){
+ log.trace("Loading workspaces for {} ",getUrl());
+ workspaces=new HashSet(super.getWorkspaces());
+ log.debug("Found {} workspaces",workspaces.size());
+ lastWorkspaceUpdate=System.currentTimeMillis();
+ }
+ return workspaces;
+ }
+
+
+ public void invalidateWorkspacesCache(){
+ lastWorkspaceUpdate=0l;
+ }
+
+ public void invalidateDatastoresCache(){
+ lastDatastoreUpdate=0l;
+ }
+
+ public void invalidateStylesCache(){
+ lastStylesUpdate=0l;
+ }
+
+ public void invalidateHostedLayersCountCache(){
+ lastLayerCountUpdate=0l;
+ }
+
+ @Override
+ public void onChangedDataStores() {
+ invalidateDatastoresCache();
+ }
+ @Override
+ public void onChangedLayers() {
+ invalidateHostedLayersCountCache();
+ }
+ @Override
+ public void onChangedStyles() {
+ invalidateStylesCache();
+ }
+ @Override
+ public void onChangedWorkspaces() {
+ invalidateWorkspacesCache();
+ invalidateDatastoresCache();
+ }
+
+
+}
diff --git a/src/main/java/org/gcube/spatial/data/gis/is/GeoServerDescriptor.java b/src/main/java/org/gcube/spatial/data/gis/is/GeoServerDescriptor.java
index 7987ed5..6f74e7f 100644
--- a/src/main/java/org/gcube/spatial/data/gis/is/GeoServerDescriptor.java
+++ b/src/main/java/org/gcube/spatial/data/gis/is/GeoServerDescriptor.java
@@ -1,144 +1,55 @@
package org.gcube.spatial.data.gis.is;
-public class GeoServerDescriptor implements Comparable{
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Set;
+public class GeoServerDescriptor extends AbstractGeoServerDescriptor {
- private String url;
- private String user;
- private String password;
private Long hostedLayersCount;
+ private HashMap> datastores=new HashMap<>();
+ private HashSet workspaces=new HashSet<>();
+ private HashSet styles=new HashSet<>();
- public GeoServerDescriptor(String url, String user, String password,
- Long hostedLayersCount) {
- super();
- this.url = url;
- this.user = user;
- this.password = password;
- this.hostedLayersCount = hostedLayersCount;
+
+ public GeoServerDescriptor(String url, String user, String password, Long hostedLayersCount) {
+ super(url, user, password);
+ this.hostedLayersCount=hostedLayersCount;
}
-
-
- /* (non-Javadoc)
- * @see java.lang.Comparable#compareTo(java.lang.Object)
- */
+
@Override
- public int compareTo(GeoServerDescriptor o) {
- // TODO Auto-generated method stub
- return hostedLayersCount.compareTo(o.hostedLayersCount);
- }
-
- /**
- * @return the url
- */
- public String getUrl() {
- return url;
- }
-
- /**
- * @param url the url to set
- */
- public void setUrl(String url) {
- this.url = url;
- }
-
- /**
- * @return the user
- */
- public String getUser() {
- return user;
- }
-
- /**
- * @param user the user to set
- */
- public void setUser(String user) {
- this.user = user;
- }
-
- /**
- * @return the password
- */
- public String getPassword() {
- return password;
- }
-
- /**
- * @param password the password to set
- */
- public void setPassword(String password) {
- this.password = password;
- }
-
- /**
- * @return the hostedLayersCount
- */
public Long getHostedLayersCount() {
return hostedLayersCount;
}
-
- /**
- * @param hostedLayersCount the hostedLayersCount to set
- */
+
+ @Override
+ public Set getDatastores(String workspace) {
+ return datastores.get(workspace);
+ }
+
+ @Override
+ public Set getStyles() {
+ return styles;
+ }
+
+ @Override
+ public Set getWorkspaces() {
+ return workspaces;
+ }
+
+ public void setDatastores(HashMap> datastores) {
+ this.datastores = datastores;
+ }
+
public void setHostedLayersCount(Long hostedLayersCount) {
this.hostedLayersCount = hostedLayersCount;
}
-
-
- /* (non-Javadoc)
- * @see java.lang.Object#hashCode()
- */
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + ((url == null) ? 0 : url.hashCode());
- return result;
+
+ public void setStyles(HashSet styles) {
+ this.styles = styles;
}
-
-
- /* (non-Javadoc)
- * @see java.lang.Object#equals(java.lang.Object)
- */
- @Override
- public boolean equals(Object obj) {
- if (this == obj)
- return true;
- if (obj == null)
- return false;
- if (getClass() != obj.getClass())
- return false;
- GeoServerDescriptor other = (GeoServerDescriptor) obj;
- if (url == null) {
- if (other.url != null)
- return false;
- } else if (!url.equals(other.url))
- return false;
- return true;
+ public void setWorkspaces(HashSet workspaces) {
+ this.workspaces = workspaces;
}
-
-
- /* (non-Javadoc)
- * @see java.lang.Object#toString()
- */
- @Override
- public String toString() {
- StringBuilder builder = new StringBuilder();
- builder.append("GeoServerDescriptor [url=");
- builder.append(url);
- builder.append(", user=");
- builder.append(user);
- builder.append(", password=");
- builder.append(password);
- builder.append(", hostedLayersCount=");
- builder.append(hostedLayersCount);
- builder.append("]");
- return builder.toString();
- }
-
-
-
-
-
-
}
diff --git a/src/main/java/org/gcube/spatial/data/gis/is/InfrastructureCrawler.java b/src/main/java/org/gcube/spatial/data/gis/is/InfrastructureCrawler.java
deleted file mode 100644
index eb4f49d..0000000
--- a/src/main/java/org/gcube/spatial/data/gis/is/InfrastructureCrawler.java
+++ /dev/null
@@ -1,46 +0,0 @@
-package org.gcube.spatial.data.gis.is;
-
-import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
-import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.gcube.common.encryption.StringEncrypter;
-import org.gcube.common.resources.gcore.ServiceEndpoint;
-import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
-import org.gcube.resources.discovery.client.api.DiscoveryClient;
-import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class InfrastructureCrawler {
-
- final static Logger logger= LoggerFactory.getLogger(InfrastructureCrawler.class);
-
- public static List queryforGeoServer(){
- List toReturn=new ArrayList();
- SimpleQuery query = queryFor(ServiceEndpoint.class);
-
- query.addCondition("$resource/Profile/Category/text() eq 'Gis'")
- .addCondition("$resource/Profile/Platform/Name/text() eq 'GeoServer'")
- .setResult("$resource/Profile/AccessPoint");
-
- DiscoveryClient client = clientFor(AccessPoint.class);
-
- List accesspoints = client.submit(query);
-// if(accesspoints.size()==0) throw new Exception("No Resource found under current scope "+ScopeProvider.instance.get());
- for (AccessPoint point : accesspoints) {
- try{
- toReturn.add(new GeoServerDescriptor(point.address(),point.username(),StringEncrypter.getEncrypter().decrypt(point.password()),0l));
- }catch(Exception e){
- logger.warn("Unable to decript password for "+point.username()+" in access point "+point.address()+", access to modify methods may fail");
- }
-// url=point.address();
-// user=point.username();
-// pwd=point.password();
- }
- return toReturn;
- }
-
-}
diff --git a/src/main/java/org/gcube/spatial/data/gis/is/LiveGeoServerDescriptor.java b/src/main/java/org/gcube/spatial/data/gis/is/LiveGeoServerDescriptor.java
new file mode 100644
index 0000000..b57c254
--- /dev/null
+++ b/src/main/java/org/gcube/spatial/data/gis/is/LiveGeoServerDescriptor.java
@@ -0,0 +1,34 @@
+package org.gcube.spatial.data.gis.is;
+
+import java.net.MalformedURLException;
+import java.util.HashSet;
+import java.util.Set;
+
+public class LiveGeoServerDescriptor extends AbstractGeoServerDescriptor {
+
+
+ public LiveGeoServerDescriptor(String url, String user, String password) {
+ super(url, user, password);
+ }
+
+ @Override
+ public Set getDatastores(String workspace) throws MalformedURLException {
+ return new HashSet(getReader().getDatastores(workspace).getNames());
+ }
+
+ @Override
+ public Long getHostedLayersCount() throws MalformedURLException {
+ return Long.valueOf(getReader().getLayers().size());
+ }
+
+ @Override
+ public Set getStyles() throws MalformedURLException {
+ return new HashSet(getReader().getStyles().getNames());
+ }
+
+ @Override
+ public Set getWorkspaces() throws MalformedURLException {
+ return new HashSet(getReader().getWorkspaceNames());
+ }
+
+}
diff --git a/src/main/java/org/gcube/spatial/data/gis/is/cache/ExplicitCache.java b/src/main/java/org/gcube/spatial/data/gis/is/cache/ExplicitCache.java
new file mode 100644
index 0000000..b557a4b
--- /dev/null
+++ b/src/main/java/org/gcube/spatial/data/gis/is/cache/ExplicitCache.java
@@ -0,0 +1,22 @@
+package org.gcube.spatial.data.gis.is.cache;
+
+import java.util.Collection;
+import java.util.SortedSet;
+import java.util.concurrent.ConcurrentSkipListSet;
+
+import org.gcube.spatial.data.gis.is.AbstractGeoServerDescriptor;
+
+public class ExplicitCache extends GeoServerCache {
+
+
+ SortedSet localCache=null;
+
+ public ExplicitCache(Collection toUseDescriptors) {
+ localCache=new ConcurrentSkipListSet(toUseDescriptors);
+ }
+
+ @Override
+ protected SortedSet getTheCache(Boolean forceUpdate) {
+ return localCache;
+ }
+}
diff --git a/src/main/java/org/gcube/spatial/data/gis/is/cache/GeoServerCache.java b/src/main/java/org/gcube/spatial/data/gis/is/cache/GeoServerCache.java
new file mode 100644
index 0000000..407cac0
--- /dev/null
+++ b/src/main/java/org/gcube/spatial/data/gis/is/cache/GeoServerCache.java
@@ -0,0 +1,85 @@
+package org.gcube.spatial.data.gis.is.cache;
+
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.concurrent.ThreadLocalRandom;
+
+import org.gcube.spatial.data.geonetwork.utils.ScopeUtils;
+import org.gcube.spatial.data.gis.Configuration;
+import org.gcube.spatial.data.gis.ResearchMethod;
+import org.gcube.spatial.data.gis.is.AbstractGeoServerDescriptor;
+
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public abstract class GeoServerCache {
+
+ private static final ResearchMethod DEFAULT_RESEARCH_METHOD=ResearchMethod.MOSTUNLOAD;
+
+
+
+ protected GeoServerCache() {
+
+ }
+
+ public SortedSet getDescriptorSet(Boolean forceUpdate) {
+ return getTheCache(forceUpdate);
+ }
+
+ public T getDefaultDescriptor() {
+ return getDefaultDescriptor(false);
+ }
+
+ public T getDefaultDescriptor(Boolean forceUpdate) {
+ return getDescriptor(forceUpdate,getDefaultMethod());
+ }
+
+ public T getDescriptor(Boolean forceUpdate, ResearchMethod method) {
+ SortedSet cache=getTheCache(forceUpdate);
+ log.debug("Access to {} instance in {} ",method,ScopeUtils.getCurrentScope());
+ switch(method){
+ case MOSTUNLOAD :
+ return cache.first();
+
+ case RANDOM : {
+ int size=cache.size();
+ int randomIndex= ThreadLocalRandom.current().nextInt(0, size);
+ log.debug("Accessing {} out of {} descriptors ",randomIndex,size);
+ return (T) cache.toArray()[randomIndex];
+ }
+ default : throw new RuntimeException("Unrecognized method "+method);
+ }
+ }
+
+ protected ResearchMethod getDefaultMethod(){
+ try{
+ return ResearchMethod.valueOf(Configuration.get().getProperty(Configuration.IS_ACCESS_POLICY));
+ }catch(Throwable t){
+ log.warn("Unable to read research method. Using default {}. Cause : ",DEFAULT_RESEARCH_METHOD,t);
+ return DEFAULT_RESEARCH_METHOD;
+ }
+ }
+
+
+
+
+ protected abstract SortedSet getTheCache(Boolean forceUpdate);
+
+// private synchronized ConcurrentSkipListSet getTheCache(Boolean forceUpdate){
+// if(forceUpdate || theCache==null || System.currentTimeMillis()-lastUpdate>Configuration.getTTL(Configuration.IS_CACHE_TTL)){
+// try{
+// log.debug("Going to retrieve information from IS..");
+// List retrieved=queryforGeoServer();
+// theCache=new ConcurrentSkipListSet<>(retrieved);
+// log.trace("Retrieved {} instances in {}",theCache.size(),ScopeUtils.getCurrentScope());
+// lastUpdate=System.currentTimeMillis();
+// }catch(IOException e){
+// log.error("Unable to query IS ",e);
+// }
+// }
+// return theCache;
+// }
+//
+//
+
+}
diff --git a/src/main/java/org/gcube/spatial/data/gis/is/cache/ISGeoServerCache.java b/src/main/java/org/gcube/spatial/data/gis/is/cache/ISGeoServerCache.java
new file mode 100644
index 0000000..bb984d0
--- /dev/null
+++ b/src/main/java/org/gcube/spatial/data/gis/is/cache/ISGeoServerCache.java
@@ -0,0 +1,89 @@
+package org.gcube.spatial.data.gis.is.cache;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.SortedSet;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentSkipListSet;
+
+import org.gcube.common.encryption.StringEncrypter;
+import org.gcube.common.resources.gcore.ServiceEndpoint;
+import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
+import org.gcube.resources.discovery.client.api.DiscoveryClient;
+import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
+import org.gcube.spatial.data.geonetwork.utils.ScopeUtils;
+import org.gcube.spatial.data.gis.Configuration;
+import org.gcube.spatial.data.gis.is.CachedGeoServerDescriptor;
+
+import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
+import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
+
+import lombok.extern.slf4j.Slf4j;
+@Slf4j
+public class ISGeoServerCache extends GeoServerCache {
+
+ private static final ConcurrentHashMap> scopedMap=new ConcurrentHashMap>();
+
+ private static final ConcurrentHashMap scopeLastUpdate=new ConcurrentHashMap();
+
+ @Override
+ protected synchronized SortedSet getTheCache(Boolean forceUpdate) {
+ String scope=ScopeUtils.getCurrentScope();
+ if(forceUpdate ||
+ ! scopedMap.containsKey(scope) ||
+ System.currentTimeMillis()-getLastUpdate()>Configuration.getTTL(Configuration.IS_CACHE_TTL)){
+ try{
+ log.debug("Going to retrieve information from IS. Scope is {} ",scope);
+ List retrieved=queryforGeoServer();
+ scopedMap.put(scope, new ConcurrentSkipListSet(retrieved));
+ log.trace("Retrieved {} instances in {}",retrieved.size(),scope);
+ setUpdated();
+ }catch(IOException e){
+ log.error("Unable to query IS ",e);
+ }
+ }
+ return scopedMap.get(scope);
+ }
+
+
+ private static synchronized Long getLastUpdate(){
+ String scope=ScopeUtils.getCurrentScope();
+ log.debug("Accessing lastUpdate in scope {} ",scope);
+ return scopeLastUpdate.getOrDefault(scope, 0l);
+ }
+
+ private static synchronized void setUpdated(){
+ String scope=ScopeUtils.getCurrentScope();
+ log.debug("Setting update for scope {} ",scope);
+ scopeLastUpdate.put(scope, System.currentTimeMillis());
+ }
+
+ private static List queryforGeoServer() throws IOException{
+ List toReturn=new ArrayList();
+ SimpleQuery query = queryFor(ServiceEndpoint.class);
+
+ String category=Configuration.get().getProperty(Configuration.IS_SERVICE_PROFILE_CATEGORY);
+ String name=Configuration.get().getProperty(Configuration.IS_SERVICE_PROFILE_PLATFORM_NAME);
+
+ log.debug("Querying IS for service profiles category {} , name {} ",category,name);
+
+ query.addCondition("$resource/Profile/Category/text() eq '"+category+"'")
+ .addCondition("$resource/Profile/Platform/Name/text() eq '"+name+"'")
+ .setResult("$resource/Profile/AccessPoint");
+
+ DiscoveryClient client = clientFor(AccessPoint.class);
+
+ List accesspoints = client.submit(query);
+
+ for (AccessPoint point : accesspoints) {
+ try{
+ toReturn.add(new CachedGeoServerDescriptor(point.address(),point.username(),StringEncrypter.getEncrypter().decrypt(point.password())));
+ }catch(Exception e){
+ log.warn("Skipping Geoserver at {}",point.address(),e);
+ }
+
+ }
+ return toReturn;
+ }
+}
diff --git a/src/main/java/org/gcube/spatial/data/gis/utils/Utils.java b/src/main/java/org/gcube/spatial/data/gis/utils/Utils.java
new file mode 100644
index 0000000..0781fad
--- /dev/null
+++ b/src/main/java/org/gcube/spatial/data/gis/utils/Utils.java
@@ -0,0 +1,5 @@
+package org.gcube.spatial.data.gis.utils;
+
+public class Utils {
+
+}
diff --git a/src/main/resources/gis-interface.properties b/src/main/resources/gis-interface.properties
new file mode 100644
index 0000000..54b217d
--- /dev/null
+++ b/src/main/resources/gis-interface.properties
@@ -0,0 +1,12 @@
+#Cache configuration
+#TTL in ms
+geoserver.cache.hostedLayers.TTL=60000
+geoserver.cache.styles.TTL=60000
+geoserver.cache.workspaces.TTL=600000
+geoserver.cache.datastores.TTL=600000
+is.cache.TTL=600000
+is.serviceProfile.category=Gis
+is.serviceProfile.platform.name=GeoServer
+
+#Avaliable methods are MOSTUNLOAD,RANDOM
+is.accessPolicy=MOSTUNLOAD
\ No newline at end of file
diff --git a/src/test/java/org/gcube/spatial/data/gis/Environment.java b/src/test/java/org/gcube/spatial/data/gis/Environment.java
index ae8566c..b10b750 100644
--- a/src/test/java/org/gcube/spatial/data/gis/Environment.java
+++ b/src/test/java/org/gcube/spatial/data/gis/Environment.java
@@ -1,12 +1,146 @@
package org.gcube.spatial.data.gis;
+import java.io.FileNotFoundException;
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.Map.Entry;
+import java.util.concurrent.ConcurrentSkipListMap;
+import java.util.concurrent.ConcurrentSkipListSet;
+
+import org.gcube.spatial.data.geonetwork.utils.ScopeUtils;
+import org.gcube.spatial.data.gis.is.AbstractGeoServerDescriptor;
import org.junit.Test;
public class Environment {
+
+
@Test
- public void test() throws Exception{
- TokenSetter.set("/d4science.research-infrastructures.eu");
- System.out.println(GISInterface.get().getCurrentGeoServerDescriptor());
+ public void test() throws FileNotFoundException {
+ ConcurrentSkipListMap errors=new ConcurrentSkipListMap();
+ ConcurrentSkipListSet scopes=new ConcurrentSkipListSet(getScopes());
+
+ PrintWriter out = new PrintWriter("report.txt");
+ for(String scope:scopes){
+ TokenSetter.set(scope);
+ try{
+ GISInterface gis=GISInterface.get();
+ out.println(printInfo(gis));
+ }catch(Throwable t){
+// System.err.println(t);
+ errors.put(scope, t.toString());
+ }
+ }
+
+ out.println("Problematic scopes: ");
+ for(Entry err:errors.entrySet())
+ out.println(err.getKey() +" --> "+err.getValue());
+
+ out.flush();
+ out.close();
}
+
+
+
+ private String printInfo(GISInterface gis)throws Exception{
+ StringBuilder builder=new StringBuilder("*********************************");
+ builder.append(ScopeUtils.getCurrentScope()+"\n");
+ for(AbstractGeoServerDescriptor desc: gis.getCurrentCacheElements(false)){
+ builder.append(desc+"\n");
+ builder.append("Styles : "+desc.getStyles()+" \n");
+ for(String ws:desc.getWorkspaces())
+ builder.append("Datastores in "+ws+" : "+desc.getDatastores(ws)+" \n");
+ }
+ builder.append("Selected : "+gis.getCurrentGeoServer());
+ return builder.toString();
+ }
+
+
+
+
+ private static ArrayList getScopes(){
+ ArrayList scopes=new ArrayList();
+
+
+ //*************************** PRODUCTION
+ scopes.add("/d4science.research-infrastructures.eu");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/InfraScience");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/ICES_TCRE");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/TabularDataLab");
+ scopes.add("/d4science.research-infrastructures.eu/FARM/AquaMaps");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/PGFA-UFMT");
+ scopes.add("/d4science.research-infrastructures.eu/FARM");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/EuBrazilOpenBio");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/EcologicalModelling");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/BlueBRIDGE-PSC");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/ENVRIPlus");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/ENVRI");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/BOBLME_HilsaAWG");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/ScalableDataMining");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/DESCRAMBLE");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/FAO_TunaAtlas");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/StocksAndFisheriesKB");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/BlueCommons");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/ICES_TCSSM");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/BlueBRIDGE-EAB");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/ARIADNE");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/ProtectedAreaImpactMaps");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/OpenIt");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/AquacultureAtlasGeneration");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/Parthenos");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/IGDI");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/EGIEngage");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/RStudioLab");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/TimeSeries");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/RPrototypingLab");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/TCom");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/ICCAT_BFT-E");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/SoBigData.it");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/BlueBridgeProject");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/BlueUptake");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/gCube");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/KnowledgeBridging");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/EFG");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/StockAssessment");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/iSearch");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/ICOS_ETC");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/VesselActivitiesAnalyzer");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/BiOnym");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/SoBigData.eu");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/PerformanceEvaluationInAquaculture");
+ scopes.add("/d4science.research-infrastructures.eu/gCubeApps/StrategicInvestmentAnalysis");
+
+ //******************** DEVELOPMENT
+
+ scopes.add("/gcube");
+ scopes.add("/gcube/devsec");
+ scopes.add("/gcube/devsec/BasicVRETest");
+ scopes.add("/gcube/devsec/GSTProcessingTest");
+ scopes.add("/gcube/devsec/StaTabTest");
+ scopes.add("/gcube/devsec/USTORE_VRE");
+ scopes.add("/gcube/devsec/TestTue10May_1822");
+ scopes.add("/gcube/devsec/OpenAireDevVRE");
+ scopes.add("/gcube/devsec/StaTabTest");
+ scopes.add("/gcube/devsec/TabProcessing");
+ scopes.add("/gcube/devsec/devVRE");
+ scopes.add("/gcube/devsec/TestFri26Feb2016");
+ scopes.add("/gcube/devsec/USTORE_VRE");
+ scopes.add("/gcube/devsec/RMinerDev");
+ scopes.add("/gcube/devsec/TabProcessing");
+ scopes.add("/gcube/devsec/devVRE");
+ scopes.add("/gcube/devsec/BlueVRE");
+ scopes.add("/gcube/devsec/TestFri26Feb2016");
+ scopes.add("/gcube/devsec/LucioVRE");
+
+ scopes.add("/gcube/preprod");
+ scopes.add("/gcube/preprod/Dorne");
+ scopes.add("/gcube/preprod/preVRE");
+
+ scopes.add("/gcube/devNext");
+ scopes.add("/gcube/devNext/NextNext");
+
+ return scopes;
+ }
}
diff --git a/src/test/java/org/gcube/spatial/data/gis/PublishStore.java b/src/test/java/org/gcube/spatial/data/gis/PublishStore.java
index c31fb0e..e65886a 100644
--- a/src/test/java/org/gcube/spatial/data/gis/PublishStore.java
+++ b/src/test/java/org/gcube/spatial/data/gis/PublishStore.java
@@ -1,9 +1,42 @@
package org.gcube.spatial.data.gis;
+import java.net.MalformedURLException;
+
+import org.gcube.spatial.data.gis.is.AbstractGeoServerDescriptor;
+
+import it.geosolutions.geoserver.rest.encoder.GSAbstractStoreEncoder;
+import it.geosolutions.geoserver.rest.encoder.datastore.GSPostGISDatastoreEncoder;
+
public class PublishStore {
- public static void maind (String[] args){
-// GISInterface.get().
+ public static void main (String[] args) throws Exception{
+ TokenSetter.set("/gcube/devsec");
+
+ String workspace="My another workspace";
+
+ GISInterface gis= GISInterface.get();
+ printWorkspaces(gis);
+
+ GISInterface.get().createWorkspace(workspace);
+ //Second should skip where existing
+ GISInterface.get().createWorkspace(workspace);
+
+ printWorkspaces(gis);
+
+
+ GSPostGISDatastoreEncoder datastore=new GSPostGISDatastoreEncoder("My datastore");
+// datastore.set
+ // Utils parameters to simplify caller's life will be provided
+
+
+// gis.createDataStore(workspace, datastore);
}
+
+ public static void printWorkspaces(GISInterface gis) throws MalformedURLException{
+ for(AbstractGeoServerDescriptor gs: gis.getCurrentCacheElements(false)){
+ System.out.println(gs.getWorkspaces());
+
+ }
+ }
}
diff --git a/src/test/java/org/gcube/spatial/data/gis/PublishTable.java b/src/test/java/org/gcube/spatial/data/gis/PublishTable.java
index 4fd16aa..1f8e306 100644
--- a/src/test/java/org/gcube/spatial/data/gis/PublishTable.java
+++ b/src/test/java/org/gcube/spatial/data/gis/PublishTable.java
@@ -55,7 +55,7 @@ public class PublishTable {
GISInterface gis=GISInterface.get();
- System.out.println(gis.getCurrentGeoServerDescriptor());
+ System.out.println(gis.getCurrentGeoServer());
PublishResponse resp=gis.publishDBTable(workspace, datastore, fte, le, meta.getMetadata(), "datasets", "_none_", LoginLevel.DEFAULT,false);
System.out.println(resp);
}
diff --git a/src/test/java/org/gcube/spatial/data/gis/Query.java b/src/test/java/org/gcube/spatial/data/gis/Query.java
deleted file mode 100644
index 8cc66f5..0000000
--- a/src/test/java/org/gcube/spatial/data/gis/Query.java
+++ /dev/null
@@ -1,51 +0,0 @@
-package org.gcube.spatial.data.gis;
-
-import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
-import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.gcube.common.encryption.StringEncrypter;
-import org.gcube.common.resources.gcore.ServiceEndpoint;
-import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
-import org.gcube.resources.discovery.client.api.DiscoveryClient;
-import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
-import org.gcube.spatial.data.gis.is.GeoServerDescriptor;
-
-public class Query {
-
- /**
- * @param args
- */
- public static void main(String[] args) {
- TokenSetter.set("/gcube/devsec");
- System.out.println(queryforGeoServer());
- }
-
- public static List queryforGeoServer(){
- List toReturn=new ArrayList();
- SimpleQuery query = queryFor(ServiceEndpoint.class);
-
- query.addCondition("$resource/Profile/Category/text() eq 'Gis'")
- .addCondition("$resource/Profile/Platform/Name/text() eq 'GeoServer'")
- .setResult("$resource/Profile/AccessPoint");
-
- DiscoveryClient client = clientFor(AccessPoint.class);
-
- List accesspoints = client.submit(query);
-// if(accesspoints.size()==0) throw new Exception("No Resource found under current scope "+ScopeProvider.instance.get());
- for (AccessPoint point : accesspoints) {
- try{
- toReturn.add(new GeoServerDescriptor(point.address(),point.username(),StringEncrypter.getEncrypter().decrypt(point.password()),0l));
- }catch(Exception e){
- System.err.println("Unable to decript password for "+point.username()+" in access point "+point.address()+", access to modify methods may fail");
- }
-// url=point.address();
-// user=point.username();
-// pwd=point.password();
- }
- return toReturn;
- }
-
-}
diff --git a/src/test/java/org/gcube/spatial/data/gis/TokenSetter.java b/src/test/java/org/gcube/spatial/data/gis/TokenSetter.java
index 7fffa14..15f516c 100644
--- a/src/test/java/org/gcube/spatial/data/gis/TokenSetter.java
+++ b/src/test/java/org/gcube/spatial/data/gis/TokenSetter.java
@@ -20,9 +20,12 @@ public class TokenSetter {
}
- public static void set(String scope){
- if(!props.containsKey(scope)) throw new RuntimeException("No token found for scope : "+scope);
- SecurityTokenProvider.instance.set(props.getProperty(scope));
+ public static void set(String scope){
+ try{
+ if(!props.containsKey(scope)) throw new RuntimeException("No token found for scope : "+scope);
+ SecurityTokenProvider.instance.set(props.getProperty(scope));
+	}catch(Throwable e){ System.err.println("Unable to set token for scope "+scope+" : "+e); }
+	// token setup is best-effort in tests; the scope is still set below
ScopeProvider.instance.set(scope);
}
diff --git a/src/test/resources/log4j.properties b/src/test/resources/log4j.properties
index b4ba341..48ce09b 100644
--- a/src/test/resources/log4j.properties
+++ b/src/test/resources/log4j.properties
@@ -1,7 +1,18 @@
-log4j.rootLogger=DEBUG, stdout
+#log4j.rootLogger=DEBUG, stdout
#CONSOLE
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.Threshold=INFO
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%t] %-5p %c %d{dd MMM yyyy ;HH:mm:ss.SSS} - %m%n
+
+#CONSOLE LOCAL
+#service-specific logger with dedicated appender
+log4j.logger.org.gcube.spatial.data.gis=TRACE, AQS
+log4j.appender.AQS=org.apache.log4j.RollingFileAppender
+log4j.appender.AQS.file=report.fulllog
+log4j.appender.AQS.MaxFileSize=10000KB
+log4j.appender.AQS.MaxBackupIndex=40
+log4j.appender.AQS.layout=org.apache.log4j.PatternLayout
+log4j.appender.AQS.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} [%t,%M:%L] %m%n
+log4j.appender.AQS.threshold=TRACE
\ No newline at end of file