imported components

Fabio Sinibaldi 2021-02-12 18:00:21 +01:00
parent cae76f1adc
commit 58eb80b939
140 changed files with 10321 additions and 3 deletions


@@ -20,7 +20,12 @@
<dependencies>
<dependency>
<groupId>org.gcube.spatial.data</groupId>
<artifactId>sdi-generic-client</artifactId>
</dependency>
</dependencies>

38
pom.xml

@@ -40,10 +40,11 @@
<module>sdi-interface</module>
<!-- client -->
<!-- <module>sdi-library</module> -->
<module>sdi-library</module>
<!-- service -->
<!-- <module>sdi-service</module> -->
<module>sdi-service</module>
</modules>
@@ -57,6 +58,39 @@
<type>pom</type>
<scope>import</scope>
</dependency>
<!-- modules -->
<dependency>
<groupId>org.gcube.spatial.data</groupId>
<artifactId>sdi-interface</artifactId>
<version>[1.0.0,2.0.0)</version>
</dependency>
<dependency>
<groupId>org.gcube.spatial.data</groupId>
<artifactId>sdi-library</artifactId>
<version>[1.0.0,2.0.0)</version>
</dependency>
<dependency>
<groupId>org.gcube.spatial.data</groupId>
<artifactId>sdi-generic-client</artifactId>
<version>[0.0.1-SNAPSHOT,2.0.0)</version>
</dependency>
<dependency>
<groupId>org.gcube.spatial.data</groupId>
<artifactId>gcube-geonetwork-client</artifactId>
<version>[0.0.1-SNAPSHOT,2.0.0)</version>
</dependency>
<dependency>
<groupId>org.gcube.spatial.data</groupId>
<artifactId>gcube-geoserver-client</artifactId>
<version>[0.0.1-SNAPSHOT,2.0.0)</version>
</dependency>
</dependencies>
</dependencyManagement>

BIN
sdi-library/.DS_Store vendored Normal file

Binary file not shown.

16
sdi-library/CHANGELOG.md Normal file

@@ -0,0 +1,16 @@
# Changelog for org.gcube.spatial.data.sdi-library
## [v1.3.0-SNAPSHOT] 2020-07-21
- Offer basic GIS functionalities
## [v1.2.0] 2020-07-21
### Enhancements
- Application Profile (https://support.d4science.org/issues/18939)
### Fixes
- Integration with gcube distribution (https://support.d4science.org/issues/19612)

26
sdi-library/FUNDING.md Normal file

@@ -0,0 +1,26 @@
# Acknowledgments
The projects leading to this software have received funding from a series of European Union programmes including:
- the Sixth Framework Programme for Research and Technological Development
- [DILIGENT](https://cordis.europa.eu/project/id/004260) (grant no. 004260).
- the Seventh Framework Programme for research, technological development and demonstration
- [D4Science](https://cordis.europa.eu/project/id/212488) (grant no. 212488);
- [D4Science-II](https://cordis.europa.eu/project/id/239019) (grant no. 239019);
- [ENVRI](https://cordis.europa.eu/project/id/283465) (grant no. 283465);
- [iMarine](https://cordis.europa.eu/project/id/283644) (grant no. 283644);
- [EUBrazilOpenBio](https://cordis.europa.eu/project/id/288754) (grant no. 288754).
- the H2020 research and innovation programme
- [SoBigData](https://cordis.europa.eu/project/id/654024) (grant no. 654024);
- [PARTHENOS](https://cordis.europa.eu/project/id/654119) (grant no. 654119);
- [EGI-Engage](https://cordis.europa.eu/project/id/654142) (grant no. 654142);
- [ENVRI PLUS](https://cordis.europa.eu/project/id/654182) (grant no. 654182);
- [BlueBRIDGE](https://cordis.europa.eu/project/id/675680) (grant no. 675680);
- [PerformFISH](https://cordis.europa.eu/project/id/727610) (grant no. 727610);
- [AGINFRA PLUS](https://cordis.europa.eu/project/id/731001) (grant no. 731001);
- [DESIRA](https://cordis.europa.eu/project/id/818194) (grant no. 818194);
- [ARIADNEplus](https://cordis.europa.eu/project/id/823914) (grant no. 823914);
- [RISIS 2](https://cordis.europa.eu/project/id/824091) (grant no. 824091);
- [EOSC-Pillar](https://cordis.europa.eu/project/id/857650) (grant no. 857650);
- [Blue Cloud](https://cordis.europa.eu/project/id/862409) (grant no. 862409);
- [SoBigData-PlusPlus](https://cordis.europa.eu/project/id/871042) (grant no. 871042);

312
sdi-library/LICENSE.md Normal file

@@ -0,0 +1,312 @@
# European Union Public Licence V. 1.1
EUPL © the European Community 2007
This European Union Public Licence (the “EUPL”) applies to the Work or Software
(as defined below) which is provided under the terms of this Licence. Any use of
the Work, other than as authorised under this Licence is prohibited (to the
extent such use is covered by a right of the copyright holder of the Work).
The Original Work is provided under the terms of this Licence when the Licensor
(as defined below) has placed the following notice immediately following the
copyright notice for the Original Work:
Licensed under the EUPL V.1.1
or has expressed by any other mean his willingness to license under the EUPL.
## 1. Definitions
In this Licence, the following terms have the following meaning:
- The Licence: this Licence.
- The Original Work or the Software: the software distributed and/or
communicated by the Licensor under this Licence, available as Source Code and
also as Executable Code as the case may be.
- Derivative Works: the works or software that could be created by the Licensee,
based upon the Original Work or modifications thereof. This Licence does not
define the extent of modification or dependence on the Original Work required
in order to classify a work as a Derivative Work; this extent is determined by
copyright law applicable in the country mentioned in Article 15.
- The Work: the Original Work and/or its Derivative Works.
- The Source Code: the human-readable form of the Work which is the most
convenient for people to study and modify.
- The Executable Code: any code which has generally been compiled and which is
meant to be interpreted by a computer as a program.
- The Licensor: the natural or legal person that distributes and/or communicates
the Work under the Licence.
- Contributor(s): any natural or legal person who modifies the Work under the
Licence, or otherwise contributes to the creation of a Derivative Work.
- The Licensee or “You”: any natural or legal person who makes any usage of the
Software under the terms of the Licence.
- Distribution and/or Communication: any act of selling, giving, lending,
renting, distributing, communicating, transmitting, or otherwise making
available, on-line or off-line, copies of the Work or providing access to its
essential functionalities at the disposal of any other natural or legal
person.
## 2. Scope of the rights granted by the Licence
The Licensor hereby grants You a world-wide, royalty-free, non-exclusive,
sub-licensable licence to do the following, for the duration of copyright vested
in the Original Work:
- use the Work in any circumstance and for all usage,
- reproduce the Work,
- modify the Original Work, and make Derivative Works based upon the Work,
- communicate to the public, including the right to make available or display the Work or copies thereof to the public and perform publicly, as the case may be, the Work,
- distribute the Work or copies thereof,
- lend and rent the Work or copies thereof,
- sub-license rights in the Work or copies thereof.
Those rights can be exercised on any media, supports and formats, whether now
known or later invented, as far as the applicable law permits so.
In the countries where moral rights apply, the Licensor waives his right to
exercise his moral right to the extent allowed by law in order to make effective
the licence of the economic rights here above listed.
The Licensor grants to the Licensee royalty-free, non exclusive usage rights to
any patents held by the Licensor, to the extent necessary to make use of the
rights granted on the Work under this Licence.
## 3. Communication of the Source Code
The Licensor may provide the Work either in its Source Code form, or as
Executable Code. If the Work is provided as Executable Code, the Licensor
provides in addition a machine-readable copy of the Source Code of the Work
along with each copy of the Work that the Licensor distributes or indicates, in
a notice following the copyright notice attached to the Work, a repository where
the Source Code is easily and freely accessible for as long as the Licensor
continues to distribute and/or communicate the Work.
## 4. Limitations on copyright
Nothing in this Licence is intended to deprive the Licensee of the benefits from
any exception or limitation to the exclusive rights of the rights owners in the
Original Work or Software, of the exhaustion of those rights or of other
applicable limitations thereto.
## 5. Obligations of the Licensee
The grant of the rights mentioned above is subject to some restrictions and
obligations imposed on the Licensee. Those obligations are the following:
Attribution right: the Licensee shall keep intact all copyright, patent or
trademarks notices and all notices that refer to the Licence and to the
disclaimer of warranties. The Licensee must include a copy of such notices and a
copy of the Licence with every copy of the Work he/she distributes and/or
communicates. The Licensee must cause any Derivative Work to carry prominent
notices stating that the Work has been modified and the date of modification.
Copyleft clause: If the Licensee distributes and/or communicates copies of the
Original Works or Derivative Works based upon the Original Work, this
Distribution and/or Communication will be done under the terms of this Licence
or of a later version of this Licence unless the Original Work is expressly
distributed only under this version of the Licence. The Licensee (becoming
Licensor) cannot offer or impose any additional terms or conditions on the Work
or Derivative Work that alter or restrict the terms of the Licence.
Compatibility clause: If the Licensee Distributes and/or Communicates Derivative
Works or copies thereof based upon both the Original Work and another work
licensed under a Compatible Licence, this Distribution and/or Communication can
be done under the terms of this Compatible Licence. For the sake of this clause,
“Compatible Licence” refers to the licences listed in the appendix attached to
this Licence. Should the Licensees obligations under the Compatible Licence
conflict with his/her obligations under this Licence, the obligations of the
Compatible Licence shall prevail.
Provision of Source Code: When distributing and/or communicating copies of the
Work, the Licensee will provide a machine-readable copy of the Source Code or
indicate a repository where this Source will be easily and freely available for
as long as the Licensee continues to distribute and/or communicate the Work.
Legal Protection: This Licence does not grant permission to use the trade names,
trademarks, service marks, or names of the Licensor, except as required for
reasonable and customary use in describing the origin of the Work and
reproducing the content of the copyright notice.
## 6. Chain of Authorship
The original Licensor warrants that the copyright in the Original Work granted
hereunder is owned by him/her or licensed to him/her and that he/she has the
power and authority to grant the Licence.
Each Contributor warrants that the copyright in the modifications he/she brings
to the Work are owned by him/her or licensed to him/her and that he/she has the
power and authority to grant the Licence.
Each time You accept the Licence, the original Licensor and subsequent
Contributors grant You a licence to their contributions to the Work, under the
terms of this Licence.
## 7. Disclaimer of Warranty
The Work is a work in progress, which is continuously improved by numerous
contributors. It is not a finished work and may therefore contain defects or
“bugs” inherent to this type of software development.
For the above reason, the Work is provided under the Licence on an “as is” basis
and without warranties of any kind concerning the Work, including without
limitation merchantability, fitness for a particular purpose, absence of defects
or errors, accuracy, non-infringement of intellectual property rights other than
copyright as stated in Article 6 of this Licence.
This disclaimer of warranty is an essential part of the Licence and a condition
for the grant of any rights to the Work.
## 8. Disclaimer of Liability
Except in the cases of wilful misconduct or damages directly caused to natural
persons, the Licensor will in no event be liable for any direct or indirect,
material or moral, damages of any kind, arising out of the Licence or of the use
of the Work, including without limitation, damages for loss of goodwill, work
stoppage, computer failure or malfunction, loss of data or any commercial
damage, even if the Licensor has been advised of the possibility of such
damage. However, the Licensor will be liable under statutory product liability
laws as far such laws apply to the Work.
## 9. Additional agreements
While distributing the Original Work or Derivative Works, You may choose to
conclude an additional agreement to offer, and charge a fee for, acceptance of
support, warranty, indemnity, or other liability obligations and/or services
consistent with this Licence. However, in accepting such obligations, You may
act only on your own behalf and on your sole responsibility, not on behalf of
the original Licensor or any other Contributor, and only if You agree to
indemnify, defend, and hold each Contributor harmless for any liability incurred
by, or claims asserted against such Contributor by the fact You have accepted
any such warranty or additional liability.
## 10. Acceptance of the Licence
The provisions of this Licence can be accepted by clicking on an icon “I agree”
placed under the bottom of a window displaying the text of this Licence or by
affirming consent in any other similar way, in accordance with the rules of
applicable law. Clicking on that icon indicates your clear and irrevocable
acceptance of this Licence and all of its terms and conditions.
Similarly, you irrevocably accept this Licence and all of its terms and
conditions by exercising any rights granted to You by Article 2 of this Licence,
such as the use of the Work, the creation by You of a Derivative Work or the
Distribution and/or Communication by You of the Work or copies thereof.
## 11. Information to the public
In case of any Distribution and/or Communication of the Work by means of
electronic communication by You (for example, by offering to download the Work
from a remote location) the distribution channel or media (for example, a
website) must at least provide to the public the information requested by the
applicable law regarding the Licensor, the Licence and the way it may be
accessible, concluded, stored and reproduced by the Licensee.
## 12. Termination of the Licence
The Licence and the rights granted hereunder will terminate automatically upon
any breach by the Licensee of the terms of the Licence.
Such a termination will not terminate the licences of any person who has
received the Work from the Licensee under the Licence, provided such persons
remain in full compliance with the Licence.
## 13. Miscellaneous
Without prejudice of Article 9 above, the Licence represents the complete
agreement between the Parties as to the Work licensed hereunder.
If any provision of the Licence is invalid or unenforceable under applicable
law, this will not affect the validity or enforceability of the Licence as a
whole. Such provision will be construed and/or reformed so as necessary to make
it valid and enforceable.
The European Commission may publish other linguistic versions and/or new
versions of this Licence, so far this is required and reasonable, without
reducing the scope of the rights granted by the Licence. New versions of the
Licence will be published with a unique version number.
All linguistic versions of this Licence, approved by the European Commission,
have identical value. Parties can take advantage of the linguistic version of
their choice.
## 14. Jurisdiction
Any litigation resulting from the interpretation of this License, arising
between the European Commission, as a Licensor, and any Licensee, will be
subject to the jurisdiction of the Court of Justice of the European Communities,
as laid down in article 238 of the Treaty establishing the European Community.
Any litigation arising between Parties, other than the European Commission, and
resulting from the interpretation of this License, will be subject to the
exclusive jurisdiction of the competent court where the Licensor resides or
conducts its primary business.
## 15. Applicable Law
This Licence shall be governed by the law of the European Union country where
the Licensor resides or has his registered office.
This licence shall be governed by the Belgian law if:
- a litigation arises between the European Commission, as a Licensor, and any Licensee;
- the Licensor, other than the European Commission, has no residence or registered office inside a European Union country.
## Appendix
“Compatible Licences” according to article 5 EUPL are:
- GNU General Public License (GNU GPL) v. 2
- Open Software License (OSL) v. 2.1, v. 3.0
- Common Public License v. 1.0
- Eclipse Public License v. 1.0
- Cecill v. 2.0

51
sdi-library/README.md Normal file

@@ -0,0 +1,51 @@
Spatial Data Infrastructure - Library
--------------------------------------------------
SDI-Library is one of the subsystems forming the gCube Spatial Data Infrastructure Facilities. It aims to provide gCube applications with simplified features for managing geospatial data and metadata.
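## Usage example
A minimal sketch of publishing a metadata file through the service proxy, based on the `MetadataTests` class included in this commit. It assumes a valid gCube token/scope has already been set (e.g. via `SecurityTokenProvider`/`ScopeProvider`); hostnames, file names and catalog names are placeholders.

```java
import java.io.File;
import org.gcube.spatial.data.sdi.interfaces.Metadata;
import org.gcube.spatial.data.sdi.model.metadata.MetadataPublishOptions;
import org.gcube.spatial.data.sdi.model.metadata.MetadataReport;
import org.gcube.spatial.data.sdi.model.metadata.TemplateInvocationBuilder;
import org.gcube.spatial.data.sdi.plugins.SDIAbstractPlugin;

public class PushMetadataExample {
    public static void main(String[] args) {
        // Resolve a Metadata proxy through the Information System (scope/token set beforehand)
        Metadata meta = SDIAbstractPlugin.metadata().build();

        // Enrich the record with THREDDS online resources, then publish it to GeoNetwork
        MetadataPublishOptions opts = new MetadataPublishOptions(
                new TemplateInvocationBuilder()
                        .threddsOnlineResources("my_hostname", "some_dataset.nc", "myPersonalCatalog")
                        .get());
        opts.setGeonetworkCategory("service");

        MetadataReport report = meta.pushMetadata(new File("toEnrichMeta.xml"), opts);
        System.out.println(report);
    }
}
```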
## Built with
* [OpenJDK](https://openjdk.java.net/) - The JDK used
* [JAX-RS](https://github.com/eclipse-ee4j/jaxrs-api) - Java™ API for RESTful Web Services
* [Jersey](https://jersey.github.io/) - JAX-RS runtime
* [Maven](https://maven.apache.org/) - Dependency Management
## Documentation
Documentation can be found [here](https://gcube.wiki.gcube-system.org/gcube/SDI-Service).
## Change log
See [CHANGELOG.md](CHANGELOG.md).
## License
This project is licensed under the EUPL V.1.1 License - see the [LICENSE.md](LICENSE.md) file for details.
## About the gCube Framework
This software is part of the [gCubeFramework](https://www.gcube-system.org/ "gCubeFramework"): an
open-source software toolkit used for building and operating Hybrid Data
Infrastructures enabling the dynamic deployment of Virtual Research Environments
by favouring the realisation of reuse oriented policies.
The projects leading to this software have received funding from a series of European Union programmes including:
- the Sixth Framework Programme for Research and Technological Development
- DILIGENT (grant no. 004260).
- the Seventh Framework Programme for research, technological development and demonstration
- D4Science (grant no. 212488);
- D4Science-II (grant no. 239019);
- ENVRI (grant no. 283465);
- iMarine (grant no. 283644);
- EUBrazilOpenBio (grant no. 288754).
- the H2020 research and innovation programme
- SoBigData (grant no. 654024);
- PARTHENOS (grant no. 654119);
- EGIEngage (grant no. 654142);
- ENVRIplus (grant no. 654182);
- BlueBRIDGE (grant no. 675680);
- PerformFish (grant no. 727610);
- AGINFRAplus (grant no. 731001);
- DESIRA (grant no. 818194);
- ARIADNEplus (grant no. 823914);
- RISIS2 (grant no. 824091);

99
sdi-library/pom.xml Normal file

@@ -0,0 +1,99 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>gcube-sdi-suite</artifactId>
<groupId>org.gcube.spatial.data</groupId>
<version>1.0.0-SNAPSHOT</version>
</parent>
<groupId>org.gcube.spatial.data</groupId>
<artifactId>sdi-library</artifactId>
<version>1.3.0-SNAPSHOT</version>
<name>SDI Library</name>
<description>Client library to interact with gCube SDI Service</description>
<scm>
<connection>scm:git:${gitBaseUrl}/gFeed</connection>
<developerConnection>scm:git:${gitBaseUrl}/gFeed</developerConnection>
<url>${gitBaseUrl}/gFeed</url>
</scm>
<dependencies>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-fw-clients</artifactId>
</dependency>
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>authorization-client</artifactId>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-generic-clients</artifactId>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-gcube-calls</artifactId>
</dependency>
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>common-jaxrs-client</artifactId>
<version>[1.0.0,2.0.0)</version>
</dependency>
<dependency>
<groupId>org.gcube.spatial.data</groupId>
<artifactId>sdi-interface</artifactId>
</dependency>
<!-- JERSEY -->
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-client</artifactId>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-json-jackson</artifactId>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-multipart</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<!-- TEST -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.10</version>
<scope>test</scope>
</dependency>
<!-- Test log binding -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</project>


@@ -0,0 +1,14 @@
package org.gcube.spatial.data.sdi;
import java.io.File;
import javax.xml.bind.JAXBException;
import org.opengis.metadata.Metadata;
public interface SDICatalog {
}


@@ -0,0 +1,7 @@
package org.gcube.spatial.data.sdi;
public interface SDIClient {
//public Scope getCurrentConfiguration();
}


@@ -0,0 +1,28 @@
package org.gcube.spatial.data.sdi;
import java.io.IOException;
import java.util.Collections;
import java.util.Map.Entry;
import javax.ws.rs.client.ClientRequestContext;
import javax.ws.rs.client.ClientRequestFilter;
import org.gcube.common.calls.Call;
import org.gcube.common.calls.Interceptors;
import org.gcube.common.calls.Request;
import org.gcube.spatial.data.sdi.utils.ScopeUtils;
public class TokenFilter implements ClientRequestFilter {
@Override
public void filter(final ClientRequestContext rc) throws IOException {
if (ScopeUtils.getCurrentScope()!=null){
Request requestContext = Interceptors.executeRequestChain(new Call());
for (Entry<String, String> entry: requestContext.getHeaders()){
rc.getHeaders().put(entry.getKey(), Collections.singletonList((Object)entry.getValue()));
}
}
}
}
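A note on usage: `TokenFilter` propagates the gCube call headers (derived from the current token/scope) onto outgoing JAX-RS requests. The sketch below shows how such a filter could be registered on a plain JAX-RS client; in practice the common-jaxrs-client/proxy machinery is expected to wire it in, and the target address is a placeholder.

```java
package org.gcube.spatial.data.sdi;

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

public class TokenFilterExample {
    public static void main(String[] args) {
        // Manual registration, for illustration only.
        Client client = ClientBuilder.newClient().register(new TokenFilter());

        // Hypothetical endpoint: requests made through this client now carry the gCube headers.
        Response resp = client.target("http://some-sdi-host/sdi-service/gcube/service")
                .request(MediaType.APPLICATION_JSON)
                .get();
        System.out.println(resp.getStatus());
    }
}
```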


@@ -0,0 +1,45 @@
package org.gcube.spatial.data.sdi.plugins;
import javax.ws.rs.client.WebTarget;
import javax.xml.namespace.QName;
import javax.xml.transform.dom.DOMResult;
import javax.xml.ws.EndpointReference;
import org.gcube.common.calls.jaxrs.GcubeService;
import org.gcube.common.calls.jaxrs.TargetFactory;
import org.gcube.common.clients.config.ProxyConfig;
import org.gcube.common.clients.delegates.ProxyDelegate;
import org.gcube.spatial.data.sdi.interfaces.Metadata;
import org.gcube.spatial.data.sdi.model.ServiceConstants;
import org.gcube.spatial.data.sdi.proxies.DefaultMetadata;
import org.w3c.dom.Node;
public class MetadataPlugin extends SDIAbstractPlugin<WebTarget, Metadata>{
public MetadataPlugin() {
super("sdi-service/gcube/service");
}
@Override
public Exception convert(Exception arg0, ProxyConfig<?, ?> arg1) {
return arg0;
}
@Override
public Metadata newProxy(ProxyDelegate<WebTarget> arg0) {
return new DefaultMetadata(arg0);
}
@Override
public WebTarget resolve(EndpointReference epr, ProxyConfig<?, ?> arg1) throws Exception {
DOMResult result = new DOMResult();
epr.writeTo(result);
Node node =result.getNode();
Node child=node.getFirstChild();
String address = child.getTextContent();
GcubeService service = GcubeService.service().
withName(new QName(ServiceConstants.NAMESPACE,ServiceConstants.Metadata.INTERFACE)).
andPath(ServiceConstants.Metadata.INTERFACE);
return TargetFactory.stubFor(service).at(address);
}
}


@@ -0,0 +1,62 @@
package org.gcube.spatial.data.sdi.plugins;
import javax.ws.rs.client.WebTarget;
import org.gcube.common.clients.ProxyBuilder;
import org.gcube.common.clients.ProxyBuilderImpl;
import org.gcube.common.clients.Plugin;
import org.gcube.spatial.data.sdi.interfaces.Metadata;
import org.gcube.spatial.data.sdi.model.ServiceConstants;
public abstract class SDIAbstractPlugin<S, P> implements Plugin<S, P>{
private static final MetadataPlugin metadata_plugin=new MetadataPlugin();
public static ProxyBuilder<Metadata> metadata() {
return new ProxyBuilderImpl<WebTarget,Metadata>(metadata_plugin);
}
// public static ProxyBuilder<ClassificationClient> classification() {
// return new ProxyBuilderImpl<WebTarget,ClassificationClient>(classification_plugin);
// }
//
// public static ProxyBuilder<OccurrenceClient> occurrences() {
// return new ProxyBuilderImpl<WebTarget,OccurrenceClient>(occurrence_plugin);
// }
//
// public static ProxyBuilder<ExecutorClient> executor() {
// return new ProxyBuilderImpl<WebTarget,ExecutorClient>(executor_plugin);
// }
//
// public static ProxyBuilder<ResultSetClient> resultset(String endpointId) {
// LegacyQuery query = new LegacyQuery(resultset_plugin);
// query.addCondition("$resource/ID/string() eq '"+endpointId+"'");
// return new ProxyBuilderImpl<WebTarget,ResultSetClient>(resultset_plugin, query);
// }
public final String name;
public SDIAbstractPlugin(String name) {
this.name = name;
}
@Override
public String serviceClass() {
return ServiceConstants.SERVICE_CLASS;
}
@Override
public String serviceName() {
return ServiceConstants.SERVICE_NAME;
}
@Override
public String name() {
return name;
}
@Override
public String namespace() {
return ServiceConstants.NAMESPACE;
}
}
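How a proxy is obtained: clients go through the static builders on `SDIAbstractPlugin`. The sketch below mirrors the `MetadataTests` class in this commit: the first form discovers the service endpoint through the Information System using the current scope, the second (taken from the commented-out line in that test) addresses a known endpoint directly; the hostname is a placeholder.

```java
import java.net.URI;
import org.gcube.spatial.data.sdi.interfaces.Metadata;
import org.gcube.spatial.data.sdi.plugins.SDIAbstractPlugin;

public class MetadataProxyExample {
    public static void main(String[] args) throws Exception {
        // Discovery through the Information System (scope/token must be set beforehand)
        Metadata meta = SDIAbstractPlugin.metadata().build();
        System.out.println(meta.getAvailableTemplates());

        // Direct addressing of a known endpoint (placeholder hostname)
        Metadata direct = SDIAbstractPlugin.metadata()
                .at(new URI("http://some-sdi-host/sdi-service/gcube/service"))
                .build();
        System.out.println(direct.getAvailableTemplates());
    }
}
```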


@@ -0,0 +1,145 @@
package org.gcube.spatial.data.sdi.proxies;
import java.io.File;
import java.util.HashSet;
import java.util.Set;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.GenericType;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.gcube.common.clients.Call;
import org.gcube.common.clients.delegates.ProxyDelegate;
import org.gcube.spatial.data.sdi.interfaces.Metadata;
import org.gcube.spatial.data.sdi.model.ServiceConstants;
import org.gcube.spatial.data.sdi.model.metadata.MetadataPublishOptions;
import org.gcube.spatial.data.sdi.model.metadata.MetadataReport;
import org.gcube.spatial.data.sdi.model.metadata.TemplateCollection;
import org.gcube.spatial.data.sdi.model.metadata.TemplateDescriptor;
import org.gcube.spatial.data.sdi.model.metadata.TemplateInvocation;
import org.glassfish.jersey.media.multipart.FormDataMultiPart;
import org.glassfish.jersey.media.multipart.MultiPartFeature;
import org.glassfish.jersey.media.multipart.file.FileDataBodyPart;
public class DefaultMetadata implements Metadata{
private final ProxyDelegate<WebTarget> delegate;
public DefaultMetadata(ProxyDelegate<WebTarget> config){
this.delegate = config;
}
@Override
public TemplateCollection getAvailableTemplates() {
Call<WebTarget, Set<TemplateDescriptor>> call = new Call<WebTarget, Set<TemplateDescriptor>>() {
@Override
public Set<TemplateDescriptor> call(WebTarget templates) throws Exception {
GenericType<Set<TemplateDescriptor>> generic=new GenericType<Set<TemplateDescriptor>>() {
};
return templates.path(ServiceConstants.Metadata.LIST_METHOD).request(MediaType.APPLICATION_JSON).get(generic);
}
};
try {
return new TemplateCollection(new HashSet<TemplateDescriptor>(delegate.make(call)));
}catch(Exception e) {
throw new RuntimeException(e);
}
}
@Override
public MetadataReport pushMetadata(File toPublish) {
return pushMetadata(toPublish, new MetadataPublishOptions());
}
@Override
public MetadataReport pushMetadata(final File toPublish, final MetadataPublishOptions options) {
// upload Meta
Call<WebTarget,MetadataReport> applyTemplatesCall=null; //needs uploaded id
Call<WebTarget,MetadataReport> publishCall=null; //needs uploaded id
try{
Call<WebTarget,String> uploadCall=new Call<WebTarget, String>() {
@Override
public String call(WebTarget endpoint) throws Exception {
endpoint.register(MultiPartFeature.class);
FormDataMultiPart multi=new FormDataMultiPart();
// multi.field("file",toPublish,MediaType.APPLICATION_OCTET_STREAM_TYPE);
FileDataBodyPart fileDataBodyPart = new FileDataBodyPart(ServiceConstants.Metadata.UPLOADED_FILE_PARAMETER,
toPublish,MediaType.APPLICATION_OCTET_STREAM_TYPE);
multi.bodyPart(fileDataBodyPart);
Response resp= endpoint.request().post(Entity.entity(multi, multi.getMediaType()));
checkResponse(resp);
return resp.readEntity(String.class);
}
};
final String id=delegate.make(uploadCall);
applyTemplatesCall=new Call<WebTarget, MetadataReport>() {
@Override
public MetadataReport call(WebTarget endpoint) throws Exception {
Response resp= endpoint.path(id).
request(MediaType.APPLICATION_JSON).put(Entity.entity(
new HashSet<TemplateInvocation>(options.getTemplateInvocations()),MediaType.APPLICATION_JSON));
checkResponse(resp);
return resp.readEntity(MetadataReport.class);
}
};
publishCall=new Call<WebTarget,MetadataReport>(){
@Override
public MetadataReport call(WebTarget endpoint) throws Exception {
Response resp= endpoint.path(ServiceConstants.Metadata.PUBLISH_METHOD).path(id).path(options.getGeonetworkCategory()).
queryParam(ServiceConstants.Metadata.VALIDATE_PARAMETER, options.isValidate()).
queryParam(ServiceConstants.Metadata.PUBLIC_PARAMETER, options.isMakePublic()).
queryParam(ServiceConstants.Metadata.STYLESHEET_PARAMETER, options.getGeonetworkStyleSheet()).
request(MediaType.APPLICATION_JSON).get();
checkResponse(resp);
return resp.readEntity(MetadataReport.class);
}
};
}catch(Throwable t){
throw new RuntimeException("Unable to upload file.",t);
}
//APPLY TEMPLATES
MetadataReport templateReport =null;
try{
if(!options.getTemplateInvocations().isEmpty())
templateReport=delegate.make(applyTemplatesCall);
}catch(Throwable t){
throw new RuntimeException("Unable to apply templates",t);
}
//PUBLISH
try{
MetadataReport publishReport=delegate.make(publishCall);
if(templateReport!=null) publishReport.setAppliedTemplates(templateReport.getAppliedTemplates());
return publishReport;
}catch(Throwable t){
throw new RuntimeException("Unable to publish metadata. ",t);
}
}
protected void checkResponse(Response toCheck) throws Exception{
switch(toCheck.getStatusInfo().getFamily()){
case SUCCESSFUL : break;
default : throw new Exception("Unexpected Response code : "+toCheck.getStatus(),new Exception(toCheck.readEntity(String.class)));
}
}
}


@@ -0,0 +1,40 @@
package org.gcube.spatial.data.sdi.utils;
import org.gcube.common.authorization.library.AuthorizationEntry;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import static org.gcube.common.authorization.client.Constants.authorizationService;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class ScopeUtils {
public static String getCurrentScope(){
try{
String token=SecurityTokenProvider.instance.get();
log.debug("Token is : "+token);
if(token==null) throw new Exception("Security Token is null");
AuthorizationEntry entry = authorizationService().get(token);
return entry.getContext();
}catch(Exception e ){
log.debug("Unable to resolve token, checking scope provider..",e);
return ScopeProvider.instance.get();
}
}
public static String getCurrentCaller(){
try{
String token=SecurityTokenProvider.instance.get();
log.debug("Token is : "+token);
if(token==null) throw new Exception("Security Token is null");
AuthorizationEntry entry = authorizationService().get(token);
return entry.getClientInfo().getId();
}catch(Exception e ){
log.debug("Unable to resolve token, checking scope provider..",e);
return "Unidentified data-transfer user";
}
}
}


@@ -0,0 +1,53 @@
package org.gcube.spatial.data.sdi;
import java.io.File;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.Paths;
import org.gcube.spatial.data.sdi.interfaces.Metadata;
import org.gcube.spatial.data.sdi.model.metadata.MetadataPublishOptions;
import org.gcube.spatial.data.sdi.model.metadata.TemplateInvocationBuilder;
import org.gcube.spatial.data.sdi.plugins.SDIAbstractPlugin;
import org.junit.Before;
import org.junit.Test;
public class MetadataTests {
String sdiHostname="sdi-t.pre.d4science.org";
String scope ="/pred4s/preprod/preVRE";
//
// String sdiHostname="sdi-d-d4s.d4science.org";
// String scope = "/gcube/devsec/devVRE";
@Before
public void setScope(){
TokenSetter.set(scope);
}
@Test
public void getAvailableTemplatesTest() throws IllegalArgumentException, URISyntaxException{
// Metadata meta=SDIAbstractPlugin.metadata().at(new URI("http://"+sdiHostname+"/sdi-service/gcube/service")).build();
Metadata meta=SDIAbstractPlugin.metadata().build();
System.out.println(meta.getAvailableTemplates());
}
@Test
public void pushMetadata() throws IllegalArgumentException, URISyntaxException{
File toPublish=Paths.get("src/test/resources/toEnrichMeta.xml").toFile();
// Metadata meta=SDIAbstractPlugin.metadata().at(new URI("http://"+sdiHostname+"/sdi-service/gcube/service")).build();
Metadata meta=SDIAbstractPlugin.metadata().build();
System.out.println(meta.pushMetadata(toPublish));
MetadataPublishOptions opts=new MetadataPublishOptions(new TemplateInvocationBuilder().threddsOnlineResources("my_hostname", "some_dataset.nc", "myPersonalCatalog").get());
opts.setGeonetworkCategory("service");
opts.setValidate(false);
System.out.println(meta.pushMetadata(toPublish, opts));
}
}


@@ -0,0 +1,38 @@
package org.gcube.spatial.data.sdi;
import java.util.Properties;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class TokenSetter {
private static Properties props=new Properties();
static{
try {
props.load(TokenSetter.class.getResourceAsStream("/tokens.properties"));
} catch (Exception e) {
throw new RuntimeException("YOU NEED TO SET TOKEN FILE IN CONFIGURATION");
}
}
public static void set(String scope){
try{
if(!props.containsKey(scope)) throw new RuntimeException("No token found for scope : "+scope);
SecurityTokenProvider.instance.set(props.getProperty(scope));
}catch(Throwable e){
log.warn("Unable to set token for scope "+scope,e);
}
ScopeProvider.instance.set(scope);
}
}


@@ -0,0 +1,13 @@
package org.gcube.spatial.data.sdi.utils;
public class RegisterGN {
public static void main(String[] args) {
// Get definition
// push to service
// GeonetworkDefinition lookup and registration is left as a stub in this commit
}
}


@@ -0,0 +1,298 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<gmd:MD_Metadata xmlns:gmi="http://www.isotc211.org/2005/gmi" xmlns:gco="http://www.isotc211.org/2005/gco" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:gmx="http://www.isotc211.org/2005/gmx" xmlns:fra="http://www.cnig.gouv.fr/2005/fra" xmlns:gmd="http://www.isotc211.org/2005/gmd" xmlns:gml="http://www.opengis.net/gml" xmlns:xlink="http://www.w3.org/1999/xlink">
<gmd:fileIdentifier>
<gco:CharacterString>a9dfe3b7-6b46-4013-ab1c-313ef2128d31</gco:CharacterString>
</gmd:fileIdentifier>
<gmd:language>
<gmd:LanguageCode codeListValue="eng" codeList="http://schemas.opengis.net/iso/19139/20070417/resources/Codelist/ML_gmxCodelists.xml#LanguageCode">English</gmd:LanguageCode>
</gmd:language>
<gmd:characterSet>
<gmd:MD_CharacterSetCode codeListValue="utf8" codeList="http://schemas.opengis.net/iso/19139/20070417/resources/Codelist/gmxCodelists.xml#MD_CharacterSetCode">UTF-8</gmd:MD_CharacterSetCode>
</gmd:characterSet>
<gmd:hierarchyLevel>
<gmd:MD_ScopeCode codeSpace="eng" codeListValue="dataset" codeList="http://schemas.opengis.net/iso/19139/20070417/resources/Codelist/gmxCodelists.xml#MD_ScopeCode">Dataset</gmd:MD_ScopeCode>
</gmd:hierarchyLevel>
<gmd:contact>
<gmd:CI_ResponsibleParty>
<gmd:individualName>
<gco:CharacterString>fabio.sinibaldi</gco:CharacterString>
</gmd:individualName>
<gmd:organisationName>
<gco:CharacterString>iMarine Consortium</gco:CharacterString>
</gmd:organisationName>
<gmd:role>
<gmd:CI_RoleCode codeListValue="author" codeList="http://schemas.opengis.net/iso/19139/20070417/resources/Codelist/gmxCodelists.xml#CI_RoleCode">Author</gmd:CI_RoleCode>
</gmd:role>
</gmd:CI_ResponsibleParty>
</gmd:contact>
<gmd:contact>
<gmd:CI_ResponsibleParty>
<gmd:individualName>
<gco:CharacterString>iMarine.eu</gco:CharacterString>
</gmd:individualName>
<gmd:organisationName>
<gco:CharacterString>iMarine Consortium</gco:CharacterString>
</gmd:organisationName>
<gmd:contactInfo>
<gmd:CI_Contact>
<gmd:address>
<gmd:CI_Address>
<gmd:electronicMailAddress>
<gco:CharacterString>info@i-marine.eu</gco:CharacterString>
</gmd:electronicMailAddress>
</gmd:CI_Address>
</gmd:address>
<gmd:onlineResource>
<gmd:CI_OnlineResource>
<gmd:linkage>
<gmd:URL>http://www.i-marine.eu</gmd:URL>
</gmd:linkage>
<gmd:protocol>
<gco:CharacterString>WWW:LINK-1.0-http--link</gco:CharacterString>
</gmd:protocol>
<gmd:name>
<gco:CharacterString>iMarine Consortium site.</gco:CharacterString>
</gmd:name>
</gmd:CI_OnlineResource>
</gmd:onlineResource>
</gmd:CI_Contact>
</gmd:contactInfo>
<gmd:role>
<gmd:CI_RoleCode codeListValue="distributor" codeList="http://schemas.opengis.net/iso/19139/20070417/resources/Codelist/gmxCodelists.xml#CI_RoleCode">Distributor</gmd:CI_RoleCode>
</gmd:role>
</gmd:CI_ResponsibleParty>
</gmd:contact>
<gmd:contact>
<gmd:CI_ResponsibleParty>
<gmd:individualName>
<gco:CharacterString>iMarine Consortium Technical Support</gco:CharacterString>
</gmd:individualName>
<gmd:organisationName>
<gco:CharacterString>iMarine Consortium</gco:CharacterString>
</gmd:organisationName>
<gmd:contactInfo>
<gmd:CI_Contact>
<gmd:address>
<gmd:CI_Address>
<gmd:electronicMailAddress>
<gco:CharacterString>support@i-marine.eu</gco:CharacterString>
</gmd:electronicMailAddress>
</gmd:CI_Address>
</gmd:address>
<gmd:onlineResource>
<gmd:CI_OnlineResource>
<gmd:linkage>
<gmd:URL>http://www.i-marine.eu</gmd:URL>
</gmd:linkage>
<gmd:protocol>
<gco:CharacterString>WWW:LINK-1.0-http--link</gco:CharacterString>
</gmd:protocol>
<gmd:name>
<gco:CharacterString>iMarine Consortium site.</gco:CharacterString>
</gmd:name>
</gmd:CI_OnlineResource>
</gmd:onlineResource>
</gmd:CI_Contact>
</gmd:contactInfo>
<gmd:role>
<gmd:CI_RoleCode codeListValue="resourceProvider" codeList="http://schemas.opengis.net/iso/19139/20070417/resources/Codelist/gmxCodelists.xml#CI_RoleCode">Resource provider</gmd:CI_RoleCode>
</gmd:role>
</gmd:CI_ResponsibleParty>
</gmd:contact>
<gmd:dateStamp>
<gco:DateTime>2019-10-02T17:50:11.671+02:00</gco:DateTime>
</gmd:dateStamp>
<gmd:spatialRepresentationInfo>
<gmd:MD_VectorSpatialRepresentation>
<gmd:topologyLevel>
<gmd:MD_TopologyLevelCode codeListValue="geometryOnly" codeList="http://schemas.opengis.net/iso/19139/20070417/resources/Codelist/gmxCodelists.xml#MD_TopologyLevelCode">Geometry only</gmd:MD_TopologyLevelCode>
</gmd:topologyLevel>
<gmd:geometricObjects>
<gmd:MD_GeometricObjects>
<gmd:geometricObjectType>
<gmd:MD_GeometricObjectTypeCode codeListValue="surface" codeList="http://schemas.opengis.net/iso/19139/20070417/resources/Codelist/gmxCodelists.xml#MD_GeometricObjectTypeCode">Surface</gmd:MD_GeometricObjectTypeCode>
</gmd:geometricObjectType>
<gmd:geometricObjectCount>
<gco:Integer>0</gco:Integer>
</gmd:geometricObjectCount>
</gmd:MD_GeometricObjects>
</gmd:geometricObjects>
</gmd:MD_VectorSpatialRepresentation>
</gmd:spatialRepresentationInfo>
<gmd:identificationInfo>
<gmd:MD_DataIdentification>
<gmd:citation>
<gmd:CI_Citation>
<gmd:title>
<gco:CharacterString>TrueMarble test</gco:CharacterString>
</gmd:title>
<gmd:date>
<gmd:CI_Date>
<gmd:date>
<gco:DateTime>2019-10-02T17:50:11.652+02:00</gco:DateTime>
</gmd:date>
<gmd:dateType>
<gmd:CI_DateTypeCode codeSpace="eng" codeListValue="creation" codeList="http://schemas.opengis.net/iso/19139/20070417/resources/Codelist/gmxCodelists.xml#CI_DateTypeCode">Creation</gmd:CI_DateTypeCode>
</gmd:dateType>
</gmd:CI_Date>
</gmd:date>
<gmd:identifier>
<gmd:MD_Identifier>
<gmd:code>
<gco:CharacterString>a9dfe3b7-6b46-4013-ab1c-313ef2128d31</gco:CharacterString>
</gmd:code>
</gmd:MD_Identifier>
</gmd:identifier>
<gmd:presentationForm>
<gmd:CI_PresentationFormCode codeListValue="imageDigital" codeList="http://schemas.opengis.net/iso/19139/20070417/resources/Codelist/gmxCodelists.xml#CI_PresentationFormCode">Image digital</gmd:CI_PresentationFormCode>
</gmd:presentationForm>
</gmd:CI_Citation>
</gmd:citation>
<gmd:abstract>
<gco:CharacterString>This layer is used as a base layer for GIS VIewer widget</gco:CharacterString>
</gmd:abstract>
<gmd:purpose>
<gco:CharacterString>This layer is used as a base layer for GIS VIewer widget</gco:CharacterString>
</gmd:purpose>
<gmd:credit>
<gco:CharacterString>This layer has been produced by iMarine (www.i-marine.eu). iMarine (283644) is funded by the European Commission under Framework Programme 7</gco:CharacterString>
</gmd:credit>
<gmd:resourceMaintenance>
<gmd:MD_MaintenanceInformation>
<gmd:maintenanceAndUpdateFrequency>
<gmd:MD_MaintenanceFrequencyCode codeListValue="asNeeded" codeList="http://schemas.opengis.net/iso/19139/20070417/resources/Codelist/gmxCodelists.xml#MD_MaintenanceFrequencyCode">As needed</gmd:MD_MaintenanceFrequencyCode>
</gmd:maintenanceAndUpdateFrequency>
</gmd:MD_MaintenanceInformation>
</gmd:resourceMaintenance>
<gmd:descriptiveKeywords>
<gmd:MD_Keywords>
<gmd:keyword>
<gco:CharacterString>iMarine Consortium</gco:CharacterString>
</gmd:keyword>
<gmd:keyword>
<gco:CharacterString>True Marble</gco:CharacterString>
</gmd:keyword>
<gmd:type>
<gmd:MD_KeywordTypeCode codeListValue="theme" codeList="http://schemas.opengis.net/iso/19139/20070417/resources/Codelist/gmxCodelists.xml#MD_KeywordTypeCode">Theme</gmd:MD_KeywordTypeCode>
</gmd:type>
<gmd:thesaurusName>
<gmd:CI_Citation>
<gmd:title>
<gco:CharacterString>General</gco:CharacterString>
</gmd:title>
<gmd:date>
<gmd:CI_Date>
<gmd:date>
<gco:DateTime>2013-07-04T14:09:55.783+02:00</gco:DateTime>
</gmd:date>
<gmd:dateType>
<gmd:CI_DateTypeCode codeSpace="eng" codeListValue="creation" codeList="http://schemas.opengis.net/iso/19139/20070417/resources/Codelist/gmxCodelists.xml#CI_DateTypeCode">Creation</gmd:CI_DateTypeCode>
</gmd:dateType>
</gmd:CI_Date>
</gmd:date>
</gmd:CI_Citation>
</gmd:thesaurusName>
</gmd:MD_Keywords>
</gmd:descriptiveKeywords>
<gmd:spatialResolution>
<gmd:MD_Resolution>
<gmd:distance>
<gco:Distance uom="http://schemas.opengis.net/iso/19139/20070417/resources/uom/gmxUom.xml#xpointer(//*[@gml:id='m'])">0.5</gco:Distance>
</gmd:distance>
</gmd:MD_Resolution>
</gmd:spatialResolution>
<gmd:language>
<gmd:LanguageCode codeListValue="eng" codeList="http://schemas.opengis.net/iso/19139/20070417/resources/Codelist/ML_gmxCodelists.xml#LanguageCode">English</gmd:LanguageCode>
</gmd:language>
<gmd:topicCategory>
<gmd:MD_TopicCategoryCode>environment</gmd:MD_TopicCategoryCode>
</gmd:topicCategory>
<gmd:extent>
<gmd:EX_Extent>
<gmd:geographicElement>
<gmd:EX_GeographicBoundingBox>
<gmd:extentTypeCode>
<gco:Boolean>true</gco:Boolean>
</gmd:extentTypeCode>
<gmd:westBoundLongitude>
<gco:Decimal>-180.0</gco:Decimal>
</gmd:westBoundLongitude>
<gmd:eastBoundLongitude>
<gco:Decimal>180.0</gco:Decimal>
</gmd:eastBoundLongitude>
<gmd:southBoundLatitude>
<gco:Decimal>-90.0</gco:Decimal>
</gmd:southBoundLatitude>
<gmd:northBoundLatitude>
<gco:Decimal>90.0</gco:Decimal>
</gmd:northBoundLatitude>
</gmd:EX_GeographicBoundingBox>
</gmd:geographicElement>
</gmd:EX_Extent>
</gmd:extent>
</gmd:MD_DataIdentification>
</gmd:identificationInfo>
<gmd:distributionInfo>
<gmd:MD_Distribution>
<gmd:distributionFormat>
<gmd:MD_Format>
<gmd:name>
<gco:CharacterString>WMS</gco:CharacterString>
</gmd:name>
<gmd:version>
<gco:CharacterString>1.3.0</gco:CharacterString>
</gmd:version>
</gmd:MD_Format>
</gmd:distributionFormat>
<gmd:distributionFormat>
<gmd:MD_Format>
<gmd:name>
<gco:CharacterString>WFS</gco:CharacterString>
</gmd:name>
<gmd:version>
<gco:CharacterString>1.0.0</gco:CharacterString>
</gmd:version>
</gmd:MD_Format>
</gmd:distributionFormat>
<gmd:distributionFormat>
<gmd:MD_Format>
<gmd:name>
<gco:CharacterString>WCS</gco:CharacterString>
</gmd:name>
<gmd:version>
<gco:CharacterString>1.0.0</gco:CharacterString>
</gmd:version>
</gmd:MD_Format>
</gmd:distributionFormat>
<gmd:distributionFormat>
<gmd:MD_Format>
<gmd:name>
<gco:CharacterString>HTTP</gco:CharacterString>
</gmd:name>
<gmd:version>
<gco:CharacterString>1.0.0</gco:CharacterString>
</gmd:version>
</gmd:MD_Format>
</gmd:distributionFormat>
<gmd:transferOptions>
<gmd:MD_DigitalTransferOptions/>
</gmd:transferOptions>
</gmd:MD_Distribution>
</gmd:distributionInfo>
<gmd:metadataConstraints>
<gmd:MD_LegalConstraints>
<gmd:useLimitation>
<gco:CharacterString>CC-BY-SA</gco:CharacterString>
</gmd:useLimitation>
<gmd:accessConstraints>
<gmd:MD_RestrictionCode codeListValue="license" codeList="http://schemas.opengis.net/iso/19139/20070417/resources/Codelist/gmxCodelists.xml#MD_RestrictionCode">License</gmd:MD_RestrictionCode>
</gmd:accessConstraints>
<gmd:useConstraints>
<gmd:MD_RestrictionCode codeListValue="license" codeList="http://schemas.opengis.net/iso/19139/20070417/resources/Codelist/gmxCodelists.xml#MD_RestrictionCode">License</gmd:MD_RestrictionCode>
</gmd:useConstraints>
<gmd:otherConstraints>
<gco:CharacterString>other restrictions</gco:CharacterString>
</gmd:otherConstraints>
</gmd:MD_LegalConstraints>
</gmd:metadataConstraints>
</gmd:MD_Metadata>

15
sdi-service/CHANGELOG.md Normal file

@@ -0,0 +1,15 @@
# Changelog for org.gcube.spatial.data.sdi-service
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [v1.5.0-SNAPSHOT] 2020-05-15
## [v1.4.3-SNAPSHOT] 2020-05-15
- Changed maven repos
## [v1.4.2] 2020-05-15
### Fixes
- Integration with gcube distribution (https://support.d4science.org/issues/19612)

26
sdi-service/FUNDING.md Normal file

@@ -0,0 +1,26 @@
# Acknowledgments
The projects leading to this software have received funding from a series of European Union programmes including:
- the Sixth Framework Programme for Research and Technological Development
- [DILIGENT](https://cordis.europa.eu/project/id/004260) (grant no. 004260).
- the Seventh Framework Programme for research, technological development and demonstration
- [D4Science](https://cordis.europa.eu/project/id/212488) (grant no. 212488);
- [D4Science-II](https://cordis.europa.eu/project/id/239019) (grant no. 239019);
- [ENVRI](https://cordis.europa.eu/project/id/283465) (grant no. 283465);
- [iMarine](https://cordis.europa.eu/project/id/283644) (grant no. 283644);
- [EUBrazilOpenBio](https://cordis.europa.eu/project/id/288754) (grant no. 288754).
- the H2020 research and innovation programme
- [SoBigData](https://cordis.europa.eu/project/id/654024) (grant no. 654024);
- [PARTHENOS](https://cordis.europa.eu/project/id/654119) (grant no. 654119);
- [EGI-Engage](https://cordis.europa.eu/project/id/654142) (grant no. 654142);
- [ENVRI PLUS](https://cordis.europa.eu/project/id/654182) (grant no. 654182);
- [BlueBRIDGE](https://cordis.europa.eu/project/id/675680) (grant no. 675680);
- [PerformFISH](https://cordis.europa.eu/project/id/727610) (grant no. 727610);
- [AGINFRA PLUS](https://cordis.europa.eu/project/id/731001) (grant no. 731001);
- [DESIRA](https://cordis.europa.eu/project/id/818194) (grant no. 818194);
- [ARIADNEplus](https://cordis.europa.eu/project/id/823914) (grant no. 823914);
- [RISIS 2](https://cordis.europa.eu/project/id/824091) (grant no. 824091);
- [EOSC-Pillar](https://cordis.europa.eu/project/id/857650) (grant no. 857650);
- [Blue Cloud](https://cordis.europa.eu/project/id/862409) (grant no. 862409);
- [SoBigData-PlusPlus](https://cordis.europa.eu/project/id/871042) (grant no. 871042);


@@ -0,0 +1,34 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<Resource version="0.4.x">
<ID>2a3131f6-6ef6-4520-b8bf-70b29c7824f9</ID>
<Type>RuntimeResource</Type>
<Profile>
<Category>Gis</Category>
<Platform>
<Name>geonetwork</Name>
<Version>3</Version>
<MinorVersion>0</MinorVersion>
<RevisionVersion>5</RevisionVersion>
<BuildVersion>0</BuildVersion>
</Platform>
<RunTime>
<HostedOn>node3-d-d4s.d4science.org</HostedOn>
<Status>READY</Status>
</RunTime>
<AccessPoint>
<Interface>
<Endpoint EntryName="geonetwork">http://node3-d-d4s.d4science.org/geonetwork</Endpoint>
</Interface>
<AccessData>
<Username>admin</Username>
<Password>5jykeFZrlF1Xfa4vohyDYg==</Password>
</AccessData>
<Properties>
<Property>
<Name>priority</Name>
<Value encrypted="false">1</Value>
</Property>
</Properties>
</AccessPoint>
</Profile>
</Resource>

312
sdi-service/LICENSE.md Normal file

@@ -0,0 +1,312 @@
# European Union Public Licence V. 1.1
EUPL © the European Community 2007
This European Union Public Licence (the “EUPL”) applies to the Work or Software
(as defined below) which is provided under the terms of this Licence. Any use of
the Work, other than as authorised under this Licence is prohibited (to the
extent such use is covered by a right of the copyright holder of the Work).
The Original Work is provided under the terms of this Licence when the Licensor
(as defined below) has placed the following notice immediately following the
copyright notice for the Original Work:
Licensed under the EUPL V.1.1
or has expressed by any other mean his willingness to license under the EUPL.
## 1. Definitions
In this Licence, the following terms have the following meaning:
- The Licence: this Licence.
- The Original Work or the Software: the software distributed and/or
communicated by the Licensor under this Licence, available as Source Code and
also as Executable Code as the case may be.
- Derivative Works: the works or software that could be created by the Licensee,
based upon the Original Work or modifications thereof. This Licence does not
define the extent of modification or dependence on the Original Work required
in order to classify a work as a Derivative Work; this extent is determined by
copyright law applicable in the country mentioned in Article 15.
- The Work: the Original Work and/or its Derivative Works.
- The Source Code: the human-readable form of the Work which is the most
convenient for people to study and modify.
- The Executable Code: any code which has generally been compiled and which is
meant to be interpreted by a computer as a program.
- The Licensor: the natural or legal person that distributes and/or communicates
the Work under the Licence.
- Contributor(s): any natural or legal person who modifies the Work under the
Licence, or otherwise contributes to the creation of a Derivative Work.
- The Licensee or “You”: any natural or legal person who makes any usage of the
Software under the terms of the Licence.
- Distribution and/or Communication: any act of selling, giving, lending,
renting, distributing, communicating, transmitting, or otherwise making
available, on-line or off-line, copies of the Work or providing access to its
essential functionalities at the disposal of any other natural or legal
person.
## 2. Scope of the rights granted by the Licence
The Licensor hereby grants You a world-wide, royalty-free, non-exclusive,
sub-licensable licence to do the following, for the duration of copyright vested
in the Original Work:
- use the Work in any circumstance and for all usage,
- reproduce the Work,
- modify the Original Work, and make Derivative Works based upon the Work,
- communicate to the public, including the right to make available or display the Work or copies thereof to the public and perform publicly, as the case may be, the Work,
- distribute the Work or copies thereof,
- lend and rent the Work or copies thereof,
- sub-license rights in the Work or copies thereof.
Those rights can be exercised on any media, supports and formats, whether now
known or later invented, as far as the applicable law permits so.
In the countries where moral rights apply, the Licensor waives his right to
exercise his moral right to the extent allowed by law in order to make effective
the licence of the economic rights here above listed.
The Licensor grants to the Licensee royalty-free, non exclusive usage rights to
any patents held by the Licensor, to the extent necessary to make use of the
rights granted on the Work under this Licence.
## 3. Communication of the Source Code
The Licensor may provide the Work either in its Source Code form, or as
Executable Code. If the Work is provided as Executable Code, the Licensor
provides in addition a machine-readable copy of the Source Code of the Work
along with each copy of the Work that the Licensor distributes or indicates, in
a notice following the copyright notice attached to the Work, a repository where
the Source Code is easily and freely accessible for as long as the Licensor
continues to distribute and/or communicate the Work.
## 4. Limitations on copyright
Nothing in this Licence is intended to deprive the Licensee of the benefits from
any exception or limitation to the exclusive rights of the rights owners in the
Original Work or Software, of the exhaustion of those rights or of other
applicable limitations thereto.
## 5. Obligations of the Licensee
The grant of the rights mentioned above is subject to some restrictions and
obligations imposed on the Licensee. Those obligations are the following:
Attribution right: the Licensee shall keep intact all copyright, patent or
trademarks notices and all notices that refer to the Licence and to the
disclaimer of warranties. The Licensee must include a copy of such notices and a
copy of the Licence with every copy of the Work he/she distributes and/or
communicates. The Licensee must cause any Derivative Work to carry prominent
notices stating that the Work has been modified and the date of modification.
Copyleft clause: If the Licensee distributes and/or communicates copies of the
Original Works or Derivative Works based upon the Original Work, this
Distribution and/or Communication will be done under the terms of this Licence
or of a later version of this Licence unless the Original Work is expressly
distributed only under this version of the Licence. The Licensee (becoming
Licensor) cannot offer or impose any additional terms or conditions on the Work
or Derivative Work that alter or restrict the terms of the Licence.
Compatibility clause: If the Licensee Distributes and/or Communicates Derivative
Works or copies thereof based upon both the Original Work and another work
licensed under a Compatible Licence, this Distribution and/or Communication can
be done under the terms of this Compatible Licence. For the sake of this clause,
“Compatible Licence” refers to the licences listed in the appendix attached to
this Licence. Should the Licensees obligations under the Compatible Licence
conflict with his/her obligations under this Licence, the obligations of the
Compatible Licence shall prevail.
Provision of Source Code: When distributing and/or communicating copies of the
Work, the Licensee will provide a machine-readable copy of the Source Code or
indicate a repository where this Source will be easily and freely available for
as long as the Licensee continues to distribute and/or communicate the Work.
Legal Protection: This Licence does not grant permission to use the trade names,
trademarks, service marks, or names of the Licensor, except as required for
reasonable and customary use in describing the origin of the Work and
reproducing the content of the copyright notice.
## 6. Chain of Authorship
The original Licensor warrants that the copyright in the Original Work granted
hereunder is owned by him/her or licensed to him/her and that he/she has the
power and authority to grant the Licence.
Each Contributor warrants that the copyright in the modifications he/she brings
to the Work are owned by him/her or licensed to him/her and that he/she has the
power and authority to grant the Licence.
Each time You accept the Licence, the original Licensor and subsequent
Contributors grant You a licence to their contributions to the Work, under the
terms of this Licence.
## 7. Disclaimer of Warranty
The Work is a work in progress, which is continuously improved by numerous
contributors. It is not a finished work and may therefore contain defects or
“bugs” inherent to this type of software development.
For the above reason, the Work is provided under the Licence on an “as is” basis
and without warranties of any kind concerning the Work, including without
limitation merchantability, fitness for a particular purpose, absence of defects
or errors, accuracy, non-infringement of intellectual property rights other than
copyright as stated in Article 6 of this Licence.
This disclaimer of warranty is an essential part of the Licence and a condition
for the grant of any rights to the Work.
## 8. Disclaimer of Liability
Except in the cases of wilful misconduct or damages directly caused to natural
persons, the Licensor will in no event be liable for any direct or indirect,
material or moral, damages of any kind, arising out of the Licence or of the use
of the Work, including without limitation, damages for loss of goodwill, work
stoppage, computer failure or malfunction, loss of data or any commercial
damage, even if the Licensor has been advised of the possibility of such
damage. However, the Licensor will be liable under statutory product liability
laws as far such laws apply to the Work.
## 9. Additional agreements
While distributing the Original Work or Derivative Works, You may choose to
conclude an additional agreement to offer, and charge a fee for, acceptance of
support, warranty, indemnity, or other liability obligations and/or services
consistent with this Licence. However, in accepting such obligations, You may
act only on your own behalf and on your sole responsibility, not on behalf of
the original Licensor or any other Contributor, and only if You agree to
indemnify, defend, and hold each Contributor harmless for any liability incurred
by, or claims asserted against such Contributor by the fact You have accepted
any such warranty or additional liability.
## 10. Acceptance of the Licence
The provisions of this Licence can be accepted by clicking on an icon “I agree”
placed under the bottom of a window displaying the text of this Licence or by
affirming consent in any other similar way, in accordance with the rules of
applicable law. Clicking on that icon indicates your clear and irrevocable
acceptance of this Licence and all of its terms and conditions.
Similarly, you irrevocably accept this Licence and all of its terms and
conditions by exercising any rights granted to You by Article 2 of this Licence,
such as the use of the Work, the creation by You of a Derivative Work or the
Distribution and/or Communication by You of the Work or copies thereof.
## 11. Information to the public
In case of any Distribution and/or Communication of the Work by means of
electronic communication by You (for example, by offering to download the Work
from a remote location) the distribution channel or media (for example, a
website) must at least provide to the public the information requested by the
applicable law regarding the Licensor, the Licence and the way it may be
accessible, concluded, stored and reproduced by the Licensee.
## 12. Termination of the Licence
The Licence and the rights granted hereunder will terminate automatically upon
any breach by the Licensee of the terms of the Licence.
Such a termination will not terminate the licences of any person who has
received the Work from the Licensee under the Licence, provided such persons
remain in full compliance with the Licence.
## 13. Miscellaneous
Without prejudice of Article 9 above, the Licence represents the complete
agreement between the Parties as to the Work licensed hereunder.
If any provision of the Licence is invalid or unenforceable under applicable
law, this will not affect the validity or enforceability of the Licence as a
whole. Such provision will be construed and/or reformed so as necessary to make
it valid and enforceable.
The European Commission may publish other linguistic versions and/or new
versions of this Licence, so far this is required and reasonable, without
reducing the scope of the rights granted by the Licence. New versions of the
Licence will be published with a unique version number.
All linguistic versions of this Licence, approved by the European Commission,
have identical value. Parties can take advantage of the linguistic version of
their choice.
## 14. Jurisdiction
Any litigation resulting from the interpretation of this License, arising
between the European Commission, as a Licensor, and any Licensee, will be
subject to the jurisdiction of the Court of Justice of the European Communities,
as laid down in article 238 of the Treaty establishing the European Community.
Any litigation arising between Parties, other than the European Commission, and
resulting from the interpretation of this License, will be subject to the
exclusive jurisdiction of the competent court where the Licensor resides or
conducts its primary business.
## 15. Applicable Law
This Licence shall be governed by the law of the European Union country where
the Licensor resides or has his registered office.
This licence shall be governed by the Belgian law if:
- a litigation arises between the European Commission, as a Licensor, and any Licensee;
- the Licensor, other than the European Commission, has no residence or registered office inside a European Union country.
## Appendix
“Compatible Licences” according to article 5 EUPL are:
- GNU General Public License (GNU GPL) v. 2
- Open Software License (OSL) v. 2.1, v. 3.0
- Common Public License v. 1.0
- Eclipse Public License v. 1.0
- Cecill v. 2.0

51
sdi-service/README.md Normal file
View File

@ -0,0 +1,51 @@
Spatial Data Infrastructure - Service
--------------------------------------------------
SDI-Service is one of the subsystems forming the gCube Spatial Data Infrastructure Facilities. It aims to provide gCube Applications with simplified features for managing GeoSpatial Data and Metadata.
## Built with
* [OpenJDK](https://openjdk.java.net/) - The JDK used
* [JAX-RS](https://github.com/eclipse-ee4j/jaxrs-api) - Java™ API for RESTful Web Services
* [Jersey](https://jersey.github.io/) - JAX-RS runtime
* [Maven](https://maven.apache.org/) - Dependency Management
## Documentation
Documentation can be found [here](https://gcube.wiki.gcube-system.org/gcube/SDI-Service).
## Change log
See [CHANGELOG.md](CHANGELOG.md).
## License
This project is licensed under the EUPL V.1.1 License - see the [LICENSE.md](LICENSE.md) file for details.
## About the gCube Framework
This software is part of the [gCubeFramework](https://www.gcube-system.org/ "gCubeFramework"): an
open-source software toolkit used for building and operating Hybrid Data
Infrastructures enabling the dynamic deployment of Virtual Research Environments
by favouring the realisation of reuse oriented policies.
The projects leading to this software have received funding from a series of European Union programmes including:
- the Sixth Framework Programme for Research and Technological Development
- DILIGENT (grant no. 004260).
- the Seventh Framework Programme for research, technological development and demonstration
- D4Science (grant no. 212488);
- D4Science-II (grant no.239019);
- ENVRI (grant no. 283465);
- iMarine (grant no. 283644);
- EUBrazilOpenBio (grant no. 288754).
- the H2020 research and innovation programme
- SoBigData (grant no. 654024);
- PARTHENOS (grant no. 654119);
- EGIEngage (grant no. 654142);
- ENVRIplus (grant no. 654182);
- BlueBRIDGE (grant no. 675680);
- PerformFish (grant no. 727610);
- AGINFRAplus (grant no. 731001);
- DESIRA (grant no. 818194);
- ARIADNEplus (grant no. 823914);
- RISIS2 (grant no. 824091);

203
sdi-service/pom.xml Normal file
View File

@ -0,0 +1,203 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>gcube-sdi-suite</artifactId>
<groupId>org.gcube.spatial.data</groupId>
<version>1.0.0-SNAPSHOT</version>
</parent>
<groupId>org.gcube.spatial.data</groupId>
<artifactId>sdi-service</artifactId>
<version>1.5.0-SNAPSHOT</version>
<name>SDI Service</name>
<description>REST Interface towards SDI facilities</description>
<packaging>war</packaging>
<properties>
<jersey.version>2.25.1</jersey.version>
</properties>
<scm>
<connection>scm:git:${gitBaseUrl}/gFeed</connection>
<developerConnection>scm:git:${gitBaseUrl}/gFeed</developerConnection>
<url>${gitBaseUrl}/gFeed</url>
</scm>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.gcube.distribution</groupId>
<artifactId>gcube-smartgears-bom</artifactId>
<version>2.0.0</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>org.glassfish.jersey</groupId>
<artifactId>jersey-bom</artifactId>
<version>${jersey.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
</dependencies>
</dependencyManagement>
<dependencies>
<dependency>
<groupId>org.gcube.spatial.data</groupId>
<artifactId>sdi-interface</artifactId>
</dependency>
<dependency>
<groupId>org.gcube.portlets.user</groupId>
<artifactId>uri-resolver-manager</artifactId>
<version>[1.0.0, 2.0.0)</version>
</dependency>
<dependency>
<groupId>org.gcube.data.transfer</groupId>
<artifactId>data-transfer-library</artifactId>
<version>[1.2.0,2.0.0)</version>
</dependency>
<dependency>
<groupId>org.gcube.resources</groupId>
<artifactId>registry-publisher</artifactId>
</dependency>
<dependency>
<groupId>org.gcube.resourcemanagement</groupId>
<artifactId>resourcemanager-client</artifactId>
<version>[1.0.0,2.0.0)</version>
</dependency>
<!-- SMARTGEARS -->
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-smartgears-app</artifactId>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-smartgears</artifactId>
</dependency>
<!-- jersey -->
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<version>3.0.1</version>
</dependency>
<dependency>
<groupId>javax.ws.rs</groupId>
<artifactId>javax.ws.rs-api</artifactId>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet</artifactId>
</dependency>
<dependency>
<groupId>javax.transaction</groupId>
<artifactId>javax.transaction-api</artifactId>
<version>1.2</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-json-jackson</artifactId>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-multipart</artifactId>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.ext.cdi</groupId>
<artifactId>jersey-cdi1x</artifactId>
</dependency>
<!-- WELD -->
<dependency>
<groupId>org.glassfish.jersey.ext.cdi</groupId>
<artifactId>jersey-weld2-se</artifactId>
</dependency>
<!-- swagger -->
<!-- <dependency>-->
<!-- <groupId>io.swagger</groupId>-->
<!-- <artifactId>swagger-jersey2-jaxrs</artifactId>-->
<!-- <version>1.5.0</version>-->
<!-- </dependency>-->
<!-- GN EXTERNAL -->
<dependency>
<groupId>it.geosolutions</groupId>
<artifactId>geonetwork-manager</artifactId>
<version>1.4-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.geotoolkit</groupId>
<artifactId>geotk-metadata</artifactId>
<version>3.20</version>
</dependency>
<dependency>
<groupId>org.w3c</groupId>
<artifactId>dom</artifactId>
<version>2.3.0-jaxb-1.0.6</version>
</dependency>
<dependency>
<groupId>org.geotoolkit</groupId>
<artifactId>geotk-referencing</artifactId>
<version>3.20</version>
</dependency>
<dependency>
<groupId>com.thoughtworks.xstream</groupId>
<artifactId>xstream</artifactId>
<version>1.4.9</version>
</dependency>
<!-- META -->
<dependency>
<groupId>org.freemarker</groupId>
<artifactId>freemarker</artifactId>
<version>2.3.25-incubating</version>
</dependency>
<!-- GS EXTERNAL -->
<dependency>
<groupId>it.geosolutions</groupId>
<artifactId>geoserver-manager</artifactId>
<version>1.5.2</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- test -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.test-framework.providers</groupId>
<artifactId>jersey-test-framework-provider-simple</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>
</dependencies>
<repositories>
<repository>
<id>GeoSolutions-snap</id>
<url>https://nexus.d4science.org/nexus/content/repositories/geo-solutions-snapshots/</url>
<snapshots>
<enabled>true</enabled>
</snapshots>
<releases>
<enabled>false</enabled>
</releases>
</repository>
<repository>
<id>GeoSolutions-rels</id>
<url>https://nexus.d4science.org/nexus/content/repositories/geo-solutions/</url>
<snapshots>
<enabled>false</enabled>
</snapshots>
<releases>
<enabled>true</enabled>
</releases>
</repository>
<repository>
<id>GeoToolkit</id>
<url>https://nexus.d4science.org/nexus/content/repositories/geotoolkit/</url>
</repository>
</repositories>
</project>

View File

@ -0,0 +1,119 @@
package org.gcube.spatial.data.sdi;
import java.net.URL;
import java.util.Properties;
import lombok.Synchronized;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class LocalConfiguration {
//GN
final static public String GEONETWORK_CACHE_TTL="gn.cache.TTL";
final static public String GEONETWORK_SE_CATEGORY="gn.se.category";
final static public String GEONETWORK_SE_PLATFORM="gn.se.platform";
final static public String GEONETWORK_SE_PRIORITY="gn.se.priority";
final static public String GEONETWORK_SE_ENDPOINT_NAME="gn.se.endpointName";
final static public String GEONETWORK_GE_SERVICE_CLASS="gn.ge.serviceClass";
final static public String GEONETWORK_GE_SERVICE_NAME="gn.ge.serviceName";
final static public String GEONETWORK_UPDATE_TIMEOUT="gn.update.timeout";
final static public String GEONETWORK_UPDATE_WAIT="gn.update.wait";
final static public String GEONETWORK_MAIL="gn.contact.mail";
final static public String GEONETWORK_PASSWORD_LENGTH="gn.password.length";
final static public String GEONETWORK_SE_SUFFIXES="gn.se.suffixes";
final static public String GEONETWORK_SE_ASSIGNED_SCOPE_PREFIX="gn.se.assigned.scope.prefix";
final static public String GEONETWORK_SE_SCOPE_USER_PREFIX="gn.se.scope.user.prefix";
final static public String GEONETWORK_SE_SCOPE_PASSWORD_PREFIX="gn.se.scope.password.prefix";
final static public String GEONETWORK_SE_CKAN_USER_PREFIX="gn.se.ckan.user.prefix";
final static public String GEONETWORK_SE_CKAN_PASSWORD_PREFIX="gn.se.ckan.password.prefix";
final static public String GEONETWORK_SE_MANAGER_USER_PREFIX="gn.se.manager.user.prefix";
final static public String GEONETWORK_SE_MANAGER_PASSWORD_PREFIX="gn.se.manager.password.prefix";
final static public String GEONETWORK_SE_DEFAULT_GROUP_PREFIX="gn.se.default.group.prefix";
final static public String GEONETWORK_SE_SHARED_GROUP_PREFIX="gn.se.shared.group.prefix";
final static public String GEONETWORK_SE_CONFIDENTIAL_GROUP_PREFIX="gn.se.confidential.group.prefix";
final static public String GEONETWORK_SE_CONTEXT_GROUP_PREFIX="gn.se.context.group.prefix";
final static public String GEONETWORK_GROUP_ALL="gn.groups.all";
final static public String GEONETWORK_GROUP_MAX_LENGTH="gn.groups.max_length";
final static public String GEONETWORK_MANDATORY_SG="gn.mandatorySG";
//GS
final static public String GEOSERVER_CACHE_TTL="gs.cache.TTL";
final static public String GEOSERVER_GE_SERVICE_CLASS="gs.ge.serviceClass";
final static public String GEOSERVER_GE_SERVICE_NAME="gs.ge.serviceName";
final static public String GEOSERVER_SE_CATEGORY="gs.se.category";
final static public String GEOSERVER_SE_PLATFORM="gs.se.platform";
final static public String GEOSERVER_SE_ENDPOINT_NAME="gs.se.endpointName";
public static final String GEOSERVER_HOSTED_LAYERS_TTL="gs.cache.hostedLayers.TTL";
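// NOTE: the following three TTL constants currently resolve to the same property key as GEOSERVER_HOSTED_LAYERS_TTL ("gs.cache.hostedLayers.TTL")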
public static final String GEOSERVER_STYLES_TTL="gs.cache.hostedLayers.TTL";
public static final String GEOSERVER_WORKSPACE_TTL="gs.cache.hostedLayers.TTL";
public static final String GEOSERVER_DATASTORE_TTL="gs.cache.hostedLayers.TTL";
final static public String GEOSERVER_MANDATORY_SG="gs.mandatorySG";
//TH
final static public String THREDDS_CACHE_TTL="th.cache.TTL";
final static public String THREDDS_SE_CATEGORY="th.se.category";
final static public String THREDDS_SE_PLATFORM="th.se.platform";
final static public String THREDDS_GE_SERVICE_CLASS="th.ge.serviceClass";
final static public String THREDDS_GE_SERVICE_NAME="th.ge.serviceName";
final static public String THREDDS_SE_ENDPOINT_NAME="th.se.endpointName";
final static public String THREDDS_MANDATORY_SG="th.mandatorySG";
final static public String THREDDS_SE_REMOTE_MANAGEMENT_ACCESS="th.se.remoteManagement.access";
//META
final static public String TEMPLATE_FOLDER="tpl.folder";
final static public String TEMPORARY_PERSISTENCE_TTL="temp.ttl";
final static public String IS_REGISTRATION_TIMEOUT="is.registration.timeout";
static LocalConfiguration instance=null;
@Synchronized
public static LocalConfiguration init(URL propertiesURL){
if(instance==null)
instance=new LocalConfiguration(propertiesURL);
return instance;
}
private Properties props=new Properties();
private LocalConfiguration(URL propertiesURL) {
try{
log.debug("Loading {} ",propertiesURL);
props.load(propertiesURL.openStream());
}catch(Exception e){
throw new RuntimeException(e);
}
}
public static String getProperty(String property){
return instance.props.getProperty(property);
}
public static String getProperty(String property,String defaultValue){
return instance.props.getProperty(property, defaultValue);
}
public static Long getTTL(String property) {
return Long.parseLong(getProperty(property));
}
public static boolean getFlag(String property) {
// look up the property value before parsing it as a boolean
return Boolean.parseBoolean(getProperty(property));
}
private static Object templateConfiguration=null;
public static Object getTemplateConfigurationObject() {return templateConfiguration;}
public static void setTemplateConfigurationObject(Object obj) {templateConfiguration=obj;}
}

View File

@ -0,0 +1,94 @@
package org.gcube.spatial.data.sdi;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.InetAddress;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.net.UnknownHostException;
import java.util.Base64;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class NetUtils {
public static boolean isSameHost(String toTestHost,String toLookForHost) throws UnknownHostException {
log.debug("Checking same hosts {},{}",toTestHost,toLookForHost);
if(toTestHost.equalsIgnoreCase(toLookForHost)) return true;
else {
InetAddress[] toTestHostIPs=InetAddress.getAllByName(toTestHost);
InetAddress[] toLookForHostIPs=InetAddress.getAllByName(toLookForHost);
log.debug("Checking IPs. ToTestIPs {}, ToLookForIPs {} ",toTestHostIPs,toLookForHostIPs);
for(InetAddress toTestIP:toTestHostIPs) {
for(InetAddress toLookForIP:toLookForHostIPs)
if(toTestIP.equals(toLookForIP)) return true;
}
}
log.debug("HOSTS are different.");
return false;
}
public static String getHostByURL(String url){
try{
return new URL(url).getHost();
}catch(MalformedURLException e) {
log.debug("Passed url {} is invalid. Assuming it's an hostname.");
return url;
}
}
public static final String getHost(String endpoint) throws MalformedURLException{
log.debug("Get host from endpoint {} ",endpoint);
if(endpoint.startsWith("http")){
log.debug("Endpoint seems url..");
return getHostByURL(endpoint);
}
return endpoint;
}
public static boolean isUp(String url) throws IOException {
String finalUrl=resolveRedirects(url);
log.debug("Checking {} availability .. ",finalUrl);
URL urlObj=new URL(finalUrl);
HttpURLConnection connection = (HttpURLConnection) urlObj.openConnection();
int status=connection.getResponseCode();
log.trace("HTTP Status response code for {} is {} ",finalUrl,status);
return status>=200&&status<300;
}
public static String resolveRedirects(String url) throws IOException{
log.debug("Resolving redirect for url {} ",url);
URL urlObj=new URL(url);
HttpURLConnection connection = (HttpURLConnection) urlObj.openConnection();
int status=connection.getResponseCode();
if(status>=300&&status<400){
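// 3xx status: follow the Location header and resolve recursively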
String newUrl=connection.getHeaderField("Location");
log.debug("Following redirect from {} to {} ",url,newUrl);
return resolveRedirects(newUrl);
}else return url;
}
public static void makeAuthorizedCall(String host,String path,String user,String password) throws IOException {
String urlString=String.format("https://%s/%s", host,path);
makeAuthorizedCall(urlString, user, password);
}
public static void makeAuthorizedCall(String urlString,String user,String password) throws IOException {
log.debug("Connecting to {} ",urlString);
URL url = new URL(urlString);
URLConnection uc = url.openConnection();
String userpass = user + ":" + password;
String basicAuth = "Basic " + new String(Base64.getEncoder().encode(userpass.getBytes()));
uc.setRequestProperty ("Authorization", basicAuth);
uc.setRequestProperty("gcube-token", SecurityTokenProvider.instance.get());
// the response body is not needed: open and close the stream just to perform the authorized call
InputStream in = uc.getInputStream();
in.close();
}
}

View File

@ -0,0 +1,140 @@
package org.gcube.spatial.data.sdi;
import java.net.URL;
import javax.ws.rs.ApplicationPath;
// NOTE: assuming the service's own rest.Metadata resource is the intended class here, not the aopalliance Metadata interface
import org.gcube.spatial.data.sdi.rest.Metadata;
import org.gcube.smartgears.ContextProvider;
import org.gcube.smartgears.configuration.container.ContainerConfiguration;
import org.gcube.smartgears.context.application.ApplicationContext;
import org.gcube.spatial.data.sdi.engine.GISManager;
import org.gcube.spatial.data.sdi.engine.GeoNetworkManager;
import org.gcube.spatial.data.sdi.engine.RoleManager;
import org.gcube.spatial.data.sdi.engine.SDIManager;
import org.gcube.spatial.data.sdi.engine.TemplateManager;
import org.gcube.spatial.data.sdi.engine.TemporaryPersistence;
import org.gcube.spatial.data.sdi.engine.ThreddsManager;
import org.gcube.spatial.data.sdi.engine.impl.factories.GeoNetworkManagerFactory;
import org.gcube.spatial.data.sdi.engine.impl.factories.GeoServerManagerFactory;
import org.gcube.spatial.data.sdi.engine.impl.factories.MetadataTemplateManagerFactory;
import org.gcube.spatial.data.sdi.engine.impl.factories.RoleManagerFactory;
import org.gcube.spatial.data.sdi.engine.impl.factories.SDIManagerFactory;
import org.gcube.spatial.data.sdi.engine.impl.factories.TemporaryPersistenceFactory;
import org.gcube.spatial.data.sdi.engine.impl.factories.ThreddsManagerFactory;
import org.gcube.spatial.data.sdi.model.ServiceConstants;
import org.gcube.spatial.data.sdi.rest.GeoNetwork;
import org.gcube.spatial.data.sdi.rest.GeoServer;
import org.gcube.spatial.data.sdi.rest.SDI;
import org.gcube.spatial.data.sdi.rest.Thredds;
import org.glassfish.hk2.utilities.binding.AbstractBinder;
import org.glassfish.jersey.media.multipart.MultiPartFeature;
import org.glassfish.jersey.server.ResourceConfig;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@ApplicationPath(ServiceConstants.APPLICATION)
public class SDIService extends ResourceConfig{
// @Inject
// MetadataTemplateManager templateManager;
// @Inject
// TemporaryPersistence persistence;
//
public SDIService() {
super();
log.warn("Initializing App : Properties.. ");
ApplicationContext context=ContextProvider.get();
ContainerConfiguration configuration=context.container().configuration();
try{
URL resourceUrl = context.application().getResource("/WEB-INF/config.properties");
LocalConfiguration.init(resourceUrl).
setTemplateConfigurationObject(ContextProvider.get());
}catch(Throwable t){
log.debug("Listing available paths");
for(Object obj:context.application().getResourcePaths("/WEB-INF"))
log.debug("OBJ : {} ",obj);
throw new RuntimeException("Unable to load configuration properties",t);
}
packages("org.gcube.spatial.data");
log.warn("Initializing App : Binders");
AbstractBinder binder = new AbstractBinder() {
@Override
protected void configure() {
bindFactory(SDIManagerFactory.class).to(SDIManager.class);
bindFactory(GeoNetworkManagerFactory.class).to(GeoNetworkManager.class);
bindFactory(ThreddsManagerFactory.class).to(ThreddsManager.class);
bindFactory(GeoServerManagerFactory.class).to(GISManager.class);
bindFactory(MetadataTemplateManagerFactory.class).to(TemplateManager.class);
bindFactory(RoleManagerFactory.class).to(RoleManager.class);
bindFactory(TemporaryPersistenceFactory.class).to(TemporaryPersistence.class);
}
};
register(binder);
register(MultiPartFeature.class);
registerClasses(SDI.class);
registerClasses(GeoNetwork.class);
registerClasses(GeoServer.class);
registerClasses(Thredds.class);
registerClasses(Metadata.class);
log.warn("Initialization complete");
// register(MoxyXmlFeature.class);
// String hostName=configuration.hostname();
// Integer port=configuration.port();
//SWAGGER
// BeanConfig beanConfig = new BeanConfig();
// beanConfig.setVersion("1.0.0");
// beanConfig.setSchemes(new String[]{"http","https"});
// beanConfig.setHost(hostName+":"+port);
// beanConfig.setBasePath("/gcube/service/");
// beanConfig.setResourcePackage(GeoNetwork.class.getPackage().getName());
// beanConfig.setTitle("SDI Service");
// beanConfig.setDescription("REST Interface towards SDI facilities");
// beanConfig.setPrettyPrint(true);
// beanConfig.setScan(true);
//
// System.out.println("********************** SDI INIT *****************************");
//
// log.debug("Initializing persistence manager.. {} :",persistence);
//
// try {
// persistence.init();
// } catch (Throwable t) {
// throw new RuntimeException("Unabel to init persistence. ",t);
// }
// log.debug("Initializing template manager.. {} : ",templateManager);
//
// ApplicationContext ctx = ContextProvider.get();
// templateManager.init(ctx);
//
}
}

View File

@ -0,0 +1,38 @@
package org.gcube.spatial.data.sdi;
import javax.xml.bind.annotation.XmlRootElement;
import org.gcube.smartgears.handlers.application.ApplicationLifecycleEvent.Start;
import org.gcube.smartgears.handlers.application.ApplicationLifecycleEvent.Stop;
import org.gcube.smartgears.handlers.application.ApplicationLifecycleHandler;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@XmlRootElement(name = "sdi-lifecycle")
public class SDIServiceLifecycleManager extends ApplicationLifecycleHandler{
public SDIServiceLifecycleManager() {
// System.out.println("SDI Lifecycle manager created ");
// System.out.println("persistence manager is "+persistence);
// System.out.println("template manager is "+templateManager);
// for(StackTraceElement el:Thread.currentThread().getStackTrace())
// System.out.println(""+el);
}
@Override
public void onStart(Start e) {
super.onStart(e);
}
@Override
public void onStop(Stop e) {
super.onStop(e);
// System.out.println("********************** SDI SHUTDOWN *****************************");
// persistence.shutdown();
}
}

View File

@ -0,0 +1,22 @@
package org.gcube.spatial.data.sdi;
import org.gcube.smartgears.ApplicationManager;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class SDIServiceManager implements ApplicationManager {
@Override
public void onInit() {
}
@Override
public void onShutdown() {
}
}

View File

@ -0,0 +1,12 @@
package org.gcube.spatial.data.sdi.engine;
import org.gcube.spatial.data.sdi.model.service.GeoServerDescriptor;
import org.gcube.spatial.data.sdi.model.services.GeoServerDefinition;
public interface GISManager extends GeoServiceManager<GeoServerDescriptor, GeoServerDefinition>{
// public List<GeoServerDescriptor> getConfiguration() throws ConfigurationNotFoundException;
// public ServiceHealthReport getHealthReport();
// public String registerService(GeoServerDefinition definition)throws ServiceRegistrationException;
// String importHostFromToken(String sourceToken, String hostname) throws ServiceRegistrationException;
}

View File

@ -0,0 +1,15 @@
package org.gcube.spatial.data.sdi.engine;
import org.gcube.spatial.data.sdi.engine.impl.faults.ConfigurationNotFoundException;
import org.gcube.spatial.data.sdi.engine.impl.gn.extension.GeoNetworkClient;
import org.gcube.spatial.data.sdi.model.service.GeoNetworkDescriptor;
import org.gcube.spatial.data.sdi.model.services.GeoNetworkServiceDefinition;
public interface GeoNetworkManager extends GeoServiceManager<GeoNetworkDescriptor,GeoNetworkServiceDefinition>{
public GeoNetworkClient getClient() throws ConfigurationNotFoundException;
public GeoNetworkClient getClient(GeoNetworkDescriptor descriptor);
}

View File

@ -0,0 +1,19 @@
package org.gcube.spatial.data.sdi.engine;
import java.util.List;
import org.gcube.spatial.data.sdi.engine.impl.faults.ConfigurationNotFoundException;
import org.gcube.spatial.data.sdi.engine.impl.faults.ServiceRegistrationException;
import org.gcube.spatial.data.sdi.model.health.ServiceHealthReport;
import org.gcube.spatial.data.sdi.model.service.GeoServiceDescriptor;
import org.gcube.spatial.data.sdi.model.services.ServiceDefinition;
public interface GeoServiceManager<T extends GeoServiceDescriptor,E extends ServiceDefinition> {
public T getDescriptorByHostname(String hostname) throws ConfigurationNotFoundException;
public List<T> getAvailableInstances() throws ConfigurationNotFoundException;
public List<T> getSuggestedInstances() throws ConfigurationNotFoundException;
public String registerService(E toRegister) throws ServiceRegistrationException;
public String importHostFromToken(String sourceToken, String hostname) throws ServiceRegistrationException;
public ServiceHealthReport getHealthReport();
}

View File

@ -0,0 +1,14 @@
package org.gcube.spatial.data.sdi.engine;
import java.util.List;
import org.gcube.spatial.data.sdi.model.credentials.Credentials;
import org.gcube.spatial.data.sdi.model.service.GeoServiceDescriptor;
public interface RoleManager {
public Credentials getMostAccessible(List<Credentials> toFilter,boolean considerAdmin);
public <T extends GeoServiceDescriptor> List<T> filterByRole(List<T> toFilter, boolean considerAdmin);
}

View File

@ -0,0 +1,21 @@
package org.gcube.spatial.data.sdi.engine;
import org.gcube.spatial.data.sdi.engine.impl.faults.ServiceRegistrationException;
import org.gcube.spatial.data.sdi.model.ScopeConfiguration;
import org.gcube.spatial.data.sdi.model.health.HealthReport;
import org.gcube.spatial.data.sdi.model.services.ServiceDefinition;
public interface SDIManager {
public ScopeConfiguration getContextConfiguration();
public HealthReport getHealthReport();
public String registerService(ServiceDefinition definition) throws ServiceRegistrationException;
public String importService(String sourceToken,String host,ServiceDefinition.Type expectedType)throws ServiceRegistrationException;
public GeoNetworkManager getGeoNetworkManager();
public ThreddsManager getThreddsManager();
public GISManager getGeoServerManager();
}

View File

@ -0,0 +1,23 @@
package org.gcube.spatial.data.sdi.engine;
import java.io.File;
import java.io.IOException;
import java.util.Map;
import java.util.Set;
import javax.xml.transform.TransformerException;
import org.gcube.spatial.data.sdi.engine.impl.metadata.TemplateApplicationReport;
import org.gcube.spatial.data.sdi.model.metadata.TemplateCollection;
import org.gcube.spatial.data.sdi.model.metadata.TemplateInvocation;
public interface TemplateManager {
public TemplateCollection getAvailableMetadataTemplates();
public TemplateApplicationReport applyMetadataTemplates(File original,Set<TemplateInvocation> invocations) throws IOException, TransformerException;
public File generateFromTemplate(Map<String,String> parameters, String templateID) throws Exception;
}

View File

@ -0,0 +1,16 @@
package org.gcube.spatial.data.sdi.engine;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
public interface TemporaryPersistence {
public void init() throws IOException;
public String store(InputStream is) throws FileNotFoundException, IOException;
public void clean(String id) throws IOException;
public void update(String id, InputStream is) throws FileNotFoundException, IOException;
public File getById(String id) throws FileNotFoundException;
public void shutdown();
}

View File

@ -0,0 +1,17 @@
package org.gcube.spatial.data.sdi.engine;
import java.io.File;
import org.gcube.data.transfer.model.plugins.thredds.ThreddsCatalog;
import org.gcube.spatial.data.sdi.engine.impl.faults.ConfigurationNotFoundException;
import org.gcube.spatial.data.sdi.engine.impl.faults.ThreddsOperationFault;
import org.gcube.spatial.data.sdi.model.service.ThreddsDescriptor;
import org.gcube.spatial.data.sdi.model.services.ThreddsDefinition;
public interface ThreddsManager extends GeoServiceManager<ThreddsDescriptor, ThreddsDefinition>{
public ThreddsCatalog publishCatalog(File catalogFile,String catalogReference) throws ConfigurationNotFoundException, ThreddsOperationFault;
public ThreddsCatalog createCatalogFromTemplate(String authorityUrl,String catalogPath,
String datasetScanId,String datasetScanName, String subFolder, String catalogReference)throws Exception;
}

View File

@ -0,0 +1,51 @@
package org.gcube.spatial.data.sdi.engine.impl;
import java.util.ArrayList;
import java.util.List;
import org.gcube.spatial.data.sdi.engine.GeoServiceManager;
import org.gcube.spatial.data.sdi.engine.impl.cluster.AbstractCluster;
import org.gcube.spatial.data.sdi.engine.impl.cluster.GeoServiceController;
import org.gcube.spatial.data.sdi.engine.impl.faults.ConfigurationNotFoundException;
import org.gcube.spatial.data.sdi.engine.impl.faults.ServiceRegistrationException;
import org.gcube.spatial.data.sdi.engine.impl.is.ISModule;
import org.gcube.spatial.data.sdi.model.health.ServiceHealthReport;
import org.gcube.spatial.data.sdi.model.service.GeoServiceDescriptor;
import org.gcube.spatial.data.sdi.model.services.ServiceDefinition;
public abstract class AbstractManager<T extends GeoServiceDescriptor,E extends ServiceDefinition, L extends GeoServiceController<T>> implements GeoServiceManager<T,E>{
protected abstract ISModule getRetriever();
protected abstract AbstractCluster<T,L> getCluster();
@Override
public T getDescriptorByHostname(String hostname) throws ConfigurationNotFoundException {
return getCluster().getControllerByHostName(hostname).getDescriptor();
}
@Override
public List<T> getAvailableInstances() throws ConfigurationNotFoundException {
ArrayList<T> toReturn=new ArrayList<>();
for(L controller :getCluster().getActualCluster())
toReturn.add(controller.getDescriptor());
return toReturn;
}
@Override
public String registerService(E toRegister) throws ServiceRegistrationException {
return getRetriever().registerService(toRegister);
}
@Override
public String importHostFromToken(String sourceToken, String hostname) throws ServiceRegistrationException {
return getRetriever().importHostFromToken(sourceToken, hostname);
}
@Override
public ServiceHealthReport getHealthReport() {
return getRetriever().getHealthReport();
}
}

View File

@ -0,0 +1,44 @@
package org.gcube.spatial.data.sdi.engine.impl;
import java.util.List;
import javax.inject.Singleton;
import org.gcube.spatial.data.sdi.LocalConfiguration;
import org.gcube.spatial.data.sdi.engine.GISManager;
import org.gcube.spatial.data.sdi.engine.impl.cluster.AbstractCluster;
import org.gcube.spatial.data.sdi.engine.impl.cluster.GeoServerCluster;
import org.gcube.spatial.data.sdi.engine.impl.cluster.GeoServerController;
import org.gcube.spatial.data.sdi.engine.impl.faults.ConfigurationNotFoundException;
import org.gcube.spatial.data.sdi.engine.impl.is.GeoServerClusterRetriever;
import org.gcube.spatial.data.sdi.engine.impl.is.ISModule;
import org.gcube.spatial.data.sdi.model.service.GeoServerDescriptor;
import org.gcube.spatial.data.sdi.model.services.GeoServerDefinition;
@Singleton
public class GISManagerImpl extends AbstractManager<GeoServerDescriptor, GeoServerDefinition, GeoServerController> implements GISManager{
private GeoServerClusterRetriever retriever=null;
private GeoServerCluster cluster=null;
public GISManagerImpl() {
retriever=new GeoServerClusterRetriever();
cluster=new GeoServerCluster(LocalConfiguration.getTTL(LocalConfiguration.GEOSERVER_CACHE_TTL), retriever, "GeoServer - cache");
}
@Override
protected AbstractCluster<GeoServerDescriptor, GeoServerController> getCluster() {
return cluster;
}
@Override
protected ISModule getRetriever() {
return retriever;
}
@Override
public List<GeoServerDescriptor> getSuggestedInstances() throws ConfigurationNotFoundException {
return getAvailableInstances();
}
}

View File

@ -0,0 +1,69 @@
package org.gcube.spatial.data.sdi.engine.impl;
import java.util.Collections;
import java.util.List;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.gcube.spatial.data.sdi.LocalConfiguration;
import org.gcube.spatial.data.sdi.engine.GeoNetworkManager;
import org.gcube.spatial.data.sdi.engine.RoleManager;
import org.gcube.spatial.data.sdi.engine.impl.cluster.AbstractCluster;
import org.gcube.spatial.data.sdi.engine.impl.cluster.GeoNetworkCluster;
import org.gcube.spatial.data.sdi.engine.impl.cluster.GeoNetworkController;
import org.gcube.spatial.data.sdi.engine.impl.faults.ConfigurationNotFoundException;
import org.gcube.spatial.data.sdi.engine.impl.gn.extension.GeoNetworkClient;
import org.gcube.spatial.data.sdi.engine.impl.is.GeoNetworkRetriever;
import org.gcube.spatial.data.sdi.engine.impl.is.ISModule;
import org.gcube.spatial.data.sdi.model.credentials.Credentials;
import org.gcube.spatial.data.sdi.model.service.GeoNetworkDescriptor;
import org.gcube.spatial.data.sdi.model.services.GeoNetworkServiceDefinition;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Singleton
public class GeoNetworkManagerImpl extends AbstractManager<GeoNetworkDescriptor, GeoNetworkServiceDefinition, GeoNetworkController> implements GeoNetworkManager {
RoleManager roleManager;
private GeoNetworkRetriever retriever=null;
private GeoNetworkCluster cluster=null;
@Inject
public GeoNetworkManagerImpl(RoleManager roleManager) {
this.roleManager=roleManager;
retriever=new GeoNetworkRetriever();
cluster=new GeoNetworkCluster(LocalConfiguration.getTTL(LocalConfiguration.GEONETWORK_CACHE_TTL), retriever, "GeoNetwork - cache");
}
@Override
protected AbstractCluster<GeoNetworkDescriptor, GeoNetworkController> getCluster() {
return cluster;
}
@Override
protected ISModule getRetriever() {
return retriever;
}
@Override
public List<GeoNetworkDescriptor> getSuggestedInstances() throws ConfigurationNotFoundException {
return Collections.singletonList(getCluster().getDefaultController().getDescriptor());
}
@Override
public GeoNetworkClient getClient() throws ConfigurationNotFoundException {
return getClient(getCluster().getDefaultController().getDescriptor());
}
@Override
public GeoNetworkClient getClient(GeoNetworkDescriptor descriptor) {
Credentials selected=roleManager.getMostAccessible(descriptor.getAccessibleCredentials(), false);
log.info("Logging in {} using {} ",descriptor,selected);
return new GeoNetworkClient(descriptor.getBaseEndpoint(), descriptor.getVersion(), selected.getPassword(), selected.getUsername(),descriptor);
}
}

View File

@ -0,0 +1,55 @@
package org.gcube.spatial.data.sdi.engine.impl;
import java.util.ArrayList;
import java.util.List;
import javax.inject.Singleton;
import org.gcube.spatial.data.sdi.engine.RoleManager;
import org.gcube.spatial.data.sdi.model.credentials.AccessType;
import org.gcube.spatial.data.sdi.model.credentials.Credentials;
import org.gcube.spatial.data.sdi.model.service.GeoServiceDescriptor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Singleton
public class RoleManagerImpl implements RoleManager {
public RoleManagerImpl() {
// TODO Auto-generated constructor stub
}
@Override
public Credentials getMostAccessible(List<Credentials> toFilter, boolean considerAdmin) {
//need to check roles by contacting social
AccessType maxLevel=getMaxLevel(considerAdmin);
Credentials toReturn=null;
for(Credentials cred: toFilter) {
if(cred.getAccessType().compareTo(maxLevel)>=0) { // cred level
if(toReturn==null || cred.getAccessType().compareTo(toReturn.getAccessType())<0)
toReturn = cred;
}
}
return toReturn;
}
@Override
public <T extends GeoServiceDescriptor> List<T> filterByRole(List<T> toFilter, boolean considerAdmin) {
ArrayList<T> toReturn=new ArrayList<T>();
AccessType maxLevel=getMaxLevel(considerAdmin);
for(T descriptor:toFilter) {
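// NOTE: filtering is not implemented yet; descriptors are currently never added to the result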
}
return toReturn;
}
private AccessType getMaxLevel(boolean considerAdmin) {
//TODO: ask the social manager
return considerAdmin?AccessType.ADMIN:AccessType.CONTEXT_MANAGER;
}
}

View File

@ -0,0 +1,158 @@
package org.gcube.spatial.data.sdi.engine.impl;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.gcube.spatial.data.sdi.engine.GISManager;
import org.gcube.spatial.data.sdi.engine.GeoNetworkManager;
import org.gcube.spatial.data.sdi.engine.SDIManager;
import org.gcube.spatial.data.sdi.engine.ThreddsManager;
import org.gcube.spatial.data.sdi.engine.impl.faults.InvalidServiceDefinitionException;
import org.gcube.spatial.data.sdi.engine.impl.faults.ServiceRegistrationException;
import org.gcube.spatial.data.sdi.model.ScopeConfiguration;
import org.gcube.spatial.data.sdi.model.health.HealthReport;
import org.gcube.spatial.data.sdi.model.health.Level;
import org.gcube.spatial.data.sdi.model.health.ServiceHealthReport;
import org.gcube.spatial.data.sdi.model.services.GeoNetworkServiceDefinition;
import org.gcube.spatial.data.sdi.model.services.GeoServerDefinition;
import org.gcube.spatial.data.sdi.model.services.ServiceDefinition;
import org.gcube.spatial.data.sdi.model.services.ThreddsDefinition;
import org.gcube.spatial.data.sdi.utils.ScopeUtils;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Singleton
public class SDIManagerImpl implements SDIManager {
GeoNetworkManager geonetworkManager;
ThreddsManager threddsManager;
GISManager gisManager;
@Inject
public SDIManagerImpl(GeoNetworkManager geonetworkManager, ThreddsManager threddsManager, GISManager gisManager) {
super();
this.geonetworkManager = geonetworkManager;
this.threddsManager = threddsManager;
this.gisManager = gisManager;
}
@Override
public ScopeConfiguration getContextConfiguration() {
// TODO filter info by user role
ScopeConfiguration toReturn=new ScopeConfiguration();
toReturn.setContextName(ScopeUtils.getCurrentScopeName());
try{
toReturn.setGeonetworkConfiguration(geonetworkManager.getSuggestedInstances());
}catch(Exception e){
log.warn("Scope is not well configured. Missing GeoNetwork. ",e);
}
try{
toReturn.setThreddsConfiguration(threddsManager.getSuggestedInstances());
}catch(Exception e){
log.warn("THREDDS not found in current scope {} ",ScopeUtils.getCurrentScope());
}
try{
toReturn.setGeoserverClusterConfiguration(gisManager.getSuggestedInstances());
}catch(Exception e){
log.warn("GeoServer not found in current scope {} ",ScopeUtils.getCurrentScope());
}
return toReturn;
}
@Override
public String toString() {
// TODO Auto-generated method stub
return super.toString();
}
@Override
public HealthReport getHealthReport() {
HealthReport report=new HealthReport();
report.setContext(ScopeUtils.getCurrentScope());
ServiceHealthReport threddsReport=threddsManager.getHealthReport();
report.setThredds(threddsReport);
ServiceHealthReport gnReport=geonetworkManager.getHealthReport();
report.setGeonetwork(gnReport);
ServiceHealthReport gsReport=gisManager.getHealthReport();
report.setGeoserverCluster(gsReport);
Level overall=Level.OK;
if(threddsReport.getOverallStatus().equals(Level.ERROR)||
gnReport.getOverallStatus().equals(Level.ERROR)||
gsReport.getOverallStatus().equals(Level.ERROR)) overall=Level.ERROR;
else if(threddsReport.getOverallStatus().equals(Level.WARNING)||
gnReport.getOverallStatus().equals(Level.WARNING)||
gsReport.getOverallStatus().equals(Level.WARNING)) overall=Level.WARNING;
report.setOverallStatus(overall);
log.debug("Returning report : {} ",report);
return report;
}
@Override
public String registerService(ServiceDefinition definition) throws ServiceRegistrationException{
try {
switch(definition.getType()) {
case GEONETWORK :
return geonetworkManager.registerService((GeoNetworkServiceDefinition)definition);
case GEOSERVER :
return gisManager.registerService((GeoServerDefinition)definition);
case THREDDS :
return threddsManager.registerService((ThreddsDefinition)definition);
default : throw new InvalidServiceDefinitionException("Unable to register. Invalid service type. Definition was "+definition);
}
}catch(ClassCastException e) {
throw new InvalidServiceDefinitionException("Unable to register. Incoherent service type. Definition was "+definition);
}
}
@Override
public String importService(String sourceToken, String host, ServiceDefinition.Type expectedType) throws ServiceRegistrationException {
switch(expectedType) {
case GEONETWORK :
return geonetworkManager.importHostFromToken(sourceToken, host);
case GEOSERVER :
return gisManager.importHostFromToken(sourceToken, host);
case THREDDS :
return threddsManager.importHostFromToken(sourceToken, host);
default : throw new InvalidServiceDefinitionException("Unable to register. Invalid service type "+expectedType);
}
}
@Override
public GeoNetworkManager getGeoNetworkManager() {
return geonetworkManager;
}
@Override
public GISManager getGeoServerManager() {
return gisManager;
}
@Override
public ThreddsManager getThreddsManager() {
return threddsManager;
}
}

View File

@ -0,0 +1,178 @@
package org.gcube.spatial.data.sdi.engine.impl;
import java.io.File;
import java.io.FileFilter;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.UUID;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.inject.Singleton;
import org.apache.commons.io.IOUtils;
import org.gcube.spatial.data.sdi.LocalConfiguration;
import org.gcube.spatial.data.sdi.engine.TemporaryPersistence;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Singleton
public class TemporaryPersistenceImpl implements TemporaryPersistence {
private final static String UPLOADING_FILE_SUFFIX=".part";
private final static FileFilter TO_CHECK_FILES_FILTER=new FileFilter() {
@Override
public boolean accept(File pathname) {
return !pathname.isDirectory()&&!pathname.getName().endsWith(UPLOADING_FILE_SUFFIX);
}
};
// *************** CHECKER THREAD
@AllArgsConstructor
private static class CleanUpThread implements Runnable{
private Long TTL;
private File persistenceLocation;
private FileFilter toCheckFiles;
@Override
public void run() {
try{
log.debug("Executing cleanup..");
long count=0l;
for(File found:persistenceLocation.listFiles(toCheckFiles))
// delete files whose age exceeds the configured TTL
if(System.currentTimeMillis()-found.lastModified()>=TTL){
try{
Files.delete(found.toPath());
count++;
}catch(Throwable t){
log.warn("Unable to delete {} ",found.getAbsolutePath(),t);
}
}
log.debug("Cleaned up {} files.",count);
}catch(Throwable t){
log.error("Unexpected error.",t);
}
}
}
// private static TemporaryPersistenceImpl singleton=null;
// *************** INSTANCE LOGIC
private File persistenceLocation=null;
private ScheduledExecutorService service=null;
@Override
@PostConstruct
public void init() {
try {
persistenceLocation=Files.createTempDirectory("SDI").toFile();
System.out.println("************************************** TEMPORARY PERSISTENCE INIT **************************");
System.out.println("SDI-Service - Temporary persistence location is "+persistenceLocation.getAbsolutePath());
System.out.println("**************************************");
log.trace("Temporary persistence is "+persistenceLocation.getAbsolutePath());
// init check thread
service = new ScheduledThreadPoolExecutor (1);
long TTL=Long.parseLong(LocalConfiguration.getProperty(LocalConfiguration.TEMPORARY_PERSISTENCE_TTL, "120000"));
log.debug("Temp TTL is {} ",TTL);
long delay=TTL/4;
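// schedule the cleanup task to run four times per TTL window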
service.scheduleWithFixedDelay(new CleanUpThread(TTL, persistenceLocation, TO_CHECK_FILES_FILTER), delay, delay, TimeUnit.MILLISECONDS);
}catch(Throwable t) {
throw new RuntimeException("Unable to init persistence ",t);
}
}
@Override
public File getById(String id) throws FileNotFoundException {
File toReturn=new File(persistenceLocation,id);
if(!toReturn.exists()) throw new FileNotFoundException();
return toReturn;
}
@Override
public String store(InputStream is) throws FileNotFoundException, IOException {
String partUUID=getUUID()+".part";
log.debug("Storing file "+partUUID);
File created=transferStream(is, new File(persistenceLocation,partUUID));
// strip the ".part" suffix to obtain the definitive identifier
String toReturn=created.getName().substring(0, created.getName().lastIndexOf("."));
created.renameTo(new File(persistenceLocation,toReturn));
log.debug("Completed. Part renamed to "+toReturn);
return toReturn;
}
@Override
public void clean(String id){
try{
System.out.println("*************************************** TEMPORARY PERSISTENCE PRE DESTROY ******************************");
Files.delete(Paths.get(persistenceLocation.getAbsolutePath(), id));
}catch(Throwable t) {
throw new RuntimeException("Unable to clean up temporary persistence. ",t);
}
}
@Override
@PreDestroy
public void shutdown() {
log.debug("Shutting down persistence..");
service.shutdownNow();
log.debug("Clearing persistence folder..");
for(File f:persistenceLocation.listFiles())
try{
if(!f.delete()) f.deleteOnExit();
}catch(Throwable t){
log.warn("Exception while clearing persistence.. ",t);
}
}
@Override
public void update(String id, InputStream is) throws FileNotFoundException, IOException {
File toUpdate=getById(id);
transferStream(is,toUpdate);
}
private static File transferStream(InputStream in, File destination) throws FileNotFoundException, IOException{
FileOutputStream out=null;
try{
out=new FileOutputStream(destination,false);
int read = 0;
byte[] bytes = new byte[1024];
while ((read = in.read(bytes)) != -1) {
out.write(bytes, 0, read);
}
out.flush();
return destination;
}finally{
if(out!=null) IOUtils.closeQuietly(out);
}
}
private static String getUUID(){
return UUID.randomUUID().toString().replace(" ", "_");
}
}

View File

@ -0,0 +1,87 @@
package org.gcube.spatial.data.sdi.engine.impl;
import java.io.File;
import java.util.HashMap;
import java.util.List;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.gcube.data.transfer.model.plugins.thredds.ThreddsCatalog;
import org.gcube.data.transfer.model.plugins.thredds.ThreddsInfo;
import org.gcube.spatial.data.sdi.LocalConfiguration;
import org.gcube.spatial.data.sdi.engine.TemplateManager;
import org.gcube.spatial.data.sdi.engine.ThreddsManager;
import org.gcube.spatial.data.sdi.engine.impl.cluster.AbstractCluster;
import org.gcube.spatial.data.sdi.engine.impl.cluster.ThreddsCluster;
import org.gcube.spatial.data.sdi.engine.impl.cluster.ThreddsController;
import org.gcube.spatial.data.sdi.engine.impl.faults.ConfigurationNotFoundException;
import org.gcube.spatial.data.sdi.engine.impl.faults.ThreddsOperationFault;
import org.gcube.spatial.data.sdi.engine.impl.is.ISModule;
import org.gcube.spatial.data.sdi.engine.impl.is.ThreddsRetriever;
import org.gcube.spatial.data.sdi.engine.impl.metadata.GenericTemplates;
import org.gcube.spatial.data.sdi.model.service.ThreddsDescriptor;
import org.gcube.spatial.data.sdi.model.services.ThreddsDefinition;
import lombok.extern.slf4j.Slf4j;
@Singleton
@Slf4j
public class ThreddsManagerImpl extends AbstractManager<ThreddsDescriptor, ThreddsDefinition, ThreddsController> implements ThreddsManager {
private ThreddsCluster cluster=null;
private ThreddsRetriever retriever=null;
private TemplateManager templateManager=null;
@Inject
public ThreddsManagerImpl(TemplateManager templateManager) {
retriever=new ThreddsRetriever();
cluster=new ThreddsCluster(LocalConfiguration.getTTL(LocalConfiguration.THREDDS_CACHE_TTL),retriever,"Thredds Cache");
this.templateManager=templateManager;
}
@Override
protected AbstractCluster<ThreddsDescriptor, ThreddsController> getCluster() {
return cluster;
}
@Override
protected ISModule getRetriever() {
return retriever;
}
@Override
public List<ThreddsDescriptor> getSuggestedInstances() throws ConfigurationNotFoundException {
return getAvailableInstances();
}
@Override
public ThreddsCatalog publishCatalog(File catalogFile, String catalogReference) throws ConfigurationNotFoundException, ThreddsOperationFault {
return getCluster().getDefaultController().publishCatalog(catalogFile, catalogReference);
}
@Override
public ThreddsCatalog createCatalogFromTemplate(String authorityUrl, String catalogPath, String datasetScanId,
String datasetScanName, String subFolder, String catalogReference) throws Exception {
ThreddsController controller=getCluster().getDefaultController();
ThreddsInfo info=controller.getThreddsInfo();
log.info("Going to create catalog for authorityURL {}, path {}, subFolder {} ",authorityUrl,catalogPath,subFolder);
HashMap<String,String> parameters=new HashMap<String,String>();
parameters.put(GenericTemplates.ThreddsCatalogTemplate.AUTHORITY_URL, authorityUrl);
parameters.put(GenericTemplates.ThreddsCatalogTemplate.CATALOG_PATH, catalogPath);
parameters.put(GenericTemplates.ThreddsCatalogTemplate.DATASET_SCAN_ID, datasetScanId);
parameters.put(GenericTemplates.ThreddsCatalogTemplate.DATASET_SCAN_NAME, datasetScanName);
parameters.put(GenericTemplates.ThreddsCatalogTemplate.LOCATION, info.getLocalBasePath()+"/"+subFolder);
File catalog=
templateManager.generateFromTemplate(parameters, GenericTemplates.ThreddsCatalogTemplate.FILENAME);
return controller.publishCatalog(catalog, catalogReference);
}
}

View File

@ -0,0 +1,100 @@
package org.gcube.spatial.data.sdi.engine.impl.cluster;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.concurrent.ConcurrentHashMap;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.spatial.data.sdi.utils.ScopeUtils;
import org.gcube.spatial.data.sdi.NetUtils;
import org.gcube.spatial.data.sdi.engine.impl.faults.ConfigurationNotFoundException;
import org.gcube.spatial.data.sdi.engine.impl.faults.InvalidServiceEndpointException;
import org.gcube.spatial.data.sdi.engine.impl.is.CachedObject;
import org.gcube.spatial.data.sdi.engine.impl.is.ISModule;
import org.gcube.spatial.data.sdi.model.service.GeoServiceDescriptor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public abstract class AbstractCluster<T extends GeoServiceDescriptor,E extends GeoServiceController<T>> {
private long objectsTTL;
private ConcurrentHashMap<String,CachedObject<ArrayList<E>>> scopedCache;
private ISModule retriever;
private String cacheName;
public synchronized ArrayList<E> getActualCluster() throws ConfigurationNotFoundException{
String key=ScopeUtils.getCurrentScope();
log.info("Getting object from cache{} , key is {} ",cacheName,key);
if((!scopedCache.containsKey(key))||(!scopedCache.get(key).isValid(objectsTTL)))
scopedCache.put(key, new CachedObject<ArrayList<E>>(getLiveControllerCollection()));
return scopedCache.get(key).getTheObject();
}
protected ArrayList<E> getLiveControllerCollection() throws ConfigurationNotFoundException{
ArrayList<E> toReturn=new ArrayList<E>();
for(ServiceEndpoint endpoint : retriever.getISInformation())
try {
toReturn.add(translate(endpoint));
}catch(Throwable t) {
log.warn("Unable to handle ServiceEndpoint [name {} , ID {}]",endpoint.profile().name(),endpoint.id(),t);
}
Comparator<E> comp=getComparator();
if(comp!=null)Collections.sort(toReturn, comp);
return toReturn;
}
protected abstract E translate(ServiceEndpoint e) throws InvalidServiceEndpointException;
public void invalidate(){
String key=ScopeUtils.getCurrentScope();
log.info("Invalidating cache {} under scope {} ",cacheName,key);
if(scopedCache.containsKey(key))scopedCache.get(key).invalidate();
}
public void invalidateAll(){
for(CachedObject<?> obj:scopedCache.values())obj.invalidate();
}
public E getDefaultController() throws ConfigurationNotFoundException {
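// the cluster is sorted with the subclass comparator (when provided), so the first controller is the preferred instance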
return getActualCluster().get(0);
}
protected abstract Comparator<E> getComparator();
public E getControllerByHostName(String hostname) throws ConfigurationNotFoundException {
ArrayList<E> controllerCluster=getLiveControllerCollection();
log.debug("Looking for {} inside cluster [size = {}]",hostname,controllerCluster.size());
for(E toCheck:controllerCluster) {
String toCheckHostname=NetUtils.getHostByURL(toCheck.getDescriptor().getBaseEndpoint());
try {
if(NetUtils.isSameHost(toCheckHostname, hostname))
return toCheck;
} catch (UnknownHostException e) {
log.warn("Unable to check equality between {} and {} hosts.",toCheckHostname,hostname,e);
}
}
return null;
}
public AbstractCluster(long objectsTTL, ISModule retriever, String cacheName) {
super();
this.objectsTTL = objectsTTL;
this.retriever = retriever;
this.cacheName=cacheName;
scopedCache=new ConcurrentHashMap<>();
}
}

View File

@ -0,0 +1,52 @@
package org.gcube.spatial.data.sdi.engine.impl.cluster;
import java.util.ArrayList;
import java.util.Comparator;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.spatial.data.sdi.engine.impl.faults.ConfigurationNotFoundException;
import org.gcube.spatial.data.sdi.engine.impl.faults.InvalidServiceEndpointException;
import org.gcube.spatial.data.sdi.engine.impl.faults.ServiceInteractionException;
import org.gcube.spatial.data.sdi.engine.impl.is.ISModule;
import org.gcube.spatial.data.sdi.model.service.GeoNetworkDescriptor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class GeoNetworkCluster extends AbstractCluster<GeoNetworkDescriptor,GeoNetworkController>{
private static final Comparator<GeoNetworkController> comparator=new Comparator<GeoNetworkController>() {
@Override
public int compare(GeoNetworkController o1, GeoNetworkController o2) {
return o1.getPriority().compareTo(o2.getPriority());
}
};
public GeoNetworkCluster(long objectsTTL, ISModule retriever, String cacheName) {
super(objectsTTL, retriever, cacheName);
// TODO Auto-generated constructor stub
}
@Override
protected Comparator<GeoNetworkController> getComparator() {
return comparator;
}
@Override
protected GeoNetworkController translate(ServiceEndpoint e) throws InvalidServiceEndpointException {
return new GeoNetworkController(e);
}
@Override
protected ArrayList<GeoNetworkController> getLiveControllerCollection() throws ConfigurationNotFoundException {
ArrayList<GeoNetworkController> toReturn= super.getLiveControllerCollection();
for(GeoNetworkController controller:toReturn)
try{
controller.configure();
}catch(ServiceInteractionException e) {
log.warn("Unexpected exception while configuring GeoNetwork SE [ID : "+controller.getServiceEndpoint().id()+"]",e);
}
return toReturn;
}
}

View File

@@ -0,0 +1,339 @@
package org.gcube.spatial.data.sdi.engine.impl.cluster;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.Property;
import org.gcube.spatial.data.sdi.LocalConfiguration;
import org.gcube.spatial.data.sdi.engine.impl.faults.InvalidServiceEndpointException;
import org.gcube.spatial.data.sdi.engine.impl.faults.OutdatedServiceEndpointException;
import org.gcube.spatial.data.sdi.engine.impl.faults.ServiceInteractionException;
import org.gcube.spatial.data.sdi.engine.impl.gn.extension.GeoNetworkClient;
import org.gcube.spatial.data.sdi.engine.impl.gn.extension.GeoNetworkUtils;
import org.gcube.spatial.data.sdi.engine.impl.gn.utils.UserUtils;
import org.gcube.spatial.data.sdi.engine.impl.is.ISUtils;
import org.gcube.spatial.data.sdi.model.credentials.AccessType;
import org.gcube.spatial.data.sdi.model.credentials.Credentials;
import org.gcube.spatial.data.sdi.model.gn.Group;
import org.gcube.spatial.data.sdi.model.gn.User;
import org.gcube.spatial.data.sdi.model.service.GeoNetworkDescriptor;
import org.gcube.spatial.data.sdi.utils.ScopeUtils;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class GeoNetworkController extends GeoServiceController<GeoNetworkDescriptor>{
private static String scopeUserPrefix=null;
private static String scopePasswordPrefix=null;
private static String ckanUserPrefix=null;
private static String ckanPasswordPrefix=null;
private static String managerUserPrefix=null;
private static String managerPasswordPrefix=null;
private static String assignedScopePrefix=null;
private static String defaultGroupPrefix=null;
private static String sharedGroupPrefix=null;
private static String confidentialGroupPrefix=null;
private static String contextGroupPrefix=null;
private static String suffixesProperty=null;
private static String priorityProperty=null;
static{
scopeUserPrefix=LocalConfiguration.getProperty(LocalConfiguration.GEONETWORK_SE_SCOPE_USER_PREFIX);
scopePasswordPrefix=LocalConfiguration.getProperty(LocalConfiguration.GEONETWORK_SE_SCOPE_PASSWORD_PREFIX);
ckanUserPrefix=LocalConfiguration.getProperty(LocalConfiguration.GEONETWORK_SE_CKAN_USER_PREFIX);
ckanPasswordPrefix=LocalConfiguration.getProperty(LocalConfiguration.GEONETWORK_SE_CKAN_PASSWORD_PREFIX);
managerUserPrefix=LocalConfiguration.getProperty(LocalConfiguration.GEONETWORK_SE_MANAGER_USER_PREFIX);
managerPasswordPrefix=LocalConfiguration.getProperty(LocalConfiguration.GEONETWORK_SE_MANAGER_PASSWORD_PREFIX);
assignedScopePrefix=LocalConfiguration.getProperty(LocalConfiguration.GEONETWORK_SE_ASSIGNED_SCOPE_PREFIX);
defaultGroupPrefix=LocalConfiguration.getProperty(LocalConfiguration.GEONETWORK_SE_DEFAULT_GROUP_PREFIX);
sharedGroupPrefix=LocalConfiguration.getProperty(LocalConfiguration.GEONETWORK_SE_SHARED_GROUP_PREFIX);
confidentialGroupPrefix=LocalConfiguration.getProperty(LocalConfiguration.GEONETWORK_SE_CONFIDENTIAL_GROUP_PREFIX);
contextGroupPrefix=LocalConfiguration.getProperty(LocalConfiguration.GEONETWORK_SE_CONTEXT_GROUP_PREFIX);
suffixesProperty=LocalConfiguration.getProperty(LocalConfiguration.GEONETWORK_SE_SUFFIXES);
priorityProperty=LocalConfiguration.getProperty(LocalConfiguration.GEONETWORK_SE_PRIORITY);
}
private String suffixes;
private Integer priority;
public Integer getPriority() {
return priority;
}
public GeoNetworkController(ServiceEndpoint serviceEndpoint) throws InvalidServiceEndpointException {
super(serviceEndpoint);
}
@Override
protected void setServiceEndpoint(ServiceEndpoint toSet) {
super.setServiceEndpoint(toSet);
suffixes=getSEProperty(suffixesProperty, true);
priority=Integer.parseInt(getSEProperty(priorityProperty, true));
}
@Override
protected GeoNetworkDescriptor getLiveDescriptor(){
GeoNetworkDescriptor descriptor=new GeoNetworkDescriptor();
descriptor.setBaseEndpoint(baseURL);
descriptor.setVersion(version);
String currentScopeName=ScopeUtils.getCurrentScopeName();
String suffix=getSuffixByScope(currentScopeName);
ArrayList<Credentials> availableCredentials=new ArrayList<Credentials>();
availableCredentials.add(adminAccount);
Credentials context=new Credentials(getSEProperty(scopeUserPrefix+suffix, true),
getSEProperty(scopePasswordPrefix+suffix, true), AccessType.CONTEXT_USER);
availableCredentials.add(context);
Credentials ckan=new Credentials(getSEProperty(ckanUserPrefix+suffix, true),
getSEProperty(ckanPasswordPrefix+suffix, true), AccessType.CKAN);
availableCredentials.add(ckan);
String managerUser=getSEProperty(managerUserPrefix+suffix, false);
if(managerUser!=null) {
Credentials manager=new Credentials(managerUser,getSEProperty(managerPasswordPrefix+suffix, true),AccessType.CONTEXT_MANAGER);
availableCredentials.add(manager);
}
descriptor.setAccessibleCredentials(availableCredentials);
descriptor.setPriority(priority);
descriptor.setContextGroup(getSEProperty(contextGroupPrefix+suffix, true));
descriptor.setSharedGroup(getSEProperty(sharedGroupPrefix+suffix, true));
String confidentialGroup=getSEProperty(confidentialGroupPrefix+suffix, false);
if(confidentialGroup!=null)
descriptor.setConfidentialGroup(confidentialGroup);
descriptor.setDefaultGroup(getSEProperty(defaultGroupPrefix+suffix, true));
descriptor.setPublicGroup(LocalConfiguration.getProperty(LocalConfiguration.GEONETWORK_GROUP_ALL));
return descriptor;
}
@Override
protected AccessPoint getTheRightAccessPoint(ServiceEndpoint endpoint) {
for(AccessPoint declaredPoint:endpoint.profile().accessPoints().asCollection()) {
if(declaredPoint.name().equals(LocalConfiguration.getProperty(LocalConfiguration.GEONETWORK_SE_ENDPOINT_NAME))) {
return declaredPoint;
}
}
return null;
}
@Override
protected void initServiceEndpoint() throws OutdatedServiceEndpointException, ServiceInteractionException {
String scopeName=ScopeUtils.getCurrentScopeName();
try {
if(getSuffixByScope(scopeName)==null) throw new InvalidServiceEndpointException("Scope not present in resource");
}catch(InvalidServiceEndpointException e) {
insertScopeInfo(ScopeUtils.getCurrentScope());
}
}
private void insertScopeInfo(String scope) throws OutdatedServiceEndpointException, ServiceInteractionException {
String scopeName=ScopeUtils.getScopeName(scope);
log.info("Creating scope {} configuration for GeoNetwork at {} ",scopeName,baseURL);
//Get GN Client
log.debug("Instantiating client as admin..");
GeoNetworkClient gnClient=new GeoNetworkClient(baseURL,version,adminAccount.getPassword(),adminAccount.getUsername());
log.debug("Getting Users and groups from instance..");
Set<Group> existingGroups=gnClient.getGroups();
Set<User> existingUsers=gnClient.getUsers();
// Get parent scopes users and groups
// configure parent [mng,ctx] to access [sh]
// configure siblings [mng,ctx] to access [sh]
// configure users [mng,ctx] to access siblings [sh] and parent [ctx,sh]
ArrayList<User> sharedGroupExternalUsers=new ArrayList<User>();
ArrayList<Integer> externalGroupsToAccess=new ArrayList<Integer>();
// gathering users and groups from siblings
log.debug("Getting Siblings information from SE..");
for(String siblingScope:ISUtils.getSiblingsScopesInResource(serviceEndpoint, scope))
try {
getSuffixByScope(ScopeUtils.getScopeName(siblingScope));
for(String username:getUserNamesByScope(siblingScope, true, true, false))
sharedGroupExternalUsers.add(UserUtils.getByName(existingUsers, username));
externalGroupsToAccess.addAll(getGroupIDSByScope(siblingScope, true, false, false));
}catch(InvalidServiceEndpointException e) {
log.debug("Sibling scope {} not found in resource. Skipping.",siblingScope);
}
log.debug("Getting Parents information from SE..");
// gathering users and groups from parents
for(String parentScope:ScopeUtils.getParentScopes(scope))
try {
getSuffixByScope(ScopeUtils.getScopeName(parentScope));
for(String username:getUserNamesByScope(parentScope, true, true, false))
sharedGroupExternalUsers.add(UserUtils.getByName(existingUsers, username));
externalGroupsToAccess.addAll(getGroupIDSByScope(parentScope, true, true, false));
}catch(InvalidServiceEndpointException e) {
log.debug("Parent scope {} not found in resource. Skipping it. ",parentScope);
}
// Creating groups
log.debug("Creating groups..");
String contactMail=LocalConfiguration.getProperty(LocalConfiguration.GEONETWORK_MAIL);
int passwordLength=Integer.parseInt(LocalConfiguration.getProperty(LocalConfiguration.GEONETWORK_PASSWORD_LENGTH, "10"));
// create user & groups [sh,conf,ctx]
Group shared=GeoNetworkUtils.generateGroup(existingGroups, "Shared_"+scopeName, "Shared metadata group for "+scopeName, contactMail);
shared=gnClient.createGroup(shared);
existingGroups.add(shared);
Group context=GeoNetworkUtils.generateGroup(existingGroups, "Context_"+scopeName, "Context metadata group for "+scopeName, contactMail);
context=gnClient.createGroup(context);
existingGroups.add(context);
Group confidential=GeoNetworkUtils.generateGroup(existingGroups, "Confidential_"+scopeName, "Confidential metadata group for "+scopeName, contactMail);
confidential=gnClient.createGroup(confidential);
existingGroups.add(confidential);
// Giving access to shared group
log.debug("Giving access to shared group from external scopes..");
for(User toUpdate:sharedGroupExternalUsers)
gnClient.editUser(toUpdate, Collections.singleton(shared.getId()));
log.debug("Creating users..");
// CKAN -> sh,ctx
User ckan=GeoNetworkUtils.generateUser(existingUsers, passwordLength, "CKAN_"+scopeName);
ckan.setId(gnClient.createUsers(ckan, Arrays.asList(shared.getId(),context.getId())).getId());
existingUsers.add(ckan);
// CTX-USR -> sh,ctx,siblings [sh], parents [sh,ctx]
User ctx=GeoNetworkUtils.generateUser(existingUsers, passwordLength, "Ctx_"+scopeName);
ArrayList<Integer> ctxUserAccessibleGroups=new ArrayList<>();
ctxUserAccessibleGroups.addAll(externalGroupsToAccess);
ctxUserAccessibleGroups.add(shared.getId());
ctxUserAccessibleGroups.add(context.getId());
ctx.setId(gnClient.createUsers(ctx, ctxUserAccessibleGroups).getId());
existingUsers.add(ctx);
// CTX-MANAGER -> sh,ctx,conf siblings [sh], parents [sh,ctx]
User manager=GeoNetworkUtils.generateUser(existingUsers, passwordLength, "Mng_"+scopeName);
ctxUserAccessibleGroups.add(confidential.getId());
manager.setId(gnClient.createUsers(manager, ctxUserAccessibleGroups).getId());
existingUsers.add(manager);
// Setting information in Service Endpoint
log.debug("Inserting configuration in Service Endpoint");
String generatedSuffix=generateSuffix(suffixes);
ArrayList<Property> toUpdateProperties=new ArrayList<>();
toUpdateProperties.add( new Property().nameAndValue(assignedScopePrefix+generatedSuffix, scopeName));
toUpdateProperties.add( new Property().nameAndValue(scopeUserPrefix+generatedSuffix, ctx.getUsername()));
toUpdateProperties.add( new Property().nameAndValue(scopePasswordPrefix+generatedSuffix, ISUtils.encryptString(ctx.getPassword())).encrypted(true));
toUpdateProperties.add( new Property().nameAndValue(ckanUserPrefix+generatedSuffix, ckan.getUsername()));
toUpdateProperties.add( new Property().nameAndValue(ckanPasswordPrefix+generatedSuffix, ISUtils.encryptString(ckan.getPassword())).encrypted(true));
toUpdateProperties.add( new Property().nameAndValue(managerUserPrefix+generatedSuffix, manager.getUsername()));
toUpdateProperties.add( new Property().nameAndValue(managerPasswordPrefix+generatedSuffix, ISUtils.encryptString(manager.getPassword())).encrypted(true));
toUpdateProperties.add( new Property().nameAndValue(sharedGroupPrefix+generatedSuffix, shared.getId()+""));
toUpdateProperties.add( new Property().nameAndValue(defaultGroupPrefix+generatedSuffix, shared.getId()+""));
toUpdateProperties.add( new Property().nameAndValue(confidentialGroupPrefix+generatedSuffix, confidential.getId()+""));
toUpdateProperties.add( new Property().nameAndValue(contextGroupPrefix+generatedSuffix, context.getId()+""));
String suffixesList=(suffixes!=null&&!suffixes.trim().isEmpty()&&!suffixes.trim().equals(","))?suffixes+","+generatedSuffix:generatedSuffix;
toUpdateProperties.add(new Property().nameAndValue(suffixesProperty, suffixesList));
accessPoint.properties().addAll(toUpdateProperties);
throw new OutdatedServiceEndpointException("Created scope configuration for "+scopeName);
}
private String getSuffixByScope(String scopeName) {
log.debug("looking for scope {} suffix. Available suffixes are : {} ",scopeName,suffixes);
if(suffixes!=null)
for(String suff:suffixes.split(","))
if(suff!=null&&!suff.isEmpty()) {
String propertyValue=getSEProperty(assignedScopePrefix+suff, false);
if(propertyValue!=null&&propertyValue.equals(scopeName)) return suff;
}
return null;
}
private static String generateSuffix(String existingSuffixes){
log.debug("Generating suffix, existing are : "+existingSuffixes);
String[] suffixArray=existingSuffixes==null?new String[0]:existingSuffixes.split(",");
int maxIndex=0;
for(String suff:suffixArray){
try{
int actual=Integer.parseInt(suff);
if(actual>maxIndex) maxIndex=actual;
}catch(Throwable t){
}
}
String generated=(maxIndex+1)+"";
log.debug("Generated suffix is : "+generated);
return generated;
}
private HashSet<String> getUserNamesByScope(String scope, boolean getContext, boolean getManager, boolean getCKAN){
HashSet<String> toReturn=new HashSet<String>();
String scopeName=ScopeUtils.getScopeName(scope);
String scopeSuffix=getSuffixByScope(scopeName);
if(scopeSuffix!=null) { // context might be not configured
if(getContext)toReturn.add(getSEProperty(scopeUserPrefix+scopeSuffix, true));
if(getManager) {
String scopeManagerUserName=getSEProperty(managerUserPrefix+scopeSuffix, false);
if(scopeManagerUserName!=null) toReturn.add(scopeManagerUserName);
}
if(getCKAN) toReturn.add(getSEProperty(ckanUserPrefix+scopeSuffix, true));
}
return toReturn;
}
private HashSet<Integer> getGroupIDSByScope(String scope, boolean getShared,boolean getContext,boolean getConfidential){
HashSet<Integer> toReturn=new HashSet<Integer>();
String scopeName=ScopeUtils.getScopeName(scope);
String scopeSuffix=getSuffixByScope(scopeName);
if(scopeSuffix!=null) {
if(getShared)toReturn.add(Integer.parseInt(getSEProperty(sharedGroupPrefix+scopeSuffix,true)));
if(getContext) toReturn.add(Integer.parseInt(getSEProperty(contextGroupPrefix+scopeSuffix, true)));
if(getConfidential) {
String confidentialGroupName=getSEProperty(confidentialGroupPrefix+scopeSuffix,true);
if(confidentialGroupName!=null) toReturn.add(Integer.parseInt(confidentialGroupName));
}
}
return toReturn;
}
}
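The controller above maps each scope to a numeric suffix listed under the configured suffixes property, and namespaces all per-scope credentials and group ids with that suffix. A hedged sketch of the resulting AccessPoint layout (the property names below are illustrative; the real prefixes are resolved from LocalConfiguration):
// Illustrative layout for a scope mapped to suffix "3" (names and values are examples only):
//   suffixes          = "1,2,3"
//   assigned-scope_3  = "devVRE"
//   scope-user_3      = "Ctx_devVRE"     scope-password_3  = <encrypted>
//   ckan-user_3       = "CKAN_devVRE"    ckan-password_3   = <encrypted>
//   shared-group_3    = "42"             context-group_3   = "43"
// generateSuffix("1,2,3") returns "4": the highest numeric suffix plus one, so a newly
// configured scope never reuses an existing suffix.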

View File

@@ -0,0 +1,55 @@
package org.gcube.spatial.data.sdi.engine.impl.cluster;
import java.util.ArrayList;
import java.util.Comparator;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.spatial.data.sdi.engine.impl.faults.ConfigurationNotFoundException;
import org.gcube.spatial.data.sdi.engine.impl.faults.InvalidServiceEndpointException;
import org.gcube.spatial.data.sdi.engine.impl.faults.ServiceInteractionException;
import org.gcube.spatial.data.sdi.engine.impl.is.ISModule;
import org.gcube.spatial.data.sdi.model.service.GeoServerDescriptor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class GeoServerCluster extends AbstractCluster<GeoServerDescriptor,GeoServerController>{
private static final Comparator<GeoServerController> comparator=new Comparator<GeoServerController>() {
@Override
public int compare(GeoServerController o1, GeoServerController o2) {
return o1.getHostedLayersCount().compareTo(o2.getHostedLayersCount());
}
};
public GeoServerCluster(long objectsTTL, ISModule retriever, String cacheName) {
super(objectsTTL, retriever, cacheName);
// TODO Auto-generated constructor stub
}
@Override
protected Comparator<GeoServerController> getComparator() {
return comparator;
}
@Override
protected GeoServerController translate(ServiceEndpoint e) throws InvalidServiceEndpointException {
return new GeoServerController(e);
}
@Override
protected ArrayList<GeoServerController> getLiveControllerCollection() throws ConfigurationNotFoundException {
ArrayList<GeoServerController> toReturn= super.getLiveControllerCollection();
for(GeoServerController controller:toReturn)
try{
controller.configure();
}catch(ServiceInteractionException e) {
log.warn("Unexpected exception while configuring GeoServer SE [ID : "+controller.getServiceEndpoint().id()+"]",e);
}
return toReturn;
}
}

View File

@@ -0,0 +1,244 @@
package org.gcube.spatial.data.sdi.engine.impl.cluster;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.Property;
import org.gcube.spatial.data.sdi.utils.ScopeUtils;
import org.gcube.spatial.data.sdi.LocalConfiguration;
import org.gcube.spatial.data.sdi.engine.impl.faults.InvalidServiceEndpointException;
import org.gcube.spatial.data.sdi.engine.impl.faults.OutdatedServiceEndpointException;
import org.gcube.spatial.data.sdi.engine.impl.is.ISUtils;
import org.gcube.spatial.data.sdi.model.credentials.AccessType;
import org.gcube.spatial.data.sdi.model.credentials.Credentials;
import org.gcube.spatial.data.sdi.model.service.GeoServerDescriptor;
import it.geosolutions.geoserver.rest.GeoServerRESTManager;
import it.geosolutions.geoserver.rest.GeoServerRESTPublisher;
import it.geosolutions.geoserver.rest.GeoServerRESTReader;
import it.geosolutions.geoserver.rest.manager.GeoServerRESTStoreManager;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class GeoServerController extends GeoServiceController<GeoServerDescriptor>{
//CACHED INFO
private HashMap<String,HashSet<String>> dataStores=null;
private HashSet<String> workspaces=null;
private HashSet<String> styles;
private Long hostedLayerCount=0l;
public GeoServerController(ServiceEndpoint serviceEndpoint) throws InvalidServiceEndpointException {
super(serviceEndpoint);
}
@Override
public GeoServerDescriptor getLiveDescriptor() {
GeoServerDescriptor toReturn=new GeoServerDescriptor();
toReturn.setBaseEndpoint(baseURL);
toReturn.setVersion(version);
String scopeName=ScopeUtils.getCurrentScopeName();
Map<String,Property> pointProperties=accessPoint.propertyMap();
for(AccessType toLookForType:AccessType.values()) {
String userNameProperty=toLookForType+"_u_"+scopeName;
// NOTE: "_pwd_" is an assumed key suffix; the password is expected under its own property, distinct from the username one
String passwordProperty=toLookForType+"_pwd_"+scopeName;
if(pointProperties.containsKey(userNameProperty)) {
String user=pointProperties.get(userNameProperty).value();
String password=ISUtils.decryptString(pointProperties.get(passwordProperty).value());
toReturn.getAccessibleCredentials().add(new Credentials(user,password,toLookForType));
}
}
toReturn.getAccessibleCredentials().add(adminAccount);
//Getting scope data spaces
String confidentialProperty="confidential_"+scopeName;
if(pointProperties.containsKey(confidentialProperty))
toReturn.setConfidentialWorkspace(pointProperties.get(confidentialProperty).value());
String contextProperty="context_"+scopeName;
if(pointProperties.containsKey(contextProperty))
toReturn.setContextVisibilityWorkspace(pointProperties.get(contextProperty).value());
String sharedProperty="shared_"+scopeName;
if(pointProperties.containsKey(sharedProperty))
toReturn.setSharedWorkspace(pointProperties.get(sharedProperty).value());
String publicProperty="public_"+scopeName;
if(pointProperties.containsKey(publicProperty))
toReturn.setPublicWorkspace(pointProperties.get(publicProperty).value());
toReturn.setHostedLayersCount(getHostedLayersCount());
return toReturn;
}
@Override
protected AccessPoint getTheRightAccessPoint(ServiceEndpoint endpoint) {
for(AccessPoint declaredPoint:endpoint.profile().accessPoints().asCollection()) {
if(declaredPoint.name().equals(LocalConfiguration.getProperty(LocalConfiguration.GEOSERVER_SE_ENDPOINT_NAME))) {
return declaredPoint;
}
}
return null;
}
// Controller logic
@Override
protected void initServiceEndpoint() throws OutdatedServiceEndpointException {
// TODO Auto-generated method stub
}
private long lastDatastoreUpdate=0l;
private long lastWorkspaceUpdate=0l;
private long lastStylesUpdate=0l;
private long lastLayerCountUpdate=0l;
public GeoServerRESTReader getReader() throws MalformedURLException{
return getManager().getReader();
}
public GeoServerRESTStoreManager getDataStoreManager() throws IllegalArgumentException, MalformedURLException{
return getManager().getStoreManager();
}
public GeoServerRESTPublisher getPublisher() throws IllegalArgumentException, MalformedURLException{
return getManager().getPublisher();
}
protected GeoServerRESTManager getManager() throws IllegalArgumentException, MalformedURLException{
return new GeoServerRESTManager(new URL(baseURL), adminAccount.getUsername(), adminAccount.getPassword());
}
public synchronized Set<String> getDatastores(String workspace){
try {
if(dataStores==null || (System.currentTimeMillis()-lastDatastoreUpdate>LocalConfiguration.getTTL(LocalConfiguration.GEOSERVER_DATASTORE_TTL))){
log.trace("Loading datastores for {} ",baseURL);
HashMap<String,HashSet<String>> toSet=new HashMap<>();
for(String ws: getWorkspaces()){
HashSet<String> currentWsDatastores=new HashSet<>(getLiveDatastores(ws));
log.debug("Found {} ds in {} ws ",currentWsDatastores.size(),ws);
toSet.put(ws, currentWsDatastores);
}
dataStores=toSet;
lastDatastoreUpdate=System.currentTimeMillis();
}
}catch(Throwable t) {
log.warn("Unable to get Datastores for {} ",baseURL,t);
}
return dataStores.get(workspace);
}
public synchronized Long getHostedLayersCount(){
try{
if(System.currentTimeMillis()-lastLayerCountUpdate>LocalConfiguration.getTTL(LocalConfiguration.GEOSERVER_HOSTED_LAYERS_TTL)){
log.trace("Loading layer count for {} ",baseURL);
hostedLayerCount=getLiveHostedLayersCount();
log.debug("Found {} layers ",hostedLayerCount);
lastLayerCountUpdate=System.currentTimeMillis();
}
}catch(Throwable t){
log.warn("Unable to get layer count for {} ",baseURL,t);
}
return hostedLayerCount;
}
public synchronized Set<String> getStyles(){
try {
if(styles==null||(System.currentTimeMillis()-lastStylesUpdate>LocalConfiguration.getTTL(LocalConfiguration.GEOSERVER_STYLES_TTL))){
log.trace("Loading styles for {} ",baseURL);
styles=new HashSet<>(getLiveStyles());
log.debug("Found {} styles ",styles.size());
lastStylesUpdate=System.currentTimeMillis();
}
}catch(Throwable t) {
log.warn("Unable to get Styles for {} ",baseURL,t);
}
return styles;
}
public synchronized Set<String> getWorkspaces() {
try {
if(workspaces==null||(System.currentTimeMillis()-lastWorkspaceUpdate>LocalConfiguration.getTTL(LocalConfiguration.GEOSERVER_WORKSPACE_TTL))){
log.trace("Loading workspaces for {} ",baseURL);
workspaces=new HashSet<String>(getLiveWorkspaces());
log.debug("Found {} workspaces",workspaces.size());
lastWorkspaceUpdate=System.currentTimeMillis();
}
}catch(Throwable t) {
log.warn("Unable to get Workspaces for {} ",baseURL,t);
}
return workspaces;
}
public void invalidateWorkspacesCache(){
lastWorkspaceUpdate=0l;
}
public void invalidateDatastoresCache(){
lastDatastoreUpdate=0l;
}
public void invalidateStylesCache(){
lastStylesUpdate=0l;
}
public void invalidateHostedLayersCountCache(){
lastLayerCountUpdate=0l;
}
public void onChangedDataStores() {
invalidateDatastoresCache();
}
public void onChangedLayers() {
invalidateHostedLayersCountCache();
}
public void onChangedStyles() {
invalidateStylesCache();
}
public void onChangedWorkspaces() {
invalidateWorkspacesCache();
invalidateDatastoresCache();
}
public Set<String> getLiveDatastores(String workspace) throws MalformedURLException {
return new HashSet<String>(getReader().getDatastores(workspace).getNames());
}
public Long getLiveHostedLayersCount() throws MalformedURLException {
return Long.valueOf(getReader().getLayers().size());
}
public Set<String> getLiveStyles() throws MalformedURLException {
return new HashSet<String>(getReader().getStyles().getNames());
}
public Set<String> getLiveWorkspaces() throws MalformedURLException {
return new HashSet<String>(getReader().getWorkspaceNames());
}
}
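A usage sketch of the cached accessors above, assuming an already discovered GeoServer ServiceEndpoint and a workspace called "myWorkspace" (both assumptions): the first call of each accessor goes through GeoServerRESTReader, later calls within the configured TTL return the cached values.
import java.util.Set;
import org.gcube.common.resources.gcore.ServiceEndpoint;
// Hypothetical snippet, for illustration only.
public class GeoServerControllerSketch {
    public static void inspect(ServiceEndpoint discoveredEndpoint) {
        GeoServerController controller = new GeoServerController(discoveredEndpoint);
        Set<String> workspaces = controller.getWorkspaces();          // cached per GEOSERVER_WORKSPACE_TTL
        Set<String> stores = controller.getDatastores("myWorkspace"); // datastore names of the assumed workspace
        Long layers = controller.getHostedLayersCount();              // GeoServerCluster sorts nodes by this value
        controller.onChangedWorkspaces();                             // drops both the workspace and datastore caches
        System.out.println(workspaces.size() + " workspaces, " + stores.size() + " datastores, " + layers + " layers");
    }
}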

View File

@@ -0,0 +1,100 @@
package org.gcube.spatial.data.sdi.engine.impl.cluster;
import java.util.Map;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.Profile;
import org.gcube.common.resources.gcore.ServiceEndpoint.Property;
import org.gcube.common.resources.gcore.common.Platform;
import org.gcube.spatial.data.sdi.engine.impl.faults.InvalidServiceEndpointException;
import org.gcube.spatial.data.sdi.engine.impl.faults.OutdatedServiceEndpointException;
import org.gcube.spatial.data.sdi.engine.impl.faults.ServiceInteractionException;
import org.gcube.spatial.data.sdi.engine.impl.is.CachedObject;
import org.gcube.spatial.data.sdi.engine.impl.is.ISUtils;
import org.gcube.spatial.data.sdi.model.credentials.AccessType;
import org.gcube.spatial.data.sdi.model.credentials.Credentials;
import org.gcube.spatial.data.sdi.model.service.GeoServiceDescriptor;
import org.gcube.spatial.data.sdi.model.service.Version;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public abstract class GeoServiceController<T extends GeoServiceDescriptor> {
protected ServiceEndpoint serviceEndpoint;
protected AccessPoint accessPoint;
protected Map<String,Property> propertyMap;
protected String baseURL;
protected Credentials adminAccount;
protected Version version;
protected CachedObject<T> cachedDescriptor=null;
public synchronized T getDescriptor() {
if(cachedDescriptor==null||!cachedDescriptor.isValid(500)) {
cachedDescriptor=new CachedObject<T>(getLiveDescriptor());
}
return cachedDescriptor.getTheObject();
}
protected abstract T getLiveDescriptor();
protected abstract AccessPoint getTheRightAccessPoint(ServiceEndpoint endpoint);
public GeoServiceController(ServiceEndpoint serviceEndpoint) throws InvalidServiceEndpointException{
super();
log.debug("Instantiating controller for SE {} ",serviceEndpoint);
setServiceEndpoint(serviceEndpoint);
}
public void onUpdateServiceEndpoint() {
setServiceEndpoint(ISUtils.updateAndWait(serviceEndpoint));
if(cachedDescriptor!=null)cachedDescriptor.invalidate();
}
protected void setServiceEndpoint(ServiceEndpoint toSet) {
this.serviceEndpoint = toSet;
Profile profile=serviceEndpoint.profile();
accessPoint=getTheRightAccessPoint(serviceEndpoint);
if(accessPoint!=null) {
propertyMap=this.accessPoint.propertyMap();
baseURL=accessPoint.address();
adminAccount=new Credentials(accessPoint.username(),ISUtils.decryptString(accessPoint.password()),AccessType.ADMIN);
}
Platform platform=profile.platform();
version=new Version(platform.version(),platform.minorVersion(),platform.revisionVersion());
}
protected abstract void initServiceEndpoint() throws OutdatedServiceEndpointException, ServiceInteractionException;
public void configure() throws ServiceInteractionException {
try {
initServiceEndpoint();
}catch(OutdatedServiceEndpointException e) {
onUpdateServiceEndpoint();
}
}
protected String getSEProperty(String property, boolean mandatory) throws InvalidServiceEndpointException{
if(!propertyMap.containsKey(property))
if(mandatory)
throw new InvalidServiceEndpointException("Expected property "+property+" was not found. in Resource ID "+getServiceEndpoint().id());
else return null;
else {
Property prop=propertyMap.get(property);
if(prop.isEncrypted()) return ISUtils.decryptString(prop.value());
else return prop.value();
}
}
public ServiceEndpoint getServiceEndpoint() {
return serviceEndpoint;
}
}
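In short, getSEProperty resolves AccessPoint properties and transparently decrypts encrypted ones; a behaviour sketch with illustrative keys and values:
// Behaviour sketch (property names and values are examples only):
//   propertyMap = { "priority" -> "1", "adminPassword" -> <encrypted> }
//   getSEProperty("priority", true)      -> "1"
//   getSEProperty("adminPassword", true) -> plain text, via ISUtils.decryptString(...)
//   getSEProperty("missing", false)      -> null
//   getSEProperty("missing", true)       -> throws InvalidServiceEndpointException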

View File

@@ -0,0 +1,28 @@
package org.gcube.spatial.data.sdi.engine.impl.cluster;
import java.util.Comparator;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.spatial.data.sdi.engine.impl.faults.InvalidServiceEndpointException;
import org.gcube.spatial.data.sdi.engine.impl.is.ISModule;
import org.gcube.spatial.data.sdi.model.service.ThreddsDescriptor;
public class ThreddsCluster extends AbstractCluster<ThreddsDescriptor,ThreddsController> {
public ThreddsCluster(long objectsTTL, ISModule retriever, String cacheName) {
super(objectsTTL, retriever, cacheName);
// TODO Auto-generated constructor stub
}
@Override
protected ThreddsController translate(ServiceEndpoint e) throws InvalidServiceEndpointException {
return new ThreddsController(e);
}
@Override
protected Comparator<ThreddsController> getComparator() {
return null;
}
}

View File

@@ -0,0 +1,136 @@
package org.gcube.spatial.data.sdi.engine.impl.cluster;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.data.transfer.library.DataTransferClient;
import org.gcube.data.transfer.library.client.AuthorizationFilter;
import org.gcube.data.transfer.library.faults.DestinationNotSetException;
import org.gcube.data.transfer.library.faults.FailedTransferException;
import org.gcube.data.transfer.library.faults.InitializationException;
import org.gcube.data.transfer.library.faults.InvalidDestinationException;
import org.gcube.data.transfer.library.faults.InvalidSourceException;
import org.gcube.data.transfer.library.faults.SourceNotSetException;
import org.gcube.data.transfer.model.Destination;
import org.gcube.data.transfer.model.DestinationClashPolicy;
import org.gcube.data.transfer.model.PluginInvocation;
import org.gcube.data.transfer.model.TransferTicket;
import org.gcube.data.transfer.model.plugins.thredds.ThreddsCatalog;
import org.gcube.data.transfer.model.plugins.thredds.ThreddsInfo;
import org.gcube.spatial.data.sdi.LocalConfiguration;
import org.gcube.spatial.data.sdi.NetUtils;
import org.gcube.spatial.data.sdi.engine.impl.faults.InvalidServiceEndpointException;
import org.gcube.spatial.data.sdi.engine.impl.faults.OutdatedServiceEndpointException;
import org.gcube.spatial.data.sdi.engine.impl.faults.ThreddsOperationFault;
import org.gcube.spatial.data.sdi.engine.impl.is.ISUtils;
import org.gcube.spatial.data.sdi.engine.impl.metadata.GenericTemplates;
import org.gcube.spatial.data.sdi.model.CatalogDescriptor;
import org.gcube.spatial.data.sdi.model.service.ThreddsDescriptor;
import org.glassfish.jersey.client.ClientConfig;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class ThreddsController extends GeoServiceController<ThreddsDescriptor> {
@Override
protected ThreddsDescriptor getLiveDescriptor() {
return new ThreddsDescriptor(version,baseURL,Collections.emptyList());
}
@Override
protected AccessPoint getTheRightAccessPoint(ServiceEndpoint endpoint) {
for(AccessPoint declaredPoint:endpoint.profile().accessPoints().asCollection()) {
if(declaredPoint.name().equals(LocalConfiguration.getProperty(LocalConfiguration.THREDDS_SE_REMOTE_MANAGEMENT_ACCESS))) {
return declaredPoint;
}
}
return null;
}
public ThreddsController(ServiceEndpoint serviceEndpoint) throws InvalidServiceEndpointException {
super(serviceEndpoint);
}
@Override
protected void initServiceEndpoint() throws OutdatedServiceEndpointException {
// TODO Auto-generated method stub
}
public ThreddsInfo getThreddsInfo() {
String infoPath=getThreddsInfoPath();
log.info("Loading thredds info from {} ",infoPath);
WebTarget target=getWebClient().target(infoPath);
return target.request(MediaType.APPLICATION_JSON).get(ThreddsInfo.class);
}
private void reloadCatalog() throws IOException {
AccessPoint ap=getTheRightAccessPoint(serviceEndpoint);
NetUtils.makeAuthorizedCall(ap.address(), ap.username(), ISUtils.decryptString(ap.password()));
}
private String getHostName() {
return getServiceEndpoint().profile().runtime().hostedOn();
}
private String getThreddsInfoPath() {
return "https://"+getHostName()+"/data-transfer-service/gcube/service/Capabilities/pluginInfo/REGISTER_CATALOG";
}
private Client getWebClient() {
return ClientBuilder.newClient(new ClientConfig().register(AuthorizationFilter.class));
}
public ThreddsCatalog publishCatalog(File catalogFile, String reference) throws ThreddsOperationFault {
log.trace("Registering Thredds catalog with reference {} ",reference);
try {
AccessPoint ap=getTheRightAccessPoint(getServiceEndpoint());
log.debug("AP address is {} ",ap.address());
DataTransferClient client=DataTransferClient.getInstanceByEndpoint(ap.address());
Destination dest=new Destination();
dest.setPersistenceId("thredds");
dest.setDestinationFileName(reference.replace(" ", "_")+".xml");
dest.setOnExistingFileName(DestinationClashPolicy.REWRITE);
PluginInvocation invocation=new PluginInvocation("REGISTER_CATALOG");
invocation.setParameters(Collections.singletonMap("CATALOG_REFERENCE", reference));
log.debug("Sending catalog file to Thredds for registration");
client.localFile(catalogFile, dest,invocation);
log.debug("Catalog registered, calling reload.. ");
reloadCatalog();
ThreddsInfo info=getThreddsInfo();
log.debug("returned ThreddsInfo is {} ",info);
return info.getById(reference);
} catch (InvalidSourceException | SourceNotSetException | FailedTransferException | InitializationException
| InvalidDestinationException | DestinationNotSetException e) {
throw new ThreddsOperationFault("Unable to register catalog "+reference, e);
}catch(Exception e) {
throw new ThreddsOperationFault("Unable to reload catalog "+reference,e);
}
}
}
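A usage sketch of publishCatalog above, assuming a discovered Thredds ServiceEndpoint and a local catalog file (both assumptions); the call ships the file to the data-transfer REGISTER_CATALOG plugin and then reloads the remote catalog.
import java.io.File;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.data.transfer.model.plugins.thredds.ThreddsCatalog;
import org.gcube.spatial.data.sdi.engine.impl.faults.ThreddsOperationFault;
// Hypothetical snippet; the file name and the catalog reference are assumed values.
public class ThreddsPublishSketch {
    public static ThreddsCatalog publish(ServiceEndpoint threddsEndpoint) throws ThreddsOperationFault {
        ThreddsController controller = new ThreddsController(threddsEndpoint);
        File catalog = new File("my-catalog.xml");               // local catalog definition to register
        return controller.publishCatalog(catalog, "my catalog"); // blanks in the reference become '_' in the remote file name
    }
}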

View File

@@ -0,0 +1,27 @@
package org.gcube.spatial.data.sdi.engine.impl.factories;
import javax.inject.Inject;
import org.gcube.spatial.data.sdi.engine.GeoNetworkManager;
import org.gcube.spatial.data.sdi.engine.RoleManager;
import org.gcube.spatial.data.sdi.engine.impl.GeoNetworkManagerImpl;
import org.glassfish.hk2.api.Factory;
public class GeoNetworkManagerFactory implements Factory<GeoNetworkManager>{
@Inject
private RoleManager manager;
@Override
public void dispose(GeoNetworkManager instance) {
// TODO Auto-generated method stub
}
@Override
public GeoNetworkManager provide() {
return new GeoNetworkManagerImpl(manager);
}
}

View File

@@ -0,0 +1,31 @@
package org.gcube.spatial.data.sdi.engine.impl.factories;
import org.gcube.spatial.data.sdi.engine.GISManager;
import org.gcube.spatial.data.sdi.engine.impl.GISManagerImpl;
import org.glassfish.hk2.api.Factory;
import lombok.Synchronized;
public class GeoServerManagerFactory implements Factory<GISManager>{
@Override
public void dispose(GISManager instance) {
// TODO Auto-generated method stub
}
@Override
public GISManager provide() {
return getInstance();
}
private static GISManager instance=null;
@Synchronized
private static GISManager getInstance() {
if(instance==null)
instance=new GISManagerImpl();
return instance;
}
}

View File

@@ -0,0 +1,36 @@
package org.gcube.spatial.data.sdi.engine.impl.factories;
import org.gcube.spatial.data.sdi.engine.TemplateManager;
import org.gcube.spatial.data.sdi.engine.impl.metadata.MetadataTemplateManagerImpl;
import org.glassfish.hk2.api.Factory;
import lombok.Synchronized;
public class MetadataTemplateManagerFactory implements Factory<TemplateManager>{
@Override
public TemplateManager provide() {
return getInstance();
}
@Override
public void dispose(TemplateManager instance) {
// TODO Auto-generated method stub
}
private static TemplateManager instance = null;
@Synchronized
private static final TemplateManager getInstance() {
if(instance==null) {
instance=new MetadataTemplateManagerImpl();
try {
((MetadataTemplateManagerImpl)instance).defaultInit();
}catch(Exception e) {
throw new RuntimeException("Unable to init temp ",e);
}
}
return instance;
}
}

View File

@@ -0,0 +1,29 @@
package org.gcube.spatial.data.sdi.engine.impl.factories;
import org.gcube.spatial.data.sdi.engine.RoleManager;
import org.gcube.spatial.data.sdi.engine.impl.RoleManagerImpl;
import org.glassfish.hk2.api.Factory;
import lombok.Synchronized;
public class RoleManagerFactory implements Factory<RoleManager>{
@Override
public void dispose(RoleManager instance) {
}
private static RoleManager instance;
@Override
public RoleManager provide() {
return getInstance();
}
@Synchronized
private static RoleManager getInstance() {
if(instance==null)
instance=new RoleManagerImpl();
return instance;
}
}

View File

@@ -0,0 +1,44 @@
package org.gcube.spatial.data.sdi.engine.impl.factories;
import javax.inject.Inject;
import org.gcube.spatial.data.sdi.engine.GISManager;
import org.gcube.spatial.data.sdi.engine.GeoNetworkManager;
import org.gcube.spatial.data.sdi.engine.SDIManager;
import org.gcube.spatial.data.sdi.engine.ThreddsManager;
import org.gcube.spatial.data.sdi.engine.impl.SDIManagerImpl;
import org.glassfish.hk2.api.Factory;
import lombok.Synchronized;
public class SDIManagerFactory implements Factory<SDIManager>{
@Override
public void dispose(SDIManager instance) {
// TODO Auto-generated method stub
}
@Override
public SDIManager provide() {
return getInstance(gnManager,gisManager,thManager);
}
private static SDIManager sdiManager=null;
@Inject
private GeoNetworkManager gnManager;
@Inject
private GISManager gisManager;
@Inject
private ThreddsManager thManager;
@Synchronized
private static SDIManager getInstance(GeoNetworkManager gnManager, GISManager gisManager,ThreddsManager thManager) {
if(sdiManager==null)
sdiManager=new SDIManagerImpl(gnManager,thManager,gisManager);
return sdiManager;
}
}

View File

@@ -0,0 +1,37 @@
package org.gcube.spatial.data.sdi.engine.impl.factories;
import org.gcube.spatial.data.sdi.engine.TemporaryPersistence;
import org.gcube.spatial.data.sdi.engine.impl.TemporaryPersistenceImpl;
import org.glassfish.hk2.api.Factory;
import lombok.Synchronized;
public class TemporaryPersistenceFactory implements Factory<TemporaryPersistence>{
@Override
public void dispose(TemporaryPersistence arg0) {
arg0.shutdown();
}
@Override
public TemporaryPersistence provide() {
return getInstance();
}
private static TemporaryPersistence temp=null;
@Synchronized
private static TemporaryPersistence getInstance(){
if(temp==null) {
temp=new TemporaryPersistenceImpl();
try {
temp.init();
}catch(Exception e) {
throw new RuntimeException("Unable to init temp ",e);
}
}
return temp;
}
}

View File

@@ -0,0 +1,37 @@
package org.gcube.spatial.data.sdi.engine.impl.factories;
import javax.inject.Inject;
import org.gcube.spatial.data.sdi.engine.TemplateManager;
import org.gcube.spatial.data.sdi.engine.ThreddsManager;
import org.gcube.spatial.data.sdi.engine.impl.ThreddsManagerImpl;
import org.glassfish.hk2.api.Factory;
import lombok.Synchronized;
public class ThreddsManagerFactory implements Factory<ThreddsManager>{
@Inject
private TemplateManager manager;
@Override
public ThreddsManager provide() {
return getInstance(manager);
}
@Override
public void dispose(ThreddsManager instance) {
// TODO Auto-generated method stub
}
private static ThreddsManager instance=null;
@Synchronized
private static ThreddsManager getInstance(TemplateManager manager) {
if(instance==null)
instance=new ThreddsManagerImpl(manager);
return instance;
}
}
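The factories above follow the HK2 Factory pattern used by Jersey, so they are typically wired through an AbstractBinder in the application configuration. A hedged sketch of such a binder (the class itself is not part of this commit, and singleton scoping is an assumption):
import javax.inject.Singleton;
import org.gcube.spatial.data.sdi.engine.GISManager;
import org.gcube.spatial.data.sdi.engine.GeoNetworkManager;
import org.gcube.spatial.data.sdi.engine.RoleManager;
import org.gcube.spatial.data.sdi.engine.SDIManager;
import org.gcube.spatial.data.sdi.engine.TemplateManager;
import org.gcube.spatial.data.sdi.engine.TemporaryPersistence;
import org.gcube.spatial.data.sdi.engine.ThreddsManager;
import org.glassfish.hk2.utilities.binding.AbstractBinder;
// Hypothetical binder, for illustration only.
public class SDIBinder extends AbstractBinder {
    @Override
    protected void configure() {
        bindFactory(RoleManagerFactory.class).to(RoleManager.class).in(Singleton.class);
        bindFactory(GeoNetworkManagerFactory.class).to(GeoNetworkManager.class).in(Singleton.class);
        bindFactory(GeoServerManagerFactory.class).to(GISManager.class).in(Singleton.class);
        bindFactory(ThreddsManagerFactory.class).to(ThreddsManager.class).in(Singleton.class);
        bindFactory(MetadataTemplateManagerFactory.class).to(TemplateManager.class).in(Singleton.class);
        bindFactory(TemporaryPersistenceFactory.class).to(TemporaryPersistence.class).in(Singleton.class);
        bindFactory(SDIManagerFactory.class).to(SDIManager.class).in(Singleton.class);
    }
}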

View File

@@ -0,0 +1,35 @@
package org.gcube.spatial.data.sdi.engine.impl.faults;
public class ClientInitializationException extends Exception {
/**
*
*/
private static final long serialVersionUID = -4466379337497096292L;
public ClientInitializationException() {
// TODO Auto-generated constructor stub
}
public ClientInitializationException(String message) {
super(message);
// TODO Auto-generated constructor stub
}
public ClientInitializationException(Throwable cause) {
super(cause);
// TODO Auto-generated constructor stub
}
public ClientInitializationException(String message, Throwable cause) {
super(message, cause);
// TODO Auto-generated constructor stub
}
public ClientInitializationException(String message, Throwable cause, boolean enableSuppression,
boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
// TODO Auto-generated constructor stub
}
}

View File

@@ -0,0 +1,30 @@
package org.gcube.spatial.data.sdi.engine.impl.faults;
public class ConfigurationNotFoundException extends Exception {
public ConfigurationNotFoundException() {
// TODO Auto-generated constructor stub
}
public ConfigurationNotFoundException(String message) {
super(message);
// TODO Auto-generated constructor stub
}
public ConfigurationNotFoundException(Throwable cause) {
super(cause);
// TODO Auto-generated constructor stub
}
public ConfigurationNotFoundException(String message, Throwable cause) {
super(message, cause);
// TODO Auto-generated constructor stub
}
public ConfigurationNotFoundException(String message, Throwable cause, boolean enableSuppression,
boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
// TODO Auto-generated constructor stub
}
}

View File

@@ -0,0 +1,41 @@
package org.gcube.spatial.data.sdi.engine.impl.faults;
import java.io.PrintWriter;
import java.io.StringWriter;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper;
import org.gcube.spatial.data.sdi.model.faults.ErrorMessage;
public class GenericExceptionMapper implements ExceptionMapper<Throwable> {
@Override
public Response toResponse(Throwable ex) {
ErrorMessage errorMessage = new ErrorMessage();
setHttpStatus(ex, errorMessage);
errorMessage.setCode(500);
errorMessage.setMessage(ex.getMessage());
StringWriter errorStackTrace = new StringWriter();
ex.printStackTrace(new PrintWriter(errorStackTrace));
errorMessage.setDeveloperMessage(errorStackTrace.toString());
errorMessage.setLink("www.d4science.org");
return Response.status(errorMessage.getStatus())
.entity(errorMessage)
.type(MediaType.APPLICATION_JSON)
.build();
}
private void setHttpStatus(Throwable ex, ErrorMessage errorMessage) {
if(ex instanceof WebApplicationException ) {
errorMessage.setStatus(((WebApplicationException)ex).getResponse().getStatus());
} else {
errorMessage.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); //defaults to internal server error 500
}
}
}
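The mapper above turns any uncaught Throwable into a JSON ErrorMessage, but it still has to be registered with the JAX-RS runtime. A minimal sketch, assuming a Jersey ResourceConfig-based application class (not part of this commit):
import org.gcube.spatial.data.sdi.engine.impl.faults.GenericExceptionMapper;
import org.glassfish.jersey.server.ResourceConfig;
// Hypothetical application class, for illustration only.
public class SDIApplication extends ResourceConfig {
    public SDIApplication() {
        register(GenericExceptionMapper.class); // unhandled exceptions now produce JSON ErrorMessage responses
    }
}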

View File

@@ -0,0 +1,39 @@
package org.gcube.spatial.data.sdi.engine.impl.faults;
public class InvalidServiceDefinitionException extends ServiceRegistrationException {
/**
*
*/
private static final long serialVersionUID = -5251767289981417513L;
public InvalidServiceDefinitionException() {
super();
// TODO Auto-generated constructor stub
}
public InvalidServiceDefinitionException(String message, Throwable cause, boolean enableSuppression,
boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
// TODO Auto-generated constructor stub
}
public InvalidServiceDefinitionException(String message, Throwable cause) {
super(message, cause);
// TODO Auto-generated constructor stub
}
public InvalidServiceDefinitionException(String message) {
super(message);
// TODO Auto-generated constructor stub
}
public InvalidServiceDefinitionException(Throwable cause) {
super(cause);
// TODO Auto-generated constructor stub
}
}

View File

@@ -0,0 +1,38 @@
package org.gcube.spatial.data.sdi.engine.impl.faults;
public class InvalidServiceEndpointException extends RuntimeException {
/**
*
*/
private static final long serialVersionUID = -3683038636163570578L;
public InvalidServiceEndpointException() {
super();
// TODO Auto-generated constructor stub
}
public InvalidServiceEndpointException(String message, Throwable cause, boolean enableSuppression,
boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
// TODO Auto-generated constructor stub
}
public InvalidServiceEndpointException(String message, Throwable cause) {
super(message, cause);
// TODO Auto-generated constructor stub
}
public InvalidServiceEndpointException(String message) {
super(message);
// TODO Auto-generated constructor stub
}
public InvalidServiceEndpointException(Throwable cause) {
super(cause);
// TODO Auto-generated constructor stub
}
}

View File

@@ -0,0 +1,38 @@
package org.gcube.spatial.data.sdi.engine.impl.faults;
public class OutdatedServiceEndpointException extends Exception {
/**
*
*/
private static final long serialVersionUID = -1874537989302709012L;
public OutdatedServiceEndpointException() {
super();
// TODO Auto-generated constructor stub
}
public OutdatedServiceEndpointException(String message, Throwable cause, boolean enableSuppression,
boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
// TODO Auto-generated constructor stub
}
public OutdatedServiceEndpointException(String message, Throwable cause) {
super(message, cause);
// TODO Auto-generated constructor stub
}
public OutdatedServiceEndpointException(String message) {
super(message);
// TODO Auto-generated constructor stub
}
public OutdatedServiceEndpointException(Throwable cause) {
super(cause);
// TODO Auto-generated constructor stub
}
}

View File

@@ -0,0 +1,35 @@
package org.gcube.spatial.data.sdi.engine.impl.faults;
public class ServiceInteractionException extends Exception {
/**
*
*/
private static final long serialVersionUID = 4708440073435829969L;
public ServiceInteractionException() {
// TODO Auto-generated constructor stub
}
public ServiceInteractionException(String message) {
super(message);
// TODO Auto-generated constructor stub
}
public ServiceInteractionException(Throwable cause) {
super(cause);
// TODO Auto-generated constructor stub
}
public ServiceInteractionException(String message, Throwable cause) {
super(message, cause);
// TODO Auto-generated constructor stub
}
public ServiceInteractionException(String message, Throwable cause, boolean enableSuppression,
boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
// TODO Auto-generated constructor stub
}
}

View File

@@ -0,0 +1,38 @@
package org.gcube.spatial.data.sdi.engine.impl.faults;
public class ServiceRegistrationException extends Exception {
/**
*
*/
private static final long serialVersionUID = -1570185699121566715L;
public ServiceRegistrationException() {
super();
// TODO Auto-generated constructor stub
}
public ServiceRegistrationException(String message, Throwable cause, boolean enableSuppression,
boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
// TODO Auto-generated constructor stub
}
public ServiceRegistrationException(String message, Throwable cause) {
super(message, cause);
// TODO Auto-generated constructor stub
}
public ServiceRegistrationException(String message) {
super(message);
// TODO Auto-generated constructor stub
}
public ServiceRegistrationException(Throwable cause) {
super(cause);
// TODO Auto-generated constructor stub
}
}

View File

@@ -0,0 +1,35 @@
package org.gcube.spatial.data.sdi.engine.impl.faults;
public class ThreddsOperationFault extends ServiceInteractionException {
/**
*
*/
private static final long serialVersionUID = -4389581996150834969L;
public ThreddsOperationFault() {
// TODO Auto-generated constructor stub
}
public ThreddsOperationFault(String message) {
super(message);
// TODO Auto-generated constructor stub
}
public ThreddsOperationFault(Throwable cause) {
super(cause);
// TODO Auto-generated constructor stub
}
public ThreddsOperationFault(String message, Throwable cause) {
super(message, cause);
// TODO Auto-generated constructor stub
}
public ThreddsOperationFault(String message, Throwable cause, boolean enableSuppression,
boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
// TODO Auto-generated constructor stub
}
}

View File

@@ -0,0 +1,37 @@
package org.gcube.spatial.data.sdi.engine.impl.faults.gn;
public class MetadataException extends Exception {
/**
*
*/
private static final long serialVersionUID = -234185402179551404L;
public MetadataException() {
super();
// TODO Auto-generated constructor stub
}
public MetadataException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
// TODO Auto-generated constructor stub
}
public MetadataException(String message, Throwable cause) {
super(message, cause);
// TODO Auto-generated constructor stub
}
public MetadataException(String message) {
super(message);
// TODO Auto-generated constructor stub
}
public MetadataException(Throwable cause) {
super(cause);
// TODO Auto-generated constructor stub
}
}

View File

@@ -0,0 +1,38 @@
package org.gcube.spatial.data.sdi.engine.impl.faults.gn;
public class MetadataNotFoundException extends MetadataException {
/**
*
*/
private static final long serialVersionUID = 5964532576083669460L;
public MetadataNotFoundException() {
super();
// TODO Auto-generated constructor stub
}
public MetadataNotFoundException(String message, Throwable cause, boolean enableSuppression,
boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
// TODO Auto-generated constructor stub
}
public MetadataNotFoundException(String message, Throwable cause) {
super(message, cause);
// TODO Auto-generated constructor stub
}
public MetadataNotFoundException(String message) {
super(message);
// TODO Auto-generated constructor stub
}
public MetadataNotFoundException(Throwable cause) {
super(cause);
// TODO Auto-generated constructor stub
}
}

View File

@@ -0,0 +1,16 @@
package org.gcube.spatial.data.sdi.engine.impl.gn.extension;
import it.geosolutions.geonetwork.GN26Client;
public class GN26Extension extends GN26Client {
public GN26Extension(String serviceURL) {
super(serviceURL);
}
public GN26Extension(String serviceURL, String username, String password) {
super(serviceURL, username, password);
super.connection=new HttpUtilsExtensions(username, password);
}
}

View File

@@ -0,0 +1,17 @@
package org.gcube.spatial.data.sdi.engine.impl.gn.extension;
import it.geosolutions.geonetwork.GN3Client;
public class GN3Extension extends GN3Client {
public GN3Extension(String serviceURL) {
super(serviceURL);
// TODO Auto-generated constructor stub
}
public GN3Extension(String serviceURL, String username, String password) {
super(serviceURL, username, password);
super.connection=new HttpUtilsExtensions(username, password);
}
}

View File

@@ -0,0 +1,177 @@
package org.gcube.spatial.data.sdi.engine.impl.gn.extension;
import java.io.File;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import org.gcube.spatial.data.sdi.engine.impl.gn.utils.GroupUtils;
import org.gcube.spatial.data.sdi.engine.impl.gn.utils.UserUtils;
import org.gcube.spatial.data.sdi.model.gn.Group;
import org.gcube.spatial.data.sdi.model.gn.User;
import org.gcube.spatial.data.sdi.model.gn.User.Profile;
import org.jdom.Element;
import it.geosolutions.geonetwork.GNClient;
import it.geosolutions.geonetwork.exception.GNLibException;
import it.geosolutions.geonetwork.exception.GNServerException;
import it.geosolutions.geonetwork.op.gn3.GN3MetadataGetInfo.MetadataInfo;
import it.geosolutions.geonetwork.util.GNInsertConfiguration;
import it.geosolutions.geonetwork.util.GNPrivConfiguration;
import it.geosolutions.geonetwork.util.GNSearchRequest;
import it.geosolutions.geonetwork.util.GNSearchResponse;
import it.geosolutions.geonetwork.util.HTTPUtils;
public class GNClientExtension implements GNClient {
private GNClient client;
private ServerAccess access;
public GNClientExtension(ServerAccess access) {
this.access=access;
if(access.getVersion().getMajor()==2)
client=new GN26Extension(access.getGnServiceURL(), access.getUser(), access.getPassword());
else if(access.getVersion().getMajor()==3)
client = new GN3Extension(access.getGnServiceURL(), access.getUser(), access.getPassword());
else throw new RuntimeException("INVALID SERVER ACCESS "+access);
}
public void createGroup(String name, String description, String mail,Integer id)throws GNLibException, GNServerException {
GNMetadataAdminExtension.createGroup(getConnection(), access, name, description, mail, id);
}
public Set<Group> getGroups() throws GNLibException, GNServerException{
String groupResponse=GNMetadataAdminExtension.getGroups(getConnection(), access);
if(access.getVersion().getMajor()==2)
return GroupUtils.parseGroupXMLResponse(groupResponse);
else return GroupUtils.parseUserJSONResponse(groupResponse);
}
public Set<User> getUsers() throws GNLibException, GNServerException{
String userResponse=GNMetadataAdminExtension.getUsers(getConnection(), access);
if(access.getVersion().getMajor()==2)
return UserUtils.parseUserXMLResponse(userResponse);
else return UserUtils.parseUserJSONResponse(userResponse);
}
public void createUser(String name, String password, Profile profile, Collection<Integer> groups) throws GNServerException, GNLibException{
GNMetadataAdminExtension.createUser(getConnection(), access, name, password, profile, groups);
}
public void editUser(User toAdd, Collection<Integer> groups) throws GNServerException,GNLibException{
Set<Integer> alreadyAddedGroups=getGroupsByUser(toAdd.getId());
alreadyAddedGroups.addAll(groups);
GNMetadataAdminExtension.editUser(getConnection(), access, toAdd, alreadyAddedGroups);
}
public Set<Integer> getGroupsByUser(Integer userId) throws GNLibException, GNServerException{
return UserUtils.parseGroupsByUserResponse(GNMetadataAdminExtension.getUserGroupd(getConnection(), access, userId));
}
public void assignOwnership(List<Long> toTransferIds,Integer targetUserId,Integer targetGroupId) throws GNServerException, GNLibException{
try{
GNMetadataAdminExtension.selectMeta(getConnection(), access, toTransferIds);
GNMetadataAdminExtension.assignMassiveOwnership(getConnection(), access, targetUserId, targetGroupId);
}finally{
GNMetadataAdminExtension.clearMetaSelection(getConnection(), access);
}
}
public String getPossibleOwnershipTransfer(Integer userId) throws GNServerException, GNLibException{
return GNMetadataAdminExtension.allowedOwnershipTransfer(getConnection(), access, userId);
}
public String getMetadataOwners() throws GNServerException, GNLibException{
return GNMetadataAdminExtension.metadataOwners(getConnection(), access);
}
public void transferOwnership(Integer sourceUserId,Integer sourceGroupId,Integer targetUserId,Integer targetGroupId) throws GNServerException, GNLibException{
GNMetadataAdminExtension.transferOwnership(getConnection(), access, sourceUserId, sourceGroupId, targetUserId, targetGroupId);
}
//***************************** OVERRIDES
@Override
public boolean ping() {
return client.ping();
}
@Override
public long insertMetadata(GNInsertConfiguration cfg, File metadataFile) throws GNLibException, GNServerException {
return client.insertMetadata(cfg, metadataFile);
}
@Override
public long insertRequest(File requestFile) throws GNLibException, GNServerException {
return client.insertRequest(requestFile);
}
@Override
public void setPrivileges(long metadataId, GNPrivConfiguration cfg) throws GNLibException, GNServerException {
client.setPrivileges(metadataId, cfg);
}
@Override
public GNSearchResponse search(GNSearchRequest searchRequest) throws GNLibException, GNServerException {
return client.search(searchRequest);
}
@Override
public GNSearchResponse search(File fileRequest) throws GNLibException, GNServerException {
return client.search(fileRequest);
}
@Override
public Element get(Long id) throws GNLibException, GNServerException {
return client.get(id);
}
@Override
public Element get(String uuid) throws GNLibException, GNServerException {
return client.get(uuid);
}
@Override
public void deleteMetadata(long id) throws GNLibException, GNServerException {
client.deleteMetadata(id);
}
@Override
public void updateMetadata(long id, File metadataFile) throws GNLibException, GNServerException {
client.updateMetadata(id, metadataFile);
}
@Override
public void updateMetadata(long id, File metadataFile, String encoding) throws GNLibException, GNServerException {
client.updateMetadata(id, metadataFile,encoding);
}
@Override
public MetadataInfo getInfo(Long id) throws GNLibException, GNServerException {
return client.getInfo(id);
}
@Override
public MetadataInfo getInfo(String uuid) throws GNLibException, GNServerException {
return client.getInfo(uuid);
}
@Override
public HTTPUtils getConnection() throws GNLibException {
return client.getConnection();
}
}
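// A minimal usage sketch of the wrapper above (assumed to be the GNClientExtension instantiated by
// GeoNetworkClient below); endpoint, credentials and group ids are placeholders, exception handling omitted.
ServerAccess access = new ServerAccess("https://geonetwork.example.org/geonetwork", new Version(3, 0, 0), "adminPwd", "admin"); // (url, version, password, user)
GNClientExtension client = new GNClientExtension(access);
Set<Group> groups = client.getGroups();                    // parsed from XML on 2.x, from JSON on 3.x
client.createUser("editor01", "s3cret", User.Profile.Reviewer, Collections.singleton(2));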

View File

@ -0,0 +1,309 @@
package org.gcube.spatial.data.sdi.engine.impl.gn.extension;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.util.Collection;
import java.util.List;
import org.gcube.spatial.data.sdi.model.gn.User;
import org.gcube.spatial.data.sdi.model.gn.User.Profile;
import org.jdom.Element;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;
import org.json.JSONArray;
import org.json.JSONObject;
import it.geosolutions.geonetwork.exception.GNLibException;
import it.geosolutions.geonetwork.exception.GNServerException;
import it.geosolutions.geonetwork.util.HTTPUtils;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class GNMetadataAdminExtension {
private final static XMLOutputter outputter = new XMLOutputter(Format.getCompactFormat());
private final static String USER_3="/srv/api/0.1/users";
private final static String GROUPS_3="/srv/api/0.1/groups";
private final static String CREATE_GROUP_METHOD_2="/srv/en/group.update";
private final static String GROUP_LIST_METHOD="/srv/en/xml.group.list";
private final static String USER_LIST_METHOD_2="/srv/en/xml.user.list";
private final static String CREATE_USER_METHOD="/srv/en/user.update";
private final static String GET_GROUPS_BY_USER="/srv/en/xml.usergroups.list";
private final static String METADATA_SELECT="/srv/en/metadata.select";
private final static String ASSIGN_MASSIVE_OWNERSHIP="/srv/en/metadata.massive.newowner";
private final static String AVAILABLE_OWNERSHIP="/srv/en/xml.ownership.groups";
private final static String METADATA_OWNERS="/srv/en/xml.ownership.editors";
private final static String TRANSFER_OWNERSHIP="/srv/en/xml.ownership.transfer";
public static String allowedOwnershipTransfer(HTTPUtils connection, ServerAccess access, Integer userId) throws GNServerException, GNLibException{
log.debug("Getting available ownership transfer for user "+userId);
Element request=new Element("request");
request.addContent(new Element("id").setText(userId+""));
return gnCall(connection,access,request,AVAILABLE_OWNERSHIP);
}
public static String metadataOwners(HTTPUtils connection, ServerAccess access) throws GNServerException, GNLibException{
log.debug("Getting metadata owners");
Element request=new Element("request");
return gnCall(connection,access,request,METADATA_OWNERS);
}
public static String selectMeta (HTTPUtils connection, ServerAccess access, List<Long> toSelectIds) throws GNServerException, GNLibException{
log.debug("Massive metadata selection..");
Element request=buildSelectMetadata(toSelectIds);
return gnCall(connection,access,request,METADATA_SELECT);
}
public static String clearMetaSelection(HTTPUtils connection, ServerAccess access) throws GNServerException, GNLibException{
log.debug("Massive metadata selection..");
Element request=buildClearMetaSelection();
return gnCall(connection,access,request,METADATA_SELECT);
}
public static String assignMassiveOwnership(HTTPUtils connection, ServerAccess access,Integer userId, Integer groupId) throws GNServerException, GNLibException{
log.debug("Assign massive ownership to u:{},g:{} ",userId,groupId);
Element request=new Element("request");
request.addContent(new Element("user").setText(userId+""));
request.addContent(new Element("group").setText(groupId+""));
return gnCall(connection,access,request,ASSIGN_MASSIVE_OWNERSHIP);
}
public static String transferOwnership(HTTPUtils connection, ServerAccess access,Integer sourceUserId, Integer sourceGroupId,Integer destUserId, Integer destGroupId) throws GNServerException, GNLibException{
log.debug("Transfering ownership from u:{},g:{} to u:{},g:{}",sourceUserId,sourceGroupId,destUserId,destGroupId);
Element request=new Element("request");
request.addContent(new Element("sourceUser").setText(sourceUserId+""));
request.addContent(new Element("sourceGroup").setText(sourceGroupId+""));
request.addContent(new Element("targetUser").setText(destUserId+""));
request.addContent(new Element("targetGroup").setText(destGroupId+""));
return gnCall(connection,access,request,TRANSFER_OWNERSHIP);
}
public static String editUser(HTTPUtils connection,ServerAccess access,User toAdd, Collection<Integer> groups)throws GNLibException, GNServerException {
log.debug("Coupling user {} to groups {} ",toAdd,groups);
Object request=null;
String method=null;
if(access.getVersion().getMajor()==2){
Element requestEl = new Element("request");
requestEl.addContent(new Element("operation").setText("editinfo"));
requestEl.addContent(new Element("id").setText(toAdd.getId()+""));
requestEl.addContent(new Element("username").setText(toAdd.getUsername()));
requestEl.addContent(new Element("password").setText(toAdd.getPassword()));
requestEl.addContent(new Element("profile").setText(toAdd.getProfile().name()));
if(groups!=null){
for(Integer groupId:groups)requestEl.addContent(new Element("groups").setText(groupId+""));
}
request=requestEl;
method=CREATE_USER_METHOD;
}else{
try{
JSONObject object=new JSONObject();
object.put("username", toAdd.getUsername());
object.put("password", toAdd.getPassword());
object.put("profile",toAdd.getProfile().toString());
object.put("enabled", true);
if(groups!=null){
JSONArray array=new JSONArray();
for(Integer groupId:groups) array.put(groupId+"");
object.put("groupsReviewer", array);
}
request= object;
method=USER_3+"/"+toAdd.getId();
}catch(Exception e){
throw new GNLibException("Unabel to create JSON request for group creation ", e);
}
// request=buildUpdateUserRequest(toAdd.getId(), toAdd.getUsername(), toAdd.getPassword(), toAdd.getProfile(), groups);
}
return gnCall(connection,access,request,method);
}
public static String getUserGroupd(HTTPUtils connection,ServerAccess access,Integer userId)throws GNLibException, GNServerException {
log.debug("Getting user groups..");
return gnCall(connection,access,new Element("request").addContent(new Element("id").setText(userId+"")),GET_GROUPS_BY_USER);
}
public static String getUsers(HTTPUtils connection, ServerAccess access) throws GNServerException, GNLibException{
log.debug("Requesting users..");
if(access.getVersion().getMajor()==2){
return gnCall(connection,access,new Element("request"),USER_LIST_METHOD_2);
}else {
String toReturn=gnCall(connection,access,null,USER_3);
return toReturn;
}
}
public static String createUser(HTTPUtils connection, ServerAccess access, String name, String password, Profile profile, Collection<Integer> groups ) throws GNServerException, GNLibException{
log.debug("Requesting users..");
log.debug("Compiling admin request document");
Object userRequest=null;
String method=null;
if(access.getVersion().getMajor()==2){
Element request = new Element("request");
request.addContent(new Element("operation").setText("newuser"));
request.addContent(new Element("username").setText(name));
request.addContent(new Element("password").setText(password));
request.addContent(new Element("profile").setText(profile.name()));
if(groups!=null){
for(Integer groupId:groups)request.addContent(new Element("groups").setText(groupId+""));
}
userRequest=request;
method=CREATE_USER_METHOD;
}else{
try{
JSONObject object=new JSONObject();
object.put("username", name);
object.put("password", password);
object.put("profile",profile);
object.put("enabled", true);
if(groups!=null){
JSONArray array=new JSONArray();
for(Integer groupId:groups) array.put(groupId+"");
object.put("groupsReviewer", array);
}
userRequest= object;
method=USER_3;
}catch(Exception e){
throw new GNLibException("Unabel to create JSON request for group creation ", e);
}
}
return gnCall(connection,access,userRequest,method);
}
public static String createGroup(HTTPUtils connection, ServerAccess access, String groupName, String groupDescription, String groupMail, Integer groupId) throws GNLibException, GNServerException {
log.debug(String.format("Creating group [Name : %s, Description : %s, Mail : %s ",groupName,groupDescription,groupMail));
Object adminRequest=null;
String method=null;
if(access.getVersion().getMajor()==2){
Element request = new Element("request");
request.addContent(new Element("name").setText(groupName));
request.addContent(new Element("description").setText(groupDescription));
request.addContent(new Element("email").setText(groupMail));
adminRequest= request;
method=CREATE_GROUP_METHOD_2;
} else {
try{
JSONObject object=new JSONObject();
object.put("name", groupName);
object.put("description", groupDescription);
object.put("email", groupMail);
object.put("id",groupId);
adminRequest= object;
method=GROUPS_3;
}catch(Exception e){
throw new GNLibException("Unabel to create JSON request for group creation ", e);
}
}
return gnCall(connection, access, adminRequest,method);
}
public static String getGroups(HTTPUtils connection,ServerAccess access) throws GNServerException, GNLibException{
log.debug("Requesting groups..");
Object request=null;
String method=null;
if(access.getVersion().getMajor()==2){
request=new Element("request");
method=GROUP_LIST_METHOD;
}else{
method=GROUPS_3;
}
return gnCall(connection, access, request,method);
}
private static String gnCall(HTTPUtils connection,ServerAccess access, final Object gnRequest,String toInvokeMethod)throws GNServerException, GNLibException {
String serviceURL = access.getGnServiceURL() + toInvokeMethod;
try{
String result=gnRequest==null?gnGET(connection,serviceURL):gnPut(connection, serviceURL, gnRequest);
int httpStatus=connection.getLastHttpStatus();
if(httpStatus<200 ||httpStatus>=300)
throw new GNServerException("Error executing call, received "+httpStatus+". Result is "+result);
return result;
}catch(MalformedURLException e){
throw new GNServerException("Unable to send request ",e);
}catch(UnsupportedEncodingException e){
throw new GNServerException("Unable to send request ", e);
}catch(GNLibException e){
throw e;
}
}
private static Element buildSelectMetadata(List<Long> toSelectIds){
log.debug("building selection request");
Element request = new Element("request");
if(toSelectIds!=null){
for(Long id:toSelectIds) request.addContent(new Element("id").setText(id.toString()));
request.addContent(new Element("selected").setText("add"));
}else request.addContent(new Element("selected").setText("add-all"));
return request;
}
private static Element buildClearMetaSelection(){
log.debug("building selection request");
Element request = new Element("request");
request.addContent(new Element("selected").setText("remove-all"));
return request;
}
private static String gnPut(HTTPUtils connection, String serviceURL, final Object gnRequest) throws UnsupportedEncodingException, GNLibException, GNServerException {
if(gnRequest instanceof Element){
String s = outputter.outputString((Element)gnRequest);
connection.setIgnoreResponseContentOnSuccess(false);
String res = connection.postXml(serviceURL, s);
return res;
} else if (gnRequest instanceof JSONObject){
String s=((JSONObject) gnRequest).toString();
connection.setIgnoreResponseContentOnSuccess(false);
return ((HttpUtilsExtensions)connection).putJSON(serviceURL, s);
} else throw new GNLibException("Unable to manage request element "+gnRequest);
}
private static String gnGET(HTTPUtils connection, String serviceURL) throws MalformedURLException, GNServerException {
connection.setIgnoreResponseContentOnSuccess(false);
String res = ((HttpUtilsExtensions)connection).getJSON(serviceURL);
return res;
}
}

View File

@ -0,0 +1,179 @@
package org.gcube.spatial.data.sdi.engine.impl.gn.extension;
import java.io.File;
import java.util.Collection;
import java.util.EnumSet;
import java.util.Set;
import org.gcube.spatial.data.sdi.LocalConfiguration;
import org.gcube.spatial.data.sdi.engine.impl.faults.ServiceInteractionException;
import org.gcube.spatial.data.sdi.engine.impl.gn.utils.GroupUtils;
import org.gcube.spatial.data.sdi.engine.impl.gn.utils.UserUtils;
import org.gcube.spatial.data.sdi.model.gn.Group;
import org.gcube.spatial.data.sdi.model.gn.User;
import org.gcube.spatial.data.sdi.model.service.GeoNetworkDescriptor;
import org.gcube.spatial.data.sdi.model.service.Version;
import it.geosolutions.geonetwork.exception.GNLibException;
import it.geosolutions.geonetwork.exception.GNServerException;
import it.geosolutions.geonetwork.util.GNInsertConfiguration;
import it.geosolutions.geonetwork.util.GNPriv;
import it.geosolutions.geonetwork.util.GNPrivConfiguration;
import it.geosolutions.geonetwork.util.GNSearchRequest;
import it.geosolutions.geonetwork.util.GNSearchResponse;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class GeoNetworkClient {
private ServerAccess access;
private GNClientExtension theClient=null;
private GeoNetworkDescriptor descriptor=null;
public GeoNetworkClient(String baseURL, Version version, String password, String user, GeoNetworkDescriptor descriptor) {
this(baseURL,version,password,user);
this.descriptor=descriptor;
}
public GeoNetworkClient(String baseURL, Version version, String password, String user) {
super();
this.access=new ServerAccess(baseURL,version,password,user);
theClient=new GNClientExtension(access);
}
//************************************** GROUPS AND USERS
public Group createGroup(Group group)throws ServiceInteractionException{
try {
theClient.createGroup(group.getName(), group.getDescription(), group.getMail(),group.getId());
long submitTime=System.currentTimeMillis();
long timeout=LocalConfiguration.getTTL(LocalConfiguration.GEONETWORK_UPDATE_TIMEOUT);
long wait=LocalConfiguration.getTTL(LocalConfiguration.GEONETWORK_UPDATE_WAIT);
log.debug("Waiting for created group to be available, timeout is {} ",timeout);
//wait for update to be available
Group created=null;
do{
try{Thread.sleep(wait);}catch(InterruptedException e){}
created=GroupUtils.getByName(theClient.getGroups(), group.getName());
}while(created==null && (System.currentTimeMillis()-submitTime>=timeout));
if(created==null) {
log.error("GN Update timeout {}ms reached. Group {} not created.",timeout,group);
throw new ServiceInteractionException("Reached timeout while creating group "+group.getName());
}
return created;
}catch(ServiceInteractionException e) {
throw e;
}catch(Throwable t) {
throw new ServiceInteractionException("Unable to create group. ",t);
}
}
public Set<Group> getGroups() throws ServiceInteractionException {
try {
return theClient.getGroups();
} catch (Exception e) {
throw new ServiceInteractionException("Unable to get Groups from "+access,e);
}
}
public Set<User> getUsers() throws ServiceInteractionException{
try {
return theClient.getUsers();
} catch (Exception e) {
throw new ServiceInteractionException("Unable to get Users from "+access,e);
}
}
public User createUsers(User user, Collection<Integer> groups) throws ServiceInteractionException {
try{
theClient.createUser(user.getUsername(), user.getPassword(), user.getProfile(), groups);
long submitTime=System.currentTimeMillis();
long timeout=LocalConfiguration.getTTL(LocalConfiguration.GEONETWORK_UPDATE_TIMEOUT);
long wait=LocalConfiguration.getTTL(LocalConfiguration.GEONETWORK_UPDATE_WAIT);
log.debug("Waiting for created group to be available, timeout is {} ",timeout);
//wait for update to be available
User created=null;
do{
try{Thread.sleep(wait);}catch(InterruptedException e){}
created=UserUtils.getByName(theClient.getUsers(), user.getUsername());
}while(created==null && (System.currentTimeMillis()-submitTime>=timeout));
if(created==null) {
log.error("GN Update timeout {}ms reached. User {} not created.",timeout,user.getUsername());
throw new ServiceInteractionException("Reached timeout while creating user "+user.getUsername());
}
return created;
}catch(ServiceInteractionException e) {
throw e;
}catch(Throwable t) {
throw new ServiceInteractionException("Unable to create User. ",t);
}
}
public void editUser(User toEdit, Collection<Integer> toAddGroups) throws ServiceInteractionException{
try{
Set<Integer> alreadyAddedGroups=getGroupsByUser(toEdit.getId());
alreadyAddedGroups.addAll(toAddGroups);
GNMetadataAdminExtension.editUser(theClient.getConnection(), access, toEdit, alreadyAddedGroups);
}catch(Throwable t) {
throw new ServiceInteractionException("Unable to create User. ",t);
}
}
public Set<Integer> getGroupsByUser(Integer userId) throws ServiceInteractionException{
try{
return UserUtils.parseGroupsByUserResponse(GNMetadataAdminExtension.getUserGroupd(theClient.getConnection(), access, userId));
}catch(Throwable t) {
throw new ServiceInteractionException(t);
}
}
//******************************* METADATA INSERTION
public long insertMetadata(String category, String styleSheet,boolean validate, int group, boolean makePublic, File metadataFile) throws GNLibException, GNServerException {
GNInsertConfiguration configuration=new GNInsertConfiguration();
configuration.setCategory(category);
configuration.setStyleSheet(styleSheet);
configuration.setValidate(validate);
configuration.setGroup(group+"");
log.debug("Inserting with {} ",configuration);
long toReturnId=theClient.insertMetadata(configuration, metadataFile);
GNPrivConfiguration privileges=(makePublic?getPrivileges(group,
Integer.parseInt(descriptor.getPublicGroup())):getPrivileges(group));
log.debug("Setting privileges {} on {} ",privileges,toReturnId);
theClient.setPrivileges(toReturnId, privileges);
return toReturnId;
}
private static final GNPrivConfiguration getPrivileges(Integer...groups ) {
GNPrivConfiguration toReturn=new GNPrivConfiguration();
for(Integer group:groups)
toReturn.addPrivileges(group, EnumSet.of(GNPriv.DOWNLOAD,GNPriv.DYNAMIC,GNPriv.EDITING,GNPriv.FEATURED,GNPriv.NOTIFY,GNPriv.VIEW));
return toReturn;
}
public void updateMeta(long toUpdateMetaId,File metadataFile) throws GNLibException, GNServerException{
log.debug("Updating metadata by ID "+toUpdateMetaId);
theClient.updateMetadata(toUpdateMetaId, metadataFile);
}
//********************************* SEARCH
public GNSearchResponse query(GNSearchRequest request) throws GNLibException, GNServerException{
return theClient.search(request);
}
}
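// A hedged usage sketch of the facade above: endpoint, credentials, category ("datasets"),
// stylesheet ("_none_") and file path are placeholders; checked exceptions are left to the caller.
GeoNetworkClient gn = new GeoNetworkClient("https://geonetwork.example.org/geonetwork",
        new Version(3, 0, 0), "adminPwd", "admin");        // (baseURL, version, password, user)
Group toCreate = GeoNetworkUtils.generateGroup(gn.getGroups(), "devVRE", "Dev VRE group", "devvre@example.org"); // clash-safe name + next free id (helper shown below)
Group created = gn.createGroup(toCreate);
long metadataId = gn.insertMetadata("datasets", "_none_", false, created.getId(), false, new File("/tmp/metadata.xml"));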

View File

@ -0,0 +1,89 @@
package org.gcube.spatial.data.sdi.engine.impl.gn.extension;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import org.gcube.spatial.data.sdi.LocalConfiguration;
import org.gcube.spatial.data.sdi.engine.impl.faults.gn.MetadataNotFoundException;
import org.gcube.spatial.data.sdi.model.gn.Group;
import org.gcube.spatial.data.sdi.model.gn.User;
import org.gcube.spatial.data.sdi.model.gn.User.Profile;
import org.gcube.spatial.data.sdi.utils.StringUtils;
import it.geosolutions.geonetwork.exception.GNLibException;
import it.geosolutions.geonetwork.exception.GNServerException;
import it.geosolutions.geonetwork.util.GNSearchRequest;
import it.geosolutions.geonetwork.util.GNSearchRequest.Config;
import it.geosolutions.geonetwork.util.GNSearchResponse;
import it.geosolutions.geonetwork.util.GNSearchResponse.GNMetadata;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class GeoNetworkUtils {
/**
 * Generates a Group whose name does not clash with the existing ones, appending a numeric suffix
 * and truncating to the configured maximum length when necessary.
 *
 * @param existing groups already registered on the server
 * @param groupName desired group name
 * @param description group description
 * @param contactMail group contact e-mail
 * @return a Group with a clash-safe name and the next free numeric id
 */
public static Group generateGroup(Set<Group> existing, String groupName, String description, String contactMail){
Set<String> existingNames=new HashSet<>();
int maxId=0;
for(Group g:existing){
existingNames.add(g.getName());
if(maxId<g.getId())maxId=g.getId();
}
String toUseName=clashSafeString(groupName,existingNames);
Integer maxLength=Integer.parseInt(LocalConfiguration.getProperty(LocalConfiguration.GEONETWORK_GROUP_MAX_LENGTH, "30"));
if(toUseName.length()>maxLength)
return generateGroup(existing, toUseName.substring(0, maxLength-2), description, contactMail);
return new Group(toUseName, description, contactMail, maxId+1);
}
public static User generateUser(Set<User> existing, Integer passwordLength, String username){
Set<String> existingNames=new HashSet<>();
for(User g:existing)existingNames.add(g.getUsername());
String toUseUserName=clashSafeString(username,existingNames);
return new User(0, // NB will be updated when creating it..
toUseUserName,
StringUtils.generateRandomString(passwordLength),Profile.Reviewer);
}
public static String clashSafeString(String originalString,Set<String> existingSet) {
String toReturn=originalString;
int suffix=1;
while(existingSet.contains(toReturn)) {
toReturn=originalString+"_"+suffix;
suffix++;
}
return toReturn;
}
public static long getIDByUUID(GeoNetworkClient client, String uuid) throws MetadataNotFoundException, GNLibException, GNServerException {
log.debug("Looking for uuid : {} ",uuid);
GNSearchRequest req=new GNSearchRequest();
req.addParam(GNSearchRequest.Param.any,uuid);
req.addConfig(Config.similarity, "1");
GNSearchResponse resp=client.query(req);
Iterator<GNMetadata> iterator=resp.iterator();
log.debug("Got {} hits for UUID {}",resp.getCount(),uuid);
while(iterator.hasNext()){
GNMetadata meta=iterator.next();
if(meta.getUUID().equals(uuid)) return meta.getId();
}
throw new MetadataNotFoundException("Unable to find metadata from uuid "+uuid);
}
}
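// For illustration, the clash-safe naming above is pure string logic; given the names already in use
// it simply appends an increasing numeric suffix (values below are placeholders).
Set<String> taken = new HashSet<>(Arrays.asList("devVRE", "devVRE_1"));
String safe = GeoNetworkUtils.clashSafeString("devVRE", taken);   // -> "devVRE_2"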

View File

@ -0,0 +1,281 @@
package org.gcube.spatial.data.sdi.engine.impl.gn.extension;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.ConnectException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import org.apache.commons.httpclient.Credentials;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.httpclient.UsernamePasswordCredentials;
import org.apache.commons.httpclient.auth.AuthScope;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.httpclient.methods.PutMethod;
import org.apache.commons.httpclient.methods.RequestEntity;
import org.apache.commons.httpclient.methods.StringRequestEntity;
import org.apache.commons.io.IOUtils;
import it.geosolutions.geonetwork.exception.GNServerException;
import it.geosolutions.geonetwork.util.HTTPUtils;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class HttpUtilsExtensions extends HTTPUtils {
private final static String JSON_CONTENT_TYPE="application/json";
private final static String XML_CONTENT_TYPE="text/xml";
public HttpUtilsExtensions() {
super();
}
String username;
String pw;
private int lastHttpStatus=0;
public HttpUtilsExtensions(String userName, String password) {
super(userName, password);
this.username=userName;
this.pw=password;
}
HttpClient client=new HttpClient();
public String getJSON(String url) throws MalformedURLException, GNServerException {
GetMethod httpMethod = null;
try {
setAuth(client, url, username, pw);
// creating call
httpMethod = new GetMethod(url);
//only actual difference from superclass
httpMethod.setRequestHeader("Accept", JSON_CONTENT_TYPE);
client.getHttpConnectionManager().getParams().setConnectionTimeout(5000);
lastHttpStatus = client.executeMethod(httpMethod);
if(lastHttpStatus == HttpStatus.SC_OK) {
InputStream is = httpMethod.getResponseBodyAsStream();
String response = IOUtils.toString(is);
if(response.trim().length()==0) { // sometimes the REST call returns an empty body
log.warn("ResponseBody is empty");
return null;
} else {
return response;
}
} else {
log.info("("+lastHttpStatus+") " + HttpStatus.getStatusText(lastHttpStatus) + " -- " + url );
throw new GNServerException("ERROR from calling "+url, lastHttpStatus);
}
} catch (ConnectException e) {
log.info("Couldn't connect to ["+url+"]");
} catch (IOException e) {
log.info("Error talking to ["+url+"]", e);
} finally {
if(httpMethod != null)
httpMethod.releaseConnection();
}
return null;
}
public String putJSON(String url, String content) throws UnsupportedEncodingException, GNServerException{
PutMethod httpMethod=null;
try {
setAuth(client, url, username, pw);
httpMethod=new PutMethod(url);
client.getHttpConnectionManager().getParams().setConnectionTimeout(5000);
httpMethod.setRequestEntity(new StringRequestEntity(content,JSON_CONTENT_TYPE,"UTF-8"));
//only actual difference from superclass
httpMethod.setRequestHeader("Accept", JSON_CONTENT_TYPE);
lastHttpStatus = client.executeMethod(httpMethod);
if((lastHttpStatus>=200)&&(lastHttpStatus<300)){
//OK responses
log.debug("HTTP "+ httpMethod.getStatusText() + " <-- " + url);
InputStream responseStream=httpMethod.getResponseBodyAsStream();
if(super.isIgnoreResponseContentOnSuccess()||responseStream==null)
return "";
return IOUtils.toString(responseStream);
}else{
//NOT OK responses
String badresponse = IOUtils.toString(httpMethod.getResponseBodyAsStream());
String message = super.getGeoNetworkErrorMessage(badresponse);
log.warn("Bad response: "+lastHttpStatus
+ " " + httpMethod.getStatusText()
+ " -- " + httpMethod.getName()
+ " " +url
+ " : "
+ message
);
log.debug("GeoNetwork response:\n"+badresponse);
throw new GNServerException("ERROR from calling "+url+". Message is "+badresponse, lastHttpStatus);
}
} catch (ConnectException e) {
log.info("Couldn't connect to ["+url+"]");
return null;
} catch (IOException e) {
log.error("Error talking to " + url + " : " + e.getLocalizedMessage());
return null;
} finally {
if(httpMethod != null)
httpMethod.releaseConnection();
}
}
protected void setAuth(HttpClient client, String url, String username, String pw) throws MalformedURLException {
URL u = new URL(url);
if(username != null && pw != null) {
Credentials defaultcreds = new UsernamePasswordCredentials(username, pw);
client.getState().setCredentials(new AuthScope(u.getHost(), u.getPort()), defaultcreds);
client.getParams().setAuthenticationPreemptive(true); // if we have the credentials, force them!
} else {
log.trace("Not setting credentials to access to " + url);
}
}
private void reset(){
// resets stats in subclass
this.lastHttpStatus=0;
}
private boolean isReset(){
return lastHttpStatus==0;
}
@Override
public int getLastHttpStatus() {
if(isReset())
return super.getLastHttpStatus();
else return this.lastHttpStatus;
}
// OVERRIDING superclass methods in order to discriminate on lastHttpStatus member
@Override
public boolean delete(String arg0) {
reset();
return super.delete(arg0);
}
@Override
public boolean exists(String arg0) {
reset();
return super.exists(arg0);
}
@Override
public String get(String arg0) throws MalformedURLException {
reset();
return super.get(arg0);
}
@Override
public boolean httpPing(String arg0) {
reset();
return super.httpPing(arg0);
}
@Override
public String post(String arg0, String arg1, String arg2, String arg3) {
reset();
return super.post(arg0, arg1, arg2, arg3);
}
@Override
public String post(String url, File file, String contentType) {
reset();
return super.post(url, file, contentType);
}
@Override
public String post(String url, InputStream content, String contentType) {
reset();
return super.post(url, content, contentType);
}
@Override
public String post(String url, RequestEntity requestEntity) {
reset();
return super.post(url, requestEntity);
}
@Override
public String post(String url, String content, String contentType) {
reset();
return super.post(url, content, contentType);
}
@Override
public String postXml(String url, InputStream content) {
reset();
return super.postXml(url, content);
}
@Override
public String postXml(String url, String content) {
reset();
return super.postXml(url, content);
}
@Override
public String postXml(String url, String content, String encoding) {
reset();
return super.postXml(url, content, encoding);
}
@Override
public String put(String arg0, String arg1, String arg2) {
reset();
return super.put(arg0, arg1, arg2);
}
@Override
public String put(String url, File file, String contentType) {
reset();
return super.put(url, file, contentType);
}
@Override
public String put(String url, RequestEntity requestEntity) {
reset();
return super.put(url, requestEntity);
}
@Override
public String putXml(String url, String content) {
reset();
return super.putXml(url, content);
}
}
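// A minimal sketch of the JSON-aware calls added above (URL and credentials are placeholders;
// MalformedURLException/GNServerException handling omitted). getLastHttpStatus() returns the status
// of the last JSON call, while the overridden XML/POST methods reset it so the superclass value is used.
HttpUtilsExtensions http = new HttpUtilsExtensions("admin", "adminPwd");
String usersJson = http.getJSON("https://geonetwork.example.org/geonetwork/srv/api/0.1/users");
int status = http.getLastHttpStatus();    // HTTP status of the getJSON call above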

View File

@ -0,0 +1,34 @@
package org.gcube.spatial.data.sdi.engine.impl.gn.extension;
import org.gcube.spatial.data.sdi.model.service.Version;
import lombok.AllArgsConstructor;
import lombok.Getter;
@Getter
@AllArgsConstructor
public class ServerAccess{
private String gnServiceURL;
private Version version;
private String password;
private String user;
public ServerAccess(String gnServiceURL, Version version) {
super();
this.gnServiceURL = gnServiceURL;
this.version = version;
}
@Override
public String toString() {
return "ServerAccess [gnServiceURL=" + gnServiceURL + ", version=" + version + ", password=****" + ", user=" + user + "]";
}
}

View File

@ -0,0 +1,88 @@
package org.gcube.spatial.data.sdi.engine.impl.gn.utils;
import java.io.StringReader;
import java.util.HashSet;
import java.util.Set;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathFactory;
import org.gcube.spatial.data.sdi.model.gn.Group;
import org.gcube.spatial.data.sdi.utils.StringUtils;
import org.jdom.Element;
import org.jdom.input.SAXBuilder;
import org.json.JSONArray;
import org.json.JSONObject;
import it.geosolutions.geonetwork.exception.GNLibException;
public class GroupUtils {
private static XPath xpath = XPathFactory.newInstance().newXPath();
public static Set<Group> parseGroupXMLResponse(String xml) throws GNLibException {
try{
HashSet<Group> toReturn=new HashSet<>();
SAXBuilder builder = new SAXBuilder();
org.jdom.Element responseEl= builder.build(new StringReader(xml)).detachRootElement();
for(Object recordObj:responseEl.getChildren("record")){
org.jdom.Element record=(org.jdom.Element) recordObj;
Integer id=Integer.parseInt(record.getChild("id").getText());
String name=record.getChild("name").getText();
Element descElement=record.getChild("description");
String description=descElement!=null?descElement.getText():"";
Element mailElement=record.getChild("email");
String email=mailElement!=null?mailElement.getText():"";
toReturn.add(new Group(name,description,email,id));
}
return toReturn;
}catch(Exception e){
throw new GNLibException("Unable to parse response", e);
}
}
public static Set<Group> parseUserJSONResponse(String groupResponse) throws GNLibException {
try{
HashSet<Group> toReturn=new HashSet<>();
JSONArray array=new JSONArray(groupResponse);
for(int i=0;i<array.length();i++){
JSONObject groupObj=array.getJSONObject(i);
Integer id=groupObj.getInt("id");
String name=groupObj.getString("name");
String description=groupObj.getString("description");
String email=groupObj.getString("email");
toReturn.add(new Group(name,description,email,id));
}
return toReturn;
}catch(Exception e){
throw new GNLibException("Unable to parse group JSON response ",e);
}
}
public static Group generateRandomGroup(Set<Group> existing, Integer nameLenght){
Set<String> existingNames=new HashSet<>();
int maxId=0;
for(Group g:existing){
existingNames.add(g.getName());
if(maxId<g.getId())maxId=g.getId();
}
return new Group(StringUtils.generateNewRandom(existingNames, nameLength), "generated group", "no.mail@nothing.org", maxId+1);
}
public static Group getByName(Set<Group> toLookInto,String toLookFor){
for(Group g:toLookInto)
if(g.getName().equals(toLookFor)) return g;
return null;
}
}
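// For reference, the 2.x parser above expects <record> entries with id/name/description/email children.
// A minimal illustrative payload (not captured from a real server):
String xml = "<response><record><id>2</id><name>sample</name>"
        + "<description>demo</description><email>sample@example.org</email></record></response>";
Set<Group> groups = GroupUtils.parseGroupXMLResponse(xml);   // one Group(name=sample, id=2)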

View File

@ -0,0 +1,88 @@
package org.gcube.spatial.data.sdi.engine.impl.gn.utils;
import java.io.StringReader;
import java.util.HashSet;
import java.util.Set;
import org.gcube.spatial.data.sdi.model.gn.User;
import org.gcube.spatial.data.sdi.model.gn.User.Profile;
import org.gcube.spatial.data.sdi.utils.StringUtils;
import org.jdom.input.SAXBuilder;
import org.json.JSONArray;
import org.json.JSONObject;
import it.geosolutions.geonetwork.exception.GNLibException;
public class UserUtils {
public static Set<User> parseUserXMLResponse(String toParse) throws GNLibException{
try{
HashSet<User> toReturn=new HashSet<>();
SAXBuilder builder = new SAXBuilder();
org.jdom.Element responseEl= builder.build(new StringReader(toParse)).detachRootElement();
for(Object recordObj:responseEl.getChildren("record")){
org.jdom.Element record=(org.jdom.Element) recordObj;
Integer id=Integer.parseInt(record.getChildText("id"));
String username=record.getChildText("username");
String password=record.getChildText("password");
Profile profile=Profile.valueOf(record.getChildText("profile"));
toReturn.add(new User(id,username, password, profile));
}
return toReturn;
}catch(Exception e){
throw new GNLibException("Unable to parse users XML response", e);
}
}
public static Set<User> parseUserJSONResponse(String toParse)throws GNLibException{
try{
HashSet<User> toReturn=new HashSet<>();
JSONArray array=new JSONArray(toParse);
for(int i=0;i<array.length();i++){
JSONObject userObj=array.getJSONObject(i);
Integer id=userObj.getInt("id");
String username=userObj.getString("username");
String password=null; // password is not returned anymore by service responses
Profile profile=Profile.valueOf(userObj.getString("profile"));
toReturn.add(new User(id, username, password, profile));
}
return toReturn;
}catch(Exception e){
throw new GNLibException("Unable to parse users JSON response ",e);
}
}
public static User generateRandomUser(Set<User> existing, Integer nameLenght, Integer passwordLength){
Set<String> existingNames=new HashSet<>();
for(User g:existing)existingNames.add(g.getUsername());
return new User(0,StringUtils.generateNewRandom(existingNames, nameLength),StringUtils.generateRandomString(passwordLength),Profile.RegisteredUser);
}
public static Set<Integer> parseGroupsByUserResponse(String toParse) throws GNLibException{
try{
HashSet<Integer> toReturn=new HashSet<>();
SAXBuilder builder = new SAXBuilder();
org.jdom.Element responseEl= builder.build(new StringReader(toParse)).detachRootElement();
for(Object recordObj:responseEl.getChildren("group")){
org.jdom.Element record=(org.jdom.Element) recordObj;
Integer id=Integer.parseInt(record.getChildText("id"));
toReturn.add(id);
}
return toReturn;
}catch(Exception e){
throw new GNLibException("Unable to Groups By User XML response", e);
}
}
public static User getByName(Set<User> toLookInto,String toLookFor){
for(User g:toLookInto)
if(g.getUsername().equals(toLookFor)) return g;
return null;
}
}
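// Similarly, the 3.x JSON parser above expects an array of user objects; a minimal illustrative payload
// (passwords are never returned by the service, so the field stays null):
String json = "[{\"id\":7,\"username\":\"editor01\",\"profile\":\"Reviewer\"}]";
Set<User> users = UserUtils.parseUserJSONResponse(json);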

View File

@ -0,0 +1,260 @@
package org.gcube.spatial.data.sdi.engine.impl.is;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.resources.gcore.GCoreEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.Profile;
import org.gcube.common.resources.gcore.common.Platform;
import org.gcube.spatial.data.sdi.utils.ScopeUtils;
import org.gcube.spatial.data.sdi.LocalConfiguration;
import org.gcube.spatial.data.sdi.engine.impl.faults.ConfigurationNotFoundException;
import org.gcube.spatial.data.sdi.engine.impl.faults.InvalidServiceDefinitionException;
import org.gcube.spatial.data.sdi.engine.impl.faults.ServiceRegistrationException;
import org.gcube.spatial.data.sdi.model.health.Level;
import org.gcube.spatial.data.sdi.model.health.ServiceHealthReport;
import org.gcube.spatial.data.sdi.model.health.Status;
import org.gcube.spatial.data.sdi.model.services.ServiceDefinition;
import lombok.Synchronized;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public abstract class AbstractISModule implements ISModule {
protected abstract String getGCoreEndpointServiceClass();
protected abstract String getGCoreEndpointServiceName();
protected abstract String getServiceEndpointAccessPointName();
protected abstract String getServiceEndpointCategory();
protected abstract String getServiceEndpointPlatformName();
protected abstract String getManagedServiceType();
protected abstract boolean isSmartGearsMandatory();
@Override
public List<ServiceEndpoint> getISInformation() throws ConfigurationNotFoundException {
log.trace("Getting current information from IS. Scope {} ",ScopeUtils.getCurrentScope());
ArrayList<ServiceEndpoint> toReturn=new ArrayList<>();
log.debug("MANDATORY SG FOR {} is {} ",getManagedServiceType(),isSmartGearsMandatory());
if(isSmartGearsMandatory()) {
List<GCoreEndpoint> GCs=queryGcoreEndpoints();
for(ServiceEndpoint ep: queryServiceEndpoints()) {
String host=ISUtils.getHost(ep);
log.debug("Checking if ServiceEndpoint at {} is SmartGears .. ",host);
try{
if(ISUtils.getByHostnameInCollection(host, GCs)!=null) {
log.debug("Found GC. Service Endpoint {} seems valid. ",ep.profile().name());
toReturn.add(ep);
}
}catch(UnknownHostException e) {
log.warn("Unexpected Exception while checking hostnames. Please check configuration.",e);
}
}
}else {
toReturn.addAll(queryServiceEndpoints());
}
return toReturn;
}
@Override
public ServiceHealthReport getHealthReport() {
List<Status> checkStatuses=new ArrayList<>();
try {
log.trace("Checking {} heatlh under context {} ",getManagedServiceType(),ScopeUtils.getCurrentScope());
//Check if existing
List<GCoreEndpoint> gCoreEndpoints=queryGcoreEndpoints();
List<ServiceEndpoint> serviceEndpoints=queryServiceEndpoints();
log.debug("Found {} GC Endpoints and {} SE Endpoints",gCoreEndpoints.size(),serviceEndpoints.size());
if(serviceEndpoints.isEmpty())
if(gCoreEndpoints.isEmpty())checkStatuses.add(new Status("No "+getManagedServiceType()+" found in context "+ScopeUtils.getCurrentScope(),Level.ERROR));
else checkStatuses.add(new Status("Unregistered "+getManagedServiceType()+" instances found. Check following messages",Level.ERROR));
//For each GC check for missing SE
for(GCoreEndpoint gc:gCoreEndpoints) {
String hostname= gc.profile().endpoints().iterator().next().uri().getHost();
if(ISUtils.getByHostnameInCollection(hostname, serviceEndpoints)==null) {
String msg="Found unregistered "+getManagedServiceType()+" hosted on "+hostname;
log.debug(msg);
checkStatuses.add(new Status(msg,Level.WARNING));
}
}
for(ServiceEndpoint se : serviceEndpoints) {
try {
//check if GC up & running
String hostname=se.profile().runtime().hostedOn();
GCoreEndpoint found=ISUtils.getByHostnameInCollection(hostname, gCoreEndpoints);
if(found==null)
checkStatuses.add(new Status("Service endpoint [name = "+se.profile().name()+", host = "+hostname+" ID = "+se.id()+"] found but no related GC is present.",Level.ERROR));
else {
String status=found.profile().deploymentData().status();
switch(status) {
case "unreachable" :
case "down" : checkStatuses.add(new Status("GCoreEndpoint [ID "+found.id()+"] for instance hosted on "+hostname+" has status : "+status,Level.ERROR));
break;
default :
}
}
// perform specific checks
checkStatuses.addAll(performInstanceCheck(se));
}catch(Throwable t) {
log.error("Unable to perform checks on SE "+se.id(), t);
checkStatuses.add(new Status("Internal error while checking "+getManagedServiceType()+" [SE ID : "+se.id()+"]."+t.getMessage(),Level.ERROR));
}
}
}catch(Throwable t) {
log.error("Unable to perform checks", t);
checkStatuses.add(new Status("Internal error while checking "+getManagedServiceType()+" Status.",Level.ERROR));
}
return new ServiceHealthReport(checkStatuses);
}
protected abstract List<Status> performInstanceCheck(ServiceEndpoint se);
protected List<GCoreEndpoint> queryGcoreEndpoints(){
String geClass=getGCoreEndpointServiceClass();
String geName=getGCoreEndpointServiceName();
return ISUtils.queryForGCoreEndpoint(geClass, geName);
}
protected List<ServiceEndpoint> queryServiceEndpoints(){
String seCategory=getServiceEndpointCategory();
String sePlatform=getServiceEndpointPlatformName();
return ISUtils.queryForServiceEndpoints(seCategory, sePlatform);
}
@Override
public String importHostFromToken(String sourceToken, String host) throws ServiceRegistrationException {
log.trace("Importing host {} from token {} ",host,sourceToken);
String callerScope=ScopeUtils.getCurrentScope();
String callerToken=SecurityTokenProvider.instance.get();
try {
//Checking if already present
List<ServiceEndpoint> existingSEs=ISUtils.querySEByHostname(getServiceEndpointCategory(), getServiceEndpointPlatformName(), host);
if(existingSEs.size()>0) {
throw new ServiceRegistrationException("HOST "+host+" is already registered in current scope with ID : "+existingSEs.get(0).id());
}
// Getting from sourceToken..
SecurityTokenProvider.instance.set(sourceToken);
log.debug("Source token {} is from scope {}.",sourceToken,ScopeUtils.getCurrentScope());
List<ServiceEndpoint> foundSEs=ISUtils.querySEByHostname(getServiceEndpointCategory(), getServiceEndpointPlatformName(), host);
if(foundSEs.size()>1) throw new ServiceRegistrationException("Too many ServiceEndpoints found with hostname "+host);
else if(foundSEs.isEmpty()) throw new ServiceRegistrationException("No ServiceEndpoints found with hostname "+host);
ServiceEndpoint toImportSE= foundSEs.get(0);
try {
GCoreEndpoint toImportGC = ISUtils.getByHostnameInCollection(host, queryGcoreEndpoints());
if(toImportGC==null) throw new ServiceRegistrationException("No GCoreEndpoint found for hostname "+host);
log.debug("Registering resources to caller scope {} ",callerScope);
return ISUtils.addToScope(toImportSE, toImportGC,callerScope);
}catch(Exception e) {
throw new ServiceRegistrationException("Unable to register resources",e);
}
}finally {
if(!SecurityTokenProvider.instance.get().equals(callerToken))
SecurityTokenProvider.instance.set(callerToken);
}
}
@Override
@Synchronized
public String registerService(ServiceDefinition definition) throws ServiceRegistrationException {
log.info("Registering {} ",definition);
log.debug("Checking definition type..");
checkDefinitionType(definition);
log.debug("Checking IS ..");
checkDefinition(definition);
log.debug("Performing type specific checks..");
checkDefinitionForServiceType(definition);
log.debug("Preparing ServiceEndpoint.. ");
ServiceEndpoint ep=prepareEndpoint(definition);
log.debug("Publishing resource..");
String id=ISUtils.registerService(ep);
List<String> registered=null;
long registrationTime=System.currentTimeMillis();
long timeout=Long.parseLong(LocalConfiguration.getProperty(LocalConfiguration.IS_REGISTRATION_TIMEOUT));
do{
log.debug("Waiting for IS to update. Passed {} ms.",(System.currentTimeMillis()-registrationTime));
try{Thread.sleep(500);
}catch(Exception e) {}
registered=ISUtils.queryById(id);
}while(registered.isEmpty()&&((System.currentTimeMillis()-registrationTime)<=timeout));
if(registered.isEmpty()) {
log.warn("Registered resource [ID :{}] was not found before Timeout of {} ms. Returning id. ",id,timeout);
return id;
}else return registered.get(0);
}
protected abstract void checkDefinitionForServiceType(ServiceDefinition definition) throws InvalidServiceDefinitionException;
protected abstract void checkDefinitionType(ServiceDefinition definition) throws InvalidServiceDefinitionException;
protected void checkDefinition(ServiceDefinition definition) throws ServiceRegistrationException {
try{
String hostname=definition.getHostname();
List<ServiceEndpoint> serviceEndpoints=queryServiceEndpoints();
ServiceEndpoint existing=ISUtils.getByHostnameInCollection(hostname, serviceEndpoints);
if(existing!=null) {
throw new ServiceRegistrationException("Service is already registered");
}
List<GCoreEndpoint> gCoreNodes=queryGcoreEndpoints();
GCoreEndpoint running=ISUtils.getByHostnameInCollection(hostname, gCoreNodes);
if(running==null) throw new ServiceRegistrationException("No GCoreEndpoint found for "+definition);
}catch(ServiceRegistrationException e) {
throw e;
}catch(Throwable t) {
throw new ServiceRegistrationException("Unexpected exception while trying to register "+definition, t);
}
}
protected ServiceEndpoint prepareEndpoint(ServiceDefinition definition) throws ServiceRegistrationException {
try{
ServiceEndpoint toCreate=new ServiceEndpoint();
Profile profile=toCreate.newProfile();
profile.category(getServiceEndpointCategory());
profile.description(definition.getDescription());
profile.name(definition.getName());
Platform platform=profile.newPlatform();
platform.name(getServiceEndpointPlatformName()).
version(definition.getMajorVersion()).
minorVersion(definition.getMinorVersion()).
revisionVersion(definition.getReleaseVersion());
org.gcube.common.resources.gcore.ServiceEndpoint.Runtime runtime=profile.newRuntime();
runtime.hostedOn(definition.getHostname());
GCoreEndpoint relatedGHN=ISUtils.getByHostnameInCollection(definition.getHostname(), queryGcoreEndpoints());
runtime.ghnId(relatedGHN.id());
runtime.status("READY");
return toCreate;
}catch(Throwable t) {
throw new ServiceRegistrationException("Unexpected exception while trying to register "+definition, t);
}
}
}

View File

@ -0,0 +1,33 @@
package org.gcube.spatial.data.sdi.engine.impl.is;
public class CachedObject<T> {
private long lastUpdate=System.currentTimeMillis();
private T theObject;
public CachedObject(T theObject) {
super();
this.theObject = theObject;
}
public T getTheObject() {
return theObject;
}
public boolean isValid(long TTL){
return System.currentTimeMillis()-lastUpdate<TTL;
}
public void invalidate(){
lastUpdate=0l;
}
}
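// A hedged usage sketch of the holder above; expensiveLookup() is a hypothetical supplier and the 30s TTL is a placeholder.
CachedObject<String> cached = new CachedObject<>(expensiveLookup());
if (!cached.isValid(30_000L)) {
    cached = new CachedObject<>(expensiveLookup());   // refresh once the TTL has expired
}
String value = cached.getTheObject();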

View File

@ -0,0 +1,217 @@
package org.gcube.spatial.data.sdi.engine.impl.is;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.Property;
import org.gcube.spatial.data.sdi.LocalConfiguration;
import org.gcube.spatial.data.sdi.engine.impl.faults.InvalidServiceDefinitionException;
import org.gcube.spatial.data.sdi.engine.impl.faults.ServiceRegistrationException;
import org.gcube.spatial.data.sdi.model.credentials.AccessType;
import org.gcube.spatial.data.sdi.model.credentials.Credentials;
import org.gcube.spatial.data.sdi.model.gn.Account;
import org.gcube.spatial.data.sdi.model.health.Status;
import org.gcube.spatial.data.sdi.model.services.GeoNetworkServiceDefinition;
import org.gcube.spatial.data.sdi.model.services.ServiceDefinition;
import org.gcube.spatial.data.sdi.model.services.ServiceDefinition.Type;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class GeoNetworkRetriever extends AbstractISModule{
// @Override
// public GeoNetworkDescriptor getObject() throws ConfigurationNotFoundException {
// //TODO skip library
// //TODO use both GCoreEndpoints and ServiceEndpoint
//
//
//// log.info("Gathering geonetwork information under scope {} ",ScopeUtils.getCurrentScope());
//// LocalConfiguration config=LocalConfiguration.get();
//// String category=config.getProperty(LocalConfiguration.GEONETWORK_SE_CATEGORY);
//// String platformName=config.getProperty(LocalConfiguration.GEONETWORK_SE_PLATFORM);
//// String priorityProperty=config.getProperty(LocalConfiguration.GEONETWORK_SE_PRIORITY);
//// String endpointName=config.getProperty(LocalConfiguration.GEONETWORK_SE_ENDPOINT_NAME);
//// ServiceEndpoint se=getTheRightServiceEndpoint(ISUtils.queryForServiceEndpoints(category, platformName), endpointName, priorityProperty);
//// AccessPoint access=getTheRightAccessPoint(se, endpointName, priorityProperty);
////
//
// try{
// //INIT LIB
// GISInterface gis=GISInterface.get();
// GeoNetworkAdministration gnAdmin=(GeoNetworkAdministration) gis.getGeoNewtorkPublisher();
// Configuration config=gnAdmin.getConfiguration();
//
// Version version=config.getGeoNetworkVersion().equals(ServerAccess.Version.TRE)?new Version(3,0,0):new Version(2,6,0);
// String baseEndpoint=config.getGeoNetworkEndpoint();
// ScopeConfiguration scopeConfig=config.getScopeConfiguration();
// List<Credentials> accessibleCredentials=new ArrayList();
// for(Account acc: scopeConfig.getAccounts().values()){
// accessibleCredentials.add(fromGeoNetworkAccount(acc));
// }
//
// Credentials adminCredentials=fromGeoNetworkAccount(config.getAdminAccount());
// // GN Lib doesn't expose ADMIN account type
// adminCredentials.setAccessType(AccessType.ADMIN);
// accessibleCredentials.add(adminCredentials);
// return new GeoNetworkDescriptor(version, baseEndpoint, accessibleCredentials, scopeConfig.getPrivateGroup()+"", scopeConfig.getPublicGroup()+"", "3");
// }catch(Exception e){
// log.warn("Unable to gather geonetwork information",e);
// throw new ConfigurationNotFoundException("Unable to gather information on geonetwork. Please contact administrator.",e);
// }
// }
protected static final Credentials fromGeoNetworkAccount(Account toTranslate){
switch(toTranslate.getType()){
case CKAN : return new Credentials(toTranslate.getUser(),toTranslate.getPassword(),AccessType.CKAN);
case SCOPE : return new Credentials(toTranslate.getUser(),toTranslate.getPassword(),AccessType.CONTEXT_USER);
default : throw new RuntimeException("Unrecognized account type "+toTranslate);
}
}
@Override
protected boolean isSmartGearsMandatory() {
return LocalConfiguration.getFlag(LocalConfiguration.GEONETWORK_MANDATORY_SG);
}
protected static final ServiceEndpoint getTheRightServiceEndpoint(List<ServiceEndpoint>resources, String endpointName,String priorityProperty){
ServiceEndpoint toReturn=null;
int priority=1000;
for(ServiceEndpoint resource: resources){
Iterator<AccessPoint> points=resource.profile().accessPoints().iterator();
while(points.hasNext()){
AccessPoint point= points.next();
log.debug(point.toString());
if(point.name().equals(endpointName)){
Map<String, Property> properties=point.propertyMap();
if(properties.containsKey(priorityProperty)){
int currentPriority=Integer.parseInt(properties.get(priorityProperty).value());
if(toReturn==null||(currentPriority<priority)){
toReturn=resource;
priority=currentPriority;
}
}
}
}
}
return toReturn;
}
/**
 * Looks for the access point matching the configured endpointName, preferring the one whose
 * priority property has the lowest value.
 *
 * @return the selected AccessPoint, or null if none matches
 */
protected static final AccessPoint getTheRightAccessPoint(ServiceEndpoint resource,String endpointName,String priorityProperty){
AccessPoint toReturn=null;
int priority=1000;
Iterator<AccessPoint> points=resource.profile().accessPoints().iterator();
while(points.hasNext()){
AccessPoint point= points.next();
log.debug(point.toString());
if(point.name().equals(endpointName)){
Map<String, Property> properties=point.propertyMap();
if(properties.containsKey(priorityProperty)){
int currentPriority=Integer.parseInt(properties.get(priorityProperty).value());
if(toReturn==null||(currentPriority<priority)){
toReturn=point;
priority=currentPriority;
}
}
}
}
return toReturn;
}
@Override
protected String getGCoreEndpointServiceClass() {
return LocalConfiguration.getProperty(LocalConfiguration.GEONETWORK_GE_SERVICE_CLASS);
}
@Override
protected String getGCoreEndpointServiceName() {
return LocalConfiguration.getProperty(LocalConfiguration.GEONETWORK_GE_SERVICE_NAME);
}
@Override
protected String getManagedServiceType() {
return "GeoNetwork";
}
@Override
protected String getServiceEndpointCategory() {
return LocalConfiguration.getProperty(LocalConfiguration.GEONETWORK_SE_CATEGORY);
}
@Override
protected String getServiceEndpointPlatformName() {
return LocalConfiguration.getProperty(LocalConfiguration.GEONETWORK_SE_PLATFORM);
}
@Override
protected String getServiceEndpointAccessPointName() {
return LocalConfiguration.getProperty(LocalConfiguration.GEONETWORK_SE_ENDPOINT_NAME);
}
@Override
protected List<Status> performInstanceCheck(ServiceEndpoint se) {
return Collections.EMPTY_LIST;
}
@Override
protected void checkDefinitionForServiceType(ServiceDefinition definition) {
log.info("Checking geonetwork for {} ",definition);
// Contact GN
// try to login with credentials
// check priority of other GNs against the defined one
}
@Override
protected ServiceEndpoint prepareEndpoint(ServiceDefinition definition) throws ServiceRegistrationException {
ServiceEndpoint toReturn= super.prepareEndpoint(definition);
GeoNetworkServiceDefinition gnDefinition=(GeoNetworkServiceDefinition) definition;
AccessPoint point=new AccessPoint();
point.address("http://"+definition.getHostname()+"/geonetwork");
point.credentials(ISUtils.encryptString(definition.getAdminPassword()), "admin");
point.description("Main Access point");
point.name(getServiceEndpointAccessPointName());
// Priority property
Property priorityProperty=new Property();
priorityProperty.nameAndValue("priority", gnDefinition.getPriority()+"");
point.properties().add(priorityProperty);
// Suffixes property
Property suffixesProperty=new Property();
suffixesProperty.nameAndValue("suffixes", "");
point.properties().add(suffixesProperty);
toReturn.profile().accessPoints().add(point);
return toReturn;
}
@Override
protected void checkDefinitionType(ServiceDefinition definition) throws InvalidServiceDefinitionException {
if(!definition.getType().equals(Type.GEONETWORK)||!(definition instanceof GeoNetworkServiceDefinition))
throw new InvalidServiceDefinitionException("Invalid service type [expected "+Type.GEONETWORK+"]. Definition was "+definition);
}
}

View File

@ -0,0 +1,120 @@
package org.gcube.spatial.data.sdi.engine.impl.is;
import java.util.Collections;
import java.util.List;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.spatial.data.sdi.LocalConfiguration;
import org.gcube.spatial.data.sdi.engine.impl.faults.InvalidServiceDefinitionException;
import org.gcube.spatial.data.sdi.model.health.Status;
import org.gcube.spatial.data.sdi.model.services.GeoServerDefinition;
import org.gcube.spatial.data.sdi.model.services.ServiceDefinition;
import org.gcube.spatial.data.sdi.model.services.ServiceDefinition.Type;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class GeoServerClusterRetriever extends AbstractISModule{
// @Override
// public List<ServiceEndpoint> ge throws ConfigurationNotFoundException {
// //TODO skip library
// //TODO use both GCoreEndpoints and ServiceEndpoint
// try {
// ArrayList<GeoServerDescriptor> availableInstances=new ArrayList<>();
// for(ServiceEndpoint ep: getServiceEndpoints()) {
// try{
// availableInstances.add(translate(ep));
// }catch(Throwable t) {
// log.warn("Unable to translate ServiceEndpoint [ID : {}].",ep.id(),t);
// }
// }
// }catch(Throwable e){
// log.warn("Unable to gather geoserver cluster configuration on scope "+ScopeUtils.getCurrentScope(),e);
// throw new ConfigurationNotFoundException("Unable to gather geoserver cluster configuration. Please ontact administrator.",e);
// }
//
// log.info("Retrieving GeoServer cluster configuration under scope {}",ScopeUtils.getCurrentScope());
// try{
// GISInterface gis=GISInterface.get();
// ArrayList<GeoServerDescriptor> availableInstances=new ArrayList<>();
// for(AbstractGeoServerDescriptor desc: gis.getCurrentCacheElements(true)){
// try{
// availableInstances.add(translate(desc));
// }catch(Throwable t){
// log.warn("Unable to translate descriptor for endpoint"+desc.getUrl(),t);
// }
// }
//
// return new GeoServerCluster(availableInstances);
// }catch(Exception e){
// log.warn("Unable to gather geoserver cluster configuration on scope "+ScopeUtils.getCurrentScope(),e);
// throw new ConfigurationNotFoundException("Unable to gather geoserver cluster configuration. Please ontact administrator.",e);
// }
// }
@Override
protected boolean isSmartGearsMandatory() {
return LocalConfiguration.getFlag(LocalConfiguration.GEOSERVER_MANDATORY_SG);
}
// private static final GeoServerDescriptor translate(AbstractGeoServerDescriptor desc){
// Version version=new Version(2,1,2);
// String baseEndpoint=desc.getUrl();
// List<Credentials> accessibleCredentials=Collections.singletonList(new Credentials(desc.getUser(), desc.getPassword(), AccessType.ADMIN));
// String confidentialWorkspace=null;
// String contextVisibilityWorkspace=null;
// String sharedWorkspace=null;
// String publicWorkspace=null;
// return new GeoServerDescriptor(version, baseEndpoint, accessibleCredentials, confidentialWorkspace, contextVisibilityWorkspace, sharedWorkspace, publicWorkspace);
// }
@Override
protected String getGCoreEndpointServiceClass() {
return LocalConfiguration.getProperty(LocalConfiguration.GEOSERVER_GE_SERVICE_CLASS);
}
@Override
protected String getGCoreEndpointServiceName() {
return LocalConfiguration.getProperty(LocalConfiguration.GEOSERVER_GE_SERVICE_NAME);
}
@Override
protected String getManagedServiceType() {
return "GeoServer";
}
@Override
protected String getServiceEndpointAccessPointName() {
return LocalConfiguration.getProperty(LocalConfiguration.GEOSERVER_SE_ENDPOINT_NAME);
}
@Override
protected String getServiceEndpointCategory() {
return LocalConfiguration.getProperty(LocalConfiguration.GEOSERVER_SE_CATEGORY);
}
@Override
protected String getServiceEndpointPlatformName() {
return LocalConfiguration.getProperty(LocalConfiguration.GEOSERVER_SE_PLATFORM);
}
@Override
protected List<Status> performInstanceCheck(ServiceEndpoint se) {
return Collections.emptyList();
}
@Override
protected void checkDefinitionForServiceType(ServiceDefinition definition)
throws InvalidServiceDefinitionException {
// Contact GN
// try to login with credentials
}
@Override
protected void checkDefinitionType(ServiceDefinition definition) throws InvalidServiceDefinitionException {
if(!definition.getType().equals(Type.GEOSERVER)||!(definition instanceof GeoServerDefinition))
throw new InvalidServiceDefinitionException("Invalid service type [expected "+Type.GEOSERVER+"]. Definition was "+definition);
}
}

View File

@@ -0,0 +1,17 @@
package org.gcube.spatial.data.sdi.engine.impl.is;
import java.util.List;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.spatial.data.sdi.engine.impl.faults.ConfigurationNotFoundException;
import org.gcube.spatial.data.sdi.engine.impl.faults.ServiceRegistrationException;
import org.gcube.spatial.data.sdi.model.health.ServiceHealthReport;
import org.gcube.spatial.data.sdi.model.services.ServiceDefinition;
public interface ISModule {
public List<ServiceEndpoint> getISInformation()throws ConfigurationNotFoundException;
public ServiceHealthReport getHealthReport();
public String registerService(ServiceDefinition definition) throws ServiceRegistrationException;
public String importHostFromToken(String sourceToken,String host)throws ServiceRegistrationException;
}
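
A minimal, hypothetical usage sketch of the ISModule contract (illustration only): it assumes AbstractISModule implements ISModule, that the concrete retrievers expose a no-arg constructor, and that the gCube scope/token is already set for the calling thread.

import java.util.List;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.spatial.data.sdi.engine.impl.faults.ConfigurationNotFoundException;
import org.gcube.spatial.data.sdi.engine.impl.is.GeoServerClusterRetriever;
import org.gcube.spatial.data.sdi.engine.impl.is.ISModule;
import org.gcube.spatial.data.sdi.model.health.ServiceHealthReport;
public class ISModuleUsageSketch {
    public static void main(String[] args) throws ConfigurationNotFoundException {
        // Assumption: AbstractISModule implements ISModule, so a retriever can be used through the interface.
        ISModule geoserverModule = new GeoServerClusterRetriever();
        // Discover the ServiceEndpoints registered for GeoServer in the current scope.
        List<ServiceEndpoint> endpoints = geoserverModule.getISInformation();
        for (ServiceEndpoint ep : endpoints)
            System.out.println("Found endpoint " + ep.id() + " on " + ep.profile().runtime().hostedOn());
        // Run the module's health checks against the discovered instances.
        ServiceHealthReport report = geoserverModule.getHealthReport();
        System.out.println("Health report: " + report);
    }
}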

View File

@@ -0,0 +1,233 @@
package org.gcube.spatial.data.sdi.engine.impl.is;
import static org.gcube.resources.discovery.icclient.ICFactory.client;
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.io.ByteArrayOutputStream;
import java.lang.reflect.InvocationTargetException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import org.gcube.common.encryption.StringEncrypter;
import org.gcube.common.resources.gcore.GCoreEndpoint;
import org.gcube.common.resources.gcore.Resource;
import org.gcube.common.resources.gcore.Resources;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.informationsystem.publisher.RegistryPublisher;
import org.gcube.informationsystem.publisher.RegistryPublisherFactory;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.gcube.resources.discovery.client.queries.impl.QueryBox;
import org.gcube.spatial.data.sdi.LocalConfiguration;
import org.gcube.spatial.data.sdi.NetUtils;
import org.gcube.spatial.data.sdi.utils.ScopeUtils;
import org.gcube.vremanagement.resourcemanager.client.RMBinderLibrary;
import org.gcube.vremanagement.resourcemanager.client.exceptions.InvalidScopeException;
import org.gcube.vremanagement.resourcemanager.client.exceptions.ResourcesCreationException;
import org.gcube.vremanagement.resourcemanager.client.fws.Types.AddResourcesParameters;
import org.gcube.vremanagement.resourcemanager.client.fws.Types.ResourceItem;
import org.gcube.vremanagement.resourcemanager.client.fws.Types.ResourceList;
import org.gcube.vremanagement.resourcemanager.client.proxies.Proxies;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class ISUtils {
public static List<ServiceEndpoint> queryForServiceEndpoints(String category, String platformName){
log.debug("Querying for Service Endpoints [category : {} , platformName : {}, currentScope : {} ]",category,platformName,ScopeUtils.getCurrentScope());
SimpleQuery query = queryFor(ServiceEndpoint.class);
query.addCondition("$resource/Profile/Category/text() eq '"+category+"'")
.addCondition("$resource/Profile/Platform/Name/text() eq '"+platformName+"'");
// .setResult("$resource/Profile/AccessPoint");
DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
return client.submit(query);
}
public static List<GCoreEndpoint> queryForGCoreEndpoint(String serviceClass,String serviceName){
log.debug("Querying for GCore Endpoints [ServiceClass : {} , ServiceName : {}, currentScope : {} ]",serviceClass,serviceName,ScopeUtils.getCurrentScope());
SimpleQuery query =queryFor(GCoreEndpoint.class);
query.addCondition("$resource/Profile/ServiceClass/text() eq '"+serviceClass+"'")
.addCondition("$resource/Profile/ServiceName/text() eq '"+serviceName+"'");
// .setResult("$resource/Profile/AccessPoint");
DiscoveryClient<GCoreEndpoint> client = clientFor(GCoreEndpoint.class);
return client.submit(query);
}
public static <T extends Resource> T getByHostnameInCollection(String hostname, Collection<T> toCheckList) throws UnknownHostException {
for(T gc:toCheckList) {
String currentHostToCheck=getHost(gc);
if(NetUtils.isSameHost(currentHostToCheck, hostname)) return gc;
}
return null;
}
public static String getHost(Resource res) {
if(res instanceof GCoreEndpoint)
return (((GCoreEndpoint)res).profile().endpoints().iterator().next().uri().getHost());
else return (((ServiceEndpoint)res).profile().runtime().hostedOn());
}
public static List<ServiceEndpoint> querySEByHostname(String category,String platformName,String hostname){
log.debug("Querying Service Endpoints by hostname [category : {} , platformName : {}, currentScope : {}, hostname {} ]",category,platformName,ScopeUtils.getCurrentScope(),hostname);
SimpleQuery query = queryFor(ServiceEndpoint.class);
query.addCondition("$resource/Profile/Category/text() eq '"+category+"'")
.addCondition("$resource/Profile/Platform/Name/text() eq '"+platformName+"'")
.addCondition("$resource/Profile/Runtime/HostedOn/text() eq '"+hostname+"'");
// .setResult("$resource/Profile/AccessPoint");
DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
return client.submit(query);
}
public static List<String> queryById(String id) {
DiscoveryClient<String> client = client();
String queryString ="declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; "+
"for $profiles in collection('/db/Profiles')//Document/Data/ic:Profile/Resource "+
"where $profiles/ID/text() eq '"+id+"'"+
" return $profiles";
return client.submit(new QueryBox(queryString));
}
public static ServiceEndpoint querySEById(String id) {
SimpleQuery query = queryFor(ServiceEndpoint.class);
query.addCondition("$resource/ID/text() eq '"+id+"'");
DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
return client.submit(query).get(0);
}
public static String registerService(ServiceEndpoint toRegister) {
RegistryPublisher rp=RegistryPublisherFactory.create();
if(log.isDebugEnabled())
Resources.print(toRegister);
Resource r=rp.create(toRegister);
return r.id();
}
public static String addToScope(ServiceEndpoint se,GCoreEndpoint gc, String targetScope) throws ResourcesCreationException, InvalidScopeException {
log.trace("Publishing GC [ID : {}, Sc : {}, Sn {}, GHN-ID : {} ], SE [ID : {}, name : {}] to Scope {} from Scope {}",
gc.id(), gc.profile().serviceClass(),gc.profile().serviceName(),gc.profile().ghnId(),
se.id(),se.profile().name(),targetScope,ScopeUtils.getCurrentScope());
AddResourcesParameters params=new AddResourcesParameters();
ResourceList resourceList=new ResourceList();
ArrayList<ResourceItem> list=new ArrayList<>();
ResourceItem ghnItem=new ResourceItem();
ghnItem.id=gc.profile().ghnId();
ghnItem.type="GHN";
list.add(ghnItem);
ResourceItem geItem=new ResourceItem();
geItem.id=gc.id();
geItem.type="RunningInstance";
list.add(geItem);
ResourceItem seItem=new ResourceItem();
seItem.id=se.id();
seItem.type="RuntimeResource";
list.add(seItem);
resourceList.setResource(list);
params.setTargetScope(targetScope);
params.setResources(resourceList);
RMBinderLibrary library=Proxies.binderService().build();
return library.addResources(params);
}
public static String decryptString(String toDecrypt){
try{
return StringEncrypter.getEncrypter().decrypt(toDecrypt);
}catch(Exception e) {
throw new RuntimeException("Unable to decrypt : "+toDecrypt,e);
}
}
public static String encryptString(String toEncrypt){
try{
return StringEncrypter.getEncrypter().encrypt(toEncrypt);
}catch(Exception e) {
throw new RuntimeException("Unable to encrypt : "+toEncrypt,e);
}
}
public static ServiceEndpoint update(ServiceEndpoint toUpdate) {
RegistryPublisher rp=RegistryPublisherFactory.create();
try{
return rp.update(toUpdate);
}catch(RuntimeException t) {
log.warn("Unable to update resource {} ",toUpdate.id());
log.debug("Updated resource is {} ",marshal(toUpdate));
throw t;
}
}
public static ServiceEndpoint updateAndWait(ServiceEndpoint toUpdate) {
boolean equals=true;
boolean timeoutReached=false;
long timeout=LocalConfiguration.getTTL(LocalConfiguration.IS_REGISTRATION_TIMEOUT);
log.trace("Going to update {}. Timeout is {} ",toUpdate.id(),timeout);
String toUpdateString=marshal(toUpdate);
update(toUpdate);
long updateTime=System.currentTimeMillis();
String updatedString=null;
do {
try {
Thread.sleep(500);
} catch (InterruptedException e) {
// Restore the interrupt flag instead of swallowing the interruption silently.
Thread.currentThread().interrupt();
}
updatedString=marshal(querySEById(toUpdate.id()));
equals=toUpdateString.equals(updatedString);
timeoutReached=(System.currentTimeMillis()-updateTime)>timeout;
}while(!equals&&(!timeoutReached));
if(timeoutReached) log.warn("Timeout reached. Check if {} is updated ",toUpdate.id());
return querySEById(toUpdate.id());
}
public static String marshal(Resource res) {
ByteArrayOutputStream stream=new ByteArrayOutputStream();
Resources.marshal(res, stream);
return stream.toString();
}
public static HashSet<String> getSiblingsScopesInResource(Resource res,String scope){
HashSet<String> toReturn=new HashSet<String>();
String parent=ScopeUtils.getParentScope(scope);
if (parent!=null)
for(String resourceScope:res.scopes().asCollection())
if(!resourceScope.equals(scope)) {
String resourceScopeParent=ScopeUtils.getParentScope(resourceScope);
if((resourceScopeParent!=null)&&(resourceScopeParent.equals(parent))) toReturn.add(resourceScope);
}
return toReturn;
}
}
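
A hypothetical sketch of how these helpers are typically combined (illustration only): the category and platform names below are invented placeholders (the real values come from LocalConfiguration), and the gCube scope is assumed to be set.

import java.util.List;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.spatial.data.sdi.engine.impl.is.ISUtils;
public class ISUtilsUsageSketch {
    public static void main(String[] args) {
        // "Gis" / "geonetwork" are illustrative values only.
        List<ServiceEndpoint> endpoints = ISUtils.queryForServiceEndpoints("Gis", "geonetwork");
        for (ServiceEndpoint se : endpoints) {
            AccessPoint access = se.profile().accessPoints().iterator().next();
            // Credentials are stored encrypted in the IS profile; ISUtils wraps StringEncrypter.
            String password = ISUtils.decryptString(access.password());
            System.out.println(se.profile().runtime().hostedOn() + " -> user " + access.username()
                    + (password.isEmpty() ? " (no password)" : " (password decrypted)"));
        }
    }
}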

View File

@@ -0,0 +1,209 @@
package org.gcube.spatial.data.sdi.engine.impl.is;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.common.resources.gcore.common.Platform;
import org.gcube.spatial.data.sdi.LocalConfiguration;
import org.gcube.spatial.data.sdi.NetUtils;
import org.gcube.spatial.data.sdi.engine.impl.faults.InvalidServiceDefinitionException;
import org.gcube.spatial.data.sdi.model.credentials.AccessType;
import org.gcube.spatial.data.sdi.model.credentials.Credentials;
import org.gcube.spatial.data.sdi.model.health.Level;
import org.gcube.spatial.data.sdi.model.health.Status;
import org.gcube.spatial.data.sdi.model.service.ThreddsDescriptor;
import org.gcube.spatial.data.sdi.model.service.Version;
import org.gcube.spatial.data.sdi.model.services.ServiceDefinition;
import org.gcube.spatial.data.sdi.model.services.ServiceDefinition.Type;
import org.gcube.spatial.data.sdi.model.services.ThreddsDefinition;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class ThreddsRetriever extends AbstractISModule {
// @Override
// public ThreddsDescriptor getObject() throws ConfigurationNotFoundException{
// log.info("Loading Thredds information from IS. Current Scope is {} ",ScopeUtils.getCurrentScope());
//
// // Try to look for GCore Endpoints first
//
//// List<GCoreEndpoint> gCoreEndpoints=getGcoreEndpoints();
//// if(gCoreEndpoints!=null&&!gCoreEndpoints.isEmpty()){
//// log.debug("Found {} GCore Endpoints ",gCoreEndpoints.size());
//// for(int i=0;i<gCoreEndpoints.size();i++){
//// GCoreEndpoint endpoint=gCoreEndpoints.get(i);
//// try{
//// log.debug("Checking element {}, ID {} ",i,endpoint.id());
//// ThreddsConfiguration toReturn=translate(endpoint);
//// if(toReturn==null) throw new Exception("Translated configuration was null");
//// return toReturn;
//// }catch(Throwable t){
//// log.warn("Unable to read retrieved gCore endpoint ID "+endpoint.id(),t);
//// }
//// }
//// }
//
// // Code is executed only if no configuration has been retrieved from gCore endpoints
//
// List<ServiceEndpoint> threddsSE=getServiceEndpoints();
// if(threddsSE!=null&&!threddsSE.isEmpty()){
// log.debug("Found {} Service Endpoints ",threddsSE.size());
// for(int i=0;i<threddsSE.size();i++){
// ServiceEndpoint endpoint=threddsSE.get(i);
// try{
// log.debug("Checking element {}, ID {} ",i,endpoint.id());
// ThreddsDescriptor toReturn=translate(endpoint);
// if(toReturn==null) throw new Exception("Translated configuration was null");
// return toReturn;
// }catch(Throwable t){
// log.warn("Unable to read retrieved service endpoint ID "+endpoint.id(),t);
// }
// }
// }
//
// throw new ConfigurationNotFoundException("Thredds has not been found in current scope "+ScopeUtils.getCurrentScope());
//
// }
// @Override
// public ServiceHealthReport getHealthReport() {
// List<Status> checkStatuses=new ArrayList<>();
// try {
//
// log.trace("Checking Thredds heatlh under context {} ",ScopeUtils.getCurrentScope());
// //Check if existing
// List<GCoreEndpoint> gCoreEndpoints=getGcoreEndpoints();
// List<ServiceEndpoint> serviceEndpoints=getServiceEndpoints();
// log.debug("Found {} GC Endpoints and {} SE Endpoints",gCoreEndpoints.size(),serviceEndpoints.size());
//
// if(serviceEndpoints.isEmpty())
// if(gCoreEndpoints.isEmpty())checkStatuses.add(new Status("No Thredds service found in context "+ScopeUtils.getCurrentScope(),Level.ERROR));
// else checkStatuses.add(new Status("Unregistered Thredds instances found. Check following messages",Level.ERROR));
//
// //For each GC check for missing SE
// for(GCoreEndpoint gc:gCoreEndpoints) {
// String hostname= gc.profile().endpoints().iterator().next().uri().getHost();
// if(ISUtils.getGCEByHostname(hostname, serviceEndpoints)==null) {
// String msg="Found unregistered Thredds hosted on "+hostname;
// log.debug(msg);
// checkStatuses.add(new Status(msg,Level.WARNING));
// }
// }
//
// for(ServiceEndpoint se: serviceEndpoints) {
//
// }
// }catch(Throwable t) {
// log.error("Unable to perform checks", t);
// checkStatuses.add(new Status("Internal error while checking Thredds Status.",Level.ERROR));
// }
// return new ServiceHealthReport(checkStatuses);
// }
@Override
protected String getGCoreEndpointServiceClass() {
return LocalConfiguration.getProperty(LocalConfiguration.THREDDS_GE_SERVICE_CLASS);
}
@Override
protected String getGCoreEndpointServiceName() {
return LocalConfiguration.getProperty(LocalConfiguration.THREDDS_GE_SERVICE_NAME);
}
@Override
protected String getManagedServiceType() {
return "THREDDS";
}
@Override
protected String getServiceEndpointCategory() {
return LocalConfiguration.getProperty(LocalConfiguration.THREDDS_SE_CATEGORY);
}
@Override
protected String getServiceEndpointPlatformName() {
return LocalConfiguration.getProperty(LocalConfiguration.THREDDS_SE_PLATFORM);
}
@Override
protected String getServiceEndpointAccessPointName() {
return LocalConfiguration.getProperty(LocalConfiguration.THREDDS_SE_ENDPOINT_NAME);
}
@Override
protected boolean isSmartGearsMandatory() {
return LocalConfiguration.getFlag(LocalConfiguration.THREDDS_MANDATORY_SG);
}
@Override
protected List<Status> performInstanceCheck(ServiceEndpoint se) {
ArrayList<Status> toReturn=new ArrayList<Status>();
String hostname=se.profile().runtime().hostedOn();
try {
log.trace("Checking thredds hosted on {} ",hostname);
// NOTE: assumes the default public catalog is reachable over plain HTTP on the registered hostname
String publicCatalogUrl="http://"+hostname+"/thredds/catalog/public/netcdf/catalog.html";
if(!NetUtils.isUp(publicCatalogUrl))
toReturn.add(new Status("Unreachable default THREDDS catalog at "+publicCatalogUrl,Level.ERROR));
else {
//
//
// DataTransferClient client=DataTransferClient.getInstanceByEndpoint(hostname);
// //check SIS plugin presence
// boolean found=false;
// for(PluginDescription desc: client.getDestinationCapabilities().getAvailablePlugins())
// if(desc.getId().equals("SIS/GEOTK")) {
// found=true;
// break;
// }
// if(!found) toReturn.add(new Status("SIS/GEOTK plugin for DataTransfer service not found on "+hostname, Level.ERROR));
}
}catch(IOException e) {
String msg="Unable to check thredds instance hosted on "+hostname;
log.warn(msg);
log.debug("Exception was ",e);
toReturn.add(new Status(msg,Level.WARNING));
// } catch (DataTransferException e) {
// String msg="DataTransfer not found in host "+hostname;
// log.warn(msg);
// log.debug("Exception was ",e);
// toReturn.add(new Status(msg,Level.ERROR));
}
return toReturn;
}
// private static final ThreddsConfiguration translate(GCoreEndpoint toTranslate){
////
//// ThreddsConfiguration toReturn=new ThreddsConfiguration(version, baseEndpoint, accessibleCredentials);
// return null;
// }
private static final ThreddsDescriptor translate(ServiceEndpoint toTranslate){
Platform platform=toTranslate.profile().platform();
Version version=new Version(platform.version(),platform.minorVersion(),platform.revisionVersion());
AccessPoint access=toTranslate.profile().accessPoints().iterator().next();
Credentials credentials=new Credentials(access.username(),access.password(),AccessType.ADMIN);
return new ThreddsDescriptor(version, access.address(), Collections.singletonList(credentials));
}
@Override
protected void checkDefinitionForServiceType(ServiceDefinition definition)
throws InvalidServiceDefinitionException {
// TODO Auto-generated method stub
}
@Override
protected void checkDefinitionType(ServiceDefinition definition) throws InvalidServiceDefinitionException {
if(!definition.getType().equals(Type.THREDDS)||!(definition instanceof ThreddsDefinition))
throw new InvalidServiceDefinitionException("Invalid service type [expected "+Type.THREDDS+"]. Definition was "+definition);
}
}

View File

@@ -0,0 +1,16 @@
package org.gcube.spatial.data.sdi.engine.impl.metadata;
public class CommonMetadataPieces {
public static final String resourceIdentifier="<identifier xmlns=\"http://www.isotc211.org/2005/gmd\" xmlns:gco=\"http://www.isotc211.org/2005/gco\"> <MD_Identifier>"
+ " <code><gco:CharacterString>%s</gco:CharacterString></code>"
+ " </MD_Identifier>"
+ "</identifier>";
public static final String fileIdentifier="<fileIdentifier xmlns=\"http://www.isotc211.org/2005/gmd\" xmlns:gco=\"http://www.isotc211.org/2005/gco\">"
+ " <gco:CharacterString>%s</gco:CharacterString> </fileIdentifier>";
}

View File

@@ -0,0 +1,15 @@
package org.gcube.spatial.data.sdi.engine.impl.metadata;
public class GenericTemplates {
public static class ThreddsCatalogTemplate{
public static final String FILENAME="thredds_catalog.ftlx";
public static final String CATALOG_PATH="CatalogPath";
public static final String LOCATION="Location";
public static final String DATASET_SCAN_NAME="DataSetScanName";
public static final String DATASET_SCAN_ID="DataSetScanID";
public static final String AUTHORITY_URL="AuthorityURL";
}
}
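
These constants are the FreeMarker variable names expected by the thredds_catalog.ftlx template. A hypothetical sketch of feeding them to generateFromTemplate, implemented by MetadataTemplateManagerImpl further below; the parameter values and the template folder path are invented for illustration.

import java.io.File;
import java.util.HashMap;
import java.util.Map;
import org.gcube.spatial.data.sdi.engine.impl.metadata.GenericTemplates.ThreddsCatalogTemplate;
import org.gcube.spatial.data.sdi.engine.impl.metadata.MetadataTemplateManagerImpl;
public class ThreddsCatalogGenerationSketch {
    public static void main(String[] args) throws Exception {
        Map<String, String> params = new HashMap<>();
        params.put(ThreddsCatalogTemplate.CATALOG_PATH, "public/netcdf/catalog.xml");
        params.put(ThreddsCatalogTemplate.LOCATION, "/data/public/netcdf");
        params.put(ThreddsCatalogTemplate.DATASET_SCAN_NAME, "Public NetCDF datasets");
        params.put(ThreddsCatalogTemplate.DATASET_SCAN_ID, "public-netcdf");
        params.put(ThreddsCatalogTemplate.AUTHORITY_URL, "http://example.org/thredds");
        MetadataTemplateManagerImpl manager = new MetadataTemplateManagerImpl();
        // The folder must contain the .ftlx templates shipped with the service (invented path).
        manager.init(new File("/path/to/templates"));
        File catalog = manager.generateFromTemplate(params, ThreddsCatalogTemplate.FILENAME);
        System.out.println("Generated catalog at " + catalog.getAbsolutePath());
    }
}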

View File

@@ -0,0 +1,85 @@
package org.gcube.spatial.data.sdi.engine.impl.metadata;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.List;
import java.util.UUID;
import javax.xml.transform.TransformerException;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.gcube.common.resources.gcore.utils.XPathHelper;
import org.gcube.spatial.data.sdi.engine.impl.metadata.templates.AbstractMetadataTemplate.InsertionPoint;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class MetadataHandler {
private Document document=null;
private String metaUUID=null;
private XPathHelper helper;
public MetadataHandler(File xmlFile){
// Get document owner
Element documentNode=null;
try{
InputStream inputStream= new FileInputStream(xmlFile);
Reader reader = new InputStreamReader(inputStream,"UTF-8");
InputSource is = new InputSource(reader);
documentNode = MetadataUtils.docBuilder.parse(is).getDocumentElement();
document=documentNode.getOwnerDocument();
helper=MetadataUtils.getHelper(document);
// document = (Document)xpath.evaluate("/", inputSource, XPathConstants.NODE);
}catch(Exception e){
// throw e;
throw new RuntimeException("Unable to fix : unable to get Document",e);
}
}
public String getUUID() throws SAXException, IOException{
//Set | get meta UUID
if(metaUUID==null){
log.debug("Managing metadata ID.. ");
List<String> metaUUIDList=helper.evaluate("//gmd:fileIdentifier/gco:CharacterString/text()");
if(metaUUIDList.isEmpty()){
metaUUID=UUID.randomUUID().toString();
log.debug("Stting uuid {} ",metaUUID);
MetadataUtils.addContent("gmd:MD_Metadata",document,String.format(CommonMetadataPieces.fileIdentifier, metaUUID),helper,MetadataUtils.Position.first_child);
}else {
metaUUID=metaUUIDList.get(0);
log.debug("Found meta UUID {} ",metaUUID);
}
}
return metaUUID;
}
public void addContent(String content, InsertionPoint insertion) throws SAXException, IOException{
MetadataUtils.addContent(insertion.getElementReference(), document, content, helper, insertion.getPosition());
}
public File writeOut() throws IOException, TransformerException{
DOMSource source = new DOMSource(document);
File output=File.createTempFile("meta_", ".xml");
output.createNewFile();
StreamResult result = new StreamResult(output);
MetadataUtils.transformer.transform(source, result);
return output;
}
}
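
A hypothetical round-trip sketch: load an ISO metadata file, make sure it carries a fileIdentifier, insert a fragment after gmd:identificationInfo (the same insertion point used by the THREDDS online-resources template), and write the result to a temporary file. The input path and the inserted fragment are invented for illustration.

import java.io.File;
import org.gcube.spatial.data.sdi.engine.impl.metadata.MetadataHandler;
import org.gcube.spatial.data.sdi.engine.impl.metadata.MetadataUtils.Position;
import org.gcube.spatial.data.sdi.engine.impl.metadata.templates.AbstractMetadataTemplate.InsertionPoint;
public class MetadataHandlerUsageSketch {
    public static void main(String[] args) throws Exception {
        MetadataHandler handler = new MetadataHandler(new File("/tmp/metadata.xml"));
        // Generates and injects a random UUID if the document has no gmd:fileIdentifier yet.
        String uuid = handler.getUUID();
        InsertionPoint afterIdentification = new InsertionPoint(Position.sibling_after, "//gmd:identificationInfo");
        handler.addContent("<gmd:distributionInfo xmlns:gmd=\"http://www.isotc211.org/2005/gmd\"/>", afterIdentification);
        File result = handler.writeOut();
        System.out.println("Metadata " + uuid + " written to " + result.getAbsolutePath());
    }
}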

View File

@@ -0,0 +1,181 @@
package org.gcube.spatial.data.sdi.engine.impl.metadata;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.annotation.PostConstruct;
import javax.inject.Singleton;
import javax.xml.transform.TransformerException;
import org.apache.commons.io.IOUtils;
import org.gcube.smartgears.context.application.ApplicationContext;
import org.gcube.spatial.data.sdi.LocalConfiguration;
import org.gcube.spatial.data.sdi.engine.TemplateManager;
import org.gcube.spatial.data.sdi.engine.impl.metadata.templates.AbstractMetadataTemplate;
import org.gcube.spatial.data.sdi.engine.impl.metadata.templates.InvalidTemplateInvocationException;
import org.gcube.spatial.data.sdi.engine.impl.metadata.templates.ThreddsOnlineTemplate;
import org.gcube.spatial.data.sdi.model.metadata.TemplateCollection;
import org.gcube.spatial.data.sdi.model.metadata.TemplateDescriptor;
import org.gcube.spatial.data.sdi.model.metadata.TemplateInvocation;
import freemarker.template.Configuration;
import freemarker.template.Template;
import freemarker.template.TemplateExceptionHandler;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Singleton
public class MetadataTemplateManagerImpl implements TemplateManager {
private static Configuration cfg;
// private static ArrayList<TemplateDescriptor> templateDescriptors=new ArrayList<>();
private static HashMap<String,AbstractMetadataTemplate> availableMetadataTemplates=new HashMap<>();
private static TemplateCollection metadataTemplateDescriptors;
@PostConstruct
public void defaultInit() {
log.info("Default Initialization");
init(LocalConfiguration.getTemplateConfigurationObject());
}
public void init(Object configurationObject) {
log.debug("Configuring with {} ",configurationObject);
// Create the Configuration instance and specify up to which FreeMarker
// version (here 2.3.25) non-backward-compatible fixes should be applied.
// See the Configuration JavaDoc for details.
cfg = new Configuration(Configuration.VERSION_2_3_25);
if(configurationObject instanceof ApplicationContext){
log.debug("Configuration is Context : {} ",configurationObject);
cfg.setServletContextForTemplateLoading(((ApplicationContext)configurationObject).application(),
LocalConfiguration.getProperty(LocalConfiguration.TEMPLATE_FOLDER));
}else if (configurationObject instanceof File){
try{
cfg.setDirectoryForTemplateLoading((File)configurationObject);
}catch(IOException e){
throw new RuntimeException(e);
}
}else throw new RuntimeException("Invalid configuration object");
// Set the preferred charset template files are stored in. UTF-8 is
// a good choice in most applications:
cfg.setDefaultEncoding("UTF-8");
// Sets how errors will appear.
// During web page *development* TemplateExceptionHandler.HTML_DEBUG_HANDLER is better.
cfg.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER);
// Don't log exceptions inside FreeMarker that it will throw at you anyway:
cfg.setLogTemplateExceptions(false);
// availableTemplates.add(new TemplateDescriptor("THREDDS-ONLINE", "Thredds online resources", "Template for online resources exposed by thredds.", "http://sdi-d4s.d4science.org"));
ThreddsOnlineTemplate tpl=new ThreddsOnlineTemplate();
availableMetadataTemplates.put(tpl.getDescriptor().getId(), tpl);
ArrayList<TemplateDescriptor> metadataTemplates=new ArrayList();
metadataTemplates.add(tpl.getDescriptor());
log.debug("Loaded metadata templates : ");
for(TemplateDescriptor desc: metadataTemplates)
log.debug(desc.toString());
metadataTemplateDescriptors=new TemplateCollection(new HashSet<>(metadataTemplates));
}
@Override
public TemplateCollection getAvailableMetadataTemplates() {
return metadataTemplateDescriptors;
}
@Override
public TemplateApplicationReport applyMetadataTemplates(File original, Set<TemplateInvocation> invocations) throws IOException, TransformerException {
log.debug("Applying template invocations {} to {} ",invocations,original.getAbsolutePath());
TemplateApplicationReport report=new TemplateApplicationReport();
report.setRequestedInvocations(invocations);
HashSet<String> appliedTemplates=new HashSet<>();
MetadataHandler handler=new MetadataHandler(original);
for(TemplateInvocation invocation:invocations){
try{
applyTemplate(original, invocation,handler);
appliedTemplates.add(invocation.getToInvokeTemplateID());
}catch(Throwable t){
log.warn("Unable to apply template {} ",invocation.getToInvokeTemplateID());
log.debug("StackTrace : ",t);
}
}
log.debug("Writing out result..");
report.setGeneratedFilePath(handler.writeOut().getAbsolutePath());
report.setAppliedTemplates(appliedTemplates);
return report;
}
private static void applyTemplate(File original,TemplateInvocation invocation,MetadataHandler handler) throws Exception{
log.debug("Instantiating "+invocation);
AbstractMetadataTemplate tpl=availableMetadataTemplates.get(invocation.getToInvokeTemplateID());
if(tpl==null) throw new InvalidTemplateInvocationException("Template with ID "+invocation.getToInvokeTemplateID()+" was not found");
Writer out=null;
try{
Template temp = cfg.getTemplate(tpl.getFileName());
ByteArrayOutputStream baos=new ByteArrayOutputStream();
out=new OutputStreamWriter(baos,StandardCharsets.UTF_8); // match the charset used when reading the buffer back below
temp.process(tpl.getInstantiationRequest(handler,invocation), out);
out.flush();
String instantiatedTemplate= baos.toString(StandardCharsets.UTF_8.toString());
//apply to original
handler.addContent(instantiatedTemplate, tpl.getInsertionPoint());
} catch (Exception e) {
log.error("Unable to apply template. Invocation was {} ",invocation,e);
throw e;
}finally{
if(out!=null)
IOUtils.closeQuietly(out);
}
}
@Override
public File generateFromTemplate(Map<String, String> parameters, String template) throws Exception {
Writer out=null;
try{
log.info("Generating from template {}. Parameters are {} ",template,parameters);
Template temp = cfg.getTemplate(template);
File toReturn=File.createTempFile(template, ".xml");
out=new FileWriter(toReturn);
temp.process(parameters, out);
out.flush();
return toReturn;
} catch (Exception e) {
log.error("Unable to apply template{}. Parameters were {} ",template,parameters,e);
throw e;
}finally{
if(out!=null)
IOUtils.closeQuietly(out);
}
}
}

View File

@@ -0,0 +1,126 @@
package org.gcube.spatial.data.sdi.engine.impl.metadata;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import org.gcube.common.resources.gcore.utils.XPathHelper;
import org.gcube.portlets.user.uriresolvermanager.UriResolverManager;
import org.gcube.portlets.user.uriresolvermanager.exception.IllegalArgumentException;
import org.gcube.portlets.user.uriresolvermanager.exception.UriResolverMapException;
import org.gcube.spatial.data.sdi.utils.ScopeUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
public class MetadataUtils {
public static Transformer transformer =null;
public static DocumentBuilder docBuilder =null;
static HashMap<String,String> namespaces=new HashMap<String,String>();
static{
try{
DocumentBuilderFactory factory=DocumentBuilderFactory.newInstance();
factory.setNamespaceAware(true);
docBuilder = factory.newDocumentBuilder();
TransformerFactory transformerFactory = TransformerFactory.newInstance();
transformer = transformerFactory.newTransformer();
namespaces.put("gmd", "http://www.isotc211.org/2005/gmd");
namespaces.put("gco", "http://www.isotc211.org/2005/gco");
namespaces.put("fra", "http://www.cnig.gouv.fr/2005/fra");
namespaces.put("xlink", "http://www.w3.org/1999/xlink");
namespaces.put("gml", "http://www.opengis.net/gml");
namespaces.put("xsi", "http://www.w3.org/2001/XMLSchema-instance");
namespaces.put("gmi", "http://www.isotc211.org/2005/gmi");
namespaces.put("gmx", "http://www.isotc211.org/2005/gmx");
}catch(Exception e){
throw new RuntimeException("Unable to init Fixer ",e);
}
}
public static enum Position{
sibling_after,sibling_before,first_child,last_child,replace
}
public static XPathHelper getHelper(Node root){
XPathHelper toReturn =new XPathHelper(root);
for(Entry<String,String> entry:namespaces.entrySet())
toReturn.addNamespace(entry.getKey(), entry.getValue());
return toReturn;
}
public static String readFile(String path) throws IOException{
byte[] encoded = Files.readAllBytes(Paths.get(path));
return new String(encoded);
}
public static String getGisLinkByUUID(String uuid) throws UriResolverMapException, IllegalArgumentException {
Map<String,String> params=new HashMap();
params.put("scope", ScopeUtils.getCurrentScope());
params.put("gis-UUID", uuid);
UriResolverManager resolver = new UriResolverManager("GIS");
String toReturn= resolver.getLink(params, false);
return toReturn;
}
public static void addContent(String path, Document doc, String toAddContent, XPathHelper documentHelper,Position position) throws SAXException, IOException{
NodeList nodelist=documentHelper.evaluateForNodes(path);
if(nodelist==null||nodelist.getLength()==0) throw new RuntimeException("Path "+path+" not found in document");
// if(nodelist.getLength()>1) throw new RuntimeException("Invalid Path "+path+"."+nodelist.getLength()+" entries found");
Node targetNode=nodelist.item(0);
Document online=docBuilder.parse(new ByteArrayInputStream(toAddContent.getBytes()));
Node toAdd=doc.importNode(online.getDocumentElement(), true);
switch(position){
case first_child: {
targetNode.insertBefore(toAdd, targetNode.getFirstChild());
break;
}
case last_child:{targetNode.appendChild(toAdd);
break;}
case replace : {
Node parent=targetNode.getParentNode();
parent.replaceChild(toAdd, targetNode);
break;
}
case sibling_after :{
Node currentlyNext=targetNode.getNextSibling();
Node parent=targetNode.getParentNode();
if(currentlyNext!=null)parent.insertBefore(toAdd, currentlyNext);
else parent.appendChild(toAdd);
break;
}
case sibling_before :{
Node parent=targetNode.getParentNode();
parent.insertBefore(toAdd, targetNode);
break;
}
}
}
}
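
A hypothetical sketch of the namespace-aware XPath helper applied to an ISO 19139 fragment; the XML snippet is invented, and the expression is the same one MetadataHandler uses to locate the metadata UUID.

import java.io.ByteArrayInputStream;
import org.gcube.common.resources.gcore.utils.XPathHelper;
import org.gcube.spatial.data.sdi.engine.impl.metadata.MetadataUtils;
import org.w3c.dom.Document;
public class MetadataUtilsUsageSketch {
    public static void main(String[] args) throws Exception {
        String xml = "<gmd:MD_Metadata xmlns:gmd=\"http://www.isotc211.org/2005/gmd\""
                + " xmlns:gco=\"http://www.isotc211.org/2005/gco\">"
                + "<gmd:fileIdentifier><gco:CharacterString>abc-123</gco:CharacterString></gmd:fileIdentifier>"
                + "</gmd:MD_Metadata>";
        Document doc = MetadataUtils.docBuilder.parse(new ByteArrayInputStream(xml.getBytes("UTF-8")));
        XPathHelper helper = MetadataUtils.getHelper(doc);
        // Same expression used by MetadataHandler.getUUID() to read gmd:fileIdentifier.
        System.out.println(helper.evaluate("//gmd:fileIdentifier/gco:CharacterString/text()"));
    }
}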

View File

@@ -0,0 +1,15 @@
package org.gcube.spatial.data.sdi.engine.impl.metadata;
import java.util.Set;
import org.gcube.spatial.data.sdi.model.metadata.TemplateInvocation;
import lombok.Data;
@Data
public class TemplateApplicationReport {
private String generatedFilePath;
private Set<String> appliedTemplates;
private Set<TemplateInvocation> requestedInvocations;
}

View File

@@ -0,0 +1,49 @@
package org.gcube.spatial.data.sdi.engine.impl.metadata.templates;
import java.util.List;
import org.gcube.spatial.data.sdi.engine.impl.metadata.MetadataHandler;
import org.gcube.spatial.data.sdi.engine.impl.metadata.MetadataUtils.Position;
import org.gcube.spatial.data.sdi.model.ParameterType;
import org.gcube.spatial.data.sdi.model.metadata.TemplateDescriptor;
import org.gcube.spatial.data.sdi.model.metadata.TemplateInvocation;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
@Getter
@AllArgsConstructor
public abstract class AbstractMetadataTemplate<T> {
@Getter
@Setter
@ToString
@AllArgsConstructor
public static class InsertionPoint{
private Position position;
private String elementReference;
}
private String fileName;
private InsertionPoint insertionPoint;
private TemplateDescriptor descriptor;
public abstract T getInstantiationRequest(MetadataHandler original, TemplateInvocation invocation) throws InvalidTemplateInvocationException,Exception;
protected String getParameter(String parameterName, List<ParameterType> parameters, boolean mandatory,String defaultValue)throws InvalidTemplateInvocationException{
//if collection not empty look for it
if(!(parameters==null || parameters.isEmpty()))
for(ParameterType param:parameters)
if(param.getName().equals(parameterName)) return param.getValue();
//nothing found..
if(mandatory) throw new InvalidTemplateInvocationException("Missing parameter "+parameterName+".");
else return defaultValue;
}
}

View File

@@ -0,0 +1,36 @@
package org.gcube.spatial.data.sdi.engine.impl.metadata.templates;
public class InvalidTemplateInvocationException extends Exception {
/**
*
*/
private static final long serialVersionUID = 8921135030360257131L;
public InvalidTemplateInvocationException() {
super();
// TODO Auto-generated constructor stub
}
public InvalidTemplateInvocationException(String message, Throwable cause, boolean enableSuppression,
boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
// TODO Auto-generated constructor stub
}
public InvalidTemplateInvocationException(String message, Throwable cause) {
super(message, cause);
// TODO Auto-generated constructor stub
}
public InvalidTemplateInvocationException(String message) {
super(message);
// TODO Auto-generated constructor stub
}
public InvalidTemplateInvocationException(Throwable cause) {
super(cause);
// TODO Auto-generated constructor stub
}
}

View File

@@ -0,0 +1,66 @@
package org.gcube.spatial.data.sdi.engine.impl.metadata.templates;
import java.util.ArrayList;
import org.gcube.spatial.data.sdi.engine.impl.metadata.MetadataHandler;
import org.gcube.spatial.data.sdi.engine.impl.metadata.MetadataUtils;
import org.gcube.spatial.data.sdi.engine.impl.metadata.MetadataUtils.Position;
import org.gcube.spatial.data.sdi.engine.impl.metadata.templates.ThreddsOnlineTemplate.ThreddsOnlineRequest;
import org.gcube.spatial.data.sdi.model.ParameterType;
import org.gcube.spatial.data.sdi.model.metadata.TemplateDescriptor;
import org.gcube.spatial.data.sdi.model.metadata.TemplateInvocation;
import org.gcube.spatial.data.sdi.model.metadata.TemplateInvocationBuilder;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.ToString;
public class ThreddsOnlineTemplate extends AbstractMetadataTemplate<ThreddsOnlineRequest> {
private static ArrayList<ParameterType> EXPECTED_PARAMETERS=new ArrayList<ParameterType>();
private static String TEMPLATE_ID=TemplateInvocationBuilder.THREDDS_ONLINE.ID;
private static String TEMPLATE_NAME="Thredds Online Resources";
private static String FILENAME="ThreddsOnlineResources.ftlx";
private static InsertionPoint INSERTION=new InsertionPoint(Position.sibling_after, "//gmd:identificationInfo");
private static TemplateDescriptor DESCRIPTOR;
static {
EXPECTED_PARAMETERS.add(new ParameterType(TemplateInvocationBuilder.THREDDS_ONLINE.CATALOG, "The thredds catalog name"));
EXPECTED_PARAMETERS.add(new ParameterType(TemplateInvocationBuilder.THREDDS_ONLINE.FILENAME, "The dataset's file name"));
EXPECTED_PARAMETERS.add(new ParameterType(TemplateInvocationBuilder.THREDDS_ONLINE.HOSTNAME, "Thredds hostname"));
DESCRIPTOR=new TemplateDescriptor(TEMPLATE_ID, TEMPLATE_NAME, "Template for online resources exposed by thredds.", "http://sdi-d4s.d4science.org",EXPECTED_PARAMETERS);
}
public ThreddsOnlineTemplate() {
super(FILENAME, INSERTION, DESCRIPTOR);
}
@Getter
@AllArgsConstructor
@ToString
public static class ThreddsOnlineRequest{
private String hostname;
private String catalog;
private String filename;
private String gisViewerLink;
}
@Override
public ThreddsOnlineRequest getInstantiationRequest(MetadataHandler handler, TemplateInvocation invocation) throws InvalidTemplateInvocationException,Exception{
if(!invocation.getToInvokeTemplateID().equals(TEMPLATE_ID)) throw new InvalidTemplateInvocationException("Invalid template ID : "+invocation.getToInvokeTemplateID());
String filename =getParameter(TemplateInvocationBuilder.THREDDS_ONLINE.FILENAME, invocation.getTemplateParameters(), true, null);
String catalog =getParameter(TemplateInvocationBuilder.THREDDS_ONLINE.CATALOG, invocation.getTemplateParameters(), true, null);
String hostname =getParameter(TemplateInvocationBuilder.THREDDS_ONLINE.HOSTNAME, invocation.getTemplateParameters(), true, null);
String uuid=handler.getUUID();
String gisLink=MetadataUtils.getGisLinkByUUID(uuid);
return new ThreddsOnlineRequest(hostname, catalog, filename, gisLink);
}
}

View File

@@ -0,0 +1,25 @@
package org.gcube.spatial.data.sdi.model;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import lombok.Setter;
@Getter
@Setter
@NoArgsConstructor
@RequiredArgsConstructor
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
public class CatalogDescriptor {
@NonNull
private String catalogURL;
}

View File

@@ -0,0 +1,41 @@
package org.gcube.spatial.data.sdi.model;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import org.gcube.spatial.data.sdi.model.service.GeoNetworkDescriptor;
import org.gcube.spatial.data.sdi.model.service.GeoServerDescriptor;
import org.gcube.spatial.data.sdi.model.service.ThreddsDescriptor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import lombok.Setter;
import lombok.ToString;
@Getter
@Setter
@ToString
@NoArgsConstructor
@RequiredArgsConstructor
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
public class ScopeConfiguration {
@NonNull
private String contextName;
@NonNull
private List<GeoNetworkDescriptor> geonetworkConfiguration;
@NonNull
private List<GeoServerDescriptor> geoserverClusterConfiguration;
@NonNull
private List<ThreddsDescriptor> threddsConfiguration;
}

View File

@@ -0,0 +1,32 @@
package org.gcube.spatial.data.sdi.model.health;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import lombok.AllArgsConstructor;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import lombok.ToString;
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
@ToString
@EqualsAndHashCode
public class HealthReport {
private Level overallStatus;
private String context;
private ServiceHealthReport thredds;
private ServiceHealthReport geonetwork;
private ServiceHealthReport geoserverCluster;
}

Some files were not shown because too many files have changed in this diff