commit 5d2f69f50a96f42356e68238f6867a0a034a0e65 Author: francesco.mangiacrapa Date: Mon Dec 5 14:48:40 2022 +0100 Copied read ProjectEdit and ProjectView DTOs from Data-Viewer diff --git a/.classpath b/.classpath new file mode 100644 index 0000000..eb5bbae --- /dev/null +++ b/.classpath @@ -0,0 +1,38 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..b83d222 --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +/target/ diff --git a/.project b/.project new file mode 100644 index 0000000..5bd80f0 --- /dev/null +++ b/.project @@ -0,0 +1,23 @@ + + + geoportal-data-mapper_no_git + + + + + + org.eclipse.jdt.core.javabuilder + + + + + org.eclipse.m2e.core.maven2Builder + + + + + + org.eclipse.jdt.core.javanature + org.eclipse.m2e.core.maven2Nature + + diff --git a/.settings/org.eclipse.core.resources.prefs b/.settings/org.eclipse.core.resources.prefs new file mode 100644 index 0000000..29abf99 --- /dev/null +++ b/.settings/org.eclipse.core.resources.prefs @@ -0,0 +1,6 @@ +eclipse.preferences.version=1 +encoding//src/main/java=UTF-8 +encoding//src/main/resources=UTF-8 +encoding//src/test/java=UTF-8 +encoding//src/test/resources=UTF-8 +encoding/=UTF-8 diff --git a/.settings/org.eclipse.jdt.core.prefs b/.settings/org.eclipse.jdt.core.prefs new file mode 100644 index 0000000..2f5cc74 --- /dev/null +++ b/.settings/org.eclipse.jdt.core.prefs @@ -0,0 +1,8 @@ +eclipse.preferences.version=1 +org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.8 +org.eclipse.jdt.core.compiler.compliance=1.8 +org.eclipse.jdt.core.compiler.problem.enablePreviewFeatures=disabled +org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning +org.eclipse.jdt.core.compiler.problem.reportPreviewFeatures=ignore +org.eclipse.jdt.core.compiler.release=disabled +org.eclipse.jdt.core.compiler.source=1.8 diff --git a/.settings/org.eclipse.m2e.core.prefs b/.settings/org.eclipse.m2e.core.prefs new file mode 100644 
index 0000000..f897a7f --- /dev/null +++ b/.settings/org.eclipse.m2e.core.prefs @@ -0,0 +1,4 @@ +activeProfiles= +eclipse.preferences.version=1 +resolveWorkspaceProjects=true +version=1 diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..44412c8 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,9 @@ + +# Changelog + +All notable changes to this project will be documented in this file. +This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [v1.0.0-SNAPSHOT] - 2022-11-30 + +- First release diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 0000000..1932b4c --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,311 @@ +#European Union Public Licence V.1.1 + +##*EUPL © the European Community 2007* + + +This **European Union Public Licence** (the **“EUPL”**) applies to the Work or Software +(as defined below) which is provided under the terms of this Licence. Any use of +the Work, other than as authorised under this Licence is prohibited (to the +extent such use is covered by a right of the copyright holder of the Work). + +The Original Work is provided under the terms of this Licence when the Licensor +(as defined below) has placed the following notice immediately following the +copyright notice for the Original Work: + +**Licensed under the EUPL V.1.1** + +or has expressed by any other mean his willingness to license under the EUPL. + + + +##1. Definitions + +In this Licence, the following terms have the following meaning: + +- The Licence: this Licence. + +- The Original Work or the Software: the software distributed and/or + communicated by the Licensor under this Licence, available as Source Code and + also as Executable Code as the case may be. + +- Derivative Works: the works or software that could be created by the Licensee, + based upon the Original Work or modifications thereof. 
This Licence does not + define the extent of modification or dependence on the Original Work required + in order to classify a work as a Derivative Work; this extent is determined by + copyright law applicable in the country mentioned in Article 15. + +- The Work: the Original Work and/or its Derivative Works. + +- The Source Code: the human-readable form of the Work which is the most + convenient for people to study and modify. + +- The Executable Code: any code which has generally been compiled and which is + meant to be interpreted by a computer as a program. + +- The Licensor: the natural or legal person that distributes and/or communicates + the Work under the Licence. + +- Contributor(s): any natural or legal person who modifies the Work under the + Licence, or otherwise contributes to the creation of a Derivative Work. + +- The Licensee or “You”: any natural or legal person who makes any usage of the + Software under the terms of the Licence. + +- Distribution and/or Communication: any act of selling, giving, lending, + renting, distributing, communicating, transmitting, or otherwise making + available, on-line or off-line, copies of the Work or providing access to its + essential functionalities at the disposal of any other natural or legal + person. + + + +##2. Scope of the rights granted by the Licence + +The Licensor hereby grants You a world-wide, royalty-free, non-exclusive, +sub-licensable licence to do the following, for the duration of copyright vested +in the Original Work: + +- use the Work in any circumstance and for all usage, reproduce the Work, modify +- the Original Work, and make Derivative Works based upon the Work, communicate +- to the public, including the right to make available or display the Work or +- copies thereof to the public and perform publicly, as the case may be, the +- Work, distribute the Work or copies thereof, lend and rent the Work or copies +- thereof, sub-license rights in the Work or copies thereof. 
+ +Those rights can be exercised on any media, supports and formats, whether now +known or later invented, as far as the applicable law permits so. + +In the countries where moral rights apply, the Licensor waives his right to +exercise his moral right to the extent allowed by law in order to make effective +the licence of the economic rights here above listed. + +The Licensor grants to the Licensee royalty-free, non exclusive usage rights to +any patents held by the Licensor, to the extent necessary to make use of the +rights granted on the Work under this Licence. + + + +##3. Communication of the Source Code + +The Licensor may provide the Work either in its Source Code form, or as +Executable Code. If the Work is provided as Executable Code, the Licensor +provides in addition a machine-readable copy of the Source Code of the Work +along with each copy of the Work that the Licensor distributes or indicates, in +a notice following the copyright notice attached to the Work, a repository where +the Source Code is easily and freely accessible for as long as the Licensor +continues to distribute and/or communicate the Work. + + + +##4. Limitations on copyright + +Nothing in this Licence is intended to deprive the Licensee of the benefits from +any exception or limitation to the exclusive rights of the rights owners in the +Original Work or Software, of the exhaustion of those rights or of other +applicable limitations thereto. + + + +##5. Obligations of the Licensee + +The grant of the rights mentioned above is subject to some restrictions and +obligations imposed on the Licensee. Those obligations are the following: + +Attribution right: the Licensee shall keep intact all copyright, patent or +trademarks notices and all notices that refer to the Licence and to the +disclaimer of warranties. The Licensee must include a copy of such notices and a +copy of the Licence with every copy of the Work he/she distributes and/or +communicates. 
The Licensee must cause any Derivative Work to carry prominent +notices stating that the Work has been modified and the date of modification. + +Copyleft clause: If the Licensee distributes and/or communicates copies of the +Original Works or Derivative Works based upon the Original Work, this +Distribution and/or Communication will be done under the terms of this Licence +or of a later version of this Licence unless the Original Work is expressly +distributed only under this version of the Licence. The Licensee (becoming +Licensor) cannot offer or impose any additional terms or conditions on the Work +or Derivative Work that alter or restrict the terms of the Licence. + +Compatibility clause: If the Licensee Distributes and/or Communicates Derivative +Works or copies thereof based upon both the Original Work and another work +licensed under a Compatible Licence, this Distribution and/or Communication can +be done under the terms of this Compatible Licence. For the sake of this clause, +“Compatible Licence” refers to the licences listed in the appendix attached to +this Licence. Should the Licensee’s obligations under the Compatible Licence +conflict with his/her obligations under this Licence, the obligations of the +Compatible Licence shall prevail. + +Provision of Source Code: When distributing and/or communicating copies of the +Work, the Licensee will provide a machine-readable copy of the Source Code or +indicate a repository where this Source will be easily and freely available for +as long as the Licensee continues to distribute and/or communicate the Work. + +Legal Protection: This Licence does not grant permission to use the trade names, +trademarks, service marks, or names of the Licensor, except as required for +reasonable and customary use in describing the origin of the Work and +reproducing the content of the copyright notice. + + + +##6. 
Chain of Authorship + +The original Licensor warrants that the copyright in the Original Work granted +hereunder is owned by him/her or licensed to him/her and that he/she has the +power and authority to grant the Licence. + +Each Contributor warrants that the copyright in the modifications he/she brings +to the Work are owned by him/her or licensed to him/her and that he/she has the +power and authority to grant the Licence. + +Each time You accept the Licence, the original Licensor and subsequent +Contributors grant You a licence to their contributions to the Work, under the +terms of this Licence. + + + +##7. Disclaimer of Warranty + +The Work is a work in progress, which is continuously improved by numerous +contributors. It is not a finished work and may therefore contain defects or +“bugs” inherent to this type of software development. + +For the above reason, the Work is provided under the Licence on an “as is” basis +and without warranties of any kind concerning the Work, including without +limitation merchantability, fitness for a particular purpose, absence of defects +or errors, accuracy, non-infringement of intellectual property rights other than +copyright as stated in Article 6 of this Licence. + +This disclaimer of warranty is an essential part of the Licence and a condition +for the grant of any rights to the Work. + + + +##8. Disclaimer of Liability + +Except in the cases of wilful misconduct or damages directly caused to natural +persons, the Licensor will in no event be liable for any direct or indirect, +material or moral, damages of any kind, arising out of the Licence or of the use +of the Work, including without limitation, damages for loss of goodwill, work +stoppage, computer failure or malfunction, loss of data or any commercial +damage, even if the Licensor has been advised of the possibility of such +damage. However, the Licensor will be liable under statutory product liability +laws as far such laws apply to the Work. + + + +##9. 
Additional agreements + +While distributing the Original Work or Derivative Works, You may choose to +conclude an additional agreement to offer, and charge a fee for, acceptance of +support, warranty, indemnity, or other liability obligations and/or services +consistent with this Licence. However, in accepting such obligations, You may +act only on your own behalf and on your sole responsibility, not on behalf of +the original Licensor or any other Contributor, and only if You agree to +indemnify, defend, and hold each Contributor harmless for any liability incurred +by, or claims asserted against such Contributor by the fact You have accepted +any such warranty or additional liability. + + + +##10. Acceptance of the Licence + +The provisions of this Licence can be accepted by clicking on an icon “I agree” +placed under the bottom of a window displaying the text of this Licence or by +affirming consent in any other similar way, in accordance with the rules of +applicable law. Clicking on that icon indicates your clear and irrevocable +acceptance of this Licence and all of its terms and conditions. + +Similarly, you irrevocably accept this Licence and all of its terms and +conditions by exercising any rights granted to You by Article 2 of this Licence, +such as the use of the Work, the creation by You of a Derivative Work or the +Distribution and/or Communication by You of the Work or copies thereof. + + + +##11. Information to the public + +In case of any Distribution and/or Communication of the Work by means of +electronic communication by You (for example, by offering to download the Work +from a remote location) the distribution channel or media (for example, a +website) must at least provide to the public the information requested by the +applicable law regarding the Licensor, the Licence and the way it may be +accessible, concluded, stored and reproduced by the Licensee. + + + +##12. 
Termination of the Licence + +The Licence and the rights granted hereunder will terminate automatically upon +any breach by the Licensee of the terms of the Licence. + +Such a termination will not terminate the licences of any person who has +received the Work from the Licensee under the Licence, provided such persons +remain in full compliance with the Licence. + + + +##13. Miscellaneous + +Without prejudice of Article 9 above, the Licence represents the complete +agreement between the Parties as to the Work licensed hereunder. + +If any provision of the Licence is invalid or unenforceable under applicable +law, this will not affect the validity or enforceability of the Licence as a +whole. Such provision will be construed and/or reformed so as necessary to make +it valid and enforceable. + +The European Commission may publish other linguistic versions and/or new +versions of this Licence, so far this is required and reasonable, without +reducing the scope of the rights granted by the Licence. New versions of the +Licence will be published with a unique version number. + +All linguistic versions of this Licence, approved by the European Commission, +have identical value. Parties can take advantage of the linguistic version of +their choice. + + + +##14. Jurisdiction + +Any litigation resulting from the interpretation of this License, arising +between the European Commission, as a Licensor, and any Licensee, will be +subject to the jurisdiction of the Court of Justice of the European Communities, +as laid down in article 238 of the Treaty establishing the European Community. + +Any litigation arising between Parties, other than the European Commission, and +resulting from the interpretation of this License, will be subject to the +exclusive jurisdiction of the competent court where the Licensor resides or +conducts its primary business. + + + +##15. 
Applicable Law + +This Licence shall be governed by the law of the European Union country where +the Licensor resides or has his registered office. + +This licence shall be governed by the Belgian law if: + +- a litigation arises between the European Commission, as a Licensor, and any +- Licensee; the Licensor, other than the European Commission, has no residence +- or registered office inside a European Union country. + + +--- + + +##Appendix + + +**“Compatible Licences”** according to article 5 EUPL are: + + +- GNU General Public License (GNU GPL) v. 2 + +- Open Software License (OSL) v. 2.1, v. 3.0 + +- Common Public License v. 1.0 + +- Eclipse Public License v. 1.0 + +- Cecill v. 2.0 diff --git a/README.md b/README.md new file mode 100644 index 0000000..73aaa79 --- /dev/null +++ b/README.md @@ -0,0 +1,55 @@ +# GeoPortal Data Mapper + +GeoPortal Data Mapper is a common library used by GUI Data-Entry/Data-Viewer components developed for the D4Science Geoportal service + +## Built With + +* [OpenJDK](https://openjdk.java.net/) - The JDK used +* [Maven](https://maven.apache.org/) - Dependency Management + +## Documentation + +N/A + +## Change log + +See the [Releases](https://code-repo.d4science.org/gCubeSystem/geoportal-data-mapper/releases) + +## Authors + +* **Francesco Mangiacrapa** ([ORCID](https://orcid.org/0000-0002-6528-664X)) Computer Scientist at [ISTI-CNR Infrascience Group](http://nemis.isti.cnr.it/groups/infrascience) + +## License + +This project is licensed under the EUPL V.1.1 License - see the [LICENSE.md](LICENSE.md) file for details. + + +## About the gCube Framework +This software is part of the [gCubeFramework](https://www.gcube-system.org/ "gCubeFramework"): an +open-source software toolkit used for building and operating Hybrid Data +Infrastructures enabling the dynamic deployment of Virtual Research Environments +by favouring the realisation of reuse oriented policies. 
+ +The projects leading to this software have received funding from a series of European Union programmes including: + +- the Sixth Framework Programme for Research and Technological Development + - DILIGENT (grant no. 004260). +- the Seventh Framework Programme for research, technological development and demonstration + - D4Science (grant no. 212488); + - D4Science-II (grant no.239019); + - ENVRI (grant no. 283465); + - EUBrazilOpenBio (grant no. 288754); + - iMarine(grant no. 283644). +- the H2020 research and innovation programme + - BlueBRIDGE (grant no. 675680); + - EGIEngage (grant no. 654142); + - ENVRIplus (grant no. 654182); + - PARTHENOS (grant no. 654119); + - SoBigData (grant no. 654024); + - DESIRA (grant no. 818194); + - ARIADNEplus (grant no. 823914); + - RISIS2 (grant no. 824091); + - PerformFish (grant no. 727610); + - AGINFRAplus (grant no. 731001). + + diff --git a/descriptor.xml b/descriptor.xml new file mode 100644 index 0000000..0487853 --- /dev/null +++ b/descriptor.xml @@ -0,0 +1,30 @@ + + servicearchive + + tar.gz + + / + + + / + true + + README.md + LICENSE.md + profile.xml + CHANGELOG.md + + 755 + true + + + + + target/${build.finalName}.${project.packaging} + /${artifactId} + + + diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..552e922 --- /dev/null +++ b/pom.xml @@ -0,0 +1,183 @@ + + 4.0.0 + + maven-parent + org.gcube.tools + 1.1.0 + + + org.gcube.application + geoportal-data-mapper + 1.0.0-SNAPSHOT + jar + + + scm:git:https://code-repo.d4science.org/gCubeSystem/${project.artifactId}.git + scm:git:https://code-repo.d4science.org/gCubeSystem/${project.artifactId}.git + https://code-repo.d4science.org/gCubeSystem/${project.artifactId} + + + + + Francesco Mangiacrapa + francesco.mangiacrapa@isti.cnr.it + CNR Pisa, Istituto di Scienza e Tecnologie dell'Informazione "A. 
Faedo" + + architect + developer + + + + + + 1.8 + 1.8 + UTF-8 + + 4.11 + 2.6.2 + 2.9.0 + + + + + + org.gcube.distribution + maven-portal-bom + 3.7.0-SNAPSHOT + pom + import + + + + + + + + com.google.gwt + gwt-user + ${gwt.version} + provided + + + + org.gcube.portlets.widgets + metadata-profile-form-builder-widget + [2.0.0-SNAPSHOT, 3.0.0-SNAPSHOT) + compile + + + + com.fasterxml.jackson.core + jackson-core + + + com.fasterxml.jackson.core + jackson-databind + + + de.grundid.opendatalab + geojson-jackson + + + + + + org.gcube.application + geoportal-data-common + [2.0.0-SNAPSHOT, 3.0.0-SNAPSHOT) + compile + + + + com.google.guava + guava + + + + com.google.code.gson + gson + ${gson.version} + + + + com.liferay.portal + portal-service + + provided + + + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + test + + + + junit + junit + ${junit.version} + test + + + + + + + + src/main/java + + **/*.* + + + + + src/main/resources + + **/*.* + + + + + + + + + + org.apache.maven.plugins + maven-surefire-plugin + + true + + + + + org.apache.maven.plugins + maven-assembly-plugin + + + descriptor.xml + + + + + servicearchive + install + + single + + + + + + + + diff --git a/profile.xml b/profile.xml new file mode 100644 index 0000000..7de4156 --- /dev/null +++ b/profile.xml @@ -0,0 +1,25 @@ + + + + Portlet + + ${project.description} + Application + ${project.artifactId} + 1.0.0 + + + ${project.artifactId} + ${project.description} + + ${project.groupId} + ${project.artifactId} + ${project.version} + + + ${project.build.finalName}.${project.packaging} + + + + + diff --git a/src/main/java/org/gcube/application/GeoportalDataCommon.gwt.xml b/src/main/java/org/gcube/application/GeoportalDataCommon.gwt.xml new file mode 100644 index 0000000..36449b0 --- /dev/null +++ b/src/main/java/org/gcube/application/GeoportalDataCommon.gwt.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/src/main/java/org/gcube/application/geoportaldatamapper/GcubeProfilesMetadataForUCD.java 
b/src/main/java/org/gcube/application/geoportaldatamapper/GcubeProfilesMetadataForUCD.java new file mode 100644 index 0000000..b09fcad --- /dev/null +++ b/src/main/java/org/gcube/application/geoportaldatamapper/GcubeProfilesMetadataForUCD.java @@ -0,0 +1,68 @@ +package org.gcube.application.geoportaldatamapper; + +import java.io.Serializable; +import java.util.List; + +import org.gcube.application.geoportalcommon.shared.geoportal.config.GcubeProfileDV; +import org.gcube.portlets.widgets.mpformbuilder.shared.metadata.MetaDataProfileBean; + +public class GcubeProfilesMetadataForUCD implements Serializable { + + /** + * + */ + private static final long serialVersionUID = -8953445402356442872L; + private String profileID; + private GcubeProfileDV gcubeProfile; + private List listMetadataProfileBean; + + public GcubeProfilesMetadataForUCD() { + + } + + public GcubeProfilesMetadataForUCD(String profileID, GcubeProfileDV gcubeProfile, + List listMetadataProfileBean) { + + this.profileID = profileID; + this.gcubeProfile = gcubeProfile; + this.listMetadataProfileBean = listMetadataProfileBean; + } + + public String getProfileID() { + return profileID; + } + + public GcubeProfileDV getGcubeProfile() { + return gcubeProfile; + } + + public List getListMetadataProfileBean() { + return listMetadataProfileBean; + } + + public void setProfileID(String profileID) { + this.profileID = profileID; + } + + public void setGcubeProfile(GcubeProfileDV gcubeProfile) { + this.gcubeProfile = gcubeProfile; + } + + public void setListMetadataProfileBean(List listMetadataProfileBean) { + this.listMetadataProfileBean = listMetadataProfileBean; + } + + @Override + public String toString() { + StringBuilder builder = new StringBuilder(); + builder.append("GcubeProfilesMetadataForUCD [profileID="); + builder.append(profileID); + builder.append(", gcubeProfile="); + builder.append(gcubeProfile); + builder.append(", listMetadataProfileBean="); + builder.append(listMetadataProfileBean); + 
builder.append("]"); + return builder.toString(); + } + +} diff --git a/src/main/java/org/gcube/application/geoportaldatamapper/GcubeProfilesPerUCDIdCache.java b/src/main/java/org/gcube/application/geoportaldatamapper/GcubeProfilesPerUCDIdCache.java new file mode 100644 index 0000000..5a54219 --- /dev/null +++ b/src/main/java/org/gcube/application/geoportaldatamapper/GcubeProfilesPerUCDIdCache.java @@ -0,0 +1,162 @@ +/** + * + */ + +package org.gcube.application.geoportaldatamapper; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import org.gcube.application.geoportal.common.model.useCaseDescriptor.HandlerDeclaration; +import org.gcube.application.geoportal.common.model.useCaseDescriptor.UseCaseDescriptor; +import org.gcube.application.geoportalcommon.ConvertToDataValueObjectModel; +import org.gcube.application.geoportalcommon.geoportal.GeoportalClientCaller; +import org.gcube.application.geoportalcommon.geoportal.UseCaseDescriptorCaller; +import org.gcube.application.geoportalcommon.shared.geoportal.ConfigurationDV; +import org.gcube.application.geoportalcommon.shared.geoportal.config.GcubeProfileDV; +import org.gcube.application.geoportalcommon.shared.geoportal.ucd.GEOPORTAL_CONFIGURATION_TYPE; +import org.gcube.application.geoportalcommon.shared.geoportal.ucd.GEOPORTAL_DATA_HANDLER; +import org.gcube.application.geoportalcommon.shared.geoportal.ucd.HandlerDeclarationDV; +import org.gcube.common.scope.api.ScopeProvider; +import org.gcube.portlets.widgets.mpformbuilder.server.MetadataProfileFormBuilderServiceImpl; +import org.gcube.portlets.widgets.mpformbuilder.shared.metadata.MetaDataProfileBean; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import com.google.common.cache.RemovalListener; +import 
com.google.common.cache.RemovalNotification; + +/** + * The Class GcubeProfilesPerUCDIdCache. + * + * @author Francesco Mangiacrapa at ISTI-CNR francesco.mangiacrapa@isti.cnr.it + * + * Oct 12, 2022 + */ +public class GcubeProfilesPerUCDIdCache { + + private static Logger LOG = LoggerFactory.getLogger(GcubeProfilesPerUCDIdCache.class); + + private static LoadingCache>> gCubeProfilesPerProfileIDCache; + + static { + + CacheLoader>> loader = new CacheLoader>>() { + @Override + public LinkedHashMap> load(String scope) throws Exception { + LOG.info("Loading the cache for scope: " + scope); + return loadGcubeProfilesForUCDIdInTheScope(scope); + } + }; + + RemovalListener>> removalListener = new RemovalListener>>() { + public void onRemoval( + RemovalNotification>> removal) { + LOG.info(GcubeProfilesPerUCDIdCache.class.getSimpleName() + " cache expired"); + } + }; + + gCubeProfilesPerProfileIDCache = CacheBuilder.newBuilder().maximumSize(1000) + .expireAfterWrite(30, TimeUnit.MINUTES).removalListener(removalListener).build(loader); + + LOG.info("cache instancied"); + } + + /** + * Gets the. + * + * @param scope the scope + * @return the geonetwork instance + * @throws Exception + */ + public static LinkedHashMap> get(String scope) throws Exception { + LOG.info("GcubeProfilesPerUCDIdCache get - called in the scope: " + scope); + LinkedHashMap> map = gCubeProfilesPerProfileIDCache.get(scope); + LOG.info("GcubeProfilesPerUCDIdCache returning map null? " + (map==null)); + return map; + } + + /** + * Load geonetwork instance. 
+ * + * @param scope the scope + * @return the linked hash map + * @throws Exception the exception + */ + private static LinkedHashMap> loadGcubeProfilesForUCDIdInTheScope( + String scope) throws Exception { + LOG.info("loadGcubeProfilesForUCDIdInTheScope called in the scope: " + scope); + + LinkedHashMap> linkedMap_UCDId_gCubeProfiles = new LinkedHashMap>(); + try { + ScopeProvider.instance.set(scope); + UseCaseDescriptorCaller clientUCD = GeoportalClientCaller.useCaseDescriptors(); + List listUCDs = clientUCD.getList(); + LOG.debug("listUCDs: " + listUCDs); + + for (UseCaseDescriptor ucd : listUCDs) { + + LOG.info("Loaded UCD for ID: " + ucd.getId()); + String profileID = ucd.getId(); + GEOPORTAL_DATA_HANDLER theHandler = GEOPORTAL_DATA_HANDLER.geoportal_data_entry; + List handlers = ucd.getHandlersByType(theHandler.getType()); + + if (handlers.size() == 0) { + LOG.warn("No handler " + theHandler + "found into UCD " + ucd.getId() + ", continue..."); + continue; + } + + // Loading Handler gcube_profiles + HandlerDeclaration dataEntryHandler = handlers.get(0); + HandlerDeclarationDV handlerGcubeProfiles = ConvertToDataValueObjectModel + .toHandlerDeclarationDV(dataEntryHandler, ucd, GEOPORTAL_CONFIGURATION_TYPE.gcube_profiles); + + LOG.debug("Handler " + GEOPORTAL_CONFIGURATION_TYPE.gcube_profiles + " for PROFILE_ID: " + ucd.getId()); + LOG.debug("" + handlerGcubeProfiles); + + ConfigurationDV config = handlerGcubeProfiles.getConfiguration(); + // List of gCube Profiles defined in the UCD + List listGcubeProfiles = (List) config.getConfiguration(); + LOG.debug("List of GcubeProfileDV are: " + listGcubeProfiles); + + List listProfilesBean = new ArrayList(); + // Loading Metadata Profile from IS + MetadataProfileFormBuilderServiceImpl metaProfileBUilder = new MetadataProfileFormBuilderServiceImpl(); + + for (GcubeProfileDV gcubeProfileDV : listGcubeProfiles) { + ScopeProvider.instance.set(scope); + GcubeProfilesMetadataForUCD gCubeProfileMetadataForUCD = new 
GcubeProfilesMetadataForUCD(); + List listProfiles = metaProfileBUilder.getProfilesInTheScopeForName(scope, + gcubeProfileDV.getGcubeSecondaryType(), gcubeProfileDV.getGcubeName()); + + String key = gcubeProfileDV.getGcubeSecondaryType() + gcubeProfileDV.getGcubeName(); + LOG.debug("for key: " + key + " readi profiles: " + listGcubeProfiles); + gCubeProfileMetadataForUCD.setGcubeProfile(gcubeProfileDV); + gCubeProfileMetadataForUCD.setListMetadataProfileBean(listProfiles); + listProfilesBean.add(gCubeProfileMetadataForUCD); + + } + linkedMap_UCDId_gCubeProfiles.put(ucd.getId(), listProfilesBean); + + if (LOG.isDebugEnabled()) { + for (String key : linkedMap_UCDId_gCubeProfiles.keySet()) { + LOG.debug("For key '" + key + "' got profiles: " + linkedMap_UCDId_gCubeProfiles.get(key)); + } + } + + } + + } catch (Exception e) { + String erroMsg = "Error occurred on preloadgCubeProfilesForUCDs: "; + LOG.error(erroMsg, e); + } + + LOG.info("GcubeProfilesPerUCDIdCache loaded with " + linkedMap_UCDId_gCubeProfiles.size() + " item/s"); + return linkedMap_UCDId_gCubeProfiles; + } +} diff --git a/src/main/java/org/gcube/application/geoportaldatamapper/Geoportal_JSON_Mapper.java b/src/main/java/org/gcube/application/geoportaldatamapper/Geoportal_JSON_Mapper.java new file mode 100644 index 0000000..a160bc1 --- /dev/null +++ b/src/main/java/org/gcube/application/geoportaldatamapper/Geoportal_JSON_Mapper.java @@ -0,0 +1,966 @@ +package org.gcube.application.geoportaldatamapper; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.function.Function; + +import org.bson.Document; +import org.gcube.application.geoportal.client.utils.Serialization; +import org.gcube.application.geoportal.common.model.document.access.Access; +import org.gcube.application.geoportal.common.model.document.access.AccessPolicy; +import org.gcube.application.geoportalcommon.ConvertToDataValueObjectModel; +import 
org.gcube.application.geoportalcommon.geoportal.access.GeportalCheckAccessPolicy; +import org.gcube.application.geoportalcommon.geoportal.serdes.Payload; +import org.gcube.application.geoportalcommon.shared.geoportal.config.FilePathDV; +import org.gcube.application.geoportalcommon.shared.geoportal.config.GcubeProfileDV; +import org.gcube.application.geoportalcommon.shared.geoportal.materialization.GCubeSDIViewerLayerDV; +import org.gcube.application.geoportalcommon.shared.geoportal.materialization.innerobject.BBOXDV; +import org.gcube.application.geoportalcommon.shared.geoportal.materialization.innerobject.FilesetDV; +import org.gcube.application.geoportalcommon.shared.geoportal.materialization.innerobject.PayloadDV; +import org.gcube.application.geoportalcommon.shared.geoportal.project.ProjectDV; +import org.gcube.application.geoportalcommon.shared.geoportal.ucd.GEOPORTAL_CONFIGURATION_TYPE; +import org.gcube.application.geoportalcommon.shared.geoportal.view.ProjectView; +import org.gcube.application.geoportalcommon.shared.geoportal.view.SectionView; +import org.gcube.application.geoportalcommon.shared.geoportal.view.SubDocumentView; +import org.gcube.application.geoportaldatamapper.shared.MetaDataProfileBeanExt; +import org.gcube.application.geoportaldatamapper.shared.ProjectEdit; +import org.gcube.portlets.widgets.mpformbuilder.shared.metadata.MetaDataProfileBean; +import org.gcube.portlets.widgets.mpformbuilder.shared.metadata.MetadataFieldWrapper; +import org.json.JSONArray; +import org.json.JSONException; +import org.json.JSONObject; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; +import com.jayway.jsonpath.JsonPath; +import com.jayway.jsonpath.spi.json.JsonOrgJsonProvider; + +/** + * The Class Geoportal_JSON_Mapper. 
+ * + * @author Francesco Mangiacrapa at ISTI-CNR francesco.mangiacrapa@isti.cnr.it + * + * Nov 30, 2022 + */ +public class Geoportal_JSON_Mapper { + + public static final String _PAYLOADS = "_payloads"; + + public static final String _MATERIALIZATIONS = "_materializations"; + + private static final Logger LOG = LoggerFactory.getLogger(Geoportal_JSON_Mapper.class); + + public static final String FILESET = "fileset"; + + public static final String _OGC_LINKS = "_ogcLinks"; + + public static final String _BBOX = "_bbox"; + + public static final String _TYPE = "_type"; + + public static final String JSON_$_POINTER = "$"; + + public static final String _THEDOCUMENT = "_theDocument"; + + /** + * Load project edit. + * + * @param theProjectDV the the project DV + * @param scope the scope + * @param username the username + * @return the project edit + * @throws Exception the exception + */ + public static ProjectEdit loadProjectEdit(ProjectDV theProjectDV, String scope, String username) throws Exception { + LOG.debug("loadProjectEdit called"); + + String theWholeProjectAsJSON = theProjectDV.getTheDocument().getDocumentAsJSON(); + + LOG.debug("theProjectDV as JSON: " + theWholeProjectAsJSON); + LOG.trace("theProjectDV as MAP: " + theProjectDV.getTheDocument().getDocumentAsMap()); + + ProjectEdit projectEdit = new ProjectEdit(); + projectEdit.setTheProjectDV(theProjectDV); + + LinkedHashMap> linkedMap_UCDId_gCubeProfiles = GcubeProfilesPerUCDIdCache + .get(scope); + + // NO UCD defined, applying default + if (linkedMap_UCDId_gCubeProfiles.size() == 0) { + LOG.warn("No " + GEOPORTAL_CONFIGURATION_TYPE.gcube_profiles + " found in the UCD"); + LOG.info("Applying default business logic to display the project"); + SectionView sectionView = new SectionView(); + sectionView.setSectionTitle("Document"); + Document sectionDoc = Document.parse(theProjectDV.getTheDocument().getDocumentAsJSON()); + + // Creating one Project with one SectionView and SubDocumentView + String 
wholeSectionDoc = sectionDoc.toJson(); + + List listFiles = new ArrayList(); + + // Reading Fileset _payloads + String filesetJSONPath = String.format("%s.%s", JSON_$_POINTER, FILESET); + List listPayloads = readPayloadsForFileset(filesetJSONPath, wholeSectionDoc); + FilesetDV filesetDV = new FilesetDV(); + filesetDV.setName(FILESET); + for (Payload payload : listPayloads) { + PayloadDV payloadDV = ConvertToDataValueObjectModel.toPayloadDV(payload); + filesetDV.addPayloadDV(payloadDV); + listFiles.add(filesetDV); + } + } + + List listProfilesBean = linkedMap_UCDId_gCubeProfiles + .get(theProjectDV.getProfileID()); + + com.jayway.jsonpath.Configuration configuration = com.jayway.jsonpath.Configuration.builder() + .jsonProvider(new JsonOrgJsonProvider()).build(); + + List listOfProfilesBeanExt = new ArrayList(); + + // Reading the Project according to list of Profile defined in the UCD + for (GcubeProfilesMetadataForUCD gcubeProfileMetaForUCD : listProfilesBean) { + + GcubeProfileDV gcubeProfileDV = gcubeProfileMetaForUCD.getGcubeProfile(); + LOG.debug("\n\n##### Creating the section: " + gcubeProfileDV.getSectionTitle()); + LOG.debug("\n\nThe profile is: " + gcubeProfileDV); + // Building JSON/section full PATH and section name + String sectionJSONPath = ""; + String parentPathFromProfile = gcubeProfileDV.getParentName() == null ? "" : gcubeProfileDV.getParentName(); + String theSectionName = gcubeProfileDV.getSectionName(); + + if (theSectionName.compareTo(JSON_$_POINTER) == 0 || theSectionName.compareTo(JSON_$_POINTER + ".") == 0) { + sectionJSONPath = JSON_$_POINTER; + theSectionName = ""; + } else { + sectionJSONPath = String.format("%s%s", + parentPathFromProfile.endsWith(".") ? 
parentPathFromProfile : parentPathFromProfile + ".", + theSectionName); + } + + LOG.debug("The sectionJSONPath is: " + sectionJSONPath); + + JsonPath theSectionJsonPath = null; + Object data = null; + try { + theSectionJsonPath = JsonPath.compile(sectionJSONPath); + data = theSectionJsonPath.read(theWholeProjectAsJSON, configuration); + } catch (Exception e) { + LOG.warn("Error on searching the section " + sectionJSONPath + " in the JSON Project: " + + theWholeProjectAsJSON); + continue; + } + + LOG.debug("Data is instace of: " + data.getClass()); + LOG.debug("data to string: " + data.toString()); + + // Splitting the General Document in bson.Document according to list of + // GcubeProfiles + List listBSONDocument = new ArrayList(); + if (data instanceof org.json.JSONObject) { + String jsonString = data.toString(); + LOG.debug("the JSON to string: " + jsonString); + Document sectionDoc = Document.parse(jsonString); + listBSONDocument.add(sectionDoc); + + } else if (data instanceof org.json.JSONArray) { + org.json.JSONArray dataArray = (org.json.JSONArray) data; + for (int i = 0; i < dataArray.length(); i++) { + String jsonString = dataArray.get(i).toString(); + LOG.debug("the array " + i + " JSON to string: " + jsonString); + Document sectionDoc = Document.parse(jsonString); + listBSONDocument.add(sectionDoc); + } + } + + LOG.debug("Result for " + gcubeProfileDV.getSectionName() + " is: " + listBSONDocument); + List theProfileBeans = gcubeProfileMetaForUCD.getListMetadataProfileBean(); + MetaDataProfileBean theProfileBean = theProfileBeans.get(0); + + // For each bson.Document filling the MetaDataProfileBean and its file + for (int i = 0; i < listBSONDocument.size(); i++) { + + LOG.trace("DOCUMENT number " + i + " of the section: " + theProfileBean.getTitle()); + MetaDataProfileBeanExt theProfileBeanExt = new MetaDataProfileBeanExt(); + theProfileBeanExt.setCategories(theProfileBean.getCategories()); + theProfileBeanExt.setTitle(theProfileBean.getTitle()); + 
theProfileBeanExt.setType(theProfileBean.getType()); + + Document fromSectionDoc = listBSONDocument.get(i); + LOG.debug("\n\nNew section DOC for index " + i + " is: " + + new JSONObject(fromSectionDoc.toJson()).toString(2)); + // Creating the corresponding MetaDataProfileBeanExt for each section + + // Reading policy and license statically + // eg. "_access":{"_policy":"OPEN","_license":"CC0-1.0"}} + Document docAccess = null; + Access access = null; + try { + docAccess = fromSectionDoc.get("_access", Document.class); + LOG.trace("docAccess is: " + docAccess); + access = new Access(); + access.setPolicy(AccessPolicy.valueOf(docAccess.getString("_policy"))); + access.setLicense(docAccess.getString("_license")); + // Access. access.get("_policy"); + // access.get("_license"); + System.out.println("access is: " + access); + } catch (Exception e) { + LOG.warn("No " + AccessPolicy.class.getSimpleName() + "found in the section " + + fromSectionDoc.toJson()); + LOG.debug("No AccessPolicy.class.getSimpleName(): ", e); + } + + List cloneListOfMFW = cloneList(theProfileBean.getMetadataFields()); + + for (MetadataFieldWrapper metadataField : cloneListOfMFW) { + + String theFieldName = metadataField.getFieldId() != null ? 
metadataField.getFieldId() + : metadataField.getFieldName(); + LOG.debug("reading theFieldName: " + theFieldName); + Object theOBJFieldValue = fromSectionDoc.get(theFieldName); + metadataField.setCurrentValue(theOBJFieldValue + ""); + + if (access != null) { + if (theFieldName.equalsIgnoreCase("policy")) { + metadataField.setCurrentValue(access.getPolicy().name()); + } else if (theFieldName.equalsIgnoreCase("licenseID")) { + metadataField.setCurrentValue(access.getLicense()); + } + } + } + + theProfileBeanExt.setMetadataFields(new ArrayList(cloneListOfMFW)); + LOG.debug("Metadata fields are: " + theProfileBeanExt.getMetadataFields()); + + // Reading filePaths + List filePaths = gcubeProfileDV.getFilePaths(); + + // READING fileset* field ACCORDING TO filePaths OF THE 'gcubeProfiles' CONFIG + if (filePaths != null) { + String fromSectionDocJSON = fromSectionDoc.toJson(); + List listFiles = new ArrayList(); +// List listLayers = new ArrayList(); + for (FilePathDV filePath : filePaths) { + + // Reading Fileset _payloads + String filesetJSONPath = String.format("%s.%s", JSON_$_POINTER, filePath.getFieldName()); + List listPayloads = readPayloadsForFileset(filesetJSONPath, fromSectionDocJSON); + FilesetDV filesetDV = new FilesetDV(); + filesetDV.setName(filePath.getGcubeProfileFieldName()); + for (Payload payload : listPayloads) { + PayloadDV payloadDV = ConvertToDataValueObjectModel.toPayloadDV(payload); + filesetDV.addPayloadDV(payloadDV); + } + + listFiles.add(filesetDV); + } + + theProfileBeanExt.setListFileset(listFiles); + } + + LOG.trace("\nputting theProfileBeanExt: " + theProfileBeanExt); + listOfProfilesBeanExt.add(theProfileBeanExt); + } + + } + + projectEdit.setTheProfileBeans(listOfProfilesBeanExt); + + LOG.info("returning ProjectEdit with " + projectEdit.getTheProfileBeans().size() + " profile beans"); + + return projectEdit; + } + + /** + * Clone list. 
+ * + * @param list the list + * @return the list + */ + public static List cloneList(List list) { + List listCloned = new ArrayList(list.size()); + + Function cloneWrapper = (mfw) -> { + + MetadataFieldWrapper newMfw = new MetadataFieldWrapper(); + newMfw.setAsGroup(mfw.getAsGroup()); + newMfw.setAsTag(mfw.getAsTag()); + newMfw.setCurrentValue(mfw.getCurrentValue()); + newMfw.setDefaultValue(mfw.getDefaultValue()); + newMfw.setFieldId(mfw.getFieldId()); + newMfw.setFieldName(mfw.getFieldName()); + newMfw.setFieldNameFromCategory(mfw.getFieldNameFromCategory()); + newMfw.setMandatory(mfw.getMandatory()); + newMfw.setMaxOccurs(mfw.getMaxOccurs()); + newMfw.setMultiSelection(mfw.isMultiSelection()); + newMfw.setNote(mfw.getNote()); + newMfw.setOwnerCategory(mfw.getOwnerCategory()); + newMfw.setType(mfw.getType()); + newMfw.setValidator(mfw.getValidator()); + newMfw.setVocabulary(mfw.getVocabulary()); + + return newMfw; + + }; + + for (MetadataFieldWrapper item : list) { + MetadataFieldWrapper cloned = cloneWrapper.apply(item); + listCloned.add(cloned); + } + return listCloned; + } + + /** + * Load project view. 
+ * + * @param theProjectDV the the project DV + * @param scope the scope + * @param username the username + * @return the project view + * @throws Exception the exception + */ + public static ProjectView loadProjectView(ProjectDV theProjectDV, String scope, String username) throws Exception { + + String theWholeProjectAsJSON = theProjectDV.getTheDocument().getDocumentAsJSON(); + + LOG.debug("theProjectDV as JSON: " + theWholeProjectAsJSON); + LOG.debug("theProjectDV as MAP: " + theProjectDV.getTheDocument().getDocumentAsMap()); + + ProjectView projectView = new ProjectView(); + projectView.setTheProjectDV(theProjectDV); + + LinkedHashMap> linkedMap_UCDId_gCubeProfiles = GcubeProfilesPerUCDIdCache + .get(scope); + + // NO UCD defined, applying default + if (linkedMap_UCDId_gCubeProfiles.size() == 0) { + LOG.warn("No " + GEOPORTAL_CONFIGURATION_TYPE.gcube_profiles + " found in the UCD"); + LOG.info("Applying default business logic to display the project"); + SectionView sectionView = new SectionView(); + sectionView.setSectionTitle("Document"); + SubDocumentView subDocumentView = new SubDocumentView(); + + Document sectionDoc = Document.parse(theProjectDV.getTheDocument().getDocumentAsJSON()); + boolean isAccessibleSection = isAccessibleSectionAccordingToPolicy(sectionDoc, JSON_$_POINTER, username); + + // If is accessible + if (isAccessibleSection) { + + // Creating one Project with one SectionView and SubDocumentView + String wholeSectionDoc = sectionDoc.toJson(); + subDocumentView.setMetadataAsJSON(wholeSectionDoc); + + List listFiles = new ArrayList(); + List listImages = new ArrayList(); + List listLayers = new ArrayList(); + + // Reading Fileset _payloads + String filesetJSONPath = String.format("%s.%s", JSON_$_POINTER, FILESET); + List listPayloads = readPayloadsForFileset(filesetJSONPath, wholeSectionDoc); + FilesetDV filesetDV = new FilesetDV(); + filesetDV.setName(FILESET); + for (Payload payload : listPayloads) { + PayloadDV payloadDV = 
ConvertToDataValueObjectModel.toPayloadDV(payload); + filesetDV.addPayloadDV(payloadDV); + boolean isImage = ImageDetector.isImage(payload.getMimetype()); + + if (isImage) { + listImages.add(filesetDV); + } else { + listFiles.add(filesetDV); + } + } + + subDocumentView.setListImages(listImages); + subDocumentView.setListFiles(listFiles); + + // Reading Fileset _materializations / layers + listLayers = readGcubeSDILayersForFileset(filesetJSONPath, wholeSectionDoc); + + subDocumentView.setListLayers(listLayers); + sectionView.addSubDocument(subDocumentView); + projectView.addSectionView(sectionView); + + } + + } + + List listProfilesBean = linkedMap_UCDId_gCubeProfiles + .get(theProjectDV.getProfileID()); + + com.jayway.jsonpath.Configuration configuration = com.jayway.jsonpath.Configuration.builder() + .jsonProvider(new JsonOrgJsonProvider()).build(); + + // Reading the Project according to list of Profile defined in the UCD + for (GcubeProfilesMetadataForUCD gcubeProfileMetaForUCD : listProfilesBean) { + + GcubeProfileDV gcubeProfileDV = gcubeProfileMetaForUCD.getGcubeProfile(); + SectionView sectionView = new SectionView(); + sectionView.setSectionTitle(gcubeProfileDV.getSectionTitle()); + LOG.debug("\n\nThe profile is: " + gcubeProfileDV); + // Building JSON/section full PATH and section name + String sectionJSONPath = ""; + String parentPathFromProfile = gcubeProfileDV.getParentName() == null ? "" : gcubeProfileDV.getParentName(); + String theSectionName = gcubeProfileDV.getSectionName(); + + if (theSectionName.compareTo(JSON_$_POINTER) == 0 || theSectionName.compareTo(JSON_$_POINTER + ".") == 0) { + sectionJSONPath = JSON_$_POINTER; + theSectionName = ""; + } else { + sectionJSONPath = String.format("%s%s", + parentPathFromProfile.endsWith(".") ? 
parentPathFromProfile : parentPathFromProfile + ".", + theSectionName); + } + + LOG.debug("The sectionJSONPath is: " + sectionJSONPath); + + JsonPath theSectionJsonPath = null; + Object data = null; + try { + theSectionJsonPath = JsonPath.compile(sectionJSONPath); + data = theSectionJsonPath.read(theWholeProjectAsJSON, configuration); + } catch (Exception e) { + LOG.warn("Error on searching the section " + sectionJSONPath + " in the JSON Project: " + + theWholeProjectAsJSON); + continue; + } + + LOG.debug("Data is instace of: " + data.getClass()); + LOG.debug("data to string: " + data.toString()); + + // Splitting the General Document in bson.Document according to list of + // GcubeProfiles + List listBSONDocument = new ArrayList(); + if (data instanceof org.json.JSONObject) { + String jsonString = data.toString(); + LOG.debug("the JSON to string: " + jsonString); + Document sectionDoc = Document.parse(jsonString); + boolean isAccessibleSection = isAccessibleSectionAccordingToPolicy(sectionDoc, sectionJSONPath, + username); + if (isAccessibleSection) { + listBSONDocument.add(sectionDoc); + } + + } else if (data instanceof org.json.JSONArray) { + org.json.JSONArray dataArray = (org.json.JSONArray) data; + for (int i = 0; i < dataArray.length(); i++) { + String jsonString = dataArray.get(i).toString(); + LOG.debug("the array " + i + " JSON to string: " + jsonString); + Document sectionDoc = Document.parse(jsonString); + boolean isAccessibleSection = isAccessibleSectionAccordingToPolicy(sectionDoc, sectionJSONPath, + username); + if (isAccessibleSection) { + listBSONDocument.add(sectionDoc); + } + + } + } + + LOG.debug("Result for " + gcubeProfileDV.getSectionName() + " is: " + listBSONDocument); + List theProfileBeans = gcubeProfileMetaForUCD.getListMetadataProfileBean(); + MetaDataProfileBean theProfileBean = theProfileBeans.get(0); + + // For each bson.Document creating the SubDocumentView + for (int i = 0; i < listBSONDocument.size(); i++) { + Document 
fromSectionDoc = listBSONDocument.get(i); + SubDocumentView subDocumentView = new SubDocumentView(); + Document toSectionDoc = new Document(); + // Filling the SubDocumentView metadata with the metadataField.getFieldName() + // read from the Profile + for (MetadataFieldWrapper metadataField : theProfileBean.getMetadataFields()) { + + String theFieldName = metadataField.getFieldId() != null ? metadataField.getFieldId() + : metadataField.getFieldName(); + LOG.debug("reading theFieldName: " + theFieldName); + Object theOBJFieldValue = fromSectionDoc.get(theFieldName); + + // NB: Using ALWAYS THE metadataField.getFieldName() as LABEL + toSectionDoc = sanitizeDocumentValue(toSectionDoc, metadataField.getFieldName(), theOBJFieldValue); + + } + String subToJSON = toSectionDoc.toJson(); + LOG.debug("theSubSetionDoc is: " + subToJSON); + subDocumentView.setMetadataAsJSON(toSectionDoc.toJson()); + + // Reading filePaths + List filePaths = gcubeProfileDV.getFilePaths(); + + // READING fileset* field ACCORDING TO filePaths OF THE 'gcubeProfiles' CONFIG + if (filePaths != null) { + String fromSectionDocJSON = fromSectionDoc.toJson(); + List listFiles = new ArrayList(); + List listImages = new ArrayList(); + List listLayers = new ArrayList(); + for (FilePathDV filePath : filePaths) { + + // Reading Fileset _payloads + String filesetJSONPath = String.format("%s.%s", JSON_$_POINTER, filePath.getFieldName()); + List listPayloads = readPayloadsForFileset(filesetJSONPath, fromSectionDocJSON); + FilesetDV filesetDV = new FilesetDV(); + filesetDV.setName(filePath.getGcubeProfileFieldName()); + for (Payload payload : listPayloads) { + PayloadDV payloadDV = ConvertToDataValueObjectModel.toPayloadDV(payload); + filesetDV.addPayloadDV(payloadDV); + boolean isImage = ImageDetector.isImage(payload.getMimetype()); + + if (isImage) { + listImages.add(filesetDV); + } else { + listFiles.add(filesetDV); + } + } + + // Reading Fileset _materializations + listLayers = 
readGcubeSDILayersForFileset(filesetJSONPath, fromSectionDocJSON); + + } + subDocumentView.setListFiles(listFiles); + subDocumentView.setListImages(listImages); + subDocumentView.setListLayers(listLayers); + } + + sectionView.addSubDocument(subDocumentView); + + } + + projectView.addSectionView(sectionView); + } + + return projectView; + } + + /** + * Read payloads for fileset. + * + * @param filesetJSONPath the fileset JSON path + * @param sectionJSONDocument the section JSON document + * @return the list + */ + public static List readPayloadsForFileset(String filesetJSONPath, String sectionJSONDocument) { + LOG.debug("readPayloadsForFileset called"); + + List listPayloads = new ArrayList(); + String _payloadsJSONPath = String.format("%s.%s", filesetJSONPath, _PAYLOADS); + try { + com.jayway.jsonpath.Configuration configuration = com.jayway.jsonpath.Configuration.builder() + .jsonProvider(new JsonOrgJsonProvider()).build(); + + LOG.info("Reading sectionPath at {} into section document {}", _payloadsJSONPath, sectionJSONDocument); + JsonPath theSectionPolycJsonPath = JsonPath.compile(_payloadsJSONPath); + Object _payloads = theSectionPolycJsonPath.read(sectionJSONDocument, configuration).toString(); + + if (_payloads instanceof String) { + String toStringPayloads = (String) _payloads; + LOG.trace("The _payloads is a String {}", toStringPayloads); + JSONArray jsonArray = new JSONArray(toStringPayloads); + for (int i = 0; i < jsonArray.length(); i++) { + Payload payload = Serialization.read(jsonArray.getJSONObject(i).toString(), Payload.class); + listPayloads.add(payload); + } + } + + LOG.info("returning list of payloads {}", listPayloads); + } catch (Exception e) { + LOG.warn("Error on reading the JSON Path " + _payloadsJSONPath + " in the doc " + sectionJSONDocument, e); + } + + return listPayloads; + + } + + /** + * Read images for fileset. 
+ * + * @param parentJSONPath the parent JSON path + * @param sectionJSONDocument the section JSON document + * @param limitToFirstOneFound the limit to first one found + * @return the list + */ + public static List readImagesForFileset(String parentJSONPath, String sectionJSONDocument, + boolean limitToFirstOneFound) { + LOG.debug("readImagesForFileset called"); + + List listImages = new ArrayList(); + com.jayway.jsonpath.Configuration config = com.jayway.jsonpath.Configuration.builder() + .jsonProvider(new JsonOrgJsonProvider()).build(); + // Reading Fileset _payloads + String filesetJSONPath = String.format("%s.%s", parentJSONPath, _PAYLOADS); + LOG.info("Reading sectionPath {} into section document {}", filesetJSONPath, sectionJSONDocument); + JsonPath theSectionPolycJsonPath = JsonPath.compile(filesetJSONPath); + Object _filesets = theSectionPolycJsonPath.read(sectionJSONDocument, config); + + List payloads = new ArrayList(); + List listPayloads = null; + try { + listPayloads = recursiveFetchingPayloads(config, _filesets, payloads); + } catch (Exception e) { + LOG.warn("Error occurred on fetching the payloads: ", e); + } + if (listPayloads != null) { + for (Payload payload : listPayloads) { + boolean isImage = ImageDetector.isImage(payload.getMimetype()); + if (isImage) { + PayloadDV payloadDV = ConvertToDataValueObjectModel.toPayloadDV(payload); + listImages.add(payloadDV); + if (limitToFirstOneFound && listImages.size() == 1) + return listImages; + } + } + } + + LOG.debug("readImagesForFileset returning listOfImages: " + listImages); + return listImages; + + } + + /** + * Read gcube SDI layers for fileset. 
+ * + * @param materializationParentJSONPath the materialization parent JSON path + * @param sectionJSONDocument the section JSON document + * @return the list + */ + public static List readGcubeSDILayersForFileset(String materializationParentJSONPath, + String sectionJSONDocument) { + LOG.debug("readGcubeSDILayersForFileset called"); + + List listSDILayers = new ArrayList(); + String _materializationsJSONPath = String.format("%s.%s", materializationParentJSONPath, _MATERIALIZATIONS); + + try { + com.jayway.jsonpath.Configuration configurationJSONSmart = com.jayway.jsonpath.Configuration.builder() + .jsonProvider(new JsonOrgJsonProvider()).build(); + + LOG.info("Reading sectionPath {} into section document {}", _materializationsJSONPath, sectionJSONDocument); + JsonPath theSectionPolycJsonPath = JsonPath.compile(_materializationsJSONPath); + Object _materializations = theSectionPolycJsonPath.read(sectionJSONDocument, configurationJSONSmart); + + LOG.trace("_materializations.class(): " + _materializations.getClass()); + + listSDILayers = recursiveFetchingLayers(configurationJSONSmart, _materializations, listSDILayers); + + } catch (Exception e) { + LOG.warn("Error on reading the JSON Path " + _materializationsJSONPath + " in the doc " + + sectionJSONDocument, e); + } + LOG.info("returning list of layers {}", listSDILayers); + return listSDILayers; + + } + + /** + * Recursive fetching layers. 
+ * + * @param config the config + * @param objectJSON the object JSON + * @param listSDILayers the list SDI layers + * @return the list + * @throws JSONException the JSON exception + */ + private static List recursiveFetchingLayers(com.jayway.jsonpath.Configuration config, + Object objectJSON, List listSDILayers) throws JSONException { + + if (objectJSON == null) + return listSDILayers; + + if (objectJSON instanceof JSONArray) { + JSONArray theJsonArray = (JSONArray) objectJSON; + LOG.trace("jsonArray: " + theJsonArray.toString(3)); + + for (int i = 0; i < theJsonArray.length(); i++) { + recursiveFetchingLayers(config, theJsonArray.get(i), listSDILayers); + } + } else if (objectJSON instanceof JSONObject) { + JSONObject theJsonObject = (JSONObject) objectJSON; + LOG.trace("theJSONObject: " + theJsonObject.toString(3)); + GCubeSDIViewerLayerDV gsdiLayer = converLayer(config, theJsonObject); + listSDILayers.add(gsdiLayer); + } + + return listSDILayers; + } + + /** + * Recursive fetching payloads. 
+ * + * @param config the config + * @param objectJSON the object JSON + * @param payloads the payloads + * @return the list + * @throws JSONException the JSON exception + */ + public static List recursiveFetchingPayloads(com.jayway.jsonpath.Configuration config, Object objectJSON, + List payloads) throws JSONException { + LOG.debug("recursiveFetchingPayloads called"); + + if (objectJSON == null) + return payloads; + + if (objectJSON instanceof JSONArray) { + JSONArray theJsonArray = (JSONArray) objectJSON; + LOG.trace("jsonArray: " + theJsonArray.toString(3)); + + for (int i = 0; i < theJsonArray.length(); i++) { + payloads = recursiveFetchingPayloads(config, theJsonArray.get(i), payloads); + } + } else if (objectJSON instanceof JSONObject) { + JSONObject toStringPayloads = (JSONObject) objectJSON; + LOG.trace("The _payloads is a String {}", toStringPayloads.toString(3)); + Payload payload; + try { + payload = Serialization.read(toStringPayloads.toString(), Payload.class); + payloads.add(payload); + } catch (Exception e) { + LOG.warn("Error on converting the JSON Boject " + toStringPayloads + "as " + + Payload.class.getSimpleName() + e.getMessage()); + } + + } + + return payloads; + + } + + /** + * Conver layer. 
+ * + * @param config the config + * @param thJsonObject the th json object + * @return the g cube SDI viewer layer DV + */ + // TODO THIS PART SHOULD BE REVISITED/OPTIMIZED + private static GCubeSDIViewerLayerDV converLayer(com.jayway.jsonpath.Configuration config, + JSONObject thJsonObject) { + LOG.debug("converLayer called for " + thJsonObject); + + GCubeSDIViewerLayerDV gsdiLayer = new GCubeSDIViewerLayerDV(); + try { + String theType = thJsonObject.getString(_TYPE); + gsdiLayer.setType(theType); + LOG.debug(_TYPE + " is: " + theType); + } catch (Exception e) { + LOG.warn("No " + _TYPE + " found", e); + } + String toSerializeJSONOBJ = thJsonObject.toString(); + String jsonPath = null; + try { + jsonPath = String.format("%s.%s", JSON_$_POINTER, _BBOX); + HashMap bbox = JsonPath.using(config).parse(toSerializeJSONOBJ).read(jsonPath, + HashMap.class); + BBOXDV bboxDV = new BBOXDV(bbox); + gsdiLayer.setBbox(bboxDV); + LOG.debug(_BBOX + " is: " + bboxDV); + } catch (Exception e) { + LOG.warn(jsonPath + " error: ", e); + } + try { + jsonPath = String.format("%s.%s", JSON_$_POINTER, _OGC_LINKS); + String jsonString = JsonPath.using(config).parse(toSerializeJSONOBJ).read(jsonPath).toString(); + Gson gson = new Gson(); + HashMap map = gson.fromJson(jsonString, HashMap.class); + gsdiLayer.setOgcLinks(map); + LOG.debug(_OGC_LINKS + " are: " + map); + } catch (Exception e) { + LOG.warn(jsonPath + " error: ", e); + } + + try { + String wmsLink = gsdiLayer.getWMSLink(); + if (wmsLink != null) { + String layerName = URLParserUtil.extractValueOfParameterFromURL("layers", wmsLink); + gsdiLayer.setLayerName(layerName); + } + } catch (Exception e) { + LOG.warn(jsonPath + " error: ", e); + } + + LOG.debug("converLayer returning: " + gsdiLayer); + return gsdiLayer; + } + + /** + * Checks if is accessible section according to policy. 
+ * + * @param section the section + * @param sectionJSONPath the section JSON path + * @param myLogin the my login + * @return true, if is accessible section according to policy + */ + private static boolean isAccessibleSectionAccordingToPolicy(Document section, String sectionJSONPath, + String myLogin) { + LOG.debug("isAccessibleSectionAccordingToPolicy called"); + boolean isAccessible = true; + + // Skipping the root, going to check the access_policy of subsections + if (sectionJSONPath.compareTo(JSON_$_POINTER) != 0) { + isAccessible = checkAccessPolicy(section.toJson(), myLogin); + } + + return isAccessible; + } + + /** + * Check access policy. + * + * @param sectionDocumentJSON the section document JSON + * @param myLogin the my login + * @return true, if successful + */ + private static boolean checkAccessPolicy(String sectionDocumentJSON, String myLogin) { + LOG.debug("checkAccessPolicy called"); + // CHECKING THE POLICY + String accessPolicyPath = JSON_$_POINTER + "._access._policy"; + boolean isAccessible = true; + try { + com.jayway.jsonpath.Configuration configuration = com.jayway.jsonpath.Configuration.builder() + .jsonProvider(new JsonOrgJsonProvider()).build(); + + LOG.info("Reading access policy at {} into section document {}", accessPolicyPath, sectionDocumentJSON); + JsonPath theSectionPolycJsonPath = JsonPath.compile(accessPolicyPath); + String _policy = theSectionPolycJsonPath.read(sectionDocumentJSON, configuration).toString(); + LOG.info("The section {} has policy {}", accessPolicyPath, _policy); + isAccessible = GeportalCheckAccessPolicy.isAccessible(_policy, myLogin); + } catch (Exception e) { + LOG.error(accessPolicyPath + " not found. Check OK"); + } + LOG.info("Is the section {} accessible? {}", sectionDocumentJSON, isAccessible); + return isAccessible; + } + + /** + * Sanitize document value. 
+ * + * @param toDoc the to doc + * @param fieldLabel the field label + * @param theObjectFieldValue the the object field value + * @return the document + */ + private static Document sanitizeDocumentValue(Document toDoc, String fieldLabel, Object theObjectFieldValue) { + + if (theObjectFieldValue != null) { + if (theObjectFieldValue instanceof String) { + String toString = (String) theObjectFieldValue; + if (toString != null && !toString.isEmpty()) { + toDoc.append(fieldLabel, theObjectFieldValue); + } else { + LOG.debug("Skipping String field " + fieldLabel + " its value is null or empty"); + } + + } else if (theObjectFieldValue instanceof ArrayList) { + ArrayList toArrayList = (ArrayList) theObjectFieldValue; + if (toArrayList != null && !toArrayList.isEmpty()) { + toDoc.append(fieldLabel, theObjectFieldValue); + } else { + LOG.debug("Skipping ArrayList field " + fieldLabel + " its value is null or empty"); + } + } else { + toDoc.append(fieldLabel, theObjectFieldValue); + } + } else { + LOG.debug("Skipping field " + fieldLabel + " its value is null or empty"); + } + + return toDoc; + } + + /** + * Pretty print JSON. + * + * @param jsonString the json string + * @return the string + */ + private static String prettyPrintJSON(String jsonString) { + + Gson gson = new GsonBuilder().setPrettyPrinting().create(); + JsonObject jsonObject = new JsonParser().parse(jsonString).getAsJsonObject(); + return gson.toJson(jsonObject); + } + + /** + * Pretty print project view. + * + * @param projectView the project view + */ + public static void prettyPrintProjectView(ProjectView projectView) { + + for (SectionView section : projectView.getListSections()) { + System.out.println("\n\n###### Section Title: " + section.getSectionTitle() + " ######"); + int i = 1; + for (SubDocumentView subDocument : section.getListSubDocuments()) { + System.out.println("## " + SubDocumentView.class.getSimpleName() + " n." 
+ i); + System.out.println("***** Metadata"); + System.out.println(prettyPrintJSON(subDocument.getMetadataAsJSON())); + System.out.println("***** Files"); + if (subDocument.getListFiles() != null) { + for (FilesetDV filesetDV : subDocument.getListFiles()) { + System.out.println("******* File Fileset name: " + filesetDV.getName()); + for (PayloadDV payload : filesetDV.getListPayload()) { + System.out.println("********* Payload: " + payload); + } + } + } + System.out.println("***** Images"); + if (subDocument.getListImages() != null) { + for (FilesetDV filesetDV : subDocument.getListImages()) { + System.out.println("******* Image Fileset name: " + filesetDV.getName()); + for (PayloadDV payload : filesetDV.getListPayload()) { + System.out.println("********* Payload: " + payload); + } + } + } + System.out.println("***** Layers"); + if (subDocument.getListLayers() != null) { + for (GCubeSDIViewerLayerDV layer : subDocument.getListLayers()) { + System.out.println("******* Layer type: " + layer.getType()); + System.out.println("******* Layer: " + layer); + } + } + i++; + } + + } + } + + /** + * Pretty print project view. + * + * @param projectEdit the project edit + */ + public static void prettyPrintProjectEdit(ProjectEdit projectEdit) { + + for (MetaDataProfileBeanExt mpb : projectEdit.getTheProfileBeans()) { + System.out.println("\n\n###### Title: " + mpb.getTitle() + " - Type: " + mpb.getType() + " ######"); + int i = 1; + for (MetadataFieldWrapper fieldWrapper : mpb.getMetadataFields()) { + System.out.println("## " + MetadataFieldWrapper.class.getSimpleName() + " n." 
+ i); + System.out.println("***** Metadata"); + // System.out.println(mfw); + System.out.println("\tfieldId: " + fieldWrapper.getFieldId() + ", fieldName: " + + fieldWrapper.getFieldName() + ", CurrentValue: " + fieldWrapper.getCurrentValue()); + i++; + } + i = 1; + System.out.println("***** Files"); + for (FilesetDV fileSet : mpb.getListFileset()) { + System.out.println( + "## " + FilesetDV.class.getSimpleName() + " n." + i + " has name: " + fileSet.getName()); + + if (fileSet.getListPayload() != null) { + int j = 0; + for (PayloadDV payload : fileSet.getListPayload()) { + System.out.println("\t" + ++j + ") " + payload); + } + } + i++; + } + + } + } + +} diff --git a/src/main/java/org/gcube/application/geoportaldatamapper/ImageDetector.java b/src/main/java/org/gcube/application/geoportaldatamapper/ImageDetector.java new file mode 100644 index 0000000..395fce7 --- /dev/null +++ b/src/main/java/org/gcube/application/geoportaldatamapper/ImageDetector.java @@ -0,0 +1,54 @@ +package org.gcube.application.geoportaldatamapper; + +import java.util.Arrays; +import java.util.List; + +public class ImageDetector { + + public static enum COMMON_IMAGES_FORMAT { + gif, png, jpeg, jpg, bmp, tif, tiff, svg, avif, webp + } + + /** + * The Class ImageDetector. + * + * @author Francesco Mangiacrapa at ISTI-CNR francesco.mangiacrapa@isti.cnr.it + * + * Oct 18, 2022 + */ + + /** + * Gets the names. + * + * @param e the e + * @return the names + */ + private static String[] getNames(Class> e) { + return Arrays.stream(e.getEnumConstants()).map(Enum::name).toArray(String[]::new); + } + + public static List listFormatImages; + + static { + + String[] arrayImgs = ImageDetector.getNames(COMMON_IMAGES_FORMAT.class); + listFormatImages = Arrays.asList(arrayImgs); + + } + + /** + * Checks if is image. 
package org.gcube.application.geoportaldatamapper;

import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLDecoder;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

/**
 * The Class URLParserUtil. Helpers to build and parse URL query strings.
 *
 * @author Francesco Mangiacrapa at ISTI-CNR (francesco.mangiacrapa@isti.cnr.it)
 *
 *         Oct 29, 2020
 */
public class URLParserUtil {

	/**
	 * Adds the parameter to query string, optionally surrounded by '&amp;'.
	 *
	 * @param key             the parameter key
	 * @param value           the parameter value
	 * @param prefixAmpersand prepend '&amp;' when true
	 * @param suffixAmpersand append '&amp;' when true
	 * @return the "key=value" fragment, with the requested ampersands
	 */
	public static String addParameterToQueryString(String key, String value, boolean prefixAmpersand,
			boolean suffixAmpersand) {

		StringBuilder queryParameter = new StringBuilder();

		if (prefixAmpersand)
			queryParameter.append("&");

		queryParameter.append(key).append("=").append(value);

		if (suffixAmpersand)
			queryParameter.append("&");

		return queryParameter.toString();
	}

	/**
	 * Extract value of parameter from URL. Matching is case-insensitive; the
	 * returned value is taken verbatim (not URL-decoded) from the original URL.
	 *
	 * @param paramName the param name
	 * @param url       the url
	 * @return the raw parameter value, or null when the parameter is absent
	 */
	public static String extractValueOfParameterFromURL(String paramName, String url) {
		// searching for "paramName=" (trailing '=') to be sure it is a parameter
		int index = url.toLowerCase().indexOf(paramName.toLowerCase() + "=");
		if (index < 0)
			return null;

		int start = index + paramName.length() + 1; // +1 skips the '=' char
		String sub = url.substring(start);
		int indexOfSeparator = sub.indexOf("&");
		// the value ends at the next '&', or at the end of the string
		int end = indexOfSeparator != -1 ? indexOfSeparator : sub.length();
		return sub.substring(0, end);
	}

	/**
	 * Split query. Parses the query string of the URL into an ordered multimap;
	 * keys and values are URL-decoded (UTF-8). A parameter without '=' maps to a
	 * null value; repeated keys accumulate their values in order.
	 *
	 * @param url the url
	 * @return the map of decoded key to list of decoded values (empty when the
	 *         URL has no query string)
	 * @throws UnsupportedEncodingException the unsupported encoding exception
	 */
	public static Map<String, List<String>> splitQuery(URL url) throws UnsupportedEncodingException {
		final Map<String, List<String>> query_pairs = new LinkedHashMap<String, List<String>>();
		final String query = url.getQuery();
		// robustness: a URL without a query string previously caused an NPE
		if (query == null || query.isEmpty())
			return query_pairs;

		final String[] pairs = query.split("&");
		for (String pair : pairs) {
			final int idx = pair.indexOf("=");
			final String key = idx > 0 ? URLDecoder.decode(pair.substring(0, idx), "UTF-8") : pair;
			if (!query_pairs.containsKey(key)) {
				query_pairs.put(key, new LinkedList<String>());
			}
			final String value = idx > 0 && pair.length() > idx + 1
					? URLDecoder.decode(pair.substring(idx + 1), "UTF-8")
					: null;
			query_pairs.get(key).add(value);
		}
		return query_pairs;
	}

}
+ * + * @return the list fileset + */ + public List getListFileset() { + if (listFileset == null) + listFileset = new ArrayList(); + return listFileset; + } + + /** + * Sets the list fileset. + * + * @param listFileset the new list fileset + */ + public void setListFileset(List listFileset) { + this.listFileset = listFileset; + } + + @Override + protected MetaDataProfileBeanExt clone() throws CloneNotSupportedException { + + MetaDataProfileBeanExt clonedMDPBE = new MetaDataProfileBeanExt(); + clonedMDPBE.setTitle(this.getTitle()); + clonedMDPBE.setType(this.getType()); + ArrayList newListFileset = new ArrayList(); + for (FilesetDV filesetDV : this.getListFileset()) { + FilesetDV newFileset = new FilesetDV(); + for (PayloadDV payloadDV : filesetDV.getListPayload()) { + PayloadDV newPayloadDV = new PayloadDV(); + newPayloadDV.setLink(payloadDV.getLink()); + newPayloadDV.setMimetype(payloadDV.getMimetype()); + newPayloadDV.setName(payloadDV.getName()); + newPayloadDV.setStorageID(payloadDV.getStorageID()); + newFileset.addPayloadDV(newPayloadDV); + } + + newListFileset.add(newFileset); + } + clonedMDPBE.setListFileset(newListFileset); + + ArrayList newListMetadataFieldWrapper = new ArrayList(); + + for (MetadataFieldWrapper mfw : this.getMetadataFields()) { + MetadataFieldWrapper newMfw = new MetadataFieldWrapper(); + newMfw.setAsGroup(mfw.getAsGroup()); + newMfw.setAsTag(mfw.getAsTag()); + newMfw.setCurrentValue(mfw.getCurrentValue()); + newMfw.setDefaultValue(mfw.getDefaultValue()); + newMfw.setFieldId(mfw.getFieldId()); + newMfw.setFieldName(mfw.getFieldName()); + newMfw.setFieldNameFromCategory(mfw.getFieldNameFromCategory()); + newMfw.setMandatory(mfw.getMandatory()); + newMfw.setMaxOccurs(mfw.getMaxOccurs()); + newMfw.setMultiSelection(mfw.isMultiSelection()); + newMfw.setNote(mfw.getNote()); + newMfw.setOwnerCategory(mfw.getOwnerCategory()); + newMfw.setType(mfw.getType()); + newMfw.setValidator(mfw.getValidator()); + newMfw.setVocabulary(mfw.getVocabulary()); 
+ + newListMetadataFieldWrapper.add(newMfw); + + } + + clonedMDPBE.setMetadataFields(newListMetadataFieldWrapper); + + return clonedMDPBE; + } + + @Override + public String toString() { + StringBuilder builder = new StringBuilder(); + builder.append("MetaDataProfileBeanExt [getType()="); + builder.append(getType()); + builder.append(", getTitle()="); + builder.append(getTitle()); + builder.append(", getMetadataFields()="); + builder.append(getMetadataFields()); + builder.append("]"); + return builder.toString(); + } + +} diff --git a/src/main/java/org/gcube/application/geoportaldatamapper/shared/ProjectEdit.java b/src/main/java/org/gcube/application/geoportaldatamapper/shared/ProjectEdit.java new file mode 100644 index 0000000..6433f7a --- /dev/null +++ b/src/main/java/org/gcube/application/geoportaldatamapper/shared/ProjectEdit.java @@ -0,0 +1,50 @@ +package org.gcube.application.geoportaldatamapper.shared; + +import java.io.Serializable; +import java.util.List; + +import org.gcube.application.geoportalcommon.shared.geoportal.project.ProjectDV; + +public class ProjectEdit implements Serializable { + + /** + * + */ + private static final long serialVersionUID = 2885327516680245601L; + + private ProjectDV theProjectDV; + + private List theProfileBeans; + + public ProjectEdit() { + + } + + public ProjectDV getTheProjectDV() { + return theProjectDV; + } + + public List getTheProfileBeans() { + return theProfileBeans; + } + + public void setTheProjectDV(ProjectDV theProjectDV) { + this.theProjectDV = theProjectDV; + } + + public void setTheProfileBeans(List theProfileBeans) { + this.theProfileBeans = theProfileBeans; + } + + @Override + public String toString() { + StringBuilder builder = new StringBuilder(); + builder.append("ProjectEdit [theProjectDV="); + builder.append(theProjectDV); + builder.append(", theProfileBeans="); + builder.append(theProfileBeans); + builder.append("]"); + return builder.toString(); + } + +} diff --git 
a/src/main/resources/org/gcube/application/geoportaldatamapper/GeoportalDataCommon.gwt.xml b/src/main/resources/org/gcube/application/geoportaldatamapper/GeoportalDataCommon.gwt.xml new file mode 100644 index 0000000..36449b0 --- /dev/null +++ b/src/main/resources/org/gcube/application/geoportaldatamapper/GeoportalDataCommon.gwt.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/src/test/java/Geoportal_DataMapper_Tests.java b/src/test/java/Geoportal_DataMapper_Tests.java new file mode 100644 index 0000000..5dd6c0d --- /dev/null +++ b/src/test/java/Geoportal_DataMapper_Tests.java @@ -0,0 +1,129 @@ + +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.Properties; + +import org.gcube.application.geoportal.common.model.document.Project; +import org.gcube.application.geoportalcommon.ConvertToDataValueObjectModel; +import org.gcube.application.geoportalcommon.ProjectDVBuilder; +import org.gcube.application.geoportalcommon.geoportal.GeoportalClientCaller; +import org.gcube.application.geoportalcommon.geoportal.ProjectsCaller; +import org.gcube.application.geoportalcommon.geoportal.UseCaseDescriptorCaller; +import org.gcube.application.geoportalcommon.shared.geoportal.project.ProjectDV; +import org.gcube.application.geoportalcommon.shared.geoportal.view.ProjectView; +import org.gcube.application.geoportaldatamapper.Geoportal_JSON_Mapper; +import org.gcube.application.geoportaldatamapper.shared.ProjectEdit; +import org.gcube.common.authorization.library.provider.SecurityTokenProvider; +import org.gcube.common.scope.api.ScopeProvider; +import org.junit.Before; +import org.junit.Test; + +/** + * The Class Geoportal_DataMapper_Tests. 
+ * + * @author Francesco Mangiacrapa at ISTI-CNR francesco.mangiacrapa@isti.cnr.it + * + * Sep 9, 2022 + */ +public class Geoportal_DataMapper_Tests { + + private static final String GCUBE_CONFIG_PROPERTIES_FILENAME = "gcube_config.properties"; + // APP Working Directory + /src/test/resources must be the location of + // gcube_config.properties + private static String gcube_config_path = String.format("%s/%s", + System.getProperty("user.dir") + "/src/test/resources", GCUBE_CONFIG_PROPERTIES_FILENAME); + private static String CONTEXT; + private static String TOKEN; + + private UseCaseDescriptorCaller clientUCD; + private ProjectsCaller clientProjects; + + private static String PROFILE_ID = "profiledConcessioni"; + private static String PROJECT_ID = "6384aaac308f5c28c5ee0888"; + + private static String USERNAME = "francesco.mangiacrapa"; + + public static void readContextSettings() { + + try (InputStream input = new FileInputStream(gcube_config_path)) { + + Properties prop = new Properties(); + + // load a properties file + prop.load(input); + + CONTEXT = prop.getProperty("CONTEXT"); + TOKEN = prop.getProperty("TOKEN"); + // get the property value and print it out + System.out.println("CONTEXT: " + CONTEXT); + System.out.println("TOKEN: " + TOKEN); + + } catch (IOException ex) { + ex.printStackTrace(); + } + } + + /** + * Gets the client. 
+ * + * @return the client + */ + @Before + public void getClient() { + readContextSettings(); + // assumeTrue(GCubeTest.isTestInfrastructureEnabled()); + ScopeProvider.instance.set(CONTEXT); + SecurityTokenProvider.instance.set(TOKEN); + clientUCD = GeoportalClientCaller.useCaseDescriptors(); + clientProjects = GeoportalClientCaller.projects(); + } + + @Test + public void testReadProjectEdit() { + + try { + ScopeProvider.instance.set(CONTEXT); + SecurityTokenProvider.instance.set(TOKEN); + Project theProject = clientProjects.getProjectByID(PROFILE_ID, PROJECT_ID); + ProjectDVBuilder projectBuilder = ProjectDVBuilder.newBuilder().fullDocumentMap(true); + projectBuilder.relationships(true); + ProjectDV theProjectDV = ConvertToDataValueObjectModel.toProjectDV(theProject, projectBuilder); + ProjectEdit projectEdit = Geoportal_JSON_Mapper.loadProjectEdit(theProjectDV, CONTEXT, USERNAME); + Geoportal_JSON_Mapper.prettyPrintProjectEdit(projectEdit); + +// ProjectView projectView = Geoportal_JSON_Mapper.loadProjectView(theProjectDV, CONTEXT, USERNAME); +// Geoportal_JSON_Mapper.prettyPrintProjectView(projectView); + System.out.println("\n\n testReadProjectEdit terminated!!!"); + } catch (Exception e) { + System.err.println("Error"); + e.printStackTrace(); + } + } + + //@Test + public void testReadProjectView() { + + try { + ScopeProvider.instance.set(CONTEXT); + SecurityTokenProvider.instance.set(TOKEN); + Project theProject = clientProjects.getProjectByID(PROFILE_ID, PROJECT_ID); + ProjectDVBuilder projectBuilder = ProjectDVBuilder.newBuilder().fullDocumentMap(true); + projectBuilder.relationships(true); + + System.out.println("project json: " + theProject.getTheDocument().toJson()); + + ProjectDV theProjectDV = ConvertToDataValueObjectModel.toProjectDV(theProject, projectBuilder); + ProjectView projectView = Geoportal_JSON_Mapper.loadProjectView(theProjectDV, CONTEXT, USERNAME); + Geoportal_JSON_Mapper.prettyPrintProjectView(projectView); + +// ProjectView projectView = 
Geoportal_JSON_Mapper.loadProjectView(theProjectDV, CONTEXT, USERNAME); +// Geoportal_JSON_Mapper.prettyPrintProjectView(projectView); + System.out.println("\n\n testReadProjectView terminated!!!"); + } catch (Exception e) { + System.err.println("Error"); + e.printStackTrace(); + } + } + +} diff --git a/src/test/resources/.gitignore b/src/test/resources/.gitignore new file mode 100644 index 0000000..3f12f69 --- /dev/null +++ b/src/test/resources/.gitignore @@ -0,0 +1 @@ +/gcube_config.properties