Compare commits

..

73 Commits

Author SHA1 Message Date
Francesco Mangiacrapa f447964e79 removed -SNAPSHOT to be released 2024-06-28 15:53:22 +02:00
Francesco Mangiacrapa 3b3555ac3e Merge pull request 'feature_27120' (#11) from feature_27120 into master
Reviewed-on: #11
2024-06-28 15:51:22 +02:00
Francesco Mangiacrapa 4923b9656f updated to PerformStepRequest 2024-04-15 12:33:53 +02:00
Francesco Mangiacrapa 3174b8f937 added new bean StepPerformedResultDV 2024-04-11 11:04:09 +02:00
Francesco Mangiacrapa ad646a8bf9 removed optionalMessage as parameter and added into StepExecutionRequest bean 2024-04-11 09:49:36 +02:00
Francesco Mangiacrapa 05e9d3d426 indent 2024-04-10 17:18:27 +02:00
Francesco Mangiacrapa 06b257fcf3 - Added optional message when performing lifecycle step [#27192] 2024-04-08 17:18:54 +02:00
Francesco Mangiacrapa 4ca2a6b1bd renamed to GET_SHAREABLE_LINK 2024-04-05 15:57:32 +02:00
Francesco Mangiacrapa 235c12f294 - Integrated new Uri-Resolver-Manager [#27160]
- Added new operation "Share project"
2024-04-05 10:50:21 +02:00
Francesco Mangiacrapa c66e08ffc9 updated SearchingFilter 2024-03-26 16:58:47 +01:00
Francesco Mangiacrapa ff4e93ca23 Classes to migrate UCD data model 2024-02-02 09:42:12 +01:00
Francesco Mangiacrapa 6a0dc3c447 Added readContextSettings 2024-01-09 16:35:43 +01:00
Francesco Mangiacrapa 92da4837ee created script for updating GNA data model #26349 2024-01-09 12:14:26 +01:00
Francesco Mangiacrapa aeed289ca5 Updated changelog 2023-09-14 10:15:04 +02:00
Francesco Mangiacrapa 76909a2e68 removed -SNAPSHOT to be released 2023-09-11 14:20:07 +02:00
Francesco Mangiacrapa 474b870396 Merge pull request 'Read countByPhase from configuration [#25598]' (#10) from feature_25598 into master
Reviewed-on: #10
2023-09-11 14:19:20 +02:00
Francesco Mangiacrapa 704989f612 Read countByPhase from configuration [#25598] 2023-09-08 12:27:07 +02:00
Francesco Mangiacrapa 2a79c62038 removed -SNAPSHOT to be released 2023-09-05 15:24:07 +02:00
Francesco Mangiacrapa f819b0f10a Reduced/Optimized some LOGs, done [#25539] 2023-08-31 15:24:06 +02:00
Francesco Mangiacrapa 5def5c98fe removed -SNAPSHOT to be released 2023-06-09 11:08:21 +02:00
Francesco Mangiacrapa 93cf772a0c added -SNAPSHOT in the CHANGELOG 2023-06-09 11:00:53 +02:00
Francesco Mangiacrapa 759b2064f4 Passed to maven-parent 1.2.0 and SNAPSHOT version 2023-06-09 10:59:36 +02:00
Francesco Mangiacrapa 06923c3998 Removed -SNAPSHOT to be released 2023-06-09 10:34:31 +02:00
Francesco Mangiacrapa 3440ca8e8a Merge pull request 'feature_25074' (#9) from feature_25074 into master
Reviewed-on: #9
2023-06-05 14:30:06 +02:00
Francesco Mangiacrapa 11190e5605 Updated changelog 2023-06-05 14:28:53 +02:00
Francesco Mangiacrapa e003479415 added serializable 2023-05-25 09:20:18 +02:00
Francesco Mangiacrapa af82f20ca5 Read configuration and deserialization of "GROUPED_CROSS_FILTERING_LAYERS" 2023-05-24 12:19:02 +02:00
Francesco Mangiacrapa 6a38ebc0c8 Renamed "grouped custom layers" to "grouped overlay layers" 2023-05-23 15:42:51 +02:00
Francesco Mangiacrapa f5785268ef - Integrated the grouped custom layers configuration [#25110] 2023-05-15 15:49:58 +02:00
Francesco Mangiacrapa 57d4ab211a Integrated the grouped custom layers configuration [#25110] 2023-05-15 15:49:36 +02:00
Francesco Mangiacrapa 472e5955d5 removed -SNAPSHOT to be released 2023-05-11 16:14:25 +02:00
Francesco Mangiacrapa 2747ffbb4d Added DELETE_RELATION into enum OPERATION_ON_ITEM 2023-05-11 16:13:56 +02:00
Francesco Mangiacrapa 702b24d3cc removed -SNAPSHOT. Moved to gcube-bom 2.3.0 2023-05-11 15:51:38 +02:00
Francesco Mangiacrapa 606fd1d218 Merge pull request 'task_24166' (#8) from task_24166 into master
Reviewed-on: #8
2023-05-11 15:49:16 +02:00
Francesco Mangiacrapa 2ba86a9f6f merged with branch task_24166 2023-05-11 15:49:06 +02:00
Francesco Mangiacrapa b1fcee1970 Added -SNAPSHOT for merging on master 2023-05-11 15:46:36 +02:00
Francesco Mangiacrapa 8d32c4e85b removed -SNAPSHOT to be released 2023-05-11 15:10:55 +02:00
Francesco Mangiacrapa 6b63090ec4 improved toSpatialReference method 2023-05-04 16:10:24 +02:00
Francesco Mangiacrapa 39f084b5c5 Updated list of operations 2023-05-03 10:53:21 +02:00
Francesco Mangiacrapa 0fd55e8a58 Integrated with the Geoportal_Resolver service [#25031] 2023-05-02 17:56:30 +02:00
Francesco Mangiacrapa f0508b4fd9 Updated "deleteFileset" added "ignoreErrors" parameter 2023-04-26 14:14:54 +02:00
Francesco Mangiacrapa 3db873b17e updated Test cases 2023-04-26 11:26:18 +02:00
Francesco Mangiacrapa f51896e28d updated 2023-04-18 09:38:46 +02:00
Francesco Mangiacrapa d58d933816 in progress on #24166 2023-04-17 16:02:18 +02:00
Francesco Mangiacrapa 9cbd162c40 Updated pom 2023-04-12 17:59:34 +02:00
Francesco Mangiacrapa b7f1e61b04 Pushed script for #24793 2023-04-12 17:49:43 +02:00
Francesco Mangiacrapa 9daa4bd3bc updated 2023-03-27 16:47:33 +02:00
Francesco Mangiacrapa 288756e686 Script completed for resynching Mongo -> PostGis -> Geoserver 2023-03-27 16:41:51 +02:00
Francesco Mangiacrapa 026fd35fd6 Added test classes 2023-03-17 16:41:33 +01:00
Francesco Mangiacrapa 6dd3cb4b79 Removed -SNAPSHOT to be released. Added gcube-bom 2.1.0 2023-03-07 11:05:40 +01:00
Francesco Mangiacrapa dc1a0146e5 Merge pull request 'task_24567' (#7) from task_24567 into master
Reviewed-on: #7
2023-03-07 11:04:06 +01:00
Francesco Mangiacrapa 4e19c5cdd0 Added gcube-bom 2.2.0-SNAPSHOT 2023-03-07 11:00:59 +01:00
Francesco Mangiacrapa 0e44b11c2a updated 2023-03-06 15:49:46 +01:00
Francesco Mangiacrapa acae9ffb77 - [#24569] Added the phase DRAFT in the enum (also required for #24571) 2023-02-09 12:37:29 +01:00
Francesco Mangiacrapa 62ce92b371 Casting to String, in order to avoid GWT serialization issue, see #24537#note-9 and #24432 2023-02-07 11:47:39 +01:00
Francesco Mangiacrapa 955d670ce4 removed -SNAPSHOT to be released 2023-02-03 14:45:03 +01:00
Francesco Mangiacrapa 999d201737 Merge pull request 'task_24532' (#6) from task_24532 into master
Reviewed-on: #6
2023-02-03 14:43:30 +01:00
Francesco Mangiacrapa 2fb4767205 Added method get Access from the Document 2023-02-03 11:40:56 +01:00
Francesco Mangiacrapa 2ef9a4fa8c Reverted serialization from String to Object values returning the Document as Map 2023-02-02 16:56:21 +01:00
Francesco Mangiacrapa ebdf9714c9 removed -SNAPSHOT to be released 2023-01-19 14:49:28 +01:00
Francesco Mangiacrapa a96899dbf7 Just for building the SNAPSHOT version 2023-01-19 14:48:58 +01:00
Francesco Mangiacrapa 178ba1bb44 Added WORKFLOW_PHASE [#24458] 2023-01-19 14:45:30 +01:00
Francesco Mangiacrapa d954dafa90 [#24432] Added a trim for returning value to solve a space as suffix into returned date (fixing the data displayed in the Timeline Widget) 2023-01-19 11:31:48 +01:00
Francesco Mangiacrapa 9910e79e4e Removed -SNAPSHOT to be released 2023-01-17 16:59:51 +01:00
Francesco Mangiacrapa 0fb3faecff To build SNAPSHOT version 2023-01-17 16:59:11 +01:00
Francesco Mangiacrapa c88d71c9d2 Fixing issue: #24432 Using LinkedHashMap<String, String> instead of LinkedHashMap<String, Object> 2023-01-17 16:51:01 +01:00
Francesco Mangiacrapa 9f38eac98e fixed bean 2023-01-12 16:45:47 +01:00
Francesco Mangiacrapa 5476833a7e Removed -SNAPSHOT to be released 2023-01-12 11:52:55 +01:00
Francesco Mangiacrapa 9450bf2ddc Building -SNAPSHOT version 2023-01-12 11:52:01 +01:00
Francesco Mangiacrapa 0eca260675 commented JUnit Tests 2023-01-12 11:47:15 +01:00
Francesco Mangiacrapa 6d7f5b9338 Added new method accessPolicyFromSessionLogin 2023-01-11 16:46:14 +01:00
Francesco Mangiacrapa 408780c6b1 Removed -SNAPSHOT to be released 2022-12-21 11:16:14 +01:00
Francesco Mangiacrapa a54747c5ac Fixing JSON library v20090211 (see #24263#note-1). New version is 2.0.1-SNAPSHOT 2022-12-13 14:23:04 +01:00
40 changed files with 3742 additions and 235 deletions

View File

@ -4,6 +4,48 @@
All notable changes to this project will be documented in this file.
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [v2.4.0]
- Search Filter for profileId and projectID [#27120]
- Integrated new Uri-Resolver-Manager [#27160]
- Added new operation "Share project"
- Added optional message when performing lifecycle step [#27192]
## [v2.3.0]
- Read countByPhase from configuration [#25598]
- Reduced/Optimized some LOGs [#25539]
## [v2.2.0]
- Integrated the cross-filtering configuration [#25074]
- Integrated the overlay layers configuration [#25110]
## [v2.1.0] - 2023-05-11
- Integrated the deleteFileset method [#24977]
- Integrated with the Geoportal_Resolver service [#25031]
## [v2.0.3] - 2023-02-09
#### Enhancements
- [#24569] Added the phase DRAFT in the enum (also required for #24571)
## [v2.0.2] - 2022-02-03
#### Enhancements
- [#24432] Reverted serialization from String to Object values returning the Document as Map
- [#24475] Propagated the Access Policy in the fileset
## [v2.0.1] - 2022-01-19
#### Bug fixes
- [#24263] Fixing JSON library v20090211
- [#24432] Fixing serialization issue using LinkedHashMap<String, String> instead of LinkedHashMap<String, Object>.
- [#24432] Added a trim for returning value to solve a space as suffix into returned date (fixing the data displayed in the Timeline Widget)
## [v2.0.0] - 2022-11-17
@ -17,6 +59,7 @@ This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm
- [#23927] Integrated with Relationship definition in UCD
- [#23834] Integrated with the create/view/delete Relationship facility
- [#24136] Integrated the temporal dimension on the front-end side
- [#24458] Published projects cannot be edited/updated
## [v1.4.0] - 2022-06-08

pom.xml
View File

@ -5,12 +5,12 @@
<parent>
<artifactId>maven-parent</artifactId>
<groupId>org.gcube.tools</groupId>
<version>1.1.0</version>
<version>1.2.0</version>
</parent>
<groupId>org.gcube.application</groupId>
<artifactId>geoportal-data-common</artifactId>
<version>2.0.0</version>
<version>2.4.0</version>
<description>GeoPortal Data Common is common library used by GUI components developed for GeoNA</description>
<scm>
@ -34,6 +34,7 @@
<properties>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<gcube.bom>2.4.0</gcube.bom>
</properties>
<dependencyManagement>
@ -41,7 +42,7 @@
<dependency>
<groupId>org.gcube.distribution</groupId>
<artifactId>gcube-bom</artifactId>
<version>2.0.2</version>
<version>${gcube.bom}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
@ -80,7 +81,7 @@
<dependency>
<groupId>org.gcube.portlets.user</groupId>
<artifactId>gcube-url-shortener</artifactId>
<artifactId>uri-resolver-manager</artifactId>
<version>[1.0.0,2.0.0-SNAPSHOT)</version>
<scope>compile</scope>
</dependency>
@ -88,7 +89,7 @@
<dependency>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>20140107</version>
<version>20090211</version>
<scope>compile</scope>
</dependency>
@ -120,6 +121,13 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>9.4.1212</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>

View File

@ -0,0 +1,71 @@
package org.gcube.application.geoportalcommon;
import org.gcube.application.geoportal.common.model.document.access.Access;
import org.gcube.application.geoportal.common.model.document.access.AccessPolicy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.jayway.jsonpath.JsonPath;
import com.jayway.jsonpath.spi.json.JsonOrgJsonProvider;
/**
* The Class ConvertToDataServiceModel.
*
* @author Francesco Mangiacrapa at ISTI-CNR francesco.mangiacrapa@isti.cnr.it
*
* Feb 3, 2023
*/
public class ConvertToDataServiceModel {
private static Logger LOG = LoggerFactory.getLogger(ConvertToDataServiceModel.class);
/**
* Gets the access from document section.
*
* @param theJSONDocument the the JSON document
* @param sectionJSONPath the section JSON path
* @return the access from document section
*/
public static Access getAccessFromDocumentSection(String theJSONDocument, String sectionJSONPath) {
String accessPolicyPath = String.format("%s.%s", sectionJSONPath, ConvertToDataValueObjectModel.POLICY);
AccessPolicy accessPolicy = null;
com.jayway.jsonpath.Configuration config = com.jayway.jsonpath.Configuration.builder()
.jsonProvider(new JsonOrgJsonProvider()).build();
// Reading policy fields
try {
JsonPath theSectionPolycJsonPath = JsonPath.compile(accessPolicyPath);
String policy = theSectionPolycJsonPath.read(theJSONDocument, config).toString();
LOG.debug("Read " + ConvertToDataValueObjectModel.POLICY + ": " + policy + ", from section: "
+ accessPolicyPath);
if (policy != null) {
accessPolicy = AccessPolicy.valueOf(policy.toUpperCase());
}
} catch (Exception e) {
LOG.info("No " + ConvertToDataValueObjectModel.POLICY + " found in the path: " + accessPolicyPath);
}
// Reading policy fields
String licenseIDPath = String.format("%s.%s", sectionJSONPath, ConvertToDataValueObjectModel.LICENSE_ID);
String licenseID = null;
try {
JsonPath theSectionLicenseJsonPath = JsonPath.compile(licenseIDPath);
licenseID = theSectionLicenseJsonPath.read(theJSONDocument, config).toString();
LOG.debug("Read " + ConvertToDataValueObjectModel.LICENSE_ID + ": " + licenseID + ", from section: "
+ licenseIDPath);
} catch (Exception e) {
LOG.info("No " + ConvertToDataValueObjectModel.LICENSE_ID + " found in the path: " + licenseIDPath);
}
Access access = new Access();
if (accessPolicy != null)
access.setPolicy(accessPolicy);
if (licenseID != null)
access.setLicense(licenseID);
LOG.info("Access is: " + access.getPolicy() + " / " + access.getLicense());
return access;
}
}
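
For reference, a minimal usage sketch of getAccessFromDocumentSection, not taken from the repository: the JSON document and the section path are placeholders, while the policy and licenseID keys match the constants the method reads.

// Hypothetical document and section path, for illustration only.
String theJSONDocument = "{\"_theDocument\":{\"abstract_section\":{\"policy\":\"open\",\"licenseID\":\"CC-BY-4.0\"}}}";
Access access = ConvertToDataServiceModel.getAccessFromDocumentSection(theJSONDocument,
        "$._theDocument.abstract_section");
System.out.println(access.getPolicy() + " / " + access.getLicense());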

View File

@ -23,6 +23,7 @@ import org.gcube.application.geoportal.common.model.document.access.Access;
import org.gcube.application.geoportal.common.model.document.accounting.AccountingInfo;
import org.gcube.application.geoportal.common.model.document.accounting.PublicationInfo;
import org.gcube.application.geoportal.common.model.document.identification.IdentificationReference;
import org.gcube.application.geoportal.common.model.document.identification.SpatialReference;
import org.gcube.application.geoportal.common.model.document.lifecycle.LifecycleInformation;
import org.gcube.application.geoportal.common.model.document.relationships.Relationship;
import org.gcube.application.geoportal.common.model.useCaseDescriptor.HandlerDeclaration;
@ -41,7 +42,10 @@ import org.gcube.application.geoportalcommon.shared.geoportal.ResultDocumentDV;
import org.gcube.application.geoportalcommon.shared.geoportal.config.ActionDefinitionDV;
import org.gcube.application.geoportalcommon.shared.geoportal.config.FilePathDV;
import org.gcube.application.geoportalcommon.shared.geoportal.config.GcubeProfileDV;
import org.gcube.application.geoportalcommon.shared.geoportal.config.GroupedLayersDV;
import org.gcube.application.geoportalcommon.shared.geoportal.config.ItemFieldDV;
import org.gcube.application.geoportalcommon.shared.geoportal.config.layers.ConfiguredLayerDV;
import org.gcube.application.geoportalcommon.shared.geoportal.config.layers.CrossFilteringLayerDV;
import org.gcube.application.geoportalcommon.shared.geoportal.geojson.Crs;
import org.gcube.application.geoportalcommon.shared.geoportal.geojson.GeoJSON;
import org.gcube.application.geoportalcommon.shared.geoportal.materialization.IndexLayerDV;
@ -62,6 +66,7 @@ import org.gcube.application.geoportalcommon.shared.geoportal.ucd.HandlerDeclara
import org.gcube.application.geoportalcommon.shared.geoportal.ucd.RelationshipDefinitionDV;
import org.gcube.application.geoportalcommon.shared.geoportal.ucd.UseCaseDescriptorDV;
import org.gcube.application.geoportalcommon.util.DateUtils;
import org.gcube.application.geoportalcommon.util.StringUtil;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
@ -70,9 +75,11 @@ import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
import com.jayway.jsonpath.DocumentContext;
@ -88,6 +95,8 @@ import com.jayway.jsonpath.Option;
*/
public class ConvertToDataValueObjectModel {
public static final int _MAX_LENGHT_STRING_AT_INFO = 200;
private static Logger LOG = LoggerFactory.getLogger(ConvertToDataValueObjectModel.class);
private static final String NO_TIME = "T00:00";
@ -101,6 +110,10 @@ public class ConvertToDataValueObjectModel {
public static List<String> KEYSET_POSSIBLE_DATE = Arrays.asList("start", "end", "created", "updated", "inizio",
"fine", "creato", "aggiornato");
public static String LICENSE_ID = "licenseID";
public static String POLICY = "policy";
/**
* To use case descriptor DV.
*
@ -164,7 +177,9 @@ public class ConvertToDataValueObjectModel {
}
ucdVO.setHandlers(listHandlersDV);
LOG.info("returning {}", ucdVO);
LOG.info("returning {}", StringUtil.ellipsize(ucdVO.toString(), _MAX_LENGHT_STRING_AT_INFO));
if (LOG.isDebugEnabled())
LOG.debug("returning {}", ucdVO);
return ucdVO;
}
@ -292,7 +307,7 @@ public class ConvertToDataValueObjectModel {
int i = 0;
for (String asJSONString : jsonConfigurations) {
LOG.debug(++i + ") the gCubeProfile is: " + asJSONString);
LOG.debug(++i + ") the " + geoportalConfigType + " is: " + asJSONString);
GcubeProfile profile = org.gcube.application.geoportal.client.utils.Serialization
.read(asJSONString, GcubeProfile.class);
listGcubeProfiles.add(toGcubeProfileDV(profile));
@ -303,7 +318,10 @@ public class ConvertToDataValueObjectModel {
dDV.setConfiguration(listGcubeProfiles);
dDV.setConfigurationType(geoportalConfigType); // -> GEOPORTAL_CONFIGURATION_TYPE.gcube_profiles
hdDV.setConfiguration(dDV);
LOG.info("returning {}", hdDV);
LOG.info("Found config {}", GEOPORTAL_CONFIGURATION_TYPE.gcube_profiles);
LOG.info("returning {}", StringUtil.ellipsize(hdDV.toString(), _MAX_LENGHT_STRING_AT_INFO));
if (LOG.isDebugEnabled())
LOG.debug("returning {}", hdDV);
return hdDV;
}
case item_fields: {
@ -312,7 +330,7 @@ public class ConvertToDataValueObjectModel {
int i = 0;
for (String asJSONString : jsonConfigurations) {
LOG.debug(++i + ") the itemField is: " + asJSONString);
LOG.debug(++i + ") the " + geoportalConfigType + " is: " + asJSONString);
ItemField itemField = org.gcube.application.geoportal.client.utils.Serialization
.read(asJSONString, ItemField.class);
listItemFields.add(toItemFieldDV(itemField));
@ -322,7 +340,10 @@ public class ConvertToDataValueObjectModel {
dDV.setConfiguration(listItemFields);
dDV.setConfigurationType(geoportalConfigType); // -> GEOPORTAL_CONFIGURATION_TYPE.item_fields
hdDV.setConfiguration(dDV);
LOG.info("returning {}", hdDV);
LOG.info("Found config {}", GEOPORTAL_CONFIGURATION_TYPE.item_fields);
LOG.info("returning {}", StringUtil.ellipsize(hdDV.toString(), _MAX_LENGHT_STRING_AT_INFO));
if (LOG.isDebugEnabled())
LOG.debug("returning {}", hdDV);
return hdDV;
}
@ -365,11 +386,76 @@ public class ConvertToDataValueObjectModel {
dDV.setConfiguration(listActionsDef);
dDV.setConfigurationType(geoportalConfigType); // -> GEOPORTAL_CONFIGURATION_TYPE.actions_definition
hdDV.setConfiguration(dDV);
LOG.info("returning {}", hdDV);
LOG.info("Found config {}", GEOPORTAL_CONFIGURATION_TYPE.actions_definition);
LOG.info("returning {}", StringUtil.ellipsize(hdDV.toString(), _MAX_LENGHT_STRING_AT_INFO));
if (LOG.isDebugEnabled())
LOG.debug("returning {}", hdDV);
return hdDV;
}
case grouped_overlay_layers: {
List<GroupedLayersDV<ConfiguredLayerDV>> listGroupedCL = new ArrayList<GroupedLayersDV<ConfiguredLayerDV>>(
jsonConfigurations.size());
int i = 0;
ObjectMapper mapper = new ObjectMapper();
for (String asJSONString : jsonConfigurations) {
LOG.debug(++i + ") the " + GroupedLayersDV.class.getSimpleName() + " is: " + asJSONString);
TypeReference<GroupedLayersDV<ConfiguredLayerDV>> typeRef = new TypeReference<GroupedLayersDV<ConfiguredLayerDV>>() {
};
GroupedLayersDV<ConfiguredLayerDV> profile = mapper.readValue(asJSONString, typeRef);
// GroupedLayersDV<ConfiguredLayerDV> profile = org.gcube.application.geoportal.client.utils.Serialization
// .read(asJSONString, typeRef);
// listGroupedCL.add(toGcubeProfileDV(profile));
listGroupedCL.add(profile);
}
ConfigurationDV<List<GroupedLayersDV<ConfiguredLayerDV>>> dDV = new ConfigurationDV<List<GroupedLayersDV<ConfiguredLayerDV>>>(
listGroupedCL);
dDV.setConfiguration(listGroupedCL);
dDV.setConfigurationType(geoportalConfigType);
hdDV.setConfiguration(dDV);
LOG.info("Found config {}", GEOPORTAL_CONFIGURATION_TYPE.grouped_overlay_layers);
LOG.info("returning {}", StringUtil.ellipsize(hdDV.toString(), _MAX_LENGHT_STRING_AT_INFO));
if (LOG.isDebugEnabled())
LOG.debug("returning {}", hdDV);
return hdDV;
}
case grouped_cross_filtering_layers: {
List<GroupedLayersDV<CrossFilteringLayerDV>> listGroupedCL = new ArrayList<GroupedLayersDV<CrossFilteringLayerDV>>(
jsonConfigurations.size());
int i = 0;
ObjectMapper mapper = new ObjectMapper();
for (String asJSONString : jsonConfigurations) {
LOG.debug(++i + ") the " + GroupedLayersDV.class.getSimpleName() + " is: " + asJSONString);
TypeReference<GroupedLayersDV<CrossFilteringLayerDV>> typeRef = new TypeReference<GroupedLayersDV<CrossFilteringLayerDV>>() {
};
GroupedLayersDV<CrossFilteringLayerDV> profile = mapper.readValue(asJSONString, typeRef);
// GroupedLayersDV<CrossFilteringLayerDV> profile = org.gcube.application.geoportal.client.utils.Serialization
// .read(asJSONString, GroupedLayersDV.class);
// listGroupedCL.add(toGcubeProfileDV(profile));
listGroupedCL.add(profile);
}
ConfigurationDV<List<GroupedLayersDV<CrossFilteringLayerDV>>> dDV = new ConfigurationDV<List<GroupedLayersDV<CrossFilteringLayerDV>>>(
listGroupedCL);
dDV.setConfiguration(listGroupedCL);
dDV.setConfigurationType(geoportalConfigType);
hdDV.setConfiguration(dDV);
LOG.info("Found config {}", GEOPORTAL_CONFIGURATION_TYPE.grouped_cross_filtering_layers);
LOG.info("returning {}", StringUtil.ellipsize(hdDV.toString(), _MAX_LENGHT_STRING_AT_INFO));
if (LOG.isDebugEnabled())
LOG.debug("returning {}", hdDV);
return hdDV;
}
default:
LOG.warn("HANDLER NOT FOUND FOR configs: " + geoportalConfigType);
break;
}
}
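
The two new cases above deserialize the generic GroupedLayersDV beans with Jackson, using a TypeReference so that the element type (ConfiguredLayerDV or CrossFilteringLayerDV) survives type erasure. A standalone sketch of the same pattern, with a purely illustrative JSON configuration whose keys follow the bean properties and the "layers" alias declared in GroupedLayersDV:

ObjectMapper mapper = new ObjectMapper();
// Illustrative JSON: real configurations come from the UCD handler declaration, not from this literal.
String asJSONString = "{\"name\":\"base-overlays\",\"description\":\"example group\","
        + "\"layers\":[{\"title\":\"Example layer\",\"name\":\"ws:example_layer\",\"display\":true}]}";
TypeReference<GroupedLayersDV<ConfiguredLayerDV>> typeRef = new TypeReference<GroupedLayersDV<ConfiguredLayerDV>>() {};
GroupedLayersDV<ConfiguredLayerDV> group = mapper.readValue(asJSONString, typeRef);
System.out.println(group.getName() + ": " + group.getListCustomLayers().size() + " layer(s)");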
@ -393,7 +479,7 @@ public class ConvertToDataValueObjectModel {
LOG.trace("toGcubeProfileDV called");
if (gCubeProfile == null) {
LOG.warn(GcubeProfile.class.getSimpleName() + " is null");
LOG.info(GcubeProfile.class.getSimpleName() + " is null");
return null;
}
@ -417,7 +503,9 @@ public class ConvertToDataValueObjectModel {
gpVO.setFilePaths(filePathsVO);
}
LOG.info("returning: " + gpVO);
LOG.info("returning {}", StringUtil.ellipsize(gpVO.toString(), _MAX_LENGHT_STRING_AT_INFO));
if (LOG.isDebugEnabled())
LOG.debug("returning {}", gpVO);
return gpVO;
}
@ -431,7 +519,7 @@ public class ConvertToDataValueObjectModel {
LOG.trace("toItemFieldDV called");
if (itemField == null) {
LOG.warn(ItemField.class.getSimpleName() + " is null");
LOG.info(ItemField.class.getSimpleName() + " is null");
return null;
}
@ -443,7 +531,7 @@ public class ConvertToDataValueObjectModel {
ifDV.setSearchable(itemField.isSearchable());
ifDV.setSortable(itemField.isSortable());
LOG.info("returning: " + ifDV);
LOG.debug("returning: " + ifDV);
return ifDV;
}
@ -470,7 +558,9 @@ public class ConvertToDataValueObjectModel {
actDef.setTitle(actionDefinition.getTitle());
actDef.setRoles(roles);
LOG.info("returning: " + actionDefinition);
LOG.info("returning {}", StringUtil.ellipsize(actionDefinition.toString(), _MAX_LENGHT_STRING_AT_INFO));
if (LOG.isDebugEnabled())
LOG.debug("returning {}", actionDefinition);
return actDef;
}
@ -484,7 +574,7 @@ public class ConvertToDataValueObjectModel {
LOG.trace("toFilePathDV called");
if (filePath == null) {
LOG.warn("List of " + FilePath.class.getSimpleName() + " is null");
LOG.info("List of " + FilePath.class.getSimpleName() + " is null for {}", filePath);
return null;
}
@ -493,7 +583,9 @@ public class ConvertToDataValueObjectModel {
fpVO.setFieldDefinition(filePath.getFieldDefinition());
fpVO.setGcubeProfileFieldName(filePath.getGcubeProfileFieldName());
LOG.info("returning: " + fpVO);
LOG.info("returning {}", StringUtil.ellipsize(fpVO.toString(), _MAX_LENGHT_STRING_AT_INFO));
if (LOG.isDebugEnabled())
LOG.debug("returning {}", fpVO);
return fpVO;
}
@ -604,18 +696,22 @@ public class ConvertToDataValueObjectModel {
return null;
GeoJSON geoJson = null;
try {
geoJson = new GeoJSON();
JSONObject jsonObject = new JSONObject(geoJSONObject).getJSONObject("geoJSON");
geoJson.setType(jsonObject.getString("type"));
geoJson.setBbox(GisUtil.fromJSONArray(jsonObject.getJSONArray("bbox")));
JSONObject jsonObject = new JSONObject(geoJSONObject).getJSONObject(SpatialReference.GEO_JSON);
geoJson.setType(jsonObject.getString(GeoJSON.TYPE));
geoJson.setBbox(GisUtil.fromJSONArray(jsonObject.getJSONArray(GeoJSON.BBOX)));
Crs crs = org.gcube.application.geoportal.client.utils.Serialization
.read(jsonObject.getJSONObject("crs").toString(), Crs.class);
.read(jsonObject.getJSONObject(GeoJSON.CRS).toString(), Crs.class);
geoJson.setCrs(crs);
geoJson.setGeoJSON(jsonObject.toString());
LOG.debug("toSpatialReference returning " + geoJson);
} catch (JSONException | IOException e) {
LOG.warn("Error on converting " + GeoJSON.class.getSimpleName() + " from : " + geoJSONObject, e);
LOG.warn("Error on converting " + GeoJSON.class.getSimpleName() + " from : " + geoJSONObject
+ ". Is it empty?");
LOG.debug("toSpatialReference error: ", e);
}
return geoJson;
@ -1000,7 +1096,11 @@ public class ConvertToDataValueObjectModel {
}
}
documentAsMap.put(key, value);
// Casting to String, in order to avoid GWT serialization issue, see
// #24537#note-9 and #24432
String valueString = value != null ? value + "" : null;
documentAsMap.put(key, valueString);
return documentAsMap;
}
@ -1242,9 +1342,10 @@ public class ConvertToDataValueObjectModel {
* @param targetProjectID the target project ID
* @param relationName the relation name
* @return the JSON object
* @throws JSONException
*/
public static JSONObject toTimelineJSONModel(Project theProject, JSONObject sourceJsonTemplate, String targetUCD,
String targetProjectID, String relationName) {
String targetProjectID, String relationName) throws JSONException {
com.jayway.jsonpath.Configuration jsonPathConfig = com.jayway.jsonpath.Configuration.defaultConfiguration()
.addOptions(Option.ALWAYS_RETURN_LIST);
@ -1257,7 +1358,10 @@ public class ConvertToDataValueObjectModel {
if (relationName != null)
targetJsonObject.put("relationship_name", relationName);
for (Object key : sourceJsonTemplate.keySet()) {
Iterator itKeys = sourceJsonTemplate.keys();
if (itKeys != null) {
while (itKeys.hasNext()) {
Object key = itKeys.next();
String jsonPath = null;
String theKey = null;
try {
@ -1268,13 +1372,15 @@ public class ConvertToDataValueObjectModel {
List<String> listValue = targetDoc.read(jsonPath);
String result = "";
for (int i = 0; i < listValue.size() - 1; i++) {
result += listValue.get(i) + ", ";
result += listValue.get(i).trim() + ", ";
}
result += listValue.get(listValue.size() - 1);
result += listValue.get(listValue.size() - 1).trim();
targetJsonObject.put(theKey, result);
} catch (Exception e) {
LOG.trace("Error on setting key: {}, path: {}", theKey, jsonPath);
}
}
}
return targetJsonObject;
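
The loop above replaces JSONObject.keySet() with the keys() iterator, which is what the older org.json v20090211 reinstated in the pom (see #24263) exposes. A stripped-down sketch of the same iteration, with a throwaway template object:

// With org.json v20090211 these calls declare the checked JSONException.
JSONObject sourceJsonTemplate = new JSONObject("{\"title\":\"$.name\",\"start\":\"$.created\"}");
Iterator<?> itKeys = sourceJsonTemplate.keys();
while (itKeys != null && itKeys.hasNext()) {
    String theKey = itKeys.next().toString();
    // Each value is expected to be a JSONPath expression resolved against the project document.
    System.out.println(theKey + " -> " + sourceJsonTemplate.opt(theKey));
}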
@ -1287,8 +1393,10 @@ public class ConvertToDataValueObjectModel {
* @param theProject the the project
* @param timelineJSONObject the timeline JSON object
* @return the temporal reference DV
* @throws JSONException
*/
public static TemporalReferenceDV toTemporalReferenceDV(Project theProject, JSONObject timelineJSONObject) {
public static TemporalReferenceDV toTemporalReferenceDV(Project theProject, JSONObject timelineJSONObject)
throws JSONException {
TemporalReferenceDV tr = null;

View File

@ -3,11 +3,17 @@ package org.gcube.application.geoportalcommon;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.Arrays;
import java.util.Map;
import org.gcube.application.geoportalcommon.shared.GNADataEntryConfigProfile;
import org.gcube.application.geoportalcommon.shared.GNADataViewerConfigProfile;
import org.gcube.application.geoportalcommon.shared.GeoportalItemReferences;
import org.gcube.application.geoportalcommon.shared.GeoportalItemReferences.SHARE_LINK_TO;
import org.gcube.application.geoportalcommon.shared.PublicLink;
import org.gcube.portlets.user.uriresolvermanager.UriResolverManager;
import org.gcube.portlets.user.uriresolvermanager.resolvers.query.GeoportalResolverQueryStringBuilder;
import org.gcube.portlets.user.uriresolvermanager.resolvers.query.GeoportalResolverQueryStringBuilder.RESOLVE_AS;
import org.gcube.portlets.user.uriresolvermanager.resolvers.query.GeoportalResolverQueryStringBuilder.TARGET_GEOPORTAL_APP;
import org.gcube.portlets.user.urlshortener.UrlShortener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -17,39 +23,30 @@ import org.slf4j.LoggerFactory;
*
* @author Francesco Mangiacrapa at ISTI-CNR francesco.mangiacrapa@isti.cnr.it
*
* Aug 5, 2021
* May 2, 2023
*/
public class GeoportalCommon {
/** The Constant LOG. */
private static final Logger LOG = LoggerFactory.getLogger(GeoportalCommon.class);
private GNADataViewerConfigProfile geonaDataProfile;
/**
* Instantiates a new geoportal common.
*/
public GeoportalCommon() {
}
/**
* Instantiates a new geoportal common.
*
* @param geonaDataProfile the geona data profile
*/
public GeoportalCommon(GNADataViewerConfigProfile geonaDataProfile) {
this.geonaDataProfile = geonaDataProfile;
}
/**
* Gets the public links for.
*
* @param gcubeScope the gcube scope
* @param item the item
* @param createShortURL creates and returns the short URL also.
* @return the public links for
* @throws Exception the exception
*/
public GeoportalItemReferences getPublicLinksFor(GeoportalItemReferences item, boolean createShortURL) throws Exception {
public GeoportalItemReferences getPublicLinksFor(String gcubeScope, GeoportalItemReferences item,
boolean createShortURL) throws Exception {
LOG.info("getPublicLinksFor called for: " + item);
try {
@ -63,14 +60,34 @@ public class GeoportalCommon {
if (item.getProfileID() == null)
throw new Exception("Bad request, the ProfileID is null");
if (this.geonaDataProfile == null)
this.geonaDataProfile = readGNADataViewerConfig(GeoportalCommonConstants.GEOPORTAL_DATA_VIEWER_APP);
UriResolverManager resolver = new UriResolverManager("GEO");
// Restricted Link
String link = String.format("%s?%s=%s&%s=%s", geonaDataProfile.getRestrictedPortletURL(),
GeoportalCommonConstants.GET_GEONA_ITEM_ID, item.getProjectID(),
GeoportalCommonConstants.GET_GEONA_ITEM_TYPE, item.getProfileID());
// PRIVATE LINK
GeoportalResolverQueryStringBuilder builder = new GeoportalResolverQueryStringBuilder(item.getProfileID(),
item.getProjectID());
builder.scope(gcubeScope);
builder.resolverAs(RESOLVE_AS.PRIVATE);
SHARE_LINK_TO shareLinkTo = item.getShareLinkTo();
TARGET_GEOPORTAL_APP targetApp = null;
if (shareLinkTo != null) {
switch (shareLinkTo) {
case DATA_ENTRY:
targetApp = TARGET_GEOPORTAL_APP.GEO_DE;
break;
case DATA_VIEWER:
default:
targetApp = TARGET_GEOPORTAL_APP.GEO_DV;
break;
}
builder.targetApp(targetApp);
}
// builder.resolverAs(RESOLVE_AS.PRIVATE);
Map<String, String> params = builder.buildQueryParameters();
String link = resolver.getLink(params, false);
String shortUrl = link;
try {
if (createShortURL)
@ -80,13 +97,13 @@ public class GeoportalCommon {
}
item.setRestrictedLink(new PublicLink(link, shortUrl));
// Open Link
link = String.format("%s?%s=%s&%s=%s", geonaDataProfile.getOpenPortletURL(),
GeoportalCommonConstants.GET_GEONA_ITEM_ID, item.getProjectID(),
GeoportalCommonConstants.GET_GEONA_ITEM_TYPE, item.getProfileID());
// PUBLIC LINK
builder.resolverAs(RESOLVE_AS.PUBLIC);
params = builder.buildQueryParameters();
link = resolver.getLink(params, false);
shortUrl = link;
try {
if (createShortURL)
shortUrl = getShortUrl(link);
} catch (Exception e) {
LOG.warn("Error on shortening the URL: ", e);
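
The rewritten getPublicLinksFor above no longer composes portlet URLs by hand: it delegates link construction to the Uri-Resolver-Manager. A condensed sketch of the private-link branch, where the scope and the two IDs are placeholders:

UriResolverManager resolver = new UriResolverManager("GEO");
GeoportalResolverQueryStringBuilder builder = new GeoportalResolverQueryStringBuilder("ucd-profile-id",
        "mongo-project-id");
builder.scope("/gcube/devsec/devVRE");           // placeholder scope
builder.resolverAs(RESOLVE_AS.PRIVATE);          // RESOLVE_AS.PUBLIC for the open link
builder.targetApp(TARGET_GEOPORTAL_APP.GEO_DV);  // share towards the Data-Viewer application
Map<String, String> params = builder.buildQueryParameters();
String link = resolver.getLink(params, false);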
@ -147,15 +164,6 @@ public class GeoportalCommon {
}
}
/**
* Gets the geona data profile.
*
* @return the geona data profile
*/
public GNADataViewerConfigProfile getGeonaDataProfile() {
return geonaDataProfile;
}
/**
* Read GNA data viewer config.
*

View File

@ -97,7 +97,7 @@ public class SerializerUtil {
@Override
public GeoServerPlatformInfoDV[] deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
LOG.info("Sono qui: " +jp.getValueAsString());
LOG.info("deserialize: " +jp.getValueAsString());
return jp.readValueAs(GeoServerPlatformInfoDV[].class);
//return jp.readValueAs(GeoServerPlatformInfoDV[].class);
//List<GeoServerPlatformInfoDV> listPlatform = jp.readValueAs(new TypeReference<List<GeoServerPlatformInfoDV>>() {});

View File

@ -57,7 +57,7 @@ public class GeoportalConfigUtil {
JSONArray arrayRoles = new JSONArray(data.get(0).toString());
for (int i = 0; i < arrayRoles.length(); i++) {
String role = arrayRoles.get(i).toString();
LOG.info("for STEP_ID {} read role {}", stepID, role);
LOG.debug("for STEP_ID {} read role {}", stepID, role);
listdata.add(role);
}
}catch (Exception e) {

View File

@ -21,12 +21,12 @@ import org.gcube.application.geoportal.common.model.document.Project;
import org.gcube.application.geoportal.common.model.document.relationships.RelationshipNavigationObject;
import org.gcube.application.geoportal.common.model.rest.CreateRelationshipRequest;
import org.gcube.application.geoportal.common.model.rest.DeleteRelationshipRequest;
import org.gcube.application.geoportal.common.model.rest.PerformStepRequest;
import org.gcube.application.geoportal.common.model.rest.QueryRequest;
import org.gcube.application.geoportal.common.model.rest.QueryRequest.OrderedRequest;
import org.gcube.application.geoportal.common.model.rest.QueryRequest.OrderedRequest.Direction;
import org.gcube.application.geoportal.common.model.rest.QueryRequest.PagedRequest;
import org.gcube.application.geoportal.common.model.rest.RegisterFileSetRequest;
import org.gcube.application.geoportal.common.model.rest.StepExecutionRequest;
import org.gcube.application.geoportal.common.rest.Projects;
import org.gcube.application.geoportal.common.utils.FileSets;
import org.gcube.application.geoportal.common.utils.StorageUtils;
@ -58,6 +58,7 @@ import com.mongodb.BasicDBObjectBuilder;
*/
public class ProjectsCaller {
public static final String DOCUMENT_STORE_COLLECTION = "DOCUMENT-STORE-COLLECTION";
private static Logger LOG = LoggerFactory.getLogger(GeoportalClientCaller.class);
/**
@ -170,8 +171,8 @@ public class ProjectsCaller {
* @return the relationship chain
* @throws Exception the exception
*/
public Iterator<RelationshipNavigationObject> getRelationshipChain(String profileID, String projectID, String relationID, Boolean deep)
throws Exception {
public Iterator<RelationshipNavigationObject> getRelationshipChain(String profileID, String projectID,
String relationID, Boolean deep) throws Exception {
LOG.info("getRelationshipChain called for projectID: {}, relationID: {}", projectID, projectID);
Projects<Project> client = (Projects<Project>) getClient(profileID);
return client.getRelationshipChain(projectID, relationID, deep);
@ -208,7 +209,7 @@ public class ProjectsCaller {
for (Archive archive : listArchives) {
String theType = archive.getString("_type");
if (theType.equalsIgnoreCase("DOCUMENT-STORE-COLLECTION")) {
if (theType.equalsIgnoreCase(DOCUMENT_STORE_COLLECTION)) {
String totalDocumentAre = archive.get("count").toString();
int total = Integer.parseInt(totalDocumentAre);
LOG.info("total docs for profileID: {}, are: {}", profileID, total);
@ -234,7 +235,7 @@ public class ProjectsCaller {
for (Archive archive : listArchives) {
String theType = archive.getString("_type");
if (theType.equalsIgnoreCase("DOCUMENT-STORE-COLLECTION")) {
if (theType.equalsIgnoreCase(DOCUMENT_STORE_COLLECTION)) {
com.jayway.jsonpath.Configuration configuration = com.jayway.jsonpath.Configuration.builder()
.jsonProvider(new JsonOrgJsonProvider()).build();
String toJSON = archive.toJson();
@ -255,6 +256,36 @@ public class ProjectsCaller {
return null;
}
public Integer getCountByPhaseFor(String profileID, String phase, String status) throws Exception {
LOG.info("getCountByPhaseFor called for profileID: {}, phase: {}, status: {}", profileID, phase, status);
Projects<Project> client = (Projects<Project>) getClient(profileID);
Configuration config = client.getConfiguration();
List<Archive> listArchives = config.getArchives();
Integer count = null;
for (Archive archive : listArchives) {
String theType = archive.getString("_type");
if (theType.equalsIgnoreCase(DOCUMENT_STORE_COLLECTION)) {
com.jayway.jsonpath.Configuration configuration = com.jayway.jsonpath.Configuration.builder()
.jsonProvider(new JsonOrgJsonProvider()).build();
String toJSON = archive.toJson();
JSONObject jObject = new JSONObject(toJSON);
String query = String.format("$.countByPhase[*][?(@._id.phase == '%s' && @._id.status == '%s')].count",
phase, status);
LOG.debug("Performing query: " + query);
JsonPath jsonPath = JsonPath.compile(query);
JSONArray counts = jsonPath.read(jObject, configuration);
try {
count = counts.getInt(0);
} catch (Exception e) {
LOG.warn("getCountByPhaseFor error: " + e.getLocalizedMessage());
}
}
}
LOG.info("getCountByPhaseFor returning: " + count);
return count;
}
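
A usage sketch of the new getCountByPhaseFor; it assumes, hypothetically, that the ProjectsCaller instance is obtained through the existing GeoportalClientCaller factory, and the profile ID, phase and status are placeholders matching the _id.phase / _id.status fields queried above.

// GeoportalClientCaller.projects() is assumed here as the way to obtain the caller instance.
ProjectsCaller projectsCaller = GeoportalClientCaller.projects();
Integer count = projectsCaller.getCountByPhaseFor("ucd-profile-id", "Published", "OK");
System.out.println("Projects in phase Published with status OK: " + count);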
/**
* Gets the phases into document store collection.
*
@ -270,7 +301,7 @@ public class ProjectsCaller {
for (Archive archive : listArchives) {
String theType = archive.getString("_type");
if (theType.equalsIgnoreCase("DOCUMENT-STORE-COLLECTION")) {
if (theType.equalsIgnoreCase(DOCUMENT_STORE_COLLECTION)) {
com.jayway.jsonpath.Configuration configuration = com.jayway.jsonpath.Configuration.builder()
.jsonProvider(new JsonOrgJsonProvider()).build();
String toJSON = archive.toJson();
@ -295,14 +326,15 @@ public class ProjectsCaller {
* @param profileID the profile ID
* @param projectID the project ID
* @param stepID the step ID
* @param optionalMessage the optional message
* @param options the options
* @return the project
* @throws Exception the exception
*/
public Project performStep(String profileID, String projectID, String stepID, Document options) throws Exception {
LOG.info("performStep called for profileID: {}, projectID: {}", profileID, projectID);
public Project performStep(String profileID, String projectID, String stepID, String optionalMessage, Document options) throws Exception {
LOG.info("performStep called for profileID: {}, projectID: {}. Optional message exists?: {}", profileID, projectID, optionalMessage!=null);
Projects<Project> client = (Projects<Project>) getClient(profileID);
StepExecutionRequest request = new StepExecutionRequest(stepID, options);
PerformStepRequest request = new PerformStepRequest(stepID, optionalMessage, options);
Project project = client.performStep(projectID, request);
LOG.info("performStep returning project ID: " + project.getId());
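
A sketch of the updated performStep call, which now forwards the optional message to the service via PerformStepRequest; the step ID and the two IDs are placeholders, and projectsCaller is the instance from the sketch above.

Document options = new Document();  // org.bson.Document with step-specific options, possibly empty
Project updated = projectsCaller.performStep("ucd-profile-id", "mongo-project-id", "SUBMIT-FOR-REVIEW",
        "Please review the new fileset", options);
System.out.println("performStep returned project ID: " + updated.getId());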
@ -391,6 +423,32 @@ public class ProjectsCaller {
client.deleteRelation(request);
}
/**
* Delete fileset.
*
* @param profileID the profile ID
* @param projectID the project ID
* @param jsonPathToFileset the json path to fileset
* @param force the force
* @param ignoreErrors the ignore errors
* @return the project
* @throws RemoteException the remote exception
*/
public Project deleteFileset(String profileID, String projectID, String jsonPathToFileset, Boolean force,
Boolean ignoreErrors) throws RemoteException {
LOG.info("deleteFileset called for profileID {} and projectID {}, fileset path: {}", profileID, projectID,
jsonPathToFileset);
Projects<Project> client = (Projects<Project>) getClient(profileID);
ignoreErrors = ignoreErrors == null ? false : ignoreErrors;
Project project = client.deleteFileSet(projectID, jsonPathToFileset, force, ignoreErrors);
LOG.info("fileset {} deleted", jsonPathToFileset);
LOG.debug("returning new project: {} ", project.getTheDocument());
return project;
}
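
And the corresponding sketch for deleteFileset; the JSONPath to the fileset is purely illustrative, and passing null for ignoreErrors falls back to false as handled above.

Project project = projectsCaller.deleteFileset("ucd-profile-id", "mongo-project-id",
        "$._theDocument.images[0].fileset", true, null);  // force=true, ignoreErrors defaulted to false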
/**
* Simple query.
*

View File

@ -56,7 +56,7 @@ public class GeportalCheckAccessPolicy {
}
/**
* Checks if is accessible accoding to access policies.
* Checks if is accessible according to access policies.
*
* @param policy the policy
* @param myLogin the my login
@ -91,4 +91,21 @@ public class GeportalCheckAccessPolicy {
return true;
}
/**
* Access policy from session login.
*
* @param mySessionLogin the my session login
* @return the access policy
*/
public static ACCESS_POLICY accessPolicyFromSessionLogin(String mySessionLogin) {
if (mySessionLogin == null || mySessionLogin.isEmpty()) {
// here is not open and the user is not authenticated
return ACCESS_POLICY.OPEN;
}
return ACCESS_POLICY.RESTICTED;
}
}
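
The new helper maps the presence of a session login to an access policy: no login means the public OPEN view, any authenticated login the restricted one. A one-line usage sketch, where mySessionLogin is assumed to hold the current user's login, if any:

// null or empty login -> ACCESS_POLICY.OPEN; otherwise -> ACCESS_POLICY.RESTICTED (constant name as in the source)
ACCESS_POLICY policy = GeportalCheckAccessPolicy.accessPolicyFromSessionLogin(mySessionLogin);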

View File

@ -16,6 +16,17 @@ public class GeoportalItemReferences implements Serializable {
*/
private static final long serialVersionUID = -7021431511279022193L;
/**
* The Enum SHARE_LINK_TO.
*
* @author Francesco Mangiacrapa at ISTI-CNR francesco.mangiacrapa@isti.cnr.it
*
* Apr 5, 2024
*/
public static enum SHARE_LINK_TO {
DATA_VIEWER, DATA_ENTRY
}
// this is the mongoID
private String projectID;
private String profileID; // this is the profileID - UCD
@ -26,6 +37,8 @@ public class GeoportalItemReferences implements Serializable {
private String layerObjectType;
private SHARE_LINK_TO shareLinkTo;
/**
* Instantiates a new geo na object.
*/
@ -34,17 +47,33 @@ public class GeoportalItemReferences implements Serializable {
}
/**
* Instantiates a new geo na item ref.
* Instantiates a new geoportal item references.
* Backward compatibility. Use {{@link #GeoportalItemReferences(String, String, SHARE_LINK_TO)}}
*
* @param projectID the project ID
* @param profileID the profile ID
*/
@Deprecated
public GeoportalItemReferences(String projectID, String profileID) {
super();
this.projectID = projectID;
this.profileID = profileID;
}
/**
* Instantiates a new geo na item ref.
*
* @param projectID the project ID
* @param profileID the profile ID
* @param shareLinkTo the share link to
*/
public GeoportalItemReferences(String projectID, String profileID, SHARE_LINK_TO shareLinkTo) {
super();
this.projectID = projectID;
this.profileID = profileID;
this.shareLinkTo = shareLinkTo;
}
/**
* Instantiates a new geo na item ref.
*
@ -95,6 +124,15 @@ public class GeoportalItemReferences implements Serializable {
return itemName;
}
/**
* Gets the share link to.
*
* @return the share link to
*/
public SHARE_LINK_TO getShareLinkTo() {
return shareLinkTo;
}
/**
* Sets the item name.
*
@ -160,8 +198,9 @@ public class GeoportalItemReferences implements Serializable {
builder.append(openLink);
builder.append(", layerObjectType=");
builder.append(layerObjectType);
builder.append(", shareLinkTo=");
builder.append(shareLinkTo);
builder.append("]");
return builder.toString();
}
}
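
A sketch tying the new SHARE_LINK_TO target to the reworked GeoportalCommon.getPublicLinksFor shown earlier; scope and IDs are placeholders.

GeoportalItemReferences itemRef = new GeoportalItemReferences("mongo-project-id", "ucd-profile-id",
        SHARE_LINK_TO.DATA_VIEWER);
GeoportalItemReferences resolved = new GeoportalCommon().getPublicLinksFor("/gcube/devsec/devVRE", itemRef, true);
System.out.println(resolved);  // toString now reports shareLinkTo as well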

View File

@ -90,6 +90,10 @@ public class SearchingFilter implements Serializable {
private LinkedHashMap<String, Object> projection;
private String profileID;
private String projectID;
/**
* Instantiates a new sort filter.
*/
@ -180,6 +184,19 @@ public class SearchingFilter implements Serializable {
this.orderByFields = orderByFields;
}
public void setGetForIDs(String profileID, String projectID) {
this.profileID = profileID;
this.projectID = projectID;
}
public String getProfileID() {
return profileID;
}
public String getProjectID() {
return projectID;
}
/**
* Sets the order.
*
@ -200,8 +217,11 @@ public class SearchingFilter implements Serializable {
builder.append(conditions);
builder.append(", projection=");
builder.append(projection);
builder.append(", profileID=");
builder.append(profileID);
builder.append(", projectID=");
builder.append(projectID);
builder.append("]");
return builder.toString();
}
}

View File

@ -10,16 +10,19 @@ package org.gcube.application.geoportalcommon.shared.config;
*/
public enum OPERATION_ON_ITEM {
CREATE_NEW_PROJECT("Create New Project"),
VIEW_ON_MAP("View on Map"),
SHOW_METADATA("Show Metadata"),
VIEW_REPORT("View the Report"),
EDIT_PROJECT("Edit the Project"),
CLONE_PROJECT("Clone the Project"),
PUBLISH_UNPUBLISH_PROJECT("Publish/UnPublish the Project"),
DELETE_PROJECT("Delete the Project"),
CREATE_RELATION("Create Relation between two Projects"),
VIEW_RELATIONSHIPS("View the relationship/s created for the Project");
CREATE_NEW_PROJECT("new", "Create New Project"),
VIEW_PROJECT_AS_DOCUMENT("vpd", "View Project as Document"),
VIEW_PROJECT_AS_JSON("vpj", "View Projet as JSON"),
VIEW_ON_MAP("vpm","View on Map"),
GET_SHAREABLE_LINK("shl","Get Shareable Link"),
VIEW_REPORT("vpr","View the Report"),
EDIT_PROJECT("edt","Edit the Project"),
CLONE_PROJECT("cln","Clone the Project"),
PUBLISH_UNPUBLISH_PROJECT("pup","Publish/UnPublish the Project"),
DELETE_PROJECT("dlt","Delete the Project"),
CREATE_RELATION("crr","Create Relation between two Projects"),
DELETE_RELATION("dlr","Delete Relation between two Projects"),
VIEW_RELATIONSHIPS("vpr", "View the relationship/s created for the Project");
String label;
@ -28,7 +31,7 @@ public enum OPERATION_ON_ITEM {
*
* @param label the label
*/
OPERATION_ON_ITEM(String label){
OPERATION_ON_ITEM(String id, String label){
this.label = label;
}

View File

@ -19,6 +19,8 @@ public class ResultDocumentDV extends DocumentDV implements Serializable {
private GeoJSON spatialReference;
private WORKFLOW_PHASE worflowPhase; //never used. Just for serialization in GWT
/**
*
*/

View File

@ -0,0 +1,35 @@
package org.gcube.application.geoportalcommon.shared.geoportal;
/**
* The Enum WORKFLOW_PHASE.
*
* @author Francesco Mangiacrapa at ISTI-CNR francesco.mangiacrapa@isti.cnr.it
*
* Jan 19, 2023
*/
public enum WORKFLOW_PHASE {
PUBLISHED("Published"), //Should be always the last one
DRAFT("Draft");
private String label;
/**
* Instantiates a new workflow phase.
*
* @param label the label
*/
WORKFLOW_PHASE(String label) {
this.label = label;
}
/**
* Gets the label.
*
* @return the label
*/
public String getLabel() {
return label;
}
}

View File

@ -0,0 +1,82 @@
package org.gcube.application.geoportalcommon.shared.geoportal.config;
import java.io.Serializable;
import java.util.List;
import org.gcube.application.geoportalcommon.shared.geoportal.config.layers.LayerIDV;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* The Class GroupedLayersDV.
*
* @author Francesco Mangiacrapa at ISTI-CNR francesco.mangiacrapa@isti.cnr.it
*
* May 12, 2023
*/
public class GroupedLayersDV<T extends LayerIDV> implements Serializable, GeoportalConfigurationID {
/**
*
*/
private static final long serialVersionUID = -2021774489849084231L;
private String name;
private String description;
@JsonProperty(value = "layers")
private List<T> listCustomLayers;
@Override
public String getID() {
// TODO Auto-generated method stub
return null;
}
@Override
public void setID(String configID) {
// TODO Auto-generated method stub
}
public GroupedLayersDV() {
}
public String getName() {
return name;
}
public String getDescription() {
return description;
}
public List<T> getListCustomLayers() {
return listCustomLayers;
}
public void setName(String name) {
this.name = name;
}
public void setDescription(String description) {
this.description = description;
}
public void setListCustomLayers(List<T> listCustomLayers) {
this.listCustomLayers = listCustomLayers;
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("GroupedLayersDV [name=");
builder.append(name);
builder.append(", description=");
builder.append(description);
builder.append(", listCustomLayers=");
builder.append(listCustomLayers);
builder.append("]");
return builder.toString();
}
}

View File

@ -0,0 +1,98 @@
package org.gcube.application.geoportalcommon.shared.geoportal.config.layers;
import java.io.Serializable;
/**
* The Class ConfiguredLayerDV.
*
* @author Francesco Mangiacrapa at ISTI-CNR francesco.mangiacrapa@isti.cnr.it
*
* May 12, 2023
*/
public class ConfiguredLayerDV implements LayerIDV, Serializable{
/**
*
*/
private static final long serialVersionUID = 6910607957385140987L;
private String title;
private String description;
private String name;
private String wms_url;
private String wfs_url;
boolean display = false;
public ConfiguredLayerDV() {
}
public String getTitle() {
return title;
}
public String getDescription() {
return description;
}
public String getName() {
return name;
}
public String getWMS_URL() {
return wms_url;
}
public String getWFS_URL() {
return wfs_url;
}
public boolean isDisplay() {
return display;
}
public void setTitle(String title) {
this.title = title;
}
public void setDescription(String description) {
this.description = description;
}
public void setName(String name) {
this.name = name;
}
public void setWms_url(String wms_url) {
this.wms_url = wms_url;
}
public void setWfs_url(String wfs_url) {
this.wfs_url = wfs_url;
}
public void setDisplay(boolean display) {
this.display = display;
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("ConfiguredLayerDV [title=");
builder.append(title);
builder.append(", description=");
builder.append(description);
builder.append(", name=");
builder.append(name);
builder.append(", wms_url=");
builder.append(wms_url);
builder.append(", wfs_url=");
builder.append(wfs_url);
builder.append(", display=");
builder.append(display);
builder.append("]");
return builder.toString();
}
}

View File

@ -0,0 +1,78 @@
package org.gcube.application.geoportalcommon.shared.geoportal.config.layers;
import java.util.List;
public class CrossFilteringLayerDV extends ConfiguredLayerDV {
/**
*
*/
private static final long serialVersionUID = 1130075528037312939L;
String table_show_field;
String table_key_field;
String table_parent_key_field;
String table_geometry_name;
List<CrossFilteringLayerDV> related_to;
public CrossFilteringLayerDV() {
}
public String getTable_show_field() {
return table_show_field;
}
public String getTable_key_field() {
return table_key_field;
}
public String getTable_parent_key_field() {
return table_parent_key_field;
}
public String getTable_geometry_name() {
return table_geometry_name;
}
public List<CrossFilteringLayerDV> getRelated_to() {
return related_to;
}
public void setTable_show_field(String table_show_field) {
this.table_show_field = table_show_field;
}
public void setTable_key_field(String table_key_field) {
this.table_key_field = table_key_field;
}
public void setTable_parent_key_field(String table_parent_key_field) {
this.table_parent_key_field = table_parent_key_field;
}
public void setTable_geometry_name(String table_geometry_name) {
this.table_geometry_name = table_geometry_name;
}
public void setRelated_to(List<CrossFilteringLayerDV> related_to) {
this.related_to = related_to;
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("CrossFilteringLayerDV [table_show_field=");
builder.append(table_show_field);
builder.append(", table_key_field=");
builder.append(table_key_field);
builder.append(", table_parent_key_field=");
builder.append(table_parent_key_field);
builder.append(", table_geometry_name=");
builder.append(table_geometry_name);
builder.append(", related_to=");
builder.append(related_to);
builder.append("]");
return builder.toString();
}
}

View File

@ -0,0 +1,10 @@
package org.gcube.application.geoportalcommon.shared.geoportal.config.layers;
public interface LayerIDV {
String getName();
String getTitle();
String getWMS_URL();
String getWFS_URL();
}

View File

@ -6,6 +6,10 @@ import org.gcube.application.geoportalcommon.shared.geoportal.materialization.in
public class GeoJSON implements Serializable {
public static final String TYPE = "type";
public static final String BBOX = "bbox";
public static final String CRS = "crs";
public static final String COORDINATES = "coordinates";
/**
*
*/

View File

@ -6,24 +6,32 @@ import java.util.List;
public class FilesetDV implements Serializable {
/**
*
*/
private static final long serialVersionUID = -3108729581669778828L;
private String name;
private List<PayloadDV> listPayloads;
private String gcubeProfileFieldName; // It is the gcubeProfileFieldName in the UCD filePaths
private String filesetFieldName; // It is the fieldName in the UCD filePaths
private List<PayloadDV> listPayloads = new ArrayList<PayloadDV>();
public FilesetDV() {
}
public String getName() {
return name;
public String getFilesetFieldName() {
return filesetFieldName;
}
public void setName(String name) {
this.name = name;
public void setFilesetFieldName(String filesetFieldName) {
this.filesetFieldName = filesetFieldName;
}
public String getGcubeProfileFieldName() {
return gcubeProfileFieldName;
}
public void setGcubeProfileFieldName(String name) {
this.gcubeProfileFieldName = name;
}
public List<PayloadDV> getListPayload() {
@ -31,28 +39,27 @@ public class FilesetDV implements Serializable {
}
public void addPayloadDV(PayloadDV payloadDV) {
if (listPayloads == null)
listPayloads = new ArrayList<PayloadDV>();
listPayloads.add(payloadDV);
}
public void addListPayloadsDV(List<PayloadDV> listPayloadsDV) {
if (listPayloads == null)
listPayloads = new ArrayList<PayloadDV>();
listPayloads.addAll(listPayloadsDV);
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("FilesetDV [name=");
builder.append(name);
builder.append("FilesetDV [gcubeProfileFieldName=");
builder.append(gcubeProfileFieldName);
builder.append(", filesetFieldName=");
builder.append(filesetFieldName);
builder.append(", listPayloads=");
builder.append(listPayloads);
builder.append("]");
return builder.toString();
}
}

View File

@ -0,0 +1,119 @@
package org.gcube.application.geoportalcommon.shared.geoportal.step;
import java.io.Serializable;
import org.gcube.application.geoportalcommon.shared.geoportal.project.LifecycleInformationDV;
/**
* The Class StepPerformedResultDV.
*
* @author Francesco Mangiacrapa at ISTI-CNR francesco.mangiacrapa@isti.cnr.it
*
* Apr 11, 2024
*/
public class StepPerformedResultDV implements Serializable {
/**
*
*/
private static final long serialVersionUID = 7065890375433142728L;
private String projectId;
private String profileID;
private LifecycleInformationDV lifecycleInfoDV;
/**
* Instantiates a new step performed result DV.
*/
public StepPerformedResultDV() {
}
/**
* Instantiates a new step performed result DV.
*
* @param profileID the profile ID
* @param projectId the project id
* @param lifecycleInfoDV the lifecycle info DV
*/
public StepPerformedResultDV(String profileID, String projectId, LifecycleInformationDV lifecycleInfoDV) {
super();
this.projectId = projectId;
this.profileID = profileID;
this.lifecycleInfoDV = lifecycleInfoDV;
}
/**
* Gets the project id.
*
* @return the project id
*/
public String getProjectId() {
return projectId;
}
/**
* Gets the profile ID.
*
* @return the profile ID
*/
public String getProfileID() {
return profileID;
}
/**
* Gets the lifecycle info DV.
*
* @return the lifecycle info DV
*/
public LifecycleInformationDV getLifecycleInfoDV() {
return lifecycleInfoDV;
}
/**
* Sets the project id.
*
* @param projectId the new project id
*/
public void setProjectId(String projectId) {
this.projectId = projectId;
}
/**
* Sets the profile ID.
*
* @param profileID the new profile ID
*/
public void setProfileID(String profileID) {
this.profileID = profileID;
}
/**
* Sets the lifecycle info DV.
*
* @param lifecycleInfoDV the new lifecycle info DV
*/
public void setLifecycleInfoDV(LifecycleInformationDV lifecycleInfoDV) {
this.lifecycleInfoDV = lifecycleInfoDV;
}
/**
* To string.
*
* @return the string
*/
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("StepPerformedResultDV [projectId=");
builder.append(projectId);
builder.append(", profileID=");
builder.append(profileID);
builder.append(", lifecycleInfoDV=");
builder.append(lifecycleInfoDV);
builder.append("]");
return builder.toString();
}
}

View File

@ -1,7 +1,11 @@
package org.gcube.application.geoportalcommon.shared.geoportal.ucd;
public enum GEOPORTAL_CONFIGURATION_TYPE {
item_fields("itemFields"), gcube_profiles("gcubeProfiles"), actions_definition("actionsDefinition");
item_fields("itemFields"),
gcube_profiles("gcubeProfiles"),
actions_definition("actionsDefinition"),
grouped_overlay_layers("grouped_overlay_layers"),
grouped_cross_filtering_layers("grouped_cross_filtering_layers");
String id;

View File

@ -7,6 +7,8 @@ public enum GEOPORTAL_DATA_HANDLER {
geoportal_data_entry("org.gcube.portlets.user.geoportal-data-entry-app", "DATA_ENTRY_GUI"),
geoportal_workflow_action_list("org.gcube.portlets.user.geoportal-workflow-action-list","WORKFLOW_ACTION_LIST_GUI"),
geoportal_timeline_json_template("org.gcube.portlets.user.geoportal-timeline-json-template","TIMELINE_JSON_TEMPLATE"),
geoportal_grouped_cross_filtering("org.gcube.portlets.user.grouped_cross_filtering_layers","GROUPED_CROSS_FILTERING_LAYERS"),
geoportal_grouped_overlay_layers("org.gcube.portlets.user.grouped_overlay_layers","GROUPED_OVERLAY_LAYERS"),
gna_concessioni_lc("GNA-CONCESSIONI-LC", "LifecycleManagement");
String id;

View File

@ -2,6 +2,6 @@ package org.gcube.application.geoportalcommon.shared.geoportal.view;
public interface CheckEmpty {
public boolean isEmpty();
public Boolean isEmpty();
}

View File

@ -35,7 +35,7 @@ public class SectionView implements Serializable, CheckEmpty {
* @return true, if is empty
*/
@Override
public boolean isEmpty() {
public Boolean isEmpty() {
if (listSubDocuments == null)
return true;
@ -53,7 +53,7 @@ public class SectionView implements Serializable, CheckEmpty {
*
* @return true, if successful
*/
public boolean hasSpatialLayers() {
public Boolean hasSpatialLayers() {
if (listSubDocuments == null)
return false;

View File

@ -24,7 +24,7 @@ public class SubDocumentView implements Serializable, CheckEmpty {
}
@Override
public boolean isEmpty() {
public Boolean isEmpty() {
if (metadataAsJSON != null && !metadataAsJSON.isEmpty())
return false;

View File

@ -1,5 +1,6 @@
package org.gcube.application.geoportalcommon.util;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
@ -82,6 +83,12 @@ public class DateUtils {
// TODO: handle exception
}
try {
return new SimpleDateFormat("yyyy-MM-dd").parse(date);
} catch (Exception e) {
// TODO: handle exception
}
return null;
}
}
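The hunk above appends a SimpleDateFormat fallback to a parsing chain whose enclosing method signature is cut off by the diff. A self-contained sketch of the same pattern follows; the method name and signature are assumptions, not taken from the hunk.
// Assumed shape of the fallback: try ISO java.time parsing first, then a plain yyyy-MM-dd
// legacy formatter, returning null if every attempt fails (mirroring the try/catch chain above).
public static java.util.Date parseDateFallback(String date) {
	try {
		return java.util.Date.from(
				LocalDate.parse(date).atStartOfDay(java.time.ZoneId.systemDefault()).toInstant());
	} catch (Exception e) {
		// not an ISO local date, fall through to the legacy formatter
	}
	try {
		return new SimpleDateFormat("yyyy-MM-dd").parse(date);
	} catch (Exception e) {
		return null;
	}
}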

View File

@ -0,0 +1,27 @@
package org.gcube.application.geoportalcommon.util;
/**
* The Class StringUtil.
*
* @author Francesco Mangiacrapa at ISTI-CNR francesco.mangiacrapa@isti.cnr.it
*
* Aug 31, 2023
*/
public class StringUtil {
/**
* Ellipsize.
*
* @param input the input
* @param maxLength the max length
* @return the string
*/
public static String ellipsize(String input, int maxLength) {
String ellip = "...";
if (input == null || input.length() <= maxLength || input.length() < ellip.length()) {
return input;
}
return input.substring(0, maxLength - ellip.length()).concat(ellip);
}
}
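Quick usage sketch (not part of the diff):
// Keeps at most 20 characters, reserving 3 of them for the trailing ellipsis
String shortName = StringUtil.ellipsize("della Civita di Tarquinia", 20);
System.out.println(shortName); // prints: della Civita di T...
System.out.println(StringUtil.ellipsize("short", 20)); // shorter than maxLength, returned unchanged: short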

src/test/.gitignore (new file)
View File

@ -0,0 +1 @@
/resources/

View File

@ -0,0 +1,43 @@
package org.gcube.application;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
public class ContextConfigTest {
private static final String GCUBE_CONFIG_PROPERTIES_FILENAME = "gcube_config.properties";
// APP Working Directory + /src/test/resources must be the location of
// gcube_config.properties
private static String gcube_config_path = String.format("%s/%s",
System.getProperty("user.dir") + "/src/test/resources", GCUBE_CONFIG_PROPERTIES_FILENAME);
public static String CONTEXT;
public static String TOKEN;
/**
* Read context settings.
*/
public static void readContextSettings() {
try (InputStream input = new FileInputStream(gcube_config_path)) {
Properties prop = new Properties();
// load a properties file
prop.load(input);
CONTEXT = prop.getProperty("CONTEXT");
TOKEN = prop.getProperty("TOKEN");
// get the property value and print it out
System.out.println("CONTEXT: " + CONTEXT);
System.out.println("TOKEN: " + TOKEN);
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
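The test reads CONTEXT and TOKEN from src/test/resources/gcube_config.properties (a location git-ignored by the new .gitignore above). A minimal example of that file, with placeholder values only:
# src/test/resources/gcube_config.properties -- placeholder values, never commit real tokens
CONTEXT=/gcube/devsec/devVRE
TOKEN=<your-gcube-token>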

View File

@ -0,0 +1,428 @@
package org.gcube.application;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.gcube.application.geoportal.common.model.document.Project;
import org.gcube.application.geoportalcommon.ConvertToDataValueObjectModel;
import org.gcube.application.geoportalcommon.ProjectDVBuilder;
import org.gcube.application.geoportalcommon.geoportal.GeoportalClientCaller;
import org.gcube.application.geoportalcommon.geoportal.ProjectsCaller;
import org.gcube.application.geoportalcommon.shared.SearchingFilter;
import org.gcube.application.geoportalcommon.shared.SearchingFilter.LOGICAL_OP;
import org.gcube.application.geoportalcommon.shared.WhereClause;
import org.gcube.application.geoportalcommon.shared.geoportal.ResultDocumentDV;
import org.gcube.application.geoportalcommon.shared.geoportal.project.ProjectDV;
import org.gcube.application.se.RuntimeResourceReader;
import org.gcube.application.se.ServiceEndpointBean;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.portlets.user.uriresolvermanager.UriResolverManager;
import org.gcube.portlets.user.uriresolvermanager.resolvers.query.GeoportalResolverQueryStringBuilder;
import org.gcube.portlets.user.uriresolvermanager.resolvers.query.GeoportalResolverQueryStringBuilder.RESOLVE_AS;
public class CreateGeoportalGisLinkInTheCentroidLayers {
// private static String CONTEXT = "/gcube/devsec/devVRE";
// private static String TOKEN = ""; // devVRE
// private static final String CONTEXT = "/pred4s/preprod/preVRE";
// private static final String TOKEN = ""; //preVRE
// private static final String CONTEXT = "/d4science.research-infrastructures.eu/D4OS/GNA";
// private static final String TOKEN = ""; //GNA
//Esquiline
private static final String CONTEXT = "/d4science.research-infrastructures.eu/gCubeApps/Esquiline";
private static final String TOKEN = ""; //Esquiline
private static String PROFILE_ID = "esquilino";
// GEOPORTAL
//private static String PROFILE_ID = "profiledConcessioni";
// private static final String PROFILE_ID = "concessioni-estere";
private static final String JSON_KEY_DATA_FINE_PROGETTO = "dataFineProgetto";
private static final String JSON_KEY_DATA_INIZIO_PROGETTO = "dataInizioProgetto";
private static final String JSON_KEY_INTRODUZIONE = "introduzione";
// DB
private static final String platformName = "postgis";
private static final String category = "Database";
// private static final String resourceName = "GNA-POSTGIS-DB"; // devVRE
//private static final String resourceName = "Geoserver-t postgis"; //preVRE
private static final String resourceName = "GNA-postgis"; // GNA
private static final String TABLE_COLUMN_GEO_VIEWER_LINK = "geov_link";
private static ProjectsCaller client = null;
public static final boolean READ_ONLY_MODE = true;
/**
* Inits the geoportal client, setting the scope and the security token providers.
*/
// @Before
public static void initGeoportalClient() {
// assumeTrue(GCubeTest.isTestInfrastructureEnabled());
ScopeProvider.instance.set(CONTEXT);
SecurityTokenProvider.instance.set(TOKEN);
client = GeoportalClientCaller.projects();
}
public static void main(String[] args) {
System.out.println("READ_ONLY_MODE ENABLED: " + READ_ONLY_MODE);
System.out.println("CONTEXT IS: " + CONTEXT);
System.out.println("PROFILE_ID: " + PROFILE_ID);
initGeoportalClient();
//devVRE
// PROFILE_ID = "profiledConcessioni";
// String tableName = "profiledconcessioni_devvre_centroids";
// tableName = "profiledconcessioni_internal__devvre_centroids";
// PROFILE_ID = "concessioni-estere";
// String tableName = "concessioni_estere_devvre_centroids";
// tableName = "concessioni_estere_internal__devvre_centroids";
// preVRE
// PROFILE_ID = "profiledConcessioni";
//String tableName = "profiledconcessioni_internal__prevre_centroids";
// String tableName = "profiledconcessioni_prevre_centroids";
// PROFILE_ID = "concessioni-estere";
// String tableName = "concessioni_estere_internal__prevre_centroids";
// tableName = "concessioni_estere_prevre_centroids";
// GNA
// PROFILE_ID = "concessioni-estere";
// String tableName = "concessioni_estere_internal__gna_centroids";
// String tableName = "concessioni_estere_gna_centroids";
// PROFILE_ID = "profiledConcessioni";
// String tableName = "profiledconcessioni_internal__gna_centroids";
// String tableName = "profiledconcessioni_gna_centroids";
// Esquiline
PROFILE_ID = "esquilino";
String tableName = "esquilino_esquiline_centroids";
long startTime = System.currentTimeMillis();
try {
RuntimeResourceReader rrr = new RuntimeResourceReader(CONTEXT, resourceName, platformName, category, null);
ServiceEndpointBean se = rrr.getListSE().get(0);
AccessPoint ap = se.getListAP().get(0);
System.out.println(ap.address());
System.out.println(ap.username());
String pwdDB = RuntimeResourceReader.dectryptPassword(CONTEXT, ap);
System.out.println(pwdDB);
Connection dbConnection = getDatabaseConnection(ap.address(), ap.username(), pwdDB);
// MUST BE DONE JUST ONCE
try {
boolean columnExists = checkColumnExists(dbConnection, tableName, TABLE_COLUMN_GEO_VIEWER_LINK);
if(!columnExists) {
addColumnTable(dbConnection, tableName, TABLE_COLUMN_GEO_VIEWER_LINK, "TEXT");
}
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
return;
}
List<String> listProjectIdsIntoDB = readTableIDs(dbConnection, tableName, "projectid");
ScopeProvider.instance.set(CONTEXT);
SecurityTokenProvider.instance.set(TOKEN);
// List<ResultDocumentDV> listPublishedProjectsIntoService =
// getListProjectsDVFiltered();
int counter = 1;
UriResolverManager uriResolverManager = new UriResolverManager("GEO");
for (String projectId : listProjectIdsIntoDB) {
Project project = null;
System.out.println("\n\n################ " + counter + " of " + listProjectIdsIntoDB.size()
+ ") Reading the project id: " + projectId);
try {
project = client.getProjectByID(PROFILE_ID, projectId);
} catch (Exception e) {
System.err.println("Project id not found: " + projectId);
}
try {
if (project == null)
continue;
System.out.println("\n###### Trying to update " + TABLE_COLUMN_GEO_VIEWER_LINK
+ " column for project id: " + projectId);
ProjectDVBuilder projectBuilder = ProjectDVBuilder.newBuilder().fullDocumentMap(false);
ProjectDV projectDV = ConvertToDataValueObjectModel.toProjectDV(project, projectBuilder);
//Geoportal Resolver QueryString Builder
GeoportalResolverQueryStringBuilder builder = new GeoportalResolverQueryStringBuilder(projectDV.getProfileID(),projectId);
builder.scope(CONTEXT);
if(tableName.contains("internal")) {
builder.resolverAs(RESOLVE_AS.PRIVATE);
}
Map<String, String> params = builder.buildQueryParameters();
String shortLink = uriResolverManager.getLink(params, true);
LinkedHashMap<String, String> hashMap = new LinkedHashMap<String, String>();
hashMap.put(TABLE_COLUMN_GEO_VIEWER_LINK, shortLink);
updateTable(dbConnection, tableName, "projectId='" + projectId + "'", hashMap);
System.out.println("###UPDATED project id: " + projectId);
Thread.sleep(500);
System.out.println("################ Update completed for: " + projectId);
} catch (Exception e) {
System.err.println("Error on updating the table for the project id: " + projectId);
}
counter++;
}
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
long endTime = System.currentTimeMillis();
System.out.println("\n\nSCRIPT END at: " + endTime);
double toSec = (endTime - startTime) / 1000.0;
System.out.println("SCRIPT TERMINATED in " + toSec + "sec");
}
public static List<ResultDocumentDV> getListProjectsDVFiltered() throws Exception {
// List<Project> listOfProjects = client.getListForProfileID(PROFILE_ID);
SearchingFilter filter = new SearchingFilter();
// Where Clause
List<WhereClause> conditions = new ArrayList<WhereClause>();
Map<String, Object> searchInto = new HashMap<String, Object>();
searchInto.put("_lifecycleInformation._phase", "Published");
WhereClause whereClause = new WhereClause(LOGICAL_OP.OR, searchInto);
conditions.add(whereClause);
filter.setConditions(conditions);
LinkedHashMap<String, Object> projection = new LinkedHashMap<String, Object>();
// default
// PROJECTION
String theDoc = Project.THE_DOCUMENT + ".";
projection.put(Project.ID, 1);
projection.put(theDoc + "nome", 1);
projection.put(theDoc + JSON_KEY_DATA_INIZIO_PROGETTO, 1);
projection.put(theDoc + JSON_KEY_DATA_FINE_PROGETTO, 1);
filter.setProjection(projection);
Integer totalDocs = client.getTotalDocument(PROFILE_ID);
Iterator<Project> projects = client.queryOnMongo(PROFILE_ID, totalDocs, 0, null, filter);
List<ResultDocumentDV> results = ConvertToDataValueObjectModel.toListResultDocument(projects);
int i = 0;
for (ResultDocumentDV projectDV : results) {
System.out.println(++i + ") " + projectDV.getId() + " " + JSON_KEY_DATA_INIZIO_PROGETTO + ": "
+ projectDV.getDocumentAsMap().get(JSON_KEY_DATA_INIZIO_PROGETTO) + " "
+ JSON_KEY_DATA_FINE_PROGETTO + ": " + projectDV.getDocumentAsMap().get(JSON_KEY_DATA_FINE_PROGETTO)
+ " " + JSON_KEY_INTRODUZIONE + ": " + projectDV.getDocumentAsMap().get(JSON_KEY_INTRODUZIONE));
}
return results;
}
/**
* Gets the database connection.
*
* @param dbURL the db url
* @param user the user
* @param pwd the pwd
* @return the database connection
*/
public static Connection getDatabaseConnection(String dbURL, String user, String pwd) {
System.out.println("dbURL: " + dbURL);
Connection c = null;
try {
Class.forName("org.postgresql.Driver");
c = DriverManager.getConnection(dbURL, user, pwd);
c.setAutoCommit(false);
} catch (Exception e) {
e.printStackTrace();
System.err.println(e.getClass().getName() + ": " + e.getMessage());
System.exit(0);
}
System.out.println("Opened database successfully");
return c;
}
public static void addColumnTable(Connection con, String tableName, String newColumn, String type)
throws SQLException {
String alterTableString = String.format("ALTER TABLE %s ADD COLUMN %s %s", tableName, newColumn, type);
System.out.println("Executing: " + alterTableString);
try {
con.setAutoCommit(false);
if (!READ_ONLY_MODE) {
con.createStatement().execute(alterTableString);
con.commit();
}
} catch (SQLException e) {
e.printStackTrace();
if (con != null) {
try {
System.err.print("Transaction is being rolled back");
con.rollback();
} catch (SQLException excep) {
excep.printStackTrace();
}
}
}
System.out.println("Executed: " + alterTableString);
}
public static Boolean checkColumnExists(Connection con, String tableName, String columnName) throws SQLException {
String columnExistsLabel = "COLUMN_EXISTS";
String sql = String.format(
"SELECT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name='%s' AND column_name='%s') as %s",
tableName, columnName, columnExistsLabel);
System.out.println("Executing: " + sql);
PreparedStatement p;
ResultSet resultSet;
try {
p = con.prepareStatement(sql);
resultSet = p.executeQuery();
// Expected 1 row
resultSet.next();
Boolean columnExists = resultSet.getBoolean(columnExistsLabel);
System.out.println("\n####Column exists: " + columnExists + "\n");
return columnExists;
} catch (SQLException e) {
System.err.println(e);
return false;
}
}
public static void updateTable(Connection con, String tableName, String whereCondition,
LinkedHashMap<String, String> mapColumnValue) throws SQLException {
StringBuilder updateSQL = new StringBuilder();
updateSQL.append("UPDATE " + tableName + " SET");
int i = 1;
for (String column : mapColumnValue.keySet()) {
updateSQL.append(" " + column + "=?");
if (i < mapColumnValue.size())
updateSQL.append(", ");
i++;
}
updateSQL.append(" WHERE " + whereCondition);
String update = updateSQL.toString();
// System.out.println("Executing: " + update);
try (PreparedStatement updatePS = con.prepareStatement(update);) {
int index = 1;
for (String column : mapColumnValue.keySet()) {
updatePS.setString(index, mapColumnValue.get(column));
index++;
}
con.setAutoCommit(false);
System.out.println("Executing: " + updatePS.toString());
if (!READ_ONLY_MODE) {
updatePS.executeUpdate();
con.commit();
}
} catch (SQLException e) {
e.printStackTrace();
if (con != null) {
try {
System.err.print("Transaction is being rolled back");
con.rollback();
} catch (SQLException excep) {
excep.printStackTrace();
}
}
}
}
// Reads all rows of the given table and collects the values of the project-id column
public static List<String> readTableIDs(Connection con, String tableName, String columnNameProjectId) {
PreparedStatement p = null;
ResultSet resultSet = null;
// Try block to catch exception/s
List<String> listProjectIds = new ArrayList<String>();
try {
// SQL command data stored in String datatype
String sql = String.format("SELECT * FROM %s", tableName);
p = con.prepareStatement(sql);
resultSet = p.executeQuery();
ResultSetMetaData rsmd = resultSet.getMetaData();
int columnsNumber = rsmd.getColumnCount();
int row = 1;
System.out.println("\n####TABLE: " + tableName + " content..\n");
while (resultSet.next()) {
System.out.print("" + row + "] ");
for (int i = 1; i <= columnsNumber; i++) {
String columnValue = resultSet.getString(i);
System.out.print("(" + rsmd.getColumnName(i) + ") " + columnValue + " | ");
}
row++;
System.out.println("\n");
String projectId = resultSet.getString(columnNameProjectId);
listProjectIds.add(projectId);
}
System.out.println("####TABLE: " + tableName + " end content\n");
}
// Log any SQL exception to stderr and return the ids collected so far
catch (SQLException e) {
System.err.println(e);
}
System.out.println("returning list IDs: " + listProjectIds);
return listProjectIds;
}
}
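For reference (not part of the diff): with the Esquiline table configured above and a single geov_link column, the per-project update issued inside the main loop boils down to the call below; updateTable() executes the statement only when READ_ONLY_MODE is false.
// Mirrors the call made inside the main loop above
LinkedHashMap<String, String> columnValues = new LinkedHashMap<String, String>();
columnValues.put(TABLE_COLUMN_GEO_VIEWER_LINK, shortLink); // short GIS link returned by uriResolverManager.getLink(params, true)
updateTable(dbConnection, "esquilino_esquiline_centroids", "projectId='" + projectId + "'", columnValues);
// Resulting prepared statement:
// UPDATE esquilino_esquiline_centroids SET geov_link=? WHERE projectId='<projectId>'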

View File

@ -0,0 +1,538 @@
package org.gcube.application;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.gcube.application.geoportalcommon.geoportal.GeoportalClientCaller;
import org.gcube.application.geoportalcommon.geoportal.ProjectsCaller;
import org.gcube.application.se.RuntimeResourceReader;
import org.gcube.application.se.ServiceEndpointBean;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.common.scope.api.ScopeProvider;
public class GNA_New_DataModel_IndexTable_25925 {
private static String PROFILE_ID = "profiledConcessioni";
private static final String platformName = "postgis";
private static final String category = "Database";
// These are defined via ContextConfigTest.readContextSettings();
private static String CONTEXT = "";
private static String TOKEN = "";
// #### DEV
// DB
// private static final String resourceName = "GNA-POSTGIS-DB"; // devVRE
// #### PRE
// DB
private static final String resourceName = "Geoserver-t postgis"; // preVRE
// #### PROD
// DB
// private static final String resourceName = "GNA-postgis"; // GNA
// Esquiline
// private static final String CONTEXT = "/d4science.research-infrastructures.eu/gCubeApps/Esquiline";
// private static final String TOKEN = ""; //Esquiline
// private static String PROFILE_ID = "esquilino";
// GEOPORTAL
// private static String PROFILE_ID = "profiledConcessioni";
// private static final String PROFILE_ID = "concessioni-estere";
private static final Map<String, String> ADD_TABLE_COLUMNS = new LinkedHashMap<>();
// static {
// ADD_TABLE_COLUMNS.put("ufficio_mic_competente", "TEXT");
// ADD_TABLE_COLUMNS.put("funzionario_responsabile", "TEXT");
// ADD_TABLE_COLUMNS.put("modalita_individuazione", "TEXT");
// ADD_TABLE_COLUMNS.put("contesto_indagine", "TEXT");
// ADD_TABLE_COLUMNS.put("denominazione", "TEXT");
// ADD_TABLE_COLUMNS.put("stato_attuale", "TEXT");
// ADD_TABLE_COLUMNS.put("accessibilita", "TEXT");
// ADD_TABLE_COLUMNS.put("cronologia_macrofase", "TEXT");
// ADD_TABLE_COLUMNS.put("specifiche_cronologia", "TEXT");
// ADD_TABLE_COLUMNS.put("quota_max", "TEXT");
// ADD_TABLE_COLUMNS.put("quota_min", "TEXT");
// }
private static final Map<String, String> DROP_TABLE_COLUMNS = new HashMap<>();
// static {
// DROP_TABLE_COLUMNS.put("autore", "");
// DROP_TABLE_COLUMNS.put("titolare", "");
// DROP_TABLE_COLUMNS.put("titolare_licenza", "");
// }
private static final Map<String, String> RENAME_TABLE_COLUMNS = new HashMap<>();
//Rename from column to column
// static {
// RENAME_TABLE_COLUMNS.put("ufficio_mic_competente", "ufficio_competente");
// }
private static ProjectsCaller client = null;
public static final boolean READ_ONLY_MODE = true;
public static List<String> listTable = new ArrayList<String>();
/**
* Inits the geoportal client, setting the scope and the security token providers.
*/
// @Before
public static void initGeoportalClient() {
// assumeTrue(GCubeTest.isTestInfrastructureEnabled());
ContextConfigTest.readContextSettings();
CONTEXT = ContextConfigTest.CONTEXT;
TOKEN = ContextConfigTest.TOKEN;
ScopeProvider.instance.set(CONTEXT);
SecurityTokenProvider.instance.set(TOKEN);
client = GeoportalClientCaller.projects();
}
public static void main(String[] args) {
initGeoportalClient();
System.out.println("READ_ONLY_MODE ENABLED: " + READ_ONLY_MODE);
System.out.println("CONTEXT IS: " + CONTEXT);
System.out.println("PROFILE_ID: " + PROFILE_ID);
// devVRE
// PROFILE_ID = "profiledConcessioni";
// String tableName = "profiledconcessioni_devvre_centroids";
// tableName = "profiledconcessioni_internal__devvre_centroids";
// PROFILE_ID = "concessioni-estere";
// String tableName = "concessioni_estere_devvre_centroids";
// tableName = "concessioni_estere_internal__devvre_centroids";
// preVRE
// PROFILE_ID = "profiledConcessioni";
// listTable.add("profiledconcessioni_internal__prevre_centroids");
// listTable.add("profiledconcessioni_prevre_centroids");
PROFILE_ID = "concessioni-estere";
listTable.add("concessioni_estere_internal__prevre_centroids");
listTable.add("concessioni_estere_prevre_centroids");
// GNA
// PROFILE_ID = "concessioni-estere";
// String tableName = "concessioni_estere_internal__gna_centroids";
// String tableName = "concessioni_estere_gna_centroids";
// PROFILE_ID = "profiledConcessioni";
// String tableName = "profiledconcessioni_internal__gna_centroids";
// String tableName = "profiledconcessioni_gna_centroids";
// Esquiline
// PROFILE_ID = "esquilino";
// String tableName = "esquilino_esquiline_centroids";
long startTime = System.currentTimeMillis();
try {
RuntimeResourceReader rrr = new RuntimeResourceReader(CONTEXT, resourceName, platformName, category, null);
ServiceEndpointBean se = rrr.getListSE().get(0);
AccessPoint ap = se.getListAP().get(0);
System.out.println(ap.address());
System.out.println(ap.username());
String pwdDB = RuntimeResourceReader.dectryptPassword(CONTEXT, ap);
System.out.println(pwdDB);
Connection dbConnection = getDatabaseConnection(ap.address(), ap.username(), pwdDB);
for (String tableName : listTable) {
System.out.println("\n\n# SOURCE TABLE");
Boolean tableExists = checkTableExists(dbConnection, tableName);
if (!tableExists) {
throw new Exception("Table '" + tableName + "' does not exits in the DB!!! Exit");
}
printTableColumn(dbConnection, tableName);
System.out.println("\n\n### ADDING COLUMNS...\n");
// ADD NEW COLUMNS
for (String columnName : ADD_TABLE_COLUMNS.keySet()) {
try {
boolean columnExists = checkColumnExists(dbConnection, tableName, columnName);
if (!columnExists) {
addColumnTable(dbConnection, tableName, columnName, ADD_TABLE_COLUMNS.get(columnName));
int sleeping = 500;
System.out.println("... sleeping " + sleeping + " ...");
Thread.sleep(sleeping);
}
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
return;
}
}
System.out.println("\n\n### END ADD COLUMNS");
System.out.println("\n\n### DROPPING COLUMNS...\n");
// REMOVE OLD COLUMNS
for (String columnName : DROP_TABLE_COLUMNS.keySet()) {
try {
boolean columnExists = checkColumnExists(dbConnection, tableName, columnName);
if (columnExists) {
dropColumnTable(dbConnection, tableName, columnName, DROP_TABLE_COLUMNS.get(columnName));
int sleeping = 1000;
System.out.println("... sleeping " + sleeping + " ...");
Thread.sleep(sleeping);
}
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
return;
}
}
System.out.println("\n\n### END DROP COLUMNS");
System.out.println("\n\n### RENAMING COLUMNS...\n");
// RENAME_TABLE_COLUMNS
for (String columnName : RENAME_TABLE_COLUMNS.keySet()) {
try {
boolean columnExists = checkColumnExists(dbConnection, tableName, columnName);
if (columnExists) {
renameColumnTable(dbConnection, tableName, columnName, RENAME_TABLE_COLUMNS.get(columnName));
int sleeping = 1000;
System.out.println("... sleeping " + sleeping + " ...");
Thread.sleep(sleeping);
}
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
return;
}
}
System.out.println("\n\n### END RENAME COLUMNS");
System.out.println("\n\n# UPDATED TABLE");
printTableColumn(dbConnection, tableName);
}
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
long endTime = System.currentTimeMillis();
System.out.println("\n\nSCRIPT END at: " + endTime);
double toSec = (endTime - startTime) / 1000.0;
System.out.println("SCRIPT TERMINATED in " + toSec + "sec");
}
public static void printTableColumn(Connection con, String tablename) {
try {
DatabaseMetaData databaseMetaData = con.getMetaData();
// Print TABLE_TYPE "TABLE"
ResultSet columns = databaseMetaData.getColumns(null, null, tablename, null);
System.out.println("\n==== TABLE " + tablename + " ");
System.out.println("[COLUMN_NAME - TYPE_NAME ( COLUMN_SIZE )]");
System.out.println("-----------------------------------------------");
while (columns.next()) {
String columnName = columns.getString("COLUMN_NAME");
String columnType = columns.getString("TYPE_NAME");
int columnSize = columns.getInt("COLUMN_SIZE");
System.out.println("\t" + columnName + " - " + columnType + " (" + columnSize + ")");
}
System.out.println("-----------------------------------------------");
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
/**
* Gets the database connection.
*
* @param dbURL the db url
* @param user the user
* @param pwd the pwd
* @return the database connection
*/
public static Connection getDatabaseConnection(String dbURL, String user, String pwd) {
System.out.println("dbURL: " + dbURL);
Connection c = null;
try {
Class.forName("org.postgresql.Driver");
c = DriverManager.getConnection(dbURL, user, pwd);
c.setAutoCommit(false);
} catch (Exception e) {
e.printStackTrace();
System.err.println(e.getClass().getName() + ": " + e.getMessage());
System.exit(0);
}
System.out.println("Opened database successfully");
return c;
}
public static void addColumnTable(Connection con, String tableName, String newColumn, String type)
throws SQLException {
String alterTableString = String.format("ALTER TABLE %s ADD COLUMN %s %s", tableName, newColumn, type);
System.out.println("Executing: " + alterTableString);
try {
con.setAutoCommit(false);
if (!READ_ONLY_MODE) {
con.createStatement().execute(alterTableString);
con.commit();
}
} catch (SQLException e) {
e.printStackTrace();
if (con != null) {
try {
System.err.print("Transaction is being rolled back");
con.rollback();
} catch (SQLException excep) {
excep.printStackTrace();
}
}
}
System.out.println("Executed: " + alterTableString);
}
public static void renameColumnTable(Connection con, String tableName, String oldColumn, String newColumn)
throws SQLException {
String alterTableString = String.format("ALTER TABLE %s RENAME COLUMN %s TO %s", tableName, oldColumn,
newColumn);
System.out.println("Executing: " + alterTableString);
try {
con.setAutoCommit(false);
if (!READ_ONLY_MODE) {
con.createStatement().execute(alterTableString);
con.commit();
}
} catch (SQLException e) {
e.printStackTrace();
if (con != null) {
try {
System.err.print("Transaction is being rolled back");
con.rollback();
} catch (SQLException excep) {
excep.printStackTrace();
}
}
}
System.out.println("Executed: " + alterTableString);
}
public static void dropColumnTable(Connection con, String tableName, String oldColumn, String type)
throws SQLException {
String alterTableString = String.format("ALTER TABLE %s DROP COLUMN %s", tableName, oldColumn);
System.out.println("Executing: " + alterTableString);
try {
con.setAutoCommit(false);
if (!READ_ONLY_MODE) {
con.createStatement().execute(alterTableString);
con.commit();
}
} catch (SQLException e) {
e.printStackTrace();
if (con != null) {
try {
System.err.print("Transaction is being rolled back");
con.rollback();
} catch (SQLException excep) {
excep.printStackTrace();
}
}
}
System.out.println("Executed: " + alterTableString);
}
public static Boolean checkColumnExists(Connection con, String tableName, String columnName) throws SQLException {
String columnExistsLabel = "COLUMN_EXISTS";
String sql = String.format(
"SELECT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name='%s' AND column_name='%s') as %s",
tableName, columnName, columnExistsLabel);
System.out.println("\n+++ " + columnExistsLabel + " checking '" + tableName + "': " + sql);
PreparedStatement p;
ResultSet resultSet;
try {
p = con.prepareStatement(sql);
resultSet = p.executeQuery();
// Expected 1 row
resultSet.next();
Boolean columnExists = resultSet.getBoolean(columnExistsLabel);
System.out.println("\t RESP --> Column '" + columnName + "' exists: " + columnExists + "\n");
return columnExists;
} catch (SQLException e) {
System.err.println(e);
return false;
}
}
public static Boolean checkTableExists(Connection con, String tableName) throws SQLException {
String tableExistsLabel = "TABLE_EXISTS";
String sql = String.format(
"SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name='%s' AND table_type LIKE 'BASE TABLE' AND table_schema LIKE 'public') as %s",
tableName, tableExistsLabel);
System.out.println("\n+++ " + tableExistsLabel + " checking '" + tableName + "': " + sql);
PreparedStatement p;
ResultSet resultSet;
try {
p = con.prepareStatement(sql);
resultSet = p.executeQuery();
// Expected 1 row
resultSet.next();
Boolean tableExists = resultSet.getBoolean(tableExistsLabel);
System.out.println("\t RESP --> Table '" + tableName + "' exists: " + tableExists + "\n");
return tableExists;
} catch (SQLException e) {
System.err.println(e);
return false;
}
}
public static void updateTable(Connection con, String tableName, String whereCondition,
LinkedHashMap<String, String> mapColumnValue) throws SQLException {
StringBuilder updateSQL = new StringBuilder();
updateSQL.append("UPDATE " + tableName + " SET");
int i = 1;
for (String column : mapColumnValue.keySet()) {
updateSQL.append(" " + column + "=?");
if (i < mapColumnValue.size())
updateSQL.append(", ");
i++;
}
updateSQL.append(" WHERE " + whereCondition);
String update = updateSQL.toString();
// System.out.println("Executing: " + update);
try (PreparedStatement updatePS = con.prepareStatement(update);) {
int index = 1;
for (String column : mapColumnValue.keySet()) {
updatePS.setString(index, mapColumnValue.get(column));
index++;
}
con.setAutoCommit(false);
System.out.println("Executing: " + updatePS.toString());
if (!READ_ONLY_MODE) {
updatePS.executeUpdate();
con.commit();
}
} catch (SQLException e) {
e.printStackTrace();
if (con != null) {
try {
System.err.print("Transaction is being rolled back");
con.rollback();
} catch (SQLException excep) {
excep.printStackTrace();
}
}
}
}
// Reads all rows of the given table and collects the values of the project-id column
public static List<String> readTableIDs(Connection con, String tableName, String columnNameProjectId) {
PreparedStatement p = null;
ResultSet resultSet = null;
// Try block to catch exception/s
List<String> listProjectIds = new ArrayList<String>();
try {
// SQL command data stored in String datatype
String sql = String.format("SELECT * FROM %s", tableName);
p = con.prepareStatement(sql);
resultSet = p.executeQuery();
ResultSetMetaData rsmd = resultSet.getMetaData();
int columnsNumber = rsmd.getColumnCount();
int row = 1;
System.out.println("\n####TABLE: " + tableName + " content..\n");
while (resultSet.next()) {
System.out.print("" + row + "] ");
for (int i = 1; i <= columnsNumber; i++) {
String columnValue = resultSet.getString(i);
System.out.print("(" + rsmd.getColumnName(i) + ") " + columnValue + " | ");
}
row++;
System.out.println("\n");
String projectId = resultSet.getString(columnNameProjectId);
listProjectIds.add(projectId);
}
System.out.println("####TABLE: " + tableName + " end content\n");
}
// Log any SQL exception to stderr and return the ids collected so far
catch (SQLException e) {
System.err.println(e);
}
System.out.println("returning list IDs: " + listProjectIds);
return listProjectIds;
}
}
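The migration is driven by the three static maps declared above, which ship empty here (their sample entries are commented out). A run limited to a single column rename would be configured with a static initializer like the following, taken from the commented samples:
// Example configuration: rename ufficio_mic_competente to ufficio_competente on every table in listTable
static {
	RENAME_TABLE_COLUMNS.put("ufficio_mic_competente", "ufficio_competente");
}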

View File

@ -0,0 +1,511 @@
package org.gcube.application;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.gcube.application.geoportalcommon.geoportal.GeoportalClientCaller;
import org.gcube.application.geoportalcommon.geoportal.ProjectsCaller;
import org.gcube.application.se.RuntimeResourceReader;
import org.gcube.application.se.ServiceEndpointBean;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.common.scope.api.ScopeProvider;
public class GNA_New_DataModel_IndexTable_PROD_25926 {
private static String PROFILE_ID = "profiledConcessioni";
private static final String platformName = "postgis";
private static final String category = "Database";
// These are defined via ContextConfigTest.readContextSettings();
private static String CONTEXT = "";
private static String TOKEN = "";
// #### PROD
// DB
private static final String resourceName = "GNA-postgis"; // GNA
// Esquiline
// private static final String CONTEXT = "/d4science.research-infrastructures.eu/gCubeApps/Esquiline";
// private static final String TOKEN = ""; //Esquiline
// private static String PROFILE_ID = "esquilino";
private static final Map<String, String> ADD_TABLE_COLUMNS = new LinkedHashMap<>();
// static {
// ADD_TABLE_COLUMNS.put("ufficio_mic_competente", "TEXT");
// ADD_TABLE_COLUMNS.put("funzionario_responsabile", "TEXT");
// ADD_TABLE_COLUMNS.put("modalita_individuazione", "TEXT");
// ADD_TABLE_COLUMNS.put("contesto_indagine", "TEXT");
// ADD_TABLE_COLUMNS.put("denominazione", "TEXT");
// ADD_TABLE_COLUMNS.put("stato_attuale", "TEXT");
// ADD_TABLE_COLUMNS.put("accessibilita", "TEXT");
// ADD_TABLE_COLUMNS.put("cronologia_macrofase", "TEXT");
// ADD_TABLE_COLUMNS.put("specifiche_cronologia", "TEXT");
// ADD_TABLE_COLUMNS.put("quota_max", "TEXT");
// ADD_TABLE_COLUMNS.put("quota_min", "TEXT");
// }
//
private static final Map<String, String> DROP_TABLE_COLUMNS = new HashMap<>();
// static {
// DROP_TABLE_COLUMNS.put("autore", "");
// DROP_TABLE_COLUMNS.put("titolare", "");
// DROP_TABLE_COLUMNS.put("titolare_licenza", "");
// }
private static final Map<String, String> RENAME_TABLE_COLUMNS = new HashMap<>();
// Rename from column to column
static {
RENAME_TABLE_COLUMNS.put("ufficio_mic_competente", "ufficio_competente");
}
private static ProjectsCaller client = null;
public static final boolean READ_ONLY_MODE = true;
public static List<String> listTable = new ArrayList<String>();
/**
* Inits the geoportal client, setting the scope and the security token providers.
*/
// @Before
public static void initGeoportalClient() {
// assumeTrue(GCubeTest.isTestInfrastructureEnabled());
ContextConfigTest.readContextSettings();
CONTEXT = ContextConfigTest.CONTEXT;
TOKEN = ContextConfigTest.TOKEN;
ScopeProvider.instance.set(CONTEXT);
SecurityTokenProvider.instance.set(TOKEN);
client = GeoportalClientCaller.projects();
}
public static void main(String[] args) {
initGeoportalClient();
System.out.println("READ_ONLY_MODE ENABLED: " + READ_ONLY_MODE);
System.out.println("CONTEXT IS: " + CONTEXT);
System.out.println("PROFILE_ID: " + PROFILE_ID);
// GNA
// PROFILE_ID = "profiledConcessioni";
// listTable.add("profiledconcessioni_internal__gna_centroids");
// listTable.add("profiledconcessioni_gna_centroids");
// PROFILE_ID = "concessioni-estere";
// listTable.add("concessioni_estere_internal__gna_centroids");
// listTable.add("concessioni_estere_gna_centroids");
// Esquiline
// PROFILE_ID = "esquilino";
// String tableName = "esquilino_esquiline_centroids";
long startTime = System.currentTimeMillis();
try {
RuntimeResourceReader rrr = new RuntimeResourceReader(CONTEXT, resourceName, platformName, category, null);
ServiceEndpointBean se = rrr.getListSE().get(0);
AccessPoint ap = se.getListAP().get(0);
System.out.println(ap.address());
System.out.println(ap.username());
String pwdDB = RuntimeResourceReader.dectryptPassword(CONTEXT, ap);
System.out.println(pwdDB);
Connection dbConnection = getDatabaseConnection(ap.address(), ap.username(), pwdDB);
for (String tableName : listTable) {
System.out.println("\n\n# SOURCE TABLE");
Boolean tableExists = checkTableExists(dbConnection, tableName);
if (!tableExists) {
throw new Exception("Table '" + tableName + "' does not exits in the DB!!! Exit");
}
printTableColumn(dbConnection, tableName);
System.out.println("\n\n### ADDING COLUMNS...\n");
// ADD NEW COLUMNS
for (String columnName : ADD_TABLE_COLUMNS.keySet()) {
try {
boolean columnExists = checkColumnExists(dbConnection, tableName, columnName);
if (!columnExists) {
addColumnTable(dbConnection, tableName, columnName, ADD_TABLE_COLUMNS.get(columnName));
int sleeping = 500;
System.out.println("... sleeping " + sleeping + " ...");
Thread.sleep(sleeping);
}
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
return;
}
}
System.out.println("\n\n### END ADD COLUMNS");
System.out.println("\n\n### DROPPING COLUMNS...\n");
// REMOVE OLD COLUMNS
for (String columnName : DROP_TABLE_COLUMNS.keySet()) {
try {
boolean columnExists = checkColumnExists(dbConnection, tableName, columnName);
if (columnExists) {
dropColumnTable(dbConnection, tableName, columnName, DROP_TABLE_COLUMNS.get(columnName));
int sleeping = 1000;
System.out.println("... sleeping " + sleeping + " ...");
Thread.sleep(sleeping);
}
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
return;
}
}
System.out.println("\n\n### END DROP COLUMNS");
System.out.println("\n\n### RENAMING COLUMNS...\n");
// RENAME_TABLE_COLUMNS
for (String columnName : RENAME_TABLE_COLUMNS.keySet()) {
try {
boolean columnExists = checkColumnExists(dbConnection, tableName, columnName);
if (columnExists) {
renameColumnTable(dbConnection, tableName, columnName,
RENAME_TABLE_COLUMNS.get(columnName));
int sleeping = 1000;
System.out.println("... sleeping " + sleeping + " ...");
Thread.sleep(sleeping);
}
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
return;
}
}
System.out.println("\n\n### END RENAME COLUMNS");
System.out.println("\n\n# UPDATED TABLE");
printTableColumn(dbConnection, tableName);
}
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
long endTime = System.currentTimeMillis();
System.out.println("\n\nSCRIPT END at: " + endTime);
double toSec = (endTime - startTime) / 1000.0;
System.out.println("SCRIPT TERMINATED in " + toSec + "sec");
}
public static void printTableColumn(Connection con, String tablename) {
try {
DatabaseMetaData databaseMetaData = con.getMetaData();
// Print TABLE_TYPE "TABLE"
ResultSet columns = databaseMetaData.getColumns(null, null, tablename, null);
System.out.println("\n==== TABLE " + tablename + " ");
System.out.println("[COLUMN_NAME - TYPE_NAME ( COLUMN_SIZE )]");
System.out.println("-----------------------------------------------");
while (columns.next()) {
String columnName = columns.getString("COLUMN_NAME");
String columnType = columns.getString("TYPE_NAME");
int columnSize = columns.getInt("COLUMN_SIZE");
System.out.println("\t" + columnName + " - " + columnType + " (" + columnSize + ")");
}
System.out.println("-----------------------------------------------");
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
/**
* Gets the database connection.
*
* @param dbURL the db url
* @param user the user
* @param pwd the pwd
* @return the database connection
*/
public static Connection getDatabaseConnection(String dbURL, String user, String pwd) {
System.out.println("dbURL: " + dbURL);
Connection c = null;
try {
Class.forName("org.postgresql.Driver");
c = DriverManager.getConnection(dbURL, user, pwd);
c.setAutoCommit(false);
} catch (Exception e) {
e.printStackTrace();
System.err.println(e.getClass().getName() + ": " + e.getMessage());
System.exit(0);
}
System.out.println("Opened database successfully");
return c;
}
public static void addColumnTable(Connection con, String tableName, String newColumn, String type)
throws SQLException {
String alterTableString = String.format("ALTER TABLE %s ADD COLUMN %s %s", tableName, newColumn, type);
System.out.println("Executing: " + alterTableString);
try {
con.setAutoCommit(false);
if (!READ_ONLY_MODE) {
con.createStatement().execute(alterTableString);
con.commit();
}
} catch (SQLException e) {
e.printStackTrace();
if (con != null) {
try {
System.err.print("Transaction is being rolled back");
con.rollback();
} catch (SQLException excep) {
e.printStackTrace();
}
}
}
System.out.println("Executed: " + alterTableString);
}
public static void renameColumnTable(Connection con, String tableName, String oldColumn, String newColumn)
throws SQLException {
String alterTableString = String.format("ALTER TABLE %s RENAME COLUMN %s TO %s", tableName, oldColumn,
newColumn);
System.out.println("Executing: " + alterTableString);
try {
con.setAutoCommit(false);
if (!READ_ONLY_MODE) {
con.createStatement().execute(alterTableString);
con.commit();
}
} catch (SQLException e) {
e.printStackTrace();
if (con != null) {
try {
System.err.print("Transaction is being rolled back");
con.rollback();
} catch (SQLException excep) {
excep.printStackTrace();
}
}
}
System.out.println("Executed: " + alterTableString);
}
public static void dropColumnTable(Connection con, String tableName, String oldColumn, String type)
throws SQLException {
String alterTableString = String.format("ALTER TABLE %s DROP COLUMN %s", tableName, oldColumn);
System.out.println("Executing: " + alterTableString);
try {
con.setAutoCommit(false);
if (!READ_ONLY_MODE) {
con.createStatement().execute(alterTableString);
con.commit();
}
} catch (SQLException e) {
e.printStackTrace();
if (con != null) {
try {
System.err.print("Transaction is being rolled back");
con.rollback();
} catch (SQLException excep) {
excep.printStackTrace();
}
}
}
System.out.println("Executed: " + alterTableString);
}
public static Boolean checkColumnExists(Connection con, String tableName, String columnName) throws SQLException {
String columnExistsLabel = "COLUMN_EXISTS";
String sql = String.format(
"SELECT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name='%s' AND column_name='%s') as %s",
tableName, columnName, columnExistsLabel);
System.out.println("\n+++ " + columnExistsLabel + " checking '" + tableName + "': " + sql);
PreparedStatement p;
ResultSet resultSet;
try {
p = con.prepareStatement(sql);
resultSet = p.executeQuery();
// Expected 1 row
resultSet.next();
Boolean columnExists = resultSet.getBoolean(columnExistsLabel);
System.out.println("\t RESP --> Column '" + columnName + "' exists: " + columnExists + "\n");
return columnExists;
} catch (SQLException e) {
System.err.println(e);
return false;
}
}
public static Boolean checkTableExists(Connection con, String tableName) throws SQLException {
String tableExistsLabel = "TABLE_EXISTS";
String sql = String.format(
"SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name='%s' AND table_type LIKE 'BASE TABLE' AND table_schema LIKE 'public') as %s",
tableName, tableExistsLabel);
System.out.println("\n+++ " + tableExistsLabel + " checking '" + tableName + "': " + sql);
PreparedStatement p;
ResultSet resultSet;
try {
p = con.prepareStatement(sql);
resultSet = p.executeQuery();
// Expected 1 row
resultSet.next();
Boolean tableExists = resultSet.getBoolean(tableExistsLabel);
System.out.println("\t RESP --> Table '" + tableName + "' exists: " + tableExists + "\n");
return tableExists;
} catch (SQLException e) {
System.err.println(e);
return false;
}
}
public static void updateTable(Connection con, String tableName, String whereCondition,
LinkedHashMap<String, String> mapColumnValue) throws SQLException {
StringBuilder updateSQL = new StringBuilder();
updateSQL.append("UPDATE " + tableName + " SET");
int i = 1;
for (String column : mapColumnValue.keySet()) {
updateSQL.append(" " + column + "=?");
if (i < mapColumnValue.size())
updateSQL.append(", ");
i++;
}
updateSQL.append(" WHERE " + whereCondition);
String update = updateSQL.toString();
// System.out.println("Executing: " + update);
try (PreparedStatement updatePS = con.prepareStatement(update);) {
int index = 1;
for (String column : mapColumnValue.keySet()) {
updatePS.setString(index, mapColumnValue.get(column));
index++;
}
con.setAutoCommit(false);
System.out.println("Executing: " + updatePS.toString());
if (!READ_ONLY_MODE) {
updatePS.executeUpdate();
con.commit();
}
} catch (SQLException e) {
e.printStackTrace();
if (con != null) {
try {
System.err.print("Transaction is being rolled back");
con.rollback();
} catch (SQLException excep) {
excep.printStackTrace();
}
}
}
}
// Reads all rows of the given table and collects the values of the project-id column
public static List<String> readTableIDs(Connection con, String tableName, String columnNameProjectId) {
PreparedStatement p = null;
ResultSet resultSet = null;
// Try block to catch exception/s
List<String> listProjectIds = new ArrayList<String>();
try {
// SQL command data stored in String datatype
String sql = String.format("SELECT * FROM %s", tableName);
p = con.prepareStatement(sql);
resultSet = p.executeQuery();
ResultSetMetaData rsmd = resultSet.getMetaData();
int columnsNumber = rsmd.getColumnCount();
int row = 1;
System.out.println("\n####TABLE: " + tableName + " content..\n");
while (resultSet.next()) {
System.out.print("" + row + "] ");
for (int i = 1; i <= columnsNumber; i++) {
String columnValue = resultSet.getString(i);
System.out.print("(" + rsmd.getColumnName(i) + ") " + columnValue + " | ");
}
row++;
System.out.println("\n");
String projectId = resultSet.getString(columnNameProjectId);
listProjectIds.add(projectId);
}
System.out.println("####TABLE: " + tableName + " end content\n");
}
// Log any SQL exception to stderr and return the ids collected so far
catch (SQLException e) {
System.err.println(e);
}
System.out.println("returning list IDs: " + listProjectIds);
return listProjectIds;
}
}

View File

@ -4,6 +4,8 @@ import java.io.IOException;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
@ -13,6 +15,7 @@ import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import org.bson.Document;
import org.gcube.application.geoportal.common.model.configuration.Configuration;
import org.gcube.application.geoportal.common.model.document.Project;
import org.gcube.application.geoportal.common.model.document.lifecycle.LifecycleInformation;
@ -22,6 +25,7 @@ import org.gcube.application.geoportalcommon.ConvertToDataValueObjectModel;
import org.gcube.application.geoportalcommon.ProjectDVBuilder;
import org.gcube.application.geoportalcommon.geoportal.GeoportalClientCaller;
import org.gcube.application.geoportalcommon.geoportal.ProjectsCaller;
import org.gcube.application.geoportalcommon.geoportal.UseCaseDescriptorCaller;
import org.gcube.application.geoportalcommon.shared.SearchingFilter;
import org.gcube.application.geoportalcommon.shared.SearchingFilter.LOGICAL_OP;
import org.gcube.application.geoportalcommon.shared.WhereClause;
@ -32,7 +36,6 @@ import org.gcube.application.geoportalcommon.shared.geoportal.project.ProjectDV;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import org.junit.Before;
import org.junit.Test;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
@ -41,31 +44,51 @@ import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class Project_Tests {
private ProjectsCaller client = null;
// private static String CONTEXT = "/pred4s/preprod/preVRE";
// private static String TOKEN = ""; //preVRE
private static final String GCUBE_CONFIG_PROPERTIES_FILENAME = "gcube_config.properties";
// APP Working Directory + /src/test/resources must be the location of
// gcube_config.properties
private static String gcube_config_path = String.format("%s/%s",
System.getProperty("user.dir") + "/src/test/resources", GCUBE_CONFIG_PROPERTIES_FILENAME);
private static String CONTEXT;
private static String TOKEN;
private static UseCaseDescriptorCaller clientUCD = null;
private static ProjectsCaller clientPrj = null;
//private static String CONTEXT = "/gcube/devsec/devVRE";
//private static String TOKEN = ""; // devVRE
private static String PROFILE_ID = "profiledConcessioni";
private static String PROJECT_ID = "6365485fa8d67f4c82794cc4";
private static String PROJECT_ID = "6384aaac308f5c28c5ee0888";
private static String CONTEXT = "/pred4s/preprod/preVRE";
private static String TOKEN = ""; //preVRE
private static String MY_LOGIN = "francesco.mangiacrapa";
private static final String FOLLOWS = "follows";
private static final String PRECEDES = "precedes";
protected static class Phases {
public static final String PENDING_APPROVAL = "Pending Approval";
public static final String PUBLISHED = "Published";
public static final String UNPUBLISHED = "UnPublished";
}
@Before
public void getClient() {
// assumeTrue(GCubeTest.isTestInfrastructureEnabled());
public void init() {
ContextConfigTest.readContextSettings();
CONTEXT = ContextConfigTest.CONTEXT;
TOKEN = ContextConfigTest.TOKEN;
ScopeProvider.instance.set(CONTEXT);
SecurityTokenProvider.instance.set(TOKEN);
client = GeoportalClientCaller.projects();
clientPrj = GeoportalClientCaller.projects();
clientUCD = GeoportalClientCaller.useCaseDescriptors();
}
// @Test
public void getList() throws Exception {
List<Project> listOfProjects = client.getListForProfileID(PROFILE_ID);
List<Project> listOfProjects = clientPrj.getListForProfileID(PROFILE_ID);
int i = 0;
for (Project project : listOfProjects) {
@ -75,7 +98,7 @@ public class Project_Tests {
//@Test
public void getByID() throws Exception {
Project project = client.getProjectByID(PROFILE_ID, PROJECT_ID);
Project project = clientPrj.getProjectByID(PROFILE_ID, PROJECT_ID);
ProjectDVBuilder projectBuilder = ProjectDVBuilder.newBuilder().fullDocumentMap(true);
ProjectDV projectDV = ConvertToDataValueObjectModel.toProjectDV(project, projectBuilder);
System.out.println(projectDV);
@ -86,12 +109,27 @@ public class Project_Tests {
System.out.println("The key: " + key + " has value: " + theValue);
}
System.out.println(projectDV.getSpatialReference());
System.out.println("JSON: " + projectDV.getTheDocument().getDocumentAsJSON());
System.out.println("Spatial reference: " + projectDV.getSpatialReference());
}
// @Test
public void checkDocumentSerDes() throws Exception {
Project project = clientPrj.getProjectByID(PROFILE_ID, PROJECT_ID);
String documentValueAsJson = project.getTheDocument().toJson();
System.out.println("1 " + documentValueAsJson);
String updatedDocumentAsJson = new Document(Project.THE_DOCUMENT, documentValueAsJson).toJson();
System.out.println("2 " + updatedDocumentAsJson);
Document setUpdatedDocument = new Document("$set", updatedDocumentAsJson);
System.out.println("3 " + setUpdatedDocument);
}
// @Test
public void getListProjectsDV() throws Exception {
List<Project> listOfProjects = client.getListForProfileID(PROFILE_ID);
List<Project> listOfProjects = clientPrj.getListForProfileID(PROFILE_ID);
ProjectDVBuilder projectBuilder = ProjectDVBuilder.newBuilder().fullDocumentMap(true);
int i = 0;
for (Project project : listOfProjects) {
@ -114,10 +152,69 @@ public class Project_Tests {
}
}
@Test
public void getListProjectsDVFiltered() throws Exception {
//@Test
public void getListProjectsDVFilteredJSONDocument() throws Exception {
// List<Project> listOfProjects = client.getListForProfileID(PROFILE_ID);
SearchingFilter filter = new SearchingFilter();
// Where Clause
List<WhereClause> conditions = new ArrayList<WhereClause>();
Map<String, Object> searchInto = new HashMap<String, Object>();
//searchInto.put("_id", "6384aaac308f5c28c5ee0888");
searchInto.put("_theDocument.nome", "della Civita di Tarquinia ");
WhereClause whereClause = new WhereClause(LOGICAL_OP.OR, searchInto);
conditions.add(whereClause);
filter.setConditions(conditions);
/* ORDER BY */
// List<ItemFieldDV> orderByFields = new ArrayList<ItemFieldDV>();
// List<String> jsonFields = Arrays.asList("_theDocument.dataInizioProgetto");
// ItemFieldDV itemField = new ItemFieldDV("dataInizioProgetto", jsonFields, "$or", false, false, false);
// orderByFields.add(itemField);
// filter.setOrderByFields(orderByFields);
LinkedHashMap<String, Object> projection = new LinkedHashMap<String, Object>();
// default
// PROJECTION
projection.put(Project.ID, 1);
// projection.put("_theDocument.nome", 1);
// projection.put("_profileID", 1);
// projection.put("_profileVersion", 1);
// projection.put("_version", 1);
// projection.put("_theDocument", 1);
//
// projection.put("_theDocument.paroleChiaveLibere", 1);
// projection.put("_theDocument.editore", 1);
// projection.put("_theDocument.paroleChiaveICCD", 1);
// projection.put("_theDocument.responsabile", 1);
//
// projection.put("_theDocument.introduzione", 1);
// projection.put("_theDocument.authors", 1);
// projection.put("_theDocument.dataInizioProgetto", 1);
//filter.setProjection(projection);
Integer totalDocs = clientPrj.getTotalDocument(PROFILE_ID);
Iterator<Project> projects = clientPrj.queryOnMongo(PROFILE_ID, 10, 0, null, filter);
//Iterable<Project> itP = () -> projects;
//Stream<Project> targetStream = StreamSupport.stream(itP.spliterator(), false);
//List<String> listProjectIDs = targetStream.map(Project::getId).collect(Collectors.toList());
List<ResultDocumentDV> results = ConvertToDataValueObjectModel.toListResultDocument(projects);
int i = 0;
for (ResultDocumentDV projectDV : results) {
System.out.println(++i + ") " + projectDV.getId() + " JSON: "
+ projectDV.getDocumentAsJSON());
}
}
//@Test
public void getListProjectsDVFiltered() throws Exception {
// List<Project> listOfProjects = client.getListForProfileID(PROFILE_ID);
SearchingFilter filter = new SearchingFilter();
@ -166,20 +263,19 @@ public class Project_Tests {
filter.setProjection(projection);
Integer totalDocs = client.getTotalDocument(PROFILE_ID);
Iterator<Project> projects = client.queryOnMongo(PROFILE_ID, totalDocs, 0, null, filter);
Integer totalDocs = clientPrj.getTotalDocument(PROFILE_ID);
Iterator<Project> projects = clientPrj.queryOnMongo(PROFILE_ID, totalDocs, 0, null, filter);
Iterable<Project> itP = () -> projects;
Stream<Project> targetStream = StreamSupport.stream(itP.spliterator(), false);
List<String> listProjectIDs = targetStream.map(Project::getId).collect(Collectors.toList());
List<ResultDocumentDV> results = ConvertToDataValueObjectModel.toListResultDocument(projects);
int i = 0;
for (ResultDocumentDV projectDV : results) {
System.out.println(++i + ") " + projectDV.getId() + " dataInizioProgetto: "+projectDV.getDocumentAsMap().get("dataInizioProgetto"));
System.out.println(++i + ") " + projectDV.getId() + " dataInizioProgetto: "
+ projectDV.getDocumentAsMap().get("dataInizioProgetto"));
}
// TEST TO PROJECT DV
/*
* ProjectDVBuilder projectBuilder =
@ -191,50 +287,41 @@ public class Project_Tests {
* projectDV); }
*/
/*
* int limit = 2; LinkedHashMap<String, Object> documentAsMap = new
* LinkedHashMap<String, Object>(limit);
*
* try { // Project theProject = //
* GeoportalClientCaller.projects().getProjectByID(profileID, projectID);
*
* QueryRequest request = new QueryRequest();
* request.setFilter(Document.parse("{\"" + Project.ID + "\" : " +
* "{\"$eq\" : \"" + PROJECT_ID + "\"}}"));
* request.setProjection(Document.parse("{\"" + Project.ID + "\" : " + "1}"));
* // should be one projects(PROFILE_ID).build().f
* projects(PROFILE_ID).build().query(request).forEachRemaining(p -> { try {
* System.out.println("p is: "+p); Iterator<Entry<String, Object>> entrySetsIt =
* p.getTheDocument().entrySet().iterator(); int i = 0; while
* (entrySetsIt.hasNext()) { if (i > limit) break;
*
* Entry<String, Object> entry = entrySetsIt.next();
* documentAsMap.put(entry.getKey(), entry.getValue()); i++; }
*
* } catch (Exception e) { throw e; } });
*
* System.out.println("getEntrySetsDocumentForProjectID returning map: "
* +documentAsMap);
*
*
* } catch (Exception e) { String erroMsg =
* "Error occurred on loading EntrySets document for profileID " + PROFILE_ID +
* " and projectID " + PROJECT_ID; e.printStackTrace(); }
*/
}
// @Test
public void getLifecycleForProjectId() throws Exception {
Project project = clientPrj.getProjectByID(PROFILE_ID, PROJECT_ID);
LifecycleInformation lci = project.getLifecycleInformation();
LifecycleInformationDV liDV = ConvertToDataValueObjectModel.toLifecycleInformationDV(lci);
System.out.println(liDV);
@@ -243,39 +330,49 @@ public class Project_Tests {
// @Test
public void getConfiguration() throws Exception {
Configuration config = clientPrj.getConfiguration(PROFILE_ID);
System.out.println(config);
}
// @Test
public void getTotalDocument() throws Exception {
System.out.println(clientPrj.getTotalDocument(PROFILE_ID));
}
// @Test
public void getListPhases() throws Exception {
List<String> idsPhases = clientPrj.getIDsPhases(PROFILE_ID);
System.out.println(idsPhases);
PhaseDV[] phases = clientPrj.getPhasesIntoDocumentStoreCollection(PROFILE_ID);
for (PhaseDV phaseDV : phases) {
System.out.println(phaseDV);
}
}
// @Test
public void getCountByPhase() throws Exception {
Integer integer = clientPrj.getCountByPhaseFor(PROFILE_ID, "Published", "OK");
System.out.println(integer);
integer = clientPrj.getCountByPhaseFor(PROFILE_ID, "Pending Approval", "OK");
System.out.println(integer);
}
// @Test
public void getRelationshipsChain() throws Exception {
System.out.println("getRelationshipsChain test");
Project project = clientPrj.getProjectByID(PROFILE_ID, PROJECT_ID);
List<Relationship> relations = project.getRelationships();
for (Relationship relationship : relations) {
System.out.println("\n\ngetRelationshipsChain for " + relationship);
Iterator<RelationshipNavigationObject> iterator = clientPrj.getRelationshipChain(PROFILE_ID, PROJECT_ID,
relationship.getRelationshipName(), true);
while (iterator.hasNext()) {
@@ -287,7 +384,8 @@ public class Project_Tests {
}
public void visitRelationshipsChain(RelationshipNavigationObject nav, String relationshipName) {
System.out.println("visitRelationshipsChain of target: "+nav.getTarget().getId() +", relationshipName: "+relationshipName+", doc: "+nav.getTarget().getTheDocument());
System.out.println("visitRelationshipsChain of target: " + nav.getTarget().getId() + ", relationshipName: "
+ relationshipName + ", doc: " + nav.getTarget().getTheDocument());
System.out.println("visitRelationshipsChain children " + nav.getChildren());
if (nav == null)
@@ -359,4 +457,219 @@ public class Project_Tests {
return LocalDate.parse(p.getValueAsString(), FULL_FORMATTER);
}
}
// @Test
public void testEvaluateRelationshipsChain() throws Exception {
System.out.println("testEvaluateRelationshipsChain test");
// PROJECT_ID = "6399de3ca0a4545420373251";
//
// PROJECT_ID = "6399de68a0a4545420373257";
//
//
// PROJECT_ID = "6399de96a0a4545420373258"; //no Relation
//
PROJECT_ID = "63c80aebacb8c7657b858741";
Project project = clientPrj.getProjectByID(PROFILE_ID, PROJECT_ID);
evaluateAdditionalIndexParameters(project);
}
public static Document evaluateAdditionalIndexParameters(Project project) throws Exception {
Document toReturn = new Document();
Project indexingProject = project;
// Evaluate to display project IDs
log.debug("Evaluating Last ID in relationship chain. Current Concessione ID is {}", indexingProject.getId());
try {
ArrayList<Project> projects = new ArrayList<>();
// get Last ID in relation chain
projects.add(indexingProject);
if (!indexingProject.getRelationshipsByName(PRECEDES).isEmpty()) {
List<RelationshipNavigationObject> relChain = getRelationshipChain(indexingProject.getProfileID(),
indexingProject, PRECEDES, true);
scanRelation(projects, relChain.get(0), false);
}
if (!indexingProject.getRelationshipsByName(FOLLOWS).isEmpty()) {
List<RelationshipNavigationObject> relChain = getRelationshipChain(indexingProject.getProfileID(),
indexingProject, FOLLOWS, true);
scanRelation(projects, relChain.get(0), false);
}
log.debug("Produced full chain [size : {}] from {}, evaluating last available for PHASE {} ",
projects.size(), indexingProject.getId(), indexingProject.getLifecycleInformation().getPhase());
List<String> toDisplayId = new ArrayList<>();
List<String> toHideIds = new ArrayList<>();
log.info("projects are: " + projects.size());
// Reverse Order means from the last FOLLOW to the first one (temporal reverse
// order)
Collections.sort(projects, Collections.reverseOrder(new ProjectRelationComparator()));
int j = 0;
for (Project theProject : projects) {
log.info(++j + ") " + theProject.getId() + " data inizio: "
+ theProject.getTheDocument().get("dataInizioProgetto"));
}
log.trace("Checking from LAST.. ");
for (int i = 0; i < projects.size(); i++) {
Project p = projects.get(i);
String phase = p.getLifecycleInformation().getPhase();
// IS TO DISPLAY EMPTY? Step into only once
if (toDisplayId.isEmpty()) {
// IF PHASE IS PENDING APPROVAL OR PUBLISHED
if ((phase.equals(Phases.PENDING_APPROVAL) || phase.equals(Phases.PUBLISHED))) {
toDisplayId.add(p.getId());
}
} else {
switch (phase) {
case Phases.PENDING_APPROVAL:
case Phases.PUBLISHED: {
if ((p.getLifecycleInformation().getPhase().equals(Phases.PENDING_APPROVAL)
|| p.getLifecycleInformation().getPhase().equals(Phases.PUBLISHED)))
toHideIds.add(p.getId());
break;
}
}
}
}
toReturn.put("_toHideIds", toHideIds);
toReturn.put("_toDisplayIds", toDisplayId);
log.info("Indexing request for Concessione [ID {}] with to HIDE {} and toDisplay {} ",
indexingProject.getId(), toHideIds, toDisplayId);
return toReturn;
} catch (Exception e) {
log.error("Unable to evaluate to Hide and Display Ids ", e);
throw new Exception("Unable to evaluate chain ids to hide / display", e);
}
}
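// Illustrative note (IDs are placeholders, not real values): for a fully Published chain
// C1 -PRECEDES-> C2 -PRECEDES-> C3, the returned Document is expected to look like
// {"_toDisplayIds": ["<id of C3>"], "_toHideIds": ["<id of C2>", "<id of C1>"]},
// i.e. only the most recent Published/Pending Approval project is displayed and the older ones are hidden.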
public static class ProjectRelationComparator implements Comparator<Project> {
@Override
public int compare(Project p1, Project p2) {
if (p1 == null)
return 1;
if (p2 == null)
return -1;
Integer compareResult = compareProjectAgainstRelations(p1, p2);
log.trace("p1 & p2, comparator result: {}", compareResult);
if (compareResult == null) {
log.debug("No relations beetween p1 & p2, checking inverted relations");
compareResult = compareProjectAgainstRelations(p2, p1);
log.trace("p2 & p1, comparator result: {}", compareResult);
if (compareResult == null) {
log.trace("p1 & p2, are not comparable, returning 0");
compareResult = 0;
}
}
log.debug("p1 & p2, comparator result, returns: {}", compareResult);
return compareResult;
}
}
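// Note: this comparator only orders projects that are directly linked by a PRECEDES/FOLLOWS relationship;
// unrelated pairs compare as equal (0), so the resulting order is only meaningful within a single relationship chain.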
public static Integer compareProjectAgainstRelations(Project source, Project target) {
log.debug("comparing source {} and target {}", source.getId(), target.getId());
List<Relationship> listRel = source.getRelationships();
log.debug("relationships of {} are : {}", source.getId(), listRel);
if (listRel != null) {
String targetId = target.getId();
for (Relationship relationship : listRel) {
String relTargetId = relationship.getTargetID();
if (targetId.compareTo(relTargetId) == 0) {
String relationName = relationship.getRelationshipName();
if (relationName.equals(PRECEDES)) {
log.debug("source {} [rel {} ] target {}, so source < target ", source.getId(), PRECEDES,
target.getId());
// source < target
return -1;
} else if (relationName.equals(FOLLOWS)) {
log.debug("source {} [rel {} ] target {}, so source > target ", source.getId(), FOLLOWS,
target.getId());
// source > target
return 1;
}
}
}
}
log.debug("No relations beetween a & b");
return null;
}
private static void scanRelation(ArrayList<Project> chain, RelationshipNavigationObject obj, boolean putBefore) {
if (putBefore)
chain.add(0, obj.getTarget());
else
chain.add(obj.getTarget());
if (obj.getChildren() != null)
obj.getChildren().forEach(r -> scanRelation(chain, r, putBefore));
}
public static List<RelationshipNavigationObject> getRelationshipChain(String ucid, Project current,
String relationshipId, Boolean deep) throws Exception {
// recursive
// ProfiledMongoManager manager = new ProfiledMongoManager(ucid);
log.info("UCD {} : Getting Relationships List for {} [rel : {}, recurse {}]", ucid, current.getId(),
relationshipId, deep);
// Project current = manager.getByID(id);
long startTime = System.currentTimeMillis();
List<RelationshipNavigationObject> toReturn = getLinked(current, relationshipId, deep);
log.info("Got {} relationship elements in {}ms", toReturn.size(), (System.currentTimeMillis() - startTime));
return toReturn;
}
private static List<RelationshipNavigationObject> getLinked(Project current, String relationName, Boolean recurse) {
log.debug("Getting Relationships Lists for {} [rel : {}, recurse {}]", current.getId(), relationName, recurse);
ArrayList<RelationshipNavigationObject> toReturn = new ArrayList<>();
List<Relationship> existing = current.getRelationshipsByName(relationName);
for (Relationship relationship : existing) {
try {
log.debug("Navigating from {} : {} to[rel {} ] {} : {}", relationship.getTargetUCD(),
relationship.getTargetID(), relationship.getRelationshipName(), current.getProfileID(),
current.getId());
RelationshipNavigationObject linkedProject = new RelationshipNavigationObject();
// linkedProject.setTarget(new
// ProfiledMongoManager(relationship.getTargetUCD()).getByID(relationship.getTargetID()));
// Project target = new Project();
// target.setProfileID(relationship.getTargetUCD());
// target.setId(relationship.getTargetID());
Project target = clientPrj.getProjectByID(relationship.getTargetUCD(), relationship.getTargetID());
linkedProject.setTarget(target);
if (recurse) {
List<RelationshipNavigationObject> linked = getLinked(linkedProject.getTarget(), relationName,
recurse);
linkedProject.setChildren(linked);
}
toReturn.add(linkedProject);
} catch (Exception e) {
log.warn("Unable to navigate from {} : {} to[rel {} ] {} : {}", relationship.getTargetUCD(),
relationship.getTargetID(), relationship.getRelationshipName(), current.getProfileID(),
current.getId(), e);
}
}
return toReturn;
}
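// getLinked walks the relation depth-first: each relationship target is resolved via clientPrj and, when
// recurse is true, the target's own relationships with the same name are visited as children; targets that
// cannot be resolved are logged with a warning and skipped instead of breaking the whole chain.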
}

View File

@@ -0,0 +1,405 @@
package org.gcube.application;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.gcube.application.geoportal.common.model.document.Project;
import org.gcube.application.geoportalcommon.ConvertToDataValueObjectModel;
import org.gcube.application.geoportalcommon.ProjectDVBuilder;
import org.gcube.application.geoportalcommon.geoportal.GeoportalClientCaller;
import org.gcube.application.geoportalcommon.geoportal.ProjectsCaller;
import org.gcube.application.geoportalcommon.shared.SearchingFilter;
import org.gcube.application.geoportalcommon.shared.SearchingFilter.LOGICAL_OP;
import org.gcube.application.geoportalcommon.shared.WhereClause;
import org.gcube.application.geoportalcommon.shared.geoportal.ResultDocumentDV;
import org.gcube.application.geoportalcommon.shared.geoportal.project.ProjectDV;
import org.gcube.application.se.RuntimeResourceReader;
import org.gcube.application.se.ServiceEndpointBean;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.common.scope.api.ScopeProvider;
public class ReSyncCentroidsMetadataOnDB {
// private static final String CONTEXT = "/gcube/devsec/devVRE";
// private static final String TOKEN = ""; // devVRE
// private static final String CONTEXT = "/pred4s/preprod/preVRE";
// private static final String TOKEN = ""; //preVRE
private static final String CONTEXT = "/d4science.research-infrastructures.eu/D4OS/GNA";
private static final String TOKEN = ""; //GNA
// GEOPORTAL
private static String PROFILE_ID = "profiledConcessioni";
//private static final String PROFILE_ID = "concessioni-estere";
private static final String JSON_KEY_DATA_FINE_PROGETTO = "dataFineProgetto";
private static final String JSON_KEY_DATA_INIZIO_PROGETTO = "dataInizioProgetto";
private static final String JSON_KEY_INTRODUZIONE = "introduzione";
// DB
private static final String platformName = "postgis";
private static final String category = "Database";
//private static final String resourceName = "GNA-POSTGIS-DB"; //devVRE
//private static final String resourceName = "Geoserver-t postgis"; //preVRE
private static final String resourceName = "GNA-postgis"; //GNA
private static final String TABLE_COLUMN_DATA_FINE_PROGETTO = "data_fine_progetto";
private static final String TABLE_COLUMN_DATA_INIZIO_PROGETTO = "data_inizio_progetto";
private static final String TABLE_COLUMN_DESCRIZIONE = "descrizione";
private static ProjectsCaller client = null;
public static final boolean READ_ONLY_MODE = true;
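// When READ_ONLY_MODE is true, the ALTER/UPDATE statements below are only built and printed,
// never executed or committed, so the script can be dry-run safely against the database.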
/**
* Gets the client.
*
* @return the client
*/
// @Before
public static void initGeoportalClient() {
// assumeTrue(GCubeTest.isTestInfrastructureEnabled());
ScopeProvider.instance.set(CONTEXT);
SecurityTokenProvider.instance.set(TOKEN);
client = GeoportalClientCaller.projects();
}
public static void main(String[] args) {
System.out.println("READ_ONLY_MODE ENABLED: "+READ_ONLY_MODE);
System.out.println("CONTEXT IS: "+CONTEXT);
System.out.println("PROFILE_ID: "+PROFILE_ID);
initGeoportalClient();
//preVRE
// String tableName = "profiledconcessioni_internal__prevre_centroids";
// tableName = "profiledconcessioni_prevre_centroids";
// tableName = "concessioni_estere_prevre_centroids";
// tableName = "concessioni_estere_internal__prevre_centroids";
//GNA
// PROFILE_ID = "concessioni-estere";
// String tableName = "concessioni_estere_internal__gna_centroids";
// tableName = "concessioni_estere_gna_centroids";
PROFILE_ID = "profiledConcessioni";
String tableName = "profiledconcessioni_internal__gna_centroids";
tableName = "profiledconcessioni_gna_centroids";
long startTime = System.currentTimeMillis();
try {
RuntimeResourceReader rrr = new RuntimeResourceReader(CONTEXT, resourceName, platformName, category, null);
ServiceEndpointBean se = rrr.getListSE().get(0);
AccessPoint ap = se.getListAP().get(0);
System.out.println(ap.address());
System.out.println(ap.username());
String pwdDB = RuntimeResourceReader.dectryptPassword(CONTEXT, ap);
System.out.println(pwdDB);
Connection dbConnection = getDatabaseConnection(ap.address(), ap.username(), pwdDB);
// MUST BE DONE JUST ONCE
try {
renameColumnTable(dbConnection, tableName, "anno", TABLE_COLUMN_DATA_INIZIO_PROGETTO);
renameColumnTable(dbConnection, tableName, "date_scavo", TABLE_COLUMN_DATA_FINE_PROGETTO);
// addColumnTable(dbConnection, tableName, "gna_url", "TEXT"); //Could be a
// VARCHAR(2048). Is it manageable by service?
} catch (Exception e) {
// Abort the script if the one-off column renaming fails
e.printStackTrace();
return;
}
List<String> listProjectIdsIntoDB = readTableIDs(dbConnection, tableName, "projectid");
ScopeProvider.instance.set(CONTEXT);
SecurityTokenProvider.instance.set(TOKEN);
// List<ResultDocumentDV> listPublishedProjectsIntoService =
// getListProjectsDVFiltered();
int counter = 1;
for (String projectId : listProjectIdsIntoDB) {
Project project = null;
System.out.println("\n\n################ "+counter+" of "+listProjectIdsIntoDB.size()+ ") Reading the project id: " + projectId);
try {
project = client.getProjectByID(PROFILE_ID, projectId);
} catch (Exception e) {
System.err.println("Project id not found: " + projectId);
}
try {
if (project == null)
continue;
System.out.println("\n###### Trying to update Centroid TABLE for project id: " + projectId);
ProjectDVBuilder projectBuilder = ProjectDVBuilder.newBuilder().fullDocumentMap(true);
ProjectDV projectDV = ConvertToDataValueObjectModel.toProjectDV(project, projectBuilder);
LinkedHashMap<String, Object> projectMap = projectDV.getTheDocument().getDocumentAsMap();
LinkedHashMap<String, String> hashMap = new LinkedHashMap<String, String>();
hashMap.put(TABLE_COLUMN_DATA_INIZIO_PROGETTO, projectMap.get(JSON_KEY_DATA_INIZIO_PROGETTO) + "");
hashMap.put(TABLE_COLUMN_DATA_FINE_PROGETTO, projectMap.get(JSON_KEY_DATA_FINE_PROGETTO) + "");
hashMap.put(TABLE_COLUMN_DESCRIZIONE, projectMap.get(JSON_KEY_INTRODUZIONE) + "");
updateTable(dbConnection, tableName, "projectId='" + projectId + "'", hashMap);
System.out.println("###UPDATED project id: " + projectId);
Thread.sleep(500);
System.out.println("################ Update completed for: " + projectId);
} catch (Exception e) {
System.err.println("Error on updating the table for the project id: " + projectId);
}
counter++;
}
} catch (Exception e) {
// Log any unexpected failure; the timing report below is still printed
e.printStackTrace();
}
long endTime = System.currentTimeMillis();
System.out.println("\n\nSCRIPT END at: "+endTime);
double toSec = (endTime - startTime) / 1000d;
System.out.println("SCRIPT TERMINATED in "+toSec + "sec");
}
public static List<ResultDocumentDV> getListProjectsDVFiltered() throws Exception {
// List<Project> listOfProjects = client.getListForProfileID(PROFILE_ID);
SearchingFilter filter = new SearchingFilter();
// Where Clause
List<WhereClause> conditions = new ArrayList<WhereClause>();
Map<String, Object> searchInto = new HashMap<String, Object>();
searchInto.put("_lifecycleInformation._phase", "Published");
WhereClause whereClause = new WhereClause(LOGICAL_OP.OR, searchInto);
conditions.add(whereClause);
filter.setConditions(conditions);
LinkedHashMap<String, Object> projection = new LinkedHashMap<String, Object>();
// default
// PROJECTION
String theDoc = Project.THE_DOCUMENT + ".";
projection.put(Project.ID, 1);
projection.put(theDoc + "nome", 1);
projection.put(theDoc + JSON_KEY_DATA_INIZIO_PROGETTO, 1);
projection.put(theDoc + JSON_KEY_DATA_FINE_PROGETTO, 1);
filter.setProjection(projection);
Integer totalDocs = client.getTotalDocument(PROFILE_ID);
Iterator<Project> projects = client.queryOnMongo(PROFILE_ID, totalDocs, 0, null, filter);
List<ResultDocumentDV> results = ConvertToDataValueObjectModel.toListResultDocument(projects);
int i = 0;
for (ResultDocumentDV projectDV : results) {
System.out.println(++i + ") " + projectDV.getId() + " " + JSON_KEY_DATA_INIZIO_PROGETTO + ": "
+ projectDV.getDocumentAsMap().get(JSON_KEY_DATA_INIZIO_PROGETTO) + " "
+ JSON_KEY_DATA_FINE_PROGETTO + ": " + projectDV.getDocumentAsMap().get(JSON_KEY_DATA_FINE_PROGETTO)
+ " " + JSON_KEY_INTRODUZIONE + ": " + projectDV.getDocumentAsMap().get(JSON_KEY_INTRODUZIONE));
}
return results;
}
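// The filter above asks the service only for projects in the "Published" phase and projects just the
// document id, "nome" and the project start/end date fields instead of the full documents.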
/**
* Gets the database connection.
*
* @param dbURL the db url
* @param user the user
* @param pwd the pwd
* @return the database connection
*/
public static Connection getDatabaseConnection(String dbURL, String user, String pwd) {
System.out.println("dbURL: " + dbURL);
Connection c = null;
try {
Class.forName("org.postgresql.Driver");
c = DriverManager.getConnection(dbURL, user, pwd);
c.setAutoCommit(false);
} catch (Exception e) {
e.printStackTrace();
System.err.println(e.getClass().getName() + ": " + e.getMessage());
System.exit(1);
}
System.out.println("Opened database successfully");
return c;
}
public static void renameColumnTable(Connection con, String tableName, String oldColumn, String newColumn)
throws SQLException {
String alterTableString = String.format("ALTER TABLE %s RENAME COLUMN %s TO %s", tableName, oldColumn,
newColumn);
System.out.println("Executing: " + alterTableString);
try {
con.setAutoCommit(false);
if (!READ_ONLY_MODE) {
con.createStatement().execute(alterTableString);
con.commit();
}
} catch (SQLException e) {
e.printStackTrace();
if (con != null) {
try {
System.err.print("Transaction is being rolled back");
con.rollback();
} catch (SQLException excep) {
e.printStackTrace();
}
}
}
System.out.println("Executed: " + alterTableString);
}
public static void addColumnTable(Connection con, String tableName, String newColumn, String type)
throws SQLException {
String alterTableString = String.format("ALTER TABLE %s ADD COLUMN %s %s", tableName, newColumn, type);
System.out.println("Executing: " + alterTableString);
try {
con.setAutoCommit(false);
if (!READ_ONLY_MODE) {
con.createStatement().execute(alterTableString);
con.commit();
}
} catch (SQLException e) {
e.printStackTrace();
if (con != null) {
try {
System.err.print("Transaction is being rolled back");
con.rollback();
} catch (SQLException excep) {
e.printStackTrace();
}
}
}
System.out.println("Executed: " + alterTableString);
}
public static void updateTable(Connection con, String tableName, String whereCondition,
LinkedHashMap<String, String> mapColumnValue) throws SQLException {
// String alterTableString = String.format("UPDATE TABLE %s SET COLUMN %s TO %s", tableName, oldColumn,
// newColumn);
//
StringBuilder updateSQL = new StringBuilder();
updateSQL.append("UPDATE " + tableName + " SET");
int i = 1;
for (String column : mapColumnValue.keySet()) {
updateSQL.append(" " + column + "=?");
if (i < mapColumnValue.size())
updateSQL.append(", ");
i++;
}
updateSQL.append(" WHERE " + whereCondition);
String update = updateSQL.toString();
// System.out.println("Executing: " + update);
try (PreparedStatement updatePS = con.prepareStatement(update);) {
int index = 1;
for (String column : mapColumnValue.keySet()) {
updatePS.setString(index, mapColumnValue.get(column));
index++;
}
con.setAutoCommit(false);
System.out.println("Executing: " + updatePS.toString());
if (!READ_ONLY_MODE) {
updatePS.executeUpdate();
con.commit();
}
} catch (SQLException e) {
e.printStackTrace();
if (con != null) {
try {
System.err.print("Transaction is being rolled back");
con.rollback();
} catch (SQLException excep) {
e.printStackTrace();
}
}
}
}
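// Illustrative usage (where condition and values are placeholders): with a map
// {data_inizio_progetto=1998, data_fine_progetto=2001, descrizione=...} and whereCondition "projectid='abc'",
// the prepared statement is roughly
// "UPDATE profiledconcessioni_gna_centroids SET data_inizio_progetto=?, data_fine_progetto=?, descrizione=? WHERE projectid='abc'"
// with the map values bound in insertion order.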
// Reads all rows of the given table and collects the values of the project-id column
public static List<String> readTableIDs(Connection con, String tableName, String columnNameProjectId) {
PreparedStatement p = null;
ResultSet resultSet = null;
// Try block to catch exception/s
List<String> listProjectIds = new ArrayList<String>();
try {
// SQL command data stored in String datatype
String sql = String.format("SELECT * FROM %s", tableName);
p = con.prepareStatement(sql);
resultSet = p.executeQuery();
ResultSetMetaData rsmd = resultSet.getMetaData();
int columnsNumber = rsmd.getColumnCount();
int row = 1;
System.out.println("\n####TABLE: " + tableName + " content..\n");
while (resultSet.next()) {
System.out.print("" + row + "] ");
for (int i = 1; i <= columnsNumber; i++) {
String columnValue = resultSet.getString(i);
System.out.print("(" + rsmd.getColumnName(i) + ") " + columnValue + " | ");
}
row++;
System.out.println("\n");
String projectId = resultSet.getString(columnNameProjectId);
listProjectIds.add(projectId);
}
System.out.println("####TABLE: " + tableName + " end content\n");
}
// Log the failure; an empty or partial list of project IDs will be returned
catch (SQLException e) {
System.err.println(e);
}
System.out.println("returning list IDs: " + listProjectIds);
return listProjectIds;
}
}

View File

@@ -1,6 +1,14 @@
package org.gcube.application;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import org.gcube.application.geoportalcommon.GeoportalCommon;
import org.gcube.application.geoportalcommon.geoportal.GeoportalClientCaller;
import org.gcube.application.geoportalcommon.geoportal.ProjectsCaller;
import org.gcube.application.geoportalcommon.geoportal.UseCaseDescriptorCaller;
import org.gcube.application.geoportalcommon.shared.GNADataEntryConfigProfile;
import org.gcube.application.geoportalcommon.shared.GNADataViewerConfigProfile;
import org.gcube.application.geoportalcommon.shared.GeoportalItemReferences;
@@ -10,36 +18,73 @@ import org.gcube.common.scope.api.ScopeProvider;
public class TestGNACommon {
private static String TOKEN = "";
private static String CONTEXT = "/gcube/devsec/devVRE";
private static String USERNAME = "francesco.mangiacrapa";
private static final String GCUBE_CONFIG_PROPERTIES_FILENAME = "gcube_config.properties";
// APP Working Directory + /src/test/resources must be the location of
// gcube_config.properties
private static String gcube_config_path = String.format("%s/%s",
System.getProperty("user.dir") + "/src/test/resources", GCUBE_CONFIG_PROPERTIES_FILENAME);
private static String CONTEXT;
private static String TOKEN;
private static UseCaseDescriptorCaller clientUCD = null;
private static ProjectsCaller clientPrj = null;
private static String PROFILE_ID = "profiledConcessioni";
private static String PROJECT_ID = "644a66e944aad51c80409a3b";
private static String MY_LOGIN = "francesco.mangiacrapa";
/**
* Read context settings.
*/
public static void readContextSettings() {
try (InputStream input = new FileInputStream(gcube_config_path)) {
Properties prop = new Properties();
// load a properties file
prop.load(input);
CONTEXT = prop.getProperty("CONTEXT");
TOKEN = prop.getProperty("TOKEN");
// get the property value and print it out
System.out.println("CONTEXT: " + CONTEXT);
System.out.println("TOKEN: " + TOKEN);
} catch (IOException ex) {
ex.printStackTrace();
}
}
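// A minimal sketch of the expected src/test/resources/gcube_config.properties (values are placeholders):
//
//   CONTEXT=/gcube/devsec/devVRE
//   TOKEN=<your-gcube-token>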
//@Before
public void init() {
readContextSettings();
ScopeProvider.instance.set(CONTEXT);
SecurityTokenProvider.instance.set(TOKEN);
clientPrj = GeoportalClientCaller.projects();
clientUCD = GeoportalClientCaller.useCaseDescriptors();
}
// @Test
public void getGeoNaDataViewProfile() throws Exception {
System.out.println("getGeoNaDataViewProfile called");
ScopeProvider.instance.set(CONTEXT);
GeoportalCommon gc = new GeoportalCommon();
GNADataViewerConfigProfile profile = gc.readGNADataViewerConfig(null);
System.out.println("Returning profile: " + profile);
}
//@Test
public void getLinks() throws Exception {
System.out.println("getLinks called");
ScopeProvider.instance.set(CONTEXT);
GeoportalCommon gc = new GeoportalCommon();
GeoportalItemReferences item = new GeoportalItemReferences("", "concessione");
GeoportalItemReferences links = gc.getPublicLinksFor(item, true);
return links;
GeoportalItemReferences item = new GeoportalItemReferences(PROJECT_ID, PROFILE_ID);
GeoportalItemReferences links = gc.getPublicLinksFor(CONTEXT, item, true);
System.out.println(links);
}
// @Test

View File

@@ -2,11 +2,15 @@ package org.gcube.application;
import static org.gcube.application.geoportal.client.plugins.GeoportalAbstractPlugin.useCaseDescriptors;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Properties;
import java.util.stream.Collectors;
import org.bson.Document;
@@ -19,6 +23,7 @@ import org.gcube.application.geoportalcommon.shared.geoportal.ConfigurationDV;
import org.gcube.application.geoportalcommon.shared.geoportal.config.ActionDefinitionDV;
import org.gcube.application.geoportalcommon.shared.geoportal.config.GcubeProfileDV;
import org.gcube.application.geoportalcommon.shared.geoportal.config.ItemFieldDV;
import org.gcube.application.geoportalcommon.shared.geoportal.ucd.GEOPORTAL_CONFIGURATION_TYPE;
import org.gcube.application.geoportalcommon.shared.geoportal.ucd.GEOPORTAL_DATA_HANDLER;
import org.gcube.application.geoportalcommon.shared.geoportal.ucd.HandlerDeclarationDV;
import org.gcube.application.geoportalcommon.shared.geoportal.ucd.UseCaseDescriptorDV;
@@ -26,6 +31,7 @@ import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import org.json.JSONArray;
import org.json.JSONObject;
import org.junit.Before;
import com.jayway.jsonpath.JsonPath;
import com.jayway.jsonpath.spi.json.JsonOrgJsonProvider;
@@ -39,24 +45,53 @@
*/
public class UCD_Tests {
private static final String GCUBE_CONFIG_PROPERTIES_FILENAME = "gcube_config.properties";
// APP Working Directory + /src/test/resources must be the location of
// gcube_config.properties
private static String gcube_config_path = String.format("%s/%s",
System.getProperty("user.dir") + "/src/test/resources", GCUBE_CONFIG_PROPERTIES_FILENAME);
private static String CONTEXT;
private static String TOKEN;
private static String CONTEXT = "/pred4s/preprod/preVRE";
private static String TOKEN = ""; //preVRE
private UseCaseDescriptorCaller client = null;
//private static String CONTEXT = "/gcube/devsec/devVRE";
//private static String TOKEN = ""; // devVRE
private static String PROFILE_ID = "profiledConcessioni";
/**
* Read context settings.
*/
public static void readContextSettings() {
try (InputStream input = new FileInputStream(gcube_config_path)) {
Properties prop = new Properties();
// load a properties file
prop.load(input);
CONTEXT = prop.getProperty("CONTEXT");
TOKEN = prop.getProperty("TOKEN");
// get the property value and print it out
System.out.println("CONTEXT: " + CONTEXT);
System.out.println("TOKEN: " + TOKEN);
} catch (IOException ex) {
ex.printStackTrace();
}
}
/**
* Gets the client.
*
* @return the client
*/
@Before
public void getClient() {
// assumeTrue(GCubeTest.isTestInfrastructureEnabled());
readContextSettings();
ScopeProvider.instance.set(CONTEXT);
SecurityTokenProvider.instance.set(TOKEN);
client = GeoportalClientCaller.useCaseDescriptors();
@@ -193,6 +228,77 @@ public class UCD_Tests {
}
//@Test
public void getUCDGroupedOverlayLayers() throws Exception {
ScopeProvider.instance.set(CONTEXT);
SecurityTokenProvider.instance.set(TOKEN);
List<String> handlersIds = null;
List<UseCaseDescriptor> listUseCaseDescriptor;
try {
UseCaseDescriptorCaller client = GeoportalClientCaller.useCaseDescriptors();
if (handlersIds == null) {
handlersIds = Arrays.asList(GEOPORTAL_DATA_HANDLER.geoportal_grouped_overlay_layers.getId());
System.out.println("handlersIds is null, so using default: " + handlersIds);
}
listUseCaseDescriptor = client.getListForHandlerIds(handlersIds);
} catch (Exception e) {
e.printStackTrace();
return;
}
if (listUseCaseDescriptor == null) {
listUseCaseDescriptor = new ArrayList<UseCaseDescriptor>();
}
List<UseCaseDescriptorDV> listUCDDV = new ArrayList<UseCaseDescriptorDV>(listUseCaseDescriptor.size());
for (UseCaseDescriptor ucd : listUseCaseDescriptor) {
listUCDDV.add(ConvertToDataValueObjectModel.toUseCaseDescriptorDV(ucd, null));
}
System.out.println(listUCDDV);
}
//@Test
public void getUCDGroupedCrossFilteringLayers() throws Exception {
System.out.println("Running getUCDGroupedCrossFilteringLayers");
ScopeProvider.instance.set(CONTEXT);
SecurityTokenProvider.instance.set(TOKEN);
List<String> handlersIds = null;
List<UseCaseDescriptor> listUseCaseDescriptor;
try {
UseCaseDescriptorCaller client = GeoportalClientCaller.useCaseDescriptors();
if (handlersIds == null) {
handlersIds = Arrays.asList(GEOPORTAL_DATA_HANDLER.geoportal_grouped_cross_filtering.getId());
System.out.println("handlersIds is null, so using default: " + handlersIds);
}
listUseCaseDescriptor = client.getListForHandlerIds(handlersIds);
} catch (Exception e) {
e.printStackTrace();
return;
}
if (listUseCaseDescriptor == null) {
listUseCaseDescriptor = new ArrayList<UseCaseDescriptor>();
}
List<UseCaseDescriptorDV> listUCDDV = new ArrayList<UseCaseDescriptorDV>(listUseCaseDescriptor.size());
for (UseCaseDescriptor ucd : listUseCaseDescriptor) {
listUCDDV.add(ConvertToDataValueObjectModel.toUseCaseDescriptorDV(ucd, null));
}
System.out.println(listUCDDV);
System.out.println("Terminated getUCDGroupedCrossFilteringLayers");
}
/**
* Gets the UCD for handler types.
*
@@ -201,27 +307,36 @@
*/
//@Test
public void getUCDForHandlerTypes() throws Exception {
System.out.println("Running getUCDForHandlerTypes");
ScopeProvider.instance.set(CONTEXT);
SecurityTokenProvider.instance.set(TOKEN);
String handlerType = GEOPORTAL_DATA_HANDLER.geoportal_grouped_cross_filtering.getType();
UseCaseDescriptor ucd = null;
List<HandlerDeclaration> handlers = null;
try {
UseCaseDescriptorCaller client = GeoportalClientCaller.useCaseDescriptors();
ucd = client.getUCDForId(PROFILE_ID);
handlers = client.getHandlersByType(PROFILE_ID, handlerType);
} catch (Exception e) {
e.printStackTrace();
return;
}
int i = 0;
for (HandlerDeclaration handlerDeclaration : handlers) {
System.out.println(handlerDeclaration);
System.out.println("Config json: "+handlerDeclaration.getConfiguration().toJson());
System.out.println(++i+")##Config json: "+handlerDeclaration.getConfiguration().toJson());
HandlerDeclarationDV handlerDV = ConvertToDataValueObjectModel.toHandlerDeclarationDV(handlerDeclaration, ucd, GEOPORTAL_CONFIGURATION_TYPE.grouped_cross_filtering_layers);
System.out.println("\nhandlerDV is: "+handlerDV);
}
System.out.println("\n");
System.out.println("Terminated getUCDForHandlerTypes");
}
}
/**

View File

@@ -0,0 +1,185 @@
package org.gcube.application.se;
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.util.ArrayList;
import java.util.List;
import org.gcube.common.encryption.StringEncrypter;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The Class RuntimeResourceReader.
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it Mar 10, 2017
*/
public class RuntimeResourceReader {
public static final Logger logger = LoggerFactory.getLogger(RuntimeResourceReader.class);
private List<ServiceEndpointBean> listSE = new ArrayList<ServiceEndpointBean>();
/**
* Instantiates a new runtime resource reader.
*
* @param scope the scope
* @param resourceName the resource name
* @param platformName the platform name
* @param category the category
* @param endPoint the end point
* @throws Exception the exception
*/
public RuntimeResourceReader(String scope, String resourceName, String platformName, String category,
String endPoint) throws Exception {
read(scope, resourceName, platformName, category, endPoint);
}
/**
* Reads the ServiceEndpoint resources matching the given criteria from the IS.
*
* @param scope the scope
* @param resourceName the resource name (optional)
* @param platformName the platform name
* @param category the category
* @param endPoint the end point (optional)
* @return the list of matching service endpoint beans
* @throws Exception the exception
*/
private List<ServiceEndpointBean> read(String scope, String resourceName, String platformName, String category,
String endPoint) throws Exception {
String originalScope = null;
try {
originalScope = ScopeProvider.instance.get();
ScopeProvider.instance.set(scope);
SimpleQuery query = queryFor(ServiceEndpoint.class);
if (resourceName != null)
query.addCondition("$resource/Profile/Name/text() eq '" + resourceName + "'");
query.addCondition("$resource/Profile/Platform/Name/text() eq '" + platformName + "'");
query.addCondition("$resource/Profile/Category/text() eq '" + category + "'");
if (endPoint != null && !endPoint.isEmpty())
query.addCondition("$resource/Profile/AccessPoint/Interface/Endpoint/text() eq '" + endPoint + "'");
// query.addVariable("$prop", "$resource/Profile/AccessPoint/Properties/Property")
// .addCondition("$prop/Name/text() eq 'priority'")
// .addCondition("$prop/Value/text() eq '1'");
logger.info("GeoRuntimeReader, using scope: " + scope + ", to get resource: " + platformName);
DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
List<ServiceEndpoint> listServiceEndpoint = client.submit(query);
if (listServiceEndpoint == null || listServiceEndpoint.isEmpty())
throw new Exception("Cannot retrieve the runtime resource: " + platformName);
for (ServiceEndpoint serviceEndpoint : listServiceEndpoint) {
ServiceEndpointBean seb = new ServiceEndpointBean();
seb.setRuntime(serviceEndpoint.profile().runtime());
List<AccessPoint> listAp = new ArrayList<ServiceEndpoint.AccessPoint>();
try {
for (AccessPoint accessPoint : serviceEndpoint.profile().accessPoints()) {
listAp.add(accessPoint);
}
} catch (Exception e) {
System.err.println("Error on reading Access point not found");
}
seb.setListAP(listAp);
listSE.add(seb);
}
} catch (Exception e) {
logger.error("Sorry, an error occurred on reading parameters in Runtime Resources", e);
} finally {
if (originalScope != null && !originalScope.isEmpty()) {
ScopeProvider.instance.set(originalScope);
logger.info("scope provider setted to orginal scope: " + originalScope);
} else {
ScopeProvider.instance.reset();
logger.info("scope provider reset");
}
}
logger.info("returning list: " + listSE);
return listSE;
}
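// The IS query above matches ServiceEndpoint resources by Platform/Name and Category, optionally narrowed
// by resource Name and AccessPoint Endpoint; the scope is switched only for the duration of the query and
// then restored (or reset) in the finally block.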
public List<ServiceEndpointBean> getListSE() {
return listSE;
}
public static String dectryptPassword(String scope, AccessPoint ap) {
ScopeProvider.instance.set(scope);
logger.info("username: " + ap.username());
logger.info("password: " + ap.password());
String decryptedPassword = null;
try {
decryptedPassword = StringEncrypter.getEncrypter().decrypt(ap.password());
logger.info("Decrypted Password: " + decryptedPassword);
} catch (Exception e) {
logger.info("ignoring exception during pwd decrypting");
}
return decryptedPassword;
}
/**
* The main method.
*
* @param args the arguments
*/
public static void main(String[] args) {
//String scope = "/gcube/devsec/devVRE";
//String scope = "/pred4s/preprod/preVRE";
String scope = "/d4science.research-infrastructures.eu/D4OS/GNA";
String platformName = "GeoServer";
String category = "Gis";
// String platformName = "postgis";
// String category = "Database";
// scope = "/pred4s/preprod/preVRE";
RuntimeResourceReader reader;
try {
ScopeProvider.instance.set(scope);
reader = new RuntimeResourceReader(scope, null, platformName, category, null);
for (ServiceEndpointBean seb : reader.getListSE()) {
System.out.println("Found: " + seb);
List<AccessPoint> listAp = seb.getListAP();
for (AccessPoint ap : listAp) {
System.out.println("username: " + ap.username());
System.out.println("password: " + ap.password());
try {
String decryptedPassword = StringEncrypter.getEncrypter().decrypt(ap.password());
System.out.println("Decrypted Password: " + decryptedPassword);
} catch (Exception e) {
System.out.println("ignoring exception during pwd decrypting");
}
}
}
} catch (Exception e) {
// Log any failure of this manual check
e.printStackTrace();
}
}
}

View File

@@ -0,0 +1,60 @@
package org.gcube.application.se;
import java.io.Serializable;
import java.util.List;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
/**
* The Class ServiceEndpointBean.
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it May 16, 2017
*/
public class ServiceEndpointBean implements Serializable {
/**
*
*/
private static final long serialVersionUID = 2459971193655529274L;
protected org.gcube.common.resources.gcore.ServiceEndpoint.Runtime runtime;
protected List<AccessPoint> listAP;
public ServiceEndpointBean() {
// no-arg constructor
}
public ServiceEndpointBean(org.gcube.common.resources.gcore.ServiceEndpoint.Runtime runtime, List<AccessPoint> listAP) {
super();
this.runtime = runtime;
this.listAP = listAP;
}
public org.gcube.common.resources.gcore.ServiceEndpoint.Runtime getRuntime() {
return runtime;
}
public List<AccessPoint> getListAP() {
return listAP;
}
public void setRuntime(org.gcube.common.resources.gcore.ServiceEndpoint.Runtime runtime2) {
this.runtime = runtime2;
}
public void setListAP(List<AccessPoint> listAP) {
this.listAP = listAP;
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("ServiceEndpoint [runtime=");
builder.append(runtime);
builder.append(", listAP=");
builder.append(listAP);
builder.append("]");
return builder.toString();
}
}

View File

@@ -6,3 +6,16 @@
/preprod.gcubekey
/geoportal-config.json
/devVRE_TOKEN.txt
/CNR.it.gcubekey
/D4OS.gcubekey
/D4Research.gcubekey
/ISTI.gcubekey
/OpenAIRE.gcubekey
/ParthenosVO.gcubekey
/SmartArea.gcubekey
/SoBigData.gcubekey
/d4science.research-infrastructures.eu.gcubekey
/FARM.gcubekey
/gCubeApps.gcubekey
/devVRE.gcubekey
/gcube_config.properties