Compare commits

...

19 Commits

Author SHA1 Message Date
Francesco Mangiacrapa f447964e79 removed -SNAPSHOT to be released 2024-06-28 15:53:22 +02:00
Francesco Mangiacrapa 3b3555ac3e Merge pull request 'feature_27120' (#11) from feature_27120 into master
Reviewed-on: #11
2024-06-28 15:51:22 +02:00
Francesco Mangiacrapa 4923b9656f updated to PerformStepRequest 2024-04-15 12:33:53 +02:00
Francesco Mangiacrapa 3174b8f937 added new bean StepPerformedResultDV 2024-04-11 11:04:09 +02:00
Francesco Mangiacrapa ad646a8bf9 removed optionalMessage as parameter and added into StepExecutionRequest bean 2024-04-11 09:49:36 +02:00
Francesco Mangiacrapa 05e9d3d426 indent 2024-04-10 17:18:27 +02:00
Francesco Mangiacrapa 06b257fcf3 - Added optional message when performing lifecycle step [#27192] 2024-04-08 17:18:54 +02:00
Francesco Mangiacrapa 4ca2a6b1bd renamed to GET_SHAREABLE_LINK 2024-04-05 15:57:32 +02:00
Francesco Mangiacrapa 235c12f294 - Integrated new Uri-Resolver-Manager [#27160]
- Added new operation "Share project"
2024-04-05 10:50:21 +02:00
Francesco Mangiacrapa c66e08ffc9 updated SearchingFilter 2024-03-26 16:58:47 +01:00
Francesco Mangiacrapa ff4e93ca23 Classes to migrate UCD data model 2024-02-02 09:42:12 +01:00
Francesco Mangiacrapa 6a0dc3c447 Added readContextSettings 2024-01-09 16:35:43 +01:00
Francesco Mangiacrapa 92da4837ee created script for updating GNA data model #26349 2024-01-09 12:14:26 +01:00
Francesco Mangiacrapa aeed289ca5 Updated changelog 2023-09-14 10:15:04 +02:00
Francesco Mangiacrapa 76909a2e68 removed -SNAPSHOT to be released 2023-09-11 14:20:07 +02:00
Francesco Mangiacrapa 474b870396 Merge pull request 'Read countByPhase from configuration [#25598]' (#10) from feature_25598 into master
Reviewed-on: #10
2023-09-11 14:19:20 +02:00
Francesco Mangiacrapa 704989f612 Read countByPhase from configuration [#25598] 2023-09-08 12:27:07 +02:00
Francesco Mangiacrapa 2a79c62038 removed -SNAPSHOT to be released 2023-09-05 15:24:07 +02:00
Francesco Mangiacrapa f819b0f10a Reduced/Optimized some LOGs, done [#25539] 2023-08-31 15:24:06 +02:00
19 changed files with 1538 additions and 91 deletions

CHANGELOG.md

@ -4,6 +4,18 @@
All notable changes to this project will be documented in this file. All notable changes to this project will be documented in this file.
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [v2.4.0]
- Search Filter for profileId and projectID [#27120]
- Integrated new Uri-Resolver-Manager [#27160]
- Added new operation "Share project"
- Added optional message when performing lifecycle step [#27192]
## [v2.3.0]
- Read countByPhase from configuration [#25598]
- Reduced/Optimized some LOGs [#25539]
## [v2.2.0] ## [v2.2.0]
- Integrated the cross-filtering configuration [#25074] - Integrated the cross-filtering configuration [#25074]

pom.xml

@ -10,7 +10,7 @@
<groupId>org.gcube.application</groupId> <groupId>org.gcube.application</groupId>
<artifactId>geoportal-data-common</artifactId> <artifactId>geoportal-data-common</artifactId>
<version>2.2.0</version> <version>2.4.0</version>
<description>GeoPortal Data Common is common library used by GUI components developed for GeoNA</description> <description>GeoPortal Data Common is common library used by GUI components developed for GeoNA</description>
<scm> <scm>
@ -34,6 +34,7 @@
<properties> <properties>
<maven.compiler.source>1.8</maven.compiler.source> <maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target> <maven.compiler.target>1.8</maven.compiler.target>
<gcube.bom>2.4.0</gcube.bom>
</properties> </properties>
<dependencyManagement> <dependencyManagement>
@ -41,7 +42,7 @@
<dependency> <dependency>
<groupId>org.gcube.distribution</groupId> <groupId>org.gcube.distribution</groupId>
<artifactId>gcube-bom</artifactId> <artifactId>gcube-bom</artifactId>
<version>2.3.0</version> <version>${gcube.bom}</version>
<type>pom</type> <type>pom</type>
<scope>import</scope> <scope>import</scope>
</dependency> </dependency>

ConvertToDataValueObjectModel.java

@ -66,6 +66,7 @@ import org.gcube.application.geoportalcommon.shared.geoportal.ucd.HandlerDeclara
import org.gcube.application.geoportalcommon.shared.geoportal.ucd.RelationshipDefinitionDV; import org.gcube.application.geoportalcommon.shared.geoportal.ucd.RelationshipDefinitionDV;
import org.gcube.application.geoportalcommon.shared.geoportal.ucd.UseCaseDescriptorDV; import org.gcube.application.geoportalcommon.shared.geoportal.ucd.UseCaseDescriptorDV;
import org.gcube.application.geoportalcommon.util.DateUtils; import org.gcube.application.geoportalcommon.util.DateUtils;
import org.gcube.application.geoportalcommon.util.StringUtil;
import org.json.JSONException; import org.json.JSONException;
import org.json.JSONObject; import org.json.JSONObject;
import org.slf4j.Logger; import org.slf4j.Logger;
@ -94,6 +95,8 @@ import com.jayway.jsonpath.Option;
*/ */
public class ConvertToDataValueObjectModel { public class ConvertToDataValueObjectModel {
public static final int _MAX_LENGHT_STRING_AT_INFO = 200;
private static Logger LOG = LoggerFactory.getLogger(ConvertToDataValueObjectModel.class); private static Logger LOG = LoggerFactory.getLogger(ConvertToDataValueObjectModel.class);
private static final String NO_TIME = "T00:00"; private static final String NO_TIME = "T00:00";
@ -174,7 +177,9 @@ public class ConvertToDataValueObjectModel {
} }
ucdVO.setHandlers(listHandlersDV); ucdVO.setHandlers(listHandlersDV);
LOG.info("returning {}", ucdVO); LOG.info("returning {}", StringUtil.ellipsize(ucdVO.toString(), _MAX_LENGHT_STRING_AT_INFO));
if (LOG.isDebugEnabled())
LOG.debug("returning {}", ucdVO);
return ucdVO; return ucdVO;
} }
@ -313,7 +318,10 @@ public class ConvertToDataValueObjectModel {
dDV.setConfiguration(listGcubeProfiles); dDV.setConfiguration(listGcubeProfiles);
dDV.setConfigurationType(geoportalConfigType); // -> GEOPORTAL_CONFIGURATION_TYPE.gcube_profiles dDV.setConfigurationType(geoportalConfigType); // -> GEOPORTAL_CONFIGURATION_TYPE.gcube_profiles
hdDV.setConfiguration(dDV); hdDV.setConfiguration(dDV);
LOG.info("returning {}", hdDV); LOG.info("Found config {}", GEOPORTAL_CONFIGURATION_TYPE.gcube_profiles);
LOG.info("returning {}", StringUtil.ellipsize(hdDV.toString(), _MAX_LENGHT_STRING_AT_INFO));
if (LOG.isDebugEnabled())
LOG.debug("returning {}", hdDV);
return hdDV; return hdDV;
} }
case item_fields: { case item_fields: {
@ -332,7 +340,10 @@ public class ConvertToDataValueObjectModel {
dDV.setConfiguration(listItemFields); dDV.setConfiguration(listItemFields);
dDV.setConfigurationType(geoportalConfigType); // -> GEOPORTAL_CONFIGURATION_TYPE.item_fields dDV.setConfigurationType(geoportalConfigType); // -> GEOPORTAL_CONFIGURATION_TYPE.item_fields
hdDV.setConfiguration(dDV); hdDV.setConfiguration(dDV);
LOG.info("returning {}", hdDV); LOG.info("Found config {}", GEOPORTAL_CONFIGURATION_TYPE.item_fields);
LOG.info("returning {}", StringUtil.ellipsize(hdDV.toString(), _MAX_LENGHT_STRING_AT_INFO));
if (LOG.isDebugEnabled())
LOG.debug("returning {}", hdDV);
return hdDV; return hdDV;
} }
@ -375,7 +386,10 @@ public class ConvertToDataValueObjectModel {
dDV.setConfiguration(listActionsDef); dDV.setConfiguration(listActionsDef);
dDV.setConfigurationType(geoportalConfigType); // -> GEOPORTAL_CONFIGURATION_TYPE.actions_definition dDV.setConfigurationType(geoportalConfigType); // -> GEOPORTAL_CONFIGURATION_TYPE.actions_definition
hdDV.setConfiguration(dDV); hdDV.setConfiguration(dDV);
LOG.info("returning {}", hdDV); LOG.info("Found config {}", GEOPORTAL_CONFIGURATION_TYPE.actions_definition);
LOG.info("returning {}", StringUtil.ellipsize(hdDV.toString(), _MAX_LENGHT_STRING_AT_INFO));
if (LOG.isDebugEnabled())
LOG.debug("returning {}", hdDV);
return hdDV; return hdDV;
} }
@ -388,7 +402,8 @@ public class ConvertToDataValueObjectModel {
ObjectMapper mapper = new ObjectMapper(); ObjectMapper mapper = new ObjectMapper();
for (String asJSONString : jsonConfigurations) { for (String asJSONString : jsonConfigurations) {
LOG.debug(++i + ") the " + GroupedLayersDV.class.getSimpleName() + " is: " + asJSONString); LOG.debug(++i + ") the " + GroupedLayersDV.class.getSimpleName() + " is: " + asJSONString);
TypeReference<GroupedLayersDV<ConfiguredLayerDV>> typeRef = new TypeReference<GroupedLayersDV<ConfiguredLayerDV>>() {}; TypeReference<GroupedLayersDV<ConfiguredLayerDV>> typeRef = new TypeReference<GroupedLayersDV<ConfiguredLayerDV>>() {
};
GroupedLayersDV<ConfiguredLayerDV> profile = mapper.readValue(asJSONString, typeRef); GroupedLayersDV<ConfiguredLayerDV> profile = mapper.readValue(asJSONString, typeRef);
// GroupedLayersDV<ConfiguredLayerDV> profile = org.gcube.application.geoportal.client.utils.Serialization // GroupedLayersDV<ConfiguredLayerDV> profile = org.gcube.application.geoportal.client.utils.Serialization
// .read(asJSONString, typeRef); // .read(asJSONString, typeRef);
@ -401,7 +416,10 @@ public class ConvertToDataValueObjectModel {
dDV.setConfiguration(listGroupedCL); dDV.setConfiguration(listGroupedCL);
dDV.setConfigurationType(geoportalConfigType); dDV.setConfigurationType(geoportalConfigType);
hdDV.setConfiguration(dDV); hdDV.setConfiguration(dDV);
LOG.info("returning {}", hdDV); LOG.info("Found config {}", GEOPORTAL_CONFIGURATION_TYPE.grouped_overlay_layers);
LOG.info("returning {}", StringUtil.ellipsize(hdDV.toString(), _MAX_LENGHT_STRING_AT_INFO));
if (LOG.isDebugEnabled())
LOG.debug("returning {}", hdDV);
return hdDV; return hdDV;
} }
@ -414,7 +432,8 @@ public class ConvertToDataValueObjectModel {
ObjectMapper mapper = new ObjectMapper(); ObjectMapper mapper = new ObjectMapper();
for (String asJSONString : jsonConfigurations) { for (String asJSONString : jsonConfigurations) {
LOG.debug(++i + ") the " + GroupedLayersDV.class.getSimpleName() + " is: " + asJSONString); LOG.debug(++i + ") the " + GroupedLayersDV.class.getSimpleName() + " is: " + asJSONString);
TypeReference<GroupedLayersDV<CrossFilteringLayerDV>> typeRef = new TypeReference<GroupedLayersDV<CrossFilteringLayerDV>>() {}; TypeReference<GroupedLayersDV<CrossFilteringLayerDV>> typeRef = new TypeReference<GroupedLayersDV<CrossFilteringLayerDV>>() {
};
GroupedLayersDV<CrossFilteringLayerDV> profile = mapper.readValue(asJSONString, typeRef); GroupedLayersDV<CrossFilteringLayerDV> profile = mapper.readValue(asJSONString, typeRef);
// GroupedLayersDV<CrossFilteringLayerDV> profile = org.gcube.application.geoportal.client.utils.Serialization // GroupedLayersDV<CrossFilteringLayerDV> profile = org.gcube.application.geoportal.client.utils.Serialization
// .read(asJSONString, GroupedLayersDV.class); // .read(asJSONString, GroupedLayersDV.class);
@ -427,7 +446,10 @@ public class ConvertToDataValueObjectModel {
dDV.setConfiguration(listGroupedCL); dDV.setConfiguration(listGroupedCL);
dDV.setConfigurationType(geoportalConfigType); dDV.setConfigurationType(geoportalConfigType);
hdDV.setConfiguration(dDV); hdDV.setConfiguration(dDV);
LOG.info("returning {}", hdDV); LOG.info("Found config {}", GEOPORTAL_CONFIGURATION_TYPE.grouped_cross_filtering_layers);
LOG.info("returning {}", StringUtil.ellipsize(hdDV.toString(), _MAX_LENGHT_STRING_AT_INFO));
if (LOG.isDebugEnabled())
LOG.debug("returning {}", hdDV);
return hdDV; return hdDV;
} }
@ -457,7 +479,7 @@ public class ConvertToDataValueObjectModel {
LOG.trace("toGcubeProfileDV called"); LOG.trace("toGcubeProfileDV called");
if (gCubeProfile == null) { if (gCubeProfile == null) {
LOG.warn(GcubeProfile.class.getSimpleName() + " is null"); LOG.info(GcubeProfile.class.getSimpleName() + " is null");
return null; return null;
} }
@ -481,7 +503,9 @@ public class ConvertToDataValueObjectModel {
gpVO.setFilePaths(filePathsVO); gpVO.setFilePaths(filePathsVO);
} }
LOG.info("returning: " + gpVO); LOG.info("returning {}", StringUtil.ellipsize(gpVO.toString(), _MAX_LENGHT_STRING_AT_INFO));
if (LOG.isDebugEnabled())
LOG.debug("returning {}", gpVO);
return gpVO; return gpVO;
} }
@ -495,7 +519,7 @@ public class ConvertToDataValueObjectModel {
LOG.trace("toItemFieldDV called"); LOG.trace("toItemFieldDV called");
if (itemField == null) { if (itemField == null) {
LOG.warn(ItemField.class.getSimpleName() + " is null"); LOG.info(ItemField.class.getSimpleName() + " is null");
return null; return null;
} }
@ -507,7 +531,7 @@ public class ConvertToDataValueObjectModel {
ifDV.setSearchable(itemField.isSearchable()); ifDV.setSearchable(itemField.isSearchable());
ifDV.setSortable(itemField.isSortable()); ifDV.setSortable(itemField.isSortable());
LOG.info("returning: " + ifDV); LOG.debug("returning: " + ifDV);
return ifDV; return ifDV;
} }
@ -534,7 +558,9 @@ public class ConvertToDataValueObjectModel {
actDef.setTitle(actionDefinition.getTitle()); actDef.setTitle(actionDefinition.getTitle());
actDef.setRoles(roles); actDef.setRoles(roles);
LOG.info("returning: " + actionDefinition); LOG.info("returning {}", StringUtil.ellipsize(actionDefinition.toString(), _MAX_LENGHT_STRING_AT_INFO));
if (LOG.isDebugEnabled())
LOG.debug("returning {}", actionDefinition);
return actDef; return actDef;
} }
@ -548,7 +574,7 @@ public class ConvertToDataValueObjectModel {
LOG.trace("toFilePathDV called"); LOG.trace("toFilePathDV called");
if (filePath == null) { if (filePath == null) {
LOG.warn("List of " + FilePath.class.getSimpleName() + " is null"); LOG.info("List of " + FilePath.class.getSimpleName() + " is null for {}", filePath);
return null; return null;
} }
@ -557,7 +583,9 @@ public class ConvertToDataValueObjectModel {
fpVO.setFieldDefinition(filePath.getFieldDefinition()); fpVO.setFieldDefinition(filePath.getFieldDefinition());
fpVO.setGcubeProfileFieldName(filePath.getGcubeProfileFieldName()); fpVO.setGcubeProfileFieldName(filePath.getGcubeProfileFieldName());
LOG.info("returning: " + fpVO); LOG.info("returning {}", StringUtil.ellipsize(fpVO.toString(), _MAX_LENGHT_STRING_AT_INFO));
if (LOG.isDebugEnabled())
LOG.debug("returning {}", fpVO);
return fpVO; return fpVO;
} }

GeoportalCommon.java

@ -8,10 +8,12 @@ import java.util.Map;
import org.gcube.application.geoportalcommon.shared.GNADataEntryConfigProfile; import org.gcube.application.geoportalcommon.shared.GNADataEntryConfigProfile;
import org.gcube.application.geoportalcommon.shared.GNADataViewerConfigProfile; import org.gcube.application.geoportalcommon.shared.GNADataViewerConfigProfile;
import org.gcube.application.geoportalcommon.shared.GeoportalItemReferences; import org.gcube.application.geoportalcommon.shared.GeoportalItemReferences;
import org.gcube.application.geoportalcommon.shared.GeoportalItemReferences.SHARE_LINK_TO;
import org.gcube.application.geoportalcommon.shared.PublicLink; import org.gcube.application.geoportalcommon.shared.PublicLink;
import org.gcube.portlets.user.uriresolvermanager.UriResolverManager; import org.gcube.portlets.user.uriresolvermanager.UriResolverManager;
import org.gcube.portlets.user.uriresolvermanager.resolvers.query.GeoportalResolverQueryStringBuilder; import org.gcube.portlets.user.uriresolvermanager.resolvers.query.GeoportalResolverQueryStringBuilder;
import org.gcube.portlets.user.uriresolvermanager.resolvers.query.GeoportalResolverQueryStringBuilder.RESOLVE_AS; import org.gcube.portlets.user.uriresolvermanager.resolvers.query.GeoportalResolverQueryStringBuilder.RESOLVE_AS;
import org.gcube.portlets.user.uriresolvermanager.resolvers.query.GeoportalResolverQueryStringBuilder.TARGET_GEOPORTAL_APP;
import org.gcube.portlets.user.urlshortener.UrlShortener; import org.gcube.portlets.user.urlshortener.UrlShortener;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@ -65,6 +67,24 @@ public class GeoportalCommon {
item.getProjectID()); item.getProjectID());
builder.scope(gcubeScope); builder.scope(gcubeScope);
builder.resolverAs(RESOLVE_AS.PRIVATE); builder.resolverAs(RESOLVE_AS.PRIVATE);
SHARE_LINK_TO shareLinkTo = item.getShareLinkTo();
TARGET_GEOPORTAL_APP targetApp = null;
if (shareLinkTo != null) {
switch (shareLinkTo) {
case DATA_ENTRY:
targetApp = TARGET_GEOPORTAL_APP.GEO_DE;
break;
case DATA_VIEWER:
default:
targetApp = TARGET_GEOPORTAL_APP.GEO_DV;
break;
}
builder.targetApp(targetApp);
}
// builder.resolverAs(RESOLVE_AS.PRIVATE); // builder.resolverAs(RESOLVE_AS.PRIVATE);
Map<String, String> params = builder.buildQueryParameters(); Map<String, String> params = builder.buildQueryParameters();
String link = resolver.getLink(params, false); String link = resolver.getLink(params, false);
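
The target application of the generated share link is now chosen by the caller through the new SHARE_LINK_TO hint on GeoportalItemReferences. A minimal caller-side sketch (projectID and profileID are placeholders for the mongo ID and the UCD ID):

// DATA_ENTRY is mapped to TARGET_GEOPORTAL_APP.GEO_DE, DATA_VIEWER to GEO_DV;
// when no hint is set, no targetApp parameter is added and the resolver default applies.
GeoportalItemReferences item = new GeoportalItemReferences(projectID, profileID, SHARE_LINK_TO.DATA_ENTRY);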

SerializerUtil.java

@ -97,7 +97,7 @@ public class SerializerUtil {
@Override @Override
public GeoServerPlatformInfoDV[] deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException { public GeoServerPlatformInfoDV[] deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
LOG.info("Sono qui: " +jp.getValueAsString()); LOG.info("deserialize: " +jp.getValueAsString());
return jp.readValueAs(GeoServerPlatformInfoDV[].class); return jp.readValueAs(GeoServerPlatformInfoDV[].class);
//return jp.readValueAs(GeoServerPlatformInfoDV[].class); //return jp.readValueAs(GeoServerPlatformInfoDV[].class);
//List<GeoServerPlatformInfoDV> listPlatform = jp.readValueAs(new TypeReference<List<GeoServerPlatformInfoDV>>() {}); //List<GeoServerPlatformInfoDV> listPlatform = jp.readValueAs(new TypeReference<List<GeoServerPlatformInfoDV>>() {});

GeoportalConfigUtil.java

@ -57,7 +57,7 @@ public class GeoportalConfigUtil {
JSONArray arrayRoles = new JSONArray(data.get(0).toString()); JSONArray arrayRoles = new JSONArray(data.get(0).toString());
for (int i = 0; i < arrayRoles.length(); i++) { for (int i = 0; i < arrayRoles.length(); i++) {
String role = arrayRoles.get(i).toString(); String role = arrayRoles.get(i).toString();
LOG.info("for STEP_ID {} read role {}", stepID, role); LOG.debug("for STEP_ID {} read role {}", stepID, role);
listdata.add(role); listdata.add(role);
} }
}catch (Exception e) { }catch (Exception e) {

ProjectsCaller.java

@ -21,12 +21,12 @@ import org.gcube.application.geoportal.common.model.document.Project;
import org.gcube.application.geoportal.common.model.document.relationships.RelationshipNavigationObject; import org.gcube.application.geoportal.common.model.document.relationships.RelationshipNavigationObject;
import org.gcube.application.geoportal.common.model.rest.CreateRelationshipRequest; import org.gcube.application.geoportal.common.model.rest.CreateRelationshipRequest;
import org.gcube.application.geoportal.common.model.rest.DeleteRelationshipRequest; import org.gcube.application.geoportal.common.model.rest.DeleteRelationshipRequest;
import org.gcube.application.geoportal.common.model.rest.PerformStepRequest;
import org.gcube.application.geoportal.common.model.rest.QueryRequest; import org.gcube.application.geoportal.common.model.rest.QueryRequest;
import org.gcube.application.geoportal.common.model.rest.QueryRequest.OrderedRequest; import org.gcube.application.geoportal.common.model.rest.QueryRequest.OrderedRequest;
import org.gcube.application.geoportal.common.model.rest.QueryRequest.OrderedRequest.Direction; import org.gcube.application.geoportal.common.model.rest.QueryRequest.OrderedRequest.Direction;
import org.gcube.application.geoportal.common.model.rest.QueryRequest.PagedRequest; import org.gcube.application.geoportal.common.model.rest.QueryRequest.PagedRequest;
import org.gcube.application.geoportal.common.model.rest.RegisterFileSetRequest; import org.gcube.application.geoportal.common.model.rest.RegisterFileSetRequest;
import org.gcube.application.geoportal.common.model.rest.StepExecutionRequest;
import org.gcube.application.geoportal.common.rest.Projects; import org.gcube.application.geoportal.common.rest.Projects;
import org.gcube.application.geoportal.common.utils.FileSets; import org.gcube.application.geoportal.common.utils.FileSets;
import org.gcube.application.geoportal.common.utils.StorageUtils; import org.gcube.application.geoportal.common.utils.StorageUtils;
@ -58,6 +58,7 @@ import com.mongodb.BasicDBObjectBuilder;
*/ */
public class ProjectsCaller { public class ProjectsCaller {
public static final String DOCUMENT_STORE_COLLECTION = "DOCUMENT-STORE-COLLECTION";
private static Logger LOG = LoggerFactory.getLogger(GeoportalClientCaller.class); private static Logger LOG = LoggerFactory.getLogger(GeoportalClientCaller.class);
/** /**
@ -208,7 +209,7 @@ public class ProjectsCaller {
for (Archive archive : listArchives) { for (Archive archive : listArchives) {
String theType = archive.getString("_type"); String theType = archive.getString("_type");
if (theType.equalsIgnoreCase("DOCUMENT-STORE-COLLECTION")) { if (theType.equalsIgnoreCase(DOCUMENT_STORE_COLLECTION)) {
String totalDocumentAre = archive.get("count").toString(); String totalDocumentAre = archive.get("count").toString();
int total = Integer.parseInt(totalDocumentAre); int total = Integer.parseInt(totalDocumentAre);
LOG.info("total docs for profileID: {}, are: {}", profileID, total); LOG.info("total docs for profileID: {}, are: {}", profileID, total);
@ -234,7 +235,7 @@ public class ProjectsCaller {
for (Archive archive : listArchives) { for (Archive archive : listArchives) {
String theType = archive.getString("_type"); String theType = archive.getString("_type");
if (theType.equalsIgnoreCase("DOCUMENT-STORE-COLLECTION")) { if (theType.equalsIgnoreCase(DOCUMENT_STORE_COLLECTION)) {
com.jayway.jsonpath.Configuration configuration = com.jayway.jsonpath.Configuration.builder() com.jayway.jsonpath.Configuration configuration = com.jayway.jsonpath.Configuration.builder()
.jsonProvider(new JsonOrgJsonProvider()).build(); .jsonProvider(new JsonOrgJsonProvider()).build();
String toJSON = archive.toJson(); String toJSON = archive.toJson();
@ -255,6 +256,36 @@ public class ProjectsCaller {
return null; return null;
} }
public Integer getCountByPhaseFor(String profileID, String phase, String status) throws Exception {
LOG.info("getCountByPhaseFor called for profileID: {}, phase: {}, status: {}", profileID, phase, status);
Projects<Project> client = (Projects<Project>) getClient(profileID);
Configuration config = client.getConfiguration();
List<Archive> listArchives = config.getArchives();
Integer count = null;
for (Archive archive : listArchives) {
String theType = archive.getString("_type");
if (theType.equalsIgnoreCase(DOCUMENT_STORE_COLLECTION)) {
com.jayway.jsonpath.Configuration configuration = com.jayway.jsonpath.Configuration.builder()
.jsonProvider(new JsonOrgJsonProvider()).build();
String toJSON = archive.toJson();
JSONObject jObject = new JSONObject(toJSON);
String query = String.format("$.countByPhase[*][?(@._id.phase == '%s' && @._id.status == '%s')].count",
phase, status);
LOG.debug("Performing query: " + query);
JsonPath jsonPath = JsonPath.compile(query);
JSONArray counts = jsonPath.read(jObject, configuration);
try {
count = counts.getInt(0);
} catch (Exception e) {
LOG.warn("getCountByPhaseFor error: " + e.getLocalizedMessage());
}
}
}
LOG.info("getCountByPhaseFor returning: " + count);
return count;
}
/** /**
* Gets the phases into document store collection. * Gets the phases into document store collection.
* *
@ -270,7 +301,7 @@ public class ProjectsCaller {
for (Archive archive : listArchives) { for (Archive archive : listArchives) {
String theType = archive.getString("_type"); String theType = archive.getString("_type");
if (theType.equalsIgnoreCase("DOCUMENT-STORE-COLLECTION")) { if (theType.equalsIgnoreCase(DOCUMENT_STORE_COLLECTION)) {
com.jayway.jsonpath.Configuration configuration = com.jayway.jsonpath.Configuration.builder() com.jayway.jsonpath.Configuration configuration = com.jayway.jsonpath.Configuration.builder()
.jsonProvider(new JsonOrgJsonProvider()).build(); .jsonProvider(new JsonOrgJsonProvider()).build();
String toJSON = archive.toJson(); String toJSON = archive.toJson();
@ -295,14 +326,15 @@ public class ProjectsCaller {
* @param profileID the profile ID * @param profileID the profile ID
* @param projectID the project ID * @param projectID the project ID
* @param stepID the step ID * @param stepID the step ID
* @param optionalMessage the optional message
* @param options the options * @param options the options
* @return the project * @return the project
* @throws Exception the exception * @throws Exception the exception
*/ */
public Project performStep(String profileID, String projectID, String stepID, Document options) throws Exception { public Project performStep(String profileID, String projectID, String stepID, String optionalMessage, Document options) throws Exception {
LOG.info("performStep called for profileID: {}, projectID: {}", profileID, projectID); LOG.info("performStep called for profileID: {}, projectID: {}. Optional message exists?: {}", profileID, projectID, optionalMessage!=null);
Projects<Project> client = (Projects<Project>) getClient(profileID); Projects<Project> client = (Projects<Project>) getClient(profileID);
StepExecutionRequest request = new StepExecutionRequest(stepID, options); PerformStepRequest request = new PerformStepRequest(stepID, optionalMessage, options);
Project project = client.performStep(projectID, request); Project project = client.performStep(projectID, request);
LOG.info("performStep returning project ID: " + project.getId()); LOG.info("performStep returning project ID: " + project.getId());
@ -402,8 +434,8 @@ public class ProjectsCaller {
* @return the project * @return the project
* @throws RemoteException the remote exception * @throws RemoteException the remote exception
*/ */
public Project deleteFileset(String profileID, String projectID, String jsonPathToFileset, Boolean force, Boolean ignoreErrors) public Project deleteFileset(String profileID, String projectID, String jsonPathToFileset, Boolean force,
throws RemoteException { Boolean ignoreErrors) throws RemoteException {
LOG.info("deleteFileset called for profileID {} and projectID {}, fileset path: {}", profileID, projectID, LOG.info("deleteFileset called for profileID {} and projectID {}, fileset path: {}", profileID, projectID,
jsonPathToFileset); jsonPathToFileset);
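
A usage sketch of the two entry points touched above: the new getCountByPhaseFor and the performStep signature that now carries the optional message. The phase, status, step ID and message literals are example values, and the options Document is assumed to be org.bson.Document:

ProjectsCaller projects = GeoportalClientCaller.projects();
// count of projects of a UCD sitting in a given lifecycle phase/status; null if not found
Integer count = projects.getCountByPhaseFor("profiledConcessioni", "Published", "OK");
// performs a lifecycle step, forwarding the optional message via PerformStepRequest
Project updated = projects.performStep("profiledConcessioni", projectID, "SUBMIT-FOR-REVIEW", "please review", new Document());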

GeoportalItemReferences.java

@ -16,6 +16,17 @@ public class GeoportalItemReferences implements Serializable {
*/ */
private static final long serialVersionUID = -7021431511279022193L; private static final long serialVersionUID = -7021431511279022193L;
/**
* The Enum SHARE_LINK_TO.
*
* @author Francesco Mangiacrapa at ISTI-CNR francesco.mangiacrapa@isti.cnr.it
*
* Apr 5, 2024
*/
public static enum SHARE_LINK_TO {
DATA_VIEWER, DATA_ENTRY
}
// this is the mongoID // this is the mongoID
private String projectID; private String projectID;
private String profileID; // this is the profileID - UCD private String profileID; // this is the profileID - UCD
@ -26,6 +37,8 @@ public class GeoportalItemReferences implements Serializable {
private String layerObjectType; private String layerObjectType;
private SHARE_LINK_TO shareLinkTo;
/** /**
* Instantiates a new geo na object. * Instantiates a new geo na object.
*/ */
@ -34,17 +47,33 @@ public class GeoportalItemReferences implements Serializable {
} }
/** /**
* Instantiates a new geo na item ref. * Instantiates a new geoportal item references.
* Backward compatibility. Use {{@link #GeoportalItemReferences(String, String, SHARE_LINK_TO)}}
* *
* @param projectID the project ID * @param projectID the project ID
* @param profileID the profile ID * @param profileID the profile ID
*/ */
@Deprecated
public GeoportalItemReferences(String projectID, String profileID) { public GeoportalItemReferences(String projectID, String profileID) {
super(); super();
this.projectID = projectID; this.projectID = projectID;
this.profileID = profileID; this.profileID = profileID;
} }
/**
* Instantiates a new geo na item ref.
*
* @param projectID the project ID
* @param profileID the profile ID
* @param shareLinkTo the share link to
*/
public GeoportalItemReferences(String projectID, String profileID, SHARE_LINK_TO shareLinkTo) {
super();
this.projectID = projectID;
this.profileID = profileID;
this.shareLinkTo = shareLinkTo;
}
/** /**
* Instantiates a new geo na item ref. * Instantiates a new geo na item ref.
* *
@ -95,6 +124,15 @@ public class GeoportalItemReferences implements Serializable {
return itemName; return itemName;
} }
/**
* Gets the share link to.
*
* @return the share link to
*/
public SHARE_LINK_TO getShareLinkTo() {
return shareLinkTo;
}
/** /**
* Sets the item name. * Sets the item name.
* *
@ -160,8 +198,9 @@ public class GeoportalItemReferences implements Serializable {
builder.append(openLink); builder.append(openLink);
builder.append(", layerObjectType="); builder.append(", layerObjectType=");
builder.append(layerObjectType); builder.append(layerObjectType);
builder.append(", shareLinkTo=");
builder.append(shareLinkTo);
builder.append("]"); builder.append("]");
return builder.toString(); return builder.toString();
} }
} }

SearchingFilter.java

@ -90,6 +90,10 @@ public class SearchingFilter implements Serializable {
private LinkedHashMap<String, Object> projection; private LinkedHashMap<String, Object> projection;
private String profileID;
private String projectID;
/** /**
* Instantiates a new sort filter. * Instantiates a new sort filter.
*/ */
@ -180,6 +184,19 @@ public class SearchingFilter implements Serializable {
this.orderByFields = orderByFields; this.orderByFields = orderByFields;
} }
public void setGetForIDs(String profileID, String projectID) {
this.profileID = profileID;
this.projectID = projectID;
}
public String getProfileID() {
return profileID;
}
public String getProjectID() {
return projectID;
}
/** /**
* Sets the order. * Sets the order.
* *
@ -200,8 +217,11 @@ public class SearchingFilter implements Serializable {
builder.append(conditions); builder.append(conditions);
builder.append(", projection="); builder.append(", projection=");
builder.append(projection); builder.append(projection);
builder.append(", profileID=");
builder.append(profileID);
builder.append(", projectID=");
builder.append(projectID);
builder.append("]"); builder.append("]");
return builder.toString(); return builder.toString();
} }
} }
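
The new profileID/projectID pair lets a search be pinned to a single project (changelog entry [#27120]). A minimal sketch, with placeholder IDs:

SearchingFilter filter = new SearchingFilter();
// restrict the query to one UCD (profileID) and one project (projectID, the mongo ID)
filter.setGetForIDs(profileID, projectID);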

OPERATION_ON_ITEM.java

@ -10,18 +10,19 @@ package org.gcube.application.geoportalcommon.shared.config;
*/ */
public enum OPERATION_ON_ITEM { public enum OPERATION_ON_ITEM {
CREATE_NEW_PROJECT("Create New Project"), CREATE_NEW_PROJECT("new", "Create New Project"),
VIEW_PROJECT_AS_DOCUMENT("View Project as Document"), VIEW_PROJECT_AS_DOCUMENT("vpd", "View Project as Document"),
VIEW_PROJECT_AS_JSON("View Projet as JSON"), VIEW_PROJECT_AS_JSON("vpj", "View Projet as JSON"),
VIEW_ON_MAP("View on Map"), VIEW_ON_MAP("vpm","View on Map"),
VIEW_REPORT("View the Report"), GET_SHAREABLE_LINK("shl","Get Shareable Link"),
EDIT_PROJECT("Edit the Project"), VIEW_REPORT("vpr","View the Report"),
CLONE_PROJECT("Clone the Project"), EDIT_PROJECT("edt","Edit the Project"),
PUBLISH_UNPUBLISH_PROJECT("Publish/UnPublish the Project"), CLONE_PROJECT("cln","Clone the Project"),
DELETE_PROJECT("Delete the Project"), PUBLISH_UNPUBLISH_PROJECT("pup","Publish/UnPublish the Project"),
CREATE_RELATION("Create Relation between two Projects"), DELETE_PROJECT("dlt","Delete the Project"),
DELETE_RELATION("Delete Relation between two Projects"), CREATE_RELATION("crr","Create Relation between two Projects"),
VIEW_RELATIONSHIPS("View the relationship/s created for the Project"); DELETE_RELATION("dlr","Delete Relation between two Projects"),
VIEW_RELATIONSHIPS("vpr", "View the relationship/s created for the Project");
String label; String label;
@ -30,7 +31,7 @@ public enum OPERATION_ON_ITEM {
* *
* @param label the label * @param label the label
*/ */
OPERATION_ON_ITEM(String label){ OPERATION_ON_ITEM(String id, String label){
this.label = label; this.label = label;
} }

StepPerformedResultDV.java

@ -0,0 +1,119 @@
package org.gcube.application.geoportalcommon.shared.geoportal.step;
import java.io.Serializable;
import org.gcube.application.geoportalcommon.shared.geoportal.project.LifecycleInformationDV;
/**
* The Class StepPerformedResultDV.
*
* @author Francesco Mangiacrapa at ISTI-CNR francesco.mangiacrapa@isti.cnr.it
*
* Apr 11, 2024
*/
public class StepPerformedResultDV implements Serializable {
/**
*
*/
private static final long serialVersionUID = 7065890375433142728L;
private String projectId;
private String profileID;
private LifecycleInformationDV lifecycleInfoDV;
/**
* Instantiates a new step performed result DV.
*/
public StepPerformedResultDV() {
}
/**
* Instantiates a new step performed result DV.
*
* @param profileID the profile ID
* @param projectId the project id
* @param lifecycleInfoDV the lifecycle info DV
*/
public StepPerformedResultDV(String profileID, String projectId, LifecycleInformationDV lifecycleInfoDV) {
super();
this.projectId = projectId;
this.profileID = profileID;
this.lifecycleInfoDV = lifecycleInfoDV;
}
/**
* Gets the project id.
*
* @return the project id
*/
public String getProjectId() {
return projectId;
}
/**
* Gets the profile ID.
*
* @return the profile ID
*/
public String getProfileID() {
return profileID;
}
/**
* Gets the lifecycle info DV.
*
* @return the lifecycle info DV
*/
public LifecycleInformationDV getLifecycleInfoDV() {
return lifecycleInfoDV;
}
/**
* Sets the project id.
*
* @param projectId the new project id
*/
public void setProjectId(String projectId) {
this.projectId = projectId;
}
/**
* Sets the profile ID.
*
* @param profileID the new profile ID
*/
public void setProfileID(String profileID) {
this.profileID = profileID;
}
/**
* Sets the lifecycle info DV.
*
* @param lifecycleInfoDV the new lifecycle info DV
*/
public void setLifecycleInfoDV(LifecycleInformationDV lifecycleInfoDV) {
this.lifecycleInfoDV = lifecycleInfoDV;
}
/**
* To string.
*
* @return the string
*/
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("StepPerformedResultDV [projectId=");
builder.append(projectId);
builder.append(", profileID=");
builder.append(profileID);
builder.append(", lifecycleInfoDV=");
builder.append(lifecycleInfoDV);
builder.append("]");
return builder.toString();
}
}

StringUtil.java

@ -0,0 +1,27 @@
package org.gcube.application.geoportalcommon.util;
/**
* The Class StringUtil.
*
* @author Francesco Mangiacrapa at ISTI-CNR francesco.mangiacrapa@isti.cnr.it
*
* Aug 31, 2023
*/
public class StringUtil {
/**
* Ellipsize.
*
* @param input the input
* @param maxLength the max length
* @return the string
*/
public static String ellipsize(String input, int maxLength) {
String ellip = "...";
if (input == null || input.length() <= maxLength || input.length() < ellip.length()) {
return input;
}
return input.substring(0, maxLength - ellip.length()).concat(ellip);
}
}
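
ellipsize keeps the returned string within maxLength by replacing its tail with "...", while null or short inputs are returned unchanged; it is used above to cap INFO-level log messages at _MAX_LENGHT_STRING_AT_INFO (200) characters. For example:

StringUtil.ellipsize("GeoPortal Data Common", 10); // -> "GeoPort..." (exactly 10 characters)
StringUtil.ellipsize("short", 10);                 // -> "short" (unchanged)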

ContextConfigTest.java

@ -0,0 +1,43 @@
package org.gcube.application;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
public class ContextConfigTest {
private static final String GCUBE_CONFIG_PROPERTIES_FILENAME = "gcube_config.properties";
// APP Working Directory + /src/test/resources must be the location of
// gcube_config.properties
private static String gcube_config_path = String.format("%s/%s",
System.getProperty("user.dir") + "/src/test/resources", GCUBE_CONFIG_PROPERTIES_FILENAME);
public static String CONTEXT;
public static String TOKEN;
/**
* Read context settings.
*/
public static void readContextSettings() {
try (InputStream input = new FileInputStream(gcube_config_path)) {
Properties prop = new Properties();
// load a properties file
prop.load(input);
CONTEXT = prop.getProperty("CONTEXT");
TOKEN = prop.getProperty("TOKEN");
// get the property value and print it out
System.out.println("CONTEXT: " + CONTEXT);
System.out.println("TOKEN: " + TOKEN);
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
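
The test and migration classes in this changeset bootstrap their gCube session from this file. The usual call sequence, as used by the scripts below, is:

ContextConfigTest.readContextSettings();                     // loads src/test/resources/gcube_config.properties
ScopeProvider.instance.set(ContextConfigTest.CONTEXT);       // CONTEXT key, e.g. a VRE scope
SecurityTokenProvider.instance.set(ContextConfigTest.TOKEN); // TOKEN key, the user's gCube token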

CreateGeoportalGisLinkInTheCentroidLayers.java

@ -34,8 +34,8 @@ import org.gcube.portlets.user.uriresolvermanager.resolvers.query.GeoportalResol
public class CreateGeoportalGisLinkInTheCentroidLayers { public class CreateGeoportalGisLinkInTheCentroidLayers {
private static String CONTEXT = "/gcube/devsec/devVRE"; // private static String CONTEXT = "/gcube/devsec/devVRE";
private static String TOKEN = ""; // devVRE // private static String TOKEN = ""; // devVRE
// private static final String CONTEXT = "/pred4s/preprod/preVRE"; // private static final String CONTEXT = "/pred4s/preprod/preVRE";
// private static final String TOKEN = ""; //preVRE // private static final String TOKEN = ""; //preVRE
@ -43,8 +43,13 @@ public class CreateGeoportalGisLinkInTheCentroidLayers {
// private static final String CONTEXT = "/d4science.research-infrastructures.eu/D4OS/GNA"; // private static final String CONTEXT = "/d4science.research-infrastructures.eu/D4OS/GNA";
// private static final String TOKEN = ""; //GNA // private static final String TOKEN = ""; //GNA
//Esquiline
private static final String CONTEXT = "/d4science.research-infrastructures.eu/gCubeApps/Esquiline";
private static final String TOKEN = ""; //Esquiline
private static String PROFILE_ID = "esquilino";
// GEOPORTAL // GEOPORTAL
private static String PROFILE_ID = "profiledConcessioni"; //private static String PROFILE_ID = "profiledConcessioni";
// private static final String PROFILE_ID = "concessioni-estere"; // private static final String PROFILE_ID = "concessioni-estere";
private static final String JSON_KEY_DATA_FINE_PROGETTO = "dataFineProgetto"; private static final String JSON_KEY_DATA_FINE_PROGETTO = "dataFineProgetto";
private static final String JSON_KEY_DATA_INIZIO_PROGETTO = "dataInizioProgetto"; private static final String JSON_KEY_DATA_INIZIO_PROGETTO = "dataInizioProgetto";
@ -61,7 +66,7 @@ public class CreateGeoportalGisLinkInTheCentroidLayers {
private static ProjectsCaller client = null; private static ProjectsCaller client = null;
public static final boolean READ_ONLY_MODE = false; public static final boolean READ_ONLY_MODE = true;
/** /**
* Gets the client. * Gets the client.
@ -85,9 +90,9 @@ public class CreateGeoportalGisLinkInTheCentroidLayers {
initGeoportalClient(); initGeoportalClient();
//devVRE //devVRE
PROFILE_ID = "profiledConcessioni"; // PROFILE_ID = "profiledConcessioni";
String tableName = "profiledconcessioni_devvre_centroids"; // String tableName = "profiledconcessioni_devvre_centroids";
tableName = "profiledconcessioni_internal__devvre_centroids"; // tableName = "profiledconcessioni_internal__devvre_centroids";
// PROFILE_ID = "concessioni-estere"; // PROFILE_ID = "concessioni-estere";
// String tableName = "concessioni_estere_devvre_centroids"; // String tableName = "concessioni_estere_devvre_centroids";
// tableName = "concessioni_estere_internal__devvre_centroids"; // tableName = "concessioni_estere_internal__devvre_centroids";
@ -104,10 +109,15 @@ public class CreateGeoportalGisLinkInTheCentroidLayers {
// PROFILE_ID = "concessioni-estere"; // PROFILE_ID = "concessioni-estere";
// String tableName = "concessioni_estere_internal__gna_centroids"; // String tableName = "concessioni_estere_internal__gna_centroids";
// String tableName = "concessioni_estere_gna_centroids"; // String tableName = "concessioni_estere_gna_centroids";
PROFILE_ID = "profiledConcessioni"; // PROFILE_ID = "profiledConcessioni";
// String tableName = "profiledconcessioni_internal__gna_centroids"; // String tableName = "profiledconcessioni_internal__gna_centroids";
// String tableName = "profiledconcessioni_gna_centroids"; // String tableName = "profiledconcessioni_gna_centroids";
// Esquiline
PROFILE_ID = "esquilino";
String tableName = "esquilino_esquiline_centroids";
long startTime = System.currentTimeMillis(); long startTime = System.currentTimeMillis();
try { try {

GNA_New_DataModel_IndexTable_25925.java

@ -0,0 +1,538 @@
package org.gcube.application;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.gcube.application.geoportalcommon.geoportal.GeoportalClientCaller;
import org.gcube.application.geoportalcommon.geoportal.ProjectsCaller;
import org.gcube.application.se.RuntimeResourceReader;
import org.gcube.application.se.ServiceEndpointBean;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.common.scope.api.ScopeProvider;
public class GNA_New_DataModel_IndexTable_25925 {
private static String PROFILE_ID = "profiledConcessioni";
private static final String platformName = "postgis";
private static final String category = "Database";
// These are defined via ContextConfigTest.readContextSettings();
private static String CONTEXT = "";
private static String TOKEN = "";
// #### DEV
// DB
// private static final String resourceName = "GNA-POSTGIS-DB"; // devVRE
// #### PRE
// DB
private static final String resourceName = "Geoserver-t postgis"; // preVRE
// #### PROD
// DB
// private static final String resourceName = "GNA-postgis"; // GNA
// Esquiline
// private static final String CONTEXT = "/d4science.research-infrastructures.eu/gCubeApps/Esquiline";
// private static final String TOKEN = ""; //Esquiline
// private static String PROFILE_ID = "esquilino";
// GEOPORTAL
// private static String PROFILE_ID = "profiledConcessioni";
// private static final String PROFILE_ID = "concessioni-estere";
private static final Map<String, String> ADD_TABLE_COLUMNS = new LinkedHashMap<>();
// static {
// ADD_TABLE_COLUMNS.put("ufficio_mic_competente", "TEXT");
// ADD_TABLE_COLUMNS.put("funzionario_responsabile", "TEXT");
// ADD_TABLE_COLUMNS.put("modalita_individuazione", "TEXT");
// ADD_TABLE_COLUMNS.put("contesto_indagine", "TEXT");
// ADD_TABLE_COLUMNS.put("denominazione", "TEXT");
// ADD_TABLE_COLUMNS.put("stato_attuale", "TEXT");
// ADD_TABLE_COLUMNS.put("accessibilita", "TEXT");
// ADD_TABLE_COLUMNS.put("cronologia_macrofase", "TEXT");
// ADD_TABLE_COLUMNS.put("specifiche_cronologia", "TEXT");
// ADD_TABLE_COLUMNS.put("quota_max", "TEXT");
// ADD_TABLE_COLUMNS.put("quota_min", "TEXT");
// }
private static final Map<String, String> DROP_TABLE_COLUMNS = new HashMap<>();
// static {
// DROP_TABLE_COLUMNS.put("autore", "");
// DROP_TABLE_COLUMNS.put("titolare", "");
// DROP_TABLE_COLUMNS.put("titolare_licenza", "");
// }
private static final Map<String, String> RENAME_TABLE_COLUMNS = new HashMap<>();
//Rename from column to column
// static {
// RENAME_TABLE_COLUMNS.put("ufficio_mic_competente", "ufficio_competente");
// }
private static ProjectsCaller client = null;
public static final boolean READ_ONLY_MODE = true;
public static List<String> listTable = new ArrayList<String>();
/**
* Gets the client.
*
* @return the client
*/
// @Before
public static void initGeoportalClient() {
// assumeTrue(GCubeTest.isTestInfrastructureEnabled());
ContextConfigTest.readContextSettings();
CONTEXT = ContextConfigTest.CONTEXT;
TOKEN = ContextConfigTest.TOKEN;
ScopeProvider.instance.set(CONTEXT);
SecurityTokenProvider.instance.set(TOKEN);
client = GeoportalClientCaller.projects();
}
public static void main(String[] args) {
initGeoportalClient();
System.out.println("READ_ONLY_MODE ENABLED: " + READ_ONLY_MODE);
System.out.println("CONTEXT IS: " + CONTEXT);
System.out.println("PROFILE_ID: " + PROFILE_ID);
// devVRE
// PROFILE_ID = "profiledConcessioni";
// String tableName = "profiledconcessioni_devvre_centroids";
// tableName = "profiledconcessioni_internal__devvre_centroids";
// PROFILE_ID = "concessioni-estere";
// String tableName = "concessioni_estere_devvre_centroids";
// tableName = "concessioni_estere_internal__devvre_centroids";
// preVRE
// PROFILE_ID = "profiledConcessioni";
// listTable.add("profiledconcessioni_internal__prevre_centroids");
// listTable.add("profiledconcessioni_prevre_centroids");
PROFILE_ID = "concessioni-estere";
listTable.add("concessioni_estere_internal__prevre_centroids");
listTable.add("concessioni_estere_prevre_centroids");
// GNA
// PROFILE_ID = "concessioni-estere";
// String tableName = "concessioni_estere_internal__gna_centroids";
// String tableName = "concessioni_estere_gna_centroids";
// PROFILE_ID = "profiledConcessioni";
// String tableName = "profiledconcessioni_internal__gna_centroids";
// String tableName = "profiledconcessioni_gna_centroids";
// Esquiline
// PROFILE_ID = "esquilino";
// String tableName = "esquilino_esquiline_centroids";
long startTime = System.currentTimeMillis();
try {
RuntimeResourceReader rrr = new RuntimeResourceReader(CONTEXT, resourceName, platformName, category, null);
ServiceEndpointBean se = rrr.getListSE().get(0);
AccessPoint ap = se.getListAP().get(0);
System.out.println(ap.address());
System.out.println(ap.username());
String pwdDB = RuntimeResourceReader.dectryptPassword(CONTEXT, ap);
System.out.println(pwdDB);
Connection dbConnection = getDatabaseConnection(ap.address(), ap.username(), pwdDB);
for (String tableName : listTable) {
System.out.println("\n\n# SOURCE TABLE");
Boolean tableExists = checkTableExists(dbConnection, tableName);
if (!tableExists) {
throw new Exception("Table '" + tableName + "' does not exits in the DB!!! Exit");
}
printTableColumn(dbConnection, tableName);
System.out.println("\n\n### ADDING COLUMNS...\n");
// ADD NEW COLUMNS
for (String columnName : ADD_TABLE_COLUMNS.keySet()) {
try {
boolean columnExists = checkColumnExists(dbConnection, tableName, columnName);
if (!columnExists) {
addColumnTable(dbConnection, tableName, columnName, ADD_TABLE_COLUMNS.get(columnName));
int sleeping = 500;
System.out.println("... sleeping " + sleeping + " ...");
Thread.sleep(sleeping);
}
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
return;
}
}
System.out.println("\n\n### END ADD COLUMNS");
System.out.println("\n\n### DROPPING COLUMNS...\n");
// REMOVE OLD COLUMNS
for (String columnName : DROP_TABLE_COLUMNS.keySet()) {
try {
boolean columnExists = checkColumnExists(dbConnection, tableName, columnName);
if (columnExists) {
dropColumnTable(dbConnection, tableName, columnName, DROP_TABLE_COLUMNS.get(columnName));
int sleeping = 1000;
System.out.println("... sleeping " + sleeping + " ...");
Thread.sleep(sleeping);
}
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
return;
}
}
System.out.println("\n\n### END DROP COLUMNS");
System.out.println("\n\n### RENAMING COLUMNS...\n");
// RENAME_TABLE_COLUMNS
for (String columnName : RENAME_TABLE_COLUMNS.keySet()) {
try {
boolean columnExists = checkColumnExists(dbConnection, tableName, columnName);
if (columnExists) {
renameColumnTable(dbConnection, tableName, columnName, RENAME_TABLE_COLUMNS.get(columnName));
int sleeping = 1000;
System.out.println("... sleeping " + sleeping + " ...");
Thread.sleep(sleeping);
}
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
return;
}
}
System.out.println("\n\n### END RENAME COLUMNS");
System.out.println("\n\n# UPDATED TABLE");
printTableColumn(dbConnection, tableName);
}
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
long endTime = System.currentTimeMillis();
System.out.println("\n\nSCRIPT END at: " + endTime);
double toSec = (endTime - startTime) / 1000;
System.out.println("SCRIPT TERMINATED in " + toSec + "sec");
}
public static void printTableColumn(Connection con, String tablename) {
try {
DatabaseMetaData databaseMetaData = con.getMetaData();
// Print TABLE_TYPE "TABLE"
ResultSet columns = databaseMetaData.getColumns(null, null, tablename, null);
System.out.println("\n==== TABLE " + tablename + " ");
System.out.println("[COLUMN_NAME - TYPE_NAME ( COLUMN_SIZE )]");
System.out.println("-----------------------------------------------");
while (columns.next()) {
String columnName = columns.getString("COLUMN_NAME");
String columnType = columns.getString("TYPE_NAME");
int columnSize = columns.getInt("COLUMN_SIZE");
System.out.println("\t" + columnName + " - " + columnType + " (" + columnSize + ")");
}
System.out.println("-----------------------------------------------");
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
/**
* Gets the database connection.
*
* @param dbURL the db url
* @param user the user
* @param pwd the pwd
* @return the database connection
*/
public static Connection getDatabaseConnection(String dbURL, String user, String pwd) {
System.out.println("dbURL: " + dbURL);
Connection c = null;
try {
Class.forName("org.postgresql.Driver");
c = DriverManager.getConnection(dbURL, user, pwd);
c.setAutoCommit(false);
} catch (Exception e) {
e.printStackTrace();
System.err.println(e.getClass().getName() + ": " + e.getMessage());
System.exit(0);
}
System.out.println("Opened database successfully");
return c;
}
public static void addColumnTable(Connection con, String tableName, String newColumn, String type)
throws SQLException {
String alterTableString = String.format("ALTER TABLE %s ADD COLUMN %s %s", tableName, newColumn, type);
System.out.println("Executing: " + alterTableString);
try {
con.setAutoCommit(false);
if (!READ_ONLY_MODE) {
con.createStatement().execute(alterTableString);
con.commit();
}
} catch (SQLException e) {
e.printStackTrace();
if (con != null) {
try {
System.err.print("Transaction is being rolled back");
con.rollback();
} catch (SQLException excep) {
e.printStackTrace();
}
}
}
System.out.println("Executed: " + alterTableString);
}
public static void renameColumnTable(Connection con, String tableName, String oldColumn, String newColumn)
throws SQLException {
String alterTableString = String.format("ALTER TABLE %s RENAME COLUMN %s TO %s", tableName, oldColumn,
newColumn);
System.out.println("Executing: " + alterTableString);
try {
con.setAutoCommit(false);
if (!READ_ONLY_MODE) {
con.createStatement().execute(alterTableString);
con.commit();
}
} catch (SQLException e) {
e.printStackTrace();
if (con != null) {
try {
System.err.print("Transaction is being rolled back");
con.rollback();
} catch (SQLException excep) {
e.printStackTrace();
}
}
}
System.out.println("Executed: " + alterTableString);
}
public static void dropColumnTable(Connection con, String tableName, String oldColumn, String type)
throws SQLException {
String alterTableString = String.format("ALTER TABLE %s DROP COLUMN %s", tableName, oldColumn);
System.out.println("Executing: " + alterTableString);
try {
con.setAutoCommit(false);
if (!READ_ONLY_MODE) {
con.createStatement().execute(alterTableString);
con.commit();
}
} catch (SQLException e) {
e.printStackTrace();
if (con != null) {
try {
System.err.print("Transaction is being rolled back");
con.rollback();
} catch (SQLException excep) {
e.printStackTrace();
}
}
}
System.out.println("Executed: " + alterTableString);
}
public static Boolean checkColumnExists(Connection con, String tableName, String columnName) throws SQLException {
String columnExistsLabel = "COLUMN_EXISTS";
String sql = String.format(
"SELECT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name='%s' AND column_name='%s') as %s",
tableName, columnName, columnExistsLabel);
System.out.println("\n+++ " + columnExistsLabel + " checking '" + tableName + "': " + sql);
PreparedStatement p;
ResultSet resultSet;
try {
p = con.prepareStatement(sql);
resultSet = p.executeQuery();
// Expected 1 row
resultSet.next();
Boolean columnExists = resultSet.getBoolean(columnExistsLabel);
System.out.println("\t RESP --> Column '" + columnName + "' exists: " + columnExists + "\n");
return columnExists;
} catch (SQLException e) {
System.err.println(e);
return false;
}
}
public static Boolean checkTableExists(Connection con, String tableName) throws SQLException {
String tableExistsLabel = "TABLE_EXISTS";
String sql = String.format(
"SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name='%s' AND table_type LIKE 'BASE TABLE' AND table_schema LIKE 'public') as %s",
tableName, tableExistsLabel);
System.out.println("\n+++ " + tableExistsLabel + " checking '" + tableName + "': " + sql);
PreparedStatement p;
ResultSet resultSet;
try {
p = con.prepareStatement(sql);
resultSet = p.executeQuery();
// Expected 1 row
resultSet.next();
Boolean tableExists = resultSet.getBoolean(tableExistsLabel);
System.out.println("\t RESP --> Table '" + tableName + "' exists: " + tableExists + "\n");
return tableExists;
} catch (SQLException e) {
System.err.println(e);
return false;
}
}
public static void updateTable(Connection con, String tableName, String whereCondition,
LinkedHashMap<String, String> mapColumnValue) throws SQLException {
StringBuilder updateSQL = new StringBuilder();
updateSQL.append("UPDATE " + tableName + " SET");
int i = 1;
for (String column : mapColumnValue.keySet()) {
updateSQL.append(" " + column + "=?");
if (i < mapColumnValue.size())
updateSQL.append(", ");
i++;
}
updateSQL.append(" WHERE " + whereCondition);
String update = updateSQL.toString();
// System.out.println("Executing: " + update);
try (PreparedStatement updatePS = con.prepareStatement(update);) {
int index = 1;
for (String column : mapColumnValue.keySet()) {
updatePS.setString(index, mapColumnValue.get(column));
index++;
}
con.setAutoCommit(false);
System.out.println("Executing: " + updatePS.toString());
if (!READ_ONLY_MODE) {
updatePS.executeUpdate();
con.commit();
}
} catch (SQLException e) {
e.printStackTrace();
if (con != null) {
try {
System.err.print("Transaction is being rolled back");
con.rollback();
} catch (SQLException excep) {
e.printStackTrace();
}
}
}
}
// Step1: Main driver method
public static List<String> readTableIDs(Connection con, String tableName, String columnNameProjectId) {
PreparedStatement p = null;
ResultSet resultSet = null;
// Try block to catch exception/s
List<String> listProjectIds = new ArrayList<String>();
try {
// SQL command data stored in String datatype
String sql = String.format("SELECT * FROM %s", tableName);
p = con.prepareStatement(sql);
resultSet = p.executeQuery();
ResultSetMetaData rsmd = resultSet.getMetaData();
int columnsNumber = rsmd.getColumnCount();
int row = 1;
System.out.println("\n####TABLE: " + tableName + " content..\n");
while (resultSet.next()) {
System.out.print("" + row + "] ");
for (int i = 1; i <= columnsNumber; i++) {
String columnValue = resultSet.getString(i);
System.out.print("(" + rsmd.getColumnName(i) + ") " + columnValue + " | ");
}
row++;
System.out.println("\n");
String projectId = resultSet.getString(columnNameProjectId);
listProjectIds.add(projectId);
}
System.out.println("####TABLE: " + tableName + " end content\n");
}
// Catch block to handle exception
catch (SQLException e) {
// Print exception pop-up on screen
System.err.println(e);
}
System.out.println("returning list IDs: " + listProjectIds);
return listProjectIds;
}
}

GNA_New_DataModel_IndexTable_PROD_25926.java

@ -0,0 +1,511 @@
package org.gcube.application;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.gcube.application.geoportalcommon.geoportal.GeoportalClientCaller;
import org.gcube.application.geoportalcommon.geoportal.ProjectsCaller;
import org.gcube.application.se.RuntimeResourceReader;
import org.gcube.application.se.ServiceEndpointBean;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.common.scope.api.ScopeProvider;
public class GNA_New_DataModel_IndexTable_PROD_25926 {
private static String PROFILE_ID = "profiledConcessioni";
private static final String platformName = "postgis";
private static final String category = "Database";
// These are defined via ContextConfigTest.readContextSettings();
private static String CONTEXT = "";
private static String TOKEN = "";
// #### PROD
// DB
private static final String resourceName = "GNA-postgis"; // GNA
// Esquiline
// private static final String CONTEXT = "/d4science.research-infrastructures.eu/gCubeApps/Esquiline";
// private static final String TOKEN = ""; //Esquiline
// private static String PROFILE_ID = "esquilino";
private static final Map<String, String> ADD_TABLE_COLUMNS = new LinkedHashMap<>();
// static {
// ADD_TABLE_COLUMNS.put("ufficio_mic_competente", "TEXT");
// ADD_TABLE_COLUMNS.put("funzionario_responsabile", "TEXT");
// ADD_TABLE_COLUMNS.put("modalita_individuazione", "TEXT");
// ADD_TABLE_COLUMNS.put("contesto_indagine", "TEXT");
// ADD_TABLE_COLUMNS.put("denominazione", "TEXT");
// ADD_TABLE_COLUMNS.put("stato_attuale", "TEXT");
// ADD_TABLE_COLUMNS.put("accessibilita", "TEXT");
// ADD_TABLE_COLUMNS.put("cronologia_macrofase", "TEXT");
// ADD_TABLE_COLUMNS.put("specifiche_cronologia", "TEXT");
// ADD_TABLE_COLUMNS.put("quota_max", "TEXT");
// ADD_TABLE_COLUMNS.put("quota_min", "TEXT");
// }
//
private static final Map<String, String> DROP_TABLE_COLUMNS = new HashMap<>();
// static {
// DROP_TABLE_COLUMNS.put("autore", "");
// DROP_TABLE_COLUMNS.put("titolare", "");
// DROP_TABLE_COLUMNS.put("titolare_licenza", "");
// }
private static final Map<String, String> RENAME_TABLE_COLUMNS = new HashMap<>();
// Columns to rename: key = current column name, value = new column name
static {
RENAME_TABLE_COLUMNS.put("ufficio_mic_competente", "ufficio_competente");
}
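// e.g. with renameColumnTable below this map produces:
// ALTER TABLE <table> RENAME COLUMN ufficio_mic_competente TO ufficio_competente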
private static ProjectsCaller client = null;
public static final boolean READ_ONLY_MODE = true;
public static List<String> listTable = new ArrayList<String>();
/**
 * Initializes the geoportal client: reads the context settings, sets the scope
 * and security token providers, and creates the ProjectsCaller.
 */
// @Before
public static void initGeoportalClient() {
// assumeTrue(GCubeTest.isTestInfrastructureEnabled());
ContextConfigTest.readContextSettings();
CONTEXT = ContextConfigTest.CONTEXT;
TOKEN = ContextConfigTest.TOKEN;
ScopeProvider.instance.set(CONTEXT);
SecurityTokenProvider.instance.set(TOKEN);
client = GeoportalClientCaller.projects();
}
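/**
 * Entry point of the migration script: reads the postgis ServiceEndpoint, opens the
 * database connection and, for each table in listTable, adds, drops and renames the
 * configured columns (statements are executed only when READ_ONLY_MODE is false),
 * printing the table schema before and after the changes.
 */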
public static void main(String[] args) {
initGeoportalClient();
System.out.println("READ_ONLY_MODE ENABLED: " + READ_ONLY_MODE);
System.out.println("CONTEXT IS: " + CONTEXT);
System.out.println("PROFILE_ID: " + PROFILE_ID);
// GNA
// PROFILE_ID = "profiledConcessioni";
// listTable.add("profiledconcessioni_internal__gna_centroids");
// listTable.add("profiledconcessioni_gna_centroids");
// PROFILE_ID = "concessioni-estere";
// listTable.add("concessioni_estere_internal__gna_centroids");
// listTable.add("concessioni_estere_gna_centroids");
// Esquiline
// PROFILE_ID = "esquilino";
// String tableName = "esquilino_esquiline_centroids";
long startTime = System.currentTimeMillis();
try {
RuntimeResourceReader rrr = new RuntimeResourceReader(CONTEXT, resourceName, platformName, category, null);
ServiceEndpointBean se = rrr.getListSE().get(0);
AccessPoint ap = se.getListAP().get(0);
System.out.println(ap.address());
System.out.println(ap.username());
String pwdDB = RuntimeResourceReader.dectryptPassword(CONTEXT, ap);
System.out.println(pwdDB);
Connection dbConnection = getDatabaseConnection(ap.address(), ap.username(), pwdDB);
for (String tableName : listTable) {
System.out.println("\n\n# SOURCE TABLE");
Boolean tableExists = checkTableExists(dbConnection, tableName);
if (!tableExists) {
throw new Exception("Table '" + tableName + "' does not exist in the DB! Exiting.");
}
printTableColumn(dbConnection, tableName);
System.out.println("\n\n### ADDING COLUMNS...\n");
// ADD NEW COLUMNS
for (String columnName : ADD_TABLE_COLUMNS.keySet()) {
try {
boolean columnExists = checkColumnExists(dbConnection, tableName, columnName);
if (!columnExists) {
addColumnTable(dbConnection, tableName, columnName, ADD_TABLE_COLUMNS.get(columnName));
int sleeping = 500;
System.out.println("... sleeping " + sleeping + " ...");
Thread.sleep(sleeping);
}
} catch (Exception e) {
// Abort on the first failure while adding columns
e.printStackTrace();
return;
}
}
System.out.println("\n\n### END ADD COLUMNS");
System.out.println("\n\n### DROPPING COLUMNS...\n");
// REMOVE OLD COLUMNS
for (String columnName : DROP_TABLE_COLUMNS.keySet()) {
try {
boolean columnExists = checkColumnExists(dbConnection, tableName, columnName);
if (columnExists) {
dropColumnTable(dbConnection, tableName, columnName, DROP_TABLE_COLUMNS.get(columnName));
int sleeping = 1000;
System.out.println("... sleeping " + sleeping + " ...");
Thread.sleep(sleeping);
}
} catch (Exception e) {
// Abort on the first failure while dropping columns
e.printStackTrace();
return;
}
}
System.out.println("\n\n### END DROP COLUMNS");
System.out.println("\n\n### RENAMING COLUMNS...\n");
// RENAME_TABLE_COLUMNS
for (String columnName : RENAME_TABLE_COLUMNS.keySet()) {
try {
boolean columnExists = checkColumnExists(dbConnection, tableName, columnName);
if (columnExists) {
renameColumnTable(dbConnection, tableName, columnName,
RENAME_TABLE_COLUMNS.get(columnName));
int sleeping = 1000;
System.out.println("... sleeping " + sleeping + " ...");
Thread.sleep(sleeping);
}
} catch (Exception e) {
// Abort on the first failure while renaming columns
e.printStackTrace();
return;
}
}
System.out.println("\n\n### END RENAME COLUMNS");
System.out.println("\n\n# UPDATED TABLE");
printTableColumn(dbConnection, tableName);
}
} catch (Exception e) {
// Log any unexpected error; the elapsed time is still printed below
e.printStackTrace();
}
long endTime = System.currentTimeMillis();
System.out.println("\n\nSCRIPT END at: " + endTime);
double toSec = (endTime - startTime) / 1000.0;
System.out.println("SCRIPT TERMINATED in " + toSec + "sec");
}
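/**
 * Prints name, type and size of every column of the given table, as read from the
 * JDBC DatabaseMetaData.
 */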
public static void printTableColumn(Connection con, String tablename) {
try {
DatabaseMetaData databaseMetaData = con.getMetaData();
// Print TABLE_TYPE "TABLE"
ResultSet columns = databaseMetaData.getColumns(null, null, tablename, null);
System.out.println("\n==== TABLE " + tablename + " ");
System.out.println("[COLUMN_NAME - TYPE_NAME ( COLUMN_SIZE )]");
System.out.println("-----------------------------------------------");
while (columns.next()) {
String columnName = columns.getString("COLUMN_NAME");
String columnType = columns.getString("TYPE_NAME");
int columnSize = columns.getInt("COLUMN_SIZE");
System.out.println("\t" + columnName + " - " + columnType + " (" + columnSize + ")");
}
System.out.println("-----------------------------------------------");
} catch (SQLException e) {
// Log metadata-read errors to stderr
e.printStackTrace();
}
}
/**
* Gets the database connection.
*
* @param dbURL the db url
* @param user the user
* @param pwd the pwd
* @return the database connection
*/
public static Connection getDatabaseConnection(String dbURL, String user, String pwd) {
System.out.println("dbURL: " + dbURL);
Connection c = null;
try {
Class.forName("org.postgresql.Driver");
c = DriverManager.getConnection(dbURL, user, pwd);
c.setAutoCommit(false);
} catch (Exception e) {
e.printStackTrace();
System.err.println(e.getClass().getName() + ": " + e.getMessage());
System.exit(0);
}
System.out.println("Opened database successfully");
return c;
}
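/**
 * Adds the given column (with the given type) to the table via ALTER TABLE ... ADD COLUMN.
 * The statement is executed only when READ_ONLY_MODE is false; on failure the transaction
 * is rolled back.
 */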
public static void addColumnTable(Connection con, String tableName, String newColumn, String type)
throws SQLException {
String alterTableString = String.format("ALTER TABLE %s ADD COLUMN %s %s", tableName, newColumn, type);
System.out.println("Executing: " + alterTableString);
try {
con.setAutoCommit(false);
if (!READ_ONLY_MODE) {
con.createStatement().execute(alterTableString);
con.commit();
}
} catch (SQLException e) {
e.printStackTrace();
if (con != null) {
try {
System.err.print("Transaction is being rolled back");
con.rollback();
} catch (SQLException excep) {
excep.printStackTrace();
}
}
}
System.out.println("Executed: " + alterTableString);
}
public static void renameColumnTable(Connection con, String tableName, String oldColumn, String newColumn)
throws SQLException {
String alterTableString = String.format("ALTER TABLE %s RENAME COLUMN %s TO %s", tableName, oldColumn,
newColumn);
System.out.println("Executing: " + alterTableString);
try {
con.setAutoCommit(false);
if (!READ_ONLY_MODE) {
con.createStatement().execute(alterTableString);
con.commit();
}
} catch (SQLException e) {
e.printStackTrace();
if (con != null) {
try {
System.err.print("Transaction is being rolled back");
con.rollback();
} catch (SQLException excep) {
excep.printStackTrace();
}
}
}
System.out.println("Executed: " + alterTableString);
}
public static void dropColumnTable(Connection con, String tableName, String oldColumn, String type)
throws SQLException {
String alterTableString = String.format("ALTER TABLE %s DROP COLUMN %s", tableName, oldColumn);
System.out.println("Executing: " + alterTableString);
try {
con.setAutoCommit(false);
if (!READ_ONLY_MODE) {
con.createStatement().execute(alterTableString);
con.commit();
}
} catch (SQLException e) {
e.printStackTrace();
if (con != null) {
try {
System.err.print("Transaction is being rolled back");
con.rollback();
} catch (SQLException excep) {
excep.printStackTrace();
}
}
}
System.out.println("Executed: " + alterTableString);
}
public static Boolean checkColumnExists(Connection con, String tableName, String columnName) throws SQLException {
String columnExistsLabel = "COLUMN_EXISTS";
String sql = String.format(
"SELECT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name='%s' AND column_name='%s') as %s",
tableName, columnName, columnExistsLabel);
System.out.println("\n+++ " + columnExistsLabel + " checking '" + tableName + "': " + sql);
PreparedStatement p;
ResultSet resultSet;
try {
p = con.prepareStatement(sql);
resultSet = p.executeQuery();
// Expected 1 row
resultSet.next();
Boolean columnExists = resultSet.getBoolean(columnExistsLabel);
System.out.println("\t RESP --> Column '" + columnName + "' exists: " + columnExists + "\n");
return columnExists;
} catch (SQLException e) {
System.err.println(e);
return false;
}
}
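/**
 * Checks in information_schema.tables whether the given table exists in the public
 * schema as a BASE TABLE. Returns false (instead of throwing) when the query fails.
 */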
public static Boolean checkTableExists(Connection con, String tableName) throws SQLException {
String tableExistsLabel = "TABLE_EXISTS";
String sql = String.format(
"SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name='%s' AND table_type LIKE 'BASE TABLE' AND table_schema LIKE 'public') as %s",
tableName, tableExistsLabel);
System.out.println("\n+++ " + tableExistsLabel + " checking '" + tableName + "': " + sql);
PreparedStatement p;
ResultSet resultSet;
try {
p = con.prepareStatement(sql);
resultSet = p.executeQuery();
// Expected 1 row
resultSet.next();
Boolean tableExists = resultSet.getBoolean(tableExistsLabel);
System.out.println("\t RESP --> Table '" + tableName + "' exists: " + tableExists + "\n");
return tableExists;
} catch (SQLException e) {
System.err.println(e);
return false;
}
}
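/**
 * Builds and executes an UPDATE on the given table, setting every column of
 * mapColumnValue (bound as strings) and applying the given WHERE condition.
 * The update is executed only when READ_ONLY_MODE is false; on failure the
 * transaction is rolled back.
 */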
public static void updateTable(Connection con, String tableName, String whereCondition,
LinkedHashMap<String, String> mapColumnValue) throws SQLException {
StringBuilder updateSQL = new StringBuilder();
updateSQL.append("UPDATE " + tableName + " SET");
int i = 1;
for (String column : mapColumnValue.keySet()) {
updateSQL.append(" " + column + "=?");
if (i < mapColumnValue.size())
updateSQL.append(", ");
i++;
}
updateSQL.append(" WHERE " + whereCondition);
String update = updateSQL.toString();
// System.out.println("Executing: " + update);
try (PreparedStatement updatePS = con.prepareStatement(update);) {
int index = 1;
for (String column : mapColumnValue.keySet()) {
updatePS.setString(index, mapColumnValue.get(column));
index++;
}
con.setAutoCommit(false);
System.out.println("Executing: " + updatePS.toString());
if (!READ_ONLY_MODE) {
updatePS.executeUpdate();
con.commit();
}
} catch (SQLException e) {
e.printStackTrace();
if (con != null) {
try {
System.err.print("Transaction is being rolled back");
con.rollback();
} catch (SQLException excep) {
excep.printStackTrace();
}
}
}
}
// Reads every row of the given table and collects the values of the project-ID column
public static List<String> readTableIDs(Connection con, String tableName, String columnNameProjectId) {
PreparedStatement p = null;
ResultSet resultSet = null;
List<String> listProjectIds = new ArrayList<String>();
try {
// Build and run a plain SELECT * on the given table
String sql = String.format("SELECT * FROM %s", tableName);
p = con.prepareStatement(sql);
resultSet = p.executeQuery();
ResultSetMetaData rsmd = resultSet.getMetaData();
int columnsNumber = rsmd.getColumnCount();
int row = 1;
System.out.println("\n####TABLE: " + tableName + " content..\n");
while (resultSet.next()) {
System.out.print("" + row + "] ");
for (int i = 1; i <= columnsNumber; i++) {
String columnValue = resultSet.getString(i);
System.out.print("(" + rsmd.getColumnName(i) + ") " + columnValue + " | ");
}
row++;
System.out.println("\n");
String projectId = resultSet.getString(columnNameProjectId);
listProjectIds.add(projectId);
}
System.out.println("####TABLE: " + tableName + " end content\n");
}
catch (SQLException e) {
// Log the SQL error to stderr
System.err.println(e);
}
System.out.println("returning list IDs: " + listProjectIds);
return listProjectIds;
}
}

View File

@ -1,8 +1,6 @@
 package org.gcube.application;
-import java.io.FileInputStream;
 import java.io.IOException;
-import java.io.InputStream;
 import java.time.LocalDate;
 import java.time.format.DateTimeFormatter;
 import java.util.ArrayList;
@ -13,7 +11,6 @@ import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Properties;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import java.util.stream.StreamSupport;
@ -38,6 +35,7 @@ import org.gcube.application.geoportalcommon.shared.geoportal.project.PhaseDV;
 import org.gcube.application.geoportalcommon.shared.geoportal.project.ProjectDV;
 import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
 import org.gcube.common.scope.api.ScopeProvider;
+import org.junit.Before;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.core.JsonParser;
@ -63,7 +61,7 @@ public class Project_Tests {
 private static ProjectsCaller clientPrj = null;
 private static String PROFILE_ID = "profiledConcessioni";
-private static String PROJECT_ID = "644a66e944aad51c80409a3b";
+private static String PROJECT_ID = "6384aaac308f5c28c5ee0888";
 private static String MY_LOGIN = "francesco.mangiacrapa";
@ -77,32 +75,11 @@
 }
-/**
- * Read context settings.
- */
-public static void readContextSettings() {
-try (InputStream input = new FileInputStream(gcube_config_path)) {
-Properties prop = new Properties();
-// load a properties file
-prop.load(input);
-CONTEXT = prop.getProperty("CONTEXT");
-TOKEN = prop.getProperty("TOKEN");
-// get the property value and print it out
-System.out.println("CONTEXT: " + CONTEXT);
-System.out.println("TOKEN: " + TOKEN);
-} catch (IOException ex) {
-ex.printStackTrace();
-}
-}
-// @Before
+@Before
 public void init() {
-readContextSettings();
+ContextConfigTest.readContextSettings();
+CONTEXT = ContextConfigTest.CONTEXT;
+TOKEN = ContextConfigTest.TOKEN;
 ScopeProvider.instance.set(CONTEXT);
 SecurityTokenProvider.instance.set(TOKEN);
 clientPrj = GeoportalClientCaller.projects();
@ -132,7 +109,9 @@
 System.out.println("The key: " + key + " has value: " + theValue);
 }
-System.out.println(projectDV.getSpatialReference());
+System.out.println("JSON: " + projectDV.getTheDocument().getDocumentAsJSON());
+System.out.println("Spatial reference: " + projectDV.getSpatialReference());
 }
 // @Test
@ -173,6 +152,66 @@
 }
 }
+//@Test
+public void getListProjectsDVFilteredJSONDocument() throws Exception {
+// List<Project> listOfProjects = client.getListForProfileID(PROFILE_ID);
+SearchingFilter filter = new SearchingFilter();
+// Where Clause
+List<WhereClause> conditions = new ArrayList<WhereClause>();
+Map<String, Object> searchInto = new HashMap<String, Object>();
+//searchInto.put("_id", "6384aaac308f5c28c5ee0888");
+searchInto.put("_theDocument.nome", "della Civita di Tarquinia ");
+WhereClause whereClause = new WhereClause(LOGICAL_OP.OR, searchInto);
+conditions.add(whereClause);
+filter.setConditions(conditions);
+/* ORDER BY */
+// List<ItemFieldDV> orderByFields = new ArrayList<ItemFieldDV>();
+// List<String> jsonFields = Arrays.asList("_theDocument.dataInizioProgetto");
+// ItemFieldDV itemField = new ItemFieldDV("dataInizioProgetto", jsonFields, "$or", false, false, false);
+// orderByFields.add(itemField);
+// filter.setOrderByFields(orderByFields);
+LinkedHashMap<String, Object> projection = new LinkedHashMap<String, Object>();
+// default
+// PROJECTION
+projection.put(Project.ID, 1);
+// projection.put("_theDocument.nome", 1);
+// projection.put("_profileID", 1);
+// projection.put("_profileVersion", 1);
+// projection.put("_version", 1);
+// projection.put("_theDocument", 1);
+//
+// projection.put("_theDocument.paroleChiaveLibere", 1);
+// projection.put("_theDocument.editore", 1);
+// projection.put("_theDocument.paroleChiaveICCD", 1);
+// projection.put("_theDocument.responsabile", 1);
+//
+// projection.put("_theDocument.introduzione", 1);
+// projection.put("_theDocument.authors", 1);
+// projection.put("_theDocument.dataInizioProgetto", 1);
+//filter.setProjection(projection);
+Integer totalDocs = clientPrj.getTotalDocument(PROFILE_ID);
+Iterator<Project> projects = clientPrj.queryOnMongo(PROFILE_ID, 10, 0, null, filter);
+//Iterable<Project> itP = () -> projects;
+//Stream<Project> targetStream = StreamSupport.stream(itP.spliterator(), false);
+//List<String> listProjectIDs = targetStream.map(Project::getId).collect(Collectors.toList());
+List<ResultDocumentDV> results = ConvertToDataValueObjectModel.toListResultDocument(projects);
+int i = 0;
+for (ResultDocumentDV projectDV : results) {
+System.out.println(++i + ") " + projectDV.getId() + " JSON: " + projectDV.getDocumentAsJSON());
+}
+}
 //@Test
 public void getListProjectsDVFiltered() throws Exception {
 // List<Project> listOfProjects = client.getListForProfileID(PROFILE_ID);
@ -314,6 +353,16 @@
 }
+// @Test
+public void getCountByPhase() throws Exception {
+Integer integer = clientPrj.getCountByPhaseFor(PROFILE_ID, "Published", "OK");
+System.out.println(integer);
+integer = clientPrj.getCountByPhaseFor(PROFILE_ID, "Pending Approval", "OK");
+System.out.println(integer);
+}
 // @Test
 public void getRelationshipsChain() throws Exception {
 System.out.println("getRelationshipsChain test");

View File

@ -32,7 +32,6 @@ import org.gcube.common.scope.api.ScopeProvider;
 import org.json.JSONArray;
 import org.json.JSONObject;
 import org.junit.Before;
-import org.junit.Test;
 import com.jayway.jsonpath.JsonPath;
 import com.jayway.jsonpath.spi.json.JsonOrgJsonProvider;

View File

@ -151,9 +151,7 @@ public class RuntimeResourceReader {
 // String platformName = "postgis";
 // String category = "Database";
 // scope = "/pred4s/preprod/preVRE";
 RuntimeResourceReader reader;
 try {
 ScopeProvider.instance.set(scope);