package org.gcube.portlets.user.geoportaldataentry.server;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import org.apache.commons.io.FileUtils;
import org.bson.Document;
import org.gcube.application.geoportal.client.utils.Serialization;
import org.gcube.application.geoportal.common.model.document.Project;
import org.gcube.application.geoportal.common.model.document.access.Access;
import org.gcube.application.geoportal.common.model.document.lifecycle.LifecycleInformation;
import org.gcube.application.geoportal.common.model.rest.TempFile;
import org.gcube.application.geoportal.common.model.useCaseDescriptor.RelationshipDefinition;
import org.gcube.application.geoportal.common.model.useCaseDescriptor.UseCaseDescriptor;
import org.gcube.application.geoportal.common.utils.StorageUtils;
import org.gcube.application.geoportalcommon.ConvertToDataServiceModel;
import org.gcube.application.geoportalcommon.ConvertToDataValueObjectModel;
import org.gcube.application.geoportalcommon.GeoportalCommon;
import org.gcube.application.geoportalcommon.ProjectDVBuilder;
import org.gcube.application.geoportalcommon.geoportal.GeoportalClientCaller;
import org.gcube.application.geoportalcommon.geoportal.ProjectsCaller;
import org.gcube.application.geoportalcommon.geoportal.UseCaseDescriptorCaller;
import org.gcube.application.geoportalcommon.shared.GNADataEntryConfigProfile;
import org.gcube.application.geoportalcommon.shared.GNADataViewerConfigProfile;
import org.gcube.application.geoportalcommon.shared.GeoportalItemReferences;
import org.gcube.application.geoportalcommon.shared.ResultSetPaginatedData;
import org.gcube.application.geoportalcommon.shared.SearchingFilter;
import org.gcube.application.geoportalcommon.shared.config.GcubeUserRole;
import org.gcube.application.geoportalcommon.shared.config.OPERATION_ON_ITEM;
import org.gcube.application.geoportalcommon.shared.config.RoleRights;
import org.gcube.application.geoportalcommon.shared.config.RoleRights.OPERATION_TYPE;
import org.gcube.application.geoportalcommon.shared.exception.GNAConfigException;
import org.gcube.application.geoportalcommon.shared.geoportal.DocumentDV;
import org.gcube.application.geoportalcommon.shared.geoportal.ResultDocumentDV;
import org.gcube.application.geoportalcommon.shared.geoportal.config.ActionDefinitionDV;
import org.gcube.application.geoportalcommon.shared.geoportal.config.FilePathDV;
import org.gcube.application.geoportalcommon.shared.geoportal.config.GcubeProfileDV;
import org.gcube.application.geoportalcommon.shared.geoportal.project.LifecycleInformationDV;
import org.gcube.application.geoportalcommon.shared.geoportal.project.ProjectDV;
import org.gcube.application.geoportalcommon.shared.geoportal.project.TemporalReferenceDV;
import org.gcube.application.geoportalcommon.shared.geoportal.ucd.GEOPORTAL_DATA_HANDLER;
import org.gcube.application.geoportalcommon.shared.geoportal.ucd.RelationshipDefinitionDV;
import org.gcube.application.geoportalcommon.shared.geoportal.ucd.UseCaseDescriptorDV;
import org.gcube.application.geoportalcommon.shared.geoportal.view.ProjectView;
import org.gcube.application.geoportaldatamapper.Geoportal_JSON_Mapper;
import org.gcube.application.geoportaldatamapper.shared.ProjectEdit;
import org.gcube.common.portal.PortalContext;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.gcube.portlets.user.geoportaldataentry.client.GeoportalDataEntryService;
import org.gcube.portlets.user.geoportaldataentry.client.ProjectFormCard;
import org.gcube.portlets.user.geoportaldataentry.server.json.JsonMerge;
import org.gcube.portlets.user.geoportaldataentry.server.json.JsonMerge.MERGE_OPTION;
import org.gcube.portlets.user.geoportaldataentry.shared.CommitReport;
import org.gcube.portlets.user.geoportaldataentry.shared.GNADataEntryExtendedConfigProfile;
import org.gcube.portlets.user.geoportaldataentry.shared.GeoNaFormDataObject;
import org.gcube.portlets.user.geoportaldataentry.shared.GeoportalISConfig;
import org.gcube.portlets.user.geoportaldataentry.shared.Tree_Node;
import org.gcube.portlets.user.geoportaldataentry.shared.UserRights;
import org.gcube.portlets.widgets.mpformbuilder.shared.GenericDatasetBean;
import org.gcube.portlets.widgets.mpformbuilder.shared.upload.FileUploaded;
import org.gcube.portlets.widgets.mpformbuilder.shared.upload.FileUploadedRemote;
import org.gcube.vomanagement.usermanagement.RoleManager;
import org.gcube.vomanagement.usermanagement.exception.GroupRetrievalFault;
import org.gcube.vomanagement.usermanagement.exception.UserRetrievalFault;
import org.gcube.vomanagement.usermanagement.impl.LiferayRoleManager;
import org.gcube.vomanagement.usermanagement.model.GCubeRole;
import org.gcube.vomanagement.usermanagement.model.GCubeUser;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.gson.Gson;
import com.google.gson.JsonObject;
import com.google.gwt.user.client.Random;
import com.google.gwt.user.server.rpc.RemoteServiceServlet;
import com.jayway.jsonpath.Configuration;
import com.jayway.jsonpath.DocumentContext;
import com.jayway.jsonpath.JsonPath;
import com.jayway.jsonpath.spi.json.GsonJsonProvider;
/**
* The server side implementation of the RPC service.
*
* @author Francesco Mangiacrapa at ISTI-CNR (francesco.mangiacrapa@isti.cnr.it)
*
* Dec 1, 2020
*/
@SuppressWarnings("serial")
public class GeoportalDataEntryServiceImpl extends RemoteServiceServlet implements GeoportalDataEntryService {
public static final String GEONA_GENERIC_RESOURCE_SECONDARY_TYPE = "GEONA_GENERIC_RESOURCE_SECONDARY_TYPE";
private static final Logger LOG = LoggerFactory.getLogger(GeoportalDataEntryServiceImpl.class);
/**
* Gets the GNA data entry config profile.
*
* @return the GNA data entry config profile
* @throws Exception the exception
*/
private GNADataEntryConfigProfile getGNADataEntryConfigProfile() throws Exception {
GNADataEntryConfigProfile profile = SessionUtil.getGNADataEntryConfigProfile(getThreadLocalRequest());
if (profile == null) {
LOG.info(GNADataEntryConfigProfile.class.getSimpleName() + " is null, loading configurations from IS");
// to be sure
SessionUtil.getCurrentContext(this.getThreadLocalRequest(), true);
GeoportalCommon gCommon = new GeoportalCommon();
profile = gCommon.readGNADataEntryConfig();
SessionUtil.setGNADataEntryConfigProfile(getThreadLocalRequest(), profile);
} else {
LOG.info(GNADataEntryConfigProfile.class.getSimpleName() + " read from session");
}
return profile;
}
/**
* Save geona data forms.
*
* @param profileID the profile ID
* @param tree_Node the tree node
* @param stepsOnPostCreation the steps on post creation
* @return the commit report
* @throws Exception the exception
*/
@Override
public CommitReport saveGeonaDataForms(String profileID, Tree_Node<GeoNaFormDataObject> tree_Node,
List<String> stepsOnPostCreation) throws Exception {
LOG.info("saveGeonaDataForms called for profileID {}", profileID);
MongoServiceUtil mongoService = new MongoServiceUtil();
String theDocumentString = null;
try {
FormDataObjectToJSON metadataConverter = new FormDataObjectToJSON();
JSONObject theDocument = metadataConverter.convert(tree_Node, null);
theDocumentString = theDocument.toString();
LOG.info("Got Document: " + theDocumentString);
} catch (Exception e) {
LOG.error("Error on converting form data: ", e);
throw new Exception(
"Error occurred on converting data, try again or contact the support. Error: " + e.getMessage());
}
Project theProject = null;
try {
SessionUtil.getCurrentContext(this.getThreadLocalRequest(), true);
LOG.debug("Going to create the project...");
theProject = mongoService.createNew(profileID, theDocumentString);
LOG.info("Project created with id: " + theProject.getId() + " and profileID: " + theProject.getProfileID());
} catch (Exception e) {
LOG.error("Error on creating the project: ", e);
throw new Exception("Error occurred on creating new project, try again or contact the support. Error: "
+ e.getMessage());
}
List<File> listTempDirs = new ArrayList<File>();
try {
// Uploading files into tempDirs in order to avoid clashing of names
LOG.debug("Going to upload the files");
listTempDirs = recursiveUploadFileset(mongoService, profileID, theProject, tree_Node, null, listTempDirs);
} catch (Exception e) {
LOG.error("Error on uploading files: ", e);
throw new Exception(
"Error occurred on uploading files, try again or contact the support. Error: " + e.getMessage());
}
try {
ProjectsCaller client = GeoportalClientCaller.projects();
SessionUtil.getCurrentContext(getThreadLocalRequest(), true);
LOG.info("stepsOnPostCreation are {}", stepsOnPostCreation);
for (String stepID : stepsOnPostCreation) {
LOG.info("calling step OnPostCreation are {}", stepID);
theProject = client.performStep(theProject.getProfileID(), theProject.getId(), stepID, null);
}
} catch (Exception e) {
throw new Exception("Error occurred on performing steps " + stepsOnPostCreation + " on the project: "
+ theProject.getId() + ". Error: " + e.getMessage());
}
try {
LifecycleInformation lifecycleInfo = theProject.getLifecycleInformation();
LifecycleInformationDV liDV = ConvertToDataValueObjectModel.toLifecycleInformationDV(lifecycleInfo);
return new CommitReport(theProject.getId(), theProject.getProfileID(), theProject.getTheDocument().toJson(),
liDV);
} catch (Exception e) {
throw new Exception("Error occurred on loading LifecycleInformation for the project: " + theProject.getId()
+ ". Error: " + e.getMessage());
} finally {
LOG.debug("List listTempDirs is: " + listTempDirs);
if (listTempDirs != null && listTempDirs.size() > 0) {
for (File file : listTempDirs) {
try {
String dirName = file.getName();
LOG.debug("Deleting directory directory: " + dirName);
FileUtils.deleteDirectory(file);
LOG.debug("Directory {} deleted!", dirName);
} catch (Exception e) {
LOG.debug("Error on deleting the directory: " + file);
}
}
}
}
}
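/**
* Update geoportal data form.
*
* Updates a single section of an existing project: the section bean is converted to JSON,
* merged into the stored document at the given sectionPath and, when listFilePaths is provided,
* the corresponding filesets are deleted and registered again from the uploaded files.
*
* @param profileID the profile ID
* @param projectID the project ID
* @param section the section to be updated
* @param sectionPath the JSON path of the section in the document
* @param listFilePaths the fileset paths of the section (as defined in the UCD)
* @return the commit report
* @throws Exception the exception
*/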
@Override
public CommitReport updateGeportalDataForm(String profileID, String projectID, GeoNaFormDataObject section,
String sectionPath, List<FilePathDV> listFilePaths) throws Exception {
LOG.info("updateGeonaDataForm called for profileID {}", profileID);
LOG.info("and sectionPath {}", sectionPath);
LOG.info("and listFilePaths {}", listFilePaths);
ProjectsCaller client = null;
Project currentProject = null;
JSONObject updatedSectionObject = null;
Configuration configurationGson = null;
GenericDatasetBean sectionBean = null;
try {
if (projectID == null)
throw new Exception("projectID is null");
if (profileID == null)
throw new Exception("profileID is null");
if (section == null || section.getListGDB() == null || section.getListGDB().get(0) == null)
throw new Exception("Input error. The section is null");
sectionBean = section.getListGDB().get(0);
// Converter
FormDataObjectToJSON metadataConverter = new FormDataObjectToJSON();
// JSON Section to update converted as JSONObject
updatedSectionObject = metadataConverter.genericDatasetBeanToJSON(sectionBean);
LOG.info("Input Json Section (to update): {}", updatedSectionObject.toString());
configurationGson = Configuration.builder().jsonProvider(new GsonJsonProvider()).build();
// Type type = new TypeToken<Set<LinkedTreeMap<String, Object>>>() {}.getType();
// Set<LinkedTreeMap<String, Object>> myMap = gson.fromJson(json, type);
} catch (Exception e) {
LOG.error("Error on converting form data: ", e);
throw new Exception(
"Error occurred on converting data, try again or contact the support. Error: " + e.getMessage());
}
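// From here on: read the current document, merge the updated section into it at sectionPath,
// push the updated document to the service and, when requested, replace the filesets of the
// section; if anything fails, the previous version of the document is restored in the finally block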
Boolean errorOccurred = false;
try {
client = GeoportalClientCaller.projects();
SessionUtil.getCurrentContext(getThreadLocalRequest(), true);
currentProject = client.getProjectByID(profileID, projectID);
Document currentDoc = currentProject.getTheDocument();
// Source Project
String theDocumentJson = currentDoc.toJson();
LOG.debug("Source document: {}", theDocumentJson);
// If the section path is the Root document, passed as "$.", fixing as "$"
if (sectionPath.compareTo(FormDataObjectToJSON.JSON_$_POINTER + ".") == 0)
sectionPath = FormDataObjectToJSON.JSON_$_POINTER;
com.google.gson.JsonObject targetSectionJObject = JsonPath.parse(theDocumentJson, configurationGson)
.read(sectionPath);
LOG.debug("Current Section path {} in the Document is {}", sectionPath, targetSectionJObject.toString());
String srcJ = updatedSectionObject.toString();
String trgJ = targetSectionJObject.toString();
LOG.debug("Merging src {} in the target: {}", srcJ, trgJ);
String mergedDoc = JsonMerge.merge(srcJ, trgJ, MERGE_OPTION.REPLACE);
LOG.debug("mergedDoc: {}", mergedDoc);
String newDocJson;
// If Updating path is first level of the root
if (sectionPath.equals(FormDataObjectToJSON.JSON_$_POINTER)) {
// The merged DOC is the root Document, no action required
newDocJson = mergedDoc;
} else {
// If the merged DOC is a child of the root Document, setting it as child of the
// Document in the proper section
Gson gson = new Gson();
JsonObject gsonOject = gson.fromJson(mergedDoc, JsonObject.class);
// Putting the merged section into Document
DocumentContext newContextDocJson = JsonPath.parse(theDocumentJson, configurationGson).set(sectionPath,
gsonOject);
newDocJson = newContextDocJson.json().toString();
}
Document updatedDocument = Serialization.read(newDocJson.toString(), Document.class);
LOG.info("New document is: {}", updatedDocument.toJson());
Project updatedProject = client.updateProject(profileID, projectID, updatedDocument);
// Project project = client.getProjectByID(profileID, projectID);
LOG.debug("Medatata Updated with document: {}", updatedProject.getTheDocument());
SessionUtil.getCurrentContext(getThreadLocalRequest(), true);
if (listFilePaths != null) {
// Cleaning all the fileset path of the section (defined in the UCD)
for (FilePathDV filePath : listFilePaths) {
String filesetFieldName = filePath.getFieldName();
String filesetPath = sectionPath + "." + filesetFieldName;
// // Replacing $.abc with $..abc
// filesetPath = filesetPath.replaceFirst("\\.", "..");
LOG.info("Going to delete fileset path: {}", filesetPath);
try {
client.deleteFileset(profileID, projectID, filesetPath, true, true);
} catch (Exception e) {
LOG.warn("Error deleting the fileset path {} for the project {}", filesetPath, projectID);
}
}
// Registering new files
List<? extends FileUploaded> filesUploaded = sectionBean.getFilesUploaded();
if (filesUploaded != null && !filesUploaded.isEmpty()) {
replaceFiles(currentProject, sectionPath, section.getGcubeProfileDV(), filesUploaded);
}
}
LOG.info("Project with id " + currentProject.getId() + " updated correclty");
ProjectDVBuilder projectBuilder = ProjectDVBuilder.newBuilder().fullDocumentMap(true);
// Reading again the project to be sure
updatedProject = client.getProjectByID(profileID, projectID);
ProjectDV toProjectDV = ConvertToDataValueObjectModel.toProjectDV(updatedProject, projectBuilder);
String newDocumentString = toProjectDV.getTheDocument().getDocumentAsJSON();
LOG.info("Got Document: {} ", newDocumentString);
return new CommitReport(projectID, profileID, newDocumentString, null);
} catch (Exception e) {
errorOccurred = true;
LOG.error("Error on updating data: ", e);
throw new Exception("Error occurred on updating data, try again or contact the support. Error: "
+ e.getMessage() + ". Tried to revert the project to the previous version");
} finally {
// If an error occurs on updating, the previous version of the document will be
// restored
if (errorOccurred && currentProject != null) {
Document currentDocument = currentProject.getTheDocument();
try {
Project updatedProject = client.updateProject(profileID, projectID, currentDocument);
} catch (Exception e) {
// Silent
}
}
}
}
/**
* Recursive upload fileset.
*
* @param mongoService the mongo service
* @param profileID the profile ID
* @param theProject the project
* @param tree_Node the tree node
* @param sectionJSONPathIndexer the json path indexer
* @param tempDirs the temp dirs
* @return the list
* @throws Exception the exception
*/
public List<File> recursiveUploadFileset(MongoServiceUtil mongoService, String profileID, Project theProject,
Tree_Node<GeoNaFormDataObject> tree_Node, Map<String, Integer> sectionJSONPathIndexer, List<File> tempDirs)
throws Exception {
LOG.debug("recursiveUploadFileset called [tree_Node: " + tree_Node + "], [jsonPathIndexer: "
+ sectionJSONPathIndexer + "]");
if (tree_Node == null)
return tempDirs;
if (sectionJSONPathIndexer == null) {
sectionJSONPathIndexer = new HashMap<String, Integer>();
}
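// sectionJSONPathIndexer counts how many times each section path has been visited, so that
// repeatable sections (maxOccurs != 1) can be addressed by array index when registering filesets,
// e.g. the second occurrence of a hypothetical section "$.abc" is registered at "$.abc[1]"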
String theJSONDocument = theProject.getTheDocument().toJson();
for (Tree_Node<GeoNaFormDataObject> treeNodeChild_GNA_DO : tree_Node.getChildren()) {
LOG.debug("Going to upload the files of tree node: " + treeNodeChild_GNA_DO);
// list GDB has always one element in the current implementation. One GDB per
// web-form/profile
List<GenericDatasetBean> listGDB = treeNodeChild_GNA_DO.getData().getListGDB();
GcubeProfileDV profile = treeNodeChild_GNA_DO.getData().getGcubeProfileDV();
// Building JSON/section full PATH and section name
String sectionJSONPath = "";
String parentPathIntoProfile = profile.getParentName() == null ? "" : profile.getParentName();
String theSectionName = profile.getSectionName();
if (theSectionName.compareTo(FormDataObjectToJSON.JSON_$_POINTER) == 0
|| theSectionName.compareTo(FormDataObjectToJSON.JSON_$_POINTER + ".") == 0) {
sectionJSONPath = FormDataObjectToJSON.JSON_$_POINTER;
theSectionName = "";
} else {
sectionJSONPath = String.format("%s%s",
parentPathIntoProfile.endsWith(".") ? parentPathIntoProfile : parentPathIntoProfile + ".",
theSectionName);
}
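// Example (hypothetical names): with parentName "$." and sectionName "relazioneScavo" the
// resulting sectionJSONPath is "$.relazioneScavo"; when the section is the root pointer "$",
// the path is simply "$" and the section name is left empty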
LOG.debug("The sectionJSONPath is: " + sectionJSONPath);
Integer jpcV = sectionJSONPathIndexer.get(sectionJSONPath);
if (jpcV == null) {
jpcV = 0;
sectionJSONPathIndexer.put(sectionJSONPath, jpcV);
} else {
jpcV = jpcV + 1;
sectionJSONPathIndexer.put(sectionJSONPath, jpcV);
}
LOG.debug("sectionJSONPathIndexer is: " + sectionJSONPathIndexer);
LOG.info("The profile is: " + profile);
for (GenericDatasetBean gdb : listGDB) {
Map<String, FileSetDataObject> collectFilesetPerFieldDef = new HashMap<String, FileSetDataObject>();
List<FileUploaded> files = gdb.getFilesUploaded();
if (files.size() > 0) {
// Iterating on the files uploaded for the section
for (int i = 0; i < files.size(); i++) {
FileUploaded file = files.get(i);
String formFieldName = file.getFilePath().getFormFieldLabel();
LOG.debug("Uploading file: " + file.getFileName() + ", from formFieldName: " + formFieldName);
FilePathDV filePath = retrieveFilePathForGcubeProfileFieldName(formFieldName, profile);
LOG.info("Found {} for the form fieldName {}", filePath, formFieldName);
if (filePath == null) {
String error = "It is not possible to register the file " + formFieldName
+ ", missing configuration in the filePaths config of: " + profile;
throw new Exception(error);
}
// Collecting Fileset per Field Definition
FileSetDataObject collFieldDef = collectFilesetPerFieldDef.get(filePath.getFieldDefinition());
if (collFieldDef == null) {
collFieldDef = new FileSetDataObject();
collFieldDef.setFilePathDV(filePath);
}
try {
File input = new File(file.getTempSystemPath());
LOG.debug("Temp file is: " + file.getTempSystemPath());
File tempDir = Files.createTempDirectory("GEOPORTAL_UPLOAD_").toFile();
String tmpDirPath = tempDir.getAbsolutePath();
File output = new File(tmpDirPath, file.getFileName());
// input.renameTo(output);
copyContent(input, output);
collFieldDef.addFile(output);
tempDirs.add(tempDir);
tempDir.deleteOnExit();
LOG.info("Temp file: " + file.getTempSystemPath() + ", copied to new file: "
+ file.getFileName());
} catch (Exception e) {
LOG.warn("Skipping file: " + file.getFileName() + ". Error: " + e.getMessage());
}
collectFilesetPerFieldDef.put(filePath.getFieldDefinition(), collFieldDef);
}
}
LOG.info("Cluster of fileset per fieldDefinition is: " + collectFilesetPerFieldDef);
for (String fieldDefinition : collectFilesetPerFieldDef.keySet()) {
FileSetDataObject uploadedFileset = collectFilesetPerFieldDef.get(fieldDefinition);
LOG.info("Uploading fileset: " + uploadedFileset);
File[] fileset = uploadedFileset.getFileset();
FilePathDV filePath = uploadedFileset.getFilePathDV();
Access access;
// If the maxOccurs is not 1
if (profile.getMaxOccurs() == 0 || profile.getMaxOccurs() > 1) {
LOG.info("The gCube Profile with the section " + sectionJSONPath
+ " has maxOccurs > 1 need to manage it as array, going to add the array index");
String arraySectionJSONPath = String.format("%s[%d]", sectionJSONPath, jpcV);
LOG.debug("registering the fileset in the array section: " + arraySectionJSONPath);
access = ConvertToDataServiceModel.getAccessFromDocumentSection(theJSONDocument,
arraySectionJSONPath);
mongoService.registerFileSet(profileID, theProject, arraySectionJSONPath,
filePath.getFieldName(), filePath.getFieldDefinition(), access, fileset);
} else {
LOG.info("The gCube Profile with the section " + sectionJSONPath + " has maxOccurs = 1");
LOG.debug("registering the fileset in the section: " + sectionJSONPath);
access = ConvertToDataServiceModel.getAccessFromDocumentSection(theJSONDocument,
sectionJSONPath);
mongoService.registerFileSet(profileID, theProject, sectionJSONPath, filePath.getFieldName(),
filePath.getFieldDefinition(), access, fileset);
}
}
tempDirs = recursiveUploadFileset(mongoService, profileID, theProject, treeNodeChild_GNA_DO,
sectionJSONPathIndexer, tempDirs);
}
}
return tempDirs;
}
/**
* Replace files.
*
* @param theProject the project
* @param sectionJSONPath the section JSON path
* @param gcubeProfile the gcube profile
* @param files the files
* @throws Exception the exception
*/
protected void replaceFiles(Project theProject, String sectionJSONPath, GcubeProfileDV gcubeProfile,
List<? extends FileUploaded> files) throws Exception {
LOG.debug("replaceFiles called [projectID: " + theProject.getId() + "], [sectionJSONPath: " + sectionJSONPath
+ "], [files: " + files + "]");
Map<String, FileSetDataObject> collectFilesetPerFieldDef = new HashMap<String, FileSetDataObject>();
if (files.size() > 0) {
// Iterating on the files uploaded for the section
for (int i = 0; i < files.size(); i++) {
FileUploaded file = files.get(i);
String formFieldName = file.getFilePath().getFormFieldLabel();
LOG.debug("Uploading file: " + file.getFileName() + ", from formFieldName: " + formFieldName);
FilePathDV filePath = retrieveFilePathForGcubeProfileFieldName(formFieldName, gcubeProfile);
LOG.info("Found {} for the form fieldName {}", filePath, formFieldName);
if (filePath == null) {
String error = "It is not possible to register the file " + formFieldName
+ ", missing configuration in the filePaths config of: " + gcubeProfile;
throw new Exception(error);
}
// Collecting Fileset per Field Definition
FileSetDataObject collFieldDef = collectFilesetPerFieldDef.get(filePath.getFieldDefinition());
if (collFieldDef == null) {
collFieldDef = new FileSetDataObject();
collFieldDef.setFilePathDV(filePath);
}
try {
File tempDir = Files.createTempDirectory("GEOPORTAL_REPLACE_FILES_").toFile();
String tmpDirPath = tempDir.getAbsolutePath();
File input;
File output;
if (file instanceof FileUploadedRemote) {
FileUploadedRemote remote = (FileUploadedRemote) file;
LOG.info("Uploaded file is remote: " + remote.getUrl());
String fileName = (remote.getFileName() == null || remote.getFileName().isEmpty())
? "file_" + Random.nextInt() : remote.getFileName();
output = new File(tmpDirPath, fileName);
Path outputAbsolutePath = Paths.get(output.getAbsolutePath());
// Try-with-resources so the remote stream is always closed
try (InputStream in = new URL(remote.getUrl()).openStream()) {
Files.copy(in, outputAbsolutePath, StandardCopyOption.REPLACE_EXISTING);
}
LOG.info("Remote file: " + remote.getUrl() + ", copied to new file: " + output.getName());
} else {
LOG.info("Uploaded file is local: " + file.getTempSystemPath());
input = new File(file.getTempSystemPath());
output = new File(tmpDirPath, file.getFileName());
copyContent(input, output);
LOG.info(
"Temp file: " + file.getTempSystemPath() + ", copied to new file: " + output.getName());
}
collFieldDef.addFile(output);
tempDir.deleteOnExit();
} catch (Exception e) {
LOG.warn("Skipping file: " + file.getFileName() + ". Error: " + e.getMessage());
}
collectFilesetPerFieldDef.put(filePath.getFieldDefinition(), collFieldDef);
}
}
LOG.info("Cluster of fileset per fieldDefinition is: " + collectFilesetPerFieldDef);
String theJSONDocument = theProject.getTheDocument().toJson();
MongoServiceUtil mongoService = new MongoServiceUtil();
for (String fieldDefinition : collectFilesetPerFieldDef.keySet()) {
FileSetDataObject uploadedFileset = collectFilesetPerFieldDef.get(fieldDefinition);
LOG.info("Uploading fileset: " + uploadedFileset);
File[] fileset = uploadedFileset.getFileset();
FilePathDV filePath = uploadedFileset.getFilePathDV();
Access access = ConvertToDataServiceModel.getAccessFromDocumentSection(theJSONDocument, sectionJSONPath);
LOG.info("Going to registrer files: " + Arrays.asList(fileset).toString());
mongoService.registerFileSet(theProject.getProfileID(), theProject, sectionJSONPath,
filePath.getFieldName(), filePath.getFieldDefinition(), access, fileset);
}
}
/**
* Creates the temp file on storage.
*
* @param is the input stream
* @param fileName the file name
* @return the temp file
*/
public TempFile createTempFileOnStorage(InputStream is, String fileName) {
LOG.debug("createTempFileOnStorage called");
StorageUtils storage = new StorageUtils();
TempFile toUpload = null;
try {
LOG.info("calling putOntoStorage the stream with the fileName: " + fileName);
toUpload = storage.putOntoStorage(is, fileName);
} catch (RemoteBackendException | FileNotFoundException e) {
LOG.error("Error when uploading stream on Storage: ", e);
}
return toUpload;
}
/**
* Copy content.
*
* @param a the source file
* @param b the destination file
* @throws Exception the exception
*/
public static void copyContent(File a, File b) throws Exception {
// Try-with-resources closes both streams even if the copy fails
try (FileInputStream in = new FileInputStream(a); FileOutputStream out = new FileOutputStream(b)) {
byte[] buffer = new byte[8192];
int n;
while ((n = in.read(buffer)) != -1) {
out.write(buffer, 0, n);
}
}
LOG.debug("File copied");
}
/**
* Retrieve file path for gcube profile field name.
*
* @param fieldName the field name
* @param profile the profile
* @return the file path DV
*/
public static FilePathDV retrieveFilePathForGcubeProfileFieldName(String fieldName, GcubeProfileDV profile) {
LOG.debug("Searching fieldDefinition for fieldName {} in the FilePaths {}", fieldName, profile.getFilePaths());
if (fieldName == null)
return null;
for (FilePathDV path : profile.getFilePaths()) {
if (path.getGcubeProfileFieldName().equals(fieldName)) {
return path;
}
}
return null;
}
/**
* Gets the geona init config.
*
* @return the geona init config
*/
@Override
public GeoportalISConfig getGeonaInitConfig() {
LOG.info("getConfig called");
String scope = SessionUtil.getCurrentContext(this.getThreadLocalRequest(), false);
String theSecondaryType;
try {
theSecondaryType = this.getServletContext().getInitParameter(GEONA_GENERIC_RESOURCE_SECONDARY_TYPE);
} catch (Exception e) {
LOG.warn("I cannot read the init parameter for: " + GEONA_GENERIC_RESOURCE_SECONDARY_TYPE, e);
theSecondaryType = "GeoNaMetadata";
LOG.warn("Using default SecondaryType: " + theSecondaryType);
}
// LOG.warn("\n\n\nHARD-CABLING THE SCOPE, PLEASE REMOTE IT!!!!\n\n\n");
// scope = "/gcube/devsec/devVRE";
GeoportalISConfig configs = new GeoportalISConfig(theSecondaryType, scope);
LOG.info("returning config: " + configs);
return configs;
}
/**
* Gets the links for.
*
* @param itemId the item id is the mongoId
* @param profileID the profile ID
* @return the links for
* @throws Exception the exception
*/
@Override
public GeoportalItemReferences getLinksFor(String itemId, String profileID) throws Exception {
LOG.info("getLinksFor called");
SessionUtil.getCurrentContext(this.getThreadLocalRequest(), true);
GNADataViewerConfigProfile grViewerProfile = SessionUtil
.getGeportalViewerResourceProfile(getThreadLocalRequest());
GeoportalCommon gc = new GeoportalCommon(grViewerProfile);
GeoportalItemReferences item = new GeoportalItemReferences(itemId, profileID);
item = gc.getPublicLinksFor(item, true);
LOG.info("Returning: " + item);
return item;
}
/**
* Gets the list projects.
*
* @param theProfileID the the profile ID
* @param start the start
* @param limit the limit
* @param filter the filter
* @param reloadFromService the reload from service
* @return the list projects
* @throws Exception the exception
*/
@Override
public ResultSetPaginatedData getListProjects(String theProfileID, Integer start, Integer limit,
SearchingFilter filter, boolean reloadFromService) throws Exception {
LOG.info("getListProjects called with profileID: " + theProfileID + ", start: " + start + ", limit: " + limit
+ ", filter: " + filter);
try {
ProjectsCaller client = GeoportalClientCaller.projects();
SessionUtil.getCurrentContext(getThreadLocalRequest(), true);
ResultSetPaginatedData searchedData = new ResultSetPaginatedData();
// If reloadFromService = true, loads the document from the service
Integer totalProjectForProfile = null;
// Loading total documents from the session
if (!reloadFromService) {
totalProjectForProfile = SessionUtil.getTotalDocumentForProfileID(getThreadLocalRequest(),
theProfileID);
}
if (totalProjectForProfile == null) {
totalProjectForProfile = client.getTotalDocument(theProfileID);
SessionUtil.setTotalDocumentForProfileID(getThreadLocalRequest(), theProfileID, totalProjectForProfile);
}
searchedData.setTotalItems(totalProjectForProfile);
LOG.info("Total Docs read from config: " + totalProjectForProfile);
// Saving client PROJECTION
LinkedHashMap<String, Object> originalProjection = filter.getProjection();
int totalItems = totalProjectForProfile;
// PERFORMING A FIRST QUERY FOR THE IDs ONLY IF WHERE CONDITIONS ARE PRESENT IN THE QUERY
// (I.E. THE SEARCHING FACILITY IS ACTIVE)
if (filter.getConditions() != null) {
// Setting the PROJECTION ONLY FOR THE PROJECT ID
LinkedHashMap<String, Object> projectionForIDs = new LinkedHashMap<String, Object>();
projectionForIDs.put(Project.ID, 1);
filter.setProjection(projectionForIDs);
// FIRST QUERY TO RETRIEVE IDs
// A NULL LIMIT MEANS IT IS EQUAL TO THE TOTAL NUMBER OF DOCUMENTS
// Always calculating the size starting from 0
final Iterator<Project> projectsIDs = client.queryOnMongo(theProfileID, totalProjectForProfile, 0, null,
filter);
// Getting the Project IDs from the Iterable
Iterable<Project> itP = () -> projectsIDs;
Stream<Project> targetStream = StreamSupport.stream(itP.spliterator(), false);
List<String> listProjectIDs = targetStream.map(Project::getId).collect(Collectors.toList());
totalItems = listProjectIDs.size();
searchedData.setTotalItems(totalItems);
LOG.info("Total Docs read from query per ID: " + totalItems);
}
// NOW PERFORMING THE (REAL) SECOND QUERY FROM CLIENT
// SETTING ORIGINAL PROJECTION FROM CLIENT
filter.setProjection(originalProjection);
// LIMIT IS FROM CLIENT
Iterator<Project> projects = client.queryOnMongo(theProfileID, totalItems, start, limit, filter);
searchedData.setClientStartIndex(start);
searchedData.setLimit(limit);
searchedData.setServerSearchFinished(false);
List<ResultDocumentDV> toReturnList = ConvertToDataValueObjectModel.toListResultDocument(projects);
searchedData.setData(toReturnList);
LOG.info(
"Total Docs page size returned:" + toReturnList.size() + ", start: " + start + ", limit: " + limit);
if (totalProjectForProfile == limit || totalProjectForProfile == 0) {
LOG.debug("Page completed returning " + totalProjectForProfile + " projects");
int newOffset = start + limit;
searchedData.setServerSearchFinished(newOffset > totalProjectForProfile || totalProjectForProfile == 0);
LOG.debug("is Search finished: " + searchedData.isServerSearchFinished());
}
if (LOG.isDebugEnabled()) {
LOG.debug("returning {}", searchedData.getData());
}
List<? extends DocumentDV> data = searchedData.getData();
if (data != null) {
LOG.info("returning {} project/s", data.size());
}
return searchedData;
} catch (Exception e) {
LOG.error("Error on loading paginated and filtered list of projects for id: ", e);
throw new Exception("Error occurred on loading list of Projects. Error: " + e.getMessage());
}
}
/**
* Delete project.
*
* @param profileID the profile ID
* @param projectID the project ID
* @return true, if successful
* @throws Exception the exception
*/
@Override
public boolean deleteProject(String profileID, String projectID) throws Exception {
LOG.info("deleteProject called for profileID {}, projectID {}", profileID, projectID);
try {
if (projectID == null)
throw new Exception("projectID is null");
ProjectsCaller client = GeoportalClientCaller.projects();
SessionUtil.getCurrentContext(getThreadLocalRequest(), true);
client.deleteProject(profileID, projectID, true);
// Updating count of Documents in session per profileID
Integer totalProjectForProfile = client.getTotalDocument(profileID);
SessionUtil.setTotalDocumentForProfileID(getThreadLocalRequest(), profileID, totalProjectForProfile);
return true;
} catch (Exception e) {
LOG.error("Error on deleting the project with id: " + projectID, e);
throw new Exception(
"Error occurred on deleting the project with id: " + projectID + ". Error: " + e.getMessage());
}
}
/**
* Update record.
*
* @param profileID the profile ID
* @param projectID the project ID
* @param jsonUpdate the json update
* @return the updated project
* @throws Exception the exception
*/
@Override
public ProjectDV updateRecord(String profileID, String projectID, String jsonUpdate) throws Exception {
LOG.info("updateRecord called with profileID: " + profileID + ", projectID: " + projectID);
try {
if (projectID == null)
throw new Exception("projectID is null");
ProjectsCaller client = GeoportalClientCaller.projects();
SessionUtil.getCurrentContext(getThreadLocalRequest(), true);
Document updatedDocument = Serialization.read(jsonUpdate, Document.class);
LOG.info("updatedDocument is {}", updatedDocument);
Project project = client.updateProject(profileID, projectID, updatedDocument);
LOG.info("Project with id " + project.getId() + " updated correclty");
ProjectDVBuilder projectBuilder = ProjectDVBuilder.newBuilder().fullDocumentMap(true);
return ConvertToDataValueObjectModel.toProjectDV(project, projectBuilder);
} catch (Exception e) {
LOG.error("Error on updating the project with item id: " + projectID, e);
throw new Exception(
"Error occurred on updating the project with id: " + projectID + ". Error: " + e.getMessage());
}
}
/**
* Gets the JSON document in the project.
*
* @param profileID the profile ID
* @param projectID the project ID
* @return the JSON document in the project
* @throws Exception the exception
*/
@Override
public String getJSONDocumentInTheProject(String profileID, String projectID) throws Exception {
LOG.info("getJSONDocumentInTheProject called with profileID: " + profileID + ", projectID: " + projectID);
try {
if (projectID == null)
throw new Exception("projectID is null");
if (profileID == null)
throw new Exception("profileID is null");
ProjectsCaller client = GeoportalClientCaller.projects();
SessionUtil.getCurrentContext(getThreadLocalRequest(), true);
Project project = client.getProjectByID(profileID, projectID);
return project.getTheDocument().toJson();
} catch (Exception e) {
LOG.error("Error occurred on reading the JSON document in the project with ID: " + projectID, e);
throw new Exception("Error occurred on reading the JSON document in the project with ID: " + projectID
+ ". Error: " + e.getMessage());
}
}
/**
* Read data viewer config.
*
* @return the GNA data entry ext config profile
* @throws Exception the exception
*/
@Override
public GNADataEntryExtendedConfigProfile readDataViewerConfig() throws Exception {
LOG.info("readDataViewerConfig called");
GCubeUser user = null;
GNADataEntryExtendedConfigProfile gnaDEExtConfig = new GNADataEntryExtendedConfigProfile();
try {
PortalContext pContext = PortalContext.getConfiguration();
user = pContext.getCurrentUser(this.getThreadLocalRequest());
String scope = SessionUtil.getCurrentContext(this.getThreadLocalRequest(), true);
GNADataEntryConfigProfile config = getGNADataEntryConfigProfile();
List<RoleRights> listUserRightsForRole = config.getPermissionsForRole();
gnaDEExtConfig.setPermissionsForRole(listUserRightsForRole);
gnaDEExtConfig.setDataEntryGUIPresentation(config.getDataEntryGUIPresentation());
// DEV MODE
if (!SessionUtil.isIntoPortal()) {
LOG.warn("OUT OF PORTAL - DEV MODE detected");
GcubeUserRole myRole = GcubeUserRole.DATA_MANAGER;
for (RoleRights roleRight : listUserRightsForRole) {
if (roleRight.getUserRole().equals(myRole)) {
UserRights userRights = new UserRights(user.getUsername(), roleRight);
LOG.warn("DEV MODE returning: " + userRights);
gnaDEExtConfig.setUserRights(userRights);
return gnaDEExtConfig;
}
}
}
long groupId = pContext.getCurrentGroupId(this.getThreadLocalRequest());
if (user == null || scope == null) {
LOG.warn("called getMyRightsInTheContext with invalid parameter user: " + user + ", in the scope: "
+ scope, ", returning null");
return null;
}
LOG.info("reading GcubeUserRole for user: " + user.getUsername() + ", in the scope: " + scope);
// The following snippet should pass in the service-side
RoleManager roleManager = new LiferayRoleManager();
List<GCubeRole> roles = roleManager.listRolesByUserAndGroup(user.getUserId(), groupId);
List<GcubeUserRole> listMyRoles = new ArrayList<GcubeUserRole>();
for (GCubeRole gCubeRole : roles) {
if (gCubeRole.getRoleName().equalsIgnoreCase(GcubeUserRole.DATA_EDITOR.getName())) {
listMyRoles.add(GcubeUserRole.DATA_EDITOR);
}
if (gCubeRole.getRoleName().equalsIgnoreCase(GcubeUserRole.DATA_MANAGER.getName())) {
listMyRoles.add(GcubeUserRole.DATA_MANAGER);
}
}
// Mapping to roles "known"
LOG.info(
"For user: " + user.getUsername() + " in the scope: " + scope + " read the role/s: " + listMyRoles);
GcubeUserRole myRole = null;
if (listMyRoles.contains(GcubeUserRole.DATA_MANAGER))
myRole = GcubeUserRole.DATA_MANAGER;
else if (listMyRoles.contains(GcubeUserRole.DATA_EDITOR))
myRole = GcubeUserRole.DATA_EDITOR;
else
myRole = GcubeUserRole.DATA_MEMBER;
LOG.info("using highest role: " + myRole);
RoleRights toRoleRight = null;
for (RoleRights roleRight : listUserRightsForRole) {
if (roleRight.getUserRole().equals(myRole)) {
toRoleRight = roleRight;
break;
}
}
UserRights userRights = new UserRights(user.getUsername(), toRoleRight);
LOG.info("returning: " + userRights);
gnaDEExtConfig.setUserRights(userRights);
return gnaDEExtConfig;
} catch (UserRetrievalFault | GroupRetrievalFault | GNAConfigException e) {
LOG.error("An error occurred during getMyRightsInTheContext: " + user, e);
Map<OPERATION_ON_ITEM, OPERATION_TYPE> permissions = new HashMap<OPERATION_ON_ITEM, RoleRights.OPERATION_TYPE>();
RoleRights roleRights = new RoleRights(permissions, GcubeUserRole.DATA_MEMBER);
// The user could be null here (e.g. on UserRetrievalFault), guarding against a NullPointerException
UserRights ur = new UserRights(user != null ? user.getUsername() : null, roleRights);
LOG.warn("Returning default user rights: " + ur);
gnaDEExtConfig.setUserRights(ur);
return gnaDEExtConfig;
}
}
/**
* Gets the list use case descriptors.
*
* @param handlersIds the handlers ids
* @return the list use case descriptors
* @throws Exception the exception
*/
@Override
public List<UseCaseDescriptorDV> getListUseCaseDescriptors(List<String> handlersIds) throws Exception {
LOG.info("getListUseCaseDescriptors called for handlersIds: " + handlersIds);
try {
List<UseCaseDescriptor> listUseCaseDescriptor = null;
SessionUtil.getCurrentContext(this.getThreadLocalRequest(), true);
try {
UseCaseDescriptorCaller client = GeoportalClientCaller.useCaseDescriptors();
if (handlersIds == null) {
handlersIds = Arrays.asList(GEOPORTAL_DATA_HANDLER.geoportal_data_entry.getId());
LOG.info("handlersIds is null, so using default: " + handlersIds);
}
// TODO: the following call must be used instead
// listUseCaseDescriptor = client.getListForHandlerIds(handlersIds);
listUseCaseDescriptor = client.getList();
} catch (Exception e) {
String error = "Error on contacting the Geoportal service";
LOG.error(error + " for handlers: " + handlersIds, e);
throw new Exception(
"Error when contacting the Geoportal service. Refresh and try again or contact the support");
}
if (listUseCaseDescriptor == null) {
listUseCaseDescriptor = new ArrayList<UseCaseDescriptor>();
}
List<UseCaseDescriptorDV> listUCDDV = new ArrayList<UseCaseDescriptorDV>(listUseCaseDescriptor.size());
for (UseCaseDescriptor ucd : listUseCaseDescriptor) {
listUCDDV.add(ConvertToDataValueObjectModel.toUseCaseDescriptorDV(ucd, null));
}
LOG.info("for handlersIds {} returning {} " + UseCaseDescriptorDV.class.getSimpleName(), handlersIds,
listUCDDV.size());
return listUCDDV;
} catch (Exception e) {
LOG.error("Error on getting list of Use Case Descriptors", e);
throw new Exception("An error occurred when getting list of Use Case Descriptors", e);
}
}
/**
* Gets the lifecycle info for project id.
*
* @param profileID the profile ID
* @param projectID the project ID
* @return the lifecycle info for project id
* @throws Exception the exception
*/
@Override
public LifecycleInformationDV getLifecycleInfoForProjectId(String profileID, String projectID) throws Exception {
LOG.info("getLifecycleInfoForProjectId called for profileID {}, projectID {} ", profileID, projectID);
ProjectsCaller client = GeoportalClientCaller.projects();
SessionUtil.getCurrentContext(getThreadLocalRequest(), true);
try {
Project project = client.getProjectByID(profileID, projectID);
return ConvertToDataValueObjectModel.toLifecycleInformationDV(project.getLifecycleInformation());
} catch (Exception e) {
String error = "Error on retrieving the LifecycleInfo for profileID " + profileID + " and projectID "
+ projectID;
LOG.error(error, e);
throw new Exception(error + ". Refresh and try again or contact the support");
}
}
/**
* Perform action steps.
*
* @param profileID the profile ID
* @param projectID the project ID
* @param action the action
* @return the project DV
* @throws Exception the exception
*/
@Override
public ProjectDV performActionSteps(String profileID, String projectID, ActionDefinitionDV action)
throws Exception {
LOG.info("performActionSteps called for profileID {}, projectID {}, action: " + action, profileID, projectID);
ProjectsCaller client = GeoportalClientCaller.projects();
SessionUtil.getCurrentContext(getThreadLocalRequest(), true);
if (action == null || action.getCallSteps() == null || action.getCallSteps().length == 0) {
throw new Exception("Action called is invalid, no step defined");
}
try {
Project project = null;
for (String stepID : action.getCallSteps()) {
LOG.info("calling stepID {} on projectID {}", stepID, projectID);
project = client.performStep(profileID, projectID, stepID, null);
}
ProjectDVBuilder projectBuilder = ProjectDVBuilder.newBuilder().fullDocumentMap(true);
ProjectDV theProject = ConvertToDataValueObjectModel.toProjectDV(project, projectBuilder);
LOG.info("performActionSteps returning theProject with ID {}", projectID);
return theProject;
} catch (Exception e) {
String error = "Error occurred on performing the workflow step/s on the project id " + projectID;
LOG.error(error, e);
throw new Exception(
error + ". Error: " + e.getMessage() + ". Refresh and try again or contact the support");
}
}
/**
* Gets the relationships definition.
*
* @param profileID the profile ID
* @return the relationships definition
* @throws Exception the exception
*/
@Override
public List<RelationshipDefinitionDV> getRelationshipsDefinition(String profileID) throws Exception {
LOG.info("getRelationshipNames called for profileID {}", profileID);
try {
List<RelationshipDefinitionDV> listRelDefinitionDV = SessionUtil
.getRelationshipsDefinition(getThreadLocalRequest(), profileID);
if (listRelDefinitionDV == null) {
UseCaseDescriptorCaller client = GeoportalClientCaller.useCaseDescriptors();
SessionUtil.getCurrentContext(getThreadLocalRequest(), true);
List<RelationshipDefinition> listRelDefinition = client.getRelationshipDefinitions(profileID);
if (listRelDefinition == null || listRelDefinition.isEmpty()) {
listRelDefinitionDV = new ArrayList<RelationshipDefinitionDV>(1);
} else {
listRelDefinitionDV = new ArrayList<RelationshipDefinitionDV>(listRelDefinition.size());
}
for (RelationshipDefinition relationshipDefinition : listRelDefinition) {
RelationshipDefinitionDV relDef = ConvertToDataValueObjectModel
.toRelationshipDefinition(relationshipDefinition);
listRelDefinitionDV.add(relDef);
}
SessionUtil.setRelationshipDefinition(getThreadLocalRequest(), profileID, listRelDefinitionDV);
}
return listRelDefinitionDV;
} catch (Exception e) {
String error = "Error occurred on reading the relations config from UCD " + profileID;
LOG.error(error, e);
throw new Exception(
error + ". Error: " + e.getMessage() + ". Refresh and try again or contact the support");
}
}
/**
* Creates the relationship.
*
* @param fromProfileID the from profile ID
* @param fromProjectID the from project ID
* @param relationshipId the relationship id
* @param toProfileID the to profile ID
* @param toProjectID the to project ID
* @throws Exception the exception
*/
@Override
public void createRelationship(String fromProfileID, String fromProjectID, String relationshipId,
String toProfileID, String toProjectID) throws Exception {
LOG.info("createRelationship called");
try {
ProjectsCaller projects = GeoportalClientCaller.projects();
SessionUtil.getCurrentContext(getThreadLocalRequest(), true);
projects.createRelationship(fromProfileID, fromProjectID, relationshipId, toProfileID, toProjectID);
} catch (Exception e) {
String error = "Error occurred on creating the relationship";
LOG.error(error, e);
throw new Exception(
error + ". Error: " + e.getMessage() + ". Refresh and try again or contact the support");
}
}
/**
* Delete relationship.
*
* @param fromProfileID the from profile ID
* @param fromProjectID the from project ID
* @param relationshipId the relationship id
* @param toProfileID the to profile ID
* @param toProjectID the to project ID
* @return the result document DV
* @throws Exception the exception
*/
@Override
public ResultDocumentDV deleteRelationship(String fromProfileID, String fromProjectID, String relationshipId,
String toProfileID, String toProjectID) throws Exception {
LOG.info("deleteRelationship called");
try {
ProjectsCaller projects = GeoportalClientCaller.projects();
SessionUtil.getCurrentContext(getThreadLocalRequest(), true);
projects.deleteRelationship(fromProfileID, fromProjectID, relationshipId, toProfileID, toProjectID);
SessionUtil.getCurrentContext(getThreadLocalRequest(), true);
return getResultDocumentFoProjectByID(fromProfileID, fromProjectID);
} catch (Exception e) {
String error = "Error occurred on deleting the relationship";
LOG.error(error, e);
throw new Exception(
error + ". Error: " + e.getMessage() + ". Refresh and try again or contact the support");
}
}
/**
* Gets the project by ID.
*
* @param profileID the profile ID
* @param projectID the project ID
* @return the project by ID
* @throws Exception the exception
*/
@Override
public ProjectDV getProjectByID(String profileID, String projectID) throws Exception {
LOG.info("getProjectByID called");
try {
ProjectsCaller projects = GeoportalClientCaller.projects();
SessionUtil.getCurrentContext(getThreadLocalRequest(), true);
Project project = projects.getProjectByID(profileID, projectID);
ProjectDVBuilder projectBuilder = ProjectDVBuilder.newBuilder().fullDocumentMap(true);
ProjectDV projectDV = ConvertToDataValueObjectModel.toProjectDV(project, projectBuilder);
LOG.info("returning: " + projectDV.getId());
return projectDV;
} catch (Exception e) {
String error = "Error occurred on reading the project with id: " + projectID;
LOG.error(error, e);
throw new Exception(
error + ". Error: " + e.getMessage() + ". Refresh and try again or contact the support");
}
}
/**
* Gets the result document for the project by ID.
*
* @param profileID the profile ID
* @param projectID the project ID
* @return the result document for the project by ID
* @throws Exception the exception
*/
@Override
public ResultDocumentDV getResultDocumentFoProjectByID(String profileID, String projectID) throws Exception {
LOG.info("getResultDocumentFoProjectByID called");
try {
SessionUtil.getCurrentContext(getThreadLocalRequest(), true);
ProjectsCaller projects = GeoportalClientCaller.projects();
Project project = projects.getProjectByID(profileID, projectID);
ResultDocumentDV documentDV = ConvertToDataValueObjectModel.toResultDocumentDV(project);
LOG.info("returning: " + documentDV.getId());
return documentDV;
} catch (Exception e) {
String error = "Error occurred on reading the project with id: " + projectID;
LOG.error(error, e);
throw new Exception(
error + ". Error: " + e.getMessage() + ". Refresh and try again or contact the support");
}
}
/**
* Temporal reference for project.
*
* @param profileID the profile ID
* @param projectID the project ID
* @return the temporal reference DV
*/
@Override
public TemporalReferenceDV temporalReferenceForProject(String profileID, String projectID) {
LOG.info("temporalReferenceForProject called for profileID: {}, and projectID: {}", profileID, projectID);
String template = null;
if (profileID == null || projectID == null)
return null;
try {
SessionUtil.getCurrentContext(getThreadLocalRequest(), true);
ProjectsCaller projects = GeoportalClientCaller.projects();
Project theProject = projects.getProjectByID(profileID, projectID);
template = SessionUtil.getJSONTimelineTemplate(this.getThreadLocalRequest(), profileID);
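// The Timeline JSON template is cached in the HTTP session per profileID; on a cache miss it is
// read from the UseCaseDescriptor below and stored in the session for later calls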
try {
if (template == null) {
Document temporalJsonTemplate = GeoportalClientCaller.useCaseDescriptors()
.readTemporalDimensionTemplate(theProject.getProfileID());
if (temporalJsonTemplate != null) {
template = new Document(temporalJsonTemplate).toJson();
LOG.info(UseCaseDescriptorCaller.TIMELINE_CONFIG_TJT_DOCUMENT + " read is: " + template);
SessionUtil.setJSONTimelineTemplate(this.getThreadLocalRequest(), theProject.getProfileID(),
template);
}
}
} catch (Exception e) {
LOG.warn(GEOPORTAL_DATA_HANDLER.geoportal_timeline_json_template.getType()
+ " not found!!! Timeline cannot be available");
}
// String template = "{\"content\": \"$.nome\"," + "\"title\": \"$.nome\","
// + "\"start\" : \"$.dataInizioProgetto\"," + "\"end\" : \"$.dataFineProgetto\"}";
if (template != null) {
JSONObject sourceJsonTemplate = new JSONObject(template);
JSONObject targetJsonObject = ConvertToDataValueObjectModel.toTimelineJSONModel(theProject,
sourceJsonTemplate, theProject.getProfileID(), theProject.getId(), null);
return ConvertToDataValueObjectModel.toTemporalReferenceDV(theProject, targetJsonObject);
}
} catch (Exception e) {
String erroMsg = "Error occurred on reading temporalReference for id: " + projectID;
LOG.warn(erroMsg, e);
}
return null;
}
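/**
* Gets the project edit.
*
* Loads the project identified by projectID (including its relationships) and maps it to the
* ProjectEdit DTO used by the data-entry GUI via Geoportal_JSON_Mapper.loadProjectEdit.
*
* @param profileID the profile ID
* @param projectID the project ID
* @return the project edit
* @throws Exception the exception
*/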
@Override
public ProjectEdit getProjectEdit(String profileID, String projectID) throws Exception {
LOG.info("getProjectEdit called for profileID: {}, and projectID: {}", profileID, projectID);
try {
PortalContext pContext = PortalContext.getConfiguration();
GCubeUser user = pContext.getCurrentUser(this.getThreadLocalRequest());
String scope = SessionUtil.getCurrentContext(getThreadLocalRequest(), true);
ProjectsCaller clientProjects = GeoportalClientCaller.projects();
Project theProject = clientProjects.getProjectByID(profileID, projectID);
ProjectDVBuilder projectBuilder = ProjectDVBuilder.newBuilder().fullDocumentMap(true);
projectBuilder.relationships(true);
ProjectDV theProjectDV = ConvertToDataValueObjectModel.toProjectDV(theProject, projectBuilder);
ProjectEdit projectEdit = Geoportal_JSON_Mapper.loadProjectEdit(theProjectDV, scope, user.getUsername());
if (LOG.isDebugEnabled()) {
Geoportal_JSON_Mapper.prettyPrintProjectEdit(projectEdit);
}
LOG.info(ProjectEdit.class.getSimpleName()+" returing not null: " + (projectEdit != null));
return projectEdit;
} catch (Exception e) {
String erroMsg = "Error occurred on reading "+ProjectEdit.class.getSimpleName()+" DTO for id: " + projectID;
LOG.warn(erroMsg, e);
throw new Exception(
erroMsg + ". Error: " + e.getMessage() + ". Refresh and try again or contact the support");
}
}
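/**
* Gets the project view.
*
* Loads the project identified by projectID (including its relationships) and maps it to the
* ProjectView DTO used to display the project via Geoportal_JSON_Mapper.loadProjectView.
*
* @param profileID the profile ID
* @param projectID the project ID
* @return the project view
* @throws Exception the exception
*/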
@Override
public ProjectView getProjectView(String profileID, String projectID) throws Exception {
LOG.info("getProjectEdit called for profileID: {}, and projectID: {}", profileID, projectID);
try {
PortalContext pContext = PortalContext.getConfiguration();
GCubeUser user = pContext.getCurrentUser(this.getThreadLocalRequest());
String scope = SessionUtil.getCurrentContext(getThreadLocalRequest(), true);
ProjectsCaller clientProjects = GeoportalClientCaller.projects();
Project theProject = clientProjects.getProjectByID(profileID, projectID);
ProjectDVBuilder projectBuilder = ProjectDVBuilder.newBuilder().fullDocumentMap(true);
projectBuilder.relationships(true);
ProjectDV theProjectDV = ConvertToDataValueObjectModel.toProjectDV(theProject, projectBuilder);
ProjectView projectView = Geoportal_JSON_Mapper.loadProjectView(theProjectDV, scope, user.getUsername());
if (LOG.isDebugEnabled()) {
Geoportal_JSON_Mapper.prettyPrintProjectView(projectView);
}
LOG.info(ProjectView.class.getSimpleName()+" returing not null: " + (projectView != null));
return projectView;
} catch (Exception e) {
String erroMsg = "Error occurred on reading "+ProjectView.class.getSimpleName()+" DTO for id: " + projectID;
LOG.warn(erroMsg, e);
throw new Exception(
erroMsg + ". Error: " + e.getMessage() + ". Refresh and try again or contact the support");
}
}
/**
* Pretty print client data entry map.
*
* @param toMap the map to print
*/
private void prettyPrintClientDataEntryMap(HashMap<ProjectFormCard, List<GeoNaFormDataObject>> toMap) {
for (ProjectFormCard theType : toMap.keySet()) {
LOG.debug("\n\n");
LOG.debug(theType.toString());
List<GeoNaFormDataObject> list = toMap.get(theType);
for (GeoNaFormDataObject geoNaFormDataObject : list) {
LOG.debug("\t has " + geoNaFormDataObject.getListGDB().size() + " data bean/s");
int i = 0;
for (GenericDatasetBean gbd : geoNaFormDataObject.getListGDB()) {
LOG.debug("\t " + ++i + ") " + GenericDatasetBean.class.getSimpleName() + " entries:");
Map<String, List<String>> map = gbd.getFormDataEntryFields();
for (String keyEntry : map.keySet()) {
LOG.debug("\t " + keyEntry + ": " + map.get(keyEntry));
}
// for (FileUploaded fup : gbd.getFilesUploaded()) {
// LOG.debug("\t " + fup);
// }
}
}
}
}
}