2017-08-04 15:04:35 +02:00
|
|
|
package org.gcube.data_catalogue.grsf_publish_ws.utils;
|
2016-12-03 11:50:38 +01:00
|
|
|
|
|
|
|
import java.beans.PropertyDescriptor;
|
|
|
|
import java.lang.reflect.Field;
|
|
|
|
import java.util.ArrayList;
|
2017-07-12 18:05:35 +02:00
|
|
|
import java.util.Arrays;
|
2017-07-28 14:48:46 +02:00
|
|
|
import java.util.Collections;
|
2017-07-13 15:43:19 +02:00
|
|
|
import java.util.HashMap;
|
2016-12-08 21:57:08 +01:00
|
|
|
import java.util.HashSet;
|
2016-12-03 11:50:38 +01:00
|
|
|
import java.util.List;
|
|
|
|
import java.util.Map;
|
|
|
|
import java.util.Set;
|
2018-01-26 09:51:33 +01:00
|
|
|
import java.util.concurrent.ConcurrentHashMap;
|
2016-12-03 11:50:38 +01:00
|
|
|
|
2017-07-12 18:05:35 +02:00
|
|
|
import javax.servlet.ServletContext;
|
|
|
|
|
2016-12-03 11:50:38 +01:00
|
|
|
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CkanResource;
|
|
|
|
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField;
|
|
|
|
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Group;
|
|
|
|
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Tag;
|
2017-10-25 16:45:34 +02:00
|
|
|
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.RefersToBean;
|
|
|
|
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.Resource;
|
|
|
|
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.TimeSeriesBean;
|
2017-08-04 15:04:35 +02:00
|
|
|
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.Base;
|
|
|
|
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.Common;
|
|
|
|
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.FisheryRecord;
|
|
|
|
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord;
|
2017-07-12 18:05:35 +02:00
|
|
|
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseCreationBean;
|
2017-10-27 14:51:24 +02:00
|
|
|
import org.gcube.data_catalogue.grsf_publish_ws.utils.csv.ManageTimeSeriesThread;
|
2017-07-12 18:05:35 +02:00
|
|
|
import org.gcube.data_catalogue.grsf_publish_ws.utils.threads.AssociationToGroupThread;
|
|
|
|
import org.gcube.data_catalogue.grsf_publish_ws.utils.threads.WritePostCatalogueManagerThread;
|
|
|
|
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
|
2017-03-08 17:09:34 +01:00
|
|
|
import org.gcube.datacatalogue.ckanutillibrary.shared.ResourceBean;
|
2017-07-12 18:05:35 +02:00
|
|
|
import org.gcube.datacatalogue.ckanutillibrary.shared.RolesCkanGroupOrOrg;
|
2017-10-27 14:37:41 +02:00
|
|
|
import org.gcube.datacatalogue.common.Constants;
|
|
|
|
import org.gcube.datacatalogue.common.enums.Product_Type;
|
|
|
|
import org.gcube.datacatalogue.common.enums.Sources;
|
|
|
|
import org.gcube.datacatalogue.common.enums.Status;
|
2017-08-02 12:11:47 +02:00
|
|
|
import org.json.simple.JSONObject;
|
2016-12-03 11:50:38 +01:00
|
|
|
import org.slf4j.LoggerFactory;
|
|
|
|
|
2017-07-31 18:29:48 +02:00
|
|
|
import eu.trentorise.opendata.jackan.model.CkanDataset;
|
2017-11-09 09:28:22 +01:00
|
|
|
import eu.trentorise.opendata.jackan.model.CkanLicense;
|
2017-07-31 18:29:48 +02:00
|
|
|
|
2016-12-03 11:50:38 +01:00
|
|
|
/**
|
|
|
|
* Services common utils.
|
|
|
|
* @author Costantino Perciante at ISTI-CNR
|
|
|
|
*/
|
2017-07-12 18:05:35 +02:00
|
|
|
@SuppressWarnings({"rawtypes", "unchecked"})
|
2016-12-03 11:50:38 +01:00
|
|
|
public class CommonServiceUtils {
|
|
|
|
|
|
|
|
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(CommonServiceUtils.class);
|
2017-12-01 13:32:51 +01:00
|
|
|
private static final int TAG_MAX_SIZE = 100;
|
2018-01-26 09:51:33 +01:00
|
|
|
private static Map<String, Boolean> extensionsCheck = new ConcurrentHashMap<>();
|
2016-12-03 11:50:38 +01:00
|
|
|
|
|
|
|
/**
|
|
|
|
* Retrieve the list of licenses for stocks and fisheries
|
|
|
|
* @return
|
|
|
|
*/
|
2017-11-09 09:28:22 +01:00
|
|
|
public static Map<String, String> getLicenses(DataCatalogue catalogue){
|
|
|
|
logger.info("Requested licenses...");
|
|
|
|
Map<String, String> toReturn = new HashMap<String, String>();
|
|
|
|
List<CkanLicense> licenses = catalogue.getLicenses();
|
|
|
|
|
|
|
|
for (CkanLicense ckanLicense : licenses) {
|
|
|
|
toReturn.put(ckanLicense.getId(), ckanLicense.getTitle());
|
2016-12-03 11:50:38 +01:00
|
|
|
}
|
2017-11-09 09:28:22 +01:00
|
|
|
return toReturn;
|
2016-12-03 11:50:38 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Validate an aggregated GRSF record. TODO use @Valid tags
|
|
|
|
* @throws Exception
|
|
|
|
*/
|
|
|
|
public static void validateAggregatedRecord(Common record) throws Exception {
|
|
|
|
|
|
|
|
List<RefersToBean> refersToList = record.getRefersTo();
|
2017-08-01 11:54:21 +02:00
|
|
|
String shortTitle = record.getShortName();
|
2016-12-03 11:50:38 +01:00
|
|
|
Boolean traceabilityFlag = record.isTraceabilityFlag();
|
|
|
|
Status status = record.getStatus();
|
|
|
|
|
|
|
|
if(refersToList == null || refersToList.isEmpty())
|
|
|
|
throw new Exception("refers_to cannot be null/empty");
|
|
|
|
|
|
|
|
if(traceabilityFlag == null)
|
|
|
|
throw new Exception("traceability_flag cannot be null");
|
|
|
|
|
|
|
|
if(shortTitle == null || shortTitle.isEmpty())
|
|
|
|
throw new Exception("short_title cannot be null/empty");
|
|
|
|
|
|
|
|
if(status == null)
|
|
|
|
throw new Exception("status cannot be null/empty");
|
|
|
|
|
2016-12-04 12:24:37 +01:00
|
|
|
// check if it is a stock and perform related checks
|
2016-12-03 11:50:38 +01:00
|
|
|
if(record.getClass().equals(StockRecord.class)){
|
|
|
|
|
|
|
|
StockRecord stock = (StockRecord) record;
|
2017-06-30 15:32:38 +02:00
|
|
|
List<String> species = stock.getSpecies();
|
|
|
|
if(species == null || species.isEmpty())
|
|
|
|
throw new Exception("species cannot be null/empty in a GRSF record");
|
2016-12-03 11:50:38 +01:00
|
|
|
}
|
|
|
|
|
2016-12-10 11:24:47 +01:00
|
|
|
// check if it is a stock and perform related checks
|
|
|
|
if(record.getClass().equals(FisheryRecord.class)){
|
|
|
|
|
|
|
|
FisheryRecord fishery = (FisheryRecord) record;
|
|
|
|
|
|
|
|
List<String> fishingArea = fishery.getFishingArea();
|
|
|
|
List<String> jurisdictionArea = fishery.getJurisdictionArea();
|
|
|
|
|
|
|
|
if((fishingArea == null || fishingArea.isEmpty()) && (jurisdictionArea == null || jurisdictionArea.isEmpty()))
|
|
|
|
throw new Exception("fishing_area and jurisdiction_area cannot be null/empty at the same time!");
|
|
|
|
}
|
2016-12-03 11:50:38 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Parse the record to look up tags, groups and resources
|
|
|
|
* @param tags
|
2017-07-12 18:05:35 +02:00
|
|
|
* @param skipTags
|
2016-12-03 11:50:38 +01:00
|
|
|
* @param groups
|
2017-07-12 18:05:35 +02:00
|
|
|
* @param skipGroups
|
|
|
|
* @param resources
|
|
|
|
* @param skipResources
|
|
|
|
* @param extras
|
2016-12-03 11:50:38 +01:00
|
|
|
* @param record
|
|
|
|
* @param username
|
2017-07-12 18:05:35 +02:00
|
|
|
* @param source
|
2016-12-03 11:50:38 +01:00
|
|
|
*/
|
2016-12-04 12:24:37 +01:00
|
|
|
public static void getTagsGroupsResourcesExtrasByRecord(
|
2016-12-03 11:50:38 +01:00
|
|
|
Set<String> tags,
|
|
|
|
boolean skipTags,
|
|
|
|
Set<String> groups,
|
2017-07-12 18:05:35 +02:00
|
|
|
boolean skipGroups,
|
2016-12-03 11:50:38 +01:00
|
|
|
List<ResourceBean> resources,
|
2017-07-12 18:05:35 +02:00
|
|
|
boolean skipResources,
|
2016-12-03 11:50:38 +01:00
|
|
|
Map<String, List<String>> extras,
|
|
|
|
Base record,
|
|
|
|
String username,
|
2016-12-07 18:41:05 +01:00
|
|
|
Sources source // it comes from the source type e.g., "grsf-", "ram-" ..
|
2016-12-03 11:50:38 +01:00
|
|
|
){
|
|
|
|
|
|
|
|
Class<?> current = record.getClass();
|
|
|
|
do{
|
|
|
|
Field[] fields = current.getDeclaredFields();
|
|
|
|
for (Field field : fields) {
|
|
|
|
|
|
|
|
if(!skipTags)
|
|
|
|
getTagsByField(field, current, record, tags);
|
2017-07-12 18:05:35 +02:00
|
|
|
|
|
|
|
if(!skipGroups)
|
|
|
|
getGroupsByField(field, current, record, groups, source);
|
|
|
|
|
2018-01-25 15:09:20 +01:00
|
|
|
getExtrasByField(field, current, record, extras, source);
|
2017-07-12 18:05:35 +02:00
|
|
|
|
|
|
|
if(!skipResources)
|
|
|
|
getResourcesByField(field, current, record, username, resources);
|
2016-12-03 11:50:38 +01:00
|
|
|
|
|
|
|
}
|
|
|
|
}
|
|
|
|
while((current = current.getSuperclass())!=null); // start from the inherited class up to the Object.class
|
|
|
|
|
2017-07-12 18:05:35 +02:00
|
|
|
logger.debug("Tags are " + tags);
|
|
|
|
logger.debug("Groups are " + groups);
|
|
|
|
logger.debug("Extras are " + extras);
|
|
|
|
logger.debug("Resources without timeseries are " + resources);
|
2016-12-03 11:50:38 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Retrieve the list of tags for this object
|
|
|
|
*/
|
|
|
|
private static void getTagsByField(Field field, Class<?> current, Base record, Set<String> tags){
|
|
|
|
if(field.isAnnotationPresent(Tag.class)){
|
|
|
|
try{
|
|
|
|
Object f = new PropertyDescriptor(field.getName(), current).getReadMethod().invoke(record);
|
|
|
|
if(f != null){
|
|
|
|
if(f instanceof List<?>){
|
|
|
|
List asList = ((List) f);
|
|
|
|
if(!asList.isEmpty()){
|
|
|
|
|
|
|
|
logger.debug("The object annotated with @Tag is a list. Adding ... ");
|
|
|
|
|
|
|
|
int elementsToConsider = asList.size();
|
|
|
|
|
|
|
|
// check if it is a time series, in this take the last X elements
|
|
|
|
if(asList.get(0).getClass().equals(TimeSeriesBean.class)){
|
2017-10-27 14:37:41 +02:00
|
|
|
elementsToConsider = Math.min(elementsToConsider, Constants.TIME_SERIES_TAKE_LAST_VALUES);
|
2017-12-01 15:01:37 +01:00
|
|
|
for (int i = 0; i < elementsToConsider; i++) {
|
2017-10-27 14:37:41 +02:00
|
|
|
String finalTag = asList.get(i).toString().trim().replaceAll(Constants.REGEX_TAGS, "");
|
2017-12-01 13:32:51 +01:00
|
|
|
if(finalTag.length() <= TAG_MAX_SIZE)
|
|
|
|
tags.add(finalTag);
|
2016-12-03 11:50:38 +01:00
|
|
|
}
|
|
|
|
}else{
|
|
|
|
// else add all the available elements
|
|
|
|
for (int i = 0; i < elementsToConsider; i++) {
|
2017-10-27 14:37:41 +02:00
|
|
|
String finalTag = asList.get(i).toString().trim().replaceAll(Constants.REGEX_TAGS, "");
|
2017-12-01 13:32:51 +01:00
|
|
|
if(finalTag.length() <= TAG_MAX_SIZE)
|
|
|
|
tags.add(finalTag);
|
2016-12-03 11:50:38 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}else{
|
|
|
|
logger.debug("The object annotated with @Tag is a simple one. Adding ... ");
|
2017-10-27 14:37:41 +02:00
|
|
|
String finalTag = f.toString().trim().replaceAll(Constants.REGEX_TAGS, "");
|
2016-12-03 11:50:38 +01:00
|
|
|
logger.debug(finalTag);
|
2017-12-01 13:32:51 +01:00
|
|
|
if(finalTag.length() <= TAG_MAX_SIZE)
|
|
|
|
tags.add(finalTag);
|
2016-12-03 11:50:38 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
}
|
|
|
|
}catch(Exception e){
|
2016-12-04 14:53:15 +01:00
|
|
|
logger.error("Failed to read value for field " + field.getName() + " skipping", e);
|
2016-12-03 11:50:38 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Retrieve the list of groups' names for this object
|
|
|
|
*/
|
2016-12-07 18:41:05 +01:00
|
|
|
private static void getGroupsByField(Field field, Class<?> current, Base record, Set<String> groups, Sources source){
|
2016-12-03 11:50:38 +01:00
|
|
|
if(field.isAnnotationPresent(Group.class)){
|
2017-02-15 15:11:45 +01:00
|
|
|
String conditionToCheck = field.getAnnotation(Group.class).condition();
|
2017-02-20 18:44:26 +01:00
|
|
|
String groupNameOverValue = field.getAnnotation(Group.class).groupNameOverValue();
|
2016-12-03 11:50:38 +01:00
|
|
|
try{
|
2016-12-04 14:53:15 +01:00
|
|
|
Object f = new PropertyDescriptor(field.getName(), current).getReadMethod().invoke(record);
|
2016-12-03 11:50:38 +01:00
|
|
|
if(f != null){
|
|
|
|
if(f instanceof List<?>){
|
|
|
|
List asList = ((List) f);
|
|
|
|
if(!asList.isEmpty()){
|
|
|
|
|
|
|
|
logger.debug("The object annotated with @Group is a list. Adding ... ");
|
|
|
|
|
|
|
|
// else add all the available elements
|
|
|
|
for (int i = 0; i < asList.size(); i++) {
|
2017-07-12 18:05:35 +02:00
|
|
|
boolean match = conditionToCheck.isEmpty() ? true : asList.get(i).toString().trim().matches(conditionToCheck);
|
2017-02-15 15:11:45 +01:00
|
|
|
if(match){
|
2017-07-12 18:05:35 +02:00
|
|
|
String groupName = groupNameOverValue.isEmpty() ?
|
2017-02-20 18:44:26 +01:00
|
|
|
HelperMethods.getGroupNameOnCkan(source.toString().toLowerCase() + "-" + asList.get(i).toString().trim()) :
|
|
|
|
source.toString().toLowerCase() + "-" + groupNameOverValue;
|
|
|
|
groups.add(groupName);
|
2017-02-15 15:11:45 +01:00
|
|
|
}
|
2016-12-03 11:50:38 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
}
|
|
|
|
}else{
|
|
|
|
|
|
|
|
// also convert to the group name that should be on ckan
|
2017-07-12 18:05:35 +02:00
|
|
|
boolean match = conditionToCheck.isEmpty() ? true : f.toString().trim().matches(conditionToCheck);
|
2017-02-15 15:11:45 +01:00
|
|
|
if(match){
|
2017-02-20 18:44:26 +01:00
|
|
|
|
2017-07-12 18:05:35 +02:00
|
|
|
String groupName = groupNameOverValue.isEmpty() ?
|
2017-02-20 18:44:26 +01:00
|
|
|
HelperMethods.getGroupNameOnCkan(source.toString().toLowerCase() + "-" + f.toString().trim()) :
|
|
|
|
source.toString().toLowerCase() + "-" + groupNameOverValue;
|
|
|
|
groups.add(groupName);
|
|
|
|
|
2017-02-15 15:11:45 +01:00
|
|
|
}
|
2016-12-03 11:50:38 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
}catch(Exception e){
|
2016-12-04 14:53:15 +01:00
|
|
|
logger.error("Failed to read value for field " + field.getName() + " skipping", e);
|
2016-12-03 11:50:38 +01:00
|
|
|
}
|
|
|
|
}
|
2017-07-31 18:29:48 +02:00
|
|
|
|
2016-12-03 11:50:38 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Retrieve the list of extras for this object
|
2018-01-25 15:09:20 +01:00
|
|
|
* @param source
|
2016-12-03 11:50:38 +01:00
|
|
|
*/
|
2018-01-25 15:09:20 +01:00
|
|
|
private static void getExtrasByField(Field field, Class<?> current, Base record, Map<String, List<String>> extras, Sources source){
|
2016-12-03 11:50:38 +01:00
|
|
|
if(field.isAnnotationPresent(CustomField.class)){
|
|
|
|
try{
|
|
|
|
Object f = new PropertyDescriptor(field.getName(), current).getReadMethod().invoke(record);
|
|
|
|
String keyField = field.getAnnotation(CustomField.class).key();
|
2018-01-25 15:09:20 +01:00
|
|
|
|
|
|
|
// manage no connections nor similar grsf records here for GRSF records only
|
|
|
|
if(source.equals(Sources.GRSF) && keyField.equals(Constants.SIMILAR_GRSF_RECORDS_CUSTOM_KEY)){
|
|
|
|
List asList = (List)f;
|
|
|
|
if(asList == null || asList.isEmpty()){
|
|
|
|
extras.put(keyField, Arrays.asList(Constants.NO_SIMILAR_GRSF_RECORDS));
|
|
|
|
return;
|
|
|
|
}
|
2018-01-25 15:20:45 +01:00
|
|
|
|
2018-01-25 15:09:20 +01:00
|
|
|
}
|
|
|
|
|
2018-01-30 15:46:18 +01:00
|
|
|
if(source.equals(Sources.GRSF) && keyField.equals(Constants.CONNECTED_CUSTOM_KEY)){
|
2018-01-25 15:09:20 +01:00
|
|
|
List asList = (List)f;
|
|
|
|
if(asList == null || asList.isEmpty()){
|
|
|
|
extras.put(keyField, Arrays.asList(Constants.NO_CONNECTED_RECORDS));
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-12-03 11:50:38 +01:00
|
|
|
if(f != null){
|
2016-12-08 21:57:08 +01:00
|
|
|
Set<String> valuesForKey = null;
|
2016-12-03 11:50:38 +01:00
|
|
|
|
|
|
|
// check if the map already contains this key
|
|
|
|
if(extras.containsKey(keyField))
|
2016-12-08 21:57:08 +01:00
|
|
|
valuesForKey = new HashSet(extras.get(keyField));
|
|
|
|
else
|
|
|
|
valuesForKey = new HashSet<String>();
|
2016-12-03 11:50:38 +01:00
|
|
|
|
|
|
|
if(f instanceof List<?>){
|
|
|
|
logger.debug("The object " + field.getName() + " is a list and is annotated with @CustomField. Adding ...");
|
|
|
|
List asList = (List)f;
|
|
|
|
if(!asList.isEmpty()){
|
|
|
|
|
|
|
|
int elementsToConsider = asList.size();
|
|
|
|
|
|
|
|
// check if it is a time series, in this case take the last X elements
|
|
|
|
if(asList.get(0).getClass().equals(TimeSeriesBean.class)){
|
2017-10-27 14:37:41 +02:00
|
|
|
elementsToConsider = Math.min(elementsToConsider, Constants.TIME_SERIES_TAKE_LAST_VALUES);
|
2017-12-01 15:01:37 +01:00
|
|
|
for (int i = 0; i < elementsToConsider; i++) {
|
2016-12-03 11:50:38 +01:00
|
|
|
// trim and remove html
|
|
|
|
String clean = HelperMethods.removeHTML(asList.get(i).toString().trim());
|
|
|
|
valuesForKey.add(clean);
|
|
|
|
}
|
2017-09-22 11:29:45 +02:00
|
|
|
}
|
|
|
|
else
|
2016-12-03 11:50:38 +01:00
|
|
|
for (int i = 0; i < elementsToConsider; i++) {
|
|
|
|
String clean = HelperMethods.removeHTML(asList.get(i).toString().trim());
|
|
|
|
valuesForKey.add(clean);
|
|
|
|
}
|
|
|
|
}
|
2017-09-22 11:29:45 +02:00
|
|
|
|
2016-12-03 11:50:38 +01:00
|
|
|
}else{
|
|
|
|
String clean = HelperMethods.removeHTML(f.toString().trim());
|
|
|
|
valuesForKey.add(clean);
|
|
|
|
}
|
|
|
|
|
|
|
|
// add to the map
|
2016-12-08 21:57:08 +01:00
|
|
|
extras.put(keyField, new ArrayList<String>(valuesForKey));
|
2016-12-03 11:50:38 +01:00
|
|
|
}
|
|
|
|
}catch(Exception e){
|
2016-12-04 14:53:15 +01:00
|
|
|
logger.error("Failed to read value for field " + field.getName() + " skipping", e);
|
2016-12-03 11:50:38 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Retrieve the ResourceBean given the record (extract resources from Database Sources and Source of Information and others)
|
|
|
|
* @param record
|
|
|
|
* @param username
|
|
|
|
* @param tags
|
|
|
|
* @param resources
|
|
|
|
* @return
|
|
|
|
*/
|
|
|
|
private static void getResourcesByField(Field field, Class<?> current, Base record, String username, List<ResourceBean> resources){
|
|
|
|
if(field.isAnnotationPresent(CkanResource.class)){
|
|
|
|
try{
|
|
|
|
Object f = new PropertyDescriptor(field.getName(), current).getReadMethod().invoke(record);
|
|
|
|
if(f != null){
|
|
|
|
|
|
|
|
if(f instanceof List<?>){
|
|
|
|
|
|
|
|
List<Resource> listOfResources = (List<Resource>)f;
|
|
|
|
|
|
|
|
for (Resource resource : listOfResources) {
|
|
|
|
resources.add(new ResourceBean(resource.getUrl(), resource.getName().toString(), resource.getDescription(), null, username, null, null));
|
|
|
|
}
|
|
|
|
|
|
|
|
}else{
|
|
|
|
|
|
|
|
Resource res = (Resource)f;
|
|
|
|
resources.add(new ResourceBean(res.getUrl(), res.getName().toString(), res.getDescription(), null, username, null, null));
|
|
|
|
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}catch(Exception e){
|
2016-12-04 14:53:15 +01:00
|
|
|
logger.error("Failed to read value for field " + field.getName() + " skipping", e);
|
2016-12-03 11:50:38 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-07-12 18:05:35 +02:00
|
|
|
/**
|
|
|
|
* Evaluate if the user has the admin role
|
|
|
|
* Throws exception if he/she doesn't
|
|
|
|
*/
|
|
|
|
public static void hasAdminRole(String username, DataCatalogue catalogue, String apiKey, String organization) throws Exception{
|
|
|
|
|
2017-07-13 17:22:32 +02:00
|
|
|
String role = catalogue.getRoleOfUserInOrganization(username, organization, apiKey);
|
2017-07-12 18:05:35 +02:00
|
|
|
logger.info("Role of the user " + username + " is " + role + " in " + organization);
|
|
|
|
|
|
|
|
if(role == null || role.isEmpty() || !role.equalsIgnoreCase(RolesCkanGroupOrOrg.ADMIN.toString()))
|
|
|
|
throw new Exception("You are not authorized to create a product. Please check you have the Catalogue-Administrator role!");
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Check this record's name
|
|
|
|
* @param futureName
|
|
|
|
* @param catalogue
|
|
|
|
* @throws Exception on name check
|
|
|
|
*/
|
|
|
|
public static void checkName(String futureName, DataCatalogue catalogue) throws Exception {
|
|
|
|
|
|
|
|
if(!HelperMethods.isNameValid(futureName)){
|
|
|
|
throw new Exception("The 'uuid_knowledge_base' must contain only alphanumeric characters, and symbols like '.' or '_', '-'");
|
|
|
|
}else{
|
|
|
|
|
|
|
|
logger.debug("Checking if such name [" + futureName + "] doesn't exist ...");
|
|
|
|
boolean alreadyExists = catalogue.existProductWithNameOrId(futureName);
|
|
|
|
|
|
|
|
if(alreadyExists){
|
|
|
|
logger.debug("A product with 'uuid_knowledge_base' " + futureName + " already exists");
|
|
|
|
throw new Exception("A product with 'uuid_knowledge_base' " + futureName + " already exists");
|
|
|
|
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
 * Validates the record (GRSF records in the manage context only) and fills the
 * provided collections (tags, groups, custom fields, resources) from it.
 * Side effects: mutates tags, customFields, groups and resources in place, and
 * sets the domain and system type on the record itself.
 * @param apiKey the CKAN api key used to resolve referred records
 * @param context the current infrastructure context
 * @param contextServlet servlet context, used to read the manage-context parameter
 * @param sourceInPath the source declared in the request path (e.g. GRSF, a legacy source)
 * @param record the record being published
 * @param productType stock or fishery
 * @param tags collector for tag values
 * @param customFields collector for custom key/values
 * @param groups collector for group names
 * @param resources collector for resource beans
 * @param username the publisher's username
 * @param futureTitle the record's future title (currently unused here)
 * @throws Exception if validation fails or a referred record cannot be resolved
 */
public static void validateRecordAndMapFields(String apiKey, String context, ServletContext contextServlet,
		Sources sourceInPath, Common record, Product_Type productType, Set<String> tags, Map<String, List<String>> customFields,
		Set<String> groups, List<ResourceBean> resources, String username, String futureTitle) throws Exception {

	// validate the record if it is a GRSF one and set the record type and in manage context
	// Status field is needed only in the Manage context for GRSF records
	if(context.equals((String)contextServlet.getInitParameter(HelperMethods.MANAGE_CONTEX_KEY))){
		if(sourceInPath.equals(Sources.GRSF)){

			// a GRSF record must refer to at least one source record
			List<RefersToBean> refersTo = record.getRefersTo();
			if(refersTo == null || refersTo.isEmpty())
				throw new Exception("refers_to is empty for a GRSF record");

			// distinct (lowercased) source organizations of the referred records
			Set<String> sourcesList = new HashSet<String>();

			// space-separated list of source organizations, kept in iteration order
			String databaseSource = "";
			// we have the id within the catalog of this record. This means that we can retrieve the record and its system:type
			for (RefersToBean refersToBean : refersTo) {
				String sourceOrganization = getRecordOrganization(refersToBean.getId(), apiKey, context);
				resources.add(new ResourceBean(refersToBean.getUrl(), sourceOrganization , "", null, username, null, null));
				sourcesList.add(sourceOrganization.toLowerCase());
				databaseSource += sourceOrganization + " ";
			}

			// create the Database Source information
			customFields.put(Constants.GRSF_DATABASE_SOURCE, Arrays.asList(databaseSource.trim()));

			// append to groups: we need to add this record to the correspondent group of the sources
			addRecordToGroupSources(groups, new ArrayList(sourcesList), productType, sourceInPath);

			// validate
			CommonServiceUtils.validateAggregatedRecord(record);
		}
	}

	// set the domain
	record.setDomain(productType.getOrigName());

	// set system type (it is equal to the GRSF Type for GRSF records, "Legacy" for source records)
	record.setSystemType(sourceInPath.equals(Sources.GRSF) ?
			productType.equals(Product_Type.FISHERY) ? ((FisheryRecord)record).getType().getOrigName() : ((StockRecord)record).getType().getOrigName()
			: Constants.SYSTEM_TYPE_FOR_SOURCES_VALUE);

	logger.debug("Domain is " + productType.getOrigName() + " and system type " + record.getSystemType());

	// evaluate the custom fields/tags, resources and groups
	groups.add(sourceInPath.getOrigName().toLowerCase() + "-" + productType.getOrigName().toLowerCase()); //e.g. grsf-fishery
	boolean skipTags = !sourceInPath.equals(Sources.GRSF); // no tags for the Original records
	CommonServiceUtils.getTagsGroupsResourcesExtrasByRecord(tags, skipTags, groups, false, resources, false, customFields, record, username, sourceInPath);

}
|
2017-07-27 16:45:18 +02:00
|
|
|
|
2017-07-28 14:48:46 +02:00
|
|
|
/**
|
|
|
|
* Add the record to the group of sources
|
|
|
|
* @param groups
|
|
|
|
* @param sourcesList
|
|
|
|
* @param productType
|
|
|
|
* @param sourceInPath
|
|
|
|
*/
|
|
|
|
private static void addRecordToGroupSources(Set<String> groups,
|
|
|
|
List<String> sourcesList, Product_Type productType, Sources sourceInPath) {
|
|
|
|
|
|
|
|
Collections.sort(sourcesList); // be sure the name are sorted because the groups have been generated this way
|
|
|
|
String groupName = sourceInPath.getOrigName().toLowerCase() + "-" + productType.getOrigName().toLowerCase();
|
|
|
|
for (String source : sourcesList) {
|
|
|
|
groupName += "-" + source;
|
2017-07-12 18:05:35 +02:00
|
|
|
}
|
|
|
|
|
2017-07-28 14:48:46 +02:00
|
|
|
groups.add(groupName);
|
2017-07-12 18:05:35 +02:00
|
|
|
}
|
|
|
|
|
2017-07-27 16:45:18 +02:00
|
|
|
/**
|
|
|
|
* Fetch the system:type property from a record
|
|
|
|
* @param itemIdOrName
|
|
|
|
* @param apiKey
|
|
|
|
* @return null on error
|
|
|
|
* @throws Exception
|
|
|
|
*/
|
|
|
|
public static String getSystemTypeValue(String itemIdOrName, String apiKey, String context) throws Exception{
|
|
|
|
|
|
|
|
DataCatalogue catalog = HelperMethods.getDataCatalogueRunningInstance(context);
|
2017-07-31 18:29:48 +02:00
|
|
|
CkanDataset dataset = catalog.getDataset(itemIdOrName, apiKey);
|
|
|
|
if(dataset == null)
|
|
|
|
throw new Exception("Unable to find record with id or name " + itemIdOrName);
|
2017-10-27 14:37:41 +02:00
|
|
|
String systemTypeValue = dataset.getExtrasAsHashMap().get(Constants.SYSTEM_TYPE_CUSTOM_KEY);
|
2017-07-27 16:45:18 +02:00
|
|
|
if(systemTypeValue == null || systemTypeValue.isEmpty())
|
2017-10-27 14:37:41 +02:00
|
|
|
throw new Exception(Constants.SYSTEM_TYPE_CUSTOM_KEY + " property not set in record " + itemIdOrName);
|
2017-07-27 16:45:18 +02:00
|
|
|
else
|
|
|
|
return systemTypeValue;
|
|
|
|
|
2017-11-29 20:05:47 +01:00
|
|
|
}
|
2017-12-01 13:32:51 +01:00
|
|
|
|
2017-11-29 20:05:47 +01:00
|
|
|
public static String getRecordOrganization(String itemIdOrName, String apiKey, String context) throws Exception{
|
|
|
|
DataCatalogue catalog = HelperMethods.getDataCatalogueRunningInstance(context);
|
|
|
|
CkanDataset dataset = catalog.getDataset(itemIdOrName, apiKey);
|
|
|
|
if(dataset == null)
|
|
|
|
throw new Exception("Unable to find record with id or name " + itemIdOrName);
|
|
|
|
else
|
|
|
|
return dataset.getOrganization().getTitle();
|
2017-07-27 16:45:18 +02:00
|
|
|
}
|
|
|
|
|
2017-07-12 18:05:35 +02:00
|
|
|
/**
 * Actions to execute once the dataset has been updated or created: on create,
 * patches the record with its item URL; always appends the record URL to the
 * description and patches it; fills the response bean; then launches a
 * background thread that associates the record to its groups (sequentially,
 * since CKAN doesn't tolerate concurrent updates on the same record), handles
 * time series as resources and, on first publication in the public context,
 * writes a post about the new product.
 * @param datasetId the CKAN id of the dataset
 * @param futureName the dataset's name
 * @param record the published record
 * @param apiKey the CKAN api key
 * @param username the publisher's username
 * @param organization the organization the record belongs to
 * @param itemUrl the item URL (resolved here on create)
 * @param responseBean bean filled with id, item URL and knowledge-base uuid
 * @param catalogue the catalogue instance
 * @param namespaces mapping of custom-field keys to their namespaced form
 * @param groups the groups the record must be associated to
 * @param context the current infrastructure context
 * @param token the user's token, passed to the spawned threads
 * @param futureTitle the record title, used for the social post
 * @param authorFullname the author's full name, used for the social post
 * @param contextServlet servlet context, used to read the public-context parameter
 * @param isUpdated true when the dataset was updated, false when it was created
 * @param description the record description (partial; record URL is appended)
 * @throws InterruptedException declared for the thread-join machinery
 */
public static void actionsPostCreateOrUpdate(
		final String datasetId, final String futureName, final Common record, final String apiKey, final String username, final String organization, String itemUrl,
		ResponseCreationBean responseBean, final DataCatalogue catalogue,
		Map<String, String> namespaces, final Set<String> groups, final String context,
		final String token, final String futureTitle, final String authorFullname, final ServletContext contextServlet, final boolean isUpdated,
		String description) throws InterruptedException {

	// on create, we need to add the item url... the description can be set on create and update instead
	if(!isUpdated){
		itemUrl = catalogue.getUnencryptedUrlFromDatasetIdOrName(futureName);
		Map<String, List<String>> addField = new HashMap<String, List<String>>();
		// use the namespaced key if one is declared for the item-url field
		String modifiedUUIDKey = namespaces.containsKey(Constants.ITEM_URL_FIELD) ? namespaces.get(Constants.ITEM_URL_FIELD) : Constants.ITEM_URL_FIELD;
		addField.put(modifiedUUIDKey, Arrays.asList(itemUrl));
		catalogue.patchProductCustomFields(datasetId, apiKey, addField);
	}

	// update description anyway
	description += "Record URL: " + itemUrl;
	JSONObject obj = new JSONObject();
	obj.put("notes", description);
	catalogue.patchProductWithJSON(datasetId, obj, apiKey);

	// set info in the response bean
	responseBean.setId(datasetId);
	responseBean.setItemUrl(itemUrl);
	responseBean.setKbUuid(record.getUuid());

	// it is needed... (effectively-final copy for capture by the anonymous Runnable)
	final String itemUrlForThread = itemUrl;

	// fire-and-forget worker for the slow post-publication steps
	new Thread(new Runnable() {

		@Override
		public void run() {
			try {
				// manage groups (wait thread to die: ckan doesn't support too much concurrency on same record ...)
				if(!groups.isEmpty()){
					logger.info("Launching thread for association to the list of groups " + groups);
					AssociationToGroupThread threadGroups = new AssociationToGroupThread(new ArrayList<String>(groups), datasetId, organization, username, catalogue, apiKey);
					threadGroups.start();
					threadGroups.join();
				}
				// manage time series as resources
				logger.info("Launching thread for time series handling");
				new ManageTimeSeriesThread(record, futureName, username, catalogue, context, token).start();

				// write a post if the product has been published in grsf context
				if(!isUpdated && context.equals((String)contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY))){
					new WritePostCatalogueManagerThread(
							context,
							token,
							futureTitle,
							itemUrlForThread,
							true,
							new ArrayList<String>(),
							authorFullname).start();
					logger.info("Thread to write a post about the new product has been launched");
				}
			}catch (InterruptedException e) {
				logger.error("Error", e);
			}
		}
	}).start();
}
|
2017-09-19 16:42:15 +02:00
|
|
|
|
|
|
|
/**
|
|
|
|
* Extend roles to other organization
|
|
|
|
* @param username
|
|
|
|
* @param catalogue
|
|
|
|
* @param organization
|
|
|
|
* @param admin
|
|
|
|
*/
|
2018-01-25 15:20:45 +01:00
|
|
|
public static void extendRoleToOtherOrganizations(String username, DataCatalogue catalogue, String organization, RolesCkanGroupOrOrg admin) {
|
2017-09-19 16:42:15 +02:00
|
|
|
|
|
|
|
logger.debug("Checking if role extension is needed here");
|
2018-01-25 15:20:45 +01:00
|
|
|
if(extensionsCheck.containsKey(username) && extensionsCheck.get(username))
|
|
|
|
return;
|
|
|
|
else{
|
|
|
|
catalogue.assignRolesOtherOrganization(username, organization, admin);
|
|
|
|
extensionsCheck.put(username, true);
|
|
|
|
}
|
2017-09-19 16:42:15 +02:00
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
2017-10-27 16:04:47 +02:00
|
|
|
* Evaluate in which organization a record has to be published. The only exception is when grsf_admin is involved.
|
2017-09-19 16:42:15 +02:00
|
|
|
* @param organization
|
|
|
|
* @param sourceInPath
|
|
|
|
* @return
|
|
|
|
*/
|
|
|
|
public static String evaluateOrganization(String organization, Sources sourceInPath) {
|
2017-10-27 14:37:41 +02:00
|
|
|
if(sourceInPath.equals(Sources.GRSF) && organization.equals(Constants.GRSF_ADMIN_ORGANIZATION_NAME))
|
|
|
|
return Constants.GRSF_ADMIN_ORGANIZATION_NAME;
|
2017-09-19 16:42:15 +02:00
|
|
|
else
|
|
|
|
return sourceInPath.getOrigName().toLowerCase();
|
|
|
|
}
|
2017-07-12 18:05:35 +02:00
|
|
|
}
|