2016-10-07 18:23:23 +02:00
|
|
|
package org.gcube.data_catalogue.grsf_publish_ws.utils;
|
|
|
|
|
2016-10-08 23:10:25 +02:00
|
|
|
import java.beans.PropertyDescriptor;
|
2016-10-10 18:50:11 +02:00
|
|
|
import java.io.BufferedReader;
|
2016-11-28 17:43:28 +01:00
|
|
|
import java.io.File;
|
2016-10-10 18:50:11 +02:00
|
|
|
import java.io.InputStreamReader;
|
2016-11-30 14:14:02 +01:00
|
|
|
import java.io.UnsupportedEncodingException;
|
2016-10-08 23:10:25 +02:00
|
|
|
import java.lang.reflect.Field;
|
2016-10-13 11:46:42 +02:00
|
|
|
import java.util.ArrayList;
|
2016-10-11 11:39:25 +02:00
|
|
|
import java.util.HashMap;
|
2016-10-08 23:10:25 +02:00
|
|
|
import java.util.List;
|
|
|
|
import java.util.Map;
|
|
|
|
|
2016-10-10 18:50:11 +02:00
|
|
|
import javax.servlet.ServletContext;
|
|
|
|
|
2016-11-28 17:43:28 +01:00
|
|
|
import org.gcube.common.homelibrary.home.exceptions.InternalErrorException;
|
|
|
|
import org.gcube.common.homelibrary.home.workspace.WorkspaceFolder;
|
|
|
|
import org.gcube.common.homelibrary.home.workspace.WorkspaceItem;
|
|
|
|
import org.gcube.common.homelibrary.home.workspace.exceptions.InsufficientPrivilegesException;
|
|
|
|
import org.gcube.common.homelibrary.home.workspace.exceptions.ItemAlreadyExistException;
|
|
|
|
import org.gcube.common.homelibrary.home.workspace.folder.items.ExternalFile;
|
2016-11-01 22:35:52 +01:00
|
|
|
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CkanResource;
|
2016-10-08 23:10:25 +02:00
|
|
|
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField;
|
|
|
|
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Group;
|
|
|
|
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Tag;
|
|
|
|
import org.gcube.data_catalogue.grsf_publish_ws.json.input.Common;
|
2016-10-13 11:46:42 +02:00
|
|
|
import org.gcube.data_catalogue.grsf_publish_ws.json.input.Resource;
|
2016-11-24 17:53:50 +01:00
|
|
|
import org.gcube.data_catalogue.grsf_publish_ws.json.input.TimeSeriesBean;
|
2016-10-10 18:50:11 +02:00
|
|
|
import org.gcube.data_catalogue.grsf_publish_ws.utils.groups.Status;
|
2016-10-08 22:16:31 +02:00
|
|
|
import org.gcube.datacatalogue.ckanutillibrary.DataCatalogue;
|
|
|
|
import org.gcube.datacatalogue.ckanutillibrary.DataCatalogueFactory;
|
2016-10-10 18:50:11 +02:00
|
|
|
import org.gcube.datacatalogue.ckanutillibrary.DataCatalogueImpl;
|
2016-10-13 11:46:42 +02:00
|
|
|
import org.gcube.datacatalogue.ckanutillibrary.models.ResourceBean;
|
2016-11-30 11:43:30 +01:00
|
|
|
import org.jsoup.Jsoup;
|
2016-11-30 14:14:02 +01:00
|
|
|
import org.jsoup.safety.Whitelist;
|
2016-10-08 23:10:25 +02:00
|
|
|
import org.slf4j.LoggerFactory;
|
2016-10-08 22:16:31 +02:00
|
|
|
|
2016-10-10 18:50:11 +02:00
|
|
|
import eu.trentorise.opendata.jackan.internal.org.apache.http.HttpResponse;
|
|
|
|
import eu.trentorise.opendata.jackan.internal.org.apache.http.client.methods.HttpGet;
|
|
|
|
import eu.trentorise.opendata.jackan.internal.org.apache.http.impl.client.CloseableHttpClient;
|
|
|
|
import eu.trentorise.opendata.jackan.internal.org.apache.http.impl.client.HttpClientBuilder;
|
2016-10-11 11:39:25 +02:00
|
|
|
import eu.trentorise.opendata.jackan.model.CkanLicense;
|
2016-10-10 18:50:11 +02:00
|
|
|
|
2016-10-07 18:23:23 +02:00
|
|
|
|
|
|
|
/**
|
|
|
|
* Helper methods
|
|
|
|
* @author Costantino Perciante at ISTI-CNR (costantino.perciante@isti.cnr.it)
|
|
|
|
*/
|
2016-11-27 12:33:41 +01:00
|
|
|
@SuppressWarnings({"unchecked","rawtypes"})
|
2016-10-07 18:23:23 +02:00
|
|
|
public abstract class HelperMethods {
|
|
|
|
|
2016-10-08 23:10:25 +02:00
|
|
|
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(HelperMethods.class);
|
|
|
|
|
2016-10-10 18:50:11 +02:00
|
|
|
// to be retrieved from the web.xml
|
2016-11-30 18:29:44 +01:00
|
|
|
public static final String PENDING_CONTEX_KEY = "PendingContext";
|
|
|
|
public static final String CONFIRMED_CONTEX_KEY = "ConfirmedContext";
|
2016-11-25 13:56:27 +01:00
|
|
|
private static final int TIME_SERIES_TAKE_LAST_VALUES = 5;
|
2016-11-28 17:43:28 +01:00
|
|
|
private static final String CSV_MIME = "text/csv";
|
|
|
|
private static final String PATH_SEPARATOR = "/";
|
2016-11-29 12:44:36 +01:00
|
|
|
private static final String REGEX_TAGS = "[^\\s\\w-_.]";
|
2016-10-10 18:50:11 +02:00
|
|
|
|
2016-11-27 12:33:41 +01:00
|
|
|
|
2016-10-08 22:16:31 +02:00
|
|
|
/**
|
|
|
|
* Convert a group name to its id on ckan
|
|
|
|
* @param origName
|
|
|
|
* @return
|
|
|
|
*/
|
2016-10-07 18:23:23 +02:00
|
|
|
public static String getGroupNameOnCkan(String origName){
|
2016-10-08 23:10:25 +02:00
|
|
|
|
2016-10-07 18:23:23 +02:00
|
|
|
if(origName == null)
|
|
|
|
throw new IllegalArgumentException("origName cannot be null");
|
2016-10-08 23:10:25 +02:00
|
|
|
|
2016-10-10 18:50:11 +02:00
|
|
|
String modified = origName.trim().toLowerCase().replaceAll("[^A-Za-z0-9-]", "-");
|
|
|
|
if(modified.startsWith("-"))
|
|
|
|
modified = modified.substring(1);
|
|
|
|
if(modified.endsWith("-"))
|
|
|
|
modified = modified.substring(0, modified.length() -1);
|
2016-11-30 18:43:57 +01:00
|
|
|
|
|
|
|
logger.info("Group name generated is " + modified);
|
2016-10-11 11:39:25 +02:00
|
|
|
|
2016-10-10 18:50:11 +02:00
|
|
|
return modified;
|
2016-10-07 18:23:23 +02:00
|
|
|
}
|
2016-10-08 23:10:25 +02:00
|
|
|
|
2016-10-08 22:16:31 +02:00
|
|
|
/**
|
|
|
|
* Retrieve the running instance of the data catalogue for this scope
|
|
|
|
* @return
|
|
|
|
* @throws Exception
|
|
|
|
*/
|
2016-11-04 16:26:19 +01:00
|
|
|
public static DataCatalogue getDataCatalogueRunningInstance(String scope){
|
2016-10-08 23:10:25 +02:00
|
|
|
|
2016-10-10 18:50:11 +02:00
|
|
|
try{
|
|
|
|
DataCatalogueImpl instance = DataCatalogueFactory.getFactory().getUtilsPerScope(scope);
|
|
|
|
return instance;
|
|
|
|
}catch(Exception e){
|
|
|
|
logger.error("Failed to instanciate data catalogue lib", e);
|
|
|
|
}
|
|
|
|
|
|
|
|
return null;
|
2016-10-08 23:10:25 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Retrieve the list of tags for this object
|
|
|
|
*/
|
|
|
|
public static void getTags(List<String> tags, Common record){
|
2016-10-13 14:06:15 +02:00
|
|
|
|
2016-10-08 23:10:25 +02:00
|
|
|
Class<?> current = record.getClass();
|
|
|
|
do{
|
|
|
|
Field[] fields = current.getDeclaredFields();
|
|
|
|
for (Field field : fields) {
|
|
|
|
if(field.isAnnotationPresent(Tag.class)){
|
|
|
|
try{
|
|
|
|
Object f = new PropertyDescriptor(field.getName(), current).getReadMethod().invoke(record);
|
|
|
|
if(f != null){
|
2016-11-04 16:26:19 +01:00
|
|
|
if(f instanceof List<?>){
|
|
|
|
List asList = ((List) f);
|
2016-11-25 13:56:27 +01:00
|
|
|
if(!asList.isEmpty()){
|
|
|
|
|
|
|
|
logger.debug("The object annotated with @Tag is a list. Adding ... ");
|
|
|
|
|
|
|
|
int elementsToConsider = asList.size();
|
|
|
|
|
|
|
|
// check if it is a time series, in this take the last X elements
|
|
|
|
if(asList.get(0).getClass().equals(TimeSeriesBean.class)){
|
|
|
|
elementsToConsider = Math.min(elementsToConsider, TIME_SERIES_TAKE_LAST_VALUES);
|
|
|
|
|
|
|
|
for (int i = (asList.size() - elementsToConsider); i < asList.size(); i++) {
|
2016-11-29 12:44:36 +01:00
|
|
|
String finalTag = asList.get(i).toString().trim().replaceAll(REGEX_TAGS, "");
|
|
|
|
logger.debug(finalTag);
|
|
|
|
tags.add(finalTag);
|
2016-11-25 13:56:27 +01:00
|
|
|
}
|
|
|
|
}else{
|
|
|
|
// else add all the available elements
|
|
|
|
for (int i = 0; i < elementsToConsider; i++) {
|
2016-11-29 12:44:36 +01:00
|
|
|
String finalTag = asList.get(i).toString().trim().replaceAll(REGEX_TAGS, "");
|
|
|
|
logger.debug(finalTag);
|
|
|
|
tags.add(finalTag);
|
2016-11-25 13:56:27 +01:00
|
|
|
}
|
|
|
|
}
|
2016-11-04 16:26:19 +01:00
|
|
|
}
|
|
|
|
}else{
|
|
|
|
logger.debug("The object annotated with @Tag is a simple one. Adding ... ");
|
2016-11-29 12:44:36 +01:00
|
|
|
String finalTag = f.toString().trim().replaceAll(REGEX_TAGS, "");
|
|
|
|
logger.debug(finalTag);
|
|
|
|
tags.add(finalTag);
|
2016-11-04 16:26:19 +01:00
|
|
|
}
|
2016-11-25 13:56:27 +01:00
|
|
|
|
2016-10-08 23:10:25 +02:00
|
|
|
}
|
|
|
|
}catch(Exception e){
|
|
|
|
logger.error("Failed ot read value for field " + field.getName() + " skipping", e);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2016-11-06 21:21:11 +01:00
|
|
|
while((current = current.getSuperclass())!=null); // start from the inherited class up to the Object.class
|
|
|
|
|
2016-11-04 16:26:19 +01:00
|
|
|
logger.info("Tags are " + tags);
|
2016-10-08 22:16:31 +02:00
|
|
|
}
|
2016-10-08 23:10:25 +02:00
|
|
|
|
|
|
|
/**
|
2016-10-10 18:50:11 +02:00
|
|
|
* Retrieve the list of groups' names for this object
|
2016-10-08 23:10:25 +02:00
|
|
|
*/
|
|
|
|
public static void getGroups(List<String> groups, Common record){
|
2016-10-13 14:06:15 +02:00
|
|
|
|
2016-10-08 23:10:25 +02:00
|
|
|
Class<?> current = record.getClass();
|
|
|
|
do{
|
|
|
|
Field[] fields = current.getDeclaredFields();
|
|
|
|
for (Field field : fields) {
|
|
|
|
if(field.isAnnotationPresent(Group.class)){
|
|
|
|
try{
|
|
|
|
Object f = new PropertyDescriptor(field.getName(), current).getReadMethod().invoke(record);
|
|
|
|
if(f != null){
|
|
|
|
|
2016-10-09 12:52:33 +02:00
|
|
|
// also convert to the group name that should be on ckan
|
2016-10-13 14:06:15 +02:00
|
|
|
String groupName = getGroupNameOnCkan(f.toString().trim());
|
2016-10-13 11:46:42 +02:00
|
|
|
if(!groups.contains(groupName))
|
|
|
|
groups.add(groupName);
|
2016-10-08 23:10:25 +02:00
|
|
|
|
|
|
|
}
|
2016-11-30 18:29:44 +01:00
|
|
|
|
|
|
|
// check if the field is an enumerator, and the enum class is also annotated with @Group
|
|
|
|
if(field.getClass().isEnum() && field.getClass().isAnnotationPresent(Group.class)){
|
|
|
|
|
|
|
|
// extract the name from the enum class and add it to the groups
|
|
|
|
// also convert to the group name that should be on ckan
|
2016-11-30 18:43:24 +01:00
|
|
|
String groupName = getGroupNameOnCkan(field.getClass().getSimpleName());
|
2016-11-30 18:29:44 +01:00
|
|
|
if(!groups.contains(groupName))
|
|
|
|
groups.add(groupName);
|
|
|
|
|
|
|
|
}
|
|
|
|
|
2016-10-08 23:10:25 +02:00
|
|
|
}catch(Exception e){
|
|
|
|
logger.error("Failed ot read value for field " + field.getName() + " skipping", e);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2016-11-06 21:21:11 +01:00
|
|
|
while((current = current.getSuperclass())!=null); // start from the inherited class up to the Object.class
|
2016-10-13 11:46:42 +02:00
|
|
|
|
2016-11-04 16:26:19 +01:00
|
|
|
logger.info("Groups is " + groups);
|
2016-10-08 23:10:25 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Retrieve the list of extras for this object
|
|
|
|
*/
|
2016-10-27 11:40:19 +02:00
|
|
|
public static void getExtras(Map<String, List<String>> extras, Common record){
|
2016-10-13 14:06:15 +02:00
|
|
|
|
2016-10-08 23:10:25 +02:00
|
|
|
Class<?> current = record.getClass();
|
|
|
|
do{
|
|
|
|
Field[] fields = current.getDeclaredFields();
|
|
|
|
for (Field field : fields) {
|
|
|
|
if(field.isAnnotationPresent(CustomField.class)){
|
|
|
|
try{
|
|
|
|
Object f = new PropertyDescriptor(field.getName(), current).getReadMethod().invoke(record);
|
2016-11-02 16:02:28 +01:00
|
|
|
String keyField = field.getAnnotation(CustomField.class).key();
|
2016-10-08 23:10:25 +02:00
|
|
|
if(f != null){
|
2016-11-06 21:21:11 +01:00
|
|
|
List<String> valuesForKey = new ArrayList<String>();
|
|
|
|
|
|
|
|
// check if the map already contains this key
|
|
|
|
if(extras.containsKey(keyField))
|
|
|
|
valuesForKey = extras.get(keyField);
|
|
|
|
|
2016-11-04 16:26:19 +01:00
|
|
|
if(f instanceof List<?>){
|
|
|
|
logger.debug("The object " + field.getName() + " is a list and is annotated with @CustomField. Adding ...");
|
|
|
|
List asList = (List)f;
|
2016-11-25 13:56:27 +01:00
|
|
|
if(!asList.isEmpty()){
|
|
|
|
|
|
|
|
int elementsToConsider = asList.size();
|
2016-10-27 11:40:19 +02:00
|
|
|
|
2016-11-25 13:56:27 +01:00
|
|
|
// check if it is a time series, in this case take the last X elements
|
|
|
|
if(asList.get(0).getClass().equals(TimeSeriesBean.class)){
|
|
|
|
elementsToConsider = Math.min(elementsToConsider, TIME_SERIES_TAKE_LAST_VALUES);
|
|
|
|
|
|
|
|
for (int i = (asList.size() - elementsToConsider); i < asList.size(); i++) {
|
2016-11-30 11:43:30 +01:00
|
|
|
// trim and remove html
|
|
|
|
String clean = HelperMethods.removeHTML(asList.get(i).toString().trim());
|
|
|
|
logger.debug(clean);
|
|
|
|
valuesForKey.add(clean);
|
2016-11-25 13:56:27 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
}else{
|
|
|
|
for (int i = 0; i < elementsToConsider; i++) {
|
2016-11-30 11:43:30 +01:00
|
|
|
String clean = HelperMethods.removeHTML(asList.get(i).toString().trim());
|
|
|
|
logger.debug(clean);
|
|
|
|
valuesForKey.add(clean);
|
2016-11-25 13:56:27 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2016-10-27 11:40:19 +02:00
|
|
|
}else{
|
2016-11-30 11:43:30 +01:00
|
|
|
String clean = HelperMethods.removeHTML(f.toString().trim());
|
|
|
|
valuesForKey.add(clean);
|
2016-10-27 11:40:19 +02:00
|
|
|
}
|
2016-10-08 23:10:25 +02:00
|
|
|
|
2016-11-06 21:21:11 +01:00
|
|
|
// add to the map
|
|
|
|
extras.put(keyField, valuesForKey);
|
2016-10-08 23:10:25 +02:00
|
|
|
}
|
|
|
|
}catch(Exception e){
|
|
|
|
logger.error("Failed ot read value for field " + field.getName() + " skipping", e);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2016-11-06 21:21:11 +01:00
|
|
|
while((current = current.getSuperclass())!=null); // start from the inherited class up to the Object.class
|
|
|
|
|
2016-11-04 16:26:19 +01:00
|
|
|
logger.info("Extras is " + extras);
|
2016-10-08 23:10:25 +02:00
|
|
|
}
|
|
|
|
|
2016-10-10 18:50:11 +02:00
|
|
|
/**
|
|
|
|
* Retrieve the organization name in which the user wants to publish starting from the scope
|
|
|
|
* @param contextInWhichPublish
|
|
|
|
* @return
|
|
|
|
*/
|
|
|
|
public static String retrieveOrgNameFromScope(String scope) {
|
|
|
|
|
|
|
|
String[] splittedScope = scope.split("/");
|
|
|
|
return splittedScope[splittedScope.length - 1].toLowerCase();
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Return the context in which the user wants to publish by the status information
|
|
|
|
* @param status
|
|
|
|
* @param contextServlet
|
|
|
|
* @return
|
|
|
|
*/
|
|
|
|
public static String getContextFromStatus(Status status, ServletContext contextServlet) {
|
|
|
|
|
|
|
|
String toReturn = null;
|
|
|
|
switch(status){
|
|
|
|
case Confirmed :
|
|
|
|
toReturn = (String)contextServlet.getInitParameter(CONFIRMED_CONTEX_KEY);
|
|
|
|
break;
|
|
|
|
case Pending:
|
|
|
|
toReturn = (String)contextServlet.getInitParameter(PENDING_CONTEX_KEY);
|
|
|
|
break;
|
|
|
|
default: break;
|
|
|
|
|
|
|
|
}
|
|
|
|
logger.debug("Context evaluated is " + toReturn);
|
|
|
|
return toReturn;
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Validate the name the product will have
|
|
|
|
* @param futureName
|
|
|
|
* @return
|
|
|
|
*/
|
2016-11-06 21:21:11 +01:00
|
|
|
public static boolean isNameValid(String futureName) {
|
2016-10-10 18:50:11 +02:00
|
|
|
|
|
|
|
if(futureName == null || futureName.isEmpty())
|
|
|
|
return false;
|
|
|
|
else{
|
|
|
|
return futureName.matches("[\\sA-Za-z0-9_.-]+");
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Retrieve the user's email given his/her username
|
|
|
|
* @param context
|
|
|
|
* @param token
|
|
|
|
* @return
|
|
|
|
* @throws Exception
|
|
|
|
*/
|
|
|
|
public static String getUserEmail(String context, String token){
|
|
|
|
|
2016-11-06 21:21:11 +01:00
|
|
|
String baseUrl = new ServiceEndPointReaderSocial(context).getBasePath();
|
|
|
|
String url = baseUrl + "users/getUserEmail?gcube-token=" + token;
|
|
|
|
logger.debug("Request url is " + url);
|
|
|
|
return executGETHttpRequest(url, 200);
|
2016-10-10 18:50:11 +02:00
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Retrieve the user's fullname given his/her username
|
|
|
|
* @param context
|
|
|
|
* @param token
|
|
|
|
* @return
|
|
|
|
* @throws Exception
|
|
|
|
*/
|
|
|
|
public static String getUserFullname(String context, String token){
|
|
|
|
|
2016-11-06 21:21:11 +01:00
|
|
|
String baseUrl = new ServiceEndPointReaderSocial(context).getBasePath();
|
|
|
|
String url = baseUrl + "users/getUserFullname?gcube-token=" + token;
|
|
|
|
logger.debug("Request url is " + url);
|
|
|
|
return executGETHttpRequest(url, 200);
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Execute the GET http request at this url, and return the result as string
|
|
|
|
* @return
|
|
|
|
*/
|
|
|
|
private static String executGETHttpRequest(String url, int expectedCodeOnSuccess){
|
|
|
|
|
2016-10-10 18:50:11 +02:00
|
|
|
try(CloseableHttpClient client = HttpClientBuilder.create().build();){
|
|
|
|
|
|
|
|
HttpGet getRequest = new HttpGet(url);
|
|
|
|
HttpResponse response = client.execute(getRequest);
|
|
|
|
|
2016-11-06 21:21:11 +01:00
|
|
|
if (response.getStatusLine().getStatusCode() != expectedCodeOnSuccess) {
|
2016-10-10 18:50:11 +02:00
|
|
|
throw new RuntimeException("Failed : HTTP error code : "
|
|
|
|
+ response.getStatusLine().getStatusCode());
|
|
|
|
}
|
|
|
|
|
|
|
|
BufferedReader br = new BufferedReader(
|
|
|
|
new InputStreamReader((response.getEntity().getContent())));
|
|
|
|
|
2016-11-06 21:21:11 +01:00
|
|
|
String res = "";
|
2016-10-10 18:50:11 +02:00
|
|
|
String temp = null;
|
|
|
|
|
|
|
|
while ((temp = br.readLine()) != null) {
|
2016-11-06 21:21:11 +01:00
|
|
|
res += temp;
|
2016-10-10 18:50:11 +02:00
|
|
|
}
|
|
|
|
|
2016-11-06 21:21:11 +01:00
|
|
|
return res;
|
2016-10-10 18:50:11 +02:00
|
|
|
}catch(Exception e){
|
2016-11-30 11:43:30 +01:00
|
|
|
logger.error("error while performing get method " + e.toString());
|
2016-10-10 18:50:11 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
return null;
|
2016-11-06 21:21:11 +01:00
|
|
|
|
2016-10-10 18:50:11 +02:00
|
|
|
}
|
|
|
|
|
2016-10-11 11:39:25 +02:00
|
|
|
/**
|
|
|
|
* Retrieve the list of ckan licenses and build up a map <license_id, license_title>
|
|
|
|
* @return
|
|
|
|
* @throws Exception
|
|
|
|
*/
|
2016-11-06 21:21:11 +01:00
|
|
|
public static Map<String, String> getLicenses(DataCatalogue catalogue) throws Exception {
|
2016-10-11 11:39:25 +02:00
|
|
|
|
|
|
|
Map<String, String> toReturn = new HashMap<String, String>();
|
|
|
|
List<CkanLicense> licenses = catalogue.getLicenses();
|
|
|
|
|
|
|
|
for (CkanLicense ckanLicense : licenses) {
|
|
|
|
toReturn.put(ckanLicense.getId(), ckanLicense.getTitle());
|
|
|
|
}
|
|
|
|
return toReturn;
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Check that the given license id is in CKAN
|
|
|
|
* @param license id to check
|
|
|
|
* @return
|
|
|
|
* @throws Exception
|
|
|
|
*/
|
2016-11-06 21:21:11 +01:00
|
|
|
public static boolean existsLicenseId(String license, DataCatalogue catalogue) throws Exception {
|
2016-10-11 11:39:25 +02:00
|
|
|
|
2016-11-04 16:26:19 +01:00
|
|
|
List<CkanLicense> licenses = catalogue.getLicenses();
|
|
|
|
for (CkanLicense ckanLicense : licenses) {
|
|
|
|
if(ckanLicense.getId().equals(license))
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
return false;
|
2016-10-11 11:39:25 +02:00
|
|
|
}
|
|
|
|
|
2016-10-13 11:46:42 +02:00
|
|
|
/**
|
2016-11-01 22:35:52 +01:00
|
|
|
* Retrieve the ResourceBean given the record (extract resources from Database Sources and Source of Information and others)
|
2016-10-13 11:46:42 +02:00
|
|
|
* @param record
|
2016-11-01 22:35:52 +01:00
|
|
|
* @param username
|
|
|
|
* @param tags
|
|
|
|
* @param groups
|
|
|
|
* @return
|
2016-10-13 11:46:42 +02:00
|
|
|
*/
|
2016-11-01 22:35:52 +01:00
|
|
|
public static List<ResourceBean> getResourcesFromBean(Common record, String username, List<String> tags, List<String> groups){
|
2016-11-06 21:21:11 +01:00
|
|
|
|
2016-10-13 11:46:42 +02:00
|
|
|
List<ResourceBean> toReturn = new ArrayList<ResourceBean>();
|
2016-11-01 22:35:52 +01:00
|
|
|
Class<?> current = record.getClass();
|
|
|
|
do{
|
|
|
|
Field[] fields = current.getDeclaredFields();
|
|
|
|
for (Field field : fields) {
|
|
|
|
if(field.isAnnotationPresent(CkanResource.class)){
|
|
|
|
try{
|
|
|
|
Object f = new PropertyDescriptor(field.getName(), current).getReadMethod().invoke(record);
|
|
|
|
if(f != null){
|
|
|
|
|
|
|
|
if(f instanceof List<?>){
|
|
|
|
|
|
|
|
List<Resource> listOfResources = (List<Resource>)f;
|
|
|
|
|
|
|
|
for (Resource resource : listOfResources) {
|
2016-11-04 16:26:19 +01:00
|
|
|
toReturn.add(new ResourceBean(resource.getUrl(), resource.getName().toString(), resource.getDescription(), null, username, null, null));
|
2016-11-01 22:35:52 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
}else{
|
|
|
|
|
|
|
|
Resource res = (Resource)f;
|
2016-11-04 16:26:19 +01:00
|
|
|
toReturn.add(new ResourceBean(res.getUrl(), res.getName().toString(), res.getDescription(), null, username, null, null));
|
2016-11-01 22:35:52 +01:00
|
|
|
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}catch(Exception e){
|
|
|
|
logger.error("Failed ot read value for field " + field.getName() + " skipping", e);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2016-10-13 11:46:42 +02:00
|
|
|
}
|
2016-11-06 21:21:11 +01:00
|
|
|
while((current = current.getSuperclass())!=null); // iterate from the inherited class up to the Object.class
|
2016-11-01 22:35:52 +01:00
|
|
|
|
2016-11-04 16:26:19 +01:00
|
|
|
logger.info("Returning resources " + toReturn);
|
2016-10-13 11:46:42 +02:00
|
|
|
return toReturn;
|
|
|
|
}
|
2016-11-28 17:43:28 +01:00
|
|
|
|
|
|
|
/**
|
|
|
|
* Upload a file in the shared folder
|
|
|
|
* @param resourceFormatFolder
|
|
|
|
* @param resourceToAttachName
|
|
|
|
* @param description
|
|
|
|
* @param csvFile
|
|
|
|
* @return
|
|
|
|
*/
|
|
|
|
public static ExternalFile uploadExternalFile(WorkspaceFolder resourceFormatFolder, String resourceToAttachName, String description, File csvFile) {
|
|
|
|
try {
|
|
|
|
return resourceFormatFolder.createExternalFileItem(resourceToAttachName, description, CSV_MIME, csvFile);
|
|
|
|
} catch (InsufficientPrivilegesException | ItemAlreadyExistException
|
|
|
|
| InternalErrorException e) {
|
|
|
|
logger.error("Failed to upload the file into the workspace shared folder for " + resourceToAttachName, e);
|
|
|
|
}
|
|
|
|
return null;
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
 * Create subfolders in cascade under the given folder, returning the deepest
 * one; subfolders that already exist are simply reused.
 *
 * @param folder the root folder (must not be null)
 * @param subPath the relative path, e.g. "a/b/c" (must not be null/empty)
 * @return the deepest folder, or null if an error occurred
 * @throws IllegalArgumentException when folder is null or subPath is null/empty
 */
public static WorkspaceFolder createOrGetSubFoldersByPath(WorkspaceFolder folder, String subPath){

	WorkspaceFolder parentFolder = folder;
	if(folder == null)
		throw new IllegalArgumentException("Root folder is null!");

	if(subPath == null || subPath.isEmpty())
		throw new IllegalArgumentException("subPath is null/empty!");

	try{
		// normalize: drop one leading and one trailing separator, if present
		if(subPath.startsWith(PATH_SEPARATOR))
			subPath = subPath.replaceFirst(PATH_SEPARATOR, "");

		if(subPath.endsWith(PATH_SEPARATOR))
			subPath = subPath.substring(0, subPath.length() - 1);

		logger.debug("Splitting path " + subPath);

		String[] splittedPaths = subPath.split(PATH_SEPARATOR);

		for (String path : splittedPaths) {
			WorkspaceFolder createdFolder = getFolderOrCreate(parentFolder, path, "");
			// getFolderOrCreate returns null on failure: bail out explicitly
			// instead of hitting a NullPointerException on getPath()
			if(createdFolder == null)
				throw new Exception("Unable to create or get subfolder " + path);
			logger.debug("Created subfolder with path " + createdFolder.getPath());
			parentFolder = createdFolder;
		}

	}catch(Exception e){
		// keep the stack trace (the old code logged only the message)
		logger.error("Failed to create the subfolders by path " + subPath, e);
		return null;
	}

	return parentFolder;
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Get a folder within the catalogue folder or create it if it doesn't exist.
|
|
|
|
* @return
|
|
|
|
*/
|
|
|
|
public static WorkspaceFolder getFolderOrCreate(WorkspaceFolder folder, String relativePath, String descriptionFolder){
|
|
|
|
WorkspaceFolder result = null;
|
|
|
|
try {
|
|
|
|
WorkspaceItem foundFolder = folder.find(relativePath);
|
|
|
|
if(foundFolder != null && foundFolder.isFolder())
|
|
|
|
result = (WorkspaceFolder)foundFolder;
|
|
|
|
|
|
|
|
if(result != null)
|
|
|
|
logger.debug("Folder found with name " + result.getName() + ", it has id " + result.getId());
|
|
|
|
else
|
|
|
|
throw new Exception("There is no folder with name " + relativePath + " under folder " + folder.getName());
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.debug("Probably the folder doesn't exist");
|
|
|
|
try{
|
|
|
|
result = folder.createFolder(relativePath, descriptionFolder);
|
|
|
|
} catch (InsufficientPrivilegesException | InternalErrorException | ItemAlreadyExistException e2) {
|
|
|
|
logger.error("Failed to get or generate this folder", e2);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return result;
|
|
|
|
}
|
2016-11-30 14:14:02 +01:00
|
|
|
|
2016-11-30 11:43:30 +01:00
|
|
|
/**
|
|
|
|
* Strip out HTML code
|
|
|
|
* @param html
|
|
|
|
* @return
|
2016-11-30 14:14:02 +01:00
|
|
|
* @throws UnsupportedEncodingException
|
2016-11-30 11:43:30 +01:00
|
|
|
*/
|
|
|
|
public static String removeHTML(String html) {
|
2016-11-30 11:50:47 +01:00
|
|
|
if(html == null || html.isEmpty())
|
|
|
|
return html;
|
2016-11-30 14:14:02 +01:00
|
|
|
|
|
|
|
logger.info("Incoming text is " + html);
|
|
|
|
|
|
|
|
// remove html and clean
|
2016-11-30 11:43:30 +01:00
|
|
|
String withoutHTML = Jsoup.parse(html).text();
|
2016-11-30 14:14:02 +01:00
|
|
|
withoutHTML = Jsoup.clean(withoutHTML, Whitelist.basic());
|
|
|
|
|
|
|
|
// TODO ...
|
|
|
|
// could contain non ascii chars ... try to convert them to question marks and then remove them
|
|
|
|
// try{
|
|
|
|
// String strippedWithQuestionMarks = Normalizer.normalize(withoutHTML, Normalizer.Form.NFD);
|
|
|
|
// String regex = "[\\p{InCombiningDiacriticalMarks}\\p{IsLm}\\p{IsSk}]+";
|
|
|
|
// strippedWithQuestionMarks = new String(strippedWithQuestionMarks.replaceAll(regex, "").getBytes("ascii"), "ascii");
|
|
|
|
// strippedWithQuestionMarks = strippedWithQuestionMarks.replaceAll("[?]+", " ");
|
|
|
|
// withoutHTML = strippedWithQuestionMarks;
|
|
|
|
// }catch(UnsupportedEncodingException e){
|
|
|
|
// logger.warn("Unable to convert to question marks non ascii chars..", e);
|
|
|
|
// // remove non ascii directly
|
|
|
|
// withoutHTML = withoutHTML.replaceAll("[^\\x00-\\x7F]", " ");
|
|
|
|
// }
|
|
|
|
|
|
|
|
// remove non ascii chars ...
|
|
|
|
withoutHTML = withoutHTML.replaceAll("[^\\p{ASCII}]", " ");
|
|
|
|
logger.info("Without html is " + withoutHTML);
|
|
|
|
return withoutHTML;
|
2016-11-30 11:43:30 +01:00
|
|
|
}
|
2016-10-07 18:23:23 +02:00
|
|
|
}
|