minor fixes
git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-catalogue/grsf-publisher-ws@151058 82a268e6-3cf1-43bd-a215-b396298e98cf
parent 823c3c8f28
commit f0606f9743
@@ -101,8 +101,6 @@ public class Base {
 		this.uuid = uuid;
 	}
-
-
 
 	public String getCatalogId() {
 		return catalogId;
 	}
@@ -255,6 +255,10 @@ public class Common extends Base{
 		this.dataOwner = dataOwner;
 	}
+
+	public List<TimeSeriesBean<Void, Void>> getReferenceYear() {
+		return referenceYear;
+	}
 
 	@Override
 	public String toString() {
 		return "Common [dataOwner=" + dataOwner + ", databaseSources="
@@ -4,6 +4,7 @@ import java.beans.PropertyDescriptor;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
@@ -485,6 +486,15 @@ public class CommonServiceUtils {
 			Map<String, String> namespaces, Set<String> groups, String context,
 			String token, String futureTitle, String authorFullname, ServletContext contextServlet, boolean isUpdated) throws InterruptedException {
+
+		// on create, we need to add the item url
+		if(!isUpdated){
+			itemUrl = catalogue.getUnencryptedUrlFromDatasetIdOrName(futureName);
+			Map<String, List<String>> addField = new HashMap<String, List<String>>();
+			String modifiedUUIDKey = namespaces.containsKey(CommonServiceUtils.ITEM_URL_FIELD) ? namespaces.get(CommonServiceUtils.ITEM_URL_FIELD) : CommonServiceUtils.ITEM_URL_FIELD;
+			addField.put(modifiedUUIDKey, Arrays.asList(itemUrl));
+			catalogue.patchProductCustomFields(datasetId, apiKey, addField);
+		}
 
 		// set info in the response bean
 		responseBean.setId(datasetId);
 		responseBean.setItemUrl(itemUrl);
@@ -146,10 +146,6 @@ public class GrsfPublisherFisheryService {
 
 			// set the visibility of the datatest according the context
 			boolean publicDataset = context.equals((String)contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY));
 
-			// add the "Product URL" to the record
-			String itemUrl = catalogue.getUnencryptedUrlFromDatasetIdOrName(futureName);
-			customFields.put(CommonServiceUtils.ITEM_URL_FIELD, Arrays.asList(itemUrl));
-
 			// convert extras' keys to keys with namespace
 			Map<String, String> namespaces = HelperMethods.getFieldToFieldNameSpaceMapping(HelperMethods.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_FISHERY);
@@ -185,7 +181,7 @@ public class GrsfPublisherFisheryService {
 				logger.info("Created record with identifier " + id);
 				CommonServiceUtils.actionsPostCreateOrUpdate(
 						id, futureName, record, apiKey, username, organization,
-						itemUrl, responseBean, catalogue, namespaces, groups, context, token, futureTitle, authorFullname,
+						null, responseBean, catalogue, namespaces, groups, context, token, futureTitle, authorFullname,
 						contextServlet, false);
 				status = Status.CREATED;
 
@@ -162,10 +162,6 @@ public class GrsfPublisherStockService {
 			// set the visibility of the datatest according the context
 			boolean publicDataset = context.equals((String)contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY));
 
-			// add the "Product URL" to the record
-			String itemUrl = catalogue.getUnencryptedUrlFromDatasetIdOrName(futureName);
-			customFields.put(CommonServiceUtils.ITEM_URL_FIELD, Arrays.asList(itemUrl));
-
 			// convert extras' keys to keys with namespace
 			Map<String, String> namespaces = HelperMethods.getFieldToFieldNameSpaceMapping(HelperMethods.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_STOCK);
 
@@ -185,20 +181,20 @@ public class GrsfPublisherStockService {
 					authorFullname,
 					authorMail,
 					record.getMaintainer() == null? authorFullname : record.getMaintainer(),
-					record.getMaintainerContact() == null? authorMail : record.getMaintainerContact(),
-					version,
-					HelperMethods.removeHTML(record.getDescription()),
-					license,
-					new ArrayList<String>(tags),
-					customFields,
-					resources,
-					publicDataset);
+					record.getMaintainerContact() == null? authorMail : record.getMaintainerContact(),
+					version,
+					HelperMethods.removeHTML(record.getDescription()),
+					license,
+					new ArrayList<String>(tags),
+					customFields,
+					resources,
+					publicDataset);
 
 			if(id != null){
 
 				logger.info("Product created! Id is " + id);
 				CommonServiceUtils.actionsPostCreateOrUpdate(
-						id, futureName, record, apiKey, username, organization, itemUrl,
+						id, futureName, record, apiKey, username, organization, null,
 						responseBean, catalogue, namespaces, groups, context, token,
 						futureTitle, authorFullname, contextServlet, false);
 				status = Status.CREATED;
@@ -275,6 +275,12 @@ public abstract class HelperMethods {
 	 */
 	public static ExternalFile uploadExternalFile(WorkspaceFolder resourceFormatFolder, String resourceToAttachName, String description, File csvFile) {
 		try {
+
+			WorkspaceItem existsFile = resourceFormatFolder.find(resourceToAttachName);
+
+			if(existsFile != null)
+				return (ExternalFile)existsFile;
+
 			return resourceFormatFolder.createExternalFileItem(resourceToAttachName, description, CSV_MIME, csvFile);
 		} catch (InsufficientPrivilegesException | ItemAlreadyExistException
 				| InternalErrorException e) {
@@ -176,57 +176,60 @@ public class ManageTimeSeriesThread extends Thread{
Field[] fields = current.getDeclaredFields();
for (Field field : fields) {
if (field.isAnnotationPresent(TimeSeries.class)) {
try{
Object f = new PropertyDescriptor(field.getName(), current).getReadMethod().invoke(record);
if(f != null){

Object f = new PropertyDescriptor(field.getName(), current).getReadMethod().invoke(record);
if(f != null){
List asList = (List)f;

List asList = (List)f;
if(!asList.isEmpty()){

if(!asList.isEmpty()){
CustomField customAnnotation = field.getAnnotation(CustomField.class);
logger.debug("A time series has been just found (from field " + customAnnotation.key() + ")");
String resourceToAttachOnCkanName = (replaceIllegalChars(productName) + "_" + customAnnotation.key()).replaceAll("\\s", "_").replaceAll("[_]+", "_") + CSVHelpers.CSV_EXTENSION;
String resourceToAttachOnCkanDescription = productName + " - " + customAnnotation.key() + " time series";

CustomField customAnnotation = field.getAnnotation(CustomField.class);
logger.debug("A time series has been just found (from field " + customAnnotation.key() + ")");
String resourceToAttachOnCkanName = (replaceIllegalChars(productName) + "_" + customAnnotation.key()).replaceAll("\\s", "_").replaceAll("[_]+", "_") + CSVHelpers.CSV_EXTENSION;
String resourceToAttachOnCkanDescription = productName + " : " + customAnnotation.key() + " time series";
File csvFile = CSVHelpers.listToCSV(asList);

File csvFile = CSVHelpers.listToCSV(asList);
CkanResourceBase ckanResource = null;
ExternalFile createdFileOnWorkspace = null;
if(csvFile != null){

CkanResourceBase ckanResource = null;
ExternalFile createdFileOnWorkspace = null;
if(csvFile != null){
for (int i = 0; i < CANCHES; i++) {

for (int i = 0; i < CANCHES; i++) {
// upload this file on ckan
if(ckanResource == null)
ckanResource = uploadFileOnCatalogue(csvFile, uuidKB, catalogue, username, resourceToAttachOnCkanName, resourceToAttachOnCkanDescription, apiKeyUser);

// upload this file on ckan
if(ckanResource == null)
ckanResource = uploadFileOnCatalogue(csvFile, uuidKB, catalogue, username, resourceToAttachOnCkanName, resourceToAttachOnCkanDescription, apiKeyUser);
//upload this file on the folder of the vre (under .catalogue) and change the url of the resource
if(ckanResource != null){

//upload this file on the folder of the vre (under .catalogue) and change the url of the resource
if(ckanResource != null){
if(createdFileOnWorkspace == null)
createdFileOnWorkspace = HelperMethods.uploadExternalFile(csvFolder, uuidKB + "_" + customAnnotation.key() + CSVHelpers.CSV_EXTENSION, resourceToAttachOnCkanDescription, csvFile);

if(createdFileOnWorkspace == null)
createdFileOnWorkspace = HelperMethods.uploadExternalFile(csvFolder, uuidKB + CSVHelpers.CSV_EXTENSION, resourceToAttachOnCkanDescription, csvFile);
if(createdFileOnWorkspace != null){
String publicUrlToSetOnCkan = createdFileOnWorkspace.getPublicLink(true);
logger.info("going to patch the created resource with id " + ckanResource.getId() + " with url " + publicUrlToSetOnCkan);
boolean updated = catalogue.patchResource(ckanResource.getId(), publicUrlToSetOnCkan, resourceToAttachOnCkanName, resourceToAttachOnCkanDescription, "", apiKeyUser);

if(createdFileOnWorkspace != null){
if(updated){
logger.info("Resource has been updated with the new url");
break;
}else
logger.error("Error while updating resource...");

String publicUrlToSetOnCkan = createdFileOnWorkspace.getPublicLink(true);
logger.info("going to patch the created resource with id " + ckanResource.getId() + " with url " + publicUrlToSetOnCkan);
boolean updated = catalogue.patchResource(ckanResource.getId(), publicUrlToSetOnCkan, resourceToAttachOnCkanName, resourceToAttachOnCkanDescription, "", apiKeyUser);

if(updated){
logger.info("Resource has been updated with the new url");
break;
}

}

}

// delete the file
csvFile.delete();
}

// delete the file
csvFile.delete();
}
}
}catch(Exception e){
logger.warn("Failed to perform all the operations about this timeseries ", e);
}
}
}