Compare commits

...

7 Commits

31 changed files with 682 additions and 623 deletions

View File

@@ -11,7 +11,7 @@
<groupId>org.gcube.data-catalogue</groupId> <groupId>org.gcube.data-catalogue</groupId>
<artifactId>grsf-publisher-ws</artifactId> <artifactId>grsf-publisher-ws</artifactId>
<version>1.11.0</version> <version>1.12.0</version>
<packaging>war</packaging> <packaging>war</packaging>
<name>grsf-publisher-ws</name> <name>grsf-publisher-ws</name>
<description>Utility library to publish GRSF products on GRSF catalogue.</description> <description>Utility library to publish GRSF products on GRSF catalogue.</description>

View File

@@ -1,8 +1,8 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import javax.validation.constraints.Size; import javax.validation.constraints.Size;
import org.gcube.datacatalogue.common.Constants; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.json.simple.JSONObject; import org.json.simple.JSONObject;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;

View File

@@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.record; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
@ -8,7 +8,7 @@ import java.util.Map;
import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods; import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
import org.gcube.datacatalogue.ckanutillibrary.shared.ResourceBean; import org.gcube.datacatalogue.ckanutillibrary.shared.ResourceBean;
import org.gcube.datacatalogue.common.Constants; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;

View File

@@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.custom_annotations; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.lang.annotation.ElementType; import java.lang.annotation.ElementType;
import java.lang.annotation.Retention; import java.lang.annotation.Retention;

View File

@@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.record; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.util.List; import java.util.List;
@@ -6,19 +6,9 @@ import javax.validation.Valid;
import javax.validation.constraints.NotNull; import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size; import javax.validation.constraints.Size;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CkanResource; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField; import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Group; import org.gcube.datacatalogue.common.AAA_PORTED.Status;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Tag;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.TimeSeries;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.AnnotationBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.RefersToBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.Resource;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.SimilarRecordBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.TimeSeriesBean;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.enums.Sources;
import org.gcube.datacatalogue.common.enums.Status;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;

View File

@@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.custom_annotations; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.lang.annotation.ElementType; import java.lang.annotation.ElementType;
import java.lang.annotation.Retention; import java.lang.annotation.Retention;

View File

@@ -1,16 +1,13 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.record; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.util.List; import java.util.List;
import javax.validation.constraints.NotNull; import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size; import javax.validation.constraints.Size;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Group; import org.gcube.datacatalogue.common.AAA_PORTED.Fishery_Type;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Tag; import org.gcube.datacatalogue.common.AAA_PORTED.Production_System_Type;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.enums.Fishery_Type;
import org.gcube.datacatalogue.common.enums.Production_System_Type;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;

View File

@@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.custom_annotations; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.lang.annotation.ElementType; import java.lang.annotation.ElementType;
import java.lang.annotation.Retention; import java.lang.annotation.Retention;

View File

@@ -1,8 +1,8 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import javax.validation.constraints.NotNull; import javax.validation.constraints.NotNull;
import org.gcube.datacatalogue.common.Constants; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;

View File

@@ -1,10 +1,10 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import javax.validation.constraints.NotNull; import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size; import javax.validation.constraints.Size;
import org.gcube.datacatalogue.common.Constants; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.enums.Sources; import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;

View File

@@ -1,6 +1,6 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import org.gcube.datacatalogue.common.Constants; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.json.simple.JSONObject; import org.json.simple.JSONObject;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;

View File

@@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.record; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.util.List; import java.util.List;
@@ -6,15 +6,10 @@ import javax.validation.Valid;
import javax.validation.constraints.NotNull; import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size; import javax.validation.constraints.Size;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField; import org.gcube.datacatalogue.common.AAA_PORTED.Abundance_Level;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Group; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Tag; import org.gcube.datacatalogue.common.AAA_PORTED.Fishing_Pressure;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.TimeSeries; import org.gcube.datacatalogue.common.AAA_PORTED.Stock_Type;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.TimeSeriesBean;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.enums.Abundance_Level;
import org.gcube.datacatalogue.common.enums.Fishing_Pressure;
import org.gcube.datacatalogue.common.enums.Stock_Type;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;

View File

@@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.custom_annotations; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.lang.annotation.ElementType; import java.lang.annotation.ElementType;
import java.lang.annotation.Retention; import java.lang.annotation.Retention;

View File

@@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.custom_annotations; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.lang.annotation.ElementType; import java.lang.annotation.ElementType;
import java.lang.annotation.Retention; import java.lang.annotation.Retention;

View File

@@ -1,6 +1,6 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import org.gcube.datacatalogue.common.Constants; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;

View File

@@ -1,11 +1,10 @@
package org.gcube.data_catalogue.grsf_publish_ws.ex; package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper; import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider; import javax.ws.rs.ext.Provider;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseBean;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
/** /**

View File

@@ -1,9 +1,9 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others; package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import javax.validation.constraints.NotNull; import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size; import javax.validation.constraints.Size;
import org.gcube.datacatalogue.common.Constants; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;

View File

@@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.utils; package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import static org.gcube.resources.discovery.icclient.ICFactory.client; import static org.gcube.resources.discovery.icclient.ICFactory.client;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor; import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;

View File

@@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.output; package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
/** /**
* Response bean to be used by the service. * Response bean to be used by the service.

View File

@@ -1,6 +1,6 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.output; package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import org.gcube.datacatalogue.common.Constants; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;

View File

@@ -1,10 +1,10 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others; package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import javax.validation.constraints.NotNull; import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size; import javax.validation.constraints.Size;
import org.gcube.datacatalogue.common.Constants; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.enums.Status; import org.gcube.datacatalogue.common.AAA_PORTED.Status;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;

View File

@@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.ex; package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import javax.validation.ConstraintViolation; import javax.validation.ConstraintViolation;
import javax.validation.ConstraintViolationException; import javax.validation.ConstraintViolationException;
@@ -7,7 +7,6 @@ import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper; import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider; import javax.ws.rs.ext.Provider;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseBean;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
/** /**

View File

@@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.utils.threads; package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import java.util.List; import java.util.List;

View File

@@ -29,19 +29,19 @@ import org.gcube.common.authorization.library.provider.AuthorizationProvider;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider; import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.authorization.library.utils.Caller; import org.gcube.common.authorization.library.utils.Caller;
import org.gcube.common.scope.api.ScopeProvider; import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.DeleteRecord; import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.UpdateRecordStatus; import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.DeleteRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.FisheryRecord; import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.ResponseBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseBean; import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.ResponseCreationBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseCreationBean; import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.UpdateRecordStatus;
import org.gcube.data_catalogue.grsf_publish_ws.utils.CommonServiceUtils; import org.gcube.data_catalogue.grsf_publish_ws.utils.CommonServiceUtils;
import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods; import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue; import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
import org.gcube.datacatalogue.ckanutillibrary.shared.ResourceBean; import org.gcube.datacatalogue.ckanutillibrary.shared.ResourceBean;
import org.gcube.datacatalogue.ckanutillibrary.shared.RolesCkanGroupOrOrg; import org.gcube.datacatalogue.ckanutillibrary.shared.RolesCkanGroupOrOrg;
import org.gcube.datacatalogue.common.Constants; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.enums.Product_Type; import org.gcube.datacatalogue.common.AAA_PORTED.Product_Type;
import org.gcube.datacatalogue.common.enums.Sources; import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import eu.trentorise.opendata.jackan.model.CkanDataset; import eu.trentorise.opendata.jackan.model.CkanDataset;
@@ -61,33 +61,33 @@ public class GrsfPublisherFisheryService {
// Logger // Logger
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(GrsfPublisherFisheryService.class); private static final org.slf4j.Logger logger = LoggerFactory.getLogger(GrsfPublisherFisheryService.class);
@GET // @GET
@Path("hello") // @Path("hello")
@Produces(MediaType.TEXT_PLAIN) // @Produces(MediaType.TEXT_PLAIN)
public Response hello() { // public Response hello() {
return Response.ok("Hello.. Fishery service is here").build(); // return Response.ok("Hello.. Fishery service is here").build();
} // }
//
@GET // @GET
@Path("get-licenses") // @Path("get-licenses")
@Produces(MediaType.APPLICATION_JSON) // @Produces(MediaType.APPLICATION_JSON)
public Response getLicenses() { // public Response getLicenses() {
Status status = Status.OK; // Status status = Status.OK;
String context = ScopeProvider.instance.get(); // String context = ScopeProvider.instance.get();
DataCatalogue catalogue; // DataCatalogue catalogue;
try { // try {
catalogue = HelperMethods.getDataCatalogueRunningInstance(context); //// catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
Map<String,String> licenses = CommonServiceUtils.getLicenses(catalogue); // Map<String,String> licenses = CommonServiceUtils.getLicenses(catalogue);
if(licenses == null) // if(licenses == null)
status = Status.INTERNAL_SERVER_ERROR; // status = Status.INTERNAL_SERVER_ERROR;
return Response.status(status).entity(licenses).build(); // return Response.status(status).entity(licenses).build();
} catch(Exception e) { // } catch(Exception e) {
status = Status.INTERNAL_SERVER_ERROR; // status = Status.INTERNAL_SERVER_ERROR;
return Response.status(status) // return Response.status(status)
.entity(new ResponseBean(false, "Unable to retrieve license list " + e.getLocalizedMessage(), null)) // .entity(new ResponseBean(false, "Unable to retrieve license list " + e.getLocalizedMessage(), null))
.build(); // .build();
} // }
} // }
@POST @POST
@Path("publish-product") @Path("publish-product")
@@ -113,34 +113,43 @@ public class GrsfPublisherFisheryService {
// Cast the source to the accepted ones // Cast the source to the accepted ones
Sources sourceInPath = Sources.onDeserialize(source); Sources sourceInPath = Sources.onDeserialize(source);
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context); DataCatalogue catalogue = null;
if(catalogue == null) { // DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
throw new Exception("There was a problem while serving your request. No catalogue instance was found!"); // if(catalogue == null) {
} else { // throw new Exception("There was a problem while serving your request. No catalogue instance was found!");
// } else {
String apiKey = catalogue.getApiKeyFromUsername(username); String apiKey = "";
String organization = HelperMethods.retrieveOrgNameFromScope(context); String organization = "";
CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
// extend this role to the other organizations in this context // String apiKey = catalogue.getApiKeyFromUsername(username);
CommonServiceUtils.extendRoleToOtherOrganizations(username, catalogue, organization, // String organization = HelperMethods.retrieveOrgNameFromScope(context);
RolesCkanGroupOrOrg.ADMIN); // CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
//
// // extend this role to the other organizations in this context
// CommonServiceUtils.extendRoleToOtherOrganizations(username, catalogue, organization,
// RolesCkanGroupOrOrg.ADMIN);
String authorMail = "";
String authorFullname = "";
// retrieve the user's email and fullname // retrieve the user's email and fullname
String authorMail = HelperMethods.getUserEmail(context, token); // String authorMail = HelperMethods.getUserEmail(context, token);
String authorFullname = HelperMethods.getUserFullname(context, token); // String authorFullname = HelperMethods.getUserFullname(context, token);
//
if(authorMail == null || authorFullname == null) { // if(authorMail == null || authorFullname == null) {
throw new Exception("Sorry but it was not possible to retrieve your fullname/email!"); // throw new Exception("Sorry but it was not possible to retrieve your fullname/email!");
} // }
// The name of the product will be the uuid of the kb. The title will be the fishery's fishery_name. Fishery has also the constraint that // The name of the product will be the uuid of the kb. The title will be the fishery's fishery_name. Fishery has also the constraint that
// fishing area and jurisdiction area cannot be empty at the same time // fishing area and jurisdiction area cannot be empty at the same time
String futureName = record.getUuid(); String futureName = "";
String futureTitle = record.getFisheryName(); String futureTitle = "";
// String futureName = record.getUuid();
// String futureTitle = record.getFisheryName();
// check name // check name
CommonServiceUtils.checkName(futureName, catalogue); // CommonServiceUtils.checkName(futureName, catalogue);
Map<String,List<String>> customFields = record.getExtrasFields(); Map<String,List<String>> customFields = record.getExtrasFields();
Set<String> tags = new HashSet<String>(); Set<String> tags = new HashSet<String>();
@@ -152,26 +161,31 @@ public class GrsfPublisherFisheryService {
Product_Type.FISHERY, tags, customFields, groups, resources, username, futureTitle); Product_Type.FISHERY, tags, customFields, groups, resources, username, futureTitle);
// check the license id // check the license id
String license = null; String license = "";
if(record.getLicense() == null || record.getLicense().isEmpty()) // String license = null;
license = Constants.DEFAULT_LICENSE; // if(record.getLicense() == null || record.getLicense().isEmpty())
else if(HelperMethods.existsLicenseId(record.getLicense(), catalogue)) // license = Constants.DEFAULT_LICENSE;
license = record.getLicense(); // else if(HelperMethods.existsLicenseId(record.getLicense(), catalogue))
else // license = record.getLicense();
throw new Exception("Please check the license id!"); // else
// throw new Exception("Please check the license id!");
//
long version = record.getVersion() == null ? 1 : record.getVersion(); long version = record.getVersion() == null ? 1 : record.getVersion();
// set the visibility of the datatest according the context // set the visibility of the datatest according the context
boolean publicDataset = context boolean publicDataset = context
.equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY)); .equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY));
// convert extras' keys to keys with namespace
Map<String,String> namespaces = HelperMethods
.getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_FISHERY);
if(namespaces == null)
throw new Exception("Failed to retrieve the namespaces for the key fields!"); // convert extras' keys to keys with namespace
Map<String,String> namespaces = new HashMap<>();
// Map<String,String> namespaces = HelperMethods
// .getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_FISHERY);
// if(namespaces == null)
// throw new Exception("Failed to retrieve the namespaces for the key fields!");
customFields = HelperMethods.replaceFieldsKey(customFields, namespaces, customFields = HelperMethods.replaceFieldsKey(customFields, namespaces,
!sourceInPath.equals(Sources.GRSF)); !sourceInPath.equals(Sources.GRSF));
@@ -180,6 +194,7 @@ public class GrsfPublisherFisheryService {
logger.info("Invoking create method.."); logger.info("Invoking create method..");
// create the product // create the product
id = catalogue.createCKanDatasetMultipleCustomFields(apiKey, futureTitle, futureName, id = catalogue.createCKanDatasetMultipleCustomFields(apiKey, futureTitle, futureName,
publishInOrganization, authorFullname, authorMail, publishInOrganization, authorFullname, authorMail,
@@ -188,7 +203,7 @@ public class GrsfPublisherFisheryService {
null, license, new ArrayList<String>(tags), customFields, resources, publicDataset); null, license, new ArrayList<String>(tags), customFields, resources, publicDataset);
// post actions // post actions
if(id != null) { // if(id != null) {
logger.info("Created record with identifier " + id); logger.info("Created record with identifier " + id);
String description = Constants.SHORT_NAME_CUSTOM_KEY + ": " + record.getShortName() + "\n"; String description = Constants.SHORT_NAME_CUSTOM_KEY + ": " + record.getShortName() + "\n";
@@ -203,10 +218,10 @@ public class GrsfPublisherFisheryService {
status = Status.CREATED; status = Status.CREATED;
} else { // } else {
throw new Exception("There was an error during the record generation, sorry"); // throw new Exception("There was an error during the record generation, sorry");
} // }
} // }
} catch(Exception e) { } catch(Exception e) {
logger.error("Failed to create fishery record" + e); logger.error("Failed to create fishery record" + e);
status = Status.INTERNAL_SERVER_ERROR; status = Status.INTERNAL_SERVER_ERROR;
@@ -216,68 +231,69 @@ public class GrsfPublisherFisheryService {
return Response.status(status).entity(responseBean).build(); return Response.status(status).entity(responseBean).build();
} }
@DELETE // @DELETE
@Path("delete-product") // @Path("delete-product")
@Consumes(MediaType.APPLICATION_JSON) // @Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON) // @Produces(MediaType.APPLICATION_JSON)
public Response deleteFishery(@NotNull(message = "input value is missing") @Valid DeleteRecord recordToDelete, // public Response deleteFishery(@NotNull(message = "input value is missing") @Valid DeleteRecord recordToDelete,
@PathParam("source") String source) throws ValidationException { // @PathParam("source") String source) throws ValidationException {
//
// retrieve context and username // // retrieve context and username
Caller caller = AuthorizationProvider.instance.get(); // Caller caller = AuthorizationProvider.instance.get();
String username = caller.getClient().getId(); // String username = caller.getClient().getId();
String context = ScopeProvider.instance.get(); // String context = ScopeProvider.instance.get();
//
ResponseCreationBean responseBean = new ResponseCreationBean(); // ResponseCreationBean responseBean = new ResponseCreationBean();
Status status = Status.INTERNAL_SERVER_ERROR; // Status status = Status.INTERNAL_SERVER_ERROR;
//
// check it is a fishery ... // // check it is a fishery ...
logger.info( // logger.info(
"Received call to delete product with id " + recordToDelete.getId() + ", checking if it is a fishery"); // "Received call to delete product with id " + recordToDelete.getId() + ", checking if it is a fishery");
try { // try {
//
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context); // DataCatalogue catalogue = null;
//// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// Cast the source to the accepted ones //
Sources sourceInPath = Sources.onDeserialize(source); // // Cast the source to the accepted ones
logger.debug("The request is to delete a fishery object of source " + sourceInPath); // Sources sourceInPath = Sources.onDeserialize(source);
// logger.debug("The request is to delete a fishery object of source " + sourceInPath);
// retrieve the catalogue instance //
String apiKey = catalogue.getApiKeyFromUsername(username); // // retrieve the catalogue instance
CkanDataset fisheryInCkan = catalogue.getDataset(recordToDelete.getId(), apiKey); // String apiKey = catalogue.getApiKeyFromUsername(username);
// CkanDataset fisheryInCkan = catalogue.getDataset(recordToDelete.getId(), apiKey);
if(fisheryInCkan == null) { //
status = Status.NOT_FOUND; // if(fisheryInCkan == null) {
throw new Exception("There was a problem while serving your request. This item was not found"); // status = Status.NOT_FOUND;
} // throw new Exception("There was a problem while serving your request. This item was not found");
// }
// check it is in the right source and it is a fishery //
String type = fisheryInCkan.getExtrasAsHashMap().get(Constants.DOMAIN_CUSTOM_KEY); // // check it is in the right source and it is a fishery
// String type = fisheryInCkan.getExtrasAsHashMap().get(Constants.DOMAIN_CUSTOM_KEY);
if((fisheryInCkan.getOrganization().getName().equalsIgnoreCase(source) //
|| fisheryInCkan.getOrganization().getName().toLowerCase().contains(source)) // if((fisheryInCkan.getOrganization().getName().equalsIgnoreCase(source)
&& Product_Type.FISHERY.getOrigName().equals(type)) { // || fisheryInCkan.getOrganization().getName().toLowerCase().contains(source))
// && Product_Type.FISHERY.getOrigName().equals(type)) {
logger.debug("Ok, this is a fishery of the right source, removing it"); //
boolean deleted = catalogue.deleteProduct(fisheryInCkan.getId(), apiKey, true); // logger.debug("Ok, this is a fishery of the right source, removing it");
// boolean deleted = catalogue.deleteProduct(fisheryInCkan.getId(), apiKey, true);
if(deleted) { //
logger.info("Fishery DELETED AND PURGED!"); // if(deleted) {
status = Status.OK; // logger.info("Fishery DELETED AND PURGED!");
responseBean.setId(fisheryInCkan.getId()); // status = Status.OK;
} // responseBean.setId(fisheryInCkan.getId());
} else { // }
status = Status.BAD_REQUEST; // } else {
throw new Exception( // status = Status.BAD_REQUEST;
"The id you are using doesn't belong to a Fishery item having source " + source + "!"); // throw new Exception(
} // "The id you are using doesn't belong to a Fishery item having source " + source + "!");
} catch(Exception e) { // }
logger.error("Failed to delete this", e); // } catch(Exception e) {
responseBean.setError(e.getMessage()); // logger.error("Failed to delete this", e);
} // responseBean.setError(e.getMessage());
// }
return Response.status(status).entity(responseBean).build(); //
} // return Response.status(status).entity(responseBean).build();
// }
@GET @GET
@Path("get-fisheries-ids") @Path("get-fisheries-ids")
@@ -298,11 +314,12 @@ public class GrsfPublisherFisheryService {
// Cast the source to the accepted ones // Cast the source to the accepted ones
Sources sourceInPath = Sources.onDeserialize(source); Sources sourceInPath = Sources.onDeserialize(source);
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context); DataCatalogue catalogue = null;
if(catalogue == null) { // DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
throw new Exception("There was a problem while serving your request"); // if(catalogue == null) {
} // throw new Exception("There was a problem while serving your request");
// }
//
// if it is a request for GRSF records, we have Fishery - Stock groups, so it is easy. // if it is a request for GRSF records, we have Fishery - Stock groups, so it is easy.
// For other cases, records needs to be parsed // For other cases, records needs to be parsed
if(sourceInPath.equals(Sources.GRSF)) if(sourceInPath.equals(Sources.GRSF))
@@ -345,10 +362,13 @@ public class GrsfPublisherFisheryService {
logger.info("Received call to get the catalogue identifier for the product with name " + name); logger.info("Received call to get the catalogue identifier for the product with name " + name);
try { try {
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context); // DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) { // if(catalogue == null) {
throw new Exception("There was a problem while serving your request"); // throw new Exception("There was a problem while serving your request");
} // }
//
DataCatalogue catalogue = null;
CkanDataset dataset = catalogue.getDataset(name, catalogue.getApiKeyFromUsername(username)); CkanDataset dataset = catalogue.getDataset(name, catalogue.getApiKeyFromUsername(username));
if(dataset != null) { if(dataset != null) {
Map<String,String> result = new HashMap<String,String>(); Map<String,String> result = new HashMap<String,String>();
@@ -395,33 +415,40 @@ public class GrsfPublisherFisheryService {
throw new Exception("Please specify the '" + Constants.CATALOG_ID + "' property"); throw new Exception("Please specify the '" + Constants.CATALOG_ID + "' property");
} }
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) { DataCatalogue catalogue = null;
throw new Exception( // DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
"There was a problem while serving your request. No catalogue instance was found in this context!");
} else { // if(catalogue == null) {
// throw new Exception(
// "There was a problem while serving your request. No catalogue instance was found in this context!");
// } else {
// get already published record and modify it // get already published record and modify it
String apiKey = catalogue.getApiKeyFromUsername(username); String apiKey = "";
// String apiKey = catalogue.getApiKeyFromUsername(username);
CkanDataset recordPublished = catalogue.getDataset(catalogId, apiKey); CkanDataset recordPublished = catalogue.getDataset(catalogId, apiKey);
if(recordPublished == null) if(recordPublished == null)
throw new Exception("A record with catalogue id " + catalogId + " does not exist!"); throw new Exception("A record with catalogue id " + catalogId + " does not exist!");
// retrieve the user's email and fullname String authorMail = "";
String authorMail = HelperMethods.getUserEmail(context, token); String authorFullname = "";
String authorFullname = HelperMethods.getUserFullname(context, token);
if(authorMail == null || authorFullname == null) { // // retrieve the user's email and fullname
logger.debug("Author fullname or mail missing, cannot continue"); // String authorMail = HelperMethods.getUserEmail(context, token);
throw new Exception("Sorry but there was not possible to retrieve your fullname/email!"); // String authorFullname = HelperMethods.getUserFullname(context, token);
} //
// if(authorMail == null || authorFullname == null) {
// logger.debug("Author fullname or mail missing, cannot continue");
// throw new Exception("Sorry but there was not possible to retrieve your fullname/email!");
// }
String organization = HelperMethods.retrieveOrgNameFromScope(context); String organization = "";
// String organization = HelperMethods.retrieveOrgNameFromScope(context);
// check he/she has admin role // check he/she has admin role
CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization); // CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
// name, product url and are going to remain unchanged (so we keep them from the already published record); // name, product url and are going to remain unchanged (so we keep them from the already published record);
String name = recordPublished.getName(); String name = recordPublished.getName();
@@ -459,11 +486,12 @@ public class GrsfPublisherFisheryService {
.equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY)); .equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY));
// convert extras' keys to keys with namespace // convert extras' keys to keys with namespace
Map<String,String> namespaces = HelperMethods Map<String,String> namespaces = new HashMap<>();
.getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_FISHERY); // Map<String,String> namespaces = HelperMethods
// .getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_FISHERY);
if(namespaces == null) //
throw new Exception("Failed to retrieve the namespaces for the key fields!"); // if(namespaces == null)
// throw new Exception("Failed to retrieve the namespaces for the key fields!");
// retrieve the already generated url // retrieve the already generated url
String modifiedUUIDKey = namespaces.containsKey(Constants.ITEM_URL_FIELD) String modifiedUUIDKey = namespaces.containsKey(Constants.ITEM_URL_FIELD)
@@ -502,7 +530,7 @@ public class GrsfPublisherFisheryService {
} else { } else {
throw new Exception("There was an error during the item updated, sorry"); throw new Exception("There was an error during the item updated, sorry");
} }
} // }
} catch(Exception e) { } catch(Exception e) {
logger.error("Failed to update fishery record" + e); logger.error("Failed to update fishery record" + e);
responseBean.setError(e.getMessage()); responseBean.setError(e.getMessage());
@@ -527,14 +555,15 @@ public class GrsfPublisherFisheryService {
ResponseCreationBean responseBean = new ResponseCreationBean(); ResponseCreationBean responseBean = new ResponseCreationBean();
Status status = Status.INTERNAL_SERVER_ERROR; Status status = Status.INTERNAL_SERVER_ERROR;
try { // try {
DataCatalogue catalogue = null;
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context); // DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) { // if(catalogue == null) {
throw new Exception( // throw new Exception(
"There was a problem while serving your request. No catalogue instance was found in this context!"); // "There was a problem while serving your request. No catalogue instance was found in this context!");
} else { // } else {
// catalog id must be reported // catalog id must be reported
String uuid = bean.getUuid(); String uuid = bean.getUuid();
@@ -544,21 +573,24 @@ public class GrsfPublisherFisheryService {
String apiKeyUser = catalogue.getApiKeyFromUsername(username); String apiKeyUser = catalogue.getApiKeyFromUsername(username);
CkanDataset record = catalogue.getDataset(uuid, apiKeyUser); CkanDataset record = catalogue.getDataset(uuid, apiKeyUser);
if(record == null) if(record == null){
throw new Exception("A record with knowledge_base_id id " + uuid + " does not exist!"); // throw new Exception("A record with knowledge_base_id id " + uuid + " does not exist!");
}
// check system type // check system type
boolean isGRSF = !record.getExtrasAsHashMap().get(Constants.SYSTEM_TYPE_CUSTOM_KEY) boolean isGRSF = !record.getExtrasAsHashMap().get(Constants.SYSTEM_TYPE_CUSTOM_KEY)
.equals(Constants.SYSTEM_TYPE_FOR_SOURCES_VALUE); .equals(Constants.SYSTEM_TYPE_FOR_SOURCES_VALUE);
if(!isGRSF) if(!isGRSF) {
throw new Exception("You are trying to modify a Legacy record!"); // throw new Exception("You are trying to modify a Legacy record!");
}
boolean rightDomain = record.getExtrasAsHashMap().get(Constants.DOMAIN_CUSTOM_KEY) boolean rightDomain = record.getExtrasAsHashMap().get(Constants.DOMAIN_CUSTOM_KEY)
.equalsIgnoreCase(Product_Type.FISHERY.getOrigName()); .equalsIgnoreCase(Product_Type.FISHERY.getOrigName());
if(!rightDomain) if(!rightDomain) {
throw new Exception("This is not a Fishery record!"); // throw new Exception("This is not a Fishery record!");
}
// update it // update it
Map<String,List<String>> updateStatus = new HashMap<String,List<String>>(1); Map<String,List<String>> updateStatus = new HashMap<String,List<String>>(1);
@@ -570,11 +602,11 @@ public class GrsfPublisherFisheryService {
responseBean.setId(record.getId()); responseBean.setId(record.getId());
responseBean.setItemUrl(record.getExtrasAsHashMap().get(Constants.ITEM_URL_FIELD)); responseBean.setItemUrl(record.getExtrasAsHashMap().get(Constants.ITEM_URL_FIELD));
} // }
} catch(Exception e) { // } catch(Exception e) {
logger.error("Failed to update fishery record's status", e); // logger.error("Failed to update fishery record's status", e);
responseBean.setError(e.getMessage()); // responseBean.setError(e.getMessage());
} // }
return Response.status(status).entity(responseBean).build(); return Response.status(status).entity(responseBean).build();

View File

@@ -29,19 +29,19 @@ import org.gcube.common.authorization.library.provider.AuthorizationProvider;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider; import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.authorization.library.utils.Caller; import org.gcube.common.authorization.library.utils.Caller;
import org.gcube.common.scope.api.ScopeProvider; import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.DeleteRecord; import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.UpdateRecordStatus; import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.DeleteRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord; import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.ResponseBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseBean; import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.ResponseCreationBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseCreationBean; import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.UpdateRecordStatus;
import org.gcube.data_catalogue.grsf_publish_ws.utils.CommonServiceUtils; import org.gcube.data_catalogue.grsf_publish_ws.utils.CommonServiceUtils;
import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods; import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue; import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
import org.gcube.datacatalogue.ckanutillibrary.shared.ResourceBean; import org.gcube.datacatalogue.ckanutillibrary.shared.ResourceBean;
import org.gcube.datacatalogue.ckanutillibrary.shared.RolesCkanGroupOrOrg; import org.gcube.datacatalogue.ckanutillibrary.shared.RolesCkanGroupOrOrg;
import org.gcube.datacatalogue.common.Constants; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.enums.Product_Type; import org.gcube.datacatalogue.common.AAA_PORTED.Product_Type;
import org.gcube.datacatalogue.common.enums.Sources; import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import eu.trentorise.opendata.jackan.model.CkanDataset; import eu.trentorise.opendata.jackan.model.CkanDataset;
@@ -61,33 +61,33 @@ public class GrsfPublisherStockService {
// Logger // Logger
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(GrsfPublisherStockService.class); private static final org.slf4j.Logger logger = LoggerFactory.getLogger(GrsfPublisherStockService.class);
@GET // @GET
@Path("hello") // @Path("hello")
@Produces(MediaType.TEXT_PLAIN) // @Produces(MediaType.TEXT_PLAIN)
public Response hello() { // public Response hello() {
return Response.ok("Hello.. Stock service is here").build(); // return Response.ok("Hello.. Stock service is here").build();
} // }
//
@GET // @GET
@Path("get-licenses") // @Path("get-licenses")
@Produces(MediaType.APPLICATION_JSON) // @Produces(MediaType.APPLICATION_JSON)
public Response getLicenses() { // public Response getLicenses() {
Status status = Status.OK; // Status status = Status.OK;
String context = ScopeProvider.instance.get(); // String context = ScopeProvider.instance.get();
DataCatalogue catalogue; // DataCatalogue catalogue;
try { // try {
catalogue = HelperMethods.getDataCatalogueRunningInstance(context); // catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
Map<String,String> licenses = CommonServiceUtils.getLicenses(catalogue); // Map<String,String> licenses = CommonServiceUtils.getLicenses(catalogue);
if(licenses == null) // if(licenses == null)
status = Status.INTERNAL_SERVER_ERROR; // status = Status.INTERNAL_SERVER_ERROR;
return Response.status(status).entity(licenses).build(); // return Response.status(status).entity(licenses).build();
} catch(Exception e) { // } catch(Exception e) {
status = Status.INTERNAL_SERVER_ERROR; // status = Status.INTERNAL_SERVER_ERROR;
return Response.status(status) // return Response.status(status)
.entity(new ResponseBean(false, "Unable to retrieve license list " + e.getLocalizedMessage(), null)) // .entity(new ResponseBean(false, "Unable to retrieve license list " + e.getLocalizedMessage(), null))
.build(); // .build();
} // }
} // }
@POST @POST
@Path("publish-product") @Path("publish-product")
@@ -113,35 +113,46 @@ public class GrsfPublisherStockService {
// Cast the source to the accepted ones // Cast the source to the accepted ones
Sources sourceInPath = Sources.onDeserialize(source); Sources sourceInPath = Sources.onDeserialize(source);
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) { DataCatalogue catalogue = null;
throw new Exception("There was a problem while serving your request. No catalogue instance was found!"); // DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
} else { // if(catalogue == null) {
// throw new Exception("There was a problem while serving your request. No catalogue instance was found!");
// } else {
//
String apiKey = catalogue.getApiKeyFromUsername(username); String apiKey = "";
String organization = HelperMethods.retrieveOrgNameFromScope(context); //"grsf_admin"; String organization = "";
// String apiKey = catalogue.getApiKeyFromUsername(username);
// String organization = HelperMethods.retrieveOrgNameFromScope(context); //"grsf_admin";
// check it has admin role or throw exception // check it has admin role or throw exception
CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization); // CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
// extend this role to the other organizations in this context // extend this role to the other organizations in this context
CommonServiceUtils.extendRoleToOtherOrganizations(username, catalogue, organization, // To support this gCat must be modified according to the following ticket
RolesCkanGroupOrOrg.ADMIN); // https://support.d4science.org/issues/19365
// CommonServiceUtils.extendRoleToOtherOrganizations(username, catalogue, organization,
// RolesCkanGroupOrOrg.ADMIN);
String authorMail = "";
String authorFullname = "";
// retrieve the user's email and fullname // retrieve the user's email and fullname
String authorMail = HelperMethods.getUserEmail(context, token); // String authorMail = HelperMethods.getUserEmail(context, token);
String authorFullname = HelperMethods.getUserFullname(context, token); // String authorFullname = HelperMethods.getUserFullname(context, token);
//
if(authorMail == null || authorFullname == null) { // if(authorMail == null || authorFullname == null) {
throw new Exception("Sorry but it was not possible to retrieve your fullname/email!"); // throw new Exception("Sorry but it was not possible to retrieve your fullname/email!");
} // }
// check the record has a name, at least // check the record has a name, at least
String futureName = record.getUuid(); String futureName = record.getUuid();
String futureTitle = record.getStockName(); String futureTitle = record.getStockName();
// check name and throws exception // check name and throws exception
CommonServiceUtils.checkName(futureName, catalogue); // CommonServiceUtils.checkName(futureName, catalogue);
// load other information // load other information
Map<String,List<String>> customFields = record.getExtrasFields(); Map<String,List<String>> customFields = record.getExtrasFields();
@@ -170,11 +181,12 @@ public class GrsfPublisherStockService {
.equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY)); .equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY));
// convert extras' keys to keys with namespace // convert extras' keys to keys with namespace
Map<String,String> namespaces = HelperMethods Map<String,String> namespaces = new HashMap<>();
.getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_STOCK); // Map<String,String> namespaces = HelperMethods
// .getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_STOCK);
if(namespaces == null) //
throw new Exception("Failed to retrieve the namespaces for the key fields!"); // if(namespaces == null)
// throw new Exception("Failed to retrieve the namespaces for the key fields!");
customFields = HelperMethods.replaceFieldsKey(customFields, namespaces, customFields = HelperMethods.replaceFieldsKey(customFields, namespaces,
!sourceInPath.equals(Sources.GRSF)); !sourceInPath.equals(Sources.GRSF));
@@ -209,7 +221,7 @@ public class GrsfPublisherStockService {
} else } else
throw new Exception( throw new Exception(
"There was an error during the product generation, sorry! Unable to create the dataset"); "There was an error during the product generation, sorry! Unable to create the dataset");
} // }
} catch(Exception e) { } catch(Exception e) {
logger.error("Failed to create stock record", e); logger.error("Failed to create stock record", e);
status = Status.INTERNAL_SERVER_ERROR; status = Status.INTERNAL_SERVER_ERROR;
@@ -238,11 +250,13 @@ public class GrsfPublisherStockService {
"Received call to delete product with id " + recordToDelete.getId() + ", checking if it is a stock"); "Received call to delete product with id " + recordToDelete.getId() + ", checking if it is a stock");
try { try {
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context); DataCatalogue catalogue = null;
if(catalogue == null) {
status = Status.INTERNAL_SERVER_ERROR; // DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
throw new Exception("There was a problem while serving your request"); // if(catalogue == null) {
} // status = Status.INTERNAL_SERVER_ERROR;
// throw new Exception("There was a problem while serving your request");
// }
// Cast the source to the accepted ones // Cast the source to the accepted ones
Sources sourceInPath = Sources.onDeserialize(source); Sources sourceInPath = Sources.onDeserialize(source);
@@ -304,11 +318,13 @@ public class GrsfPublisherStockService {
// Cast the source to the accepted ones // Cast the source to the accepted ones
Sources sourceInPath = Sources.onDeserialize(source); Sources sourceInPath = Sources.onDeserialize(source);
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) { DataCatalogue catalogue = null;
status = Status.INTERNAL_SERVER_ERROR; // DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
throw new Exception("There was a problem while serving your request"); // if(catalogue == null) {
} // status = Status.INTERNAL_SERVER_ERROR;
// throw new Exception("There was a problem while serving your request");
// }
// if it is a request for GRSF records, we have Fishery - Stock groups, so it is easy. // if it is a request for GRSF records, we have Fishery - Stock groups, so it is easy.
// For other cases, records needs to be parsed // For other cases, records needs to be parsed
@@ -348,10 +364,13 @@ public class GrsfPublisherStockService {
Status status = Status.INTERNAL_SERVER_ERROR; Status status = Status.INTERNAL_SERVER_ERROR;
logger.info("Received call to get the catalogue identifier for the product with name " + name); logger.info("Received call to get the catalogue identifier for the product with name " + name);
try { try {
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) { DataCatalogue catalogue = null;
throw new Exception("There was a problem while serving your request"); // DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
} // if(catalogue == null) {
// throw new Exception("There was a problem while serving your request");
// }
CkanDataset dataset = catalogue.getDataset(name, catalogue.getApiKeyFromUsername(username)); CkanDataset dataset = catalogue.getDataset(name, catalogue.getApiKeyFromUsername(username));
if(dataset != null) { if(dataset != null) {
Map<String,String> result = new HashMap<String,String>(); Map<String,String> result = new HashMap<String,String>();
@@ -398,32 +417,40 @@ public class GrsfPublisherStockService {
throw new Exception("Please specify the 'catalog_id' property"); throw new Exception("Please specify the 'catalog_id' property");
} }
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context); DataCatalogue catalogue = null;
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) { // if(catalogue == null) {
throw new Exception( // throw new Exception(
"There was a problem while serving your request. No catalogue instance was found in this context!"); // "There was a problem while serving your request. No catalogue instance was found in this context!");
} else { // } else {
// get already published record and modify it // get already published record and modify it
String apiKey = catalogue.getApiKeyFromUsername(username); String apiKey = "";
// String apiKey = catalogue.getApiKeyFromUsername(username);
CkanDataset recordPublished = catalogue.getDataset(catalogId, apiKey); CkanDataset recordPublished = catalogue.getDataset(catalogId, apiKey);
if(recordPublished == null) if(recordPublished == null)
throw new Exception("A record with id " + catalogId + " does not exist!"); throw new Exception("A record with id " + catalogId + " does not exist!");
// retrieve the user's email and fullname String authorMail = "";
String authorMail = HelperMethods.getUserEmail(context, token); String authorFullname = "";
String authorFullname = HelperMethods.getUserFullname(context, token);
if(authorMail == null || authorFullname == null) { // retrieve the user's email and fullname
logger.debug("Author fullname or mail missing, cannot continue"); // String authorMail = HelperMethods.getUserEmail(context, token);
throw new Exception("Sorry but there was not possible to retrieve your fullname/email!"); // String authorFullname = HelperMethods.getUserFullname(context, token);
} //
String organization = HelperMethods.retrieveOrgNameFromScope(context); //"grsf_admin"; // if(authorMail == null || authorFullname == null) {
// logger.debug("Author fullname or mail missing, cannot continue");
// throw new Exception("Sorry but there was not possible to retrieve your fullname/email!");
// }
String organization = "";
// String organization = HelperMethods.retrieveOrgNameFromScope(context); //"grsf_admin";
// check he/she has admin role // check he/she has admin role
CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization); // CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
// name, product url and are going to remain unchanged (so we keep them from the publisher record); // name, product url and are going to remain unchanged (so we keep them from the publisher record);
String name = recordPublished.getName(); String name = recordPublished.getName();
@@ -462,11 +489,12 @@ public class GrsfPublisherStockService {
.equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY)); .equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY));
// convert extras' keys to keys with namespace // convert extras' keys to keys with namespace
Map<String,String> namespaces = HelperMethods Map<String,String> namespaces = new HashMap<>();
.getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_STOCK); // Map<String,String> namespaces = HelperMethods
// .getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_STOCK);
if(namespaces == null) //
throw new Exception("Failed to retrieve the namespaces for the key fields!"); // if(namespaces == null)
// throw new Exception("Failed to retrieve the namespaces for the key fields!");
// retrieve the url // retrieve the url
String modifiedUUIDKey = namespaces.containsKey(Constants.ITEM_URL_FIELD) String modifiedUUIDKey = namespaces.containsKey(Constants.ITEM_URL_FIELD)
@@ -506,7 +534,7 @@ public class GrsfPublisherStockService {
} else { } else {
throw new Exception("There was an error during the item updated, sorry"); throw new Exception("There was an error during the item updated, sorry");
} }
} // }
} catch(Exception e) { } catch(Exception e) {
logger.error("Failed to update stock record", e); logger.error("Failed to update stock record", e);
responseBean.setError(e.getMessage()); responseBean.setError(e.getMessage());
@@ -534,12 +562,13 @@ public class GrsfPublisherStockService {
try { try {
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context); DataCatalogue catalogue = null;
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) { //
throw new Exception( // if(catalogue == null) {
"There was a problem while serving your request. No catalogue instance was found in this context!"); // throw new Exception(
} else { // "There was a problem while serving your request. No catalogue instance was found in this context!");
// } else {
// catalog id must be reported // catalog id must be reported
String uuid = bean.getUuid(); String uuid = bean.getUuid();
@@ -574,7 +603,7 @@ public class GrsfPublisherStockService {
         responseBean.setKbUuid(uuid);
         responseBean.setId(record.getId());
         responseBean.setItemUrl(record.getExtrasAsHashMap().get(Constants.ITEM_URL_FIELD));
-    }
+    // }
     } catch(Exception e) {
         logger.error("Failed to update stock record's status", e);
         responseBean.setError(e.getMessage());
View File
@@ -14,35 +14,31 @@ import java.util.concurrent.ConcurrentHashMap;
 import javax.servlet.ServletContext;
-import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CkanResource;
-import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField;
-import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Group;
-import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Tag;
-import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.RefersToBean;
-import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.Resource;
-import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.TimeSeriesBean;
-import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.Base;
-import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.Common;
-import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.FisheryRecord;
-import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord;
-import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseCreationBean;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Base;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.CkanResource;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Common;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.CustomField;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.FisheryRecord;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Group;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.RefersToBean;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Resource;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.StockRecord;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Tag;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.TimeSeriesBean;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.ResponseCreationBean;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.WritePostCatalogueManagerThread;
 import org.gcube.data_catalogue.grsf_publish_ws.utils.csv.ManageTimeSeriesThread;
 import org.gcube.data_catalogue.grsf_publish_ws.utils.threads.AssociationToGroupThread;
-import org.gcube.data_catalogue.grsf_publish_ws.utils.threads.WritePostCatalogueManagerThread;
 import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
 import org.gcube.datacatalogue.ckanutillibrary.shared.ResourceBean;
-import org.gcube.datacatalogue.ckanutillibrary.shared.RolesCkanGroupOrOrg;
-import org.gcube.datacatalogue.common.Constants;
-import org.gcube.datacatalogue.common.enums.Product_Type;
-import org.gcube.datacatalogue.common.enums.Sources;
-import org.gcube.datacatalogue.common.enums.Status;
-import org.gcube.datacatalogue.common.enums.Stock_Type;
+import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
+import org.gcube.datacatalogue.common.AAA_PORTED.Product_Type;
+import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
+import org.gcube.datacatalogue.common.AAA_PORTED.Status;
+import org.gcube.datacatalogue.common.AAA_PORTED.Stock_Type;
 import org.json.simple.JSONObject;
 import org.slf4j.LoggerFactory;
-import eu.trentorise.opendata.jackan.model.CkanDataset;
-import eu.trentorise.opendata.jackan.model.CkanLicense;
 /**
  * Services common utils.
  * @author Costantino Perciante (ISTI - CNR)
@@ -55,20 +51,20 @@ public class CommonServiceUtils {
     private static final int TAG_MAX_SIZE = 100;
     private static Map<String,Boolean> extensionsCheck = new ConcurrentHashMap<>();
-    /**
-     * Retrieve the list of licenses for stocks and fisheries
-     * @return
-     */
-    public static Map<String,String> getLicenses(DataCatalogue catalogue) {
-        logger.info("Requested licenses...");
-        Map<String,String> toReturn = new HashMap<String,String>();
-        List<CkanLicense> licenses = catalogue.getLicenses();
-        for(CkanLicense ckanLicense : licenses) {
-            toReturn.put(ckanLicense.getId(), ckanLicense.getTitle());
-        }
-        return toReturn;
-    }
+    // /**
+    //  * Retrieve the list of licenses for stocks and fisheries
+    //  * @return
+    //  */
+    // public static Map<String,String> getLicenses(DataCatalogue catalogue) {
+    //     logger.info("Requested licenses...");
+    //     Map<String,String> toReturn = new HashMap<String,String>();
+    //     List<CkanLicense> licenses = catalogue.getLicenses();
+    //
+    //     for(CkanLicense ckanLicense : licenses) {
+    //         toReturn.put(ckanLicense.getId(), ckanLicense.getTitle());
+    //     }
+    //     return toReturn;
+    // }
     /**
      * Validate an aggregated GRSF record. TODO use @Valid tags
@@ -421,45 +417,46 @@ public class CommonServiceUtils {
         }
     }
-    /**
-     * Evaluate if the user has the admin role
-     * Throws exception if he/she doesn't
-     */
-    public static void hasAdminRole(String username, DataCatalogue catalogue, String apiKey, String organization)
-            throws Exception {
-        String role = catalogue.getRoleOfUserInOrganization(username, organization, apiKey);
-        logger.info("Role of the user " + username + " is " + role + " in " + organization);
-        if(role == null || role.isEmpty() || !role.equalsIgnoreCase(RolesCkanGroupOrOrg.ADMIN.toString()))
-            throw new Exception(
-                    "You are not authorized to create a product. Please check you have the Catalogue-Administrator role!");
-    }
-    /**
-     * Check this record's name
-     * @param futureName
-     * @param catalogue
-     * @throws Exception on name check
-     */
-    public static void checkName(String futureName, DataCatalogue catalogue) throws Exception {
-        if(!HelperMethods.isNameValid(futureName)) {
-            throw new Exception(
-                    "The 'uuid_knowledge_base' must contain only alphanumeric characters, and symbols like '.' or '_', '-'");
-        } else {
-            logger.debug("Checking if such name [" + futureName + "] doesn't exist ...");
-            boolean alreadyExists = catalogue.existProductWithNameOrId(futureName);
-            if(alreadyExists) {
-                logger.debug("A product with 'uuid_knowledge_base' " + futureName + " already exists");
-                throw new Exception("A product with 'uuid_knowledge_base' " + futureName + " already exists");
-            }
-        }
-    }
+    // /**
+    //  * Evaluate if the user has the admin role
+    //  * Throws exception if he/she doesn't
+    //  */
+    // public static void hasAdminRole(String username, DataCatalogue catalogue, String apiKey, String organization)
+    //         throws Exception {
+    //
+    //     String role = catalogue.getRoleOfUserInOrganization(username, organization, apiKey);
+    //     logger.info("Role of the user " + username + " is " + role + " in " + organization);
+    //
+    //     if(role == null || role.isEmpty() || !role.equalsIgnoreCase(RolesCkanGroupOrOrg.ADMIN.toString()))
+    //         throw new Exception(
+    //                 "You are not authorized to create a product. Please check you have the Catalogue-Administrator role!");
+    //
+    // }
+    // /**
+    //  * Check this record's name
+    //  * @param futureName
+    //  * @param catalogue
+    //  * @throws Exception on name check
+    //  */
+    // public static void checkName(String futureName, DataCatalogue catalogue) throws Exception {
+    //
+    //     if(!HelperMethods.isNameValid(futureName)) {
+    //         throw new Exception(
+    //                 "The 'uuid_knowledge_base' must contain only alphanumeric characters, and symbols like '.' or '_', '-'");
+    //     } else {
+    //
+    //         logger.debug("Checking if such name [" + futureName + "] doesn't exist ...");
+    //         boolean alreadyExists = catalogue.existProductWithNameOrId(futureName);
+    //
+    //         if(alreadyExists) {
+    //             logger.debug("A product with 'uuid_knowledge_base' " + futureName + " already exists");
+    //             throw new Exception("A product with 'uuid_knowledge_base' " + futureName + " already exists");
+    //
+    //         }
+    //     }
+    // }
     /**
      * Validate and check sources
@@ -485,20 +482,39 @@ public class CommonServiceUtils {
     // validate the record if it is a GRSF one and set the record type and in manage context
     // Status field is needed only in the Manage context for GRSF records
+    // In web.xml a parameter indicates the Admin VRE as full path.
     if(context.equals((String) contextServlet.getInitParameter(HelperMethods.MANAGE_CONTEX_KEY))) {
+        // If we are in Admin VRE and the source is GRSF
         if(sourceInPath.equals(Sources.GRSF)) {
+            // RefersTo cannot be empty or null in GRSF_Admin for a GRSF record
             List<RefersToBean> refersTo = record.getRefersTo();
             if(refersTo == null || refersTo.isEmpty())
                 throw new Exception("refers_to is empty for a GRSF record");
+            // For each RefersTo a Resource is created in the record. The resource point to the referred record.
+            // We have also to set database sources
             String databaseSource = "";
             // we have the id within the catalog of this record. This means that we can retrieve the record and its system:type
             for(RefersToBean refersToBean : refersTo) {
-                String sourceOrganization = getRecordOrganization(refersToBean.getId(), apiKey, context);
+                String sourceOrganization = "";
+                // Here there is a lookup to the referred records.
+                // getRecordOrganization read the record and get the organization
+                // String sourceOrganization = getRecordOrganization(refersToBean.getId(), apiKey, context);
                 resources.add(new ResourceBean(refersToBean.getUrl(), sourceOrganization, "", null, username, null,
                         null));
                 sourcesList.add(sourceOrganization.toLowerCase());
+                // concatenating the organization in a string which provide the databaseSource value :O
+                // Better using a list
                 databaseSource += sourceOrganization + " ";
             }
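Side note on the hunk above: the newly added comment suggests collecting the organizations in a list instead of concatenating databaseSource inside the loop. A minimal sketch of that list-based alternative, not part of this changeset: the class and method names are illustrative, and the per-record getRecordOrganization lookup stays disabled in this commit, so the example values only stand in for the organizations of the records listed in refers_to.

```java
import java.util.Arrays;
import java.util.List;

// Sketch only: join the source organizations once instead of appending to a
// databaseSource string inside the loop (no trailing-space handling needed).
public class DatabaseSourceSketch {

    static String buildDatabaseSource(List<String> sourceOrganizations) {
        return String.join(" ", sourceOrganizations);
    }

    public static void main(String[] args) {
        // illustrative organization names, one per referred record
        System.out.println(buildDatabaseSource(Arrays.asList("firms", "ram", "fishsource")));
    }
}
```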
@@ -563,35 +579,35 @@ public class CommonServiceUtils {
         groups.add(groupName);
     }
-    /**
-     * Fetch the system:type property from a record
-     * @param itemIdOrName
-     * @param apiKey
-     * @return null on error
-     * @throws Exception
-     */
-    public static String getSystemTypeValue(String itemIdOrName, String apiKey, String context) throws Exception {
-        DataCatalogue catalog = HelperMethods.getDataCatalogueRunningInstance(context);
-        CkanDataset dataset = catalog.getDataset(itemIdOrName, apiKey);
-        if(dataset == null)
-            throw new Exception("Unable to find record with id or name " + itemIdOrName);
-        String systemTypeValue = dataset.getExtrasAsHashMap().get(Constants.SYSTEM_TYPE_CUSTOM_KEY);
-        if(systemTypeValue == null || systemTypeValue.isEmpty())
-            throw new Exception(Constants.SYSTEM_TYPE_CUSTOM_KEY + " property not set in record " + itemIdOrName);
-        else
-            return systemTypeValue;
-    }
-    public static String getRecordOrganization(String itemIdOrName, String apiKey, String context) throws Exception {
-        DataCatalogue catalog = HelperMethods.getDataCatalogueRunningInstance(context);
-        CkanDataset dataset = catalog.getDataset(itemIdOrName, apiKey);
-        if(dataset == null)
-            throw new Exception("Unable to find record with id or name " + itemIdOrName);
-        else
-            return dataset.getOrganization().getTitle();
-    }
+    // /**
+    //  * Fetch the system:type property from a record
+    //  * @param itemIdOrName
+    //  * @param apiKey
+    //  * @return null on error
+    //  * @throws Exception
+    //  */
+    // public static String getSystemTypeValue(String itemIdOrName, String apiKey, String context) throws Exception {
+    //
+    //     DataCatalogue catalog = HelperMethods.getDataCatalogueRunningInstance(context);
+    //     CkanDataset dataset = catalog.getDataset(itemIdOrName, apiKey);
+    //     if(dataset == null)
+    //         throw new Exception("Unable to find record with id or name " + itemIdOrName);
+    //     String systemTypeValue = dataset.getExtrasAsHashMap().get(Constants.SYSTEM_TYPE_CUSTOM_KEY);
+    //     if(systemTypeValue == null || systemTypeValue.isEmpty())
+    //         throw new Exception(Constants.SYSTEM_TYPE_CUSTOM_KEY + " property not set in record " + itemIdOrName);
+    //     else
+    //         return systemTypeValue;
+    //
+    // }
+    //
+    // public static String getRecordOrganization(String itemIdOrName, String apiKey, String context) throws Exception {
+    //     DataCatalogue catalog = HelperMethods.getDataCatalogueRunningInstance(context);
+    //     CkanDataset dataset = catalog.getDataset(itemIdOrName, apiKey);
+    //     if(dataset == null)
+    //         throw new Exception("Unable to find record with id or name " + itemIdOrName);
+    //     else
+    //         return dataset.getOrganization().getTitle();
+    // }
     /**
      * Actions to execute once the dataset has been updated or created.
@@ -657,12 +673,12 @@ public class CommonServiceUtils {
             new ManageTimeSeriesThread(record, futureName, username, catalogue, context, token).start();
             // write a post if the product has been published in grsf context
-            if(catalogue.isSocialPostEnabled() && !isUpdated && context
-                    .equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY))) {
-                new WritePostCatalogueManagerThread(context, token, futureTitle, itemUrlForThread, true,
-                        new ArrayList<String>(), authorFullname).start();
-                logger.info("Thread to write a post about the new product has been launched");
-            }
+            // if(catalogue.isSocialPostEnabled() && !isUpdated && context
+            //         .equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY))) {
+            //     new WritePostCatalogueManagerThread(context, token, futureTitle, itemUrlForThread, true,
+            //             new ArrayList<String>(), authorFullname).start();
+            //     logger.info("Thread to write a post about the new product has been launched");
+            // }
         } catch(InterruptedException e) {
             logger.error("Error", e);
         }
@@ -670,25 +686,25 @@ public class CommonServiceUtils {
         }).start();
     }
-    /**
-     * Extend roles to other organization
-     * @param username
-     * @param catalogue
-     * @param organization
-     * @param admin
-     */
-    public static void extendRoleToOtherOrganizations(String username, DataCatalogue catalogue, String organization,
-            RolesCkanGroupOrOrg admin) {
-        logger.debug("Checking if role extension is needed here");
-        if(extensionsCheck.containsKey(username) && extensionsCheck.get(username))
-            return;
-        else {
-            catalogue.assignRolesOtherOrganization(username, organization, admin);
-            extensionsCheck.put(username, true);
-        }
-    }
+    // /**
+    //  * Extend roles to other organization
+    //  * @param username
+    //  * @param catalogue
+    //  * @param organization
+    //  * @param admin
+    //  */
+    // public static void extendRoleToOtherOrganizations(String username, DataCatalogue catalogue, String organization,
+    //         RolesCkanGroupOrOrg admin) {
+    //
+    //     logger.debug("Checking if role extension is needed here");
+    //     if(extensionsCheck.containsKey(username) && extensionsCheck.get(username))
+    //         return;
+    //     else {
+    //         catalogue.assignRolesOtherOrganization(username, organization, admin);
+    //         extensionsCheck.put(username, true);
+    //     }
+    //
+    // }
     /**
      * Evaluate in which organization a record has to be published. The only exception is when grsf_admin is involved.
View File
@@ -29,10 +29,11 @@ import org.gcube.common.homelibrary.home.workspace.exceptions.ItemNotFoundExcept
 import org.gcube.common.homelibrary.home.workspace.folder.items.ExternalFile;
 import org.gcube.common.resources.gcore.utils.XPathHelper;
 import org.gcube.common.scope.api.ScopeProvider;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.GcoreEndPointReaderSocial;
 import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
 import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogueFactory;
-import org.gcube.datacatalogue.common.caches.CacheImpl;
-import org.gcube.datacatalogue.common.caches.CacheInterface;
+import org.gcube.datacatalogue.common.AAA_UNNEEDED.CacheImpl;
+import org.gcube.datacatalogue.common.AAA_UNNEEDED.CacheInterface;
 import org.gcube.resources.discovery.client.api.DiscoveryClient;
 import org.gcube.resources.discovery.client.queries.api.Query;
 import org.gcube.resources.discovery.client.queries.impl.QueryBox;
@@ -103,38 +104,38 @@ public abstract class HelperMethods {
         return modified;
     }
-    /**
-     * Retrieve the running instance of the data catalogue for this scope
-     * @return
-     * @throws Exception
-     */
-    public static DataCatalogue getDataCatalogueRunningInstance(String scope) throws Exception{
-        if(catalogueCache.get(scope) != null)
-            return catalogueCache.get(scope);
-        else{
-            try{
-                DataCatalogue instance = DataCatalogueFactory.getFactory().getUtilsPerScope(scope);
-                catalogueCache.insert(scope, instance);
-                return instance;
-            }catch(Exception e){
-                logger.error("Failed to instantiate data catalogue lib", e);
-                throw new Exception("Failed to retrieve catalogue information");
-            }
-        }
-    }
-    /**
-     * Retrieve the organization name in which the user wants to publish starting from the scope
-     * @param contextInWhichPublish
-     * @return
-     */
-    public static String retrieveOrgNameFromScope(String scope) {
-        String[] splittedScope = scope.split("/");
-        return splittedScope[splittedScope.length - 1].toLowerCase();
-    }
+    // /**
+    //  * Retrieve the running instance of the data catalogue for this scope
+    //  * @return
+    //  * @throws Exception
+    //  */
+    // public static DataCatalogue getDataCatalogueRunningInstance(String scope) throws Exception{
+    //
+    //     if(catalogueCache.get(scope) != null)
+    //         return catalogueCache.get(scope);
+    //     else{
+    //         try{
+    //             DataCatalogue instance = DataCatalogueFactory.getFactory().getUtilsPerScope(scope);
+    //             catalogueCache.insert(scope, instance);
+    //             return instance;
+    //         }catch(Exception e){
+    //             logger.error("Failed to instantiate data catalogue lib", e);
+    //             throw new Exception("Failed to retrieve catalogue information");
+    //         }
+    //     }
+    // }
+    // /**
+    //  * Retrieve the organization name in which the user wants to publish starting from the scope
+    //  * @param contextInWhichPublish
+    //  * @return
+    //  */
+    // public static String retrieveOrgNameFromScope(String scope) {
+    //
+    //     String[] splittedScope = scope.split("/");
+    //     return splittedScope[splittedScope.length - 1].toLowerCase();
+    //
+    // }
 /**
  * Validate the name the product will have
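For reference, the retrieveOrgNameFromScope helper that the hunk above comments out simply took the last segment of the scope and lower-cased it. A tiny illustrative sketch, assuming a scope value like the dev scope used in the tests further down (the class name is made up for the example):

```java
// Illustrative only: mirrors the commented-out retrieveOrgNameFromScope above.
public class ScopeToOrgSketch {
    public static void main(String[] args) {
        String scope = "/gcube/devNext/NextNext";   // example scope, as in the tests below
        String[] splittedScope = scope.split("/");
        String orgName = splittedScope[splittedScope.length - 1].toLowerCase();
        System.out.println(orgName);                // prints "nextnext"
    }
}
```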
@@ -150,13 +151,14 @@ public abstract class HelperMethods {
         }
     }
-    /**
+    /*
+     *//**
      * Retrieve the user's email given his/her username
      * @param context
      * @param token
      * @return
      * @throws Exception
-     */
+     *//*
     public static String getUserEmail(String context, String token){
         // check in cache
@@ -174,13 +176,13 @@ public abstract class HelperMethods {
         return result;
     }
-    /**
+    *//**
      * Retrieve the user's fullname given his/her username
      * @param context
      * @param token
      * @return
      * @throws Exception
-     */
+     *//*
     public static String getUserFullname(String context, String token){
         // check in cache
@@ -198,10 +200,10 @@ public abstract class HelperMethods {
         return result;
     }
-    /**
+    *//**
      * Execute the GET http request at this url, and return the result as string
      * @return
-     */
+     *//*
     private static String executGETHttpRequest(String url, int expectedCodeOnSuccess){
         try(CloseableHttpClient client = HttpClientBuilder.create().build();){
@@ -231,7 +233,7 @@ public abstract class HelperMethods {
         return null;
-    }
+    }*/
     /**
      * Check that the given license id is in CKAN
@@ -516,56 +518,56 @@ public abstract class HelperMethods {
     }
-    /**
-     * Return a map for converting a key to a namespace:key format by reading a generic resource.
-     * @return a map
-     */
-    public static Map<String, String> getFieldToFieldNameSpaceMapping(String resourceName){
-        Map<String, String> toReturn = new HashMap<String, String>();
-        // check if data are in cache
-        if(namespacesCache.get(resourceName) != null){
-            return namespacesCache.get(resourceName);
-        }
-        else{
-            try {
-                Query q = new QueryBox("for $profile in collection('/db/Profiles/GenericResource')//Resource " +
-                        "where $profile/Profile/SecondaryType/string() eq '"+ "ApplicationProfile" + "' and $profile/Profile/Name/string() " +
-                        " eq '" + resourceName + "'" +
-                        "return $profile");
-                DiscoveryClient<String> client = client();
-                List<String> appProfile = client.submit(q);
-                if (appProfile == null || appProfile.size() == 0)
-                    throw new Exception("Your applicationProfile is not registered in the infrastructure");
-                else {
-                    String elem = appProfile.get(0);
-                    DocumentBuilder docBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
-                    Node node = docBuilder.parse(new InputSource(new StringReader(elem))).getDocumentElement();
-                    XPathHelper helper = new XPathHelper(node);
-                    NodeList nodeListKeys = helper.evaluateForNodes("//originalKey");
-                    NodeList nodeListModifiedKeys = helper.evaluateForNodes("//modifiedKey");
-                    int sizeKeys = nodeListKeys != null ? nodeListKeys.getLength() : 0;
-                    int sizeKeysModifed = nodeListModifiedKeys != null ? nodeListModifiedKeys.getLength() : 0;
-                    if(sizeKeys != sizeKeysModifed)
-                        throw new Exception("Malformed XML");
-                    logger.debug("Size is " + sizeKeys);
-                    for (int i = 0; i < sizeKeys; i++) {
-                        toReturn.put(nodeListKeys.item(i).getTextContent(), nodeListModifiedKeys.item(i).getTextContent());
-                    }
-                }
-                logger.debug("Map is " + toReturn);
-                namespacesCache.insert(resourceName, toReturn);
-                return toReturn;
-            } catch (Exception e) {
-                logger.error("Error while trying to fetch applicationProfile profile from the infrastructure", e);
-                return null;
-            }
-        }
-    }
+    // /**
+    //  * Return a map for converting a key to a namespace:key format by reading a generic resource.
+    //  * @return a map
+    //  */
+    // public static Map<String, String> getFieldToFieldNameSpaceMapping(String resourceName){
+    //     Map<String, String> toReturn = new HashMap<String, String>();
+    //
+    //     // check if data are in cache
+    //     if(namespacesCache.get(resourceName) != null){
+    //         return namespacesCache.get(resourceName);
+    //     }
+    //     else{
+    //         try {
+    //             Query q = new QueryBox("for $profile in collection('/db/Profiles/GenericResource')//Resource " +
+    //                     "where $profile/Profile/SecondaryType/string() eq '"+ "ApplicationProfile" + "' and $profile/Profile/Name/string() " +
+    //                     " eq '" + resourceName + "'" +
+    //                     "return $profile");
+    //
+    //             DiscoveryClient<String> client = client();
+    //             List<String> appProfile = client.submit(q);
+    //
+    //             if (appProfile == null || appProfile.size() == 0)
+    //                 throw new Exception("Your applicationProfile is not registered in the infrastructure");
+    //             else {
+    //
+    //                 String elem = appProfile.get(0);
+    //                 DocumentBuilder docBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
+    //                 Node node = docBuilder.parse(new InputSource(new StringReader(elem))).getDocumentElement();
+    //                 XPathHelper helper = new XPathHelper(node);
+    //
+    //                 NodeList nodeListKeys = helper.evaluateForNodes("//originalKey");
+    //                 NodeList nodeListModifiedKeys = helper.evaluateForNodes("//modifiedKey");
+    //                 int sizeKeys = nodeListKeys != null ? nodeListKeys.getLength() : 0;
+    //                 int sizeKeysModifed = nodeListModifiedKeys != null ? nodeListModifiedKeys.getLength() : 0;
+    //                 if(sizeKeys != sizeKeysModifed)
+    //                     throw new Exception("Malformed XML");
+    //                 logger.debug("Size is " + sizeKeys);
+    //                 for (int i = 0; i < sizeKeys; i++) {
+    //                     toReturn.put(nodeListKeys.item(i).getTextContent(), nodeListModifiedKeys.item(i).getTextContent());
+    //                 }
+    //             }
+    //             logger.debug("Map is " + toReturn);
+    //             namespacesCache.insert(resourceName, toReturn);
+    //             return toReturn;
+    //         } catch (Exception e) {
+    //             logger.error("Error while trying to fetch applicationProfile profile from the infrastructure", e);
+    //             return null;
+    //         }
+    //     }
+    // }
     /**
      * Replace the extras' keys if needed
View File
@@ -8,8 +8,8 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
-import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.TimeSeriesBean;
-import org.gcube.datacatalogue.common.Constants;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.TimeSeriesBean;
+import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
 import org.slf4j.LoggerFactory;
View File
@@ -20,15 +20,15 @@ import org.gcube.common.homelibrary.home.workspace.exceptions.ItemNotFoundExcept
 import org.gcube.common.homelibrary.home.workspace.exceptions.WorkspaceFolderNotFoundException;
 import org.gcube.common.homelibrary.home.workspace.folder.items.ExternalFile;
 import org.gcube.common.scope.api.ScopeProvider;
-import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField;
-import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.TimeSeries;
-import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.Common;
-import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.FisheryRecord;
-import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Common;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.CustomField;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.FisheryRecord;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.StockRecord;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.TimeSeries;
 import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
 import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
-import org.gcube.datacatalogue.common.caches.CacheImpl;
-import org.gcube.datacatalogue.common.caches.CacheInterface;
+import org.gcube.datacatalogue.common.AAA_UNNEEDED.CacheImpl;
+import org.gcube.datacatalogue.common.AAA_UNNEEDED.CacheInterface;
 import org.slf4j.LoggerFactory;
 import eu.trentorise.opendata.jackan.model.CkanResourceBase;
View File
@@ -8,15 +8,15 @@ import javax.ws.rs.core.Application;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
-import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.Resource;
-import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.FisheryRecord;
-import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.FisheryRecord;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Resource;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.StockRecord;
 import org.gcube.data_catalogue.grsf_publish_ws.services.GrsfPublisherFisheryService;
 import org.gcube.data_catalogue.grsf_publish_ws.services.GrsfPublisherStockService;
-import org.gcube.datacatalogue.common.enums.Fishery_Type;
-import org.gcube.datacatalogue.common.enums.Sources;
-import org.gcube.datacatalogue.common.enums.Status;
-import org.gcube.datacatalogue.common.enums.Stock_Type;
+import org.gcube.datacatalogue.common.AAA_PORTED.Fishery_Type;
+import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
+import org.gcube.datacatalogue.common.AAA_PORTED.Status;
+import org.gcube.datacatalogue.common.AAA_PORTED.Stock_Type;
 import org.glassfish.jersey.server.ResourceConfig;
 import org.glassfish.jersey.test.JerseyTest;
 import org.glassfish.jersey.test.TestProperties;
View File
@@ -24,22 +24,22 @@ import org.gcube.common.homelibrary.home.workspace.catalogue.WorkspaceCatalogue;
 import org.gcube.common.homelibrary.home.workspace.exceptions.InsufficientPrivilegesException;
 import org.gcube.common.homelibrary.home.workspace.exceptions.ItemAlreadyExistException;
 import org.gcube.common.scope.api.ScopeProvider;
-import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField;
-import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Group;
-import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Tag;
-import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.Resource;
-import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.TimeSeriesBean;
-import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.FisheryRecord;
-import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.CustomField;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.FisheryRecord;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Group;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Resource;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.StockRecord;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Tag;
+import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.TimeSeriesBean;
 import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
 import org.gcube.data_catalogue.grsf_publish_ws.utils.csv.CSVUtils;
 import org.gcube.data_catalogue.grsf_publish_ws.utils.threads.AssociationToGroupThread;
 import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
 import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogueFactory;
-import org.gcube.datacatalogue.common.enums.Abundance_Level;
-import org.gcube.datacatalogue.common.enums.Fishery_Type;
-import org.gcube.datacatalogue.common.enums.Sources;
-import org.gcube.datacatalogue.common.enums.Status;
+import org.gcube.datacatalogue.common.AAA_PORTED.Abundance_Level;
+import org.gcube.datacatalogue.common.AAA_PORTED.Fishery_Type;
+import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
+import org.gcube.datacatalogue.common.AAA_PORTED.Status;
 import org.slf4j.LoggerFactory;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -378,39 +378,39 @@ public class JTests {
     }
-    //@Test
-    public void testHierarchy() throws Exception{
-        String name = "low-abundance";
-        DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance("/gcube/devNext/NextNext");
-        List<String> uniqueGroups = new ArrayList<String>();
-        uniqueGroups.add(name);
-        uniqueGroups.add(name);
-        AssociationToGroupThread.findHierarchy(uniqueGroups, catalogue, catalogue.getApiKeyFromUsername("costantino_perciante"));
-        logger.debug("Hierarchy is " + uniqueGroups);
-    }
-    //@Test
-    public void testAssociationThread() throws Exception{
-        String name = "low-abundance";
-        DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance("/gcube/devNext/NextNext");
-        AssociationToGroupThread threadGroups = new AssociationToGroupThread(Arrays.asList(name), "another-test-test-please-ignore", "grsf", "costantino_perciante", catalogue, "apiKey");
-        threadGroups.start();
-        threadGroups.join();
-        logger.info("Thread stopped!");
-    }
-    //@Test
-    public void testCaches(){
-        String context = "/gcube/devNext/NextNext";
-        String token = "";
-        for (int i = 0; i < 1000; i++) {
-            logger.debug(HelperMethods.getUserEmail(context, token));
-        }
-    }
+    // //@Test
+    // public void testHierarchy() throws Exception{
+    //     String name = "low-abundance";
+    //     DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance("/gcube/devNext/NextNext");
+    //     List<String> uniqueGroups = new ArrayList<String>();
+    //     uniqueGroups.add(name);
+    //     uniqueGroups.add(name);
+    //     AssociationToGroupThread.findHierarchy(uniqueGroups, catalogue, catalogue.getApiKeyFromUsername("costantino_perciante"));
+    //     logger.debug("Hierarchy is " + uniqueGroups);
+    // }
+    //
+    // //@Test
+    // public void testAssociationThread() throws Exception{
+    //     String name = "low-abundance";
+    //     DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance("/gcube/devNext/NextNext");
+    //     AssociationToGroupThread threadGroups = new AssociationToGroupThread(Arrays.asList(name), "another-test-test-please-ignore", "grsf", "costantino_perciante", catalogue, "apiKey");
+    //     threadGroups.start();
+    //     threadGroups.join();
+    //     logger.info("Thread stopped!");
+    //
+    //
+    // }
+    //
+    // //@Test
+    // public void testCaches(){
+    //
+    //     String context = "/gcube/devNext/NextNext";
+    //     String token = "";
+    //     for (int i = 0; i < 1000; i++) {
+    //         logger.debug(HelperMethods.getUserEmail(context, token));
+    //     }
+    //
+    // }
     //@Test
     public void testMatch(){