Compare commits

...

7 Commits

@@ -11,7 +11,7 @@
<groupId>org.gcube.data-catalogue</groupId>
<artifactId>grsf-publisher-ws</artifactId>
<version>1.11.0</version>
<version>1.12.0</version>
<packaging>war</packaging>
<name>grsf-publisher-ws</name>
<description>Utility library to publish GRSF products on GRSF catalogue.</description>

@@ -1,8 +1,8 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import javax.validation.constraints.Size;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.json.simple.JSONObject;
import com.fasterxml.jackson.annotation.JsonProperty;

@@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.record;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.util.ArrayList;
import java.util.Arrays;
@@ -8,7 +8,7 @@ import java.util.Map;
import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
import org.gcube.datacatalogue.ckanutillibrary.shared.ResourceBean;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.custom_annotations;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;

@@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.record;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.util.List;
@@ -6,19 +6,9 @@ import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CkanResource;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Group;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Tag;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.TimeSeries;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.AnnotationBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.RefersToBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.Resource;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.SimilarRecordBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.TimeSeriesBean;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.enums.Sources;
import org.gcube.datacatalogue.common.enums.Status;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import org.gcube.datacatalogue.common.AAA_PORTED.Status;
import com.fasterxml.jackson.annotation.JsonProperty;

@@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.custom_annotations;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;

@@ -1,16 +1,13 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.record;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.util.List;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Group;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Tag;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.enums.Fishery_Type;
import org.gcube.datacatalogue.common.enums.Production_System_Type;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Fishery_Type;
import org.gcube.datacatalogue.common.AAA_PORTED.Production_System_Type;
import com.fasterxml.jackson.annotation.JsonProperty;

@@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.custom_annotations;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;

@@ -1,8 +1,8 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import javax.validation.constraints.NotNull;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;

@@ -1,10 +1,10 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.enums.Sources;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;

@@ -1,6 +1,6 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.json.simple.JSONObject;
import com.fasterxml.jackson.annotation.JsonProperty;

@@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.record;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.util.List;
@@ -6,15 +6,10 @@ import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Group;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Tag;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.TimeSeries;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.TimeSeriesBean;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.enums.Abundance_Level;
import org.gcube.datacatalogue.common.enums.Fishing_Pressure;
import org.gcube.datacatalogue.common.enums.Stock_Type;
import org.gcube.datacatalogue.common.AAA_PORTED.Abundance_Level;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Fishing_Pressure;
import org.gcube.datacatalogue.common.AAA_PORTED.Stock_Type;
import com.fasterxml.jackson.annotation.JsonProperty;

@@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.custom_annotations;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;

@@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.custom_annotations;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;

@@ -1,6 +1,6 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;

@@ -1,11 +1,10 @@
package org.gcube.data_catalogue.grsf_publish_ws.ex;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseBean;
import org.slf4j.LoggerFactory;
/**

@@ -1,9 +1,9 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;

@@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.utils;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import static org.gcube.resources.discovery.icclient.ICFactory.client;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;

@@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.output;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
/**
* Response bean to be used by the service.

@@ -1,6 +1,6 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.output;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import com.fasterxml.jackson.annotation.JsonProperty;

@@ -1,10 +1,10 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.enums.Status;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Status;
import com.fasterxml.jackson.annotation.JsonProperty;

@@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.ex;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import javax.validation.ConstraintViolation;
import javax.validation.ConstraintViolationException;
@@ -7,7 +7,6 @@ import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseBean;
import org.slf4j.LoggerFactory;
/**

@@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.utils.threads;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import java.util.List;

@@ -29,19 +29,19 @@ import org.gcube.common.authorization.library.provider.AuthorizationProvider;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.authorization.library.utils.Caller;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.DeleteRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.UpdateRecordStatus;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseCreationBean;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.DeleteRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.ResponseBean;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.ResponseCreationBean;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.UpdateRecordStatus;
import org.gcube.data_catalogue.grsf_publish_ws.utils.CommonServiceUtils;
import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
import org.gcube.datacatalogue.ckanutillibrary.shared.ResourceBean;
import org.gcube.datacatalogue.ckanutillibrary.shared.RolesCkanGroupOrOrg;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.enums.Product_Type;
import org.gcube.datacatalogue.common.enums.Sources;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Product_Type;
import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import org.slf4j.LoggerFactory;
import eu.trentorise.opendata.jackan.model.CkanDataset;
@@ -61,33 +61,33 @@ public class GrsfPublisherFisheryService {
// Logger
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(GrsfPublisherFisheryService.class);
@GET
@Path("hello")
@Produces(MediaType.TEXT_PLAIN)
public Response hello() {
return Response.ok("Hello.. Fishery service is here").build();
}
@GET
@Path("get-licenses")
@Produces(MediaType.APPLICATION_JSON)
public Response getLicenses() {
Status status = Status.OK;
String context = ScopeProvider.instance.get();
DataCatalogue catalogue;
try {
catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
Map<String,String> licenses = CommonServiceUtils.getLicenses(catalogue);
if(licenses == null)
status = Status.INTERNAL_SERVER_ERROR;
return Response.status(status).entity(licenses).build();
} catch(Exception e) {
status = Status.INTERNAL_SERVER_ERROR;
return Response.status(status)
.entity(new ResponseBean(false, "Unable to retrieve license list " + e.getLocalizedMessage(), null))
.build();
}
}
// @GET
// @Path("hello")
// @Produces(MediaType.TEXT_PLAIN)
// public Response hello() {
// return Response.ok("Hello.. Fishery service is here").build();
// }
//
// @GET
// @Path("get-licenses")
// @Produces(MediaType.APPLICATION_JSON)
// public Response getLicenses() {
// Status status = Status.OK;
// String context = ScopeProvider.instance.get();
// DataCatalogue catalogue;
// try {
//// catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// Map<String,String> licenses = CommonServiceUtils.getLicenses(catalogue);
// if(licenses == null)
// status = Status.INTERNAL_SERVER_ERROR;
// return Response.status(status).entity(licenses).build();
// } catch(Exception e) {
// status = Status.INTERNAL_SERVER_ERROR;
// return Response.status(status)
// .entity(new ResponseBean(false, "Unable to retrieve license list " + e.getLocalizedMessage(), null))
// .build();
// }
// }
@POST
@Path("publish-product")
@@ -113,34 +113,43 @@ public class GrsfPublisherFisheryService {
// Cast the source to the accepted ones
Sources sourceInPath = Sources.onDeserialize(source);
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) {
throw new Exception("There was a problem while serving your request. No catalogue instance was found!");
} else {
String apiKey = catalogue.getApiKeyFromUsername(username);
String organization = HelperMethods.retrieveOrgNameFromScope(context);
CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
// extend this role to the other organizations in this context
CommonServiceUtils.extendRoleToOtherOrganizations(username, catalogue, organization,
RolesCkanGroupOrOrg.ADMIN);
DataCatalogue catalogue = null;
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// if(catalogue == null) {
// throw new Exception("There was a problem while serving your request. No catalogue instance was found!");
// } else {
String apiKey = "";
String organization = "";
// String apiKey = catalogue.getApiKeyFromUsername(username);
// String organization = HelperMethods.retrieveOrgNameFromScope(context);
// CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
//
// // extend this role to the other organizations in this context
// CommonServiceUtils.extendRoleToOtherOrganizations(username, catalogue, organization,
// RolesCkanGroupOrOrg.ADMIN);
String authorMail = "";
String authorFullname = "";
// retrieve the user's email and fullname
String authorMail = HelperMethods.getUserEmail(context, token);
String authorFullname = HelperMethods.getUserFullname(context, token);
if(authorMail == null || authorFullname == null) {
throw new Exception("Sorry but it was not possible to retrieve your fullname/email!");
}
// String authorMail = HelperMethods.getUserEmail(context, token);
// String authorFullname = HelperMethods.getUserFullname(context, token);
//
// if(authorMail == null || authorFullname == null) {
// throw new Exception("Sorry but it was not possible to retrieve your fullname/email!");
// }
// The name of the product will be the uuid of the kb. The title will be the fishery's fishery_name. Fishery has also the constraint that
// fishing area and jurisdiction area cannot be empty at the same time
String futureName = record.getUuid();
String futureTitle = record.getFisheryName();
String futureName = "";
String futureTitle = "";
// String futureName = record.getUuid();
// String futureTitle = record.getFisheryName();
// check name
CommonServiceUtils.checkName(futureName, catalogue);
// CommonServiceUtils.checkName(futureName, catalogue);
Map<String,List<String>> customFields = record.getExtrasFields();
Set<String> tags = new HashSet<String>();
@@ -152,26 +161,31 @@ public class GrsfPublisherFisheryService {
Product_Type.FISHERY, tags, customFields, groups, resources, username, futureTitle);
// check the license id
String license = null;
if(record.getLicense() == null || record.getLicense().isEmpty())
license = Constants.DEFAULT_LICENSE;
else if(HelperMethods.existsLicenseId(record.getLicense(), catalogue))
license = record.getLicense();
else
throw new Exception("Please check the license id!");
String license = "";
// String license = null;
// if(record.getLicense() == null || record.getLicense().isEmpty())
// license = Constants.DEFAULT_LICENSE;
// else if(HelperMethods.existsLicenseId(record.getLicense(), catalogue))
// license = record.getLicense();
// else
// throw new Exception("Please check the license id!");
//
long version = record.getVersion() == null ? 1 : record.getVersion();
// set the visibility of the datatest according the context
boolean publicDataset = context
.equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY));
// convert extras' keys to keys with namespace
Map<String,String> namespaces = HelperMethods
.getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_FISHERY);
Map<String,String> namespaces = new HashMap<>();
if(namespaces == null)
throw new Exception("Failed to retrieve the namespaces for the key fields!");
// Map<String,String> namespaces = HelperMethods
// .getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_FISHERY);
// if(namespaces == null)
// throw new Exception("Failed to retrieve the namespaces for the key fields!");
customFields = HelperMethods.replaceFieldsKey(customFields, namespaces,
!sourceInPath.equals(Sources.GRSF));
@@ -180,6 +194,7 @@ public class GrsfPublisherFisheryService {
logger.info("Invoking create method..");
// create the product
id = catalogue.createCKanDatasetMultipleCustomFields(apiKey, futureTitle, futureName,
publishInOrganization, authorFullname, authorMail,
@@ -188,7 +203,7 @@ public class GrsfPublisherFisheryService {
null, license, new ArrayList<String>(tags), customFields, resources, publicDataset);
// post actions
if(id != null) {
// if(id != null) {
logger.info("Created record with identifier " + id);
String description = Constants.SHORT_NAME_CUSTOM_KEY + ": " + record.getShortName() + "\n";
@@ -203,10 +218,10 @@ public class GrsfPublisherFisheryService {
status = Status.CREATED;
} else {
throw new Exception("There was an error during the record generation, sorry");
}
}
// } else {
// throw new Exception("There was an error during the record generation, sorry");
// }
// }
} catch(Exception e) {
logger.error("Failed to create fishery record" + e);
status = Status.INTERNAL_SERVER_ERROR;
@@ -216,68 +231,69 @@ public class GrsfPublisherFisheryService {
return Response.status(status).entity(responseBean).build();
}
@DELETE
@Path("delete-product")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response deleteFishery(@NotNull(message = "input value is missing") @Valid DeleteRecord recordToDelete,
@PathParam("source") String source) throws ValidationException {
// retrieve context and username
Caller caller = AuthorizationProvider.instance.get();
String username = caller.getClient().getId();
String context = ScopeProvider.instance.get();
ResponseCreationBean responseBean = new ResponseCreationBean();
Status status = Status.INTERNAL_SERVER_ERROR;
// check it is a fishery ...
logger.info(
"Received call to delete product with id " + recordToDelete.getId() + ", checking if it is a fishery");
try {
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// Cast the source to the accepted ones
Sources sourceInPath = Sources.onDeserialize(source);
logger.debug("The request is to delete a fishery object of source " + sourceInPath);
// retrieve the catalogue instance
String apiKey = catalogue.getApiKeyFromUsername(username);
CkanDataset fisheryInCkan = catalogue.getDataset(recordToDelete.getId(), apiKey);
if(fisheryInCkan == null) {
status = Status.NOT_FOUND;
throw new Exception("There was a problem while serving your request. This item was not found");
}
// check it is in the right source and it is a fishery
String type = fisheryInCkan.getExtrasAsHashMap().get(Constants.DOMAIN_CUSTOM_KEY);
if((fisheryInCkan.getOrganization().getName().equalsIgnoreCase(source)
|| fisheryInCkan.getOrganization().getName().toLowerCase().contains(source))
&& Product_Type.FISHERY.getOrigName().equals(type)) {
logger.debug("Ok, this is a fishery of the right source, removing it");
boolean deleted = catalogue.deleteProduct(fisheryInCkan.getId(), apiKey, true);
if(deleted) {
logger.info("Fishery DELETED AND PURGED!");
status = Status.OK;
responseBean.setId(fisheryInCkan.getId());
}
} else {
status = Status.BAD_REQUEST;
throw new Exception(
"The id you are using doesn't belong to a Fishery item having source " + source + "!");
}
} catch(Exception e) {
logger.error("Failed to delete this", e);
responseBean.setError(e.getMessage());
}
return Response.status(status).entity(responseBean).build();
}
// @DELETE
// @Path("delete-product")
// @Consumes(MediaType.APPLICATION_JSON)
// @Produces(MediaType.APPLICATION_JSON)
// public Response deleteFishery(@NotNull(message = "input value is missing") @Valid DeleteRecord recordToDelete,
// @PathParam("source") String source) throws ValidationException {
//
// // retrieve context and username
// Caller caller = AuthorizationProvider.instance.get();
// String username = caller.getClient().getId();
// String context = ScopeProvider.instance.get();
//
// ResponseCreationBean responseBean = new ResponseCreationBean();
// Status status = Status.INTERNAL_SERVER_ERROR;
//
// // check it is a fishery ...
// logger.info(
// "Received call to delete product with id " + recordToDelete.getId() + ", checking if it is a fishery");
// try {
//
// DataCatalogue catalogue = null;
//// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
//
// // Cast the source to the accepted ones
// Sources sourceInPath = Sources.onDeserialize(source);
// logger.debug("The request is to delete a fishery object of source " + sourceInPath);
//
// // retrieve the catalogue instance
// String apiKey = catalogue.getApiKeyFromUsername(username);
// CkanDataset fisheryInCkan = catalogue.getDataset(recordToDelete.getId(), apiKey);
//
// if(fisheryInCkan == null) {
// status = Status.NOT_FOUND;
// throw new Exception("There was a problem while serving your request. This item was not found");
// }
//
// // check it is in the right source and it is a fishery
// String type = fisheryInCkan.getExtrasAsHashMap().get(Constants.DOMAIN_CUSTOM_KEY);
//
// if((fisheryInCkan.getOrganization().getName().equalsIgnoreCase(source)
// || fisheryInCkan.getOrganization().getName().toLowerCase().contains(source))
// && Product_Type.FISHERY.getOrigName().equals(type)) {
//
// logger.debug("Ok, this is a fishery of the right source, removing it");
// boolean deleted = catalogue.deleteProduct(fisheryInCkan.getId(), apiKey, true);
//
// if(deleted) {
// logger.info("Fishery DELETED AND PURGED!");
// status = Status.OK;
// responseBean.setId(fisheryInCkan.getId());
// }
// } else {
// status = Status.BAD_REQUEST;
// throw new Exception(
// "The id you are using doesn't belong to a Fishery item having source " + source + "!");
// }
// } catch(Exception e) {
// logger.error("Failed to delete this", e);
// responseBean.setError(e.getMessage());
// }
//
// return Response.status(status).entity(responseBean).build();
// }
@GET
@Path("get-fisheries-ids")
@@ -298,11 +314,12 @@ public class GrsfPublisherFisheryService {
// Cast the source to the accepted ones
Sources sourceInPath = Sources.onDeserialize(source);
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) {
throw new Exception("There was a problem while serving your request");
}
DataCatalogue catalogue = null;
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// if(catalogue == null) {
// throw new Exception("There was a problem while serving your request");
// }
//
// if it is a request for GRSF records, we have Fishery - Stock groups, so it is easy.
// For other cases, records needs to be parsed
if(sourceInPath.equals(Sources.GRSF))
@@ -345,10 +362,13 @@ public class GrsfPublisherFisheryService {
logger.info("Received call to get the catalogue identifier for the product with name " + name);
try {
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) {
throw new Exception("There was a problem while serving your request");
}
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// if(catalogue == null) {
// throw new Exception("There was a problem while serving your request");
// }
//
DataCatalogue catalogue = null;
CkanDataset dataset = catalogue.getDataset(name, catalogue.getApiKeyFromUsername(username));
if(dataset != null) {
Map<String,String> result = new HashMap<String,String>();
@@ -395,33 +415,40 @@ public class GrsfPublisherFisheryService {
throw new Exception("Please specify the '" + Constants.CATALOG_ID + "' property");
}
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) {
throw new Exception(
"There was a problem while serving your request. No catalogue instance was found in this context!");
} else {
DataCatalogue catalogue = null;
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// if(catalogue == null) {
// throw new Exception(
// "There was a problem while serving your request. No catalogue instance was found in this context!");
// } else {
// get already published record and modify it
String apiKey = catalogue.getApiKeyFromUsername(username);
String apiKey = "";
// String apiKey = catalogue.getApiKeyFromUsername(username);
CkanDataset recordPublished = catalogue.getDataset(catalogId, apiKey);
if(recordPublished == null)
throw new Exception("A record with catalogue id " + catalogId + " does not exist!");
// retrieve the user's email and fullname
String authorMail = HelperMethods.getUserEmail(context, token);
String authorFullname = HelperMethods.getUserFullname(context, token);
String authorMail = "";
String authorFullname = "";
if(authorMail == null || authorFullname == null) {
logger.debug("Author fullname or mail missing, cannot continue");
throw new Exception("Sorry but there was not possible to retrieve your fullname/email!");
}
// // retrieve the user's email and fullname
// String authorMail = HelperMethods.getUserEmail(context, token);
// String authorFullname = HelperMethods.getUserFullname(context, token);
//
// if(authorMail == null || authorFullname == null) {
// logger.debug("Author fullname or mail missing, cannot continue");
// throw new Exception("Sorry but there was not possible to retrieve your fullname/email!");
// }
String organization = HelperMethods.retrieveOrgNameFromScope(context);
String organization = "";
// String organization = HelperMethods.retrieveOrgNameFromScope(context);
// check he/she has admin role
CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
// CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
// name, product url and are going to remain unchanged (so we keep them from the already published record);
String name = recordPublished.getName();
@@ -459,11 +486,12 @@ public class GrsfPublisherFisheryService {
.equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY));
// convert extras' keys to keys with namespace
Map<String,String> namespaces = HelperMethods
.getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_FISHERY);
if(namespaces == null)
throw new Exception("Failed to retrieve the namespaces for the key fields!");
Map<String,String> namespaces = new HashMap<>();
// Map<String,String> namespaces = HelperMethods
// .getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_FISHERY);
//
// if(namespaces == null)
// throw new Exception("Failed to retrieve the namespaces for the key fields!");
// retrieve the already generated url
String modifiedUUIDKey = namespaces.containsKey(Constants.ITEM_URL_FIELD)
@@ -502,7 +530,7 @@ public class GrsfPublisherFisheryService {
} else {
throw new Exception("There was an error during the item updated, sorry");
}
}
// }
} catch(Exception e) {
logger.error("Failed to update fishery record" + e);
responseBean.setError(e.getMessage());
@@ -527,14 +555,15 @@ public class GrsfPublisherFisheryService {
ResponseCreationBean responseBean = new ResponseCreationBean();
Status status = Status.INTERNAL_SERVER_ERROR;
try {
// try {
DataCatalogue catalogue = null;
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) {
throw new Exception(
"There was a problem while serving your request. No catalogue instance was found in this context!");
} else {
// if(catalogue == null) {
// throw new Exception(
// "There was a problem while serving your request. No catalogue instance was found in this context!");
// } else {
// catalog id must be reported
String uuid = bean.getUuid();
@@ -544,21 +573,24 @@ public class GrsfPublisherFisheryService {
String apiKeyUser = catalogue.getApiKeyFromUsername(username);
CkanDataset record = catalogue.getDataset(uuid, apiKeyUser);
if(record == null)
throw new Exception("A record with knowledge_base_id id " + uuid + " does not exist!");
if(record == null){
// throw new Exception("A record with knowledge_base_id id " + uuid + " does not exist!");
}
// check system type
boolean isGRSF = !record.getExtrasAsHashMap().get(Constants.SYSTEM_TYPE_CUSTOM_KEY)
.equals(Constants.SYSTEM_TYPE_FOR_SOURCES_VALUE);
if(!isGRSF)
throw new Exception("You are trying to modify a Legacy record!");
if(!isGRSF) {
// throw new Exception("You are trying to modify a Legacy record!");
}
boolean rightDomain = record.getExtrasAsHashMap().get(Constants.DOMAIN_CUSTOM_KEY)
.equalsIgnoreCase(Product_Type.FISHERY.getOrigName());
if(!rightDomain)
throw new Exception("This is not a Fishery record!");
if(!rightDomain) {
// throw new Exception("This is not a Fishery record!");
}
// update it
Map<String,List<String>> updateStatus = new HashMap<String,List<String>>(1);
@@ -570,11 +602,11 @@ public class GrsfPublisherFisheryService {
responseBean.setId(record.getId());
responseBean.setItemUrl(record.getExtrasAsHashMap().get(Constants.ITEM_URL_FIELD));
}
} catch(Exception e) {
logger.error("Failed to update fishery record's status", e);
responseBean.setError(e.getMessage());
}
// }
// } catch(Exception e) {
// logger.error("Failed to update fishery record's status", e);
// responseBean.setError(e.getMessage());
// }
return Response.status(status).entity(responseBean).build();

@@ -29,19 +29,19 @@ import org.gcube.common.authorization.library.provider.AuthorizationProvider;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.authorization.library.utils.Caller;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.DeleteRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.UpdateRecordStatus;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseCreationBean;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.DeleteRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.ResponseBean;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.ResponseCreationBean;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.UpdateRecordStatus;
import org.gcube.data_catalogue.grsf_publish_ws.utils.CommonServiceUtils;
import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
import org.gcube.datacatalogue.ckanutillibrary.shared.ResourceBean;
import org.gcube.datacatalogue.ckanutillibrary.shared.RolesCkanGroupOrOrg;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.enums.Product_Type;
import org.gcube.datacatalogue.common.enums.Sources;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Product_Type;
import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import org.slf4j.LoggerFactory;
import eu.trentorise.opendata.jackan.model.CkanDataset;
@@ -61,33 +61,33 @@ public class GrsfPublisherStockService {
// Logger
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(GrsfPublisherStockService.class);
@GET
@Path("hello")
@Produces(MediaType.TEXT_PLAIN)
public Response hello() {
return Response.ok("Hello.. Stock service is here").build();
}
@GET
@Path("get-licenses")
@Produces(MediaType.APPLICATION_JSON)
public Response getLicenses() {
Status status = Status.OK;
String context = ScopeProvider.instance.get();
DataCatalogue catalogue;
try {
catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
Map<String,String> licenses = CommonServiceUtils.getLicenses(catalogue);
if(licenses == null)
status = Status.INTERNAL_SERVER_ERROR;
return Response.status(status).entity(licenses).build();
} catch(Exception e) {
status = Status.INTERNAL_SERVER_ERROR;
return Response.status(status)
.entity(new ResponseBean(false, "Unable to retrieve license list " + e.getLocalizedMessage(), null))
.build();
}
}
// @GET
// @Path("hello")
// @Produces(MediaType.TEXT_PLAIN)
// public Response hello() {
// return Response.ok("Hello.. Stock service is here").build();
// }
//
// @GET
// @Path("get-licenses")
// @Produces(MediaType.APPLICATION_JSON)
// public Response getLicenses() {
// Status status = Status.OK;
// String context = ScopeProvider.instance.get();
// DataCatalogue catalogue;
// try {
// catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// Map<String,String> licenses = CommonServiceUtils.getLicenses(catalogue);
// if(licenses == null)
// status = Status.INTERNAL_SERVER_ERROR;
// return Response.status(status).entity(licenses).build();
// } catch(Exception e) {
// status = Status.INTERNAL_SERVER_ERROR;
// return Response.status(status)
// .entity(new ResponseBean(false, "Unable to retrieve license list " + e.getLocalizedMessage(), null))
// .build();
// }
// }
@POST
@Path("publish-product")
@@ -113,35 +113,46 @@ public class GrsfPublisherStockService {
// Cast the source to the accepted ones
Sources sourceInPath = Sources.onDeserialize(source);
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) {
throw new Exception("There was a problem while serving your request. No catalogue instance was found!");
} else {
DataCatalogue catalogue = null;
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// if(catalogue == null) {
// throw new Exception("There was a problem while serving your request. No catalogue instance was found!");
// } else {
//
String apiKey = catalogue.getApiKeyFromUsername(username);
String organization = HelperMethods.retrieveOrgNameFromScope(context); //"grsf_admin";
String apiKey = "";
String organization = "";
// String apiKey = catalogue.getApiKeyFromUsername(username);
// String organization = HelperMethods.retrieveOrgNameFromScope(context); //"grsf_admin";
// check it has admin role or throw exception
CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
// CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
// extend this role to the other organizations in this context
CommonServiceUtils.extendRoleToOtherOrganizations(username, catalogue, organization,
RolesCkanGroupOrOrg.ADMIN);
// To support this, gCat must be modified according to the following ticket:
// https://support.d4science.org/issues/19365
// CommonServiceUtils.extendRoleToOtherOrganizations(username, catalogue, organization,
// RolesCkanGroupOrOrg.ADMIN);
// retrieve the user's email and fullname
String authorMail = HelperMethods.getUserEmail(context, token);
String authorFullname = HelperMethods.getUserFullname(context, token);
String authorMail = "";
String authorFullname = "";
if(authorMail == null || authorFullname == null) {
throw new Exception("Sorry but it was not possible to retrieve your fullname/email!");
}
// retrieve the user's email and fullname
// String authorMail = HelperMethods.getUserEmail(context, token);
// String authorFullname = HelperMethods.getUserFullname(context, token);
//
// if(authorMail == null || authorFullname == null) {
// throw new Exception("Sorry but it was not possible to retrieve your fullname/email!");
// }
// check the record has a name, at least
String futureName = record.getUuid();
String futureTitle = record.getStockName();
// check name and throws exception
CommonServiceUtils.checkName(futureName, catalogue);
// CommonServiceUtils.checkName(futureName, catalogue);
// load other information
Map<String,List<String>> customFields = record.getExtrasFields();
@@ -170,11 +181,12 @@ public class GrsfPublisherStockService {
.equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY));
// convert extras' keys to keys with namespace
Map<String,String> namespaces = HelperMethods
.getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_STOCK);
if(namespaces == null)
throw new Exception("Failed to retrieve the namespaces for the key fields!");
Map<String,String> namespaces = new HashMap<>();
// Map<String,String> namespaces = HelperMethods
// .getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_STOCK);
//
// if(namespaces == null)
// throw new Exception("Failed to retrieve the namespaces for the key fields!");
customFields = HelperMethods.replaceFieldsKey(customFields, namespaces,
!sourceInPath.equals(Sources.GRSF));
@@ -209,7 +221,7 @@ public class GrsfPublisherStockService {
} else
throw new Exception(
"There was an error during the product generation, sorry! Unable to create the dataset");
}
// }
} catch(Exception e) {
logger.error("Failed to create stock record", e);
status = Status.INTERNAL_SERVER_ERROR;
@@ -238,11 +250,13 @@ public class GrsfPublisherStockService {
"Received call to delete product with id " + recordToDelete.getId() + ", checking if it is a stock");
try {
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) {
status = Status.INTERNAL_SERVER_ERROR;
throw new Exception("There was a problem while serving your request");
}
DataCatalogue catalogue = null;
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// if(catalogue == null) {
// status = Status.INTERNAL_SERVER_ERROR;
// throw new Exception("There was a problem while serving your request");
// }
// Cast the source to the accepted ones
Sources sourceInPath = Sources.onDeserialize(source);
@@ -304,11 +318,13 @@ public class GrsfPublisherStockService {
// Cast the source to the accepted ones
Sources sourceInPath = Sources.onDeserialize(source);
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) {
status = Status.INTERNAL_SERVER_ERROR;
throw new Exception("There was a problem while serving your request");
}
DataCatalogue catalogue = null;
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// if(catalogue == null) {
// status = Status.INTERNAL_SERVER_ERROR;
// throw new Exception("There was a problem while serving your request");
// }
// if it is a request for GRSF records, we have Fishery - Stock groups, so it is easy.
// For other cases, records needs to be parsed
@@ -348,10 +364,13 @@ public class GrsfPublisherStockService {
Status status = Status.INTERNAL_SERVER_ERROR;
logger.info("Received call to get the catalogue identifier for the product with name " + name);
try {
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) {
throw new Exception("There was a problem while serving your request");
}
DataCatalogue catalogue = null;
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// if(catalogue == null) {
// throw new Exception("There was a problem while serving your request");
// }
CkanDataset dataset = catalogue.getDataset(name, catalogue.getApiKeyFromUsername(username));
if(dataset != null) {
Map<String,String> result = new HashMap<String,String>();
@@ -398,32 +417,40 @@ public class GrsfPublisherStockService {
throw new Exception("Please specify the 'catalog_id' property");
}
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
DataCatalogue catalogue = null;
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) {
throw new Exception(
"There was a problem while serving your request. No catalogue instance was found in this context!");
} else {
// if(catalogue == null) {
// throw new Exception(
// "There was a problem while serving your request. No catalogue instance was found in this context!");
// } else {
// get already published record and modify it
String apiKey = catalogue.getApiKeyFromUsername(username);
String apiKey = "";
// String apiKey = catalogue.getApiKeyFromUsername(username);
CkanDataset recordPublished = catalogue.getDataset(catalogId, apiKey);
if(recordPublished == null)
throw new Exception("A record with id " + catalogId + " does not exist!");
String authorMail = "";
String authorFullname = "";
// retrieve the user's email and fullname
String authorMail = HelperMethods.getUserEmail(context, token);
String authorFullname = HelperMethods.getUserFullname(context, token);
// String authorMail = HelperMethods.getUserEmail(context, token);
// String authorFullname = HelperMethods.getUserFullname(context, token);
//
// if(authorMail == null || authorFullname == null) {
// logger.debug("Author fullname or mail missing, cannot continue");
// throw new Exception("Sorry but there was not possible to retrieve your fullname/email!");
// }
String organization = "";
if(authorMail == null || authorFullname == null) {
logger.debug("Author fullname or mail missing, cannot continue");
throw new Exception("Sorry but there was not possible to retrieve your fullname/email!");
}
String organization = HelperMethods.retrieveOrgNameFromScope(context); //"grsf_admin";
// String organization = HelperMethods.retrieveOrgNameFromScope(context); //"grsf_admin";
// check he/she has admin role
CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
// CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
// name, product url and are going to remain unchanged (so we keep them from the publisher record);
String name = recordPublished.getName();
@@ -462,11 +489,12 @@ public class GrsfPublisherStockService {
.equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY));
// convert extras' keys to keys with namespace
Map<String,String> namespaces = HelperMethods
.getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_STOCK);
if(namespaces == null)
throw new Exception("Failed to retrieve the namespaces for the key fields!");
Map<String,String> namespaces = new HashMap<>();
// Map<String,String> namespaces = HelperMethods
// .getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_STOCK);
//
// if(namespaces == null)
// throw new Exception("Failed to retrieve the namespaces for the key fields!");
// retrieve the url
String modifiedUUIDKey = namespaces.containsKey(Constants.ITEM_URL_FIELD)
@@ -506,7 +534,7 @@ public class GrsfPublisherStockService {
} else {
throw new Exception("There was an error during the item updated, sorry");
}
}
// }
} catch(Exception e) {
logger.error("Failed to update stock record", e);
responseBean.setError(e.getMessage());
@@ -534,12 +562,13 @@ public class GrsfPublisherStockService {
try {
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) {
throw new Exception(
"There was a problem while serving your request. No catalogue instance was found in this context!");
} else {
DataCatalogue catalogue = null;
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
//
// if(catalogue == null) {
// throw new Exception(
// "There was a problem while serving your request. No catalogue instance was found in this context!");
// } else {
// catalog id must be reported
String uuid = bean.getUuid();
@@ -574,7 +603,7 @@ public class GrsfPublisherStockService {
responseBean.setKbUuid(uuid);
responseBean.setId(record.getId());
responseBean.setItemUrl(record.getExtrasAsHashMap().get(Constants.ITEM_URL_FIELD));
}
// }
} catch(Exception e) {
logger.error("Failed to update stock record's status", e);
responseBean.setError(e.getMessage());

@@ -14,35 +14,31 @@ import java.util.concurrent.ConcurrentHashMap;
import javax.servlet.ServletContext;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CkanResource;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Group;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Tag;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.RefersToBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.Resource;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.TimeSeriesBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.Base;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.Common;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseCreationBean;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Base;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.CkanResource;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Common;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.CustomField;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Group;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.RefersToBean;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Resource;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Tag;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.TimeSeriesBean;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.ResponseCreationBean;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.WritePostCatalogueManagerThread;
import org.gcube.data_catalogue.grsf_publish_ws.utils.csv.ManageTimeSeriesThread;
import org.gcube.data_catalogue.grsf_publish_ws.utils.threads.AssociationToGroupThread;
import org.gcube.data_catalogue.grsf_publish_ws.utils.threads.WritePostCatalogueManagerThread;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
import org.gcube.datacatalogue.ckanutillibrary.shared.ResourceBean;
import org.gcube.datacatalogue.ckanutillibrary.shared.RolesCkanGroupOrOrg;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.enums.Product_Type;
import org.gcube.datacatalogue.common.enums.Sources;
import org.gcube.datacatalogue.common.enums.Status;
import org.gcube.datacatalogue.common.enums.Stock_Type;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Product_Type;
import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import org.gcube.datacatalogue.common.AAA_PORTED.Status;
import org.gcube.datacatalogue.common.AAA_PORTED.Stock_Type;
import org.json.simple.JSONObject;
import org.slf4j.LoggerFactory;
import eu.trentorise.opendata.jackan.model.CkanDataset;
import eu.trentorise.opendata.jackan.model.CkanLicense;
/**
* Services common utils.
* @author Costantino Perciante (ISTI - CNR)
@@ -55,20 +51,20 @@ public class CommonServiceUtils {
private static final int TAG_MAX_SIZE = 100;
private static Map<String,Boolean> extensionsCheck = new ConcurrentHashMap<>();
/**
* Retrieve the list of licenses for stocks and fisheries
* @return
*/
public static Map<String,String> getLicenses(DataCatalogue catalogue) {
logger.info("Requested licenses...");
Map<String,String> toReturn = new HashMap<String,String>();
List<CkanLicense> licenses = catalogue.getLicenses();
for(CkanLicense ckanLicense : licenses) {
toReturn.put(ckanLicense.getId(), ckanLicense.getTitle());
}
return toReturn;
}
// /**
// * Retrieve the list of licenses for stocks and fisheries
// * @return
// */
// public static Map<String,String> getLicenses(DataCatalogue catalogue) {
// logger.info("Requested licenses...");
// Map<String,String> toReturn = new HashMap<String,String>();
// List<CkanLicense> licenses = catalogue.getLicenses();
//
// for(CkanLicense ckanLicense : licenses) {
// toReturn.put(ckanLicense.getId(), ckanLicense.getTitle());
// }
// return toReturn;
// }
/**
* Validate an aggregated GRSF record. TODO use @Valid tags
@@ -421,45 +417,46 @@ public class CommonServiceUtils {
}
}
/**
* Evaluate if the user has the admin role
* Throws exception if he/she doesn't
*/
public static void hasAdminRole(String username, DataCatalogue catalogue, String apiKey, String organization)
throws Exception {
String role = catalogue.getRoleOfUserInOrganization(username, organization, apiKey);
logger.info("Role of the user " + username + " is " + role + " in " + organization);
if(role == null || role.isEmpty() || !role.equalsIgnoreCase(RolesCkanGroupOrOrg.ADMIN.toString()))
throw new Exception(
"You are not authorized to create a product. Please check you have the Catalogue-Administrator role!");
}
/**
* Check this record's name
* @param futureName
* @param catalogue
* @throws Exception on name check
*/
public static void checkName(String futureName, DataCatalogue catalogue) throws Exception {
if(!HelperMethods.isNameValid(futureName)) {
throw new Exception(
"The 'uuid_knowledge_base' must contain only alphanumeric characters, and symbols like '.' or '_', '-'");
} else {
logger.debug("Checking if such name [" + futureName + "] doesn't exist ...");
boolean alreadyExists = catalogue.existProductWithNameOrId(futureName);
if(alreadyExists) {
logger.debug("A product with 'uuid_knowledge_base' " + futureName + " already exists");
throw new Exception("A product with 'uuid_knowledge_base' " + futureName + " already exists");
}
}
}
// /**
// * Evaluate if the user has the admin role
// * Throws exception if he/she doesn't
// */
// public static void hasAdminRole(String username, DataCatalogue catalogue, String apiKey, String organization)
// throws Exception {
//
// String role = catalogue.getRoleOfUserInOrganization(username, organization, apiKey);
// logger.info("Role of the user " + username + " is " + role + " in " + organization);
//
// if(role == null || role.isEmpty() || !role.equalsIgnoreCase(RolesCkanGroupOrOrg.ADMIN.toString()))
// throw new Exception(
// "You are not authorized to create a product. Please check you have the Catalogue-Administrator role!");
//
// }
// /**
// * Check this record's name
// * @param futureName
// * @param catalogue
// * @throws Exception on name check
// */
// public static void checkName(String futureName, DataCatalogue catalogue) throws Exception {
//
// if(!HelperMethods.isNameValid(futureName)) {
// throw new Exception(
// "The 'uuid_knowledge_base' must contain only alphanumeric characters, and symbols like '.' or '_', '-'");
// } else {
//
// logger.debug("Checking if such name [" + futureName + "] doesn't exist ...");
// boolean alreadyExists = catalogue.existProductWithNameOrId(futureName);
//
// if(alreadyExists) {
// logger.debug("A product with 'uuid_knowledge_base' " + futureName + " already exists");
// throw new Exception("A product with 'uuid_knowledge_base' " + futureName + " already exists");
//
// }
// }
// }
/**
* Validate and check sources
@@ -485,20 +482,39 @@ public class CommonServiceUtils {
// validate the record if it is a GRSF one and set the record type and in manage context
// Status field is needed only in the Manage context for GRSF records
// In web.xml a parameter indicates the Admin VRE as full path.
if(context.equals((String) contextServlet.getInitParameter(HelperMethods.MANAGE_CONTEX_KEY))) {
// If we are in Admin VRE and the source is GRSF
if(sourceInPath.equals(Sources.GRSF)) {
// RefersTo cannot be empty or null in GRSF_Admin for a GRSF record
List<RefersToBean> refersTo = record.getRefersTo();
if(refersTo == null || refersTo.isEmpty())
throw new Exception("refers_to is empty for a GRSF record");
// For each RefersTo a Resource is created in the record. The resource point to the referred record.
// We have also to set database sources
String databaseSource = "";
// we have the id within the catalog of this record. This means that we can retrieve the record and its system:type
for(RefersToBean refersToBean : refersTo) {
String sourceOrganization = getRecordOrganization(refersToBean.getId(), apiKey, context);
String sourceOrganization = "";
// Here there is a lookup to the referred records.
// getRecordOrganization reads the record and gets its organization
// String sourceOrganization = getRecordOrganization(refersToBean.getId(), apiKey, context);
resources.add(new ResourceBean(refersToBean.getUrl(), sourceOrganization, "", null, username, null,
null));
sourcesList.add(sourceOrganization.toLowerCase());
// concatenating the organizations into a string which provides the databaseSource value
// Better to use a list (sketched below)
databaseSource += sourceOrganization + " ";
}
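A minimal sketch of the list-based alternative hinted at in the comment above (editor's illustration, not part of the commits; it keeps the same stubbed organization lookup and assumes Java 8+ for String.join):
// Collect the source organizations in a list, then join them once,
// instead of appending to databaseSource with "+=".
List<String> sourceOrganizations = new ArrayList<>();
for(RefersToBean refersToBean : refersTo) {
	String sourceOrganization = ""; // stubbed lookup, as in the hunk above
	sourceOrganizations.add(sourceOrganization.toLowerCase());
}
String databaseSource = String.join(" ", sourceOrganizations);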
@@ -563,35 +579,35 @@ public class CommonServiceUtils {
groups.add(groupName);
}
/**
* Fetch the system:type property from a record
* @param itemIdOrName
* @param apiKey
* @return null on error
* @throws Exception
*/
public static String getSystemTypeValue(String itemIdOrName, String apiKey, String context) throws Exception {
DataCatalogue catalog = HelperMethods.getDataCatalogueRunningInstance(context);
CkanDataset dataset = catalog.getDataset(itemIdOrName, apiKey);
if(dataset == null)
throw new Exception("Unable to find record with id or name " + itemIdOrName);
String systemTypeValue = dataset.getExtrasAsHashMap().get(Constants.SYSTEM_TYPE_CUSTOM_KEY);
if(systemTypeValue == null || systemTypeValue.isEmpty())
throw new Exception(Constants.SYSTEM_TYPE_CUSTOM_KEY + " property not set in record " + itemIdOrName);
else
return systemTypeValue;
}
public static String getRecordOrganization(String itemIdOrName, String apiKey, String context) throws Exception {
DataCatalogue catalog = HelperMethods.getDataCatalogueRunningInstance(context);
CkanDataset dataset = catalog.getDataset(itemIdOrName, apiKey);
if(dataset == null)
throw new Exception("Unable to find record with id or name " + itemIdOrName);
else
return dataset.getOrganization().getTitle();
}
// /**
// * Fetch the system:type property from a record
// * @param itemIdOrName
// * @param apiKey
// * @return null on error
// * @throws Exception
// */
// public static String getSystemTypeValue(String itemIdOrName, String apiKey, String context) throws Exception {
//
// DataCatalogue catalog = HelperMethods.getDataCatalogueRunningInstance(context);
// CkanDataset dataset = catalog.getDataset(itemIdOrName, apiKey);
// if(dataset == null)
// throw new Exception("Unable to find record with id or name " + itemIdOrName);
// String systemTypeValue = dataset.getExtrasAsHashMap().get(Constants.SYSTEM_TYPE_CUSTOM_KEY);
// if(systemTypeValue == null || systemTypeValue.isEmpty())
// throw new Exception(Constants.SYSTEM_TYPE_CUSTOM_KEY + " property not set in record " + itemIdOrName);
// else
// return systemTypeValue;
//
// }
//
// public static String getRecordOrganization(String itemIdOrName, String apiKey, String context) throws Exception {
// DataCatalogue catalog = HelperMethods.getDataCatalogueRunningInstance(context);
// CkanDataset dataset = catalog.getDataset(itemIdOrName, apiKey);
// if(dataset == null)
// throw new Exception("Unable to find record with id or name " + itemIdOrName);
// else
// return dataset.getOrganization().getTitle();
// }
/**
* Actions to execute once the dataset has been updated or created.
@ -657,12 +673,12 @@ public class CommonServiceUtils {
new ManageTimeSeriesThread(record, futureName, username, catalogue, context, token).start();
// Write a post if the product has been published in the public GRSF context
if(catalogue.isSocialPostEnabled() && !isUpdated && context
.equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY))) {
new WritePostCatalogueManagerThread(context, token, futureTitle, itemUrlForThread, true,
new ArrayList<String>(), authorFullname).start();
logger.info("Thread to write a post about the new product has been launched");
}
// if(catalogue.isSocialPostEnabled() && !isUpdated && context
// .equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY))) {
// new WritePostCatalogueManagerThread(context, token, futureTitle, itemUrlForThread, true,
// new ArrayList<String>(), authorFullname).start();
// logger.info("Thread to write a post about the new product has been launched");
// }
} catch(InterruptedException e) {
logger.error("Error", e);
}
@ -670,25 +686,25 @@ public class CommonServiceUtils {
}).start();
}
/**
* Extend roles to other organization
* @param username
* @param catalogue
* @param organization
* @param admin
*/
public static void extendRoleToOtherOrganizations(String username, DataCatalogue catalogue, String organization,
RolesCkanGroupOrOrg admin) {
logger.debug("Checking if role extension is needed here");
if(extensionsCheck.containsKey(username) && extensionsCheck.get(username))
return;
else {
catalogue.assignRolesOtherOrganization(username, organization, admin);
extensionsCheck.put(username, true);
}
}
// /**
// * Extend roles to other organization
// * @param username
// * @param catalogue
// * @param organization
// * @param admin
// */
// public static void extendRoleToOtherOrganizations(String username, DataCatalogue catalogue, String organization,
// RolesCkanGroupOrOrg admin) {
//
// logger.debug("Checking if role extension is needed here");
// if(extensionsCheck.containsKey(username) && extensionsCheck.get(username))
// return;
// else {
// catalogue.assignRolesOtherOrganization(username, organization, admin);
// extensionsCheck.put(username, true);
// }
//
// }
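// Illustrative sketch only, not part of this changeset: if the role-extension helper commented
// out above were kept, the per-username check could be made atomic with a concurrent map
// (java.util.concurrent.ConcurrentHashMap), so parallel publish requests do not extend the
// roles twice for the same user.
private static final ConcurrentHashMap<String, Boolean> extensionsCheck = new ConcurrentHashMap<String, Boolean>();

public static void extendRoleToOtherOrganizations(String username, DataCatalogue catalogue,
        String organization, RolesCkanGroupOrOrg admin) {
    // putIfAbsent returns null only for the first caller per username
    if(extensionsCheck.putIfAbsent(username, Boolean.TRUE) == null) {
        catalogue.assignRolesOtherOrganization(username, organization, admin);
    }
}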
/**
* Evaluate in which organization a record has to be published. The only exception is when grsf_admin is involved.

@ -29,10 +29,11 @@ import org.gcube.common.homelibrary.home.workspace.exceptions.ItemNotFoundExcept
import org.gcube.common.homelibrary.home.workspace.folder.items.ExternalFile;
import org.gcube.common.resources.gcore.utils.XPathHelper;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.GcoreEndPointReaderSocial;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogueFactory;
import org.gcube.datacatalogue.common.caches.CacheImpl;
import org.gcube.datacatalogue.common.caches.CacheInterface;
import org.gcube.datacatalogue.common.AAA_UNNEEDED.CacheImpl;
import org.gcube.datacatalogue.common.AAA_UNNEEDED.CacheInterface;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.Query;
import org.gcube.resources.discovery.client.queries.impl.QueryBox;
@ -103,38 +104,38 @@ public abstract class HelperMethods {
return modified;
}
/**
* Retrieve the running instance of the data catalogue for this scope
* @return
* @throws Exception
*/
public static DataCatalogue getDataCatalogueRunningInstance(String scope) throws Exception{
if(catalogueCache.get(scope) != null)
return catalogueCache.get(scope);
else{
try{
DataCatalogue instance = DataCatalogueFactory.getFactory().getUtilsPerScope(scope);
catalogueCache.insert(scope, instance);
return instance;
}catch(Exception e){
logger.error("Failed to instantiate data catalogue lib", e);
throw new Exception("Failed to retrieve catalogue information");
}
}
}
/**
* Retrieve the organization name in which the user wants to publish starting from the scope
* @param contextInWhichPublish
* @return
*/
public static String retrieveOrgNameFromScope(String scope) {
String[] splittedScope = scope.split("/");
return splittedScope[splittedScope.length - 1].toLowerCase();
}
// /**
// * Retrieve the running instance of the data catalogue for this scope
// * @return
// * @throws Exception
// */
// public static DataCatalogue getDataCatalogueRunningInstance(String scope) throws Exception{
//
// if(catalogueCache.get(scope) != null)
// return catalogueCache.get(scope);
// else{
// try{
// DataCatalogue instance = DataCatalogueFactory.getFactory().getUtilsPerScope(scope);
// catalogueCache.insert(scope, instance);
// return instance;
// }catch(Exception e){
// logger.error("Failed to instantiate data catalogue lib", e);
// throw new Exception("Failed to retrieve catalogue information");
// }
// }
// }
// /**
// * Retrieve the organization name in which the user wants to publish starting from the scope
// * @param contextInWhichPublish
// * @return
// */
// public static String retrieveOrgNameFromScope(String scope) {
//
// String[] splittedScope = scope.split("/");
// return splittedScope[splittedScope.length - 1].toLowerCase();
//
// }
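// Illustrative usage only, not part of this changeset: the organization name is simply the last
// token of the scope, lower-cased. For the scope used in the tests below, for instance:
String org = HelperMethods.retrieveOrgNameFromScope("/gcube/devNext/NextNext"); // -> "nextnext"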
/**
* Validate the name the product will have
@ -150,13 +151,14 @@ public abstract class HelperMethods {
}
}
/**
/* *//**
* Retrieve the user's email given his/her username
* @param context
* @param token
* @return
* @throws Exception
*/
*//*
public static String getUserEmail(String context, String token){
// check in cache
@ -174,13 +176,13 @@ public abstract class HelperMethods {
return result;
}
/**
*//**
* Retrieve the user's fullname given his/her username
* @param context
* @param token
* @return
* @throws Exception
*/
*//*
public static String getUserFullname(String context, String token){
// check in cache
@ -198,10 +200,10 @@ public abstract class HelperMethods {
return result;
}
/**
*//**
* Execute the GET http request at this url, and return the result as string
* @return
*/
*//*
private static String executGETHttpRequest(String url, int expectedCodeOnSuccess){
try(CloseableHttpClient client = HttpClientBuilder.create().build();){
@ -231,7 +233,7 @@ public abstract class HelperMethods {
return null;
}
}*/
/**
* Check that the given license id is in CKAN
@ -516,56 +518,56 @@ public abstract class HelperMethods {
}
/**
* Return a map for converting a key to a namespace:key format by reading a generic resource.
* @return a map
*/
public static Map<String, String> getFieldToFieldNameSpaceMapping(String resourceName){
Map<String, String> toReturn = new HashMap<String, String>();
// check if data are in cache
if(namespacesCache.get(resourceName) != null){
return namespacesCache.get(resourceName);
}
else{
try {
Query q = new QueryBox("for $profile in collection('/db/Profiles/GenericResource')//Resource " +
"where $profile/Profile/SecondaryType/string() eq '"+ "ApplicationProfile" + "' and $profile/Profile/Name/string() " +
" eq '" + resourceName + "'" +
"return $profile");
DiscoveryClient<String> client = client();
List<String> appProfile = client.submit(q);
if (appProfile == null || appProfile.size() == 0)
throw new Exception("Your applicationProfile is not registered in the infrastructure");
else {
String elem = appProfile.get(0);
DocumentBuilder docBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
Node node = docBuilder.parse(new InputSource(new StringReader(elem))).getDocumentElement();
XPathHelper helper = new XPathHelper(node);
NodeList nodeListKeys = helper.evaluateForNodes("//originalKey");
NodeList nodeListModifiedKeys = helper.evaluateForNodes("//modifiedKey");
int sizeKeys = nodeListKeys != null ? nodeListKeys.getLength() : 0;
int sizeKeysModifed = nodeListModifiedKeys != null ? nodeListModifiedKeys.getLength() : 0;
if(sizeKeys != sizeKeysModifed)
throw new Exception("Malformed XML");
logger.debug("Size is " + sizeKeys);
for (int i = 0; i < sizeKeys; i++) {
toReturn.put(nodeListKeys.item(i).getTextContent(), nodeListModifiedKeys.item(i).getTextContent());
}
}
logger.debug("Map is " + toReturn);
namespacesCache.insert(resourceName, toReturn);
return toReturn;
} catch (Exception e) {
logger.error("Error while trying to fetch applicationProfile profile from the infrastructure", e);
return null;
}
}
}
// /**
// * Return a map for converting a key to a namespace:key format by reading a generic resource.
// * @return a map
// */
// public static Map<String, String> getFieldToFieldNameSpaceMapping(String resourceName){
// Map<String, String> toReturn = new HashMap<String, String>();
//
// // check if data are in cache
// if(namespacesCache.get(resourceName) != null){
// return namespacesCache.get(resourceName);
// }
// else{
// try {
// Query q = new QueryBox("for $profile in collection('/db/Profiles/GenericResource')//Resource " +
// "where $profile/Profile/SecondaryType/string() eq '"+ "ApplicationProfile" + "' and $profile/Profile/Name/string() " +
// " eq '" + resourceName + "'" +
// "return $profile");
//
// DiscoveryClient<String> client = client();
// List<String> appProfile = client.submit(q);
//
// if (appProfile == null || appProfile.size() == 0)
// throw new Exception("Your applicationProfile is not registered in the infrastructure");
// else {
//
// String elem = appProfile.get(0);
// DocumentBuilder docBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
// Node node = docBuilder.parse(new InputSource(new StringReader(elem))).getDocumentElement();
// XPathHelper helper = new XPathHelper(node);
//
// NodeList nodeListKeys = helper.evaluateForNodes("//originalKey");
// NodeList nodeListModifiedKeys = helper.evaluateForNodes("//modifiedKey");
// int sizeKeys = nodeListKeys != null ? nodeListKeys.getLength() : 0;
// int sizeKeysModifed = nodeListModifiedKeys != null ? nodeListModifiedKeys.getLength() : 0;
// if(sizeKeys != sizeKeysModifed)
// throw new Exception("Malformed XML");
// logger.debug("Size is " + sizeKeys);
// for (int i = 0; i < sizeKeys; i++) {
// toReturn.put(nodeListKeys.item(i).getTextContent(), nodeListModifiedKeys.item(i).getTextContent());
// }
// }
// logger.debug("Map is " + toReturn);
// namespacesCache.insert(resourceName, toReturn);
// return toReturn;
// } catch (Exception e) {
// logger.error("Error while trying to fetch applicationProfile profile from the infrastructure", e);
// return null;
// }
// }
// }
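// Illustrative sketch only, not part of this changeset: the originalKey -> modifiedKey map built
// above is meant to rewrite plain extras keys into their namespace:key form. A minimal rewrite
// step over a hypothetical extras map (the resource name "GRSFNamespaces" is illustrative) could be:
Map<String, String> mapping = HelperMethods.getFieldToFieldNameSpaceMapping("GRSFNamespaces");
Map<String, String> namespacedExtras = new HashMap<String, String>();
for(Map.Entry<String, String> extra : extras.entrySet()) {
    String key = (mapping != null && mapping.containsKey(extra.getKey())) ? mapping.get(extra.getKey()) : extra.getKey();
    namespacedExtras.put(key, extra.getValue());
}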
/**
* Replace the extras' keys if needed

@ -8,8 +8,8 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.TimeSeriesBean;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.TimeSeriesBean;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.slf4j.LoggerFactory;

@ -20,15 +20,15 @@ import org.gcube.common.homelibrary.home.workspace.exceptions.ItemNotFoundExcept
import org.gcube.common.homelibrary.home.workspace.exceptions.WorkspaceFolderNotFoundException;
import org.gcube.common.homelibrary.home.workspace.folder.items.ExternalFile;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.TimeSeries;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.Common;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Common;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.CustomField;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.TimeSeries;
import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
import org.gcube.datacatalogue.common.caches.CacheImpl;
import org.gcube.datacatalogue.common.caches.CacheInterface;
import org.gcube.datacatalogue.common.AAA_UNNEEDED.CacheImpl;
import org.gcube.datacatalogue.common.AAA_UNNEEDED.CacheInterface;
import org.slf4j.LoggerFactory;
import eu.trentorise.opendata.jackan.model.CkanResourceBase;

@ -8,15 +8,15 @@ import javax.ws.rs.core.Application;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.Resource;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Resource;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.services.GrsfPublisherFisheryService;
import org.gcube.data_catalogue.grsf_publish_ws.services.GrsfPublisherStockService;
import org.gcube.datacatalogue.common.enums.Fishery_Type;
import org.gcube.datacatalogue.common.enums.Sources;
import org.gcube.datacatalogue.common.enums.Status;
import org.gcube.datacatalogue.common.enums.Stock_Type;
import org.gcube.datacatalogue.common.AAA_PORTED.Fishery_Type;
import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import org.gcube.datacatalogue.common.AAA_PORTED.Status;
import org.gcube.datacatalogue.common.AAA_PORTED.Stock_Type;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.test.JerseyTest;
import org.glassfish.jersey.test.TestProperties;

@ -24,22 +24,22 @@ import org.gcube.common.homelibrary.home.workspace.catalogue.WorkspaceCatalogue;
import org.gcube.common.homelibrary.home.workspace.exceptions.InsufficientPrivilegesException;
import org.gcube.common.homelibrary.home.workspace.exceptions.ItemAlreadyExistException;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Group;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Tag;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.Resource;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.TimeSeriesBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.CustomField;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Group;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Resource;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Tag;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.TimeSeriesBean;
import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
import org.gcube.data_catalogue.grsf_publish_ws.utils.csv.CSVUtils;
import org.gcube.data_catalogue.grsf_publish_ws.utils.threads.AssociationToGroupThread;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogueFactory;
import org.gcube.datacatalogue.common.enums.Abundance_Level;
import org.gcube.datacatalogue.common.enums.Fishery_Type;
import org.gcube.datacatalogue.common.enums.Sources;
import org.gcube.datacatalogue.common.enums.Status;
import org.gcube.datacatalogue.common.AAA_PORTED.Abundance_Level;
import org.gcube.datacatalogue.common.AAA_PORTED.Fishery_Type;
import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import org.gcube.datacatalogue.common.AAA_PORTED.Status;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
@ -378,39 +378,39 @@ public class JTests {
}
//@Test
public void testHierarchy() throws Exception{
String name = "low-abundance";
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance("/gcube/devNext/NextNext");
List<String> uniqueGroups = new ArrayList<String>();
uniqueGroups.add(name);
uniqueGroups.add(name);
AssociationToGroupThread.findHierarchy(uniqueGroups, catalogue, catalogue.getApiKeyFromUsername("costantino_perciante"));
logger.debug("Hierarchy is " + uniqueGroups);
}
//@Test
public void testAssociationThread() throws Exception{
String name = "low-abundance";
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance("/gcube/devNext/NextNext");
AssociationToGroupThread threadGroups = new AssociationToGroupThread(Arrays.asList(name), "another-test-test-please-ignore", "grsf", "costantino_perciante", catalogue, "apiKey");
threadGroups.start();
threadGroups.join();
logger.info("Thread stopped!");
}
//@Test
public void testCaches(){
String context = "/gcube/devNext/NextNext";
String token = "";
for (int i = 0; i < 1000; i++) {
logger.debug(HelperMethods.getUserEmail(context, token));
}
}
// //@Test
// public void testHierarchy() throws Exception{
// String name = "low-abundance";
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance("/gcube/devNext/NextNext");
// List<String> uniqueGroups = new ArrayList<String>();
// uniqueGroups.add(name);
// uniqueGroups.add(name);
// AssociationToGroupThread.findHierarchy(uniqueGroups, catalogue, catalogue.getApiKeyFromUsername("costantino_perciante"));
// logger.debug("Hierarchy is " + uniqueGroups);
// }
//
// //@Test
// public void testAssociationThread() throws Exception{
// String name = "low-abundance";
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance("/gcube/devNext/NextNext");
// AssociationToGroupThread threadGroups = new AssociationToGroupThread(Arrays.asList(name), "another-test-test-please-ignore", "grsf", "costantino_perciante", catalogue, "apiKey");
// threadGroups.start();
// threadGroups.join();
// logger.info("Thread stopped!");
//
//
// }
//
// //@Test
// public void testCaches(){
//
// String context = "/gcube/devNext/NextNext";
// String token = "";
// for (int i = 0; i < 1000; i++) {
// logger.debug(HelperMethods.getUserEmail(context, token));
// }
//
// }
//@Test
public void testMatch(){
