fixed tags and fields generation

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-catalogue/grsf-publisher-ws@134784 82a268e6-3cf1-43bd-a215-b396298e98cf
Costantino Perciante 2016-11-25 12:56:27 +00:00
parent 56165cbf42
commit fb2fbee11a
8 changed files with 118 additions and 40 deletions

View File

@@ -1,5 +1,6 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input;
import java.util.Collections;
import java.util.List;
import javax.validation.Valid;
@@ -213,6 +214,10 @@ public class FisheryRecord extends Common{
public void setCatchesOrLandings(List<TimeSeriesBean<String, String>> catchesOrLandings) {
this.catchesOrLandings = catchesOrLandings;
if(catchesOrLandings != null){
Collections.sort(catchesOrLandings);
}
}
@Override

View File

@@ -1,5 +1,6 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input;
import java.util.Collections;
import java.util.List;
import javax.validation.Valid;
@@ -21,7 +22,7 @@ import com.fasterxml.jackson.annotation.JsonProperty;
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class StockRecord extends Common{
@JsonProperty("stock_name")
@NotNull(message="stock_name cannot be null")
@Size(min=2, message="stock_name cannot be empty")
@@ -266,6 +267,10 @@ public class StockRecord extends Common{
public void setExploitationRate(List<TimeSeriesBean<Exploitation_Rate, Void>> exploitationRate) {
this.exploitationRate = exploitationRate;
if(exploitationRate != null){
Collections.sort(exploitationRate);
}
}
public List<TimeSeriesBean<Abundance_Level, Void>> getAbundanceLevel() {
@@ -274,6 +279,10 @@ public class StockRecord extends Common{
public void setAbundanceLevel(List<TimeSeriesBean<Abundance_Level, Void>> abundanceLevel) {
this.abundanceLevel = abundanceLevel;
if(abundanceLevel != null){
Collections.sort(abundanceLevel);
}
}
public String getNarrativeStateAndTrend() {

View File

@@ -12,6 +12,8 @@ import com.fasterxml.jackson.annotation.JsonProperty;
* A time series bean that contains couples <year, value>
* Catches_and_landings also contains a unit
* @author Costantino Perciante at ISTI-CNR (costantino.perciante@isti.cnr.it)
* @param <T> the type of the first value of the series
* @param <T1> the type of the second value of the series
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class TimeSeriesBean<T, T1> implements Comparable<TimeSeriesBean<T, T1>>{
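Not part of this commit: a minimal, self-contained sketch of the kind of Comparable ordering the setters above rely on when they call Collections.sort. Class, field and constructor names are illustrative only; the real TimeSeriesBean may expose different fields and accessors.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

// Illustrative sketch of a generic time-series entry ordered by year.
public class TimeSeriesEntrySketch<T, T1> implements Comparable<TimeSeriesEntrySketch<T, T1>> {

    private final int year;  // the point in time the value refers to
    private final T value;   // the measured value (e.g. catches or landings)
    private final T1 unit;   // optional unit of measure (Void when absent)

    public TimeSeriesEntrySketch(int year, T value, T1 unit) {
        this.year = year;
        this.value = value;
        this.unit = unit;
    }

    @Override
    public int compareTo(TimeSeriesEntrySketch<T, T1> other) {
        // chronological order, so Collections.sort(...) leaves the series oldest-to-newest
        return Integer.compare(this.year, other.year);
    }

    @Override
    public String toString() {
        return year + ":" + value + (unit == null ? "" : " " + unit);
    }

    public static void main(String[] args) {
        List<TimeSeriesEntrySketch<String, Void>> series = new ArrayList<TimeSeriesEntrySketch<String, Void>>();
        series.add(new TimeSeriesEntrySketch<String, Void>(2015, "120", null));
        series.add(new TimeSeriesEntrySketch<String, Void>(2013, "90", null));
        series.add(new TimeSeriesEntrySketch<String, Void>(2014, "100", null));
        Collections.sort(series);
        System.out.println(series); // [2013:90, 2014:100, 2015:120]
    }
}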

View File

@@ -81,10 +81,11 @@ public class CSVHelpers {
bw.newLine();
bw.flush();
}
bw.close();
// file created
logger.info("CSV file created correctly on this machine!");
bw.close();
// on exit delete it...
file.deleteOnExit();
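For reference (not part of the diff): the same write, flush, close, then mark-for-deletion flow can be expressed with try-with-resources, so the writer is closed even when a write fails. The method name and CSV layout below are assumptions, not the real CSVHelpers.listToCSV signature.

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;

public class CsvWriteSketch {

    // Writes pre-formatted CSV lines to a temp file and marks it for deletion on JVM exit.
    static File writeLines(List<String> csvLines) throws IOException {
        File file = File.createTempFile("time_series_", ".csv");
        try (BufferedWriter bw = new BufferedWriter(new FileWriter(file))) {
            for (String line : csvLines) {
                bw.write(line);
                bw.newLine();
            }
            bw.flush();
        } // the writer is closed here, even if a write throws
        file.deleteOnExit(); // best-effort cleanup, mirroring the code above
        return file;
    }

    public static void main(String[] args) throws IOException {
        File csv = writeLines(Arrays.asList("year,value", "2014,100", "2015,120"));
        System.out.println("CSV written to " + csv.getAbsolutePath());
    }
}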

View File

@@ -15,7 +15,6 @@ import java.util.Map;
import javax.servlet.ServletContext;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CkanResource;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Group;
@@ -49,6 +48,7 @@ public abstract class HelperMethods {
private static final String PENDING_CONTEX_KEY = "PendingContext";
private static final String CONFIRMED_CONTEX_KEY = "ConfirmedContext";
private static final String CSV_FILE_FORMAT = ".csv";
private static final int TIME_SERIES_TAKE_LAST_VALUES = 5;
/**
* Convert a group name to its id on ckan
@@ -101,16 +101,34 @@ public abstract class HelperMethods {
if(f != null){
if(f instanceof List<?>){
List asList = ((List) f);
logger.debug("The object annotated with @Tag is a list. Adding ... ");
for (Object object : asList) {
logger.debug(object.toString().trim());
tags.add(object.toString().trim());
if(!asList.isEmpty()){
logger.debug("The object annotated with @Tag is a list. Adding ... ");
int elementsToConsider = asList.size();
// check if it is a time series, in this case take the last X elements
if(asList.get(0).getClass().equals(TimeSeriesBean.class)){
elementsToConsider = Math.min(elementsToConsider, TIME_SERIES_TAKE_LAST_VALUES);
for (int i = (asList.size() - elementsToConsider); i < asList.size(); i++) {
logger.debug(asList.get(i).toString().trim());
tags.add(asList.get(i).toString().trim());
}
}else{
// else add all the available elements
for (int i = 0; i < elementsToConsider; i++) {
logger.debug(asList.get(i).toString().trim());
tags.add(asList.get(i).toString().trim());
}
}
}
}else{
logger.debug("The object annotated with @Tag is a simple one. Adding ... ");
logger.debug(f.toString().trim());
tags.add(f.toString().trim());
}
}
}catch(Exception e){
logger.error("Failed ot read value for field " + field.getName() + " skipping", e);
@@ -168,7 +186,6 @@ public abstract class HelperMethods {
Object f = new PropertyDescriptor(field.getName(), current).getReadMethod().invoke(record);
String keyField = field.getAnnotation(CustomField.class).key();
if(f != null){
List<String> valuesForKey = new ArrayList<String>();
// check if the map already contains this key
@@ -178,11 +195,26 @@ public abstract class HelperMethods {
if(f instanceof List<?>){
logger.debug("The object " + field.getName() + " is a list and is annotated with @CustomField. Adding ...");
List asList = (List)f;
for (Object object : asList) {
logger.debug(object.toString().trim());
valuesForKey.add(object.toString().trim());
}
if(!asList.isEmpty()){
int elementsToConsider = asList.size();
// check if it is a time series, in this case take the last X elements
if(asList.get(0).getClass().equals(TimeSeriesBean.class)){
elementsToConsider = Math.min(elementsToConsider, TIME_SERIES_TAKE_LAST_VALUES);
for (int i = (asList.size() - elementsToConsider); i < asList.size(); i++) {
logger.debug(asList.get(i).toString().trim());
valuesForKey.add(asList.get(i).toString().trim());
}
}else{
for (int i = 0; i < elementsToConsider; i++) {
logger.debug(asList.get(i).toString().trim());
valuesForKey.add(asList.get(i).toString().trim());
}
}
}
}else{
valuesForKey.add(f.toString().trim());
}
@@ -405,7 +437,7 @@ public abstract class HelperMethods {
* @throws IllegalArgumentException
* @throws IllegalAccessException
*/
public static void manageTimeSeries(Common record, String packageId, String username) throws IllegalAccessException, IllegalArgumentException, InvocationTargetException, IntrospectionException{
public static void manageTimeSeries(Common record, String packageId, String username, DataCatalogue catalogue, String context) throws IllegalAccessException, IllegalArgumentException, InvocationTargetException, IntrospectionException{
if(record == null)
throw new IllegalArgumentException("The given record is null!!");
@@ -419,37 +451,38 @@ public abstract class HelperMethods {
Object f = new PropertyDescriptor(field.getName(), current).getReadMethod().invoke(record);
if(f != null){
List asList = (List)f;
if(!asList.isEmpty())
if(!asList.isEmpty()){
if(asList.get(0).getClass().equals(TimeSeriesBean.class)){
CustomField customAnnotation = field.getAnnotation(CustomField.class);
String resourceToAttachName = customAnnotation.key().replaceAll("\\s", "_") + CSV_FILE_FORMAT;
logger.debug("A time series has been just found");
File csvFile = CSVHelpers.listToCSV(asList);
// upload this file on ckan
DataCatalogue instance = getDataCatalogueRunningInstance(ScopeProvider.instance.get());
eu.trentorise.opendata.jackan.model.CkanResource res = instance.uploadResourceFile(
csvFile,
packageId,
instance.getApiKeyFromUsername(username),
resourceToAttachName,
customAnnotation.key() + " time series for this product");
if(csvFile != null){
// upload this file on the folder of the vre (under .catalogue) and change the url of the resource TODO
if(res != null){
// upload this file on ckan
eu.trentorise.opendata.jackan.model.CkanResource res = catalogue.uploadResourceFile(
csvFile,
packageId,
catalogue.getApiKeyFromUsername(username),
resourceToAttachName,
customAnnotation.key() + " time series for this product");
logger.debug("The resource returned is " + res.getName() + " with package id " + res.getPackageId() + " " + res.getDescription() + " " + res.getState());
// upload this file on the folder of the vre (under .catalogue) and change the url of the resource TODO
// if(res != null){
//
//
// }
}
// delete it
csvFile.delete();
}
}
}
}
}
}
while((current = current.getSuperclass())!=null); // iterate from the inherited class up to the Object.class
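A quick standalone illustration (not part of the diff) of how the resource name is derived above: the @CustomField key has its whitespace replaced by underscores before the .csv extension is appended. The key string here is hypothetical.

public class ResourceNameSketch {
    public static void main(String[] args) {
        String customFieldKey = "Catches or Landings"; // hypothetical key
        String resourceToAttachName = customFieldKey.replaceAll("\\s", "_") + ".csv";
        System.out.println(resourceToAttachName); // Catches_or_Landings.csv
    }
}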

View File

@@ -68,7 +68,7 @@ public class AssociationToGroupThread extends Thread {
}
}
logger.info("The Association Group thread ended correctly");
}
}

View File

@@ -1,5 +1,8 @@
package org.gcube.data_catalogue.grsf_publish_ws.utils.threads;
import java.beans.IntrospectionException;
import java.lang.reflect.InvocationTargetException;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.Common;
import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
import org.gcube.datacatalogue.ckanutillibrary.DataCatalogue;
@@ -15,16 +18,41 @@ public class ManageTimeSeriesThread extends Thread{
// Logger
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(ManageTimeSeriesThread.class);
private Common record;
private String packageId;
private String username;
private DataCatalogue catalogue;
private String context;
/**
* @param record
* @param packageId
* @param username
* @param catalogue
* @param context
*/
public ManageTimeSeriesThread(Common record, String packageId,
String username, DataCatalogue catalogue, String context) {
super();
this.record = record;
this.packageId = packageId;
this.username = username;
this.catalogue = catalogue;
this.context = context;
}
@Override
public void run() {
logger.info("Time series manager thread started");
try {
HelperMethods.manageTimeSeries(record, packageId, username);
} catch (Exception e){
logger.error("Failed to attach one or more resource to the product", e);
HelperMethods.manageTimeSeries(record, packageId, username, catalogue, context);
logger.info("The time series manager thread ended correctly");
} catch (IllegalAccessException | IllegalArgumentException
| InvocationTargetException | IntrospectionException e) {
logger.error("Failed to attach csv files to the product...");
}
}
}

View File

@@ -267,11 +267,11 @@ public class JTests {
}
//@Test
@Test
public void testJSONResource() throws Exception{
DataCatalogueFactory factory = DataCatalogueFactory.getFactory();
DataCatalogue instance = factory.getUtilsPerScope("/gcube/devNext/NextNext");
String datasetName = "test-product-with-resources";
String datasetName = "test-after-time-series-bean-5";
// time series
List<TimeSeriesBean<String, Void>> timeSeries = new ArrayList<TimeSeriesBean<String,Void>>();