Start managing time series. The missing part is the copy of the CSV resource to the shared folder of the VRE: the URL of that folder needs to be used to replace the one attached by CKAN.

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-catalogue/grsf-publisher-ws@134705 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
Costantino Perciante 2016-11-24 16:53:50 +00:00
parent 5ae34426be
commit e399336c3f
12 changed files with 402 additions and 60 deletions

View File

@ -3,6 +3,9 @@
<wb-resource deploy-path="/" source-path="/target/m2e-wtp/web-resources"/>
<wb-resource deploy-path="/" source-path="/src/main/webapp" tag="defaultRootSource"/>
<wb-resource deploy-path="/WEB-INF/classes" source-path="/src/main/java"/>
<dependent-module archiveName="ckan-util-library-2.1.1-SNAPSHOT.jar" deploy-path="/WEB-INF/lib" handle="module:/resource/ckan-util-library/ckan-util-library">
<dependency-type>uses</dependency-type>
</dependent-module>
<property name="context-root" value="grsf-publisher-ws"/>
<property name="java-output-path" value="/grsf-publisher-ws/target/classes"/>
</wb-module>

View File

@ -53,11 +53,6 @@ public class Common {
@CustomField(key="Data owner")
private String dataOwner;
@JsonProperty("catches_or_landings")
@CkanResource
@Valid
private Resource<String> catchesOrLandings;
@JsonProperty("database_sources")
@CkanResource
@NotNull(message="database_source cannot be null")
@ -96,9 +91,6 @@ public class Common {
@NotNull(message="traceability_flag cannot be null")
private boolean traceabilityFlag;
@JsonProperty("extras")
private Map<String, List<String>> extras = new HashMap<>();
@JsonProperty("status")
@CustomField(key="Status")
@Group
@ -111,6 +103,9 @@ public class Common {
@CustomField(key="Product type")
@Tag
private String productType;
@JsonProperty("extras")
private Map<String, List<String>> extras = new HashMap<>();
public Common() {
super();
@ -139,7 +134,7 @@ public class Common {
*/
public Common(String description, String license, String author,
Long version, String authorContact, String maintainer,
String maintainerContact, Resource<String> catchesOrLandings,
String maintainerContact,
List<Resource<Source>> databaseSources,
List<Resource<String>> sourceOfInformation, String dataOwner,
Type type, String shortTitle, String uuid,
@ -153,7 +148,6 @@ public class Common {
this.authorContact = authorContact;
this.maintainer = maintainer;
this.maintainerContact = maintainerContact;
this.catchesOrLandings = catchesOrLandings;
this.databaseSources = databaseSources;
this.sourceOfInformation = sourceOfInformation;
this.dataOwner = dataOwner;
@ -230,14 +224,6 @@ public class Common {
this.maintainerContact = maintainerContact;
}
public Resource<String> getCatchesOrLandings() {
return catchesOrLandings;
}
public void setCatchesOrLandings(Resource<String> catchesOrLandings) {
this.catchesOrLandings = catchesOrLandings;
}
public List<Resource<Source>> getDatabaseSources() {
return databaseSources;
}
@ -316,13 +302,11 @@ public class Common {
+ ", author=" + author + ", version=" + version
+ ", authorContact=" + authorContact + ", maintainer="
+ maintainer + ", maintainerContact=" + maintainerContact
+ ", catchesOrLandings=" + catchesOrLandings
+ ", databaseSources=" + databaseSources
+ ", sourceOfInformation=" + sourceOfInformation
+ ", dataOwner=" + dataOwner + ", type=" + type
+ ", shortTitle=" + shortTitle + ", uuid=" + uuid
+ ", traceabilityFlag=" + traceabilityFlag + ", extras="
+ extras + ", status=" + status + ", productType="
+ productType + "]";
+ ", dataOwner=" + dataOwner + ", databaseSources="
+ databaseSources + ", sourceOfInformation="
+ sourceOfInformation + ", type=" + type + ", shortTitle="
+ shortTitle + ", uuid=" + uuid + ", traceabilityFlag="
+ traceabilityFlag + ", extras=" + extras + ", status="
+ status + ", productType=" + productType + "]";
}
}

View File

@ -1,5 +1,8 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input;
import java.util.List;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
@ -70,6 +73,12 @@ public class FisheryRecord extends Common{
@CustomField(key="Environment")
private String environment;
@JsonProperty("catches_or_landings")
@CustomField(key="Catches or landings")
@Tag
@Valid
private List<TimeSeriesBean<String, String>> catchesOrLandings;
public FisheryRecord() {
super();
}
@ -86,14 +95,15 @@ public class FisheryRecord extends Common{
* @param productionSystemType
* @param flagState
* @param fishingGear
* @param status
* @param environment
* @param catchesOrLandings
*/
public FisheryRecord(String fisheryName, String fisheryId,
String scientificName, String fishingArea, String exploitingStocks,
String managementEntity, String jurisdictionArea,
Production_System_Type productionSystemType, String flagState,
String fishingGear, String environment) {
String fishingGear, String environment,
List<TimeSeriesBean<String, String>> catchesOrLandings) {
super();
this.fisheryName = fisheryName;
this.fisheryId = fisheryId;
@ -106,6 +116,7 @@ public class FisheryRecord extends Common{
this.flagState = flagState;
this.fishingGear = fishingGear;
this.environment = environment;
this.catchesOrLandings = catchesOrLandings;
}
public String getFisheryName() {
@ -196,15 +207,24 @@ public class FisheryRecord extends Common{
this.environment = environment;
}
public List<TimeSeriesBean<String, String>> getCatchesOrLandings() {
return catchesOrLandings;
}
public void setCatchesOrLandings(List<TimeSeriesBean<String, String>> catchesOrLandings) {
this.catchesOrLandings = catchesOrLandings;
}
@Override
public String toString() {
return "FisheryRecord [" + super.toString() + ",fisheryName=" + fisheryName + ", fisheryId="
return "FisheryRecord [fisheryName=" + fisheryName + ", fisheryId="
+ fisheryId + ", scientificName=" + scientificName
+ ", fishingArea=" + fishingArea + ", exploitingStocks="
+ exploitingStocks + ", managementEntity=" + managementEntity
+ ", jurisdictionArea=" + jurisdictionArea
+ ", productionSystemType=" + productionSystemType
+ ", flagState=" + flagState + ", fishingGear=" + fishingGear
+ ", environment=" + environment + "]";
+ ", environment=" + environment + ", catchesOrLandings="
+ catchesOrLandings + "]";
}
}

View File

@ -65,13 +65,13 @@ public class StockRecord extends Common{
@CustomField(key="Exploitation Rate")
@Tag
@Valid
private List<TimeSeriesBean<Exploitation_Rate>> exploitationRate;
private List<TimeSeriesBean<Exploitation_Rate, Void>> exploitationRate;
@JsonProperty("abundance_level")
@CustomField(key="Abundance Level")
@Tag
@Valid
private List<TimeSeriesBean<Abundance_Level>> abundanceLevel;
private List<TimeSeriesBean<Abundance_Level, Void>> abundanceLevel;
@JsonProperty("exploitation_rate_for_grouping")
@Group
@ -134,8 +134,8 @@ public class StockRecord extends Common{
String speciesScientificName, String area,
String exploitingFishery, String managementEntity,
String assessmentMethods, String stateOfMarineResource,
List<TimeSeriesBean<Exploitation_Rate>> exploitationRate,
List<TimeSeriesBean<Abundance_Level>> abundanceLevel,
List<TimeSeriesBean<Exploitation_Rate, Void>> exploitationRate,
List<TimeSeriesBean<Abundance_Level, Void>> abundanceLevel,
Exploitation_Rate exploitationRateForGrouping,
Abundance_Level abundanceLevelForGrouping,
String narrativeStateAndTrend, String scientificAdvice,
@ -260,19 +260,19 @@ public class StockRecord extends Common{
this.stateOfMarineResource = stateOfMarineResource;
}
public List<TimeSeriesBean<Exploitation_Rate>> getExploitationRate() {
public List<TimeSeriesBean<Exploitation_Rate, Void>> getExploitationRate() {
return exploitationRate;
}
public void setExploitationRate(List<TimeSeriesBean<Exploitation_Rate>> exploitationRate) {
public void setExploitationRate(List<TimeSeriesBean<Exploitation_Rate, Void>> exploitationRate) {
this.exploitationRate = exploitationRate;
}
public List<TimeSeriesBean<Abundance_Level>> getAbundanceLevel() {
public List<TimeSeriesBean<Abundance_Level, Void>> getAbundanceLevel() {
return abundanceLevel;
}
public void setAbundanceLevel(List<TimeSeriesBean<Abundance_Level>> abundanceLevel) {
public void setAbundanceLevel(List<TimeSeriesBean<Abundance_Level, Void>> abundanceLevel) {
this.abundanceLevel = abundanceLevel;
}

View File

@ -8,52 +8,60 @@ import org.gcube.data_catalogue.grsf_publish_ws.utils.groups.Exploitation_Rate;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* A time series bean that contains couple <year, value>
* @author Costantino Perciante at ISTI-CNR
* (costantino.perciante@isti.cnr.it)
* Catches_and_landings contains also Unit
* @author Costantino Perciante at ISTI-CNR (costantino.perciante@isti.cnr.it)
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class TimeSeriesBean<T> {
@JsonProperty("value")
@NotNull(message="value of a time series cannot be null")
private T value;
public class TimeSeriesBean<T, T1> implements Comparable<TimeSeriesBean<T, T1>>{
@JsonProperty("year")
@NotNull(message="year of a time series cannot be null")
private Long year;
@JsonProperty("value")
@NotNull(message="value of a time series cannot be null")
private T value;
@JsonProperty("unit")
private T1 unit;
/**
*
*/
public TimeSeriesBean() {
super();
}
/**
* @param value
* @param year
*/
public TimeSeriesBean(T value, Long year) {
public TimeSeriesBean(T value, Long year, T1 unit) {
super();
this.value = value;
this.year = year;
this.unit = unit;
}
public T getValue() {
return value;
}
public void setValue(T value) {
this.value = value;
}
public Long getYear() {
return year;
}
public void setYear(Long year) {
this.year = year;
}
public T1 getUnit() {
return unit;
}
public void setUnit(T1 unit) {
this.unit = unit;
}
@Override
public String toString() {
@ -62,8 +70,15 @@ public class TimeSeriesBean<T> {
// when the value belongs to these classes annotated with @Tag..
if(valueClass.equals(Abundance_Level.class) || valueClass.equals(Exploitation_Rate.class))
return year + "-" + value;
else if(this.unit != null && this.unit.getClass().equals(String.class)) // catches and landings
return year + "-" + value + "-" + unit;
else
return "TimeSeriesBean [value=" + value + ", year=" + year + "]";
return "TimeSeriesBean [value=" + value + ", unit=" + unit + ", year="
+ year + "]";
}
@Override
public int compareTo(TimeSeriesBean<T, T1> o) {
	// Long.compare avoids the truncation/overflow of casting (this.year - o.year)
	// to int, which could invert the ordering for large year differences
	return Long.compare(this.year, o.year); // ascending: lowest year first
}
}

View File

@ -28,8 +28,9 @@ import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.DeleteProductBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseCreationBean;
import org.gcube.data_catalogue.grsf_publish_ws.utils.AssociationToGroupThread;
import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
import org.gcube.data_catalogue.grsf_publish_ws.utils.threads.AssociationToGroupThread;
import org.gcube.data_catalogue.grsf_publish_ws.utils.threads.ManageTimeSeriesThread;
import org.gcube.datacatalogue.ckanutillibrary.DataCatalogue;
import org.gcube.datacatalogue.ckanutillibrary.models.ResourceBean;
import org.gcube.datacatalogue.ckanutillibrary.models.RolesCkanGroupOrOrg;
@ -235,6 +236,10 @@ public class GrsfPublisherFisheryService {
responseBean.setError(null);
responseBean.setProductUrl(catalogue.getPortletUrl() + "?" + URLEncoder.encode("path=/dataset/" + futureName, "UTF-8"));
responseBean.setKbUuid(record.getUuid());
// manage time series
logger.info("Launching thread for time series handling");
new ManageTimeSeriesThread(record, id, username, catalogue, ScopeProvider.instance.get()).start();
if(!groups.isEmpty()){
logger.info("Launching thread for association to the list of groups " + groups);

View File

@ -28,8 +28,9 @@ import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.DeleteProductBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseCreationBean;
import org.gcube.data_catalogue.grsf_publish_ws.utils.AssociationToGroupThread;
import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
import org.gcube.data_catalogue.grsf_publish_ws.utils.threads.AssociationToGroupThread;
import org.gcube.data_catalogue.grsf_publish_ws.utils.threads.ManageTimeSeriesThread;
import org.gcube.datacatalogue.ckanutillibrary.DataCatalogue;
import org.gcube.datacatalogue.ckanutillibrary.models.ResourceBean;
import org.gcube.datacatalogue.ckanutillibrary.models.RolesCkanGroupOrOrg;
@ -224,6 +225,10 @@ public class GrsfPublisherStockService {
status = Status.CREATED;
responseBean.setProductUrl(catalogue.getPortletUrl() + "?" + URLEncoder.encode("path=/dataset/" + futureName, "UTF-8"));
responseBean.setKbUuid(record.getUuid());
// manage time series
logger.info("Launching thread for time series handling");
new ManageTimeSeriesThread(record, id, username, catalogue, ScopeProvider.instance.get()).start();
if(!groups.isEmpty()){
logger.info("Launching thread for association to the list of groups " + groups);

View File

@ -0,0 +1,100 @@
package org.gcube.data_catalogue.grsf_publish_ws.utils;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.util.List;
import javax.swing.plaf.metal.MetalIconFactory.FolderIcon16;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.TimeSeriesBean;
import org.slf4j.LoggerFactory;
/**
* Convert lists to csv format helpers
* @author Costantino Perciante at ISTI-CNR (costantino.perciante@isti.cnr.it)
*/
public class CSVHelpers {

	private static final org.slf4j.Logger logger = LoggerFactory.getLogger(CSVHelpers.class);

	private static final String CSV_SEPARATOR = ",";
	private static final String UPLOAD_LOCATION_LOCAL = System.getProperty("java.io.tmpdir");
	private static final String GRSF_SUB_PATH = "GRSF_TIME_SERIES";

	/**
	 * Writes a time series to a CSV file (UTF-8) under the temporary directory and returns the file reference.<br>
	 * The header is <code>year,value</code>, plus a <code>unit</code> column when the first bean carries a
	 * non-null, non-Void unit. The file is flagged {@link File#deleteOnExit()}.<br>
	 * Please give the timeSeries already sorted per year.
	 * @param timeSeries the (already sorted) series to dump; may be null/empty
	 * @return the created csv file, or null when the series is null/empty or writing fails
	 */
	public static <T, T1> File listToCSV(List<TimeSeriesBean<T, T1>> timeSeries){

		if(timeSeries == null || timeSeries.isEmpty()){
			logger.warn("The time series provided is null or empty ... {}", timeSeries);
			return null;
		}

		String fileName = UPLOAD_LOCATION_LOCAL + File.separator + GRSF_SUB_PATH + File.separator
				+ "time_series_" + System.currentTimeMillis() + ".csv";
		File file = new File(fileName);
		file.getParentFile().mkdirs();

		// try-with-resources guarantees the stream/writer are closed even when a row fails to write
		try(BufferedWriter bw = new BufferedWriter(
				new OutputStreamWriter(new FileOutputStream(file), "UTF-8"))){

			// the unit column is emitted only when the beans actually carry a unit
			// (Void is the marker type used by series without units)
			boolean isUnitPresent = timeSeries.get(0).getUnit() != null
					&& !timeSeries.get(0).getUnit().getClass().equals(Void.class);

			// header line
			StringBuilder headerLine = new StringBuilder("year").append(CSV_SEPARATOR).append("value");
			if(isUnitPresent){
				headerLine.append(CSV_SEPARATOR).append("unit");
			}
			bw.write(headerLine.toString());
			bw.newLine();

			// data rows, in the order given (caller is expected to have sorted them)
			for (TimeSeriesBean<T, T1> bean : timeSeries){
				StringBuilder oneLine = new StringBuilder()
						.append(bean.getYear())
						.append(CSV_SEPARATOR)
						.append(bean.getValue());
				if(isUnitPresent){
					oneLine.append(CSV_SEPARATOR).append(bean.getUnit() != null ? bean.getUnit() : "");
				}
				bw.write(oneLine.toString());
				bw.newLine();
			}
			bw.flush(); // single flush before close instead of one per row

			logger.info("CSV file created correctly on this machine!");

			// make sure the temp file does not pile up across restarts
			file.deleteOnExit();
			return file;
		}
		catch(Exception e){
			logger.error("Failed to create csv file for time series", e);
			return null;
		}
	}
}

View File

@ -1,22 +1,28 @@
package org.gcube.data_catalogue.grsf_publish_ws.utils;
import java.beans.IntrospectionException;
import java.beans.PropertyDescriptor;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.ServletContext;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CkanResource;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Group;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Tag;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.Common;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.Resource;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.TimeSeriesBean;
import org.gcube.data_catalogue.grsf_publish_ws.utils.groups.Status;
import org.gcube.datacatalogue.ckanutillibrary.DataCatalogue;
import org.gcube.datacatalogue.ckanutillibrary.DataCatalogueFactory;
@ -42,6 +48,7 @@ public abstract class HelperMethods {
// to be retrieved from the web.xml
private static final String PENDING_CONTEX_KEY = "PendingContext";
private static final String CONFIRMED_CONTEX_KEY = "ConfirmedContext";
private static final String CSV_FILE_FORMAT = ".csv";
/**
* Convert a group name to its id on ckan
@ -388,4 +395,64 @@ public abstract class HelperMethods {
logger.info("Returning resources " + toReturn);
return toReturn;
}
/**
* Manage the time series bean within a resource (e.g., catches or landings, exploitation rate and so on).
* The method save the time series as csv on ckan, and also save the file in the .catalogue area of the shared vre folder.
* @param record
* @throws IntrospectionException
* @throws InvocationTargetException
* @throws IllegalArgumentException
* @throws IllegalAccessException
*/
/**
 * Manages the time series beans within a record (e.g., catches or landings, exploitation rate and so on):
 * every non-empty List field whose elements are {@link TimeSeriesBean} is dumped to csv and uploaded as a
 * resource of the given ckan package.
 * The copy of the file into the .catalogue area of the shared vre folder (and the replacement of the
 * resource url) is still TODO.
 * @param record the record to scan (its whole class hierarchy is inspected); must not be null
 * @param packageId the ckan package the csv resources are attached to
 * @param username the user on whose behalf the upload is performed (resolved to an api key)
 * @throws IntrospectionException if a field has no readable getter
 * @throws InvocationTargetException if a getter throws
 * @throws IllegalArgumentException if record is null
 * @throws IllegalAccessException if a getter is not accessible
 */
public static void manageTimeSeries(Common record, String packageId, String username) throws IllegalAccessException, IllegalArgumentException, InvocationTargetException, IntrospectionException{

	if(record == null)
		throw new IllegalArgumentException("The given record is null!!");

	// walk the hierarchy from the concrete record class up to Object, so that
	// time series declared in superclasses (e.g. Common) are handled too
	Class<?> current = record.getClass();
	do{
		for (Field field : current.getDeclaredFields()) {
			if (!Collection.class.isAssignableFrom(field.getType()))
				continue;
			// read the collection through its getter; skip null/empty ones
			Object fieldValue = new PropertyDescriptor(field.getName(), current).getReadMethod().invoke(record);
			if(fieldValue == null)
				continue;
			List asList = (List) fieldValue;
			if(asList.isEmpty() || !asList.get(0).getClass().equals(TimeSeriesBean.class))
				continue;

			logger.debug("A time series has been just found");

			// the @CustomField key names the csv resource; guard against fields missing the annotation
			CustomField customAnnotation = field.getAnnotation(CustomField.class);
			if(customAnnotation == null){
				logger.warn("Time series field " + field.getName() + " has no @CustomField annotation, skipping it");
				continue;
			}
			String resourceToAttachName = customAnnotation.key().replaceAll("\\s", "_") + CSV_FILE_FORMAT;

			File csvFile = CSVHelpers.listToCSV(asList);
			if(csvFile == null){
				// listToCSV returns null on failure: do not hand a null file to ckan
				logger.error("Unable to create the csv file for field " + field.getName() + ", skipping it");
				continue;
			}

			// upload this file on ckan
			DataCatalogue instance = getDataCatalogueRunningInstance(ScopeProvider.instance.get());
			eu.trentorise.opendata.jackan.model.CkanResource res = instance.uploadResourceFile(
					csvFile,
					packageId,
					instance.getApiKeyFromUsername(username),
					resourceToAttachName,
					customAnnotation.key() + " time series for this product");

			// TODO upload this file on the folder of the vre (under .catalogue) and change the url of the resource
			if(res == null)
				logger.warn("Upload of resource " + resourceToAttachName + " to package " + packageId + " failed");

			// local temp file is no longer needed
			csvFile.delete();
		}
	}
	while((current = current.getSuperclass()) != null); // iterate from the inherited class up to the Object.class
}
}

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.utils;
package org.gcube.data_catalogue.grsf_publish_ws.utils.threads;
import java.util.List;

View File

@ -0,0 +1,30 @@
package org.gcube.data_catalogue.grsf_publish_ws.utils.threads;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.Common;
import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
import org.gcube.datacatalogue.ckanutillibrary.DataCatalogue;
import org.slf4j.LoggerFactory;
/**
* Extract the time series present in the record, load them as resource on ckan and on the .catalogue
* folder under the vre folder.
* @author Costantino Perciante at ISTI-CNR (costantino.perciante@isti.cnr.it)
*/
public class ManageTimeSeriesThread extends Thread{
// Logger
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(ManageTimeSeriesThread.class);
public ManageTimeSeriesThread(Common record, String packageId,
String username, DataCatalogue catalogue, String context) {
logger.info("Time series manager thread started");
try {
HelperMethods.manageTimeSeries(record, packageId, username);
} catch (Exception e){
logger.error("Failed to attach one or more resource to the product", e);
}
}
}

View File

@ -3,12 +3,15 @@ package org.gcube.data_catalogue.grsf_publish_ws;
import java.beans.IntrospectionException;
import java.beans.PropertyDescriptor;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -19,11 +22,16 @@ import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Tag;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.Resource;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.TimeSeriesBean;
import org.gcube.data_catalogue.grsf_publish_ws.utils.CSVHelpers;
import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
import org.gcube.data_catalogue.grsf_publish_ws.utils.groups.Abundance_Level;
import org.gcube.data_catalogue.grsf_publish_ws.utils.groups.Source;
import org.gcube.data_catalogue.grsf_publish_ws.utils.groups.Status;
import org.gcube.data_catalogue.grsf_publish_ws.utils.groups.Type;
import org.gcube.datacatalogue.ckanutillibrary.DataCatalogue;
import org.gcube.datacatalogue.ckanutillibrary.DataCatalogueFactory;
import org.junit.Test;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonProcessingException;
@ -258,4 +266,109 @@ public class JTests {
}
//@Test
/**
 * Builds a (repeated) sample time series, sorts it by year, dumps it to csv and
 * uploads the file as a resource of a test dataset on the devNext catalogue.
 * @throws Exception on any catalogue/csv failure
 */
public void testJSONResource() throws Exception{

	DataCatalogueFactory factory = DataCatalogueFactory.getFactory();
	DataCatalogue instance = factory.getUtilsPerScope("/gcube/devNext/NextNext");
	String datasetName = "test-product-with-resources";

	// base <value, year> couples; the full series repeats them to get a realistic size
	String[] values = {"Value A", "Value B", "Value C", "Value D", "Value E",
			"Value F", "Value G", "Value H", "Value I"};
	long[] years = {2001L, 2231L, 1943L, 1054L, 3422L, 2121L, 2454L, 1213L, 2213L};

	// same insertion order as the old copy-pasted version: the 9 couples, 6 times over
	List<TimeSeriesBean<String, Void>> timeSeries = new ArrayList<TimeSeriesBean<String,Void>>();
	for (int repetition = 0; repetition < 6; repetition++) {
		for (int i = 0; i < values.length; i++) {
			timeSeries.add(new TimeSeriesBean<String, Void>(values[i], years[i], null));
		}
	}

	// sort ascending per year before dumping, as listToCSV expects
	Collections.sort(timeSeries);
	File csvFile = CSVHelpers.listToCSV(timeSeries);

	// send file
	instance.uploadResourceFile(csvFile, datasetName, instance.getApiKeyFromUsername("costantino.perciante"), "random_name.csv", null);
}
//@Test
/**
 * Checks that a time-series list set on a record can be rediscovered via reflection:
 * scans the record's class hierarchy for List fields whose first element is a
 * TimeSeriesBean and logs the field name and its @CustomField key.
 * @throws IllegalAccessException if a getter is not accessible
 * @throws IllegalArgumentException on a bad reflective call
 * @throws InvocationTargetException if a getter throws
 * @throws IntrospectionException if a field has no readable getter
 */
public void testlist() throws IllegalAccessException, IllegalArgumentException, InvocationTargetException, IntrospectionException{
// fixture: a record with a single one-element catches-or-landings series
FisheryRecord record = new FisheryRecord();
ArrayList<TimeSeriesBean<String, String>> list = new ArrayList<TimeSeriesBean<String,String>>();
list.add(new TimeSeriesBean<String, String>("as", 65445L, "asd"));
record.setCatchesOrLandings(list);
// walk the hierarchy: FisheryRecord first, then its superclasses (e.g. Common)
Class<?> current = record.getClass();
do{
Field[] fields = current.getDeclaredFields();
for (Field field : fields) {
// only collection-typed fields can hold a time series
if (Collection.class.isAssignableFrom(field.getType())) {
// if the list is not null, get an element
Object f = new PropertyDescriptor(field.getName(), current).getReadMethod().invoke(record);
if(f != null){
List asList = (List)f;
if(!asList.isEmpty())
// element type identifies a time series field
if(asList.get(0).getClass().equals(TimeSeriesBean.class)){
logger.debug("Name is " + field.getName());
// the @CustomField key is what manageTimeSeries uses to name the csv resource
CustomField customAnnotation = field.getAnnotation(CustomField.class);
if(customAnnotation != null)
logger.debug("Name is " + customAnnotation.key());
}
}
}
}
}
while((current = current.getSuperclass())!=null); // iterate from the inherited class up to the Object.class
}
}