2016-11-24 17:53:50 +01:00
|
|
|
package org.gcube.data_catalogue.grsf_publish_ws.utils;
|
|
|
|
|
|
|
|
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.TimeSeriesBean;

import org.slf4j.LoggerFactory;
|
|
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Convert lists to csv format helpers
|
|
|
|
* @author Costantino Perciante at ISTI-CNR (costantino.perciante@isti.cnr.it)
|
|
|
|
*/
|
2017-08-04 15:06:11 +02:00
|
|
|
public class CSVUtils {
|
2016-11-24 17:53:50 +01:00
|
|
|
|
2017-08-04 15:06:11 +02:00
|
|
|
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(CSVUtils.class);
|
2016-11-24 17:53:50 +01:00
|
|
|
private static final String CSV_SEPARATOR = ",";
|
|
|
|
private static final String UPLOAD_LOCATION_LOCAL = System.getProperty("java.io.tmpdir");
|
|
|
|
private static final String GRSF_SUB_PATH = "GRSF_TIME_SERIES";
|
2017-07-12 18:05:35 +02:00
|
|
|
public static final String CSV_EXTENSION = ".csv";
|
2016-11-24 17:53:50 +01:00
|
|
|
|
|
|
|
/**
|
|
|
|
* Write a time series to a csv file, and returns the file reference.<br>
|
|
|
|
* Please give the timeSeries already sorted per year
|
|
|
|
* @param timeSeries
|
2017-09-19 16:42:15 +02:00
|
|
|
* @param relevantSources
|
2016-11-25 18:25:23 +01:00
|
|
|
* @param <T>
|
|
|
|
* @param <T1>
|
2016-11-24 17:53:50 +01:00
|
|
|
*/
|
2017-09-19 16:42:15 +02:00
|
|
|
public static <T, T1> File listToCSV(List<TimeSeriesBean<T, T1>> timeSeries, String[] relevantSources){
|
2016-11-24 17:53:50 +01:00
|
|
|
|
|
|
|
if(timeSeries == null || timeSeries.isEmpty()){
|
|
|
|
logger.warn("The time series provided is null or empty ... " + timeSeries );
|
|
|
|
return null;
|
|
|
|
}else
|
|
|
|
|
|
|
|
try{
|
2017-07-12 18:05:35 +02:00
|
|
|
String fileName = UPLOAD_LOCATION_LOCAL + File.separator + GRSF_SUB_PATH + File.separator + "time_series_" + System.currentTimeMillis() + CSV_EXTENSION;
|
2016-11-24 17:53:50 +01:00
|
|
|
File file = new File(fileName);
|
|
|
|
file.getParentFile().mkdirs();
|
|
|
|
file.createNewFile();
|
|
|
|
FileOutputStream fo = new FileOutputStream(file);
|
|
|
|
BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(fo, "UTF-8"));
|
|
|
|
|
2017-09-19 16:42:15 +02:00
|
|
|
Set<String> sources = new HashSet<String>(3);
|
|
|
|
|
2017-07-13 11:50:05 +02:00
|
|
|
// discover how the header will look like
|
2017-07-27 16:45:18 +02:00
|
|
|
boolean isUnitPresent = false;
|
|
|
|
boolean isValuePresent = false;
|
|
|
|
boolean isSourcePresent = false;
|
|
|
|
boolean isAssessmentPresent = false;
|
2017-10-25 16:45:34 +02:00
|
|
|
boolean isDataOwnerPresent = false;
|
2017-09-19 16:42:15 +02:00
|
|
|
|
2017-07-27 16:45:18 +02:00
|
|
|
for (TimeSeriesBean<T, T1> timeSeriesBean : timeSeries) {
|
|
|
|
if(timeSeriesBean.isSourcePresent())
|
|
|
|
isSourcePresent = true;
|
|
|
|
if(timeSeriesBean.isAssessmentPresent())
|
|
|
|
isAssessmentPresent = true;
|
|
|
|
if(timeSeriesBean.isValuePresent())
|
|
|
|
isValuePresent = true;
|
|
|
|
if(timeSeriesBean.isUnitPresent())
|
|
|
|
isUnitPresent = true;
|
2017-10-25 16:45:34 +02:00
|
|
|
if(timeSeriesBean.isDataOwnerPresent())
|
|
|
|
isDataOwnerPresent = true;
|
|
|
|
|
|
|
|
if(isSourcePresent & isAssessmentPresent & isValuePresent & isUnitPresent & isDataOwnerPresent)
|
|
|
|
break;
|
2017-07-27 16:45:18 +02:00
|
|
|
}
|
2016-11-24 17:53:50 +01:00
|
|
|
|
|
|
|
StringBuffer headerLine = new StringBuffer();
|
2017-07-13 11:50:05 +02:00
|
|
|
headerLine.append(TimeSeriesBean.YEAR_FIELD);
|
2016-11-24 17:53:50 +01:00
|
|
|
|
2017-07-13 11:50:05 +02:00
|
|
|
if(isValuePresent){
|
|
|
|
headerLine.append(CSV_SEPARATOR);
|
|
|
|
headerLine.append(TimeSeriesBean.VALUE_FIELD);
|
|
|
|
}
|
2016-11-24 17:53:50 +01:00
|
|
|
|
2017-07-13 11:50:05 +02:00
|
|
|
if(isUnitPresent){
|
2016-11-24 17:53:50 +01:00
|
|
|
headerLine.append(CSV_SEPARATOR);
|
2017-07-13 11:50:05 +02:00
|
|
|
headerLine.append(TimeSeriesBean.UNIT_FIELD);
|
|
|
|
}
|
2016-11-24 17:53:50 +01:00
|
|
|
|
2017-07-13 11:50:05 +02:00
|
|
|
if(isSourcePresent){
|
|
|
|
headerLine.append(CSV_SEPARATOR);
|
2017-10-25 16:45:34 +02:00
|
|
|
headerLine.append(TimeSeriesBean.DB_SOURCE_FIELD);
|
|
|
|
}
|
|
|
|
|
|
|
|
if(isDataOwnerPresent){
|
|
|
|
headerLine.append(CSV_SEPARATOR);
|
|
|
|
headerLine.append(TimeSeriesBean.DATA_OWNER_FIELD);
|
2016-11-24 17:53:50 +01:00
|
|
|
}
|
2017-09-19 16:42:15 +02:00
|
|
|
|
2017-07-21 15:33:22 +02:00
|
|
|
if(isAssessmentPresent){
|
|
|
|
headerLine.append(CSV_SEPARATOR);
|
|
|
|
headerLine.append(TimeSeriesBean.ASSESSMENT_FIELD);
|
|
|
|
}
|
2017-07-13 11:50:05 +02:00
|
|
|
|
2016-11-24 17:53:50 +01:00
|
|
|
bw.write(headerLine.toString());
|
|
|
|
bw.newLine();
|
|
|
|
bw.flush();
|
|
|
|
|
|
|
|
// now iterate over the rows.. they are already sorted in ascending order
|
|
|
|
for (TimeSeriesBean<T, T1> bean : timeSeries)
|
|
|
|
{
|
|
|
|
StringBuffer oneLine = new StringBuffer();
|
|
|
|
oneLine.append(bean.getYear());
|
2017-07-13 11:50:05 +02:00
|
|
|
|
|
|
|
if(isValuePresent){
|
|
|
|
oneLine.append(CSV_SEPARATOR);
|
2017-07-15 10:55:38 +02:00
|
|
|
oneLine.append(bean.getValue() != null? bean.getValue().toString().contains(",") ? "\"" + bean.getValue() + "\"" : bean.getValue() : "");
|
2017-07-13 11:50:05 +02:00
|
|
|
}
|
2016-11-24 17:53:50 +01:00
|
|
|
|
|
|
|
if(isUnitPresent){
|
|
|
|
oneLine.append(CSV_SEPARATOR);
|
|
|
|
oneLine.append(bean.getUnit() != null? bean.getUnit() : "");
|
|
|
|
}
|
|
|
|
|
2017-07-13 11:50:05 +02:00
|
|
|
if(isSourcePresent){
|
|
|
|
oneLine.append(CSV_SEPARATOR);
|
2017-10-25 16:45:34 +02:00
|
|
|
oneLine.append(bean.getDatabaseSource() != null? bean.getDatabaseSource() : "");
|
|
|
|
if(bean.getDatabaseSource() != null && !bean.getDatabaseSource().isEmpty())
|
|
|
|
sources.add(bean.getDatabaseSource());
|
|
|
|
}
|
|
|
|
|
|
|
|
if(isDataOwnerPresent){
|
|
|
|
oneLine.append(CSV_SEPARATOR);
|
|
|
|
oneLine.append(bean.getDataOwner() != null? bean.getDataOwner() : "");
|
2017-07-13 11:50:05 +02:00
|
|
|
}
|
2017-09-19 16:42:15 +02:00
|
|
|
|
2017-07-21 15:33:22 +02:00
|
|
|
if(isAssessmentPresent){
|
|
|
|
oneLine.append(CSV_SEPARATOR);
|
|
|
|
oneLine.append(bean.getAssessment() != null? bean.getAssessment() : "");
|
|
|
|
}
|
2017-07-13 11:50:05 +02:00
|
|
|
|
2016-11-24 17:53:50 +01:00
|
|
|
bw.write(oneLine.toString());
|
|
|
|
bw.newLine();
|
2017-07-13 11:50:05 +02:00
|
|
|
bw.flush();
|
2016-11-24 17:53:50 +01:00
|
|
|
}
|
2017-07-13 11:50:05 +02:00
|
|
|
|
2016-11-25 13:56:27 +01:00
|
|
|
bw.close();
|
2017-07-13 11:50:05 +02:00
|
|
|
|
2017-07-12 18:05:35 +02:00
|
|
|
// file created
|
|
|
|
logger.debug("CSV file created correctly on this machine!");
|
2017-10-25 16:45:34 +02:00
|
|
|
|
|
|
|
// Add names of the sources to the file's name
|
2017-09-19 16:42:15 +02:00
|
|
|
for (String source : sources) {
|
|
|
|
relevantSources[0] += source + "_";
|
|
|
|
}
|
|
|
|
relevantSources[0] = relevantSources[0].substring(0, relevantSources[0].length() - 1);
|
2017-10-25 16:45:34 +02:00
|
|
|
|
2016-11-24 17:53:50 +01:00
|
|
|
// on exit delete it...
|
|
|
|
file.deleteOnExit();
|
|
|
|
return file;
|
|
|
|
}
|
|
|
|
catch(Exception e){
|
|
|
|
logger.error("Failed to create csv file for time series", e);
|
|
|
|
return null;
|
|
|
|
}
|
|
|
|
}
|
2017-09-19 16:42:15 +02:00
|
|
|
|
2017-07-13 11:50:05 +02:00
|
|
|
|
2017-07-12 18:05:35 +02:00
|
|
|
}
|