minor refactoring

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-catalogue/grsf-publisher-ws@152488 82a268e6-3cf1-43bd-a215-b396298e98cf
commit c35c19434c
parent 159e2906d1
Author: Costantino Perciante
Date:   2017-08-04 13:06:11 +00:00
3 changed files with 8 additions and 8 deletions

File 1 of 3: class CSVUtils (renamed from CSVHelpers)

@@ -14,9 +14,9 @@ import org.slf4j.LoggerFactory;
  * Convert lists to csv format helpers
  * @author Costantino Perciante at ISTI-CNR (costantino.perciante@isti.cnr.it)
  */
-public class CSVHelpers {
+public class CSVUtils {
 
-	private static final org.slf4j.Logger logger = LoggerFactory.getLogger(CSVHelpers.class);
+	private static final org.slf4j.Logger logger = LoggerFactory.getLogger(CSVUtils.class);
 	private static final String CSV_SEPARATOR = ",";
 	private static final String UPLOAD_LOCATION_LOCAL = System.getProperty("java.io.tmpdir");
 	private static final String GRSF_SUB_PATH = "GRSF_TIME_SERIES";

File 2 of 3: class ManageTimeSeriesThread

@@ -25,7 +25,7 @@ import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.TimeSeries;
 import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.Common;
 import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.FisheryRecord;
 import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord;
-import org.gcube.data_catalogue.grsf_publish_ws.utils.CSVHelpers;
+import org.gcube.data_catalogue.grsf_publish_ws.utils.CSVUtils;
 import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
 import org.gcube.data_catalogue.grsf_publish_ws.utils.cache.CacheImpl;
 import org.gcube.data_catalogue.grsf_publish_ws.utils.cache.CacheInterface;
@@ -166,7 +166,7 @@ public class ManageTimeSeriesThread extends Thread{
 		char firstLetter = uuidKB.charAt(0);
 		// the whole path of the directory is going to be...
-		String csvDirectoryForThisProduct = recordTypeFolderName + PATH_SEPARATOR + firstLetter + PATH_SEPARATOR + replaceIllegalChars(uuidKB, "_") + PATH_SEPARATOR + CSVHelpers.CSV_EXTENSION.replace(".", "");
+		String csvDirectoryForThisProduct = recordTypeFolderName + PATH_SEPARATOR + firstLetter + PATH_SEPARATOR + replaceIllegalChars(uuidKB, "_") + PATH_SEPARATOR + CSVUtils.CSV_EXTENSION.replace(".", "");
 		logger.debug("The path under which the time series are going to be saved is " + csvDirectoryForThisProduct);
 		WorkspaceFolder csvFolder = HelperMethods.createOrGetSubFoldersByPath(catalogueFolder, csvDirectoryForThisProduct);
@@ -194,7 +194,7 @@ public class ManageTimeSeriesThread extends Thread{
 		CkanResourceBase ckanResource = null;
 		ExternalFile createdFileOnWorkspace = null;
-		File csvFile = CSVHelpers.listToCSV(asList);
+		File csvFile = CSVUtils.listToCSV(asList);
 		if(csvFile != null){
 			for (int i = 0; i < CHANCES; i++) {
@@ -207,7 +207,7 @@ public class ManageTimeSeriesThread extends Thread{
 				if(ckanResource != null){
 					if(createdFileOnWorkspace == null)
-						createdFileOnWorkspace = HelperMethods.uploadExternalFile(csvFolder, csvFileName + "_" + customAnnotation.key() + CSVHelpers.CSV_EXTENSION, resourceToAttachOnCkanDescription, csvFile);
+						createdFileOnWorkspace = HelperMethods.uploadExternalFile(csvFolder, csvFileName + "_" + customAnnotation.key() + CSVUtils.CSV_EXTENSION, resourceToAttachOnCkanDescription, csvFile);
 					if(createdFileOnWorkspace != null){
 						String publicUrlToSetOnCkan = createdFileOnWorkspace.getPublicLink(true);

File 3 of 3: class JTests

@@ -32,7 +32,7 @@ import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.FisheryRecord;
 import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord;
 import org.gcube.data_catalogue.grsf_publish_ws.json.input.utils.Resource;
 import org.gcube.data_catalogue.grsf_publish_ws.json.input.utils.TimeSeriesBean;
-import org.gcube.data_catalogue.grsf_publish_ws.utils.CSVHelpers;
+import org.gcube.data_catalogue.grsf_publish_ws.utils.CSVUtils;
 import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
 import org.gcube.data_catalogue.grsf_publish_ws.utils.groups.Abundance_Level;
 import org.gcube.data_catalogue.grsf_publish_ws.utils.groups.Fishery_Type;
@@ -233,7 +233,7 @@ public class JTests {
 		Collections.sort(timeSeries);
-		File csvFile = CSVHelpers.listToCSV(timeSeries);
+		File csvFile = CSVUtils.listToCSV(timeSeries);
 		// send file
 		instance.uploadResourceFile(csvFile, datasetName, instance.getApiKeyFromUsername("costantino.perciante"), "random_name.csv", null, null, null);
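
For orientation, a minimal sketch of what the renamed class might look like after this commit. The package, logger, and the three private constants are taken verbatim from the first hunk; CSV_EXTENSION, the listToCSV signature, and its body are assumptions reconstructed from the call sites in ManageTimeSeriesThread and JTests, not the repository's actual code.

// Hypothetical sketch only: CSV_EXTENSION and listToCSV are inferred from
// the call sites in this diff; the real implementation may differ.
package org.gcube.data_catalogue.grsf_publish_ws.utils;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.List;
import java.util.UUID;

import org.slf4j.LoggerFactory;

public class CSVUtils {

	private static final org.slf4j.Logger logger = LoggerFactory.getLogger(CSVUtils.class);
	private static final String CSV_SEPARATOR = ",";
	private static final String UPLOAD_LOCATION_LOCAL = System.getProperty("java.io.tmpdir");
	private static final String GRSF_SUB_PATH = "GRSF_TIME_SERIES";

	// Referenced as CSVUtils.CSV_EXTENSION in the hunks above (value assumed).
	public static final String CSV_EXTENSION = ".csv";

	/**
	 * Writes each element of the list as one line of a temporary CSV file and
	 * returns the file, or null on failure (the call sites above null-check
	 * the returned File before using it).
	 */
	public static File listToCSV(List<?> rows) {
		File directory = new File(UPLOAD_LOCATION_LOCAL, GRSF_SUB_PATH);
		directory.mkdirs();
		File csvFile = new File(directory, UUID.randomUUID() + CSV_EXTENSION);
		try (PrintWriter writer = new PrintWriter(csvFile)) {
			for (Object row : rows) {
				// Assumes each bean (e.g. TimeSeriesBean) renders its fields,
				// joined by CSV_SEPARATOR, in its toString().
				writer.println(row.toString());
			}
			return csvFile;
		} catch (FileNotFoundException e) {
			logger.error("Unable to write CSV file " + csvFile, e);
			return null;
		}
	}
}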