Use data transfer instead of HTTP connections to account for HTTPS links

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngineGeoSpatialExtension@92784 82a268e6-3cf1-43bd-a215-b396298e98cf
Gianpaolo Coro 2014-03-06 17:52:12 +00:00
parent 99e1b029dc
commit 7f394869a1
5 changed files with 44 additions and 7 deletions

View File

@@ -5,13 +5,15 @@ import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLConnection;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.tools.ant.types.CommandlineJava.SysProperties;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.data.transfer.common.TransferUtil;
/**
@@ -46,6 +48,7 @@ public class AscRasterReader
URLConnection urlConn = null;
if (filename.startsWith("http")){
AnalysisLogger.getLogger().debug("Getting file from http");
/*
URL fileurl = new URL(filename);
urlConn = fileurl.openConnection();
urlConn.setConnectTimeout(120000);
@@ -53,6 +56,13 @@ public class AscRasterReader
urlConn.setAllowUserInteraction(false);
urlConn.setDoOutput(true);
input = new BufferedReader(new InputStreamReader(urlConn.getInputStream()));
*/
// using Manzi's data transfer library to overcome HTTPS issues
try {
input = new BufferedReader(new InputStreamReader(TransferUtil.getInputStream(new URI(filename),120000)));
} catch (URISyntaxException e) {
AnalysisLogger.getLogger().debug("Error: Bad URI "+filename);
}
}
else {
AnalysisLogger.getLogger().debug("Getting file from local file");
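
The hunk above replaces the direct URLConnection read with a stream obtained through the data-transfer utility, which also handles https endpoints. Below is a minimal sketch of the new read path, using only the TransferUtil.getInputStream(URI, int) call introduced by this commit; the surrounding class and method names are illustrative, and closing the reader is left to the caller.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URI;

import org.gcube.data.transfer.common.TransferUtil;

public class RasterStreamSketch {

	// Open a reader over a remote (http or https) ASC file with a 120 s timeout.
	// TransferUtil.getInputStream(URI, int) is the call used in AscRasterReader above.
	public static BufferedReader openRemoteAsc(String filename) throws Exception {
		if (filename.startsWith("http"))
			return new BufferedReader(new InputStreamReader(
					TransferUtil.getInputStream(new URI(filename), 120000)));
		// non-http filenames keep the previous local FileReader path (not shown here)
		throw new IllegalArgumentException("Not a remote file: " + filename);
	}
}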

View File

@@ -1,11 +1,13 @@
package org.gcube.dataanalysis.geo.connectors.geotiff;
import java.io.File;
import java.net.URI;
import java.util.List;
import java.util.UUID;
import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.data.transfer.common.TransferUtil;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.connectors.asc.ASC;
@@ -37,7 +39,12 @@ public class GeoTiff implements GISDataConnector {
AnalysisLogger.getLogger().debug("Checking url: " + urlCheck);
String randomFile = new File(persistenceDir, "geotiff" + UUID.randomUUID().toString().replace("-", "")).getAbsolutePath();
uuid = randomFile + ".tiff";
HttpRequest.downloadFile(layerURL, uuid);
// HttpRequest.downloadFile(layerURL, uuid);
TransferUtil downloadutil = new TransferUtil();
downloadutil.setConnectiontimeout(120000);
downloadutil.setTransferTimeout(120000);
downloadutil.performTransfer(new URI(layerURL), uuid);
AnalysisLogger.getLogger().debug("Converting to ASCII file: " + uuid);
ascFile = GdalConverter.convertToASC(uuid);
AnalysisLogger.getLogger().debug("Conversion to ASCII complete: " + ascFile);
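
In GeoTiff the download switches from HttpRequest.downloadFile to a TransferUtil instance configured with a 120-second connection and transfer timeout. The same four lines appear twice more in WCS below, so a small shared helper would keep the timeout value in one place; this is only a sketch (the class and method names are hypothetical, the TransferUtil calls are the ones used in this commit):

import java.net.URI;

import org.gcube.data.transfer.common.TransferUtil;

public class TransferDownloadSketch {

	// Download a remote layer (http or https) to a local path with 120 s timeouts.
	// setConnectiontimeout, setTransferTimeout and performTransfer are the calls
	// used by GeoTiff and WCS in this commit; the helper itself is a suggestion.
	public static String downloadWithTransferUtil(String layerURL, String localPath) throws Exception {
		TransferUtil downloadutil = new TransferUtil();
		downloadutil.setConnectiontimeout(120000);
		downloadutil.setTransferTimeout(120000);
		downloadutil.performTransfer(new URI(layerURL), localPath);
		return localPath;
	}
}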

View File

@@ -1,12 +1,14 @@
package org.gcube.dataanalysis.geo.connectors.wcs;
import java.io.File;
import java.net.URI;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;
import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.data.transfer.common.TransferUtil;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.connectors.asc.ASC;
@@ -56,7 +58,11 @@ public class WCS implements GISDataConnector {
int urlCheck = HttpRequest.checkUrl(url, null, null);
AnalysisLogger.getLogger().debug("Checking url: " + urlCheck);
uuid = new File(persistenceDir, "geotiff" + UUID.randomUUID().toString().replace("-", "") + ".tiff").getAbsolutePath();
HttpRequest.downloadFile(url, uuid);
// HttpRequest.downloadFile(url, uuid);
TransferUtil downloadutil = new TransferUtil();
downloadutil.setConnectiontimeout(120000);
downloadutil.setTransferTimeout(120000);
downloadutil.performTransfer(new URI(url), uuid);
GeoTiffMetadata meta = new GeoTiffMetadata();
meta.readAndDisplayMetadata(uuid);
zScale = meta.zScale;
@@ -154,7 +160,11 @@ public class WCS implements GISDataConnector {
AnalysisLogger.getLogger().debug("Checking url: " + urlCheck);
String randomFile = new File(persistenceDir, "geotiff" + UUID.randomUUID().toString().replace("-", "")).getAbsolutePath();
uuid = randomFile + ".tiff";
HttpRequest.downloadFile(url, uuid);
// HttpRequest.downloadFile(url, uuid);
TransferUtil downloadutil = new TransferUtil();
downloadutil.setConnectiontimeout(120000);
downloadutil.setTransferTimeout(120000);
downloadutil.performTransfer(new URI(url), uuid);
AnalysisLogger.getLogger().debug("Converting to ASCII file: " + uuid);
ascFile = GdalConverter.convertToASC(uuid);
AnalysisLogger.getLogger().debug("Conversion to ASCII complete: " + ascFile);
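
Both WCS call sites above repeat the same download block; with the helper sketched after the GeoTiff hunk (a suggestion, not part of this commit), each would reduce to a single call:

// hypothetical replacement for the repeated four-line block
TransferDownloadSketch.downloadWithTransferUtil(url, uuid);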

View File

@@ -60,7 +60,17 @@ public class TestOccurrenceEnrichment {
config.setParam("OutputTableDBName","testenrichment");
config.setParam("OutputTableName","testenrichment");
String sep=AlgorithmConfiguration.getListSeparator();
config.setParam("Layers","http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/global-reanalysis-phys-001-004-b-ref-fr-mjm95-gridt__ENVIRONMENT_OCEANS_ELEVATION_1366210702774.nc"+sep+"4d597da9-dbfa-4a65-9de6-9bbff69eac19"+sep+"2c2304d1-681a-4f3a-8409-e8cdb5ed447f");
// config.setParam("Layers","http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/global-reanalysis-phys-001-004-b-ref-fr-mjm95-gridt__ENVIRONMENT_OCEANS_ELEVATION_1366210702774.nc"+sep+"4d597da9-dbfa-4a65-9de6-9bbff69eac19"+sep+"2c2304d1-681a-4f3a-8409-e8cdb5ed447f");
//ASC file
config.setParam("Layers","http://goo.gl/s6fOfS");
// config.setParam("Layers","https://dl.dropboxusercontent.com/u/12809149/geoserver-GetCoverage.image.asc");
// NetCDF over plain HTTP - cannot work
// config.setParam("Layers","http://goo.gl/qXtqiY");
// config.setParam("Layers","https://dl.dropboxusercontent.com/u/12809149/geoserver-GetCoverage.image.tiff");
config.setParam("Layers","https://dl.dropboxusercontent.com/u/12809149/TrueMarble.tif");
// config.setParam("Layers","8f5d883f-95bf-4b7c-8252-aaf0b2e6fd81"+sep+"4d597da9-dbfa-4a65-9de6-9bbff69eac19"+sep+"2c2304d1-681a-4f3a-8409-e8cdb5ed447f");
config.setParam("FeaturesNames","temperature"+sep+"chlorophyll"+sep+"ph");
// config.setParam("Layers","4d597da9-dbfa-4a65-9de6-9bbff69eac19"+sep+"2c2304d1-681a-4f3a-8409-e8cdb5ed447f");

View File

@@ -13,8 +13,8 @@ import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
public class TestTimeExtraction {
// static AlgorithmConfiguration[] configs = { testTimeExtractionTable(), testTimeExtractionNetCDF(),testTimeExtractionTable2()};
static AlgorithmConfiguration[] configs = { testTimeExtractionNetCDF()};
static AlgorithmConfiguration[] configs = { testTimeExtractionTable(), testTimeExtractionNetCDF(),testTimeExtractionTable2()};
// static AlgorithmConfiguration[] configs = { testTimeExtractionNetCDF()};
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");