Initial import.

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngineGeoSpatialExtension@78941 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
Gianpaolo Coro 2013-07-10 16:36:26 +00:00
parent 24951edda8
commit 10e47a51c7
42 changed files with 0 additions and 5248 deletions

View File

@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" output="target/classes" path="src/main/java"/>
<classpathentry kind="src" path="src/main/resources"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER"/>
<classpathentry kind="output" path="target/classes"/>
</classpath>

View File

@ -1,23 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>EcologicalEngineGeoSpatialExtension</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.m2e.core.maven2Builder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.m2e.core.maven2Nature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
</projectDescription>

View File

@ -1,4 +0,0 @@
#Mon Apr 08 18:49:41 CEST 2013
eclipse.preferences.version=1
encoding//src/main/java=UTF-8
encoding/<project>=UTF-8

View File

@ -1,13 +0,0 @@
#Mon Apr 08 18:49:41 CEST 2013
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
org.eclipse.jdt.core.compiler.compliance=1.6
org.eclipse.jdt.core.compiler.debug.lineNumber=generate
org.eclipse.jdt.core.compiler.debug.localVariable=generate
org.eclipse.jdt.core.compiler.debug.sourceFile=generate
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
org.eclipse.jdt.core.compiler.source=1.6

View File

@ -1,5 +0,0 @@
#Mon Apr 08 18:15:49 CEST 2013
activeProfiles=
eclipse.preferences.version=1
resolveWorkspaceProjects=true
version=1

View File

@ -1,411 +0,0 @@
<!--Document specific SSI statements-->
<!--Add any additional meta tags to the following variable.-->
<!--This creates the header.-->
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<html>
<!-- NODC Standard Header File, the HTML Version -->
<head>
<title>World Ocean Atlas 2009 data in NetCDF format</title>
<meta name="Author" content="Olga Baranova">
<meta name="Description" content="/OC5/WOA09/netcdf_data.html">
<meta name="keywords" lang="en-us" content="world ocean atlas 2009, climatology, objectively analyzed fields and statistics, NetCDF">
<link rel="shortcut icon" href="/Images/favicon.ico">
<link href="/nodc.css" rel="stylesheet" type="text/css">
<link href="/ocl.css" rel="stylesheet" type="text/css">
<link href="/jc.css" rel="stylesheet" type="text/css">
<link href="/bs.css" rel="stylesheet" type="text/css">
<link href="/styles/jquery.qtip.css" rel="stylesheet" type="text/css" media="all" />
<link rel="schema.DC" href="http://purl.org/dc/elements/1.1/">
<link rel="schema.DCTERMS" href="http://purl.org/dc/terms/">
<meta name="DC.title" lang="en" content="World Ocean Atlas 2009 data in NetCDF format">
<meta name="DC.creator" lang="en" content="US Department of Commerce, NOAA National Oceanographic Data Center">
<meta name="DCTERMS.modified" scheme="W3CDTF" content="2012-07-09">
<meta name="DC.language" scheme="RFC4646" content="en">
<meta name="DC.identifier" scheme="DCTERMS.URI" content="http://www.nodc.noaa.gov/OC5/WOA09/netcdf_data.html">
<script type="text/javascript" src="/js_1/federated-analytics.js"></script>
</head>
<body class="whitebg"
>
<!-- Banner follows -->
<table width="100%" border="0" cellpadding="0" cellspacing="0">
<tr>
<td colspan="2"><a href="http://www.noaa.gov"><img src="/Images/BS_noaalogo1.jpg" alt="National Oceanic and Atmospheric Administration" width="259" height="78" style="float:left; border:0;"></a><a href="/"><img src="/Images/BS_nodclogo1.jpg" alt="NODC, National Oceanographic Data Center" width="346" height="78" style="float:left; border:0;"></a><img src="/Images/BS_commercelogo1.jpg" alt="Department of Commerce" width="196" height="78" style="float:right; border:0;"></td>
</tr>
<tr class="fillinblack noaainfo">
<td><div class="indent"><a href="http://www.nesdis.noaa.gov/"><img src="/Images/NESDIS_topgraphic.gif" alt="NOAA Satellite and Information Service" width="179" height="12" border="0" title="NOAA Satellite and Information Service"></a></div></td>
<td align="right"><form action="http://search.usa.gov/search" method="get">
<label>
<input type="radio" name="affiliate" checked="checked" value="nodc.noaa.gov">
NODC</label>
<label>
<input type="radio" name="affiliate" value="noaa.gov">
All of NOAA</label>
<input type="hidden" name="v:project" value="firstgov">
<input class="search" type="text" name="query" size="18" value="Search" onfocus="this.value=''">
<input class="go" src="/Images/go.gif" alt="Go search the NOAA or NODC Website" title="Go search the NOAA or NODC Website" type="image">
</form></td>
</tr>
</table>
<!-- End of NODC Standard Header -->
<table class="tablebordernodc" cellpadding="3" cellspacing="0" width="100%">
<tbody><tr><td class="cookie" bgcolor="#ffffff" nowrap="nowrap">You are here: <a href="/" target="_top">NODC Home</a> &gt;
<a href="/OC5/" target="_top">Ocean Climate Laboratory</a> &gt;
<a href="/OC5/indprod.html" target="_top">OCL Products</a> &gt;
<a href="pr_woa09.html">WOA09</a> &gt;
WOA09 data in NetCDF format
</td>
</tr></tbody></table>
<div align="center">
<br />
<table width="80%">
<tr><td align="center"><span class="head1">World Ocean Atlas 2009 Data in NetCDF format</span>
<br /><span class="darkgreynotes">Data are available courtesy of Upendra Dadi.
For any questions about this product, please e-mail <a href="mailto:OCL.help@noaa.gov" class="darkgreynotes">OCL help desk</a>.</span>
</td></tr>
<tr><td class="red"><hr size="1" align="center" />
<!--The following netCDF-4 files are created using netCDF - 4.0.1 libraries.
Please make sure that you are using appropriate version of netCDF libraries to read the files.
NetCDF-3 libraries cannot read the files. -->The data can be accessed by using <a href="http://www.opendap.org/" class="red">OpenDAP</a> or
you can do simple analysis and visualization of the data using the NODC <a href="http://data.nodc.noaa.gov/las/getUI.do" class="red">Live Access Server</a>.
The THREDDS page below has an OpenDAP link to the netCDF files.
<hr size="1" align="center" />
</td></tr>
</table>
<table class="tableborder" border="1" cellspacing="0" cellpadding="4" width="80%">
<tbody>
<tr class="tablehd1">
<td align="center">Grid Size</td><td align="center">Variable</td><td align="center">Time Period</td><td align="center">FTP Link to NetCDF file</td><td align="center">THREDDS Link</td>
<td align="center">OpenDAP Metadata Link</td></tr>
<tr><td rowspan="24" align="center" class="bold">1 DEGREE</td>
<td rowspan="3" align="center" class="bold">Temperature</td>
<td align="center" class="italic">Annual</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/temperature_annual_1deg.nc">temperature_annual_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/temperature_annual_1deg.nc">temperature_annual_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/temperature_annual_1deg.nc.info">temperature_annual_1deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Seasonal</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/temperature_seasonal_1deg.nc">temperature_seasonal_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/temperature_seasonal_1deg.nc">temperature_seasonal_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/temperature_seasonal_1deg.nc.info">temperature_seasonal_1deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Monthly</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/temperature_monthly_1deg.nc">temperature_monthly_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/temperature_monthly_1deg.nc">temperature_monthly_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/temperature_monthly_1deg.nc.info">temperature_monthly_1deg.nc.info</a></td></tr>
<tr><td rowspan="3" align="center" class="bold">Salinity</td>
<td align="center" class="italic">Annual</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/salinity_annual_1deg.nc">salinity_annual_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/salinity_annual_1deg.nc">salinity_annual_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/salinity_annual_1deg.nc.info">salinity_annual_1deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Seasonal</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/salinity_seasonal_1deg.nc">salinity_seasonal_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/salinity_seasonal_1deg.nc">salinity_seasonal_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/salinity_seasonal_1deg.nc.info">salinity_seasonal_1deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Monthly</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/salinity_monthly_1deg.nc">salinity_monthly_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/salinity_monthly_1deg.nc">salinity_monthly_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/salinity_monthly_1deg.nc.info">salinity_monthly_1deg.nc.info</a></td></tr>
<tr><td rowspan="3" align="center" class="bold">Dissolved Oxygen</td>
<td align="center" class="italic">Annual</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/dissolved_oxygen_annual_1deg.nc">dissolved_oxygen_annual_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/dissolved_oxygen_annual_1deg.nc">dissolved_oxygen_annual_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/dissolved_oxygen_annual_1deg.nc.info">dissolved_oxygen_annual_1deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Seasonal</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/dissolved_oxygen_seasonal_1deg.nc">dissolved_oxygen_seasonal_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/dissolved_oxygen_seasonal_1deg.nc">dissolved_oxygen_seasonal_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/dissolved_oxygen_seasonal_1deg.nc.info">dissolved_oxygen_seasonal_1deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Monthly</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/dissolved_oxygen_monthly_1deg.nc">dissolved_oxygen_monthly_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/dissolved_oxygen_monthly_1deg.nc">dissolved_oxygen_monthly_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/dissolved_oxygen_monthly_1deg.nc.info">dissolved_oxygen_monthly_1deg.nc.info</a></td></tr>
<tr><td rowspan="3" align="center" class="bold">Oxygen Saturation</td>
<td align="center" class="italic">Annual</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/oxygen_saturation_annual_1deg.nc">oxygen_saturation_annual_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/oxygen_saturation_annual_1deg.nc">oxygen_saturation_annual_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/oxygen_saturation_annual_1deg.nc.info">oxygen_saturation_annual_1deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Seasonal</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/oxygen_saturation_seasonal_1deg.nc">oxygen_saturation_seasonal_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/oxygen_saturation_seasonal_1deg.nc">oxygen_saturation_seasonal_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/oxygen_saturation_seasonal_1deg.nc.info">oxygen_saturation_seasonal_1deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Monthly</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/oxygen_saturation_monthly_1deg.nc">oxygen_saturation_monthly_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/oxygen_saturation_monthly_1deg.nc">oxygen_saturation_monthly_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/oxygen_saturation_monthly_1deg.nc.info">oxygen_saturation_monthly_1deg.nc.info</a></td></tr>
<tr><td rowspan="3" align="center" class="bold">Apparent Oxygen <br />Utilization</td>
<td align="center" class="italic">Annual</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/apparent_oxygen_utilization_annual_1deg.nc">apparent_oxygen_utilization_annual_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/apparent_oxygen_utilization_annual_1deg.nc">apparent_oxygen_utilization_annual_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/apparent_oxygen_utilization_annual_1deg.nc.info">apparent_oxygen_utilization_annual_1deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Seasonal</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/apparent_oxygen_utilization_seasonal_1deg.nc">apparent_oxygen_utilization_seasonal_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/apparent_oxygen_utilization_seasonal_1deg.nc">apparent_oxygen_utilization_seasonal_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/apparent_oxygen_utilization_seasonal_1deg.nc.info">apparent_oxygen_utilization_seasonal_1deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Monthly</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/apparent_oxygen_utilization_monthly_1deg.nc">apparent_oxygen_utilization_monthly_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/apparent_oxygen_utilization_monthly_1deg.nc">apparent_oxygen_utilization_monthly_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/apparent_oxygen_utilization_monthly_1deg.nc.info">apparent_oxygen_utilization_monthly_1deg.nc.info</a></td></tr>
<tr><td rowspan="3" align="center" class="bold">Phosphate</td>
<td align="center" class="italic">Annual</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/phosphate_annual_1deg.nc">phosphate_annual_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/phosphate_annual_1deg.nc">phosphate_annual_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/phosphate_annual_1deg.nc.info">phosphate_annual_1deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Seasonal</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/phosphate_seasonal_1deg.nc">phosphate_seasonal_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/phosphate_seasonal_1deg.nc">phosphate_seasonal_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/phosphate_seasonal_1deg.nc.info">phosphate_seasonal_1deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Monthly</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/phosphate_monthly_1deg.nc">phosphate_monthly_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/phosphate_monthly_1deg.nc">phosphate_monthly_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/phosphate_monthly_1deg.nc.info">phosphate_monthly_1deg.nc.info</a></td></tr>
<tr><td rowspan="3" align="center" class="bold">Silicate</td>
<td align="center" class="italic">Annual</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/silicate_annual_1deg.nc">silicate_annual_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/silicate_annual_1deg.nc">silicate_annual_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/silicate_annual_1deg.nc.info">silicate_annual_1deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Seasonal</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/silicate_seasonal_1deg.nc">silicate_seasonal_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/silicate_seasonal_1deg.nc">silicate_seasonal_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/silicate_seasonal_1deg.nc.info">silicate_seasonal_1deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Monthly</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/silicate_monthly_1deg.nc">silicate_monthly_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/silicate_monthly_1deg.nc">silicate_monthly_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/silicate_monthly_1deg.nc.info">silicate_monthly_1deg.nc.info</a></td></tr>
<tr><td rowspan="3" align="center" class="bold">Nitrate</td>
<td align="center" class="italic">Annual</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/nitrate_annual_1deg.nc">nitrate_annual_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/nitrate_annual_1deg.nc">nitrate_annual_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/nitrate_annual_1deg.nc.info">nitrate_annual_1deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Seasonal</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/nitrate_seasonal_1deg.nc">nitrate_seasonal_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/nitrate_seasonal_1deg.nc">nitrate_seasonal_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/nitrate_seasonal_1deg.nc.info">nitrate_seasonal_1deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Monthly</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/nitrate_monthly_1deg.nc">nitrate_monthly_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/nitrate_monthly_1deg.nc">nitrate_monthly_1deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/nitrate_monthly_1deg.nc.info">nitrate_monthly_1deg.nc.info</a></td></tr>
<tr><td rowspan="24" align="center" class="bold">5 DEGREE</td>
<td rowspan="3" align="center" class="bold">Temperature</td>
<td align="center" class="italic">Annual</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/temperature_annual_5deg.nc">temperature_annual_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/temperature_annual_5deg.nc">temperature_annual_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/temperature_annual_5deg.nc.info">temperature_annual_5deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Seasonal</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/temperature_seasonal_5deg.nc">temperature_seasonal_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/temperature_seasonal_5deg.nc">temperature_seasonal_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/temperature_seasonal_5deg.nc.info">temperature_seasonal_5deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Monthly</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/temperature_monthly_5deg.nc">temperature_monthly_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/temperature_monthly_5deg.nc">temperature_monthly_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/temperature_monthly_5deg.nc.info">temperature_monthly_5deg.nc.info</a></td></tr>
<tr><td rowspan="3" align="center" class="bold">Salinity</td>
<td align="center" class="italic">Annual</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/salinity_annual_5deg.nc">salinity_annual_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/salinity_annual_5deg.nc">salinity_annual_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/salinity_annual_5deg.nc.info">salinity_annual_5deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Seasonal</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/salinity_seasonal_5deg.nc">salinity_seasonal_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/salinity_seasonal_5deg.nc">salinity_seasonal_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/salinity_seasonal_5deg.nc.info">salinity_seasonal_5deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Monthly</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/salinity_monthly_5deg.nc">salinity_monthly_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/salinity_monthly_5deg.nc">salinity_monthly_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/salinity_monthly_5deg.nc.info">salinity_monthly_5deg.nc.info</a></td></tr>
<tr><td rowspan="3" align="center" class="bold">Dissolved Oxygen</td>
<td align="center" class="italic">Annual</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/dissolved_oxygen_annual_5deg.nc">dissolved_oxygen_annual_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/dissolved_oxygen_annual_5deg.nc">dissolved_oxygen_annual_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/dissolved_oxygen_annual_5deg.nc.info">dissolved_oxygen_annual_5deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Seasonal</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/dissolved_oxygen_seasonal_5deg.nc">dissolved_oxygen_seasonal_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/dissolved_oxygen_seasonal_5deg.nc">dissolved_oxygen_seasonal_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/dissolved_oxygen_seasonal_5deg.nc.info">dissolved_oxygen_seasonal_5deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Monthly</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/dissolved_oxygen_monthly_5deg.nc">dissolved_oxygen_monthly_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/dissolved_oxygen_monthly_5deg.nc">dissolved_oxygen_monthly_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/dissolved_oxygen_monthly_5deg.nc.info">dissolved_oxygen_monthly_5deg.nc.info</a></td></tr>
<tr><td rowspan="3" align="center" class="bold">Oxygen Saturation</td>
<td align="center" class="italic">Annual</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/oxygen_saturation_annual_5deg.nc">oxygen_saturation_annual_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/oxygen_saturation_annual_5deg.nc">oxygen_saturation_annual_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/oxygen_saturation_annual_5deg.nc.info">oxygen_saturation_annual_5deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Seasonal</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/oxygen_saturation_seasonal_5deg.nc">oxygen_saturation_seasonal_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/oxygen_saturation_seasonal_5deg.nc">oxygen_saturation_seasonal_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/oxygen_saturation_seasonal_5deg.nc.info">oxygen_saturation_seasonal_5deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Monthly</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/oxygen_saturation_monthly_5deg.nc">oxygen_saturation_monthly_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/oxygen_saturation_monthly_5deg.nc">oxygen_saturation_monthly_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/oxygen_saturation_monthly_5deg.nc.info">oxygen_saturation_monthly_5deg.nc.info</a></td></tr>
<tr><td rowspan="3" align="center" class="bold">Apparent Oxygen <br />Utilization</td>
<td align="center" class="italic">Annual</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/apparent_oxygen_utilization_annual_5deg.nc">apparent_oxygen_utilization_annual_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/apparent_oxygen_utilization_annual_5deg.nc">apparent_oxygen_utilization_annual_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/apparent_oxygen_utilization_annual_5deg.nc.info">apparent_oxygen_utilization_annual_5deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Seasonal</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/apparent_oxygen_utilization_seasonal_5deg.nc">apparent_oxygen_utilization_seasonal_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/apparent_oxygen_utilization_seasonal_5deg.nc">apparent_oxygen_utilization_seasonal_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/apparent_oxygen_utilization_seasonal_5deg.nc.info">apparent_oxygen_utilization_seasonal_5deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Monthly</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/apparent_oxygen_utilization_monthly_5deg.nc">apparent_oxygen_utilization_monthly_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/apparent_oxygen_utilization_monthly_5deg.nc">apparent_oxygen_utilization_monthly_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/apparent_oxygen_utilization_monthly_5deg.nc.info">apparent_oxygen_utilization_monthly_5deg.nc.info</a></td></tr>
<tr><td rowspan="3" align="center" class="bold">Phosphate</td>
<td align="center" class="italic">Annual</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/phosphate_annual_5deg.nc">phosphate_annual_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/phosphate_annual_5deg.nc">phosphate_annual_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/phosphate_annual_5deg.nc.info">phosphate_annual_5deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Seasonal</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/phosphate_seasonal_5deg.nc">phosphate_seasonal_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/phosphate_seasonal_5deg.nc">phosphate_seasonal_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/phosphate_seasonal_5deg.nc.info">phosphate_seasonal_5deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Monthly</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/phosphate_monthly_5deg.nc">phosphate_monthly_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/phosphate_monthly_5deg.nc">phosphate_monthly_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/phosphate_monthly_5deg.nc.info">phosphate_monthly_5deg.nc.info</a></td></tr>
<tr><td rowspan="3" align="center" class="bold">Silicate</td>
<td align="center" class="italic">Annual</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/silicate_annual_5deg.nc">silicate_annual_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/silicate_annual_5deg.nc">silicate_annual_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/silicate_annual_5deg.nc.info">silicate_annual_5deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Seasonal</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/silicate_seasonal_5deg.nc">silicate_seasonal_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/silicate_seasonal_5deg.nc">silicate_seasonal_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/silicate_seasonal_5deg.nc.info">silicate_seasonal_5deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Monthly</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/silicate_monthly_5deg.nc">silicate_monthly_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/silicate_monthly_5deg.nc">silicate_monthly_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/silicate_monthly_5deg.nc.info">silicate_monthly_5deg.nc.info</a></td></tr>
<tr><td rowspan="3" align="center" class="bold">Nitrate</td>
<td align="center" class="italic">Annual</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/nitrate_annual_5deg.nc">nitrate_annual_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/nitrate_annual_5deg.nc">nitrate_annual_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/nitrate_annual_5deg.nc.info">nitrate_annual_5deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Seasonal</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/nitrate_seasonal_5deg.nc">nitrate_seasonal_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/nitrate_seasonal_5deg.nc">nitrate_seasonal_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/nitrate_seasonal_5deg.nc.info">nitrate_seasonal_5deg.nc.info</a></td></tr>
<tr><td align="center" class="italic">Monthly</td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/nitrate_monthly_5deg.nc">nitrate_monthly_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/catalog/woa/WOA09/NetCDFdata/catalog.html?dataset=woa/WOA09/NetCDFdata/nitrate_monthly_5deg.nc">nitrate_monthly_5deg.nc</a></td>
<td align="left"><a href="http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA09/NetCDFdata/nitrate_monthly_5deg.nc.info">nitrate_monthly_5deg.nc.info</a></td></tr>
</tbody>
</table></div>
<br />
<!-- This creates the footer. -->
<!-- NODC Standard Footer File, the HTML Version -->
<table width="100%" border="0" align="center" class="footersmall" id="footer2" cellpadding="0" cellspacing="0">
<tbody>
<tr>
<td colspan="4" class="bluebar" id="footer"><div align="center">
<a href="/access/">Access Data</a> - <a href="/submit/">Submit Data</a> - <a href="/General/datacom_form.html">Intended Use of the Data?</a> - <a href="http://ols.nndc.noaa.gov/plolstore/plsql/olstore.main?look=1">Online Store</a> - <a href="/about/contact.html">Customer Service</a></div>
</td>
</tr>
<tr>
<td align="left" valign="bottom" nowrap="nowrap">&nbsp;</td>
<td align="left" valign="bottom" nowrap="nowrap">
Last modified:&nbsp; &nbsp; Mon, 9-Jul-2012 14:08 UTC
</td>
<td align="left"><a href="mailto:NODC.Webmaster@noaa.gov" class="footersmall">NODC.Webmaster@noaa.gov</a></td>
</tr>
<tr>
<td width="19">&nbsp;</td>
<td width="735" valign="top"><div class="footer-icons"><a href="http://www.facebook.com/noaa.nodc"><img src="/media/images/common/facebook3.gif" alt="Like us on Facebook" width="20" height="20" /></a><a href="/rss/"><img src="/media/images/common/rssfeed-icon.jpg" alt="RSS feed" width="20" height="20" /></a></div> <div align="left"> <acronym title="Department of Commerce"><a href="http://www.doc.gov/">Dept. of Commerce</a></acronym> - <acronym title="National Oceanic and Atmospheric Administration"><a href="http://www.noaa.gov/">NOAA</a></acronym> - <acronym title="National Environmental, Satellite, Data and Information Service"><a href="http://www.nesdis.noaa.gov/">NESDIS</a></acronym> - <acronym title="National Oceanographic Data Center"><a href="/">NODC</a></acronym><br />
<div class="top"> <span class="nongov">*</span><span class="nongovfooter"> Offsite Link Notification</span></div></div>
</td>
<td width="403" valign="top" align="left">
<div class="top"><a href="http://www.facebook.com/noaa.nodc" class="footersmall">Like us on Facebook</a></div>
<div class="top"><a href="http://www.noaa.gov/privacy.html" target="NOAA_privacy" class="footersmall" >Privacy Policy</a> - <a href="/nodcdisclaimer.html" class="footersmall">Disclaimer</a> - <a href="http://www.cio.noaa.gov/Policy_Programs/info_quality.html"
target="Information Quality" class="footersmall" >Information Quality</a></div>
<div class="top"><a href="http://www.corporateservices.noaa.gov/%7Efoia/" target="FOIA" class="footersmall">
Freedom of Information Act</a> (FOIA)</div>
<div class="top"><acronym title="U.S. Government's Official Web Portal"><a href="http://www.usa.gov/">USA.gov</a></acronym> - The U.S. Government's Web Portal</div>
</td>
</tr>
</tbody>
</table>
</body>
</html>
<!-- End of NODC Standard Footer -->
<!-- #End Template -->

183
pom.xml
View File

@ -1,183 +0,0 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>maven-parent</artifactId>
<groupId>org.gcube.tools</groupId>
<version>1.0.0</version>
<relativePath />
</parent>
<groupId>org.gcube.dataanalysis</groupId>
<artifactId>ecological-engine-geospatial-extensions</artifactId>
<version>1.0.0-SNAPSHOT</version>
<name>ecological-engine-geospatial-extension</name>
<description>ecological-engine-geospatial-extension</description>
<properties>
<distroDirectory>${project.basedir}/distro</distroDirectory>
</properties>
<dependencies>
<dependency>
<groupId>org.gcube.dataanalysis</groupId>
<artifactId>ecological-engine</artifactId>
<version>1.7.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.gcube.spatial.data</groupId>
<artifactId>geonetwork</artifactId>
<version>1.0.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.jdom</groupId>
<artifactId>jdom</artifactId>
<version>1.1</version>
</dependency>
<dependency>
<groupId>org.gcube.externals</groupId>
<artifactId>geo-utils-custom-opendap</artifactId>
<version>2.2.0</version>
</dependency>
<dependency>
<groupId>org.gcube.externals</groupId>
<artifactId>geo-utils-custom-netcdfui</artifactId>
<version>4.2.0</version>
</dependency>
<dependency>
<groupId>org.gcube.externals</groupId>
<artifactId>geo-utils-custom-netcdf</artifactId>
<version>4.2.0</version>
</dependency>
<dependency>
<groupId>org.gcube.externals</groupId>
<artifactId>geo-utils-custom-bounce</artifactId>
<version>0.14.0</version>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>20041127.091804</version>
</dependency>
<dependency>
<groupId>commons-httpclient</groupId>
<artifactId>commons-httpclient</artifactId>
<version>3.1</version>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>1.1.1</version>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>1.7.1</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.6.4</version>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-utils-encryption</artifactId>
<version>[1.0.2-SNAPSHOT,2.0.0-SNAPSHOT)</version>
</dependency>
		<!-- NOTE(review): duplicate declarations of geo-utils-custom-opendap (2.2.0),
		     geo-utils-custom-netcdfui (4.2.0), geo-utils-custom-netcdf (4.2.0) and
		     geo-utils-custom-bounce (0.14.0) removed here: each is already declared
		     above with the identical version. Maven warns on duplicate dependency
		     declarations and only the first one is effective. -->
<dependency>
<groupId>org.gcube.externals</groupId>
<artifactId>geo-utils-custom-geopeo</artifactId>
<version>1.0.2-min</version>
</dependency>
</dependencies>
<repositories>
<!-- <repository> <id>GeoSolutions</id> <url>http://maven.research-infrastructures.eu:8081/nexus/content/repositories/geo-solutions/</url>
</repository> -->
<repository>
<id>osgeo</id>
<name>Open Source Geospatial Foundation Repository Mirror</name>
			<url>http://maven.research-infrastructures.eu/nexus/content/repositories/osgeo/</url>
</repository>
</repositories>
<build>
<plugins>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>1.6</source>
<target>1.6</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.12</version>
<configuration>
<skipTests>true</skipTests>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
<version>2.5</version>
<executions>
<execution>
<id>copy-profile</id>
<phase>install</phase>
<goals>
<goal>copy-resources</goal>
</goals>
<configuration>
<outputDirectory>target</outputDirectory>
<resources>
<resource>
<directory>${distroDirectory}</directory>
<filtering>true</filtering>
<includes>
<include>profile.xml</include>
</includes>
</resource>
</resources>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>2.2</version>
<configuration>
<descriptors>
<descriptor>${distroDirectory}/descriptor.xml</descriptor>
</descriptors>
</configuration>
<executions>
<execution>
<id>servicearchive</id>
<phase>install</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@ -1,285 +0,0 @@
package org.gcube.dataanalysis.geo.algorithms;
import java.awt.Image;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.gcube.contentmanagement.graphtools.data.conversions.ImageTools;
import org.gcube.contentmanagement.graphtools.plotting.graphs.GaussianDistributionGraph;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.evaluation.DiscrepancyAnalysis;
import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.geo.insertion.RasterTable;
import org.gcube.dataanalysis.geo.meta.features.FeaturesManager;
import org.gcube.dataanalysis.geo.retrieval.GeoIntersector;
import org.jfree.chart.JFreeChart;
import org.jfree.data.function.NormalDistributionFunction2D;
import org.jfree.data.general.DatasetUtilities;
import org.jfree.data.xy.XYSeriesCollection;
import org.opengis.metadata.Metadata;
/**
 * Algorithm that compares two geospatial layers (OGC/NetCDF maps) referenced by
 * title or UUID on GeoNetwork. Both layers are rasterized over the whole globe
 * at a common resolution, dumped into two temporary database tables, and compared
 * with {@link DiscrepancyAnalysis}; the result is a map of discrepancy statistics
 * plus a Gaussian chart of the error distribution.
 *
 * NOTE(review): this class uses fields inherited from {@link DataAnalysis}
 * (at least "config" and "connection") that are not visible in this file —
 * confirm their exact semantics in the superclass.
 */
public class MapsComparator extends DataAnalysis {
	// Names of the user-supplied input parameters (keys looked up in the configuration).
	static String layer1 = "Layer_1";                      // title or UUID of the first layer
	static String layer2 = "Layer_2";                      // title or UUID of the second layer
	static String zString = "Z";                           // depth/altitude at which to compare (0 = surface)
	static String t1 = "TimeIndex_1";                      // time index inside the first layer
	static String t2 = "TimeIndex_2";                      // time index inside the second layer
	static String valuesThr = "ValuesComparisonThreshold"; // threshold used by the discrepancy analysis
	// Progress indicator in [0,100], exposed through getStatus().
	float status = 0;
	// Descriptors of the input parameters, filled by getInputParameters().
	public List<StatisticalType> inputs = new ArrayList<StatisticalType>();
	// Key/value results of the discrepancy analysis (includes MEAN and VARIANCE, read by getOutput()).
	public LinkedHashMap<String, String> outputParameters = new LinkedHashMap<String, String>();

	/** Initialization hook required by DataAnalysis; only logs. */
	@Override
	public void init() throws Exception {
		AnalysisLogger.getLogger().debug("Initialization");
	}

	/** Human-readable description shown to the user for this algorithm. */
	@Override
	public String getDescription() {
		return "An algorithm for comparing two OGC/NetCDF maps in seamless way to the user";
	}

	/**
	 * Core computation: reads the input parameters, rasterizes both layers on a
	 * common global grid, dumps the two rasters into temporary tables, runs the
	 * discrepancy analysis between them and finally drops the temporary tables.
	 *
	 * Any exception raised inside the main block is caught and only logged
	 * (the method does not rethrow); the finally clause always closes the DB
	 * connection and drives status to 100.
	 */
	@Override
	public void compute() throws Exception{
		status = 0;
		AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
		long t0 = System.currentTimeMillis();
		// Raw string values of the user parameters (may be null or blank).
		String layerT1 = IOHelper.getInputParameter(config,layer1);
		String layerT2 = IOHelper.getInputParameter(config,layer2);
		String z$ = IOHelper.getInputParameter(config,zString);
		String valuesthr$ = IOHelper.getInputParameter(config,valuesThr);
		String time1$ = IOHelper.getInputParameter(config,t1);
		String time2$ = IOHelper.getInputParameter(config,t2);
		// Missing/blank time indices default to 0 (the first time slice); negatives are clamped to 0.
		int time1 = ((time1$ != null) && (time1$.trim().length() > 0)) ? Integer.parseInt(time1$) : 0;
		int time2 = ((time2$ != null) && (time2$.trim().length() > 0)) ? Integer.parseInt(time2$) : 0;
		if (time1 < 0)
			time1 = 0;
		if (time2 < 0)
			time2 = 0;
		// Comparison threshold defaults to 0.1; unparsable input silently keeps the default.
		double valuesthreshold = 0.1;
		if ((valuesthr$ != null) && (valuesthr$.trim().length() > 0))
			try {
				valuesthreshold = Double.parseDouble(valuesthr$);
			} catch (Exception ee) {
			}
		// Z defaults to 0 (surface); unparsable input silently keeps the default.
		double z = 0;
		if ((z$ != null) && (z$.trim().length() > 0))
			try {
				z = Double.parseDouble(z$);
			} catch (Exception ee) {
			}
		try {
			//TODO : delete this force
			// String scope = config.getGcubeScope();
			String scope = null; // scope deliberately forced to null here (see TODO above)
			AnalysisLogger.getLogger().debug("MapsComparator: Using Scope: " + scope + " Z: " + z + " Values Threshold: " + valuesthreshold + " Layer1: " + layerT1 + " vs " + layerT2);
			GeoIntersector intersector = new GeoIntersector(scope, config.getConfigPath());
			AnalysisLogger.getLogger().debug("MapsComparator: GeoIntersector initialized");
			// Whole-globe bounding box used for both rasterizations.
			double x1 = -180;
			double x2 = 180;
			double y1 = -90;
			double y2 = 90;
			status = 10;
			FeaturesManager fm = intersector.getFeaturer();
			AnalysisLogger.getLogger().debug("MapsComparator: Taking info for the layer: " + layerT1);
			// Resolve the first layer on GeoNetwork by UUID or title; fail hard if absent.
			Metadata meta1 = fm.getGNInfobyUUIDorName(layerT1);
			if (meta1==null) throw new Exception("No Correspondence with Layer 1");
			// Resolution 0 means "undefined"; a fallback is applied further below.
			double resolution1 = 0;
			try {
				resolution1 = FeaturesManager.getResolution(meta1);
			} catch (Exception e) {
				AnalysisLogger.getLogger().debug("MapsComparator: Undefined resolution");
			}
			AnalysisLogger.getLogger().debug("MapsComparator: Resolution: " + resolution1);
			status = 15;
			AnalysisLogger.getLogger().debug("MapsComparator: Taking info for the layer: " + layerT2);
			AnalysisLogger.getLogger().debug("MapsComparator: Trying with UUID..." + layerT2);
			// Resolve the second layer the same way.
			Metadata meta2 = fm.getGNInfobyUUIDorName(layerT2);
			if (meta2==null) throw new Exception("No Correspondence with Layer 2");
			double resolution2 = 0;
			try {
				resolution2 = FeaturesManager.getResolution(meta2);
			} catch (Exception e) {
				AnalysisLogger.getLogger().debug("MapsComparator: Undefined resolution");
			}
			AnalysisLogger.getLogger().debug("MapsComparator: Resolution: " + resolution2);
			status = 20;
			// take the lowest resolution to perform the comparison: the larger cell
			// size (coarser grid) of the two, so both maps can be sampled onto it;
			// 0.5 degrees is the fallback when neither layer declares a resolution
			double resolution = Math.max(resolution1, resolution2);
			if (resolution == 0)
				resolution = 0.5d;
			AnalysisLogger.getLogger().debug("MapsComparator: Evaluation Resolution: " + resolution);
			AnalysisLogger.getLogger().debug("MapsComparator: ****Rasterizing map 1****");
			// Extract a 2D grid of values from layer 1 at the requested time index and depth.
			double[][] slice1 = intersector.takeTimeSlice(layerT1, time1, x1, x2, y1, y2, z, resolution, resolution);
			AnalysisLogger.getLogger().debug("MapsComparator: Dumping map 1");
			status = 30;
			// Persist the grid into a temporary geo table (name generated by RasterTable).
			RasterTable raster1 = new RasterTable(x1, x2, y1, y2, z, resolution, resolution, slice1, config);
			raster1.dumpGeoTable();
			String rastertable1 = raster1.getTablename();
			AnalysisLogger.getLogger().debug("MapsComparator: Map 1 was dumped in table: " + rastertable1);
			status = 40;
			AnalysisLogger.getLogger().debug("MapsComparator: ****Rasterizing map 2****");
			// Same extraction and dump for layer 2.
			double[][] slice2 = intersector.takeTimeSlice(layerT2, time2, x1, x2, y1, y2, z, resolution, resolution);
			AnalysisLogger.getLogger().debug("MapsComparator: Dumping map 2");
			status = 50;
			RasterTable raster2 = new RasterTable(x1, x2, y1, y2, z, resolution, resolution, slice2, config);
			raster2.dumpGeoTable();
			String rastertable2 = raster2.getTablename();
			AnalysisLogger.getLogger().debug("MapsComparator: Map 2 was dumped in table: " + rastertable2);
			status = 60;
			/*
			 * String rastertable1 = "rstr909f60c1d3f1472e9de998e844990724"; String rastertable2 = "rstre52e744c99224de3a1c5354263c6c8d8"; String resolution = "0.5";
			 */
			// Configure and run the discrepancy analysis over the two raster tables.
			config.setNumberOfResources(1);
			config.setParam("FirstTable", rastertable1);
			config.setParam("SecondTable", rastertable2);
			config.setParam("FirstTableCsquareColumn", RasterTable.csquareColumn);
			config.setParam("SecondTableCsquareColumn", RasterTable.csquareColumn);
			config.setParam("FirstTableProbabilityColumn", RasterTable.probabilityColumn);
			config.setParam("SecondTableProbabilityColumn", RasterTable.probabilityColumn);
			config.setParam("ComparisonThreshold", "" + valuesthreshold);
			AnalysisLogger.getLogger().debug("MapsComparator: Analyzing discrepancy between maps: " + rastertable1 + " and " + rastertable2);
			DiscrepancyAnalysis da = new DiscrepancyAnalysis();
			da.setConfiguration(config);
			da.init(false);
			outputParameters = da.analyze();
			// Record the resolution actually used, rounded to 4 decimals.
			outputParameters.put("RESOLUTION", "" + MathFunctions.roundDecimal(resolution,4));
			status = 80;
			AnalysisLogger.getLogger().debug("MapsComparator: Output: " + outputParameters);
			// delete the tables: the rasters are only intermediate artifacts
			connection = DatabaseUtils.initDBSession(config);
			AnalysisLogger.getLogger().debug("MapsComparator: Deleting table " + rastertable1);
			DatabaseFactory.executeSQLUpdate(DatabaseUtils.dropTableStatement(rastertable1), connection);
			status = 90;
			AnalysisLogger.getLogger().debug("MapsComparator: Deleting table " + rastertable2);
			DatabaseFactory.executeSQLUpdate(DatabaseUtils.dropTableStatement(rastertable2), connection);
			AnalysisLogger.getLogger().debug("MapsComparator: Elapsed: Whole operation completed in " + ((double) (System.currentTimeMillis() - t0) / 1000d) + "s");
		} catch (Exception e) {
			// Errors are logged and swallowed: the algorithm terminates "successfully"
			// from the caller's point of view even on failure.
			e.printStackTrace();
			AnalysisLogger.getLogger().debug("MapsComparator: ERROR!: " + e.getLocalizedMessage());
		} finally {
			DatabaseUtils.closeDBConnection(connection);
			status = 100;
		}
	}

	/**
	 * Declares the user-facing input parameters (layer identifiers, Z, time
	 * indices, thresholds) plus the default database parameters, and returns them.
	 * The default values are sample layer UUIDs from the infrastructure.
	 */
	@Override
	public List<StatisticalType> getInputParameters(){
		IOHelper.addStringInput(inputs,layer1, "First Layer Title or UUID: The title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer", "86a7ac79-866a-49c6-b5d5-602fc2d87ddd");//"Sarda australis");
		IOHelper.addStringInput(inputs, layer2, "Second Layer Title or UUID: The title or the UUID (preferred) of a second layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer", "0e03d0fa-9c44-4a0c-a7e3-9f6d48710d00");//"Sarda orientalis");
		IOHelper.addIntegerInput(inputs, zString, "value of Z. Default is 0, that means comparison will be at surface level", "0");
		IOHelper.addDoubleInput(inputs, valuesThr, "A comparison threshold for the values in the map. Null equals to 0.1", "0.1");
		IOHelper.addIntegerInput(inputs, t1, "First Layer Time Index. The default is the first", "0");
		IOHelper.addIntegerInput(inputs, t2, "Second Layer Time Index. The default is the first", "0");
		IOHelper.addDoubleInput(inputs, "KThreshold", "Threshold for K-Statistic: over this threshold values will be considered 1 for agreement calculation. Default is 0.5","0.5");
		DatabaseType.addDefaultDBPars(inputs);
		return inputs;
	}

	/** Shutdown hook required by DataAnalysis; only logs. */
	@Override
	public void shutdown() {
		AnalysisLogger.getLogger().debug("Shutdown");
	}

	/**
	 * Renders a 680x420 chart of a normal distribution with the given mean and
	 * variance, sampled at 121 points over [mean - 2*variance, mean + 2*variance].
	 * A zero variance is replaced by 0.01 so the curve remains drawable.
	 * NOTE(review): the sampling window scales with the variance, not the standard
	 * deviation — confirm this is intentional.
	 */
	protected Image generateGaussian(double mean, double variance){
		// gaussian
		XYSeriesCollection xyseriescollection = new XYSeriesCollection();
		if (variance == 0)
			variance = 0.01;
		AnalysisLogger.getLogger().debug("MapsComparator: Adopting mean:" + mean + " and variance:" + variance);
		NormalDistributionFunction2D normaldistributionfunction2d = new NormalDistributionFunction2D(mean, variance);
		org.jfree.data.xy.XYSeries xyseries = DatasetUtilities.sampleFunction2DToSeries(normaldistributionfunction2d, (mean - (2 * variance)), (mean + (2 * variance)), 121, "Distribution of the Error");
		xyseriescollection.addSeries(xyseries);
		// end gaussian
		JFreeChart chart = GaussianDistributionGraph.createStaticChart(xyseriescollection, mean, variance);
		Image image = ImageTools.toImage(chart.createBufferedImage(680, 420));
		/*
		 * GaussianDistributionGraph graph = new GaussianDistributionGraph("Error Distribution"); graph.mean=mean;graph.variance=variance; graph.render(xyseriescollection);
		 */
		// end build image
		AnalysisLogger.getLogger().debug("MapsComparator: Gaussian Distribution Produced");
		return image;
	}

	/**
	 * Packs the analysis results for the caller: every entry of outputParameters
	 * as a string primitive, plus the Gaussian error-distribution image built
	 * from the MEAN and VARIANCE entries. Requires compute() to have populated
	 * outputParameters first, otherwise the MEAN/VARIANCE lookups fail.
	 */
	@Override
	public StatisticalType getOutput() {
		// set the output map containing values
		AnalysisLogger.getLogger().debug("MapsComparator: Producing Gaussian Distribution for the errors");
		// build image:
		HashMap<String, Image> producedImages = new HashMap<String, Image>();
		double mean = Double.parseDouble(outputParameters.get("MEAN"));
		double variance = Double.parseDouble(outputParameters.get("VARIANCE"));
		producedImages.put("Error Distribution", generateGaussian(mean, variance));
		PrimitiveType images = new PrimitiveType("Images", producedImages, PrimitiveTypes.IMAGES, "Distribution of the Error", "The distribution of the error along with variance");
		LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
		// Wrap each scalar output as a string-typed statistical value.
		for (String key:outputParameters.keySet()){
			String value = outputParameters.get(key);
			PrimitiveType val = new PrimitiveType(String.class.getName(), "" + value, PrimitiveTypes.STRING, key, key);
			map.put(key, val);
		}
		// collect all the outputs
		map.put("Images", images);
		// generate a primitive type for the collection
		PrimitiveType output = new PrimitiveType(HashMap.class.getName(), map, PrimitiveTypes.MAP, "ResultsMap", "Results Map");
		return output;
	}

	/** Returns the progress percentage updated by compute(). */
	@Override
	public float getStatus() {
		return status;
	}

	/**
	 * Unimplemented stub inherited from DataAnalysis: the real work happens in
	 * compute(); this always returns null.
	 */
	@Override
	public LinkedHashMap<String, String> analyze() throws Exception {
		// TODO Auto-generated method stub
		return null;
	}
}

View File

@ -1,227 +0,0 @@
package org.gcube.dataanalysis.geo.batch;
import org.gcube.dataanalysis.geo.meta.GenericLayerMetadata;
import org.gcube.dataanalysis.geo.meta.NetCDFMetadata;
import org.opengis.metadata.identification.TopicCategory;
public class BaseLayerMetadataInsertDev {
static String geonetworkurl = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geonetwork/";
static String geoserverurl = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver";
static String user = "admin";
static String password = "admin";
public static void main(String[] args) throws Exception{
GenericLayerMetadata metadataInserter = new GenericLayerMetadata();
metadataInserter.setGeonetworkUrl(geonetworkurl);
metadataInserter.setGeonetworkUser(user);
metadataInserter.setGeonetworkPwd(password);
metadataInserter.setGeoserverUrl(geoserverurl);
metadataInserter.setResolution(0.5);
metadataInserter.setXLeftLow(-180);
metadataInserter.setYLeftLow(-85.5);
metadataInserter.setXRightUpper(180);
metadataInserter.setYRightUpper(85.5);
/*
faoarea(metadataInserter);
metadataInserter.insertMetaData();
eezall(metadataInserter);
metadataInserter.insertMetaData();
lme(metadataInserter);
metadataInserter.insertMetaData();
gebco();
meow(metadataInserter);
metadataInserter.insertMetaData();
ices(metadataInserter);
metadataInserter.insertMetaData();
longhurst(metadataInserter);
metadataInserter.insertMetaData();
ihovseez(metadataInserter);
metadataInserter.insertMetaData();
iho(metadataInserter);
metadataInserter.insertMetaData();
*/
/*
DepthMeanAnnual(metadataInserter);
metadataInserter.insertMetaData();
SSTAnMean(metadataInserter);
metadataInserter.insertMetaData();
SalinityMean(metadataInserter);
metadataInserter.insertMetaData();
PrimProdMean(metadataInserter);
metadataInserter.insertMetaData();
environments(metadataInserter);
metadataInserter.insertMetaData();
IceConAnn(metadataInserter);
metadataInserter.insertMetaData();
*/
}
// eezall
private static void eezall(GenericLayerMetadata metadataInserter ){
metadataInserter.setTitle("Exclusive Economic Zones Boundaries (EEZ)");
metadataInserter.setLayerName("aquamaps:WorldEEZv72012HR");
metadataInserter.setResolution(0);
metadataInserter.setCategoryTypes("_"+TopicCategory.OCEANS.name()+"_");
metadataInserter.setAbstractField("VLIZ (2012). Maritime Boundaries Geodatabase, version 7. Available online at http://www.marineregions.org/. Consulted on 2013-06-05. This dataset represents Exclusive Economic Zones (EEZ) of the world. Up to now, there was no global public domain cover available. Therefore, the Flanders Marine Institute decided to develop its own database. The database includes two global GIS-layers: one contains polylines that represent the maritime boundaries of the world countries, the other one is a polygon layer representing the Exclusive Economic Zone of countries. The database also contains digital information about treaties. Please note that the EEZ shapefile also includes the internal waters of each country");
metadataInserter.setCustomTopics("i-Marine","Exclusive Economic Zones","VLIZ");
}
// lme
private static void lme(GenericLayerMetadata metadataInserter ){
metadataInserter.setTitle("Large Marine Ecosystems of the World");
metadataInserter.setLayerName("aquamaps:lmes64");
metadataInserter.setResolution(0);
metadataInserter.setCategoryTypes("_"+TopicCategory.OCEANS.name()+"_");
metadataInserter.setAbstractField("This dataset represents the Large Marine Ecosystems of the world. It was composed by the National Oceanic and Atmospheric Administration (NOAA). The dataset exists as a polygon and as a polyline layer. The dataset can be downloaded from http://www.edc.uri.edu/lme/gisdata.htm.");
metadataInserter.setCustomTopics("i-Marine","Large Marine Ecosystems","NOAA");
}
//meow
private static void meow(GenericLayerMetadata metadataInserter ){
metadataInserter.setTitle("Marine Ecoregions of the World, MEOW (Spalding et al., 2007)");
metadataInserter.setLayerName("aquamaps:meowecos");
metadataInserter.setResolution(0);
metadataInserter.setCategoryTypes("_"+TopicCategory.OCEANS.name()+"_");
metadataInserter.setAbstractField("MEOW is a biogeographic classification of the world's coasts and shelves. It is the first ever comprehensive marine classification system with clearly defined boundaries and definitions and was developed to closely link to existing regional systems. The ecoregions nest within the broader biogeographic tiers of Realms and Provinces. MEOW represents broad-scale patterns of species and communities in the ocean, and was designed as a tool for planning conservation across a range of scales and assessing conservation efforts and gaps worldwide. The current system focuses on coast and shelf areas (as this is where the majority of human activity and conservation action is focused) and does not consider realms in pelagic or deep benthic environment. It is hoped that parallel but distinct systems for pelagic and deep benthic biotas will be devised in the near future. The project was led by The Nature Conservancy (TNC) and the World Wildlife Fund (WWF), with broad input from a working group representing key NGO, academic and intergovernmental conservation partners. (source: http://www.worldwildlife.org/science/ecoregions/marine/item1266.html). Reference: Spalding, M. D. Fox, H. E. Allen, G. R. Davidson, N. Ferdana, Z. A. Finlayson, M. Halpern, B. S. Jorge, M. A. Lombana, A. Lourie, S. A., (2007). Marine Ecoregions of the World: A Bioregionalization of Coastal and Shelf Areas. Bioscience 2007, VOL 57; numb 7, pages 573-584.");
metadataInserter.setCustomTopics("i-Marine","Marine Ecoregions of the World","MEOW","Spalding");
}
//ICES
private static void ices(GenericLayerMetadata metadataInserter ){
metadataInserter.setTitle("ICES Ecoregions");
metadataInserter.setLayerName("aquamaps:Ecoregions20090430");
metadataInserter.setResolution(0);
metadataInserter.setCategoryTypes("_"+TopicCategory.OCEANS.name()+"_");
metadataInserter.setAbstractField("ICES EcoRegions are large-scale management units for the ICES regional seas and are used in advisory reports to segment advice into the different sea areas. The EcoRegions were first referenced by the predecessor to ACOM (Advisory Committee) in 2004 (source: http://www.ices.dk/InSideOut/mayjun09/j.html)");
metadataInserter.setCustomTopics("i-Marine","ICES Ecoregions","ICES");
}
//longhurst
private static void longhurst(GenericLayerMetadata metadataInserter ){
metadataInserter.setTitle("Longhurst Biogeographical Provinces");
metadataInserter.setLayerName("aquamaps:Longhurstworldv42010");
metadataInserter.setResolution(0);
metadataInserter.setCategoryTypes("_"+TopicCategory.OCEANS.name()+"_");
metadataInserter.setAbstractField("VLIZ (2009). Longhurst Biogeographical Provinces. Available online at http://www.marineregions.org/. This dataset represents a partition of the world oceans into provinces as defined by Longhurst (1995; 1998; 2006), and are based on the prevailing role of physical forcing as a regulator of phytoplankton distribution. The dataset represents the initial static boundaries developed at the Bedford Institute of Oceanography, Canada. Note that the boundaries of these provinces are not fixed in time and space, but are dynamic and move under seasonal and interannual changes in physical forcing. At the first level of reduction, Longhurst recognised four principal biomes (also referred to as domains in earlier publications): the Polar Biome, the Westerlies Biome, the Trade-Winds Biome, and the Coastal Boundary Zone Biome. These four Biomes are recognisable in every major ocean basin. At the next level of reduction, the ocean basins are partitioned into provinces, roughly ten for each basin. These partitions provide a template for data analysis or for making parameter assignments on a global scale. Please refer to Longhurst's publications when using these shapefiles. Consulted on 2013-06-05. Reference: References: Longhurst, A.R et al. (1995). An estimate of global primary production in the ocean from satellite radiometer data. J. Plankton Res. 17, 1245-1271. Longhurst, A.R. (1995). Seasonal cycles of pelagic production and consumption. Prog. Oceanogr. 36, 77-167. Longhurst, A.R. (1998). Ecological Geography of the Sea. Academic Press, San Diego. 397p. (IMIS). Longhurst, A.R. (2006). Ecological Geography of the Sea. 2nd Edition. Academic Press, San Diego, 560p.");
metadataInserter.setCustomTopics("i-Marine","Longhurst Biogeographical Provinces","Longhurst","VLIZ");
}
//longhurst
// Fills the given metadata descriptor for the Marineregions layer that
// intersects Exclusive Economic Zones with IHO sea areas (EEZIHOunionv2).
// Only mutates the passed-in object; publication happens elsewhere.
private static void ihovseez(GenericLayerMetadata meta) {
    meta.setLayerName("aquamaps:EEZIHOunionv2");
    meta.setResolution(0);
    meta.setTitle("Marineregions: the intersect of the Exclusive Economic Zones and IHO areas");
    // Category string uses the "_NAME_" convention expected by the metadata layer.
    meta.setCategoryTypes("_" + TopicCategory.OCEANS.name() + "_");
    meta.setAbstractField("VLIZ (2010). Intersect of IHO Sea Areas and Exclusive Economic Zones (version 1). Available online at http://www.marineregions.org/. Consulted on 2013-06-05. VLIZ (2012). Intersect of IHO Sea Areas and Exclusive Economic Zones (version 2). Available online at http://www.marineregions.org/. Consulted on 2013-06-05. The maritime boundaries provide a useful tool to limit national marine areas, but do not include information on marine regional and sub regional seas. This hampers the usage of these boundaries for implementing nature conservation strategies or analyzing marine biogeographic patterns. For example, a species occurring in the German EEZ can live in the North Sea, the Baltic Sea or Kattegat area. Each of these different marine areas has very distinct hydrological, oceanographic and ecological conditions. Therefore, by combining the information on regional seas and national maritime boundaries, we can include both a environmental and managerial factor. We propose to overlay the information from the maritime boundaries (the Exclusive Economic Zones) with the IHO Sea Areas (IHO, 1953). This map including the global oceans and seas, has been drafted for hydrographic purposes, but also gives an unequivocal and acceptable distinction of the regional seas and oceans from an oceanographic point of view. The combination of these two boundaries allows us for example to create national regional sea areas for the global ocean.");
    meta.setCustomTopics("i-Marine","Marineregions: the intersect of the Exclusive Economic Zones and IHO areas","Marineregions","VLIZ","EEZ","IHO");
}
//iho
// Fills the given metadata descriptor for the IHO Sea Areas layer
// (boundaries of the major oceans and seas, IHO Special Publication No. 23).
private static void iho(GenericLayerMetadata meta) {
    meta.setLayerName("aquamaps:WorldSeas");
    meta.setResolution(0);
    meta.setTitle("IHO Sea Areas");
    meta.setCategoryTypes("_" + TopicCategory.OCEANS.name() + "_");
    meta.setAbstractField("VLIZ (2005). IHO Sea Areas. Available online at http://www.marineregions.org/. Consulted on 2013-06-05. This dataset represents the boundaries of the major oceans and seas of the world. The source for the boundaries is the publication 'Limits of Oceans & Seas, Special Publication No. 23' published by the IHO in 1953. The dataset was composed by the Flanders Marine Data and Information Centre. NB: The Southern Ocean is not included in the IHO publication and its limits are subject of discussion among the scientific community. The Flanders Marine Institute acknowledges the controversy around this subject but decided to include the Southern Ocean in the dataset as this term is often used by scientists working in this area.");
    meta.setCustomTopics("i-Marine","IHO Sea Areas","Marineregions","VLIZ","IHO");
}
//gebco
// Publishes metadata for the GEBCO_08 bathymetry grid (30 arc-second, "z"
// variable) served by the public THREDDS catalogue. Unlike the sibling
// helpers, this method builds its own NetCDFMetadata and inserts it directly.
// Credentials come from the enclosing class' static geonetworkurl/user/password.
private static void gebco() throws Exception {
    NetCDFMetadata meta = new NetCDFMetadata();
    meta.setGeonetworkUrl(geonetworkurl);
    meta.setGeonetworkUser(user);
    meta.setGeonetworkPwd(password);
    meta.setThreddsCatalogUrl("http://thredds.research-infrastructures.eu/thredds/catalog/public/netcdf/catalog.xml");
    meta.setLayerUrl("http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/gebco_08_OCEANS_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_.nc");
    meta.setSourceFileName("gebco_08_OCEANS_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_.nc");
    meta.setLayerName("z");
    meta.setTitle("General Bathymetric Chart of the Oceans (GEBCO) 3D");
    // 0.0083 degrees ~= 30 arc-seconds, the native GEBCO_08 cell size.
    meta.setResolution(0.0083);
    meta.setAbstractField("The GEBCO_08 Grid: a global 30 arc-second grid. The General Bathymetric Chart of the Oceans (GEBCO) consists of an international group of experts who work on the development of a range of bathymetric data sets and data products, including gridded bathymetric data sets, the GEBCO Digital Atlas, the GEBCO world map and the GEBCO Gazetteer of Undersea Feature Names.");
    meta.setCustomTopics("i-Marine","General Bathymetric Chart of the Oceans","GEBCO","3D");
    meta.insertMetaData();
}
// IceConAnn
// Fills the descriptor for the mean annual ice concentration layer.
private static void IceConAnn(GenericLayerMetadata meta) {
    meta.setLayerName("aquamaps:iceConAnn");
    meta.setTitle("IceConAnn");
    meta.setCategoryTypes("_" + TopicCategory.ENVIRONMENT.name() + "_");
    meta.setAbstractField("Mean Annual Ice Concentration");
    meta.setCustomTopics("i-Marine","Mean Annual Ice Concentration");
}
//DepthMeanAnnual
// Fills the descriptor for the half-degree mean depth layer.
private static void DepthMeanAnnual(GenericLayerMetadata meta) {
    meta.setLayerName("aquamaps:DepthMeanAnnual");
    meta.setTitle("DepthMeanAnnual");
    meta.setCategoryTypes("_" + TopicCategory.ENVIRONMENT.name() + "_");
    meta.setAbstractField("Mean Depth at half-degree resolution");
    meta.setCustomTopics("i-Marine","Mean Depth");
}
//faoarea
// Fills the descriptor for the FAO Fishing Areas boundaries layer.
private static void faoarea(GenericLayerMetadata meta) {
    meta.setLayerName("aquamaps:WorldFaoZones");
    meta.setResolution(0);
    meta.setTitle("FAO Fishing Areas");
    meta.setCategoryTypes("_" + TopicCategory.OCEANS.name() + "_");
    meta.setAbstractField("The dataset represents the boundaries of the FAO Fishing Areas. The source for the boundaries is the description that can be found on the FAO website. The dataset was composed by the Flanders Marine Data and Information Centre.");
    meta.setCustomTopics("i-Marine","FAO Areas");
}
//SSTANMean
// Fills the descriptor for the mean annual sea-surface-temperature layer.
private static void SSTAnMean(GenericLayerMetadata meta) {
    meta.setLayerName("aquamaps:sstAnMean");
    meta.setTitle("SSTAnMean");
    meta.setCategoryTypes("_" + TopicCategory.ENVIRONMENT.name() + "_");
    meta.setAbstractField("Mean Annual Sea Surface Temperature at half-degree resolution");
    meta.setCustomTopics("i-Marine","Mean Annual Sea Surface Temperature");
}
//SalinityMean
// Fills the descriptor for the mean annual salinity layer.
private static void SalinityMean(GenericLayerMetadata meta) {
    meta.setLayerName("aquamaps:salinityMean");
    meta.setTitle("SalinityMean");
    meta.setCategoryTypes("_" + TopicCategory.ENVIRONMENT.name() + "_");
    meta.setAbstractField("Mean Annual Salinity at half-degree resolution");
    meta.setCustomTopics("i-Marine","Mean Annual Salinity");
}
// PrimProdMean
// Fills the descriptor for the mean annual primary production layer.
// Tagged with both ENVIRONMENT and BIOTA topic categories.
private static void PrimProdMean(GenericLayerMetadata meta) {
    meta.setLayerName("aquamaps:primProdMean");
    meta.setTitle("PrimProdMean");
    meta.setCategoryTypes("_" + TopicCategory.ENVIRONMENT.name() + "_" + TopicCategory.BIOTA.name() + "_");
    meta.setAbstractField("Mean Annual Primary Production at half-degree resolution");
    meta.setCustomTopics("i-Marine","Mean Annual Primary Production");
}
//environments
// Fills the descriptor for the aggregated environmental/biota layer.
private static void environments(GenericLayerMetadata meta) {
    meta.setLayerName("aquamaps:environments");
    meta.setTitle("environments");
    meta.setCategoryTypes("_" + TopicCategory.ENVIRONMENT.name() + "_" + TopicCategory.BIOTA.name() + "_");
    meta.setAbstractField("Aggregated environmental and biota data at half-degree resolution");
    meta.setCustomTopics("i-Marine","Aggregated environmental and biota data");
}
}

View File

@ -1,14 +0,0 @@
package org.gcube.dataanalysis.geo.batch;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.insertion.ThreddsFetcher;
/**
 * One-shot batch entry point: crawls the public THREDDS catalogue and lets
 * {@link ThreddsFetcher} publish metadata for every grid dataset it finds.
 * A null scope is passed to the fetcher (same behavior as the original tool).
 */
public class ThreddsMetadataBatchInserter {

    public static void main(String[] args) throws Exception {
        // Route library logging through the project's standard logger config.
        AnalysisLogger.setLogger("./cfg/" + AlgorithmConfiguration.defaultLoggerFile);
        ThreddsFetcher fetcher = new ThreddsFetcher(null);
        fetcher.fetch("http://thredds.research-infrastructures.eu/thredds/catalog/public/netcdf/catalog.xml");
    }
}

View File

@ -1,128 +0,0 @@
package org.gcube.dataanalysis.geo.insertion;
import java.util.List;
import java.util.UUID;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.retrieval.GeoIntersector;
import org.gcube.dataanalysis.geo.utils.CSquareCodesConverter;
import org.hibernate.SessionFactory;
/**
* transforms a raster map into a table
*
* @author coro
*
*/
public class RasterTable {
	// Raster values to persist; indexed as [row][column] and mapped to
	// coordinate triplets by GeoIntersector.associateValueToCoordinates.
	private double valuesMatrix[][];
	// Bounding box of the raster: x in [x1, x2], y in [y1, y2].
	double x1;
	double x2;
	double y1;
	double y2;
	// Single depth/elevation value shared by every cell of this raster.
	double z;
	// Cell sizes along x and y (same units as the bounding box).
	double xResolution;
	double yResolution;
	// Carries the DB connection settings used by DatabaseUtils.initDBSession.
	private AlgorithmConfiguration configuration;
	// Randomized default table name ("rstr" + dash-free UUID) to avoid collisions.
	private String tablename = "rstr" + ("" + UUID.randomUUID()).replace("-", "");
	// %1$s is substituted with the table name via String.format.
	static String createTableStatement = "CREATE TABLE %1$s (id serial, csquarecode character varying, x real, y real, z real, probability real)";
	static String columnsnames = "csquarecode, x , y , z , probability";
	public static String csquareColumn = "csquarecode";
	public static String probabilityColumn = "probability";
	public static String idColumn = "id";
	// Returns the destination table name (auto-generated unless overridden).
	public String getTablename() {
		return tablename;
	}
	public void setTablename(String tablename) {
		this.tablename = tablename;
	}
	// Captures the raster geometry, its values and the DB configuration;
	// no DB work happens until dumpGeoTable() is called.
	public RasterTable(double x1, double x2, double y1, double y2, double z, double xResolution, double yResolution, double[][] values, AlgorithmConfiguration configuration) {
		this.valuesMatrix = values;
		this.configuration = configuration;
		this.x1 = x1;
		this.x2 = x2;
		this.y1 = y1;
		this.y2 = y2;
		this.z = z;
		this.xResolution = xResolution;
		this.yResolution = yResolution;
	}
	// Creates the destination table and bulk-inserts one row per raster cell
	// (csquare code, x, y, z, value). Errors are logged, not rethrown; the DB
	// session is always closed in the finally block.
	public void dumpGeoTable() {
		// open the connection to the db
		SessionFactory dbconnection = DatabaseUtils.initDBSession(configuration);
		try {
			AnalysisLogger.getLogger().debug("Database Initialized");
			// create a table
			DatabaseFactory.executeSQLUpdate(String.format(createTableStatement, tablename), dbconnection);
			AnalysisLogger.getLogger().debug("Table " + tablename + " created");
			List<Tuple<Double>> coordinates = GeoIntersector.generateCoordinateTriplets(x1, x2, y1, y2, z, xResolution, yResolution);
			int triplets = coordinates.size();
			AnalysisLogger.getLogger().debug("Generated " + triplets + " coordinates triples");
			List<Double> values = GeoIntersector.associateValueToCoordinates(coordinates, valuesMatrix);
			AnalysisLogger.getLogger().debug("Association to values completed - fulfilling buffer");
			// for each element in the matrix, build the corresponding csquare code
			StringBuffer sb = new StringBuffer();
			for (int i = 0; i < triplets; i++) {
				// save the string in a buffer
				Tuple<Double> cset = coordinates.get(i);
				double x = cset.getElements().get(0);
				double y = cset.getElements().get(1);
				// NOTE: converter takes (lat, lon) order, hence (y, x).
				String csquare = CSquareCodesConverter.convertAtResolution(y,x, xResolution);
				Double value = values.get(i);
				//we do not use NaNs in this case every value will be filled
				if (value.isNaN())
					value = 0d;
				sb.append("('" + csquare + "'," + x + "," + y + "," + z + ",'" + value + "')");
				// Flush the VALUES buffer every 5000 rows to bound memory and
				// statement size; otherwise append the separator for the next
				// tuple (skipped for the very last element).
				if (i % 5000 == 0) {
					// AnalysisLogger.getLogger().debug("Partial Inserting Buffer of " + sb.length() + " Values");
					String insertStatement = DatabaseUtils.insertFromBuffer(tablename, columnsnames, sb);
					DatabaseFactory.executeSQLUpdate(insertStatement, dbconnection);
					// AnalysisLogger.getLogger().debug("Partial Insertion completed with Success!");
					sb = new StringBuffer();
				} else if (i < triplets - 1)
					sb.append(",");
			}
			AnalysisLogger.getLogger().debug("Inserting Final Buffer of " + sb.length() + " Values");
			// save all the strings on the table
			if (sb.length() > 0) {
				String insertStatement = DatabaseUtils.insertFromBuffer(tablename, columnsnames, sb);
				DatabaseFactory.executeSQLUpdate(insertStatement, dbconnection);
				AnalysisLogger.getLogger().debug("Insertion completed with Success!");
			}
		} catch (Exception e) {
			e.printStackTrace();
			AnalysisLogger.getLogger().debug("Error in dumping table: " + e.getLocalizedMessage());
		} finally {
			// close the connection
			DatabaseUtils.closeDBConnection(dbconnection);
			AnalysisLogger.getLogger().debug("Raster Geo Table DB closed!");
		}
	}
	// Drops the destination table. Best-effort: failures are printed and
	// swallowed, and the connection is always released.
	public void deleteTable() {
		SessionFactory dbconnection = null;
		try {
			dbconnection = DatabaseUtils.initDBSession(configuration);
			DatabaseFactory.executeSQLUpdate(DatabaseUtils.dropTableStatement(tablename), dbconnection);
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			DatabaseUtils.closeDBConnection(dbconnection);
		}
	}
}

View File

@ -1,198 +0,0 @@
package org.gcube.dataanalysis.geo.insertion;
import java.util.List;
import java.util.Locale;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.meta.NetCDFMetadata;
import org.gcube.dataanalysis.geo.meta.OGCFormatter;
import org.gcube.dataanalysis.geo.meta.features.FeaturesManager;
import org.gcube.dataanalysis.geo.utils.ThreddsDataExplorer;
import org.opengis.metadata.Metadata;
import ucar.nc2.dataset.CoordinateAxis;
import ucar.nc2.dt.GridDatatype;
import ucar.nc2.dt.grid.GridDataset;
import ucar.nc2.units.DateRange;
import com.ibm.icu.text.SimpleDateFormat;
public class ThreddsFetcher {
	// Resolves GeoNetwork endpoint/credentials from the gCube scope and
	// checks for already-published layers.
	private FeaturesManager featurer;
	// Date pattern of NetCDF-reported times (unused in this class body).
	public static String NetCDFDateFormat = "time: E MMM dd HH:mm:ss zzz yyyy";
	// Human-readable pattern used in generated titles/abstracts.
	public static String HumanDateFormat = "MM-dd-yy HH:mm";
	// scope may be null; it is handed straight to the FeaturesManager.
	public ThreddsFetcher(String scope) {
		featurer = new FeaturesManager();
		featurer.setScope(scope);
	}
	// Walks every file in the given THREDDS catalogue and, for each grid
	// dataset variable not already on GeoNetwork, builds and inserts a
	// NetCDFMetadata record (title, abstract, bbox, resolution, keywords,
	// temporal extent).
	public void fetch(String threddsCatalogURL) throws Exception {
		List<String> filesURL = ThreddsDataExplorer.getFiles(threddsCatalogURL);
		for (String filename : filesURL) {
			// if (!filename.equalsIgnoreCase("cami_0000-09-01_64x128_L26_c030918.nc"))
			// continue;
			String url = OGCFormatter.getOpenDapURL(threddsCatalogURL, filename);
			if (ThreddsDataExplorer.isGridDataset(url)) {
				// retrieve information
				GridDataset gds = ucar.nc2.dt.grid.GridDataset.open(url);
				List<GridDatatype> gridTypes = gds.getGrids();
				for (GridDatatype gdt : gridTypes) {
					// Fall back to the variable name when no description exists.
					String description = gdt.getDescription();
					if ((description==null) || (description.length()==0))
						description = gdt.getFullName();
					// get date range
					DateRange dr = gdt.getCoordinateSystem().getDateRange();
					// SimpleDateFormat netcdfDF = new SimpleDateFormat(NetCDFDateFormat, Locale.ENGLISH);
					SimpleDateFormat humanDF = new SimpleDateFormat(HumanDateFormat, Locale.ROOT);
					String hStartDate = null;
					String hEndDate = null;
					String duration = null;
					// Number of time instants, as a string (null if no time axis).
					String resolution = gdt.getTimeDimension()==null?null:""+gdt.getTimeDimension().getLength();
					// A variable counts as 3D only when its Z axis has more than one level.
					int numberOfDimensions = 2;
					if ((gdt.getZDimension()!=null)&&(gdt.getZDimension().getLength()>1)){
						numberOfDimensions = 3;
						AnalysisLogger.getLogger().debug("Length of Z Dimension: "+gdt.getZDimension().getLength());
					}
					else
						AnalysisLogger.getLogger().debug("Bidimensional Layer ");
					if (dr != null) {
						hStartDate = dr.getStart() == null ? null : humanDF.format(dr.getStart().getDate());
						hEndDate = dr.getEnd() == null ? null : humanDF.format(dr.getEnd().getDate());
						duration = dr.getDuration() == null ? null : "" + dr.getDuration();
					}
					// control if the file is yet on GN
					// String generatedTitle = generateTitle(filename, description, hStartDate, hEndDate, numberOfDimensions);
					String generatedTitle = generateTitle(gds.getTitle()+": "+gds.getDescription(), description, hStartDate, hEndDate, numberOfDimensions);
					CoordinateAxis xAxis = gdt.getCoordinateSystem().getXHorizAxis();
					CoordinateAxis yAxis = gdt.getCoordinateSystem().getYHorizAxis();
					AnalysisLogger.getLogger().debug("Bounds:"+xAxis.getMinValue()+","+yAxis.getMinValue()+","+xAxis.getMaxValue()+","+yAxis.getMaxValue());
					// Skip variables whose generated title already exists on GeoNetwork
					// (the title acts as the de-duplication key).
					Metadata previousmeta = featurer.getGNInfobyUUIDorName(generatedTitle);
					if (previousmeta!=null){
						AnalysisLogger.getLogger().debug("***WARNING: layer yet found on GeoNetwork***");
						continue;
					}
					/*Layers check - for testing only
					else {
						AnalysisLogger.getLogger().debug("***layer retrieval failed***");
						if (true) System.exit(0);
					}
					*/
					// get resolution - take the maximum regular step
					double resolutionX = Math.abs((double) (xAxis.getMaxValue() - xAxis.getMinValue()) / (double) xAxis.getShape()[0]);
					double resolutionY = Math.abs((double) (yAxis.getMaxValue() - yAxis.getMinValue()) / (double) yAxis.getShape()[0]);
					//build metadata
					NetCDFMetadata metadataInserter = new NetCDFMetadata();
					metadataInserter.setGeonetworkUrl(featurer.getGeonetworkURLFromScope());
					metadataInserter.setGeonetworkUser(featurer.getGeonetworkUserFromScope());
					metadataInserter.setGeonetworkPwd(featurer.getGeonetworkPasswordFromScope());
					// Build standard info:
					metadataInserter.setThreddsCatalogUrl(threddsCatalogURL);
					metadataInserter.setLayerUrl(url);
					metadataInserter.setLayerName(gdt.getFullName());
					metadataInserter.setSourceFileName(filename);
					// insert ranges and sampling
					metadataInserter.setTitle(generatedTitle);
					metadataInserter.setAbstractField(generateAbstractField(gdt.getFullName(), filename, description, gdt.getUnitsString().trim(), hStartDate, hEndDate, duration, resolution, numberOfDimensions, gds.getTitle(), gds.getDescription()));
					metadataInserter.setResolution(Math.max(resolutionX, resolutionY));
					// set Bounding box
					double minX = ThreddsDataExplorer.getMinX(gdt.getCoordinateSystem());
					double maxX = ThreddsDataExplorer.getMaxX(gdt.getCoordinateSystem());
					double minY = ThreddsDataExplorer.getMinY(gdt.getCoordinateSystem());
					double maxY = ThreddsDataExplorer.getMaxY(gdt.getCoordinateSystem());
					// World Ocean Atlas grids are shifted by 180 degrees of longitude
					// here; presumably they are published on a 0..360 grid - TODO confirm.
					if (gds.getTitle().toUpperCase().contains("WORLD OCEAN ATLAS"))
					{
						AnalysisLogger.getLogger().debug("Managing WoA Layer");
						minX = minX-180;
						maxX = maxX-180;
					}
					metadataInserter.setXLeftLow(minX);
					metadataInserter.setYLeftLow(minY);
					metadataInserter.setXRightUpper(maxX);
					metadataInserter.setYRightUpper(maxY);
					//set keywords
					metadataInserter.setCustomTopics(filename, description,numberOfDimensions+"D",gds.getTitle(),gds.getDescription(),"unit:"+gdt.getUnitsString().trim());
					//set Temporal Extent
					if (hStartDate!=null){
						metadataInserter.setStartDate(dr.getStart().getDate());
						metadataInserter.setEndDate(dr.getEnd().getDate());
					}
					AnalysisLogger.getLogger().debug("title: " + metadataInserter.getTitle());
					AnalysisLogger.getLogger().debug("abstract: " + metadataInserter.getAbstractField());
					metadataInserter.insertMetaData();
					// break;
				}
			}
			// break;
		}
	}
	// Builds the layer title: "<description> [dates] (<n>D) {<filename>}",
	// with the date clause omitted when startDate is null and collapsed when
	// start and end coincide. Runs of spaces are squeezed to one.
	public static String generateTitle(String filename, String description, String startDate, String endDate, int numberOfDimensions) {
		String dateString = "";
		if (startDate != null){
			if (startDate.equals(endDate))
				dateString = " in [" + startDate + "]";
			else
				dateString = " from [" + startDate + "] to [" + endDate + "]";
		}
		description = description + " "+dateString+" (" + numberOfDimensions+ "D) {" + filename + "}";
		return description.replaceAll("( )+", " ");
	}
	// Assembles the human-readable abstract from the available pieces; every
	// optional part (unit, dates, duration, time instants, dimensions, NetCDF
	// title) is included only when present.
	// NOTE(review): "lenght" in the emitted duration sentence is a typo, but it
	// is part of the runtime output and is preserved here.
	public static String generateAbstractField(String layername, String filename, String description, String unit, String startDate, String endDate, String duration, String timeInstants, int numberOfDimensions, String netcdftitle, String netcdfdescription) {
		String timeresolutionString = "";
		String durationString = "";
		if ((timeInstants != null) && (timeInstants.length() > 0))
			timeresolutionString = " Number of time instants: " + timeInstants+".";
		if ((duration != null) && (duration.length() > 0))
			durationString = " Time interval lenght: " + duration+".";
		String dateString = "";
		if (startDate != null)
			dateString = " in the time range between [" + startDate + "] and [" + endDate + "].";
		String unitString = "";
		if ((unit != null) && (unit.length()>0))
			unitString= " (" + unit + ")";
		String numberOfDimensionsString = "";
		if (numberOfDimensions>0)
			numberOfDimensionsString = " Number of Dimensions: "+numberOfDimensions+".";
		String netcdfinfo = "";
		if (netcdftitle!=null)
			netcdfinfo = " "+netcdftitle+": "+netcdfdescription+".";
		return layername + ": " + description + unitString+dateString + durationString + timeresolutionString + numberOfDimensionsString+netcdfinfo+" Local file in iMarine: " + filename + ".";
	}
	// Manual entry point: fetches the public i-Marine THREDDS catalogue.
	public static void main(String[] args) throws Exception {
		AnalysisLogger.setLogger("./cfg/"+AlgorithmConfiguration.defaultLoggerFile);
		// ThreddsFetcher tf = new ThreddsFetcher("/gcube/devsec");
		ThreddsFetcher tf = new ThreddsFetcher(null);
		tf.fetch("http://thredds.research-infrastructures.eu/thredds/catalog/public/netcdf/catalog.xml");
	}
}

View File

@ -1,475 +0,0 @@
package org.gcube.dataanalysis.geo.meta;
import it.geosolutions.geonetwork.GNClient;
import it.geosolutions.geonetwork.util.GNInsertConfiguration;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map.Entry;
import javax.xml.bind.JAXBException;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.geotoolkit.metadata.iso.DefaultIdentifier;
import org.geotoolkit.metadata.iso.DefaultMetadata;
import org.geotoolkit.metadata.iso.citation.DefaultCitation;
import org.geotoolkit.metadata.iso.citation.DefaultCitationDate;
import org.geotoolkit.metadata.iso.citation.DefaultContact;
import org.geotoolkit.metadata.iso.citation.DefaultOnlineResource;
import org.geotoolkit.metadata.iso.citation.DefaultResponsibleParty;
import org.geotoolkit.metadata.iso.constraint.DefaultLegalConstraints;
import org.geotoolkit.metadata.iso.distribution.DefaultDigitalTransferOptions;
import org.geotoolkit.metadata.iso.distribution.DefaultDistribution;
import org.geotoolkit.metadata.iso.distribution.DefaultFormat;
import org.geotoolkit.metadata.iso.extent.DefaultExtent;
import org.geotoolkit.metadata.iso.extent.DefaultGeographicBoundingBox;
import org.geotoolkit.metadata.iso.identification.DefaultDataIdentification;
import org.geotoolkit.metadata.iso.identification.DefaultKeywords;
import org.geotoolkit.metadata.iso.identification.DefaultResolution;
import org.geotoolkit.metadata.iso.identification.DefaultUsage;
import org.geotoolkit.metadata.iso.lineage.DefaultLineage;
import org.geotoolkit.metadata.iso.lineage.DefaultNominalResolution;
import org.geotoolkit.metadata.iso.lineage.DefaultProcessStep;
import org.geotoolkit.metadata.iso.lineage.DefaultProcessing;
import org.geotoolkit.metadata.iso.lineage.DefaultSource;
import org.geotoolkit.metadata.iso.maintenance.DefaultMaintenanceInformation;
import org.geotoolkit.metadata.iso.quality.DefaultDataQuality;
import org.geotoolkit.metadata.iso.quality.DefaultScope;
import org.geotoolkit.metadata.iso.spatial.DefaultGeometricObjects;
import org.geotoolkit.metadata.iso.spatial.DefaultVectorSpatialRepresentation;
import org.geotoolkit.util.DefaultInternationalString;
import org.geotoolkit.xml.XML;
import org.opengis.metadata.Metadata;
import org.opengis.metadata.citation.DateType;
import org.opengis.metadata.citation.PresentationForm;
import org.opengis.metadata.citation.ResponsibleParty;
import org.opengis.metadata.citation.Role;
import org.opengis.metadata.constraint.Restriction;
import org.opengis.metadata.identification.KeywordType;
import org.opengis.metadata.identification.TopicCategory;
import org.opengis.metadata.lineage.ProcessStep;
import org.opengis.metadata.maintenance.MaintenanceFrequency;
import org.opengis.metadata.maintenance.ScopeCode;
import org.opengis.metadata.spatial.GeometricObjectType;
import org.opengis.metadata.spatial.SpatialRepresentationType;
import org.opengis.metadata.spatial.TopologyLevel;
import org.opengis.util.InternationalString;
public class GenericLayerMetadata {
	// Default endpoints/credentials; all overridable via setters.
	private String geonetworkUrl = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geonetwork/";
	// NOTE(review): the default geoserver URL points at the geonetwork path -
	// looks like a copy-paste of the line above; confirm before relying on it.
	private String geoserverUrl = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geonetwork/";
	private String geonetworkUser = "admin";
	private String geonetworkPwd = "admin";
	// Example/default descriptive fields, normally replaced by callers.
	private String title = "temperature 04091217ruc.nc";
	private String layerName = "T";
	private String usageField = "Environmental enrichment";
	private String processdescription = "Maps publication";
	private String usageLimitations = "Not for commercial scopes";
	// Defaults to "now"; used as creation/publication/revision date.
	private Date sourceGenerationDate = new Date(System.currentTimeMillis());
	// Underscore-delimited TopicCategory names, e.g. "_BIOTA_OCEANS_".
	private String categoryTypes = "_BIOTA_";
	private String contactInfo = "support@d4science.research-infrastructures.eu";
	private String abstractField = "";
	private String purpose = "Maps publication";
	private String author = "i-Marine";
	// Spatial resolution (degrees) and default near-global bounding box.
	private double res = 0.5d;
	private double xLL = -180;
	private double xRU = 180;
	private double yLL = -85.5;
	private double yRU = 85.5;
	// Extra THEME keywords added on top of the standard "i-Marine" keyword.
	private HashSet<String> customTopics;
	// Optional temporal extent, published as TEMPORAL keywords.
	private Date startDate;
	private Date endDate;
	// Replaces the custom THEME keyword set with the given topics
	// (duplicates collapse via the backing HashSet).
	public void setCustomTopics(String... topics){
		customTopics = new HashSet<String>();
		for (String topic:topics)
			customTopics.add(topic);
	}
	public HashSet<String> getCustomTopics(){
		return customTopics;
	}
	// Temporal extent accessors; both dates are optional (null = no extent).
	public void setStartDate(Date date){
		startDate=date;
	}
	public void setEndDate(Date date){
		endDate=date;
	}
	public Date getStartDate(){
		return startDate;
	}
	public Date getEndDate(){
		return endDate;
	}
	// --- Plain accessors for GeoNetwork connection and descriptive fields. ---
	public String getGeonetworkUrl() {
		return geonetworkUrl;
	}
	public void setGeonetworkUrl(String geonetworkUrl) {
		this.geonetworkUrl = geonetworkUrl;
	}
	public String getGeonetworkUser() {
		return geonetworkUser;
	}
	public void setGeonetworkUser(String geonetworkUser) {
		this.geonetworkUser = geonetworkUser;
	}
	public String getGeonetworkPwd() {
		return geonetworkPwd;
	}
	public void setGeonetworkPwd(String geonetworkPwd) {
		this.geonetworkPwd = geonetworkPwd;
	}
	public String getTitle() {
		return title;
	}
	public void setTitle(String title) {
		this.title = title;
	}
	public String getLayerName() {
		return layerName;
	}
	public void setLayerName(String layerName) {
		this.layerName = layerName;
	}
	public String getUsageField() {
		return usageField;
	}
	public void setUsageField(String usageField) {
		this.usageField = usageField;
	}
	public String getProcessdescription() {
		return processdescription;
	}
	public void setProcessdescription(String processdescription) {
		this.processdescription = processdescription;
	}
	public String getUsageLimitations() {
		return usageLimitations;
	}
	public void setUsageLimitations(String usageLimitations) {
		this.usageLimitations = usageLimitations;
	}
	public Date getSourceGenerationDate() {
		return sourceGenerationDate;
	}
	public void setSourceGenerationDate(Date sourceGenerationDate) {
		this.sourceGenerationDate = sourceGenerationDate;
	}
	public String getCategoryTypes() {
		return categoryTypes;
	}
	// Expects underscore-delimited TopicCategory names (e.g. "_OCEANS_").
	public void setCategoryTypes(String categoryTypes) {
		this.categoryTypes = categoryTypes;
	}
	public String getContactInfo() {
		return contactInfo;
	}
	public void setContactInfo(String contactInfo) {
		this.contactInfo = contactInfo;
	}
	public String getAbstractField() {
		return abstractField;
	}
	public void setAbstractField(String abstractField) {
		this.abstractField = abstractField;
	}
	public String getPurpose() {
		return purpose;
	}
	public void setPurpose(String purpose) {
		this.purpose = purpose;
	}
	public String getAuthor() {
		return author;
	}
	public void setAuthor(String author) {
		this.author = author;
	}
	// --- Spatial accessors: resolution (degrees) and bounding-box corners
	// (LeftLow = lower-left, RightUpper = upper-right). ---
	public double getResolution() {
		return res;
	}
	public void setResolution(double res) {
		this.res = res;
	}
	public double getXLeftLow() {
		return xLL;
	}
	public void setXLeftLow(double xLL) {
		this.xLL = xLL;
	}
	public double getXRightUpper() {
		return xRU;
	}
	public void setXRightUpper(double xRU) {
		this.xRU = xRU;
	}
	public double getYLeftLow() {
		return yLL;
	}
	public void setYLeftLow(double yLL) {
		this.yLL = yLL;
	}
	public double getYRightUpper() {
		return yRU;
	}
	public void setYRightUpper(double yRU) {
		this.yRU = yRU;
	}
	public String getGeoserverUrl() {
		return geoserverUrl;
	}
	public void setGeoserverUrl(String geoserverUrl) {
		this.geoserverUrl = geoserverUrl;
	}
static File meta2File(Metadata meta) throws IOException, JAXBException {
File temp = File.createTempFile("meta", ".xml");
FileWriter writer = new FileWriter(temp);
writer.write(XML.marshal(meta));
writer.close();
return temp;
}
public void insertMetaData() throws Exception {
// layer uri: wms, wfs wcs
List<String> layerUris = new ArrayList<String>();
layerUris.add(OGCFormatter.getWmsUrl(geoserverUrl, layerName, null, OGCFormatter.buildBoundingBox(xLL, yLL, xRU, yRU)));
layerUris.add(OGCFormatter.getWfsUrl(geoserverUrl, layerName, OGCFormatter.buildBoundingBox(xLL, yLL, xRU, yRU), 0, "json"));
layerUris.add(OGCFormatter.getWcsUrl(geoserverUrl, layerName, OGCFormatter.buildBoundingBox(xLL, yLL, xRU, yRU)));
// layer keywords
HashMap<KeywordType, HashSet<String>> descriptiveKeyWords = new HashMap<KeywordType, HashSet<String>>();
HashSet<String> keySet = new HashSet<String>();
keySet.add("i-Marine");
if (customTopics!=null)
keySet.addAll(customTopics);
descriptiveKeyWords.put(KeywordType.THEME, keySet);
if (startDate!=null){
HashSet<String> temporalkeySet = new HashSet<String>();
temporalkeySet.add(startDate.toString());
if (!endDate.equals(startDate))
temporalkeySet.add(endDate.toString());
descriptiveKeyWords.put(KeywordType.TEMPORAL, temporalkeySet);
}
// author:
DefaultResponsibleParty party = new DefaultResponsibleParty();
party.setIndividualName(author);
DefaultContact contact = new DefaultContact();
contact.setContactInstructions(new DefaultInternationalString(contactInfo));
party.setContactInfo(contact);
party.setRole(Role.ORIGINATOR);
// citation:
DefaultCitation citation = new DefaultCitation();
citation.setTitle(new DefaultInternationalString(title));
ArrayList<DefaultCitationDate> citDates = new ArrayList<DefaultCitationDate>();
citDates.add(new DefaultCitationDate(sourceGenerationDate, DateType.CREATION));
citDates.add(new DefaultCitationDate(sourceGenerationDate, DateType.PUBLICATION));
citDates.add(new DefaultCitationDate(sourceGenerationDate, DateType.REVISION));
citation.setDates(citDates);
ArrayList<InternationalString> citAltTitle = new ArrayList<InternationalString>();
citAltTitle.add(new DefaultInternationalString(title));
citation.setAlternateTitles(citAltTitle);
citation.setEditionDate(sourceGenerationDate);
citation.getPresentationForms().add(PresentationForm.MAP_DIGITAL);
ArrayList<DefaultKeywords> keywordslist = new ArrayList<DefaultKeywords>();
for (Entry<KeywordType, HashSet<String>> entry : descriptiveKeyWords.entrySet()) {
DefaultKeywords keywords = new DefaultKeywords();
for (String key : entry.getValue())
keywords.getKeywords().add(new DefaultInternationalString(key));
keywords.setType(entry.getKey());
DefaultCitation thesaurus = new DefaultCitation();
thesaurus.setTitle(new DefaultInternationalString("General"));
thesaurus.setDates(citDates);
keywords.setThesaurusName(thesaurus);
keywordslist.add(keywords);
}
// usage:
DefaultUsage usage = new DefaultUsage();
usage.setSpecificUsage(new DefaultInternationalString(usageField));
usage.setUsageDate(sourceGenerationDate);
usage.setUserDeterminedLimitations(new DefaultInternationalString(usageLimitations));
usage.setUserContactInfo(new ArrayList<ResponsibleParty>(Arrays.asList(party)));
ArrayList<DefaultUsage> usages = new ArrayList<DefaultUsage>(Arrays.asList(usage));
//build categories by guessing on the filename
List<TopicCategory> categories = guessTopicCategory(categoryTypes);
AnalysisLogger.getLogger().debug("Guessed Topics: "+categories);
// Spatial Rapresentation Info
DefaultGeometricObjects geoObjs = new DefaultGeometricObjects();
geoObjs.setGeometricObjectType(GeometricObjectType.COMPLEX);
DefaultVectorSpatialRepresentation spatial = new DefaultVectorSpatialRepresentation();
spatial.setTopologyLevel(TopologyLevel.GEOMETRY_ONLY);
spatial.getGeometricObjects().add(geoObjs);
// Extent:
DefaultExtent extent = new DefaultExtent();
extent.setGeographicElements(Collections.singleton(new DefaultGeographicBoundingBox(xLL, xRU, yLL, yRU)));
extent.setDescription(new DefaultInternationalString("Bounding box"));
/*Only with Geotoolkit 4.x
DefaultTemporalExtent stext = new DefaultTemporalExtent(startDate,endDate);
stext.setStartTime(startDate);
stext.setEndTime(endDate);
extent.setTemporalElements(Arrays.asList(stext));
*/
extent.freeze();
//resolution
DefaultNominalResolution resolution = new DefaultNominalResolution();
resolution.setGroundResolution(res);
resolution.setScanningResolution(res);
DefaultResolution dres = new DefaultResolution();
dres.setDistance(res);
// layers access:
DefaultDistribution distribution = new DefaultDistribution();
DefaultDigitalTransferOptions transferOptions = new DefaultDigitalTransferOptions();
for (String uri : layerUris)
transferOptions.getOnLines().add(new DefaultOnlineResource(new URI(uri)));
distribution.getTransferOptions().add(transferOptions);
DefaultFormat format1 = new DefaultFormat();
format1.setName(new DefaultInternationalString("WMS"));
format1.setVersion(new DefaultInternationalString("1.1.0"));
DefaultFormat format2 = new DefaultFormat();
format2.setName(new DefaultInternationalString("WFS"));
format2.setVersion(new DefaultInternationalString("1.1.0"));
DefaultFormat format3 = new DefaultFormat();
format3.setName(new DefaultInternationalString("WCS"));
format3.setVersion(new DefaultInternationalString("1.0.0"));
distribution.setDistributionFormats(new ArrayList<DefaultFormat>(Arrays.asList(format1, format2, format3)));
// legal constraints
DefaultLegalConstraints constraints = new DefaultLegalConstraints();
constraints.getUseLimitations().add(new DefaultInternationalString("Licensed"));
constraints.getAccessConstraints().add(Restriction.LICENSE);
constraints.getUseConstraints().add(Restriction.LICENSE);
// quality declaration:
DefaultDataQuality processQuality = new DefaultDataQuality();
//citation
DefaultCitation sourceCitation = new DefaultCitation();
sourceCitation.setTitle(new DefaultInternationalString(title));
sourceCitation.getDates().add(new DefaultCitationDate(sourceGenerationDate, DateType.CREATION));
sourceCitation.getIdentifiers().add(new DefaultIdentifier(categoryTypes));
//source
DefaultSource source = new DefaultSource();
source.setResolution(resolution);
source.setDescription(new DefaultInternationalString(title));
source.setSourceCitation(sourceCitation);
// provenance
DefaultProcessStep preprocessStep = new DefaultProcessStep();
DefaultProcessStep processStep = new DefaultProcessStep(preprocessStep);
DefaultProcessing processing = new DefaultProcessing();
processing.setSoftwareReferences(new ArrayList<DefaultCitation>(Arrays.asList(sourceCitation)));
processStep.setDescription(new DefaultInternationalString(processdescription));
DefaultLineage processLineage = new DefaultLineage();
processLineage.setProcessSteps(new ArrayList<ProcessStep>(Arrays.asList(processStep)));
processQuality.setLineage(processLineage);
processQuality.setScope(new DefaultScope(ScopeCode.DATASET));
// fulfill identification
DefaultDataIdentification ident = new DefaultDataIdentification();
ident.setCitation(citation);
ident.setAbstract(new DefaultInternationalString(abstractField));
ident.setPurpose(new DefaultInternationalString(purpose));
ident.getResourceMaintenances().add(new DefaultMaintenanceInformation(MaintenanceFrequency.AS_NEEDED));
ident.setDescriptiveKeywords(keywordslist);
ident.setTopicCategories(categories);
ident.setResourceSpecificUsages(usages);
ident.setExtents(new ArrayList<DefaultExtent>(Arrays.asList(extent)));
ident.setSpatialRepresentationTypes(new ArrayList<SpatialRepresentationType>(Arrays.asList(SpatialRepresentationType.GRID)));
ident.setSpatialResolutions(new ArrayList<DefaultResolution>(Arrays.asList(dres)));
ident.setLanguages(new ArrayList<Locale>(Arrays.asList(Locale.ENGLISH)));
// Metadata Obj:
DefaultMetadata meta = new DefaultMetadata(party, sourceGenerationDate, ident);
meta.getSpatialRepresentationInfo().add(spatial);
meta.setDistributionInfo(distribution);
meta.getMetadataConstraints().add(constraints);
meta.getDataQualityInfo().add(processQuality);
meta.setLanguage(Locale.ENGLISH);
// System.out.println(meta);
GNClient client = new GNClient(geonetworkUrl);
client.login(geonetworkUser, geonetworkPwd);
File tmetafile = meta2File(meta);
client.insertMetadata(new GNInsertConfiguration("3", "datasets", "_none_", true), tmetafile);
tmetafile.delete();
}
/**
 * Guesses ISO topic categories from a reference string: a category matches
 * when its enum name, wrapped in underscores (e.g. "_biota_"), occurs in the
 * string (case-insensitive).
 */
public static List<TopicCategory> guessTopicCategory(String refString) {
	List<TopicCategory> matches = new ArrayList<TopicCategory>();
	String haystack = refString.toLowerCase();
	for (TopicCategory candidate : TopicCategory.values()) {
		String needle = "_" + candidate.name().toLowerCase() + "_";
		if (haystack.contains(needle))
			matches.add(candidate);
	}
	return matches;
}
}

View File

@ -1,492 +0,0 @@
package org.gcube.dataanalysis.geo.meta;
import it.geosolutions.geonetwork.GNClient;
import it.geosolutions.geonetwork.util.GNInsertConfiguration;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map.Entry;
import javax.xml.bind.JAXBException;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.geotoolkit.metadata.iso.DefaultIdentifier;
import org.geotoolkit.metadata.iso.DefaultMetadata;
import org.geotoolkit.metadata.iso.citation.DefaultCitation;
import org.geotoolkit.metadata.iso.citation.DefaultCitationDate;
import org.geotoolkit.metadata.iso.citation.DefaultContact;
import org.geotoolkit.metadata.iso.citation.DefaultOnlineResource;
import org.geotoolkit.metadata.iso.citation.DefaultResponsibleParty;
import org.geotoolkit.metadata.iso.constraint.DefaultLegalConstraints;
import org.geotoolkit.metadata.iso.distribution.DefaultDigitalTransferOptions;
import org.geotoolkit.metadata.iso.distribution.DefaultDistribution;
import org.geotoolkit.metadata.iso.distribution.DefaultFormat;
import org.geotoolkit.metadata.iso.extent.DefaultExtent;
import org.geotoolkit.metadata.iso.extent.DefaultGeographicBoundingBox;
import org.geotoolkit.metadata.iso.identification.DefaultDataIdentification;
import org.geotoolkit.metadata.iso.identification.DefaultKeywords;
import org.geotoolkit.metadata.iso.identification.DefaultResolution;
import org.geotoolkit.metadata.iso.identification.DefaultUsage;
import org.geotoolkit.metadata.iso.lineage.DefaultLineage;
import org.geotoolkit.metadata.iso.lineage.DefaultNominalResolution;
import org.geotoolkit.metadata.iso.lineage.DefaultProcessStep;
import org.geotoolkit.metadata.iso.lineage.DefaultProcessing;
import org.geotoolkit.metadata.iso.lineage.DefaultSource;
import org.geotoolkit.metadata.iso.maintenance.DefaultMaintenanceInformation;
import org.geotoolkit.metadata.iso.quality.DefaultDataQuality;
import org.geotoolkit.metadata.iso.quality.DefaultScope;
import org.geotoolkit.metadata.iso.spatial.DefaultGeometricObjects;
import org.geotoolkit.metadata.iso.spatial.DefaultVectorSpatialRepresentation;
import org.geotoolkit.util.DefaultInternationalString;
import org.geotoolkit.xml.XML;
import org.opengis.metadata.Metadata;
import org.opengis.metadata.citation.DateType;
import org.opengis.metadata.citation.PresentationForm;
import org.opengis.metadata.citation.ResponsibleParty;
import org.opengis.metadata.citation.Role;
import org.opengis.metadata.constraint.Restriction;
import org.opengis.metadata.identification.KeywordType;
import org.opengis.metadata.identification.TopicCategory;
import org.opengis.metadata.lineage.ProcessStep;
import org.opengis.metadata.maintenance.MaintenanceFrequency;
import org.opengis.metadata.maintenance.ScopeCode;
import org.opengis.metadata.spatial.GeometricObjectType;
import org.opengis.metadata.spatial.SpatialRepresentationType;
import org.opengis.metadata.spatial.TopologyLevel;
import org.opengis.util.InternationalString;
/**
 * Builds an ISO 19115 metadata record (via Geotoolkit) for a NetCDF layer
 * served by a THREDDS instance — citation, keywords, usage, extent,
 * resolution, distribution links, legal constraints and lineage — and
 * inserts it into a GeoNetwork catalogue.
 *
 * Fixes over the previous revision:
 *  - a NullPointerException when a start date was set without an end date;
 *  - meta2File no longer leaks the FileWriter when marshalling fails.
 */
public class NetCDFMetadata {

	// GeoNetwork endpoint and credentials used at insertion time
	private String geonetworkUrl = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geonetwork/";
	private String geonetworkUser = "admin";
	private String geonetworkPwd = "admin";
	// THREDDS catalog hosting the NetCDF file
	private String threddsCatalogUrl = "http://thredds.research-infrastructures.eu/thredds/catalog/public/netcdf/catalog.xml";
	private String title = "temperature 04091217ruc.nc";
	// name of the variable/layer inside the NetCDF file
	private String layerName = "T";
	private String usageField = "Environmental enrichment";
	private String processdescription = "Maps publication";
	private String usageLimitations = "Not for commercial scopes";
	private Date sourceGenerationDate = new Date(System.currentTimeMillis());
	private String sourceFileName = "04091217_ruc.nc";
	private String contactInfo = "support@d4science.research-infrastructures.eu";
	private String abstractField = "T: temperature (degK) from 04091217ruc.nc resident on the THREDDS instance " + threddsCatalogUrl;
	private String purpose = "Maps publication";
	private String author = "i-Marine";
	// spatial resolution and bounding box corners (xLL,yLL)-(xRU,yRU)
	private double res = 0.5d;
	private double xLL = -180;
	private double xRU = 180;
	private double yLL = -85.5;
	private double yRU = 85.5;
	// OPeNDAP access URL of the NetCDF file
	private String layerUrl = "http://thredds.research-infrastructures.eu:8080/thredds/dodsC/public/netcdf/04091217_ruc.nc";
	// optional additional THEME keywords
	private HashSet<String> customTopics;
	// optional temporal coverage, published as TEMPORAL keywords
	private Date startDate;
	private Date endDate;

	/** Replaces the custom THEME keywords with the given topics. */
	public void setCustomTopics(String... topics) {
		customTopics = new HashSet<String>();
		for (String topic : topics)
			customTopics.add(topic);
	}

	public HashSet<String> getCustomTopics() {
		return customTopics;
	}

	public void setStartDate(Date date) {
		startDate = date;
	}

	public void setEndDate(Date date) {
		endDate = date;
	}

	public Date getStartDate() {
		return startDate;
	}

	public Date getEndDate() {
		return endDate;
	}

	public String getGeonetworkUrl() {
		return geonetworkUrl;
	}

	public void setGeonetworkUrl(String geonetworkUrl) {
		this.geonetworkUrl = geonetworkUrl;
	}

	public String getGeonetworkUser() {
		return geonetworkUser;
	}

	public void setGeonetworkUser(String geonetworkUser) {
		this.geonetworkUser = geonetworkUser;
	}

	public String getGeonetworkPwd() {
		return geonetworkPwd;
	}

	public void setGeonetworkPwd(String geonetworkPwd) {
		this.geonetworkPwd = geonetworkPwd;
	}

	public String getThreddsCatalogUrl() {
		return threddsCatalogUrl;
	}

	public void setThreddsCatalogUrl(String threddsCatalogUrl) {
		this.threddsCatalogUrl = threddsCatalogUrl;
	}

	public String getTitle() {
		return title;
	}

	public void setTitle(String title) {
		this.title = title;
	}

	public String getLayerName() {
		return layerName;
	}

	public void setLayerName(String layerName) {
		this.layerName = layerName;
	}

	public String getUsageField() {
		return usageField;
	}

	public void setUsageField(String usageField) {
		this.usageField = usageField;
	}

	public String getProcessdescription() {
		return processdescription;
	}

	public void setProcessdescription(String processdescription) {
		this.processdescription = processdescription;
	}

	public String getUsageLimitations() {
		return usageLimitations;
	}

	public void setUsageLimitations(String usageLimitations) {
		this.usageLimitations = usageLimitations;
	}

	public Date getSourceGenerationDate() {
		return sourceGenerationDate;
	}

	public void setSourceGenerationDate(Date sourceGenerationDate) {
		this.sourceGenerationDate = sourceGenerationDate;
	}

	public String getSourceFileName() {
		return sourceFileName;
	}

	public void setSourceFileName(String sourceTableName) {
		this.sourceFileName = sourceTableName;
	}

	public String getContactInfo() {
		return contactInfo;
	}

	public void setContactInfo(String contactInfo) {
		this.contactInfo = contactInfo;
	}

	public String getAbstractField() {
		return abstractField;
	}

	public void setAbstractField(String abstractField) {
		this.abstractField = abstractField;
	}

	public String getPurpose() {
		return purpose;
	}

	public void setPurpose(String purpose) {
		this.purpose = purpose;
	}

	public String getAuthor() {
		return author;
	}

	public void setAuthor(String author) {
		this.author = author;
	}

	public double getResolution() {
		return res;
	}

	public void setResolution(double res) {
		this.res = res;
	}

	public double getXLeftLow() {
		return xLL;
	}

	public void setXLeftLow(double xLL) {
		this.xLL = xLL;
	}

	public double getXRightUpper() {
		return xRU;
	}

	public void setXRightUpper(double xRU) {
		this.xRU = xRU;
	}

	public double getYLeftLow() {
		return yLL;
	}

	public void setYLeftLow(double yLL) {
		this.yLL = yLL;
	}

	public double getYRightUpper() {
		return yRU;
	}

	public void setYRightUpper(double yRU) {
		this.yRU = yRU;
	}

	public String getLayerUrl() {
		return layerUrl;
	}

	public void setLayerUrl(String layerUrl) {
		this.layerUrl = layerUrl;
	}

	/**
	 * Serializes the metadata to a temporary ISO 19139 XML file.
	 * The caller is responsible for deleting the returned file.
	 */
	static File meta2File(Metadata meta) throws IOException, JAXBException {
		File temp = File.createTempFile("meta", ".xml");
		FileWriter writer = new FileWriter(temp);
		try {
			writer.write(XML.marshal(meta));
		} finally {
			// always release the file handle, even if marshalling fails
			writer.close();
		}
		return temp;
	}

	/** Publishes the record with GeoNetwork defaults: group "3", category "datasets", no stylesheet, validation on. */
	public void insertMetaData() throws Exception {
		insertMetaData("3", "datasets", "_none_", true);
	}

	/**
	 * Builds the ISO metadata record for the configured NetCDF layer and
	 * inserts it into GeoNetwork.
	 *
	 * @param group      GeoNetwork group identifier
	 * @param category   GeoNetwork category
	 * @param stylesheet stylesheet name ("_none_" for none)
	 * @param validate   whether GeoNetwork must validate the record
	 */
	public void insertMetaData(String group, String category, String stylesheet, boolean validate) throws Exception {
		// layer access points: WMS, OPeNDAP, WCS and the THREDDS catalog
		List<String> layerUris = new ArrayList<String>();
		layerUris.add(OGCFormatter.getWmsNetCDFUrl(layerUrl, layerName, OGCFormatter.buildBoundingBox(xLL, yLL, xRU, yRU)));
		layerUris.add(layerUrl);
		layerUris.add(OGCFormatter.getWcsNetCDFUrl(layerUrl, layerName, OGCFormatter.buildBoundingBox(xLL, yLL, xRU, yRU)));
		layerUris.add(threddsCatalogUrl);
		// descriptive keywords: fixed THEME keywords plus optional custom/temporal ones
		HashMap<KeywordType, HashSet<String>> descriptiveKeyWords = new HashMap<KeywordType, HashSet<String>>();
		HashSet<String> keySet = new HashSet<String>();
		keySet.add("THREDDS");
		keySet.add("i-Marine");
		keySet.add("NetCDF");
		if (customTopics != null)
			keySet.addAll(customTopics);
		descriptiveKeyWords.put(KeywordType.THEME, keySet);
		if (startDate != null) {
			HashSet<String> temporalkeySet = new HashSet<String>();
			temporalkeySet.add(startDate.toString());
			// guard against a missing end date (previously threw NullPointerException)
			if (endDate != null && !endDate.equals(startDate))
				temporalkeySet.add(endDate.toString());
			descriptiveKeyWords.put(KeywordType.TEMPORAL, temporalkeySet);
		}
		// author:
		DefaultResponsibleParty party = new DefaultResponsibleParty();
		party.setIndividualName(author);
		DefaultContact contact = new DefaultContact();
		contact.setContactInstructions(new DefaultInternationalString(contactInfo));
		party.setContactInfo(contact);
		party.setRole(Role.ORIGINATOR);
		// citation: the generation date serves as creation, publication and revision date
		DefaultCitation citation = new DefaultCitation();
		citation.setTitle(new DefaultInternationalString(title));
		ArrayList<DefaultCitationDate> citDates = new ArrayList<DefaultCitationDate>();
		citDates.add(new DefaultCitationDate(sourceGenerationDate, DateType.CREATION));
		citDates.add(new DefaultCitationDate(sourceGenerationDate, DateType.PUBLICATION));
		citDates.add(new DefaultCitationDate(sourceGenerationDate, DateType.REVISION));
		citation.setDates(citDates);
		ArrayList<InternationalString> citAltTitle = new ArrayList<InternationalString>();
		citAltTitle.add(new DefaultInternationalString(title));
		citation.setAlternateTitles(citAltTitle);
		citation.setEditionDate(sourceGenerationDate);
		citation.getPresentationForms().add(PresentationForm.MAP_DIGITAL);
		// one DefaultKeywords group per keyword type, all under a "General" thesaurus
		ArrayList<DefaultKeywords> keywordslist = new ArrayList<DefaultKeywords>();
		for (Entry<KeywordType, HashSet<String>> entry : descriptiveKeyWords.entrySet()) {
			DefaultKeywords keywords = new DefaultKeywords();
			for (String key : entry.getValue())
				keywords.getKeywords().add(new DefaultInternationalString(key));
			keywords.setType(entry.getKey());
			DefaultCitation thesaurus = new DefaultCitation();
			thesaurus.setTitle(new DefaultInternationalString("General"));
			thesaurus.setDates(citDates);
			keywords.setThesaurusName(thesaurus);
			keywordslist.add(keywords);
		}
		// usage:
		DefaultUsage usage = new DefaultUsage();
		usage.setSpecificUsage(new DefaultInternationalString(usageField));
		usage.setUsageDate(sourceGenerationDate);
		usage.setUserDeterminedLimitations(new DefaultInternationalString(usageLimitations));
		usage.setUserContactInfo(new ArrayList<ResponsibleParty>(Arrays.asList(party)));
		ArrayList<DefaultUsage> usages = new ArrayList<DefaultUsage>(Arrays.asList(usage));
		// build categories by guessing on the filename
		List<TopicCategory> categories = guessTopicCategory(sourceFileName);
		AnalysisLogger.getLogger().debug("Guessed Topics: " + categories);
		// spatial representation info
		DefaultGeometricObjects geoObjs = new DefaultGeometricObjects();
		geoObjs.setGeometricObjectType(GeometricObjectType.COMPLEX);
		DefaultVectorSpatialRepresentation spatial = new DefaultVectorSpatialRepresentation();
		spatial.setTopologyLevel(TopologyLevel.GEOMETRY_ONLY);
		spatial.getGeometricObjects().add(geoObjs);
		// extent (geographic only; a temporal extent would require Geotoolkit 4.x)
		DefaultExtent extent = new DefaultExtent();
		extent.setGeographicElements(Collections.singleton(new DefaultGeographicBoundingBox(xLL, xRU, yLL, yRU)));
		extent.setDescription(new DefaultInternationalString("Bounding box"));
		/* Only with Geotoolkit 4.x
		DefaultTemporalExtent stext = new DefaultTemporalExtent(startDate,endDate);
		stext.setStartTime(startDate);
		stext.setEndTime(endDate);
		extent.setTemporalElements(Arrays.asList(stext));
		*/
		extent.freeze();
		// resolution
		DefaultNominalResolution resolution = new DefaultNominalResolution();
		resolution.setGroundResolution(res);
		resolution.setScanningResolution(res);
		DefaultResolution dres = new DefaultResolution();
		dres.setDistance(res);
		// layers access: every URI collected above becomes an online resource
		DefaultDistribution distribution = new DefaultDistribution();
		DefaultDigitalTransferOptions transferOptions = new DefaultDigitalTransferOptions();
		for (String uri : layerUris)
			transferOptions.getOnLines().add(new DefaultOnlineResource(new URI(uri)));
		distribution.getTransferOptions().add(transferOptions);
		DefaultFormat format1 = new DefaultFormat();
		format1.setName(new DefaultInternationalString("WMS"));
		format1.setVersion(new DefaultInternationalString("1.1.0"));
		DefaultFormat format2 = new DefaultFormat();
		format2.setName(new DefaultInternationalString("OPeNDAP"));
		format2.setVersion(new DefaultInternationalString("2.0.0"));
		DefaultFormat format3 = new DefaultFormat();
		format3.setName(new DefaultInternationalString("WCS"));
		format3.setVersion(new DefaultInternationalString("1.0.0"));
		distribution.setDistributionFormats(new ArrayList<DefaultFormat>(Arrays.asList(format1, format2, format3)));
		// legal constraints
		DefaultLegalConstraints constraints = new DefaultLegalConstraints();
		constraints.getUseLimitations().add(new DefaultInternationalString("Licensed"));
		constraints.getAccessConstraints().add(Restriction.LICENSE);
		constraints.getUseConstraints().add(Restriction.LICENSE);
		// quality declaration:
		DefaultDataQuality processQuality = new DefaultDataQuality();
		// citation
		DefaultCitation sourceCitation = new DefaultCitation();
		sourceCitation.setTitle(new DefaultInternationalString(title));
		sourceCitation.getDates().add(new DefaultCitationDate(sourceGenerationDate, DateType.CREATION));
		sourceCitation.getIdentifiers().add(new DefaultIdentifier(sourceFileName));
		// source
		DefaultSource source = new DefaultSource();
		source.setResolution(resolution);
		source.setDescription(new DefaultInternationalString(title));
		source.setSourceCitation(sourceCitation);
		// provenance
		DefaultProcessStep preprocessStep = new DefaultProcessStep();
		DefaultProcessStep processStep = new DefaultProcessStep(preprocessStep);
		// NOTE(review): 'processing' is populated but never attached to the
		// process step; kept for parity with the original record layout —
		// confirm whether it should be linked via the lineage before removing.
		DefaultProcessing processing = new DefaultProcessing();
		processing.setSoftwareReferences(new ArrayList<DefaultCitation>(Arrays.asList(sourceCitation)));
		processStep.setDescription(new DefaultInternationalString(processdescription));
		DefaultLineage processLineage = new DefaultLineage();
		processLineage.setProcessSteps(new ArrayList<ProcessStep>(Arrays.asList(processStep)));
		processQuality.setLineage(processLineage);
		processQuality.setScope(new DefaultScope(ScopeCode.DATASET));
		// fulfill identification
		DefaultDataIdentification ident = new DefaultDataIdentification();
		ident.setCitation(citation);
		ident.setAbstract(new DefaultInternationalString(abstractField));
		ident.setPurpose(new DefaultInternationalString(purpose));
		ident.getResourceMaintenances().add(new DefaultMaintenanceInformation(MaintenanceFrequency.AS_NEEDED));
		ident.setDescriptiveKeywords(keywordslist);
		ident.setTopicCategories(categories);
		ident.setResourceSpecificUsages(usages);
		ident.setExtents(new ArrayList<DefaultExtent>(Arrays.asList(extent)));
		ident.setSpatialRepresentationTypes(new ArrayList<SpatialRepresentationType>(Arrays.asList(SpatialRepresentationType.GRID)));
		ident.setSpatialResolutions(new ArrayList<DefaultResolution>(Arrays.asList(dres)));
		ident.setLanguages(new ArrayList<Locale>(Arrays.asList(Locale.ENGLISH)));
		// metadata object
		DefaultMetadata meta = new DefaultMetadata(party, sourceGenerationDate, ident);
		meta.getSpatialRepresentationInfo().add(spatial);
		meta.setDistributionInfo(distribution);
		meta.getMetadataConstraints().add(constraints);
		meta.getDataQualityInfo().add(processQuality);
		meta.setLanguage(Locale.ENGLISH);
		// push to GeoNetwork and clean up the temporary file
		GNClient client = new GNClient(geonetworkUrl);
		client.login(geonetworkUser, geonetworkPwd);
		File tmetafile = meta2File(meta);
		client.insertMetadata(new GNInsertConfiguration(group, category, stylesheet, validate), tmetafile);
		tmetafile.delete();
	}

	/**
	 * Matches ISO topic categories whose enum name appears in the given
	 * string wrapped by underscores (e.g. "..._biota_...").
	 */
	public static List<TopicCategory> guessTopicCategory(String refString) {
		String searcher = refString.toLowerCase();
		List<TopicCategory> categories = new ArrayList<TopicCategory>();
		for (TopicCategory topic : TopicCategory.values()) {
			if (searcher.contains("_" + topic.name().toLowerCase() + "_")) {
				categories.add(topic);
			}
		}
		return categories;
	}
}

View File

@ -1,48 +0,0 @@
package org.gcube.dataanalysis.geo.meta;
public class OGCFormatter {
public static String getWfsUrl(String geoServerUrl, String layerName, String bbox, int limit, String format) {
return geoServerUrl + "/wfs?service=wfs&version=1.1.0&REQUEST=GetFeature" + "&TYPENAME=" + layerName + (bbox==null? "":"&BBOX=" + bbox) + (limit == 0 ? "" : "&MAXFEATURES=" + limit) + (format == null ? "" : "&OUTPUTFORMAT=" + format);
}
public static String getWmsUrl(String geoServerUrl, String layerName, String style, String bbox) {
return geoServerUrl + "/wms?service=wms&version=1.1.0" + "&request=GetMap&layers=" + layerName + "&styles=" + (style == null ? "" : style) + "&bbox=" + bbox + "&width=676&height=330&srs=EPSG:4326&format=application/openlayers";
}
public static String getWcsUrl(String geoServerUrl, String layerName, String bbox) {
return geoServerUrl + "/wcs?service=wcs&version=1.0.0" + "&request=GetCoverage&coverage=" + layerName + "&CRS=EPSG:4326" + "&bbox=" + bbox + "&width=676&height=330&format=geotiff";
}
public static String getWmsNetCDFUrl(String fileUrl, String layerName, String bbox) {
return fileUrl.replace("dodsC", "wms") + "?service=wms&version=1.3.0" + "&request=GetMap&layers=" + layerName + "&bbox=" + bbox + "&styles=&width=676&height=330&srs=EPSG:4326&CRS=EPSG:4326&format=image/png";
}
public static String getWcsNetCDFUrl(String fileUrl, String layerName, String bbox) {
return fileUrl.replace("dodsC", "wcs") + "?service=wcs&version=1.0.0" + "&request=GetCoverage&coverage=" + layerName + "&CRS=EPSG:4326" + "&bbox=" + bbox + "&width=676&height=330&format=geotiff";
}
public static String getOpenDapURL(String threddsCatalog, String filename) {
return threddsCatalog.replace("catalog.xml",filename).replace("catalog","dodsC");
}
public static String buildBoundingBox(double x1, double y1, double x2, double y2) {
// note: the bounding box is left,lower,right,upper
return (x1 + "," + y1 + "," + x2 + "," + y2);
}
public static String pointToBoundingBox(double x1, double y1, double tolerance) {
// note: the bounding box is left,lower,right,upper
double x11 = x1 - tolerance;
double y11 = y1 - tolerance;
double x22 = x1 + tolerance;
double y22 = y1 + tolerance;
return OGCFormatter.buildBoundingBox(x11, y11, x22, y22);
}
public static void main(String [] args){
//http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver/wcs?service=WCS&version=1.0.0&request=GetCoverage&COVERAGE=aquamaps:WorldClimBio2&CRS=EPSG:4326&BBOX=-180,-90,180,90&WIDTH=640&HEIGHT=480&FORMAT=geotiff
String wcs = getWcsUrl("http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver", "aquamaps:WorldClimBio2", buildBoundingBox(-180, -85.5,180, 90));
System.out.println(wcs);
}
}

View File

@ -1,477 +0,0 @@
package org.gcube.dataanalysis.geo.meta.features;
import it.geosolutions.geonetwork.util.GNSearchRequest;
import it.geosolutions.geonetwork.util.GNSearchResponse;
import java.util.ArrayList;
import java.util.List;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.spatial.data.geonetwork.GeoNetwork;
import org.gcube.spatial.data.geonetwork.GeoNetworkReader;
import org.gcube.spatial.data.geonetwork.configuration.Configuration;
import org.gcube.spatial.data.geonetwork.configuration.ConfigurationManager;
import org.geotoolkit.metadata.iso.identification.DefaultDataIdentification;
import org.opengis.metadata.Metadata;
import org.opengis.metadata.citation.OnlineResource;
import org.opengis.metadata.distribution.DigitalTransferOptions;
import org.opengis.metadata.identification.Identification;
import org.opengis.metadata.identification.Resolution;
public class FeaturesManager {
// GeoNetwork endpoint and credentials; used only when no gCube scope is set
// (see initGeoNetworkReader). Alternative endpoints kept for reference:
// private String geonetworkUrl = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geonetwork/";
private String geonetworkUrl = "http://geoserver-last.d4science-ii.research-infrastructures.eu/geonetwork/";
// private String geonetworkUrl = "http://geoserver.d4science-ii.research-infrastructures.eu/geonetwork/";
private String geonetworkUser = "admin";
private String geonetworkPwd = "admin";
// gCube scope used to discover the GeoNetwork endpoint; null disables discovery
private String scope = "/gcube/devsec";

/** Returns the gCube scope used for GeoNetwork discovery (may be null). */
public String getScope() {
	return scope;
}

/** Sets the gCube scope; pass null to use the manual endpoint configuration. */
public void setScope(String scope) {
	this.scope = scope;
}
/**
 * Extracts a spatial resolution from the metadata's first identification
 * info: scans all declared spatial resolutions and keeps the largest
 * distance found (falling back to the equivalent scale when no distance is
 * declared). Returns 0 when nothing usable is present.
 */
public static double getResolution(Metadata meta) {
	double res = 0;
	try {
		DefaultDataIdentification ddi = (DefaultDataIdentification) meta.getIdentificationInfo().iterator().next();
		// keep the coarsest (largest) resolution among those declared
		for (Resolution r : ddi.getSpatialResolutions()) {
			// per-entry guard: one malformed entry must not abort the whole scan
			try {
				Double rr = r.getDistance();
				if (rr == null)
					rr = r.getEquivalentScale().doubleValue();
				if (rr != null && rr > res) {
					res = rr;
				}
			} catch (Exception e) {
				AnalysisLogger.getLogger().debug("Could not evaluate one spatial resolution entry");
			}
		}
	} catch (Exception e) {
		e.printStackTrace();
		AnalysisLogger.getLogger().debug("Could not get Data Identification");
	}
	AnalysisLogger.getLogger().debug("Calculated Resolution is:" + res);
	return res;
}
/**
 * Returns the GeoServer base URL ("...//host/geoserver") extracted from the
 * metadata's WMS, WFS or WCS access link — tried lazily in that order — or
 * null when no link contains a recognizable "/geoserver/" or "/geoserver?"
 * segment.
 */
public String getGeoserverLink(Metadata meta) {
	String link = extractGeoserverBase(getWMSLink(meta));
	if (link == null)
		link = extractGeoserverBase(getWFSLink(meta));
	if (link == null)
		link = extractGeoserverBase(getWCSLink(meta));
	if (link == null)
		System.out.println("NO GEOSERVER LINK WAS FOUND ACCORDING TO THE CRITERION");
	return link;
}

/**
 * Extracts the base URL up to and including "geoserver" from an OGC service
 * link, matching either "/geoserver/" or "/geoserver?". Returns null when
 * the link is null or the marker is absent (or at position 0, as in the
 * original logic).
 */
private static String extractGeoserverBase(String serviceLink) {
	if (serviceLink == null)
		return null;
	String geoserverString = "/geoserver/";
	String geoserverEndString = "/geoserver?";
	int idx = serviceLink.indexOf(geoserverString);
	if (idx < 0)
		idx = serviceLink.indexOf(geoserverEndString);
	if (idx > 0)
		return serviceLink.substring(0, idx + geoserverString.length() - 1);
	return null;
}
/**
 * Scans every distribution transfer option for an online resource whose URL
 * contains the given criterion (case-insensitive). For each option only the
 * first matching resource is considered, and a later option's match
 * overwrites an earlier one. Returns null (with a console notice) when no
 * resource matches.
 */
private String searchInUrl(Metadata meta, String criterion) {
	String found = null;
	String loweredCriterion = criterion.toLowerCase();
	for (DigitalTransferOptions option : meta.getDistributionInfo().getTransferOptions()) {
		for (OnlineResource resource : option.getOnLines()) {
			String candidate = resource.getLinkage().toString();
			if (candidate.toLowerCase().contains(loweredCriterion)) {
				found = candidate;
				break;
			}
		}
	}
	if (found == null)
		System.out.println("NO ONLINE LINK WAS FOUND ACCORDING TO THE CRITERION :" + criterion);
	return found;
}
/**
 * Looks for a layer name among the online resources of the distribution
 * transfer options: for each option the first non-null resource name is
 * taken, and a later option's value overwrites an earlier one (mirrors
 * searchInUrl). Returns null (with a console notice) when no resource
 * declares a name.
 */
private String searchLayerNameInMeta(Metadata meta) {
	String found = null;
	for (DigitalTransferOptions option : meta.getDistributionInfo().getTransferOptions()) {
		for (OnlineResource resource : option.getOnLines()) {
			String candidate = resource.getName();
			if (candidate != null) {
				found = candidate;
				break;
			}
		}
	}
	if (found == null)
		System.out.println("NO LAYER NAME WAS FOUND IN TRANSFER OPTIONS");
	return found;
}
/** Returns a WFS access URL found among the metadata's online resources, or null. */
public String getWFSLink(Metadata meta) {
	return searchInUrl(meta, "service=wfs");
}

/** Returns a WMS access URL found among the metadata's online resources, or null. */
public String getWMSLink(Metadata meta) {
	return searchInUrl(meta, "service=wms");
}

/** Returns a WCS access URL found among the metadata's online resources, or null. */
public String getWCSLink(Metadata meta) {
	return searchInUrl(meta, "service=wcs");
}

/** Returns a THREDDS OPeNDAP ("/dodsC") URL found among the online resources, or null. */
public String getOpenDapLink(Metadata meta) {
	return searchInUrl(meta, "/dodsC");
}

/** Returns a THREDDS catalog ("catalog.xml") URL found among the online resources, or null. */
public String getThreddsLink(Metadata meta) {
	return searchInUrl(meta, "catalog.xml");
}
/**
 * Retrieves the layer name for the metadata: first by parsing the "layers="
 * parameter out of the WMS link, otherwise by scanning the online-resource
 * names. Returns null when no WMS link exists at all.
 */
public String getLayerName(Metadata meta) {
	AnalysisLogger.getLogger().debug("Retrieving Layer Name");
	String wmslink = getWMSLink(meta);
	if (wmslink == null)
		return null;
	AnalysisLogger.getLogger().debug("WMS layer found!");
	String finder = "layers=";
	int idxfinder = wmslink.indexOf(finder);
	if (idxfinder > 0) {
		AnalysisLogger.getLogger().debug("Searching for Layer Name inside the WMS Link");
		// isolate the value between "layers=" and the next '&' (or end of string)
		String tail = wmslink.substring(idxfinder);
		int andIdx = tail.indexOf("&");
		if (andIdx < 0)
			andIdx = tail.length();
		return tail.substring(finder.length(), andIdx).trim();
	}
	// the layer name is not embedded in the WMS link: look it up in the metadata
	AnalysisLogger.getLogger().debug("Searching for Layer Name inside the file");
	return searchLayerNameInMeta(meta);
}
/** A layer is considered THREDDS-hosted when an OPeNDAP endpoint is advertised. */
public boolean isThreddsFile(Metadata meta) {
	String opendapLink = getOpenDapLink(meta);
	return opendapLink != null;
}
/**
 * Creates a {@link GeoNetworkReader}. When a gCube scope is set, the
 * GeoNetwork endpoint is discovered through the {@code ScopeProvider};
 * otherwise the manual geonetworkUrl/user/pwd fields of this instance are
 * installed as the global configuration before the reader is obtained.
 */
public GeoNetworkReader initGeoNetworkReader() throws Exception {
	if (scope != null)
		ScopeProvider.instance.set(scope);
	else {
		AnalysisLogger.getLogger().debug("Features Manager: Using manual configuration of GeoNetwork");
		// no scope: install an ad-hoc configuration backed by this instance's fields
		ConfigurationManager.setConfiguration(new Configuration() {
			@Override
			public String getGeoNetworkUser() {
				return geonetworkUser;
			}

			@Override
			public String getGeoNetworkPassword() {
				return geonetworkPwd;
			}

			@Override
			public String getGeoNetworkEndpoint() {
				return geonetworkUrl;
			}
		});
	}
	GeoNetworkReader gn = GeoNetwork.get();
	return gn;
}
/** Returns the GeoNetwork endpoint resolved from the current scope/configuration. */
public String getGeonetworkURLFromScope() throws Exception {
	GeoNetworkReader gn = initGeoNetworkReader();
	return gn.getConfiguration().getGeoNetworkEndpoint();
}

/** Returns the GeoNetwork user resolved from the current scope/configuration. */
public String getGeonetworkUserFromScope() throws Exception {
	GeoNetworkReader gn = initGeoNetworkReader();
	return gn.getConfiguration().getGeoNetworkUser();
}

/** Returns the GeoNetwork password resolved from the current scope/configuration. */
public String getGeonetworkPasswordFromScope() throws Exception {
	GeoNetworkReader gn = initGeoNetworkReader();
	return gn.getConfiguration().getGeoNetworkPassword();
}
/**
 * Searches GeoNetwork by title and returns the first metadata record that
 * can actually be retrieved, or null when the search yields nothing usable.
 */
private Metadata getGNInfobyTitle(String info) throws Exception {
	GeoNetworkReader gn = initGeoNetworkReader();
	gn.login();
	// build and run the title query
	GNSearchRequest req = new GNSearchRequest();
	req.addParam(GNSearchRequest.Param.title, info);
	// req.addConfig(GNSearchRequest.Config.similarity, "1");
	GNSearchResponse resp = gn.query(req);
	if (resp.getCount() == 0)
		return null;
	// return the first record whose full metadata can be fetched
	for (GNSearchResponse.GNMetadata metadata : resp) {
		try {
			return gn.getById(metadata.getUUID());
		} catch (Exception e) {
			AnalysisLogger.getLogger().debug("Error retrieving information for some metadata");
		}
	}
	return null;
}
/**
 * Searches GeoNetwork by title with the given similarity tolerance and
 * returns every metadata record that can be retrieved (possibly empty;
 * unreadable records are skipped with a debug log).
 */
public List<Metadata> getAllGNInfobyTitle(String info, String tolerance) throws Exception {
	GeoNetworkReader gn = initGeoNetworkReader();
	gn.login();
	// build and run the title query
	GNSearchRequest req = new GNSearchRequest();
	req.addParam(GNSearchRequest.Param.title, info);
	req.addConfig(GNSearchRequest.Config.similarity, tolerance);
	GNSearchResponse resp = gn.query(req);
	List<Metadata> metadatalist = new ArrayList<Metadata>();
	if (resp.getCount() == 0)
		return metadatalist;
	for (GNSearchResponse.GNMetadata metadata : resp) {
		try {
			metadatalist.add(gn.getById(metadata.getUUID()));
		} catch (Exception e) {
			AnalysisLogger.getLogger().debug("Error retrieving information for some metadata");
		}
	}
	return metadatalist;
}
/**
 * Free-text search on GeoNetwork (any field) with a similarity tolerance,
 * returning every metadata record that can be retrieved.
 *
 * @param info      the text to search for in any metadata field
 * @param tolerance the similarity configuration value (e.g. "1" for exact)
 * @return the list of retrievable metadata records (possibly empty)
 * @throws Exception if the GeoNetwork reader cannot be initialized or logged in
 */
public List<Metadata> getAllGNInfobyText(String info, String tolerance) throws Exception {
	GeoNetworkReader reader = initGeoNetworkReader();
	reader.login();
	GNSearchRequest request = new GNSearchRequest();
	request.addParam(GNSearchRequest.Param.any, info);
	request.addConfig(GNSearchRequest.Config.similarity, tolerance);
	GNSearchResponse response = reader.query(request);
	List<Metadata> collected = new ArrayList<Metadata>();
	if (response.getCount() != 0) {
		for (GNSearchResponse.GNMetadata candidate : response) {
			try {
				// Fetch the full record; skip entries that cannot be resolved.
				collected.add(reader.getById(candidate.getUUID()));
			} catch (Exception e) {
				AnalysisLogger.getLogger().debug("Error retrieving information for some metadata");
			}
		}
	}
	return collected;
}
/**
 * Searches GeoNetwork with a (possibly simplified) title and stops at the
 * first record whose full title matches {@code completeTitle} ignoring case.
 *
 * @param info          the simplified search string
 * @param completeTitle the full title the candidate must match
 * @param tolerance     the similarity configuration value
 * @return a list holding at most the single matching record (possibly empty)
 * @throws Exception if the GeoNetwork reader cannot be initialized or logged in
 */
private List<Metadata> getFastGNInfobyTitle(String info, String completeTitle, String tolerance) throws Exception {
	GeoNetworkReader reader = initGeoNetworkReader();
	reader.login();
	GNSearchRequest request = new GNSearchRequest();
	request.addParam(GNSearchRequest.Param.title, info);
	request.addConfig(GNSearchRequest.Config.similarity, tolerance);
	GNSearchResponse response = reader.query(request);
	List<Metadata> matches = new ArrayList<Metadata>();
	if (response.getCount() != 0) {
		AnalysisLogger.getLogger().debug("Retrieving information ...");
		for (GNSearchResponse.GNMetadata candidate : response) {
			try {
				Metadata fetched = reader.getById(candidate.getUUID());
				Identification identification = fetched.getIdentificationInfo().iterator().next();
				String candidateTitle = identification.getCitation().getTitle().toString();
				// Stop at the first exact (case-insensitive) title match.
				if (candidateTitle.equalsIgnoreCase(completeTitle)) {
					AnalysisLogger.getLogger().debug("Found UUID:" + candidate.getUUID());
					matches.add(fetched);
					break;
				}
			} catch (Exception e) {
				AnalysisLogger.getLogger().debug("Error retrieving information for some metadata");
			}
		}
		AnalysisLogger.getLogger().debug("Information Successfully Retrieved");
	}
	return matches;
}
/**
 * Retrieves a metadata record directly by its GeoNetwork UUID.
 *
 * @param UUID the metadata identifier
 * @return the metadata record
 * @throws Exception if login fails or no record exists for the UUID
 */
private Metadata getGNInfobyUUID(String UUID) throws Exception {
	GeoNetworkReader reader = initGeoNetworkReader();
	reader.login();
	Metadata fetched = reader.getById(UUID);
	AnalysisLogger.getLogger().debug("Layer with UUID: " + UUID + " successfully Retrieved!");
	return fetched;
}
/**
 * Resolves a layer reference that may be either a GeoNetwork UUID or a title.
 * The UUID lookup is attempted first; on failure a title-based search is run.
 *
 * @param layerUUIDorTitle a metadata UUID or a layer title
 * @return the resolved metadata (may be null if the title search finds nothing)
 * @throws Exception if the title-based fallback itself fails
 */
public Metadata getGNInfobyUUIDorName(String layerUUIDorTitle) throws Exception {
	AnalysisLogger.getLogger().debug("MapsComparator: Getting layer with UUID..." + layerUUIDorTitle);
	Metadata resolved = null;
	try {
		resolved = getGNInfobyUUID(layerUUIDorTitle);
	} catch (Exception e) {
		AnalysisLogger.getLogger().debug("MapsComparator: Impossible to get layer as UUID");
	}
	if (resolved != null)
		return resolved;
	// Fall back to searching by (normalized) title.
	AnalysisLogger.getLogger().debug("MapsComparator: NO UUID Available - Trying with NAME..." + layerUUIDorTitle);
	try {
		return checkForMetadatabyTitle(FeaturesManager.treatTitleForGN(layerUUIDorTitle), layerUUIDorTitle);
	} catch (Exception e) {
		throw new Exception("Layer does not exist");
	}
}
/**
 * Convenience overload of {@link #checkForMetadatabyTitle(String, String, String)}
 * with an empty filename filter.
 */
private Metadata checkForMetadatabyTitle(String searchString, String completetitle) throws Exception {
	String noFilenameFilter = "";
	return checkForMetadatabyTitle(searchString, completetitle, noFilenameFilter);
}
/**
 * Searches GeoNetwork with a simplified title and returns the first record
 * whose complete title matches exactly (ignoring case).
 *
 * @param searchString  the simplified search string
 * @param completetitle the full title to match against
 * @param filename      currently unused; kept for signature compatibility
 *                      (a keyword-based filename filter was disabled)
 * @return the matching metadata record, or null if none matches
 * @throws Exception if the underlying GeoNetwork search fails
 */
private Metadata checkForMetadatabyTitle(String searchString, String completetitle, String filename) throws Exception {
	AnalysisLogger.getLogger().debug("Searching for: " + searchString);
	List<Metadata> candidates = getFastGNInfobyTitle(searchString, completetitle, "1");
	AnalysisLogger.getLogger().debug("Found:" + candidates.size() + " results");
	for (Metadata candidate : candidates) {
		Identification identification = candidate.getIdentificationInfo().iterator().next();
		String candidateTitle = identification.getCitation().getTitle().toString();
		if (completetitle.equalsIgnoreCase(candidateTitle))
			return candidate;
	}
	return null;
}
/** @return the configured GeoNetwork base URL. */
public String getGeonetworkUrl() {
return geonetworkUrl;
}
/** Sets the GeoNetwork base URL to contact. */
public void setGeonetworkUrl(String geonetworkUrl) {
this.geonetworkUrl = geonetworkUrl;
}
/** @return the GeoNetwork user name. */
public String getGeonetworkUser() {
return geonetworkUser;
}
/** Sets the GeoNetwork user name used for login. */
public void setGeonetworkUser(String geonetworkUser) {
this.geonetworkUser = geonetworkUser;
}
/** @return the GeoNetwork password. */
public String getGeonetworkPwd() {
return geonetworkPwd;
}
/** Sets the GeoNetwork password used for login. */
public void setGeonetworkPwd(String geonetworkPwd) {
this.geonetworkPwd = geonetworkPwd;
}
/**
 * Normalizes a layer title into a simplified lower-case search string for
 * GeoNetwork: cuts the title at the first time-range marker (" from [",
 * " in [") or opening parenthesis, strips parenthesized suffixes, drops
 * leading non-letter tokens (dates, codes), and replaces punctuation with
 * single spaces.
 *
 * Fix over the previous version: the per-branch {@code toLowerCase()} calls
 * were redundant (the title is lower-cased once up front) and the three cut
 * branches duplicated the same substring logic.
 *
 * @param origLayerTitle the raw layer title (e.g. "temperature (file.nc)")
 * @return the simplified search string (e.g. "temperature")
 */
public static String treatTitleForGN(String origLayerTitle) {
	String layerTitle = origLayerTitle.toLowerCase();
	// Find the first cut marker, in order of specificity; an index of 0 is
	// treated as "not found" (same as the original behavior).
	int idx = layerTitle.indexOf(" from [");
	if (idx <= 0)
		idx = layerTitle.indexOf(" in [");
	if (idx <= 0)
		idx = layerTitle.indexOf("(");
	String core = layerTitle;
	if (idx > 0)
		core = layerTitle.substring(0, idx).trim();
	// Remove any remaining parenthesized fragment, then break common separators.
	core = core.replaceAll("(\\(.*\\))", " ");
	core = core.replace("_", " ").replace("-", " ").replace("(", " ").replace(")", " ");
	String punct = "[!\"#$%&'*+,./:;<=>?@\\^_`{|}~-]";
	// Drop space-separated (or leading) tokens made purely of non-letters.
	core = core.replaceAll("( |^)+[^A-Za-z]+(" + punct + ")*[^A-Za-z]*", " ").trim();
	// Replace residual punctuation with spaces and collapse blank runs.
	return core.replaceAll(punct, " ").replaceAll("( )+", " ");
}
/**
 * Ad-hoc check: resolves a sample layer by title and prints which service
 * links (OPeNDAP, WCS, WMS, THREDDS) can be derived from its metadata.
 */
public static void main1(String args[]) throws Exception {
	String title = "geopotential height";
	FeaturesManager manager = new FeaturesManager();
	Metadata meta = manager.getGNInfobyTitle(title);
	System.out.println("is file? " + manager.isThreddsFile(meta));
	System.out.println("opendap: " + manager.getOpenDapLink(meta));
	System.out.println("wcs:" + manager.getWCSLink(meta));
	System.out.println("wms:" + manager.getWMSLink(meta));
	System.out.println("thredds:" + manager.getThreddsLink(meta));
}
/**
 * Quick manual check of the title-normalization routine.
 */
public static void main(String args[]) throws Exception {
	String sample = "sea/land/lake/ice field composite mask from";
	System.out.println(treatTitleForGN(sample));
}
}

View File

@ -1,326 +0,0 @@
package org.gcube.dataanalysis.geo.retrieval;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.meta.features.FeaturesManager;
import org.gcube.dataanalysis.geo.utils.EnvDataExplorer;
import org.gcube.dataanalysis.geo.utils.FeaturedPolygon;
import org.gcube.dataanalysis.geo.utils.ThreddsDataExplorer;
import org.opengis.metadata.Metadata;
import org.opengis.metadata.identification.Identification;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.Point;
import com.vividsolutions.jts.geom.PrecisionModel;
import com.vividsolutions.jts.geom.impl.CoordinateArraySequence;
public class GeoIntersector {
private FeaturesManager featurer;
private String configDir;
public GeoIntersector(String scope, String cfgDir) {
featurer = new FeaturesManager();
featurer.setScope(scope);
this.configDir=cfgDir;
}
public FeaturesManager getFeaturer(){
return featurer;
}
public LinkedHashMap<String, Double> getFeaturesInTime(String layerTitle, double x, double y) throws Exception {
return getFeaturesInAllTimes(layerTitle, x, y, 0);
}
public LinkedHashMap<String, Double> getFeaturesInAllTimes(String layerTitle, double x, double y, double z) throws Exception {
LinkedHashMap<String, Double> features = new LinkedHashMap<String, Double>();
// get the layer
// Metadata meta = featurer.getGNInfobyTitle(layerTitle);
Metadata meta = featurer.getGNInfobyUUIDorName(layerTitle);
// if the layer is good
if (meta != null) {
String layer = featurer.getLayerName(meta);
if (layer == null)
layer = layerTitle;
// check if it is a NetCDF
if (featurer.isThreddsFile(meta)) {
Identification id = meta.getIdentificationInfo().iterator().next();
String title = id.getCitation().getTitle().toString();
AnalysisLogger.getLogger().debug("found a netCDF file with title " + title + " and layer name " + layer);
features = getFeaturesFromNetCDF(featurer.getOpenDapLink(meta), layer, x, y, z);
} else {
AnalysisLogger.getLogger().debug("found a Geo Layer with title " + layerTitle + " and layer name " + layer);
features = getFeaturesFromWFS(featurer.getWFSLink(meta), layer, x, y);
}
}
return features;
}
public List<Double> getFeaturesInTimeInstant(String layerTitle, int time, List<Tuple<Double>> triplets, double xL,double xR, double yL, double yR) throws Exception {
List<Double> features = new ArrayList<Double>();
// get the layer
Metadata meta = featurer.getGNInfobyUUIDorName(layerTitle);
// if the layer is good
if (meta != null) {
String layer = featurer.getLayerName(meta);
if (layer == null)
layer = layerTitle;
// check if it is a NetCDF
if (featurer.isThreddsFile(meta)) {
Identification id = meta.getIdentificationInfo().iterator().next();
String title = id.getCitation().getTitle().toString();
AnalysisLogger.getLogger().debug("found a netCDF file with title " + title + " and layer name " + layer);
features = getFeaturesFromNetCDF(featurer.getOpenDapLink(meta), layer, time, triplets, xL,xR, yL, yR);
/*
for (Tuple<Double> triplet : triplets) {
double x = triplet.getElements().get(0);
double y = triplet.getElements().get(1);
double z = 0;
if (triplet.getElements().size() > 2)
z = triplet.getElements().get(2);
AnalysisLogger.getLogger().debug("Taking point: (" + x + "," + y + "," + z + ")");
LinkedHashMap<String, Double> features = new LinkedHashMap<String, Double>();
features = getFeaturesFromNetCDF(featurer.getOpenDapLink(meta), layer, x, y, z);
AnalysisLogger.getLogger().debug("Got: (" + features + ")");
featuresSets.add(features);
}
*/
} else {
AnalysisLogger.getLogger().debug("found a Geo Layer with reference " + layerTitle + " and layer name " + layer);
// AnalysisLogger.getLogger().debug("Taking point: (" + x + "," + y + ")");
List<FeaturedPolygon> featuresInTime = new ArrayList<FeaturedPolygon>();
AnalysisLogger.getLogger().debug("taking WFS features");
featuresInTime = getFeaturesFromWFS(featurer.getGeoserverLink(meta), layer, xL,yL, xR, yR);
int tsize = triplets.size();
AnalysisLogger.getLogger().debug("Intersecting "+tsize+" vs "+featuresInTime.size() +" elements");
int ttc= 0;
Double[] featuresarray = new Double[tsize];
int k=0;
long t0=0;
GeometryFactory factory = new GeometryFactory(new PrecisionModel(), 4326);
// Tuple[] tripletss = new Tuple[tsize];
// tripletss = triplets.toArray(tripletss);
// for (Tuple<Double> triplet:triplets){
for (Tuple<Double> triplet:triplets){
ArrayList<Double> elements = triplet.getElements();
CoordinateArraySequence pcoords = new CoordinateArraySequence(new Coordinate[] { new Coordinate(elements.get(0),elements.get(1)),});
Point po = new Point(pcoords, factory);
boolean found = false;
for (FeaturedPolygon poly:featuresInTime){
if (poly!=null && poly.p!=null && poly.p.covers(po)){
// System.out.println(po+" intersected by "+poly.p+ " assigning value "+poly.value);
// features.add(poly.value);
featuresarray[k] = poly.value;
found = true;
break;
}
}
po = null;
if (!found){
// features.add(Double.NaN);
featuresarray[k] = Double.NaN;
}
if (ttc%10000==0){
AnalysisLogger.getLogger().debug("Status: "+((double)ttc*100d/(double)tsize));
// System.out.println("::"+((System.currentTimeMillis()-t0)/1000));
}
ttc++;
k++;
// if (ttc%1000==0)
// t0 = System.currentTimeMillis();
}
features = Arrays.asList(featuresarray);
}
}
return features;
}
private List<Double> getFeaturesFromNetCDF(String opendapURL, String layer, int time, List<Tuple<Double>> triplets, double xL,double xR, double yL, double yR) {
if (opendapURL == null)
return null;
return ThreddsDataExplorer.retrieveDataFromNetCDF(opendapURL, layer, time, triplets, xL,xR, yL, yR);
}
private LinkedHashMap<String, Double> getFeaturesFromNetCDF(String opendapURL, String layer, double x, double y, double z) {
if (opendapURL == null)
return null;
return ThreddsDataExplorer.retrieveDataFromNetCDF(opendapURL, layer, x, y, z);
}
private LinkedHashMap<String, Double> getFeaturesFromWFS(String geoserverUrl, String layer, double x, double y) {
if (geoserverUrl == null)
return null;
return EnvDataExplorer.getFeatures(geoserverUrl, layer, x, y);
}
private List<FeaturedPolygon> getFeaturesFromWFS(String geoserverUrl, String layer, double xL,double yL,double xR, double yR) {
if (geoserverUrl == null)
return null;
return EnvDataExplorer.getFeatures(geoserverUrl, layer, xL,yL,xR, yR);
}
public static List<Tuple<Double>> generateCoordinateTriplets(double x1, double x2, double y1, double y2, double z, double xResolution, double yResolution){
int ysteps = (int) ((y2 - y1) / yResolution);
int xsteps = (int) ((x2 - x1) / xResolution);
List<Tuple<Double>> tuples = new ArrayList<Tuple<Double>>();
AnalysisLogger.getLogger().debug("Building the points grid according to YRes:" + yResolution + " and XRes:" + xResolution);
// build the tuples according to the desired resolution
for (int i = 0; i < ysteps + 1; i++) {
double y = (i * yResolution) + y1;
if (i == ysteps)
y = y2;
for (int j = 0; j < xsteps + 1; j++) {
double x = (j * xResolution) + x1;
if (j == xsteps)
x = x2;
tuples.add(new Tuple<Double>(x, y, z));
}
}
return tuples;
}
public static List<Double> associateValueToCoordinates(List<Tuple<Double>> coordinates, double[][] data){
List<Double> values = new ArrayList<Double>();
int k = 0;
int g = 0;
int ntriplets = coordinates.size();
int xsteps = data[0].length-1;
for (int t = 0; t < ntriplets; t++) {
values.add(data[k][g]);
if (g == xsteps) {
g = 0;
k++;
}
else
g++;
}
return values;
}
public double[][] takeTimeSlice(String layerTitle, int timeInstant, double x1, double x2, double y1, double y2, double z, double xResolution, double yResolution) throws Exception {
AnalysisLogger.getLogger().debug("Bounding box: (" + x1 + "," + x2 + ";" + y1 + "," + y2 + ")");
boolean faolayer = false;
if (layerTitle.toLowerCase().contains("fao aquatic species distribution map") )
{
AnalysisLogger.getLogger().debug("FAO DISTRIBUTION LAYER ... TO APPY PATCH!");
faolayer=true;
}
if ((x2 < x1) || (y2 < y1)) {
AnalysisLogger.getLogger().debug("ERROR: BAD BOUNDING BOX!!!");
return new double[0][0];
}
int ysteps = (int) ((y2 - y1) / yResolution);
int xsteps = (int) ((x2 - x1) / xResolution);
double[][] slice = new double[ysteps + 1][xsteps + 1];
List<Tuple<Double>> tuples = new ArrayList<Tuple<Double>>();
AnalysisLogger.getLogger().debug("Building the points grid according to YRes:" + yResolution + " and XRes:" + xResolution);
// build the tuples according to the desired resolution
for (int i = 0; i < ysteps + 1; i++) {
double y = (i * yResolution) + y1;
if (i == ysteps)
y = y2;
for (int j = 0; j < xsteps + 1; j++) {
double x = (j * xResolution) + x1;
if (j == xsteps)
x = x2;
tuples.add(new Tuple<Double>(x, y, z));
}
}
AnalysisLogger.getLogger().debug("Taking " + ysteps + " values per "+xsteps+"="+(ysteps*xsteps)+ "...");
List<Double> timeValues = getFeaturesInTimeInstant(layerTitle, timeInstant, tuples, x1, x2, y1,y2);
AnalysisLogger.getLogger().debug("Taken " + timeValues.size() + " values");
// build back the values matrix
int k = 0;
int g = 0;
int ntriplets = timeValues.size();
//cycle on all the triplets to recontruct the matrix
for (int t = 0; t < ntriplets; t++) {
//take the corresponding (time,value) pair
Double value = timeValues.get(t);
//if there is value, then set it, otherwise set NaN
//the layer is undefined in that point and a value must be generated
//assign a value to the matrix
//WARNING: PATCH FOR FAO LAYERS:. Probability can be equal to 2 for uncertainty (Kolmogorov, forgive them for they know not what they do)
if (faolayer && (value>1)){
AnalysisLogger.getLogger().debug("APPLYING FAO PATCH!");
slice[k][g] = 0.5;
}
else
slice[k][g] = value;
//increase the x step according to the matrix
if (g == xsteps) {
g = 0;
k++;
}
else
g++;
}
/*
AnalysisLogger.getLogger().debug("Applying nearest Neighbor to all the rows");
//apply nearest neighbor to each row
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath(configDir);
boolean rapidinit = false;
for (int i=0;i<slice.length;i++){
// AnalysisLogger.getLogger().debug("Checking for unfilled values");
boolean tofill = false;
for (int j=0;j<slice[i].length;j++) {
if (new Double(slice[i][j]).equals(Double.NaN))
tofill = true;
}
if (tofill){
if (!rapidinit){
config.initRapidMiner();
rapidinit=true;
}
AnalysisLogger.getLogger().debug("Filling signal");
double[] ssliced = SignalProcessing.fillSignal(slice[i]);
slice[i] = ssliced;
}
// else
// AnalysisLogger.getLogger().debug("Signal yet complete");
}
*/
AnalysisLogger.getLogger().debug("Features map: "+slice.length+","+slice[0].length);
return slice;
}
}

View File

@ -1,22 +0,0 @@
package org.gcube.dataanalysis.geo.test;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.retrieval.GeoIntersector;
/**
 * Manual smoke test: extracts a full-world, half-degree time slice from a
 * World Ocean Atlas temperature layer and reports the elapsed time.
 */
public class TestChunkization {

	static String cfg = "./cfg/";

	public static void main(String[] args) throws Exception {
		String layertitle = "Statistical Mean in [07-01-01 01:00] (3D) {World Ocean Atlas 09: Sea Water Temperature - annual: dods://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/temperature_annual_1deg_ENVIRONMENT_OCEANS_.nc}";
		long startMillis = System.currentTimeMillis();
		AnalysisLogger.setLogger(cfg + AlgorithmConfiguration.defaultLoggerFile);
		GeoIntersector intersector = new GeoIntersector(null, cfg);
		// Whole globe, surface level, first time instant, 0.5 degree resolution.
		intersector.takeTimeSlice(layertitle, 0, -180, 180, -90, 90, 0, 0.5, 0.5);
		System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - startMillis));
	}
}

View File

@ -1,22 +0,0 @@
package org.gcube.dataanalysis.geo.test;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.retrieval.GeoIntersector;
/**
 * Manual smoke test: extracts a full-world, half-degree time slice from a
 * FAO species distribution layer and reports the elapsed time.
 */
public class TestChunkizationLayer {

	static String cfg = "./cfg/";

	public static void main(String[] args) throws Exception {
		String layertitle = "FAO AQUATIC SPECIES DISTRIBUTION MAP OF MEGALASPIS CORDYLA";
		long startMillis = System.currentTimeMillis();
		AnalysisLogger.setLogger(cfg + AlgorithmConfiguration.defaultLoggerFile);
		GeoIntersector intersector = new GeoIntersector(null, cfg);
		// Whole globe, surface level, first time instant, 0.5 degree resolution.
		intersector.takeTimeSlice(layertitle, 0, -180, 180, -90, 90, 0, 0.5, 0.5);
		System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - startMillis));
	}
}

View File

@ -1,92 +0,0 @@
package org.gcube.dataanalysis.geo.test;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.meta.features.FeaturesManager;
import org.gcube.dataanalysis.geo.retrieval.GeoIntersector;
import org.geotoolkit.metadata.iso.identification.DefaultDataIdentification;
import org.opengis.metadata.Metadata;
import org.opengis.metadata.identification.Identification;
import org.opengis.metadata.identification.Keywords;
import org.opengis.util.InternationalString;
/**
 * Manual report generator: queries GeoNetwork for all "thredds" layers and
 * prints a '#'-separated summary table (parameter name, time range,
 * dimensions, unit, resolution, details), de-duplicated and sorted.
 */
public class TestLayersRetrieval {
static String cfg = "./cfg/";
//TODO: filter WoA names and attach them to the title
public static void main(String[] args) throws Exception{
long t0 = System.currentTimeMillis();
AnalysisLogger.setLogger(cfg+AlgorithmConfiguration.defaultLoggerFile);
FeaturesManager featurer = new FeaturesManager();
featurer.setScope(null);
// Free-text search for every layer mentioning "thredds" (exact similarity).
List<Metadata> metae = featurer.getAllGNInfobyText("thredds", "1");
System.out.println("ELAPSED TIME: "+(System.currentTimeMillis()-t0));
String d = "#";
System.out.println("Parameter Name"+d+"Time Range"+d+"Dimensions"+d+"Unit of Measure"+d+"Resolution (decimal degrees)"+d+"Details");
List<String> table = new ArrayList<String>();
for (Metadata meta:metae){
Identification id = meta.getIdentificationInfo().iterator().next();
String title = id.getCitation().getTitle().toString();
// NOTE(review): assumes the identification is a DefaultDataIdentification
// with at least one spatial resolution — confirm for non-Geotoolkit records.
DefaultDataIdentification did = (DefaultDataIdentification) id;
double resolution = MathFunctions.roundDecimal(did.getSpatialResolutions().iterator().next().getDistance(),3);
Collection<? extends Keywords> keys = id.getDescriptiveKeywords();
String unit = "";
// The unit of measure is encoded as a "unit:<value>" keyword.
for (Keywords key:keys){
for(InternationalString string:key.getKeywords()) {
String ss = string.toString();
if (ss.startsWith("unit:"))
unit = ss.substring(ss.indexOf(":")+1);
}
}
String[] elements = parseTitle(title);
String entry = elements[0]+d+elements[1]+d+elements[2]+d+unit+d+resolution+d+elements[3];
// De-duplicate identical rows before printing.
if (!table.contains(entry)){
table.add(entry);
// System.out.println(elements[0]+d+elements[1]+d+elements[2]+d+resolution+d+elements[3]);
}
}
Collections.sort(table);
for (String element:table){
System.out.println(element);
}
// System.out.println("ELAPSED TIME: "+(System.currentTimeMillis()-t0));
}
/** Ad-hoc check of {@link #parseTitle(String)} on a sample title. */
public static void main1(String[] args) throws Exception{
// String example = "Standard Deviation from Statistical Mean in [07-01-01 01:00] (3D) {World Ocean Atlas 09: Sea Water Temperature - annual: dods://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/temperature_annual_1deg_ENVIRONMENT_OCEANS_.nc}";
// String example = "Salinity from [12-15-99 01:00] to [12-15-09 01:00] (2D) {Native grid ORCA025.L75 monthly average: Data extracted from dataset http://atoll-mercator.vlandata.cls.fr:44080/thredds/dodsC/global-reanalysis-phys-001-004-b-ref-fr-mjm95-grids}";
String example = "Salinity from [12-15-99 01:00] to [12-15-09 01:00] (2D) {Native grid ORCA025.L75 monthly average: Data extracted from dataset http://atoll-mercator.vlandata.cls.fr:44080/thredds/dodsC/global-reanalysis-phys-001-004-b-ref-fr-mjm95-grids}";
parseTitle(example);
}
/**
 * Splits a layer title of the form
 * "&lt;name&gt; from [t0] to [t1] (&lt;dims&gt;) {&lt;notes&gt;}"
 * into {name (with WoA dataset prefix when present), timerange, dimensions, notes}.
 * Titles missing any of the '[', '(', '{' markers will throw
 * StringIndexOutOfBoundsException — callers pass well-formed titles only.
 */
public static String[] parseTitle(String title){
// Time range: everything between the first '[' and the last ']', inclusive.
String timerange = title.substring(title.indexOf("["),title.lastIndexOf("]")+1);
// timerange = timerange.replace("] to [", " ; ");
// System.out.println(timerange);
// Name: text before the first '[', then drop the trailing "from"/"in" word.
String realtitle = title.substring(0,title.indexOf("[")).trim();
realtitle = realtitle.substring(0,realtitle.lastIndexOf(" ")).trim();
// System.out.println(realtitle);
// Dimensions: the token inside the "(...)" right after the time range.
String dimensions = title.substring(title.indexOf("] (")+3);
dimensions = dimensions.substring(0,dimensions.indexOf(")")).trim();
// System.out.println(dimensions);
// Notes: the "{...}" payload; for World Ocean Atlas entries the dataset
// name is promoted to a prefix of the parameter name.
String notes = title.substring(title.indexOf("{")+1,title.lastIndexOf("}"));
String woa = "World Ocean Atlas 09:";
String prefixnote = "";
if (notes.startsWith(woa)){
prefixnote = notes.substring(woa.length()+1);
prefixnote = prefixnote.substring(0,prefixnote.indexOf(":")).trim()+": ";
}
// System.out.println(notes);
String[] elements = new String[]{prefixnote+realtitle, timerange, dimensions,notes};
return elements;
}
}

View File

@ -1,26 +0,0 @@
package org.gcube.dataanalysis.geo.test;
import org.gcube.dataanalysis.geo.meta.NetCDFMetadata;
/**
 * Manual test: publishes ISO metadata for a sample THREDDS-hosted NetCDF
 * temperature layer into a development GeoNetwork instance.
 */
public class TestNetCDFMetadataInsert {

	public static void main(String[] args) throws Exception {
		NetCDFMetadata inserter = new NetCDFMetadata();
		// Target GeoNetwork instance and credentials.
		inserter.setGeonetworkUrl("http://geoserver-dev.d4science-ii.research-infrastructures.eu/geonetwork/");
		inserter.setGeonetworkUser("admin");
		inserter.setGeonetworkPwd("admin");
		// Source THREDDS catalog, OPeNDAP endpoint and layer description.
		inserter.setThreddsCatalogUrl("http://thredds.research-infrastructures.eu/thredds/catalog/public/netcdf/catalog.xml");
		inserter.setLayerUrl("http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/04091217_ruc.nc");
		inserter.setTitle("temperature (04091217ruc.nc)");
		inserter.setLayerName("T");
		inserter.setSourceFileName("04091217_ruc.nc");
		inserter.setAbstractField("T: temperature (degK) from 04091217ruc.nc resident on a THREDDS instance");
		// Spatial extent and resolution of the layer.
		inserter.setResolution(0.5);
		inserter.setXLeftLow(-180);
		inserter.setYLeftLow(-85.5);
		inserter.setXRightUpper(180);
		inserter.setYRightUpper(85.5);
		inserter.insertMetaData();
	}
}

View File

@ -1,51 +0,0 @@
package org.gcube.dataanalysis.geo.test;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.insertion.RasterTable;
import org.gcube.dataanalysis.geo.retrieval.GeoIntersector;
/**
 * Manual test: extracts a world-wide half-degree slice of a FAO distribution
 * layer and dumps it into a geo-referenced database table.
 */
public class TestRasterTable {

	static String cfg = "./cfg/";

	public static void main(String[] args) throws Exception {
		String layertitle = "FAO AQUATIC SPECIES DISTRIBUTION MAP OF MEGALASPIS CORDYLA";
		long startMillis = System.currentTimeMillis();
		AnalysisLogger.setLogger(cfg + AlgorithmConfiguration.defaultLoggerFile);
		AlgorithmConfiguration config = new AlgorithmConfiguration();
		config.setConfigPath(cfg);
		config.setPersistencePath("./");
		// Local test database coordinates.
		config.setParam("DatabaseUserName", "gcube");
		config.setParam("DatabasePassword", "d4science2");
		config.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
		config.setParam("DatabaseDriver", "org.postgresql.Driver");
		GeoIntersector intersector = new GeoIntersector(null, cfg);
		// Whole globe, surface level, first time instant, 0.5 degree resolution.
		int t = 0;
		double x1 = -180;
		double x2 = 180;
		double y1 = -90;
		double y2 = 90;
		double z = 0;
		double xResolution = 0.5;
		double yResolution = 0.5;
		double[][] slice = intersector.takeTimeSlice(layertitle, t, x1, x2, y1, y2, z, xResolution, yResolution);
		RasterTable raster = new RasterTable(x1, x2, y1, y2, z, xResolution, yResolution, slice, config);
		raster.dumpGeoTable();
		System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - startMillis));
	}
}

View File

@ -1,28 +0,0 @@
package org.gcube.dataanalysis.geo.test;
import org.gcube.dataanalysis.geo.meta.GenericLayerMetadata;
/**
 * Manual test: publishes ISO metadata for a GeoServer-hosted biodiversity
 * layer into a development GeoNetwork instance.
 */
public class TestStandardLayerMetadataInsert {

	public static void main(String[] args) throws Exception {
		GenericLayerMetadata inserter = new GenericLayerMetadata();
		// Target GeoNetwork instance and credentials.
		inserter.setGeonetworkUrl("http://geoserver-dev.d4science-ii.research-infrastructures.eu/geonetwork/");
		inserter.setGeonetworkUser("admin");
		inserter.setGeonetworkPwd("admin");
		// GeoServer hosting the layer and its descriptive fields.
		inserter.setGeoserverUrl("http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver");
		inserter.setTitle("Biodiversity according to LME - Obis");
		inserter.setLayerName("aquamaps:biodiversity_lme_geo");
		inserter.setCategoryTypes("_BIOTA_");
		inserter.setAbstractField("Biodiversity according to LME - Obis");
		inserter.setCustomTopics("Obis", "Large Marine Ecosystems");
		// Spatial extent and resolution of the layer.
		inserter.setResolution(0.5);
		inserter.setXLeftLow(-180);
		inserter.setYLeftLow(-85.5);
		inserter.setXRightUpper(180);
		inserter.setYRightUpper(85.5);
		inserter.insertMetaData();
	}
}

View File

@ -1,44 +0,0 @@
package org.gcube.dataanalysis.geo.test.maps;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
import org.gcube.dataanalysis.ecoengine.processing.factories.EvaluatorsFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
/**
 * Regression test for the MAPS_COMPARISON evaluator: compares a layer with
 * itself (same UUID for Layer_1 and Layer_2), which exercises the full
 * comparison pipeline on a single machine.
 */
public class RegressionTestMapsComparison {

	public static void main(String[] args) throws Exception {
		List<ComputationalAgent> evaluators = EvaluatorsFactory.getEvaluators(testConfig1());
		ComputationalAgent evaluator = evaluators.get(0);
		evaluator.init();
		Regressor.process(evaluator);
		evaluators = null;
	}

	/** Builds the evaluator configuration for the self-comparison run. */
	private static AlgorithmConfiguration testConfig1() {
		AlgorithmConfiguration config = Regressor.getConfig();
		config.setNumberOfResources(1);
		config.setConfigPath("./cfg");
		config.setPersistencePath("./");
		config.setAgent("MAPS_COMPARISON");
		// Database used for intermediate raster tables.
		config.setParam("DatabaseUserName", "gcube");
		config.setParam("DatabasePassword", "d4science2");
		config.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
		config.setParam("DatabaseDriver", "org.postgresql.Driver");
		// Same UUID twice: the layer is compared against itself.
		config.setParam("Layer_1", "86a7ac79-866a-49c6-b5d5-602fc2d87ddd");
		config.setParam("Layer_2", "86a7ac79-866a-49c6-b5d5-602fc2d87ddd");
		config.setParam("ValuesComparisonThreshold", "" + 0.1);
		config.setParam("Z", "0");
		config.setGcubeScope(null);
		return config;
	}
}

View File

@ -1,56 +0,0 @@
package org.gcube.dataanalysis.geo.test.maps;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.algorithms.MapsComparator;
import org.gcube.dataanalysis.geo.insertion.RasterTable;
import org.gcube.dataanalysis.geo.retrieval.GeoIntersector;
/**
 * Manual test driver: feeds two species-distribution layer titles to
 * {@link MapsComparator} and prints the comparison output.
 */
public class TestMapsComparison {

	static String cfg = "./cfg/";

	public static void main(String[] args) throws Exception {
		// Titles of the two layers to compare (passed as Layer_1 / Layer_2).
		// Earlier runs of this test used various NetCDF/THREDDS layers instead.
		String firstLayerTitle = "Sarda orientalis";
		String secondLayerTitle = "FAO aquatic species distribution map of Sarda chiliensis";
		// Last recorded output:
		// {MEAN=1.0, VARIANCE=0.0, NUMBER_OF_ERRORS=38596, NUMBER_OF_COMPARISONS=260281, ACCURACY=85.17, MAXIMUM_ERROR=1.0, MAXIMUM_ERROR_POINT=3207:219:1, TREND=CONTRACTION, Resolution=0.5}

		AnalysisLogger.setLogger(cfg + AlgorithmConfiguration.defaultLoggerFile);

		AlgorithmConfiguration comparatorConfig = new AlgorithmConfiguration();
		comparatorConfig.setConfigPath(cfg);
		comparatorConfig.setPersistencePath("./");
		// NOTE(review): hard-coded credentials for a local test database.
		comparatorConfig.setParam("DatabaseUserName", "gcube");
		comparatorConfig.setParam("DatabasePassword", "d4science2");
		comparatorConfig.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
		comparatorConfig.setParam("DatabaseDriver", "org.postgresql.Driver");
		comparatorConfig.setParam("Layer_1", firstLayerTitle);
		comparatorConfig.setParam("Layer_2", secondLayerTitle);
		comparatorConfig.setParam("ValuesComparisonThreshold", "0.01");
		comparatorConfig.setParam("Z", "0");
		comparatorConfig.setGcubeScope(null);

		MapsComparator comparator = new MapsComparator();
		comparator.setConfiguration(comparatorConfig);
		comparator.init();
		comparator.compute();
		comparator.getOutput();
	}
}

View File

@ -1,49 +0,0 @@
package org.gcube.dataanalysis.geo.test.maps;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.algorithms.MapsComparator;
import org.gcube.dataanalysis.geo.insertion.RasterTable;
import org.gcube.dataanalysis.geo.retrieval.GeoIntersector;
/**
 * Manual test driver: compares the FAO distribution map of
 * Eleutheronema tetradactylum against its AquaMaps distribution
 * through {@link MapsComparator}.
 */
public class TestMapsComparisonAquaMapsvsFAO {

	static String cfg = "./cfg/";

	public static void main(String[] args) throws Exception {
		String faoLayerTitle = "FAO aquatic species distribution map of Eleutheronema tetradactylum";
		String aquamapsLayerTitle = "Eleutheronema tetradactylum";
		/*
		 * Last recorded output:
		 * {MEAN=0.81, VARIANCE=0.02, NUMBER_OF_ERRORS=6691, NUMBER_OF_COMPARISONS=259200,
		 * ACCURACY=97.42,
		 * MAXIMUM_ERROR=1.0, MAXIMUM_ERROR_POINT=3005:363:1,
		 * COHENS_KAPPA=0.218,
		 * COHENS_KAPPA_CLASSIFICATION_LANDIS_KOCH=Fair,
		 * COHENS_KAPPA_CLASSIFICATION_FLEISS=Marginal,
		 * TREND=EXPANSION,
		 * Resolution=0.5}
		 */
		AnalysisLogger.setLogger(cfg + AlgorithmConfiguration.defaultLoggerFile);

		AlgorithmConfiguration settings = new AlgorithmConfiguration();
		settings.setConfigPath(cfg);
		settings.setPersistencePath("./");
		// NOTE(review): hard-coded credentials for a local test database.
		settings.setParam("DatabaseUserName", "gcube");
		settings.setParam("DatabasePassword", "d4science2");
		settings.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
		settings.setParam("DatabaseDriver", "org.postgresql.Driver");
		settings.setParam("Layer_1", faoLayerTitle);
		settings.setParam("Layer_2", aquamapsLayerTitle);
		settings.setParam("ValuesComparisonThreshold", "0.5");
		settings.setParam("KThreshold", "0.5");
		settings.setParam("Z", "0");
		settings.setGcubeScope(null);

		MapsComparator comparator = new MapsComparator();
		comparator.setConfiguration(settings);
		comparator.init();
		comparator.compute();
		comparator.getOutput();
	}
}

View File

@ -1,36 +0,0 @@
package org.gcube.dataanalysis.geo.test.maps;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.algorithms.MapsComparator;
import org.gcube.dataanalysis.geo.insertion.RasterTable;
import org.gcube.dataanalysis.geo.retrieval.GeoIntersector;
/**
 * Manual test driver: compares the FAO distribution maps of
 * Leptomelanosoma indicum (Layer_1) and Eleutheronema tetradactylum
 * (Layer_2) through {@link MapsComparator}.
 */
public class TestMapsComparisonExampleTCOM {

	static String cfg = "./cfg/";

	public static void main(String[] args) throws Exception {
		String secondLayerTitle = "FAO aquatic species distribution map of Eleutheronema tetradactylum";
		String firstLayerTitle = "FAO aquatic species distribution map of Leptomelanosoma indicum";
		// Last recorded output:
		// {MEAN=1.0, VARIANCE=0.0, NUMBER_OF_ERRORS=1823, NUMBER_OF_COMPARISONS=260281, ACCURACY=99.3, MAXIMUM_ERROR=1.0, MAXIMUM_ERROR_POINT=1008:390:1, TREND=EXPANSION, Resolution=0.5}

		AnalysisLogger.setLogger(cfg + AlgorithmConfiguration.defaultLoggerFile);

		AlgorithmConfiguration comparisonSetup = new AlgorithmConfiguration();
		comparisonSetup.setConfigPath(cfg);
		comparisonSetup.setPersistencePath("./");
		// NOTE(review): hard-coded credentials for a local test database.
		comparisonSetup.setParam("DatabaseUserName", "gcube");
		comparisonSetup.setParam("DatabasePassword", "d4science2");
		comparisonSetup.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
		comparisonSetup.setParam("DatabaseDriver", "org.postgresql.Driver");
		comparisonSetup.setParam("Layer_1", firstLayerTitle);
		comparisonSetup.setParam("Layer_2", secondLayerTitle);
		comparisonSetup.setParam("Z", "0");
		comparisonSetup.setGcubeScope(null);

		MapsComparator comparator = new MapsComparator();
		comparator.setConfiguration(comparisonSetup);
		comparator.init();
		comparator.compute();
		// Unlike the sibling tests, this driver does not call getOutput().
	}
}

View File

@ -1,41 +0,0 @@
package org.gcube.dataanalysis.geo.test.maps;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.algorithms.MapsComparator;
import org.gcube.dataanalysis.geo.insertion.RasterTable;
import org.gcube.dataanalysis.geo.retrieval.GeoIntersector;
/**
 * Manual test driver: compares an ORCA025 temperature slice against the
 * World Ocean Atlas 09 annual statistical mean through {@link MapsComparator}.
 */
public class TestMapsComparisonTemperature {

	static String cfg = "./cfg/";

	public static void main(String[] args) throws Exception {
		String orcaLayerTitle = "Temperature in [12-15-09 01:00] (3D) {Native grid ORCA025.L75 monthly average: Data extracted from dataset http://atoll-mercator.vlandata.cls.fr:44080/thredds/dodsC/global-reanalysis-phys-001-004-b-ref-fr-mjm95-gridt}";
		String woaLayerTitle = "Statistical Mean in [07-01-01 01:00] (3D) {World Ocean Atlas 09: Sea Water Temperature - annual: dods://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/temperature_annual_1deg_ENVIRONMENT_OCEANS_.nc}";
		// Last recorded output:
		// {MEAN=224.49, VARIANCE=10337.11, NUMBER_OF_ERRORS=47054, NUMBER_OF_COMPARISONS=65522, ACCURACY=28.19, MAXIMUM_ERROR=303.6, MAXIMUM_ERROR_POINT=5006:104, Resolution=0.9972222222222222}

		AnalysisLogger.setLogger(cfg + AlgorithmConfiguration.defaultLoggerFile);

		AlgorithmConfiguration runConfig = new AlgorithmConfiguration();
		runConfig.setConfigPath(cfg);
		runConfig.setPersistencePath("./");
		// NOTE(review): hard-coded credentials for a local test database.
		runConfig.setParam("DatabaseUserName", "gcube");
		runConfig.setParam("DatabasePassword", "d4science2");
		runConfig.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
		runConfig.setParam("DatabaseDriver", "org.postgresql.Driver");
		runConfig.setParam("Layer_1", orcaLayerTitle);
		runConfig.setParam("Layer_2", woaLayerTitle);
		runConfig.setParam("ValuesComparisonThreshold", "100");
		runConfig.setParam("Z", "0");
		runConfig.setGcubeScope(null);

		MapsComparator comparator = new MapsComparator();
		comparator.setConfiguration(runConfig);
		comparator.init();
		comparator.compute();
		comparator.getOutput();
	}
}

View File

@ -1,45 +0,0 @@
package org.gcube.dataanalysis.geo.test.maps;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.algorithms.MapsComparator;
import org.gcube.dataanalysis.geo.insertion.RasterTable;
import org.gcube.dataanalysis.geo.retrieval.GeoIntersector;
/**
 * Manual test driver: compares two layers referenced by catalogue UUID
 * (rather than by title) through {@link MapsComparator}.
 */
public class TestMapsComparisonTemperatureUUID {

	static String cfg = "./cfg/";

	public static void main(String[] args) throws Exception {
		// Layers are identified by UUID here; titled variants were used in
		// earlier runs of this test.
		String firstLayerId = "35412bde-aa6e-49f6-b9ad-e99458882f92";
		String secondLayerId = "625ba45b-c19f-4a9d-b3f7-be13e8f73101";
		// Last recorded output:
		// {MEAN=224.49, VARIANCE=10337.11, NUMBER_OF_ERRORS=47054, NUMBER_OF_COMPARISONS=65522, ACCURACY=28.19, MAXIMUM_ERROR=303.6, MAXIMUM_ERROR_POINT=5006:104, Resolution=0.9972222222222222}

		AnalysisLogger.setLogger(cfg + AlgorithmConfiguration.defaultLoggerFile);

		AlgorithmConfiguration uuidRunConfig = new AlgorithmConfiguration();
		uuidRunConfig.setConfigPath(cfg);
		uuidRunConfig.setPersistencePath("./");
		// NOTE(review): hard-coded credentials for a local test database.
		uuidRunConfig.setParam("DatabaseUserName", "gcube");
		uuidRunConfig.setParam("DatabasePassword", "d4science2");
		uuidRunConfig.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
		uuidRunConfig.setParam("DatabaseDriver", "org.postgresql.Driver");
		uuidRunConfig.setParam("Layer_1", firstLayerId);
		uuidRunConfig.setParam("Layer_2", secondLayerId);
		uuidRunConfig.setParam("ValuesComparisonThreshold", "0.1");
		uuidRunConfig.setParam("Z", "0");
		uuidRunConfig.setGcubeScope(null);

		MapsComparator comparator = new MapsComparator();
		comparator.setConfiguration(uuidRunConfig);
		comparator.init();
		comparator.compute();
		comparator.getOutput();
	}
}

View File

@ -1,35 +0,0 @@
package org.gcube.dataanalysis.geo.test.maps;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.algorithms.MapsComparator;
/**
 * Manual test driver: compares the WOA 2005 temperature layer against the
 * WOA 09 annual statistical mean through {@link MapsComparator}.
 */
public class TestMapsComparisonTemperatureWOA {

	static String cfg = "./cfg/";

	public static void main(String[] args) throws Exception {
		String woa2005Title = "Temperature in [07-01-01 13:00] (3D) {World Ocean Atlas 2005: Tcl version: 8.4.13, NAP version: 6.2.2}";
		String woa09Title = "Statistical Mean in [07-01-01 01:00] (3D) {World Ocean Atlas 09: Sea Water Temperature - annual: dods://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/temperature_annual_1deg_ENVIRONMENT_OCEANS_.nc}";
		// Last recorded output:
		// {MEAN=0.0, VARIANCE=0.0, NUMBER_OF_ERRORS=0, NUMBER_OF_COMPARISONS=65522, ACCURACY=100.0, MAXIMUM_ERROR=0.0, MAXIMUM_ERROR_POINT=null, TREND=STATIONARY, Resolution=0.9972222222222222}

		AnalysisLogger.setLogger(cfg + AlgorithmConfiguration.defaultLoggerFile);

		AlgorithmConfiguration woaConfig = new AlgorithmConfiguration();
		woaConfig.setConfigPath(cfg);
		woaConfig.setPersistencePath("./");
		// NOTE(review): hard-coded credentials for a local test database.
		woaConfig.setParam("DatabaseUserName", "gcube");
		woaConfig.setParam("DatabasePassword", "d4science2");
		woaConfig.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
		woaConfig.setParam("DatabaseDriver", "org.postgresql.Driver");
		woaConfig.setParam("Layer_1", woa2005Title);
		woaConfig.setParam("Layer_2", woa09Title);
		woaConfig.setParam("ValuesComparisonThreshold", "100");
		woaConfig.setParam("Z", "0");
		woaConfig.setGcubeScope(null);

		MapsComparator comparator = new MapsComparator();
		comparator.setConfiguration(woaConfig);
		comparator.init();
		comparator.compute();
		comparator.getOutput();
	}
}

View File

@ -1,65 +0,0 @@
package org.gcube.dataanalysis.geo.test.maps;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.algorithms.MapsComparator;
/**
 * Manual test driver: compares consecutive yearly snapshots (time indices
 * 0, 12, ..., 120, i.e. one every 12 monthly slices) of the same ORCA025
 * temperature layer through {@link MapsComparator}.
 */
public class TestMapsComparisoninTime {

	static String cfg = "./cfg/";

	public static void main(String[] args) throws Exception {
		int[] timeIdx = {0, 12, 24, 36, 48, 60, 72, 84, 96, 108, 120};
		for (int i = 1; i < timeIdx.length; i++) {
			compare(timeIdx[i - 1], timeIdx[i]);
		}
	}

	/**
	 * Runs MapsComparator between time slices t1 (TimeIndex_1) and t2
	 * (TimeIndex_2) of the temperature layer and prints the comparator's
	 * output parameters plus the elapsed run time in milliseconds.
	 *
	 * @param t1 time index of the first slice
	 * @param t2 time index of the second slice
	 * @throws Exception if the comparator initialization or computation fails
	 */
	public static void compare(int t1, int t2) throws Exception {
		String layertitle = "Temperature from [12-15-99 01:00] to [12-15-09 01:00] (2D) {Native grid ORCA025.L75 monthly average: Data extracted from dataset http://atoll-mercator.vlandata.cls.fr:44080/thredds/dodsC/global-reanalysis-phys-001-004-b-ref-fr-mjm95-gridt}";
		//1vs0: {MEAN=6.23, VARIANCE=30.58, NUMBER_OF_ERRORS=39650, NUMBER_OF_COMPARISONS=522242, ACCURACY=92.41, MAXIMUM_ERROR=45.35, MAXIMUM_ERROR_POINT=3215:143, Resolution=0.3525954946131244}
		/*
		**********(12->0) {MEAN=0.53, VARIANCE=0.28, NUMBER_OF_ERRORS=36075, NUMBER_OF_COMPARISONS=522242, ACCURACY=93.09, MAXIMUM_ERROR=6.0, MAXIMUM_ERROR_POINT=7309:456, TREND=EXPANSION, Resolution=0.3525954946131244} ELAPSED: 1370363639187
		**********(24->12) {MEAN=0.56, VARIANCE=0.3, NUMBER_OF_ERRORS=36053, NUMBER_OF_COMPARISONS=522242, ACCURACY=93.1, MAXIMUM_ERROR=6.95, MAXIMUM_ERROR_POINT=1313:143, TREND=EXPANSION, Resolution=0.3525954946131244} ELAPSED: 1370363722843 Japan
		**********(36->24) {MEAN=0.62, VARIANCE=0.33, NUMBER_OF_ERRORS=35744, NUMBER_OF_COMPARISONS=522242, ACCURACY=93.16, MAXIMUM_ERROR=6.87, MAXIMUM_ERROR_POINT=1314:465, TREND=EXPANSION, Resolution=0.3525954946131244} ELAPSED: 1370363798387
		**********(48->36) {MEAN=0.49, VARIANCE=0.22, NUMBER_OF_ERRORS=35664, NUMBER_OF_COMPARISONS=522242, ACCURACY=93.17, MAXIMUM_ERROR=7.54, MAXIMUM_ERROR_POINT=7307:456, TREND=CONTRACTION, Resolution=0.3525954946131244} ELAPSED: 1370363875063 North Carolina
		**********(60->48) {MEAN=0.46, VARIANCE=0.23, NUMBER_OF_ERRORS=36133, NUMBER_OF_COMPARISONS=522242, ACCURACY=93.08, MAXIMUM_ERROR=5.42, MAXIMUM_ERROR_POINT=7307:456, TREND=EXPANSION, Resolution=0.3525954946131244} ELAPSED: 1370363953390
		**********(72->60) {MEAN=0.56, VARIANCE=0.31, NUMBER_OF_ERRORS=35970, NUMBER_OF_COMPARISONS=522242, ACCURACY=93.11, MAXIMUM_ERROR=7.01, MAXIMUM_ERROR_POINT=7307:236, TREND=EXPANSION, Resolution=0.3525954946131244} ELAPSED: 1370364033154
		**********(84->72) {MEAN=0.57, VARIANCE=0.31, NUMBER_OF_ERRORS=36148, NUMBER_OF_COMPARISONS=522242, ACCURACY=93.08, MAXIMUM_ERROR=7.02, MAXIMUM_ERROR_POINT=7307:247, TREND=CONTRACTION, Resolution=0.3525954946131244} ELAPSED: 1370364110444
		**********(96->84) {MEAN=0.59, VARIANCE=0.32, NUMBER_OF_ERRORS=35873, NUMBER_OF_COMPARISONS=522242, ACCURACY=93.13, MAXIMUM_ERROR=5.23, MAXIMUM_ERROR_POINT=7306:249, TREND=CONTRACTION, Resolution=0.3525954946131244} ELAPSED: 1370364190900
		**********(108->96) {MEAN=0.53, VARIANCE=0.27, NUMBER_OF_ERRORS=35789, NUMBER_OF_COMPARISONS=522242, ACCURACY=93.15, MAXIMUM_ERROR=4.96, MAXIMUM_ERROR_POINT=7306:249, TREND=EXPANSION, Resolution=0.3525954946131244} ELAPSED: 1370364272133
		**********(120->108) {MEAN=0.62, VARIANCE=0.37, NUMBER_OF_ERRORS=36194, NUMBER_OF_COMPARISONS=522242, ACCURACY=93.07, MAXIMUM_ERROR=5.51, MAXIMUM_ERROR_POINT=1316:352, TREND=EXPANSION, Resolution=0.3525954946131244} pacific ocean
		*/
		// FIX: t0 was initialized to 0 and never set to the start time, so the
		// "ELAPSED" figure printed the absolute epoch time (see the huge values
		// recorded in the comment above) instead of the run duration. Start the
		// clock here so the printed value is a real elapsed time.
		long t0 = System.currentTimeMillis();
		AnalysisLogger.setLogger(cfg + AlgorithmConfiguration.defaultLoggerFile);
		AlgorithmConfiguration config = new AlgorithmConfiguration();
		config.setConfigPath(cfg);
		config.setPersistencePath("./");
		// NOTE(review): hard-coded credentials for a local test database.
		config.setParam("DatabaseUserName", "gcube");
		config.setParam("DatabasePassword", "d4science2");
		config.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
		config.setParam("DatabaseDriver", "org.postgresql.Driver");
		// The same layer is compared against itself at two different times.
		config.setParam("Layer_1", layertitle);
		config.setParam("Layer_2", layertitle);
		config.setParam("TimeIndex_1", "" + t1);
		config.setParam("TimeIndex_2", "" + t2);
		config.setParam("ValuesComparisonThreshold", "0.01");
		config.setParam("Z", "0");
		config.setGcubeScope(null);
		MapsComparator mc = new MapsComparator();
		mc.setConfiguration(config);
		mc.init();
		mc.compute();
		mc.getOutput();
		System.out.println("*********(" + t2 + "->" + t1 + ") " + mc.outputParameters + " ELAPSED: " + (System.currentTimeMillis() - t0));
	}
}

View File

@ -1,38 +0,0 @@
package org.gcube.dataanalysis.geo.utils;
/**
 * Helpers that turn an (x, y) centroid into a C-Square code via the
 * AquaMaps CSquare utility, shifting zero coordinates off the origin
 * before encoding.
 */
public class CSquareCodesConverter {

	/**
	 * Encodes a centroid at half-degree (0.5) resolution; zero coordinates
	 * are nudged to 0.25 before encoding.
	 */
	public static String convertHalfDegree(double x, double y) {
		double cx = (x == 0) ? 0.25 : x;
		double cy = (y == 0) ? 0.25 : y;
		return it.cnr.aquamaps.CSquare.centroidToCode(cx, cy, 0.5);
	}

	/**
	 * Encodes a centroid at an arbitrary resolution; a zero resolution
	 * defaults to 0.1, and zero coordinates are nudged to the resolution
	 * value before encoding.
	 */
	public static String convertAtResolution(double x, double y, double resolution) {
		double res = (resolution == 0) ? 0.1 : resolution;
		double cx = (x == 0) ? res : x;
		double cy = (y == 0) ? res : y;
		return it.cnr.aquamaps.CSquare.centroidToCode(cx, cy, res);
	}

	/** Ad-hoc spot check of the underlying encoder. */
	public static void main(String[] args) {
		System.out.println(it.cnr.aquamaps.CSquare.centroidToCode(35.7500, -49.9999, 0.5));
	}
}

View File

@ -1,191 +0,0 @@
/*
* Copyright (c) 2009 The University of Reading
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the University of Reading, nor the names of the
* authors or contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.gcube.dataanalysis.geo.utils;
import java.util.ArrayList;
import java.util.List;
import org.geotoolkit.referencing.CRS;
import org.geotoolkit.referencing.crs.DefaultGeographicCRS;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.opengis.referencing.cs.CoordinateSystemAxis;
import org.opengis.referencing.operation.MathTransform;
import org.opengis.referencing.operation.TransformException;
import ucar.unidata.geoloc.LatLonPoint;
import ucar.unidata.geoloc.LatLonPointImpl;
import ucar.unidata.geoloc.ProjectionPoint;
import ucar.unidata.geoloc.ProjectionPointImpl;
/**
* This class wraps the GeoTools/GeoAPI coordinate reference system methods, providing a set of convenience methods such as transformations and validity checks.
*
* @todo this object is immutable and could be re-used.
* @author Jon
*/
public final class CrsHelper {

	// Code for the WGS84 longitude/latitude ("Plate Carree") CRS.
	public static final String PLATE_CARREE_CRS_CODE = "CRS:84";
	// All CRS codes resolvable by the library; populated once by the static
	// initializer below.
	public static final List<String> SUPPORTED_CRS_CODES = new ArrayList<String>();

	// The wrapped coordinate reference system.
	private CoordinateReferenceSystem crs;
	// Transform from this CRS to WGS84 (set in fromCrsCode).
	private MathTransform crsToLatLon;
	// Inverse transform, WGS84 to this CRS (set in fromCrsCode).
	private MathTransform latLonToCrs;
	// True when crsToLatLon is the identity, i.e. the CRS already is lon/lat.
	private boolean isLatLon;

	static {
		// Find the supported CRS codes
		// I think this is the appropriate method to get all the CRS codes
		// that we can support
		for (Object codeObj : CRS.getSupportedCodes("urn:ogc:def")) {
			SUPPORTED_CRS_CODES.add((String) codeObj);
		}
		System.out.println("Supported Codes:" + SUPPORTED_CRS_CODES);
	}

	/** Private constructor to prevent direct instantiation */
	private CrsHelper() {
	}

	/**
	 * Factory method: builds a helper for the given CRS code (e.g. "CRS:84"),
	 * pre-computing the transforms to and from WGS84.
	 *
	 * @param crsCode the authority code of the CRS to wrap
	 * @throws Exception if the code cannot be decoded or no transform to/from
	 *             WGS84 can be found (the root cause is not chained; only the
	 *             offending code is reported)
	 */
	public static CrsHelper fromCrsCode(String crsCode) throws Exception {
		// TODO: could cache CrsHelpers with the same code
		CrsHelper crsHelper = new CrsHelper();
		try {
			// The "true" means "force longitude first" axis order
			crsHelper.crs = CRS.decode(crsCode, true);
			// Get transformations to and from lat-lon.
			// The "true" means "lenient", i.e. ignore datum shifts. This
			// is necessary to prevent "Bursa wolf parameters required"
			// errors (Some CRSs, including British National Grid, fail if
			// we are not "lenient".)
			crsHelper.crsToLatLon = CRS.findMathTransform(crsHelper.crs, DefaultGeographicCRS.WGS84, true);
			crsHelper.latLonToCrs = CRS.findMathTransform(DefaultGeographicCRS.WGS84, crsHelper.crs, true);
			crsHelper.isLatLon = crsHelper.crsToLatLon.isIdentity();
			return crsHelper;
		} catch (Exception e) {
			throw new Exception("Error creating CrsHelper from code " + crsCode);
		}
	}

	/** Returns the wrapped coordinate reference system. */
	public CoordinateReferenceSystem getCoordinateReferenceSystem() {
		return this.crs;
	}

	/**
	 * @return true if the given coordinate pair is within the valid range of both the x and y axis of this coordinate reference system.
	 */
	public boolean isPointValidForCrs(ProjectionPoint point) {
		return this.isPointValidForCrs(point.getX(), point.getY());
	}

	/**
	 * @return true if the given coordinate pair is within the valid range of both the x and y axis of this coordinate reference system.
	 */
	public boolean isPointValidForCrs(double x, double y) {
		// Axis 0 is x (longitude-first order was forced in fromCrsCode),
		// axis 1 is y.
		CoordinateSystemAxis xAxis = this.crs.getCoordinateSystem().getAxis(0);
		CoordinateSystemAxis yAxis = this.crs.getCoordinateSystem().getAxis(1);
		return x >= xAxis.getMinimumValue() && x <= xAxis.getMaximumValue() && y >= yAxis.getMinimumValue() && y <= yAxis.getMaximumValue();
	}

	/**
	 * Transforms the given x-y point in this {@link #getCoordinateReferenceSystem() CRS} to a LatLonPoint.
	 *
	 * @throws TransformException
	 *             if the required transformation could not be performed
	 */
	public LatLonPoint crsToLatLon(double x, double y) throws TransformException {
		if (this.isLatLon) {
			// We don't need to do the transformation
			return new LatLonPointImpl(y, x);
		}
		// We know x must go first in this array because we selected
		// "force longitude-first" when creating the CRS for this grid
		double[] point = new double[] { x, y };
		// Transform to lat-lon in-place
		this.crsToLatLon.transform(point, 0, point, 0, 1);
		// LatLonPointImpl takes (lat, lon), hence the swapped indices.
		return new LatLonPointImpl(point[1], point[0]);
	}

	/**
	 * Transforms the given x-y point in this {@link #getCoordinateReferenceSystem() CRS} to a LatLonPoint.
	 *
	 * @throws TransformException
	 *             if the required transformation could not be performed
	 */
	public LatLonPoint crsToLatLon(ProjectionPoint point) throws TransformException {
		return this.crsToLatLon(point.getX(), point.getY());
	}

	/**
	 * Transforms the given LatLonPoint to an x-y point in this {@link #getCoordinateReferenceSystem() CRS}.
	 *
	 * @throws TransformException
	 *             if the required transformation could not be performed
	 */
	public ProjectionPoint latLonToCrs(LatLonPoint latLonPoint) throws TransformException {
		return this.latLonToCrs(latLonPoint.getLongitude(), latLonPoint.getLatitude());
	}

	/**
	 * Transforms the given longitude-latitude point to an x-y point in this {@link #getCoordinateReferenceSystem() CRS}.
	 *
	 * @throws TransformException
	 *             if the required transformation could not be performed
	 */
	public ProjectionPoint latLonToCrs(double longitude, double latitude) throws TransformException {
		if (this.isLatLon) {
			// We don't need to do the transformation
			return new ProjectionPointImpl(longitude, latitude);
		}
		// We know x must go first in this array because we selected
		// "force longitude-first" when creating the CRS for this grid
		double[] point = new double[] { longitude, latitude };
		// Transform to lat-lon in-place
		this.latLonToCrs.transform(point, 0, point, 0, 1);
		return new ProjectionPointImpl(point[0], point[1]);
	}

	/**
	 * @return true if this crs is lat-lon
	 */
	public boolean isLatLon() {
		return this.isLatLon;
	}

	/**
	 * Ad-hoc smoke test: converts the point (190, 10) under CRS:84.
	 * NOTE(review): 190 is outside the usual -180..180 longitude range —
	 * presumably chosen on purpose to see how the transform behaves; confirm
	 * before relying on the printed result.
	 */
	public static void main(String[] args) throws Exception {
		CrsHelper helper = fromCrsCode("CRS:84");
		// boolean valid = helper.isPointValidForCrs(180, 0);
		// System.out.println(valid);
		LatLonPoint point = helper.crsToLatLon(190, 10);
		double x = point.getLongitude();
		double y = point.getLatitude();
		System.out.println(point + " (" + x + "," + y + ")");
	}
}

View File

@ -1,117 +0,0 @@
package org.gcube.dataanalysis.geo.utils;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.Writer;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.List;
/**
 * One-off utility that scrapes NetCDF download links ("fileServer" URLs
 * ending in .nc) out of a locally saved catalogue page ("netcdf_data.html")
 * and writes a shell script of wget commands that save each file under a
 * topic-annotated local name.
 */
public class Downloader {

	public static void main(String[] args) throws Exception {
		// Extract every line-embedded http...fileServer....nc link.
		List<String> hfiles = getfiles("netcdf_data.html", "fileServer", "http", ".nc");
		System.out.println(hfiles);
		System.out.println("Number of links:" + hfiles.size());
		// Derive the topic-annotated local file names.
		List<String> files = enrichfiles(hfiles);
		System.out.println(files);
		buildwgetFile("wgetfiles.sh", hfiles, files);
	}

	/**
	 * Writes one wget command per (URL, local name) pair to the given script
	 * file, one per line.
	 *
	 * @param filename path of the script file to create
	 * @param hfiles   remote URLs
	 * @param files    local output names, parallel to hfiles
	 * @throws Exception if the file cannot be written
	 */
	public static void buildwgetFile(String filename, List<String> hfiles, List<String> files) throws Exception {
		int size = hfiles.size();
		BufferedWriter bw = new BufferedWriter(new FileWriter(new File(filename)));
		try {
			for (int i = 0; i < size; i++) {
				bw.write(buildGetterString(hfiles.get(i), files.get(i)) + System.getProperty("line.separator"));
			}
		} finally {
			// FIX: close the writer even when writing fails (was leaked on error).
			bw.close();
		}
	}

	/**
	 * Maps each remote URL to a local file name annotated with topic keywords
	 * inferred from the variable name embedded in the file name (temperature,
	 * salinity, oxygen, ...). Unrecognized variables default to
	 * ENVIRONMENT/OCEANS.
	 */
	public static List<String> enrichfiles(List<String> files) throws Exception {
		List<String> arrayfile = new ArrayList<String>();
		for (String fileh : files) {
			// Keep only the last path segment of the URL.
			String file = fileh.substring(fileh.lastIndexOf("/") + 1);
			if (file.contains("temperature"))
				file = buildTopicString(file, "ENVIRONMENT", "OCEANS");
			else if (file.contains("salinity"))
				file = buildTopicString(file, "ENVIRONMENT", "OCEANS");
			else if (file.contains("oxygen"))
				file = buildTopicString(file, "ENVIRONMENT", "BIOTA");
			else if (file.contains("phosphate"))
				file = buildTopicString(file, "ENVIRONMENT", "BIOTA");
			else if (file.contains("nitrate"))
				file = buildTopicString(file, "ENVIRONMENT", "BIOTA");
			else if (file.contains("silicate"))
				file = buildTopicString(file, "ENVIRONMENT", "BIOTA");
			else
				file = buildTopicString(file, "ENVIRONMENT", "OCEANS");
			arrayfile.add(file);
		}
		return arrayfile;
	}

	/**
	 * Scans the given text file and, for every line containing criterion,
	 * extracts the substring from the first occurrence of initselection to
	 * the end of the first occurrence of endselection (inclusive).
	 *
	 * @throws Exception if the file cannot be read, or if a matching line
	 *             lacks initselection/endselection (StringIndexOutOfBounds)
	 */
	public static List<String> getfiles(String filename, String criterion, String initselection, String endselection) throws Exception {
		List<String> files = new ArrayList<String>();
		BufferedReader br = new BufferedReader(new FileReader(new File(filename)));
		try {
			String line = br.readLine();
			while (line != null) {
				if (line.contains(criterion)) {
					String cut = line.substring(line.indexOf(initselection), line.indexOf(endselection) + endselection.length());
					files.add(cut);
				}
				line = br.readLine();
			}
		} finally {
			// FIX: close the reader even when parsing fails (was leaked on error).
			br.close();
		}
		return files;
	}

	/** Builds the wget command line: wget --output-document=NAME URL. */
	public static String buildGetterString(String httpstring, String filename) {
		return String.format("wget --output-document=%1$s %2$s", filename, httpstring);
	}

	/**
	 * Inserts topic keywords before the ".n..." extension, e.g.
	 * "temperature.nc" + [ENVIRONMENT, OCEANS] ->
	 * "temperature_ENVIRONMENT_OCEANS_.nc" (a trailing underscore precedes
	 * the extension by design; see the file names used elsewhere in this
	 * project). Splits at the FIRST occurrence of ".n".
	 */
	public static String buildTopicString(String filename, String... topics) {
		int idx = filename.indexOf(".n");
		String file = filename.substring(0, idx);
		String ext = filename.substring(idx);
		for (String topic : topics) {
			file = file + "_" + topic;
		}
		return file + "_" + ext;
	}

	/**
	 * Downloads the content at endpoint into the given local file.
	 * NOTE(review): the copy goes through character Reader/Writer streams,
	 * which is not binary-safe — fine for text pages, but it would corrupt
	 * raw NetCDF payloads; confirm intended usage.
	 */
	public static void downloadData(String endpoint, String file) throws Exception {
		// Send data
		String urlStr = endpoint;
		URL url = new URL(urlStr);
		URLConnection conn = url.openConnection();
		InputStreamReader isr = new InputStreamReader(conn.getInputStream());
		try {
			FileWriter fw = new FileWriter(new File(file));
			try {
				pipe(isr, fw);
			} finally {
				fw.close();
			}
		} finally {
			// FIX: both streams were leaked if the transfer failed mid-way.
			isr.close();
		}
	}

	/**
	 * Copies reader to writer in 1KB chunks, logging the running character
	 * count every 1000 chunks. Flushes but does not close the writer.
	 */
	private static void pipe(Reader reader, Writer writer) throws IOException {
		char[] buf = new char[1024];
		int read = 0;
		double bytes = 0;
		long i = 0;
		while ((read = reader.read(buf)) >= 0) {
			writer.write(buf, 0, read);
			bytes = (bytes + read);
			if (i % 1000 == 0)
				System.out.println("B:" + bytes);
			i++;
		}
		writer.flush();
	}
}

View File

@ -1,317 +0,0 @@
package org.gcube.dataanalysis.geo.utils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.geo.meta.OGCFormatter;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LinearRing;
import com.vividsolutions.jts.geom.MultiPolygon;
import com.vividsolutions.jts.geom.Point;
import com.vividsolutions.jts.geom.Polygon;
import com.vividsolutions.jts.geom.PrecisionModel;
import com.vividsolutions.jts.geom.impl.CoordinateArraySequence;
/**
 * Queries a GeoServer WFS endpoint and converts the returned GeoJSON into
 * either a flat map of numeric properties (point queries) or a list of
 * {@link FeaturedPolygon} objects — JTS geometries paired with their
 * attribute values (layer-wide queries).
 */
public class EnvDataExplorer {

	/**
	 * Requests, as JSON, the single WFS feature intersecting the point (x, y),
	 * using a fixed 0.25-degree bounding box around the point.
	 *
	 * @return the raw JSON response body, or null if the HTTP request failed
	 */
	private static String callWFS(String geoServer, String layer, double x, double y) {
		float tolerance = 0.25f;
		String wfsURL = OGCFormatter.getWfsUrl(geoServer, layer, OGCFormatter.pointToBoundingBox(x, y, tolerance), 1, "json");
		AnalysisLogger.getLogger().debug("EnvDataExplorer-> Requesting URL: " + wfsURL);
		String returned = null;
		try {
			returned = HttpRequest.sendGetRequest(wfsURL, null);
		} catch (Exception e) {
			// best-effort: a failed request is logged and reported as null
			AnalysisLogger.getLogger().debug("EnvDataExplorer-> ERROR " + e.getLocalizedMessage());
		}
		if (returned != null)
			// AnalysisLogger.getLogger().debug("EnvDataExplorer-> Found Intersection: " + returned);
			AnalysisLogger.getLogger().debug("EnvDataExplorer-> Found Intersection");
		else
			AnalysisLogger.getLogger().debug("EnvDataExplorer-> Found Nothing!");
		return returned;
	}

	/**
	 * Requests, as JSON, ALL the features of the layer. The four bounding-box
	 * parameters are deliberately ignored (see inline comment on the WFS bug);
	 * callers receive every feature of the layer.
	 *
	 * @return the raw JSON response body, or null if the HTTP request failed
	 */
	private static String callWFS(String geoServer, String layer, double xL, double yL, double xR, double yR) {
		// String wfsURL = OGCFormatter.getWfsUrl(geoServer, layer, OGCFormatter.buildBoundingBox(xL, yL, xR, yR), 0, "json");
		// there is a bug in WFS in the retrieval according to a bounding box: y must be in the range -180;180. then I preferred to take all the features
		String wfsURL = OGCFormatter.getWfsUrl(geoServer, layer, null, 0, "json");
		AnalysisLogger.getLogger().debug("EnvDataExplorer-> Requesting URL: " + wfsURL);
		String returned = null;
		try {
			returned = HttpRequest.sendGetRequest(wfsURL, null);
		} catch (Exception e) {
			AnalysisLogger.getLogger().debug("EnvDataExplorer-> ERROR " + e.getLocalizedMessage());
		}
		if (returned != null)
			// AnalysisLogger.getLogger().debug("EnvDataExplorer-> Found Intersection: " + returned);
			AnalysisLogger.getLogger().debug("EnvDataExplorer-> Found Intersection");
		else
			AnalysisLogger.getLogger().debug("EnvDataExplorer-> Found Nothing!");
		return returned;
	}

	/**
	 * Returns the numeric properties of the feature intersecting (x, y),
	 * keyed by property name, or null on any failure.
	 *
	 * NOTE(review): the unchecked casts assume the parsed JSON has the shape
	 * features -> properties -> {name: numeric-string}; non-numeric property
	 * values make Double.parseDouble throw, which is reported as null —
	 * confirm against the layers actually queried.
	 */
	public static LinkedHashMap<String, Double> getFeatures(String geoserver, String layer, double x, double y) {
		try {
			AnalysisLogger.getLogger().debug("Calling WFS towards Geoserver:" + geoserver + " and layer:" + layer);
			String jsonString = callWFS(geoserver, layer, x, y);
			LinkedHashMap<String, Object> map = JsonMapper.parse(jsonString);
			LinkedHashMap<String, String> mapout = (LinkedHashMap<String, String>) ((HashMap<String, Object>) map.get("features")).get("properties");
			LinkedHashMap<String, Double> values = new LinkedHashMap<String, Double>();
			for (String key : mapout.keySet()) {
				values.put(key, Double.parseDouble(mapout.get(key)));
			}
			return values;
		} catch (Exception e) {
			AnalysisLogger.getLogger().debug("EnvDataExplorer-> Error in getting properties");
			return null;
		}
	}

	/**
	 * Downloads every feature of the layer and converts each one into a
	 * FeaturedPolygon. The value of a polygon is the first property whose
	 * name does not start/end with "id" and that parses as a double; all
	 * other properties are kept as string features. Returns null on failure.
	 *
	 * NOTE(review): xL/yL/xR/yR are currently unused because the underlying
	 * callWFS ignores the bounding box (see that method).
	 */
	public static List<FeaturedPolygon> getFeatures(String geoserver, String layer, double xL, double yL, double xR, double yR) {
		try {
			AnalysisLogger.getLogger().debug("Calling WFS towards Geoserver:" + geoserver + " and layer:" + layer);
			String jsonString = callWFS(geoserver, layer, xL, yL, xR, yR);
			// System.out.println("JSON:"+jsonString);
			LinkedHashMap<String, Object> map = JsonMapper.parse(jsonString);
			List<FeaturedPolygon> fpolygons = new ArrayList<FeaturedPolygon>();
			FeaturedPolygon poly = null;
			for (String key : map.keySet()) {
				if (key.contains("features")) {
					HashMap<String, Object> propertiesMap = (HashMap<String, Object>) map.get(key);
					// cycle on all the properties
					for (String properties : propertiesMap.keySet()) {
						if (properties.contains("properties")) {
							if (poly == null)
								poly = new FeaturedPolygon();
							LinkedHashMap<String, String> props = (LinkedHashMap<String, String>) propertiesMap.get(properties);
							// fill the properties of the fpolygon
							for (String keyprop : props.keySet()) {
								try {
									// fulfill the FeaturedPolygon
									String value = props.get(keyprop);
									try {
										String lowcaseprop = keyprop.toLowerCase();
										// first non-id numeric property becomes the polygon value
										if ((poly.value == null) && !lowcaseprop.startsWith("id") && !lowcaseprop.endsWith("id"))
											poly.setValue(Double.parseDouble(value));
										else
											poly.addFeature(keyprop, value);
									} catch (Exception e2) {
										// not numeric: keep it as a plain feature
										poly.addFeature(keyprop, value);
									}
								} catch (Exception e) {
								}
							}
						} else if (properties.contains("geometry") && !properties.contains("geometry_")) {
							// a new geometry closes the previous polygon (NaN value if none was found)
							if (poly == null)
								poly = new FeaturedPolygon();
							else if (poly.p != null) {
								if (poly.value == null)
									poly.value = Double.NaN;
								fpolygons.add(poly);
								poly = new FeaturedPolygon();
							}
							LinkedHashMap<String, String> props = (LinkedHashMap<String, String>) propertiesMap.get(properties);
							List<double[]> coords = WFS2Coordinates(props.toString());
							Geometry p = buildGeometryFromCoordinates(coords);
							if (p != null) {
								poly.setPolygon(p);
								AnalysisLogger.getLogger().trace(p);
							}
							/*
							 * GeometryFactory factory = new GeometryFactory(new PrecisionModel(), 4326); Polygon p = null; if (coords != null) {
							 *
							 * Coordinate[] coordarray = new Coordinate[coords.size()]; int i = 0; for (double[] pair : coords) { coordarray[i] = new Coordinate(pair[0], pair[1]);
							 *
							 * i++; } // TODO: build a multipoly if the ring is not closed! CoordinateArraySequence coordseq = new CoordinateArraySequence(coordarray); LinearRing ring = new LinearRing(coordseq, factory); p = new Polygon(ring, new LinearRing[] {}, factory); } poly.setPolygon(p);
							 */
						}
					}
				}
			}// end for on all the wfs
			// flush the last pending polygon
			if (poly != null) {
				if (poly.value == null)
					poly.value = Double.NaN;
				fpolygons.add(poly);
			}
			return fpolygons;
		} catch (Exception e) {
			AnalysisLogger.getLogger().debug("EnvDataExplorer-> Error in getting properties");
			e.printStackTrace();
			return null;
		}
	}

	/**
	 * Builds a MultiPolygon (EPSG:4326) from a flat list of lon/lat pairs.
	 * A ring is detected whenever a coordinate repeats: a repeat of the FIRST
	 * coordinate closes an outer polygon, a repeat of a later coordinate
	 * closes an inner ring, which is kept as a candidate hole. Polygons that
	 * cover each other are differenced so holes are cut out of their parents.
	 * Returns null when no closed polygon is found.
	 */
	public static Geometry buildGeometryFromCoordinates(List<double[]> coords) {
		Geometry p = null;
		GeometryFactory factory = new GeometryFactory(new PrecisionModel(), 4326);
		int idx = -1;
		List<Coordinate> coordinatesArray = new ArrayList<Coordinate>();
		List<Polygon> polys = new ArrayList<Polygon>();
		List<LinearRing> holespolys = new ArrayList<LinearRing>();
		// j / i only count the polygons / rings found (used by the commented traces)
		int j = 1;
		int i = 1;
		for (double[] pair : coords) {
			Coordinate coordPair = new Coordinate(pair[0], pair[1]);
			if ((idx = coordinatesArray.indexOf(coordPair)) >= 0) {
				// System.out.println("List Contains: "+coordinatesArray.get(idx));
				coordinatesArray.add(coordPair);
				if (idx == 0) {
					// System.out.println("Switching polygon: "+j+" "+coordinatesArray.get(idx));
					j++;
					i = 1;
					Polygon pp = sequence2Polygon(factory, coordinatesArray.subList(idx, coordinatesArray.size()), holespolys);
					// System.out.println(pp);
					polys.add(pp);
					coordinatesArray = new ArrayList<Coordinate>();
				} else {
					// System.out.println("Linear Ring "+i + " "+coordinatesArray.get(idx));
					i++;
					LinearRing ring = sequence2Ring(factory, coordinatesArray.subList(idx, coordinatesArray.size()));
					holespolys.add(ring);
					coordinatesArray = coordinatesArray.subList(0, idx);
				}
			} else
				coordinatesArray.add(coordPair);
		}
		// build a multipoly if the ring is not closed!
		if (polys.size() > 0) {
			// cut the holes
			List<Polygon> polysnoholes = new ArrayList<Polygon>();
			for (Polygon pp : polys) {
				boolean found = false;
				int h = 0;
				for (Polygon polnh : polysnoholes) {
					if (polnh.covers(pp)) {
						// System.out.println("found hole! "+pp+" vs "+polnh);
						polysnoholes.set(h, (Polygon) polnh.difference(pp));
						found = true;
					} else if (pp.covers(polnh)) {
						polysnoholes.set(h, (Polygon) pp.difference(polnh));
						found = true;
					}
					h++;
				}
				if (!found)
					polysnoholes.add(pp);
			}
			Polygon[] polyrawarray = polysnoholes.toArray(new Polygon[polysnoholes.size()]);
			p = new MultiPolygon(polyrawarray, factory);
		}
		return p;
	}

	/** Wraps a closed coordinate sequence into a JTS LinearRing. */
	private static LinearRing sequence2Ring(GeometryFactory factory, List<Coordinate> coordinatesArray) {
		// System.out.println(coordinatesArray);
		Coordinate[] coordrawarray = coordinatesArray.toArray(new Coordinate[coordinatesArray.size()]);
		CoordinateArraySequence coordseq = new CoordinateArraySequence(coordrawarray);
		LinearRing ring = new LinearRing(coordseq, factory);
		return ring;
	}

	/** Wraps a closed coordinate sequence (plus hole rings) into a JTS Polygon. */
	private static Polygon sequence2Polygon(GeometryFactory factory, List<Coordinate> coordinatesArray, List<LinearRing> holespolys) {
		// System.out.println(coordinatesArray);
		Coordinate[] coordrawarray = coordinatesArray.toArray(new Coordinate[coordinatesArray.size()]);
		LinearRing[] holes = holespolys.toArray(new LinearRing[holespolys.size()]);
		CoordinateArraySequence coordseq = new CoordinateArraySequence(coordrawarray);
		LinearRing ring = new LinearRing(coordseq, factory);
		Polygon p = new Polygon(ring, holes, factory);
		return p;
	}

	/**
	 * Extracts the coordinate pairs of a WFS "MultiPolygon" geometry rendered
	 * through JsonMapper's map.toString(), returning them as (lon, lat)
	 * arrays (the WFS order lat,lon is inverted here). Non-MultiPolygon input
	 * yields an empty list.
	 */
	public static List<double[]> WFS2Coordinates(String wfsgeometry) {
		// geometry935133b1-ba3c-493d-8e18-6fb496ced995={type=MultiPolygon, coordinates={966a275c-23aa-4a43-a943-7e1c7eaf5d65=[[[1.5,125.00000000000011],[1.5,124.5],[2.000000000000057,124.5],[2.000000000000057,125.00000000000011],[1.5,125.00000000000011]]]}},
		String[] coordinatePairs = null;
		List<double[]> dpairs = new ArrayList<double[]>();
		if (wfsgeometry.toLowerCase().contains("multipolygon")) {
			String coordString = "coordinates=";
			String coordinates = wfsgeometry.substring(wfsgeometry.indexOf(coordString) + coordString.length());
			coordinates = coordinates.substring(coordinates.indexOf("=") + 1);
			if (coordinates.contains("=")) {
				// strip the UUID keys JsonMapper injects between the rings
				coordinates = coordinates.replaceAll("([A-Za-z0-9]|-|_)+=", "");
				coordinates = coordinates.replaceAll("\\],( )+\\[", "],[");
			}
			coordinatePairs = coordinates.split("\\],\\[");
			for (String coord : coordinatePairs) {
				// NOTE(review): the trailing "|" makes one alternative empty — harmless with replaceAll but worth tightening
				coord = coord.replaceAll("(\\[|\\]|\\}|\\{|)", "");
				String[] coordpair = coord.split(",");
				double[] dd = new double[2];
				// invert the coordinates: the output pairs must be lon,lat
				dd[1] = Double.parseDouble(coordpair[0]);
				dd[0] = Double.parseDouble(coordpair[1]);
				dpairs.add(dd);
			}
		}
		return dpairs;
	}

	/** Manual test: parses a sample geometry and checks point containment. */
	public static void main1(String[] args) {
		String geom = "{type=MultiPolygon, coordinates={cce4daf3-966e-4b5f-adea-f88ea2b93d03=[[[-16,-146.49999999999997],[-16,-146.99999999999994],[-15.5,-146.99999999999994],[-15.5,-146.49999999999997],[-16,-146.49999999999997]]]}}";
		List<double[]> coords = WFS2Coordinates(geom);
		GeometryFactory factory = new GeometryFactory(new PrecisionModel(), 4326);
		// GeometryFactory factory = new GeometryFactory(new PrecisionModel(), 0);
		/*
		 * CoordinateArraySequence coords = new CoordinateArraySequence(new Coordinate[] { new Coordinate(12.0, 34.23), new Coordinate(12.000, 54.555), new Coordinate(7, 8), new Coordinate(12.0, 34.23) }); LinearRing ring = new LinearRing(coords, factory); Polygon p = new Polygon(ring, null, factory); CoordinateArraySequence pcoords = new CoordinateArraySequence(new Coordinate[] { new Coordinate(12.0, 34.23),});
		 */
		// CoordinateArraySequence coords = new CoordinateArraySequence(new Coordinate[] { new Coordinate(1.5, 125.00000000000011), new Coordinate(1.5, 124.5), new Coordinate(2.000000000000057, 124.5), new Coordinate(2.000000000000057, 125.00000000000011), new Coordinate(1.5, 125.00000000000011) });
		if (coords != null) {
			Coordinate[] coordarray = new Coordinate[coords.size()];
			int i = 0;
			for (double[] pair : coords) {
				coordarray[i] = new Coordinate(pair[0], pair[1]);
				i++;
			}
			CoordinateArraySequence coordseq = new CoordinateArraySequence(coordarray);
			LinearRing ring = new LinearRing(coordseq, factory);
			Polygon p = new Polygon(ring, new LinearRing[] {}, factory);
			// CoordinateArraySequence pcoords = new CoordinateArraySequence(new Coordinate[] { new Coordinate(-16,-146.49999999999997), });
			CoordinateArraySequence pcoords = new CoordinateArraySequence(new Coordinate[] { new Coordinate(-150, -16), });
			Point po = new Point(pcoords, factory);
			// po = p.getCentroid();
			System.out.println("contains: " + p.contains(po) + " boundary: " + p.covers(po));
		}
	}

	/** Manual test: checks the UUID-key-stripping regexes on a real WFS dump. */
	public static void main(String[] args) {
		String q = "[[[-10.0011869534696,151.288335840039],[-10.0353384533966,151.27859643813],[-10.0228061679999,151.308700562],[-10.0011869534696,151.288335840039]]], e3c47901-3de5-45d2-a272-c6f7d5df1dec=[[[-8.54674625399991,150.53036499],[-8.83403205899992,150.287445068],[-9.20889866086486,150.195933942647],[-9.20555999999993,150.21039],[-9.20777999999995,150.23218],[-9.27360999999991,150.33095],[-9.38638999999995,150.37717],[-9.39873372345699,150.375441317138],[-9.37888717699991,150.41633606],[-9.64140796699991,150.411376953],[-9.68103313399996,150.684051514],[-9.79481071047286,150.758883440934],[-9.74832999999995,150.75027],[-9.73082999999991,150.74884],[-9.70784999999995,150.76262],[-9.7194399999999,150.78802],[-9.73138999999991,150.80304],[-9.74693999999994,150.82163],[-9.81916999999993,150.90026],[-9.85235999999992,150.93539],[-9.89360999999991,150.96274],[-9.98527999999993,151.03055],[-9.99693999999994,151.03943],[-10.0169399999999,151.05996],[-10.0244399999999,151.07303],[-10.0466,151.11809],[-10.0413899999999,151.13666],[-10.03014,151.14818],[-10.0194499999999,151.14875],[-10.0033999999999,151.13893],[-9.98916999999994,151.13637],[-9.94207999999991,151.18817],[-9.93666999999993,151.20053],[-9.93091343037411,151.222140060489],[-9.68598556499995,150.991424561],[-9.45813846599992,150.936889648],[-9.30954170199993,151.03604126],[-9.13122558599991,150.961669922],[-8.80926608999994,151.055862427],[-8.66848054747773,151.099704833311],[-8.63888999999995,151.10107],[-8.56673125859819,151.063276911059],[-8.52198028599992,150.922012329],[-8.54674625399991,150.53036499]],[[-9.43832999999995,150.66666],[-9.44124999999991,150.67997],[-9.42805999999996,150.73191],[-9.42055999999991,150.7462],[-9.40541999999993,150.7615],[-9.41471999999993,150.77777],[-9.43277999999992,150.80442],[-9.45638999999994,150.8283],[-9.52319999999992,150.88692],[-9.64471999999995,150.93219],[-9.65916999999996,150.93055],[-9.67082999999991,150.92163],[-9.68207999999993,150.90387],[-9.67221999999992,150.89177],[-9.67916999999994,150.87523],[-9.71805999999992,150.84692],[-9.68555999999995,150.84412],[-9.65860999999995,150.80163],[-9.66249999999991,150.76331],[-9.66332999999991,150.69135],[-9.66291999999993,150.65804],[-9.65388999999993,150.62274],[-9.62332999999995,150.51443],[-9.5836099999999,150.4905],[-9.44082999999995,150.42746],[-9.4313899999999,150.42331],[-9.41471999999993,150.41999],[-9.40110999999996,150.41999],[-9.38943999999992,150.4219],[-9.37666999999993,150.42609],[-9.35707999999994,150.43913],[-9.33770999999996,150.48044],[-9.34124999999994,150.5022],[-9.35166999999995,150.53028],[-9.37054999999992,150.57135],[-9.38499999999993,150.59802],[-9.40110999999996,150.62149],[-9.4233299999999,150.63734],[-9.43832999999995,150.66666]]], c905ab63-23c2-4587-bdd6-d6d37a56be51=[[[-8.58588343092737,151.123734225448],[-8.59127089890423,151.123748898655],[-8.58637142199996,151.125274658],[-8.58588343092737,151.123734225448]]], 8471299d-4904-4a10-ab00-c6cc5605bf3b=[[[-10.1228941076499,151.06827675758],[-10.1141699999999,151.02582],[-10.1108299999999,150.99831],[-10.1127799999999,150.98331],[-10.1127665622499,150.982996372512],[-10.1466360089999,151.011245728],[-10.1228941076499,151.06827675758]]], d0a0b923-b401-4cec-ac35-c3d8c837bffc=[[[-10.0506772730004,150.931209804608],[-10.0041699999999,150.91553],[-9.92666999999994,150.87774],[-9.83888999999993,150.8269],[-9.80718113528387,150.767019514441],[-10.0277585979999,150.912094116],[-10.0506772730004,150.931209804608]]]}}";
		q = q.replaceAll("([A-Za-z0-9]|-|_)+=", "");
		// q = q.replaceAll("\\], .*=\\[", "],[");
		System.out.println(q);
		q = q.replaceAll("\\],( )+\\[", "],[");
		System.out.println(q);
	}
}

View File

@ -1,28 +0,0 @@
package org.gcube.dataanalysis.geo.utils;
import java.util.LinkedHashMap;
import com.vividsolutions.jts.geom.Geometry;
/**
 * Simple holder pairing a JTS geometry with a scalar value and a set of
 * string attributes extracted from a WFS feature.
 */
public class FeaturedPolygon {

	/** The polygon/multipolygon geometry of the feature. */
	public Geometry p;
	/** Additional named attributes; allocated lazily on first use. */
	public LinkedHashMap<String, String> features;
	/** Primary numeric value attached to the geometry (may be NaN). */
	public Double value;

	/** Creates an empty holder; all fields start as null. */
	public FeaturedPolygon() {
	}

	/** Attaches the geometry to this feature. */
	public void setPolygon(Geometry geometry) {
		this.p = geometry;
	}

	/** Sets the primary numeric value of this feature. */
	public void setValue(Double newValue) {
		this.value = newValue;
	}

	/** Stores a named attribute, creating the backing map if needed. */
	public void addFeature(String key, String value) {
		if (features == null)
			features = new LinkedHashMap<String, String>();
		features.put(key, value);
	}
}

View File

@ -1,67 +0,0 @@
package org.gcube.dataanalysis.geo.utils;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
/**
 * Converts arbitrary JSON text into nested {@link LinkedHashMap}s using Gson.
 */
public class JsonMapper {

	/**
	 * Parses a JSON string into a map. A JSON object contributes one entry
	 * per member (nested structures are parsed recursively); a JSON array
	 * merges its object elements into the map and stores every non-object
	 * element under a random UUID key. Duplicate keys are disambiguated by
	 * appending a random UUID to the later occurrence.
	 */
	public static LinkedHashMap<String, Object> parse(String json) {
		LinkedHashMap<String, Object> result = new LinkedHashMap<String, Object>();
		Object parsed = new com.google.gson.JsonParser().parse(json);
		if (parsed instanceof com.google.gson.JsonObject) {
			parseMap(result, (JsonObject) parsed);
		} else if (parsed instanceof com.google.gson.JsonArray) {
			for (JsonElement element : (JsonArray) parsed) {
				if (element instanceof JsonObject)
					parseMap(result, (JsonObject) element);
				else
					result.put(UUID.randomUUID().toString(), element.toString());
			}
		}
		return result;
	}

	/**
	 * Copies every member of the Gson object into the map: primitives as
	 * strings, anything else as a recursively parsed sub-map. A key already
	 * present in the map gets a random UUID appended.
	 */
	private static void parseMap(LinkedHashMap<String, Object> map, JsonObject object) {
		for (Map.Entry<String, JsonElement> entry : object.entrySet()) {
			String key = entry.getKey();
			JsonElement value = entry.getValue();
			if (map.get(key) != null) {
				key += UUID.randomUUID();
			}
			if (value.isJsonPrimitive()) {
				map.put(key, value.getAsString());
			} else {
				map.put(key, parse(value.toString()));
			}
		}
	}
}

View File

@ -1,32 +0,0 @@
package org.gcube.dataanalysis.geo.utils;
import org.opengis.temporal.TemporalPrimitive;
/**
 * Minimal placeholder implementation of {@link TemporalPrimitive} carrying no
 * temporal state: it serializes to an empty ISO 19139 extent element.
 *
 * The original class also overrode clone(), equals() and finalize() with
 * bodies that only delegated to super; those no-op overrides have been
 * removed (the finalize override in particular adds needless GC overhead).
 */
public class NetCDFTemporalPrimitive implements TemporalPrimitive {

	/**
	 * Constant hash code: all instances are interchangeable placeholders.
	 * Consistent with the inherited identity-based equals().
	 */
	@Override
	public int hashCode() {
		return 1;
	}

	/** Returns the XML fragment this placeholder stands for. */
	@Override
	public String toString() {
		return "<gmd:extent/>";
	}
}

View File

@ -1,480 +0,0 @@
package org.gcube.dataanalysis.geo.utils;
import java.io.ByteArrayInputStream;
import java.util.ArrayList;
import java.util.Formatter;
import java.util.LinkedHashMap;
import java.util.List;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathFactory;
import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import ucar.ma2.Array;
import ucar.ma2.ArrayDouble;
import ucar.ma2.ArrayFloat;
import ucar.ma2.ArrayInt;
import ucar.ma2.ArrayLong;
import ucar.ma2.Range;
import ucar.ma2.StructureData;
import ucar.ma2.StructureMembers.Member;
import ucar.nc2.constants.FeatureType;
import ucar.nc2.dataset.CoordinateAxis;
import ucar.nc2.dataset.CoordinateAxis1DTime;
import ucar.nc2.dt.GridCoordSystem;
import ucar.nc2.dt.GridDatatype;
import ucar.nc2.dt.grid.GridDataset;
import ucar.nc2.ft.FeatureCollection;
import ucar.nc2.ft.FeatureDataset;
import ucar.nc2.ft.FeatureDatasetFactoryManager;
import ucar.nc2.ft.PointFeatureCollection;
import ucar.nc2.ft.PointFeatureIterator;
import ucar.nc2.ft.point.PointDatasetImpl;
import ucar.nc2.ft.point.standard.StandardPointCollectionImpl;
import ucar.unidata.geoloc.LatLonPointImpl;
import ucar.unidata.geoloc.LatLonRect;
public class ThreddsDataExplorer {
// http://thredds.research-infrastructures.eu:8080/thredds/catalog/public/netcdf/catalog.xml
public static String timePrefix = "time:";
public static List<String> getFiles(String catalogURL) throws Exception {
String xml = HttpRequest.sendGetRequest(catalogURL, null);
XPath xpath = XPathFactory.newInstance().newXPath();
XPathExpression xPathExpression = xpath.compile("//child::*[local-name()='catalog']/child::*[local-name()='dataset']/child::*[local-name()='dataset']");
InputSource inputSource = new InputSource(new ByteArrayInputStream(xml.getBytes("UTF-8")));
NodeList nodes = (NodeList) xPathExpression.evaluate(inputSource, XPathConstants.NODESET);
List<String> fileNames = new ArrayList<String>();
for (int i = 0; i < nodes.getLength(); i++) {
Node node = nodes.item(i);
String name = node.getAttributes().getNamedItem("name").getNodeValue();
if (name != null)
fileNames.add(name);
}
return fileNames;
}
public static LinkedHashMap<String, Double> retrieveDataFromNetCDF(String openDapLink, String layer, double x, double y, double z) {
try {
LinkedHashMap<String, Double> map = new LinkedHashMap<String, Double>();
if (isGridDataset(openDapLink)) {
AnalysisLogger.getLogger().debug("Managing Grid File");
return manageGridDataset(layer, openDapLink, x, y, z);
}
/*
* else if (isPointDataset(openDapLink)) { AnalysisLogger.getLogger().debug("Managing Points File"); }
*/
else
AnalysisLogger.getLogger().debug("Warning: the NETCDF file is of an unknown type");
return map;
} catch (Exception e) {
AnalysisLogger.getLogger().debug("ERROR: " + e.getMessage());
AnalysisLogger.getLogger().debug(e);
e.printStackTrace();
return null;
}
}
public static List<Double> retrieveDataFromNetCDF(String openDapLink, String layer, int time, List<Tuple<Double>> triplets, double xL, double xR, double yL, double yR) {
try {
List<Double> values = new ArrayList<Double>();
if (isGridDataset(openDapLink)) {
AnalysisLogger.getLogger().debug("Managing Grid File");
return manageGridDataset(layer, openDapLink, time, triplets, xL, xR, yL, yR);
}
/*
* else if (isPointDataset(openDapLink)) { AnalysisLogger.getLogger().debug("Managing Points File"); }
*/
else
AnalysisLogger.getLogger().debug("Warning: the NETCDF file is of an unknown type");
return values;
} catch (Exception e) {
AnalysisLogger.getLogger().debug("ERROR: " + e.getMessage());
AnalysisLogger.getLogger().debug(e);
e.printStackTrace();
return null;
}
}
// A GridDatatype is like a specialized Variable that explicitly handles X,Y,Z,T dimensions
public static List<Double> manageGridDataset(String layer, String filename, int time, List<Tuple<Double>> triplets, double xL, double xR, double yL, double yR) throws Exception {
List<Double> values = new ArrayList<Double>();
GridDataset gds = ucar.nc2.dt.grid.GridDataset.open(filename);
List<GridDatatype> gridTypes = gds.getGrids();
for (GridDatatype gdt : gridTypes) {
AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getFullName());
if (layer.equalsIgnoreCase(gdt.getFullName())) {
AnalysisLogger.getLogger().debug("Found layer " + layer + " inside file");
GridDatatype grid = gds.findGridDatatype(gdt.getName());
CoordinateAxis zAxis = gdt.getCoordinateSystem().getVerticalAxis();
CoordinateAxis xAxis = gdt.getCoordinateSystem().getXHorizAxis();
CoordinateAxis yAxis = gdt.getCoordinateSystem().getYHorizAxis();
double resolutionZ = 0;
try{
resolutionZ = Math.abs((double) (zAxis.getMaxValue() - zAxis.getMinValue()) / (double) zAxis.getShape()[0]);
AnalysisLogger.getLogger().debug("Zmin:"+ zAxis.getMinValue()+" Zmax:"+zAxis.getMaxValue());
}catch(Exception e){};
double resolutionX = Math.abs((double) (xAxis.getMaxValue() - xAxis.getMinValue()) / (double) xAxis.getShape()[0]);
double resolutionY = Math.abs((double) (yAxis.getMaxValue() - yAxis.getMinValue()) / (double) yAxis.getShape()[0]);
int tsize = triplets.size();
long t01 = System.currentTimeMillis();
LatLonRect llr = null;
AnalysisLogger.getLogger().debug("Extracting subset...");
GridDatatype gdtsub = grid.makeSubset(new Range(time, time), null, llr, 1, 1, 1);
Array data = gdtsub.readVolumeData(time); // note order is t, z, y, x
int[] shapeD = data.getShape();
int zD = 0;
int xD = 0;
int yD = 0;
if (shapeD.length>2)
{
zD=shapeD[0];
yD=shapeD[1];
xD=shapeD[2];
}
else if (shapeD.length>1)
{
yD=shapeD[0];
xD=shapeD[1];
}
AnalysisLogger.getLogger().debug("Shape: Z:"+zD+" X:"+ xD+" Y:"+yD);
AnalysisLogger.getLogger().debug("Layer Information Retrieval ELAPSED Time: " + (System.currentTimeMillis() - t01));
int rank = data.getRank();
AnalysisLogger.getLogger().debug("Rank of the layer: " + rank);
ArrayFloat.D3 data3Float = null;
ArrayDouble.D3 data3Double = null;
ArrayInt.D3 data3Int = null;
ArrayLong.D3 data3Long = null;
ArrayFloat.D2 data2Float = null;
ArrayDouble.D2 data2Double= null;
ArrayInt.D2 data2Int = null;
ArrayLong.D2 data2Long = null;
if (data.getRank() == 3){
if (data instanceof ArrayFloat.D3)
data3Float = (ArrayFloat.D3) data;
else if (data instanceof ArrayInt.D3)
data3Int = (ArrayInt.D3) data;
else if (data instanceof ArrayDouble.D3)
data3Double = (ArrayDouble.D3) data;
else if (data instanceof ArrayDouble.D3)
data3Double = (ArrayDouble.D3) data;
else if (data instanceof ArrayLong.D3)
data3Long = (ArrayLong.D3) data;
else
throw new Exception("Layer data format not supported");
}
else{
if (data instanceof ArrayFloat.D2)
data2Float = (ArrayFloat.D2) data;
else if (data instanceof ArrayInt.D2)
data2Int = (ArrayInt.D2) data;
else if (data instanceof ArrayDouble.D2)
data2Double = (ArrayDouble.D2) data;
else if (data instanceof ArrayLong.D2)
data2Long = (ArrayLong.D2) data;
else
throw new Exception("Layer data format not supported");
}
double xmin = xAxis.getMinValue();
double xmax = xAxis.getMaxValue();
if (((xmax==360) && (xmin==0)) || ((xmax==359.5) && (xmin==0.5))){
xmax = 180;
xmin=-180;
}
AnalysisLogger.getLogger().debug("X dimension: "+xD+" Xmin:"+ xmax+" Xmax:"+xmin);
for (int i = 0; i < tsize; i++) {
int zint = 0;
int xint = 0;
int yint = 0;
Tuple<Double> triplet = triplets.get(i);
double x = triplet.getElements().get(0);
double y = triplet.getElements().get(1);
if (x == 180)
x = -180;
if (y == 90)
y = -90;
double z = 0;
if (triplet.getElements().size() > 1)
z = triplet.getElements().get(2);
if (resolutionZ > 0) {
if ((zAxis.getMinValue() <= z) && (zAxis.getMaxValue() >= z))
zint = Math.abs((int) Math.round((z - zAxis.getMinValue()) / resolutionZ));
}
// AnalysisLogger.getLogger().debug("Z Index: "+zint);
/*
GridCoordSystem gcs = grid.getCoordinateSystem();
int[] xy = gcs.findXYindexFromLatLon(x, y, null);
Array datas=grid.readDataSlice(time, zint, xy[1], xy[0]);
*/
if ((xmin <= x) && (xmax >= x))
xint = (int) Math.round((x - xmin) / resolutionX);
if ((yAxis.getMinValue() <= y) && (yAxis.getMaxValue() >= y))
yint = (int) Math.round((y - yAxis.getMinValue()) / resolutionY);
Double val = Double.NaN;
if (xint > xD-1)
xint = xD-1;
if (yint > yD-1)
yint = yD-1;
if (zint>zD-1)
zint = zD-1;
if (data3Float != null)
val = Double.valueOf(data3Float.get(zint, yint, xint));
else if (data3Int != null)
val = Double.valueOf(data3Int.get(zint, yint, xint));
else if (data3Double != null)
val = Double.valueOf(data3Double.get(zint, yint, xint));
else if (data3Long != null)
val = Double.valueOf(data3Long.get(zint, yint, xint));
else if (data2Float != null)
val = Double.valueOf(data2Float.get(yint, xint));
else if (data2Int != null)
val = Double.valueOf(data2Int.get(yint, xint));
else if (data2Double != null)
val = Double.valueOf(data2Double.get(yint, xint));
else if (data2Long != null)
val = Double.valueOf(data2Long.get(yint, xint));
values.add(val);
}
break;
}
}
return values;
}
// A GridDatatype is like a specialized Variable that explicitly handles X,Y,Z,T dimensions
public static LinkedHashMap<String, Double> manageGridDataset(String layer, String filename, double x, double y, double z) throws Exception {
LinkedHashMap<String, Double> valuesMap = new LinkedHashMap<String, Double>();
GridDataset gds = ucar.nc2.dt.grid.GridDataset.open(filename);
List<GridDatatype> gridTypes = gds.getGrids();
for (GridDatatype gdt : gridTypes) {
AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getFullName());
if (layer.equalsIgnoreCase(gdt.getFullName())) {
AnalysisLogger.getLogger().debug("Found layer " + layer + " inside file");
GridDatatype grid = gds.findGridDatatype(gdt.getName());
GridCoordSystem gcs = grid.getCoordinateSystem();
long timeSteps = 0;
java.util.Date[] dates = null;
if (gcs.hasTimeAxis1D()) {
CoordinateAxis1DTime tAxis1D = gcs.getTimeAxis1D();
dates = tAxis1D.getTimeDates();
timeSteps = dates.length;
} else if (gcs.hasTimeAxis()) {
CoordinateAxis tAxis = gcs.getTimeAxis();
timeSteps = tAxis.getSize();
}
CoordinateAxis zAxis = gdt.getCoordinateSystem().getVerticalAxis();
double resolutionZ = Math.abs((double) (zAxis.getMaxValue() - zAxis.getMinValue()) / (double) zAxis.getShape()[0]);
int zint = 0;
if (resolutionZ > 0) {
if ((zAxis.getMinValue() <= z) && (zAxis.getMaxValue() >= z))
zint = Math.abs((int) Math.round((z - zAxis.getMinValue()) / resolutionZ));
}
AnalysisLogger.getLogger().debug("Z index to take: " + zint);
int[] xy = gcs.findXYindexFromLatLon(x, y, null);
for (int j = 0; j < timeSteps; j++) {
try {
Array data = grid.readDataSlice(j, zint, xy[1], xy[0]); // note order is t, z, y, x
Double val = takeFirstDouble(data);
if (!val.isNaN()) {
String date = "" + j;
if (dates != null)
date = dates[j].toString();
valuesMap.put(timePrefix + date, Double.parseDouble("" + val));
}
} catch (Exception e) {
AnalysisLogger.getLogger().debug("Error in getting grid values in (" + x + "," + y + "," + z + "= with zint: " + zint + " resolution: " + resolutionZ + " and shape: " + zAxis.getShape()[0]);
}
}
break;
}
}
return valuesMap;
}
public static Double takeFirstDouble(Array data) {
long datal = data.getSize();
Double val = Double.NaN;
try {
for (int k = 0; k < datal; k++) {
Double testVal = data.getDouble(k);
if (!testVal.isNaN()) {
val = testVal;
break;
}
}
} catch (Exception ee) {
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> WARNING: Error in getting value: " + ee.getLocalizedMessage());
}
return val;
}
// A GridDatatype is like a specialized Variable that explicitly handles X,Y,Z,T dimensions
public LinkedHashMap<String, String> managePointsDataset(String layer, String filename, double x, double y) throws Exception {
LinkedHashMap<String, String> valuesMap = new LinkedHashMap<String, String>();
float tolerance = 0.25f;
Formatter errlog = new Formatter();
FeatureDataset fdataset = FeatureDatasetFactoryManager.open(FeatureType.POINT, filename, null, errlog);
PointDatasetImpl ds = (PointDatasetImpl) fdataset;
List<FeatureCollection> lfc = ds.getPointFeatureCollectionList();
for (FeatureCollection fc : lfc) {
StandardPointCollectionImpl spf = (StandardPointCollectionImpl) fc;
PointFeatureIterator iter = null;
while ((y - tolerance > -90) && (x - tolerance > -180) && (y + tolerance < 90) && (x + tolerance < 180)) {
LatLonRect rect = new LatLonRect(new LatLonPointImpl(y - tolerance, x - tolerance), new LatLonPointImpl(y + tolerance, x + tolerance));
PointFeatureCollection coll = spf.subset(rect, null);
iter = coll.getPointFeatureIterator(100 * 1000); // 100Kb buffer
if (iter.getCount() == 0)
iter.finish();
else
break;
tolerance = tolerance + 0.25f;
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> tolerance = " + tolerance);
}
if (iter != null) {
try {
while (iter.hasNext()) {
ucar.nc2.ft.PointFeature pf = iter.next();
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> EarthLoc: " + pf.getLocation());
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> EarthTime: " + pf.getObservationTime());
StructureData sd = pf.getData();
List<Member> mems = sd.getMembers();
for (Member m : mems) {
String unit = m.getUnitsString();
if ((unit != null) && (unit.length() > 0)) {
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> description: " + m.getDescription());
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> data param: " + m.getDataParam());
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> name: " + m.getName());
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> unit: " + m.getUnitsString());
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> type: " + m.getDataType());
Array arr = sd.getArray(m.getName());
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> is Time: " + m.getDataType());
Double val = takeFirstDouble(arr);
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> extracted value: " + val);
}
}
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> EarthTime: ");
}
} finally {
iter.finish();
}
}
break;
}
return valuesMap;
}
// A GridDatatype is like a specialized Variable that explicitly handles X,Y,Z,T dimensions
public static boolean isGridDataset(String filename) {
try {
AnalysisLogger.getLogger().debug("Analyzing file " + filename);
Formatter errlog = new Formatter();
FeatureDataset fdataset = FeatureDatasetFactoryManager.open(FeatureType.GRID, filename, null, errlog);
if (fdataset == null) {
// System.out.printf("GRID Parse failed --> %s\n", errlog);
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> NOT GRID");
return false;
} else
return true;
} catch (Throwable e) {
return false;
}
}
// A GridDatatype is like a specialized Variable that explicitly handles X,Y,Z,T dimensions
public static boolean isPointDataset(String filename) {
try {
Formatter errlog = new Formatter();
FeatureDataset fdataset = FeatureDatasetFactoryManager.open(FeatureType.POINT, filename, null, errlog);
if (fdataset == null) {
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> NOT POINT");
return false;
} else
return true;
} catch (Exception e) {
return false;
}
}
public static boolean isDataset(String filename) throws Exception {
boolean isdataset = false;
try {
Formatter errlog = new Formatter();
FeatureType[] fts = FeatureType.values();
for (int i = 0; i < fts.length; i++) {
FeatureDataset fdataset = FeatureDatasetFactoryManager.open(fts[i], filename, null, errlog);
if (fdataset == null) {
// System.out.printf(fts[i]+": Parse failed --> %s\n",errlog);
} else {
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> " + fts[i] + " OK!");
isdataset = true;
}
}
} catch (Exception e) {
}
return isdataset;
}
/**
 * Adjusts a longitude value. Clamping to [-180, 180] was intentionally
 * disabled (see commented-out code); the input is returned unchanged.
 */
public static double adjX(double x) {
	/*
	 * if (x < -180) x = -180; if (x > 180) x = 180;
	 */
	return x;
}
/**
 * Adjusts a latitude value. Clamping to [-90, 90] was intentionally
 * disabled (see commented-out code); the input is returned unchanged.
 */
public static double adjY(double y) {
	/*
	 * if (y < -90) y = -90; if (y > 90) y = 90;
	 */
	return y;
}
/** Minimum value of the grid's horizontal X axis, passed through adjX. */
public static double getMinX(GridCoordSystem gcs) {
	return adjX(gcs.getXHorizAxis().getMinValue());
}
/** Maximum value of the grid's horizontal X axis, passed through adjX. */
public static double getMaxX(GridCoordSystem gcs) {
	return adjX(gcs.getXHorizAxis().getMaxValue());
}
/** Minimum value of the grid's horizontal Y axis, passed through adjY. */
public static double getMinY(GridCoordSystem gcs) {
	return adjY(gcs.getYHorizAxis().getMinValue());
}
/** Maximum value of the grid's horizontal Y axis, passed through adjY. */
public static double getMaxY(GridCoordSystem gcs) {
	return adjY(gcs.getYHorizAxis().getMaxValue());
}
}

View File

@ -1 +0,0 @@
< ャ@Qaj、F÷gク<67>

Binary file not shown.

View File

@ -1,48 +0,0 @@
# Download the World Ocean Atlas 2009 NetCDF climatologies from the NODC
# THREDDS server. For each resolution (1deg, 5deg), each variable, and each
# averaging period (annual, seasonal, monthly) a file is fetched and saved as
#   <variable>_<period>_<deg>_ENVIRONMENT_<category>_.nc
# The loop order reproduces the original command sequence exactly:
# all 1deg files first, then all 5deg files; within a resolution the
# variables run temperature..nitrate, each with annual/seasonal/monthly.
base=http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata
for deg in 1deg 5deg; do
	for spec in temperature:OCEANS salinity:OCEANS dissolved_oxygen:BIOTA oxygen_saturation:BIOTA apparent_oxygen_utilization:BIOTA phosphate:BIOTA silicate:BIOTA nitrate:BIOTA; do
		var=${spec%%:*}
		cat=${spec##*:}
		for period in annual seasonal monthly; do
			wget --output-document=${var}_${period}_${deg}_ENVIRONMENT_${cat}_.nc ${base}/${var}_${period}_${deg}.nc
		done
	done
done