From 75a37f8ce46ca6915cc704c1d09f1e5fb07ecc11 Mon Sep 17 00:00:00 2001
From: Sergey Motornyuk
Date: Wed, 11 Dec 2019 12:31:25 +0200
Subject: [PATCH 001/139] Split plugins
---
.../spatial/{plugin.py => plugin/__init__.py} | 55 +++++--------------
ckanext/spatial/plugin/flask_plugin.py | 41 ++++++++++++++
ckanext/spatial/plugin/pylons_plugin.py | 41 ++++++++++++++
pip-requirements.txt | 1 +
setup.py | 4 ++
5 files changed, 101 insertions(+), 41 deletions(-)
rename ckanext/spatial/{plugin.py => plugin/__init__.py} (90%)
create mode 100644 ckanext/spatial/plugin/flask_plugin.py
create mode 100644 ckanext/spatial/plugin/pylons_plugin.py
diff --git a/ckanext/spatial/plugin.py b/ckanext/spatial/plugin/__init__.py
similarity index 90%
rename from ckanext/spatial/plugin.py
rename to ckanext/spatial/plugin/__init__.py
index e2d73df..9847c42 100644
--- a/ckanext/spatial/plugin.py
+++ b/ckanext/spatial/plugin/__init__.py
@@ -9,6 +9,14 @@ from ckan import plugins as p
from ckan.lib.helpers import json
+if p.toolkit.check_ckan_version(min_version="2.9"):
+ from ckanext.spatial.plugin.flask_plugin import (
+ SpatialQueryMixin, HarvestMetadataApiMixin
+ )
+else:
+ from ckanext.spatial.plugin.pylons_plugin import (
+ SpatialQueryMixin, HarvestMetadataApiMixin
+ )
def check_geoalchemy_requirement():
'''Checks if a suitable geoalchemy version installed
@@ -77,9 +85,9 @@ class SpatialMetadata(p.SingletonPlugin):
''' Set up the resource library, public directory and
template directory for all the spatial extensions
'''
- p.toolkit.add_public_directory(config, 'public')
- p.toolkit.add_template_directory(config, 'templates')
- p.toolkit.add_resource('public', 'ckanext-spatial')
+ p.toolkit.add_public_directory(config, '../public')
+ p.toolkit.add_template_directory(config, '../templates')
+ p.toolkit.add_resource('../public', 'ckanext-spatial')
# Add media types for common extensions not included in the mimetypes
# module
@@ -150,9 +158,8 @@ class SpatialMetadata(p.SingletonPlugin):
'get_common_map_config' : spatial_helpers.get_common_map_config,
}
-class SpatialQuery(p.SingletonPlugin):
+class SpatialQuery(SpatialQueryMixin, p.SingletonPlugin):
- p.implements(p.IRoutes, inherit=True)
p.implements(p.IPackageController, inherit=True)
p.implements(p.IConfigurable, inherit=True)
@@ -166,13 +173,6 @@ class SpatialQuery(p.SingletonPlugin):
'Please upgrade CKAN or select the \'postgis\' backend.'
raise p.toolkit.CkanVersionException(msg)
- def before_map(self, map):
-
- map.connect('api_spatial_query', '/api/2/search/{register:dataset|package}/geo',
- controller='ckanext.spatial.controllers.api:ApiController',
- action='spatial_query')
- return map
-
def before_index(self, pkg_dict):
import shapely
import shapely.geometry
@@ -390,7 +390,7 @@ class SpatialQuery(p.SingletonPlugin):
search_results['results'] = pkgs
return search_results
-class HarvestMetadataApi(p.SingletonPlugin):
+class HarvestMetadataApi(HarvestMetadataApiMixin, p.SingletonPlugin):
'''
Harvest Metadata API
(previously called "InspireApi")
@@ -398,31 +398,4 @@ class HarvestMetadataApi(p.SingletonPlugin):
A way for a user to view the harvested metadata XML, either as a raw file or
styled to view in a web browser.
'''
- p.implements(p.IRoutes)
-
- def before_map(self, route_map):
- controller = "ckanext.spatial.controllers.api:HarvestMetadataApiController"
-
- # Showing the harvest object content is an action of the default
- # harvest plugin, so just redirect there
- route_map.redirect('/api/2/rest/harvestobject/{id:.*}/xml',
- '/harvest/object/{id}',
- _redirect_code='301 Moved Permanently')
-
- route_map.connect('/harvest/object/{id}/original', controller=controller,
- action='display_xml_original')
-
- route_map.connect('/harvest/object/{id}/html', controller=controller,
- action='display_html')
- route_map.connect('/harvest/object/{id}/html/original', controller=controller,
- action='display_html_original')
-
- # Redirect old URL to a nicer and unversioned one
- route_map.redirect('/api/2/rest/harvestobject/:id/html',
- '/harvest/object/{id}/html',
- _redirect_code='301 Moved Permanently')
-
- return route_map
-
- def after_map(self, route_map):
- return route_map
+ pass
diff --git a/ckanext/spatial/plugin/flask_plugin.py b/ckanext/spatial/plugin/flask_plugin.py
new file mode 100644
index 0000000..cbd5006
--- /dev/null
+++ b/ckanext/spatial/plugin/flask_plugin.py
@@ -0,0 +1,41 @@
+import ckan.plugins as p
+
+
+
+class SpatialQueryMixin(p.SingletonPlugin):
+ p.implements(p.IRoutes, inherit=True)
+
+ # IRoutes
+ def before_map(self, map):
+
+ map.connect('api_spatial_query', '/api/2/search/{register:dataset|package}/geo',
+ controller='ckanext.spatial.controllers.api:ApiController',
+ action='spatial_query')
+ return map
+
+class HarvestMetadataApiMixin(p.SingletonPlugin):
+ p.implements(p.IRoutes, inherit=True)
+
+ def before_map(self, route_map):
+ controller = "ckanext.spatial.controllers.api:HarvestMetadataApiController"
+
+ # Showing the harvest object content is an action of the default
+ # harvest plugin, so just redirect there
+ route_map.redirect('/api/2/rest/harvestobject/{id:.*}/xml',
+ '/harvest/object/{id}',
+ _redirect_code='301 Moved Permanently')
+
+ route_map.connect('/harvest/object/{id}/original', controller=controller,
+ action='display_xml_original')
+
+ route_map.connect('/harvest/object/{id}/html', controller=controller,
+ action='display_html')
+ route_map.connect('/harvest/object/{id}/html/original', controller=controller,
+ action='display_html_original')
+
+ # Redirect old URL to a nicer and unversioned one
+ route_map.redirect('/api/2/rest/harvestobject/:id/html',
+ '/harvest/object/{id}/html',
+ _redirect_code='301 Moved Permanently')
+
+ return route_map
diff --git a/ckanext/spatial/plugin/pylons_plugin.py b/ckanext/spatial/plugin/pylons_plugin.py
new file mode 100644
index 0000000..cbd5006
--- /dev/null
+++ b/ckanext/spatial/plugin/pylons_plugin.py
@@ -0,0 +1,41 @@
+import ckan.plugins as p
+
+
+
+class SpatialQueryMixin(p.SingletonPlugin):
+ p.implements(p.IRoutes, inherit=True)
+
+ # IRoutes
+ def before_map(self, map):
+
+ map.connect('api_spatial_query', '/api/2/search/{register:dataset|package}/geo',
+ controller='ckanext.spatial.controllers.api:ApiController',
+ action='spatial_query')
+ return map
+
+class HarvestMetadataApiMixin(p.SingletonPlugin):
+ p.implements(p.IRoutes, inherit=True)
+
+ def before_map(self, route_map):
+ controller = "ckanext.spatial.controllers.api:HarvestMetadataApiController"
+
+ # Showing the harvest object content is an action of the default
+ # harvest plugin, so just redirect there
+ route_map.redirect('/api/2/rest/harvestobject/{id:.*}/xml',
+ '/harvest/object/{id}',
+ _redirect_code='301 Moved Permanently')
+
+ route_map.connect('/harvest/object/{id}/original', controller=controller,
+ action='display_xml_original')
+
+ route_map.connect('/harvest/object/{id}/html', controller=controller,
+ action='display_html')
+ route_map.connect('/harvest/object/{id}/html/original', controller=controller,
+ action='display_html_original')
+
+ # Redirect old URL to a nicer and unversioned one
+ route_map.redirect('/api/2/rest/harvestobject/:id/html',
+ '/harvest/object/{id}/html',
+ _redirect_code='301 Moved Permanently')
+
+ return route_map
diff --git a/pip-requirements.txt b/pip-requirements.txt
index 2251af8..e948c50 100644
--- a/pip-requirements.txt
+++ b/pip-requirements.txt
@@ -6,3 +6,4 @@ lxml>=2.3
argparse
pyparsing>=2.1.10
requests>=1.1.0
+future>=0.18.2
diff --git a/setup.py b/setup.py
index 294d144..b5820e9 100644
--- a/setup.py
+++ b/setup.py
@@ -46,5 +46,9 @@ setup(
[ckan.test_plugins]
test_spatial_plugin = ckanext.spatial.tests.test_plugin.plugin:TestSpatialPlugin
+ [console_scripts]
+ spatial = ckanext.spatial.cli:spatial
+ ckan-pycsw = ckanext.spatial.cli:ckan_pycsw
+ validation = ckanext.spatial.cli:validation
""",
)
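
For reference, the net effect of patch 001 is the import pattern below (a condensed sketch drawn from the hunks above, not the complete plugin module): CKAN 2.9 and later run on Flask, so framework-specific routing code is pulled from flask_plugin, while older releases keep the Pylons code from pylons_plugin.

    # Condensed sketch of the version-gated mixin pattern this patch introduces.
    import ckan.plugins as p

    if p.toolkit.check_ckan_version(min_version="2.9"):
        # CKAN >= 2.9 is Flask-based and gets its own routing mixin
        from ckanext.spatial.plugin.flask_plugin import SpatialQueryMixin
    else:
        # older CKAN keeps the Pylons IRoutes mixin
        from ckanext.spatial.plugin.pylons_plugin import SpatialQueryMixin

    class SpatialQuery(SpatialQueryMixin, p.SingletonPlugin):
        # framework-agnostic search/indexing hooks stay on this class
        p.implements(p.IPackageController, inherit=True)
        p.implements(p.IConfigurable, inherit=True)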
From 2ab242ed974f40c6b828c155970e0d2f8c99148f Mon Sep 17 00:00:00 2001
From: Sergey Motornyuk
Date: Wed, 11 Dec 2019 13:44:09 +0200
Subject: [PATCH 002/139] Update views
---
ckanext/spatial/controllers/view.py | 38 ----
ckanext/spatial/harvesters/base.py | 8 +-
ckanext/spatial/helpers.py | 6 +-
ckanext/spatial/lib/__init__.py | 7 +-
ckanext/spatial/plugin/__init__.py | 1 +
ckanext/spatial/plugin/flask_plugin.py | 41 +---
.../spatial/snippets/dataset_map_asset.html | 1 +
.../spatial/snippets/dataset_map_base.html | 5 +-
.../snippets/dataset_map_resource.html | 1 +
.../spatial/snippets/spatial_query.html | 35 ++--
.../spatial/snippets/spatial_query_asset.html | 1 +
.../snippets/spatial_query_resource.html | 1 +
ckanext/spatial/views.py | 180 ++++++++++++++++++
13 files changed, 232 insertions(+), 93 deletions(-)
delete mode 100644 ckanext/spatial/controllers/view.py
create mode 100644 ckanext/spatial/templates/spatial/snippets/dataset_map_asset.html
create mode 100644 ckanext/spatial/templates/spatial/snippets/dataset_map_resource.html
create mode 100644 ckanext/spatial/templates/spatial/snippets/spatial_query_asset.html
create mode 100644 ckanext/spatial/templates/spatial/snippets/spatial_query_resource.html
create mode 100644 ckanext/spatial/views.py
diff --git a/ckanext/spatial/controllers/view.py b/ckanext/spatial/controllers/view.py
deleted file mode 100644
index eb3eeef..0000000
--- a/ckanext/spatial/controllers/view.py
+++ /dev/null
@@ -1,38 +0,0 @@
-import urllib2
-
-from ckan.lib.base import BaseController, c, request, \
- response, render, abort
-
-from ckan.model import Package
-
-
-class ViewController(BaseController):
-
- def wms_preview(self, id):
- # check if package exists
- c.pkg = Package.get(id)
- if c.pkg is None:
- abort(404, 'Dataset not found')
-
- for res in c.pkg.resources:
- if res.format.lower() == 'wms':
- c.wms_url = res.url \
- if '?' not in res.url else res.url.split('?')[0]
- break
- if not c.wms_url:
- abort(400, 'This dataset does not have a WMS resource')
-
- return render('ckanext/spatial/wms_preview.html')
-
- def proxy(self):
- if 'url' not in request.params:
- abort(400)
- try:
- server_response = urllib2.urlopen(request.params['url'])
- headers = server_response.info()
- if headers.get('Content-Type'):
- response.content_type = headers.get('Content-Type')
- return server_response.read()
- except urllib2.HTTPError as e:
- response.status_int = e.getcode()
- return
diff --git a/ckanext/spatial/harvesters/base.py b/ckanext/spatial/harvesters/base.py
index 4bac371..0d3722b 100644
--- a/ckanext/spatial/harvesters/base.py
+++ b/ckanext/spatial/harvesters/base.py
@@ -13,7 +13,6 @@ import dateutil
import mimetypes
-from pylons import config
from owslib import wms
import requests
from lxml import etree
@@ -33,6 +32,11 @@ from ckanext.spatial.validation import Validators, all_validators
from ckanext.spatial.model import ISODocument
from ckanext.spatial.interfaces import ISpatialHarvester
+if p.toolkit.check_ckan_version("2.9"):
+ config = p.toolkit.config
+else:
+ from pylons import config
+
log = logging.getLogger(__name__)
DEFAULT_VALIDATOR_PROFILES = ['iso19139']
@@ -203,7 +207,7 @@ class SpatialHarvester(HarvesterBase):
:returns: A dataset dictionary (package_dict)
:rtype: dict
'''
-
+
tags = []
if 'tags' in iso_values:
diff --git a/ckanext/spatial/helpers.py b/ckanext/spatial/helpers.py
index 7b205d2..0d015ba 100644
--- a/ckanext/spatial/helpers.py
+++ b/ckanext/spatial/helpers.py
@@ -1,9 +1,13 @@
import logging
-from pylons import config
from ckan import plugins as p
from ckan.lib import helpers as h
+if p.toolkit.check_ckan_version("2.9"):
+ config = p.toolkit.config
+else:
+ from pylons import config
+
log = logging.getLogger(__name__)
diff --git a/ckanext/spatial/lib/__init__.py b/ckanext/spatial/lib/__init__.py
index 5b0bb94..b053af7 100644
--- a/ckanext/spatial/lib/__init__.py
+++ b/ckanext/spatial/lib/__init__.py
@@ -2,11 +2,16 @@ import logging
from string import Template
from ckan.model import Session, Package
-from ckan.lib.base import config
+import ckan.plugins.toolkit as tk
from ckanext.spatial.model import PackageExtent
from shapely.geometry import asShape
+if tk.check_ckan_version("2.9"):
+ config = tk.config
+else:
+ from ckan.lib.base import config
+
from ckanext.spatial.geoalchemy_common import (WKTElement, ST_Transform,
compare_geometry_fields,
)
diff --git a/ckanext/spatial/plugin/__init__.py b/ckanext/spatial/plugin/__init__.py
index 9847c42..3616d75 100644
--- a/ckanext/spatial/plugin/__init__.py
+++ b/ckanext/spatial/plugin/__init__.py
@@ -390,6 +390,7 @@ class SpatialQuery(SpatialQueryMixin, p.SingletonPlugin):
search_results['results'] = pkgs
return search_results
+
class HarvestMetadataApi(HarvestMetadataApiMixin, p.SingletonPlugin):
'''
Harvest Metadata API
diff --git a/ckanext/spatial/plugin/flask_plugin.py b/ckanext/spatial/plugin/flask_plugin.py
index cbd5006..3108d65 100644
--- a/ckanext/spatial/plugin/flask_plugin.py
+++ b/ckanext/spatial/plugin/flask_plugin.py
@@ -1,41 +1,18 @@
import ckan.plugins as p
-
-
+import ckanext.spatial.views as blueprints
class SpatialQueryMixin(p.SingletonPlugin):
- p.implements(p.IRoutes, inherit=True)
+ p.implements(p.IBlueprint)
- # IRoutes
- def before_map(self, map):
+ # IBlueprint
- map.connect('api_spatial_query', '/api/2/search/{register:dataset|package}/geo',
- controller='ckanext.spatial.controllers.api:ApiController',
- action='spatial_query')
- return map
+ def get_blueprint(self):
+ return [blueprints.api]
class HarvestMetadataApiMixin(p.SingletonPlugin):
- p.implements(p.IRoutes, inherit=True)
+ p.implements(p.IBlueprint)
- def before_map(self, route_map):
- controller = "ckanext.spatial.controllers.api:HarvestMetadataApiController"
+ # IBlueprint
- # Showing the harvest object content is an action of the default
- # harvest plugin, so just redirect there
- route_map.redirect('/api/2/rest/harvestobject/{id:.*}/xml',
- '/harvest/object/{id}',
- _redirect_code='301 Moved Permanently')
-
- route_map.connect('/harvest/object/{id}/original', controller=controller,
- action='display_xml_original')
-
- route_map.connect('/harvest/object/{id}/html', controller=controller,
- action='display_html')
- route_map.connect('/harvest/object/{id}/html/original', controller=controller,
- action='display_html_original')
-
- # Redirect old URL to a nicer and unversioned one
- route_map.redirect('/api/2/rest/harvestobject/:id/html',
- '/harvest/object/{id}/html',
- _redirect_code='301 Moved Permanently')
-
- return route_map
+ def get_blueprint(self):
+ return [blueprints.harvest_metadata]
diff --git a/ckanext/spatial/templates/spatial/snippets/dataset_map_asset.html b/ckanext/spatial/templates/spatial/snippets/dataset_map_asset.html
new file mode 100644
index 0000000..c9f18d2
--- /dev/null
+++ b/ckanext/spatial/templates/spatial/snippets/dataset_map_asset.html
@@ -0,0 +1 @@
+{% asset 'ckanext-spatial/dataset_map' %}
diff --git a/ckanext/spatial/templates/spatial/snippets/dataset_map_base.html b/ckanext/spatial/templates/spatial/snippets/dataset_map_base.html
index 2d42230..2582b28 100644
--- a/ckanext/spatial/templates/spatial/snippets/dataset_map_base.html
+++ b/ckanext/spatial/templates/spatial/snippets/dataset_map_base.html
@@ -14,8 +14,9 @@ extent
- {% snippet "spatial/snippets/map_attribution.html", map_config=map_config %}
+ {% snippet "spatial/snippets/map_attribution.html", map_config=map_config %}
-{% resource 'ckanext-spatial/dataset_map' %}
+{% set type = 'asset' if h.ckan_version() > '2.9' else 'resource' %}
+{% include 'spatial/snippets/dataset_map_' ~ type ~ '.html' %}
diff --git a/ckanext/spatial/templates/spatial/snippets/dataset_map_resource.html b/ckanext/spatial/templates/spatial/snippets/dataset_map_resource.html
new file mode 100644
index 0000000..38c2f42
--- /dev/null
+++ b/ckanext/spatial/templates/spatial/snippets/dataset_map_resource.html
@@ -0,0 +1 @@
+{% resource 'ckanext-spatial/dataset_map' %}
diff --git a/ckanext/spatial/templates/spatial/snippets/spatial_query.html b/ckanext/spatial/templates/spatial/snippets/spatial_query.html
index f847f06..b6e6cc8 100644
--- a/ckanext/spatial/templates/spatial/snippets/spatial_query.html
+++ b/ckanext/spatial/templates/spatial/snippets/spatial_query.html
@@ -2,29 +2,30 @@
Displays a map widget to define a spatial filter on the dataset search page sidebar
default_extent
- Initial map extent (Optional, defaults to the whole world). It can be defined
- either as a pair of coordinates or as a GeoJSON bounding box.
+Initial map extent (Optional, defaults to the whole world). It can be defined
+either as a pair of coordinates or as a GeoJSON bounding box.
e.g.
- {% snippet "spatial/snippets/spatial_query.html", default_extent=[[15.62, -139.21], [64.92, -61.87]] %}
+{% snippet "spatial/snippets/spatial_query.html", default_extent=[[15.62, -139.21], [64.92, -61.87]] %}
- {% snippet "spatial/snippets/spatial_query.html", default_extent="{ \"type\": \"Polygon\", \"coordinates\": [[[74.89, 29.39],[74.89, 38.45], [60.50, 38.45], [60.50, 29.39], [74.89, 29.39]]]}" %}
+{% snippet "spatial/snippets/spatial_query.html", default_extent="{ \"type\": \"Polygon\", \"coordinates\": [[[74.89, 29.39],[74.89, 38.45], [60.50, 38.45], [60.50, 29.39], [74.89, 29.39]]]}" %}
#}
-
-
- {{ _('Filter by location') }}
- {{ _('Clear') }}
-
- {% set map_config = h.get_common_map_config() %}
-
-
- {% snippet "spatial/snippets/map_attribution.html", map_config=map_config %}
-
+
+
+ {{ _('Filter by location') }}
+ {{ _('Clear') }}
+
+ {% set map_config = h.get_common_map_config() %}
+
+
+ {% snippet "spatial/snippets/map_attribution.html", map_config=map_config %}
+
-{% resource 'ckanext-spatial/spatial_query' %}
+{% set type = 'asset' if h.ckan_version() > '2.9' else 'resource' %}
+{% include 'spatial/snippets/spatial_query_' ~ type ~ '.html' %}
diff --git a/ckanext/spatial/templates/spatial/snippets/spatial_query_asset.html b/ckanext/spatial/templates/spatial/snippets/spatial_query_asset.html
new file mode 100644
index 0000000..649ef62
--- /dev/null
+++ b/ckanext/spatial/templates/spatial/snippets/spatial_query_asset.html
@@ -0,0 +1 @@
+{% asset 'ckanext-spatial/spatial_query' %}
diff --git a/ckanext/spatial/templates/spatial/snippets/spatial_query_resource.html b/ckanext/spatial/templates/spatial/snippets/spatial_query_resource.html
new file mode 100644
index 0000000..499cde4
--- /dev/null
+++ b/ckanext/spatial/templates/spatial/snippets/spatial_query_resource.html
@@ -0,0 +1 @@
+{% resource 'ckanext-spatial/spatial_query' %}
diff --git a/ckanext/spatial/views.py b/ckanext/spatial/views.py
new file mode 100644
index 0000000..dde6b97
--- /dev/null
+++ b/ckanext/spatial/views.py
@@ -0,0 +1,180 @@
+# -*- coding: utf-8 -*-
+
+import logging
+
+from flask import Blueprint, make_response
+from lxml import etree
+from pkg_resources import resource_stream
+
+import ckan.lib.helpers as h
+import ckan.plugins.toolkit as tk
+from ckan.common import request, config
+from ckan.model import Session
+from ckan.views.api import _finish_ok, _finish_bad_request
+
+from ckanext.spatial.lib import get_srid, validate_bbox, bbox_query
+
+
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
+
+log = logging.getLogger(__name__)
+
+api = Blueprint("spatial_api", __name__)
+
+
+def spatial_query(register):
+ error_400_msg = \
+ 'Please provide a suitable bbox parameter [minx,miny,maxx,maxy]'
+
+ if not 'bbox' in request.args:
+ return _finish_bad_request(error_400_msg)
+
+ bbox = validate_bbox(request.params['bbox'])
+
+ if not bbox:
+ return _finish_bad_request(error_400_msg)
+
+ srid = get_srid(request.args.get('crs')) if 'crs' in \
+ request.args else None
+
+ extents = bbox_query(bbox, srid)
+
+ format = request.args.get('format', '')
+
+ ids = [extent.package_id for extent in extents]
+ output = dict(count=len(ids), results=ids)
+
+ return _finish_ok(output)
+
+
+api.add_url_rule('/api/2/search/<register>/geo', view_func=spatial_query)
+
+harvest_metadata = Blueprint("spatial_harvest_metadata", __name__)
+
+
+def harvest_object_redirect_xml(id):
+ return h.redirect_to('/harvest/object/{}'.format(id))
+
+
+def harvest_object_redirect_html(id):
+ return h.redirect_to('/harvest/object/{}/html'.format(id))
+
+
+def _get_original_content(id):
+ from ckanext.harvest.model import HarvestObject, HarvestObjectExtra
+
+ extra = Session.query(HarvestObjectExtra).join(HarvestObject) \
+ .filter(HarvestObject.id == id) \
+ .filter(
+ HarvestObjectExtra.key == 'original_document'
+ ).first()
+ if extra:
+ return extra.value
+ else:
+ return None
+
+
+def _get_content(id):
+ from ckanext.harvest.model import HarvestObject
+ obj = Session.query(HarvestObject) \
+ .filter(HarvestObject.id == id).first()
+ if obj:
+ return obj.content
+ else:
+ return None
+
+
+def _transform_to_html(content, xslt_package=None, xslt_path=None):
+
+ xslt_package = xslt_package or __name__
+ xslt_path = xslt_path or \
+ '../templates/ckanext/spatial/gemini2-html-stylesheet.xsl'
+
+ # optimise -- read transform only once and compile rather
+ # than at each request
+ with resource_stream(xslt_package, xslt_path) as style:
+ style_xml = etree.parse(style)
+ transformer = etree.XSLT(style_xml)
+
+ xml = etree.parse(StringIO(content.encode('utf-8')))
+ html = transformer(xml)
+
+ result = etree.tostring(html, pretty_print=True)
+
+ return result
+
+
+def _get_xslt(original=False):
+
+ if original:
+ config_option = \
+ 'ckanext.spatial.harvest.xslt_html_content_original'
+ else:
+ config_option = 'ckanext.spatial.harvest.xslt_html_content'
+
+ xslt_package = None
+ xslt_path = None
+ xslt = config.get(config_option, None)
+ if xslt:
+ if ':' in xslt:
+ xslt = xslt.split(':')
+ xslt_package = xslt[0]
+ xslt_path = xslt[1]
+ else:
+            log.error('XSLT should be defined in the form <module>:<file>' +
+                      ', eg ckanext.myext:templates/my.xslt')
+
+ return xslt_package, xslt_path
+
+
+def display_xml_original(id):
+ content = _get_original_content(id)
+
+ if not content:
+ return tk.abort(404)
+
+ headers = {'Content-Type': 'application/xml; charset=utf-8'}
+
+    if not '<?xml' in content.split('\n')[0]:
+        content = u'<?xml version="1.0" encoding="UTF-8"?>\n' + content
+    return make_response((content, 200, headers))
+
+
+def display_html(id):
+ content = _get_content(id)
+
+ if not content:
+ return tk.abort(404)
+ headers = {'Content-Type': 'text/html; charset=utf-8'}
+
+ xslt_package, xslt_path = _get_xslt()
+ content = _transform_to_html(content, xslt_package, xslt_path)
+    return make_response((content, 200, headers))
+
+
+def display_html_original(id):
+ content = _get_original_content(id)
+
+ if content is None:
+ return tk.abort(404)
+ headers = {'Content-Type': 'text/html; charset=utf-8'}
+
+ xslt_package, xslt_path = _get_xslt(original=True)
+ content = _transform_to_html(content, xslt_package, xslt_path)
+    return make_response((content, 200, headers))
+
+
+harvest_metadata.add_url_rule('/api/2/rest/harvestobject/<id>/xml',
+                              view_func=harvest_object_redirect_xml)
+harvest_metadata.add_url_rule('/api/2/rest/harvestobject/<id>/html',
+                              view_func=harvest_object_redirect_html)
+
+harvest_metadata.add_url_rule('/harvest/object/<id>/original',
+                              view_func=display_xml_original)
+harvest_metadata.add_url_rule('/harvest/object/<id>/html',
+                              view_func=display_html)
+harvest_metadata.add_url_rule('/harvest/object/<id>/html/original',
+                              view_func=display_html_original)
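
As a rough usage sketch for the blueprint added in views.py: the spatial search view reads a bbox query parameter (minx,miny,maxx,maxy) and returns matching package ids as JSON. The host, port, bbox values and response shown below are illustrative assumptions, not part of the patch.

    import requests

    # hit the Flask route registered as /api/2/search/<register>/geo
    resp = requests.get(
        "http://localhost:5000/api/2/search/dataset/geo",
        params={"bbox": "-10.0,49.0,2.0,61.0"},  # minx,miny,maxx,maxy
    )
    print(resp.json())  # something like {"count": 3, "results": ["<package id>", ...]}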
From 2803729263de5c79b4a1d18c47221908d453cc64 Mon Sep 17 00:00:00 2001
From: Sergey Motornyuk
Date: Wed, 11 Dec 2019 14:22:28 +0200
Subject: [PATCH 003/139] Update commands
---
ckanext/spatial/cli.py | 55 ++++++++++
ckanext/spatial/commands/spatial.py | 59 ++---------
ckanext/spatial/commands/validation.py | 76 ++------------
ckanext/spatial/util.py | 133 +++++++++++++++++++++++++
setup.py | 1 -
5 files changed, 208 insertions(+), 116 deletions(-)
create mode 100644 ckanext/spatial/cli.py
create mode 100644 ckanext/spatial/util.py
diff --git a/ckanext/spatial/cli.py b/ckanext/spatial/cli.py
new file mode 100644
index 0000000..2cef239
--- /dev/null
+++ b/ckanext/spatial/cli.py
@@ -0,0 +1,55 @@
+# encoding: utf-8
+import click
+import logging
+from ckan.cli import click_config_option
+from ckan.cli.cli import CkanCommand
+
+import ckanext.spatial.util as util
+
+
+log = logging.getLogger(__name__)
+
+
+@click.group(short_help=u"Validation commands")
+@click.help_option(u"-h", u"--help")
+@click_config_option
+@click.pass_context
+def validation(ctx, config, *args, **kwargs):
+ ctx.obj = CkanCommand(config)
+
+
+@validation.command()
+@click.argument('pkg', required=False)
+def report(pkg):
+ return util.report(pkg)
+
+
+@validation.command('report-csv')
+@click.argument('filepath')
+def report_csv(filepath):
+ return util.report_csv(filepath)
+
+
+@validation.command('file')
+@click.argument('filepath')
+def validate_file(filepath):
+ return util.validate_file(filepath)
+
+
+@click.group(short_help=u"Performs spatially related operations.")
+@click.help_option(u"-h", u"--help")
+@click_config_option
+@click.pass_context
+def spatial(ctx, config, *args, **kwargs):
+ ctx.obj = CkanCommand(config)
+
+
+@spatial.command()
+@click.argument('srid', required=False)
+def initdb(srid):
+ return util.initdb(srid)
+
+
+@spatial.command('extents')
+def update_extents():
+ return util.update_extents()
diff --git a/ckanext/spatial/commands/spatial.py b/ckanext/spatial/commands/spatial.py
index 8f75af6..bc8e512 100644
--- a/ckanext/spatial/commands/spatial.py
+++ b/ckanext/spatial/commands/spatial.py
@@ -1,11 +1,11 @@
import sys
-import re
-from pprint import pprint
-import logging
+import logging
from ckan.lib.cli import CkanCommand
-from ckan.lib.helpers import json
-from ckanext.spatial.lib import save_package_extent
+
+import ckanext.spatial.util as util
+
+
log = logging.getLogger(__name__)
class Spatial(CkanCommand):
@@ -20,7 +20,7 @@ class Spatial(CkanCommand):
spatial extents
Creates or updates the extent geometry column for datasets with
an extent defined in the 'spatial' extra.
-
+
The commands should be run from the ckanext-spatial directory and expect
a development.ini file to be present. Most of the time you will
specify the config explicitly though::
@@ -31,7 +31,7 @@ class Spatial(CkanCommand):
summary = __doc__.split('\n')[0]
usage = __doc__
- max_args = 2
+ max_args = 2
min_args = 0
def command(self):
@@ -43,7 +43,7 @@ class Spatial(CkanCommand):
sys.exit(1)
cmd = self.args[0]
if cmd == 'initdb':
- self.initdb()
+ self.initdb()
elif cmd == 'extents':
self.update_extents()
else:
@@ -51,47 +51,10 @@ class Spatial(CkanCommand):
def initdb(self):
if len(self.args) >= 2:
- srid = unicode(self.args[1])
+ srid = self.args[1]
else:
srid = None
-
- from ckanext.spatial.model import setup as db_setup
-
- db_setup(srid)
-
- print 'DB tables created'
+ return util.initdb(srid)
def update_extents(self):
- from ckan.model import PackageExtra, Package, Session
- conn = Session.connection()
- packages = [extra.package \
- for extra in \
- Session.query(PackageExtra).filter(PackageExtra.key == 'spatial').all()]
-
- errors = []
- count = 0
- for package in packages:
- try:
- value = package.extras['spatial']
- log.debug('Received: %r' % value)
- geometry = json.loads(value)
-
- count += 1
- except ValueError,e:
- errors.append(u'Package %s - Error decoding JSON object: %s' % (package.id,str(e)))
- except TypeError,e:
- errors.append(u'Package %s - Error decoding JSON object: %s' % (package.id,str(e)))
-
- save_package_extent(package.id,geometry)
-
-
- Session.commit()
-
- if errors:
- msg = 'Errors were found:\n%s' % '\n'.join(errors)
- print msg
-
- msg = "Done. Extents generated for %i out of %i packages" % (count,len(packages))
-
- print msg
-
+ return util.update_extents()
diff --git a/ckanext/spatial/commands/validation.py b/ckanext/spatial/commands/validation.py
index b261967..66a67e9 100644
--- a/ckanext/spatial/commands/validation.py
+++ b/ckanext/spatial/commands/validation.py
@@ -1,13 +1,11 @@
import sys
-import re
-import os
-from pprint import pprint
+
import logging
-
-from lxml import etree
-
from ckan.lib.cli import CkanCommand
+import ckanext.spatial.util as util
+
+
log = logging.getLogger(__name__)
class Validation(CkanCommand):
@@ -21,7 +19,7 @@ class Validation(CkanCommand):
validation report-csv <filepath>.csv
Performs validation on all the harvested metadata in the db and
writes a report in CSV format to the given filepath.
-
+
validation file <filepath>.xml
Performs validation on the given metadata file.
'''
@@ -48,81 +46,25 @@ class Validation(CkanCommand):
print 'Command %s not recognized' % cmd
def report(self):
- from ckan import model
- from ckanext.harvest.model import HarvestObject
- from ckanext.spatial.lib.reports import validation_report
if len(self.args) >= 2:
- package_ref = unicode(self.args[1])
- pkg = model.Package.get(package_ref)
- if not pkg:
- print 'Package ref "%s" not recognised' % package_ref
- sys.exit(1)
+ pkg = self.args[1]
else:
pkg = None
-
- report = validation_report(package_id=pkg.id)
- for row in report.get_rows_html_formatted():
- print
- for i, col_name in enumerate(report.column_names):
- print ' %s: %s' % (col_name, row[i])
+ return util.report(pkg)
def validate_file(self):
- from ckanext.spatial.harvesters import SpatialHarvester
- from ckanext.spatial.model import ISODocument
-
if len(self.args) > 2:
print 'Too many parameters %i' % len(self.args)
sys.exit(1)
if len(self.args) < 2:
print 'Not enough parameters %i' % len(self.args)
sys.exit(1)
- metadata_filepath = self.args[1]
- if not os.path.exists(metadata_filepath):
- print 'Filepath %s not found' % metadata_filepath
- sys.exit(1)
- with open(metadata_filepath, 'rb') as f:
- metadata_xml = f.read()
- validators = SpatialHarvester()._get_validator()
- print 'Validators: %r' % validators.profiles
- try:
- xml_string = metadata_xml.encode("utf-8")
- except UnicodeDecodeError, e:
- print 'ERROR: Unicode Error reading file \'%s\': %s' % \
- (metadata_filepath, e)
- sys.exit(1)
- #import pdb; pdb.set_trace()
- xml = etree.fromstring(xml_string)
-
- # XML validation
- valid, errors = validators.is_valid(xml)
-
- # CKAN read of values
- if valid:
- try:
- iso_document = ISODocument(xml_string)
- iso_values = iso_document.read_values()
- except Exception, e:
- valid = False
- errors.append('CKAN exception reading values from ISODocument: %s' % e)
-
- print '***************'
- print 'Summary'
- print '***************'
- print 'File: \'%s\'' % metadata_filepath
- print 'Valid: %s' % valid
- if not valid:
- print 'Errors:'
- print pprint(errors)
- print '***************'
+ return util.validate_file(self.args[1])
def report_csv(self):
- from ckanext.spatial.lib.reports import validation_report
if len(self.args) != 2:
print 'Wrong number of arguments'
sys.exit(1)
- csv_filepath = self.args[1]
- report = validation_report()
- with open(csv_filepath, 'wb') as f:
- f.write(report.get_csv())
+ return util.report_csv(self.args[1])
diff --git a/ckanext/spatial/util.py b/ckanext/spatial/util.py
new file mode 100644
index 0000000..ee0aa26
--- /dev/null
+++ b/ckanext/spatial/util.py
@@ -0,0 +1,133 @@
+# -*- coding: utf-8 -*-
+
+import os
+import sys
+
+import logging
+from ckan.lib.helpers import json
+from lxml import etree
+from pprint import pprint
+
+from ckanext.spatial.lib import save_package_extent
+
+
+log = logging.getLogger(__name__)
+
+
+def report(pkg=None):
+ from ckan import model
+ from ckanext.harvest.model import HarvestObject
+ from ckanext.spatial.lib.reports import validation_report
+
+ if pkg:
+ package_ref = unicode(pkg)
+ pkg = model.Package.get(package_ref)
+ if not pkg:
+ print 'Package ref "%s" not recognised' % package_ref
+ sys.exit(1)
+
+ report = validation_report(package_id=pkg.id)
+ for row in report.get_rows_html_formatted():
+ print
+ for i, col_name in enumerate(report.column_names):
+ print ' %s: %s' % (col_name, row[i])
+
+
+def validate_file(metadata_filepath):
+ from ckanext.spatial.harvesters import SpatialHarvester
+ from ckanext.spatial.model import ISODocument
+
+ if not os.path.exists(metadata_filepath):
+ print 'Filepath %s not found' % metadata_filepath
+ sys.exit(1)
+ with open(metadata_filepath, 'rb') as f:
+ metadata_xml = f.read()
+
+ validators = SpatialHarvester()._get_validator()
+ print 'Validators: %r' % validators.profiles
+ try:
+ xml_string = metadata_xml.encode("utf-8")
+ except UnicodeDecodeError, e:
+ print 'ERROR: Unicode Error reading file \'%s\': %s' % \
+ (metadata_filepath, e)
+ sys.exit(1)
+ #import pdb; pdb.set_trace()
+ xml = etree.fromstring(xml_string)
+
+ # XML validation
+ valid, errors = validators.is_valid(xml)
+
+ # CKAN read of values
+ if valid:
+ try:
+ iso_document = ISODocument(xml_string)
+ iso_values = iso_document.read_values()
+ except Exception, e:
+ valid = False
+ errors.append(
+ 'CKAN exception reading values from ISODocument: %s' % e)
+
+ print '***************'
+ print 'Summary'
+ print '***************'
+ print 'File: \'%s\'' % metadata_filepath
+ print 'Valid: %s' % valid
+ if not valid:
+ print 'Errors:'
+ print pprint(errors)
+ print '***************'
+
+
+def report_csv(csv_filepath):
+ from ckanext.spatial.lib.reports import validation_report
+ report = validation_report()
+ with open(csv_filepath, 'wb') as f:
+ f.write(report.get_csv())
+
+
+def initdb(srid=None):
+ if srid:
+ srid = unicode(srid)
+
+ from ckanext.spatial.model import setup as db_setup
+
+ db_setup(srid)
+
+ print 'DB tables created'
+
+
+def update_extents():
+ from ckan.model import PackageExtra, Package, Session
+ conn = Session.connection()
+ packages = [extra.package \
+ for extra in \
+ Session.query(PackageExtra).filter(PackageExtra.key == 'spatial').all()]
+
+ errors = []
+ count = 0
+ for package in packages:
+ try:
+ value = package.extras['spatial']
+ log.debug('Received: %r' % value)
+ geometry = json.loads(value)
+
+ count += 1
+ except ValueError, e:
+ errors.append(u'Package %s - Error decoding JSON object: %s' %
+ (package.id, str(e)))
+ except TypeError, e:
+ errors.append(u'Package %s - Error decoding JSON object: %s' %
+ (package.id, str(e)))
+
+ save_package_extent(package.id, geometry)
+
+ Session.commit()
+
+ if errors:
+ msg = 'Errors were found:\n%s' % '\n'.join(errors)
+ print msg
+
+ msg = "Done. Extents generated for %i out of %i packages" % (count,
+ len(packages))
+
+ print msg
diff --git a/setup.py b/setup.py
index b5820e9..9aee440 100644
--- a/setup.py
+++ b/setup.py
@@ -48,7 +48,6 @@ setup(
[console_scripts]
spatial = ckanext.spatial.cli:spatial
- ckan-pycsw = ckanext.spatial.cli:ckan_pycsw
validation = ckanext.spatial.cli:validation
""",
)
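
A quick way to exercise the new Click commands without installing the console scripts is click's test runner. This is only a sketch: it assumes the -c/--config option supplied by CKAN's click_config_option decorator, and the ini path used here is made up.

    from click.testing import CliRunner

    from ckanext.spatial.cli import spatial

    runner = CliRunner()
    # equivalent to running: spatial -c /etc/ckan/default/ckan.ini initdb 4326
    result = runner.invoke(spatial, ["-c", "/etc/ckan/default/ckan.ini", "initdb", "4326"])
    print(result.exit_code, result.output)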
From e063522032a97d189915f2c0504f283e511da662 Mon Sep 17 00:00:00 2001
From: Sergey Motornyuk
Date: Wed, 11 Dec 2019 14:23:03 +0200
Subject: [PATCH 004/139] Futurize
---
ckanext/spatial/commands/csw.py | 3 +-
ckanext/spatial/commands/spatial.py | 5 +-
ckanext/spatial/commands/validation.py | 11 ++--
ckanext/spatial/controllers/api.py | 6 ++-
ckanext/spatial/harvesters/base.py | 42 +++++++++-------
ckanext/spatial/harvesters/csw.py | 25 ++++++----
ckanext/spatial/harvesters/doc.py | 2 +-
ckanext/spatial/harvesters/gemini.py | 50 ++++++++++---------
ckanext/spatial/harvesters/waf.py | 18 ++++---
ckanext/spatial/helpers.py | 4 +-
ckanext/spatial/lib/__init__.py | 4 +-
ckanext/spatial/lib/csw_client.py | 14 +++---
ckanext/spatial/lib/report.py | 12 +++--
ckanext/spatial/model/__init__.py | 5 +-
ckanext/spatial/model/harvested_metadata.py | 8 +--
ckanext/spatial/model/package_extent.py | 2 +-
ckanext/spatial/plugin/__init__.py | 13 ++---
ckanext/spatial/tests/base.py | 1 +
ckanext/spatial/tests/lib/test_spatial.py | 13 +++--
.../tests/model/test_harvested_metadata.py | 2 +-
ckanext/spatial/tests/test_csw_client.py | 23 +++++----
ckanext/spatial/tests/test_harvest.py | 23 +++++----
ckanext/spatial/tests/test_validation.py | 3 +-
ckanext/spatial/tests/xml_file_server.py | 13 +++--
ckanext/spatial/util.py | 50 ++++++++++---------
ckanext/spatial/validation/__init__.py | 3 +-
ckanext/spatial/validation/validation.py | 1 +
ckanext/spatial/views.py | 6 ++-
28 files changed, 209 insertions(+), 153 deletions(-)
diff --git a/ckanext/spatial/commands/csw.py b/ckanext/spatial/commands/csw.py
index 88517a6..5b8a271 100644
--- a/ckanext/spatial/commands/csw.py
+++ b/ckanext/spatial/commands/csw.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
import sys
import logging
@@ -63,4 +64,4 @@ option:
elif cmd == 'clear':
ckan_pycsw.clear(config)
else:
- print 'Command %s not recognized' % cmd
+ print('Command %s not recognized' % cmd)
diff --git a/ckanext/spatial/commands/spatial.py b/ckanext/spatial/commands/spatial.py
index bc8e512..5708077 100644
--- a/ckanext/spatial/commands/spatial.py
+++ b/ckanext/spatial/commands/spatial.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
import sys
import logging
@@ -36,7 +37,7 @@ class Spatial(CkanCommand):
def command(self):
self._load_config()
- print ''
+ print('')
if len(self.args) == 0:
self.parser.print_usage()
@@ -47,7 +48,7 @@ class Spatial(CkanCommand):
elif cmd == 'extents':
self.update_extents()
else:
- print 'Command %s not recognized' % cmd
+ print('Command %s not recognized' % cmd)
def initdb(self):
if len(self.args) >= 2:
diff --git a/ckanext/spatial/commands/validation.py b/ckanext/spatial/commands/validation.py
index 66a67e9..18f8951 100644
--- a/ckanext/spatial/commands/validation.py
+++ b/ckanext/spatial/commands/validation.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
import sys
import logging
@@ -30,7 +31,7 @@ class Validation(CkanCommand):
def command(self):
if not self.args or self.args[0] in ['--help', '-h', 'help']:
- print self.usage
+ print(self.usage)
sys.exit(1)
self._load_config()
@@ -43,7 +44,7 @@ class Validation(CkanCommand):
elif cmd == 'file':
self.validate_file()
else:
- print 'Command %s not recognized' % cmd
+ print('Command %s not recognized' % cmd)
def report(self):
@@ -55,16 +56,16 @@ class Validation(CkanCommand):
def validate_file(self):
if len(self.args) > 2:
- print 'Too many parameters %i' % len(self.args)
+ print('Too many parameters %i' % len(self.args))
sys.exit(1)
if len(self.args) < 2:
- print 'Not enough parameters %i' % len(self.args)
+ print('Not enough parameters %i' % len(self.args))
sys.exit(1)
return util.validate_file(self.args[1])
def report_csv(self):
if len(self.args) != 2:
- print 'Wrong number of arguments'
+ print('Wrong number of arguments')
sys.exit(1)
return util.report_csv(self.args[1])
diff --git a/ckanext/spatial/controllers/api.py b/ckanext/spatial/controllers/api.py
index b0cb961..835ecae 100644
--- a/ckanext/spatial/controllers/api.py
+++ b/ckanext/spatial/controllers/api.py
@@ -1,9 +1,11 @@
+from future import standard_library
+standard_library.install_aliases()
import logging
try:
- from cStringIO import StringIO
+ from io import StringIO
except ImportError:
- from StringIO import StringIO
+ from io import StringIO
from pylons import response
from pkg_resources import resource_stream
diff --git a/ckanext/spatial/harvesters/base.py b/ckanext/spatial/harvesters/base.py
index 0d3722b..30b6024 100644
--- a/ckanext/spatial/harvesters/base.py
+++ b/ckanext/spatial/harvesters/base.py
@@ -1,11 +1,15 @@
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
+from past.builtins import basestring
import re
import cgitb
import warnings
-import urllib2
+import urllib.request, urllib.error, urllib.parse
import sys
import logging
from string import Template
-from urlparse import urlparse
+from urllib.parse import urlparse
from datetime import datetime
import uuid
import hashlib
@@ -91,7 +95,7 @@ def guess_resource_format(url, use_mimetypes=True):
'arcgis_rest': ('arcgis/rest/services',),
}
- for resource_type, parts in resource_types.iteritems():
+ for resource_type, parts in resource_types.items():
if any(part in url for part in parts):
return resource_type
@@ -101,7 +105,7 @@ def guess_resource_format(url, use_mimetypes=True):
'gml': ('gml',),
}
- for file_type, extensions in file_types.iteritems():
+ for file_type, extensions in file_types.items():
if any(url.endswith(extension) for extension in extensions):
return file_type
@@ -159,7 +163,7 @@ class SpatialHarvester(HarvesterBase):
if not isinstance(source_config_obj[key],bool):
raise ValueError('%s must be boolean' % key)
- except ValueError, e:
+ except ValueError as e:
raise e
return source_config
@@ -338,7 +342,7 @@ class SpatialHarvester(HarvesterBase):
parties[party['organisation-name']].append(party['role'])
else:
parties[party['organisation-name']] = [party['role']]
- extras['responsible-party'] = [{'name': k, 'roles': v} for k, v in parties.iteritems()]
+ extras['responsible-party'] = [{'name': k, 'roles': v} for k, v in parties.items()]
if len(iso_values['bbox']) > 0:
bbox = iso_values['bbox'][0]
@@ -352,7 +356,7 @@ class SpatialHarvester(HarvesterBase):
xmax = float(bbox['east'])
ymin = float(bbox['south'])
ymax = float(bbox['north'])
- except ValueError, e:
+ except ValueError as e:
self._save_object_error('Error parsing bounding box value: {0}'.format(str(e)),
harvest_object, 'Import')
else:
@@ -406,7 +410,7 @@ class SpatialHarvester(HarvesterBase):
default_extras = self.source_config.get('default_extras',{})
if default_extras:
override_extras = self.source_config.get('override_extras',False)
- for key,value in default_extras.iteritems():
+ for key,value in default_extras.items():
log.debug('Processing extra %s', key)
if not key in extras or override_extras:
# Look for replacement strings
@@ -419,7 +423,7 @@ class SpatialHarvester(HarvesterBase):
extras[key] = value
extras_as_dict = []
- for key, value in extras.iteritems():
+ for key, value in extras.items():
if isinstance(value, (list, dict)):
extras_as_dict.append({'key': key, 'value': json.dumps(value)})
else:
@@ -513,7 +517,7 @@ class SpatialHarvester(HarvesterBase):
iso_parser = ISODocument(harvest_object.content)
iso_values = iso_parser.read_values()
- except Exception, e:
+ except Exception as e:
self._save_object_error('Error parsing ISO document for object {0}: {1}'.format(harvest_object.id, str(e)),
harvest_object, 'Import')
return False
@@ -584,7 +588,7 @@ class SpatialHarvester(HarvesterBase):
# The default package schema does not like Upper case tags
tag_schema = logic.schema.default_tags_schema()
- tag_schema['name'] = [not_empty, unicode]
+ tag_schema['name'] = [not_empty, str]
# Flag this object as the current one
harvest_object.current = True
@@ -597,8 +601,8 @@ class SpatialHarvester(HarvesterBase):
# We need to explicitly provide a package ID, otherwise ckanext-spatial
# won't be be able to link the extent to the package.
- package_dict['id'] = unicode(uuid.uuid4())
- package_schema['id'] = [unicode]
+ package_dict['id'] = str(uuid.uuid4())
+ package_schema['id'] = [str]
# Save reference to the package on the object
harvest_object.package_id = package_dict['id']
@@ -612,7 +616,7 @@ class SpatialHarvester(HarvesterBase):
try:
package_id = p.toolkit.get_action('package_create')(context, package_dict)
log.info('Created new package %s with guid %s', package_id, harvest_object.guid)
- except p.toolkit.ValidationError, e:
+ except p.toolkit.ValidationError as e:
self._save_object_error('Validation Error: %s' % str(e.error_summary), harvest_object, 'Import')
return False
@@ -658,7 +662,7 @@ class SpatialHarvester(HarvesterBase):
try:
package_id = p.toolkit.get_action('package_update')(context, package_dict)
log.info('Updated package %s with guid %s', package_id, harvest_object.guid)
- except p.toolkit.ValidationError, e:
+ except p.toolkit.ValidationError as e:
self._save_object_error('Validation Error: %s' % str(e.error_summary), harvest_object, 'Import')
return False
@@ -674,12 +678,12 @@ class SpatialHarvester(HarvesterBase):
'''
try:
capabilities_url = wms.WMSCapabilitiesReader().capabilities_url(url)
- res = urllib2.urlopen(capabilities_url, None, 10)
+ res = urllib.request.urlopen(capabilities_url, None, 10)
xml = res.read()
s = wms.WebMapService(url, xml=xml)
return isinstance(s.contents, dict) and s.contents != {}
- except Exception, e:
+ except Exception as e:
log.error('WMS check for %s failed with exception: %s' % (url, str(e)))
return False
@@ -771,7 +775,7 @@ class SpatialHarvester(HarvesterBase):
DEPRECATED: Use _get_content_as_unicode instead
'''
url = url.replace(' ', '%20')
- http_response = urllib2.urlopen(url)
+ http_response = urllib.request.urlopen(url)
return http_response.read()
def _get_content_as_unicode(self, url):
@@ -822,7 +826,7 @@ class SpatialHarvester(HarvesterBase):
try:
xml = etree.fromstring(document_string)
- except etree.XMLSyntaxError, e:
+ except etree.XMLSyntaxError as e:
self._save_object_error('Could not parse XML file: {0}'.format(str(e)), harvest_object, 'Import')
return False, None, []
diff --git a/ckanext/spatial/harvesters/csw.py b/ckanext/spatial/harvesters/csw.py
index 2853a10..ab17482 100644
--- a/ckanext/spatial/harvesters/csw.py
+++ b/ckanext/spatial/harvesters/csw.py
@@ -1,6 +1,9 @@
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
import re
-import urllib
-import urlparse
+import urllib.request, urllib.parse, urllib.error
+import urllib.parse
import logging
@@ -37,7 +40,7 @@ class CSWHarvester(SpatialHarvester, SingletonPlugin):
filter(HarvestObject.id==harvest_object_id).\
first()
- parts = urlparse.urlparse(obj.source.url)
+ parts = urllib.parse.urlparse(obj.source.url)
params = {
'SERVICE': 'CSW',
@@ -48,12 +51,12 @@ class CSWHarvester(SpatialHarvester, SingletonPlugin):
'ID': obj.guid
}
- url = urlparse.urlunparse((
+ url = urllib.parse.urlunparse((
parts.scheme,
parts.netloc,
parts.path,
None,
- urllib.urlencode(params),
+ urllib.parse.urlencode(params),
None
))
@@ -72,7 +75,7 @@ class CSWHarvester(SpatialHarvester, SingletonPlugin):
try:
self._setup_csw_client(url)
- except Exception, e:
+ except Exception as e:
self._save_gather_error('Error contacting the CSW server: %s' % e, harvest_job)
return None
@@ -100,12 +103,12 @@ class CSWHarvester(SpatialHarvester, SingletonPlugin):
continue
guids_in_harvest.add(identifier)
- except Exception, e:
+ except Exception as e:
self._save_gather_error('Error for the identifier %s [%r]' % (identifier,e), harvest_job)
continue
- except Exception, e:
+ except Exception as e:
log.error('Exception: %s' % text_traceback())
self._save_gather_error('Error gathering the identifiers from the CSW server [%s]' % str(e), harvest_job)
return None
@@ -157,7 +160,7 @@ class CSWHarvester(SpatialHarvester, SingletonPlugin):
url = harvest_object.source.url
try:
self._setup_csw_client(url)
- except Exception, e:
+ except Exception as e:
self._save_object_error('Error contacting the CSW server: %s' % e,
harvest_object)
return False
@@ -165,7 +168,7 @@ class CSWHarvester(SpatialHarvester, SingletonPlugin):
identifier = harvest_object.guid
try:
record = self.csw.getrecordbyid([identifier], outputschema=self.output_schema())
- except Exception, e:
+ except Exception as e:
self._save_object_error('Error getting the CSW record with GUID %s' % identifier, harvest_object)
return False
@@ -182,7 +185,7 @@ class CSWHarvester(SpatialHarvester, SingletonPlugin):
harvest_object.content = content.strip()
harvest_object.save()
- except Exception,e:
+ except Exception as e:
self._save_object_error('Error saving the harvest object for GUID %s [%r]' % \
(identifier, e), harvest_object)
return False
diff --git a/ckanext/spatial/harvesters/doc.py b/ckanext/spatial/harvesters/doc.py
index e8a6daa..b370c12 100644
--- a/ckanext/spatial/harvesters/doc.py
+++ b/ckanext/spatial/harvesters/doc.py
@@ -52,7 +52,7 @@ class DocHarvester(SpatialHarvester, SingletonPlugin):
# Get contents
try:
content = self._get_content_as_unicode(url)
- except Exception,e:
+ except Exception as e:
self._save_gather_error('Unable to get content for URL: %s: %r' % \
(url, e),harvest_job)
return None
diff --git a/ckanext/spatial/harvesters/gemini.py b/ckanext/spatial/harvesters/gemini.py
index 8dc65d4..48ac8f2 100644
--- a/ckanext/spatial/harvesters/gemini.py
+++ b/ckanext/spatial/harvesters/gemini.py
@@ -8,8 +8,12 @@ but can be easily adapted for other INSPIRE/ISO19139 XML metadata
- GeminiWafHarvester - An index page with links to GEMINI resources
'''
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
+from past.builtins import basestring
import os
-from urlparse import urlparse
+from urllib.parse import urlparse
from datetime import datetime
from numbers import Number
import uuid
@@ -70,7 +74,7 @@ class GeminiHarvester(SpatialHarvester):
try:
self.import_gemini_object(harvest_object.content)
return True
- except Exception, e:
+ except Exception as e:
log.error('Exception during import: %s' % text_traceback())
if not str(e).strip():
self._save_object_error('Error importing Gemini document.', harvest_object, 'Import')
@@ -97,7 +101,7 @@ class GeminiHarvester(SpatialHarvester):
log.error('Errors found for object with GUID %s:' % self.obj.guid)
self._save_object_error(out,self.obj,'Import')
- unicode_gemini_string = etree.tostring(xml, encoding=unicode, pretty_print=True)
+ unicode_gemini_string = etree.tostring(xml, encoding=str, pretty_print=True)
# may raise Exception for errors
package_dict = self.write_package_from_gemini_string(unicode_gemini_string)
@@ -223,10 +227,10 @@ class GeminiHarvester(SpatialHarvester):
extras['licence_url'] = licence_url_extracted
extras['access_constraints'] = gemini_values.get('limitations-on-public-access','')
- if gemini_values.has_key('temporal-extent-begin'):
+ if 'temporal-extent-begin' in gemini_values:
#gemini_values['temporal-extent-begin'].sort()
extras['temporal_coverage-from'] = gemini_values['temporal-extent-begin']
- if gemini_values.has_key('temporal-extent-end'):
+ if 'temporal-extent-end' in gemini_values:
#gemini_values['temporal-extent-end'].sort()
extras['temporal_coverage-to'] = gemini_values['temporal-extent-end']
@@ -318,7 +322,7 @@ class GeminiHarvester(SpatialHarvester):
view_resources[0]['ckan_recommended_wms_preview'] = True
extras_as_dict = []
- for key,value in extras.iteritems():
+ for key,value in extras.items():
if isinstance(value,(basestring,Number)):
extras_as_dict.append({'key':key,'value':value})
else:
@@ -453,7 +457,7 @@ class GeminiHarvester(SpatialHarvester):
# The default package schema does not like Upper case tags
tag_schema = logic.schema.default_tags_schema()
- tag_schema['name'] = [not_empty,unicode]
+ tag_schema['name'] = [not_empty,str]
package_schema['tags'] = tag_schema
# TODO: user
@@ -466,8 +470,8 @@ class GeminiHarvester(SpatialHarvester):
if not package:
# We need to explicitly provide a package ID, otherwise ckanext-spatial
# won't be be able to link the extent to the package.
- package_dict['id'] = unicode(uuid.uuid4())
- package_schema['id'] = [unicode]
+ package_dict['id'] = str(uuid.uuid4())
+ package_schema['id'] = [str]
action_function = get_action('package_create')
else:
@@ -476,7 +480,7 @@ class GeminiHarvester(SpatialHarvester):
try:
package_dict = action_function(context, package_dict)
- except ValidationError,e:
+ except ValidationError as e:
raise Exception('Validation Error: %s' % str(e.error_summary))
if debug_exception_mode:
raise
@@ -539,7 +543,7 @@ class GeminiCswHarvester(GeminiHarvester, SingletonPlugin):
try:
self._setup_csw_client(url)
- except Exception, e:
+ except Exception as e:
self._save_gather_error('Error contacting the CSW server: %s' % e, harvest_job)
return None
@@ -565,11 +569,11 @@ class GeminiCswHarvester(GeminiHarvester, SingletonPlugin):
ids.append(obj.id)
used_identifiers.append(identifier)
- except Exception, e:
+ except Exception as e:
self._save_gather_error('Error for the identifier %s [%r]' % (identifier,e), harvest_job)
continue
- except Exception, e:
+ except Exception as e:
log.error('Exception: %s' % text_traceback())
self._save_gather_error('Error gathering the identifiers from the CSW server [%s]' % str(e), harvest_job)
return None
@@ -587,7 +591,7 @@ class GeminiCswHarvester(GeminiHarvester, SingletonPlugin):
url = harvest_object.source.url
try:
self._setup_csw_client(url)
- except Exception, e:
+ except Exception as e:
self._save_object_error('Error contacting the CSW server: %s' % e,
harvest_object)
return False
@@ -595,7 +599,7 @@ class GeminiCswHarvester(GeminiHarvester, SingletonPlugin):
identifier = harvest_object.guid
try:
record = self.csw.getrecordbyid([identifier])
- except Exception, e:
+ except Exception as e:
self._save_object_error('Error getting the CSW record with GUID %s' % identifier, harvest_object)
return False
@@ -608,7 +612,7 @@ class GeminiCswHarvester(GeminiHarvester, SingletonPlugin):
# Save the fetch contents in the HarvestObject
harvest_object.content = record['xml']
harvest_object.save()
- except Exception,e:
+ except Exception as e:
self._save_object_error('Error saving the harvest object for GUID %s [%r]' % \
(identifier, e), harvest_object)
return False
@@ -646,7 +650,7 @@ class GeminiDocHarvester(GeminiHarvester, SingletonPlugin):
# Get contents
try:
content = self._get_content(url)
- except Exception,e:
+ except Exception as e:
self._save_gather_error('Unable to get content for URL: %s: %r' % \
(url, e),harvest_job)
return None
@@ -668,7 +672,7 @@ class GeminiDocHarvester(GeminiHarvester, SingletonPlugin):
else:
self._save_gather_error('Could not get the GUID for source %s' % url, harvest_job)
return None
- except Exception, e:
+ except Exception as e:
self._save_gather_error('Error parsing the document. Is this a valid Gemini document?: %s [%r]'% (url,e),harvest_job)
if debug_exception_mode:
raise
@@ -707,7 +711,7 @@ class GeminiWafHarvester(GeminiHarvester, SingletonPlugin):
# Get contents
try:
content = self._get_content(url)
- except Exception,e:
+ except Exception as e:
self._save_gather_error('Unable to get content for URL: %s: %r' % \
(url, e),harvest_job)
return None
@@ -716,7 +720,7 @@ class GeminiWafHarvester(GeminiHarvester, SingletonPlugin):
for url in self._extract_urls(content,url):
try:
content = self._get_content(url)
- except Exception, e:
+ except Exception as e:
msg = 'Couldn\'t harvest WAF link: %s: %s' % (url, e)
self._save_gather_error(msg,harvest_job)
continue
@@ -737,11 +741,11 @@ class GeminiWafHarvester(GeminiHarvester, SingletonPlugin):
ids.append(obj.id)
- except Exception,e:
+ except Exception as e:
msg = 'Could not get GUID for source %s: %r' % (url,e)
self._save_gather_error(msg,harvest_job)
continue
- except Exception,e:
+ except Exception as e:
msg = 'Error extracting URLs from %s' % url
self._save_gather_error(msg,harvest_job)
return None
@@ -765,7 +769,7 @@ class GeminiWafHarvester(GeminiHarvester, SingletonPlugin):
try:
parser = etree.HTMLParser()
tree = etree.fromstring(content, parser=parser)
- except Exception, inst:
+ except Exception as inst:
msg = 'Couldn\'t parse content into a tree: %s: %s' \
% (inst, content)
raise Exception(msg)
diff --git a/ckanext/spatial/harvesters/waf.py b/ckanext/spatial/harvesters/waf.py
index 488e960..e4b36de 100644
--- a/ckanext/spatial/harvesters/waf.py
+++ b/ckanext/spatial/harvesters/waf.py
@@ -1,6 +1,10 @@
+from __future__ import print_function
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
import logging
import hashlib
-from urlparse import urljoin
+from urllib.parse import urljoin
import dateutil.parser
import pyparsing as parse
import requests
@@ -61,7 +65,7 @@ class WAFHarvester(SpatialHarvester, SingletonPlugin):
try:
response = requests.get(source_url, timeout=60)
response.raise_for_status()
- except requests.exceptions.RequestException, e:
+ except requests.exceptions.RequestException as e:
self._save_gather_error('Unable to get content for URL: %s: %r' % \
(source_url, e),harvest_job)
return None
@@ -96,7 +100,7 @@ class WAFHarvester(SpatialHarvester, SingletonPlugin):
try:
for url, modified_date in _extract_waf(content,source_url,scraper):
url_to_modified_harvest[url] = modified_date
- except Exception,e:
+ except Exception as e:
msg = 'Error extracting URLs from %s, error was %s' % (source_url, e)
self._save_gather_error(msg,harvest_job)
return None
@@ -195,7 +199,7 @@ class WAFHarvester(SpatialHarvester, SingletonPlugin):
# Get contents
try:
content = self._get_content_as_unicode(url)
- except Exception, e:
+ except Exception as e:
msg = 'Could not harvest WAF link {0}: {1}'.format(url, e)
self._save_object_error(msg, harvest_object)
return False
@@ -298,8 +302,8 @@ def _extract_waf(content, base_url, scraper, results = None, depth=0):
try:
response = requests.get(new_url)
content = response.content
- except Exception, e:
- print str(e)
+ except Exception as e:
+ print(str(e))
continue
_extract_waf(content, new_url, scraper, results, new_depth)
continue
@@ -309,7 +313,7 @@ def _extract_waf(content, base_url, scraper, results = None, depth=0):
if date:
try:
date = str(dateutil.parser.parse(date))
- except Exception, e:
+ except Exception as e:
raise
date = None
results.append((urljoin(base_url, record.url), date))
diff --git a/ckanext/spatial/helpers.py b/ckanext/spatial/helpers.py
index 0d015ba..98466ea 100644
--- a/ckanext/spatial/helpers.py
+++ b/ckanext/spatial/helpers.py
@@ -59,7 +59,7 @@ def get_responsible_party(value):
out = []
parties = h.json.loads(value)
for party in parties:
- roles = [formatted[role] if role in formatted.keys() else p.toolkit._(role.capitalize()) for role in party['roles']]
+ roles = [formatted[role] if role in list(formatted.keys()) else p.toolkit._(role.capitalize()) for role in party['roles']]
out.append('{0} ({1})'.format(party['name'], ', '.join(roles)))
return '; '.join(out)
except (ValueError, TypeError):
@@ -72,4 +72,4 @@ def get_common_map_config():
base map (ie those starting with 'ckanext.spatial.common_map.')
'''
namespace = 'ckanext.spatial.common_map.'
- return dict([(k.replace(namespace, ''), v) for k, v in config.iteritems() if k.startswith(namespace)])
+ return dict([(k.replace(namespace, ''), v) for k, v in config.items() if k.startswith(namespace)])
diff --git a/ckanext/spatial/lib/__init__.py b/ckanext/spatial/lib/__init__.py
index b053af7..529698b 100644
--- a/ckanext/spatial/lib/__init__.py
+++ b/ckanext/spatial/lib/__init__.py
@@ -1,3 +1,5 @@
+from builtins import str
+from past.builtins import basestring
import logging
from string import Template
@@ -113,7 +115,7 @@ def validate_bbox(bbox_values):
bbox['miny'] = float(bbox_values[1])
bbox['maxx'] = float(bbox_values[2])
bbox['maxy'] = float(bbox_values[3])
- except ValueError,e:
+ except ValueError as e:
return None
return bbox
diff --git a/ckanext/spatial/lib/csw_client.py b/ckanext/spatial/lib/csw_client.py
index 207a0d4..4bb9f88 100644
--- a/ckanext/spatial/lib/csw_client.py
+++ b/ckanext/spatial/lib/csw_client.py
@@ -3,6 +3,8 @@ Some very thin wrapper classes around those in OWSLib
for convenience.
"""
+from past.builtins import basestring
+from builtins import object
import logging
from owslib.etree import etree
@@ -97,7 +99,7 @@ class CswService(OwsService):
csw.exceptionreport.exceptions
#log.error(err)
raise CswError(err)
- return [self._xmd(r) for r in csw.records.values()]
+ return [self._xmd(r) for r in list(csw.records.values())]
def getidentifiers(self, qtype=None, typenames="csw:Record", esn="brief",
keywords=[], limit=None, page=10, outputschema="gmd",
@@ -134,7 +136,7 @@ class CswService(OwsService):
if matches == 0:
matches = csw.results['matches']
- identifiers = csw.records.keys()
+ identifiers = list(csw.records.keys())
if limit is not None:
identifiers = identifiers[:(limit-startposition)]
for ident in identifiers:
@@ -170,7 +172,7 @@ class CswService(OwsService):
raise CswError(err)
if not csw.records:
return
- record = self._xmd(csw.records.values()[0])
+ record = self._xmd(list(csw.records.values())[0])
## strip off the enclosing results container, we only want the metadata
#md = csw._exml.find("/gmd:MD_Metadata")#, namespaces=namespaces)
@@ -178,13 +180,13 @@ class CswService(OwsService):
md = csw._exml.find("/{http://www.isotc211.org/2005/gmd}MD_Metadata")
mdtree = etree.ElementTree(md)
try:
- record["xml"] = etree.tostring(mdtree, pretty_print=True, encoding=unicode)
+ record["xml"] = etree.tostring(mdtree, pretty_print=True, encoding=str)
except TypeError:
# API incompatibilities between different flavours of elementtree
try:
- record["xml"] = etree.tostring(mdtree, pretty_print=True, encoding=unicode)
+ record["xml"] = etree.tostring(mdtree, pretty_print=True, encoding=str)
except AssertionError:
- record["xml"] = etree.tostring(md, pretty_print=True, encoding=unicode)
+ record["xml"] = etree.tostring(md, pretty_print=True, encoding=str)
record["xml"] = '\n' + record["xml"]
record["tree"] = mdtree
diff --git a/ckanext/spatial/lib/report.py b/ckanext/spatial/lib/report.py
index 411c767..cc05202 100644
--- a/ckanext/spatial/lib/report.py
+++ b/ckanext/spatial/lib/report.py
@@ -3,10 +3,14 @@ Library for creating reports that can be displayed easily in an HTML table
and then saved as a CSV.
'''
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
+from builtins import object
import datetime
import csv
-try: from cStringIO import StringIO
-except ImportError: from StringIO import StringIO
+try: from io import StringIO
+except ImportError: from io import StringIO
class ReportTable(object):
def __init__(self, column_names):
@@ -51,7 +55,7 @@ class ReportTable(object):
for cell in row:
if isinstance(cell, datetime.datetime):
cell = cell.strftime('%Y-%m-%d %H:%M')
- elif isinstance(cell, (int, long)):
+ elif isinstance(cell, int):
cell = str(cell)
elif isinstance(cell, (list, tuple)):
cell = str(cell)
@@ -62,7 +66,7 @@ class ReportTable(object):
row_formatted.append(cell)
try:
csvwriter.writerow(row_formatted)
- except Exception, e:
+ except Exception as e:
raise Exception("%s: %s, %s"%(e, row, row_formatted))
csvout.seek(0)
return csvout.read()
diff --git a/ckanext/spatial/model/__init__.py b/ckanext/spatial/model/__init__.py
index 22f5324..8f042ac 100644
--- a/ckanext/spatial/model/__init__.py
+++ b/ckanext/spatial/model/__init__.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
# this is a namespace package
try:
import pkg_resources
@@ -6,5 +7,5 @@ except ImportError:
import pkgutil
__path__ = pkgutil.extend_path(__path__, __name__)
-from package_extent import *
-from harvested_metadata import *
+from .package_extent import *
+from .harvested_metadata import *
diff --git a/ckanext/spatial/model/harvested_metadata.py b/ckanext/spatial/model/harvested_metadata.py
index 28999b0..104fc3c 100644
--- a/ckanext/spatial/model/harvested_metadata.py
+++ b/ckanext/spatial/model/harvested_metadata.py
@@ -1,3 +1,5 @@
+from builtins import str
+from builtins import object
from lxml import etree
import logging
@@ -37,7 +39,7 @@ class MappedXmlDocument(MappedXmlObject):
def get_xml_tree(self):
if self.xml_tree is None:
parser = etree.XMLParser(remove_blank_text=True)
- if type(self.xml_str) == unicode:
+ if type(self.xml_str) == str:
xml_str = self.xml_str.encode('utf8')
else:
xml_str = self.xml_str
@@ -95,7 +97,7 @@ class MappedXmlElement(MappedXmlObject):
elif type(element) == etree._ElementStringResult:
value = str(element)
elif type(element) == etree._ElementUnicodeResult:
- value = unicode(element)
+ value = str(element)
else:
value = self.element_tostring(element)
return value
@@ -954,7 +956,7 @@ class ISODocument(MappedXmlDocument):
for responsible_party in values['responsible-organisation']:
if isinstance(responsible_party, dict) and \
isinstance(responsible_party.get('contact-info'), dict) and \
- responsible_party['contact-info'].has_key('email'):
+ 'email' in responsible_party['contact-info']:
value = responsible_party['contact-info']['email']
if value:
break
diff --git a/ckanext/spatial/model/package_extent.py b/ckanext/spatial/model/package_extent.py
index e5a342c..1446497 100644
--- a/ckanext/spatial/model/package_extent.py
+++ b/ckanext/spatial/model/package_extent.py
@@ -33,7 +33,7 @@ def setup(srid=None):
if not package_extent_table.exists():
try:
package_extent_table.create()
- except Exception,e:
+ except Exception as e:
# Make sure the table does not remain incorrectly created
# (eg without geom column or constraints)
if package_extent_table.exists():
diff --git a/ckanext/spatial/plugin/__init__.py b/ckanext/spatial/plugin/__init__.py
index 3616d75..9bcda9d 100644
--- a/ckanext/spatial/plugin/__init__.py
+++ b/ckanext/spatial/plugin/__init__.py
@@ -1,3 +1,4 @@
+from builtins import str
import os
import re
import mimetypes
@@ -55,7 +56,7 @@ def package_error_summary(error_dict):
return p.toolkit._(field_name.replace('_', ' '))
summary = {}
- for key, error in error_dict.iteritems():
+ for key, error in error_dict.items():
if key == 'resources':
summary[p.toolkit._('Resources')] = p.toolkit._(
'Package resource(s) invalid')
@@ -118,20 +119,20 @@ class SpatialMetadata(p.SingletonPlugin):
try:
log.debug('Received: %r' % extra.value)
geometry = json.loads(extra.value)
- except ValueError,e:
+ except ValueError as e:
error_dict = {'spatial':[u'Error decoding JSON object: %s' % str(e)]}
raise p.toolkit.ValidationError(error_dict, error_summary=package_error_summary(error_dict))
- except TypeError,e:
+ except TypeError as e:
error_dict = {'spatial':[u'Error decoding JSON object: %s' % str(e)]}
raise p.toolkit.ValidationError(error_dict, error_summary=package_error_summary(error_dict))
try:
save_package_extent(package.id,geometry)
- except ValueError,e:
+ except ValueError as e:
error_dict = {'spatial':[u'Error creating geometry: %s' % str(e)]}
raise p.toolkit.ValidationError(error_dict, error_summary=package_error_summary(error_dict))
- except Exception, e:
+ except Exception as e:
if bool(os.getenv('DEBUG')):
raise
error_dict = {'spatial':[u'Error: %s' % str(e)]}
@@ -180,7 +181,7 @@ class SpatialQuery(SpatialQueryMixin, p.SingletonPlugin):
if pkg_dict.get('extras_spatial', None) and self.search_backend in ('solr', 'solr-spatial-field'):
try:
geometry = json.loads(pkg_dict['extras_spatial'])
- except ValueError, e:
+ except ValueError as e:
log.error('Geometry not valid GeoJSON, not indexing')
return pkg_dict
diff --git a/ckanext/spatial/tests/base.py b/ckanext/spatial/tests/base.py
index dd4159c..e25675d 100644
--- a/ckanext/spatial/tests/base.py
+++ b/ckanext/spatial/tests/base.py
@@ -1,3 +1,4 @@
+from builtins import object
import os
import re
diff --git a/ckanext/spatial/tests/lib/test_spatial.py b/ckanext/spatial/tests/lib/test_spatial.py
index 2e46aef..d92e4f2 100644
--- a/ckanext/spatial/tests/lib/test_spatial.py
+++ b/ckanext/spatial/tests/lib/test_spatial.py
@@ -1,3 +1,8 @@
+from __future__ import print_function
+from builtins import str
+from builtins import range
+from past.builtins import basestring
+from builtins import object
import time
import random
@@ -42,7 +47,7 @@ class TestCompareGeometries(SpatialTestBase):
-class TestValidateBbox:
+class TestValidateBbox(object):
bbox_dict = {'minx': -4.96,
'miny': 55.70,
'maxx': -3.78,
@@ -133,17 +138,17 @@ class TestBboxQueryOrdered(SpatialQueryTestBase):
class TestBboxQueryPerformance(SpatialQueryTestBase):
# x values for the fixtures
fixtures_x = [(random.uniform(0, 3), random.uniform(3,9)) \
- for x in xrange(10)] # increase the number to 1000 say
+ for x in range(10)] # increase the number to 1000 say
def test_query(self):
bbox_dict = self.x_values_to_bbox((2, 7))
t0 = time.time()
q = bbox_query(bbox_dict)
t1 = time.time()
- print 'bbox_query took: ', t1-t0
+ print('bbox_query took: ', t1-t0)
def test_query_ordered(self):
bbox_dict = self.x_values_to_bbox((2, 7))
t0 = time.time()
q = bbox_query_ordered(bbox_dict)
t1 = time.time()
- print 'bbox_query_ordered took: ', t1-t0
+ print('bbox_query_ordered took: ', t1-t0)
diff --git a/ckanext/spatial/tests/model/test_harvested_metadata.py b/ckanext/spatial/tests/model/test_harvested_metadata.py
index 2c91dc0..7fb03f4 100644
--- a/ckanext/spatial/tests/model/test_harvested_metadata.py
+++ b/ckanext/spatial/tests/model/test_harvested_metadata.py
@@ -13,7 +13,7 @@ def open_xml_fixture(xml_filename):
try:
xml_string = xml_string_raw.encode("utf-8")
- except UnicodeDecodeError, e:
+ except UnicodeDecodeError as e:
assert 0, 'ERROR: Unicode Error reading file \'%s\': %s' % \
(metadata_filepath, e)
return xml_string
diff --git a/ckanext/spatial/tests/test_csw_client.py b/ckanext/spatial/tests/test_csw_client.py
index 494ad2a..f84f65f 100644
--- a/ckanext/spatial/tests/test_csw_client.py
+++ b/ckanext/spatial/tests/test_csw_client.py
@@ -1,5 +1,10 @@
+from future import standard_library
+standard_library.install_aliases()
+from builtins import range
+from future.utils import raise_
+from builtins import object
import time
-from urllib2 import urlopen
+from urllib.request import urlopen
import os
from pylons import config
@@ -9,13 +14,13 @@ from ckan.model import engine_is_sqlite
# copied from ckan/tests/__init__ to save importing it and therefore
# setting up Pylons.
-class CkanServerCase:
+class CkanServerCase(object):
@staticmethod
def _system(cmd):
- import commands
- (status, output) = commands.getstatusoutput(cmd)
+ import subprocess
+ (status, output) = subprocess.getstatusoutput(cmd)
if status:
- raise Exception, "Couldn't execute cmd: %s: %s" % (cmd, output)
+ raise_(Exception, "Couldn't execute cmd: %s: %s" % (cmd, output))
@classmethod
def _paster(cls, cmd, config_path_rel):
@@ -34,11 +39,11 @@ class CkanServerCase:
@staticmethod
def _wait_for_url(url='http://127.0.0.1:5000/', timeout=15):
for i in range(int(timeout)*100):
- import urllib2
+ import urllib.request, urllib.error, urllib.parse
import time
try:
- response = urllib2.urlopen(url)
- except urllib2.URLError:
+ response = urllib.request.urlopen(url)
+ except urllib.error.URLError:
time.sleep(0.01)
else:
break
@@ -48,7 +53,7 @@ class CkanServerCase:
pid = process.pid
pid = int(pid)
if os.system("kill -9 %d" % pid):
- raise Exception, "Can't kill foreign CKAN instance (pid: %d)." % pid
+ raise_(Exception, "Can't kill foreign CKAN instance (pid: %d)." % pid)
class CkanProcess(CkanServerCase):
@classmethod
diff --git a/ckanext/spatial/tests/test_harvest.py b/ckanext/spatial/tests/test_harvest.py
index 2421fa7..b720e95 100644
--- a/ckanext/spatial/tests/test_harvest.py
+++ b/ckanext/spatial/tests/test_harvest.py
@@ -1,3 +1,6 @@
+from __future__ import absolute_import
+from builtins import str
+from builtins import object
import os
from datetime import datetime, date
import lxml
@@ -25,7 +28,7 @@ from ckanext.spatial.harvesters.gemini import (GeminiDocHarvester,
from ckanext.spatial.harvesters.base import SpatialHarvester
from ckanext.spatial.tests.base import SpatialTestBase
-from xml_file_server import serve
+from .xml_file_server import serve
# Start simple HTTP server that serves XML test files
serve()
@@ -117,7 +120,7 @@ class TestHarvest(HarvestFixtureBase):
HarvestFixtureBase.setup_class()
def clean_tags(self, tags):
- return map(lambda x: {u'name': x['name']}, tags)
+ return [{u'name': x['name']} for x in tags]
def find_extra(self, pkg, key):
values = [e['value'] for e in pkg['extras'] if e['key'] == key]
@@ -207,7 +210,7 @@ class TestHarvest(HarvestFixtureBase):
package_dict['tags'] = self.clean_tags(package_dict['tags'])
- for key,value in expected.iteritems():
+ for key,value in expected.items():
if not package_dict[key] == value:
raise AssertionError('Unexpected value for %s: %s (was expecting %s)' % \
(key, package_dict[key], value))
@@ -244,7 +247,7 @@ class TestHarvest(HarvestFixtureBase):
'temporal_coverage-to': u'["2004-06-16"]',
}
- for key,value in expected_extras.iteritems():
+ for key,value in expected_extras.items():
extra_value = self.find_extra(package_dict, key)
if extra_value is None:
raise AssertionError('Extra %s not present in package' % key)
@@ -264,7 +267,7 @@ class TestHarvest(HarvestFixtureBase):
}
resource = package_dict['resources'][0]
- for key,value in expected_resource.iteritems():
+ for key,value in expected_resource.items():
if not key in resource:
raise AssertionError('Expected key not in resource: %s' % (key))
if not resource[key] == value:
@@ -318,7 +321,7 @@ class TestHarvest(HarvestFixtureBase):
package_dict['tags'] = self.clean_tags(package_dict['tags'])
- for key,value in expected.iteritems():
+ for key,value in expected.items():
if not package_dict[key] == value:
raise AssertionError('Unexpected value for %s: %s (was expecting %s)' % \
(key, package_dict[key], value))
@@ -353,7 +356,7 @@ class TestHarvest(HarvestFixtureBase):
'temporal_coverage-to': u'["2010"]',
}
- for key, value in expected_extras.iteritems():
+ for key, value in expected_extras.items():
extra_value = self.find_extra(package_dict, key)
if extra_value is None:
raise AssertionError('Extra %s not present in package' % key)
@@ -372,7 +375,7 @@ class TestHarvest(HarvestFixtureBase):
}
resource = package_dict['resources'][0]
- for key,value in expected_resource.iteritems():
+ for key,value in expected_resource.items():
if not resource[key] == value:
raise AssertionError('Unexpected value in resource for %s: %s (was expecting %s)' % \
(key, resource[key], value))
@@ -862,7 +865,7 @@ class TestHarvest(HarvestFixtureBase):
'publisher_identifier': 'dummy',
'metadata_created' : datetime.now(),
'metadata_modified' : datetime.now(),
- 'guid': unicode(uuid4()),
+ 'guid': str(uuid4()),
'identifier': 'dummy'}
package_data = call_action('package_create', context=context, **package_dict)
@@ -953,7 +956,7 @@ class TestGatherMethods(HarvestFixtureBase):
content = ''
assert_raises(lxml.etree.XMLSyntaxError, self.harvester.get_gemini_string_and_guid, content)
-class TestImportStageTools:
+class TestImportStageTools(object):
def test_licence_url_normal(self):
assert_equal(GeminiHarvester._extract_first_licence_url(
['Reference and PSMA Only',
diff --git a/ckanext/spatial/tests/test_validation.py b/ckanext/spatial/tests/test_validation.py
index 860c238..4cc7d26 100644
--- a/ckanext/spatial/tests/test_validation.py
+++ b/ckanext/spatial/tests/test_validation.py
@@ -1,3 +1,4 @@
+from builtins import object
import os
from lxml import etree
@@ -7,7 +8,7 @@ from ckanext.spatial import validation
# other validation tests are in test_harvest.py
-class TestValidation:
+class TestValidation(object):
def _get_file_path(self, file_name):
return os.path.join(os.path.dirname(__file__), 'xml', file_name)
diff --git a/ckanext/spatial/tests/xml_file_server.py b/ckanext/spatial/tests/xml_file_server.py
index 31e62f0..06712be 100644
--- a/ckanext/spatial/tests/xml_file_server.py
+++ b/ckanext/spatial/tests/xml_file_server.py
@@ -1,7 +1,10 @@
+from __future__ import print_function
+from future import standard_library
+standard_library.install_aliases()
import os
-import SimpleHTTPServer
-import SocketServer
+import http.server
+import socketserver
from threading import Thread
@@ -14,14 +17,14 @@ def serve(port=PORT):
os.chdir(os.path.join(os.path.dirname(os.path.abspath(__file__)),
'xml'))
- Handler = SimpleHTTPServer.SimpleHTTPRequestHandler
+ Handler = http.server.SimpleHTTPRequestHandler
- class TestServer(SocketServer.TCPServer):
+ class TestServer(socketserver.TCPServer):
allow_reuse_address = True
httpd = TestServer(("", PORT), Handler)
- print 'Serving test HTTP server at port', PORT
+ print('Serving test HTTP server at port', PORT)
httpd_thread = Thread(target=httpd.serve_forever)
httpd_thread.setDaemon(True)
diff --git a/ckanext/spatial/util.py b/ckanext/spatial/util.py
index ee0aa26..5724474 100644
--- a/ckanext/spatial/util.py
+++ b/ckanext/spatial/util.py
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
+from __future__ import print_function
+from builtins import str
import os
import sys
@@ -20,17 +22,17 @@ def report(pkg=None):
from ckanext.spatial.lib.reports import validation_report
if pkg:
- package_ref = unicode(pkg)
+ package_ref = str(pkg)
pkg = model.Package.get(package_ref)
if not pkg:
- print 'Package ref "%s" not recognised' % package_ref
+ print('Package ref "%s" not recognised' % package_ref)
sys.exit(1)
report = validation_report(package_id=pkg.id)
for row in report.get_rows_html_formatted():
- print
+ print()
for i, col_name in enumerate(report.column_names):
- print ' %s: %s' % (col_name, row[i])
+ print(' %s: %s' % (col_name, row[i]))
def validate_file(metadata_filepath):
@@ -38,18 +40,18 @@ def validate_file(metadata_filepath):
from ckanext.spatial.model import ISODocument
if not os.path.exists(metadata_filepath):
- print 'Filepath %s not found' % metadata_filepath
+ print('Filepath %s not found' % metadata_filepath)
sys.exit(1)
with open(metadata_filepath, 'rb') as f:
metadata_xml = f.read()
validators = SpatialHarvester()._get_validator()
- print 'Validators: %r' % validators.profiles
+ print('Validators: %r' % validators.profiles)
try:
xml_string = metadata_xml.encode("utf-8")
- except UnicodeDecodeError, e:
- print 'ERROR: Unicode Error reading file \'%s\': %s' % \
- (metadata_filepath, e)
+ except UnicodeDecodeError as e:
+ print('ERROR: Unicode Error reading file \'%s\': %s' % \
+ (metadata_filepath, e))
sys.exit(1)
#import pdb; pdb.set_trace()
xml = etree.fromstring(xml_string)
@@ -62,20 +64,20 @@ def validate_file(metadata_filepath):
try:
iso_document = ISODocument(xml_string)
iso_values = iso_document.read_values()
- except Exception, e:
+ except Exception as e:
valid = False
errors.append(
'CKAN exception reading values from ISODocument: %s' % e)
- print '***************'
- print 'Summary'
- print '***************'
- print 'File: \'%s\'' % metadata_filepath
- print 'Valid: %s' % valid
+ print('***************')
+ print('Summary')
+ print('***************')
+ print('File: \'%s\'' % metadata_filepath)
+ print('Valid: %s' % valid)
if not valid:
- print 'Errors:'
- print pprint(errors)
- print '***************'
+ print('Errors:')
+ print(pprint(errors))
+ print('***************')
def report_csv(csv_filepath):
@@ -87,13 +89,13 @@ def report_csv(csv_filepath):
def initdb(srid=None):
if srid:
- srid = unicode(srid)
+ srid = str(srid)
from ckanext.spatial.model import setup as db_setup
db_setup(srid)
- print 'DB tables created'
+ print('DB tables created')
def update_extents():
@@ -112,10 +114,10 @@ def update_extents():
geometry = json.loads(value)
count += 1
- except ValueError, e:
+ except ValueError as e:
errors.append(u'Package %s - Error decoding JSON object: %s' %
(package.id, str(e)))
- except TypeError, e:
+ except TypeError as e:
errors.append(u'Package %s - Error decoding JSON object: %s' %
(package.id, str(e)))
@@ -125,9 +127,9 @@ def update_extents():
if errors:
msg = 'Errors were found:\n%s' % '\n'.join(errors)
- print msg
+ print(msg)
msg = "Done. Extents generated for %i out of %i packages" % (count,
len(packages))
- print msg
+ print(msg)
diff --git a/ckanext/spatial/validation/__init__.py b/ckanext/spatial/validation/__init__.py
index 8643dcc..c44247c 100644
--- a/ckanext/spatial/validation/__init__.py
+++ b/ckanext/spatial/validation/__init__.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
# this is a namespace package
try:
import pkg_resources
@@ -6,4 +7,4 @@ except ImportError:
import pkgutil
__path__ = pkgutil.extend_path(__path__, __name__)
-from validation import *
+from .validation import *
diff --git a/ckanext/spatial/validation/validation.py b/ckanext/spatial/validation/validation.py
index 28e8506..3f800b7 100644
--- a/ckanext/spatial/validation/validation.py
+++ b/ckanext/spatial/validation/validation.py
@@ -1,3 +1,4 @@
+from builtins import object
import os
from pkg_resources import resource_stream
from ckanext.spatial.model import ISODocument
diff --git a/ckanext/spatial/views.py b/ckanext/spatial/views.py
index dde6b97..df541fc 100644
--- a/ckanext/spatial/views.py
+++ b/ckanext/spatial/views.py
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
+from future import standard_library
+standard_library.install_aliases()
import logging
from flask import Blueprint, make_response
@@ -16,9 +18,9 @@ from ckanext.spatial.lib import get_srid, validate_bbox, bbox_query
try:
- from cStringIO import StringIO
+ from io import StringIO
except ImportError:
- from StringIO import StringIO
+ from io import StringIO
log = logging.getLogger(__name__)
From 93951db642e57343e3582ed8d38afc5c91dcbc08 Mon Sep 17 00:00:00 2001
From: Sergey Motornyuk
Date: Wed, 11 Dec 2019 14:34:31 +0200
Subject: [PATCH 005/139] Add webassets.yml
---
ckanext/spatial/public/webassets.yml | 37 +++++++++++++++++++
.../spatial/snippets/dataset_map_asset.html | 3 +-
.../spatial/snippets/spatial_query_asset.html | 3 +-
3 files changed, 41 insertions(+), 2 deletions(-)
create mode 100644 ckanext/spatial/public/webassets.yml
diff --git a/ckanext/spatial/public/webassets.yml b/ckanext/spatial/public/webassets.yml
new file mode 100644
index 0000000..b179869
--- /dev/null
+++ b/ckanext/spatial/public/webassets.yml
@@ -0,0 +1,37 @@
+dataset_map_js:
+ filter: rjsmin
+ output: ckanext-spatial/%(version)s_dataset_map.js
+ extra:
+ preload:
+ - base/main
+ contents:
+ - js/vendor/leaflet/leaflet.js
+ - js/common_map.js
+ - js/dataset_map.js
+
+dataset_map_css:
+ filters: cssrewrite
+ output: ckanext-spatial/%(version)s_dataset_map.css
+ contents:
+ - js/vendor/leaflet/leaflet.css
+ - css/dataset_map.css
+
+spatial_query_js:
+ filter: rjsmin
+ output: ckanext-spatial/%(version)s_spatial_query.js
+ extra:
+ preload:
+ - base/main
+ contents:
+ - js/vendor/leaflet/leaflet.js
+ - js/vendor/leaflet.draw/leaflet.draw.js
+ - js/common_map.js
+ - js/spatial_query.js
+
+spatial_query_css:
+ filters: cssrewrite
+ output: ckanext-spatial/%(version)s_spatial_query.css
+ contents:
+ - js/vendor/leaflet/leaflet.css
+ - js/vendor/leaflet.draw/leaflet.draw.css
+ - css/spatial_query.css
diff --git a/ckanext/spatial/templates/spatial/snippets/dataset_map_asset.html b/ckanext/spatial/templates/spatial/snippets/dataset_map_asset.html
index c9f18d2..dbbaf14 100644
--- a/ckanext/spatial/templates/spatial/snippets/dataset_map_asset.html
+++ b/ckanext/spatial/templates/spatial/snippets/dataset_map_asset.html
@@ -1 +1,2 @@
-{% asset 'ckanext-spatial/dataset_map' %}
+{% asset 'ckanext-spatial/dataset_map_js' %}
+{% asset 'ckanext-spatial/dataset_map_css' %}
diff --git a/ckanext/spatial/templates/spatial/snippets/spatial_query_asset.html b/ckanext/spatial/templates/spatial/snippets/spatial_query_asset.html
index 649ef62..79d2cab 100644
--- a/ckanext/spatial/templates/spatial/snippets/spatial_query_asset.html
+++ b/ckanext/spatial/templates/spatial/snippets/spatial_query_asset.html
@@ -1 +1,2 @@
-{% asset 'ckanext-spatial/spatial_query' %}
+{% asset 'ckanext-spatial/spatial_query_js' %}
+{% asset 'ckanext-spatial/spatial_query_css' %}
From 27057e0a39c7cf50b9b503c6468d25e0af4925ff Mon Sep 17 00:00:00 2001
From: Sergey Motornyuk
Date: Fri, 13 Dec 2019 16:48:20 +0200
Subject: [PATCH 006/139] IClick support
---
ckanext/spatial/cli.py | 32 ++++++++++++--------------
ckanext/spatial/plugin/flask_plugin.py | 11 +++++++++
setup.py | 4 ----
3 files changed, 26 insertions(+), 21 deletions(-)
diff --git a/ckanext/spatial/cli.py b/ckanext/spatial/cli.py
index 2cef239..7be5a0b 100644
--- a/ckanext/spatial/cli.py
+++ b/ckanext/spatial/cli.py
@@ -1,47 +1,45 @@
# encoding: utf-8
import click
import logging
-from ckan.cli import click_config_option
-from ckan.cli.cli import CkanCommand
import ckanext.spatial.util as util
log = logging.getLogger(__name__)
-
-@click.group(short_help=u"Validation commands")
-@click.help_option(u"-h", u"--help")
-@click_config_option
-@click.pass_context
-def validation(ctx, config, *args, **kwargs):
- ctx.obj = CkanCommand(config)
+def get_commands():
+ return [
+ spatial,
+ spatial_validation
+ ]
-@validation.command()
+@click.group(u"spatial-validation", short_help=u"Validation commands")
+def spatial_validation():
+ pass
+
+
+@spatial_validation.command()
@click.argument('pkg', required=False)
def report(pkg):
return util.report(pkg)
-@validation.command('report-csv')
+@spatial_validation.command('report-csv')
@click.argument('filepath')
def report_csv(filepath):
return util.report_csv(filepath)
-@validation.command('file')
+@spatial_validation.command('file')
@click.argument('filepath')
def validate_file(filepath):
return util.validate_file(filepath)
@click.group(short_help=u"Performs spatially related operations.")
-@click.help_option(u"-h", u"--help")
-@click_config_option
-@click.pass_context
-def spatial(ctx, config, *args, **kwargs):
- ctx.obj = CkanCommand(config)
+def spatial():
+ pass
@spatial.command()
diff --git a/ckanext/spatial/plugin/flask_plugin.py b/ckanext/spatial/plugin/flask_plugin.py
index 3108d65..3c77fe0 100644
--- a/ckanext/spatial/plugin/flask_plugin.py
+++ b/ckanext/spatial/plugin/flask_plugin.py
@@ -1,14 +1,25 @@
+# -*- coding: utf-8 -*-
+
import ckan.plugins as p
import ckanext.spatial.views as blueprints
+from ckanext.spatial.cli import get_commands
+
class SpatialQueryMixin(p.SingletonPlugin):
p.implements(p.IBlueprint)
+ p.implements(p.IClick)
# IBlueprint
def get_blueprint(self):
return [blueprints.api]
+ # IClick
+
+ def get_commands(self):
+ return get_commands()
+
+
class HarvestMetadataApiMixin(p.SingletonPlugin):
p.implements(p.IBlueprint)
diff --git a/setup.py b/setup.py
index 9aee440..a98871c 100644
--- a/setup.py
+++ b/setup.py
@@ -45,9 +45,5 @@ setup(
[ckan.test_plugins]
test_spatial_plugin = ckanext.spatial.tests.test_plugin.plugin:TestSpatialPlugin
-
- [console_scripts]
- spatial = ckanext.spatial.cli:spatial
- validation = ckanext.spatial.cli:validation
""",
)
From a0fc527131e5caeec47390a369adec8189c994db Mon Sep 17 00:00:00 2001
From: Sergey Motornyuk
Date: Wed, 15 Jan 2020 11:08:40 +0200
Subject: [PATCH 007/139] pylons config
---
ckanext/spatial/plugin/__init__.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/ckanext/spatial/plugin/__init__.py b/ckanext/spatial/plugin/__init__.py
index 9bcda9d..06e162c 100644
--- a/ckanext/spatial/plugin/__init__.py
+++ b/ckanext/spatial/plugin/__init__.py
@@ -4,17 +4,18 @@ import re
import mimetypes
from logging import getLogger
-from pylons import config
from ckan import plugins as p
from ckan.lib.helpers import json
if p.toolkit.check_ckan_version(min_version="2.9"):
+ config = p.toolkit.config
from ckanext.spatial.plugin.flask_plugin import (
SpatialQueryMixin, HarvestMetadataApiMixin
)
else:
+ from pylons import config
from ckanext.spatial.plugin.pylons_plugin import (
SpatialQueryMixin, HarvestMetadataApiMixin
)
From 6060bc5cb47dda6569aa74ffcf40ee05a6818519 Mon Sep 17 00:00:00 2001
From: Sergey Motornyuk
Date: Tue, 21 Jan 2020 14:25:53 +0200
Subject: [PATCH 008/139] Fix encoding for gemini harvester
---
ckanext/spatial/harvesters/gemini.py | 4 +---
1 file changed, 1 insertion(+), 3 deletions(-)
diff --git a/ckanext/spatial/harvesters/gemini.py b/ckanext/spatial/harvesters/gemini.py
index 48ac8f2..8785818 100644
--- a/ckanext/spatial/harvesters/gemini.py
+++ b/ckanext/spatial/harvesters/gemini.py
@@ -101,7 +101,7 @@ class GeminiHarvester(SpatialHarvester):
log.error('Errors found for object with GUID %s:' % self.obj.guid)
self._save_object_error(out,self.obj,'Import')
- unicode_gemini_string = etree.tostring(xml, encoding=str, pretty_print=True)
+ unicode_gemini_string = etree.tostring(xml, encoding='utf8', pretty_print=True)
# may raise Exception for errors
package_dict = self.write_package_from_gemini_string(unicode_gemini_string)
@@ -799,5 +799,3 @@ class GeminiWafHarvester(GeminiHarvester, SingletonPlugin):
base_url += '/'
log.debug('WAF base URL: %s', base_url)
return [base_url + i for i in urls]
-
-
From ca35e7a97114053c0cafce11f1284a4b5282b822 Mon Sep 17 00:00:00 2001
From: Sergey Motornyuk
Date: Tue, 21 Jan 2020 14:31:34 +0200
Subject: [PATCH 009/139] Update encoding
---
ckanext/spatial/controllers/api.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/ckanext/spatial/controllers/api.py b/ckanext/spatial/controllers/api.py
index 835ecae..c2f4973 100644
--- a/ckanext/spatial/controllers/api.py
+++ b/ckanext/spatial/controllers/api.py
@@ -1,6 +1,7 @@
from future import standard_library
standard_library.install_aliases()
import logging
+import six
try:
from io import StringIO
@@ -87,7 +88,7 @@ class HarvestMetadataApiController(BaseApiController):
style_xml = etree.parse(style)
transformer = etree.XSLT(style_xml)
- xml = etree.parse(StringIO(content.encode('utf-8')))
+ xml = etree.parse(StringIO(content and six.ensure_text(content)))
html = transformer(xml)
response.headers['Content-Type'] = 'text/html; charset=utf-8'
From 1e6f68990dd9dc53f9413f2387cb11515ae07013 Mon Sep 17 00:00:00 2001
From: Sergey Motornyuk
Date: Tue, 21 Jan 2020 15:28:30 +0200
Subject: [PATCH 010/139] Use 1.11 six api
---
ckanext/spatial/controllers/api.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/ckanext/spatial/controllers/api.py b/ckanext/spatial/controllers/api.py
index c2f4973..6f985b1 100644
--- a/ckanext/spatial/controllers/api.py
+++ b/ckanext/spatial/controllers/api.py
@@ -88,7 +88,7 @@ class HarvestMetadataApiController(BaseApiController):
style_xml = etree.parse(style)
transformer = etree.XSLT(style_xml)
- xml = etree.parse(StringIO(content and six.ensure_text(content)))
+ xml = etree.parse(StringIO(content and six.text_type(content)))
html = transformer(xml)
response.headers['Content-Type'] = 'text/html; charset=utf-8'
From de975a0816a2aef3bc166dc910d5fd44d583a41c Mon Sep 17 00:00:00 2001
From: Yan Rudenko
Date: Thu, 30 Jan 2020 16:51:48 +0200
Subject: [PATCH 011/139] update requirements
---
pip-requirements.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pip-requirements.txt b/pip-requirements.txt
index e948c50..86dbb4c 100644
--- a/pip-requirements.txt
+++ b/pip-requirements.txt
@@ -1,7 +1,7 @@
GeoAlchemy>=0.6
GeoAlchemy2==0.5.0
Shapely>=1.2.13
-OWSLib==0.8.6
+OWSLib>=0.19.0
lxml>=2.3
argparse
pyparsing>=2.1.10
From 68a872a6cb27785ca41d7cf256401ee34913994f Mon Sep 17 00:00:00 2001
From: Sergey Motornyuk
Date: Tue, 14 Apr 2020 23:11:15 +0300
Subject: [PATCH 012/139] remove future
---
ckanext/spatial/controllers/api.py | 11 ++-----
ckanext/spatial/harvesters/base.py | 38 +++++++++++------------
ckanext/spatial/harvesters/csw.py | 20 +++++-------
ckanext/spatial/harvesters/gemini.py | 29 ++++++++---------
ckanext/spatial/harvesters/waf.py | 13 ++++----
ckanext/spatial/lib/__init__.py | 7 ++---
ckanext/spatial/lib/csw_client.py | 18 +++++------
ckanext/spatial/lib/report.py | 13 +++-----
ckanext/spatial/tests/lib/test_spatial.py | 16 +++++-----
ckanext/spatial/tests/test_csw_client.py | 22 +++++--------
ckanext/spatial/tests/xml_file_server.py | 25 +++++++++------
ckanext/spatial/views.py | 34 ++++++++------------
pip-requirements.txt | 2 +-
13 files changed, 107 insertions(+), 141 deletions(-)
diff --git a/ckanext/spatial/controllers/api.py b/ckanext/spatial/controllers/api.py
index 6f985b1..0d8eb95 100644
--- a/ckanext/spatial/controllers/api.py
+++ b/ckanext/spatial/controllers/api.py
@@ -1,12 +1,7 @@
-from future import standard_library
-standard_library.install_aliases()
import logging
import six
-try:
- from io import StringIO
-except ImportError:
- from io import StringIO
+from six import StringIO
from pylons import response
from pkg_resources import resource_stream
@@ -29,7 +24,7 @@ class ApiController(BaseApiController):
error_400_msg = \
'Please provide a suitable bbox parameter [minx,miny,maxx,maxy]'
- if not 'bbox' in request.params:
+ if 'bbox' not in request.params:
abort(400, error_400_msg)
bbox = validate_bbox(request.params['bbox'])
@@ -130,7 +125,7 @@ class HarvestMetadataApiController(BaseApiController):
response.headers['Content-Type'] = 'application/xml; charset=utf-8'
response.headers['Content-Length'] = len(content)
- if not '<?xml' in content.split('\n')[0]:
+ if '<?xml' not in content.split('\n')[0]:
content = u'<?xml version="1.0" encoding="UTF-8"?>\n' + content
return content.encode('utf-8')
diff --git a/ckanext/spatial/harvesters/base.py b/ckanext/spatial/harvesters/base.py
index 30b6024..ff5839b 100644
--- a/ckanext/spatial/harvesters/base.py
+++ b/ckanext/spatial/harvesters/base.py
@@ -1,22 +1,20 @@
-from future import standard_library
-standard_library.install_aliases()
-from builtins import str
-from past.builtins import basestring
+import six
+from six.moves.urllib.parse import urlparse
+from six.moves.urllib.request import urlopen
+
import re
import cgitb
import warnings
-import urllib.request, urllib.error, urllib.parse
+
import sys
import logging
from string import Template
-from urllib.parse import urlparse
from datetime import datetime
import uuid
import hashlib
import dateutil
import mimetypes
-
from owslib import wms
import requests
from lxml import etree
@@ -243,7 +241,7 @@ class SpatialHarvester(HarvesterBase):
if package is None or package.title != iso_values['title']:
name = self._gen_new_name(iso_values['title'])
if not name:
- name = self._gen_new_name(str(iso_values['guid']))
+ name = self._gen_new_name(six.text_type(iso_values['guid']))
if not name:
raise Exception('Could not generate a unique name from the title or the GUID. Please choose a more unique title.')
package_dict['name'] = name
@@ -357,7 +355,7 @@ class SpatialHarvester(HarvesterBase):
ymin = float(bbox['south'])
ymax = float(bbox['north'])
except ValueError as e:
- self._save_object_error('Error parsing bounding box value: {0}'.format(str(e)),
+ self._save_object_error('Error parsing bounding box value: {0}'.format(six.text_type(e)),
harvest_object, 'Import')
else:
# Construct a GeoJSON extent so ckanext-spatial can register the extent geometry
@@ -414,7 +412,7 @@ class SpatialHarvester(HarvesterBase):
log.debug('Processing extra %s', key)
if not key in extras or override_extras:
# Look for replacement strings
- if isinstance(value,basestring):
+ if isinstance(value,six.string_types):
value = value.format(harvest_source_id=harvest_object.job.source.id,
harvest_source_url=harvest_object.job.source.url.strip('/'),
harvest_source_title=harvest_object.job.source.title,
@@ -518,7 +516,7 @@ class SpatialHarvester(HarvesterBase):
iso_parser = ISODocument(harvest_object.content)
iso_values = iso_parser.read_values()
except Exception as e:
- self._save_object_error('Error parsing ISO document for object {0}: {1}'.format(harvest_object.id, str(e)),
+ self._save_object_error('Error parsing ISO document for object {0}: {1}'.format(harvest_object.id, six.text_type(e)),
harvest_object, 'Import')
return False
@@ -588,7 +586,7 @@ class SpatialHarvester(HarvesterBase):
# The default package schema does not like Upper case tags
tag_schema = logic.schema.default_tags_schema()
- tag_schema['name'] = [not_empty, str]
+ tag_schema['name'] = [not_empty, six.text_type]
# Flag this object as the current one
harvest_object.current = True
@@ -601,8 +599,8 @@ class SpatialHarvester(HarvesterBase):
# We need to explicitly provide a package ID, otherwise ckanext-spatial
# won't be be able to link the extent to the package.
- package_dict['id'] = str(uuid.uuid4())
- package_schema['id'] = [str]
+ package_dict['id'] = six.text_type(uuid.uuid4())
+ package_schema['id'] = [six.text_type]
# Save reference to the package on the object
harvest_object.package_id = package_dict['id']
@@ -617,7 +615,7 @@ class SpatialHarvester(HarvesterBase):
package_id = p.toolkit.get_action('package_create')(context, package_dict)
log.info('Created new package %s with guid %s', package_id, harvest_object.guid)
except p.toolkit.ValidationError as e:
- self._save_object_error('Validation Error: %s' % str(e.error_summary), harvest_object, 'Import')
+ self._save_object_error('Validation Error: %s' % six.text_type(e.error_summary), harvest_object, 'Import')
return False
elif status == 'change':
@@ -663,7 +661,7 @@ class SpatialHarvester(HarvesterBase):
package_id = p.toolkit.get_action('package_update')(context, package_dict)
log.info('Updated package %s with guid %s', package_id, harvest_object.guid)
except p.toolkit.ValidationError as e:
- self._save_object_error('Validation Error: %s' % str(e.error_summary), harvest_object, 'Import')
+ self._save_object_error('Validation Error: %s' % six.text_type(e.error_summary), harvest_object, 'Import')
return False
model.Session.commit()
@@ -678,13 +676,13 @@ class SpatialHarvester(HarvesterBase):
'''
try:
capabilities_url = wms.WMSCapabilitiesReader().capabilities_url(url)
- res = urllib.request.urlopen(capabilities_url, None, 10)
+ res = urlopen(capabilities_url, None, 10)
xml = res.read()
s = wms.WebMapService(url, xml=xml)
return isinstance(s.contents, dict) and s.contents != {}
except Exception as e:
- log.error('WMS check for %s failed with exception: %s' % (url, str(e)))
+ log.error('WMS check for %s failed with exception: %s' % (url, six.text_type(e)))
return False
def _get_object_extra(self, harvest_object, key):
@@ -775,7 +773,7 @@ class SpatialHarvester(HarvesterBase):
DEPRECATED: Use _get_content_as_unicode instead
'''
url = url.replace(' ', '%20')
- http_response = urllib.request.urlopen(url)
+ http_response = urlopen(url)
return http_response.read()
def _get_content_as_unicode(self, url):
@@ -827,7 +825,7 @@ class SpatialHarvester(HarvesterBase):
try:
xml = etree.fromstring(document_string)
except etree.XMLSyntaxError as e:
- self._save_object_error('Could not parse XML file: {0}'.format(str(e)), harvest_object, 'Import')
+ self._save_object_error('Could not parse XML file: {0}'.format(six.text_type(e)), harvest_object, 'Import')
return False, None, []
valid, profile, errors = validator.is_valid(xml)
diff --git a/ckanext/spatial/harvesters/csw.py b/ckanext/spatial/harvesters/csw.py
index ab17482..c27824b 100644
--- a/ckanext/spatial/harvesters/csw.py
+++ b/ckanext/spatial/harvesters/csw.py
@@ -1,9 +1,6 @@
-from future import standard_library
-standard_library.install_aliases()
-from builtins import str
import re
-import urllib.request, urllib.parse, urllib.error
-import urllib.parse
+import six
+from six.moves.urllib.parse import urlparse, urlunparse, urlencode
import logging
@@ -25,7 +22,7 @@ class CSWHarvester(SpatialHarvester, SingletonPlugin):
'''
implements(IHarvester)
- csw=None
+ csw = None
def info(self):
return {
@@ -34,13 +31,12 @@ class CSWHarvester(SpatialHarvester, SingletonPlugin):
'description': 'A server that implements OGC\'s Catalog Service for the Web (CSW) standard'
}
-
def get_original_url(self, harvest_object_id):
obj = model.Session.query(HarvestObject).\
filter(HarvestObject.id==harvest_object_id).\
first()
- parts = urllib.parse.urlparse(obj.source.url)
+ parts = urlparse(obj.source.url)
params = {
'SERVICE': 'CSW',
@@ -51,12 +47,12 @@ class CSWHarvester(SpatialHarvester, SingletonPlugin):
'ID': obj.guid
}
- url = urllib.parse.urlunparse((
+ url = urlunparse((
parts.scheme,
parts.netloc,
parts.path,
None,
- urllib.parse.urlencode(params),
+ urlencode(params),
None
))
@@ -107,10 +103,9 @@ class CSWHarvester(SpatialHarvester, SingletonPlugin):
self._save_gather_error('Error for the identifier %s [%r]' % (identifier,e), harvest_job)
continue
-
except Exception as e:
log.error('Exception: %s' % text_traceback())
- self._save_gather_error('Error gathering the identifiers from the CSW server [%s]' % str(e), harvest_job)
+ self._save_gather_error('Error gathering the identifiers from the CSW server [%s]' % six.text_type(e), harvest_job)
return None
new = guids_in_harvest - guids_in_db
@@ -195,4 +190,3 @@ class CSWHarvester(SpatialHarvester, SingletonPlugin):
def _setup_csw_client(self, url):
self.csw = CswService(url)
-
diff --git a/ckanext/spatial/harvesters/gemini.py b/ckanext/spatial/harvesters/gemini.py
index 8785818..18158be 100644
--- a/ckanext/spatial/harvesters/gemini.py
+++ b/ckanext/spatial/harvesters/gemini.py
@@ -8,12 +8,9 @@ but can be easily adapted for other INSPIRE/ISO19139 XML metadata
- GeminiWafHarvester - An index page with links to GEMINI resources
'''
-from future import standard_library
-standard_library.install_aliases()
-from builtins import str
-from past.builtins import basestring
+import six
import os
-from urllib.parse import urlparse
+from six.moves.urllib.parse import urlparse
from datetime import datetime
from numbers import Number
import uuid
@@ -76,10 +73,10 @@ class GeminiHarvester(SpatialHarvester):
return True
except Exception as e:
log.error('Exception during import: %s' % text_traceback())
- if not str(e).strip():
+ if not six.text_type(e).strip():
self._save_object_error('Error importing Gemini document.', harvest_object, 'Import')
else:
- self._save_object_error('Error importing Gemini document: %s' % str(e), harvest_object, 'Import')
+ self._save_object_error('Error importing Gemini document: %s' % six.text_type(e), harvest_object, 'Import')
raise
if debug_exception_mode:
raise
@@ -278,7 +275,7 @@ class GeminiHarvester(SpatialHarvester):
if package is None or package.title != gemini_values['title']:
name = self.gen_new_name(gemini_values['title'])
if not name:
- name = self.gen_new_name(str(gemini_guid))
+ name = self.gen_new_name(six.text_type(gemini_guid))
if not name:
raise Exception('Could not generate a unique name from the title or the GUID. Please choose a more unique title.')
package_dict['name'] = name
@@ -323,7 +320,7 @@ class GeminiHarvester(SpatialHarvester):
extras_as_dict = []
for key,value in extras.items():
- if isinstance(value,(basestring,Number)):
+ if isinstance(value, six.string_types + (Number,)):
extras_as_dict.append({'key':key,'value':value})
else:
extras_as_dict.append({'key':key,'value':json.dumps(value)})
@@ -416,8 +413,8 @@ class GeminiHarvester(SpatialHarvester):
else:
counter = 1
while counter < 101:
- if name+str(counter) not in taken:
- return name+str(counter)
+ if name+six.text_type(counter) not in taken:
+ return name+six.text_type(counter)
counter = counter + 1
return None
@@ -457,7 +454,7 @@ class GeminiHarvester(SpatialHarvester):
# The default package schema does not like Upper case tags
tag_schema = logic.schema.default_tags_schema()
- tag_schema['name'] = [not_empty,str]
+ tag_schema['name'] = [not_empty,six.text_type]
package_schema['tags'] = tag_schema
# TODO: user
@@ -470,8 +467,8 @@ class GeminiHarvester(SpatialHarvester):
if not package:
# We need to explicitly provide a package ID, otherwise ckanext-spatial
# won't be be able to link the extent to the package.
- package_dict['id'] = str(uuid.uuid4())
- package_schema['id'] = [str]
+ package_dict['id'] = six.text_type(uuid.uuid4())
+ package_schema['id'] = [six.text_type]
action_function = get_action('package_create')
else:
@@ -481,7 +478,7 @@ class GeminiHarvester(SpatialHarvester):
try:
package_dict = action_function(context, package_dict)
except ValidationError as e:
- raise Exception('Validation Error: %s' % str(e.error_summary))
+ raise Exception('Validation Error: %s' % six.text_type(e.error_summary))
if debug_exception_mode:
raise
@@ -575,7 +572,7 @@ class GeminiCswHarvester(GeminiHarvester, SingletonPlugin):
except Exception as e:
log.error('Exception: %s' % text_traceback())
- self._save_gather_error('Error gathering the identifiers from the CSW server [%s]' % str(e), harvest_job)
+ self._save_gather_error('Error gathering the identifiers from the CSW server [%s]' % six.text_type(e), harvest_job)
return None
if len(ids) == 0:
diff --git a/ckanext/spatial/harvesters/waf.py b/ckanext/spatial/harvesters/waf.py
index e4b36de..8f657e7 100644
--- a/ckanext/spatial/harvesters/waf.py
+++ b/ckanext/spatial/harvesters/waf.py
@@ -1,10 +1,10 @@
from __future__ import print_function
-from future import standard_library
-standard_library.install_aliases()
-from builtins import str
+
+import six
+from six.moves.urllib.parse import urljoin
import logging
import hashlib
-from urllib.parse import urljoin
+
import dateutil.parser
import pyparsing as parse
import requests
@@ -303,7 +303,7 @@ def _extract_waf(content, base_url, scraper, results = None, depth=0):
response = requests.get(new_url)
content = response.content
except Exception as e:
- print(str(e))
+ print(six.text_type(e))
continue
_extract_waf(content, new_url, scraper, results, new_depth)
continue
@@ -312,11 +312,10 @@ def _extract_waf(content, base_url, scraper, results = None, depth=0):
date = record.date
if date:
try:
- date = str(dateutil.parser.parse(date))
+ date = six.text_type(dateutil.parser.parse(date))
except Exception as e:
raise
date = None
results.append((urljoin(base_url, record.url), date))
return results
-
diff --git a/ckanext/spatial/lib/__init__.py b/ckanext/spatial/lib/__init__.py
index 529698b..1646a86 100644
--- a/ckanext/spatial/lib/__init__.py
+++ b/ckanext/spatial/lib/__init__.py
@@ -1,5 +1,4 @@
-from builtins import str
-from past.builtins import basestring
+import six
import logging
from string import Template
@@ -103,7 +102,7 @@ def validate_bbox(bbox_values):
Any problems and it returns None.
'''
- if isinstance(bbox_values,basestring):
+ if isinstance(bbox_values,six.string_types):
bbox_values = bbox_values.split(',')
if len(bbox_values) is not 4:
@@ -174,7 +173,7 @@ def bbox_query_ordered(bbox, srid=None):
input_geometry = _bbox_2_wkt(bbox, srid)
- params = {'query_bbox': str(input_geometry),
+ params = {'query_bbox': six.text_type(input_geometry),
'query_srid': input_geometry.srid}
# First get the area of the query box
diff --git a/ckanext/spatial/lib/csw_client.py b/ckanext/spatial/lib/csw_client.py
index 4bb9f88..0ac075e 100644
--- a/ckanext/spatial/lib/csw_client.py
+++ b/ckanext/spatial/lib/csw_client.py
@@ -2,9 +2,7 @@
Some very thin wrapper classes around those in OWSLib
for convenience.
"""
-
-from past.builtins import basestring
-from builtins import object
+import six
import logging
from owslib.etree import etree
@@ -19,14 +17,14 @@ class OwsService(object):
def __init__(self, endpoint=None):
if endpoint is not None:
self._ows(endpoint)
-
+
def __call__(self, args):
return getattr(self, args.operation)(**self._xmd(args))
-
+
@classmethod
def _operations(cls):
return [x for x in dir(cls) if not x.startswith("_")]
-
+
def _xmd(self, obj):
md = {}
for attr in [x for x in dir(obj) if not x.startswith("_")]:
@@ -35,7 +33,7 @@ class OwsService(object):
pass
elif callable(val):
pass
- elif isinstance(val, basestring):
+ elif isinstance(val, six.string_types):
md[attr] = val
elif isinstance(val, int):
md[attr] = val
@@ -44,7 +42,7 @@ class OwsService(object):
else:
md[attr] = self._xmd(val)
return md
-
+
def _ows(self, endpoint=None, **kw):
if not hasattr(self, "_Implementation"):
raise NotImplementedError("Needs an Implementation")
@@ -53,7 +51,7 @@ class OwsService(object):
raise ValueError("Must specify a service endpoint")
self.__ows_obj__ = self._Implementation(endpoint)
return self.__ows_obj__
-
+
def getcapabilities(self, debug=False, **kw):
ows = self._ows(**kw)
caps = self._xmd(ows)
@@ -62,7 +60,7 @@ class OwsService(object):
if "response" in caps: del caps["response"]
if "owscommon" in caps: del caps["owscommon"]
return caps
-
+
class CswService(OwsService):
"""
Perform various operations on a CSW service
diff --git a/ckanext/spatial/lib/report.py b/ckanext/spatial/lib/report.py
index cc05202..d1165d6 100644
--- a/ckanext/spatial/lib/report.py
+++ b/ckanext/spatial/lib/report.py
@@ -3,14 +3,10 @@ Library for creating reports that can be displayed easily in an HTML table
and then saved as a CSV.
'''
-from future import standard_library
-standard_library.install_aliases()
-from builtins import str
-from builtins import object
+from six import text_type, StringIO
import datetime
import csv
-try: from io import StringIO
-except ImportError: from io import StringIO
+
class ReportTable(object):
def __init__(self, column_names):
@@ -56,9 +52,9 @@ class ReportTable(object):
if isinstance(cell, datetime.datetime):
cell = cell.strftime('%Y-%m-%d %H:%M')
elif isinstance(cell, int):
- cell = str(cell)
+ cell = text_type(cell)
elif isinstance(cell, (list, tuple)):
- cell = str(cell)
+ cell = text_type(cell)
elif cell is None:
cell = ''
else:
@@ -70,4 +66,3 @@ class ReportTable(object):
raise Exception("%s: %s, %s"%(e, row, row_formatted))
csvout.seek(0)
return csvout.read()
-
diff --git a/ckanext/spatial/tests/lib/test_spatial.py b/ckanext/spatial/tests/lib/test_spatial.py
index d92e4f2..b6040fc 100644
--- a/ckanext/spatial/tests/lib/test_spatial.py
+++ b/ckanext/spatial/tests/lib/test_spatial.py
@@ -1,8 +1,6 @@
from __future__ import print_function
-from builtins import str
-from builtins import range
-from past.builtins import basestring
-from builtins import object
+import six
+
import time
import random
@@ -25,7 +23,7 @@ from ckanext.spatial.tests.base import SpatialTestBase
class TestCompareGeometries(SpatialTestBase):
def _get_extent_object(self, geometry):
- if isinstance(geometry, basestring):
+ if isinstance(geometry, six.string_types):
geometry = json.loads(geometry)
shape = asShape(geometry)
return PackageExtent(package_id='xxx',
@@ -46,7 +44,6 @@ class TestCompareGeometries(SpatialTestBase):
assert not compare_geometry_fields(extent1.the_geom, extent2.the_geom)
-
class TestValidateBbox(object):
bbox_dict = {'minx': -4.96,
'miny': 55.70,
@@ -69,9 +66,11 @@ class TestValidateBbox(object):
res = validate_bbox('random')
assert_equal(res, None)
+
def bbox_2_geojson(bbox_dict):
return '{"type":"Polygon","coordinates":[[[%(minx)s, %(miny)s],[%(minx)s, %(maxy)s], [%(maxx)s, %(maxy)s], [%(maxx)s, %(miny)s], [%(minx)s, %(miny)s]]]}' % bbox_dict
+
class SpatialQueryTestBase(SpatialTestBase):
'''Base class for tests of spatial queries'''
miny = 0
@@ -83,8 +82,8 @@ class SpatialQueryTestBase(SpatialTestBase):
for fixture_x in cls.fixtures_x:
bbox = cls.x_values_to_bbox(fixture_x)
bbox_geojson = bbox_2_geojson(bbox)
- cls.create_package(name=munge_title_to_name(str(fixture_x)),
- title=str(fixture_x),
+ cls.create_package(name=munge_title_to_name(six.text_type(fixture_x)),
+ title=six.text_type(fixture_x),
extras=[{'key': 'spatial',
'value': bbox_geojson}])
@@ -106,6 +105,7 @@ class SpatialQueryTestBase(SpatialTestBase):
return {'minx': x_tuple[0], 'maxx': x_tuple[1],
'miny': cls.miny, 'maxy': cls.maxy}
+
class TestBboxQuery(SpatialQueryTestBase):
# x values for the fixtures
fixtures_x = [(0, 1), (0, 3), (0, 4), (4, 5), (6, 7)]
diff --git a/ckanext/spatial/tests/test_csw_client.py b/ckanext/spatial/tests/test_csw_client.py
index f84f65f..de64323 100644
--- a/ckanext/spatial/tests/test_csw_client.py
+++ b/ckanext/spatial/tests/test_csw_client.py
@@ -1,10 +1,6 @@
-from future import standard_library
-standard_library.install_aliases()
-from builtins import range
-from future.utils import raise_
-from builtins import object
import time
-from urllib.request import urlopen
+from six.moves.urllib.request import urlopen
+from six.moves.urllib.error import URLError
import os
from pylons import config
@@ -12,6 +8,7 @@ from nose.plugins.skip import SkipTest
from ckan.model import engine_is_sqlite
+
# copied from ckan/tests/__init__ to save importing it and therefore
# setting up Pylons.
class CkanServerCase(object):
@@ -20,7 +17,7 @@ class CkanServerCase(object):
import subprocess
(status, output) = subprocess.getstatusoutput(cmd)
if status:
- raise_(Exception, "Couldn't execute cmd: %s: %s" % (cmd, output))
+ raise Exception("Couldn't execute cmd: %s: %s" % (cmd, output))
@classmethod
def _paster(cls, cmd, config_path_rel):
@@ -39,21 +36,19 @@ class CkanServerCase(object):
@staticmethod
def _wait_for_url(url='http://127.0.0.1:5000/', timeout=15):
for i in range(int(timeout)*100):
- import urllib.request, urllib.error, urllib.parse
- import time
try:
- response = urllib.request.urlopen(url)
- except urllib.error.URLError:
+ urlopen(url)
+ except URLError:
time.sleep(0.01)
else:
break
@staticmethod
- def _stop_ckan_server(process):
+ def _stop_ckan_server(process):
pid = process.pid
pid = int(pid)
if os.system("kill -9 %d" % pid):
- raise_(Exception, "Can't kill foreign CKAN instance (pid: %d)." % pid)
+ raise Exception("Can't kill foreign CKAN instance (pid: %d)." % pid)
class CkanProcess(CkanServerCase):
@classmethod
@@ -68,4 +63,3 @@ class CkanProcess(CkanServerCase):
@classmethod
def teardown_class(cls):
cls._stop_ckan_server(cls.pid)
-
diff --git a/ckanext/spatial/tests/xml_file_server.py b/ckanext/spatial/tests/xml_file_server.py
index 06712be..74f4fbf 100644
--- a/ckanext/spatial/tests/xml_file_server.py
+++ b/ckanext/spatial/tests/xml_file_server.py
@@ -1,29 +1,34 @@
from __future__ import print_function
-from future import standard_library
-standard_library.install_aliases()
+
import os
-import http.server
-import socketserver
+try:
+ from http.server import SimpleHTTPRequestHandler
+ from socketserver import TCPServer
+except ImportError:
+ from SimpleHTTPServer import SimpleHTTPRequestHandler
+ from SocketServer import TCPServer
+
from threading import Thread
PORT = 8999
+
def serve(port=PORT):
'''Serves test XML files over HTTP'''
-
+
# Make sure we serve from the tests' XML directory
os.chdir(os.path.join(os.path.dirname(os.path.abspath(__file__)),
'xml'))
- Handler = http.server.SimpleHTTPRequestHandler
-
- class TestServer(socketserver.TCPServer):
+ Handler = SimpleHTTPRequestHandler
+
+ class TestServer(TCPServer):
allow_reuse_address = True
-
+
httpd = TestServer(("", PORT), Handler)
-
+
print('Serving test HTTP server at port', PORT)
httpd_thread = Thread(target=httpd.serve_forever)
diff --git a/ckanext/spatial/views.py b/ckanext/spatial/views.py
index df541fc..7f287b9 100644
--- a/ckanext/spatial/views.py
+++ b/ckanext/spatial/views.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
-from future import standard_library
-standard_library.install_aliases()
import logging
from flask import Blueprint, make_response
@@ -16,11 +14,7 @@ from ckan.views.api import _finish_ok, _finish_bad_request
from ckanext.spatial.lib import get_srid, validate_bbox, bbox_query
-
-try:
- from io import StringIO
-except ImportError:
- from io import StringIO
+from six import StringIO
log = logging.getLogger(__name__)
@@ -31,7 +25,7 @@ def spatial_query(register):
error_400_msg = \
'Please provide a suitable bbox parameter [minx,miny,maxx,maxy]'
- if not 'bbox' in request.args:
+ if 'bbox' not in request.args:
return _finish_bad_request(error_400_msg)
bbox = validate_bbox(request.params['bbox'])
@@ -44,8 +38,6 @@ def spatial_query(register):
extents = bbox_query(bbox, srid)
- format = request.args.get('format', '')
-
ids = [extent.package_id for extent in extents]
output = dict(count=len(ids), results=ids)
@@ -68,11 +60,12 @@ def harvest_object_redirect_html(id):
def _get_original_content(id):
from ckanext.harvest.model import HarvestObject, HarvestObjectExtra
- extra = Session.query(HarvestObjectExtra).join(HarvestObject) \
- .filter(HarvestObject.id == id) \
- .filter(
- HarvestObjectExtra.key == 'original_document'
- ).first()
+ extra = Session.query(
+ HarvestObjectExtra
+ ).join(HarvestObject).filter(HarvestObject.id == id).filter(
+ HarvestObjectExtra.key == 'original_document'
+ ).first()
+
if extra:
return extra.value
else:
@@ -81,8 +74,7 @@ def _get_original_content(id):
def _get_content(id):
from ckanext.harvest.model import HarvestObject
- obj = Session.query(HarvestObject) \
- .filter(HarvestObject.id == id).first()
+ obj = Session.query(HarvestObject).filter(HarvestObject.id == id).first()
if obj:
return obj.content
else:
@@ -140,9 +132,9 @@ def display_xml_original(id):
headers = {'Content-Type': 'application/xml; charset=utf-8'}
- if not '<?xml' in content.split('\n')[0]:
-     content = u'<?xml version="1.0" encoding="UTF-8"?>\n' + content
- response = make_response((content, 200, headers))
+ if '<?xml' not in content.split('\n')[0]:
+     content = u'<?xml version="1.0" encoding="UTF-8"?>\n' + content
+ return make_response((content, 200, headers))
def display_html(id):
@@ -154,7 +146,7 @@ def display_html(id):
xslt_package, xslt_path = _get_xslt()
content = _transform_to_html(content, xslt_package, xslt_path)
- response = make_response((content, 200, headers))
+ return make_response((content, 200, headers))
def display_html_original(id):
@@ -166,7 +158,7 @@ def display_html_original(id):
xslt_package, xslt_path = _get_xslt(original=True)
content = _transform_to_html(content, xslt_package, xslt_path)
- response = make_response((content, 200, headers))
+ return make_response((content, 200, headers))
harvest_metadata.add_url_rule('/api/2/rest/harvestobject/<id>/xml',
diff --git a/pip-requirements.txt b/pip-requirements.txt
index 86dbb4c..12710f3 100644
--- a/pip-requirements.txt
+++ b/pip-requirements.txt
@@ -6,4 +6,4 @@ lxml>=2.3
argparse
pyparsing>=2.1.10
requests>=1.1.0
-future>=0.18.2
+six
From a58510a4321208e3e05cfde8dd66659893aa482a Mon Sep 17 00:00:00 2001
From: Sergey Motornyuk
Date: Wed, 15 Apr 2020 00:06:11 +0300
Subject: [PATCH 013/139] Remove builtins
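
Drop the remaining imports from the `future` package's `builtins` module and rely
on `six` instead: `six.text_type` where `str` was used to build error messages,
and `six.binary_type` where byte strings are detected. A minimal sketch of the
substitution pattern (the variable names below are illustrative only, not taken
from the changed modules):

    import six

    exc = ValueError("boom")    # stand-in for a caught exception
    payload = b"<gmd:MD_Metadata/>"  # stand-in for harvested XML content

    # six.text_type replaces builtins.str when formatting messages
    message = u"Error decoding JSON object: %s" % six.text_type(exc)
    # six.binary_type replaces comparisons against str for byte strings
    needs_decoding = isinstance(payload, six.binary_type)

    assert message == u"Error decoding JSON object: boom"
    assert needs_decoding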
---
ckanext/spatial/model/harvested_metadata.py | 5 ++---
ckanext/spatial/plugin/__init__.py | 10 +++++-----
ckanext/spatial/tests/base.py | 2 --
ckanext/spatial/tests/test_harvest.py | 8 +++-----
ckanext/spatial/tests/test_validation.py | 1 -
ckanext/spatial/util.py | 11 ++++++-----
ckanext/spatial/validation/validation.py | 1 -
7 files changed, 16 insertions(+), 22 deletions(-)
diff --git a/ckanext/spatial/model/harvested_metadata.py b/ckanext/spatial/model/harvested_metadata.py
index 104fc3c..f69038e 100644
--- a/ckanext/spatial/model/harvested_metadata.py
+++ b/ckanext/spatial/model/harvested_metadata.py
@@ -1,6 +1,5 @@
-from builtins import str
-from builtins import object
from lxml import etree
+import six
import logging
log = logging.getLogger(__name__)
@@ -39,7 +38,7 @@ class MappedXmlDocument(MappedXmlObject):
def get_xml_tree(self):
if self.xml_tree is None:
parser = etree.XMLParser(remove_blank_text=True)
- if type(self.xml_str) == str:
+ if type(self.xml_str) == six.binary_type:
xml_str = self.xml_str.encode('utf8')
else:
xml_str = self.xml_str
diff --git a/ckanext/spatial/plugin/__init__.py b/ckanext/spatial/plugin/__init__.py
index 06e162c..8847a24 100644
--- a/ckanext/spatial/plugin/__init__.py
+++ b/ckanext/spatial/plugin/__init__.py
@@ -1,9 +1,9 @@
-from builtins import str
import os
import re
import mimetypes
from logging import getLogger
+import six
from ckan import plugins as p
@@ -121,22 +121,22 @@ class SpatialMetadata(p.SingletonPlugin):
log.debug('Received: %r' % extra.value)
geometry = json.loads(extra.value)
except ValueError as e:
- error_dict = {'spatial':[u'Error decoding JSON object: %s' % str(e)]}
+ error_dict = {'spatial':[u'Error decoding JSON object: %s' % six.text_type(e)]}
raise p.toolkit.ValidationError(error_dict, error_summary=package_error_summary(error_dict))
except TypeError as e:
- error_dict = {'spatial':[u'Error decoding JSON object: %s' % str(e)]}
+ error_dict = {'spatial':[u'Error decoding JSON object: %s' % six.text_type(e)]}
raise p.toolkit.ValidationError(error_dict, error_summary=package_error_summary(error_dict))
try:
save_package_extent(package.id,geometry)
except ValueError as e:
- error_dict = {'spatial':[u'Error creating geometry: %s' % str(e)]}
+ error_dict = {'spatial':[u'Error creating geometry: %s' % six.text_type(e)]}
raise p.toolkit.ValidationError(error_dict, error_summary=package_error_summary(error_dict))
except Exception as e:
if bool(os.getenv('DEBUG')):
raise
- error_dict = {'spatial':[u'Error: %s' % str(e)]}
+ error_dict = {'spatial':[u'Error: %s' % six.text_type(e)]}
raise p.toolkit.ValidationError(error_dict, error_summary=package_error_summary(error_dict))
elif (extra.state == 'active' and not extra.value) or extra.state == 'deleted':
diff --git a/ckanext/spatial/tests/base.py b/ckanext/spatial/tests/base.py
index e25675d..d204548 100644
--- a/ckanext/spatial/tests/base.py
+++ b/ckanext/spatial/tests/base.py
@@ -1,4 +1,3 @@
-from builtins import object
import os
import re
@@ -75,4 +74,3 @@ class SpatialTestBase(object):
@classmethod
def teardown_class(cls):
repo.rebuild_db()
-
diff --git a/ckanext/spatial/tests/test_harvest.py b/ckanext/spatial/tests/test_harvest.py
index b720e95..11423af 100644
--- a/ckanext/spatial/tests/test_harvest.py
+++ b/ckanext/spatial/tests/test_harvest.py
@@ -1,6 +1,4 @@
from __future__ import absolute_import
-from builtins import str
-from builtins import object
import os
from datetime import datetime, date
import lxml
@@ -815,7 +813,7 @@ class TestHarvest(HarvestFixtureBase):
assert source_dict['status']['total_datasets'] == 1
def test_clean_tags(self):
-
+
# Create source
source_fixture = {
'title': 'Test Source',
@@ -848,7 +846,7 @@ class TestHarvest(HarvestFixtureBase):
context={'user': user_name},
name='existing-group')
- context = {'user': 'dummy'}
+ context = {'user': 'dummy'}
package_schema = default_update_package_schema()
context['schema'] = package_schema
package_dict = {'frequency': 'manual',
@@ -867,7 +865,7 @@ class TestHarvest(HarvestFixtureBase):
'metadata_modified' : datetime.now(),
'guid': str(uuid4()),
'identifier': 'dummy'}
-
+
package_data = call_action('package_create', context=context, **package_dict)
package = Package.get('fakename')
diff --git a/ckanext/spatial/tests/test_validation.py b/ckanext/spatial/tests/test_validation.py
index 4cc7d26..f707e90 100644
--- a/ckanext/spatial/tests/test_validation.py
+++ b/ckanext/spatial/tests/test_validation.py
@@ -1,4 +1,3 @@
-from builtins import object
import os
from lxml import etree
diff --git a/ckanext/spatial/util.py b/ckanext/spatial/util.py
index 5724474..b25564e 100644
--- a/ckanext/spatial/util.py
+++ b/ckanext/spatial/util.py
@@ -1,10 +1,11 @@
# -*- coding: utf-8 -*-
from __future__ import print_function
-from builtins import str
import os
import sys
+import six
+
import logging
from ckan.lib.helpers import json
from lxml import etree
@@ -22,7 +23,7 @@ def report(pkg=None):
from ckanext.spatial.lib.reports import validation_report
if pkg:
- package_ref = str(pkg)
+ package_ref = six.text_type(pkg)
pkg = model.Package.get(package_ref)
if not pkg:
print('Package ref "%s" not recognised' % package_ref)
@@ -89,7 +90,7 @@ def report_csv(csv_filepath):
def initdb(srid=None):
if srid:
- srid = str(srid)
+ srid = six.text_type(srid)
from ckanext.spatial.model import setup as db_setup
@@ -116,10 +117,10 @@ def update_extents():
count += 1
except ValueError as e:
errors.append(u'Package %s - Error decoding JSON object: %s' %
- (package.id, str(e)))
+ (package.id, six.text_type(e)))
except TypeError as e:
errors.append(u'Package %s - Error decoding JSON object: %s' %
- (package.id, str(e)))
+ (package.id, six.text_type(e)))
save_package_extent(package.id, geometry)
diff --git a/ckanext/spatial/validation/validation.py b/ckanext/spatial/validation/validation.py
index 3f800b7..28e8506 100644
--- a/ckanext/spatial/validation/validation.py
+++ b/ckanext/spatial/validation/validation.py
@@ -1,4 +1,3 @@
-from builtins import object
import os
from pkg_resources import resource_stream
from ckanext.spatial.model import ISODocument
From e1abd5e0c3643bb95113ff398a73335486ae2578 Mon Sep 17 00:00:00 2001
From: Sergey Motornyuk
Date: Wed, 15 Apr 2020 00:54:15 +0300
Subject: [PATCH 014/139] downgrade owslib
---
pip-requirements.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pip-requirements.txt b/pip-requirements.txt
index 12710f3..299bb2d 100644
--- a/pip-requirements.txt
+++ b/pip-requirements.txt
@@ -1,7 +1,7 @@
GeoAlchemy>=0.6
GeoAlchemy2==0.5.0
Shapely>=1.2.13
-OWSLib>=0.19.0
+OWSLib>=0.18.0
lxml>=2.3
argparse
pyparsing>=2.1.10
From 03272a9cd52ed5f1bf52065eff5cfd0bc04ce94b Mon Sep 17 00:00:00 2001
From: Sergey Motornyuk
Date: Wed, 15 Apr 2020 02:20:30 +0300
Subject: [PATCH 015/139] Update tests
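
Port the main suite to pytest while keeping a frozen copy of the old suite under
ckanext/spatial/tests/nose for CKAN versions that still run nosetests. Database
setup moves from SpatialTestBase.setup_class into a clean_db fixture defined in
conftest.py, and functional tests take the Flask test app via the app fixture
instead of calling self._get_test_app(). A minimal sketch of the new style (the
test class and assertion below are illustrative only, not part of the suite):

    import pytest

    from ckanext.spatial.tests.base import SpatialTestBase


    @pytest.mark.usefixtures("clean_db")
    class TestExample(SpatialTestBase):
        def test_base_attributes(self):
            # SpatialTestBase still exposes db_srid and geojson_examples
            assert self.db_srid == 4326
            assert "point" in self.geojson_examples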
---
bin/travis-build.bash | 36 +-
bin/travis-run.sh | 7 +-
ckanext/spatial/tests/__init__.py | 2 +
ckanext/spatial/tests/base.py | 88 +-
ckanext/spatial/tests/conftest.py | 58 +
ckanext/spatial/tests/functional/__init__.py | 2 +
.../spatial/tests/functional/test_package.py | 182 ++-
.../spatial/tests/functional/test_widgets.py | 14 +-
ckanext/spatial/tests/lib/test_spatial.py | 100 +-
ckanext/spatial/tests/model/__init__.py | 2 +
.../tests/model/test_harvested_metadata.py | 25 +-
.../tests/model/test_package_extent.py | 73 +-
ckanext/spatial/tests/nose/__init__.py | 7 +
ckanext/spatial/tests/nose/base.py | 76 ++
ckanext/spatial/tests/nose/data/dataset.json | 1 +
.../spatial/tests/nose/functional/__init__.py | 7 +
.../tests/nose/functional/test_package.py | 147 +++
.../tests/nose/functional/test_widgets.py | 38 +
.../spatial/tests/nose/lib/test_spatial.py | 154 +++
ckanext/spatial/tests/nose/model/__init__.py | 7 +
.../nose/model/test_harvested_metadata.py | 34 +
.../tests/nose/model/test_package_extent.py | 90 ++
.../nose/model/xml/FCSConservancyPolygons.xml | 524 ++++++++
.../tests/nose/model/xml/gemini_dataset.xml | 498 +++++++
.../tests/nose/scripts/geometry_columns.sql | 25 +
.../spatial/tests/nose/scripts/postgis.sql | 41 +
.../tests/nose/scripts/spatial_ref_sys.sql | 23 +
ckanext/spatial/tests/nose/test_api.py | 274 ++++
ckanext/spatial/tests/nose/test_csw_client.py | 65 +
ckanext/spatial/tests/nose/test_harvest.py | 1141 ++++++++++++++++
.../tests/nose/test_plugin/__init__.py | 7 +
.../spatial/tests/nose/test_plugin/plugin.py | 9 +
.../templates/package/read_base.html | 11 +
.../test_plugin/templates/package/search.html | 9 +
ckanext/spatial/tests/nose/test_validation.py | 153 +++
.../tests/nose/xml/gemini2.1-waf/index.html | 11 +
.../tests/nose/xml/gemini2.1-waf/wales1.xml | 420 ++++++
.../tests/nose/xml/gemini2.1-waf/wales2.xml | 539 ++++++++
.../xml/gemini2.1/FCSConservancyPolygons.xml | 524 ++++++++
.../tests/nose/xml/gemini2.1/dataset1.xml | 498 +++++++
.../nose/xml/gemini2.1/error_bad_xml.xml | 15 +
.../nose/xml/gemini2.1/error_validation.xml | 293 ++++
.../tests/nose/xml/gemini2.1/service1.xml | 347 +++++
.../nose/xml/gemini2.1/service1_newer.xml | 347 +++++
.../xml/gemini2.1/source1/same_dataset.xml | 347 +++++
.../xml/gemini2.1/source2/same_dataset.xml | 347 +++++
...01_Dataset_Invalid_XSD_No_Such_Element.xml | 636 +++++++++
...taset_Invalid_XSD_No_Such_Element_unix.xml | 636 +++++++++
...aset_Invalid_19139_Missing_Data_Format.xml | 626 +++++++++
...Dataset_Invalid_GEMINI_Missing_Keyword.xml | 551 ++++++++
.../gemini2.1/validation/04_Dataset_Valid.xml | 637 +++++++++
.../05_Series_Invalid_XSD_No_Such_Element.xml | 594 +++++++++
...ries_Invalid_19139_Missing_Data_Format.xml | 584 ++++++++
..._Series_Invalid_GEMINI_Missing_Keyword.xml | 509 +++++++
.../gemini2.1/validation/08_Series_Valid.xml | 595 +++++++++
.../09_Service_Invalid_No_Such_Element.xml | 537 ++++++++
...ervice_Invalid_19139_Level_Description.xml | 530 ++++++++
...11_Service_Invalid_GEMINI_Service_Type.xml | 537 ++++++++
.../gemini2.1/validation/12_Service_Valid.xml | 537 ++++++++
.../13_Dataset_Invalid_Element_srv.xml | 610 +++++++++
.../nose/xml/iso19139/dataset-invalid.xml | 498 +++++++
.../tests/nose/xml/iso19139/dataset.xml | 495 +++++++
.../tests/nose/xml/wms/capabilities.xml | 127 ++
ckanext/spatial/tests/nose/xml_file_server.py | 36 +
ckanext/spatial/tests/test_api.py | 228 ++--
ckanext/spatial/tests/test_csw_client.py | 19 +-
ckanext/spatial/tests/test_harvest.py | 1174 ++++++++++-------
ckanext/spatial/tests/test_plugin/__init__.py | 2 +
ckanext/spatial/tests/test_plugin/plugin.py | 2 +-
ckanext/spatial/tests/test_validation.py | 191 ++-
ckanext/spatial/tests/xml_file_server.py | 7 +-
conftest.py | 6 +
setup.cfg | 7 +
73 files changed, 17610 insertions(+), 919 deletions(-)
create mode 100644 ckanext/spatial/tests/conftest.py
create mode 100644 ckanext/spatial/tests/nose/__init__.py
create mode 100644 ckanext/spatial/tests/nose/base.py
create mode 100644 ckanext/spatial/tests/nose/data/dataset.json
create mode 100644 ckanext/spatial/tests/nose/functional/__init__.py
create mode 100644 ckanext/spatial/tests/nose/functional/test_package.py
create mode 100644 ckanext/spatial/tests/nose/functional/test_widgets.py
create mode 100644 ckanext/spatial/tests/nose/lib/test_spatial.py
create mode 100644 ckanext/spatial/tests/nose/model/__init__.py
create mode 100644 ckanext/spatial/tests/nose/model/test_harvested_metadata.py
create mode 100644 ckanext/spatial/tests/nose/model/test_package_extent.py
create mode 100644 ckanext/spatial/tests/nose/model/xml/FCSConservancyPolygons.xml
create mode 100644 ckanext/spatial/tests/nose/model/xml/gemini_dataset.xml
create mode 100644 ckanext/spatial/tests/nose/scripts/geometry_columns.sql
create mode 100644 ckanext/spatial/tests/nose/scripts/postgis.sql
create mode 100644 ckanext/spatial/tests/nose/scripts/spatial_ref_sys.sql
create mode 100644 ckanext/spatial/tests/nose/test_api.py
create mode 100644 ckanext/spatial/tests/nose/test_csw_client.py
create mode 100644 ckanext/spatial/tests/nose/test_harvest.py
create mode 100644 ckanext/spatial/tests/nose/test_plugin/__init__.py
create mode 100644 ckanext/spatial/tests/nose/test_plugin/plugin.py
create mode 100644 ckanext/spatial/tests/nose/test_plugin/templates/package/read_base.html
create mode 100644 ckanext/spatial/tests/nose/test_plugin/templates/package/search.html
create mode 100644 ckanext/spatial/tests/nose/test_validation.py
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1-waf/index.html
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1-waf/wales1.xml
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1-waf/wales2.xml
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/FCSConservancyPolygons.xml
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/dataset1.xml
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/error_bad_xml.xml
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/error_validation.xml
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/service1.xml
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/service1_newer.xml
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/source1/same_dataset.xml
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/source2/same_dataset.xml
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/01_Dataset_Invalid_XSD_No_Such_Element.xml
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/01_Dataset_Invalid_XSD_No_Such_Element_unix.xml
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/02_Dataset_Invalid_19139_Missing_Data_Format.xml
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/03_Dataset_Invalid_GEMINI_Missing_Keyword.xml
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/04_Dataset_Valid.xml
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/05_Series_Invalid_XSD_No_Such_Element.xml
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/06_Series_Invalid_19139_Missing_Data_Format.xml
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/07_Series_Invalid_GEMINI_Missing_Keyword.xml
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/08_Series_Valid.xml
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/09_Service_Invalid_No_Such_Element.xml
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/10_Service_Invalid_19139_Level_Description.xml
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/11_Service_Invalid_GEMINI_Service_Type.xml
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/12_Service_Valid.xml
create mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/13_Dataset_Invalid_Element_srv.xml
create mode 100644 ckanext/spatial/tests/nose/xml/iso19139/dataset-invalid.xml
create mode 100644 ckanext/spatial/tests/nose/xml/iso19139/dataset.xml
create mode 100644 ckanext/spatial/tests/nose/xml/wms/capabilities.xml
create mode 100644 ckanext/spatial/tests/nose/xml_file_server.py
create mode 100644 conftest.py
create mode 100644 setup.cfg
diff --git a/bin/travis-build.bash b/bin/travis-build.bash
index 9656c28..945b606 100644
--- a/bin/travis-build.bash
+++ b/bin/travis-build.bash
@@ -7,25 +7,36 @@ echo "Installing the packages that CKAN requires..."
sudo apt-get update -qq
sudo apt-get install solr-jetty
+
+if python -c 'import sys;exit(sys.version_info < (3,))'
+then
+ PYTHONVERSION=3
+else
+ PYTHONVERSION=2
+fi
+
echo "Installing CKAN and its Python dependencies..."
git clone https://github.com/ckan/ckan
cd ckan
-if [ $CKANVERSION != 'master' ]
+if [ $CKANVERSION == 'master' ]
then
- git checkout $CKANVERSION
+ echo "CKAN version: master"
+else
+ CKAN_TAG=$(git tag | grep ^ckan-$CKANVERSION | sort --version-sort | tail -n 1)
+ git checkout $CKAN_TAG
+ echo "CKAN version: ${CKAN_TAG#ckan-}"
fi
-# Unpin CKAN's psycopg2 dependency get an important bugfix
-# https://stackoverflow.com/questions/47044854/error-installing-psycopg2-2-6-2
-sed -i '/psycopg2/c\psycopg2' requirements.txt
-
python setup.py develop
-if [ -f requirements-py2.txt ]
+
+if [ -f requirements-py2.txt ] && [ $PYTHONVERSION = 2 ]
then
- pip install -r requirements-py2.txt
+ grep -v psycopg2 < requirements-py2.txt > reqs.txt
else
- pip install -r requirements.txt
+ grep -v psycopg2 < requirements.txt > reqs.txt
fi
+pip install psycopg2==2.7.7 # workaround travis 10 psycopg2 incompatibility
+pip install -r reqs.txt
pip install -r dev-requirements.txt
cd -
@@ -52,7 +63,12 @@ sudo apt-get install python-dev libxml2-dev libxslt1-dev libgeos-c1
echo "Initialising the database..."
cd ckan
-paster db init -c test-core.ini
+if [ $CKANVERSION \< '2.9' ]
+then
+ paster db init -c test-core.ini
+else
+ ckan -c test-core.ini db init
+fi
cd -
echo "Installing ckanext-harvest and its requirements..."
diff --git a/bin/travis-run.sh b/bin/travis-run.sh
index cd13759..f07c4e0 100644
--- a/bin/travis-run.sh
+++ b/bin/travis-run.sh
@@ -1,3 +1,8 @@
#!/bin/sh -e
-nosetests --ckan --nologcapture --with-pylons=subdir/test.ini ckanext/spatial
+if [ $CKANVERSION == 'master' ]
+then
+ pytest --ckan-ini=subdir/test.ini ckanext/spatial/tests
+else
+ nosetests --ckan --nologcapture --with-pylons=subdir/test.ini ckanext/spatial/tests/nose
+fi
diff --git a/ckanext/spatial/tests/__init__.py b/ckanext/spatial/tests/__init__.py
index 2e2033b..6d83202 100644
--- a/ckanext/spatial/tests/__init__.py
+++ b/ckanext/spatial/tests/__init__.py
@@ -1,7 +1,9 @@
# this is a namespace package
try:
import pkg_resources
+
pkg_resources.declare_namespace(__name__)
except ImportError:
import pkgutil
+
__path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/ckanext/spatial/tests/base.py b/ckanext/spatial/tests/base.py
index d204548..79983f4 100644
--- a/ckanext/spatial/tests/base.py
+++ b/ckanext/spatial/tests/base.py
@@ -1,76 +1,28 @@
-import os
-import re
+# -*- coding: utf-8 -*-
-from sqlalchemy import Table
-from nose.plugins.skip import SkipTest
-
-from ckan.model import Session, repo, meta, engine_is_sqlite
-from ckanext.spatial.geoalchemy_common import postgis_version
-from ckanext.spatial.model.package_extent import setup as spatial_db_setup
-from ckanext.harvest.model import setup as harvest_model_setup
+import pytest
geojson_examples = {
- 'point':'{"type":"Point","coordinates":[100.0,0.0]}',
- 'point_2':'{"type":"Point","coordinates":[20,10]}',
- 'line':'{"type":"LineString","coordinates":[[100.0,0.0],[101.0,1.0]]}',
- 'polygon':'{"type":"Polygon","coordinates":[[[100.0,0.0],[101.0,0.0],[101.0,1.0],[100.0,1.0],[100.0,0.0]]]}',
- 'polygon_holes':'{"type":"Polygon","coordinates":[[[100.0,0.0],[101.0,0.0],[101.0,1.0],[100.0,1.0],[100.0,0.0]],[[100.2,0.2],[100.8,0.2],[100.8,0.8],[100.2,0.8],[100.2,0.2]]]}',
- 'multipoint':'{"type":"MultiPoint","coordinates":[[100.0,0.0],[101.0,1.0]]}',
- 'multiline':'{"type":"MultiLineString","coordinates":[[[100.0,0.0],[101.0,1.0]],[[102.0,2.0],[103.0,3.0]]]}',
- 'multipolygon':'{"type":"MultiPolygon","coordinates":[[[[102.0,2.0],[103.0,2.0],[103.0,3.0],[102.0,3.0],[102.0,2.0]]],[[[100.0,0.0],[101.0,0.0],[101.0,1.0],[100.0,1.0],[100.0,0.0]],[[100.2,0.2],[100.8,0.2],[100.8,0.8],[100.2,0.8],[100.2,0.2]]]]}'}
-
-
-def _execute_script(script_path):
-
- conn = Session.connection()
- script = open(script_path, 'r').read()
- for cmd in script.split(';'):
- cmd = re.sub(r'--(.*)|[\n\t]', '', cmd)
- if len(cmd):
- conn.execute(cmd)
-
- Session.commit()
-
-
-def create_postgis_tables():
- scripts_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
- 'scripts')
- if postgis_version()[:1] == '1':
- _execute_script(os.path.join(scripts_path, 'spatial_ref_sys.sql'))
- _execute_script(os.path.join(scripts_path, 'geometry_columns.sql'))
- else:
- _execute_script(os.path.join(scripts_path, 'spatial_ref_sys.sql'))
+ "point": '{"type":"Point","coordinates":[100.0,0.0]}',
+ "point_2": '{"type":"Point","coordinates":[20,10]}',
+ "line": '{"type":"LineString","coordinates":[[100.0,0.0],[101.0,1.0]]}',
+ "polygon": '{"type":"Polygon","coordinates":[[[100.0,0.0],[101.0,0.0],'
+ '[101.0,1.0],[100.0,1.0],[100.0,0.0]]]}',
+ "polygon_holes": '{"type":"Polygon","coordinates":[[[100.0,0.0],'
+ '[101.0,0.0],[101.0,1.0],[100.0,1.0],[100.0,0.0]],[[100.2,0.2],'
+ '[100.8,0.2],[100.8,0.8],[100.2,0.8],[100.2,0.2]]]}',
+ "multipoint": '{"type":"MultiPoint","coordinates":'
+ '[[100.0,0.0],[101.0,1.0]]}',
+ "multiline": '{"type":"MultiLineString","coordinates":[[[100.0,0.0],'
+ '[101.0,1.0]],[[102.0,2.0],[103.0,3.0]]]}',
+ "multipolygon": '{"type":"MultiPolygon","coordinates":[[[[102.0,2.0],'
+ '[103.0,2.0],[103.0,3.0],[102.0,3.0],[102.0,2.0]]],[[[100.0,0.0],'
+ '[101.0,0.0],[101.0,1.0],[100.0,1.0],[100.0,0.0]],[[100.2,0.2],'
+ '[100.8,0.2],[100.8,0.8],[100.2,0.8],[100.2,0.2]]]]}',
+}
+@pytest.mark.usefixtures("clean_db")
class SpatialTestBase(object):
-
db_srid = 4326
-
geojson_examples = geojson_examples
-
- @classmethod
- def setup_class(cls):
- if engine_is_sqlite():
- raise SkipTest("PostGIS is required for this test")
-
- # This will create the PostGIS tables (geometry_columns and
- # spatial_ref_sys) which were deleted when rebuilding the database
- table = Table('spatial_ref_sys', meta.metadata)
- if not table.exists():
- create_postgis_tables()
-
- # When running the tests with the --reset-db option for some
- # reason the metadata holds a reference to the `package_extent`
- # table after being deleted, causing an InvalidRequestError
- # exception when trying to recreate it further on
- if 'package_extent' in meta.metadata.tables:
- meta.metadata.remove(meta.metadata.tables['package_extent'])
-
- spatial_db_setup()
-
- # Setup the harvest tables
- harvest_model_setup()
-
- @classmethod
- def teardown_class(cls):
- repo.rebuild_db()
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
new file mode 100644
index 0000000..b93f69a
--- /dev/null
+++ b/ckanext/spatial/tests/conftest.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+
+import pytest
+import os
+import re
+from sqlalchemy import Table
+from nose.plugins.skip import SkipTest
+
+from ckan.model import Session, repo, meta, engine_is_sqlite
+from ckanext.spatial.geoalchemy_common import postgis_version
+from ckanext.spatial.model.package_extent import setup as spatial_db_setup
+from ckanext.harvest.model import setup as harvest_model_setup
+
+
+def _execute_script(script_path):
+
+ conn = Session.connection()
+ script = open(script_path, "r").read()
+ for cmd in script.split(";"):
+ cmd = re.sub(r"--(.*)|[\n\t]", "", cmd)
+ if len(cmd):
+ conn.execute(cmd)
+
+ Session.commit()
+
+
+def create_postgis_tables():
+ scripts_path = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)), "scripts"
+ )
+ if postgis_version()[:1] == "1":
+ _execute_script(os.path.join(scripts_path, "spatial_ref_sys.sql"))
+ _execute_script(os.path.join(scripts_path, "geometry_columns.sql"))
+ else:
+ _execute_script(os.path.join(scripts_path, "spatial_ref_sys.sql"))
+
+
+@pytest.fixture
+def clean_db(reset_db):
+ reset_db()
+
+ # This will create the PostGIS tables (geometry_columns and
+ # spatial_ref_sys) which were deleted when rebuilding the database
+ table = Table("spatial_ref_sys", meta.metadata)
+ if not table.exists():
+ create_postgis_tables()
+
+ # When running the tests with the --reset-db option for some
+ # reason the metadata holds a reference to the `package_extent`
+ # table after being deleted, causing an InvalidRequestError
+ # exception when trying to recreate it further on
+ if "package_extent" in meta.metadata.tables:
+ meta.metadata.remove(meta.metadata.tables["package_extent"])
+
+ spatial_db_setup()
+
+ # Setup the harvest tables
+ harvest_model_setup()
diff --git a/ckanext/spatial/tests/functional/__init__.py b/ckanext/spatial/tests/functional/__init__.py
index 2e2033b..6d83202 100644
--- a/ckanext/spatial/tests/functional/__init__.py
+++ b/ckanext/spatial/tests/functional/__init__.py
@@ -1,7 +1,9 @@
# this is a namespace package
try:
import pkg_resources
+
pkg_resources.declare_namespace(__name__)
except ImportError:
import pkgutil
+
__path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/ckanext/spatial/tests/functional/test_package.py b/ckanext/spatial/tests/functional/test_package.py
index 1e36d20..41c9b6b 100644
--- a/ckanext/spatial/tests/functional/test_package.py
+++ b/ckanext/spatial/tests/functional/test_package.py
@@ -1,147 +1,145 @@
import json
-from nose.tools import assert_equals
+
+import pytest
from ckan.model import Session
from ckan.lib.helpers import url_for
-try:
- import ckan.new_tests.helpers as helpers
- import ckan.new_tests.factories as factories
-except ImportError:
- import ckan.tests.helpers as helpers
- import ckan.tests.factories as factories
+import ckan.tests.helpers as helpers
+import ckan.tests.factories as factories
from ckanext.spatial.model import PackageExtent
-from ckanext.spatial.geoalchemy_common import legacy_geoalchemy
from ckanext.spatial.tests.base import SpatialTestBase
-class TestSpatialExtra(SpatialTestBase, helpers.FunctionalTestBase):
-
- def test_spatial_extra(self):
- app = self._get_test_app()
+@pytest.mark.usefixtures("clean_db")
+class TestSpatialExtra(SpatialTestBase):
+ def test_spatial_extra_base(self, app):
user = factories.User()
- env = {'REMOTE_USER': user['name'].encode('ascii')}
+ env = {"REMOTE_USER": user["name"].encode("ascii")}
dataset = factories.Dataset(user=user)
- offset = url_for(controller='package', action='edit', id=dataset['id'])
+ offset = url_for("dataset.edit", id=dataset["id"])
res = app.get(offset, extra_environ=env)
- form = res.forms[1]
- form['extras__0__key'] = u'spatial'
- form['extras__0__value'] = self.geojson_examples['point']
+ data = {
+ "name": dataset['name'],
+ "extras__0__key": u"spatial",
+ "extras__0__value": self.geojson_examples["point"]
+ }
- res = helpers.submit_and_follow(app, form, env, 'save')
+ res = app.post(offset, environ_overrides=env, data=data)
- assert 'Error' not in res, res
+ assert "Error" not in res, res
- package_extent = Session.query(PackageExtent) \
- .filter(PackageExtent.package_id == dataset['id']).first()
+ package_extent = (
+ Session.query(PackageExtent)
+ .filter(PackageExtent.package_id == dataset["id"])
+ .first()
+ )
- geojson = json.loads(self.geojson_examples['point'])
+ geojson = json.loads(self.geojson_examples["point"])
- assert_equals(package_extent.package_id, dataset['id'])
- if legacy_geoalchemy:
- assert_equals(Session.scalar(package_extent.the_geom.x),
- geojson['coordinates'][0])
- assert_equals(Session.scalar(package_extent.the_geom.y),
- geojson['coordinates'][1])
- assert_equals(Session.scalar(package_extent.the_geom.srid),
- self.db_srid)
- else:
- from sqlalchemy import func
- assert_equals(
- Session.query(func.ST_X(package_extent.the_geom)).first()[0],
- geojson['coordinates'][0])
- assert_equals(
- Session.query(func.ST_Y(package_extent.the_geom)).first()[0],
- geojson['coordinates'][1])
- assert_equals(package_extent.the_geom.srid, self.db_srid)
+ assert package_extent.package_id == dataset["id"]
+ from sqlalchemy import func
- def test_spatial_extra_edit(self):
- app = self._get_test_app()
+ assert (
+ Session.query(func.ST_X(package_extent.the_geom)).first()[0]
+ == geojson["coordinates"][0]
+ )
+ assert (
+ Session.query(func.ST_Y(package_extent.the_geom)).first()[0]
+ == geojson["coordinates"][1]
+ )
+ assert package_extent.the_geom.srid == self.db_srid
+
+ def test_spatial_extra_edit(self, app):
user = factories.User()
- env = {'REMOTE_USER': user['name'].encode('ascii')}
+ env = {"REMOTE_USER": user["name"].encode("ascii")}
dataset = factories.Dataset(user=user)
- offset = url_for(controller='package', action='edit', id=dataset['id'])
+ offset = url_for("dataset.edit", id=dataset["id"])
res = app.get(offset, extra_environ=env)
- form = res.forms[1]
- form['extras__0__key'] = u'spatial'
- form['extras__0__value'] = self.geojson_examples['point']
+ data = {
+ "name": dataset['name'],
+ "extras__0__key": u"spatial",
+ "extras__0__value": self.geojson_examples["point"]
+ }
- res = helpers.submit_and_follow(app, form, env, 'save')
+ res = app.post(offset, environ_overrides=env, data=data)
- assert 'Error' not in res, res
+ assert "Error" not in res, res
res = app.get(offset, extra_environ=env)
- form = res.forms[1]
- form['extras__0__key'] = u'spatial'
- form['extras__0__value'] = self.geojson_examples['polygon']
+ data = {
+ "name": dataset['name'],
+ "extras__0__key": u"spatial",
+ "extras__0__value": self.geojson_examples["polygon"]
+ }
- res = helpers.submit_and_follow(app, form, env, 'save')
+ res = app.post(offset, environ_overrides=env, data=data)
- assert 'Error' not in res, res
+ assert "Error" not in res, res
- package_extent = Session.query(PackageExtent) \
- .filter(PackageExtent.package_id == dataset['id']).first()
+ package_extent = (
+ Session.query(PackageExtent)
+ .filter(PackageExtent.package_id == dataset["id"])
+ .first()
+ )
- assert_equals(package_extent.package_id, dataset['id'])
- if legacy_geoalchemy:
- assert_equals(
- Session.scalar(package_extent.the_geom.geometry_type),
- 'ST_Polygon')
- assert_equals(
- Session.scalar(package_extent.the_geom.srid),
- self.db_srid)
- else:
- from sqlalchemy import func
- assert_equals(
- Session.query(
- func.ST_GeometryType(package_extent.the_geom)).first()[0],
- 'ST_Polygon')
- assert_equals(package_extent.the_geom.srid, self.db_srid)
+ assert package_extent.package_id == dataset["id"]
+ from sqlalchemy import func
- def test_spatial_extra_bad_json(self):
- app = self._get_test_app()
+ assert (
+ Session.query(
+ func.ST_GeometryType(package_extent.the_geom)
+ ).first()[0]
+ == "ST_Polygon"
+ )
+ assert package_extent.the_geom.srid == self.db_srid
+
+ def test_spatial_extra_bad_json(self, app):
user = factories.User()
- env = {'REMOTE_USER': user['name'].encode('ascii')}
+ env = {"REMOTE_USER": user["name"].encode("ascii")}
dataset = factories.Dataset(user=user)
- offset = url_for(controller='package', action='edit', id=dataset['id'])
+ offset = url_for("dataset.edit", id=dataset["id"])
res = app.get(offset, extra_environ=env)
- form = res.forms[1]
- form['extras__0__key'] = u'spatial'
- form['extras__0__value'] = u'{"Type":Bad Json]'
+ data = {
+ "name": dataset['name'],
+ "extras__0__key": u"spatial",
+ "extras__0__value": u'{"Type":Bad Json]'
+ }
- res = helpers.webtest_submit(form, extra_environ=env, name='save')
+ res = app.post(offset, environ_overrides=env, data=data)
- assert 'Error' in res, res
- assert 'Spatial' in res
- assert 'Error decoding JSON object' in res
+ assert "Error" in res, res
+ assert "Spatial" in res
+ assert "Error decoding JSON object" in res
- def test_spatial_extra_bad_geojson(self):
- app = self._get_test_app()
+ def test_spatial_extra_bad_geojson(self, app):
user = factories.User()
- env = {'REMOTE_USER': user['name'].encode('ascii')}
+ env = {"REMOTE_USER": user["name"].encode("ascii")}
dataset = factories.Dataset(user=user)
- offset = url_for(controller='package', action='edit', id=dataset['id'])
+ offset = url_for("dataset.edit", id=dataset["id"])
res = app.get(offset, extra_environ=env)
- form = res.forms[1]
- form['extras__0__key'] = u'spatial'
- form['extras__0__value'] = u'{"Type":"Bad_GeoJSON","a":2}'
+ data = {
+ "name": dataset['name'],
+ "extras__0__key": u"spatial",
+ "extras__0__value": u'{"Type":"Bad_GeoJSON","a":2}'
+ }
- res = helpers.webtest_submit(form, extra_environ=env, name='save')
+ res = app.post(offset, environ_overrides=env, data=data)
- assert 'Error' in res, res
- assert 'Spatial' in res
- assert 'Error creating geometry' in res
+ assert "Error" in res, res
+ assert "Spatial" in res
+ assert "Error creating geometry" in res
diff --git a/ckanext/spatial/tests/functional/test_widgets.py b/ckanext/spatial/tests/functional/test_widgets.py
index fbe75ba..a93a69f 100644
--- a/ckanext/spatial/tests/functional/test_widgets.py
+++ b/ckanext/spatial/tests/functional/test_widgets.py
@@ -11,28 +11,28 @@ except ImportError:
class TestSpatialWidgets(SpatialTestBase, helpers.FunctionalTestBase):
-
def test_dataset_map(self):
app = self._get_test_app()
user = factories.User()
dataset = factories.Dataset(
user=user,
- extras=[{'key': 'spatial',
- 'value': self.geojson_examples['point']}]
+ extras=[
+ {"key": "spatial", "value": self.geojson_examples["point"]}
+ ],
)
- offset = url_for(controller='package', action='read', id=dataset['id'])
+ offset = url_for(controller="package", action="read", id=dataset["id"])
res = app.get(offset)
assert 'data-module="dataset-map"' in res
- assert 'dataset_map.js' in res
+ assert "dataset_map.js" in res
def test_spatial_search_widget(self):
app = self._get_test_app()
- offset = url_for(controller='package', action='search')
+ offset = url_for(controller="package", action="search")
res = app.get(offset)
assert 'data-module="spatial-query"' in res
- assert 'spatial_query.js' in res
+ assert "spatial_query.js" in res
diff --git a/ckanext/spatial/tests/lib/test_spatial.py b/ckanext/spatial/tests/lib/test_spatial.py
index b6040fc..bbb0d0b 100644
--- a/ckanext/spatial/tests/lib/test_spatial.py
+++ b/ckanext/spatial/tests/lib/test_spatial.py
@@ -16,39 +16,39 @@ from ckan.lib.munge import munge_title_to_name
from ckanext.spatial.model import PackageExtent
from ckanext.spatial.lib import validate_bbox, bbox_query, bbox_query_ordered
-from ckanext.spatial.geoalchemy_common import WKTElement, compare_geometry_fields
+from ckanext.spatial.geoalchemy_common import (
+ WKTElement,
+ compare_geometry_fields,
+)
from ckanext.spatial.tests.base import SpatialTestBase
class TestCompareGeometries(SpatialTestBase):
-
def _get_extent_object(self, geometry):
if isinstance(geometry, six.string_types):
geometry = json.loads(geometry)
shape = asShape(geometry)
- return PackageExtent(package_id='xxx',
- the_geom=WKTElement(shape.wkt, 4326))
+ return PackageExtent(
+ package_id="xxx", the_geom=WKTElement(shape.wkt, 4326)
+ )
def test_same_points(self):
- extent1 = self._get_extent_object(self.geojson_examples['point'])
- extent2 = self._get_extent_object(self.geojson_examples['point'])
+ extent1 = self._get_extent_object(self.geojson_examples["point"])
+ extent2 = self._get_extent_object(self.geojson_examples["point"])
assert compare_geometry_fields(extent1.the_geom, extent2.the_geom)
def test_different_points(self):
- extent1 = self._get_extent_object(self.geojson_examples['point'])
- extent2 = self._get_extent_object(self.geojson_examples['point_2'])
+ extent1 = self._get_extent_object(self.geojson_examples["point"])
+ extent2 = self._get_extent_object(self.geojson_examples["point_2"])
assert not compare_geometry_fields(extent1.the_geom, extent2.the_geom)
class TestValidateBbox(object):
- bbox_dict = {'minx': -4.96,
- 'miny': 55.70,
- 'maxx': -3.78,
- 'maxy': 56.43}
+ bbox_dict = {"minx": -4.96, "miny": 55.70, "maxx": -3.78, "maxy": 56.43}
def test_string(self):
res = validate_bbox("-4.96,55.70,-3.78,56.43")
@@ -63,16 +63,20 @@ class TestValidateBbox(object):
assert_equal(res, None)
def test_bad_2(self):
- res = validate_bbox('random')
+ res = validate_bbox("random")
assert_equal(res, None)
def bbox_2_geojson(bbox_dict):
- return '{"type":"Polygon","coordinates":[[[%(minx)s, %(miny)s],[%(minx)s, %(maxy)s], [%(maxx)s, %(maxy)s], [%(maxx)s, %(miny)s], [%(minx)s, %(miny)s]]]}' % bbox_dict
+ return (
+ '{"type":"Polygon","coordinates":[[[%(minx)s, %(miny)s],[%(minx)s, %(maxy)s], [%(maxx)s, %(maxy)s], [%(maxx)s, %(miny)s], [%(minx)s, %(miny)s]]]}'
+ % bbox_dict
+ )
class SpatialQueryTestBase(SpatialTestBase):
- '''Base class for tests of spatial queries'''
+ """Base class for tests of spatial queries"""
+
miny = 0
maxy = 1
@@ -82,28 +86,36 @@ class SpatialQueryTestBase(SpatialTestBase):
for fixture_x in cls.fixtures_x:
bbox = cls.x_values_to_bbox(fixture_x)
bbox_geojson = bbox_2_geojson(bbox)
- cls.create_package(name=munge_title_to_name(six.text_type(fixture_x)),
- title=six.text_type(fixture_x),
- extras=[{'key': 'spatial',
- 'value': bbox_geojson}])
+ cls.create_package(
+ name=munge_title_to_name(six.text_type(fixture_x)),
+ title=six.text_type(fixture_x),
+ extras=[{"key": "spatial", "value": bbox_geojson}],
+ )
@classmethod
def create_package(cls, **package_dict):
- user = plugins.toolkit.get_action('get_site_user')({'model': model, 'ignore_auth': True}, {})
- context = {'model': model,
- 'session': model.Session,
- 'user': user['name'],
- 'extras_as_string': True,
- 'api_version': 2,
- 'ignore_auth': True,
- }
+ user = plugins.toolkit.get_action("get_site_user")(
+ {"model": model, "ignore_auth": True}, {}
+ )
+ context = {
+ "model": model,
+ "session": model.Session,
+ "user": user["name"],
+ "extras_as_string": True,
+ "api_version": 2,
+ "ignore_auth": True,
+ }
package_dict = package_create(context, package_dict)
- return context.get('id')
+ return context.get("id")
@classmethod
def x_values_to_bbox(cls, x_tuple):
- return {'minx': x_tuple[0], 'maxx': x_tuple[1],
- 'miny': cls.miny, 'maxy': cls.maxy}
+ return {
+ "minx": x_tuple[0],
+ "maxx": x_tuple[1],
+ "miny": cls.miny,
+ "maxy": cls.maxy,
+ }
class TestBboxQuery(SpatialQueryTestBase):
@@ -114,13 +126,12 @@ class TestBboxQuery(SpatialQueryTestBase):
bbox_dict = self.x_values_to_bbox((2, 5))
package_ids = [res.package_id for res in bbox_query(bbox_dict)]
package_titles = [model.Package.get(id_).title for id_ in package_ids]
- assert_equal(set(package_titles),
- set(('(0, 3)', '(0, 4)', '(4, 5)')))
+ assert_equal(set(package_titles), set(("(0, 3)", "(0, 4)", "(4, 5)")))
+
class TestBboxQueryOrdered(SpatialQueryTestBase):
# x values for the fixtures
- fixtures_x = [(0, 9), (1, 8), (2, 7), (3, 6), (4, 5),
- (8, 9)]
+ fixtures_x = [(0, 9), (1, 8), (2, 7), (3, 6), (4, 5), (8, 9)]
def test_query(self):
bbox_dict = self.x_values_to_bbox((2, 7))
@@ -128,27 +139,32 @@ class TestBboxQueryOrdered(SpatialQueryTestBase):
package_ids = [res.package_id for res in q]
package_titles = [model.Package.get(id_).title for id_ in package_ids]
# check the right items are returned
- assert_equal(set(package_titles),
- set(('(0, 9)', '(1, 8)', '(2, 7)', '(3, 6)', '(4, 5)')))
+ assert_equal(
+ set(package_titles),
+ set(("(0, 9)", "(1, 8)", "(2, 7)", "(3, 6)", "(4, 5)")),
+ )
# check the order is good
- assert_equal(package_titles,
- ['(2, 7)', '(1, 8)', '(3, 6)', '(0, 9)', '(4, 5)'])
+ assert_equal(
+ package_titles, ["(2, 7)", "(1, 8)", "(3, 6)", "(0, 9)", "(4, 5)"]
+ )
class TestBboxQueryPerformance(SpatialQueryTestBase):
# x values for the fixtures
- fixtures_x = [(random.uniform(0, 3), random.uniform(3,9)) \
- for x in range(10)] # increase the number to 1000 say
+ fixtures_x = [
+ (random.uniform(0, 3), random.uniform(3, 9)) for x in range(10)
+ ] # increase the number to 1000 say
+
def test_query(self):
bbox_dict = self.x_values_to_bbox((2, 7))
t0 = time.time()
q = bbox_query(bbox_dict)
t1 = time.time()
- print('bbox_query took: ', t1-t0)
+ print("bbox_query took: ", t1 - t0)
def test_query_ordered(self):
bbox_dict = self.x_values_to_bbox((2, 7))
t0 = time.time()
q = bbox_query_ordered(bbox_dict)
t1 = time.time()
- print('bbox_query_ordered took: ', t1-t0)
+ print("bbox_query_ordered took: ", t1 - t0)
diff --git a/ckanext/spatial/tests/model/__init__.py b/ckanext/spatial/tests/model/__init__.py
index 2e2033b..6d83202 100644
--- a/ckanext/spatial/tests/model/__init__.py
+++ b/ckanext/spatial/tests/model/__init__.py
@@ -1,7 +1,9 @@
# this is a namespace package
try:
import pkg_resources
+
pkg_resources.declare_namespace(__name__)
except ImportError:
import pkgutil
+
__path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/ckanext/spatial/tests/model/test_harvested_metadata.py b/ckanext/spatial/tests/model/test_harvested_metadata.py
index 7fb03f4..9a4fbcc 100644
--- a/ckanext/spatial/tests/model/test_harvested_metadata.py
+++ b/ckanext/spatial/tests/model/test_harvested_metadata.py
@@ -4,31 +4,34 @@ from nose.tools import assert_equal
from ckanext.spatial.model import ISODocument
+
def open_xml_fixture(xml_filename):
- xml_filepath = os.path.join(os.path.dirname(__file__),
- 'xml',
- xml_filename)
- with open(xml_filepath, 'rb') as f:
+ xml_filepath = os.path.join(os.path.dirname(__file__), "xml", xml_filename)
+ with open(xml_filepath, "rb") as f:
xml_string_raw = f.read()
try:
xml_string = xml_string_raw.encode("utf-8")
except UnicodeDecodeError as e:
- assert 0, 'ERROR: Unicode Error reading file \'%s\': %s' % \
- (metadata_filepath, e)
+ assert 0, "ERROR: Unicode Error reading file '%s': %s" % (
+ metadata_filepath,
+ e,
+ )
return xml_string
+
def test_simple():
- xml_string = open_xml_fixture('gemini_dataset.xml')
+ xml_string = open_xml_fixture("gemini_dataset.xml")
iso_document = ISODocument(xml_string)
iso_values = iso_document.read_values()
- assert_equal(iso_values['guid'], 'test-dataset-1')
- assert_equal(iso_values['metadata-date'], '2011-09-23T10:06:08')
+ assert_equal(iso_values["guid"], "test-dataset-1")
+ assert_equal(iso_values["metadata-date"], "2011-09-23T10:06:08")
+
def test_multiplicity_warning():
# This dataset lacks a value for Metadata Date and should
# produce a log.warning, but not raise an exception.
- xml_string = open_xml_fixture('FCSConservancyPolygons.xml')
+ xml_string = open_xml_fixture("FCSConservancyPolygons.xml")
iso_document = ISODocument(xml_string)
iso_values = iso_document.read_values()
- assert_equal(iso_values['guid'], 'B8A22DF4-B0DC-4F0B-A713-0CF5F8784A28')
+ assert_equal(iso_values["guid"], "B8A22DF4-B0DC-4F0B-A713-0CF5F8784A28")
diff --git a/ckanext/spatial/tests/model/test_package_extent.py b/ckanext/spatial/tests/model/test_package_extent.py
index 812d15d..c4385b8 100644
--- a/ckanext/spatial/tests/model/test_package_extent.py
+++ b/ckanext/spatial/tests/model/test_package_extent.py
@@ -3,6 +3,7 @@ from shapely.geometry import asShape
from ckan.model import Session
from ckan.lib.helpers import json
+
try:
import ckan.new_tests.factories as factories
except ImportError:
@@ -14,77 +15,93 @@ from ckanext.spatial.tests.base import SpatialTestBase
class TestPackageExtent(SpatialTestBase):
-
def test_create_extent(self):
package = factories.Dataset()
- geojson = json.loads(self.geojson_examples['point'])
+ geojson = json.loads(self.geojson_examples["point"])
shape = asShape(geojson)
- package_extent = PackageExtent(package_id=package['id'],
- the_geom=WKTElement(shape.wkt,
- self.db_srid))
+ package_extent = PackageExtent(
+ package_id=package["id"],
+ the_geom=WKTElement(shape.wkt, self.db_srid),
+ )
package_extent.save()
- assert_equals(package_extent.package_id, package['id'])
+ assert_equals(package_extent.package_id, package["id"])
if legacy_geoalchemy:
- assert_equals(Session.scalar(package_extent.the_geom.x),
- geojson['coordinates'][0])
- assert_equals(Session.scalar(package_extent.the_geom.y),
- geojson['coordinates'][1])
- assert_equals(Session.scalar(package_extent.the_geom.srid),
- self.db_srid)
+ assert_equals(
+ Session.scalar(package_extent.the_geom.x),
+ geojson["coordinates"][0],
+ )
+ assert_equals(
+ Session.scalar(package_extent.the_geom.y),
+ geojson["coordinates"][1],
+ )
+ assert_equals(
+ Session.scalar(package_extent.the_geom.srid), self.db_srid
+ )
else:
from sqlalchemy import func
+
assert_equals(
Session.query(func.ST_X(package_extent.the_geom)).first()[0],
- geojson['coordinates'][0])
+ geojson["coordinates"][0],
+ )
assert_equals(
Session.query(func.ST_Y(package_extent.the_geom)).first()[0],
- geojson['coordinates'][1])
+ geojson["coordinates"][1],
+ )
assert_equals(package_extent.the_geom.srid, self.db_srid)
def test_update_extent(self):
package = factories.Dataset()
- geojson = json.loads(self.geojson_examples['point'])
+ geojson = json.loads(self.geojson_examples["point"])
shape = asShape(geojson)
- package_extent = PackageExtent(package_id=package['id'],
- the_geom=WKTElement(shape.wkt,
- self.db_srid))
+ package_extent = PackageExtent(
+ package_id=package["id"],
+ the_geom=WKTElement(shape.wkt, self.db_srid),
+ )
package_extent.save()
if legacy_geoalchemy:
assert_equals(
Session.scalar(package_extent.the_geom.geometry_type),
- 'ST_Point')
+ "ST_Point",
+ )
else:
from sqlalchemy import func
+
assert_equals(
Session.query(
- func.ST_GeometryType(package_extent.the_geom)).first()[0],
- 'ST_Point')
+ func.ST_GeometryType(package_extent.the_geom)
+ ).first()[0],
+ "ST_Point",
+ )
# Update the geometry (Point -> Polygon)
- geojson = json.loads(self.geojson_examples['polygon'])
+ geojson = json.loads(self.geojson_examples["polygon"])
shape = asShape(geojson)
package_extent.the_geom = WKTElement(shape.wkt, self.db_srid)
package_extent.save()
- assert_equals(package_extent.package_id, package['id'])
+ assert_equals(package_extent.package_id, package["id"])
if legacy_geoalchemy:
assert_equals(
Session.scalar(package_extent.the_geom.geometry_type),
- 'ST_Polygon')
+ "ST_Polygon",
+ )
assert_equals(
- Session.scalar(package_extent.the_geom.srid),
- self.db_srid)
+ Session.scalar(package_extent.the_geom.srid), self.db_srid
+ )
else:
assert_equals(
Session.query(
- func.ST_GeometryType(package_extent.the_geom)).first()[0],
- 'ST_Polygon')
+ func.ST_GeometryType(package_extent.the_geom)
+ ).first()[0],
+ "ST_Polygon",
+ )
assert_equals(package_extent.the_geom.srid, self.db_srid)
diff --git a/ckanext/spatial/tests/nose/__init__.py b/ckanext/spatial/tests/nose/__init__.py
new file mode 100644
index 0000000..2e2033b
--- /dev/null
+++ b/ckanext/spatial/tests/nose/__init__.py
@@ -0,0 +1,7 @@
+# this is a namespace package
+try:
+ import pkg_resources
+ pkg_resources.declare_namespace(__name__)
+except ImportError:
+ import pkgutil
+ __path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/ckanext/spatial/tests/nose/base.py b/ckanext/spatial/tests/nose/base.py
new file mode 100644
index 0000000..d204548
--- /dev/null
+++ b/ckanext/spatial/tests/nose/base.py
@@ -0,0 +1,76 @@
+import os
+import re
+
+from sqlalchemy import Table
+from nose.plugins.skip import SkipTest
+
+from ckan.model import Session, repo, meta, engine_is_sqlite
+from ckanext.spatial.geoalchemy_common import postgis_version
+from ckanext.spatial.model.package_extent import setup as spatial_db_setup
+from ckanext.harvest.model import setup as harvest_model_setup
+
+geojson_examples = {
+ 'point':'{"type":"Point","coordinates":[100.0,0.0]}',
+ 'point_2':'{"type":"Point","coordinates":[20,10]}',
+ 'line':'{"type":"LineString","coordinates":[[100.0,0.0],[101.0,1.0]]}',
+ 'polygon':'{"type":"Polygon","coordinates":[[[100.0,0.0],[101.0,0.0],[101.0,1.0],[100.0,1.0],[100.0,0.0]]]}',
+ 'polygon_holes':'{"type":"Polygon","coordinates":[[[100.0,0.0],[101.0,0.0],[101.0,1.0],[100.0,1.0],[100.0,0.0]],[[100.2,0.2],[100.8,0.2],[100.8,0.8],[100.2,0.8],[100.2,0.2]]]}',
+ 'multipoint':'{"type":"MultiPoint","coordinates":[[100.0,0.0],[101.0,1.0]]}',
+ 'multiline':'{"type":"MultiLineString","coordinates":[[[100.0,0.0],[101.0,1.0]],[[102.0,2.0],[103.0,3.0]]]}',
+ 'multipolygon':'{"type":"MultiPolygon","coordinates":[[[[102.0,2.0],[103.0,2.0],[103.0,3.0],[102.0,3.0],[102.0,2.0]]],[[[100.0,0.0],[101.0,0.0],[101.0,1.0],[100.0,1.0],[100.0,0.0]],[[100.2,0.2],[100.8,0.2],[100.8,0.8],[100.2,0.8],[100.2,0.2]]]]}'}
+
+
+def _execute_script(script_path):
+
+ conn = Session.connection()
+ script = open(script_path, 'r').read()
+ for cmd in script.split(';'):
+ cmd = re.sub(r'--(.*)|[\n\t]', '', cmd)
+ if len(cmd):
+ conn.execute(cmd)
+
+ Session.commit()
+
+
+def create_postgis_tables():
+ scripts_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+ 'scripts')
+ if postgis_version()[:1] == '1':
+ _execute_script(os.path.join(scripts_path, 'spatial_ref_sys.sql'))
+ _execute_script(os.path.join(scripts_path, 'geometry_columns.sql'))
+ else:
+ _execute_script(os.path.join(scripts_path, 'spatial_ref_sys.sql'))
+
+
+class SpatialTestBase(object):
+
+ db_srid = 4326
+
+ geojson_examples = geojson_examples
+
+ @classmethod
+ def setup_class(cls):
+ if engine_is_sqlite():
+ raise SkipTest("PostGIS is required for this test")
+
+ # This will create the PostGIS tables (geometry_columns and
+ # spatial_ref_sys) which were deleted when rebuilding the database
+ table = Table('spatial_ref_sys', meta.metadata)
+ if not table.exists():
+ create_postgis_tables()
+
+ # When running the tests with the --reset-db option for some
+ # reason the metadata holds a reference to the `package_extent`
+ # table after being deleted, causing an InvalidRequestError
+ # exception when trying to recreate it further on
+ if 'package_extent' in meta.metadata.tables:
+ meta.metadata.remove(meta.metadata.tables['package_extent'])
+
+ spatial_db_setup()
+
+ # Setup the harvest tables
+ harvest_model_setup()
+
+ @classmethod
+ def teardown_class(cls):
+ repo.rebuild_db()
diff --git a/ckanext/spatial/tests/nose/data/dataset.json b/ckanext/spatial/tests/nose/data/dataset.json
new file mode 100644
index 0000000..ae0e628
--- /dev/null
+++ b/ckanext/spatial/tests/nose/data/dataset.json
@@ -0,0 +1 @@
+{"keyword-inspire-theme": ["Global"], "resource-type": ["dataset"], "maintenance-note": "", "spatial-data-service-type": "", "spatial-reference-system": "4326", "keywords": [{"type": "place", "thesaurus-identifier": "", "keyword": ["Global"], "thesaurus-title": ""}], "guid": "9d87519e-f91d-11e6-83d9-9c4e3672cd50", "metadata-language": "", "metadata-point-of-contact": [{"individual-name": "", "contact-info": {"online-resource": "", "email": "ad@m.in"}, "organisation-name": "", "role": "originator", "position-name": ""}], "metadata-standard-version": "ISO 19115:2003", "usage": [], "spatial-resolution-units": "", "responsible-organisation": [{"individual-name": "", "contact-info": {"online-resource": "", "email": "ad@m.in"}, "organisation-name": "", "role": "originator", "position-name": ""}], "temporal-extent-begin": [], "contact-email": "ad@m.in", "metadata-date": "2017-10-10T16:07:12Z", "dataset-reference-date": [{"type": "publication", "value": "2017-02-22T17:33:00Z"}], "conformity-pass": "", "unique-resource-identifier": "", "bbox": [{"west": "-180.0000000000", "east": "180.0000000000", "north": "90.0000000000", "south": "-90.0000000000"}], "keyword-controlled-other": [], "equivalent-scale": [], "lineage": "", "temporal-extent-end": [], "coupled-resource": [], "metadata-standard-name": "ISO 19115:2003 - Geographic information - Metadata", "additional-information-source": "No information provided", "extent-free-text": [], "browse-graphic": [{"type": "image/png", "description": "Thumbnail for 'test'", "file": "http://localhost:8000/uploaded/thumbs/layer-9d87519e-f91d-11e6-83d9-9c4e3672cd50-thumb.png"}], "abstract": "No abstract provided", "presentation-form": ["mapDigital"], "aggregation-info": [], "access-constraints": [], "resource-locator-identification": [], "distributor": [], "dataset-language": [], "conformity-specification-title": "", "cited-responsible-party": [], "conformity-specification": "", "purpose": "", "date-created": "", "progress": ["completed"], "extent-controlled": [], "use-constraints": [], "alternate-title": [], "date-released": "2017-02-22T17:33:00Z", "date-updated": "", "data-format": [], "tags": ["Global"], "frequency-of-update": "", "limitations-on-public-access": ["Not Specified: The original author did not specify a license."], "publisher": "", "resource-locator": [{"url": "http://localhost:8000/layers/geonode:test", "function": "", "protocol": "WWW:LINK-1.0-http--link", "name": "", "description": "Online link to the 'test' description on GeoNode"}, {"url": "http://localhost:8080/geoserver/wms?layers=geonode%3Atest&width=1100&bbox=-180.0%2C-90.0%2C180.0%2C90.0&service=WMS&format=image%2Fjpeg&srs=EPSG%3A4326&request=GetMap&height=550", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.jpg", "description": "test (JPEG Format)"}, {"url": "http://localhost:8080/geoserver/wms?layers=geonode%3Atest&width=1100&bbox=-180.0%2C-90.0%2C180.0%2C90.0&service=WMS&format=application%2Fpdf&srs=EPSG%3A4326&request=GetMap&height=550", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.pdf", "description": "test (PDF Format)"}, {"url": "http://localhost:8080/geoserver/wms?layers=geonode%3Atest&width=1100&bbox=-180.0%2C-90.0%2C180.0%2C90.0&service=WMS&format=image%2Fpng&srs=EPSG%3A4326&request=GetMap&height=550", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.png", "description": "test (PNG Format)"}, {"url": 
"http://localhost:8080/geoserver/wfs?format_options=charset%3AUTF-8&typename=geonode%3Atest&outputFormat=SHAPE-ZIP&version=1.0.0&service=WFS&request=GetFeature", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.zip", "description": "test (Zipped Shapefile Format)"}, {"url": "http://localhost:8080/geoserver/wfs?typename=geonode%3Atest&outputFormat=gml2&version=1.0.0&request=GetFeature&service=WFS", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.gml", "description": "test (GML 2.0 Format)"}, {"url": "http://localhost:8080/geoserver/wfs?typename=geonode%3Atest&outputFormat=text%2Fxml%3B+subtype%3Dgml%2F3.1.1&version=1.0.0&request=GetFeature&service=WFS", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.gml", "description": "test (GML 3.1.1 Format)"}, {"url": "http://localhost:8080/geoserver/wfs?typename=geonode%3Atest&outputFormat=csv&version=1.0.0&request=GetFeature&service=WFS", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.csv", "description": "test (CSV Format)"}, {"url": "http://localhost:8080/geoserver/wfs?typename=geonode%3Atest&outputFormat=excel&version=1.0.0&request=GetFeature&service=WFS", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.excel", "description": "test (Excel Format)"}, {"url": "http://localhost:8080/geoserver/wfs?srsName=EPSG%3A4326&typename=geonode%3Atest&outputFormat=json&version=1.0.0&service=WFS&request=GetFeature", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.json", "description": "test (GeoJSON Format)"}, {"url": "http://localhost:8080/geoserver/wms/kml?layers=geonode%3Atest&mode=download", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.kml", "description": "test (KML Format)"}, {"url": "http://localhost:8080/geoserver/wms/kml?layers=geonode%3Atest&mode=refresh", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.kml", "description": "test (View in Google Earth Format)"}, {"url": "http://localhost:8080/geoserver/wms/reflect?layers=geonode:test&format=image/png8&height=150&width=200&bbox=-180.0,-90.0,180.0,90.0&TIME=-99999999999-01-01T00:00:00.0Z/99999999999-01-01T00:00:00.0Z", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.png", "description": "test (Remote Thumbnail Format)"}, {"url": "http://localhost:8000/uploaded/thumbs/layer-9d87519e-f91d-11e6-83d9-9c4e3672cd50-thumb.png", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.png", "description": "test (Thumbnail Format)"}, {"url": "http://localhost:8080/geoserver/wms?request=GetLegendGraphic&format=image/png&WIDTH=20&HEIGHT=20&LAYER=geonode:test&legend_options=fontAntiAliasing:true;fontSize:12;forceLabels:on", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.png", "description": "test (Legend Format)"}, {"url": "http://localhost:8080/geoserver/gwc/service/gmaps?layers=geonode:test&zoom={z}&x={x}&y={y}&format=image/png8", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.tiles", "description": "test (Tiles Format)"}, {"url": "http://localhost:8080/wms", "function": "", "protocol": "OGC:WMS", "name": "geonode:test", "description": " Service - Provides Layer: test"}, {"url": "http://localhost:8080/wfs", "function": "", "protocol": "OGC:WFS", "name": "geonode:test", "description": " Service - Provides Layer: test"}], "url": "", "title": "test", "contact": "", 
"topic-category": [], "vertical-extent": [], "conformity-explanation": "", "spatial-resolution": ""}
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/functional/__init__.py b/ckanext/spatial/tests/nose/functional/__init__.py
new file mode 100644
index 0000000..2e2033b
--- /dev/null
+++ b/ckanext/spatial/tests/nose/functional/__init__.py
@@ -0,0 +1,7 @@
+# this is a namespace package
+try:
+ import pkg_resources
+ pkg_resources.declare_namespace(__name__)
+except ImportError:
+ import pkgutil
+ __path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/ckanext/spatial/tests/nose/functional/test_package.py b/ckanext/spatial/tests/nose/functional/test_package.py
new file mode 100644
index 0000000..1e36d20
--- /dev/null
+++ b/ckanext/spatial/tests/nose/functional/test_package.py
@@ -0,0 +1,147 @@
+import json
+from nose.tools import assert_equals
+
+from ckan.model import Session
+from ckan.lib.helpers import url_for
+
+try:
+ import ckan.new_tests.helpers as helpers
+ import ckan.new_tests.factories as factories
+except ImportError:
+ import ckan.tests.helpers as helpers
+ import ckan.tests.factories as factories
+
+from ckanext.spatial.model import PackageExtent
+from ckanext.spatial.geoalchemy_common import legacy_geoalchemy
+from ckanext.spatial.tests.base import SpatialTestBase
+
+
+class TestSpatialExtra(SpatialTestBase, helpers.FunctionalTestBase):
+
+ def test_spatial_extra(self):
+ app = self._get_test_app()
+
+ user = factories.User()
+ env = {'REMOTE_USER': user['name'].encode('ascii')}
+ dataset = factories.Dataset(user=user)
+
+ offset = url_for(controller='package', action='edit', id=dataset['id'])
+ res = app.get(offset, extra_environ=env)
+
+ form = res.forms[1]
+ form['extras__0__key'] = u'spatial'
+ form['extras__0__value'] = self.geojson_examples['point']
+
+ res = helpers.submit_and_follow(app, form, env, 'save')
+
+ assert 'Error' not in res, res
+
+ package_extent = Session.query(PackageExtent) \
+ .filter(PackageExtent.package_id == dataset['id']).first()
+
+ geojson = json.loads(self.geojson_examples['point'])
+
+ assert_equals(package_extent.package_id, dataset['id'])
+ if legacy_geoalchemy:
+ assert_equals(Session.scalar(package_extent.the_geom.x),
+ geojson['coordinates'][0])
+ assert_equals(Session.scalar(package_extent.the_geom.y),
+ geojson['coordinates'][1])
+ assert_equals(Session.scalar(package_extent.the_geom.srid),
+ self.db_srid)
+ else:
+ from sqlalchemy import func
+ assert_equals(
+ Session.query(func.ST_X(package_extent.the_geom)).first()[0],
+ geojson['coordinates'][0])
+ assert_equals(
+ Session.query(func.ST_Y(package_extent.the_geom)).first()[0],
+ geojson['coordinates'][1])
+ assert_equals(package_extent.the_geom.srid, self.db_srid)
+
+ def test_spatial_extra_edit(self):
+ app = self._get_test_app()
+
+ user = factories.User()
+ env = {'REMOTE_USER': user['name'].encode('ascii')}
+ dataset = factories.Dataset(user=user)
+
+ offset = url_for(controller='package', action='edit', id=dataset['id'])
+ res = app.get(offset, extra_environ=env)
+
+ form = res.forms[1]
+ form['extras__0__key'] = u'spatial'
+ form['extras__0__value'] = self.geojson_examples['point']
+
+ res = helpers.submit_and_follow(app, form, env, 'save')
+
+ assert 'Error' not in res, res
+
+ res = app.get(offset, extra_environ=env)
+
+ form = res.forms[1]
+ form['extras__0__key'] = u'spatial'
+ form['extras__0__value'] = self.geojson_examples['polygon']
+
+ res = helpers.submit_and_follow(app, form, env, 'save')
+
+ assert 'Error' not in res, res
+
+ package_extent = Session.query(PackageExtent) \
+ .filter(PackageExtent.package_id == dataset['id']).first()
+
+ assert_equals(package_extent.package_id, dataset['id'])
+ if legacy_geoalchemy:
+ assert_equals(
+ Session.scalar(package_extent.the_geom.geometry_type),
+ 'ST_Polygon')
+ assert_equals(
+ Session.scalar(package_extent.the_geom.srid),
+ self.db_srid)
+ else:
+ from sqlalchemy import func
+ assert_equals(
+ Session.query(
+ func.ST_GeometryType(package_extent.the_geom)).first()[0],
+ 'ST_Polygon')
+ assert_equals(package_extent.the_geom.srid, self.db_srid)
+
+ def test_spatial_extra_bad_json(self):
+ app = self._get_test_app()
+
+ user = factories.User()
+ env = {'REMOTE_USER': user['name'].encode('ascii')}
+ dataset = factories.Dataset(user=user)
+
+ offset = url_for(controller='package', action='edit', id=dataset['id'])
+ res = app.get(offset, extra_environ=env)
+
+ form = res.forms[1]
+ form['extras__0__key'] = u'spatial'
+ form['extras__0__value'] = u'{"Type":Bad Json]'
+
+ res = helpers.webtest_submit(form, extra_environ=env, name='save')
+
+ assert 'Error' in res, res
+ assert 'Spatial' in res
+ assert 'Error decoding JSON object' in res
+
+ def test_spatial_extra_bad_geojson(self):
+ app = self._get_test_app()
+
+ user = factories.User()
+ env = {'REMOTE_USER': user['name'].encode('ascii')}
+ dataset = factories.Dataset(user=user)
+
+ offset = url_for(controller='package', action='edit', id=dataset['id'])
+ res = app.get(offset, extra_environ=env)
+
+ form = res.forms[1]
+ form['extras__0__key'] = u'spatial'
+ form['extras__0__value'] = u'{"Type":"Bad_GeoJSON","a":2}'
+
+ res = helpers.webtest_submit(form, extra_environ=env, name='save')
+
+ assert 'Error' in res, res
+ assert 'Spatial' in res
+ assert 'Error creating geometry' in res
diff --git a/ckanext/spatial/tests/nose/functional/test_widgets.py b/ckanext/spatial/tests/nose/functional/test_widgets.py
new file mode 100644
index 0000000..fbe75ba
--- /dev/null
+++ b/ckanext/spatial/tests/nose/functional/test_widgets.py
@@ -0,0 +1,38 @@
+from ckan.lib.helpers import url_for
+
+from ckanext.spatial.tests.base import SpatialTestBase
+
+try:
+ import ckan.new_tests.helpers as helpers
+ import ckan.new_tests.factories as factories
+except ImportError:
+ import ckan.tests.helpers as helpers
+ import ckan.tests.factories as factories
+
+
+class TestSpatialWidgets(SpatialTestBase, helpers.FunctionalTestBase):
+
+ def test_dataset_map(self):
+ app = self._get_test_app()
+
+ user = factories.User()
+ dataset = factories.Dataset(
+ user=user,
+ extras=[{'key': 'spatial',
+ 'value': self.geojson_examples['point']}]
+ )
+ offset = url_for(controller='package', action='read', id=dataset['id'])
+ res = app.get(offset)
+
+ assert 'data-module="dataset-map"' in res
+ assert 'dataset_map.js' in res
+
+ def test_spatial_search_widget(self):
+
+ app = self._get_test_app()
+
+ offset = url_for(controller='package', action='search')
+ res = app.get(offset)
+
+ assert 'data-module="spatial-query"' in res
+ assert 'spatial_query.js' in res
diff --git a/ckanext/spatial/tests/nose/lib/test_spatial.py b/ckanext/spatial/tests/nose/lib/test_spatial.py
new file mode 100644
index 0000000..b6040fc
--- /dev/null
+++ b/ckanext/spatial/tests/nose/lib/test_spatial.py
@@ -0,0 +1,154 @@
+from __future__ import print_function
+import six
+
+import time
+import random
+
+from nose.tools import assert_equal
+
+from shapely.geometry import asShape
+
+from ckan import model
+from ckan import plugins
+from ckan.lib.helpers import json
+from ckan.logic.action.create import package_create
+from ckan.lib.munge import munge_title_to_name
+
+from ckanext.spatial.model import PackageExtent
+from ckanext.spatial.lib import validate_bbox, bbox_query, bbox_query_ordered
+from ckanext.spatial.geoalchemy_common import WKTElement, compare_geometry_fields
+from ckanext.spatial.tests.base import SpatialTestBase
+
+
+class TestCompareGeometries(SpatialTestBase):
+
+ def _get_extent_object(self, geometry):
+ if isinstance(geometry, six.string_types):
+ geometry = json.loads(geometry)
+ shape = asShape(geometry)
+ return PackageExtent(package_id='xxx',
+ the_geom=WKTElement(shape.wkt, 4326))
+
+ def test_same_points(self):
+
+ extent1 = self._get_extent_object(self.geojson_examples['point'])
+ extent2 = self._get_extent_object(self.geojson_examples['point'])
+
+ assert compare_geometry_fields(extent1.the_geom, extent2.the_geom)
+
+ def test_different_points(self):
+
+ extent1 = self._get_extent_object(self.geojson_examples['point'])
+ extent2 = self._get_extent_object(self.geojson_examples['point_2'])
+
+ assert not compare_geometry_fields(extent1.the_geom, extent2.the_geom)
+
+
+class TestValidateBbox(object):
+ bbox_dict = {'minx': -4.96,
+ 'miny': 55.70,
+ 'maxx': -3.78,
+ 'maxy': 56.43}
+
+ def test_string(self):
+ res = validate_bbox("-4.96,55.70,-3.78,56.43")
+ assert_equal(res, self.bbox_dict)
+
+ def test_list(self):
+ res = validate_bbox([-4.96, 55.70, -3.78, 56.43])
+ assert_equal(res, self.bbox_dict)
+
+ def test_bad(self):
+ res = validate_bbox([-4.96, 55.70, -3.78])
+ assert_equal(res, None)
+
+ def test_bad_2(self):
+ res = validate_bbox('random')
+ assert_equal(res, None)
+
+
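+# build a closed-ring GeoJSON Polygon string from a bbox dict with minx/miny/maxx/maxy keys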
+def bbox_2_geojson(bbox_dict):
+ return '{"type":"Polygon","coordinates":[[[%(minx)s, %(miny)s],[%(minx)s, %(maxy)s], [%(maxx)s, %(maxy)s], [%(maxx)s, %(miny)s], [%(minx)s, %(miny)s]]]}' % bbox_dict
+
+
+class SpatialQueryTestBase(SpatialTestBase):
+ '''Base class for tests of spatial queries'''
+ miny = 0
+ maxy = 1
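+ # every fixture bbox shares the same y-range (miny..maxy), so only the x values decide whether boxes overlap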
+
+ @classmethod
+ def setup_class(cls):
+ SpatialTestBase.setup_class()
+ for fixture_x in cls.fixtures_x:
+ bbox = cls.x_values_to_bbox(fixture_x)
+ bbox_geojson = bbox_2_geojson(bbox)
+ cls.create_package(name=munge_title_to_name(six.text_type(fixture_x)),
+ title=six.text_type(fixture_x),
+ extras=[{'key': 'spatial',
+ 'value': bbox_geojson}])
+
+ @classmethod
+ def create_package(cls, **package_dict):
+ user = plugins.toolkit.get_action('get_site_user')({'model': model, 'ignore_auth': True}, {})
+ context = {'model': model,
+ 'session': model.Session,
+ 'user': user['name'],
+ 'extras_as_string': True,
+ 'api_version': 2,
+ 'ignore_auth': True,
+ }
+ package_dict = package_create(context, package_dict)
+ return context.get('id')
+
+ @classmethod
+ def x_values_to_bbox(cls, x_tuple):
+ return {'minx': x_tuple[0], 'maxx': x_tuple[1],
+ 'miny': cls.miny, 'maxy': cls.maxy}
+
+
+class TestBboxQuery(SpatialQueryTestBase):
+ # x values for the fixtures
+ fixtures_x = [(0, 1), (0, 3), (0, 4), (4, 5), (6, 7)]
+
+ def test_query(self):
+ bbox_dict = self.x_values_to_bbox((2, 5))
+ package_ids = [res.package_id for res in bbox_query(bbox_dict)]
+ package_titles = [model.Package.get(id_).title for id_ in package_ids]
+ assert_equal(set(package_titles),
+ set(('(0, 3)', '(0, 4)', '(4, 5)')))
+
+class TestBboxQueryOrdered(SpatialQueryTestBase):
+ # x values for the fixtures
+ fixtures_x = [(0, 9), (1, 8), (2, 7), (3, 6), (4, 5),
+ (8, 9)]
+
+ def test_query(self):
+ bbox_dict = self.x_values_to_bbox((2, 7))
+ q = bbox_query_ordered(bbox_dict)
+ package_ids = [res.package_id for res in q]
+ package_titles = [model.Package.get(id_).title for id_ in package_ids]
+ # check the right items are returned
+ assert_equal(set(package_titles),
+ set(('(0, 9)', '(1, 8)', '(2, 7)', '(3, 6)', '(4, 5)')))
+ # check the order is good
+ assert_equal(package_titles,
+ ['(2, 7)', '(1, 8)', '(3, 6)', '(0, 9)', '(4, 5)'])
+
+
+class TestBboxQueryPerformance(SpatialQueryTestBase):
+ # x values for the fixtures
+ fixtures_x = [(random.uniform(0, 3), random.uniform(3,9)) \
+ for x in range(10)] # increase the number to 1000 say
+ def test_query(self):
+ bbox_dict = self.x_values_to_bbox((2, 7))
+ t0 = time.time()
+ q = bbox_query(bbox_dict)
+ t1 = time.time()
+ print('bbox_query took: ', t1-t0)
+
+ def test_query_ordered(self):
+ bbox_dict = self.x_values_to_bbox((2, 7))
+ t0 = time.time()
+ q = bbox_query_ordered(bbox_dict)
+ t1 = time.time()
+ print('bbox_query_ordered took: ', t1-t0)
diff --git a/ckanext/spatial/tests/nose/model/__init__.py b/ckanext/spatial/tests/nose/model/__init__.py
new file mode 100644
index 0000000..2e2033b
--- /dev/null
+++ b/ckanext/spatial/tests/nose/model/__init__.py
@@ -0,0 +1,7 @@
+# this is a namespace package
+try:
+ import pkg_resources
+ pkg_resources.declare_namespace(__name__)
+except ImportError:
+ import pkgutil
+ __path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/ckanext/spatial/tests/nose/model/test_harvested_metadata.py b/ckanext/spatial/tests/nose/model/test_harvested_metadata.py
new file mode 100644
index 0000000..7fb03f4
--- /dev/null
+++ b/ckanext/spatial/tests/nose/model/test_harvested_metadata.py
@@ -0,0 +1,34 @@
+import os
+
+from nose.tools import assert_equal
+
+from ckanext.spatial.model import ISODocument
+
+def open_xml_fixture(xml_filename):
+ xml_filepath = os.path.join(os.path.dirname(__file__),
+ 'xml',
+ xml_filename)
+ with open(xml_filepath, 'rb') as f:
+ xml_string_raw = f.read()
+
+ try:
+ xml_string = xml_string_raw.encode("utf-8")
+ except UnicodeDecodeError as e:
+ assert 0, 'ERROR: Unicode Error reading file \'%s\': %s' % \
+ (xml_filepath, e)
+ return xml_string
+
+def test_simple():
+ xml_string = open_xml_fixture('gemini_dataset.xml')
+ iso_document = ISODocument(xml_string)
+ iso_values = iso_document.read_values()
+ assert_equal(iso_values['guid'], 'test-dataset-1')
+ assert_equal(iso_values['metadata-date'], '2011-09-23T10:06:08')
+
+def test_multiplicity_warning():
+ # This dataset lacks a value for Metadata Date and should
+ # produce a log.warning, but not raise an exception.
+ xml_string = open_xml_fixture('FCSConservancyPolygons.xml')
+ iso_document = ISODocument(xml_string)
+ iso_values = iso_document.read_values()
+ assert_equal(iso_values['guid'], 'B8A22DF4-B0DC-4F0B-A713-0CF5F8784A28')
diff --git a/ckanext/spatial/tests/nose/model/test_package_extent.py b/ckanext/spatial/tests/nose/model/test_package_extent.py
new file mode 100644
index 0000000..812d15d
--- /dev/null
+++ b/ckanext/spatial/tests/nose/model/test_package_extent.py
@@ -0,0 +1,90 @@
+from nose.tools import assert_equals
+from shapely.geometry import asShape
+
+from ckan.model import Session
+from ckan.lib.helpers import json
+try:
+ import ckan.new_tests.factories as factories
+except ImportError:
+ import ckan.tests.factories as factories
+
+from ckanext.spatial.model import PackageExtent
+from ckanext.spatial.geoalchemy_common import WKTElement, legacy_geoalchemy
+from ckanext.spatial.tests.base import SpatialTestBase
+
+
+class TestPackageExtent(SpatialTestBase):
+
+ def test_create_extent(self):
+
+ package = factories.Dataset()
+
+ geojson = json.loads(self.geojson_examples['point'])
+
+ shape = asShape(geojson)
+ package_extent = PackageExtent(package_id=package['id'],
+ the_geom=WKTElement(shape.wkt,
+ self.db_srid))
+ package_extent.save()
+
+ assert_equals(package_extent.package_id, package['id'])
+ if legacy_geoalchemy:
+ assert_equals(Session.scalar(package_extent.the_geom.x),
+ geojson['coordinates'][0])
+ assert_equals(Session.scalar(package_extent.the_geom.y),
+ geojson['coordinates'][1])
+ assert_equals(Session.scalar(package_extent.the_geom.srid),
+ self.db_srid)
+ else:
+ from sqlalchemy import func
+ assert_equals(
+ Session.query(func.ST_X(package_extent.the_geom)).first()[0],
+ geojson['coordinates'][0])
+ assert_equals(
+ Session.query(func.ST_Y(package_extent.the_geom)).first()[0],
+ geojson['coordinates'][1])
+ assert_equals(package_extent.the_geom.srid, self.db_srid)
+
+ def test_update_extent(self):
+
+ package = factories.Dataset()
+
+ geojson = json.loads(self.geojson_examples['point'])
+
+ shape = asShape(geojson)
+ package_extent = PackageExtent(package_id=package['id'],
+ the_geom=WKTElement(shape.wkt,
+ self.db_srid))
+ package_extent.save()
+ if legacy_geoalchemy:
+ assert_equals(
+ Session.scalar(package_extent.the_geom.geometry_type),
+ 'ST_Point')
+ else:
+ from sqlalchemy import func
+ assert_equals(
+ Session.query(
+ func.ST_GeometryType(package_extent.the_geom)).first()[0],
+ 'ST_Point')
+
+ # Update the geometry (Point -> Polygon)
+ geojson = json.loads(self.geojson_examples['polygon'])
+
+ shape = asShape(geojson)
+ package_extent.the_geom = WKTElement(shape.wkt, self.db_srid)
+ package_extent.save()
+
+ assert_equals(package_extent.package_id, package['id'])
+ if legacy_geoalchemy:
+ assert_equals(
+ Session.scalar(package_extent.the_geom.geometry_type),
+ 'ST_Polygon')
+ assert_equals(
+ Session.scalar(package_extent.the_geom.srid),
+ self.db_srid)
+ else:
+ assert_equals(
+ Session.query(
+ func.ST_GeometryType(package_extent.the_geom)).first()[0],
+ 'ST_Polygon')
+ assert_equals(package_extent.the_geom.srid, self.db_srid)
diff --git a/ckanext/spatial/tests/nose/model/xml/FCSConservancyPolygons.xml b/ckanext/spatial/tests/nose/model/xml/FCSConservancyPolygons.xml
new file mode 100644
index 0000000..a34f8aa
--- /dev/null
+++ b/ckanext/spatial/tests/nose/model/xml/FCSConservancyPolygons.xml
@@ -0,0 +1,524 @@
+<!-- ISO 19139 metadata record (text content only; abridged):
+     fileIdentifier: B8A22DF4-B0DC-4F0B-A713-0CF5F8784A28; characterSet: utf8; hierarchyLevel: dataset
+     title: FCS Conservancy Polygons (alternate titles: s_cons_pol, fc.s_cons_pol)
+     pointOfContact / custodian / distributor: Geo-Information Services, Forestry Commission Scotland,
+       Geo-Information Services Delivery Manager, Silvan House, 231 Corstorphine Road, Edinburgh, Scotland,
+       EH12 7AT, United Kingdom; tel 0131 334 0303; geoinformationservices.scotland@forestry.gsi.gov.uk
+     owner: Head of Grants & Licences, Forestry Commission Scotland; cgis.scotland@forestry.gsi.gov.uk
+     dateStamp: 2012-11-30T10:51:36; dataset dates: creation 2004-06-06, revision 2010-03-16
+     reference system: urn:ogc:def:crs:EPSG::27700; maintenance: asNeeded; format: SDE Feature Class
+     abstract: depicts the five Forestry Commission Scotland Conservancy boundaries; attributes NAME,
+       ADDRESS_1..ADDRESS_4, POSTCODE, PHONE_NO, EMAIL
+     keywords: administrative, regional; constraints: copyright, license, otherRestrictions;
+       Copyright (Copyright Forestry Commission Scotland)
+     spatial resolution: 10000; language: eng; topic categories: boundaries, economy
+     extent: Scotland; bbox west -9.229868, east -0.705137, south 54.513338, north 60.866111
+     distribution: Server=fcspatialsv5; Service=5151; User=fcproduct; Version=SDE.DEFAULT;
+       http://www.forestry.gov.uk/datadownload
+     lineage: derived by merging OS Boundary Line polygons (Boundary Line is based on 1:10,000 scale mapping);
+       the boundary between north and south Fife was digitised by Geo-Information Services
+-->
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/model/xml/gemini_dataset.xml b/ckanext/spatial/tests/nose/model/xml/gemini_dataset.xml
new file mode 100644
index 0000000..3f58f0e
--- /dev/null
+++ b/ckanext/spatial/tests/nose/model/xml/gemini_dataset.xml
@@ -0,0 +1,498 @@
+<!-- ISO 19139 (GEMINI) metadata record (text content only; abridged):
+     fileIdentifier: test-dataset-1; language: eng; hierarchyLevel: dataset
+     pointOfContact / custodian / distributor: Lachlan Renwick, Scottish Natural Heritage,
+       Geographic Systems and Data Coordinator, Great Glen House, Leachkin Road, INVERNESS, IV3 8NW,
+       United Kingdom; tel 01463 725000; data_supply@snh.gov.uk
+     dateStamp: 2011-09-23T10:06:08; reference system: urn:ogc:def:crs:EPSG::27700
+     title: Country Parks (Scotland) (alternate title: CPK); dates: creation 2004-02, revision 2006-07-03
+     abstract: Parks are set up by Local Authorities to provide open-air recreation facilities close to
+       towns and cities. [edited]
+     keywords: Nature conservation (Government Category List, revision 2004-07-15); maintenance: irregular
+     constraints: copyright, otherRestrictions; Copyright Scottish Natural Heritage;
+       use limitation: Reference and PSMA Only, http://www.test.gov.uk/licenseurl
+     spatial resolution: 5; topic category: environment; extent: GB-SCT (ISO 3166, revision 2007-09-02)
+     bbox west -8.97114288, east 0.205857204, south 54.529947158, north 61.06066944; temporal extent 1998 to 2010
+     distribution formats: ESRI Shapefile (Unknown), KML 2.1, GML 3.1.1; format: SDE Feature Class
+     online resources: http://www.snh.org.uk/snhi;
+       https://gateway.snh.gov.uk/pls/apex_ddtdb2/f?p=101 (Test Resource Name, Test Resource Description,
+       protocol test-protocol, function download)
+     supplemental: Country Park is not a statutory designation. Countryside (Scotland) Act 1967 Section 48
+       gives local authorities power to assess and review the need for Country Parks in consultation with SNH.
+-->
diff --git a/ckanext/spatial/tests/nose/scripts/geometry_columns.sql b/ckanext/spatial/tests/nose/scripts/geometry_columns.sql
new file mode 100644
index 0000000..e2bbb75
--- /dev/null
+++ b/ckanext/spatial/tests/nose/scripts/geometry_columns.sql
@@ -0,0 +1,25 @@
+-------------------------------------------------------------------
+-- WARNING: This is probably NOT the file you are looking for.
+-- This file is intended to be used only during tests, you won't
+-- get a functional PostGIS database executing it. Please install
+-- PostGIS as described in the README.
+-------------------------------------------------------------------
+
+-------------------------------------------------------------------
+-- GEOMETRY_COLUMNS
+-------------------------------------------------------------------
+CREATE TABLE geometry_columns (
+ f_table_catalog varchar(256) not null,
+ f_table_schema varchar(256) not null,
+ f_table_name varchar(256) not null,
+ f_geometry_column varchar(256) not null,
+ coord_dimension integer not null,
+ srid integer not null,
+ type varchar(30) not null,
+ CONSTRAINT geometry_columns_pk primary key (
+ f_table_catalog,
+ f_table_schema,
+ f_table_name,
+ f_geometry_column )
+) WITH OIDS;
+
diff --git a/ckanext/spatial/tests/nose/scripts/postgis.sql b/ckanext/spatial/tests/nose/scripts/postgis.sql
new file mode 100644
index 0000000..b9ea072
--- /dev/null
+++ b/ckanext/spatial/tests/nose/scripts/postgis.sql
@@ -0,0 +1,41 @@
+-------------------------------------------------------------------
+-- WARNING: This is probably NOT the file you are looking for.
+-- This file is intended to be used only during tests, you won't
+-- get a functional PostGIS database executing it. Please install
+-- PostGIS as described in the README.
+-------------------------------------------------------------------
+
+-------------------------------------------------------------------
+-- SPATIAL_REF_SYS
+-------------------------------------------------------------------
+CREATE TABLE spatial_ref_sys (
+ srid integer not null primary key,
+ auth_name varchar(256),
+ auth_srid integer,
+ srtext varchar(2048),
+ proj4text varchar(2048)
+);
+
+-------------------------------------------------------------------
+-- GEOMETRY_COLUMNS
+-------------------------------------------------------------------
+CREATE TABLE geometry_columns (
+ f_table_catalog varchar(256) not null,
+ f_table_schema varchar(256) not null,
+ f_table_name varchar(256) not null,
+ f_geometry_column varchar(256) not null,
+ coord_dimension integer not null,
+ srid integer not null,
+ type varchar(30) not null,
+ CONSTRAINT geometry_columns_pk primary key (
+ f_table_catalog,
+ f_table_schema,
+ f_table_name,
+ f_geometry_column )
+) WITH OIDS;
+
+---
+--- EPSG 4326 : WGS 84
+---
+INSERT INTO "spatial_ref_sys" ("srid","auth_name","auth_srid","srtext","proj4text") VALUES (4326,'EPSG',4326,'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs ');
+
diff --git a/ckanext/spatial/tests/nose/scripts/spatial_ref_sys.sql b/ckanext/spatial/tests/nose/scripts/spatial_ref_sys.sql
new file mode 100644
index 0000000..467a868
--- /dev/null
+++ b/ckanext/spatial/tests/nose/scripts/spatial_ref_sys.sql
@@ -0,0 +1,23 @@
+-------------------------------------------------------------------
+-- WARNING: This is probably NOT the file you are looking for.
+-- This file is intended to be used only during tests, you won't
+-- get a functional PostGIS database executing it. Please install
+-- PostGIS as described in the README.
+-------------------------------------------------------------------
+
+-------------------------------------------------------------------
+-- SPATIAL_REF_SYS
+-------------------------------------------------------------------
+CREATE TABLE spatial_ref_sys (
+ srid integer not null primary key,
+ auth_name varchar(256),
+ auth_srid integer,
+ srtext varchar(2048),
+ proj4text varchar(2048)
+);
+
+---
+--- EPSG 4326 : WGS 84
+---
+INSERT INTO "spatial_ref_sys" ("srid","auth_name","auth_srid","srtext","proj4text") VALUES (4326,'EPSG',4326,'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs ');
+
diff --git a/ckanext/spatial/tests/nose/test_api.py b/ckanext/spatial/tests/nose/test_api.py
new file mode 100644
index 0000000..ef268ca
--- /dev/null
+++ b/ckanext/spatial/tests/nose/test_api.py
@@ -0,0 +1,274 @@
+from nose.plugins.skip import SkipTest
+from nose.tools import assert_equals, assert_raises
+
+from ckan.model import Session
+from ckan.lib.search import SearchError
+try:
+ import ckan.new_tests.helpers as helpers
+ import ckan.new_tests.factories as factories
+except ImportError:
+ import ckan.tests.helpers as helpers
+ import ckan.tests.factories as factories
+
+from ckanext.spatial.tests.base import SpatialTestBase
+
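+# bbox fixtures used below; 'dateline' (longitudes up to 192) and 'dateline2' (west 170, east -170) both cross the 180th meridian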
+extents = {
+ 'nz': '{"type":"Polygon","coordinates":[[[174,-38],[176,-38],[176,-40],[174,-40],[174,-38]]]}',
+ 'ohio': '{"type": "Polygon","coordinates": [[[-84,38],[-84,40],[-80,42],[-80,38],[-84,38]]]}',
+ 'dateline': '{"type":"Polygon","coordinates":[[[169,70],[169,60],[192,60],[192,70],[169,70]]]}',
+ 'dateline2': '{"type":"Polygon","coordinates":[[[170,60],[-170,60],[-170,70],[170,70],[170,60]]]}',
+}
+
+
+class TestAction(SpatialTestBase):
+
+ def teardown(self):
+ helpers.reset_db()
+
+ def test_spatial_query(self):
+
+ dataset = factories.Dataset(
+ extras=[{'key': 'spatial',
+ 'value': self.geojson_examples['point']}]
+ )
+
+ result = helpers.call_action(
+ 'package_search',
+ extras={'ext_bbox': '-180,-90,180,90'})
+
+ assert_equals(result['count'], 1)
+ assert_equals(result['results'][0]['id'], dataset['id'])
+
+ def test_spatial_query_outside_bbox(self):
+
+ factories.Dataset(
+ extras=[{'key': 'spatial',
+ 'value': self.geojson_examples['point']}]
+ )
+
+ result = helpers.call_action(
+ 'package_search',
+ extras={'ext_bbox': '-10,-20,10,20'})
+
+ assert_equals(result['count'], 0)
+
+ def test_spatial_query_wrong_bbox(self):
+
+ assert_raises(SearchError, helpers.call_action,
+ 'package_search', extras={'ext_bbox': '-10,-20,10,a'})
+
+ def test_spatial_query_nz(self):
+
+ dataset = factories.Dataset(
+ extras=[{'key': 'spatial',
+ 'value': extents['nz']}]
+ )
+
+ result = helpers.call_action(
+ 'package_search',
+ extras={'ext_bbox': '56,-54,189,-28'})
+
+ assert_equals(result['count'], 1)
+ assert_equals(result['results'][0]['id'], dataset['id'])
+
+ def test_spatial_query_nz_wrap(self):
+
+ dataset = factories.Dataset(
+ extras=[{'key': 'spatial',
+ 'value': extents['nz']}]
+ )
+
+ result = helpers.call_action(
+ 'package_search',
+ extras={'ext_bbox': '-203,-54,-167,-28'})
+
+ assert_equals(result['count'], 1)
+ assert_equals(result['results'][0]['id'], dataset['id'])
+
+ def test_spatial_query_ohio(self):
+
+ dataset = factories.Dataset(
+ extras=[{'key': 'spatial',
+ 'value': extents['ohio']}]
+ )
+
+ result = helpers.call_action(
+ 'package_search',
+ extras={'ext_bbox': '-110,37,-78,53'})
+
+ assert_equals(result['count'], 1)
+ assert_equals(result['results'][0]['id'], dataset['id'])
+
+ def test_spatial_query_ohio_wrap(self):
+
+ dataset = factories.Dataset(
+ extras=[{'key': 'spatial',
+ 'value': extents['ohio']}]
+ )
+
+ result = helpers.call_action(
+ 'package_search',
+ extras={'ext_bbox': '258,37,281,51'})
+
+ assert_equals(result['count'], 1)
+ assert_equals(result['results'][0]['id'], dataset['id'])
+
+ def test_spatial_query_dateline_1(self):
+
+ dataset = factories.Dataset(
+ extras=[{'key': 'spatial',
+ 'value': extents['dateline']}]
+ )
+
+ result = helpers.call_action(
+ 'package_search',
+ extras={'ext_bbox': '-197,56,-128,70'})
+
+ assert_equals(result['count'], 1)
+ assert_equals(result['results'][0]['id'], dataset['id'])
+
+ def test_spatial_query_dateline_2(self):
+
+ dataset = factories.Dataset(
+ extras=[{'key': 'spatial',
+ 'value': extents['dateline']}]
+ )
+
+ result = helpers.call_action(
+ 'package_search',
+ extras={'ext_bbox': '162,54,237,70'})
+
+ assert_equals(result['count'], 1)
+ assert_equals(result['results'][0]['id'], dataset['id'])
+
+ def test_spatial_query_dateline_3(self):
+
+ dataset = factories.Dataset(
+ extras=[{'key': 'spatial',
+ 'value': extents['dateline2']}]
+ )
+
+ result = helpers.call_action(
+ 'package_search',
+ extras={'ext_bbox': '-197,56,-128,70'})
+
+ assert_equals(result['count'], 1)
+ assert_equals(result['results'][0]['id'], dataset['id'])
+
+ def test_spatial_query_dateline_4(self):
+
+ dataset = factories.Dataset(
+ extras=[{'key': 'spatial',
+ 'value': extents['dateline2']}]
+ )
+
+ result = helpers.call_action(
+ 'package_search',
+ extras={'ext_bbox': '162,54,237,70'})
+
+ assert_equals(result['count'], 1)
+ assert_equals(result['results'][0]['id'], dataset['id'])
+
+
+
+class TestHarvestedMetadataAPI(SpatialTestBase, helpers.FunctionalTestBase):
+
+ def test_api(self):
+ try:
+ from ckanext.harvest.model import (HarvestObject, HarvestJob,
+ HarvestSource,
+ HarvestObjectExtra)
+ except ImportError:
+ raise SkipTest('The harvester extension is needed for these tests')
+
+ content1 = 'Content 1'
+ ho1 = HarvestObject(
+ guid='test-ho-1',
+ job=HarvestJob(source=HarvestSource(url='http://', type='xx')),
+ content=content1)
+
+ content2 = 'Content 2'
+ original_content2 = 'Original Content 2'
+ ho2 = HarvestObject(
+ guid='test-ho-2',
+ job=HarvestJob(source=HarvestSource(url='http://', type='xx')),
+ content=content2)
+
+ hoe = HarvestObjectExtra(
+ key='original_document',
+ value=original_content2,
+ object=ho2)
+
+ Session.add(ho1)
+ Session.add(ho2)
+ Session.add(hoe)
+ Session.commit()
+
+ object_id_1 = ho1.id
+ object_id_2 = ho2.id
+
+ app = self._get_test_app()
+
+ # Test redirects for old URLs
+ url = '/api/2/rest/harvestobject/{0}/xml'.format(object_id_1)
+ r = app.get(url)
+ assert_equals(r.status_int, 301)
+ assert ('/harvest/object/{0}'.format(object_id_1)
+ in r.headers['Location'])
+
+ url = '/api/2/rest/harvestobject/{0}/html'.format(object_id_1)
+ r = app.get(url)
+ assert_equals(r.status_int, 301)
+ assert ('/harvest/object/{0}/html'.format(object_id_1)
+ in r.headers['Location'])
+
+ # Access object content
+ url = '/harvest/object/{0}'.format(object_id_1)
+ r = app.get(url)
+ assert_equals(r.status_int, 200)
+ assert_equals(r.headers['Content-Type'],
+ 'application/xml; charset=utf-8')
+ assert_equals(
+ r.body,
+ '<?xml version="1.0" encoding="UTF-8"?>\nContent 1')
+
+ # Access original content in object extra (if present)
+ url = '/harvest/object/{0}/original'.format(object_id_1)
+ r = app.get(url, status=404)
+ assert_equals(r.status_int, 404)
+
+ url = '/harvest/object/{0}/original'.format(object_id_2)
+ r = app.get(url)
+ assert_equals(r.status_int, 200)
+ assert_equals(r.headers['Content-Type'],
+ 'application/xml; charset=utf-8')
+ assert_equals(
+ r.body,
+ '<?xml version="1.0" encoding="UTF-8"?>\n'
+ + 'Original Content 2')
+
+ # Access HTML transformation
+ url = '/harvest/object/{0}/html'.format(object_id_1)
+ r = app.get(url)
+ assert_equals(r.status_int, 200)
+ assert_equals(r.headers['Content-Type'],
+ 'text/html; charset=utf-8')
+ assert 'GEMINI record about' in r.body
+
+ url = '/harvest/object/{0}/html/original'.format(object_id_1)
+ r = app.get(url, status=404)
+ assert_equals(r.status_int, 404)
+
+ url = '/harvest/object/{0}/html'.format(object_id_2)
+ r = app.get(url)
+ assert_equals(r.status_int, 200)
+ assert_equals(r.headers['Content-Type'],
+ 'text/html; charset=utf-8')
+ assert 'GEMINI record about' in r.body
+
+ url = '/harvest/object/{0}/html/original'.format(object_id_2)
+ r = app.get(url)
+ assert_equals(r.status_int, 200)
+ assert_equals(r.headers['Content-Type'],
+ 'text/html; charset=utf-8')
+ assert 'GEMINI record about' in r.body
diff --git a/ckanext/spatial/tests/nose/test_csw_client.py b/ckanext/spatial/tests/nose/test_csw_client.py
new file mode 100644
index 0000000..de64323
--- /dev/null
+++ b/ckanext/spatial/tests/nose/test_csw_client.py
@@ -0,0 +1,65 @@
+import time
+from six.moves.urllib.request import urlopen
+from six.moves.urllib.error import URLError
+import os
+
+from pylons import config
+from nose.plugins.skip import SkipTest
+
+from ckan.model import engine_is_sqlite
+
+
+# copied from ckan/tests/__init__ to save importing it and therefore
+# setting up Pylons.
+class CkanServerCase(object):
+ @staticmethod
+ def _system(cmd):
+ import subprocess
+ (status, output) = subprocess.getstatusoutput(cmd)
+ if status:
+ raise Exception("Couldn't execute cmd: %s: %s" % (cmd, output))
+
+ @classmethod
+ def _paster(cls, cmd, config_path_rel):
+ config_path = os.path.join(config['here'], config_path_rel)
+ cls._system('paster --plugin ckan %s --config=%s' % (cmd, config_path))
+
+ @staticmethod
+ def _start_ckan_server(config_file=None):
+ if not config_file:
+ config_file = config['__file__']
+ config_path = config_file
+ import subprocess
+ process = subprocess.Popen(['paster', 'serve', config_path])
+ return process
+
+ @staticmethod
+ def _wait_for_url(url='http://127.0.0.1:5000/', timeout=15):
+ for i in range(int(timeout)*100):
+ try:
+ urlopen(url)
+ except URLError:
+ time.sleep(0.01)
+ else:
+ break
+
+ @staticmethod
+ def _stop_ckan_server(process):
+ pid = process.pid
+ pid = int(pid)
+ if os.system("kill -9 %d" % pid):
+ raise Exception("Can't kill foreign CKAN instance (pid: %d)." % pid)
+
+class CkanProcess(CkanServerCase):
+ @classmethod
+ def setup_class(cls):
+ if engine_is_sqlite():
+ raise SkipTest("Non-memory database needed for this test")
+
+ cls.pid = cls._start_ckan_server()
+ ## Don't need to init database, since it is same database as this process uses
+ cls._wait_for_url()
+
+ @classmethod
+ def teardown_class(cls):
+ cls._stop_ckan_server(cls.pid)
diff --git a/ckanext/spatial/tests/nose/test_harvest.py b/ckanext/spatial/tests/nose/test_harvest.py
new file mode 100644
index 0000000..11423af
--- /dev/null
+++ b/ckanext/spatial/tests/nose/test_harvest.py
@@ -0,0 +1,1141 @@
+from __future__ import absolute_import
+import os
+from datetime import datetime, date
+import lxml
+import json
+from uuid import uuid4
+from nose.plugins.skip import SkipTest
+from nose.tools import assert_equal, assert_in, assert_raises
+
+from ckan.lib.base import config
+from ckan import model
+from ckan.model import Session, Package, Group, User
+from ckan.logic.schema import default_update_package_schema, default_create_package_schema
+from ckan.logic import get_action
+
+try:
+ from ckan.new_tests.helpers import call_action
+except ImportError:
+ from ckan.tests.helpers import call_action
+
+from ckanext.harvest.model import (HarvestSource, HarvestJob, HarvestObject)
+from ckanext.spatial.validation import Validators
+from ckanext.spatial.harvesters.gemini import (GeminiDocHarvester,
+ GeminiWafHarvester,
+ GeminiHarvester)
+from ckanext.spatial.harvesters.base import SpatialHarvester
+from ckanext.spatial.tests.base import SpatialTestBase
+
+from .xml_file_server import serve
+
+# Start simple HTTP server that serves XML test files
+serve()
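+# (the served fixtures are referenced below via http://127.0.0.1:8999/, which serve() is expected to provide)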
+
+
+class HarvestFixtureBase(SpatialTestBase):
+
+ def setup(self):
+ # Add sysadmin user
+ harvest_user = model.User(name=u'harvest', password=u'test', sysadmin=True)
+ Session.add(harvest_user)
+ Session.commit()
+
+ package_schema = default_update_package_schema()
+ self.context ={'model':model,
+ 'session':Session,
+ 'user':u'harvest',
+ 'schema':package_schema,
+ 'api_version': '2'}
+
+ def teardown(self):
+ model.repo.rebuild_db()
+
+ def _create_job(self,source_id):
+ # Create a job
+ context ={'model':model,
+ 'session':Session,
+ 'user':u'harvest'}
+
+ job_dict=get_action('harvest_job_create')(context,{'source_id':source_id})
+ job = HarvestJob.get(job_dict['id'])
+ assert job
+
+ return job
+
+ def _create_source_and_job(self, source_fixture):
+ context ={'model':model,
+ 'session':Session,
+ 'user':u'harvest'}
+
+ if config.get('ckan.harvest.auth.profile') == u'publisher' \
+ and not 'publisher_id' in source_fixture:
+ source_fixture['publisher_id'] = self.publisher.id
+
+ source_dict=get_action('harvest_source_create')(context,source_fixture)
+ source = HarvestSource.get(source_dict['id'])
+ assert source
+
+ job = self._create_job(source.id)
+
+ return source, job
+
+ def _run_job_for_single_document(self,job,force_import=False,expect_gather_errors=False,expect_obj_errors=False):
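+ # helper: gather, fetch and import a single harvest object for the given job, asserting the
+ # expected presence or absence of gather/object errors, then mark the job Finished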
+
+ harvester = GeminiDocHarvester()
+
+ harvester.force_import = force_import
+
+
+ object_ids = harvester.gather_stage(job)
+ assert object_ids, len(object_ids) == 1
+ if expect_gather_errors:
+ assert len(job.gather_errors) > 0
+ else:
+ assert len(job.gather_errors) == 0
+
+ assert harvester.fetch_stage(object_ids) == True
+
+ obj = HarvestObject.get(object_ids[0])
+ assert obj, obj.content
+
+ harvester.import_stage(obj)
+ Session.refresh(obj)
+ if expect_obj_errors:
+ assert len(obj.errors) > 0
+ else:
+ assert len(obj.errors) == 0
+
+ job.status = u'Finished'
+ job.save()
+
+ return obj
+
+class TestHarvest(HarvestFixtureBase):
+
+ @classmethod
+ def setup_class(cls):
+ SpatialHarvester._validator = Validators(profiles=['gemini2'])
+ HarvestFixtureBase.setup_class()
+
+ def clean_tags(self, tags):
+ return [{u'name': x['name']} for x in tags]
+
+ def find_extra(self, pkg, key):
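+ # return the value of the package extra with the given key, or None if it is missing or duplicated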
+ values = [e['value'] for e in pkg['extras'] if e['key'] == key]
+ return values[0] if len(values) == 1 else None
+
+ def test_harvest_basic(self):
+
+ # Create source
+ source_fixture = {
+ 'title': 'Test Source',
+ 'name': 'test-source',
+ 'url': u'http://127.0.0.1:8999/gemini2.1-waf/index.html',
+ 'source_type': u'gemini-waf'
+ }
+
+ source, job = self._create_source_and_job(source_fixture)
+
+ harvester = GeminiWafHarvester()
+
+ # We need to send an actual job, not the dict
+ object_ids = harvester.gather_stage(job)
+
+ assert len(object_ids) == 2
+
+ # Fetch stage always returns True for Waf harvesters
+ assert harvester.fetch_stage(object_ids) == True
+
+ objects = []
+ for object_id in object_ids:
+ obj = HarvestObject.get(object_id)
+ assert obj
+ objects.append(obj)
+ harvester.import_stage(obj)
+
+ pkgs = Session.query(Package).filter(Package.type!=u'harvest').all()
+
+ assert_equal(len(pkgs), 2)
+
+ pkg_ids = [pkg.id for pkg in pkgs]
+
+ for obj in objects:
+ assert obj.current == True
+ assert obj.package_id in pkg_ids
+
+ def test_harvest_fields_service(self):
+
+ # Create source
+ source_fixture = {
+ 'title': 'Test Source',
+ 'name': 'test-source',
+ 'url': u'http://127.0.0.1:8999/gemini2.1/service1.xml',
+ 'source_type': u'gemini-single'
+ }
+
+ source, job = self._create_source_and_job(source_fixture)
+
+ harvester = GeminiDocHarvester()
+
+ object_ids = harvester.gather_stage(job)
+ assert object_ids, len(object_ids) == 1
+
+ # No gather errors
+ assert len(job.gather_errors) == 0
+
+ # Fetch stage always returns True for Single Doc harvesters
+ assert harvester.fetch_stage(object_ids) == True
+
+ obj = HarvestObject.get(object_ids[0])
+ assert obj, obj.content
+ assert obj.guid == u'test-service-1'
+
+ harvester.import_stage(obj)
+
+ # No object errors
+ assert len(obj.errors) == 0
+
+ package_dict = get_action('package_show')(self.context,{'id':obj.package_id})
+
+ assert package_dict
+
+ expected = {
+ 'name': u'one-scotland-address-gazetteer-web-map-service-wms',
+ 'title': u'One Scotland Address Gazetteer Web Map Service (WMS)',
+ 'tags': [{u'name': u'Addresses'}, {u'name': u'Scottish National Gazetteer'}],
+ 'notes': u'This service displays its contents at larger scale than 1:10000. [edited]',
+ }
+
+ package_dict['tags'] = self.clean_tags(package_dict['tags'])
+
+ for key,value in expected.items():
+ if not package_dict[key] == value:
+ raise AssertionError('Unexpected value for %s: %s (was expecting %s)' % \
+ (key, package_dict[key], value))
+
+ if config.get('ckan.harvest.auth.profile') == u'publisher':
+ assert package_dict['groups'] == [self.publisher.id]
+
+ expected_extras = {
+ # Basic
+ 'guid': obj.guid,
+ 'UKLP': u'True',
+ 'resource-type': u'service',
+ 'access_constraints': u'["No restriction on public access"]',
+ 'responsible-party': u'The Improvement Service (owner)',
+ 'provider':u'The Improvement Service',
+ 'contact-email': u'OSGCM@improvementservice.org.uk',
+ # Spatial
+ 'bbox-east-long': u'0.5242365625',
+ 'bbox-north-lat': u'61.0243',
+ 'bbox-south-lat': u'54.4764484375',
+ 'bbox-west-long': u'-9.099786875',
+ 'spatial': u'{"type": "Polygon", "coordinates": [[[0.5242365625, 54.4764484375], [-9.099786875, 54.4764484375], [-9.099786875, 61.0243], [0.5242365625, 61.0243], [0.5242365625, 54.4764484375]]]}',
+ # Other
+ 'coupled-resource': u'[{"href": ["http://scotgovsdi.edina.ac.uk/srv/en/csw?service=CSW&request=GetRecordById&version=2.0.2&outputSchema=http://www.isotc211.org/2005/gmd&elementSetName=full&id=250ea276-48e2-4189-8a89-fcc4ca92d652"], "uuid": ["250ea276-48e2-4189-8a89-fcc4ca92d652"], "title": []}]',
+ 'dataset-reference-date': u'[{"type": "publication", "value": "2011-09-08"}]',
+ 'frequency-of-update': u'daily',
+ 'licence': u'["Use of the One Scotland Gazetteer data used by this this service is available to any organisation that is a member of the One Scotland Mapping Agreement. It is not currently commercially available", "http://www.test.gov.uk/licenseurl"]',
+ 'licence_url': u'http://www.test.gov.uk/licenseurl',
+ 'metadata-date': u'2011-09-08T16:07:32',
+ 'metadata-language': u'eng',
+ 'spatial-data-service-type': u'other',
+ 'spatial-reference-system': u'OSGB 1936 / British National Grid (EPSG:27700)',
+ 'temporal_coverage-from': u'["1904-06-16"]',
+ 'temporal_coverage-to': u'["2004-06-16"]',
+ }
+
+ for key,value in expected_extras.items():
+ extra_value = self.find_extra(package_dict, key)
+ if extra_value is None:
+ raise AssertionError('Extra %s not present in package' % key)
+
+ if not extra_value == value:
+ raise AssertionError('Unexpected value for extra %s: %s (was expecting %s)' % \
+ (key, package_dict['extras'][key], value))
+
+ expected_resource = {
+ 'ckan_recommended_wms_preview': 'True',
+ 'description': 'Link to the GetCapabilities request for this service',
+ 'name': 'Web Map Service (WMS)',
+ 'resource_locator_function': 'download',
+ 'resource_locator_protocol': 'OGC:WMS-1.3.0-http-get-capabilities',
+ 'url': u'http://127.0.0.1:8999/wms/capabilities.xml',
+ 'verified': 'True',
+ }
+
+ resource = package_dict['resources'][0]
+ for key,value in expected_resource.items():
+ if not key in resource:
+ raise AssertionError('Expected key not in resource: %s' % (key))
+ if not resource[key] == value:
+ raise AssertionError('Unexpected value in resource for %s: %s (was expecting %s)' % \
+ (key, resource[key], value))
+ assert datetime.strptime(resource['verified_date'],'%Y-%m-%dT%H:%M:%S.%f').date() == date.today()
+ assert resource['format'].lower() == 'wms'
+
+ def test_harvest_fields_dataset(self):
+
+ # Create source
+ source_fixture = {
+ 'title': 'Test Source',
+ 'name': 'test-source',
+ 'url': u'http://127.0.0.1:8999/gemini2.1/dataset1.xml',
+ 'source_type': u'gemini-single'
+ }
+
+ source, job = self._create_source_and_job(source_fixture)
+
+ harvester = GeminiDocHarvester()
+
+ object_ids = harvester.gather_stage(job)
+ assert object_ids, len(object_ids) == 1
+
+ # No gather errors
+ assert len(job.gather_errors) == 0
+
+ # Fetch stage always returns True for Single Doc harvesters
+ assert harvester.fetch_stage(object_ids) == True
+
+ obj = HarvestObject.get(object_ids[0])
+ assert obj, obj.content
+ assert obj.guid == u'test-dataset-1'
+
+ harvester.import_stage(obj)
+
+ # No object errors
+ assert len(obj.errors) == 0
+
+ package_dict = get_action('package_show')(self.context,{'id':obj.package_id})
+
+ assert package_dict
+
+ expected = {
+ 'name': u'country-parks-scotland',
+ 'title': u'Country Parks (Scotland)',
+ 'tags': [{u'name': u'Nature conservation'}],
+ 'notes': u'Parks are set up by Local Authorities to provide open-air recreation facilities close to towns and cities. [edited]'
+ }
+
+ package_dict['tags'] = self.clean_tags(package_dict['tags'])
+
+ for key,value in expected.items():
+ if not package_dict[key] == value:
+ raise AssertionError('Unexpected value for %s: %s (was expecting %s)' % \
+ (key, package_dict[key], value))
+
+ if config.get('ckan.harvest.auth.profile') == u'publisher':
+ assert package_dict['groups'] == [self.publisher.id]
+
+ expected_extras = {
+ # Basic
+ 'guid': obj.guid,
+ 'resource-type': u'dataset',
+ 'responsible-party': u'Scottish Natural Heritage (custodian, distributor)',
+ 'access_constraints': u'["Copyright Scottish Natural Heritage"]',
+ 'contact-email': u'data_supply@snh.gov.uk',
+ 'provider':'',
+ # Spatial
+ 'bbox-east-long': u'0.205857204',
+ 'bbox-north-lat': u'61.06066944',
+ 'bbox-south-lat': u'54.529947158',
+ 'bbox-west-long': u'-8.97114288',
+ 'spatial': u'{"type": "Polygon", "coordinates": [[[0.205857204, 54.529947158], [-8.97114288, 54.529947158], [-8.97114288, 61.06066944], [0.205857204, 61.06066944], [0.205857204, 54.529947158]]]}',
+ # Other
+ 'coupled-resource': u'[]',
+ 'dataset-reference-date': u'[{"type": "creation", "value": "2004-02"}, {"type": "revision", "value": "2006-07-03"}]',
+ 'frequency-of-update': u'irregular',
+ 'licence': u'["Reference and PSMA Only", "http://www.test.gov.uk/licenseurl"]',
+ 'licence_url': u'http://www.test.gov.uk/licenseurl',
+ 'metadata-date': u'2011-09-23T10:06:08',
+ 'metadata-language': u'eng',
+ 'spatial-reference-system': u'urn:ogc:def:crs:EPSG::27700',
+ 'temporal_coverage-from': u'["1998"]',
+ 'temporal_coverage-to': u'["2010"]',
+ }
+
+ for key, value in expected_extras.items():
+ extra_value = self.find_extra(package_dict, key)
+ if extra_value is None:
+ raise AssertionError('Extra %s not present in package' % key)
+
+ if not extra_value == value:
+ raise AssertionError('Unexpected value for extra %s: %s (was expecting %s)' % \
+ (key, package_dict['extras'][key], value))
+
+ expected_resource = {
+ 'description': 'Test Resource Description',
+ 'format': u'',
+ 'name': 'Test Resource Name',
+ 'resource_locator_function': 'download',
+ 'resource_locator_protocol': 'test-protocol',
+ 'url': u'https://gateway.snh.gov.uk/pls/apex_ddtdb2/f?p=101',
+ }
+
+ resource = package_dict['resources'][0]
+ for key,value in expected_resource.items():
+ if not resource[key] == value:
+ raise AssertionError('Unexpected value in resource for %s: %s (was expecting %s)' % \
+ (key, resource[key], value))
+
+ def test_harvest_error_bad_xml(self):
+ # Create source
+ source_fixture = {
+ 'title': 'Test Source',
+ 'name': 'test-source',
+ 'url': u'http://127.0.0.1:8999/gemini2.1/error_bad_xml.xml',
+ 'source_type': u'gemini-single'
+ }
+
+ source, job = self._create_source_and_job(source_fixture)
+
+ harvester = GeminiDocHarvester()
+
+ try:
+ object_ids = harvester.gather_stage(job)
+ except lxml.etree.XMLSyntaxError:
+ # this only occurs in debug_exception_mode
+ pass
+ else:
+ assert object_ids is None
+
+ # Check gather errors
+ assert len(job.gather_errors) == 1
+ assert job.gather_errors[0].harvest_job_id == job.id
+ assert 'Error parsing the document' in job.gather_errors[0].message
+
+ def test_harvest_error_404(self):
+ # Create source
+ source_fixture = {
+ 'title': 'Test Source',
+ 'name': 'test-source',
+ 'url': u'http://127.0.0.1:8999/gemini2.1/not_there.xml',
+ 'source_type': u'gemini-single'
+ }
+
+ source, job = self._create_source_and_job(source_fixture)
+
+ harvester = GeminiDocHarvester()
+
+ object_ids = harvester.gather_stage(job)
+ assert object_ids is None
+
+ # Check gather errors
+ assert len(job.gather_errors) == 1
+ assert job.gather_errors[0].harvest_job_id == job.id
+ assert 'Unable to get content for URL' in job.gather_errors[0].message
+
+ def test_harvest_error_validation(self):
+
+ # Create source
+ source_fixture = {
+ 'title': 'Test Source',
+ 'name': 'test-source',
+ 'url': u'http://127.0.0.1:8999/gemini2.1/error_validation.xml',
+ 'source_type': u'gemini-single'
+ }
+
+ source, job = self._create_source_and_job(source_fixture)
+
+ harvester = GeminiDocHarvester()
+
+ object_ids = harvester.gather_stage(job)
+
+ # Right now the import process goes ahead even with validation errors
+ assert object_ids, len(object_ids) == 1
+
+ # No gather errors
+ assert len(job.gather_errors) == 0
+
+ # Fetch stage always returns True for Single Doc harvesters
+ assert harvester.fetch_stage(object_ids) == True
+
+ obj = HarvestObject.get(object_ids[0])
+ assert obj, obj.content
+ assert obj.guid == u'test-error-validation-1'
+
+ harvester.import_stage(obj)
+
+ # Check errors
+ assert len(obj.errors) == 1
+ assert obj.errors[0].harvest_object_id == obj.id
+
+ message = obj.errors[0].message
+
+ assert_in('One email address shall be provided', message)
+ assert_in('Service type shall be one of \'discovery\', \'view\', \'download\', \'transformation\', \'invoke\' or \'other\' following INSPIRE generic names', message)
+ assert_in('Limitations on public access code list value shall be \'otherRestrictions\'', message)
+ assert_in('One organisation name shall be provided', message)
+
+
+ def test_harvest_update_records(self):
+
+ # Create source
+ source_fixture = {
+ 'title': 'Test Source',
+ 'name': 'test-source',
+ 'url': u'http://127.0.0.1:8999/gemini2.1/dataset1.xml',
+ 'source_type': u'gemini-single'
+ }
+
+ source, first_job = self._create_source_and_job(source_fixture)
+
+ first_obj = self._run_job_for_single_document(first_job)
+
+ first_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+
+ # Package was created
+ assert first_package_dict
+ assert first_obj.current == True
+ assert first_obj.package
+
+ # Create and run a second job, the package should not be updated
+ second_job = self._create_job(source.id)
+
+ second_obj = self._run_job_for_single_document(second_job)
+
+ Session.remove()
+ Session.add(first_obj)
+ Session.add(second_obj)
+
+ Session.refresh(first_obj)
+ Session.refresh(second_obj)
+
+ second_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+
+ # Package was not updated
+ assert second_package_dict, first_package_dict['id'] == second_package_dict['id']
+ assert not second_obj.package, not second_obj.package_id
+ assert second_obj.current == False, first_obj.current == True
+
+ # Create and run a third job, forcing the importing to simulate an update in the package
+ third_job = self._create_job(source.id)
+ third_obj = self._run_job_for_single_document(third_job,force_import=True)
+
+ # For some reason first_obj does not get updated after the import_stage,
+ # and we have to force a refresh to get the actual DB values.
+ Session.remove()
+ Session.add(first_obj)
+ Session.add(second_obj)
+ Session.add(third_obj)
+
+ Session.refresh(first_obj)
+ Session.refresh(second_obj)
+ Session.refresh(third_obj)
+
+ third_package_dict = get_action('package_show')(self.context,{'id':third_obj.package_id})
+
+ # Package was updated
+ assert third_package_dict, first_package_dict['id'] == third_package_dict['id']
+ assert third_obj.package, third_obj.package_id == first_package_dict['id']
+ assert third_obj.current == True
+ assert second_obj.current == False
+ assert first_obj.current == False
+
+ def test_harvest_deleted_record(self):
+
+ # Create source
+ source_fixture = {
+ 'title': 'Test Source',
+ 'name': 'test-source',
+ 'url': u'http://127.0.0.1:8999/gemini2.1/service1.xml',
+ 'source_type': u'gemini-single'
+ }
+
+ source, first_job = self._create_source_and_job(source_fixture)
+
+ first_obj = self._run_job_for_single_document(first_job)
+
+ first_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+
+ # Package was created
+ assert first_package_dict
+ assert first_package_dict['state'] == u'active'
+ assert first_obj.current == True
+
+ # Delete package
+ first_package_dict['state'] = u'deleted'
+ self.context.update({'id':first_package_dict['id']})
+ updated_package_dict = get_action('package_update')(self.context,first_package_dict)
+
+ # Create and run a second job, the date has not changed, so the package should not be updated
+ # and remain deleted
+ first_job.status = u'Finished'
+ first_job.save()
+ second_job = self._create_job(source.id)
+
+ second_obj = self._run_job_for_single_document(second_job)
+
+ second_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+
+ # Package was not updated
+ assert second_package_dict, updated_package_dict['id'] == second_package_dict['id']
+ assert not second_obj.package, not second_obj.package_id
+ assert second_obj.current == False, first_obj.current == True
+
+
+ # Harvest an updated document, with a more recent modified date, package should be
+ # updated and reactivated
+ source.url = u'http://127.0.0.1:8999/gemini2.1/service1_newer.xml'
+ source.save()
+
+ third_job = self._create_job(source.id)
+
+ third_obj = self._run_job_for_single_document(third_job)
+
+ third_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+
+ Session.remove()
+ Session.add(first_obj)
+ Session.add(second_obj)
+ Session.add(third_obj)
+
+ Session.refresh(first_obj)
+ Session.refresh(second_obj)
+ Session.refresh(third_obj)
+
+ # Package was updated
+ assert third_package_dict, third_package_dict['id'] == second_package_dict['id']
+ assert third_obj.package, third_obj.package
+ assert third_obj.current == True, second_obj.current == False
+ assert first_obj.current == False
+
+ assert 'NEWER' in third_package_dict['title']
+ assert third_package_dict['state'] == u'active'
+
+
+
+ def test_harvest_different_sources_same_document(self):
+
+ # Create source1
+ source1_fixture = {
+ 'title': 'Test Source',
+ 'name': 'test-source',
+ 'url': u'http://127.0.0.1:8999/gemini2.1/source1/same_dataset.xml',
+ 'source_type': u'gemini-single'
+ }
+
+ source1, first_job = self._create_source_and_job(source1_fixture)
+
+ first_obj = self._run_job_for_single_document(first_job)
+
+ first_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+
+ # Package was created
+ assert first_package_dict
+ assert first_package_dict['state'] == u'active'
+ assert first_obj.current == True
+
+ # Harvest the same document, unchanged, from another source; the package
+ # is not updated.
+ # (As of https://github.com/okfn/ckanext-inspire/commit/9fb67
+ # we are no longer throwing an exception when this happens)
+ source2_fixture = {
+ 'title': 'Test Source 2',
+ 'name': 'test-source-2',
+ 'url': u'http://127.0.0.1:8999/gemini2.1/source2/same_dataset.xml',
+ 'source_type': u'gemini-single'
+ }
+
+ source2, second_job = self._create_source_and_job(source2_fixture)
+
+ second_obj = self._run_job_for_single_document(second_job)
+
+ second_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+
+ # Package was not updated
+ assert second_package_dict, first_package_dict['id'] == second_package_dict['id']
+ assert not second_obj.package, not second_obj.package_id
+ assert second_obj.current == False, first_obj.current == True
+
+ # Inactivate source1 and reharvest from source2; the package should be updated
+ third_job = self._create_job(source2.id)
+ third_obj = self._run_job_for_single_document(third_job,force_import=True)
+
+ Session.remove()
+ Session.add(first_obj)
+ Session.add(second_obj)
+ Session.add(third_obj)
+
+ Session.refresh(first_obj)
+ Session.refresh(second_obj)
+ Session.refresh(third_obj)
+
+ third_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+
+ # Package was updated
+ assert third_package_dict, first_package_dict['id'] == third_package_dict['id']
+ assert third_obj.package, third_obj.package_id == first_package_dict['id']
+ assert third_obj.current == True
+ assert second_obj.current == False
+ assert first_obj.current == False
+
+
+ def test_harvest_different_sources_same_document_but_deleted_inbetween(self):
+
+ # Create source1
+ source1_fixture = {
+ 'title': 'Test Source',
+ 'name': 'test-source',
+ 'url': u'http://127.0.0.1:8999/gemini2.1/source1/same_dataset.xml',
+ 'source_type': u'gemini-single'
+ }
+
+ source1, first_job = self._create_source_and_job(source1_fixture)
+
+ first_obj = self._run_job_for_single_document(first_job)
+
+ first_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+
+ # Package was created
+ assert first_package_dict
+ assert first_package_dict['state'] == u'active'
+ assert first_obj.current == True
+
+ # Delete/withdraw the package
+ first_package_dict = get_action('package_delete')(self.context,{'id':first_obj.package_id})
+ first_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+
+ # Harvest the same document, unchanged, from another source
+ source2_fixture = {
+ 'title': 'Test Source 2',
+ 'name': 'test-source-2',
+ 'url': u'http://127.0.0.1:8999/gemini2.1/source2/same_dataset.xml',
+ 'source_type': u'gemini-single'
+ }
+
+ source2, second_job = self._create_source_and_job(source2_fixture)
+
+ second_obj = self._run_job_for_single_document(second_job)
+
+ second_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+
+ # It would be good if the package was updated, but we see that it isn't
+ assert second_package_dict, first_package_dict['id'] == second_package_dict['id']
+ assert not second_obj.package
+ assert second_obj.current == False
+ assert first_obj.current == True
+
+
+ def test_harvest_moves_sources(self):
+
+ # Create source1
+ source1_fixture = {
+ 'title': 'Test Source',
+ 'name': 'test-source',
+ 'url': u'http://127.0.0.1:8999/gemini2.1/service1.xml',
+ 'source_type': u'gemini-single'
+ }
+
+ source1, first_job = self._create_source_and_job(source1_fixture)
+
+ first_obj = self._run_job_for_single_document(first_job)
+
+ first_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+
+ # Package was created
+ assert first_package_dict
+ assert first_package_dict['state'] == u'active'
+ assert first_obj.current == True
+
+ # Harvest the same document GUID but with a newer date, from another source.
+ source2_fixture = {
+ 'title': 'Test Source 2',
+ 'name': 'test-source-2',
+ 'url': u'http://127.0.0.1:8999/gemini2.1/service1_newer.xml',
+ 'source_type': u'gemini-single'
+ }
+
+ source2, second_job = self._create_source_and_job(source2_fixture)
+
+ second_obj = self._run_job_for_single_document(second_job)
+
+ second_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+
+ # Now we have two packages
+ assert second_package_dict, first_package_dict['id'] == second_package_dict['id']
+ assert second_obj.package
+ assert second_obj.current == True
+ assert first_obj.current == True
+ # So currently, if you move a Gemini record between harvest sources you need
+ # to update its metadata date to get it reharvested, and then you should
+ # withdraw the package relating to the original harvest source.
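+ # A minimal sketch of that manual clean-up (illustrative only, not run by
+ # this test; it reuses the package_delete action already used in this file):
+ #
+ #   get_action('package_delete')(self.context, {'id': first_obj.package_id})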
+
+
+ def test_harvest_import_command(self):
+
+ # Create source
+ source_fixture = {
+ 'title': 'Test Source',
+ 'name': 'test-source',
+ 'url': u'http://127.0.0.1:8999/gemini2.1/dataset1.xml',
+ 'source_type': u'gemini-single'
+ }
+
+ source, first_job = self._create_source_and_job(source_fixture)
+
+ first_obj = self._run_job_for_single_document(first_job)
+
+ before_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+
+ # Package was created
+ assert before_package_dict
+ assert first_obj.current == True
+ assert first_obj.package
+
+ # Create and run two more jobs; the package should not be updated
+ second_job = self._create_job(source.id)
+ second_obj = self._run_job_for_single_document(second_job)
+ third_job = self._create_job(source.id)
+ third_obj = self._run_job_for_single_document(third_job)
+
+ # Run the import command manually
+ imported_objects = get_action('harvest_objects_import')(self.context,{'source_id':source.id})
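+ # (harvest_objects_import re-runs the import stage for the source's latest
+ # harvest objects without fetching the remote documents again)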
+ Session.remove()
+ Session.add(first_obj)
+ Session.add(second_obj)
+ Session.add(third_obj)
+
+ Session.refresh(first_obj)
+ Session.refresh(second_obj)
+ Session.refresh(third_obj)
+
+ after_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+
+ # Package was updated, and the current object remains the same
+ assert after_package_dict, before_package_dict['id'] == after_package_dict['id']
+ assert third_obj.current == False
+ assert second_obj.current == False
+ assert first_obj.current == True
+
+
+ source_dict = get_action('harvest_source_show')(self.context,{'id':source.id})
+ assert source_dict['status']['total_datasets'] == 1
+
+ def test_clean_tags(self):
+
+ # Create source
+ source_fixture = {
+ 'title': 'Test Source',
+ 'name': 'test-source',
+ 'url': u'http://127.0.0.1:8999/gemini2.1/dataset1.xml',
+ 'source_type': u'gemini-single',
+ 'owner_org': 'test-org',
+ 'metadata_created': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
+ 'metadata_modified': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
+
+ }
+
+ user = User.get('dummy')
+ if not user:
+ user = call_action('user_create',
+ name='dummy',
+ password='dummybummy',
+ email='dummy@dummy.com')
+ user_name = user['name']
+ else:
+ user_name = user.name
+ org = Group.by_name('test-org')
+ if org is None:
+ org = call_action('organization_create',
+ context={'user': user_name},
+ name='test-org')
+ existing_g = Group.by_name('existing-group')
+ if existing_g is None:
+ existing_g = call_action('group_create',
+ context={'user': user_name},
+ name='existing-group')
+
+ context = {'user': 'dummy'}
+ package_schema = default_update_package_schema()
+ context['schema'] = package_schema
+ package_dict = {'frequency': 'manual',
+ 'publisher_name': 'dummy',
+ 'extras': [{'key':'theme', 'value':['non-mappable', 'thememap1']}],
+ 'groups': [],
+ 'title': 'fakename',
+ 'holder_name': 'dummy',
+ 'holder_identifier': 'dummy',
+ 'name': 'fakename',
+ 'notes': 'dummy',
+ 'owner_org': 'test-org',
+ 'modified': datetime.now(),
+ 'publisher_identifier': 'dummy',
+ 'metadata_created' : datetime.now(),
+ 'metadata_modified' : datetime.now(),
+ 'guid': str(uuid4()),
+ 'identifier': 'dummy'}
+
+ package_data = call_action('package_create', context=context, **package_dict)
+
+ package = Package.get('fakename')
+ source, job = self._create_source_and_job(source_fixture)
+ job.package = package
+ job.guid = uuid4()
+ harvester = SpatialHarvester()
+ with open(os.path.join('..', 'data', 'dataset.json')) as f:
+ dataset = json.load(f)
+
+ # long tags are invalid in all cases
+ TAG_LONG_INVALID = 'abcdefghij' * 20
+ # a 50-char prefix, used below only for sanity checks
+ TAG_LONG_VALID = TAG_LONG_INVALID[:50]
+ # without clean_tags, tags are truncated to the 100-char default
+ TAG_LONG_VALID_LONG = TAG_LONG_INVALID[:100]
+
+ assert len(TAG_LONG_VALID) == 50
+ assert TAG_LONG_VALID[-1] == 'j'
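+ # Added sanity check for the default truncation length noted above
+ # (assuming the 100-char default that TAG_LONG_VALID_LONG represents):
+ assert len(TAG_LONG_VALID_LONG) == 100
+ assert TAG_LONG_VALID_LONG[-1] == 'j'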
+ TAG_CHARS_INVALID = 'Pretty-inv@lid.tag!'
+ TAG_CHARS_VALID = 'pretty-invlidtag'
+
+ dataset['tags'].append(TAG_LONG_INVALID)
+ dataset['tags'].append(TAG_CHARS_INVALID)
+
+ harvester.source_config = {'clean_tags': False}
+ out = harvester.get_package_dict(dataset, job)
+ tags = out['tags']
+
+ # clean_tags is off, so tags with invalid chars are kept as-is,
+ # but over-long tags are still truncated to the 100-char default
+ assert {'name': TAG_CHARS_VALID} not in tags
+ assert {'name': TAG_CHARS_INVALID} in tags
+ assert {'name': TAG_LONG_VALID_LONG} in tags
+ assert {'name': TAG_LONG_INVALID} not in tags
+
+ harvester.source_config = {'clean_tags': True}
+
+ out = harvester.get_package_dict(dataset, job)
+ tags = out['tags']
+ assert {'name': TAG_CHARS_VALID} in tags
+ assert {'name': TAG_LONG_VALID_LONG} in tags
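+ # For a real harvest source, clean_tags would normally be supplied via the
+ # source's configuration JSON rather than set on the harvester directly,
+ # e.g. (illustrative): {"clean_tags": true}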
+
+
+BASIC_GEMINI = '''
+
+ e269743a-cfda-4632-a939-0c8416ae801e
+
+
+ service
+
+'''
+GUID = 'e269743a-cfda-4632-a939-0c8416ae801e'
+GEMINI_MISSING_GUID = ''''''
+
+class TestGatherMethods(HarvestFixtureBase):
+ def setup(self):
+ HarvestFixtureBase.setup(self)
+ # Create source
+ source_fixture = {
+ 'title': 'Test Source',
+ 'name': 'test-source',
+ 'url': u'http://127.0.0.1:8999/gemini2.1/dataset1.xml',
+ 'source_type': u'gemini-single'
+ }
+ source, job = self._create_source_and_job(source_fixture)
+ self.harvester = GeminiHarvester()
+ self.harvester.harvest_job = job
+
+ def teardown(self):
+ model.repo.rebuild_db()
+
+ def test_get_gemini_string_and_guid(self):
+ res = self.harvester.get_gemini_string_and_guid(BASIC_GEMINI, url=None)
+ assert_equal(res, (BASIC_GEMINI, GUID))
+
+ def test_get_gemini_string_and_guid__no_guid(self):
+ res = self.harvester.get_gemini_string_and_guid(GEMINI_MISSING_GUID, url=None)
+ assert_equal(res, (GEMINI_MISSING_GUID, ''))
+
+ def test_get_gemini_string_and_guid__non_parsing(self):
+ content = '' # no closing tag
+ assert_raises(lxml.etree.XMLSyntaxError, self.harvester.get_gemini_string_and_guid, content)
+
+ def test_get_gemini_string_and_guid__empty(self):
+ content = ''
+ assert_raises(lxml.etree.XMLSyntaxError, self.harvester.get_gemini_string_and_guid, content)
+
+class TestImportStageTools(object):
+ def test_licence_url_normal(self):
+ assert_equal(GeminiHarvester._extract_first_licence_url(
+ ['Reference and PSMA Only',
+ 'http://www.test.gov.uk/licenseurl']),
+ 'http://www.test.gov.uk/licenseurl')
+
+ def test_licence_url_multiple_urls(self):
+ # only the first URL is extracted
+ assert_equal(GeminiHarvester._extract_first_licence_url(
+ ['Reference and PSMA Only',
+ 'http://www.test.gov.uk/licenseurl',
+ 'http://www.test.gov.uk/2nd_licenseurl']),
+ 'http://www.test.gov.uk/licenseurl')
+
+ def test_licence_url_embedded(self):
+ # URL is embedded within the text field and not extracted
+ assert_equal(GeminiHarvester._extract_first_licence_url(
+ ['Reference and PSMA Only http://www.test.gov.uk/licenseurl']),
+ None)
+
+ def test_licence_url_embedded_at_start(self):
+ # URL is embedded at the start of the text field and the
+ # whole field is returned. Note this unusual behaviour.
+ assert_equal(GeminiHarvester._extract_first_licence_url(
+ ['http://www.test.gov.uk/licenseurl Reference and PSMA Only']),
+ 'http://www.test.gov.uk/licenseurl Reference and PSMA Only')
+
+ def test_responsible_organisation_basic(self):
+ responsible_organisation = [{'organisation-name': 'Ordnance Survey',
+ 'role': 'owner'},
+ {'organisation-name': 'Maps Ltd',
+ 'role': 'distributor'}]
+ assert_equal(GeminiHarvester._process_responsible_organisation(responsible_organisation),
+ ('Ordnance Survey', ['Maps Ltd (distributor)',
+ 'Ordnance Survey (owner)']))
+
+ def test_responsible_organisation_publisher(self):
+ # no owner, so falls back to publisher
+ responsible_organisation = [{'organisation-name': 'Ordnance Survey',
+ 'role': 'publisher'},
+ {'organisation-name': 'Maps Ltd',
+ 'role': 'distributor'}]
+ assert_equal(GeminiHarvester._process_responsible_organisation(responsible_organisation),
+ ('Ordnance Survey', ['Maps Ltd (distributor)',
+ 'Ordnance Survey (publisher)']))
+
+ def test_responsible_organisation_owner(self):
+ # provider is the owner (ignores publisher)
+ responsible_organisation = [{'organisation-name': 'Ordnance Survey',
+ 'role': 'publisher'},
+ {'organisation-name': 'Owner',
+ 'role': 'owner'},
+ {'organisation-name': 'Maps Ltd',
+ 'role': 'distributor'}]
+ assert_equal(GeminiHarvester._process_responsible_organisation(responsible_organisation),
+ ('Owner', ['Owner (owner)',
+ 'Maps Ltd (distributor)',
+ 'Ordnance Survey (publisher)',
+ ]))
+
+ def test_responsible_organisation_multiple_roles(self):
+ # the same organisation appears with multiple roles, which are merged;
+ # with no owner present, the publisher becomes the provider
+ responsible_organisation = [{'organisation-name': 'Ordnance Survey',
+ 'role': 'publisher'},
+ {'organisation-name': 'Ordnance Survey',
+ 'role': 'custodian'},
+ {'organisation-name': 'Distributor',
+ 'role': 'distributor'}]
+ assert_equal(GeminiHarvester._process_responsible_organisation(responsible_organisation),
+ ('Ordnance Survey', ['Distributor (distributor)',
+ 'Ordnance Survey (publisher, custodian)',
+ ]))
+
+ def test_responsible_organisation_blank_provider(self):
+ # no owner or publisher, so blank provider
+ responsible_organisation = [{'organisation-name': 'Ordnance Survey',
+ 'role': 'resourceProvider'},
+ {'organisation-name': 'Maps Ltd',
+ 'role': 'distributor'}]
+ assert_equal(GeminiHarvester._process_responsible_organisation(responsible_organisation),
+ ('', ['Maps Ltd (distributor)',
+ 'Ordnance Survey (resourceProvider)']))
+
+ def test_responsible_organisation_blank(self):
+ # no owner or publisher, so blank provider
+ responsible_organisation = []
+ assert_equal(GeminiHarvester._process_responsible_organisation(responsible_organisation),
+ ('', []))
+
+
+class TestValidation(HarvestFixtureBase):
+
+ @classmethod
+ def setup_class(cls):
+
+ # TODO: Fix these tests, broken since 27c4ee81e
+ raise SkipTest('Validation tests not working since 27c4ee81e')
+
+ SpatialHarvester._validator = Validators(profiles=['iso19139eden', 'constraints', 'gemini2'])
+ HarvestFixtureBase.setup_class()
+
+ def get_validation_errors(self, validation_test_filename):
+ # Create source
+ source_fixture = {
+ 'title': 'Test Source',
+ 'name': 'test-source',
+ 'url': u'http://127.0.0.1:8999/gemini2.1/validation/%s' % validation_test_filename,
+ 'source_type': u'gemini-single'
+ }
+
+ source, job = self._create_source_and_job(source_fixture)
+
+ harvester = GeminiDocHarvester()
+
+ # Gather stage for GeminiDocHarvester includes validation
+ object_ids = harvester.gather_stage(job)
+
+
+ # Check the validation errors
+ errors = '; '.join([gather_error.message for gather_error in job.gather_errors])
+ return errors
+
+ def test_01_dataset_fail_iso19139_schema(self):
+ errors = self.get_validation_errors('01_Dataset_Invalid_XSD_No_Such_Element.xml')
+ assert len(errors) > 0
+ assert_in('Could not get the GUID', errors)
+
+ def test_02_dataset_fail_constraints_schematron(self):
+ errors = self.get_validation_errors('02_Dataset_Invalid_19139_Missing_Data_Format.xml')
+ assert len(errors) > 0
+ assert_in('MD_Distribution / MD_Format: count(distributionFormat + distributorFormat) > 0', errors)
+
+ def test_03_dataset_fail_gemini_schematron(self):
+ errors = self.get_validation_errors('03_Dataset_Invalid_GEMINI_Missing_Keyword.xml')
+ assert len(errors) > 0
+ assert_in('Descriptive keywords are mandatory', errors)
+
+ def test_04_dataset_valid(self):
+ errors = self.get_validation_errors('04_Dataset_Valid.xml')
+ assert len(errors) == 0
+
+ def test_05_series_fail_iso19139_schema(self):
+ errors = self.get_validation_errors('05_Series_Invalid_XSD_No_Such_Element.xml')
+ assert len(errors) > 0
+ assert_in('Could not get the GUID', errors)
+
+ def test_06_series_fail_constraints_schematron(self):
+ errors = self.get_validation_errors('06_Series_Invalid_19139_Missing_Data_Format.xml')
+ assert len(errors) > 0
+ assert_in('MD_Distribution / MD_Format: count(distributionFormat + distributorFormat) > 0', errors)
+
+ def test_07_series_fail_gemini_schematron(self):
+ errors = self.get_validation_errors('07_Series_Invalid_GEMINI_Missing_Keyword.xml')
+ assert len(errors) > 0
+ assert_in('Descriptive keywords are mandatory', errors)
+
+ def test_08_series_valid(self):
+ errors = self.get_validation_errors('08_Series_Valid.xml')
+ assert len(errors) == 0
+
+ def test_09_service_fail_iso19139_schema(self):
+ errors = self.get_validation_errors('09_Service_Invalid_No_Such_Element.xml')
+ assert len(errors) > 0
+ assert_in('Could not get the GUID', errors)
+
+ def test_10_service_fail_constraints_schematron(self):
+ errors = self.get_validation_errors('10_Service_Invalid_19139_Level_Description.xml')
+ assert len(errors) > 0
+ assert_in("DQ_Scope: 'levelDescription' is mandatory if 'level' notEqual 'dataset' or 'series'.", errors)
+
+ def test_11_service_fail_gemini_schematron(self):
+ errors = self.get_validation_errors('11_Service_Invalid_GEMINI_Service_Type.xml')
+ assert len(errors) > 0
+ assert_in("Service type shall be one of 'discovery', 'view', 'download', 'transformation', 'invoke' or 'other' following INSPIRE generic names.", errors)
+
+ def test_12_service_valid(self):
+ errors = self.get_validation_errors('12_Service_Valid.xml')
+ assert len(errors) == 0, errors
+
+ def test_13_dataset_fail_iso19139_schema_2(self):
+ # This test Dataset has srv tags, but only Service metadata should contain them.
+ errors = self.get_validation_errors('13_Dataset_Invalid_Element_srv.xml')
+ assert len(errors) > 0
+ assert_in('Element \'{http://www.isotc211.org/2005/srv}SV_ServiceIdentification\': This element is not expected.', errors)
diff --git a/ckanext/spatial/tests/nose/test_plugin/__init__.py b/ckanext/spatial/tests/nose/test_plugin/__init__.py
new file mode 100644
index 0000000..2e2033b
--- /dev/null
+++ b/ckanext/spatial/tests/nose/test_plugin/__init__.py
@@ -0,0 +1,7 @@
+# this is a namespace package
+try:
+ import pkg_resources
+ pkg_resources.declare_namespace(__name__)
+except ImportError:
+ import pkgutil
+ __path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/ckanext/spatial/tests/nose/test_plugin/plugin.py b/ckanext/spatial/tests/nose/test_plugin/plugin.py
new file mode 100644
index 0000000..2aa5a3d
--- /dev/null
+++ b/ckanext/spatial/tests/nose/test_plugin/plugin.py
@@ -0,0 +1,9 @@
+from ckan import plugins as p
+
+
+class TestSpatialPlugin(p.SingletonPlugin):
+
+ p.implements(p.IConfigurer, inherit=True)
+
+ def update_config(self, config):
+ p.toolkit.add_template_directory(config, 'templates')
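+
+ # To be loadable by CKAN during the tests this plugin has to be registered
+ # under the ckan.plugins entry point group in setup.py; the entry point
+ # name below is an assumption for illustration:
+ #   test_spatial_plugin = ckanext.spatial.tests.nose.test_plugin.plugin:TestSpatialPlugin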
diff --git a/ckanext/spatial/tests/nose/test_plugin/templates/package/read_base.html b/ckanext/spatial/tests/nose/test_plugin/templates/package/read_base.html
new file mode 100644
index 0000000..02a5cb6
--- /dev/null
+++ b/ckanext/spatial/tests/nose/test_plugin/templates/package/read_base.html
@@ -0,0 +1,11 @@
+{% ckan_extends %}
+
+{% block secondary_content %}
+ {{ super() }}
+
+ {% set dataset_extent = h.get_pkg_dict_extra(c.pkg_dict, 'spatial', '') %}
+ {% if dataset_extent %}
+ {% snippet "spatial/snippets/dataset_map_sidebar.html", extent=dataset_extent %}
+ {% endif %}
+
+{% endblock %}
diff --git a/ckanext/spatial/tests/nose/test_plugin/templates/package/search.html b/ckanext/spatial/tests/nose/test_plugin/templates/package/search.html
new file mode 100644
index 0000000..2d80283
--- /dev/null
+++ b/ckanext/spatial/tests/nose/test_plugin/templates/package/search.html
@@ -0,0 +1,9 @@
+{% ckan_extends %}
+
+{% block secondary_content %}
+
+ {% snippet "spatial/snippets/spatial_query.html" %}
+
+ {{ super() }}
+
+{% endblock %}
diff --git a/ckanext/spatial/tests/nose/test_validation.py b/ckanext/spatial/tests/nose/test_validation.py
new file mode 100644
index 0000000..f707e90
--- /dev/null
+++ b/ckanext/spatial/tests/nose/test_validation.py
@@ -0,0 +1,153 @@
+import os
+
+from lxml import etree
+from nose.tools import assert_equal, assert_in
+
+from ckanext.spatial import validation
+
+# other validation tests are in test_harvest.py
+
+class TestValidation(object):
+
+ def _get_file_path(self, file_name):
+ return os.path.join(os.path.dirname(__file__), 'xml', file_name)
+
+ def get_validation_errors(self, validator, validation_test_filename):
+ validation_test_filepath = self._get_file_path(validation_test_filename)
+ xml = etree.parse(validation_test_filepath)
+ is_valid, errors = validator.is_valid(xml)
+
+ return ';'.join([e[0] for e in errors])
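+ # Note: the individual validator classes used by this helper return an
+ # (is_valid, errors) pair, while the aggregate validation.Validators helper
+ # used in test_error_line_numbers below returns (is_valid, profile, errors).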
+
+ def test_iso19139_failure(self):
+ errors = self.get_validation_errors(validation.ISO19139Schema,
+ 'iso19139/dataset-invalid.xml')
+
+ assert len(errors) > 0
+ assert_in('Dataset schema (gmx.xsd)', errors)
+ assert_in('{http://www.isotc211.org/2005/gmd}nosuchelement\': This element is not expected.', errors)
+
+ def test_iso19139_pass(self):
+ errors = self.get_validation_errors(validation.ISO19139Schema,
+ 'iso19139/dataset.xml')
+ assert_equal(errors, '')
+
+ # The Gemini 2.1 tests are basically the same as those in test_harvest.py,
+ # but a few small differences make it worth keeping the copies in
+ # test_harvest as well
+
+ def test_01_dataset_fail_iso19139_schema(self):
+ errors = self.get_validation_errors(validation.ISO19139EdenSchema,
+ 'gemini2.1/validation/01_Dataset_Invalid_XSD_No_Such_Element.xml')
+ assert len(errors) > 0
+ assert_in('(gmx.xsd)', errors)
+ assert_in('\'{http://www.isotc211.org/2005/gmd}nosuchelement\': This element is not expected.', errors)
+
+ def test_02_dataset_fail_constraints_schematron(self):
+ errors = self.get_validation_errors(validation.ConstraintsSchematron14,
+ 'gemini2.1/validation/02_Dataset_Invalid_19139_Missing_Data_Format.xml')
+ assert len(errors) > 0
+ assert_in('MD_Distribution / MD_Format: count(distributionFormat + distributorFormat) > 0', errors)
+
+ def test_03_dataset_fail_gemini_schematron(self):
+ errors = self.get_validation_errors(validation.Gemini2Schematron,
+ 'gemini2.1/validation/03_Dataset_Invalid_GEMINI_Missing_Keyword.xml')
+ assert len(errors) > 0
+ assert_in('Descriptive keywords are mandatory', errors)
+
+ def assert_passes_all_gemini2_1_validation(self, xml_filepath):
+ errs = self.get_validation_errors(validation.ISO19139EdenSchema,
+ xml_filepath)
+ assert not errs, 'ISO19139EdenSchema: ' + errs
+ errs = self.get_validation_errors(validation.ConstraintsSchematron14,
+ xml_filepath)
+ assert not errs, 'ConstraintsSchematron14: ' + errs
+ errs = self.get_validation_errors(validation.Gemini2Schematron,
+ xml_filepath)
+ assert not errs, 'Gemini2Schematron: ' + errs
+
+ def test_04_dataset_valid(self):
+ self.assert_passes_all_gemini2_1_validation('gemini2.1/validation/04_Dataset_Valid.xml')
+
+ def test_05_series_fail_iso19139_schema(self):
+ errors = self.get_validation_errors(validation.ISO19139EdenSchema,
+ 'gemini2.1/validation/05_Series_Invalid_XSD_No_Such_Element.xml')
+ assert len(errors) > 0
+ assert_in('(gmx.xsd)', errors)
+ assert_in('\'{http://www.isotc211.org/2005/gmd}nosuchelement\': This element is not expected.', errors)
+
+ def test_06_series_fail_constraints_schematron(self):
+ errors = self.get_validation_errors(validation.ConstraintsSchematron14,
+ 'gemini2.1/validation/06_Series_Invalid_19139_Missing_Data_Format.xml')
+ assert len(errors) > 0
+ assert_in('MD_Distribution / MD_Format: count(distributionFormat + distributorFormat) > 0', errors)
+
+ def test_07_series_fail_gemini_schematron(self):
+ errors = self.get_validation_errors(validation.Gemini2Schematron,
+ 'gemini2.1/validation/07_Series_Invalid_GEMINI_Missing_Keyword.xml')
+ assert len(errors) > 0
+ assert_in('Descriptive keywords are mandatory', errors)
+
+ def test_08_series_valid(self):
+ self.assert_passes_all_gemini2_1_validation('gemini2.1/validation/08_Series_Valid.xml')
+
+ def test_09_service_fail_iso19139_schema(self):
+ errors = self.get_validation_errors(validation.ISO19139EdenSchema,
+ 'gemini2.1/validation/09_Service_Invalid_No_Such_Element.xml')
+ assert len(errors) > 0
+ assert_in('(gmx.xsd & srv.xsd)', errors)
+ assert_in('\'{http://www.isotc211.org/2005/gmd}nosuchelement\': This element is not expected.', errors)
+
+ def test_10_service_fail_constraints_schematron(self):
+ errors = self.get_validation_errors(validation.ConstraintsSchematron14,
+ 'gemini2.1/validation/10_Service_Invalid_19139_Level_Description.xml')
+ assert len(errors) > 0
+ assert_in("DQ_Scope: 'levelDescription' is mandatory if 'level' notEqual 'dataset' or 'series'.", errors)
+
+ def test_11_service_fail_gemini_schematron(self):
+ errors = self.get_validation_errors(validation.Gemini2Schematron,
+ 'gemini2.1/validation/11_Service_Invalid_GEMINI_Service_Type.xml')
+ assert len(errors) > 0
+ assert_in("Service type shall be one of 'discovery', 'view', 'download', 'transformation', 'invoke' or 'other' following INSPIRE generic names.", errors)
+
+ def test_12_service_valid(self):
+ self.assert_passes_all_gemini2_1_validation('gemini2.1/validation/12_Service_Valid.xml')
+
+ def test_13_dataset_fail_iso19139_schema_2(self):
+ # This test Dataset has srv tags, but only Service metadata should contain them.
+ errors = self.get_validation_errors(validation.ISO19139EdenSchema,
+ 'gemini2.1/validation/13_Dataset_Invalid_Element_srv.xml')
+ assert len(errors) > 0
+ assert_in('(gmx.xsd)', errors)
+ assert_in('Element \'{http://www.isotc211.org/2005/srv}SV_ServiceIdentification\': This element is not expected.', errors)
+
+ def test_schematron_error_extraction(self):
+ validation_error_xml = '''
+
+
+
+ Service type shall be one of 'discovery', 'view', 'download', 'transformation', 'invoke' or 'other' following INSPIRE generic names.
+
+
+
+'''
+ failure_xml = etree.fromstring(validation_error_xml)
+ fail_element = failure_xml.getchildren()[0]
+ details = validation.SchematronValidator.extract_error_details(fail_element)
+ if isinstance(details, tuple):
+ details = details[1]
+ assert_in("srv:serviceType/*[1] = 'discovery'", details)
+ assert_in("/*[local-name()='MD_Metadata'", details)
+ assert_in("Service type shall be one of 'discovery'", details)
+
+
+ def test_error_line_numbers(self):
+ file_path = self._get_file_path('iso19139/dataset-invalid.xml')
+ xml = etree.parse(file_path)
+ is_valid, profile, errors = validation.Validators(profiles=['iso19139']).is_valid(xml)
+ assert not is_valid
+ assert len(errors) == 2
+
+ message, line = errors[1]
+ assert 'This element is not expected' in message
+ assert line == 3
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1-waf/index.html b/ckanext/spatial/tests/nose/xml/gemini2.1-waf/index.html
new file mode 100644
index 0000000..9710d9f
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1-waf/index.html
@@ -0,0 +1,11 @@
+
+
+
+ Index of /waf
+
+
+ Index of /waf
+ wales1.xml
+ wales2.xml
+
+
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1-waf/wales1.xml b/ckanext/spatial/tests/nose/xml/gemini2.1-waf/wales1.xml
new file mode 100644
index 0000000..750c066
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1-waf/wales1.xml
@@ -0,0 +1,420 @@
+
+
+
+ 11edc4ec-5269-40b9-86c8-17201fa4e74e-new
+
+
+ eng
+
+
+
+
+
+
+
+ Welsh Government
+
+
+ Geography and Technology
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Cathays Park - 2
+
+
+ Cardiff
+
+
+ Wales
+
+
+ CF10 3NQ
+
+
+ UK
+
+
+ cartographics@wales.gsi.gov.uk
+
+
+
+
+
+
+
+
+
+
+
+ 2011-10-28T17:27:04
+
+
+ Gemini
+
+
+ 2.1
+
+
+ -
+
+ -
+
+ -
+
+
+ urn:ogc:def:crs:EPSG::Nat. Grid GB
+
+
+ OGP
+
+
+
+
+
+
+
+
+
+
+ World Heritage Sites in Wales GIS Polygon and Polyline Dataset
+
+
+ WHS in Wales GIS Dataset
+
+
+
+
+ 2005-01-01
+
+
+
+
+
+
+
+
+
+ 27700
+
+
+ EPSG
+
+
+
+
+
+
+ UNESCO (United Nations Educational, Scientific and Cultural Organization) World Heritage Sites are places or buildings of outstanding universal value. UNESCO's World Heritage mission is to encourage countries to ensure the protection of their own natural and cultural heritage.
+
+Wales has three World Heritage Sites, the Castles and Town Walls of King Edward in Gwynedd, the Blaenavon Industrial Landscape and the Pontcysyllte Aqueduct.
+
+All planning enquiries that may effect a World Heritage Site, its setting or significant view should be directed to Cadw.
+
+The World Heritage Sites dataset comprises 4 ESRI Shapefiles, these are:-
+
+1: World Heritage Sites (WHS)
+2: Essential Setting (ES)
+3: Significant View (SV)
+4: Arcs of View (AV)
+
+
+
+
+ CADW (The Historic Environment Service of the Welsh Assembly Government)
+
+
+ Mapping and Charting Officer
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ CADW
+
+
+ Plas Carew, Unit 5/7 Cefn Coed
+
+
+ Parc Nantgarw, Cardiff,
+
+
+ South Glamorgan
+
+
+ CF15 7QQ
+
+
+ United Kingdom
+
+
+ cadw@wales.gsi.gov.uk
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ CADW_Whs_s.png
+
+
+ thumbnail
+
+
+ png
+
+
+
+
+
+
+ CADW_Whs.png
+
+
+ large_thumbnail
+
+
+ png
+
+
+
+
+
+
+ Protected sites
+
+
+
+
+
+
+
+ GEMET - INSPIRE themes, version 1.0
+
+
+
+
+ 2008-06-01
+
+
+
+
+
+
+
+
+
+
+
+ CADW
+
+
+ World Heritage Sites
+
+
+
+
+
+
+ Copyright
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+
+ 5000
+
+
+
+
+
+
+
+
+
+ environment
+
+
+
+
+
+
+ -6.349669208373599
+
+
+ -1.8959807251206617
+
+
+ 50.948649366562954
+
+
+ 53.77117345513605
+
+
+
+
+
+ -
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ OGC Web Map Service
+
+
+ 1.3.0
+
+
+
+
+
+
+
+
+ http://inspire.wales.gov.uk/metadata?uuid=11edc4ec-5269-40b9-86c8-17201fa4e74e
+
+
+
+
+
+
+ http://inspire.wales.gov.uk/maps/Protected_sites/wms?request=getCapabilities
+
+
+
+
+
+
+ http://inspire.wales.gov.uk/maps/Protected_sites/wms?
+
+
+ OGC:WMS-1.3.0-http-get-map
+
+
+ Protected_sites:world_heritage_sites_feb10
+
+
+ UNESCO World Heritage Sites (Wales)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ UK GEMINI Standard version 2.0
+
+
+
+
+ 2009-07-20
+
+
+
+
+
+
+
+
+
+ Conforms to GEMINI2 2.0 draft schematron
+
+
+ true
+
+
+
+
+
+
+
+
+ The Purpose of this data is to map World Heritage Sites in Wales.
+
+
+
+
+
+
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1-waf/wales2.xml b/ckanext/spatial/tests/nose/xml/gemini2.1-waf/wales2.xml
new file mode 100644
index 0000000..188e153
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1-waf/wales2.xml
@@ -0,0 +1,539 @@
+
+
+
+ be06a48d-36fa-4369-a585-7fcc319db4c0-new
+
+
+ English
+
+
+
+
+
+ -
+
+
+ Spatial Evidence Officer
+
+
+ -
+
+
+ -
+
+
+ 01248 385500
+
+
+
+
+ -
+
+
+ Countryside Council for Wales
+
+
+ Maes-y-Ffynnon
+
+
+ LL57 2DW
+
+
+ LL57 2DW
+
+
+ enquiries@ccw.gov.uk
+
+
+
+
+
+
+
+
+
+
+
+ 2011-10-29T17:14:04
+
+
+ Gemini
+
+
+ 2.1
+
+
+ -
+
+ -
+
+ -
+
+
+ urn:ogc:def:crs:EPSG::Nat. Grid GB
+
+
+ OGP
+
+
+
+
+
+
+
+
+ -
+
+
+ Protected sites: Country Parks - GIS dataset
+
+
+ Parciau Cenedlaethol - Set Ddata SGDd
+
+
+ Sites (Country Parks)
+
+
+ -
+
+ -
+
+ 2009-00-00
+
+ -
+
+
+
+
+
+
+ -
+
+ -
+
+ 2009-01-01
+
+ -
+
+
+
+
+
+
+
+
+ 98742
+
+
+ http://www.ccw.gov.uk
+
+
+
+
+
+
+ This is a GIS dataset containing spatial objects such as points, lines, and polygons. It contains digital boundaries of country parks. There are about 250 recognised Country Parks in England and Wales. Most Country Parks were designated in the 1970s, under the Countryside Act 1968 with the support of the former Countryside Commission. In more recent times there has been no specific financial support for country parks directly, and fewer have been designated. Most are managed by local authorities, although other organisations and private individuals can also run them. A Country Park is an area designated for people to visit and enjoy recreation in a countryside environment. The purpose of a country park is to provide somewhere for visitors who do not necessarily want to go out into the wider countryside. Visitors can enjoy a public open space with an informal atmosphere, as opposed to a formal park as might be found in an urban area. For this reason country parks are usually found close to or on the edge of built-up areas, and rarely in the countryside proper.
+
+
+ -
+
+
+ Countryside Council for Wales (CCW)
+
+
+ -
+
+
+ -
+
+
+ 01248 385500
+
+
+
+
+ -
+
+
+ Countryside Council for Wales
+
+
+ Maes y Ffynnon
+
+
+ Bangor
+
+
+ Gwynedd
+
+
+ LL56 2DW
+
+
+ enquiries@ccw.gov.uk
+
+
+
+
+
+
+
+
+
+
+
+ -
+
+
+
+
+
+
+
+
+
+ CCW_Countryparks_s.png
+
+
+ thumbnail
+
+
+ png
+
+
+
+
+
+
+ CCW_Countryparks.png
+
+
+ large_thumbnail
+
+
+ png
+
+
+
+
+ -
+
+
+ CCW website; cd; dvd
+
+
+
+
+
+ -
+
+
+ Protected sites
+
+
+ -
+
+
+ GEMET - INSPIRE themes, version 1.0
+
+
+ -
+
+ -
+
+ 2011-07-05
+
+ -
+
+
+
+
+
+
+
+
+
+
+ -
+
+
+ Biodiversity
+
+
+ Country parks
+
+
+ Nature conservation
+
+
+ -
+
+
+ Integrated Public Sector Vocabulary (IPSV)
+
+
+ -
+
+ -
+
+ 2011-07-05
+
+ -
+
+
+
+
+
+
+
+
+
+
+
+
+ CCGC/CCW 2004. You may reproduce this dataset free of charge for non-commercial and internal business purposes in any format or medium, provided that you do so accurately, acknowledging both the source and CCW's copyright, and do not use it in a misleading context. To avoid using old information, we recommend that you obtain the latest version from the original source.
+
+
+
+
+
+
+
+
+
+ CCW is legally obliged to give public access to the data and information which it holds unless specific legal exceptions apply, for example, if the data is personal or if release of the data would result in environmental harm
+
+
+
+
+
+
+
+
+
+
+ biota
+
+
+ environment
+
+
+ geoscientificInformation
+
+
+ -
+
+
+
+
+
+ -
+
+ -
+
+
+ ISO3166 Countries
+
+
+ -
+
+ -
+
+ 2011-07-05
+
+ -
+
+
+
+
+
+
+
+ -
+
+ Wales (WLS)
+
+
+
+
+
+
+
+
+ -6.279468297494083
+
+
+ -2.4589218491907405
+
+
+ 51.02857752005919
+
+
+ 53.525259613573844
+
+
+
+
+
+ -
+
+
+ 1995-01-01
+ 2009-12-31
+
+
+
+
+
+
+
+ Keith Jones, Spatial Evidence Manager, PKCG.
+
+
+
+
+
+
+ -
+
+
+ -
+
+
+ GIS Team
+
+
+ -
+
+
+ -
+
+
+ Tel: 0845 1306229
+
+
+
+
+ -
+
+
+ Enquiries, Countryside Council for Wales
+
+
+ Maes-y-Ffynnon
+
+
+ Bangor
+
+
+ Gwynedd
+
+
+ LL57 2DW
+
+
+ gis.helpdesk@ccw.gov.uk
+
+
+
+
+
+
+
+
+
+
+
+
+
+ -
+
+
+ -
+
+
+ http://inspire.wales.gov.uk/metadata/srv/en/main.home?uuid=be06a48d-36fa-4369-a585-7fcc319db4c0
+
+
+
+
+ -
+
+
+ http://inspire.wales.gov.uk/maps/Protected_sites/wms?request=getCapabilities
+
+
+
+
+
+
+ http://inspire.wales.gov.uk/maps/Protected_sites/wms?
+
+
+ OGC:WMS-1.3.0-http-get-map
+
+
+ Protected_sites:country_parks
+
+
+ Country Parks of Wales. Data produced by the CCW (Countryside Council for Wales).
+
+
+
+
+
+
+
+
+ -
+
+
+ -
+
+
+
+
+
+
+
+
+
+
+
+
+
+ UK GEMINI Standard version 2.0
+
+
+
+
+ 2009-07-20
+
+
+
+
+
+
+
+
+
+ Conforms to GEMINI2 2.0 draft schematron
+
+
+ true
+
+
+
+
+
+
+ -
+
+
+ Originally mapped on paper maps by Unitary Authorities. This dataset was originally put together from maps provided by Unitary Authorities and then digitised by CCW staff in the mid 1990s. Later updates were compiled from digital data supplied by unitary authorities and pdfs. Digital data captured to 1:2500 /1:10,000 (OS MasterMap has variable scale from uplands to urban areas)
+
+
+
+
+
+
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/FCSConservancyPolygons.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/FCSConservancyPolygons.xml
new file mode 100644
index 0000000..a34f8aa
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1/FCSConservancyPolygons.xml
@@ -0,0 +1,524 @@
+
+
+
+ B8A22DF4-B0DC-4F0B-A713-0CF5F8784A28
+
+
+ utf8
+
+
+ dataset
+
+
+
+
+ Geo-Information Services
+
+
+ Forestry Commission Scotland
+
+
+ Geo-Information Services Delivery Manager
+
+
+
+
+
+
+ 0131 334 0303
+
+
+
+
+
+
+ Silvan House, 231 Corstorphine Road
+
+
+ Edinburgh
+
+
+ Scotland
+
+
+ EH12 7AT
+
+
+ United Kingdom
+
+
+ geoinformationservices.scotland@forestry.gsi.gov.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+ 2012-11-30T10:51:36
+
+
+
+
+
+
+ urn:ogc:def:crs:EPSG::27700
+
+
+
+
+
+
+
+
+
+
+ FCS Conservancy Polygons
+
+
+ s_cons_pol
+
+
+ fc.s_cons_pol
+
+
+
+
+ 2004-06-06
+
+
+ creation
+
+
+
+
+
+
+ 2010-03-16
+
+
+ revision
+
+
+
+
+
+
+ Geo_Information Services
+
+
+
+
+
+
+
+
+
+ Description:
+
+This dataset depicts the five Forestry Commission Scotland Conservancy boundaries.
+
+
+
+Attributes:
+
+NAME : Conservancy Name
+ADDRESS_1 : Address
+ADDRESS_2 : Address
+ADDRESS_3 : Address
+ADDRESS_4 : Address
+POSTCODE : Postcode
+PHONE_NO : Telephone Number
+EMAIL : Email Address
+
+
+
+
+ Head of Grants & Licences
+
+
+ Forestry Commission Scotland
+
+
+ Head of Grants & Licences
+
+
+
+
+
+
+ 0131 334 0303
+
+
+
+
+
+
+ Silvan House, 231 Corstorphine Road
+
+
+ Edinburgh
+
+
+ Scotland
+
+
+ EH12 7AT
+
+
+ United Kingdom
+
+
+ cgis.scotland@forestry.gsi.gov.uk
+
+
+
+
+
+
+ owner
+
+
+
+
+
+
+ Geo-Information Services
+
+
+ Forestry Commission Scotland
+
+
+ Geo-Information Services Delivery Manager
+
+
+
+
+
+
+ 0131 334 0303
+
+
+
+
+
+
+ Silvan House, 231 Corstorphine Road
+
+
+ Edinburgh
+
+
+ Scotland
+
+
+ EH12 7AT
+
+
+ United Kingdom
+
+
+ geoinformationservices.scotland@forestry.gsi.gov.uk
+
+
+
+
+
+
+ custodian
+
+
+
+
+
+
+ Geo-Information Services
+
+
+ Forestry Commission Scotland
+
+
+ Geo-Information Services Delivery Manager
+
+
+
+
+
+
+ 0131 334 0303
+
+
+
+
+
+
+ Silvan House, 231 Corstorphine Road
+
+
+ Edinburgh
+
+
+ Scotland
+
+
+ EH12 7AT
+
+
+ United Kingdom
+
+
+ geoinformationservices.scotland@forestry.gsi.gov.uk
+
+
+
+
+
+
+ distributor
+
+
+
+
+
+
+ asNeeded
+
+
+
+
+
+
+ SDE Feature Class
+
+
+
+
+
+
+
+
+
+ administrative
+
+
+ regional
+
+
+
+
+
+
+ copyright
+
+
+ license
+
+
+ otherRestrictions
+
+
+ copyright
+
+
+ license
+
+
+ None
+
+
+
+
+
+
+ Copyright (Copyright Forestry Commission Scotland)
+
+
+
+
+
+
+
+
+
+
+
+ 10000
+
+
+
+
+
+
+ eng
+
+
+ boundaries
+
+
+ economy
+
+
+
+
+
+
+
+
+ Scotland
+
+
+
+
+
+
+
+
+ -9.229868
+
+
+ -0.705137
+
+
+ 54.513338
+
+
+ 60.866111
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Unknown
+
+
+ Unknown
+
+
+
+
+
+
+
+
+ Geo-Information Services
+
+
+ Forestry Commission Scotland
+
+
+ Geo-Information Services Delivery Manager
+
+
+
+
+
+
+ 0131 334 0303
+
+
+
+
+
+
+ Silvan House, 231 Corstorphine Road
+
+
+ Edinburgh
+
+
+ Scotland
+
+
+ EH12 7AT
+
+
+ United Kingdom
+
+
+ geoinformationservices.scotland@forestry.gsi.gov.uk
+
+
+
+
+
+
+ distributor
+
+
+
+
+
+
+ SDE Feature Class
+
+
+
+
+
+
+
+
+
+
+
+ Server=fcspatialsv5; Service=5151; User=fcproduct; Version=SDE.DEFAULT
+
+
+
+
+
+
+
+
+
+
+
+
+ http://www.forestry.gov.uk/datadownload
+
+
+
+
+
+
+
+
+
+
+
+
+ dataset
+
+
+
+
+
+
+ This dataset was derived by merging OS Boundary Line polygons together (with the exception of the boundary between north and south Fife, which was digitised by Geo-Information Services). Boundary Line is based on 1:10,000 scale mapping.
+
+
+
+
+
+
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/dataset1.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/dataset1.xml
new file mode 100644
index 0000000..3f58f0e
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1/dataset1.xml
@@ -0,0 +1,498 @@
+
+
+
+ test-dataset-1
+
+
+ eng
+
+
+ dataset
+
+
+
+
+ Lachlan Renwick
+
+
+ Scottish Natural Heritage
+
+
+ Geographic Systems and Data Coordinator
+
+
+
+
+
+
+ 01463 725000
+
+
+
+
+
+
+ Great Glen House, Leachkin Road
+
+
+ INVERNESS
+
+
+ IV3 8NW
+
+
+ United Kingdom
+
+
+ data_supply@snh.gov.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+ 2011-09-23T10:06:08
+
+
+
+
+
+
+ urn:ogc:def:crs:EPSG::27700
+
+
+
+
+
+
+
+
+
+
+ Country Parks (Scotland)
+
+
+
+
+ 2004-02
+
+
+ creation
+
+
+
+
+
+
+ 2006-07-03
+
+
+ revision
+
+
+
+
+
+
+ CPK
+
+
+
+
+
+
+
+
+
+ Parks are set up by Local Authorities to provide open-air recreation facilities close to towns and cities. [edited]
+
+
+
+
+ Lachlan Renwick
+
+
+ Scottish Natural Heritage
+
+
+ Geographic Systems & Data Coordinator
+
+
+
+
+
+
+ 01463 725000
+
+
+
+
+
+
+ Great Glen House, Leachkin Road
+
+
+ INVERNESS
+
+
+ IV3 8NW
+
+
+ United Kingdom
+
+
+ data_supply@snh.gov.uk
+
+
+
+
+
+
+ custodian
+
+
+
+
+
+
+ Lachlan Renwick
+
+
+ Scottish Natural Heritage
+
+
+ Geographic Systems & Data Coordinator
+
+
+
+
+
+
+ 01463 725000
+
+
+
+
+
+
+ Great Glen House, Leachkin Road
+
+
+ INVERNESS
+
+
+ IV3 8NW
+
+
+ United Kingdom
+
+
+ data_supply@snh.gov.uk
+
+
+
+
+
+
+ distributor
+
+
+
+
+
+
+ irregular
+
+
+
+
+
+
+ SDE Feature Class
+
+
+
+
+
+
+
+
+
+ Nature conservation
+
+
+
+
+ Government Category List
+
+
+
+
+ 2004-07-15
+
+
+ revision
+
+
+
+
+
+
+
+
+
+
+ copyright
+
+
+ otherRestrictions
+
+
+ copyright
+
+
+ otherRestrictions
+
+
+ Copyright Scottish Natural Heritage
+
+
+
+
+
+
+ Reference and PSMA Only
+
+
+ http://www.test.gov.uk/licenseurl
+
+
+
+
+
+
+
+
+
+ 5
+
+
+
+
+ eng
+
+
+ environment
+
+
+
+
+
+
+
+
+
+
+ ISO 3166
+
+
+
+
+ 2007-09-02
+
+
+ revision
+
+
+
+
+
+
+ GB-SCT
+
+
+
+
+
+
+
+
+ -8.97114288
+
+
+ 0.205857204
+
+
+ 54.529947158
+
+
+ 61.06066944
+
+
+
+
+
+
+
+ 1998
+ 2010
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ESRI Shapefile
+
+
+ Unknown
+
+
+
+
+
+
+ KML
+
+
+ 2.1
+
+
+
+
+
+
+ GML
+
+
+ 3.1.1
+
+
+
+
+
+
+
+
+ Lachlan Renwick
+
+
+ Scottish Natural Heritage
+
+
+ Geographic Systems & Data Coordinator
+
+
+
+
+
+
+ 01463 725000
+
+
+
+
+
+
+ Great Glen House, Leachkin Road
+
+
+ INVERNESS
+
+
+ IV3 8NW
+
+
+ United Kingdom
+
+
+ data_supply@snh.gov.uk
+
+
+
+
+
+
+ distributor
+
+
+
+
+
+
+ SDE Feature Class
+
+
+
+
+
+
+
+
+
+
+
+ http://www.snh.org.uk/snhi
+
+
+
+
+
+
+
+
+
+
+
+
+ https://gateway.snh.gov.uk/pls/apex_ddtdb2/f?p=101
+
+
+ Test Resource Name
+
+
+ Test Resource Description
+
+
+ test-protocol
+
+
+ download
+
+
+
+
+
+
+
+
+
+
+
+
+ dataset
+
+
+
+
+
+
+ Country Park is not a statutory designation. Countryside (Scotland) Act 1967 Section 48 gives local authorities power to assess and review the need for Country Parks in consultation with SNH.
+
+
+
+
+
+
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/error_bad_xml.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/error_bad_xml.xml
new file mode 100644
index 0000000..54a7dc1
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1/error_bad_xml.xml
@@ -0,0 +1,15 @@
+
+
+
+ test-error-bad-xml-1
+
+
+ eng
+
+
+ service
+
+
+ Service
+
+
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/error_validation.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/error_validation.xml
new file mode 100644
index 0000000..5c60965
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1/error_validation.xml
@@ -0,0 +1,293 @@
+
+
+
+ test-error-validation-1
+
+
+
+
+
+
+
+
+ Service
+
+
+
+
+
+
+
+
+ Forestry Commission Scotland
+
+
+ Geo-Information Services
+
+
+
+
+
+
+ 0131 334 0303
+
+
+
+
+
+
+
+
+
+ 231 Corstorphine Road
+
+
+ Edinburgh
+
+
+
+
+
+ EH12 7AT
+
+
+ United Kingdom
+
+
+ geoinformationservices@forestry.gsi.gov.uk
+
+
+
+
+
+
+
+
+
+
+
+ 2011-06-15T15:27:21
+
+
+
+
+
+
+ urn:ogc:def:crs:EPSG::27700
+
+
+
+
+
+
+
+
+
+
+ FCS Administrative Boundaries WMS
+
+
+ FCS_Admin_Boundaries
+
+
+
+
+ 2011-05-08
+
+
+
+
+
+
+
+
+
+ 2011-06-15
+
+
+
+
+
+
+
+
+
+ FCSADMINWMS
+
+
+
+
+
+
+ This interactive map service contains the following Forestry Commission Scotland administrative boundaries:
+
+
+
+FC Conservancy boundaries
+
+FC Forest District boundaries
+
+Woodlands In & Around Towns (WIAT)
+
+
+
+
+The layers can be switched on and off independently of each other. Please note that the Conservancy and Forest District boundaries have both been generalised in order to speed screen refresh response time.
+
+
+Forestry Commission Scotland would like to thank Scottish Natural Heritage for their co-operation in hosting this web service on behalf of FCS.
+
+
+PLEASE NOTE:
+In order to upload this Web Map Service into desktop GIS (eg. ESRI's ArcGIS), copy and paste the 'OnLine Resource' URL shown below up to, and including the '?'.
+ie:-
+http://mapgateway.snh.gov.uk/ServicesWMS/FCS_Admin_Boundaries/MapServer/WMSServer?
+
+
+
+
+
+
+
+
+ Forestry Commission Scotland
+
+
+ Geo-Information Services
+
+
+
+
+
+
+ 0131 334 0303
+
+
+
+
+
+
+
+
+
+ 231 Corstorphine Road
+
+
+ Edinburgh
+
+
+
+
+
+ EH12 7AT
+
+
+ United Kingdom
+
+
+ geoinformationservices@forestry.gsi.gov.uk
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Conservancy Boundaries
+
+
+ WIAT
+
+
+ Woodlands In & Around Towns
+
+
+ Forest District Boundaries
+
+
+
+
+
+
+
+
+
+ Copyright (Copyright Forestry Commission Scotland)
+
+
+
+
+
+
+ None
+
+
+
+
+ OGC:WMS
+
+
+
+
+
+
+ -9.22567
+
+
+ 2.69487
+
+
+ 49.833604
+
+
+ 60.86638
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://mapgateway.snh.gov.uk/ServicesWMS/FCS_Admin_Boundaries/MapServer/WMSServer?REQUEST=GetCapabilities&service=wms
+
+
+
+
+
+
+
+
+
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/service1.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/service1.xml
new file mode 100644
index 0000000..6ea0acf
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1/service1.xml
@@ -0,0 +1,347 @@
+
+
+
+ test-service-1
+
+
+ eng
+
+
+ service
+
+
+ Service
+
+
+
+
+
+
+
+ The Improvement Service
+
+
+ Gazetteer Business manager
+
+
+
+
+
+
+ 01506 775558
+
+
+ 01506 775566
+
+
+
+
+
+
+ Westerton House
+
+
+ East Mains Industrial Estate
+
+
+ Broxburn
+
+
+
+
+
+ EH52 5AU
+
+
+ Scotland
+
+
+ OSGCM@improvementservice.org.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+ 2011-09-08T16:07:32
+
+
+
+
+
+
+
+ OSGB 1936 / British National Grid (EPSG:27700)
+
+
+ EPSG
+
+
+ 7.4
+
+
+
+
+
+
+
+
+
+
+ One Scotland Address Gazetteer Web Map Service (WMS)
+
+
+
+
+ 2011-09-08
+
+
+ publication
+
+
+
+
+
+
+ This service displays its contents at larger scale than 1:10000. [edited]
+
+
+
+
+
+
+
+ The Improvement Service
+
+
+ Gazetteer Business Manager
+
+
+
+
+
+
+ 01506 775558
+
+
+ 01506 775566
+
+
+
+
+
+
+ Westerton House
+
+
+ East Mains Industrial Estate
+
+
+ Broxburn
+
+
+
+
+
+ EH52 5AU
+
+
+ Scotland
+
+
+ OSGCM@improvementservice.org.uk
+
+
+
+
+
+
+ owner
+
+
+
+
+
+
+ daily
+
+
+
+
+
+
+ Scottish National Gazetteer
+
+
+
+
+
+
+
+
+
+ Addresses
+
+
+
+
+
+
+
+ external.theme.inspire-theme
+
+
+
+
+
+
+
+
+
+ otherRestrictions
+
+
+ No restriction on public access
+
+
+
+
+
+
+ Use of the One Scotland Gazetteer data used by this this service is available to any organisation that is a member of the One Scotland Mapping Agreement. It is not currently commercially available
+
+
+ http://www.test.gov.uk/licenseurl
+
+
+
+
+ other
+
+
+
+
+
+
+ -9.099786875
+
+
+ 0.5242365625
+
+
+ 54.4764484375
+
+
+ 61.0243
+
+
+
+
+
+
+
+ 1904-06-16
+ 2004-06-16
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ image/png
+
+
+ NotApplicable
+
+
+
+
+
+
+
+
+ http://127.0.0.1:8999/wms/capabilities.xml
+
+
+ Web Map Service (WMS)
+
+
+ Link to the GetCapabilities request for this service
+
+
+ OGC:WMS-1.3.0-http-get-capabilities
+
+
+ download
+
+
+
+
+
+
+
+
+
+
+
+
+ service
+
+
+
+
+ Geographic web service
+
+
+
+
+
+
+
+
+
+
+
+
+ Technical Guidance for the implementation of INSPIRE View Services Version 3.0
+
+
+
+
+ 2011-03-21
+
+
+ publication
+
+
+
+
+
+
+ This is an INSPIRE Addresses theme focussed service that is not yet conformant to the address data specification but intends to be conformant to the service technical guidance at at: http://inspire.jrc.ec.europa.eu/documents/Network_Services/TechnicalGuidance_ViewServices_v3.0.pdf
+
+
+ false
+
+
+
+
+
+
+
+
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/service1_newer.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/service1_newer.xml
new file mode 100644
index 0000000..d2c2da7
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1/service1_newer.xml
@@ -0,0 +1,347 @@
+
+
+
+ test-service-1
+
+
+ eng
+
+
+ service
+
+
+ Service
+
+
+
+
+
+
+
+ The Improvement Service
+
+
+ Gazetteer Business manager
+
+
+
+
+
+
+ 01506 775558
+
+
+ 01506 775566
+
+
+
+
+
+
+ Westerton House
+
+
+ East Mains Industrial Estate
+
+
+ Broxburn
+
+
+
+
+
+ EH52 5AU
+
+
+ Scotland
+
+
+ OSGCM@improvementservice.org.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+ 2011-09-10T00:00:01
+
+
+
+
+
+
+
+ OSGB 1936 / British National Grid (EPSG:27700)
+
+
+ EPSG
+
+
+ 7.4
+
+
+
+
+
+
+
+
+
+
+ One Scotland Address Gazetteer Web Map Service (WMS) NEWER
+
+
+
+
+ 2011-09-08
+
+
+ publication
+
+
+
+
+
+
+ This service displays its contents at larger scale than 1:10000. [edited]
+
+
+
+
+
+
+
+ The Improvement Service
+
+
+ Gazetteer Business Manager
+
+
+
+
+
+
+ 01506 775558
+
+
+ 01506 775566
+
+
+
+
+
+
+ Westerton House
+
+
+ East Mains Industrial Estate
+
+
+ Broxburn
+
+
+
+
+
+ EH52 5AU
+
+
+ Scotland
+
+
+ OSGCM@improvementservice.org.uk
+
+
+
+
+
+
+ owner
+
+
+
+
+
+
+ daily
+
+
+
+
+
+
+ Scottish National Gazetteer
+
+
+
+
+
+
+
+
+
+ Addresses
+
+
+
+
+
+
+
+ external.theme.inspire-theme
+
+
+
+
+
+
+
+
+
+ otherRestrictions
+
+
+ No restriction on public access
+
+
+
+
+
+
+ Use of the One Scotland Gazetteer data used by this this service is available to any organisation that is a member of the One Scotland Mapping Agreement. It is not currently commercially available
+
+
+ http://www.test.gov.uk/licenseurl
+
+
+
+
+ other
+
+
+
+
+
+
+ -9.099786875
+
+
+ 0.5242365625
+
+
+ 54.4764484375
+
+
+ 61.0243
+
+
+
+
+
+
+
+ 1904-06-16
+ 2004-06-16
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ image/png
+
+
+ NotApplicable
+
+
+
+
+
+
+
+
+ http://127.0.0.1:8999/wms/capabilities.xml
+
+
+ Web Map Service (WMS)
+
+
+ Link to the GetCapabilities request for this service
+
+
+ OGC:WMS-1.3.0-http-get-capabilities
+
+
+ download
+
+
+
+
+
+
+
+
+
+
+
+
+ service
+
+
+
+
+ Geographic web service
+
+
+
+
+
+
+
+
+
+
+
+
+ Technical Guidance for the implementation of INSPIRE View Services Version 3.0
+
+
+
+
+ 2011-03-21
+
+
+ publication
+
+
+
+
+
+
+ This is an INSPIRE Addresses theme focussed service that is not yet conformant to the address data specification but intends to be conformant to the service technical guidance at at: http://inspire.jrc.ec.europa.eu/documents/Network_Services/TechnicalGuidance_ViewServices_v3.0.pdf
+
+
+ false
+
+
+
+
+
+
+
+
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/source1/same_dataset.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/source1/same_dataset.xml
new file mode 100644
index 0000000..bd2a21c
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1/source1/same_dataset.xml
@@ -0,0 +1,347 @@
+
+
+
+ test-same-dataset-1
+
+
+ eng
+
+
+ service
+
+
+ Service
+
+
+
+
+
+
+
+ The Improvement Service
+
+
+ Gazetteer Business manager
+
+
+
+
+
+
+ 01506 775558
+
+
+ 01506 775566
+
+
+
+
+
+
+ Westerton House
+
+
+ East Mains Industrial Estate
+
+
+ Broxburn
+
+
+
+
+
+ EH52 5AU
+
+
+ Scotland
+
+
+ OSGCM@improvementservice.org.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+ 2011-09-08T16:07:32
+
+
+
+
+
+
+
+ OSGB 1936 / British National Grid (EPSG:27700)
+
+
+ EPSG
+
+
+ 7.4
+
+
+
+
+
+
+
+
+
+
+ One Scotland Address Gazetteer Web Map Service (WMS)
+
+
+
+
+ 2011-09-08
+
+
+ publication
+
+
+
+
+
+
+ This service displays its contents at larger scale than 1:10000. [edited]
+
+
+
+
+
+
+
+ The Improvement Service
+
+
+ Gazetteer Business Manager
+
+
+
+
+
+
+ 01506 775558
+
+
+ 01506 775566
+
+
+
+
+
+
+ Westerton House
+
+
+ East Mains Industrial Estate
+
+
+ Broxburn
+
+
+
+
+
+ EH52 5AU
+
+
+ Scotland
+
+
+ OSGCM@improvementservice.org.uk
+
+
+
+
+
+
+ resourceProvider
+
+
+
+
+
+
+ daily
+
+
+
+
+
+
+ Scottish National Gazetteer
+
+
+
+
+
+
+
+
+
+ Addresses
+
+
+
+
+
+
+
+ external.theme.inspire-theme
+
+
+
+
+
+
+
+
+
+ otherRestrictions
+
+
+ No restriction on public access
+
+
+
+
+
+
+ Use of the One Scotland Gazetteer data used by this this service is available to any organisation that is a member of the One Scotland Mapping Agreement. It is not currently commercially available
+
+
+ http://www.test.gov.uk/licenseurl
+
+
+
+
+ other
+
+
+
+
+
+
+ -9.099786875
+
+
+ 0.5242365625
+
+
+ 54.4764484375
+
+
+ 61.0243
+
+
+
+
+
+
+
+ 1904-06-16
+ 2004-06-16
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ image/png
+
+
+ NotApplicable
+
+
+
+
+
+
+
+
+ http://sedsh13.sedsh.gov.uk/ArcGIS/services/OSG/OSG/MapServer/WMSServer?request=GetCapabilities&service=WMS
+
+
+ Web Map Service (WMS)
+
+
+ Link to the GetCapabilities request for this service
+
+
+ OGC:WMS-1.3.0-http-get-capabilities
+
+
+ download
+
+
+
+
+
+
+
+
+
+
+
+
+ service
+
+
+
+
+ Geographic web service
+
+
+
+
+
+
+
+
+
+
+
+
+ Technical Guidance for the implementation of INSPIRE View Services Version 3.0
+
+
+
+
+ 2011-03-21
+
+
+ publication
+
+
+
+
+
+
+ This is an INSPIRE Addresses theme focussed service that is not yet conformant to the address data specification but intends to be conformant to the service technical guidance at at: http://inspire.jrc.ec.europa.eu/documents/Network_Services/TechnicalGuidance_ViewServices_v3.0.pdf
+
+
+ false
+
+
+
+
+
+
+
+
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/source2/same_dataset.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/source2/same_dataset.xml
new file mode 100644
index 0000000..bd2a21c
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1/source2/same_dataset.xml
@@ -0,0 +1,347 @@
+
+
+
+ test-same-dataset-1
+
+
+ eng
+
+
+ service
+
+
+ Service
+
+
+
+
+
+
+
+ The Improvement Service
+
+
+ Gazetteer Business manager
+
+
+
+
+
+
+ 01506 775558
+
+
+ 01506 775566
+
+
+
+
+
+
+ Westerton House
+
+
+ East Mains Industrial Estate
+
+
+ Broxburn
+
+
+
+
+
+ EH52 5AU
+
+
+ Scotland
+
+
+ OSGCM@improvementservice.org.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+ 2011-09-08T16:07:32
+
+
+
+
+
+
+
+ OSGB 1936 / British National Grid (EPSG:27700)
+
+
+ EPSG
+
+
+ 7.4
+
+
+
+
+
+
+
+
+
+
+ One Scotland Address Gazetteer Web Map Service (WMS)
+
+
+
+
+ 2011-09-08
+
+
+ publication
+
+
+
+
+
+
+ This service displays its contents at larger scale than 1:10000. [edited]
+
+
+
+
+
+
+
+ The Improvement Service
+
+
+ Gazetteer Business Manager
+
+
+
+
+
+
+ 01506 775558
+
+
+ 01506 775566
+
+
+
+
+
+
+ Westerton House
+
+
+ East Mains Industrial Estate
+
+
+ Broxburn
+
+
+
+
+
+ EH52 5AU
+
+
+ Scotland
+
+
+ OSGCM@improvementservice.org.uk
+
+
+
+
+
+
+ resourceProvider
+
+
+
+
+
+
+ daily
+
+
+
+
+
+
+ Scottish National Gazetteer
+
+
+
+
+
+
+
+
+
+ Addresses
+
+
+
+
+
+
+
+ external.theme.inspire-theme
+
+
+
+
+
+
+
+
+
+ otherRestrictions
+
+
+ No restriction on public access
+
+
+
+
+
+
+ Use of the One Scotland Gazetteer data used by this this service is available to any organisation that is a member of the One Scotland Mapping Agreement. It is not currently commercially available
+
+
+ http://www.test.gov.uk/licenseurl
+
+
+
+
+ other
+
+
+
+
+
+
+ -9.099786875
+
+
+ 0.5242365625
+
+
+ 54.4764484375
+
+
+ 61.0243
+
+
+
+
+
+
+
+ 1904-06-16
+ 2004-06-16
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ image/png
+
+
+ NotApplicable
+
+
+
+
+
+
+
+
+ http://sedsh13.sedsh.gov.uk/ArcGIS/services/OSG/OSG/MapServer/WMSServer?request=GetCapabilities&service=WMS
+
+
+ Web Map Service (WMS)
+
+
+ Link to the GetCapabilities request for this service
+
+
+ OGC:WMS-1.3.0-http-get-capabilities
+
+
+ download
+
+
+
+
+
+
+
+
+
+
+
+
+ service
+
+
+
+
+ Geographic web service
+
+
+
+
+
+
+
+
+
+
+
+
+ Technical Guidance for the implementation of INSPIRE View Services Version 3.0
+
+
+
+
+ 2011-03-21
+
+
+ publication
+
+
+
+
+
+
+ This is an INSPIRE Addresses theme focussed service that is not yet conformant to the address data specification but intends to be conformant to the service technical guidance at at: http://inspire.jrc.ec.europa.eu/documents/Network_Services/TechnicalGuidance_ViewServices_v3.0.pdf
+
+
+ false
+
+
+
+
+
+
+
+
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/01_Dataset_Invalid_XSD_No_Such_Element.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/01_Dataset_Invalid_XSD_No_Such_Element.xml
new file mode 100644
index 0000000..4b114e6
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/01_Dataset_Invalid_XSD_No_Such_Element.xml
@@ -0,0 +1,636 @@
+
+
+
+ test-record-01
+
+
+
+
+
+ test-record-08
+
+
+
+
+
+ dataset
+
+
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+ Lancaster
+
+
+ Lancashire
+
+
+ LA1 4AP
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+ 2012-10-05
+
+
+ INSPIRE Implementing Rules for Metadata
+
+
+ 1.2
+
+
+
+
+
+
+ 27700
+
+
+ urn:ogc:def:crs:EPSG
+
+
+ 6.11.2
+
+
+
+
+
+
+
+
+
+
+ Test Record 01 Dataset No Such Element Name
+
+
+ Also known as xxx
+
+
+
+
+ 2011-04-08
+
+
+
+
+
+
+
+
+
+ 1300392329603
+
+
+ CEH:EIDC:
+
+
+ 1
+
+
+
+
+ R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
+
+
+
+
+ This test record should fail XSD validation, as it has an element present which is not allowed by the schema.
+
+
+
+
+
+
+
+ Morton, D
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+ Lancaster
+
+
+ Lancashire
+
+
+ LA1 4AP
+
+
+ United Kingdom
+
+
+ danm@ceh.ac.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+
+
+ Parr Section
+
+
+
+
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ resourceProvider
+
+
+
+
+
+
+
+
+
+ Not planned
+
+
+
+
+
+
+ https://gateway.ceh.ac.uk:443/smartEditor/preview/82a0f4a1-01ff-4ed1-853e-224d8404b3fd.png
+
+
+ thumbnail preview
+
+
+
+
+
+
+ Habitats and biotopes
+
+
+
+
+
+
+
+ GEMET - INSPIRE themes, version 1.0
+
+
+
+
+ 2012-10-04
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Great Britain
+
+
+ England
+
+
+ Scotland Wales
+
+
+
+
+
+
+
+
+
+ CEH Biodiversity Programme
+
+
+
+
+
+
+
+
+
+ CEH Project NEC03259
+
+
+ NERC_DDC
+
+
+ LCM2007
+
+
+
+
+
+
+
+
+
+ Test Link
+
+
+
+
+
+
+ Refer to: R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
+
+
+
+
+
+
+
+
+ Licence terms and conditions apply
+
+
+ Test Link
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+ environment
+
+
+ imageryBaseMapsEarthCover
+
+
+
+
+
+
+
+
+ ENG
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ WLS
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ SCT
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 2000-10-01
+ 2012-10-01
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+ -9.227701
+
+
+ 2.687637
+
+
+ 49.83726
+
+
+ 60.850441
+
+
+
+
+
+
+ Some text
+
+
+
+
+
+
+
+
+ GeoTIFF
+
+
+ 1.0
+
+
+
+
+
+
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Maclean Building, Benson Lane, Crowmarsh Gifford
+
+
+ Wallingford
+
+
+ Oxfordshire
+
+
+ OX10 8BB
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ distributor
+
+
+
+
+
+
+
+
+
+
+ http://www.ceh.ac.uk/LandCoverMap2007.html
+
+
+ Essential technical details
+
+
+ Link to further technical details about this data
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://www.countrysidesurvey.org.uk/
+
+
+ Countryside Survey website
+
+
+ Countryside Survey website
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://gateway.ceh.ac.uk/download?fileIdentifier=82a0f4a1-01ff-4ed1-853e-224d8404b3fd
+
+
+ Dataset download
+
+
+ Link to download this dataset
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://lasigprod.nerc-lancaster.ac.uk/arcgis/services/LandCoverMap/LCM2007_GB_1k_DOM_AGG/MapServer/WMSServer?request=GetCapabilities&service=WMS
+
+
+ Web Map Service
+
+
+ A web map service (WMS) is available for this data
+
+
+
+
+
+
+
+
+
+
+ http://cs2007.ceh.ac.uk/sites/default/files/LCM2007%20Final%20Report%20-%20vCS%20Web.pdf
+
+
+ CS Technical Report
+
+
+ Final Report for LCM2007 - the new UK Land Cover Map
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ LCM2007 uses a spatial framework based on OS MasterMap (R). MasterMap was generalised to remove unnecessary detail, then the framework was segmented according to the underlying satellite data to split areas of non-uniform landscape. The data was classified according to a parcel-based supervised maximum likelihood classification procedure. The raster products are derived from the vector products.
+
+
+
+
+
+
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/01_Dataset_Invalid_XSD_No_Such_Element_unix.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/01_Dataset_Invalid_XSD_No_Such_Element_unix.xml
new file mode 100644
index 0000000..cd0d5f2
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/01_Dataset_Invalid_XSD_No_Such_Element_unix.xml
@@ -0,0 +1,636 @@
+
+
+
+ test-record-01
+
+
+
+
+
+ test-record-08
+
+
+
+
+
+ dataset
+
+
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+ Lancaster
+
+
+ Lancashire
+
+
+ LA1 4AP
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+ 2012-10-05
+
+
+ INSPIRE Implementing Rules for Metadata
+
+
+ 1.2
+
+
+
+
+
+
+ 27700
+
+
+ urn:ogc:def:crs:EPSG
+
+
+ 6.11.2
+
+
+
+
+
+
+
+
+
+
+ Test Record 01 Dataset No Such Element Name
+
+
+ Also known as xxx
+
+
+
+
+ 2011-04-08
+
+
+
+
+
+
+
+
+
+ 1300392329603
+
+
+ CEH:EIDC:
+
+
+ 1
+
+
+
+
+ R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
+
+
+
+
+ This test record should fail XSD validation, as it has an element present which is not allowed by the schema.
+
+
+
+
+
+
+
+ Morton, D
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+ Lancaster
+
+
+ Lancashire
+
+
+ LA1 4AP
+
+
+ United Kingdom
+
+
+ danm@ceh.ac.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+
+
+ Parr Section
+
+
+
+
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ resourceProvider
+
+
+
+
+
+
+
+
+
+ Not planned
+
+
+
+
+
+
+ https://gateway.ceh.ac.uk:443/smartEditor/preview/82a0f4a1-01ff-4ed1-853e-224d8404b3fd.png
+
+
+ thumbnail preview
+
+
+
+
+
+
+ Habitats and biotopes
+
+
+
+
+
+
+
+ GEMET - INSPIRE themes, version 1.0
+
+
+
+
+ 2012-10-04
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Great Britain
+
+
+ England
+
+
+ Scotland Wales
+
+
+
+
+
+
+
+
+
+ CEH Biodiversity Programme
+
+
+
+
+
+
+
+
+
+ CEH Project NEC03259
+
+
+ NERC_DDC
+
+
+ LCM2007
+
+
+
+
+
+
+
+
+
+ Test Link
+
+
+
+
+
+
+ Refer to: R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
+
+
+
+
+
+
+
+
+ Licence terms and conditions apply
+
+
+ Test Link
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+ environment
+
+
+ imageryBaseMapsEarthCover
+
+
+
+
+
+
+
+
+ ENG
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ WLS
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ SCT
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 2000-10-01
+ 2012-10-01
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+ -9.227701
+
+
+ 2.687637
+
+
+ 49.83726
+
+
+ 60.850441
+
+
+
+
+
+
+ Some text
+
+
+
+
+
+
+
+
+ GeoTIFF
+
+
+ 1.0
+
+
+
+
+
+
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Maclean Building, Benson Lane, Crowmarsh Gifford
+
+
+ Wallingford
+
+
+ Oxfordshire
+
+
+ OX10 8BB
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ distributor
+
+
+
+
+
+
+
+
+
+
+ http://www.ceh.ac.uk/LandCoverMap2007.html
+
+
+ Essential technical details
+
+
+ Link to further technical details about this data
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://www.countrysidesurvey.org.uk/
+
+
+ Countryside Survey website
+
+
+ Countryside Survey website
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://gateway.ceh.ac.uk/download?fileIdentifier=82a0f4a1-01ff-4ed1-853e-224d8404b3fd
+
+
+ Dataset download
+
+
+ Link to download this dataset
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://lasigprod.nerc-lancaster.ac.uk/arcgis/services/LandCoverMap/LCM2007_GB_1k_DOM_AGG/MapServer/WMSServer?request=GetCapabilities&service=WMS
+
+
+ Web Map Service
+
+
+ A web map service (WMS) is available for this data
+
+
+
+
+
+
+
+
+
+
+ http://cs2007.ceh.ac.uk/sites/default/files/LCM2007%20Final%20Report%20-%20vCS%20Web.pdf
+
+
+ CS Technical Report
+
+
+ Final Report for LCM2007 - the new UK Land Cover Map
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ LCM2007 uses a spatial framework based on OS MasterMap (R). MasterMap was generalised to remove unnecessary detail, then the framework was segmented according to the underlying satellite data to split areas of non-uniform landscape. The data was classified according to a parcel-based supervised maximum likelihood classification procedure. The raster products are derived from the vector products.
+
+
+
+
+
+
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/02_Dataset_Invalid_19139_Missing_Data_Format.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/02_Dataset_Invalid_19139_Missing_Data_Format.xml
new file mode 100644
index 0000000..6c41b94
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/02_Dataset_Invalid_19139_Missing_Data_Format.xml
@@ -0,0 +1,626 @@
+
+
+
+test-record-02
+
+
+
+
+
+ test-record-08
+
+
+
+
+
+ dataset
+
+
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+ Lancaster
+
+
+ Lancashire
+
+
+ LA1 4AP
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+ 2012-10-05
+
+
+ INSPIRE Implementing Rules for Metadata
+
+
+ 1.2
+
+
+
+
+
+
+ 27700
+
+
+ urn:ogc:def:crs:EPSG
+
+
+ 6.11.2
+
+
+
+
+
+
+
+
+
+
+ Test Record 02 Dataset Missing Data Format
+
+
+ Also known as xxx
+
+
+
+
+ 2011-04-08
+
+
+
+
+
+
+
+
+
+ 1300392329603
+
+
+ CEH:EIDC:
+
+
+ 1
+
+
+
+
+ R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
+
+
+
+
+ This test record should pass XSD validation, but fail 19139 schematron, because it has no Distribution Format element.
+
+
+
+
+
+
+
+ Morton, D
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+ Lancaster
+
+
+ Lancashire
+
+
+ LA1 4AP
+
+
+ United Kingdom
+
+
+ danm@ceh.ac.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+
+
+ Parr Section
+
+
+
+
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ resourceProvider
+
+
+
+
+
+
+
+
+
+ Not planned
+
+
+
+
+
+
+ https://gateway.ceh.ac.uk:443/smartEditor/preview/82a0f4a1-01ff-4ed1-853e-224d8404b3fd.png
+
+
+ thumbnail preview
+
+
+
+
+
+
+ Habitats and biotopes
+
+
+
+
+
+
+
+ GEMET - INSPIRE themes, version 1.0
+
+
+
+
+ 2012-10-04
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Great Britain
+
+
+ England
+
+
+ Scotland Wales
+
+
+
+
+
+
+
+
+
+ CEH Biodiversity Programme
+
+
+
+
+
+
+
+
+
+ CEH Project NEC03259
+
+
+ NERC_DDC
+
+
+ LCM2007
+
+
+
+
+
+
+
+
+
+ Test Link
+
+
+
+
+
+
+ Refer to: R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
+
+
+
+
+
+
+
+
+ Licence terms and conditions apply
+
+
+ Test Link
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+ environment
+
+
+ imageryBaseMapsEarthCover
+
+
+
+
+
+
+
+
+ ENG
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ WLS
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ SCT
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 2000-10-01
+ 2012-10-01
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+ -9.227701
+
+
+ 2.687637
+
+
+ 49.83726
+
+
+ 60.850441
+
+
+
+
+
+
+ Some text
+
+
+
+
+
+
+
+
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Maclean Building, Benson Lane, Crowmarsh Gifford
+
+
+ Wallingford
+
+
+ Oxfordshire
+
+
+ OX10 8BB
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ distributor
+
+
+
+
+
+
+
+
+
+
+ http://www.ceh.ac.uk/LandCoverMap2007.html
+
+
+ Essential technical details
+
+
+ Link to further technical details about this data
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://www.countrysidesurvey.org.uk/
+
+
+ Countryside Survey website
+
+
+ Countryside Survey website
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://gateway.ceh.ac.uk/download?fileIdentifier=82a0f4a1-01ff-4ed1-853e-224d8404b3fd
+
+
+ Dataset download
+
+
+ Link to download this dataset
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://lasigprod.nerc-lancaster.ac.uk/arcgis/services/LandCoverMap/LCM2007_GB_1k_DOM_AGG/MapServer/WMSServer?request=GetCapabilities&service=WMS
+
+
+ Web Map Service
+
+
+ A web map service (WMS) is available for this data
+
+
+
+
+
+
+
+
+
+
+ http://cs2007.ceh.ac.uk/sites/default/files/LCM2007%20Final%20Report%20-%20vCS%20Web.pdf
+
+
+ CS Technical Report
+
+
+ Final Report for LCM2007 - the new UK Land Cover Map
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ LCM2007 uses a spatial framework based on OS MasterMap (R). MasterMap was generalised to remove unnecessary detail, then the framework was segmented according to the underlying satellite data to split areas of non-uniform landscape. The data was classified according to a parcel-based supervised maximum likelihood classification procedure. The raster products are derived from the vector products.
+
+
+
+
+
+
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/03_Dataset_Invalid_GEMINI_Missing_Keyword.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/03_Dataset_Invalid_GEMINI_Missing_Keyword.xml
new file mode 100644
index 0000000..197de31
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/03_Dataset_Invalid_GEMINI_Missing_Keyword.xml
@@ -0,0 +1,551 @@
+
+
+
+test-record-03
+
+
+
+
+
+ test-record-08
+
+
+
+
+
+ dataset
+
+
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+ Lancaster
+
+
+ Lancashire
+
+
+ LA1 4AP
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+ 2012-10-05
+
+
+ INSPIRE Implementing Rules for Metadata
+
+
+ 1.2
+
+
+
+
+
+
+ 27700
+
+
+ urn:ogc:def:crs:EPSG
+
+
+ 6.11.2
+
+
+
+
+
+
+
+
+
+
+ Test Record 03 Dataset Missing Keyword
+
+
+ Also known as xxx
+
+
+
+
+ 2011-04-08
+
+
+
+
+
+
+
+
+
+ 1300392329603
+
+
+ CEH:EIDC:
+
+
+ 1
+
+
+
+
+ R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
+
+
+
+
+ This test record should pass XSD validation and pass 19139 schematron, but fail GEMINI schematron because it has no Keyword element.
+
+
+
+
+
+
+
+ Morton, D
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+ Lancaster
+
+
+ Lancashire
+
+
+ LA1 4AP
+
+
+ United Kingdom
+
+
+ danm@ceh.ac.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+
+
+ Parr Section
+
+
+
+
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ resourceProvider
+
+
+
+
+
+
+
+
+
+ Not planned
+
+
+
+
+
+
+ https://gateway.ceh.ac.uk:443/smartEditor/preview/82a0f4a1-01ff-4ed1-853e-224d8404b3fd.png
+
+
+ thumbnail preview
+
+
+
+
+
+
+ Refer to: R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
+
+
+
+
+
+
+
+
+ Licence terms and conditions apply
+
+
+ Test Link
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+ environment
+
+
+ imageryBaseMapsEarthCover
+
+
+
+
+
+
+
+
+ ENG
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ WLS
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ SCT
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 2000-10-01
+ 2012-10-01
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+ -9.227701
+
+
+ 2.687637
+
+
+ 49.83726
+
+
+ 60.850441
+
+
+
+
+
+
+ Some text
+
+
+
+
+
+
+
+
+ GeoTIFF
+
+
+ 1.0
+
+
+
+
+
+
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Maclean Building, Benson Lane, Crowmarsh Gifford
+
+
+ Wallingford
+
+
+ Oxfordshire
+
+
+ OX10 8BB
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ distributor
+
+
+
+
+
+
+
+
+
+
+ http://www.ceh.ac.uk/LandCoverMap2007.html
+
+
+ Essential technical details
+
+
+ Link to further technical details about this data
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://www.countrysidesurvey.org.uk/
+
+
+ Countryside Survey website
+
+
+ Countryside Survey website
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://gateway.ceh.ac.uk/download?fileIdentifier=82a0f4a1-01ff-4ed1-853e-224d8404b3fd
+
+
+ Dataset download
+
+
+ Link to download this dataset
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://lasigprod.nerc-lancaster.ac.uk/arcgis/services/LandCoverMap/LCM2007_GB_1k_DOM_AGG/MapServer/WMSServer?request=GetCapabilities&service=WMS
+
+
+ Web Map Service
+
+
+ A web map service (WMS) is available for this data
+
+
+
+
+
+
+
+
+
+
+ http://cs2007.ceh.ac.uk/sites/default/files/LCM2007%20Final%20Report%20-%20vCS%20Web.pdf
+
+
+ CS Technical Report
+
+
+ Final Report for LCM2007 - the new UK Land Cover Map
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ LCM2007 uses a spatial framework based on OS MasterMap (R). MasterMap was generalised to remove unnecessary detail, then the framework was segmented according to the underlying satellite data to split areas of non-uniform landscape. The data was classified according to a parcel-based supervised maximum likelihood classification procedure. The raster products are derived from the vector products.
+
+
+
+
+
+
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/04_Dataset_Valid.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/04_Dataset_Valid.xml
new file mode 100644
index 0000000..8fbb4b2
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/04_Dataset_Valid.xml
@@ -0,0 +1,637 @@
+
+
+
+test-record-04
+
+
+
+
+
+ test-record-08
+
+
+
+
+
+ dataset
+
+
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+ Lancaster
+
+
+ Lancashire
+
+
+ LA1 4AP
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+ 2012-10-05
+
+
+ INSPIRE Implementing Rules for Metadata
+
+
+ 1.2
+
+
+
+
+
+
+ 27700
+
+
+ urn:ogc:def:crs:EPSG
+
+
+ 6.11.2
+
+
+
+
+
+
+
+
+
+
+ Test Record 04 Dataset Valid
+
+
+ Also known as xxx
+
+
+
+
+ 2011-04-08
+
+
+
+
+
+
+
+
+
+ 1300392329603
+
+
+ CEH:EIDC:
+
+
+ 1
+
+
+
+
+ R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
+
+
+
+
+ This test record should pass all validation.
+
+
+
+
+
+
+
+ Morton, D
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+ Lancaster
+
+
+ Lancashire
+
+
+ LA1 4AP
+
+
+ United Kingdom
+
+
+ danm@ceh.ac.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+
+
+ Parr Section
+
+
+
+
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ resourceProvider
+
+
+
+
+
+
+
+
+
+ Not planned
+
+
+
+
+
+
+ https://gateway.ceh.ac.uk:443/smartEditor/preview/82a0f4a1-01ff-4ed1-853e-224d8404b3fd.png
+
+
+ thumbnail preview
+
+
+
+
+
+
+ Habitats and biotopes
+
+
+
+
+
+
+
+ GEMET - INSPIRE themes, version 1.0
+
+
+
+
+ 2012-10-04
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Great Britain
+
+
+ England
+
+
+ Scotland Wales
+
+
+
+
+
+
+
+
+
+ CEH Biodiversity Programme
+
+
+
+
+
+
+
+
+
+ CEH Project NEC03259
+
+
+ NERC_DDC
+
+
+ LCM2007
+
+
+
+
+
+
+
+
+
+ Test Link
+
+
+
+
+
+
+
+ Refer to: R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
+
+
+
+
+
+
+
+
+ Licence terms and conditions apply
+
+
+ Test Link
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+ environment
+
+
+ imageryBaseMapsEarthCover
+
+
+
+
+
+
+
+
+ ENG
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ WLS
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ SCT
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 2000-10-01
+ 2012-10-01
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+ -9.227701
+
+
+ 2.687637
+
+
+ 49.83726
+
+
+ 60.850441
+
+
+
+
+
+
+ Some text
+
+
+
+
+
+
+
+
+ GeoTIFF
+
+
+ 1.0
+
+
+
+
+
+
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Maclean Building, Benson Lane, Crowmarsh Gifford
+
+
+ Wallingford
+
+
+ Oxfordshire
+
+
+ OX10 8BB
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ distributor
+
+
+
+
+
+
+
+
+
+
+ http://www.ceh.ac.uk/LandCoverMap2007.html
+
+
+ Essential technical details
+
+
+ Link to further technical details about this data
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://www.countrysidesurvey.org.uk/
+
+
+ Countryside Survey website
+
+
+ Countryside Survey website
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://gateway.ceh.ac.uk/download?fileIdentifier=82a0f4a1-01ff-4ed1-853e-224d8404b3fd
+
+
+ Dataset download
+
+
+ Link to download this dataset
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://lasigprod.nerc-lancaster.ac.uk/arcgis/services/LandCoverMap/LCM2007_GB_1k_DOM_AGG/MapServer/WMSServer?request=GetCapabilities&service=WMS
+
+
+ Web Map Service
+
+
+ A web map service (WMS) is available for this data
+
+
+
+
+
+
+
+
+
+
+ http://cs2007.ceh.ac.uk/sites/default/files/LCM2007%20Final%20Report%20-%20vCS%20Web.pdf
+
+
+ CS Technical Report
+
+
+ Final Report for LCM2007 - the new UK Land Cover Map
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ LCM2007 uses a spatial framework based on OS MasterMap (R). MasterMap was generalised to remove unnecessary detail, then the framework was segmented according to the underlying satellite data to split areas of non-uniform landscape. The data was classified according to a parcel-based supervised maximum likelihood classification procedure. The raster products are derived from the vector products.
+
+
+
+
+
+
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/05_Series_Invalid_XSD_No_Such_Element.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/05_Series_Invalid_XSD_No_Such_Element.xml
new file mode 100644
index 0000000..14e53ed
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/05_Series_Invalid_XSD_No_Such_Element.xml
@@ -0,0 +1,594 @@
+
+
+
+ test-record-05
+
+
+
+
+
+
+
+
+ series
+
+
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+ Lancaster
+
+
+ Lancashire
+
+
+ LA1 4AP
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+ 2012-10-05
+
+
+ INSPIRE Implementing Rules for Metadata
+
+
+ 1.2
+
+
+
+
+
+
+ 27700
+
+
+ urn:ogc:def:crs:EPSG
+
+
+ 6.11.2
+
+
+
+
+
+
+
+
+
+
+ Test Record 05 Series No Such Element Name
+
+
+ Also known as xxx
+
+
+
+
+ 2011-04-08
+
+
+
+
+
+
+
+
+
+ 1400392329603
+
+
+ CEH:EIDC:
+
+
+ 1
+
+
+
+
+ R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
+
+
+
+
+ This test record should fail XSD validation, as it has an element present which is not allowed by the schema.
+
+
+
+
+
+
+
+ Morton, D
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+ Lancaster
+
+
+ Lancashire
+
+
+ LA1 4AP
+
+
+ United Kingdom
+
+
+ danm@ceh.ac.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+
+
+ Parr Section
+
+
+
+
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ resourceProvider
+
+
+
+
+
+
+
+
+
+ Not planned
+
+
+
+
+
+
+ https://gateway.ceh.ac.uk:443/smartEditor/preview/82a0f4a1-01ff-4ed1-853e-224d8404b3fd.png
+
+
+ thumbnail preview
+
+
+
+
+
+
+ Habitats and biotopes
+
+
+
+
+
+
+
+ GEMET - INSPIRE themes, version 1.0
+
+
+
+
+ 2012-10-04
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Great Britain
+
+
+ England
+
+
+ Scotland Wales
+
+
+
+
+
+
+
+
+
+ CEH Biodiversity Programme
+
+
+
+
+
+
+
+
+
+ CEH Project NEC03259
+
+
+ NERC_DDC
+
+
+ LCM2007
+
+
+
+
+
+
+
+
+
+ Test Link
+
+
+
+
+
+
+ Refer to: R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
+
+
+
+
+
+
+
+
+ Licence terms and conditions apply
+
+
+ Test Link
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+ environment
+
+
+ imageryBaseMapsEarthCover
+
+
+
+
+
+
+
+
+ ENG
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ WLS
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ SCT
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 2000-10-01
+ 2012-10-01
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+ -9.227701
+
+
+ 2.687637
+
+
+ 49.83726
+
+
+ 60.850441
+
+
+
+
+
+
+ Some text
+
+
+
+
+
+
+
+
+ GeoTIFF
+
+
+ 1.0
+
+
+
+
+
+
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Maclean Building, Benson Lane, Crowmarsh Gifford
+
+
+ Wallingford
+
+
+ Oxfordshire
+
+
+ OX10 8BB
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ distributor
+
+
+
+
+
+
+
+
+
+
+ http://www.ceh.ac.uk/LandCoverMap2007.html
+
+
+ Essential technical details
+
+
+ Link to further technical details about this data
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://www.countrysidesurvey.org.uk/
+
+
+ Countryside Survey website
+
+
+ Countryside Survey website
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://cs2007.ceh.ac.uk/sites/default/files/LCM2007%20Final%20Report%20-%20vCS%20Web.pdf
+
+
+ CS Technical Report
+
+
+ Final Report for LCM2007 - the new UK Land Cover Map
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ LCM2007 uses a spatial framework based on OS MasterMap (R). MasterMap was generalised to remove unnecessary detail, then the framework was segmented according to the underlying satellite data to split areas of non-uniform landscape. The data was classified according to a parcel-based supervised maximum likelihood classification procedure. The raster products are derived from the vector products.
+
+
+
+
+
+
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/06_Series_Invalid_19139_Missing_Data_Format.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/06_Series_Invalid_19139_Missing_Data_Format.xml
new file mode 100644
index 0000000..7bf3ce3
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/06_Series_Invalid_19139_Missing_Data_Format.xml
@@ -0,0 +1,584 @@
+
+
+
+test-record-06
+
+
+
+
+
+
+
+
+ series
+
+
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+ Lancaster
+
+
+ Lancashire
+
+
+ LA1 4AP
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+ 2012-10-05
+
+
+ INSPIRE Implementing Rules for Metadata
+
+
+ 1.2
+
+
+
+
+
+
+ 27700
+
+
+ urn:ogc:def:crs:EPSG
+
+
+ 6.11.2
+
+
+
+
+
+
+
+
+
+
+ Test Record 06 Series Missing Data Format
+
+
+ Also known as xxx
+
+
+
+
+ 2011-04-08
+
+
+
+
+
+
+
+
+
+ 1400392329603
+
+
+ CEH:EIDC:
+
+
+ 1
+
+
+
+
+ R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
+
+
+
+
+ This test record should pass XSD validation, but fail 19139 schematron, because it has no Distribution Format element.
+
+
+
+
+
+
+
+ Morton, D
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+ Lancaster
+
+
+ Lancashire
+
+
+ LA1 4AP
+
+
+ United Kingdom
+
+
+ danm@ceh.ac.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+
+
+ Parr Section
+
+
+
+
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ resourceProvider
+
+
+
+
+
+
+
+
+
+ Not planned
+
+
+
+
+
+
+ https://gateway.ceh.ac.uk:443/smartEditor/preview/82a0f4a1-01ff-4ed1-853e-224d8404b3fd.png
+
+
+ thumbnail preview
+
+
+
+
+
+
+ Habitats and biotopes
+
+
+
+
+
+
+
+ GEMET - INSPIRE themes, version 1.0
+
+
+
+
+ 2012-10-04
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Great Britain
+
+
+ England
+
+
+ Scotland Wales
+
+
+
+
+
+
+
+
+
+ CEH Biodiversity Programme
+
+
+
+
+
+
+
+
+
+ CEH Project NEC03259
+
+
+ NERC_DDC
+
+
+ LCM2007
+
+
+
+
+
+
+
+
+
+ Test Link
+
+
+
+
+
+
+ Refer to: R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
+
+
+
+
+
+
+
+
+ Licence terms and conditions apply
+
+
+ Test Link
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+ environment
+
+
+ imageryBaseMapsEarthCover
+
+
+
+
+
+
+
+
+ ENG
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ WLS
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ SCT
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 2000-10-01
+ 2012-10-01
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+ -9.227701
+
+
+ 2.687637
+
+
+ 49.83726
+
+
+ 60.850441
+
+
+
+
+
+
+ Some text
+
+
+
+
+
+
+
+
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Maclean Building, Benson Lane, Crowmarsh Gifford
+
+
+ Wallingford
+
+
+ Oxfordshire
+
+
+ OX10 8BB
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ distributor
+
+
+
+
+
+
+
+
+
+
+ http://www.ceh.ac.uk/LandCoverMap2007.html
+
+
+ Essential technical details
+
+
+ Link to further technical details about this data
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://www.countrysidesurvey.org.uk/
+
+
+ Countryside Survey website
+
+
+ Countryside Survey website
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://cs2007.ceh.ac.uk/sites/default/files/LCM2007%20Final%20Report%20-%20vCS%20Web.pdf
+
+
+ CS Technical Report
+
+
+ Final Report for LCM2007 - the new UK Land Cover Map
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ LCM2007 uses a spatial framework based on OS MasterMap (R). MasterMap was generalised to remove unnecessary detail, then the framework was segmented according to the underlying satellite data to split areas of non-uniform landscape. The data was classified according to a parcel-based supervised maximum likelihood classification procedure. The raster products are derived from the vector products.
+
+
+
+
+
+
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/07_Series_Invalid_GEMINI_Missing_Keyword.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/07_Series_Invalid_GEMINI_Missing_Keyword.xml
new file mode 100644
index 0000000..34aaeae
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/07_Series_Invalid_GEMINI_Missing_Keyword.xml
@@ -0,0 +1,509 @@
+
+
+
+test-record-07
+
+
+
+
+
+
+
+
+ series
+
+
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+ Lancaster
+
+
+ Lancashire
+
+
+ LA1 4AP
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+ 2012-10-05
+
+
+ INSPIRE Implementing Rules for Metadata
+
+
+ 1.2
+
+
+
+
+
+
+ 27700
+
+
+ urn:ogc:def:crs:EPSG
+
+
+ 6.11.2
+
+
+
+
+
+
+
+
+
+
+ Test Record 07 Series Missing Keyword
+
+
+ Also known as xxx
+
+
+
+
+ 2011-04-08
+
+
+
+
+
+
+
+
+
+ 1300392329603
+
+
+ CEH:EIDC:
+
+
+ 1
+
+
+
+
+ R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
+
+
+
+
+ This test record should pass XSD validation and pass 19139 schematron, but fail GEMINI schematron because it has no Keyword element.
+
+
+
+
+
+
+
+ Morton, D
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+ Lancaster
+
+
+ Lancashire
+
+
+ LA1 4AP
+
+
+ United Kingdom
+
+
+ danm@ceh.ac.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+
+
+ Parr Section
+
+
+
+
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ resourceProvider
+
+
+
+
+
+
+
+
+
+ Not planned
+
+
+
+
+
+
+ https://gateway.ceh.ac.uk:443/smartEditor/preview/82a0f4a1-01ff-4ed1-853e-224d8404b3fd.png
+
+
+ thumbnail preview
+
+
+
+
+
+
+ Refer to: R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
+
+
+
+
+
+
+
+
+ Licence terms and conditions apply
+
+
+ Test Link
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+ environment
+
+
+ imageryBaseMapsEarthCover
+
+
+
+
+
+
+
+
+ ENG
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ WLS
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ SCT
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 2000-10-01
+ 2012-10-01
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+ -9.227701
+
+
+ 2.687637
+
+
+ 49.83726
+
+
+ 60.850441
+
+
+
+
+
+
+ Some text
+
+
+
+
+
+
+
+
+ GeoTIFF
+
+
+ 1.0
+
+
+
+
+
+
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Maclean Building, Benson Lane, Crowmarsh Gifford
+
+
+ Wallingford
+
+
+ Oxfordshire
+
+
+ OX10 8BB
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ distributor
+
+
+
+
+
+
+
+
+
+
+ http://www.ceh.ac.uk/LandCoverMap2007.html
+
+
+ Essential technical details
+
+
+ Link to further technical details about this data
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://www.countrysidesurvey.org.uk/
+
+
+ Countryside Survey website
+
+
+ Countryside Survey website
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://cs2007.ceh.ac.uk/sites/default/files/LCM2007%20Final%20Report%20-%20vCS%20Web.pdf
+
+
+ CS Technical Report
+
+
+ Final Report for LCM2007 - the new UK Land Cover Map
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ LCM2007 uses a spatial framework based on OS MasterMap (R). MasterMap was generalised to remove unnecessary detail, then the framework was segmented according to the underlying satellite data to split areas of non-uniform landscape. The data was classified according to a parcel-based supervised maximum likelihood classification procedure. The raster products are derived from the vector products.
+
+
+
+
+
+
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/08_Series_Valid.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/08_Series_Valid.xml
new file mode 100644
index 0000000..ef6872d
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/08_Series_Valid.xml
@@ -0,0 +1,595 @@
+
+
+
+test-record-08
+
+
+
+
+
+
+
+
+ series
+
+
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+ Lancaster
+
+
+ Lancashire
+
+
+ LA1 4AP
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+ 2012-10-05
+
+
+ INSPIRE Implementing Rules for Metadata
+
+
+ 1.2
+
+
+
+
+
+
+ 27700
+
+
+ urn:ogc:def:crs:EPSG
+
+
+ 6.11.2
+
+
+
+
+
+
+
+
+
+
+ Test Record 08 Series Valid
+
+
+ Also known as xxx
+
+
+
+
+ 2011-04-08
+
+
+
+
+
+
+
+
+
+ 1300392329603
+
+
+ CEH:EIDC:
+
+
+ 1
+
+
+
+
+ R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
+
+
+
+
+ This test Series record should pass all validation.
+
+
+
+
+
+
+
+ Morton, D
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+ Lancaster
+
+
+ Lancashire
+
+
+ LA1 4AP
+
+
+ United Kingdom
+
+
+ danm@ceh.ac.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+
+
+ Parr Section
+
+
+
+
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ resourceProvider
+
+
+
+
+
+
+
+
+
+ Not planned
+
+
+
+
+
+
+ https://gateway.ceh.ac.uk:443/smartEditor/preview/82a0f4a1-01ff-4ed1-853e-224d8404b3fd.png
+
+
+ thumbnail preview
+
+
+
+
+
+
+ Habitats and biotopes
+
+
+
+
+
+
+
+ GEMET - INSPIRE themes, version 1.0
+
+
+
+
+ 2012-10-04
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Great Britain
+
+
+ England
+
+
+ Scotland Wales
+
+
+
+
+
+
+
+
+
+ CEH Biodiversity Programme
+
+
+
+
+
+
+
+
+
+ CEH Project NEC03259
+
+
+ NERC_DDC
+
+
+ LCM2007
+
+
+
+
+
+
+
+
+
+ Test Link
+
+
+
+
+
+
+
+ Refer to: R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
+
+
+
+
+
+
+
+
+ Licence terms and conditions apply
+
+
+ Test Link
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+ environment
+
+
+ imageryBaseMapsEarthCover
+
+
+
+
+
+
+
+
+ ENG
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ WLS
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ SCT
+
+
+ ISO 3166
+
+
+ 2006, edition 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 2000-10-01
+ 2012-10-01
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+ -9.227701
+
+
+ 2.687637
+
+
+ 49.83726
+
+
+ 60.850441
+
+
+
+
+
+
+ Some text
+
+
+
+
+
+
+
+
+ GeoTIFF
+
+
+ 1.0
+
+
+
+
+
+
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Maclean Building, Benson Lane, Crowmarsh Gifford
+
+
+ Wallingford
+
+
+ Oxfordshire
+
+
+ OX10 8BB
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ distributor
+
+
+
+
+
+
+
+
+
+
+ http://www.ceh.ac.uk/LandCoverMap2007.html
+
+
+ Essential technical details
+
+
+ Link to further technical details about this data
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://www.countrysidesurvey.org.uk/
+
+
+ Countryside Survey website
+
+
+ Countryside Survey website
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://cs2007.ceh.ac.uk/sites/default/files/LCM2007%20Final%20Report%20-%20vCS%20Web.pdf
+
+
+ CS Technical Report
+
+
+ Final Report for LCM2007 - the new UK Land Cover Map
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ LCM2007 uses a spatial framework based on OS MasterMap (R). MasterMap was generalised to remove unnecessary detail, then the framework was segmented according to the underlying satellite data to split areas of non-uniform landscape. The data was classified according to a parcel-based supervised maximum likelihood classification procedure. The raster products are derived from the vector products.
+
+
+
+
+
+
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/09_Service_Invalid_No_Such_Element.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/09_Service_Invalid_No_Such_Element.xml
new file mode 100644
index 0000000..1277f12
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/09_Service_Invalid_No_Such_Element.xml
@@ -0,0 +1,537 @@
+
+
+
+test-record-09
+
+
+
+
+
+
+
+
+service
+
+
+
+
+Claire Wood
+
+
+Centre for Ecology & Hydrology
+
+
+
+
+
+
+Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+Lancaster
+
+
+Lancashire
+
+
+LA1 4AP
+
+
+United Kingdom
+
+
+enquiries@ceh.ac.uk
+
+
+
+
+
+
+pointOfContact
+
+
+
+
+2012-10-05
+
+
+NERC profile of ISO19115:2003
+
+
+2003(E)
+
+
+
+
+
+
+27700
+
+
+urn:ogc:def:crs:EPSG
+
+
+6.11.2
+
+
+
+
+
+
+
+
+
+
+Test Record 09 Service Invalid No Such Element
+
+
+Also known as xxx
+
+
+
+
+2011-04-08
+
+
+
+
+
+
+
+
+
+This test Service record should fail XSD validation, as it contains an element not allowed under the schema.
+
+
+
+
+Dan Morton
+
+
+Centre for Ecology & Hydrology
+
+
+LA1 4AP
+
+
+
+
+
+
+Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+Lancaster
+
+
+Lancashire
+
+
+LA1 4AP
+
+
+United Kingdom
+
+
+enquiries@ceh.ac.uk
+
+
+
+
+
+
+pointOfContact
+
+
+
+
+
+
+Parr Section
+
+
+
+
+
+
+United Kingdom
+
+
+enquiries@ceh.ac.uk
+
+
+
+
+
+
+resourceProvider
+
+
+
+
+
+
+
+
+
+Not Planned
+
+
+
+
+
+
+https://gateway.ceh.ac.uk:443/smartEditor/preview/848fc7db-f8a8-4804-a5ec-6876a14d0a1a.png
+
+
+preview thumbnail
+
+
+
+
+
+
+Land cover
+
+
+Land use
+
+
+
+
+
+
+
+GEMET - INSPIRE themes, version 1.0
+
+
+
+
+2008-06-01
+
+
+
+
+
+
+
+
+
+
+
+
+
+CEH Biodiversity Programme
+
+
+CEH Project NEC03259
+
+
+
+
+
+
+
+
+
+NERC_DDC
+
+
+
+
+
+
+
+
+
+infoMapAccessService
+
+
+
+
+
+
+
+Commission Regulation (EC) No 1205/2008 of 3 December 2008 implementing Directive 2007/2/EC of the European Parliament and of the Council as regards Metadata
+
+
+
+
+2008-12-03
+
+
+
+
+
+
+
+
+
+
+
+
+
+Great Britain
+
+
+
+
+
+
+
+
+
+Test Link
+
+
+
+
+
+
+Refer to: R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
+
+
+
+
+
+
+
+
+This Web Mapping Service is made available for use subject to the Terms and Conditions of the CEH Information Gateway (https://gateway.ceh.ac.uk/disclaimer).
+
+The following acknowledgements and copyright notices (where applicable), shall, unless otherwise stated, be used on all copies of the Web Map Service, publications and reports, including but not limited to, use in presentations to any audience.
+
+LCM2007 © and database right NERC (CEH) 2011. All rights reserved. Contains Ordnance Survey data © Crown copyright and database right 2007.
+
+Geographical area - Acknowledgements
+The following datasets have been used in the derivation of LCM2007 25m raster and LCM2007 1km Dominant Coverage and LCM2007 1km Percent of Total Coverage:
+GB - Landsat-TM5 satellite imagery © <Satellite/Ground station operator> 2007. Distributed by Eurimage.
+GB - IRS-LISS3 satellite imagery supplied by European Space Agency © Euromap, Space Imaging and Antrix Corporation Limited.
+GB - SPOT-4 and SPOT-5 satellite imagery supplied by European Space Agency © Spot Image and Centre National D’Etudes Spatiales (CNES).
+GB - AWIFS satellite imagery © Antrix Corporation Limited, distributed by Euromap.
+GB - Contains Ordnance Survey mapping data © Crown copyright and database right 2007.
+GB - Digital elevation data © Intermap Technologies Inc. or its suppliers 2003.
+England and Wales - Soils data for England and Wales © Cranfield University (NSRI) and for the Controller of HMSO. 2011.
+England and Wales - Office for National Statistics data © Crown Copyright and database right. Contains Ordnance Survey data © Crown copyright and database right 2001.
+England - Boundaries from Rural Payments Agency © Crown copyright and database right and/or © third party licensors.
+Wales - Boundaries from Welsh Government, Department of Rural Affairs © Crown Copyright and database right and/or © third party licensors.
+Scotland - Boundaries from Scottish Government © Crown Copyright and database right and/or © third party licensors.
+Scotland - SSKIB derived pH for "semi-natural" soils for upper horizon for dominant soil © The James Hutton Institute 2010.
+Scotland - Land Cover of Scotland dataset, Crown Copyright 1992. It shall not be reproduced in any form whatever without the permission of The Controller of Her Majesty’s Stationery Office. Reproduced from OS Pathfinder Series with the permission of the Controller of HMSO. © Crown copyright 1992.
+Scotland - Scottish Government boundaries © Crown Copyright and database right and/or © third party licensors 2004. All rights reserved.
+
+
+The following copyright notice should be placed on all copies of information or images derived from the Web Service:
+[Information] or [Images] based upon LCM2007 © NERC (CEH) 2011. Contains Ordnance Survey data © Crown Copyright 2007. © third party licensors.
+
+
+Test Link
+
+
+
+
+view
+
+
+
+
+
+
+true
+
+
+-9.23
+
+
+2.69
+
+
+49.84
+
+
+60.85
+
+
+
+
+
+
+
+
+GetMap
+
+
+CEH:EIDC:#1300181654668
+
+
+
+
+
+
+GetMap
+
+
+https://gateway.ceh.ac.uk/soapServices/CSWStartup?Service=CSW&Request=GetRecordById&Version=2.0.2&outputSchema=http://www.isotc211.org/2005/gmd&elementSetname=full&id=337f9dea-726e-40c7-9f9b-e269911c9db6
+
+
+
+
+
+
+
+
+
+GetCapabilities
+
+
+
+
+
+
+
+http://lasigpublic.nerc-lancaster.ac.uk/ArcGIS/services/LandCoverMap/LCM2007_GB_1k_DOM_TAR/MapServer/WMSServer
+
+
+
+
+
+
+
+
+GetMap
+
+
+
+
+
+
+
+http://lasigpublic.nerc-lancaster.ac.uk/ArcGIS/services/LandCoverMap/LCM2007_GB_1k_DOM_TAR/MapServer/WMSServer
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+png
+
+
+unknown
+
+
+
+
+
+
+
+
+Centre for Ecology & Hydrology
+
+
+
+
+
+
+Maclean Building, Benson Lane, Crowmarsh Gifford
+
+
+Wallingford
+
+
+Oxfordshire
+
+
+OX10 8BB
+
+
+United Kingdom
+
+
+enquiries@ceh.ac.uk
+
+
+
+
+
+
+distributor
+
+
+
+
+
+
+
+
+
+
+http://lasigpublic.nerc-lancaster.ac.uk/ArcGIS/services/LandCoverMap/LCM2007_GB_1k_DOM_TAR/MapServer/WMSServer?request=getCapabilities&service=WMS
+
+
+WMS Service
+
+
+GetCapabilities for this service
+
+
+
+
+
+
+
+
+
+
+http://cs2007.ceh.ac.uk/sites/default/files/LCM2007%20Final%20Report%20-%20vCS%20Web.pdf
+
+
+CS Technical Report
+
+
+Final Report for LCM2007 - the new UK Land Cover Map
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Land Cover Map 2007
+
+
+
+
+
+
+
+
+The service is based upon Land Cover Map 2007
+
+
+
+
+
+
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/10_Service_Invalid_19139_Level_Description.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/10_Service_Invalid_19139_Level_Description.xml
new file mode 100644
index 0000000..f3522a1
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/10_Service_Invalid_19139_Level_Description.xml
@@ -0,0 +1,530 @@
+
+
+
+test-record-10
+
+
+
+
+
+
+
+
+service
+
+
+
+
+Claire Wood
+
+
+Centre for Ecology & Hydrology
+
+
+
+
+
+
+Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+Lancaster
+
+
+Lancashire
+
+
+LA1 4AP
+
+
+United Kingdom
+
+
+enquiries@ceh.ac.uk
+
+
+
+
+
+
+pointOfContact
+
+
+
+
+2012-10-05
+
+
+NERC profile of ISO19115:2003
+
+
+2003(E)
+
+
+
+
+
+
+27700
+
+
+urn:ogc:def:crs:EPSG
+
+
+6.11.2
+
+
+
+
+
+
+
+
+
+
+Test Record 10 Service Invalid Missing Data Quality Info Level Description
+
+
+Also known as xxx
+
+
+
+
+2011-04-08
+
+
+
+
+
+
+
+
+
+This test Service record should pass XSD validation, but fail 19139 constraint validation, as it is a Service record which does not have a Level Description element in the dataQualityInfo.
+
+
+
+
+Dan Morton
+
+
+Centre for Ecology & Hydrology
+
+
+LA1 4AP
+
+
+
+
+
+
+Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+Lancaster
+
+
+Lancashire
+
+
+LA1 4AP
+
+
+United Kingdom
+
+
+enquiries@ceh.ac.uk
+
+
+
+
+
+
+pointOfContact
+
+
+
+
+
+
+Parr Section
+
+
+
+
+
+
+United Kingdom
+
+
+enquiries@ceh.ac.uk
+
+
+
+
+
+
+resourceProvider
+
+
+
+
+
+
+
+
+
+Not Planned
+
+
+
+
+
+
+https://gateway.ceh.ac.uk:443/smartEditor/preview/848fc7db-f8a8-4804-a5ec-6876a14d0a1a.png
+
+
+preview thumbnail
+
+
+
+
+
+
+Land cover
+
+
+Land use
+
+
+
+
+
+
+
+GEMET - INSPIRE themes, version 1.0
+
+
+
+
+2008-06-01
+
+
+
+
+
+
+
+
+
+
+
+
+
+CEH Biodiversity Programme
+
+
+CEH Project NEC03259
+
+
+
+
+
+
+
+
+
+NERC_DDC
+
+
+
+
+
+
+
+
+
+infoMapAccessService
+
+
+
+
+
+
+
+Commission Regulation (EC) No 1205/2008 of 3 December 2008 implementing Directive 2007/2/EC of the European Parliament and of the Council as regards Metadata
+
+
+
+
+2008-12-03
+
+
+
+
+
+
+
+
+
+
+
+
+
+Great Britain
+
+
+
+
+
+
+
+
+
+Test Link
+
+
+
+
+
+
+Refer to: R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
+
+
+
+
+
+
+
+
+This Web Mapping Service is made available for use subject to the Terms and Conditions of the CEH Information Gateway (https://gateway.ceh.ac.uk/disclaimer).
+
+The following acknowledgements and copyright notices (where applicable), shall, unless otherwise stated, be used on all copies of the Web Map Service, publications and reports, including but not limited to, use in presentations to any audience.
+
+LCM2007 © and database right NERC (CEH) 2011. All rights reserved. Contains Ordnance Survey data © Crown copyright and database right 2007.
+
+Geographical area - Acknowledgements
+The following datasets have been used in the derivation of LCM2007 25m raster and LCM2007 1km Dominant Coverage and LCM2007 1km Percent of Total Coverage:
+GB - Landsat-TM5 satellite imagery © <Satellite/Ground station operator> 2007. Distributed by Eurimage.
+GB - IRS-LISS3 satellite imagery supplied by European Space Agency © Euromap, Space Imaging and Antrix Corporation Limited.
+GB - SPOT-4 and SPOT-5 satellite imagery supplied by European Space Agency © Spot Image and Centre National D’Etudes Spatiales (CNES).
+GB - AWIFS satellite imagery © Antrix Corporation Limited, distributed by Euromap.
+GB - Contains Ordnance Survey mapping data © Crown copyright and database right 2007.
+GB - Digital elevation data © Intermap Technologies Inc. or its suppliers 2003.
+England and Wales - Soils data for England and Wales © Cranfield University (NSRI) and for the Controller of HMSO. 2011.
+England and Wales - Office for National Statistics data © Crown Copyright and database right. Contains Ordnance Survey data © Crown copyright and database right 2001.
+England - Boundaries from Rural Payments Agency © Crown copyright and database right and/or © third party licensors.
+Wales - Boundaries from Welsh Government, Department of Rural Affairs © Crown Copyright and database right and/or © third party licensors.
+Scotland - Boundaries from Scottish Government © Crown Copyright and database right and/or © third party licensors.
+Scotland - SSKIB derived pH for "semi-natural" soils for upper horizon for dominant soil © The James Hutton Institute 2010.
+Scotland - Land Cover of Scotland dataset, Crown Copyright 1992. It shall not be reproduced in any form whatever without the permission of The Controller of Her Majesty’s Stationery Office. Reproduced from OS Pathfinder Series with the permission of the Controller of HMSO. © Crown copyright 1992.
+Scotland - Scottish Government boundaries © Crown Copyright and database right and/or © third party licensors 2004. All rights reserved.
+
+
+The following copyright notice should be placed on all copies of information or images derived from the Web Service:
+[Information] or [Images] based upon LCM2007 © NERC (CEH) 2011. Contains Ordnance Survey data © Crown Copyright 2007. © third party licensors.
+
+
+Test Link
+
+
+
+
+view
+
+
+
+
+
+
+true
+
+
+-9.23
+
+
+2.69
+
+
+49.84
+
+
+60.85
+
+
+
+
+
+
+
+
+GetMap
+
+
+CEH:EIDC:#1300181654668
+
+
+
+
+
+
+GetMap
+
+
+https://gateway.ceh.ac.uk/soapServices/CSWStartup?Service=CSW&Request=GetRecordById&Version=2.0.2&outputSchema=http://www.isotc211.org/2005/gmd&elementSetname=full&id=337f9dea-726e-40c7-9f9b-e269911c9db6
+
+
+
+
+
+
+
+
+
+GetCapabilities
+
+
+
+
+
+
+
+http://lasigpublic.nerc-lancaster.ac.uk/ArcGIS/services/LandCoverMap/LCM2007_GB_1k_DOM_TAR/MapServer/WMSServer
+
+
+
+
+
+
+
+
+GetMap
+
+
+
+
+
+
+
+http://lasigpublic.nerc-lancaster.ac.uk/ArcGIS/services/LandCoverMap/LCM2007_GB_1k_DOM_TAR/MapServer/WMSServer
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+png
+
+
+unknown
+
+
+
+
+
+
+
+
+Centre for Ecology & Hydrology
+
+
+
+
+
+
+Maclean Building, Benson Lane, Crowmarsh Gifford
+
+
+Wallingford
+
+
+Oxfordshire
+
+
+OX10 8BB
+
+
+United Kingdom
+
+
+enquiries@ceh.ac.uk
+
+
+
+
+
+
+distributor
+
+
+
+
+
+
+
+
+
+
+http://lasigpublic.nerc-lancaster.ac.uk/ArcGIS/services/LandCoverMap/LCM2007_GB_1k_DOM_TAR/MapServer/WMSServer?request=getCapabilities&service=WMS
+
+
+WMS Service
+
+
+GetCapabilities for this service
+
+
+
+
+
+
+
+
+
+
+http://cs2007.ceh.ac.uk/sites/default/files/LCM2007%20Final%20Report%20-%20vCS%20Web.pdf
+
+
+CS Technical Report
+
+
+Final Report for LCM2007 - the new UK Land Cover Map
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+The service is based upon Land Cover Map 2007
+
+
+
+
+
+
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/11_Service_Invalid_GEMINI_Service_Type.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/11_Service_Invalid_GEMINI_Service_Type.xml
new file mode 100644
index 0000000..6da753e
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/11_Service_Invalid_GEMINI_Service_Type.xml
@@ -0,0 +1,537 @@
+
+
+
+test-record-11
+
+
+
+
+
+
+
+
+service
+
+
+
+
+Claire Wood
+
+
+Centre for Ecology & Hydrology
+
+
+
+
+
+
+Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+Lancaster
+
+
+Lancashire
+
+
+LA1 4AP
+
+
+United Kingdom
+
+
+enquiries@ceh.ac.uk
+
+
+
+
+
+
+pointOfContact
+
+
+
+
+2012-10-05
+
+
+NERC profile of ISO19115:2003
+
+
+2003(E)
+
+
+
+
+
+
+27700
+
+
+urn:ogc:def:crs:EPSG
+
+
+6.11.2
+
+
+
+
+
+
+
+
+
+
+Test Record 11 Service Invalid GEMINI Service Type
+
+
+Also known as xxx
+
+
+
+
+2011-04-08
+
+
+
+
+
+
+
+
+
+This test Service record should pass XSD validation and 19139 Schematron validation, but fail GEMINI2.1 Schematron. It has an element value for Service Type which is not in the list of allowed values.
+
+
+
+
+Dan Morton
+
+
+Centre for Ecology & Hydrology
+
+
+LA1 4AP
+
+
+
+
+
+
+Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+Lancaster
+
+
+Lancashire
+
+
+LA1 4AP
+
+
+United Kingdom
+
+
+enquiries@ceh.ac.uk
+
+
+
+
+
+
+pointOfContact
+
+
+
+
+
+
+Parr Section
+
+
+
+
+
+
+United Kingdom
+
+
+enquiries@ceh.ac.uk
+
+
+
+
+
+
+resourceProvider
+
+
+
+
+
+
+
+
+
+Not Planned
+
+
+
+
+
+
+https://gateway.ceh.ac.uk:443/smartEditor/preview/848fc7db-f8a8-4804-a5ec-6876a14d0a1a.png
+
+
+preview thumbnail
+
+
+
+
+
+
+Land cover
+
+
+Land use
+
+
+
+
+
+
+
+GEMET - INSPIRE themes, version 1.0
+
+
+
+
+2008-06-01
+
+
+
+
+
+
+
+
+
+
+
+
+
+CEH Biodiversity Programme
+
+
+CEH Project NEC03259
+
+
+
+
+
+
+
+
+
+NERC_DDC
+
+
+
+
+
+
+
+
+
+infoMapAccessService
+
+
+
+
+
+
+
+Commission Regulation (EC) No 1205/2008 of 3 December 2008 implementing Directive 2007/2/EC of the European Parliament and of the Council as regards Metadata
+
+
+
+
+2008-12-03
+
+
+
+
+
+
+
+
+
+
+
+
+
+Great Britain
+
+
+
+
+
+
+
+
+
+Test Link
+
+
+
+
+
+
+Refer to: R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
+
+
+
+
+
+
+
+
+This Web Mapping Service is made available for use subject to the Terms and Conditions of the CEH Information Gateway (https://gateway.ceh.ac.uk/disclaimer).
+
+The following acknowledgements and copyright notices (where applicable), shall, unless otherwise stated, be used on all copies of the Web Map Service, publications and reports, including but not limited to, use in presentations to any audience.
+
+LCM2007 © and database right NERC (CEH) 2011. All rights reserved. Contains Ordnance Survey data © Crown copyright and database right 2007.
+
+Geographical area - Acknowledgements
+The following datasets have been used in the derivation of LCM2007 25m raster and LCM2007 1km Dominant Coverage and LCM2007 1km Percent of Total Coverage:
+GB - Landsat-TM5 satellite imagery © <Satellite/Ground station operator> 2007. Distributed by Eurimage.
+GB - IRS-LISS3 satellite imagery supplied by European Space Agency © Euromap, Space Imaging and Antrix Corporation Limited.
+GB - SPOT-4 and SPOT-5 satellite imagery supplied by European Space Agency © Spot Image and Centre National D’Etudes Spatiales (CNES).
+GB - AWIFS satellite imagery © Antrix Corporation Limited, distributed by Euromap.
+GB - Contains Ordnance Survey mapping data © Crown copyright and database right 2007.
+GB - Digital elevation data © Intermap Technologies Inc. or its suppliers 2003.
+England and Wales - Soils data for England and Wales © Cranfield University (NSRI) and for the Controller of HMSO. 2011.
+England and Wales - Office for National Statistics data © Crown Copyright and database right. Contains Ordnance Survey data © Crown copyright and database right 2001.
+England - Boundaries from Rural Payments Agency © Crown copyright and database right and/or © third party licensors.
+Wales - Boundaries from Welsh Government, Department of Rural Affairs © Crown Copyright and database right and/or © third party licensors.
+Scotland - Boundaries from Scottish Government © Crown Copyright and database right and/or © third party licensors.
+Scotland - SSKIB derived pH for "semi-natural" soils for upper horizon for dominant soil © The James Hutton Institute 2010.
+Scotland - Land Cover of Scotland dataset, Crown Copyright 1992. It shall not be reproduced in any form whatever without the permission of The Controller of Her Majesty’s Stationery Office. Reproduced from OS Pathfinder Series with the permission of the Controller of HMSO. © Crown copyright 1992.
+Scotland - Scottish Government boundaries © Crown Copyright and database right and/or © third party licensors 2004. All rights reserved.
+
+
+The following copyright notice should be placed on all copies of information or images derived from the Web Service:
+[Information] or [Images] based upon LCM2007 © NERC (CEH) 2011. Contains Ordnance Survey data © Crown Copyright 2007. © third party licensors.
+
+
+Test Link
+
+
+
+
+unknown
+
+
+
+
+
+
+true
+
+
+-9.23
+
+
+2.69
+
+
+49.84
+
+
+60.85
+
+
+
+
+
+
+
+
+GetMap
+
+
+CEH:EIDC:#1300181654668
+
+
+
+
+
+
+GetMap
+
+
+https://gateway.ceh.ac.uk/soapServices/CSWStartup?Service=CSW&Request=GetRecordById&Version=2.0.2&outputSchema=http://www.isotc211.org/2005/gmd&elementSetname=full&id=337f9dea-726e-40c7-9f9b-e269911c9db6
+
+
+
+
+
+
+
+
+
+GetCapabilities
+
+
+
+
+
+
+
+http://lasigpublic.nerc-lancaster.ac.uk/ArcGIS/services/LandCoverMap/LCM2007_GB_1k_DOM_TAR/MapServer/WMSServer
+
+
+
+
+
+
+
+
+GetMap
+
+
+
+
+
+
+
+http://lasigpublic.nerc-lancaster.ac.uk/ArcGIS/services/LandCoverMap/LCM2007_GB_1k_DOM_TAR/MapServer/WMSServer
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+png
+
+
+unknown
+
+
+
+
+
+
+
+
+Centre for Ecology & Hydrology
+
+
+
+
+
+
+Maclean Building, Benson Lane, Crowmarsh Gifford
+
+
+Wallingford
+
+
+Oxfordshire
+
+
+OX10 8BB
+
+
+United Kingdom
+
+
+enquiries@ceh.ac.uk
+
+
+
+
+
+
+distributor
+
+
+
+
+
+
+
+
+
+
+http://lasigpublic.nerc-lancaster.ac.uk/ArcGIS/services/LandCoverMap/LCM2007_GB_1k_DOM_TAR/MapServer/WMSServer?request=getCapabilities&service=WMS
+
+
+WMS Service
+
+
+GetCapabilities for this service
+
+
+
+
+
+
+
+
+
+
+http://cs2007.ceh.ac.uk/sites/default/files/LCM2007%20Final%20Report%20-%20vCS%20Web.pdf
+
+
+CS Technical Report
+
+
+Final Report for LCM2007 - the new UK Land Cover Map
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Land Cover Map 2007
+
+
+
+
+
+
+
+
+The service is based upon Land Cover Map 2007
+
+
+
+
+
+
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/12_Service_Valid.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/12_Service_Valid.xml
new file mode 100644
index 0000000..d413261
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/12_Service_Valid.xml
@@ -0,0 +1,537 @@
+
+
+
+ test-record-12
+
+
+
+
+
+
+
+
+ service
+
+
+
+
+ Claire Wood
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+ Lancaster
+
+
+ Lancashire
+
+
+ LA1 4AP
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+ 2012-10-05
+
+
+ NERC profile of ISO19115:2003
+
+
+ 2003(E)
+
+
+
+
+
+
+ 27700
+
+
+ urn:ogc:def:crs:EPSG
+
+
+ 6.11.2
+
+
+
+
+
+
+
+
+
+
+ Test Record 12 Service Valid
+
+
+ Also known as xxx
+
+
+
+
+ 2011-04-08
+
+
+
+
+
+
+
+
+
+ This test Service record should pass all validation.
+
+
+
+
+ Dan Morton
+
+
+ Centre for Ecology & Hydrology
+
+
+ LA1 4AP
+
+
+
+
+
+
+ Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+ Lancaster
+
+
+ Lancashire
+
+
+ LA1 4AP
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+
+
+ Parr Section
+
+
+
+
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ resourceProvider
+
+
+
+
+
+
+
+
+
+ Not Planned
+
+
+
+
+
+
+ https://gateway.ceh.ac.uk:443/smartEditor/preview/848fc7db-f8a8-4804-a5ec-6876a14d0a1a.png
+
+
+ preview thumbnail
+
+
+
+
+
+
+ Land cover
+
+
+ Land use
+
+
+
+
+
+
+
+ GEMET - INSPIRE themes, version 1.0
+
+
+
+
+ 2008-06-01
+
+
+
+
+
+
+
+
+
+
+
+
+
+ CEH Biodiversity Programme
+
+
+ CEH Project NEC03259
+
+
+
+
+
+
+
+
+
+ NERC_DDC
+
+
+
+
+
+
+
+
+
+ infoMapAccessService
+
+
+
+
+
+
+
+ Commission Regulation (EC) No 1205/2008 of 3 December 2008 implementing Directive 2007/2/EC of the European Parliament and of the Council as regards Metadata
+
+
+
+
+ 2008-12-03
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Great Britain
+
+
+
+
+
+
+
+
+
+ Test Link
+
+
+
+
+
+
+ Refer to: R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
+
+
+
+
+
+
+
+
+ This Web Mapping Service is made available for use subject to the Terms and Conditions of the CEH Information Gateway (https://gateway.ceh.ac.uk/disclaimer).
+
+ The following acknowledgements and copyright notices (where applicable), shall, unless otherwise stated, be used on all copies of the Web Map Service, publications and reports, including but not limited to, use in presentations to any audience.
+
+ LCM2007 © and database right NERC (CEH) 2011. All rights reserved. Contains Ordnance Survey data © Crown copyright and database right 2007.
+
+ Geographical area - Acknowledgements
+ The following datasets have been used in the derivation of LCM2007 25m raster and LCM2007 1km Dominant Coverage and LCM2007 1km Percent of Total Coverage:
+ GB - Landsat-TM5 satellite imagery © <Satellite/Ground station operator> 2007. Distributed by Eurimage.
+ GB - IRS-LISS3 satellite imagery supplied by European Space Agency © Euromap, Space Imaging and Antrix Corporation Limited.
+ GB - SPOT-4 and SPOT-5 satellite imagery supplied by European Space Agency © Spot Image and Centre National D’Etudes Spatiales (CNES).
+ GB - AWIFS satellite imagery © Antrix Corporation Limited, distributed by Euromap.
+ GB - Contains Ordnance Survey mapping data © Crown copyright and database right 2007.
+ GB - Digital elevation data © Intermap Technologies Inc. or its suppliers 2003.
+ England and Wales - Soils data for England and Wales © Cranfield University (NSRI) and for the Controller of HMSO. 2011.
+ England and Wales - Office for National Statistics data © Crown Copyright and database right. Contains Ordnance Survey data © Crown copyright and database right 2001.
+ England - Boundaries from Rural Payments Agency © Crown copyright and database right and/or © third party licensors.
+ Wales - Boundaries from Welsh Government, Department of Rural Affairs © Crown Copyright and database right and/or © third party licensors.
+ Scotland - Boundaries from Scottish Government © Crown Copyright and database right and/or © third party licensors.
+ Scotland - SSKIB derived pH for "semi-natural" soils for upper horizon for dominant soil © The James Hutton Institute 2010.
+ Scotland - Land Cover of Scotland dataset, Crown Copyright 1992. It shall not be reproduced in any form whatever without the permission of The Controller of Her Majesty’s Stationery Office. Reproduced from OS Pathfinder Series with the permission of the Controller of HMSO. © Crown copyright 1992.
+ Scotland - Scottish Government boundaries © Crown Copyright and database right and/or © third party licensors 2004. All rights reserved.
+
+
+ The following copyright notice should be placed on all copies of information or images derived from the Web Service:
+ [Information] or [Images] based upon LCM2007 © NERC (CEH) 2011. Contains Ordnance Survey data © Crown Copyright 2007. © third party licensors.
+
+
+ Test Link
+
+
+
+
+ view
+
+
+
+
+
+
+ true
+
+
+ -9.23
+
+
+ 2.69
+
+
+ 49.84
+
+
+ 60.85
+
+
+
+
+
+
+
+
+ GetMap
+
+
+ CEH:EIDC:#1300181654668
+
+
+
+
+
+
+ GetMap
+
+
+ https://gateway.ceh.ac.uk/soapServices/CSWStartup?Service=CSW&Request=GetRecordById&Version=2.0.2&outputSchema=http://www.isotc211.org/2005/gmd&elementSetname=full&id=337f9dea-726e-40c7-9f9b-e269911c9db6
+
+
+
+
+
+
+
+
+
+ GetCapabilities
+
+
+
+
+
+
+
+ http://lasigpublic.nerc-lancaster.ac.uk/ArcGIS/services/LandCoverMap/LCM2007_GB_1k_DOM_TAR/MapServer/WMSServer
+
+
+
+
+
+
+
+
+ GetMap
+
+
+
+
+
+
+
+ http://lasigpublic.nerc-lancaster.ac.uk/ArcGIS/services/LandCoverMap/LCM2007_GB_1k_DOM_TAR/MapServer/WMSServer
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ png
+
+
+ unknown
+
+
+
+
+
+
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Maclean Building, Benson Lane, Crowmarsh Gifford
+
+
+ Wallingford
+
+
+ Oxfordshire
+
+
+ OX10 8BB
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ distributor
+
+
+
+
+
+
+
+
+
+
+ http://lasigpublic.nerc-lancaster.ac.uk/ArcGIS/services/LandCoverMap/LCM2007_GB_1k_DOM_TAR/MapServer/WMSServer?request=getCapabilities&service=WMS
+
+
+ WMS Service
+
+
+ GetCapabilities for this service
+
+
+
+
+
+
+
+
+
+
+ http://cs2007.ceh.ac.uk/sites/default/files/LCM2007%20Final%20Report%20-%20vCS%20Web.pdf
+
+
+ CS Technical Report
+
+
+ Final Report for LCM2007 - the new UK Land Cover Map
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Land Cover Map 2007
+
+
+
+
+
+
+
+
+ The service is based upon Land Cover Map 2007
+
+
+
+
+
+
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/13_Dataset_Invalid_Element_srv.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/13_Dataset_Invalid_Element_srv.xml
new file mode 100644
index 0000000..41a75b3
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/13_Dataset_Invalid_Element_srv.xml
@@ -0,0 +1,610 @@
+
+
+
+test-record-13
+
+
+
+
+
+ test-record-04
+
+
+
+
+
+ dataset
+
+
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+ Lancaster
+
+
+ Lancashire
+
+
+ LA1 4AP
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+ 2012-10-05
+
+
+ INSPIRE Implementing Rules for Metadata
+
+
+ 1.2
+
+
+
+
+
+
+ 27700
+
+
+ urn:ogc:def:crs:EPSG
+
+
+ 6.11.2
+
+
+
+
+
+
+
+
+
+
+
+
+ Test Record 13
+
+
+ Also known as xxx
+
+
+
+
+ 2011-04-08
+
+
+
+
+
+
+
+
+
+ This test Dataset record should fail.
+
+
+
+
+ Dan Morton
+
+
+ Centre for Ecology & Hydrology
+
+
+ LA1 4AP
+
+
+
+
+
+
+ Lancaster Environment Centre, Library Avenue, Bailrigg
+
+
+ Lancaster
+
+
+ Lancashire
+
+
+ LA1 4AP
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+
+
+ Parr Section
+
+
+
+
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ resourceProvider
+
+
+
+
+
+
+
+
+
+ Not Planned
+
+
+
+
+
+
+ https://gateway.ceh.ac.uk:443/smartEditor/preview/848fc7db-f8a8-4804-a5ec-6876a14d0a1a.png
+
+
+ preview thumbnail
+
+
+
+
+
+
+ Land cover
+
+
+ Land use
+
+
+
+
+
+
+
+ GEMET - INSPIRE themes, version 1.0
+
+
+
+
+ 2008-06-01
+
+
+
+
+
+
+
+
+
+
+
+
+
+ CEH Biodiversity Programme
+
+
+ CEH Project NEC03259
+
+
+
+
+
+
+
+
+
+ NERC_DDC
+
+
+
+
+
+
+
+
+
+ infoMapAccessService
+
+
+
+
+
+
+
+ Commission Regulation (EC) No 1205/2008 of 3 December 2008 implementing Directive 2007/2/EC of the European Parliament and of the Council as regards Metadata
+
+
+
+
+ 2008-12-03
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Great Britain
+
+
+
+
+
+
+
+
+
+ Test Link
+
+
+
+
+
+
+ Refer to: R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
+
+
+
+
+
+
+
+
+ This Web Mapping Service is made available for use subject to the Terms and Conditions of the CEH Information Gateway (https://gateway.ceh.ac.uk/disclaimer).
+
+ The following acknowledgements and copyright notices (where applicable), shall, unless otherwise stated, be used on all copies of the Web Map Service, publications and reports, including but not limited to, use in presentations to any audience.
+
+ LCM2007 © and database right NERC (CEH) 2011. All rights reserved. Contains Ordnance Survey data © Crown copyright and database right 2007.
+
+ Geographical area - Acknowledgements
+ The following datasets have been used in the derivation of LCM2007 25m raster and LCM2007 1km Dominant Coverage and LCM2007 1km Percent of Total Coverage:
+ GB - Landsat-TM5 satellite imagery © <Satellite/Ground station operator> 2007. Distributed by Eurimage.
+ GB - IRS-LISS3 satellite imagery supplied by European Space Agency © Euromap, Space Imaging and Antrix Corporation Limited.
+ GB - SPOT-4 and SPOT-5 satellite imagery supplied by European Space Agency © Spot Image and Centre National D’Etudes Spatiales (CNES).
+ GB - AWIFS satellite imagery © Antrix Corporation Limited, distributed by Euromap.
+ GB - Contains Ordnance Survey mapping data © Crown copyright and database right 2007.
+ GB - Digital elevation data © Intermap Technologies Inc. or its suppliers 2003.
+ England and Wales - Soils data for England and Wales © Cranfield University (NSRI) and for the Controller of HMSO. 2011.
+ England and Wales - Office for National Statistics data © Crown Copyright and database right. Contains Ordnance Survey data © Crown copyright and database right 2001.
+ England - Boundaries from Rural Payments Agency © Crown copyright and database right and/or © third party licensors.
+ Wales - Boundaries from Welsh Government, Department of Rural Affairs © Crown Copyright and database right and/or © third party licensors.
+ Scotland - Boundaries from Scottish Government © Crown Copyright and database right and/or © third party licensors.
+ Scotland - SSKIB derived pH for "semi-natural" soils for upper horizon for dominant soil © The James Hutton Institute 2010.
+ Scotland - Land Cover of Scotland dataset, Crown Copyright 1992. It shall not be reproduced in any form whatever without the permission of The Controller of Her Majesty’s Stationery Office. Reproduced from OS Pathfinder Series with the permission of the Controller of HMSO. © Crown copyright 1992.
+ Scotland - Scottish Government boundaries © Crown Copyright and database right and/or © third party licensors 2004. All rights reserved.
+
+
+ The following copyright notice should be placed on all copies of information or images derived from the Web Service:
+ [Information] or [Images] based upon LCM2007 © NERC (CEH) 2011. Contains Ordnance Survey data © Crown Copyright 2007. © third party licensors.
+
+
+ Test Link
+
+
+
+
+ view
+
+
+
+
+
+
+ true
+
+
+ -9.23
+
+
+ 2.69
+
+
+ 49.84
+
+
+ 60.85
+
+
+
+
+
+
+
+
+ GetMap
+
+
+ CEH:EIDC:#1300181654668
+
+
+
+
+
+
+ GetMap
+
+
+ https://gateway.ceh.ac.uk/soapServices/CSWStartup?Service=CSW&Request=GetRecordById&Version=2.0.2&outputSchema=http://www.isotc211.org/2005/gmd&elementSetname=full&id=337f9dea-726e-40c7-9f9b-e269911c9db6
+
+
+
+
+
+
+
+
+
+ GetCapabilities
+
+
+
+
+
+
+
+ http://lasigpublic.nerc-lancaster.ac.uk/ArcGIS/services/LandCoverMap/LCM2007_GB_1k_DOM_TAR/MapServer/WMSServer
+
+
+
+
+
+
+
+
+ GetMap
+
+
+
+
+
+
+
+ http://lasigpublic.nerc-lancaster.ac.uk/ArcGIS/services/LandCoverMap/LCM2007_GB_1k_DOM_TAR/MapServer/WMSServer
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ GeoTIFF
+
+
+ 1.0
+
+
+
+
+
+
+
+
+ Centre for Ecology & Hydrology
+
+
+
+
+
+
+ Maclean Building, Benson Lane, Crowmarsh Gifford
+
+
+ Wallingford
+
+
+ Oxfordshire
+
+
+ OX10 8BB
+
+
+ United Kingdom
+
+
+ enquiries@ceh.ac.uk
+
+
+
+
+
+
+ distributor
+
+
+
+
+
+
+
+
+
+
+ http://www.ceh.ac.uk/LandCoverMap2007.html
+
+
+ Essential technical details
+
+
+ Link to further technical details about this data
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://www.countrysidesurvey.org.uk/
+
+
+ Countryside Survey website
+
+
+ Countryside Survey website
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://gateway.ceh.ac.uk/download?fileIdentifier=82a0f4a1-01ff-4ed1-853e-224d8404b3fd
+
+
+ Dataset download
+
+
+ Link to download this dataset
+
+
+
+
+
+
+
+
+
+
+
+
+
+ http://lasigprod.nerc-lancaster.ac.uk/arcgis/services/LandCoverMap/LCM2007_GB_1k_DOM_AGG/MapServer/WMSServer?request=GetCapabilities&service=WMS
+
+
+ Web Map Service
+
+
+ A web map service (WMS) is available for this data
+
+
+
+
+
+
+
+
+
+
+ http://cs2007.ceh.ac.uk/sites/default/files/LCM2007%20Final%20Report%20-%20vCS%20Web.pdf
+
+
+ CS Technical Report
+
+
+ Final Report for LCM2007 - the new UK Land Cover Map
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ LCM2007 uses a spatial framework based on OS MasterMap (R). MasterMap was generalised to remove unnecessary detail, then the framework was segmented according to the underlying satellite data to split areas of non-uniform landscape. The data was classified according to a parcel-based supervised maximum likelihood classification procedure. The raster products are derived from the vector products.
+
+
+
+
+
+
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/xml/iso19139/dataset-invalid.xml b/ckanext/spatial/tests/nose/xml/iso19139/dataset-invalid.xml
new file mode 100644
index 0000000..a299e03
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/iso19139/dataset-invalid.xml
@@ -0,0 +1,498 @@
+
+
+
+ test-record
+
+
+ test-dataset
+
+
+ eng
+
+
+ dataset
+
+
+
+
+ Lachlan Renwick
+
+
+ Scottish Natural Heritage
+
+
+ Geographic Systems and Data Coordinator
+
+
+
+
+
+
+ 01463 725000
+
+
+
+
+
+
+ Great Glen House, Leachkin Road
+
+
+ INVERNESS
+
+
+ IV3 8NW
+
+
+ United Kingdom
+
+
+ data_supply@snh.gov.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+ 2011-09-23T10:06:08
+
+
+
+
+
+
+ urn:ogc:def:crs:EPSG::27700
+
+
+
+
+
+
+
+
+
+
+ Country Parks (Scotland)
+
+
+
+
+ 2004-02
+
+
+ creation
+
+
+
+
+
+
+ 2006-07-03
+
+
+ revision
+
+
+
+
+
+
+ CPK
+
+
+
+
+
+
+
+
+
+ Parks are set up by Local Authorities to provide open-air recreation facilities close to towns and cities. [edited]
+
+
+
+
+ Lachlan Renwick
+
+
+ Scottish Natural Heritage
+
+
+ Geographic Systems & Data Coordinator
+
+
+
+
+
+
+ 01463 725000
+
+
+
+
+
+
+ Great Glen House, Leachkin Road
+
+
+ INVERNESS
+
+
+ IV3 8NW
+
+
+ United Kingdom
+
+
+ data_supply@snh.gov.uk
+
+
+
+
+
+
+ custodian
+
+
+
+
+
+
+ Lachlan Renwick
+
+
+ Scottish Natural Heritage
+
+
+ Geographic Systems & Data Coordinator
+
+
+
+
+
+
+ 01463 725000
+
+
+
+
+
+
+ Great Glen House, Leachkin Road
+
+
+ INVERNESS
+
+
+ IV3 8NW
+
+
+ United Kingdom
+
+
+ data_supply@snh.gov.uk
+
+
+
+
+
+
+ distributor
+
+
+
+
+
+
+ irregular
+
+
+
+
+
+
+ SDE Feature Class
+
+
+
+
+
+
+
+
+
+ Nature conservation
+
+
+
+
+ Government Category List
+
+
+
+
+ 2004-07-15
+
+
+ revision
+
+
+
+
+
+
+
+
+
+
+ copyright
+
+
+ otherRestrictions
+
+
+ copyright
+
+
+ otherRestrictions
+
+
+ Copyright Scottish Natural Heritage
+
+
+
+
+
+
+ Reference and PSMA Only
+
+
+ http://www.test.gov.uk/licenseurl
+
+
+
+
+
+
+
+
+
+ 5
+
+
+
+
+ eng
+
+
+ environment
+
+
+
+
+
+
+
+
+
+
+ ISO 3166
+
+
+
+
+ 2007-09-02
+
+
+ revision
+
+
+
+
+
+
+ GB-SCT
+
+
+
+
+
+
+
+
+ -8.97114288
+
+
+ 0.205857204
+
+
+ 54.529947158
+
+
+ 61.06066944
+
+
+
+
+
+
+
+ 1998
+ 2010
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ESRI Shapefile
+
+
+ Unknown
+
+
+
+
+
+
+ KML
+
+
+ 2.1
+
+
+
+
+
+
+ GML
+
+
+ 3.1.1
+
+
+
+
+
+
+
+
+ Lachlan Renwick
+
+
+ Scottish Natural Heritage
+
+
+ Geographic Systems & Data Coordinator
+
+
+
+
+
+
+ 01463 725000
+
+
+
+
+
+
+ Great Glen House, Leachkin Road
+
+
+ INVERNESS
+
+
+ IV3 8NW
+
+
+ United Kingdom
+
+
+ data_supply@snh.gov.uk
+
+
+
+
+
+
+ distributor
+
+
+
+
+
+
+ SDE Feature Class
+
+
+
+
+
+
+
+
+
+
+
+ http://www.snh.org.uk/snhi
+
+
+
+
+
+
+
+
+
+
+
+
+ https://gateway.snh.gov.uk/pls/apex_ddtdb2/f?p=101
+
+
+ Test Resource Name
+
+
+ Test Resource Description
+
+
+ download
+
+
+
+
+
+
+
+
+
+
+
+
+ dataset
+
+
+
+
+
+
+ Country Park is not a statutory designation. Countryside (Scotland) Act 1967 Section 48 gives local authorities power to assess and review the need for Country Parks in consultation with SNH.
+
+
+
+
+
+
diff --git a/ckanext/spatial/tests/nose/xml/iso19139/dataset.xml b/ckanext/spatial/tests/nose/xml/iso19139/dataset.xml
new file mode 100644
index 0000000..885a9b5
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/iso19139/dataset.xml
@@ -0,0 +1,495 @@
+
+
+
+ test-dataset-1
+
+
+ eng
+
+
+ dataset
+
+
+
+
+ Lachlan Renwick
+
+
+ Scottish Natural Heritage
+
+
+ Geographic Systems and Data Coordinator
+
+
+
+
+
+
+ 01463 725000
+
+
+
+
+
+
+ Great Glen House, Leachkin Road
+
+
+ INVERNESS
+
+
+ IV3 8NW
+
+
+ United Kingdom
+
+
+ data_supply@snh.gov.uk
+
+
+
+
+
+
+ pointOfContact
+
+
+
+
+ 2011-09-23T10:06:08
+
+
+
+
+
+
+ urn:ogc:def:crs:EPSG::27700
+
+
+
+
+
+
+
+
+
+
+ Country Parks (Scotland)
+
+
+
+
+ 2004-02
+
+
+ creation
+
+
+
+
+
+
+ 2006-07-03
+
+
+ revision
+
+
+
+
+
+
+ CPK
+
+
+
+
+
+
+
+
+
+ Parks are set up by Local Authorities to provide open-air recreation facilities close to towns and cities. [edited]
+
+
+
+
+ Lachlan Renwick
+
+
+ Scottish Natural Heritage
+
+
+ Geographic Systems & Data Coordinator
+
+
+
+
+
+
+ 01463 725000
+
+
+
+
+
+
+ Great Glen House, Leachkin Road
+
+
+ INVERNESS
+
+
+ IV3 8NW
+
+
+ United Kingdom
+
+
+ data_supply@snh.gov.uk
+
+
+
+
+
+
+ custodian
+
+
+
+
+
+
+ Lachlan Renwick
+
+
+ Scottish Natural Heritage
+
+
+ Geographic Systems & Data Coordinator
+
+
+
+
+
+
+ 01463 725000
+
+
+
+
+
+
+ Great Glen House, Leachkin Road
+
+
+ INVERNESS
+
+
+ IV3 8NW
+
+
+ United Kingdom
+
+
+ data_supply@snh.gov.uk
+
+
+
+
+
+
+ distributor
+
+
+
+
+
+
+ irregular
+
+
+
+
+
+
+ SDE Feature Class
+
+
+
+
+
+
+
+
+
+ Nature conservation
+
+
+
+
+ Government Category List
+
+
+
+
+ 2004-07-15
+
+
+ revision
+
+
+
+
+
+
+
+
+
+
+ copyright
+
+
+ otherRestrictions
+
+
+ copyright
+
+
+ otherRestrictions
+
+
+ Copyright Scottish Natural Heritage
+
+
+
+
+
+
+ Reference and PSMA Only
+
+
+ http://www.test.gov.uk/licenseurl
+
+
+
+
+
+
+
+
+
+ 5
+
+
+
+
+ eng
+
+
+ environment
+
+
+
+
+
+
+
+
+
+
+ ISO 3166
+
+
+
+
+ 2007-09-02
+
+
+ revision
+
+
+
+
+
+
+ GB-SCT
+
+
+
+
+
+
+
+
+ -8.97114288
+
+
+ 0.205857204
+
+
+ 54.529947158
+
+
+ 61.06066944
+
+
+
+
+
+
+
+ 1998
+ 2010
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ESRI Shapefile
+
+
+ Unknown
+
+
+
+
+
+
+ KML
+
+
+ 2.1
+
+
+
+
+
+
+ GML
+
+
+ 3.1.1
+
+
+
+
+
+
+
+
+ Lachlan Renwick
+
+
+ Scottish Natural Heritage
+
+
+ Geographic Systems & Data Coordinator
+
+
+
+
+
+
+ 01463 725000
+
+
+
+
+
+
+ Great Glen House, Leachkin Road
+
+
+ INVERNESS
+
+
+ IV3 8NW
+
+
+ United Kingdom
+
+
+ data_supply@snh.gov.uk
+
+
+
+
+
+
+ distributor
+
+
+
+
+
+
+ SDE Feature Class
+
+
+
+
+
+
+
+
+
+
+
+ http://www.snh.org.uk/snhi
+
+
+
+
+
+
+
+
+
+
+
+
+ https://gateway.snh.gov.uk/pls/apex_ddtdb2/f?p=101
+
+
+ Test Resource Name
+
+
+ Test Resource Description
+
+
+ download
+
+
+
+
+
+
+
+
+
+
+
+
+ dataset
+
+
+
+
+
+
+ Country Park is not a statutory designation. Countryside (Scotland) Act 1967 Section 48 gives local authorities power to assess and review the need for Country Parks in consultation with SNH.
+
+
+
+
+
+
diff --git a/ckanext/spatial/tests/nose/xml/wms/capabilities.xml b/ckanext/spatial/tests/nose/xml/wms/capabilities.xml
new file mode 100644
index 0000000..1f62497
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml/wms/capabilities.xml
@@ -0,0 +1,127 @@
+
+
+
+
+
+
+ OGC:WMS
+ Wikipedia articles
+ This service provides access to a subset of the georeferenced articles of the English version of Wikipedia. The features were extracted from the DBpedia Geographic Coordinates dataset corresponding to the English version of Wikipedia. The original Wikipedia dump from where it was derived was generated on October 2010.
+
+ Wikipedia
+ Articles
+ World
+
+
+
+
+ Free
+ No restrictions, but please don't abuse the server!
+
+
+
+
+
+ application/vnd.ogc.wms_xml
+
+
+
+
+
+
+
+
+ image/gif
+ image/png
+ image/png; mode=24bit
+ image/jpeg
+ image/vnd.wap.wbmp
+ image/tiff
+ image/svg+xml
+
+
+
+
+
+
+
+
+ text/plain
+ application/vnd.ogc.gml
+
+
+
+
+
+
+
+
+ text/xml
+
+
+
+
+
+
+
+
+ image/gif
+ image/png
+ image/png; mode=24bit
+ image/jpeg
+ image/vnd.wap.wbmp
+
+
+
+
+
+
+
+
+ text/xml
+
+
+
+
+
+
+
+
+
+ application/vnd.ogc.se_xml
+ application/vnd.ogc.se_inimage
+ application/vnd.ogc.se_blank
+
+
+
+
+ wikipedia
+ Wikipedia articles
+ This service provides access to a subset of the georeferenced articles of the English version of Wikipedia. The features were extracted from the DBpedia Geographic Coordinates dataset corresponding to the English version of Wikipedia. The original Wikipedia dump from where it was derived was generated on October 2010.
+
+ Wikipedia
+ Articles
+ World
+
+ EPSG:4326
+ EPSG:3857
+
+
+ articles
+ Wikipedia articles
+ EPSG:4326
+ EPSG:3857
+
+
+
+
+
+
+
diff --git a/ckanext/spatial/tests/nose/xml_file_server.py b/ckanext/spatial/tests/nose/xml_file_server.py
new file mode 100644
index 0000000..74f4fbf
--- /dev/null
+++ b/ckanext/spatial/tests/nose/xml_file_server.py
@@ -0,0 +1,36 @@
+from __future__ import print_function
+
+import os
+
+try:
+ from http.server import SimpleHTTPRequestHandler
+ from socketserver import TCPServer
+except ImportError:
+ from SimpleHTTPServer import SimpleHTTPRequestHandler
+ from SocketServer import TCPServer
+
+from threading import Thread
+
+
+PORT = 8999
+
+
+def serve(port=PORT):
+ '''Serves test XML files over HTTP'''
+
+ # Make sure we serve from the tests' XML directory
+ os.chdir(os.path.join(os.path.dirname(os.path.abspath(__file__)),
+ 'xml'))
+
+ Handler = SimpleHTTPRequestHandler
+
+ class TestServer(TCPServer):
+ allow_reuse_address = True
+
+ httpd = TestServer(("", port), Handler)
+
+ print('Serving test HTTP server at port', port)
+
+ httpd_thread = Thread(target=httpd.serve_forever)
+ httpd_thread.daemon = True
+ httpd_thread.start()
diff --git a/ckanext/spatial/tests/test_api.py b/ckanext/spatial/tests/test_api.py
index ef268ca..90ff7f2 100644
--- a/ckanext/spatial/tests/test_api.py
+++ b/ckanext/spatial/tests/test_api.py
@@ -3,6 +3,7 @@ from nose.tools import assert_equals, assert_raises
from ckan.model import Session
from ckan.lib.search import SearchError
+
try:
import ckan.new_tests.helpers as helpers
import ckan.new_tests.factories as factories
@@ -13,191 +14,190 @@ except ImportError:
from ckanext.spatial.tests.base import SpatialTestBase
extents = {
- 'nz': '{"type":"Polygon","coordinates":[[[174,-38],[176,-38],[176,-40],[174,-40],[174,-38]]]}',
- 'ohio': '{"type": "Polygon","coordinates": [[[-84,38],[-84,40],[-80,42],[-80,38],[-84,38]]]}',
- 'dateline': '{"type":"Polygon","coordinates":[[[169,70],[169,60],[192,60],[192,70],[169,70]]]}',
- 'dateline2': '{"type":"Polygon","coordinates":[[[170,60],[-170,60],[-170,70],[170,70],[170,60]]]}',
+ "nz": '{"type":"Polygon","coordinates":[[[174,-38],[176,-38],[176,-40],[174,-40],[174,-38]]]}',
+ "ohio": '{"type": "Polygon","coordinates": [[[-84,38],[-84,40],[-80,42],[-80,38],[-84,38]]]}',
+ "dateline": '{"type":"Polygon","coordinates":[[[169,70],[169,60],[192,60],[192,70],[169,70]]]}',
+ "dateline2": '{"type":"Polygon","coordinates":[[[170,60],[-170,60],[-170,70],[170,70],[170,60]]]}',
}
class TestAction(SpatialTestBase):
-
def teardown(self):
helpers.reset_db()
def test_spatial_query(self):
dataset = factories.Dataset(
- extras=[{'key': 'spatial',
- 'value': self.geojson_examples['point']}]
+ extras=[
+ {"key": "spatial", "value": self.geojson_examples["point"]}
+ ]
)
result = helpers.call_action(
- 'package_search',
- extras={'ext_bbox': '-180,-90,180,90'})
+ "package_search", extras={"ext_bbox": "-180,-90,180,90"}
+ )
- assert_equals(result['count'], 1)
- assert_equals(result['results'][0]['id'], dataset['id'])
+ assert_equals(result["count"], 1)
+ assert_equals(result["results"][0]["id"], dataset["id"])
def test_spatial_query_outside_bbox(self):
factories.Dataset(
- extras=[{'key': 'spatial',
- 'value': self.geojson_examples['point']}]
+ extras=[
+ {"key": "spatial", "value": self.geojson_examples["point"]}
+ ]
)
result = helpers.call_action(
- 'package_search',
- extras={'ext_bbox': '-10,-20,10,20'})
+ "package_search", extras={"ext_bbox": "-10,-20,10,20"}
+ )
- assert_equals(result['count'], 0)
+ assert_equals(result["count"], 0)
def test_spatial_query_wrong_bbox(self):
- assert_raises(SearchError, helpers.call_action,
- 'package_search', extras={'ext_bbox': '-10,-20,10,a'})
+ assert_raises(
+ SearchError,
+ helpers.call_action,
+ "package_search",
+ extras={"ext_bbox": "-10,-20,10,a"},
+ )
def test_spatial_query_nz(self):
dataset = factories.Dataset(
- extras=[{'key': 'spatial',
- 'value': extents['nz']}]
+ extras=[{"key": "spatial", "value": extents["nz"]}]
)
result = helpers.call_action(
- 'package_search',
- extras={'ext_bbox': '56,-54,189,-28'})
+ "package_search", extras={"ext_bbox": "56,-54,189,-28"}
+ )
- assert_equals(result['count'], 1)
- assert_equals(result['results'][0]['id'], dataset['id'])
+ assert_equals(result["count"], 1)
+ assert_equals(result["results"][0]["id"], dataset["id"])
def test_spatial_query_nz_wrap(self):
dataset = factories.Dataset(
- extras=[{'key': 'spatial',
- 'value': extents['nz']}]
+ extras=[{"key": "spatial", "value": extents["nz"]}]
)
result = helpers.call_action(
- 'package_search',
- extras={'ext_bbox': '-203,-54,-167,-28'})
+ "package_search", extras={"ext_bbox": "-203,-54,-167,-28"}
+ )
- assert_equals(result['count'], 1)
- assert_equals(result['results'][0]['id'], dataset['id'])
+ assert_equals(result["count"], 1)
+ assert_equals(result["results"][0]["id"], dataset["id"])
def test_spatial_query_ohio(self):
dataset = factories.Dataset(
- extras=[{'key': 'spatial',
- 'value': extents['ohio']}]
+ extras=[{"key": "spatial", "value": extents["ohio"]}]
)
result = helpers.call_action(
- 'package_search',
- extras={'ext_bbox': '-110,37,-78,53'})
+ "package_search", extras={"ext_bbox": "-110,37,-78,53"}
+ )
- assert_equals(result['count'], 1)
- assert_equals(result['results'][0]['id'], dataset['id'])
+ assert_equals(result["count"], 1)
+ assert_equals(result["results"][0]["id"], dataset["id"])
def test_spatial_query_ohio_wrap(self):
dataset = factories.Dataset(
- extras=[{'key': 'spatial',
- 'value': extents['ohio']}]
+ extras=[{"key": "spatial", "value": extents["ohio"]}]
)
result = helpers.call_action(
- 'package_search',
- extras={'ext_bbox': '258,37,281,51'})
+ "package_search", extras={"ext_bbox": "258,37,281,51"}
+ )
- assert_equals(result['count'], 1)
- assert_equals(result['results'][0]['id'], dataset['id'])
+ assert_equals(result["count"], 1)
+ assert_equals(result["results"][0]["id"], dataset["id"])
def test_spatial_query_dateline_1(self):
dataset = factories.Dataset(
- extras=[{'key': 'spatial',
- 'value': extents['dateline']}]
+ extras=[{"key": "spatial", "value": extents["dateline"]}]
)
result = helpers.call_action(
- 'package_search',
- extras={'ext_bbox': '-197,56,-128,70'})
+ "package_search", extras={"ext_bbox": "-197,56,-128,70"}
+ )
- assert_equals(result['count'], 1)
- assert_equals(result['results'][0]['id'], dataset['id'])
+ assert_equals(result["count"], 1)
+ assert_equals(result["results"][0]["id"], dataset["id"])
def test_spatial_query_dateline_2(self):
dataset = factories.Dataset(
- extras=[{'key': 'spatial',
- 'value': extents['dateline']}]
+ extras=[{"key": "spatial", "value": extents["dateline"]}]
)
result = helpers.call_action(
- 'package_search',
- extras={'ext_bbox': '162,54,237,70'})
+ "package_search", extras={"ext_bbox": "162,54,237,70"}
+ )
- assert_equals(result['count'], 1)
- assert_equals(result['results'][0]['id'], dataset['id'])
+ assert_equals(result["count"], 1)
+ assert_equals(result["results"][0]["id"], dataset["id"])
def test_spatial_query_dateline_3(self):
dataset = factories.Dataset(
- extras=[{'key': 'spatial',
- 'value': extents['dateline2']}]
+ extras=[{"key": "spatial", "value": extents["dateline2"]}]
)
result = helpers.call_action(
- 'package_search',
- extras={'ext_bbox': '-197,56,-128,70'})
+ "package_search", extras={"ext_bbox": "-197,56,-128,70"}
+ )
- assert_equals(result['count'], 1)
- assert_equals(result['results'][0]['id'], dataset['id'])
+ assert_equals(result["count"], 1)
+ assert_equals(result["results"][0]["id"], dataset["id"])
def test_spatial_query_dateline_4(self):
dataset = factories.Dataset(
- extras=[{'key': 'spatial',
- 'value': extents['dateline2']}]
+ extras=[{"key": "spatial", "value": extents["dateline2"]}]
)
result = helpers.call_action(
- 'package_search',
- extras={'ext_bbox': '162,54,237,70'})
-
- assert_equals(result['count'], 1)
- assert_equals(result['results'][0]['id'], dataset['id'])
+ "package_search", extras={"ext_bbox": "162,54,237,70"}
+ )
+ assert_equals(result["count"], 1)
+ assert_equals(result["results"][0]["id"], dataset["id"])
class TestHarvestedMetadataAPI(SpatialTestBase, helpers.FunctionalTestBase):
-
def test_api(self):
try:
- from ckanext.harvest.model import (HarvestObject, HarvestJob,
- HarvestSource,
- HarvestObjectExtra)
+ from ckanext.harvest.model import (
+ HarvestObject,
+ HarvestJob,
+ HarvestSource,
+ HarvestObjectExtra,
+ )
except ImportError:
- raise SkipTest('The harvester extension is needed for these tests')
+ raise SkipTest("The harvester extension is needed for these tests")
- content1 = 'Content 1'
+ content1 = "Content 1"
ho1 = HarvestObject(
- guid='test-ho-1',
- job=HarvestJob(source=HarvestSource(url='http://', type='xx')),
- content=content1)
+ guid="test-ho-1",
+ job=HarvestJob(source=HarvestSource(url="http://", type="xx")),
+ content=content1,
+ )
- content2 = 'Content 2'
- original_content2 = 'Original Content 2'
+ content2 = "Content 2"
+ original_content2 = "Original Content 2"
ho2 = HarvestObject(
- guid='test-ho-2',
- job=HarvestJob(source=HarvestSource(url='http://', type='xx')),
- content=content2)
+ guid="test-ho-2",
+ job=HarvestJob(source=HarvestSource(url="http://", type="xx")),
+ content=content2,
+ )
hoe = HarvestObjectExtra(
- key='original_document',
- value=original_content2,
- object=ho2)
+ key="original_document", value=original_content2, object=ho2
+ )
Session.add(ho1)
Session.add(ho2)
@@ -210,65 +210,69 @@ class TestHarvestedMetadataAPI(SpatialTestBase, helpers.FunctionalTestBase):
app = self._get_test_app()
# Test redirects for old URLs
- url = '/api/2/rest/harvestobject/{0}/xml'.format(object_id_1)
+ url = "/api/2/rest/harvestobject/{0}/xml".format(object_id_1)
r = app.get(url)
assert_equals(r.status_int, 301)
- assert ('/harvest/object/{0}'.format(object_id_1)
- in r.headers['Location'])
+ assert (
+ "/harvest/object/{0}".format(object_id_1) in r.headers["Location"]
+ )
- url = '/api/2/rest/harvestobject/{0}/html'.format(object_id_1)
+ url = "/api/2/rest/harvestobject/{0}/html".format(object_id_1)
r = app.get(url)
assert_equals(r.status_int, 301)
- assert ('/harvest/object/{0}/html'.format(object_id_1)
- in r.headers['Location'])
+ assert (
+ "/harvest/object/{0}/html".format(object_id_1)
+ in r.headers["Location"]
+ )
# Access object content
- url = '/harvest/object/{0}'.format(object_id_1)
+ url = "/harvest/object/{0}".format(object_id_1)
r = app.get(url)
assert_equals(r.status_int, 200)
- assert_equals(r.headers['Content-Type'],
- 'application/xml; charset=utf-8')
+ assert_equals(
+ r.headers["Content-Type"], "application/xml; charset=utf-8"
+ )
assert_equals(
r.body,
- '\nContent 1')
+ '\nContent 1',
+ )
# Access original content in object extra (if present)
- url = '/harvest/object/{0}/original'.format(object_id_1)
+ url = "/harvest/object/{0}/original".format(object_id_1)
r = app.get(url, status=404)
assert_equals(r.status_int, 404)
- url = '/harvest/object/{0}/original'.format(object_id_2)
+ url = "/harvest/object/{0}/original".format(object_id_2)
r = app.get(url)
assert_equals(r.status_int, 200)
- assert_equals(r.headers['Content-Type'],
- 'application/xml; charset=utf-8')
+ assert_equals(
+ r.headers["Content-Type"], "application/xml; charset=utf-8"
+ )
assert_equals(
r.body,
'\n'
- + 'Original Content 2')
+ + "Original Content 2",
+ )
# Access HTML transformation
- url = '/harvest/object/{0}/html'.format(object_id_1)
+ url = "/harvest/object/{0}/html".format(object_id_1)
r = app.get(url)
assert_equals(r.status_int, 200)
- assert_equals(r.headers['Content-Type'],
- 'text/html; charset=utf-8')
- assert 'GEMINI record about' in r.body
+ assert_equals(r.headers["Content-Type"], "text/html; charset=utf-8")
+ assert "GEMINI record about" in r.body
- url = '/harvest/object/{0}/html/original'.format(object_id_1)
+ url = "/harvest/object/{0}/html/original".format(object_id_1)
r = app.get(url, status=404)
assert_equals(r.status_int, 404)
- url = '/harvest/object/{0}/html'.format(object_id_2)
+ url = "/harvest/object/{0}/html".format(object_id_2)
r = app.get(url)
assert_equals(r.status_int, 200)
- assert_equals(r.headers['Content-Type'],
- 'text/html; charset=utf-8')
- assert 'GEMINI record about' in r.body
+ assert_equals(r.headers["Content-Type"], "text/html; charset=utf-8")
+ assert "GEMINI record about" in r.body
- url = '/harvest/object/{0}/html/original'.format(object_id_2)
+ url = "/harvest/object/{0}/html/original".format(object_id_2)
r = app.get(url)
assert_equals(r.status_int, 200)
- assert_equals(r.headers['Content-Type'],
- 'text/html; charset=utf-8')
- assert 'GEMINI record about' in r.body
+ assert_equals(r.headers["Content-Type"], "text/html; charset=utf-8")
+ assert "GEMINI record about" in r.body
diff --git a/ckanext/spatial/tests/test_csw_client.py b/ckanext/spatial/tests/test_csw_client.py
index de64323..1c0408e 100644
--- a/ckanext/spatial/tests/test_csw_client.py
+++ b/ckanext/spatial/tests/test_csw_client.py
@@ -15,27 +15,29 @@ class CkanServerCase(object):
@staticmethod
def _system(cmd):
import subprocess
+
(status, output) = subprocess.getstatusoutput(cmd)
if status:
raise Exception("Couldn't execute cmd: %s: %s" % (cmd, output))
@classmethod
def _paster(cls, cmd, config_path_rel):
- config_path = os.path.join(config['here'], config_path_rel)
- cls._system('paster --plugin ckan %s --config=%s' % (cmd, config_path))
+ config_path = os.path.join(config["here"], config_path_rel)
+ cls._system("paster --plugin ckan %s --config=%s" % (cmd, config_path))
@staticmethod
def _start_ckan_server(config_file=None):
if not config_file:
- config_file = config['__file__']
+ config_file = config["__file__"]
config_path = config_file
import subprocess
- process = subprocess.Popen(['paster', 'serve', config_path])
+
+ process = subprocess.Popen(["paster", "serve", config_path])
return process
@staticmethod
- def _wait_for_url(url='http://127.0.0.1:5000/', timeout=15):
- for i in range(int(timeout)*100):
+ def _wait_for_url(url="http://127.0.0.1:5000/", timeout=15):
+ for i in range(int(timeout) * 100):
try:
urlopen(url)
except URLError:
@@ -48,7 +50,10 @@ class CkanServerCase(object):
pid = process.pid
pid = int(pid)
if os.system("kill -9 %d" % pid):
- raise Exception("Can't kill foreign CKAN instance (pid: %d)." % pid)
+ raise Exception(
+ "Can't kill foreign CKAN instance (pid: %d)." % pid
+ )
+
class CkanProcess(CkanServerCase):
@classmethod
diff --git a/ckanext/spatial/tests/test_harvest.py b/ckanext/spatial/tests/test_harvest.py
index 11423af..39a6a45 100644
--- a/ckanext/spatial/tests/test_harvest.py
+++ b/ckanext/spatial/tests/test_harvest.py
@@ -10,7 +10,10 @@ from nose.tools import assert_equal, assert_in, assert_raises
from ckan.lib.base import config
from ckan import model
from ckan.model import Session, Package, Group, User
-from ckan.logic.schema import default_update_package_schema, default_create_package_schema
+from ckan.logic.schema import (
+ default_update_package_schema,
+ default_create_package_schema,
+)
from ckan.logic import get_action
try:
@@ -18,11 +21,13 @@ try:
except ImportError:
from ckan.tests.helpers import call_action
-from ckanext.harvest.model import (HarvestSource, HarvestJob, HarvestObject)
+from ckanext.harvest.model import HarvestSource, HarvestJob, HarvestObject
from ckanext.spatial.validation import Validators
-from ckanext.spatial.harvesters.gemini import (GeminiDocHarvester,
- GeminiWafHarvester,
- GeminiHarvester)
+from ckanext.spatial.harvesters.gemini import (
+ GeminiDocHarvester,
+ GeminiWafHarvester,
+ GeminiHarvester,
+)
from ckanext.spatial.harvesters.base import SpatialHarvester
from ckanext.spatial.tests.base import SpatialTestBase
@@ -33,59 +38,69 @@ serve()
class HarvestFixtureBase(SpatialTestBase):
-
def setup(self):
# Add sysadmin user
- harvest_user = model.User(name=u'harvest', password=u'test', sysadmin=True)
+ harvest_user = model.User(
+ name=u"harvest", password=u"test", sysadmin=True
+ )
Session.add(harvest_user)
Session.commit()
package_schema = default_update_package_schema()
- self.context ={'model':model,
- 'session':Session,
- 'user':u'harvest',
- 'schema':package_schema,
- 'api_version': '2'}
+ self.context = {
+ "model": model,
+ "session": Session,
+ "user": u"harvest",
+ "schema": package_schema,
+ "api_version": "2",
+ }
def teardown(self):
- model.repo.rebuild_db()
+ model.repo.rebuild_db()
- def _create_job(self,source_id):
+ def _create_job(self, source_id):
# Create a job
- context ={'model':model,
- 'session':Session,
- 'user':u'harvest'}
+ context = {"model": model, "session": Session, "user": u"harvest"}
- job_dict=get_action('harvest_job_create')(context,{'source_id':source_id})
- job = HarvestJob.get(job_dict['id'])
+ job_dict = get_action("harvest_job_create")(
+ context, {"source_id": source_id}
+ )
+ job = HarvestJob.get(job_dict["id"])
assert job
return job
def _create_source_and_job(self, source_fixture):
- context ={'model':model,
- 'session':Session,
- 'user':u'harvest'}
+ context = {"model": model, "session": Session, "user": u"harvest"}
- if config.get('ckan.harvest.auth.profile') == u'publisher' \
- and not 'publisher_id' in source_fixture:
- source_fixture['publisher_id'] = self.publisher.id
+ if (
+ config.get("ckan.harvest.auth.profile") == u"publisher"
+ and not "publisher_id" in source_fixture
+ ):
+ source_fixture["publisher_id"] = self.publisher.id
- source_dict=get_action('harvest_source_create')(context,source_fixture)
- source = HarvestSource.get(source_dict['id'])
+ source_dict = get_action("harvest_source_create")(
+ context, source_fixture
+ )
+ source = HarvestSource.get(source_dict["id"])
assert source
job = self._create_job(source.id)
return source, job
- def _run_job_for_single_document(self,job,force_import=False,expect_gather_errors=False,expect_obj_errors=False):
+ def _run_job_for_single_document(
+ self,
+ job,
+ force_import=False,
+ expect_gather_errors=False,
+ expect_obj_errors=False,
+ ):
harvester = GeminiDocHarvester()
harvester.force_import = force_import
-
object_ids = harvester.gather_stage(job)
assert object_ids, len(object_ids) == 1
if expect_gather_errors:
@@ -105,33 +120,33 @@ class HarvestFixtureBase(SpatialTestBase):
else:
assert len(obj.errors) == 0
- job.status = u'Finished'
+ job.status = u"Finished"
job.save()
return obj
-class TestHarvest(HarvestFixtureBase):
+class TestHarvest(HarvestFixtureBase):
@classmethod
def setup_class(cls):
- SpatialHarvester._validator = Validators(profiles=['gemini2'])
+ SpatialHarvester._validator = Validators(profiles=["gemini2"])
HarvestFixtureBase.setup_class()
def clean_tags(self, tags):
- return [{u'name': x['name']} for x in tags]
+ return [{u"name": x["name"]} for x in tags]
def find_extra(self, pkg, key):
- values = [e['value'] for e in pkg['extras'] if e['key'] == key]
+ values = [e["value"] for e in pkg["extras"] if e["key"] == key]
return values[0] if len(values) == 1 else None
def test_harvest_basic(self):
# Create source
source_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1-waf/index.html',
- 'source_type': u'gemini-waf'
+ "title": "Test Source",
+ "name": "test-source",
+ "url": u"http://127.0.0.1:8999/gemini2.1-waf/index.html",
+ "source_type": u"gemini-waf",
}
source, job = self._create_source_and_job(source_fixture)
@@ -153,7 +168,7 @@ class TestHarvest(HarvestFixtureBase):
objects.append(obj)
harvester.import_stage(obj)
- pkgs = Session.query(Package).filter(Package.type!=u'harvest').all()
+ pkgs = Session.query(Package).filter(Package.type != u"harvest").all()
assert_equal(len(pkgs), 2)
@@ -167,10 +182,10 @@ class TestHarvest(HarvestFixtureBase):
# Create source
source_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/service1.xml',
- 'source_type': u'gemini-single'
+ "title": "Test Source",
+ "name": "test-source",
+ "url": u"http://127.0.0.1:8999/gemini2.1/service1.xml",
+ "source_type": u"gemini-single",
}
source, job = self._create_source_and_job(source_fixture)
@@ -188,203 +203,229 @@ class TestHarvest(HarvestFixtureBase):
obj = HarvestObject.get(object_ids[0])
assert obj, obj.content
- assert obj.guid == u'test-service-1'
+ assert obj.guid == u"test-service-1"
harvester.import_stage(obj)
# No object errors
assert len(obj.errors) == 0
- package_dict = get_action('package_show')(self.context,{'id':obj.package_id})
+ package_dict = get_action("package_show")(
+ self.context, {"id": obj.package_id}
+ )
assert package_dict
expected = {
- 'name': u'one-scotland-address-gazetteer-web-map-service-wms',
- 'title': u'One Scotland Address Gazetteer Web Map Service (WMS)',
- 'tags': [{u'name': u'Addresses'}, {u'name': u'Scottish National Gazetteer'}],
- 'notes': u'This service displays its contents at larger scale than 1:10000. [edited]',
+ "name": u"one-scotland-address-gazetteer-web-map-service-wms",
+ "title": u"One Scotland Address Gazetteer Web Map Service (WMS)",
+ "tags": [
+ {u"name": u"Addresses"},
+ {u"name": u"Scottish National Gazetteer"},
+ ],
+ "notes": u"This service displays its contents at larger scale than 1:10000. [edited]",
}
- package_dict['tags'] = self.clean_tags(package_dict['tags'])
+ package_dict["tags"] = self.clean_tags(package_dict["tags"])
- for key,value in expected.items():
+ for key, value in expected.items():
if not package_dict[key] == value:
- raise AssertionError('Unexpected value for %s: %s (was expecting %s)' % \
- (key, package_dict[key], value))
+ raise AssertionError(
+ "Unexpected value for %s: %s (was expecting %s)"
+ % (key, package_dict[key], value)
+ )
- if config.get('ckan.harvest.auth.profile') == u'publisher':
- assert package_dict['groups'] == [self.publisher.id]
+ if config.get("ckan.harvest.auth.profile") == u"publisher":
+ assert package_dict["groups"] == [self.publisher.id]
expected_extras = {
# Basic
- 'guid': obj.guid,
- 'UKLP': u'True',
- 'resource-type': u'service',
- 'access_constraints': u'["No restriction on public access"]',
- 'responsible-party': u'The Improvement Service (owner)',
- 'provider':u'The Improvement Service',
- 'contact-email': u'OSGCM@improvementservice.org.uk',
+ "guid": obj.guid,
+ "UKLP": u"True",
+ "resource-type": u"service",
+ "access_constraints": u'["No restriction on public access"]',
+ "responsible-party": u"The Improvement Service (owner)",
+ "provider": u"The Improvement Service",
+ "contact-email": u"OSGCM@improvementservice.org.uk",
# Spatial
- 'bbox-east-long': u'0.5242365625',
- 'bbox-north-lat': u'61.0243',
- 'bbox-south-lat': u'54.4764484375',
- 'bbox-west-long': u'-9.099786875',
- 'spatial': u'{"type": "Polygon", "coordinates": [[[0.5242365625, 54.4764484375], [-9.099786875, 54.4764484375], [-9.099786875, 61.0243], [0.5242365625, 61.0243], [0.5242365625, 54.4764484375]]]}',
+ "bbox-east-long": u"0.5242365625",
+ "bbox-north-lat": u"61.0243",
+ "bbox-south-lat": u"54.4764484375",
+ "bbox-west-long": u"-9.099786875",
+ "spatial": u'{"type": "Polygon", "coordinates": [[[0.5242365625, 54.4764484375], [-9.099786875, 54.4764484375], [-9.099786875, 61.0243], [0.5242365625, 61.0243], [0.5242365625, 54.4764484375]]]}',
# Other
- 'coupled-resource': u'[{"href": ["http://scotgovsdi.edina.ac.uk/srv/en/csw?service=CSW&request=GetRecordById&version=2.0.2&outputSchema=http://www.isotc211.org/2005/gmd&elementSetName=full&id=250ea276-48e2-4189-8a89-fcc4ca92d652"], "uuid": ["250ea276-48e2-4189-8a89-fcc4ca92d652"], "title": []}]',
- 'dataset-reference-date': u'[{"type": "publication", "value": "2011-09-08"}]',
- 'frequency-of-update': u'daily',
- 'licence': u'["Use of the One Scotland Gazetteer data used by this this service is available to any organisation that is a member of the One Scotland Mapping Agreement. It is not currently commercially available", "http://www.test.gov.uk/licenseurl"]',
- 'licence_url': u'http://www.test.gov.uk/licenseurl',
- 'metadata-date': u'2011-09-08T16:07:32',
- 'metadata-language': u'eng',
- 'spatial-data-service-type': u'other',
- 'spatial-reference-system': u'OSGB 1936 / British National Grid (EPSG:27700)',
- 'temporal_coverage-from': u'["1904-06-16"]',
- 'temporal_coverage-to': u'["2004-06-16"]',
- }
-
- for key,value in expected_extras.items():
- extra_value = self.find_extra(package_dict, key)
- if extra_value is None:
- raise AssertionError('Extra %s not present in package' % key)
-
- if not extra_value == value:
- raise AssertionError('Unexpected value for extra %s: %s (was expecting %s)' % \
- (key, package_dict['extras'][key], value))
-
- expected_resource = {
- 'ckan_recommended_wms_preview': 'True',
- 'description': 'Link to the GetCapabilities request for this service',
- 'name': 'Web Map Service (WMS)',
- 'resource_locator_function': 'download',
- 'resource_locator_protocol': 'OGC:WMS-1.3.0-http-get-capabilities',
- 'url': u'http://127.0.0.1:8999/wms/capabilities.xml',
- 'verified': 'True',
- }
-
- resource = package_dict['resources'][0]
- for key,value in expected_resource.items():
- if not key in resource:
- raise AssertionError('Expected key not in resource: %s' % (key))
- if not resource[key] == value:
- raise AssertionError('Unexpected value in resource for %s: %s (was expecting %s)' % \
- (key, resource[key], value))
- assert datetime.strptime(resource['verified_date'],'%Y-%m-%dT%H:%M:%S.%f').date() == date.today()
- assert resource['format'].lower() == 'wms'
-
- def test_harvest_fields_dataset(self):
-
- # Create source
- source_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/dataset1.xml',
- 'source_type': u'gemini-single'
- }
-
- source, job = self._create_source_and_job(source_fixture)
-
- harvester = GeminiDocHarvester()
-
- object_ids = harvester.gather_stage(job)
- assert object_ids, len(object_ids) == 1
-
- # No gather errors
- assert len(job.gather_errors) == 0
-
- # Fetch stage always returns True for Single Doc harvesters
- assert harvester.fetch_stage(object_ids) == True
-
- obj = HarvestObject.get(object_ids[0])
- assert obj, obj.content
- assert obj.guid == u'test-dataset-1'
-
- harvester.import_stage(obj)
-
- # No object errors
- assert len(obj.errors) == 0
-
- package_dict = get_action('package_show')(self.context,{'id':obj.package_id})
-
- assert package_dict
-
- expected = {
- 'name': u'country-parks-scotland',
- 'title': u'Country Parks (Scotland)',
- 'tags': [{u'name': u'Nature conservation'}],
- 'notes': u'Parks are set up by Local Authorities to provide open-air recreation facilities close to towns and cities. [edited]'
- }
-
- package_dict['tags'] = self.clean_tags(package_dict['tags'])
-
- for key,value in expected.items():
- if not package_dict[key] == value:
- raise AssertionError('Unexpected value for %s: %s (was expecting %s)' % \
- (key, package_dict[key], value))
-
- if config.get('ckan.harvest.auth.profile') == u'publisher':
- assert package_dict['groups'] == [self.publisher.id]
-
- expected_extras = {
- # Basic
- 'guid': obj.guid,
- 'resource-type': u'dataset',
- 'responsible-party': u'Scottish Natural Heritage (custodian, distributor)',
- 'access_constraints': u'["Copyright Scottish Natural Heritage"]',
- 'contact-email': u'data_supply@snh.gov.uk',
- 'provider':'',
- # Spatial
- 'bbox-east-long': u'0.205857204',
- 'bbox-north-lat': u'61.06066944',
- 'bbox-south-lat': u'54.529947158',
- 'bbox-west-long': u'-8.97114288',
- 'spatial': u'{"type": "Polygon", "coordinates": [[[0.205857204, 54.529947158], [-8.97114288, 54.529947158], [-8.97114288, 61.06066944], [0.205857204, 61.06066944], [0.205857204, 54.529947158]]]}',
- # Other
- 'coupled-resource': u'[]',
- 'dataset-reference-date': u'[{"type": "creation", "value": "2004-02"}, {"type": "revision", "value": "2006-07-03"}]',
- 'frequency-of-update': u'irregular',
- 'licence': u'["Reference and PSMA Only", "http://www.test.gov.uk/licenseurl"]',
- 'licence_url': u'http://www.test.gov.uk/licenseurl',
- 'metadata-date': u'2011-09-23T10:06:08',
- 'metadata-language': u'eng',
- 'spatial-reference-system': u'urn:ogc:def:crs:EPSG::27700',
- 'temporal_coverage-from': u'["1998"]',
- 'temporal_coverage-to': u'["2010"]',
+ "coupled-resource": u'[{"href": ["http://scotgovsdi.edina.ac.uk/srv/en/csw?service=CSW&request=GetRecordById&version=2.0.2&outputSchema=http://www.isotc211.org/2005/gmd&elementSetName=full&id=250ea276-48e2-4189-8a89-fcc4ca92d652"], "uuid": ["250ea276-48e2-4189-8a89-fcc4ca92d652"], "title": []}]',
+ "dataset-reference-date": u'[{"type": "publication", "value": "2011-09-08"}]',
+ "frequency-of-update": u"daily",
+ "licence": u'["Use of the One Scotland Gazetteer data used by this this service is available to any organisation that is a member of the One Scotland Mapping Agreement. It is not currently commercially available", "http://www.test.gov.uk/licenseurl"]',
+ "licence_url": u"http://www.test.gov.uk/licenseurl",
+ "metadata-date": u"2011-09-08T16:07:32",
+ "metadata-language": u"eng",
+ "spatial-data-service-type": u"other",
+ "spatial-reference-system": u"OSGB 1936 / British National Grid (EPSG:27700)",
+ "temporal_coverage-from": u'["1904-06-16"]',
+ "temporal_coverage-to": u'["2004-06-16"]',
}
for key, value in expected_extras.items():
extra_value = self.find_extra(package_dict, key)
if extra_value is None:
- raise AssertionError('Extra %s not present in package' % key)
+ raise AssertionError("Extra %s not present in package" % key)
if not extra_value == value:
- raise AssertionError('Unexpected value for extra %s: %s (was expecting %s)' % \
- (key, package_dict['extras'][key], value))
+ raise AssertionError(
+ "Unexpected value for extra %s: %s (was expecting %s)"
+ % (key, package_dict["extras"][key], value)
+ )
expected_resource = {
- 'description': 'Test Resource Description',
- 'format': u'',
- 'name': 'Test Resource Name',
- 'resource_locator_function': 'download',
- 'resource_locator_protocol': 'test-protocol',
- 'url': u'https://gateway.snh.gov.uk/pls/apex_ddtdb2/f?p=101',
+ "ckan_recommended_wms_preview": "True",
+ "description": "Link to the GetCapabilities request for this service",
+ "name": "Web Map Service (WMS)",
+ "resource_locator_function": "download",
+ "resource_locator_protocol": "OGC:WMS-1.3.0-http-get-capabilities",
+ "url": u"http://127.0.0.1:8999/wms/capabilities.xml",
+ "verified": "True",
}
- resource = package_dict['resources'][0]
- for key,value in expected_resource.items():
+ resource = package_dict["resources"][0]
+ for key, value in expected_resource.items():
+ if not key in resource:
+ raise AssertionError(
+ "Expected key not in resource: %s" % (key)
+ )
if not resource[key] == value:
- raise AssertionError('Unexpected value in resource for %s: %s (was expecting %s)' % \
- (key, resource[key], value))
+ raise AssertionError(
+ "Unexpected value in resource for %s: %s (was expecting %s)"
+ % (key, resource[key], value)
+ )
+ assert (
+ datetime.strptime(
+ resource["verified_date"], "%Y-%m-%dT%H:%M:%S.%f"
+ ).date()
+ == date.today()
+ )
+ assert resource["format"].lower() == "wms"
+
+ def test_harvest_fields_dataset(self):
+
+ # Create source
+ source_fixture = {
+ "title": "Test Source",
+ "name": "test-source",
+ "url": u"http://127.0.0.1:8999/gemini2.1/dataset1.xml",
+ "source_type": u"gemini-single",
+ }
+
+ source, job = self._create_source_and_job(source_fixture)
+
+ harvester = GeminiDocHarvester()
+
+ object_ids = harvester.gather_stage(job)
+ assert object_ids, len(object_ids) == 1
+
+ # No gather errors
+ assert len(job.gather_errors) == 0
+
+ # Fetch stage always returns True for Single Doc harvesters
+ assert harvester.fetch_stage(object_ids) == True
+
+ obj = HarvestObject.get(object_ids[0])
+ assert obj, obj.content
+ assert obj.guid == u"test-dataset-1"
+
+ harvester.import_stage(obj)
+
+ # No object errors
+ assert len(obj.errors) == 0
+
+ package_dict = get_action("package_show")(
+ self.context, {"id": obj.package_id}
+ )
+
+ assert package_dict
+
+ expected = {
+ "name": u"country-parks-scotland",
+ "title": u"Country Parks (Scotland)",
+ "tags": [{u"name": u"Nature conservation"}],
+ "notes": u"Parks are set up by Local Authorities to provide open-air recreation facilities close to towns and cities. [edited]",
+ }
+
+ package_dict["tags"] = self.clean_tags(package_dict["tags"])
+
+ for key, value in expected.items():
+ if not package_dict[key] == value:
+ raise AssertionError(
+ "Unexpected value for %s: %s (was expecting %s)"
+ % (key, package_dict[key], value)
+ )
+
+ if config.get("ckan.harvest.auth.profile") == u"publisher":
+ assert package_dict["groups"] == [self.publisher.id]
+
+ expected_extras = {
+ # Basic
+ "guid": obj.guid,
+ "resource-type": u"dataset",
+ "responsible-party": u"Scottish Natural Heritage (custodian, distributor)",
+ "access_constraints": u'["Copyright Scottish Natural Heritage"]',
+ "contact-email": u"data_supply@snh.gov.uk",
+ "provider": "",
+ # Spatial
+ "bbox-east-long": u"0.205857204",
+ "bbox-north-lat": u"61.06066944",
+ "bbox-south-lat": u"54.529947158",
+ "bbox-west-long": u"-8.97114288",
+ "spatial": u'{"type": "Polygon", "coordinates": [[[0.205857204, 54.529947158], [-8.97114288, 54.529947158], [-8.97114288, 61.06066944], [0.205857204, 61.06066944], [0.205857204, 54.529947158]]]}',
+ # Other
+ "coupled-resource": u"[]",
+ "dataset-reference-date": u'[{"type": "creation", "value": "2004-02"}, {"type": "revision", "value": "2006-07-03"}]',
+ "frequency-of-update": u"irregular",
+ "licence": u'["Reference and PSMA Only", "http://www.test.gov.uk/licenseurl"]',
+ "licence_url": u"http://www.test.gov.uk/licenseurl",
+ "metadata-date": u"2011-09-23T10:06:08",
+ "metadata-language": u"eng",
+ "spatial-reference-system": u"urn:ogc:def:crs:EPSG::27700",
+ "temporal_coverage-from": u'["1998"]',
+ "temporal_coverage-to": u'["2010"]',
+ }
+
+ for key, value in expected_extras.items():
+ extra_value = self.find_extra(package_dict, key)
+ if extra_value is None:
+ raise AssertionError("Extra %s not present in package" % key)
+
+ if not extra_value == value:
+ raise AssertionError(
+ "Unexpected value for extra %s: %s (was expecting %s)"
+ % (key, package_dict["extras"][key], value)
+ )
+
+ expected_resource = {
+ "description": "Test Resource Description",
+ "format": u"",
+ "name": "Test Resource Name",
+ "resource_locator_function": "download",
+ "resource_locator_protocol": "test-protocol",
+ "url": u"https://gateway.snh.gov.uk/pls/apex_ddtdb2/f?p=101",
+ }
+
+ resource = package_dict["resources"][0]
+ for key, value in expected_resource.items():
+ if not resource[key] == value:
+ raise AssertionError(
+ "Unexpected value in resource for %s: %s (was expecting %s)"
+ % (key, resource[key], value)
+ )
def test_harvest_error_bad_xml(self):
# Create source
source_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/error_bad_xml.xml',
- 'source_type': u'gemini-single'
+ "title": "Test Source",
+ "name": "test-source",
+ "url": u"http://127.0.0.1:8999/gemini2.1/error_bad_xml.xml",
+ "source_type": u"gemini-single",
}
source, job = self._create_source_and_job(source_fixture)
@@ -402,15 +443,15 @@ class TestHarvest(HarvestFixtureBase):
# Check gather errors
assert len(job.gather_errors) == 1
assert job.gather_errors[0].harvest_job_id == job.id
- assert 'Error parsing the document' in job.gather_errors[0].message
+ assert "Error parsing the document" in job.gather_errors[0].message
def test_harvest_error_404(self):
# Create source
source_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/not_there.xml',
- 'source_type': u'gemini-single'
+ "title": "Test Source",
+ "name": "test-source",
+ "url": u"http://127.0.0.1:8999/gemini2.1/not_there.xml",
+ "source_type": u"gemini-single",
}
source, job = self._create_source_and_job(source_fixture)
@@ -423,16 +464,16 @@ class TestHarvest(HarvestFixtureBase):
# Check gather errors
assert len(job.gather_errors) == 1
assert job.gather_errors[0].harvest_job_id == job.id
- assert 'Unable to get content for URL' in job.gather_errors[0].message
+ assert "Unable to get content for URL" in job.gather_errors[0].message
def test_harvest_error_validation(self):
# Create source
source_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/error_validation.xml',
- 'source_type': u'gemini-single'
+ "title": "Test Source",
+ "name": "test-source",
+ "url": u"http://127.0.0.1:8999/gemini2.1/error_validation.xml",
+ "source_type": u"gemini-single",
}
source, job = self._create_source_and_job(source_fixture)
@@ -452,7 +493,7 @@ class TestHarvest(HarvestFixtureBase):
obj = HarvestObject.get(object_ids[0])
assert obj, obj.content
- assert obj.guid == u'test-error-validation-1'
+ assert obj.guid == u"test-error-validation-1"
harvester.import_stage(obj)
@@ -462,27 +503,34 @@ class TestHarvest(HarvestFixtureBase):
message = obj.errors[0].message
- assert_in('One email address shall be provided', message)
- assert_in('Service type shall be one of \'discovery\', \'view\', \'download\', \'transformation\', \'invoke\' or \'other\' following INSPIRE generic names', message)
- assert_in('Limitations on public access code list value shall be \'otherRestrictions\'', message)
- assert_in('One organisation name shall be provided', message)
-
+ assert_in("One email address shall be provided", message)
+ assert_in(
+ "Service type shall be one of 'discovery', 'view', 'download', 'transformation', 'invoke' or 'other' following INSPIRE generic names",
+ message,
+ )
+ assert_in(
+ "Limitations on public access code list value shall be 'otherRestrictions'",
+ message,
+ )
+ assert_in("One organisation name shall be provided", message)
def test_harvest_update_records(self):
# Create source
source_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/dataset1.xml',
- 'source_type': u'gemini-single'
+ "title": "Test Source",
+ "name": "test-source",
+ "url": u"http://127.0.0.1:8999/gemini2.1/dataset1.xml",
+ "source_type": u"gemini-single",
}
source, first_job = self._create_source_and_job(source_fixture)
first_obj = self._run_job_for_single_document(first_job)
- first_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+ first_package_dict = get_action("package_show")(
+ self.context, {"id": first_obj.package_id}
+ )
# Package was created
assert first_package_dict
@@ -501,16 +549,22 @@ class TestHarvest(HarvestFixtureBase):
Session.refresh(first_obj)
Session.refresh(second_obj)
- second_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+ second_package_dict = get_action("package_show")(
+ self.context, {"id": first_obj.package_id}
+ )
# Package was not updated
- assert second_package_dict, first_package_dict['id'] == second_package_dict['id']
+ assert second_package_dict, (
+ first_package_dict["id"] == second_package_dict["id"]
+ )
assert not second_obj.package, not second_obj.package_id
assert second_obj.current == False, first_obj.current == True
# Create and run a third job, forcing the importing to simulate an update in the package
third_job = self._create_job(source.id)
- third_obj = self._run_job_for_single_document(third_job,force_import=True)
+ third_obj = self._run_job_for_single_document(
+ third_job, force_import=True
+ )
# For some reason first_obj does not get updated after the import_stage,
# and we have to force a refresh to get the actual DB values.
@@ -523,11 +577,17 @@ class TestHarvest(HarvestFixtureBase):
Session.refresh(second_obj)
Session.refresh(third_obj)
- third_package_dict = get_action('package_show')(self.context,{'id':third_obj.package_id})
+ third_package_dict = get_action("package_show")(
+ self.context, {"id": third_obj.package_id}
+ )
# Package was updated
- assert third_package_dict, first_package_dict['id'] == third_package_dict['id']
- assert third_obj.package, third_obj.package_id == first_package_dict['id']
+ assert third_package_dict, (
+ first_package_dict["id"] == third_package_dict["id"]
+ )
+ assert third_obj.package, (
+ third_obj.package_id == first_package_dict["id"]
+ )
assert third_obj.current == True
assert second_obj.current == False
assert first_obj.current == False
@@ -536,54 +596,63 @@ class TestHarvest(HarvestFixtureBase):
# Create source
source_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/service1.xml',
- 'source_type': u'gemini-single'
+ "title": "Test Source",
+ "name": "test-source",
+ "url": u"http://127.0.0.1:8999/gemini2.1/service1.xml",
+ "source_type": u"gemini-single",
}
source, first_job = self._create_source_and_job(source_fixture)
first_obj = self._run_job_for_single_document(first_job)
- first_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+ first_package_dict = get_action("package_show")(
+ self.context, {"id": first_obj.package_id}
+ )
# Package was created
assert first_package_dict
- assert first_package_dict['state'] == u'active'
+ assert first_package_dict["state"] == u"active"
assert first_obj.current == True
# Delete package
- first_package_dict['state'] = u'deleted'
- self.context.update({'id':first_package_dict['id']})
- updated_package_dict = get_action('package_update')(self.context,first_package_dict)
+ first_package_dict["state"] = u"deleted"
+ self.context.update({"id": first_package_dict["id"]})
+ updated_package_dict = get_action("package_update")(
+ self.context, first_package_dict
+ )
# Create and run a second job, the date has not changed, so the package should not be updated
# and remain deleted
- first_job.status = u'Finished'
+ first_job.status = u"Finished"
first_job.save()
second_job = self._create_job(source.id)
second_obj = self._run_job_for_single_document(second_job)
- second_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+ second_package_dict = get_action("package_show")(
+ self.context, {"id": first_obj.package_id}
+ )
# Package was not updated
- assert second_package_dict, updated_package_dict['id'] == second_package_dict['id']
+ assert second_package_dict, (
+ updated_package_dict["id"] == second_package_dict["id"]
+ )
assert not second_obj.package, not second_obj.package_id
assert second_obj.current == False, first_obj.current == True
-
# Harvest an updated document, with a more recent modified date, package should be
# updated and reactivated
- source.url = u'http://127.0.0.1:8999/gemini2.1/service1_newer.xml'
+ source.url = u"http://127.0.0.1:8999/gemini2.1/service1_newer.xml"
source.save()
third_job = self._create_job(source.id)
third_obj = self._run_job_for_single_document(third_job)
- third_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+ third_package_dict = get_action("package_show")(
+ self.context, {"id": first_obj.package_id}
+ )
Session.remove()
Session.add(first_obj)
@@ -595,35 +664,37 @@ class TestHarvest(HarvestFixtureBase):
Session.refresh(third_obj)
# Package was updated
- assert third_package_dict, third_package_dict['id'] == second_package_dict['id']
+ assert third_package_dict, (
+ third_package_dict["id"] == second_package_dict["id"]
+ )
assert third_obj.package, third_obj.package
assert third_obj.current == True, second_obj.current == False
assert first_obj.current == False
- assert 'NEWER' in third_package_dict['title']
- assert third_package_dict['state'] == u'active'
-
-
+ assert "NEWER" in third_package_dict["title"]
+ assert third_package_dict["state"] == u"active"
def test_harvest_different_sources_same_document(self):
# Create source1
source1_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/source1/same_dataset.xml',
- 'source_type': u'gemini-single'
+ "title": "Test Source",
+ "name": "test-source",
+ "url": u"http://127.0.0.1:8999/gemini2.1/source1/same_dataset.xml",
+ "source_type": u"gemini-single",
}
source1, first_job = self._create_source_and_job(source1_fixture)
first_obj = self._run_job_for_single_document(first_job)
- first_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+ first_package_dict = get_action("package_show")(
+ self.context, {"id": first_obj.package_id}
+ )
# Package was created
assert first_package_dict
- assert first_package_dict['state'] == u'active'
+ assert first_package_dict["state"] == u"active"
assert first_obj.current == True
# Harvest the same document, unchanged, from another source, the package
@@ -631,26 +702,32 @@ class TestHarvest(HarvestFixtureBase):
# (As of https://github.com/okfn/ckanext-inspire/commit/9fb67
# we are no longer throwing an exception when this happens)
source2_fixture = {
- 'title': 'Test Source 2',
- 'name': 'test-source-2',
- 'url': u'http://127.0.0.1:8999/gemini2.1/source2/same_dataset.xml',
- 'source_type': u'gemini-single'
+ "title": "Test Source 2",
+ "name": "test-source-2",
+ "url": u"http://127.0.0.1:8999/gemini2.1/source2/same_dataset.xml",
+ "source_type": u"gemini-single",
}
source2, second_job = self._create_source_and_job(source2_fixture)
second_obj = self._run_job_for_single_document(second_job)
- second_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+ second_package_dict = get_action("package_show")(
+ self.context, {"id": first_obj.package_id}
+ )
# Package was not updated
- assert second_package_dict, first_package_dict['id'] == second_package_dict['id']
+ assert second_package_dict, (
+ first_package_dict["id"] == second_package_dict["id"]
+ )
assert not second_obj.package, not second_obj.package_id
assert second_obj.current == False, first_obj.current == True
# Inactivate source1 and reharvest from source2, package should be updated
third_job = self._create_job(source2.id)
- third_obj = self._run_job_for_single_document(third_job,force_import=True)
+ third_obj = self._run_job_for_single_document(
+ third_job, force_import=True
+ )
Session.remove()
Session.add(first_obj)
@@ -661,99 +738,121 @@ class TestHarvest(HarvestFixtureBase):
Session.refresh(second_obj)
Session.refresh(third_obj)
- third_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+ third_package_dict = get_action("package_show")(
+ self.context, {"id": first_obj.package_id}
+ )
# Package was updated
- assert third_package_dict, first_package_dict['id'] == third_package_dict['id']
- assert third_obj.package, third_obj.package_id == first_package_dict['id']
+ assert third_package_dict, (
+ first_package_dict["id"] == third_package_dict["id"]
+ )
+ assert third_obj.package, (
+ third_obj.package_id == first_package_dict["id"]
+ )
assert third_obj.current == True
assert second_obj.current == False
assert first_obj.current == False
-
- def test_harvest_different_sources_same_document_but_deleted_inbetween(self):
+ def test_harvest_different_sources_same_document_but_deleted_inbetween(
+ self,
+ ):
# Create source1
source1_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/source1/same_dataset.xml',
- 'source_type': u'gemini-single'
+ "title": "Test Source",
+ "name": "test-source",
+ "url": u"http://127.0.0.1:8999/gemini2.1/source1/same_dataset.xml",
+ "source_type": u"gemini-single",
}
source1, first_job = self._create_source_and_job(source1_fixture)
first_obj = self._run_job_for_single_document(first_job)
- first_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+ first_package_dict = get_action("package_show")(
+ self.context, {"id": first_obj.package_id}
+ )
# Package was created
assert first_package_dict
- assert first_package_dict['state'] == u'active'
+ assert first_package_dict["state"] == u"active"
assert first_obj.current == True
# Delete/withdraw the package
- first_package_dict = get_action('package_delete')(self.context,{'id':first_obj.package_id})
- first_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+ first_package_dict = get_action("package_delete")(
+ self.context, {"id": first_obj.package_id}
+ )
+ first_package_dict = get_action("package_show")(
+ self.context, {"id": first_obj.package_id}
+ )
# Harvest the same document, unchanged, from another source
source2_fixture = {
- 'title': 'Test Source 2',
- 'name': 'test-source-2',
- 'url': u'http://127.0.0.1:8999/gemini2.1/source2/same_dataset.xml',
- 'source_type': u'gemini-single'
+ "title": "Test Source 2",
+ "name": "test-source-2",
+ "url": u"http://127.0.0.1:8999/gemini2.1/source2/same_dataset.xml",
+ "source_type": u"gemini-single",
}
source2, second_job = self._create_source_and_job(source2_fixture)
second_obj = self._run_job_for_single_document(second_job)
- second_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+ second_package_dict = get_action("package_show")(
+ self.context, {"id": first_obj.package_id}
+ )
# It would be good if the package was updated, but we see that it isn't
- assert second_package_dict, first_package_dict['id'] == second_package_dict['id']
+ assert second_package_dict, (
+ first_package_dict["id"] == second_package_dict["id"]
+ )
assert not second_obj.package
assert second_obj.current == False
assert first_obj.current == True
-
def test_harvest_moves_sources(self):
# Create source1
source1_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/service1.xml',
- 'source_type': u'gemini-single'
+ "title": "Test Source",
+ "name": "test-source",
+ "url": u"http://127.0.0.1:8999/gemini2.1/service1.xml",
+ "source_type": u"gemini-single",
}
source1, first_job = self._create_source_and_job(source1_fixture)
first_obj = self._run_job_for_single_document(first_job)
- first_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+ first_package_dict = get_action("package_show")(
+ self.context, {"id": first_obj.package_id}
+ )
# Package was created
assert first_package_dict
- assert first_package_dict['state'] == u'active'
+ assert first_package_dict["state"] == u"active"
assert first_obj.current == True
# Harvest the same document GUID but with a newer date, from another source.
source2_fixture = {
- 'title': 'Test Source 2',
- 'name': 'test-source-2',
- 'url': u'http://127.0.0.1:8999/gemini2.1/service1_newer.xml',
- 'source_type': u'gemini-single'
+ "title": "Test Source 2",
+ "name": "test-source-2",
+ "url": u"http://127.0.0.1:8999/gemini2.1/service1_newer.xml",
+ "source_type": u"gemini-single",
}
source2, second_job = self._create_source_and_job(source2_fixture)
second_obj = self._run_job_for_single_document(second_job)
- second_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+ second_package_dict = get_action("package_show")(
+ self.context, {"id": first_obj.package_id}
+ )
# Now we have two packages
- assert second_package_dict, first_package_dict['id'] == second_package_dict['id']
+ assert second_package_dict, (
+ first_package_dict["id"] == second_package_dict["id"]
+ )
assert second_obj.package
assert second_obj.current == True
assert first_obj.current == True
@@ -761,22 +860,23 @@ class TestHarvest(HarvestFixtureBase):
# to update the date to get it to reharvest, and then you should
# withdraw the package relating to the original harvest source.
-
def test_harvest_import_command(self):
# Create source
source_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/dataset1.xml',
- 'source_type': u'gemini-single'
+ "title": "Test Source",
+ "name": "test-source",
+ "url": u"http://127.0.0.1:8999/gemini2.1/dataset1.xml",
+ "source_type": u"gemini-single",
}
source, first_job = self._create_source_and_job(source_fixture)
first_obj = self._run_job_for_single_document(first_job)
- before_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+ before_package_dict = get_action("package_show")(
+ self.context, {"id": first_obj.package_id}
+ )
# Package was created
assert before_package_dict
@@ -790,7 +890,9 @@ class TestHarvest(HarvestFixtureBase):
third_obj = self._run_job_for_single_document(third_job)
# Run the import command manually
- imported_objects = get_action('harvest_objects_import')(self.context,{'source_id':source.id})
+ imported_objects = get_action("harvest_objects_import")(
+ self.context, {"source_id": source.id}
+ )
Session.remove()
Session.add(first_obj)
Session.add(second_obj)
@@ -800,136 +902,157 @@ class TestHarvest(HarvestFixtureBase):
Session.refresh(second_obj)
Session.refresh(third_obj)
- after_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
+ after_package_dict = get_action("package_show")(
+ self.context, {"id": first_obj.package_id}
+ )
# Package was updated, and the current object remains the same
- assert after_package_dict, before_package_dict['id'] == after_package_dict['id']
+ assert after_package_dict, (
+ before_package_dict["id"] == after_package_dict["id"]
+ )
assert third_obj.current == False
assert second_obj.current == False
assert first_obj.current == True
-
- source_dict = get_action('harvest_source_show')(self.context,{'id':source.id})
- assert source_dict['status']['total_datasets'] == 1
+ source_dict = get_action("harvest_source_show")(
+ self.context, {"id": source.id}
+ )
+ assert source_dict["status"]["total_datasets"] == 1
def test_clean_tags(self):
# Create source
source_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/dataset1.xml',
- 'source_type': u'gemini-single',
- 'owner_org': 'test-org',
- 'metadata_created': datetime.now().strftime('%YYYY-%MM-%DD %HH:%MM:%s'),
- 'metadata_modified': datetime.now().strftime('%YYYY-%MM-%DD %HH:%MM:%s'),
-
+ "title": "Test Source",
+ "name": "test-source",
+ "url": u"http://127.0.0.1:8999/gemini2.1/dataset1.xml",
+ "source_type": u"gemini-single",
+ "owner_org": "test-org",
+ "metadata_created": datetime.now().strftime(
+ "%YYYY-%MM-%DD %HH:%MM:%s"
+ ),
+ "metadata_modified": datetime.now().strftime(
+ "%YYYY-%MM-%DD %HH:%MM:%s"
+ ),
}
- user = User.get('dummy')
+ user = User.get("dummy")
if not user:
- user = call_action('user_create',
- name='dummy',
- password='dummybummy',
- email='dummy@dummy.com')
- user_name = user['name']
+ user = call_action(
+ "user_create",
+ name="dummy",
+ password="dummybummy",
+ email="dummy@dummy.com",
+ )
+ user_name = user["name"]
else:
user_name = user.name
- org = Group.by_name('test-org')
+ org = Group.by_name("test-org")
if org is None:
- org = call_action('organization_create',
- context={'user': user_name},
- name='test-org')
- existing_g = Group.by_name('existing-group')
+ org = call_action(
+ "organization_create",
+ context={"user": user_name},
+ name="test-org",
+ )
+ existing_g = Group.by_name("existing-group")
if existing_g is None:
- existing_g = call_action('group_create',
- context={'user': user_name},
- name='existing-group')
+ existing_g = call_action(
+ "group_create",
+ context={"user": user_name},
+ name="existing-group",
+ )
- context = {'user': 'dummy'}
+ context = {"user": "dummy"}
package_schema = default_update_package_schema()
- context['schema'] = package_schema
- package_dict = {'frequency': 'manual',
- 'publisher_name': 'dummy',
- 'extras': [{'key':'theme', 'value':['non-mappable', 'thememap1']}],
- 'groups': [],
- 'title': 'fakename',
- 'holder_name': 'dummy',
- 'holder_identifier': 'dummy',
- 'name': 'fakename',
- 'notes': 'dummy',
- 'owner_org': 'test-org',
- 'modified': datetime.now(),
- 'publisher_identifier': 'dummy',
- 'metadata_created' : datetime.now(),
- 'metadata_modified' : datetime.now(),
- 'guid': str(uuid4()),
- 'identifier': 'dummy'}
+ context["schema"] = package_schema
+ package_dict = {
+ "frequency": "manual",
+ "publisher_name": "dummy",
+ "extras": [
+ {"key": "theme", "value": ["non-mappable", "thememap1"]}
+ ],
+ "groups": [],
+ "title": "fakename",
+ "holder_name": "dummy",
+ "holder_identifier": "dummy",
+ "name": "fakename",
+ "notes": "dummy",
+ "owner_org": "test-org",
+ "modified": datetime.now(),
+ "publisher_identifier": "dummy",
+ "metadata_created": datetime.now(),
+ "metadata_modified": datetime.now(),
+ "guid": str(uuid4()),
+ "identifier": "dummy",
+ }
- package_data = call_action('package_create', context=context, **package_dict)
+ package_data = call_action(
+ "package_create", context=context, **package_dict
+ )
- package = Package.get('fakename')
+ package = Package.get("fakename")
source, job = self._create_source_and_job(source_fixture)
job.package = package
job.guid = uuid4()
harvester = SpatialHarvester()
- with open(os.path.join('..', 'data', 'dataset.json')) as f:
+ with open(os.path.join("..", "data", "dataset.json")) as f:
dataset = json.load(f)
# long tags are invalid in all cases
- TAG_LONG_INVALID = 'abcdefghij' * 20
+ TAG_LONG_INVALID = "abcdefghij" * 20
# if clean_tags is not set to true, tags will be truncated to 50 chars
TAG_LONG_VALID = TAG_LONG_INVALID[:50]
# default truncate to 100
TAG_LONG_VALID_LONG = TAG_LONG_INVALID[:100]
assert len(TAG_LONG_VALID) == 50
- assert TAG_LONG_VALID[-1] == 'j'
- TAG_CHARS_INVALID = 'Pretty-inv@lid.tag!'
- TAG_CHARS_VALID = 'pretty-invlidtag'
+ assert TAG_LONG_VALID[-1] == "j"
+ TAG_CHARS_INVALID = "Pretty-inv@lid.tag!"
+ TAG_CHARS_VALID = "pretty-invlidtag"
- dataset['tags'].append(TAG_LONG_INVALID)
- dataset['tags'].append(TAG_CHARS_INVALID)
+ dataset["tags"].append(TAG_LONG_INVALID)
+ dataset["tags"].append(TAG_CHARS_INVALID)
- harvester.source_config = {'clean_tags': False}
+ harvester.source_config = {"clean_tags": False}
out = harvester.get_package_dict(dataset, job)
- tags = out['tags']
+ tags = out["tags"]
# no clean tags, so invalid chars are in
# but tags are truncated to 50 chars
- assert {'name': TAG_CHARS_VALID} not in tags
- assert {'name': TAG_CHARS_INVALID} in tags
- assert {'name': TAG_LONG_VALID_LONG} in tags
- assert {'name': TAG_LONG_INVALID} not in tags
+ assert {"name": TAG_CHARS_VALID} not in tags
+ assert {"name": TAG_CHARS_INVALID} in tags
+ assert {"name": TAG_LONG_VALID_LONG} in tags
+ assert {"name": TAG_LONG_INVALID} not in tags
- harvester.source_config = {'clean_tags': True}
+ harvester.source_config = {"clean_tags": True}
out = harvester.get_package_dict(dataset, job)
- tags = out['tags']
- assert {'name': TAG_CHARS_VALID} in tags
- assert {'name': TAG_LONG_VALID_LONG} in tags
+ tags = out["tags"]
+ assert {"name": TAG_CHARS_VALID} in tags
+ assert {"name": TAG_LONG_VALID_LONG} in tags
-BASIC_GEMINI = '''
+BASIC_GEMINI = """
e269743a-cfda-4632-a939-0c8416ae801e
service
-'''
-GUID = 'e269743a-cfda-4632-a939-0c8416ae801e'
-GEMINI_MISSING_GUID = ''''''
+"""
+GUID = "e269743a-cfda-4632-a939-0c8416ae801e"
+GEMINI_MISSING_GUID = """"""
+
class TestGatherMethods(HarvestFixtureBase):
def setup(self):
HarvestFixtureBase.setup(self)
# Create source
source_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/dataset1.xml',
- 'source_type': u'gemini-single'
+ "title": "Test Source",
+ "name": "test-source",
+ "url": u"http://127.0.0.1:8999/gemini2.1/dataset1.xml",
+ "source_type": u"gemini-single",
}
source, job = self._create_source_and_job(source_fixture)
self.harvester = GeminiHarvester()
@@ -943,126 +1066,197 @@ class TestGatherMethods(HarvestFixtureBase):
assert_equal(res, (BASIC_GEMINI, GUID))
def test_get_gemini_string_and_guid__no_guid(self):
- res = self.harvester.get_gemini_string_and_guid(GEMINI_MISSING_GUID, url=None)
- assert_equal(res, (GEMINI_MISSING_GUID, ''))
+ res = self.harvester.get_gemini_string_and_guid(
+ GEMINI_MISSING_GUID, url=None
+ )
+ assert_equal(res, (GEMINI_MISSING_GUID, ""))
def test_get_gemini_string_and_guid__non_parsing(self):
- content = '' # no closing tag
- assert_raises(lxml.etree.XMLSyntaxError, self.harvester.get_gemini_string_and_guid, content)
+ content = '' # no closing tag
+ assert_raises(
+ lxml.etree.XMLSyntaxError,
+ self.harvester.get_gemini_string_and_guid,
+ content,
+ )
def test_get_gemini_string_and_guid__empty(self):
- content = ''
- assert_raises(lxml.etree.XMLSyntaxError, self.harvester.get_gemini_string_and_guid, content)
+ content = ""
+ assert_raises(
+ lxml.etree.XMLSyntaxError,
+ self.harvester.get_gemini_string_and_guid,
+ content,
+ )
+
class TestImportStageTools(object):
def test_licence_url_normal(self):
- assert_equal(GeminiHarvester._extract_first_licence_url(
- ['Reference and PSMA Only',
- 'http://www.test.gov.uk/licenseurl']),
- 'http://www.test.gov.uk/licenseurl')
+ assert_equal(
+ GeminiHarvester._extract_first_licence_url(
+ [
+ "Reference and PSMA Only",
+ "http://www.test.gov.uk/licenseurl",
+ ]
+ ),
+ "http://www.test.gov.uk/licenseurl",
+ )
def test_licence_url_multiple_urls(self):
# only the first URL is extracted
- assert_equal(GeminiHarvester._extract_first_licence_url(
- ['Reference and PSMA Only',
- 'http://www.test.gov.uk/licenseurl',
- 'http://www.test.gov.uk/2nd_licenseurl']),
- 'http://www.test.gov.uk/licenseurl')
+ assert_equal(
+ GeminiHarvester._extract_first_licence_url(
+ [
+ "Reference and PSMA Only",
+ "http://www.test.gov.uk/licenseurl",
+ "http://www.test.gov.uk/2nd_licenseurl",
+ ]
+ ),
+ "http://www.test.gov.uk/licenseurl",
+ )
def test_licence_url_embedded(self):
# URL is embedded within the text field and not extracted
- assert_equal(GeminiHarvester._extract_first_licence_url(
- ['Reference and PSMA Only http://www.test.gov.uk/licenseurl']),
- None)
+ assert_equal(
+ GeminiHarvester._extract_first_licence_url(
+ ["Reference and PSMA Only http://www.test.gov.uk/licenseurl"]
+ ),
+ None,
+ )
def test_licence_url_embedded_at_start(self):
# URL is embedded at the start of the text field and the
# whole field is returned. Noting this unusual behaviour
- assert_equal(GeminiHarvester._extract_first_licence_url(
- ['http://www.test.gov.uk/licenseurl Reference and PSMA Only']),
- 'http://www.test.gov.uk/licenseurl Reference and PSMA Only')
+ assert_equal(
+ GeminiHarvester._extract_first_licence_url(
+ ["http://www.test.gov.uk/licenseurl Reference and PSMA Only"]
+ ),
+ "http://www.test.gov.uk/licenseurl Reference and PSMA Only",
+ )
def test_responsible_organisation_basic(self):
- responsible_organisation = [{'organisation-name': 'Ordnance Survey',
- 'role': 'owner'},
- {'organisation-name': 'Maps Ltd',
- 'role': 'distributor'}]
- assert_equal(GeminiHarvester._process_responsible_organisation(responsible_organisation),
- ('Ordnance Survey', ['Maps Ltd (distributor)',
- 'Ordnance Survey (owner)']))
+ responsible_organisation = [
+ {"organisation-name": "Ordnance Survey", "role": "owner"},
+ {"organisation-name": "Maps Ltd", "role": "distributor"},
+ ]
+ assert_equal(
+ GeminiHarvester._process_responsible_organisation(
+ responsible_organisation
+ ),
+ (
+ "Ordnance Survey",
+ ["Maps Ltd (distributor)", "Ordnance Survey (owner)"],
+ ),
+ )
def test_responsible_organisation_publisher(self):
# no owner, so falls back to publisher
- responsible_organisation = [{'organisation-name': 'Ordnance Survey',
- 'role': 'publisher'},
- {'organisation-name': 'Maps Ltd',
- 'role': 'distributor'}]
- assert_equal(GeminiHarvester._process_responsible_organisation(responsible_organisation),
- ('Ordnance Survey', ['Maps Ltd (distributor)',
- 'Ordnance Survey (publisher)']))
+ responsible_organisation = [
+ {"organisation-name": "Ordnance Survey", "role": "publisher"},
+ {"organisation-name": "Maps Ltd", "role": "distributor"},
+ ]
+ assert_equal(
+ GeminiHarvester._process_responsible_organisation(
+ responsible_organisation
+ ),
+ (
+ "Ordnance Survey",
+ ["Maps Ltd (distributor)", "Ordnance Survey (publisher)"],
+ ),
+ )
def test_responsible_organisation_owner(self):
# provider is the owner (ignores publisher)
- responsible_organisation = [{'organisation-name': 'Ordnance Survey',
- 'role': 'publisher'},
- {'organisation-name': 'Owner',
- 'role': 'owner'},
- {'organisation-name': 'Maps Ltd',
- 'role': 'distributor'}]
- assert_equal(GeminiHarvester._process_responsible_organisation(responsible_organisation),
- ('Owner', ['Owner (owner)',
- 'Maps Ltd (distributor)',
- 'Ordnance Survey (publisher)',
- ]))
+ responsible_organisation = [
+ {"organisation-name": "Ordnance Survey", "role": "publisher"},
+ {"organisation-name": "Owner", "role": "owner"},
+ {"organisation-name": "Maps Ltd", "role": "distributor"},
+ ]
+ assert_equal(
+ GeminiHarvester._process_responsible_organisation(
+ responsible_organisation
+ ),
+ (
+ "Owner",
+ [
+ "Owner (owner)",
+ "Maps Ltd (distributor)",
+ "Ordnance Survey (publisher)",
+ ],
+ ),
+ )
def test_responsible_organisation_multiple_roles(self):
# provider is the owner (ignores publisher)
- responsible_organisation = [{'organisation-name': 'Ordnance Survey',
- 'role': 'publisher'},
- {'organisation-name': 'Ordnance Survey',
- 'role': 'custodian'},
- {'organisation-name': 'Distributor',
- 'role': 'distributor'}]
- assert_equal(GeminiHarvester._process_responsible_organisation(responsible_organisation),
- ('Ordnance Survey', ['Distributor (distributor)',
- 'Ordnance Survey (publisher, custodian)',
- ]))
+ responsible_organisation = [
+ {"organisation-name": "Ordnance Survey", "role": "publisher"},
+ {"organisation-name": "Ordnance Survey", "role": "custodian"},
+ {"organisation-name": "Distributor", "role": "distributor"},
+ ]
+ assert_equal(
+ GeminiHarvester._process_responsible_organisation(
+ responsible_organisation
+ ),
+ (
+ "Ordnance Survey",
+ [
+ "Distributor (distributor)",
+ "Ordnance Survey (publisher, custodian)",
+ ],
+ ),
+ )
def test_responsible_organisation_blank_provider(self):
# no owner or publisher, so blank provider
- responsible_organisation = [{'organisation-name': 'Ordnance Survey',
- 'role': 'resourceProvider'},
- {'organisation-name': 'Maps Ltd',
- 'role': 'distributor'}]
- assert_equal(GeminiHarvester._process_responsible_organisation(responsible_organisation),
- ('', ['Maps Ltd (distributor)',
- 'Ordnance Survey (resourceProvider)']))
+ responsible_organisation = [
+ {
+ "organisation-name": "Ordnance Survey",
+ "role": "resourceProvider",
+ },
+ {"organisation-name": "Maps Ltd", "role": "distributor"},
+ ]
+ assert_equal(
+ GeminiHarvester._process_responsible_organisation(
+ responsible_organisation
+ ),
+ (
+ "",
+ [
+ "Maps Ltd (distributor)",
+ "Ordnance Survey (resourceProvider)",
+ ],
+ ),
+ )
def test_responsible_organisation_blank(self):
# no owner or publisher, so blank provider
responsible_organisation = []
- assert_equal(GeminiHarvester._process_responsible_organisation(responsible_organisation),
- ('', []))
+ assert_equal(
+ GeminiHarvester._process_responsible_organisation(
+ responsible_organisation
+ ),
+ ("", []),
+ )
class TestValidation(HarvestFixtureBase):
-
@classmethod
def setup_class(cls):
# TODO: Fix these tests, broken since 27c4ee81e
- raise SkipTest('Validation tests not working since 27c4ee81e')
+ raise SkipTest("Validation tests not working since 27c4ee81e")
- SpatialHarvester._validator = Validators(profiles=['iso19139eden', 'constraints', 'gemini2'])
+ SpatialHarvester._validator = Validators(
+ profiles=["iso19139eden", "constraints", "gemini2"]
+ )
HarvestFixtureBase.setup_class()
def get_validation_errors(self, validation_test_filename):
# Create source
source_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/validation/%s' % validation_test_filename,
- 'source_type': u'gemini-single'
+ "title": "Test Source",
+ "name": "test-source",
+ "url": u"http://127.0.0.1:8999/gemini2.1/validation/%s"
+ % validation_test_filename,
+ "source_type": u"gemini-single",
}
source, job = self._create_source_and_job(source_fixture)
@@ -1072,70 +1266,106 @@ class TestValidation(HarvestFixtureBase):
# Gather stage for GeminiDocHarvester includes validation
object_ids = harvester.gather_stage(job)
-
# Check the validation errors
- errors = '; '.join([gather_error.message for gather_error in job.gather_errors])
+ errors = "; ".join(
+ [gather_error.message for gather_error in job.gather_errors]
+ )
return errors
def test_01_dataset_fail_iso19139_schema(self):
- errors = self.get_validation_errors('01_Dataset_Invalid_XSD_No_Such_Element.xml')
+ errors = self.get_validation_errors(
+ "01_Dataset_Invalid_XSD_No_Such_Element.xml"
+ )
assert len(errors) > 0
- assert_in('Could not get the GUID', errors)
+ assert_in("Could not get the GUID", errors)
def test_02_dataset_fail_constraints_schematron(self):
- errors = self.get_validation_errors('02_Dataset_Invalid_19139_Missing_Data_Format.xml')
+ errors = self.get_validation_errors(
+ "02_Dataset_Invalid_19139_Missing_Data_Format.xml"
+ )
assert len(errors) > 0
- assert_in('MD_Distribution / MD_Format: count(distributionFormat + distributorFormat) > 0', errors)
+ assert_in(
+ "MD_Distribution / MD_Format: count(distributionFormat + distributorFormat) > 0",
+ errors,
+ )
def test_03_dataset_fail_gemini_schematron(self):
- errors = self.get_validation_errors('03_Dataset_Invalid_GEMINI_Missing_Keyword.xml')
+ errors = self.get_validation_errors(
+ "03_Dataset_Invalid_GEMINI_Missing_Keyword.xml"
+ )
assert len(errors) > 0
- assert_in('Descriptive keywords are mandatory', errors)
+ assert_in("Descriptive keywords are mandatory", errors)
def test_04_dataset_valid(self):
- errors = self.get_validation_errors('04_Dataset_Valid.xml')
+ errors = self.get_validation_errors("04_Dataset_Valid.xml")
assert len(errors) == 0
def test_05_series_fail_iso19139_schema(self):
- errors = self.get_validation_errors('05_Series_Invalid_XSD_No_Such_Element.xml')
+ errors = self.get_validation_errors(
+ "05_Series_Invalid_XSD_No_Such_Element.xml"
+ )
assert len(errors) > 0
- assert_in('Could not get the GUID', errors)
+ assert_in("Could not get the GUID", errors)
def test_06_series_fail_constraints_schematron(self):
- errors = self.get_validation_errors('06_Series_Invalid_19139_Missing_Data_Format.xml')
+ errors = self.get_validation_errors(
+ "06_Series_Invalid_19139_Missing_Data_Format.xml"
+ )
assert len(errors) > 0
- assert_in('MD_Distribution / MD_Format: count(distributionFormat + distributorFormat) > 0', errors)
+ assert_in(
+ "MD_Distribution / MD_Format: count(distributionFormat + distributorFormat) > 0",
+ errors,
+ )
def test_07_series_fail_gemini_schematron(self):
- errors = self.get_validation_errors('07_Series_Invalid_GEMINI_Missing_Keyword.xml')
+ errors = self.get_validation_errors(
+ "07_Series_Invalid_GEMINI_Missing_Keyword.xml"
+ )
assert len(errors) > 0
- assert_in('Descriptive keywords are mandatory', errors)
+ assert_in("Descriptive keywords are mandatory", errors)
def test_08_series_valid(self):
- errors = self.get_validation_errors('08_Series_Valid.xml')
+ errors = self.get_validation_errors("08_Series_Valid.xml")
assert len(errors) == 0
def test_09_service_fail_iso19139_schema(self):
- errors = self.get_validation_errors('09_Service_Invalid_No_Such_Element.xml')
+ errors = self.get_validation_errors(
+ "09_Service_Invalid_No_Such_Element.xml"
+ )
assert len(errors) > 0
- assert_in('Could not get the GUID', errors)
+ assert_in("Could not get the GUID", errors)
def test_10_service_fail_constraints_schematron(self):
- errors = self.get_validation_errors('10_Service_Invalid_19139_Level_Description.xml')
+ errors = self.get_validation_errors(
+ "10_Service_Invalid_19139_Level_Description.xml"
+ )
assert len(errors) > 0
- assert_in("DQ_Scope: 'levelDescription' is mandatory if 'level' notEqual 'dataset' or 'series'.", errors)
+ assert_in(
+ "DQ_Scope: 'levelDescription' is mandatory if 'level' notEqual 'dataset' or 'series'.",
+ errors,
+ )
def test_11_service_fail_gemini_schematron(self):
- errors = self.get_validation_errors('11_Service_Invalid_GEMINI_Service_Type.xml')
+ errors = self.get_validation_errors(
+ "11_Service_Invalid_GEMINI_Service_Type.xml"
+ )
assert len(errors) > 0
- assert_in("Service type shall be one of 'discovery', 'view', 'download', 'transformation', 'invoke' or 'other' following INSPIRE generic names.", errors)
+ assert_in(
+ "Service type shall be one of 'discovery', 'view', 'download', 'transformation', 'invoke' or 'other' following INSPIRE generic names.",
+ errors,
+ )
def test_12_service_valid(self):
- errors = self.get_validation_errors('12_Service_Valid.xml')
+ errors = self.get_validation_errors("12_Service_Valid.xml")
assert len(errors) == 0, errors
def test_13_dataset_fail_iso19139_schema_2(self):
        # This test Dataset has srv tags and only Service metadata should have them.
- errors = self.get_validation_errors('13_Dataset_Invalid_Element_srv.xml')
+ errors = self.get_validation_errors(
+ "13_Dataset_Invalid_Element_srv.xml"
+ )
assert len(errors) > 0
- assert_in('Element \'{http://www.isotc211.org/2005/srv}SV_ServiceIdentification\': This element is not expected.', errors)
+ assert_in(
+ "Element '{http://www.isotc211.org/2005/srv}SV_ServiceIdentification': This element is not expected.",
+ errors,
+ )
diff --git a/ckanext/spatial/tests/test_plugin/__init__.py b/ckanext/spatial/tests/test_plugin/__init__.py
index 2e2033b..6d83202 100644
--- a/ckanext/spatial/tests/test_plugin/__init__.py
+++ b/ckanext/spatial/tests/test_plugin/__init__.py
@@ -1,7 +1,9 @@
# this is a namespace package
try:
import pkg_resources
+
pkg_resources.declare_namespace(__name__)
except ImportError:
import pkgutil
+
__path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/ckanext/spatial/tests/test_plugin/plugin.py b/ckanext/spatial/tests/test_plugin/plugin.py
index 2aa5a3d..17cab1c 100644
--- a/ckanext/spatial/tests/test_plugin/plugin.py
+++ b/ckanext/spatial/tests/test_plugin/plugin.py
@@ -6,4 +6,4 @@ class TestSpatialPlugin(p.SingletonPlugin):
p.implements(p.IConfigurer, inherit=True)
def update_config(self, config):
- p.toolkit.add_template_directory(config, 'templates')
+ p.toolkit.add_template_directory(config, "templates")
diff --git a/ckanext/spatial/tests/test_validation.py b/ckanext/spatial/tests/test_validation.py
index f707e90..0a2e738 100644
--- a/ckanext/spatial/tests/test_validation.py
+++ b/ckanext/spatial/tests/test_validation.py
@@ -7,122 +7,182 @@ from ckanext.spatial import validation
# other validation tests are in test_harvest.py
-class TestValidation(object):
+class TestValidation(object):
def _get_file_path(self, file_name):
- return os.path.join(os.path.dirname(__file__), 'xml', file_name)
+ return os.path.join(os.path.dirname(__file__), "xml", file_name)
def get_validation_errors(self, validator, validation_test_filename):
- validation_test_filepath = self._get_file_path(validation_test_filename)
+ validation_test_filepath = self._get_file_path(
+ validation_test_filename
+ )
xml = etree.parse(validation_test_filepath)
is_valid, errors = validator.is_valid(xml)
- return ';'.join([e[0] for e in errors])
+ return ";".join([e[0] for e in errors])
def test_iso19139_failure(self):
- errors = self.get_validation_errors(validation.ISO19139Schema,
- 'iso19139/dataset-invalid.xml')
+ errors = self.get_validation_errors(
+ validation.ISO19139Schema, "iso19139/dataset-invalid.xml"
+ )
assert len(errors) > 0
- assert_in('Dataset schema (gmx.xsd)', errors)
- assert_in('{http://www.isotc211.org/2005/gmd}nosuchelement\': This element is not expected.', errors)
+ assert_in("Dataset schema (gmx.xsd)", errors)
+ assert_in(
+ "{http://www.isotc211.org/2005/gmd}nosuchelement': This element is not expected.",
+ errors,
+ )
def test_iso19139_pass(self):
- errors = self.get_validation_errors(validation.ISO19139Schema,
- 'iso19139/dataset.xml')
- assert_equal(errors, '')
+ errors = self.get_validation_errors(
+ validation.ISO19139Schema, "iso19139/dataset.xml"
+ )
+ assert_equal(errors, "")
# Gemini2.1 tests are basically the same as those in test_harvest.py but
# a few little differences make it worth not removing them in
# test_harvest
def test_01_dataset_fail_iso19139_schema(self):
- errors = self.get_validation_errors(validation.ISO19139EdenSchema,
- 'gemini2.1/validation/01_Dataset_Invalid_XSD_No_Such_Element.xml')
+ errors = self.get_validation_errors(
+ validation.ISO19139EdenSchema,
+ "gemini2.1/validation/01_Dataset_Invalid_XSD_No_Such_Element.xml",
+ )
assert len(errors) > 0
- assert_in('(gmx.xsd)', errors)
- assert_in('\'{http://www.isotc211.org/2005/gmd}nosuchelement\': This element is not expected.', errors)
+ assert_in("(gmx.xsd)", errors)
+ assert_in(
+ "'{http://www.isotc211.org/2005/gmd}nosuchelement': This element is not expected.",
+ errors,
+ )
def test_02_dataset_fail_constraints_schematron(self):
- errors = self.get_validation_errors(validation.ConstraintsSchematron14,
- 'gemini2.1/validation/02_Dataset_Invalid_19139_Missing_Data_Format.xml')
+ errors = self.get_validation_errors(
+ validation.ConstraintsSchematron14,
+ "gemini2.1/validation/02_Dataset_Invalid_19139_Missing_Data_Format.xml",
+ )
assert len(errors) > 0
- assert_in('MD_Distribution / MD_Format: count(distributionFormat + distributorFormat) > 0', errors)
+ assert_in(
+ "MD_Distribution / MD_Format: count(distributionFormat + distributorFormat) > 0",
+ errors,
+ )
def test_03_dataset_fail_gemini_schematron(self):
- errors = self.get_validation_errors(validation.Gemini2Schematron,
- 'gemini2.1/validation/03_Dataset_Invalid_GEMINI_Missing_Keyword.xml')
+ errors = self.get_validation_errors(
+ validation.Gemini2Schematron,
+ "gemini2.1/validation/03_Dataset_Invalid_GEMINI_Missing_Keyword.xml",
+ )
assert len(errors) > 0
- assert_in('Descriptive keywords are mandatory', errors)
+ assert_in("Descriptive keywords are mandatory", errors)
def assert_passes_all_gemini2_1_validation(self, xml_filepath):
- errs = self.get_validation_errors(validation.ISO19139EdenSchema,
- xml_filepath)
- assert not errs, 'ISO19139EdenSchema: ' + errs
- errs = self.get_validation_errors(validation.ConstraintsSchematron14,
- xml_filepath)
- assert not errs, 'ConstraintsSchematron14: ' + errs
- errs = self.get_validation_errors(validation.Gemini2Schematron,
- xml_filepath)
- assert not errs, 'Gemini2Schematron: ' + errs
+ errs = self.get_validation_errors(
+ validation.ISO19139EdenSchema, xml_filepath
+ )
+ assert not errs, "ISO19139EdenSchema: " + errs
+ errs = self.get_validation_errors(
+ validation.ConstraintsSchematron14, xml_filepath
+ )
+ assert not errs, "ConstraintsSchematron14: " + errs
+ errs = self.get_validation_errors(
+ validation.Gemini2Schematron, xml_filepath
+ )
+ assert not errs, "Gemini2Schematron: " + errs
def test_04_dataset_valid(self):
- self.assert_passes_all_gemini2_1_validation('gemini2.1/validation/04_Dataset_Valid.xml')
+ self.assert_passes_all_gemini2_1_validation(
+ "gemini2.1/validation/04_Dataset_Valid.xml"
+ )
def test_05_series_fail_iso19139_schema(self):
- errors = self.get_validation_errors(validation.ISO19139EdenSchema,
- 'gemini2.1/validation/05_Series_Invalid_XSD_No_Such_Element.xml')
+ errors = self.get_validation_errors(
+ validation.ISO19139EdenSchema,
+ "gemini2.1/validation/05_Series_Invalid_XSD_No_Such_Element.xml",
+ )
assert len(errors) > 0
- assert_in('(gmx.xsd)', errors)
- assert_in('\'{http://www.isotc211.org/2005/gmd}nosuchelement\': This element is not expected.', errors)
+ assert_in("(gmx.xsd)", errors)
+ assert_in(
+ "'{http://www.isotc211.org/2005/gmd}nosuchelement': This element is not expected.",
+ errors,
+ )
def test_06_series_fail_constraints_schematron(self):
- errors = self.get_validation_errors(validation.ConstraintsSchematron14,
- 'gemini2.1/validation/06_Series_Invalid_19139_Missing_Data_Format.xml')
+ errors = self.get_validation_errors(
+ validation.ConstraintsSchematron14,
+ "gemini2.1/validation/06_Series_Invalid_19139_Missing_Data_Format.xml",
+ )
assert len(errors) > 0
- assert_in('MD_Distribution / MD_Format: count(distributionFormat + distributorFormat) > 0', errors)
+ assert_in(
+ "MD_Distribution / MD_Format: count(distributionFormat + distributorFormat) > 0",
+ errors,
+ )
def test_07_series_fail_gemini_schematron(self):
- errors = self.get_validation_errors(validation.Gemini2Schematron,
- 'gemini2.1/validation/07_Series_Invalid_GEMINI_Missing_Keyword.xml')
+ errors = self.get_validation_errors(
+ validation.Gemini2Schematron,
+ "gemini2.1/validation/07_Series_Invalid_GEMINI_Missing_Keyword.xml",
+ )
assert len(errors) > 0
- assert_in('Descriptive keywords are mandatory', errors)
+ assert_in("Descriptive keywords are mandatory", errors)
def test_08_series_valid(self):
- self.assert_passes_all_gemini2_1_validation('gemini2.1/validation/08_Series_Valid.xml')
+ self.assert_passes_all_gemini2_1_validation(
+ "gemini2.1/validation/08_Series_Valid.xml"
+ )
def test_09_service_fail_iso19139_schema(self):
- errors = self.get_validation_errors(validation.ISO19139EdenSchema,
- 'gemini2.1/validation/09_Service_Invalid_No_Such_Element.xml')
+ errors = self.get_validation_errors(
+ validation.ISO19139EdenSchema,
+ "gemini2.1/validation/09_Service_Invalid_No_Such_Element.xml",
+ )
assert len(errors) > 0
- assert_in('(gmx.xsd & srv.xsd)', errors)
- assert_in('\'{http://www.isotc211.org/2005/gmd}nosuchelement\': This element is not expected.', errors)
+ assert_in("(gmx.xsd & srv.xsd)", errors)
+ assert_in(
+ "'{http://www.isotc211.org/2005/gmd}nosuchelement': This element is not expected.",
+ errors,
+ )
def test_10_service_fail_constraints_schematron(self):
- errors = self.get_validation_errors(validation.ConstraintsSchematron14,
- 'gemini2.1/validation/10_Service_Invalid_19139_Level_Description.xml')
+ errors = self.get_validation_errors(
+ validation.ConstraintsSchematron14,
+ "gemini2.1/validation/10_Service_Invalid_19139_Level_Description.xml",
+ )
assert len(errors) > 0
- assert_in("DQ_Scope: 'levelDescription' is mandatory if 'level' notEqual 'dataset' or 'series'.", errors)
+ assert_in(
+ "DQ_Scope: 'levelDescription' is mandatory if 'level' notEqual 'dataset' or 'series'.",
+ errors,
+ )
def test_11_service_fail_gemini_schematron(self):
- errors = self.get_validation_errors(validation.Gemini2Schematron,
- 'gemini2.1/validation/11_Service_Invalid_GEMINI_Service_Type.xml')
+ errors = self.get_validation_errors(
+ validation.Gemini2Schematron,
+ "gemini2.1/validation/11_Service_Invalid_GEMINI_Service_Type.xml",
+ )
assert len(errors) > 0
- assert_in("Service type shall be one of 'discovery', 'view', 'download', 'transformation', 'invoke' or 'other' following INSPIRE generic names.", errors)
+ assert_in(
+ "Service type shall be one of 'discovery', 'view', 'download', 'transformation', 'invoke' or 'other' following INSPIRE generic names.",
+ errors,
+ )
def test_12_service_valid(self):
- self.assert_passes_all_gemini2_1_validation('gemini2.1/validation/12_Service_Valid.xml')
+ self.assert_passes_all_gemini2_1_validation(
+ "gemini2.1/validation/12_Service_Valid.xml"
+ )
def test_13_dataset_fail_iso19139_schema_2(self):
        # This test Dataset has srv tags and only Service metadata should have them.
- errors = self.get_validation_errors(validation.ISO19139EdenSchema,
- 'gemini2.1/validation/13_Dataset_Invalid_Element_srv.xml')
+ errors = self.get_validation_errors(
+ validation.ISO19139EdenSchema,
+ "gemini2.1/validation/13_Dataset_Invalid_Element_srv.xml",
+ )
assert len(errors) > 0
- assert_in('(gmx.xsd)', errors)
- assert_in('Element \'{http://www.isotc211.org/2005/srv}SV_ServiceIdentification\': This element is not expected.', errors)
+ assert_in("(gmx.xsd)", errors)
+ assert_in(
+ "Element '{http://www.isotc211.org/2005/srv}SV_ServiceIdentification': This element is not expected.",
+ errors,
+ )
def test_schematron_error_extraction(self):
- validation_error_xml = '''
+ validation_error_xml = """
@@ -130,24 +190,27 @@ class TestValidation(object):
-'''
+"""
failure_xml = etree.fromstring(validation_error_xml)
fail_element = failure_xml.getchildren()[0]
- details = validation.SchematronValidator.extract_error_details(fail_element)
+ details = validation.SchematronValidator.extract_error_details(
+ fail_element
+ )
if isinstance(details, tuple):
details = details[1]
assert_in("srv:serviceType/*[1] = 'discovery'", details)
assert_in("/*[local-name()='MD_Metadata'", details)
assert_in("Service type shall be one of 'discovery'", details)
-
def test_error_line_numbers(self):
- file_path = self._get_file_path('iso19139/dataset-invalid.xml')
+ file_path = self._get_file_path("iso19139/dataset-invalid.xml")
xml = etree.parse(file_path)
- is_valid, profile, errors = validation.Validators(profiles=['iso19139']).is_valid(xml)
+ is_valid, profile, errors = validation.Validators(
+ profiles=["iso19139"]
+ ).is_valid(xml)
assert not is_valid
assert len(errors) == 2
message, line = errors[1]
- assert 'This element is not expected' in message
+ assert "This element is not expected" in message
assert line == 3
diff --git a/ckanext/spatial/tests/xml_file_server.py b/ckanext/spatial/tests/xml_file_server.py
index 74f4fbf..9cd9116 100644
--- a/ckanext/spatial/tests/xml_file_server.py
+++ b/ckanext/spatial/tests/xml_file_server.py
@@ -16,11 +16,10 @@ PORT = 8999
def serve(port=PORT):
- '''Serves test XML files over HTTP'''
+ """Serves test XML files over HTTP"""
# Make sure we serve from the tests' XML directory
- os.chdir(os.path.join(os.path.dirname(os.path.abspath(__file__)),
- 'xml'))
+ os.chdir(os.path.join(os.path.dirname(os.path.abspath(__file__)), "xml"))
Handler = SimpleHTTPRequestHandler
@@ -29,7 +28,7 @@ def serve(port=PORT):
httpd = TestServer(("", PORT), Handler)
- print('Serving test HTTP server at port', PORT)
+ print("Serving test HTTP server at port", PORT)
httpd_thread = Thread(target=httpd.serve_forever)
httpd_thread.setDaemon(True)
diff --git a/conftest.py b/conftest.py
new file mode 100644
index 0000000..fae1fc4
--- /dev/null
+++ b/conftest.py
@@ -0,0 +1,6 @@
+# -*- coding: utf-8 -*-
+
+pytest_plugins = [
+ u'ckan.tests.pytest_ckan.ckan_setup',
+ u'ckan.tests.pytest_ckan.fixtures',
+]
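
A minimal usage sketch (illustrative only, not part of the patch): once these two
plugins are registered, test modules can use CKAN's shared pytest fixtures such as
clean_db and app directly, assuming CKAN >= 2.9. The test name and dataset title
below are made up for illustration.

    import pytest
    import ckan.tests.factories as factories


    @pytest.mark.usefixtures("clean_db")
    def test_dataset_page_renders(app):
        # "clean_db" and "app" are provided by ckan.tests.pytest_ckan.fixtures
        dataset = factories.Dataset(title="Example dataset")
        res = app.get("/dataset/" + dataset["name"])
        assert "Example dataset" in res
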
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..ea548d6
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,7 @@
+[tool:pytest]
+norecursedirs=ckanext/scheming/tests/nose
+
+filterwarnings =
+ ignore::sqlalchemy.exc.SADeprecationWarning
+ ignore::sqlalchemy.exc.SAWarning
+ ignore::DeprecationWarning
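
For readers less familiar with pytest's ini-style filter syntax, the three
filterwarnings entries above are roughly equivalent to the following programmatic
filters (a sketch, assuming SQLAlchemy is importable in the test environment):

    import warnings
    from sqlalchemy import exc as sa_exc

    # mirror the setup.cfg entries: silence SQLAlchemy deprecation noise and
    # generic DeprecationWarnings during the test run
    warnings.simplefilter("ignore", sa_exc.SADeprecationWarning)
    warnings.simplefilter("ignore", sa_exc.SAWarning)
    warnings.simplefilter("ignore", DeprecationWarning)
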
From 63274022fdf15154cce5c4a93b6d98322bbff0dc Mon Sep 17 00:00:00 2001
From: Sergey Motornyuk
Date: Wed, 15 Apr 2020 02:34:45 +0300
Subject: [PATCH 016/139] owslib==0.18.0
---
ckanext/spatial/tests/functional/test_package.py | 1 -
pip-requirements.txt | 2 +-
2 files changed, 1 insertion(+), 2 deletions(-)
diff --git a/ckanext/spatial/tests/functional/test_package.py b/ckanext/spatial/tests/functional/test_package.py
index 41c9b6b..9478aa4 100644
--- a/ckanext/spatial/tests/functional/test_package.py
+++ b/ckanext/spatial/tests/functional/test_package.py
@@ -12,7 +12,6 @@ from ckanext.spatial.model import PackageExtent
from ckanext.spatial.tests.base import SpatialTestBase
-@pytest.mark.usefixtures("clean_db")
class TestSpatialExtra(SpatialTestBase):
def test_spatial_extra_base(self, app):
diff --git a/pip-requirements.txt b/pip-requirements.txt
index 299bb2d..5061280 100644
--- a/pip-requirements.txt
+++ b/pip-requirements.txt
@@ -1,7 +1,7 @@
GeoAlchemy>=0.6
GeoAlchemy2==0.5.0
Shapely>=1.2.13
-OWSLib>=0.18.0
+OWSLib==0.18.0
lxml>=2.3
argparse
pyparsing>=2.1.10
From f7dc2bf40d7ff385f0668907cb09bb0a7dd30050 Mon Sep 17 00:00:00 2001
From: Sergey Motornyuk
Date: Sun, 3 May 2020 20:28:33 +0300
Subject: [PATCH 017/139] Update tests
---
ckanext/spatial/model/harvested_metadata.py | 5 +-
ckanext/spatial/tests/base.py | 1 -
ckanext/spatial/tests/conftest.py | 3 +-
.../spatial/tests/functional/test_package.py | 2 +-
.../spatial/tests/functional/test_widgets.py | 26 +-
ckanext/spatial/tests/lib/test_spatial.py | 75 +-
.../tests/model/test_harvested_metadata.py | 37 -
.../tests/model/test_package_extent.py | 73 +-
ckanext/spatial/tests/test_api.py | 145 +-
ckanext/spatial/tests/test_csw_client.py | 7 +-
ckanext/spatial/tests/test_harvest.py | 1371 -----------------
ckanext/spatial/validation/validation.py | 2 +-
setup.cfg | 2 +-
test.ini | 4 +
14 files changed, 140 insertions(+), 1613 deletions(-)
delete mode 100644 ckanext/spatial/tests/model/test_harvested_metadata.py
delete mode 100644 ckanext/spatial/tests/test_harvest.py
diff --git a/ckanext/spatial/model/harvested_metadata.py b/ckanext/spatial/model/harvested_metadata.py
index f69038e..a722518 100644
--- a/ckanext/spatial/model/harvested_metadata.py
+++ b/ckanext/spatial/model/harvested_metadata.py
@@ -38,10 +38,7 @@ class MappedXmlDocument(MappedXmlObject):
def get_xml_tree(self):
if self.xml_tree is None:
parser = etree.XMLParser(remove_blank_text=True)
- if type(self.xml_str) == six.binary_type:
- xml_str = self.xml_str.encode('utf8')
- else:
- xml_str = self.xml_str
+ xml_str = six.ensure_str(self.xml_str)
self.xml_tree = etree.fromstring(xml_str, parser=parser)
return self.xml_tree
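
A small illustration of the helper adopted above (an aside, assuming six >= 1.12,
which introduced ensure_str): it always returns the native str type, which is why
the explicit type check on self.xml_str can be dropped.

    import six

    # under Python 3, bytes are decoded (utf-8 by default) and text passes
    # through unchanged; under Python 2 the conversion goes the other way,
    # but the result is the native str type in both cases
    assert six.ensure_str(b"<gmd:MD_Metadata/>") == "<gmd:MD_Metadata/>"
    assert six.ensure_str(u"<gmd:MD_Metadata/>") == "<gmd:MD_Metadata/>"
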
diff --git a/ckanext/spatial/tests/base.py b/ckanext/spatial/tests/base.py
index 79983f4..aee2fc7 100644
--- a/ckanext/spatial/tests/base.py
+++ b/ckanext/spatial/tests/base.py
@@ -22,7 +22,6 @@ geojson_examples = {
}
-@pytest.mark.usefixtures("clean_db")
class SpatialTestBase(object):
db_srid = 4326
geojson_examples = geojson_examples
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index b93f69a..cf40a5f 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -4,9 +4,8 @@ import pytest
import os
import re
from sqlalchemy import Table
-from nose.plugins.skip import SkipTest
-from ckan.model import Session, repo, meta, engine_is_sqlite
+from ckan.model import Session, meta
from ckanext.spatial.geoalchemy_common import postgis_version
from ckanext.spatial.model.package_extent import setup as spatial_db_setup
from ckanext.harvest.model import setup as harvest_model_setup
diff --git a/ckanext/spatial/tests/functional/test_package.py b/ckanext/spatial/tests/functional/test_package.py
index 9478aa4..c39c860 100644
--- a/ckanext/spatial/tests/functional/test_package.py
+++ b/ckanext/spatial/tests/functional/test_package.py
@@ -5,13 +5,13 @@ import pytest
from ckan.model import Session
from ckan.lib.helpers import url_for
-import ckan.tests.helpers as helpers
import ckan.tests.factories as factories
from ckanext.spatial.model import PackageExtent
from ckanext.spatial.tests.base import SpatialTestBase
+@pytest.mark.usefixtures("clean_db")
class TestSpatialExtra(SpatialTestBase):
def test_spatial_extra_base(self, app):
diff --git a/ckanext/spatial/tests/functional/test_widgets.py b/ckanext/spatial/tests/functional/test_widgets.py
index a93a69f..480fba9 100644
--- a/ckanext/spatial/tests/functional/test_widgets.py
+++ b/ckanext/spatial/tests/functional/test_widgets.py
@@ -1,37 +1,27 @@
+import pytest
from ckan.lib.helpers import url_for
from ckanext.spatial.tests.base import SpatialTestBase
-try:
- import ckan.new_tests.helpers as helpers
- import ckan.new_tests.factories as factories
-except ImportError:
- import ckan.tests.helpers as helpers
- import ckan.tests.factories as factories
+import ckan.tests.factories as factories
-class TestSpatialWidgets(SpatialTestBase, helpers.FunctionalTestBase):
- def test_dataset_map(self):
- app = self._get_test_app()
-
- user = factories.User()
+class TestSpatialWidgets(SpatialTestBase):
+ @pytest.mark.usefixtures("clean_db")
+ def test_dataset_map(self, app):
dataset = factories.Dataset(
- user=user,
extras=[
{"key": "spatial", "value": self.geojson_examples["point"]}
],
)
- offset = url_for(controller="package", action="read", id=dataset["id"])
+ offset = url_for("dataset.read", id=dataset["id"])
res = app.get(offset)
assert 'data-module="dataset-map"' in res
assert "dataset_map.js" in res
- def test_spatial_search_widget(self):
-
- app = self._get_test_app()
-
- offset = url_for(controller="package", action="search")
+ def test_spatial_search_widget(self, app):
+ offset = url_for("dataset.search")
res = app.get(offset)
assert 'data-module="spatial-query"' in res
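The hunk above drops helpers.FunctionalTestBase in favour of CKAN 2.9's pytest fixtures and switches url_for to Flask endpoint names. A minimal sketch of the resulting test shape, assuming CKAN's pytest plugin supplies the app and clean_db fixtures:

import pytest
import ckan.tests.factories as factories
from ckan.lib.helpers import url_for

@pytest.mark.usefixtures("clean_db")
def test_dataset_read_page(app):
    # The app fixture replaces self._get_test_app(); url_for now takes the
    # Flask endpoint name "dataset.read" instead of controller/action pairs.
    dataset = factories.Dataset()
    response = app.get(url_for("dataset.read", id=dataset["id"]))
    assert dataset["title"] in response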
diff --git a/ckanext/spatial/tests/lib/test_spatial.py b/ckanext/spatial/tests/lib/test_spatial.py
index bbb0d0b..6ae019f 100644
--- a/ckanext/spatial/tests/lib/test_spatial.py
+++ b/ckanext/spatial/tests/lib/test_spatial.py
@@ -1,10 +1,9 @@
-from __future__ import print_function
import six
import time
import random
-from nose.tools import assert_equal
+import pytest
from shapely.geometry import asShape
@@ -23,6 +22,23 @@ from ckanext.spatial.geoalchemy_common import (
from ckanext.spatial.tests.base import SpatialTestBase
+def create_package(**package_dict):
+ user = plugins.toolkit.get_action("get_site_user")(
+ {"model": model, "ignore_auth": True}, {}
+ )
+ context = {
+ "model": model,
+ "session": model.Session,
+ "user": user["name"],
+ "extras_as_string": True,
+ "api_version": 2,
+ "ignore_auth": True,
+ }
+ package_dict = package_create(context, package_dict)
+ return context.get("id")
+
+
+@pytest.mark.usefixtures("clean_db")
class TestCompareGeometries(SpatialTestBase):
def _get_extent_object(self, geometry):
if isinstance(geometry, six.string_types):
@@ -52,24 +68,26 @@ class TestValidateBbox(object):
def test_string(self):
res = validate_bbox("-4.96,55.70,-3.78,56.43")
- assert_equal(res, self.bbox_dict)
+ assert(res == self.bbox_dict)
def test_list(self):
res = validate_bbox([-4.96, 55.70, -3.78, 56.43])
- assert_equal(res, self.bbox_dict)
+ assert(res == self.bbox_dict)
def test_bad(self):
res = validate_bbox([-4.96, 55.70, -3.78])
- assert_equal(res, None)
+ assert(res is None)
def test_bad_2(self):
res = validate_bbox("random")
- assert_equal(res, None)
+ assert(res is None)
def bbox_2_geojson(bbox_dict):
return (
- '{"type":"Polygon","coordinates":[[[%(minx)s, %(miny)s],[%(minx)s, %(maxy)s], [%(maxx)s, %(maxy)s], [%(maxx)s, %(miny)s], [%(minx)s, %(miny)s]]]}'
+ '{"type":"Polygon","coordinates":[[[%(minx)s, %(miny)s],'
+ '[%(minx)s, %(maxy)s], [%(maxx)s, %(maxy)s], '
+ '[%(maxx)s, %(miny)s], [%(minx)s, %(miny)s]]]}'
% bbox_dict
)
@@ -80,34 +98,17 @@ class SpatialQueryTestBase(SpatialTestBase):
miny = 0
maxy = 1
- @classmethod
- def setup_class(cls):
- SpatialTestBase.setup_class()
- for fixture_x in cls.fixtures_x:
- bbox = cls.x_values_to_bbox(fixture_x)
+ @pytest.fixture(autouse=True)
+ def initial_data(self, clean_db):
+ for fixture_x in self.fixtures_x:
+ bbox = self.x_values_to_bbox(fixture_x)
bbox_geojson = bbox_2_geojson(bbox)
- cls.create_package(
+ create_package(
name=munge_title_to_name(six.text_type(fixture_x)),
title=six.text_type(fixture_x),
extras=[{"key": "spatial", "value": bbox_geojson}],
)
- @classmethod
- def create_package(cls, **package_dict):
- user = plugins.toolkit.get_action("get_site_user")(
- {"model": model, "ignore_auth": True}, {}
- )
- context = {
- "model": model,
- "session": model.Session,
- "user": user["name"],
- "extras_as_string": True,
- "api_version": 2,
- "ignore_auth": True,
- }
- package_dict = package_create(context, package_dict)
- return context.get("id")
-
@classmethod
def x_values_to_bbox(cls, x_tuple):
return {
@@ -126,7 +127,7 @@ class TestBboxQuery(SpatialQueryTestBase):
bbox_dict = self.x_values_to_bbox((2, 5))
package_ids = [res.package_id for res in bbox_query(bbox_dict)]
package_titles = [model.Package.get(id_).title for id_ in package_ids]
- assert_equal(set(package_titles), set(("(0, 3)", "(0, 4)", "(4, 5)")))
+ assert(set(package_titles) == set(("(0, 3)", "(0, 4)", "(4, 5)")))
class TestBboxQueryOrdered(SpatialQueryTestBase):
@@ -139,13 +140,13 @@ class TestBboxQueryOrdered(SpatialQueryTestBase):
package_ids = [res.package_id for res in q]
package_titles = [model.Package.get(id_).title for id_ in package_ids]
# check the right items are returned
- assert_equal(
- set(package_titles),
- set(("(0, 9)", "(1, 8)", "(2, 7)", "(3, 6)", "(4, 5)")),
+ assert(
+ set(package_titles) ==
+ set(("(0, 9)", "(1, 8)", "(2, 7)", "(3, 6)", "(4, 5)"))
)
# check the order is good
- assert_equal(
- package_titles, ["(2, 7)", "(1, 8)", "(3, 6)", "(0, 9)", "(4, 5)"]
+ assert(
+ package_titles == ["(2, 7)", "(1, 8)", "(3, 6)", "(0, 9)", "(4, 5)"]
)
@@ -158,13 +159,13 @@ class TestBboxQueryPerformance(SpatialQueryTestBase):
def test_query(self):
bbox_dict = self.x_values_to_bbox((2, 7))
t0 = time.time()
- q = bbox_query(bbox_dict)
+ bbox_query(bbox_dict)
t1 = time.time()
print("bbox_query took: ", t1 - t0)
def test_query_ordered(self):
bbox_dict = self.x_values_to_bbox((2, 7))
t0 = time.time()
- q = bbox_query_ordered(bbox_dict)
+ bbox_query_ordered(bbox_dict)
t1 = time.time()
print("bbox_query_ordered took: ", t1 - t0)
diff --git a/ckanext/spatial/tests/model/test_harvested_metadata.py b/ckanext/spatial/tests/model/test_harvested_metadata.py
deleted file mode 100644
index 9a4fbcc..0000000
--- a/ckanext/spatial/tests/model/test_harvested_metadata.py
+++ /dev/null
@@ -1,37 +0,0 @@
-import os
-
-from nose.tools import assert_equal
-
-from ckanext.spatial.model import ISODocument
-
-
-def open_xml_fixture(xml_filename):
- xml_filepath = os.path.join(os.path.dirname(__file__), "xml", xml_filename)
- with open(xml_filepath, "rb") as f:
- xml_string_raw = f.read()
-
- try:
- xml_string = xml_string_raw.encode("utf-8")
- except UnicodeDecodeError as e:
- assert 0, "ERROR: Unicode Error reading file '%s': %s" % (
- metadata_filepath,
- e,
- )
- return xml_string
-
-
-def test_simple():
- xml_string = open_xml_fixture("gemini_dataset.xml")
- iso_document = ISODocument(xml_string)
- iso_values = iso_document.read_values()
- assert_equal(iso_values["guid"], "test-dataset-1")
- assert_equal(iso_values["metadata-date"], "2011-09-23T10:06:08")
-
-
-def test_multiplicity_warning():
- # This dataset lacks a value for Metadata Date and should
- # produce a log.warning, but not raise an exception.
- xml_string = open_xml_fixture("FCSConservancyPolygons.xml")
- iso_document = ISODocument(xml_string)
- iso_values = iso_document.read_values()
- assert_equal(iso_values["guid"], "B8A22DF4-B0DC-4F0B-A713-0CF5F8784A28")
diff --git a/ckanext/spatial/tests/model/test_package_extent.py b/ckanext/spatial/tests/model/test_package_extent.py
index c4385b8..eb6cd9c 100644
--- a/ckanext/spatial/tests/model/test_package_extent.py
+++ b/ckanext/spatial/tests/model/test_package_extent.py
@@ -1,19 +1,18 @@
-from nose.tools import assert_equals
+import pytest
+
from shapely.geometry import asShape
from ckan.model import Session
from ckan.lib.helpers import json
-try:
- import ckan.new_tests.factories as factories
-except ImportError:
- import ckan.tests.factories as factories
+import ckan.tests.factories as factories
from ckanext.spatial.model import PackageExtent
from ckanext.spatial.geoalchemy_common import WKTElement, legacy_geoalchemy
from ckanext.spatial.tests.base import SpatialTestBase
+@pytest.mark.usefixtures("clean_db")
class TestPackageExtent(SpatialTestBase):
def test_create_extent(self):
@@ -28,31 +27,31 @@ class TestPackageExtent(SpatialTestBase):
)
package_extent.save()
- assert_equals(package_extent.package_id, package["id"])
+ assert(package_extent.package_id == package["id"])
if legacy_geoalchemy:
- assert_equals(
- Session.scalar(package_extent.the_geom.x),
- geojson["coordinates"][0],
+ assert(
+ Session.scalar(package_extent.the_geom.x) ==
+ geojson["coordinates"][0]
)
- assert_equals(
- Session.scalar(package_extent.the_geom.y),
- geojson["coordinates"][1],
+ assert(
+ Session.scalar(package_extent.the_geom.y) ==
+ geojson["coordinates"][1]
)
- assert_equals(
- Session.scalar(package_extent.the_geom.srid), self.db_srid
+ assert(
+ Session.scalar(package_extent.the_geom.srid) == self.db_srid
)
else:
from sqlalchemy import func
- assert_equals(
- Session.query(func.ST_X(package_extent.the_geom)).first()[0],
- geojson["coordinates"][0],
+ assert(
+ Session.query(func.ST_X(package_extent.the_geom)).first()[0] ==
+ geojson["coordinates"][0]
)
- assert_equals(
- Session.query(func.ST_Y(package_extent.the_geom)).first()[0],
- geojson["coordinates"][1],
+ assert(
+ Session.query(func.ST_Y(package_extent.the_geom)).first()[0] ==
+ geojson["coordinates"][1]
)
- assert_equals(package_extent.the_geom.srid, self.db_srid)
+ assert(package_extent.the_geom.srid == self.db_srid)
def test_update_extent(self):
@@ -67,18 +66,18 @@ class TestPackageExtent(SpatialTestBase):
)
package_extent.save()
if legacy_geoalchemy:
- assert_equals(
- Session.scalar(package_extent.the_geom.geometry_type),
- "ST_Point",
+ assert(
+ Session.scalar(package_extent.the_geom.geometry_type) ==
+ "ST_Point"
)
else:
from sqlalchemy import func
- assert_equals(
+ assert(
Session.query(
func.ST_GeometryType(package_extent.the_geom)
- ).first()[0],
- "ST_Point",
+ ).first()[0] ==
+ "ST_Point"
)
# Update the geometry (Point -> Polygon)
@@ -88,20 +87,20 @@ class TestPackageExtent(SpatialTestBase):
package_extent.the_geom = WKTElement(shape.wkt, self.db_srid)
package_extent.save()
- assert_equals(package_extent.package_id, package["id"])
+ assert(package_extent.package_id == package["id"])
if legacy_geoalchemy:
- assert_equals(
- Session.scalar(package_extent.the_geom.geometry_type),
- "ST_Polygon",
+ assert(
+ Session.scalar(package_extent.the_geom.geometry_type) ==
+ "ST_Polygon"
)
- assert_equals(
- Session.scalar(package_extent.the_geom.srid), self.db_srid
+ assert(
+ Session.scalar(package_extent.the_geom.srid) == self.db_srid
)
else:
- assert_equals(
+ assert(
Session.query(
func.ST_GeometryType(package_extent.the_geom)
- ).first()[0],
- "ST_Polygon",
+ ).first()[0] ==
+ "ST_Polygon"
)
- assert_equals(package_extent.the_geom.srid, self.db_srid)
+ assert(package_extent.the_geom.srid == self.db_srid)
diff --git a/ckanext/spatial/tests/test_api.py b/ckanext/spatial/tests/test_api.py
index 90ff7f2..9798f14 100644
--- a/ckanext/spatial/tests/test_api.py
+++ b/ckanext/spatial/tests/test_api.py
@@ -1,15 +1,10 @@
-from nose.plugins.skip import SkipTest
-from nose.tools import assert_equals, assert_raises
+import pytest
from ckan.model import Session
from ckan.lib.search import SearchError
-try:
- import ckan.new_tests.helpers as helpers
- import ckan.new_tests.factories as factories
-except ImportError:
- import ckan.tests.helpers as helpers
- import ckan.tests.factories as factories
+import ckan.tests.helpers as helpers
+import ckan.tests.factories as factories
from ckanext.spatial.tests.base import SpatialTestBase
@@ -21,12 +16,9 @@ extents = {
}
+@pytest.mark.usefixtures("clean_db")
class TestAction(SpatialTestBase):
- def teardown(self):
- helpers.reset_db()
-
def test_spatial_query(self):
-
dataset = factories.Dataset(
extras=[
{"key": "spatial", "value": self.geojson_examples["point"]}
@@ -37,8 +29,8 @@ class TestAction(SpatialTestBase):
"package_search", extras={"ext_bbox": "-180,-90,180,90"}
)
- assert_equals(result["count"], 1)
- assert_equals(result["results"][0]["id"], dataset["id"])
+ assert(result["count"] == 1)
+ assert(result["results"][0]["id"] == dataset["id"])
def test_spatial_query_outside_bbox(self):
@@ -52,19 +44,16 @@ class TestAction(SpatialTestBase):
"package_search", extras={"ext_bbox": "-10,-20,10,20"}
)
- assert_equals(result["count"], 0)
+ assert(result["count"] == 0)
def test_spatial_query_wrong_bbox(self):
-
- assert_raises(
- SearchError,
- helpers.call_action,
- "package_search",
- extras={"ext_bbox": "-10,-20,10,a"},
- )
+ with pytest.raises(SearchError):
+ helpers.call_action(
+ "package_search",
+ extras={"ext_bbox": "-10,-20,10,a"},
+ )
def test_spatial_query_nz(self):
-
dataset = factories.Dataset(
extras=[{"key": "spatial", "value": extents["nz"]}]
)
@@ -73,21 +62,19 @@ class TestAction(SpatialTestBase):
"package_search", extras={"ext_bbox": "56,-54,189,-28"}
)
- assert_equals(result["count"], 1)
- assert_equals(result["results"][0]["id"], dataset["id"])
+ assert(result["count"] == 1)
+ assert(result["results"][0]["id"] == dataset["id"])
def test_spatial_query_nz_wrap(self):
-
dataset = factories.Dataset(
extras=[{"key": "spatial", "value": extents["nz"]}]
)
-
result = helpers.call_action(
"package_search", extras={"ext_bbox": "-203,-54,-167,-28"}
)
- assert_equals(result["count"], 1)
- assert_equals(result["results"][0]["id"], dataset["id"])
+ assert(result["count"] == 1)
+ assert(result["results"][0]["id"] == dataset["id"])
def test_spatial_query_ohio(self):
@@ -99,8 +86,8 @@ class TestAction(SpatialTestBase):
"package_search", extras={"ext_bbox": "-110,37,-78,53"}
)
- assert_equals(result["count"], 1)
- assert_equals(result["results"][0]["id"], dataset["id"])
+ assert(result["count"] == 1)
+ assert(result["results"][0]["id"] == dataset["id"])
def test_spatial_query_ohio_wrap(self):
@@ -112,8 +99,8 @@ class TestAction(SpatialTestBase):
"package_search", extras={"ext_bbox": "258,37,281,51"}
)
- assert_equals(result["count"], 1)
- assert_equals(result["results"][0]["id"], dataset["id"])
+ assert(result["count"] == 1)
+ assert(result["results"][0]["id"] == dataset["id"])
def test_spatial_query_dateline_1(self):
@@ -125,8 +112,8 @@ class TestAction(SpatialTestBase):
"package_search", extras={"ext_bbox": "-197,56,-128,70"}
)
- assert_equals(result["count"], 1)
- assert_equals(result["results"][0]["id"], dataset["id"])
+ assert(result["count"] == 1)
+ assert(result["results"][0]["id"] == dataset["id"])
def test_spatial_query_dateline_2(self):
@@ -138,8 +125,8 @@ class TestAction(SpatialTestBase):
"package_search", extras={"ext_bbox": "162,54,237,70"}
)
- assert_equals(result["count"], 1)
- assert_equals(result["results"][0]["id"], dataset["id"])
+ assert(result["count"] == 1)
+ assert(result["results"][0]["id"] == dataset["id"])
def test_spatial_query_dateline_3(self):
@@ -151,8 +138,8 @@ class TestAction(SpatialTestBase):
"package_search", extras={"ext_bbox": "-197,56,-128,70"}
)
- assert_equals(result["count"], 1)
- assert_equals(result["results"][0]["id"], dataset["id"])
+ assert(result["count"] == 1)
+ assert(result["results"][0]["id"] == dataset["id"])
def test_spatial_query_dateline_4(self):
@@ -164,12 +151,13 @@ class TestAction(SpatialTestBase):
"package_search", extras={"ext_bbox": "162,54,237,70"}
)
- assert_equals(result["count"], 1)
- assert_equals(result["results"][0]["id"], dataset["id"])
+ assert(result["count"] == 1)
+ assert(result["results"][0]["id"] == dataset["id"])
-class TestHarvestedMetadataAPI(SpatialTestBase, helpers.FunctionalTestBase):
- def test_api(self):
+@pytest.mark.usefixtures("clean_db")
+class TestHarvestedMetadataAPI(SpatialTestBase):
+ def test_api(self, app):
try:
from ckanext.harvest.model import (
HarvestObject,
@@ -178,7 +166,8 @@ class TestHarvestedMetadataAPI(SpatialTestBase, helpers.FunctionalTestBase):
HarvestObjectExtra,
)
except ImportError:
- raise SkipTest("The harvester extension is needed for these tests")
+ raise pytest.skip(
+ "The harvester extension is needed for these tests")
content1 = "Content 1"
ho1 = HarvestObject(
@@ -207,72 +196,28 @@ class TestHarvestedMetadataAPI(SpatialTestBase, helpers.FunctionalTestBase):
object_id_1 = ho1.id
object_id_2 = ho2.id
- app = self._get_test_app()
-
- # Test redirects for old URLs
- url = "/api/2/rest/harvestobject/{0}/xml".format(object_id_1)
- r = app.get(url)
- assert_equals(r.status_int, 301)
- assert (
- "/harvest/object/{0}".format(object_id_1) in r.headers["Location"]
- )
-
- url = "/api/2/rest/harvestobject/{0}/html".format(object_id_1)
- r = app.get(url)
- assert_equals(r.status_int, 301)
- assert (
- "/harvest/object/{0}/html".format(object_id_1)
- in r.headers["Location"]
- )
-
# Access object content
url = "/harvest/object/{0}".format(object_id_1)
- r = app.get(url)
- assert_equals(r.status_int, 200)
- assert_equals(
- r.headers["Content-Type"], "application/xml; charset=utf-8"
+ r = app.get(url, status=200)
+ assert(
+ r.headers["Content-Type"] == "application/xml; charset=utf-8"
)
- assert_equals(
- r.body,
- '\nContent 1',
+ assert(
+ r.body ==
+ '\nContent 1'
)
# Access original content in object extra (if present)
url = "/harvest/object/{0}/original".format(object_id_1)
r = app.get(url, status=404)
- assert_equals(r.status_int, 404)
url = "/harvest/object/{0}/original".format(object_id_2)
- r = app.get(url)
- assert_equals(r.status_int, 200)
- assert_equals(
- r.headers["Content-Type"], "application/xml; charset=utf-8"
+ r = app.get(url, status=200)
+ assert(
+ r.headers["Content-Type"] == "application/xml; charset=utf-8"
)
- assert_equals(
- r.body,
+ assert(
+ r.body ==
'\n'
- + "Original Content 2",
+ + "Original Content 2"
)
-
- # Access HTML transformation
- url = "/harvest/object/{0}/html".format(object_id_1)
- r = app.get(url)
- assert_equals(r.status_int, 200)
- assert_equals(r.headers["Content-Type"], "text/html; charset=utf-8")
- assert "GEMINI record about" in r.body
-
- url = "/harvest/object/{0}/html/original".format(object_id_1)
- r = app.get(url, status=404)
- assert_equals(r.status_int, 404)
-
- url = "/harvest/object/{0}/html".format(object_id_2)
- r = app.get(url)
- assert_equals(r.status_int, 200)
- assert_equals(r.headers["Content-Type"], "text/html; charset=utf-8")
- assert "GEMINI record about" in r.body
-
- url = "/harvest/object/{0}/html/original".format(object_id_2)
- r = app.get(url)
- assert_equals(r.status_int, 200)
- assert_equals(r.headers["Content-Type"], "text/html; charset=utf-8")
- assert "GEMINI record about" in r.body
diff --git a/ckanext/spatial/tests/test_csw_client.py b/ckanext/spatial/tests/test_csw_client.py
index 1c0408e..032f34a 100644
--- a/ckanext/spatial/tests/test_csw_client.py
+++ b/ckanext/spatial/tests/test_csw_client.py
@@ -3,8 +3,9 @@ from six.moves.urllib.request import urlopen
from six.moves.urllib.error import URLError
import os
-from pylons import config
-from nose.plugins.skip import SkipTest
+import pytest
+
+from ckan.plugins.toolkit import config
from ckan.model import engine_is_sqlite
@@ -59,7 +60,7 @@ class CkanProcess(CkanServerCase):
@classmethod
def setup_class(cls):
if engine_is_sqlite():
- raise SkipTest("Non-memory database needed for this test")
+ return pytest.skip("Non-memory database needed for this test")
cls.pid = cls._start_ckan_server()
## Don't need to init database, since it is same database as this process uses
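On the hunk above: pytest.skip raises pytest's own Skipped exception internally, so the early return in setup_class behaves the same as a bare call. A minimal sketch, independent of CKAN's database helpers:

import pytest

class TestNeedsRealDatabase:
    @classmethod
    def setup_class(cls):
        # Whether written as `return pytest.skip(...)` or plain `pytest.skip(...)`,
        # every test in the class is skipped before cls.engine is needed.
        backend_available = False  # placeholder condition for the sketch
        if not backend_available:
            pytest.skip("Non-memory database needed for this test")
        cls.engine = object()

    def test_uses_engine(self):
        assert self.engine is not None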
diff --git a/ckanext/spatial/tests/test_harvest.py b/ckanext/spatial/tests/test_harvest.py
deleted file mode 100644
index 39a6a45..0000000
--- a/ckanext/spatial/tests/test_harvest.py
+++ /dev/null
@@ -1,1371 +0,0 @@
-from __future__ import absolute_import
-import os
-from datetime import datetime, date
-import lxml
-import json
-from uuid import uuid4
-from nose.plugins.skip import SkipTest
-from nose.tools import assert_equal, assert_in, assert_raises
-
-from ckan.lib.base import config
-from ckan import model
-from ckan.model import Session, Package, Group, User
-from ckan.logic.schema import (
- default_update_package_schema,
- default_create_package_schema,
-)
-from ckan.logic import get_action
-
-try:
- from ckan.new_tests.helpers import call_action
-except ImportError:
- from ckan.tests.helpers import call_action
-
-from ckanext.harvest.model import HarvestSource, HarvestJob, HarvestObject
-from ckanext.spatial.validation import Validators
-from ckanext.spatial.harvesters.gemini import (
- GeminiDocHarvester,
- GeminiWafHarvester,
- GeminiHarvester,
-)
-from ckanext.spatial.harvesters.base import SpatialHarvester
-from ckanext.spatial.tests.base import SpatialTestBase
-
-from .xml_file_server import serve
-
-# Start simple HTTP server that serves XML test files
-serve()
-
-
-class HarvestFixtureBase(SpatialTestBase):
- def setup(self):
- # Add sysadmin user
- harvest_user = model.User(
- name=u"harvest", password=u"test", sysadmin=True
- )
- Session.add(harvest_user)
- Session.commit()
-
- package_schema = default_update_package_schema()
- self.context = {
- "model": model,
- "session": Session,
- "user": u"harvest",
- "schema": package_schema,
- "api_version": "2",
- }
-
- def teardown(self):
- model.repo.rebuild_db()
-
- def _create_job(self, source_id):
- # Create a job
- context = {"model": model, "session": Session, "user": u"harvest"}
-
- job_dict = get_action("harvest_job_create")(
- context, {"source_id": source_id}
- )
- job = HarvestJob.get(job_dict["id"])
- assert job
-
- return job
-
- def _create_source_and_job(self, source_fixture):
- context = {"model": model, "session": Session, "user": u"harvest"}
-
- if (
- config.get("ckan.harvest.auth.profile") == u"publisher"
- and not "publisher_id" in source_fixture
- ):
- source_fixture["publisher_id"] = self.publisher.id
-
- source_dict = get_action("harvest_source_create")(
- context, source_fixture
- )
- source = HarvestSource.get(source_dict["id"])
- assert source
-
- job = self._create_job(source.id)
-
- return source, job
-
- def _run_job_for_single_document(
- self,
- job,
- force_import=False,
- expect_gather_errors=False,
- expect_obj_errors=False,
- ):
-
- harvester = GeminiDocHarvester()
-
- harvester.force_import = force_import
-
- object_ids = harvester.gather_stage(job)
- assert object_ids, len(object_ids) == 1
- if expect_gather_errors:
- assert len(job.gather_errors) > 0
- else:
- assert len(job.gather_errors) == 0
-
- assert harvester.fetch_stage(object_ids) == True
-
- obj = HarvestObject.get(object_ids[0])
- assert obj, obj.content
-
- harvester.import_stage(obj)
- Session.refresh(obj)
- if expect_obj_errors:
- assert len(obj.errors) > 0
- else:
- assert len(obj.errors) == 0
-
- job.status = u"Finished"
- job.save()
-
- return obj
-
-
-class TestHarvest(HarvestFixtureBase):
- @classmethod
- def setup_class(cls):
- SpatialHarvester._validator = Validators(profiles=["gemini2"])
- HarvestFixtureBase.setup_class()
-
- def clean_tags(self, tags):
- return [{u"name": x["name"]} for x in tags]
-
- def find_extra(self, pkg, key):
- values = [e["value"] for e in pkg["extras"] if e["key"] == key]
- return values[0] if len(values) == 1 else None
-
- def test_harvest_basic(self):
-
- # Create source
- source_fixture = {
- "title": "Test Source",
- "name": "test-source",
- "url": u"http://127.0.0.1:8999/gemini2.1-waf/index.html",
- "source_type": u"gemini-waf",
- }
-
- source, job = self._create_source_and_job(source_fixture)
-
- harvester = GeminiWafHarvester()
-
- # We need to send an actual job, not the dict
- object_ids = harvester.gather_stage(job)
-
- assert len(object_ids) == 2
-
- # Fetch stage always returns True for Waf harvesters
- assert harvester.fetch_stage(object_ids) == True
-
- objects = []
- for object_id in object_ids:
- obj = HarvestObject.get(object_id)
- assert obj
- objects.append(obj)
- harvester.import_stage(obj)
-
- pkgs = Session.query(Package).filter(Package.type != u"harvest").all()
-
- assert_equal(len(pkgs), 2)
-
- pkg_ids = [pkg.id for pkg in pkgs]
-
- for obj in objects:
- assert obj.current == True
- assert obj.package_id in pkg_ids
-
- def test_harvest_fields_service(self):
-
- # Create source
- source_fixture = {
- "title": "Test Source",
- "name": "test-source",
- "url": u"http://127.0.0.1:8999/gemini2.1/service1.xml",
- "source_type": u"gemini-single",
- }
-
- source, job = self._create_source_and_job(source_fixture)
-
- harvester = GeminiDocHarvester()
-
- object_ids = harvester.gather_stage(job)
- assert object_ids, len(object_ids) == 1
-
- # No gather errors
- assert len(job.gather_errors) == 0
-
- # Fetch stage always returns True for Single Doc harvesters
- assert harvester.fetch_stage(object_ids) == True
-
- obj = HarvestObject.get(object_ids[0])
- assert obj, obj.content
- assert obj.guid == u"test-service-1"
-
- harvester.import_stage(obj)
-
- # No object errors
- assert len(obj.errors) == 0
-
- package_dict = get_action("package_show")(
- self.context, {"id": obj.package_id}
- )
-
- assert package_dict
-
- expected = {
- "name": u"one-scotland-address-gazetteer-web-map-service-wms",
- "title": u"One Scotland Address Gazetteer Web Map Service (WMS)",
- "tags": [
- {u"name": u"Addresses"},
- {u"name": u"Scottish National Gazetteer"},
- ],
- "notes": u"This service displays its contents at larger scale than 1:10000. [edited]",
- }
-
- package_dict["tags"] = self.clean_tags(package_dict["tags"])
-
- for key, value in expected.items():
- if not package_dict[key] == value:
- raise AssertionError(
- "Unexpected value for %s: %s (was expecting %s)"
- % (key, package_dict[key], value)
- )
-
- if config.get("ckan.harvest.auth.profile") == u"publisher":
- assert package_dict["groups"] == [self.publisher.id]
-
- expected_extras = {
- # Basic
- "guid": obj.guid,
- "UKLP": u"True",
- "resource-type": u"service",
- "access_constraints": u'["No restriction on public access"]',
- "responsible-party": u"The Improvement Service (owner)",
- "provider": u"The Improvement Service",
- "contact-email": u"OSGCM@improvementservice.org.uk",
- # Spatial
- "bbox-east-long": u"0.5242365625",
- "bbox-north-lat": u"61.0243",
- "bbox-south-lat": u"54.4764484375",
- "bbox-west-long": u"-9.099786875",
- "spatial": u'{"type": "Polygon", "coordinates": [[[0.5242365625, 54.4764484375], [-9.099786875, 54.4764484375], [-9.099786875, 61.0243], [0.5242365625, 61.0243], [0.5242365625, 54.4764484375]]]}',
- # Other
- "coupled-resource": u'[{"href": ["http://scotgovsdi.edina.ac.uk/srv/en/csw?service=CSW&request=GetRecordById&version=2.0.2&outputSchema=http://www.isotc211.org/2005/gmd&elementSetName=full&id=250ea276-48e2-4189-8a89-fcc4ca92d652"], "uuid": ["250ea276-48e2-4189-8a89-fcc4ca92d652"], "title": []}]',
- "dataset-reference-date": u'[{"type": "publication", "value": "2011-09-08"}]',
- "frequency-of-update": u"daily",
- "licence": u'["Use of the One Scotland Gazetteer data used by this this service is available to any organisation that is a member of the One Scotland Mapping Agreement. It is not currently commercially available", "http://www.test.gov.uk/licenseurl"]',
- "licence_url": u"http://www.test.gov.uk/licenseurl",
- "metadata-date": u"2011-09-08T16:07:32",
- "metadata-language": u"eng",
- "spatial-data-service-type": u"other",
- "spatial-reference-system": u"OSGB 1936 / British National Grid (EPSG:27700)",
- "temporal_coverage-from": u'["1904-06-16"]',
- "temporal_coverage-to": u'["2004-06-16"]',
- }
-
- for key, value in expected_extras.items():
- extra_value = self.find_extra(package_dict, key)
- if extra_value is None:
- raise AssertionError("Extra %s not present in package" % key)
-
- if not extra_value == value:
- raise AssertionError(
- "Unexpected value for extra %s: %s (was expecting %s)"
- % (key, package_dict["extras"][key], value)
- )
-
- expected_resource = {
- "ckan_recommended_wms_preview": "True",
- "description": "Link to the GetCapabilities request for this service",
- "name": "Web Map Service (WMS)",
- "resource_locator_function": "download",
- "resource_locator_protocol": "OGC:WMS-1.3.0-http-get-capabilities",
- "url": u"http://127.0.0.1:8999/wms/capabilities.xml",
- "verified": "True",
- }
-
- resource = package_dict["resources"][0]
- for key, value in expected_resource.items():
- if not key in resource:
- raise AssertionError(
- "Expected key not in resource: %s" % (key)
- )
- if not resource[key] == value:
- raise AssertionError(
- "Unexpected value in resource for %s: %s (was expecting %s)"
- % (key, resource[key], value)
- )
- assert (
- datetime.strptime(
- resource["verified_date"], "%Y-%m-%dT%H:%M:%S.%f"
- ).date()
- == date.today()
- )
- assert resource["format"].lower() == "wms"
-
- def test_harvest_fields_dataset(self):
-
- # Create source
- source_fixture = {
- "title": "Test Source",
- "name": "test-source",
- "url": u"http://127.0.0.1:8999/gemini2.1/dataset1.xml",
- "source_type": u"gemini-single",
- }
-
- source, job = self._create_source_and_job(source_fixture)
-
- harvester = GeminiDocHarvester()
-
- object_ids = harvester.gather_stage(job)
- assert object_ids, len(object_ids) == 1
-
- # No gather errors
- assert len(job.gather_errors) == 0
-
- # Fetch stage always returns True for Single Doc harvesters
- assert harvester.fetch_stage(object_ids) == True
-
- obj = HarvestObject.get(object_ids[0])
- assert obj, obj.content
- assert obj.guid == u"test-dataset-1"
-
- harvester.import_stage(obj)
-
- # No object errors
- assert len(obj.errors) == 0
-
- package_dict = get_action("package_show")(
- self.context, {"id": obj.package_id}
- )
-
- assert package_dict
-
- expected = {
- "name": u"country-parks-scotland",
- "title": u"Country Parks (Scotland)",
- "tags": [{u"name": u"Nature conservation"}],
- "notes": u"Parks are set up by Local Authorities to provide open-air recreation facilities close to towns and cities. [edited]",
- }
-
- package_dict["tags"] = self.clean_tags(package_dict["tags"])
-
- for key, value in expected.items():
- if not package_dict[key] == value:
- raise AssertionError(
- "Unexpected value for %s: %s (was expecting %s)"
- % (key, package_dict[key], value)
- )
-
- if config.get("ckan.harvest.auth.profile") == u"publisher":
- assert package_dict["groups"] == [self.publisher.id]
-
- expected_extras = {
- # Basic
- "guid": obj.guid,
- "resource-type": u"dataset",
- "responsible-party": u"Scottish Natural Heritage (custodian, distributor)",
- "access_constraints": u'["Copyright Scottish Natural Heritage"]',
- "contact-email": u"data_supply@snh.gov.uk",
- "provider": "",
- # Spatial
- "bbox-east-long": u"0.205857204",
- "bbox-north-lat": u"61.06066944",
- "bbox-south-lat": u"54.529947158",
- "bbox-west-long": u"-8.97114288",
- "spatial": u'{"type": "Polygon", "coordinates": [[[0.205857204, 54.529947158], [-8.97114288, 54.529947158], [-8.97114288, 61.06066944], [0.205857204, 61.06066944], [0.205857204, 54.529947158]]]}',
- # Other
- "coupled-resource": u"[]",
- "dataset-reference-date": u'[{"type": "creation", "value": "2004-02"}, {"type": "revision", "value": "2006-07-03"}]',
- "frequency-of-update": u"irregular",
- "licence": u'["Reference and PSMA Only", "http://www.test.gov.uk/licenseurl"]',
- "licence_url": u"http://www.test.gov.uk/licenseurl",
- "metadata-date": u"2011-09-23T10:06:08",
- "metadata-language": u"eng",
- "spatial-reference-system": u"urn:ogc:def:crs:EPSG::27700",
- "temporal_coverage-from": u'["1998"]',
- "temporal_coverage-to": u'["2010"]',
- }
-
- for key, value in expected_extras.items():
- extra_value = self.find_extra(package_dict, key)
- if extra_value is None:
- raise AssertionError("Extra %s not present in package" % key)
-
- if not extra_value == value:
- raise AssertionError(
- "Unexpected value for extra %s: %s (was expecting %s)"
- % (key, package_dict["extras"][key], value)
- )
-
- expected_resource = {
- "description": "Test Resource Description",
- "format": u"",
- "name": "Test Resource Name",
- "resource_locator_function": "download",
- "resource_locator_protocol": "test-protocol",
- "url": u"https://gateway.snh.gov.uk/pls/apex_ddtdb2/f?p=101",
- }
-
- resource = package_dict["resources"][0]
- for key, value in expected_resource.items():
- if not resource[key] == value:
- raise AssertionError(
- "Unexpected value in resource for %s: %s (was expecting %s)"
- % (key, resource[key], value)
- )
-
- def test_harvest_error_bad_xml(self):
- # Create source
- source_fixture = {
- "title": "Test Source",
- "name": "test-source",
- "url": u"http://127.0.0.1:8999/gemini2.1/error_bad_xml.xml",
- "source_type": u"gemini-single",
- }
-
- source, job = self._create_source_and_job(source_fixture)
-
- harvester = GeminiDocHarvester()
-
- try:
- object_ids = harvester.gather_stage(job)
- except lxml.etree.XMLSyntaxError:
- # this only occurs in debug_exception_mode
- pass
- else:
- assert object_ids is None
-
- # Check gather errors
- assert len(job.gather_errors) == 1
- assert job.gather_errors[0].harvest_job_id == job.id
- assert "Error parsing the document" in job.gather_errors[0].message
-
- def test_harvest_error_404(self):
- # Create source
- source_fixture = {
- "title": "Test Source",
- "name": "test-source",
- "url": u"http://127.0.0.1:8999/gemini2.1/not_there.xml",
- "source_type": u"gemini-single",
- }
-
- source, job = self._create_source_and_job(source_fixture)
-
- harvester = GeminiDocHarvester()
-
- object_ids = harvester.gather_stage(job)
- assert object_ids is None
-
- # Check gather errors
- assert len(job.gather_errors) == 1
- assert job.gather_errors[0].harvest_job_id == job.id
- assert "Unable to get content for URL" in job.gather_errors[0].message
-
- def test_harvest_error_validation(self):
-
- # Create source
- source_fixture = {
- "title": "Test Source",
- "name": "test-source",
- "url": u"http://127.0.0.1:8999/gemini2.1/error_validation.xml",
- "source_type": u"gemini-single",
- }
-
- source, job = self._create_source_and_job(source_fixture)
-
- harvester = GeminiDocHarvester()
-
- object_ids = harvester.gather_stage(job)
-
- # Right now the import process goes ahead even with validation errors
- assert object_ids, len(object_ids) == 1
-
- # No gather errors
- assert len(job.gather_errors) == 0
-
- # Fetch stage always returns True for Single Doc harvesters
- assert harvester.fetch_stage(object_ids) == True
-
- obj = HarvestObject.get(object_ids[0])
- assert obj, obj.content
- assert obj.guid == u"test-error-validation-1"
-
- harvester.import_stage(obj)
-
- # Check errors
- assert len(obj.errors) == 1
- assert obj.errors[0].harvest_object_id == obj.id
-
- message = obj.errors[0].message
-
- assert_in("One email address shall be provided", message)
- assert_in(
- "Service type shall be one of 'discovery', 'view', 'download', 'transformation', 'invoke' or 'other' following INSPIRE generic names",
- message,
- )
- assert_in(
- "Limitations on public access code list value shall be 'otherRestrictions'",
- message,
- )
- assert_in("One organisation name shall be provided", message)
-
- def test_harvest_update_records(self):
-
- # Create source
- source_fixture = {
- "title": "Test Source",
- "name": "test-source",
- "url": u"http://127.0.0.1:8999/gemini2.1/dataset1.xml",
- "source_type": u"gemini-single",
- }
-
- source, first_job = self._create_source_and_job(source_fixture)
-
- first_obj = self._run_job_for_single_document(first_job)
-
- first_package_dict = get_action("package_show")(
- self.context, {"id": first_obj.package_id}
- )
-
- # Package was created
- assert first_package_dict
- assert first_obj.current == True
- assert first_obj.package
-
- # Create and run a second job, the package should not be updated
- second_job = self._create_job(source.id)
-
- second_obj = self._run_job_for_single_document(second_job)
-
- Session.remove()
- Session.add(first_obj)
- Session.add(second_obj)
-
- Session.refresh(first_obj)
- Session.refresh(second_obj)
-
- second_package_dict = get_action("package_show")(
- self.context, {"id": first_obj.package_id}
- )
-
- # Package was not updated
- assert second_package_dict, (
- first_package_dict["id"] == second_package_dict["id"]
- )
- assert not second_obj.package, not second_obj.package_id
- assert second_obj.current == False, first_obj.current == True
-
- # Create and run a third job, forcing the importing to simulate an update in the package
- third_job = self._create_job(source.id)
- third_obj = self._run_job_for_single_document(
- third_job, force_import=True
- )
-
- # For some reason first_obj does not get updated after the import_stage,
- # and we have to force a refresh to get the actual DB values.
- Session.remove()
- Session.add(first_obj)
- Session.add(second_obj)
- Session.add(third_obj)
-
- Session.refresh(first_obj)
- Session.refresh(second_obj)
- Session.refresh(third_obj)
-
- third_package_dict = get_action("package_show")(
- self.context, {"id": third_obj.package_id}
- )
-
- # Package was updated
- assert third_package_dict, (
- first_package_dict["id"] == third_package_dict["id"]
- )
- assert third_obj.package, (
- third_obj.package_id == first_package_dict["id"]
- )
- assert third_obj.current == True
- assert second_obj.current == False
- assert first_obj.current == False
-
- def test_harvest_deleted_record(self):
-
- # Create source
- source_fixture = {
- "title": "Test Source",
- "name": "test-source",
- "url": u"http://127.0.0.1:8999/gemini2.1/service1.xml",
- "source_type": u"gemini-single",
- }
-
- source, first_job = self._create_source_and_job(source_fixture)
-
- first_obj = self._run_job_for_single_document(first_job)
-
- first_package_dict = get_action("package_show")(
- self.context, {"id": first_obj.package_id}
- )
-
- # Package was created
- assert first_package_dict
- assert first_package_dict["state"] == u"active"
- assert first_obj.current == True
-
- # Delete package
- first_package_dict["state"] = u"deleted"
- self.context.update({"id": first_package_dict["id"]})
- updated_package_dict = get_action("package_update")(
- self.context, first_package_dict
- )
-
- # Create and run a second job, the date has not changed, so the package should not be updated
- # and remain deleted
- first_job.status = u"Finished"
- first_job.save()
- second_job = self._create_job(source.id)
-
- second_obj = self._run_job_for_single_document(second_job)
-
- second_package_dict = get_action("package_show")(
- self.context, {"id": first_obj.package_id}
- )
-
- # Package was not updated
- assert second_package_dict, (
- updated_package_dict["id"] == second_package_dict["id"]
- )
- assert not second_obj.package, not second_obj.package_id
- assert second_obj.current == False, first_obj.current == True
-
- # Harvest an updated document, with a more recent modified date, package should be
- # updated and reactivated
- source.url = u"http://127.0.0.1:8999/gemini2.1/service1_newer.xml"
- source.save()
-
- third_job = self._create_job(source.id)
-
- third_obj = self._run_job_for_single_document(third_job)
-
- third_package_dict = get_action("package_show")(
- self.context, {"id": first_obj.package_id}
- )
-
- Session.remove()
- Session.add(first_obj)
- Session.add(second_obj)
- Session.add(third_obj)
-
- Session.refresh(first_obj)
- Session.refresh(second_obj)
- Session.refresh(third_obj)
-
- # Package was updated
- assert third_package_dict, (
- third_package_dict["id"] == second_package_dict["id"]
- )
- assert third_obj.package, third_obj.package
- assert third_obj.current == True, second_obj.current == False
- assert first_obj.current == False
-
- assert "NEWER" in third_package_dict["title"]
- assert third_package_dict["state"] == u"active"
-
- def test_harvest_different_sources_same_document(self):
-
- # Create source1
- source1_fixture = {
- "title": "Test Source",
- "name": "test-source",
- "url": u"http://127.0.0.1:8999/gemini2.1/source1/same_dataset.xml",
- "source_type": u"gemini-single",
- }
-
- source1, first_job = self._create_source_and_job(source1_fixture)
-
- first_obj = self._run_job_for_single_document(first_job)
-
- first_package_dict = get_action("package_show")(
- self.context, {"id": first_obj.package_id}
- )
-
- # Package was created
- assert first_package_dict
- assert first_package_dict["state"] == u"active"
- assert first_obj.current == True
-
- # Harvest the same document, unchanged, from another source, the package
- # is not updated.
- # (As of https://github.com/okfn/ckanext-inspire/commit/9fb67
- # we are no longer throwing an exception when this happens)
- source2_fixture = {
- "title": "Test Source 2",
- "name": "test-source-2",
- "url": u"http://127.0.0.1:8999/gemini2.1/source2/same_dataset.xml",
- "source_type": u"gemini-single",
- }
-
- source2, second_job = self._create_source_and_job(source2_fixture)
-
- second_obj = self._run_job_for_single_document(second_job)
-
- second_package_dict = get_action("package_show")(
- self.context, {"id": first_obj.package_id}
- )
-
- # Package was not updated
- assert second_package_dict, (
- first_package_dict["id"] == second_package_dict["id"]
- )
- assert not second_obj.package, not second_obj.package_id
- assert second_obj.current == False, first_obj.current == True
-
- # Inactivate source1 and reharvest from source2, package should be updated
- third_job = self._create_job(source2.id)
- third_obj = self._run_job_for_single_document(
- third_job, force_import=True
- )
-
- Session.remove()
- Session.add(first_obj)
- Session.add(second_obj)
- Session.add(third_obj)
-
- Session.refresh(first_obj)
- Session.refresh(second_obj)
- Session.refresh(third_obj)
-
- third_package_dict = get_action("package_show")(
- self.context, {"id": first_obj.package_id}
- )
-
- # Package was updated
- assert third_package_dict, (
- first_package_dict["id"] == third_package_dict["id"]
- )
- assert third_obj.package, (
- third_obj.package_id == first_package_dict["id"]
- )
- assert third_obj.current == True
- assert second_obj.current == False
- assert first_obj.current == False
-
- def test_harvest_different_sources_same_document_but_deleted_inbetween(
- self,
- ):
-
- # Create source1
- source1_fixture = {
- "title": "Test Source",
- "name": "test-source",
- "url": u"http://127.0.0.1:8999/gemini2.1/source1/same_dataset.xml",
- "source_type": u"gemini-single",
- }
-
- source1, first_job = self._create_source_and_job(source1_fixture)
-
- first_obj = self._run_job_for_single_document(first_job)
-
- first_package_dict = get_action("package_show")(
- self.context, {"id": first_obj.package_id}
- )
-
- # Package was created
- assert first_package_dict
- assert first_package_dict["state"] == u"active"
- assert first_obj.current == True
-
- # Delete/withdraw the package
- first_package_dict = get_action("package_delete")(
- self.context, {"id": first_obj.package_id}
- )
- first_package_dict = get_action("package_show")(
- self.context, {"id": first_obj.package_id}
- )
-
- # Harvest the same document, unchanged, from another source
- source2_fixture = {
- "title": "Test Source 2",
- "name": "test-source-2",
- "url": u"http://127.0.0.1:8999/gemini2.1/source2/same_dataset.xml",
- "source_type": u"gemini-single",
- }
-
- source2, second_job = self._create_source_and_job(source2_fixture)
-
- second_obj = self._run_job_for_single_document(second_job)
-
- second_package_dict = get_action("package_show")(
- self.context, {"id": first_obj.package_id}
- )
-
- # It would be good if the package was updated, but we see that it isn't
- assert second_package_dict, (
- first_package_dict["id"] == second_package_dict["id"]
- )
- assert not second_obj.package
- assert second_obj.current == False
- assert first_obj.current == True
-
- def test_harvest_moves_sources(self):
-
- # Create source1
- source1_fixture = {
- "title": "Test Source",
- "name": "test-source",
- "url": u"http://127.0.0.1:8999/gemini2.1/service1.xml",
- "source_type": u"gemini-single",
- }
-
- source1, first_job = self._create_source_and_job(source1_fixture)
-
- first_obj = self._run_job_for_single_document(first_job)
-
- first_package_dict = get_action("package_show")(
- self.context, {"id": first_obj.package_id}
- )
-
- # Package was created
- assert first_package_dict
- assert first_package_dict["state"] == u"active"
- assert first_obj.current == True
-
- # Harvest the same document GUID but with a newer date, from another source.
- source2_fixture = {
- "title": "Test Source 2",
- "name": "test-source-2",
- "url": u"http://127.0.0.1:8999/gemini2.1/service1_newer.xml",
- "source_type": u"gemini-single",
- }
-
- source2, second_job = self._create_source_and_job(source2_fixture)
-
- second_obj = self._run_job_for_single_document(second_job)
-
- second_package_dict = get_action("package_show")(
- self.context, {"id": first_obj.package_id}
- )
-
- # Now we have two packages
- assert second_package_dict, (
- first_package_dict["id"] == second_package_dict["id"]
- )
- assert second_obj.package
- assert second_obj.current == True
- assert first_obj.current == True
- # so currently, if you move a Gemini between harvest sources you need
- # to update the date to get it to reharvest, and then you should
- # withdraw the package relating to the original harvest source.
-
- def test_harvest_import_command(self):
-
- # Create source
- source_fixture = {
- "title": "Test Source",
- "name": "test-source",
- "url": u"http://127.0.0.1:8999/gemini2.1/dataset1.xml",
- "source_type": u"gemini-single",
- }
-
- source, first_job = self._create_source_and_job(source_fixture)
-
- first_obj = self._run_job_for_single_document(first_job)
-
- before_package_dict = get_action("package_show")(
- self.context, {"id": first_obj.package_id}
- )
-
- # Package was created
- assert before_package_dict
- assert first_obj.current == True
- assert first_obj.package
-
- # Create and run two more jobs, the package should not be updated
- second_job = self._create_job(source.id)
- second_obj = self._run_job_for_single_document(second_job)
- third_job = self._create_job(source.id)
- third_obj = self._run_job_for_single_document(third_job)
-
- # Run the import command manually
- imported_objects = get_action("harvest_objects_import")(
- self.context, {"source_id": source.id}
- )
- Session.remove()
- Session.add(first_obj)
- Session.add(second_obj)
- Session.add(third_obj)
-
- Session.refresh(first_obj)
- Session.refresh(second_obj)
- Session.refresh(third_obj)
-
- after_package_dict = get_action("package_show")(
- self.context, {"id": first_obj.package_id}
- )
-
- # Package was updated, and the current object remains the same
- assert after_package_dict, (
- before_package_dict["id"] == after_package_dict["id"]
- )
- assert third_obj.current == False
- assert second_obj.current == False
- assert first_obj.current == True
-
- source_dict = get_action("harvest_source_show")(
- self.context, {"id": source.id}
- )
- assert source_dict["status"]["total_datasets"] == 1
-
- def test_clean_tags(self):
-
- # Create source
- source_fixture = {
- "title": "Test Source",
- "name": "test-source",
- "url": u"http://127.0.0.1:8999/gemini2.1/dataset1.xml",
- "source_type": u"gemini-single",
- "owner_org": "test-org",
- "metadata_created": datetime.now().strftime(
- "%YYYY-%MM-%DD %HH:%MM:%s"
- ),
- "metadata_modified": datetime.now().strftime(
- "%YYYY-%MM-%DD %HH:%MM:%s"
- ),
- }
-
- user = User.get("dummy")
- if not user:
- user = call_action(
- "user_create",
- name="dummy",
- password="dummybummy",
- email="dummy@dummy.com",
- )
- user_name = user["name"]
- else:
- user_name = user.name
- org = Group.by_name("test-org")
- if org is None:
- org = call_action(
- "organization_create",
- context={"user": user_name},
- name="test-org",
- )
- existing_g = Group.by_name("existing-group")
- if existing_g is None:
- existing_g = call_action(
- "group_create",
- context={"user": user_name},
- name="existing-group",
- )
-
- context = {"user": "dummy"}
- package_schema = default_update_package_schema()
- context["schema"] = package_schema
- package_dict = {
- "frequency": "manual",
- "publisher_name": "dummy",
- "extras": [
- {"key": "theme", "value": ["non-mappable", "thememap1"]}
- ],
- "groups": [],
- "title": "fakename",
- "holder_name": "dummy",
- "holder_identifier": "dummy",
- "name": "fakename",
- "notes": "dummy",
- "owner_org": "test-org",
- "modified": datetime.now(),
- "publisher_identifier": "dummy",
- "metadata_created": datetime.now(),
- "metadata_modified": datetime.now(),
- "guid": str(uuid4()),
- "identifier": "dummy",
- }
-
- package_data = call_action(
- "package_create", context=context, **package_dict
- )
-
- package = Package.get("fakename")
- source, job = self._create_source_and_job(source_fixture)
- job.package = package
- job.guid = uuid4()
- harvester = SpatialHarvester()
- with open(os.path.join("..", "data", "dataset.json")) as f:
- dataset = json.load(f)
-
- # long tags are invalid in all cases
- TAG_LONG_INVALID = "abcdefghij" * 20
- # if clean_tags is not set to true, tags will be truncated to 50 chars
- TAG_LONG_VALID = TAG_LONG_INVALID[:50]
- # default truncate to 100
- TAG_LONG_VALID_LONG = TAG_LONG_INVALID[:100]
-
- assert len(TAG_LONG_VALID) == 50
- assert TAG_LONG_VALID[-1] == "j"
- TAG_CHARS_INVALID = "Pretty-inv@lid.tag!"
- TAG_CHARS_VALID = "pretty-invlidtag"
-
- dataset["tags"].append(TAG_LONG_INVALID)
- dataset["tags"].append(TAG_CHARS_INVALID)
-
- harvester.source_config = {"clean_tags": False}
- out = harvester.get_package_dict(dataset, job)
- tags = out["tags"]
-
- # no clean tags, so invalid chars are in
- # but tags are truncated to 50 chars
- assert {"name": TAG_CHARS_VALID} not in tags
- assert {"name": TAG_CHARS_INVALID} in tags
- assert {"name": TAG_LONG_VALID_LONG} in tags
- assert {"name": TAG_LONG_INVALID} not in tags
-
- harvester.source_config = {"clean_tags": True}
-
- out = harvester.get_package_dict(dataset, job)
- tags = out["tags"]
- assert {"name": TAG_CHARS_VALID} in tags
- assert {"name": TAG_LONG_VALID_LONG} in tags
-
-
-BASIC_GEMINI = """
-
- e269743a-cfda-4632-a939-0c8416ae801e
-
-
- service
-
-"""
-GUID = "e269743a-cfda-4632-a939-0c8416ae801e"
-GEMINI_MISSING_GUID = """"""
-
-
-class TestGatherMethods(HarvestFixtureBase):
- def setup(self):
- HarvestFixtureBase.setup(self)
- # Create source
- source_fixture = {
- "title": "Test Source",
- "name": "test-source",
- "url": u"http://127.0.0.1:8999/gemini2.1/dataset1.xml",
- "source_type": u"gemini-single",
- }
- source, job = self._create_source_and_job(source_fixture)
- self.harvester = GeminiHarvester()
- self.harvester.harvest_job = job
-
- def teardown(self):
- model.repo.rebuild_db()
-
- def test_get_gemini_string_and_guid(self):
- res = self.harvester.get_gemini_string_and_guid(BASIC_GEMINI, url=None)
- assert_equal(res, (BASIC_GEMINI, GUID))
-
- def test_get_gemini_string_and_guid__no_guid(self):
- res = self.harvester.get_gemini_string_and_guid(
- GEMINI_MISSING_GUID, url=None
- )
- assert_equal(res, (GEMINI_MISSING_GUID, ""))
-
- def test_get_gemini_string_and_guid__non_parsing(self):
- content = '' # no closing tag
- assert_raises(
- lxml.etree.XMLSyntaxError,
- self.harvester.get_gemini_string_and_guid,
- content,
- )
-
- def test_get_gemini_string_and_guid__empty(self):
- content = ""
- assert_raises(
- lxml.etree.XMLSyntaxError,
- self.harvester.get_gemini_string_and_guid,
- content,
- )
-
-
-class TestImportStageTools(object):
- def test_licence_url_normal(self):
- assert_equal(
- GeminiHarvester._extract_first_licence_url(
- [
- "Reference and PSMA Only",
- "http://www.test.gov.uk/licenseurl",
- ]
- ),
- "http://www.test.gov.uk/licenseurl",
- )
-
- def test_licence_url_multiple_urls(self):
- # only the first URL is extracted
- assert_equal(
- GeminiHarvester._extract_first_licence_url(
- [
- "Reference and PSMA Only",
- "http://www.test.gov.uk/licenseurl",
- "http://www.test.gov.uk/2nd_licenseurl",
- ]
- ),
- "http://www.test.gov.uk/licenseurl",
- )
-
- def test_licence_url_embedded(self):
- # URL is embedded within the text field and not extracted
- assert_equal(
- GeminiHarvester._extract_first_licence_url(
- ["Reference and PSMA Only http://www.test.gov.uk/licenseurl"]
- ),
- None,
- )
-
- def test_licence_url_embedded_at_start(self):
- # URL is embedded at the start of the text field and the
- # whole field is returned. Noting this unusual behaviour
- assert_equal(
- GeminiHarvester._extract_first_licence_url(
- ["http://www.test.gov.uk/licenseurl Reference and PSMA Only"]
- ),
- "http://www.test.gov.uk/licenseurl Reference and PSMA Only",
- )
-
- def test_responsible_organisation_basic(self):
- responsible_organisation = [
- {"organisation-name": "Ordnance Survey", "role": "owner"},
- {"organisation-name": "Maps Ltd", "role": "distributor"},
- ]
- assert_equal(
- GeminiHarvester._process_responsible_organisation(
- responsible_organisation
- ),
- (
- "Ordnance Survey",
- ["Maps Ltd (distributor)", "Ordnance Survey (owner)"],
- ),
- )
-
- def test_responsible_organisation_publisher(self):
- # no owner, so falls back to publisher
- responsible_organisation = [
- {"organisation-name": "Ordnance Survey", "role": "publisher"},
- {"organisation-name": "Maps Ltd", "role": "distributor"},
- ]
- assert_equal(
- GeminiHarvester._process_responsible_organisation(
- responsible_organisation
- ),
- (
- "Ordnance Survey",
- ["Maps Ltd (distributor)", "Ordnance Survey (publisher)"],
- ),
- )
-
- def test_responsible_organisation_owner(self):
- # provider is the owner (ignores publisher)
- responsible_organisation = [
- {"organisation-name": "Ordnance Survey", "role": "publisher"},
- {"organisation-name": "Owner", "role": "owner"},
- {"organisation-name": "Maps Ltd", "role": "distributor"},
- ]
- assert_equal(
- GeminiHarvester._process_responsible_organisation(
- responsible_organisation
- ),
- (
- "Owner",
- [
- "Owner (owner)",
- "Maps Ltd (distributor)",
- "Ordnance Survey (publisher)",
- ],
- ),
- )
-
- def test_responsible_organisation_multiple_roles(self):
- # provider is the owner (ignores publisher)
- responsible_organisation = [
- {"organisation-name": "Ordnance Survey", "role": "publisher"},
- {"organisation-name": "Ordnance Survey", "role": "custodian"},
- {"organisation-name": "Distributor", "role": "distributor"},
- ]
- assert_equal(
- GeminiHarvester._process_responsible_organisation(
- responsible_organisation
- ),
- (
- "Ordnance Survey",
- [
- "Distributor (distributor)",
- "Ordnance Survey (publisher, custodian)",
- ],
- ),
- )
-
- def test_responsible_organisation_blank_provider(self):
- # no owner or publisher, so blank provider
- responsible_organisation = [
- {
- "organisation-name": "Ordnance Survey",
- "role": "resourceProvider",
- },
- {"organisation-name": "Maps Ltd", "role": "distributor"},
- ]
- assert_equal(
- GeminiHarvester._process_responsible_organisation(
- responsible_organisation
- ),
- (
- "",
- [
- "Maps Ltd (distributor)",
- "Ordnance Survey (resourceProvider)",
- ],
- ),
- )
-
- def test_responsible_organisation_blank(self):
- # no owner or publisher, so blank provider
- responsible_organisation = []
- assert_equal(
- GeminiHarvester._process_responsible_organisation(
- responsible_organisation
- ),
- ("", []),
- )
-
-
-class TestValidation(HarvestFixtureBase):
- @classmethod
- def setup_class(cls):
-
- # TODO: Fix these tests, broken since 27c4ee81e
- raise SkipTest("Validation tests not working since 27c4ee81e")
-
- SpatialHarvester._validator = Validators(
- profiles=["iso19139eden", "constraints", "gemini2"]
- )
- HarvestFixtureBase.setup_class()
-
- def get_validation_errors(self, validation_test_filename):
- # Create source
- source_fixture = {
- "title": "Test Source",
- "name": "test-source",
- "url": u"http://127.0.0.1:8999/gemini2.1/validation/%s"
- % validation_test_filename,
- "source_type": u"gemini-single",
- }
-
- source, job = self._create_source_and_job(source_fixture)
-
- harvester = GeminiDocHarvester()
-
- # Gather stage for GeminiDocHarvester includes validation
- object_ids = harvester.gather_stage(job)
-
- # Check the validation errors
- errors = "; ".join(
- [gather_error.message for gather_error in job.gather_errors]
- )
- return errors
-
- def test_01_dataset_fail_iso19139_schema(self):
- errors = self.get_validation_errors(
- "01_Dataset_Invalid_XSD_No_Such_Element.xml"
- )
- assert len(errors) > 0
- assert_in("Could not get the GUID", errors)
-
- def test_02_dataset_fail_constraints_schematron(self):
- errors = self.get_validation_errors(
- "02_Dataset_Invalid_19139_Missing_Data_Format.xml"
- )
- assert len(errors) > 0
- assert_in(
- "MD_Distribution / MD_Format: count(distributionFormat + distributorFormat) > 0",
- errors,
- )
-
- def test_03_dataset_fail_gemini_schematron(self):
- errors = self.get_validation_errors(
- "03_Dataset_Invalid_GEMINI_Missing_Keyword.xml"
- )
- assert len(errors) > 0
- assert_in("Descriptive keywords are mandatory", errors)
-
- def test_04_dataset_valid(self):
- errors = self.get_validation_errors("04_Dataset_Valid.xml")
- assert len(errors) == 0
-
- def test_05_series_fail_iso19139_schema(self):
- errors = self.get_validation_errors(
- "05_Series_Invalid_XSD_No_Such_Element.xml"
- )
- assert len(errors) > 0
- assert_in("Could not get the GUID", errors)
-
- def test_06_series_fail_constraints_schematron(self):
- errors = self.get_validation_errors(
- "06_Series_Invalid_19139_Missing_Data_Format.xml"
- )
- assert len(errors) > 0
- assert_in(
- "MD_Distribution / MD_Format: count(distributionFormat + distributorFormat) > 0",
- errors,
- )
-
- def test_07_series_fail_gemini_schematron(self):
- errors = self.get_validation_errors(
- "07_Series_Invalid_GEMINI_Missing_Keyword.xml"
- )
- assert len(errors) > 0
- assert_in("Descriptive keywords are mandatory", errors)
-
- def test_08_series_valid(self):
- errors = self.get_validation_errors("08_Series_Valid.xml")
- assert len(errors) == 0
-
- def test_09_service_fail_iso19139_schema(self):
- errors = self.get_validation_errors(
- "09_Service_Invalid_No_Such_Element.xml"
- )
- assert len(errors) > 0
- assert_in("Could not get the GUID", errors)
-
- def test_10_service_fail_constraints_schematron(self):
- errors = self.get_validation_errors(
- "10_Service_Invalid_19139_Level_Description.xml"
- )
- assert len(errors) > 0
- assert_in(
- "DQ_Scope: 'levelDescription' is mandatory if 'level' notEqual 'dataset' or 'series'.",
- errors,
- )
-
- def test_11_service_fail_gemini_schematron(self):
- errors = self.get_validation_errors(
- "11_Service_Invalid_GEMINI_Service_Type.xml"
- )
- assert len(errors) > 0
- assert_in(
- "Service type shall be one of 'discovery', 'view', 'download', 'transformation', 'invoke' or 'other' following INSPIRE generic names.",
- errors,
- )
-
- def test_12_service_valid(self):
- errors = self.get_validation_errors("12_Service_Valid.xml")
- assert len(errors) == 0, errors
-
- def test_13_dataset_fail_iso19139_schema_2(self):
- # This test Dataset has srv tags and only Service metadata should.
- errors = self.get_validation_errors(
- "13_Dataset_Invalid_Element_srv.xml"
- )
- assert len(errors) > 0
- assert_in(
- "Element '{http://www.isotc211.org/2005/srv}SV_ServiceIdentification': This element is not expected.",
- errors,
- )
diff --git a/ckanext/spatial/validation/validation.py b/ckanext/spatial/validation/validation.py
index 28e8506..f89a42c 100644
--- a/ckanext/spatial/validation/validation.py
+++ b/ckanext/spatial/validation/validation.py
@@ -256,7 +256,7 @@ class SchematronValidator(BaseValidator):
"xml/schematron/iso_abstract_expand.xsl",
"xml/schematron/iso_svrl_for_xslt1.xsl",
]
- if isinstance(schema, file):
+ if hasattr(schema, 'read'):
compiled = etree.parse(schema)
else:
compiled = schema
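The hunk above replaces the Python 2-only `isinstance(schema, file)` check with duck typing, because the `file` builtin was removed in Python 3. A minimal sketch of the idea, assuming lxml (which validation.py already uses); the helper name is made up for illustration and is not part of the patch:

    # Illustrative sketch, not part of the patch: anything with a .read()
    # method is treated as an open stream, everything else as an already
    # parsed tree. Works on Python 2 and 3, unlike isinstance(schema, file).
    import io
    from lxml import etree

    def load_schema(schema):
        if hasattr(schema, "read"):
            return etree.parse(schema)   # file object or other stream
        return schema                    # assume a pre-parsed element tree

    stream_tree = load_schema(io.BytesIO(b"<schema/>"))
    parsed_tree = load_schema(etree.ElementTree(etree.fromstring(b"<schema/>")))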
diff --git a/setup.cfg b/setup.cfg
index ea548d6..9c3e584 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,5 +1,5 @@
[tool:pytest]
-norecursedirs=ckanext/scheming/tests/nose
+norecursedirs=ckanext/spatial/tests/nose
filterwarnings =
ignore::sqlalchemy.exc.SADeprecationWarning
diff --git a/test.ini b/test.ini
index b50dfc7..c752556 100644
--- a/test.ini
+++ b/test.ini
@@ -20,9 +20,13 @@ ckan.spatial.default_map_extent=-6.88,49.74,0.50,59.2
ckan.spatial.testing = true
ckan.spatial.validator.profiles = iso19139,constraints,gemini2
ckan.harvest.mq.type = redis
+
# NB: other test configuration should go in test-core.ini, which is
# what the postgres tests use.
+package_new_return_url = http://test.ckan.net/dataset/?test=new
+package_edit_return_url = http://test.ckan.net/dataset/?test=edit
+
# Logging configuration
[loggers]
From 2420d628471304cf6c31e9f74e2d0ce11db8e3f7 Mon Sep 17 00:00:00 2001
From: Sergey Motornyuk
Date: Wed, 6 May 2020 01:20:21 +0300
Subject: [PATCH 018/139] Fix import for nose tests
---
bin/travis-build.bash | 5 +++++
ckanext/spatial/tests/nose/functional/test_package.py | 2 +-
ckanext/spatial/tests/nose/functional/test_widgets.py | 2 +-
ckanext/spatial/tests/nose/lib/test_spatial.py | 2 +-
ckanext/spatial/tests/nose/model/test_package_extent.py | 2 +-
ckanext/spatial/tests/nose/test_api.py | 2 +-
ckanext/spatial/tests/nose/test_harvest.py | 2 +-
7 files changed, 11 insertions(+), 6 deletions(-)
diff --git a/bin/travis-build.bash b/bin/travis-build.bash
index 945b606..3fdf9ea 100644
--- a/bin/travis-build.bash
+++ b/bin/travis-build.bash
@@ -29,6 +29,11 @@ fi
python setup.py develop
+if [ -f requirement-setuptools.txt ]
+then
+ pip install -r requirement-setuptools.txt
+fi
+
if [ -f requirements-py2.txt ] && [ $PYTHONVERSION = 2 ]
then
grep -v psycopg2 < requirements-py2.txt > reqs.txt
diff --git a/ckanext/spatial/tests/nose/functional/test_package.py b/ckanext/spatial/tests/nose/functional/test_package.py
index 1e36d20..5dcba65 100644
--- a/ckanext/spatial/tests/nose/functional/test_package.py
+++ b/ckanext/spatial/tests/nose/functional/test_package.py
@@ -13,7 +13,7 @@ except ImportError:
from ckanext.spatial.model import PackageExtent
from ckanext.spatial.geoalchemy_common import legacy_geoalchemy
-from ckanext.spatial.tests.base import SpatialTestBase
+from ckanext.spatial.tests.nose.base import SpatialTestBase
class TestSpatialExtra(SpatialTestBase, helpers.FunctionalTestBase):
diff --git a/ckanext/spatial/tests/nose/functional/test_widgets.py b/ckanext/spatial/tests/nose/functional/test_widgets.py
index fbe75ba..b6359f1 100644
--- a/ckanext/spatial/tests/nose/functional/test_widgets.py
+++ b/ckanext/spatial/tests/nose/functional/test_widgets.py
@@ -1,6 +1,6 @@
from ckan.lib.helpers import url_for
-from ckanext.spatial.tests.base import SpatialTestBase
+from ckanext.spatial.tests.nose.base import SpatialTestBase
try:
import ckan.new_tests.helpers as helpers
diff --git a/ckanext/spatial/tests/nose/lib/test_spatial.py b/ckanext/spatial/tests/nose/lib/test_spatial.py
index b6040fc..4229824 100644
--- a/ckanext/spatial/tests/nose/lib/test_spatial.py
+++ b/ckanext/spatial/tests/nose/lib/test_spatial.py
@@ -17,7 +17,7 @@ from ckan.lib.munge import munge_title_to_name
from ckanext.spatial.model import PackageExtent
from ckanext.spatial.lib import validate_bbox, bbox_query, bbox_query_ordered
from ckanext.spatial.geoalchemy_common import WKTElement, compare_geometry_fields
-from ckanext.spatial.tests.base import SpatialTestBase
+from ckanext.spatial.tests.nose.base import SpatialTestBase
class TestCompareGeometries(SpatialTestBase):
diff --git a/ckanext/spatial/tests/nose/model/test_package_extent.py b/ckanext/spatial/tests/nose/model/test_package_extent.py
index 812d15d..712d289 100644
--- a/ckanext/spatial/tests/nose/model/test_package_extent.py
+++ b/ckanext/spatial/tests/nose/model/test_package_extent.py
@@ -10,7 +10,7 @@ except ImportError:
from ckanext.spatial.model import PackageExtent
from ckanext.spatial.geoalchemy_common import WKTElement, legacy_geoalchemy
-from ckanext.spatial.tests.base import SpatialTestBase
+from ckanext.spatial.tests.nose.base import SpatialTestBase
class TestPackageExtent(SpatialTestBase):
diff --git a/ckanext/spatial/tests/nose/test_api.py b/ckanext/spatial/tests/nose/test_api.py
index ef268ca..7755c91 100644
--- a/ckanext/spatial/tests/nose/test_api.py
+++ b/ckanext/spatial/tests/nose/test_api.py
@@ -10,7 +10,7 @@ except ImportError:
import ckan.tests.helpers as helpers
import ckan.tests.factories as factories
-from ckanext.spatial.tests.base import SpatialTestBase
+from ckanext.spatial.tests.nose.base import SpatialTestBase
extents = {
'nz': '{"type":"Polygon","coordinates":[[[174,-38],[176,-38],[176,-40],[174,-40],[174,-38]]]}',
diff --git a/ckanext/spatial/tests/nose/test_harvest.py b/ckanext/spatial/tests/nose/test_harvest.py
index 11423af..f8acbbf 100644
--- a/ckanext/spatial/tests/nose/test_harvest.py
+++ b/ckanext/spatial/tests/nose/test_harvest.py
@@ -24,7 +24,7 @@ from ckanext.spatial.harvesters.gemini import (GeminiDocHarvester,
GeminiWafHarvester,
GeminiHarvester)
from ckanext.spatial.harvesters.base import SpatialHarvester
-from ckanext.spatial.tests.base import SpatialTestBase
+from ckanext.spatial.tests.nose.base import SpatialTestBase
from .xml_file_server import serve
From e84430a92d4b3f9472188707d7f058219dee42f6 Mon Sep 17 00:00:00 2001
From: Sergey Motornyuk
Date: Wed, 6 May 2020 01:42:30 +0300
Subject: [PATCH 019/139] Install setuptools
---
bin/travis-build.bash | 4 ++--
ckanext/spatial/tests/nose/test_harvest.py | 1 -
2 files changed, 2 insertions(+), 3 deletions(-)
diff --git a/bin/travis-build.bash b/bin/travis-build.bash
index 3fdf9ea..3a642f7 100644
--- a/bin/travis-build.bash
+++ b/bin/travis-build.bash
@@ -27,13 +27,13 @@ else
echo "CKAN version: ${CKAN_TAG#ckan-}"
fi
-python setup.py develop
-
if [ -f requirement-setuptools.txt ]
then
pip install -r requirement-setuptools.txt
fi
+python setup.py develop
+
if [ -f requirements-py2.txt ] && [ $PYTHONVERSION = 2 ]
then
grep -v psycopg2 < requirements-py2.txt > reqs.txt
diff --git a/ckanext/spatial/tests/nose/test_harvest.py b/ckanext/spatial/tests/nose/test_harvest.py
index f8acbbf..2eb0192 100644
--- a/ckanext/spatial/tests/nose/test_harvest.py
+++ b/ckanext/spatial/tests/nose/test_harvest.py
@@ -261,7 +261,6 @@ class TestHarvest(HarvestFixtureBase):
'resource_locator_function': 'download',
'resource_locator_protocol': 'OGC:WMS-1.3.0-http-get-capabilities',
'url': u'http://127.0.0.1:8999/wms/capabilities.xml',
- 'verified': 'True',
}
resource = package_dict['resources'][0]
From 123fc5a6fdb66fa63703ae7c752367d0efaf3e1e Mon Sep 17 00:00:00 2001
From: Sergey Motornyuk
Date: Wed, 6 May 2020 15:19:21 +0300
Subject: [PATCH 020/139] Backport pytests
---
.travis.yml | 64 +-
bin/travis-run.sh | 7 +-
ckanext/spatial/tests/ckan_setup.py | 38 +
ckanext/spatial/tests/fixtures.py | 152 +++
ckanext/spatial/tests/nose/__init__.py | 7 -
ckanext/spatial/tests/nose/base.py | 76 --
ckanext/spatial/tests/nose/data/dataset.json | 1 -
.../spatial/tests/nose/functional/__init__.py | 7 -
.../tests/nose/functional/test_package.py | 147 ---
.../tests/nose/functional/test_widgets.py | 38 -
.../spatial/tests/nose/lib/test_spatial.py | 154 ---
ckanext/spatial/tests/nose/model/__init__.py | 7 -
.../nose/model/test_harvested_metadata.py | 34 -
.../tests/nose/model/test_package_extent.py | 90 --
.../nose/model/xml/FCSConservancyPolygons.xml | 524 --------
.../tests/nose/model/xml/gemini_dataset.xml | 498 -------
.../tests/nose/scripts/geometry_columns.sql | 25 -
.../spatial/tests/nose/scripts/postgis.sql | 41 -
.../tests/nose/scripts/spatial_ref_sys.sql | 23 -
ckanext/spatial/tests/nose/test_api.py | 274 ----
ckanext/spatial/tests/nose/test_csw_client.py | 65 -
ckanext/spatial/tests/nose/test_harvest.py | 1140 -----------------
.../tests/nose/test_plugin/__init__.py | 7 -
.../spatial/tests/nose/test_plugin/plugin.py | 9 -
.../templates/package/read_base.html | 11 -
.../test_plugin/templates/package/search.html | 9 -
ckanext/spatial/tests/nose/test_validation.py | 153 ---
.../tests/nose/xml/gemini2.1-waf/index.html | 11 -
.../tests/nose/xml/gemini2.1-waf/wales1.xml | 420 ------
.../tests/nose/xml/gemini2.1-waf/wales2.xml | 539 --------
.../xml/gemini2.1/FCSConservancyPolygons.xml | 524 --------
.../tests/nose/xml/gemini2.1/dataset1.xml | 498 -------
.../nose/xml/gemini2.1/error_bad_xml.xml | 15 -
.../nose/xml/gemini2.1/error_validation.xml | 293 -----
.../tests/nose/xml/gemini2.1/service1.xml | 347 -----
.../nose/xml/gemini2.1/service1_newer.xml | 347 -----
.../xml/gemini2.1/source1/same_dataset.xml | 347 -----
.../xml/gemini2.1/source2/same_dataset.xml | 347 -----
...01_Dataset_Invalid_XSD_No_Such_Element.xml | 636 ---------
...taset_Invalid_XSD_No_Such_Element_unix.xml | 636 ---------
...aset_Invalid_19139_Missing_Data_Format.xml | 626 ---------
...Dataset_Invalid_GEMINI_Missing_Keyword.xml | 551 --------
.../gemini2.1/validation/04_Dataset_Valid.xml | 637 ---------
.../05_Series_Invalid_XSD_No_Such_Element.xml | 594 ---------
...ries_Invalid_19139_Missing_Data_Format.xml | 584 ---------
..._Series_Invalid_GEMINI_Missing_Keyword.xml | 509 --------
.../gemini2.1/validation/08_Series_Valid.xml | 595 ---------
.../09_Service_Invalid_No_Such_Element.xml | 537 --------
...ervice_Invalid_19139_Level_Description.xml | 530 --------
...11_Service_Invalid_GEMINI_Service_Type.xml | 537 --------
.../gemini2.1/validation/12_Service_Valid.xml | 537 --------
.../13_Dataset_Invalid_Element_srv.xml | 610 ---------
.../nose/xml/iso19139/dataset-invalid.xml | 498 -------
.../tests/nose/xml/iso19139/dataset.xml | 495 -------
.../tests/nose/xml/wms/capabilities.xml | 127 --
ckanext/spatial/tests/nose/xml_file_server.py | 36 -
conftest.py | 4 +-
57 files changed, 238 insertions(+), 16330 deletions(-)
create mode 100644 ckanext/spatial/tests/ckan_setup.py
create mode 100644 ckanext/spatial/tests/fixtures.py
delete mode 100644 ckanext/spatial/tests/nose/__init__.py
delete mode 100644 ckanext/spatial/tests/nose/base.py
delete mode 100644 ckanext/spatial/tests/nose/data/dataset.json
delete mode 100644 ckanext/spatial/tests/nose/functional/__init__.py
delete mode 100644 ckanext/spatial/tests/nose/functional/test_package.py
delete mode 100644 ckanext/spatial/tests/nose/functional/test_widgets.py
delete mode 100644 ckanext/spatial/tests/nose/lib/test_spatial.py
delete mode 100644 ckanext/spatial/tests/nose/model/__init__.py
delete mode 100644 ckanext/spatial/tests/nose/model/test_harvested_metadata.py
delete mode 100644 ckanext/spatial/tests/nose/model/test_package_extent.py
delete mode 100644 ckanext/spatial/tests/nose/model/xml/FCSConservancyPolygons.xml
delete mode 100644 ckanext/spatial/tests/nose/model/xml/gemini_dataset.xml
delete mode 100644 ckanext/spatial/tests/nose/scripts/geometry_columns.sql
delete mode 100644 ckanext/spatial/tests/nose/scripts/postgis.sql
delete mode 100644 ckanext/spatial/tests/nose/scripts/spatial_ref_sys.sql
delete mode 100644 ckanext/spatial/tests/nose/test_api.py
delete mode 100644 ckanext/spatial/tests/nose/test_csw_client.py
delete mode 100644 ckanext/spatial/tests/nose/test_harvest.py
delete mode 100644 ckanext/spatial/tests/nose/test_plugin/__init__.py
delete mode 100644 ckanext/spatial/tests/nose/test_plugin/plugin.py
delete mode 100644 ckanext/spatial/tests/nose/test_plugin/templates/package/read_base.html
delete mode 100644 ckanext/spatial/tests/nose/test_plugin/templates/package/search.html
delete mode 100644 ckanext/spatial/tests/nose/test_validation.py
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1-waf/index.html
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1-waf/wales1.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1-waf/wales2.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/FCSConservancyPolygons.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/dataset1.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/error_bad_xml.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/error_validation.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/service1.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/service1_newer.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/source1/same_dataset.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/source2/same_dataset.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/01_Dataset_Invalid_XSD_No_Such_Element.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/01_Dataset_Invalid_XSD_No_Such_Element_unix.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/02_Dataset_Invalid_19139_Missing_Data_Format.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/03_Dataset_Invalid_GEMINI_Missing_Keyword.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/04_Dataset_Valid.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/05_Series_Invalid_XSD_No_Such_Element.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/06_Series_Invalid_19139_Missing_Data_Format.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/07_Series_Invalid_GEMINI_Missing_Keyword.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/08_Series_Valid.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/09_Service_Invalid_No_Such_Element.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/10_Service_Invalid_19139_Level_Description.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/11_Service_Invalid_GEMINI_Service_Type.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/12_Service_Valid.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/gemini2.1/validation/13_Dataset_Invalid_Element_srv.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/iso19139/dataset-invalid.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/iso19139/dataset.xml
delete mode 100644 ckanext/spatial/tests/nose/xml/wms/capabilities.xml
delete mode 100644 ckanext/spatial/tests/nose/xml_file_server.py
diff --git a/.travis.yml b/.travis.yml
index 30a20c4..bf79f5d 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,25 +1,51 @@
-language: python
dist: trusty
-python:
- - "2.7"
-cache: pip
-env:
- - CKANVERSION=master
- - CKANVERSION=release-v2.6-latest
- - CKANVERSION=2.7
- - CKANVERSION=2.8
-sudo: required
+
+os: linux
+language: python
+
+install:
+ - bash bin/travis-build.bash
+services:
+ - redis
+ - postgresql
addons:
postgresql: 9.6
apt:
packages:
- postgresql-9.6-postgis-2.3
-services:
- - redis-server
-install:
- - bash bin/travis-build.bash
-script: sh bin/travis-run.sh
-branches:
- except:
- - stable
- - release-v2.0
+
+script: bash bin/travis-run.bash
+before_install:
+ - pip install codecov
+after_success:
+ - codecov
+
+jobs:
+ include:
+ - stage: Flake8
+ python: 2.7
+ env: FLAKE8=True
+ install:
+ - pip install flake8==3.5.0
+ - pip install pycodestyle==2.3.0
+ script:
+ - flake8 --version
+ # stop the build if there are Python syntax errors or undefined names
+ - flake8 . --count --select=E901,E999,F821,F822,F823 --show-source --statistics --exclude ckan
+ # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
+ # - flake8 . --count --max-line-length=127 --statistics --exclude ckan --exit-zero
+ - stage: Tests
+ python: "2.7"
+ env: CKANVERSION=master
+ - python: "3.6"
+ env: CKANVERSION=master
+ - python: "2.7"
+ env: CKANVERSION=2.8
+ - python: "2.7"
+ env: CKANVERSION=2.7
+ - python: "2.7"
+ env: CKANVERSION=2.6
+
+cache:
+ directories:
+ - $HOME/.cache/pip
diff --git a/bin/travis-run.sh b/bin/travis-run.sh
index f07c4e0..9a791ad 100644
--- a/bin/travis-run.sh
+++ b/bin/travis-run.sh
@@ -1,8 +1,3 @@
#!/bin/sh -e
-if [ $CKANVERSION == 'master' ]
-then
- pytest --ckan-ini=subdir/test.ini ckanext/spatial/tests
-else
- nosetests --ckan --nologcapture --with-pylons=subdir/test.ini ckanext/spatial/tests/nose
-fi
+pytest --ckan-ini=subdir/test.ini ckanext/spatial/tests
diff --git a/ckanext/spatial/tests/ckan_setup.py b/ckanext/spatial/tests/ckan_setup.py
new file mode 100644
index 0000000..a953c22
--- /dev/null
+++ b/ckanext/spatial/tests/ckan_setup.py
@@ -0,0 +1,38 @@
+try:
+ from ckan.tests.pytest_ckan.ckan_setup import *
+except ImportError:
+ from ckan.config.middleware import make_app
+ from ckan.common import config
+
+ import pkg_resources
+ from paste.deploy import loadapp
+ import sys
+ import os
+
+ import pylons
+ from pylons.i18n.translation import _get_translator
+
+ def pytest_addoption(parser):
+ """Allow using custom config file during tests.
+ """
+ parser.addoption(u"--ckan-ini", action=u"store")
+
+ def pytest_sessionstart(session):
+ """Initialize CKAN environment.
+ """
+ global pylonsapp
+ path = os.getcwd()
+ sys.path.insert(0, path)
+ pkg_resources.working_set.add_entry(path)
+ pylonsapp = loadapp(
+ "config:" + session.config.option.ckan_ini, relative_to=path,
+ )
+
+ # Initialize a translator for tests that utilize i18n
+ translator = _get_translator(pylons.config.get("lang"))
+ pylons.translator._push_object(translator)
+
+ class FakeResponse:
+ headers = {} # because render wants to delete Pragma
+
+ pylons.response._push_object(FakeResponse)
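ckan_setup.py mirrors CKAN core's pytest bootstrap and falls back to loading the Pylons app by hand on older CKAN versions. For pytest to pick up `pytest_addoption` and `pytest_sessionstart`, a project-level conftest.py has to re-export this module (and fixtures.py, added below). The diffstat above lists a conftest.py change that is not shown in this excerpt, so the wiring below is only an assumed sketch of the usual pattern:

    # Hypothetical conftest.py wiring (the real change is elsewhere in this patch):
    # re-exporting lets pytest discover --ckan-ini handling and the fixtures,
    # whether they come from CKAN core or from the local fallbacks above.
    from ckanext.spatial.tests.ckan_setup import *   # noqa
    from ckanext.spatial.tests.fixtures import *     # noqa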
diff --git a/ckanext/spatial/tests/fixtures.py b/ckanext/spatial/tests/fixtures.py
new file mode 100644
index 0000000..bfe29a0
--- /dev/null
+++ b/ckanext/spatial/tests/fixtures.py
@@ -0,0 +1,152 @@
+# -*- coding: utf-8 -*-
+
+try:
+ from ckan.tests.pytest_ckan.fixtures import *
+
+except ImportError:
+ import pytest
+
+ import ckan.tests.helpers as test_helpers
+ import ckan.plugins
+ import ckan.lib.search as search
+
+ from ckan.common import config
+
+ @pytest.fixture
+ def ckan_config(request, monkeypatch):
+ """Allows to override the configuration object used by tests
+
+ Takes into account config patches introduced by the ``ckan_config``
+ mark.
+
+ If you just want to set one or more configuration options for the
+ scope of a test (or a test class), use the ``ckan_config`` mark::
+
+ @pytest.mark.ckan_config('ckan.auth.create_unowned_dataset', True)
+ def test_auth_create_unowned_dataset():
+
+ # ...
+
+ To use the custom config inside a test, apply the
+ ``ckan_config`` mark to it and inject the ``ckan_config`` fixture:
+
+ .. literalinclude:: /../ckan/tests/pytest_ckan/test_fixtures.py
+ :start-after: # START-CONFIG-OVERRIDE
+ :end-before: # END-CONFIG-OVERRIDE
+
+ If the change only needs to be applied locally, use the
+ ``monkeypatch`` fixture
+
+ .. literalinclude:: /../ckan/tests/test_common.py
+ :start-after: # START-CONFIG-OVERRIDE
+ :end-before: # END-CONFIG-OVERRIDE
+
+ """
+ _original = config.copy()
+ for mark in request.node.iter_markers(u"ckan_config"):
+ monkeypatch.setitem(config, *mark.args)
+ yield config
+ config.clear()
+ config.update(_original)
+
+ @pytest.fixture
+ def make_app(ckan_config):
+ """Factory for client app instances.
+
+ Unless you need to create app instances lazily for some reason,
+ use the ``app`` fixture instead.
+ """
+ return test_helpers._get_test_app
+
+ @pytest.fixture
+ def app(make_app):
+ """Returns a client app instance to use in functional tests
+
+ To use it, just add the ``app`` parameter to your test function signature::
+
+ def test_dataset_search(self, app):
+
+ url = h.url_for('dataset.search')
+
+ response = app.get(url)
+
+
+ """
+ return make_app()
+
+ @pytest.fixture(scope=u"session")
+ def reset_db():
+ """Callable for resetting the database to the initial state.
+
+ If possible use the ``clean_db`` fixture instead.
+
+ """
+ return test_helpers.reset_db
+
+ @pytest.fixture(scope=u"session")
+ def reset_index():
+ """Callable for cleaning search index.
+
+ If possible use the ``clean_index`` fixture instead.
+ """
+ return search.clear_all
+
+ @pytest.fixture
+ def clean_db(reset_db):
+ """Resets the database to the initial state.
+
+ This can be used either for all tests in a class::
+
+ @pytest.mark.usefixtures("clean_db")
+ class TestExample(object):
+
+ def test_example(self):
+
+ or for a single test::
+
+ class TestExample(object):
+
+ @pytest.mark.usefixtures("clean_db")
+ def test_example(self):
+
+ """
+ reset_db()
+
+ @pytest.fixture
+ def clean_index(reset_index):
+ """Clear search index before starting the test.
+ """
+ reset_index()
+
+ @pytest.fixture
+ def with_plugins(ckan_config):
+ """Load all plugins specified by the ``ckan.plugins`` config option
+ at the beginning of the test. When the test ends (even if it fails), it will
+ unload all the plugins in reverse order.
+
+ .. literalinclude:: /../ckan/tests/test_factories.py
+ :start-after: # START-CONFIG-OVERRIDE
+ :end-before: # END-CONFIG-OVERRIDE
+
+ """
+ plugins = ckan_config["ckan.plugins"].split()
+ for plugin in plugins:
+ if not ckan.plugins.plugin_loaded(plugin):
+ ckan.plugins.load(plugin)
+ yield
+ for plugin in reversed(plugins):
+ if ckan.plugins.plugin_loaded(plugin):
+ ckan.plugins.unload(plugin)
+
+ @pytest.fixture
+ def test_request_context(app):
+ """Provide function for creating Flask request context.
+ """
+ return app.flask_app.test_request_context
+
+ @pytest.fixture
+ def with_request_context(test_request_context):
+ """Execute test inside requests context
+ """
+ with test_request_context():
+ yield
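fixtures.py backports CKAN core's pytest fixtures so the same test code runs against both old and new CKAN versions. A short, hypothetical usage example follows; the plugin names and route are illustrative and not taken from this patch:

    # Hypothetical test module showing how the backported fixtures combine.
    import pytest

    @pytest.mark.ckan_config("ckan.plugins", "spatial_metadata spatial_query")
    @pytest.mark.usefixtures("with_plugins", "clean_db", "clean_index")
    class TestSpatialSearchPage(object):

        def test_search_page_renders(self, app):
            # `app` comes from the fixtures above and wraps the CKAN test client.
            response = app.get("/dataset")
            assert response.status_code == 200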
diff --git a/ckanext/spatial/tests/nose/__init__.py b/ckanext/spatial/tests/nose/__init__.py
deleted file mode 100644
index 2e2033b..0000000
--- a/ckanext/spatial/tests/nose/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# this is a namespace package
-try:
- import pkg_resources
- pkg_resources.declare_namespace(__name__)
-except ImportError:
- import pkgutil
- __path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/ckanext/spatial/tests/nose/base.py b/ckanext/spatial/tests/nose/base.py
deleted file mode 100644
index d204548..0000000
--- a/ckanext/spatial/tests/nose/base.py
+++ /dev/null
@@ -1,76 +0,0 @@
-import os
-import re
-
-from sqlalchemy import Table
-from nose.plugins.skip import SkipTest
-
-from ckan.model import Session, repo, meta, engine_is_sqlite
-from ckanext.spatial.geoalchemy_common import postgis_version
-from ckanext.spatial.model.package_extent import setup as spatial_db_setup
-from ckanext.harvest.model import setup as harvest_model_setup
-
-geojson_examples = {
- 'point':'{"type":"Point","coordinates":[100.0,0.0]}',
- 'point_2':'{"type":"Point","coordinates":[20,10]}',
- 'line':'{"type":"LineString","coordinates":[[100.0,0.0],[101.0,1.0]]}',
- 'polygon':'{"type":"Polygon","coordinates":[[[100.0,0.0],[101.0,0.0],[101.0,1.0],[100.0,1.0],[100.0,0.0]]]}',
- 'polygon_holes':'{"type":"Polygon","coordinates":[[[100.0,0.0],[101.0,0.0],[101.0,1.0],[100.0,1.0],[100.0,0.0]],[[100.2,0.2],[100.8,0.2],[100.8,0.8],[100.2,0.8],[100.2,0.2]]]}',
- 'multipoint':'{"type":"MultiPoint","coordinates":[[100.0,0.0],[101.0,1.0]]}',
- 'multiline':'{"type":"MultiLineString","coordinates":[[[100.0,0.0],[101.0,1.0]],[[102.0,2.0],[103.0,3.0]]]}',
- 'multipolygon':'{"type":"MultiPolygon","coordinates":[[[[102.0,2.0],[103.0,2.0],[103.0,3.0],[102.0,3.0],[102.0,2.0]]],[[[100.0,0.0],[101.0,0.0],[101.0,1.0],[100.0,1.0],[100.0,0.0]],[[100.2,0.2],[100.8,0.2],[100.8,0.8],[100.2,0.8],[100.2,0.2]]]]}'}
-
-
-def _execute_script(script_path):
-
- conn = Session.connection()
- script = open(script_path, 'r').read()
- for cmd in script.split(';'):
- cmd = re.sub(r'--(.*)|[\n\t]', '', cmd)
- if len(cmd):
- conn.execute(cmd)
-
- Session.commit()
-
-
-def create_postgis_tables():
- scripts_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
- 'scripts')
- if postgis_version()[:1] == '1':
- _execute_script(os.path.join(scripts_path, 'spatial_ref_sys.sql'))
- _execute_script(os.path.join(scripts_path, 'geometry_columns.sql'))
- else:
- _execute_script(os.path.join(scripts_path, 'spatial_ref_sys.sql'))
-
-
-class SpatialTestBase(object):
-
- db_srid = 4326
-
- geojson_examples = geojson_examples
-
- @classmethod
- def setup_class(cls):
- if engine_is_sqlite():
- raise SkipTest("PostGIS is required for this test")
-
- # This will create the PostGIS tables (geometry_columns and
- # spatial_ref_sys) which were deleted when rebuilding the database
- table = Table('spatial_ref_sys', meta.metadata)
- if not table.exists():
- create_postgis_tables()
-
- # When running the tests with the --reset-db option for some
- # reason the metadata holds a reference to the `package_extent`
- # table after being deleted, causing an InvalidRequestError
- # exception when trying to recreate it further on
- if 'package_extent' in meta.metadata.tables:
- meta.metadata.remove(meta.metadata.tables['package_extent'])
-
- spatial_db_setup()
-
- # Setup the harvest tables
- harvest_model_setup()
-
- @classmethod
- def teardown_class(cls):
- repo.rebuild_db()
diff --git a/ckanext/spatial/tests/nose/data/dataset.json b/ckanext/spatial/tests/nose/data/dataset.json
deleted file mode 100644
index ae0e628..0000000
--- a/ckanext/spatial/tests/nose/data/dataset.json
+++ /dev/null
@@ -1 +0,0 @@
-{"keyword-inspire-theme": ["Global"], "resource-type": ["dataset"], "maintenance-note": "", "spatial-data-service-type": "", "spatial-reference-system": "4326", "keywords": [{"type": "place", "thesaurus-identifier": "", "keyword": ["Global"], "thesaurus-title": ""}], "guid": "9d87519e-f91d-11e6-83d9-9c4e3672cd50", "metadata-language": "", "metadata-point-of-contact": [{"individual-name": "", "contact-info": {"online-resource": "", "email": "ad@m.in"}, "organisation-name": "", "role": "originator", "position-name": ""}], "metadata-standard-version": "ISO 19115:2003", "usage": [], "spatial-resolution-units": "", "responsible-organisation": [{"individual-name": "", "contact-info": {"online-resource": "", "email": "ad@m.in"}, "organisation-name": "", "role": "originator", "position-name": ""}], "temporal-extent-begin": [], "contact-email": "ad@m.in", "metadata-date": "2017-10-10T16:07:12Z", "dataset-reference-date": [{"type": "publication", "value": "2017-02-22T17:33:00Z"}], "conformity-pass": "", "unique-resource-identifier": "", "bbox": [{"west": "-180.0000000000", "east": "180.0000000000", "north": "90.0000000000", "south": "-90.0000000000"}], "keyword-controlled-other": [], "equivalent-scale": [], "lineage": "", "temporal-extent-end": [], "coupled-resource": [], "metadata-standard-name": "ISO 19115:2003 - Geographic information - Metadata", "additional-information-source": "No information provided", "extent-free-text": [], "browse-graphic": [{"type": "image/png", "description": "Thumbnail for 'test'", "file": "http://localhost:8000/uploaded/thumbs/layer-9d87519e-f91d-11e6-83d9-9c4e3672cd50-thumb.png"}], "abstract": "No abstract provided", "presentation-form": ["mapDigital"], "aggregation-info": [], "access-constraints": [], "resource-locator-identification": [], "distributor": [], "dataset-language": [], "conformity-specification-title": "", "cited-responsible-party": [], "conformity-specification": "", "purpose": "", "date-created": "", "progress": ["completed"], "extent-controlled": [], "use-constraints": [], "alternate-title": [], "date-released": "2017-02-22T17:33:00Z", "date-updated": "", "data-format": [], "tags": ["Global"], "frequency-of-update": "", "limitations-on-public-access": ["Not Specified: The original author did not specify a license."], "publisher": "", "resource-locator": [{"url": "http://localhost:8000/layers/geonode:test", "function": "", "protocol": "WWW:LINK-1.0-http--link", "name": "", "description": "Online link to the 'test' description on GeoNode"}, {"url": "http://localhost:8080/geoserver/wms?layers=geonode%3Atest&width=1100&bbox=-180.0%2C-90.0%2C180.0%2C90.0&service=WMS&format=image%2Fjpeg&srs=EPSG%3A4326&request=GetMap&height=550", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.jpg", "description": "test (JPEG Format)"}, {"url": "http://localhost:8080/geoserver/wms?layers=geonode%3Atest&width=1100&bbox=-180.0%2C-90.0%2C180.0%2C90.0&service=WMS&format=application%2Fpdf&srs=EPSG%3A4326&request=GetMap&height=550", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.pdf", "description": "test (PDF Format)"}, {"url": "http://localhost:8080/geoserver/wms?layers=geonode%3Atest&width=1100&bbox=-180.0%2C-90.0%2C180.0%2C90.0&service=WMS&format=image%2Fpng&srs=EPSG%3A4326&request=GetMap&height=550", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.png", "description": "test (PNG Format)"}, {"url": 
"http://localhost:8080/geoserver/wfs?format_options=charset%3AUTF-8&typename=geonode%3Atest&outputFormat=SHAPE-ZIP&version=1.0.0&service=WFS&request=GetFeature", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.zip", "description": "test (Zipped Shapefile Format)"}, {"url": "http://localhost:8080/geoserver/wfs?typename=geonode%3Atest&outputFormat=gml2&version=1.0.0&request=GetFeature&service=WFS", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.gml", "description": "test (GML 2.0 Format)"}, {"url": "http://localhost:8080/geoserver/wfs?typename=geonode%3Atest&outputFormat=text%2Fxml%3B+subtype%3Dgml%2F3.1.1&version=1.0.0&request=GetFeature&service=WFS", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.gml", "description": "test (GML 3.1.1 Format)"}, {"url": "http://localhost:8080/geoserver/wfs?typename=geonode%3Atest&outputFormat=csv&version=1.0.0&request=GetFeature&service=WFS", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.csv", "description": "test (CSV Format)"}, {"url": "http://localhost:8080/geoserver/wfs?typename=geonode%3Atest&outputFormat=excel&version=1.0.0&request=GetFeature&service=WFS", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.excel", "description": "test (Excel Format)"}, {"url": "http://localhost:8080/geoserver/wfs?srsName=EPSG%3A4326&typename=geonode%3Atest&outputFormat=json&version=1.0.0&service=WFS&request=GetFeature", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.json", "description": "test (GeoJSON Format)"}, {"url": "http://localhost:8080/geoserver/wms/kml?layers=geonode%3Atest&mode=download", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.kml", "description": "test (KML Format)"}, {"url": "http://localhost:8080/geoserver/wms/kml?layers=geonode%3Atest&mode=refresh", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.kml", "description": "test (View in Google Earth Format)"}, {"url": "http://localhost:8080/geoserver/wms/reflect?layers=geonode:test&format=image/png8&height=150&width=200&bbox=-180.0,-90.0,180.0,90.0&TIME=-99999999999-01-01T00:00:00.0Z/99999999999-01-01T00:00:00.0Z", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.png", "description": "test (Remote Thumbnail Format)"}, {"url": "http://localhost:8000/uploaded/thumbs/layer-9d87519e-f91d-11e6-83d9-9c4e3672cd50-thumb.png", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.png", "description": "test (Thumbnail Format)"}, {"url": "http://localhost:8080/geoserver/wms?request=GetLegendGraphic&format=image/png&WIDTH=20&HEIGHT=20&LAYER=geonode:test&legend_options=fontAntiAliasing:true;fontSize:12;forceLabels:on", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.png", "description": "test (Legend Format)"}, {"url": "http://localhost:8080/geoserver/gwc/service/gmaps?layers=geonode:test&zoom={z}&x={x}&y={y}&format=image/png8", "function": "", "protocol": "WWW:DOWNLOAD-1.0-http--download", "name": "test.tiles", "description": "test (Tiles Format)"}, {"url": "http://localhost:8080/wms", "function": "", "protocol": "OGC:WMS", "name": "geonode:test", "description": " Service - Provides Layer: test"}, {"url": "http://localhost:8080/wfs", "function": "", "protocol": "OGC:WFS", "name": "geonode:test", "description": " Service - Provides Layer: test"}], "url": "", "title": "test", "contact": "", 
"topic-category": [], "vertical-extent": [], "conformity-explanation": "", "spatial-resolution": ""}
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/functional/__init__.py b/ckanext/spatial/tests/nose/functional/__init__.py
deleted file mode 100644
index 2e2033b..0000000
--- a/ckanext/spatial/tests/nose/functional/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# this is a namespace package
-try:
- import pkg_resources
- pkg_resources.declare_namespace(__name__)
-except ImportError:
- import pkgutil
- __path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/ckanext/spatial/tests/nose/functional/test_package.py b/ckanext/spatial/tests/nose/functional/test_package.py
deleted file mode 100644
index 5dcba65..0000000
--- a/ckanext/spatial/tests/nose/functional/test_package.py
+++ /dev/null
@@ -1,147 +0,0 @@
-import json
-from nose.tools import assert_equals
-
-from ckan.model import Session
-from ckan.lib.helpers import url_for
-
-try:
- import ckan.new_tests.helpers as helpers
- import ckan.new_tests.factories as factories
-except ImportError:
- import ckan.tests.helpers as helpers
- import ckan.tests.factories as factories
-
-from ckanext.spatial.model import PackageExtent
-from ckanext.spatial.geoalchemy_common import legacy_geoalchemy
-from ckanext.spatial.tests.nose.base import SpatialTestBase
-
-
-class TestSpatialExtra(SpatialTestBase, helpers.FunctionalTestBase):
-
- def test_spatial_extra(self):
- app = self._get_test_app()
-
- user = factories.User()
- env = {'REMOTE_USER': user['name'].encode('ascii')}
- dataset = factories.Dataset(user=user)
-
- offset = url_for(controller='package', action='edit', id=dataset['id'])
- res = app.get(offset, extra_environ=env)
-
- form = res.forms[1]
- form['extras__0__key'] = u'spatial'
- form['extras__0__value'] = self.geojson_examples['point']
-
- res = helpers.submit_and_follow(app, form, env, 'save')
-
- assert 'Error' not in res, res
-
- package_extent = Session.query(PackageExtent) \
- .filter(PackageExtent.package_id == dataset['id']).first()
-
- geojson = json.loads(self.geojson_examples['point'])
-
- assert_equals(package_extent.package_id, dataset['id'])
- if legacy_geoalchemy:
- assert_equals(Session.scalar(package_extent.the_geom.x),
- geojson['coordinates'][0])
- assert_equals(Session.scalar(package_extent.the_geom.y),
- geojson['coordinates'][1])
- assert_equals(Session.scalar(package_extent.the_geom.srid),
- self.db_srid)
- else:
- from sqlalchemy import func
- assert_equals(
- Session.query(func.ST_X(package_extent.the_geom)).first()[0],
- geojson['coordinates'][0])
- assert_equals(
- Session.query(func.ST_Y(package_extent.the_geom)).first()[0],
- geojson['coordinates'][1])
- assert_equals(package_extent.the_geom.srid, self.db_srid)
-
- def test_spatial_extra_edit(self):
- app = self._get_test_app()
-
- user = factories.User()
- env = {'REMOTE_USER': user['name'].encode('ascii')}
- dataset = factories.Dataset(user=user)
-
- offset = url_for(controller='package', action='edit', id=dataset['id'])
- res = app.get(offset, extra_environ=env)
-
- form = res.forms[1]
- form['extras__0__key'] = u'spatial'
- form['extras__0__value'] = self.geojson_examples['point']
-
- res = helpers.submit_and_follow(app, form, env, 'save')
-
- assert 'Error' not in res, res
-
- res = app.get(offset, extra_environ=env)
-
- form = res.forms[1]
- form['extras__0__key'] = u'spatial'
- form['extras__0__value'] = self.geojson_examples['polygon']
-
- res = helpers.submit_and_follow(app, form, env, 'save')
-
- assert 'Error' not in res, res
-
- package_extent = Session.query(PackageExtent) \
- .filter(PackageExtent.package_id == dataset['id']).first()
-
- assert_equals(package_extent.package_id, dataset['id'])
- if legacy_geoalchemy:
- assert_equals(
- Session.scalar(package_extent.the_geom.geometry_type),
- 'ST_Polygon')
- assert_equals(
- Session.scalar(package_extent.the_geom.srid),
- self.db_srid)
- else:
- from sqlalchemy import func
- assert_equals(
- Session.query(
- func.ST_GeometryType(package_extent.the_geom)).first()[0],
- 'ST_Polygon')
- assert_equals(package_extent.the_geom.srid, self.db_srid)
-
- def test_spatial_extra_bad_json(self):
- app = self._get_test_app()
-
- user = factories.User()
- env = {'REMOTE_USER': user['name'].encode('ascii')}
- dataset = factories.Dataset(user=user)
-
- offset = url_for(controller='package', action='edit', id=dataset['id'])
- res = app.get(offset, extra_environ=env)
-
- form = res.forms[1]
- form['extras__0__key'] = u'spatial'
- form['extras__0__value'] = u'{"Type":Bad Json]'
-
- res = helpers.webtest_submit(form, extra_environ=env, name='save')
-
- assert 'Error' in res, res
- assert 'Spatial' in res
- assert 'Error decoding JSON object' in res
-
- def test_spatial_extra_bad_geojson(self):
- app = self._get_test_app()
-
- user = factories.User()
- env = {'REMOTE_USER': user['name'].encode('ascii')}
- dataset = factories.Dataset(user=user)
-
- offset = url_for(controller='package', action='edit', id=dataset['id'])
- res = app.get(offset, extra_environ=env)
-
- form = res.forms[1]
- form['extras__0__key'] = u'spatial'
- form['extras__0__value'] = u'{"Type":"Bad_GeoJSON","a":2}'
-
- res = helpers.webtest_submit(form, extra_environ=env, name='save')
-
- assert 'Error' in res, res
- assert 'Spatial' in res
- assert 'Error creating geometry' in res
diff --git a/ckanext/spatial/tests/nose/functional/test_widgets.py b/ckanext/spatial/tests/nose/functional/test_widgets.py
deleted file mode 100644
index b6359f1..0000000
--- a/ckanext/spatial/tests/nose/functional/test_widgets.py
+++ /dev/null
@@ -1,38 +0,0 @@
-from ckan.lib.helpers import url_for
-
-from ckanext.spatial.tests.nose.base import SpatialTestBase
-
-try:
- import ckan.new_tests.helpers as helpers
- import ckan.new_tests.factories as factories
-except ImportError:
- import ckan.tests.helpers as helpers
- import ckan.tests.factories as factories
-
-
-class TestSpatialWidgets(SpatialTestBase, helpers.FunctionalTestBase):
-
- def test_dataset_map(self):
- app = self._get_test_app()
-
- user = factories.User()
- dataset = factories.Dataset(
- user=user,
- extras=[{'key': 'spatial',
- 'value': self.geojson_examples['point']}]
- )
- offset = url_for(controller='package', action='read', id=dataset['id'])
- res = app.get(offset)
-
- assert 'data-module="dataset-map"' in res
- assert 'dataset_map.js' in res
-
- def test_spatial_search_widget(self):
-
- app = self._get_test_app()
-
- offset = url_for(controller='package', action='search')
- res = app.get(offset)
-
- assert 'data-module="spatial-query"' in res
- assert 'spatial_query.js' in res
diff --git a/ckanext/spatial/tests/nose/lib/test_spatial.py b/ckanext/spatial/tests/nose/lib/test_spatial.py
deleted file mode 100644
index 4229824..0000000
--- a/ckanext/spatial/tests/nose/lib/test_spatial.py
+++ /dev/null
@@ -1,154 +0,0 @@
-from __future__ import print_function
-import six
-
-import time
-import random
-
-from nose.tools import assert_equal
-
-from shapely.geometry import asShape
-
-from ckan import model
-from ckan import plugins
-from ckan.lib.helpers import json
-from ckan.logic.action.create import package_create
-from ckan.lib.munge import munge_title_to_name
-
-from ckanext.spatial.model import PackageExtent
-from ckanext.spatial.lib import validate_bbox, bbox_query, bbox_query_ordered
-from ckanext.spatial.geoalchemy_common import WKTElement, compare_geometry_fields
-from ckanext.spatial.tests.nose.base import SpatialTestBase
-
-
-class TestCompareGeometries(SpatialTestBase):
-
- def _get_extent_object(self, geometry):
- if isinstance(geometry, six.string_types):
- geometry = json.loads(geometry)
- shape = asShape(geometry)
- return PackageExtent(package_id='xxx',
- the_geom=WKTElement(shape.wkt, 4326))
-
- def test_same_points(self):
-
- extent1 = self._get_extent_object(self.geojson_examples['point'])
- extent2 = self._get_extent_object(self.geojson_examples['point'])
-
- assert compare_geometry_fields(extent1.the_geom, extent2.the_geom)
-
- def test_different_points(self):
-
- extent1 = self._get_extent_object(self.geojson_examples['point'])
- extent2 = self._get_extent_object(self.geojson_examples['point_2'])
-
- assert not compare_geometry_fields(extent1.the_geom, extent2.the_geom)
-
-
-class TestValidateBbox(object):
- bbox_dict = {'minx': -4.96,
- 'miny': 55.70,
- 'maxx': -3.78,
- 'maxy': 56.43}
-
- def test_string(self):
- res = validate_bbox("-4.96,55.70,-3.78,56.43")
- assert_equal(res, self.bbox_dict)
-
- def test_list(self):
- res = validate_bbox([-4.96, 55.70, -3.78, 56.43])
- assert_equal(res, self.bbox_dict)
-
- def test_bad(self):
- res = validate_bbox([-4.96, 55.70, -3.78])
- assert_equal(res, None)
-
- def test_bad_2(self):
- res = validate_bbox('random')
- assert_equal(res, None)
-
-
-def bbox_2_geojson(bbox_dict):
- return '{"type":"Polygon","coordinates":[[[%(minx)s, %(miny)s],[%(minx)s, %(maxy)s], [%(maxx)s, %(maxy)s], [%(maxx)s, %(miny)s], [%(minx)s, %(miny)s]]]}' % bbox_dict
-
-
-class SpatialQueryTestBase(SpatialTestBase):
- '''Base class for tests of spatial queries'''
- miny = 0
- maxy = 1
-
- @classmethod
- def setup_class(cls):
- SpatialTestBase.setup_class()
- for fixture_x in cls.fixtures_x:
- bbox = cls.x_values_to_bbox(fixture_x)
- bbox_geojson = bbox_2_geojson(bbox)
- cls.create_package(name=munge_title_to_name(six.text_type(fixture_x)),
- title=six.text_type(fixture_x),
- extras=[{'key': 'spatial',
- 'value': bbox_geojson}])
-
- @classmethod
- def create_package(cls, **package_dict):
- user = plugins.toolkit.get_action('get_site_user')({'model': model, 'ignore_auth': True}, {})
- context = {'model': model,
- 'session': model.Session,
- 'user': user['name'],
- 'extras_as_string': True,
- 'api_version': 2,
- 'ignore_auth': True,
- }
- package_dict = package_create(context, package_dict)
- return context.get('id')
-
- @classmethod
- def x_values_to_bbox(cls, x_tuple):
- return {'minx': x_tuple[0], 'maxx': x_tuple[1],
- 'miny': cls.miny, 'maxy': cls.maxy}
-
-
-class TestBboxQuery(SpatialQueryTestBase):
- # x values for the fixtures
- fixtures_x = [(0, 1), (0, 3), (0, 4), (4, 5), (6, 7)]
-
- def test_query(self):
- bbox_dict = self.x_values_to_bbox((2, 5))
- package_ids = [res.package_id for res in bbox_query(bbox_dict)]
- package_titles = [model.Package.get(id_).title for id_ in package_ids]
- assert_equal(set(package_titles),
- set(('(0, 3)', '(0, 4)', '(4, 5)')))
-
-class TestBboxQueryOrdered(SpatialQueryTestBase):
- # x values for the fixtures
- fixtures_x = [(0, 9), (1, 8), (2, 7), (3, 6), (4, 5),
- (8, 9)]
-
- def test_query(self):
- bbox_dict = self.x_values_to_bbox((2, 7))
- q = bbox_query_ordered(bbox_dict)
- package_ids = [res.package_id for res in q]
- package_titles = [model.Package.get(id_).title for id_ in package_ids]
- # check the right items are returned
- assert_equal(set(package_titles),
- set(('(0, 9)', '(1, 8)', '(2, 7)', '(3, 6)', '(4, 5)')))
- # check the order is good
- assert_equal(package_titles,
- ['(2, 7)', '(1, 8)', '(3, 6)', '(0, 9)', '(4, 5)'])
-
-
-class TestBboxQueryPerformance(SpatialQueryTestBase):
- # x values for the fixtures
- fixtures_x = [(random.uniform(0, 3), random.uniform(3,9)) \
- for x in range(10)] # increase the number to 1000 say
- def test_query(self):
- bbox_dict = self.x_values_to_bbox((2, 7))
- t0 = time.time()
- q = bbox_query(bbox_dict)
- t1 = time.time()
- print('bbox_query took: ', t1-t0)
-
- def test_query_ordered(self):
- bbox_dict = self.x_values_to_bbox((2, 7))
- t0 = time.time()
- q = bbox_query_ordered(bbox_dict)
- t1 = time.time()
- print('bbox_query_ordered took: ', t1-t0)
diff --git a/ckanext/spatial/tests/nose/model/__init__.py b/ckanext/spatial/tests/nose/model/__init__.py
deleted file mode 100644
index 2e2033b..0000000
--- a/ckanext/spatial/tests/nose/model/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# this is a namespace package
-try:
- import pkg_resources
- pkg_resources.declare_namespace(__name__)
-except ImportError:
- import pkgutil
- __path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/ckanext/spatial/tests/nose/model/test_harvested_metadata.py b/ckanext/spatial/tests/nose/model/test_harvested_metadata.py
deleted file mode 100644
index 7fb03f4..0000000
--- a/ckanext/spatial/tests/nose/model/test_harvested_metadata.py
+++ /dev/null
@@ -1,34 +0,0 @@
-import os
-
-from nose.tools import assert_equal
-
-from ckanext.spatial.model import ISODocument
-
-def open_xml_fixture(xml_filename):
- xml_filepath = os.path.join(os.path.dirname(__file__),
- 'xml',
- xml_filename)
- with open(xml_filepath, 'rb') as f:
- xml_string_raw = f.read()
-
- try:
- xml_string = xml_string_raw.encode("utf-8")
- except UnicodeDecodeError as e:
- assert 0, 'ERROR: Unicode Error reading file \'%s\': %s' % \
- (metadata_filepath, e)
- return xml_string
-
-def test_simple():
- xml_string = open_xml_fixture('gemini_dataset.xml')
- iso_document = ISODocument(xml_string)
- iso_values = iso_document.read_values()
- assert_equal(iso_values['guid'], 'test-dataset-1')
- assert_equal(iso_values['metadata-date'], '2011-09-23T10:06:08')
-
-def test_multiplicity_warning():
- # This dataset lacks a value for Metadata Date and should
- # produce a log.warning, but not raise an exception.
- xml_string = open_xml_fixture('FCSConservancyPolygons.xml')
- iso_document = ISODocument(xml_string)
- iso_values = iso_document.read_values()
- assert_equal(iso_values['guid'], 'B8A22DF4-B0DC-4F0B-A713-0CF5F8784A28')
diff --git a/ckanext/spatial/tests/nose/model/test_package_extent.py b/ckanext/spatial/tests/nose/model/test_package_extent.py
deleted file mode 100644
index 712d289..0000000
--- a/ckanext/spatial/tests/nose/model/test_package_extent.py
+++ /dev/null
@@ -1,90 +0,0 @@
-from nose.tools import assert_equals
-from shapely.geometry import asShape
-
-from ckan.model import Session
-from ckan.lib.helpers import json
-try:
- import ckan.new_tests.factories as factories
-except ImportError:
- import ckan.tests.factories as factories
-
-from ckanext.spatial.model import PackageExtent
-from ckanext.spatial.geoalchemy_common import WKTElement, legacy_geoalchemy
-from ckanext.spatial.tests.nose.base import SpatialTestBase
-
-
-class TestPackageExtent(SpatialTestBase):
-
- def test_create_extent(self):
-
- package = factories.Dataset()
-
- geojson = json.loads(self.geojson_examples['point'])
-
- shape = asShape(geojson)
- package_extent = PackageExtent(package_id=package['id'],
- the_geom=WKTElement(shape.wkt,
- self.db_srid))
- package_extent.save()
-
- assert_equals(package_extent.package_id, package['id'])
- if legacy_geoalchemy:
- assert_equals(Session.scalar(package_extent.the_geom.x),
- geojson['coordinates'][0])
- assert_equals(Session.scalar(package_extent.the_geom.y),
- geojson['coordinates'][1])
- assert_equals(Session.scalar(package_extent.the_geom.srid),
- self.db_srid)
- else:
- from sqlalchemy import func
- assert_equals(
- Session.query(func.ST_X(package_extent.the_geom)).first()[0],
- geojson['coordinates'][0])
- assert_equals(
- Session.query(func.ST_Y(package_extent.the_geom)).first()[0],
- geojson['coordinates'][1])
- assert_equals(package_extent.the_geom.srid, self.db_srid)
-
- def test_update_extent(self):
-
- package = factories.Dataset()
-
- geojson = json.loads(self.geojson_examples['point'])
-
- shape = asShape(geojson)
- package_extent = PackageExtent(package_id=package['id'],
- the_geom=WKTElement(shape.wkt,
- self.db_srid))
- package_extent.save()
- if legacy_geoalchemy:
- assert_equals(
- Session.scalar(package_extent.the_geom.geometry_type),
- 'ST_Point')
- else:
- from sqlalchemy import func
- assert_equals(
- Session.query(
- func.ST_GeometryType(package_extent.the_geom)).first()[0],
- 'ST_Point')
-
- # Update the geometry (Point -> Polygon)
- geojson = json.loads(self.geojson_examples['polygon'])
-
- shape = asShape(geojson)
- package_extent.the_geom = WKTElement(shape.wkt, self.db_srid)
- package_extent.save()
-
- assert_equals(package_extent.package_id, package['id'])
- if legacy_geoalchemy:
- assert_equals(
- Session.scalar(package_extent.the_geom.geometry_type),
- 'ST_Polygon')
- assert_equals(
- Session.scalar(package_extent.the_geom.srid),
- self.db_srid)
- else:
- assert_equals(
- Session.query(
- func.ST_GeometryType(package_extent.the_geom)).first()[0],
- 'ST_Polygon')
- assert_equals(package_extent.the_geom.srid, self.db_srid)
diff --git a/ckanext/spatial/tests/nose/model/xml/FCSConservancyPolygons.xml b/ckanext/spatial/tests/nose/model/xml/FCSConservancyPolygons.xml
deleted file mode 100644
index a34f8aa..0000000
--- a/ckanext/spatial/tests/nose/model/xml/FCSConservancyPolygons.xml
+++ /dev/null
@@ -1,524 +0,0 @@
-[524 lines of ISO 19139/GEMINI XML elided: metadata record for the "FCS Conservancy Polygons" dataset (fileIdentifier B8A22DF4-B0DC-4F0B-A713-0CF5F8784A28, Forestry Commission Scotland); the markup was stripped during extraction and only the text values survived]
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/model/xml/gemini_dataset.xml b/ckanext/spatial/tests/nose/model/xml/gemini_dataset.xml
deleted file mode 100644
index 3f58f0e..0000000
--- a/ckanext/spatial/tests/nose/model/xml/gemini_dataset.xml
+++ /dev/null
@@ -1,498 +0,0 @@
-[498 lines of GEMINI 2.1 XML elided: metadata record for the "Country Parks (Scotland)" dataset (fileIdentifier test-dataset-1, Scottish Natural Heritage); the markup was stripped during extraction and only the text values survived]
diff --git a/ckanext/spatial/tests/nose/scripts/geometry_columns.sql b/ckanext/spatial/tests/nose/scripts/geometry_columns.sql
deleted file mode 100644
index e2bbb75..0000000
--- a/ckanext/spatial/tests/nose/scripts/geometry_columns.sql
+++ /dev/null
@@ -1,25 +0,0 @@
--------------------------------------------------------------------
--- WARNING: This is probably NOT the file you are looking for.
--- This file is intended to be used only during tests, you won't
--- get a functional PostGIS database executing it. Please install
--- PostGIS as described in the README.
--------------------------------------------------------------------
-
--------------------------------------------------------------------
--- GEOMETRY_COLUMNS
--------------------------------------------------------------------
-CREATE TABLE geometry_columns (
- f_table_catalog varchar(256) not null,
- f_table_schema varchar(256) not null,
- f_table_name varchar(256) not null,
- f_geometry_column varchar(256) not null,
- coord_dimension integer not null,
- srid integer not null,
- type varchar(30) not null,
- CONSTRAINT geometry_columns_pk primary key (
- f_table_catalog,
- f_table_schema,
- f_table_name,
- f_geometry_column )
-) WITH OIDS;
-
diff --git a/ckanext/spatial/tests/nose/scripts/postgis.sql b/ckanext/spatial/tests/nose/scripts/postgis.sql
deleted file mode 100644
index b9ea072..0000000
--- a/ckanext/spatial/tests/nose/scripts/postgis.sql
+++ /dev/null
@@ -1,41 +0,0 @@
--------------------------------------------------------------------
--- WARNING: This is probably NOT the file you are looking for.
--- This file is intended to be used only during tests, you won't
--- get a functional PostGIS database executing it. Please install
--- PostGIS as described in the README.
--------------------------------------------------------------------
-
--------------------------------------------------------------------
--- SPATIAL_REF_SYS
--------------------------------------------------------------------
-CREATE TABLE spatial_ref_sys (
- srid integer not null primary key,
- auth_name varchar(256),
- auth_srid integer,
- srtext varchar(2048),
- proj4text varchar(2048)
-);
-
--------------------------------------------------------------------
--- GEOMETRY_COLUMNS
--------------------------------------------------------------------
-CREATE TABLE geometry_columns (
- f_table_catalog varchar(256) not null,
- f_table_schema varchar(256) not null,
- f_table_name varchar(256) not null,
- f_geometry_column varchar(256) not null,
- coord_dimension integer not null,
- srid integer not null,
- type varchar(30) not null,
- CONSTRAINT geometry_columns_pk primary key (
- f_table_catalog,
- f_table_schema,
- f_table_name,
- f_geometry_column )
-) WITH OIDS;
-
----
---- EPSG 4326 : WGS 84
----
-INSERT INTO "spatial_ref_sys" ("srid","auth_name","auth_srid","srtext","proj4text") VALUES (4326,'EPSG',4326,'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs ');
-
diff --git a/ckanext/spatial/tests/nose/scripts/spatial_ref_sys.sql b/ckanext/spatial/tests/nose/scripts/spatial_ref_sys.sql
deleted file mode 100644
index 467a868..0000000
--- a/ckanext/spatial/tests/nose/scripts/spatial_ref_sys.sql
+++ /dev/null
@@ -1,23 +0,0 @@
--------------------------------------------------------------------
--- WARNING: This is probably NOT the file you are looking for.
--- This file is intended to be used only during tests, you won't
--- get a functional PostGIS database executing it. Please install
--- PostGIS as described in the README.
--------------------------------------------------------------------
-
--------------------------------------------------------------------
--- SPATIAL_REF_SYS
--------------------------------------------------------------------
-CREATE TABLE spatial_ref_sys (
- srid integer not null primary key,
- auth_name varchar(256),
- auth_srid integer,
- srtext varchar(2048),
- proj4text varchar(2048)
-);
-
----
---- EPSG 4326 : WGS 84
----
-INSERT INTO "spatial_ref_sys" ("srid","auth_name","auth_srid","srtext","proj4text") VALUES (4326,'EPSG',4326,'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs ');
-
diff --git a/ckanext/spatial/tests/nose/test_api.py b/ckanext/spatial/tests/nose/test_api.py
deleted file mode 100644
index 7755c91..0000000
--- a/ckanext/spatial/tests/nose/test_api.py
+++ /dev/null
@@ -1,274 +0,0 @@
-from nose.plugins.skip import SkipTest
-from nose.tools import assert_equals, assert_raises
-
-from ckan.model import Session
-from ckan.lib.search import SearchError
-try:
- import ckan.new_tests.helpers as helpers
- import ckan.new_tests.factories as factories
-except ImportError:
- import ckan.tests.helpers as helpers
- import ckan.tests.factories as factories
-
-from ckanext.spatial.tests.nose.base import SpatialTestBase
-
-extents = {
- 'nz': '{"type":"Polygon","coordinates":[[[174,-38],[176,-38],[176,-40],[174,-40],[174,-38]]]}',
- 'ohio': '{"type": "Polygon","coordinates": [[[-84,38],[-84,40],[-80,42],[-80,38],[-84,38]]]}',
- 'dateline': '{"type":"Polygon","coordinates":[[[169,70],[169,60],[192,60],[192,70],[169,70]]]}',
- 'dateline2': '{"type":"Polygon","coordinates":[[[170,60],[-170,60],[-170,70],[170,70],[170,60]]]}',
-}
-
-
-class TestAction(SpatialTestBase):
-
- def teardown(self):
- helpers.reset_db()
-
- def test_spatial_query(self):
-
- dataset = factories.Dataset(
- extras=[{'key': 'spatial',
- 'value': self.geojson_examples['point']}]
- )
-
- result = helpers.call_action(
- 'package_search',
- extras={'ext_bbox': '-180,-90,180,90'})
-
- assert_equals(result['count'], 1)
- assert_equals(result['results'][0]['id'], dataset['id'])
-
- def test_spatial_query_outside_bbox(self):
-
- factories.Dataset(
- extras=[{'key': 'spatial',
- 'value': self.geojson_examples['point']}]
- )
-
- result = helpers.call_action(
- 'package_search',
- extras={'ext_bbox': '-10,-20,10,20'})
-
- assert_equals(result['count'], 0)
-
- def test_spatial_query_wrong_bbox(self):
-
- assert_raises(SearchError, helpers.call_action,
- 'package_search', extras={'ext_bbox': '-10,-20,10,a'})
-
- def test_spatial_query_nz(self):
-
- dataset = factories.Dataset(
- extras=[{'key': 'spatial',
- 'value': extents['nz']}]
- )
-
- result = helpers.call_action(
- 'package_search',
- extras={'ext_bbox': '56,-54,189,-28'})
-
- assert_equals(result['count'], 1)
- assert_equals(result['results'][0]['id'], dataset['id'])
-
- def test_spatial_query_nz_wrap(self):
-
- dataset = factories.Dataset(
- extras=[{'key': 'spatial',
- 'value': extents['nz']}]
- )
-
- result = helpers.call_action(
- 'package_search',
- extras={'ext_bbox': '-203,-54,-167,-28'})
-
- assert_equals(result['count'], 1)
- assert_equals(result['results'][0]['id'], dataset['id'])
-
- def test_spatial_query_ohio(self):
-
- dataset = factories.Dataset(
- extras=[{'key': 'spatial',
- 'value': extents['ohio']}]
- )
-
- result = helpers.call_action(
- 'package_search',
- extras={'ext_bbox': '-110,37,-78,53'})
-
- assert_equals(result['count'], 1)
- assert_equals(result['results'][0]['id'], dataset['id'])
-
- def test_spatial_query_ohio_wrap(self):
-
- dataset = factories.Dataset(
- extras=[{'key': 'spatial',
- 'value': extents['ohio']}]
- )
-
- result = helpers.call_action(
- 'package_search',
- extras={'ext_bbox': '258,37,281,51'})
-
- assert_equals(result['count'], 1)
- assert_equals(result['results'][0]['id'], dataset['id'])
-
- def test_spatial_query_dateline_1(self):
-
- dataset = factories.Dataset(
- extras=[{'key': 'spatial',
- 'value': extents['dateline']}]
- )
-
- result = helpers.call_action(
- 'package_search',
- extras={'ext_bbox': '-197,56,-128,70'})
-
- assert_equals(result['count'], 1)
- assert_equals(result['results'][0]['id'], dataset['id'])
-
- def test_spatial_query_dateline_2(self):
-
- dataset = factories.Dataset(
- extras=[{'key': 'spatial',
- 'value': extents['dateline']}]
- )
-
- result = helpers.call_action(
- 'package_search',
- extras={'ext_bbox': '162,54,237,70'})
-
- assert_equals(result['count'], 1)
- assert_equals(result['results'][0]['id'], dataset['id'])
-
- def test_spatial_query_dateline_3(self):
-
- dataset = factories.Dataset(
- extras=[{'key': 'spatial',
- 'value': extents['dateline2']}]
- )
-
- result = helpers.call_action(
- 'package_search',
- extras={'ext_bbox': '-197,56,-128,70'})
-
- assert_equals(result['count'], 1)
- assert_equals(result['results'][0]['id'], dataset['id'])
-
- def test_spatial_query_dateline_4(self):
-
- dataset = factories.Dataset(
- extras=[{'key': 'spatial',
- 'value': extents['dateline2']}]
- )
-
- result = helpers.call_action(
- 'package_search',
- extras={'ext_bbox': '162,54,237,70'})
-
- assert_equals(result['count'], 1)
- assert_equals(result['results'][0]['id'], dataset['id'])
-
-
-
-class TestHarvestedMetadataAPI(SpatialTestBase, helpers.FunctionalTestBase):
-
- def test_api(self):
- try:
- from ckanext.harvest.model import (HarvestObject, HarvestJob,
- HarvestSource,
- HarvestObjectExtra)
- except ImportError:
- raise SkipTest('The harvester extension is needed for these tests')
-
- content1 = 'Content 1'
- ho1 = HarvestObject(
- guid='test-ho-1',
- job=HarvestJob(source=HarvestSource(url='http://', type='xx')),
- content=content1)
-
- content2 = 'Content 2'
- original_content2 = 'Original Content 2'
- ho2 = HarvestObject(
- guid='test-ho-2',
- job=HarvestJob(source=HarvestSource(url='http://', type='xx')),
- content=content2)
-
- hoe = HarvestObjectExtra(
- key='original_document',
- value=original_content2,
- object=ho2)
-
- Session.add(ho1)
- Session.add(ho2)
- Session.add(hoe)
- Session.commit()
-
- object_id_1 = ho1.id
- object_id_2 = ho2.id
-
- app = self._get_test_app()
-
- # Test redirects for old URLs
- url = '/api/2/rest/harvestobject/{0}/xml'.format(object_id_1)
- r = app.get(url)
- assert_equals(r.status_int, 301)
- assert ('/harvest/object/{0}'.format(object_id_1)
- in r.headers['Location'])
-
- url = '/api/2/rest/harvestobject/{0}/html'.format(object_id_1)
- r = app.get(url)
- assert_equals(r.status_int, 301)
- assert ('/harvest/object/{0}/html'.format(object_id_1)
- in r.headers['Location'])
-
- # Access object content
- url = '/harvest/object/{0}'.format(object_id_1)
- r = app.get(url)
- assert_equals(r.status_int, 200)
- assert_equals(r.headers['Content-Type'],
- 'application/xml; charset=utf-8')
- assert_equals(
- r.body,
- '\nContent 1')
-
- # Access original content in object extra (if present)
- url = '/harvest/object/{0}/original'.format(object_id_1)
- r = app.get(url, status=404)
- assert_equals(r.status_int, 404)
-
- url = '/harvest/object/{0}/original'.format(object_id_2)
- r = app.get(url)
- assert_equals(r.status_int, 200)
- assert_equals(r.headers['Content-Type'],
- 'application/xml; charset=utf-8')
- assert_equals(
- r.body,
- '\n'
- + 'Original Content 2')
-
- # Access HTML transformation
- url = '/harvest/object/{0}/html'.format(object_id_1)
- r = app.get(url)
- assert_equals(r.status_int, 200)
- assert_equals(r.headers['Content-Type'],
- 'text/html; charset=utf-8')
- assert 'GEMINI record about' in r.body
-
- url = '/harvest/object/{0}/html/original'.format(object_id_1)
- r = app.get(url, status=404)
- assert_equals(r.status_int, 404)
-
- url = '/harvest/object/{0}/html'.format(object_id_2)
- r = app.get(url)
- assert_equals(r.status_int, 200)
- assert_equals(r.headers['Content-Type'],
- 'text/html; charset=utf-8')
- assert 'GEMINI record about' in r.body
-
- url = '/harvest/object/{0}/html/original'.format(object_id_2)
- r = app.get(url)
- assert_equals(r.status_int, 200)
- assert_equals(r.headers['Content-Type'],
- 'text/html; charset=utf-8')
- assert 'GEMINI record about' in r.body
diff --git a/ckanext/spatial/tests/nose/test_csw_client.py b/ckanext/spatial/tests/nose/test_csw_client.py
deleted file mode 100644
index de64323..0000000
--- a/ckanext/spatial/tests/nose/test_csw_client.py
+++ /dev/null
@@ -1,65 +0,0 @@
-import time
-from six.moves.urllib.request import urlopen
-from six.moves.urllib.error import URLError
-import os
-
-from pylons import config
-from nose.plugins.skip import SkipTest
-
-from ckan.model import engine_is_sqlite
-
-
-# copied from ckan/tests/__init__ to save importing it and therefore
-# setting up Pylons.
-class CkanServerCase(object):
- @staticmethod
- def _system(cmd):
- import subprocess
- (status, output) = subprocess.getstatusoutput(cmd)
- if status:
- raise Exception("Couldn't execute cmd: %s: %s" % (cmd, output))
-
- @classmethod
- def _paster(cls, cmd, config_path_rel):
- config_path = os.path.join(config['here'], config_path_rel)
- cls._system('paster --plugin ckan %s --config=%s' % (cmd, config_path))
-
- @staticmethod
- def _start_ckan_server(config_file=None):
- if not config_file:
- config_file = config['__file__']
- config_path = config_file
- import subprocess
- process = subprocess.Popen(['paster', 'serve', config_path])
- return process
-
- @staticmethod
- def _wait_for_url(url='http://127.0.0.1:5000/', timeout=15):
- for i in range(int(timeout)*100):
- try:
- urlopen(url)
- except URLError:
- time.sleep(0.01)
- else:
- break
-
- @staticmethod
- def _stop_ckan_server(process):
- pid = process.pid
- pid = int(pid)
- if os.system("kill -9 %d" % pid):
- raise Exception("Can't kill foreign CKAN instance (pid: %d)." % pid)
-
-class CkanProcess(CkanServerCase):
- @classmethod
- def setup_class(cls):
- if engine_is_sqlite():
- raise SkipTest("Non-memory database needed for this test")
-
- cls.pid = cls._start_ckan_server()
- ## Don't need to init database, since it is same database as this process uses
- cls._wait_for_url()
-
- @classmethod
- def teardown_class(cls):
- cls._stop_ckan_server(cls.pid)
diff --git a/ckanext/spatial/tests/nose/test_harvest.py b/ckanext/spatial/tests/nose/test_harvest.py
deleted file mode 100644
index 2eb0192..0000000
--- a/ckanext/spatial/tests/nose/test_harvest.py
+++ /dev/null
@@ -1,1140 +0,0 @@
-from __future__ import absolute_import
-import os
-from datetime import datetime, date
-import lxml
-import json
-from uuid import uuid4
-from nose.plugins.skip import SkipTest
-from nose.tools import assert_equal, assert_in, assert_raises
-
-from ckan.lib.base import config
-from ckan import model
-from ckan.model import Session, Package, Group, User
-from ckan.logic.schema import default_update_package_schema, default_create_package_schema
-from ckan.logic import get_action
-
-try:
- from ckan.new_tests.helpers import call_action
-except ImportError:
- from ckan.tests.helpers import call_action
-
-from ckanext.harvest.model import (HarvestSource, HarvestJob, HarvestObject)
-from ckanext.spatial.validation import Validators
-from ckanext.spatial.harvesters.gemini import (GeminiDocHarvester,
- GeminiWafHarvester,
- GeminiHarvester)
-from ckanext.spatial.harvesters.base import SpatialHarvester
-from ckanext.spatial.tests.nose.base import SpatialTestBase
-
-from .xml_file_server import serve
-
-# Start simple HTTP server that serves XML test files
-serve()
-
-
-class HarvestFixtureBase(SpatialTestBase):
-
- def setup(self):
- # Add sysadmin user
- harvest_user = model.User(name=u'harvest', password=u'test', sysadmin=True)
- Session.add(harvest_user)
- Session.commit()
-
- package_schema = default_update_package_schema()
- self.context ={'model':model,
- 'session':Session,
- 'user':u'harvest',
- 'schema':package_schema,
- 'api_version': '2'}
-
- def teardown(self):
- model.repo.rebuild_db()
-
- def _create_job(self,source_id):
- # Create a job
- context ={'model':model,
- 'session':Session,
- 'user':u'harvest'}
-
- job_dict=get_action('harvest_job_create')(context,{'source_id':source_id})
- job = HarvestJob.get(job_dict['id'])
- assert job
-
- return job
-
- def _create_source_and_job(self, source_fixture):
- context ={'model':model,
- 'session':Session,
- 'user':u'harvest'}
-
- if config.get('ckan.harvest.auth.profile') == u'publisher' \
- and not 'publisher_id' in source_fixture:
- source_fixture['publisher_id'] = self.publisher.id
-
- source_dict=get_action('harvest_source_create')(context,source_fixture)
- source = HarvestSource.get(source_dict['id'])
- assert source
-
- job = self._create_job(source.id)
-
- return source, job
-
- def _run_job_for_single_document(self,job,force_import=False,expect_gather_errors=False,expect_obj_errors=False):
-
- harvester = GeminiDocHarvester()
-
- harvester.force_import = force_import
-
-
- object_ids = harvester.gather_stage(job)
- assert object_ids, len(object_ids) == 1
- if expect_gather_errors:
- assert len(job.gather_errors) > 0
- else:
- assert len(job.gather_errors) == 0
-
- assert harvester.fetch_stage(object_ids) == True
-
- obj = HarvestObject.get(object_ids[0])
- assert obj, obj.content
-
- harvester.import_stage(obj)
- Session.refresh(obj)
- if expect_obj_errors:
- assert len(obj.errors) > 0
- else:
- assert len(obj.errors) == 0
-
- job.status = u'Finished'
- job.save()
-
- return obj
-
-class TestHarvest(HarvestFixtureBase):
-
- @classmethod
- def setup_class(cls):
- SpatialHarvester._validator = Validators(profiles=['gemini2'])
- HarvestFixtureBase.setup_class()
-
- def clean_tags(self, tags):
- return [{u'name': x['name']} for x in tags]
-
- def find_extra(self, pkg, key):
- values = [e['value'] for e in pkg['extras'] if e['key'] == key]
- return values[0] if len(values) == 1 else None
-
- def test_harvest_basic(self):
-
- # Create source
- source_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1-waf/index.html',
- 'source_type': u'gemini-waf'
- }
-
- source, job = self._create_source_and_job(source_fixture)
-
- harvester = GeminiWafHarvester()
-
- # We need to send an actual job, not the dict
- object_ids = harvester.gather_stage(job)
-
- assert len(object_ids) == 2
-
- # Fetch stage always returns True for Waf harvesters
- assert harvester.fetch_stage(object_ids) == True
-
- objects = []
- for object_id in object_ids:
- obj = HarvestObject.get(object_id)
- assert obj
- objects.append(obj)
- harvester.import_stage(obj)
-
- pkgs = Session.query(Package).filter(Package.type!=u'harvest').all()
-
- assert_equal(len(pkgs), 2)
-
- pkg_ids = [pkg.id for pkg in pkgs]
-
- for obj in objects:
- assert obj.current == True
- assert obj.package_id in pkg_ids
-
- def test_harvest_fields_service(self):
-
- # Create source
- source_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/service1.xml',
- 'source_type': u'gemini-single'
- }
-
- source, job = self._create_source_and_job(source_fixture)
-
- harvester = GeminiDocHarvester()
-
- object_ids = harvester.gather_stage(job)
- assert object_ids, len(object_ids) == 1
-
- # No gather errors
- assert len(job.gather_errors) == 0
-
- # Fetch stage always returns True for Single Doc harvesters
- assert harvester.fetch_stage(object_ids) == True
-
- obj = HarvestObject.get(object_ids[0])
- assert obj, obj.content
- assert obj.guid == u'test-service-1'
-
- harvester.import_stage(obj)
-
- # No object errors
- assert len(obj.errors) == 0
-
- package_dict = get_action('package_show')(self.context,{'id':obj.package_id})
-
- assert package_dict
-
- expected = {
- 'name': u'one-scotland-address-gazetteer-web-map-service-wms',
- 'title': u'One Scotland Address Gazetteer Web Map Service (WMS)',
- 'tags': [{u'name': u'Addresses'}, {u'name': u'Scottish National Gazetteer'}],
- 'notes': u'This service displays its contents at larger scale than 1:10000. [edited]',
- }
-
- package_dict['tags'] = self.clean_tags(package_dict['tags'])
-
- for key,value in expected.items():
- if not package_dict[key] == value:
- raise AssertionError('Unexpected value for %s: %s (was expecting %s)' % \
- (key, package_dict[key], value))
-
- if config.get('ckan.harvest.auth.profile') == u'publisher':
- assert package_dict['groups'] == [self.publisher.id]
-
- expected_extras = {
- # Basic
- 'guid': obj.guid,
- 'UKLP': u'True',
- 'resource-type': u'service',
- 'access_constraints': u'["No restriction on public access"]',
- 'responsible-party': u'The Improvement Service (owner)',
- 'provider':u'The Improvement Service',
- 'contact-email': u'OSGCM@improvementservice.org.uk',
- # Spatial
- 'bbox-east-long': u'0.5242365625',
- 'bbox-north-lat': u'61.0243',
- 'bbox-south-lat': u'54.4764484375',
- 'bbox-west-long': u'-9.099786875',
- 'spatial': u'{"type": "Polygon", "coordinates": [[[0.5242365625, 54.4764484375], [-9.099786875, 54.4764484375], [-9.099786875, 61.0243], [0.5242365625, 61.0243], [0.5242365625, 54.4764484375]]]}',
- # Other
- 'coupled-resource': u'[{"href": ["http://scotgovsdi.edina.ac.uk/srv/en/csw?service=CSW&request=GetRecordById&version=2.0.2&outputSchema=http://www.isotc211.org/2005/gmd&elementSetName=full&id=250ea276-48e2-4189-8a89-fcc4ca92d652"], "uuid": ["250ea276-48e2-4189-8a89-fcc4ca92d652"], "title": []}]',
- 'dataset-reference-date': u'[{"type": "publication", "value": "2011-09-08"}]',
- 'frequency-of-update': u'daily',
- 'licence': u'["Use of the One Scotland Gazetteer data used by this this service is available to any organisation that is a member of the One Scotland Mapping Agreement. It is not currently commercially available", "http://www.test.gov.uk/licenseurl"]',
- 'licence_url': u'http://www.test.gov.uk/licenseurl',
- 'metadata-date': u'2011-09-08T16:07:32',
- 'metadata-language': u'eng',
- 'spatial-data-service-type': u'other',
- 'spatial-reference-system': u'OSGB 1936 / British National Grid (EPSG:27700)',
- 'temporal_coverage-from': u'["1904-06-16"]',
- 'temporal_coverage-to': u'["2004-06-16"]',
- }
-
- for key,value in expected_extras.items():
- extra_value = self.find_extra(package_dict, key)
- if extra_value is None:
- raise AssertionError('Extra %s not present in package' % key)
-
- if not extra_value == value:
- raise AssertionError('Unexpected value for extra %s: %s (was expecting %s)' % \
- (key, package_dict['extras'][key], value))
-
- expected_resource = {
- 'ckan_recommended_wms_preview': 'True',
- 'description': 'Link to the GetCapabilities request for this service',
- 'name': 'Web Map Service (WMS)',
- 'resource_locator_function': 'download',
- 'resource_locator_protocol': 'OGC:WMS-1.3.0-http-get-capabilities',
- 'url': u'http://127.0.0.1:8999/wms/capabilities.xml',
- }
-
- resource = package_dict['resources'][0]
- for key,value in expected_resource.items():
- if not key in resource:
- raise AssertionError('Expected key not in resource: %s' % (key))
- if not resource[key] == value:
- raise AssertionError('Unexpected value in resource for %s: %s (was expecting %s)' % \
- (key, resource[key], value))
- assert datetime.strptime(resource['verified_date'],'%Y-%m-%dT%H:%M:%S.%f').date() == date.today()
- assert resource['format'].lower() == 'wms'
-
- def test_harvest_fields_dataset(self):
-
- # Create source
- source_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/dataset1.xml',
- 'source_type': u'gemini-single'
- }
-
- source, job = self._create_source_and_job(source_fixture)
-
- harvester = GeminiDocHarvester()
-
- object_ids = harvester.gather_stage(job)
- assert object_ids, len(object_ids) == 1
-
- # No gather errors
- assert len(job.gather_errors) == 0
-
- # Fetch stage always returns True for Single Doc harvesters
- assert harvester.fetch_stage(object_ids) == True
-
- obj = HarvestObject.get(object_ids[0])
- assert obj, obj.content
- assert obj.guid == u'test-dataset-1'
-
- harvester.import_stage(obj)
-
- # No object errors
- assert len(obj.errors) == 0
-
- package_dict = get_action('package_show')(self.context,{'id':obj.package_id})
-
- assert package_dict
-
- expected = {
- 'name': u'country-parks-scotland',
- 'title': u'Country Parks (Scotland)',
- 'tags': [{u'name': u'Nature conservation'}],
- 'notes': u'Parks are set up by Local Authorities to provide open-air recreation facilities close to towns and cities. [edited]'
- }
-
- package_dict['tags'] = self.clean_tags(package_dict['tags'])
-
- for key,value in expected.items():
- if not package_dict[key] == value:
- raise AssertionError('Unexpected value for %s: %s (was expecting %s)' % \
- (key, package_dict[key], value))
-
- if config.get('ckan.harvest.auth.profile') == u'publisher':
- assert package_dict['groups'] == [self.publisher.id]
-
- expected_extras = {
- # Basic
- 'guid': obj.guid,
- 'resource-type': u'dataset',
- 'responsible-party': u'Scottish Natural Heritage (custodian, distributor)',
- 'access_constraints': u'["Copyright Scottish Natural Heritage"]',
- 'contact-email': u'data_supply@snh.gov.uk',
- 'provider':'',
- # Spatial
- 'bbox-east-long': u'0.205857204',
- 'bbox-north-lat': u'61.06066944',
- 'bbox-south-lat': u'54.529947158',
- 'bbox-west-long': u'-8.97114288',
- 'spatial': u'{"type": "Polygon", "coordinates": [[[0.205857204, 54.529947158], [-8.97114288, 54.529947158], [-8.97114288, 61.06066944], [0.205857204, 61.06066944], [0.205857204, 54.529947158]]]}',
- # Other
- 'coupled-resource': u'[]',
- 'dataset-reference-date': u'[{"type": "creation", "value": "2004-02"}, {"type": "revision", "value": "2006-07-03"}]',
- 'frequency-of-update': u'irregular',
- 'licence': u'["Reference and PSMA Only", "http://www.test.gov.uk/licenseurl"]',
- 'licence_url': u'http://www.test.gov.uk/licenseurl',
- 'metadata-date': u'2011-09-23T10:06:08',
- 'metadata-language': u'eng',
- 'spatial-reference-system': u'urn:ogc:def:crs:EPSG::27700',
- 'temporal_coverage-from': u'["1998"]',
- 'temporal_coverage-to': u'["2010"]',
- }
-
- for key, value in expected_extras.items():
- extra_value = self.find_extra(package_dict, key)
- if extra_value is None:
- raise AssertionError('Extra %s not present in package' % key)
-
- if not extra_value == value:
- raise AssertionError('Unexpected value for extra %s: %s (was expecting %s)' % \
- (key, package_dict['extras'][key], value))
-
- expected_resource = {
- 'description': 'Test Resource Description',
- 'format': u'',
- 'name': 'Test Resource Name',
- 'resource_locator_function': 'download',
- 'resource_locator_protocol': 'test-protocol',
- 'url': u'https://gateway.snh.gov.uk/pls/apex_ddtdb2/f?p=101',
- }
-
- resource = package_dict['resources'][0]
- for key,value in expected_resource.items():
- if not resource[key] == value:
- raise AssertionError('Unexpected value in resource for %s: %s (was expecting %s)' % \
- (key, resource[key], value))
-
- def test_harvest_error_bad_xml(self):
- # Create source
- source_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/error_bad_xml.xml',
- 'source_type': u'gemini-single'
- }
-
- source, job = self._create_source_and_job(source_fixture)
-
- harvester = GeminiDocHarvester()
-
- try:
- object_ids = harvester.gather_stage(job)
- except lxml.etree.XMLSyntaxError:
- # this only occurs in debug_exception_mode
- pass
- else:
- assert object_ids is None
-
- # Check gather errors
- assert len(job.gather_errors) == 1
- assert job.gather_errors[0].harvest_job_id == job.id
- assert 'Error parsing the document' in job.gather_errors[0].message
-
- def test_harvest_error_404(self):
- # Create source
- source_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/not_there.xml',
- 'source_type': u'gemini-single'
- }
-
- source, job = self._create_source_and_job(source_fixture)
-
- harvester = GeminiDocHarvester()
-
- object_ids = harvester.gather_stage(job)
- assert object_ids is None
-
- # Check gather errors
- assert len(job.gather_errors) == 1
- assert job.gather_errors[0].harvest_job_id == job.id
- assert 'Unable to get content for URL' in job.gather_errors[0].message
-
- def test_harvest_error_validation(self):
-
- # Create source
- source_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/error_validation.xml',
- 'source_type': u'gemini-single'
- }
-
- source, job = self._create_source_and_job(source_fixture)
-
- harvester = GeminiDocHarvester()
-
- object_ids = harvester.gather_stage(job)
-
- # Right now the import process goes ahead even with validation errors
- assert object_ids, len(object_ids) == 1
-
- # No gather errors
- assert len(job.gather_errors) == 0
-
- # Fetch stage always returns True for Single Doc harvesters
- assert harvester.fetch_stage(object_ids) == True
-
- obj = HarvestObject.get(object_ids[0])
- assert obj, obj.content
- assert obj.guid == u'test-error-validation-1'
-
- harvester.import_stage(obj)
-
- # Check errors
- assert len(obj.errors) == 1
- assert obj.errors[0].harvest_object_id == obj.id
-
- message = obj.errors[0].message
-
- assert_in('One email address shall be provided', message)
- assert_in('Service type shall be one of \'discovery\', \'view\', \'download\', \'transformation\', \'invoke\' or \'other\' following INSPIRE generic names', message)
- assert_in('Limitations on public access code list value shall be \'otherRestrictions\'', message)
- assert_in('One organisation name shall be provided', message)
-
-
- def test_harvest_update_records(self):
-
- # Create source
- source_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/dataset1.xml',
- 'source_type': u'gemini-single'
- }
-
- source, first_job = self._create_source_and_job(source_fixture)
-
- first_obj = self._run_job_for_single_document(first_job)
-
- first_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
-
- # Package was created
- assert first_package_dict
- assert first_obj.current == True
- assert first_obj.package
-
- # Create and run a second job, the package should not be updated
- second_job = self._create_job(source.id)
-
- second_obj = self._run_job_for_single_document(second_job)
-
- Session.remove()
- Session.add(first_obj)
- Session.add(second_obj)
-
- Session.refresh(first_obj)
- Session.refresh(second_obj)
-
- second_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
-
- # Package was not updated
- assert second_package_dict, first_package_dict['id'] == second_package_dict['id']
- assert not second_obj.package, not second_obj.package_id
- assert second_obj.current == False, first_obj.current == True
-
- # Create and run a third job, forcing the importing to simulate an update in the package
- third_job = self._create_job(source.id)
- third_obj = self._run_job_for_single_document(third_job,force_import=True)
-
- # For some reason first_obj does not get updated after the import_stage,
- # and we have to force a refresh to get the actual DB values.
- Session.remove()
- Session.add(first_obj)
- Session.add(second_obj)
- Session.add(third_obj)
-
- Session.refresh(first_obj)
- Session.refresh(second_obj)
- Session.refresh(third_obj)
-
- third_package_dict = get_action('package_show')(self.context,{'id':third_obj.package_id})
-
- # Package was updated
- assert third_package_dict, first_package_dict['id'] == third_package_dict['id']
- assert third_obj.package, third_obj.package_id == first_package_dict['id']
- assert third_obj.current == True
- assert second_obj.current == False
- assert first_obj.current == False
-
- def test_harvest_deleted_record(self):
-
- # Create source
- source_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/service1.xml',
- 'source_type': u'gemini-single'
- }
-
- source, first_job = self._create_source_and_job(source_fixture)
-
- first_obj = self._run_job_for_single_document(first_job)
-
- first_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
-
- # Package was created
- assert first_package_dict
- assert first_package_dict['state'] == u'active'
- assert first_obj.current == True
-
- # Delete package
- first_package_dict['state'] = u'deleted'
- self.context.update({'id':first_package_dict['id']})
- updated_package_dict = get_action('package_update')(self.context,first_package_dict)
-
- # Create and run a second job, the date has not changed, so the package should not be updated
- # and remain deleted
- first_job.status = u'Finished'
- first_job.save()
- second_job = self._create_job(source.id)
-
- second_obj = self._run_job_for_single_document(second_job)
-
- second_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
-
- # Package was not updated
- assert second_package_dict, updated_package_dict['id'] == second_package_dict['id']
- assert not second_obj.package, not second_obj.package_id
- assert second_obj.current == False, first_obj.current == True
-
-
- # Harvest an updated document, with a more recent modified date, package should be
- # updated and reactivated
- source.url = u'http://127.0.0.1:8999/gemini2.1/service1_newer.xml'
- source.save()
-
- third_job = self._create_job(source.id)
-
- third_obj = self._run_job_for_single_document(third_job)
-
- third_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
-
- Session.remove()
- Session.add(first_obj)
- Session.add(second_obj)
- Session.add(third_obj)
-
- Session.refresh(first_obj)
- Session.refresh(second_obj)
- Session.refresh(third_obj)
-
- # Package was updated
- assert third_package_dict, third_package_dict['id'] == second_package_dict['id']
- assert third_obj.package, third_obj.package
- assert third_obj.current == True, second_obj.current == False
- assert first_obj.current == False
-
- assert 'NEWER' in third_package_dict['title']
- assert third_package_dict['state'] == u'active'
-
-
-
- def test_harvest_different_sources_same_document(self):
-
- # Create source1
- source1_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/source1/same_dataset.xml',
- 'source_type': u'gemini-single'
- }
-
- source1, first_job = self._create_source_and_job(source1_fixture)
-
- first_obj = self._run_job_for_single_document(first_job)
-
- first_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
-
- # Package was created
- assert first_package_dict
- assert first_package_dict['state'] == u'active'
- assert first_obj.current == True
-
- # Harvest the same document, unchanged, from another source, the package
- # is not updated.
- # (As of https://github.com/okfn/ckanext-inspire/commit/9fb67
- # we are no longer throwing an exception when this happens)
- source2_fixture = {
- 'title': 'Test Source 2',
- 'name': 'test-source-2',
- 'url': u'http://127.0.0.1:8999/gemini2.1/source2/same_dataset.xml',
- 'source_type': u'gemini-single'
- }
-
- source2, second_job = self._create_source_and_job(source2_fixture)
-
- second_obj = self._run_job_for_single_document(second_job)
-
- second_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
-
- # Package was not updated
- assert second_package_dict, first_package_dict['id'] == second_package_dict['id']
- assert not second_obj.package, not second_obj.package_id
- assert second_obj.current == False, first_obj.current == True
-
- # Inactivate source1 and reharvest from source2, package should be updated
- third_job = self._create_job(source2.id)
- third_obj = self._run_job_for_single_document(third_job,force_import=True)
-
- Session.remove()
- Session.add(first_obj)
- Session.add(second_obj)
- Session.add(third_obj)
-
- Session.refresh(first_obj)
- Session.refresh(second_obj)
- Session.refresh(third_obj)
-
- third_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
-
- # Package was updated
- assert third_package_dict, first_package_dict['id'] == third_package_dict['id']
- assert third_obj.package, third_obj.package_id == first_package_dict['id']
- assert third_obj.current == True
- assert second_obj.current == False
- assert first_obj.current == False
-
-
- def test_harvest_different_sources_same_document_but_deleted_inbetween(self):
-
- # Create source1
- source1_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/source1/same_dataset.xml',
- 'source_type': u'gemini-single'
- }
-
- source1, first_job = self._create_source_and_job(source1_fixture)
-
- first_obj = self._run_job_for_single_document(first_job)
-
- first_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
-
- # Package was created
- assert first_package_dict
- assert first_package_dict['state'] == u'active'
- assert first_obj.current == True
-
- # Delete/withdraw the package
- first_package_dict = get_action('package_delete')(self.context,{'id':first_obj.package_id})
- first_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
-
- # Harvest the same document, unchanged, from another source
- source2_fixture = {
- 'title': 'Test Source 2',
- 'name': 'test-source-2',
- 'url': u'http://127.0.0.1:8999/gemini2.1/source2/same_dataset.xml',
- 'source_type': u'gemini-single'
- }
-
- source2, second_job = self._create_source_and_job(source2_fixture)
-
- second_obj = self._run_job_for_single_document(second_job)
-
- second_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
-
- # It would be good if the package was updated, but we see that it isn't
- assert second_package_dict, first_package_dict['id'] == second_package_dict['id']
- assert not second_obj.package
- assert second_obj.current == False
- assert first_obj.current == True
-
-
- def test_harvest_moves_sources(self):
-
- # Create source1
- source1_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/service1.xml',
- 'source_type': u'gemini-single'
- }
-
- source1, first_job = self._create_source_and_job(source1_fixture)
-
- first_obj = self._run_job_for_single_document(first_job)
-
- first_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
-
- # Package was created
- assert first_package_dict
- assert first_package_dict['state'] == u'active'
- assert first_obj.current == True
-
- # Harvest the same document GUID but with a newer date, from another source.
- source2_fixture = {
- 'title': 'Test Source 2',
- 'name': 'test-source-2',
- 'url': u'http://127.0.0.1:8999/gemini2.1/service1_newer.xml',
- 'source_type': u'gemini-single'
- }
-
- source2, second_job = self._create_source_and_job(source2_fixture)
-
- second_obj = self._run_job_for_single_document(second_job)
-
- second_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
-
- # Now we have two packages
- assert second_package_dict, first_package_dict['id'] == second_package_dict['id']
- assert second_obj.package
- assert second_obj.current == True
- assert first_obj.current == True
- # so currently, if you move a Gemini between harvest sources you need
- # to update the date to get it to reharvest, and then you should
- # withdraw the package relating to the original harvest source.
-
-
- def test_harvest_import_command(self):
-
- # Create source
- source_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/dataset1.xml',
- 'source_type': u'gemini-single'
- }
-
- source, first_job = self._create_source_and_job(source_fixture)
-
- first_obj = self._run_job_for_single_document(first_job)
-
- before_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
-
- # Package was created
- assert before_package_dict
- assert first_obj.current == True
- assert first_obj.package
-
- # Create and run two more jobs, the package should not be updated
- second_job = self._create_job(source.id)
- second_obj = self._run_job_for_single_document(second_job)
- third_job = self._create_job(source.id)
- third_obj = self._run_job_for_single_document(third_job)
-
- # Run the import command manually
- imported_objects = get_action('harvest_objects_import')(self.context,{'source_id':source.id})
- Session.remove()
- Session.add(first_obj)
- Session.add(second_obj)
- Session.add(third_obj)
-
- Session.refresh(first_obj)
- Session.refresh(second_obj)
- Session.refresh(third_obj)
-
- after_package_dict = get_action('package_show')(self.context,{'id':first_obj.package_id})
-
- # Package was updated, and the current object remains the same
- assert after_package_dict, before_package_dict['id'] == after_package_dict['id']
- assert third_obj.current == False
- assert second_obj.current == False
- assert first_obj.current == True
-
-
- source_dict = get_action('harvest_source_show')(self.context,{'id':source.id})
- assert source_dict['status']['total_datasets'] == 1
-
- def test_clean_tags(self):
-
- # Create source
- source_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/dataset1.xml',
- 'source_type': u'gemini-single',
- 'owner_org': 'test-org',
- 'metadata_created': datetime.now().strftime('%YYYY-%MM-%DD %HH:%MM:%s'),
- 'metadata_modified': datetime.now().strftime('%YYYY-%MM-%DD %HH:%MM:%s'),
-
- }
-
- user = User.get('dummy')
- if not user:
- user = call_action('user_create',
- name='dummy',
- password='dummybummy',
- email='dummy@dummy.com')
- user_name = user['name']
- else:
- user_name = user.name
- org = Group.by_name('test-org')
- if org is None:
- org = call_action('organization_create',
- context={'user': user_name},
- name='test-org')
- existing_g = Group.by_name('existing-group')
- if existing_g is None:
- existing_g = call_action('group_create',
- context={'user': user_name},
- name='existing-group')
-
- context = {'user': 'dummy'}
- package_schema = default_update_package_schema()
- context['schema'] = package_schema
- package_dict = {'frequency': 'manual',
- 'publisher_name': 'dummy',
- 'extras': [{'key':'theme', 'value':['non-mappable', 'thememap1']}],
- 'groups': [],
- 'title': 'fakename',
- 'holder_name': 'dummy',
- 'holder_identifier': 'dummy',
- 'name': 'fakename',
- 'notes': 'dummy',
- 'owner_org': 'test-org',
- 'modified': datetime.now(),
- 'publisher_identifier': 'dummy',
- 'metadata_created' : datetime.now(),
- 'metadata_modified' : datetime.now(),
- 'guid': str(uuid4()),
- 'identifier': 'dummy'}
-
- package_data = call_action('package_create', context=context, **package_dict)
-
- package = Package.get('fakename')
- source, job = self._create_source_and_job(source_fixture)
- job.package = package
- job.guid = uuid4()
- harvester = SpatialHarvester()
- with open(os.path.join('..', 'data', 'dataset.json')) as f:
- dataset = json.load(f)
-
- # long tags are invalid in all cases
- TAG_LONG_INVALID = 'abcdefghij' * 20
- # if clean_tags is not set to true, tags will be truncated to 50 chars
- TAG_LONG_VALID = TAG_LONG_INVALID[:50]
- # default truncate to 100
- TAG_LONG_VALID_LONG = TAG_LONG_INVALID[:100]
-
- assert len(TAG_LONG_VALID) == 50
- assert TAG_LONG_VALID[-1] == 'j'
- TAG_CHARS_INVALID = 'Pretty-inv@lid.tag!'
- TAG_CHARS_VALID = 'pretty-invlidtag'
-
- dataset['tags'].append(TAG_LONG_INVALID)
- dataset['tags'].append(TAG_CHARS_INVALID)
-
- harvester.source_config = {'clean_tags': False}
- out = harvester.get_package_dict(dataset, job)
- tags = out['tags']
-
- # no clean tags, so invalid chars are in
- # but tags are truncated to 50 chars
- assert {'name': TAG_CHARS_VALID} not in tags
- assert {'name': TAG_CHARS_INVALID} in tags
- assert {'name': TAG_LONG_VALID_LONG} in tags
- assert {'name': TAG_LONG_INVALID} not in tags
-
- harvester.source_config = {'clean_tags': True}
-
- out = harvester.get_package_dict(dataset, job)
- tags = out['tags']
- assert {'name': TAG_CHARS_VALID} in tags
- assert {'name': TAG_LONG_VALID_LONG} in tags
-
-
-BASIC_GEMINI = '''
-
- e269743a-cfda-4632-a939-0c8416ae801e
-
-
- service
-
-'''
-GUID = 'e269743a-cfda-4632-a939-0c8416ae801e'
-GEMINI_MISSING_GUID = ''''''
-
-class TestGatherMethods(HarvestFixtureBase):
- def setup(self):
- HarvestFixtureBase.setup(self)
- # Create source
- source_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/dataset1.xml',
- 'source_type': u'gemini-single'
- }
- source, job = self._create_source_and_job(source_fixture)
- self.harvester = GeminiHarvester()
- self.harvester.harvest_job = job
-
- def teardown(self):
- model.repo.rebuild_db()
-
- def test_get_gemini_string_and_guid(self):
- res = self.harvester.get_gemini_string_and_guid(BASIC_GEMINI, url=None)
- assert_equal(res, (BASIC_GEMINI, GUID))
-
- def test_get_gemini_string_and_guid__no_guid(self):
- res = self.harvester.get_gemini_string_and_guid(GEMINI_MISSING_GUID, url=None)
- assert_equal(res, (GEMINI_MISSING_GUID, ''))
-
- def test_get_gemini_string_and_guid__non_parsing(self):
- content = '<gmd:MD_Metadata>' # no closing tag
- assert_raises(lxml.etree.XMLSyntaxError, self.harvester.get_gemini_string_and_guid, content)
-
- def test_get_gemini_string_and_guid__empty(self):
- content = ''
- assert_raises(lxml.etree.XMLSyntaxError, self.harvester.get_gemini_string_and_guid, content)
-
-class TestImportStageTools(object):
- def test_licence_url_normal(self):
- assert_equal(GeminiHarvester._extract_first_licence_url(
- ['Reference and PSMA Only',
- 'http://www.test.gov.uk/licenseurl']),
- 'http://www.test.gov.uk/licenseurl')
-
- def test_licence_url_multiple_urls(self):
- # only the first URL is extracted
- assert_equal(GeminiHarvester._extract_first_licence_url(
- ['Reference and PSMA Only',
- 'http://www.test.gov.uk/licenseurl',
- 'http://www.test.gov.uk/2nd_licenseurl']),
- 'http://www.test.gov.uk/licenseurl')
-
- def test_licence_url_embedded(self):
- # URL is embedded within the text field and not extracted
- assert_equal(GeminiHarvester._extract_first_licence_url(
- ['Reference and PSMA Only http://www.test.gov.uk/licenseurl']),
- None)
-
- def test_licence_url_embedded_at_start(self):
- # URL is embedded at the start of the text field and the
- # whole field is returned. Noting this unusual behaviour
- assert_equal(GeminiHarvester._extract_first_licence_url(
- ['http://www.test.gov.uk/licenseurl Reference and PSMA Only']),
- 'http://www.test.gov.uk/licenseurl Reference and PSMA Only')
-
- def test_responsible_organisation_basic(self):
- responsible_organisation = [{'organisation-name': 'Ordnance Survey',
- 'role': 'owner'},
- {'organisation-name': 'Maps Ltd',
- 'role': 'distributor'}]
- assert_equal(GeminiHarvester._process_responsible_organisation(responsible_organisation),
- ('Ordnance Survey', ['Maps Ltd (distributor)',
- 'Ordnance Survey (owner)']))
-
- def test_responsible_organisation_publisher(self):
- # no owner, so falls back to publisher
- responsible_organisation = [{'organisation-name': 'Ordnance Survey',
- 'role': 'publisher'},
- {'organisation-name': 'Maps Ltd',
- 'role': 'distributor'}]
- assert_equal(GeminiHarvester._process_responsible_organisation(responsible_organisation),
- ('Ordnance Survey', ['Maps Ltd (distributor)',
- 'Ordnance Survey (publisher)']))
-
- def test_responsible_organisation_owner(self):
- # provider is the owner (ignores publisher)
- responsible_organisation = [{'organisation-name': 'Ordnance Survey',
- 'role': 'publisher'},
- {'organisation-name': 'Owner',
- 'role': 'owner'},
- {'organisation-name': 'Maps Ltd',
- 'role': 'distributor'}]
- assert_equal(GeminiHarvester._process_responsible_organisation(responsible_organisation),
- ('Owner', ['Owner (owner)',
- 'Maps Ltd (distributor)',
- 'Ordnance Survey (publisher)',
- ]))
-
- def test_responsible_organisation_multiple_roles(self):
- # provider is the owner (ignores publisher)
- responsible_organisation = [{'organisation-name': 'Ordnance Survey',
- 'role': 'publisher'},
- {'organisation-name': 'Ordnance Survey',
- 'role': 'custodian'},
- {'organisation-name': 'Distributor',
- 'role': 'distributor'}]
- assert_equal(GeminiHarvester._process_responsible_organisation(responsible_organisation),
- ('Ordnance Survey', ['Distributor (distributor)',
- 'Ordnance Survey (publisher, custodian)',
- ]))
-
- def test_responsible_organisation_blank_provider(self):
- # no owner or publisher, so blank provider
- responsible_organisation = [{'organisation-name': 'Ordnance Survey',
- 'role': 'resourceProvider'},
- {'organisation-name': 'Maps Ltd',
- 'role': 'distributor'}]
- assert_equal(GeminiHarvester._process_responsible_organisation(responsible_organisation),
- ('', ['Maps Ltd (distributor)',
- 'Ordnance Survey (resourceProvider)']))
-
- def test_responsible_organisation_blank(self):
- # no owner or publisher, so blank provider
- responsible_organisation = []
- assert_equal(GeminiHarvester._process_responsible_organisation(responsible_organisation),
- ('', []))
-
-
-class TestValidation(HarvestFixtureBase):
-
- @classmethod
- def setup_class(cls):
-
- # TODO: Fix these tests, broken since 27c4ee81e
- raise SkipTest('Validation tests not working since 27c4ee81e')
-
- SpatialHarvester._validator = Validators(profiles=['iso19139eden', 'constraints', 'gemini2'])
- HarvestFixtureBase.setup_class()
-
- def get_validation_errors(self, validation_test_filename):
- # Create source
- source_fixture = {
- 'title': 'Test Source',
- 'name': 'test-source',
- 'url': u'http://127.0.0.1:8999/gemini2.1/validation/%s' % validation_test_filename,
- 'source_type': u'gemini-single'
- }
-
- source, job = self._create_source_and_job(source_fixture)
-
- harvester = GeminiDocHarvester()
-
- # Gather stage for GeminiDocHarvester includes validation
- object_ids = harvester.gather_stage(job)
-
-
- # Check the validation errors
- errors = '; '.join([gather_error.message for gather_error in job.gather_errors])
- return errors
-
- def test_01_dataset_fail_iso19139_schema(self):
- errors = self.get_validation_errors('01_Dataset_Invalid_XSD_No_Such_Element.xml')
- assert len(errors) > 0
- assert_in('Could not get the GUID', errors)
-
- def test_02_dataset_fail_constraints_schematron(self):
- errors = self.get_validation_errors('02_Dataset_Invalid_19139_Missing_Data_Format.xml')
- assert len(errors) > 0
- assert_in('MD_Distribution / MD_Format: count(distributionFormat + distributorFormat) > 0', errors)
-
- def test_03_dataset_fail_gemini_schematron(self):
- errors = self.get_validation_errors('03_Dataset_Invalid_GEMINI_Missing_Keyword.xml')
- assert len(errors) > 0
- assert_in('Descriptive keywords are mandatory', errors)
-
- def test_04_dataset_valid(self):
- errors = self.get_validation_errors('04_Dataset_Valid.xml')
- assert len(errors) == 0
-
- def test_05_series_fail_iso19139_schema(self):
- errors = self.get_validation_errors('05_Series_Invalid_XSD_No_Such_Element.xml')
- assert len(errors) > 0
- assert_in('Could not get the GUID', errors)
-
- def test_06_series_fail_constraints_schematron(self):
- errors = self.get_validation_errors('06_Series_Invalid_19139_Missing_Data_Format.xml')
- assert len(errors) > 0
- assert_in('MD_Distribution / MD_Format: count(distributionFormat + distributorFormat) > 0', errors)
-
- def test_07_series_fail_gemini_schematron(self):
- errors = self.get_validation_errors('07_Series_Invalid_GEMINI_Missing_Keyword.xml')
- assert len(errors) > 0
- assert_in('Descriptive keywords are mandatory', errors)
-
- def test_08_series_valid(self):
- errors = self.get_validation_errors('08_Series_Valid.xml')
- assert len(errors) == 0
-
- def test_09_service_fail_iso19139_schema(self):
- errors = self.get_validation_errors('09_Service_Invalid_No_Such_Element.xml')
- assert len(errors) > 0
- assert_in('Could not get the GUID', errors)
-
- def test_10_service_fail_constraints_schematron(self):
- errors = self.get_validation_errors('10_Service_Invalid_19139_Level_Description.xml')
- assert len(errors) > 0
- assert_in("DQ_Scope: 'levelDescription' is mandatory if 'level' notEqual 'dataset' or 'series'.", errors)
-
- def test_11_service_fail_gemini_schematron(self):
- errors = self.get_validation_errors('11_Service_Invalid_GEMINI_Service_Type.xml')
- assert len(errors) > 0
- assert_in("Service type shall be one of 'discovery', 'view', 'download', 'transformation', 'invoke' or 'other' following INSPIRE generic names.", errors)
-
- def test_12_service_valid(self):
- errors = self.get_validation_errors('12_Service_Valid.xml')
- assert len(errors) == 0, errors
-
- def test_13_dataset_fail_iso19139_schema_2(self):
- # This test Dataset has srv tags and only Service metadata should.
- errors = self.get_validation_errors('13_Dataset_Invalid_Element_srv.xml')
- assert len(errors) > 0
- assert_in('Element \'{http://www.isotc211.org/2005/srv}SV_ServiceIdentification\': This element is not expected.', errors)
diff --git a/ckanext/spatial/tests/nose/test_plugin/__init__.py b/ckanext/spatial/tests/nose/test_plugin/__init__.py
deleted file mode 100644
index 2e2033b..0000000
--- a/ckanext/spatial/tests/nose/test_plugin/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# this is a namespace package
-try:
- import pkg_resources
- pkg_resources.declare_namespace(__name__)
-except ImportError:
- import pkgutil
- __path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/ckanext/spatial/tests/nose/test_plugin/plugin.py b/ckanext/spatial/tests/nose/test_plugin/plugin.py
deleted file mode 100644
index 2aa5a3d..0000000
--- a/ckanext/spatial/tests/nose/test_plugin/plugin.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from ckan import plugins as p
-
-
-class TestSpatialPlugin(p.SingletonPlugin):
-
- p.implements(p.IConfigurer, inherit=True)
-
- def update_config(self, config):
- p.toolkit.add_template_directory(config, 'templates')
diff --git a/ckanext/spatial/tests/nose/test_plugin/templates/package/read_base.html b/ckanext/spatial/tests/nose/test_plugin/templates/package/read_base.html
deleted file mode 100644
index 02a5cb6..0000000
--- a/ckanext/spatial/tests/nose/test_plugin/templates/package/read_base.html
+++ /dev/null
@@ -1,11 +0,0 @@
-{% ckan_extends %}
-
-{% block secondary_content %}
- {{ super() }}
-
- {% set dataset_extent = h.get_pkg_dict_extra(c.pkg_dict, 'spatial', '') %}
- {% if dataset_extent %}
- {% snippet "spatial/snippets/dataset_map_sidebar.html", extent=dataset_extent %}
- {% endif %}
-
-{% endblock %}
diff --git a/ckanext/spatial/tests/nose/test_plugin/templates/package/search.html b/ckanext/spatial/tests/nose/test_plugin/templates/package/search.html
deleted file mode 100644
index 2d80283..0000000
--- a/ckanext/spatial/tests/nose/test_plugin/templates/package/search.html
+++ /dev/null
@@ -1,9 +0,0 @@
-{% ckan_extends %}
-
-{% block secondary_content %}
-
- {% snippet "spatial/snippets/spatial_query.html" %}
-
- {{ super() }}
-
-{% endblock %}
diff --git a/ckanext/spatial/tests/nose/test_validation.py b/ckanext/spatial/tests/nose/test_validation.py
deleted file mode 100644
index f707e90..0000000
--- a/ckanext/spatial/tests/nose/test_validation.py
+++ /dev/null
@@ -1,153 +0,0 @@
-import os
-
-from lxml import etree
-from nose.tools import assert_equal, assert_in
-
-from ckanext.spatial import validation
-
-# other validation tests are in test_harvest.py
-
-class TestValidation(object):
-
- def _get_file_path(self, file_name):
- return os.path.join(os.path.dirname(__file__), 'xml', file_name)
-
- def get_validation_errors(self, validator, validation_test_filename):
- validation_test_filepath = self._get_file_path(validation_test_filename)
- xml = etree.parse(validation_test_filepath)
- is_valid, errors = validator.is_valid(xml)
-
- return ';'.join([e[0] for e in errors])
-
- def test_iso19139_failure(self):
- errors = self.get_validation_errors(validation.ISO19139Schema,
- 'iso19139/dataset-invalid.xml')
-
- assert len(errors) > 0
- assert_in('Dataset schema (gmx.xsd)', errors)
- assert_in('{http://www.isotc211.org/2005/gmd}nosuchelement\': This element is not expected.', errors)
-
- def test_iso19139_pass(self):
- errors = self.get_validation_errors(validation.ISO19139Schema,
- 'iso19139/dataset.xml')
- assert_equal(errors, '')
-
- # Gemini2.1 tests are basically the same as those in test_harvest.py, but
- # a few small differences make them worth keeping rather than removing
- # them in favour of test_harvest
-
- def test_01_dataset_fail_iso19139_schema(self):
- errors = self.get_validation_errors(validation.ISO19139EdenSchema,
- 'gemini2.1/validation/01_Dataset_Invalid_XSD_No_Such_Element.xml')
- assert len(errors) > 0
- assert_in('(gmx.xsd)', errors)
- assert_in('\'{http://www.isotc211.org/2005/gmd}nosuchelement\': This element is not expected.', errors)
-
- def test_02_dataset_fail_constraints_schematron(self):
- errors = self.get_validation_errors(validation.ConstraintsSchematron14,
- 'gemini2.1/validation/02_Dataset_Invalid_19139_Missing_Data_Format.xml')
- assert len(errors) > 0
- assert_in('MD_Distribution / MD_Format: count(distributionFormat + distributorFormat) > 0', errors)
-
- def test_03_dataset_fail_gemini_schematron(self):
- errors = self.get_validation_errors(validation.Gemini2Schematron,
- 'gemini2.1/validation/03_Dataset_Invalid_GEMINI_Missing_Keyword.xml')
- assert len(errors) > 0
- assert_in('Descriptive keywords are mandatory', errors)
-
- def assert_passes_all_gemini2_1_validation(self, xml_filepath):
- errs = self.get_validation_errors(validation.ISO19139EdenSchema,
- xml_filepath)
- assert not errs, 'ISO19139EdenSchema: ' + errs
- errs = self.get_validation_errors(validation.ConstraintsSchematron14,
- xml_filepath)
- assert not errs, 'ConstraintsSchematron14: ' + errs
- errs = self.get_validation_errors(validation.Gemini2Schematron,
- xml_filepath)
- assert not errs, 'Gemini2Schematron: ' + errs
-
- def test_04_dataset_valid(self):
- self.assert_passes_all_gemini2_1_validation('gemini2.1/validation/04_Dataset_Valid.xml')
-
- def test_05_series_fail_iso19139_schema(self):
- errors = self.get_validation_errors(validation.ISO19139EdenSchema,
- 'gemini2.1/validation/05_Series_Invalid_XSD_No_Such_Element.xml')
- assert len(errors) > 0
- assert_in('(gmx.xsd)', errors)
- assert_in('\'{http://www.isotc211.org/2005/gmd}nosuchelement\': This element is not expected.', errors)
-
- def test_06_series_fail_constraints_schematron(self):
- errors = self.get_validation_errors(validation.ConstraintsSchematron14,
- 'gemini2.1/validation/06_Series_Invalid_19139_Missing_Data_Format.xml')
- assert len(errors) > 0
- assert_in('MD_Distribution / MD_Format: count(distributionFormat + distributorFormat) > 0', errors)
-
- def test_07_series_fail_gemini_schematron(self):
- errors = self.get_validation_errors(validation.Gemini2Schematron,
- 'gemini2.1/validation/07_Series_Invalid_GEMINI_Missing_Keyword.xml')
- assert len(errors) > 0
- assert_in('Descriptive keywords are mandatory', errors)
-
- def test_08_series_valid(self):
- self.assert_passes_all_gemini2_1_validation('gemini2.1/validation/08_Series_Valid.xml')
-
- def test_09_service_fail_iso19139_schema(self):
- errors = self.get_validation_errors(validation.ISO19139EdenSchema,
- 'gemini2.1/validation/09_Service_Invalid_No_Such_Element.xml')
- assert len(errors) > 0
- assert_in('(gmx.xsd & srv.xsd)', errors)
- assert_in('\'{http://www.isotc211.org/2005/gmd}nosuchelement\': This element is not expected.', errors)
-
- def test_10_service_fail_constraints_schematron(self):
- errors = self.get_validation_errors(validation.ConstraintsSchematron14,
- 'gemini2.1/validation/10_Service_Invalid_19139_Level_Description.xml')
- assert len(errors) > 0
- assert_in("DQ_Scope: 'levelDescription' is mandatory if 'level' notEqual 'dataset' or 'series'.", errors)
-
- def test_11_service_fail_gemini_schematron(self):
- errors = self.get_validation_errors(validation.Gemini2Schematron,
- 'gemini2.1/validation/11_Service_Invalid_GEMINI_Service_Type.xml')
- assert len(errors) > 0
- assert_in("Service type shall be one of 'discovery', 'view', 'download', 'transformation', 'invoke' or 'other' following INSPIRE generic names.", errors)
-
- def test_12_service_valid(self):
- self.assert_passes_all_gemini2_1_validation('gemini2.1/validation/12_Service_Valid.xml')
-
- def test_13_dataset_fail_iso19139_schema_2(self):
- # This test Dataset has srv tags, but only Service metadata should contain them.
- errors = self.get_validation_errors(validation.ISO19139EdenSchema,
- 'gemini2.1/validation/13_Dataset_Invalid_Element_srv.xml')
- assert len(errors) > 0
- assert_in('(gmx.xsd)', errors)
- assert_in('Element \'{http://www.isotc211.org/2005/srv}SV_ServiceIdentification\': This element is not expected.', errors)
-
- def test_schematron_error_extraction(self):
- validation_error_xml = '''
-
-
-
- Service type shall be one of 'discovery', 'view', 'download', 'transformation', 'invoke' or 'other' following INSPIRE generic names.
-
-
-
-'''
- failure_xml = etree.fromstring(validation_error_xml)
- fail_element = failure_xml.getchildren()[0]
- details = validation.SchematronValidator.extract_error_details(fail_element)
- if isinstance(details, tuple):
- details = details[1]
- assert_in("srv:serviceType/*[1] = 'discovery'", details)
- assert_in("/*[local-name()='MD_Metadata'", details)
- assert_in("Service type shall be one of 'discovery'", details)
-
-
- def test_error_line_numbers(self):
- file_path = self._get_file_path('iso19139/dataset-invalid.xml')
- xml = etree.parse(file_path)
- is_valid, profile, errors = validation.Validators(profiles=['iso19139']).is_valid(xml)
- assert not is_valid
- assert len(errors) == 2
-
- message, line = errors[1]
- assert 'This element is not expected' in message
- assert line == 3
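Beyond the per-validator checks, the deleted test_error_line_numbers test documents the shape of the higher-level API: validation.Validators(profiles=[...]).is_valid(xml) returns the failing profile alongside the error list, and each error is a (message, line) pair. A minimal standalone sketch of driving that API directly, assuming the same xml/ fixture directory sits next to the calling module:

    import os

    from lxml import etree

    from ckanext.spatial import validation

    # mirrors the deleted _get_file_path helper
    here = os.path.dirname(__file__)
    xml = etree.parse(os.path.join(here, 'xml', 'iso19139', 'dataset-invalid.xml'))

    is_valid, profile, errors = validation.Validators(profiles=['iso19139']).is_valid(xml)

    if not is_valid:
        # each error is a (message, line) tuple, e.g. the "This element is not
        # expected" schema error that the deleted test expects on line 3
        for message, line in errors:
            print('%s: line %s: %s' % (profile, line, message))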
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1-waf/index.html b/ckanext/spatial/tests/nose/xml/gemini2.1-waf/index.html
deleted file mode 100644
index 9710d9f..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1-waf/index.html
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
- Index of /waf
-
-
- Index of /waf
- wales1.xml
- wales2.xml
-
-
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1-waf/wales1.xml b/ckanext/spatial/tests/nose/xml/gemini2.1-waf/wales1.xml
deleted file mode 100644
index 750c066..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1-waf/wales1.xml
+++ /dev/null
@@ -1,420 +0,0 @@
-
-
-
- 11edc4ec-5269-40b9-86c8-17201fa4e74e-new
-
-
- eng
-
-
-
-
-
-
-
- Welsh Government
-
-
- Geography and Technology
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Cathays Park - 2
-
-
- Cardiff
-
-
- Wales
-
-
- CF10 3NQ
-
-
- UK
-
-
- cartographics@wales.gsi.gov.uk
-
-
-
-
-
-
-
-
-
-
-
- 2011-10-28T17:27:04
-
-
- Gemini
-
-
- 2.1
-
-
- -
-
- -
-
- -
-
-
- urn:ogc:def:crs:EPSG::Nat. Grid GB
-
-
- OGP
-
-
-
-
-
-
-
-
-
-
- World Heritage Sites in Wales GIS Polygon and Polyline Dataset
-
-
- WHS in Wales GIS Dataset
-
-
-
-
- 2005-01-01
-
-
-
-
-
-
-
-
-
- 27700
-
-
- EPSG
-
-
-
-
-
-
- UNESCO (United Nations Educational, Scientific and Cultural Organization) World Heritage Sites are places or buildings of outstanding universal value. UNESCO's World Heritage mission is to encourage countries to ensure the protection of their own natural and cultural heritage.
-
-Wales has three World Heritage Sites, the Castles and Town Walls of King Edward in Gwynedd, the Blaenavon Industrial Landscape and the Pontcysyllte Aqueduct.
-
-All planning enquiries that may effect a World Heritage Site, its setting or significant view should be directed to Cadw.
-
-The World Heritage Sites dataset comprises 4 ESRI Shapefiles, these are:-
-
-1: World Heritage Sites (WHS)
-2: Essential Setting (ES)
-3: Significant View (SV)
-4: Arcs of View (AV)
-
-
-
-
- CADW (The Historic Environment Service of the Welsh Assembly Government)
-
-
- Mapping and Charting Officer
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- CADW
-
-
- Plas Carew, Unit 5/7 Cefn Coed
-
-
- Parc Nantgarw, Cardiff,
-
-
- South Glamorgan
-
-
- CF15 7QQ
-
-
- United Kingdom
-
-
- cadw@wales.gsi.gov.uk
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- CADW_Whs_s.png
-
-
- thumbnail
-
-
- png
-
-
-
-
-
-
- CADW_Whs.png
-
-
- large_thumbnail
-
-
- png
-
-
-
-
-
-
- Protected sites
-
-
-
-
-
-
-
- GEMET - INSPIRE themes, version 1.0
-
-
-
-
- 2008-06-01
-
-
-
-
-
-
-
-
-
-
-
- CADW
-
-
- World Heritage Sites
-
-
-
-
-
-
- Copyright
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
-
- 5000
-
-
-
-
-
-
-
-
-
- environment
-
-
-
-
-
-
- -6.349669208373599
-
-
- -1.8959807251206617
-
-
- 50.948649366562954
-
-
- 53.77117345513605
-
-
-
-
-
- -
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- OGC Web Map Service
-
-
- 1.3.0
-
-
-
-
-
-
-
-
- http://inspire.wales.gov.uk/metadata?uuid=11edc4ec-5269-40b9-86c8-17201fa4e74e
-
-
-
-
-
-
- http://inspire.wales.gov.uk/maps/Protected_sites/wms?request=getCapabilities
-
-
-
-
-
-
- http://inspire.wales.gov.uk/maps/Protected_sites/wms?
-
-
- OGC:WMS-1.3.0-http-get-map
-
-
- Protected_sites:world_heritage_sites_feb10
-
-
- UNESCO World Heritage Sites (Wales)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- UK GEMINI Standard version 2.0
-
-
-
-
- 2009-07-20
-
-
-
-
-
-
-
-
-
- Conforms to GEMINI2 2.0 draft schematron
-
-
- true
-
-
-
-
-
-
-
-
- The Purpose of this data is to map World Heritage Sites in Wales.
-
-
-
-
-
-
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1-waf/wales2.xml b/ckanext/spatial/tests/nose/xml/gemini2.1-waf/wales2.xml
deleted file mode 100644
index 188e153..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1-waf/wales2.xml
+++ /dev/null
@@ -1,539 +0,0 @@
-
-
-
- be06a48d-36fa-4369-a585-7fcc319db4c0-new
-
-
- English
-
-
-
-
-
- -
-
-
- Spatial Evidence Officer
-
-
- -
-
-
- -
-
-
- 01248 385500
-
-
-
-
- -
-
-
- Countryside Council for Wales
-
-
- Maes-y-Ffynnon
-
-
- LL57 2DW
-
-
- LL57 2DW
-
-
- enquiries@ccw.gov.uk
-
-
-
-
-
-
-
-
-
-
-
- 2011-10-29T17:14:04
-
-
- Gemini
-
-
- 2.1
-
-
- -
-
- -
-
- -
-
-
- urn:ogc:def:crs:EPSG::Nat. Grid GB
-
-
- OGP
-
-
-
-
-
-
-
-
- -
-
-
- Protected sites: Country Parks - GIS dataset
-
-
- Parciau Cenedlaethol - Set Ddata SGDd
-
-
- Sites (Country Parks)
-
-
- -
-
- -
-
- 2009-00-00
-
- -
-
-
-
-
-
-
- -
-
- -
-
- 2009-01-01
-
- -
-
-
-
-
-
-
-
-
- 98742
-
-
- http://www.ccw.gov.uk
-
-
-
-
-
-
- This is a GIS dataset containing spatial objects such as points, lines, and polygons. It contains digital boundaries of country parks. There are about 250 recognised Country Parks in England and Wales. Most Country Parks were designated in the 1970s, under the Countryside Act 1968 with the support of the former Countryside Commission. In more recent times there has been no specific financial support for country parks directly, and fewer have been designated. Most are managed by local authorities, although other organisations and private individuals can also run them. A Country Park is an area designated for people to visit and enjoy recreation in a countryside environment. The purpose of a country park is to provide somewhere for visitors who do not necessarily want to go out into the wider countryside. Visitors can enjoy a public open space with an informal atmosphere, as opposed to a formal park as might be found in an urban area. For this reason country parks are usually found close to or on the edge of built-up areas, and rarely in the countryside proper.
-
-
- -
-
-
- Countryside Council for Wales (CCW)
-
-
- -
-
-
- -
-
-
- 01248 385500
-
-
-
-
- -
-
-
- Countryside Council for Wales
-
-
- Maes y Ffynnon
-
-
- Bangor
-
-
- Gwynedd
-
-
- LL56 2DW
-
-
- enquiries@ccw.gov.uk
-
-
-
-
-
-
-
-
-
-
-
- -
-
-
-
-
-
-
-
-
-
- CCW_Countryparks_s.png
-
-
- thumbnail
-
-
- png
-
-
-
-
-
-
- CCW_Countryparks.png
-
-
- large_thumbnail
-
-
- png
-
-
-
-
- -
-
-
- CCW website; cd; dvd
-
-
-
-
-
- -
-
-
- Protected sites
-
-
- -
-
-
- GEMET - INSPIRE themes, version 1.0
-
-
- -
-
- -
-
- 2011-07-05
-
- -
-
-
-
-
-
-
-
-
-
-
- -
-
-
- Biodiversity
-
-
- Country parks
-
-
- Nature conservation
-
-
- -
-
-
- Integrated Public Sector Vocabulary (IPSV)
-
-
- -
-
- -
-
- 2011-07-05
-
- -
-
-
-
-
-
-
-
-
-
-
-
-
- CCGC/CCW 2004. You may reproduce this dataset free of charge for non-commercial and internal business purposes in any format or medium, provided that you do so accurately, acknowledging both the source and CCW's copyright, and do not use it in a misleading context. To avoid using old information, we recommend that you obtain the latest version from the original source.
-
-
-
-
-
-
-
-
-
- CCW is legally obliged to give public access to the data and information which it holds unless specific legal exceptions apply, for example, if the data is personal or if release of the data would result in environmental harm
-
-
-
-
-
-
-
-
-
-
- biota
-
-
- environment
-
-
- geoscientificInformation
-
-
- -
-
-
-
-
-
- -
-
- -
-
-
- ISO3166 Countries
-
-
- -
-
- -
-
- 2011-07-05
-
- -
-
-
-
-
-
-
-
- -
-
- Wales (WLS)
-
-
-
-
-
-
-
-
- -6.279468297494083
-
-
- -2.4589218491907405
-
-
- 51.02857752005919
-
-
- 53.525259613573844
-
-
-
-
-
- -
-
-
- 1995-01-01
- 2009-12-31
-
-
-
-
-
-
-
- Keith Jones, Spatial Evidence Manager, PKCG.
-
-
-
-
-
-
- -
-
-
- -
-
-
- GIS Team
-
-
- -
-
-
- -
-
-
- Tel: 0845 1306229
-
-
-
-
- -
-
-
- Enquiries, Countryside Council for Wales
-
-
- Maes-y-Ffynnon
-
-
- Bangor
-
-
- Gwynedd
-
-
- LL57 2DW
-
-
- gis.helpdesk@ccw.gov.uk
-
-
-
-
-
-
-
-
-
-
-
-
-
- -
-
-
- -
-
-
- http://inspire.wales.gov.uk/metadata/srv/en/main.home?uuid=be06a48d-36fa-4369-a585-7fcc319db4c0
-
-
-
-
- -
-
-
- http://inspire.wales.gov.uk/maps/Protected_sites/wms?request=getCapabilities
-
-
-
-
-
-
- http://inspire.wales.gov.uk/maps/Protected_sites/wms?
-
-
- OGC:WMS-1.3.0-http-get-map
-
-
- Protected_sites:country_parks
-
-
- Country Parks of Wales. Data produced by the CCW (Countryside Council for Wales).
-
-
-
-
-
-
-
-
- -
-
-
- -
-
-
-
-
-
-
-
-
-
-
-
-
-
- UK GEMINI Standard version 2.0
-
-
-
-
- 2009-07-20
-
-
-
-
-
-
-
-
-
- Conforms to GEMINI2 2.0 draft schematron
-
-
- true
-
-
-
-
-
-
- -
-
-
- Originally mapped on paper maps by Unitary Authorities. This dataset was originally put together from maps provided by Unitary Authorities and then digitised by CCW staff in the mid 1990s. Later updates were compiled from digital data supplied by unitary authorities and pdfs. Digital data captured to 1:2500 /1:10,000 (OS MasterMap has variable scale from uplands to urban areas)
-
-
-
-
-
-
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/FCSConservancyPolygons.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/FCSConservancyPolygons.xml
deleted file mode 100644
index a34f8aa..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1/FCSConservancyPolygons.xml
+++ /dev/null
@@ -1,524 +0,0 @@
-
-
-
- B8A22DF4-B0DC-4F0B-A713-0CF5F8784A28
-
-
- utf8
-
-
- dataset
-
-
-
-
- Geo-Information Services
-
-
- Forestry Commission Scotland
-
-
- Geo-Information Services Delivery Manager
-
-
-
-
-
-
- 0131 334 0303
-
-
-
-
-
-
- Silvan House, 231 Corstorphine Road
-
-
- Edinburgh
-
-
- Scotland
-
-
- EH12 7AT
-
-
- United Kingdom
-
-
- geoinformationservices.scotland@forestry.gsi.gov.uk
-
-
-
-
-
-
- pointOfContact
-
-
-
-
- 2012-11-30T10:51:36
-
-
-
-
-
-
- urn:ogc:def:crs:EPSG::27700
-
-
-
-
-
-
-
-
-
-
- FCS Conservancy Polygons
-
-
- s_cons_pol
-
-
- fc.s_cons_pol
-
-
-
-
- 2004-06-06
-
-
- creation
-
-
-
-
-
-
- 2010-03-16
-
-
- revision
-
-
-
-
-
-
- Geo_Information Services
-
-
-
-
-
-
-
-
-
- Description:
-
-This dataset depicts the five Forestry Commission Scotland Conservancy boundaries.
-
-
-
-Attributes:
-
-NAME : Conservancy Name
-ADDRESS_1 : Address
-ADDRESS_2 : Address
-ADDRESS_3 : Address
-ADDRESS_4 : Address
-POSTCODE : Postcode
-PHONE_NO : Telephone Number
-EMAIL : Email Address
-
-
-
-
- Head of Grants & Licences
-
-
- Forestry Commission Scotland
-
-
- Head of Grants & Licences
-
-
-
-
-
-
- 0131 334 0303
-
-
-
-
-
-
- Silvan House, 231 Corstorphine Road
-
-
- Edinburgh
-
-
- Scotland
-
-
- EH12 7AT
-
-
- United Kingdom
-
-
- cgis.scotland@forestry.gsi.gov.uk
-
-
-
-
-
-
- owner
-
-
-
-
-
-
- Geo-Information Services
-
-
- Forestry Commission Scotland
-
-
- Geo-Information Services Delivery Manager
-
-
-
-
-
-
- 0131 334 0303
-
-
-
-
-
-
- Silvan House, 231 Corstorphine Road
-
-
- Edinburgh
-
-
- Scotland
-
-
- EH12 7AT
-
-
- United Kingdom
-
-
- geoinformationservices.scotland@forestry.gsi.gov.uk
-
-
-
-
-
-
- custodian
-
-
-
-
-
-
- Geo-Information Services
-
-
- Forestry Commission Scotland
-
-
- Geo-Information Services Delivery Manager
-
-
-
-
-
-
- 0131 334 0303
-
-
-
-
-
-
- Silvan House, 231 Corstorphine Road
-
-
- Edinburgh
-
-
- Scotland
-
-
- EH12 7AT
-
-
- United Kingdom
-
-
- geoinformationservices.scotland@forestry.gsi.gov.uk
-
-
-
-
-
-
- distributor
-
-
-
-
-
-
- asNeeded
-
-
-
-
-
-
- SDE Feature Class
-
-
-
-
-
-
-
-
-
- administrative
-
-
- regional
-
-
-
-
-
-
- copyright
-
-
- license
-
-
- otherRestrictions
-
-
- copyright
-
-
- license
-
-
- None
-
-
-
-
-
-
- Copyright (Copyright Forestry Commission Scotland)
-
-
-
-
-
-
-
-
-
-
-
- 10000
-
-
-
-
-
-
- eng
-
-
- boundaries
-
-
- economy
-
-
-
-
-
-
-
-
- Scotland
-
-
-
-
-
-
-
-
- -9.229868
-
-
- -0.705137
-
-
- 54.513338
-
-
- 60.866111
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Unknown
-
-
- Unknown
-
-
-
-
-
-
-
-
- Geo-Information Services
-
-
- Forestry Commission Scotland
-
-
- Geo-Information Services Delivery Manager
-
-
-
-
-
-
- 0131 334 0303
-
-
-
-
-
-
- Silvan House, 231 Corstorphine Road
-
-
- Edinburgh
-
-
- Scotland
-
-
- EH12 7AT
-
-
- United Kingdom
-
-
- geoinformationservices.scotland@forestry.gsi.gov.uk
-
-
-
-
-
-
- distributor
-
-
-
-
-
-
- SDE Feature Class
-
-
-
-
-
-
-
-
-
-
-
- Server=fcspatialsv5; Service=5151; User=fcproduct; Version=SDE.DEFAULT
-
-
-
-
-
-
-
-
-
-
-
-
- http://www.forestry.gov.uk/datadownload
-
-
-
-
-
-
-
-
-
-
-
-
- dataset
-
-
-
-
-
-
- This dataset was derived by merging OS Boundary Line polygons together (with the exception of the boundary between north and south Fife, which was digitised by Geo-Information Services). Boundary Line is based on 1:10,000 scale mapping.
-
-
-
-
-
-
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/dataset1.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/dataset1.xml
deleted file mode 100644
index 3f58f0e..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1/dataset1.xml
+++ /dev/null
@@ -1,498 +0,0 @@
-
-
-
- test-dataset-1
-
-
- eng
-
-
- dataset
-
-
-
-
- Lachlan Renwick
-
-
- Scottish Natural Heritage
-
-
- Geographic Systems and Data Coordinator
-
-
-
-
-
-
- 01463 725000
-
-
-
-
-
-
- Great Glen House, Leachkin Road
-
-
- INVERNESS
-
-
- IV3 8NW
-
-
- United Kingdom
-
-
- data_supply@snh.gov.uk
-
-
-
-
-
-
- pointOfContact
-
-
-
-
- 2011-09-23T10:06:08
-
-
-
-
-
-
- urn:ogc:def:crs:EPSG::27700
-
-
-
-
-
-
-
-
-
-
- Country Parks (Scotland)
-
-
-
-
- 2004-02
-
-
- creation
-
-
-
-
-
-
- 2006-07-03
-
-
- revision
-
-
-
-
-
-
- CPK
-
-
-
-
-
-
-
-
-
- Parks are set up by Local Authorities to provide open-air recreation facilities close to towns and cities. [edited]
-
-
-
-
- Lachlan Renwick
-
-
- Scottish Natural Heritage
-
-
- Geographic Systems & Data Coordinator
-
-
-
-
-
-
- 01463 725000
-
-
-
-
-
-
- Great Glen House, Leachkin Road
-
-
- INVERNESS
-
-
- IV3 8NW
-
-
- United Kingdom
-
-
- data_supply@snh.gov.uk
-
-
-
-
-
-
- custodian
-
-
-
-
-
-
- Lachlan Renwick
-
-
- Scottish Natural Heritage
-
-
- Geographic Systems & Data Coordinator
-
-
-
-
-
-
- 01463 725000
-
-
-
-
-
-
- Great Glen House, Leachkin Road
-
-
- INVERNESS
-
-
- IV3 8NW
-
-
- United Kingdom
-
-
- data_supply@snh.gov.uk
-
-
-
-
-
-
- distributor
-
-
-
-
-
-
- irregular
-
-
-
-
-
-
- SDE Feature Class
-
-
-
-
-
-
-
-
-
- Nature conservation
-
-
-
-
- Government Category List
-
-
-
-
- 2004-07-15
-
-
- revision
-
-
-
-
-
-
-
-
-
-
- copyright
-
-
- otherRestrictions
-
-
- copyright
-
-
- otherRestrictions
-
-
- Copyright Scottish Natural Heritage
-
-
-
-
-
-
- Reference and PSMA Only
-
-
- http://www.test.gov.uk/licenseurl
-
-
-
-
-
-
-
-
-
- 5
-
-
-
-
- eng
-
-
- environment
-
-
-
-
-
-
-
-
-
-
- ISO 3166
-
-
-
-
- 2007-09-02
-
-
- revision
-
-
-
-
-
-
- GB-SCT
-
-
-
-
-
-
-
-
- -8.97114288
-
-
- 0.205857204
-
-
- 54.529947158
-
-
- 61.06066944
-
-
-
-
-
-
-
- 1998
- 2010
-
-
-
-
-
-
-
-
-
-
-
-
-
- ESRI Shapefile
-
-
- Unknown
-
-
-
-
-
-
- KML
-
-
- 2.1
-
-
-
-
-
-
- GML
-
-
- 3.1.1
-
-
-
-
-
-
-
-
- Lachlan Renwick
-
-
- Scottish Natural Heritage
-
-
- Geographic Systems & Data Coordinator
-
-
-
-
-
-
- 01463 725000
-
-
-
-
-
-
- Great Glen House, Leachkin Road
-
-
- INVERNESS
-
-
- IV3 8NW
-
-
- United Kingdom
-
-
- data_supply@snh.gov.uk
-
-
-
-
-
-
- distributor
-
-
-
-
-
-
- SDE Feature Class
-
-
-
-
-
-
-
-
-
-
-
- http://www.snh.org.uk/snhi
-
-
-
-
-
-
-
-
-
-
-
-
- https://gateway.snh.gov.uk/pls/apex_ddtdb2/f?p=101
-
-
- Test Resource Name
-
-
- Test Resource Description
-
-
- test-protocol
-
-
- download
-
-
-
-
-
-
-
-
-
-
-
-
- dataset
-
-
-
-
-
-
- Country Park is not a statutory designation. Countryside (Scotland) Act 1967 Section 48 gives local authorities power to assess and review the need for Country Parks in consultation with SNH.
-
-
-
-
-
-
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/error_bad_xml.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/error_bad_xml.xml
deleted file mode 100644
index 54a7dc1..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1/error_bad_xml.xml
+++ /dev/null
@@ -1,15 +0,0 @@
-
-
-
- test-error-bad-xml-1
-
-
- eng
-
-
- service
-
-
- Service
-
-
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/error_validation.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/error_validation.xml
deleted file mode 100644
index 5c60965..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1/error_validation.xml
+++ /dev/null
@@ -1,293 +0,0 @@
-
-
-
- test-error-validation-1
-
-
-
-
-
-
-
-
- Service
-
-
-
-
-
-
-
-
- Forestry Commission Scotland
-
-
- Geo-Information Services
-
-
-
-
-
-
- 0131 334 0303
-
-
-
-
-
-
-
-
-
- 231 Corstorphine Road
-
-
- Edinburgh
-
-
-
-
-
- EH12 7AT
-
-
- United Kingdom
-
-
- geoinformationservices@forestry.gsi.gov.uk
-
-
-
-
-
-
-
-
-
-
-
- 2011-06-15T15:27:21
-
-
-
-
-
-
- urn:ogc:def:crs:EPSG::27700
-
-
-
-
-
-
-
-
-
-
- FCS Administrative Boundaries WMS
-
-
- FCS_Admin_Boundaries
-
-
-
-
- 2011-05-08
-
-
-
-
-
-
-
-
-
- 2011-06-15
-
-
-
-
-
-
-
-
-
- FCSADMINWMS
-
-
-
-
-
-
- This interactive map service contains the following Forestry Commission Scotland administrative boundaries:
-
-
-
-FC Conservancy boundaries
-
-FC Forest District boundaries
-
-Woodlands In & Around Towns (WIAT)
-
-
-
-
-The layers can be switched on and off independently of each other. Please note that the Conservancy and Forest District boundaries have both been generalised in order to speed screen refresh response time.
-
-
-Forestry Commission Scotland would like to thank Scottish Natural Heritage for their co-operation in hosting this web service on behalf of FCS.
-
-
-PLEASE NOTE:
-In order to upload this Web Map Service into desktop GIS (eg. ESRI's ArcGIS), copy and paste the 'OnLine Resource' URL shown below up to, and including the '?'.
-ie:-
-http://mapgateway.snh.gov.uk/ServicesWMS/FCS_Admin_Boundaries/MapServer/WMSServer?
-
-
-
-
-
-
-
-
- Forestry Commission Scotland
-
-
- Geo-Information Services
-
-
-
-
-
-
- 0131 334 0303
-
-
-
-
-
-
-
-
-
- 231 Corstorphine Road
-
-
- Edinburgh
-
-
-
-
-
- EH12 7AT
-
-
- United Kingdom
-
-
- geoinformationservices@forestry.gsi.gov.uk
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Conservancy Boundaries
-
-
- WIAT
-
-
- Woodlands In & Around Towns
-
-
- Forest District Boundaries
-
-
-
-
-
-
-
-
-
- Copyright (Copyright Forestry Commission Scotland)
-
-
-
-
-
-
- None
-
-
-
-
- OGC:WMS
-
-
-
-
-
-
- -9.22567
-
-
- 2.69487
-
-
- 49.833604
-
-
- 60.86638
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- http://mapgateway.snh.gov.uk/ServicesWMS/FCS_Admin_Boundaries/MapServer/WMSServer?REQUEST=GetCapabilities&service=wms
-
-
-
-
-
-
-
-
-
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/service1.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/service1.xml
deleted file mode 100644
index 6ea0acf..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1/service1.xml
+++ /dev/null
@@ -1,347 +0,0 @@
-
-
-
- test-service-1
-
-
- eng
-
-
- service
-
-
- Service
-
-
-
-
-
-
-
- The Improvement Service
-
-
- Gazetteer Business manager
-
-
-
-
-
-
- 01506 775558
-
-
- 01506 775566
-
-
-
-
-
-
- Westerton House
-
-
- East Mains Industrial Estate
-
-
- Broxburn
-
-
-
-
-
- EH52 5AU
-
-
- Scotland
-
-
- OSGCM@improvementservice.org.uk
-
-
-
-
-
-
- pointOfContact
-
-
-
-
- 2011-09-08T16:07:32
-
-
-
-
-
-
-
- OSGB 1936 / British National Grid (EPSG:27700)
-
-
- EPSG
-
-
- 7.4
-
-
-
-
-
-
-
-
-
-
- One Scotland Address Gazetteer Web Map Service (WMS)
-
-
-
-
- 2011-09-08
-
-
- publication
-
-
-
-
-
-
- This service displays its contents at larger scale than 1:10000. [edited]
-
-
-
-
-
-
-
- The Improvement Service
-
-
- Gazetteer Business Manager
-
-
-
-
-
-
- 01506 775558
-
-
- 01506 775566
-
-
-
-
-
-
- Westerton House
-
-
- East Mains Industrial Estate
-
-
- Broxburn
-
-
-
-
-
- EH52 5AU
-
-
- Scotland
-
-
- OSGCM@improvementservice.org.uk
-
-
-
-
-
-
- owner
-
-
-
-
-
-
- daily
-
-
-
-
-
-
- Scottish National Gazetteer
-
-
-
-
-
-
-
-
-
- Addresses
-
-
-
-
-
-
-
- external.theme.inspire-theme
-
-
-
-
-
-
-
-
-
- otherRestrictions
-
-
- No restriction on public access
-
-
-
-
-
-
- Use of the One Scotland Gazetteer data used by this this service is available to any organisation that is a member of the One Scotland Mapping Agreement. It is not currently commercially available
-
-
- http://www.test.gov.uk/licenseurl
-
-
-
-
- other
-
-
-
-
-
-
- -9.099786875
-
-
- 0.5242365625
-
-
- 54.4764484375
-
-
- 61.0243
-
-
-
-
-
-
-
- 1904-06-16
- 2004-06-16
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- image/png
-
-
- NotApplicable
-
-
-
-
-
-
-
-
- http://127.0.0.1:8999/wms/capabilities.xml
-
-
- Web Map Service (WMS)
-
-
- Link to the GetCapabilities request for this service
-
-
- OGC:WMS-1.3.0-http-get-capabilities
-
-
- download
-
-
-
-
-
-
-
-
-
-
-
-
- service
-
-
-
-
- Geographic web service
-
-
-
-
-
-
-
-
-
-
-
-
- Technical Guidance for the implementation of INSPIRE View Services Version 3.0
-
-
-
-
- 2011-03-21
-
-
- publication
-
-
-
-
-
-
- This is an INSPIRE Addresses theme focussed service that is not yet conformant to the address data specification but intends to be conformant to the service technical guidance at at: http://inspire.jrc.ec.europa.eu/documents/Network_Services/TechnicalGuidance_ViewServices_v3.0.pdf
-
-
- false
-
-
-
-
-
-
-
-
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/service1_newer.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/service1_newer.xml
deleted file mode 100644
index d2c2da7..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1/service1_newer.xml
+++ /dev/null
@@ -1,347 +0,0 @@
-
-
-
- test-service-1
-
-
- eng
-
-
- service
-
-
- Service
-
-
-
-
-
-
-
- The Improvement Service
-
-
- Gazetteer Business manager
-
-
-
-
-
-
- 01506 775558
-
-
- 01506 775566
-
-
-
-
-
-
- Westerton House
-
-
- East Mains Industrial Estate
-
-
- Broxburn
-
-
-
-
-
- EH52 5AU
-
-
- Scotland
-
-
- OSGCM@improvementservice.org.uk
-
-
-
-
-
-
- pointOfContact
-
-
-
-
- 2011-09-10T00:00:01
-
-
-
-
-
-
-
- OSGB 1936 / British National Grid (EPSG:27700)
-
-
- EPSG
-
-
- 7.4
-
-
-
-
-
-
-
-
-
-
- One Scotland Address Gazetteer Web Map Service (WMS) NEWER
-
-
-
-
- 2011-09-08
-
-
- publication
-
-
-
-
-
-
- This service displays its contents at larger scale than 1:10000. [edited]
-
-
-
-
-
-
-
- The Improvement Service
-
-
- Gazetteer Business Manager
-
-
-
-
-
-
- 01506 775558
-
-
- 01506 775566
-
-
-
-
-
-
- Westerton House
-
-
- East Mains Industrial Estate
-
-
- Broxburn
-
-
-
-
-
- EH52 5AU
-
-
- Scotland
-
-
- OSGCM@improvementservice.org.uk
-
-
-
-
-
-
- owner
-
-
-
-
-
-
- daily
-
-
-
-
-
-
- Scottish National Gazetteer
-
-
-
-
-
-
-
-
-
- Addresses
-
-
-
-
-
-
-
- external.theme.inspire-theme
-
-
-
-
-
-
-
-
-
- otherRestrictions
-
-
- No restriction on public access
-
-
-
-
-
-
- Use of the One Scotland Gazetteer data used by this this service is available to any organisation that is a member of the One Scotland Mapping Agreement. It is not currently commercially available
-
-
- http://www.test.gov.uk/licenseurl
-
-
-
-
- other
-
-
-
-
-
-
- -9.099786875
-
-
- 0.5242365625
-
-
- 54.4764484375
-
-
- 61.0243
-
-
-
-
-
-
-
- 1904-06-16
- 2004-06-16
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- image/png
-
-
- NotApplicable
-
-
-
-
-
-
-
-
- http://127.0.0.1:8999/wms/capabilities.xml
-
-
- Web Map Service (WMS)
-
-
- Link to the GetCapabilities request for this service
-
-
- OGC:WMS-1.3.0-http-get-capabilities
-
-
- download
-
-
-
-
-
-
-
-
-
-
-
-
- service
-
-
-
-
- Geographic web service
-
-
-
-
-
-
-
-
-
-
-
-
- Technical Guidance for the implementation of INSPIRE View Services Version 3.0
-
-
-
-
- 2011-03-21
-
-
- publication
-
-
-
-
-
-
- This is an INSPIRE Addresses theme focussed service that is not yet conformant to the address data specification but intends to be conformant to the service technical guidance at at: http://inspire.jrc.ec.europa.eu/documents/Network_Services/TechnicalGuidance_ViewServices_v3.0.pdf
-
-
- false
-
-
-
-
-
-
-
-
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/source1/same_dataset.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/source1/same_dataset.xml
deleted file mode 100644
index bd2a21c..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1/source1/same_dataset.xml
+++ /dev/null
@@ -1,347 +0,0 @@
-
-
-
- test-same-dataset-1
-
-
- eng
-
-
- service
-
-
- Service
-
-
-
-
-
-
-
- The Improvement Service
-
-
- Gazetteer Business manager
-
-
-
-
-
-
- 01506 775558
-
-
- 01506 775566
-
-
-
-
-
-
- Westerton House
-
-
- East Mains Industrial Estate
-
-
- Broxburn
-
-
-
-
-
- EH52 5AU
-
-
- Scotland
-
-
- OSGCM@improvementservice.org.uk
-
-
-
-
-
-
- pointOfContact
-
-
-
-
- 2011-09-08T16:07:32
-
-
-
-
-
-
-
- OSGB 1936 / British National Grid (EPSG:27700)
-
-
- EPSG
-
-
- 7.4
-
-
-
-
-
-
-
-
-
-
- One Scotland Address Gazetteer Web Map Service (WMS)
-
-
-
-
- 2011-09-08
-
-
- publication
-
-
-
-
-
-
- This service displays its contents at larger scale than 1:10000. [edited]
-
-
-
-
-
-
-
- The Improvement Service
-
-
- Gazetteer Business Manager
-
-
-
-
-
-
- 01506 775558
-
-
- 01506 775566
-
-
-
-
-
-
- Westerton House
-
-
- East Mains Industrial Estate
-
-
- Broxburn
-
-
-
-
-
- EH52 5AU
-
-
- Scotland
-
-
- OSGCM@improvementservice.org.uk
-
-
-
-
-
-
- resourceProvider
-
-
-
-
-
-
- daily
-
-
-
-
-
-
- Scottish National Gazetteer
-
-
-
-
-
-
-
-
-
- Addresses
-
-
-
-
-
-
-
- external.theme.inspire-theme
-
-
-
-
-
-
-
-
-
- otherRestrictions
-
-
- No restriction on public access
-
-
-
-
-
-
- Use of the One Scotland Gazetteer data used by this this service is available to any organisation that is a member of the One Scotland Mapping Agreement. It is not currently commercially available
-
-
- http://www.test.gov.uk/licenseurl
-
-
-
-
- other
-
-
-
-
-
-
- -9.099786875
-
-
- 0.5242365625
-
-
- 54.4764484375
-
-
- 61.0243
-
-
-
-
-
-
-
- 1904-06-16
- 2004-06-16
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- image/png
-
-
- NotApplicable
-
-
-
-
-
-
-
-
- http://sedsh13.sedsh.gov.uk/ArcGIS/services/OSG/OSG/MapServer/WMSServer?request=GetCapabilities&service=WMS
-
-
- Web Map Service (WMS)
-
-
- Link to the GetCapabilities request for this service
-
-
- OGC:WMS-1.3.0-http-get-capabilities
-
-
- download
-
-
-
-
-
-
-
-
-
-
-
-
- service
-
-
-
-
- Geographic web service
-
-
-
-
-
-
-
-
-
-
-
-
- Technical Guidance for the implementation of INSPIRE View Services Version 3.0
-
-
-
-
- 2011-03-21
-
-
- publication
-
-
-
-
-
-
- This is an INSPIRE Addresses theme focussed service that is not yet conformant to the address data specification but intends to be conformant to the service technical guidance at at: http://inspire.jrc.ec.europa.eu/documents/Network_Services/TechnicalGuidance_ViewServices_v3.0.pdf
-
-
- false
-
-
-
-
-
-
-
-
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/source2/same_dataset.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/source2/same_dataset.xml
deleted file mode 100644
index bd2a21c..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1/source2/same_dataset.xml
+++ /dev/null
@@ -1,347 +0,0 @@
-
-
-
- test-same-dataset-1
-
-
- eng
-
-
- service
-
-
- Service
-
-
-
-
-
-
-
- The Improvement Service
-
-
- Gazetteer Business manager
-
-
-
-
-
-
- 01506 775558
-
-
- 01506 775566
-
-
-
-
-
-
- Westerton House
-
-
- East Mains Industrial Estate
-
-
- Broxburn
-
-
-
-
-
- EH52 5AU
-
-
- Scotland
-
-
- OSGCM@improvementservice.org.uk
-
-
-
-
-
-
- pointOfContact
-
-
-
-
- 2011-09-08T16:07:32
-
-
-
-
-
-
-
- OSGB 1936 / British National Grid (EPSG:27700)
-
-
- EPSG
-
-
- 7.4
-
-
-
-
-
-
-
-
-
-
- One Scotland Address Gazetteer Web Map Service (WMS)
-
-
-
-
- 2011-09-08
-
-
- publication
-
-
-
-
-
-
- This service displays its contents at larger scale than 1:10000. [edited]
-
-
-
-
-
-
-
- The Improvement Service
-
-
- Gazetteer Business Manager
-
-
-
-
-
-
- 01506 775558
-
-
- 01506 775566
-
-
-
-
-
-
- Westerton House
-
-
- East Mains Industrial Estate
-
-
- Broxburn
-
-
-
-
-
- EH52 5AU
-
-
- Scotland
-
-
- OSGCM@improvementservice.org.uk
-
-
-
-
-
-
- resourceProvider
-
-
-
-
-
-
- daily
-
-
-
-
-
-
- Scottish National Gazetteer
-
-
-
-
-
-
-
-
-
- Addresses
-
-
-
-
-
-
-
- external.theme.inspire-theme
-
-
-
-
-
-
-
-
-
- otherRestrictions
-
-
- No restriction on public access
-
-
-
-
-
-
- Use of the One Scotland Gazetteer data used by this this service is available to any organisation that is a member of the One Scotland Mapping Agreement. It is not currently commercially available
-
-
- http://www.test.gov.uk/licenseurl
-
-
-
-
- other
-
-
-
-
-
-
- -9.099786875
-
-
- 0.5242365625
-
-
- 54.4764484375
-
-
- 61.0243
-
-
-
-
-
-
-
- 1904-06-16
- 2004-06-16
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- image/png
-
-
- NotApplicable
-
-
-
-
-
-
-
-
- http://sedsh13.sedsh.gov.uk/ArcGIS/services/OSG/OSG/MapServer/WMSServer?request=GetCapabilities&service=WMS
-
-
- Web Map Service (WMS)
-
-
- Link to the GetCapabilities request for this service
-
-
- OGC:WMS-1.3.0-http-get-capabilities
-
-
- download
-
-
-
-
-
-
-
-
-
-
-
-
- service
-
-
-
-
- Geographic web service
-
-
-
-
-
-
-
-
-
-
-
-
- Technical Guidance for the implementation of INSPIRE View Services Version 3.0
-
-
-
-
- 2011-03-21
-
-
- publication
-
-
-
-
-
-
- This is an INSPIRE Addresses theme focussed service that is not yet conformant to the address data specification but intends to be conformant to the service technical guidance at at: http://inspire.jrc.ec.europa.eu/documents/Network_Services/TechnicalGuidance_ViewServices_v3.0.pdf
-
-
- false
-
-
-
-
-
-
-
-
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/01_Dataset_Invalid_XSD_No_Such_Element.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/01_Dataset_Invalid_XSD_No_Such_Element.xml
deleted file mode 100644
index 4b114e6..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/01_Dataset_Invalid_XSD_No_Such_Element.xml
+++ /dev/null
@@ -1,636 +0,0 @@
-
-
-
- test-record-01
-
-
-
-
-
- test-record-08
-
-
-
-
-
- dataset
-
-
-
-
- Centre for Ecology & Hydrology
-
-
-
-
-
-
- Lancaster Environment Centre, Library Avenue, Bailrigg
-
-
- Lancaster
-
-
- Lancashire
-
-
- LA1 4AP
-
-
- United Kingdom
-
-
- enquiries@ceh.ac.uk
-
-
-
-
-
-
- pointOfContact
-
-
-
-
- 2012-10-05
-
-
- INSPIRE Implementing Rules for Metadata
-
-
- 1.2
-
-
-
-
-
-
- 27700
-
-
- urn:ogc:def:crs:EPSG
-
-
- 6.11.2
-
-
-
-
-
-
-
-
-
-
- Test Record 01 Dataset No Such Element Name
-
-
- Also known as xxx
-
-
-
-
- 2011-04-08
-
-
-
-
-
-
-
-
-
- 1300392329603
-
-
- CEH:EIDC:
-
-
- 1
-
-
-
-
- R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
-
-
-
-
- This test record should fail XSD validation, as it has an element present which is not allowed by the schema.
-
-
-
-
-
-
-
- Morton, D
-
-
- Centre for Ecology & Hydrology
-
-
-
-
-
-
- Lancaster Environment Centre, Library Avenue, Bailrigg
-
-
- Lancaster
-
-
- Lancashire
-
-
- LA1 4AP
-
-
- United Kingdom
-
-
- danm@ceh.ac.uk
-
-
-
-
-
-
- pointOfContact
-
-
-
-
-
-
- Parr Section
-
-
-
-
-
-
- enquiries@ceh.ac.uk
-
-
-
-
-
-
- resourceProvider
-
-
-
-
-
-
-
-
-
- Not planned
-
-
-
-
-
-
- https://gateway.ceh.ac.uk:443/smartEditor/preview/82a0f4a1-01ff-4ed1-853e-224d8404b3fd.png
-
-
- thumbnail preview
-
-
-
-
-
-
- Habitats and biotopes
-
-
-
-
-
-
-
- GEMET - INSPIRE themes, version 1.0
-
-
-
-
- 2012-10-04
-
-
-
-
-
-
-
-
-
-
-
-
-
- Great Britain
-
-
- England
-
-
- Scotland Wales
-
-
-
-
-
-
-
-
-
- CEH Biodiversity Programme
-
-
-
-
-
-
-
-
-
- CEH Project NEC03259
-
-
- NERC_DDC
-
-
- LCM2007
-
-
-
-
-
-
-
-
-
- Test Link
-
-
-
-
-
-
- Refer to: R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
-
-
-
-
-
-
-
-
- Licence terms and conditions apply
-
-
- Test Link
-
-
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
- environment
-
-
- imageryBaseMapsEarthCover
-
-
-
-
-
-
-
-
- ENG
-
-
- ISO 3166
-
-
- 2006, edition 2
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- WLS
-
-
- ISO 3166
-
-
- 2006, edition 2
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- SCT
-
-
- ISO 3166
-
-
- 2006, edition 2
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2000-10-01
- 2012-10-01
-
-
-
-
-
-
-
-
-
-
-
- true
-
-
- -9.227701
-
-
- 2.687637
-
-
- 49.83726
-
-
- 60.850441
-
-
-
-
-
-
- Some text
-
-
-
-
-
-
-
-
- GeoTIFF
-
-
- 1.0
-
-
-
-
-
-
-
-
- Centre for Ecology & Hydrology
-
-
-
-
-
-
- Maclean Building, Benson Lane, Crowmarsh Gifford
-
-
- Wallingford
-
-
- Oxfordshire
-
-
- OX10 8BB
-
-
- United Kingdom
-
-
- enquiries@ceh.ac.uk
-
-
-
-
-
-
- distributor
-
-
-
-
-
-
-
-
-
-
- http://www.ceh.ac.uk/LandCoverMap2007.html
-
-
- Essential technical details
-
-
- Link to further technical details about this data
-
-
-
-
-
-
-
-
-
-
-
-
-
- http://www.countrysidesurvey.org.uk/
-
-
- Countryside Survey website
-
-
- Countryside Survey website
-
-
-
-
-
-
-
-
-
-
-
-
-
- http://gateway.ceh.ac.uk/download?fileIdentifier=82a0f4a1-01ff-4ed1-853e-224d8404b3fd
-
-
- Dataset download
-
-
- Link to download this dataset
-
-
-
-
-
-
-
-
-
-
-
-
-
- http://lasigprod.nerc-lancaster.ac.uk/arcgis/services/LandCoverMap/LCM2007_GB_1k_DOM_AGG/MapServer/WMSServer?request=GetCapabilities&service=WMS
-
-
- Web Map Service
-
-
- A web map service (WMS) is available for this data
-
-
-
-
-
-
-
-
-
-
- http://cs2007.ceh.ac.uk/sites/default/files/LCM2007%20Final%20Report%20-%20vCS%20Web.pdf
-
-
- CS Technical Report
-
-
- Final Report for LCM2007 - the new UK Land Cover Map
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- LCM2007 uses a spatial framework based on OS MasterMap (R). MasterMap was generalised to remove unnecessary detail, then the framework was segmented according to the underlying satellite data to split areas of non-uniform landscape. The data was classified according to a parcel-based supervised maximum likelihood classification procedure. The raster products are derived from the vector products.
-
-
-
-
-
-
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/01_Dataset_Invalid_XSD_No_Such_Element_unix.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/01_Dataset_Invalid_XSD_No_Such_Element_unix.xml
deleted file mode 100644
index cd0d5f2..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/01_Dataset_Invalid_XSD_No_Such_Element_unix.xml
+++ /dev/null
@@ -1,636 +0,0 @@
-
-
-
- test-record-01
-
-
-
-
-
- test-record-08
-
-
-
-
-
- dataset
-
-
-
-
- Centre for Ecology & Hydrology
-
-
-
-
-
-
- Lancaster Environment Centre, Library Avenue, Bailrigg
-
-
- Lancaster
-
-
- Lancashire
-
-
- LA1 4AP
-
-
- United Kingdom
-
-
- enquiries@ceh.ac.uk
-
-
-
-
-
-
- pointOfContact
-
-
-
-
- 2012-10-05
-
-
- INSPIRE Implementing Rules for Metadata
-
-
- 1.2
-
-
-
-
-
-
- 27700
-
-
- urn:ogc:def:crs:EPSG
-
-
- 6.11.2
-
-
-
-
-
-
-
-
-
-
- Test Record 01 Dataset No Such Element Name
-
-
- Also known as xxx
-
-
-
-
- 2011-04-08
-
-
-
-
-
-
-
-
-
- 1300392329603
-
-
- CEH:EIDC:
-
-
- 1
-
-
-
-
- R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
-
-
-
-
- This test record should fail XSD validation, as it has an element present which is not allowed by the schema.
-
-
-
-
-
-
-
- Morton, D
-
-
- Centre for Ecology & Hydrology
-
-
-
-
-
-
- Lancaster Environment Centre, Library Avenue, Bailrigg
-
-
- Lancaster
-
-
- Lancashire
-
-
- LA1 4AP
-
-
- United Kingdom
-
-
- danm@ceh.ac.uk
-
-
-
-
-
-
- pointOfContact
-
-
-
-
-
-
- Parr Section
-
-
-
-
-
-
- enquiries@ceh.ac.uk
-
-
-
-
-
-
- resourceProvider
-
-
-
-
-
-
-
-
-
- Not planned
-
-
-
-
-
-
- https://gateway.ceh.ac.uk:443/smartEditor/preview/82a0f4a1-01ff-4ed1-853e-224d8404b3fd.png
-
-
- thumbnail preview
-
-
-
-
-
-
- Habitats and biotopes
-
-
-
-
-
-
-
- GEMET - INSPIRE themes, version 1.0
-
-
-
-
- 2012-10-04
-
-
-
-
-
-
-
-
-
-
-
-
-
- Great Britain
-
-
- England
-
-
- Scotland Wales
-
-
-
-
-
-
-
-
-
- CEH Biodiversity Programme
-
-
-
-
-
-
-
-
-
- CEH Project NEC03259
-
-
- NERC_DDC
-
-
- LCM2007
-
-
-
-
-
-
-
-
-
- Test Link
-
-
-
-
-
-
- Refer to: R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
-
-
-
-
-
-
-
-
- Licence terms and conditions apply
-
-
- Test Link
-
-
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
- environment
-
-
- imageryBaseMapsEarthCover
-
-
-
-
-
-
-
-
- ENG
-
-
- ISO 3166
-
-
- 2006, edition 2
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- WLS
-
-
- ISO 3166
-
-
- 2006, edition 2
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- SCT
-
-
- ISO 3166
-
-
- 2006, edition 2
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2000-10-01
- 2012-10-01
-
-
-
-
-
-
-
-
-
-
-
- true
-
-
- -9.227701
-
-
- 2.687637
-
-
- 49.83726
-
-
- 60.850441
-
-
-
-
-
-
- Some text
-
-
-
-
-
-
-
-
- GeoTIFF
-
-
- 1.0
-
-
-
-
-
-
-
-
- Centre for Ecology & Hydrology
-
-
-
-
-
-
- Maclean Building, Benson Lane, Crowmarsh Gifford
-
-
- Wallingford
-
-
- Oxfordshire
-
-
- OX10 8BB
-
-
- United Kingdom
-
-
- enquiries@ceh.ac.uk
-
-
-
-
-
-
- distributor
-
-
-
-
-
-
-
-
-
-
- http://www.ceh.ac.uk/LandCoverMap2007.html
-
-
- Essential technical details
-
-
- Link to further technical details about this data
-
-
-
-
-
-
-
-
-
-
-
-
-
- http://www.countrysidesurvey.org.uk/
-
-
- Countryside Survey website
-
-
- Countryside Survey website
-
-
-
-
-
-
-
-
-
-
-
-
-
- http://gateway.ceh.ac.uk/download?fileIdentifier=82a0f4a1-01ff-4ed1-853e-224d8404b3fd
-
-
- Dataset download
-
-
- Link to download this dataset
-
-
-
-
-
-
-
-
-
-
-
-
-
- http://lasigprod.nerc-lancaster.ac.uk/arcgis/services/LandCoverMap/LCM2007_GB_1k_DOM_AGG/MapServer/WMSServer?request=GetCapabilities&service=WMS
-
-
- Web Map Service
-
-
- A web map service (WMS) is available for this data
-
-
-
-
-
-
-
-
-
-
- http://cs2007.ceh.ac.uk/sites/default/files/LCM2007%20Final%20Report%20-%20vCS%20Web.pdf
-
-
- CS Technical Report
-
-
- Final Report for LCM2007 - the new UK Land Cover Map
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- LCM2007 uses a spatial framework based on OS MasterMap (R). MasterMap was generalised to remove unnecessary detail, then the framework was segmented according to the underlying satellite data to split areas of non-uniform landscape. The data was classified according to a parcel-based supervised maximum likelihood classification procedure. The raster products are derived from the vector products.
-
-
-
-
-
-
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/02_Dataset_Invalid_19139_Missing_Data_Format.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/02_Dataset_Invalid_19139_Missing_Data_Format.xml
deleted file mode 100644
index 6c41b94..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/02_Dataset_Invalid_19139_Missing_Data_Format.xml
+++ /dev/null
@@ -1,626 +0,0 @@
-
-
-
-test-record-02
-
-
-
-
-
- test-record-08
-
-
-
-
-
- dataset
-
-
-
-
- Centre for Ecology & Hydrology
-
-
-
-
-
-
- Lancaster Environment Centre, Library Avenue, Bailrigg
-
-
- Lancaster
-
-
- Lancashire
-
-
- LA1 4AP
-
-
- United Kingdom
-
-
- enquiries@ceh.ac.uk
-
-
-
-
-
-
- pointOfContact
-
-
-
-
- 2012-10-05
-
-
- INSPIRE Implementing Rules for Metadata
-
-
- 1.2
-
-
-
-
-
-
- 27700
-
-
- urn:ogc:def:crs:EPSG
-
-
- 6.11.2
-
-
-
-
-
-
-
-
-
-
- Test Record 02 Dataset Missing Data Format
-
-
- Also known as xxx
-
-
-
-
- 2011-04-08
-
-
-
-
-
-
-
-
-
- 1300392329603
-
-
- CEH:EIDC:
-
-
- 1
-
-
-
-
- R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
-
-
-
-
- This test record should pass XSD validation, but fail 19139 schematron, because it has no Distribution Format element.
-
-
-
-
-
-
-
- Morton, D
-
-
- Centre for Ecology & Hydrology
-
-
-
-
-
-
- Lancaster Environment Centre, Library Avenue, Bailrigg
-
-
- Lancaster
-
-
- Lancashire
-
-
- LA1 4AP
-
-
- United Kingdom
-
-
- danm@ceh.ac.uk
-
-
-
-
-
-
- pointOfContact
-
-
-
-
-
-
- Parr Section
-
-
-
-
-
-
- enquiries@ceh.ac.uk
-
-
-
-
-
-
- resourceProvider
-
-
-
-
-
-
-
-
-
- Not planned
-
-
-
-
-
-
- https://gateway.ceh.ac.uk:443/smartEditor/preview/82a0f4a1-01ff-4ed1-853e-224d8404b3fd.png
-
-
- thumbnail preview
-
-
-
-
-
-
- Habitats and biotopes
-
-
-
-
-
-
-
- GEMET - INSPIRE themes, version 1.0
-
-
-
-
- 2012-10-04
-
-
-
-
-
-
-
-
-
-
-
-
-
- Great Britain
-
-
- England
-
-
- Scotland Wales
-
-
-
-
-
-
-
-
-
- CEH Biodiversity Programme
-
-
-
-
-
-
-
-
-
- CEH Project NEC03259
-
-
- NERC_DDC
-
-
- LCM2007
-
-
-
-
-
-
-
-
-
- Test Link
-
-
-
-
-
-
- Refer to: R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
-
-
-
-
-
-
-
-
- Licence terms and conditions apply
-
-
- Test Link
-
-
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
- environment
-
-
- imageryBaseMapsEarthCover
-
-
-
-
-
-
-
-
- ENG
-
-
- ISO 3166
-
-
- 2006, edition 2
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- WLS
-
-
- ISO 3166
-
-
- 2006, edition 2
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- SCT
-
-
- ISO 3166
-
-
- 2006, edition 2
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2000-10-01
- 2012-10-01
-
-
-
-
-
-
-
-
-
-
-
- true
-
-
- -9.227701
-
-
- 2.687637
-
-
- 49.83726
-
-
- 60.850441
-
-
-
-
-
-
- Some text
-
-
-
-
-
-
-
-
-
-
- Centre for Ecology & Hydrology
-
-
-
-
-
-
- Maclean Building, Benson Lane, Crowmarsh Gifford
-
-
- Wallingford
-
-
- Oxfordshire
-
-
- OX10 8BB
-
-
- United Kingdom
-
-
- enquiries@ceh.ac.uk
-
-
-
-
-
-
- distributor
-
-
-
-
-
-
-
-
-
-
- http://www.ceh.ac.uk/LandCoverMap2007.html
-
-
- Essential technical details
-
-
- Link to further technical details about this data
-
-
-
-
-
-
-
-
-
-
-
-
-
- http://www.countrysidesurvey.org.uk/
-
-
- Countryside Survey website
-
-
- Countryside Survey website
-
-
-
-
-
-
-
-
-
-
-
-
-
- http://gateway.ceh.ac.uk/download?fileIdentifier=82a0f4a1-01ff-4ed1-853e-224d8404b3fd
-
-
- Dataset download
-
-
- Link to download this dataset
-
-
-
-
-
-
-
-
-
-
-
-
-
- http://lasigprod.nerc-lancaster.ac.uk/arcgis/services/LandCoverMap/LCM2007_GB_1k_DOM_AGG/MapServer/WMSServer?request=GetCapabilities&service=WMS
-
-
- Web Map Service
-
-
- A web map service (WMS) is available for this data
-
-
-
-
-
-
-
-
-
-
- http://cs2007.ceh.ac.uk/sites/default/files/LCM2007%20Final%20Report%20-%20vCS%20Web.pdf
-
-
- CS Technical Report
-
-
- Final Report for LCM2007 - the new UK Land Cover Map
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- LCM2007 uses a spatial framework based on OS MasterMap (R). MasterMap was generalised to remove unnecessary detail, then the framework was segmented according to the underlying satellite data to split areas of non-uniform landscape. The data was classified according to a parcel-based supervised maximum likelihood classification procedure. The raster products are derived from the vector products.
-
-
-
-
-
-
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/03_Dataset_Invalid_GEMINI_Missing_Keyword.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/03_Dataset_Invalid_GEMINI_Missing_Keyword.xml
deleted file mode 100644
index 197de31..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/03_Dataset_Invalid_GEMINI_Missing_Keyword.xml
+++ /dev/null
@@ -1,551 +0,0 @@
-
-
-
-test-record-03
-
-
-
-
-
- test-record-08
-
-
-
-
-
- dataset
-
-
-
-
- Centre for Ecology & Hydrology
-
-
-
-
-
-
- Lancaster Environment Centre, Library Avenue, Bailrigg
-
-
- Lancaster
-
-
- Lancashire
-
-
- LA1 4AP
-
-
- United Kingdom
-
-
- enquiries@ceh.ac.uk
-
-
-
-
-
-
- pointOfContact
-
-
-
-
- 2012-10-05
-
-
- INSPIRE Implementing Rules for Metadata
-
-
- 1.2
-
-
-
-
-
-
- 27700
-
-
- urn:ogc:def:crs:EPSG
-
-
- 6.11.2
-
-
-
-
-
-
-
-
-
-
- Test Record 03 Dataset Missing Keyword
-
-
- Also known as xxx
-
-
-
-
- 2011-04-08
-
-
-
-
-
-
-
-
-
- 1300392329603
-
-
- CEH:EIDC:
-
-
- 1
-
-
-
-
- R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
-
-
-
-
- This test record should pass XSD validation and pass 19139 schematron, but fail GEMINI schematron because it has no Keyword element.
-
-
-
-
-
-
-
- Morton, D
-
-
- Centre for Ecology & Hydrology
-
-
-
-
-
-
- Lancaster Environment Centre, Library Avenue, Bailrigg
-
-
- Lancaster
-
-
- Lancashire
-
-
- LA1 4AP
-
-
- United Kingdom
-
-
- danm@ceh.ac.uk
-
-
-
-
-
-
- pointOfContact
-
-
-
-
-
-
- Parr Section
-
-
-
-
-
-
- enquiries@ceh.ac.uk
-
-
-
-
-
-
- resourceProvider
-
-
-
-
-
-
-
-
-
- Not planned
-
-
-
-
-
-
- https://gateway.ceh.ac.uk:443/smartEditor/preview/82a0f4a1-01ff-4ed1-853e-224d8404b3fd.png
-
-
- thumbnail preview
-
-
-
-
-
-
- Refer to: R.D. Morton, C. Rowland, C. Wood, L. Meek, C. Marston, G. Smith, R. Wadsworth, I. Simpson. July 2011 CS Technical Report No 11/07: Final Report for LCM2007 - the new UK land cover map. NERC/Centre for Ecology & Hydrology (CEH Project Number NEC03259).
-
-
-
-
-
-
-
-
- Licence terms and conditions apply
-
-
- Test Link
-
-
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
- environment
-
-
- imageryBaseMapsEarthCover
-
-
-
-
-
-
-
-
- ENG
-
-
- ISO 3166
-
-
- 2006, edition 2
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- WLS
-
-
- ISO 3166
-
-
- 2006, edition 2
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- SCT
-
-
- ISO 3166
-
-
- 2006, edition 2
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2000-10-01
- 2012-10-01
-
-
-
-
-
-
-
-
-
-
-
- true
-
-
- -9.227701
-
-
- 2.687637
-
-
- 49.83726
-
-
- 60.850441
-
-
-
-
-
-
- Some text
-
-
-
-
-
-
-
-
- GeoTIFF
-
-
- 1.0
-
-
-
-
-
-
-
-
- Centre for Ecology & Hydrology
-
-
-
-
-
-
- Maclean Building, Benson Lane, Crowmarsh Gifford
-
-
- Wallingford
-
-
- Oxfordshire
-
-
- OX10 8BB
-
-
- United Kingdom
-
-
- enquiries@ceh.ac.uk
-
-
-
-
-
-
- distributor
-
-
-
-
-
-
-
-
-
-
- http://www.ceh.ac.uk/LandCoverMap2007.html
-
-
- Essential technical details
-
-
- Link to further technical details about this data
-
-
-
-
-
-
-
-
-
-
-
-
-
- http://www.countrysidesurvey.org.uk/
-
-
- Countryside Survey website
-
-
- Countryside Survey website
-
-
-
-
-
-
-
-
-
-
-
-
-
- http://gateway.ceh.ac.uk/download?fileIdentifier=82a0f4a1-01ff-4ed1-853e-224d8404b3fd
-
-
- Dataset download
-
-
- Link to download this dataset
-
-
-
-
-
-
-
-
-
-
-
-
-
- http://lasigprod.nerc-lancaster.ac.uk/arcgis/services/LandCoverMap/LCM2007_GB_1k_DOM_AGG/MapServer/WMSServer?request=GetCapabilities&service=WMS
-
-
- Web Map Service
-
-
- A web map service (WMS) is available for this data
-
-
-
-
-
-
-
-
-
-
- http://cs2007.ceh.ac.uk/sites/default/files/LCM2007%20Final%20Report%20-%20vCS%20Web.pdf
-
-
- CS Technical Report
-
-
- Final Report for LCM2007 - the new UK Land Cover Map
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- LCM2007 uses a spatial framework based on OS MasterMap (R). MasterMap was generalised to remove unnecessary detail, then the framework was segmented according to the underlying satellite data to split areas of non-uniform landscape. The data was classified according to a parcel-based supervised maximum likelihood classification procedure. The raster products are derived from the vector products.
-
-
-
-
-
-
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/04_Dataset_Valid.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/04_Dataset_Valid.xml
deleted file mode 100644
index 8fbb4b2..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/04_Dataset_Valid.xml
+++ /dev/null
@@ -1,637 +0,0 @@
-  [Deleted GEMINI 2.1 validation fixture 04_Dataset_Valid.xml: dataset record
-   "Test Record 04 Dataset Valid" (fileIdentifier test-record-04, parent test-record-08)
-   describing the CEH LCM2007 land cover map, with CEH Lancaster/Wallingford contacts,
-   the GEMET keyword "Habitats and biotopes", a Great Britain bounding box
-   (-9.227701, 2.687637, 49.83726, 60.850441), a 2000-10-01 to 2012-10-01 temporal
-   extent, GeoTIFF 1.0 distribution format, download/WMS/report links and the LCM2007
-   OS MasterMap lineage statement. Abstract: "This test record should pass all
-   validation."]
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/05_Series_Invalid_XSD_No_Such_Element.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/05_Series_Invalid_XSD_No_Such_Element.xml
deleted file mode 100644
index 14e53ed..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/05_Series_Invalid_XSD_No_Such_Element.xml
+++ /dev/null
@@ -1,594 +0,0 @@
-  [Deleted GEMINI 2.1 validation fixture 05_Series_Invalid_XSD_No_Such_Element.xml:
-   series record "Test Record 05 Series No Such Element Name" (fileIdentifier
-   test-record-05) carrying the same CEH LCM2007 metadata as record 04. Abstract: "This
-   test record should fail XSD validation, as it has an element present which is not
-   allowed by the schema."]
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/06_Series_Invalid_19139_Missing_Data_Format.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/06_Series_Invalid_19139_Missing_Data_Format.xml
deleted file mode 100644
index 7bf3ce3..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/06_Series_Invalid_19139_Missing_Data_Format.xml
+++ /dev/null
@@ -1,584 +0,0 @@
-  [Deleted GEMINI 2.1 validation fixture 06_Series_Invalid_19139_Missing_Data_Format.xml:
-   series record "Test Record 06 Series Missing Data Format" (fileIdentifier
-   test-record-06), the same LCM2007 metadata without a distribution format block.
-   Abstract: "This test record should pass XSD validation, but fail 19139 schematron,
-   because it has no Distribution Format element."]
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/07_Series_Invalid_GEMINI_Missing_Keyword.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/07_Series_Invalid_GEMINI_Missing_Keyword.xml
deleted file mode 100644
index 34aaeae..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/07_Series_Invalid_GEMINI_Missing_Keyword.xml
+++ /dev/null
@@ -1,509 +0,0 @@
-  [Deleted GEMINI 2.1 validation fixture 07_Series_Invalid_GEMINI_Missing_Keyword.xml:
-   series record "Test Record 07 Series Missing Keyword" (fileIdentifier test-record-07),
-   the same LCM2007 metadata without the descriptive keywords block. Abstract: "This
-   test record should pass XSD validation and pass 19139 schematron, but fail GEMINI
-   schematron because it has no Keyword element."]
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/08_Series_Valid.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/08_Series_Valid.xml
deleted file mode 100644
index ef6872d..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/08_Series_Valid.xml
+++ /dev/null
@@ -1,595 +0,0 @@
-  [Deleted GEMINI 2.1 validation fixture 08_Series_Valid.xml: series record
-   "Test Record 08 Series Valid" (fileIdentifier test-record-08), the series-level
-   counterpart of record 04 with the same LCM2007 metadata. Abstract: "This test Series
-   record should pass all validation."]
\ No newline at end of file
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/09_Service_Invalid_No_Such_Element.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/09_Service_Invalid_No_Such_Element.xml
deleted file mode 100644
index 1277f12..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/09_Service_Invalid_No_Such_Element.xml
+++ /dev/null
@@ -1,537 +0,0 @@
-  [Deleted GEMINI 2.1 validation fixture 09_Service_Invalid_No_Such_Element.xml: service
-   record "Test Record 09 Service Invalid No Such Element" (fileIdentifier
-   test-record-09) describing the LCM2007 web map service, including GetCapabilities and
-   GetMap operation metadata, the CEH Information Gateway licence text and Ordnance
-   Survey/third-party copyright acknowledgements. Abstract: "This test Service record
-   should fail xsd validation, as it contains an element not allowed under the schema."]
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/10_Service_Invalid_19139_Level_Description.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/10_Service_Invalid_19139_Level_Description.xml
deleted file mode 100644
index f3522a1..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/10_Service_Invalid_19139_Level_Description.xml
+++ /dev/null
@@ -1,530 +0,0 @@
-  [Deleted GEMINI 2.1 validation fixture 10_Service_Invalid_19139_Level_Description.xml:
-   service record "Test Record 10 Service Invalid Missing Data Quality Info Level
-   Description" (fileIdentifier test-record-10) with the same LCM2007 WMS metadata as
-   record 09. Abstract: "This test Service record should pass XSD validation, but fail
-   19139 constraint validation, as it is a Service record which does not have a Level
-   Description element in the dataQualityInfo."]
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/11_Service_Invalid_GEMINI_Service_Type.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/11_Service_Invalid_GEMINI_Service_Type.xml
deleted file mode 100644
index 6da753e..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/11_Service_Invalid_GEMINI_Service_Type.xml
+++ /dev/null
@@ -1,537 +0,0 @@
-  [Deleted GEMINI 2.1 validation fixture 11_Service_Invalid_GEMINI_Service_Type.xml:
-   service record "Test Record 11 Service Invalid GEMINI Service Type" (fileIdentifier
-   test-record-11), the same LCM2007 WMS metadata but with a service type of "unknown"
-   rather than "view". Abstract: "This test Service record should pass XSD validation
-   and 19139 Schematron validation, but fail GEMINI2.1 Schematron. It has an element
-   value for Service Type which is not in the list of allowed values."]
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/12_Service_Valid.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/12_Service_Valid.xml
deleted file mode 100644
index d413261..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/12_Service_Valid.xml
+++ /dev/null
@@ -1,537 +0,0 @@
-  [Deleted GEMINI 2.1 validation fixture 12_Service_Valid.xml: service record
-   "Test Record 12 Service Valid" (fileIdentifier test-record-12), the valid counterpart
-   of the LCM2007 WMS service records above. Abstract: "This test Service record should
-   pass all validation."]
diff --git a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/13_Dataset_Invalid_Element_srv.xml b/ckanext/spatial/tests/nose/xml/gemini2.1/validation/13_Dataset_Invalid_Element_srv.xml
deleted file mode 100644
index 41a75b3..0000000
--- a/ckanext/spatial/tests/nose/xml/gemini2.1/validation/13_Dataset_Invalid_Element_srv.xml
+++ /dev/null
@@ -1,610 +0,0 @@
-  [Deleted GEMINI 2.1 validation fixture 13_Dataset_Invalid_Element_srv.xml: dataset
-   record "Test Record 13" (fileIdentifier test-record-13) that mixes LCM2007 dataset
-   metadata with srv service elements (GetCapabilities/GetMap operation metadata), as
-   the filename indicates. Abstract: "This test Dataset record should fail."]
\ No newline at end of file
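The GEMINI 2.1 fixtures removed above are named after the validation stage they are meant
to trip: records 05 and 09 are built to fail plain XSD schema validation, 06 and 10 to
fail the ISO 19139 schematron constraints, 07 and 11 to fail the GEMINI 2.1 schematron
rules, 04, 08 and 12 to pass everything, and 13 mixes dataset and srv service elements so
that it fails as well. Purely as an illustration of that staged check, and not as code
from this extension, a chain along those lines can be written with lxml; the schema and
schematron file names below are placeholders:

    # Illustrative sketch only: chains the three stages the deleted fixtures
    # describe (XSD -> ISO 19139 schematron -> GEMINI 2.1 schematron).
    # The .xsd/.sch paths are placeholders, not files shipped by ckanext-spatial.
    from lxml import etree, isoschematron

    def staged_validation(record_path):
        doc = etree.parse(record_path)

        # Stage 1: XSD schema validation
        xsd = etree.XMLSchema(etree.parse("gmd/gmd.xsd"))
        if not xsd.validate(doc):
            return "failed XSD: %s" % xsd.error_log.last_error

        # Stage 2: ISO 19139 schematron constraints
        iso_constraints = isoschematron.Schematron(
            etree.parse("iso19139_constraints.sch"))
        if not iso_constraints.validate(doc):
            return "failed ISO 19139 schematron"

        # Stage 3: GEMINI 2.1 schematron rules
        gemini_rules = isoschematron.Schematron(etree.parse("gemini-2.1.sch"))
        if not gemini_rules.validate(doc):
            return "failed GEMINI 2.1 schematron"

        return "valid"

Run over 08_Series_Valid.xml such a chain would reach the final return, while
06_Series_Invalid_19139_Missing_Data_Format.xml would stop at the second stage.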
diff --git a/ckanext/spatial/tests/nose/xml/iso19139/dataset-invalid.xml b/ckanext/spatial/tests/nose/xml/iso19139/dataset-invalid.xml
deleted file mode 100644
index a299e03..0000000
--- a/ckanext/spatial/tests/nose/xml/iso19139/dataset-invalid.xml
+++ /dev/null
@@ -1,498 +0,0 @@
-  [Deleted ISO 19139 fixture dataset-invalid.xml: an intentionally invalid dataset
-   record for "Country Parks (Scotland)" (identifiers "test-record" and "test-dataset",
-   alternative title CPK) from Scottish Natural Heritage, with Lachlan Renwick as point
-   of contact, a Scotland bounding box, ESRI Shapefile/KML/GML distribution formats,
-   "Reference and PSMA Only" use constraints and SNHi/gateway online resources.]
diff --git a/ckanext/spatial/tests/nose/xml/iso19139/dataset.xml b/ckanext/spatial/tests/nose/xml/iso19139/dataset.xml
deleted file mode 100644
index 885a9b5..0000000
--- a/ckanext/spatial/tests/nose/xml/iso19139/dataset.xml
+++ /dev/null
@@ -1,495 +0,0 @@
-  [Deleted ISO 19139 fixture dataset.xml: the valid counterpart of the record above,
-   dataset "Country Parks (Scotland)" (identifier test-dataset-1) from Scottish Natural
-   Heritage, ending with the note that Country Park is not a statutory designation and
-   that the Countryside (Scotland) Act 1967 Section 48 gives local authorities power to
-   assess and review the need for Country Parks in consultation with SNH.]
diff --git a/ckanext/spatial/tests/nose/xml/wms/capabilities.xml b/ckanext/spatial/tests/nose/xml/wms/capabilities.xml
deleted file mode 100644
index 1f62497..0000000
--- a/ckanext/spatial/tests/nose/xml/wms/capabilities.xml
+++ /dev/null
@@ -1,127 +0,0 @@
- [Deleted WMS GetCapabilities test document (XML markup lost in extraction). Recoverable content:
-  service OGC:WMS, title "Wikipedia articles", abstract "This service provides access to a subset of the georeferenced articles of the English version of Wikipedia. The features were extracted from the DBpedia Geographic Coordinates dataset corresponding to the English version of Wikipedia. The original Wikipedia dump from where it was derived was generated on October 2010.";
-  keywords Wikipedia, Articles, World; fees "Free"; access constraints "No restrictions, but please don't abuse the server!";
-  GetCapabilities format application/vnd.ogc.wms_xml; GetMap formats image/gif, image/png, image/png; mode=24bit, image/jpeg, image/vnd.wap.wbmp, image/tiff, image/svg+xml;
-  GetFeatureInfo formats text/plain, application/vnd.ogc.gml; further operations returning text/xml and legend graphics in image/gif, image/png, image/png; mode=24bit, image/jpeg, image/vnd.wap.wbmp;
-  exception formats application/vnd.ogc.se_xml, application/vnd.ogc.se_inimage, application/vnd.ogc.se_blank;
-  root layer "wikipedia" (EPSG:4326, EPSG:3857) with sublayer "articles" titled "Wikipedia articles".]
diff --git a/ckanext/spatial/tests/nose/xml_file_server.py b/ckanext/spatial/tests/nose/xml_file_server.py
deleted file mode 100644
index 74f4fbf..0000000
--- a/ckanext/spatial/tests/nose/xml_file_server.py
+++ /dev/null
@@ -1,36 +0,0 @@
-from __future__ import print_function
-
-import os
-
-try:
- from http.server import SimpleHTTPRequestHandler
- from socketserver import TCPServer
-except ImportError:
- from SimpleHTTPServer import SimpleHTTPRequestHandler
- from SocketServer import TCPServer
-
-from threading import Thread
-
-
-PORT = 8999
-
-
-def serve(port=PORT):
- '''Serves test XML files over HTTP'''
-
- # Make sure we serve from the tests' XML directory
- os.chdir(os.path.join(os.path.dirname(os.path.abspath(__file__)),
- 'xml'))
-
- Handler = SimpleHTTPRequestHandler
-
- class TestServer(TCPServer):
- allow_reuse_address = True
-
- httpd = TestServer(("", PORT), Handler)
-
- print('Serving test HTTP server at port', PORT)
-
- httpd_thread = Thread(target=httpd.serve_forever)
- httpd_thread.setDaemon(True)
- httpd_thread.start()
diff --git a/conftest.py b/conftest.py
index fae1fc4..89d0a32 100644
--- a/conftest.py
+++ b/conftest.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
pytest_plugins = [
- u'ckan.tests.pytest_ckan.ckan_setup',
- u'ckan.tests.pytest_ckan.fixtures',
+ u'ckanext.spatial.tests.ckan_setup',
+ u'ckanext.spatial.tests.fixtures',
]
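
The conftest.py hunk above points pytest at bootstrap modules bundled with the extension instead of CKAN core's. A minimal sketch of what such modules could look like, assuming they simply re-export CKAN's own pytest machinery so the extension is free to diverge later for other CKAN layouts (the re-export approach and the file contents are an assumption, not the actual implementation):

# ckanext/spatial/tests/ckan_setup.py -- hypothetical shim
# Re-export CKAN's pytest application setup hooks unchanged.
from ckan.tests.pytest_ckan.ckan_setup import *  # noqa

# ckanext/spatial/tests/fixtures.py -- hypothetical shim
# Re-export CKAN's stock fixtures (clean_db, app, ...) unchanged.
from ckan.tests.pytest_ckan.fixtures import *  # noqa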
From 411e20da50523a65aeed37646a4302b8415e1bde Mon Sep 17 00:00:00 2001
From: Sergey Motornyuk
Date: Wed, 6 May 2020 15:29:16 +0300
Subject: [PATCH 021/139] Update travis scripts
---
bin/travis-build.bash | 15 +++++++++++++--
bin/{travis-run.sh => travis-run.bash} | 0
2 files changed, 13 insertions(+), 2 deletions(-)
rename bin/{travis-run.sh => travis-run.bash} (100%)
diff --git a/bin/travis-build.bash b/bin/travis-build.bash
index 3a642f7..f041cc3 100644
--- a/bin/travis-build.bash
+++ b/bin/travis-build.bash
@@ -82,7 +82,13 @@ cd ckanext-harvest
python setup.py develop
pip install -r pip-requirements.txt
-paster harvester initdb -c ../ckan/test-core.ini
+if [ $CKANVERSION \< '2.9' ]
+then
+ paster harvester initdb -c ../ckan/test-core.ini
+else
+ ckan -c test-core.ini harvester initdb
+fi
+
cd -
echo "Installing ckanext-spatial and its requirements..."
@@ -94,6 +100,11 @@ echo "Moving test.ini into a subdir..."
mkdir subdir
mv test.ini subdir
-paster spatial initdb -c subdir/test.ini
+if [ $CKANVERSION \< '2.9' ]
+then
+ paster spatial initdb -c subdir/test.ini
+else
+ ckan -c test-core.ini spatial initdb
+fi
echo "travis-build.bash is done."
diff --git a/bin/travis-run.sh b/bin/travis-run.bash
similarity index 100%
rename from bin/travis-run.sh
rename to bin/travis-run.bash
From 4b7cd9fca7ff5f9eccec0c1d7513224824ef8554 Mon Sep 17 00:00:00 2001
From: Sergey Motornyuk
Date: Wed, 6 May 2020 15:59:31 +0300
Subject: [PATCH 022/139] rename clean_db fixture
---
bin/travis-build.bash | 11 +++--------
ckanext/spatial/tests/conftest.py | 2 +-
ckanext/spatial/tests/functional/test_package.py | 2 +-
ckanext/spatial/tests/functional/test_widgets.py | 2 +-
ckanext/spatial/tests/lib/test_spatial.py | 4 ++--
ckanext/spatial/tests/model/test_package_extent.py | 2 +-
6 files changed, 9 insertions(+), 14 deletions(-)
diff --git a/bin/travis-build.bash b/bin/travis-build.bash
index f041cc3..8b639bb 100644
--- a/bin/travis-build.bash
+++ b/bin/travis-build.bash
@@ -82,13 +82,6 @@ cd ckanext-harvest
python setup.py develop
pip install -r pip-requirements.txt
-if [ $CKANVERSION \< '2.9' ]
-then
- paster harvester initdb -c ../ckan/test-core.ini
-else
- ckan -c test-core.ini harvester initdb
-fi
-
cd -
echo "Installing ckanext-spatial and its requirements..."
@@ -102,9 +95,11 @@ mv test.ini subdir
if [ $CKANVERSION \< '2.9' ]
then
+ paster harvester initdb -c subdir/test.ini
paster spatial initdb -c subdir/test.ini
else
- ckan -c test-core.ini spatial initdb
+ ckan -c subdir/test.ini harvester initdb
+ ckan -c subdir/test.ini spatial initdb
fi
echo "travis-build.bash is done."
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index cf40a5f..d936b88 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -35,7 +35,7 @@ def create_postgis_tables():
@pytest.fixture
-def clean_db(reset_db):
+def spatial_clean_db(reset_db):
reset_db()
# This will create the PostGIS tables (geometry_columns and
diff --git a/ckanext/spatial/tests/functional/test_package.py b/ckanext/spatial/tests/functional/test_package.py
index c39c860..8a83805 100644
--- a/ckanext/spatial/tests/functional/test_package.py
+++ b/ckanext/spatial/tests/functional/test_package.py
@@ -11,7 +11,7 @@ from ckanext.spatial.model import PackageExtent
from ckanext.spatial.tests.base import SpatialTestBase
-@pytest.mark.usefixtures("clean_db")
+@pytest.mark.usefixtures("spatial_clean_db")
class TestSpatialExtra(SpatialTestBase):
def test_spatial_extra_base(self, app):
diff --git a/ckanext/spatial/tests/functional/test_widgets.py b/ckanext/spatial/tests/functional/test_widgets.py
index 480fba9..57bd1ab 100644
--- a/ckanext/spatial/tests/functional/test_widgets.py
+++ b/ckanext/spatial/tests/functional/test_widgets.py
@@ -7,7 +7,7 @@ import ckan.tests.factories as factories
class TestSpatialWidgets(SpatialTestBase):
- @pytest.mark.usefixtures("clean_db")
+ @pytest.mark.usefixtures("spatial_clean_db")
def test_dataset_map(self, app):
dataset = factories.Dataset(
extras=[
diff --git a/ckanext/spatial/tests/lib/test_spatial.py b/ckanext/spatial/tests/lib/test_spatial.py
index 6ae019f..409ed70 100644
--- a/ckanext/spatial/tests/lib/test_spatial.py
+++ b/ckanext/spatial/tests/lib/test_spatial.py
@@ -38,7 +38,7 @@ def create_package(**package_dict):
return context.get("id")
-@pytest.mark.usefixtures("clean_db")
+@pytest.mark.usefixtures("spatial_clean_db")
class TestCompareGeometries(SpatialTestBase):
def _get_extent_object(self, geometry):
if isinstance(geometry, six.string_types):
@@ -99,7 +99,7 @@ class SpatialQueryTestBase(SpatialTestBase):
maxy = 1
@pytest.fixture(autouse=True)
- def initial_data(self, clean_db):
+ def initial_data(self, spatial_clean_db):
for fixture_x in self.fixtures_x:
bbox = self.x_values_to_bbox(fixture_x)
bbox_geojson = bbox_2_geojson(bbox)
diff --git a/ckanext/spatial/tests/model/test_package_extent.py b/ckanext/spatial/tests/model/test_package_extent.py
index eb6cd9c..5a28598 100644
--- a/ckanext/spatial/tests/model/test_package_extent.py
+++ b/ckanext/spatial/tests/model/test_package_extent.py
@@ -12,7 +12,7 @@ from ckanext.spatial.geoalchemy_common import WKTElement, legacy_geoalchemy
from ckanext.spatial.tests.base import SpatialTestBase
-@pytest.mark.usefixtures("clean_db")
+@pytest.mark.usefixtures("spatial_clean_db")
class TestPackageExtent(SpatialTestBase):
def test_create_extent(self):
From bc8cdc37e13628ac435b1223c04ef55b0876f81e Mon Sep 17 00:00:00 2001
From: Sergey Motornyuk
Date: Wed, 6 May 2020 17:13:06 +0300
Subject: [PATCH 023/139] add pycsw
---
.travis.yml | 4 ++--
bin/travis-build.bash | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index bf79f5d..fe6cc7a 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -9,10 +9,10 @@ services:
- redis
- postgresql
addons:
- postgresql: 9.6
+ postgresql: 11.2
apt:
packages:
- - postgresql-9.6-postgis-2.3
+ - postgresql-11.2-postgis-2.5
script: bash bin/travis-run.bash
before_install:
diff --git a/bin/travis-build.bash b/bin/travis-build.bash
index 8b639bb..3640d28 100644
--- a/bin/travis-build.bash
+++ b/bin/travis-build.bash
@@ -87,7 +87,7 @@ cd -
echo "Installing ckanext-spatial and its requirements..."
pip install -r pip-requirements.txt
python setup.py develop
-
+pip install pycsw
echo "Moving test.ini into a subdir..."
mkdir subdir
From 8ea5312ff132dd06d76fdc905bf1d439df77d9a1 Mon Sep 17 00:00:00 2001
From: Sergey Motornyuk
Date: Wed, 6 May 2020 17:19:09 +0300
Subject: [PATCH 024/139] changes in db init
---
.travis.yml | 4 ++--
bin/travis-build.bash | 5 ++++-
2 files changed, 6 insertions(+), 3 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index fe6cc7a..bf79f5d 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -9,10 +9,10 @@ services:
- redis
- postgresql
addons:
- postgresql: 11.2
+ postgresql: 9.6
apt:
packages:
- - postgresql-11.2-postgis-2.5
+ - postgresql-9.6-postgis-2.3
script: bash bin/travis-run.bash
before_install:
diff --git a/bin/travis-build.bash b/bin/travis-build.bash
index 3640d28..1bad2d5 100644
--- a/bin/travis-build.bash
+++ b/bin/travis-build.bash
@@ -81,6 +81,10 @@ git clone https://github.com/ckan/ckanext-harvest
cd ckanext-harvest
python setup.py develop
pip install -r pip-requirements.txt
+if [ $CKANVERSION \< '2.9' ]
+then
+ paster harvester initdb -c ../ckan/test-core.ini
+fi
cd -
@@ -95,7 +99,6 @@ mv test.ini subdir
if [ $CKANVERSION \< '2.9' ]
then
- paster harvester initdb -c subdir/test.ini
paster spatial initdb -c subdir/test.ini
else
ckan -c subdir/test.ini harvester initdb
From 920c7258f3339bd95411650a6cf9deb95d1547ff Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 14:37:38 +0900
Subject: [PATCH 025/139] WIP: ci test
---
.github/workflows/ci.yml | 99 ++++++++++++++++++++++++++++++++++++++++
test-github-actions.ini | 62 +++++++++++++++++++++++++
2 files changed, 161 insertions(+)
create mode 100644 .github/workflows/ci.yml
create mode 100644 test-github-actions.ini
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..2336adb
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,99 @@
+name: ckanext-spatial ci
+on: [push, pull_request]
+
+jobs:
+ # Label of the container job
+ container-job:
+ # Containers must run in Linux based operating systems
+ runs-on: ubuntu-latest
+ # Docker Hub image that `container-job` executes in
+ container:
+ image: python:3-stretch
+ env:
+ CKAN_DATASTORE_POSTGRES_DB: datastore_test
+ CKAN_DATASTORE_POSTGRES_READ_USER: datastore_read
+ CKAN_DATASTORE_POSTGRES_READ_PWD: pass
+ CKAN_DATASTORE_POSTGRES_WRITE_USER: datastore_write
+ CKAN_DATASTORE_POSTGRES_WRITE_PWD: pass
+ CKAN_POSTGRES_DB: ckan_test
+ CKAN_POSTGRES_USER: ckan_default
+ CKAN_POSTGRES_PWD: pass
+ PGPASSWORD: ckan
+
+ # Service containers to run with `container-job`
+ services:
+ # Label used to access the service container
+ ckan-postgres:
+ # Docker Hub image
+ image: postgis/postgis:12-3.0
+ # Provide the user and password for postgres
+ env:
+ POSTGRES_PASSWORD: ckan
+ POSTGRES_USER: ckan
+ # Set health checks to wait until postgres has started
+ options: >-
+ --health-cmd pg_isready
+ --health-interval 10s
+ --health-timeout 5s
+ --health-retries 5
+ ckan-redis:
+ # Docker Hub image
+ image: redis:3
+
+ steps:
+ # Downloads a copy of the code in your repository before running CI tests
+ - name: Check out repository code
+ uses: actions/checkout@v2
+
+ - name: Install node js and dependency
+ run: |
+ curl -sL https://deb.nodesource.com/setup_10.x | bash -
+ apt install -y nodejs
+ apt install -y libgtk2.0-0 libgtk-3-0 libnotify-dev libgconf-2-4 libnss3 libxss1 libasound2 libxtst6 xauth xvfb
+ apt install -y postgresql-client solr-jetty openjdk-8-jdk
+
+ - name: install and setup ckan
+ run: |
+ git clone https://github.com/ckan/ckan
+ cd ckan
+ pip install -r requirement-setuptools.txt
+ pip install -r requirements.txt
+ pip install -r dev-requirements.txt
+ python setup.py develop
+ # SOLR config
+ cp config/solr/schema.xml /etc/solr/config/schema.xml
+ service jetty9 restart || true # erroring out but does seem to work
+ # Database Creation
+ psql --host=ckan-postgres --username=ckan --command="CREATE USER ${CKAN_POSTGRES_USER} WITH PASSWORD '${CKAN_POSTGRES_PWD}' NOSUPERUSER NOCREATEDB NOCREATEROLE;"
+ createdb --encoding=utf-8 --host=ckan-postgres --username=ckan --owner=${CKAN_POSTGRES_USER} ${CKAN_POSTGRES_DB}
+ psql --host=ckan-postgres --username=ckan --command="CREATE USER ${CKAN_DATASTORE_POSTGRES_READ_USER} WITH PASSWORD '${CKAN_DATASTORE_POSTGRES_READ_PWD}' NOSUPERUSER NOCREATEDB NOCREATEROLE;"
+ psql --host=ckan-postgres --username=ckan --command="CREATE USER ${CKAN_DATASTORE_POSTGRES_WRITE_USER} WITH PASSWORD '${CKAN_DATASTORE_POSTGRES_WRITE_PWD}' NOSUPERUSER NOCREATEDB NOCREATEROLE;"
+ createdb --encoding=utf-8 --host=ckan-postgres --username=ckan --owner=${CKAN_DATASTORE_POSTGRES_WRITE_USER} ${CKAN_DATASTORE_POSTGRES_DB}
+
+ # Database Initialization
+ ckan -c test-core-circle-ci.ini datastore set-permissions | psql --host=ckan-postgres --username=ckan
+ ckan -c test-core-circle-ci.ini db init
+
+ - name: install ckanext-harvest
+ run: |
+ git clone https://github.com/ckan/ckanext-harvest
+ cd ckanext-harvest
+ python setup.py develop
+ pip install -r pip-requirements.txt
+
+ - name: install ckanext-spatial
+ run: |
+ pip install -r pip-requirements.txt
+ python setup.py develop
+ pip install pycsw
+
+ mkdir subdir
+ mv test-github-actions.ini subdir
+
+ ckan -c subdir/test-github-actions.ini harvester initdb
+ ckan -c subdir/test-github-actions.ini spatial initdb
+
+
+ - name: run ci
+ run: |
+ pytest --ckan-ini=subdir/test-github-actions.ini ckanext/spatial/tests
diff --git a/test-github-actions.ini b/test-github-actions.ini
new file mode 100644
index 0000000..294a51e
--- /dev/null
+++ b/test-github-actions.ini
@@ -0,0 +1,62 @@
+[DEFAULT]
+debug = false
+# Uncomment and replace with the address which should receive any error reports
+#email_to = you@yourdomain.com
+smtp_server = localhost
+error_email_from = paste@localhost
+
+[server:main]
+use = egg:Paste#http
+host = 0.0.0.0
+port = 5000
+
+
+[app:main]
+use = config:../ckan/test-core-circle-ci.ini
+ckan.legacy_templates = false
+ckan.plugins = test_spatial_plugin harvest spatial_metadata spatial_query spatial_harvest_metadata_api gemini_csw_harvester gemini_doc_harvester gemini_waf_harvester
+ckan.spatial.srid = 4326
+ckan.spatial.default_map_extent=-6.88,49.74,0.50,59.2
+ckan.spatial.testing = true
+ckan.spatial.validator.profiles = iso19139,constraints,gemini2
+ckan.harvest.mq.type = redis
+
+# NB: other test configuration should go in test-core.ini, which is
+# what the postgres tests use.
+
+package_new_return_url = http://test.ckan.net/dataset/?test=new
+package_edit_return_url = http://test.ckan.net/dataset/?test=edit
+
+
+# Logging configuration
+[loggers]
+keys = root, ckan, sqlalchemy
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+
+[logger_ckan]
+qualname = ckan
+handlers =
+level = INFO
+
+[logger_sqlalchemy]
+handlers =
+qualname = sqlalchemy.engine
+level = WARN
+
+[handler_console]
+class = StreamHandler
+args = (sys.stdout,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(asctime)s %(levelname)-5.5s [%(name)s] %(message)s
From e798694d38e62d8294c28eb3d1326bd0197fa614 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 14:46:12 +0900
Subject: [PATCH 026/139] WIP: github actions ci: fixes solr/schema.xml path
---
.github/workflows/ci.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 2336adb..a660a9e 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -61,7 +61,7 @@ jobs:
pip install -r dev-requirements.txt
python setup.py develop
# SOLR config
- cp config/solr/schema.xml /etc/solr/config/schema.xml
+ cp ckan/config/solr/schema.xml /etc/solr/config/schema.xml
service jetty9 restart || true # erroring out but does seem to work
# Database Creation
psql --host=ckan-postgres --username=ckan --command="CREATE USER ${CKAN_POSTGRES_USER} WITH PASSWORD '${CKAN_POSTGRES_PWD}' NOSUPERUSER NOCREATEDB NOCREATEROLE;"
From 81f8685287a751ab756de38ead64327cc4586430 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 14:50:54 +0900
Subject: [PATCH 027/139] WIP: github actions ci: fixes solr conf path
---
.github/workflows/ci.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index a660a9e..1476947 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -61,7 +61,7 @@ jobs:
pip install -r dev-requirements.txt
python setup.py develop
# SOLR config
- cp ckan/config/solr/schema.xml /etc/solr/config/schema.xml
+ cp ckan/config/solr/schema.xml /etc/solr/conf/schema.xml
service jetty9 restart || true # erroring out but does seem to work
# Database Creation
psql --host=ckan-postgres --username=ckan --command="CREATE USER ${CKAN_POSTGRES_USER} WITH PASSWORD '${CKAN_POSTGRES_PWD}' NOSUPERUSER NOCREATEDB NOCREATEROLE;"
From fbf948dc5b4ee79cb80a9f158aa7e290b8ef9b7c Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 15:02:15 +0900
Subject: [PATCH 028/139] WIP: github actions ci: use test-core.ini and copy
configurations from ckan/test-core-circle-ci.ini
---
test-github-actions.ini | 17 ++++++++++++++---
1 file changed, 14 insertions(+), 3 deletions(-)
diff --git a/test-github-actions.ini b/test-github-actions.ini
index 294a51e..a4bdd7c 100644
--- a/test-github-actions.ini
+++ b/test-github-actions.ini
@@ -12,7 +12,7 @@ port = 5000
[app:main]
-use = config:../ckan/test-core-circle-ci.ini
+use = config:../ckan/test-core.ini
ckan.legacy_templates = false
ckan.plugins = test_spatial_plugin harvest spatial_metadata spatial_query spatial_harvest_metadata_api gemini_csw_harvester gemini_doc_harvester gemini_waf_harvester
ckan.spatial.srid = 4326
@@ -27,8 +27,16 @@ ckan.harvest.mq.type = redis
package_new_return_url = http://test.ckan.net/dataset/?test=new
package_edit_return_url = http://test.ckan.net/dataset/?test=edit
+# copy from ckan/test-core-circle-ci.ini
+ckan.datastore.write_url = postgresql://datastore_write:pass@ckan-postgres/datastore_test
+ckan.datastore.read_url = postgresql://datastore_read:pass@ckan-postgres/datastore_test
+
+ckan.redis.url = redis://ckan-redis:6379/1
+
+sqlalchemy.url = postgresql://ckan_default:pass@ckan-postgres/ckan_test
+
+solr_url = http://localhost:8080/solr
-# Logging configuration
[loggers]
keys = root, ckan, sqlalchemy
@@ -50,7 +58,10 @@ level = INFO
[logger_sqlalchemy]
handlers =
qualname = sqlalchemy.engine
-level = WARN
+level = WARNING
+# "level = INFO" logs SQL queries.
+# "level = DEBUG" logs SQL queries and results.
+# "level = WARNING" logs neither.
[handler_console]
class = StreamHandler
From ac46868055ccf20ef0f4d59af93a8a8282cd0f0f Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 15:18:38 +0900
Subject: [PATCH 029/139] WIP: github actions ci: setup spatial tables
---
.github/workflows/ci.yml | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 1476947..69f3934 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -90,6 +90,11 @@ jobs:
mkdir subdir
mv test-github-actions.ini subdir
+ # setup spatial table
+ psql --host=ckan-postgres --username=ckan --command="CREATE EXTENSION postgis;"
+ psql --host=ckan-postgres --username=ckan --command="ALTER VIEW geometry_columns OWNER TO ckan_default;"
+ psql --host=ckan-postgres --username=ckan --command="ALTER TABLE spatial_ref_sys OWNER TO ckan_default;"
+
ckan -c subdir/test-github-actions.ini harvester initdb
ckan -c subdir/test-github-actions.ini spatial initdb
From a3f8b971a36614d1b6d722fec0fd51f7410f7373 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 15:39:32 +0900
Subject: [PATCH 030/139] WIP: github actions ci: remove create extension
---
.github/workflows/ci.yml | 1 -
1 file changed, 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 69f3934..e628a7d 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -91,7 +91,6 @@ jobs:
mv test-github-actions.ini subdir
# setup spatial table
- psql --host=ckan-postgres --username=ckan --command="CREATE EXTENSION postgis;"
psql --host=ckan-postgres --username=ckan --command="ALTER VIEW geometry_columns OWNER TO ckan_default;"
psql --host=ckan-postgres --username=ckan --command="ALTER TABLE spatial_ref_sys OWNER TO ckan_default;"
From db9ed4aaae4f1004e5dae56471dc6eea603ab32a Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 15:52:10 +0900
Subject: [PATCH 031/139] WIP: github actions ci: set table to alter view/table
---
.github/workflows/ci.yml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index e628a7d..5ea7c1f 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -91,8 +91,8 @@ jobs:
mv test-github-actions.ini subdir
# setup spatial table
- psql --host=ckan-postgres --username=ckan --command="ALTER VIEW geometry_columns OWNER TO ckan_default;"
- psql --host=ckan-postgres --username=ckan --command="ALTER TABLE spatial_ref_sys OWNER TO ckan_default;"
+ psql --host=ckan-postgres --username=ckan -d ckan_test --command="ALTER VIEW geometry_columns OWNER TO ckan_default;"
+ psql --host=ckan-postgres --username=ckan -d ckan_test --command="ALTER TABLE spatial_ref_sys OWNER TO ckan_default;"
ckan -c subdir/test-github-actions.ini harvester initdb
ckan -c subdir/test-github-actions.ini spatial initdb
From 078c18411be75ab04be5c1eda8803b613b18e370 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 15:57:02 +0900
Subject: [PATCH 032/139] WIP: github actions ci: install postgis to ckan_test
database
---
.github/workflows/ci.yml | 1 +
1 file changed, 1 insertion(+)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 5ea7c1f..6c66ae3 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -91,6 +91,7 @@ jobs:
mv test-github-actions.ini subdir
# setup spatial table
+ psql --host=ckan-postgres --username=ckan -d ckan_test --command="CREATE EXTENSION postgis;"
psql --host=ckan-postgres --username=ckan -d ckan_test --command="ALTER VIEW geometry_columns OWNER TO ckan_default;"
psql --host=ckan-postgres --username=ckan -d ckan_test --command="ALTER TABLE spatial_ref_sys OWNER TO ckan_default;"
From f11101659f12ecc42002680b9f3c7cc8dd9bd192 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 16:21:10 +0900
Subject: [PATCH 033/139] WIP: reset_db will fail to drop table
spatial_ref_sys, so use drop extension instead of drop table
---
ckanext/spatial/tests/conftest.py | 19 ++++++++++++++++++-
1 file changed, 18 insertions(+), 1 deletion(-)
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index d936b88..8ebd92a 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -23,6 +23,18 @@ def _execute_script(script_path):
Session.commit()
+def _create_postgis_extension():
+ conn = Session.connection()
+ conn.execute("create extension postgis;")
+ Session.commit()
+
+
+def _drop_postgis_extension():
+ conn = Session.connection()
+ conn.execute("drop extension postgis;")
+ Session.commit()
+
+
def create_postgis_tables():
scripts_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "scripts"
@@ -30,12 +42,17 @@ def create_postgis_tables():
if postgis_version()[:1] == "1":
_execute_script(os.path.join(scripts_path, "spatial_ref_sys.sql"))
_execute_script(os.path.join(scripts_path, "geometry_columns.sql"))
- else:
+ elif postgis_version()[:1] == "2":
_execute_script(os.path.join(scripts_path, "spatial_ref_sys.sql"))
+ else:
+ _create_postgis_extension()
@pytest.fixture
def spatial_clean_db(reset_db):
+ # reset_db will fail to drop table spatial_ref_sys
+ if postgis_version()[:1] == "3":
+ _drop_postgis_extension()
reset_db()
# This will create the PostGIS tables (geometry_columns and
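
The helpers added above run raw DDL through CKAN's scoped session and branch on postgis_version(), so PostGIS 3 databases are initialised via CREATE EXTENSION rather than the bundled SQL scripts. A compact sketch of the same execute-and-commit pattern, assuming a configured ckan.model.Session (the _run_sql name is illustrative only, not part of the patch):

from ckan.model import Session

def _run_sql(statement):
    # Execute one DDL statement on the shared connection and commit,
    # so the extension/tables are visible to later fixtures and tests.
    Session.connection().execute(statement)
    Session.commit()

# e.g. _run_sql("CREATE EXTENSION IF NOT EXISTS postgis;")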
From 44e9ec0afb69dd4c83a33f2ba167efb227533199 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 16:32:38 +0900
Subject: [PATCH 034/139] WIP: close all session before drop postgis extension
---
ckanext/spatial/tests/conftest.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index 8ebd92a..a0df5f0 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -51,8 +51,8 @@ def create_postgis_tables():
@pytest.fixture
def spatial_clean_db(reset_db):
# reset_db will fail to drop table spatial_ref_sys
- if postgis_version()[:1] == "3":
- _drop_postgis_extension()
+ Session.close_all()
+ _drop_postgis_extension()
reset_db()
# This will create the PostGIS tables (geometry_columns and
From 50311ac610f310a31798c94bc663978c57a7752d Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 17:03:39 +0900
Subject: [PATCH 035/139] WIP: added cascade option
---
ckanext/spatial/tests/conftest.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index a0df5f0..eb8c01f 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -31,7 +31,7 @@ def _create_postgis_extension():
def _drop_postgis_extension():
conn = Session.connection()
- conn.execute("drop extension postgis;")
+ conn.execute("drop extension postgis cascade;")
Session.commit()
From f8e3b708973fd8565e7db67ed0338236c1f11ab7 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 19:03:55 +0900
Subject: [PATCH 036/139] WIP: test: setup only create extension
---
.github/workflows/ci.yml | 4 +---
ckanext/spatial/tests/conftest.py | 2 +-
2 files changed, 2 insertions(+), 4 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 6c66ae3..21fe47a 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -90,10 +90,8 @@ jobs:
mkdir subdir
mv test-github-actions.ini subdir
- # setup spatial table
+ # setup postgis
psql --host=ckan-postgres --username=ckan -d ckan_test --command="CREATE EXTENSION postgis;"
- psql --host=ckan-postgres --username=ckan -d ckan_test --command="ALTER VIEW geometry_columns OWNER TO ckan_default;"
- psql --host=ckan-postgres --username=ckan -d ckan_test --command="ALTER TABLE spatial_ref_sys OWNER TO ckan_default;"
ckan -c subdir/test-github-actions.ini harvester initdb
ckan -c subdir/test-github-actions.ini spatial initdb
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index eb8c01f..a0df5f0 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -31,7 +31,7 @@ def _create_postgis_extension():
def _drop_postgis_extension():
conn = Session.connection()
- conn.execute("drop extension postgis cascade;")
+ conn.execute("drop extension postgis;")
Session.commit()
From cfd0ff7f24452b2ac2a34da297592bfb01ea5b93 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 19:16:13 +0900
Subject: [PATCH 037/139] WIP: use pass_spatial_ref_sys
---
.github/workflows/ci.yml | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 21fe47a..59c0695 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -54,8 +54,9 @@ jobs:
- name: install and setup ckan
run: |
- git clone https://github.com/ckan/ckan
+ git clone https://github.com/smellman/ckan
cd ckan
+ git checkout pass_spatial_ref_sys
pip install -r requirement-setuptools.txt
pip install -r requirements.txt
pip install -r dev-requirements.txt
From 2e11aee2ae9705597dc36c1ec21d69264c47cb2c Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 19:42:55 +0900
Subject: [PATCH 038/139] WIP: ckan_default become spatial_ref_sys owner
---
.github/workflows/ci.yml | 2 ++
1 file changed, 2 insertions(+)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 59c0695..f0276f1 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -93,6 +93,8 @@ jobs:
# setup postgis
psql --host=ckan-postgres --username=ckan -d ckan_test --command="CREATE EXTENSION postgis;"
+ psql --host=ckan-postgres --username=ckan -d ckan_test --command="ALTER VIEW geometry_columns OWNER TO ckan_default;"
+ psql --host=ckan-postgres --username=ckan -d ckan_test --command="ALTER TABLE spatial_ref_sys OWNER TO ckan_default;"
ckan -c subdir/test-github-actions.ini harvester initdb
ckan -c subdir/test-github-actions.ini spatial initdb
From 3d82e761abd67758fff5f63f6391088e09d1cd62 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 20:04:33 +0900
Subject: [PATCH 039/139] WIP: downgrade to postgis 2.5
---
.github/workflows/ci.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index f0276f1..b3a3465 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -25,7 +25,7 @@ jobs:
# Label used to access the service container
ckan-postgres:
# Docker Hub image
- image: postgis/postgis:12-3.0
+ image: postgis/postgis:12-2.5
# Provide the user and password for postgres
env:
POSTGRES_PASSWORD: ckan
From c46936a5e7101df4f2c85fb76c5da1c858c5d8a4 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 20:45:15 +0900
Subject: [PATCH 040/139] WIP: drop table spatial_ref_sys when drop extension
postgis
---
ckanext/spatial/tests/conftest.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index a0df5f0..d255994 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -32,6 +32,7 @@ def _create_postgis_extension():
def _drop_postgis_extension():
conn = Session.connection()
conn.execute("drop extension postgis;")
+ conn.execute("drop table spatial_ref_sys;")
Session.commit()
From 6e57837f9bc2e7b00e05644b376c27dcf2912760 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 20:53:25 +0900
Subject: [PATCH 041/139] WIP: test: remove spatial_ref_sys from
metadata.tables
---
ckanext/spatial/tests/conftest.py | 21 +++------------------
1 file changed, 3 insertions(+), 18 deletions(-)
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index d255994..638ca77 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -23,19 +23,6 @@ def _execute_script(script_path):
Session.commit()
-def _create_postgis_extension():
- conn = Session.connection()
- conn.execute("create extension postgis;")
- Session.commit()
-
-
-def _drop_postgis_extension():
- conn = Session.connection()
- conn.execute("drop extension postgis;")
- conn.execute("drop table spatial_ref_sys;")
- Session.commit()
-
-
def create_postgis_tables():
scripts_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "scripts"
@@ -43,17 +30,15 @@ def create_postgis_tables():
if postgis_version()[:1] == "1":
_execute_script(os.path.join(scripts_path, "spatial_ref_sys.sql"))
_execute_script(os.path.join(scripts_path, "geometry_columns.sql"))
- elif postgis_version()[:1] == "2":
- _execute_script(os.path.join(scripts_path, "spatial_ref_sys.sql"))
else:
- _create_postgis_extension()
+ _execute_script(os.path.join(scripts_path, "spatial_ref_sys.sql"))
@pytest.fixture
def spatial_clean_db(reset_db):
# reset_db will fail to drop table spatial_ref_sys
- Session.close_all()
- _drop_postgis_extension()
+ if "spatial_ref_sys" in meta.metadata.tables:
+ meta.metadata.remove(meta.metadata.tables["spatial_ref_sys"])
reset_db()
# This will create the PostGIS tables (geometry_columns and
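
The change above detaches spatial_ref_sys from SQLAlchemy's MetaData registry before reset_db() rebuilds the schema, so CKAN's drop/create cycle no longer tries to manage a table owned by the PostGIS extension. A standalone sketch of that pattern (an in-memory SQLite engine and a dummy column stand in for the real database):

from sqlalchemy import Column, Integer, MetaData, Table, create_engine

engine = create_engine("sqlite://")
metadata = MetaData()
Table("spatial_ref_sys", metadata, Column("srid", Integer, primary_key=True))

# Detach the table object so metadata.drop_all()/create_all() skip it,
# mirroring the conftest fix above.
if "spatial_ref_sys" in metadata.tables:
    metadata.remove(metadata.tables["spatial_ref_sys"])

metadata.drop_all(engine)  # no longer attempts to drop spatial_ref_sys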
From 468ed052d9751a10b9385c40bcb23f964e259863 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 21:10:00 +0900
Subject: [PATCH 042/139] WIP: test: added debug comment
---
ckanext/spatial/tests/conftest.py | 2 ++
1 file changed, 2 insertions(+)
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index 638ca77..32c4742 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -37,7 +37,9 @@ def create_postgis_tables():
@pytest.fixture
def spatial_clean_db(reset_db):
# reset_db will fail to drop table spatial_ref_sys
+ print("debug: spatial_clean_db")
if "spatial_ref_sys" in meta.metadata.tables:
+ print("debug: remove spatial_ref_sys from meta.metadata")
meta.metadata.remove(meta.metadata.tables["spatial_ref_sys"])
reset_db()
From e9af0c8da26236431d805d77b7c8c1ae5c74e59d Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 21:37:52 +0900
Subject: [PATCH 043/139] WIP: show all messages
---
.github/workflows/ci.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index b3a3465..503a9d6 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -102,4 +102,4 @@ jobs:
- name: run ci
run: |
- pytest --ckan-ini=subdir/test-github-actions.ini ckanext/spatial/tests
+ pytest -v --capture=no --ckan-ini=subdir/test-github-actions.ini ckanext/spatial/tests
From 4681706b835cde4369a360132eaaa33c11b43367 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 21:52:42 +0900
Subject: [PATCH 044/139] WIP: remove spatial_ref_sys table in ckan's clean_db
---
ckanext/spatial/tests/conftest.py | 5 -----
1 file changed, 5 deletions(-)
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index 32c4742..d936b88 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -36,11 +36,6 @@ def create_postgis_tables():
@pytest.fixture
def spatial_clean_db(reset_db):
- # reset_db will fail to drop table spatial_ref_sys
- print("debug: spatial_clean_db")
- if "spatial_ref_sys" in meta.metadata.tables:
- print("debug: remove spatial_ref_sys from meta.metadata")
- meta.metadata.remove(meta.metadata.tables["spatial_ref_sys"])
reset_db()
# This will create the PostGIS tables (geometry_columns and
From b0967a10e0558b5b7b5b7d7aab3de694f69476ab Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 22:12:16 +0900
Subject: [PATCH 045/139] WIP: added --reset-db option
---
.github/workflows/ci.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 503a9d6..e6cb3e5 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -102,4 +102,4 @@ jobs:
- name: run ci
run: |
- pytest -v --capture=no --ckan-ini=subdir/test-github-actions.ini ckanext/spatial/tests
+ pytest --reset-db -v --capture=no --ckan-ini=subdir/test-github-actions.ini ckanext/spatial/tests
From ddbc64fee9a298e0e11b0530c65f72d90149b64c Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 22:19:12 +0900
Subject: [PATCH 046/139] WIP: test: --reset-db option
---
.github/workflows/ci.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index e6cb3e5..3112ae2 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -102,4 +102,4 @@ jobs:
- name: run ci
run: |
- pytest --reset-db -v --capture=no --ckan-ini=subdir/test-github-actions.ini ckanext/spatial/tests
+ pytest -v --capture=no --ckan-ini=subdir/test-github-actions.ini --reset-db ckanext/spatial/tests
From 32244f6e3f078ef671b6b66cfbf56a7e47eecf1d Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 22:27:14 +0900
Subject: [PATCH 047/139] WIP: remove --reset-db option, use
test-github-actions.ini for db init
---
.github/workflows/ci.yml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 3112ae2..c027ed4 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -73,7 +73,6 @@ jobs:
# Database Initialization
ckan -c test-core-circle-ci.ini datastore set-permissions | psql --host=ckan-postgres --username=ckan
- ckan -c test-core-circle-ci.ini db init
- name: install ckanext-harvest
run: |
@@ -96,10 +95,11 @@ jobs:
psql --host=ckan-postgres --username=ckan -d ckan_test --command="ALTER VIEW geometry_columns OWNER TO ckan_default;"
psql --host=ckan-postgres --username=ckan -d ckan_test --command="ALTER TABLE spatial_ref_sys OWNER TO ckan_default;"
+ ckan -c subdir/test-github-actions.ini db init
ckan -c subdir/test-github-actions.ini harvester initdb
ckan -c subdir/test-github-actions.ini spatial initdb
- name: run ci
run: |
- pytest -v --capture=no --ckan-ini=subdir/test-github-actions.ini --reset-db ckanext/spatial/tests
+ pytest -v --capture=no --ckan-ini=subdir/test-github-actions.ini ckanext/spatial/tests
From 55df859a5b46f88f97276b18ab34eec4b19c6dde Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 22:45:28 +0900
Subject: [PATCH 048/139] WIP: test: added ckan_harvester and test*harvester
---
test-github-actions.ini | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/test-github-actions.ini b/test-github-actions.ini
index a4bdd7c..3cc8947 100644
--- a/test-github-actions.ini
+++ b/test-github-actions.ini
@@ -14,7 +14,7 @@ port = 5000
[app:main]
use = config:../ckan/test-core.ini
ckan.legacy_templates = false
-ckan.plugins = test_spatial_plugin harvest spatial_metadata spatial_query spatial_harvest_metadata_api gemini_csw_harvester gemini_doc_harvester gemini_waf_harvester
+ckan.plugins = test_spatial_plugin harvest ckan_harvester test_harvester test_harvester2 test_action_harvester spatial_metadata spatial_query spatial_harvest_metadata_api gemini_csw_harvester gemini_doc_harvester gemini_waf_harvester
ckan.spatial.srid = 4326
ckan.spatial.default_map_extent=-6.88,49.74,0.50,59.2
ckan.spatial.testing = true
From 0820b1929fdb4593ef764014f63bd1a4ba01d876 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 6 Jan 2021 23:03:22 +0900
Subject: [PATCH 049/139] WIP: test: added with_plugins
---
ckanext/spatial/tests/functional/test_package.py | 2 +-
ckanext/spatial/tests/functional/test_widgets.py | 2 +-
ckanext/spatial/tests/lib/test_spatial.py | 2 +-
ckanext/spatial/tests/model/test_package_extent.py | 2 +-
4 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/ckanext/spatial/tests/functional/test_package.py b/ckanext/spatial/tests/functional/test_package.py
index 8a83805..7451655 100644
--- a/ckanext/spatial/tests/functional/test_package.py
+++ b/ckanext/spatial/tests/functional/test_package.py
@@ -11,7 +11,7 @@ from ckanext.spatial.model import PackageExtent
from ckanext.spatial.tests.base import SpatialTestBase
-@pytest.mark.usefixtures("spatial_clean_db")
+@pytest.mark.usefixtures("with_plugins", "spatial_clean_db")
class TestSpatialExtra(SpatialTestBase):
def test_spatial_extra_base(self, app):
diff --git a/ckanext/spatial/tests/functional/test_widgets.py b/ckanext/spatial/tests/functional/test_widgets.py
index 57bd1ab..973ccdc 100644
--- a/ckanext/spatial/tests/functional/test_widgets.py
+++ b/ckanext/spatial/tests/functional/test_widgets.py
@@ -7,7 +7,7 @@ import ckan.tests.factories as factories
class TestSpatialWidgets(SpatialTestBase):
- @pytest.mark.usefixtures("spatial_clean_db")
+ @pytest.mark.usefixtures("with_plugins", "spatial_clean_db")
def test_dataset_map(self, app):
dataset = factories.Dataset(
extras=[
diff --git a/ckanext/spatial/tests/lib/test_spatial.py b/ckanext/spatial/tests/lib/test_spatial.py
index 409ed70..ea3829c 100644
--- a/ckanext/spatial/tests/lib/test_spatial.py
+++ b/ckanext/spatial/tests/lib/test_spatial.py
@@ -38,7 +38,7 @@ def create_package(**package_dict):
return context.get("id")
-@pytest.mark.usefixtures("spatial_clean_db")
+@pytest.mark.usefixtures("with_plugins", "spatial_clean_db")
class TestCompareGeometries(SpatialTestBase):
def _get_extent_object(self, geometry):
if isinstance(geometry, six.string_types):
diff --git a/ckanext/spatial/tests/model/test_package_extent.py b/ckanext/spatial/tests/model/test_package_extent.py
index 5a28598..eda6aa0 100644
--- a/ckanext/spatial/tests/model/test_package_extent.py
+++ b/ckanext/spatial/tests/model/test_package_extent.py
@@ -12,7 +12,7 @@ from ckanext.spatial.geoalchemy_common import WKTElement, legacy_geoalchemy
from ckanext.spatial.tests.base import SpatialTestBase
-@pytest.mark.usefixtures("spatial_clean_db")
+@pytest.mark.usefixtures("with_plugins", "spatial_clean_db")
class TestPackageExtent(SpatialTestBase):
def test_create_extent(self):
From eece0a89c2e7c66eb01fc9edeee6da878e330091 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 11:08:40 +0900
Subject: [PATCH 050/139] WIP: test: remove package_extent before
spatial_db_setup()
---
ckanext/spatial/tests/conftest.py | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index d936b88..0182fd8 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -44,12 +44,12 @@ def spatial_clean_db(reset_db):
if not table.exists():
create_postgis_tables()
- # When running the tests with the --reset-db option for some
- # reason the metadata holds a reference to the `package_extent`
- # table after being deleted, causing an InvalidRequestError
- # exception when trying to recreate it further on
- if "package_extent" in meta.metadata.tables:
- meta.metadata.remove(meta.metadata.tables["package_extent"])
+ # When running the tests with the --reset-db option for some
+ # reason the metadata holds a reference to the `package_extent`
+ # table after being deleted, causing an InvalidRequestError
+ # exception when trying to recreate it further on
+ if "package_extent" in meta.metadata.tables:
+ meta.metadata.remove(meta.metadata.tables["package_extent"])
spatial_db_setup()
From 14d98bc78105b07aa2cdacd01756ab719acba449 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 11:30:55 +0900
Subject: [PATCH 051/139] WIP: test: use {} instead of set()
---
ckanext/spatial/tests/lib/test_spatial.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/ckanext/spatial/tests/lib/test_spatial.py b/ckanext/spatial/tests/lib/test_spatial.py
index ea3829c..26bcf73 100644
--- a/ckanext/spatial/tests/lib/test_spatial.py
+++ b/ckanext/spatial/tests/lib/test_spatial.py
@@ -127,7 +127,7 @@ class TestBboxQuery(SpatialQueryTestBase):
bbox_dict = self.x_values_to_bbox((2, 5))
package_ids = [res.package_id for res in bbox_query(bbox_dict)]
package_titles = [model.Package.get(id_).title for id_ in package_ids]
- assert(set(package_titles) == set(("(0, 3)", "(0, 4)", "(4, 5)")))
+ assert(set(package_titles) == {"(0, 3)", "(0, 4)", "(4, 5)"})
class TestBboxQueryOrdered(SpatialQueryTestBase):
From 26286fa3851db08d7e2e23f549d49ee118d7926c Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 11:42:53 +0900
Subject: [PATCH 052/139] WIP: test: use clean_db instead of spatial_clean_db
---
ckanext/spatial/tests/functional/test_package.py | 2 +-
ckanext/spatial/tests/functional/test_widgets.py | 2 +-
ckanext/spatial/tests/lib/test_spatial.py | 4 ++--
ckanext/spatial/tests/model/test_package_extent.py | 2 +-
4 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/ckanext/spatial/tests/functional/test_package.py b/ckanext/spatial/tests/functional/test_package.py
index 7451655..1d43826 100644
--- a/ckanext/spatial/tests/functional/test_package.py
+++ b/ckanext/spatial/tests/functional/test_package.py
@@ -11,7 +11,7 @@ from ckanext.spatial.model import PackageExtent
from ckanext.spatial.tests.base import SpatialTestBase
-@pytest.mark.usefixtures("with_plugins", "spatial_clean_db")
+@pytest.mark.usefixtures("with_plugins", "clean_db")
class TestSpatialExtra(SpatialTestBase):
def test_spatial_extra_base(self, app):
diff --git a/ckanext/spatial/tests/functional/test_widgets.py b/ckanext/spatial/tests/functional/test_widgets.py
index 973ccdc..b3868bd 100644
--- a/ckanext/spatial/tests/functional/test_widgets.py
+++ b/ckanext/spatial/tests/functional/test_widgets.py
@@ -7,7 +7,7 @@ import ckan.tests.factories as factories
class TestSpatialWidgets(SpatialTestBase):
- @pytest.mark.usefixtures("with_plugins", "spatial_clean_db")
+ @pytest.mark.usefixtures("with_plugins", "clean_db")
def test_dataset_map(self, app):
dataset = factories.Dataset(
extras=[
diff --git a/ckanext/spatial/tests/lib/test_spatial.py b/ckanext/spatial/tests/lib/test_spatial.py
index 26bcf73..e7fce54 100644
--- a/ckanext/spatial/tests/lib/test_spatial.py
+++ b/ckanext/spatial/tests/lib/test_spatial.py
@@ -38,7 +38,7 @@ def create_package(**package_dict):
return context.get("id")
-@pytest.mark.usefixtures("with_plugins", "spatial_clean_db")
+@pytest.mark.usefixtures("with_plugins", "clean_db")
class TestCompareGeometries(SpatialTestBase):
def _get_extent_object(self, geometry):
if isinstance(geometry, six.string_types):
@@ -99,7 +99,7 @@ class SpatialQueryTestBase(SpatialTestBase):
maxy = 1
@pytest.fixture(autouse=True)
- def initial_data(self, spatial_clean_db):
+ def initial_data(self, clean_db):
for fixture_x in self.fixtures_x:
bbox = self.x_values_to_bbox(fixture_x)
bbox_geojson = bbox_2_geojson(bbox)
diff --git a/ckanext/spatial/tests/model/test_package_extent.py b/ckanext/spatial/tests/model/test_package_extent.py
index eda6aa0..58fdb48 100644
--- a/ckanext/spatial/tests/model/test_package_extent.py
+++ b/ckanext/spatial/tests/model/test_package_extent.py
@@ -12,7 +12,7 @@ from ckanext.spatial.geoalchemy_common import WKTElement, legacy_geoalchemy
from ckanext.spatial.tests.base import SpatialTestBase
-@pytest.mark.usefixtures("with_plugins", "spatial_clean_db")
+@pytest.mark.usefixtures("with_plugins", "clean_db")
class TestPackageExtent(SpatialTestBase):
def test_create_extent(self):
From 4cca5822e3f327944ccf80304ac4f85e15c24b4d Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 11:52:46 +0900
Subject: [PATCH 053/139] WIP: test: use harvest_setup
---
.github/workflows/ci.yml | 2 +-
ckanext/spatial/tests/functional/test_package.py | 2 +-
ckanext/spatial/tests/model/test_package_extent.py | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index c027ed4..bfb8c45 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -102,4 +102,4 @@ jobs:
- name: run ci
run: |
- pytest -v --capture=no --ckan-ini=subdir/test-github-actions.ini ckanext/spatial/tests
+ pytest --ckan-ini=subdir/test-github-actions.ini ckanext/spatial/tests
diff --git a/ckanext/spatial/tests/functional/test_package.py b/ckanext/spatial/tests/functional/test_package.py
index 1d43826..cf86ea7 100644
--- a/ckanext/spatial/tests/functional/test_package.py
+++ b/ckanext/spatial/tests/functional/test_package.py
@@ -11,7 +11,7 @@ from ckanext.spatial.model import PackageExtent
from ckanext.spatial.tests.base import SpatialTestBase
-@pytest.mark.usefixtures("with_plugins", "clean_db")
+@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index', 'harvest_setup')
class TestSpatialExtra(SpatialTestBase):
def test_spatial_extra_base(self, app):
diff --git a/ckanext/spatial/tests/model/test_package_extent.py b/ckanext/spatial/tests/model/test_package_extent.py
index 58fdb48..64d24a1 100644
--- a/ckanext/spatial/tests/model/test_package_extent.py
+++ b/ckanext/spatial/tests/model/test_package_extent.py
@@ -12,7 +12,7 @@ from ckanext.spatial.geoalchemy_common import WKTElement, legacy_geoalchemy
from ckanext.spatial.tests.base import SpatialTestBase
-@pytest.mark.usefixtures("with_plugins", "clean_db")
+@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index', 'harvest_setup')
class TestPackageExtent(SpatialTestBase):
def test_create_extent(self):
From c0626ccbe427f32c90f6f500bbc7b5794cc5e772 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 12:28:21 +0900
Subject: [PATCH 054/139] WIP: test: test_api use harvest_setup
---
ckanext/spatial/tests/test_api.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/ckanext/spatial/tests/test_api.py b/ckanext/spatial/tests/test_api.py
index 9798f14..767b3d2 100644
--- a/ckanext/spatial/tests/test_api.py
+++ b/ckanext/spatial/tests/test_api.py
@@ -16,7 +16,7 @@ extents = {
}
-@pytest.mark.usefixtures("clean_db")
+@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index', 'harvest_setup')
class TestAction(SpatialTestBase):
def test_spatial_query(self):
dataset = factories.Dataset(
@@ -155,7 +155,7 @@ class TestAction(SpatialTestBase):
assert(result["results"][0]["id"] == dataset["id"])
-@pytest.mark.usefixtures("clean_db")
+@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index', 'harvest_setup')
class TestHarvestedMetadataAPI(SpatialTestBase):
def test_api(self, app):
try:
From d7a2b7ed4948f325bc3a0e02d9e36e4d01b5cbe7 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 12:39:09 +0900
Subject: [PATCH 055/139] WIP: test: added harvest_setup fixture
---
ckanext/spatial/tests/conftest.py | 6 ++++++
1 file changed, 6 insertions(+)
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index 0182fd8..0ec1af6 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -9,6 +9,7 @@ from ckan.model import Session, meta
from ckanext.spatial.geoalchemy_common import postgis_version
from ckanext.spatial.model.package_extent import setup as spatial_db_setup
from ckanext.harvest.model import setup as harvest_model_setup
+import ckanext.harvest.model as harvest_model
def _execute_script(script_path):
@@ -55,3 +56,8 @@ def spatial_clean_db(reset_db):
# Setup the harvest tables
harvest_model_setup()
+
+
+@pytest.fixture
+def harvest_setup():
+ harvest_model.setup()
\ No newline at end of file
From 6c3c9f4046f6198e00b98a993a075a81e7c784d1 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 12:47:19 +0900
Subject: [PATCH 056/139] WIP: test: added spatial_setup fixture
---
ckanext/spatial/tests/conftest.py | 20 +++++++++++++++++++-
ckanext/spatial/tests/test_api.py | 4 ++--
2 files changed, 21 insertions(+), 3 deletions(-)
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index 0ec1af6..97293f3 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -60,4 +60,22 @@ def spatial_clean_db(reset_db):
@pytest.fixture
def harvest_setup():
- harvest_model.setup()
\ No newline at end of file
+ harvest_model.setup()
+
+
+@pytest.fixture
+def spatial_setup():
+ # This will create the PostGIS tables (geometry_columns and
+ # spatial_ref_sys) which were deleted when rebuilding the database
+ table = Table("spatial_ref_sys", meta.metadata)
+ if not table.exists():
+ create_postgis_tables()
+
+ # When running the tests with the --reset-db option for some
+ # reason the metadata holds a reference to the `package_extent`
+ # table after being deleted, causing an InvalidRequestError
+ # exception when trying to recreate it further on
+ if "package_extent" in meta.metadata.tables:
+ meta.metadata.remove(meta.metadata.tables["package_extent"])
+
+ spatial_db_setup()
diff --git a/ckanext/spatial/tests/test_api.py b/ckanext/spatial/tests/test_api.py
index 767b3d2..1a58112 100644
--- a/ckanext/spatial/tests/test_api.py
+++ b/ckanext/spatial/tests/test_api.py
@@ -16,7 +16,7 @@ extents = {
}
-@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index', 'harvest_setup')
+@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
class TestAction(SpatialTestBase):
def test_spatial_query(self):
dataset = factories.Dataset(
@@ -155,7 +155,7 @@ class TestAction(SpatialTestBase):
assert(result["results"][0]["id"] == dataset["id"])
-@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index', 'harvest_setup')
+@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
class TestHarvestedMetadataAPI(SpatialTestBase):
def test_api(self, app):
try:
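
Taken together, the two fixtures give test modules an explicit opt-in: harvest_setup creates the harvest tables and spatial_setup recreates the PostGIS and package_extent tables after clean_db wipes the schema. An illustrative test class (not part of the patch series) showing how they compose:

import pytest

@pytest.mark.usefixtures(
    "with_plugins", "clean_db", "clean_index", "harvest_setup", "spatial_setup"
)
class TestWithSpatialTables(object):
    def test_session_is_usable(self):
        from ckan.model import Session
        # With the fixtures applied, the rebuilt database accepts queries.
        assert Session.execute("SELECT 1").scalar() == 1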
From 1663e93d71318029f0cda30f882cc6663c1160f4 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 12:55:43 +0900
Subject: [PATCH 057/139] WIP: test: remove package_extent issue
---
ckanext/spatial/tests/conftest.py | 7 -------
1 file changed, 7 deletions(-)
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index 97293f3..ee1cd22 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -71,11 +71,4 @@ def spatial_setup():
if not table.exists():
create_postgis_tables()
- # When running the tests with the --reset-db option for some
- # reason the metadata holds a reference to the `package_extent`
- # table after being deleted, causing an InvalidRequestError
- # exception when trying to recreate it further on
- if "package_extent" in meta.metadata.tables:
- meta.metadata.remove(meta.metadata.tables["package_extent"])
-
spatial_db_setup()
From bf29fa0d9078c49928423f62b5e190d7f842b20e Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 13:02:39 +0900
Subject: [PATCH 058/139] WIP: test: revert package_extent issue
---
ckanext/spatial/tests/conftest.py | 19 +++++++++++++------
1 file changed, 13 insertions(+), 6 deletions(-)
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index ee1cd22..01e6cff 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -45,12 +45,12 @@ def spatial_clean_db(reset_db):
if not table.exists():
create_postgis_tables()
- # When running the tests with the --reset-db option for some
- # reason the metadata holds a reference to the `package_extent`
- # table after being deleted, causing an InvalidRequestError
- # exception when trying to recreate it further on
- if "package_extent" in meta.metadata.tables:
- meta.metadata.remove(meta.metadata.tables["package_extent"])
+ # When running the tests with the --reset-db option for some
+ # reason the metadata holds a reference to the `package_extent`
+ # table after being deleted, causing an InvalidRequestError
+ # exception when trying to recreate it further on
+ if "package_extent" in meta.metadata.tables:
+ meta.metadata.remove(meta.metadata.tables["package_extent"])
spatial_db_setup()
@@ -71,4 +71,11 @@ def spatial_setup():
if not table.exists():
create_postgis_tables()
+ # When running the tests with the --reset-db option for some
+ # reason the metadata holds a reference to the `package_extent`
+ # table after being deleted, causing an InvalidRequestError
+ # exception when trying to recreate it further on
+ if "package_extent" in meta.metadata.tables:
+ meta.metadata.remove(meta.metadata.tables["package_extent"])
+
spatial_db_setup()
From 68acc993cd1719d8ec599a5923474d8538a8e354 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 13:11:59 +0900
Subject: [PATCH 059/139] WIP: test: extend_existing flag
---
ckanext/spatial/geoalchemy_common.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/ckanext/spatial/geoalchemy_common.py b/ckanext/spatial/geoalchemy_common.py
index 308455d..c76e28f 100644
--- a/ckanext/spatial/geoalchemy_common.py
+++ b/ckanext/spatial/geoalchemy_common.py
@@ -72,6 +72,7 @@ def setup_spatial_table(package_extent_class, db_srid=None):
Column('package_id', types.UnicodeText, primary_key=True),
Column('the_geom', Geometry('GEOMETRY', srid=db_srid,
management=management)),
+ extend_existing=True
)
meta.mapper(package_extent_class, package_extent_table)
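The one keyword added here is stock SQLAlchemy behaviour: declaring a Table whose name is already registered in a MetaData object normally raises InvalidRequestError, while extend_existing=True merges the new definition into the existing one, which is exactly the failure mode the test fixtures were hitting. A self-contained illustration with plain SQLAlchemy types (no CKAN or GeoAlchemy imports):

    from sqlalchemy import Column, Integer, MetaData, Table, UnicodeText

    metadata = MetaData()
    Table("package_extent", metadata,
          Column("package_id", UnicodeText, primary_key=True))

    # A second Table() call with the same name would normally raise
    # InvalidRequestError; extend_existing=True merges the definitions.
    package_extent = Table(
        "package_extent", metadata,
        Column("package_id", UnicodeText, primary_key=True),
        Column("srid", Integer),
        extend_existing=True,
    )
    assert "srid" in package_extent.c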
From 84d06e0809d98722b6279dbac330af8e137318e8 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 13:19:13 +0900
Subject: [PATCH 060/139] WIP: use ckan master
---
.github/workflows/ci.yml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index bfb8c45..1db6584 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -54,9 +54,9 @@ jobs:
- name: install and setup ckan
run: |
- git clone https://github.com/smellman/ckan
+ git clone https://github.com/ckan/ckan
cd ckan
- git checkout pass_spatial_ref_sys
+ # git checkout pass_spatial_ref_sys
pip install -r requirement-setuptools.txt
pip install -r requirements.txt
pip install -r dev-requirements.txt
From 12eb8c05e0119a7bfc8469310f144f07d2f217a6 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 13:38:57 +0900
Subject: [PATCH 061/139] WIP: drop index for package_extent
---
.github/workflows/ci.yml | 4 ++--
ckanext/spatial/tests/conftest.py | 2 ++
2 files changed, 4 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 1db6584..bfb8c45 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -54,9 +54,9 @@ jobs:
- name: install and setup ckan
run: |
- git clone https://github.com/ckan/ckan
+ git clone https://github.com/smellman/ckan
cd ckan
- # git checkout pass_spatial_ref_sys
+ git checkout pass_spatial_ref_sys
pip install -r requirement-setuptools.txt
pip install -r requirements.txt
pip install -r dev-requirements.txt
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index 01e6cff..00bcb72 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -77,5 +77,7 @@ def spatial_setup():
# exception when trying to recreate it further on
if "package_extent" in meta.metadata.tables:
meta.metadata.remove(meta.metadata.tables["package_extent"])
+ Session.execute("DROP INDEX idx_package_extent_the_geom")
+ Session.commit()
spatial_db_setup()
From 77558ec3bffa895ec03f86c5ea9a5c330a47818e Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 13:45:33 +0900
Subject: [PATCH 062/139] WIP: force remove index for package_extent
---
ckanext/spatial/tests/conftest.py | 16 ++++++++--------
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index 00bcb72..45a63d9 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -71,13 +71,13 @@ def spatial_setup():
if not table.exists():
create_postgis_tables()
- # When running the tests with the --reset-db option for some
- # reason the metadata holds a reference to the `package_extent`
- # table after being deleted, causing an InvalidRequestError
- # exception when trying to recreate it further on
- if "package_extent" in meta.metadata.tables:
- meta.metadata.remove(meta.metadata.tables["package_extent"])
- Session.execute("DROP INDEX idx_package_extent_the_geom")
- Session.commit()
+ # When running the tests with the --reset-db option for some
+ # reason the metadata holds a reference to the `package_extent`
+ # table after being deleted, causing an InvalidRequestError
+ # exception when trying to recreate it further on
+ if "package_extent" in meta.metadata.tables:
+ meta.metadata.remove(meta.metadata.tables["package_extent"])
+ Session.execute("DROP INDEX idx_package_extent_the_geom")
+ Session.commit()
spatial_db_setup()
From 23a36d1937da2eb2df51119fb9e8032ab74141f2 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 13:55:24 +0900
Subject: [PATCH 063/139] WIP: added if exists option, added spatial_setup
fixture to all
---
ckanext/spatial/tests/conftest.py | 2 +-
ckanext/spatial/tests/functional/test_package.py | 2 +-
ckanext/spatial/tests/functional/test_widgets.py | 2 +-
ckanext/spatial/tests/lib/test_spatial.py | 2 +-
ckanext/spatial/tests/model/test_package_extent.py | 2 +-
5 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index 45a63d9..29f225e 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -77,7 +77,7 @@ def spatial_setup():
# exception when trying to recreate it further on
if "package_extent" in meta.metadata.tables:
meta.metadata.remove(meta.metadata.tables["package_extent"])
- Session.execute("DROP INDEX idx_package_extent_the_geom")
+ Session.execute("DROP INDEX IF EXISTS idx_package_extent_the_geom")
Session.commit()
spatial_db_setup()
diff --git a/ckanext/spatial/tests/functional/test_package.py b/ckanext/spatial/tests/functional/test_package.py
index cf86ea7..aaf4c53 100644
--- a/ckanext/spatial/tests/functional/test_package.py
+++ b/ckanext/spatial/tests/functional/test_package.py
@@ -11,7 +11,7 @@ from ckanext.spatial.model import PackageExtent
from ckanext.spatial.tests.base import SpatialTestBase
-@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index', 'harvest_setup')
+@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
class TestSpatialExtra(SpatialTestBase):
def test_spatial_extra_base(self, app):
diff --git a/ckanext/spatial/tests/functional/test_widgets.py b/ckanext/spatial/tests/functional/test_widgets.py
index b3868bd..a7069b1 100644
--- a/ckanext/spatial/tests/functional/test_widgets.py
+++ b/ckanext/spatial/tests/functional/test_widgets.py
@@ -7,7 +7,7 @@ import ckan.tests.factories as factories
class TestSpatialWidgets(SpatialTestBase):
- @pytest.mark.usefixtures("with_plugins", "clean_db")
+ @pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
def test_dataset_map(self, app):
dataset = factories.Dataset(
extras=[
diff --git a/ckanext/spatial/tests/lib/test_spatial.py b/ckanext/spatial/tests/lib/test_spatial.py
index e7fce54..e2a047c 100644
--- a/ckanext/spatial/tests/lib/test_spatial.py
+++ b/ckanext/spatial/tests/lib/test_spatial.py
@@ -38,7 +38,7 @@ def create_package(**package_dict):
return context.get("id")
-@pytest.mark.usefixtures("with_plugins", "clean_db")
+@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
class TestCompareGeometries(SpatialTestBase):
def _get_extent_object(self, geometry):
if isinstance(geometry, six.string_types):
diff --git a/ckanext/spatial/tests/model/test_package_extent.py b/ckanext/spatial/tests/model/test_package_extent.py
index 64d24a1..ba464d7 100644
--- a/ckanext/spatial/tests/model/test_package_extent.py
+++ b/ckanext/spatial/tests/model/test_package_extent.py
@@ -12,7 +12,7 @@ from ckanext.spatial.geoalchemy_common import WKTElement, legacy_geoalchemy
from ckanext.spatial.tests.base import SpatialTestBase
-@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index', 'harvest_setup')
+@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
class TestPackageExtent(SpatialTestBase):
def test_create_extent(self):
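The repeated one-line changes in this patch all use the same pytest idiom: a class-level usefixtures marker applies the listed fixtures to every test method in the class, so the database, search index, harvester tables and PostGIS setup are rebuilt without each test naming them. Reduced to an illustrative class (the fixtures themselves come from CKAN's pytest plugin and this extension's conftest):

    import pytest

    @pytest.mark.usefixtures(
        "with_plugins", "clean_db", "clean_index", "harvest_setup", "spatial_setup"
    )
    class TestSomethingSpatial:
        def test_runs_with_fixtures(self):
            # Every test method in the class executes with the fixtures
            # above already applied.
            assert True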
From 4224a5c45f965dba115458456c495e98f3f22549 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 14:16:37 +0900
Subject: [PATCH 064/139] WIP: start server in ci
---
.github/workflows/ci.yml | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index bfb8c45..1c953d5 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -98,7 +98,8 @@ jobs:
ckan -c subdir/test-github-actions.ini db init
ckan -c subdir/test-github-actions.ini harvester initdb
ckan -c subdir/test-github-actions.ini spatial initdb
-
+ # Start server
+ ckan -c subdir/test-core-circle-ci.ini run
- name: run ci
run: |
From dc5bbba917e585394d6302c78ad9d8934b512825 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 14:23:14 +0900
Subject: [PATCH 065/139] WIP: fix typo
---
.github/workflows/ci.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 1c953d5..3873ccb 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -99,7 +99,7 @@ jobs:
ckan -c subdir/test-github-actions.ini harvester initdb
ckan -c subdir/test-github-actions.ini spatial initdb
# Start server
- ckan -c subdir/test-core-circle-ci.ini run
+ ckan -c subdir/test-github-actions.ini run
- name: run ci
run: |
From eea232ae740e651d896108e54245560d11bb790a Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 14:49:43 +0900
Subject: [PATCH 066/139] WIP: build frontend
---
.github/workflows/ci.yml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 3873ccb..07ec8b9 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -98,8 +98,8 @@ jobs:
ckan -c subdir/test-github-actions.ini db init
ckan -c subdir/test-github-actions.ini harvester initdb
ckan -c subdir/test-github-actions.ini spatial initdb
- # Start server
- ckan -c subdir/test-github-actions.ini run
+ # build frontend
+ ckan -c subdir/test-github-actions.ini front-end-build
- name: run ci
run: |
From 6d5b2765d1983696b636405b64999fbb5e92a003 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 17:02:19 +0900
Subject: [PATCH 067/139] WIP: use master branch
---
.github/workflows/ci.yml | 5 +----
1 file changed, 1 insertion(+), 4 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 07ec8b9..fdb6e61 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -54,9 +54,8 @@ jobs:
- name: install and setup ckan
run: |
- git clone https://github.com/smellman/ckan
+ git clone https://github.com/ckan/ckan
cd ckan
- git checkout pass_spatial_ref_sys
pip install -r requirement-setuptools.txt
pip install -r requirements.txt
pip install -r dev-requirements.txt
@@ -98,8 +97,6 @@ jobs:
ckan -c subdir/test-github-actions.ini db init
ckan -c subdir/test-github-actions.ini harvester initdb
ckan -c subdir/test-github-actions.ini spatial initdb
- # build frontend
- ckan -c subdir/test-github-actions.ini front-end-build
- name: run ci
run: |
From ff9dc44bc058a8b228f28b391a365758485bd5ce Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 17:08:27 +0900
Subject: [PATCH 068/139] WIP: create/drop extension postgis in ci
---
ckanext/spatial/tests/conftest.py | 12 ++++++++++++
ckanext/spatial/tests/test_api.py | 4 ++--
2 files changed, 14 insertions(+), 2 deletions(-)
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index 29f225e..17f9b73 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -24,6 +24,11 @@ def _execute_script(script_path):
Session.commit()
+def _create_postgis_extension():
+ Session.execute("CREATE EXTENSION IF NOT EXISTS postgis")
+ Session.commit()
+
+
def create_postgis_tables():
scripts_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "scripts"
@@ -32,9 +37,16 @@ def create_postgis_tables():
_execute_script(os.path.join(scripts_path, "spatial_ref_sys.sql"))
_execute_script(os.path.join(scripts_path, "geometry_columns.sql"))
else:
+ _create_postgis_extension()
_execute_script(os.path.join(scripts_path, "spatial_ref_sys.sql"))
+@pytest.fixture
+def clean_postgis():
+ Session.execute("DROP EXTENSION IF EXISTS postgis")
+ Session.commit()
+
+
@pytest.fixture
def spatial_clean_db(reset_db):
reset_db()
diff --git a/ckanext/spatial/tests/test_api.py b/ckanext/spatial/tests/test_api.py
index 1a58112..029f3a7 100644
--- a/ckanext/spatial/tests/test_api.py
+++ b/ckanext/spatial/tests/test_api.py
@@ -16,7 +16,7 @@ extents = {
}
-@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
+@pytest.mark.usefixtures('with_plugins', 'clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
class TestAction(SpatialTestBase):
def test_spatial_query(self):
dataset = factories.Dataset(
@@ -155,7 +155,7 @@ class TestAction(SpatialTestBase):
assert(result["results"][0]["id"] == dataset["id"])
-@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
+@pytest.mark.usefixtures('with_plugins', 'clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
class TestHarvestedMetadataAPI(SpatialTestBase):
def test_api(self, app):
try:
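Both helpers added to conftest.py lean on PostgreSQL's idempotent IF (NOT) EXISTS forms, so repeated runs neither fail when PostGIS is already installed nor when it has already been dropped. A condensed sketch of the pair (the Session import path is an assumption; the raw-string execute matches the SQLAlchemy 1.3 API CKAN used at the time):

    import pytest
    from ckan.model import Session   # assumed import path for the test Session

    def _create_postgis_extension():
        # No-op when the extension is already present in the database.
        Session.execute("CREATE EXTENSION IF NOT EXISTS postgis")
        Session.commit()

    @pytest.fixture
    def clean_postgis():
        # No-op when the extension is already gone; later patches add
        # CASCADE so dependent objects are removed as well.
        Session.execute("DROP EXTENSION IF EXISTS postgis")
        Session.commit()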
From cdb027aaca3084576f8d48e24920ee8c0bf206a8 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 17:20:17 +0900
Subject: [PATCH 069/139] WIP: test: allow removing the postgis extension
---
.github/workflows/ci.yml | 1 +
1 file changed, 1 insertion(+)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index fdb6e61..b0f87e8 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -90,6 +90,7 @@ jobs:
mv test-github-actions.ini subdir
# setup postgis
+ psql --host=ckan-postgres --username=ckan -d ckan_test --command="ALTER ROLE ckan_default WITH superuser;"
psql --host=ckan-postgres --username=ckan -d ckan_test --command="CREATE EXTENSION postgis;"
psql --host=ckan-postgres --username=ckan -d ckan_test --command="ALTER VIEW geometry_columns OWNER TO ckan_default;"
psql --host=ckan-postgres --username=ckan -d ckan_test --command="ALTER TABLE spatial_ref_sys OWNER TO ckan_default;"
From 787cbb5c89a471984cd202f678bf3efcf0d27cf4 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 19:06:29 +0900
Subject: [PATCH 070/139] WIP: test: create postgis extension with ckan_default
user
---
.github/workflows/ci.yml | 5 +----
ckanext/spatial/tests/conftest.py | 2 +-
2 files changed, 2 insertions(+), 5 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index b0f87e8..18ef84f 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -90,10 +90,7 @@ jobs:
mv test-github-actions.ini subdir
# setup postgis
- psql --host=ckan-postgres --username=ckan -d ckan_test --command="ALTER ROLE ckan_default WITH superuser;"
- psql --host=ckan-postgres --username=ckan -d ckan_test --command="CREATE EXTENSION postgis;"
- psql --host=ckan-postgres --username=ckan -d ckan_test --command="ALTER VIEW geometry_columns OWNER TO ckan_default;"
- psql --host=ckan-postgres --username=ckan -d ckan_test --command="ALTER TABLE spatial_ref_sys OWNER TO ckan_default;"
+ PGPASSWORD='${CKAN_POSTGRES_PWD}' psql --host=ckan-postgres --username='${CKAN_POSTGRES_USER}' -d ckan_test --command="CREATE EXTENSION postgis;"
ckan -c subdir/test-github-actions.ini db init
ckan -c subdir/test-github-actions.ini harvester initdb
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index 17f9b73..5054a0c 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -43,7 +43,7 @@ def create_postgis_tables():
@pytest.fixture
def clean_postgis():
- Session.execute("DROP EXTENSION IF EXISTS postgis")
+ Session.execute("DROP EXTENSION IF EXISTS postgis CASCADE")
Session.commit()
From b518cfac404e1bf80987ef945181bf76c0a12007 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 19:32:27 +0900
Subject: [PATCH 071/139] WIP: test: setup postgis in test fixture
---
.github/workflows/ci.yml | 3 ---
1 file changed, 3 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 18ef84f..e010e49 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -89,9 +89,6 @@ jobs:
mkdir subdir
mv test-github-actions.ini subdir
- # setup postgis
- PGPASSWORD='${CKAN_POSTGRES_PWD}' psql --host=ckan-postgres --username='${CKAN_POSTGRES_USER}' -d ckan_test --command="CREATE EXTENSION postgis;"
-
ckan -c subdir/test-github-actions.ini db init
ckan -c subdir/test-github-actions.ini harvester initdb
ckan -c subdir/test-github-actions.ini spatial initdb
From cdee92f20e92fc693912cbb83f171c2bee4206a2 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 19:44:21 +0900
Subject: [PATCH 072/139] WIP: test: create extension postgis via ckan_default
user
---
.github/workflows/ci.yml | 3 +++
1 file changed, 3 insertions(+)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index e010e49..2531936 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -89,6 +89,9 @@ jobs:
mkdir subdir
mv test-github-actions.ini subdir
+ # setup postgis
+ psql -command="CREATE EXTENSION postgis;" "postgresql://${CKAN_POSTGRES_USER}:${CKAN_POSTGRES_PWD}@ckan-postgres/${CKAN_POSTGRES_DB}"
+
ckan -c subdir/test-github-actions.ini db init
ckan -c subdir/test-github-actions.ini harvester initdb
ckan -c subdir/test-github-actions.ini spatial initdb
From 9ddce2f024bef4d00ae597267c2e405a2430b13f Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 19:51:13 +0900
Subject: [PATCH 073/139] WIP: fix typo
---
.github/workflows/ci.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 2531936..5b6aa06 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -90,7 +90,7 @@ jobs:
mv test-github-actions.ini subdir
# setup postgis
- psql -command="CREATE EXTENSION postgis;" "postgresql://${CKAN_POSTGRES_USER}:${CKAN_POSTGRES_PWD}@ckan-postgres/${CKAN_POSTGRES_DB}"
+ psql --command="CREATE EXTENSION postgis;" "postgresql://${CKAN_POSTGRES_USER}:${CKAN_POSTGRES_PWD}@ckan-postgres/${CKAN_POSTGRES_DB}"
ckan -c subdir/test-github-actions.ini db init
ckan -c subdir/test-github-actions.ini harvester initdb
From 1046309d2363e93b5242b6ab4306e2d8f919bd21 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 20:32:51 +0900
Subject: [PATCH 074/139] WIP: test: ckan_default becomes superuser
---
.github/workflows/ci.yml | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 5b6aa06..b0f87e8 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -90,7 +90,10 @@ jobs:
mv test-github-actions.ini subdir
# setup postgis
- psql --command="CREATE EXTENSION postgis;" "postgresql://${CKAN_POSTGRES_USER}:${CKAN_POSTGRES_PWD}@ckan-postgres/${CKAN_POSTGRES_DB}"
+ psql --host=ckan-postgres --username=ckan -d ckan_test --command="ALTER ROLE ckan_default WITH superuser;"
+ psql --host=ckan-postgres --username=ckan -d ckan_test --command="CREATE EXTENSION postgis;"
+ psql --host=ckan-postgres --username=ckan -d ckan_test --command="ALTER VIEW geometry_columns OWNER TO ckan_default;"
+ psql --host=ckan-postgres --username=ckan -d ckan_test --command="ALTER TABLE spatial_ref_sys OWNER TO ckan_default;"
ckan -c subdir/test-github-actions.ini db init
ckan -c subdir/test-github-actions.ini harvester initdb
From 9f45d31f21f287940cd07d1ae951c891e6977051 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 20:44:24 +0900
Subject: [PATCH 075/139] WIP: create postgis extension before checking postgis
version
---
ckanext/spatial/tests/conftest.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index 5054a0c..da89ede 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -30,6 +30,7 @@ def _create_postgis_extension():
def create_postgis_tables():
+ _create_postgis_extension()
scripts_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "scripts"
)
@@ -37,7 +38,6 @@ def create_postgis_tables():
_execute_script(os.path.join(scripts_path, "spatial_ref_sys.sql"))
_execute_script(os.path.join(scripts_path, "geometry_columns.sql"))
else:
- _create_postgis_extension()
_execute_script(os.path.join(scripts_path, "spatial_ref_sys.sql"))
From 075b6d6b89cfc07b6d015f15387206d5d2b5c1ae Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 7 Jan 2021 20:50:59 +0900
Subject: [PATCH 076/139] WIP: drop import sql
---
ckanext/spatial/tests/conftest.py | 20 --------------------
1 file changed, 20 deletions(-)
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index da89ede..7a412cb 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -12,18 +12,6 @@ from ckanext.harvest.model import setup as harvest_model_setup
import ckanext.harvest.model as harvest_model
-def _execute_script(script_path):
-
- conn = Session.connection()
- script = open(script_path, "r").read()
- for cmd in script.split(";"):
- cmd = re.sub(r"--(.*)|[\n\t]", "", cmd)
- if len(cmd):
- conn.execute(cmd)
-
- Session.commit()
-
-
def _create_postgis_extension():
Session.execute("CREATE EXTENSION IF NOT EXISTS postgis")
Session.commit()
@@ -31,14 +19,6 @@ def _create_postgis_extension():
def create_postgis_tables():
_create_postgis_extension()
- scripts_path = os.path.join(
- os.path.dirname(os.path.abspath(__file__)), "scripts"
- )
- if postgis_version()[:1] == "1":
- _execute_script(os.path.join(scripts_path, "spatial_ref_sys.sql"))
- _execute_script(os.path.join(scripts_path, "geometry_columns.sql"))
- else:
- _execute_script(os.path.join(scripts_path, "spatial_ref_sys.sql"))
@pytest.fixture
From a1ba3fdee25064c237a63446e2121b5f9d31f1ee Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Fri, 8 Jan 2021 12:46:09 +0900
Subject: [PATCH 077/139] WIP: drop package_extent table in clean up postgis
---
ckanext/spatial/tests/conftest.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index 7a412cb..790c0e8 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -23,6 +23,7 @@ def create_postgis_tables():
@pytest.fixture
def clean_postgis():
+ Session.execute("DROP TABLE IF EXISTS package_extent")
Session.execute("DROP EXTENSION IF EXISTS postgis CASCADE")
Session.commit()
From b0a3290ba8098a3733282379e6b343a373a08038 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 14 Jan 2021 12:35:57 +0900
Subject: [PATCH 078/139] WIP: added fixture each test
---
ckanext/spatial/tests/lib/test_spatial.py | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/ckanext/spatial/tests/lib/test_spatial.py b/ckanext/spatial/tests/lib/test_spatial.py
index e2a047c..609bd05 100644
--- a/ckanext/spatial/tests/lib/test_spatial.py
+++ b/ckanext/spatial/tests/lib/test_spatial.py
@@ -38,7 +38,7 @@ def create_package(**package_dict):
return context.get("id")
-@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
+@pytest.mark.usefixtures('with_plugins', 'clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
class TestCompareGeometries(SpatialTestBase):
def _get_extent_object(self, geometry):
if isinstance(geometry, six.string_types):
@@ -119,6 +119,7 @@ class SpatialQueryTestBase(SpatialTestBase):
}
+@pytest.mark.usefixtures('with_plugins', 'clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
class TestBboxQuery(SpatialQueryTestBase):
# x values for the fixtures
fixtures_x = [(0, 1), (0, 3), (0, 4), (4, 5), (6, 7)]
@@ -130,6 +131,7 @@ class TestBboxQuery(SpatialQueryTestBase):
assert(set(package_titles) == {"(0, 3)", "(0, 4)", "(4, 5)"})
+@pytest.mark.usefixtures('with_plugins', 'clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
class TestBboxQueryOrdered(SpatialQueryTestBase):
# x values for the fixtures
fixtures_x = [(0, 9), (1, 8), (2, 7), (3, 6), (4, 5), (8, 9)]
@@ -150,6 +152,7 @@ class TestBboxQueryOrdered(SpatialQueryTestBase):
)
+@pytest.mark.usefixtures('with_plugins', 'clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
class TestBboxQueryPerformance(SpatialQueryTestBase):
# x values for the fixtures
fixtures_x = [
From 4ff78e0c170e66d664ea5330492fba78e7cd4a62 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 14 Jan 2021 12:52:39 +0900
Subject: [PATCH 079/139] WIP: remove clean_db
---
ckanext/spatial/tests/lib/test_spatial.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/ckanext/spatial/tests/lib/test_spatial.py b/ckanext/spatial/tests/lib/test_spatial.py
index 609bd05..f2503c1 100644
--- a/ckanext/spatial/tests/lib/test_spatial.py
+++ b/ckanext/spatial/tests/lib/test_spatial.py
@@ -99,7 +99,7 @@ class SpatialQueryTestBase(SpatialTestBase):
maxy = 1
@pytest.fixture(autouse=True)
- def initial_data(self, clean_db):
+ def initial_data(self):
for fixture_x in self.fixtures_x:
bbox = self.x_values_to_bbox(fixture_x)
bbox_geojson = bbox_2_geojson(bbox)
From 3477eccefd2d729e9e90b9c948eb88ce0825a3da Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 14 Jan 2021 14:09:51 +0900
Subject: [PATCH 080/139] WIP: call initial_data in each function (autouse=True
fixture doesn't work)
---
ckanext/spatial/tests/lib/test_spatial.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/ckanext/spatial/tests/lib/test_spatial.py b/ckanext/spatial/tests/lib/test_spatial.py
index f2503c1..13db00d 100644
--- a/ckanext/spatial/tests/lib/test_spatial.py
+++ b/ckanext/spatial/tests/lib/test_spatial.py
@@ -98,7 +98,6 @@ class SpatialQueryTestBase(SpatialTestBase):
miny = 0
maxy = 1
- @pytest.fixture(autouse=True)
def initial_data(self):
for fixture_x in self.fixtures_x:
bbox = self.x_values_to_bbox(fixture_x)
@@ -125,6 +124,7 @@ class TestBboxQuery(SpatialQueryTestBase):
fixtures_x = [(0, 1), (0, 3), (0, 4), (4, 5), (6, 7)]
def test_query(self):
+ self.initial_data()
bbox_dict = self.x_values_to_bbox((2, 5))
package_ids = [res.package_id for res in bbox_query(bbox_dict)]
package_titles = [model.Package.get(id_).title for id_ in package_ids]
@@ -137,6 +137,7 @@ class TestBboxQueryOrdered(SpatialQueryTestBase):
fixtures_x = [(0, 9), (1, 8), (2, 7), (3, 6), (4, 5), (8, 9)]
def test_query(self):
+ self.initial_data()
bbox_dict = self.x_values_to_bbox((2, 7))
q = bbox_query_ordered(bbox_dict)
package_ids = [res.package_id for res in q]
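The workaround is simply to drop the autouse marker and have each test call the data-loading method explicitly, which makes its ordering relative to the class-level fixtures unambiguous. Stripped of the CKAN specifics, the shape is (names are illustrative only):

    class QueryTestBaseSketch:
        fixtures_x = [(0, 1), (0, 3)]

        def initial_data(self):
            # Stand-in for creating one test package per bbox in fixtures_x.
            self.created = list(self.fixtures_x)

    class TestBboxQuerySketch(QueryTestBaseSketch):
        def test_query(self):
            # Called at the top of the test instead of relying on an
            # autouse fixture defined on the base class.
            self.initial_data()
            assert len(self.created) == 2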
From f623dfb568296dd8bef7a31dc916c29f8344ce50 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 14 Jan 2021 15:11:16 +0900
Subject: [PATCH 081/139] WIP: use asset instead of resource
---
.../spatial/templates/spatial/snippets/dataset_map_base.html | 3 +--
.../templates/spatial/snippets/dataset_map_resource.html | 1 -
ckanext/spatial/templates/spatial/snippets/spatial_query.html | 3 +--
.../templates/spatial/snippets/spatial_query_resource.html | 1 -
4 files changed, 2 insertions(+), 6 deletions(-)
delete mode 100644 ckanext/spatial/templates/spatial/snippets/dataset_map_resource.html
delete mode 100644 ckanext/spatial/templates/spatial/snippets/spatial_query_resource.html
diff --git a/ckanext/spatial/templates/spatial/snippets/dataset_map_base.html b/ckanext/spatial/templates/spatial/snippets/dataset_map_base.html
index 2582b28..ed88c39 100644
--- a/ckanext/spatial/templates/spatial/snippets/dataset_map_base.html
+++ b/ckanext/spatial/templates/spatial/snippets/dataset_map_base.html
@@ -18,5 +18,4 @@ extent
-{% set type = 'asset' if h.ckan_version() > '2.9' else 'resource' %}
-{% include 'spatial/snippets/dataset_map_' ~ type ~ '.html' %}
+{% include 'spatial/snippets/dataset_map_asset.html' %}
diff --git a/ckanext/spatial/templates/spatial/snippets/dataset_map_resource.html b/ckanext/spatial/templates/spatial/snippets/dataset_map_resource.html
deleted file mode 100644
index 38c2f42..0000000
--- a/ckanext/spatial/templates/spatial/snippets/dataset_map_resource.html
+++ /dev/null
@@ -1 +0,0 @@
-{% resource 'ckanext-spatial/dataset_map' %}
diff --git a/ckanext/spatial/templates/spatial/snippets/spatial_query.html b/ckanext/spatial/templates/spatial/snippets/spatial_query.html
index b6e6cc8..451340c 100644
--- a/ckanext/spatial/templates/spatial/snippets/spatial_query.html
+++ b/ckanext/spatial/templates/spatial/snippets/spatial_query.html
@@ -27,5 +27,4 @@ e.g.
-{% set type = 'asset' if h.ckan_version() > '2.9' else 'resource' %}
-{% include 'spatial/snippets/spatial_query_' ~ type ~ '.html' %}
+{% include 'spatial/snippets/spatial_query_asset.html' %}
diff --git a/ckanext/spatial/templates/spatial/snippets/spatial_query_resource.html b/ckanext/spatial/templates/spatial/snippets/spatial_query_resource.html
deleted file mode 100644
index 499cde4..0000000
--- a/ckanext/spatial/templates/spatial/snippets/spatial_query_resource.html
+++ /dev/null
@@ -1 +0,0 @@
-{% resource 'ckanext-spatial/spatial_query' %}
From 90bea3c26acc20db5f59181b8a3d1836866abdf4 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Thu, 14 Jan 2021 15:12:50 +0900
Subject: [PATCH 082/139] remove travis ci settings
---
.travis.yml | 51 --------------------
bin/travis-build.bash | 108 ------------------------------------------
bin/travis-run.bash | 3 --
3 files changed, 162 deletions(-)
delete mode 100644 .travis.yml
delete mode 100644 bin/travis-build.bash
delete mode 100644 bin/travis-run.bash
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index bf79f5d..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,51 +0,0 @@
-dist: trusty
-
-os: linux
-language: python
-
-install:
- - bash bin/travis-build.bash
-services:
- - redis
- - postgresql
-addons:
- postgresql: 9.6
- apt:
- packages:
- - postgresql-9.6-postgis-2.3
-
-script: bash bin/travis-run.bash
-before_install:
- - pip install codecov
-after_success:
- - codecov
-
-jobs:
- include:
- - stage: Flake8
- python: 2.7
- env: FLAKE8=True
- install:
- - pip install flake8==3.5.0
- - pip install pycodestyle==2.3.0
- script:
- - flake8 --version
- # stop the build if there are Python syntax errors or undefined names
- - flake8 . --count --select=E901,E999,F821,F822,F823 --show-source --statistics --exclude ckan
- # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
- # - flake8 . --count --max-line-length=127 --statistics --exclude ckan --exit-zero
- - stage: Tests
- python: "2.7"
- env: CKANVERSION=master
- - python: "3.6"
- env: CKANVERSION=master
- - python: "2.7"
- env: CKANVERSION=2.8
- - python: "2.7"
- env: CKANVERSION=2.7
- - python: "2.7"
- env: CKANVERSION=2.6
-
-cache:
- directories:
- - $HOME/.cache/pip
diff --git a/bin/travis-build.bash b/bin/travis-build.bash
deleted file mode 100644
index 1bad2d5..0000000
--- a/bin/travis-build.bash
+++ /dev/null
@@ -1,108 +0,0 @@
-#!/bin/bash
-set -e
-
-echo "This is travis-build.bash..."
-
-echo "Installing the packages that CKAN requires..."
-sudo apt-get update -qq
-sudo apt-get install solr-jetty
-
-
-if python -c 'import sys;exit(sys.version_info < (3,))'
-then
- PYTHONVERSION=3
-else
- PYTHONVERSION=2
-fi
-
-echo "Installing CKAN and its Python dependencies..."
-git clone https://github.com/ckan/ckan
-cd ckan
-if [ $CKANVERSION == 'master' ]
-then
- echo "CKAN version: master"
-else
- CKAN_TAG=$(git tag | grep ^ckan-$CKANVERSION | sort --version-sort | tail -n 1)
- git checkout $CKAN_TAG
- echo "CKAN version: ${CKAN_TAG#ckan-}"
-fi
-
-if [ -f requirement-setuptools.txt ]
-then
- pip install -r requirement-setuptools.txt
-fi
-
-python setup.py develop
-
-if [ -f requirements-py2.txt ] && [ $PYTHONVERSION = 2 ]
-then
- grep -v psycopg2 < requirements-py2.txt > reqs.txt
-else
- grep -v psycopg2 < requirements.txt > reqs.txt
-fi
-pip install psycopg2==2.7.7 # workaround travis 10 psycopg2 incompatibility
-pip install -r reqs.txt
-pip install -r dev-requirements.txt
-cd -
-
-echo "Setting up Solr..."
-# solr is multicore for tests on ckan master now, but it's easier to run tests
-# on Travis single-core still.
-# see https://github.com/ckan/ckan/issues/2972
-sed -i -e 's/solr_url.*/solr_url = http:\/\/127.0.0.1:8983\/solr/' ckan/test-core.ini
-printf "NO_START=0\nJETTY_HOST=127.0.0.1\nJETTY_PORT=8983\nJAVA_HOME=$JAVA_HOME" | sudo tee /etc/default/jetty
-sudo cp ckan/ckan/config/solr/schema.xml /etc/solr/conf/schema.xml
-sudo service jetty restart
-
-echo "Creating the PostgreSQL user and database..."
-sudo -u postgres psql -c "CREATE USER ckan_default WITH PASSWORD 'pass';"
-sudo -u postgres psql -c 'CREATE DATABASE ckan_test WITH OWNER ckan_default;'
-
-echo "Setting up PostGIS on the database..."
-sudo -u postgres psql -d ckan_test -c 'CREATE EXTENSION postgis;'
-sudo -u postgres psql -d ckan_test -c 'ALTER VIEW geometry_columns OWNER TO ckan_default;'
-sudo -u postgres psql -d ckan_test -c 'ALTER TABLE spatial_ref_sys OWNER TO ckan_default;'
-
-echo "Install other libraries required..."
-sudo apt-get install python-dev libxml2-dev libxslt1-dev libgeos-c1
-
-echo "Initialising the database..."
-cd ckan
-if [ $CKANVERSION \< '2.9' ]
-then
- paster db init -c test-core.ini
-else
- ckan -c test-core.ini db init
-fi
-cd -
-
-echo "Installing ckanext-harvest and its requirements..."
-git clone https://github.com/ckan/ckanext-harvest
-cd ckanext-harvest
-python setup.py develop
-pip install -r pip-requirements.txt
-if [ $CKANVERSION \< '2.9' ]
-then
- paster harvester initdb -c ../ckan/test-core.ini
-fi
-
-cd -
-
-echo "Installing ckanext-spatial and its requirements..."
-pip install -r pip-requirements.txt
-python setup.py develop
-pip install pycsw
-
-echo "Moving test.ini into a subdir..."
-mkdir subdir
-mv test.ini subdir
-
-if [ $CKANVERSION \< '2.9' ]
-then
- paster spatial initdb -c subdir/test.ini
-else
- ckan -c subdir/test.ini harvester initdb
- ckan -c subdir/test.ini spatial initdb
-fi
-
-echo "travis-build.bash is done."
diff --git a/bin/travis-run.bash b/bin/travis-run.bash
deleted file mode 100644
index 9a791ad..0000000
--- a/bin/travis-run.bash
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/sh -e
-
-pytest --ckan-ini=subdir/test.ini ckanext/spatial/tests
From d75beac59e6af77d7d1a9394d056e16c971092cd Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Fri, 15 Jan 2021 14:01:12 +0900
Subject: [PATCH 083/139] WIP: rewrite ci
---
.github/workflows/ci.yml | 172 +++++++++++++++++----------------------
1 file changed, 76 insertions(+), 96 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index b0f87e8..025a985 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -2,103 +2,83 @@ name: ckanext-spatial ci
on: [push, pull_request]
jobs:
- # Label of the container job
- container-job:
- # Containers must run in Linux based operating systems
+ lint:
runs-on: ubuntu-latest
- # Docker Hub image that `container-job` executes in
- container:
- image: python:3-stretch
- env:
- CKAN_DATASTORE_POSTGRES_DB: datastore_test
- CKAN_DATASTORE_POSTGRES_READ_USER: datastore_read
- CKAN_DATASTORE_POSTGRES_READ_PWD: pass
- CKAN_DATASTORE_POSTGRES_WRITE_USER: datastore_write
- CKAN_DATASTORE_POSTGRES_WRITE_PWD: pass
- CKAN_POSTGRES_DB: ckan_test
- CKAN_POSTGRES_USER: ckan_default
- CKAN_POSTGRES_PWD: pass
- PGPASSWORD: ckan
-
- # Service containers to run with `container-job`
- services:
- # Label used to access the service container
- ckan-postgres:
- # Docker Hub image
- image: postgis/postgis:12-2.5
- # Provide the user and password for postgres
- env:
- POSTGRES_PASSWORD: ckan
- POSTGRES_USER: ckan
- # Set health checks to wait until postgres has started
- options: >-
- --health-cmd pg_isready
- --health-interval 10s
- --health-timeout 5s
- --health-retries 5
- ckan-redis:
- # Docker Hub image
- image: redis:3
-
steps:
- # Downloads a copy of the code in your repository before running CI tests
- - name: Check out repository code
- uses: actions/checkout@v2
+ - uses: actions/checkout@v2
+ - uses: actions/setup-python@v2
+ with:
+ python-version: '3.6'
+ - name: Install requirements
+ run: pip install flake8 pycodestyle
+ - name: Check syntax
+ run: flake8 . --count --select=E901,E999,F821,F822,F823 --show-source --statistics --exclude ckan
+ test:
+ needs: lint
+ strategy:
+ matrix:
+ ckan-version: [2.9, 2.9-py2, 2.8, 2.7]
+ fail-fast: false
+
+ name: CKAN ${{ matrix.ckan-version }}
+ runs-on: ubuntu-latest
+ container:
+ image: openknowledge/ckan-dev:${{ matrix.ckan-version }}
+ services:
+ solr:
+ image: ckan/ckan-solr-dev:${{ matrix.ckan-version }}
+ postgres:
+ image: postgis/postgis:12-2.5
+ env:
+ POSTGRES_USER: postgres
+ POSTGRES_PASSWORD: postgres
+ POSTGRES_DB: postgres
+ options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
+ redis:
+ image: redis:3
+ env:
+ CKAN_SQLALCHEMY_URL: postgresql://ckan_default:pass@postgres/ckan_test
+ CKAN_DATASTORE_WRITE_URL: postgresql://datastore_write:pass@postgres/datastore_test
+ CKAN_DATASTORE_READ_URL: postgresql://datastore_read:pass@postgres/datastore_test
+ CKAN_SOLR_URL: http://solr:8983/solr/ckan
+ CKAN_REDIS_URL: redis://redis:6379/1
+ PGPASSWORD: postgres
+
+ steps:
+ - uses: actions/checkout@v2
+ - name: Create Database
+ run: |
+ psql --host=postgres --username=postgres --command="CREATE USER ckan WITH PASSWORD 'pass' NOSUPERUSER NOCREATEDB NOCREATEROLE;"
+ createdb --encoding=utf-8 --host=postgres --username=postgres --owner=ckan ckan_default
+ psql --host=postgres --username=ckan --command="CREATE USER datastore_write WITH PASSWORD 'pass' NOSUPERUSER NOCREATEDB NOCREATEROLE;"
+ psql --host=postgres --username=ckan --command="CREATE USER datastore_read WITH PASSWORD 'pass' NOSUPERUSER NOCREATEDB NOCREATEROLE;"
+ createdb --encoding=utf-8 --host=ckan-postgres --username=ckan --owner=datastore_write datastore_test
+ - name: Install harvester
+ run: |
+ git clone https://github.com/ckan/ckanext-harvest
+ cd ckanext-harvest
+ python setup.py develop
+ pip install -r pip-requirements.txt
+ - name: Install requirements
+ run: |
+ pip install -r pip-requirements.txt
+ pip install -r dev-requirements.txt
+ pip install -e .
+ # Replace default path to CKAN core config file with the one on the container
+ sed -i -e 's/use = config:.*/use = config:\/srv\/app\/src\/ckan\/test-core.ini/' test.ini
+ - name: Setup extension (CKAN >= 2.9)
+ if: ${{ matrix.ckan-version != '2.7' && matrix.ckan-version != '2.8' }}
+ run: |
+ ckan -c test.ini db init
+ ckan -c test.ini harvester initdb
+ ckan -c init.ini spatial initdb
+ - name: Setup extension (CKAN < 2.9)
+ if: ${{ matrix.ckan-version == '2.7' || matrix.ckan-version == '2.8' }}
+ run: |
+ paster --plugin=ckan db init -c test.ini
+ paster --plugin=ckanext-harvest harvester initdb -c test.ini
+ paster --plugin=ckanext-spatial spatial initdb -c test.ini
+ - name: Run tests
+ run: pytest --ckan-ini=test.ini --cov=ckanext.harvest --disable-warnings ckanext/spatial/tests
- - name: Install node js and dependency
- run: |
- curl -sL https://deb.nodesource.com/setup_10.x | bash -
- apt install -y nodejs
- apt install -y libgtk2.0-0 libgtk-3-0 libnotify-dev libgconf-2-4 libnss3 libxss1 libasound2 libxtst6 xauth xvfb
- apt install -y postgresql-client solr-jetty openjdk-8-jdk
-
- - name: install and setup ckan
- run: |
- git clone https://github.com/ckan/ckan
- cd ckan
- pip install -r requirement-setuptools.txt
- pip install -r requirements.txt
- pip install -r dev-requirements.txt
- python setup.py develop
- # SOLR config
- cp ckan/config/solr/schema.xml /etc/solr/conf/schema.xml
- service jetty9 restart || true # erroring out but does seem to work
- # Database Creation
- psql --host=ckan-postgres --username=ckan --command="CREATE USER ${CKAN_POSTGRES_USER} WITH PASSWORD '${CKAN_POSTGRES_PWD}' NOSUPERUSER NOCREATEDB NOCREATEROLE;"
- createdb --encoding=utf-8 --host=ckan-postgres --username=ckan --owner=${CKAN_POSTGRES_USER} ${CKAN_POSTGRES_DB}
- psql --host=ckan-postgres --username=ckan --command="CREATE USER ${CKAN_DATASTORE_POSTGRES_READ_USER} WITH PASSWORD '${CKAN_DATASTORE_POSTGRES_READ_PWD}' NOSUPERUSER NOCREATEDB NOCREATEROLE;"
- psql --host=ckan-postgres --username=ckan --command="CREATE USER ${CKAN_DATASTORE_POSTGRES_WRITE_USER} WITH PASSWORD '${CKAN_DATASTORE_POSTGRES_WRITE_PWD}' NOSUPERUSER NOCREATEDB NOCREATEROLE;"
- createdb --encoding=utf-8 --host=ckan-postgres --username=ckan --owner=${CKAN_DATASTORE_POSTGRES_WRITE_USER} ${CKAN_DATASTORE_POSTGRES_DB}
-
- # Database Initialization
- ckan -c test-core-circle-ci.ini datastore set-permissions | psql --host=ckan-postgres --username=ckan
- - name: install ckanext-harvest
- run: |
- git clone https://github.com/ckan/ckanext-harvest
- cd ckanext-harvest
- python setup.py develop
- pip install -r pip-requirements.txt
-
- - name: install ckanext-spatial
- run: |
- pip install -r pip-requirements.txt
- python setup.py develop
- pip install pycsw
-
- mkdir subdir
- mv test-github-actions.ini subdir
-
- # setup postgis
- psql --host=ckan-postgres --username=ckan -d ckan_test --command="ALTER ROLE ckan_default WITH superuser;"
- psql --host=ckan-postgres --username=ckan -d ckan_test --command="CREATE EXTENSION postgis;"
- psql --host=ckan-postgres --username=ckan -d ckan_test --command="ALTER VIEW geometry_columns OWNER TO ckan_default;"
- psql --host=ckan-postgres --username=ckan -d ckan_test --command="ALTER TABLE spatial_ref_sys OWNER TO ckan_default;"
-
- ckan -c subdir/test-github-actions.ini db init
- ckan -c subdir/test-github-actions.ini harvester initdb
- ckan -c subdir/test-github-actions.ini spatial initdb
-
- - name: run ci
- run: |
- pytest --ckan-ini=subdir/test-github-actions.ini ckanext/spatial/tests
From 0fb40c4d73ff1be361752dab33cb20e83f5376d8 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Fri, 15 Jan 2021 14:15:22 +0900
Subject: [PATCH 084/139] WIP: fix syntax errors
---
bin/ckan_pycsw.py | 16 ++++++++--------
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/bin/ckan_pycsw.py b/bin/ckan_pycsw.py
index e45f15d..e8508bf 100644
--- a/bin/ckan_pycsw.py
+++ b/bin/ckan_pycsw.py
@@ -75,7 +75,7 @@ def load(pycsw_config, ckan_url):
response = requests.get(url)
listing = response.json()
if not isinstance(listing, dict):
- raise RuntimeError, 'Wrong API response: %s' % listing
+ raise RuntimeError('Wrong API response: %s' % listing)
results = listing.get('results')
if not results:
break
@@ -115,7 +115,7 @@ def load(pycsw_config, ckan_url):
ckan_id=ckan_id).delete()
log.info('Deleted %s' % ckan_id)
repo.session.commit()
- except Exception, err:
+ except Exception as err:
repo.session.rollback()
raise
@@ -128,7 +128,7 @@ def load(pycsw_config, ckan_url):
try:
repo.insert(record, 'local', util.get_today_and_now())
log.info('Inserted %s' % ckan_id)
- except Exception, err:
+ except Exception as err:
log.error('ERROR: not inserted %s Error:%s' % (ckan_id, err))
for ckan_id in changed:
@@ -145,9 +145,9 @@ def load(pycsw_config, ckan_url):
ckan_id=ckan_id).update(update_dict)
repo.session.commit()
log.info('Changed %s' % ckan_id)
- except Exception, err:
+ except Exception as err:
repo.session.rollback()
- raise RuntimeError, 'ERROR: %s' % str(err)
+ raise RuntimeError('ERROR: %s' % str(err))
def clear(pycsw_config):
@@ -174,13 +174,13 @@ def get_record(context, repo, ckan_url, ckan_id, ckan_info):
try:
xml = etree.parse(io.BytesIO(response.content))
- except Exception, err:
+ except Exception as err:
log.error('Could not pass xml doc from %s, Error: %s' % (ckan_id, err))
return
try:
record = metadata.parse_record(context, xml, repo)[0]
- except Exception, err:
+ except Exception as err:
log.error('Could not extract metadata from %s, Error: %s' % (ckan_id, err))
return
@@ -270,5 +270,5 @@ if __name__ == '__main__':
elif arg.command == 'clear':
clear(pycsw_config)
else:
- print 'Unknown command {0}'.format(arg.command)
+ print('Unknown command {0}'.format(arg.command))
sys.exit(1)
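The edits to bin/ckan_pycsw.py are the standard Python 2 to 3 syntax conversions (raise, except and print). Condensed into one runnable function for reference, with the old Python 2 spellings kept in comments:

    def demo_py3_forms(listing, command):
        # Python 2: raise RuntimeError, 'Wrong API response: %s' % listing
        if not isinstance(listing, dict):
            raise RuntimeError('Wrong API response: %s' % listing)
        try:
            return listing['results']
        # Python 2: except Exception, err:
        except Exception as err:
            # Python 2: print 'Unknown command {0}'.format(command)
            print('Unknown command {0} ({1})'.format(command, err))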
From 37cc1d2ec1c3be2ee0cd5f7b80ffae0e39898a5d Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Fri, 15 Jan 2021 14:20:08 +0900
Subject: [PATCH 085/139] WIP: fixes user and host
---
.github/workflows/ci.yml | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 025a985..5d58b28 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -50,9 +50,9 @@ jobs:
run: |
psql --host=postgres --username=postgres --command="CREATE USER ckan WITH PASSWORD 'pass' NOSUPERUSER NOCREATEDB NOCREATEROLE;"
createdb --encoding=utf-8 --host=postgres --username=postgres --owner=ckan ckan_default
- psql --host=postgres --username=ckan --command="CREATE USER datastore_write WITH PASSWORD 'pass' NOSUPERUSER NOCREATEDB NOCREATEROLE;"
- psql --host=postgres --username=ckan --command="CREATE USER datastore_read WITH PASSWORD 'pass' NOSUPERUSER NOCREATEDB NOCREATEROLE;"
- createdb --encoding=utf-8 --host=ckan-postgres --username=ckan --owner=datastore_write datastore_test
+ psql --host=postgres --username=postgres --command="CREATE USER datastore_write WITH PASSWORD 'pass' NOSUPERUSER NOCREATEDB NOCREATEROLE;"
+ psql --host=postgres --username=postgres --command="CREATE USER datastore_read WITH PASSWORD 'pass' NOSUPERUSER NOCREATEDB NOCREATEROLE;"
+ createdb --encoding=utf-8 --host=postgres --username=postgres --owner=datastore_write datastore_test
- name: Install harvester
run: |
git clone https://github.com/ckan/ckanext-harvest
From 853842f0b1d6548bf59a93ed84fb26470154ae51 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Fri, 15 Jan 2021 14:33:28 +0900
Subject: [PATCH 086/139] WIP: install via pip
---
.github/workflows/ci.yml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 5d58b28..f52e7ef 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -57,12 +57,12 @@ jobs:
run: |
git clone https://github.com/ckan/ckanext-harvest
cd ckanext-harvest
- python setup.py develop
pip install -r pip-requirements.txt
+ pip install -r dev-requirements.txt
+ pip install -e .
- name: Install requirements
run: |
pip install -r pip-requirements.txt
- pip install -r dev-requirements.txt
pip install -e .
# Replace default path to CKAN core config file with the one on the container
sed -i -e 's/use = config:.*/use = config:\/srv\/app\/src\/ckan\/test-core.ini/' test.ini
From 02f573073852193b0760fb150c53482adedd2750 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Fri, 15 Jan 2021 14:44:45 +0900
Subject: [PATCH 087/139] WIP: install geos
---
.github/workflows/ci.yml | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index f52e7ef..19cbd87 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -60,6 +60,10 @@ jobs:
pip install -r pip-requirements.txt
pip install -r dev-requirements.txt
pip install -e .
+ - name: Install dependency
+ run: |
+ cd /srv/app
+ apk add --no-cache --virtual .build-deps geos geos-dev
- name: Install requirements
run: |
pip install -r pip-requirements.txt
From 6aae2812f565949735fc9c0e4b11a2ee646eeb14 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Fri, 15 Jan 2021 14:49:05 +0900
Subject: [PATCH 088/139] WIP: added proj
---
.github/workflows/ci.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 19cbd87..ba07c78 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -63,7 +63,7 @@ jobs:
- name: Install dependency
run: |
cd /srv/app
- apk add --no-cache --virtual .build-deps geos geos-dev
+ apk add --no-cache --virtual .build-deps geos geos-dev proj
- name: Install requirements
run: |
pip install -r pip-requirements.txt
From 787dd598ee4b4bf294e563a97b391bde8a40527a Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Fri, 15 Jan 2021 14:57:50 +0900
Subject: [PATCH 089/139] WIP: added proj-util
---
.github/workflows/ci.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index ba07c78..3a99909 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -63,7 +63,7 @@ jobs:
- name: Install dependency
run: |
cd /srv/app
- apk add --no-cache --virtual .build-deps geos geos-dev proj
+ apk add --no-cache --virtual .build-deps geos geos-dev proj-util
- name: Install requirements
run: |
pip install -r pip-requirements.txt
From 09c766f369bc7384d0a55c1ac8f36c15f8b19ffa Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Fri, 15 Jan 2021 15:08:50 +0900
Subject: [PATCH 090/139] WIP: downgrade pyproj
---
pip-requirements.txt | 1 +
1 file changed, 1 insertion(+)
diff --git a/pip-requirements.txt b/pip-requirements.txt
index 5061280..087bd84 100644
--- a/pip-requirements.txt
+++ b/pip-requirements.txt
@@ -1,6 +1,7 @@
GeoAlchemy>=0.6
GeoAlchemy2==0.5.0
Shapely>=1.2.13
+pyproj==2.6.1
OWSLib==0.18.0
lxml>=2.3
argparse
From 849c0b815cb15740368bfe2deeac30549f1bd775 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Fri, 15 Jan 2021 15:09:23 +0900
Subject: [PATCH 091/139] WIP: added gcc, libxml2 and libxslt
---
.github/workflows/ci.yml | 10 +++++++++-
1 file changed, 9 insertions(+), 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 3a99909..38c9e29 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -63,7 +63,15 @@ jobs:
- name: Install dependency
run: |
cd /srv/app
- apk add --no-cache --virtual .build-deps geos geos-dev proj-util
+ apk add --no-cache \
+ geos \
+ proj-util \
+ libxml2 \
+ libxslt
+ apk add --no-cache --virtual .build-deps \
+ gcc \
+ libxml2-dev \
+ libxslt-dev
- name: Install requirements
run: |
pip install -r pip-requirements.txt
From 071d29a4b6a5c178252f13399c8a60e0dd523cfd Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Fri, 15 Jan 2021 15:20:36 +0900
Subject: [PATCH 092/139] WIP: downgrade pyproj 2.2.2
---
pip-requirements.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pip-requirements.txt b/pip-requirements.txt
index 087bd84..66aeaec 100644
--- a/pip-requirements.txt
+++ b/pip-requirements.txt
@@ -1,7 +1,7 @@
GeoAlchemy>=0.6
GeoAlchemy2==0.5.0
Shapely>=1.2.13
-pyproj==2.6.1
+pyproj==2.2.2
OWSLib==0.18.0
lxml>=2.3
argparse
From e97f103a4def9235a7db0c5df5b61689f77405c1 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Fri, 15 Jan 2021 15:21:55 +0900
Subject: [PATCH 093/139] WIP: install python-dev or python3-dev
---
.github/workflows/ci.yml | 14 +++++++++++++-
1 file changed, 13 insertions(+), 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 38c9e29..809bc1f 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -60,7 +60,7 @@ jobs:
pip install -r pip-requirements.txt
pip install -r dev-requirements.txt
pip install -e .
- - name: Install dependency
+ - name: Install dependency (common)
run: |
cd /srv/app
apk add --no-cache \
@@ -72,6 +72,18 @@ jobs:
gcc \
libxml2-dev \
libxslt-dev
+ - name: Install dependency (python2)
+ if: ${{ matrix.ckan-version != '2.9' }}
+ run: |
+ cd /srv/app
+ apk add --no-cache --virtual .build-deps \
+ python-dev
+ - name: Install dependency (python3)
+ if: ${{ matrix.ckan-version != '2.9' }}
+ run: |
+ cd /srv/app
+ apk add --no-cache --virtual .build-deps \
+ python3-dev
- name: Install requirements
run: |
pip install -r pip-requirements.txt
From 75e7f404acfe6c54e86c00d1d98118e8fc9c1604 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Fri, 15 Jan 2021 15:31:53 +0900
Subject: [PATCH 094/139] WIP: install python2-dev
---
.github/workflows/ci.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 809bc1f..24b7ac7 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -77,7 +77,7 @@ jobs:
run: |
cd /srv/app
apk add --no-cache --virtual .build-deps \
- python-dev
+ python2-dev
- name: Install dependency (python3)
if: ${{ matrix.ckan-version != '2.9' }}
run: |
From 578592a459f340021ce64e51865d7c46384c5311 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Fri, 15 Jan 2021 15:38:13 +0900
Subject: [PATCH 095/139] WIP: install packages globally
---
.github/workflows/ci.yml | 10 +++-------
1 file changed, 3 insertions(+), 7 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 24b7ac7..6bfb87e 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -62,27 +62,23 @@ jobs:
pip install -e .
- name: Install dependency (common)
run: |
- cd /srv/app
apk add --no-cache \
geos \
proj-util \
libxml2 \
- libxslt
- apk add --no-cache --virtual .build-deps \
+ libxslt \
gcc \
libxml2-dev \
libxslt-dev
- name: Install dependency (python2)
if: ${{ matrix.ckan-version != '2.9' }}
run: |
- cd /srv/app
- apk add --no-cache --virtual .build-deps \
+ apk add --no-cache \
python2-dev
- name: Install dependency (python3)
if: ${{ matrix.ckan-version != '2.9' }}
run: |
- cd /srv/app
- apk add --no-cache --virtual .build-deps \
+ apk add --no-cache \
python3-dev
- name: Install requirements
run: |
From 1b627d2f8069ffcbe7a4b550e908100ba051474e Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Fri, 15 Jan 2021 15:42:29 +0900
Subject: [PATCH 096/139] WIP: install geos-dev and proj-dev
---
.github/workflows/ci.yml | 2 ++
1 file changed, 2 insertions(+)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 6bfb87e..f1b1b59 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -64,7 +64,9 @@ jobs:
run: |
apk add --no-cache \
geos \
+ geos-dev \
proj-util \
+ proj-dev \
libxml2 \
libxslt \
gcc \
From 4ae6c4e1f70a88c1a390d4b091554897ef775fce Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Fri, 15 Jan 2021 15:48:48 +0900
Subject: [PATCH 097/139] WIP: fixes database name and user
---
.github/workflows/ci.yml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index f1b1b59..cd1292b 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -48,8 +48,8 @@ jobs:
- uses: actions/checkout@v2
- name: Create Database
run: |
- psql --host=postgres --username=postgres --command="CREATE USER ckan WITH PASSWORD 'pass' NOSUPERUSER NOCREATEDB NOCREATEROLE;"
- createdb --encoding=utf-8 --host=postgres --username=postgres --owner=ckan ckan_default
+ psql --host=postgres --username=postgres --command="CREATE USER ckan_default WITH PASSWORD 'pass' NOSUPERUSER NOCREATEDB NOCREATEROLE;"
+ createdb --encoding=utf-8 --host=postgres --username=postgres --owner=ckan_default ckan_test
psql --host=postgres --username=postgres --command="CREATE USER datastore_write WITH PASSWORD 'pass' NOSUPERUSER NOCREATEDB NOCREATEROLE;"
psql --host=postgres --username=postgres --command="CREATE USER datastore_read WITH PASSWORD 'pass' NOSUPERUSER NOCREATEDB NOCREATEROLE;"
createdb --encoding=utf-8 --host=postgres --username=postgres --owner=datastore_write datastore_test
From 9fe84f2491ff324701f5c68ea8e34c81a4892ec5 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Fri, 15 Jan 2021 15:52:32 +0900
Subject: [PATCH 098/139] WIP: fix typo
---
.github/workflows/ci.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index cd1292b..a2605fb 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -93,7 +93,7 @@ jobs:
run: |
ckan -c test.ini db init
ckan -c test.ini harvester initdb
- ckan -c init.ini spatial initdb
+ ckan -c test.ini spatial initdb
- name: Setup extension (CKAN < 2.9)
if: ${{ matrix.ckan-version == '2.7' || matrix.ckan-version == '2.8' }}
run: |
From d0735a9bf8d9a344cdc45830436816d535af4e80 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Fri, 15 Jan 2021 16:00:04 +0900
Subject: [PATCH 099/139] WIP: fixes missing python3-dev install, split
 pip-requirements.txt to support both python2 and 3
---
.github/workflows/ci.yml | 11 +++++++++--
pip3-requirements.txt | 10 ++++++++++
2 files changed, 19 insertions(+), 2 deletions(-)
create mode 100644 pip3-requirements.txt
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index a2605fb..33e7446 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -78,13 +78,20 @@ jobs:
apk add --no-cache \
python2-dev
- name: Install dependency (python3)
- if: ${{ matrix.ckan-version != '2.9' }}
+ if: ${{ matrix.ckan-version == '2.9' }}
run: |
apk add --no-cache \
python3-dev
- - name: Install requirements
+ - name: Install requirements (python2)
+ if: ${{ matrix.ckan-version != '2.9' }}
run: |
pip install -r pip-requirements.txt
+ - name: Install requirements (python3)
+ if: ${{ matrix.ckan-version == '2.9' }}
+ run: |
+ pip install -r pip3-requirements.txt
+ - name: Install requirements
+ run: |
pip install -e .
# Replace default path to CKAN core config file with the one on the container
sed -i -e 's/use = config:.*/use = config:\/srv\/app\/src\/ckan\/test-core.ini/' test.ini
diff --git a/pip3-requirements.txt b/pip3-requirements.txt
new file mode 100644
index 0000000..087bd84
--- /dev/null
+++ b/pip3-requirements.txt
@@ -0,0 +1,10 @@
+GeoAlchemy>=0.6
+GeoAlchemy2==0.5.0
+Shapely>=1.2.13
+pyproj==2.6.1
+OWSLib==0.18.0
+lxml>=2.3
+argparse
+pyparsing>=2.1.10
+requests>=1.1.0
+six
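The CI now installs a different requirements file per interpreter. Purely as an illustration of that split (this snippet is not part of the repository), the selection boils down to:

    # Illustration only, not repo code: mirror the CI's python2/python3
    # split when choosing which requirements file to install locally.
    import sys

    reqs = "pip-requirements.txt" if sys.version_info[0] == 2 else "pip3-requirements.txt"
    print("pip install -r {}".format(reqs))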
From 60f5cf1c21aba1ede1f952188e3a42e81211a33c Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Fri, 15 Jan 2021 16:11:28 +0900
Subject: [PATCH 100/139] WIP: setup postgis
---
.github/workflows/ci.yml | 6 ++++++
1 file changed, 6 insertions(+)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 33e7446..45f2228 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -95,6 +95,12 @@ jobs:
pip install -e .
# Replace default path to CKAN core config file with the one on the container
sed -i -e 's/use = config:.*/use = config:\/srv\/app\/src\/ckan\/test-core.ini/' test.ini
+ - name: setup postgis
+ run: |
+ psql --host=postgres --username=postgres -d ckan_test --command="ALTER ROLE ckan_default WITH superuser;"
+ psql --host=postgres --username=postgres -d ckan_test --command="CREATE EXTENSION postgis;"
+ psql --host=postgres --username=postgres -d ckan_test --command="ALTER VIEW geometry_columns OWNER TO ckan_default;"
+ psql --host=postgres --username=postgres -d ckan_test --command="ALTER TABLE spatial_ref_sys OWNER TO ckan_default;"
- name: Setup extension (CKAN >= 2.9)
if: ${{ matrix.ckan-version != '2.7' && matrix.ckan-version != '2.8' }}
run: |
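The same PostGIS bootstrap can be reproduced outside the workflow with SQLAlchemy; a minimal sketch, assuming the CI service host and postgres superuser credentials used in this job:

    # Sketch: enable PostGIS for ckan_test, mirroring the psql step above.
    # Host and credentials are the CI service defaults and are assumptions here.
    from sqlalchemy import create_engine, text

    engine = create_engine("postgresql://postgres:postgres@postgres/ckan_test")
    with engine.begin() as conn:
        conn.execute(text("ALTER ROLE ckan_default WITH SUPERUSER"))
        conn.execute(text("CREATE EXTENSION IF NOT EXISTS postgis"))
        conn.execute(text("ALTER VIEW geometry_columns OWNER TO ckan_default"))
        conn.execute(text("ALTER TABLE spatial_ref_sys OWNER TO ckan_default"))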
From e690b783ffd00ff41c3cf90e70eecdbf0406e9b2 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Fri, 15 Jan 2021 16:19:23 +0900
Subject: [PATCH 101/139] WIP: remove some option from pytest
---
.github/workflows/ci.yml | 4 +---
1 file changed, 1 insertion(+), 3 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 45f2228..3f0f8fc 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -114,6 +114,4 @@ jobs:
paster --plugin=ckanext-harvest harvester initdb -c test.ini
paster --plugin=ckanext-spatial spatial initdb -c test.ini
- name: Run tests
- run: pytest --ckan-ini=test.ini --cov=ckanext.harvest --disable-warnings ckanext/spatial/tests
-
-
+ run: pytest --ckan-ini=test.ini ckanext/spatial/tests
From 856331ed612c58a50de5139849e5575debdc5201 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Fri, 15 Jan 2021 16:24:08 +0900
Subject: [PATCH 102/139] WIP: remove --ckan-ini option
---
.github/workflows/ci.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 3f0f8fc..21e2caf 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -114,4 +114,4 @@ jobs:
paster --plugin=ckanext-harvest harvester initdb -c test.ini
paster --plugin=ckanext-spatial spatial initdb -c test.ini
- name: Run tests
- run: pytest --ckan-ini=test.ini ckanext/spatial/tests
+ run: pytest ckanext/spatial/tests
From a4cb1ada5a3b0575b0e5bbc916f6ee6971ce0096 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Fri, 15 Jan 2021 16:42:01 +0900
Subject: [PATCH 103/139] WIP: test: remove ckan_setup
---
.github/workflows/ci.yml | 2 +-
conftest.py | 1 -
2 files changed, 1 insertion(+), 2 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 21e2caf..3f0f8fc 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -114,4 +114,4 @@ jobs:
paster --plugin=ckanext-harvest harvester initdb -c test.ini
paster --plugin=ckanext-spatial spatial initdb -c test.ini
- name: Run tests
- run: pytest ckanext/spatial/tests
+ run: pytest --ckan-ini=test.ini ckanext/spatial/tests
diff --git a/conftest.py b/conftest.py
index 89d0a32..c3d9f21 100644
--- a/conftest.py
+++ b/conftest.py
@@ -1,6 +1,5 @@
# -*- coding: utf-8 -*-
pytest_plugins = [
- u'ckanext.spatial.tests.ckan_setup',
u'ckanext.spatial.tests.fixtures',
]
From a9bb23c4a67b4ddcd30b3e06f5d5896e42dba1ea Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Fri, 15 Jan 2021 17:04:37 +0900
Subject: [PATCH 104/139] WIP: downgrade postgis
---
.github/workflows/ci.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 3f0f8fc..388a790 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -28,7 +28,7 @@ jobs:
solr:
image: ckan/ckan-solr-dev:${{ matrix.ckan-version }}
postgres:
- image: postgis/postgis:12-2.5
+ image: postgis/postgis:9.6-2.5
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
From 20e75982645eca24b845467af27587c4127883bf Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Mon, 18 Jan 2021 10:43:59 +0000
Subject: [PATCH 105/139] WIP: supports ckan 2.8
---
.../spatial/snippets/dataset_map_base.html | 3 +-
.../snippets/dataset_map_resource.html | 1 +
.../spatial/snippets/spatial_query.html | 4 +-
.../snippets/spatial_query_resource.html | 1 +
.../spatial/tests/functional/test_package.py | 118 ++++++++++++------
.../spatial/tests/functional/test_widgets.py | 13 +-
6 files changed, 96 insertions(+), 44 deletions(-)
create mode 100644 ckanext/spatial/templates/spatial/snippets/dataset_map_resource.html
create mode 100644 ckanext/spatial/templates/spatial/snippets/spatial_query_resource.html
diff --git a/ckanext/spatial/templates/spatial/snippets/dataset_map_base.html b/ckanext/spatial/templates/spatial/snippets/dataset_map_base.html
index ed88c39..2582b28 100644
--- a/ckanext/spatial/templates/spatial/snippets/dataset_map_base.html
+++ b/ckanext/spatial/templates/spatial/snippets/dataset_map_base.html
@@ -18,4 +18,5 @@ extent
-{% include 'spatial/snippets/dataset_map_asset.html' %}
+{% set type = 'asset' if h.ckan_version() > '2.9' else 'resource' %}
+{% include 'spatial/snippets/dataset_map_' ~ type ~ '.html' %}
diff --git a/ckanext/spatial/templates/spatial/snippets/dataset_map_resource.html b/ckanext/spatial/templates/spatial/snippets/dataset_map_resource.html
new file mode 100644
index 0000000..38c2f42
--- /dev/null
+++ b/ckanext/spatial/templates/spatial/snippets/dataset_map_resource.html
@@ -0,0 +1 @@
+{% resource 'ckanext-spatial/dataset_map' %}
diff --git a/ckanext/spatial/templates/spatial/snippets/spatial_query.html b/ckanext/spatial/templates/spatial/snippets/spatial_query.html
index 451340c..0ee3b98 100644
--- a/ckanext/spatial/templates/spatial/snippets/spatial_query.html
+++ b/ckanext/spatial/templates/spatial/snippets/spatial_query.html
@@ -26,5 +26,5 @@ e.g.
-
-{% include 'spatial/snippets/spatial_query_asset.html' %}
+{% set type = 'asset' if h.ckan_version() > '2.9' else 'resource' %}
+{% include 'spatial/snippets/spatial_query_' ~ type ~ '.html' %}
diff --git a/ckanext/spatial/templates/spatial/snippets/spatial_query_resource.html b/ckanext/spatial/templates/spatial/snippets/spatial_query_resource.html
new file mode 100644
index 0000000..499cde4
--- /dev/null
+++ b/ckanext/spatial/templates/spatial/snippets/spatial_query_resource.html
@@ -0,0 +1 @@
+{% resource 'ckanext-spatial/spatial_query' %}
diff --git a/ckanext/spatial/tests/functional/test_package.py b/ckanext/spatial/tests/functional/test_package.py
index aaf4c53..811b000 100644
--- a/ckanext/spatial/tests/functional/test_package.py
+++ b/ckanext/spatial/tests/functional/test_package.py
@@ -5,11 +5,15 @@ import pytest
from ckan.model import Session
from ckan.lib.helpers import url_for
+import ckan.plugins.toolkit as tk
+
import ckan.tests.factories as factories
from ckanext.spatial.model import PackageExtent
from ckanext.spatial.tests.base import SpatialTestBase
+if not tk.check_ckan_version(min_version="2.9"):
+ import ckan.tests.helpers as helpers
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
class TestSpatialExtra(SpatialTestBase):
@@ -19,16 +23,24 @@ class TestSpatialExtra(SpatialTestBase):
env = {"REMOTE_USER": user["name"].encode("ascii")}
dataset = factories.Dataset(user=user)
- offset = url_for("dataset.edit", id=dataset["id"])
+ if tk.check_ckan_version(min_version="2.9"):
+ offset = url_for("dataset.edit", id=dataset["id"])
+ else:
+ offset = url_for(controller="package", action="edit", id=dataset["id"])
res = app.get(offset, extra_environ=env)
- data = {
- "name": dataset['name'],
- "extras__0__key": u"spatial",
- "extras__0__value": self.geojson_examples["point"]
- }
-
- res = app.post(offset, environ_overrides=env, data=data)
+ if tk.check_ckan_version(min_version="2.9"):
+ data = {
+ "name": dataset['name'],
+ "extras__0__key": u"spatial",
+ "extras__0__value": self.geojson_examples["point"]
+ }
+ res = app.post(offset, environ_overrides=env, data=data)
+ else:
+ form = res.forms[1]
+ form['extras__0__key'] = u'spatial'
+ form['extras__0__value'] = self.geojson_examples['point']
+ res = helpers.submit_and_follow(app, form, env, 'save')
assert "Error" not in res, res
@@ -59,28 +71,42 @@ class TestSpatialExtra(SpatialTestBase):
env = {"REMOTE_USER": user["name"].encode("ascii")}
dataset = factories.Dataset(user=user)
- offset = url_for("dataset.edit", id=dataset["id"])
+ if tk.check_ckan_version(min_version="2.9"):
+ offset = url_for("dataset.edit", id=dataset["id"])
+ else:
+ offset = url_for(controller="package", action="edit", id=dataset["id"])
res = app.get(offset, extra_environ=env)
- data = {
- "name": dataset['name'],
- "extras__0__key": u"spatial",
- "extras__0__value": self.geojson_examples["point"]
- }
- res = app.post(offset, environ_overrides=env, data=data)
+ if tk.check_ckan_version(min_version="2.9"):
+ data = {
+ "name": dataset['name'],
+ "extras__0__key": u"spatial",
+ "extras__0__value": self.geojson_examples["point"]
+ }
+ res = app.post(offset, environ_overrides=env, data=data)
+ else:
+ form = res.forms[1]
+ form['extras__0__key'] = u'spatial'
+ form['extras__0__value'] = self.geojson_examples['point']
+ res = helpers.submit_and_follow(app, form, env, 'save')
assert "Error" not in res, res
res = app.get(offset, extra_environ=env)
- data = {
- "name": dataset['name'],
- "extras__0__key": u"spatial",
- "extras__0__value": self.geojson_examples["polygon"]
- }
-
- res = app.post(offset, environ_overrides=env, data=data)
+ if tk.check_ckan_version(min_version="2.9"):
+ data = {
+ "name": dataset['name'],
+ "extras__0__key": u"spatial",
+ "extras__0__value": self.geojson_examples["polygon"]
+ }
+ res = app.post(offset, environ_overrides=env, data=data)
+ else:
+ form = res.forms[1]
+ form['extras__0__key'] = u'spatial'
+ form['extras__0__value'] = self.geojson_examples['polygon']
+ res = helpers.submit_and_follow(app, form, env, 'save')
assert "Error" not in res, res
@@ -107,16 +133,24 @@ class TestSpatialExtra(SpatialTestBase):
env = {"REMOTE_USER": user["name"].encode("ascii")}
dataset = factories.Dataset(user=user)
- offset = url_for("dataset.edit", id=dataset["id"])
+ if tk.check_ckan_version(min_version="2.9"):
+ offset = url_for("dataset.edit", id=dataset["id"])
+ else:
+ offset = url_for(controller="package", action="edit", id=dataset["id"])
res = app.get(offset, extra_environ=env)
- data = {
- "name": dataset['name'],
- "extras__0__key": u"spatial",
- "extras__0__value": u'{"Type":Bad Json]'
- }
-
- res = app.post(offset, environ_overrides=env, data=data)
+ if tk.check_ckan_version(min_version="2.9"):
+ data = {
+ "name": dataset['name'],
+ "extras__0__key": u"spatial",
+ "extras__0__value": u'{"Type":Bad Json]'
+ }
+ res = app.post(offset, environ_overrides=env, data=data)
+ else:
+ form = res.forms[1]
+ form['extras__0__key'] = u'spatial'
+ form['extras__0__value'] = u'{"Type":Bad Json]'
+ res = helpers.webtest_submit(form, extra_environ=env, name='save')
assert "Error" in res, res
assert "Spatial" in res
@@ -128,16 +162,24 @@ class TestSpatialExtra(SpatialTestBase):
env = {"REMOTE_USER": user["name"].encode("ascii")}
dataset = factories.Dataset(user=user)
- offset = url_for("dataset.edit", id=dataset["id"])
+ if tk.check_ckan_version(min_version="2.9"):
+ offset = url_for("dataset.edit", id=dataset["id"])
+ else:
+ offset = url_for(controller="package", action="edit", id=dataset["id"])
res = app.get(offset, extra_environ=env)
- data = {
- "name": dataset['name'],
- "extras__0__key": u"spatial",
- "extras__0__value": u'{"Type":"Bad_GeoJSON","a":2}'
- }
-
- res = app.post(offset, environ_overrides=env, data=data)
+ if tk.check_ckan_version(min_version="2.9"):
+ data = {
+ "name": dataset['name'],
+ "extras__0__key": u"spatial",
+ "extras__0__value": u'{"Type":"Bad_GeoJSON","a":2}'
+ }
+ res = app.post(offset, environ_overrides=env, data=data)
+ else:
+ form = res.forms[1]
+ form['extras__0__key'] = u'spatial'
+ form['extras__0__value'] = u'{"Type":"Bad_GeoJSON","a":2}'
+ res = helpers.webtest_submit(form, extra_environ=env, name='save')
assert "Error" in res, res
assert "Spatial" in res
diff --git a/ckanext/spatial/tests/functional/test_widgets.py b/ckanext/spatial/tests/functional/test_widgets.py
index a7069b1..756a02c 100644
--- a/ckanext/spatial/tests/functional/test_widgets.py
+++ b/ckanext/spatial/tests/functional/test_widgets.py
@@ -5,23 +5,30 @@ from ckanext.spatial.tests.base import SpatialTestBase
import ckan.tests.factories as factories
+import ckan.plugins.toolkit as tk
class TestSpatialWidgets(SpatialTestBase):
- @pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
+ @pytest.mark.usefixtures('with_plugins', 'clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
def test_dataset_map(self, app):
dataset = factories.Dataset(
extras=[
{"key": "spatial", "value": self.geojson_examples["point"]}
],
)
- offset = url_for("dataset.read", id=dataset["id"])
+ if tk.check_ckan_version(min_version="2.9"):
+ offset = url_for("dataset.read", id=dataset["id"])
+ else:
+ offset = url_for(controller="package", action="read", id=dataset["id"])
res = app.get(offset)
assert 'data-module="dataset-map"' in res
assert "dataset_map.js" in res
def test_spatial_search_widget(self, app):
- offset = url_for("dataset.search")
+ if tk.check_ckan_version(min_version="2.9"):
+ offset = url_for("dataset.search")
+ else:
+ offset = url_for(controller="package", action="search")
res = app.get(offset)
assert 'data-module="spatial-query"' in res
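Each test in this patch repeats the same CKAN 2.8/2.9 branch when building URLs. A hypothetical helper (not part of the patch) that captures the pattern:

    # Hypothetical helper showing the version-branching pattern used in these
    # tests: Flask endpoints on CKAN >= 2.9, Pylons controller/action before.
    import ckan.plugins.toolkit as tk
    from ckan.lib.helpers import url_for

    def dataset_url(action, dataset_id):
        if tk.check_ckan_version(min_version="2.9"):
            return url_for("dataset.{}".format(action), id=dataset_id)
        return url_for(controller="package", action=action, id=dataset_id)

    # e.g. offset = dataset_url("edit", dataset["id"])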
From 0b4050957e159b9e6f292bf526184dcf322b03da Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Mon, 18 Jan 2021 11:07:16 +0000
Subject: [PATCH 106/139] WIP: remove package_*_return_url
---
test.ini | 7 -------
1 file changed, 7 deletions(-)
diff --git a/test.ini b/test.ini
index c752556..b178643 100644
--- a/test.ini
+++ b/test.ini
@@ -21,13 +21,6 @@ ckan.spatial.testing = true
ckan.spatial.validator.profiles = iso19139,constraints,gemini2
ckan.harvest.mq.type = redis
-# NB: other test configuration should go in test-core.ini, which is
-# what the postgres tests use.
-
-package_new_return_url = http://test.ckan.net/dataset/?test=new
-package_edit_return_url = http://test.ckan.net/dataset/?test=edit
-
-
# Logging configuration
[loggers]
keys = root, ckan, sqlalchemy
From b0ed33ef89745600789d745da8bb35c5678a0e9a Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Mon, 18 Jan 2021 11:36:45 +0000
Subject: [PATCH 107/139] WIP: remove with_plugins fixture
---
ckanext/spatial/tests/test_api.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/ckanext/spatial/tests/test_api.py b/ckanext/spatial/tests/test_api.py
index 029f3a7..a92e26e 100644
--- a/ckanext/spatial/tests/test_api.py
+++ b/ckanext/spatial/tests/test_api.py
@@ -16,7 +16,7 @@ extents = {
}
-@pytest.mark.usefixtures('with_plugins', 'clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
+@pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
class TestAction(SpatialTestBase):
def test_spatial_query(self):
dataset = factories.Dataset(
From d919c04da59970366985b5fd81fda70b051c7c6f Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Mon, 18 Jan 2021 20:48:07 +0900
Subject: [PATCH 108/139] WIP: added test for ci
---
ckanext/spatial/tests/test_api.py | 3 +++
1 file changed, 3 insertions(+)
diff --git a/ckanext/spatial/tests/test_api.py b/ckanext/spatial/tests/test_api.py
index a92e26e..fd0d6e0 100644
--- a/ckanext/spatial/tests/test_api.py
+++ b/ckanext/spatial/tests/test_api.py
@@ -25,6 +25,9 @@ class TestAction(SpatialTestBase):
]
)
+ result = helpers.call_action("package_search")
+ assert(result["count"] == 1)
+
result = helpers.call_action(
"package_search", extras={"ext_bbox": "-180,-90,180,90"}
)
From 75cf94ffef5e3c8f7161c6d265c50cdd6202e261 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Mon, 18 Jan 2021 12:05:03 +0000
Subject: [PATCH 109/139] WIP: set search_backend to postgis
---
test.ini | 1 +
1 file changed, 1 insertion(+)
diff --git a/test.ini b/test.ini
index b178643..d52610b 100644
--- a/test.ini
+++ b/test.ini
@@ -19,6 +19,7 @@ ckan.spatial.srid = 4326
ckan.spatial.default_map_extent=-6.88,49.74,0.50,59.2
ckan.spatial.testing = true
ckan.spatial.validator.profiles = iso19139,constraints,gemini2
+ckanext.spatial.search_backend = postgis
ckan.harvest.mq.type = redis
# Logging configuration
From 4448fede1bc0d985df6fbabdeaf5ab8bcd4ffde4 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Mon, 18 Jan 2021 12:20:30 +0000
Subject: [PATCH 110/139] WIP: set fixtures on each test function
---
ckanext/spatial/tests/test_api.py | 13 +++++++++++--
1 file changed, 11 insertions(+), 2 deletions(-)
diff --git a/ckanext/spatial/tests/test_api.py b/ckanext/spatial/tests/test_api.py
index fd0d6e0..7cc0dfe 100644
--- a/ckanext/spatial/tests/test_api.py
+++ b/ckanext/spatial/tests/test_api.py
@@ -15,9 +15,8 @@ extents = {
"dateline2": '{"type":"Polygon","coordinates":[[[170,60],[-170,60],[-170,70],[170,70],[170,60]]]}',
}
-
-@pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
class TestAction(SpatialTestBase):
+ @pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
def test_spatial_query(self):
dataset = factories.Dataset(
extras=[
@@ -35,6 +34,7 @@ class TestAction(SpatialTestBase):
assert(result["count"] == 1)
assert(result["results"][0]["id"] == dataset["id"])
+ @pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
def test_spatial_query_outside_bbox(self):
factories.Dataset(
@@ -49,6 +49,7 @@ class TestAction(SpatialTestBase):
assert(result["count"] == 0)
+ @pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
def test_spatial_query_wrong_bbox(self):
with pytest.raises(SearchError):
helpers.call_action(
@@ -56,6 +57,7 @@ class TestAction(SpatialTestBase):
extras={"ext_bbox": "-10,-20,10,a"},
)
+ @pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
def test_spatial_query_nz(self):
dataset = factories.Dataset(
extras=[{"key": "spatial", "value": extents["nz"]}]
@@ -68,6 +70,7 @@ class TestAction(SpatialTestBase):
assert(result["count"] == 1)
assert(result["results"][0]["id"] == dataset["id"])
+ @pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
def test_spatial_query_nz_wrap(self):
dataset = factories.Dataset(
extras=[{"key": "spatial", "value": extents["nz"]}]
@@ -79,6 +82,7 @@ class TestAction(SpatialTestBase):
assert(result["count"] == 1)
assert(result["results"][0]["id"] == dataset["id"])
+ @pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
def test_spatial_query_ohio(self):
dataset = factories.Dataset(
@@ -92,6 +96,7 @@ class TestAction(SpatialTestBase):
assert(result["count"] == 1)
assert(result["results"][0]["id"] == dataset["id"])
+ @pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
def test_spatial_query_ohio_wrap(self):
dataset = factories.Dataset(
@@ -105,6 +110,7 @@ class TestAction(SpatialTestBase):
assert(result["count"] == 1)
assert(result["results"][0]["id"] == dataset["id"])
+ @pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
def test_spatial_query_dateline_1(self):
dataset = factories.Dataset(
@@ -118,6 +124,7 @@ class TestAction(SpatialTestBase):
assert(result["count"] == 1)
assert(result["results"][0]["id"] == dataset["id"])
+ @pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
def test_spatial_query_dateline_2(self):
dataset = factories.Dataset(
@@ -131,6 +138,7 @@ class TestAction(SpatialTestBase):
assert(result["count"] == 1)
assert(result["results"][0]["id"] == dataset["id"])
+ @pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
def test_spatial_query_dateline_3(self):
dataset = factories.Dataset(
@@ -144,6 +152,7 @@ class TestAction(SpatialTestBase):
assert(result["count"] == 1)
assert(result["results"][0]["id"] == dataset["id"])
+ @pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
def test_spatial_query_dateline_4(self):
dataset = factories.Dataset(
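The class-level fixture list is replaced by the same fixtures requested per test; reduced to its shape, the pattern is (a sketch, not the full test module):

    # Sketch of the per-test fixture pattern introduced here: every test gets
    # a clean PostGIS/database/search-index state instead of sharing one.
    import pytest

    SPATIAL_FIXTURES = ('clean_postgis', 'clean_db', 'clean_index',
                        'harvest_setup', 'spatial_setup')

    class TestActionSketch(object):

        @pytest.mark.usefixtures(*SPATIAL_FIXTURES)
        def test_spatial_query_sketch(self):
            pass  # build a dataset and call package_search, as in the tests above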
From 706c47b2d91dd01cba3a3555cee9e0411c48589a Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Mon, 18 Jan 2021 21:31:53 +0900
Subject: [PATCH 111/139] WIP: use postgis 10-2.5 image
---
.github/workflows/ci.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 388a790..2dc9939 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -28,7 +28,7 @@ jobs:
solr:
image: ckan/ckan-solr-dev:${{ matrix.ckan-version }}
postgres:
- image: postgis/postgis:9.6-2.5
+ image: postgis/postgis:10-2.5
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
From 3364ca09ab7a6ca46a11c7e4f133d965e1323689 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Mon, 18 Jan 2021 12:49:28 +0000
Subject: [PATCH 112/139] WIP: don't drop idx_package_extent_the_geom
---
ckanext/spatial/tests/conftest.py | 26 --------------------------
1 file changed, 26 deletions(-)
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index 790c0e8..0f3a97f 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -27,30 +27,6 @@ def clean_postgis():
Session.execute("DROP EXTENSION IF EXISTS postgis CASCADE")
Session.commit()
-
-@pytest.fixture
-def spatial_clean_db(reset_db):
- reset_db()
-
- # This will create the PostGIS tables (geometry_columns and
- # spatial_ref_sys) which were deleted when rebuilding the database
- table = Table("spatial_ref_sys", meta.metadata)
- if not table.exists():
- create_postgis_tables()
-
- # When running the tests with the --reset-db option for some
- # reason the metadata holds a reference to the `package_extent`
- # table after being deleted, causing an InvalidRequestError
- # exception when trying to recreate it further on
- if "package_extent" in meta.metadata.tables:
- meta.metadata.remove(meta.metadata.tables["package_extent"])
-
- spatial_db_setup()
-
- # Setup the harvest tables
- harvest_model_setup()
-
-
@pytest.fixture
def harvest_setup():
harvest_model.setup()
@@ -70,7 +46,5 @@ def spatial_setup():
# exception when trying to recreate it further on
if "package_extent" in meta.metadata.tables:
meta.metadata.remove(meta.metadata.tables["package_extent"])
- Session.execute("DROP INDEX IF EXISTS idx_package_extent_the_geom")
- Session.commit()
spatial_db_setup()
From 1e8b2237295fdd2cdad06869f1f2274ad7767540 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Mon, 18 Jan 2021 13:11:09 +0000
Subject: [PATCH 113/139] WIP: test: copy error function to last test
---
.../spatial/tests/model/test_package_extent.py | 18 ++++++++++++++++++
1 file changed, 18 insertions(+)
diff --git a/ckanext/spatial/tests/model/test_package_extent.py b/ckanext/spatial/tests/model/test_package_extent.py
index ba464d7..6ab6bd3 100644
--- a/ckanext/spatial/tests/model/test_package_extent.py
+++ b/ckanext/spatial/tests/model/test_package_extent.py
@@ -5,6 +5,7 @@ from shapely.geometry import asShape
from ckan.model import Session
from ckan.lib.helpers import json
+import ckan.tests.helpers as helpers
import ckan.tests.factories as factories
from ckanext.spatial.model import PackageExtent
@@ -104,3 +105,20 @@ class TestPackageExtent(SpatialTestBase):
"ST_Polygon"
)
assert(package_extent.the_geom.srid == self.db_srid)
+
+ def test_spatial_query(self):
+ dataset = factories.Dataset(
+ extras=[
+ {"key": "spatial", "value": self.geojson_examples["point"]}
+ ]
+ )
+
+ result = helpers.call_action("package_search")
+ assert(result["count"] == 1)
+
+ result = helpers.call_action(
+ "package_search", extras={"ext_bbox": "-180,-90,180,90"}
+ )
+
+ assert(result["count"] == 1)
+ assert(result["results"][0]["id"] == dataset["id"])
From 56b42907d92393ca63b540cb7eb5de3c610c183c Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Mon, 18 Jan 2021 13:29:35 +0000
Subject: [PATCH 114/139] WIP: don't alter view and table ownership, remove test code
---
.github/workflows/ci.yml | 2 --
.../spatial/tests/model/test_package_extent.py | 16 ----------------
2 files changed, 18 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 2dc9939..1cfc49f 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -99,8 +99,6 @@ jobs:
run: |
psql --host=postgres --username=postgres -d ckan_test --command="ALTER ROLE ckan_default WITH superuser;"
psql --host=postgres --username=postgres -d ckan_test --command="CREATE EXTENSION postgis;"
- psql --host=postgres --username=postgres -d ckan_test --command="ALTER VIEW geometry_columns OWNER TO ckan_default;"
- psql --host=postgres --username=postgres -d ckan_test --command="ALTER TABLE spatial_ref_sys OWNER TO ckan_default;"
- name: Setup extension (CKAN >= 2.9)
if: ${{ matrix.ckan-version != '2.7' && matrix.ckan-version != '2.8' }}
run: |
diff --git a/ckanext/spatial/tests/model/test_package_extent.py b/ckanext/spatial/tests/model/test_package_extent.py
index 6ab6bd3..66178d6 100644
--- a/ckanext/spatial/tests/model/test_package_extent.py
+++ b/ckanext/spatial/tests/model/test_package_extent.py
@@ -106,19 +106,3 @@ class TestPackageExtent(SpatialTestBase):
)
assert(package_extent.the_geom.srid == self.db_srid)
- def test_spatial_query(self):
- dataset = factories.Dataset(
- extras=[
- {"key": "spatial", "value": self.geojson_examples["point"]}
- ]
- )
-
- result = helpers.call_action("package_search")
- assert(result["count"] == 1)
-
- result = helpers.call_action(
- "package_search", extras={"ext_bbox": "-180,-90,180,90"}
- )
-
- assert(result["count"] == 1)
- assert(result["results"][0]["id"] == dataset["id"])
From 7d3dbfcc0f484da8c2d944b00279f833dba13c16 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Mon, 18 Jan 2021 22:43:40 +0900
Subject: [PATCH 115/139] WIP: debug print
---
ckanext/spatial/lib/__init__.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/ckanext/spatial/lib/__init__.py b/ckanext/spatial/lib/__init__.py
index 1646a86..8bd5fde 100644
--- a/ckanext/spatial/lib/__init__.py
+++ b/ckanext/spatial/lib/__init__.py
@@ -158,6 +158,7 @@ def bbox_query(bbox,srid=None):
.filter(PackageExtent.package_id==Package.id) \
.filter(PackageExtent.the_geom.intersects(input_geometry)) \
.filter(Package.state==u'active')
+ print(extents)
return extents
def bbox_query_ordered(bbox, srid=None):
From 487cbc8d53f9a5cdb299bc19941211367efe2558 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Mon, 18 Jan 2021 22:51:13 +0900
Subject: [PATCH 116/139] WIP: debug print
---
ckanext/spatial/plugin/__init__.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/ckanext/spatial/plugin/__init__.py b/ckanext/spatial/plugin/__init__.py
index 8847a24..dd73504 100644
--- a/ckanext/spatial/plugin/__init__.py
+++ b/ckanext/spatial/plugin/__init__.py
@@ -361,6 +361,7 @@ class SpatialQuery(SpatialQueryMixin, p.SingletonPlugin):
if are_no_results:
# We don't need to perform the search
search_params['abort_search'] = True
+ print("abort_search")
else:
# We'll perform the existing search but also filtering by the ids
# of datasets within the bbox
From 01ad817444e36818bb90939cc942b4426c0bb4ec Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Mon, 18 Jan 2021 22:57:47 +0900
Subject: [PATCH 117/139] WIP: debug print
---
ckanext/spatial/plugin/__init__.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/ckanext/spatial/plugin/__init__.py b/ckanext/spatial/plugin/__init__.py
index dd73504..78b0133 100644
--- a/ckanext/spatial/plugin/__init__.py
+++ b/ckanext/spatial/plugin/__init__.py
@@ -328,6 +328,7 @@ class SpatialQuery(SpatialQueryMixin, p.SingletonPlugin):
def _params_for_postgis_search(self, bbox, search_params):
from ckanext.spatial.lib import bbox_query, bbox_query_ordered
from ckan.lib.search import SearchError
+ print("_params_for_postgis_search")
# Note: This will be deprecated at some point in favour of the
# Solr 4 spatial sorting capabilities
From 5b744cb5b4678b70835d3ee115e0a1661afa482a Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Mon, 18 Jan 2021 23:04:52 +0900
Subject: [PATCH 118/139] WIP: debug print
---
ckanext/spatial/plugin/__init__.py | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/ckanext/spatial/plugin/__init__.py b/ckanext/spatial/plugin/__init__.py
index 78b0133..c56175a 100644
--- a/ckanext/spatial/plugin/__init__.py
+++ b/ckanext/spatial/plugin/__init__.py
@@ -358,6 +358,7 @@ class SpatialQuery(SpatialQueryMixin, p.SingletonPlugin):
else:
extents = bbox_query(bbox)
are_no_results = extents.count() == 0
+ print(extents)
if are_no_results:
# We don't need to perform the search
@@ -373,7 +374,8 @@ class SpatialQuery(SpatialQueryMixin, p.SingletonPlugin):
new_q += '(%s)' % ' OR '.join(['id:%s' % id for id in bbox_query_ids])
search_params['q'] = new_q
-
+ print("search_params")
+ print(search_params)
return search_params
def after_search(self, search_results, search_params):
From 2a8da191a84e8bba7025a8582902888276665c58 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Mon, 18 Jan 2021 23:15:21 +0900
Subject: [PATCH 119/139] WIP: debug print
---
ckanext/spatial/plugin/__init__.py | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/ckanext/spatial/plugin/__init__.py b/ckanext/spatial/plugin/__init__.py
index c56175a..222d55e 100644
--- a/ckanext/spatial/plugin/__init__.py
+++ b/ckanext/spatial/plugin/__init__.py
@@ -383,7 +383,10 @@ class SpatialQuery(SpatialQueryMixin, p.SingletonPlugin):
# Note: This will be deprecated at some point in favour of the
# Solr 4 spatial sorting capabilities
-
+ print("search_results")
+ print(search_results)
+ print("search_params")
+ print(search_params)
if search_params.get('extras', {}).get('ext_spatial') and \
p.toolkit.asbool(config.get('ckanext.spatial.use_postgis_sorting', 'False')):
# Apply the spatial sort
From 1d7d913cdbba3a3a05f6c68eea21566e9d53468e Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Mon, 18 Jan 2021 23:27:33 +0900
Subject: [PATCH 120/139] WIP: debug print
---
ckanext/spatial/plugin/__init__.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/ckanext/spatial/plugin/__init__.py b/ckanext/spatial/plugin/__init__.py
index 222d55e..15a7f6d 100644
--- a/ckanext/spatial/plugin/__init__.py
+++ b/ckanext/spatial/plugin/__init__.py
@@ -358,6 +358,7 @@ class SpatialQuery(SpatialQueryMixin, p.SingletonPlugin):
else:
extents = bbox_query(bbox)
are_no_results = extents.count() == 0
+ print("extents")
print(extents)
if are_no_results:
From 93f7502bdbbf15ac54769d3497ec6c3b9cf4b306 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Mon, 18 Jan 2021 23:34:12 +0900
Subject: [PATCH 121/139] WIP: don't remove metadata
---
ckanext/spatial/tests/conftest.py | 14 +-------------
1 file changed, 1 insertion(+), 13 deletions(-)
diff --git a/ckanext/spatial/tests/conftest.py b/ckanext/spatial/tests/conftest.py
index 0f3a97f..14d4299 100644
--- a/ckanext/spatial/tests/conftest.py
+++ b/ckanext/spatial/tests/conftest.py
@@ -34,17 +34,5 @@ def harvest_setup():
@pytest.fixture
def spatial_setup():
- # This will create the PostGIS tables (geometry_columns and
- # spatial_ref_sys) which were deleted when rebuilding the database
- table = Table("spatial_ref_sys", meta.metadata)
- if not table.exists():
- create_postgis_tables()
-
- # When running the tests with the --reset-db option for some
- # reason the metadata holds a reference to the `package_extent`
- # table after being deleted, causing an InvalidRequestError
- # exception when trying to recreate it further on
- if "package_extent" in meta.metadata.tables:
- meta.metadata.remove(meta.metadata.tables["package_extent"])
-
+ create_postgis_tables()
spatial_db_setup()
From 36d0d9bfa4312b31c7f19aec827beafcda2e5f59 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Mon, 18 Jan 2021 15:24:26 +0000
Subject: [PATCH 122/139] WIP: debug
---
ckanext/spatial/plugin/__init__.py | 2 ++
ckanext/spatial/plugin/pylons_plugin.py | 2 +-
ckanext/spatial/tests/test_api.py | 3 ++-
3 files changed, 5 insertions(+), 2 deletions(-)
diff --git a/ckanext/spatial/plugin/__init__.py b/ckanext/spatial/plugin/__init__.py
index 15a7f6d..c587d81 100644
--- a/ckanext/spatial/plugin/__init__.py
+++ b/ckanext/spatial/plugin/__init__.py
@@ -369,6 +369,8 @@ class SpatialQuery(SpatialQueryMixin, p.SingletonPlugin):
# We'll perform the existing search but also filtering by the ids
# of datasets within the bbox
bbox_query_ids = [extent.package_id for extent in extents]
+ print("bbox_query_ids")
+ print(bbox_query_ids)
q = search_params.get('q','').strip() or '""'
new_q = '%s AND ' % q if q else ''
diff --git a/ckanext/spatial/plugin/pylons_plugin.py b/ckanext/spatial/plugin/pylons_plugin.py
index cbd5006..10d7756 100644
--- a/ckanext/spatial/plugin/pylons_plugin.py
+++ b/ckanext/spatial/plugin/pylons_plugin.py
@@ -7,7 +7,7 @@ class SpatialQueryMixin(p.SingletonPlugin):
# IRoutes
def before_map(self, map):
-
+ print("before map")
map.connect('api_spatial_query', '/api/2/search/{register:dataset|package}/geo',
controller='ckanext.spatial.controllers.api:ApiController',
action='spatial_query')
diff --git a/ckanext/spatial/tests/test_api.py b/ckanext/spatial/tests/test_api.py
index 7cc0dfe..79e3155 100644
--- a/ckanext/spatial/tests/test_api.py
+++ b/ckanext/spatial/tests/test_api.py
@@ -26,12 +26,13 @@ class TestAction(SpatialTestBase):
result = helpers.call_action("package_search")
assert(result["count"] == 1)
+ print(result)
result = helpers.call_action(
"package_search", extras={"ext_bbox": "-180,-90,180,90"}
)
- assert(result["count"] == 1)
+ assert(result["count"] == 2)
assert(result["results"][0]["id"] == dataset["id"])
@pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
From 6617d21484b2138185676889425be59b3be3e9f1 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Mon, 18 Jan 2021 15:47:18 +0000
Subject: [PATCH 123/139] WIP: postgis 10-3.1
---
.github/workflows/ci.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 1cfc49f..277d1df 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -28,7 +28,7 @@ jobs:
solr:
image: ckan/ckan-solr-dev:${{ matrix.ckan-version }}
postgres:
- image: postgis/postgis:10-2.5
+ image: postgis/postgis:10-3.1
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
From 43140c8d30d7b057199b2cb53619eb75a0923792 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Mon, 18 Jan 2021 15:57:28 +0000
Subject: [PATCH 124/139] WIP: test: skip db setup
---
.github/workflows/ci.yml | 6 ------
1 file changed, 6 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 277d1df..992c274 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -99,12 +99,6 @@ jobs:
run: |
psql --host=postgres --username=postgres -d ckan_test --command="ALTER ROLE ckan_default WITH superuser;"
psql --host=postgres --username=postgres -d ckan_test --command="CREATE EXTENSION postgis;"
- - name: Setup extension (CKAN >= 2.9)
- if: ${{ matrix.ckan-version != '2.7' && matrix.ckan-version != '2.8' }}
- run: |
- ckan -c test.ini db init
- ckan -c test.ini harvester initdb
- ckan -c test.ini spatial initdb
- name: Setup extension (CKAN < 2.9)
if: ${{ matrix.ckan-version == '2.7' || matrix.ckan-version == '2.8' }}
run: |
From 4f47dda3d95b610cdef32ae77e73e4287077b36d Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Mon, 18 Jan 2021 16:02:02 +0000
Subject: [PATCH 125/139] WIP: test: skip db setup
---
.github/workflows/ci.yml | 6 ------
1 file changed, 6 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 992c274..f925874 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -99,11 +99,5 @@ jobs:
run: |
psql --host=postgres --username=postgres -d ckan_test --command="ALTER ROLE ckan_default WITH superuser;"
psql --host=postgres --username=postgres -d ckan_test --command="CREATE EXTENSION postgis;"
- - name: Setup extension (CKAN < 2.9)
- if: ${{ matrix.ckan-version == '2.7' || matrix.ckan-version == '2.8' }}
- run: |
- paster --plugin=ckan db init -c test.ini
- paster --plugin=ckanext-harvest harvester initdb -c test.ini
- paster --plugin=ckanext-spatial spatial initdb -c test.ini
- name: Run tests
run: pytest --ckan-ini=test.ini ckanext/spatial/tests
From 2477721d18473d1a94919265754a26a296610bf8 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Tue, 19 Jan 2021 03:29:25 +0000
Subject: [PATCH 126/139] WIP: fixes warning
---
ckanext/spatial/lib/__init__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/ckanext/spatial/lib/__init__.py b/ckanext/spatial/lib/__init__.py
index 8bd5fde..322583c 100644
--- a/ckanext/spatial/lib/__init__.py
+++ b/ckanext/spatial/lib/__init__.py
@@ -105,7 +105,7 @@ def validate_bbox(bbox_values):
if isinstance(bbox_values,six.string_types):
bbox_values = bbox_values.split(',')
- if len(bbox_values) is not 4:
+ if len(bbox_values) != 4:
return None
try:
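The replaced comparison relied on integer identity rather than value equality; a quick sketch of why `!=` is the correct check here:

    # Why `len(x) is not 4` is unsafe: `is` compares object identity, not value.
    # CPython happens to cache small ints, so it often "works", but it is not a
    # value comparison and Python >= 3.8 emits a SyntaxWarning for it.
    bbox_values = "35.12,48.45,35.33,48.59".split(",")
    print(len(bbox_values) != 4)   # False: the bbox has exactly four parts
    print(len(bbox_values) == 4)   # True: the check validate_bbox actually needs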
From 5bdd481d990603e8e7ecffdd7eb3df367ffb1c81 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Tue, 19 Jan 2021 03:46:41 +0000
Subject: [PATCH 127/139] WIP: remove ""
---
ckanext/spatial/plugin/__init__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/ckanext/spatial/plugin/__init__.py b/ckanext/spatial/plugin/__init__.py
index c587d81..f91bf74 100644
--- a/ckanext/spatial/plugin/__init__.py
+++ b/ckanext/spatial/plugin/__init__.py
@@ -373,7 +373,7 @@ class SpatialQuery(SpatialQueryMixin, p.SingletonPlugin):
print(bbox_query_ids)
q = search_params.get('q','').strip() or '""'
- new_q = '%s AND ' % q if q else ''
+ new_q = '%s AND ' % q if q != '""' else ''
new_q += '(%s)' % ' OR '.join(['id:%s' % id for id in bbox_query_ids])
search_params['q'] = new_q
From 8821118aa618d4ee849f3a5f30b84c79714f999b Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Tue, 19 Jan 2021 04:02:23 +0000
Subject: [PATCH 128/139] WIP: remove debug prints and fix test
---
ckanext/spatial/lib/__init__.py | 1 -
ckanext/spatial/plugin/__init__.py | 13 +------------
ckanext/spatial/tests/test_api.py | 6 +-----
3 files changed, 2 insertions(+), 18 deletions(-)
diff --git a/ckanext/spatial/lib/__init__.py b/ckanext/spatial/lib/__init__.py
index 322583c..c929557 100644
--- a/ckanext/spatial/lib/__init__.py
+++ b/ckanext/spatial/lib/__init__.py
@@ -158,7 +158,6 @@ def bbox_query(bbox,srid=None):
.filter(PackageExtent.package_id==Package.id) \
.filter(PackageExtent.the_geom.intersects(input_geometry)) \
.filter(Package.state==u'active')
- print(extents)
return extents
def bbox_query_ordered(bbox, srid=None):
diff --git a/ckanext/spatial/plugin/__init__.py b/ckanext/spatial/plugin/__init__.py
index f91bf74..8329134 100644
--- a/ckanext/spatial/plugin/__init__.py
+++ b/ckanext/spatial/plugin/__init__.py
@@ -328,7 +328,6 @@ class SpatialQuery(SpatialQueryMixin, p.SingletonPlugin):
def _params_for_postgis_search(self, bbox, search_params):
from ckanext.spatial.lib import bbox_query, bbox_query_ordered
from ckan.lib.search import SearchError
- print("_params_for_postgis_search")
# Note: This will be deprecated at some point in favour of the
# Solr 4 spatial sorting capabilities
@@ -358,27 +357,21 @@ class SpatialQuery(SpatialQueryMixin, p.SingletonPlugin):
else:
extents = bbox_query(bbox)
are_no_results = extents.count() == 0
- print("extents")
- print(extents)
if are_no_results:
# We don't need to perform the search
search_params['abort_search'] = True
- print("abort_search")
else:
# We'll perform the existing search but also filtering by the ids
# of datasets within the bbox
bbox_query_ids = [extent.package_id for extent in extents]
- print("bbox_query_ids")
- print(bbox_query_ids)
q = search_params.get('q','').strip() or '""'
new_q = '%s AND ' % q if q != '""' else ''
new_q += '(%s)' % ' OR '.join(['id:%s' % id for id in bbox_query_ids])
search_params['q'] = new_q
- print("search_params")
- print(search_params)
+
return search_params
def after_search(self, search_results, search_params):
@@ -386,10 +379,6 @@ class SpatialQuery(SpatialQueryMixin, p.SingletonPlugin):
# Note: This will be deprecated at some point in favour of the
# Solr 4 spatial sorting capabilities
- print("search_results")
- print(search_results)
- print("search_params")
- print(search_params)
if search_params.get('extras', {}).get('ext_spatial') and \
p.toolkit.asbool(config.get('ckanext.spatial.use_postgis_sorting', 'False')):
# Apply the spatial sort
diff --git a/ckanext/spatial/tests/test_api.py b/ckanext/spatial/tests/test_api.py
index 79e3155..2542a19 100644
--- a/ckanext/spatial/tests/test_api.py
+++ b/ckanext/spatial/tests/test_api.py
@@ -24,15 +24,11 @@ class TestAction(SpatialTestBase):
]
)
- result = helpers.call_action("package_search")
- assert(result["count"] == 1)
- print(result)
-
result = helpers.call_action(
"package_search", extras={"ext_bbox": "-180,-90,180,90"}
)
- assert(result["count"] == 2)
+ assert(result["count"] == 1)
assert(result["results"][0]["id"] == dataset["id"])
@pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
From e57edfd6a09b740d0f8128546094076f0d8a7986 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Tue, 19 Jan 2021 04:09:44 +0000
Subject: [PATCH 129/139] remove unused ini
---
test-github-actions.ini | 73 -----------------------------------------
1 file changed, 73 deletions(-)
delete mode 100644 test-github-actions.ini
diff --git a/test-github-actions.ini b/test-github-actions.ini
deleted file mode 100644
index 3cc8947..0000000
--- a/test-github-actions.ini
+++ /dev/null
@@ -1,73 +0,0 @@
-[DEFAULT]
-debug = false
-# Uncomment and replace with the address which should receive any error reports
-#email_to = you@yourdomain.com
-smtp_server = localhost
-error_email_from = paste@localhost
-
-[server:main]
-use = egg:Paste#http
-host = 0.0.0.0
-port = 5000
-
-
-[app:main]
-use = config:../ckan/test-core.ini
-ckan.legacy_templates = false
-ckan.plugins = test_spatial_plugin harvest ckan_harvester test_harvester test_harvester2 test_action_harvester spatial_metadata spatial_query spatial_harvest_metadata_api gemini_csw_harvester gemini_doc_harvester gemini_waf_harvester
-ckan.spatial.srid = 4326
-ckan.spatial.default_map_extent=-6.88,49.74,0.50,59.2
-ckan.spatial.testing = true
-ckan.spatial.validator.profiles = iso19139,constraints,gemini2
-ckan.harvest.mq.type = redis
-
-# NB: other test configuration should go in test-core.ini, which is
-# what the postgres tests use.
-
-package_new_return_url = http://test.ckan.net/dataset/?test=new
-package_edit_return_url = http://test.ckan.net/dataset/?test=edit
-
-# copy from ckan/test-core-circle-ci.ini
-ckan.datastore.write_url = postgresql://datastore_write:pass@ckan-postgres/datastore_test
-ckan.datastore.read_url = postgresql://datastore_read:pass@ckan-postgres/datastore_test
-
-ckan.redis.url = redis://ckan-redis:6379/1
-
-sqlalchemy.url = postgresql://ckan_default:pass@ckan-postgres/ckan_test
-
-solr_url = http://localhost:8080/solr
-
-[loggers]
-keys = root, ckan, sqlalchemy
-
-[handlers]
-keys = console
-
-[formatters]
-keys = generic
-
-[logger_root]
-level = WARN
-handlers = console
-
-[logger_ckan]
-qualname = ckan
-handlers =
-level = INFO
-
-[logger_sqlalchemy]
-handlers =
-qualname = sqlalchemy.engine
-level = WARNING
-# "level = INFO" logs SQL queries.
-# "level = DEBUG" logs SQL queries and results.
-# "level = WARNING" logs neither.
-
-[handler_console]
-class = StreamHandler
-args = (sys.stdout,)
-level = NOTSET
-formatter = generic
-
-[formatter_generic]
-format = %(asctime)s %(levelname)-5.5s [%(name)s] %(message)s
From 2934317f4026cb9d0d8096401034b9b6aba695f9 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Tue, 19 Jan 2021 04:15:12 +0000
Subject: [PATCH 130/139] added note for GitHub Actions and added fallback code
---
ckanext/spatial/plugin/__init__.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/ckanext/spatial/plugin/__init__.py b/ckanext/spatial/plugin/__init__.py
index 8329134..03277f3 100644
--- a/ckanext/spatial/plugin/__init__.py
+++ b/ckanext/spatial/plugin/__init__.py
@@ -367,7 +367,8 @@ class SpatialQuery(SpatialQueryMixin, p.SingletonPlugin):
bbox_query_ids = [extent.package_id for extent in extents]
q = search_params.get('q','').strip() or '""'
- new_q = '%s AND ' % q if q != '""' else ''
+ # Note: `"" AND` query doesn't work in github ci
+ new_q = '%s AND ' % q if q and q != '""' else ''
new_q += '(%s)' % ' OR '.join(['id:%s' % id for id in bbox_query_ids])
search_params['q'] = new_q
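With the fallback in place, the Solr query built from the PostGIS hits behaves as follows; a worked example with hypothetical dataset ids (not output from the test run):

    # Worked example of the query construction above, with hypothetical ids.
    bbox_query_ids = ["abc-1", "def-2"]

    def build_q(q):
        # `"" AND (...)` breaks the query in the GitHub CI Solr, hence the guard
        new_q = '%s AND ' % q if q and q != '""' else ''
        return new_q + '(%s)' % ' OR '.join('id:%s' % i for i in bbox_query_ids)

    print(build_q('""'))     # (id:abc-1 OR id:def-2)
    print(build_q('water'))  # water AND (id:abc-1 OR id:def-2)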
From dd42aa8318e595d718806bc3f4ce87f5e14f4aa8 Mon Sep 17 00:00:00 2001
From: Taro Matsuzawa
Date: Wed, 26 May 2021 07:24:07 +0900
Subject: [PATCH 131/139] remove unused print debug
---
ckanext/spatial/commands/spatial.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/ckanext/spatial/commands/spatial.py b/ckanext/spatial/commands/spatial.py
index 5708077..33722d2 100644
--- a/ckanext/spatial/commands/spatial.py
+++ b/ckanext/spatial/commands/spatial.py
@@ -37,7 +37,6 @@ class Spatial(CkanCommand):
def command(self):
self._load_config()
- print('')
if len(self.args) == 0:
self.parser.print_usage()
From 17c7ffd37fdfef4e354c3596b5a2b47d6abe3808 Mon Sep 17 00:00:00 2001
From: amercader
Date: Fri, 28 May 2021 11:12:44 +0200
Subject: [PATCH 132/139] Align GA file and requirements file with rest of
extensions
---
.github/workflows/{ci.yml => test.yml} | 10 ++--------
pip-requirements-py2.txt | 1 +
pip-requirements.txt | 11 +----------
requirements-py2.txt | 10 ++++++++++
pip3-requirements.txt => requirements.txt | 0
5 files changed, 14 insertions(+), 18 deletions(-)
rename .github/workflows/{ci.yml => test.yml} (92%)
create mode 120000 pip-requirements-py2.txt
mode change 100644 => 120000 pip-requirements.txt
create mode 100644 requirements-py2.txt
rename pip3-requirements.txt => requirements.txt (100%)
diff --git a/.github/workflows/ci.yml b/.github/workflows/test.yml
similarity index 92%
rename from .github/workflows/ci.yml
rename to .github/workflows/test.yml
index f925874..ca5cbfa 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/test.yml
@@ -77,19 +77,13 @@ jobs:
run: |
apk add --no-cache \
python2-dev
+ pip install -r requirements-py2.txt
- name: Install dependency (python3)
if: ${{ matrix.ckan-version == '2.9' }}
run: |
apk add --no-cache \
python3-dev
- - name: Install requirements (python2)
- if: ${{ matrix.ckan-version != '2.9' }}
- run: |
- pip install -r pip-requirements.txt
- - name: Install requirements (python3)
- if: ${{ matrix.ckan-version == '2.9' }}
- run: |
- pip install -r pip3-requirements.txt
+ pip install -r requirements.txt
- name: Install requirements
run: |
pip install -e .
diff --git a/pip-requirements-py2.txt b/pip-requirements-py2.txt
new file mode 120000
index 0000000..983ca39
--- /dev/null
+++ b/pip-requirements-py2.txt
@@ -0,0 +1 @@
+requirements-py2.txt
\ No newline at end of file
diff --git a/pip-requirements.txt b/pip-requirements.txt
deleted file mode 100644
index 66aeaec..0000000
--- a/pip-requirements.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-GeoAlchemy>=0.6
-GeoAlchemy2==0.5.0
-Shapely>=1.2.13
-pyproj==2.2.2
-OWSLib==0.18.0
-lxml>=2.3
-argparse
-pyparsing>=2.1.10
-requests>=1.1.0
-six
diff --git a/pip-requirements.txt b/pip-requirements.txt
new file mode 120000
index 0000000..d54bfb5
--- /dev/null
+++ b/pip-requirements.txt
@@ -0,0 +1 @@
+requirements.txt
\ No newline at end of file
diff --git a/requirements-py2.txt b/requirements-py2.txt
new file mode 100644
index 0000000..66aeaec
--- /dev/null
+++ b/requirements-py2.txt
@@ -0,0 +1,10 @@
+GeoAlchemy>=0.6
+GeoAlchemy2==0.5.0
+Shapely>=1.2.13
+pyproj==2.2.2
+OWSLib==0.18.0
+lxml>=2.3
+argparse
+pyparsing>=2.1.10
+requests>=1.1.0
+six
diff --git a/pip3-requirements.txt b/requirements.txt
similarity index 100%
rename from pip3-requirements.txt
rename to requirements.txt
From bb2bf38c721089b2e50510671906b496f870adaa Mon Sep 17 00:00:00 2001
From: amercader
Date: Fri, 28 May 2021 11:28:21 +0200
Subject: [PATCH 133/139] Fix badge, add codecov
---
.github/workflows/test.yml | 9 +++++++--
README.rst | 5 +++--
2 files changed, 10 insertions(+), 4 deletions(-)
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index ca5cbfa..b63fe36 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -1,4 +1,4 @@
-name: ckanext-spatial ci
+name: Tests
on: [push, pull_request]
jobs:
@@ -94,4 +94,9 @@ jobs:
psql --host=postgres --username=postgres -d ckan_test --command="ALTER ROLE ckan_default WITH superuser;"
psql --host=postgres --username=postgres -d ckan_test --command="CREATE EXTENSION postgis;"
- name: Run tests
- run: pytest --ckan-ini=test.ini ckanext/spatial/tests
+ run: pytest --ckan-ini=test.ini --cov=ckanext.spatial --cov-report=xml --cov-append --disable-warnings ckanext/spatial/tests
+
+ - name: Upload coverage report to codecov
+ uses: codecov/codecov-action@v1
+ with:
+ file: ./coverage.xml
diff --git a/README.rst b/README.rst
index 580aa15..0184f75 100644
--- a/README.rst
+++ b/README.rst
@@ -2,8 +2,9 @@
ckanext-spatial - Geo related plugins for CKAN
==============================================
-.. image:: https://travis-ci.org/ckan/ckanext-spatial.svg?branch=master
- :target: https://travis-ci.org/ckan/ckanext-spatial
+.. image:: https://github.com/ckan/ckanext-spatial/workflows/Tests/badge.svg?branch=master
+ :target: https://github.com/ckan/ckanext-spatial/actions
+
This extension contains plugins that add geospatial capabilities to CKAN_,
including:
From cdf4b70bb74050a258575ab2c67dfad97e2768a2 Mon Sep 17 00:00:00 2001
From: amercader
Date: Fri, 28 May 2021 13:27:12 +0200
Subject: [PATCH 134/139] Update CLI docstrings and documentation, remove
unused file
---
bin/ckan_pycsw.py | 183 +++++++++++++----------
ckanext/spatial/cli.py | 22 ++-
ckanext/spatial/tests/test_csw_client.py | 71 ---------
doc/csw.rst | 12 +-
doc/install.rst | 4 +
doc/spatial-search.rst | 4 +
6 files changed, 135 insertions(+), 161 deletions(-)
delete mode 100644 ckanext/spatial/tests/test_csw_client.py
diff --git a/bin/ckan_pycsw.py b/bin/ckan_pycsw.py
index e8508bf..47e171b 100644
--- a/bin/ckan_pycsw.py
+++ b/bin/ckan_pycsw.py
@@ -2,6 +2,9 @@ import sys
import logging
import datetime
import io
+import os
+import argparse
+from six.moves.configparser import SafeConfigParser
import requests
from lxml import etree
@@ -10,58 +13,66 @@ from pycsw.core import metadata, repository, util
import pycsw.core.config
import pycsw.core.admin
-logging.basicConfig(format='%(message)s', level=logging.INFO)
+logging.basicConfig(format="%(message)s", level=logging.INFO)
log = logging.getLogger(__name__)
+
def setup_db(pycsw_config):
"""Setup database tables and indexes"""
from sqlalchemy import Column, Text
- database = pycsw_config.get('repository', 'database')
- table_name = pycsw_config.get('repository', 'table', 'records')
+ database = pycsw_config.get("repository", "database")
+ table_name = pycsw_config.get("repository", "table", "records")
ckan_columns = [
- Column('ckan_id', Text, index=True),
- Column('ckan_modified', Text),
+ Column("ckan_id", Text, index=True),
+ Column("ckan_modified", Text),
]
- pycsw.core.admin.setup_db(database,
- table_name, '',
+ pycsw.core.admin.setup_db(
+ database,
+ table_name,
+ "",
create_plpythonu_functions=False,
- extra_columns=ckan_columns)
+ extra_columns=ckan_columns,
+ )
def set_keywords(pycsw_config_file, pycsw_config, ckan_url, limit=20):
"""set pycsw service metadata keywords from top limit CKAN tags"""
- log.info('Fetching tags from %s', ckan_url)
- url = ckan_url + 'api/tag_counts'
+ log.info("Fetching tags from %s", ckan_url)
+ url = ckan_url + "api/tag_counts"
response = requests.get(url)
tags = response.json()
- log.info('Deriving top %d tags', limit)
+ log.info("Deriving top %d tags", limit)
# uniquify and sort by top limit
tags_unique = [list(x) for x in set(tuple(x) for x in tags)]
tags_sorted = sorted(tags_unique, key=lambda x: x[1], reverse=1)[0:limit]
- keywords = ','.join('%s' % tn[0] for tn in tags_sorted)
+ keywords = ",".join("%s" % tn[0] for tn in tags_sorted)
- log.info('Setting tags in pycsw configuration file %s', pycsw_config_file)
- pycsw_config.set('metadata:main', 'identification_keywords', keywords)
- with open(pycsw_config_file, 'wb') as configfile:
+ log.info("Setting tags in pycsw configuration file %s", pycsw_config_file)
+ pycsw_config.set("metadata:main", "identification_keywords", keywords)
+ with open(pycsw_config_file, "wb") as configfile:
pycsw_config.write(configfile)
def load(pycsw_config, ckan_url):
- database = pycsw_config.get('repository', 'database')
- table_name = pycsw_config.get('repository', 'table', 'records')
+ database = pycsw_config.get("repository", "database")
+ table_name = pycsw_config.get("repository", "table", "records")
context = pycsw.core.config.StaticContext()
repo = repository.Repository(database, context, table=table_name)
- log.info('Started gathering CKAN datasets identifiers: {0}'.format(str(datetime.datetime.now())))
+ log.info(
+ "Started gathering CKAN datasets identifiers: {0}".format(
+ str(datetime.datetime.now())
+ )
+ )
query = 'api/search/dataset?qjson={"fl":"id,metadata_modified,extras_harvest_object_id,extras_metadata_source", "q":"harvest_object_id:[\\"\\" TO *]", "limit":1000, "start":%s}'
@@ -75,23 +86,25 @@ def load(pycsw_config, ckan_url):
response = requests.get(url)
listing = response.json()
if not isinstance(listing, dict):
- raise RuntimeError('Wrong API response: %s' % listing)
- results = listing.get('results')
+ raise RuntimeError("Wrong API response: %s" % listing)
+ results = listing.get("results")
if not results:
break
for result in results:
- gathered_records[result['id']] = {
- 'metadata_modified': result['metadata_modified'],
- 'harvest_object_id': result['extras']['harvest_object_id'],
- 'source': result['extras'].get('metadata_source')
+ gathered_records[result["id"]] = {
+ "metadata_modified": result["metadata_modified"],
+ "harvest_object_id": result["extras"]["harvest_object_id"],
+ "source": result["extras"].get("metadata_source"),
}
start = start + 1000
- log.debug('Gathered %s' % start)
+ log.debug("Gathered %s" % start)
- log.info('Gather finished ({0} datasets): {1}'.format(
- len(gathered_records.keys()),
- str(datetime.datetime.now())))
+ log.info(
+ "Gather finished ({0} datasets): {1}".format(
+ len(gathered_records.keys()), str(datetime.datetime.now())
+ )
+ )
existing_records = {}
@@ -105,17 +118,16 @@ def load(pycsw_config, ckan_url):
changed = set()
for key in set(gathered_records) & set(existing_records):
- if gathered_records[key]['metadata_modified'] > existing_records[key]:
+ if gathered_records[key]["metadata_modified"] > existing_records[key]:
changed.add(key)
for ckan_id in deleted:
try:
repo.session.begin()
- repo.session.query(repo.dataset.ckan_id).filter_by(
- ckan_id=ckan_id).delete()
- log.info('Deleted %s' % ckan_id)
+ repo.session.query(repo.dataset.ckan_id).filter_by(ckan_id=ckan_id).delete()
+ log.info("Deleted %s" % ckan_id)
repo.session.commit()
- except Exception as err:
+ except Exception:
repo.session.rollback()
raise
@@ -123,76 +135,81 @@ def load(pycsw_config, ckan_url):
ckan_info = gathered_records[ckan_id]
record = get_record(context, repo, ckan_url, ckan_id, ckan_info)
if not record:
- log.info('Skipped record %s' % ckan_id)
+ log.info("Skipped record %s" % ckan_id)
continue
try:
- repo.insert(record, 'local', util.get_today_and_now())
- log.info('Inserted %s' % ckan_id)
+ repo.insert(record, "local", util.get_today_and_now())
+ log.info("Inserted %s" % ckan_id)
except Exception as err:
- log.error('ERROR: not inserted %s Error:%s' % (ckan_id, err))
+ log.error("ERROR: not inserted %s Error:%s" % (ckan_id, err))
for ckan_id in changed:
ckan_info = gathered_records[ckan_id]
record = get_record(context, repo, ckan_url, ckan_id, ckan_info)
if not record:
continue
- update_dict = dict([(getattr(repo.dataset, key),
- getattr(record, key)) \
- for key in record.__dict__.keys() if key != '_sa_instance_state'])
+ update_dict = dict(
+ [
+ (getattr(repo.dataset, key), getattr(record, key))
+ for key in record.__dict__.keys()
+ if key != "_sa_instance_state"
+ ]
+ )
try:
repo.session.begin()
- repo.session.query(repo.dataset).filter_by(
- ckan_id=ckan_id).update(update_dict)
+ repo.session.query(repo.dataset).filter_by(ckan_id=ckan_id).update(
+ update_dict
+ )
repo.session.commit()
- log.info('Changed %s' % ckan_id)
+ log.info("Changed %s" % ckan_id)
except Exception as err:
repo.session.rollback()
- raise RuntimeError('ERROR: %s' % str(err))
+ raise RuntimeError("ERROR: %s" % str(err))
def clear(pycsw_config):
from sqlalchemy import create_engine, MetaData, Table
- database = pycsw_config.get('repository', 'database')
- table_name = pycsw_config.get('repository', 'table', 'records')
+ database = pycsw_config.get("repository", "database")
+ table_name = pycsw_config.get("repository", "table", "records")
- log.debug('Creating engine')
+ log.debug("Creating engine")
engine = create_engine(database)
records = Table(table_name, MetaData(engine))
records.delete().execute()
- log.info('Table cleared')
+ log.info("Table cleared")
def get_record(context, repo, ckan_url, ckan_id, ckan_info):
- query = ckan_url + 'harvest/object/%s'
- url = query % ckan_info['harvest_object_id']
+ query = ckan_url + "harvest/object/%s"
+ url = query % ckan_info["harvest_object_id"]
response = requests.get(url)
- if ckan_info['source'] == 'arcgis':
+ if ckan_info["source"] == "arcgis":
return
try:
xml = etree.parse(io.BytesIO(response.content))
except Exception as err:
- log.error('Could not pass xml doc from %s, Error: %s' % (ckan_id, err))
+ log.error("Could not pass xml doc from %s, Error: %s" % (ckan_id, err))
return
try:
record = metadata.parse_record(context, xml, repo)[0]
except Exception as err:
- log.error('Could not extract metadata from %s, Error: %s' % (ckan_id, err))
+ log.error("Could not extract metadata from %s, Error: %s" % (ckan_id, err))
return
if not record.identifier:
record.identifier = ckan_id
record.ckan_id = ckan_id
- record.ckan_modified = ckan_info['metadata_modified']
+ record.ckan_modified = ckan_info["metadata_modified"]
return record
-usage='''
+usage = """
Manages the CKAN-pycsw integration
python ckan-pycsw.py setup [-p]
@@ -211,18 +228,19 @@ All commands require the pycsw configuration file. By default it will try
to find a file called 'default.cfg' in the same directory, but you'll
probably need to provide the actual location via the -p option:
- paster ckan-pycsw setup -p /etc/ckan/default/pycsw.cfg
+ python ckan_pycsw.py setup -p /etc/ckan/default/pycsw.cfg
The load command requires a CKAN URL from where the datasets will be pulled:
- paster ckan-pycsw load -p /etc/ckan/default/pycsw.cfg -u http://localhost
+ python ckan_pycsw.py load -p /etc/ckan/default/pycsw.cfg -u http://localhost
+
+"""
-'''
def _load_config(file_path):
abs_path = os.path.abspath(file_path)
if not os.path.exists(abs_path):
- raise AssertionError('pycsw config file {0} does not exist.'.format(abs_path))
+ raise AssertionError("pycsw config file {0} does not exist.".format(abs_path))
config = SafeConfigParser()
config.read(abs_path)
@@ -230,25 +248,24 @@ def _load_config(file_path):
return config
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(description=usage.strip().split("\n")[0], usage=usage)
+ parser.add_argument("command", help="Command to perform")
-import os
-import argparse
-from ConfigParser import SafeConfigParser
+ parser.add_argument(
+ "-p",
+ "--pycsw_config",
+ action="store",
+ default="default.cfg",
+ help="pycsw config file to use.",
+ )
-if __name__ == '__main__':
- parser = argparse.ArgumentParser(
- description='\n'.split(usage)[0],
- usage=usage)
- parser.add_argument('command',
- help='Command to perform')
-
- parser.add_argument('-p', '--pycsw_config',
- action='store', default='default.cfg',
- help='pycsw config file to use.')
-
- parser.add_argument('-u', '--ckan_url',
- action='store',
- help='CKAN instance to import the datasets from.')
+ parser.add_argument(
+ "-u",
+ "--ckan_url",
+ action="store",
+ help="CKAN instance to import the datasets from.",
+ )
if len(sys.argv) <= 1:
parser.print_usage()
@@ -257,18 +274,18 @@ if __name__ == '__main__':
arg = parser.parse_args()
pycsw_config = _load_config(arg.pycsw_config)
- if arg.command == 'setup':
+ if arg.command == "setup":
setup_db(pycsw_config)
- elif arg.command in ['load', 'set_keywords']:
+ elif arg.command in ["load", "set_keywords"]:
if not arg.ckan_url:
- raise AssertionError('You need to provide a CKAN URL with -u or --ckan_url')
- ckan_url = arg.ckan_url.rstrip('/') + '/'
- if arg.command == 'load':
+ raise AssertionError("You need to provide a CKAN URL with -u or --ckan_url")
+ ckan_url = arg.ckan_url.rstrip("/") + "/"
+ if arg.command == "load":
load(pycsw_config, ckan_url)
else:
set_keywords(arg.pycsw_config, pycsw_config, ckan_url)
- elif arg.command == 'clear':
+ elif arg.command == "clear":
clear(pycsw_config)
else:
- print('Unknown command {0}'.format(arg.command))
+ print("Unknown command {0}".format(arg.command))
sys.exit(1)
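The refactored ``bin/ckan_pycsw.py`` can also be driven from Python rather than the command line. A minimal sketch only, assuming the script is importable and that the config path and CKAN URL shown here (illustrative values, not taken from the patch) point at a real deployment::

    # Hedged sketch: reuse the script's own helpers instead of the CLI.
    import ckan_pycsw

    pycsw_config = ckan_pycsw._load_config("/etc/ckan/default/pycsw.cfg")
    ckan_pycsw.setup_db(pycsw_config)                        # create the records table
    ckan_pycsw.load(pycsw_config, "http://localhost:5000/")  # mirror CKAN datasets into pycsw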
diff --git a/ckanext/spatial/cli.py b/ckanext/spatial/cli.py
index 7be5a0b..a1a3008 100644
--- a/ckanext/spatial/cli.py
+++ b/ckanext/spatial/cli.py
@@ -14,7 +14,7 @@ def get_commands():
]
-@click.group(u"spatial-validation", short_help=u"Validation commands")
+@click.group(u"spatial-validation", short_help=u"Spatial formats validation commands")
def spatial_validation():
pass
@@ -22,18 +22,28 @@ def spatial_validation():
@spatial_validation.command()
@click.argument('pkg', required=False)
def report(pkg):
+ """
+ Performs validation on the harvested metadata, either for all
+ packages or the one specified.
+ """
+
return util.report(pkg)
@spatial_validation.command('report-csv')
@click.argument('filepath')
def report_csv(filepath):
+ """
+ Performs validation on all the harvested metadata in the db and
+ writes a report in CSV format to the given filepath.
+ """
return util.report_csv(filepath)
@spatial_validation.command('file')
@click.argument('filepath')
def validate_file(filepath):
+ """Performs validation on the given metadata file."""
return util.validate_file(filepath)
@@ -45,9 +55,19 @@ def spatial():
@spatial.command()
@click.argument('srid', required=False)
def initdb(srid):
+ """
+ Creates the necessary tables. You must have PostGIS installed
+ and configured in the database.
+ You can provide the SRID of the geometry column. Default is 4326.
+ """
return util.initdb(srid)
@spatial.command('extents')
def update_extents():
+ """
+ Creates or updates the extent geometry column for datasets with
+ an extent defined in the 'spatial' extra.
+ """
+
return util.update_extents()
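The new docstrings double as ``--help`` text for the Click commands. One low-ceremony way to exercise them is Click's test runner; this is only a sketch and assumes a configured CKAN environment is importable (the metadata path is a placeholder)::

    # Hedged sketch using click.testing; assumes CKAN config is already loaded.
    from click.testing import CliRunner
    from ckanext.spatial.cli import spatial_validation

    runner = CliRunner()
    result = runner.invoke(spatial_validation, ["file", "/tmp/metadata.xml"])
    print(result.output)   # validation report for the given metadata file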
diff --git a/ckanext/spatial/tests/test_csw_client.py b/ckanext/spatial/tests/test_csw_client.py
deleted file mode 100644
index 032f34a..0000000
--- a/ckanext/spatial/tests/test_csw_client.py
+++ /dev/null
@@ -1,71 +0,0 @@
-import time
-from six.moves.urllib.request import urlopen
-from six.moves.urllib.error import URLError
-import os
-
-import pytest
-
-from ckan.plugins.toolkit import config
-
-from ckan.model import engine_is_sqlite
-
-
-# copied from ckan/tests/__init__ to save importing it and therefore
-# setting up Pylons.
-class CkanServerCase(object):
- @staticmethod
- def _system(cmd):
- import subprocess
-
- (status, output) = subprocess.getstatusoutput(cmd)
- if status:
- raise Exception("Couldn't execute cmd: %s: %s" % (cmd, output))
-
- @classmethod
- def _paster(cls, cmd, config_path_rel):
- config_path = os.path.join(config["here"], config_path_rel)
- cls._system("paster --plugin ckan %s --config=%s" % (cmd, config_path))
-
- @staticmethod
- def _start_ckan_server(config_file=None):
- if not config_file:
- config_file = config["__file__"]
- config_path = config_file
- import subprocess
-
- process = subprocess.Popen(["paster", "serve", config_path])
- return process
-
- @staticmethod
- def _wait_for_url(url="http://127.0.0.1:5000/", timeout=15):
- for i in range(int(timeout) * 100):
- try:
- urlopen(url)
- except URLError:
- time.sleep(0.01)
- else:
- break
-
- @staticmethod
- def _stop_ckan_server(process):
- pid = process.pid
- pid = int(pid)
- if os.system("kill -9 %d" % pid):
- raise Exception(
- "Can't kill foreign CKAN instance (pid: %d)." % pid
- )
-
-
-class CkanProcess(CkanServerCase):
- @classmethod
- def setup_class(cls):
- if engine_is_sqlite():
- return pytest.skip("Non-memory database needed for this test")
-
- cls.pid = cls._start_ckan_server()
- ## Don't need to init database, since it is same database as this process uses
- cls._wait_for_url()
-
- @classmethod
- def teardown_class(cls):
- cls._stop_ckan_server(cls.pid)
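With the paster-based server harness removed, equivalent smoke coverage would normally come from CKAN's pytest fixtures instead of a spawned process. A minimal sketch, assuming CKAN 2.9's ``app`` and ``clean_db`` fixtures are available; the endpoint is a generic liveness check, not part of this extension::

    # Hedged sketch of a pytest-style smoke test replacing the paster harness.
    import pytest


    @pytest.mark.usefixtures("clean_db")
    def test_ckan_responds(app):
        # "app" is the test client provided by CKAN's pytest plugin
        response = app.get("/api/3/action/status_show")
        assert response.status_code == 200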
diff --git a/doc/csw.rst b/doc/csw.rst
index db32fe4..3c4cf23 100644
--- a/doc/csw.rst
+++ b/doc/csw.rst
@@ -55,7 +55,7 @@ All necessary tasks are done with the ``ckan-pycsw`` command. To get more
details of its usage, run the following::
cd /usr/lib/ckan/default/src/ckanext-spatial
- paster ckan-pycsw --help
+ python bin/ckan_pycsw.py --help
Setup
@@ -114,11 +114,11 @@ Setup
The rest of the options are described `here `_.
-4. Setup the pycsw table. This is done with the ``ckan-pycsw`` paster command
+4. Set up the pycsw table. This is done with the ``ckan-pycsw`` script
(Remember to have the virtualenv activated when running it)::
cd /usr/lib/ckan/default/src/ckanext-spatial
- paster ckan-pycsw setup -p /etc/ckan/default/pycsw.cfg
+ python bin/ckan_pycsw.py setup -p /etc/ckan/default/pycsw.cfg
At this point you should be ready to run pycsw with the wsgi script that it
includes::
@@ -135,7 +135,7 @@ Setup
command for this::
cd /usr/lib/ckan/default/src/ckanext-spatial
- paster ckan-pycsw load -p /etc/ckan/default/pycsw.cfg
+ python bin/ckan_pycsw.py load -p /etc/ckan/default/pycsw.cfg
When the loading is finished, check that results are returned when visiting
this link:
@@ -155,7 +155,7 @@ values can be set in the pycsw configuration ``metadata:main`` section. If you
would like the CSW service metadata keywords to be reflective of the CKAN
tags, run the following convenience command::
- paster ckan-pycsw set_keywords -p /etc/ckan/default/pycsw.cfg
+ python bin/ckan_pycsw.py set_keywords -p /etc/ckan/default/pycsw.cfg -u http://localhost
Note that you must have privileges to write to the pycsw configuration file.
@@ -170,7 +170,7 @@ keep CKAN and pycsw in sync, and serve pycsw with Apache + mod_wsgi like CKAN.
and copy the following lines::
# m h dom mon dow command
- 0 * * * * /usr/lib/ckan/default/bin/paster --plugin=ckanext-spatial ckan-pycsw load -p /etc/ckan/default/pycsw.cfg
+ 0 * * * * /var/lib/ckan/default/bin/python /var/lib/ckan/default/src/ckanext-spatial/bin/ckan_pycsw.py load -p /etc/ckan/default/pycsw.cfg
This particular example will run the load command every hour. You can of
course modify this periodicity, for instance reducing it for huge instances.
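The post-load check can also be scripted instead of visited in a browser. A sketch, assuming pycsw is served at the URL below (adjust host and port to your deployment)::

    # Hedged sketch: confirm pycsw returns records after the load command.
    from owslib.csw import CatalogueServiceWeb

    csw = CatalogueServiceWeb("http://localhost:8000/csw")   # example endpoint
    csw.getrecords2(maxrecords=10)
    print(csw.results)   # e.g. {'matches': 123, 'returned': 10, 'nextrecord': 11}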
diff --git a/doc/install.rst b/doc/install.rst
index 5c4bcf3..eab0a97 100644
--- a/doc/install.rst
+++ b/doc/install.rst
@@ -140,6 +140,10 @@ plugins on the configuration ini file (eg when restarting Apache).
If for some reason you need to explicitly create the table beforehand, you can
do it with the following command (with the virtualenv activated)::
+ (pyenv) $ ckan --config=mysite.ini spatial initdb [srid]
+
+On CKAN 2.8 and below use::
+
(pyenv) $ paster --plugin=ckanext-spatial spatial initdb [srid] --config=mysite.ini
You can define the SRID of the geometry column. Default is 4326. If you are not
diff --git a/doc/spatial-search.rst b/doc/spatial-search.rst
index cd6986e..4aa14a2 100644
--- a/doc/spatial-search.rst
+++ b/doc/spatial-search.rst
@@ -61,6 +61,10 @@ synchronize the information stored in the extra with the geometry table.
If you already have datasets when you enable Spatial Search then you'll need to
reindex them:
+ ckan --config=/etc/ckan/default/development.ini search-index rebuild
+
+.. note:: For CKAN 2.8 and below use::
+
paster --plugin=ckan search-index rebuild --config=/etc/ckan/default/development.ini
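For context on what the reindex picks up: the spatial search works off a GeoJSON geometry stored in each dataset's ``spatial`` extra. A sketch of creating such a dataset through the action API; the URL, API key and dataset name are placeholders::

    # Hedged sketch: a dataset with a "spatial" extra the extension can index.
    import json
    import requests

    dataset = {
        "name": "example-spatial-dataset",
        "extras": [{
            "key": "spatial",
            "value": json.dumps({
                "type": "Polygon",
                "coordinates": [[[-3.0, 50.0], [-3.0, 51.0],
                                 [-2.0, 51.0], [-2.0, 50.0], [-3.0, 50.0]]],
            }),
        }],
    }
    requests.post("http://localhost:5000/api/3/action/package_create",
                  json=dataset, headers={"Authorization": "API-KEY"})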
From 5b05a12d6160100e0c8612531f1d57e2fb07b721 Mon Sep 17 00:00:00 2001
From: amercader
Date: Fri, 28 May 2021 13:48:08 +0200
Subject: [PATCH 135/139] Reduce duplication on blueprint/controllers
---
ckanext/spatial/controllers/api.py | 78 +++++++-------------------
ckanext/spatial/util.py | 80 +++++++++++++++++++++++++--
ckanext/spatial/views.py | 88 +++---------------------------
requirements-py2.txt | 1 +
requirements.txt | 1 +
5 files changed, 107 insertions(+), 141 deletions(-)
diff --git a/ckanext/spatial/controllers/api.py b/ckanext/spatial/controllers/api.py
index 0d8eb95..ef379e5 100644
--- a/ckanext/spatial/controllers/api.py
+++ b/ckanext/spatial/controllers/api.py
@@ -1,18 +1,14 @@
import logging
-import six
-
-from six import StringIO
from pylons import response
-from pkg_resources import resource_stream
-from lxml import etree
-from ckan.lib.base import request, config, abort
+from ckan.lib.base import request, abort
from ckan.controllers.api import ApiController as BaseApiController
from ckan.model import Session
from ckanext.harvest.model import HarvestObject, HarvestObjectExtra
from ckanext.spatial.lib import get_srid, validate_bbox, bbox_query
+from ckanext.spatial import util
log = logging.getLogger(__name__)
@@ -54,7 +50,7 @@ class HarvestMetadataApiController(BaseApiController):
def _get_content(self, id):
obj = Session.query(HarvestObject) \
- .filter(HarvestObject.id == id).first()
+ .filter(HarvestObject.id == id).first()
if obj:
return obj.content
else:
@@ -62,62 +58,21 @@ class HarvestMetadataApiController(BaseApiController):
def _get_original_content(self, id):
extra = Session.query(HarvestObjectExtra).join(HarvestObject) \
- .filter(HarvestObject.id == id) \
- .filter(
- HarvestObjectExtra.key == 'original_document'
- ).first()
+ .filter(HarvestObject.id == id) \
+ .filter(
+ HarvestObjectExtra.key == 'original_document'
+ ).first()
if extra:
return extra.value
else:
return None
- def _transform_to_html(self, content, xslt_package=None, xslt_path=None):
-
- xslt_package = xslt_package or __name__
- xslt_path = xslt_path or \
- '../templates/ckanext/spatial/gemini2-html-stylesheet.xsl'
-
- # optimise -- read transform only once and compile rather
- # than at each request
- with resource_stream(xslt_package, xslt_path) as style:
- style_xml = etree.parse(style)
- transformer = etree.XSLT(style_xml)
-
- xml = etree.parse(StringIO(content and six.text_type(content)))
- html = transformer(xml)
-
- response.headers['Content-Type'] = 'text/html; charset=utf-8'
- response.headers['Content-Length'] = len(content)
-
- result = etree.tostring(html, pretty_print=True)
-
- return result
-
def _get_xslt(self, original=False):
- if original:
- config_option = \
- 'ckanext.spatial.harvest.xslt_html_content_original'
- else:
- config_option = 'ckanext.spatial.harvest.xslt_html_content'
-
- xslt_package = None
- xslt_path = None
- xslt = config.get(config_option, None)
- if xslt:
- if ':' in xslt:
- xslt = xslt.split(':')
- xslt_package = xslt[0]
- xslt_path = xslt[1]
- else:
- log.error(
- 'XSLT should be defined in the form :' +
- ', eg ckanext.myext:templates/my.xslt')
-
- return xslt_package, xslt_path
+ return util.get_xslt(original)
def display_xml_original(self, id):
- content = self._get_original_content(id)
+ content = util.get_harvest_object_original_content(id)
if not content:
abort(404)
@@ -136,13 +91,22 @@ class HarvestMetadataApiController(BaseApiController):
abort(404)
xslt_package, xslt_path = self._get_xslt()
- return self._transform_to_html(content, xslt_package, xslt_path)
+ out = util.transform_to_html(content, xslt_package, xslt_path)
+ response.headers['Content-Type'] = 'text/html; charset=utf-8'
+ response.headers['Content-Length'] = len(out)
+
+ return out
def display_html_original(self, id):
- content = self._get_original_content(id)
+ content = util.get_harvest_object_original_content(id)
if content is None:
abort(404)
xslt_package, xslt_path = self._get_xslt(original=True)
- return self._transform_to_html(content, xslt_package, xslt_path)
+
+ out = util.transform_to_html(content, xslt_package, xslt_path)
+ response.headers['Content-Type'] = 'text/html; charset=utf-8'
+ response.headers['Content-Length'] = len(out)
+
+ return out
diff --git a/ckanext/spatial/util.py b/ckanext/spatial/util.py
index b25564e..5e9f30c 100644
--- a/ckanext/spatial/util.py
+++ b/ckanext/spatial/util.py
@@ -6,21 +6,25 @@ import sys
import six
+from pkg_resources import resource_stream
import logging
from ckan.lib.helpers import json
from lxml import etree
from pprint import pprint
+from ckan import model
from ckanext.spatial.lib import save_package_extent
+from ckanext.spatial.lib.reports import validation_report
+from ckanext.spatial.harvesters import SpatialHarvester
+from ckanext.spatial.model import ISODocument
+
+from ckantoolkit import config
log = logging.getLogger(__name__)
def report(pkg=None):
- from ckan import model
- from ckanext.harvest.model import HarvestObject
- from ckanext.spatial.lib.reports import validation_report
if pkg:
package_ref = six.text_type(pkg)
@@ -37,8 +41,6 @@ def report(pkg=None):
def validate_file(metadata_filepath):
- from ckanext.spatial.harvesters import SpatialHarvester
- from ckanext.spatial.model import ISODocument
if not os.path.exists(metadata_filepath):
print('Filepath %s not found' % metadata_filepath)
@@ -134,3 +136,71 @@ def update_extents():
len(packages))
print(msg)
+
+
+def get_xslt(original=False):
+ if original:
+ config_option = \
+ 'ckanext.spatial.harvest.xslt_html_content_original'
+ else:
+ config_option = 'ckanext.spatial.harvest.xslt_html_content'
+
+ xslt_package = None
+ xslt_path = None
+ xslt = config.get(config_option, None)
+ if xslt:
+ if ':' in xslt:
+ xslt = xslt.split(':')
+ xslt_package = xslt[0]
+ xslt_path = xslt[1]
+ else:
+ log.error(
+ 'XSLT should be defined in the form <module>:<file>'
+ ', eg ckanext.myext:templates/my.xslt')
+
+ return xslt_package, xslt_path
+
+
+def get_harvest_object_original_content(id):
+ from ckanext.harvest.model import HarvestObject, HarvestObjectExtra
+
+ extra = model.Session.query(
+ HarvestObjectExtra
+ ).join(HarvestObject).filter(HarvestObject.id == id).filter(
+ HarvestObjectExtra.key == 'original_document'
+ ).first()
+
+ if extra:
+ return extra.value
+ else:
+ return None
+
+
+def get_harvest_object_content(id):
+ from ckanext.harvest.model import HarvestObject
+ obj = model.Session.query(HarvestObject).filter(HarvestObject.id == id).first()
+ if obj:
+ return obj.content
+ else:
+ return None
+
+
+def transform_to_html(content, xslt_package=None, xslt_path=None):
+
+ xslt_package = xslt_package or __name__
+ xslt_path = xslt_path or \
+ '../templates/ckanext/spatial/gemini2-html-stylesheet.xsl'
+
+ # optimise -- read transform only once and compile rather
+ # than at each request
+ with resource_stream(xslt_package, xslt_path) as style:
+ style_xml = etree.parse(style)
+ transformer = etree.XSLT(style_xml)
+
+ xml = etree.parse(six.StringIO(content and six.text_type(content)))
+ html = transformer(xml)
+
+ result = etree.tostring(html, pretty_print=True)
+
+ return result
+
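The helpers consolidated into ``util`` keep the existing ``<module>:<file>`` convention for the XSLT configuration options. A usage sketch, assuming a configured CKAN app; the extension name is illustrative::

    # Hedged sketch of the shared config parsing.
    from ckantoolkit import config
    from ckanext.spatial import util

    config["ckanext.spatial.harvest.xslt_html_content"] = (
        "ckanext.myext:templates/my.xslt"
    )
    assert util.get_xslt() == ("ckanext.myext", "templates/my.xslt")
    assert util.get_xslt(original=True) == (None, None)   # option not set above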
diff --git a/ckanext/spatial/views.py b/ckanext/spatial/views.py
index 7f287b9..3c4d082 100644
--- a/ckanext/spatial/views.py
+++ b/ckanext/spatial/views.py
@@ -3,18 +3,15 @@
import logging
from flask import Blueprint, make_response
-from lxml import etree
-from pkg_resources import resource_stream
import ckan.lib.helpers as h
import ckan.plugins.toolkit as tk
-from ckan.common import request, config
-from ckan.model import Session
+from ckantoolkit import request
from ckan.views.api import _finish_ok, _finish_bad_request
from ckanext.spatial.lib import get_srid, validate_bbox, bbox_query
+from ckanext.spatial import util
-from six import StringIO
log = logging.getLogger(__name__)
@@ -57,75 +54,8 @@ def harvest_object_redirect_html(id):
return h.redirect_to('/harvest/object/{}/html'.format(id))
-def _get_original_content(id):
- from ckanext.harvest.model import HarvestObject, HarvestObjectExtra
-
- extra = Session.query(
- HarvestObjectExtra
- ).join(HarvestObject).filter(HarvestObject.id == id).filter(
- HarvestObjectExtra.key == 'original_document'
- ).first()
-
- if extra:
- return extra.value
- else:
- return None
-
-
-def _get_content(id):
- from ckanext.harvest.model import HarvestObject
- obj = Session.query(HarvestObject).filter(HarvestObject.id == id).first()
- if obj:
- return obj.content
- else:
- return None
-
-
-def _transform_to_html(content, xslt_package=None, xslt_path=None):
-
- xslt_package = xslt_package or __name__
- xslt_path = xslt_path or \
- '../templates/ckanext/spatial/gemini2-html-stylesheet.xsl'
-
- # optimise -- read transform only once and compile rather
- # than at each request
- with resource_stream(xslt_package, xslt_path) as style:
- style_xml = etree.parse(style)
- transformer = etree.XSLT(style_xml)
-
- xml = etree.parse(StringIO(content.encode('utf-8')))
- html = transformer(xml)
-
- result = etree.tostring(html, pretty_print=True)
-
- return result
-
-
-def _get_xslt(original=False):
-
- if original:
- config_option = \
- 'ckanext.spatial.harvest.xslt_html_content_original'
- else:
- config_option = 'ckanext.spatial.harvest.xslt_html_content'
-
- xslt_package = None
- xslt_path = None
- xslt = config.get(config_option, None)
- if xslt:
- if ':' in xslt:
- xslt = xslt.split(':')
- xslt_package = xslt[0]
- xslt_path = xslt[1]
- else:
- log.error('XSLT should be defined in the form <module>:<file>' +
- ', eg ckanext.myext:templates/my.xslt')
-
- return xslt_package, xslt_path
-
-
def display_xml_original(id):
- content = _get_original_content(id)
+ content = util.get_harvest_object_original_content(id)
if not content:
return tk.abort(404)
@@ -138,26 +68,26 @@ def display_xml_original(id):
def display_html(id):
- content = _get_content(id)
+ content = util.get_harvest_object_content(id)
if not content:
return tk.abort(404)
headers = {'Content-Type': 'text/html; charset=utf-8'}
- xslt_package, xslt_path = _get_xslt()
- content = _transform_to_html(content, xslt_package, xslt_path)
+ xslt_package, xslt_path = util.get_xslt()
+ content = util.transform_to_html(content, xslt_package, xslt_path)
return make_response((content, 200, headers))
def display_html_original(id):
- content = _get_original_content(id)
+ content = util.get_harvest_object_original_content(id)
if content is None:
return tk.abort(404)
headers = {'Content-Type': 'text/html; charset=utf-8'}
- xslt_package, xslt_path = _get_xslt(original=True)
- content = _transform_to_html(content, xslt_package, xslt_path)
+ xslt_package, xslt_path = util.get_xslt(original=True)
+ content = util.transform_to_html(content, xslt_package, xslt_path)
return make_response((content, 200, headers))
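Both the Pylons controller above and these Flask views now funnel through the same three ``util`` calls. A condensed sketch of that shared path; the harvest object id is a placeholder and the wrapper function name is illustrative::

    # Hedged sketch of the rendering path shared by both frameworks.
    from ckanext.spatial import util

    def render_harvest_object(harvest_object_id):
        content = util.get_harvest_object_content(harvest_object_id)
        if not content:
            return None                            # caller aborts with 404
        xslt_package, xslt_path = util.get_xslt()
        return util.transform_to_html(content, xslt_package, xslt_path)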
diff --git a/requirements-py2.txt b/requirements-py2.txt
index 66aeaec..4216370 100644
--- a/requirements-py2.txt
+++ b/requirements-py2.txt
@@ -1,3 +1,4 @@
+ckantoolkit
GeoAlchemy>=0.6
GeoAlchemy2==0.5.0
Shapely>=1.2.13
diff --git a/requirements.txt b/requirements.txt
index 087bd84..387f417 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,3 +1,4 @@
+ckantoolkit
GeoAlchemy>=0.6
GeoAlchemy2==0.5.0
Shapely>=1.2.13
From ac470f2c90283ed42908e00ed4be81debbf3b150 Mon Sep 17 00:00:00 2001
From: amercader
Date: Fri, 28 May 2021 14:15:39 +0200
Subject: [PATCH 136/139] Better version checks in templates
---
.../spatial/templates/spatial/snippets/dataset_map_base.html | 2 +-
ckanext/spatial/templates/spatial/snippets/spatial_query.html | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/ckanext/spatial/templates/spatial/snippets/dataset_map_base.html b/ckanext/spatial/templates/spatial/snippets/dataset_map_base.html
index 2582b28..a970aeb 100644
--- a/ckanext/spatial/templates/spatial/snippets/dataset_map_base.html
+++ b/ckanext/spatial/templates/spatial/snippets/dataset_map_base.html
@@ -18,5 +18,5 @@ extent
-{% set type = 'asset' if h.ckan_version() > '2.9' else 'resource' %}
+{% set type = 'asset' if h.ckan_version().split('.')[1] | int >= 9 else 'resource' %}
{% include 'spatial/snippets/dataset_map_' ~ type ~ '.html' %}
diff --git a/ckanext/spatial/templates/spatial/snippets/spatial_query.html b/ckanext/spatial/templates/spatial/snippets/spatial_query.html
index 0ee3b98..90f8ce2 100644
--- a/ckanext/spatial/templates/spatial/snippets/spatial_query.html
+++ b/ckanext/spatial/templates/spatial/snippets/spatial_query.html
@@ -26,5 +26,5 @@ e.g.
-{% set type = 'asset' if h.ckan_version() > '2.9' else 'resource' %}
+{% set type = 'asset' if h.ckan_version().split('.')[1] | int >= 9 else 'resource' %}
{% include 'spatial/snippets/spatial_query_' ~ type ~ '.html' %}
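The motivation for the template change: a plain string comparison orders CKAN versions lexicographically, which misbehaves once 2.10 exists, while comparing the minor component as an integer does not. A quick illustration::

    # Why the snippets compare the minor version numerically:
    "2.10" > "2.9"                     # False -- compares "1" vs "9" as characters
    int("2.10".split(".")[1]) >= 9     # True  -- 10 >= 9
    int("2.9.4".split(".")[1]) >= 9    # True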
From 00bd462403d1702603fb73f9796b503506ebe4ff Mon Sep 17 00:00:00 2001
From: amercader
Date: Fri, 28 May 2021 14:16:44 +0200
Subject: [PATCH 137/139] Use ckantoolkit whenever possible
---
ckanext/spatial/harvesters/__init__.py | 1 +
ckanext/spatial/harvesters/base.py | 6 +---
ckanext/spatial/helpers.py | 5 +--
ckanext/spatial/lib/__init__.py | 7 ++--
ckanext/spatial/plugin/__init__.py | 47 +++++++++++++------------
ckanext/spatial/plugin/pylons_plugin.py | 1 -
ckanext/spatial/util.py | 1 -
7 files changed, 30 insertions(+), 38 deletions(-)
diff --git a/ckanext/spatial/harvesters/__init__.py b/ckanext/spatial/harvesters/__init__.py
index 0093d42..07b5eaf 100644
--- a/ckanext/spatial/harvesters/__init__.py
+++ b/ckanext/spatial/harvesters/__init__.py
@@ -6,6 +6,7 @@ except ImportError:
import pkgutil
__path__ = pkgutil.extend_path(__path__, __name__)
+from ckanext.spatial.harvesters.base import SpatialHarvester
from ckanext.spatial.harvesters.csw import CSWHarvester
from ckanext.spatial.harvesters.waf import WAFHarvester
from ckanext.spatial.harvesters.doc import DocHarvester
diff --git a/ckanext/spatial/harvesters/base.py b/ckanext/spatial/harvesters/base.py
index ff5839b..ccb47cb 100644
--- a/ckanext/spatial/harvesters/base.py
+++ b/ckanext/spatial/harvesters/base.py
@@ -33,11 +33,7 @@ from ckanext.harvest.model import HarvestObject
from ckanext.spatial.validation import Validators, all_validators
from ckanext.spatial.model import ISODocument
from ckanext.spatial.interfaces import ISpatialHarvester
-
-if p.toolkit.check_ckan_version("2.9"):
- config = p.toolkit.config
-else:
- from pylons import config
+from ckantoolkit import config
log = logging.getLogger(__name__)
diff --git a/ckanext/spatial/helpers.py b/ckanext/spatial/helpers.py
index 98466ea..02f5def 100644
--- a/ckanext/spatial/helpers.py
+++ b/ckanext/spatial/helpers.py
@@ -3,10 +3,7 @@ import logging
from ckan import plugins as p
from ckan.lib import helpers as h
-if p.toolkit.check_ckan_version("2.9"):
- config = p.toolkit.config
-else:
- from pylons import config
+from ckantoolkit import config
log = logging.getLogger(__name__)
diff --git a/ckanext/spatial/lib/__init__.py b/ckanext/spatial/lib/__init__.py
index c929557..18c9a00 100644
--- a/ckanext/spatial/lib/__init__.py
+++ b/ckanext/spatial/lib/__init__.py
@@ -3,19 +3,16 @@ import logging
from string import Template
from ckan.model import Session, Package
-import ckan.plugins.toolkit as tk
+import ckantoolkit as tk
from ckanext.spatial.model import PackageExtent
from shapely.geometry import asShape
-if tk.check_ckan_version("2.9"):
- config = tk.config
-else:
- from ckan.lib.base import config
from ckanext.spatial.geoalchemy_common import (WKTElement, ST_Transform,
compare_geometry_fields,
)
+config = tk.config
log = logging.getLogger(__name__)
diff --git a/ckanext/spatial/plugin/__init__.py b/ckanext/spatial/plugin/__init__.py
index 03277f3..a4e52cc 100644
--- a/ckanext/spatial/plugin/__init__.py
+++ b/ckanext/spatial/plugin/__init__.py
@@ -4,22 +4,25 @@ import mimetypes
from logging import getLogger
import six
+import ckantoolkit as tk
from ckan import plugins as p
from ckan.lib.helpers import json
-if p.toolkit.check_ckan_version(min_version="2.9"):
- config = p.toolkit.config
+if tk.check_ckan_version(min_version="2.9.0"):
from ckanext.spatial.plugin.flask_plugin import (
SpatialQueryMixin, HarvestMetadataApiMixin
)
else:
- from pylons import config
from ckanext.spatial.plugin.pylons_plugin import (
SpatialQueryMixin, HarvestMetadataApiMixin
)
+
+config = tk.config
+
+
def check_geoalchemy_requirement():
'''Checks if a suitable geoalchemy version installed
@@ -32,7 +35,7 @@ def check_geoalchemy_requirement():
'For more details see the "Troubleshooting" section of the ' +
'install documentation')
- if p.toolkit.check_ckan_version(min_version='2.3'):
+ if tk.check_ckan_version(min_version='2.3'):
try:
import geoalchemy2
except ImportError:
@@ -54,19 +57,19 @@ def package_error_summary(error_dict):
def prettify(field_name):
field_name = re.sub('(?
Date: Fri, 28 May 2021 14:24:07 +0200
Subject: [PATCH 138/139] Try to fix the docs
---
doc-requirements.txt | 9 +++------
1 file changed, 3 insertions(+), 6 deletions(-)
diff --git a/doc-requirements.txt b/doc-requirements.txt
index a2bd0c7..d1fa67b 100644
--- a/doc-requirements.txt
+++ b/doc-requirements.txt
@@ -1,8 +1,5 @@
-e git+https://github.com/ckan/ckan#egg=ckan
-r https://raw.githubusercontent.com/ckan/ckan/master/requirements.txt
-GeoAlchemy>=0.6
-OWSLib==0.8.6
-lxml>=2.3
-pyparsing==1.5.6
-Sphinx==1.2.3
-sphinx-rtd-theme==0.1.7
+-r requirements.txt
+Sphinx==1.8.5
+sphinx-rtd-theme==0.4.3
From 90ee774c86ea148eeb04905f9e8761c365df3fa8 Mon Sep 17 00:00:00 2001
From: amercader
Date: Fri, 28 May 2021 14:42:01 +0200
Subject: [PATCH 139/139] Doc and README tweaks
---
README.rst | 11 +++++------
doc/_templates/footer.html | 6 ++----
doc/conf.py | 2 +-
3 files changed, 8 insertions(+), 11 deletions(-)
diff --git a/README.rst b/README.rst
index 0184f75..ee7e33a 100644
--- a/README.rst
+++ b/README.rst
@@ -27,9 +27,9 @@ https://docs.ckan.org/projects/ckanext-spatial/en/latest/
Community
---------
-* Developer mailing list: `ckan-dev@lists.okfn.org `_
-* Developer IRC channel: `#ckan on irc.freenode.net `_
-* `Issue tracker `_
+* `Developer mailing list `_
+* `Gitter channel `_
+* `Issue tracker `_
Contributing
@@ -37,13 +37,13 @@ Contributing
For contributing to ckanext-spatial or its documentation, follow the same
guidelines that apply to CKAN core, described in
-`CONTRIBUTING `_.
+`CONTRIBUTING `_.
Copying and License
-------------------
-This material is copyright (c) 2006-2016 Open Knowledge Foundation.
+This material is copyright (c) 2011-2021 Open Knowledge Foundation and contributors.
It is open and licensed under the GNU Affero General Public License (AGPL) v3.0
whose full text may be found at:
@@ -55,4 +55,3 @@ http://www.fsf.org/licensing/licenses/agpl-3.0.html
.. _pycsw: http://pycsw.org
.. _GeoJSON: http://geojson.org
.. _ckanext-geoview: https://github.com/ckan/ckanext-geoview
-
diff --git a/doc/_templates/footer.html b/doc/_templates/footer.html
index b1492cd..d457c8b 100644
--- a/doc/_templates/footer.html
+++ b/doc/_templates/footer.html
@@ -11,11 +11,9 @@
- Source
+ Source
—
- Issues
- —
- Mailing List
+ Issues
—
Twitter @CKANProject
diff --git a/doc/conf.py b/doc/conf.py
index ab4097c..142a13d 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -41,7 +41,7 @@ master_doc = 'index'
# General information about the project.
project = u'ckanext-spatial'
-copyright = u'2015, Open Knowledge'
+copyright = u'© 2011-2021 Open Knowledge Foundation and contributors.'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the