Merge branch 'smellman-dev-py3'
commit 992b2753fc
@@ -0,0 +1,102 @@ (new file: GitHub Actions "Tests" workflow)
name: Tests
on: [push, pull_request]

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: '3.6'
      - name: Install requirements
        run: pip install flake8 pycodestyle
      - name: Check syntax
        run: flake8 . --count --select=E901,E999,F821,F822,F823 --show-source --statistics --exclude ckan

  test:
    needs: lint
    strategy:
      matrix:
        ckan-version: [2.9, 2.9-py2, 2.8, 2.7]
      fail-fast: false

    name: CKAN ${{ matrix.ckan-version }}
    runs-on: ubuntu-latest
    container:
      image: openknowledge/ckan-dev:${{ matrix.ckan-version }}
    services:
      solr:
        image: ckan/ckan-solr-dev:${{ matrix.ckan-version }}
      postgres:
        image: postgis/postgis:10-3.1
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          POSTGRES_DB: postgres
        options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
      redis:
        image: redis:3
    env:
      CKAN_SQLALCHEMY_URL: postgresql://ckan_default:pass@postgres/ckan_test
      CKAN_DATASTORE_WRITE_URL: postgresql://datastore_write:pass@postgres/datastore_test
      CKAN_DATASTORE_READ_URL: postgresql://datastore_read:pass@postgres/datastore_test
      CKAN_SOLR_URL: http://solr:8983/solr/ckan
      CKAN_REDIS_URL: redis://redis:6379/1
      PGPASSWORD: postgres

    steps:
      - uses: actions/checkout@v2
      - name: Create Database
        run: |
          psql --host=postgres --username=postgres --command="CREATE USER ckan_default WITH PASSWORD 'pass' NOSUPERUSER NOCREATEDB NOCREATEROLE;"
          createdb --encoding=utf-8 --host=postgres --username=postgres --owner=ckan_default ckan_test
          psql --host=postgres --username=postgres --command="CREATE USER datastore_write WITH PASSWORD 'pass' NOSUPERUSER NOCREATEDB NOCREATEROLE;"
          psql --host=postgres --username=postgres --command="CREATE USER datastore_read WITH PASSWORD 'pass' NOSUPERUSER NOCREATEDB NOCREATEROLE;"
          createdb --encoding=utf-8 --host=postgres --username=postgres --owner=datastore_write datastore_test
      - name: Install harvester
        run: |
          git clone https://github.com/ckan/ckanext-harvest
          cd ckanext-harvest
          pip install -r pip-requirements.txt
          pip install -r dev-requirements.txt
          pip install -e .
      - name: Install dependency (common)
        run: |
          apk add --no-cache \
            geos \
            geos-dev \
            proj-util \
            proj-dev \
            libxml2 \
            libxslt \
            gcc \
            libxml2-dev \
            libxslt-dev
      - name: Install dependency (python2)
        if: ${{ matrix.ckan-version != '2.9' }}
        run: |
          apk add --no-cache \
            python2-dev
          pip install -r requirements-py2.txt
      - name: Install dependency (python3)
        if: ${{ matrix.ckan-version == '2.9' }}
        run: |
          apk add --no-cache \
            python3-dev
          pip install -r requirements.txt
      - name: Install requirements
        run: |
          pip install -e .
          # Replace default path to CKAN core config file with the one on the container
          sed -i -e 's/use = config:.*/use = config:\/srv\/app\/src\/ckan\/test-core.ini/' test.ini
      - name: setup postgis
        run: |
          psql --host=postgres --username=postgres -d ckan_test --command="ALTER ROLE ckan_default WITH superuser;"
          psql --host=postgres --username=postgres -d ckan_test --command="CREATE EXTENSION postgis;"
      - name: Run tests
        run: pytest --ckan-ini=test.ini --cov=ckanext.spatial --cov-report=xml --cov-append --disable-warnings ckanext/spatial/tests

      - name: Upload coverage report to codecov
        uses: codecov/codecov-action@v1
        with:
          file: ./coverage.xml
.travis.yml
@@ -1,25 +0,0 @@ (file deleted)
language: python
dist: trusty
python:
  - "2.7"
cache: pip
env:
  - CKANVERSION=master
  - CKANVERSION=release-v2.6-latest
  - CKANVERSION=2.7
  - CKANVERSION=2.8
sudo: required
addons:
  postgresql: 9.6
  apt:
    packages:
      - postgresql-9.6-postgis-2.3
services:
  - redis-server
install:
  - bash bin/travis-build.bash
script: sh bin/travis-run.sh
branches:
  except:
    - stable
    - release-v2.0
README.rst
@@ -2,8 +2,9 @@
 ckanext-spatial - Geo related plugins for CKAN
 ==============================================

-.. image:: https://travis-ci.org/ckan/ckanext-spatial.svg?branch=master
-    :target: https://travis-ci.org/ckan/ckanext-spatial
+.. image:: https://github.com/ckan/ckanext-spatial/workflows/Tests/badge.svg?branch=master
+    :target: https://github.com/ckan/ckanext-spatial/actions


 This extension contains plugins that add geospatial capabilities to CKAN_,
 including:

@@ -26,9 +27,9 @@ https://docs.ckan.org/projects/ckanext-spatial/en/latest/
 Community
 ---------

-* Developer mailing list: `ckan-dev@lists.okfn.org <http://lists.okfn.org/mailman/listinfo/ckan-dev>`_
-* Developer IRC channel: `#ckan on irc.freenode.net <http://webchat.freenode.net/?channels=ckan>`_
-* `Issue tracker <https://github.com/okfn/ckanext-spatial/issues>`_
+* `Developer mailing list <https://groups.google.com/a/ckan.org/forum/#!forum/ckan-dev>`_
+* `Gitter channel <https://gitter.im/ckan/chat>`_
+* `Issue tracker <https://github.com/ckan/ckanext-spatial/issues>`_


 Contributing

@@ -36,13 +37,13 @@ Contributing

 For contributing to ckanext-spatial or its documentation, follow the same
 guidelines that apply to CKAN core, described in
-`CONTRIBUTING <https://github.com/okfn/ckan/blob/master/CONTRIBUTING.rst>`_.
+`CONTRIBUTING <https://github.com/ckan/ckan/blob/master/CONTRIBUTING.rst>`_.


 Copying and License
 -------------------

-This material is copyright (c) 2006-2016 Open Knowledge Foundation.
+This material is copyright (c) 2011-2021 Open Knowledge Foundation and contributors.

 It is open and licensed under the GNU Affero General Public License (AGPL) v3.0
 whose full text may be found at:

@@ -54,4 +55,3 @@ http://www.fsf.org/licensing/licenses/agpl-3.0.html
 .. _pycsw: http://pycsw.org
 .. _GeoJSON: http://geojson.org
 .. _ckanext-geoview: https://github.com/ckan/ckanext-geoview
-
@@ -2,6 +2,9 @@ import sys
 import logging
 import datetime
 import io
+import os
+import argparse
+from six.moves.configparser import SafeConfigParser

 import requests
 from lxml import etree

@@ -10,58 +13,66 @@ from pycsw.core import metadata, repository, util
 import pycsw.core.config
 import pycsw.core.admin

-logging.basicConfig(format='%(message)s', level=logging.INFO)
+logging.basicConfig(format="%(message)s", level=logging.INFO)

 log = logging.getLogger(__name__)


 def setup_db(pycsw_config):
     """Setup database tables and indexes"""

     from sqlalchemy import Column, Text

-    database = pycsw_config.get('repository', 'database')
-    table_name = pycsw_config.get('repository', 'table', 'records')
+    database = pycsw_config.get("repository", "database")
+    table_name = pycsw_config.get("repository", "table", "records")

     ckan_columns = [
-        Column('ckan_id', Text, index=True),
-        Column('ckan_modified', Text),
+        Column("ckan_id", Text, index=True),
+        Column("ckan_modified", Text),
     ]

-    pycsw.core.admin.setup_db(database,
-        table_name, '',
+    pycsw.core.admin.setup_db(
+        database,
+        table_name,
+        "",
         create_plpythonu_functions=False,
-        extra_columns=ckan_columns)
+        extra_columns=ckan_columns,
+    )


 def set_keywords(pycsw_config_file, pycsw_config, ckan_url, limit=20):
     """set pycsw service metadata keywords from top limit CKAN tags"""

-    log.info('Fetching tags from %s', ckan_url)
-    url = ckan_url + 'api/tag_counts'
+    log.info("Fetching tags from %s", ckan_url)
+    url = ckan_url + "api/tag_counts"
     response = requests.get(url)
     tags = response.json()

-    log.info('Deriving top %d tags', limit)
+    log.info("Deriving top %d tags", limit)
     # uniquify and sort by top limit
     tags_unique = [list(x) for x in set(tuple(x) for x in tags)]
     tags_sorted = sorted(tags_unique, key=lambda x: x[1], reverse=1)[0:limit]
-    keywords = ','.join('%s' % tn[0] for tn in tags_sorted)
+    keywords = ",".join("%s" % tn[0] for tn in tags_sorted)

-    log.info('Setting tags in pycsw configuration file %s', pycsw_config_file)
-    pycsw_config.set('metadata:main', 'identification_keywords', keywords)
-    with open(pycsw_config_file, 'wb') as configfile:
+    log.info("Setting tags in pycsw configuration file %s", pycsw_config_file)
+    pycsw_config.set("metadata:main", "identification_keywords", keywords)
+    with open(pycsw_config_file, "wb") as configfile:
         pycsw_config.write(configfile)


 def load(pycsw_config, ckan_url):

-    database = pycsw_config.get('repository', 'database')
-    table_name = pycsw_config.get('repository', 'table', 'records')
+    database = pycsw_config.get("repository", "database")
+    table_name = pycsw_config.get("repository", "table", "records")

     context = pycsw.core.config.StaticContext()
     repo = repository.Repository(database, context, table=table_name)

-    log.info('Started gathering CKAN datasets identifiers: {0}'.format(str(datetime.datetime.now())))
+    log.info(
+        "Started gathering CKAN datasets identifiers: {0}".format(
+            str(datetime.datetime.now())
+        )
+    )

     query = 'api/search/dataset?qjson={"fl":"id,metadata_modified,extras_harvest_object_id,extras_metadata_source", "q":"harvest_object_id:[\\"\\" TO *]", "limit":1000, "start":%s}'

@@ -75,23 +86,25 @@ def load(pycsw_config, ckan_url):
         response = requests.get(url)
         listing = response.json()
         if not isinstance(listing, dict):
-            raise RuntimeError, 'Wrong API response: %s' % listing
-        results = listing.get('results')
+            raise RuntimeError("Wrong API response: %s" % listing)
+        results = listing.get("results")
         if not results:
             break
         for result in results:
-            gathered_records[result['id']] = {
-                'metadata_modified': result['metadata_modified'],
-                'harvest_object_id': result['extras']['harvest_object_id'],
-                'source': result['extras'].get('metadata_source')
+            gathered_records[result["id"]] = {
+                "metadata_modified": result["metadata_modified"],
+                "harvest_object_id": result["extras"]["harvest_object_id"],
+                "source": result["extras"].get("metadata_source"),
             }

         start = start + 1000
-        log.debug('Gathered %s' % start)
+        log.debug("Gathered %s" % start)

-    log.info('Gather finished ({0} datasets): {1}'.format(
-        len(gathered_records.keys()),
-        str(datetime.datetime.now())))
+    log.info(
+        "Gather finished ({0} datasets): {1}".format(
+            len(gathered_records.keys()), str(datetime.datetime.now())
+        )
+    )

     existing_records = {}

@@ -105,17 +118,16 @@ def load(pycsw_config, ckan_url):
     changed = set()

     for key in set(gathered_records) & set(existing_records):
-        if gathered_records[key]['metadata_modified'] > existing_records[key]:
+        if gathered_records[key]["metadata_modified"] > existing_records[key]:
             changed.add(key)

     for ckan_id in deleted:
         try:
             repo.session.begin()
-            repo.session.query(repo.dataset.ckan_id).filter_by(
-                ckan_id=ckan_id).delete()
-            log.info('Deleted %s' % ckan_id)
+            repo.session.query(repo.dataset.ckan_id).filter_by(ckan_id=ckan_id).delete()
+            log.info("Deleted %s" % ckan_id)
             repo.session.commit()
-        except Exception, err:
+        except Exception:
             repo.session.rollback()
             raise

@@ -123,76 +135,81 @@ def load(pycsw_config, ckan_url):
         ckan_info = gathered_records[ckan_id]
         record = get_record(context, repo, ckan_url, ckan_id, ckan_info)
         if not record:
-            log.info('Skipped record %s' % ckan_id)
+            log.info("Skipped record %s" % ckan_id)
             continue
         try:
-            repo.insert(record, 'local', util.get_today_and_now())
-            log.info('Inserted %s' % ckan_id)
-        except Exception, err:
-            log.error('ERROR: not inserted %s Error:%s' % (ckan_id, err))
+            repo.insert(record, "local", util.get_today_and_now())
+            log.info("Inserted %s" % ckan_id)
+        except Exception as err:
+            log.error("ERROR: not inserted %s Error:%s" % (ckan_id, err))

     for ckan_id in changed:
         ckan_info = gathered_records[ckan_id]
         record = get_record(context, repo, ckan_url, ckan_id, ckan_info)
         if not record:
             continue
-        update_dict = dict([(getattr(repo.dataset, key),
-                             getattr(record, key)) \
-                            for key in record.__dict__.keys() if key != '_sa_instance_state'])
+        update_dict = dict(
+            [
+                (getattr(repo.dataset, key), getattr(record, key))
+                for key in record.__dict__.keys()
+                if key != "_sa_instance_state"
+            ]
+        )
         try:
             repo.session.begin()
-            repo.session.query(repo.dataset).filter_by(
-                ckan_id=ckan_id).update(update_dict)
+            repo.session.query(repo.dataset).filter_by(ckan_id=ckan_id).update(
+                update_dict
+            )
             repo.session.commit()
-            log.info('Changed %s' % ckan_id)
-        except Exception, err:
+            log.info("Changed %s" % ckan_id)
+        except Exception as err:
             repo.session.rollback()
-            raise RuntimeError, 'ERROR: %s' % str(err)
+            raise RuntimeError("ERROR: %s" % str(err))


 def clear(pycsw_config):

     from sqlalchemy import create_engine, MetaData, Table

-    database = pycsw_config.get('repository', 'database')
-    table_name = pycsw_config.get('repository', 'table', 'records')
+    database = pycsw_config.get("repository", "database")
+    table_name = pycsw_config.get("repository", "table", "records")

-    log.debug('Creating engine')
+    log.debug("Creating engine")
     engine = create_engine(database)
     records = Table(table_name, MetaData(engine))
     records.delete().execute()
-    log.info('Table cleared')
+    log.info("Table cleared")


 def get_record(context, repo, ckan_url, ckan_id, ckan_info):
-    query = ckan_url + 'harvest/object/%s'
-    url = query % ckan_info['harvest_object_id']
+    query = ckan_url + "harvest/object/%s"
+    url = query % ckan_info["harvest_object_id"]
     response = requests.get(url)

-    if ckan_info['source'] == 'arcgis':
+    if ckan_info["source"] == "arcgis":
         return

     try:
         xml = etree.parse(io.BytesIO(response.content))
-    except Exception, err:
-        log.error('Could not pass xml doc from %s, Error: %s' % (ckan_id, err))
+    except Exception as err:
+        log.error("Could not pass xml doc from %s, Error: %s" % (ckan_id, err))
         return

     try:
         record = metadata.parse_record(context, xml, repo)[0]
-    except Exception, err:
-        log.error('Could not extract metadata from %s, Error: %s' % (ckan_id, err))
+    except Exception as err:
+        log.error("Could not extract metadata from %s, Error: %s" % (ckan_id, err))
         return

     if not record.identifier:
         record.identifier = ckan_id
     record.ckan_id = ckan_id
-    record.ckan_modified = ckan_info['metadata_modified']
+    record.ckan_modified = ckan_info["metadata_modified"]

     return record


-usage='''
+usage = """
 Manages the CKAN-pycsw integration

 python ckan-pycsw.py setup [-p]

@@ -211,18 +228,19 @@ All commands require the pycsw configuration file. By default it will try
 to find a file called 'default.cfg' in the same directory, but you'll
 probably need to provide the actual location via the -p option:

-    paster ckan-pycsw setup -p /etc/ckan/default/pycsw.cfg
+    python ckan_pycsw.py setup -p /etc/ckan/default/pycsw.cfg

 The load command requires a CKAN URL from where the datasets will be pulled:

-    paster ckan-pycsw load -p /etc/ckan/default/pycsw.cfg -u http://localhost
+    python ckan_pycsw.py load -p /etc/ckan/default/pycsw.cfg -u http://localhost

-'''
+"""

 def _load_config(file_path):
     abs_path = os.path.abspath(file_path)
     if not os.path.exists(abs_path):
-        raise AssertionError('pycsw config file {0} does not exist.'.format(abs_path))
+        raise AssertionError("pycsw config file {0} does not exist.".format(abs_path))

     config = SafeConfigParser()
     config.read(abs_path)

@@ -230,25 +248,24 @@ def _load_config(file_path):
     return config


-import os
-import argparse
-from ConfigParser import SafeConfigParser
-
-if __name__ == '__main__':
-    parser = argparse.ArgumentParser(
-        description='\n'.split(usage)[0],
-        usage=usage)
-    parser.add_argument('command',
-        help='Command to perform')
-
-    parser.add_argument('-p', '--pycsw_config',
-        action='store', default='default.cfg',
-        help='pycsw config file to use.')
-
-    parser.add_argument('-u', '--ckan_url',
-        action='store',
-        help='CKAN instance to import the datasets from.')
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description="\n".split(usage)[0], usage=usage)
+    parser.add_argument("command", help="Command to perform")
+
+    parser.add_argument(
+        "-p",
+        "--pycsw_config",
+        action="store",
+        default="default.cfg",
+        help="pycsw config file to use.",
+    )
+
+    parser.add_argument(
+        "-u",
+        "--ckan_url",
+        action="store",
+        help="CKAN instance to import the datasets from.",
+    )

     if len(sys.argv) <= 1:
         parser.print_usage()

@@ -257,18 +274,18 @@ if __name__ == '__main__':
     arg = parser.parse_args()
     pycsw_config = _load_config(arg.pycsw_config)

-    if arg.command == 'setup':
+    if arg.command == "setup":
         setup_db(pycsw_config)
-    elif arg.command in ['load', 'set_keywords']:
+    elif arg.command in ["load", "set_keywords"]:
         if not arg.ckan_url:
-            raise AssertionError('You need to provide a CKAN URL with -u or --ckan_url')
-        ckan_url = arg.ckan_url.rstrip('/') + '/'
-        if arg.command == 'load':
+            raise AssertionError("You need to provide a CKAN URL with -u or --ckan_url")
+        ckan_url = arg.ckan_url.rstrip("/") + "/"
+        if arg.command == "load":
             load(pycsw_config, ckan_url)
         else:
             set_keywords(arg.pycsw_config, pycsw_config, ckan_url)
-    elif arg.command == 'clear':
+    elif arg.command == "clear":
         clear(pycsw_config)
     else:
-        print 'Unknown command {0}'.format(arg.command)
+        print("Unknown command {0}".format(arg.command))
         sys.exit(1)
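Most of the changes to this file are the mechanical Python 2 → 3 syntax migrations: `except E, e` becomes `except E as e`, `print x` becomes `print(x)`, and `raise RuntimeError, msg` becomes `raise RuntimeError(msg)`. A minimal runnable sketch of the new forms (`do_work` is a hypothetical stand-in, not a function from this codebase):

def do_work():
    # Hypothetical stand-in for any of the wrapped pycsw/requests calls.
    raise ValueError("boom")

try:
    do_work()
except Exception as err:        # Python 2 spelling was: except Exception, err:
    print("ERROR: %s" % err)    # Python 2 spelling was: print 'ERROR: %s' % err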
@@ -1,78 +0,0 @@ (deleted: bin/travis-build.bash)
#!/bin/bash
set -e

echo "This is travis-build.bash..."

echo "Installing the packages that CKAN requires..."
sudo apt-get update -qq
sudo apt-get install solr-jetty

echo "Installing CKAN and its Python dependencies..."
git clone https://github.com/ckan/ckan
cd ckan
if [ $CKANVERSION != 'master' ]
then
  git checkout $CKANVERSION
fi

# Unpin CKAN's psycopg2 dependency get an important bugfix
# https://stackoverflow.com/questions/47044854/error-installing-psycopg2-2-6-2
sed -i '/psycopg2/c\psycopg2' requirements.txt

python setup.py develop
if [ -f requirements-py2.txt ]
then
  pip install -r requirements-py2.txt
else
  pip install -r requirements.txt
fi
pip install -r dev-requirements.txt
cd -

echo "Setting up Solr..."
# solr is multicore for tests on ckan master now, but it's easier to run tests
# on Travis single-core still.
# see https://github.com/ckan/ckan/issues/2972
sed -i -e 's/solr_url.*/solr_url = http:\/\/127.0.0.1:8983\/solr/' ckan/test-core.ini
printf "NO_START=0\nJETTY_HOST=127.0.0.1\nJETTY_PORT=8983\nJAVA_HOME=$JAVA_HOME" | sudo tee /etc/default/jetty
sudo cp ckan/ckan/config/solr/schema.xml /etc/solr/conf/schema.xml
sudo service jetty restart

echo "Creating the PostgreSQL user and database..."
sudo -u postgres psql -c "CREATE USER ckan_default WITH PASSWORD 'pass';"
sudo -u postgres psql -c 'CREATE DATABASE ckan_test WITH OWNER ckan_default;'

echo "Setting up PostGIS on the database..."
sudo -u postgres psql -d ckan_test -c 'CREATE EXTENSION postgis;'
sudo -u postgres psql -d ckan_test -c 'ALTER VIEW geometry_columns OWNER TO ckan_default;'
sudo -u postgres psql -d ckan_test -c 'ALTER TABLE spatial_ref_sys OWNER TO ckan_default;'

echo "Install other libraries required..."
sudo apt-get install python-dev libxml2-dev libxslt1-dev libgeos-c1

echo "Initialising the database..."
cd ckan
paster db init -c test-core.ini
cd -

echo "Installing ckanext-harvest and its requirements..."
git clone https://github.com/ckan/ckanext-harvest
cd ckanext-harvest
python setup.py develop
pip install -r pip-requirements.txt

paster harvester initdb -c ../ckan/test-core.ini
cd -

echo "Installing ckanext-spatial and its requirements..."
pip install -r pip-requirements.txt
python setup.py develop


echo "Moving test.ini into a subdir..."
mkdir subdir
mv test.ini subdir

paster spatial initdb -c subdir/test.ini

echo "travis-build.bash is done."
@@ -1,3 +0,0 @@ (deleted: bin/travis-run.sh)
#!/bin/sh -e

nosetests --ckan --nologcapture --with-pylons=subdir/test.ini ckanext/spatial
@@ -0,0 +1,73 @@ (new file: Click-based CLI module)
# encoding: utf-8
import click
import logging

import ckanext.spatial.util as util


log = logging.getLogger(__name__)


def get_commands():
    return [
        spatial,
        spatial_validation
    ]


@click.group(u"spatial-validation", short_help=u"Spatial formats validation commands")
def spatial_validation():
    pass


@spatial_validation.command()
@click.argument('pkg', required=False)
def report(pkg):
    """
    Performs validation on the harvested metadata, either for all
    packages or the one specified.
    """

    return util.report(pkg)


@spatial_validation.command('report-csv')
@click.argument('filepath')
def report_csv(filepath):
    """
    Performs validation on all the harvested metadata in the db and
    writes a report in CSV format to the given filepath.
    """
    return util.report_csv(filepath)


@spatial_validation.command('file')
@click.argument('filepath')
def validate_file(filepath):
    """Performs validation on the given metadata file."""
    return util.validate_file(filepath)


@click.group(short_help=u"Performs spatially related operations.")
def spatial():
    pass


@spatial.command()
@click.argument('srid', required=False)
def initdb(srid):
    """
    Creates the necessary tables. You must have PostGIS installed
    and configured in the database.
    You can provide the SRID of the geometry column. Default is 4326.
    """
    return util.initdb(srid)


@spatial.command('extents')
def update_extents():
    """
    Creates or updates the extent geometry column for datasets with
    an extent defined in the 'spatial' extra.
    """

    return util.update_extents()
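The new module only defines the command groups; the wiring that exposes them to the `ckan` CLI is not part of this diff. A sketch of how a CKAN 2.9 plugin would typically hand them over via the IClick interface (the plugin class name here is hypothetical):

import ckan.plugins as p

import ckanext.spatial.cli as cli


class SpatialPlugin(p.SingletonPlugin):
    # Hypothetical plugin class; the IClick wiring is the point here.
    p.implements(p.IClick)

    def get_commands(self):
        # Makes e.g. `ckan -c ckan.ini spatial initdb` available.
        return cli.get_commands()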
@@ -1,3 +1,4 @@
+from __future__ import print_function
 import sys
 import logging

@@ -63,4 +64,4 @@ option:
     elif cmd == 'clear':
         ckan_pycsw.clear(config)
     else:
-        print 'Command %s not recognized' % cmd
+        print('Command %s not recognized' % cmd)
@@ -1,11 +1,12 @@
+from __future__ import print_function
 import sys
 import re
 from pprint import pprint
-import logging

+import logging
 from ckan.lib.cli import CkanCommand
 from ckan.lib.helpers import json
 from ckanext.spatial.lib import save_package_extent

+import ckanext.spatial.util as util
+

 log = logging.getLogger(__name__)

 class Spatial(CkanCommand):

@@ -20,7 +21,7 @@ class Spatial(CkanCommand):
       spatial extents
          Creates or updates the extent geometry column for datasets with
          an extent defined in the 'spatial' extra.

     The commands should be run from the ckanext-spatial directory and expect
     a development.ini file to be present. Most of the time you will
     specify the config explicitly though::

@@ -31,67 +32,29 @@ class Spatial(CkanCommand):

     summary = __doc__.split('\n')[0]
     usage = __doc__
     max_args = 2
     min_args = 0

     def command(self):
         self._load_config()
-        print ''

         if len(self.args) == 0:
             self.parser.print_usage()
             sys.exit(1)
         cmd = self.args[0]
         if cmd == 'initdb':
             self.initdb()
         elif cmd == 'extents':
             self.update_extents()
         else:
-            print 'Command %s not recognized' % cmd
+            print('Command %s not recognized' % cmd)

     def initdb(self):
         if len(self.args) >= 2:
-            srid = unicode(self.args[1])
+            srid = self.args[1]
         else:
             srid = None

-        from ckanext.spatial.model import setup as db_setup
-
-        db_setup(srid)
-
-        print 'DB tables created'
+        return util.initdb(srid)

     def update_extents(self):
-        from ckan.model import PackageExtra, Package, Session
-        conn = Session.connection()
-        packages = [extra.package \
-                    for extra in \
-                    Session.query(PackageExtra).filter(PackageExtra.key == 'spatial').all()]
-
-        errors = []
-        count = 0
-        for package in packages:
-            try:
-                value = package.extras['spatial']
-                log.debug('Received: %r' % value)
-                geometry = json.loads(value)
-
-                count += 1
-            except ValueError,e:
-                errors.append(u'Package %s - Error decoding JSON object: %s' % (package.id,str(e)))
-            except TypeError,e:
-                errors.append(u'Package %s - Error decoding JSON object: %s' % (package.id,str(e)))
-
-            save_package_extent(package.id,geometry)
-
-        Session.commit()
-
-        if errors:
-            msg = 'Errors were found:\n%s' % '\n'.join(errors)
-            print msg
-
-        msg = "Done. Extents generated for %i out of %i packages" % (count,len(packages))
-
-        print msg
+        return util.update_extents()
@@ -1,13 +1,12 @@
+from __future__ import print_function
 import sys
-import re
 import os
-from pprint import pprint

 import logging

-from lxml import etree
-
 from ckan.lib.cli import CkanCommand

+import ckanext.spatial.util as util
+

 log = logging.getLogger(__name__)

 class Validation(CkanCommand):

@@ -21,7 +20,7 @@ class Validation(CkanCommand):
       validation report-csv <filename>.csv
          Performs validation on all the harvested metadata in the db and
          writes a report in CSV format to the given filepath.

       validation file <filename>.xml
          Performs validation on the given metadata file.
     '''

@@ -32,7 +31,7 @@ class Validation(CkanCommand):

     def command(self):
         if not self.args or self.args[0] in ['--help', '-h', 'help']:
-            print self.usage
+            print(self.usage)
             sys.exit(1)

         self._load_config()

@@ -45,84 +44,28 @@ class Validation(CkanCommand):
         elif cmd == 'file':
             self.validate_file()
         else:
-            print 'Command %s not recognized' % cmd
+            print('Command %s not recognized' % cmd)

     def report(self):
-        from ckan import model
-        from ckanext.harvest.model import HarvestObject
-        from ckanext.spatial.lib.reports import validation_report
-
         if len(self.args) >= 2:
-            package_ref = unicode(self.args[1])
-            pkg = model.Package.get(package_ref)
-            if not pkg:
-                print 'Package ref "%s" not recognised' % package_ref
-                sys.exit(1)
+            pkg = self.args[1]
         else:
             pkg = None

-        report = validation_report(package_id=pkg.id)
-        for row in report.get_rows_html_formatted():
-            print
-            for i, col_name in enumerate(report.column_names):
-                print ' %s: %s' % (col_name, row[i])
+        return util.report(pkg)

     def validate_file(self):
-        from ckanext.spatial.harvesters import SpatialHarvester
-        from ckanext.spatial.model import ISODocument
-
         if len(self.args) > 2:
-            print 'Too many parameters %i' % len(self.args)
+            print('Too many parameters %i' % len(self.args))
             sys.exit(1)
         if len(self.args) < 2:
-            print 'Not enough parameters %i' % len(self.args)
+            print('Not enough parameters %i' % len(self.args))
             sys.exit(1)
-        metadata_filepath = self.args[1]
-        if not os.path.exists(metadata_filepath):
-            print 'Filepath %s not found' % metadata_filepath
-            sys.exit(1)
-        with open(metadata_filepath, 'rb') as f:
-            metadata_xml = f.read()
-
-        validators = SpatialHarvester()._get_validator()
-        print 'Validators: %r' % validators.profiles
-        try:
-            xml_string = metadata_xml.encode("utf-8")
-        except UnicodeDecodeError, e:
-            print 'ERROR: Unicode Error reading file \'%s\': %s' % \
-                  (metadata_filepath, e)
-            sys.exit(1)
-        #import pdb; pdb.set_trace()
-        xml = etree.fromstring(xml_string)
-
-        # XML validation
-        valid, errors = validators.is_valid(xml)
-
-        # CKAN read of values
-        if valid:
-            try:
-                iso_document = ISODocument(xml_string)
-                iso_values = iso_document.read_values()
-            except Exception, e:
-                valid = False
-                errors.append('CKAN exception reading values from ISODocument: %s' % e)
-
-        print '***************'
-        print 'Summary'
-        print '***************'
-        print 'File: \'%s\'' % metadata_filepath
-        print 'Valid: %s' % valid
-        if not valid:
-            print 'Errors:'
-            print pprint(errors)
-        print '***************'
+        return util.validate_file(self.args[1])

     def report_csv(self):
-        from ckanext.spatial.lib.reports import validation_report
         if len(self.args) != 2:
-            print 'Wrong number of arguments'
+            print('Wrong number of arguments')
             sys.exit(1)
-        csv_filepath = self.args[1]
-        report = validation_report()
-        with open(csv_filepath, 'wb') as f:
-            f.write(report.get_csv())
+        return util.report_csv(self.args[1])
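Both paster commands above now delegate to `ckanext.spatial.util`, the same helpers the new Click CLI calls, so the Python 2 and Python 3 entry points share one code path. A minimal sketch of that pattern; `do_report` stands in for the util function and is not from this codebase:

import click


def do_report(pkg=None):
    # Hypothetical stand-in for ckanext.spatial.util.report().
    return "report for %s" % (pkg or "all packages")


def paster_report(args):
    # Old-style entry point: parses its own argv-like list.
    pkg = args[1] if len(args) >= 2 else None
    return do_report(pkg)


@click.command()
@click.argument("pkg", required=False)
def report(pkg):
    # New-style entry point: Click handles argument parsing.
    return do_report(pkg)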
@@ -1,20 +1,14 @@
 import logging

-try:
-    from cStringIO import StringIO
-except ImportError:
-    from StringIO import StringIO
-
 from pylons import response
-from pkg_resources import resource_stream
-from lxml import etree

-from ckan.lib.base import request, config, abort
+from ckan.lib.base import request, abort
 from ckan.controllers.api import ApiController as BaseApiController
 from ckan.model import Session

 from ckanext.harvest.model import HarvestObject, HarvestObjectExtra
 from ckanext.spatial.lib import get_srid, validate_bbox, bbox_query
+from ckanext.spatial import util

 log = logging.getLogger(__name__)

@@ -26,7 +20,7 @@ class ApiController(BaseApiController):
         error_400_msg = \
             'Please provide a suitable bbox parameter [minx,miny,maxx,maxy]'

-        if not 'bbox' in request.params:
+        if 'bbox' not in request.params:
             abort(400, error_400_msg)

         bbox = validate_bbox(request.params['bbox'])

@@ -56,7 +50,7 @@ class HarvestMetadataApiController(BaseApiController):
     def _get_content(self, id):

         obj = Session.query(HarvestObject) \
             .filter(HarvestObject.id == id).first()
         if obj:
             return obj.content
         else:

@@ -64,62 +58,21 @@ class HarvestMetadataApiController(BaseApiController):

     def _get_original_content(self, id):
         extra = Session.query(HarvestObjectExtra).join(HarvestObject) \
             .filter(HarvestObject.id == id) \
             .filter(
                 HarvestObjectExtra.key == 'original_document'
             ).first()
         if extra:
             return extra.value
         else:
             return None

-    def _transform_to_html(self, content, xslt_package=None, xslt_path=None):
-
-        xslt_package = xslt_package or __name__
-        xslt_path = xslt_path or \
-            '../templates/ckanext/spatial/gemini2-html-stylesheet.xsl'
-
-        # optimise -- read transform only once and compile rather
-        # than at each request
-        with resource_stream(xslt_package, xslt_path) as style:
-            style_xml = etree.parse(style)
-            transformer = etree.XSLT(style_xml)
-
-        xml = etree.parse(StringIO(content.encode('utf-8')))
-        html = transformer(xml)
-
-        response.headers['Content-Type'] = 'text/html; charset=utf-8'
-        response.headers['Content-Length'] = len(content)
-
-        result = etree.tostring(html, pretty_print=True)
-
-        return result
-
     def _get_xslt(self, original=False):
-
-        if original:
-            config_option = \
-                'ckanext.spatial.harvest.xslt_html_content_original'
-        else:
-            config_option = 'ckanext.spatial.harvest.xslt_html_content'
-
-        xslt_package = None
-        xslt_path = None
-        xslt = config.get(config_option, None)
-        if xslt:
-            if ':' in xslt:
-                xslt = xslt.split(':')
-                xslt_package = xslt[0]
-                xslt_path = xslt[1]
-            else:
-                log.error(
-                    'XSLT should be defined in the form <package>:<path>' +
-                    ', eg ckanext.myext:templates/my.xslt')
-
-        return xslt_package, xslt_path
+        return util.get_xslt(original)

     def display_xml_original(self, id):
-        content = self._get_original_content(id)
+        content = util.get_harvest_object_original_content(id)

         if not content:
             abort(404)

@@ -127,7 +80,7 @@ class HarvestMetadataApiController(BaseApiController):
         response.headers['Content-Type'] = 'application/xml; charset=utf-8'
         response.headers['Content-Length'] = len(content)

-        if not '<?xml' in content.split('\n')[0]:
+        if '<?xml' not in content.split('\n')[0]:
             content = u'<?xml version="1.0" encoding="UTF-8"?>\n' + content
         return content.encode('utf-8')

@@ -138,13 +91,22 @@ class HarvestMetadataApiController(BaseApiController):
             abort(404)

         xslt_package, xslt_path = self._get_xslt()
-        return self._transform_to_html(content, xslt_package, xslt_path)
+        out = util.transform_to_html(content, xslt_package, xslt_path)
+        response.headers['Content-Type'] = 'text/html; charset=utf-8'
+        response.headers['Content-Length'] = len(out)
+
+        return out

     def display_html_original(self, id):
-        content = self._get_original_content(id)
+        content = util.get_harvest_object_original_content(id)

         if content is None:
             abort(404)

         xslt_package, xslt_path = self._get_xslt(original=True)
-        return self._transform_to_html(content, xslt_package, xslt_path)
+
+        out = util.transform_to_html(content, xslt_package, xslt_path)
+        response.headers['Content-Type'] = 'text/html; charset=utf-8'
+        response.headers['Content-Length'] = len(out)
+
+        return out
@@ -1,38 +0,0 @@ (file deleted: WMS preview controller)
import urllib2

from ckan.lib.base import BaseController, c, request, \
    response, render, abort

from ckan.model import Package


class ViewController(BaseController):

    def wms_preview(self, id):
        # check if package exists
        c.pkg = Package.get(id)
        if c.pkg is None:
            abort(404, 'Dataset not found')

        for res in c.pkg.resources:
            if res.format.lower() == 'wms':
                c.wms_url = res.url \
                    if '?' not in res.url else res.url.split('?')[0]
                break
        if not c.wms_url:
            abort(400, 'This dataset does not have a WMS resource')

        return render('ckanext/spatial/wms_preview.html')

    def proxy(self):
        if 'url' not in request.params:
            abort(400)
        try:
            server_response = urllib2.urlopen(request.params['url'])
            headers = server_response.info()
            if headers.get('Content-Type'):
                response.content_type = headers.get('Content-Type')
            return server_response.read()
        except urllib2.HTTPError as e:
            response.status_int = e.getcode()
            return
@@ -72,6 +72,7 @@ def setup_spatial_table(package_extent_class, db_srid=None):
         Column('package_id', types.UnicodeText, primary_key=True),
         Column('the_geom', Geometry('GEOMETRY', srid=db_srid,
                                     management=management)),
+        extend_existing=True
     )

     meta.mapper(package_extent_class, package_extent_table)
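The single added argument, `extend_existing=True`, tells SQLAlchemy to augment a `Table` already registered in the `MetaData` rather than raise an error when the definition runs again (as can happen when tests set the table up repeatedly). A self-contained sketch with illustrative names:

from sqlalchemy import Column, Integer, MetaData, Table, Text

meta = MetaData()
t = Table("package_extent", meta, Column("package_id", Text, primary_key=True))

# Without extend_existing=True this second definition would raise
# sqlalchemy.exc.InvalidRequestError; with it, the new column is merged in.
t = Table(
    "package_extent", meta,
    Column("srid", Integer),
    extend_existing=True,
)
print(sorted(t.columns.keys()))  # ['package_id', 'srid']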
@@ -6,6 +6,7 @@ except ImportError:
     import pkgutil
     __path__ = pkgutil.extend_path(__path__, __name__)

 from ckanext.spatial.harvesters.base import SpatialHarvester
 from ckanext.spatial.harvesters.csw import CSWHarvester
 from ckanext.spatial.harvesters.waf import WAFHarvester
+from ckanext.spatial.harvesters.doc import DocHarvester
@@ -1,19 +1,20 @@
+import six
+from six.moves.urllib.parse import urlparse
+from six.moves.urllib.request import urlopen
+
 import re
 import cgitb
 import warnings
-import urllib2
 import sys
 import logging
 from string import Template
-from urlparse import urlparse
 from datetime import datetime
 import uuid
 import hashlib
 import dateutil
 import mimetypes


-from pylons import config
 from owslib import wms
 import requests
 from lxml import etree

@@ -32,6 +33,7 @@ from ckanext.harvest.model import HarvestObject
 from ckanext.spatial.validation import Validators, all_validators
 from ckanext.spatial.model import ISODocument
 from ckanext.spatial.interfaces import ISpatialHarvester
+from ckantoolkit import config

 log = logging.getLogger(__name__)

@@ -87,7 +89,7 @@ def guess_resource_format(url, use_mimetypes=True):
         'arcgis_rest': ('arcgis/rest/services',),
     }

-    for resource_type, parts in resource_types.iteritems():
+    for resource_type, parts in resource_types.items():
         if any(part in url for part in parts):
             return resource_type

@@ -97,7 +99,7 @@ def guess_resource_format(url, use_mimetypes=True):
         'gml': ('gml',),
     }

-    for file_type, extensions in file_types.iteritems():
+    for file_type, extensions in file_types.items():
         if any(url.endswith(extension) for extension in extensions):
             return file_type

@@ -155,7 +157,7 @@ class SpatialHarvester(HarvesterBase):
                 if not isinstance(source_config_obj[key],bool):
                     raise ValueError('%s must be boolean' % key)

-        except ValueError, e:
+        except ValueError as e:
             raise e

         return source_config

@@ -203,7 +205,7 @@ class SpatialHarvester(HarvesterBase):
         :returns: A dataset dictionary (package_dict)
         :rtype: dict
         '''

         tags = []

         if 'tags' in iso_values:

@@ -235,7 +237,7 @@ class SpatialHarvester(HarvesterBase):
         if package is None or package.title != iso_values['title']:
             name = self._gen_new_name(iso_values['title'])
             if not name:
-                name = self._gen_new_name(str(iso_values['guid']))
+                name = self._gen_new_name(six.text_type(iso_values['guid']))
             if not name:
                 raise Exception('Could not generate a unique name from the title or the GUID. Please choose a more unique title.')
             package_dict['name'] = name

@@ -334,7 +336,7 @@ class SpatialHarvester(HarvesterBase):
                     parties[party['organisation-name']].append(party['role'])
                 else:
                     parties[party['organisation-name']] = [party['role']]
-            extras['responsible-party'] = [{'name': k, 'roles': v} for k, v in parties.iteritems()]
+            extras['responsible-party'] = [{'name': k, 'roles': v} for k, v in parties.items()]

         if len(iso_values['bbox']) > 0:
             bbox = iso_values['bbox'][0]

@@ -348,8 +350,8 @@ class SpatialHarvester(HarvesterBase):
                 xmax = float(bbox['east'])
                 ymin = float(bbox['south'])
                 ymax = float(bbox['north'])
-            except ValueError, e:
-                self._save_object_error('Error parsing bounding box value: {0}'.format(str(e)),
+            except ValueError as e:
+                self._save_object_error('Error parsing bounding box value: {0}'.format(six.text_type(e)),
                                     harvest_object, 'Import')
             else:
                 # Construct a GeoJSON extent so ckanext-spatial can register the extent geometry

@@ -402,11 +404,11 @@ class SpatialHarvester(HarvesterBase):
         default_extras = self.source_config.get('default_extras',{})
         if default_extras:
             override_extras = self.source_config.get('override_extras',False)
-            for key,value in default_extras.iteritems():
+            for key,value in default_extras.items():
                 log.debug('Processing extra %s', key)
                 if not key in extras or override_extras:
                     # Look for replacement strings
-                    if isinstance(value,basestring):
+                    if isinstance(value,six.string_types):
                         value = value.format(harvest_source_id=harvest_object.job.source.id,
                                 harvest_source_url=harvest_object.job.source.url.strip('/'),
                                 harvest_source_title=harvest_object.job.source.title,

@@ -415,7 +417,7 @@ class SpatialHarvester(HarvesterBase):
                     extras[key] = value

         extras_as_dict = []
-        for key, value in extras.iteritems():
+        for key, value in extras.items():
             if isinstance(value, (list, dict)):
                 extras_as_dict.append({'key': key, 'value': json.dumps(value)})
             else:

@@ -509,8 +511,8 @@ class SpatialHarvester(HarvesterBase):
             iso_parser = ISODocument(harvest_object.content)
             iso_values = iso_parser.read_values()
-        except Exception, e:
-            self._save_object_error('Error parsing ISO document for object {0}: {1}'.format(harvest_object.id, str(e)),
+        except Exception as e:
+            self._save_object_error('Error parsing ISO document for object {0}: {1}'.format(harvest_object.id, six.text_type(e)),
                                     harvest_object, 'Import')
             return False

@@ -580,7 +582,7 @@ class SpatialHarvester(HarvesterBase):
         # The default package schema does not like Upper case tags
         tag_schema = logic.schema.default_tags_schema()
-        tag_schema['name'] = [not_empty, unicode]
+        tag_schema['name'] = [not_empty, six.text_type]

         # Flag this object as the current one
         harvest_object.current = True

@@ -593,8 +595,8 @@ class SpatialHarvester(HarvesterBase):
             # We need to explicitly provide a package ID, otherwise ckanext-spatial
             # won't be be able to link the extent to the package.
-            package_dict['id'] = unicode(uuid.uuid4())
-            package_schema['id'] = [unicode]
+            package_dict['id'] = six.text_type(uuid.uuid4())
+            package_schema['id'] = [six.text_type]

             # Save reference to the package on the object
             harvest_object.package_id = package_dict['id']

@@ -608,8 +610,8 @@ class SpatialHarvester(HarvesterBase):
             try:
                 package_id = p.toolkit.get_action('package_create')(context, package_dict)
                 log.info('Created new package %s with guid %s', package_id, harvest_object.guid)
-            except p.toolkit.ValidationError, e:
-                self._save_object_error('Validation Error: %s' % str(e.error_summary), harvest_object, 'Import')
+            except p.toolkit.ValidationError as e:
+                self._save_object_error('Validation Error: %s' % six.text_type(e.error_summary), harvest_object, 'Import')
                 return False

         elif status == 'change':

@@ -654,8 +656,8 @@ class SpatialHarvester(HarvesterBase):
             try:
                 package_id = p.toolkit.get_action('package_update')(context, package_dict)
                 log.info('Updated package %s with guid %s', package_id, harvest_object.guid)
-            except p.toolkit.ValidationError, e:
-                self._save_object_error('Validation Error: %s' % str(e.error_summary), harvest_object, 'Import')
+            except p.toolkit.ValidationError as e:
+                self._save_object_error('Validation Error: %s' % six.text_type(e.error_summary), harvest_object, 'Import')
                 return False

         model.Session.commit()

@@ -670,13 +672,13 @@ class SpatialHarvester(HarvesterBase):
         '''
         try:
             capabilities_url = wms.WMSCapabilitiesReader().capabilities_url(url)
-            res = urllib2.urlopen(capabilities_url, None, 10)
+            res = urlopen(capabilities_url, None, 10)
             xml = res.read()

             s = wms.WebMapService(url, xml=xml)
             return isinstance(s.contents, dict) and s.contents != {}
-        except Exception, e:
-            log.error('WMS check for %s failed with exception: %s' % (url, str(e)))
+        except Exception as e:
+            log.error('WMS check for %s failed with exception: %s' % (url, six.text_type(e)))
             return False

     def _get_object_extra(self, harvest_object, key):

@@ -767,7 +769,7 @@ class SpatialHarvester(HarvesterBase):
         DEPRECATED: Use _get_content_as_unicode instead
         '''
         url = url.replace(' ', '%20')
-        http_response = urllib2.urlopen(url)
+        http_response = urlopen(url)
         return http_response.read()

     def _get_content_as_unicode(self, url):

@@ -818,8 +820,8 @@ class SpatialHarvester(HarvesterBase):
         try:
             xml = etree.fromstring(document_string)
-        except etree.XMLSyntaxError, e:
-            self._save_object_error('Could not parse XML file: {0}'.format(str(e)), harvest_object, 'Import')
+        except etree.XMLSyntaxError as e:
+            self._save_object_error('Could not parse XML file: {0}'.format(six.text_type(e)), harvest_object, 'Import')
             return False, None, []

         valid, profile, errors = validator.is_valid(xml)
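base.py standardises on `six` so the same file runs under CKAN 2.7/2.8 (Python 2) and 2.9 (Python 3). A short runnable digest of the idioms the diff swaps in (requires `pip install six`):

import six
from six.moves.urllib.parse import urlparse
from six.moves.urllib.request import urlopen  # replaces py2-only urllib2.urlopen

d = {"a": 1, "b": 2}
for key, value in d.items():        # replaces py2-only d.iteritems()
    pass

text = six.text_type(123)           # replaces py2-only unicode(123)
assert isinstance(text, six.string_types)  # replaces isinstance(x, basestring)

print(urlparse("http://localhost/csw").netloc)  # 'localhost'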
@@ -1,6 +1,6 @@
 import re
-import urllib
-import urlparse
+import six
+from six.moves.urllib.parse import urlparse, urlunparse, urlencode

 import logging

@@ -22,7 +22,7 @@ class CSWHarvester(SpatialHarvester, SingletonPlugin):
     '''
     implements(IHarvester)

-    csw=None
+    csw = None

     def info(self):
         return {

@@ -31,13 +31,12 @@ class CSWHarvester(SpatialHarvester, SingletonPlugin):
         'description': 'A server that implements OGC\'s Catalog Service for the Web (CSW) standard'
         }

-
     def get_original_url(self, harvest_object_id):
         obj = model.Session.query(HarvestObject).\
                             filter(HarvestObject.id==harvest_object_id).\
                             first()

-        parts = urlparse.urlparse(obj.source.url)
+        parts = urlparse(obj.source.url)

         params = {
             'SERVICE': 'CSW',

@@ -48,12 +47,12 @@ class CSWHarvester(SpatialHarvester, SingletonPlugin):
             'ID': obj.guid
         }

-        url = urlparse.urlunparse((
+        url = urlunparse((
             parts.scheme,
             parts.netloc,
             parts.path,
             None,
-            urllib.urlencode(params),
+            urlencode(params),
             None
         ))

@@ -72,7 +71,7 @@ class CSWHarvester(SpatialHarvester, SingletonPlugin):
         try:
             self._setup_csw_client(url)
-        except Exception, e:
+        except Exception as e:
             self._save_gather_error('Error contacting the CSW server: %s' % e, harvest_job)
             return None

@@ -100,14 +99,13 @@ class CSWHarvester(SpatialHarvester, SingletonPlugin):
                     continue

                 guids_in_harvest.add(identifier)
-            except Exception, e:
+            except Exception as e:
                 self._save_gather_error('Error for the identifier %s [%r]' % (identifier,e), harvest_job)
                 continue

-
-        except Exception, e:
+        except Exception as e:
             log.error('Exception: %s' % text_traceback())
-            self._save_gather_error('Error gathering the identifiers from the CSW server [%s]' % str(e), harvest_job)
+            self._save_gather_error('Error gathering the identifiers from the CSW server [%s]' % six.text_type(e), harvest_job)
             return None

         new = guids_in_harvest - guids_in_db

@@ -157,7 +155,7 @@ class CSWHarvester(SpatialHarvester, SingletonPlugin):
         url = harvest_object.source.url
         try:
             self._setup_csw_client(url)
-        except Exception, e:
+        except Exception as e:
             self._save_object_error('Error contacting the CSW server: %s' % e,
                                     harvest_object)
             return False

@@ -165,7 +163,7 @@ class CSWHarvester(SpatialHarvester, SingletonPlugin):
         identifier = harvest_object.guid
         try:
             record = self.csw.getrecordbyid([identifier], outputschema=self.output_schema())
-        except Exception, e:
+        except Exception as e:
             self._save_object_error('Error getting the CSW record with GUID %s' % identifier, harvest_object)
             return False

@@ -182,7 +180,7 @@ class CSWHarvester(SpatialHarvester, SingletonPlugin):
             harvest_object.content = content.strip()
             harvest_object.save()
-        except Exception,e:
+        except Exception as e:
             self._save_object_error('Error saving the harvest object for GUID %s [%r]' % \
                                     (identifier, e), harvest_object)
             return False

@@ -192,4 +190,3 @@ class CSWHarvester(SpatialHarvester, SingletonPlugin):
     def _setup_csw_client(self, url):
         self.csw = CswService(url)
-
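A standalone sketch of the URL rebuilding in `get_original_url` with the new `six.moves` imports. The URL and GUID are made-up, and the middle request parameters are assumptions based on a typical CSW GetRecordById request (the diff only shows 'SERVICE' and 'ID'):

from six.moves.urllib.parse import urlparse, urlunparse, urlencode

parts = urlparse("http://example.com/csw?old=query")
params = {
    "SERVICE": "CSW",
    "VERSION": "2.0.2",          # assumed, not shown in the diff
    "REQUEST": "GetRecordById",  # assumed, not shown in the diff
    "ID": "some-harvest-object-guid",
}
url = urlunparse((parts.scheme, parts.netloc, parts.path, None, urlencode(params), None))
print(url)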
@@ -52,7 +52,7 @@ class DocHarvester(SpatialHarvester, SingletonPlugin):
         # Get contents
         try:
             content = self._get_content_as_unicode(url)
-        except Exception,e:
+        except Exception as e:
             self._save_gather_error('Unable to get content for URL: %s: %r' % \
                                     (url, e),harvest_job)
             return None
@@ -8,8 +8,9 @@ but can be easily adapted for other INSPIRE/ISO19139 XML metadata
 - GeminiWafHarvester - An index page with links to GEMINI resources

 '''
+import six
 import os
-from urlparse import urlparse
+from six.moves.urllib.parse import urlparse
 from datetime import datetime
 from numbers import Number
 import uuid

@@ -70,12 +71,12 @@ class GeminiHarvester(SpatialHarvester):
         try:
             self.import_gemini_object(harvest_object.content)
             return True
-        except Exception, e:
+        except Exception as e:
             log.error('Exception during import: %s' % text_traceback())
-            if not str(e).strip():
+            if not six.text_type(e).strip():
                 self._save_object_error('Error importing Gemini document.', harvest_object, 'Import')
             else:
-                self._save_object_error('Error importing Gemini document: %s' % str(e), harvest_object, 'Import')
+                self._save_object_error('Error importing Gemini document: %s' % six.text_type(e), harvest_object, 'Import')
             if debug_exception_mode:
                 raise

@@ -97,7 +98,7 @@ class GeminiHarvester(SpatialHarvester):
             log.error('Errors found for object with GUID %s:' % self.obj.guid)
             self._save_object_error(out,self.obj,'Import')

-        unicode_gemini_string = etree.tostring(xml, encoding=unicode, pretty_print=True)
+        unicode_gemini_string = etree.tostring(xml, encoding='utf8', pretty_print=True)

         # may raise Exception for errors
         package_dict = self.write_package_from_gemini_string(unicode_gemini_string)

@@ -223,10 +224,10 @@ class GeminiHarvester(SpatialHarvester):
             extras['licence_url'] = licence_url_extracted

         extras['access_constraints'] = gemini_values.get('limitations-on-public-access','')
-        if gemini_values.has_key('temporal-extent-begin'):
+        if 'temporal-extent-begin' in gemini_values:
             #gemini_values['temporal-extent-begin'].sort()
             extras['temporal_coverage-from'] = gemini_values['temporal-extent-begin']
-        if gemini_values.has_key('temporal-extent-end'):
+        if 'temporal-extent-end' in gemini_values:
             #gemini_values['temporal-extent-end'].sort()
             extras['temporal_coverage-to'] = gemini_values['temporal-extent-end']

@@ -274,7 +275,7 @@ class GeminiHarvester(SpatialHarvester):
         if package is None or package.title != gemini_values['title']:
             name = self.gen_new_name(gemini_values['title'])
             if not name:
-                name = self.gen_new_name(str(gemini_guid))
+                name = self.gen_new_name(six.text_type(gemini_guid))
             if not name:
                 raise Exception('Could not generate a unique name from the title or the GUID. Please choose a more unique title.')
             package_dict['name'] = name

@@ -318,8 +319,8 @@ class GeminiHarvester(SpatialHarvester):
             view_resources[0]['ckan_recommended_wms_preview'] = True

         extras_as_dict = []
-        for key,value in extras.iteritems():
-            if isinstance(value,(basestring,Number)):
+        for key,value in extras.items():
+            if isinstance(value, six.string_types + (Number,)):
                 extras_as_dict.append({'key':key,'value':value})
             else:
                 extras_as_dict.append({'key':key,'value':json.dumps(value)})

@@ -412,8 +413,8 @@ class GeminiHarvester(SpatialHarvester):
         else:
             counter = 1
             while counter < 101:
-                if name+str(counter) not in taken:
-                    return name+str(counter)
+                if name+six.text_type(counter) not in taken:
+                    return name+six.text_type(counter)
                 counter = counter + 1
             return None

@@ -453,7 +454,7 @@ class GeminiHarvester(SpatialHarvester):
         # The default package schema does not like Upper case tags
         tag_schema = logic.schema.default_tags_schema()
-        tag_schema['name'] = [not_empty,unicode]
+        tag_schema['name'] = [not_empty,six.text_type]
         package_schema['tags'] = tag_schema

         # TODO: user

@@ -466,8 +467,8 @@ class GeminiHarvester(SpatialHarvester):
         if not package:
             # We need to explicitly provide a package ID, otherwise ckanext-spatial
             # won't be be able to link the extent to the package.
-            package_dict['id'] = unicode(uuid.uuid4())
-            package_schema['id'] = [unicode]
+            package_dict['id'] = six.text_type(uuid.uuid4())
+            package_schema['id'] = [six.text_type]

             action_function = get_action('package_create')
         else:

@@ -476,8 +477,8 @@ class GeminiHarvester(SpatialHarvester):
         try:
             package_dict = action_function(context, package_dict)
-        except ValidationError,e:
-            raise Exception('Validation Error: %s' % str(e.error_summary))
+        except ValidationError as e:
+            raise Exception('Validation Error: %s' % six.text_type(e.error_summary))
             if debug_exception_mode:
                 raise

@@ -539,7 +540,7 @@ class GeminiCswHarvester(GeminiHarvester, SingletonPlugin):
         try:
             self._setup_csw_client(url)
-        except Exception, e:
+        except Exception as e:
             self._save_gather_error('Error contacting the CSW server: %s' % e, harvest_job)
             return None

@@ -565,13 +566,13 @@ class GeminiCswHarvester(GeminiHarvester, SingletonPlugin):
                     ids.append(obj.id)
                     used_identifiers.append(identifier)
-                except Exception, e:
+                except Exception as e:
                     self._save_gather_error('Error for the identifier %s [%r]' % (identifier,e), harvest_job)
                     continue

-        except Exception, e:
+        except Exception as e:
             log.error('Exception: %s' % text_traceback())
-            self._save_gather_error('Error gathering the identifiers from the CSW server [%s]' % str(e), harvest_job)
+            self._save_gather_error('Error gathering the identifiers from the CSW server [%s]' % six.text_type(e), harvest_job)
             return None

         if len(ids) == 0:

@@ -587,7 +588,7 @@ class GeminiCswHarvester(GeminiHarvester, SingletonPlugin):
         url = harvest_object.source.url
         try:
             self._setup_csw_client(url)
-        except Exception, e:
+        except Exception as e:
             self._save_object_error('Error contacting the CSW server: %s' % e,
                                     harvest_object)
             return False

@@ -595,7 +596,7 @@ class GeminiCswHarvester(GeminiHarvester, SingletonPlugin):
         identifier = harvest_object.guid
         try:
             record = self.csw.getrecordbyid([identifier])
-        except Exception, e:
+        except Exception as e:
             self._save_object_error('Error getting the CSW record with GUID %s' % identifier, harvest_object)
             return False

@@ -608,7 +609,7 @@ class GeminiCswHarvester(GeminiHarvester, SingletonPlugin):
             # Save the fetch contents in the HarvestObject
             harvest_object.content = record['xml']
             harvest_object.save()
-        except Exception,e:
+        except Exception as e:
             self._save_object_error('Error saving the harvest object for GUID %s [%r]' % \
                                     (identifier, e), harvest_object)
             return False

@@ -646,7 +647,7 @@ class GeminiDocHarvester(GeminiHarvester, SingletonPlugin):
         # Get contents
         try:
             content = self._get_content(url)
-        except Exception,e:
+        except Exception as e:
             self._save_gather_error('Unable to get content for URL: %s: %r' % \
                                     (url, e),harvest_job)
             return None

@@ -668,7 +669,7 @@ class GeminiDocHarvester(GeminiHarvester, SingletonPlugin):
             else:
                 self._save_gather_error('Could not get the GUID for source %s' % url, harvest_job)
                 return None
-        except Exception, e:
+        except Exception as e:
             self._save_gather_error('Error parsing the document. Is this a valid Gemini document?: %s [%r]'% (url,e),harvest_job)
|
||||
if debug_exception_mode:
|
||||
raise
|
||||
|
@ -707,7 +708,7 @@ class GeminiWafHarvester(GeminiHarvester, SingletonPlugin):
|
|||
# Get contents
|
||||
try:
|
||||
content = self._get_content(url)
|
||||
except Exception,e:
|
||||
except Exception as e:
|
||||
self._save_gather_error('Unable to get content for URL: %s: %r' % \
|
||||
(url, e),harvest_job)
|
||||
return None
|
||||
|
@ -716,7 +717,7 @@ class GeminiWafHarvester(GeminiHarvester, SingletonPlugin):
|
|||
for url in self._extract_urls(content,url):
|
||||
try:
|
||||
content = self._get_content(url)
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
msg = 'Couldn\'t harvest WAF link: %s: %s' % (url, e)
|
||||
self._save_gather_error(msg,harvest_job)
|
||||
continue
|
||||
|
@ -737,11 +738,11 @@ class GeminiWafHarvester(GeminiHarvester, SingletonPlugin):
|
|||
ids.append(obj.id)
|
||||
|
||||
|
||||
except Exception,e:
|
||||
except Exception as e:
|
||||
msg = 'Could not get GUID for source %s: %r' % (url,e)
|
||||
self._save_gather_error(msg,harvest_job)
|
||||
continue
|
||||
except Exception,e:
|
||||
except Exception as e:
|
||||
msg = 'Error extracting URLs from %s' % url
|
||||
self._save_gather_error(msg,harvest_job)
|
||||
return None
|
||||
|
@ -765,7 +766,7 @@ class GeminiWafHarvester(GeminiHarvester, SingletonPlugin):
|
|||
try:
|
||||
parser = etree.HTMLParser()
|
||||
tree = etree.fromstring(content, parser=parser)
|
||||
except Exception, inst:
|
||||
except Exception as inst:
|
||||
msg = 'Couldn\'t parse content into a tree: %s: %s' \
|
||||
% (inst, content)
|
||||
raise Exception(msg)
|
||||
|
@ -795,5 +796,3 @@ class GeminiWafHarvester(GeminiHarvester, SingletonPlugin):
|
|||
base_url += '/'
|
||||
log.debug('WAF base URL: %s', base_url)
|
||||
return [base_url + i for i in urls]
|
||||
|
||||
|
||||
|
|
|
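Every hunk in the file above applies the same two porting patterns: the `except ... as e` syntax, which is the only exception-binding form Python 3 accepts, and `six.text_type` so error messages have a consistent unicode type on both interpreters. A minimal standalone sketch of the pattern (assumes only that `six` is installed; the function name is illustrative, not the harvester's own):

    import six

    def describe_error(exc):
        # six.text_type is unicode on Python 2 and str on Python 3,
        # so the formatted message has the same type everywhere.
        return u'Error importing Gemini document: %s' % six.text_type(exc)

    try:
        raise ValueError(u'malformed metadata')
    except ValueError as e:  # "except ValueError, e:" is a SyntaxError on Python 3
        print(describe_error(e))
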
@ -1,6 +1,10 @@
from __future__ import print_function

import six
from six.moves.urllib.parse import urljoin
import logging
import hashlib
from urlparse import urljoin

import dateutil.parser
import pyparsing as parse
import requests

@ -61,7 +65,7 @@ class WAFHarvester(SpatialHarvester, SingletonPlugin):
try:
response = requests.get(source_url, timeout=60)
response.raise_for_status()
except requests.exceptions.RequestException, e:
except requests.exceptions.RequestException as e:
self._save_gather_error('Unable to get content for URL: %s: %r' % \
(source_url, e),harvest_job)
return None

@ -96,7 +100,7 @@ class WAFHarvester(SpatialHarvester, SingletonPlugin):
try:
for url, modified_date in _extract_waf(content,source_url,scraper):
url_to_modified_harvest[url] = modified_date
except Exception,e:
except Exception as e:
msg = 'Error extracting URLs from %s, error was %s' % (source_url, e)
self._save_gather_error(msg,harvest_job)
return None

@ -195,7 +199,7 @@ class WAFHarvester(SpatialHarvester, SingletonPlugin):
# Get contents
try:
content = self._get_content_as_unicode(url)
except Exception, e:
except Exception as e:
msg = 'Could not harvest WAF link {0}: {1}'.format(url, e)
self._save_object_error(msg, harvest_object)
return False

@ -298,8 +302,8 @@ def _extract_waf(content, base_url, scraper, results = None, depth=0):
try:
response = requests.get(new_url)
content = response.content
except Exception, e:
print str(e)
except Exception as e:
print(six.text_type(e))
continue
_extract_waf(content, new_url, scraper, results, new_depth)
continue

@ -308,11 +312,10 @@ def _extract_waf(content, base_url, scraper, results = None, depth=0):
date = record.date
if date:
try:
date = str(dateutil.parser.parse(date))
except Exception, e:
date = six.text_type(dateutil.parser.parse(date))
except Exception as e:
raise
date = None
results.append((urljoin(base_url, record.url), date))

return results

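The import swap at the top of this file is the standard `six.moves` indirection: `urlparse` only exists on Python 2, while `six.moves.urllib.parse` resolves to `urllib.parse` on Python 3 and `urlparse` on Python 2. A quick sketch:

    # Works unchanged on Python 2 and 3 (assumes six is installed).
    from six.moves.urllib.parse import urljoin

    # Relative WAF links are resolved against the base URL, as in _extract_waf.
    assert urljoin('http://example.com/waf/', 'doc.xml') == 'http://example.com/waf/doc.xml'
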
@ -1,9 +1,10 @@
import logging
from pylons import config

from ckan import plugins as p
from ckan.lib import helpers as h

from ckantoolkit import config

log = logging.getLogger(__name__)


@ -55,7 +56,7 @@ def get_responsible_party(value):
out = []
parties = h.json.loads(value)
for party in parties:
roles = [formatted[role] if role in formatted.keys() else p.toolkit._(role.capitalize()) for role in party['roles']]
roles = [formatted[role] if role in list(formatted.keys()) else p.toolkit._(role.capitalize()) for role in party['roles']]
out.append('{0} ({1})'.format(party['name'], ', '.join(roles)))
return '; '.join(out)
except (ValueError, TypeError):

@ -68,4 +69,4 @@ def get_common_map_config():
base map (ie those starting with 'ckanext.spatial.common_map.')
'''
namespace = 'ckanext.spatial.common_map.'
return dict([(k.replace(namespace, ''), v) for k, v in config.iteritems() if k.startswith(namespace)])
return dict([(k.replace(namespace, ''), v) for k, v in config.items() if k.startswith(namespace)])

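`dict.iteritems()` is gone in Python 3; `items()` exists on both interpreters (a list on Python 2, a view on Python 3, either fine to iterate once). A sketch of the helper's filtering idea with hypothetical config values:

    namespace = 'ckanext.spatial.common_map.'
    config = {'ckanext.spatial.common_map.type': 'custom', 'other.key': 'x'}  # hypothetical values
    common = dict([(k.replace(namespace, ''), v) for k, v in config.items() if k.startswith(namespace)])
    assert common == {'type': 'custom'}
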
@ -1,15 +1,18 @@
import six
import logging
from string import Template

from ckan.model import Session, Package
from ckan.lib.base import config
import ckantoolkit as tk

from ckanext.spatial.model import PackageExtent
from shapely.geometry import asShape


from ckanext.spatial.geoalchemy_common import (WKTElement, ST_Transform,
compare_geometry_fields,
)
config = tk.config

log = logging.getLogger(__name__)

@ -96,10 +99,10 @@ def validate_bbox(bbox_values):
Any problems and it returns None.
'''

if isinstance(bbox_values,basestring):
if isinstance(bbox_values,six.string_types):
bbox_values = bbox_values.split(',')

if len(bbox_values) is not 4:
if len(bbox_values) != 4:
return None

try:

@ -108,7 +111,7 @@ def validate_bbox(bbox_values):
bbox['miny'] = float(bbox_values[1])
bbox['maxx'] = float(bbox_values[2])
bbox['maxy'] = float(bbox_values[3])
except ValueError,e:
except ValueError as e:
return None

return bbox

@ -167,7 +170,7 @@ def bbox_query_ordered(bbox, srid=None):

input_geometry = _bbox_2_wkt(bbox, srid)

params = {'query_bbox': str(input_geometry),
params = {'query_bbox': six.text_type(input_geometry),
'query_srid': input_geometry.srid}

# First get the area of the query box

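Two fixes above are worth spelling out: `basestring` does not exist on Python 3, and `len(bbox_values) is not 4` compared object identity rather than value, which only worked by accident for small interned ints (CPython 3.8+ warns about it). A standalone sketch (function name hypothetical, assumes six):

    import six

    def parse_bbox(bbox_values):
        if isinstance(bbox_values, six.string_types):  # (str, unicode) on py2, (str,) on py3
            bbox_values = bbox_values.split(',')
        if len(bbox_values) != 4:  # value comparison, not "is not"
            return None
        return [float(v) for v in bbox_values]

    assert parse_bbox('-180,-90,180,90') == [-180.0, -90.0, 180.0, 90.0]
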
@ -2,7 +2,7 @@
Some very thin wrapper classes around those in OWSLib
for convenience.
"""

import six
import logging

from owslib.etree import etree

@ -17,14 +17,14 @@ class OwsService(object):
def __init__(self, endpoint=None):
if endpoint is not None:
self._ows(endpoint)


def __call__(self, args):
return getattr(self, args.operation)(**self._xmd(args))


@classmethod
def _operations(cls):
return [x for x in dir(cls) if not x.startswith("_")]


def _xmd(self, obj):
md = {}
for attr in [x for x in dir(obj) if not x.startswith("_")]:

@ -33,7 +33,7 @@ class OwsService(object):
pass
elif callable(val):
pass
elif isinstance(val, basestring):
elif isinstance(val, six.string_types):
md[attr] = val
elif isinstance(val, int):
md[attr] = val

@ -42,7 +42,7 @@ class OwsService(object):
else:
md[attr] = self._xmd(val)
return md


def _ows(self, endpoint=None, **kw):
if not hasattr(self, "_Implementation"):
raise NotImplementedError("Needs an Implementation")

@ -51,7 +51,7 @@ class OwsService(object):
raise ValueError("Must specify a service endpoint")
self.__ows_obj__ = self._Implementation(endpoint)
return self.__ows_obj__


def getcapabilities(self, debug=False, **kw):
ows = self._ows(**kw)
caps = self._xmd(ows)

@ -60,7 +60,7 @@ class OwsService(object):
if "response" in caps: del caps["response"]
if "owscommon" in caps: del caps["owscommon"]
return caps


class CswService(OwsService):
"""
Perform various operations on a CSW service

@ -97,7 +97,7 @@ class CswService(OwsService):
csw.exceptionreport.exceptions
#log.error(err)
raise CswError(err)
return [self._xmd(r) for r in csw.records.values()]
return [self._xmd(r) for r in list(csw.records.values())]

def getidentifiers(self, qtype=None, typenames="csw:Record", esn="brief",
keywords=[], limit=None, page=10, outputschema="gmd",

@ -134,7 +134,7 @@ class CswService(OwsService):
if matches == 0:
matches = csw.results['matches']

identifiers = csw.records.keys()
identifiers = list(csw.records.keys())
if limit is not None:
identifiers = identifiers[:(limit-startposition)]
for ident in identifiers:

@ -170,7 +170,7 @@ class CswService(OwsService):
raise CswError(err)
if not csw.records:
return
record = self._xmd(csw.records.values()[0])
record = self._xmd(list(csw.records.values())[0])

## strip off the enclosing results container, we only want the metadata
#md = csw._exml.find("/gmd:MD_Metadata")#, namespaces=namespaces)

@ -178,13 +178,13 @@ class CswService(OwsService):
md = csw._exml.find("/{http://www.isotc211.org/2005/gmd}MD_Metadata")
mdtree = etree.ElementTree(md)
try:
record["xml"] = etree.tostring(mdtree, pretty_print=True, encoding=unicode)
record["xml"] = etree.tostring(mdtree, pretty_print=True, encoding=str)
except TypeError:
# API incompatibilities between different flavours of elementtree
try:
record["xml"] = etree.tostring(mdtree, pretty_print=True, encoding=unicode)
record["xml"] = etree.tostring(mdtree, pretty_print=True, encoding=str)
except AssertionError:
record["xml"] = etree.tostring(md, pretty_print=True, encoding=unicode)
record["xml"] = etree.tostring(md, pretty_print=True, encoding=str)

record["xml"] = '<?xml version="1.0" encoding="UTF-8"?>\n' + record["xml"]
record["tree"] = mdtree

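On Python 3, `dict.keys()` and `dict.values()` return views that cannot be indexed or sliced, so `csw.records.values()[0]` raises a TypeError; wrapping the view in `list()` restores the Python 2 behaviour at the cost of one copy. Sketch with hypothetical CSW results:

    records = {'id-1': 'first record', 'id-2': 'second record'}  # hypothetical
    first = list(records.values())[0]        # records.values()[0] fails on py3
    identifiers = list(records.keys())[:10]  # views cannot be sliced either
    assert first in records.values()
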
@ -3,10 +3,10 @@ Library for creating reports that can be displayed easily in an HTML table
and then saved as a CSV.
'''

from six import text_type, StringIO
import datetime
import csv
try: from cStringIO import StringIO
except ImportError: from StringIO import StringIO


class ReportTable(object):
def __init__(self, column_names):

@ -51,10 +51,10 @@ class ReportTable(object):
for cell in row:
if isinstance(cell, datetime.datetime):
cell = cell.strftime('%Y-%m-%d %H:%M')
elif isinstance(cell, (int, long)):
cell = str(cell)
elif isinstance(cell, int):
cell = text_type(cell)
elif isinstance(cell, (list, tuple)):
cell = str(cell)
cell = text_type(cell)
elif cell is None:
cell = ''
else:

@ -62,8 +62,7 @@ class ReportTable(object):
row_formatted.append(cell)
try:
csvwriter.writerow(row_formatted)
except Exception, e:
except Exception as e:
raise Exception("%s: %s, %s"%(e, row, row_formatted))
csvout.seek(0)
return csvout.read()

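The py2-only `cStringIO`/`StringIO` fallback and the `long` type both disappear here; `six.StringIO` and `six.text_type` cover both interpreters. A minimal sketch of the same CSV round trip:

    import csv
    from six import StringIO, text_type

    out = StringIO()
    csv.writer(out).writerow([text_type(42), text_type([1, 2])])
    out.seek(0)
    print(out.read())  # 42,"[1, 2]"
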
@ -1,3 +1,4 @@
from __future__ import absolute_import
# this is a namespace package
try:
import pkg_resources

@ -6,5 +7,5 @@ except ImportError:
import pkgutil
__path__ = pkgutil.extend_path(__path__, __name__)

from package_extent import *
from harvested_metadata import *
from .package_extent import *
from .harvested_metadata import *

@ -1,4 +1,5 @@
from lxml import etree
import six

import logging
log = logging.getLogger(__name__)

@ -37,10 +38,7 @@ class MappedXmlDocument(MappedXmlObject):
def get_xml_tree(self):
if self.xml_tree is None:
parser = etree.XMLParser(remove_blank_text=True)
if type(self.xml_str) == unicode:
xml_str = self.xml_str.encode('utf8')
else:
xml_str = self.xml_str
xml_str = six.ensure_str(self.xml_str)
self.xml_tree = etree.fromstring(xml_str, parser=parser)
return self.xml_tree

@ -95,7 +93,7 @@ class MappedXmlElement(MappedXmlObject):
elif type(element) == etree._ElementStringResult:
value = str(element)
elif type(element) == etree._ElementUnicodeResult:
value = unicode(element)
value = str(element)
else:
value = self.element_tostring(element)
return value

@ -954,7 +952,7 @@ class ISODocument(MappedXmlDocument):
for responsible_party in values['responsible-organisation']:
if isinstance(responsible_party, dict) and \
isinstance(responsible_party.get('contact-info'), dict) and \
responsible_party['contact-info'].has_key('email'):
'email' in responsible_party['contact-info']:
value = responsible_party['contact-info']['email']
if value:
break

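`six.ensure_str` (available since six 1.12) collapses the old four-line unicode/str dance: it encodes text to the byte-oriented `str` on Python 2 and decodes bytes to `str` on Python 3, so lxml always receives the native string type. Sketch:

    import six

    assert six.ensure_str(u'<gmd:MD_Metadata/>') == '<gmd:MD_Metadata/>'
    assert six.ensure_str(b'<gmd:MD_Metadata/>') == '<gmd:MD_Metadata/>'
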
@ -33,7 +33,7 @@ def setup(srid=None):
if not package_extent_table.exists():
try:
package_extent_table.create()
except Exception,e:
except Exception as e:
# Make sure the table does not remain incorrectly created
# (eg without geom column or constraints)
if package_extent_table.exists():

@ -3,12 +3,25 @@ import re
import mimetypes
from logging import getLogger

from pylons import config
import six
import ckantoolkit as tk

from ckan import plugins as p

from ckan.lib.helpers import json

if tk.check_ckan_version(min_version="2.9.0"):
from ckanext.spatial.plugin.flask_plugin import (
SpatialQueryMixin, HarvestMetadataApiMixin
)
else:
from ckanext.spatial.plugin.pylons_plugin import (
SpatialQueryMixin, HarvestMetadataApiMixin
)


config = tk.config


def check_geoalchemy_requirement():
'''Checks if a suitable geoalchemy version installed

@ -22,7 +35,7 @@ def check_geoalchemy_requirement():
'For more details see the "Troubleshooting" section of the ' +
'install documentation')

if p.toolkit.check_ckan_version(min_version='2.3'):
if tk.check_ckan_version(min_version='2.3'):
try:
import geoalchemy2
except ImportError:

@ -44,19 +57,19 @@ def package_error_summary(error_dict):
def prettify(field_name):
field_name = re.sub('(?<!\w)[Uu]rl(?!\w)', 'URL',
field_name.replace('_', ' ').capitalize())
return p.toolkit._(field_name.replace('_', ' '))
return tk._(field_name.replace('_', ' '))

summary = {}
for key, error in error_dict.iteritems():
for key, error in error_dict.items():
if key == 'resources':
summary[p.toolkit._('Resources')] = p.toolkit._(
summary[tk._('Resources')] = tk._(
'Package resource(s) invalid')
elif key == 'extras':
summary[p.toolkit._('Extras')] = p.toolkit._('Missing Value')
summary[tk._('Extras')] = tk._('Missing Value')
elif key == 'extras_validation':
summary[p.toolkit._('Extras')] = error[0]
summary[tk._('Extras')] = error[0]
else:
summary[p.toolkit._(prettify(key))] = error[0]
summary[tk._(prettify(key))] = error[0]
return summary

class SpatialMetadata(p.SingletonPlugin):

@ -69,7 +82,7 @@ class SpatialMetadata(p.SingletonPlugin):
def configure(self, config):
from ckanext.spatial.model.package_extent import setup as setup_model

if not p.toolkit.asbool(config.get('ckan.spatial.testing', 'False')):
if not tk.asbool(config.get('ckan.spatial.testing', 'False')):
log.debug('Setting up the spatial model')
setup_model()

@ -77,9 +90,9 @@ class SpatialMetadata(p.SingletonPlugin):
''' Set up the resource library, public directory and
template directory for all the spatial extensions
'''
p.toolkit.add_public_directory(config, 'public')
p.toolkit.add_template_directory(config, 'templates')
p.toolkit.add_resource('public', 'ckanext-spatial')
tk.add_public_directory(config, '../public')
tk.add_template_directory(config, '../templates')
tk.add_resource('../public', 'ckanext-spatial')

# Add media types for common extensions not included in the mimetypes
# module

@ -110,24 +123,24 @@ class SpatialMetadata(p.SingletonPlugin):
try:
log.debug('Received: %r' % extra.value)
geometry = json.loads(extra.value)
except ValueError,e:
error_dict = {'spatial':[u'Error decoding JSON object: %s' % str(e)]}
raise p.toolkit.ValidationError(error_dict, error_summary=package_error_summary(error_dict))
except TypeError,e:
error_dict = {'spatial':[u'Error decoding JSON object: %s' % str(e)]}
raise p.toolkit.ValidationError(error_dict, error_summary=package_error_summary(error_dict))
except ValueError as e:
error_dict = {'spatial':[u'Error decoding JSON object: %s' % six.text_type(e)]}
raise tk.ValidationError(error_dict, error_summary=package_error_summary(error_dict))
except TypeError as e:
error_dict = {'spatial':[u'Error decoding JSON object: %s' % six.text_type(e)]}
raise tk.ValidationError(error_dict, error_summary=package_error_summary(error_dict))

try:
save_package_extent(package.id,geometry)

except ValueError,e:
error_dict = {'spatial':[u'Error creating geometry: %s' % str(e)]}
raise p.toolkit.ValidationError(error_dict, error_summary=package_error_summary(error_dict))
except Exception, e:
except ValueError as e:
error_dict = {'spatial':[u'Error creating geometry: %s' % six.text_type(e)]}
raise tk.ValidationError(error_dict, error_summary=package_error_summary(error_dict))
except Exception as e:
if bool(os.getenv('DEBUG')):
raise
error_dict = {'spatial':[u'Error: %s' % str(e)]}
raise p.toolkit.ValidationError(error_dict, error_summary=package_error_summary(error_dict))
error_dict = {'spatial':[u'Error: %s' % six.text_type(e)]}
raise tk.ValidationError(error_dict, error_summary=package_error_summary(error_dict))

elif (extra.state == 'active' and not extra.value) or extra.state == 'deleted':
# Delete extent from table

@ -150,9 +163,8 @@ class SpatialMetadata(p.SingletonPlugin):
'get_common_map_config' : spatial_helpers.get_common_map_config,
}

class SpatialQuery(p.SingletonPlugin):
class SpatialQuery(SpatialQueryMixin, p.SingletonPlugin):

p.implements(p.IRoutes, inherit=True)
p.implements(p.IPackageController, inherit=True)
p.implements(p.IConfigurable, inherit=True)

@ -161,17 +173,10 @@ class SpatialQuery(p.SingletonPlugin):
def configure(self, config):

self.search_backend = config.get('ckanext.spatial.search_backend', 'postgis')
if self.search_backend != 'postgis' and not p.toolkit.check_ckan_version('2.0.1'):
if self.search_backend != 'postgis' and not tk.check_ckan_version('2.0.1'):
msg = 'The Solr backends for the spatial search require CKAN 2.0.1 or higher. ' + \
'Please upgrade CKAN or select the \'postgis\' backend.'
raise p.toolkit.CkanVersionException(msg)

def before_map(self, map):

map.connect('api_spatial_query', '/api/2/search/{register:dataset|package}/geo',
controller='ckanext.spatial.controllers.api:ApiController',
action='spatial_query')
return map
raise tk.CkanVersionException(msg)

def before_index(self, pkg_dict):
import shapely

@ -180,7 +185,7 @@ class SpatialQuery(p.SingletonPlugin):
if pkg_dict.get('extras_spatial', None) and self.search_backend in ('solr', 'solr-spatial-field'):
try:
geometry = json.loads(pkg_dict['extras_spatial'])
except ValueError, e:
except ValueError as e:
log.error('Geometry not valid GeoJSON, not indexing')
return pkg_dict

@ -330,7 +335,7 @@ class SpatialQuery(p.SingletonPlugin):
# Note: This will be deprecated at some point in favour of the
# Solr 4 spatial sorting capabilities
if search_params.get('sort') == 'spatial desc' and \
p.toolkit.asbool(config.get('ckanext.spatial.use_postgis_sorting', 'False')):
tk.asbool(config.get('ckanext.spatial.use_postgis_sorting', 'False')):
if search_params['q'] or search_params['fq']:
raise SearchError('Spatial ranking cannot be mixed with other search parameters')
# ...because it is too inefficient to use SOLR to filter

@ -365,7 +370,8 @@ class SpatialQuery(p.SingletonPlugin):
bbox_query_ids = [extent.package_id for extent in extents]

q = search_params.get('q','').strip() or '""'
new_q = '%s AND ' % q if q else ''
# Note: `"" AND` query doesn't work in github ci
new_q = '%s AND ' % q if q and q != '""' else ''
new_q += '(%s)' % ' OR '.join(['id:%s' % id for id in bbox_query_ids])

search_params['q'] = new_q

@ -377,9 +383,8 @@ class SpatialQuery(p.SingletonPlugin):

# Note: This will be deprecated at some point in favour of the
# Solr 4 spatial sorting capabilities

if search_params.get('extras', {}).get('ext_spatial') and \
p.toolkit.asbool(config.get('ckanext.spatial.use_postgis_sorting', 'False')):
tk.asbool(config.get('ckanext.spatial.use_postgis_sorting', 'False')):
# Apply the spatial sort
querier = PackageSearchQuery()
pkgs = []

@ -390,7 +395,8 @@ class SpatialQuery(p.SingletonPlugin):
search_results['results'] = pkgs
return search_results

class HarvestMetadataApi(p.SingletonPlugin):

class HarvestMetadataApi(HarvestMetadataApiMixin, p.SingletonPlugin):
'''
Harvest Metadata API
(previously called "InspireApi")

@ -398,31 +404,4 @@ class HarvestMetadataApi(p.SingletonPlugin):
A way for a user to view the harvested metadata XML, either as a raw file or
styled to view in a web browser.
'''
p.implements(p.IRoutes)

def before_map(self, route_map):
controller = "ckanext.spatial.controllers.api:HarvestMetadataApiController"

# Showing the harvest object content is an action of the default
# harvest plugin, so just redirect there
route_map.redirect('/api/2/rest/harvestobject/{id:.*}/xml',
'/harvest/object/{id}',
_redirect_code='301 Moved Permanently')

route_map.connect('/harvest/object/{id}/original', controller=controller,
action='display_xml_original')

route_map.connect('/harvest/object/{id}/html', controller=controller,
action='display_html')
route_map.connect('/harvest/object/{id}/html/original', controller=controller,
action='display_html_original')

# Redirect old URL to a nicer and unversioned one
route_map.redirect('/api/2/rest/harvestobject/:id/html',
'/harvest/object/{id}/html',
_redirect_code='301 Moved Permanently')

return route_map

def after_map(self, route_map):
return route_map
pass

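The structural change in this file is that route registration moves out of the plugin classes into per-framework mixins, chosen once at import time, so the same entry points work on Pylons-era and Flask-era CKAN. A condensed sketch of the shape (assumes a CKAN install; the mixin modules are the two new files added below):

    import ckan.plugins as p
    import ckantoolkit as tk

    if tk.check_ckan_version(min_version="2.9.0"):
        from ckanext.spatial.plugin.flask_plugin import SpatialQueryMixin
    else:
        from ckanext.spatial.plugin.pylons_plugin import SpatialQueryMixin

    class SpatialQuery(SpatialQueryMixin, p.SingletonPlugin):
        pass  # the framework-agnostic hooks (IPackageController etc.) stay here
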
@ -0,0 +1,29 @@
# -*- coding: utf-8 -*-

import ckan.plugins as p
import ckanext.spatial.views as blueprints
from ckanext.spatial.cli import get_commands


class SpatialQueryMixin(p.SingletonPlugin):
p.implements(p.IBlueprint)
p.implements(p.IClick)

# IBlueprint

def get_blueprint(self):
return [blueprints.api]

# IClick

def get_commands(self):
return get_commands()


class HarvestMetadataApiMixin(p.SingletonPlugin):
p.implements(p.IBlueprint)

# IBlueprint

def get_blueprint(self):
return [blueprints.harvest_metadata]

@ -0,0 +1,40 @@
import ckan.plugins as p



class SpatialQueryMixin(p.SingletonPlugin):
p.implements(p.IRoutes, inherit=True)

# IRoutes
def before_map(self, map):
map.connect('api_spatial_query', '/api/2/search/{register:dataset|package}/geo',
controller='ckanext.spatial.controllers.api:ApiController',
action='spatial_query')
return map

class HarvestMetadataApiMixin(p.SingletonPlugin):
p.implements(p.IRoutes, inherit=True)

def before_map(self, route_map):
controller = "ckanext.spatial.controllers.api:HarvestMetadataApiController"

# Showing the harvest object content is an action of the default
# harvest plugin, so just redirect there
route_map.redirect('/api/2/rest/harvestobject/{id:.*}/xml',
'/harvest/object/{id}',
_redirect_code='301 Moved Permanently')

route_map.connect('/harvest/object/{id}/original', controller=controller,
action='display_xml_original')

route_map.connect('/harvest/object/{id}/html', controller=controller,
action='display_html')
route_map.connect('/harvest/object/{id}/html/original', controller=controller,
action='display_html_original')

# Redirect old URL to a nicer and unversioned one
route_map.redirect('/api/2/rest/harvestobject/:id/html',
'/harvest/object/{id}/html',
_redirect_code='301 Moved Permanently')

return route_map

@ -0,0 +1,37 @@
dataset_map_js:
filter: rjsmin
output: ckanext-spatial/%(version)s_dataset_map.js
extra:
preload:
- base/main
contents:
- js/vendor/leaflet/leaflet.js
- js/common_map.js
- js/dataset_map.js

dataset_map_css:
filters: cssrewrite
output: ckanext-spatial/%(version)s_dataset_map.css
contents:
- js/vendor/leaflet/leaflet.css
- css/dataset_map.css

spatial_query_js:
filter: rjsmin
output: ckanext-spatial/%(version)s_spatial_query.js
extra:
preload:
- base/main
contents:
- js/vendor/leaflet/leaflet.js
- js/vendor/leaflet.draw/leaflet.draw.js
- js/common_map.js
- js/spatial_query.js

spatial_query_css:
filters: cssrewrite
output: ckanext-spatial/%(version)s_spatial_query.css
contents:
- js/vendor/leaflet/leaflet.css
- js/vendor/leaflet.draw/leaflet.draw.css
- css/spatial_query.css

@ -0,0 +1,2 @@
{% asset 'ckanext-spatial/dataset_map_js' %}
{% asset 'ckanext-spatial/dataset_map_css' %}

@ -14,8 +14,9 @@ extent
<div class="dataset-map" data-module="dataset-map" data-extent="{{ extent }}" data-module-site_url="{{ h.dump_json(h.url('/', locale='default', qualified=true)) }}" data-module-map_config="{{ h.dump_json(map_config) }}">
<div id="dataset-map-container"></div>
<div id="dataset-map-attribution">
{% snippet "spatial/snippets/map_attribution.html", map_config=map_config %}
{% snippet "spatial/snippets/map_attribution.html", map_config=map_config %}
</div>
</div>

{% resource 'ckanext-spatial/dataset_map' %}
{% set type = 'asset' if h.ckan_version().split('.')[1] | int >= 9 else 'resource' %}
{% include 'spatial/snippets/dataset_map_' ~ type ~ '.html' %}

@ -0,0 +1 @@
{% resource 'ckanext-spatial/dataset_map' %}

@ -2,29 +2,29 @@
Displays a map widget to define a spatial filter on the dataset search page sidebar

default_extent
Initial map extent (Optional, defaults to the whole world). It can be defined
either as a pair of coordinates or as a GeoJSON bounding box.
Initial map extent (Optional, defaults to the whole world). It can be defined
either as a pair of coordinates or as a GeoJSON bounding box.

e.g.
{% snippet "spatial/snippets/spatial_query.html", default_extent=[[15.62, -139.21], [64.92, -61.87]] %}
{% snippet "spatial/snippets/spatial_query.html", default_extent=[[15.62, -139.21], [64.92, -61.87]] %}

{% snippet "spatial/snippets/spatial_query.html", default_extent="{ \"type\": \"Polygon\", \"coordinates\": [[[74.89, 29.39],[74.89, 38.45], [60.50, 38.45], [60.50, 29.39], [74.89, 29.39]]]}" %}
{% snippet "spatial/snippets/spatial_query.html", default_extent="{ \"type\": \"Polygon\", \"coordinates\": [[[74.89, 29.39],[74.89, 38.45], [60.50, 38.45], [60.50, 29.39], [74.89, 29.39]]]}" %}

#}
<section id="dataset-map" class="module module-narrow module-shallow">
<h2 class="module-heading">
<i class="icon-medium icon-globe"></i>
{{ _('Filter by location') }}
<a href="{{ h.remove_url_param(['ext_bbox','ext_prev_extent', 'ext_location']) }}" class="action">{{ _('Clear') }}</a>
</h2>
{% set map_config = h.get_common_map_config() %}
<div class="dataset-map" data-module="spatial-query" data-default_extent="{{ default_extent }}" data-module-map_config="{{ h.dump_json(map_config) }}">
<div id="dataset-map-container"></div>
</div>
<div id="dataset-map-attribution">
{% snippet "spatial/snippets/map_attribution.html", map_config=map_config %}
</div>
<h2 class="module-heading">
<i class="icon-medium icon-globe"></i>
{{ _('Filter by location') }}
<a href="{{ h.remove_url_param(['ext_bbox','ext_prev_extent', 'ext_location']) }}" class="action">{{ _('Clear') }}</a>
</h2>
{% set map_config = h.get_common_map_config() %}
<div class="dataset-map" data-module="spatial-query" data-default_extent="{{ default_extent }}" data-module-map_config="{{ h.dump_json(map_config) }}">
<div id="dataset-map-container"></div>
</div>
<div id="dataset-map-attribution">
{% snippet "spatial/snippets/map_attribution.html", map_config=map_config %}
</div>
</section>

{% resource 'ckanext-spatial/spatial_query' %}

{% set type = 'asset' if h.ckan_version().split('.')[1] | int >= 9 else 'resource' %}
{% include 'spatial/snippets/spatial_query_' ~ type ~ '.html' %}

@ -0,0 +1,2 @@
{% asset 'ckanext-spatial/spatial_query_js' %}
{% asset 'ckanext-spatial/spatial_query_css' %}

@ -0,0 +1 @@
{% resource 'ckanext-spatial/spatial_query' %}

@ -1,7 +1,9 @@
# this is a namespace package
try:
import pkg_resources

pkg_resources.declare_namespace(__name__)
except ImportError:
import pkgutil

__path__ = pkgutil.extend_path(__path__, __name__)

@ -1,77 +1,27 @@
import os
import re
# -*- coding: utf-8 -*-

from sqlalchemy import Table
from nose.plugins.skip import SkipTest

from ckan.model import Session, repo, meta, engine_is_sqlite
from ckanext.spatial.geoalchemy_common import postgis_version
from ckanext.spatial.model.package_extent import setup as spatial_db_setup
from ckanext.harvest.model import setup as harvest_model_setup
import pytest

geojson_examples = {
'point':'{"type":"Point","coordinates":[100.0,0.0]}',
'point_2':'{"type":"Point","coordinates":[20,10]}',
'line':'{"type":"LineString","coordinates":[[100.0,0.0],[101.0,1.0]]}',
'polygon':'{"type":"Polygon","coordinates":[[[100.0,0.0],[101.0,0.0],[101.0,1.0],[100.0,1.0],[100.0,0.0]]]}',
'polygon_holes':'{"type":"Polygon","coordinates":[[[100.0,0.0],[101.0,0.0],[101.0,1.0],[100.0,1.0],[100.0,0.0]],[[100.2,0.2],[100.8,0.2],[100.8,0.8],[100.2,0.8],[100.2,0.2]]]}',
'multipoint':'{"type":"MultiPoint","coordinates":[[100.0,0.0],[101.0,1.0]]}',
'multiline':'{"type":"MultiLineString","coordinates":[[[100.0,0.0],[101.0,1.0]],[[102.0,2.0],[103.0,3.0]]]}',
'multipolygon':'{"type":"MultiPolygon","coordinates":[[[[102.0,2.0],[103.0,2.0],[103.0,3.0],[102.0,3.0],[102.0,2.0]]],[[[100.0,0.0],[101.0,0.0],[101.0,1.0],[100.0,1.0],[100.0,0.0]],[[100.2,0.2],[100.8,0.2],[100.8,0.8],[100.2,0.8],[100.2,0.2]]]]}'}


def _execute_script(script_path):

conn = Session.connection()
script = open(script_path, 'r').read()
for cmd in script.split(';'):
cmd = re.sub(r'--(.*)|[\n\t]', '', cmd)
if len(cmd):
conn.execute(cmd)

Session.commit()


def create_postgis_tables():
scripts_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'scripts')
if postgis_version()[:1] == '1':
_execute_script(os.path.join(scripts_path, 'spatial_ref_sys.sql'))
_execute_script(os.path.join(scripts_path, 'geometry_columns.sql'))
else:
_execute_script(os.path.join(scripts_path, 'spatial_ref_sys.sql'))
"point": '{"type":"Point","coordinates":[100.0,0.0]}',
"point_2": '{"type":"Point","coordinates":[20,10]}',
"line": '{"type":"LineString","coordinates":[[100.0,0.0],[101.0,1.0]]}',
"polygon": '{"type":"Polygon","coordinates":[[[100.0,0.0],[101.0,0.0],'
'[101.0,1.0],[100.0,1.0],[100.0,0.0]]]}',
"polygon_holes": '{"type":"Polygon","coordinates":[[[100.0,0.0],'
'[101.0,0.0],[101.0,1.0],[100.0,1.0],[100.0,0.0]],[[100.2,0.2],'
'[100.8,0.2],[100.8,0.8],[100.2,0.8],[100.2,0.2]]]}',
"multipoint": '{"type":"MultiPoint","coordinates":'
'[[100.0,0.0],[101.0,1.0]]}',
"multiline": '{"type":"MultiLineString","coordinates":[[[100.0,0.0],'
'[101.0,1.0]],[[102.0,2.0],[103.0,3.0]]]}',
"multipolygon": '{"type":"MultiPolygon","coordinates":[[[[102.0,2.0],'
'[103.0,2.0],[103.0,3.0],[102.0,3.0],[102.0,2.0]]],[[[100.0,0.0],'
'[101.0,0.0],[101.0,1.0],[100.0,1.0],[100.0,0.0]],[[100.2,0.2],'
'[100.8,0.2],[100.8,0.8],[100.2,0.8],[100.2,0.2]]]]}',
}


class SpatialTestBase(object):

db_srid = 4326

geojson_examples = geojson_examples

@classmethod
def setup_class(cls):
if engine_is_sqlite():
raise SkipTest("PostGIS is required for this test")

# This will create the PostGIS tables (geometry_columns and
# spatial_ref_sys) which were deleted when rebuilding the database
table = Table('spatial_ref_sys', meta.metadata)
if not table.exists():
create_postgis_tables()

# When running the tests with the --reset-db option for some
# reason the metadata holds a reference to the `package_extent`
# table after being deleted, causing an InvalidRequestError
# exception when trying to recreate it further on
if 'package_extent' in meta.metadata.tables:
meta.metadata.remove(meta.metadata.tables['package_extent'])

spatial_db_setup()

# Setup the harvest tables
harvest_model_setup()

@classmethod
def teardown_class(cls):
repo.rebuild_db()

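The nose-style `setup_class`/`teardown_class` machinery removed above is replaced by pytest fixtures (defined in the conftest files that follow) that tests opt into explicitly. A minimal sketch of the mechanism, with a hypothetical fixture body:

    import pytest

    @pytest.fixture
    def spatial_setup():
        # create PostGIS tables here; teardown code would follow a yield
        pass

    @pytest.mark.usefixtures('spatial_setup')
    def test_something():
        assert True
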
@ -0,0 +1,38 @@
try:
from ckan.tests.pytest_ckan.ckan_setup import *
except ImportError:
from ckan.config.middleware import make_app
from ckan.common import config

import pkg_resources
from paste.deploy import loadapp
import sys
import os

import pylons
from pylons.i18n.translation import _get_translator

def pytest_addoption(parser):
"""Allow using custom config file during tests.
"""
parser.addoption(u"--ckan-ini", action=u"store")

def pytest_sessionstart(session):
"""Initialize CKAN environment.
"""
global pylonsapp
path = os.getcwd()
sys.path.insert(0, path)
pkg_resources.working_set.add_entry(path)
pylonsapp = loadapp(
"config:" + session.config.option.ckan_ini, relative_to=path,
)

# Initialize a translator for tests that utilize i18n
translator = _get_translator(pylons.config.get("lang"))
pylons.translator._push_object(translator)

class FakeResponse:
headers = {}  # because render wants to delete Pragma

pylons.response._push_object(FakeResponse)

@ -0,0 +1,38 @@
# -*- coding: utf-8 -*-

import pytest
import os
import re
from sqlalchemy import Table

from ckan.model import Session, meta
from ckanext.spatial.geoalchemy_common import postgis_version
from ckanext.spatial.model.package_extent import setup as spatial_db_setup
from ckanext.harvest.model import setup as harvest_model_setup
import ckanext.harvest.model as harvest_model


def _create_postgis_extension():
Session.execute("CREATE EXTENSION IF NOT EXISTS postgis")
Session.commit()


def create_postgis_tables():
_create_postgis_extension()


@pytest.fixture
def clean_postgis():
Session.execute("DROP TABLE IF EXISTS package_extent")
Session.execute("DROP EXTENSION IF EXISTS postgis CASCADE")
Session.commit()

@pytest.fixture
def harvest_setup():
harvest_model.setup()


@pytest.fixture
def spatial_setup():
create_postgis_tables()
spatial_db_setup()

@ -0,0 +1,152 @@
# -*- coding: utf-8 -*-

try:
from ckan.tests.pytest_ckan.fixtures import *

except ImportError:
import pytest

import ckan.tests.helpers as test_helpers
import ckan.plugins
import ckan.lib.search as search

from ckan.common import config

@pytest.fixture
def ckan_config(request, monkeypatch):
"""Allows to override the configuration object used by tests

Takes into account config patches introduced by the ``ckan_config``
mark.

If you just want to set one or more configuration options for the
scope of a test (or a test class), use the ``ckan_config`` mark::

@pytest.mark.ckan_config('ckan.auth.create_unowned_dataset', True)
def test_auth_create_unowned_dataset():

# ...

To use the custom config inside a test, apply the
``ckan_config`` mark to it and inject the ``ckan_config`` fixture:

.. literalinclude:: /../ckan/tests/pytest_ckan/test_fixtures.py
:start-after: # START-CONFIG-OVERRIDE
:end-before: # END-CONFIG-OVERRIDE

If the change only needs to be applied locally, use the
``monkeypatch`` fixture

.. literalinclude:: /../ckan/tests/test_common.py
:start-after: # START-CONFIG-OVERRIDE
:end-before: # END-CONFIG-OVERRIDE

"""
_original = config.copy()
for mark in request.node.iter_markers(u"ckan_config"):
monkeypatch.setitem(config, *mark.args)
yield config
config.clear()
config.update(_original)

@pytest.fixture
def make_app(ckan_config):
"""Factory for client app instances.

Unless you need to create app instances lazily for some reason,
use the ``app`` fixture instead.
"""
return test_helpers._get_test_app

@pytest.fixture
def app(make_app):
"""Returns a client app instance to use in functional tests

To use it, just add the ``app`` parameter to your test function signature::

def test_dataset_search(self, app):

url = h.url_for('dataset.search')

response = app.get(url)


"""
return make_app()

@pytest.fixture(scope=u"session")
def reset_db():
"""Callable for resetting the database to the initial state.

If possible use the ``clean_db`` fixture instead.

"""
return test_helpers.reset_db

@pytest.fixture(scope=u"session")
def reset_index():
"""Callable for cleaning search index.

If possible use the ``clean_index`` fixture instead.
"""
return search.clear_all

@pytest.fixture
def clean_db(reset_db):
"""Resets the database to the initial state.

This can be used either for all tests in a class::

@pytest.mark.usefixtures("clean_db")
class TestExample(object):

def test_example(self):

or for a single test::

class TestExample(object):

@pytest.mark.usefixtures("clean_db")
def test_example(self):

"""
reset_db()

@pytest.fixture
def clean_index(reset_index):
"""Clear search index before starting the test.
"""
reset_index()

@pytest.fixture
def with_plugins(ckan_config):
"""Load all plugins specified by the ``ckan.plugins`` config option
at the beginning of the test. When the test ends (even it fails), it will
unload all the plugins in the reverse order.

.. literalinclude:: /../ckan/tests/test_factories.py
:start-after: # START-CONFIG-OVERRIDE
:end-before: # END-CONFIG-OVERRIDE

"""
plugins = ckan_config["ckan.plugins"].split()
for plugin in plugins:
if not ckan.plugins.plugin_loaded(plugin):
ckan.plugins.load(plugin)
yield
for plugin in reversed(plugins):
if ckan.plugins.plugin_loaded(plugin):
ckan.plugins.unload(plugin)

@pytest.fixture
def test_request_context(app):
"""Provide function for creating Flask request context.
"""
return app.flask_app.test_request_context

@pytest.fixture
def with_request_context(test_request_context):
"""Execute test inside requests context
"""
with test_request_context():
yield

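These vendored fixtures mirror the ones CKAN 2.9 ships, so the same test code runs against older CKAN releases too. A hypothetical test showing how they combine (assumes a configured CKAN test environment):

    import pytest

    @pytest.mark.ckan_config('ckan.plugins', 'spatial_metadata spatial_query')
    @pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
    def test_plugins_loaded(ckan_config):
        assert 'spatial_query' in ckan_config['ckan.plugins']
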
@ -1,7 +1,9 @@
# this is a namespace package
try:
import pkg_resources

pkg_resources.declare_namespace(__name__)
except ImportError:
import pkgutil

__path__ = pkgutil.extend_path(__path__, __name__)

@ -1,147 +1,186 @@
|
|||
import json
|
||||
from nose.tools import assert_equals
|
||||
|
||||
import pytest
|
||||
|
||||
from ckan.model import Session
|
||||
from ckan.lib.helpers import url_for
|
||||
|
||||
try:
|
||||
import ckan.new_tests.helpers as helpers
|
||||
import ckan.new_tests.factories as factories
|
||||
except ImportError:
|
||||
import ckan.tests.helpers as helpers
|
||||
import ckan.tests.factories as factories
|
||||
import ckan.plugins.toolkit as tk
|
||||
|
||||
import ckan.tests.factories as factories
|
||||
|
||||
from ckanext.spatial.model import PackageExtent
|
||||
from ckanext.spatial.geoalchemy_common import legacy_geoalchemy
|
||||
from ckanext.spatial.tests.base import SpatialTestBase
|
||||
|
||||
if not tk.check_ckan_version(min_version="2.9"):
|
||||
import ckan.tests.helpers as helpers
|
||||
|
||||
class TestSpatialExtra(SpatialTestBase, helpers.FunctionalTestBase):
|
||||
|
||||
def test_spatial_extra(self):
|
||||
app = self._get_test_app()
|
||||
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
|
||||
class TestSpatialExtra(SpatialTestBase):
|
||||
def test_spatial_extra_base(self, app):
|
||||
|
||||
user = factories.User()
|
||||
env = {'REMOTE_USER': user['name'].encode('ascii')}
|
||||
env = {"REMOTE_USER": user["name"].encode("ascii")}
|
||||
dataset = factories.Dataset(user=user)
|
||||
|
||||
offset = url_for(controller='package', action='edit', id=dataset['id'])
|
||||
res = app.get(offset, extra_environ=env)
|
||||
|
||||
form = res.forms[1]
|
||||
form['extras__0__key'] = u'spatial'
|
||||
form['extras__0__value'] = self.geojson_examples['point']
|
||||
|
||||
res = helpers.submit_and_follow(app, form, env, 'save')
|
||||
|
||||
assert 'Error' not in res, res
|
||||
|
||||
package_extent = Session.query(PackageExtent) \
|
||||
.filter(PackageExtent.package_id == dataset['id']).first()
|
||||
|
||||
geojson = json.loads(self.geojson_examples['point'])
|
||||
|
||||
assert_equals(package_extent.package_id, dataset['id'])
|
||||
if legacy_geoalchemy:
|
||||
assert_equals(Session.scalar(package_extent.the_geom.x),
|
||||
geojson['coordinates'][0])
|
||||
assert_equals(Session.scalar(package_extent.the_geom.y),
|
||||
geojson['coordinates'][1])
|
||||
assert_equals(Session.scalar(package_extent.the_geom.srid),
|
||||
self.db_srid)
|
||||
if tk.check_ckan_version(min_version="2.9"):
|
||||
offset = url_for("dataset.edit", id=dataset["id"])
|
||||
else:
|
||||
from sqlalchemy import func
|
||||
assert_equals(
|
||||
Session.query(func.ST_X(package_extent.the_geom)).first()[0],
|
||||
geojson['coordinates'][0])
|
||||
assert_equals(
|
||||
Session.query(func.ST_Y(package_extent.the_geom)).first()[0],
|
||||
geojson['coordinates'][1])
|
||||
assert_equals(package_extent.the_geom.srid, self.db_srid)
|
||||
|
||||
def test_spatial_extra_edit(self):
|
||||
app = self._get_test_app()
|
||||
|
||||
user = factories.User()
|
||||
env = {'REMOTE_USER': user['name'].encode('ascii')}
|
||||
dataset = factories.Dataset(user=user)
|
||||
|
||||
offset = url_for(controller='package', action='edit', id=dataset['id'])
|
||||
offset = url_for(controller="package", action="edit", id=dataset["id"])
|
||||
res = app.get(offset, extra_environ=env)
|
||||
|
||||
form = res.forms[1]
|
||||
form['extras__0__key'] = u'spatial'
|
||||
form['extras__0__value'] = self.geojson_examples['point']
|
||||
|
||||
res = helpers.submit_and_follow(app, form, env, 'save')
|
||||
|
||||
assert 'Error' not in res, res
|
||||
|
||||
res = app.get(offset, extra_environ=env)
|
||||
|
||||
form = res.forms[1]
|
||||
form['extras__0__key'] = u'spatial'
|
||||
form['extras__0__value'] = self.geojson_examples['polygon']
|
||||
|
||||
res = helpers.submit_and_follow(app, form, env, 'save')
|
||||
|
||||
assert 'Error' not in res, res
|
||||
|
||||
package_extent = Session.query(PackageExtent) \
|
||||
.filter(PackageExtent.package_id == dataset['id']).first()
|
||||
|
||||
assert_equals(package_extent.package_id, dataset['id'])
|
||||
if legacy_geoalchemy:
|
||||
assert_equals(
|
||||
Session.scalar(package_extent.the_geom.geometry_type),
|
||||
'ST_Polygon')
|
||||
assert_equals(
|
||||
Session.scalar(package_extent.the_geom.srid),
|
||||
self.db_srid)
|
||||
if tk.check_ckan_version(min_version="2.9"):
|
||||
data = {
|
||||
"name": dataset['name'],
|
||||
"extras__0__key": u"spatial",
|
||||
"extras__0__value": self.geojson_examples["point"]
|
||||
}
|
||||
res = app.post(offset, environ_overrides=env, data=data)
|
||||
else:
|
||||
from sqlalchemy import func
|
||||
assert_equals(
|
||||
Session.query(
|
||||
func.ST_GeometryType(package_extent.the_geom)).first()[0],
|
||||
'ST_Polygon')
|
||||
assert_equals(package_extent.the_geom.srid, self.db_srid)
|
||||
form = res.forms[1]
|
||||
form['extras__0__key'] = u'spatial'
|
||||
form['extras__0__value'] = self.geojson_examples['point']
|
||||
res = helpers.submit_and_follow(app, form, env, 'save')
|
||||
|
||||
def test_spatial_extra_bad_json(self):
|
||||
app = self._get_test_app()
|
||||
assert "Error" not in res, res
|
||||
|
||||
package_extent = (
|
||||
Session.query(PackageExtent)
|
||||
.filter(PackageExtent.package_id == dataset["id"])
|
||||
.first()
|
||||
)
|
||||
|
||||
geojson = json.loads(self.geojson_examples["point"])
|
||||
|
||||
assert package_extent.package_id == dataset["id"]
|
||||
from sqlalchemy import func
|
||||
|
||||
assert (
|
||||
Session.query(func.ST_X(package_extent.the_geom)).first()[0]
|
||||
== geojson["coordinates"][0]
|
||||
)
|
||||
assert (
|
||||
Session.query(func.ST_Y(package_extent.the_geom)).first()[0]
|
||||
== geojson["coordinates"][1]
|
||||
)
|
||||
assert package_extent.the_geom.srid == self.db_srid
|
||||
|
||||
def test_spatial_extra_edit(self, app):
|
||||
|
||||
user = factories.User()
|
||||
env = {'REMOTE_USER': user['name'].encode('ascii')}
|
        env = {"REMOTE_USER": user["name"].encode("ascii")}
        dataset = factories.Dataset(user=user)

        offset = url_for(controller='package', action='edit', id=dataset['id'])
        if tk.check_ckan_version(min_version="2.9"):
            offset = url_for("dataset.edit", id=dataset["id"])
        else:
            offset = url_for(controller="package", action="edit", id=dataset["id"])
        res = app.get(offset, extra_environ=env)

        form = res.forms[1]
        form['extras__0__key'] = u'spatial'
        form['extras__0__value'] = u'{"Type":Bad Json]'

        res = helpers.webtest_submit(form, extra_environ=env, name='save')
        if tk.check_ckan_version(min_version="2.9"):
            data = {
                "name": dataset['name'],
                "extras__0__key": u"spatial",
                "extras__0__value": self.geojson_examples["point"]
            }
            res = app.post(offset, environ_overrides=env, data=data)
        else:
            form = res.forms[1]
            form['extras__0__key'] = u'spatial'
            form['extras__0__value'] = self.geojson_examples['point']
            res = helpers.submit_and_follow(app, form, env, 'save')

        assert 'Error' in res, res
        assert 'Spatial' in res
        assert 'Error decoding JSON object' in res
        assert "Error" not in res, res

    def test_spatial_extra_bad_geojson(self):
        app = self._get_test_app()
        res = app.get(offset, extra_environ=env)

        if tk.check_ckan_version(min_version="2.9"):
            data = {
                "name": dataset['name'],
                "extras__0__key": u"spatial",
                "extras__0__value": self.geojson_examples["polygon"]
            }
            res = app.post(offset, environ_overrides=env, data=data)
        else:
            form = res.forms[1]
            form['extras__0__key'] = u'spatial'
            form['extras__0__value'] = self.geojson_examples['polygon']
            res = helpers.submit_and_follow(app, form, env, 'save')

        assert "Error" not in res, res

        package_extent = (
            Session.query(PackageExtent)
            .filter(PackageExtent.package_id == dataset["id"])
            .first()
        )

        assert package_extent.package_id == dataset["id"]
        from sqlalchemy import func

        assert (
            Session.query(
                func.ST_GeometryType(package_extent.the_geom)
            ).first()[0]
            == "ST_Polygon"
        )
        assert package_extent.the_geom.srid == self.db_srid

    def test_spatial_extra_bad_json(self, app):

        user = factories.User()
        env = {'REMOTE_USER': user['name'].encode('ascii')}
        env = {"REMOTE_USER": user["name"].encode("ascii")}
        dataset = factories.Dataset(user=user)

        offset = url_for(controller='package', action='edit', id=dataset['id'])
        if tk.check_ckan_version(min_version="2.9"):
            offset = url_for("dataset.edit", id=dataset["id"])
        else:
            offset = url_for(controller="package", action="edit", id=dataset["id"])
        res = app.get(offset, extra_environ=env)

        form = res.forms[1]
        form['extras__0__key'] = u'spatial'
        form['extras__0__value'] = u'{"Type":"Bad_GeoJSON","a":2}'
        if tk.check_ckan_version(min_version="2.9"):
            data = {
                "name": dataset['name'],
                "extras__0__key": u"spatial",
                "extras__0__value": u'{"Type":Bad Json]'
            }
            res = app.post(offset, environ_overrides=env, data=data)
        else:
            form = res.forms[1]
            form['extras__0__key'] = u'spatial'
            form['extras__0__value'] = u'{"Type":Bad Json]'
            res = helpers.webtest_submit(form, extra_environ=env, name='save')

        res = helpers.webtest_submit(form, extra_environ=env, name='save')
        assert "Error" in res, res
        assert "Spatial" in res
        assert "Error decoding JSON object" in res

        assert 'Error' in res, res
        assert 'Spatial' in res
        assert 'Error creating geometry' in res

    def test_spatial_extra_bad_geojson(self, app):

        user = factories.User()
        env = {"REMOTE_USER": user["name"].encode("ascii")}
        dataset = factories.Dataset(user=user)

        if tk.check_ckan_version(min_version="2.9"):
            offset = url_for("dataset.edit", id=dataset["id"])
        else:
            offset = url_for(controller="package", action="edit", id=dataset["id"])
        res = app.get(offset, extra_environ=env)

        if tk.check_ckan_version(min_version="2.9"):
            data = {
                "name": dataset['name'],
                "extras__0__key": u"spatial",
                "extras__0__value": u'{"Type":"Bad_GeoJSON","a":2}'
            }
            res = app.post(offset, environ_overrides=env, data=data)
        else:
            form = res.forms[1]
            form['extras__0__key'] = u'spatial'
            form['extras__0__value'] = u'{"Type":"Bad_GeoJSON","a":2}'
            res = helpers.webtest_submit(form, extra_environ=env, name='save')

        assert "Error" in res, res
        assert "Spatial" in res
        assert "Error creating geometry" in res
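Throughout these tests the route lookup is gated on the CKAN release: 2.9 serves dataset pages from the Flask "dataset" blueprint, while 2.7/2.8 still use the Pylons "package" controller. A minimal standalone sketch of the same pattern (the helper name is illustrative, not part of this changeset):

import ckan.plugins.toolkit as tk
from ckan.lib.helpers import url_for

def dataset_edit_url(dataset_id):
    # Hypothetical helper wrapping the version check used in the tests above.
    if tk.check_ckan_version(min_version="2.9"):
        return url_for("dataset.edit", id=dataset_id)
    return url_for(controller="package", action="edit", id=dataset_id)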
@@ -1,38 +1,35 @@
import pytest
from ckan.lib.helpers import url_for

from ckanext.spatial.tests.base import SpatialTestBase

try:
    import ckan.new_tests.helpers as helpers
    import ckan.new_tests.factories as factories
except ImportError:
    import ckan.tests.helpers as helpers
    import ckan.tests.factories as factories
import ckan.tests.factories as factories

import ckan.plugins.toolkit as tk


class TestSpatialWidgets(SpatialTestBase, helpers.FunctionalTestBase):

    def test_dataset_map(self):
        app = self._get_test_app()

        user = factories.User()
class TestSpatialWidgets(SpatialTestBase):
    @pytest.mark.usefixtures('with_plugins', 'clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
    def test_dataset_map(self, app):
        dataset = factories.Dataset(
            user=user,
            extras=[{'key': 'spatial',
                     'value': self.geojson_examples['point']}]
            extras=[
                {"key": "spatial", "value": self.geojson_examples["point"]}
            ],
        )
        offset = url_for(controller='package', action='read', id=dataset['id'])
        if tk.check_ckan_version(min_version="2.9"):
            offset = url_for("dataset.read", id=dataset["id"])
        else:
            offset = url_for(controller="package", action="read", id=dataset["id"])
        res = app.get(offset)

        assert 'data-module="dataset-map"' in res
        assert 'dataset_map.js' in res
        assert "dataset_map.js" in res

    def test_spatial_search_widget(self):

        app = self._get_test_app()

        offset = url_for(controller='package', action='search')
    def test_spatial_search_widget(self, app):
        if tk.check_ckan_version(min_version="2.9"):
            offset = url_for("dataset.search")
        else:
            offset = url_for(controller="package", action="search")
        res = app.get(offset)

        assert 'data-module="spatial-query"' in res
        assert 'spatial_query.js' in res
        assert "spatial_query.js" in res
@@ -1,7 +1,9 @@
import six

import time
import random

from nose.tools import assert_equal
import pytest

from shapely.geometry import asShape

@@ -13,137 +15,161 @@ from ckan.lib.munge import munge_title_to_name

from ckanext.spatial.model import PackageExtent
from ckanext.spatial.lib import validate_bbox, bbox_query, bbox_query_ordered
from ckanext.spatial.geoalchemy_common import WKTElement, compare_geometry_fields
from ckanext.spatial.geoalchemy_common import (
    WKTElement,
    compare_geometry_fields,
)
from ckanext.spatial.tests.base import SpatialTestBase


class TestCompareGeometries(SpatialTestBase):
def create_package(**package_dict):
    user = plugins.toolkit.get_action("get_site_user")(
        {"model": model, "ignore_auth": True}, {}
    )
    context = {
        "model": model,
        "session": model.Session,
        "user": user["name"],
        "extras_as_string": True,
        "api_version": 2,
        "ignore_auth": True,
    }
    package_dict = package_create(context, package_dict)
    return context.get("id")


@pytest.mark.usefixtures('with_plugins', 'clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
class TestCompareGeometries(SpatialTestBase):
    def _get_extent_object(self, geometry):
        if isinstance(geometry, basestring):
        if isinstance(geometry, six.string_types):
            geometry = json.loads(geometry)
        shape = asShape(geometry)
        return PackageExtent(package_id='xxx',
                             the_geom=WKTElement(shape.wkt, 4326))
        return PackageExtent(
            package_id="xxx", the_geom=WKTElement(shape.wkt, 4326)
        )

    def test_same_points(self):

        extent1 = self._get_extent_object(self.geojson_examples['point'])
        extent2 = self._get_extent_object(self.geojson_examples['point'])
        extent1 = self._get_extent_object(self.geojson_examples["point"])
        extent2 = self._get_extent_object(self.geojson_examples["point"])

        assert compare_geometry_fields(extent1.the_geom, extent2.the_geom)

    def test_different_points(self):

        extent1 = self._get_extent_object(self.geojson_examples['point'])
        extent2 = self._get_extent_object(self.geojson_examples['point_2'])
        extent1 = self._get_extent_object(self.geojson_examples["point"])
        extent2 = self._get_extent_object(self.geojson_examples["point_2"])

        assert not compare_geometry_fields(extent1.the_geom, extent2.the_geom)


class TestValidateBbox:
    bbox_dict = {'minx': -4.96,
                 'miny': 55.70,
                 'maxx': -3.78,
                 'maxy': 56.43}
class TestValidateBbox(object):
    bbox_dict = {"minx": -4.96, "miny": 55.70, "maxx": -3.78, "maxy": 56.43}

    def test_string(self):
        res = validate_bbox("-4.96,55.70,-3.78,56.43")
        assert_equal(res, self.bbox_dict)
        assert(res == self.bbox_dict)

    def test_list(self):
        res = validate_bbox([-4.96, 55.70, -3.78, 56.43])
        assert_equal(res, self.bbox_dict)
        assert(res == self.bbox_dict)

    def test_bad(self):
        res = validate_bbox([-4.96, 55.70, -3.78])
        assert_equal(res, None)
        assert(res is None)

    def test_bad_2(self):
        res = validate_bbox('random')
        assert_equal(res, None)
        res = validate_bbox("random")
        assert(res is None)


def bbox_2_geojson(bbox_dict):
    return '{"type":"Polygon","coordinates":[[[%(minx)s, %(miny)s],[%(minx)s, %(maxy)s], [%(maxx)s, %(maxy)s], [%(maxx)s, %(miny)s], [%(minx)s, %(miny)s]]]}' % bbox_dict
    return (
        '{"type":"Polygon","coordinates":[[[%(minx)s, %(miny)s],'
        '[%(minx)s, %(maxy)s], [%(maxx)s, %(maxy)s], '
        '[%(maxx)s, %(miny)s], [%(minx)s, %(miny)s]]]}'
        % bbox_dict
    )
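bbox_2_geojson just interpolates the four bounds into a closed five-point polygon ring, starting and ending at (minx, miny). For example, with the TestValidateBbox fixture values:

bbox = {"minx": -4.96, "miny": 55.70, "maxx": -3.78, "maxy": 56.43}
print(bbox_2_geojson(bbox))
# {"type":"Polygon","coordinates":[[[-4.96, 55.7],[-4.96, 56.43], [-3.78, 56.43], [-3.78, 55.7], [-4.96, 55.7]]]}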
class SpatialQueryTestBase(SpatialTestBase):
    '''Base class for tests of spatial queries'''
    """Base class for tests of spatial queries"""

    miny = 0
    maxy = 1

    @classmethod
    def setup_class(cls):
        SpatialTestBase.setup_class()
        for fixture_x in cls.fixtures_x:
            bbox = cls.x_values_to_bbox(fixture_x)
    def initial_data(self):
        for fixture_x in self.fixtures_x:
            bbox = self.x_values_to_bbox(fixture_x)
            bbox_geojson = bbox_2_geojson(bbox)
            cls.create_package(name=munge_title_to_name(str(fixture_x)),
                               title=str(fixture_x),
                               extras=[{'key': 'spatial',
                                        'value': bbox_geojson}])

    @classmethod
    def create_package(cls, **package_dict):
        user = plugins.toolkit.get_action('get_site_user')({'model': model, 'ignore_auth': True}, {})
        context = {'model': model,
                   'session': model.Session,
                   'user': user['name'],
                   'extras_as_string': True,
                   'api_version': 2,
                   'ignore_auth': True,
                   }
        package_dict = package_create(context, package_dict)
        return context.get('id')
            create_package(
                name=munge_title_to_name(six.text_type(fixture_x)),
                title=six.text_type(fixture_x),
                extras=[{"key": "spatial", "value": bbox_geojson}],
            )

    @classmethod
    def x_values_to_bbox(cls, x_tuple):
        return {'minx': x_tuple[0], 'maxx': x_tuple[1],
                'miny': cls.miny, 'maxy': cls.maxy}
        return {
            "minx": x_tuple[0],
            "maxx": x_tuple[1],
            "miny": cls.miny,
            "maxy": cls.maxy,
        }


@pytest.mark.usefixtures('with_plugins', 'clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
class TestBboxQuery(SpatialQueryTestBase):
    # x values for the fixtures
    fixtures_x = [(0, 1), (0, 3), (0, 4), (4, 5), (6, 7)]

    def test_query(self):
        self.initial_data()
        bbox_dict = self.x_values_to_bbox((2, 5))
        package_ids = [res.package_id for res in bbox_query(bbox_dict)]
        package_titles = [model.Package.get(id_).title for id_ in package_ids]
        assert_equal(set(package_titles),
                     set(('(0, 3)', '(0, 4)', '(4, 5)')))
        assert(set(package_titles) == {"(0, 3)", "(0, 4)", "(4, 5)"})

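bbox_query, exercised above, takes the minx/miny/maxx/maxy dict and returns the PackageExtent rows whose stored geometry intersects that box. A minimal sketch of the same call outside the test harness (assuming an initialised CKAN/PostGIS database):

from ckanext.spatial.lib import validate_bbox, bbox_query

bbox = validate_bbox("2,0,5,1")  # -> {"minx": 2.0, "miny": 0.0, "maxx": 5.0, "maxy": 1.0}
package_ids = [extent.package_id for extent in bbox_query(bbox)]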
@pytest.mark.usefixtures('with_plugins', 'clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
class TestBboxQueryOrdered(SpatialQueryTestBase):
    # x values for the fixtures
    fixtures_x = [(0, 9), (1, 8), (2, 7), (3, 6), (4, 5),
                  (8, 9)]
    fixtures_x = [(0, 9), (1, 8), (2, 7), (3, 6), (4, 5), (8, 9)]

    def test_query(self):
        self.initial_data()
        bbox_dict = self.x_values_to_bbox((2, 7))
        q = bbox_query_ordered(bbox_dict)
        package_ids = [res.package_id for res in q]
        package_titles = [model.Package.get(id_).title for id_ in package_ids]
        # check the right items are returned
        assert_equal(set(package_titles),
                     set(('(0, 9)', '(1, 8)', '(2, 7)', '(3, 6)', '(4, 5)')))
        assert(
            set(package_titles) ==
            set(("(0, 9)", "(1, 8)", "(2, 7)", "(3, 6)", "(4, 5)"))
        )
        # check the order is good
        assert_equal(package_titles,
                     ['(2, 7)', '(1, 8)', '(3, 6)', '(0, 9)', '(4, 5)'])
        assert(
            package_titles == ["(2, 7)", "(1, 8)", "(3, 6)", "(0, 9)", "(4, 5)"]
        )


@pytest.mark.usefixtures('with_plugins', 'clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
class TestBboxQueryPerformance(SpatialQueryTestBase):
    # x values for the fixtures
    fixtures_x = [(random.uniform(0, 3), random.uniform(3,9)) \
                  for x in xrange(10)]  # increase the number to 1000 say
    fixtures_x = [
        (random.uniform(0, 3), random.uniform(3, 9)) for x in range(10)
    ]  # increase the number to 1000 say

    def test_query(self):
        bbox_dict = self.x_values_to_bbox((2, 7))
        t0 = time.time()
        q = bbox_query(bbox_dict)
        bbox_query(bbox_dict)
        t1 = time.time()
        print 'bbox_query took: ', t1-t0
        print("bbox_query took: ", t1 - t0)

    def test_query_ordered(self):
        bbox_dict = self.x_values_to_bbox((2, 7))
        t0 = time.time()
        q = bbox_query_ordered(bbox_dict)
        bbox_query_ordered(bbox_dict)
        t1 = time.time()
        print 'bbox_query_ordered took: ', t1-t0
        print("bbox_query_ordered took: ", t1 - t0)
@@ -1,7 +1,9 @@
# this is a namespace package
try:
    import pkg_resources

    pkg_resources.declare_namespace(__name__)
except ImportError:
    import pkgutil

    __path__ = pkgutil.extend_path(__path__, __name__)
@@ -1,34 +0,0 @@
import os

from nose.tools import assert_equal

from ckanext.spatial.model import ISODocument


def open_xml_fixture(xml_filename):
    xml_filepath = os.path.join(os.path.dirname(__file__),
                                'xml',
                                xml_filename)
    with open(xml_filepath, 'rb') as f:
        xml_string_raw = f.read()

    try:
        xml_string = xml_string_raw.encode("utf-8")
    except UnicodeDecodeError, e:
        assert 0, 'ERROR: Unicode Error reading file \'%s\': %s' % \
            (metadata_filepath, e)
    return xml_string


def test_simple():
    xml_string = open_xml_fixture('gemini_dataset.xml')
    iso_document = ISODocument(xml_string)
    iso_values = iso_document.read_values()
    assert_equal(iso_values['guid'], 'test-dataset-1')
    assert_equal(iso_values['metadata-date'], '2011-09-23T10:06:08')


def test_multiplicity_warning():
    # This dataset lacks a value for Metadata Date and should
    # produce a log.warning, but not raise an exception.
    xml_string = open_xml_fixture('FCSConservancyPolygons.xml')
    iso_document = ISODocument(xml_string)
    iso_values = iso_document.read_values()
    assert_equal(iso_values['guid'], 'B8A22DF4-B0DC-4F0B-A713-0CF5F8784A28')
@@ -1,90 +1,108 @@
from nose.tools import assert_equals
import pytest

from shapely.geometry import asShape

from ckan.model import Session
from ckan.lib.helpers import json
try:
    import ckan.new_tests.factories as factories
except ImportError:
    import ckan.tests.factories as factories

import ckan.tests.helpers as helpers
import ckan.tests.factories as factories

from ckanext.spatial.model import PackageExtent
from ckanext.spatial.geoalchemy_common import WKTElement, legacy_geoalchemy
from ckanext.spatial.tests.base import SpatialTestBase


@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
class TestPackageExtent(SpatialTestBase):

    def test_create_extent(self):

        package = factories.Dataset()

        geojson = json.loads(self.geojson_examples['point'])
        geojson = json.loads(self.geojson_examples["point"])

        shape = asShape(geojson)
        package_extent = PackageExtent(package_id=package['id'],
                                       the_geom=WKTElement(shape.wkt,
                                                           self.db_srid))
        package_extent = PackageExtent(
            package_id=package["id"],
            the_geom=WKTElement(shape.wkt, self.db_srid),
        )
        package_extent.save()

        assert_equals(package_extent.package_id, package['id'])
        assert(package_extent.package_id == package["id"])
        if legacy_geoalchemy:
            assert_equals(Session.scalar(package_extent.the_geom.x),
                          geojson['coordinates'][0])
            assert_equals(Session.scalar(package_extent.the_geom.y),
                          geojson['coordinates'][1])
            assert_equals(Session.scalar(package_extent.the_geom.srid),
                          self.db_srid)
            assert(
                Session.scalar(package_extent.the_geom.x) ==
                geojson["coordinates"][0]
            )
            assert(
                Session.scalar(package_extent.the_geom.y) ==
                geojson["coordinates"][1]
            )
            assert(
                Session.scalar(package_extent.the_geom.srid) == self.db_srid
            )
        else:
            from sqlalchemy import func
            assert_equals(
                Session.query(func.ST_X(package_extent.the_geom)).first()[0],
                geojson['coordinates'][0])
            assert_equals(
                Session.query(func.ST_Y(package_extent.the_geom)).first()[0],
                geojson['coordinates'][1])
            assert_equals(package_extent.the_geom.srid, self.db_srid)

            assert(
                Session.query(func.ST_X(package_extent.the_geom)).first()[0] ==
                geojson["coordinates"][0]
            )
            assert(
                Session.query(func.ST_Y(package_extent.the_geom)).first()[0] ==
                geojson["coordinates"][1]
            )
            assert(package_extent.the_geom.srid == self.db_srid)

    def test_update_extent(self):

        package = factories.Dataset()

        geojson = json.loads(self.geojson_examples['point'])
        geojson = json.loads(self.geojson_examples["point"])

        shape = asShape(geojson)
        package_extent = PackageExtent(package_id=package['id'],
                                       the_geom=WKTElement(shape.wkt,
                                                           self.db_srid))
        package_extent = PackageExtent(
            package_id=package["id"],
            the_geom=WKTElement(shape.wkt, self.db_srid),
        )
        package_extent.save()
        if legacy_geoalchemy:
            assert_equals(
                Session.scalar(package_extent.the_geom.geometry_type),
                'ST_Point')
            assert(
                Session.scalar(package_extent.the_geom.geometry_type) ==
                "ST_Point"
            )
        else:
            from sqlalchemy import func
            assert_equals(

            assert(
                Session.query(
                    func.ST_GeometryType(package_extent.the_geom)).first()[0],
                'ST_Point')
                    func.ST_GeometryType(package_extent.the_geom)
                ).first()[0] ==
                "ST_Point"
            )

        # Update the geometry (Point -> Polygon)
        geojson = json.loads(self.geojson_examples['polygon'])
        geojson = json.loads(self.geojson_examples["polygon"])

        shape = asShape(geojson)
        package_extent.the_geom = WKTElement(shape.wkt, self.db_srid)
        package_extent.save()

        assert_equals(package_extent.package_id, package['id'])
        assert(package_extent.package_id == package["id"])
        if legacy_geoalchemy:
            assert_equals(
                Session.scalar(package_extent.the_geom.geometry_type),
                'ST_Polygon')
            assert_equals(
                Session.scalar(package_extent.the_geom.srid),
                self.db_srid)
            assert(
                Session.scalar(package_extent.the_geom.geometry_type) ==
                "ST_Polygon"
            )
            assert(
                Session.scalar(package_extent.the_geom.srid) == self.db_srid
            )
        else:
            assert_equals(
            assert(
                Session.query(
                    func.ST_GeometryType(package_extent.the_geom)).first()[0],
                'ST_Polygon')
            assert_equals(package_extent.the_geom.srid, self.db_srid)
                    func.ST_GeometryType(package_extent.the_geom)
                ).first()[0] ==
                "ST_Polygon"
            )
            assert(package_extent.the_geom.srid == self.db_srid)

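The assertions above reduce to a create/update cycle on PackageExtent. A condensed sketch of that lifecycle for the non-legacy GeoAlchemy branch only (the package id and geometry are illustrative, and a live PostGIS session is assumed):

from shapely.geometry import asShape
from sqlalchemy import func

from ckan.model import Session
from ckanext.spatial.model import PackageExtent
from ckanext.spatial.geoalchemy_common import WKTElement

geojson = {"type": "Point", "coordinates": [0.0, 50.0]}  # illustrative geometry
extent = PackageExtent(
    package_id="some-package-id",  # illustrative id
    the_geom=WKTElement(asShape(geojson).wkt, 4326),
)
extent.save()
# PostGIS reports the stored type via ST_GeometryType:
assert Session.query(func.ST_GeometryType(extent.the_geom)).first()[0] == "ST_Point"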
@@ -1,203 +1,201 @@
from nose.plugins.skip import SkipTest
from nose.tools import assert_equals, assert_raises
import pytest

from ckan.model import Session
from ckan.lib.search import SearchError
try:
    import ckan.new_tests.helpers as helpers
    import ckan.new_tests.factories as factories
except ImportError:
    import ckan.tests.helpers as helpers
    import ckan.tests.factories as factories

import ckan.tests.helpers as helpers
import ckan.tests.factories as factories

from ckanext.spatial.tests.base import SpatialTestBase

extents = {
    'nz': '{"type":"Polygon","coordinates":[[[174,-38],[176,-38],[176,-40],[174,-40],[174,-38]]]}',
    'ohio': '{"type": "Polygon","coordinates": [[[-84,38],[-84,40],[-80,42],[-80,38],[-84,38]]]}',
    'dateline': '{"type":"Polygon","coordinates":[[[169,70],[169,60],[192,60],[192,70],[169,70]]]}',
    'dateline2': '{"type":"Polygon","coordinates":[[[170,60],[-170,60],[-170,70],[170,70],[170,60]]]}',
    "nz": '{"type":"Polygon","coordinates":[[[174,-38],[176,-38],[176,-40],[174,-40],[174,-38]]]}',
    "ohio": '{"type": "Polygon","coordinates": [[[-84,38],[-84,40],[-80,42],[-80,38],[-84,38]]]}',
    "dateline": '{"type":"Polygon","coordinates":[[[169,70],[169,60],[192,60],[192,70],[169,70]]]}',
    "dateline2": '{"type":"Polygon","coordinates":[[[170,60],[-170,60],[-170,70],[170,70],[170,60]]]}',
}
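The two dateline fixtures describe boxes crossing the antimeridian, once with longitudes running past 180 and once wrapped to negative values. A quick sanity check that both parse as valid polygons:

import json
from shapely.geometry import asShape

for name in ("dateline", "dateline2"):
    shape = asShape(json.loads(extents[name]))
    # Both rings span latitudes 60..70; only the longitude encoding differs.
    assert shape.is_valid and shape.bounds[3] - shape.bounds[1] == 10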

class TestAction(SpatialTestBase):

    def teardown(self):
        helpers.reset_db()

    @pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
    def test_spatial_query(self):

        dataset = factories.Dataset(
            extras=[{'key': 'spatial',
                     'value': self.geojson_examples['point']}]
            extras=[
                {"key": "spatial", "value": self.geojson_examples["point"]}
            ]
        )

        result = helpers.call_action(
            'package_search',
            extras={'ext_bbox': '-180,-90,180,90'})
            "package_search", extras={"ext_bbox": "-180,-90,180,90"}
        )

        assert_equals(result['count'], 1)
        assert_equals(result['results'][0]['id'], dataset['id'])
        assert(result["count"] == 1)
        assert(result["results"][0]["id"] == dataset["id"])

    @pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
    def test_spatial_query_outside_bbox(self):

        factories.Dataset(
            extras=[{'key': 'spatial',
                     'value': self.geojson_examples['point']}]
            extras=[
                {"key": "spatial", "value": self.geojson_examples["point"]}
            ]
        )

        result = helpers.call_action(
            'package_search',
            extras={'ext_bbox': '-10,-20,10,20'})
            "package_search", extras={"ext_bbox": "-10,-20,10,20"}
        )

        assert_equals(result['count'], 0)
        assert(result["count"] == 0)

    @pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
    def test_spatial_query_wrong_bbox(self):
        with pytest.raises(SearchError):
            helpers.call_action(
                "package_search",
                extras={"ext_bbox": "-10,-20,10,a"},
            )

        assert_raises(SearchError, helpers.call_action,
                      'package_search', extras={'ext_bbox': '-10,-20,10,a'})

    @pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
    def test_spatial_query_nz(self):

        dataset = factories.Dataset(
            extras=[{'key': 'spatial',
                     'value': extents['nz']}]
            extras=[{"key": "spatial", "value": extents["nz"]}]
        )

        result = helpers.call_action(
            'package_search',
            extras={'ext_bbox': '56,-54,189,-28'})
            "package_search", extras={"ext_bbox": "56,-54,189,-28"}
        )

        assert_equals(result['count'], 1)
        assert_equals(result['results'][0]['id'], dataset['id'])
        assert(result["count"] == 1)
        assert(result["results"][0]["id"] == dataset["id"])

    @pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
    def test_spatial_query_nz_wrap(self):

        dataset = factories.Dataset(
            extras=[{'key': 'spatial',
                     'value': extents['nz']}]
            extras=[{"key": "spatial", "value": extents["nz"]}]
        )
        result = helpers.call_action(
            "package_search", extras={"ext_bbox": "-203,-54,-167,-28"}
        )

        result = helpers.call_action(
            'package_search',
            extras={'ext_bbox': '-203,-54,-167,-28'})

        assert_equals(result['count'], 1)
        assert_equals(result['results'][0]['id'], dataset['id'])
        assert(result["count"] == 1)
        assert(result["results"][0]["id"] == dataset["id"])

    @pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
    def test_spatial_query_ohio(self):

        dataset = factories.Dataset(
            extras=[{'key': 'spatial',
                     'value': extents['ohio']}]
            extras=[{"key": "spatial", "value": extents["ohio"]}]
        )

        result = helpers.call_action(
            'package_search',
            extras={'ext_bbox': '-110,37,-78,53'})
            "package_search", extras={"ext_bbox": "-110,37,-78,53"}
        )

        assert_equals(result['count'], 1)
        assert_equals(result['results'][0]['id'], dataset['id'])
        assert(result["count"] == 1)
        assert(result["results"][0]["id"] == dataset["id"])

    @pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
    def test_spatial_query_ohio_wrap(self):

        dataset = factories.Dataset(
            extras=[{'key': 'spatial',
                     'value': extents['ohio']}]
            extras=[{"key": "spatial", "value": extents["ohio"]}]
        )

        result = helpers.call_action(
            'package_search',
            extras={'ext_bbox': '258,37,281,51'})
            "package_search", extras={"ext_bbox": "258,37,281,51"}
        )

        assert_equals(result['count'], 1)
        assert_equals(result['results'][0]['id'], dataset['id'])
        assert(result["count"] == 1)
        assert(result["results"][0]["id"] == dataset["id"])

    @pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
    def test_spatial_query_dateline_1(self):

        dataset = factories.Dataset(
            extras=[{'key': 'spatial',
                     'value': extents['dateline']}]
            extras=[{"key": "spatial", "value": extents["dateline"]}]
        )

        result = helpers.call_action(
            'package_search',
            extras={'ext_bbox': '-197,56,-128,70'})
            "package_search", extras={"ext_bbox": "-197,56,-128,70"}
        )

        assert_equals(result['count'], 1)
        assert_equals(result['results'][0]['id'], dataset['id'])
        assert(result["count"] == 1)
        assert(result["results"][0]["id"] == dataset["id"])

    @pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
    def test_spatial_query_dateline_2(self):

        dataset = factories.Dataset(
            extras=[{'key': 'spatial',
                     'value': extents['dateline']}]
            extras=[{"key": "spatial", "value": extents["dateline"]}]
        )

        result = helpers.call_action(
            'package_search',
            extras={'ext_bbox': '162,54,237,70'})
            "package_search", extras={"ext_bbox": "162,54,237,70"}
        )

        assert_equals(result['count'], 1)
        assert_equals(result['results'][0]['id'], dataset['id'])
        assert(result["count"] == 1)
        assert(result["results"][0]["id"] == dataset["id"])

    @pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
    def test_spatial_query_dateline_3(self):

        dataset = factories.Dataset(
            extras=[{'key': 'spatial',
                     'value': extents['dateline2']}]
            extras=[{"key": "spatial", "value": extents["dateline2"]}]
        )

        result = helpers.call_action(
            'package_search',
            extras={'ext_bbox': '-197,56,-128,70'})
            "package_search", extras={"ext_bbox": "-197,56,-128,70"}
        )

        assert_equals(result['count'], 1)
        assert_equals(result['results'][0]['id'], dataset['id'])
        assert(result["count"] == 1)
        assert(result["results"][0]["id"] == dataset["id"])

    @pytest.mark.usefixtures('clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
    def test_spatial_query_dateline_4(self):

        dataset = factories.Dataset(
            extras=[{'key': 'spatial',
                     'value': extents['dateline2']}]
            extras=[{"key": "spatial", "value": extents["dateline2"]}]
        )

        result = helpers.call_action(
            'package_search',
            extras={'ext_bbox': '162,54,237,70'})
            "package_search", extras={"ext_bbox": "162,54,237,70"}
        )

        assert_equals(result['count'], 1)
        assert_equals(result['results'][0]['id'], dataset['id'])
        assert(result["count"] == 1)
        assert(result["results"][0]["id"] == dataset["id"])

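Every case above drives the same action: package_search with an ext_bbox extra in minx,miny,maxx,maxy order, which the plugin translates into a spatial filter. The equivalent call outside the test helpers is (a sketch using the standard toolkit entry point):

import ckan.plugins.toolkit as tk

result = tk.get_action("package_search")(
    {}, {"extras": {"ext_bbox": "-180,-90,180,90"}}
)
print(result["count"], [d["id"] for d in result["results"]])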
class TestHarvestedMetadataAPI(SpatialTestBase, helpers.FunctionalTestBase):

    def test_api(self):
@pytest.mark.usefixtures('with_plugins', 'clean_postgis', 'clean_db', 'clean_index', 'harvest_setup', 'spatial_setup')
class TestHarvestedMetadataAPI(SpatialTestBase):
    def test_api(self, app):
        try:
            from ckanext.harvest.model import (HarvestObject, HarvestJob,
                                               HarvestSource,
                                               HarvestObjectExtra)
            from ckanext.harvest.model import (
                HarvestObject,
                HarvestJob,
                HarvestSource,
                HarvestObjectExtra,
            )
        except ImportError:
            raise SkipTest('The harvester extension is needed for these tests')
            raise pytest.skip(
                "The harvester extension is needed for these tests")

        content1 = '<xml>Content 1</xml>'
        content1 = "<xml>Content 1</xml>"
        ho1 = HarvestObject(
            guid='test-ho-1',
            job=HarvestJob(source=HarvestSource(url='http://', type='xx')),
            content=content1)
            guid="test-ho-1",
            job=HarvestJob(source=HarvestSource(url="http://", type="xx")),
            content=content1,
        )

        content2 = '<xml>Content 2</xml>'
        original_content2 = '<xml>Original Content 2</xml>'
        content2 = "<xml>Content 2</xml>"
        original_content2 = "<xml>Original Content 2</xml>"
        ho2 = HarvestObject(
            guid='test-ho-2',
            job=HarvestJob(source=HarvestSource(url='http://', type='xx')),
            content=content2)
            guid="test-ho-2",
            job=HarvestJob(source=HarvestSource(url="http://", type="xx")),
            content=content2,
        )

        hoe = HarvestObjectExtra(
            key='original_document',
            value=original_content2,
            object=ho2)
            key="original_document", value=original_content2, object=ho2
        )

        Session.add(ho1)
        Session.add(ho2)

@@ -207,68 +205,28 @@ class TestHarvestedMetadataAPI(SpatialTestBase, helpers.FunctionalTestBase):
        object_id_1 = ho1.id
        object_id_2 = ho2.id

        app = self._get_test_app()

        # Test redirects for old URLs
        url = '/api/2/rest/harvestobject/{0}/xml'.format(object_id_1)
        r = app.get(url)
        assert_equals(r.status_int, 301)
        assert ('/harvest/object/{0}'.format(object_id_1)
                in r.headers['Location'])

        url = '/api/2/rest/harvestobject/{0}/html'.format(object_id_1)
        r = app.get(url)
        assert_equals(r.status_int, 301)
        assert ('/harvest/object/{0}/html'.format(object_id_1)
                in r.headers['Location'])

        # Access object content
        url = '/harvest/object/{0}'.format(object_id_1)
        r = app.get(url)
        assert_equals(r.status_int, 200)
        assert_equals(r.headers['Content-Type'],
                      'application/xml; charset=utf-8')
        assert_equals(
            r.body,
            '<?xml version="1.0" encoding="UTF-8"?>\n<xml>Content 1</xml>')
        url = "/harvest/object/{0}".format(object_id_1)
        r = app.get(url, status=200)
        assert(
            r.headers["Content-Type"] == "application/xml; charset=utf-8"
        )
        assert(
            r.body ==
            '<?xml version="1.0" encoding="UTF-8"?>\n<xml>Content 1</xml>'
        )

        # Access original content in object extra (if present)
        url = '/harvest/object/{0}/original'.format(object_id_1)
        url = "/harvest/object/{0}/original".format(object_id_1)
        r = app.get(url, status=404)
        assert_equals(r.status_int, 404)

        url = '/harvest/object/{0}/original'.format(object_id_2)
        r = app.get(url)
        assert_equals(r.status_int, 200)
        assert_equals(r.headers['Content-Type'],
                      'application/xml; charset=utf-8')
        assert_equals(
            r.body,
        url = "/harvest/object/{0}/original".format(object_id_2)
        r = app.get(url, status=200)
        assert(
            r.headers["Content-Type"] == "application/xml; charset=utf-8"
        )
        assert(
            r.body ==
            '<?xml version="1.0" encoding="UTF-8"?>\n'
            + '<xml>Original Content 2</xml>')

        # Access HTML transformation
        url = '/harvest/object/{0}/html'.format(object_id_1)
        r = app.get(url)
        assert_equals(r.status_int, 200)
        assert_equals(r.headers['Content-Type'],
                      'text/html; charset=utf-8')
        assert 'GEMINI record about' in r.body

        url = '/harvest/object/{0}/html/original'.format(object_id_1)
        r = app.get(url, status=404)
        assert_equals(r.status_int, 404)

        url = '/harvest/object/{0}/html'.format(object_id_2)
        r = app.get(url)
        assert_equals(r.status_int, 200)
        assert_equals(r.headers['Content-Type'],
                      'text/html; charset=utf-8')
        assert 'GEMINI record about' in r.body

        url = '/harvest/object/{0}/html/original'.format(object_id_2)
        r = app.get(url)
        assert_equals(r.status_int, 200)
        assert_equals(r.headers['Content-Type'],
                      'text/html; charset=utf-8')
        assert 'GEMINI record about' in r.body
            + "<xml>Original Content 2</xml>"
        )

@@ -1,66 +0,0 @@
import time
from urllib2 import urlopen
import os

from pylons import config
from nose.plugins.skip import SkipTest

from ckan.model import engine_is_sqlite

# copied from ckan/tests/__init__ to save importing it and therefore
# setting up Pylons.
class CkanServerCase:
    @staticmethod
    def _system(cmd):
        import commands
        (status, output) = commands.getstatusoutput(cmd)
        if status:
            raise Exception, "Couldn't execute cmd: %s: %s" % (cmd, output)

    @classmethod
    def _paster(cls, cmd, config_path_rel):
        config_path = os.path.join(config['here'], config_path_rel)
        cls._system('paster --plugin ckan %s --config=%s' % (cmd, config_path))

    @staticmethod
    def _start_ckan_server(config_file=None):
        if not config_file:
            config_file = config['__file__']
        config_path = config_file
        import subprocess
        process = subprocess.Popen(['paster', 'serve', config_path])
        return process

    @staticmethod
    def _wait_for_url(url='http://127.0.0.1:5000/', timeout=15):
        for i in range(int(timeout)*100):
            import urllib2
            import time
            try:
                response = urllib2.urlopen(url)
            except urllib2.URLError:
                time.sleep(0.01)
            else:
                break

    @staticmethod
    def _stop_ckan_server(process):
        pid = process.pid
        pid = int(pid)
        if os.system("kill -9 %d" % pid):
            raise Exception, "Can't kill foreign CKAN instance (pid: %d)." % pid

class CkanProcess(CkanServerCase):
    @classmethod
    def setup_class(cls):
        if engine_is_sqlite():
            raise SkipTest("Non-memory database needed for this test")

        cls.pid = cls._start_ckan_server()
        ## Don't need to init database, since it is same database as this process uses
        cls._wait_for_url()

    @classmethod
    def teardown_class(cls):
        cls._stop_ckan_server(cls.pid)
File diff suppressed because it is too large
@@ -1,7 +1,9 @@
# this is a namespace package
try:
    import pkg_resources

    pkg_resources.declare_namespace(__name__)
except ImportError:
    import pkgutil

    __path__ = pkgutil.extend_path(__path__, __name__)
@@ -6,4 +6,4 @@ class TestSpatialPlugin(p.SingletonPlugin):
    p.implements(p.IConfigurer, inherit=True)

    def update_config(self, config):
        p.toolkit.add_template_directory(config, 'templates')
        p.toolkit.add_template_directory(config, "templates")
@@ -7,122 +7,182 @@ from ckanext.spatial import validation

# other validation tests are in test_harvest.py

class TestValidation:

class TestValidation(object):
    def _get_file_path(self, file_name):
        return os.path.join(os.path.dirname(__file__), 'xml', file_name)
        return os.path.join(os.path.dirname(__file__), "xml", file_name)

    def get_validation_errors(self, validator, validation_test_filename):
        validation_test_filepath = self._get_file_path(validation_test_filename)
        validation_test_filepath = self._get_file_path(
            validation_test_filename
        )
        xml = etree.parse(validation_test_filepath)
        is_valid, errors = validator.is_valid(xml)

        return ';'.join([e[0] for e in errors])
        return ";".join([e[0] for e in errors])

    def test_iso19139_failure(self):
        errors = self.get_validation_errors(validation.ISO19139Schema,
                                            'iso19139/dataset-invalid.xml')
        errors = self.get_validation_errors(
            validation.ISO19139Schema, "iso19139/dataset-invalid.xml"
        )

        assert len(errors) > 0
        assert_in('Dataset schema (gmx.xsd)', errors)
        assert_in('{http://www.isotc211.org/2005/gmd}nosuchelement\': This element is not expected.', errors)
        assert_in("Dataset schema (gmx.xsd)", errors)
        assert_in(
            "{http://www.isotc211.org/2005/gmd}nosuchelement': This element is not expected.",
            errors,
        )

    def test_iso19139_pass(self):
        errors = self.get_validation_errors(validation.ISO19139Schema,
                                            'iso19139/dataset.xml')
        assert_equal(errors, '')
        errors = self.get_validation_errors(
            validation.ISO19139Schema, "iso19139/dataset.xml"
        )
        assert_equal(errors, "")

    # Gemini2.1 tests are basically the same as those in test_harvest.py but
    # a few little differences make it worth not removing them in
    # test_harvest

    def test_01_dataset_fail_iso19139_schema(self):
        errors = self.get_validation_errors(validation.ISO19139EdenSchema,
                                            'gemini2.1/validation/01_Dataset_Invalid_XSD_No_Such_Element.xml')
        errors = self.get_validation_errors(
            validation.ISO19139EdenSchema,
            "gemini2.1/validation/01_Dataset_Invalid_XSD_No_Such_Element.xml",
        )
        assert len(errors) > 0
        assert_in('(gmx.xsd)', errors)
        assert_in('\'{http://www.isotc211.org/2005/gmd}nosuchelement\': This element is not expected.', errors)
        assert_in("(gmx.xsd)", errors)
        assert_in(
            "'{http://www.isotc211.org/2005/gmd}nosuchelement': This element is not expected.",
            errors,
        )

    def test_02_dataset_fail_constraints_schematron(self):
        errors = self.get_validation_errors(validation.ConstraintsSchematron14,
                                            'gemini2.1/validation/02_Dataset_Invalid_19139_Missing_Data_Format.xml')
        errors = self.get_validation_errors(
            validation.ConstraintsSchematron14,
            "gemini2.1/validation/02_Dataset_Invalid_19139_Missing_Data_Format.xml",
        )
        assert len(errors) > 0
        assert_in('MD_Distribution / MD_Format: count(distributionFormat + distributorFormat) > 0', errors)
        assert_in(
            "MD_Distribution / MD_Format: count(distributionFormat + distributorFormat) > 0",
            errors,
        )

    def test_03_dataset_fail_gemini_schematron(self):
        errors = self.get_validation_errors(validation.Gemini2Schematron,
                                            'gemini2.1/validation/03_Dataset_Invalid_GEMINI_Missing_Keyword.xml')
        errors = self.get_validation_errors(
            validation.Gemini2Schematron,
            "gemini2.1/validation/03_Dataset_Invalid_GEMINI_Missing_Keyword.xml",
        )
        assert len(errors) > 0
        assert_in('Descriptive keywords are mandatory', errors)
        assert_in("Descriptive keywords are mandatory", errors)

    def assert_passes_all_gemini2_1_validation(self, xml_filepath):
        errs = self.get_validation_errors(validation.ISO19139EdenSchema,
                                          xml_filepath)
        assert not errs, 'ISO19139EdenSchema: ' + errs
        errs = self.get_validation_errors(validation.ConstraintsSchematron14,
                                          xml_filepath)
        assert not errs, 'ConstraintsSchematron14: ' + errs
        errs = self.get_validation_errors(validation.Gemini2Schematron,
                                          xml_filepath)
        assert not errs, 'Gemini2Schematron: ' + errs
        errs = self.get_validation_errors(
            validation.ISO19139EdenSchema, xml_filepath
        )
        assert not errs, "ISO19139EdenSchema: " + errs
        errs = self.get_validation_errors(
            validation.ConstraintsSchematron14, xml_filepath
        )
        assert not errs, "ConstraintsSchematron14: " + errs
        errs = self.get_validation_errors(
            validation.Gemini2Schematron, xml_filepath
        )
        assert not errs, "Gemini2Schematron: " + errs

    def test_04_dataset_valid(self):
        self.assert_passes_all_gemini2_1_validation('gemini2.1/validation/04_Dataset_Valid.xml')
        self.assert_passes_all_gemini2_1_validation(
            "gemini2.1/validation/04_Dataset_Valid.xml"
        )

    def test_05_series_fail_iso19139_schema(self):
        errors = self.get_validation_errors(validation.ISO19139EdenSchema,
                                            'gemini2.1/validation/05_Series_Invalid_XSD_No_Such_Element.xml')
        errors = self.get_validation_errors(
            validation.ISO19139EdenSchema,
            "gemini2.1/validation/05_Series_Invalid_XSD_No_Such_Element.xml",
        )
        assert len(errors) > 0
        assert_in('(gmx.xsd)', errors)
        assert_in('\'{http://www.isotc211.org/2005/gmd}nosuchelement\': This element is not expected.', errors)
        assert_in("(gmx.xsd)", errors)
        assert_in(
            "'{http://www.isotc211.org/2005/gmd}nosuchelement': This element is not expected.",
            errors,
        )

    def test_06_series_fail_constraints_schematron(self):
        errors = self.get_validation_errors(validation.ConstraintsSchematron14,
                                            'gemini2.1/validation/06_Series_Invalid_19139_Missing_Data_Format.xml')
        errors = self.get_validation_errors(
            validation.ConstraintsSchematron14,
            "gemini2.1/validation/06_Series_Invalid_19139_Missing_Data_Format.xml",
        )
        assert len(errors) > 0
        assert_in('MD_Distribution / MD_Format: count(distributionFormat + distributorFormat) > 0', errors)
        assert_in(
            "MD_Distribution / MD_Format: count(distributionFormat + distributorFormat) > 0",
            errors,
        )

    def test_07_series_fail_gemini_schematron(self):
        errors = self.get_validation_errors(validation.Gemini2Schematron,
                                            'gemini2.1/validation/07_Series_Invalid_GEMINI_Missing_Keyword.xml')
        errors = self.get_validation_errors(
            validation.Gemini2Schematron,
            "gemini2.1/validation/07_Series_Invalid_GEMINI_Missing_Keyword.xml",
        )
        assert len(errors) > 0
        assert_in('Descriptive keywords are mandatory', errors)
        assert_in("Descriptive keywords are mandatory", errors)

    def test_08_series_valid(self):
        self.assert_passes_all_gemini2_1_validation('gemini2.1/validation/08_Series_Valid.xml')
        self.assert_passes_all_gemini2_1_validation(
            "gemini2.1/validation/08_Series_Valid.xml"
        )

    def test_09_service_fail_iso19139_schema(self):
        errors = self.get_validation_errors(validation.ISO19139EdenSchema,
                                            'gemini2.1/validation/09_Service_Invalid_No_Such_Element.xml')
        errors = self.get_validation_errors(
            validation.ISO19139EdenSchema,
            "gemini2.1/validation/09_Service_Invalid_No_Such_Element.xml",
        )
        assert len(errors) > 0
        assert_in('(gmx.xsd & srv.xsd)', errors)
        assert_in('\'{http://www.isotc211.org/2005/gmd}nosuchelement\': This element is not expected.', errors)
        assert_in("(gmx.xsd & srv.xsd)", errors)
        assert_in(
            "'{http://www.isotc211.org/2005/gmd}nosuchelement': This element is not expected.",
            errors,
        )

    def test_10_service_fail_constraints_schematron(self):
        errors = self.get_validation_errors(validation.ConstraintsSchematron14,
                                            'gemini2.1/validation/10_Service_Invalid_19139_Level_Description.xml')
        errors = self.get_validation_errors(
            validation.ConstraintsSchematron14,
            "gemini2.1/validation/10_Service_Invalid_19139_Level_Description.xml",
        )
        assert len(errors) > 0
        assert_in("DQ_Scope: 'levelDescription' is mandatory if 'level' notEqual 'dataset' or 'series'.", errors)
        assert_in(
            "DQ_Scope: 'levelDescription' is mandatory if 'level' notEqual 'dataset' or 'series'.",
            errors,
        )

    def test_11_service_fail_gemini_schematron(self):
        errors = self.get_validation_errors(validation.Gemini2Schematron,
                                            'gemini2.1/validation/11_Service_Invalid_GEMINI_Service_Type.xml')
        errors = self.get_validation_errors(
            validation.Gemini2Schematron,
            "gemini2.1/validation/11_Service_Invalid_GEMINI_Service_Type.xml",
        )
        assert len(errors) > 0
        assert_in("Service type shall be one of 'discovery', 'view', 'download', 'transformation', 'invoke' or 'other' following INSPIRE generic names.", errors)
        assert_in(
            "Service type shall be one of 'discovery', 'view', 'download', 'transformation', 'invoke' or 'other' following INSPIRE generic names.",
            errors,
        )

    def test_12_service_valid(self):
        self.assert_passes_all_gemini2_1_validation('gemini2.1/validation/12_Service_Valid.xml')
        self.assert_passes_all_gemini2_1_validation(
            "gemini2.1/validation/12_Service_Valid.xml"
        )

    def test_13_dataset_fail_iso19139_schema_2(self):
        # This test Dataset has srv tags and only Service metadata should.
        errors = self.get_validation_errors(validation.ISO19139EdenSchema,
                                            'gemini2.1/validation/13_Dataset_Invalid_Element_srv.xml')
        errors = self.get_validation_errors(
            validation.ISO19139EdenSchema,
            "gemini2.1/validation/13_Dataset_Invalid_Element_srv.xml",
        )
        assert len(errors) > 0
        assert_in('(gmx.xsd)', errors)
        assert_in('Element \'{http://www.isotc211.org/2005/srv}SV_ServiceIdentification\': This element is not expected.', errors)
        assert_in("(gmx.xsd)", errors)
        assert_in(
            "Element '{http://www.isotc211.org/2005/srv}SV_ServiceIdentification': This element is not expected.",
            errors,
        )

    def test_schematron_error_extraction(self):
        validation_error_xml = '''
        validation_error_xml = """
<root xmlns:svrl="http://purl.oclc.org/dsdl/svrl">
  <svrl:failed-assert test="srv:serviceType/*[1] = 'discovery' or srv:serviceType/*[1] = 'view' or srv:serviceType/*[1] = 'download' or srv:serviceType/*[1] = 'transformation' or srv:serviceType/*[1] = 'invoke' or srv:serviceType/*[1] = 'other'" location="/*[local-name()='MD_Metadata' and namespace-uri()='http://www.isotc211.org/2005/gmd']/*[local-name()='identificationInfo' and namespace-uri()='http://www.isotc211.org/2005/gmd']/*[local-name()='SV_ServiceIdentification' and namespace-uri()='http://www.isotc211.org/2005/srv']">
    <svrl:text>

@@ -130,24 +190,27 @@ class TestValidation:
    </svrl:text>
  </svrl:failed-assert>
</root>
'''
"""
        failure_xml = etree.fromstring(validation_error_xml)
        fail_element = failure_xml.getchildren()[0]
        details = validation.SchematronValidator.extract_error_details(fail_element)
        details = validation.SchematronValidator.extract_error_details(
            fail_element
        )
        if isinstance(details, tuple):
            details = details[1]
        assert_in("srv:serviceType/*[1] = 'discovery'", details)
        assert_in("/*[local-name()='MD_Metadata'", details)
        assert_in("Service type shall be one of 'discovery'", details)

    def test_error_line_numbers(self):
        file_path = self._get_file_path('iso19139/dataset-invalid.xml')
        file_path = self._get_file_path("iso19139/dataset-invalid.xml")
        xml = etree.parse(file_path)
        is_valid, profile, errors = validation.Validators(profiles=['iso19139']).is_valid(xml)
        is_valid, profile, errors = validation.Validators(
            profiles=["iso19139"]
        ).is_valid(xml)
        assert not is_valid
        assert len(errors) == 2

        message, line = errors[1]
        assert 'This element is not expected' in message
        assert "This element is not expected" in message
        assert line == 3

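The Validators class exercised in test_error_line_numbers is also the ad-hoc entry point for checking a document by hand. A minimal sketch against a local file (the path is illustrative):

from lxml import etree
from ckanext.spatial import validation

xml = etree.parse("dataset.xml")  # illustrative path
is_valid, profile, errors = validation.Validators(profiles=["iso19139"]).is_valid(xml)
for message, line in errors:
    print("line %s: %s" % (line, message))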
@@ -1,27 +1,34 @@
from __future__ import print_function

import os

import SimpleHTTPServer
import SocketServer
try:
    from http.server import SimpleHTTPRequestHandler
    from socketserver import TCPServer
except ImportError:
    from SimpleHTTPServer import SimpleHTTPRequestHandler
    from SocketServer import TCPServer

from threading import Thread


PORT = 8999

def serve(port=PORT):
    '''Serves test XML files over HTTP'''

    # Make sure we serve from the tests' XML directory
    os.chdir(os.path.join(os.path.dirname(os.path.abspath(__file__)),
                          'xml'))

    Handler = SimpleHTTPServer.SimpleHTTPRequestHandler

    class TestServer(SocketServer.TCPServer):
def serve(port=PORT):
    """Serves test XML files over HTTP"""

    # Make sure we serve from the tests' XML directory
    os.chdir(os.path.join(os.path.dirname(os.path.abspath(__file__)), "xml"))

    Handler = SimpleHTTPRequestHandler

    class TestServer(TCPServer):
        allow_reuse_address = True

    httpd = TestServer(("", PORT), Handler)

    print 'Serving test HTTP server at port', PORT

    print("Serving test HTTP server at port", PORT)

    httpd_thread = Thread(target=httpd.serve_forever)
    httpd_thread.setDaemon(True)
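serve() hands the server loop to a daemon thread; the httpd_thread.start() call falls just outside this hunk, but the intended use is a one-shot, module-level start so fixture XML stays reachable for the life of the test process. A sketch (fixture path illustrative):

serve()  # serves the tests' xml/ directory in the background on port 8999
# fixtures are then fetchable over plain HTTP, e.g.
# http://127.0.0.1:8999/gemini2.1/validation/04_Dataset_Valid.xml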
@@ -0,0 +1,205 @@
# -*- coding: utf-8 -*-

from __future__ import print_function
import os
import sys

import six

from pkg_resources import resource_stream
import logging
from ckan.lib.helpers import json
from lxml import etree
from pprint import pprint

from ckan import model
from ckanext.spatial.lib import save_package_extent
from ckanext.spatial.lib.reports import validation_report
from ckanext.spatial.harvesters import SpatialHarvester
from ckanext.spatial.model import ISODocument

from ckantoolkit import config


log = logging.getLogger(__name__)


def report(pkg=None):

    if pkg:
        package_ref = six.text_type(pkg)
        pkg = model.Package.get(package_ref)
        if not pkg:
            print('Package ref "%s" not recognised' % package_ref)
            sys.exit(1)

    report = validation_report(package_id=pkg.id)
    for row in report.get_rows_html_formatted():
        print()
        for i, col_name in enumerate(report.column_names):
            print(' %s: %s' % (col_name, row[i]))


def validate_file(metadata_filepath):

    if not os.path.exists(metadata_filepath):
        print('Filepath %s not found' % metadata_filepath)
        sys.exit(1)
    with open(metadata_filepath, 'rb') as f:
        metadata_xml = f.read()

    validators = SpatialHarvester()._get_validator()
    print('Validators: %r' % validators.profiles)
    try:
        xml_string = metadata_xml.encode("utf-8")
    except UnicodeDecodeError as e:
        print('ERROR: Unicode Error reading file \'%s\': %s' % \
              (metadata_filepath, e))
        sys.exit(1)
    xml = etree.fromstring(xml_string)

    # XML validation
    valid, errors = validators.is_valid(xml)

    # CKAN read of values
    if valid:
        try:
            iso_document = ISODocument(xml_string)
            iso_values = iso_document.read_values()
        except Exception as e:
            valid = False
            errors.append(
                'CKAN exception reading values from ISODocument: %s' % e)

    print('***************')
    print('Summary')
    print('***************')
    print('File: \'%s\'' % metadata_filepath)
    print('Valid: %s' % valid)
    if not valid:
        print('Errors:')
        print(pprint(errors))
    print('***************')


def report_csv(csv_filepath):
    from ckanext.spatial.lib.reports import validation_report
    report = validation_report()
    with open(csv_filepath, 'wb') as f:
        f.write(report.get_csv())


def initdb(srid=None):
    if srid:
        srid = six.text_type(srid)

    from ckanext.spatial.model import setup as db_setup

    db_setup(srid)

    print('DB tables created')


def update_extents():
    from ckan.model import PackageExtra, Package, Session
    conn = Session.connection()
    packages = [extra.package \
                for extra in \
                Session.query(PackageExtra).filter(PackageExtra.key == 'spatial').all()]

    errors = []
    count = 0
    for package in packages:
        try:
            value = package.extras['spatial']
            log.debug('Received: %r' % value)
            geometry = json.loads(value)

            count += 1
        except ValueError as e:
            errors.append(u'Package %s - Error decoding JSON object: %s' %
                          (package.id, six.text_type(e)))
        except TypeError as e:
            errors.append(u'Package %s - Error decoding JSON object: %s' %
                          (package.id, six.text_type(e)))

        save_package_extent(package.id, geometry)

    Session.commit()

    if errors:
        msg = 'Errors were found:\n%s' % '\n'.join(errors)
        print(msg)

    msg = "Done. Extents generated for %i out of %i packages" % (count,
                                                                 len(packages))

    print(msg)


def get_xslt(original=False):
    if original:
        config_option = \
            'ckanext.spatial.harvest.xslt_html_content_original'
    else:
        config_option = 'ckanext.spatial.harvest.xslt_html_content'

    xslt_package = None
    xslt_path = None
    xslt = config.get(config_option, None)
    if xslt:
        if ':' in xslt:
            xslt = xslt.split(':')
            xslt_package = xslt[0]
            xslt_path = xslt[1]
        else:
            log.error(
                'XSLT should be defined in the form <package>:<path>'
                ', eg ckanext.myext:templates/my.xslt')

    return xslt_package, xslt_path
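get_xslt expects the config value in <package>:<path> form, exactly as its error message says. A sketch of the round trip (option value illustrative, taken from that message):

# With the CKAN config containing, e.g.:
#   ckanext.spatial.harvest.xslt_html_content = ckanext.myext:templates/my.xslt
xslt_package, xslt_path = get_xslt()
# -> ("ckanext.myext", "templates/my.xslt")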

def get_harvest_object_original_content(id):
    from ckanext.harvest.model import HarvestObject, HarvestObjectExtra

    extra = model.Session.query(
        HarvestObjectExtra
    ).join(HarvestObject).filter(HarvestObject.id == id).filter(
        HarvestObjectExtra.key == 'original_document'
    ).first()

    if extra:
        return extra.value
    else:
        return None


def get_harvest_object_content(id):
    from ckanext.harvest.model import HarvestObject
    obj = model.Session.query(HarvestObject).filter(HarvestObject.id == id).first()
    if obj:
        return obj.content
    else:
        return None


def _transform_to_html(content, xslt_package=None, xslt_path=None):

    xslt_package = xslt_package or __name__
    xslt_path = xslt_path or \
        '../templates/ckanext/spatial/gemini2-html-stylesheet.xsl'

    # optimise -- read transform only once and compile rather
    # than at each request
    with resource_stream(xslt_package, xslt_path) as style:
        style_xml = etree.parse(style)
        transformer = etree.XSLT(style_xml)

    xml = etree.parse(six.StringIO(content and six.text_type(content)))
    html = transformer(xml)

    result = etree.tostring(html, pretty_print=True)

    return result
@ -1,3 +1,4 @@
+from __future__ import absolute_import
 # this is a namespace package
 try:
     import pkg_resources

@ -6,4 +7,4 @@ except ImportError:
     import pkgutil
     __path__ = pkgutil.extend_path(__path__, __name__)

-from validation import *
+from .validation import *
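The hunk above is the heart of the Python 3 migration for this package: with ``absolute_import`` in effect (and always on Python 3), a bare ``from validation import *`` is looked up on ``sys.path`` only and no longer falls back to the importing package's own directory, so the sibling module needs an explicit relative import. A stdlib-only sketch of the lookup rule, assuming no third-party ``validation`` package is installed:

    # On Python 3 the import system searches sys.path for 'validation' and
    # does not consult the importing package's directory.
    import importlib.util

    assert importlib.util.find_spec('validation') is None  # no top-level module
    # Inside ckanext/spatial/validation/__init__.py the working form is:
    # from .validation import *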
@ -256,7 +256,7 @@ class SchematronValidator(BaseValidator):
             "xml/schematron/iso_abstract_expand.xsl",
             "xml/schematron/iso_svrl_for_xslt1.xsl",
         ]
-        if isinstance(schema, file):
+        if hasattr(schema, 'read'):
             compiled = etree.parse(schema)
         else:
             compiled = schema
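Python 3 removed the ``file`` builtin, so the validator now duck-types its input: anything exposing a ``read`` method is treated as a parseable stream. A self-contained sketch of the new check:

    # hasattr(schema, 'read') accepts real file objects, io.BytesIO,
    # io.StringIO, and any other file-like object, on Python 2 and 3 alike.
    import io

    for schema in (io.BytesIO(b'<schema/>'), io.StringIO(u'<schema/>')):
        assert hasattr(schema, 'read')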
@ -0,0 +1,104 @@
# -*- coding: utf-8 -*-

import logging

from flask import Blueprint, make_response

import ckan.lib.helpers as h
import ckan.plugins.toolkit as tk
from ckantoolkit import request
from ckan.views.api import _finish_ok, _finish_bad_request

from ckanext.spatial.lib import get_srid, validate_bbox, bbox_query
from ckanext.spatial import util


log = logging.getLogger(__name__)

api = Blueprint("spatial_api", __name__)


def spatial_query(register):
    error_400_msg = \
        'Please provide a suitable bbox parameter [minx,miny,maxx,maxy]'

    if 'bbox' not in request.args:
        return _finish_bad_request(error_400_msg)

    bbox = validate_bbox(request.params['bbox'])

    if not bbox:
        return _finish_bad_request(error_400_msg)

    srid = get_srid(request.args.get('crs')) if 'crs' in \
        request.args else None

    extents = bbox_query(bbox, srid)

    ids = [extent.package_id for extent in extents]
    output = dict(count=len(ids), results=ids)

    return _finish_ok(output)


api.add_url_rule('/api/2/search/<register>/geo', view_func=spatial_query)

harvest_metadata = Blueprint("spatial_harvest_metadata", __name__)


def harvest_object_redirect_xml(id):
    return h.redirect_to('/harvest/object/{}'.format(id))


def harvest_object_redirect_html(id):
    return h.redirect_to('/harvest/object/{}/html'.format(id))


def display_xml_original(id):
    content = util.get_harvest_object_original_content(id)

    if not content:
        return tk.abort(404)

    headers = {'Content-Type': 'application/xml; charset=utf-8'}

    if '<?xml' not in content.split('\n')[0]:
        content = u'<?xml version="1.0" encoding="UTF-8"?>\n' + content
    return make_response((content, 200, headers))


def display_html(id):
    content = util.get_harvest_object_content(id)

    if not content:
        return tk.abort(404)
    headers = {'Content-Type': 'text/html; charset=utf-8'}

    xslt_package, xslt_path = util.get_xslt()
    content = util.transform_to_html(content, xslt_package, xslt_path)
    return make_response((content, 200, headers))


def display_html_original(id):
    content = util.get_harvest_object_original_content(id)

    if content is None:
        return tk.abort(404)
    headers = {'Content-Type': 'text/html; charset=utf-8'}

    xslt_package, xslt_path = util.get_xslt(original=True)
    content = util.transform_to_html(content, xslt_package, xslt_path)
    return make_response((content, 200, headers))


harvest_metadata.add_url_rule('/api/2/rest/harvestobject/<id>/xml',
                              view_func=harvest_object_redirect_xml)
harvest_metadata.add_url_rule('/api/2/rest/harvestobject/<id>/html',
                              view_func=harvest_object_redirect_html)

harvest_metadata.add_url_rule('/harvest/object/<id>/original',
                              view_func=display_xml_original)
harvest_metadata.add_url_rule('/harvest/object/<id>/html',
                              view_func=display_html)
harvest_metadata.add_url_rule('/harvest/object/<id>/html/original',
                              view_func=display_html_original)
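For illustration, a minimal sketch of querying the ``/api/2/search/<register>/geo`` route registered above. The host and port and the use of the ``requests`` library are assumptions; the bbox reuses the default map extent from ``test.ini``:

    # Hypothetical client call against the spatial_query view; a successful
    # response is JSON of the form {"count": <int>, "results": [<package id>, ...]}.
    import requests

    resp = requests.get(
        'http://localhost:5000/api/2/search/dataset/geo',
        params={'bbox': '-6.88,49.74,0.50,59.2', 'crs': '4326'},
    )
    resp.raise_for_status()
    print(resp.json())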
@ -0,0 +1,5 @@
# -*- coding: utf-8 -*-

pytest_plugins = [
    u'ckanext.spatial.tests.fixtures',
]
@ -1,8 +1,5 @@
--e git+https://github.com/ckan/ckan#egg=ckan
--r https://raw.githubusercontent.com/ckan/ckan/master/requirements.txt
-GeoAlchemy>=0.6
-OWSLib==0.8.6
-lxml>=2.3
-pyparsing==1.5.6
-Sphinx==1.2.3
-sphinx-rtd-theme==0.1.7
+-r requirements.txt
+Sphinx==1.8.5
+sphinx-rtd-theme==0.4.3
@ -11,11 +11,9 @@
 </p>

 <p>
-  <a href="https://github.com/okfn/ckanext-spatial">Source</a>
+  <a href="https://github.com/ckan/ckanext-spatial">Source</a>
   —
-  <a href="https://github.com/okfn/ckanext-spatial/issues">Issues</a>
-  —
-  <a href="http://lists.okfn.org/mailman/listinfo/ckan-dev">Mailing List</a>
+  <a href="https://github.com/ckan/ckanext-spatial/issues">Issues</a>
   —
   <a href="http://twitter.com/CKANProject">Twitter @CKANProject</a>
 </p>
@ -41,7 +41,7 @@ master_doc = 'index'

 # General information about the project.
 project = u'ckanext-spatial'
-copyright = u'2015, Open Knowledge'
+copyright = u'© 2011-2021 <a href="https://okfn.org/">Open Knowledge Foundation</a> and <a href="https://github.com/ckan/ckanext-spatial/graphs/contributors">contributors</a>.'

 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
doc/csw.rst
@ -55,7 +55,7 @@ All necessary tasks are done with the ``ckan-pycsw`` command. To get more
 details of its usage, run the following::

     cd /usr/lib/ckan/default/src/ckanext-spatial
-    paster ckan-pycsw --help
+    python bin/ckan_pycsw.py --help


 Setup

@ -114,11 +114,11 @@ Setup

 The rest of the options are described `here <http://docs.pycsw.org/en/latest/configuration.html>`_.

-4. Setup the pycsw table. This is done with the ``ckan-pycsw`` paster command
+4. Set up the pycsw table. This is done with the ``ckan-pycsw`` script
    (Remember to have the virtualenv activated when running it)::

        cd /usr/lib/ckan/default/src/ckanext-spatial
-       paster ckan-pycsw setup -p /etc/ckan/default/pycsw.cfg
+       python bin/ckan_pycsw.py setup -p /etc/ckan/default/pycsw.cfg

    At this point you should be ready to run pycsw with the wsgi script that it
    includes::

@ -135,7 +135,7 @@ Setup
    command for this::

        cd /usr/lib/ckan/default/src/ckanext-spatial
-       paster ckan-pycsw load -p /etc/ckan/default/pycsw.cfg
+       python bin/ckan_pycsw.py load -p /etc/ckan/default/pycsw.cfg

    When the loading is finished, check that results are returned when visiting
    this link:

@ -155,7 +155,7 @@ values can be set in the pycsw configuration ``metadata:main`` section. If you
 would like the CSW service metadata keywords to be reflective of the CKAN
 tags, run the following convenience command::

-    paster ckan-pycsw set_keywords -p /etc/ckan/default/pycsw.cfg
+    python bin/ckan_pycsw.py set_keywords -p /etc/ckan/default/pycsw.cfg

 Note that you must have privileges to write to the pycsw configuration file.


@ -170,7 +170,7 @@ keep CKAN and pycsw in sync, and serve pycsw with Apache + mod_wsgi like CKAN.
 and copy the following lines::

     # m h dom mon dow command
-    0 * * * * /usr/lib/ckan/default/bin/paster --plugin=ckanext-spatial ckan-pycsw load -p /etc/ckan/default/pycsw.cfg
+    0 * * * * /var/lib/ckan/default/bin/python /var/lib/ckan/default/src/ckanext-spatial/bin/ckan_pycsw.py load -p /etc/ckan/default/pycsw.cfg

 This particular example will run the load command every hour. You can of
 course adjust this schedule, for instance loading less frequently on very
 large instances.
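For instance, a hedged variant of the crontab entry above that runs the load every six hours instead of hourly (paths unchanged from the example)::

    0 */6 * * * /var/lib/ckan/default/bin/python /var/lib/ckan/default/src/ckanext-spatial/bin/ckan_pycsw.py load -p /etc/ckan/default/pycsw.cfg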
@ -140,6 +140,10 @@ plugins on the configuration ini file (eg when restarting Apache).
 If for some reason you need to explicitly create the table beforehand, you can
 do it with the following command (with the virtualenv activated)::

+    (pyenv) $ ckan --config=mysite.ini spatial initdb [srid]
+
+On CKAN 2.8 and below use::
+
     (pyenv) $ paster --plugin=ckanext-spatial spatial initdb [srid] --config=mysite.ini

 You can define the SRID of the geometry column. Default is 4326. If you are not
@ -61,6 +61,10 @@ synchronize the information stored in the extra with the geometry table.
 If you already have datasets when you enable Spatial Search then you'll need to
 reindex them::

+    ckan --config=/etc/ckan/default/development.ini search-index rebuild
+
+.. note:: For CKAN 2.8 and below use::
+
     paster --plugin=ckan search-index rebuild --config=/etc/ckan/default/development.ini
@ -0,0 +1 @@
requirements-py2.txt
@ -1,8 +0,0 @@
-GeoAlchemy>=0.6
-GeoAlchemy2==0.5.0
-Shapely>=1.2.13
-OWSLib==0.8.6
-lxml>=2.3
-argparse
-pyparsing>=2.1.10
-requests>=1.1.0
@ -0,0 +1 @@
requirements.txt
@ -0,0 +1,11 @@
ckantoolkit
GeoAlchemy>=0.6
GeoAlchemy2==0.5.0
Shapely>=1.2.13
pyproj==2.2.2
OWSLib==0.18.0
lxml>=2.3
argparse
pyparsing>=2.1.10
requests>=1.1.0
six
@ -0,0 +1,11 @@
ckantoolkit
GeoAlchemy>=0.6
GeoAlchemy2==0.5.0
Shapely>=1.2.13
pyproj==2.6.1
OWSLib==0.18.0
lxml>=2.3
argparse
pyparsing>=2.1.10
requests>=1.1.0
six
@ -0,0 +1,7 @@
[tool:pytest]
norecursedirs=ckanext/spatial/tests/nose

filterwarnings =
    ignore::sqlalchemy.exc.SADeprecationWarning
    ignore::sqlalchemy.exc.SAWarning
    ignore::DeprecationWarning
setup.py
@ -45,6 +45,5 @@ setup(

     [ckan.test_plugins]
     test_spatial_plugin = ckanext.spatial.tests.test_plugin.plugin:TestSpatialPlugin
-
     """,
 )
test.ini
@ -19,10 +19,8 @@ ckan.spatial.srid = 4326
 ckan.spatial.default_map_extent=-6.88,49.74,0.50,59.2
 ckan.spatial.testing = true
 ckan.spatial.validator.profiles = iso19139,constraints,gemini2
-ckanext.spatial.search_backend = postgis
 ckan.harvest.mq.type = redis
 # NB: other test configuration should go in test-core.ini, which is
 # what the postgres tests use.
-

 # Logging configuration
 [loggers]