2013-01-15 20:29:17 +01:00
|
|
|
import types
|
2011-03-28 16:52:43 +02:00
|
|
|
from logging import getLogger
|
|
|
|
|
2013-03-13 00:31:06 +01:00
|
|
|
from sqlalchemy.util import OrderedDict
|
|
|
|
|
2012-11-29 12:48:36 +01:00
|
|
|
from ckan import logic
|
2012-12-12 12:49:55 +01:00
|
|
|
from ckan import model
|
2012-08-10 10:59:18 +02:00
|
|
|
import ckan.plugins as p
|
2012-11-29 12:48:36 +01:00
|
|
|
from ckan.lib.plugins import DefaultDatasetForm
|
2015-12-09 15:44:36 +01:00
|
|
|
# ``DefaultTranslation`` was only added in CKAN 2.5; provide a no-op
# stand-in so this plugin still imports (and the class below can inherit
# from it) on older CKAN versions.
try:
    from ckan.lib.plugins import DefaultTranslation
except ImportError:
    class DefaultTranslation():
        pass
|
|
|
|
|
2012-11-29 12:48:36 +01:00
|
|
|
from ckan.lib.navl import dictization_functions
|
2012-08-10 10:59:18 +02:00
|
|
|
|
2012-12-12 12:49:55 +01:00
|
|
|
from ckanext.harvest import logic as harvest_logic
|
|
|
|
|
2012-03-05 18:10:02 +01:00
|
|
|
from ckanext.harvest.model import setup as model_setup
|
2012-12-12 12:49:55 +01:00
|
|
|
from ckanext.harvest.model import HarvestSource, HarvestJob, HarvestObject
|
2016-05-11 13:29:53 +02:00
|
|
|
from ckanext.harvest.log import DBLogHandler
|
2012-11-29 12:48:36 +01:00
|
|
|
|
|
|
|
|
2015-12-09 15:44:36 +01:00
|
|
|
|
2011-03-28 16:52:43 +02:00
|
|
|
# Module-level logger for the whole plugin.
log = getLogger(__name__)

# NOTE: a module-level ``assert not log.disabled`` used to live here; it was
# removed because ``assert`` is stripped under ``python -O`` and an
# import-time environment check should never crash the module anyway.

# Name of the custom dataset type that represents a harvest source.
DATASET_TYPE_NAME = 'harvest'
|
2012-11-29 12:48:36 +01:00
|
|
|
|
2015-12-09 15:44:36 +01:00
|
|
|
|
|
|
|
class Harvest(p.SingletonPlugin, DefaultDatasetForm, DefaultTranslation):
    '''Main plugin of the harvest extension.

    Registers the ``harvest`` dataset type and wires the extension into
    CKAN: routes, templates, logic actions, auth functions, template
    helpers and search facets. The IPackageController hooks keep the
    backing HarvestSource objects in sync with their dataset
    counterparts.
    '''

    p.implements(p.IConfigurable)
    p.implements(p.IRoutes, inherit=True)
    p.implements(p.IConfigurer, inherit=True)
    p.implements(p.IActions)
    p.implements(p.IAuthFunctions)
    p.implements(p.IDatasetForm)
    p.implements(p.IPackageController, inherit=True)
    p.implements(p.ITemplateHelpers)
    p.implements(p.IFacets, inherit=True)
    if p.toolkit.check_ckan_version(min_version='2.5.0'):
        p.implements(p.ITranslation, inherit=True)

    # True while configure() runs, so hooks fired during startup
    # (eg after_create) can tell they were not user-triggered.
    startup = False

    ## IPackageController

    def after_create(self, context, data_dict):
        '''Create the backing HarvestSource when a harvest dataset is created.'''
        if data_dict.get('type') == DATASET_TYPE_NAME and not self.startup:
            # Create an actual HarvestSource object
            _create_harvest_source_object(context, data_dict)

    def after_update(self, context, data_dict):
        '''Sync the backing HarvestSource when a harvest dataset is updated.'''
        if data_dict.get('type') == DATASET_TYPE_NAME:
            # Edit the actual HarvestSource object
            _update_harvest_source_object(context, data_dict)

    def after_delete(self, context, data_dict):
        '''Flag the backing HarvestSource as inactive when its dataset is deleted.'''
        package_dict = p.toolkit.get_action('package_show')(
            context, {'id': data_dict['id']})

        if package_dict.get('type') == DATASET_TYPE_NAME:
            # Delete the actual HarvestSource object
            _delete_harvest_source_object(context, package_dict)

    def before_view(self, data_dict):
        '''On CKAN <= 2.1, decorate harvested datasets with harvest extras.'''
        # TODO: check_ckan_version should be more clever than this
        if p.toolkit.check_ckan_version(max_version='2.1.99') and (
                'type' not in data_dict or data_dict['type'] != DATASET_TYPE_NAME):
            # This is a normal dataset, check if it was harvested and if so,
            # add info about the HarvestObject and HarvestSource
            harvest_object = model.Session.query(HarvestObject) \
                .filter(HarvestObject.package_id == data_dict['id']) \
                .filter(HarvestObject.current == True) \
                .first()

            if harvest_object:
                for key, value in [
                        ('harvest_object_id', harvest_object.id),
                        ('harvest_source_id', harvest_object.source.id),
                        ('harvest_source_title', harvest_object.source.title),
                        ]:
                    _add_extra(data_dict, key, value)
        return data_dict

    def before_search(self, search_params):
        '''Prevents the harvesters being shown in dataset search results.'''
        fq = search_params.get('fq', '')
        if 'dataset_type:harvest' not in fq:
            fq = u"{0} -dataset_type:harvest".format(fq)
            search_params.update({'fq': fq})

        return search_params

    def after_show(self, context, data_dict):
        '''Enrich shown package dicts with harvest information.'''
        if data_dict.get('type') == DATASET_TYPE_NAME:
            # This is a harvest source dataset, add extra info from the
            # HarvestSource object
            source = HarvestSource.get(data_dict['id'])
            if not source:
                log.error('Harvest source not found for dataset {0}'.format(data_dict['id']))
                return data_dict

            st_action_name = 'harvest_source_show_status'
            try:
                status_action = p.toolkit.get_action(st_action_name)
            except KeyError:
                # Our actions may not be registered yet; force a reload
                logic.clear_actions_cache()
                status_action = p.toolkit.get_action(st_action_name)

            data_dict['status'] = status_action(context, {'id': source.id})

        else:
            # This is a normal dataset, check if it was harvested and if so,
            # add info about the HarvestObject and HarvestSource
            harvest_object = model.Session.query(HarvestObject) \
                .filter(HarvestObject.package_id == data_dict['id']) \
                .filter(HarvestObject.current == True) \
                .first()

            # If the harvest extras are there, remove them. This can happen eg
            # when calling package_update or resource_update, which call
            # package_show
            if data_dict.get('extras'):
                data_dict['extras'][:] = [
                    e for e in data_dict.get('extras', [])
                    if e['key'] not in ('harvest_object_id',
                                        'harvest_source_id',
                                        'harvest_source_title',)]

            # We only want to add these extras at index time so they are part
            # of the cached data_dict used to display, search results etc. We
            # don't want them added when editing the dataset, otherwise we get
            # duplicated key errors.
            # The only way to detect indexing right now is checking that
            # validate is set to False.
            if harvest_object and not context.get('validate', True):
                for key, value in [
                        ('harvest_object_id', harvest_object.id),
                        ('harvest_source_id', harvest_object.source.id),
                        ('harvest_source_title', harvest_object.source.title),
                        ]:
                    _add_extra(data_dict, key, value)

        return data_dict

    ## IDatasetForm

    def is_fallback(self):
        return False

    def package_types(self):
        return [DATASET_TYPE_NAME]

    def package_form(self):
        return 'source/new_source_form.html'

    def search_template(self):
        return 'source/search.html'

    def read_template(self):
        return 'source/read.html'

    def new_template(self):
        return 'source/new.html'

    def edit_template(self):
        return 'source/edit.html'

    def setup_template_variables(self, context, data_dict):
        p.toolkit.c.harvest_source = p.toolkit.c.pkg_dict
        p.toolkit.c.dataset_type = DATASET_TYPE_NAME

    def create_package_schema(self):
        '''
        Returns the schema for mapping package data from a form to a format
        suitable for the database.
        '''
        from ckanext.harvest.logic.schema import harvest_source_create_package_schema
        schema = harvest_source_create_package_schema()
        if self.startup:
            # During startup sources are recreated with a predefined id
            schema['id'] = [unicode]

        return schema

    def update_package_schema(self):
        '''
        Returns the schema for mapping package data from a form to a format
        suitable for the database.
        '''
        from ckanext.harvest.logic.schema import harvest_source_update_package_schema
        schema = harvest_source_update_package_schema()

        return schema

    def show_package_schema(self):
        '''
        Returns the schema for mapping package data from the database into a
        format suitable for the form
        '''
        from ckanext.harvest.logic.schema import harvest_source_show_package_schema

        return harvest_source_show_package_schema()

    ## IConfigurable

    def configure(self, config):
        '''One-off plugin setup: harvest tables and database logging.'''
        self.startup = True

        # Setup harvest model
        model_setup()

        # Configure database logger
        _configure_db_logger(config)

        self.startup = False

    ## IRoutes

    def before_map(self, map):
        '''Register the harvest controller routes.'''
        # Most of the routes are defined via the IDatasetForm interface
        # (ie they are the ones for a package type)
        controller = 'ckanext.harvest.controllers.view:ViewController'

        map.connect('{0}_delete'.format(DATASET_TYPE_NAME),
                    '/' + DATASET_TYPE_NAME + '/delete/:id',
                    controller=controller, action='delete')
        map.connect('{0}_refresh'.format(DATASET_TYPE_NAME),
                    '/' + DATASET_TYPE_NAME + '/refresh/:id',
                    controller=controller, action='refresh')
        map.connect('{0}_admin'.format(DATASET_TYPE_NAME),
                    '/' + DATASET_TYPE_NAME + '/admin/:id',
                    controller=controller, action='admin')
        map.connect('{0}_about'.format(DATASET_TYPE_NAME),
                    '/' + DATASET_TYPE_NAME + '/about/:id',
                    controller=controller, action='about')
        map.connect('{0}_clear'.format(DATASET_TYPE_NAME),
                    '/' + DATASET_TYPE_NAME + '/clear/:id',
                    controller=controller, action='clear')

        map.connect('harvest_job_list',
                    '/' + DATASET_TYPE_NAME + '/{source}/job',
                    controller=controller, action='list_jobs')
        map.connect('harvest_job_show_last',
                    '/' + DATASET_TYPE_NAME + '/{source}/job/last',
                    controller=controller, action='show_last_job')
        map.connect('harvest_job_show',
                    '/' + DATASET_TYPE_NAME + '/{source}/job/{id}',
                    controller=controller, action='show_job')
        map.connect('harvest_job_abort',
                    '/' + DATASET_TYPE_NAME + '/{source}/job/{id}/abort',
                    controller=controller, action='abort_job')

        map.connect('harvest_object_show',
                    '/' + DATASET_TYPE_NAME + '/object/:id',
                    controller=controller, action='show_object')
        map.connect('harvest_object_for_dataset_show',
                    '/dataset/harvest_object/:id',
                    controller=controller, action='show_object',
                    ref_type='dataset')

        org_controller = 'ckanext.harvest.controllers.organization:OrganizationController'
        map.connect('{0}_org_list'.format(DATASET_TYPE_NAME),
                    '/organization/' + DATASET_TYPE_NAME + '/' + '{id}',
                    controller=org_controller, action='source_list')

        return map

    ## IConfigurer

    def update_config(self, config):
        '''Register the extension templates and static resources.'''
        if not p.toolkit.check_ckan_version(min_version='2.0'):
            # ``assert 0, msg`` would be silently stripped under ``-O``;
            # raise the same exception type explicitly instead.
            raise AssertionError(
                'CKAN before 2.0 not supported by ckanext-harvest - '
                'genshi templates not supported any more')
        if p.toolkit.asbool(config.get('ckan.legacy_templates', False)):
            log.warn('Old genshi templates not supported any more by '
                     'ckanext-harvest so you should not set the '
                     'ckan.legacy_templates option.')
        p.toolkit.add_template_directory(config, 'templates')
        p.toolkit.add_public_directory(config, 'public')
        p.toolkit.add_resource('fanstatic_library', 'ckanext-harvest')
        p.toolkit.add_resource('public/ckanext/harvest/javascript',
                               'harvest-extra-field')

    ## IActions

    def get_actions(self):
        '''Expose the harvest logic actions to CKAN.'''
        module_root = 'ckanext.harvest.logic.action'
        action_functions = _get_logic_functions(module_root)

        return action_functions

    ## IAuthFunctions

    def get_auth_functions(self):
        '''Expose the harvest auth functions to CKAN.'''
        module_root = 'ckanext.harvest.logic.auth'
        auth_functions = _get_logic_functions(module_root)

        return auth_functions

    ## ITemplateHelpers

    def get_helpers(self):
        '''Register the harvest template helper functions.'''
        from ckanext.harvest import helpers as harvest_helpers
        return {
            'package_list_for_source': harvest_helpers.package_list_for_source,
            'package_count_for_source': harvest_helpers.package_count_for_source,
            'harvesters_info': harvest_helpers.harvesters_info,
            'harvester_types': harvest_helpers.harvester_types,
            'harvest_frequencies': harvest_helpers.harvest_frequencies,
            'link_for_harvest_object': harvest_helpers.link_for_harvest_object,
            'harvest_source_extra_fields': harvest_helpers.harvest_source_extra_fields,
            'bootstrap_version': harvest_helpers.bootstrap_version,
        }

    ## IFacets

    def dataset_facets(self, facets_dict, package_type):
        '''Use harvest-specific facets on harvest source search pages.'''
        # NOTE: the original used the Python-2-only ``<>`` operator here,
        # which is a syntax error on Python 3.
        if package_type != 'harvest':
            return facets_dict

        return OrderedDict([('frequency', 'Frequency'),
                            ('source_type', 'Type'),
                            ])

    def organization_facets(self, facets_dict, organization_type, package_type):
        '''Use harvest-specific facets on organization harvest source pages.'''
        if package_type != 'harvest':
            return facets_dict

        return OrderedDict([('frequency', 'Frequency'),
                            ('source_type', 'Type'),
                            ])
|
2012-12-12 12:54:50 +01:00
|
|
|
|
2013-03-21 03:31:34 +01:00
|
|
|
def _add_extra(data_dict, key, value):
|
|
|
|
if not 'extras' in data_dict:
|
|
|
|
data_dict['extras'] = []
|
|
|
|
|
|
|
|
data_dict['extras'].append({
|
|
|
|
'key': key, 'value': value, 'state': u'active'
|
|
|
|
})
|
|
|
|
|
2013-01-09 18:32:05 +01:00
|
|
|
def _get_logic_functions(module_root, logic_functions = {}):
|
2012-03-02 17:49:39 +01:00
|
|
|
|
2015-11-11 04:49:25 +01:00
|
|
|
for module_name in ['get', 'create', 'update', 'patch', 'delete']:
|
2013-01-09 18:32:05 +01:00
|
|
|
module_path = '%s.%s' % (module_root, module_name,)
|
2015-03-19 13:47:31 +01:00
|
|
|
|
|
|
|
module = __import__(module_path)
|
2012-03-02 17:49:39 +01:00
|
|
|
|
|
|
|
for part in module_path.split('.')[1:]:
|
|
|
|
module = getattr(module, part)
|
|
|
|
|
|
|
|
for key, value in module.__dict__.items():
|
2013-08-05 19:39:44 +02:00
|
|
|
if not key.startswith('_') and (hasattr(value, '__call__')
|
|
|
|
and (value.__module__ == module_path)):
|
2013-01-09 18:32:05 +01:00
|
|
|
logic_functions[key] = value
|
2012-03-02 17:49:39 +01:00
|
|
|
|
2013-01-09 18:32:05 +01:00
|
|
|
return logic_functions
|
2012-03-01 13:02:16 +01:00
|
|
|
|
2013-03-12 18:30:31 +01:00
|
|
|
def _create_harvest_source_object(context, data_dict):
    '''
    Creates an actual HarvestSource object with the data dict
    of the harvest_source dataset. All validation and authorization
    checks should be used by now, so this function is not to be used
    directly to create harvest sources. The created harvest source will
    have the same id as the dataset.

    :param data_dict: A standard package data_dict

    :returns: The created HarvestSource object
    :rtype: HarvestSource object
    '''
    log.info('Creating harvest source: %r', data_dict)

    source = HarvestSource()

    source.id = data_dict['id']
    source.url = data_dict['url'].strip()

    # Avoids clashes with the dataset type
    source.type = data_dict['source_type']

    opt = ['active', 'title', 'description', 'user_id',
           'publisher_id', 'config', 'frequency']
    for o in opt:
        if o in data_dict and data_dict[o] is not None:
            setattr(source, o, data_dict[o])

    # A dataset flagged as deleted must not leave the source active
    source.active = data_dict.get('state', None) != 'deleted'

    # Don't commit yet, let package_create do it
    source.add()
    log.info('Harvest source created: %s', source.id)

    return source
|
|
|
|
|
2013-03-12 18:30:31 +01:00
|
|
|
def _update_harvest_source_object(context, data_dict):
    '''
    Updates an actual HarvestSource object with the data dict
    of the harvest_source dataset. All validation and authorization
    checks should be used by now, so this function is not to be used
    directly to update harvest sources.

    :param data_dict: A standard package data_dict

    :returns: The updated HarvestSource object
    :rtype: HarvestSource object

    :raises logic.NotFound: if no harvest source has the given id
    '''
    source_id = data_dict.get('id')

    log.info('Harvest source %s update: %r', source_id, data_dict)
    source = HarvestSource.get(source_id)
    if not source:
        log.error('Harvest source %s does not exist', source_id)
        raise logic.NotFound('Harvest source %s does not exist' % source_id)

    fields = ['url', 'title', 'description', 'user_id',
              'publisher_id', 'frequency']
    for f in fields:
        if f in data_dict and data_dict[f] is not None:
            if f == 'url':
                data_dict[f] = data_dict[f].strip()
            setattr(source, f, data_dict[f])

    # Avoids clashes with the dataset type
    if 'source_type' in data_dict:
        source.type = data_dict['source_type']

    if 'config' in data_dict:
        source.config = data_dict['config']

    # Don't change state unless explicitly set in the dict
    if 'state' in data_dict:
        source.active = data_dict.get('state') == 'active'

    # Don't commit yet, let package_create do it
    source.add()

    # Abort any pending jobs
    if not source.active:
        jobs = HarvestJob.filter(source=source, status=u'New')
        log.info('Harvest source %s not active, so aborting %i outstanding jobs',
                 source_id, jobs.count())
        if jobs:
            for job in jobs:
                job.status = u'Aborted'
                job.add()

    return source
|
2013-03-12 14:14:07 +01:00
|
|
|
|
2013-03-12 18:30:31 +01:00
|
|
|
def _delete_harvest_source_object(context, data_dict):
    '''
    Deletes an actual HarvestSource object with the id provided on the
    data dict of the harvest_source dataset. Similarly to the datasets,
    the source object is not actually deleted, just flagged as inactive.
    All validation and authorization checks should be used by now, so
    this function is not to be used directly to delete harvest sources.

    :param data_dict: A standard package data_dict

    :returns: The deleted HarvestSource object
    :rtype: HarvestSource object
    '''
    source_id = data_dict.get('id')
    log.info('Deleting harvest source: %s', source_id)

    source = HarvestSource.get(source_id)
    if not source:
        log.warn('Harvest source %s does not exist', source_id)
        raise p.toolkit.ObjectNotFound('Harvest source %s does not exist' % source_id)

    # Don't actually delete the record, just flag it as inactive
    source.active = False
    source.save()

    # Abort any pending jobs
    pending = HarvestJob.filter(source=source, status=u'New')
    if pending:
        log.info('Aborting %i jobs due to deleted harvest source', pending.count())
        for pending_job in pending:
            pending_job.status = u'Aborted'
            pending_job.save()

    log.debug('Harvest source %s deleted', source_id)

    return source
|
2016-05-11 13:29:53 +02:00
|
|
|
|
2016-05-16 13:15:12 +02:00
|
|
|
def _configure_db_logger(config):
    '''Attach DBLogHandler instances to the harvest loggers per config.

    Reads two config options:

    * ``ckan.harvest.log_scope`` selects which loggers log to the
      database (-1, the default, disables database logging entirely);
      see the scope table below.
    * ``ckan.harvest.log_level`` sets the handler level (default DEBUG).
    '''
    # Log scope
    #
    # -1 - do not log to the database
    #  0 - log everything
    #  1 - model, logic.action, logic.validators, harvesters
    #  2 - model, logic.action, logic.validators
    #  3 - model, logic.action
    #  4 - logic.action
    #  5 - model
    #  6 - plugin
    #  7 - harvesters
    #
    scope = p.toolkit.asint(config.get('ckan.harvest.log_scope', -1))
    if scope == -1:
        return

    parent_logger = 'ckanext.harvest'
    children = ['plugin', 'model', 'logic.action.create', 'logic.action.delete',
                'logic.action.get', 'logic.action.patch', 'logic.action.update',
                'logic.validators', 'harvesters.base', 'harvesters.ckanharvester']

    # Map each scope value to the slice of child loggers it covers
    children_ = {0: children, 1: children[1:], 2: children[1:-2],
                 3: children[1:-3], 4: children[2:-3], 5: children[1:2],
                 6: children[:1], 7: children[-2:]}

    # Get log level from config param - default: DEBUG
    from logging import DEBUG, INFO, WARNING, ERROR, CRITICAL
    level_by_name = {'DEBUG': DEBUG, 'INFO': INFO, 'WARNING': WARNING,
                     'ERROR': ERROR, 'CRITICAL': CRITICAL}
    level = level_by_name.get(
        config.get('ckan.harvest.log_level', 'debug').upper(), DEBUG)

    # Unknown scopes (eg > 7) get no child handlers instead of crashing
    # (the original iterated over ``None`` in that case)
    loggers = children_.get(scope, [])

    # Get root logger and set db handler
    logger = getLogger(parent_logger)
    if scope < 1:
        logger.addHandler(DBLogHandler(level=level))

    # Set db handler to all child loggers
    for child_name in loggers:
        child_logger = logger.getChild(child_name)
        child_logger.addHandler(DBLogHandler(level=level))
|