import logging

import ckan
import ckan.lib.navl.dictization_functions

from ckan import logic
from ckan.logic import NotFound, check_access

from ckanext.harvest.logic import HarvestJobExists
from ckanext.harvest.plugin import DATASET_TYPE_NAME
from ckanext.harvest.model import (HarvestSource, HarvestJob, HarvestObject,
                                   HarvestObjectExtra)
from ckanext.harvest.logic.dictization import (harvest_job_dictize,
                                               harvest_object_dictize)
from ckanext.harvest.logic.schema import (harvest_source_show_package_schema,
                                          harvest_object_create_schema)
from ckanext.harvest.logic.action.get import harvest_source_list, harvest_job_list

log = logging.getLogger(__name__)

_validate = ckan.lib.navl.dictization_functions.validate


class InactiveSource(Exception):
    pass


def harvest_source_create(context, data_dict):
    '''
    Creates a new harvest source.

    This method just proxies the request to ``package_create``, which will
    create a dataset of type ``harvest_source`` and the HarvestSource object.
    All auth checks and validation are done there; here we only make sure to
    set the dataset type.

    Note that the harvest source type (ckan, waf, csw, etc.) is now set via
    the ``source_type`` field.

    :param url: the URL for the harvest source
    :type url: string
    :param name: the name of the new harvest source, must be between 2 and 100
        characters long and contain only lowercase alphanumeric characters
    :type name: string
    :param title: the title of the dataset (optional, default: same as
        ``name``)
    :type title: string
    :param notes: a description of the harvest source (optional)
    :type notes: string
    :param source_type: the harvester type for this source. This must be one
        of the registered harvesters, e.g. 'ckan', 'csw', etc.
    :type source_type: string
    :param frequency: the frequency with which this harvester should run. See
        ``ckanext.harvest.model`` source for possible values. Default is
        'MANUAL'
    :type frequency: string
    :param config: extra configuration options for the particular harvester
        type, serialized as JSON (optional)
    :type config: string

    :returns: the newly created harvest source
    :rtype: dictionary
    '''
    log.info('Creating harvest source: %r', data_dict)

    data_dict['type'] = DATASET_TYPE_NAME
    context['extras_as_string'] = True

    source = logic.get_action('package_create')(context, data_dict)

    return source
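
# Usage sketch (illustrative only; the context and field values below are
# placeholders, not defined in this module):
#
#   context = {'model': model, 'session': model.Session, 'user': 'admin'}
#   source = logic.get_action('harvest_source_create')(context, {
#       'url': 'http://data.example.com',
#       'name': 'example-source',
#       'title': 'Example remote portal',
#       'source_type': 'ckan',
#       'frequency': 'MANUAL',
#   })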


def harvest_job_create(context, data_dict):
    log.info('Harvest job create: %r', data_dict)
    check_access('harvest_job_create', context, data_dict)

    source_id = data_dict['source_id']

    # Check if source exists
    source = HarvestSource.get(source_id)
    if not source:
        log.warn('Harvest source %s does not exist', source_id)
        raise NotFound('Harvest source %s does not exist' % source_id)

    # Check if the source is active
    if not source.active:
        log.warn('Harvest job cannot be created for inactive source %s', source_id)
        raise InactiveSource('Cannot create jobs on inactive sources')

    # Check if there already is an unrun or currently running job for this source
    exists = _check_for_existing_jobs(context, source_id)
    if exists:
        log.warn('There is already an unrun job %r for this source %s', exists, source_id)
        raise HarvestJobExists('There already is an unrun job for this source')

    job = HarvestJob()
    job.source = source

    job.save()
    log.info('Harvest job saved %s', job.id)
    return harvest_job_dictize(job, context)
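
# Usage sketch (illustrative; the source id is a placeholder):
#
#   job = logic.get_action('harvest_job_create')(
#       context, {'source_id': 'some-source-id'})
#
# Expect NotFound if the source does not exist, InactiveSource if it is not
# active, and HarvestJobExists if an unrun or running job is already queued.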


def harvest_job_create_all(context, data_dict):
    log.info('Harvest job create all: %r', data_dict)
    check_access('harvest_job_create_all', context, data_dict)

    data_dict.update({'only_active': True})

    # Get all active sources
    sources = harvest_source_list(context, data_dict)
    jobs = []

    # Create a new job for each, if there isn't already one
    for source in sources:
        exists = _check_for_existing_jobs(context, source['id'])
        if exists:
            log.info('Skipping source %s as it already has a pending job', source['id'])
            continue

        job = harvest_job_create(context, {'source_id': source['id']})
        jobs.append(job)

    log.info('Created jobs for %i harvest sources', len(jobs))
    return jobs
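
# Usage sketch (illustrative): queue a job for every active source that does
# not already have one pending, e.g. from a scheduled task:
#
#   jobs = logic.get_action('harvest_job_create_all')(context, {})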


def _check_for_existing_jobs(context, source_id):
    '''
    Given a source id, checks if there are jobs for this source
    with status 'New' or 'Running'.

    :rtype: boolean
    '''
    data_dict = {
        'source_id': source_id,
        'status': u'New'
    }
    exist_new = harvest_job_list(context, data_dict)

    data_dict = {
        'source_id': source_id,
        'status': u'Running'
    }
    exist_running = harvest_job_list(context, data_dict)

    exist = len(exist_new + exist_running) > 0

    return exist


def harvest_object_create(context, data_dict):
    """ Create a new harvest object

    :type guid: string (optional)
    :type content: string (optional)
    :type job_id: string
    :type source_id: string (optional)
    :type package_id: string (optional)
    :type extras: dict (optional)
    """
    check_access('harvest_object_create', context, data_dict)
    data, errors = _validate(data_dict, harvest_object_create_schema(), context)

    if errors:
        raise logic.ValidationError(errors)

    obj = HarvestObject(
        guid=data.get('guid'),
        content=data.get('content'),
        job=data['job_id'],
        harvest_source_id=data.get('source_id'),
        package_id=data.get('package_id'),
        extras=[HarvestObjectExtra(key=k, value=v)
                for k, v in data.get('extras', {}).items()]
    )

    obj.save()
    return harvest_object_dictize(obj, context)
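
# Usage sketch (illustrative; ids and content are placeholders). Per the
# docstring above, only ``job_id`` is required; ``extras`` entries are stored
# as HarvestObjectExtra rows:
#
#   obj_dict = logic.get_action('harvest_object_create')(context, {
#       'guid': 'remote-record-guid',
#       'job_id': 'some-job-id',
#       'source_id': 'some-source-id',
#       'content': '<metadata document as a string>',
#       'extras': {'status': 'new'},
#   })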