2011-03-14 14:34:48 +01:00
|
|
|
import sys
|
|
|
|
from pprint import pprint
|
|
|
|
|
2012-02-29 11:59:02 +01:00
|
|
|
from ckan import model
|
2012-03-01 13:46:42 +01:00
|
|
|
from ckan.logic import get_action, ValidationError
|
2016-05-06 18:44:02 +02:00
|
|
|
from ckan.plugins import toolkit
|
2012-02-29 11:59:02 +01:00
|
|
|
|
2011-03-14 14:34:48 +01:00
|
|
|
from ckan.lib.cli import CkanCommand
|
2011-03-18 16:44:40 +01:00
|
|
|
|
2011-03-14 14:34:48 +01:00
|
|
|
class Harvester(CkanCommand):
    '''Harvests remotely mastered metadata

    Usage:

      harvester initdb
        - Creates the necessary tables in the database

      harvester source {name} {url} {type} [{title}] [{active}] [{owner_org}] [{frequency}] [{config}]
        - create new harvest source

      harvester source {source-id/name}
        - shows a harvest source

      harvester rmsource {source-id/name}
        - remove (deactivate) a harvester source, whilst leaving any related
          datasets, jobs and objects

      harvester clearsource {source-id/name}
        - clears all datasets, jobs and objects related to a harvest source,
          but keeps the source itself

      harvester sources [all]
        - lists harvest sources
          If 'all' is defined, it also shows the Inactive sources

      harvester job {source-id/name}
        - create new harvest job and runs it (puts it on the gather queue)

      harvester jobs
        - lists harvest jobs

      harvester job_abort {source-id/source-name/obj-id}
        - marks a job as "Aborted" so that the source can be restarted afresh.
          It ensures that the job's harvest objects status are also marked
          finished. You should ensure that neither the job nor its objects are
          currently in the gather/fetch queues.

      harvester run
        - starts any harvest jobs that have been created by putting them onto
          the gather queue. Also checks running jobs - if finished it
          changes their status to Finished.

      harvester run_test {source-id/name}
        - runs a harvest - for testing only.
          This does all the stages of the harvest (creates job, gather, fetch,
          import) without involving the web UI or the queue backends. This is
          useful for testing a harvester without having to fire up
          gather/fetch_consumer processes, as is done in production.

      harvester gather_consumer
        - starts the consumer for the gathering queue

      harvester fetch_consumer
        - starts the consumer for the fetching queue

      harvester purge_queues
        - removes all jobs from fetch and gather queue
          WARNING: if using Redis, this command purges all data in the current
          Redis database

      harvester clean_harvest_log
        - Clean-up mechanism for the harvest log table.
          You can configure the time frame through the configuration
          parameter `ckan.harvest.log_timeframe`. The default time frame is 30 days

      harvester [-j] [-o|-g|-p {id/guid}] [--segments={segments}] import [{source-id}]
        - perform the import stage with the last fetched objects, for a certain
          source or a single harvest object. Please note that no objects will
          be fetched from the remote server. It will only affect the objects
          already present in the database.

          To import a particular harvest source, specify its id as an argument.
          To import a particular harvest object use the -o option.
          To import a particular guid use the -g option.
          To import a particular package use the -p option.

          You will need to specify the -j flag in cases where the datasets are
          not yet created (e.g. first harvest, or all previous harvests have
          failed)

          The --segments flag allows to define a string containing hex digits that represent which of
          the 16 harvest object segments to import. e.g. 15af will run segments 1,5,a,f

      harvester job-all
        - create new harvest jobs for all active sources.

      harvester harvesters-info
        - pretty-prints the output of the `harvesters_info_show` action
          (information about the available harvester types)

      harvester reindex
        - reindexes the harvest source datasets

    The commands should be run from the ckanext-harvest directory and expect
    a development.ini file to be present. Most of the time you will
    specify the config explicitly though::

      paster harvester sources --config=../ckan/development.ini

    '''

    # paster uses the first docstring line as the short command summary and
    # the full docstring as the usage/help text.
    summary = __doc__.split('\n')[0]
    usage = __doc__
    # 'source' accepts up to 9 positional arguments (see usage above).
    max_args = 9
    min_args = 0
|
|
|
|
|
2012-07-30 13:11:55 +02:00
|
|
|
def __init__(self, name):
    '''Register the command under *name* and declare its CLI options.'''
    super(Harvester, self).__init__(name)

    add = self.parser.add_option

    # Import-stage behaviour flags.
    add('-j', '--no-join-datasets', dest='no_join_datasets',
        action='store_true', default=False,
        help='Do not join harvest objects to existing datasets')

    # Selectors restricting which objects the import stage touches.
    add('-o', '--harvest-object-id', dest='harvest_object_id',
        default=False,
        help='Id of the harvest object to which perform the import stage')
    add('-p', '--package-id', dest='package_id',
        default=False,
        help='Id of the package whose harvest object to perform the import stage for')
    add('-g', '--guid', dest='guid',
        default=False,
        help='Guid of the harvest object to which perform the import stage for')

    add('--segments', dest='segments',
        default=False,
        help='''A string containing hex digits that represent which of
        the 16 harvest object segments to import. e.g. 15af will run segments 1,5,a,f''')
|
|
|
|
|
2011-03-14 14:34:48 +01:00
|
|
|
def command(self):
    '''Entry point: dispatch self.args[0] to the matching sub-command.

    Loads the CKAN config, resolves the sysadmin site user (stored on
    self.admin_user for the sub-command handlers), then runs the handler.
    Exits with status 1 when no sub-command is given.
    '''
    self._load_config()

    # We'll need a sysadmin user to perform most of the actions
    # We will use the sysadmin site user (named as the site_id)
    context = {'model': model, 'session': model.Session, 'ignore_auth': True}
    self.admin_user = get_action('get_site_user')(context, {})

    # NOTE: single-argument print calls are parenthesized throughout so the
    # file also parses under Python 3; output is unchanged on Python 2.
    print('')

    if len(self.args) == 0:
        self.parser.print_usage()
        sys.exit(1)
    cmd = self.args[0]
    if cmd == 'source':
        # 'source' doubles as create (3+ args) and show (1 arg).
        if len(self.args) > 2:
            self.create_harvest_source()
        else:
            self.show_harvest_source()
    elif cmd == 'rmsource':
        self.remove_harvest_source()
    elif cmd == 'clearsource':
        self.clear_harvest_source()
    elif cmd == 'sources':
        self.list_harvest_sources()
    elif cmd == 'job':
        self.create_harvest_job()
    elif cmd == 'jobs':
        self.list_harvest_jobs()
    elif cmd == 'job_abort':
        self.job_abort()
    elif cmd == 'run':
        self.run_harvester()
    elif cmd == 'run_test':
        self.run_test_harvest()
    elif cmd == 'gather_consumer':
        # Blocking loop: consumes the gather queue until interrupted.
        import logging
        from ckanext.harvest.queue import (get_gather_consumer,
                                           gather_callback,
                                           get_gather_queue_name)
        logging.getLogger('amqplib').setLevel(logging.INFO)
        consumer = get_gather_consumer()
        for method, header, body in consumer.consume(queue=get_gather_queue_name()):
            gather_callback(consumer, method, header, body)
    elif cmd == 'fetch_consumer':
        # Blocking loop: consumes the fetch queue until interrupted.
        import logging
        logging.getLogger('amqplib').setLevel(logging.INFO)
        from ckanext.harvest.queue import (get_fetch_consumer,
                                           fetch_callback,
                                           get_fetch_queue_name)
        consumer = get_fetch_consumer()
        for method, header, body in consumer.consume(queue=get_fetch_queue_name()):
            fetch_callback(consumer, method, header, body)
    elif cmd == 'purge_queues':
        from ckanext.harvest.queue import purge_queues
        purge_queues()
    elif cmd == 'initdb':
        self.initdb()
    elif cmd == 'import':
        # Make sure the harvest tables exist before importing.
        self.initdb()
        self.import_stage()
    elif cmd == 'job-all':
        self.create_harvest_job_all()
    elif cmd == 'harvesters-info':
        harvesters_info = get_action('harvesters_info_show')()
        pprint(harvesters_info)
    elif cmd == 'reindex':
        self.reindex()
    elif cmd == 'clean_harvest_log':
        self.clean_harvest_log()
    else:
        print('Command %s not recognized' % cmd)
|
|
|
|
|
|
|
|
def _load_config(self):
    # Delegate to CkanCommand._load_config, which parses the --config ini
    # file and sets up the CKAN environment before any sub-command runs.
    super(Harvester, self)._load_config()
|
2011-05-13 17:00:36 +02:00
|
|
|
|
2011-04-13 13:39:53 +02:00
|
|
|
def initdb(self):
    '''Create the harvest extension's database tables (idempotent setup).'''
    # Imported locally so the model is only touched when actually needed.
    from ckanext.harvest.model import setup as db_setup
    db_setup()

    # Parenthesized single-arg print: same output on Py2, parses on Py3.
    print('DB tables created')
|
2011-03-14 14:34:48 +01:00
|
|
|
|
2011-04-05 12:53:39 +02:00
|
|
|
def create_harvest_source(self):
    '''Create a harvest source from the positional CLI arguments.

    Expects self.args = ['source', name, url, type, [title], [active],
    [owner_org], [frequency], [config]].  Exits with status 1 when a
    required argument is missing; re-raises ValidationError from the
    `harvest_source_create` action after printing its error dict.
    '''
    if len(self.args) >= 2:
        name = unicode(self.args[1])
    else:
        print('Please provide a source name')
        sys.exit(1)

    if len(self.args) >= 3:
        url = unicode(self.args[2])
    else:
        print('Please provide a source URL')
        sys.exit(1)

    if len(self.args) >= 4:
        # Renamed from 'type' to avoid shadowing the builtin.
        source_type = unicode(self.args[3])
    else:
        print('Please provide a source type')
        sys.exit(1)

    if len(self.args) >= 5:
        title = unicode(self.args[4])
    else:
        title = None
    if len(self.args) >= 6:
        # Anything except 'false'/'0' counts as active.
        active = not(self.args[5].lower() == 'false' or
                     self.args[5] == '0')
    else:
        active = True
    if len(self.args) >= 7:
        owner_org = unicode(self.args[6])
    else:
        owner_org = None
    if len(self.args) >= 8:
        frequency = unicode(self.args[7])
        if not frequency:
            frequency = 'MANUAL'
    else:
        frequency = 'MANUAL'
    if len(self.args) >= 9:
        config = unicode(self.args[8])
    else:
        config = None

    try:
        data_dict = {
            'name': name,
            'url': url,
            'source_type': source_type,
            'title': title,
            'active': active,
            'owner_org': owner_org,
            'frequency': frequency,
            'config': config,
        }

        context = {
            'model': model,
            'session': model.Session,
            'user': self.admin_user['name'],
            'ignore_auth': True,
        }
        source = get_action('harvest_source_create')(context, data_dict)
        print('Created new harvest source:')
        self.print_harvest_source(source)

        sources = get_action('harvest_source_list')(context, {})
        self.print_there_are('harvest source', sources)

        # Create a harvest job for the new source if not regular job.
        # NOTE(review): frequency is normalized to 'MANUAL' above, so
        # data_dict['frequency'] is always truthy and this branch appears
        # unreachable — kept as-is to preserve behavior; confirm intent.
        if not data_dict['frequency']:
            get_action('harvest_job_create')(
                context, {'source_id': source['id'], 'run': True})
            print('A new Harvest Job for this source has also been created')

    # 'except X as e' (valid since Python 2.6) instead of 'except X,e',
    # which is a syntax error under Python 3.
    except ValidationError as e:
        print('An error occurred:')
        print(str(e.error_dict))
        raise e
|
|
|
|
|
2015-10-28 18:51:58 +01:00
|
|
|
def show_harvest_source(self):
    '''Print the harvest source named by self.args[1] (id or name).

    Exits with status 1 when no source identifier was supplied.
    '''
    if len(self.args) >= 2:
        source_id_or_name = unicode(self.args[1])
    else:
        print('Please provide a source name')
        sys.exit(1)
    context = {'model': model, 'session': model.Session,
               'user': self.admin_user['name']}
    source = get_action('harvest_source_show')(
        context, {'id': source_id_or_name})
    self.print_harvest_source(source)
|
|
|
|
|
2011-04-05 12:53:39 +02:00
|
|
|
def remove_harvest_source(self):
    '''Deactivate the harvest source named by self.args[1] (id or name).

    Resolves the identifier via `harvest_source_show` first, then calls
    `harvest_source_delete`.  Exits with status 1 when no id was given.
    '''
    if len(self.args) >= 2:
        source_id_or_name = unicode(self.args[1])
    else:
        print('Please provide a source id')
        sys.exit(1)
    context = {'model': model, 'session': model.Session,
               'user': self.admin_user['name']}
    source = get_action('harvest_source_show')(
        context, {'id': source_id_or_name})
    get_action('harvest_source_delete')(context, {'id': source['id']})
    print('Removed harvest source: %s' % source_id_or_name)
|
2011-05-13 17:00:36 +02:00
|
|
|
|
2015-06-10 11:19:10 +02:00
|
|
|
def clear_harvest_source(self):
    '''Delete all datasets/jobs/objects of the source named by self.args[1].

    The source itself is kept (see `harvest_source_clear`).  Exits with
    status 1 when no id was given.
    '''
    if len(self.args) >= 2:
        source_id_or_name = unicode(self.args[1])
    else:
        print('Please provide a source id')
        sys.exit(1)
    context = {'model': model, 'session': model.Session,
               'user': self.admin_user['name']}
    source = get_action('harvest_source_show')(
        context, {'id': source_id_or_name})
    get_action('harvest_source_clear')(context, {'id': source['id']})
    print('Cleared harvest source: %s' % source_id_or_name)
|
2015-06-10 11:19:10 +02:00
|
|
|
|
2011-04-05 12:53:39 +02:00
|
|
|
def list_harvest_sources(self):
    '''Print harvest sources; with the 'all' argument include inactive ones.'''
    show_all = len(self.args) >= 2 and self.args[1] == 'all'
    if show_all:
        data_dict = {}
        what = 'harvest source'
    else:
        data_dict = {'only_active': True}
        what = 'active harvest source'

    ctx = {'model': model, 'session': model.Session,
           'user': self.admin_user['name']}
    sources = get_action('harvest_source_list')(ctx, data_dict)
    self.print_harvest_sources(sources)
    self.print_there_are(what=what, sequence=sources)
|
2011-04-05 12:53:39 +02:00
|
|
|
|
|
|
|
def create_harvest_job(self):
    '''Create a harvest job for the source in self.args[1] and run it.

    'run': True puts the new job straight onto the gather queue.  Exits
    with status 1 when no source id/name was supplied.
    '''
    if len(self.args) >= 2:
        source_id_or_name = unicode(self.args[1])
    else:
        print('Please provide a source id')
        sys.exit(1)
    context = {'model': model, 'session': model.Session,
               'user': self.admin_user['name']}
    source = get_action('harvest_source_show')(
        context, {'id': source_id_or_name})

    # The original rebuilt an identical context dict here; reuse it instead.
    job = get_action('harvest_job_create')(
        context, {'source_id': source['id'], 'run': True})

    self.print_harvest_job(job)
    jobs = get_action('harvest_job_list')(context, {'status': u'New'})
    self.print_there_are('harvest job', jobs, condition=u'New')
|
2011-04-05 12:53:39 +02:00
|
|
|
|
|
|
|
def list_harvest_jobs(self):
    '''Print every harvest job, followed by a count summary line.'''
    ctx = {'model': model, 'user': self.admin_user['name'],
           'session': model.Session}
    jobs = get_action('harvest_job_list')(ctx, {})

    self.print_harvest_jobs(jobs)
    self.print_there_are(what='harvest job', sequence=jobs)
|
2011-05-13 17:00:36 +02:00
|
|
|
|
2015-10-28 18:51:58 +01:00
|
|
|
def job_abort(self):
    '''Mark the job in self.args[1] (job id or source id/name) as Aborted.

    Delegates to the `harvest_job_abort` action and prints the resulting
    job status.  Exits with status 1 when no identifier was supplied.
    '''
    if len(self.args) >= 2:
        job_or_source_id_or_name = unicode(self.args[1])
    else:
        print('Please provide a job id or source name/id')
        sys.exit(1)

    context = {'model': model, 'user': self.admin_user['name'],
               'session': model.Session}
    job = get_action('harvest_job_abort')(
        context, {'id': job_or_source_id_or_name})
    print('Job status: {0}'.format(job['status']))
|
|
|
|
|
2011-04-08 16:54:33 +02:00
|
|
|
def run_harvester(self):
    '''Kick off pending harvest jobs via the `harvest_jobs_run` action.'''
    ctx = {'model': model, 'user': self.admin_user['name'],
           'session': model.Session}
    get_action('harvest_jobs_run')(ctx, {})
|
2011-04-06 13:45:00 +02:00
|
|
|
|
2015-10-28 18:51:58 +01:00
|
|
|
def run_test_harvest(self):
    '''Run a complete harvest (job/gather/fetch/import) synchronously.

    Testing aid: bypasses the queue backends by driving the stages
    directly through ckanext.harvest.tests.lib.run_harvest_job.  May
    prompt interactively to abort an already-running job.
    '''
    from ckanext.harvest import queue
    from ckanext.harvest.tests import lib
    from ckanext.harvest.logic import HarvestJobExists
    from ckanext.harvest.model import HarvestJob

    # Determine the source
    if len(self.args) >= 2:
        source_id_or_name = unicode(self.args[1])
    else:
        print('Please provide a source id')
        sys.exit(1)
    context = {'model': model, 'session': model.Session,
               'user': self.admin_user['name']}
    source = get_action('harvest_source_show')(
        context, {'id': source_id_or_name})

    # Determine the job
    try:
        job_dict = get_action('harvest_job_create')(
            context, {'source_id': source['id']})
    except HarvestJobExists:
        running_jobs = get_action('harvest_job_list')(
            context, {'source_id': source['id'], 'status': 'Running'})
        if running_jobs:
            print('\nSource "%s" apparently has a "Running" job:\n%r'
                  % (source.get('name') or source['id'], running_jobs))
            # raw_input is Python 2 (input() on Python 3).
            resp = raw_input('Abort it? (y/n)')
            if not resp.lower().startswith('y'):
                sys.exit(1)
            job_dict = get_action('harvest_job_abort')(
                context, {'source_id': source['id']})
        else:
            print('Reusing existing harvest job')
            jobs = get_action('harvest_job_list')(
                context, {'source_id': source['id'], 'status': 'New'})
            assert len(jobs) == 1, \
                'Multiple "New" jobs for this source! %r' % jobs
            job_dict = jobs[0]
    job_obj = HarvestJob.get(job_dict['id'])

    harvester = queue.get_harvester(source['source_type'])
    assert harvester, \
        'No harvester found for type: %s' % source['source_type']
    lib.run_harvest_job(job_obj, harvester)
|
|
|
|
|
2011-05-10 17:06:57 +02:00
|
|
|
def import_stage(self):
    '''Re-run the import stage over harvest objects already in the DB.

    Optional self.args[1] restricts the import to one source; the -o/-p/-g
    options (and -j / --segments, passed via context) further narrow or
    tune the behaviour.  Nothing is fetched from the remote server.
    '''
    if len(self.args) >= 2:
        source_id_or_name = unicode(self.args[1])
        context = {'model': model, 'session': model.Session,
                   'user': self.admin_user['name']}
        source = get_action('harvest_source_show')(
            context, {'id': source_id_or_name})
        source_id = source['id']
    else:
        # No source given: the action decides the scope from the other
        # selectors below.
        source_id = None

    context = {'model': model, 'session': model.Session,
               'user': self.admin_user['name'],
               'join_datasets': not self.options.no_join_datasets,
               'segments': self.options.segments}

    objs_count = get_action('harvest_objects_import')(context, {
        'source_id': source_id,
        'harvest_object_id': self.options.harvest_object_id,
        'package_id': self.options.package_id,
        'guid': self.options.guid,
    })

    print('%s objects reimported' % objs_count)
|
2011-05-10 17:06:57 +02:00
|
|
|
|
2011-09-06 19:25:17 +02:00
|
|
|
def create_harvest_job_all(self):
    '''Create a new harvest job for every active source and report the count.'''
    context = {'model': model, 'user': self.admin_user['name'],
               'session': model.Session}
    jobs = get_action('harvest_job_create_all')(context, {})
    print('Created %s new harvest jobs' % len(jobs))
|
2011-09-06 19:25:17 +02:00
|
|
|
|
2013-01-22 17:43:25 +01:00
|
|
|
def reindex(self):
    '''Rebuild the search index entries of the harvest source datasets.'''
    ctx = {'model': model, 'user': self.admin_user['name']}
    get_action('harvest_sources_reindex')(ctx, {})
|
|
|
|
|
|
|
|
|
2011-03-14 14:34:48 +01:00
|
|
|
def print_harvest_sources(self, sources):
    '''Print each source dict in *sources* via print_harvest_source.

    A leading blank line is emitted only when there is something to show.
    '''
    if sources:
        print('')
    for source in sources:
        self.print_harvest_source(source)
|
|
|
|
|
|
|
|
def print_harvest_source(self, source):
    '''Pretty-print one harvest source dict with aligned labels.

    Handles both dict shapes: sources coming from the HarvestSource model
    ('type'/'active') and from the Package ('source_type'/'state'/'name').
    '''
    print('Source id: %s' % source.get('id'))
    if 'name' in source:
        # 'name' is only there if the source comes from the Package
        print('     name: %s' % source.get('name'))
    print('      url: %s' % source.get('url'))
    # 'type' if source comes from HarvestSource, 'source_type' if it comes
    # from the Package
    print('     type: %s' % (source.get('source_type') or
                             source.get('type')))
    print('   active: %s' % (source.get('active',
                                        source.get('state') == 'active')))
    print('frequency: %s' % source.get('frequency'))
    print('     jobs: %s' % source.get('status').get('job_count'))
    print('')
|
|
|
|
|
|
|
|
def print_harvest_jobs(self, jobs):
    '''Print each job dict in *jobs* via print_harvest_job.

    A leading blank line is emitted only when there is something to show.
    '''
    if jobs:
        print('')
    for job in jobs:
        self.print_harvest_job(job)
|
|
|
|
|
|
|
|
def print_harvest_job(self, job):
    '''Pretty-print one harvest job dict, including any gather errors.'''
    print('       Job id: %s' % job.get('id'))
    print('       status: %s' % job.get('status'))
    print('       source: %s' % job.get('source_id'))
    print('      objects: %s' % len(job.get('objects', [])))

    print('gather_errors: %s' % len(job.get('gather_errors', [])))
    for error in job.get('gather_errors', []):
        print('               %s' % error['message'])

    print('')
|
2011-03-14 14:34:48 +01:00
|
|
|
|
2011-04-05 12:53:39 +02:00
|
|
|
def print_there_are(self, what, sequence, condition=''):
    '''Print a count summary like "There are 3 new harvest sources".

    *what* is the singular noun; an 's' is appended for plural counts.
    *condition* (e.g. u'New') is lower-cased and inserted before the noun.
    '''
    is_singular = self.is_singular(sequence)
    print('There %s %s %s%s%s' % (
        is_singular and 'is' or 'are',
        len(sequence),
        condition and ('%s ' % condition.lower()) or '',
        what,
        not is_singular and 's' or '',
    ))
|
|
|
|
|
|
|
|
def is_singular(self, sequence):
    '''Return True when *sequence* contains exactly one element.'''
    return 1 == len(sequence)
|
|
|
|
|
2016-05-06 18:44:02 +02:00
|
|
|
def clean_harvest_log(self):
    '''Delete harvest log rows older than the configured time frame.

    The cut-off is `ckan.harvest.log_timeframe` days (default 30) before
    the current UTC time.
    '''
    from datetime import datetime, timedelta
    from pylons import config
    from ckanext.harvest.model import clean_harvest_log

    # Log time frame - in days
    log_timeframe = toolkit.asint(config.get('ckan.harvest.log_timeframe', 30))
    condition = datetime.utcnow() - timedelta(days=log_timeframe)

    # Delete logs older than the given date
    clean_harvest_log(condition=condition)
|