fix job reporting all datasets as deleted when nothing actually changed during the last two harvests

Jari Voutilainen 2014-09-10 09:12:30 +03:00
parent f34c1e6d74
commit 97f09913cf
3 changed files with 4 additions and 2 deletions
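Taken together, the three hunks below give an unchanged package an explicit 'not modified' trace instead of letting it fall through and be counted as deleted. The following is a rough, hedged sketch of the intended status flow; HarvestObjectStub, import_package, finish_object and summarise_job are hypothetical stand-ins rather than ckanext-harvest code, and only the 'not modified' and 'errored' values come from the diff itself.

# Minimal sketch of the status flow this commit aims for; the class and
# helpers here are illustrative stand-ins, not the real ckanext-harvest code.

class HarvestObjectStub(object):
    """Stand-in for ckanext-harvest's HarvestObject model."""
    def __init__(self, guid):
        self.guid = guid
        self.state = 'WAITING'
        self.report_status = None

    def save(self):
        pass  # the real model persists the object to the database


def import_package(obj, remote_changed):
    """Mimics the base harvester change: unchanged packages are flagged."""
    if not remote_changed:
        obj.report_status = 'not modified'  # added in this commit
        obj.save()
        return
    obj.state = 'COMPLETE'


def finish_object(obj):
    """Mimics the queue change: only 'not modified' skips the bookkeeping,
    so objects that ended in ERROR still get report_status = 'errored'."""
    if obj.report_status == 'not modified':
        return
    if obj.state == 'ERROR':
        obj.report_status = 'errored'


def summarise_job(objects):
    """Hypothetical reporting step: 'not modified' objects count as still
    present, so an unchanged source no longer looks entirely deleted."""
    return {
        'unchanged': [o.guid for o in objects if o.report_status == 'not modified'],
        'errored': [o.guid for o in objects if o.report_status == 'errored'],
    }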

@@ -178,6 +178,8 @@ class HarvesterBase(SingletonPlugin):
else:
log.info('Package with GUID %s not updated, skipping...' % harvest_object.guid)
+ harvest_object.report_status = 'not modified'
+ harvest_object.save()
return
# Flag the other objects linking to this package as not current anymore

@@ -165,7 +165,7 @@ class CKANHarvester(HarvesterBase):
package_ids.append(package_id)
else:
log.info('No packages have been updated on the remote CKAN instance since the last harvest job')
- return None
+ return []
except urllib2.HTTPError,e:
if e.getcode() == 400:
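The `return []` change keeps the "nothing changed" case a normal, empty gather result rather than a bare `None`. A hedged sketch of the distinction as seen from the caller's side; `run_gather` is a simplified, hypothetical consumer, not the actual gather callback in ckanext-harvest.

# Hypothetical consumer of gather_stage's return value (not ckanext-harvest's
# real gather callback). An empty list is an unambiguous "source unchanged",
# while None is easy to misread as a failed gather or missing data.

def run_gather(harvester, job):
    ids = harvester.gather_stage(job)
    if ids is None:
        return 'gather stage returned no result'     # ambiguous / error-like
    if not ids:
        return 'nothing to fetch, source unchanged'   # the case fixed here
    return 'fetching %d objects' % len(ids)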

@@ -318,7 +318,7 @@ def fetch_and_import_stages(harvester, obj):
else:
obj.state = "ERROR"
obj.save()
- if obj.report_status:
+ if obj.report_status == 'not modified':
return
if obj.state == 'ERROR':
obj.report_status = 'errored'