fix job reporting all datasets as deleted when nothing actually changed during the last two harvests
commit 97f09913cf
parent f34c1e6d74
@@ -178,6 +178,8 @@ class HarvesterBase(SingletonPlugin):
             else:
                 log.info('Package with GUID %s not updated, skipping...' % harvest_object.guid)
+                harvest_object.report_status = 'not modified'
+                harvest_object.save()
                 return

             # Flag the other objects linking to this package as not current anymore
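Why the missing status mattered: a harvest object that finishes with an empty report_status gets classified later by fallback rules, and an object for an unchanged package could fall through to the 'deleted' bucket, which is the symptom named in the commit message. A minimal sketch of that fallback, with classify_report_status() and the 'deleted' branch assumed for illustration rather than quoted from the codebase:

    # Hypothetical condensation of the per-object classification; attribute
    # names follow the diff above, classify_report_status() itself is made up.
    def classify_report_status(obj):
        if obj.report_status:          # explicit marker wins ('not modified', ...)
            return obj.report_status
        if obj.state == 'ERROR':
            return 'errored'
        if not obj.current:            # superseded object with no marker:
            return 'deleted'           # unchanged packages used to land here
        return 'added'

With the two added lines, an unchanged package carries an explicit 'not modified' marker instead of an empty status, so it never reaches the 'deleted' fallback.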
@@ -165,7 +165,7 @@ class CKANHarvester(HarvesterBase):
                    package_ids.append(package_id)
            else:
                log.info('No packages have been updated on the remote CKAN instance since the last harvest job')
-                return None
+                return []

        except urllib2.HTTPError,e:
            if e.getcode() == 400:
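The return-value change matters because the gather stage's caller can tell an empty result apart from a failed one: None reads as "gathering failed", while [] reads as "nothing to harvest this time". A hedged sketch of that distinction, with run_gather() and enqueue_fetch() invented for illustration rather than taken from the queue module:

    # Illustrative consumer of gather_stage(); not the actual queue runner.
    def enqueue_fetch(object_id):
        print('would enqueue', object_id)   # stand-in for the real fetch queue

    def run_gather(harvester, job):
        object_ids = harvester.gather_stage(job)
        if object_ids is None:         # old return value: treated as a failure
            job.status = 'Errored'     # status name is illustrative
            return
        for object_id in object_ids:   # []: loop never runs, job ends cleanly
            enqueue_fetch(object_id)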
@@ -318,7 +318,7 @@ def fetch_and_import_stages(harvester, obj):
     else:
         obj.state = "ERROR"
     obj.save()
-    if obj.report_status:
+    if obj.report_status == 'not modified':
         return
     if obj.state == 'ERROR':
         obj.report_status = 'errored'
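The one-line guard change is subtler than it looks: the old test was on truthiness, so any non-empty report_status skipped the bookkeeping below, while the new test returns early only for the explicit 'not modified' marker. A minimal sketch of the difference, using a hypothetical pre-set status purely to show the gap:

    # Contrast of the two guards; 'some earlier status' is hypothetical.
    status = 'some earlier status'
    old_guard = bool(status)                  # True: classification skipped
    new_guard = (status == 'not modified')    # False: classification still runs

Together with the first hunk, this means only packages explicitly marked as unchanged bypass the 'errored' accounting that follows, instead of every object that happened to have a status set.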