Fix resource updating for harvested datasets
Starting from CKAN 2.2, resource_update calls package_show before updating the resource via a package_update call. The dict passed to package_update had the harvest extras (e.g. harvest_object_id) added, which made the update call fail due to duplicated extra keys. To fix this, we now remove any harvest extras in after_show if there is a 'for_edit' property in the context.
parent 278a8e1ada
commit 467fb7bb8f
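For context, the failing path could be hit through the action API with nothing more than a plain resource update on a harvested dataset. The snippet below is a minimal reproduction sketch, not part of this commit; it assumes a CKAN >= 2.2 site with ckanext-harvest enabled and a harvested dataset with at least one resource, and the resource id and user are placeholders.

    # Reproduction sketch only, not part of this commit. Assumptions: CKAN >= 2.2
    # with ckanext-harvest enabled and an existing harvested dataset; the id and
    # user below are placeholders.
    import ckan.plugins.toolkit as toolkit

    context = {'user': 'admin'}  # hypothetical sysadmin account

    resource = toolkit.get_action('resource_show')(context, {'id': 'resource-id'})
    resource['description'] = 'Updated description'

    # Per the commit message, resource_update calls package_show (where
    # after_show runs) and then package_update. Before this fix the harvest
    # extras travelled along and package_update rejected the dict with a
    # duplicated extra key error; with the 'for_edit' check they are removed.
    toolkit.get_action('resource_update')(context, resource)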
@@ -95,7 +95,21 @@ class Harvest(p.SingletonPlugin, DefaultDatasetForm):
                     .filter(HarvestObject.current==True) \
                     .first()
 
-            # validate is false is passed only on indexing.
+            if context.get('for_edit'):
+                # If we are editing the dataset, check if the harvest extras
+                # are there, and if so, remove them. This can happen eg when
+                # doing resource_update, which calls package_show
+                data_dict['extras'][:] = [e for e in data_dict.get('extras', [])
+                                          if not e['key']
+                                          in ('harvest_object_id', 'harvest_source_id', 'harvest_source_title',)]
+
+
+            # We only want to add these extras at index time so they are part
+            # of the cached data_dict used to display, search results etc. We
+            # don't want them added when editing the dataset, otherwise we get
+            # duplicated key errors.
+            # The only way to detect indexing right now is checking that
+            # validate is set to False.
             if harvest_object and not context.get('validate', True):
                 for key, value in [
                     ('harvest_object_id', harvest_object.id),
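A side note on the list comprehension added above: it uses slice assignment (data_dict['extras'][:] = ...), so the existing extras list is filtered in place rather than rebound, and any other reference to that same list sees the harvest keys removed. Below is a standalone sketch in plain Python (no CKAN needed; the sample extras are made up) showing the same idiom with the equivalent 'not in' test.

    # Standalone illustration of the in-place filter used in after_show;
    # the sample extras are invented for the example.
    extras = [
        {'key': 'harvest_object_id', 'value': 'abc'},      # injected by the harvester
        {'key': 'spatial', 'value': '{"type": "Point"}'},  # a user-defined extra
    ]
    data_dict = {'extras': extras}

    skip = ('harvest_object_id', 'harvest_source_id', 'harvest_source_title')
    # Slice assignment keeps the same list object, so 'extras' above is
    # filtered too; only the harvest-specific keys are dropped.
    data_dict['extras'][:] = [e for e in data_dict.get('extras', [])
                              if e['key'] not in skip]

    assert extras == [{'key': 'spatial', 'value': '{"type": "Point"}'}]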