Refactor and test in order to comply with PR requirements

This commit is contained in:
Cazaril 2016-07-07 14:36:12 +02:00 committed by Ender
parent 5b6bd59715
commit 014779a096
4 changed files with 63 additions and 113 deletions

View File

@@ -52,92 +52,8 @@ def package_acquired(context, request_data):
:rtype: dict
'''
log.info('Notification received: %s' % request_data)
# Check access
plugins.toolkit.check_access(constants.PACKAGE_ACQUIRED, context, request_data)
# Get the parser from the configuration
class_path = config.get(PARSER_CONFIG_PROP, '')
if class_path != '':
try:
cls = class_path.split(':')
class_package = cls[0]
class_name = cls[1]
parser_cls = getattr(importlib.import_module(class_package), class_name)
parser = parser_cls()
except Exception as e:
raise plugins.toolkit.ValidationError({'message': '%s: %s' % (type(e).__name__, str(e))})
else:
raise plugins.toolkit.ValidationError({'message': '%s not configured' % PARSER_CONFIG_PROP})
# Parse the result using the parser set in the configuration
# Expected result: {'errors': ["...", "...", ...],
# 'users_datasets': [{'user': 'user_name', 'datasets': ['ds1', 'ds2', ...]}, ...]}
result = parser.parse_notification(request_data)
warns = []
# Introduce the users into the datasets
for user_info in result['users_datasets']:
for dataset_id in user_info['datasets']:
try:
context_pkg_show = context.copy()
context_pkg_show['ignore_auth'] = True
context_pkg_show[constants.CONTEXT_CALLBACK] = True
dataset = plugins.toolkit.get_action('package_show')(context_pkg_show, {'id': dataset_id})
# This operation can only be performed with private datasets
# This check is redundant since the package_update function will throw an exception
# if a list of allowed users is included in a public dataset. However, this check
# is performed here in order to avoid unexpected exceptions in the future
if dataset.get('private', None) is True:
# Create the array if it does not exist
if constants.ALLOWED_USERS not in dataset or dataset[constants.ALLOWED_USERS] is None:
dataset[constants.ALLOWED_USERS] = []
# Add the user only if it is not in the list
if user_info['user'] not in dataset[constants.ALLOWED_USERS]:
dataset[constants.ALLOWED_USERS].append(user_info['user'])
context_pkg_update = context.copy()
context_pkg_update['ignore_auth'] = True
# Set creator as the user who is performing the changes
user_show = plugins.toolkit.get_action('user_show')
creator_user_id = dataset.get('creator_user_id', '')
user_show_context = {'ignore_auth': True}
user = user_show(user_show_context, {'id': creator_user_id})
context_pkg_update['user'] = user.get('name', '')
plugins.toolkit.get_action('package_update')(context_pkg_update, dataset)
log.info('Allowed Users added correctly')
else:
log.warn('The user %s is already allowed to access the %s dataset' % (user_info['user'], dataset_id))
else:
log.warn('Dataset %s is public. Allowed Users cannot be added')
warns.append('Unable to upload the dataset %s: It\'s a public dataset' % dataset_id)
except plugins.toolkit.ObjectNotFound:
# If a dataset does not exist in the instance, an error message will be returned to the user.
# However, the process will not stop; it will continue with the remaining datasets.
log.warn('Dataset %s was not found in this instance' % dataset_id)
warns.append('Dataset %s was not found in this instance' % dataset_id)
except plugins.toolkit.ValidationError as e:
# Some datasets do not allow the list of allowed users to be set, since this property is
# only valid for private datasets outside an organization. In this case, a warning is
# returned but the process continues.
# WARN: This exception should not be raised anymore since public datasets are not updated.
message = '%s(%s): %s' % (dataset_id, constants.ALLOWED_USERS, e.error_dict[constants.ALLOWED_USERS][0])
log.warn(message)
warns.append(message)
# Return warnings that inform about non-existing datasets
if len(warns) > 0:
return {'warns': warns}
context['method'] = 'grant'
return _process_package(context, request_data)
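# NOTE (illustrative sketch, not part of this commit's diff): the dict shape that
# parse_notification() is expected to return, per the "Expected result" comment above.
# The user and dataset names below are hypothetical placeholders.
example_result = {
    'errors': [],
    'users_datasets': [
        {'user': 'user_name', 'datasets': ['ds1', 'ds2']},
    ],
}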
def acquisitions_list(context, data_dict):
'''
@@ -194,9 +110,10 @@ def acquisitions_list(context, data_dict):
pass
return result
def package_deleted(context, request_data):
'''
API action to be called every time a user deletes a dataset in an external service.
API action to be called in order to revoke a user's access grants.
This API should be called to delete the user from the list of allowed users.
@@ -217,10 +134,15 @@ def package_deleted(context, request_data):
:rtype: dict
'''
context['method'] = 'revoke'
return _process_package(context, request_data)
def _process_package(context, request_data):
log.info('Notification received: %s' % request_data)
# Check access
plugins.toolkit.check_access(constants.PACKAGE_DELETED, context, request_data)
method = constants.PACKAGE_ACQUIRED if context.get('method') == 'grant' else constants.PACKAGE_DELETED
plugins.toolkit.check_access(method, context, request_data)
# Get the parser from the configuration
class_path = config.get(PARSER_CONFIG_PROP, '')
@@ -263,9 +185,15 @@ def package_deleted(context, request_data):
if constants.ALLOWED_USERS not in dataset or dataset[constants.ALLOWED_USERS] is None:
dataset[constants.ALLOWED_USERS] = []
method = context['method'] == 'grant'
present = user_info['user'] in dataset[constants.ALLOWED_USERS]
# Deletes the user only if it is in the list
if user_info['user'] in dataset[constants.ALLOWED_USERS]:
dataset[constants.ALLOWED_USERS].remove(user_info['user'])
if (not method and present) or (method and not present):
if method:
dataset[constants.ALLOWED_USERS].append(user_info['user'])
else:
dataset[constants.ALLOWED_USERS].remove(user_info['user'])
context_pkg_update = context.copy()
context_pkg_update['ignore_auth'] = True
@@ -277,11 +205,11 @@ def package_deleted(context, request_data):
context_pkg_update['user'] = user.get('name', '')
plugins.toolkit.get_action('package_update')(context_pkg_update, dataset)
log.info('Allowed Users deleted correctly')
log.info('Action -%s access to user- ended successfully' % context['method'])
else:
log.warn('The user %s is already deleted from accessing the %s dataset' % (user_info['user'], dataset_id))
log.warn('Action -%s access to user- not completed. The dataset %s already %s access to the user %s' % (context['method'], dataset_id, context['method'], user_info['user']))
else:
log.warn('Dataset %s is public. Allowed Users cannot be deleted')
log.warn('Dataset %s is public. Cannot %s access to users' % (dataset_id, context['method']))
warns.append('Unable to upload the dataset %s: It\'s a public dataset' % dataset_id)
except plugins.toolkit.ObjectNotFound:

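The refactor funnels both API actions through a single _process_package helper: package_acquired sets context['method'] = 'grant', package_deleted sets it to 'revoke', and the allowed-users list is only modified when the operation would actually change it. A minimal standalone sketch of that toggle, assuming it mirrors the condition in the diff above (function and variable names here are hypothetical):

def toggle_allowed_user(allowed_users, user, method):
    # method mirrors context['method']: 'grant' adds the user, 'revoke' removes it.
    grant = method == 'grant'
    present = user in allowed_users
    # Only touch the list when the action would change it, matching
    # (not method and present) or (method and not present) in the diff above.
    if (not grant and present) or (grant and not present):
        if grant:
            allowed_users.append(user)
        else:
            allowed_users.remove(user)
        return True   # a package_update would follow
    return False      # nothing to do; only a warning would be logged

# Hypothetical usage:
users = ['alice']
toggle_allowed_user(users, 'bob', 'grant')     # users -> ['alice', 'bob']
toggle_allowed_user(users, 'alice', 'revoke')  # users -> ['bob']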
View File

@@ -61,3 +61,4 @@ class FiWareNotificationParser(object):
raise tk.ValidationError({'message': 'Invalid resource format'})
return {'users_datasets': [{'user': user_name, 'datasets': datasets}]}

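For reference, the parser class is loaded from the ckan.privatedatasets.parser setting in the form 'package.module:ClassName' (see the importlib logic above) and only needs a parse_notification(request_data) method returning the dict shape shown earlier, raising tk.ValidationError on bad input. A minimal sketch of a custom parser; the module path and payload format below are assumptions, not the real FiWare format:

import ckan.plugins.toolkit as tk


class SimpleNotificationParser(object):
    # Hypothetical parser expecting {'user': '...', 'datasets': ['...', ...]}.
    # It could be wired in with, for example:
    #   ckan.privatedatasets.parser = ckanext.myext.parsers:SimpleNotificationParser

    def parse_notification(self, request_data):
        if 'user' not in request_data or 'datasets' not in request_data:
            raise tk.ValidationError({'message': 'Invalid notification format'})
        return {'users_datasets': [{'user': request_data['user'],
                                    'datasets': list(request_data['datasets'])}]}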
View File

@@ -64,7 +64,6 @@ class ActionsTest(unittest.TestCase):
])
def test_class_cannot_be_loaded(self, class_path, class_name, path_exist, class_exist, expected_error):
class_package = class_path
class_package += ':' + class_name if class_name else ''
actions.config = {PARSER_CONFIG_PROP: class_package}
@@ -80,11 +79,9 @@ class ActionsTest(unittest.TestCase):
actions.importlib.import_module = MagicMock(side_effect=ImportError(IMPORT_ERROR_MSG) if not path_exist else None,
return_value=package if path_exist else None)
# Call the function
if expected_error:
with self.assertRaises(actions.plugins.toolkit.ValidationError) as cm:
actions.package_acquired({}, {})
actions.package_deleted({},{})
self.assertEqual(cm.exception.error_dict['message'], expected_error)
else:
# Exception is not raised
@@ -93,17 +90,6 @@ class ActionsTest(unittest.TestCase):
# Checks
self.assertEquals(0, actions.plugins.toolkit.get_action.call_count)
if expected_error:
with self.assertRaises(actions.plugins.toolkit.ValidationError) as cm:
actions.package_deleted({},{})
self.assertEqual(cm.exception.error_dict['message'], expected_error)
else:
# Exception is not raised
self.assertEquals(None, actions.package_deleted({},{}))
# Checks
self.assertEquals(0, actions.plugins.toolkit.get_action.call_count)
def configure_mocks(self, parse_result, datasets_not_found=[], not_updatable_datasets=[],
allowed_users=None, creator_user={'id': '1234', 'name': 'ckan'}):
@@ -182,7 +168,7 @@ class ActionsTest(unittest.TestCase):
datasets_not_found, not_updatable_datasets, allowed_users, creator_user)
# Call the function
context = {'user': 'user1', 'model': 'model', 'auth_obj': {'id': 1}}
context = {'user': 'user1', 'model': 'model', 'auth_obj': {'id': 1}, 'method': 'grant'}
result = actions.package_acquired(context, users_info)
# Calculate the list of warns
@@ -338,7 +324,7 @@ class ActionsTest(unittest.TestCase):
datasets_not_found, not_updatable_datasets, allowed_users, creator_user)
# Call the function
context = {'user': 'user1', 'model': 'model', 'auth_obj': {'id': 1}}
context = {'user': 'user1', 'model': 'model', 'auth_obj': {'id': 1}, 'method': 'revoke'}
result = actions.package_deleted(context, users_info)
# Calculate the list of warns

View File

@@ -4,10 +4,45 @@ host = 0.0.0.0
port = 5000
[app:main]
use = config:/usr/lib/ckan/default/src/ckan/test-core.ini
use = config:./ckan/test-core.ini
ckan.legacy_templates = no
ckan.plugins = privatedatasets
ckan.privatedatasets.parser = ckanext.privatedatasets.parsers.fiware:FiWareNotificationParser
ckan.privatedatasets.show_acquire_url_on_create = True
ckan.privatedatasets.show_acquire_url_on_edit = True
sqlalchemy.url = postgresql://ckan_default:pass@127.0.0.1:5432/ckan_test
ckan.datastore.write_url = postgresql://ckan_default:pass@127.0.0.1:5432/datastore_test
ckan.datastore.read_url = postgresql://datastore_default:pass@127.0.0.1:5432/datastore_test
ckan.storage_path=data/storage