summaryrefslogtreecommitdiffstats
path: root/src/pybind/mgr/dashboard/tests
diff options
context:
space:
mode:
Diffstat (limited to 'src/pybind/mgr/dashboard/tests')
-rw-r--r--src/pybind/mgr/dashboard/tests/__init__.py262
-rw-r--r--src/pybind/mgr/dashboard/tests/helper.py56
-rw-r--r--src/pybind/mgr/dashboard/tests/test_access_control.py695
-rw-r--r--src/pybind/mgr/dashboard/tests/test_api_auditing.py94
-rw-r--r--src/pybind/mgr/dashboard/tests/test_auth.py20
-rw-r--r--src/pybind/mgr/dashboard/tests/test_ceph_service.py67
-rw-r--r--src/pybind/mgr/dashboard/tests/test_cephfs.py48
-rw-r--r--src/pybind/mgr/dashboard/tests/test_controllers.py192
-rw-r--r--src/pybind/mgr/dashboard/tests/test_docs.py71
-rw-r--r--src/pybind/mgr/dashboard/tests/test_erasure_code_profile.py36
-rw-r--r--src/pybind/mgr/dashboard/tests/test_exceptions.py157
-rw-r--r--src/pybind/mgr/dashboard/tests/test_feature_toggles.py61
-rw-r--r--src/pybind/mgr/dashboard/tests/test_ganesha.py642
-rw-r--r--src/pybind/mgr/dashboard/tests/test_grafana.py116
-rw-r--r--src/pybind/mgr/dashboard/tests/test_home.py68
-rw-r--r--src/pybind/mgr/dashboard/tests/test_iscsi.py998
-rw-r--r--src/pybind/mgr/dashboard/tests/test_notification.py138
-rw-r--r--src/pybind/mgr/dashboard/tests/test_osd.py240
-rw-r--r--src/pybind/mgr/dashboard/tests/test_plugin_debug.py38
-rw-r--r--src/pybind/mgr/dashboard/tests/test_pool.py117
-rw-r--r--src/pybind/mgr/dashboard/tests/test_prometheus.py128
-rw-r--r--src/pybind/mgr/dashboard/tests/test_rbd_mirroring.py95
-rw-r--r--src/pybind/mgr/dashboard/tests/test_rbd_service.py37
-rw-r--r--src/pybind/mgr/dashboard/tests/test_rest_client.py94
-rw-r--r--src/pybind/mgr/dashboard/tests/test_rest_tasks.py86
-rw-r--r--src/pybind/mgr/dashboard/tests/test_rgw.py129
-rw-r--r--src/pybind/mgr/dashboard/tests/test_rgw_client.py112
-rw-r--r--src/pybind/mgr/dashboard/tests/test_settings.py191
-rw-r--r--src/pybind/mgr/dashboard/tests/test_sso.py157
-rw-r--r--src/pybind/mgr/dashboard/tests/test_task.py433
-rw-r--r--src/pybind/mgr/dashboard/tests/test_tools.py188
31 files changed, 5766 insertions, 0 deletions
diff --git a/src/pybind/mgr/dashboard/tests/__init__.py b/src/pybind/mgr/dashboard/tests/__init__.py
new file mode 100644
index 00000000..58998690
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/__init__.py
@@ -0,0 +1,262 @@
+# -*- coding: utf-8 -*-
+# pylint: disable=too-many-arguments
+from __future__ import absolute_import
+
+import json
+import threading
+import sys
+import time
+
+import cherrypy
+from cherrypy._cptools import HandlerWrapperTool
+from cherrypy.test import helper
+from pyfakefs import fake_filesystem
+
+from mgr_module import CLICommand, MgrModule
+
+from .. import logger, mgr
+from ..controllers import json_error_page, generate_controller_routes
+from ..services.auth import AuthManagerTool
+from ..services.exception import dashboard_exception_handler
+
+from ..plugins import PLUGIN_MANAGER
+from ..plugins import feature_toggles, debug # noqa # pylint: disable=unused-import
+
+
+PLUGIN_MANAGER.hook.init()
+PLUGIN_MANAGER.hook.register_commands()
+
+
+class CmdException(Exception):
+ def __init__(self, retcode, message):
+ super(CmdException, self).__init__(message)
+ self.retcode = retcode
+
+
+def exec_dashboard_cmd(command_handler, cmd, **kwargs):
+ inbuf = kwargs['inbuf'] if 'inbuf' in kwargs else None
+ cmd_dict = {'prefix': 'dashboard {}'.format(cmd)}
+ cmd_dict.update(kwargs)
+ if cmd_dict['prefix'] not in CLICommand.COMMANDS:
+ ret, out, err = command_handler(cmd_dict)
+ if ret < 0:
+ raise CmdException(ret, err)
+ try:
+ return json.loads(out)
+ except ValueError:
+ return out
+
+ ret, out, err = CLICommand.COMMANDS[cmd_dict['prefix']].call(mgr, cmd_dict, inbuf)
+ if ret < 0:
+ raise CmdException(ret, err)
+ try:
+ return json.loads(out)
+ except ValueError:
+ return out
+
+
+class KVStoreMockMixin(object):
+ CONFIG_KEY_DICT = {}
+
+ @classmethod
+ def mock_set_module_option(cls, attr, val):
+ cls.CONFIG_KEY_DICT[attr] = val
+
+ @classmethod
+ def mock_get_module_option(cls, attr, default=None):
+ return cls.CONFIG_KEY_DICT.get(attr, default)
+
+ @classmethod
+ def mock_kv_store(cls):
+ cls.CONFIG_KEY_DICT.clear()
+ mgr.set_module_option.side_effect = cls.mock_set_module_option
+ mgr.get_module_option.side_effect = cls.mock_get_module_option
+        # Kludge: back the KV-store API (get/set_store) with the same dict
+ mgr.set_store.side_effect = cls.mock_set_module_option
+ mgr.get_store.side_effect = cls.mock_get_module_option
+
+ @classmethod
+ def get_key(cls, key):
+ return cls.CONFIG_KEY_DICT.get(key, None)
+
+
+class CLICommandTestMixin(KVStoreMockMixin):
+ @classmethod
+ def exec_cmd(cls, cmd, **kwargs):
+ return exec_dashboard_cmd(None, cmd, **kwargs)
+
+
+class FakeFsMixin(object):
+ fs = fake_filesystem.FakeFilesystem()
+ f_open = fake_filesystem.FakeFileOpen(fs)
+ f_os = fake_filesystem.FakeOsModule(fs)
+
+ if sys.version_info > (3, 0):
+ builtins_open = 'builtins.open'
+ else:
+ builtins_open = '__builtin__.open'
+
+
+class ControllerTestCase(helper.CPWebCase):
+ _endpoints_cache = {}
+
+ @classmethod
+ def setup_controllers(cls, ctrl_classes, base_url=''):
+ if not isinstance(ctrl_classes, list):
+ ctrl_classes = [ctrl_classes]
+ mapper = cherrypy.dispatch.RoutesDispatcher()
+ endpoint_list = []
+ for ctrl in ctrl_classes:
+ inst = ctrl()
+
+ # We need to cache the controller endpoints because
+            # BaseController#endpoints method is not idempotent
+ # and a controller might be needed by more than one
+ # unit test.
+ if ctrl not in cls._endpoints_cache:
+ ctrl_endpoints = ctrl.endpoints()
+ cls._endpoints_cache[ctrl] = ctrl_endpoints
+
+ ctrl_endpoints = cls._endpoints_cache[ctrl]
+ for endpoint in ctrl_endpoints:
+ endpoint.inst = inst
+ endpoint_list.append(endpoint)
+ endpoint_list = sorted(endpoint_list, key=lambda e: e.url)
+ for endpoint in endpoint_list:
+ generate_controller_routes(endpoint, mapper, base_url)
+ if base_url == '':
+ base_url = '/'
+ cherrypy.tree.mount(None, config={
+ base_url: {'request.dispatch': mapper}})
+
+ def __init__(self, *args, **kwargs):
+ cherrypy.tools.authenticate = AuthManagerTool()
+ cherrypy.tools.dashboard_exception_handler = HandlerWrapperTool(dashboard_exception_handler,
+ priority=31)
+ cherrypy.config.update({
+ 'error_page.default': json_error_page,
+ 'tools.json_in.on': True,
+ 'tools.json_in.force': False
+ })
+ PLUGIN_MANAGER.hook.configure_cherrypy(config=cherrypy.config)
+ super(ControllerTestCase, self).__init__(*args, **kwargs)
+
+ def _request(self, url, method, data=None, headers=None):
+ if not data:
+ b = None
+ h = None
+ else:
+ b = json.dumps(data)
+ h = [('Content-Type', 'application/json'),
+ ('Content-Length', str(len(b)))]
+ if headers:
+ h = headers
+ self.getPage(url, method=method, body=b, headers=h)
+
+ def _get(self, url, headers=None):
+ self._request(url, 'GET', headers=headers)
+
+ def _post(self, url, data=None):
+ self._request(url, 'POST', data)
+
+ def _delete(self, url, data=None):
+ self._request(url, 'DELETE', data)
+
+ def _put(self, url, data=None):
+ self._request(url, 'PUT', data)
+
+ def _task_request(self, method, url, data, timeout):
+ self._request(url, method, data)
+ if self.status != '202 Accepted':
+ logger.info("task finished immediately")
+ return
+
+ res = self.jsonBody()
+ self.assertIsInstance(res, dict)
+ self.assertIn('name', res)
+ self.assertIn('metadata', res)
+
+ task_name = res['name']
+ task_metadata = res['metadata']
+
+ # pylint: disable=protected-access
+ class Waiter(threading.Thread):
+ def __init__(self, task_name, task_metadata, tc):
+ super(Waiter, self).__init__()
+ self.task_name = task_name
+ self.task_metadata = task_metadata
+ self.ev = threading.Event()
+ self.abort = False
+ self.res_task = None
+ self.tc = tc
+
+ def run(self):
+ running = True
+ while running and not self.abort:
+ logger.info("task (%s, %s) is still executing", self.task_name,
+ self.task_metadata)
+ time.sleep(1)
+ self.tc._get('/api/task?name={}'.format(self.task_name))
+ res = self.tc.jsonBody()
+ for task in res['finished_tasks']:
+ if task['metadata'] == self.task_metadata:
+ # task finished
+ running = False
+ self.res_task = task
+ self.ev.set()
+
+ thread = Waiter(task_name, task_metadata, self)
+ thread.start()
+ status = thread.ev.wait(timeout)
+ if not status:
+ # timeout expired
+ thread.abort = True
+ thread.join()
+ raise Exception("Waiting for task ({}, {}) to finish timed out"
+ .format(task_name, task_metadata))
+ logger.info("task (%s, %s) finished", task_name, task_metadata)
+ if thread.res_task['success']:
+ self.body = json.dumps(thread.res_task['ret_value'])
+ if method == 'POST':
+ self.status = '201 Created'
+ elif method == 'PUT':
+ self.status = '200 OK'
+ elif method == 'DELETE':
+ self.status = '204 No Content'
+ return
+ else:
+ if 'status' in thread.res_task['exception']:
+ self.status = thread.res_task['exception']['status']
+ else:
+ self.status = 500
+ self.body = json.dumps(thread.res_task['exception'])
+ return
+
+ def _task_post(self, url, data=None, timeout=60):
+ self._task_request('POST', url, data, timeout)
+
+ def _task_delete(self, url, timeout=60):
+ self._task_request('DELETE', url, None, timeout)
+
+ def _task_put(self, url, data=None, timeout=60):
+ self._task_request('PUT', url, data, timeout)
+
+ def jsonBody(self):
+ body_str = self.body.decode('utf-8') if isinstance(self.body, bytes) else self.body
+ return json.loads(body_str)
+
+ def assertJsonBody(self, data, msg=None):
+ """Fail if value != self.body."""
+ json_body = self.jsonBody()
+ if data != json_body:
+ if msg is None:
+ msg = 'expected body:\n%r\n\nactual body:\n%r' % (
+ data, json_body)
+ self._handlewebError(msg)
+
+ def assertInJsonBody(self, data, msg=None):
+ json_body = self.jsonBody()
+ if data not in json_body:
+ if msg is None:
+ msg = 'expected %r to be in %r' % (data, json_body)
+ self._handlewebError(msg)
diff --git a/src/pybind/mgr/dashboard/tests/helper.py b/src/pybind/mgr/dashboard/tests/helper.py
new file mode 100644
index 00000000..9ec043d6
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/helper.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+try:
+ from typing import Dict, Any # pylint: disable=unused-import
+except ImportError:
+ pass
+
+
+def update_dict(data, update_data):
+    # type: (Dict[Any, Any], Dict[Any, Any]) -> Dict[Any, Any]
+ """ Update a dictionary recursively.
+
+ Eases doing so by providing the option to separate the key to be updated by dot characters. If
+ a key provided does not exist, it will raise an KeyError instead of just updating the
+ dictionary.
+
+ Limitations
+
+ Please note that the functionality provided by this method can only be used if the dictionary to
+ be updated (`data`) does not contain dot characters in its keys.
+
+ :raises KeyError:
+
+ >>> update_dict({'foo': {'bar': 5}}, {'foo.bar': 10})
+ {'foo': {'bar': 10}}
+
+ >>> update_dict({'foo': {'bar': 5}}, {'xyz': 10})
+ Traceback (most recent call last):
+ ...
+ KeyError: 'xyz'
+
+ >>> update_dict({'foo': {'bar': 5}}, {'foo.xyz': 10})
+ Traceback (most recent call last):
+ ...
+ KeyError: 'xyz'
+ """
+ for k, v in update_data.items():
+ keys = k.split('.')
+ element = None
+ for i, key in enumerate(keys):
+ last = False
+ if len(keys) == i + 1:
+ last = True
+
+ if not element:
+ element = data[key]
+ elif not last:
+ element = element[key] # pylint: disable=unsubscriptable-object
+
+ if last:
+ if key not in element:
+ raise KeyError(key)
+
+ element[key] = v
+ return data
diff --git a/src/pybind/mgr/dashboard/tests/test_access_control.py b/src/pybind/mgr/dashboard/tests/test_access_control.py
new file mode 100644
index 00000000..34452267
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_access_control.py
@@ -0,0 +1,695 @@
+# -*- coding: utf-8 -*-
+# pylint: disable=dangerous-default-value,too-many-public-methods
+from __future__ import absolute_import
+
+import errno
+import json
+import tempfile
+import time
+import unittest
+
+from mgr_module import ERROR_MSG_EMPTY_INPUT_FILE
+
+from . import CmdException, CLICommandTestMixin
+from .. import mgr
+from ..security import Scope, Permission
+from ..services.access_control import load_access_control_db, \
+ password_hash, AccessControlDB, \
+ SYSTEM_ROLES
+
+
+class AccessControlTest(unittest.TestCase, CLICommandTestMixin):
+
+ @classmethod
+ def setUpClass(cls):
+ cls.mock_kv_store()
+ mgr.ACCESS_CONTROL_DB = None
+
+ def setUp(self):
+ self.CONFIG_KEY_DICT.clear()
+ load_access_control_db()
+
+ def load_persistent_db(self):
+ config_key = AccessControlDB.accessdb_config_key()
+ self.assertIn(config_key, self.CONFIG_KEY_DICT)
+ db_json = self.CONFIG_KEY_DICT[config_key]
+ db = json.loads(db_json)
+ return db
+
+ # The DB is written to persistent storage the first time it is saved.
+ # However, should an operation fail due to <reasons>, we may end up in
+ # a state where we have a completely empty CONFIG_KEY_DICT (our mock
+ # equivalent to the persistent state). While this works for most of the
+ # tests in this class, that would prevent us from testing things like
+ # "run a command that is expected to fail, and then ensure nothing
+ # happened", because we'd be asserting in `load_persistent_db()` due to
+ # the map being empty.
+ #
+ # This function will therefore force state to be written to our mock
+ # persistent state. We could have added this extra step to
+ # `load_persistent_db()` directly, but that would conflict with the
+ # upgrade tests. This way, we can selectively enforce this requirement
+ # where we believe it to be necessary; generically speaking, this should
+ # not be needed unless we're testing very specific behaviors.
+ #
+ def setup_and_load_persistent_db(self):
+ mgr.ACCESS_CTRL_DB.save()
+ self.load_persistent_db()
+
+ def validate_persistent_role(self, rolename, scopes_permissions,
+ description=None):
+ db = self.load_persistent_db()
+ self.assertIn('roles', db)
+ self.assertIn(rolename, db['roles'])
+ self.assertEqual(db['roles'][rolename]['name'], rolename)
+ self.assertEqual(db['roles'][rolename]['description'], description)
+ self.assertDictEqual(db['roles'][rolename]['scopes_permissions'],
+ scopes_permissions)
+
+ def validate_persistent_no_role(self, rolename):
+ db = self.load_persistent_db()
+ self.assertIn('roles', db)
+ self.assertNotIn(rolename, db['roles'])
+
+ def validate_persistent_user(self, username, roles, password=None,
+ name=None, email=None, lastUpdate=None):
+ db = self.load_persistent_db()
+ self.assertIn('users', db)
+ self.assertIn(username, db['users'])
+ self.assertEqual(db['users'][username]['username'], username)
+ self.assertListEqual(db['users'][username]['roles'], roles)
+ if password:
+ self.assertEqual(db['users'][username]['password'], password)
+ if name:
+ self.assertEqual(db['users'][username]['name'], name)
+ if email:
+ self.assertEqual(db['users'][username]['email'], email)
+ if lastUpdate:
+ self.assertEqual(db['users'][username]['lastUpdate'], lastUpdate)
+
+ def validate_persistent_no_user(self, username):
+ db = self.load_persistent_db()
+ self.assertIn('users', db)
+ self.assertNotIn(username, db['users'])
+
+ def test_create_role(self):
+ role = self.exec_cmd('ac-role-create', rolename='test_role')
+ self.assertDictEqual(role, {'name': 'test_role', 'description': None,
+ 'scopes_permissions': {}})
+ self.validate_persistent_role('test_role', {})
+
+ def test_create_role_with_desc(self):
+ role = self.exec_cmd('ac-role-create', rolename='test_role',
+ description='Test Role')
+ self.assertDictEqual(role, {'name': 'test_role',
+ 'description': 'Test Role',
+ 'scopes_permissions': {}})
+ self.validate_persistent_role('test_role', {}, 'Test Role')
+
+ def test_create_duplicate_role(self):
+ self.test_create_role()
+
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-role-create', rolename='test_role')
+
+ self.assertEqual(ctx.exception.retcode, -errno.EEXIST)
+ self.assertEqual(str(ctx.exception), "Role 'test_role' already exists")
+
+ def test_delete_role(self):
+ self.test_create_role()
+ out = self.exec_cmd('ac-role-delete', rolename='test_role')
+ self.assertEqual(out, "Role 'test_role' deleted")
+ self.validate_persistent_no_role('test_role')
+
+ def test_delete_nonexistent_role(self):
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-role-delete', rolename='test_role')
+
+ self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+ self.assertEqual(str(ctx.exception), "Role 'test_role' does not exist")
+
+ def test_show_single_role(self):
+ self.test_create_role()
+ role = self.exec_cmd('ac-role-show', rolename='test_role')
+ self.assertDictEqual(role, {'name': 'test_role', 'description': None,
+ 'scopes_permissions': {}})
+
+ def test_show_nonexistent_role(self):
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-role-show', rolename='test_role')
+
+ self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+ self.assertEqual(str(ctx.exception), "Role 'test_role' does not exist")
+
+ def test_show_system_roles(self):
+ roles = self.exec_cmd('ac-role-show')
+ self.assertEqual(len(roles), len(SYSTEM_ROLES))
+ for role in roles:
+ self.assertIn(role, SYSTEM_ROLES)
+
+ def test_show_system_role(self):
+ role = self.exec_cmd('ac-role-show', rolename="read-only")
+ self.assertEqual(role['name'], 'read-only')
+ self.assertEqual(role['description'], 'Read-Only')
+
+ def test_delete_system_role(self):
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-role-delete', rolename='administrator')
+
+ self.assertEqual(ctx.exception.retcode, -errno.EPERM)
+ self.assertEqual(str(ctx.exception),
+ "Cannot delete system role 'administrator'")
+
+ def test_add_role_scope_perms(self):
+ self.test_create_role()
+ self.exec_cmd('ac-role-add-scope-perms', rolename='test_role',
+ scopename=Scope.POOL,
+ permissions=[Permission.READ, Permission.DELETE])
+ role = self.exec_cmd('ac-role-show', rolename='test_role')
+ self.assertDictEqual(role, {'name': 'test_role',
+ 'description': None,
+ 'scopes_permissions': {
+ Scope.POOL: [Permission.DELETE,
+ Permission.READ]
+ }})
+ self.validate_persistent_role('test_role', {
+ Scope.POOL: [Permission.DELETE, Permission.READ]
+ })
+
+ def test_del_role_scope_perms(self):
+ self.test_add_role_scope_perms()
+ self.exec_cmd('ac-role-add-scope-perms', rolename='test_role',
+ scopename=Scope.MONITOR,
+ permissions=[Permission.READ, Permission.CREATE])
+ self.validate_persistent_role('test_role', {
+ Scope.POOL: [Permission.DELETE, Permission.READ],
+ Scope.MONITOR: [Permission.CREATE, Permission.READ]
+ })
+ self.exec_cmd('ac-role-del-scope-perms', rolename='test_role',
+ scopename=Scope.POOL)
+ role = self.exec_cmd('ac-role-show', rolename='test_role')
+ self.assertDictEqual(role, {'name': 'test_role',
+ 'description': None,
+ 'scopes_permissions': {
+ Scope.MONITOR: [Permission.CREATE,
+ Permission.READ]
+ }})
+ self.validate_persistent_role('test_role', {
+ Scope.MONITOR: [Permission.CREATE, Permission.READ]
+ })
+
+ def test_add_role_scope_perms_nonexistent_role(self):
+
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-role-add-scope-perms', rolename='test_role',
+ scopename='pool',
+ permissions=['read', 'delete'])
+
+ self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+ self.assertEqual(str(ctx.exception), "Role 'test_role' does not exist")
+
+ def test_add_role_invalid_scope_perms(self):
+ self.test_create_role()
+
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-role-add-scope-perms', rolename='test_role',
+ scopename='invalidscope',
+ permissions=['read', 'delete'])
+
+ self.assertEqual(ctx.exception.retcode, -errno.EINVAL)
+ self.assertEqual(str(ctx.exception),
+ "Scope 'invalidscope' is not valid\n Possible values: "
+ "{}".format(Scope.all_scopes()))
+
+ def test_add_role_scope_invalid_perms(self):
+ self.test_create_role()
+
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-role-add-scope-perms', rolename='test_role',
+ scopename='pool', permissions=['invalidperm'])
+
+ self.assertEqual(ctx.exception.retcode, -errno.EINVAL)
+ self.assertEqual(str(ctx.exception),
+ "Permission 'invalidperm' is not valid\n Possible "
+ "values: {}".format(Permission.all_permissions()))
+
+ def test_del_role_scope_perms_nonexistent_role(self):
+
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-role-del-scope-perms', rolename='test_role',
+ scopename='pool')
+
+ self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+ self.assertEqual(str(ctx.exception), "Role 'test_role' does not exist")
+
+ def test_del_role_nonexistent_scope_perms(self):
+ self.test_add_role_scope_perms()
+
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-role-del-scope-perms', rolename='test_role',
+ scopename='nonexistentscope')
+
+ self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+ self.assertEqual(str(ctx.exception),
+ "There are no permissions for scope 'nonexistentscope' "
+ "in role 'test_role'")
+
+ def test_not_permitted_add_role_scope_perms(self):
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-role-add-scope-perms', rolename='read-only',
+ scopename='pool', permissions=['read', 'delete'])
+
+ self.assertEqual(ctx.exception.retcode, -errno.EPERM)
+ self.assertEqual(str(ctx.exception),
+ "Cannot update system role 'read-only'")
+
+ def test_not_permitted_del_role_scope_perms(self):
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-role-del-scope-perms', rolename='read-only',
+ scopename='pool')
+
+ self.assertEqual(ctx.exception.retcode, -errno.EPERM)
+ self.assertEqual(str(ctx.exception),
+ "Cannot update system role 'read-only'")
+
+ def test_create_user(self, username='admin', rolename=None):
+ user = self.exec_cmd('ac-user-create', username=username,
+ rolename=rolename, inbuf='admin',
+ name='{} User'.format(username),
+ email='{}@user.com'.format(username))
+
+ pass_hash = password_hash('admin', user['password'])
+ self.assertDictEqual(user, {
+ 'username': username,
+ 'password': pass_hash,
+ 'lastUpdate': user['lastUpdate'],
+ 'name': '{} User'.format(username),
+ 'email': '{}@user.com'.format(username),
+ 'roles': [rolename] if rolename else []
+ })
+ self.validate_persistent_user(username, [rolename] if rolename else [],
+ pass_hash, '{} User'.format(username),
+ '{}@user.com'.format(username),
+ user['lastUpdate'])
+ return user
+
+ def test_create_user_with_role(self):
+ self.test_add_role_scope_perms()
+ self.test_create_user(rolename='test_role')
+
+ def test_create_user_with_system_role(self):
+ self.test_create_user(rolename='administrator')
+
+ def test_delete_user(self):
+ self.test_create_user()
+ out = self.exec_cmd('ac-user-delete', username='admin')
+ self.assertEqual(out, "User 'admin' deleted")
+ users = self.exec_cmd('ac-user-show')
+ self.assertEqual(len(users), 0)
+ self.validate_persistent_no_user('admin')
+
+ def test_create_duplicate_user(self):
+ self.test_create_user()
+
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-user-create', username='admin', inbuf='admin')
+
+ self.assertEqual(ctx.exception.retcode, -errno.EEXIST)
+ self.assertEqual(str(ctx.exception), "User 'admin' already exists")
+
+ def test_create_users_with_dne_role(self):
+ # one time call to setup our persistent db
+ self.setup_and_load_persistent_db()
+
+ # create a user with a role that does not exist; expect a failure
+ try:
+ self.exec_cmd('ac-user-create', username='foo',
+ rolename='dne_role', inbuf='foopass',
+ name='foo User', email='foo@user.com')
+ except CmdException as e:
+ self.assertEqual(e.retcode, -errno.ENOENT)
+
+ db = self.load_persistent_db()
+ if 'users' in db:
+ self.assertNotIn('foo', db['users'])
+
+ # We could just finish our test here, given we ensured that the user
+ # with a non-existent role is not in persistent storage. However,
+ # we're going to test the database's consistency, making sure that
+ # side-effects are not written to persistent storage once we commit
+ # an unrelated operation. To ensure this, we'll issue another
+ # operation that is sharing the same code path, and will check whether
+ # the next operation commits dirty state.
+
+ # create a role (this will be 'test_role')
+ self.test_create_role()
+ self.exec_cmd('ac-user-create', username='bar',
+ rolename='test_role', inbuf='barpass',
+ name='bar User', email='bar@user.com')
+
+ # validate db:
+ # user 'foo' should not exist
+ # user 'bar' should exist and have role 'test_role'
+ self.validate_persistent_user('bar', ['test_role'])
+
+ db = self.load_persistent_db()
+ self.assertIn('users', db)
+ self.assertNotIn('foo', db['users'])
+
+ def test_delete_nonexistent_user(self):
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-user-delete', username='admin')
+
+ self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+ self.assertEqual(str(ctx.exception), "User 'admin' does not exist")
+
+ def test_add_user_roles(self, username='admin',
+ roles=['pool-manager', 'block-manager']):
+ user_orig = self.test_create_user(username)
+ uroles = []
+ for role in roles:
+ uroles.append(role)
+ uroles.sort()
+ user = self.exec_cmd('ac-user-add-roles', username=username,
+ roles=[role])
+ self.assertDictContainsSubset({'roles': uroles}, user)
+ self.validate_persistent_user(username, uroles)
+ self.assertGreaterEqual(user['lastUpdate'], user_orig['lastUpdate'])
+
+ def test_add_user_roles2(self):
+ user_orig = self.test_create_user()
+ user = self.exec_cmd('ac-user-add-roles', username="admin",
+ roles=['pool-manager', 'block-manager'])
+ self.assertDictContainsSubset(
+ {'roles': ['block-manager', 'pool-manager']}, user)
+ self.validate_persistent_user('admin', ['block-manager',
+ 'pool-manager'])
+ self.assertGreaterEqual(user['lastUpdate'], user_orig['lastUpdate'])
+
+ def test_add_user_roles_not_existent_user(self):
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-user-add-roles', username="admin",
+ roles=['pool-manager', 'block-manager'])
+
+ self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+ self.assertEqual(str(ctx.exception), "User 'admin' does not exist")
+
+ def test_add_user_roles_not_existent_role(self):
+ self.test_create_user()
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-user-add-roles', username="admin",
+ roles=['Invalid Role'])
+
+ self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+ self.assertEqual(str(ctx.exception),
+ "Role 'Invalid Role' does not exist")
+
+ def test_set_user_roles(self):
+ user_orig = self.test_create_user()
+ user = self.exec_cmd('ac-user-add-roles', username="admin",
+ roles=['pool-manager'])
+ self.assertDictContainsSubset(
+ {'roles': ['pool-manager']}, user)
+ self.validate_persistent_user('admin', ['pool-manager'])
+ self.assertGreaterEqual(user['lastUpdate'], user_orig['lastUpdate'])
+ user2 = self.exec_cmd('ac-user-set-roles', username="admin",
+ roles=['rgw-manager', 'block-manager'])
+ self.assertDictContainsSubset(
+ {'roles': ['block-manager', 'rgw-manager']}, user2)
+ self.validate_persistent_user('admin', ['block-manager',
+ 'rgw-manager'])
+ self.assertGreaterEqual(user2['lastUpdate'], user['lastUpdate'])
+
+ def test_set_user_roles_not_existent_user(self):
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-user-set-roles', username="admin",
+ roles=['pool-manager', 'block-manager'])
+
+ self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+ self.assertEqual(str(ctx.exception), "User 'admin' does not exist")
+
+ def test_set_user_roles_not_existent_role(self):
+ self.test_create_user()
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-user-set-roles', username="admin",
+ roles=['Invalid Role'])
+
+ self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+ self.assertEqual(str(ctx.exception),
+ "Role 'Invalid Role' does not exist")
+
+ def test_del_user_roles(self):
+ self.test_add_user_roles()
+ user = self.exec_cmd('ac-user-del-roles', username="admin",
+ roles=['pool-manager'])
+ self.assertDictContainsSubset(
+ {'roles': ['block-manager']}, user)
+ self.validate_persistent_user('admin', ['block-manager'])
+
+ def test_del_user_roles_not_existent_user(self):
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-user-del-roles', username="admin",
+ roles=['pool-manager', 'block-manager'])
+
+ self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+ self.assertEqual(str(ctx.exception), "User 'admin' does not exist")
+
+ def test_del_user_roles_not_existent_role(self):
+ self.test_create_user()
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-user-del-roles', username="admin",
+ roles=['Invalid Role'])
+
+ self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+ self.assertEqual(str(ctx.exception),
+ "Role 'Invalid Role' does not exist")
+
+ def test_del_user_roles_not_associated_role(self):
+ self.test_create_user()
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-user-del-roles', username="admin",
+ roles=['rgw-manager'])
+
+ self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+ self.assertEqual(str(ctx.exception),
+ "Role 'rgw-manager' is not associated with user "
+ "'admin'")
+
+ def test_show_user(self):
+ self.test_add_user_roles()
+ user = self.exec_cmd('ac-user-show', username='admin')
+ pass_hash = password_hash('admin', user['password'])
+ self.assertDictEqual(user, {
+ 'username': 'admin',
+ 'lastUpdate': user['lastUpdate'],
+ 'password': pass_hash,
+ 'name': 'admin User',
+ 'email': 'admin@user.com',
+ 'roles': ['block-manager', 'pool-manager']
+ })
+
+ def test_show_nonexistent_user(self):
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-user-show', username='admin')
+
+ self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+ self.assertEqual(str(ctx.exception), "User 'admin' does not exist")
+
+ def test_show_all_users(self):
+ self.test_add_user_roles('admin', ['administrator'])
+ self.test_add_user_roles('guest', ['read-only'])
+ users = self.exec_cmd('ac-user-show')
+ self.assertEqual(len(users), 2)
+ for user in users:
+ self.assertIn(user, ['admin', 'guest'])
+
+ def test_del_role_associated_with_user(self):
+ self.test_create_role()
+ self.test_add_user_roles('guest', ['test_role'])
+
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-role-delete', rolename='test_role')
+
+ self.assertEqual(ctx.exception.retcode, -errno.EPERM)
+ self.assertEqual(str(ctx.exception),
+ "Role 'test_role' is still associated with user "
+ "'guest'")
+
+ def test_set_user_info(self):
+ user_orig = self.test_create_user()
+ user = self.exec_cmd('ac-user-set-info', username='admin',
+ name='Admin Name', email='admin@admin.com')
+ pass_hash = password_hash('admin', user['password'])
+ self.assertDictEqual(user, {
+ 'username': 'admin',
+ 'password': pass_hash,
+ 'name': 'Admin Name',
+ 'email': 'admin@admin.com',
+ 'lastUpdate': user['lastUpdate'],
+ 'roles': []
+ })
+ self.validate_persistent_user('admin', [], pass_hash, 'Admin Name',
+ 'admin@admin.com')
+ self.assertEqual(user['lastUpdate'], user_orig['lastUpdate'])
+
+ def test_set_user_info_nonexistent_user(self):
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-user-set-info', username='admin',
+ name='Admin Name', email='admin@admin.com')
+
+ self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+ self.assertEqual(str(ctx.exception), "User 'admin' does not exist")
+
+ def test_set_user_password(self):
+ user_orig = self.test_create_user()
+ user = self.exec_cmd('ac-user-set-password', username='admin',
+ inbuf='newpass')
+ pass_hash = password_hash('newpass', user['password'])
+ self.assertDictEqual(user, {
+ 'username': 'admin',
+ 'password': pass_hash,
+ 'name': 'admin User',
+ 'email': 'admin@user.com',
+ 'lastUpdate': user['lastUpdate'],
+ 'roles': []
+ })
+ self.validate_persistent_user('admin', [], pass_hash, 'admin User',
+ 'admin@user.com')
+ self.assertGreaterEqual(user['lastUpdate'], user_orig['lastUpdate'])
+
    def test_sanitize_password(self):
        """Trailing newline separators in the password input are stripped,
        while literal backslash-escapes inside the password are kept."""
        self.test_create_user()
        password = 'myPass\\n\\r\\n'
        with tempfile.TemporaryFile(mode='w+') as pwd_file:
            # Add new line separators (like some text editors when a file is saved).
            pwd_file.write('{}{}'.format(password, '\n\r\n\n'))
            pwd_file.seek(0)
            user = self.exec_cmd('ac-user-set-password', username='admin',
                                 inbuf=pwd_file.read(), force_password=True)
            pass_hash = password_hash(password, user['password'])
            self.assertEqual(user['password'], pass_hash)
+
    def test_unicode_password(self):
        """A non-ASCII (Unicode) password must hash and verify correctly."""
        self.test_create_user()
        password = '章鱼不是密码'
        with tempfile.TemporaryFile(mode='w+') as pwd_file:
            pwd_file.write(password)
            pwd_file.seek(0)
            user = self.exec_cmd('ac-user-set-password', username='admin',
                                 inbuf=pwd_file.read(), force_password=True)
            pass_hash = password_hash(password, user['password'])
            self.assertEqual(user['password'], pass_hash)
+
+ def test_set_user_password_nonexistent_user(self):
+ with self.assertRaises(CmdException) as ctx:
+ self.exec_cmd('ac-user-set-password', username='admin',
+ inbuf='newpass')
+
+ self.assertEqual(ctx.exception.retcode, -errno.ENOENT)
+ self.assertEqual(str(ctx.exception), "User 'admin' does not exist")
+
    def test_set_user_password_empty(self):
        """An input file containing only a newline is rejected with EINVAL."""
        with self.assertRaises(CmdException) as ctx:
            self.exec_cmd('ac-user-set-password', username='admin', inbuf='\n')

        self.assertEqual(ctx.exception.retcode, -errno.EINVAL)
        self.assertEqual(str(ctx.exception), ERROR_MSG_EMPTY_INPUT_FILE)
+
    def test_set_login_credentials(self):
        """'set-login-credentials' creates the user with administrator role."""
        self.exec_cmd('set-login-credentials', username='admin',
                      inbuf='admin')
        user = self.exec_cmd('ac-user-show', username='admin')
        pass_hash = password_hash('admin', user['password'])
        self.assertDictEqual(user, {
            'username': 'admin',
            'password': pass_hash,
            'name': None,
            'email': None,
            'lastUpdate': user['lastUpdate'],
            'roles': ['administrator']
        })
        self.validate_persistent_user('admin', ['administrator'], pass_hash,
                                      None, None)
+
    def test_set_login_credentials_for_existing_user(self):
        """For an existing user only the password changes; roles/info stay."""
        self.test_add_user_roles('admin', ['read-only'])
        self.exec_cmd('set-login-credentials', username='admin',
                      inbuf='admin2')
        user = self.exec_cmd('ac-user-show', username='admin')
        pass_hash = password_hash('admin2', user['password'])
        self.assertDictEqual(user, {
            'username': 'admin',
            'password': pass_hash,
            'name': 'admin User',
            'email': 'admin@user.com',
            'lastUpdate': user['lastUpdate'],
            'roles': ['read-only']
        })
        self.validate_persistent_user('admin', ['read-only'], pass_hash,
                                      'admin User', 'admin@user.com')
+
    def test_load_v1(self):
        """Load a version-1 access-control DB from the mocked config-key
        store and verify users and roles are parsed back correctly."""
        # Doubled braces ({{ / }}) are literal braces for str.format below;
        # single-brace fields are filled with the timestamp and scopes.
        self.CONFIG_KEY_DICT['accessdb_v1'] = '''
            {{
                "users": {{
                    "admin": {{
                        "username": "admin",
                        "password":
                    "$2b$12$sd0Az7mm3FaJl8kN3b/xwOuztaN0sWUwC1SJqjM4wcDw/s5cmGbLK",
                        "roles": ["block-manager", "test_role"],
                        "name": "admin User",
                        "email": "admin@user.com",
                        "lastUpdate": {}
                    }}
                }},
                "roles": {{
                    "test_role": {{
                        "name": "test_role",
                        "description": "Test Role",
                        "scopes_permissions": {{
                            "{}": ["{}", "{}"],
                            "{}": ["{}"]
                        }}
                    }}
                }},
                "version": 1
            }}
        '''.format(int(round(time.time())), Scope.ISCSI, Permission.READ,
                   Permission.UPDATE, Scope.POOL, Permission.CREATE)

        load_access_control_db()
        role = self.exec_cmd('ac-role-show', rolename="test_role")
        self.assertDictEqual(role, {
            'name': 'test_role',
            'description': "Test Role",
            'scopes_permissions': {
                Scope.ISCSI: [Permission.READ, Permission.UPDATE],
                Scope.POOL: [Permission.CREATE]
            }
        })
        user = self.exec_cmd('ac-user-show', username="admin")
        self.assertDictEqual(user, {
            'username': 'admin',
            'lastUpdate': user['lastUpdate'],
            'password':
                "$2b$12$sd0Az7mm3FaJl8kN3b/xwOuztaN0sWUwC1SJqjM4wcDw/s5cmGbLK",
            'name': 'admin User',
            'email': 'admin@user.com',
            'roles': ['block-manager', 'test_role']
        })
+
    def test_update_from_previous_version_v1(self):
        """Legacy pre-v1 'username'/'password' config keys are migrated into
        a user with the administrator role on DB load."""
        self.CONFIG_KEY_DICT['username'] = 'admin'
        self.CONFIG_KEY_DICT['password'] = \
            '$2b$12$sd0Az7mm3FaJl8kN3b/xwOuztaN0sWUwC1SJqjM4wcDw/s5cmGbLK'
        load_access_control_db()
        user = self.exec_cmd('ac-user-show', username="admin")
        self.assertDictEqual(user, {
            'username': 'admin',
            'lastUpdate': user['lastUpdate'],
            'password':
                "$2b$12$sd0Az7mm3FaJl8kN3b/xwOuztaN0sWUwC1SJqjM4wcDw/s5cmGbLK",
            'name': None,
            'email': None,
            'roles': ['administrator']
        })
diff --git a/src/pybind/mgr/dashboard/tests/test_api_auditing.py b/src/pybind/mgr/dashboard/tests/test_api_auditing.py
new file mode 100644
index 00000000..ae95e340
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_api_auditing.py
@@ -0,0 +1,94 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import re
+import json
+import cherrypy
+import mock
+
+from . import ControllerTestCase, KVStoreMockMixin
+from ..controllers import RESTController, Controller
+from ..tools import RequestLoggingTool
+from .. import mgr
+
+
+# pylint: disable=W0613
@Controller('/foo', secure=False)
class FooResource(RESTController):
    """Minimal REST controller used as the target for audit-log tests.

    All handlers are no-ops; only the request metadata matters here.
    """

    def create(self, password):
        pass

    def get(self, key):
        pass

    def delete(self, key):
        pass

    def set(self, key, password, secret_key=None):
        pass
+
+
class ApiAuditingTest(ControllerTestCase, KVStoreMockMixin):
    """Verify REST API requests are audited to the cluster 'audit' channel."""

    def __init__(self, *args, **kwargs):
        # Install the request-logging tool globally before the base class
        # wires up the CherryPy test application.
        cherrypy.tools.request_logging = RequestLoggingTool()
        cherrypy.config.update({'tools.request_logging.on': True})
        super(ApiAuditingTest, self).__init__(*args, **kwargs)

    @classmethod
    def setup_server(cls):
        cls.setup_controllers([FooResource])

    def setUp(self):
        self.mock_kv_store()
        mgr.cluster_log = mock.Mock()
        mgr.set_module_option('AUDIT_API_ENABLED', True)
        mgr.set_module_option('AUDIT_API_LOG_PAYLOAD', True)

    def _validate_cluster_log_msg(self, path, method, user, params):
        """Assert the first audit log line matches the expected fields."""
        channel, _, msg = mgr.cluster_log.call_args_list[0][0]
        self.assertEqual(channel, 'audit')
        # Group 1 is the client address ('from=...') and is not asserted.
        pattern = r'^\[DASHBOARD\] from=\'(.+)\' path=\'(.+)\' ' \
                  'method=\'(.+)\' user=\'(.+)\' params=\'(.+)\'$'
        m = re.match(pattern, msg)
        self.assertEqual(m.group(2), path)
        self.assertEqual(m.group(3), method)
        self.assertEqual(m.group(4), user)
        self.assertDictEqual(json.loads(m.group(5)), params)

    def test_no_audit(self):
        """Disabling auditing suppresses the log call entirely."""
        mgr.set_module_option('AUDIT_API_ENABLED', False)
        self._delete('/foo/test1')
        mgr.cluster_log.assert_not_called()

    def test_no_payload(self):
        """With payload logging off the 'params=' field is omitted."""
        mgr.set_module_option('AUDIT_API_LOG_PAYLOAD', False)
        self._delete('/foo/test1')
        _, _, msg = mgr.cluster_log.call_args_list[0][0]
        self.assertNotIn('params=', msg)

    def test_no_audit_get(self):
        """Read-only GET requests are not audited."""
        self._get('/foo/test1')
        mgr.cluster_log.assert_not_called()

    def test_audit_put(self):
        # Sensitive parameters are expected to be masked with '***'.
        self._put('/foo/test1', {'password': 'y', 'secret_key': 1234})
        mgr.cluster_log.assert_called_once()
        self._validate_cluster_log_msg('/foo/test1', 'PUT', 'None',
                                       {'key': 'test1',
                                        'password': '***',
                                        'secret_key': '***'})

    def test_audit_post(self):
        with mock.patch('dashboard.services.auth.JwtManager.get_username',
                        return_value='hugo'):
            self._post('/foo?password=1234')
        mgr.cluster_log.assert_called_once()
        self._validate_cluster_log_msg('/foo', 'POST', 'hugo',
                                       {'password': '***'})

    def test_audit_delete(self):
        self._delete('/foo/test1')
        mgr.cluster_log.assert_called_once()
        self._validate_cluster_log_msg('/foo/test1', 'DELETE',
                                       'None', {'key': 'test1'})
diff --git a/src/pybind/mgr/dashboard/tests/test_auth.py b/src/pybind/mgr/dashboard/tests/test_auth.py
new file mode 100644
index 00000000..6f1d2a08
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_auth.py
@@ -0,0 +1,20 @@
+import unittest
+
+from .. import mgr
+from ..services.auth import JwtManager
+
+
class JwtManagerTest(unittest.TestCase):
    """Round-trip test for JWT generation and decoding in JwtManager."""

    def test_generate_token_and_decode(self):
        # 'mgr' behaves as a mock here: stub the option/store lookups that
        # JwtManager reads (token TTL and the signing secret).
        mgr.get_module_option.return_value = JwtManager.JWT_TOKEN_TTL
        mgr.get_store.return_value = 'jwt_secret'

        token = JwtManager.gen_token('my-username')
        self.assertIsInstance(token, str)
        self.assertTrue(token)

        decoded_token = JwtManager.decode_token(token)
        self.assertIsInstance(decoded_token, dict)
        self.assertEqual(decoded_token['iss'], 'ceph-dashboard')
        self.assertEqual(decoded_token['username'], 'my-username')
diff --git a/src/pybind/mgr/dashboard/tests/test_ceph_service.py b/src/pybind/mgr/dashboard/tests/test_ceph_service.py
new file mode 100644
index 00000000..5111e68d
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_ceph_service.py
@@ -0,0 +1,67 @@
+# -*- coding: utf-8 -*-
+# pylint: disable=dangerous-default-value,too-many-public-methods
+from __future__ import absolute_import
+
+import unittest
+try:
+ import mock
+except ImportError:
+ import unittest.mock as mock
+
+from ..services.ceph_service import CephService
+
+
class CephServiceTest(unittest.TestCase):
    """Unit tests for CephService pool-lookup helpers."""

    # Canned pool list; 'flaky' is deliberately present on one pool only to
    # test lookups by attributes that are not set on every pool.
    pools = [{
        'pool_name': 'good_pool',
        'pool': 1,
    }, {
        'pool_name': 'bad_pool',
        'pool': 2,
        'flaky': 'option_x'
    }]

    def setUp(self):
        # Mock get_pool_list
        self.list_patch = mock.patch('dashboard.services.ceph_service.CephService.get_pool_list')
        self.list = self.list_patch.start()
        self.list.return_value = self.pools
        # Mock mgr.get (returns PG status keyed by pool id as a string)
        self.mgr_patch = mock.patch('dashboard.mgr.get')
        self.mgr = self.mgr_patch.start()
        self.mgr.return_value = {
            'by_pool': {
                '1': {'active+clean': 16},
                '2': {'creating+incomplete': 16},
            }
        }
        self.service = CephService()

    def tearDown(self):
        self.list_patch.stop()
        self.mgr_patch.stop()

    def test_get_pool_by_attribute_with_match(self):
        self.assertEqual(self.service.get_pool_by_attribute('pool', 1), self.pools[0])
        self.assertEqual(self.service.get_pool_by_attribute('pool_name', 'bad_pool'), self.pools[1])

    def test_get_pool_by_attribute_without_a_match(self):
        self.assertEqual(self.service.get_pool_by_attribute('pool', 3), None)
        self.assertEqual(self.service.get_pool_by_attribute('not_there', 'sth'), None)

    def test_get_pool_by_attribute_matching_a_not_always_set_attribute(self):
        self.assertEqual(self.service.get_pool_by_attribute('flaky', 'option_x'), self.pools[1])

    @mock.patch('dashboard.mgr.rados.pool_reverse_lookup', return_value='good_pool')
    def test_get_pool_name_from_id_with_match(self, _mock):
        self.assertEqual(self.service.get_pool_name_from_id(1), 'good_pool')

    @mock.patch('dashboard.mgr.rados.pool_reverse_lookup', return_value=None)
    def test_get_pool_name_from_id_without_match(self, _mock):
        self.assertEqual(self.service.get_pool_name_from_id(3), None)

    def test_get_pool_pg_status(self):
        self.assertEqual(self.service.get_pool_pg_status('good_pool'), {'active+clean': 16})

    def test_get_pg_status_without_match(self):
        self.assertEqual(self.service.get_pool_pg_status('no-pool'), {})
diff --git a/src/pybind/mgr/dashboard/tests/test_cephfs.py b/src/pybind/mgr/dashboard/tests/test_cephfs.py
new file mode 100644
index 00000000..e9abda53
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_cephfs.py
@@ -0,0 +1,48 @@
+# -*- coding: utf-8 -*-
+from collections import defaultdict
+try:
+ from mock import Mock
+except ImportError:
+ from unittest.mock import Mock
+
+from .. import mgr
+from . import ControllerTestCase
+from ..controllers.cephfs import CephFS
+
+
class MetaDataMock(object):
    """Stub metadata object whose get() always yields 'bar'."""
    def get(self, _x, _y):
        return 'bar'
+
+
def get_metadata_mock(key, meta_key):
    """Mimic mgr.get_metadata(): map ('mds', daemon_id) to canned metadata.

    Raises KeyError for any daemon id other than None/'foo', and for any
    daemon type other than 'mds' — same as the nested-dict original.
    """
    # Resolve the daemon id first so an unknown id raises KeyError(meta_key)
    # before the daemon type is checked (preserves original lookup order).
    entry = {
        None: None,  # Unknown key
        'foo': MetaDataMock(),
    }[meta_key]
    return {'mds': entry}[key]
+
+
class CephFsTest(ControllerTestCase):
    """Tests for CephFS MDS metadata aggregation."""

    # Shared controller instance; only its private helper is exercised.
    cephFs = CephFS()

    @classmethod
    def setup_server(cls):
        mgr.get_metadata = Mock(side_effect=get_metadata_mock)

    def tearDown(self):
        # NOTE(review): mgr.get_metadata is a plain Mock, not a patcher, so
        # .stop() is just a recorded mock call (a no-op) — confirm intent.
        mgr.get_metadata.stop()

    def test_append_of_mds_metadata_if_key_is_not_found(self):
        """An unknown MDS id must not add any version entry."""
        mds_versions = defaultdict(list)
        # pylint: disable=protected-access
        self.cephFs._append_mds_metadata(mds_versions, None)
        self.assertEqual(len(mds_versions), 0)

    def test_append_of_mds_metadata_with_existing_metadata(self):
        """A known MDS id is grouped under its reported version ('bar')."""
        mds_versions = defaultdict(list)
        # pylint: disable=protected-access
        self.cephFs._append_mds_metadata(mds_versions, 'foo')
        self.assertEqual(len(mds_versions), 1)
        self.assertEqual(mds_versions['bar'], ['foo'])
diff --git a/src/pybind/mgr/dashboard/tests/test_controllers.py b/src/pybind/mgr/dashboard/tests/test_controllers.py
new file mode 100644
index 00000000..0e880470
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_controllers.py
@@ -0,0 +1,192 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+from . import ControllerTestCase
+from ..controllers import BaseController, RESTController, Controller, \
+ ApiController, Endpoint
+
+
@Controller("/btest/{key}", base_url="/ui", secure=False)
class BTest(BaseController):
    """Controller exercising @Endpoint path/query parameter combinations."""

    @Endpoint()
    def test1(self, key, opt=1):
        return {'key': key, 'opt': opt}

    @Endpoint()
    def test2(self, key, skey, opt=1):
        return {'key': key, 'skey': skey, 'opt': opt}

    @Endpoint(path="/foo/{skey}/test-3")
    def test3(self, key, skey, opt=1):
        return {'key': key, 'skey': skey, 'opt': opt}

    @Endpoint('POST', path="/foo/{skey}/test-3", query_params=['opt'])
    def test4(self, key, skey, data, opt=1):
        return {'key': key, 'skey': skey, 'data': data, 'opt': opt}

    @Endpoint('PUT', path_params=['skey'], query_params=['opt'])
    def test5(self, key, skey, data1, data2=None, opt=1):
        return {'key': key, 'skey': skey, 'data1': data1, 'data2': data2,
                'opt': opt}

    @Endpoint('GET', json_response=False)
    def test6(self, key, opt=1):
        # json_response=False: the raw string is returned as the body.
        return "My Formatted string key={} opt={}".format(key, opt)

    @Endpoint()
    def __call__(self, key, opt=1):
        return {'key': key, 'opt': opt}
+
+
@ApiController("/rtest/{key}", secure=False)
class RTest(RESTController):
    """REST controller with a compound resource id ('skey/ekey')."""

    RESOURCE_ID = 'skey/ekey'

    def list(self, key, opt=1):
        return {'key': key, 'opt': opt}

    def create(self, key, data1, data2=None):
        return {'key': key, 'data1': data1, 'data2': data2}

    def get(self, key, skey, ekey, opt=1):
        return {'key': key, 'skey': skey, 'ekey': ekey, 'opt': opt}

    def set(self, key, skey, ekey, data):
        return {'key': key, 'skey': skey, 'ekey': ekey, 'data': data}

    def delete(self, key, skey, ekey, opt=1):
        pass

    def bulk_set(self, key, data1, data2=None):
        return {'key': key, 'data1': data1, 'data2': data2}

    def bulk_delete(self, key, opt=1):
        pass

    @RESTController.Collection('POST')
    def cmethod(self, key, data):
        return {'key': key, 'data': data}

    @RESTController.Resource('GET')
    def rmethod(self, key, skey, ekey, opt=1):
        return {'key': key, 'skey': skey, 'ekey': ekey, 'opt': opt}
+
+
@Controller("/", secure=False)
class Root(BaseController):
    """Root controller returning a minimal HTML page at '/'."""
    @Endpoint(json_response=False)
    def __call__(self):
        return "<html></html>"
+
+
class ControllersTest(ControllerTestCase):
    """End-to-end routing tests for BTest/RTest mounted under '/test'.

    Note: URL path and query parameters arrive as strings ('100', '3'),
    while values from JSON request bodies keep their JSON types.
    """

    @classmethod
    def setup_server(cls):
        cls.setup_controllers([BTest, RTest], "/test")

    def test_1(self):
        self._get('/test/ui/btest/{}/test1?opt=3'.format(100))
        self.assertStatus(200)
        self.assertJsonBody({'key': '100', 'opt': '3'})

    def test_2(self):
        self._get('/test/ui/btest/{}/test2/{}?opt=3'.format(100, 200))
        self.assertStatus(200)
        self.assertJsonBody({'key': '100', 'skey': '200', 'opt': '3'})

    def test_3(self):
        self._get('/test/ui/btest/{}/foo/{}/test-3?opt=3'.format(100, 200))
        self.assertStatus(200)
        self.assertJsonBody({'key': '100', 'skey': '200', 'opt': '3'})

    def test_4(self):
        self._post('/test/ui/btest/{}/foo/{}/test-3?opt=3'.format(100, 200),
                   {'data': 30})
        self.assertStatus(200)
        self.assertJsonBody({'key': '100', 'skey': '200', 'data': 30,
                             'opt': '3'})

    def test_5(self):
        self._put('/test/ui/btest/{}/test5/{}?opt=3'.format(100, 200),
                  {'data1': 40, 'data2': "hello"})
        self.assertStatus(200)
        self.assertJsonBody({'key': '100', 'skey': '200', 'data1': 40,
                             'data2': "hello", 'opt': '3'})

    def test_6(self):
        self._get('/test/ui/btest/{}/test6'.format(100))
        self.assertStatus(200)
        self.assertBody("My Formatted string key=100 opt=1")

    def test_7(self):
        # __call__ endpoint: the controller path itself is routable.
        self._get('/test/ui/btest/{}?opt=3'.format(100))
        self.assertStatus(200)
        self.assertJsonBody({'key': '100', 'opt': '3'})

    def test_rest_list(self):
        self._get('/test/api/rtest/{}?opt=2'.format(300))
        self.assertStatus(200)
        self.assertJsonBody({'key': '300', 'opt': '2'})

    def test_rest_create(self):
        # REST create is expected to answer 201 Created.
        self._post('/test/api/rtest/{}'.format(300),
                   {'data1': 20, 'data2': True})
        self.assertStatus(201)
        self.assertJsonBody({'key': '300', 'data1': 20, 'data2': True})

    def test_rest_get(self):
        self._get('/test/api/rtest/{}/{}/{}?opt=3'.format(300, 1, 2))
        self.assertStatus(200)
        self.assertJsonBody({'key': '300', 'skey': '1', 'ekey': '2',
                             'opt': '3'})

    def test_rest_set(self):
        self._put('/test/api/rtest/{}/{}/{}'.format(300, 1, 2),
                  {'data': 40})
        self.assertStatus(200)
        self.assertJsonBody({'key': '300', 'skey': '1', 'ekey': '2',
                             'data': 40})

    def test_rest_delete(self):
        # Handlers returning None answer 204 No Content.
        self._delete('/test/api/rtest/{}/{}/{}?opt=3'.format(300, 1, 2))
        self.assertStatus(204)

    def test_rest_bulk_set(self):
        self._put('/test/api/rtest/{}'.format(300),
                  {'data1': 20, 'data2': True})
        self.assertStatus(200)
        self.assertJsonBody({'key': '300', 'data1': 20, 'data2': True})

        self._put('/test/api/rtest/{}'.format(400),
                  {'data1': 20, 'data2': ['one', 'two', 'three']})
        self.assertStatus(200)
        self.assertJsonBody({
            'key': '400',
            'data1': 20,
            'data2': ['one', 'two', 'three'],
        })

    def test_rest_bulk_delete(self):
        self._delete('/test/api/rtest/{}?opt=2'.format(300))
        self.assertStatus(204)

    def test_rest_collection(self):
        self._post('/test/api/rtest/{}/cmethod'.format(300), {'data': 30})
        self.assertStatus(200)
        self.assertJsonBody({'key': '300', 'data': 30})

    def test_rest_resource(self):
        self._get('/test/api/rtest/{}/{}/{}/rmethod?opt=4'.format(300, 2, 3))
        self.assertStatus(200)
        self.assertJsonBody({'key': '300', 'skey': '2', 'ekey': '3',
                             'opt': '4'})
+
+
class RootControllerTest(ControllerTestCase):
    """Verify the '/' endpoint serves the raw HTML body."""
    @classmethod
    def setup_server(cls):
        cls.setup_controllers([Root])

    def test_index(self):
        self._get("/")
        self.assertBody("<html></html>")
diff --git a/src/pybind/mgr/dashboard/tests/test_docs.py b/src/pybind/mgr/dashboard/tests/test_docs.py
new file mode 100644
index 00000000..4d6c2576
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_docs.py
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+from . import ControllerTestCase
+from ..controllers import RESTController, ApiController, Endpoint, EndpointDoc, ControllerDoc
+from ..controllers.docs import Docs
+
+
+# Dummy controller and endpoint that can be assigned with @EndpointDoc and @GroupDoc
@ControllerDoc("Group description", group="FooGroup")
@ApiController("/doctest/", secure=False)
class DecoratedController(RESTController):
    """Dummy controller whose endpoint carries full @EndpointDoc metadata."""

    @EndpointDoc(
        description="Endpoint description",
        group="BarGroup",
        parameters={
            'parameter': (int, "Description of parameter"),
        },
        responses={
            200: {
                'resp': (str, 'Description of response')
            },
        },
    )
    @Endpoint(json_response=False)
    def decorated_func(self, parameter):
        pass
+
+
+# To assure functionality of @EndpointDoc, @GroupDoc
class DocDecoratorsTest(ControllerTestCase):
    """Check that @ControllerDoc/@EndpointDoc attach 'doc_info' metadata."""
    @classmethod
    def setup_server(cls):
        cls.setup_controllers([DecoratedController, Docs], "/test")

    def test_group_info_attr(self):
        """@ControllerDoc stores tag name and description on the class."""
        test_ctrl = DecoratedController()
        self.assertTrue(hasattr(test_ctrl, 'doc_info'))
        self.assertIn('tag_descr', test_ctrl.doc_info)
        self.assertIn('tag', test_ctrl.doc_info)

    def test_endpoint_info_attr(self):
        """@EndpointDoc stores summary/tag/params/response on the endpoint."""
        test_ctrl = DecoratedController()
        test_endpoint = test_ctrl.decorated_func
        self.assertTrue(hasattr(test_endpoint, 'doc_info'))
        self.assertIn('summary', test_endpoint.doc_info)
        self.assertIn('tag', test_endpoint.doc_info)
        self.assertIn('parameters', test_endpoint.doc_info)
        self.assertIn('response', test_endpoint.doc_info)
+
+
+# To assure functionality of Docs.py
+# pylint: disable=protected-access
class DocsTest(ControllerTestCase):
    """Unit tests for the Docs (OpenAPI spec) controller internals."""
    @classmethod
    def setup_server(cls):
        cls.setup_controllers([Docs], "/test")

    def test_type_to_str(self):
        self.assertEqual(Docs()._type_to_str(str), "string")

    def test_gen_paths(self):
        # Note the double slash: the controller path ends with '/'.
        outcome = Docs()._gen_paths(False, "")['/api/doctest//decorated_func/{parameter}']['get']
        self.assertIn('tags', outcome)
        self.assertIn('summary', outcome)
        self.assertIn('parameters', outcome)
        self.assertIn('responses', outcome)

    def test_gen_tags(self):
        outcome = Docs()._gen_tags(False)[0]
        self.assertEqual({'description': 'Group description', 'name': 'FooGroup'}, outcome)
diff --git a/src/pybind/mgr/dashboard/tests/test_erasure_code_profile.py b/src/pybind/mgr/dashboard/tests/test_erasure_code_profile.py
new file mode 100644
index 00000000..88575c0a
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_erasure_code_profile.py
@@ -0,0 +1,36 @@
+# -*- coding: utf-8 -*-
+
+from .. import mgr
+from . import ControllerTestCase
+from ..controllers.erasure_code_profile import ErasureCodeProfile
+
+
class ErasureCodeProfileTest(ControllerTestCase):
    """REST tests for the erasure-code-profile endpoints."""
    @classmethod
    def setup_server(cls):
        # Serve canned osd_map/health/fs_map data from the mocked mgr.get().
        mgr.get.side_effect = lambda key: {
            'osd_map': {
                'erasure_code_profiles': {
                    'test': {
                        'k': '2',
                        'm': '1'
                    }
                }
            },
            'health': {'json': '{"status": 1}'},
            'fs_map': {'filesystems': []},

        }[key]
        # pylint: disable=protected-access
        ErasureCodeProfile._cp_config['tools.authenticate.on'] = False
        cls.setup_controllers([ErasureCodeProfile])

    def test_list(self):
        self._get('/api/erasure_code_profile')
        self.assertStatus(200)
        # 'k'/'m' come back as ints even though stored as strings above.
        self.assertJsonBody([{'k': 2, 'm': 1, 'name': 'test'}])

    def test_get(self):
        self._get('/api/erasure_code_profile/test')
        self.assertStatus(200)
        self.assertJsonBody({'k': 2, 'm': 1, 'name': 'test'})
diff --git a/src/pybind/mgr/dashboard/tests/test_exceptions.py b/src/pybind/mgr/dashboard/tests/test_exceptions.py
new file mode 100644
index 00000000..5607f1dd
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_exceptions.py
@@ -0,0 +1,157 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import time
+
+import rados
+
+from . import ControllerTestCase
+from ..services.ceph_service import SendCommandError
+from ..controllers import RESTController, Controller, Task, Endpoint
+from ..services.exception import handle_rados_error, handle_send_command_error, \
+ serialize_dashboard_exception
+from ..tools import ViewCache, TaskManager, NotificationQueue
+
+
+# pylint: disable=W0613
@Controller('foo', secure=False)
class FooResource(RESTController):
    """Endpoints raising various errors to exercise exception serialization."""

    @Endpoint()
    @handle_rados_error('foo')
    def no_exception(self, param1, param2):
        return [param1, param2]

    @Endpoint()
    @handle_rados_error('foo')
    def error_foo_controller(self):
        raise rados.OSError('hi', errno=-42)

    @Endpoint()
    @handle_send_command_error('foo')
    def error_send_command(self):
        raise SendCommandError('hi', 'prefix', {}, -42)

    @Endpoint()
    def error_generic(self):
        # Not wrapped by a handler decorator on purpose.
        raise rados.Error('hi')

    @Endpoint()
    def vc_no_data(self):
        # timeout=0: the cached call never completes in time, so the view
        # cache answers with VALUE_NONE; the assert is never reached.
        @ViewCache(timeout=0)
        def _no_data():
            time.sleep(0.2)

        _no_data()
        assert False

    @handle_rados_error('foo')
    @Endpoint()
    def vc_exception(self):
        @ViewCache(timeout=10)
        def _raise():
            raise rados.OSError('hi', errno=-42)

        _raise()
        assert False

    @Endpoint()
    def internal_server_error(self):
        # Deliberate ZeroDivisionError -> HTTP 500.
        return 1/0

    @handle_send_command_error('foo')
    def list(self):
        raise SendCommandError('list', 'prefix', {}, -42)

    @Endpoint()
    @Task('task_exceptions/task_exception', {1: 2}, 1.0,
          exception_handler=serialize_dashboard_exception)
    @handle_rados_error('foo')
    def task_exception(self):
        raise rados.OSError('hi', errno=-42)

    @Endpoint()
    def wait_task_exception(self):
        # True while the failed task is still listed as executing.
        ex, _ = TaskManager.list('task_exceptions/task_exception')
        return bool(len(ex))
+
+
+# pylint: disable=C0102
class Root(object):
    # Mounts FooResource under /foo.
    foo = FooResource()
+
+
class RESTControllerTest(ControllerTestCase):
    """End-to-end checks of dashboard exception serialization over HTTP."""
    @classmethod
    def setup_server(cls):
        # Task machinery is needed for the task_exception endpoints.
        NotificationQueue.start_queue()
        TaskManager.init()
        cls.setup_controllers([FooResource])

    def test_no_exception(self):
        self._get('/foo/no_exception/a/b')
        self.assertStatus(200)
        self.assertJsonBody(
            ['a', 'b']
        )

    def test_error_foo_controller(self):
        self._get('/foo/error_foo_controller')
        self.assertStatus(400)
        self.assertJsonBody(
            {'detail': '[errno -42] hi', 'code': "42", 'component': 'foo'}
        )

    def test_error_send_command(self):
        self._get('/foo/error_send_command')
        self.assertStatus(400)
        self.assertJsonBody(
            {'detail': '[errno -42] hi', 'code': "42", 'component': 'foo'}
        )

    def test_error_send_command_list(self):
        self._get('/foo/')
        self.assertStatus(400)
        self.assertJsonBody(
            {'detail': '[errno -42] list', 'code': "42", 'component': 'foo'}
        )

    def test_error_foo_generic(self):
        self._get('/foo/error_generic')
        self.assertJsonBody({'detail': 'hi', 'code': 'Error', 'component': None})
        self.assertStatus(400)

    def test_viewcache_no_data(self):
        self._get('/foo/vc_no_data')
        self.assertStatus(200)
        self.assertJsonBody({'status': ViewCache.VALUE_NONE, 'value': None})

    def test_viewcache_exception(self):
        self._get('/foo/vc_exception')
        self.assertStatus(400)
        self.assertJsonBody(
            {'detail': '[errno -42] hi', 'code': "42", 'component': 'foo'}
        )

    def test_task_exception(self):
        self._get('/foo/task_exception')
        self.assertStatus(400)
        self.assertJsonBody(
            {'detail': '[errno -42] hi', 'code': "42", 'component': 'foo',
             'task': {'name': 'task_exceptions/task_exception', 'metadata': {'1': 2}}}
        )

        # Poll until the background task exception has been processed.
        self._get('/foo/wait_task_exception')
        while self.jsonBody():
            time.sleep(0.5)
            self._get('/foo/wait_task_exception')

    def test_internal_server_error(self):
        self._get('/foo/internal_server_error')
        self.assertStatus(500)
        self.assertIn('unexpected condition', self.jsonBody()['detail'])

    def test_404(self):
        self._get('/foonot_found')
        self.assertStatus(404)
        self.assertIn('detail', self.jsonBody())
diff --git a/src/pybind/mgr/dashboard/tests/test_feature_toggles.py b/src/pybind/mgr/dashboard/tests/test_feature_toggles.py
new file mode 100644
index 00000000..5c70c88a
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_feature_toggles.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import unittest
+from mock import Mock, patch
+
+from . import KVStoreMockMixin
+from ..plugins.feature_toggles import FeatureToggles, Features
+
+
class SettingsTest(unittest.TestCase, KVStoreMockMixin):
    """Tests for the FeatureToggles plugin.

    NOTE(review): the class name says 'Settings' but the subject under test
    is FeatureToggles — consider renaming for clarity.
    """

    @classmethod
    def setUpClass(cls):
        cls.mock_kv_store()
        cls.CONFIG_KEY_DICT['url_prefix'] = ''

        # Mock MODULE_OPTIONS
        from .. import mgr
        cls.mgr = mgr

        # Populate real endpoint map
        from ..controllers import load_controllers
        cls.controllers = load_controllers()

        # Initialize FeatureToggles plugin
        cls.plugin = FeatureToggles()
        cls.CONFIG_KEY_DICT.update(
            {k['name']: k['default'] for k in cls.plugin.get_options()})
        cls.plugin.setup()

    def test_filter_request_when_all_features_enabled(self):
        """
        This test iterates over all the registered endpoints to ensure that, with default
        feature toggles, none is disabled.
        """
        import cherrypy

        request = Mock()
        for controller in self.controllers:
            request.path_info = controller.get_path()
            try:
                self.plugin.filter_request_before_handler(request)
            except cherrypy.HTTPError:
                self.fail("Request filtered {} and it shouldn't".format(
                    request.path_info))

    def test_filter_request_when_some_feature_enabled(self):
        """
        This test focuses on a single feature and checks whether it's actually
        disabled
        """
        import cherrypy

        self.plugin.register_commands()['handle_command'](
            self.mgr, 'disable', ['cephfs'])

        with patch.object(self.plugin, '_get_feature_from_request',
                          return_value=Features.CEPHFS):
            with self.assertRaises(cherrypy.HTTPError):
                request = Mock()
                self.plugin.filter_request_before_handler(request)
diff --git a/src/pybind/mgr/dashboard/tests/test_ganesha.py b/src/pybind/mgr/dashboard/tests/test_ganesha.py
new file mode 100644
index 00000000..5dced126
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_ganesha.py
@@ -0,0 +1,642 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import unittest
+
+from mock import MagicMock, Mock
+
+import orchestrator
+from . import KVStoreMockMixin
+from .. import mgr
+from ..settings import Settings
+from ..services import ganesha
+from ..services.ganesha import GaneshaConf, Export, GaneshaConfParser
+
+
+class GaneshaConfTest(unittest.TestCase, KVStoreMockMixin):
+ export_1 = """
+EXPORT {
+ Export_ID=1;
+ Protocols = 4;
+ Path = /;
+ Pseudo = /cephfs_a/;
+ Access_Type = RW;
+ Protocols = 4;
+ Attr_Expiration_Time = 0;
+ # Delegations = R;
+ # Squash = root;
+
+ FSAL {
+ Name = CEPH;
+ Filesystem = "a";
+ User_Id = "ganesha";
+ # Secret_Access_Key = "YOUR SECRET KEY HERE";
+ }
+
+ CLIENT
+ {
+ Clients = 192.168.0.10, 192.168.1.0/8;
+ Squash = None;
+ }
+
+ CLIENT
+ {
+ Clients = 192.168.0.0/16;
+ Squash = All;
+ Access_Type = RO;
+ }
+}
+"""
+
+ export_2 = """
+EXPORT
+{
+ Export_ID=2;
+
+ Path = "/";
+
+ Pseudo = "/rgw";
+
+ Access_Type = RW;
+
+ squash = AllAnonymous;
+
+ Protocols = 4, 3;
+
+ Transports = TCP, UDP;
+
+ FSAL {
+ Name = RGW;
+ User_Id = "testuser";
+ Access_Key_Id ="access_key";
+ Secret_Access_Key = "secret_key";
+ }
+}
+"""
+
+ conf_nodea = '''
+%url rados://ganesha/ns/export-2
+
+%url "rados://ganesha/ns/export-1"'''
+
+ conf_nodeb = '%url "rados://ganesha/ns/export-1"'
+
+ class RObject(object):
+ def __init__(self, key, raw):
+ self.key = key
+ self.raw = raw
+
+ def read(self, _):
+ return self.raw.encode('utf-8')
+
+ def stat(self):
+ return len(self.raw), None
+
+ def _ioctx_write_full_mock(self, key, content):
+ if key not in self.temp_store:
+ self.temp_store[key] = GaneshaConfTest.RObject(key,
+ content.decode('utf-8'))
+ else:
+ self.temp_store[key].raw = content.decode('utf-8')
+
+ def _ioctx_remove_mock(self, key):
+ del self.temp_store[key]
+
+ def _ioctx_list_objects_mock(self):
+ return [obj for _, obj in self.temp_store.items()]
+
+ def setUp(self):
+ self.mock_kv_store()
+
+ Settings.GANESHA_CLUSTERS_RADOS_POOL_NAMESPACE = "ganesha/ns"
+
+ self.temp_store = {
+ 'export-1': GaneshaConfTest.RObject("export-1", self.export_1),
+ 'conf-nodea': GaneshaConfTest.RObject("conf-nodea", self.conf_nodea),
+ 'export-2': GaneshaConfTest.RObject("export-2", self.export_2),
+ 'conf-nodeb': GaneshaConfTest.RObject("conf-nodeb", self.conf_nodeb)
+ }
+
+ self.io_mock = MagicMock()
+ self.io_mock.list_objects.side_effect = self._ioctx_list_objects_mock
+ self.io_mock.write_full.side_effect = self._ioctx_write_full_mock
+ self.io_mock.remove_object.side_effect = self._ioctx_remove_mock
+
+ ioctx_mock = MagicMock()
+ ioctx_mock.__enter__ = Mock(return_value=(self.io_mock))
+ ioctx_mock.__exit__ = Mock(return_value=None)
+
+ mgr.rados = MagicMock()
+ mgr.rados.open_ioctx.return_value = ioctx_mock
+
+ # pylint: disable=protected-access
+ mgr._select_orchestrator.side_effect = orchestrator.NoOrchestrator()
+
+ ganesha.CephX = MagicMock()
+ ganesha.CephX.list_clients.return_value = ['ganesha']
+ ganesha.CephX.get_client_key.return_value = 'ganesha'
+
+ ganesha.CephFS = MagicMock()
+
+ def test_export_parser_1(self):
+ blocks = GaneshaConfParser(self.export_1).parse()
+ self.assertIsInstance(blocks, list)
+ self.assertEqual(len(blocks), 1)
+ export = Export.from_export_block(blocks[0], '_default_',
+ GaneshaConf.ganesha_defaults({}))
+
+ self.assertEqual(export.export_id, 1)
+ self.assertEqual(export.path, "/")
+ self.assertEqual(export.pseudo, "/cephfs_a")
+ self.assertIsNone(export.tag)
+ self.assertEqual(export.access_type, "RW")
+ self.assertEqual(export.squash, "root_squash")
+ self.assertEqual(export.protocols, {4})
+ self.assertEqual(export.transports, {"TCP", "UDP"})
+ self.assertEqual(export.fsal.name, "CEPH")
+ self.assertEqual(export.fsal.user_id, "ganesha")
+ self.assertEqual(export.fsal.fs_name, "a")
+ self.assertEqual(export.fsal.sec_label_xattr, None)
+ self.assertEqual(len(export.clients), 2)
+ self.assertEqual(export.clients[0].addresses,
+ ["192.168.0.10", "192.168.1.0/8"])
+ self.assertEqual(export.clients[0].squash, "no_root_squash")
+ self.assertIsNone(export.clients[0].access_type)
+ self.assertEqual(export.clients[1].addresses, ["192.168.0.0/16"])
+ self.assertEqual(export.clients[1].squash, "all_squash")
+ self.assertEqual(export.clients[1].access_type, "RO")
+ self.assertEqual(export.cluster_id, '_default_')
+ self.assertEqual(export.attr_expiration_time, 0)
+ self.assertEqual(export.security_label, False)
+
+ def test_export_parser_2(self):
+ blocks = GaneshaConfParser(self.export_2).parse()
+ self.assertIsInstance(blocks, list)
+ self.assertEqual(len(blocks), 1)
+ export = Export.from_export_block(blocks[0], '_default_',
+ GaneshaConf.ganesha_defaults({}))
+
+ self.assertEqual(export.export_id, 2)
+ self.assertEqual(export.path, "/")
+ self.assertEqual(export.pseudo, "/rgw")
+ self.assertIsNone(export.tag)
+ self.assertEqual(export.access_type, "RW")
+ self.assertEqual(export.squash, "all_squash")
+ self.assertEqual(export.protocols, {4, 3})
+ self.assertEqual(export.transports, {"TCP", "UDP"})
+ self.assertEqual(export.fsal.name, "RGW")
+ self.assertEqual(export.fsal.rgw_user_id, "testuser")
+ self.assertEqual(export.fsal.access_key, "access_key")
+ self.assertEqual(export.fsal.secret_key, "secret_key")
+ self.assertEqual(len(export.clients), 0)
+ self.assertEqual(export.cluster_id, '_default_')
+
+ def test_daemon_conf_parser_a(self):
+ blocks = GaneshaConfParser(self.conf_nodea).parse()
+ self.assertIsInstance(blocks, list)
+ self.assertEqual(len(blocks), 2)
+ self.assertEqual(blocks[0]['block_name'], "%url")
+ self.assertEqual(blocks[0]['value'], "rados://ganesha/ns/export-2")
+ self.assertEqual(blocks[1]['block_name'], "%url")
+ self.assertEqual(blocks[1]['value'], "rados://ganesha/ns/export-1")
+
+ def test_daemon_conf_parser_b(self):
+ blocks = GaneshaConfParser(self.conf_nodeb).parse()
+ self.assertIsInstance(blocks, list)
+ self.assertEqual(len(blocks), 1)
+ self.assertEqual(blocks[0]['block_name'], "%url")
+ self.assertEqual(blocks[0]['value'], "rados://ganesha/ns/export-1")
+
+ def test_ganesha_conf(self):
+ ganesha_conf = GaneshaConf.instance('_default_')
+ exports = ganesha_conf.exports
+
+ self.assertEqual(len(exports.items()), 2)
+ self.assertIn(1, exports)
+ self.assertIn(2, exports)
+
+ # export_id = 1 asserts
+ export = exports[1]
+ self.assertEqual(export.export_id, 1)
+ self.assertEqual(export.path, "/")
+ self.assertEqual(export.pseudo, "/cephfs_a")
+ self.assertIsNone(export.tag)
+ self.assertEqual(export.access_type, "RW")
+ self.assertEqual(export.squash, "root_squash")
+ self.assertEqual(export.protocols, {4})
+ self.assertEqual(export.transports, {"TCP", "UDP"})
+ self.assertEqual(export.fsal.name, "CEPH")
+ self.assertEqual(export.fsal.user_id, "ganesha")
+ self.assertEqual(export.fsal.fs_name, "a")
+ self.assertEqual(export.fsal.sec_label_xattr, None)
+ self.assertEqual(len(export.clients), 2)
+ self.assertEqual(export.clients[0].addresses,
+ ["192.168.0.10", "192.168.1.0/8"])
+ self.assertEqual(export.clients[0].squash, "no_root_squash")
+ self.assertIsNone(export.clients[0].access_type)
+ self.assertEqual(export.clients[1].addresses, ["192.168.0.0/16"])
+ self.assertEqual(export.clients[1].squash, "all_squash")
+ self.assertEqual(export.clients[1].access_type, "RO")
+ self.assertEqual(export.attr_expiration_time, 0)
+ self.assertEqual(export.security_label, False)
+
+ # export_id = 2 asserts
+ export = exports[2]
+ self.assertEqual(export.export_id, 2)
+ self.assertEqual(export.path, "/")
+ self.assertEqual(export.pseudo, "/rgw")
+ self.assertIsNone(export.tag)
+ self.assertEqual(export.access_type, "RW")
+ self.assertEqual(export.squash, "all_squash")
+ self.assertEqual(export.protocols, {4, 3})
+ self.assertEqual(export.transports, {"TCP", "UDP"})
+ self.assertEqual(export.fsal.name, "RGW")
+ self.assertEqual(export.fsal.rgw_user_id, "testuser")
+ self.assertEqual(export.fsal.access_key, "access_key")
+ self.assertEqual(export.fsal.secret_key, "secret_key")
+ self.assertEqual(len(export.clients), 0)
+
+ def test_config_dict(self):
+ conf = GaneshaConf.instance('_default_')
+ export = conf.exports[1]
+ ex_dict = export.to_dict()
+ self.assertDictEqual(ex_dict, {
+ 'daemons': ['nodea', 'nodeb'],
+ 'export_id': 1,
+ 'path': '/',
+ 'pseudo': '/cephfs_a',
+ 'cluster_id': '_default_',
+ 'tag': None,
+ 'access_type': 'RW',
+ 'squash': 'root_squash',
+ 'security_label': False,
+ 'protocols': [4],
+ 'transports': ['TCP', 'UDP'],
+ 'clients': [{
+ 'addresses': ["192.168.0.10", "192.168.1.0/8"],
+ 'access_type': None,
+ 'squash': 'no_root_squash'
+ }, {
+ 'addresses': ["192.168.0.0/16"],
+ 'access_type': 'RO',
+ 'squash': 'all_squash'
+ }],
+ 'fsal': {
+ 'name': 'CEPH',
+ 'user_id': 'ganesha',
+ 'fs_name': 'a',
+ 'sec_label_xattr': None
+ }
+ })
+
+ export = conf.exports[2]
+ ex_dict = export.to_dict()
+ self.assertDictEqual(ex_dict, {
+ 'daemons': ['nodea'],
+ 'export_id': 2,
+ 'path': '/',
+ 'pseudo': '/rgw',
+ 'cluster_id': '_default_',
+ 'tag': None,
+ 'access_type': 'RW',
+ 'squash': 'all_squash',
+ 'security_label': False,
+ 'protocols': [3, 4],
+ 'transports': ['TCP', 'UDP'],
+ 'clients': [],
+ 'fsal': {
+ 'name': 'RGW',
+ 'rgw_user_id': 'testuser'
+ }
+ })
+
+ def test_config_from_dict(self):
+ export = Export.from_dict(1, {
+ 'daemons': ['nodea', 'nodeb'],
+ 'export_id': 1,
+ 'path': '/',
+ 'cluster_id': '_default_',
+ 'pseudo': '/cephfs_a',
+ 'tag': None,
+ 'access_type': 'RW',
+ 'squash': 'root_squash',
+ 'security_label': True,
+ 'protocols': [4],
+ 'transports': ['TCP', 'UDP'],
+ 'clients': [{
+ 'addresses': ["192.168.0.10", "192.168.1.0/8"],
+ 'access_type': None,
+ 'squash': 'no_root_squash'
+ }, {
+ 'addresses': ["192.168.0.0/16"],
+ 'access_type': 'RO',
+ 'squash': 'all_squash'
+ }],
+ 'fsal': {
+ 'name': 'CEPH',
+ 'user_id': 'ganesha',
+ 'fs_name': 'a',
+ 'sec_label_xattr': 'security.selinux'
+ }
+ })
+
+ self.assertEqual(export.export_id, 1)
+ self.assertEqual(export.path, "/")
+ self.assertEqual(export.pseudo, "/cephfs_a")
+ self.assertIsNone(export.tag)
+ self.assertEqual(export.access_type, "RW")
+ self.assertEqual(export.squash, "root_squash")
+ self.assertEqual(export.protocols, {4})
+ self.assertEqual(export.transports, {"TCP", "UDP"})
+ self.assertEqual(export.fsal.name, "CEPH")
+ self.assertEqual(export.fsal.user_id, "ganesha")
+ self.assertEqual(export.fsal.fs_name, "a")
+ self.assertEqual(export.fsal.sec_label_xattr, 'security.selinux')
+ self.assertEqual(len(export.clients), 2)
+ self.assertEqual(export.clients[0].addresses,
+ ["192.168.0.10", "192.168.1.0/8"])
+ self.assertEqual(export.clients[0].squash, "no_root_squash")
+ self.assertIsNone(export.clients[0].access_type)
+ self.assertEqual(export.clients[1].addresses, ["192.168.0.0/16"])
+ self.assertEqual(export.clients[1].squash, "all_squash")
+ self.assertEqual(export.clients[1].access_type, "RO")
+ self.assertEqual(export.daemons, {"nodeb", "nodea"})
+ self.assertEqual(export.cluster_id, '_default_')
+ self.assertEqual(export.attr_expiration_time, 0)
+ self.assertEqual(export.security_label, True)
+
+ export = Export.from_dict(2, {
+ 'daemons': ['nodea'],
+ 'export_id': 2,
+ 'path': '/',
+ 'pseudo': '/rgw',
+ 'cluster_id': '_default_',
+ 'tag': None,
+ 'access_type': 'RW',
+ 'squash': 'all_squash',
+ 'security_label': False,
+ 'protocols': [4, 3],
+ 'transports': ['TCP', 'UDP'],
+ 'clients': [],
+ 'fsal': {
+ 'name': 'RGW',
+ 'rgw_user_id': 'testuser'
+ }
+ })
+
+ self.assertEqual(export.export_id, 2)
+ self.assertEqual(export.path, "/")
+ self.assertEqual(export.pseudo, "/rgw")
+ self.assertIsNone(export.tag)
+ self.assertEqual(export.access_type, "RW")
+ self.assertEqual(export.squash, "all_squash")
+ self.assertEqual(export.protocols, {4, 3})
+ self.assertEqual(export.transports, {"TCP", "UDP"})
+ self.assertEqual(export.fsal.name, "RGW")
+ self.assertEqual(export.fsal.rgw_user_id, "testuser")
+ self.assertIsNone(export.fsal.access_key)
+ self.assertIsNone(export.fsal.secret_key)
+ self.assertEqual(len(export.clients), 0)
+ self.assertEqual(export.daemons, {"nodea"})
+ self.assertEqual(export.cluster_id, '_default_')
+
+ def test_gen_raw_config(self):
+ conf = GaneshaConf.instance('_default_')
+ # pylint: disable=W0212
+ export = conf.exports[1]
+ del conf.exports[1]
+ conf._save_export(export)
+ conf = GaneshaConf.instance('_default_')
+ exports = conf.exports
+ self.assertEqual(len(exports.items()), 2)
+ self.assertIn(1, exports)
+ self.assertIn(2, exports)
+
+ # export_id = 1 asserts
+ export = exports[1]
+ self.assertEqual(export.export_id, 1)
+ self.assertEqual(export.path, "/")
+ self.assertEqual(export.pseudo, "/cephfs_a")
+ self.assertIsNone(export.tag)
+ self.assertEqual(export.access_type, "RW")
+ self.assertEqual(export.squash, "root_squash")
+ self.assertEqual(export.protocols, {4})
+ self.assertEqual(export.transports, {"TCP", "UDP"})
+ self.assertEqual(export.fsal.name, "CEPH")
+ self.assertEqual(export.fsal.user_id, "ganesha")
+ self.assertEqual(export.fsal.fs_name, "a")
+ self.assertEqual(export.fsal.sec_label_xattr, None)
+ self.assertEqual(len(export.clients), 2)
+ self.assertEqual(export.clients[0].addresses,
+ ["192.168.0.10", "192.168.1.0/8"])
+ self.assertEqual(export.clients[0].squash, "no_root_squash")
+ self.assertIsNone(export.clients[0].access_type)
+ self.assertEqual(export.clients[1].addresses, ["192.168.0.0/16"])
+ self.assertEqual(export.clients[1].squash, "all_squash")
+ self.assertEqual(export.clients[1].access_type, "RO")
+ self.assertEqual(export.daemons, {"nodeb", "nodea"})
+ self.assertEqual(export.cluster_id, '_default_')
+ self.assertEqual(export.attr_expiration_time, 0)
+ self.assertEqual(export.security_label, False)
+
+ # export_id = 2 asserts
+ export = exports[2]
+ self.assertEqual(export.export_id, 2)
+ self.assertEqual(export.path, "/")
+ self.assertEqual(export.pseudo, "/rgw")
+ self.assertIsNone(export.tag)
+ self.assertEqual(export.access_type, "RW")
+ self.assertEqual(export.squash, "all_squash")
+ self.assertEqual(export.protocols, {4, 3})
+ self.assertEqual(export.transports, {"TCP", "UDP"})
+ self.assertEqual(export.fsal.name, "RGW")
+ self.assertEqual(export.fsal.rgw_user_id, "testuser")
+ self.assertEqual(export.fsal.access_key, "access_key")
+ self.assertEqual(export.fsal.secret_key, "secret_key")
+ self.assertEqual(len(export.clients), 0)
+ self.assertEqual(export.daemons, {"nodea"})
+ self.assertEqual(export.cluster_id, '_default_')
+
+ def test_update_export(self):
+ ganesha.RgwClient = MagicMock()
+ admin_inst_mock = MagicMock()
+ admin_inst_mock.get_user_keys.return_value = {
+ 'access_key': 'access_key',
+ 'secret_key': 'secret_key'
+ }
+ ganesha.RgwClient.admin_instance.return_value = admin_inst_mock
+
+ conf = GaneshaConf.instance('_default_')
+ conf.update_export({
+ 'export_id': 2,
+ 'daemons': ["nodeb"],
+ 'path': 'bucket',
+ 'pseudo': '/rgw/bucket',
+ 'cluster_id': '_default_',
+ 'tag': 'bucket_tag',
+ 'access_type': 'RW',
+ 'squash': 'all_squash',
+ 'security_label': False,
+ 'protocols': [4, 3],
+ 'transports': ['TCP', 'UDP'],
+ 'clients': [{
+ 'addresses': ["192.168.0.0/16"],
+ 'access_type': None,
+ 'squash': None
+ }],
+ 'fsal': {
+ 'name': 'RGW',
+ 'rgw_user_id': 'testuser'
+ }
+ })
+
+ conf = GaneshaConf.instance('_default_')
+ export = conf.get_export(2)
+ self.assertEqual(export.export_id, 2)
+ self.assertEqual(export.path, "bucket")
+ self.assertEqual(export.pseudo, "/rgw/bucket")
+ self.assertEqual(export.tag, "bucket_tag")
+ self.assertEqual(export.access_type, "RW")
+ self.assertEqual(export.squash, "all_squash")
+ self.assertEqual(export.protocols, {4, 3})
+ self.assertEqual(export.transports, {"TCP", "UDP"})
+ self.assertEqual(export.fsal.name, "RGW")
+ self.assertEqual(export.fsal.rgw_user_id, "testuser")
+ self.assertEqual(export.fsal.access_key, "access_key")
+ self.assertEqual(export.fsal.secret_key, "secret_key")
+ self.assertEqual(len(export.clients), 1)
+ self.assertEqual(export.clients[0].addresses, ["192.168.0.0/16"])
+ self.assertIsNone(export.clients[0].squash)
+ self.assertIsNone(export.clients[0].access_type)
+ self.assertEqual(export.daemons, {"nodeb"})
+ self.assertEqual(export.cluster_id, '_default_')
+
+ def test_remove_export(self):
+ conf = GaneshaConf.instance('_default_')
+ conf.remove_export(1)
+ exports = conf.list_exports()
+ self.assertEqual(len(exports), 1)
+ self.assertEqual(2, exports[0].export_id)
+ export = conf.get_export(2)
+ self.assertEqual(export.export_id, 2)
+ self.assertEqual(export.path, "/")
+ self.assertEqual(export.pseudo, "/rgw")
+ self.assertIsNone(export.tag)
+ self.assertEqual(export.access_type, "RW")
+ self.assertEqual(export.squash, "all_squash")
+ self.assertEqual(export.protocols, {4, 3})
+ self.assertEqual(export.transports, {"TCP", "UDP"})
+ self.assertEqual(export.fsal.name, "RGW")
+ self.assertEqual(export.fsal.rgw_user_id, "testuser")
+ self.assertEqual(export.fsal.access_key, "access_key")
+ self.assertEqual(export.fsal.secret_key, "secret_key")
+ self.assertEqual(len(export.clients), 0)
+ self.assertEqual(export.daemons, {"nodea"})
+ self.assertEqual(export.cluster_id, '_default_')
+
+ def test_create_export_rgw(self):
+ ganesha.RgwClient = MagicMock()
+ admin_inst_mock = MagicMock()
+ admin_inst_mock.get_user_keys.return_value = {
+ 'access_key': 'access_key2',
+ 'secret_key': 'secret_key2'
+ }
+ ganesha.RgwClient.admin_instance.return_value = admin_inst_mock
+
+ conf = GaneshaConf.instance('_default_')
+ ex_id = conf.create_export({
+ 'daemons': ["nodeb"],
+ 'path': 'bucket',
+ 'pseudo': '/rgw/bucket',
+ 'tag': 'bucket_tag',
+ 'cluster_id': '_default_',
+ 'access_type': 'RW',
+ 'squash': 'all_squash',
+ 'security_label': False,
+ 'protocols': [4, 3],
+ 'transports': ['TCP', 'UDP'],
+ 'clients': [{
+ 'addresses': ["192.168.0.0/16"],
+ 'access_type': None,
+ 'squash': None
+ }],
+ 'fsal': {
+ 'name': 'RGW',
+ 'rgw_user_id': 'testuser'
+ }
+ })
+
+ conf = GaneshaConf.instance('_default_')
+ exports = conf.list_exports()
+ self.assertEqual(len(exports), 3)
+ export = conf.get_export(ex_id)
+ self.assertEqual(export.export_id, ex_id)
+ self.assertEqual(export.path, "bucket")
+ self.assertEqual(export.pseudo, "/rgw/bucket")
+ self.assertEqual(export.tag, "bucket_tag")
+ self.assertEqual(export.access_type, "RW")
+ self.assertEqual(export.squash, "all_squash")
+ self.assertEqual(export.protocols, {4, 3})
+ self.assertEqual(export.transports, {"TCP", "UDP"})
+ self.assertEqual(export.fsal.name, "RGW")
+ self.assertEqual(export.fsal.rgw_user_id, "testuser")
+ self.assertEqual(export.fsal.access_key, "access_key2")
+ self.assertEqual(export.fsal.secret_key, "secret_key2")
+ self.assertEqual(len(export.clients), 1)
+ self.assertEqual(export.clients[0].addresses, ["192.168.0.0/16"])
+ self.assertIsNone(export.clients[0].squash)
+ self.assertIsNone(export.clients[0].access_type)
+ self.assertEqual(export.daemons, {"nodeb"})
+ self.assertEqual(export.cluster_id, '_default_')
+
+ def test_create_export_cephfs(self):
+ ganesha.CephX = MagicMock()
+ ganesha.CephX.list_clients.return_value = ["fs"]
+ ganesha.CephX.get_client_key.return_value = "fs_key"
+
+ ganesha.CephFS = MagicMock()
+ ganesha.CephFS.dir_exists.return_value = True
+
+ conf = GaneshaConf.instance('_default_')
+ ex_id = conf.create_export({
+ 'daemons': ['nodea', 'nodeb'],
+ 'path': '/',
+ 'pseudo': '/cephfs2',
+ 'cluster_id': '_default_',
+ 'tag': None,
+ 'access_type': 'RW',
+ 'squash': 'all_squash',
+ 'security_label': True,
+ 'protocols': [4],
+ 'transports': ['TCP'],
+ 'clients': [],
+ 'fsal': {
+ 'name': 'CEPH',
+ 'user_id': 'fs',
+ 'fs_name': None,
+ 'sec_label_xattr': 'security.selinux'
+ }
+ })
+
+ conf = GaneshaConf.instance('_default_')
+ exports = conf.list_exports()
+ self.assertEqual(len(exports), 3)
+ export = conf.get_export(ex_id)
+ self.assertEqual(export.export_id, ex_id)
+ self.assertEqual(export.path, "/")
+ self.assertEqual(export.pseudo, "/cephfs2")
+ self.assertIsNone(export.tag)
+ self.assertEqual(export.access_type, "RW")
+ self.assertEqual(export.squash, "all_squash")
+ self.assertEqual(export.protocols, {4})
+ self.assertEqual(export.transports, {"TCP"})
+ self.assertEqual(export.fsal.name, "CEPH")
+ self.assertEqual(export.fsal.user_id, "fs")
+ self.assertEqual(export.fsal.cephx_key, "fs_key")
+ self.assertEqual(export.fsal.sec_label_xattr, "security.selinux")
+ self.assertIsNone(export.fsal.fs_name)
+ self.assertEqual(len(export.clients), 0)
+ self.assertEqual(export.daemons, {"nodeb", "nodea"})
+ self.assertEqual(export.cluster_id, '_default_')
+ self.assertEqual(export.attr_expiration_time, 0)
+ self.assertEqual(export.security_label, True)
diff --git a/src/pybind/mgr/dashboard/tests/test_grafana.py b/src/pybind/mgr/dashboard/tests/test_grafana.py
new file mode 100644
index 00000000..02597dfe
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_grafana.py
@@ -0,0 +1,116 @@
+import json
+import unittest
+
+try:
+ from mock import patch
+except ImportError:
+ from unittest.mock import patch
+
+from . import ControllerTestCase, KVStoreMockMixin
+from ..controllers.grafana import Grafana
+from ..grafana import GrafanaRestClient
+from ..settings import Settings
+
+
+class GrafanaTest(ControllerTestCase, KVStoreMockMixin):
+ @classmethod
+ def setup_server(cls):
+ # pylint: disable=protected-access
+ Grafana._cp_config['tools.authenticate.on'] = False
+ cls.setup_controllers([Grafana])
+
+ def setUp(self):
+ self.mock_kv_store()
+
+ @staticmethod
+ def server_settings(
+ url='http://localhost:3000',
+ user='admin',
+ password='admin',
+ ):
+ if url is not None:
+ Settings.GRAFANA_API_URL = url
+ if user is not None:
+ Settings.GRAFANA_API_USERNAME = user
+ if password is not None:
+ Settings.GRAFANA_API_PASSWORD = password
+
+ def test_url(self):
+ self.server_settings()
+ self._get('/api/grafana/url')
+ self.assertStatus(200)
+ self.assertJsonBody({'instance': 'http://localhost:3000'})
+
+ def test_validation(self):
+ self.server_settings()
+ self._get('/api/grafana/validation/foo')
+ self.assertStatus(500)
+
+ def test_dashboards_unavailable_no_url(self):
+ self.server_settings(url=None)
+ self._post('/api/grafana/dashboards')
+ self.assertStatus(500)
+
+ def test_dashboards_unavailable_no_user(self):
+ self.server_settings(user=None)
+ self._post('/api/grafana/dashboards')
+ self.assertStatus(500)
+
+ def test_dashboards_unavailable_no_password(self):
+ self.server_settings(password=None)
+ self._post('/api/grafana/dashboards')
+ self.assertStatus(500)
+
+
+class GrafanaRestClientTest(unittest.TestCase, KVStoreMockMixin):
+ headers = {
+ 'Accept': 'application/json',
+ 'Content-Type': 'application/json',
+ }
+ payload = json.dumps({
+ 'dashboard': 'foo',
+ 'overwrite': True
+ })
+
+ def setUp(self):
+ self.mock_kv_store()
+ Settings.GRAFANA_API_URL = 'https://foo/bar'
+ Settings.GRAFANA_API_USERNAME = 'xyz'
+ Settings.GRAFANA_API_PASSWORD = 'abc'
+ Settings.GRAFANA_API_SSL_VERIFY = True
+
+ def test_ssl_verify_url_validation(self):
+ with patch('requests.request') as mock_request:
+ rest_client = GrafanaRestClient()
+ rest_client.url_validation('FOO', Settings.GRAFANA_API_URL)
+ mock_request.assert_called_with('FOO', Settings.GRAFANA_API_URL,
+ verify=True)
+
+ def test_no_ssl_verify_url_validation(self):
+ Settings.GRAFANA_API_SSL_VERIFY = False
+ with patch('requests.request') as mock_request:
+ rest_client = GrafanaRestClient()
+ rest_client.url_validation('BAR', Settings.GRAFANA_API_URL)
+ mock_request.assert_called_with('BAR', Settings.GRAFANA_API_URL,
+ verify=False)
+
+ def test_ssl_verify_push_dashboard(self):
+ with patch('requests.post') as mock_request:
+ rest_client = GrafanaRestClient()
+ rest_client.push_dashboard('foo')
+ mock_request.assert_called_with(
+ Settings.GRAFANA_API_URL + '/api/dashboards/db',
+ auth=(Settings.GRAFANA_API_USERNAME,
+ Settings.GRAFANA_API_PASSWORD),
+ data=self.payload, headers=self.headers, verify=True)
+
+ def test_no_ssl_verify_push_dashboard(self):
+ Settings.GRAFANA_API_SSL_VERIFY = False
+ with patch('requests.post') as mock_request:
+ rest_client = GrafanaRestClient()
+ rest_client.push_dashboard('foo')
+ mock_request.assert_called_with(
+ Settings.GRAFANA_API_URL + '/api/dashboards/db',
+ auth=(Settings.GRAFANA_API_USERNAME,
+ Settings.GRAFANA_API_PASSWORD),
+ data=self.payload, headers=self.headers, verify=False)
diff --git a/src/pybind/mgr/dashboard/tests/test_home.py b/src/pybind/mgr/dashboard/tests/test_home.py
new file mode 100644
index 00000000..c3088b7a
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_home.py
@@ -0,0 +1,68 @@
+from __future__ import absolute_import
+
+import logging
+import os
+
+try:
+ import mock
+except ImportError:
+ import unittest.mock as mock
+
+from . import ControllerTestCase, FakeFsMixin
+from .. import mgr
+
+from ..controllers.home import HomeController, LanguageMixin
+
+logger = logging.getLogger()
+
+
+class HomeTest(ControllerTestCase, FakeFsMixin):
+ @classmethod
+ def setup_server(cls):
+ frontend_path = mgr.get_frontend_path()
+ cls.fs.reset()
+ cls.fs.create_dir(frontend_path)
+ cls.fs.create_file(
+ os.path.join(frontend_path, '..', 'package.json'),
+ contents='{"config":{"locale": "en-US"}}')
+ with mock.patch(cls.builtins_open, new=cls.f_open),\
+ mock.patch('os.listdir', new=cls.f_os.listdir):
+ lang = LanguageMixin()
+ cls.fs.create_file(
+ os.path.join(lang.DEFAULT_LANGUAGE_PATH, 'index.html'),
+ contents='<!doctype html><html lang="en"><body></body></html>')
+ cls.setup_controllers([HomeController])
+
+ @mock.patch(FakeFsMixin.builtins_open, new=FakeFsMixin.f_open)
+ @mock.patch('os.stat', new=FakeFsMixin.f_os.stat)
+ @mock.patch('os.listdir', new=FakeFsMixin.f_os.listdir)
+ def test_home_default_lang(self):
+ self._get('/')
+ self.assertStatus(200)
+ logger.info(self.body)
+ self.assertIn('<html lang="en">', self.body.decode('utf-8'))
+
+ @mock.patch(FakeFsMixin.builtins_open, new=FakeFsMixin.f_open)
+ @mock.patch('os.stat', new=FakeFsMixin.f_os.stat)
+ @mock.patch('os.listdir', new=FakeFsMixin.f_os.listdir)
+ def test_home_uplevel_check(self):
+ self._get('/../../../../../../etc/shadow')
+ self.assertStatus(403)
+
+ @mock.patch(FakeFsMixin.builtins_open, new=FakeFsMixin.f_open)
+ @mock.patch('os.stat', new=FakeFsMixin.f_os.stat)
+ @mock.patch('os.listdir', new=FakeFsMixin.f_os.listdir)
+ def test_home_en_us(self):
+ self._get('/', headers=[('Accept-Language', 'en-US')])
+ self.assertStatus(200)
+ logger.info(self.body)
+ self.assertIn('<html lang="en">', self.body.decode('utf-8'))
+
+ @mock.patch(FakeFsMixin.builtins_open, new=FakeFsMixin.f_open)
+ @mock.patch('os.stat', new=FakeFsMixin.f_os.stat)
+ @mock.patch('os.listdir', new=FakeFsMixin.f_os.listdir)
+ def test_home_non_supported_lang(self):
+ self._get('/', headers=[('Accept-Language', 'NO-NO')])
+ self.assertStatus(200)
+ logger.info(self.body)
+ self.assertIn('<html lang="en">', self.body.decode('utf-8'))
diff --git a/src/pybind/mgr/dashboard/tests/test_iscsi.py b/src/pybind/mgr/dashboard/tests/test_iscsi.py
new file mode 100644
index 00000000..34d2f014
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_iscsi.py
@@ -0,0 +1,998 @@
+# pylint: disable=too-many-public-methods, too-many-lines
+
+import copy
+import errno
+import json
+import unittest
+
+try:
+ import mock
+except ImportError:
+ import unittest.mock as mock
+
+from mgr_module import ERROR_MSG_NO_INPUT_FILE
+
+from . import CmdException, ControllerTestCase, CLICommandTestMixin, KVStoreMockMixin
+from .. import mgr
+from ..controllers.iscsi import Iscsi, IscsiTarget
+from ..services.iscsi_client import IscsiClient
+from ..services.orchestrator import OrchClient
+from ..rest_client import RequestException
+
+
class IscsiTestCli(unittest.TestCase, CLICommandTestMixin):
    """Tests for the ``iscsi-gateway-add`` / ``iscsi-gateway-rm`` CLI commands."""

    def setUp(self):
        self.mock_kv_store()
        # Route all gateway traffic to the in-process mock client.
        # pylint: disable=protected-access
        IscsiClientMock._instance = IscsiClientMock()
        IscsiClient.instance = IscsiClientMock.instance

    def test_cli_add_gateway_invalid_url(self):
        with self.assertRaises(CmdException) as cm:
            self.exec_cmd('iscsi-gateway-add', inbuf='http:/hello.com')

        error = cm.exception
        self.assertEqual(-errno.EINVAL, error.retcode)
        expected_msg = ("Invalid service URL 'http:/hello.com'. Valid format: "
                        "'<scheme>://<username>:<password>@<host>[:port]'.")
        self.assertEqual(expected_msg, str(error))

    def test_cli_add_gateway_empty_url(self):
        with self.assertRaises(CmdException) as cm:
            self.exec_cmd('iscsi-gateway-add', inbuf='')

        error = cm.exception
        self.assertEqual(-errno.EINVAL, error.retcode)
        self.assertEqual(ERROR_MSG_NO_INPUT_FILE, str(error))

    def test_cli_add_gateway(self):
        for service_url in ('https://admin:admin@10.17.5.1:5001',
                            'https://admin:admin@10.17.5.2:5001'):
            self.exec_cmd('iscsi-gateway-add', inbuf=service_url)
        iscsi_config = json.loads(self.get_key('_iscsi_config'))
        expected_gateways = {
            'node1': {'service_url': 'https://admin:admin@10.17.5.1:5001'},
            'node2': {'service_url': 'https://admin:admin@10.17.5.2:5001'},
        }
        self.assertEqual(expected_gateways, iscsi_config['gateways'])

    def test_cli_remove_gateway(self):
        # Start from the two-gateway state set up by the add test.
        self.test_cli_add_gateway()
        self.exec_cmd('iscsi-gateway-rm', name='node1')
        iscsi_config = json.loads(self.get_key('_iscsi_config'))
        expected_gateways = {
            'node2': {'service_url': 'https://admin:admin@10.17.5.2:5001'},
        }
        self.assertEqual(expected_gateways, iscsi_config['gateways'])
+
+
class IscsiTestController(ControllerTestCase, KVStoreMockMixin):
    """REST API tests for the ``Iscsi`` and ``IscsiTarget`` controllers.

    All gateway communication is redirected to :class:`IscsiClientMock`, so
    these tests exercise only the dashboard controller logic.  Each test
    deep-copies the module-level ``iscsi_target_request`` /
    ``iscsi_target_response`` fixtures and tweaks the copy.
    """

    @classmethod
    def setup_server(cls):
        # No orchestrator backend available for these tests.
        OrchClient().available = lambda: False
        mgr.rados.side_effect = None
        # pylint: disable=protected-access
        # Disable authentication so requests do not need a session.
        Iscsi._cp_config['tools.authenticate.on'] = False
        IscsiTarget._cp_config['tools.authenticate.on'] = False
        cls.setup_controllers([Iscsi, IscsiTarget])

    def setUp(self):
        self.mock_kv_store()
        # Pre-populate the mocked config store with two known gateways.
        self.CONFIG_KEY_DICT['_iscsi_config'] = '''
        {
            "gateways": {
                "node1": {
                    "service_url": "https://admin:admin@10.17.5.1:5001"
                },
                "node2": {
                    "service_url": "https://admin:admin@10.17.5.2:5001"
                }
            }
        }
        '''
        # Reset the mock gateway state before every test.
        # pylint: disable=protected-access
        IscsiClientMock._instance = IscsiClientMock()
        IscsiClient.instance = IscsiClientMock.instance

    def test_enable_discoveryauth(self):
        # Setting non-empty CHAP credentials enables discovery auth; the PUT
        # echoes the credentials and a subsequent GET returns them.
        discoveryauth = {
            'user': 'myiscsiusername',
            'password': 'myiscsipassword',
            'mutual_user': 'myiscsiusername2',
            'mutual_password': 'myiscsipassword2'
        }
        self._put('/api/iscsi/discoveryauth', discoveryauth)
        self.assertStatus(200)
        self.assertJsonBody(discoveryauth)
        self._get('/api/iscsi/discoveryauth')
        self.assertStatus(200)
        self.assertJsonBody(discoveryauth)

    def test_bad_discoveryauth(self):
        # A password longer than CHAP allows must be rejected (400) and must
        # not change the stored (empty) credentials.
        discoveryauth = {
            'user': 'myiscsiusername',
            'password': 'myiscsipasswordmyiscsipasswordmyiscsipassword',
            'mutual_user': '',
            'mutual_password': ''
        }
        put_response = {
            'detail': 'Bad authentication',
            'code': 'target_bad_auth',
            'component': 'iscsi'
        }
        get_response = {
            'user': '',
            'password': '',
            'mutual_user': '',
            'mutual_password': ''
        }
        self._put('/api/iscsi/discoveryauth', discoveryauth)
        self.assertStatus(400)
        self.assertJsonBody(put_response)
        self._get('/api/iscsi/discoveryauth')
        self.assertStatus(200)
        self.assertJsonBody(get_response)

    def test_disable_discoveryauth(self):
        # Empty credentials disable discovery auth.
        discoveryauth = {
            'user': '',
            'password': '',
            'mutual_user': '',
            'mutual_password': ''
        }
        self._put('/api/iscsi/discoveryauth', discoveryauth)
        self.assertStatus(200)
        self.assertJsonBody(discoveryauth)
        self._get('/api/iscsi/discoveryauth')
        self.assertStatus(200)
        self.assertJsonBody(discoveryauth)

    def test_list_empty(self):
        self._get('/api/iscsi/target')
        self.assertStatus(200)
        self.assertJsonBody([])

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_list(self, _validate_image_mock):
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw1"
        request = copy.deepcopy(iscsi_target_request)
        request['target_iqn'] = target_iqn
        self._task_post('/api/iscsi/target', request)
        self.assertStatus(201)
        self._get('/api/iscsi/target')
        self.assertStatus(200)
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        self.assertJsonBody([response])

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_create(self, _validate_image_mock):
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw2"
        request = copy.deepcopy(iscsi_target_request)
        request['target_iqn'] = target_iqn
        self._task_post('/api/iscsi/target', request)
        self.assertStatus(201)
        self._get('/api/iscsi/target/{}'.format(request['target_iqn']))
        self.assertStatus(200)
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        self.assertJsonBody(response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_delete(self, _validate_image_mock):
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw3"
        request = copy.deepcopy(iscsi_target_request)
        request['target_iqn'] = target_iqn
        self._task_post('/api/iscsi/target', request)
        self.assertStatus(201)
        self._task_delete('/api/iscsi/target/{}'.format(request['target_iqn']))
        self.assertStatus(204)
        self._get('/api/iscsi/target')
        self.assertStatus(200)
        self.assertJsonBody([])

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_add_client(self, _validate_image_mock):
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw4"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['clients'].append(
            {
                "luns": [{"image": "lun1", "pool": "rbd"}],
                "client_iqn": "iqn.1994-05.com.redhat:rh7-client3",
                "auth": {
                    "password": "myiscsipassword5",
                    "user": "myiscsiusername5",
                    "mutual_password": "myiscsipassword6",
                    "mutual_user": "myiscsiusername6"}
            })
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['clients'].append(
            {
                "luns": [{"image": "lun1", "pool": "rbd"}],
                "client_iqn": "iqn.1994-05.com.redhat:rh7-client3",
                "auth": {
                    "password": "myiscsipassword5",
                    "user": "myiscsiusername5",
                    "mutual_password": "myiscsipassword6",
                    "mutual_user": "myiscsiusername6"},
                "info": {
                    "alias": "",
                    "ip_address": [],
                    "state": {}
                }
            })
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_add_bad_client(self, _validate_image_mock):
        # Adding a client with an over-long CHAP password must fail (400) and
        # leave the target unchanged.
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw4"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['clients'].append(
            {
                "luns": [{"image": "lun1", "pool": "rbd"}],
                "client_iqn": "iqn.1994-05.com.redhat:rh7-client4",
                "auth": {
                    "password": "myiscsipassword7myiscsipassword7myiscsipasswo",
                    "user": "myiscsiusername7",
                    "mutual_password": "myiscsipassword8",
                    "mutual_user": "myiscsiusername8"}
            })
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn

        self._task_post('/api/iscsi/target', create_request)
        self.assertStatus(201)
        self._task_put('/api/iscsi/target/{}'.format(create_request['target_iqn']), update_request)
        self.assertStatus(400)
        self._get('/api/iscsi/target/{}'.format(update_request['new_target_iqn']))
        self.assertStatus(200)
        self.assertJsonBody(response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_change_client_password(self, _validate_image_mock):
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw5"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['clients'][0]['auth']['password'] = 'MyNewPassword'
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['clients'][0]['auth']['password'] = 'MyNewPassword'
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_rename_client(self, _validate_image_mock):
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw6"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['clients'][0]['client_iqn'] = 'iqn.1994-05.com.redhat:rh7-client0'
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['clients'][0]['client_iqn'] = 'iqn.1994-05.com.redhat:rh7-client0'
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_add_disk(self, _validate_image_mock):
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw7"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['disks'].append(
            {
                "image": "lun3",
                "pool": "rbd",
                "controls": {},
                "backstore": "user:rbd"
            })
        update_request['clients'][0]['luns'].append({"image": "lun3", "pool": "rbd"})
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        # wwn/lun values follow IscsiClientMock's deterministic assignment.
        response['disks'].append(
            {
                "image": "lun3",
                "pool": "rbd",
                "controls": {},
                "backstore": "user:rbd",
                "wwn": "64af6678-9694-4367-bacc-f8eb0baa2",
                "lun": 2

            })
        response['clients'][0]['luns'].append({"image": "lun3", "pool": "rbd"})
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_change_disk_image(self, _validate_image_mock):
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw8"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['disks'][0]['image'] = 'lun0'
        update_request['clients'][0]['luns'][0]['image'] = 'lun0'
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['disks'][0]['image'] = 'lun0'
        response['clients'][0]['luns'][0]['image'] = 'lun0'
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_change_disk_controls(self, _validate_image_mock):
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw9"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['disks'][0]['controls'] = {"qfull_timeout": 15}
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['disks'][0]['controls'] = {"qfull_timeout": 15}
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_rename_target(self, _validate_image_mock):
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw10"
        new_target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw11"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = new_target_iqn
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = new_target_iqn
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_rename_group(self, _validate_image_mock):
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw12"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['groups'][0]['group_id'] = 'mygroup0'
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['groups'][0]['group_id'] = 'mygroup0'
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_add_client_to_group(self, _validate_image_mock):
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw13"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['clients'].append(
            {
                "luns": [],
                "client_iqn": "iqn.1994-05.com.redhat:rh7-client3",
                "auth": {
                    "password": None,
                    "user": None,
                    "mutual_password": None,
                    "mutual_user": None}
            })
        update_request['groups'][0]['members'].append('iqn.1994-05.com.redhat:rh7-client3')
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['clients'].append(
            {
                "luns": [],
                "client_iqn": "iqn.1994-05.com.redhat:rh7-client3",
                "auth": {
                    "password": None,
                    "user": None,
                    "mutual_password": None,
                    "mutual_user": None},
                "info": {
                    "alias": "",
                    "ip_address": [],
                    "state": {}
                }
            })
        response['groups'][0]['members'].append('iqn.1994-05.com.redhat:rh7-client3')
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_remove_client_from_group(self, _validate_image_mock):
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw14"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['groups'][0]['members'].remove('iqn.1994-05.com.redhat:rh7-client2')
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['groups'][0]['members'].remove('iqn.1994-05.com.redhat:rh7-client2')
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_remove_groups(self, _validate_image_mock):
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw15"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['groups'] = []
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['groups'] = []
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_add_client_to_multiple_groups(self, _validate_image_mock):
        # An initiator may belong to at most one group; creating a target
        # that violates this must fail with 400.
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw16"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        create_request['groups'].append(copy.deepcopy(create_request['groups'][0]))
        create_request['groups'][1]['group_id'] = 'mygroup2'
        self._task_post('/api/iscsi/target', create_request)
        self.assertStatus(400)
        self.assertJsonBody({
            'detail': 'Each initiator can only be part of 1 group at a time',
            'code': 'initiator_in_multiple_groups',
            'component': 'iscsi'
        })

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_remove_client_lun(self, _validate_image_mock):
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw17"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        create_request['clients'][0]['luns'] = [
            {"image": "lun1", "pool": "rbd"},
            {"image": "lun2", "pool": "rbd"},
            {"image": "lun3", "pool": "rbd"}
        ]
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['clients'][0]['luns'] = [
            {"image": "lun1", "pool": "rbd"},
            {"image": "lun3", "pool": "rbd"}
        ]
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['clients'][0]['luns'] = [
            {"image": "lun1", "pool": "rbd"},
            {"image": "lun3", "pool": "rbd"}
        ]
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_change_client_auth(self, _validate_image_mock):
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw18"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['clients'][0]['auth']['password'] = 'myiscsipasswordX'
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['clients'][0]['auth']['password'] = 'myiscsipasswordX'
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_remove_client_logged_in(self, _validate_image_mock):
        # A client with an active session ('LOGGED_IN') cannot be deleted.
        client_info = {
            'alias': '',
            'ip_address': [],
            'state': {'LOGGED_IN': ['node1']}
        }
        # pylint: disable=protected-access
        IscsiClientMock._instance.clientinfo = client_info
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw19"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['clients'].pop(0)
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        for client in response['clients']:
            client['info'] = client_info
        update_response = {
            'detail': "Client 'iqn.1994-05.com.redhat:rh7-client' cannot be deleted until it's "
                      "logged out",
            'code': 'client_logged_in',
            'component': 'iscsi'
        }
        self._update_iscsi_target(create_request, update_request, 400, update_response, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_remove_client(self, _validate_image_mock):
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw20"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['clients'].pop(0)
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['clients'].pop(0)
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_add_image_to_group_with_client_logged_in(self, _validate_image_mock):
        # Adding an image to a group is allowed even while a member client
        # is logged in.
        client_info = {
            'alias': '',
            'ip_address': [],
            'state': {'LOGGED_IN': ['node1']}
        }
        new_disk = {"pool": "rbd", "image": "lun1"}
        # pylint: disable=protected-access
        IscsiClientMock._instance.clientinfo = client_info
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw21"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['groups'][0]['disks'].append(new_disk)
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['groups'][0]['disks'].insert(0, new_disk)
        for client in response['clients']:
            client['info'] = client_info
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_add_image_to_initiator_with_client_logged_in(self, _validate_image_mock):
        client_info = {
            'alias': '',
            'ip_address': [],
            'state': {'LOGGED_IN': ['node1']}
        }
        new_disk = {"pool": "rbd", "image": "lun2"}
        # pylint: disable=protected-access
        IscsiClientMock._instance.clientinfo = client_info
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw22"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['clients'][0]['luns'].append(new_disk)
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['clients'][0]['luns'].append(new_disk)
        for client in response['clients']:
            client['info'] = client_info
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    @mock.patch('dashboard.controllers.iscsi.IscsiTarget._validate_image')
    def test_remove_image_from_group_with_client_logged_in(self, _validate_image_mock):
        client_info = {
            'alias': '',
            'ip_address': [],
            'state': {'LOGGED_IN': ['node1']}
        }
        # pylint: disable=protected-access
        IscsiClientMock._instance.clientinfo = client_info
        target_iqn = "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw23"
        create_request = copy.deepcopy(iscsi_target_request)
        create_request['target_iqn'] = target_iqn
        update_request = copy.deepcopy(create_request)
        update_request['new_target_iqn'] = target_iqn
        update_request['groups'][0]['disks'] = []
        response = copy.deepcopy(iscsi_target_response)
        response['target_iqn'] = target_iqn
        response['groups'][0]['disks'] = []
        for client in response['clients']:
            client['info'] = client_info
        self._update_iscsi_target(create_request, update_request, 200, None, response)

    def _update_iscsi_target(self, create_request, update_request, update_response_code,
                             update_response, response):
        """Create a target, PUT an update, then verify state via GET.

        :param create_request: payload POSTed to create the target.
        :param update_request: payload PUT to update it.
        :param update_response_code: expected HTTP status of the PUT.
        :param update_response: expected JSON body of the PUT (None for tasks).
        :param response: expected JSON body of the final GET.
        """
        self._task_post('/api/iscsi/target', create_request)
        self.assertStatus(201)
        self._task_put('/api/iscsi/target/{}'.format(create_request['target_iqn']), update_request)
        self.assertStatus(update_response_code)
        self.assertJsonBody(update_response)
        self._get('/api/iscsi/target/{}'.format(update_request['new_target_iqn']))
        self.assertStatus(200)
        self.assertJsonBody(response)
+
+
# Canonical target-creation payload; tests deep-copy it and tweak the copy.
# Its structure mirrors the IscsiTarget controller's expected request body.
iscsi_target_request = {
    "target_iqn": "iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw",
    "portals": [
        {"ip": "192.168.100.202", "host": "node2"},
        {"ip": "10.0.2.15", "host": "node2"},
        {"ip": "192.168.100.203", "host": "node3"}
    ],
    "disks": [
        {"image": "lun1", "pool": "rbd", "backstore": "user:rbd",
         "controls": {"max_data_area_mb": 128}},
        {"image": "lun2", "pool": "rbd", "backstore": "user:rbd",
         "controls": {"max_data_area_mb": 128}}
    ],
    "clients": [
        {
            "luns": [{"image": "lun1", "pool": "rbd"}],
            "client_iqn": "iqn.1994-05.com.redhat:rh7-client",
            "auth": {
                "password": "myiscsipassword1",
                "user": "myiscsiusername1",
                "mutual_password": "myiscsipassword2",
                "mutual_user": "myiscsiusername2"}
        },
        {
            "luns": [],
            "client_iqn": "iqn.1994-05.com.redhat:rh7-client2",
            "auth": {
                "password": "myiscsipassword3",
                "user": "myiscsiusername3",
                "mutual_password": "myiscsipassword4",
                "mutual_user": "myiscsiusername4"
            }
        }
    ],
    "acl_enabled": True,
    "auth": {
        "password": "",
        "user": "",
        "mutual_password": "",
        "mutual_user": ""},
    "target_controls": {},
    "groups": [
        {
            "group_id": "mygroup",
            "disks": [{"pool": "rbd", "image": "lun2"}],
            "members": ["iqn.1994-05.com.redhat:rh7-client2"]
        }
    ]
}
+
# Expected GET response for a target created from ``iscsi_target_request``.
# The 'wwn' and 'lun' values match IscsiClientMock's deterministic assignment
# (suffix/index based on creation order); 'info' blocks come from the mock's
# default clientinfo/targetinfo.
iscsi_target_response = {
    'target_iqn': 'iqn.2003-01.com.redhat.iscsi-gw:iscsi-igw',
    'portals': [
        {'host': 'node2', 'ip': '10.0.2.15'},
        {'host': 'node2', 'ip': '192.168.100.202'},
        {'host': 'node3', 'ip': '192.168.100.203'}
    ],
    'disks': [
        {'pool': 'rbd', 'image': 'lun1', 'backstore': 'user:rbd',
         'wwn': '64af6678-9694-4367-bacc-f8eb0baa0', 'lun': 0,
         'controls': {'max_data_area_mb': 128}},
        {'pool': 'rbd', 'image': 'lun2', 'backstore': 'user:rbd',
         'wwn': '64af6678-9694-4367-bacc-f8eb0baa1', 'lun': 1,
         'controls': {'max_data_area_mb': 128}}
    ],
    'clients': [
        {
            'client_iqn': 'iqn.1994-05.com.redhat:rh7-client',
            'luns': [{'pool': 'rbd', 'image': 'lun1'}],
            'auth': {
                'user': 'myiscsiusername1',
                'password': 'myiscsipassword1',
                'mutual_password': 'myiscsipassword2',
                'mutual_user': 'myiscsiusername2'
            },
            'info': {
                'alias': '',
                'ip_address': [],
                'state': {}
            }
        },
        {
            'client_iqn': 'iqn.1994-05.com.redhat:rh7-client2',
            'luns': [],
            'auth': {
                'user': 'myiscsiusername3',
                'password': 'myiscsipassword3',
                'mutual_password': 'myiscsipassword4',
                'mutual_user': 'myiscsiusername4'
            },
            'info': {
                'alias': '',
                'ip_address': [],
                'state': {}
            }
        }
    ],
    "acl_enabled": True,
    "auth": {
        "password": "",
        "user": "",
        "mutual_password": "",
        "mutual_user": ""},
    'groups': [
        {
            'group_id': 'mygroup',
            'disks': [{'pool': 'rbd', 'image': 'lun2'}],
            'members': ['iqn.1994-05.com.redhat:rh7-client2']
        }
    ],
    'target_controls': {},
    'info': {
        'num_sessions': 0
    }
}
+
+
class IscsiClientMock(object):
    """In-memory stand-in for ``IscsiClient``.

    Mimics the iSCSI gateway REST service by mutating a local ``config``
    dict, so controller tests run without a real rbd-target-api endpoint.
    A single shared instance is used (see ``_instance``/``instance``).
    """

    # Shared singleton, (re-)assigned by the test setUp methods.
    _instance = None

    def __init__(self):
        self.gateway_name = None
        self.service_url = None
        # Mirrors the structure of the gateway's persisted configuration.
        self.config = {
            "created": "2019/01/17 08:57:16",
            "discovery_auth": {
                "username": "",
                "password": "",
                "password_encryption_enabled": False,
                "mutual_username": "",
                "mutual_password": "",
                "mutual_password_encryption_enabled": False
            },
            "disks": {},
            "epoch": 0,
            "gateways": {},
            "targets": {},
            "updated": "",
            "version": 11
        }
        # Default client info; tests overwrite this to simulate logins.
        self.clientinfo = {
            'alias': '',
            'ip_address': [],
            'state': {}
        }

    @classmethod
    def instance(cls, gateway_name=None, service_url=None):
        # Always returns the shared singleton; only records which gateway
        # the caller addressed so get_ip_addresses/get_hostname can answer.
        cls._instance.gateway_name = gateway_name
        cls._instance.service_url = service_url
        # pylint: disable=unused-argument
        return cls._instance

    def ping(self):
        return {
            "message": "pong"
        }

    def get_settings(self):
        # Static capability/limits payload as served by the gateway API.
        return {
            "api_version": 2,
            "backstores": [
                "user:rbd"
            ],
            "config": {
                "minimum_gateways": 2
            },
            "default_backstore": "user:rbd",
            "required_rbd_features": {
                "rbd": 0,
                "user:rbd": 4,
            },
            "unsupported_rbd_features": {
                "rbd": 88,
                "user:rbd": 0,
            },
            "disk_default_controls": {
                "user:rbd": {
                    "hw_max_sectors": 1024,
                    "max_data_area_mb": 8,
                    "osd_op_timeout": 30,
                    "qfull_timeout": 5
                }
            },
            "target_default_controls": {
                "cmdsn_depth": 128,
                "dataout_timeout": 20,
                "first_burst_length": 262144,
                "immediate_data": "Yes",
                "initial_r2t": "Yes",
                "max_burst_length": 524288,
                "max_outstanding_r2t": 1,
                "max_recv_data_segment_length": 262144,
                "max_xmit_data_segment_length": 262144,
                "nopin_response_timeout": 5,
                "nopin_timeout": 5
            }
        }

    def get_config(self):
        # Deep copy so callers cannot mutate the mock's state accidentally.
        return copy.deepcopy(self.config)

    def create_target(self, target_iqn, target_controls):
        self.config['targets'][target_iqn] = {
            "clients": {},
            "acl_enabled": True,
            "auth": {
                "username": "",
                "password": "",
                "password_encryption_enabled": False,
                "mutual_username": "",
                "mutual_password": "",
                "mutual_password_encryption_enabled": False
            },
            "controls": target_controls,
            "created": "2019/01/17 09:22:34",
            "disks": {},
            "groups": {},
            "portals": {}
        }

    def create_gateway(self, target_iqn, gateway_name, ip_addresses):
        target_config = self.config['targets'][target_iqn]
        if 'ip_list' not in target_config:
            target_config['ip_list'] = []
        target_config['ip_list'] += ip_addresses
        target_config['portals'][gateway_name] = {
            "portal_ip_addresses": ip_addresses
        }

    def delete_gateway(self, target_iqn, gateway_name):
        target_config = self.config['targets'][target_iqn]
        portal_config = target_config['portals'][gateway_name]
        for ip in portal_config['portal_ip_addresses']:
            target_config['ip_list'].remove(ip)
        target_config['portals'].pop(gateway_name)

    def create_disk(self, pool, image, backstore, wwn):
        # Deterministic wwn: fixed prefix + current disk count, so tests can
        # predict it (see iscsi_target_response fixtures).
        if wwn is None:
            wwn = '64af6678-9694-4367-bacc-f8eb0baa' + str(len(self.config['disks']))
        image_id = '{}/{}'.format(pool, image)
        self.config['disks'][image_id] = {
            "pool": pool,
            "image": image,
            "backstore": backstore,
            "controls": {},
            "wwn": wwn
        }

    def create_target_lun(self, target_iqn, image_id, lun):
        target_config = self.config['targets'][target_iqn]
        # LUN id defaults to the next free index on this target.
        if lun is None:
            lun = len(target_config['disks'])
        target_config['disks'][image_id] = {
            "lun_id": lun
        }
        # First portal's gateway becomes the disk owner.
        self.config['disks'][image_id]['owner'] = list(target_config['portals'].keys())[0]

    def reconfigure_disk(self, pool, image, controls):
        # Persist only controls that differ from the backstore defaults.
        image_id = '{}/{}'.format(pool, image)
        settings = self.get_settings()
        backstore = self.config['disks'][image_id]['backstore']
        disk_default_controls = settings['disk_default_controls'][backstore]
        new_controls = {}
        for control_k, control_v in controls.items():
            if control_v != disk_default_controls[control_k]:
                new_controls[control_k] = control_v
        self.config['disks'][image_id]['controls'] = new_controls

    def create_client(self, target_iqn, client_iqn):
        target_config = self.config['targets'][target_iqn]
        target_config['clients'][client_iqn] = {
            "auth": {
                "username": "",
                "password": "",
                "password_encryption_enabled": False,
                "mutual_username": "",
                "mutual_password": "",
                "mutual_password_encryption_enabled": False
            },
            "group_name": "",
            "luns": {}
        }

    def create_client_lun(self, target_iqn, client_iqn, image_id):
        target_config = self.config['targets'][target_iqn]
        target_config['clients'][client_iqn]['luns'][image_id] = {}

    def delete_client_lun(self, target_iqn, client_iqn, image_id):
        target_config = self.config['targets'][target_iqn]
        del target_config['clients'][client_iqn]['luns'][image_id]

    def create_client_auth(self, target_iqn, client_iqn, user, password, m_user, m_password):
        target_config = self.config['targets'][target_iqn]
        target_config['clients'][client_iqn]['auth']['username'] = user
        target_config['clients'][client_iqn]['auth']['password'] = password
        target_config['clients'][client_iqn]['auth']['mutual_username'] = m_user
        target_config['clients'][client_iqn]['auth']['mutual_password'] = m_password

    def create_group(self, target_iqn, group_name, members, image_ids):
        target_config = self.config['targets'][target_iqn]
        target_config['groups'][group_name] = {
            "disks": {},
            "members": []
        }
        for image_id in image_ids:
            target_config['groups'][group_name]['disks'][image_id] = {}
        target_config['groups'][group_name]['members'] = members

    def update_group(self, target_iqn, group_name, members, image_ids):
        # NOTE(review): the new group keeps only disks NOT in ``image_ids``
        # and members NOT in ``members`` — i.e. the parameters appear to be
        # the items to *remove* (the controller issues deltas). Confirm
        # against IscsiClient.update_group before changing this.
        target_config = self.config['targets'][target_iqn]
        group = target_config['groups'][group_name]
        old_members = group['members']
        disks = group['disks']
        target_config['groups'][group_name] = {
            "disks": {},
            "members": []
        }

        for image_id in disks.keys():
            if image_id not in image_ids:
                target_config['groups'][group_name]['disks'][image_id] = {}

        new_members = []
        for member_iqn in old_members:
            if member_iqn not in members:
                new_members.append(member_iqn)
        target_config['groups'][group_name]['members'] = new_members

    def delete_group(self, target_iqn, group_name):
        target_config = self.config['targets'][target_iqn]
        del target_config['groups'][group_name]

    def delete_client(self, target_iqn, client_iqn):
        target_config = self.config['targets'][target_iqn]
        del target_config['clients'][client_iqn]

    def delete_target_lun(self, target_iqn, image_id):
        target_config = self.config['targets'][target_iqn]
        target_config['disks'].pop(image_id)
        # Unassign ownership when the LUN leaves the target.
        del self.config['disks'][image_id]['owner']

    def delete_disk(self, pool, image):
        image_id = '{}/{}'.format(pool, image)
        del self.config['disks'][image_id]

    def delete_target(self, target_iqn):
        del self.config['targets'][target_iqn]

    def get_ip_addresses(self):
        # Fixed per-node IPs matching the portals used by the fixtures.
        ips = {
            'node1': ['192.168.100.201'],
            'node2': ['192.168.100.202', '10.0.2.15'],
            'node3': ['192.168.100.203']
        }
        return {'data': ips[self.gateway_name]}

    def get_hostname(self):
        hostnames = {
            'https://admin:admin@10.17.5.1:5001': 'node1',
            'https://admin:admin@10.17.5.2:5001': 'node2',
            'https://admin:admin@10.17.5.3:5001': 'node3'
        }
        # Unknown service URLs behave like an unreachable gateway.
        if self.service_url not in hostnames:
            raise RequestException('No route to host')
        return {'data': hostnames[self.service_url]}

    def update_discoveryauth(self, user, password, mutual_user, mutual_password):
        self.config['discovery_auth']['username'] = user
        self.config['discovery_auth']['password'] = password
        self.config['discovery_auth']['mutual_username'] = mutual_user
        self.config['discovery_auth']['mutual_password'] = mutual_password

    def update_targetacl(self, target_iqn, action):
        self.config['targets'][target_iqn]['acl_enabled'] = (action == 'enable_acl')

    def update_targetauth(self, target_iqn, user, password, mutual_user, mutual_password):
        target_config = self.config['targets'][target_iqn]
        target_config['auth']['username'] = user
        target_config['auth']['password'] = password
        target_config['auth']['mutual_username'] = mutual_user
        target_config['auth']['mutual_password'] = mutual_password

    def get_targetinfo(self, target_iqn):
        # pylint: disable=unused-argument
        return {
            'num_sessions': 0
        }

    def get_clientinfo(self, target_iqn, client_iqn):
        # pylint: disable=unused-argument
        # Returns whatever the test installed (default: logged-out client).
        return self.clientinfo
diff --git a/src/pybind/mgr/dashboard/tests/test_notification.py b/src/pybind/mgr/dashboard/tests/test_notification.py
new file mode 100644
index 00000000..185ddc12
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_notification.py
@@ -0,0 +1,138 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import random
+import time
+import unittest
+
+
+from ..tools import NotificationQueue
+
+
+class Listener(object):
+ # pylint: disable=too-many-instance-attributes
+ def __init__(self):
+ self.type1 = []
+ self.type1_ts = []
+ self.type2 = []
+ self.type2_ts = []
+ self.type1_3 = []
+ self.type1_3_ts = []
+ self.all = []
+ self.all_ts = []
+
+ def register(self):
+ NotificationQueue.register(self.log_type1, 'type1', priority=90)
+ NotificationQueue.register(self.log_type2, 'type2')
+ NotificationQueue.register(self.log_type1_3, ['type1', 'type3'])
+ NotificationQueue.register(self.log_all, priority=50)
+
+ # these should be ignored by the queue
+ NotificationQueue.register(self.log_type1, 'type1')
+ NotificationQueue.register(self.log_type1_3, ['type1', 'type3'])
+ NotificationQueue.register(self.log_all)
+
+ def log_type1(self, val):
+ self.type1_ts.append(time.time())
+ self.type1.append(val)
+
+ def log_type2(self, val):
+ self.type2_ts.append(time.time())
+ self.type2.append(val)
+
+ def log_type1_3(self, val):
+ self.type1_3_ts.append(time.time())
+ self.type1_3.append(val)
+
+ def log_all(self, val):
+ self.all_ts.append(time.time())
+ self.all.append(val)
+
+ def clear(self):
+ self.type1 = []
+ self.type1_ts = []
+ self.type2 = []
+ self.type2_ts = []
+ self.type1_3 = []
+ self.type1_3_ts = []
+ self.all = []
+ self.all_ts = []
+ NotificationQueue.deregister(self.log_type1, 'type1')
+ NotificationQueue.deregister(self.log_type2, 'type2')
+ NotificationQueue.deregister(self.log_type1_3, ['type1', 'type3'])
+ NotificationQueue.deregister(self.log_all)
+
+
+class NotificationQueueTest(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ cls.listener = Listener()
+
+ def setUp(self):
+ self.listener.register()
+
+ def tearDown(self):
+ self.listener.clear()
+
+ def test_invalid_register(self):
+ with self.assertRaises(Exception) as ctx:
+ NotificationQueue.register(None, 1)
+ self.assertEqual(str(ctx.exception),
+ "n_types param is neither a string nor a list")
+
+ def test_notifications(self):
+ NotificationQueue.start_queue()
+ NotificationQueue.new_notification('type1', 1)
+ NotificationQueue.new_notification('type2', 2)
+ NotificationQueue.new_notification('type3', 3)
+ NotificationQueue.stop()
+ self.assertEqual(self.listener.type1, [1])
+ self.assertEqual(self.listener.type2, [2])
+ self.assertEqual(self.listener.type1_3, [1, 3])
+ self.assertEqual(self.listener.all, [1, 2, 3])
+
+ # validate priorities
+ self.assertLessEqual(self.listener.type1_3_ts[0], self.listener.all_ts[0])
+ self.assertLessEqual(self.listener.all_ts[0], self.listener.type1_ts[0])
+ self.assertLessEqual(self.listener.type2_ts[0], self.listener.all_ts[1])
+ self.assertLessEqual(self.listener.type1_3_ts[1], self.listener.all_ts[2])
+
+ def test_notifications2(self):
+ NotificationQueue.start_queue()
+ for i in range(0, 600):
+ typ = "type{}".format(i % 3 + 1)
+ if random.random() < 0.5:
+ time.sleep(0.002)
+ NotificationQueue.new_notification(typ, i)
+ NotificationQueue.stop()
+ for i in range(0, 600):
+ typ = i % 3 + 1
+ if typ == 1:
+ self.assertIn(i, self.listener.type1)
+ self.assertIn(i, self.listener.type1_3)
+ elif typ == 2:
+ self.assertIn(i, self.listener.type2)
+ elif typ == 3:
+ self.assertIn(i, self.listener.type1_3)
+ self.assertIn(i, self.listener.all)
+
+ self.assertEqual(len(self.listener.type1), 200)
+ self.assertEqual(len(self.listener.type2), 200)
+ self.assertEqual(len(self.listener.type1_3), 400)
+ self.assertEqual(len(self.listener.all), 600)
+
+ def test_deregister(self):
+ NotificationQueue.start_queue()
+ NotificationQueue.new_notification('type1', 1)
+ NotificationQueue.new_notification('type3', 3)
+ NotificationQueue.stop()
+ self.assertEqual(self.listener.type1, [1])
+ self.assertEqual(self.listener.type1_3, [1, 3])
+
+ NotificationQueue.start_queue()
+ NotificationQueue.deregister(self.listener.log_type1_3, ['type1'])
+ NotificationQueue.new_notification('type1', 4)
+ NotificationQueue.new_notification('type3', 5)
+ NotificationQueue.stop()
+ self.assertEqual(self.listener.type1, [1, 4])
+ self.assertEqual(self.listener.type1_3, [1, 3, 5])
diff --git a/src/pybind/mgr/dashboard/tests/test_osd.py b/src/pybind/mgr/dashboard/tests/test_osd.py
new file mode 100644
index 00000000..0f24d25e
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_osd.py
@@ -0,0 +1,240 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import uuid
+from contextlib import contextmanager
+
+from mock import patch
+
+from . import ControllerTestCase
+from ..controllers.osd import Osd
+from .. import mgr
+from .helper import update_dict
+
+try:
+ from typing import List, Dict, Any # pylint: disable=unused-import
+except ImportError:
+    pass  # Only required for type hints
+
+
+class OsdHelper(object):
+ DEFAULT_OSD_IDS = [0, 1, 2]
+
+ @staticmethod
+ def _gen_osdmap_tree_node(node_id, node_type, children=None, update_data=None):
+ # type: (int, str, List[int], Dict[str, Any]) -> Dict[str, Any]
+ assert node_type in ['root', 'host', 'osd']
+ if node_type in ['root', 'host']:
+ assert children is not None
+
+ node_types = {
+ 'root': {
+ 'id': node_id,
+ 'name': 'default',
+ 'type': 'root',
+ 'type_id': 10,
+ 'children': children,
+ },
+ 'host': {
+ 'id': node_id,
+ 'name': 'ceph-1',
+ 'type': 'host',
+ 'type_id': 1,
+ 'pool_weights': {},
+ 'children': children,
+ },
+ 'osd': {
+ 'id': node_id,
+ 'device_class': 'hdd',
+ 'type': 'osd',
+ 'type_id': 0,
+ 'crush_weight': 0.009796142578125,
+ 'depth': 2,
+ 'pool_weights': {},
+ 'exists': 1,
+ 'status': 'up',
+ 'reweight': 1.0,
+ 'primary_affinity': 1.0,
+ 'name': 'osd.{}'.format(node_id),
+ }
+ }
+ node = node_types[node_type]
+
+ return update_dict(node, update_data) if update_data else node
+
+ @staticmethod
+ def _gen_osd_stats(osd_id, update_data=None):
+ # type: (int, Dict[str, Any]) -> Dict[str, Any]
+ stats = {
+ 'osd': osd_id,
+ 'up_from': 11,
+ 'seq': 47244640581,
+ 'num_pgs': 50,
+ 'kb': 10551288,
+ 'kb_used': 1119736,
+ 'kb_used_data': 5504,
+ 'kb_used_omap': 0,
+ 'kb_used_meta': 1048576,
+ 'kb_avail': 9431552,
+ 'statfs': {
+ 'total': 10804518912,
+ 'available': 9657909248,
+ 'internally_reserved': 1073741824,
+ 'allocated': 5636096,
+ 'data_stored': 102508,
+ 'data_compressed': 0,
+ 'data_compressed_allocated': 0,
+ 'data_compressed_original': 0,
+ 'omap_allocated': 0,
+ 'internal_metadata': 1073741824
+ },
+ 'hb_peers': [0, 1],
+ 'snap_trim_queue_len': 0,
+ 'num_snap_trimming': 0,
+ 'op_queue_age_hist': {
+ 'histogram': [],
+ 'upper_bound': 1
+ },
+ 'perf_stat': {
+ 'commit_latency_ms': 0.0,
+ 'apply_latency_ms': 0.0,
+ 'commit_latency_ns': 0,
+ 'apply_latency_ns': 0
+ },
+ 'alerts': [],
+ }
+ return stats if not update_data else update_dict(stats, update_data)
+
+ @staticmethod
+ def _gen_osd_map_osd(osd_id):
+ # type: (int) -> Dict[str, Any]
+ return {
+ 'osd': osd_id,
+ 'up': 1,
+ 'in': 1,
+ 'weight': 1.0,
+ 'primary_affinity': 1.0,
+ 'last_clean_begin': 0,
+ 'last_clean_end': 0,
+ 'up_from': 5,
+ 'up_thru': 21,
+ 'down_at': 0,
+ 'lost_at': 0,
+ 'public_addrs': {
+ 'addrvec': [{
+ 'type': 'v2',
+ 'nonce': 1302,
+ 'addr': '172.23.0.2:6802'
+ }, {
+ 'type': 'v1',
+ 'nonce': 1302,
+ 'addr': '172.23.0.2:6803'
+ }]
+ },
+ 'cluster_addrs': {
+ 'addrvec': [{
+ 'type': 'v2',
+ 'nonce': 1302,
+ 'addr': '172.23.0.2:6804'
+ }, {
+ 'type': 'v1',
+ 'nonce': 1302,
+ 'addr': '172.23.0.2:6805'
+ }]
+ },
+ 'heartbeat_back_addrs': {
+ 'addrvec': [{
+ 'type': 'v2',
+ 'nonce': 1302,
+ 'addr': '172.23.0.2:6808'
+ }, {
+ 'type': 'v1',
+ 'nonce': 1302,
+ 'addr': '172.23.0.2:6809'
+ }]
+ },
+ 'heartbeat_front_addrs': {
+ 'addrvec': [{
+ 'type': 'v2',
+ 'nonce': 1302,
+ 'addr': '172.23.0.2:6806'
+ }, {
+ 'type': 'v1',
+ 'nonce': 1302,
+ 'addr': '172.23.0.2:6807'
+ }]
+ },
+ 'state': ['exists', 'up'],
+ 'uuid': str(uuid.uuid4()),
+ 'public_addr': '172.23.0.2:6803/1302',
+ 'cluster_addr': '172.23.0.2:6805/1302',
+ 'heartbeat_back_addr': '172.23.0.2:6809/1302',
+ 'heartbeat_front_addr': '172.23.0.2:6807/1302',
+ 'id': osd_id,
+ }
+
+ @classmethod
+ def gen_osdmap(cls, ids=None):
+ # type: (List[int]) -> Dict[str, Any]
+ return {str(i): cls._gen_osd_map_osd(i) for i in ids or cls.DEFAULT_OSD_IDS}
+
+ @classmethod
+ def gen_osd_stats(cls, ids=None):
+ # type: (List[int]) -> List[Dict[str, Any]]
+ return [cls._gen_osd_stats(i) for i in ids or cls.DEFAULT_OSD_IDS]
+
+ @classmethod
+ def gen_osdmap_tree_nodes(cls, ids=None):
+ # type: (List[int]) -> List[Dict[str, Any]]
+ return [
+ cls._gen_osdmap_tree_node(-1, 'root', [-3]),
+ cls._gen_osdmap_tree_node(-3, 'host', ids or cls.DEFAULT_OSD_IDS),
+ ] + [cls._gen_osdmap_tree_node(node_id, 'osd') for node_id in ids or cls.DEFAULT_OSD_IDS]
+
+ @classmethod
+ def gen_mgr_get_counter(cls):
+ # type: () -> List[List[int]]
+ return [[1551973855, 35], [1551973860, 35], [1551973865, 35], [1551973870, 35]]
+
+
+class OsdTest(ControllerTestCase):
+ @classmethod
+ def setup_server(cls):
+ Osd._cp_config['tools.authenticate.on'] = False # pylint: disable=protected-access
+ cls.setup_controllers([Osd])
+
+ @contextmanager
+ def _mock_osd_list(self, osd_stat_ids, osdmap_tree_node_ids, osdmap_ids):
+ def mgr_get_replacement(*args, **kwargs):
+ method = args[0] or kwargs['method']
+ if method == 'osd_stats':
+ return {'osd_stats': OsdHelper.gen_osd_stats(osd_stat_ids)}
+ if method == 'osd_map_tree':
+ return {'nodes': OsdHelper.gen_osdmap_tree_nodes(osdmap_tree_node_ids)}
+ raise NotImplementedError()
+
+ def mgr_get_counter_replacement(svc_type, _, path):
+ if svc_type == 'osd':
+ return {path: OsdHelper.gen_mgr_get_counter()}
+ raise NotImplementedError()
+
+ with patch.object(Osd, 'get_osd_map', return_value=OsdHelper.gen_osdmap(osdmap_ids)):
+ with patch.object(mgr, 'get', side_effect=mgr_get_replacement):
+ with patch.object(mgr, 'get_counter', side_effect=mgr_get_counter_replacement):
+ with patch.object(mgr, 'get_latest', return_value=1146609664):
+ yield
+
+ def test_osd_list_aggregation(self):
+ """
+ This test emulates the state of a cluster where an OSD has only been
+ removed (with e.g. `ceph osd rm`), but it hasn't been removed from the
+ CRUSH map. Ceph reports a health warning alongside a `1 osds exist in
+ the crush map but not in the osdmap` warning in such a case.
+ """
+ osds_actual = [0, 1]
+ osds_leftover = [0, 1, 2]
+ with self._mock_osd_list(osd_stat_ids=osds_actual, osdmap_tree_node_ids=osds_leftover,
+ osdmap_ids=osds_actual):
+ self._get('/api/osd')
+ self.assertEqual(len(self.jsonBody()), 2, 'It should display two OSDs without failure')
+ self.assertStatus(200)
diff --git a/src/pybind/mgr/dashboard/tests/test_plugin_debug.py b/src/pybind/mgr/dashboard/tests/test_plugin_debug.py
new file mode 100644
index 00000000..e6999782
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_plugin_debug.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+from . import CLICommandTestMixin, ControllerTestCase
+
+
+class TestPluginDebug(ControllerTestCase, CLICommandTestMixin):
+ @classmethod
+ def setup_server(cls):
+ # pylint: disable=protected-access
+ cls.setup_controllers([])
+
+ def setUp(self):
+ self.mock_kv_store()
+
+ def test_debug_disabled(self):
+ self.exec_cmd('debug', action='disable')
+
+ self._get('/api/unexisting_controller')
+ self.assertStatus(404)
+
+ data = self.jsonBody()
+ self.assertGreater(len(data), 0)
+ self.assertNotIn('traceback', data)
+ self.assertNotIn('version', data)
+ self.assertIn('request_id', data)
+
+ def test_debug_enabled(self):
+ self.exec_cmd('debug', action='enable')
+
+ self._get('/api/unexisting_controller')
+ self.assertStatus(404)
+
+ data = self.jsonBody()
+ self.assertGreater(len(data), 0)
+ self.assertIn('traceback', data)
+ self.assertIn('version', data)
+ self.assertIn('request_id', data)
diff --git a/src/pybind/mgr/dashboard/tests/test_pool.py b/src/pybind/mgr/dashboard/tests/test_pool.py
new file mode 100644
index 00000000..16d7d7b1
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_pool.py
@@ -0,0 +1,117 @@
+# -*- coding: utf-8 -*-
+# pylint: disable=protected-access
+import time
+import mock
+
+from . import ControllerTestCase
+from ..controllers.pool import Pool
+from ..controllers.task import Task
+from ..tools import NotificationQueue, TaskManager
+
+
+class MockTask(object):
+ percentages = []
+
+ def set_progress(self, percentage):
+ self.percentages.append(percentage)
+
+
+class PoolControllerTest(ControllerTestCase):
+ @classmethod
+ def setup_server(cls):
+ Task._cp_config['tools.authenticate.on'] = False
+ Pool._cp_config['tools.authenticate.on'] = False
+ cls.setup_controllers([Pool, Task])
+
+ @mock.patch('dashboard.controllers.pool.Pool._get')
+ @mock.patch('dashboard.services.ceph_service.CephService.send_command')
+ def test_creation(self, send_command, _get):
+ _get.side_effect = [{
+ 'pool_name': 'test-pool',
+ 'pg_num': 64,
+ 'pg_num_target': 63,
+ 'pg_placement_num': 64,
+ 'pg_placement_num_target': 63
+ }, {
+ 'pool_name': 'test-pool',
+ 'pg_num': 64,
+ 'pg_num_target': 64,
+ 'pg_placement_num': 64,
+ 'pg_placement_num_target': 64
+ }]
+ NotificationQueue.start_queue()
+ TaskManager.init()
+
+ def _send_cmd(*args, **kwargs): # pylint: disable=unused-argument
+ time.sleep(3)
+
+ send_command.side_effect = _send_cmd
+
+ self._task_post('/api/pool', {
+ 'pool': 'test-pool',
+ 'pool_type': 1,
+ 'pg_num': 64
+ }, 10)
+ self.assertStatus(201)
+ self.assertEqual(_get.call_count, 2)
+ NotificationQueue.stop()
+
+ @mock.patch('dashboard.controllers.pool.Pool._get')
+ def test_wait_for_pgs_without_waiting(self, _get):
+ _get.side_effect = [{
+ 'pool_name': 'test-pool',
+ 'pg_num': 32,
+ 'pg_num_target': 32,
+ 'pg_placement_num': 32,
+ 'pg_placement_num_target': 32
+ }]
+ Pool._wait_for_pgs('test-pool')
+ self.assertEqual(_get.call_count, 1)
+
+ @mock.patch('dashboard.controllers.pool.Pool._get')
+ def test_wait_for_pgs_with_waiting(self, _get):
+ task = MockTask()
+ orig_method = TaskManager.current_task
+ TaskManager.current_task = mock.MagicMock()
+ TaskManager.current_task.return_value = task
+ _get.side_effect = [{
+ 'pool_name': 'test-pool',
+ 'pg_num': 64,
+ 'pg_num_target': 32,
+ 'pg_placement_num': 64,
+ 'pg_placement_num_target': 64
+ }, {
+ 'pool_name': 'test-pool',
+ 'pg_num': 63,
+ 'pg_num_target': 32,
+ 'pg_placement_num': 62,
+ 'pg_placement_num_target': 32
+ }, {
+ 'pool_name': 'test-pool',
+ 'pg_num': 48,
+ 'pg_num_target': 32,
+ 'pg_placement_num': 48,
+ 'pg_placement_num_target': 32
+ }, {
+ 'pool_name': 'test-pool',
+ 'pg_num': 48,
+ 'pg_num_target': 32,
+ 'pg_placement_num': 33,
+ 'pg_placement_num_target': 32
+ }, {
+ 'pool_name': 'test-pool',
+ 'pg_num': 33,
+ 'pg_num_target': 32,
+ 'pg_placement_num': 32,
+ 'pg_placement_num_target': 32
+ }, {
+ 'pool_name': 'test-pool',
+ 'pg_num': 32,
+ 'pg_num_target': 32,
+ 'pg_placement_num': 32,
+ 'pg_placement_num_target': 32
+ }]
+ Pool._wait_for_pgs('test-pool')
+ self.assertEqual(_get.call_count, 6)
+ self.assertEqual(task.percentages, [0, 5, 50, 73, 98])
+ TaskManager.current_task = orig_method
diff --git a/src/pybind/mgr/dashboard/tests/test_prometheus.py b/src/pybind/mgr/dashboard/tests/test_prometheus.py
new file mode 100644
index 00000000..73dedbab
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_prometheus.py
@@ -0,0 +1,128 @@
+# -*- coding: utf-8 -*-
+# pylint: disable=protected-access
+from mock import patch
+
+from . import ControllerTestCase
+from .. import mgr
+from ..controllers.prometheus import Prometheus, PrometheusReceiver, PrometheusNotifications
+
+
+class PrometheusControllerTest(ControllerTestCase):
+ alert_host = 'http://alertmanager:9093/mock'
+ alert_host_api = alert_host + '/api/v1'
+
+ prometheus_host = 'http://prometheus:9090/mock'
+ prometheus_host_api = prometheus_host + '/api/v1'
+
+ @classmethod
+ def setup_server(cls):
+ settings = {
+ 'ALERTMANAGER_API_HOST': cls.alert_host,
+ 'PROMETHEUS_API_HOST': cls.prometheus_host
+ }
+ mgr.get_module_option.side_effect = settings.get
+ Prometheus._cp_config['tools.authenticate.on'] = False
+ PrometheusNotifications._cp_config['tools.authenticate.on'] = False
+ cls.setup_controllers([Prometheus, PrometheusNotifications, PrometheusReceiver])
+
+ def test_rules(self):
+ with patch('requests.request') as mock_request:
+ self._get('/api/prometheus/rules')
+ mock_request.assert_called_with('GET', self.prometheus_host_api + '/rules',
+ json=None, params={})
+
+ def test_list(self):
+ with patch('requests.request') as mock_request:
+ self._get('/api/prometheus')
+ mock_request.assert_called_with('GET', self.alert_host_api + '/alerts',
+ json=None, params={})
+
+ def test_get_silences(self):
+ with patch('requests.request') as mock_request:
+ self._get('/api/prometheus/silences')
+ mock_request.assert_called_with('GET', self.alert_host_api + '/silences',
+ json=None, params={})
+
+ def test_add_silence(self):
+ with patch('requests.request') as mock_request:
+ self._post('/api/prometheus/silence', {'id': 'new-silence'})
+ mock_request.assert_called_with('POST', self.alert_host_api + '/silences',
+ params=None, json={'id': 'new-silence'})
+
+ def test_update_silence(self):
+ with patch('requests.request') as mock_request:
+ self._post('/api/prometheus/silence', {'id': 'update-silence'})
+ mock_request.assert_called_with('POST', self.alert_host_api + '/silences',
+ params=None, json={'id': 'update-silence'})
+
+ def test_expire_silence(self):
+ with patch('requests.request') as mock_request:
+ self._delete('/api/prometheus/silence/0')
+ mock_request.assert_called_with('DELETE', self.alert_host_api + '/silence/0',
+ json=None, params=None)
+
+ def test_silences_empty_delete(self):
+ with patch('requests.request') as mock_request:
+ self._delete('/api/prometheus/silence')
+ mock_request.assert_not_called()
+
+ def test_post_on_receiver(self):
+ PrometheusReceiver.notifications = []
+ self._post('/api/prometheus_receiver', {'name': 'foo'})
+ self.assertEqual(len(PrometheusReceiver.notifications), 1)
+ notification = PrometheusReceiver.notifications[0]
+ self.assertEqual(notification['name'], 'foo')
+ self.assertTrue(len(notification['notified']) > 20)
+
+ def test_get_empty_list_with_no_notifications(self):
+ PrometheusReceiver.notifications = []
+ self._get('/api/prometheus/notifications')
+ self.assertStatus(200)
+ self.assertJsonBody([])
+ self._get('/api/prometheus/notifications?from=last')
+ self.assertStatus(200)
+ self.assertJsonBody([])
+
+ def test_get_all_notification(self):
+ PrometheusReceiver.notifications = []
+ self._post('/api/prometheus_receiver', {'name': 'foo'})
+ self._post('/api/prometheus_receiver', {'name': 'bar'})
+ self._get('/api/prometheus/notifications')
+ self.assertStatus(200)
+ self.assertJsonBody(PrometheusReceiver.notifications)
+
+ def test_get_last_notification_with_use_of_last_keyword(self):
+ PrometheusReceiver.notifications = []
+ self._post('/api/prometheus_receiver', {'name': 'foo'})
+ self._post('/api/prometheus_receiver', {'name': 'bar'})
+ self._get('/api/prometheus/notifications?from=last')
+ self.assertStatus(200)
+ last = PrometheusReceiver.notifications[1]
+ self.assertJsonBody([last])
+
+ def test_get_no_notification_with_unknown_id(self):
+ PrometheusReceiver.notifications = []
+ self._post('/api/prometheus_receiver', {'name': 'foo'})
+ self._post('/api/prometheus_receiver', {'name': 'bar'})
+ self._get('/api/prometheus/notifications?from=42')
+ self.assertStatus(200)
+ self.assertJsonBody([])
+
+ def test_get_no_notification_since_with_last_notification(self):
+ PrometheusReceiver.notifications = []
+ self._post('/api/prometheus_receiver', {'name': 'foo'})
+ notification = PrometheusReceiver.notifications[0]
+ self._get('/api/prometheus/notifications?from=' + notification['id'])
+ self.assertStatus(200)
+ self.assertJsonBody([])
+
+ def test_get_notifications_since_last_notification(self):
+ PrometheusReceiver.notifications = []
+ self._post('/api/prometheus_receiver', {'name': 'foobar'})
+ next_to_last = PrometheusReceiver.notifications[0]
+ self._post('/api/prometheus_receiver', {'name': 'foo'})
+ self._post('/api/prometheus_receiver', {'name': 'bar'})
+ self._get('/api/prometheus/notifications?from=' + next_to_last['id'])
+ forelast = PrometheusReceiver.notifications[1]
+ last = PrometheusReceiver.notifications[2]
+ self.assertEqual(self.jsonBody(), [forelast, last])
diff --git a/src/pybind/mgr/dashboard/tests/test_rbd_mirroring.py b/src/pybind/mgr/dashboard/tests/test_rbd_mirroring.py
new file mode 100644
index 00000000..b307c209
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_rbd_mirroring.py
@@ -0,0 +1,95 @@
+from __future__ import absolute_import
+
+import json
+import mock
+
+from . import ControllerTestCase
+from .. import mgr
+from ..controllers.summary import Summary
+from ..controllers.rbd_mirroring import RbdMirroringSummary
+
+
+mock_list_servers = [{
+ 'hostname': 'ceph-host',
+ 'services': [{'id': 3, 'type': 'rbd-mirror'}]
+}]
+
+mock_get_metadata = {
+ 'id': 1,
+ 'instance_id': 3,
+ 'ceph_version': 'ceph version 13.0.0-5719 mimic (dev)'
+}
+
+_status = {
+ 1: {
+ 'callouts': {},
+ 'image_local_count': 5,
+ 'image_remote_count': 6,
+ 'image_error_count': 7,
+ 'image_warning_count': 8,
+ 'name': 'pool_name'
+ }
+}
+
+mock_get_daemon_status = {
+ 'json': json.dumps(_status)
+}
+
+mock_osd_map = {
+ 'pools': [{
+ 'pool_name': 'rbd',
+ 'application_metadata': {'rbd'}
+ }]
+}
+
+
+class RbdMirroringSummaryControllerTest(ControllerTestCase):
+
+ @classmethod
+ def setup_server(cls):
+ mgr.list_servers.return_value = mock_list_servers
+ mgr.get_metadata = mock.Mock(return_value=mock_get_metadata)
+ mgr.get_daemon_status.return_value = mock_get_daemon_status
+ mgr.get.side_effect = lambda key: {
+ 'osd_map': mock_osd_map,
+ 'health': {'json': '{"status": 1}'},
+ 'fs_map': {'filesystems': []},
+ 'mgr_map': {
+ 'services': {
+ 'dashboard': 'https://ceph.dev:11000/'
+ },
+ }
+ }[key]
+ mgr.url_prefix = ''
+ mgr.get_mgr_id.return_value = 0
+ mgr.have_mon_connection.return_value = True
+ mgr.version = 'ceph version 13.1.0-534-g23d3751b89 ' \
+ '(23d3751b897b31d2bda57aeaf01acb5ff3c4a9cd) ' \
+ 'nautilus (dev)'
+
+ # pylint: disable=protected-access
+ RbdMirroringSummary._cp_config['tools.authenticate.on'] = False
+ Summary._cp_config['tools.authenticate.on'] = False
+ # pylint: enable=protected-access
+
+ cls.setup_controllers([RbdMirroringSummary, Summary], '/test')
+
+ @mock.patch('dashboard.controllers.rbd_mirroring.rbd')
+ def test_default(self, rbd_mock): # pylint: disable=W0613
+ self._get('/test/api/block/mirroring/summary')
+ result = self.jsonBody()
+ self.assertStatus(200)
+ self.assertEqual(result['status'], 0)
+ for k in ['daemons', 'pools', 'image_error', 'image_syncing', 'image_ready']:
+ self.assertIn(k, result['content_data'])
+
+ @mock.patch('dashboard.controllers.BaseController._has_permissions')
+ @mock.patch('dashboard.controllers.rbd_mirroring.rbd')
+ def test_summary(self, rbd_mock, has_perms_mock): # pylint: disable=W0613
+ """We're also testing `summary`, as it also uses code from `rbd_mirroring.py`"""
+ has_perms_mock.return_value = True
+ self._get('/test/api/summary')
+ self.assertStatus(200)
+
+ summary = self.jsonBody()['rbd_mirroring']
+ self.assertEqual(summary, {'errors': 0, 'warnings': 1})
diff --git a/src/pybind/mgr/dashboard/tests/test_rbd_service.py b/src/pybind/mgr/dashboard/tests/test_rbd_service.py
new file mode 100644
index 00000000..a4511fa9
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_rbd_service.py
@@ -0,0 +1,37 @@
+# -*- coding: utf-8 -*-
+# pylint: disable=dangerous-default-value,too-many-public-methods
+from __future__ import absolute_import
+
+import unittest
+try:
+ import mock
+except ImportError:
+ import unittest.mock as mock
+
+from ..services.rbd import RbdConfiguration
+
+
+class RbdServiceTest(unittest.TestCase):
+
+ @mock.patch('dashboard.services.rbd.RbdConfiguration._rbd.config_list')
+ @mock.patch('dashboard.mgr.get')
+ @mock.patch('dashboard.services.ceph_service.CephService.get_pool_list')
+ def test_pool_rbd_configuration_with_different_pg_states(self, get_pool_list, get, config_list):
+ get_pool_list.return_value = [{
+ 'pool_name': 'good-pool',
+ 'pool': 1,
+ }, {
+ 'pool_name': 'bad-pool',
+ 'pool': 2,
+ }]
+ get.return_value = {
+ 'by_pool': {
+ '1': {'active+clean': 32},
+ '2': {'creating+incomplete': 32},
+ }
+ }
+ config_list.return_value = [1, 2, 3]
+ config = RbdConfiguration('bad-pool')
+ self.assertEqual(config.list(), [])
+ config = RbdConfiguration('good-pool')
+ self.assertEqual(config.list(), [1, 2, 3])
diff --git a/src/pybind/mgr/dashboard/tests/test_rest_client.py b/src/pybind/mgr/dashboard/tests/test_rest_client.py
new file mode 100644
index 00000000..36ecd51a
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_rest_client.py
@@ -0,0 +1,94 @@
+# -*- coding: utf-8 -*-
+import unittest
+import requests.exceptions
+
+from mock import patch
+from urllib3.exceptions import MaxRetryError, ProtocolError
+from .. import mgr
+from ..rest_client import RequestException, RestClient
+
+
+class RestClientTest(unittest.TestCase):
+ def setUp(self):
+ settings = {'REST_REQUESTS_TIMEOUT': 45}
+ mgr.get_module_option.side_effect = settings.get
+
+ def test_timeout_auto_set(self):
+ with patch('requests.Session.request') as mock_request:
+ rest_client = RestClient('localhost', 8000)
+ rest_client.session.request('GET', '/test')
+ mock_request.assert_called_with('GET', '/test', timeout=45)
+
+ def test_timeout_auto_set_arg(self):
+ with patch('requests.Session.request') as mock_request:
+ rest_client = RestClient('localhost', 8000)
+ rest_client.session.request(
+ 'GET', '/test', None, None, None, None,
+ None, None, None)
+ mock_request.assert_called_with(
+ 'GET', '/test', None, None, None, None,
+ None, None, None, timeout=45)
+
+ def test_timeout_no_auto_set_kwarg(self):
+ with patch('requests.Session.request') as mock_request:
+ rest_client = RestClient('localhost', 8000)
+ rest_client.session.request('GET', '/test', timeout=20)
+ mock_request.assert_called_with('GET', '/test', timeout=20)
+
+ def test_timeout_no_auto_set_arg(self):
+ with patch('requests.Session.request') as mock_request:
+ rest_client = RestClient('localhost', 8000)
+ rest_client.session.request(
+ 'GET', '/test', None, None, None, None,
+ None, None, 40)
+ mock_request.assert_called_with(
+ 'GET', '/test', None, None, None, None,
+ None, None, 40)
+
+
+class RestClientDoRequestTest(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ cls.mock_requests = patch('requests.Session').start()
+ cls.rest_client = RestClient('localhost', 8000, 'UnitTest')
+
+ def test_do_request_exception_no_args(self):
+ self.mock_requests().get.side_effect = requests.exceptions.ConnectionError()
+ with self.assertRaises(RequestException) as context:
+ self.rest_client.do_request('GET', '/test')
+ self.assertEqual('UnitTest REST API cannot be reached. Please '
+ 'check your configuration and that the API '
+ 'endpoint is accessible',
+ context.exception.message)
+
+ def test_do_request_exception_args_1(self):
+ self.mock_requests().post.side_effect = requests.exceptions.ConnectionError(
+ MaxRetryError('Abc', 'http://xxx.yyy', 'too many redirects'))
+ with self.assertRaises(RequestException) as context:
+ self.rest_client.do_request('POST', '/test')
+ self.assertEqual('UnitTest REST API cannot be reached. Please '
+ 'check your configuration and that the API '
+ 'endpoint is accessible',
+ context.exception.message)
+
+ def test_do_request_exception_args_2(self):
+ self.mock_requests().put.side_effect = requests.exceptions.ConnectionError(
+ ProtocolError('Connection broken: xyz'))
+ with self.assertRaises(RequestException) as context:
+ self.rest_client.do_request('PUT', '/test')
+ self.assertEqual('UnitTest REST API cannot be reached. Please '
+ 'check your configuration and that the API '
+ 'endpoint is accessible',
+ context.exception.message)
+
+ def test_do_request_exception_nested_args(self):
+ self.mock_requests().delete.side_effect = requests.exceptions.ConnectionError(
+ MaxRetryError('Xyz', 'https://foo.bar',
+ Exception('Foo: [Errno -42] bla bla bla')))
+ with self.assertRaises(RequestException) as context:
+ self.rest_client.do_request('DELETE', '/test')
+ self.assertEqual('UnitTest REST API cannot be reached: bla '
+ 'bla bla [errno -42]. Please check your '
+ 'configuration and that the API endpoint '
+ 'is accessible',
+ context.exception.message)
diff --git a/src/pybind/mgr/dashboard/tests/test_rest_tasks.py b/src/pybind/mgr/dashboard/tests/test_rest_tasks.py
new file mode 100644
index 00000000..191ef812
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_rest_tasks.py
@@ -0,0 +1,86 @@
+# -*- coding: utf-8 -*-
+# pylint: disable=blacklisted-name
+
+import time
+
+from . import ControllerTestCase
+from ..controllers import Controller, RESTController, Task
+from ..controllers.task import Task as TaskController
+from ..tools import NotificationQueue, TaskManager
+
+
+@Controller('/test/task', secure=False)
+class TaskTest(RESTController):
+ sleep_time = 0.0
+
+ @Task('task/create', {'param': '{param}'}, wait_for=1.0)
+ def create(self, param):
+ time.sleep(TaskTest.sleep_time)
+ return {'my_param': param}
+
+ @Task('task/set', {'param': '{2}'}, wait_for=1.0)
+ def set(self, key, param=None):
+ time.sleep(TaskTest.sleep_time)
+ return {'key': key, 'my_param': param}
+
+ @Task('task/delete', ['{key}'], wait_for=1.0)
+ def delete(self, key):
+ # pylint: disable=unused-argument
+ time.sleep(TaskTest.sleep_time)
+
+ @Task('task/foo', ['{param}'])
+ @RESTController.Collection('POST')
+ def foo(self, param):
+ return {'my_param': param}
+
+ @Task('task/bar', ['{key}', '{param}'])
+ @RESTController.Resource('PUT')
+ def bar(self, key, param=None):
+ return {'my_param': param, 'key': key}
+
+ @Task('task/query', ['{param}'])
+ @RESTController.Collection('POST', query_params=['param'])
+ def query(self, param=None):
+ return {'my_param': param}
+
+
+class TaskControllerTest(ControllerTestCase):
+ @classmethod
+ def setup_server(cls):
+ # pylint: disable=protected-access
+ NotificationQueue.start_queue()
+ TaskManager.init()
+ TaskTest._cp_config['tools.authenticate.on'] = False
+ TaskController._cp_config['tools.authenticate.on'] = False
+ cls.setup_controllers([TaskTest, TaskController])
+
+ @classmethod
+ def tearDownClass(cls):
+ NotificationQueue.stop()
+
+ def setUp(self):
+ TaskTest.sleep_time = 0.0
+
+ def test_create_task(self):
+ self._task_post('/test/task', {'param': 'hello'})
+ self.assertJsonBody({'my_param': 'hello'})
+
+ def test_long_set_task(self):
+ TaskTest.sleep_time = 2.0
+ self._task_put('/test/task/2', {'param': 'hello'})
+ self.assertJsonBody({'key': '2', 'my_param': 'hello'})
+
+ def test_delete_task(self):
+ self._task_delete('/test/task/hello')
+
+ def test_foo_task(self):
+ self._task_post('/test/task/foo', {'param': 'hello'})
+ self.assertJsonBody({'my_param': 'hello'})
+
+ def test_bar_task(self):
+ self._task_put('/test/task/3/bar', {'param': 'hello'})
+ self.assertJsonBody({'my_param': 'hello', 'key': '3'})
+
+ def test_query_param(self):
+ self._task_post('/test/task/query')
+ self.assertJsonBody({'my_param': None})
diff --git a/src/pybind/mgr/dashboard/tests/test_rgw.py b/src/pybind/mgr/dashboard/tests/test_rgw.py
new file mode 100644
index 00000000..9f586be2
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_rgw.py
@@ -0,0 +1,129 @@
+import mock
+
+from .. import mgr
+from ..controllers.rgw import Rgw, RgwUser
+from . import ControllerTestCase # pylint: disable=no-name-in-module
+
+
class RgwControllerTestCase(ControllerTestCase):
    """Tests for the ``/api/rgw/status`` endpoint."""

    @classmethod
    def setup_server(cls):
        # pylint: disable=protected-access
        Rgw._cp_config['tools.authenticate.on'] = False
        cls.setup_controllers([Rgw], '/test')

    def test_status_no_service(self):
        # With no RGW daemon registered, the status must report unavailable.
        mgr.list_servers.return_value = []
        expected = {
            'available': False,
            'message': 'No RGW service is running.'
        }
        self._get('/test/api/rgw/status')
        self.assertStatus(200)
        self.assertJsonBody(expected)
+
+
class RgwUserControllerTestCase(ControllerTestCase):
    """Tests for ``/api/rgw/user`` with the RGW admin-ops proxy mocked out."""

    @classmethod
    def setup_server(cls):
        RgwUser._cp_config['tools.authenticate.on'] = False  # pylint: disable=protected-access
        cls.setup_controllers([RgwUser], '/test')

    @mock.patch('dashboard.controllers.rgw.RgwRESTController.proxy')
    def test_user_list(self, mock_proxy):
        # Single, non-truncated page -> exactly one proxy call.
        mock_proxy.side_effect = [{
            'count': 3,
            'keys': ['test1', 'test2', 'test3'],
            'truncated': False
        }]
        self._get('/test/api/rgw/user')
        self.assertStatus(200)
        mock_proxy.assert_has_calls([
            mock.call('GET', 'user?list', {})
        ])
        self.assertJsonBody(['test1', 'test2', 'test3'])

    @mock.patch('dashboard.controllers.rgw.RgwRESTController.proxy')
    def test_user_list_marker(self, mock_proxy):
        # Truncated first page: the controller must follow the marker with a
        # second request and concatenate the keys of both pages.
        mock_proxy.side_effect = [{
            'count': 3,
            'keys': ['test1', 'test2', 'test3'],
            'marker': 'foo:bar',
            'truncated': True
        }, {
            'count': 1,
            'keys': ['admin'],
            'truncated': False
        }]
        self._get('/test/api/rgw/user')
        self.assertStatus(200)
        mock_proxy.assert_has_calls([
            mock.call('GET', 'user?list', {}),
            mock.call('GET', 'user?list', {'marker': 'foo:bar'})
        ])
        self.assertJsonBody(['test1', 'test2', 'test3', 'admin'])

    @mock.patch('dashboard.controllers.rgw.RgwRESTController.proxy')
    def test_user_list_duplicate_marker(self, mock_proxy):
        # A repeating marker would make pagination loop forever; the
        # controller is expected to bail out with an internal error.
        mock_proxy.side_effect = [{
            'count': 3,
            'keys': ['test1', 'test2', 'test3'],
            'marker': 'foo:bar',
            'truncated': True
        }, {
            'count': 3,
            'keys': ['test4', 'test5', 'test6'],
            'marker': 'foo:bar',
            'truncated': True
        }, {
            'count': 1,
            'keys': ['admin'],
            'truncated': False
        }]
        self._get('/test/api/rgw/user')
        self.assertStatus(500)

    @mock.patch('dashboard.controllers.rgw.RgwRESTController.proxy')
    def test_user_list_invalid_marker(self, mock_proxy):
        # An empty marker on a truncated page is invalid -> internal error.
        mock_proxy.side_effect = [{
            'count': 3,
            'keys': ['test1', 'test2', 'test3'],
            'marker': 'foo:bar',
            'truncated': True
        }, {
            'count': 3,
            'keys': ['test4', 'test5', 'test6'],
            'marker': '',
            'truncated': True
        }, {
            'count': 1,
            'keys': ['admin'],
            'truncated': False
        }]
        self._get('/test/api/rgw/user')
        self.assertStatus(500)

    @mock.patch('dashboard.controllers.rgw.RgwRESTController.proxy')
    @mock.patch.object(RgwUser, '_keys_allowed')
    def test_user_get_with_keys(self, keys_allowed, mock_proxy):
        # Caller is allowed to see keys -> both key lists stay in the body.
        keys_allowed.return_value = True
        mock_proxy.return_value = {
            'tenant': '',
            'user_id': 'my_user_id',
            'keys': [],
            'swift_keys': []
        }
        self._get('/test/api/rgw/user/testuser')
        self.assertStatus(200)
        self.assertInJsonBody('keys')
        self.assertInJsonBody('swift_keys')

    @mock.patch('dashboard.controllers.rgw.RgwRESTController.proxy')
    @mock.patch.object(RgwUser, '_keys_allowed')
    def test_user_get_without_keys(self, keys_allowed, mock_proxy):
        # Caller is not allowed to see keys -> they are stripped from the body.
        keys_allowed.return_value = False
        mock_proxy.return_value = {
            'tenant': '',
            'user_id': 'my_user_id',
            'keys': [],
            'swift_keys': []
        }
        self._get('/test/api/rgw/user/testuser')
        self.assertStatus(200)
        self.assertNotIn('keys', self.jsonBody())
        self.assertNotIn('swift_keys', self.jsonBody())
diff --git a/src/pybind/mgr/dashboard/tests/test_rgw_client.py b/src/pybind/mgr/dashboard/tests/test_rgw_client.py
new file mode 100644
index 00000000..0824665f
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_rgw_client.py
@@ -0,0 +1,112 @@
+# -*- coding: utf-8 -*-
+# pylint: disable=too-many-public-methods
+import unittest
+
+from ..services.rgw_client import RgwClient, _parse_frontend_config
+from ..settings import Settings
+from . import KVStoreMockMixin
+
+
class RgwClientTest(unittest.TestCase, KVStoreMockMixin):
    """Verify that RgwClient instances honour the SSL-verify setting."""

    def setUp(self):
        # Start from a clean slate: drop any cached client instances.
        RgwClient._user_instances.clear()  # pylint: disable=protected-access
        self.mock_kv_store()
        credentials = {
            'RGW_API_ACCESS_KEY': 'klausmustermann',
            'RGW_API_SECRET_KEY': 'supergeheim',
            'RGW_API_HOST': 'localhost',
            'RGW_API_USER_ID': 'rgwadmin',
        }
        self.CONFIG_KEY_DICT.update(credentials)

    def test_ssl_verify(self):
        Settings.RGW_API_SSL_VERIFY = True
        self.assertTrue(RgwClient.admin_instance().session.verify)

    def test_no_ssl_verify(self):
        Settings.RGW_API_SSL_VERIFY = False
        self.assertFalse(RgwClient.admin_instance().session.verify)
+
+
class RgwClientHelperTest(unittest.TestCase):
    """Tests for ``_parse_frontend_config`` port/SSL detection."""

    def _assert_parsed(self, config, port, ssl):
        # Successful parse yields a (port, is_ssl) tuple.
        self.assertEqual(_parse_frontend_config(config), (port, ssl))

    def _assert_fails(self, config):
        # Unparseable configs raise LookupError with a fixed message.
        with self.assertRaises(LookupError) as ctx:
            _parse_frontend_config(config)
        self.assertEqual(
            str(ctx.exception),
            'Failed to determine RGW port from "{}"'.format(config))

    def test_parse_frontend_config_1(self):
        self._assert_parsed('beast port=8000', 8000, False)

    def test_parse_frontend_config_2(self):
        self._assert_parsed('beast port=80 port=8000', 80, False)

    def test_parse_frontend_config_3(self):
        self._assert_parsed('beast ssl_port=443 port=8000', 443, True)

    def test_parse_frontend_config_4(self):
        self._assert_parsed('beast endpoint=192.168.0.100:8000', 8000, False)

    def test_parse_frontend_config_5(self):
        self._assert_parsed('beast endpoint=[::1]', 80, False)

    def test_parse_frontend_config_6(self):
        self._assert_parsed('beast ssl_endpoint=192.168.0.100:8443', 8443, True)

    def test_parse_frontend_config_7(self):
        self._assert_parsed('beast ssl_endpoint=192.168.0.100', 443, True)

    def test_parse_frontend_config_8(self):
        self._assert_parsed('beast ssl_endpoint=[::1]:8443 endpoint=192.0.2.3:80', 8443, True)

    def test_parse_frontend_config_9(self):
        self._assert_parsed('beast port=8080 endpoint=192.0.2.3:80', 8080, False)

    def test_parse_frontend_config_10(self):
        self._assert_parsed('beast ssl_endpoint=192.0.2.3:8443 port=8080', 8443, True)

    def test_parse_frontend_config_11(self):
        self._assert_parsed('civetweb port=8000s', 8000, True)

    def test_parse_frontend_config_12(self):
        self._assert_parsed('civetweb port=443s port=8000', 443, True)

    def test_parse_frontend_config_13(self):
        self._assert_parsed('civetweb port=192.0.2.3:80', 80, False)

    def test_parse_frontend_config_14(self):
        self._assert_parsed('civetweb port=172.5.2.51:8080s', 8080, True)

    def test_parse_frontend_config_15(self):
        self._assert_parsed('civetweb port=[::]:8080', 8080, False)

    def test_parse_frontend_config_16(self):
        self._assert_parsed('civetweb port=ip6-localhost:80s', 80, True)

    def test_parse_frontend_config_17(self):
        self._assert_parsed('civetweb port=[2001:0db8::1234]:80', 80, False)

    def test_parse_frontend_config_18(self):
        self._assert_parsed('civetweb port=[::1]:8443s', 8443, True)

    def test_parse_frontend_config_19(self):
        self._assert_parsed('civetweb port=127.0.0.1:8443s+8000', 8443, True)

    def test_parse_frontend_config_20(self):
        self._assert_parsed('civetweb port=127.0.0.1:8080+443s', 8080, False)

    def test_parse_frontend_config_21(self):
        self._assert_fails('civetweb port=xyz')

    def test_parse_frontend_config_22(self):
        self._assert_fails('civetweb')

    def test_parse_frontend_config_23(self):
        self._assert_fails('mongoose port=8080')
diff --git a/src/pybind/mgr/dashboard/tests/test_settings.py b/src/pybind/mgr/dashboard/tests/test_settings.py
new file mode 100644
index 00000000..2d050429
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_settings.py
@@ -0,0 +1,191 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import errno
+import unittest
+
+from mgr_module import ERROR_MSG_EMPTY_INPUT_FILE
+
+from . import KVStoreMockMixin, ControllerTestCase
+from .. import settings
+from ..controllers.settings import Settings as SettingsController
+from ..settings import Settings, handle_option_command
+
+
class SettingsTest(unittest.TestCase, KVStoreMockMixin):
    """Tests for Settings attribute access and the CLI option commands."""

    @classmethod
    def setUpClass(cls):
        # pylint: disable=protected-access
        # Inject a small, well-known option set and rebuild the command map
        # so the get-/set-/reset- commands below exist.
        settings.Options.GRAFANA_API_HOST = ('localhost', str)
        settings.Options.GRAFANA_API_PORT = (3000, int)
        settings.Options.GRAFANA_ENABLED = (False, bool)
        settings._OPTIONS_COMMAND_MAP = settings._options_command_map()

    def setUp(self):
        self.mock_kv_store()
        # Reset values a previous test may have modified.
        if Settings.GRAFANA_API_HOST != 'localhost':
            Settings.GRAFANA_API_HOST = 'localhost'
        if Settings.GRAFANA_API_PORT != 3000:
            Settings.GRAFANA_API_PORT = 3000

    def test_get_setting(self):
        self.assertEqual(Settings.GRAFANA_API_HOST, 'localhost')
        self.assertEqual(Settings.GRAFANA_API_PORT, 3000)
        self.assertEqual(Settings.GRAFANA_ENABLED, False)

    def test_set_setting(self):
        Settings.GRAFANA_API_HOST = 'grafanahost'
        self.assertEqual(Settings.GRAFANA_API_HOST, 'grafanahost')

        Settings.GRAFANA_API_PORT = 50
        self.assertEqual(Settings.GRAFANA_API_PORT, 50)

        Settings.GRAFANA_ENABLED = True
        self.assertEqual(Settings.GRAFANA_ENABLED, True)

    def test_get_cmd(self):
        # handle_option_command returns (retcode, stdout, stderr).
        r, out, err = handle_option_command(
            {'prefix': 'dashboard get-grafana-api-port'},
            None
        )
        self.assertEqual(r, 0)
        self.assertEqual(out, '3000')
        self.assertEqual(err, '')

    def test_set_cmd(self):
        r, out, err = handle_option_command(
            {'prefix': 'dashboard set-grafana-api-port',
             'value': '4000'},
            None
        )
        self.assertEqual(r, 0)
        self.assertEqual(out, 'Option GRAFANA_API_PORT updated')
        self.assertEqual(err, '')

    def test_set_secret_empty(self):
        # Secrets are passed via inbuf; a missing inbuf must be rejected.
        r, out, err = handle_option_command(
            {'prefix': 'dashboard set-rgw-api-secret-key'},
            None
        )
        self.assertEqual(r, -errno.EINVAL)
        self.assertEqual(out, '')
        self.assertEqual(err, ERROR_MSG_EMPTY_INPUT_FILE)

    def test_set_secret(self):
        r, out, err = handle_option_command(
            {'prefix': 'dashboard set-rgw-api-secret-key'},
            'my-secret'
        )
        self.assertEqual(r, 0)
        self.assertEqual(out, 'Option RGW_API_SECRET_KEY updated')
        self.assertEqual(err, '')

    def test_reset_cmd(self):
        r, out, err = handle_option_command(
            {'prefix': 'dashboard reset-grafana-enabled'},
            None
        )
        self.assertEqual(r, 0)
        self.assertEqual(out, 'Option {} reset to default value "{}"'.format(
            'GRAFANA_ENABLED', Settings.GRAFANA_ENABLED))
        self.assertEqual(err, '')

    def test_inv_cmd(self):
        # Unknown options map to ENOSYS ("command not found").
        r, out, err = handle_option_command(
            {'prefix': 'dashboard get-non-existent-option'},
            None
        )
        self.assertEqual(r, -errno.ENOSYS)
        self.assertEqual(out, '')
        self.assertEqual(err, "Command not found "
                              "'dashboard get-non-existent-option'")

    def test_sync(self):
        # Values set via attribute access and via CLI command must agree.
        Settings.GRAFANA_API_PORT = 5000
        r, out, err = handle_option_command(
            {'prefix': 'dashboard get-grafana-api-port'},
            None
        )
        self.assertEqual(r, 0)
        self.assertEqual(out, '5000')
        self.assertEqual(err, '')
        r, out, err = handle_option_command(
            {'prefix': 'dashboard set-grafana-api-host',
             'value': 'new-local-host'},
            None
        )
        self.assertEqual(r, 0)
        self.assertEqual(out, 'Option GRAFANA_API_HOST updated')
        self.assertEqual(err, '')
        self.assertEqual(Settings.GRAFANA_API_HOST, 'new-local-host')

    def test_attribute_error(self):
        with self.assertRaises(AttributeError) as ctx:
            _ = Settings.NON_EXISTENT_OPTION

        self.assertEqual(str(ctx.exception),
                         "type object 'Options' has no attribute 'NON_EXISTENT_OPTION'")
+
+
class SettingsControllerTest(ControllerTestCase, KVStoreMockMixin):
    """Tests for the ``/api/settings`` REST endpoints."""

    @classmethod
    def setup_server(cls):
        # pylint: disable=protected-access
        SettingsController._cp_config['tools.authenticate.on'] = False
        cls.setup_controllers([SettingsController])

    def setUp(self):
        self.mock_kv_store()

    def test_settings_list(self):
        self._get('/api/settings')
        data = self.jsonBody()
        self.assertTrue(len(data) > 0)
        self.assertStatus(200)
        # Every entry exposes the full option descriptor.
        for field in ('default', 'type', 'name', 'value'):
            self.assertIn(field, data[0].keys())

    def test_rgw_daemon_get(self):
        # NOTE(review): the method name looks like a copy/paste leftover from
        # the RGW tests - it actually verifies GET of a single Grafana
        # setting. Consider renaming to e.g. test_settings_get.
        self._get('/api/settings/grafana-api-username')
        self.assertStatus(200)
        self.assertJsonBody({
            u'default': u'admin',
            u'type': u'str',
            u'name': u'GRAFANA_API_USERNAME',
            u'value': u'admin',
        })

    def test_set(self):
        self._put('/api/settings/GRAFANA_API_USERNAME', {'value': 'foo'},)
        self.assertStatus(200)

        self._get('/api/settings/GRAFANA_API_USERNAME')
        self.assertStatus(200)
        self.assertInJsonBody('default')
        self.assertInJsonBody('type')
        self.assertInJsonBody('name')
        self.assertInJsonBody('value')
        self.assertEqual(self.jsonBody()['value'], 'foo')

    def test_bulk_set(self):
        # A single PUT may update several settings at once.
        self._put('/api/settings', {
            'GRAFANA_API_USERNAME': 'foo',
            'GRAFANA_API_HOST': 'somehost',
        })
        self.assertStatus(200)

        # Fix: the original issued the same GET/assert on
        # grafana-api-username twice (copy/paste duplication); each updated
        # setting is now verified exactly once.
        self._get('/api/settings/grafana-api-username')
        self.assertStatus(200)
        self.assertEqual(self.jsonBody()['value'], 'foo')

        self._get('/api/settings/grafana-api-host')
        self.assertStatus(200)
        self.assertEqual(self.jsonBody()['value'], 'somehost')
diff --git a/src/pybind/mgr/dashboard/tests/test_sso.py b/src/pybind/mgr/dashboard/tests/test_sso.py
new file mode 100644
index 00000000..f8681b89
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_sso.py
@@ -0,0 +1,157 @@
+# -*- coding: utf-8 -*-
+# pylint: disable=dangerous-default-value,too-many-public-methods
+from __future__ import absolute_import
+
+import errno
+import unittest
+
+from . import CmdException, exec_dashboard_cmd, KVStoreMockMixin
+from ..services.sso import handle_sso_command, load_sso_db
+
+
class AccessControlTest(unittest.TestCase, KVStoreMockMixin):
    """Tests for the `ceph dashboard sso ...` command handling.

    NOTE(review): the class name looks copied from test_access_control.py;
    a name like SsoTest would better match this module.
    """

    # Minimal SAML2 IdP metadata document used by `sso setup saml2`.
    # Certificate/signature values are placeholders; only the structure and
    # the SSO/SLO service locations matter to the parser.
    IDP_METADATA = '''<?xml version="1.0"?>
<md:EntityDescriptor xmlns:md="urn:oasis:names:tc:SAML:2.0:metadata"
                     xmlns:ds="http://www.w3.org/2000/09/xmldsig#"
                     entityID="https://testidp.ceph.com/simplesamlphp/saml2/idp/metadata.php"
                     ID="pfx8ca6fbd7-6062-d4a9-7995-0730aeb8114f">
  <ds:Signature>
    <ds:SignedInfo>
      <ds:CanonicalizationMethod Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"/>
      <ds:SignatureMethod Algorithm="http://www.w3.org/2001/04/xmldsig-more#rsa-sha256"/>
      <ds:Reference URI="#pfx8ca6fbd7-6062-d4a9-7995-0730aeb8114f">
        <ds:Transforms>
          <ds:Transform Algorithm="http://www.w3.org/2000/09/xmldsig#enveloped-signature"/>
          <ds:Transform Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"/>
        </ds:Transforms>
        <ds:DigestMethod Algorithm="http://www.w3.org/2001/04/xmlenc#sha256"/>
        <ds:DigestValue>v6V8fooEUeq/LO/59JCfJF69Tw3ohN52OGAY6X3jX8w=</ds:DigestValue>
      </ds:Reference>
    </ds:SignedInfo>
    <ds:SignatureValue>IDP_SIGNATURE_VALUE</ds:SignatureValue>
    <ds:KeyInfo>
      <ds:X509Data>
        <ds:X509Certificate>IDP_X509_CERTIFICATE</ds:X509Certificate>
      </ds:X509Data>
    </ds:KeyInfo>
  </ds:Signature>
  <md:IDPSSODescriptor protocolSupportEnumeration="urn:oasis:names:tc:SAML:2.0:protocol">
    <md:KeyDescriptor use="signing">
      <ds:KeyInfo xmlns:ds="http://www.w3.org/2000/09/xmldsig#">
        <ds:X509Data>
          <ds:X509Certificate>IDP_X509_CERTIFICATE</ds:X509Certificate>
        </ds:X509Data>
      </ds:KeyInfo>
    </md:KeyDescriptor>
    <md:KeyDescriptor use="encryption">
      <ds:KeyInfo xmlns:ds="http://www.w3.org/2000/09/xmldsig#">
        <ds:X509Data>
          <ds:X509Certificate>IDP_X509_CERTIFICATE</ds:X509Certificate>
        </ds:X509Data>
      </ds:KeyInfo>
    </md:KeyDescriptor>
    <md:SingleLogoutService Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"
                            Location="https://testidp.ceph.com/simplesamlphp/saml2/idp/SingleLogoutService.php"/>
    <md:NameIDFormat>urn:oasis:names:tc:SAML:2.0:nameid-format:transient</md:NameIDFormat>
    <md:SingleSignOnService Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"
                            Location="https://testidp.ceph.com/simplesamlphp/saml2/idp/SSOService.php"/>
  </md:IDPSSODescriptor>
</md:EntityDescriptor>'''

    def setUp(self):
        self.mock_kv_store()
        load_sso_db()

    @classmethod
    def exec_cmd(cls, cmd, **kwargs):
        return exec_dashboard_cmd(handle_sso_command, cmd, **kwargs)

    def validate_onelogin_settings(self, onelogin_settings, ceph_dashboard_base_url, uid,
                                   sp_x509cert, sp_private_key, signature_enabled):
        """Assert the generated OneLogin SP/security settings are complete."""
        self.assertIn('sp', onelogin_settings)
        self.assertIn('entityId', onelogin_settings['sp'])
        self.assertEqual(onelogin_settings['sp']['entityId'],
                         '{}/auth/saml2/metadata'.format(ceph_dashboard_base_url))

        self.assertIn('assertionConsumerService', onelogin_settings['sp'])
        self.assertIn('url', onelogin_settings['sp']['assertionConsumerService'])
        self.assertEqual(onelogin_settings['sp']['assertionConsumerService']['url'],
                         '{}/auth/saml2'.format(ceph_dashboard_base_url))

        # Exactly one requested attribute: the user-id attribute.
        self.assertIn('attributeConsumingService', onelogin_settings['sp'])
        attribute_consuming_service = onelogin_settings['sp']['attributeConsumingService']
        self.assertIn('requestedAttributes', attribute_consuming_service)
        requested_attributes = attribute_consuming_service['requestedAttributes']
        self.assertEqual(len(requested_attributes), 1)
        self.assertIn('name', requested_attributes[0])
        self.assertEqual(requested_attributes[0]['name'], uid)

        self.assertIn('singleLogoutService', onelogin_settings['sp'])
        self.assertIn('url', onelogin_settings['sp']['singleLogoutService'])
        self.assertEqual(onelogin_settings['sp']['singleLogoutService']['url'],
                         '{}/auth/saml2/logout'.format(ceph_dashboard_base_url))

        self.assertIn('x509cert', onelogin_settings['sp'])
        self.assertEqual(onelogin_settings['sp']['x509cert'], sp_x509cert)

        self.assertIn('privateKey', onelogin_settings['sp'])
        self.assertEqual(onelogin_settings['sp']['privateKey'], sp_private_key)

        # All signing-related flags follow a single on/off switch.
        self.assertIn('security', onelogin_settings)
        self.assertIn('authnRequestsSigned', onelogin_settings['security'])
        self.assertEqual(onelogin_settings['security']['authnRequestsSigned'], signature_enabled)

        self.assertIn('logoutRequestSigned', onelogin_settings['security'])
        self.assertEqual(onelogin_settings['security']['logoutRequestSigned'], signature_enabled)

        self.assertIn('logoutResponseSigned', onelogin_settings['security'])
        self.assertEqual(onelogin_settings['security']['logoutResponseSigned'], signature_enabled)

        self.assertIn('wantMessagesSigned', onelogin_settings['security'])
        self.assertEqual(onelogin_settings['security']['wantMessagesSigned'], signature_enabled)

        self.assertIn('wantAssertionsSigned', onelogin_settings['security'])
        self.assertEqual(onelogin_settings['security']['wantAssertionsSigned'], signature_enabled)

    def test_sso_saml2_setup(self):
        result = self.exec_cmd('sso setup saml2',
                               ceph_dashboard_base_url='https://cephdashboard.local',
                               idp_metadata=self.IDP_METADATA)
        self.validate_onelogin_settings(result, 'https://cephdashboard.local', 'uid', '', '',
                                        False)

    def test_sso_enable_saml2(self):
        # Enabling before setup must be rejected with EPERM ...
        with self.assertRaises(CmdException) as ctx:
            self.exec_cmd('sso enable saml2')

        self.assertEqual(ctx.exception.retcode, -errno.EPERM)
        self.assertEqual(str(ctx.exception), 'Single Sign-On is not configured: '
                                             'use `ceph dashboard sso setup saml2`')

        # ... and succeed after setup.
        self.exec_cmd('sso setup saml2',
                      ceph_dashboard_base_url='https://cephdashboard.local',
                      idp_metadata=self.IDP_METADATA)

        result = self.exec_cmd('sso enable saml2')
        self.assertEqual(result, 'SSO is "enabled" with "SAML2" protocol.')

    def test_sso_disable(self):
        result = self.exec_cmd('sso disable')
        self.assertEqual(result, 'SSO is "disabled".')

    def test_sso_status(self):
        result = self.exec_cmd('sso status')
        self.assertEqual(result, 'SSO is "disabled".')

        # `sso setup saml2` implicitly enables SSO.
        self.exec_cmd('sso setup saml2',
                      ceph_dashboard_base_url='https://cephdashboard.local',
                      idp_metadata=self.IDP_METADATA)

        result = self.exec_cmd('sso status')
        self.assertEqual(result, 'SSO is "enabled" with "SAML2" protocol.')

    def test_sso_show_saml2(self):
        result = self.exec_cmd('sso show saml2')
        self.assertEqual(result, {
            'onelogin_settings': {}
        })
diff --git a/src/pybind/mgr/dashboard/tests/test_task.py b/src/pybind/mgr/dashboard/tests/test_task.py
new file mode 100644
index 00000000..c10af640
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_task.py
@@ -0,0 +1,433 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import json
+import unittest
+import threading
+import time
+from collections import defaultdict
+from functools import partial
+
+from ..services.exception import serialize_dashboard_exception
+from ..tools import NotificationQueue, TaskManager, TaskExecutor
+
+
class MyTask(object):
    """Test helper that runs a dummy operation through TaskManager.

    Depending on the flags it runs synchronously or asynchronously, blocks
    until resume(), fails, and/or serializes exceptions with a handler.
    """

    class CallbackExecutor(TaskExecutor):
        """Executor that injects its ``callback`` as the op's first argument."""

        def __init__(self, fail, progress):
            super(MyTask.CallbackExecutor, self).__init__()
            self.fail = fail
            self.progress = progress

        def init(self, task):
            super(MyTask.CallbackExecutor, self).init(task)
            # Prepend the completion callback to the task's positional args.
            args = [self.callback]
            args.extend(self.task.fn_args)
            self.task.fn_args = args

        def callback(self, result):
            # Called by the async op when the background work is done.
            self.task.set_progress(self.progress)
            if self.fail:
                self.finish(None, Exception("Task Unexpected Exception"))
            else:
                self.finish(result, None)

    # pylint: disable=too-many-arguments
    def __init__(self, op_seconds, wait=False, fail=False, progress=50,
                 is_async=False, handle_ex=False):
        self.op_seconds = op_seconds  # seconds the operation sleeps
        self.wait = wait              # block on self._event until resume()
        self.fail = fail              # True, or "premature" for async pre-failure
        self.progress = progress
        self.is_async = is_async
        self.handle_ex = handle_ex
        self._event = threading.Event()

    def run(self, ns, timeout=None):
        """Run the op as task ``ns`` and wait up to ``timeout`` for it."""
        args = ['dummy arg']
        kwargs = {'dummy': 'arg'}
        h_ex = partial(serialize_dashboard_exception,
                       include_http_status=True) if self.handle_ex else None
        if not self.is_async:
            task = TaskManager.run(
                ns, self.metadata(), self.task_op, args, kwargs,
                exception_handler=h_ex)
        else:
            task = TaskManager.run(
                ns, self.metadata(), self.task_async_op, args, kwargs,
                executor=MyTask.CallbackExecutor(self.fail, self.progress),
                exception_handler=h_ex)
        return task.wait(timeout)

    def task_op(self, *args, **kwargs):
        """Synchronous operation: sleep, set progress, maybe fail/block."""
        time.sleep(self.op_seconds)
        TaskManager.current_task().set_progress(self.progress)
        if self.fail:
            raise Exception("Task Unexpected Exception")
        if self.wait:
            self._event.wait()
        return {'args': list(args), 'kwargs': kwargs}

    def task_async_op(self, callback, *args, **kwargs):
        """Asynchronous operation: finish via ``callback`` from a thread."""
        if self.fail == "premature":
            # Fail before the background thread even starts.
            raise Exception("Task Unexpected Exception")

        def _run_bg():
            time.sleep(self.op_seconds)
            if self.wait:
                self._event.wait()
            callback({'args': list(args), 'kwargs': kwargs})

        worker = threading.Thread(target=_run_bg)
        worker.start()

    def resume(self):
        """Unblock an operation created with ``wait=True``."""
        self._event.set()

    def metadata(self):
        """Return the task metadata mirroring the constructor flags."""
        return {
            'op_seconds': self.op_seconds,
            'wait': self.wait,
            'fail': self.fail,
            'progress': self.progress,
            'is_async': self.is_async,
            'handle_ex': self.handle_ex
        }
+
+
class TaskTest(unittest.TestCase):
    """End-to-end TaskManager tests: sync/async ops, failures, cleanup and
    the serializable task representation."""

    # One event per task name; set when 'cd_task_finished' fires for it.
    TASK_FINISHED_MAP = defaultdict(threading.Event)

    @classmethod
    def _handle_task(cls, task):
        cls.TASK_FINISHED_MAP[task.name].set()

    @classmethod
    def wait_for_task(cls, name):
        cls.TASK_FINISHED_MAP[name].wait()

    @classmethod
    def setUpClass(cls):
        NotificationQueue.start_queue()
        TaskManager.init()
        NotificationQueue.register(cls._handle_task, 'cd_task_finished',
                                   priority=100)

    @classmethod
    def tearDownClass(cls):
        NotificationQueue.deregister(cls._handle_task, 'cd_task_finished')
        NotificationQueue.stop()

    def setUp(self):
        # Restore defaults that individual tests (e.g. cleanup) override.
        TaskManager.FINISHED_TASK_SIZE = 10
        TaskManager.FINISHED_TASK_TTL = 60.0

    def assertTaskResult(self, result):
        """Assert ``result`` is the canonical MyTask return value."""
        self.assertEqual(result,
                         {'args': ['dummy arg'], 'kwargs': {'dummy': 'arg'}})

    def test_fast_task(self):
        # Task finishes within the (infinite) wait -> DONE with result.
        task1 = MyTask(1)
        state, result = task1.run('test1/task1')
        self.assertEqual(state, TaskManager.VALUE_DONE)
        self.assertTaskResult(result)
        self.wait_for_task('test1/task1')
        _, fn_t = TaskManager.list('test1/*')
        self.assertEqual(len(fn_t), 1)
        self.assertIsNone(fn_t[0].exception)
        self.assertTaskResult(fn_t[0].ret_value)
        self.assertEqual(fn_t[0].progress, 100)

    def test_slow_task(self):
        # Wait shorter than the op -> EXECUTING first, finished later.
        task1 = MyTask(1)
        state, result = task1.run('test2/task1', 0.5)
        self.assertEqual(state, TaskManager.VALUE_EXECUTING)
        self.assertIsNone(result)
        self.wait_for_task('test2/task1')
        _, fn_t = TaskManager.list('test2/*')
        self.assertEqual(len(fn_t), 1)
        self.assertIsNone(fn_t[0].exception)
        self.assertTaskResult(fn_t[0].ret_value)
        self.assertEqual(fn_t[0].progress, 100)

    def test_fast_task_with_failure(self):
        task1 = MyTask(1, fail=True, progress=40)

        with self.assertRaises(Exception) as ctx:
            task1.run('test3/task1')

        self.assertEqual(str(ctx.exception), "Task Unexpected Exception")
        self.wait_for_task('test3/task1')
        _, fn_t = TaskManager.list('test3/*')
        self.assertEqual(len(fn_t), 1)
        self.assertIsNone(fn_t[0].ret_value)
        self.assertEqual(str(fn_t[0].exception), "Task Unexpected Exception")
        self.assertEqual(fn_t[0].progress, 40)

    def test_slow_task_with_failure(self):
        task1 = MyTask(1, fail=True, progress=70)
        state, result = task1.run('test4/task1', 0.5)
        self.assertEqual(state, TaskManager.VALUE_EXECUTING)
        self.assertIsNone(result)
        self.wait_for_task('test4/task1')
        _, fn_t = TaskManager.list('test4/*')
        self.assertEqual(len(fn_t), 1)
        self.assertIsNone(fn_t[0].ret_value)
        self.assertEqual(str(fn_t[0].exception), "Task Unexpected Exception")
        self.assertEqual(fn_t[0].progress, 70)

    def test_executing_tasks_list(self):
        # Two blocked tasks must both show up as executing, and disappear
        # from the executing list (one by one) as they are resumed.
        task1 = MyTask(0, wait=True, progress=30)
        task2 = MyTask(0, wait=True, progress=60)
        state, result = task1.run('test5/task1', 0.5)
        self.assertEqual(state, TaskManager.VALUE_EXECUTING)
        self.assertIsNone(result)
        ex_t, _ = TaskManager.list('test5/*')
        self.assertEqual(len(ex_t), 1)
        self.assertEqual(ex_t[0].name, 'test5/task1')
        self.assertEqual(ex_t[0].progress, 30)
        state, result = task2.run('test5/task2', 0.5)
        self.assertEqual(state, TaskManager.VALUE_EXECUTING)
        self.assertIsNone(result)
        ex_t, _ = TaskManager.list('test5/*')
        self.assertEqual(len(ex_t), 2)
        for task in ex_t:
            if task.name == 'test5/task1':
                self.assertEqual(task.progress, 30)
            elif task.name == 'test5/task2':
                self.assertEqual(task.progress, 60)
        task2.resume()
        self.wait_for_task('test5/task2')
        ex_t, _ = TaskManager.list('test5/*')
        self.assertEqual(len(ex_t), 1)
        self.assertEqual(ex_t[0].name, 'test5/task1')
        task1.resume()
        self.wait_for_task('test5/task1')
        ex_t, _ = TaskManager.list('test5/*')
        self.assertEqual(len(ex_t), 0)

    def test_task_idempotent(self):
        # Running the same task name twice must not spawn a second task.
        task1 = MyTask(0, wait=True)
        task1_clone = MyTask(0, wait=True)
        state, result = task1.run('test6/task1', 0.5)
        self.assertEqual(state, TaskManager.VALUE_EXECUTING)
        self.assertIsNone(result)
        ex_t, _ = TaskManager.list('test6/*')
        self.assertEqual(len(ex_t), 1)
        self.assertEqual(ex_t[0].name, 'test6/task1')
        state, result = task1_clone.run('test6/task1', 0.5)
        self.assertEqual(state, TaskManager.VALUE_EXECUTING)
        self.assertIsNone(result)
        ex_t, _ = TaskManager.list('test6/*')
        self.assertEqual(len(ex_t), 1)
        self.assertEqual(ex_t[0].name, 'test6/task1')
        task1.resume()
        self.wait_for_task('test6/task1')
        ex_t, fn_t = TaskManager.list('test6/*')
        self.assertEqual(len(ex_t), 0)
        self.assertEqual(len(fn_t), 1)

    def test_finished_cleanup(self):
        TaskManager.FINISHED_TASK_SIZE = 2
        TaskManager.FINISHED_TASK_TTL = 0.5
        task1 = MyTask(0)
        task2 = MyTask(0)
        state, result = task1.run('test7/task1')
        self.assertEqual(state, TaskManager.VALUE_DONE)
        self.assertTaskResult(result)
        self.wait_for_task('test7/task1')
        state, result = task2.run('test7/task2')
        self.assertEqual(state, TaskManager.VALUE_DONE)
        self.assertTaskResult(result)
        self.wait_for_task('test7/task2')
        time.sleep(1)
        _, fn_t = TaskManager.list('test7/*')
        self.assertEqual(len(fn_t), 2)
        for idx, task in enumerate(fn_t):
            self.assertEqual(task.name,
                             "test7/task{}".format(len(fn_t)-idx))
        task3 = MyTask(0)
        state, result = task3.run('test7/task3')
        self.assertEqual(state, TaskManager.VALUE_DONE)
        self.assertTaskResult(result)
        self.wait_for_task('test7/task3')
        time.sleep(1)
        # NOTE(review): the first list() still returns all three tasks;
        # presumably it triggers the TTL/size cleanup whose effect is only
        # visible on the following call - confirm against TaskManager.list.
        _, fn_t = TaskManager.list('test7/*')
        self.assertEqual(len(fn_t), 3)
        for idx, task in enumerate(fn_t):
            self.assertEqual(task.name,
                             "test7/task{}".format(len(fn_t)-idx))
        _, fn_t = TaskManager.list('test7/*')
        self.assertEqual(len(fn_t), 2)
        for idx, task in enumerate(fn_t):
            self.assertEqual(task.name,
                             "test7/task{}".format(len(fn_t)-idx+1))

    def test_task_serialization_format(self):
        task1 = MyTask(0, wait=True, progress=20)
        task2 = MyTask(1)
        task1.run('test8/task1', 0.5)
        task2.run('test8/task2', 0.5)
        self.wait_for_task('test8/task2')
        ex_t, fn_t = TaskManager.list_serializable('test8/*')
        self.assertEqual(len(ex_t), 1)
        self.assertEqual(len(fn_t), 1)

        # Fix: json.dumps signals unserializable objects with TypeError (as
        # the other serialization tests below correctly catch); the original
        # ValueError guard could never trip.
        try:
            json.dumps(ex_t)
        except TypeError as ex:
            self.fail("Failed to serialize executing tasks: {}".format(str(ex)))

        try:
            json.dumps(fn_t)
        except TypeError as ex:
            self.fail("Failed to serialize finished tasks: {}".format(str(ex)))

        # validate executing tasks attributes
        self.assertEqual(len(ex_t[0].keys()), 4)
        self.assertEqual(ex_t[0]['name'], 'test8/task1')
        self.assertEqual(ex_t[0]['metadata'], task1.metadata())
        self.assertIsNotNone(ex_t[0]['begin_time'])
        self.assertEqual(ex_t[0]['progress'], 20)
        # validate finished tasks attributes
        self.assertEqual(len(fn_t[0].keys()), 9)
        self.assertEqual(fn_t[0]['name'], 'test8/task2')
        self.assertEqual(fn_t[0]['metadata'], task2.metadata())
        self.assertIsNotNone(fn_t[0]['begin_time'])
        self.assertIsNotNone(fn_t[0]['end_time'])
        self.assertGreaterEqual(fn_t[0]['duration'], 1.0)
        self.assertEqual(fn_t[0]['progress'], 100)
        self.assertTrue(fn_t[0]['success'])
        self.assertTaskResult(fn_t[0]['ret_value'])
        self.assertIsNone(fn_t[0]['exception'])
        task1.resume()
        self.wait_for_task('test8/task1')

    def test_fast_async_task(self):
        task1 = MyTask(1, is_async=True)
        state, result = task1.run('test9/task1')
        self.assertEqual(state, TaskManager.VALUE_DONE)
        self.assertTaskResult(result)
        self.wait_for_task('test9/task1')
        _, fn_t = TaskManager.list('test9/*')
        self.assertEqual(len(fn_t), 1)
        self.assertIsNone(fn_t[0].exception)
        self.assertTaskResult(fn_t[0].ret_value)
        self.assertEqual(fn_t[0].progress, 100)

    def test_slow_async_task(self):
        task1 = MyTask(1, is_async=True)
        state, result = task1.run('test10/task1', 0.5)
        self.assertEqual(state, TaskManager.VALUE_EXECUTING)
        self.assertIsNone(result)
        self.wait_for_task('test10/task1')
        _, fn_t = TaskManager.list('test10/*')
        self.assertEqual(len(fn_t), 1)
        self.assertIsNone(fn_t[0].exception)
        self.assertTaskResult(fn_t[0].ret_value)
        self.assertEqual(fn_t[0].progress, 100)

    def test_fast_async_task_with_failure(self):
        task1 = MyTask(1, fail=True, progress=40, is_async=True)

        with self.assertRaises(Exception) as ctx:
            task1.run('test11/task1')

        self.assertEqual(str(ctx.exception), "Task Unexpected Exception")
        self.wait_for_task('test11/task1')
        _, fn_t = TaskManager.list('test11/*')
        self.assertEqual(len(fn_t), 1)
        self.assertIsNone(fn_t[0].ret_value)
        self.assertEqual(str(fn_t[0].exception), "Task Unexpected Exception")
        self.assertEqual(fn_t[0].progress, 40)

    def test_slow_async_task_with_failure(self):
        task1 = MyTask(1, fail=True, progress=70, is_async=True)
        state, result = task1.run('test12/task1', 0.5)
        self.assertEqual(state, TaskManager.VALUE_EXECUTING)
        self.assertIsNone(result)
        self.wait_for_task('test12/task1')
        _, fn_t = TaskManager.list('test12/*')
        self.assertEqual(len(fn_t), 1)
        self.assertIsNone(fn_t[0].ret_value)
        self.assertEqual(str(fn_t[0].exception), "Task Unexpected Exception")
        self.assertEqual(fn_t[0].progress, 70)

    def test_fast_async_task_with_premature_failure(self):
        # The async op raises before it can even start its worker thread.
        task1 = MyTask(1, fail="premature", progress=40, is_async=True)

        with self.assertRaises(Exception) as ctx:
            task1.run('test13/task1')

        self.assertEqual(str(ctx.exception), "Task Unexpected Exception")
        self.wait_for_task('test13/task1')
        _, fn_t = TaskManager.list('test13/*')
        self.assertEqual(len(fn_t), 1)
        self.assertIsNone(fn_t[0].ret_value)
        self.assertEqual(str(fn_t[0].exception), "Task Unexpected Exception")

    def test_task_serialization_format_on_failure(self):
        # Without an exception handler the exception serializes to a plain
        # {"detail": ...} dict.
        task1 = MyTask(1, fail=True)
        task1.run('test14/task1', 0.5)
        self.wait_for_task('test14/task1')
        ex_t, fn_t = TaskManager.list_serializable('test14/*')
        self.assertEqual(len(ex_t), 0)
        self.assertEqual(len(fn_t), 1)
        # validate finished tasks attributes

        try:
            json.dumps(fn_t)
        except TypeError as ex:
            self.fail("Failed to serialize finished tasks: {}".format(str(ex)))

        self.assertEqual(len(fn_t[0].keys()), 9)
        self.assertEqual(fn_t[0]['name'], 'test14/task1')
        self.assertEqual(fn_t[0]['metadata'], task1.metadata())
        self.assertIsNotNone(fn_t[0]['begin_time'])
        self.assertIsNotNone(fn_t[0]['end_time'])
        self.assertGreaterEqual(fn_t[0]['duration'], 1.0)
        self.assertEqual(fn_t[0]['progress'], 50)
        self.assertFalse(fn_t[0]['success'])
        self.assertIsNotNone(fn_t[0]['exception'])
        self.assertEqual(fn_t[0]['exception'],
                         {"detail": "Task Unexpected Exception"})

    def test_task_serialization_format_on_failure_with_handler(self):
        # With serialize_dashboard_exception as handler the exception is
        # enriched with component/status and the task description.
        task1 = MyTask(1, fail=True, handle_ex=True)
        task1.run('test15/task1', 0.5)
        self.wait_for_task('test15/task1')
        ex_t, fn_t = TaskManager.list_serializable('test15/*')
        self.assertEqual(len(ex_t), 0)
        self.assertEqual(len(fn_t), 1)
        # validate finished tasks attributes

        try:
            json.dumps(fn_t)
        except TypeError as ex:
            self.fail("Failed to serialize finished tasks: {}".format(str(ex)))

        self.assertEqual(len(fn_t[0].keys()), 9)
        self.assertEqual(fn_t[0]['name'], 'test15/task1')
        self.assertEqual(fn_t[0]['metadata'], task1.metadata())
        self.assertIsNotNone(fn_t[0]['begin_time'])
        self.assertIsNotNone(fn_t[0]['end_time'])
        self.assertGreaterEqual(fn_t[0]['duration'], 1.0)
        self.assertEqual(fn_t[0]['progress'], 50)
        self.assertFalse(fn_t[0]['success'])
        self.assertIsNotNone(fn_t[0]['exception'])
        self.assertEqual(fn_t[0]['exception'], {
            'component': None,
            'detail': 'Task Unexpected Exception',
            'status': 500,
            'task': {
                'metadata': {
                    'fail': True,
                    'handle_ex': True,
                    'is_async': False,
                    'op_seconds': 1,
                    'progress': 50,
                    'wait': False},
                'name': 'test15/task1'
            }
        })
diff --git a/src/pybind/mgr/dashboard/tests/test_tools.py b/src/pybind/mgr/dashboard/tests/test_tools.py
new file mode 100644
index 00000000..1c2c2e5b
--- /dev/null
+++ b/src/pybind/mgr/dashboard/tests/test_tools.py
@@ -0,0 +1,188 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import unittest
+
+import cherrypy
+from cherrypy.lib.sessions import RamSession
+from mock import patch
+
+from . import ControllerTestCase
+from ..services.exception import handle_rados_error
+from ..controllers import RESTController, ApiController, Controller, \
+ BaseController, Proxy
+from ..tools import is_valid_ipv6_address, dict_contains_path, \
+ RequestLoggingTool
+
+
# pylint: disable=W0613
@Controller('/foo', secure=False)
class FooResource(RESTController):
    """Minimal REST resource backed by a class-level in-memory list."""

    # Shared storage for every request; cleared via bulk_delete().
    elems = []

    def list(self):
        """Return all stored elements."""
        return FooResource.elems

    def create(self, a):
        """Store a new element and echo it back to the caller."""
        elem = {'a': a}
        FooResource.elems.append(dict(elem))
        return elem

    def get(self, key):
        """Return the requested key together with an empty method list."""
        detail = (key, [])
        return {'detail': detail}

    def delete(self, key):
        """Remove the element at position *key* (stringified index)."""
        FooResource.elems.pop(int(key))

    def bulk_delete(self):
        """Drop every stored element."""
        FooResource.elems = []

    def set(self, key, newdata):
        """Replace the element at *key* and return the update summary."""
        index = int(key)
        FooResource.elems[index] = {'newdata': newdata}
        return {'key': key, 'newdata': newdata}
+
+
@Controller('/foo/:key/:method', secure=False)
class FooResourceDetail(RESTController):
    """Read-only resource that echoes its two path parameters."""

    def list(self, key, method):
        """Return *key* and *method* in the same shape as FooResource.get."""
        detail = (key, [method])
        return {'detail': detail}
+
+
@ApiController('/rgw/proxy', secure=False)
class GenerateControllerRoutesController(BaseController):
    """Controller whose routes are generated entirely by the @Proxy
    decorator; it only needs to exist and be mounted for the route
    generation to be exercised."""
    @Proxy()
    def __call__(self, path, **params):
        # Dispatch is handled by the Proxy decorator; no body is needed.
        pass
+
+
@ApiController('/fooargs', secure=False)
class FooArgs(RESTController):
    """Controller used to exercise argument passing from JSON bodies."""

    def set(self, code, name=None, opt1=None, opt2=None):
        """Echo the positional and keyword arguments back as a dict."""
        return {
            'code': code,
            'name': name,
            'opt1': opt1,
            'opt2': opt2,
        }

    @handle_rados_error('foo')
    def create(self, my_arg_name):
        """Return the single argument; wrapped to test rados error handling."""
        return my_arg_name

    def list(self):
        """Always raise 404 to exercise the NotFound error path."""
        raise cherrypy.NotFound()
+
+
# pylint: disable=blacklisted-name
class Root(object):
    """Application root wiring the test controllers together."""
    # Instances mounted as sub-resources of the root.
    foo = FooResource()
    fooargs = FooArgs()
+
+
class RESTControllerTest(ControllerTestCase):
    """End-to-end tests of RESTController routing and (de)serialization."""

    @classmethod
    def setup_server(cls):
        # Mount all test controllers on the cherrypy test server.
        cls.setup_controllers(
            [FooResource, FooResourceDetail, FooArgs, GenerateControllerRoutesController])

    def test_empty(self):
        # bulk_delete clears the shared list; a subsequent GET returns [].
        self._delete("/foo")
        self.assertStatus(204)
        self._get("/foo")
        self.assertStatus('200 OK')
        self.assertHeader('Content-Type', 'application/json')
        self.assertBody('[]')

    def test_fill(self):
        # Patch in an in-memory session so the controller can run inside
        # the test request cycle.
        sess_mock = RamSession()
        with patch('cherrypy.session', sess_mock, create=True):
            data = {'a': 'b'}
            # Each POST must echo the created element with a 201.
            for _ in range(5):
                self._post("/foo", data)
                self.assertJsonBody(data)
                self.assertStatus(201)
                self.assertHeader('Content-Type', 'application/json')

            # The collection now contains the five identical elements.
            self._get("/foo")
            self.assertStatus('200 OK')
            self.assertHeader('Content-Type', 'application/json')
            self.assertJsonBody([data] * 5)

            # PUT on /foo/<key> dispatches to FooResource.set.
            self._put('/foo/0', {'newdata': 'newdata'})
            self.assertStatus('200 OK')
            self.assertHeader('Content-Type', 'application/json')
            self.assertJsonBody({'newdata': 'newdata', 'key': '0'})

    def test_not_implemented(self):
        # PUT on the collection (no key) has no route -> 404 with a JSON
        # error body describing the missing path.
        self._put("/foo")
        self.assertStatus(404)
        body = self.jsonBody()
        self.assertIsInstance(body, dict)
        assert body['detail'] == "The path '/foo' was not found."
        assert '404' in body['status']

    def test_args_from_json(self):
        # Optional arguments absent from the JSON body must default to None.
        self._put("/api/fooargs/hello", {'name': 'world'})
        self.assertJsonBody({'code': 'hello', 'name': 'world', 'opt1': None, 'opt2': None})

        self._put("/api/fooargs/hello", {'name': 'world', 'opt1': 'opt1'})
        self.assertJsonBody({'code': 'hello', 'name': 'world', 'opt1': 'opt1', 'opt2': None})

        self._put("/api/fooargs/hello", {'name': 'world', 'opt2': 'opt2'})
        self.assertJsonBody({'code': 'hello', 'name': 'world', 'opt1': None, 'opt2': 'opt2'})

    def test_detail_route(self):
        # /foo/<key> is served by FooResource.get; /foo/<key>/<method> by
        # FooResourceDetail.list.
        self._get('/foo/default')
        self.assertJsonBody({'detail': ['default', []]})

        self._get('/foo/default/default')
        self.assertJsonBody({'detail': ['default', ['default']]})

        self._get('/foo/1/detail')
        self.assertJsonBody({'detail': ['1', ['detail']]})

        # The detail controller defines no POST handler -> 404.
        self._post('/foo/1/detail', 'post-data')
        self.assertStatus(404)

    def test_generate_controller_routes(self):
        # We just need to add this controller in setup_server():
        # noinspection PyStatementEffect
        # pylint: disable=pointless-statement
        GenerateControllerRoutesController
+
+
class RequestLoggingToolTest(ControllerTestCase):
    """Checks that RequestLoggingTool emits one debug record per request."""

    def __init__(self, *args, **kwargs):
        # Register and enable the logging tool globally before the test
        # server handles any request.
        cherrypy.tools.request_logging = RequestLoggingTool()
        cherrypy.config.update({'tools.request_logging.on': True})
        super(RequestLoggingToolTest, self).__init__(*args, **kwargs)

    @classmethod
    def setup_server(cls):
        cls.setup_controllers([FooResource])

    def test_is_logged(self):
        with patch('logging.Logger.debug') as mock_logger_debug:
            self._put('/foo/0', {'newdata': 'xyz'})
            self.assertStatus(200)
            call_args_list = mock_logger_debug.call_args_list
            # Unpack the first debug record's positional args; the layout
            # appears to be (fmt, host, ?, method, user, path) -- the two
            # skipped slots are not asserted here.
            _, host, _, method, user, path = call_args_list[0][0]
            self.assertEqual(host, '127.0.0.1')
            self.assertEqual(method, 'PUT')
            # No user is authenticated in this test setup.
            self.assertIsNone(user)
            self.assertEqual(path, '/foo/0')
+
+
class TestFunctions(unittest.TestCase):
    """Unit tests for the standalone helpers in dashboard.tools."""

    def test_is_valid_ipv6_address(self):
        self.assertTrue(is_valid_ipv6_address('::'))
        self.assertTrue(is_valid_ipv6_address('::1'))
        # IPv4 and hostnames are not valid IPv6 addresses.
        self.assertFalse(is_valid_ipv6_address('127.0.0.1'))
        self.assertFalse(is_valid_ipv6_address('localhost'))
        self.assertTrue(is_valid_ipv6_address('1200:0000:AB00:1234:0000:2552:7777:1313'))
        # '::' may appear at most once in a valid IPv6 address.
        self.assertFalse(is_valid_ipv6_address('1200::AB00:1234::2552:7777:1313'))

    def test_dict_contains_path(self):
        x = {'a': {'b': {'c': 'foo'}}}
        self.assertTrue(dict_contains_path(x, ['a', 'b', 'c']))
        # Prefix paths are also found. (This assertion previously duplicated
        # the full-path check above; it now covers the intermediate prefix.)
        self.assertTrue(dict_contains_path(x, ['a', 'b']))
        self.assertTrue(dict_contains_path(x, ['a']))
        # A key that exists at a deeper level does not match at the top.
        self.assertFalse(dict_contains_path(x, ['a', 'c']))
        # An empty path is trivially contained.
        self.assertTrue(dict_contains_path(x, []))